diff --git a/.git-blame-ignore-revs b/.git-blame-ignore-revs index d2a017bd2a023..af071c706856e 100644 --- a/.git-blame-ignore-revs +++ b/.git-blame-ignore-revs @@ -31,3 +31,5 @@ ec2cc761bc7067712ecc7734502f703fe3b024c8 c682aa162b0d41e21cc6748f4fecfe01efb69d1f # reformat with updated edition 2024 1fcae03369abb4c2cc180cd5a49e1f4440a81300 +# Breaking up of compiletest runtest.rs +60600a6fa403216bfd66e04f948b1822f6450af7 diff --git a/.github/ISSUE_TEMPLATE/library_tracking_issue.md b/.github/ISSUE_TEMPLATE/library_tracking_issue.md index 934312662beb6..d56da9d5d025a 100644 --- a/.github/ISSUE_TEMPLATE/library_tracking_issue.md +++ b/.github/ISSUE_TEMPLATE/library_tracking_issue.md @@ -2,7 +2,7 @@ name: Library Tracking Issue about: A tracking issue for an unstable library feature. title: Tracking Issue for XXX -labels: C-tracking-issue, T-libs-api +labels: C-tracking-issue, T-libs-api, S-tracking-unimplemented --- +(Remember to update the `S-tracking-*` label when checking boxes.) + - [ ] Implementation: #... - [ ] Final comment period (FCP)[^1] - [ ] Stabilization PR diff --git a/.github/ISSUE_TEMPLATE/tracking_issue.md b/.github/ISSUE_TEMPLATE/tracking_issue.md index 3a9d8408b3c9d..aedc15a54c274 100644 --- a/.github/ISSUE_TEMPLATE/tracking_issue.md +++ b/.github/ISSUE_TEMPLATE/tracking_issue.md @@ -41,7 +41,10 @@ for larger features an implementation could be broken up into multiple PRs. - [ ] Implement the RFC (cc @rust-lang/XXX -- can anyone write up mentoring instructions?) - [ ] Adjust documentation ([see instructions on rustc-dev-guide][doc-guide]) -- [ ] Formatting for new syntax has been added to the [Style Guide] ([nightly-style-procedure]) +- [ ] Style updates for any new syntax ([nightly-style-procedure]) + - [ ] Style team decision on new formatting + - [ ] Formatting for new syntax has been added to the [Style Guide] + - [ ] (non-blocking) Formatting has been implemented in `rustfmt` - [ ] Stabilization PR ([see instructions on rustc-dev-guide][stabilization-guide]) [stabilization-guide]: https://rustc-dev-guide.rust-lang.org/stabilization_guide.html#stabilization-pr diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 51dd0f81ed147..93316b9cff7b1 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -53,6 +53,13 @@ jobs: steps: - name: Checkout the source code uses: actions/checkout@v4 + # Cache citool to make its build faster, as it's in the critical path. + # The rust-cache doesn't bleed into the main `job`, so it should not affect any other + # Rust compilation. + - name: Cache citool + uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8 + with: + workspaces: src/ci/citool - name: Calculate the CI job matrix env: COMMIT_MESSAGE: ${{ github.event.head_commit.message }} @@ -84,6 +91,17 @@ jobs: # Check the `calculate_matrix` job to see how is the matrix defined. include: ${{ fromJSON(needs.calculate_matrix.outputs.jobs) }} steps: + - name: Install cargo in AWS CodeBuild + if: matrix.codebuild + run: | + # Check if cargo is installed + if ! command -v cargo &> /dev/null; then + echo "Cargo not found, installing Rust..." + curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y --profile=minimal + # Make cargo available in PATH + echo "$HOME/.cargo/bin" >> $GITHUB_PATH + fi + - name: disable git crlf conversion run: git config --global core.autocrlf false @@ -118,9 +136,6 @@ jobs: # which then uses log commands to actually set them. EXTRA_VARIABLES: ${{ toJson(matrix.env) }} - - name: setup upstream remote - run: src/ci/scripts/setup-upstream-remote.sh - - name: ensure the channel matches the target branch run: src/ci/scripts/verify-channel.sh @@ -161,6 +176,8 @@ jobs: run: src/ci/scripts/install-ninja.sh - name: enable ipv6 on Docker + # Don't run on codebuild because systemctl is not available + if: ${{ !matrix.codebuild }} run: src/ci/scripts/enable-docker-ipv6.sh # Disable automatic line ending conversion (again). On Windows, when we're diff --git a/.github/workflows/post-merge.yml b/.github/workflows/post-merge.yml index 94553608a2f48..ca088ba31fdf9 100644 --- a/.github/workflows/post-merge.yml +++ b/.github/workflows/post-merge.yml @@ -25,12 +25,19 @@ jobs: env: GH_TOKEN: ${{ github.token }} run: | + # Give GitHub some time to propagate the information that the PR was merged + sleep 60 + # Get closest bors merge commit PARENT_COMMIT=`git rev-list --author='bors ' -n1 --first-parent HEAD^1` echo "Parent: ${PARENT_COMMIT}" # Find PR for the current commit HEAD_PR=`gh pr list --search "${{ github.sha }}" --state merged --json number --jq '.[0].number'` + if [ -z "${HEAD_PR}" ]; then + echo "PR for commit SHA ${{ github.sha }} not found, exiting" + exit 1 + fi echo "HEAD: ${{ github.sha }} (#${HEAD_PR})" cd src/ci/citool diff --git a/.gitmodules b/.gitmodules index 97a0c0c54cf9f..d09d81ccadcb0 100644 --- a/.gitmodules +++ b/.gitmodules @@ -45,7 +45,7 @@ shallow = true [submodule "src/tools/enzyme"] path = src/tools/enzyme - url = https://github.com/EnzymeAD/Enzyme.git + url = https://github.com/rust-lang/Enzyme.git shallow = true [submodule "src/gcc"] path = src/gcc diff --git a/.mailmap b/.mailmap index a791daa681d47..c3ce111bfe3b4 100644 --- a/.mailmap +++ b/.mailmap @@ -276,7 +276,7 @@ Jacob Greenfield Jacob Pratt Jacob Pratt Jake Goulding -Jake Goulding +Jake Goulding Jake Goulding Jake Vossen Jakob Degen @@ -292,6 +292,7 @@ James Hinshelwood James Miller James Perry James Sanderson +Jamie Hill-Daniel Jana Dönszelmann Jana Dönszelmann Jana Dönszelmann @@ -408,10 +409,13 @@ Luqman Aden Luqman Aden Lzu Tao Maik Klein +Maja Kądziołka +Maja Kądziołka Malo Jaffré Manish Goregaokar Mara Bos Marcell Pardavi +Marco Ieni <11428655+MarcoIeni@users.noreply.github.com> Marcus Klaas de Vries Margaret Meyerhofer Mark Mansi @@ -565,6 +569,9 @@ Robert Habermeier Robert Millar Roc Yu Rohit Joshi Rohit Joshi +Ross Smyth <18294397+RossSmyth@users.noreply.github.com> +Ross Smyth <18294397+RossSmyth@users.noreply.github.com> +Ross Smyth <18294397+RossSmyth@users.noreply.github.com> Roxane Fruytier Rui Russell Johnston diff --git a/Cargo.lock b/Cargo.lock index a5d8d6f86d77d..fa0fa33ea75ac 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -158,12 +158,9 @@ dependencies = [ [[package]] name = "anyhow" -version = "1.0.95" +version = "1.0.98" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "34ac096ce696dc2fcabef30516bb13c0a68a11d30131d3df6f04711467681b04" -dependencies = [ - "backtrace", -] +checksum = "e16d2d3311acee920a9eb8d33b8cbc1787ce4a264e85f964c2404b969bdcd487" [[package]] name = "ar_archive_writer" @@ -186,6 +183,48 @@ version = "0.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7c02d123df017efcdfbd739ef81735b36c5ba83ec3c59c80a9d7ecc718f92e50" +[[package]] +name = "askama" +version = "0.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5d4744ed2eef2645831b441d8f5459689ade2ab27c854488fbab1fbe94fce1a7" +dependencies = [ + "askama_derive", + "itoa", + "percent-encoding", + "serde", + "serde_json", +] + +[[package]] +name = "askama_derive" +version = "0.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d661e0f57be36a5c14c48f78d09011e67e0cb618f269cca9f2fd8d15b68c46ac" +dependencies = [ + "askama_parser", + "basic-toml", + "memchr", + "proc-macro2", + "quote", + "rustc-hash 2.1.1", + "serde", + "serde_derive", + "syn 2.0.101", +] + +[[package]] +name = "askama_parser" +version = "0.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cf315ce6524c857bb129ff794935cf6d42c82a6cff60526fe2a63593de4d0d4f" +dependencies = [ + "memchr", + "serde", + "serde_derive", + "winnow 0.7.9", +] + [[package]] name = "autocfg" version = "1.4.0" @@ -215,9 +254,9 @@ checksum = "9d297deb1925b89f2ccc13d7635fa0714f12c87adce1c75356b39ca9b7178567" [[package]] name = "basic-toml" -version = "0.1.9" +version = "0.1.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "823388e228f614e9558c6804262db37960ec8821856535f5c3f59913140558f8" +checksum = "ba62675e8242a4c4e806d12f11d136e626e6c8361d6b829310732241652a178a" dependencies = [ "serde", ] @@ -233,15 +272,15 @@ dependencies = [ [[package]] name = "bitflags" -version = "2.8.0" +version = "2.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f68f53c83ab957f72c32642f3868eec03eb974d1fb82e453128456482613d36" +checksum = "5c8214115b7bf84099f1309324e63141d4c5d7cc26862f97a0a857dbefe165bd" [[package]] name = "blake3" -version = "1.5.5" +version = "1.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b8ee0c1824c4dea5b5f81736aff91bae041d2c07ee1192bec91054e10e3e601e" +checksum = "3888aaa89e4b2a40fca9848e400f6a658a5a3978de7be858e209cafa8be9a4a0" dependencies = [ "arrayref", "arrayvec", @@ -261,9 +300,9 @@ dependencies = [ [[package]] name = "bstr" -version = "1.11.3" +version = "1.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "531a9155a481e2ee699d4f98f43c0ca4ff8ee1bfd55c31e9e98fb29d2b176fe0" +checksum = "234113d19d0d7d613b40e86fb654acf958910802bcceab913a4f9e7cda03b1a4" dependencies = [ "memchr", "regex-automata 0.4.9", @@ -308,9 +347,9 @@ dependencies = [ [[package]] name = "bumpalo" -version = "3.16.0" +version = "3.17.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "79296716171880943b8470b5f8d03aa55eb2e645a4874bdbb28adb49162e012c" +checksum = "1628fb46dfa0b37568d12e5edd512553eccf6a22a78e8bde00bb4aed84d5bdbf" [[package]] name = "bytecount" @@ -318,17 +357,11 @@ version = "0.6.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5ce89b21cab1437276d2650d57e971f9d548a2d9037cc231abdc0562b97498ce" -[[package]] -name = "byteorder" -version = "1.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" - [[package]] name = "bytes" -version = "1.9.0" +version = "1.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "325918d6fe32f23b19878fe4b34794ae41fc19ddbe53b10571a4874d44ffd39b" +checksum = "d71b6127be86fdcfddb610f7182ac57211d4b18a3e9c82eb2d17662f2227ad6a" [[package]] name = "camino" @@ -343,10 +376,10 @@ dependencies = [ name = "cargo-miri" version = "0.1.0" dependencies = [ - "cargo_metadata 0.18.1", + "cargo_metadata 0.19.2", "directories", "rustc-build-sysroot", - "rustc_tools_util 0.4.0", + "rustc_tools_util 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", "rustc_version", "serde", "serde_json", @@ -377,16 +410,16 @@ dependencies = [ [[package]] name = "cargo_metadata" -version = "0.19.1" +version = "0.19.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8769706aad5d996120af43197bf46ef6ad0fda35216b4505f926a365a232d924" +checksum = "dd5eb614ed4c27c5d706420e4320fbe3216ab31fa1c33cd8246ac36dae4479ba" dependencies = [ "camino", "cargo-platform", "semver", "serde", "serde_json", - "thiserror 2.0.11", + "thiserror 2.0.12", ] [[package]] @@ -416,22 +449,22 @@ checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724" [[package]] name = "chrono" -version = "0.4.39" +version = "0.4.41" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7e36cc9d416881d2e24f9a963be5fb1cd90966419ac844274161d10488b3e825" +checksum = "c469d952047f47f91b68d1cba3f10d63c11d73e4636f24f08daf0278abf01c4d" dependencies = [ "android-tzdata", "iana-time-zone", "num-traits", "serde", - "windows-targets 0.52.6", + "windows-link", ] [[package]] name = "chrono-tz" -version = "0.10.1" +version = "0.10.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c6ac4f2c0bf0f44e9161aec9675e1050aa4a530663c4a9e37e108fa948bca9f" +checksum = "efdce149c370f133a071ca8ef6ea340b7b88748ab0810097a9e2976eaa34b4f3" dependencies = [ "chrono", "chrono-tz-build", @@ -440,9 +473,9 @@ dependencies = [ [[package]] name = "chrono-tz-build" -version = "0.4.0" +version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e94fea34d77a245229e7746bd2beb786cd2a896f306ff491fb8cecb3074b10a7" +checksum = "8f10f8c9340e31fc120ff885fcdb54a0b48e474bbd77cab557f0c30a3e569402" dependencies = [ "parse-zoneinfo", "phf_codegen", @@ -460,9 +493,9 @@ dependencies = [ [[package]] name = "clap" -version = "4.5.26" +version = "4.5.37" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a8eb5e908ef3a6efbe1ed62520fb7287959888c88485abe072543190ecc66783" +checksum = "eccb054f56cbd38340b380d4a8e69ef1f02f1af43db2f0cc817a4774d80ae071" dependencies = [ "clap_builder", "clap_derive", @@ -480,9 +513,9 @@ dependencies = [ [[package]] name = "clap_builder" -version = "4.5.26" +version = "4.5.37" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "96b01801b5fc6a0a232407abc821660c9c6d25a1cafc0d4f85f29fb8d9afc121" +checksum = "efd9466fac8543255d3b1fcad4762c5e116ffe808c8a3043d4263cd4fd4862a2" dependencies = [ "anstream", "anstyle", @@ -492,14 +525,14 @@ dependencies = [ [[package]] name = "clap_derive" -version = "4.5.24" +version = "4.5.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "54b755194d6389280185988721fffba69495eed5ee9feeee9a599b53db80318c" +checksum = "09176aae279615badda0765c0c0b3f6ed53f4709118af73cf4655d85d1530cd7" dependencies = [ "heck 0.5.0", "proc-macro2", "quote", - "syn 2.0.96", + "syn 2.0.101", ] [[package]] @@ -510,12 +543,14 @@ checksum = "f46ad14479a25103f283c0f10005961cf086d8dc42205bb44c46ac563475dca6" [[package]] name = "clippy" -version = "0.1.87" +version = "0.1.88" dependencies = [ "anstream", + "askama", "cargo_metadata 0.18.1", "clippy_config", "clippy_lints", + "clippy_lints_internal", "clippy_utils", "color-print", "filetime", @@ -523,14 +558,13 @@ dependencies = [ "if_chain", "itertools", "parking_lot", - "pulldown-cmark 0.11.3", + "pulldown-cmark", "quote", "regex", - "rinja", "rustc_tools_util 0.4.2", "serde", "serde_json", - "syn 2.0.96", + "syn 2.0.101", "tempfile", "termize", "tokio", @@ -541,7 +575,7 @@ dependencies = [ [[package]] name = "clippy_config" -version = "0.1.87" +version = "0.1.88" dependencies = [ "clippy_utils", "itertools", @@ -566,7 +600,7 @@ dependencies = [ [[package]] name = "clippy_lints" -version = "0.1.87" +version = "0.1.88" dependencies = [ "arrayvec", "cargo_metadata 0.18.1", @@ -574,12 +608,9 @@ dependencies = [ "clippy_utils", "itertools", "quine-mc_cluskey", - "regex", "regex-syntax 0.8.5", "semver", "serde", - "serde_json", - "tempfile", "toml 0.7.8", "unicode-normalization", "unicode-script", @@ -587,9 +618,19 @@ dependencies = [ "walkdir", ] +[[package]] +name = "clippy_lints_internal" +version = "0.0.1" +dependencies = [ + "clippy_config", + "clippy_utils", + "regex", + "rustc-semver", +] + [[package]] name = "clippy_utils" -version = "0.1.87" +version = "0.1.88" dependencies = [ "arrayvec", "itertools", @@ -609,16 +650,16 @@ dependencies = [ [[package]] name = "color-eyre" -version = "0.6.3" +version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "55146f5e46f237f7423d74111267d4597b59b0dad0ffaf7303bce9945d843ad5" +checksum = "e6e1761c0e16f8883bbbb8ce5990867f4f06bf11a0253da6495a04ce4b6ef0ec" dependencies = [ "backtrace", "color-spantrace", "eyre", "indenter", "once_cell", - "owo-colors", + "owo-colors 4.2.0", "tracing-error", ] @@ -640,17 +681,17 @@ dependencies = [ "nom", "proc-macro2", "quote", - "syn 2.0.96", + "syn 2.0.101", ] [[package]] name = "color-spantrace" -version = "0.2.1" +version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cd6be1b2a7e382e2b98b43b2adcca6bb0e465af0bdd38123873ae61eb17a72c2" +checksum = "2ddd8d5bfda1e11a501d0a7303f3bfed9aa632ebdb859be40d0fd70478ed70d5" dependencies = [ "once_cell", - "owo-colors", + "owo-colors 4.2.0", "tracing-core", "tracing-error", ] @@ -682,8 +723,8 @@ name = "compiletest" version = "0.0.0" dependencies = [ "anstyle-svg", - "anyhow", "build_helper", + "camino", "colored", "diff", "getopts", @@ -693,6 +734,7 @@ dependencies = [ "libc", "miow", "miropt-test-tools", + "rayon", "regex", "rustfix", "semver", @@ -707,9 +749,9 @@ dependencies = [ [[package]] name = "console" -version = "0.15.10" +version = "0.15.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ea3c6ecd8059b57859df5c69830340ed3c41d30e3da0c1cbed90a96ac853041b" +checksum = "054ccb5b10f9f2cbf51eb355ca1d05c2d279ce1804688d0db74b4733a5aeafd8" dependencies = [ "encode_unicode", "libc", @@ -735,6 +777,7 @@ name = "coverage-dump" version = "0.1.0" dependencies = [ "anyhow", + "itertools", "leb128", "md-5", "miniz_oxide 0.7.4", @@ -744,9 +787,9 @@ dependencies = [ [[package]] name = "cpufeatures" -version = "0.2.16" +version = "0.2.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "16b80225097f2e5ae4e7179dd2266824648f3e2f49d9134d584b76389d31c4c3" +checksum = "59ed5838eebb26a2bb2e58f6d5b5316989ae9d08bab10e0e6d103e656d1b0280" dependencies = [ "libc", ] @@ -762,9 +805,9 @@ dependencies = [ [[package]] name = "crossbeam-channel" -version = "0.5.14" +version = "0.5.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "06ba6d68e24814cb8de6bb986db8222d3a027d15872cabc0d18817bc3c0e4471" +checksum = "82b8f8f868b36967f9606790d1903570de9ceaf870a7bf9fbbd3016d636a2cb2" dependencies = [ "crossbeam-utils", ] @@ -806,9 +849,9 @@ dependencies = [ [[package]] name = "ctrlc" -version = "3.4.5" +version = "3.4.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "90eeab0aa92f3f9b4e87f258c72b139c207d251f9cbc1080a0086b86a8870dd3" +checksum = "697b5419f348fd5ae2478e8018cb016c00a5881c7f46c717de98ffd135a5651c" dependencies = [ "nix", "windows-sys 0.59.0", @@ -831,9 +874,9 @@ dependencies = [ [[package]] name = "curl-sys" -version = "0.4.78+curl-8.11.0" +version = "0.4.80+curl-8.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8eec768341c5c7789611ae51cf6c459099f22e64a5d5d0ce4892434e33821eaf" +checksum = "55f7df2eac63200c3ab25bde3b2268ef2ee56af3d238e76d61f01c3c49bff734" dependencies = [ "cc", "libc", @@ -846,9 +889,9 @@ dependencies = [ [[package]] name = "darling" -version = "0.20.10" +version = "0.20.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6f63b86c8a8826a49b8c21f08a2d07338eec8d900540f8630dc76284be802989" +checksum = "fc7f46116c46ff9ab3eb1597a45688b6715c6e628b5c133e288e709a29bcb4ee" dependencies = [ "darling_core", "darling_macro", @@ -856,27 +899,27 @@ dependencies = [ [[package]] name = "darling_core" -version = "0.20.10" +version = "0.20.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "95133861a8032aaea082871032f5815eb9e98cef03fa916ab4500513994df9e5" +checksum = "0d00b9596d185e565c2207a0b01f8bd1a135483d02d9b7b0a54b11da8d53412e" dependencies = [ "fnv", "ident_case", "proc-macro2", "quote", "strsim", - "syn 2.0.96", + "syn 2.0.101", ] [[package]] name = "darling_macro" -version = "0.20.10" +version = "0.20.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d336a2a514f6ccccaa3e09b02d41d35330c07ddf03a62165fcec10bb561c7806" +checksum = "fc34b93ccb385b40dc71c6fceac4b2ad23662c7eeb248cf10d529b7e055b6ead" dependencies = [ "darling_core", "quote", - "syn 2.0.96", + "syn 2.0.101", ] [[package]] @@ -896,24 +939,15 @@ dependencies = [ "winapi", ] -[[package]] -name = "deranged" -version = "0.3.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b42b6fa04a440b495c8b04d0e71b707c585f83cb9cb28cf8cd0d976c315e31b4" -dependencies = [ - "powerfmt", -] - [[package]] name = "derive-where" -version = "1.2.7" +version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "62d671cc41a825ebabc75757b62d3d168c577f9149b2d49ece1dad1f72119d25" +checksum = "e73f2692d4bd3cac41dca28934a39894200c9fabf49586d77d0e5954af1d7902" dependencies = [ "proc-macro2", "quote", - "syn 2.0.96", + "syn 2.0.101", ] [[package]] @@ -934,7 +968,7 @@ dependencies = [ "darling", "proc-macro2", "quote", - "syn 2.0.96", + "syn 2.0.101", ] [[package]] @@ -944,7 +978,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ab63b0e2bf4d5928aff72e83a7dace85d7bba5fe12dcc3c5a572d78caffd3f3c" dependencies = [ "derive_builder_core", - "syn 2.0.96", + "syn 2.0.101", ] [[package]] @@ -956,7 +990,7 @@ dependencies = [ "darling", "proc-macro2", "quote", - "syn 2.0.96", + "syn 2.0.101", ] [[package]] @@ -977,11 +1011,11 @@ dependencies = [ [[package]] name = "directories" -version = "5.0.1" +version = "6.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9a49173b84e034382284f27f1af4dcbbd231ffa358c0fe316541a7337f376a35" +checksum = "16f5094c54661b38d03bd7e50df373292118db60b585c08a411c6d840017fe7d" dependencies = [ - "dirs-sys", + "dirs-sys 0.5.0", ] [[package]] @@ -990,7 +1024,7 @@ version = "5.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "44c45a9d03d6676652bcb5e724c7e988de1acad23a711b5217ab9cbecbec2225" dependencies = [ - "dirs-sys", + "dirs-sys 0.4.1", ] [[package]] @@ -1011,10 +1045,22 @@ checksum = "520f05a5cbd335fae5a99ff7a6ab8627577660ee5cfd6a94a6a929b52ff0321c" dependencies = [ "libc", "option-ext", - "redox_users", + "redox_users 0.4.6", "windows-sys 0.48.0", ] +[[package]] +name = "dirs-sys" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e01a3366d27ee9890022452ee61b2b63a67e6f13f58900b651ff5665f0bb1fab" +dependencies = [ + "libc", + "option-ext", + "redox_users 0.5.0", + "windows-sys 0.59.0", +] + [[package]] name = "dirs-sys-next" version = "0.1.2" @@ -1022,7 +1068,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4ebda144c4fe02d1f7ea1a7d9641b6fc6b580adcfa024ae48797ecdeb6825b4d" dependencies = [ "libc", - "redox_users", + "redox_users 0.4.6", "winapi", ] @@ -1034,26 +1080,26 @@ checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" dependencies = [ "proc-macro2", "quote", - "syn 2.0.96", + "syn 2.0.101", ] [[package]] name = "dissimilar" -version = "1.0.9" +version = "1.0.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "59f8e79d1fbf76bdfbde321e902714bf6c49df88a7dda6fc682fc2979226962d" +checksum = "8975ffdaa0ef3661bfe02dbdcc06c9f829dfafe6a3c474de366a8d5e44276921" [[package]] name = "either" -version = "1.13.0" +version = "1.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "60b1af1c220855b6ceac025d3f6ecdd2b7c4894bfe9cd9bda4fbb4bc7c0d4cf0" +checksum = "48c757948c5ede0e46177b7add2e67155f70e33c07fea8284df6576da70b3719" [[package]] name = "elsa" -version = "1.11.0" +version = "1.11.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2343daaeabe09879d4ea058bb4f1e63da3fc07dadc6634e01bda1b3d6a9d9d2b" +checksum = "9abf33c656a7256451ebb7d0082c5a471820c31269e49d807c538c252352186e" dependencies = [ "stable_deref_trait", ] @@ -1085,28 +1131,28 @@ dependencies = [ [[package]] name = "env_logger" -version = "0.11.6" +version = "0.11.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dcaee3d8e3cfc3fd92428d477bc97fc29ec8716d180c0d74c643bb26166660e0" +checksum = "13c863f0904021b108aa8b2f55046443e6b1ebde8fd4a15c399893aae4fa069f" dependencies = [ "anstream", "anstyle", "env_filter", - "humantime", + "jiff", "log", ] [[package]] name = "equivalent" -version = "1.0.1" +version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" +checksum = "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f" [[package]] name = "errno" -version = "0.3.10" +version = "0.3.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "33d852cb9b869c2a9b3df2f71a3074817f01e1844f839a144f5fcef059a4eb5d" +checksum = "976dd42dc7e85965fe702eb8164f21f450704bdde31faefd6471dba214cb594e" dependencies = [ "libc", "windows-sys 0.59.0", @@ -1169,12 +1215,12 @@ dependencies = [ [[package]] name = "flate2" -version = "1.0.35" +version = "1.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c936bfdafb507ebbf50b8074c54fa31c5be9a1e7e5f467dd659697041407d07c" +checksum = "7ced92e76e966ca2fd84c8f7aa01a4aea65b0eb6648d72f7c8f3e2764a67fece" dependencies = [ "crc32fast", - "miniz_oxide 0.8.3", + "miniz_oxide 0.8.8", ] [[package]] @@ -1219,9 +1265,9 @@ checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" [[package]] name = "foldhash" -version = "0.1.4" +version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a0d2fde1f7b3d48b8395d5f2de76c18a528bd6a9cdde438df747bfcba3e05d6f" +checksum = "d9c4f5dac5e15c24eb999c26181a6ca40b39fe946cbe4c263c7209467bc83af2" [[package]] name = "form_urlencoded" @@ -1313,7 +1359,7 @@ checksum = "162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650" dependencies = [ "proc-macro2", "quote", - "syn 2.0.96", + "syn 2.0.101", ] [[package]] @@ -1351,8 +1397,8 @@ name = "generate-copyright" version = "0.1.0" dependencies = [ "anyhow", + "askama", "cargo_metadata 0.18.1", - "rinja", "serde", "serde_json", "thiserror 1.0.69", @@ -1386,9 +1432,9 @@ dependencies = [ [[package]] name = "getrandom" -version = "0.2.15" +version = "0.2.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c4567c8db10ae91089c99af84c68c38da3ec2f087c3f82960bcdbf3656b6f4d7" +checksum = "335ff9f135e4384c8150d6f27c6daed433577f86b4750418338c01a1a2528592" dependencies = [ "cfg-if", "libc", @@ -1397,14 +1443,14 @@ dependencies = [ [[package]] name = "getrandom" -version = "0.3.1" +version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "43a49c392881ce6d5c3b8cb70f98717b7c07aabbdff06687b9030dbfbe2725f8" +checksum = "73fea8450eea4bac3940448fb7ae50d91f034f941199fcd9d909a5a07aa455f0" dependencies = [ "cfg-if", "libc", - "wasi 0.13.3+wasi-0.2.2", - "windows-targets 0.52.6", + "r-efi", + "wasi 0.14.2+wasi-0.2.4", ] [[package]] @@ -1432,9 +1478,9 @@ checksum = "a8d1add55171497b4705a648c6b583acafb01d58050a51727785f0b2c8e0a2b2" [[package]] name = "globset" -version = "0.4.15" +version = "0.4.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "15f1ce686646e7f1e19bf7d5533fe443a45dbfb990e00629110797578b42fb19" +checksum = "54a1028dfc5f5df5da8a56a73e6c153c9a9708ec57232470703592a3f18e49f5" dependencies = [ "aho-corasick", "bstr", @@ -1454,9 +1500,9 @@ dependencies = [ [[package]] name = "hashbrown" -version = "0.15.2" +version = "0.15.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bf151400ff0baff5465007dd2f3e717f3fe502074ca563069ce3a6629d07b289" +checksum = "84b26c544d002229e640969970a2e74021aadf6e2f96372b9c58eff97de08eb3" dependencies = [ "allocator-api2", "equivalent", @@ -1516,9 +1562,9 @@ dependencies = [ [[package]] name = "html5ever" -version = "0.29.2" +version = "0.29.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b958f80f0fde8601dc6c08685adc743eecaa046181cebd5a57551468dfc2ddc" +checksum = "3b7410cae13cbc75623c98ac4cbfd1f0bedddf3227afc24f370cf0f50a44a11c" dependencies = [ "log", "mac", @@ -1537,22 +1583,23 @@ dependencies = [ [[package]] name = "humantime" -version = "2.1.0" +version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4" +checksum = "9b112acc8b3adf4b107a8ec20977da0273a8c386765a3ec0229bd500a1443f9f" [[package]] name = "iana-time-zone" -version = "0.1.61" +version = "0.1.63" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "235e081f3925a06703c2d0117ea8b91f042756fd6e7a6e5d901e8ca1a996b220" +checksum = "b0c919e5debc312ad217002b8048a17b7d83f80703865bbfcfebb0458b0b27d8" dependencies = [ "android_system_properties", "core-foundation-sys", "iana-time-zone-haiku", "js-sys", + "log", "wasm-bindgen", - "windows-core 0.52.0", + "windows-core 0.61.0", ] [[package]] @@ -1592,9 +1639,9 @@ dependencies = [ [[package]] name = "icu_list_data" -version = "1.5.0" +version = "1.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e1825170d2c6679cb20dbd96a589d034e49f698aed9a2ef4fafc9a0101ed298f" +checksum = "52b1a7fbdbf3958f1be8354cb59ac73f165b7b7082d447ff2090355c9a069120" [[package]] name = "icu_locid" @@ -1625,9 +1672,9 @@ dependencies = [ [[package]] name = "icu_locid_transform_data" -version = "1.5.0" +version = "1.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fdc8ff3388f852bede6b579ad4e978ab004f139284d7b28715f773507b946f6e" +checksum = "7515e6d781098bf9f7205ab3fc7e9709d34554ae0b21ddbcb5febfa4bc7df11d" [[package]] name = "icu_normalizer" @@ -1649,9 +1696,9 @@ dependencies = [ [[package]] name = "icu_normalizer_data" -version = "1.5.0" +version = "1.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f8cafbf7aa791e9b22bec55a167906f9e1215fd475cd22adfcf660e03e989516" +checksum = "c5e8338228bdc8ab83303f16b797e177953730f601a96c25d10cb3ab0daa0cb7" [[package]] name = "icu_properties" @@ -1670,9 +1717,9 @@ dependencies = [ [[package]] name = "icu_properties_data" -version = "1.5.0" +version = "1.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "67a8effbc3dd3e4ba1afa8ad918d5684b8868b3b26500753effea8d2eed19569" +checksum = "85fb8799753b75aee8d2a21d7c14d9f38921b54b3dbda10f5a3c7a7b82dba5e2" [[package]] name = "icu_provider" @@ -1712,7 +1759,7 @@ checksum = "1ec89e9337638ecdc08744df490b221a7399bf8d164eb52a665454e60e075ad6" dependencies = [ "proc-macro2", "quote", - "syn 2.0.96", + "syn 2.0.101", ] [[package]] @@ -1778,9 +1825,9 @@ checksum = "ce23b50ad8242c51a442f3ff322d56b02f08852c77e4c0b4d3fd684abc89c683" [[package]] name = "indexmap" -version = "2.7.0" +version = "2.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "62f822373a4fe84d4bb149bf54e584a7f4abec90e072ed49cda0edea5b95471f" +checksum = "cea70ddb795996207ad57735b50c5982d8844f38ba9ee5f1aedcfb708a2aa11e" dependencies = [ "equivalent", "hashbrown", @@ -1789,9 +1836,9 @@ dependencies = [ [[package]] name = "indicatif" -version = "0.17.9" +version = "0.17.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cbf675b85ed934d3c67b5c5469701eec7db22689d0a2139d856e0925fa28b281" +checksum = "183b3088984b400f4cfac3620d5e076c84da5364016b4f49473de574b2586235" dependencies = [ "console", "number_prefix", @@ -1808,9 +1855,9 @@ checksum = "bfa799dd5ed20a7e349f3b4639aa80d74549c81716d9ec4f994c9b5815598306" [[package]] name = "inout" -version = "0.1.3" +version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a0c10553d664a4d0bcff9f4215d0aac67a639cc68ef660840afe309b807bc9f5" +checksum = "879f10e63c20629ecabbb64a8010319738c66a5cd0c29b02d63d272b03751d01" dependencies = [ "generic-array", ] @@ -1864,16 +1911,41 @@ dependencies = [ [[package]] name = "itoa" -version = "1.0.14" +version = "1.0.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4a5f13b858c8d314ee3e8f639011f7ccefe71f97f96e50151fb991f267928e2c" + +[[package]] +name = "jiff" +version = "0.2.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d07d8d955d798e7a4d6f9c58cd1f1916e790b42b092758a9ef6e16fef9f1b3fd" +dependencies = [ + "jiff-static", + "log", + "portable-atomic", + "portable-atomic-util", + "serde", +] + +[[package]] +name = "jiff-static" +version = "0.2.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d75a2a4b1b190afb6f5425f10f6a8f959d2ea0b9c2b1d79553551850539e4674" +checksum = "f244cfe006d98d26f859c7abd1318d85327e1882dc9cef80f62daeeb0adcf300" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.101", +] [[package]] name = "jobserver" -version = "0.1.32" +version = "0.1.33" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "48d1dbcbbeb6a7fec7e059840aa538bd62aaccf972c7346c4d9d2059312853d0" +checksum = "38f262f097c174adebe41eb73d66ae9c06b2844fb0da69969647bbddd9b0538a" dependencies = [ + "getrandom 0.3.2", "libc", ] @@ -1914,15 +1986,15 @@ dependencies = [ [[package]] name = "jsonpath-rust" -version = "1.0.0" +version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b0231bb404a6cd6c8f0ab41b907049063a089fc02aa7636cc5cd9a4d87364c9" +checksum = "6a37c2c87b8d16e788ce359660fead0ea5f4ed29ff400d55be74a4e01d1817d9" dependencies = [ "pest", "pest_derive", "regex", "serde_json", - "thiserror 2.0.11", + "thiserror 2.0.12", ] [[package]] @@ -1937,6 +2009,12 @@ version = "0.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "884e2677b40cc8c339eaefcb701c32ef1fd2493d71118dc0ca4b6a736c93bd67" +[[package]] +name = "leb128fmt" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09edd9e8b54e49e587e4f6295a7d29c3ea94d469cb40ab8ca70b288248a81db2" + [[package]] name = "levenshtein" version = "1.0.5" @@ -1945,15 +2023,15 @@ checksum = "db13adb97ab515a3691f56e4dbab09283d0b86cb45abd991d8634a9d6f501760" [[package]] name = "lexopt" -version = "0.3.0" +version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "baff4b617f7df3d896f97fe922b64817f6cd9a756bb81d40f8883f2f66dcb401" +checksum = "9fa0e2a1fcbe2f6be6c42e342259976206b383122fc152e872795338b5a3f3a7" [[package]] name = "libc" -version = "0.2.169" +version = "0.2.172" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b5aba8db14291edd000dfcc4d620c7ebfb122c613afb886ca8803fa4e128a20a" +checksum = "d750af042f7ef4f724306de029d18836c26c1765a54a6a3f094cbd23a7267ffa" [[package]] name = "libdbus-sys" @@ -1967,9 +2045,9 @@ dependencies = [ [[package]] name = "libffi" -version = "3.2.0" +version = "4.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ce826c243048e3d5cec441799724de52e2d42f820468431fc3fceee2341871e2" +checksum = "4a9434b6fc77375fb624698d5f8c49d7e80b10d59eb1219afda27d1f824d4074" dependencies = [ "libc", "libffi-sys", @@ -1977,9 +2055,9 @@ dependencies = [ [[package]] name = "libffi-sys" -version = "2.3.0" +version = "3.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f36115160c57e8529781b4183c2bb51fdc1f6d6d1ed345591d84be7703befb3c" +checksum = "ead36a2496acfc8edd6cc32352110e9478ac5b9b5f5b9856ebd3d28019addb84" dependencies = [ "cc", ] @@ -1991,14 +2069,14 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fc2f4eb4bc735547cfed7c0a4922cbd04a4655978c09b54f1f7b228750664c34" dependencies = [ "cfg-if", - "windows-targets 0.52.6", + "windows-targets 0.48.5", ] [[package]] name = "libm" -version = "0.2.11" +version = "0.2.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8355be11b20d696c8f18f6cc018c4e372165b1fa8126cef092399c9951984ffa" +checksum = "c9627da5196e5d8ed0b0495e61e518847578da83483c37288316d9b2e03a7f72" [[package]] name = "libredox" @@ -2013,9 +2091,9 @@ dependencies = [ [[package]] name = "libz-sys" -version = "1.1.21" +version = "1.1.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "df9b68e50e6e0b26f672573834882eb57759f6db9b3be2ea3c35c91188bb4eaa" +checksum = "8b70e7a7df205e92a1a4cd9aaae7898dac0aa555503cc0a649494d0d60e7651d" dependencies = [ "cc", "libc", @@ -2042,15 +2120,15 @@ dependencies = [ [[package]] name = "linux-raw-sys" -version = "0.4.15" +version = "0.9.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d26c52dbd32dccf2d10cac7725f8eae5296885fb5703b261f7d0a0739ec807ab" +checksum = "cd945864f07fe9f5371a27ad7b52a172b4b499999f1d97574c9fa68373937e12" [[package]] name = "litemap" -version = "0.7.4" +version = "0.7.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4ee93343901ab17bd981295f2cf0026d4ad018c7c31ba84549a4ddbb47a45104" +checksum = "23fb14cb19457329c82206317a5663005a4d404783dc74f4252769b0d5f42856" [[package]] name = "lld-wrapper" @@ -2079,9 +2157,9 @@ dependencies = [ [[package]] name = "log" -version = "0.4.25" +version = "0.4.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "04cbf5b083de1c7e0222a7a51dbfdba1cbe1c6ab0b15e29fff3f6c077fd9cd9f" +checksum = "13dc2df351e3202783a1fe0d44375f7295ffb4049267b0f3018346dc122a1d94" [[package]] name = "lzma-sys" @@ -2102,9 +2180,9 @@ checksum = "c41e0c4fef86961ac6d6f8a82609f55f31b05e4fce149ac5710e439df7619ba4" [[package]] name = "markup5ever" -version = "0.15.0" +version = "0.14.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "03a7b81dfb91586d0677086d40a6d755070e0799b71bb897485bac408dfd5c69" +checksum = "c7a7213d12e1864c0f002f52c2923d4556935a43dec5e71355c2760e0f6e7a18" dependencies = [ "log", "phf", @@ -2122,7 +2200,7 @@ checksum = "88a9689d8d44bf9964484516275f5cd4c9b59457a6940c1d5d0ecbb94510a36b" dependencies = [ "proc-macro2", "quote", - "syn 2.0.96", + "syn 2.0.101", ] [[package]] @@ -2144,20 +2222,6 @@ dependencies = [ "digest", ] -[[package]] -name = "measureme" -version = "11.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dfa4a40f09af7aa6faef38285402a78847d0d72bf8827006cd2a332e1e6e4a8d" -dependencies = [ - "log", - "memmap2", - "parking_lot", - "perf-event-open-sys", - "rustc-hash 1.1.0", - "smallvec", -] - [[package]] name = "measureme" version = "12.0.1" @@ -2187,22 +2251,6 @@ dependencies = [ "libc", ] -[[package]] -name = "mime" -version = "0.3.17" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a" - -[[package]] -name = "mime_guess" -version = "2.0.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f7c44f8e672c00fe5308fa235f821cb4198414e1c77935c1ab6948d3fd78550e" -dependencies = [ - "mime", - "unicase", -] - [[package]] name = "minifier" version = "0.3.5" @@ -2226,9 +2274,9 @@ dependencies = [ [[package]] name = "miniz_oxide" -version = "0.8.3" +version = "0.8.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b8402cab7aefae129c6977bb0ff1b8fd9a04eb5b51efc50a70bea51cda0c7924" +checksum = "3be647b768db090acb35d5ec5db2b0e1f1de11133ca123b9eacf5137868f892a" dependencies = [ "adler2", ] @@ -2247,23 +2295,24 @@ name = "miri" version = "0.1.0" dependencies = [ "aes", + "bitflags", "chrono", "chrono-tz", "colored", "directories", - "getrandom 0.3.1", + "getrandom 0.3.2", "libc", "libffi", "libloading", - "measureme 11.0.1", - "rand 0.9.0", + "measureme", + "rand 0.9.1", "regex", "rustc_version", "smallvec", "tempfile", "tikv-jemalloc-sys", "ui_test", - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] @@ -2359,12 +2408,6 @@ dependencies = [ "num-traits", ] -[[package]] -name = "num-conv" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "51d515d32fb182ee37cda2ccdcb92950d6a3c2893aa280e540671c2cd0f3b1d9" - [[package]] name = "num-integer" version = "0.1.46" @@ -2421,6 +2464,25 @@ version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "830b246a0e5f20af87141b25c173cd1b609bd7779a4617d6ec582abaf90870f3" +[[package]] +name = "objc2-core-foundation" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1c10c2894a6fed806ade6027bcd50662746363a9589d3ec9d9bef30a4e4bc166" +dependencies = [ + "bitflags", +] + +[[package]] +name = "objc2-io-kit" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "71c1c64d6120e51cd86033f67176b1cb66780c2efe34dec55176f77befd93c0a" +dependencies = [ + "libc", + "objc2-core-foundation", +] + [[package]] name = "object" version = "0.32.2" @@ -2442,7 +2504,7 @@ dependencies = [ "indexmap", "memchr", "ruzstd", - "wasmparser 0.222.0", + "wasmparser 0.222.1", ] [[package]] @@ -2456,9 +2518,9 @@ dependencies = [ [[package]] name = "once_cell" -version = "1.20.2" +version = "1.21.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1261fe7e33c73b354eab43b1273a57c8f967d0391e80353e51f764ac02cf6775" +checksum = "42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d" [[package]] name = "opener" @@ -2474,15 +2536,15 @@ dependencies = [ [[package]] name = "openssl-probe" -version = "0.1.5" +version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf" +checksum = "d05e27ee213611ffe7d6348b942e8f942b37114c00cc03cec254295a4a17852e" [[package]] name = "openssl-sys" -version = "0.9.104" +version = "0.9.108" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "45abf306cbf99debc8195b66b7346498d7b10c210de50418b5ccd7ceba08c741" +checksum = "e145e1651e858e820e4860f7b9c5e169bc1d8ce1c86043be79fa7b7634821847" dependencies = [ "cc", "libc", @@ -2505,7 +2567,6 @@ dependencies = [ "humansize", "humantime", "log", - "serde", "serde_json", "sysinfo", "tabled", @@ -2520,16 +2581,6 @@ version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "04744f49eae99ab78e0d5c0b603ab218f515ea8cfe5a456d7629ad883a3b6e7d" -[[package]] -name = "os_pipe" -version = "1.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5ffd2b0a5634335b135d5728d84c5e0fd726954b87111f7506a61c502280d982" -dependencies = [ - "libc", - "windows-sys 0.59.0", -] - [[package]] name = "overload" version = "0.1.1" @@ -2542,6 +2593,12 @@ version = "3.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c1b04fb49957986fdce4d6ee7a65027d55d4b6d2265e5848bbb507b58ccfdb6f" +[[package]] +name = "owo-colors" +version = "4.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1036865bb9422d3300cf723f657c2851d0e9ab12567854b1f4eba3d77decf564" + [[package]] name = "pad" version = "0.1.6" @@ -2617,20 +2674,20 @@ dependencies = [ [[package]] name = "pest" -version = "2.7.15" +version = "2.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8b7cafe60d6cf8e62e1b9b2ea516a089c008945bb5a275416789e7db0bc199dc" +checksum = "198db74531d58c70a361c42201efde7e2591e976d518caf7662a47dc5720e7b6" dependencies = [ "memchr", - "thiserror 2.0.11", + "thiserror 2.0.12", "ucd-trie", ] [[package]] name = "pest_derive" -version = "2.7.15" +version = "2.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "816518421cfc6887a0d62bf441b6ffb4536fcc926395a69e1a85852d4363f57e" +checksum = "d725d9cfd79e87dccc9341a2ef39d1b6f6353d68c4b33c177febbe1a402c97c5" dependencies = [ "pest", "pest_generator", @@ -2638,22 +2695,22 @@ dependencies = [ [[package]] name = "pest_generator" -version = "2.7.15" +version = "2.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7d1396fd3a870fc7838768d171b4616d5c91f6cc25e377b673d714567d99377b" +checksum = "db7d01726be8ab66ab32f9df467ae8b1148906685bbe75c82d1e65d7f5b3f841" dependencies = [ "pest", "pest_meta", "proc-macro2", "quote", - "syn 2.0.96", + "syn 2.0.101", ] [[package]] name = "pest_meta" -version = "2.7.15" +version = "2.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e1e58089ea25d717bfd31fb534e4f3afcc2cc569c70de3e239778991ea3b7dea" +checksum = "7f9f832470494906d1fca5329f8ab5791cc60beb230c74815dff541cbd2b5ca0" dependencies = [ "once_cell", "pest", @@ -2712,9 +2769,9 @@ checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" [[package]] name = "pkg-config" -version = "0.3.31" +version = "0.3.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "953ec861398dccce10c670dfeaf3ec4911ca479e9c02154b3a215178c5f566f2" +checksum = "7edddbd0b52d732b21ad9a5fab5c704c14cd949e5e9a1ec5929a24fded1b904c" [[package]] name = "polonius-engine" @@ -2729,23 +2786,26 @@ dependencies = [ [[package]] name = "portable-atomic" -version = "1.10.0" +version = "1.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "280dc24453071f1b63954171985a0b0d30058d287960968b9b2aca264c8d4ee6" +checksum = "350e9b48cbc6b0e028b0473b114454c6316e57336ee184ceab6e53f72c178b3e" [[package]] -name = "powerfmt" -version = "0.2.0" +name = "portable-atomic-util" +version = "0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391" +checksum = "d8a2f0d8d040d7848a709caf78912debcc3f33ee4b3cac47d73d1e1069e83507" +dependencies = [ + "portable-atomic", +] [[package]] name = "ppv-lite86" -version = "0.2.20" +version = "0.2.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "77957b295656769bb8ad2b6a6b09d897d94f05c41b069aede1fcdaa675eaea04" +checksum = "85eae3c4ed2f50dcfe72643da4befc30deadb458a9b590d720cde2f2b1e97da9" dependencies = [ - "zerocopy 0.7.35", + "zerocopy", ] [[package]] @@ -2760,7 +2820,7 @@ version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "abec3fb083c10660b3854367697da94c674e9e82aa7511014dc958beeb7215e9" dependencies = [ - "owo-colors", + "owo-colors 3.5.0", "pad", ] @@ -2772,33 +2832,22 @@ checksum = "dc375e1527247fe1a97d8b7156678dfe7c1af2fc075c9a4db3690ecd2a148068" [[package]] name = "proc-macro2" -version = "1.0.93" +version = "1.0.95" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "60946a68e5f9d28b0dc1c21bb8a97ee7d018a8b322fa57838ba31cc878e22d99" +checksum = "02b3e5e68a3a1a02aad3ec490a98007cbc13c37cbe84a3cd7b8e406d76e7f778" dependencies = [ "unicode-ident", ] [[package]] name = "psm" -version = "0.1.25" +version = "0.1.26" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f58e5423e24c18cc840e1c98370b3993c6649cd1678b4d24318bcf0a083cbe88" +checksum = "6e944464ec8536cd1beb0bbfd96987eb5e3b72f2ecdafdc5c769a37f1fa2ae1f" dependencies = [ "cc", ] -[[package]] -name = "pulldown-cmark" -version = "0.9.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "57206b407293d2bcd3af849ce869d52068623f19e1b5ff8e8778e3309439682b" -dependencies = [ - "bitflags", - "memchr", - "unicase", -] - [[package]] name = "pulldown-cmark" version = "0.11.3" @@ -2831,13 +2880,19 @@ checksum = "07589615d719a60c8dd8a4622e7946465dfef20d1a428f969e3443e7386d5f45" [[package]] name = "quote" -version = "1.0.38" +version = "1.0.40" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0e4dccaaaf89514f546c693ddc140f729f958c247918a13380cccc6078391acc" +checksum = "1885c039570dc00dcb4ff087a89e185fd56bae234ddc7f056a945bf36467248d" dependencies = [ "proc-macro2", ] +[[package]] +name = "r-efi" +version = "5.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "74765f6d916ee2faa39bc8e68e4f3ed8949b48cccdac59983d287a7cb71ce9c5" + [[package]] name = "rand" version = "0.8.5" @@ -2851,13 +2906,12 @@ dependencies = [ [[package]] name = "rand" -version = "0.9.0" +version = "0.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3779b94aeb87e8bd4e834cee3650289ee9e0d5677f976ecdb6d219e5f4f6cd94" +checksum = "9fbfd9d094a40bf3ae768db9361049ace4c0e04a4fd6b359518bd7b73a73dd97" dependencies = [ "rand_chacha 0.9.0", - "rand_core 0.9.0", - "zerocopy 0.8.14", + "rand_core 0.9.3", ] [[package]] @@ -2877,7 +2931,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d3022b5f1df60f26e1ffddd6c66e8aa15de382ae63b3a0c1bfc0e4d3e3f325cb" dependencies = [ "ppv-lite86", - "rand_core 0.9.0", + "rand_core 0.9.3", ] [[package]] @@ -2886,17 +2940,16 @@ version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" dependencies = [ - "getrandom 0.2.15", + "getrandom 0.2.16", ] [[package]] name = "rand_core" -version = "0.9.0" +version = "0.9.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b08f3c9802962f7e1b25113931d94f43ed9725bebc59db9d0c3e9a23b67e15ff" +checksum = "99d9a13982dcf210057a8a78572b2217b667c3beacbf3a0d8b454f6f82837d38" dependencies = [ - "getrandom 0.3.1", - "zerocopy 0.8.14", + "getrandom 0.3.2", ] [[package]] @@ -2905,7 +2958,7 @@ version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f703f4665700daf5512dcca5f43afa6af89f09db47fb56be587f80636bda2d41" dependencies = [ - "rand_core 0.9.0", + "rand_core 0.9.3", ] [[package]] @@ -2930,9 +2983,9 @@ dependencies = [ [[package]] name = "redox_syscall" -version = "0.5.8" +version = "0.5.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "03a862b389f93e68874fbf580b9de08dd02facb9a788ebadaf4a3fd33cf58834" +checksum = "928fca9cf2aa042393a8325b9ead81d2f0df4cb12e1e24cef072922ccd99c5af" dependencies = [ "bitflags", ] @@ -2943,11 +2996,22 @@ version = "0.4.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ba009ff324d1fc1b900bd1fdb31564febe58a8ccc8a6fdbb93b543d33b13ca43" dependencies = [ - "getrandom 0.2.15", + "getrandom 0.2.16", "libredox", "thiserror 1.0.69", ] +[[package]] +name = "redox_users" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dd6f9d3d47bdd2ad6945c5015a226ec6155d0bcdfd8f7cd29f86b71f8de99d2b" +dependencies = [ + "getrandom 0.2.16", + "libredox", + "thiserror 2.0.12", +] + [[package]] name = "regex" version = "1.11.1" @@ -3023,47 +3087,6 @@ dependencies = [ "walkdir", ] -[[package]] -name = "rinja" -version = "0.3.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3dc4940d00595430b3d7d5a01f6222b5e5b51395d1120bdb28d854bb8abb17a5" -dependencies = [ - "humansize", - "itoa", - "percent-encoding", - "rinja_derive", -] - -[[package]] -name = "rinja_derive" -version = "0.3.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "08d9ed0146aef6e2825f1b1515f074510549efba38d71f4554eec32eb36ba18b" -dependencies = [ - "basic-toml", - "memchr", - "mime", - "mime_guess", - "proc-macro2", - "quote", - "rinja_parser", - "rustc-hash 2.1.1", - "serde", - "syn 2.0.96", -] - -[[package]] -name = "rinja_parser" -version = "0.3.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "93f9a866e2e00a7a1fb27e46e9e324a6f7c0e7edc4543cae1d38f4e4a100c610" -dependencies = [ - "memchr", - "nom", - "serde", -] - [[package]] name = "run_make_support" version = "0.2.0" @@ -3073,11 +3096,10 @@ dependencies = [ "gimli 0.31.1", "libc", "object 0.36.7", - "os_pipe", "regex", "serde_json", "similar", - "wasmparser 0.219.1", + "wasmparser 0.219.2", ] [[package]] @@ -3110,6 +3132,12 @@ version = "2.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "357703d41365b4b27c590e3ed91eabb1b663f07c4c084095e60cbed4362dff0d" +[[package]] +name = "rustc-literal-escaper" +version = "0.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0041b6238913c41fe704213a4a9329e2f685a156d1781998128b4149c230ad04" + [[package]] name = "rustc-main" version = "0.0.0" @@ -3123,33 +3151,26 @@ dependencies = [ ] [[package]] -name = "rustc-rayon" +name = "rustc-rayon-core" version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2cd9fb077db982d7ceb42a90471e5a69a990b58f71e06f0d8340bb2cf35eb751" +checksum = "2f42932dcd3bcbe484b38a3ccf79b7906fac41c02d408b5b1bac26da3416efdb" dependencies = [ - "either", - "indexmap", - "rustc-rayon-core", + "crossbeam-deque", + "crossbeam-utils", ] [[package]] -name = "rustc-rayon-core" -version = "0.5.0" +name = "rustc-semver" +version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "67668daaf00e359c126f6dcb40d652d89b458a008c8afa727a42a2d20fca0b7f" -dependencies = [ - "crossbeam-channel", - "crossbeam-deque", - "crossbeam-utils", - "num_cpus", -] +checksum = "5be1bdc7edf596692617627bbfeaba522131b18e06ca4df2b6b689e3c5d5ce84" [[package]] name = "rustc-stable-hash" -version = "0.1.1" +version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2febf9acc5ee5e99d1ad0afcdbccc02d87aa3f857a1f01f825b80eacf8edfcd1" +checksum = "781442f29170c5c93b7185ad559492601acdc71d5bb0706f5868094f45cfcd08" [[package]] name = "rustc-std-workspace-alloc" @@ -3168,7 +3189,7 @@ name = "rustc_abi" version = "0.0.0" dependencies = [ "bitflags", - "rand 0.9.0", + "rand 0.9.1", "rand_xoshiro", "rustc_data_structures", "rustc_hashes", @@ -3202,10 +3223,10 @@ version = "0.0.0" dependencies = [ "bitflags", "memchr", + "rustc-literal-escaper", "rustc_ast_ir", "rustc_data_structures", "rustc_index", - "rustc_lexer", "rustc_macros", "rustc_serialize", "rustc_span", @@ -3384,7 +3405,7 @@ dependencies = [ "gimli 0.31.1", "itertools", "libc", - "measureme 12.0.1", + "measureme", "object 0.36.7", "rustc-demangle", "rustc_abi", @@ -3457,7 +3478,7 @@ dependencies = [ "thin-vec", "thorin-dwp", "tracing", - "wasm-encoder 0.219.1", + "wasm-encoder 0.219.2", "windows 0.59.0", ] @@ -3499,12 +3520,12 @@ dependencies = [ "indexmap", "jobserver", "libc", - "measureme 12.0.1", + "measureme", "memmap2", "parking_lot", "portable-atomic", "rustc-hash 2.1.1", - "rustc-rayon", + "rustc-rayon-core", "rustc-stable-hash", "rustc_arena", "rustc_graphviz", @@ -3532,6 +3553,7 @@ name = "rustc_driver_impl" version = "0.0.0" dependencies = [ "ctrlc", + "jiff", "libc", "rustc_abi", "rustc_ast", @@ -3571,14 +3593,13 @@ dependencies = [ "rustc_query_system", "rustc_resolve", "rustc_session", - "rustc_smir", "rustc_span", "rustc_target", "rustc_trait_selection", "rustc_ty_utils", "serde_json", "shlex", - "time", + "stable_mir", "tracing", "windows 0.59.0", ] @@ -3682,13 +3703,16 @@ dependencies = [ "fluent-syntax", "proc-macro2", "quote", - "syn 2.0.96", + "syn 2.0.101", "unic-langid", ] [[package]] name = "rustc_fs_util" version = "0.0.0" +dependencies = [ + "tempfile", +] [[package]] name = "rustc_graphviz" @@ -3743,7 +3767,6 @@ dependencies = [ "rustc_middle", "rustc_session", "rustc_span", - "rustc_target", "rustc_trait_selection", "smallvec", "tracing", @@ -3782,6 +3805,7 @@ dependencies = [ "rustc_middle", "rustc_session", "rustc_span", + "rustc_target", "rustc_trait_selection", "smallvec", "tracing", @@ -3791,7 +3815,7 @@ dependencies = [ name = "rustc_incremental" version = "0.0.0" dependencies = [ - "rand 0.9.0", + "rand 0.9.1", "rustc_ast", "rustc_data_structures", "rustc_errors", @@ -3825,7 +3849,7 @@ version = "0.0.0" dependencies = [ "proc-macro2", "quote", - "syn 2.0.96", + "syn 2.0.101", ] [[package]] @@ -3850,7 +3874,6 @@ dependencies = [ name = "rustc_interface" version = "0.0.0" dependencies = [ - "rustc-rayon", "rustc-rayon-core", "rustc_abi", "rustc_ast", @@ -3972,7 +3995,7 @@ version = "0.0.0" dependencies = [ "proc-macro2", "quote", - "syn 2.0.96", + "syn 2.0.101", "synstructure", ] @@ -4149,7 +4172,6 @@ dependencies = [ "rustc_data_structures", "rustc_index", "rustc_macros", - "rustc_serialize", "rustc_type_ir", "rustc_type_ir_macros", "tracing", @@ -4160,6 +4182,7 @@ name = "rustc_parse" version = "0.0.0" dependencies = [ "bitflags", + "rustc-literal-escaper", "rustc_ast", "rustc_ast_pretty", "rustc_data_structures", @@ -4182,6 +4205,7 @@ dependencies = [ name = "rustc_parse_format" version = "0.0.0" dependencies = [ + "rustc-literal-escaper", "rustc_index", "rustc_lexer", ] @@ -4257,7 +4281,7 @@ dependencies = [ name = "rustc_query_impl" version = "0.0.0" dependencies = [ - "measureme 12.0.1", + "measureme", "rustc_data_structures", "rustc_hashes", "rustc_hir", @@ -4301,7 +4325,7 @@ version = "0.0.0" dependencies = [ "bitflags", "itertools", - "pulldown-cmark 0.11.3", + "pulldown-cmark", "rustc_arena", "rustc_ast", "rustc_ast_pretty", @@ -4312,7 +4336,6 @@ dependencies = [ "rustc_feature", "rustc_fluent_macro", "rustc_hir", - "rustc_index", "rustc_macros", "rustc_metadata", "rustc_middle", @@ -4359,6 +4382,7 @@ dependencies = [ "bitflags", "getopts", "libc", + "rand 0.9.1", "rustc_abi", "rustc_ast", "rustc_data_structures", @@ -4392,7 +4416,7 @@ dependencies = [ "rustc_span", "rustc_target", "scoped-tls", - "stable_mir", + "serde", "tracing", ] @@ -4453,13 +4477,13 @@ dependencies = [ [[package]] name = "rustc_tools_util" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3316159ab19e19d1065ecc49278e87f767a9dae9fae80348d2b4d4fa4ae02d4d" +version = "0.4.2" [[package]] name = "rustc_tools_util" version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a3b75158011a63889ba12084cf1224baad7bcad50f6ee7c842f772b74aa148ed" [[package]] name = "rustc_trait_selection" @@ -4468,7 +4492,6 @@ dependencies = [ "itertools", "rustc_abi", "rustc_ast", - "rustc_attr_parsing", "rustc_data_structures", "rustc_errors", "rustc_fluent_macro", @@ -4481,7 +4504,6 @@ dependencies = [ "rustc_session", "rustc_span", "rustc_transmute", - "rustc_type_ir", "smallvec", "thin-vec", "tracing", @@ -4510,6 +4532,7 @@ dependencies = [ "rustc_hir", "rustc_middle", "rustc_span", + "smallvec", "tracing", ] @@ -4541,6 +4564,7 @@ version = "0.0.0" dependencies = [ "bitflags", "derive-where", + "ena", "indexmap", "rustc-hash 1.1.0", "rustc_ast_ir", @@ -4561,7 +4585,7 @@ version = "0.0.0" dependencies = [ "proc-macro2", "quote", - "syn 2.0.96", + "syn 2.0.101", "synstructure", ] @@ -4579,15 +4603,14 @@ name = "rustdoc" version = "0.0.0" dependencies = [ "arrayvec", + "askama", "base64", "expect-test", "indexmap", "itertools", "minifier", - "pulldown-cmark 0.9.6", "pulldown-cmark-escape", "regex", - "rinja", "rustdoc-json-types", "serde", "serde_json", @@ -4606,6 +4629,7 @@ name = "rustdoc-gui-test" version = "0.1.0" dependencies = [ "build_helper", + "camino", "compiletest", "getopts", "walkdir", @@ -4651,7 +4675,7 @@ dependencies = [ "proc-macro2", "quote", "serde", - "syn 2.0.96", + "syn 2.0.101", ] [[package]] @@ -4685,9 +4709,9 @@ dependencies = [ [[package]] name = "rustix" -version = "0.38.43" +version = "1.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a78891ee6bf2340288408954ac787aa063d8e8817e9f53abb37c695c6d834ef6" +checksum = "c71e83d6afe7ff64890ec6b71d6a69bb8a610ab78ce364b3352876bb4c801266" dependencies = [ "bitflags", "errno", @@ -4698,9 +4722,9 @@ dependencies = [ [[package]] name = "rustversion" -version = "1.0.19" +version = "1.0.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f7c45b9784283f1b2e7fb61b42047c2fd678ef0960d4f6f1eba131594cc369d4" +checksum = "eded382c5f5f786b989652c49544c4877d9f015cc22e145a5ea8ea66c2921cd2" [[package]] name = "ruzstd" @@ -4713,9 +4737,9 @@ dependencies = [ [[package]] name = "ryu" -version = "1.0.18" +version = "1.0.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f3cb5ba0dc43242ce17de99c180e96db90b235b8a9fdc9543c96d2209116bd9f" +checksum = "28d3b2b1366ec20994f1fd18c3c594f05c5dd4bc44d8bb0c1c632c8d6829481f" [[package]] name = "same-file" @@ -4753,49 +4777,49 @@ version = "0.10.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e14e4d63b804dc0c7ec4a1e52bcb63f02c7ac94476755aa579edac21e01f915d" dependencies = [ - "self_cell 1.1.0", + "self_cell 1.2.0", ] [[package]] name = "self_cell" -version = "1.1.0" +version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c2fdfc24bc566f839a2da4c4295b82db7d25a24253867d5c64355abb5799bdbe" +checksum = "0f7d95a54511e0c7be3f51e8867aa8cf35148d7b9445d44de2f943e2b206e749" [[package]] name = "semver" -version = "1.0.24" +version = "1.0.26" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3cb6eb87a131f756572d7fb904f6e7b68633f09cca868c5df1c4b8d1a694bbba" +checksum = "56e6fa9c48d24d85fb3de5ad847117517440f6beceb7798af16b4a87d616b8d0" dependencies = [ "serde", ] [[package]] name = "serde" -version = "1.0.217" +version = "1.0.219" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "02fc4265df13d6fa1d00ecff087228cc0a2b5f3c0e87e258d8b94a156e984c70" +checksum = "5f0e2c6ed6606019b4e29e69dbaba95b11854410e5347d525002456dbbb786b6" dependencies = [ "serde_derive", ] [[package]] name = "serde_derive" -version = "1.0.217" +version = "1.0.219" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a9bf7cf98d04a2b28aead066b7496853d4779c9cc183c440dbac457641e19a0" +checksum = "5b0276cf7f2c73365f7157c8123c21cd9a50fbbd844757af28ca1f5925fc2a00" dependencies = [ "proc-macro2", "quote", - "syn 2.0.96", + "syn 2.0.101", ] [[package]] name = "serde_json" -version = "1.0.135" +version = "1.0.140" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2b0d7ba2887406110130a978386c4e1befb98c674b4fba677954e4db976630d9" +checksum = "20068b6e96dc6c9bd23e01df8827e6c7e1f2fddd43c21810382803c136b99373" dependencies = [ "itoa", "memchr", @@ -4825,9 +4849,9 @@ dependencies = [ [[package]] name = "sha2" -version = "0.10.8" +version = "0.10.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "793db75ad2bcafc3ffa7c68b215fee268f537982cd901d132f89c6343f3a3dc8" +checksum = "a7507d819769d01a365ab707794a4084392c824f54a7a6a7862f8c3d0892b283" dependencies = [ "cfg-if", "cpufeatures", @@ -4857,9 +4881,9 @@ checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64" [[package]] name = "similar" -version = "2.6.0" +version = "2.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1de1d4f81173b03af4c0cbed3c898f6bff5b870e4a7f5d6f4057d62a7a4b686e" +checksum = "bbbb5d9659141646ae647b42fe094daf6c6192d1620870b449d9557f748b2daa" [[package]] name = "siphasher" @@ -4878,15 +4902,15 @@ dependencies = [ [[package]] name = "smallvec" -version = "1.13.2" +version = "1.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3c5e1a9a646d36c3599cd173a41282daf47c44583ad367b8e6837255952e5c67" +checksum = "8917285742e9f3e1683f0a9c4e6b57960b7314d0b08d30d1ecd426713ee2eee9" [[package]] name = "socket2" -version = "0.5.8" +version = "0.5.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c970269d99b64e60ec3bd6ad27270092a5394c4e309314b18ae3fe575695fbe8" +checksum = "4f5fd57c80058a56cf5c777ab8a126398ece8e442983605d280a44ce79d0edef" dependencies = [ "libc", "windows-sys 0.52.0", @@ -4902,15 +4926,6 @@ dependencies = [ "color-eyre", ] -[[package]] -name = "spdx" -version = "0.10.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "58b69356da67e2fc1f542c71ea7e654a361a79c938e4424392ecf4fa065d2193" -dependencies = [ - "smallvec", -] - [[package]] name = "spdx-expression" version = "0.5.2" @@ -4950,15 +4965,14 @@ checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3" name = "stable_mir" version = "0.1.0-preview" dependencies = [ - "scoped-tls", - "serde", + "rustc_smir", ] [[package]] name = "stacker" -version = "0.1.18" +version = "0.1.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1d08feb8f695b465baed819b03c128dc23f57a694510ab1f06c77f763975685e" +checksum = "cddb07e32ddb770749da91081d8d0ac3a16f1a569a18b20348cd371f5dead06b" dependencies = [ "cc", "cfg-if", @@ -4975,9 +4989,9 @@ checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f" [[package]] name = "string_cache" -version = "0.8.8" +version = "0.8.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "938d512196766101d333398efde81bc1f37b00cb42c2f8350e5df639f040bbbe" +checksum = "bf776ba3fa74f83bf4b63c3dcbbf82173db2632ed8452cb2d891d33f459de70f" dependencies = [ "new_debug_unreachable", "parking_lot", @@ -5044,9 +5058,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.96" +version = "2.0.101" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d5d0adab1ae378d7f53bdebc67a39f1f151407ef230f0ce2883572f5d8985c80" +checksum = "8ce2b7fc941b3a24138a0a7cf8e858bfc6a992e7978a068a5c760deb0ed43caf" dependencies = [ "proc-macro2", "quote", @@ -5055,24 +5069,25 @@ dependencies = [ [[package]] name = "synstructure" -version = "0.13.1" +version = "0.13.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c8af7666ab7b6390ab78131fb5b0fce11d6b7a6951602017c35fa82800708971" +checksum = "728a70f3dbaf5bab7f0c4b1ac8d7ae5ea60a4b5549c8a5914361c99147a709d2" dependencies = [ "proc-macro2", "quote", - "syn 2.0.96", + "syn 2.0.101", ] [[package]] name = "sysinfo" -version = "0.31.4" +version = "0.35.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "355dbe4f8799b304b05e1b0f05fc59b2a18d36645cf169607da45bde2f69a1be" +checksum = "b897c8ea620e181c7955369a31be5f48d9a9121cb59fd33ecef9ff2a34323422" dependencies = [ - "core-foundation-sys", "libc", - "windows 0.57.0", + "objc2-core-foundation", + "objc2-io-kit", + "windows 0.61.1", ] [[package]] @@ -5087,9 +5102,9 @@ dependencies = [ [[package]] name = "tar" -version = "0.4.43" +version = "0.4.44" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c65998313f8e17d0d553d28f91a0df93e4dbbbf770279c7bc21ca0f09ea1a1f6" +checksum = "1d863878d212c87a19c1a610eb53bb01fe12951c0501cf5a0d65f724914a667a" dependencies = [ "filetime", "libc", @@ -5098,13 +5113,12 @@ dependencies = [ [[package]] name = "tempfile" -version = "3.15.0" +version = "3.19.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9a8a559c81686f576e8cd0290cd2a24a2a9ad80c98b3478856500fcbd7acd704" +checksum = "7437ac7763b9b123ccf33c338a5cc1bac6f69b45a136c19bdd8a65e3916435bf" dependencies = [ - "cfg-if", "fastrand", - "getrandom 0.2.15", + "getrandom 0.3.2", "once_cell", "rustix", "windows-sys 0.59.0", @@ -5157,16 +5171,16 @@ version = "0.1.0" dependencies = [ "indicatif", "num", - "rand 0.9.0", + "rand 0.9.1", "rand_chacha 0.9.0", "rayon", ] [[package]] name = "thin-vec" -version = "0.2.13" +version = "0.2.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a38c90d48152c236a3ab59271da4f4ae63d678c5d7ad6b7714d7cb9760be5e4b" +checksum = "144f754d318415ac792f9d69fc87abbbfc043ce2ef041c60f16ad828f638717d" [[package]] name = "thiserror" @@ -5179,11 +5193,11 @@ dependencies = [ [[package]] name = "thiserror" -version = "2.0.11" +version = "2.0.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d452f284b73e6d76dd36758a0c8684b1d5be31f92b89d07fd5822175732206fc" +checksum = "567b8a2dae586314f7be2a752ec7474332959c6460e02bde30d702a66d488708" dependencies = [ - "thiserror-impl 2.0.11", + "thiserror-impl 2.0.12", ] [[package]] @@ -5194,18 +5208,18 @@ checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1" dependencies = [ "proc-macro2", "quote", - "syn 2.0.96", + "syn 2.0.101", ] [[package]] name = "thiserror-impl" -version = "2.0.11" +version = "2.0.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "26afc1baea8a989337eeb52b6e72a039780ce45c3edfcc9c5b9d112feeb173c2" +checksum = "7f7cf42b4507d8ea322120659672cf1b9dbb93f8f2d4ecfd6e51350ff5b17a1d" dependencies = [ "proc-macro2", "quote", - "syn 2.0.96", + "syn 2.0.101", ] [[package]] @@ -5244,7 +5258,7 @@ name = "tidy" version = "0.1.0" dependencies = [ "build_helper", - "cargo_metadata 0.19.1", + "cargo_metadata 0.19.2", "fluent-syntax", "ignore", "miropt-test-tools", @@ -5272,37 +5286,6 @@ dependencies = [ "libc", ] -[[package]] -name = "time" -version = "0.3.37" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "35e7868883861bd0e56d9ac6efcaaca0d6d5d82a2a7ec8209ff492c07cf37b21" -dependencies = [ - "deranged", - "itoa", - "num-conv", - "powerfmt", - "serde", - "time-core", - "time-macros", -] - -[[package]] -name = "time-core" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ef927ca75afb808a4d64dd374f00a2adf8d0fcff8e7b184af886c3c87ec4a3f3" - -[[package]] -name = "time-macros" -version = "0.2.19" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2834e6017e3e5e4b9834939793b282bc03b37a3336245fa820e35e233e2a85de" -dependencies = [ - "num-conv", - "time-core", -] - [[package]] name = "tinystr" version = "0.7.6" @@ -5315,9 +5298,9 @@ dependencies = [ [[package]] name = "tinyvec" -version = "1.8.1" +version = "1.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "022db8904dfa342efe721985167e9fcd16c29b226db4397ed752a761cfce81e8" +checksum = "09b3661f17e86524eccd4371ab0429194e0d7c008abb45f7a7495b1719463c71" dependencies = [ "tinyvec_macros", ] @@ -5330,9 +5313,9 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" [[package]] name = "tokio" -version = "1.43.0" +version = "1.44.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3d61fa4ffa3de412bfea335c6ecff681de2b609ba3c77ef3e00e521813a9ed9e" +checksum = "e6b88822cbe49de4185e3a4cbf8321dd487cf5fe0c5c65695fef6346371e9c48" dependencies = [ "backtrace", "bytes", @@ -5362,9 +5345,9 @@ dependencies = [ [[package]] name = "toml_datetime" -version = "0.6.8" +version = "0.6.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0dd7358ecb8fc2f8d014bf86f6f638ce72ba252a2c3a2572f2a795f1d23efb41" +checksum = "3da5db5a963e24bc68be8b17b6fa82814bb22ee8660f192bb182771d498f09a3" dependencies = [ "serde", ] @@ -5379,7 +5362,7 @@ dependencies = [ "serde", "serde_spanned", "toml_datetime", - "winnow", + "winnow 0.5.40", ] [[package]] @@ -5402,7 +5385,7 @@ checksum = "395ae124c09f9e6918a2310af6038fba074bcf474ac352496d5910dd59a2226d" dependencies = [ "proc-macro2", "quote", - "syn 2.0.96", + "syn 2.0.101", ] [[package]] @@ -5489,9 +5472,9 @@ dependencies = [ [[package]] name = "typenum" -version = "1.17.0" +version = "1.18.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825" +checksum = "1dccffe3ce07af9386bfd29e80c0ab1a8205a2fc34e4bcd40364df902cfa8f3f" [[package]] name = "ucd-parse" @@ -5573,7 +5556,7 @@ checksum = "1ed7f4237ba393424195053097c1516bd4590dc82b84f2f97c5c69e12704555b" dependencies = [ "proc-macro-hack", "quote", - "syn 2.0.96", + "syn 2.0.101", "unic-langid-impl", ] @@ -5585,9 +5568,9 @@ checksum = "75b844d17643ee918803943289730bec8aac480150456169e647ed0b576ba539" [[package]] name = "unicode-ident" -version = "1.0.14" +version = "1.0.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "adb9e6ca4f869e1180728b7950e35922a7fc6397f7b641499e8f3ef06e50dc83" +checksum = "5a5f39404a5da50712a4c1eecf25e90dd62b613502b7e925fd4e4d19b5c96512" [[package]] name = "unicode-normalization" @@ -5711,11 +5694,11 @@ checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821" [[package]] name = "uuid" -version = "1.12.0" +version = "1.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "744018581f9a3454a9e15beb8a33b017183f1e7c0cd170232a2d1453b23a51c4" +checksum = "458f7a779bf54acc9f347480ac654f68407d3aab21269a6e3c9f922acd9e2da9" dependencies = [ - "getrandom 0.2.15", + "getrandom 0.3.2", ] [[package]] @@ -5754,18 +5737,18 @@ checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" [[package]] name = "wasi" -version = "0.13.3+wasi-0.2.2" +version = "0.14.2+wasi-0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "26816d2e1a4a36a2940b96c5296ce403917633dff8f3440e9b236ed6f6bacad2" +checksum = "9683f9a5a998d873c0d21fcbe3c083009670149a8fab228644b8bd36b2c48cb3" dependencies = [ "wit-bindgen-rt", ] [[package]] name = "wasi-preview1-component-adapter-provider" -version = "29.0.1" +version = "31.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dcd9f21bbde82ba59e415a8725e6ad0d0d7e9e460b1a3ccbca5bdee952c1a324" +checksum = "86fabda09a0d89ffd1615b297b4a5d4b4d99df9598aeb24685837e63019e927b" [[package]] name = "wasm-bindgen" @@ -5789,7 +5772,7 @@ dependencies = [ "log", "proc-macro2", "quote", - "syn 2.0.96", + "syn 2.0.101", "wasm-bindgen-shared", ] @@ -5811,7 +5794,7 @@ checksum = "8ae87ea40c9f689fc23f209965b6fb8a99ad69aeeb0231408be24920604395de" dependencies = [ "proc-macro2", "quote", - "syn 2.0.96", + "syn 2.0.101", "wasm-bindgen-backend", "wasm-bindgen-shared", ] @@ -5827,9 +5810,9 @@ dependencies = [ [[package]] name = "wasm-component-ld" -version = "0.5.12" +version = "0.5.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "580305a8e3f1b7a79859a8db897de643533b2851c5eb080fe5800233f16dec88" +checksum = "a60a07a994a3538b57d8c5f8caba19f4793fb4c7156276e5e90e90acbb829e20" dependencies = [ "anyhow", "clap", @@ -5837,7 +5820,7 @@ dependencies = [ "libc", "tempfile", "wasi-preview1-component-adapter-provider", - "wasmparser 0.223.0", + "wasmparser 0.229.0", "wat", "windows-sys 0.59.0", "winsplit", @@ -5854,46 +5837,41 @@ dependencies = [ [[package]] name = "wasm-encoder" -version = "0.219.1" +version = "0.219.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "29cbbd772edcb8e7d524a82ee8cef8dd046fc14033796a754c3ad246d019fa54" +checksum = "8aa79bcd666a043b58f5fa62b221b0b914dd901e6f620e8ab7371057a797f3e1" dependencies = [ "leb128", - "wasmparser 0.219.1", + "wasmparser 0.219.2", ] [[package]] name = "wasm-encoder" -version = "0.223.0" +version = "0.229.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7e636076193fa68103e937ac951b5f2f587624097017d764b8984d9c0f149464" +checksum = "38ba1d491ecacb085a2552025c10a675a6fddcbd03b1fc9b36c536010ce265d2" dependencies = [ - "leb128", - "wasmparser 0.223.0", + "leb128fmt", + "wasmparser 0.229.0", ] [[package]] name = "wasm-metadata" -version = "0.223.0" +version = "0.229.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5c730c3379d3d20e5a0245b0724b924483e853588ca8fba547c1e21f19e7d735" +checksum = "78fdb7d29a79191ab363dc90c1ddd3a1e880ffd5348d92d48482393a9e6c5f4d" dependencies = [ "anyhow", "indexmap", - "serde", - "serde_derive", - "serde_json", - "spdx", - "url", - "wasm-encoder 0.223.0", - "wasmparser 0.223.0", + "wasm-encoder 0.229.0", + "wasmparser 0.229.0", ] [[package]] name = "wasmparser" -version = "0.219.1" +version = "0.219.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5c771866898879073c53b565a6c7b49953795159836714ac56a5befb581227c5" +checksum = "5220ee4c6ffcc0cb9d7c47398052203bc902c8ef3985b0c8134118440c0b2921" dependencies = [ "bitflags", "indexmap", @@ -5901,18 +5879,18 @@ dependencies = [ [[package]] name = "wasmparser" -version = "0.222.0" +version = "0.222.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4adf50fde1b1a49c1add6a80d47aea500c88db70551805853aa8b88f3ea27ab5" +checksum = "fa210fd1788e6b37a1d1930f3389c48e1d6ebd1a013d34fa4b7f9e3e3bf03146" dependencies = [ "bitflags", ] [[package]] name = "wasmparser" -version = "0.223.0" +version = "0.229.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d5a99faceb1a5a84dd6084ec4bfa4b2ab153b5793b43fd8f58b89232634afc35" +checksum = "0cc3b1f053f5d41aa55640a1fa9b6d1b8a9e4418d118ce308d20e24ff3575a8c" dependencies = [ "bitflags", "hashbrown", @@ -5923,22 +5901,22 @@ dependencies = [ [[package]] name = "wast" -version = "223.0.0" +version = "229.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d59b2ba8a2ff9f06194b7be9524f92e45e70149f4dacc0d0c7ad92b59ac875e4" +checksum = "63fcaff613c12225696bb163f79ca38ffb40e9300eff0ff4b8aa8b2f7eadf0d9" dependencies = [ "bumpalo", - "leb128", + "leb128fmt", "memchr", "unicode-width 0.2.0", - "wasm-encoder 0.223.0", + "wasm-encoder 0.229.0", ] [[package]] name = "wat" -version = "1.223.0" +version = "1.229.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "662786915c427e4918ff01eabb3c4756d4d947cd8f635761526b4cc9da2eaaad" +checksum = "4189bad08b70455a9e9e67dc126d2dcf91fac143a80f1046747a5dde6d4c33e0" dependencies = [ "wast", ] @@ -5986,76 +5964,81 @@ checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" [[package]] name = "windows" -version = "0.57.0" +version = "0.59.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "12342cb4d8e3b046f3d80effd474a7a02447231330ef77d71daa6fbc40681143" +checksum = "7f919aee0a93304be7f62e8e5027811bbba96bcb1de84d6618be56e43f8a32a1" dependencies = [ - "windows-core 0.57.0", - "windows-targets 0.52.6", + "windows-core 0.59.0", + "windows-targets 0.53.0", ] [[package]] name = "windows" -version = "0.59.0" +version = "0.61.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f919aee0a93304be7f62e8e5027811bbba96bcb1de84d6618be56e43f8a32a1" +checksum = "c5ee8f3d025738cb02bad7868bbb5f8a6327501e870bf51f1b455b0a2454a419" dependencies = [ - "windows-core 0.59.0", - "windows-targets 0.53.0", + "windows-collections", + "windows-core 0.61.0", + "windows-future", + "windows-link", + "windows-numerics", ] [[package]] name = "windows-bindgen" -version = "0.59.0" +version = "0.61.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b7fb600834d7e868f6e5bb748a86101427330fafbf9485c331b9d5f562d54a5" +checksum = "ac1c59c20569610dd9ed784d5f003fb493ec57b4cf39d974eb03a84bb7156c90" dependencies = [ "rayon", + "serde", + "serde_json", ] [[package]] -name = "windows-core" -version = "0.52.0" +name = "windows-collections" +version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "33ab640c8d7e35bf8ba19b884ba838ceb4fba93a4e8c65a9059d08afcfc683d9" +checksum = "3beeceb5e5cfd9eb1d76b381630e82c4241ccd0d27f1a39ed41b2760b255c5e8" dependencies = [ - "windows-targets 0.52.6", + "windows-core 0.61.0", ] [[package]] name = "windows-core" -version = "0.57.0" +version = "0.59.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d2ed2439a290666cd67ecce2b0ffaad89c2a56b976b736e6ece670297897832d" +checksum = "810ce18ed2112484b0d4e15d022e5f598113e220c53e373fb31e67e21670c1ce" dependencies = [ - "windows-implement 0.57.0", - "windows-interface 0.57.0", - "windows-result 0.1.2", - "windows-targets 0.52.6", + "windows-implement 0.59.0", + "windows-interface", + "windows-result", + "windows-strings 0.3.1", + "windows-targets 0.53.0", ] [[package]] name = "windows-core" -version = "0.59.0" +version = "0.61.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "810ce18ed2112484b0d4e15d022e5f598113e220c53e373fb31e67e21670c1ce" +checksum = "4763c1de310c86d75a878046489e2e5ba02c649d185f21c67d4cf8a56d098980" dependencies = [ - "windows-implement 0.59.0", - "windows-interface 0.59.0", - "windows-result 0.3.0", - "windows-strings", - "windows-targets 0.53.0", + "windows-implement 0.60.0", + "windows-interface", + "windows-link", + "windows-result", + "windows-strings 0.4.0", ] [[package]] -name = "windows-implement" -version = "0.57.0" +name = "windows-future" +version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9107ddc059d5b6fbfbffdfa7a7fe3e22a226def0b2608f72e9d552763d3e1ad7" +checksum = "7a1d6bbefcb7b60acd19828e1bc965da6fcf18a7e39490c5f8be71e54a19ba32" dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.96", + "windows-core 0.61.0", + "windows-link", ] [[package]] @@ -6066,56 +6049,72 @@ checksum = "83577b051e2f49a058c308f17f273b570a6a758386fc291b5f6a934dd84e48c1" dependencies = [ "proc-macro2", "quote", - "syn 2.0.96", + "syn 2.0.101", ] [[package]] -name = "windows-interface" -version = "0.57.0" +name = "windows-implement" +version = "0.60.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "29bee4b38ea3cde66011baa44dba677c432a78593e202392d1e9070cf2a7fca7" +checksum = "a47fddd13af08290e67f4acabf4b459f647552718f683a7b415d290ac744a836" dependencies = [ "proc-macro2", "quote", - "syn 2.0.96", + "syn 2.0.101", ] [[package]] name = "windows-interface" -version = "0.59.0" +version = "0.59.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cb26fd936d991781ea39e87c3a27285081e3c0da5ca0fcbc02d368cc6f52ff01" +checksum = "bd9211b69f8dcdfa817bfd14bf1c97c9188afa36f4750130fcdf3f400eca9fa8" dependencies = [ "proc-macro2", "quote", - "syn 2.0.96", + "syn 2.0.101", ] [[package]] -name = "windows-result" -version = "0.1.2" +name = "windows-link" +version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5e383302e8ec8515204254685643de10811af0ed97ea37210dc26fb0032647f8" +checksum = "76840935b766e1b0a05c0066835fb9ec80071d4c09a16f6bd5f7e655e3c14c38" + +[[package]] +name = "windows-numerics" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9150af68066c4c5c07ddc0ce30421554771e528bde427614c61038bc2c92c2b1" dependencies = [ - "windows-targets 0.52.6", + "windows-core 0.61.0", + "windows-link", ] [[package]] name = "windows-result" -version = "0.3.0" +version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d08106ce80268c4067c0571ca55a9b4e9516518eaa1a1fe9b37ca403ae1d1a34" +checksum = "c64fd11a4fd95df68efcfee5f44a294fe71b8bc6a91993e2791938abcc712252" dependencies = [ - "windows-targets 0.53.0", + "windows-link", ] [[package]] name = "windows-strings" -version = "0.3.0" +version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b888f919960b42ea4e11c2f408fadb55f78a9f236d5eef084103c8ce52893491" +checksum = "87fa48cc5d406560701792be122a10132491cff9d0aeb23583cc2dcafc847319" dependencies = [ - "windows-targets 0.53.0", + "windows-link", +] + +[[package]] +name = "windows-strings" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a2ba9642430ee452d5a7aa78d72907ebe8cfda358e8cb7918a2050581322f97" +dependencies = [ + "windows-link", ] [[package]] @@ -6339,6 +6338,15 @@ dependencies = [ "memchr", ] +[[package]] +name = "winnow" +version = "0.7.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d9fb597c990f03753e08d3c29efbfcf2019a003b4bf4ba19225c158e1549f0f3" +dependencies = [ + "memchr", +] + [[package]] name = "winsplit" version = "0.1.0" @@ -6347,18 +6355,18 @@ checksum = "3ab703352da6a72f35c39a533526393725640575bb211f61987a2748323ad956" [[package]] name = "wit-bindgen-rt" -version = "0.33.0" +version = "0.39.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3268f3d866458b787f390cf61f4bbb563b922d091359f9608842999eaee3943c" +checksum = "6f42320e61fe2cfd34354ecb597f86f413484a798ba44a8ca1165c58d42da6c1" dependencies = [ "bitflags", ] [[package]] name = "wit-component" -version = "0.223.0" +version = "0.229.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c10ed2aeee4c8ec5715875f62f4a3de3608d6987165c116810d8c2908aa9d93b" +checksum = "7f550067740e223bfe6c4878998e81cdbe2529dd9a793dc49248dd6613394e8b" dependencies = [ "anyhow", "bitflags", @@ -6367,17 +6375,17 @@ dependencies = [ "serde", "serde_derive", "serde_json", - "wasm-encoder 0.223.0", + "wasm-encoder 0.229.0", "wasm-metadata", - "wasmparser 0.223.0", + "wasmparser 0.229.0", "wit-parser", ] [[package]] name = "wit-parser" -version = "0.223.0" +version = "0.229.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "92772f4dcacb804b275981eea1d920b12b377993b53307f1e33d87404e080281" +checksum = "459c6ba62bf511d6b5f2a845a2a736822e38059c1cfa0b644b467bbbfae4efa6" dependencies = [ "anyhow", "id-arena", @@ -6388,7 +6396,7 @@ dependencies = [ "serde_derive", "serde_json", "unicode-xid", - "wasmparser 0.223.0", + "wasmparser 0.229.0", ] [[package]] @@ -6409,12 +6417,11 @@ version = "0.1.1" [[package]] name = "xattr" -version = "1.4.0" +version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e105d177a3871454f754b33bb0ee637ecaaac997446375fd3e5d43a2ed00c909" +checksum = "0d65cbf2f12c15564212d48f4e3dfb87923d25d611f2aed18f4cb23f0413d89e" dependencies = [ "libc", - "linux-raw-sys", "rustix", ] @@ -6456,69 +6463,48 @@ checksum = "2380878cad4ac9aac1e2435f3eb4020e8374b5f13c296cb75b4620ff8e229154" dependencies = [ "proc-macro2", "quote", - "syn 2.0.96", + "syn 2.0.101", "synstructure", ] [[package]] name = "zerocopy" -version = "0.7.35" +version = "0.8.25" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1b9b4fd18abc82b8136838da5d50bae7bdea537c574d8dc1a34ed098d6c166f0" +checksum = "a1702d9583232ddb9174e01bb7c15a2ab8fb1bc6f227aa1233858c351a3ba0cb" dependencies = [ - "byteorder", - "zerocopy-derive 0.7.35", -] - -[[package]] -name = "zerocopy" -version = "0.8.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a367f292d93d4eab890745e75a778da40909cab4d6ff8173693812f79c4a2468" -dependencies = [ - "zerocopy-derive 0.8.14", + "zerocopy-derive", ] [[package]] name = "zerocopy-derive" -version = "0.7.35" +version = "0.8.25" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fa4f8080344d4671fb4e831a13ad1e68092748387dfc4f55e356242fae12ce3e" +checksum = "28a6e20d751156648aa063f3800b706ee209a32c0b4d9f24be3d980b01be55ef" dependencies = [ "proc-macro2", "quote", - "syn 2.0.96", -] - -[[package]] -name = "zerocopy-derive" -version = "0.8.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d3931cb58c62c13adec22e38686b559c86a30565e16ad6e8510a337cedc611e1" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.96", + "syn 2.0.101", ] [[package]] name = "zerofrom" -version = "0.1.5" +version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cff3ee08c995dee1859d998dea82f7374f2826091dd9cd47def953cae446cd2e" +checksum = "50cc42e0333e05660c3587f3bf9d0478688e15d870fab3346451ce7f8c9fbea5" dependencies = [ "zerofrom-derive", ] [[package]] name = "zerofrom-derive" -version = "0.1.5" +version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "595eed982f7d355beb85837f651fa22e90b3c044842dc7f2c2842c086f295808" +checksum = "d71e5d6e06ab090c67b5e44993ec16b72dcbaabc526db883a360057678b48502" dependencies = [ "proc-macro2", "quote", - "syn 2.0.96", + "syn 2.0.101", "synstructure", ] @@ -6541,5 +6527,5 @@ checksum = "6eafa6dfb17584ea3e2bd6e76e0cc15ad7af12b09abdd1ca55961bed9b1063c6" dependencies = [ "proc-macro2", "quote", - "syn 2.0.96", + "syn 2.0.101", ] diff --git a/INSTALL.md b/INSTALL.md index 30e08201d6dfb..98eb825cd10f6 100644 --- a/INSTALL.md +++ b/INSTALL.md @@ -75,8 +75,31 @@ See [the rustc-dev-guide for more info][sysllvm]. 2. Configure the build settings: + If you're unsure which build configurations to use and need a good default, you + can run the interactive `x.py setup` command. This will guide you through selecting + a config profile, setting up the LSP, configuring a Git hook, etc. + + With `configure` script, you can handle multiple configurations in a single + command which is useful to create complex/advanced config files. For example: + ```sh - ./configure + ./configure --build=aarch64-unknown-linux-gnu \ + --enable-full-tools \ + --enable-profiler \ + --enable-sanitizers \ + --enable-compiler-docs \ + --set target.aarch64-unknown-linux-gnu.linker=clang \ + --set target.aarch64-unknown-linux-gnu.ar=/rustroot/bin/llvm-ar \ + --set target.aarch64-unknown-linux-gnu.ranlib=/rustroot/bin/llvm-ranlib \ + --set llvm.link-shared=true \ + --set llvm.thin-lto=true \ + --set llvm.libzstd=true \ + --set llvm.ninja=false \ + --set rust.debug-assertions=false \ + --set rust.jemalloc \ + --set rust.use-lld=true \ + --set rust.lto=thin \ + --set rust.codegen-units=1 ``` If you plan to use `x.py install` to create an installation, you can either diff --git a/RELEASES.md b/RELEASES.md index 755e73a34c6bf..1a77c33b9957d 100644 --- a/RELEASES.md +++ b/RELEASES.md @@ -1,3 +1,292 @@ +Version 1.87.0 (2025-05-15) +========================== + + + +Language +-------- +- [Stabilize `asm_goto` feature](https://github.com/rust-lang/rust/pull/133870) +- [Allow parsing open beginning ranges (`..EXPR`) after unary operators `!`, `~`, `-`, and `*`}](https://github.com/rust-lang/rust/pull/134900). +- [Don't require method impls for methods with `Self: Sized` bounds in `impl`s for unsized types](https://github.com/rust-lang/rust/pull/135480) +- [Stabilize `feature(precise_capturing_in_traits)` allowing `use<...>` bounds on return position `impl Trait` in `trait`s](https://github.com/rust-lang/rust/pull/138128) + + + +Compiler +-------- +- [x86: make SSE2 required for i686 targets and use it to pass SIMD types](https://github.com/rust-lang/rust/pull/135408) + + + +Platform Support +---------------- +- [Remove `i586-pc-windows-msvc` target](https://github.com/rust-lang/rust/pull/137957) + +Refer to Rust's [platform support page][platform-support-doc] +for more information on Rust's tiered platform support. + +[platform-support-doc]: https://doc.rust-lang.org/rustc/platform-support.html + + + +Libraries +--------- +- [Stabilize the anonymous pipe API](https://github.com/rust-lang/rust/issues/127154) +- [Add support for unbounded left/right shift operations](https://github.com/rust-lang/rust/issues/129375) +- [Print pointer metadata in `Debug` impl of raw pointers](https://github.com/rust-lang/rust/pull/135080) +- [`Vec::with_capacity` guarantees it allocates with the amount requested, even if `Vec::capacity` returns a different number.](https://github.com/rust-lang/rust/pull/135933) +- Most `std::arch` intrinsics which don't take pointer arguments can now be called from safe code if the caller has the appropriate target features already enabled (https://github.com/rust-lang/stdarch/pull/1714, https://github.com/rust-lang/stdarch/pull/1716, https://github.com/rust-lang/stdarch/pull/1717) +- [Undeprecate `env::home_dir`](https://github.com/rust-lang/rust/pull/137327) +- [Denote `ControlFlow` as `#[must_use]`](https://github.com/rust-lang/rust/pull/137449) +- [Macros such as `assert_eq!` and `vec!` now support `const {...}` expressions](https://github.com/rust-lang/rust/pull/138162) + + + +Stabilized APIs +--------------- + +- [`Vec::extract_if`](https://doc.rust-lang.org/nightly/std/vec/struct.Vec.html#method.extract_if) +- [`vec::ExtractIf`](https://doc.rust-lang.org/nightly/std/vec/struct.ExtractIf.html) +- [`LinkedList::extract_if`](https://doc.rust-lang.org/nightly/std/collections/struct.LinkedList.html#method.extract_if) +- [`linked_list::ExtractIf`](https://doc.rust-lang.org/nightly/std/collections/linked_list/struct.ExtractIf.html) +- [`<[T]>::split_off`](https://doc.rust-lang.org/nightly/std/primitive.slice.html#method.split_off) +- [`<[T]>::split_off_mut`](https://doc.rust-lang.org/nightly/std/primitive.slice.html#method.split_off_mut) +- [`<[T]>::split_off_first`](https://doc.rust-lang.org/nightly/std/primitive.slice.html#method.split_off_first) +- [`<[T]>::split_off_first_mut`](https://doc.rust-lang.org/nightly/std/primitive.slice.html#method.split_off_first_mut) +- [`<[T]>::split_off_last`](https://doc.rust-lang.org/nightly/std/primitive.slice.html#method.split_off_last) +- [`<[T]>::split_off_last_mut`](https://doc.rust-lang.org/nightly/std/primitive.slice.html#method.split_off_last_mut) +- [`String::extend_from_within`](https://doc.rust-lang.org/stable/alloc/string/struct.String.html#method.extend_from_within) +- [`os_str::Display`](https://doc.rust-lang.org/nightly/std/ffi/os_str/struct.Display.html) +- [`OsString::display`](https://doc.rust-lang.org/nightly/std/ffi/struct.OsString.html#method.display) +- [`OsStr::display`](https://doc.rust-lang.org/nightly/std/ffi/struct.OsStr.html#method.display) +- [`io::pipe`](https://doc.rust-lang.org/nightly/std/io/fn.pipe.html) +- [`io::PipeReader`](https://doc.rust-lang.org/nightly/std/io/struct.PipeReader.html) +- [`io::PipeWriter`](https://doc.rust-lang.org/nightly/std/io/struct.PipeWriter.html) +- [`impl From for OwnedHandle`](https://doc.rust-lang.org/nightly/std/os/windows/io/struct.OwnedHandle.html#impl-From%3CPipeReader%3E-for-OwnedHandle) +- [`impl From for OwnedHandle`](https://doc.rust-lang.org/nightly/std/os/windows/io/struct.OwnedHandle.html#impl-From%3CPipeWriter%3E-for-OwnedHandle) +- [`impl From for Stdio`](https://doc.rust-lang.org/nightly/std/process/struct.Stdio.html) +- [`impl From for Stdio`](https://doc.rust-lang.org/nightly/std/process/struct.Stdio.html#impl-From%3CPipeWriter%3E-for-Stdio) +- [`impl From for OwnedFd`](https://doc.rust-lang.org/nightly/std/os/fd/struct.OwnedFd.html#impl-From%3CPipeReader%3E-for-OwnedFd) +- [`impl From for OwnedFd`](https://doc.rust-lang.org/nightly/std/os/fd/struct.OwnedFd.html#impl-From%3CPipeWriter%3E-for-OwnedFd) +- [`Box>::write`](https://doc.rust-lang.org/nightly/std/boxed/struct.Box.html#method.write) +- [`impl TryFrom> for String`](https://doc.rust-lang.org/nightly/std/string/struct.String.html#impl-TryFrom%3CVec%3Cu8%3E%3E-for-String) + +These APIs are now stable in const contexts: + +- [`<*const T>::offset_from_unsigned`](https://doc.rust-lang.org/nightly/std/primitive.pointer.html#method.offset_from_unsigned) +- [`<*const T>::byte_offset_from_unsigned`](https://doc.rust-lang.org/nightly/std/primitive.pointer.html#method.byte_offset_from_unsigned) +- [`<*mut T>::offset_from_unsigned`](https://doc.rust-lang.org/nightly/std/primitive.pointer.html#method.offset_from_unsigned-1) +- [`<*mut T>::byte_offset_from_unsigned`](https://doc.rust-lang.org/nightly/std/primitive.pointer.html#method.byte_offset_from_unsigned-1) +- [`NonNull::offset_from_unsigned`](https://doc.rust-lang.org/nightly/std/ptr/struct.NonNull.html#method.offset_from_unsigned) +- [`NonNull::byte_offset_from_unsigned`](https://doc.rust-lang.org/nightly/std/ptr/struct.NonNull.html#method.byte_offset_from_unsigned) +- [`::cast_signed`](https://doc.rust-lang.org/nightly/std/primitive.usize.html#method.cast_signed) +- [`NonZero::::cast_signed`](https://doc.rust-lang.org/nightly/std/num/struct.NonZero.html#method.cast_signed-5). +- [`::cast_signed`](https://doc.rust-lang.org/nightly/std/primitive.isize.html#method.cast_signed). +- [`NonZero::::cast_unsigned`](https://doc.rust-lang.org/nightly/std/num/struct.NonZero.html#method.cast_unsigned-5). +- [`::is_multiple_of`](https://doc.rust-lang.org/nightly/std/primitive.usize.html#method.is_multiple_of) +- [`::unbounded_shl`](https://doc.rust-lang.org/nightly/std/primitive.usize.html#method.unbounded_shl) +- [`::unbounded_shr`](https://doc.rust-lang.org/nightly/std/primitive.usize.html#method.unbounded_shr) +- [`::unbounded_shl`](https://doc.rust-lang.org/nightly/std/primitive.isize.html#method.unbounded_shl) +- [`::unbounded_shr`](https://doc.rust-lang.org/nightly/std/primitive.isize.html#method.unbounded_shr) +- [`::from_utf8`](https://doc.rust-lang.org/nightly/std/primitive.str.html#method.from_utf8) +- [`::from_utf8_mut`](https://doc.rust-lang.org/nightly/std/primitive.str.html#method.from_utf8_mut) +- [`::from_utf8_unchecked`](https://doc.rust-lang.org/nightly/std/primitive.str.html#method.from_utf8_unchecked) +- [`::from_utf8_unchecked_mut`](https://doc.rust-lang.org/nightly/std/primitive.str.html#method.from_utf8_unchecked_mut) +- [`core::str::from_utf8_mut`](https://doc.rust-lang.org/nightly/std/str/fn.from_utf8_mut.html) +- [`<[T]>::copy_from_slice`](https://doc.rust-lang.org/nightly/std/primitive.slice.html#method.copy_from_slice) +- [`SocketAddr::set_ip`](https://doc.rust-lang.org/nightly/std/net/enum.SocketAddr.html#method.set_ip) +- [`SocketAddr::set_port`](https://doc.rust-lang.org/nightly/std/net/enum.SocketAddr.html#method.set_port), +- [`SocketAddrV4::set_ip`](https://doc.rust-lang.org/nightly/std/net/struct.SocketAddrV4.html#method.set_ip) +- [`SocketAddrV4::set_port`](https://doc.rust-lang.org/nightly/std/net/struct.SocketAddrV4.html#method.set_port), +- [`SocketAddrV6::set_ip`](https://doc.rust-lang.org/nightly/std/net/struct.SocketAddrV6.html#method.set_ip) +- [`SocketAddrV6::set_port`](https://doc.rust-lang.org/nightly/std/net/struct.SocketAddrV6.html#method.set_port) +- [`SocketAddrV6::set_flowinfo`](https://doc.rust-lang.org/nightly/std/net/struct.SocketAddrV6.html#method.set_flowinfo) +- [`SocketAddrV6::set_scope_id`](https://doc.rust-lang.org/nightly/std/net/struct.SocketAddrV6.html#method.set_scope_id) +- [`char::is_digit`](https://doc.rust-lang.org/nightly/std/primitive.char.html#method.is_digit) +- [`char::is_whitespace`](https://doc.rust-lang.org/nightly/std/primitive.char.html#method.is_whitespace) +- [`::midpoint`](https://doc.rust-lang.org/std/primitive.isize.html#method.midpoint) +- [`<[[T; N]]>::as_flattened`](https://doc.rust-lang.org/nightly/std/primitive.slice.html#method.as_flattened) +- [`<[[T; N]]>::as_flattened_mut`](https://doc.rust-lang.org/nightly/std/primitive.slice.html#method.as_flattened_mut) +- [`String::into_bytes`](https://doc.rust-lang.org/nightly/std/string/struct.String.html#method.into_bytes) +- [`String::as_str`](https://doc.rust-lang.org/nightly/std/string/struct.String.html#method.as_str) +- [`String::capacity`](https://doc.rust-lang.org/nightly/std/string/struct.String.html#method.capacity) +- [`String::as_bytes`](https://doc.rust-lang.org/nightly/std/string/struct.String.html#method.as_bytes) +- [`String::len`](https://doc.rust-lang.org/nightly/std/string/struct.String.html#method.len) +- [`String::is_empty`](https://doc.rust-lang.org/nightly/std/string/struct.String.html#method.is_empty) +- [`String::as_mut_str`](https://doc.rust-lang.org/nightly/std/string/struct.String.html#method.as_mut_str) +- [`String::as_mut_vec`](https://doc.rust-lang.org/nightly/std/string/struct.String.html#method.as_mut_vec) +- [`Vec::as_ptr`](https://doc.rust-lang.org/nightly/std/vec/struct.Vec.html#method.as_ptr) +- [`Vec::as_slice`](https://doc.rust-lang.org/nightly/std/vec/struct.Vec.html#method.as_slice) +- [`Vec::capacity`](https://doc.rust-lang.org/nightly/std/vec/struct.Vec.html#method.capacity) +- [`Vec::len`](https://doc.rust-lang.org/nightly/std/vec/struct.Vec.html#method.len) +- [`Vec::is_empty`](https://doc.rust-lang.org/nightly/std/vec/struct.Vec.html#method.is_empty) +- [`Vec::as_mut_slice`](https://doc.rust-lang.org/nightly/std/vec/struct.Vec.html#method.as_mut_slice) +- [`Vec::as_mut_ptr`](https://doc.rust-lang.org/nightly/std/vec/struct.Vec.html#method.as_mut_ptr) + + + +Cargo +----- +- [Add terminal integration via ANSI OSC 9;4 sequences](https://github.com/rust-lang/cargo/pull/14615/) +- [chore: bump openssl to v3](https://github.com/rust-lang/cargo/pull/15232/) +- [feat(package): add --exclude-lockfile flag](https://github.com/rust-lang/cargo/pull/15234/) + + + +Compatibility Notes +------------------- +- [Rust now raises an error for macro invocations inside the `#![crate_name]` attribute](https://github.com/rust-lang/rust/pull/127581) +- [Unstable fields are now always considered to be inhabited](https://github.com/rust-lang/rust/pull/133889) +- [Macro arguments of unary operators followed by open beginning ranges may now be matched differently](https://github.com/rust-lang/rust/pull/134900) +- [Make `Debug` impl of raw pointers print metadata if present](https://github.com/rust-lang/rust/pull/135080) +- [Warn against function pointers using unsupported ABI strings in dependencies](https://github.com/rust-lang/rust/pull/135767) +- [Associated types on `dyn` types are no longer deduplicated](https://github.com/rust-lang/rust/pull/136458) +- [Forbid attributes on `..` inside of struct patterns (`let Struct { #[attribute] .. }) =`](https://github.com/rust-lang/rust/pull/136490) +- [Make `ptr_cast_add_auto_to_object` lint into hard error](https://github.com/rust-lang/rust/pull/136764) +- Many `std::arch` intrinsics are now safe to call in some contexts, there may now be new `unused_unsafe` warnings in existing codebases. +- [Limit `width` and `precision` formatting options to 16 bits on all targets](https://github.com/rust-lang/rust/pull/136932) +- [Turn order dependent trait objects future incompat warning into a hard error](https://github.com/rust-lang/rust/pull/136968) +- [Denote `ControlFlow` as `#[must_use]`](https://github.com/rust-lang/rust/pull/137449) +- [Windows: The standard library no longer links `advapi32`, except on win7.](https://github.com/rust-lang/rust/pull/138233) Code such as C libraries that were relying on this assumption may need to explicitly link advapi32. +- [Proc macros can no longer observe expanded `cfg(true)` attributes.](https://github.com/rust-lang/rust/pull/138844) +- [Start changing the internal representation of pasted tokens](https://github.com/rust-lang/rust/pull/124141). Certain invalid declarative macros that were previously accepted in obscure circumstances are now correctly rejected by the compiler. Use of a `tt` fragment specifier can often fix these macros. +- [Don't allow flattened format_args in const.](https://github.com/rust-lang/rust/pull/139624) + + + +Internal Changes +---------------- + +These changes do not affect any public interfaces of Rust, but they represent +significant improvements to the performance or internals of rustc and related +tools. + +- [Update to LLVM 20](https://github.com/rust-lang/rust/pull/135763) + + +Version 1.86.0 (2025-04-03) +========================== + + + +Language +-------- +- [Stabilize upcasting trait objects to supertraits.](https://github.com/rust-lang/rust/pull/134367) +- [Allow safe functions to be marked with the `#[target_feature]` attribute.](https://github.com/rust-lang/rust/pull/134090) +- [The `missing_abi` lint now warns-by-default.](https://github.com/rust-lang/rust/pull/132397) +- Rust now lints about double negations, to catch cases that might have intended to be a prefix decrement operator (`--x`) as written in other languages. This was previously a clippy lint, `clippy::double_neg`, and is [now available directly in Rust as `double_negations`.](https://github.com/rust-lang/rust/pull/126604) +- [More pointers are now detected as definitely not-null based on their alignment in const eval.](https://github.com/rust-lang/rust/pull/133700) +- [Empty `repr()` attribute applied to invalid items are now correctly rejected.](https://github.com/rust-lang/rust/pull/133925) +- [Inner attributes `#![test]` and `#![rustfmt::skip]` are no longer accepted in more places than intended.](https://github.com/rust-lang/rust/pull/134276) + + + +Compiler +-------- +- [Debug-assert that raw pointers are non-null on access.](https://github.com/rust-lang/rust/pull/134424) +- [Change `-O` to mean `-C opt-level=3` instead of `-C opt-level=2` to match Cargo's defaults.](https://github.com/rust-lang/rust/pull/135439) +- [Fix emission of `overflowing_literals` under certain macro environments.](https://github.com/rust-lang/rust/pull/136393) + + + +Platform Support +---------------- +- [Replace `i686-unknown-redox` target with `i586-unknown-redox`.](https://github.com/rust-lang/rust/pull/136698) +- [Increase baseline CPU of `i686-unknown-hurd-gnu` to Pentium 4.](https://github.com/rust-lang/rust/pull/136700) +- New tier 3 targets: + - [`{aarch64-unknown,x86_64-pc}-nto-qnx710_iosock`](https://github.com/rust-lang/rust/pull/133631). + For supporting Neutrino QNX 7.1 with `io-socket` network stack. + - [`{aarch64-unknown,x86_64-pc}-nto-qnx800`](https://github.com/rust-lang/rust/pull/133631). + For supporting Neutrino QNX 8.0 (`no_std`-only). + - [`{x86_64,i686}-win7-windows-gnu`](https://github.com/rust-lang/rust/pull/134609). + Intended for backwards compatibility with Windows 7. `{x86_64,i686}-win7-windows-msvc` are the Windows MSVC counterparts that already exist as Tier 3 targets. + - [`amdgcn-amd-amdhsa`](https://github.com/rust-lang/rust/pull/134740). + - [`x86_64-pc-cygwin`](https://github.com/rust-lang/rust/pull/134999). + - [`{mips,mipsel}-mti-none-elf`](https://github.com/rust-lang/rust/pull/135074). + Initial bare-metal support. + - [`m68k-unknown-none-elf`](https://github.com/rust-lang/rust/pull/135085). + - [`armv7a-nuttx-{eabi,eabihf}`, `aarch64-unknown-nuttx`, and `thumbv7a-nuttx-{eabi,eabihf}`](https://github.com/rust-lang/rust/pull/135757). + +Refer to Rust's [platform support page][platform-support-doc] +for more information on Rust's tiered platform support. + + + +Libraries +--------- +- The type of `FromBytesWithNulError` in `CStr::from_bytes_with_nul(bytes: &[u8]) -> Result<&Self, FromBytesWithNulError>` was [changed from an opaque struct to an enum](https://github.com/rust-lang/rust/pull/134143), allowing users to examine why the conversion failed. +- [Remove `RustcDecodable` and `RustcEncodable`.](https://github.com/rust-lang/rust/pull/134272) +- [Deprecate libtest's `--logfile` option.](https://github.com/rust-lang/rust/pull/134283) +- [On recent versions of Windows, `std::fs::remove_file` will now remove read-only files.](https://github.com/rust-lang/rust/pull/134679) + + + +Stabilized APIs +--------------- + +- [`{float}::next_down`](https://doc.rust-lang.org/stable/std/primitive.f64.html#method.next_down) +- [`{float}::next_up`](https://doc.rust-lang.org/stable/std/primitive.f64.html#method.next_up) +- [`<[_]>::get_disjoint_mut`](https://doc.rust-lang.org/stable/std/primitive.slice.html#method.get_disjoint_mut) +- [`<[_]>::get_disjoint_unchecked_mut`](https://doc.rust-lang.org/stable/std/primitive.slice.html#method.get_disjoint_unchecked_mut) +- [`slice::GetDisjointMutError`](https://doc.rust-lang.org/stable/std/slice/enum.GetDisjointMutError.html) +- [`HashMap::get_disjoint_mut`](https://doc.rust-lang.org/std/collections/hash_map/struct.HashMap.html#method.get_disjoint_mut) +- [`HashMap::get_disjoint_unchecked_mut`](https://doc.rust-lang.org/std/collections/hash_map/struct.HashMap.html#method.get_disjoint_unchecked_mut) +- [`NonZero::count_ones`](https://doc.rust-lang.org/stable/std/num/struct.NonZero.html#method.count_ones) +- [`Vec::pop_if`](https://doc.rust-lang.org/std/vec/struct.Vec.html#method.pop_if) +- [`sync::Once::wait`](https://doc.rust-lang.org/stable/std/sync/struct.Once.html#method.wait) +- [`sync::Once::wait_force`](https://doc.rust-lang.org/stable/std/sync/struct.Once.html#method.wait_force) +- [`sync::OnceLock::wait`](https://doc.rust-lang.org/stable/std/sync/struct.OnceLock.html#method.wait) + +These APIs are now stable in const contexts: + +- [`hint::black_box`](https://doc.rust-lang.org/stable/std/hint/fn.black_box.html) +- [`io::Cursor::get_mut`](https://doc.rust-lang.org/stable/std/io/struct.Cursor.html#method.get_mut) +- [`io::Cursor::set_position`](https://doc.rust-lang.org/stable/std/io/struct.Cursor.html#method.set_position) +- [`str::is_char_boundary`](https://doc.rust-lang.org/stable/std/primitive.str.html#method.is_char_boundary) +- [`str::split_at`](https://doc.rust-lang.org/stable/std/primitive.str.html#method.split_at) +- [`str::split_at_checked`](https://doc.rust-lang.org/stable/std/primitive.str.html#method.split_at_checked) +- [`str::split_at_mut`](https://doc.rust-lang.org/stable/std/primitive.str.html#method.split_at_mut) +- [`str::split_at_mut_checked`](https://doc.rust-lang.org/stable/std/primitive.str.html#method.split_at_mut_checked) + + + +Cargo +----- +- [When merging, replace rather than combine configuration keys that refer to a program path and its arguments.](https://github.com/rust-lang/cargo/pull/15066/) +- [Error if both `--package` and `--workspace` are passed but the requested package is missing.](https://github.com/rust-lang/cargo/pull/15071/) This was previously silently ignored, which was considered a bug since missing packages should be reported. +- [Deprecate the token argument in `cargo login` to avoid shell history leaks.](https://github.com/rust-lang/cargo/pull/15057/) +- [Simplify the implementation of `SourceID` comparisons.](https://github.com/rust-lang/cargo/pull/14980/) This may potentially change behavior if the canonicalized URL compares differently in alternative registries. + + + +Rustdoc +----- +- [Add a sans-serif font setting.](https://github.com/rust-lang/rust/pull/133636) + + + +Compatibility Notes +------------------- +- [The `wasm_c_abi` future compatibility warning is now a hard error.](https://github.com/rust-lang/rust/pull/133951) + Users of `wasm-bindgen` should upgrade to at least version 0.2.89, otherwise compilation will fail. +- [Remove long-deprecated no-op attributes `#![no_start]` and `#![crate_id]`.](https://github.com/rust-lang/rust/pull/134300) +- [The future incompatibility lint `cenum_impl_drop_cast` has been made into a hard error.](https://github.com/rust-lang/rust/pull/135964) This means it is now an error to cast a field-less enum to an integer if the enum implements `Drop`. +- [SSE2 is now required for "i686" 32-bit x86 hard-float targets; disabling it causes a warning that will become a hard error eventually.](https://github.com/rust-lang/rust/pull/137037) + To compile for pre-SSE2 32-bit x86, use a "i586" target instead. + + + +Internal Changes +---------------- + +These changes do not affect any public interfaces of Rust, but they represent +significant improvements to the performance or internals of rustc and related +tools. + +- [Build the rustc on AArch64 Linux with ThinLTO + PGO.](https://github.com/rust-lang/rust/pull/133807) + The ARM 64-bit compiler (AArch64) on Linux is now optimized with ThinLTO and PGO, similar to the optimizations we have already performed for the x86-64 compiler on Linux. This should make it up to 30% faster. + + Version 1.85.1 (2025-03-18) ========================== diff --git a/bootstrap.example.toml b/bootstrap.example.toml index 2a98821f22529..1371fd6442f96 100644 --- a/bootstrap.example.toml +++ b/bootstrap.example.toml @@ -19,6 +19,14 @@ # Note that this has no default value (x.py uses the defaults in `bootstrap.example.toml`). #profile = +# Inherits configuration values from different configuration files (a.k.a. config extensions). +# Supports absolute paths, and uses the current directory (where the bootstrap was invoked) +# as the base if the given path is not absolute. +# +# The overriding logic follows a right-to-left order. For example, in `include = ["a.toml", "b.toml"]`, +# extension `b.toml` overrides `a.toml`. Also, parent extensions always overrides the inner ones. +#include = [] + # Keeps track of major changes made to this configuration. # # This value also represents ID of the PR that caused major changes. Meaning, @@ -28,7 +36,7 @@ # - A new option # - A change in the default values # -# If the change-id does not match the version currently in use, x.py will +# If the change-id does not match the version currently in use, x.py will # display the changes made to the bootstrap. # To suppress these warnings, you can set change-id = "ignore". #change-id = @@ -172,7 +180,7 @@ # Note that this will attempt to download GCC even if there are local # modifications to the `src/gcc` submodule. # Currently, this is only supported for the `x86_64-unknown-linux-gnu` target. -# download-ci-gcc = false +#download-ci-gcc = false # ============================================================================= # General build configuration options @@ -442,6 +450,9 @@ # What custom diff tool to use for displaying compiletest tests. #compiletest-diff-tool = +# Whether to use the precompiled stage0 libtest with compiletest. +#compiletest-use-stage0-libtest = true + # Indicates whether ccache is used when building certain artifacts (e.g. LLVM). # Set to `true` to use the first `ccache` in PATH, or set an absolute path to use # a specific version. @@ -489,7 +500,7 @@ # building without optimizations takes much longer than optimizing. Further, some platforms # fail to build without this optimization (c.f. #65352). # The valid options are: -# true - Enable optimizations. +# true - Enable optimizations (same as 3). # false - Disable optimizations. # 0 - Disable optimizations. # 1 - Basic optimizations. @@ -559,6 +570,12 @@ # Defaults to rust.debug-assertions value #debug-assertions-std = rust.debug-assertions (boolean) +# Whether or not debug assertions are enabled for the tools built by bootstrap. +# Overrides the `debug-assertions` option, if defined. +# +# Defaults to rust.debug-assertions value +#debug-assertions-tools = rust.debug-assertions (boolean) + # Whether or not to leave debug! and trace! calls in the rust binary. # # Defaults to rust.debug-assertions value diff --git a/compiler/rustc_abi/src/extern_abi.rs b/compiler/rustc_abi/src/extern_abi.rs index 4d70afd4e0bca..55f4845d21670 100644 --- a/compiler/rustc_abi/src/extern_abi.rs +++ b/compiler/rustc_abi/src/extern_abi.rs @@ -60,7 +60,6 @@ pub enum ExternAbi { System { unwind: bool, }, - RustIntrinsic, RustCall, /// *Not* a stable ABI, just directly use the Rust types to describe the ABI for LLVM. Even /// normally ABI-compatible Rust types can become ABI-incompatible with this ABI! @@ -128,7 +127,6 @@ abi_impls! { RiscvInterruptS =><= "riscv-interrupt-s", RustCall =><= "rust-call", RustCold =><= "rust-cold", - RustIntrinsic =><= "rust-intrinsic", Stdcall { unwind: false } =><= "stdcall", Stdcall { unwind: true } =><= "stdcall-unwind", System { unwind: false } =><= "system", @@ -199,7 +197,7 @@ impl ExternAbi { /// - are subject to change between compiler versions pub fn is_rustic_abi(self) -> bool { use ExternAbi::*; - matches!(self, Rust | RustCall | RustIntrinsic | RustCold) + matches!(self, Rust | RustCall | RustCold) } pub fn supports_varargs(self) -> bool { diff --git a/compiler/rustc_abi/src/layout.rs b/compiler/rustc_abi/src/layout.rs index 7bffeaf4cc9e2..42250aa173bbd 100644 --- a/compiler/rustc_abi/src/layout.rs +++ b/compiler/rustc_abi/src/layout.rs @@ -315,7 +315,7 @@ impl LayoutCalculator { repr: &ReprOptions, variants: &IndexSlice>, is_enum: bool, - is_unsafe_cell: bool, + is_special_no_niche: bool, scalar_valid_range: (Bound, Bound), discr_range_of_repr: impl Fn(i128, i128) -> (Integer, bool), discriminants: impl Iterator, @@ -348,7 +348,7 @@ impl LayoutCalculator { repr, variants, is_enum, - is_unsafe_cell, + is_special_no_niche, scalar_valid_range, always_sized, present_first, @@ -505,7 +505,7 @@ impl LayoutCalculator { repr: &ReprOptions, variants: &IndexSlice>, is_enum: bool, - is_unsafe_cell: bool, + is_special_no_niche: bool, scalar_valid_range: (Bound, Bound), always_sized: bool, present_first: VariantIdx, @@ -524,7 +524,7 @@ impl LayoutCalculator { let mut st = self.univariant(&variants[v], repr, kind)?; st.variants = Variants::Single { index: v }; - if is_unsafe_cell { + if is_special_no_niche { let hide_niches = |scalar: &mut _| match scalar { Scalar::Initialized { value, valid_range } => { *valid_range = WrappingRange::full(value.size(dl)) diff --git a/compiler/rustc_abi/src/lib.rs b/compiler/rustc_abi/src/lib.rs index 843d5ca61dddb..59b74d2922145 100644 --- a/compiler/rustc_abi/src/lib.rs +++ b/compiler/rustc_abi/src/lib.rs @@ -1829,7 +1829,7 @@ pub struct PointeeInfo { pub safe: Option, /// If `safe` is `Some`, then the pointer is either null or dereferenceable for this many bytes. /// On a function argument, "dereferenceable" here means "dereferenceable for the entire duration - /// of this function call", i.e. it is UB for the memory that this pointer points to to be freed + /// of this function call", i.e. it is UB for the memory that this pointer points to be freed /// while this function is still running. /// The size can be zero if the pointer is not dereferenceable. pub size: Size, diff --git a/compiler/rustc_arena/src/lib.rs b/compiler/rustc_arena/src/lib.rs index 6aaac072e4b28..d3b7e679d171b 100644 --- a/compiler/rustc_arena/src/lib.rs +++ b/compiler/rustc_arena/src/lib.rs @@ -21,8 +21,10 @@ #![feature(decl_macro)] #![feature(dropck_eyepatch)] #![feature(maybe_uninit_slice)] +#![feature(never_type)] #![feature(rustc_attrs)] #![feature(rustdoc_internals)] +#![feature(unwrap_infallible)] // tidy-alphabetical-end use std::alloc::Layout; @@ -200,6 +202,18 @@ impl TypedArena { /// storing the elements in the arena. #[inline] pub fn alloc_from_iter>(&self, iter: I) -> &mut [T] { + self.try_alloc_from_iter(iter.into_iter().map(Ok::)).into_ok() + } + + /// Allocates the elements of this iterator into a contiguous slice in the `TypedArena`. + /// + /// Note: for reasons of reentrancy and panic safety we collect into a `SmallVec<[_; 8]>` before + /// storing the elements in the arena. + #[inline] + pub fn try_alloc_from_iter( + &self, + iter: impl IntoIterator>, + ) -> Result<&mut [T], E> { // Despite the similarlty with `DroplessArena`, we cannot reuse their fast case. The reason // is subtle: these arenas are reentrant. In other words, `iter` may very well be holding a // reference to `self` and adding elements to the arena during iteration. @@ -214,18 +228,19 @@ impl TypedArena { // doesn't need to be hyper-optimized. assert!(size_of::() != 0); - let mut vec: SmallVec<[_; 8]> = iter.into_iter().collect(); + let vec: Result, E> = iter.into_iter().collect(); + let mut vec = vec?; if vec.is_empty() { - return &mut []; + return Ok(&mut []); } // Move the content to the arena by copying and then forgetting it. let len = vec.len(); let start_ptr = self.alloc_raw_slice(len); - unsafe { + Ok(unsafe { vec.as_ptr().copy_to_nonoverlapping(start_ptr, len); vec.set_len(0); slice::from_raw_parts_mut(start_ptr, len) - } + }) } /// Grows the arena. @@ -566,27 +581,34 @@ impl DroplessArena { // `drop`. unsafe { self.write_from_iter(iter, len, mem) } } - (_, _) => { - outline(move || -> &mut [T] { - // Takes care of reentrancy. - let mut vec: SmallVec<[_; 8]> = iter.collect(); - if vec.is_empty() { - return &mut []; - } - // Move the content to the arena by copying it and then forgetting - // the content of the SmallVec - unsafe { - let len = vec.len(); - let start_ptr = - self.alloc_raw(Layout::for_value::<[T]>(vec.as_slice())) as *mut T; - vec.as_ptr().copy_to_nonoverlapping(start_ptr, len); - vec.set_len(0); - slice::from_raw_parts_mut(start_ptr, len) - } - }) - } + (_, _) => outline(move || self.try_alloc_from_iter(iter.map(Ok::)).into_ok()), } } + + #[inline] + pub fn try_alloc_from_iter( + &self, + iter: impl IntoIterator>, + ) -> Result<&mut [T], E> { + // Despite the similarlty with `alloc_from_iter`, we cannot reuse their fast case, as we + // cannot know the minimum length of the iterator in this case. + assert!(size_of::() != 0); + + // Takes care of reentrancy. + let vec: Result, E> = iter.into_iter().collect(); + let mut vec = vec?; + if vec.is_empty() { + return Ok(&mut []); + } + // Move the content to the arena by copying and then forgetting it. + let len = vec.len(); + Ok(unsafe { + let start_ptr = self.alloc_raw(Layout::for_value::<[T]>(vec.as_slice())) as *mut T; + vec.as_ptr().copy_to_nonoverlapping(start_ptr, len); + vec.set_len(0); + slice::from_raw_parts_mut(start_ptr, len) + }) + } } /// Declare an `Arena` containing one dropless arena and many typed arenas (the diff --git a/compiler/rustc_ast/Cargo.toml b/compiler/rustc_ast/Cargo.toml index 902287d032802..b2d3b90fc4494 100644 --- a/compiler/rustc_ast/Cargo.toml +++ b/compiler/rustc_ast/Cargo.toml @@ -7,10 +7,10 @@ edition = "2024" # tidy-alphabetical-start bitflags = "2.4.1" memchr = "2.7.4" +rustc-literal-escaper = "0.0.2" rustc_ast_ir = { path = "../rustc_ast_ir" } rustc_data_structures = { path = "../rustc_data_structures" } rustc_index = { path = "../rustc_index" } -rustc_lexer = { path = "../rustc_lexer" } rustc_macros = { path = "../rustc_macros" } rustc_serialize = { path = "../rustc_serialize" } rustc_span = { path = "../rustc_span" } diff --git a/compiler/rustc_ast/src/ast.rs b/compiler/rustc_ast/src/ast.rs index 064f05ef1f3ef..bc75d6d79263e 100644 --- a/compiler/rustc_ast/src/ast.rs +++ b/compiler/rustc_ast/src/ast.rs @@ -120,6 +120,17 @@ impl Path { Path { segments: thin_vec![PathSegment::from_ident(ident)], span: ident.span, tokens: None } } + pub fn is_ident(&self, name: Symbol) -> bool { + if let [segment] = self.segments.as_ref() + && segment.args.is_none() + && segment.ident.name == name + { + true + } else { + false + } + } + pub fn is_global(&self) -> bool { self.segments.first().is_some_and(|segment| segment.ident.name == kw::PathRoot) } @@ -563,6 +574,7 @@ impl Pat { /// This is intended for use by diagnostics. pub fn to_ty(&self) -> Option> { let kind = match &self.kind { + PatKind::Missing => unreachable!(), // In a type expression `_` is an inference variable. PatKind::Wild => TyKind::Infer, // An IDENT pattern with no binding mode would be valid as path to a type. E.g. `u32`. @@ -625,7 +637,8 @@ impl Pat { | PatKind::Guard(s, _) => s.walk(it), // These patterns do not contain subpatterns, skip. - PatKind::Wild + PatKind::Missing + | PatKind::Wild | PatKind::Rest | PatKind::Never | PatKind::Expr(_) @@ -676,6 +689,7 @@ impl Pat { /// Return a name suitable for diagnostics. pub fn descr(&self) -> Option { match &self.kind { + PatKind::Missing => unreachable!(), PatKind::Wild => Some("_".to_string()), PatKind::Ident(BindingMode::NONE, ident, None) => Some(format!("{ident}")), PatKind::Ref(pat, mutbl) => pat.descr().map(|d| format!("&{}{d}", mutbl.prefix_str())), @@ -769,6 +783,9 @@ pub enum RangeSyntax { // Adding a new variant? Please update `test_pat` in `tests/ui/macros/stringify.rs`. #[derive(Clone, Encodable, Decodable, Debug)] pub enum PatKind { + /// A missing pattern, e.g. for an anonymous param in a bare fn like `fn f(u32)`. + Missing, + /// Represents a wildcard pattern (`_`). Wild, @@ -981,6 +998,75 @@ impl BinOpKind { pub type BinOp = Spanned; +// Sometimes `BinOpKind` and `AssignOpKind` need the same treatment. The +// operations covered by `AssignOpKind` are a subset of those covered by +// `BinOpKind`, so it makes sense to convert `AssignOpKind` to `BinOpKind`. +impl From for BinOpKind { + fn from(op: AssignOpKind) -> BinOpKind { + match op { + AssignOpKind::AddAssign => BinOpKind::Add, + AssignOpKind::SubAssign => BinOpKind::Sub, + AssignOpKind::MulAssign => BinOpKind::Mul, + AssignOpKind::DivAssign => BinOpKind::Div, + AssignOpKind::RemAssign => BinOpKind::Rem, + AssignOpKind::BitXorAssign => BinOpKind::BitXor, + AssignOpKind::BitAndAssign => BinOpKind::BitAnd, + AssignOpKind::BitOrAssign => BinOpKind::BitOr, + AssignOpKind::ShlAssign => BinOpKind::Shl, + AssignOpKind::ShrAssign => BinOpKind::Shr, + } + } +} + +#[derive(Clone, Copy, Debug, PartialEq, Encodable, Decodable, HashStable_Generic)] +pub enum AssignOpKind { + /// The `+=` operator (addition) + AddAssign, + /// The `-=` operator (subtraction) + SubAssign, + /// The `*=` operator (multiplication) + MulAssign, + /// The `/=` operator (division) + DivAssign, + /// The `%=` operator (modulus) + RemAssign, + /// The `^=` operator (bitwise xor) + BitXorAssign, + /// The `&=` operator (bitwise and) + BitAndAssign, + /// The `|=` operator (bitwise or) + BitOrAssign, + /// The `<<=` operator (shift left) + ShlAssign, + /// The `>>=` operator (shift right) + ShrAssign, +} + +impl AssignOpKind { + pub fn as_str(&self) -> &'static str { + use AssignOpKind::*; + match self { + AddAssign => "+=", + SubAssign => "-=", + MulAssign => "*=", + DivAssign => "/=", + RemAssign => "%=", + BitXorAssign => "^=", + BitAndAssign => "&=", + BitOrAssign => "|=", + ShlAssign => "<<=", + ShrAssign => ">>=", + } + } + + /// AssignOps are always by value. + pub fn is_by_value(self) -> bool { + true + } +} + +pub type AssignOp = Spanned; + /// Unary operator. /// /// Note that `&data` is not an operator, it's an `AddrOf` expression. @@ -1100,6 +1186,7 @@ pub enum MacStmtStyle { #[derive(Clone, Encodable, Decodable, Debug)] pub struct Local { pub id: NodeId, + pub super_: Option, pub pat: P, pub ty: Option>, pub kind: LocalKind, @@ -1546,6 +1633,9 @@ pub enum ExprKind { /// An `if` block, with an optional `else` block. /// /// `if expr { block } else { expr }` + /// + /// If present, the "else" expr is always `ExprKind::Block` (for `else`) or + /// `ExprKind::If` (for `else if`). If(P, P, Option>), /// A while loop, with an optional label. /// @@ -1593,7 +1683,7 @@ pub enum ExprKind { /// An assignment with an operator. /// /// E.g., `a += 1`. - AssignOp(BinOp, P, P), + AssignOp(AssignOp, P, P), /// Access of a named (e.g., `obj.foo`) or unnamed (e.g., `obj.0`) struct field. Field(P, Ident), /// An indexing operation (e.g., `foo[2]`). @@ -1840,7 +1930,7 @@ impl AttrArgs { } /// Delimited arguments, as used in `#[attr()/[]/{}]` or `mac!()/[]/{}`. -#[derive(Clone, Encodable, Decodable, Debug)] +#[derive(Clone, Encodable, Decodable, Debug, HashStable_Generic)] pub struct DelimArgs { pub dspan: DelimSpan, pub delim: Delimiter, // Note: `Delimiter::Invisible` never occurs @@ -1855,24 +1945,21 @@ impl DelimArgs { } } -impl HashStable for DelimArgs -where - CTX: crate::HashStableContext, -{ - fn hash_stable(&self, ctx: &mut CTX, hasher: &mut StableHasher) { - let DelimArgs { dspan, delim, tokens } = self; - dspan.hash_stable(ctx, hasher); - delim.hash_stable(ctx, hasher); - tokens.hash_stable(ctx, hasher); - } -} - /// Represents a macro definition. #[derive(Clone, Encodable, Decodable, Debug, HashStable_Generic)] pub struct MacroDef { pub body: P, /// `true` if macro was defined with `macro_rules`. pub macro_rules: bool, + + pub eii_macro_for: Option, +} + +#[derive(Clone, Encodable, Decodable, Debug, HashStable_Generic)] +pub struct EIIMacroFor { + pub extern_item_path: Path, + pub impl_unsafe: bool, + pub span: Span, } #[derive(Clone, Encodable, Decodable, Debug, Copy, Hash, Eq, PartialEq)] @@ -2394,6 +2481,8 @@ pub enum TyPatKind { /// A range pattern (e.g., `1...2`, `1..2`, `1..`, `..2`, `1..=2`, `..=2`). Range(Option>, Option>, Spanned), + Or(ThinVec>), + /// Placeholder for a pattern that wasn't syntactically well formed in some way. Err(ErrorGuaranteed), } @@ -3303,9 +3392,6 @@ pub struct Item { pub id: NodeId, pub span: Span, pub vis: Visibility, - /// The name of the item. - /// It might be a dummy name in case of anonymous items. - pub ident: Ident, pub kind: K, @@ -3327,23 +3413,23 @@ impl Item { pub fn opt_generics(&self) -> Option<&Generics> { match &self.kind { - ItemKind::ExternCrate(_) + ItemKind::ExternCrate(..) | ItemKind::Use(_) - | ItemKind::Mod(_, _) + | ItemKind::Mod(..) | ItemKind::ForeignMod(_) | ItemKind::GlobalAsm(_) | ItemKind::MacCall(_) | ItemKind::Delegation(_) | ItemKind::DelegationMac(_) - | ItemKind::MacroDef(_) => None, + | ItemKind::MacroDef(..) => None, ItemKind::Static(_) => None, ItemKind::Const(i) => Some(&i.generics), ItemKind::Fn(i) => Some(&i.generics), ItemKind::TyAlias(i) => Some(&i.generics), - ItemKind::TraitAlias(generics, _) - | ItemKind::Enum(_, generics) - | ItemKind::Struct(_, generics) - | ItemKind::Union(_, generics) => Some(&generics), + ItemKind::TraitAlias(_, generics, _) + | ItemKind::Enum(_, _, generics) + | ItemKind::Struct(_, _, generics) + | ItemKind::Union(_, _, generics) => Some(&generics), ItemKind::Trait(i) => Some(&i.generics), ItemKind::Impl(i) => Some(&i.generics), } @@ -3420,6 +3506,7 @@ impl Default for FnHeader { pub struct Trait { pub safety: Safety, pub is_auto: IsAuto, + pub ident: Ident, pub generics: Generics, pub bounds: GenericBounds, pub items: ThinVec>, @@ -3465,6 +3552,7 @@ pub struct TyAliasWhereClauses { #[derive(Clone, Encodable, Decodable, Debug)] pub struct TyAlias { pub defaultness: Defaultness, + pub ident: Ident, pub generics: Generics, pub where_clauses: TyAliasWhereClauses, pub bounds: GenericBounds, @@ -3493,11 +3581,25 @@ pub struct FnContract { #[derive(Clone, Encodable, Decodable, Debug)] pub struct Fn { pub defaultness: Defaultness, + pub ident: Ident, pub generics: Generics, pub sig: FnSig, pub contract: Option>, pub define_opaque: Option>, pub body: Option>, + + /// This fn implements some EII, pointed to by the `path` + pub eii_impl: ThinVec, +} + +#[derive(Clone, Encodable, Decodable, Debug)] +pub struct EIIImpl { + pub node_id: NodeId, + pub eii_macro_path: Path, + pub impl_safety: Safety, + pub span: Span, + pub inner_span: Span, + pub is_default: bool, } #[derive(Clone, Encodable, Decodable, Debug)] @@ -3506,6 +3608,7 @@ pub struct Delegation { pub id: NodeId, pub qself: Option>, pub path: Path, + pub ident: Ident, pub rename: Option, pub body: Option>, /// The item was expanded from a glob delegation item. @@ -3523,6 +3626,7 @@ pub struct DelegationMac { #[derive(Clone, Encodable, Decodable, Debug)] pub struct StaticItem { + pub ident: Ident, pub ty: P, pub safety: Safety, pub mutability: Mutability, @@ -3533,6 +3637,7 @@ pub struct StaticItem { #[derive(Clone, Encodable, Decodable, Debug)] pub struct ConstItem { pub defaultness: Defaultness, + pub ident: Ident, pub generics: Generics, pub ty: P, pub expr: Option>, @@ -3545,7 +3650,7 @@ pub enum ItemKind { /// An `extern crate` item, with the optional *original* crate name if the crate was renamed. /// /// E.g., `extern crate foo` or `extern crate foo_bar as foo`. - ExternCrate(Option), + ExternCrate(Option, Ident), /// A use declaration item (`use`). /// /// E.g., `use foo;`, `use foo::bar;` or `use foo::bar as FooBar;`. @@ -3567,7 +3672,7 @@ pub enum ItemKind { /// E.g., `mod foo;` or `mod foo { .. }`. /// `unsafe` keyword on modules is accepted syntactically for macro DSLs, but not /// semantically by Rust. - Mod(Safety, ModKind), + Mod(Safety, Ident, ModKind), /// An external module (`extern`). /// /// E.g., `extern {}` or `extern "C" {}`. @@ -3581,15 +3686,15 @@ pub enum ItemKind { /// An enum definition (`enum`). /// /// E.g., `enum Foo { C, D }`. - Enum(EnumDef, Generics), + Enum(Ident, EnumDef, Generics), /// A struct definition (`struct`). /// /// E.g., `struct Foo { x: A }`. - Struct(VariantData, Generics), + Struct(Ident, VariantData, Generics), /// A union definition (`union`). /// /// E.g., `union Foo { x: A, y: B }`. - Union(VariantData, Generics), + Union(Ident, VariantData, Generics), /// A trait declaration (`trait`). /// /// E.g., `trait Foo { .. }`, `trait Foo { .. }` or `auto trait Foo {}`. @@ -3597,7 +3702,7 @@ pub enum ItemKind { /// Trait alias. /// /// E.g., `trait Foo = Bar + Quux;`. - TraitAlias(Generics, GenericBounds), + TraitAlias(Ident, Generics, GenericBounds), /// An implementation. /// /// E.g., `impl Foo { .. }` or `impl Trait for Foo { .. }`. @@ -3608,7 +3713,7 @@ pub enum ItemKind { MacCall(P), /// A macro definition. - MacroDef(MacroDef), + MacroDef(Ident, MacroDef), /// A single delegation item (`reuse`). /// @@ -3620,6 +3725,31 @@ pub enum ItemKind { } impl ItemKind { + pub fn ident(&self) -> Option { + match *self { + ItemKind::ExternCrate(_, ident) + | ItemKind::Static(box StaticItem { ident, .. }) + | ItemKind::Const(box ConstItem { ident, .. }) + | ItemKind::Fn(box Fn { ident, .. }) + | ItemKind::Mod(_, ident, _) + | ItemKind::TyAlias(box TyAlias { ident, .. }) + | ItemKind::Enum(ident, ..) + | ItemKind::Struct(ident, ..) + | ItemKind::Union(ident, ..) + | ItemKind::Trait(box Trait { ident, .. }) + | ItemKind::TraitAlias(ident, ..) + | ItemKind::MacroDef(ident, _) + | ItemKind::Delegation(box Delegation { ident, .. }) => Some(ident), + + ItemKind::Use(_) + | ItemKind::ForeignMod(_) + | ItemKind::GlobalAsm(_) + | ItemKind::Impl(_) + | ItemKind::MacCall(_) + | ItemKind::DelegationMac(_) => None, + } + } + /// "a" or "an" pub fn article(&self) -> &'static str { use ItemKind::*; @@ -3660,11 +3790,11 @@ impl ItemKind { Self::Fn(box Fn { generics, .. }) | Self::TyAlias(box TyAlias { generics, .. }) | Self::Const(box ConstItem { generics, .. }) - | Self::Enum(_, generics) - | Self::Struct(_, generics) - | Self::Union(_, generics) + | Self::Enum(_, _, generics) + | Self::Struct(_, _, generics) + | Self::Union(_, _, generics) | Self::Trait(box Trait { generics, .. }) - | Self::TraitAlias(generics, _) + | Self::TraitAlias(_, generics, _) | Self::Impl(box Impl { generics, .. }) => Some(generics), _ => None, } @@ -3700,6 +3830,17 @@ pub enum AssocItemKind { } impl AssocItemKind { + pub fn ident(&self) -> Option { + match *self { + AssocItemKind::Const(box ConstItem { ident, .. }) + | AssocItemKind::Fn(box Fn { ident, .. }) + | AssocItemKind::Type(box TyAlias { ident, .. }) + | AssocItemKind::Delegation(box Delegation { ident, .. }) => Some(ident), + + AssocItemKind::MacCall(_) | AssocItemKind::DelegationMac(_) => None, + } + } + pub fn defaultness(&self) -> Defaultness { match *self { Self::Const(box ConstItem { defaultness, .. }) @@ -3746,14 +3887,26 @@ impl TryFrom for AssocItemKind { pub enum ForeignItemKind { /// A foreign static item (`static FOO: u8`). Static(Box), - /// An foreign function. + /// A foreign function. Fn(Box), - /// An foreign type. + /// A foreign type. TyAlias(Box), /// A macro expanding to foreign items. MacCall(P), } +impl ForeignItemKind { + pub fn ident(&self) -> Option { + match *self { + ForeignItemKind::Static(box StaticItem { ident, .. }) + | ForeignItemKind::Fn(box Fn { ident, .. }) + | ForeignItemKind::TyAlias(box TyAlias { ident, .. }) => Some(ident), + + ForeignItemKind::MacCall(_) => None, + } + } +} + impl From for ItemKind { fn from(foreign_item_kind: ForeignItemKind) -> ItemKind { match foreign_item_kind { @@ -3790,23 +3943,23 @@ mod size_asserts { use super::*; // tidy-alphabetical-start - static_assert_size!(AssocItem, 88); + static_assert_size!(AssocItem, 80); static_assert_size!(AssocItemKind, 16); static_assert_size!(Attribute, 32); static_assert_size!(Block, 32); static_assert_size!(Expr, 72); static_assert_size!(ExprKind, 40); - static_assert_size!(Fn, 176); - static_assert_size!(ForeignItem, 88); + static_assert_size!(Fn, 192); + static_assert_size!(ForeignItem, 80); static_assert_size!(ForeignItemKind, 16); static_assert_size!(GenericArg, 24); static_assert_size!(GenericBound, 88); static_assert_size!(Generics, 40); static_assert_size!(Impl, 136); - static_assert_size!(Item, 136); - static_assert_size!(ItemKind, 64); + static_assert_size!(Item, 144); + static_assert_size!(ItemKind, 80); static_assert_size!(LitKind, 24); - static_assert_size!(Local, 80); + static_assert_size!(Local, 96); static_assert_size!(MetaItemLit, 40); static_assert_size!(Param, 40); static_assert_size!(Pat, 72); diff --git a/compiler/rustc_ast/src/ast_traits.rs b/compiler/rustc_ast/src/ast_traits.rs index 849cc650e9d6a..7f98e7ba8a615 100644 --- a/compiler/rustc_ast/src/ast_traits.rs +++ b/compiler/rustc_ast/src/ast_traits.rs @@ -6,7 +6,6 @@ use std::fmt; use std::marker::PhantomData; use crate::ptr::P; -use crate::token::Nonterminal; use crate::tokenstream::LazyAttrTokenStream; use crate::{ Arm, AssocItem, AttrItem, AttrKind, AttrVec, Attribute, Block, Crate, Expr, ExprField, @@ -206,21 +205,6 @@ impl HasTokens for Attribute { } } -impl HasTokens for Nonterminal { - fn tokens(&self) -> Option<&LazyAttrTokenStream> { - match self { - Nonterminal::NtExpr(expr) | Nonterminal::NtLiteral(expr) => expr.tokens(), - Nonterminal::NtBlock(block) => block.tokens(), - } - } - fn tokens_mut(&mut self) -> Option<&mut Option> { - match self { - Nonterminal::NtExpr(expr) | Nonterminal::NtLiteral(expr) => expr.tokens_mut(), - Nonterminal::NtBlock(block) => block.tokens_mut(), - } - } -} - /// A trait for AST nodes having (or not having) attributes. pub trait HasAttrs { /// This is `true` if this `HasAttrs` might support 'custom' (proc-macro) inner diff --git a/compiler/rustc_ast/src/attr/mod.rs b/compiler/rustc_ast/src/attr/mod.rs index 4d613085d793e..f165c4ddcdd4d 100644 --- a/compiler/rustc_ast/src/attr/mod.rs +++ b/compiler/rustc_ast/src/attr/mod.rs @@ -305,8 +305,8 @@ impl MetaItem { if let [PathSegment { ident, .. }] = self.path.segments[..] { Some(ident) } else { None } } - pub fn name_or_empty(&self) -> Symbol { - self.ident().unwrap_or_else(Ident::empty).name + pub fn name(&self) -> Option { + self.ident().map(|ident| ident.name) } pub fn has_name(&self, name: Symbol) -> bool { @@ -416,10 +416,7 @@ impl MetaItem { // This path is currently unreachable in the test suite. unreachable!() } - Some(TokenTree::Token( - Token { kind: token::OpenDelim(_) | token::CloseDelim(_), .. }, - _, - )) => { + Some(TokenTree::Token(Token { kind, .. }, _)) if kind.is_delim() => { panic!("Should be `AttrTokenTree::Delimited`, not delim tokens: {:?}", tt); } _ => return None, @@ -511,13 +508,14 @@ impl MetaItemInner { } } - /// For a single-segment meta item, returns its name; otherwise, returns `None`. + /// For a single-segment meta item, returns its identifier; otherwise, returns `None`. pub fn ident(&self) -> Option { self.meta_item().and_then(|meta_item| meta_item.ident()) } - pub fn name_or_empty(&self) -> Symbol { - self.ident().unwrap_or_else(Ident::empty).name + /// For a single-segment meta item, returns its name; otherwise, returns `None`. + pub fn name(&self) -> Option { + self.ident().map(|ident| ident.name) } /// Returns `true` if this list item is a MetaItem with a name of `name`. @@ -570,6 +568,14 @@ impl MetaItemInner { } } + /// Returns the bool if `self` is a boolean `MetaItemInner::Literal`. + pub fn boolean_literal(&self) -> Option { + match self { + MetaItemInner::Lit(MetaItemLit { kind: LitKind::Bool(b), .. }) => Some(*b), + _ => None, + } + } + /// Returns the `MetaItem` if `self` is a `MetaItemInner::MetaItem` or if it's /// `MetaItemInner::Lit(MetaItemLit { kind: LitKind::Bool(_), .. })`. pub fn meta_item_or_bool(&self) -> Option<&MetaItemInner> { @@ -619,7 +625,7 @@ pub fn mk_doc_comment( Attribute { kind: AttrKind::DocComment(comment_kind, data), id: g.mk_attr_id(), style, span } } -pub fn mk_attr( +fn mk_attr( g: &AttrIdGenerator, style: AttrStyle, unsafety: Safety, @@ -730,9 +736,9 @@ pub trait AttributeExt: Debug { fn id(&self) -> AttrId; /// For a single-segment attribute (i.e., `#[attr]` and not `#[path::atrr]`), - /// return the name of the attribute, else return the empty identifier. - fn name_or_empty(&self) -> Symbol { - self.ident().unwrap_or_else(Ident::empty).name + /// return the name of the attribute; otherwise, returns `None`. + fn name(&self) -> Option { + self.ident().map(|ident| ident.name) } /// Get the meta item list, `#[attr(meta item list)]` @@ -744,7 +750,7 @@ pub trait AttributeExt: Debug { /// Gets the span of the value literal, as string, when using `#[attr = value]` fn value_span(&self) -> Option; - /// For a single-segment attribute, returns its name; otherwise, returns `None`. + /// For a single-segment attribute, returns its ident; otherwise, returns `None`. fn ident(&self) -> Option; /// Checks whether the path of this attribute matches the name. @@ -762,6 +768,11 @@ pub trait AttributeExt: Debug { self.ident().map(|x| x.name == name).unwrap_or(false) } + #[inline] + fn has_any_name(&self, names: &[Symbol]) -> bool { + names.iter().any(|&name| self.has_name(name)) + } + /// get the span of the entire attribute fn span(&self) -> Span; @@ -805,8 +816,8 @@ impl Attribute { AttributeExt::id(self) } - pub fn name_or_empty(&self) -> Symbol { - AttributeExt::name_or_empty(self) + pub fn name(&self) -> Option { + AttributeExt::name(self) } pub fn meta_item_list(&self) -> Option> { @@ -838,6 +849,11 @@ impl Attribute { AttributeExt::has_name(self, name) } + #[inline] + pub fn has_any_name(&self, names: &[Symbol]) -> bool { + AttributeExt::has_any_name(self, names) + } + pub fn span(&self) -> Span { AttributeExt::span(self) } diff --git a/compiler/rustc_ast/src/expand/autodiff_attrs.rs b/compiler/rustc_ast/src/expand/autodiff_attrs.rs index c8ec185ee5e29..2f918faaf752b 100644 --- a/compiler/rustc_ast/src/expand/autodiff_attrs.rs +++ b/compiler/rustc_ast/src/expand/autodiff_attrs.rs @@ -50,8 +50,16 @@ pub enum DiffActivity { /// with it. Dual, /// Forward Mode, Compute derivatives for this input/output and *overwrite* the shadow argument + /// with it. It expects the shadow argument to be `width` times larger than the original + /// input/output. + Dualv, + /// Forward Mode, Compute derivatives for this input/output and *overwrite* the shadow argument /// with it. Drop the code which updates the original input/output for maximum performance. DualOnly, + /// Forward Mode, Compute derivatives for this input/output and *overwrite* the shadow argument + /// with it. Drop the code which updates the original input/output for maximum performance. + /// It expects the shadow argument to be `width` times larger than the original input/output. + DualvOnly, /// Reverse Mode, Compute derivatives for this &T or *T input and *add* it to the shadow argument. Duplicated, /// Reverse Mode, Compute derivatives for this &T or *T input and *add* it to the shadow argument. @@ -59,7 +67,15 @@ pub enum DiffActivity { DuplicatedOnly, /// All Integers must be Const, but these are used to mark the integer which represents the /// length of a slice/vec. This is used for safety checks on slices. - FakeActivitySize, + /// The integer (if given) specifies the size of the slice element in bytes. + FakeActivitySize(Option), +} + +impl DiffActivity { + pub fn is_dual_or_const(&self) -> bool { + use DiffActivity::*; + matches!(self, |Dual| DualOnly | Dualv | DualvOnly | Const) + } } /// We generate one of these structs for each `#[autodiff(...)]` attribute. #[derive(Clone, Eq, PartialEq, Encodable, Decodable, Debug, HashStable_Generic)] @@ -77,10 +93,27 @@ pub struct AutoDiffAttrs { /// e.g. in the [JAX /// Documentation](https://jax.readthedocs.io/en/latest/_tutorials/advanced-autodiff.html#how-it-s-made-two-foundational-autodiff-functions). pub mode: DiffMode, + /// A user-provided, batching width. If not given, we will default to 1 (no batching). + /// Calling a differentiated, non-batched function through a loop 100 times is equivalent to: + /// - Calling the function 50 times with a batch size of 2 + /// - Calling the function 25 times with a batch size of 4, + /// etc. A batched function takes more (or longer) arguments, and might be able to benefit from + /// cache locality, better re-usal of primal values, and other optimizations. + /// We will (before LLVM's vectorizer runs) just generate most LLVM-IR instructions `width` + /// times, so this massively increases code size. As such, values like 1024 are unlikely to + /// work. We should consider limiting this to u8 or u16, but will leave it at u32 for + /// experiments for now and focus on documenting the implications of a large width. + pub width: u32, pub ret_activity: DiffActivity, pub input_activity: Vec, } +impl AutoDiffAttrs { + pub fn has_primal_ret(&self) -> bool { + matches!(self.ret_activity, DiffActivity::Active | DiffActivity::Dual) + } +} + impl DiffMode { pub fn is_rev(&self) -> bool { matches!(self, DiffMode::Reverse) @@ -114,11 +147,7 @@ pub fn valid_ret_activity(mode: DiffMode, activity: DiffActivity) -> bool { match mode { DiffMode::Error => false, DiffMode::Source => false, - DiffMode::Forward => { - activity == DiffActivity::Dual - || activity == DiffActivity::DualOnly - || activity == DiffActivity::Const - } + DiffMode::Forward => activity.is_dual_or_const(), DiffMode::Reverse => { activity == DiffActivity::Const || activity == DiffActivity::Active @@ -136,10 +165,8 @@ pub fn valid_ret_activity(mode: DiffMode, activity: DiffActivity) -> bool { pub fn valid_ty_for_activity(ty: &P, activity: DiffActivity) -> bool { use DiffActivity::*; // It's always allowed to mark something as Const, since we won't compute derivatives wrt. it. - if matches!(activity, Const) { - return true; - } - if matches!(activity, Dual | DualOnly) { + // Dual variants also support all types. + if activity.is_dual_or_const() { return true; } // FIXME(ZuseZ4) We should make this more robust to also @@ -155,9 +182,7 @@ pub fn valid_input_activity(mode: DiffMode, activity: DiffActivity) -> bool { return match mode { DiffMode::Error => false, DiffMode::Source => false, - DiffMode::Forward => { - matches!(activity, Dual | DualOnly | Const) - } + DiffMode::Forward => activity.is_dual_or_const(), DiffMode::Reverse => { matches!(activity, Active | ActiveOnly | Duplicated | DuplicatedOnly | Const) } @@ -172,10 +197,12 @@ impl Display for DiffActivity { DiffActivity::Active => write!(f, "Active"), DiffActivity::ActiveOnly => write!(f, "ActiveOnly"), DiffActivity::Dual => write!(f, "Dual"), + DiffActivity::Dualv => write!(f, "Dualv"), DiffActivity::DualOnly => write!(f, "DualOnly"), + DiffActivity::DualvOnly => write!(f, "DualvOnly"), DiffActivity::Duplicated => write!(f, "Duplicated"), DiffActivity::DuplicatedOnly => write!(f, "DuplicatedOnly"), - DiffActivity::FakeActivitySize => write!(f, "FakeActivitySize"), + DiffActivity::FakeActivitySize(s) => write!(f, "FakeActivitySize({:?})", s), } } } @@ -203,7 +230,9 @@ impl FromStr for DiffActivity { "ActiveOnly" => Ok(DiffActivity::ActiveOnly), "Const" => Ok(DiffActivity::Const), "Dual" => Ok(DiffActivity::Dual), + "Dualv" => Ok(DiffActivity::Dualv), "DualOnly" => Ok(DiffActivity::DualOnly), + "DualvOnly" => Ok(DiffActivity::DualvOnly), "Duplicated" => Ok(DiffActivity::Duplicated), "DuplicatedOnly" => Ok(DiffActivity::DuplicatedOnly), _ => Err(()), @@ -222,6 +251,7 @@ impl AutoDiffAttrs { pub const fn error() -> Self { AutoDiffAttrs { mode: DiffMode::Error, + width: 0, ret_activity: DiffActivity::None, input_activity: Vec::new(), } @@ -229,6 +259,7 @@ impl AutoDiffAttrs { pub fn source() -> Self { AutoDiffAttrs { mode: DiffMode::Source, + width: 0, ret_activity: DiffActivity::None, input_activity: Vec::new(), } diff --git a/compiler/rustc_ast/src/expand/mod.rs b/compiler/rustc_ast/src/expand/mod.rs index 04c8162932369..323a8fab6d592 100644 --- a/compiler/rustc_ast/src/expand/mod.rs +++ b/compiler/rustc_ast/src/expand/mod.rs @@ -13,12 +13,12 @@ pub mod typetree; #[derive(Debug, Clone, Encodable, Decodable, HashStable_Generic)] pub struct StrippedCfgItem { pub parent_module: ModId, - pub name: Ident, + pub ident: Ident, pub cfg: MetaItem, } impl StrippedCfgItem { pub fn map_mod_id(self, f: impl FnOnce(ModId) -> New) -> StrippedCfgItem { - StrippedCfgItem { parent_module: f(self.parent_module), name: self.name, cfg: self.cfg } + StrippedCfgItem { parent_module: f(self.parent_module), ident: self.ident, cfg: self.cfg } } } diff --git a/compiler/rustc_ast/src/lib.rs b/compiler/rustc_ast/src/lib.rs index da510e4967dba..e572ec99dabf3 100644 --- a/compiler/rustc_ast/src/lib.rs +++ b/compiler/rustc_ast/src/lib.rs @@ -6,20 +6,21 @@ // tidy-alphabetical-start #![allow(internal_features)] -#![cfg_attr(doc, recursion_limit = "256")] // FIXME(nnethercote): will be removed by #124141 +#![cfg_attr(bootstrap, feature(let_chains))] #![doc( html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/", test(attr(deny(warnings))) )] #![doc(rust_logo)] +#![feature(array_windows)] #![feature(associated_type_defaults)] #![feature(box_patterns)] #![feature(if_let_guard)] -#![feature(let_chains)] #![feature(negative_impls)] #![feature(never_type)] #![feature(rustdoc_internals)] #![feature(stmt_expr_attributes)] +#![recursion_limit = "256"] // tidy-alphabetical-end pub mod util { diff --git a/compiler/rustc_ast/src/mut_visit.rs b/compiler/rustc_ast/src/mut_visit.rs index 274fe312f7fad..fc335902d80f5 100644 --- a/compiler/rustc_ast/src/mut_visit.rs +++ b/compiler/rustc_ast/src/mut_visit.rs @@ -9,7 +9,6 @@ use std::ops::DerefMut; use std::panic; -use std::sync::Arc; use rustc_data_structures::flat_map_in_place::FlatMapInPlace; use rustc_data_structures::stack::ensure_sufficient_stack; @@ -20,7 +19,6 @@ use thin_vec::ThinVec; use crate::ast::*; use crate::ptr::P; -use crate::token::{self, Token}; use crate::tokenstream::*; use crate::visit::{AssocCtxt, BoundKind, FnCtxt}; @@ -41,7 +39,6 @@ pub trait WalkItemKind { &mut self, span: Span, id: NodeId, - ident: &mut Ident, visibility: &mut Visibility, ctxt: Self::Ctxt, visitor: &mut impl MutVisitor, @@ -49,11 +46,6 @@ pub trait WalkItemKind { } pub trait MutVisitor: Sized { - /// Mutable token visiting only exists for the `macro_rules` token marker and should not be - /// used otherwise. Token visitor would be entirely separate from the regular visitor if - /// the marker didn't have to visit AST fragments in nonterminal tokens. - const VISIT_TOKENS: bool = false; - // Methods in this trait have one of three forms: // // fn visit_t(&mut self, t: &mut T); // common @@ -361,6 +353,8 @@ pub trait MutVisitor: Sized { // Do nothing. } + // Span visiting is no longer used, but we keep it for now, + // in case it's needed for something like #127241. fn visit_span(&mut self, _sp: &mut Span) { // Do nothing. } @@ -474,12 +468,8 @@ fn visit_attr_args(vis: &mut T, args: &mut AttrArgs) { // No `noop_` prefix because there isn't a corresponding method in `MutVisitor`. fn visit_delim_args(vis: &mut T, args: &mut DelimArgs) { - let DelimArgs { dspan, delim: _, tokens } = args; - visit_tts(vis, tokens); - visit_delim_span(vis, dspan); -} - -pub fn visit_delim_span(vis: &mut T, DelimSpan { open, close }: &mut DelimSpan) { + let DelimArgs { dspan, delim: _, tokens: _ } = args; + let DelimSpan { open, close } = dspan; vis.visit_span(open); vis.visit_span(close); } @@ -553,7 +543,7 @@ fn walk_assoc_item_constraint( } pub fn walk_ty(vis: &mut T, ty: &mut P) { - let Ty { id, kind, span, tokens } = ty.deref_mut(); + let Ty { id, kind, span, tokens: _ } = ty.deref_mut(); vis.visit_id(id); match kind { TyKind::Err(_guar) => {} @@ -601,21 +591,20 @@ pub fn walk_ty(vis: &mut T, ty: &mut P) { } TyKind::MacCall(mac) => vis.visit_mac_call(mac), } - visit_lazy_tts(vis, tokens); vis.visit_span(span); } pub fn walk_ty_pat(vis: &mut T, ty: &mut P) { - let TyPat { id, kind, span, tokens } = ty.deref_mut(); + let TyPat { id, kind, span, tokens: _ } = ty.deref_mut(); vis.visit_id(id); match kind { TyPatKind::Range(start, end, _include_end) => { visit_opt(start, |c| vis.visit_anon_const(c)); visit_opt(end, |c| vis.visit_anon_const(c)); } + TyPatKind::Or(variants) => visit_thin_vec(variants, |p| vis.visit_ty_pat(p)), TyPatKind::Err(_) => {} } - visit_lazy_tts(vis, tokens); vis.visit_span(span); } @@ -655,11 +644,10 @@ fn walk_path_segment(vis: &mut T, segment: &mut PathSegment) { visit_opt(args, |args| vis.visit_generic_args(args)); } -fn walk_path(vis: &mut T, Path { segments, span, tokens }: &mut Path) { +fn walk_path(vis: &mut T, Path { segments, span, tokens: _ }: &mut Path) { for segment in segments { vis.visit_path_segment(segment); } - visit_lazy_tts(vis, tokens); vis.visit_span(span); } @@ -705,7 +693,8 @@ fn walk_parenthesized_parameter_data(vis: &mut T, args: &mut Pare } fn walk_local(vis: &mut T, local: &mut P) { - let Local { id, pat, ty, kind, span, colon_sp, attrs, tokens } = local.deref_mut(); + let Local { id, super_, pat, ty, kind, span, colon_sp, attrs, tokens: _ } = local.deref_mut(); + visit_opt(super_, |sp| vis.visit_span(sp)); vis.visit_id(id); visit_attrs(vis, attrs); vis.visit_pat(pat); @@ -720,7 +709,6 @@ fn walk_local(vis: &mut T, local: &mut P) { vis.visit_block(els); } } - visit_lazy_tts(vis, tokens); visit_opt(colon_sp, |sp| vis.visit_span(sp)); vis.visit_span(span); } @@ -729,14 +717,10 @@ fn walk_attribute(vis: &mut T, attr: &mut Attribute) { let Attribute { kind, id: _, style: _, span } = attr; match kind { AttrKind::Normal(normal) => { - let NormalAttr { - item: AttrItem { unsafety: _, path, args, tokens }, - tokens: attr_tokens, - } = &mut **normal; + let NormalAttr { item: AttrItem { unsafety: _, path, args, tokens: _ }, tokens: _ } = + &mut **normal; vis.visit_path(path); visit_attr_args(vis, args); - visit_lazy_tts(vis, tokens); - visit_lazy_tts(vis, attr_tokens); } AttrKind::DocComment(_kind, _sym) => {} } @@ -750,7 +734,10 @@ fn walk_mac(vis: &mut T, mac: &mut MacCall) { } fn walk_macro_def(vis: &mut T, macro_def: &mut MacroDef) { - let MacroDef { body, macro_rules: _ } = macro_def; + let MacroDef { body, macro_rules: _, eii_macro_for } = macro_def; + if let Some(EIIMacroFor { extern_item_path, impl_unsafe: _, span: _ }) = eii_macro_for { + vis.visit_path(extern_item_path); + } visit_delim_args(vis, body); } @@ -785,126 +772,6 @@ pub fn walk_flat_map_param(vis: &mut T, mut param: Param) -> Smal smallvec![param] } -// No `noop_` prefix because there isn't a corresponding method in `MutVisitor`. -fn visit_attr_tt(vis: &mut T, tt: &mut AttrTokenTree) { - match tt { - AttrTokenTree::Token(token, _spacing) => { - visit_token(vis, token); - } - AttrTokenTree::Delimited(dspan, _spacing, _delim, tts) => { - visit_attr_tts(vis, tts); - visit_delim_span(vis, dspan); - } - AttrTokenTree::AttrsTarget(AttrsTarget { attrs, tokens }) => { - visit_attrs(vis, attrs); - visit_lazy_tts_opt_mut(vis, Some(tokens)); - } - } -} - -// No `noop_` prefix because there isn't a corresponding method in `MutVisitor`. -fn visit_tt(vis: &mut T, tt: &mut TokenTree) { - match tt { - TokenTree::Token(token, _spacing) => { - visit_token(vis, token); - } - TokenTree::Delimited(dspan, _spacing, _delim, tts) => { - visit_tts(vis, tts); - visit_delim_span(vis, dspan); - } - } -} - -// No `noop_` prefix because there isn't a corresponding method in `MutVisitor`. -fn visit_tts(vis: &mut T, TokenStream(tts): &mut TokenStream) { - if T::VISIT_TOKENS && !tts.is_empty() { - let tts = Arc::make_mut(tts); - visit_vec(tts, |tree| visit_tt(vis, tree)); - } -} - -fn visit_attr_tts(vis: &mut T, AttrTokenStream(tts): &mut AttrTokenStream) { - if T::VISIT_TOKENS && !tts.is_empty() { - let tts = Arc::make_mut(tts); - visit_vec(tts, |tree| visit_attr_tt(vis, tree)); - } -} - -fn visit_lazy_tts_opt_mut(vis: &mut T, lazy_tts: Option<&mut LazyAttrTokenStream>) { - if T::VISIT_TOKENS { - if let Some(lazy_tts) = lazy_tts { - let mut tts = lazy_tts.to_attr_token_stream(); - visit_attr_tts(vis, &mut tts); - *lazy_tts = LazyAttrTokenStream::new(tts); - } - } -} - -fn visit_lazy_tts(vis: &mut T, lazy_tts: &mut Option) { - visit_lazy_tts_opt_mut(vis, lazy_tts.as_mut()); -} - -/// Applies ident visitor if it's an ident; applies other visits to interpolated nodes. -/// In practice the ident part is not actually used by specific visitors right now, -/// but there's a test below checking that it works. -// No `noop_` prefix because there isn't a corresponding method in `MutVisitor`. -pub fn visit_token(vis: &mut T, t: &mut Token) { - let Token { kind, span } = t; - match kind { - token::Ident(name, _is_raw) | token::Lifetime(name, _is_raw) => { - let mut ident = Ident::new(*name, *span); - vis.visit_ident(&mut ident); - *name = ident.name; - *span = ident.span; - return; // Avoid visiting the span for the second time. - } - token::NtIdent(ident, _is_raw) => { - vis.visit_ident(ident); - } - token::NtLifetime(ident, _is_raw) => { - vis.visit_ident(ident); - } - token::Interpolated(nt) => { - let nt = Arc::make_mut(nt); - visit_nonterminal(vis, nt); - } - _ => {} - } - vis.visit_span(span); -} - -// No `noop_` prefix because there isn't a corresponding method in `MutVisitor`. -/// Applies the visitor to elements of interpolated nodes. -// -// N.B., this can occur only when applying a visitor to partially expanded -// code, where parsed pieces have gotten implanted ito *other* macro -// invocations. This is relevant for macro hygiene, but possibly not elsewhere. -// -// One problem here occurs because the types for flat_map_item, flat_map_stmt, -// etc., allow the visitor to return *multiple* items; this is a problem for the -// nodes here, because they insist on having exactly one piece. One solution -// would be to mangle the MutVisitor trait to include one-to-many and -// one-to-one versions of these entry points, but that would probably confuse a -// lot of people and help very few. Instead, I'm just going to put in dynamic -// checks. I think the performance impact of this will be pretty much -// nonexistent. The danger is that someone will apply a `MutVisitor` to a -// partially expanded node, and will be confused by the fact that their -// `flat_map_item` or `flat_map_stmt` isn't getting called on `NtItem` or `NtStmt` -// nodes. Hopefully they'll wind up reading this comment, and doing something -// appropriate. -// -// BTW, design choice: I considered just changing the type of, e.g., `NtItem` to -// contain multiple items, but decided against it when I looked at -// `parse_item_or_view_item` and tried to figure out what I would do with -// multiple items there.... -fn visit_nonterminal(vis: &mut T, nt: &mut token::Nonterminal) { - match nt { - token::NtBlock(block) => vis.visit_block(block), - token::NtExpr(expr) => vis.visit_expr(expr), - token::NtLiteral(expr) => vis.visit_expr(expr), - } -} - // No `noop_` prefix because there isn't a corresponding method in `MutVisitor`. fn visit_defaultness(vis: &mut T, defaultness: &mut Defaultness) { match defaultness { @@ -963,19 +830,26 @@ fn walk_fn(vis: &mut T, kind: FnKind<'_>) { match kind { FnKind::Fn( _ctxt, - _ident, _vis, Fn { defaultness, + ident, generics, contract, body, sig: FnSig { header, decl, span }, define_opaque, + eii_impl, }, ) => { - // Identifier and visibility are visited as a part of the item. + // Visibility is visited as a part of the item. visit_defaultness(vis, defaultness); + vis.visit_ident(ident); + + for EIIImpl { node_id, eii_macro_path, .. } in eii_impl { + vis.visit_id(node_id); + vis.visit_path(eii_macro_path); + } vis.visit_fn_header(header); vis.visit_generics(generics); vis.visit_fn_decl(decl); @@ -1222,10 +1096,9 @@ fn walk_mt(vis: &mut T, MutTy { ty, mutbl: _ }: &mut MutTy) { } pub fn walk_block(vis: &mut T, block: &mut P) { - let Block { id, stmts, rules: _, span, tokens } = block.deref_mut(); + let Block { id, stmts, rules: _, span, tokens: _ } = block.deref_mut(); vis.visit_id(id); stmts.flat_map_in_place(|stmt| vis.flat_map_stmt(stmt)); - visit_lazy_tts(vis, tokens); vis.visit_span(span); } @@ -1233,12 +1106,11 @@ pub fn walk_item_kind( kind: &mut K, span: Span, id: NodeId, - ident: &mut Ident, visibility: &mut Visibility, ctxt: K::Ctxt, vis: &mut impl MutVisitor, ) { - kind.walk(span, id, ident, visibility, ctxt, vis) + kind.walk(span, id, visibility, ctxt, vis) } impl WalkItemKind for ItemKind { @@ -1247,21 +1119,22 @@ impl WalkItemKind for ItemKind { &mut self, span: Span, id: NodeId, - ident: &mut Ident, visibility: &mut Visibility, _ctxt: Self::Ctxt, vis: &mut impl MutVisitor, ) { match self { - ItemKind::ExternCrate(_orig_name) => {} + ItemKind::ExternCrate(_orig_name, ident) => vis.visit_ident(ident), ItemKind::Use(use_tree) => vis.visit_use_tree(use_tree), ItemKind::Static(box StaticItem { + ident, ty, safety: _, mutability: _, expr, define_opaque, }) => { + vis.visit_ident(ident); vis.visit_ty(ty); visit_opt(expr, |expr| vis.visit_expr(expr)); walk_define_opaques(vis, define_opaque); @@ -1270,10 +1143,11 @@ impl WalkItemKind for ItemKind { walk_const_item(vis, item); } ItemKind::Fn(func) => { - vis.visit_fn(FnKind::Fn(FnCtxt::Free, ident, visibility, &mut *func), span, id); + vis.visit_fn(FnKind::Fn(FnCtxt::Free, visibility, &mut *func), span, id); } - ItemKind::Mod(safety, mod_kind) => { + ItemKind::Mod(safety, ident, mod_kind) => { visit_safety(vis, safety); + vis.visit_ident(ident); match mod_kind { ModKind::Loaded( items, @@ -1290,18 +1164,29 @@ impl WalkItemKind for ItemKind { } ItemKind::ForeignMod(nm) => vis.visit_foreign_mod(nm), ItemKind::GlobalAsm(asm) => vis.visit_inline_asm(asm), - ItemKind::TyAlias(box TyAlias { defaultness, generics, where_clauses, bounds, ty }) => { + ItemKind::TyAlias(box TyAlias { + defaultness, + ident, + generics, + where_clauses, + bounds, + ty, + }) => { visit_defaultness(vis, defaultness); + vis.visit_ident(ident); vis.visit_generics(generics); visit_bounds(vis, bounds, BoundKind::Bound); visit_opt(ty, |ty| vis.visit_ty(ty)); walk_ty_alias_where_clauses(vis, where_clauses); } - ItemKind::Enum(EnumDef { variants }, generics) => { + ItemKind::Enum(ident, EnumDef { variants }, generics) => { + vis.visit_ident(ident); vis.visit_generics(generics); variants.flat_map_in_place(|variant| vis.flat_map_variant(variant)); } - ItemKind::Struct(variant_data, generics) | ItemKind::Union(variant_data, generics) => { + ItemKind::Struct(ident, variant_data, generics) + | ItemKind::Union(ident, variant_data, generics) => { + vis.visit_ident(ident); vis.visit_generics(generics); vis.visit_variant_data(variant_data); } @@ -1326,22 +1211,28 @@ impl WalkItemKind for ItemKind { vis.flat_map_assoc_item(item, AssocCtxt::Impl { of_trait: of_trait.is_some() }) }); } - ItemKind::Trait(box Trait { safety, is_auto: _, generics, bounds, items }) => { + ItemKind::Trait(box Trait { safety, is_auto: _, ident, generics, bounds, items }) => { visit_safety(vis, safety); + vis.visit_ident(ident); vis.visit_generics(generics); visit_bounds(vis, bounds, BoundKind::Bound); items.flat_map_in_place(|item| vis.flat_map_assoc_item(item, AssocCtxt::Trait)); } - ItemKind::TraitAlias(generics, bounds) => { + ItemKind::TraitAlias(ident, generics, bounds) => { + vis.visit_ident(ident); vis.visit_generics(generics); visit_bounds(vis, bounds, BoundKind::Bound); } ItemKind::MacCall(m) => vis.visit_mac_call(m), - ItemKind::MacroDef(def) => vis.visit_macro_def(def), + ItemKind::MacroDef(ident, def) => { + vis.visit_ident(ident); + vis.visit_macro_def(def) + } ItemKind::Delegation(box Delegation { id, qself, path, + ident, rename, body, from_glob: _, @@ -1349,6 +1240,7 @@ impl WalkItemKind for ItemKind { vis.visit_id(id); vis.visit_qself(qself); vis.visit_path(path); + vis.visit_ident(ident); if let Some(rename) = rename { vis.visit_ident(rename); } @@ -1381,7 +1273,6 @@ impl WalkItemKind for AssocItemKind { &mut self, span: Span, id: NodeId, - ident: &mut Ident, visibility: &mut Visibility, ctxt: Self::Ctxt, visitor: &mut impl MutVisitor, @@ -1391,20 +1282,18 @@ impl WalkItemKind for AssocItemKind { walk_const_item(visitor, item); } AssocItemKind::Fn(func) => { - visitor.visit_fn( - FnKind::Fn(FnCtxt::Assoc(ctxt), ident, visibility, &mut *func), - span, - id, - ); + visitor.visit_fn(FnKind::Fn(FnCtxt::Assoc(ctxt), visibility, &mut *func), span, id); } AssocItemKind::Type(box TyAlias { defaultness, + ident, generics, where_clauses, bounds, ty, }) => { visit_defaultness(visitor, defaultness); + visitor.visit_ident(ident); visitor.visit_generics(generics); visit_bounds(visitor, bounds, BoundKind::Bound); visit_opt(ty, |ty| visitor.visit_ty(ty)); @@ -1415,6 +1304,7 @@ impl WalkItemKind for AssocItemKind { id, qself, path, + ident, rename, body, from_glob: _, @@ -1422,6 +1312,7 @@ impl WalkItemKind for AssocItemKind { visitor.visit_id(id); visitor.visit_qself(qself); visitor.visit_path(path); + visitor.visit_ident(ident); if let Some(rename) = rename { visitor.visit_ident(rename); } @@ -1449,8 +1340,9 @@ impl WalkItemKind for AssocItemKind { } fn walk_const_item(vis: &mut T, item: &mut ConstItem) { - let ConstItem { defaultness, generics, ty, expr, define_opaque } = item; + let ConstItem { defaultness, ident, generics, ty, expr, define_opaque } = item; visit_defaultness(vis, defaultness); + vis.visit_ident(ident); vis.visit_generics(generics); vis.visit_ty(ty); visit_opt(expr, |expr| vis.visit_expr(expr)); @@ -1487,13 +1379,11 @@ fn walk_item_ctxt( item: &mut P>, ctxt: K::Ctxt, ) { - let Item { ident, attrs, id, kind, vis, span, tokens } = item.deref_mut(); + let Item { attrs, id, kind, vis, span, tokens: _ } = item.deref_mut(); visitor.visit_id(id); visit_attrs(visitor, attrs); visitor.visit_vis(vis); - visitor.visit_ident(ident); - kind.walk(*span, *id, ident, vis, ctxt, visitor); - visit_lazy_tts(visitor, tokens); + kind.walk(*span, *id, vis, ctxt, visitor); visitor.visit_span(span); } @@ -1525,38 +1415,37 @@ impl WalkItemKind for ForeignItemKind { &mut self, span: Span, id: NodeId, - ident: &mut Ident, visibility: &mut Visibility, _ctxt: Self::Ctxt, visitor: &mut impl MutVisitor, ) { match self { ForeignItemKind::Static(box StaticItem { + ident, ty, mutability: _, expr, safety: _, define_opaque, }) => { + visitor.visit_ident(ident); visitor.visit_ty(ty); visit_opt(expr, |expr| visitor.visit_expr(expr)); walk_define_opaques(visitor, define_opaque); } ForeignItemKind::Fn(func) => { - visitor.visit_fn( - FnKind::Fn(FnCtxt::Foreign, ident, visibility, &mut *func), - span, - id, - ); + visitor.visit_fn(FnKind::Fn(FnCtxt::Foreign, visibility, &mut *func), span, id); } ForeignItemKind::TyAlias(box TyAlias { defaultness, + ident, generics, where_clauses, bounds, ty, }) => { visit_defaultness(visitor, defaultness); + visitor.visit_ident(ident); visitor.visit_generics(generics); visit_bounds(visitor, bounds, BoundKind::Bound); visit_opt(ty, |ty| visitor.visit_ty(ty)); @@ -1568,11 +1457,11 @@ impl WalkItemKind for ForeignItemKind { } pub fn walk_pat(vis: &mut T, pat: &mut P) { - let Pat { id, kind, span, tokens } = pat.deref_mut(); + let Pat { id, kind, span, tokens: _ } = pat.deref_mut(); vis.visit_id(id); match kind { PatKind::Err(_guar) => {} - PatKind::Wild | PatKind::Rest | PatKind::Never => {} + PatKind::Missing | PatKind::Wild | PatKind::Rest | PatKind::Never => {} PatKind::Ident(_binding_mode, ident, sub) => { vis.visit_ident(ident); visit_opt(sub, |sub| vis.visit_pat(sub)); @@ -1610,7 +1499,6 @@ pub fn walk_pat(vis: &mut T, pat: &mut P) { PatKind::Paren(inner) => vis.visit_pat(inner), PatKind::MacCall(mac) => vis.visit_mac_call(mac), } - visit_lazy_tts(vis, tokens); vis.visit_span(span); } @@ -1674,7 +1562,7 @@ fn walk_format_args(vis: &mut T, fmt: &mut FormatArgs) { vis.visit_span(span); } -pub fn walk_expr(vis: &mut T, Expr { kind, id, span, attrs, tokens }: &mut Expr) { +pub fn walk_expr(vis: &mut T, Expr { kind, id, span, attrs, tokens: _ }: &mut Expr) { vis.visit_id(id); visit_attrs(vis, attrs); match kind { @@ -1865,7 +1753,6 @@ pub fn walk_expr(vis: &mut T, Expr { kind, id, span, attrs, token ExprKind::Err(_guar) => {} ExprKind::Dummy => {} } - visit_lazy_tts(vis, tokens); vis.visit_span(span); } @@ -1907,17 +1794,16 @@ fn walk_flat_map_stmt_kind(vis: &mut T, kind: StmtKind) -> SmallV StmtKind::Semi(expr) => vis.filter_map_expr(expr).into_iter().map(StmtKind::Semi).collect(), StmtKind::Empty => smallvec![StmtKind::Empty], StmtKind::MacCall(mut mac) => { - let MacCallStmt { mac: mac_, style: _, attrs, tokens } = mac.deref_mut(); + let MacCallStmt { mac: mac_, style: _, attrs, tokens: _ } = mac.deref_mut(); visit_attrs(vis, attrs); vis.visit_mac_call(mac_); - visit_lazy_tts(vis, tokens); smallvec![StmtKind::MacCall(mac)] } } } fn walk_vis(vis: &mut T, visibility: &mut Visibility) { - let Visibility { kind, span, tokens } = visibility; + let Visibility { kind, span, tokens: _ } = visibility; match kind { VisibilityKind::Public | VisibilityKind::Inherited => {} VisibilityKind::Restricted { path, id, shorthand: _ } => { @@ -1925,7 +1811,6 @@ fn walk_vis(vis: &mut T, visibility: &mut Visibility) { vis.visit_path(path); } } - visit_lazy_tts(vis, tokens); vis.visit_span(span); } @@ -1984,8 +1869,7 @@ impl DummyAstNode for Item { span: Default::default(), tokens: Default::default(), }, - ident: Ident::dummy(), - kind: ItemKind::ExternCrate(None), + kind: ItemKind::ExternCrate(None, Ident::dummy()), tokens: Default::default(), } } @@ -2052,7 +1936,7 @@ impl DummyAstNode for crate::ast_traits::AstNo #[derive(Debug)] pub enum FnKind<'a> { /// E.g., `fn foo()`, `fn foo(&self)`, or `extern "Abi" fn foo()`. - Fn(FnCtxt, &'a mut Ident, &'a mut Visibility, &'a mut Fn), + Fn(FnCtxt, &'a mut Visibility, &'a mut Fn), /// E.g., `|x, y| body`. Closure( diff --git a/compiler/rustc_ast/src/token.rs b/compiler/rustc_ast/src/token.rs index 6e6f0f1b2660e..54781e8235e2f 100644 --- a/compiler/rustc_ast/src/token.rs +++ b/compiler/rustc_ast/src/token.rs @@ -1,13 +1,10 @@ use std::borrow::Cow; use std::fmt; -use std::sync::Arc; pub use LitKind::*; -pub use Nonterminal::*; pub use NtExprKind::*; pub use NtPatKind::*; pub use TokenKind::*; -use rustc_data_structures::stable_hasher::{HashStable, StableHasher}; use rustc_macros::{Decodable, Encodable, HashStable_Generic}; use rustc_span::edition::Edition; use rustc_span::{DUMMY_SP, ErrorGuaranteed, Span, kw, sym}; @@ -16,7 +13,6 @@ use rustc_span::{DUMMY_SP, ErrorGuaranteed, Span, kw, sym}; use rustc_span::{Ident, Symbol}; use crate::ast; -use crate::ptr::P; use crate::util::case::Case; #[derive(Clone, Copy, PartialEq, Encodable, Decodable, Debug, HashStable_Generic)] @@ -34,10 +30,18 @@ pub enum InvisibleOrigin { // Converted from `proc_macro::Delimiter` in // `proc_macro::Delimiter::to_internal`, i.e. returned by a proc macro. ProcMacro, +} - // Converted from `TokenKind::Interpolated` in - // `TokenStream::flatten_token`. Treated similarly to `ProcMacro`. - FlattenToken, +impl InvisibleOrigin { + // Should the parser skip these invisible delimiters? Ideally this function + // will eventually disappear and no invisible delimiters will be skipped. + #[inline] + pub fn skip(&self) -> bool { + match self { + InvisibleOrigin::MetaVar(_) => false, + InvisibleOrigin::ProcMacro => true, + } + } } impl PartialEq for InvisibleOrigin { @@ -133,10 +137,7 @@ impl Delimiter { pub fn skip(&self) -> bool { match self { Delimiter::Parenthesis | Delimiter::Bracket | Delimiter::Brace => false, - Delimiter::Invisible(InvisibleOrigin::MetaVar(_)) => false, - Delimiter::Invisible(InvisibleOrigin::FlattenToken | InvisibleOrigin::ProcMacro) => { - true - } + Delimiter::Invisible(origin) => origin.skip(), } } @@ -150,6 +151,24 @@ impl Delimiter { _ => false, } } + + pub fn as_open_token_kind(&self) -> TokenKind { + match *self { + Delimiter::Parenthesis => OpenParen, + Delimiter::Brace => OpenBrace, + Delimiter::Bracket => OpenBracket, + Delimiter::Invisible(origin) => OpenInvisible(origin), + } + } + + pub fn as_close_token_kind(&self) -> TokenKind { + match *self { + Delimiter::Parenthesis => CloseParen, + Delimiter::Brace => CloseBrace, + Delimiter::Bracket => CloseBracket, + Delimiter::Invisible(origin) => CloseInvisible(origin), + } + } } // Note that the suffix is *not* considered when deciding the `LitKind` in this @@ -198,16 +217,17 @@ impl Lit { } } - /// Keep this in sync with `Token::can_begin_literal_maybe_minus` excluding unary negation. + /// Keep this in sync with `Token::can_begin_literal_maybe_minus` and + /// `Parser::eat_token_lit` (excluding unary negation). pub fn from_token(token: &Token) -> Option { match token.uninterpolate().kind { Ident(name, IdentIsRaw::No) if name.is_bool_lit() => Some(Lit::new(Bool, name, None)), Literal(token_lit) => Some(token_lit), - Interpolated(ref nt) - if let NtExpr(expr) | NtLiteral(expr) = &**nt - && let ast::ExprKind::Lit(token_lit) = expr.kind => - { - Some(token_lit) + OpenInvisible(InvisibleOrigin::MetaVar( + MetaVarKind::Literal | MetaVarKind::Expr { .. }, + )) => { + // Unreachable with the current test suite. + panic!("from_token metavar"); } _ => None, } @@ -336,9 +356,7 @@ impl From for bool { } } -// SAFETY: due to the `Clone` impl below, all fields of all variants other than -// `Interpolated` must impl `Copy`. -#[derive(PartialEq, Encodable, Decodable, Debug, HashStable_Generic)] +#[derive(Clone, Copy, PartialEq, Encodable, Decodable, Debug, HashStable_Generic)] pub enum TokenKind { /* Expression-operator symbols. */ /// `=` @@ -437,18 +455,31 @@ pub enum TokenKind { Question, /// Used by proc macros for representing lifetimes, not generated by lexer right now. SingleQuote, - /// An opening delimiter (e.g., `{`). - OpenDelim(Delimiter), - /// A closing delimiter (e.g., `}`). - CloseDelim(Delimiter), + /// `(` + OpenParen, + /// `)` + CloseParen, + /// `{` + OpenBrace, + /// `}` + CloseBrace, + /// `[` + OpenBracket, + /// `]` + CloseBracket, + /// Invisible opening delimiter, produced by a macro. + OpenInvisible(InvisibleOrigin), + /// Invisible closing delimiter, produced by a macro. + CloseInvisible(InvisibleOrigin), /* Literals */ Literal(Lit), /// Identifier token. /// Do not forget about `NtIdent` when you want to match on identifiers. - /// It's recommended to use `Token::(ident,uninterpolate,uninterpolated_span)` to - /// treat regular and interpolated identifiers in the same way. + /// It's recommended to use `Token::{ident,uninterpolate}` and + /// `Parser::token_uninterpolated_span` to treat regular and interpolated + /// identifiers in the same way. Ident(Symbol, IdentIsRaw), /// This identifier (and its span) is the identifier passed to the /// declarative macro. The span in the surrounding `Token` is the span of @@ -457,29 +488,15 @@ pub enum TokenKind { /// Lifetime identifier token. /// Do not forget about `NtLifetime` when you want to match on lifetime identifiers. - /// It's recommended to use `Token::(lifetime,uninterpolate,uninterpolated_span)` to - /// treat regular and interpolated lifetime identifiers in the same way. + /// It's recommended to use `Token::{ident,uninterpolate}` and + /// `Parser::token_uninterpolated_span` to treat regular and interpolated + /// identifiers in the same way. Lifetime(Symbol, IdentIsRaw), /// This identifier (and its span) is the lifetime passed to the /// declarative macro. The span in the surrounding `Token` is the span of /// the `lifetime` metavariable in the macro's RHS. NtLifetime(Ident, IdentIsRaw), - /// An embedded AST node, as produced by a macro. This only exists for - /// historical reasons. We'd like to get rid of it, for multiple reasons. - /// - It's conceptually very strange. Saying a token can contain an AST - /// node is like saying, in natural language, that a word can contain a - /// sentence. - /// - It requires special handling in a bunch of places in the parser. - /// - It prevents `Token` from implementing `Copy`. - /// It adds complexity and likely slows things down. Please don't add new - /// occurrences of this token kind! - /// - /// The span in the surrounding `Token` is that of the metavariable in the - /// macro's RHS. The span within the Nonterminal is that of the fragment - /// passed to the macro at the call site. - Interpolated(Arc), - /// A doc comment token. /// `Symbol` is the doc comment's data excluding its "quotes" (`///`, `/**`, etc) /// similarly to symbols in string literal tokens. @@ -489,20 +506,7 @@ pub enum TokenKind { Eof, } -impl Clone for TokenKind { - fn clone(&self) -> Self { - // `TokenKind` would impl `Copy` if it weren't for `Interpolated`. So - // for all other variants, this implementation of `clone` is just like - // a copy. This is faster than the `derive(Clone)` version which has a - // separate path for every variant. - match self { - Interpolated(nt) => Interpolated(Arc::clone(nt)), - _ => unsafe { std::ptr::read(self) }, - } - } -} - -#[derive(Clone, PartialEq, Encodable, Decodable, Debug, HashStable_Generic)] +#[derive(Clone, Copy, PartialEq, Encodable, Decodable, Debug, HashStable_Generic)] pub struct Token { pub kind: TokenKind, pub span: Span, @@ -567,6 +571,37 @@ impl TokenKind { pub fn should_end_const_arg(&self) -> bool { matches!(self, Gt | Ge | Shr | ShrEq) } + + pub fn is_delim(&self) -> bool { + self.open_delim().is_some() || self.close_delim().is_some() + } + + pub fn open_delim(&self) -> Option { + match *self { + OpenParen => Some(Delimiter::Parenthesis), + OpenBrace => Some(Delimiter::Brace), + OpenBracket => Some(Delimiter::Bracket), + OpenInvisible(origin) => Some(Delimiter::Invisible(origin)), + _ => None, + } + } + + pub fn close_delim(&self) -> Option { + match *self { + CloseParen => Some(Delimiter::Parenthesis), + CloseBrace => Some(Delimiter::Brace), + CloseBracket => Some(Delimiter::Bracket), + CloseInvisible(origin) => Some(Delimiter::Invisible(origin)), + _ => None, + } + } + + pub fn is_close_delim_or_eof(&self) -> bool { + match self { + CloseParen | CloseBrace | CloseBracket | CloseInvisible(_) | Eof => true, + _ => false, + } + } } impl Token { @@ -584,20 +619,6 @@ impl Token { Token::new(Ident(ident.name, ident.is_raw_guess().into()), ident.span) } - /// For interpolated tokens, returns a span of the fragment to which the interpolated - /// token refers. For all other tokens this is just a regular span. - /// It is particularly important to use this for identifiers and lifetimes - /// for which spans affect name resolution and edition checks. - /// Note that keywords are also identifiers, so they should use this - /// if they keep spans or perform edition checks. - pub fn uninterpolated_span(&self) -> Span { - match self.kind { - NtIdent(ident, _) | NtLifetime(ident, _) => ident.span, - Interpolated(ref nt) => nt.use_span(), - _ => self.span, - } - } - pub fn is_range_separator(&self) -> bool { [DotDot, DotDotDot, DotDotEq].contains(&self.kind) } @@ -610,8 +631,9 @@ impl Token { | DotDotDot | DotDotEq | Comma | Semi | Colon | PathSep | RArrow | LArrow | FatArrow | Pound | Dollar | Question | SingleQuote => true, - OpenDelim(..) | CloseDelim(..) | Literal(..) | DocComment(..) | Ident(..) - | NtIdent(..) | Lifetime(..) | NtLifetime(..) | Interpolated(..) | Eof => false, + OpenParen | CloseParen | OpenBrace | CloseBrace | OpenBracket | CloseBracket + | OpenInvisible(_) | CloseInvisible(_) | Literal(..) | DocComment(..) | Ident(..) + | NtIdent(..) | Lifetime(..) | NtLifetime(..) | Eof => false, } } @@ -624,11 +646,12 @@ impl Token { /// **NB**: Take care when modifying this function, since it will change /// the stable set of tokens that are allowed to match an expr nonterminal. pub fn can_begin_expr(&self) -> bool { - use Delimiter::*; match self.uninterpolate().kind { Ident(name, is_raw) => ident_can_begin_expr(name, self.span, is_raw), // value name or keyword - OpenDelim(Parenthesis | Brace | Bracket) | // tuple, array or block + OpenParen | // tuple + OpenBrace | // block + OpenBracket | // array Literal(..) | // literal Bang | // operator not Minus | // unary minus @@ -642,18 +665,12 @@ impl Token { PathSep | // global path Lifetime(..) | // labeled loop Pound => true, // expression attributes - Interpolated(ref nt) => - matches!(&**nt, - NtBlock(..) | - NtExpr(..) | - NtLiteral(..) - ), - OpenDelim(Delimiter::Invisible(InvisibleOrigin::MetaVar( + OpenInvisible(InvisibleOrigin::MetaVar( MetaVarKind::Block | MetaVarKind::Expr { .. } | MetaVarKind::Literal | MetaVarKind::Path - ))) => true, + )) => true, _ => false, } } @@ -665,8 +682,8 @@ impl Token { match &self.uninterpolate().kind { // box, ref, mut, and other identifiers (can stricten) Ident(..) | NtIdent(..) | - OpenDelim(Delimiter::Parenthesis) | // tuple pattern - OpenDelim(Delimiter::Bracket) | // slice pattern + OpenParen | // tuple pattern + OpenBracket | // slice pattern And | // reference Minus | // negative literal AndAnd | // double reference @@ -677,19 +694,14 @@ impl Token { Lt | // path (UFCS constant) Shl => true, // path (double UFCS) Or => matches!(pat_kind, PatWithOr), // leading vert `|` or-pattern - Interpolated(nt) => - matches!(&**nt, - | NtExpr(..) - | NtLiteral(..) - ), - OpenDelim(Delimiter::Invisible(InvisibleOrigin::MetaVar( + OpenInvisible(InvisibleOrigin::MetaVar( MetaVarKind::Expr { .. } | MetaVarKind::Literal | MetaVarKind::Meta { .. } | MetaVarKind::Pat(_) | MetaVarKind::Path | MetaVarKind::Ty { .. } - ))) => true, + )) => true, _ => false, } } @@ -699,8 +711,8 @@ impl Token { match self.uninterpolate().kind { Ident(name, is_raw) => ident_can_begin_type(name, self.span, is_raw), // type name or keyword - OpenDelim(Delimiter::Parenthesis) | // tuple - OpenDelim(Delimiter::Bracket) | // array + OpenParen | // tuple + OpenBracket | // array Bang | // never Star | // raw pointer And | // reference @@ -709,10 +721,10 @@ impl Token { Lifetime(..) | // lifetime bound in trait object Lt | Shl | // associated path PathSep => true, // global path - OpenDelim(Delimiter::Invisible(InvisibleOrigin::MetaVar( + OpenInvisible(InvisibleOrigin::MetaVar( MetaVarKind::Ty { .. } | MetaVarKind::Path - ))) => true, + )) => true, // For anonymous structs or unions, which only appear in specific positions // (type of struct fields or union fields), we don't consider them as regular types _ => false, @@ -722,12 +734,11 @@ impl Token { /// Returns `true` if the token can appear at the start of a const param. pub fn can_begin_const_arg(&self) -> bool { match self.kind { - OpenDelim(Delimiter::Brace) | Literal(..) | Minus => true, + OpenBrace | Literal(..) | Minus => true, Ident(name, IdentIsRaw::No) if name.is_bool_lit() => true, - Interpolated(ref nt) => matches!(&**nt, NtExpr(..) | NtBlock(..) | NtLiteral(..)), - OpenDelim(Delimiter::Invisible(InvisibleOrigin::MetaVar( + OpenInvisible(InvisibleOrigin::MetaVar( MetaVarKind::Expr { .. } | MetaVarKind::Block | MetaVarKind::Literal, - ))) => true, + )) => true, _ => false, } } @@ -768,23 +779,13 @@ impl Token { /// /// In other words, would this token be a valid start of `parse_literal_maybe_minus`? /// - /// Keep this in sync with and `Lit::from_token`, excluding unary negation. + /// Keep this in sync with `Lit::from_token` and `Parser::eat_token_lit` + /// (excluding unary negation). pub fn can_begin_literal_maybe_minus(&self) -> bool { match self.uninterpolate().kind { Literal(..) | Minus => true, Ident(name, IdentIsRaw::No) if name.is_bool_lit() => true, - Interpolated(ref nt) => match &**nt { - NtLiteral(_) => true, - NtExpr(e) => match &e.kind { - ast::ExprKind::Lit(_) => true, - ast::ExprKind::Unary(ast::UnOp::Neg, e) => { - matches!(&e.kind, ast::ExprKind::Lit(_)) - } - _ => false, - }, - _ => false, - }, - OpenDelim(Delimiter::Invisible(InvisibleOrigin::MetaVar(mv_kind))) => match mv_kind { + OpenInvisible(InvisibleOrigin::MetaVar(mv_kind)) => match mv_kind { MetaVarKind::Literal => true, MetaVarKind::Expr { can_begin_literal_maybe_minus, .. } => { can_begin_literal_maybe_minus @@ -798,15 +799,7 @@ impl Token { pub fn can_begin_string_literal(&self) -> bool { match self.uninterpolate().kind { Literal(..) => true, - Interpolated(ref nt) => match &**nt { - NtLiteral(_) => true, - NtExpr(e) => match &e.kind { - ast::ExprKind::Lit(_) => true, - _ => false, - }, - _ => false, - }, - OpenDelim(Delimiter::Invisible(InvisibleOrigin::MetaVar(mv_kind))) => match mv_kind { + OpenInvisible(InvisibleOrigin::MetaVar(mv_kind)) => match mv_kind { MetaVarKind::Literal => true, MetaVarKind::Expr { can_begin_string_literal, .. } => can_begin_string_literal, _ => false, @@ -869,26 +862,21 @@ impl Token { /// Is this a pre-parsed expression dropped into the token stream /// (which happens while parsing the result of macro expansion)? - pub fn is_whole_expr(&self) -> bool { - #[allow(irrefutable_let_patterns)] // FIXME: temporary - if let Interpolated(nt) = &self.kind - && let NtExpr(_) | NtLiteral(_) | NtBlock(_) = &**nt - { - true - } else { - matches!(self.is_metavar_seq(), Some(MetaVarKind::Path)) - } + pub fn is_metavar_expr(&self) -> bool { + matches!( + self.is_metavar_seq(), + Some( + MetaVarKind::Expr { .. } + | MetaVarKind::Literal + | MetaVarKind::Path + | MetaVarKind::Block + ) + ) } - /// Is the token an interpolated block (`$b:block`)? - pub fn is_whole_block(&self) -> bool { - if let Interpolated(nt) = &self.kind - && let NtBlock(..) = &**nt - { - return true; - } - - false + /// Are we at a block from a metavar (`$b:block`)? + pub fn is_metavar_block(&self) -> bool { + matches!(self.is_metavar_seq(), Some(MetaVarKind::Block)) } /// Returns `true` if the token is either the `mut` or `const` keyword. @@ -978,7 +966,7 @@ impl Token { /// from an expanded metavar? pub fn is_metavar_seq(&self) -> Option { match self.kind { - OpenDelim(Delimiter::Invisible(InvisibleOrigin::MetaVar(kind))) => Some(kind), + OpenInvisible(InvisibleOrigin::MetaVar(kind)) => Some(kind), _ => None, } } @@ -1056,8 +1044,9 @@ impl Token { Le | EqEq | Ne | Ge | AndAnd | OrOr | Tilde | PlusEq | MinusEq | StarEq | SlashEq | PercentEq | CaretEq | AndEq | OrEq | ShlEq | ShrEq | At | DotDotDot | DotDotEq | Comma | Semi | PathSep | RArrow | LArrow | FatArrow | Pound | Dollar | Question - | OpenDelim(..) | CloseDelim(..) | Literal(..) | Ident(..) | NtIdent(..) - | Lifetime(..) | NtLifetime(..) | Interpolated(..) | DocComment(..) | Eof, + | OpenParen | CloseParen | OpenBrace | CloseBrace | OpenBracket | CloseBracket + | OpenInvisible(_) | CloseInvisible(_) | Literal(..) | Ident(..) | NtIdent(..) + | Lifetime(..) | NtLifetime(..) | DocComment(..) | Eof, _, ) => { return None; @@ -1096,14 +1085,6 @@ pub enum NtExprKind { Expr2021 { inferred: bool }, } -#[derive(Clone, Encodable, Decodable)] -/// For interpolation during macro expansion. -pub enum Nonterminal { - NtBlock(P), - NtExpr(P), - NtLiteral(P), -} - #[derive(Debug, Copy, Clone, PartialEq, Eq, Encodable, Decodable, Hash, HashStable_Generic)] pub enum NonterminalKind { Item, @@ -1187,52 +1168,6 @@ impl fmt::Display for NonterminalKind { } } -impl Nonterminal { - pub fn use_span(&self) -> Span { - match self { - NtBlock(block) => block.span, - NtExpr(expr) | NtLiteral(expr) => expr.span, - } - } - - pub fn descr(&self) -> &'static str { - match self { - NtBlock(..) => "block", - NtExpr(..) => "expression", - NtLiteral(..) => "literal", - } - } -} - -impl PartialEq for Nonterminal { - fn eq(&self, _rhs: &Self) -> bool { - // FIXME: Assume that all nonterminals are not equal, we can't compare them - // correctly based on data from AST. This will prevent them from matching each other - // in macros. The comparison will become possible only when each nonterminal has an - // attached token stream from which it was parsed. - false - } -} - -impl fmt::Debug for Nonterminal { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match *self { - NtBlock(..) => f.pad("NtBlock(..)"), - NtExpr(..) => f.pad("NtExpr(..)"), - NtLiteral(..) => f.pad("NtLiteral(..)"), - } - } -} - -impl HashStable for Nonterminal -where - CTX: crate::HashStableContext, -{ - fn hash_stable(&self, _hcx: &mut CTX, _hasher: &mut StableHasher) { - panic!("interpolated tokens should not be present in the HIR") - } -} - // Some types are used a lot. Make sure they don't unintentionally get bigger. #[cfg(target_pointer_width = "64")] mod size_asserts { @@ -1242,7 +1177,6 @@ mod size_asserts { // tidy-alphabetical-start static_assert_size!(Lit, 12); static_assert_size!(LitKind, 2); - static_assert_size!(Nonterminal, 16); static_assert_size!(Token, 24); static_assert_size!(TokenKind, 16); // tidy-alphabetical-end diff --git a/compiler/rustc_ast/src/tokenstream.rs b/compiler/rustc_ast/src/tokenstream.rs index bdd244be6d1cc..636c26bcde04b 100644 --- a/compiler/rustc_ast/src/tokenstream.rs +++ b/compiler/rustc_ast/src/tokenstream.rs @@ -14,18 +14,20 @@ //! ownership of the original. use std::borrow::Cow; +use std::ops::Range; use std::sync::Arc; -use std::{cmp, fmt, iter}; +use std::{cmp, fmt, iter, mem}; use rustc_data_structures::stable_hasher::{HashStable, StableHasher}; use rustc_data_structures::sync; use rustc_macros::{Decodable, Encodable, HashStable_Generic}; use rustc_serialize::{Decodable, Encodable}; use rustc_span::{DUMMY_SP, Span, SpanDecoder, SpanEncoder, Symbol, sym}; +use thin_vec::ThinVec; use crate::ast::AttrStyle; use crate::ast_traits::{HasAttrs, HasTokens}; -use crate::token::{self, Delimiter, InvisibleOrigin, Nonterminal, Token, TokenKind}; +use crate::token::{self, Delimiter, Token, TokenKind}; use crate::{AttrVec, Attribute}; /// Part of a `TokenStream`. @@ -106,25 +108,30 @@ where } } -pub trait ToAttrTokenStream: sync::DynSend + sync::DynSync { - fn to_attr_token_stream(&self) -> AttrTokenStream; -} - -impl ToAttrTokenStream for AttrTokenStream { - fn to_attr_token_stream(&self) -> AttrTokenStream { - self.clone() - } -} - -/// A lazy version of [`TokenStream`], which defers creation -/// of an actual `TokenStream` until it is needed. -/// `Box` is here only to reduce the structure size. +/// A lazy version of [`AttrTokenStream`], which defers creation of an actual +/// `AttrTokenStream` until it is needed. #[derive(Clone)] -pub struct LazyAttrTokenStream(Arc>); +pub struct LazyAttrTokenStream(Arc); impl LazyAttrTokenStream { - pub fn new(inner: impl ToAttrTokenStream + 'static) -> LazyAttrTokenStream { - LazyAttrTokenStream(Arc::new(Box::new(inner))) + pub fn new_direct(stream: AttrTokenStream) -> LazyAttrTokenStream { + LazyAttrTokenStream(Arc::new(LazyAttrTokenStreamInner::Direct(stream))) + } + + pub fn new_pending( + start_token: (Token, Spacing), + cursor_snapshot: TokenCursor, + num_calls: u32, + break_last_token: u32, + node_replacements: ThinVec, + ) -> LazyAttrTokenStream { + LazyAttrTokenStream(Arc::new(LazyAttrTokenStreamInner::Pending { + start_token, + cursor_snapshot, + num_calls, + break_last_token, + node_replacements, + })) } pub fn to_attr_token_stream(&self) -> AttrTokenStream { @@ -156,6 +163,184 @@ impl HashStable for LazyAttrTokenStream { } } +/// A token range within a `Parser`'s full token stream. +#[derive(Clone, Debug)] +pub struct ParserRange(pub Range); + +/// A token range within an individual AST node's (lazy) token stream, i.e. +/// relative to that node's first token. Distinct from `ParserRange` so the two +/// kinds of range can't be mixed up. +#[derive(Clone, Debug)] +pub struct NodeRange(pub Range); + +/// Indicates a range of tokens that should be replaced by an `AttrsTarget` +/// (replacement) or be replaced by nothing (deletion). This is used in two +/// places during token collection. +/// +/// 1. Replacement. During the parsing of an AST node that may have a +/// `#[derive]` attribute, when we parse a nested AST node that has `#[cfg]` +/// or `#[cfg_attr]`, we replace the entire inner AST node with +/// `FlatToken::AttrsTarget`. This lets us perform eager cfg-expansion on an +/// `AttrTokenStream`. +/// +/// 2. Deletion. We delete inner attributes from all collected token streams, +/// and instead track them through the `attrs` field on the AST node. This +/// lets us manipulate them similarly to outer attributes. When we create a +/// `TokenStream`, the inner attributes are inserted into the proper place +/// in the token stream. +/// +/// Each replacement starts off in `ParserReplacement` form but is converted to +/// `NodeReplacement` form when it is attached to a single AST node, via +/// `LazyAttrTokenStreamImpl`. +pub type ParserReplacement = (ParserRange, Option); + +/// See the comment on `ParserReplacement`. +pub type NodeReplacement = (NodeRange, Option); + +impl NodeRange { + // Converts a range within a parser's tokens to a range within a + // node's tokens beginning at `start_pos`. + // + // For example, imagine a parser with 50 tokens in its token stream, a + // function that spans `ParserRange(20..40)` and an inner attribute within + // that function that spans `ParserRange(30..35)`. We would find the inner + // attribute's range within the function's tokens by subtracting 20, which + // is the position of the function's start token. This gives + // `NodeRange(10..15)`. + pub fn new(ParserRange(parser_range): ParserRange, start_pos: u32) -> NodeRange { + assert!(!parser_range.is_empty()); + assert!(parser_range.start >= start_pos); + NodeRange((parser_range.start - start_pos)..(parser_range.end - start_pos)) + } +} + +enum LazyAttrTokenStreamInner { + // The token stream has already been produced. + Direct(AttrTokenStream), + + // From a value of this type we can reconstruct the `TokenStream` seen by + // the `f` callback passed to a call to `Parser::collect_tokens`, by + // replaying the getting of the tokens. This saves us producing a + // `TokenStream` if it is never needed, e.g. a captured `macro_rules!` + // argument that is never passed to a proc macro. In practice, token stream + // creation happens rarely compared to calls to `collect_tokens` (see some + // statistics in #78736) so we are doing as little up-front work as + // possible. + // + // This also makes `Parser` very cheap to clone, since there is no + // intermediate collection buffer to clone. + Pending { + start_token: (Token, Spacing), + cursor_snapshot: TokenCursor, + num_calls: u32, + break_last_token: u32, + node_replacements: ThinVec, + }, +} + +impl LazyAttrTokenStreamInner { + fn to_attr_token_stream(&self) -> AttrTokenStream { + match self { + LazyAttrTokenStreamInner::Direct(stream) => stream.clone(), + LazyAttrTokenStreamInner::Pending { + start_token, + cursor_snapshot, + num_calls, + break_last_token, + node_replacements, + } => { + // The token produced by the final call to `{,inlined_}next` was not + // actually consumed by the callback. The combination of chaining the + // initial token and using `take` produces the desired result - we + // produce an empty `TokenStream` if no calls were made, and omit the + // final token otherwise. + let mut cursor_snapshot = cursor_snapshot.clone(); + let tokens = iter::once(FlatToken::Token(*start_token)) + .chain(iter::repeat_with(|| FlatToken::Token(cursor_snapshot.next()))) + .take(*num_calls as usize); + + if node_replacements.is_empty() { + make_attr_token_stream(tokens, *break_last_token) + } else { + let mut tokens: Vec<_> = tokens.collect(); + let mut node_replacements = node_replacements.to_vec(); + node_replacements.sort_by_key(|(range, _)| range.0.start); + + #[cfg(debug_assertions)] + for [(node_range, tokens), (next_node_range, next_tokens)] in + node_replacements.array_windows() + { + assert!( + node_range.0.end <= next_node_range.0.start + || node_range.0.end >= next_node_range.0.end, + "Node ranges should be disjoint or nested: ({:?}, {:?}) ({:?}, {:?})", + node_range, + tokens, + next_node_range, + next_tokens, + ); + } + + // Process the replace ranges, starting from the highest start + // position and working our way back. If have tokens like: + // + // `#[cfg(FALSE)] struct Foo { #[cfg(FALSE)] field: bool }` + // + // Then we will generate replace ranges for both + // the `#[cfg(FALSE)] field: bool` and the entire + // `#[cfg(FALSE)] struct Foo { #[cfg(FALSE)] field: bool }` + // + // By starting processing from the replace range with the greatest + // start position, we ensure that any (outer) replace range which + // encloses another (inner) replace range will fully overwrite the + // inner range's replacement. + for (node_range, target) in node_replacements.into_iter().rev() { + assert!( + !node_range.0.is_empty(), + "Cannot replace an empty node range: {:?}", + node_range.0 + ); + + // Replace the tokens in range with zero or one `FlatToken::AttrsTarget`s, + // plus enough `FlatToken::Empty`s to fill up the rest of the range. This + // keeps the total length of `tokens` constant throughout the replacement + // process, allowing us to do all replacements without adjusting indices. + let target_len = target.is_some() as usize; + tokens.splice( + (node_range.0.start as usize)..(node_range.0.end as usize), + target.into_iter().map(|target| FlatToken::AttrsTarget(target)).chain( + iter::repeat(FlatToken::Empty) + .take(node_range.0.len() - target_len), + ), + ); + } + make_attr_token_stream(tokens.into_iter(), *break_last_token) + } + } + } + } +} + +/// A helper struct used when building an `AttrTokenStream` from +/// a `LazyAttrTokenStream`. Both delimiter and non-delimited tokens +/// are stored as `FlatToken::Token`. A vector of `FlatToken`s +/// is then 'parsed' to build up an `AttrTokenStream` with nested +/// `AttrTokenTree::Delimited` tokens. +#[derive(Debug, Clone)] +enum FlatToken { + /// A token - this holds both delimiter (e.g. '{' and '}') + /// and non-delimiter tokens + Token((Token, Spacing)), + /// Holds the `AttrsTarget` for an AST node. The `AttrsTarget` is inserted + /// directly into the constructed `AttrTokenStream` as an + /// `AttrTokenTree::AttrsTarget`. + AttrsTarget(AttrsTarget), + /// A special 'empty' token that is ignored during the conversion + /// to an `AttrTokenStream`. This is used to simplify the + /// handling of replace ranges. + Empty, +} + /// An `AttrTokenStream` is similar to a `TokenStream`, but with extra /// information about the tokens for attribute targets. This is used /// during expansion to perform early cfg-expansion, and to process attributes @@ -163,6 +348,71 @@ impl HashStable for LazyAttrTokenStream { #[derive(Clone, Debug, Default, Encodable, Decodable)] pub struct AttrTokenStream(pub Arc>); +/// Converts a flattened iterator of tokens (including open and close delimiter tokens) into an +/// `AttrTokenStream`, creating an `AttrTokenTree::Delimited` for each matching pair of open and +/// close delims. +fn make_attr_token_stream( + iter: impl Iterator, + break_last_token: u32, +) -> AttrTokenStream { + #[derive(Debug)] + struct FrameData { + // This is `None` for the first frame, `Some` for all others. + open_delim_sp: Option<(Delimiter, Span, Spacing)>, + inner: Vec, + } + // The stack always has at least one element. Storing it separately makes for shorter code. + let mut stack_top = FrameData { open_delim_sp: None, inner: vec![] }; + let mut stack_rest = vec![]; + for flat_token in iter { + match flat_token { + FlatToken::Token((token @ Token { kind, span }, spacing)) => { + if let Some(delim) = kind.open_delim() { + stack_rest.push(mem::replace( + &mut stack_top, + FrameData { open_delim_sp: Some((delim, span, spacing)), inner: vec![] }, + )); + } else if let Some(delim) = kind.close_delim() { + let frame_data = mem::replace(&mut stack_top, stack_rest.pop().unwrap()); + let (open_delim, open_sp, open_spacing) = frame_data.open_delim_sp.unwrap(); + assert!( + open_delim.eq_ignoring_invisible_origin(&delim), + "Mismatched open/close delims: open={open_delim:?} close={span:?}" + ); + let dspan = DelimSpan::from_pair(open_sp, span); + let dspacing = DelimSpacing::new(open_spacing, spacing); + let stream = AttrTokenStream::new(frame_data.inner); + let delimited = AttrTokenTree::Delimited(dspan, dspacing, delim, stream); + stack_top.inner.push(delimited); + } else { + stack_top.inner.push(AttrTokenTree::Token(token, spacing)) + } + } + FlatToken::AttrsTarget(target) => { + stack_top.inner.push(AttrTokenTree::AttrsTarget(target)) + } + FlatToken::Empty => {} + } + } + + if break_last_token > 0 { + let last_token = stack_top.inner.pop().unwrap(); + if let AttrTokenTree::Token(last_token, spacing) = last_token { + let (unglued, _) = last_token.kind.break_two_token_op(break_last_token).unwrap(); + + // Tokens are always ASCII chars, so we can use byte arithmetic here. + let mut first_span = last_token.span.shrink_to_lo(); + first_span = + first_span.with_hi(first_span.lo() + rustc_span::BytePos(break_last_token)); + + stack_top.inner.push(AttrTokenTree::Token(Token::new(unglued, first_span), spacing)); + } else { + panic!("Unexpected last token {last_token:?}") + } + } + AttrTokenStream::new(stack_top.inner) +} + /// Like `TokenTree`, but for `AttrTokenStream`. #[derive(Clone, Debug, Encodable, Decodable)] pub enum AttrTokenTree { @@ -233,35 +483,52 @@ fn attrs_and_tokens_to_token_trees( // Insert inner attribute tokens. if !inner_attrs.is_empty() { - let mut found = false; - // Check the last two trees (to account for a trailing semi) - for tree in res.iter_mut().rev().take(2) { - if let TokenTree::Delimited(span, spacing, delim, delim_tokens) = tree { - // Inner attributes are only supported on extern blocks, functions, - // impls, and modules. All of these have their inner attributes - // placed at the beginning of the rightmost outermost braced group: - // e.g. fn foo() { #![my_attr] } - // - // Therefore, we can insert them back into the right location - // without needing to do any extra position tracking. - // - // Note: Outline modules are an exception - they can - // have attributes like `#![my_attr]` at the start of a file. - // Support for custom attributes in this position is not - // properly implemented - we always synthesize fake tokens, - // so we never reach this code. + let found = insert_inner_attrs(inner_attrs, res); + assert!(found, "Failed to find trailing delimited group in: {res:?}"); + } + + // Inner attributes are only supported on blocks, functions, impls, and + // modules. All of these have their inner attributes placed at the + // beginning of the rightmost outermost braced group: + // e.g. `fn foo() { #![my_attr] }`. (Note: the braces may be within + // invisible delimiters.) + // + // Therefore, we can insert them back into the right location without + // needing to do any extra position tracking. + // + // Note: Outline modules are an exception - they can have attributes like + // `#![my_attr]` at the start of a file. Support for custom attributes in + // this position is not properly implemented - we always synthesize fake + // tokens, so we never reach this code. + fn insert_inner_attrs(inner_attrs: &[Attribute], tts: &mut Vec) -> bool { + for tree in tts.iter_mut().rev() { + if let TokenTree::Delimited(span, spacing, Delimiter::Brace, stream) = tree { + // Found it: the rightmost, outermost braced group. let mut tts = vec![]; for inner_attr in inner_attrs { tts.extend(inner_attr.token_trees()); } - tts.extend(delim_tokens.0.iter().cloned()); + tts.extend(stream.0.iter().cloned()); let stream = TokenStream::new(tts); - *tree = TokenTree::Delimited(*span, *spacing, *delim, stream); - found = true; - break; + *tree = TokenTree::Delimited(*span, *spacing, Delimiter::Brace, stream); + return true; + } else if let TokenTree::Delimited(span, spacing, Delimiter::Invisible(src), stream) = + tree + { + // Recurse inside invisible delimiters. + let mut vec: Vec<_> = stream.iter().cloned().collect(); + if insert_inner_attrs(inner_attrs, &mut vec) { + *tree = TokenTree::Delimited( + *span, + *spacing, + Delimiter::Invisible(*src), + TokenStream::new(vec), + ); + return true; + } } } - assert!(found, "Failed to find trailing delimited group in: {res:?}"); + false } } @@ -288,11 +555,6 @@ pub struct AttrsTarget { } /// A `TokenStream` is an abstract sequence of tokens, organized into [`TokenTree`]s. -/// -/// The goal is for procedural macros to work with `TokenStream`s and `TokenTree`s -/// instead of a representation of the abstract syntax tree. -/// Today's `TokenTree`s can still contain AST via `token::Interpolated` for -/// backwards compatibility. #[derive(Clone, Debug, Default, Encodable, Decodable)] pub struct TokenStream(pub(crate) Arc>); @@ -459,62 +721,6 @@ impl TokenStream { TokenStream::new(tts) } - pub fn from_nonterminal_ast(nt: &Nonterminal) -> TokenStream { - match nt { - Nonterminal::NtBlock(block) => TokenStream::from_ast(block), - Nonterminal::NtExpr(expr) | Nonterminal::NtLiteral(expr) => TokenStream::from_ast(expr), - } - } - - fn flatten_token(token: &Token, spacing: Spacing) -> TokenTree { - match token.kind { - token::NtIdent(ident, is_raw) => { - TokenTree::Token(Token::new(token::Ident(ident.name, is_raw), ident.span), spacing) - } - token::NtLifetime(ident, is_raw) => TokenTree::Delimited( - DelimSpan::from_single(token.span), - DelimSpacing::new(Spacing::JointHidden, spacing), - Delimiter::Invisible(InvisibleOrigin::FlattenToken), - TokenStream::token_alone(token::Lifetime(ident.name, is_raw), ident.span), - ), - token::Interpolated(ref nt) => TokenTree::Delimited( - DelimSpan::from_single(token.span), - DelimSpacing::new(Spacing::JointHidden, spacing), - Delimiter::Invisible(InvisibleOrigin::FlattenToken), - TokenStream::from_nonterminal_ast(&nt).flattened(), - ), - _ => TokenTree::Token(token.clone(), spacing), - } - } - - fn flatten_token_tree(tree: &TokenTree) -> TokenTree { - match tree { - TokenTree::Token(token, spacing) => TokenStream::flatten_token(token, *spacing), - TokenTree::Delimited(span, spacing, delim, tts) => { - TokenTree::Delimited(*span, *spacing, *delim, tts.flattened()) - } - } - } - - #[must_use] - pub fn flattened(&self) -> TokenStream { - fn can_skip(stream: &TokenStream) -> bool { - stream.iter().all(|tree| match tree { - TokenTree::Token(token, _) => !matches!( - token.kind, - token::NtIdent(..) | token::NtLifetime(..) | token::Interpolated(..) - ), - TokenTree::Delimited(.., inner) => can_skip(inner), - }) - } - - if can_skip(self) { - return self.clone(); - } - - self.iter().map(|tree| TokenStream::flatten_token_tree(tree)).collect() - } - // If `vec` is not empty, try to glue `tt` onto its last token. The return // value indicates if gluing took place. fn try_glue_to_last(vec: &mut Vec, tt: &TokenTree) -> bool { @@ -685,6 +891,104 @@ impl<'t> Iterator for TokenStreamIter<'t> { } } +#[derive(Clone, Debug)] +pub struct TokenTreeCursor { + stream: TokenStream, + /// Points to the current token tree in the stream. In `TokenCursor::curr`, + /// this can be any token tree. In `TokenCursor::stack`, this is always a + /// `TokenTree::Delimited`. + index: usize, +} + +impl TokenTreeCursor { + #[inline] + pub fn new(stream: TokenStream) -> Self { + TokenTreeCursor { stream, index: 0 } + } + + #[inline] + pub fn curr(&self) -> Option<&TokenTree> { + self.stream.get(self.index) + } + + pub fn look_ahead(&self, n: usize) -> Option<&TokenTree> { + self.stream.get(self.index + n) + } + + #[inline] + pub fn bump(&mut self) { + self.index += 1; + } +} + +/// A `TokenStream` cursor that produces `Token`s. It's a bit odd that +/// we (a) lex tokens into a nice tree structure (`TokenStream`), and then (b) +/// use this type to emit them as a linear sequence. But a linear sequence is +/// what the parser expects, for the most part. +#[derive(Clone, Debug)] +pub struct TokenCursor { + // Cursor for the current (innermost) token stream. The index within the + // cursor can point to any token tree in the stream (or one past the end). + // The delimiters for this token stream are found in `self.stack.last()`; + // if that is `None` we are in the outermost token stream which never has + // delimiters. + pub curr: TokenTreeCursor, + + // Token streams surrounding the current one. The index within each cursor + // always points to a `TokenTree::Delimited`. + pub stack: Vec, +} + +impl TokenCursor { + pub fn next(&mut self) -> (Token, Spacing) { + self.inlined_next() + } + + /// This always-inlined version should only be used on hot code paths. + #[inline(always)] + pub fn inlined_next(&mut self) -> (Token, Spacing) { + loop { + // FIXME: we currently don't return `Delimiter::Invisible` open/close delims. To fix + // #67062 we will need to, whereupon the `delim != Delimiter::Invisible` conditions + // below can be removed. + if let Some(tree) = self.curr.curr() { + match tree { + &TokenTree::Token(token, spacing) => { + debug_assert!(!token.kind.is_delim()); + let res = (token, spacing); + self.curr.bump(); + return res; + } + &TokenTree::Delimited(sp, spacing, delim, ref tts) => { + let trees = TokenTreeCursor::new(tts.clone()); + self.stack.push(mem::replace(&mut self.curr, trees)); + if !delim.skip() { + return (Token::new(delim.as_open_token_kind(), sp.open), spacing.open); + } + // No open delimiter to return; continue on to the next iteration. + } + }; + } else if let Some(parent) = self.stack.pop() { + // We have exhausted this token stream. Move back to its parent token stream. + let Some(&TokenTree::Delimited(span, spacing, delim, _)) = parent.curr() else { + panic!("parent should be Delimited") + }; + self.curr = parent; + self.curr.bump(); // move past the `Delimited` + if !delim.skip() { + return (Token::new(delim.as_close_token_kind(), span.close), spacing.close); + } + // No close delimiter to return; continue on to the next iteration. + } else { + // We have exhausted the outermost token stream. The use of + // `Spacing::Alone` is arbitrary and immaterial, because the + // `Eof` token's spacing is never used. + return (Token::new(token::Eof, DUMMY_SP), Spacing::Alone); + } + } + } +} + #[derive(Debug, Copy, Clone, PartialEq, Encodable, Decodable, HashStable_Generic)] pub struct DelimSpan { pub open: Span, @@ -731,6 +1035,7 @@ mod size_asserts { static_assert_size!(AttrTokenStream, 8); static_assert_size!(AttrTokenTree, 32); static_assert_size!(LazyAttrTokenStream, 8); + static_assert_size!(LazyAttrTokenStreamInner, 88); static_assert_size!(Option, 8); // must be small, used in many AST nodes static_assert_size!(TokenStream, 8); static_assert_size!(TokenTree, 32); diff --git a/compiler/rustc_ast/src/util/literal.rs b/compiler/rustc_ast/src/util/literal.rs index 6896ac723fa58..b8526cf9d9529 100644 --- a/compiler/rustc_ast/src/util/literal.rs +++ b/compiler/rustc_ast/src/util/literal.rs @@ -2,7 +2,7 @@ use std::{ascii, fmt, str}; -use rustc_lexer::unescape::{ +use rustc_literal_escaper::{ MixedUnit, Mode, byte_from_char, unescape_byte, unescape_char, unescape_mixed, unescape_unicode, }; use rustc_span::{Span, Symbol, kw, sym}; diff --git a/compiler/rustc_ast/src/util/parser.rs b/compiler/rustc_ast/src/util/parser.rs index 98b1fc52ed747..1e5f414fae1c7 100644 --- a/compiler/rustc_ast/src/util/parser.rs +++ b/compiler/rustc_ast/src/util/parser.rs @@ -1,6 +1,6 @@ use rustc_span::kw; -use crate::ast::{self, BinOpKind, RangeLimits}; +use crate::ast::{self, AssignOpKind, BinOpKind, RangeLimits}; use crate::token::{self, Token}; /// Associative operator. @@ -9,7 +9,7 @@ pub enum AssocOp { /// A binary op. Binary(BinOpKind), /// `?=` where ? is one of the assignable BinOps - AssignOp(BinOpKind), + AssignOp(AssignOpKind), /// `=` Assign, /// `as` @@ -44,16 +44,16 @@ impl AssocOp { token::Or => Some(Binary(BinOpKind::BitOr)), token::Shl => Some(Binary(BinOpKind::Shl)), token::Shr => Some(Binary(BinOpKind::Shr)), - token::PlusEq => Some(AssignOp(BinOpKind::Add)), - token::MinusEq => Some(AssignOp(BinOpKind::Sub)), - token::StarEq => Some(AssignOp(BinOpKind::Mul)), - token::SlashEq => Some(AssignOp(BinOpKind::Div)), - token::PercentEq => Some(AssignOp(BinOpKind::Rem)), - token::CaretEq => Some(AssignOp(BinOpKind::BitXor)), - token::AndEq => Some(AssignOp(BinOpKind::BitAnd)), - token::OrEq => Some(AssignOp(BinOpKind::BitOr)), - token::ShlEq => Some(AssignOp(BinOpKind::Shl)), - token::ShrEq => Some(AssignOp(BinOpKind::Shr)), + token::PlusEq => Some(AssignOp(AssignOpKind::AddAssign)), + token::MinusEq => Some(AssignOp(AssignOpKind::SubAssign)), + token::StarEq => Some(AssignOp(AssignOpKind::MulAssign)), + token::SlashEq => Some(AssignOp(AssignOpKind::DivAssign)), + token::PercentEq => Some(AssignOp(AssignOpKind::RemAssign)), + token::CaretEq => Some(AssignOp(AssignOpKind::BitXorAssign)), + token::AndEq => Some(AssignOp(AssignOpKind::BitAndAssign)), + token::OrEq => Some(AssignOp(AssignOpKind::BitOrAssign)), + token::ShlEq => Some(AssignOp(AssignOpKind::ShlAssign)), + token::ShrEq => Some(AssignOp(AssignOpKind::ShrAssign)), token::Lt => Some(Binary(BinOpKind::Lt)), token::Le => Some(Binary(BinOpKind::Le)), token::Ge => Some(Binary(BinOpKind::Ge)), diff --git a/compiler/rustc_ast/src/visit.rs b/compiler/rustc_ast/src/visit.rs index 2716601ca4f9d..f1aa7b5533a90 100644 --- a/compiler/rustc_ast/src/visit.rs +++ b/compiler/rustc_ast/src/visit.rs @@ -47,7 +47,6 @@ pub enum BoundKind { /// Trait bounds in trait object type. /// E.g., `dyn Bound1 + Bound2 + Bound3`. TraitObject, - /// Super traits of a trait. /// E.g., `trait A: B` SuperTraits, @@ -66,7 +65,7 @@ impl BoundKind { #[derive(Copy, Clone, Debug)] pub enum FnKind<'a> { /// E.g., `fn foo()`, `fn foo(&self)`, or `extern "Abi" fn foo()`. - Fn(FnCtxt, &'a Ident, &'a Visibility, &'a Fn), + Fn(FnCtxt, &'a Visibility, &'a Fn), /// E.g., `|x, y| body`. Closure(&'a ClosureBinder, &'a Option, &'a FnDecl, &'a Expr), @@ -75,21 +74,21 @@ pub enum FnKind<'a> { impl<'a> FnKind<'a> { pub fn header(&self) -> Option<&'a FnHeader> { match *self { - FnKind::Fn(_, _, _, Fn { sig, .. }) => Some(&sig.header), + FnKind::Fn(_, _, Fn { sig, .. }) => Some(&sig.header), FnKind::Closure(..) => None, } } pub fn ident(&self) -> Option<&Ident> { match self { - FnKind::Fn(_, ident, ..) => Some(ident), + FnKind::Fn(_, _, Fn { ident, .. }) => Some(ident), _ => None, } } pub fn decl(&self) -> &'a FnDecl { match self { - FnKind::Fn(_, _, _, Fn { sig, .. }) => &sig.decl, + FnKind::Fn(_, _, Fn { sig, .. }) => &sig.decl, FnKind::Closure(_, _, decl, _) => decl, } } @@ -118,7 +117,6 @@ pub trait WalkItemKind { &'a self, span: Span, id: NodeId, - ident: &'a Ident, visibility: &'a Visibility, ctxt: Self::Ctxt, visitor: &mut V, @@ -324,7 +322,7 @@ pub fn walk_crate<'a, V: Visitor<'a>>(visitor: &mut V, krate: &'a Crate) -> V::R } pub fn walk_local<'a, V: Visitor<'a>>(visitor: &mut V, local: &'a Local) -> V::Result { - let Local { id: _, pat, ty, kind, span: _, colon_sp: _, attrs, tokens: _ } = local; + let Local { id: _, super_: _, pat, ty, kind, span: _, colon_sp: _, attrs, tokens: _ } = local; walk_list!(visitor, visit_attribute, attrs); try_visit!(visitor.visit_pat(pat)); visit_opt!(visitor, visit_ty, ty); @@ -364,63 +362,72 @@ impl WalkItemKind for ItemKind { &'a self, span: Span, id: NodeId, - ident: &'a Ident, vis: &'a Visibility, _ctxt: Self::Ctxt, visitor: &mut V, ) -> V::Result { match self { - ItemKind::ExternCrate(_rename) => {} + ItemKind::ExternCrate(_rename, ident) => try_visit!(visitor.visit_ident(ident)), ItemKind::Use(use_tree) => try_visit!(visitor.visit_use_tree(use_tree, id, false)), ItemKind::Static(box StaticItem { + ident, ty, safety: _, mutability: _, expr, define_opaque, }) => { + try_visit!(visitor.visit_ident(ident)); try_visit!(visitor.visit_ty(ty)); visit_opt!(visitor, visit_expr, expr); try_visit!(walk_define_opaques(visitor, define_opaque)); } ItemKind::Const(box ConstItem { defaultness: _, + ident, generics, ty, expr, define_opaque, }) => { + try_visit!(visitor.visit_ident(ident)); try_visit!(visitor.visit_generics(generics)); try_visit!(visitor.visit_ty(ty)); visit_opt!(visitor, visit_expr, expr); try_visit!(walk_define_opaques(visitor, define_opaque)); } ItemKind::Fn(func) => { - let kind = FnKind::Fn(FnCtxt::Free, ident, vis, &*func); + let kind = FnKind::Fn(FnCtxt::Free, vis, &*func); try_visit!(visitor.visit_fn(kind, span, id)); } - ItemKind::Mod(_unsafety, mod_kind) => match mod_kind { - ModKind::Loaded(items, _inline, _inner_span, _) => { - walk_list!(visitor, visit_item, items); + ItemKind::Mod(_unsafety, ident, mod_kind) => { + try_visit!(visitor.visit_ident(ident)); + match mod_kind { + ModKind::Loaded(items, _inline, _inner_span, _) => { + walk_list!(visitor, visit_item, items); + } + ModKind::Unloaded => {} } - ModKind::Unloaded => {} - }, + } ItemKind::ForeignMod(ForeignMod { extern_span: _, safety: _, abi: _, items }) => { walk_list!(visitor, visit_foreign_item, items); } ItemKind::GlobalAsm(asm) => try_visit!(visitor.visit_inline_asm(asm)), ItemKind::TyAlias(box TyAlias { generics, + ident, bounds, ty, defaultness: _, where_clauses: _, }) => { + try_visit!(visitor.visit_ident(ident)); try_visit!(visitor.visit_generics(generics)); walk_list!(visitor, visit_param_bound, bounds, BoundKind::Bound); visit_opt!(visitor, visit_ty, ty); } - ItemKind::Enum(enum_definition, generics) => { + ItemKind::Enum(ident, enum_definition, generics) => { + try_visit!(visitor.visit_ident(ident)); try_visit!(visitor.visit_generics(generics)); try_visit!(visitor.visit_enum_def(enum_definition)); } @@ -444,32 +451,52 @@ impl WalkItemKind for ItemKind { AssocCtxt::Impl { of_trait: of_trait.is_some() } ); } - ItemKind::Struct(struct_definition, generics) - | ItemKind::Union(struct_definition, generics) => { + ItemKind::Struct(ident, struct_definition, generics) + | ItemKind::Union(ident, struct_definition, generics) => { + try_visit!(visitor.visit_ident(ident)); try_visit!(visitor.visit_generics(generics)); try_visit!(visitor.visit_variant_data(struct_definition)); } - ItemKind::Trait(box Trait { safety: _, is_auto: _, generics, bounds, items }) => { + ItemKind::Trait(box Trait { + safety: _, + is_auto: _, + ident, + generics, + bounds, + items, + }) => { + try_visit!(visitor.visit_ident(ident)); try_visit!(visitor.visit_generics(generics)); walk_list!(visitor, visit_param_bound, bounds, BoundKind::SuperTraits); walk_list!(visitor, visit_assoc_item, items, AssocCtxt::Trait); } - ItemKind::TraitAlias(generics, bounds) => { + ItemKind::TraitAlias(ident, generics, bounds) => { + try_visit!(visitor.visit_ident(ident)); try_visit!(visitor.visit_generics(generics)); walk_list!(visitor, visit_param_bound, bounds, BoundKind::Bound); } ItemKind::MacCall(mac) => try_visit!(visitor.visit_mac_call(mac)), - ItemKind::MacroDef(ts) => try_visit!(visitor.visit_mac_def(ts, id)), + ItemKind::MacroDef(ident, ts) => { + try_visit!(visitor.visit_ident(ident)); + try_visit!(visitor.visit_mac_def(ts, id)); + if let Some(EIIMacroFor { extern_item_path, impl_unsafe: _, span: _ }) = + &ts.eii_macro_for + { + try_visit!(visitor.visit_path(extern_item_path, id)); + } + } ItemKind::Delegation(box Delegation { id, qself, path, + ident, rename, body, from_glob: _, }) => { try_visit!(visitor.visit_qself(qself)); try_visit!(visitor.visit_path(path, *id)); + try_visit!(visitor.visit_ident(ident)); visit_opt!(visitor, visit_ident, rename); visit_opt!(visitor, visit_block, body); } @@ -585,6 +612,7 @@ pub fn walk_ty_pat<'a, V: Visitor<'a>>(visitor: &mut V, tp: &'a TyPat) -> V::Res visit_opt!(visitor, visit_anon_const, start); visit_opt!(visitor, visit_anon_const, end); } + TyPatKind::Or(variants) => walk_list!(visitor, visit_ty_pat, variants), TyPatKind::Err(_) => {} } V::Result::output() @@ -727,7 +755,7 @@ pub fn walk_pat<'a, V: Visitor<'a>>(visitor: &mut V, pattern: &'a Pat) -> V::Res try_visit!(visitor.visit_pat(subpattern)); try_visit!(visitor.visit_expr(guard_condition)); } - PatKind::Wild | PatKind::Rest | PatKind::Never => {} + PatKind::Missing | PatKind::Wild | PatKind::Rest | PatKind::Never => {} PatKind::Err(_guar) => {} PatKind::Tuple(elems) | PatKind::Slice(elems) | PatKind::Or(elems) => { walk_list!(visitor, visit_pat, elems); @@ -743,34 +771,37 @@ impl WalkItemKind for ForeignItemKind { &'a self, span: Span, id: NodeId, - ident: &'a Ident, vis: &'a Visibility, _ctxt: Self::Ctxt, visitor: &mut V, ) -> V::Result { match self { ForeignItemKind::Static(box StaticItem { + ident, ty, mutability: _, expr, safety: _, define_opaque, }) => { + try_visit!(visitor.visit_ident(ident)); try_visit!(visitor.visit_ty(ty)); visit_opt!(visitor, visit_expr, expr); try_visit!(walk_define_opaques(visitor, define_opaque)); } ForeignItemKind::Fn(func) => { - let kind = FnKind::Fn(FnCtxt::Foreign, ident, vis, &*func); + let kind = FnKind::Fn(FnCtxt::Foreign, vis, &*func); try_visit!(visitor.visit_fn(kind, span, id)); } ForeignItemKind::TyAlias(box TyAlias { generics, + ident, bounds, ty, defaultness: _, where_clauses: _, }) => { + try_visit!(visitor.visit_ident(ident)); try_visit!(visitor.visit_generics(generics)); walk_list!(visitor, visit_param_bound, bounds, BoundKind::Bound); visit_opt!(visitor, visit_ty, ty); @@ -917,18 +948,25 @@ pub fn walk_fn<'a, V: Visitor<'a>>(visitor: &mut V, kind: FnKind<'a>) -> V::Resu match kind { FnKind::Fn( _ctxt, - _ident, _vis, Fn { defaultness: _, + ident, sig: FnSig { header, decl, span: _ }, generics, contract, body, define_opaque, + eii_impl, }, ) => { - // Identifier and visibility are visited as a part of the item. + // Visibility is visited as a part of the item. + try_visit!(visitor.visit_ident(ident)); + + for EIIImpl { node_id, eii_macro_path, .. } in eii_impl { + try_visit!(visitor.visit_path(eii_macro_path, *node_id)); + } + try_visit!(visitor.visit_fn_header(header)); try_visit!(visitor.visit_generics(generics)); try_visit!(visitor.visit_fn_decl(decl)); @@ -952,7 +990,6 @@ impl WalkItemKind for AssocItemKind { &'a self, span: Span, id: NodeId, - ident: &'a Ident, vis: &'a Visibility, ctxt: Self::Ctxt, visitor: &mut V, @@ -960,28 +997,32 @@ impl WalkItemKind for AssocItemKind { match self { AssocItemKind::Const(box ConstItem { defaultness: _, + ident, generics, ty, expr, define_opaque, }) => { + try_visit!(visitor.visit_ident(ident)); try_visit!(visitor.visit_generics(generics)); try_visit!(visitor.visit_ty(ty)); visit_opt!(visitor, visit_expr, expr); try_visit!(walk_define_opaques(visitor, define_opaque)); } AssocItemKind::Fn(func) => { - let kind = FnKind::Fn(FnCtxt::Assoc(ctxt), ident, vis, &*func); + let kind = FnKind::Fn(FnCtxt::Assoc(ctxt), vis, &*func); try_visit!(visitor.visit_fn(kind, span, id)); } AssocItemKind::Type(box TyAlias { generics, + ident, bounds, ty, defaultness: _, where_clauses: _, }) => { try_visit!(visitor.visit_generics(generics)); + try_visit!(visitor.visit_ident(ident)); walk_list!(visitor, visit_param_bound, bounds, BoundKind::Bound); visit_opt!(visitor, visit_ty, ty); } @@ -992,12 +1033,14 @@ impl WalkItemKind for AssocItemKind { id, qself, path, + ident, rename, body, from_glob: _, }) => { try_visit!(visitor.visit_qself(qself)); try_visit!(visitor.visit_path(path, *id)); + try_visit!(visitor.visit_ident(ident)); visit_opt!(visitor, visit_ident, rename); visit_opt!(visitor, visit_block, body); } @@ -1039,11 +1082,10 @@ fn walk_item_ctxt<'a, V: Visitor<'a>, K: WalkItemKind>( item: &'a Item, ctxt: K::Ctxt, ) -> V::Result { - let Item { id, span, ident, vis, attrs, kind, tokens: _ } = item; + let Item { id, span, vis, attrs, kind, tokens: _ } = item; walk_list!(visitor, visit_attribute, attrs); try_visit!(visitor.visit_vis(vis)); - try_visit!(visitor.visit_ident(ident)); - try_visit!(kind.walk(*span, *id, ident, vis, ctxt, visitor)); + try_visit!(kind.walk(*span, *id, vis, ctxt, visitor)); V::Result::output() } diff --git a/compiler/rustc_ast_lowering/src/block.rs b/compiler/rustc_ast_lowering/src/block.rs index 1d9ca6bb9c8cb..5fa801c73c916 100644 --- a/compiler/rustc_ast_lowering/src/block.rs +++ b/compiler/rustc_ast_lowering/src/block.rs @@ -95,6 +95,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { fn lower_local(&mut self, l: &Local) -> &'hir hir::LetStmt<'hir> { // Let statements are allowed to have impl trait in bindings. + let super_ = l.super_; let ty = l.ty.as_ref().map(|t| { self.lower_ty(t, self.impl_trait_in_bindings_ctxt(ImplTraitPosition::Variable)) }); @@ -108,8 +109,8 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { }; let span = self.lower_span(l.span); let source = hir::LocalSource::Normal; - self.lower_attrs(hir_id, &l.attrs, l.span); - self.arena.alloc(hir::LetStmt { hir_id, ty, pat, init, els, span, source }) + self.lower_attrs(hir_id, &l.attrs, l.span, &[]); + self.arena.alloc(hir::LetStmt { hir_id, super_, ty, pat, init, els, span, source }) } fn lower_block_check_mode(&mut self, b: &BlockCheckMode) -> hir::BlockCheckMode { diff --git a/compiler/rustc_ast_lowering/src/delegation.rs b/compiler/rustc_ast_lowering/src/delegation.rs index f7640c602d6fd..93c627f64c967 100644 --- a/compiler/rustc_ast_lowering/src/delegation.rs +++ b/compiler/rustc_ast_lowering/src/delegation.rs @@ -47,7 +47,7 @@ use rustc_errors::ErrorGuaranteed; use rustc_hir::def_id::DefId; use rustc_middle::span_bug; use rustc_middle::ty::{Asyncness, ResolverAstLowering}; -use rustc_span::{Ident, Span}; +use rustc_span::{Ident, Span, Symbol}; use {rustc_ast as ast, rustc_hir as hir}; use super::{GenericArgsMode, ImplTraitContext, LoweringContext, ParamMode}; @@ -56,6 +56,7 @@ use crate::{AllowReturnTypeNotation, ImplTraitPosition, ResolverAstLoweringExt}; pub(crate) struct DelegationResults<'hir> { pub body_id: hir::BodyId, pub sig: hir::FnSig<'hir>, + pub ident: Ident, pub generics: &'hir hir::Generics<'hir>, } @@ -84,7 +85,7 @@ impl<'hir> LoweringContext<'_, 'hir> { .delegation_fn_sigs .get(&local_def_id) .is_some_and(|sig| sig.has_self), - None => self.tcx.associated_item(def_id).fn_has_self_parameter, + None => self.tcx.associated_item(def_id).is_method(), }, _ => span_bug!(span, "unexpected DefKind for delegation item"), } @@ -104,9 +105,9 @@ impl<'hir> LoweringContext<'_, 'hir> { let decl = self.lower_delegation_decl(sig_id, param_count, c_variadic, span); let sig = self.lower_delegation_sig(sig_id, decl, span); let body_id = self.lower_delegation_body(delegation, param_count, span); - + let ident = self.lower_ident(delegation.ident); let generics = self.lower_delegation_generics(span); - DelegationResults { body_id, sig, generics } + DelegationResults { body_id, sig, ident, generics } } Err(err) => self.generate_delegation_error(err, span), } @@ -233,12 +234,13 @@ impl<'hir> LoweringContext<'_, 'hir> { hir::FnSig { decl, header, span } } - fn generate_param(&mut self, span: Span) -> (hir::Param<'hir>, NodeId) { + fn generate_param(&mut self, idx: usize, span: Span) -> (hir::Param<'hir>, NodeId) { let pat_node_id = self.next_node_id(); let pat_id = self.lower_node_id(pat_node_id); + let ident = Ident::with_dummy_span(Symbol::intern(&format!("arg{idx}"))); let pat = self.arena.alloc(hir::Pat { hir_id: pat_id, - kind: hir::PatKind::Binding(hir::BindingMode::NONE, pat_id, Ident::empty(), None), + kind: hir::PatKind::Binding(hir::BindingMode::NONE, pat_id, ident, None), span, default_binding_modes: false, }); @@ -246,9 +248,9 @@ impl<'hir> LoweringContext<'_, 'hir> { (hir::Param { hir_id: self.next_id(), pat, ty_span: span, span }, pat_node_id) } - fn generate_arg(&mut self, param_id: HirId, span: Span) -> hir::Expr<'hir> { + fn generate_arg(&mut self, idx: usize, param_id: HirId, span: Span) -> hir::Expr<'hir> { let segments = self.arena.alloc_from_iter(iter::once(hir::PathSegment { - ident: Ident::empty(), + ident: Ident::with_dummy_span(Symbol::intern(&format!("arg{idx}"))), hir_id: self.next_id(), res: Res::Local(param_id), args: None, @@ -272,7 +274,7 @@ impl<'hir> LoweringContext<'_, 'hir> { let mut args: Vec> = Vec::with_capacity(param_count); for idx in 0..param_count { - let (param, pat_node_id) = this.generate_param(span); + let (param, pat_node_id) = this.generate_param(idx, span); parameters.push(param); let arg = if let Some(block) = block @@ -288,7 +290,7 @@ impl<'hir> LoweringContext<'_, 'hir> { this.ident_and_label_to_local_id.insert(pat_node_id, param.pat.hir_id.local_id); this.lower_target_expr(&block) } else { - this.generate_arg(param.pat.hir_id, span) + this.generate_arg(idx, param.pat.hir_id, span) }; args.push(arg); } @@ -405,8 +407,9 @@ impl<'hir> LoweringContext<'_, 'hir> { let header = self.generate_header_error(); let sig = hir::FnSig { decl, header, span }; + let ident = Ident::dummy(); let body_id = self.lower_body(|this| (&[], this.mk_expr(hir::ExprKind::Err(err), span))); - DelegationResults { generics, body_id, sig } + DelegationResults { ident, generics, body_id, sig } } fn generate_header_error(&self) -> hir::FnHeader { diff --git a/compiler/rustc_ast_lowering/src/expr.rs b/compiler/rustc_ast_lowering/src/expr.rs index 52291fdfb3029..60e310f6b4f21 100644 --- a/compiler/rustc_ast_lowering/src/expr.rs +++ b/compiler/rustc_ast_lowering/src/expr.rs @@ -74,14 +74,16 @@ impl<'hir> LoweringContext<'_, 'hir> { // Merge attributes into the inner expression. if !e.attrs.is_empty() { let old_attrs = self.attrs.get(&ex.hir_id.local_id).copied().unwrap_or(&[]); - self.attrs.insert( - ex.hir_id.local_id, - &*self.arena.alloc_from_iter( - self.lower_attrs_vec(&e.attrs, e.span) - .into_iter() - .chain(old_attrs.iter().cloned()), - ), + let attrs = &*self.arena.alloc_from_iter( + self.lower_attrs_vec(&e.attrs, e.span) + .into_iter() + .chain(old_attrs.iter().cloned()), ); + if attrs.is_empty() { + return ex; + } + + self.attrs.insert(ex.hir_id.local_id, attrs); } return ex; } @@ -97,7 +99,7 @@ impl<'hir> LoweringContext<'_, 'hir> { } let expr_hir_id = self.lower_node_id(e.id); - self.lower_attrs(expr_hir_id, &e.attrs, e.span); + self.lower_attrs(expr_hir_id, &e.attrs, e.span, &[]); let kind = match &e.kind { ExprKind::Array(exprs) => hir::ExprKind::Array(self.lower_exprs(exprs)), @@ -274,7 +276,7 @@ impl<'hir> LoweringContext<'_, 'hir> { } ExprKind::Assign(el, er, span) => self.lower_expr_assign(el, er, *span, e.span), ExprKind::AssignOp(op, el, er) => hir::ExprKind::AssignOp( - self.lower_binop(*op), + self.lower_assign_op(*op), self.lower_expr(el), self.lower_expr(er), ), @@ -397,12 +399,16 @@ impl<'hir> LoweringContext<'_, 'hir> { &mut self, expr: &'hir hir::Expr<'hir>, span: Span, - check_ident: Ident, - check_hir_id: HirId, + cond_ident: Ident, + cond_hir_id: HirId, ) -> &'hir hir::Expr<'hir> { - let checker_fn = self.expr_ident(span, check_ident, check_hir_id); - let span = self.mark_span_with_reason(DesugaringKind::Contract, span, None); - self.expr_call(span, checker_fn, std::slice::from_ref(expr)) + let cond_fn = self.expr_ident(span, cond_ident, cond_hir_id); + let call_expr = self.expr_call_lang_item_fn_mut( + span, + hir::LangItem::ContractCheckEnsures, + arena_vec![self; *cond_fn, *expr], + ); + self.arena.alloc(call_expr) } pub(crate) fn lower_const_block(&mut self, c: &AnonConst) -> hir::ConstBlock { @@ -443,6 +449,10 @@ impl<'hir> LoweringContext<'_, 'hir> { Spanned { node: b.node, span: self.lower_span(b.span) } } + fn lower_assign_op(&mut self, a: AssignOp) -> AssignOp { + Spanned { node: a.node, span: self.lower_span(a.span) } + } + fn lower_legacy_const_generics( &mut self, mut f: Expr, @@ -482,9 +492,8 @@ impl<'hir> LoweringContext<'_, 'hir> { let mut generic_args = ThinVec::new(); for (idx, arg) in args.iter().cloned().enumerate() { if legacy_args_idx.contains(&idx) { - let parent_def_id = self.current_hir_id_owner.def_id; let node_id = self.next_node_id(); - self.create_def(parent_def_id, node_id, None, DefKind::AnonConst, f.span); + self.create_def(node_id, None, DefKind::AnonConst, f.span); let mut visitor = WillCreateDefIdsVisitor {}; let const_value = if let ControlFlow::Break(span) = visitor.visit_expr(&arg) { AstP(Expr { @@ -670,7 +679,7 @@ impl<'hir> LoweringContext<'_, 'hir> { let guard = arm.guard.as_ref().map(|cond| self.lower_expr(cond)); let hir_id = self.next_id(); let span = self.lower_span(arm.span); - self.lower_attrs(hir_id, &arm.attrs, arm.span); + self.lower_attrs(hir_id, &arm.attrs, arm.span, &[]); let is_never_pattern = pat.is_never_pattern(); // We need to lower the body even if it's unneeded for never pattern in match, // ensure that we can get HirId for DefId if need (issue #137708). @@ -843,6 +852,7 @@ impl<'hir> LoweringContext<'_, 'hir> { span: unstable_span, }], span, + &[], ); } } @@ -1681,7 +1691,7 @@ impl<'hir> LoweringContext<'_, 'hir> { fn lower_expr_field(&mut self, f: &ExprField) -> hir::ExprField<'hir> { let hir_id = self.lower_node_id(f.id); - self.lower_attrs(hir_id, &f.attrs, f.span); + self.lower_attrs(hir_id, &f.attrs, f.span, &[]); hir::ExprField { hir_id, ident: self.lower_ident(f.ident), @@ -1937,7 +1947,7 @@ impl<'hir> LoweringContext<'_, 'hir> { // // Also, add the attributes to the outer returned expr node. let expr = self.expr_drop_temps_mut(for_span, match_expr); - self.lower_attrs(expr.hir_id, &e.attrs, e.span); + self.lower_attrs(expr.hir_id, &e.attrs, e.span, &[]); expr } @@ -1994,7 +2004,7 @@ impl<'hir> LoweringContext<'_, 'hir> { let val_ident = Ident::with_dummy_span(sym::val); let (val_pat, val_pat_nid) = self.pat_ident(span, val_ident); let val_expr = self.expr_ident(span, val_ident, val_pat_nid); - self.lower_attrs(val_expr.hir_id, &attrs, span); + self.lower_attrs(val_expr.hir_id, &attrs, span, &[]); let continue_pat = self.pat_cf_continue(unstable_span, val_pat); self.arm(continue_pat, val_expr) }; @@ -2025,7 +2035,7 @@ impl<'hir> LoweringContext<'_, 'hir> { let ret_expr = self.checked_return(Some(from_residual_expr)); self.arena.alloc(self.expr(try_span, ret_expr)) }; - self.lower_attrs(ret_expr.hir_id, &attrs, ret_expr.span); + self.lower_attrs(ret_expr.hir_id, &attrs, ret_expr.span, &[]); let break_pat = self.pat_cf_break(try_span, residual_local); self.arm(break_pat, ret_expr) diff --git a/compiler/rustc_ast_lowering/src/item.rs b/compiler/rustc_ast_lowering/src/item.rs index 43bf951eddc6c..00a0f1d86b2ea 100644 --- a/compiler/rustc_ast_lowering/src/item.rs +++ b/compiler/rustc_ast_lowering/src/item.rs @@ -2,10 +2,11 @@ use rustc_abi::ExternAbi; use rustc_ast::ptr::P; use rustc_ast::visit::AssocCtxt; use rustc_ast::*; +use rustc_attr_parsing::{AttributeKind, EIIDecl}; use rustc_errors::ErrorGuaranteed; use rustc_hir::def::{DefKind, Res}; -use rustc_hir::def_id::{CRATE_DEF_ID, LocalDefId}; -use rustc_hir::{self as hir, HirId, IsAnonInPath, PredicateOrigin}; +use rustc_hir::def_id::{CRATE_DEF_ID, DefId, LocalDefId}; +use rustc_hir::{self as hir, HirId, LifetimeSource, PredicateOrigin}; use rustc_index::{IndexSlice, IndexVec}; use rustc_middle::ty::{ResolverAstLowering, TyCtxt}; use rustc_span::edit_distance::find_best_match_for_name; @@ -92,7 +93,7 @@ impl<'a, 'hir> ItemLowerer<'a, 'hir> { self.with_lctx(CRATE_NODE_ID, |lctx| { let module = lctx.lower_mod(&c.items, &c.spans); // FIXME(jdonszelman): is dummy span ever a problem here? - lctx.lower_attrs(hir::CRATE_HIR_ID, &c.attrs, DUMMY_SP); + lctx.lower_attrs(hir::CRATE_HIR_ID, &c.attrs, DUMMY_SP, &[]); hir::OwnerNode::Crate(module) }) } @@ -107,7 +108,6 @@ impl<'a, 'hir> ItemLowerer<'a, 'hir> { } fn lower_foreign_item(&mut self, item: &ForeignItem) { - debug_assert_ne!(item.ident.name, kw::Empty); self.with_lctx(item.id, |lctx| hir::OwnerNode::ForeignItem(lctx.lower_foreign_item(item))) } } @@ -147,11 +147,88 @@ impl<'hir> LoweringContext<'_, 'hir> { } } + fn generate_extra_attrs_for_item_kind( + &mut self, + id: NodeId, + i: &ItemKind, + ) -> Vec { + match i { + ItemKind::Fn(box Fn { eii_impl, .. }) if eii_impl.is_empty() => Vec::new(), + ItemKind::Fn(box Fn { eii_impl, .. }) => { + vec![hir::Attribute::Parsed(AttributeKind::EiiImpl( + eii_impl + .iter() + .flat_map( + |EIIImpl { + node_id, + eii_macro_path, + impl_safety, + span, + inner_span, + is_default, + }| { + self.lower_path_simple_eii(*node_id, eii_macro_path).map(|did| { + rustc_attr_parsing::EIIImpl { + eii_macro: did, + span: self.lower_span(*span), + inner_span: self.lower_span(*inner_span), + impl_marked_unsafe: self + .lower_safety(*impl_safety, hir::Safety::Safe) + .is_unsafe(), + is_default: *is_default, + } + }) + }, + ) + .collect(), + ))] + } + ItemKind::MacroDef( + _, + MacroDef { + eii_macro_for: Some(EIIMacroFor { extern_item_path, impl_unsafe, span }), + .. + }, + ) => self + .lower_path_simple_eii(id, extern_item_path) + .map(|did| { + vec![hir::Attribute::Parsed(AttributeKind::EiiMacroFor(EIIDecl { + eii_extern_item: did, + impl_unsafe: *impl_unsafe, + span: self.lower_span(*span), + }))] + }) + .unwrap_or_default(), + ItemKind::ExternCrate(..) + | ItemKind::Use(..) + | ItemKind::Static(..) + | ItemKind::Const(..) + | ItemKind::Mod(..) + | ItemKind::ForeignMod(..) + | ItemKind::GlobalAsm(..) + | ItemKind::TyAlias(..) + | ItemKind::Enum(..) + | ItemKind::Struct(..) + | ItemKind::Union(..) + | ItemKind::Trait(..) + | ItemKind::TraitAlias(..) + | ItemKind::Impl(..) + | ItemKind::MacCall(..) + | ItemKind::MacroDef(..) + | ItemKind::Delegation(..) + | ItemKind::DelegationMac(..) => Vec::new(), + } + } + fn lower_item(&mut self, i: &Item) -> &'hir hir::Item<'hir> { let vis_span = self.lower_span(i.vis.span); let hir_id = hir::HirId::make_owner(self.current_hir_id_owner.def_id); - let attrs = self.lower_attrs(hir_id, &i.attrs, i.span); - let kind = self.lower_item_kind(i.span, i.id, hir_id, i.ident, attrs, vis_span, &i.kind); + + let extra_hir_attributes = self.generate_extra_attrs_for_item_kind(i.id, &i.kind); + + let attrs = self.lower_attrs(hir_id, &i.attrs, i.span, &extra_hir_attributes); + let kind = self.lower_item_kind(i.span, i.id, hir_id, attrs, vis_span, &i.kind); + let item = hir::Item { owner_id: hir_id.expect_owner(), kind, @@ -166,41 +243,44 @@ impl<'hir> LoweringContext<'_, 'hir> { span: Span, id: NodeId, hir_id: hir::HirId, - ident: Ident, attrs: &'hir [hir::Attribute], vis_span: Span, i: &ItemKind, ) -> hir::ItemKind<'hir> { match i { - ItemKind::ExternCrate(orig_name) => { - debug_assert_ne!(ident.name, kw::Empty); - let ident = self.lower_ident(ident); + ItemKind::ExternCrate(orig_name, ident) => { + let ident = self.lower_ident(*ident); hir::ItemKind::ExternCrate(*orig_name, ident) } ItemKind::Use(use_tree) => { - debug_assert_eq!(ident.name, kw::Empty); // Start with an empty prefix. let prefix = Path { segments: ThinVec::new(), span: use_tree.span, tokens: None }; self.lower_use_tree(use_tree, &prefix, id, vis_span, attrs) } ItemKind::Static(box ast::StaticItem { + ident, ty: t, safety: _, mutability: m, expr: e, define_opaque, }) => { - debug_assert_ne!(ident.name, kw::Empty); - let ident = self.lower_ident(ident); + let ident = self.lower_ident(*ident); let (ty, body_id) = self.lower_const_item(t, span, e.as_deref(), ImplTraitPosition::StaticTy); self.lower_define_opaque(hir_id, define_opaque); hir::ItemKind::Static(ident, ty, *m, body_id) } - ItemKind::Const(box ast::ConstItem { generics, ty, expr, define_opaque, .. }) => { - debug_assert_ne!(ident.name, kw::Empty); - let ident = self.lower_ident(ident); + ItemKind::Const(box ast::ConstItem { + ident, + generics, + ty, + expr, + define_opaque, + .. + }) => { + let ident = self.lower_ident(*ident); let (generics, (ty, body_id)) = self.lower_generics( generics, id, @@ -214,13 +294,13 @@ impl<'hir> LoweringContext<'_, 'hir> { } ItemKind::Fn(box Fn { sig: FnSig { decl, header, span: fn_sig_span }, + ident, generics, body, contract, define_opaque, .. }) => { - debug_assert_ne!(ident.name, kw::Empty); self.with_new_scopes(*fn_sig_span, |this| { // Note: we don't need to change the return type from `T` to // `impl Future` here because lower_body @@ -248,7 +328,7 @@ impl<'hir> LoweringContext<'_, 'hir> { span: this.lower_span(*fn_sig_span), }; this.lower_define_opaque(hir_id, define_opaque); - let ident = this.lower_ident(ident); + let ident = this.lower_ident(*ident); hir::ItemKind::Fn { ident, sig, @@ -258,9 +338,8 @@ impl<'hir> LoweringContext<'_, 'hir> { } }) } - ItemKind::Mod(_, mod_kind) => { - debug_assert_ne!(ident.name, kw::Empty); - let ident = self.lower_ident(ident); + ItemKind::Mod(_, ident, mod_kind) => { + let ident = self.lower_ident(*ident); match mod_kind { ModKind::Loaded(items, _, spans, _) => { hir::ItemKind::Mod(ident, self.lower_mod(items, spans)) @@ -268,24 +347,19 @@ impl<'hir> LoweringContext<'_, 'hir> { ModKind::Unloaded => panic!("`mod` items should have been loaded by now"), } } - ItemKind::ForeignMod(fm) => { - debug_assert_eq!(ident.name, kw::Empty); - hir::ItemKind::ForeignMod { - abi: fm.abi.map_or(ExternAbi::FALLBACK, |abi| self.lower_abi(abi)), - items: self - .arena - .alloc_from_iter(fm.items.iter().map(|x| self.lower_foreign_item_ref(x))), - } - } + ItemKind::ForeignMod(fm) => hir::ItemKind::ForeignMod { + abi: fm.abi.map_or(ExternAbi::FALLBACK, |abi| self.lower_abi(abi)), + items: self + .arena + .alloc_from_iter(fm.items.iter().map(|x| self.lower_foreign_item_ref(x))), + }, ItemKind::GlobalAsm(asm) => { - debug_assert_eq!(ident.name, kw::Empty); let asm = self.lower_inline_asm(span, asm); let fake_body = self.lower_body(|this| (&[], this.expr(span, hir::ExprKind::InlineAsm(asm)))); hir::ItemKind::GlobalAsm { asm, fake_body } } - ItemKind::TyAlias(box TyAlias { generics, where_clauses, ty, .. }) => { - debug_assert_ne!(ident.name, kw::Empty); + ItemKind::TyAlias(box TyAlias { ident, generics, where_clauses, ty, .. }) => { // We lower // // type Foo = impl Trait @@ -294,7 +368,7 @@ impl<'hir> LoweringContext<'_, 'hir> { // // type Foo = Foo1 // opaque type Foo1: Trait - let ident = self.lower_ident(ident); + let ident = self.lower_ident(*ident); let mut generics = generics.clone(); add_ty_alias_where_clause(&mut generics, *where_clauses, true); let (generics, ty) = self.lower_generics( @@ -322,9 +396,8 @@ impl<'hir> LoweringContext<'_, 'hir> { ); hir::ItemKind::TyAlias(ident, ty, generics) } - ItemKind::Enum(enum_definition, generics) => { - debug_assert_ne!(ident.name, kw::Empty); - let ident = self.lower_ident(ident); + ItemKind::Enum(ident, enum_definition, generics) => { + let ident = self.lower_ident(*ident); let (generics, variants) = self.lower_generics( generics, id, @@ -337,9 +410,8 @@ impl<'hir> LoweringContext<'_, 'hir> { ); hir::ItemKind::Enum(ident, hir::EnumDef { variants }, generics) } - ItemKind::Struct(struct_def, generics) => { - debug_assert_ne!(ident.name, kw::Empty); - let ident = self.lower_ident(ident); + ItemKind::Struct(ident, struct_def, generics) => { + let ident = self.lower_ident(*ident); let (generics, struct_def) = self.lower_generics( generics, id, @@ -348,9 +420,8 @@ impl<'hir> LoweringContext<'_, 'hir> { ); hir::ItemKind::Struct(ident, struct_def, generics) } - ItemKind::Union(vdata, generics) => { - debug_assert_ne!(ident.name, kw::Empty); - let ident = self.lower_ident(ident); + ItemKind::Union(ident, vdata, generics) => { + let ident = self.lower_ident(*ident); let (generics, vdata) = self.lower_generics( generics, id, @@ -369,7 +440,6 @@ impl<'hir> LoweringContext<'_, 'hir> { self_ty: ty, items: impl_items, }) => { - debug_assert_eq!(ident.name, kw::Empty); // Lower the "impl header" first. This ordering is important // for in-band lifetimes! Consider `'a` here: // @@ -435,9 +505,8 @@ impl<'hir> LoweringContext<'_, 'hir> { items: new_impl_items, })) } - ItemKind::Trait(box Trait { is_auto, safety, generics, bounds, items }) => { - debug_assert_ne!(ident.name, kw::Empty); - let ident = self.lower_ident(ident); + ItemKind::Trait(box Trait { is_auto, safety, ident, generics, bounds, items }) => { + let ident = self.lower_ident(*ident); let (generics, (safety, items, bounds)) = self.lower_generics( generics, id, @@ -456,9 +525,8 @@ impl<'hir> LoweringContext<'_, 'hir> { ); hir::ItemKind::Trait(*is_auto, safety, ident, generics, bounds, items) } - ItemKind::TraitAlias(generics, bounds) => { - debug_assert_ne!(ident.name, kw::Empty); - let ident = self.lower_ident(ident); + ItemKind::TraitAlias(ident, generics, bounds) => { + let ident = self.lower_ident(*ident); let (generics, bounds) = self.lower_generics( generics, id, @@ -472,9 +540,8 @@ impl<'hir> LoweringContext<'_, 'hir> { ); hir::ItemKind::TraitAlias(ident, generics, bounds) } - ItemKind::MacroDef(MacroDef { body, macro_rules }) => { - debug_assert_ne!(ident.name, kw::Empty); - let ident = self.lower_ident(ident); + ItemKind::MacroDef(ident, MacroDef { body, macro_rules, eii_macro_for: _ }) => { + let ident = self.lower_ident(*ident); let body = P(self.lower_delim_args(body)); let def_id = self.local_def_id(id); let def_kind = self.tcx.def_kind(def_id); @@ -484,15 +551,18 @@ impl<'hir> LoweringContext<'_, 'hir> { def_kind.descr(def_id.to_def_id()) ); }; - let macro_def = self.arena.alloc(ast::MacroDef { body, macro_rules: *macro_rules }); + let macro_def = self.arena.alloc(ast::MacroDef { + body, + macro_rules: *macro_rules, + eii_macro_for: None, + }); + hir::ItemKind::Macro(ident, macro_def, macro_kind) } ItemKind::Delegation(box delegation) => { - debug_assert_ne!(ident.name, kw::Empty); - let ident = self.lower_ident(ident); let delegation_results = self.lower_delegation(delegation, id, false); hir::ItemKind::Fn { - ident, + ident: delegation_results.ident, sig: delegation_results.sig, generics: delegation_results.generics, body: delegation_results.body_id, @@ -505,6 +575,16 @@ impl<'hir> LoweringContext<'_, 'hir> { } } + fn lower_path_simple_eii(&mut self, id: NodeId, path: &Path) -> Option { + let res = self.resolver.get_partial_res(id)?; + let Some(did) = res.expect_full_res().opt_def_id() else { + self.dcx().span_delayed_bug(path.span, "should have errored in resolve"); + return None; + }; + + Some(did) + } + fn lower_const_item( &mut self, ty: &Ty, @@ -648,62 +728,62 @@ impl<'hir> LoweringContext<'_, 'hir> { fn lower_foreign_item(&mut self, i: &ForeignItem) -> &'hir hir::ForeignItem<'hir> { let hir_id = hir::HirId::make_owner(self.current_hir_id_owner.def_id); let owner_id = hir_id.expect_owner(); - let attrs = self.lower_attrs(hir_id, &i.attrs, i.span); - let item = hir::ForeignItem { - owner_id, - ident: self.lower_ident(i.ident), - kind: match &i.kind { - ForeignItemKind::Fn(box Fn { sig, generics, define_opaque, .. }) => { - let fdec = &sig.decl; - let itctx = ImplTraitContext::Universal; - let (generics, (decl, fn_args)) = - self.lower_generics(generics, i.id, itctx, |this| { - ( - // Disallow `impl Trait` in foreign items. - this.lower_fn_decl( - fdec, - i.id, - sig.span, - FnDeclKind::ExternFn, - None, - ), - this.lower_fn_params_to_names(fdec), - ) - }); + let attrs = self.lower_attrs(hir_id, &i.attrs, i.span, &[]); + let (ident, kind) = match &i.kind { + ForeignItemKind::Fn(box Fn { sig, ident, generics, define_opaque, .. }) => { + let fdec = &sig.decl; + let itctx = ImplTraitContext::Universal; + let (generics, (decl, fn_args)) = + self.lower_generics(generics, i.id, itctx, |this| { + ( + // Disallow `impl Trait` in foreign items. + this.lower_fn_decl(fdec, i.id, sig.span, FnDeclKind::ExternFn, None), + this.lower_fn_params_to_idents(fdec), + ) + }); - // Unmarked safety in unsafe block defaults to unsafe. - let header = self.lower_fn_header(sig.header, hir::Safety::Unsafe, attrs); + // Unmarked safety in unsafe block defaults to unsafe. + let header = self.lower_fn_header(sig.header, hir::Safety::Unsafe, attrs); - if define_opaque.is_some() { - self.dcx().span_err(i.span, "foreign functions cannot define opaque types"); - } + if define_opaque.is_some() { + self.dcx().span_err(i.span, "foreign functions cannot define opaque types"); + } + ( + ident, hir::ForeignItemKind::Fn( hir::FnSig { header, decl, span: self.lower_span(sig.span) }, fn_args, generics, - ) + ), + ) + } + ForeignItemKind::Static(box StaticItem { + ident, + ty, + mutability, + expr: _, + safety, + define_opaque, + }) => { + let ty = + self.lower_ty(ty, ImplTraitContext::Disallowed(ImplTraitPosition::StaticTy)); + let safety = self.lower_safety(*safety, hir::Safety::Unsafe); + if define_opaque.is_some() { + self.dcx().span_err(i.span, "foreign statics cannot define opaque types"); } - ForeignItemKind::Static(box StaticItem { - ty, - mutability, - expr: _, - safety, - define_opaque, - }) => { - let ty = self - .lower_ty(ty, ImplTraitContext::Disallowed(ImplTraitPosition::StaticTy)); - let safety = self.lower_safety(*safety, hir::Safety::Unsafe); - - if define_opaque.is_some() { - self.dcx().span_err(i.span, "foreign statics cannot define opaque types"); - } + (ident, hir::ForeignItemKind::Static(ty, *mutability, safety)) + } + ForeignItemKind::TyAlias(box TyAlias { ident, .. }) => { + (ident, hir::ForeignItemKind::Type) + } + ForeignItemKind::MacCall(_) => panic!("macro shouldn't exist here"), + }; - hir::ForeignItemKind::Static(ty, *mutability, safety) - } - ForeignItemKind::TyAlias(..) => hir::ForeignItemKind::Type, - ForeignItemKind::MacCall(_) => panic!("macro shouldn't exist here"), - }, + let item = hir::ForeignItem { + owner_id, + ident: self.lower_ident(*ident), + kind, vis_span: self.lower_span(i.vis.span), span: self.lower_span(i.span), }; @@ -713,14 +793,16 @@ impl<'hir> LoweringContext<'_, 'hir> { fn lower_foreign_item_ref(&mut self, i: &ForeignItem) -> hir::ForeignItemRef { hir::ForeignItemRef { id: hir::ForeignItemId { owner_id: self.owner_id(i.id) }, - ident: self.lower_ident(i.ident), + // `unwrap` is safe because `ForeignItemKind::MacCall` is the only foreign item kind + // without an identifier and it cannot reach here. + ident: self.lower_ident(i.kind.ident().unwrap()), span: self.lower_span(i.span), } } fn lower_variant(&mut self, v: &Variant) -> hir::Variant<'hir> { let hir_id = self.lower_node_id(v.id); - self.lower_attrs(hir_id, &v.attrs, v.span); + self.lower_attrs(hir_id, &v.attrs, v.span, &[]); hir::Variant { hir_id, def_id: self.local_def_id(v.id), @@ -782,7 +864,7 @@ impl<'hir> LoweringContext<'_, 'hir> { ) -> hir::FieldDef<'hir> { let ty = self.lower_ty(&f.ty, ImplTraitContext::Disallowed(ImplTraitPosition::FieldTy)); let hir_id = self.lower_node_id(f.id); - self.lower_attrs(hir_id, &f.attrs, f.span); + self.lower_attrs(hir_id, &f.attrs, f.span, &[]); hir::FieldDef { span: self.lower_span(f.span), hir_id, @@ -800,13 +882,19 @@ impl<'hir> LoweringContext<'_, 'hir> { } fn lower_trait_item(&mut self, i: &AssocItem) -> &'hir hir::TraitItem<'hir> { - debug_assert_ne!(i.ident.name, kw::Empty); let hir_id = hir::HirId::make_owner(self.current_hir_id_owner.def_id); - let attrs = self.lower_attrs(hir_id, &i.attrs, i.span); + let attrs = self.lower_attrs(hir_id, &i.attrs, i.span, &[]); let trait_item_def_id = hir_id.expect_owner(); - let (generics, kind, has_default) = match &i.kind { - AssocItemKind::Const(box ConstItem { generics, ty, expr, define_opaque, .. }) => { + let (ident, generics, kind, has_default) = match &i.kind { + AssocItemKind::Const(box ConstItem { + ident, + generics, + ty, + expr, + define_opaque, + .. + }) => { let (generics, kind) = self.lower_generics( generics, i.id, @@ -831,12 +919,14 @@ impl<'hir> LoweringContext<'_, 'hir> { } } - (generics, kind, expr.is_some()) + (*ident, generics, kind, expr.is_some()) } - AssocItemKind::Fn(box Fn { sig, generics, body: None, define_opaque, .. }) => { + AssocItemKind::Fn(box Fn { + sig, ident, generics, body: None, define_opaque, .. + }) => { // FIXME(contracts): Deny contract here since it won't apply to // any impl method or callees. - let names = self.lower_fn_params_to_names(&sig.decl); + let idents = self.lower_fn_params_to_idents(&sig.decl); let (generics, sig) = self.lower_method_sig( generics, sig, @@ -851,10 +941,16 @@ impl<'hir> LoweringContext<'_, 'hir> { "only trait methods with default bodies can define opaque types", ); } - (generics, hir::TraitItemKind::Fn(sig, hir::TraitFn::Required(names)), false) + ( + *ident, + generics, + hir::TraitItemKind::Fn(sig, hir::TraitFn::Required(idents)), + false, + ) } AssocItemKind::Fn(box Fn { sig, + ident, generics, body: Some(body), contract, @@ -880,9 +976,16 @@ impl<'hir> LoweringContext<'_, 'hir> { attrs, ); self.lower_define_opaque(hir_id, &define_opaque); - (generics, hir::TraitItemKind::Fn(sig, hir::TraitFn::Provided(body_id)), true) + ( + *ident, + generics, + hir::TraitItemKind::Fn(sig, hir::TraitFn::Provided(body_id)), + true, + ) } - AssocItemKind::Type(box TyAlias { generics, where_clauses, bounds, ty, .. }) => { + AssocItemKind::Type(box TyAlias { + ident, generics, where_clauses, bounds, ty, .. + }) => { let mut generics = generics.clone(); add_ty_alias_where_clause(&mut generics, *where_clauses, false); let (generics, kind) = self.lower_generics( @@ -905,7 +1008,7 @@ impl<'hir> LoweringContext<'_, 'hir> { ) }, ); - (generics, kind, ty.is_some()) + (*ident, generics, kind, ty.is_some()) } AssocItemKind::Delegation(box delegation) => { let delegation_results = self.lower_delegation(delegation, i.id, false); @@ -913,7 +1016,7 @@ impl<'hir> LoweringContext<'_, 'hir> { delegation_results.sig, hir::TraitFn::Provided(delegation_results.body_id), ); - (delegation_results.generics, item_kind, true) + (delegation.ident, delegation_results.generics, item_kind, true) } AssocItemKind::MacCall(..) | AssocItemKind::DelegationMac(..) => { panic!("macros should have been expanded by now") @@ -922,7 +1025,7 @@ impl<'hir> LoweringContext<'_, 'hir> { let item = hir::TraitItem { owner_id: trait_item_def_id, - ident: self.lower_ident(i.ident), + ident: self.lower_ident(ident), generics, kind, span: self.lower_span(i.span), @@ -932,15 +1035,20 @@ impl<'hir> LoweringContext<'_, 'hir> { } fn lower_trait_item_ref(&mut self, i: &AssocItem) -> hir::TraitItemRef { - let kind = match &i.kind { - AssocItemKind::Const(..) => hir::AssocItemKind::Const, - AssocItemKind::Type(..) => hir::AssocItemKind::Type, - AssocItemKind::Fn(box Fn { sig, .. }) => { - hir::AssocItemKind::Fn { has_self: sig.decl.has_self() } + let (ident, kind) = match &i.kind { + AssocItemKind::Const(box ConstItem { ident, .. }) => { + (*ident, hir::AssocItemKind::Const) } - AssocItemKind::Delegation(box delegation) => hir::AssocItemKind::Fn { - has_self: self.delegatee_is_method(i.id, delegation.id, i.span, false), - }, + AssocItemKind::Type(box TyAlias { ident, .. }) => (*ident, hir::AssocItemKind::Type), + AssocItemKind::Fn(box Fn { ident, sig, .. }) => { + (*ident, hir::AssocItemKind::Fn { has_self: sig.decl.has_self() }) + } + AssocItemKind::Delegation(box delegation) => ( + delegation.ident, + hir::AssocItemKind::Fn { + has_self: self.delegatee_is_method(i.id, delegation.id, i.span, false), + }, + ), AssocItemKind::MacCall(..) | AssocItemKind::DelegationMac(..) => { panic!("macros should have been expanded by now") } @@ -948,7 +1056,7 @@ impl<'hir> LoweringContext<'_, 'hir> { let id = hir::TraitItemId { owner_id: self.owner_id(i.id) }; hir::TraitItemRef { id, - ident: self.lower_ident(i.ident), + ident: self.lower_ident(ident), span: self.lower_span(i.span), kind, } @@ -964,16 +1072,23 @@ impl<'hir> LoweringContext<'_, 'hir> { i: &AssocItem, is_in_trait_impl: bool, ) -> &'hir hir::ImplItem<'hir> { - debug_assert_ne!(i.ident.name, kw::Empty); // Since `default impl` is not yet implemented, this is always true in impls. let has_value = true; let (defaultness, _) = self.lower_defaultness(i.kind.defaultness(), has_value); let hir_id = hir::HirId::make_owner(self.current_hir_id_owner.def_id); - let attrs = self.lower_attrs(hir_id, &i.attrs, i.span); + let attrs = self.lower_attrs(hir_id, &i.attrs, i.span, &[]); - let (generics, kind) = match &i.kind { - AssocItemKind::Const(box ConstItem { generics, ty, expr, define_opaque, .. }) => self - .lower_generics( + let (ident, (generics, kind)) = match &i.kind { + AssocItemKind::Const(box ConstItem { + ident, + generics, + ty, + expr, + define_opaque, + .. + }) => ( + *ident, + self.lower_generics( generics, i.id, ImplTraitContext::Disallowed(ImplTraitPosition::Generic), @@ -982,11 +1097,19 @@ impl<'hir> LoweringContext<'_, 'hir> { .lower_ty(ty, ImplTraitContext::Disallowed(ImplTraitPosition::ConstTy)); let body = this.lower_const_body(i.span, expr.as_deref()); this.lower_define_opaque(hir_id, &define_opaque); - hir::ImplItemKind::Const(ty, body) }, ), - AssocItemKind::Fn(box Fn { sig, generics, body, contract, define_opaque, .. }) => { + ), + AssocItemKind::Fn(box Fn { + sig, + ident, + generics, + body, + contract, + define_opaque, + .. + }) => { let body_id = self.lower_maybe_coroutine_body( sig.span, i.span, @@ -1007,44 +1130,50 @@ impl<'hir> LoweringContext<'_, 'hir> { ); self.lower_define_opaque(hir_id, &define_opaque); - (generics, hir::ImplItemKind::Fn(sig, body_id)) + (*ident, (generics, hir::ImplItemKind::Fn(sig, body_id))) } - AssocItemKind::Type(box TyAlias { generics, where_clauses, ty, .. }) => { + AssocItemKind::Type(box TyAlias { ident, generics, where_clauses, ty, .. }) => { let mut generics = generics.clone(); add_ty_alias_where_clause(&mut generics, *where_clauses, false); - self.lower_generics( - &generics, - i.id, - ImplTraitContext::Disallowed(ImplTraitPosition::Generic), - |this| match ty { - None => { - let guar = this.dcx().span_delayed_bug( - i.span, - "expected to lower associated type, but it was missing", - ); - let ty = this.arena.alloc(this.ty(i.span, hir::TyKind::Err(guar))); - hir::ImplItemKind::Type(ty) - } - Some(ty) => { - let ty = this.lower_ty( - ty, - ImplTraitContext::OpaqueTy { - origin: hir::OpaqueTyOrigin::TyAlias { - parent: this.local_def_id(i.id), - in_assoc_ty: true, + ( + *ident, + self.lower_generics( + &generics, + i.id, + ImplTraitContext::Disallowed(ImplTraitPosition::Generic), + |this| match ty { + None => { + let guar = this.dcx().span_delayed_bug( + i.span, + "expected to lower associated type, but it was missing", + ); + let ty = this.arena.alloc(this.ty(i.span, hir::TyKind::Err(guar))); + hir::ImplItemKind::Type(ty) + } + Some(ty) => { + let ty = this.lower_ty( + ty, + ImplTraitContext::OpaqueTy { + origin: hir::OpaqueTyOrigin::TyAlias { + parent: this.local_def_id(i.id), + in_assoc_ty: true, + }, }, - }, - ); - hir::ImplItemKind::Type(ty) - } - }, + ); + hir::ImplItemKind::Type(ty) + } + }, + ), ) } AssocItemKind::Delegation(box delegation) => { let delegation_results = self.lower_delegation(delegation, i.id, is_in_trait_impl); ( - delegation_results.generics, - hir::ImplItemKind::Fn(delegation_results.sig, delegation_results.body_id), + delegation.ident, + ( + delegation_results.generics, + hir::ImplItemKind::Fn(delegation_results.sig, delegation_results.body_id), + ), ) } AssocItemKind::MacCall(..) | AssocItemKind::DelegationMac(..) => { @@ -1054,7 +1183,7 @@ impl<'hir> LoweringContext<'_, 'hir> { let item = hir::ImplItem { owner_id: hir_id.expect_owner(), - ident: self.lower_ident(i.ident), + ident: self.lower_ident(ident), generics, kind, vis_span: self.lower_span(i.vis.span), @@ -1067,7 +1196,9 @@ impl<'hir> LoweringContext<'_, 'hir> { fn lower_impl_item_ref(&mut self, i: &AssocItem, is_in_trait_impl: bool) -> hir::ImplItemRef { hir::ImplItemRef { id: hir::ImplItemId { owner_id: self.owner_id(i.id) }, - ident: self.lower_ident(i.ident), + // `unwrap` is safe because `AssocItemKind::{MacCall,DelegationMac}` are the only + // assoc item kinds without an identifier and they cannot reach here. + ident: self.lower_ident(i.kind.ident().unwrap()), span: self.lower_span(i.span), kind: match &i.kind { AssocItemKind::Const(..) => hir::AssocItemKind::Const, @@ -1138,7 +1269,7 @@ impl<'hir> LoweringContext<'_, 'hir> { fn lower_param(&mut self, param: &Param) -> hir::Param<'hir> { let hir_id = self.lower_node_id(param.id); - self.lower_attrs(hir_id, ¶m.attrs, param.span); + self.lower_attrs(hir_id, ¶m.attrs, param.span, &[]); hir::Param { hir_id, pat: self.lower_pat(¶m.pat), @@ -1168,8 +1299,13 @@ impl<'hir> LoweringContext<'_, 'hir> { let precond = if let Some(req) = &contract.requires { // Lower the precondition check intrinsic. let lowered_req = this.lower_expr_mut(&req); + let req_span = this.mark_span_with_reason( + DesugaringKind::Contract, + lowered_req.span, + None, + ); let precond = this.expr_call_lang_item_fn_mut( - req.span, + req_span, hir::LangItem::ContractCheckRequires, &*arena_vec![this; lowered_req], ); @@ -1179,6 +1315,8 @@ impl<'hir> LoweringContext<'_, 'hir> { }; let (postcond, body) = if let Some(ens) = &contract.ensures { let ens_span = this.lower_span(ens.span); + let ens_span = + this.mark_span_with_reason(DesugaringKind::Contract, ens_span, None); // Set up the postcondition `let` statement. let check_ident: Ident = Ident::from_str_and_span("__ensures_checker", ens_span); @@ -1265,7 +1403,9 @@ impl<'hir> LoweringContext<'_, 'hir> { // create a fake body so that the entire rest of the compiler doesn't have to deal with // this as a special case. return self.lower_fn_body(decl, contract, |this| { - if attrs.iter().any(|a| a.name_or_empty() == sym::rustc_intrinsic) { + if attrs.iter().any(|a| a.has_name(sym::rustc_intrinsic)) + || this.tcx.is_sdylib_interface_build() + { let span = this.lower_span(span); let empty_block = hir::Block { hir_id: this.next_id(), @@ -1823,7 +1963,8 @@ impl<'hir> LoweringContext<'_, 'hir> { } GenericParamKind::Lifetime => { let lt_id = self.next_node_id(); - let lifetime = self.new_named_lifetime(id, lt_id, ident, IsAnonInPath::No); + let lifetime = + self.new_named_lifetime(id, lt_id, ident, LifetimeSource::Other, ident.into()); hir::WherePredicateKind::RegionPredicate(hir::WhereRegionPredicate { lifetime, bounds, @@ -1837,7 +1978,7 @@ impl<'hir> LoweringContext<'_, 'hir> { fn lower_where_predicate(&mut self, pred: &WherePredicate) -> hir::WherePredicate<'hir> { let hir_id = self.lower_node_id(pred.id); let span = self.lower_span(pred.span); - self.lower_attrs(hir_id, &pred.attrs, span); + self.lower_attrs(hir_id, &pred.attrs, span, &[]); let kind = self.arena.alloc(match &pred.kind { WherePredicateKind::BoundPredicate(WhereBoundPredicate { bound_generic_params, @@ -1856,7 +1997,11 @@ impl<'hir> LoweringContext<'_, 'hir> { }), WherePredicateKind::RegionPredicate(WhereRegionPredicate { lifetime, bounds }) => { hir::WherePredicateKind::RegionPredicate(hir::WhereRegionPredicate { - lifetime: self.lower_lifetime(lifetime), + lifetime: self.lower_lifetime( + lifetime, + LifetimeSource::Other, + lifetime.ident.into(), + ), bounds: self.lower_param_bounds( bounds, ImplTraitContext::Disallowed(ImplTraitPosition::Bound), diff --git a/compiler/rustc_ast_lowering/src/lib.rs b/compiler/rustc_ast_lowering/src/lib.rs index d5d6dcd8d631d..4835dd3e4252d 100644 --- a/compiler/rustc_ast_lowering/src/lib.rs +++ b/compiler/rustc_ast_lowering/src/lib.rs @@ -32,13 +32,12 @@ // tidy-alphabetical-start #![allow(internal_features)] -#![cfg_attr(doc, recursion_limit = "256")] // FIXME(nnethercote): will be removed by #124141 +#![cfg_attr(bootstrap, feature(let_chains))] #![doc(rust_logo)] #![feature(assert_matches)] #![feature(box_patterns)] #![feature(exact_size_is_empty)] #![feature(if_let_guard)] -#![feature(let_chains)] #![feature(rustdoc_internals)] // tidy-alphabetical-end @@ -50,13 +49,14 @@ use rustc_attr_parsing::{AttributeParser, OmitDoc}; use rustc_data_structures::fingerprint::Fingerprint; use rustc_data_structures::sorted_map::SortedMap; use rustc_data_structures::stable_hasher::{HashStable, StableHasher}; +use rustc_data_structures::sync::spawn; use rustc_data_structures::tagged_ptr::TaggedRef; use rustc_errors::{DiagArgFromDisplay, DiagCtxtHandle, StashKey}; use rustc_hir::def::{DefKind, LifetimeRes, Namespace, PartialRes, PerNS, Res}; use rustc_hir::def_id::{CRATE_DEF_ID, LOCAL_CRATE, LocalDefId}; use rustc_hir::{ - self as hir, ConstArg, GenericArg, HirId, IsAnonInPath, ItemLocalMap, LangItem, ParamName, - TraitCandidate, + self as hir, AngleBrackets, ConstArg, GenericArg, HirId, ItemLocalMap, LangItem, + LifetimeSource, LifetimeSyntax, ParamName, TraitCandidate, }; use rustc_index::{Idx, IndexSlice, IndexVec}; use rustc_macros::extension; @@ -455,9 +455,14 @@ pub fn lower_to_hir(tcx: TyCtxt<'_>, (): ()) -> hir::Crate<'_> { .lower_node(def_id); } - // Drop AST to free memory drop(ast_index); - sess.time("drop_ast", || drop(krate)); + + // Drop AST to free memory. It can be expensive so try to drop it on a separate thread. + let prof = sess.prof.clone(); + spawn(move || { + let _timer = prof.verbose_generic_activity("drop_ast"); + drop(krate); + }); // Don't hash unless necessary, because it's expensive. let opt_hir_hash = @@ -495,12 +500,12 @@ enum GenericArgsMode { impl<'a, 'hir> LoweringContext<'a, 'hir> { fn create_def( &mut self, - parent: LocalDefId, node_id: ast::NodeId, name: Option, def_kind: DefKind, span: Span, ) -> LocalDefId { + let parent = self.current_hir_id_owner.def_id; debug_assert_ne!(node_id, ast::DUMMY_NODE_ID); assert!( self.opt_local_def_id(node_id).is_none(), @@ -510,7 +515,11 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { self.tcx.hir_def_key(self.local_def_id(node_id)), ); - let def_id = self.tcx.at(span).create_def(parent, name, def_kind).def_id(); + let def_id = self + .tcx + .at(span) + .create_def(parent, name, def_kind, None, &mut self.resolver.disambiguator) + .def_id(); debug!("create_def: def_id_to_node_id[{:?}] <-> {:?}", def_id, node_id); self.resolver.node_id_to_def_id.insert(node_id, def_id); @@ -782,7 +791,6 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { LifetimeRes::Fresh { param, kind, .. } => { // Late resolution delegates to us the creation of the `LocalDefId`. let _def_id = self.create_def( - self.current_hir_id_owner.def_id, param, Some(kw::UnderscoreLifetime), DefKind::LifetimeParam, @@ -878,11 +886,13 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { id: HirId, attrs: &[Attribute], target_span: Span, + extra_hir_attributes: &[hir::Attribute], ) -> &'hir [hir::Attribute] { - if attrs.is_empty() { + if attrs.is_empty() && extra_hir_attributes.is_empty() { &[] } else { - let lowered_attrs = self.lower_attrs_vec(attrs, self.lower_span(target_span)); + let mut lowered_attrs = self.lower_attrs_vec(attrs, self.lower_span(target_span)); + lowered_attrs.extend(extra_hir_attributes.iter().cloned()); debug_assert_eq!(id.owner, self.current_hir_id_owner); let ret = self.arena.alloc_from_iter(lowered_attrs); @@ -917,7 +927,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { } fn lower_delim_args(&self, args: &DelimArgs) -> DelimArgs { - DelimArgs { dspan: args.dspan, delim: args.delim, tokens: args.tokens.flattened() } + args.clone() } /// Lower an associated item constraint. @@ -1080,7 +1090,11 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { itctx: ImplTraitContext, ) -> hir::GenericArg<'hir> { match arg { - ast::GenericArg::Lifetime(lt) => GenericArg::Lifetime(self.lower_lifetime(lt)), + ast::GenericArg::Lifetime(lt) => GenericArg::Lifetime(self.lower_lifetime( + lt, + LifetimeSource::Path { angle_brackets: hir::AngleBrackets::Full }, + lt.ident.into(), + )), ast::GenericArg::Type(ty) => { // We cannot just match on `TyKind::Infer` as `(_)` is represented as // `TyKind::Paren(TyKind::Infer)` and should also be lowered to `GenericArg::Infer` @@ -1199,35 +1213,11 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { TyKind::Slice(ty) => hir::TyKind::Slice(self.lower_ty(ty, itctx)), TyKind::Ptr(mt) => hir::TyKind::Ptr(self.lower_mt(mt, itctx)), TyKind::Ref(region, mt) => { - let region = region.unwrap_or_else(|| { - let id = if let Some(LifetimeRes::ElidedAnchor { start, end }) = - self.resolver.get_lifetime_res(t.id) - { - debug_assert_eq!(start.plus(1), end); - start - } else { - self.next_node_id() - }; - let span = self.tcx.sess.source_map().start_point(t.span).shrink_to_hi(); - Lifetime { ident: Ident::new(kw::UnderscoreLifetime, span), id } - }); - let lifetime = self.lower_lifetime(®ion); + let lifetime = self.lower_ty_direct_lifetime(t, *region); hir::TyKind::Ref(lifetime, self.lower_mt(mt, itctx)) } TyKind::PinnedRef(region, mt) => { - let region = region.unwrap_or_else(|| { - let id = if let Some(LifetimeRes::ElidedAnchor { start, end }) = - self.resolver.get_lifetime_res(t.id) - { - debug_assert_eq!(start.plus(1), end); - start - } else { - self.next_node_id() - }; - let span = self.tcx.sess.source_map().start_point(t.span).shrink_to_hi(); - Lifetime { ident: Ident::new(kw::UnderscoreLifetime, span), id } - }); - let lifetime = self.lower_lifetime(®ion); + let lifetime = self.lower_ty_direct_lifetime(t, *region); let kind = hir::TyKind::Ref(lifetime, self.lower_mt(mt, itctx)); let span = self.lower_span(t.span); let arg = hir::Ty { kind, span, hir_id: self.next_id() }; @@ -1247,7 +1237,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { safety: self.lower_safety(f.safety, hir::Safety::Safe), abi: self.lower_extern(f.ext), decl: self.lower_fn_decl(&f.decl, t.id, t.span, FnDeclKind::Pointer, None), - param_names: self.lower_fn_params_to_names(&f.decl), + param_idents: self.lower_fn_params_to_idents(&f.decl), })) } TyKind::UnsafeBinder(f) => { @@ -1303,7 +1293,11 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { } GenericBound::Outlives(lifetime) => { if lifetime_bound.is_none() { - lifetime_bound = Some(this.lower_lifetime(lifetime)); + lifetime_bound = Some(this.lower_lifetime( + lifetime, + LifetimeSource::Other, + lifetime.ident.into(), + )); } None } @@ -1394,6 +1388,31 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { hir::Ty { kind, span: self.lower_span(t.span), hir_id: self.lower_node_id(t.id) } } + fn lower_ty_direct_lifetime( + &mut self, + t: &Ty, + region: Option, + ) -> &'hir hir::Lifetime { + let (region, syntax) = match region { + Some(region) => (region, region.ident.into()), + + None => { + let id = if let Some(LifetimeRes::ElidedAnchor { start, end }) = + self.resolver.get_lifetime_res(t.id) + { + debug_assert_eq!(start.plus(1), end); + start + } else { + self.next_node_id() + }; + let span = self.tcx.sess.source_map().start_point(t.span).shrink_to_hi(); + let region = Lifetime { ident: Ident::new(kw::UnderscoreLifetime, span), id }; + (region, LifetimeSyntax::Hidden) + } + }; + self.lower_lifetime(®ion, LifetimeSource::Reference, syntax) + } + /// Lowers a `ReturnPositionOpaqueTy` (`-> impl Trait`) or a `TypeAliasesOpaqueTy` (`type F = /// impl Trait`): this creates the associated Opaque Type (TAIT) definition and then returns a /// HIR type that references the TAIT. @@ -1475,9 +1494,9 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { precise_capturing_args: &[PreciseCapturingArg], ) -> &'hir [hir::PreciseCapturingArg<'hir>] { self.arena.alloc_from_iter(precise_capturing_args.iter().map(|arg| match arg { - PreciseCapturingArg::Lifetime(lt) => { - hir::PreciseCapturingArg::Lifetime(self.lower_lifetime(lt)) - } + PreciseCapturingArg::Lifetime(lt) => hir::PreciseCapturingArg::Lifetime( + self.lower_lifetime(lt, LifetimeSource::PreciseCapturing, lt.ident.into()), + ), PreciseCapturingArg::Arg(path, id) => { let [segment] = path.segments.as_slice() else { panic!(); @@ -1494,20 +1513,15 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { })) } - fn lower_fn_params_to_names(&mut self, decl: &FnDecl) -> &'hir [Option] { + fn lower_fn_params_to_idents(&mut self, decl: &FnDecl) -> &'hir [Option] { self.arena.alloc_from_iter(decl.inputs.iter().map(|param| match param.pat.kind { - PatKind::Ident(_, ident, _) => { - if ident.name != kw::Empty { - Some(self.lower_ident(ident)) - } else { - None - } - } + PatKind::Missing => None, + PatKind::Ident(_, ident, _) => Some(self.lower_ident(ident)), PatKind::Wild => Some(Ident::new(kw::Underscore, self.lower_span(param.pat.span))), _ => { self.dcx().span_delayed_bug( param.pat.span, - "non-ident/wild param pat must trigger an error", + "non-missing/ident/wild param pat must trigger an error", ); None } @@ -1745,9 +1759,11 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { ) -> hir::GenericBound<'hir> { match tpb { GenericBound::Trait(p) => hir::GenericBound::Trait(self.lower_poly_trait_ref(p, itctx)), - GenericBound::Outlives(lifetime) => { - hir::GenericBound::Outlives(self.lower_lifetime(lifetime)) - } + GenericBound::Outlives(lifetime) => hir::GenericBound::Outlives(self.lower_lifetime( + lifetime, + LifetimeSource::OutlivesBound, + lifetime.ident.into(), + )), GenericBound::Use(args, span) => hir::GenericBound::Use( self.lower_precise_capturing_args(args), self.lower_span(*span), @@ -1755,12 +1771,28 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { } } - fn lower_lifetime(&mut self, l: &Lifetime) -> &'hir hir::Lifetime { - self.new_named_lifetime(l.id, l.id, l.ident, IsAnonInPath::No) + fn lower_lifetime( + &mut self, + l: &Lifetime, + source: LifetimeSource, + syntax: LifetimeSyntax, + ) -> &'hir hir::Lifetime { + self.new_named_lifetime(l.id, l.id, l.ident, source, syntax) } - fn lower_lifetime_anon_in_path(&mut self, id: NodeId, span: Span) -> &'hir hir::Lifetime { - self.new_named_lifetime(id, id, Ident::new(kw::UnderscoreLifetime, span), IsAnonInPath::Yes) + fn lower_lifetime_hidden_in_path( + &mut self, + id: NodeId, + span: Span, + angle_brackets: AngleBrackets, + ) -> &'hir hir::Lifetime { + self.new_named_lifetime( + id, + id, + Ident::new(kw::UnderscoreLifetime, span), + LifetimeSource::Path { angle_brackets }, + LifetimeSyntax::Hidden, + ) } #[instrument(level = "debug", skip(self))] @@ -1769,42 +1801,38 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { id: NodeId, new_id: NodeId, ident: Ident, - is_anon_in_path: IsAnonInPath, + source: LifetimeSource, + syntax: LifetimeSyntax, ) -> &'hir hir::Lifetime { - debug_assert_ne!(ident.name, kw::Empty); let res = self.resolver.get_lifetime_res(id).unwrap_or(LifetimeRes::Error); let res = match res { - LifetimeRes::Param { param, .. } => hir::LifetimeName::Param(param), + LifetimeRes::Param { param, .. } => hir::LifetimeKind::Param(param), LifetimeRes::Fresh { param, .. } => { debug_assert_eq!(ident.name, kw::UnderscoreLifetime); let param = self.local_def_id(param); - hir::LifetimeName::Param(param) + hir::LifetimeKind::Param(param) } LifetimeRes::Infer => { debug_assert_eq!(ident.name, kw::UnderscoreLifetime); - hir::LifetimeName::Infer + hir::LifetimeKind::Infer } LifetimeRes::Static { .. } => { debug_assert!(matches!(ident.name, kw::StaticLifetime | kw::UnderscoreLifetime)); - hir::LifetimeName::Static + hir::LifetimeKind::Static } - LifetimeRes::Error => hir::LifetimeName::Error, + LifetimeRes::Error => hir::LifetimeKind::Error, LifetimeRes::ElidedAnchor { .. } => { panic!("Unexpected `ElidedAnchar` {:?} at {:?}", ident, ident.span); } }; - #[cfg(debug_assertions)] - if is_anon_in_path == IsAnonInPath::Yes { - debug_assert_eq!(ident.name, kw::UnderscoreLifetime); - } - debug!(?res); self.arena.alloc(hir::Lifetime::new( self.lower_node_id(new_id), self.lower_ident(ident), res, - is_anon_in_path, + source, + syntax, )) } @@ -1833,7 +1861,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { let (name, kind) = self.lower_generic_param_kind(param, source); let hir_id = self.lower_node_id(param.id); - self.lower_attrs(hir_id, ¶m.attrs, param.span()); + self.lower_attrs(hir_id, ¶m.attrs, param.span(), &[]); hir::GenericParam { hir_id, def_id: self.local_def_id(param.id), @@ -2039,7 +2067,9 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { } fn lower_array_length_to_const_arg(&mut self, c: &AnonConst) -> &'hir hir::ConstArg<'hir> { - match c.value.kind { + // We cannot just match on `ExprKind::Underscore` as `(_)` is represented as + // `ExprKind::Paren(ExprKind::Underscore)` and should also be lowered to `GenericArg::Infer` + match c.value.peel_parens().kind { ExprKind::Underscore => { if !self.tcx.features().generic_arg_infer() { feature_err( @@ -2088,8 +2118,6 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { hir::ConstArgKind::Path(qpath) } else { // Construct an AnonConst where the expr is the "ty"'s path. - - let parent_def_id = self.current_hir_id_owner.def_id; let node_id = self.next_node_id(); let span = self.lower_span(span); @@ -2097,7 +2125,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { // We're lowering a const argument that was originally thought to be a type argument, // so the def collector didn't create the def ahead of time. That's why we have to do // it here. - let def_id = self.create_def(parent_def_id, node_id, None, DefKind::AnonConst, span); + let def_id = self.create_def(node_id, None, DefKind::AnonConst, span); let hir_id = self.lower_node_id(node_id); let path_expr = Expr { @@ -2223,6 +2251,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { self.attrs.insert(hir_id.local_id, a); } let local = hir::LetStmt { + super_: None, hir_id, init, pat, @@ -2392,8 +2421,9 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { let r = hir::Lifetime::new( self.next_id(), Ident::new(kw::UnderscoreLifetime, self.lower_span(span)), - hir::LifetimeName::ImplicitObjectLifetimeDefault, - IsAnonInPath::No, + hir::LifetimeKind::ImplicitObjectLifetimeDefault, + LifetimeSource::Other, + LifetimeSyntax::Hidden, ); debug!("elided_dyn_bound: r={:?}", r); self.arena.alloc(r) diff --git a/compiler/rustc_ast_lowering/src/pat.rs b/compiler/rustc_ast_lowering/src/pat.rs index 07cc64a1358ee..7b1665a124baf 100644 --- a/compiler/rustc_ast_lowering/src/pat.rs +++ b/compiler/rustc_ast_lowering/src/pat.rs @@ -26,6 +26,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { let pat_hir_id = self.lower_node_id(pattern.id); let node = loop { match &pattern.kind { + PatKind::Missing => break hir::PatKind::Missing, PatKind::Wild => break hir::PatKind::Wild, PatKind::Never => break hir::PatKind::Never, PatKind::Ident(binding_mode, ident, sub) => { @@ -93,7 +94,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { let fs = self.arena.alloc_from_iter(fields.iter().map(|f| { let hir_id = self.lower_node_id(f.id); - self.lower_attrs(hir_id, &f.attrs, f.span); + self.lower_attrs(hir_id, &f.attrs, f.span, &[]); hir::PatField { hir_id, @@ -463,6 +464,11 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { ) }), ), + TyPatKind::Or(variants) => { + hir::TyPatKind::Or(self.arena.alloc_from_iter( + variants.iter().map(|pat| self.lower_ty_pat_mut(pat, base_type)), + )) + } TyPatKind::Err(guar) => hir::TyPatKind::Err(*guar), }; @@ -516,14 +522,13 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { span: Span, base_type: Span, ) -> &'hir hir::ConstArg<'hir> { - let parent_def_id = self.current_hir_id_owner.def_id; let node_id = self.next_node_id(); // Add a definition for the in-band const def. // We're generating a range end that didn't exist in the AST, // so the def collector didn't create the def ahead of time. That's why we have to do // it here. - let def_id = self.create_def(parent_def_id, node_id, None, DefKind::AnonConst, span); + let def_id = self.create_def(node_id, None, DefKind::AnonConst, span); let hir_id = self.lower_node_id(node_id); let unstable_span = self.mark_span_with_reason( diff --git a/compiler/rustc_ast_lowering/src/path.rs b/compiler/rustc_ast_lowering/src/path.rs index c464c159c34cd..5cda64ce7b4ba 100644 --- a/compiler/rustc_ast_lowering/src/path.rs +++ b/compiler/rustc_ast_lowering/src/path.rs @@ -1,11 +1,10 @@ use std::sync::Arc; use rustc_ast::{self as ast, *}; -use rustc_hir as hir; -use rustc_hir::GenericArg; use rustc_hir::def::{DefKind, PartialRes, Res}; use rustc_hir::def_id::DefId; -use rustc_middle::span_bug; +use rustc_hir::{self as hir, GenericArg}; +use rustc_middle::{span_bug, ty}; use rustc_session::parse::add_feature_diagnostics; use rustc_span::{BytePos, DUMMY_SP, DesugaringKind, Ident, Span, Symbol, sym}; use smallvec::{SmallVec, smallvec}; @@ -433,24 +432,31 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { // Note: these spans are used for diagnostics when they can't be inferred. // See rustc_resolve::late::lifetimes::LifetimeContext::add_missing_lifetime_specifiers_label - let elided_lifetime_span = if generic_args.span.is_empty() { - // If there are no brackets, use the identifier span. + let (elided_lifetime_span, angle_brackets) = if generic_args.span.is_empty() { + // No brackets, e.g. `Path`: use an empty span just past the end of the identifier. // HACK: we use find_ancestor_inside to properly suggest elided spans in paths // originating from macros, since the segment's span might be from a macro arg. - segment_ident_span.find_ancestor_inside(path_span).unwrap_or(path_span) - } else if generic_args.is_empty() { - // If there are brackets, but not generic arguments, then use the opening bracket - generic_args.span.with_hi(generic_args.span.lo() + BytePos(1)) + ( + segment_ident_span.find_ancestor_inside(path_span).unwrap_or(path_span), + hir::AngleBrackets::Missing, + ) } else { - // Else use an empty span right after the opening bracket. - generic_args.span.with_lo(generic_args.span.lo() + BytePos(1)).shrink_to_lo() + // Brackets, e.g. `Path<>` or `Path`: use an empty span just after the `<`. + ( + generic_args.span.with_lo(generic_args.span.lo() + BytePos(1)).shrink_to_lo(), + if generic_args.is_empty() { + hir::AngleBrackets::Empty + } else { + hir::AngleBrackets::Full + }, + ) }; generic_args.args.insert_many( 0, - (start.as_u32()..end.as_u32()).map(|i| { - let id = NodeId::from_u32(i); - let l = self.lower_lifetime_anon_in_path(id, elided_lifetime_span); + (start..end).map(|id| { + let l = + self.lower_lifetime_hidden_in_path(id, elided_lifetime_span, angle_brackets); GenericArg::Lifetime(l) }), ); @@ -591,14 +597,10 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { /// lowering of `async Fn()` bounds to desugar to another trait like `LendingFn`. fn map_trait_to_async_trait(&self, def_id: DefId) -> Option { let lang_items = self.tcx.lang_items(); - if Some(def_id) == lang_items.fn_trait() { - lang_items.async_fn_trait() - } else if Some(def_id) == lang_items.fn_mut_trait() { - lang_items.async_fn_mut_trait() - } else if Some(def_id) == lang_items.fn_once_trait() { - lang_items.async_fn_once_trait() - } else { - None + match self.tcx.fn_trait_kind_from_def_id(def_id)? { + ty::ClosureKind::Fn => lang_items.async_fn_trait(), + ty::ClosureKind::FnMut => lang_items.async_fn_mut_trait(), + ty::ClosureKind::FnOnce => lang_items.async_fn_once_trait(), } } } diff --git a/compiler/rustc_ast_lowering/src/stability.rs b/compiler/rustc_ast_lowering/src/stability.rs index a2004bbb39f09..eb052ba1c6d78 100644 --- a/compiler/rustc_ast_lowering/src/stability.rs +++ b/compiler/rustc_ast_lowering/src/stability.rs @@ -79,10 +79,6 @@ pub fn extern_abi_stability(abi: ExternAbi) -> Result<(), UnstableAbi> { | ExternAbi::SysV64 { .. } | ExternAbi::System { .. } | ExternAbi::EfiApi => Ok(()), - // implementation details - ExternAbi::RustIntrinsic => { - Err(UnstableAbi { abi, feature: sym::intrinsics, explain: GateReason::ImplDetail }) - } ExternAbi::Unadjusted => { Err(UnstableAbi { abi, feature: sym::abi_unadjusted, explain: GateReason::ImplDetail }) } diff --git a/compiler/rustc_ast_passes/messages.ftl b/compiler/rustc_ast_passes/messages.ftl index 25944392a52a9..80754a8f65a69 100644 --- a/compiler/rustc_ast_passes/messages.ftl +++ b/compiler/rustc_ast_passes/messages.ftl @@ -79,6 +79,10 @@ ast_passes_extern_types_cannot = `type`s inside `extern` blocks cannot have {$de .suggestion = remove the {$remove_descr} .label = `extern` block begins here +ast_passes_extern_without_abi = `extern` declarations without an explicit ABI are disallowed + .suggestion = specify an ABI + .help = prior to Rust 2024, a default ABI was inferred + ast_passes_feature_on_non_nightly = `#![feature]` may not be used on the {$channel} release channel .suggestion = remove the attribute .stable_since = the feature `{$name}` has been stable since `{$since}` and no longer requires an attribute to enable diff --git a/compiler/rustc_ast_passes/src/ast_validation.rs b/compiler/rustc_ast_passes/src/ast_validation.rs index da739b0e4532b..e7609b8e05286 100644 --- a/compiler/rustc_ast_passes/src/ast_validation.rs +++ b/compiler/rustc_ast_passes/src/ast_validation.rs @@ -47,14 +47,14 @@ enum SelfSemantic { } enum TraitOrTraitImpl { - Trait { span: Span, constness: Option }, - TraitImpl { constness: Const, polarity: ImplPolarity, trait_ref: Span }, + Trait { span: Span, constness_span: Option }, + TraitImpl { constness: Const, polarity: ImplPolarity, trait_ref_span: Span }, } impl TraitOrTraitImpl { fn constness(&self) -> Option { match self { - Self::Trait { constness: Some(span), .. } + Self::Trait { constness_span: Some(span), .. } | Self::TraitImpl { constness: Const::Yes(span), .. } => Some(*span), _ => None, } @@ -66,7 +66,7 @@ struct AstValidator<'a> { features: &'a Features, /// The span of the `extern` in an `extern { ... }` block, if any. - extern_mod: Option, + extern_mod_span: Option, outer_trait_or_trait_impl: Option, @@ -75,13 +75,17 @@ struct AstValidator<'a> { /// Used to ban nested `impl Trait`, e.g., `impl Into`. /// Nested `impl Trait` _is_ allowed in associated type position, /// e.g., `impl Iterator`. - outer_impl_trait: Option, + outer_impl_trait_span: Option, disallow_tilde_const: Option, /// Used to ban explicit safety on foreign items when the extern block is not marked as unsafe. extern_mod_safety: Option, + lint_node_id: NodeId, + + is_sdylib_interface: bool, + lint_buffer: &'a mut LintBuffer, } @@ -96,17 +100,22 @@ impl<'a> AstValidator<'a> { trait_.map(|(constness, polarity, trait_ref)| TraitOrTraitImpl::TraitImpl { constness, polarity, - trait_ref: trait_ref.path.span, + trait_ref_span: trait_ref.path.span, }), ); f(self); self.outer_trait_or_trait_impl = old; } - fn with_in_trait(&mut self, span: Span, constness: Option, f: impl FnOnce(&mut Self)) { + fn with_in_trait( + &mut self, + span: Span, + constness_span: Option, + f: impl FnOnce(&mut Self), + ) { let old = mem::replace( &mut self.outer_trait_or_trait_impl, - Some(TraitOrTraitImpl::Trait { span, constness }), + Some(TraitOrTraitImpl::Trait { span, constness_span }), ); f(self); self.outer_trait_or_trait_impl = old; @@ -170,10 +179,10 @@ impl<'a> AstValidator<'a> { Err(errors::WhereClauseBeforeTypeAlias { span, sugg }) } - fn with_impl_trait(&mut self, outer: Option, f: impl FnOnce(&mut Self)) { - let old = mem::replace(&mut self.outer_impl_trait, outer); + fn with_impl_trait(&mut self, outer_span: Option, f: impl FnOnce(&mut Self)) { + let old = mem::replace(&mut self.outer_impl_trait_span, outer_span); f(self); - self.outer_impl_trait = old; + self.outer_impl_trait_span = old; } // Mirrors `visit::walk_ty`, but tracks relevant state. @@ -239,7 +248,7 @@ impl<'a> AstValidator<'a> { fn check_decl_no_pat(decl: &FnDecl, mut report_err: impl FnMut(Span, Option, bool)) { for Param { pat, .. } in &decl.inputs { match pat.kind { - PatKind::Ident(BindingMode::NONE, _, None) | PatKind::Wild => {} + PatKind::Missing | PatKind::Ident(BindingMode::NONE, _, None) | PatKind::Wild => {} PatKind::Ident(BindingMode::MUT, ident, None) => { report_err(pat.span, Some(ident), true) } @@ -258,21 +267,22 @@ impl<'a> AstValidator<'a> { && let TraitOrTraitImpl::TraitImpl { constness: Const::No, polarity: ImplPolarity::Positive, - trait_ref, + trait_ref_span, .. } = parent { - Some(trait_ref.shrink_to_lo()) + Some(trait_ref_span.shrink_to_lo()) } else { None }; - let make_trait_const_sugg = - if const_trait_impl && let TraitOrTraitImpl::Trait { span, constness: None } = parent { - Some(span.shrink_to_lo()) - } else { - None - }; + let make_trait_const_sugg = if const_trait_impl + && let TraitOrTraitImpl::Trait { span, constness_span: None } = parent + { + Some(span.shrink_to_lo()) + } else { + None + }; let parent_constness = parent.constness(); self.dcx().emit_err(errors::TraitFnConst { @@ -341,7 +351,7 @@ impl<'a> AstValidator<'a> { sym::forbid, sym::warn, ]; - !arr.contains(&attr.name_or_empty()) && rustc_attr_parsing::is_builtin_attr(*attr) + !attr.has_any_name(&arr) && rustc_attr_parsing::is_builtin_attr(*attr) }) .for_each(|attr| { if attr.is_doc_comment() { @@ -448,13 +458,13 @@ impl<'a> AstValidator<'a> { check_where_clause(where_clauses.after); } - fn check_foreign_kind_bodyless(&self, ident: Ident, kind: &str, body: Option) { - let Some(body) = body else { + fn check_foreign_kind_bodyless(&self, ident: Ident, kind: &str, body_span: Option) { + let Some(body_span) = body_span else { return; }; self.dcx().emit_err(errors::BodyInExtern { span: ident.span, - body, + body: body_span, block: self.current_extern_span(), kind, }); @@ -473,7 +483,7 @@ impl<'a> AstValidator<'a> { } fn current_extern_span(&self) -> Span { - self.sess.source_map().guess_head_span(self.extern_mod.unwrap()) + self.sess.source_map().guess_head_span(self.extern_mod_span.unwrap()) } /// An `fn` in `extern { ... }` cannot have qualifiers, e.g. `async fn`. @@ -583,9 +593,10 @@ impl<'a> AstValidator<'a> { self.dcx().emit_err(errors::ModuleNonAscii { span: ident.span, name: ident.name }); } - fn deny_generic_params(&self, generics: &Generics, ident: Span) { + fn deny_generic_params(&self, generics: &Generics, ident_span: Span) { if !generics.params.is_empty() { - self.dcx().emit_err(errors::AutoTraitGeneric { span: generics.span, ident }); + self.dcx() + .emit_err(errors::AutoTraitGeneric { span: generics.span, ident: ident_span }); } } @@ -605,11 +616,11 @@ impl<'a> AstValidator<'a> { } } - fn deny_items(&self, trait_items: &[P], ident: Span) { + fn deny_items(&self, trait_items: &[P], ident_span: Span) { if !trait_items.is_empty() { - let spans: Vec<_> = trait_items.iter().map(|i| i.ident.span).collect(); + let spans: Vec<_> = trait_items.iter().map(|i| i.kind.ident().unwrap().span).collect(); let total = trait_items.first().unwrap().span.to(trait_items.last().unwrap().span); - self.dcx().emit_err(errors::AutoTraitItems { spans, total, ident }); + self.dcx().emit_err(errors::AutoTraitItems { spans, total, ident: ident_span }); } } @@ -677,7 +688,7 @@ impl<'a> AstValidator<'a> { self.dcx().emit_err(errors::PatternFnPointer { span }); }); if let Extern::Implicit(extern_span) = bfty.ext { - self.maybe_lint_missing_abi(extern_span, ty.id); + self.handle_missing_abi(extern_span, ty.id); } } TyKind::TraitObject(bounds, ..) => { @@ -694,7 +705,7 @@ impl<'a> AstValidator<'a> { } } TyKind::ImplTrait(_, bounds) => { - if let Some(outer_impl_trait_sp) = self.outer_impl_trait { + if let Some(outer_impl_trait_sp) = self.outer_impl_trait_span { self.dcx().emit_err(errors::NestedImplTrait { span: ty.span, outer: outer_impl_trait_sp, @@ -710,10 +721,12 @@ impl<'a> AstValidator<'a> { } } - fn maybe_lint_missing_abi(&mut self, span: Span, id: NodeId) { + fn handle_missing_abi(&mut self, span: Span, id: NodeId) { // FIXME(davidtwco): This is a hack to detect macros which produce spans of the // call site which do not have a macro backtrace. See #61963. - if self + if span.edition().at_least_edition_future() && self.features.explicit_extern_abis() { + self.dcx().emit_err(errors::MissingAbi { span }); + } else if self .sess .source_map() .span_to_snippet(span) @@ -727,6 +740,19 @@ impl<'a> AstValidator<'a> { ) } } + + // Used within `visit_item` for item kinds where we don't call `visit::walk_item`. + fn visit_attrs_vis(&mut self, attrs: &'a AttrVec, vis: &'a Visibility) { + walk_list!(self, visit_attribute, attrs); + self.visit_vis(vis); + } + + // Used within `visit_item` for item kinds where we don't call `visit::walk_item`. + fn visit_attrs_vis_ident(&mut self, attrs: &'a AttrVec, vis: &'a Visibility, ident: &'a Ident) { + walk_list!(self, visit_attribute, attrs); + self.visit_vis(vis); + self.visit_ident(ident); + } } /// Checks that generic parameters are in the correct order, @@ -804,7 +830,7 @@ fn validate_generic_param_order(dcx: DiagCtxtHandle<'_>, generics: &[GenericPara impl<'a> Visitor<'a> for AstValidator<'a> { fn visit_attribute(&mut self, attr: &Attribute) { - validate_attr::check_attr(&self.sess.psess, attr); + validate_attr::check_attr(&self.sess.psess, attr, self.lint_node_id); } fn visit_ty(&mut self, ty: &'a Ty) { @@ -817,8 +843,12 @@ impl<'a> Visitor<'a> for AstValidator<'a> { self.has_proc_macro_decls = true; } - if attr::contains_name(&item.attrs, sym::no_mangle) { - self.check_nomangle_item_asciionly(item.ident, item.span); + let previous_lint_node_id = mem::replace(&mut self.lint_node_id, item.id); + + if let Some(ident) = item.kind.ident() + && attr::contains_name(&item.attrs, sym::no_mangle) + { + self.check_nomangle_item_asciionly(ident, item.span); } match &item.kind { @@ -832,37 +862,33 @@ impl<'a> Visitor<'a> for AstValidator<'a> { self_ty, items, }) => { - self.with_in_trait_impl(Some((*constness, *polarity, t)), |this| { - this.visibility_not_permitted( - &item.vis, - errors::VisibilityNotPermittedNote::TraitImpl, - ); - if let TyKind::Dummy = self_ty.kind { - // Abort immediately otherwise the `TyKind::Dummy` will reach HIR lowering, - // which isn't allowed. Not a problem for this obscure, obsolete syntax. - this.dcx().emit_fatal(errors::ObsoleteAuto { span: item.span }); - } - if let (&Safety::Unsafe(span), &ImplPolarity::Negative(sp)) = (safety, polarity) - { - this.dcx().emit_err(errors::UnsafeNegativeImpl { - span: sp.to(t.path.span), - negative: sp, - r#unsafe: span, - }); - } + self.visit_attrs_vis(&item.attrs, &item.vis); + self.visibility_not_permitted( + &item.vis, + errors::VisibilityNotPermittedNote::TraitImpl, + ); + if let TyKind::Dummy = self_ty.kind { + // Abort immediately otherwise the `TyKind::Dummy` will reach HIR lowering, + // which isn't allowed. Not a problem for this obscure, obsolete syntax. + self.dcx().emit_fatal(errors::ObsoleteAuto { span: item.span }); + } + if let (&Safety::Unsafe(span), &ImplPolarity::Negative(sp)) = (safety, polarity) { + self.dcx().emit_err(errors::UnsafeNegativeImpl { + span: sp.to(t.path.span), + negative: sp, + r#unsafe: span, + }); + } - this.visit_vis(&item.vis); - this.visit_ident(&item.ident); - let disallowed = matches!(constness, Const::No) - .then(|| TildeConstReason::TraitImpl { span: item.span }); - this.with_tilde_const(disallowed, |this| this.visit_generics(generics)); - this.visit_trait_ref(t); - this.visit_ty(self_ty); + let disallowed = matches!(constness, Const::No) + .then(|| TildeConstReason::TraitImpl { span: item.span }); + self.with_tilde_const(disallowed, |this| this.visit_generics(generics)); + self.visit_trait_ref(t); + self.visit_ty(self_ty); + self.with_in_trait_impl(Some((*constness, *polarity, t)), |this| { walk_list!(this, visit_assoc_item, items, AssocCtxt::Impl { of_trait: true }); }); - walk_list!(self, visit_attribute, &item.attrs); - return; // Avoid visiting again. } ItemKind::Impl(box Impl { safety, @@ -882,49 +908,57 @@ impl<'a> Visitor<'a> for AstValidator<'a> { only_trait, }; - self.with_in_trait_impl(None, |this| { - this.visibility_not_permitted( - &item.vis, - errors::VisibilityNotPermittedNote::IndividualImplItems, - ); - if let &Safety::Unsafe(span) = safety { - this.dcx().emit_err(errors::InherentImplCannotUnsafe { - span: self_ty.span, - annotation_span: span, - annotation: "unsafe", - self_ty: self_ty.span, - }); - } - if let &ImplPolarity::Negative(span) = polarity { - this.dcx().emit_err(error(span, "negative", false)); - } - if let &Defaultness::Default(def_span) = defaultness { - this.dcx().emit_err(error(def_span, "`default`", true)); - } - if let &Const::Yes(span) = constness { - this.dcx().emit_err(error(span, "`const`", true)); - } + self.visit_attrs_vis(&item.attrs, &item.vis); + self.visibility_not_permitted( + &item.vis, + errors::VisibilityNotPermittedNote::IndividualImplItems, + ); + if let &Safety::Unsafe(span) = safety { + self.dcx().emit_err(errors::InherentImplCannotUnsafe { + span: self_ty.span, + annotation_span: span, + annotation: "unsafe", + self_ty: self_ty.span, + }); + } + if let &ImplPolarity::Negative(span) = polarity { + self.dcx().emit_err(error(span, "negative", false)); + } + if let &Defaultness::Default(def_span) = defaultness { + self.dcx().emit_err(error(def_span, "`default`", true)); + } + if let &Const::Yes(span) = constness { + self.dcx().emit_err(error(span, "`const`", true)); + } - this.visit_vis(&item.vis); - this.visit_ident(&item.ident); - this.with_tilde_const( - Some(TildeConstReason::Impl { span: item.span }), - |this| this.visit_generics(generics), - ); - this.visit_ty(self_ty); + self.with_tilde_const(Some(TildeConstReason::Impl { span: item.span }), |this| { + this.visit_generics(generics) + }); + self.visit_ty(self_ty); + self.with_in_trait_impl(None, |this| { walk_list!(this, visit_assoc_item, items, AssocCtxt::Impl { of_trait: false }); }); - walk_list!(self, visit_attribute, &item.attrs); - return; // Avoid visiting again. } ItemKind::Fn( - func - @ box Fn { defaultness, generics: _, sig, contract: _, body, define_opaque: _ }, + func @ box Fn { + defaultness, + ident, + generics: _, + sig, + contract: _, + body, + define_opaque: _, + eii_impl, + }, ) => { + self.visit_attrs_vis_ident(&item.attrs, &item.vis, ident); self.check_defaultness(item.span, *defaultness); - let is_intrinsic = - item.attrs.iter().any(|a| a.name_or_empty() == sym::rustc_intrinsic); + for EIIImpl { node_id, eii_macro_path, .. } in eii_impl { + self.visit_path(eii_macro_path, *node_id); + } + + let is_intrinsic = item.attrs.iter().any(|a| a.has_name(sym::rustc_intrinsic)); if body.is_none() && !is_intrinsic { self.dcx().emit_err(errors::FnWithoutBody { span: item.span, @@ -948,45 +982,40 @@ impl<'a> Visitor<'a> for AstValidator<'a> { }); } - self.visit_vis(&item.vis); - self.visit_ident(&item.ident); - let kind = FnKind::Fn(FnCtxt::Free, &item.ident, &item.vis, &*func); + let kind = FnKind::Fn(FnCtxt::Free, &item.vis, &*func); self.visit_fn(kind, item.span, item.id); - walk_list!(self, visit_attribute, &item.attrs); - return; // Avoid visiting again. } ItemKind::ForeignMod(ForeignMod { extern_span, abi, safety, .. }) => { - self.with_in_extern_mod(*safety, |this| { - let old_item = mem::replace(&mut this.extern_mod, Some(item.span)); - this.visibility_not_permitted( - &item.vis, - errors::VisibilityNotPermittedNote::IndividualForeignItems, - ); - - if &Safety::Default == safety { - if item.span.at_least_rust_2024() { - this.dcx().emit_err(errors::MissingUnsafeOnExtern { span: item.span }); - } else { - this.lint_buffer.buffer_lint( - MISSING_UNSAFE_ON_EXTERN, - item.id, - item.span, - BuiltinLintDiag::MissingUnsafeOnExtern { - suggestion: item.span.shrink_to_lo(), - }, - ); - } + let old_item = mem::replace(&mut self.extern_mod_span, Some(item.span)); + self.visibility_not_permitted( + &item.vis, + errors::VisibilityNotPermittedNote::IndividualForeignItems, + ); + + if &Safety::Default == safety { + if item.span.at_least_rust_2024() { + self.dcx().emit_err(errors::MissingUnsafeOnExtern { span: item.span }); + } else { + self.lint_buffer.buffer_lint( + MISSING_UNSAFE_ON_EXTERN, + item.id, + item.span, + BuiltinLintDiag::MissingUnsafeOnExtern { + suggestion: item.span.shrink_to_lo(), + }, + ); } + } - if abi.is_none() { - this.maybe_lint_missing_abi(*extern_span, item.id); - } + if abi.is_none() { + self.handle_missing_abi(*extern_span, item.id); + } + self.with_in_extern_mod(*safety, |this| { visit::walk_item(this, item); - this.extern_mod = old_item; }); - return; // Avoid visiting again. + self.extern_mod_span = old_item; } - ItemKind::Enum(def, _) => { + ItemKind::Enum(_, def, _) => { for variant in &def.variants { self.visibility_not_permitted( &variant.vis, @@ -999,36 +1028,33 @@ impl<'a> Visitor<'a> for AstValidator<'a> { ); } } + visit::walk_item(self, item) } - ItemKind::Trait(box Trait { is_auto, generics, bounds, items, .. }) => { + ItemKind::Trait(box Trait { is_auto, generics, ident, bounds, items, .. }) => { + self.visit_attrs_vis_ident(&item.attrs, &item.vis, ident); let is_const_trait = attr::find_by_name(&item.attrs, sym::const_trait).map(|attr| attr.span); - self.with_in_trait(item.span, is_const_trait, |this| { - if *is_auto == IsAuto::Yes { - // Auto traits cannot have generics, super traits nor contain items. - this.deny_generic_params(generics, item.ident.span); - this.deny_super_traits(bounds, item.ident.span); - this.deny_where_clause(&generics.where_clause, item.ident.span); - this.deny_items(items, item.ident.span); - } + if *is_auto == IsAuto::Yes { + // Auto traits cannot have generics, super traits nor contain items. + self.deny_generic_params(generics, ident.span); + self.deny_super_traits(bounds, ident.span); + self.deny_where_clause(&generics.where_clause, ident.span); + self.deny_items(items, ident.span); + } - // Equivalent of `visit::walk_item` for `ItemKind::Trait` that inserts a bound - // context for the supertraits. - this.visit_vis(&item.vis); - this.visit_ident(&item.ident); - let disallowed = is_const_trait - .is_none() - .then(|| TildeConstReason::Trait { span: item.span }); - this.with_tilde_const(disallowed, |this| { - this.visit_generics(generics); - walk_list!(this, visit_param_bound, bounds, BoundKind::SuperTraits) - }); + // Equivalent of `visit::walk_item` for `ItemKind::Trait` that inserts a bound + // context for the supertraits. + let disallowed = + is_const_trait.is_none().then(|| TildeConstReason::Trait { span: item.span }); + self.with_tilde_const(disallowed, |this| { + this.visit_generics(generics); + walk_list!(this, visit_param_bound, bounds, BoundKind::SuperTraits) + }); + self.with_in_trait(item.span, is_const_trait, |this| { walk_list!(this, visit_assoc_item, items, AssocCtxt::Trait); }); - walk_list!(self, visit_attribute, &item.attrs); - return; // Avoid visiting again } - ItemKind::Mod(safety, mod_kind) => { + ItemKind::Mod(safety, ident, mod_kind) => { if let &Safety::Unsafe(span) = safety { self.dcx().emit_err(errors::UnsafeItem { span, kind: "module" }); } @@ -1036,36 +1062,31 @@ impl<'a> Visitor<'a> for AstValidator<'a> { if !matches!(mod_kind, ModKind::Loaded(_, Inline::Yes, _, _)) && !attr::contains_name(&item.attrs, sym::path) { - self.check_mod_file_item_asciionly(item.ident); + self.check_mod_file_item_asciionly(*ident); } + visit::walk_item(self, item) } - ItemKind::Struct(vdata, generics) => match vdata { + ItemKind::Struct(ident, vdata, generics) => match vdata { VariantData::Struct { fields, .. } => { - self.visit_vis(&item.vis); - self.visit_ident(&item.ident); + self.visit_attrs_vis_ident(&item.attrs, &item.vis, ident); self.visit_generics(generics); // Permit `Anon{Struct,Union}` as field type. walk_list!(self, visit_struct_field_def, fields); - walk_list!(self, visit_attribute, &item.attrs); - return; } - _ => {} + _ => visit::walk_item(self, item), }, - ItemKind::Union(vdata, generics) => { + ItemKind::Union(ident, vdata, generics) => { if vdata.fields().is_empty() { self.dcx().emit_err(errors::FieldlessUnion { span: item.span }); } match vdata { VariantData::Struct { fields, .. } => { - self.visit_vis(&item.vis); - self.visit_ident(&item.ident); + self.visit_attrs_vis_ident(&item.attrs, &item.vis, ident); self.visit_generics(generics); // Permit `Anon{Struct,Union}` as field type. walk_list!(self, visit_struct_field_def, fields); - walk_list!(self, visit_attribute, &item.attrs); - return; } - _ => {} + _ => visit::walk_item(self, item), } } ItemKind::Const(box ConstItem { defaultness, expr, .. }) => { @@ -1076,6 +1097,7 @@ impl<'a> Visitor<'a> for AstValidator<'a> { replace_span: self.ending_semi_or_hi(item.span), }); } + visit::walk_item(self, item); } ItemKind::Static(box StaticItem { expr, safety, .. }) => { self.check_item_safety(item.span, *safety); @@ -1089,6 +1111,7 @@ impl<'a> Visitor<'a> for AstValidator<'a> { replace_span: self.ending_semi_or_hi(item.span), }); } + visit::walk_item(self, item); } ItemKind::TyAlias( ty_alias @ box TyAlias { defaultness, bounds, where_clauses, ty, .. }, @@ -1112,23 +1135,25 @@ impl<'a> Visitor<'a> for AstValidator<'a> { help: self.sess.is_nightly_build(), }); } + visit::walk_item(self, item); } - _ => {} + _ => visit::walk_item(self, item), } - visit::walk_item(self, item); + self.lint_node_id = previous_lint_node_id; } fn visit_foreign_item(&mut self, fi: &'a ForeignItem) { match &fi.kind { - ForeignItemKind::Fn(box Fn { defaultness, sig, body, .. }) => { + ForeignItemKind::Fn(box Fn { defaultness, ident, sig, body, .. }) => { self.check_defaultness(fi.span, *defaultness); - self.check_foreign_fn_bodyless(fi.ident, body.as_deref()); + self.check_foreign_fn_bodyless(*ident, body.as_deref()); self.check_foreign_fn_headerless(sig.header); - self.check_foreign_item_ascii_only(fi.ident); + self.check_foreign_item_ascii_only(*ident); } ForeignItemKind::TyAlias(box TyAlias { defaultness, + ident, generics, where_clauses, bounds, @@ -1136,15 +1161,15 @@ impl<'a> Visitor<'a> for AstValidator<'a> { .. }) => { self.check_defaultness(fi.span, *defaultness); - self.check_foreign_kind_bodyless(fi.ident, "type", ty.as_ref().map(|b| b.span)); + self.check_foreign_kind_bodyless(*ident, "type", ty.as_ref().map(|b| b.span)); self.check_type_no_bounds(bounds, "`extern` blocks"); self.check_foreign_ty_genericless(generics, where_clauses); - self.check_foreign_item_ascii_only(fi.ident); + self.check_foreign_item_ascii_only(*ident); } - ForeignItemKind::Static(box StaticItem { expr, safety, .. }) => { + ForeignItemKind::Static(box StaticItem { ident, safety, expr, .. }) => { self.check_item_safety(fi.span, *safety); - self.check_foreign_kind_bodyless(fi.ident, "static", expr.as_ref().map(|b| b.span)); - self.check_foreign_item_ascii_only(fi.ident); + self.check_foreign_kind_bodyless(*ident, "static", expr.as_ref().map(|b| b.span)); + self.check_foreign_item_ascii_only(*ident); } ForeignItemKind::MacCall(..) => {} } @@ -1351,7 +1376,6 @@ impl<'a> Visitor<'a> for AstValidator<'a> { } if let FnKind::Fn( - _, _, _, Fn { @@ -1360,11 +1384,11 @@ impl<'a> Visitor<'a> for AstValidator<'a> { }, ) = fk { - self.maybe_lint_missing_abi(*extern_span, id); + self.handle_missing_abi(*extern_span, id); } // Functions without bodies cannot have patterns. - if let FnKind::Fn(ctxt, _, _, Fn { body: None, sig, .. }) = fk { + if let FnKind::Fn(ctxt, _, Fn { body: None, sig, .. }) = fk { Self::check_decl_no_pat(&sig.decl, |span, ident, mut_ident| { if mut_ident && matches!(ctxt, FnCtxt::Assoc(_)) { if let Some(ident) = ident { @@ -1398,15 +1422,17 @@ impl<'a> Visitor<'a> for AstValidator<'a> { .is_some(); let disallowed = (!tilde_const_allowed).then(|| match fk { - FnKind::Fn(_, ident, _, _) => TildeConstReason::Function { ident: ident.span }, + FnKind::Fn(_, _, f) => TildeConstReason::Function { ident: f.ident.span }, FnKind::Closure(..) => TildeConstReason::Closure, }); self.with_tilde_const(disallowed, |this| visit::walk_fn(this, fk)); } fn visit_assoc_item(&mut self, item: &'a AssocItem, ctxt: AssocCtxt) { - if attr::contains_name(&item.attrs, sym::no_mangle) { - self.check_nomangle_item_asciionly(item.ident, item.span); + if let Some(ident) = item.kind.ident() + && attr::contains_name(&item.attrs, sym::no_mangle) + { + self.check_nomangle_item_asciionly(ident, item.span); } if ctxt == AssocCtxt::Trait || self.outer_trait_or_trait_impl.is_none() { @@ -1422,7 +1448,7 @@ impl<'a> Visitor<'a> for AstValidator<'a> { }); } AssocItemKind::Fn(box Fn { body, .. }) => { - if body.is_none() { + if body.is_none() && !self.is_sdylib_interface { self.dcx().emit_err(errors::AssocFnWithoutBody { span: item.span, replace_span: self.ending_semi_or_hi(item.span), @@ -1466,8 +1492,8 @@ impl<'a> Visitor<'a> for AstValidator<'a> { } } - if let AssocItemKind::Const(..) = item.kind { - self.check_item_named(item.ident, "const"); + if let AssocItemKind::Const(ci) = &item.kind { + self.check_item_named(ci.ident, "const"); } let parent_is_const = @@ -1479,10 +1505,8 @@ impl<'a> Visitor<'a> for AstValidator<'a> { || ctxt == AssocCtxt::Trait || matches!(func.sig.header.constness, Const::Yes(_)) => { - self.visit_vis(&item.vis); - self.visit_ident(&item.ident); - let kind = FnKind::Fn(FnCtxt::Assoc(ctxt), &item.ident, &item.vis, &*func); - walk_list!(self, visit_attribute, &item.attrs); + self.visit_attrs_vis_ident(&item.attrs, &item.vis, &func.ident); + let kind = FnKind::Fn(FnCtxt::Assoc(ctxt), &item.vis, &*func); self.visit_fn(kind, item.span, item.id); } AssocItemKind::Type(_) => { @@ -1587,7 +1611,7 @@ fn deny_equality_constraints( generics.where_clause.span } else { let mut span = predicate_span; - let mut prev: Option = None; + let mut prev_span: Option = None; let mut preds = generics.where_clause.predicates.iter().peekable(); // Find the predicate that shouldn't have been in the where bound list. while let Some(pred) = preds.next() { @@ -1597,12 +1621,12 @@ fn deny_equality_constraints( if let Some(next) = preds.peek() { // This is the first predicate, remove the trailing comma as well. span = span.with_hi(next.span.lo()); - } else if let Some(prev) = prev { + } else if let Some(prev_span) = prev_span { // Remove the previous comma as well. - span = span.with_lo(prev.hi()); + span = span.with_lo(prev_span.hi()); } } - prev = Some(pred.span); + prev_span = Some(pred.span); } span }; @@ -1672,17 +1696,20 @@ pub fn check_crate( sess: &Session, features: &Features, krate: &Crate, + is_sdylib_interface: bool, lints: &mut LintBuffer, ) -> bool { let mut validator = AstValidator { sess, features, - extern_mod: None, + extern_mod_span: None, outer_trait_or_trait_impl: None, has_proc_macro_decls: false, - outer_impl_trait: None, + outer_impl_trait_span: None, disallow_tilde_const: Some(TildeConstReason::Item), extern_mod_safety: None, + lint_node_id: CRATE_NODE_ID, + is_sdylib_interface, lint_buffer: lints, }; visit::walk_crate(&mut validator, krate); diff --git a/compiler/rustc_ast_passes/src/errors.rs b/compiler/rustc_ast_passes/src/errors.rs index 8e53e600f7ac6..6f9737e08314e 100644 --- a/compiler/rustc_ast_passes/src/errors.rs +++ b/compiler/rustc_ast_passes/src/errors.rs @@ -2,7 +2,7 @@ use rustc_ast::ParamKindOrd; use rustc_errors::codes::*; -use rustc_errors::{Applicability, Diag, EmissionGuarantee, SubdiagMessageOp, Subdiagnostic}; +use rustc_errors::{Applicability, Diag, EmissionGuarantee, Subdiagnostic}; use rustc_macros::{Diagnostic, Subdiagnostic}; use rustc_span::{Ident, Span, Symbol}; @@ -394,11 +394,7 @@ pub(crate) struct EmptyLabelManySpans(pub Vec); // The derive for `Vec` does multiple calls to `span_label`, adding commas between each impl Subdiagnostic for EmptyLabelManySpans { - fn add_to_diag_with>( - self, - diag: &mut Diag<'_, G>, - _: &F, - ) { + fn add_to_diag(self, diag: &mut Diag<'_, G>) { diag.span_labels(self.0, ""); } } @@ -749,11 +745,7 @@ pub(crate) struct StableFeature { } impl Subdiagnostic for StableFeature { - fn add_to_diag_with>( - self, - diag: &mut Diag<'_, G>, - _: &F, - ) { + fn add_to_diag(self, diag: &mut Diag<'_, G>) { diag.arg("name", self.name); diag.arg("since", self.since); diag.help(fluent::ast_passes_stable_since); @@ -823,3 +815,12 @@ pub(crate) struct DuplicatePreciseCapturing { #[label] pub bound2: Span, } + +#[derive(Diagnostic)] +#[diag(ast_passes_extern_without_abi)] +#[help] +pub(crate) struct MissingAbi { + #[primary_span] + #[suggestion(code = "extern \"\"", applicability = "has-placeholders")] + pub span: Span, +} diff --git a/compiler/rustc_ast_passes/src/feature_gate.rs b/compiler/rustc_ast_passes/src/feature_gate.rs index a3fcc110a1666..915613a391374 100644 --- a/compiler/rustc_ast_passes/src/feature_gate.rs +++ b/compiler/rustc_ast_passes/src/feature_gate.rs @@ -236,7 +236,7 @@ impl<'a> Visitor<'a> for PostExpansionVisitor<'a> { gate!(&self, trait_alias, i.span, "trait aliases are experimental"); } - ast::ItemKind::MacroDef(ast::MacroDef { macro_rules: false, .. }) => { + ast::ItemKind::MacroDef(_, ast::MacroDef { macro_rules: false, .. }) => { let msg = "`macro` is experimental"; gate!(&self, decl_macro, i.span, msg); } @@ -332,17 +332,19 @@ impl<'a> Visitor<'a> for PostExpansionVisitor<'a> { ast::ExprKind::TryBlock(_) => { gate!(&self, try_blocks, e.span, "`try` expression is experimental"); } - ast::ExprKind::Lit(token::Lit { kind: token::LitKind::Float, suffix, .. }) => { - match suffix { - Some(sym::f16) => { - gate!(&self, f16, e.span, "the type `f16` is unstable") - } - Some(sym::f128) => { - gate!(&self, f128, e.span, "the type `f128` is unstable") - } - _ => (), + ast::ExprKind::Lit(token::Lit { + kind: token::LitKind::Float | token::LitKind::Integer, + suffix, + .. + }) => match suffix { + Some(sym::f16) => { + gate!(&self, f16, e.span, "the type `f16` is unstable") } - } + Some(sym::f128) => { + gate!(&self, f128, e.span, "the type `f128` is unstable") + } + _ => (), + }, _ => {} } visit::walk_expr(self, e) @@ -490,7 +492,6 @@ pub fn check_crate(krate: &ast::Crate, sess: &Session, features: &Features) { half_open_range_patterns_in_slices, "half-open range patterns in slices are unstable" ); - gate_all!(inline_const_pat, "inline-const in pattern position is experimental"); gate_all!(associated_const_equality, "associated const equality is incomplete"); gate_all!(yeet_expr, "`do yeet` expression is experimental"); gate_all!(dyn_star, "`dyn*` trait objects are experimental"); @@ -512,6 +513,8 @@ pub fn check_crate(krate: &ast::Crate, sess: &Session, features: &Features) { gate_all!(contracts, "contracts are incomplete"); gate_all!(contracts_internals, "contract internal machinery is for internal use only"); gate_all!(where_clause_attrs, "attributes in `where` clause are unstable"); + gate_all!(super_let, "`super let` is experimental"); + gate_all!(frontmatter, "frontmatters are experimental"); if !visitor.features.never_patterns() { if let Some(spans) = spans.get(&sym::never_patterns) { diff --git a/compiler/rustc_ast_passes/src/lib.rs b/compiler/rustc_ast_passes/src/lib.rs index 093199cf34212..7956057f88ee7 100644 --- a/compiler/rustc_ast_passes/src/lib.rs +++ b/compiler/rustc_ast_passes/src/lib.rs @@ -4,11 +4,11 @@ // tidy-alphabetical-start #![allow(internal_features)] +#![cfg_attr(bootstrap, feature(let_chains))] #![doc(rust_logo)] #![feature(box_patterns)] #![feature(if_let_guard)] #![feature(iter_is_partitioned)] -#![feature(let_chains)] #![feature(rustdoc_internals)] // tidy-alphabetical-end diff --git a/compiler/rustc_ast_pretty/src/lib.rs b/compiler/rustc_ast_pretty/src/lib.rs index 84d9ce278a21a..1079ccccb03e7 100644 --- a/compiler/rustc_ast_pretty/src/lib.rs +++ b/compiler/rustc_ast_pretty/src/lib.rs @@ -2,6 +2,7 @@ #![allow(internal_features)] #![doc(rust_logo)] #![feature(box_patterns)] +#![feature(negative_impls)] #![feature(rustdoc_internals)] // tidy-alphabetical-end diff --git a/compiler/rustc_ast_pretty/src/pp.rs b/compiler/rustc_ast_pretty/src/pp.rs index e4fd7e94fde14..8a0dbadf18cdf 100644 --- a/compiler/rustc_ast_pretty/src/pp.rs +++ b/compiler/rustc_ast_pretty/src/pp.rs @@ -234,6 +234,34 @@ struct BufEntry { size: isize, } +// Boxes opened with methods like `Printer::{cbox,ibox}` must be closed with +// `Printer::end`. Failure to do so can result in bad indenting, or in extreme +// cases, cause no output to be produced at all. +// +// Box opening and closing used to be entirely implicit, which was hard to +// understand and easy to get wrong. This marker type is now returned from the +// box opening methods and forgotten by `Printer::end`. Any marker that isn't +// forgotten will trigger a panic in `drop`. (Closing a box more than once +// isn't possible because `BoxMarker` doesn't implement `Copy` or `Clone`.) +// +// Note: it would be better to make open/close mismatching impossible and avoid +// the need for this marker type altogether by having functions like +// `with_ibox` that open a box, call a closure, and then close the box. That +// would work for simple cases, but box lifetimes sometimes interact with +// complex control flow and across function boundaries in ways that are +// difficult to handle with such a technique. +#[must_use] +pub struct BoxMarker; + +impl !Clone for BoxMarker {} +impl !Copy for BoxMarker {} + +impl Drop for BoxMarker { + fn drop(&mut self) { + panic!("BoxMarker not ended with `Printer::end()`"); + } +} + impl Printer { pub fn new() -> Self { Printer { @@ -270,7 +298,8 @@ impl Printer { } } - fn scan_begin(&mut self, token: BeginToken) { + // This is is where `BoxMarker`s are produced. + fn scan_begin(&mut self, token: BeginToken) -> BoxMarker { if self.scan_stack.is_empty() { self.left_total = 1; self.right_total = 1; @@ -278,15 +307,18 @@ impl Printer { } let right = self.buf.push(BufEntry { token: Token::Begin(token), size: -self.right_total }); self.scan_stack.push_back(right); + BoxMarker } - fn scan_end(&mut self) { + // This is is where `BoxMarker`s are consumed. + fn scan_end(&mut self, b: BoxMarker) { if self.scan_stack.is_empty() { self.print_end(); } else { let right = self.buf.push(BufEntry { token: Token::End, size: -1 }); self.scan_stack.push_back(right); } + std::mem::forget(b) } fn scan_break(&mut self, token: BreakToken) { diff --git a/compiler/rustc_ast_pretty/src/pp/convenience.rs b/compiler/rustc_ast_pretty/src/pp/convenience.rs index a1c07bb07e42a..9b902b38122c8 100644 --- a/compiler/rustc_ast_pretty/src/pp/convenience.rs +++ b/compiler/rustc_ast_pretty/src/pp/convenience.rs @@ -1,25 +1,27 @@ use std::borrow::Cow; -use crate::pp::{BeginToken, BreakToken, Breaks, IndentStyle, Printer, SIZE_INFINITY, Token}; +use crate::pp::{ + BeginToken, BoxMarker, BreakToken, Breaks, IndentStyle, Printer, SIZE_INFINITY, Token, +}; impl Printer { /// "raw box" - pub fn rbox(&mut self, indent: isize, breaks: Breaks) { + pub fn rbox(&mut self, indent: isize, breaks: Breaks) -> BoxMarker { self.scan_begin(BeginToken { indent: IndentStyle::Block { offset: indent }, breaks }) } /// Inconsistent breaking box - pub fn ibox(&mut self, indent: isize) { + pub fn ibox(&mut self, indent: isize) -> BoxMarker { self.rbox(indent, Breaks::Inconsistent) } /// Consistent breaking box - pub fn cbox(&mut self, indent: isize) { + pub fn cbox(&mut self, indent: isize) -> BoxMarker { self.rbox(indent, Breaks::Consistent) } - pub fn visual_align(&mut self) { - self.scan_begin(BeginToken { indent: IndentStyle::Visual, breaks: Breaks::Consistent }); + pub fn visual_align(&mut self) -> BoxMarker { + self.scan_begin(BeginToken { indent: IndentStyle::Visual, breaks: Breaks::Consistent }) } pub fn break_offset(&mut self, n: usize, off: isize) { @@ -30,8 +32,8 @@ impl Printer { }); } - pub fn end(&mut self) { - self.scan_end() + pub fn end(&mut self, b: BoxMarker) { + self.scan_end(b) } pub fn eof(mut self) -> String { diff --git a/compiler/rustc_ast_pretty/src/pprust/mod.rs b/compiler/rustc_ast_pretty/src/pprust/mod.rs index 97cb6e52d5625..a05e2bd6a5d72 100644 --- a/compiler/rustc_ast_pretty/src/pprust/mod.rs +++ b/compiler/rustc_ast_pretty/src/pprust/mod.rs @@ -5,13 +5,11 @@ pub mod state; use std::borrow::Cow; use rustc_ast as ast; -use rustc_ast::token::{Nonterminal, Token, TokenKind}; +use rustc_ast::token::{Token, TokenKind}; use rustc_ast::tokenstream::{TokenStream, TokenTree}; -pub use state::{AnnNode, Comments, PpAnn, PrintState, State, print_crate}; - -pub fn nonterminal_to_string(nt: &Nonterminal) -> String { - State::new().nonterminal_to_string(nt) -} +pub use state::{ + AnnNode, Comments, PpAnn, PrintState, State, print_crate, print_crate_as_interface, +}; /// Print the token kind precisely, without converting `$crate` into its respective crate name. pub fn token_kind_to_string(tok: &TokenKind) -> Cow<'static, str> { diff --git a/compiler/rustc_ast_pretty/src/pprust/state.rs b/compiler/rustc_ast_pretty/src/pprust/state.rs index 3dbfc191f8f50..669e6de04e080 100644 --- a/compiler/rustc_ast_pretty/src/pprust/state.rs +++ b/compiler/rustc_ast_pretty/src/pprust/state.rs @@ -11,7 +11,7 @@ use std::sync::Arc; use rustc_ast::attr::AttrIdGenerator; use rustc_ast::ptr::P; -use rustc_ast::token::{self, CommentKind, Delimiter, IdentIsRaw, Nonterminal, Token, TokenKind}; +use rustc_ast::token::{self, CommentKind, Delimiter, IdentIsRaw, Token, TokenKind}; use rustc_ast::tokenstream::{Spacing, TokenStream, TokenTree}; use rustc_ast::util::classify; use rustc_ast::util::comments::{Comment, CommentStyle}; @@ -26,7 +26,7 @@ use rustc_span::symbol::IdentPrinter; use rustc_span::{BytePos, CharPos, DUMMY_SP, FileName, Ident, Pos, Span, Symbol, kw, sym}; use crate::pp::Breaks::{Consistent, Inconsistent}; -use crate::pp::{self, Breaks}; +use crate::pp::{self, BoxMarker, Breaks}; use crate::pprust::state::fixup::FixupContext; pub enum MacHeader<'a> { @@ -221,6 +221,7 @@ pub struct State<'a> { pub s: pp::Printer, comments: Option>, ann: &'a (dyn PpAnn + 'a), + is_sdylib_interface: bool, } const INDENT_UNIT: isize = 4; @@ -236,9 +237,41 @@ pub fn print_crate<'a>( is_expanded: bool, edition: Edition, g: &AttrIdGenerator, +) -> String { + let mut s = State { + s: pp::Printer::new(), + comments: Some(Comments::new(sm, filename, input)), + ann, + is_sdylib_interface: false, + }; + + print_crate_inner(&mut s, krate, is_expanded, edition, g); + s.s.eof() +} + +pub fn print_crate_as_interface( + krate: &ast::Crate, + edition: Edition, + g: &AttrIdGenerator, ) -> String { let mut s = - State { s: pp::Printer::new(), comments: Some(Comments::new(sm, filename, input)), ann }; + State { s: pp::Printer::new(), comments: None, ann: &NoAnn, is_sdylib_interface: true }; + + print_crate_inner(&mut s, krate, false, edition, g); + s.s.eof() +} + +fn print_crate_inner<'a>( + s: &mut State<'a>, + krate: &ast::Crate, + is_expanded: bool, + edition: Edition, + g: &AttrIdGenerator, +) { + // We need to print shebang before anything else + // otherwise the resulting code will not compile + // and shebang will be useless. + s.maybe_print_shebang(); if is_expanded && !krate.attrs.iter().any(|attr| attr.has_name(sym::no_core)) { // We need to print `#![no_std]` (and its feature gate) so that @@ -277,8 +310,7 @@ pub fn print_crate<'a>( s.print_item(item); } s.print_remaining_comments(); - s.ann.post(&mut s, AnnNode::Crate(krate)); - s.s.eof() + s.ann.post(s, AnnNode::Crate(krate)); } /// Should two consecutive tokens be printed with a space between them? @@ -419,7 +451,7 @@ pub trait PrintState<'a>: std::ops::Deref + std::ops::Dere { let mut it = elts.into_iter(); - self.rbox(0, b); + let rb = self.rbox(0, b); if let Some(first) = it.next() { op(self, first); for elt in it { @@ -430,7 +462,7 @@ pub trait PrintState<'a>: std::ops::Deref + std::ops::Dere op(self, elt); } } - self.end(); + self.end(rb); } fn commasep<'x, T: 'x, F, I>(&mut self, b: Breaks, elts: I, op: F) @@ -461,7 +493,7 @@ pub trait PrintState<'a>: std::ops::Deref + std::ops::Dere self.zerobreak(); } if let Some((last, lines)) = cmnt.lines.split_last() { - self.ibox(0); + let ib = self.ibox(0); for line in lines { self.word(line.clone()); @@ -471,7 +503,7 @@ pub trait PrintState<'a>: std::ops::Deref + std::ops::Dere self.word(last.clone()); self.space(); - self.end(); + self.end(ib); } self.zerobreak() } @@ -494,14 +526,14 @@ pub trait PrintState<'a>: std::ops::Deref + std::ops::Dere self.word(line.clone()); self.hardbreak() } else { - self.visual_align(); + let vb = self.visual_align(); for line in &cmnt.lines { if !line.is_empty() { self.word(line.clone()); } self.hardbreak(); } - self.end(); + self.end(vb); } } CommentStyle::BlankLine => { @@ -560,6 +592,20 @@ pub trait PrintState<'a>: std::ops::Deref + std::ops::Dere self.word(st) } + fn maybe_print_shebang(&mut self) { + if let Some(cmnt) = self.peek_comment() { + // Comment is a shebang if it's: + // Isolated, starts with #! and doesn't continue with `[` + // See [rustc_lexer::strip_shebang] and [gather_comments] from pprust/state.rs for details + if cmnt.style == CommentStyle::Isolated + && cmnt.lines.first().map_or(false, |l| l.starts_with("#!")) + { + let cmnt = self.next_comment().unwrap(); + self.print_comment(cmnt); + } + } + } + fn print_inner_attributes(&mut self, attrs: &[ast::Attribute]) -> bool { self.print_either_attributes(attrs, ast::AttrStyle::Inner, false, true) } @@ -620,7 +666,7 @@ pub trait PrintState<'a>: std::ops::Deref + std::ops::Dere } fn print_attr_item(&mut self, item: &ast::AttrItem, span: Span) { - self.ibox(0); + let ib = self.ibox(0); match item.unsafety { ast::Safety::Unsafe(_) => { self.word("unsafe"); @@ -634,6 +680,7 @@ pub trait PrintState<'a>: std::ops::Deref + std::ops::Dere false, None, *delim, + None, tokens, true, span, @@ -653,7 +700,7 @@ pub trait PrintState<'a>: std::ops::Deref + std::ops::Dere ast::Safety::Unsafe(_) => self.pclose(), ast::Safety::Default | ast::Safety::Safe(_) => {} } - self.end(); + self.end(ib); } /// This doesn't deserve to be called "pretty" printing, but it should be @@ -679,6 +726,7 @@ pub trait PrintState<'a>: std::ops::Deref + std::ops::Dere false, None, *delim, + Some(spacing.open), tts, convert_dollar_crate, dspan.entire(), @@ -735,13 +783,12 @@ pub trait PrintState<'a>: std::ops::Deref + std::ops::Dere has_bang: bool, ident: Option, delim: Delimiter, + open_spacing: Option, tts: &TokenStream, convert_dollar_crate: bool, span: Span, ) { - if delim == Delimiter::Brace { - self.cbox(INDENT_UNIT); - } + let cb = (delim == Delimiter::Brace).then(|| self.cbox(INDENT_UNIT)); match header { Some(MacHeader::Path(path)) => self.print_path(path, false, 0), Some(MacHeader::Keyword(kw)) => self.word(kw), @@ -760,22 +807,32 @@ pub trait PrintState<'a>: std::ops::Deref + std::ops::Dere self.nbsp(); } self.word("{"); - if !tts.is_empty() { + + // Respect `Alone`, if provided, and print a space. Unless the list is empty. + let open_space = (open_spacing == None || open_spacing == Some(Spacing::Alone)) + && !tts.is_empty(); + if open_space { self.space(); } - self.ibox(0); + let ib = self.ibox(0); self.print_tts(tts, convert_dollar_crate); - self.end(); - let empty = tts.is_empty(); - self.bclose(span, empty); + self.end(ib); + + // Use `open_space` for the spacing *before* the closing delim. + // Because spacing on delimiters is lost when going through + // proc macros, and otherwise we can end up with ugly cases + // like `{ x}`. Symmetry is better. + self.bclose(span, !open_space, cb.unwrap()); } delim => { - let token_str = self.token_kind_to_string(&token::OpenDelim(delim)); + // `open_spacing` is ignored. We never print spaces after + // non-brace opening delims or before non-brace closing delims. + let token_str = self.token_kind_to_string(&delim.as_open_token_kind()); self.word(token_str); - self.ibox(0); + let ib = self.ibox(0); self.print_tts(tts, convert_dollar_crate); - self.end(); - let token_str = self.token_kind_to_string(&token::CloseDelim(delim)); + self.end(ib); + let token_str = self.token_kind_to_string(&delim.as_close_token_kind()); self.word(token_str); } } @@ -788,6 +845,17 @@ pub trait PrintState<'a>: std::ops::Deref + std::ops::Dere sp: Span, print_visibility: impl FnOnce(&mut Self), ) { + if let Some(eii_macro_for) = ¯o_def.eii_macro_for { + self.word("#[eii_macro_for("); + self.print_path(&eii_macro_for.extern_item_path, false, 0); + if eii_macro_for.impl_unsafe { + self.word(","); + self.space(); + self.word("unsafe"); + } + self.word(")]"); + self.hardbreak(); + } let (kw, has_bang) = if macro_def.macro_rules { ("macro_rules", true) } else { @@ -799,7 +867,8 @@ pub trait PrintState<'a>: std::ops::Deref + std::ops::Dere has_bang, Some(*ident), macro_def.body.delim, - ¯o_def.body.tokens.clone(), + None, + ¯o_def.body.tokens, true, sp, ); @@ -828,37 +897,38 @@ pub trait PrintState<'a>: std::ops::Deref + std::ops::Dere } } - fn head>>(&mut self, w: S) { + fn head>>(&mut self, w: S) -> (BoxMarker, BoxMarker) { let w = w.into(); // Outer-box is consistent. - self.cbox(INDENT_UNIT); + let cb = self.cbox(INDENT_UNIT); // Head-box is inconsistent. - self.ibox(0); + let ib = self.ibox(0); // Keyword that starts the head. if !w.is_empty() { self.word_nbsp(w); } + (cb, ib) } - fn bopen(&mut self) { + fn bopen(&mut self, ib: BoxMarker) { self.word("{"); - self.end(); // Close the head-box. + self.end(ib); } - fn bclose_maybe_open(&mut self, span: rustc_span::Span, empty: bool, close_box: bool) { + fn bclose_maybe_open(&mut self, span: rustc_span::Span, no_space: bool, cb: Option) { let has_comment = self.maybe_print_comment(span.hi()); - if !empty || has_comment { + if !no_space || has_comment { self.break_offset_if_not_bol(1, -INDENT_UNIT); } self.word("}"); - if close_box { - self.end(); // Close the outer-box. + if let Some(cb) = cb { + self.end(cb); } } - fn bclose(&mut self, span: rustc_span::Span, empty: bool) { - let close_box = true; - self.bclose_maybe_open(span, empty, close_box) + fn bclose(&mut self, span: rustc_span::Span, no_space: bool, cb: BoxMarker) { + let cb = Some(cb); + self.bclose_maybe_open(span, no_space, cb) } fn break_offset_if_not_bol(&mut self, n: usize, off: isize) { @@ -876,14 +946,6 @@ pub trait PrintState<'a>: std::ops::Deref + std::ops::Dere } } - fn nonterminal_to_string(&self, nt: &Nonterminal) -> String { - // We extract the token stream from the AST fragment and pretty print - // it, rather than using AST pretty printing, because `Nonterminal` is - // slated for removal in #124141. (This method will also then be - // removed.) - self.tts_to_string(&TokenStream::from_nonterminal_ast(nt)) - } - /// Print the token kind precisely, without converting `$crate` into its respective crate name. fn token_kind_to_string(&self, tok: &TokenKind) -> Cow<'static, str> { self.token_kind_to_string_ext(tok, None) @@ -940,14 +1002,13 @@ pub trait PrintState<'a>: std::ops::Deref + std::ops::Dere token::RArrow => "->".into(), token::LArrow => "<-".into(), token::FatArrow => "=>".into(), - token::OpenDelim(Delimiter::Parenthesis) => "(".into(), - token::CloseDelim(Delimiter::Parenthesis) => ")".into(), - token::OpenDelim(Delimiter::Bracket) => "[".into(), - token::CloseDelim(Delimiter::Bracket) => "]".into(), - token::OpenDelim(Delimiter::Brace) => "{".into(), - token::CloseDelim(Delimiter::Brace) => "}".into(), - token::OpenDelim(Delimiter::Invisible(_)) - | token::CloseDelim(Delimiter::Invisible(_)) => "".into(), + token::OpenParen => "(".into(), + token::CloseParen => ")".into(), + token::OpenBracket => "[".into(), + token::CloseBracket => "]".into(), + token::OpenBrace => "{".into(), + token::CloseBrace => "}".into(), + token::OpenInvisible(_) | token::CloseInvisible(_) => "".into(), token::Pound => "#".into(), token::Dollar => "$".into(), token::Question => "?".into(), @@ -976,8 +1037,6 @@ pub trait PrintState<'a>: std::ops::Deref + std::ops::Dere doc_comment_to_string(comment_kind, attr_style, data).into() } token::Eof => "".into(), - - token::Interpolated(ref nt) => self.nonterminal_to_string(&nt).into(), } } @@ -1025,11 +1084,8 @@ pub trait PrintState<'a>: std::ops::Deref + std::ops::Dere fn block_to_string(&self, blk: &ast::Block) -> String { Self::to_string(|s| { - // Containing cbox, will be closed by `print_block` at `}`. - s.cbox(INDENT_UNIT); - // Head-ibox, will be closed by `print_block` after `{`. - s.ibox(0); - s.print_block(blk) + let (cb, ib) = s.head(""); + s.print_block(blk, cb, ib) }) } @@ -1093,7 +1149,7 @@ impl<'a> PrintState<'a> for State<'a> { impl<'a> State<'a> { pub fn new() -> State<'a> { - State { s: pp::Printer::new(), comments: None, ann: &NoAnn } + State { s: pp::Printer::new(), comments: None, ann: &NoAnn, is_sdylib_interface: false } } fn commasep_cmnt(&mut self, b: Breaks, elts: &[T], mut op: F, mut get_span: G) @@ -1101,7 +1157,7 @@ impl<'a> State<'a> { F: FnMut(&mut State<'_>, &T), G: FnMut(&T) -> rustc_span::Span, { - self.rbox(0, b); + let rb = self.rbox(0, b); let len = elts.len(); let mut i = 0; for elt in elts { @@ -1114,7 +1170,7 @@ impl<'a> State<'a> { self.space_if_not_bol(); } } - self.end(); + self.end(rb); } fn commasep_exprs(&mut self, b: Breaks, exprs: &[P]) { @@ -1173,6 +1229,17 @@ impl<'a> State<'a> { self.print_expr_anon_const(end, &[]); } } + rustc_ast::TyPatKind::Or(variants) => { + let mut first = true; + for pat in variants { + if first { + first = false + } else { + self.word(" | "); + } + self.print_ty_pat(pat); + } + } rustc_ast::TyPatKind::Err(_) => { self.popen(); self.word("/*ERROR*/"); @@ -1183,7 +1250,7 @@ impl<'a> State<'a> { pub fn print_type(&mut self, ty: &ast::Ty) { self.maybe_print_comment(ty.span.lo()); - self.ibox(0); + let ib = self.ibox(0); match &ty.kind { ast::TyKind::Slice(ty) => { self.word("["); @@ -1225,12 +1292,12 @@ impl<'a> State<'a> { self.print_ty_fn(f.ext, f.safety, &f.decl, None, &f.generic_params); } ast::TyKind::UnsafeBinder(f) => { - self.ibox(INDENT_UNIT); + let ib = self.ibox(INDENT_UNIT); self.word("unsafe"); self.print_generic_params(&f.generic_params); self.nbsp(); self.print_type(&f.inner_ty); - self.end(); + self.end(ib); } ast::TyKind::Path(None, path) => { self.print_path(path, false, 0); @@ -1288,7 +1355,7 @@ impl<'a> State<'a> { self.print_ty_pat(pat); } } - self.end(); + self.end(ib); } fn print_trait_ref(&mut self, t: &ast::TraitRef) { @@ -1335,12 +1402,15 @@ impl<'a> State<'a> { ast::StmtKind::Let(loc) => { self.print_outer_attributes(&loc.attrs); self.space_if_not_bol(); - self.ibox(INDENT_UNIT); + let ib1 = self.ibox(INDENT_UNIT); + if loc.super_.is_some() { + self.word_nbsp("super"); + } self.word_nbsp("let"); - self.ibox(INDENT_UNIT); + let ib2 = self.ibox(INDENT_UNIT); self.print_local_decl(loc); - self.end(); + self.end(ib2); if let Some((init, els)) = loc.kind.init_else_opt() { self.nbsp(); self.word_space("="); @@ -1350,14 +1420,14 @@ impl<'a> State<'a> { FixupContext::default(), ); if let Some(els) = els { - self.cbox(INDENT_UNIT); - self.ibox(INDENT_UNIT); + let cb = self.cbox(INDENT_UNIT); + let ib = self.ibox(INDENT_UNIT); self.word(" else "); - self.print_block(els); + self.print_block(els, cb, ib); } } self.word(";"); - self.end(); // `let` ibox + self.end(ib1); } ast::StmtKind::Item(item) => self.print_item(item), ast::StmtKind::Expr(expr) => { @@ -1388,23 +1458,30 @@ impl<'a> State<'a> { self.maybe_print_trailing_comment(st.span, None) } - fn print_block(&mut self, blk: &ast::Block) { - self.print_block_with_attrs(blk, &[]) + fn print_block(&mut self, blk: &ast::Block, cb: BoxMarker, ib: BoxMarker) { + self.print_block_with_attrs(blk, &[], cb, ib) } - fn print_block_unclosed_indent(&mut self, blk: &ast::Block) { - self.print_block_maybe_unclosed(blk, &[], false) + fn print_block_unclosed_indent(&mut self, blk: &ast::Block, ib: BoxMarker) { + self.print_block_maybe_unclosed(blk, &[], None, ib) } - fn print_block_with_attrs(&mut self, blk: &ast::Block, attrs: &[ast::Attribute]) { - self.print_block_maybe_unclosed(blk, attrs, true) + fn print_block_with_attrs( + &mut self, + blk: &ast::Block, + attrs: &[ast::Attribute], + cb: BoxMarker, + ib: BoxMarker, + ) { + self.print_block_maybe_unclosed(blk, attrs, Some(cb), ib) } fn print_block_maybe_unclosed( &mut self, blk: &ast::Block, attrs: &[ast::Attribute], - close_box: bool, + cb: Option, + ib: BoxMarker, ) { match blk.rules { BlockCheckMode::Unsafe(..) => self.word_space("unsafe"), @@ -1412,7 +1489,7 @@ impl<'a> State<'a> { } self.maybe_print_comment(blk.span.lo()); self.ann.pre(self, AnnNode::Block(blk)); - self.bopen(); + self.bopen(ib); let has_attrs = self.print_inner_attributes(attrs); @@ -1428,8 +1505,8 @@ impl<'a> State<'a> { } } - let empty = !has_attrs && blk.stmts.is_empty(); - self.bclose_maybe_open(blk.span, empty, close_box); + let no_space = !has_attrs && blk.stmts.is_empty(); + self.bclose_maybe_open(blk.span, no_space, cb); self.ann.post(self, AnnNode::Block(blk)) } @@ -1476,7 +1553,8 @@ impl<'a> State<'a> { true, None, m.args.delim, - &m.args.tokens.clone(), + None, + &m.args.tokens, true, m.span(), ); @@ -1564,8 +1642,8 @@ impl<'a> State<'a> { } } InlineAsmOperand::Label { block } => { - s.head("label"); - s.print_block(block); + let (cb, ib) = s.head("label"); + s.print_block(block, cb, ib); } } } @@ -1622,9 +1700,9 @@ impl<'a> State<'a> { fn print_pat(&mut self, pat: &ast::Pat) { self.maybe_print_comment(pat.span.lo()); self.ann.pre(self, AnnNode::Pat(pat)); - /* Pat isn't normalized, but the beauty of it - is that it doesn't matter */ + /* Pat isn't normalized, but the beauty of it is that it doesn't matter */ match &pat.kind { + PatKind::Missing => unreachable!(), PatKind::Wild => self.word("_"), PatKind::Never => self.word("!"), PatKind::Ident(BindingMode(by_ref, mutbl), ident, sub) => { @@ -1679,13 +1757,13 @@ impl<'a> State<'a> { Consistent, fields, |s, f| { - s.cbox(INDENT_UNIT); + let cb = s.cbox(INDENT_UNIT); if !f.is_shorthand { s.print_ident(f.ident); s.word_nbsp(":"); } s.print_pat(&f.pat); - s.end(); + s.end(cb); }, |f| f.pat.span, ); @@ -1936,7 +2014,7 @@ impl<'a> State<'a> { } fn print_param(&mut self, input: &ast::Param, is_closure: bool) { - self.ibox(INDENT_UNIT); + let ib = self.ibox(INDENT_UNIT); self.print_outer_attributes_inline(&input.attrs); @@ -1946,12 +2024,7 @@ impl<'a> State<'a> { if let Some(eself) = input.to_self() { self.print_explicit_self(&eself); } else { - let invalid = if let PatKind::Ident(_, ident, _) = input.pat.kind { - ident.name == kw::Empty - } else { - false - }; - if !invalid { + if !matches!(input.pat.kind, PatKind::Missing) { self.print_pat(&input.pat); self.word(":"); self.space(); @@ -1960,16 +2033,16 @@ impl<'a> State<'a> { } } } - self.end(); + self.end(ib); } fn print_fn_ret_ty(&mut self, fn_ret_ty: &ast::FnRetTy) { if let ast::FnRetTy::Ty(ty) = fn_ret_ty { self.space_if_not_bol(); - self.ibox(INDENT_UNIT); + let ib = self.ibox(INDENT_UNIT); self.word_space("->"); self.print_type(ty); - self.end(); + self.end(ib); self.maybe_print_comment(ty.span.lo()); } } @@ -1982,12 +2055,12 @@ impl<'a> State<'a> { name: Option, generic_params: &[ast::GenericParam], ) { - self.ibox(INDENT_UNIT); + let ib = self.ibox(INDENT_UNIT); self.print_formal_generic_params(generic_params); let generics = ast::Generics::default(); let header = ast::FnHeader { safety, ext, ..ast::FnHeader::default() }; self.print_fn(decl, header, name, &generics); - self.end(); + self.end(ib); } fn print_fn_header_info(&mut self, header: ast::FnHeader) { @@ -2065,7 +2138,15 @@ impl<'a> State<'a> { } fn print_meta_item(&mut self, item: &ast::MetaItem) { - self.ibox(INDENT_UNIT); + let ib = self.ibox(INDENT_UNIT); + match item.unsafety { + ast::Safety::Unsafe(_) => { + self.word("unsafe"); + self.popen(); + } + ast::Safety::Default | ast::Safety::Safe(_) => {} + } + match &item.kind { ast::MetaItemKind::Word => self.print_path(&item.path, false, 0), ast::MetaItemKind::NameValue(value) => { @@ -2081,7 +2162,13 @@ impl<'a> State<'a> { self.pclose(); } } - self.end(); + + match item.unsafety { + ast::Safety::Unsafe(_) => self.pclose(), + ast::Safety::Default | ast::Safety::Safe(_) => {} + } + + self.end(ib); } pub(crate) fn bounds_to_string(&self, bounds: &[ast::GenericBound]) -> String { diff --git a/compiler/rustc_ast_pretty/src/pprust/state/expr.rs b/compiler/rustc_ast_pretty/src/pprust/state/expr.rs index 7d9dc89bd7567..c9a7e2aebd01b 100644 --- a/compiler/rustc_ast_pretty/src/pprust/state/expr.rs +++ b/compiler/rustc_ast_pretty/src/pprust/state/expr.rs @@ -21,20 +21,20 @@ impl<'a> State<'a> { match &_else.kind { // Another `else if` block. ast::ExprKind::If(i, then, e) => { - self.cbox(INDENT_UNIT - 1); - self.ibox(0); + let cb = self.cbox(0); + let ib = self.ibox(0); self.word(" else if "); self.print_expr_as_cond(i); self.space(); - self.print_block(then); + self.print_block(then, cb, ib); self.print_else(e.as_deref()) } // Final `else` block. - ast::ExprKind::Block(b, _) => { - self.cbox(INDENT_UNIT - 1); - self.ibox(0); + ast::ExprKind::Block(b, None) => { + let cb = self.cbox(0); + let ib = self.ibox(0); self.word(" else "); - self.print_block(b) + self.print_block(b, cb, ib) } // Constraints would be great here! _ => { @@ -45,10 +45,12 @@ impl<'a> State<'a> { } fn print_if(&mut self, test: &ast::Expr, blk: &ast::Block, elseopt: Option<&ast::Expr>) { - self.head("if"); + let cb = self.cbox(0); + let ib = self.ibox(0); + self.word_nbsp("if"); self.print_expr_as_cond(test); self.space(); - self.print_block(blk); + self.print_block(blk, cb, ib); self.print_else(elseopt) } @@ -110,11 +112,11 @@ impl<'a> State<'a> { } fn print_expr_vec(&mut self, exprs: &[P]) { - self.ibox(INDENT_UNIT); + let ib = self.ibox(INDENT_UNIT); self.word("["); self.commasep_exprs(Inconsistent, exprs); self.word("]"); - self.end(); + self.end(ib); } pub(super) fn print_expr_anon_const( @@ -122,27 +124,27 @@ impl<'a> State<'a> { expr: &ast::AnonConst, attrs: &[ast::Attribute], ) { - self.ibox(INDENT_UNIT); + let ib = self.ibox(INDENT_UNIT); self.word("const"); self.nbsp(); if let ast::ExprKind::Block(block, None) = &expr.value.kind { - self.cbox(0); - self.ibox(0); - self.print_block_with_attrs(block, attrs); + let cb = self.cbox(0); + let ib = self.ibox(0); + self.print_block_with_attrs(block, attrs, cb, ib); } else { self.print_expr(&expr.value, FixupContext::default()); } - self.end(); + self.end(ib); } fn print_expr_repeat(&mut self, element: &ast::Expr, count: &ast::AnonConst) { - self.ibox(INDENT_UNIT); + let ib = self.ibox(INDENT_UNIT); self.word("["); self.print_expr(element, FixupContext::default()); self.word_space(";"); self.print_expr(&count.value, FixupContext::default()); self.word("]"); - self.end(); + self.end(ib); } fn print_expr_struct( @@ -167,7 +169,7 @@ impl<'a> State<'a> { self.word("}"); return; } - self.cbox(0); + let cb = self.cbox(0); for (pos, field) in fields.iter().with_position() { let is_first = matches!(pos, Position::First | Position::Only); let is_last = matches!(pos, Position::Last | Position::Only); @@ -198,7 +200,7 @@ impl<'a> State<'a> { self.space(); } self.offset(-INDENT_UNIT); - self.end(); + self.end(cb); self.word("}"); } @@ -274,22 +276,22 @@ impl<'a> State<'a> { fn print_expr_binary( &mut self, - op: ast::BinOp, + op: ast::BinOpKind, lhs: &ast::Expr, rhs: &ast::Expr, fixup: FixupContext, ) { - let binop_prec = op.node.precedence(); + let binop_prec = op.precedence(); let left_prec = lhs.precedence(); let right_prec = rhs.precedence(); - let (mut left_needs_paren, right_needs_paren) = match op.node.fixity() { + let (mut left_needs_paren, right_needs_paren) = match op.fixity() { Fixity::Left => (left_prec < binop_prec, right_prec <= binop_prec), Fixity::Right => (left_prec <= binop_prec, right_prec < binop_prec), Fixity::None => (left_prec <= binop_prec, right_prec <= binop_prec), }; - match (&lhs.kind, op.node) { + match (&lhs.kind, op) { // These cases need parens: `x as i32 < y` has the parser thinking that `i32 < y` is // the beginning of a path type. It starts trying to parse `x as (i32 < y ...` instead // of `(x as i32) < ...`. We need to convince it _not_ to do that. @@ -312,7 +314,7 @@ impl<'a> State<'a> { self.print_expr_cond_paren(lhs, left_needs_paren, fixup.leftmost_subexpression()); self.space(); - self.word_space(op.node.as_str()); + self.word_space(op.as_str()); self.print_expr_cond_paren(rhs, right_needs_paren, fixup.subsequent_subexpression()); } @@ -366,7 +368,7 @@ impl<'a> State<'a> { self.print_outer_attributes(attrs); } - self.ibox(INDENT_UNIT); + let ib = self.ibox(INDENT_UNIT); // The Match subexpression in `match x {} - 1` must be parenthesized if // it is the leftmost subexpression in a statement: @@ -410,7 +412,7 @@ impl<'a> State<'a> { self.print_expr_method_call(seg, receiver, args, fixup); } ast::ExprKind::Binary(op, lhs, rhs) => { - self.print_expr_binary(*op, lhs, rhs, fixup); + self.print_expr_binary(op.node, lhs, rhs, fixup); } ast::ExprKind::Unary(op, expr) => { self.print_expr_unary(*op, expr, fixup); @@ -438,14 +440,14 @@ impl<'a> State<'a> { ast::ExprKind::Type(expr, ty) => { self.word("builtin # type_ascribe"); self.popen(); - self.ibox(0); + let ib = self.ibox(0); self.print_expr(expr, FixupContext::default()); self.word(","); self.space_if_not_bol(); self.print_type(ty); - self.end(); + self.end(ib); self.pclose(); } ast::ExprKind::Let(pat, scrutinee, _, _) => { @@ -457,20 +459,20 @@ impl<'a> State<'a> { self.print_ident(label.ident); self.word_space(":"); } - self.cbox(0); - self.ibox(0); + let cb = self.cbox(0); + let ib = self.ibox(0); self.word_nbsp("while"); self.print_expr_as_cond(test); self.space(); - self.print_block_with_attrs(blk, attrs); + self.print_block_with_attrs(blk, attrs, cb, ib); } ast::ExprKind::ForLoop { pat, iter, body, label, kind } => { if let Some(label) = label { self.print_ident(label.ident); self.word_space(":"); } - self.cbox(0); - self.ibox(0); + let cb = self.cbox(0); + let ib = self.ibox(0); self.word_nbsp("for"); if kind == &ForLoopKind::ForAwait { self.word_nbsp("await"); @@ -480,21 +482,21 @@ impl<'a> State<'a> { self.word_space("in"); self.print_expr_as_cond(iter); self.space(); - self.print_block_with_attrs(body, attrs); + self.print_block_with_attrs(body, attrs, cb, ib); } ast::ExprKind::Loop(blk, opt_label, _) => { + let cb = self.cbox(0); + let ib = self.ibox(0); if let Some(label) = opt_label { self.print_ident(label.ident); self.word_space(":"); } - self.cbox(0); - self.ibox(0); self.word_nbsp("loop"); - self.print_block_with_attrs(blk, attrs); + self.print_block_with_attrs(blk, attrs, cb, ib); } ast::ExprKind::Match(expr, arms, match_kind) => { - self.cbox(0); - self.ibox(0); + let cb = self.cbox(0); + let ib = self.ibox(0); match match_kind { MatchKind::Prefix => { @@ -512,13 +514,13 @@ impl<'a> State<'a> { } } - self.bopen(); + self.bopen(ib); self.print_inner_attributes_no_trailing_hardbreak(attrs); for arm in arms { self.print_arm(arm); } let empty = attrs.is_empty() && arms.is_empty(); - self.bclose(expr.span, empty); + self.bclose(expr.span, empty, cb); } ast::ExprKind::Closure(box ast::Closure { binder, @@ -540,12 +542,6 @@ impl<'a> State<'a> { self.print_fn_params_and_ret(fn_decl, true); self.space(); self.print_expr(body, FixupContext::default()); - self.end(); // need to close a box - - // a box will be closed by print_expr, but we didn't want an overall - // wrapper so we closed the corresponding opening. so create an - // empty box to satisfy the close. - self.ibox(0); } ast::ExprKind::Block(blk, opt_label) => { if let Some(label) = opt_label { @@ -553,18 +549,18 @@ impl<'a> State<'a> { self.word_space(":"); } // containing cbox, will be closed by print-block at } - self.cbox(0); + let cb = self.cbox(0); // head-box, will be closed by print-block after { - self.ibox(0); - self.print_block_with_attrs(blk, attrs); + let ib = self.ibox(0); + self.print_block_with_attrs(blk, attrs, cb, ib); } ast::ExprKind::Gen(capture_clause, blk, kind, _decl_span) => { self.word_nbsp(kind.modifier()); self.print_capture_clause(*capture_clause); // cbox/ibox in analogy to the `ExprKind::Block` arm above - self.cbox(0); - self.ibox(0); - self.print_block_with_attrs(blk, attrs); + let cb = self.cbox(0); + let ib = self.ibox(0); + self.print_block_with_attrs(blk, attrs, cb, ib); } ast::ExprKind::Await(expr, _) => { self.print_expr_cond_paren( @@ -605,8 +601,7 @@ impl<'a> State<'a> { fixup.leftmost_subexpression(), ); self.space(); - self.word(op.node.as_str()); - self.word_space("="); + self.word_space(op.node.as_str()); self.print_expr_cond_paren( rhs, rhs.precedence() < ExprPrecedence::Assign, @@ -727,19 +722,19 @@ impl<'a> State<'a> { // FIXME: Print `builtin # format_args` once macro `format_args` uses `builtin_syntax`. self.word("format_args!"); self.popen(); - self.ibox(0); + let ib = self.ibox(0); self.word(reconstruct_format_args_template_string(&fmt.template)); for arg in fmt.arguments.all_args() { self.word_space(","); self.print_expr(&arg.expr, FixupContext::default()); } - self.end(); + self.end(ib); self.pclose(); } ast::ExprKind::OffsetOf(container, fields) => { self.word("builtin # offset_of"); self.popen(); - self.ibox(0); + let ib = self.ibox(0); self.print_type(container); self.word(","); self.space(); @@ -752,7 +747,7 @@ impl<'a> State<'a> { self.print_ident(field); } } - self.end(); + self.end(ib); self.pclose(); } ast::ExprKind::MacCall(m) => self.print_mac(m), @@ -790,10 +785,10 @@ impl<'a> State<'a> { self.word("?") } ast::ExprKind::TryBlock(blk) => { - self.cbox(0); - self.ibox(0); + let cb = self.cbox(0); + let ib = self.ibox(0); self.word_nbsp("try"); - self.print_block_with_attrs(blk, attrs) + self.print_block_with_attrs(blk, attrs, cb, ib) } ast::ExprKind::UnsafeBinderCast(kind, expr, ty) => { self.word("builtin # "); @@ -802,7 +797,7 @@ impl<'a> State<'a> { ast::UnsafeBinderCastKind::Unwrap => self.word("unwrap_binder"), } self.popen(); - self.ibox(0); + let ib = self.ibox(0); self.print_expr(expr, FixupContext::default()); if let Some(ty) = ty { @@ -811,7 +806,7 @@ impl<'a> State<'a> { self.print_type(ty); } - self.end(); + self.end(ib); self.pclose(); } ast::ExprKind::Err(_) => { @@ -832,7 +827,7 @@ impl<'a> State<'a> { self.pclose(); } - self.end(); + self.end(ib); } fn print_arm(&mut self, arm: &ast::Arm) { @@ -840,8 +835,8 @@ impl<'a> State<'a> { if arm.attrs.is_empty() { self.space(); } - self.cbox(INDENT_UNIT); - self.ibox(0); + let cb = self.cbox(INDENT_UNIT); + let ib = self.ibox(0); self.maybe_print_comment(arm.pat.span.lo()); self.print_outer_attributes(&arm.attrs); self.print_pat(&arm.pat); @@ -862,8 +857,7 @@ impl<'a> State<'a> { self.word_space(":"); } - // The block will close the pattern's ibox. - self.print_block_unclosed_indent(blk); + self.print_block_unclosed_indent(blk, ib); // If it is a user-provided unsafe block, print a comma after it. if let BlockCheckMode::Unsafe(ast::UserProvided) = blk.rules { @@ -871,15 +865,16 @@ impl<'a> State<'a> { } } _ => { - self.end(); // Close the ibox for the pattern. + self.end(ib); self.print_expr(body, FixupContext::new_match_arm()); self.word(","); } } } else { + self.end(ib); self.word(","); } - self.end(); // Close enclosing cbox. + self.end(cb); } fn print_closure_binder(&mut self, binder: &ast::ClosureBinder) { diff --git a/compiler/rustc_ast_pretty/src/pprust/state/item.rs b/compiler/rustc_ast_pretty/src/pprust/state/item.rs index d406a56c05da0..8445988373873 100644 --- a/compiler/rustc_ast_pretty/src/pprust/state/item.rs +++ b/compiler/rustc_ast_pretty/src/pprust/state/item.rs @@ -1,10 +1,10 @@ use ast::StaticItem; use itertools::{Itertools, Position}; -use rustc_ast as ast; -use rustc_ast::ModKind; use rustc_ast::ptr::P; +use rustc_ast::{self as ast, EIIImpl, ModKind, Safety}; use rustc_span::Ident; +use crate::pp::BoxMarker; use crate::pp::Breaks::Inconsistent; use crate::pprust::state::fixup::FixupContext; use crate::pprust::state::{AnnNode, INDENT_UNIT, PrintState, State}; @@ -28,23 +28,24 @@ impl<'a> State<'a> { } fn print_foreign_item(&mut self, item: &ast::ForeignItem) { - let ast::Item { id, span, ident, ref attrs, ref kind, ref vis, tokens: _ } = *item; + let ast::Item { id, span, ref attrs, ref kind, ref vis, tokens: _ } = *item; self.ann.pre(self, AnnNode::SubItem(id)); self.hardbreak_if_not_bol(); self.maybe_print_comment(span.lo()); self.print_outer_attributes(attrs); match kind { ast::ForeignItemKind::Fn(func) => { - self.print_fn_full(ident, vis, attrs, &*func); + self.print_fn_full(vis, attrs, &*func); } ast::ForeignItemKind::Static(box ast::StaticItem { + ident, ty, mutability, expr, safety, define_opaque, }) => self.print_item_const( - ident, + *ident, Some(*mutability), &ast::Generics::default(), ty, @@ -56,13 +57,14 @@ impl<'a> State<'a> { ), ast::ForeignItemKind::TyAlias(box ast::TyAlias { defaultness, + ident, generics, where_clauses, bounds, ty, }) => { self.print_associated_type( - ident, + *ident, generics, *where_clauses, bounds, @@ -94,7 +96,7 @@ impl<'a> State<'a> { define_opaque: Option<&[(ast::NodeId, ast::Path)]>, ) { self.print_define_opaques(define_opaque); - self.head(""); + let (cb, ib) = self.head(""); self.print_visibility(vis); self.print_safety(safety); self.print_defaultness(defaultness); @@ -111,14 +113,14 @@ impl<'a> State<'a> { if body.is_some() { self.space(); } - self.end(); // end the head-ibox + self.end(ib); if let Some(body) = body { self.word_space("="); self.print_expr(body, FixupContext::default()); } self.print_where_clause(&generics.where_clause); self.word(";"); - self.end(); // end the outer cbox + self.end(cb); } fn print_associated_type( @@ -133,7 +135,7 @@ impl<'a> State<'a> { ) { let (before_predicates, after_predicates) = generics.where_clause.predicates.split_at(where_clauses.split); - self.head(""); + let (cb, ib) = self.head(""); self.print_visibility(vis); self.print_defaultness(defaultness); self.word_space("type"); @@ -151,29 +153,33 @@ impl<'a> State<'a> { } self.print_where_clause_parts(where_clauses.after.has_where_token, after_predicates); self.word(";"); - self.end(); // end inner head-block - self.end(); // end outer head-block + self.end(ib); + self.end(cb); } /// Pretty-prints an item. pub(crate) fn print_item(&mut self, item: &ast::Item) { + if self.is_sdylib_interface && item.span.is_dummy() { + // Do not print prelude for interface files. + return; + } self.hardbreak_if_not_bol(); self.maybe_print_comment(item.span.lo()); self.print_outer_attributes(&item.attrs); self.ann.pre(self, AnnNode::Item(item)); match &item.kind { - ast::ItemKind::ExternCrate(orig_name) => { - self.head(visibility_qualified(&item.vis, "extern crate")); + ast::ItemKind::ExternCrate(orig_name, ident) => { + let (cb, ib) = self.head(visibility_qualified(&item.vis, "extern crate")); if let &Some(orig_name) = orig_name { self.print_name(orig_name); self.space(); self.word("as"); self.space(); } - self.print_ident(item.ident); + self.print_ident(*ident); self.word(";"); - self.end(); // end inner head-block - self.end(); // end outer head-block + self.end(ib); + self.end(cb); } ast::ItemKind::Use(tree) => { self.print_visibility(&item.vis); @@ -182,6 +188,7 @@ impl<'a> State<'a> { self.word(";"); } ast::ItemKind::Static(box StaticItem { + ident, ty, safety, mutability: mutbl, @@ -190,7 +197,7 @@ impl<'a> State<'a> { }) => { self.print_safety(*safety); self.print_item_const( - item.ident, + *ident, Some(*mutbl), &ast::Generics::default(), ty, @@ -203,13 +210,14 @@ impl<'a> State<'a> { } ast::ItemKind::Const(box ast::ConstItem { defaultness, + ident, generics, ty, expr, define_opaque, }) => { self.print_item_const( - item.ident, + *ident, None, generics, ty, @@ -221,36 +229,36 @@ impl<'a> State<'a> { ); } ast::ItemKind::Fn(func) => { - self.print_fn_full(item.ident, &item.vis, &item.attrs, &*func); + self.print_fn_full(&item.vis, &item.attrs, &*func); } - ast::ItemKind::Mod(safety, mod_kind) => { - self.head(Self::to_string(|s| { + ast::ItemKind::Mod(safety, ident, mod_kind) => { + let (cb, ib) = self.head(Self::to_string(|s| { s.print_visibility(&item.vis); s.print_safety(*safety); s.word("mod"); })); - self.print_ident(item.ident); + self.print_ident(*ident); match mod_kind { ModKind::Loaded(items, ..) => { self.nbsp(); - self.bopen(); + self.bopen(ib); self.print_inner_attributes(&item.attrs); for item in items { self.print_item(item); } let empty = item.attrs.is_empty() && items.is_empty(); - self.bclose(item.span, empty); + self.bclose(item.span, empty, cb); } ModKind::Unloaded => { self.word(";"); - self.end(); // end inner head-block - self.end(); // end outer head-block + self.end(ib); + self.end(cb); } } } ast::ItemKind::ForeignMod(nmod) => { - self.head(Self::to_string(|s| { + let (cb, ib) = self.head(Self::to_string(|s| { s.print_safety(nmod.safety); s.word("extern"); })); @@ -258,28 +266,29 @@ impl<'a> State<'a> { self.print_token_literal(abi.as_token_lit(), abi.span); self.nbsp(); } - self.bopen(); + self.bopen(ib); self.print_foreign_mod(nmod, &item.attrs); let empty = item.attrs.is_empty() && nmod.items.is_empty(); - self.bclose(item.span, empty); + self.bclose(item.span, empty, cb); } ast::ItemKind::GlobalAsm(asm) => { // FIXME: Print `builtin # global_asm` once macro `global_asm` uses `builtin_syntax`. - self.head(visibility_qualified(&item.vis, "global_asm!")); + let (cb, ib) = self.head(visibility_qualified(&item.vis, "global_asm!")); self.print_inline_asm(asm); self.word(";"); - self.end(); - self.end(); + self.end(ib); + self.end(cb); } ast::ItemKind::TyAlias(box ast::TyAlias { defaultness, + ident, generics, where_clauses, bounds, ty, }) => { self.print_associated_type( - item.ident, + *ident, generics, *where_clauses, bounds, @@ -288,16 +297,16 @@ impl<'a> State<'a> { *defaultness, ); } - ast::ItemKind::Enum(enum_definition, params) => { - self.print_enum_def(enum_definition, params, item.ident, item.span, &item.vis); + ast::ItemKind::Enum(ident, enum_definition, params) => { + self.print_enum_def(enum_definition, params, *ident, item.span, &item.vis); } - ast::ItemKind::Struct(struct_def, generics) => { - self.head(visibility_qualified(&item.vis, "struct")); - self.print_struct(struct_def, generics, item.ident, item.span, true); + ast::ItemKind::Struct(ident, struct_def, generics) => { + let (cb, ib) = self.head(visibility_qualified(&item.vis, "struct")); + self.print_struct(struct_def, generics, *ident, item.span, true, cb, ib); } - ast::ItemKind::Union(struct_def, generics) => { - self.head(visibility_qualified(&item.vis, "union")); - self.print_struct(struct_def, generics, item.ident, item.span, true); + ast::ItemKind::Union(ident, struct_def, generics) => { + let (cb, ib) = self.head(visibility_qualified(&item.vis, "union")); + self.print_struct(struct_def, generics, *ident, item.span, true, cb, ib); } ast::ItemKind::Impl(box ast::Impl { safety, @@ -309,7 +318,7 @@ impl<'a> State<'a> { self_ty, items, }) => { - self.head(""); + let (cb, ib) = self.head(""); self.print_visibility(&item.vis); self.print_defaultness(*defaultness); self.print_safety(*safety); @@ -338,28 +347,28 @@ impl<'a> State<'a> { self.print_where_clause(&generics.where_clause); self.space(); - self.bopen(); + self.bopen(ib); self.print_inner_attributes(&item.attrs); for impl_item in items { self.print_assoc_item(impl_item); } let empty = item.attrs.is_empty() && items.is_empty(); - self.bclose(item.span, empty); + self.bclose(item.span, empty, cb); } ast::ItemKind::Trait(box ast::Trait { - is_auto, safety, + is_auto, + ident, generics, bounds, items, - .. }) => { - self.head(""); + let (cb, ib) = self.head(""); self.print_visibility(&item.vis); self.print_safety(*safety); self.print_is_auto(*is_auto); self.word_nbsp("trait"); - self.print_ident(item.ident); + self.print_ident(*ident); self.print_generic_params(&generics.params); if !bounds.is_empty() { self.word_nbsp(":"); @@ -367,17 +376,17 @@ impl<'a> State<'a> { } self.print_where_clause(&generics.where_clause); self.word(" "); - self.bopen(); + self.bopen(ib); self.print_inner_attributes(&item.attrs); for trait_item in items { self.print_assoc_item(trait_item); } let empty = item.attrs.is_empty() && items.is_empty(); - self.bclose(item.span, empty); + self.bclose(item.span, empty, cb); } - ast::ItemKind::TraitAlias(generics, bounds) => { - self.head(visibility_qualified(&item.vis, "trait")); - self.print_ident(item.ident); + ast::ItemKind::TraitAlias(ident, generics, bounds) => { + let (cb, ib) = self.head(visibility_qualified(&item.vis, "trait")); + self.print_ident(*ident); self.print_generic_params(&generics.params); self.nbsp(); if !bounds.is_empty() { @@ -386,8 +395,8 @@ impl<'a> State<'a> { } self.print_where_clause(&generics.where_clause); self.word(";"); - self.end(); // end inner head-block - self.end(); // end outer head-block + self.end(ib); + self.end(cb); } ast::ItemKind::MacCall(mac) => { self.print_mac(mac); @@ -395,8 +404,8 @@ impl<'a> State<'a> { self.word(";"); } } - ast::ItemKind::MacroDef(macro_def) => { - self.print_mac_def(macro_def, &item.ident, item.span, |state| { + ast::ItemKind::MacroDef(ident, macro_def) => { + self.print_mac_def(macro_def, &ident, item.span, |state| { state.print_visibility(&item.vis) }); } @@ -428,28 +437,24 @@ impl<'a> State<'a> { span: rustc_span::Span, visibility: &ast::Visibility, ) { - self.head(visibility_qualified(visibility, "enum")); + let (cb, ib) = self.head(visibility_qualified(visibility, "enum")); self.print_ident(ident); self.print_generic_params(&generics.params); self.print_where_clause(&generics.where_clause); self.space(); - self.print_variants(&enum_definition.variants, span) - } - - fn print_variants(&mut self, variants: &[ast::Variant], span: rustc_span::Span) { - self.bopen(); - for v in variants { + self.bopen(ib); + for v in enum_definition.variants.iter() { self.space_if_not_bol(); self.maybe_print_comment(v.span.lo()); self.print_outer_attributes(&v.attrs); - self.ibox(0); + let ib = self.ibox(0); self.print_variant(v); self.word(","); - self.end(); + self.end(ib); self.maybe_print_trailing_comment(v.span, None); } - let empty = variants.is_empty(); - self.bclose(span, empty) + let empty = enum_definition.variants.is_empty(); + self.bclose(span, empty, cb) } pub(crate) fn print_visibility(&mut self, vis: &ast::Visibility) { @@ -473,33 +478,6 @@ impl<'a> State<'a> { } } - pub(crate) fn print_record_struct_body( - &mut self, - fields: &[ast::FieldDef], - span: rustc_span::Span, - ) { - self.nbsp(); - self.bopen(); - - let empty = fields.is_empty(); - if !empty { - self.hardbreak_if_not_bol(); - - for field in fields { - self.hardbreak_if_not_bol(); - self.maybe_print_comment(field.span.lo()); - self.print_outer_attributes(&field.attrs); - self.print_visibility(&field.vis); - self.print_ident(field.ident.unwrap()); - self.word_nbsp(":"); - self.print_type(&field.ty); - self.word(","); - } - } - - self.bclose(span, empty); - } - fn print_struct( &mut self, struct_def: &ast::VariantData, @@ -507,6 +485,8 @@ impl<'a> State<'a> { ident: Ident, span: rustc_span::Span, print_finalizer: bool, + cb: BoxMarker, + ib: BoxMarker, ) { self.print_ident(ident); self.print_generic_params(&generics.params); @@ -526,21 +506,40 @@ impl<'a> State<'a> { if print_finalizer { self.word(";"); } - self.end(); - self.end(); // Close the outer-box. + self.end(ib); + self.end(cb); } ast::VariantData::Struct { fields, .. } => { self.print_where_clause(&generics.where_clause); - self.print_record_struct_body(fields, span); + self.nbsp(); + self.bopen(ib); + + let empty = fields.is_empty(); + if !empty { + self.hardbreak_if_not_bol(); + + for field in fields { + self.hardbreak_if_not_bol(); + self.maybe_print_comment(field.span.lo()); + self.print_outer_attributes(&field.attrs); + self.print_visibility(&field.vis); + self.print_ident(field.ident.unwrap()); + self.word_nbsp(":"); + self.print_type(&field.ty); + self.word(","); + } + } + + self.bclose(span, empty, cb); } } } pub(crate) fn print_variant(&mut self, v: &ast::Variant) { - self.head(""); + let (cb, ib) = self.head(""); self.print_visibility(&v.vis); let generics = ast::Generics::default(); - self.print_struct(&v.data, &generics, v.ident, v.span, false); + self.print_struct(&v.data, &generics, v.ident, v.span, false, cb, ib); if let Some(d) = &v.disr_expr { self.space(); self.word_space("="); @@ -549,24 +548,25 @@ impl<'a> State<'a> { } fn print_assoc_item(&mut self, item: &ast::AssocItem) { - let ast::Item { id, span, ident, ref attrs, ref kind, ref vis, tokens: _ } = *item; + let ast::Item { id, span, ref attrs, ref kind, ref vis, tokens: _ } = *item; self.ann.pre(self, AnnNode::SubItem(id)); self.hardbreak_if_not_bol(); self.maybe_print_comment(span.lo()); self.print_outer_attributes(attrs); match kind { ast::AssocItemKind::Fn(func) => { - self.print_fn_full(ident, vis, attrs, &*func); + self.print_fn_full(vis, attrs, &*func); } ast::AssocItemKind::Const(box ast::ConstItem { defaultness, + ident, generics, ty, expr, define_opaque, }) => { self.print_item_const( - ident, + *ident, None, generics, ty, @@ -579,13 +579,14 @@ impl<'a> State<'a> { } ast::AssocItemKind::Type(box ast::TyAlias { defaultness, + ident, generics, where_clauses, bounds, ty, }) => { self.print_associated_type( - ident, + *ident, generics, *where_clauses, bounds, @@ -629,9 +630,7 @@ impl<'a> State<'a> { kind: DelegationKind<'_>, body: &Option>, ) { - if body.is_some() { - self.head(""); - } + let body_cb_ib = body.as_ref().map(|body| (body, self.head(""))); self.print_visibility(vis); self.word_nbsp("reuse"); @@ -663,38 +662,52 @@ impl<'a> State<'a> { self.word("*"); } } - if let Some(body) = body { + if let Some((body, (cb, ib))) = body_cb_ib { self.nbsp(); - self.print_block_with_attrs(body, attrs); + self.print_block_with_attrs(body, attrs, cb, ib); } else { self.word(";"); } } - fn print_fn_full( - &mut self, - name: Ident, - vis: &ast::Visibility, - attrs: &[ast::Attribute], - func: &ast::Fn, - ) { - let ast::Fn { defaultness, generics, sig, contract, body, define_opaque } = func; - + fn print_fn_full(&mut self, vis: &ast::Visibility, attrs: &[ast::Attribute], func: &ast::Fn) { + let ast::Fn { defaultness, ident, generics, sig, contract, body, define_opaque, eii_impl } = + func; self.print_define_opaques(define_opaque.as_deref()); - if body.is_some() { - self.head(""); + for EIIImpl { eii_macro_path, impl_safety, .. } in eii_impl { + self.word("#["); + if let Safety::Unsafe(..) = impl_safety { + self.word("unsafe"); + self.popen(); + } + self.print_path(eii_macro_path, false, 0); + if let Safety::Unsafe(..) = impl_safety { + self.pclose(); + } + self.word("]"); + self.hardbreak(); } + + let body_cb_ib = body.as_ref().map(|body| (body, self.head(""))); + self.print_visibility(vis); self.print_defaultness(*defaultness); - self.print_fn(&sig.decl, sig.header, Some(name), generics); + self.print_fn(&sig.decl, sig.header, Some(*ident), generics); if let Some(contract) = &contract { self.nbsp(); self.print_contract(contract); } - if let Some(body) = body { + if let Some((body, (cb, ib))) = body_cb_ib { + if self.is_sdylib_interface { + self.word(";"); + self.end(ib); // end inner head-block + self.end(cb); // end outer head-block + return; + } + self.nbsp(); - self.print_block_with_attrs(body, attrs); + self.print_block_with_attrs(body, attrs, cb, ib); } else { self.word(";"); } @@ -734,13 +747,13 @@ impl<'a> State<'a> { &mut self, decl: &ast::FnDecl, header: ast::FnHeader, - name: Option, + ident: Option, generics: &ast::Generics, ) { self.print_fn_header_info(header); - if let Some(name) = name { + if let Some(ident) = ident { self.nbsp(); - self.print_ident(name); + self.print_ident(ident); } self.print_generic_params(&generics.params); self.print_fn_params_and_ret(decl, false); @@ -850,10 +863,10 @@ impl<'a> State<'a> { } else if let [(item, _)] = items.as_slice() { self.print_use_tree(item); } else { - self.cbox(INDENT_UNIT); + let cb = self.cbox(INDENT_UNIT); self.word("{"); self.zerobreak(); - self.ibox(0); + let ib = self.ibox(0); for (pos, use_tree) in items.iter().with_position() { let is_last = matches!(pos, Position::Last | Position::Only); self.print_use_tree(&use_tree.0); @@ -866,11 +879,11 @@ impl<'a> State<'a> { } } } - self.end(); + self.end(ib); self.trailing_comma(); self.offset(-INDENT_UNIT); self.word("}"); - self.end(); + self.end(cb); } } } diff --git a/compiler/rustc_ast_pretty/src/pprust/tests.rs b/compiler/rustc_ast_pretty/src/pprust/tests.rs index 4c42dd1f2023f..786de529c5b89 100644 --- a/compiler/rustc_ast_pretty/src/pprust/tests.rs +++ b/compiler/rustc_ast_pretty/src/pprust/tests.rs @@ -7,14 +7,14 @@ use super::*; fn fun_to_string( decl: &ast::FnDecl, header: ast::FnHeader, - name: Ident, + ident: Ident, generics: &ast::Generics, ) -> String { to_string(|s| { - s.head(""); - s.print_fn(decl, header, Some(name), generics); - s.end(); // Close the head box. - s.end(); // Close the outer box. + let (cb, ib) = s.head(""); + s.print_fn(decl, header, Some(ident), generics); + s.end(ib); + s.end(cb); }) } diff --git a/compiler/rustc_attr_data_structures/src/attributes.rs b/compiler/rustc_attr_data_structures/src/attributes.rs index 969bce7ae20a6..ced25f307e0bd 100644 --- a/compiler/rustc_attr_data_structures/src/attributes.rs +++ b/compiler/rustc_attr_data_structures/src/attributes.rs @@ -2,6 +2,7 @@ use rustc_abi::Align; use rustc_ast::token::CommentKind; use rustc_ast::{self as ast, AttrStyle}; use rustc_macros::{Decodable, Encodable, HashStable_Generic, PrintAttribute}; +use rustc_span::def_id::DefId; use rustc_span::hygiene::Transparency; use rustc_span::{Span, Symbol}; use thin_vec::ThinVec; @@ -138,6 +139,23 @@ impl Deprecation { } } +#[derive(Copy, Clone, Debug, HashStable_Generic, Encodable, Decodable, PrintAttribute)] +pub struct EIIImpl { + pub eii_macro: DefId, + pub impl_marked_unsafe: bool, + pub span: Span, + pub inner_span: Span, + pub is_default: bool, +} + +#[derive(Copy, Clone, Debug, HashStable_Generic, Encodable, Decodable, PrintAttribute)] +pub struct EIIDecl { + pub eii_extern_item: DefId, + /// whether or not it is unsafe to implement this EII + pub impl_unsafe: bool, + pub span: Span, +} + /// Represent parsed, *built in*, inert attributes. /// /// That means attributes that are not actually ever expanded. @@ -189,9 +207,11 @@ pub enum AttributeKind { span: Span, comment: Symbol, }, + EiiImpl(ThinVec), + EiiMacroFor(EIIDecl), + EiiMangleExtern, MacroTransparency(Transparency), Repr(ThinVec<(ReprAttr, Span)>), - RustcMacroEdition2021, Stability { stability: Stability, /// Span of the `#[stable(...)]` or `#[unstable(...)]` attribute diff --git a/compiler/rustc_attr_data_structures/src/lib.rs b/compiler/rustc_attr_data_structures/src/lib.rs index c61b44b273de5..13efbffd16d3e 100644 --- a/compiler/rustc_attr_data_structures/src/lib.rs +++ b/compiler/rustc_attr_data_structures/src/lib.rs @@ -1,7 +1,7 @@ // tidy-alphabetical-start #![allow(internal_features)] +#![cfg_attr(bootstrap, feature(let_chains))] #![doc(rust_logo)] -#![feature(let_chains)] #![feature(rustdoc_internals)] // tidy-alphabetical-end @@ -16,6 +16,7 @@ use rustc_abi::Align; use rustc_ast::token::CommentKind; use rustc_ast::{AttrStyle, IntTy, UintTy}; use rustc_ast_pretty::pp::Printer; +use rustc_span::def_id::DefId; use rustc_span::hygiene::Transparency; use rustc_span::{Span, Symbol}; pub use stability::*; @@ -155,7 +156,7 @@ macro_rules! print_tup { print_tup!(A B C D E F G H); print_skip!(Span, ()); print_disp!(u16, bool, NonZero); -print_debug!(Symbol, UintTy, IntTy, Align, AttrStyle, CommentKind, Transparency); +print_debug!(Symbol, UintTy, IntTy, Align, AttrStyle, CommentKind, Transparency, DefId); /// Finds attributes in sequences of attributes by pattern matching. /// diff --git a/compiler/rustc_attr_data_structures/src/lints.rs b/compiler/rustc_attr_data_structures/src/lints.rs new file mode 100644 index 0000000000000..48b27e93d3cf0 --- /dev/null +++ b/compiler/rustc_attr_data_structures/src/lints.rs @@ -0,0 +1,17 @@ +// pub type HirDelayedLint = ( +// &'static Lint, +// HirId, +// Span, +// Box FnOnce(&'b mut Diag<'a, ()>) + 'static>, +// ); + +use rustc_span::Span; + +pub enum AttributeLintKind { + UnusedDuplicate { unused: Span, used: Span, warning: bool }, +} + +pub struct AttributeLint { + pub id: Id, + pub kind: AttributeLintKind, +} diff --git a/compiler/rustc_attr_parsing/src/attributes/allow_unstable.rs b/compiler/rustc_attr_parsing/src/attributes/allow_unstable.rs index d37ede86cfd2a..c1d95d07f4c65 100644 --- a/compiler/rustc_attr_parsing/src/attributes/allow_unstable.rs +++ b/compiler/rustc_attr_parsing/src/attributes/allow_unstable.rs @@ -53,7 +53,7 @@ fn parse_unstable<'a>( for param in list.mixed() { let param_span = param.span(); - if let Some(ident) = param.meta_item().and_then(|i| i.word_without_args()) { + if let Some(ident) = param.meta_item().and_then(|i| i.path_without_args().word()) { res.push(ident.name); } else { cx.emit_err(session_diagnostics::ExpectsFeatures { diff --git a/compiler/rustc_attr_parsing/src/attributes/cfg.rs b/compiler/rustc_attr_parsing/src/attributes/cfg.rs index 0d6d521b40c61..7cb1fede1741a 100644 --- a/compiler/rustc_attr_parsing/src/attributes/cfg.rs +++ b/compiler/rustc_attr_parsing/src/attributes/cfg.rs @@ -7,7 +7,6 @@ use rustc_session::config::ExpectedValues; use rustc_session::lint::BuiltinLintDiag; use rustc_session::lint::builtin::UNEXPECTED_CFGS; use rustc_session::parse::feature_err; -use rustc_span::symbol::kw; use rustc_span::{Span, Symbol, sym}; use crate::session_diagnostics::{self, UnsupportedLiteralReason}; @@ -89,20 +88,6 @@ pub fn eval_condition( let cfg = match cfg { MetaItemInner::MetaItem(meta_item) => meta_item, MetaItemInner::Lit(MetaItemLit { kind: LitKind::Bool(b), .. }) => { - if let Some(features) = features { - // we can't use `try_gate_cfg` as symbols don't differentiate between `r#true` - // and `true`, and we want to keep the former working without feature gate - gate_cfg( - &( - if *b { kw::True } else { kw::False }, - sym::cfg_boolean_literals, - |features: &Features| features.cfg_boolean_literals(), - ), - cfg.span(), - sess, - features, - ); - } return *b; } _ => { @@ -117,7 +102,7 @@ pub fn eval_condition( }; match &cfg.kind { - MetaItemKind::List(mis) if cfg.name_or_empty() == sym::version => { + MetaItemKind::List(mis) if cfg.has_name(sym::version) => { try_gate_cfg(sym::version, cfg.span, sess, features); let (min_version, span) = match &mis[..] { [MetaItemInner::Lit(MetaItemLit { kind: LitKind::Str(sym, ..), span, .. })] => { @@ -164,18 +149,18 @@ pub fn eval_condition( // The unwraps below may look dangerous, but we've already asserted // that they won't fail with the loop above. - match cfg.name_or_empty() { - sym::any => mis + match cfg.name() { + Some(sym::any) => mis .iter() // We don't use any() here, because we want to evaluate all cfg condition // as eval_condition can (and does) extra checks .fold(false, |res, mi| res | eval_condition(mi, sess, features, eval)), - sym::all => mis + Some(sym::all) => mis .iter() // We don't use all() here, because we want to evaluate all cfg condition // as eval_condition can (and does) extra checks .fold(true, |res, mi| res & eval_condition(mi, sess, features, eval)), - sym::not => { + Some(sym::not) => { let [mi] = mis.as_slice() else { dcx.emit_err(session_diagnostics::ExpectedOneCfgPattern { span: cfg.span }); return false; @@ -183,7 +168,7 @@ pub fn eval_condition( !eval_condition(mi, sess, features, eval) } - sym::target => { + Some(sym::target) => { if let Some(features) = features && !features.cfg_target_compact() { diff --git a/compiler/rustc_attr_parsing/src/attributes/deprecation.rs b/compiler/rustc_attr_parsing/src/attributes/deprecation.rs index 7d1417446b21d..fb3d5f57d4fac 100644 --- a/compiler/rustc_attr_parsing/src/attributes/deprecation.rs +++ b/compiler/rustc_attr_parsing/src/attributes/deprecation.rs @@ -1,5 +1,4 @@ use rustc_attr_data_structures::{AttributeKind, DeprecatedSince, Deprecation}; -use rustc_span::symbol::Ident; use rustc_span::{Span, Symbol, sym}; use super::SingleAttributeParser; @@ -13,16 +12,13 @@ pub(crate) struct DeprecationParser; fn get( cx: &AcceptContext<'_>, - ident: Ident, + name: Symbol, param_span: Span, arg: &ArgParser<'_>, item: &Option, ) -> Option { if item.is_some() { - cx.emit_err(session_diagnostics::MultipleItem { - span: param_span, - item: ident.to_string(), - }); + cx.emit_err(session_diagnostics::MultipleItem { span: param_span, item: name.to_string() }); return None; } if let Some(v) = arg.name_value() { @@ -83,16 +79,16 @@ impl SingleAttributeParser for DeprecationParser { return None; }; - let (ident, arg) = param.word_or_empty(); + let ident_name = param.path_without_args().word_sym(); - match ident.name { - sym::since => { - since = Some(get(cx, ident, param_span, arg, &since)?); + match ident_name { + Some(name @ sym::since) => { + since = Some(get(cx, name, param_span, param.args(), &since)?); } - sym::note => { - note = Some(get(cx, ident, param_span, arg, ¬e)?); + Some(name @ sym::note) => { + note = Some(get(cx, name, param_span, param.args(), ¬e)?); } - sym::suggestion => { + Some(name @ sym::suggestion) => { if !features.deprecated_suggestion() { cx.emit_err(session_diagnostics::DeprecatedItemSuggestion { span: param_span, @@ -101,12 +97,12 @@ impl SingleAttributeParser for DeprecationParser { }); } - suggestion = Some(get(cx, ident, param_span, arg, &suggestion)?); + suggestion = Some(get(cx, name, param_span, param.args(), &suggestion)?); } _ => { cx.emit_err(session_diagnostics::UnknownMetaItem { span: param_span, - item: ident.to_string(), + item: param.path_without_args().to_string(), expected: if features.deprecated_suggestion() { &["since", "note", "suggestion"] } else { diff --git a/compiler/rustc_attr_parsing/src/attributes/eii.rs b/compiler/rustc_attr_parsing/src/attributes/eii.rs new file mode 100644 index 0000000000000..3e81d9f5de8d4 --- /dev/null +++ b/compiler/rustc_attr_parsing/src/attributes/eii.rs @@ -0,0 +1,17 @@ +use rustc_attr_data_structures::AttributeKind; +use rustc_span::{Span, Symbol, sym}; + +use super::{AcceptContext, SingleAttributeParser}; +use crate::parser::ArgParser; + +pub(crate) struct EiiMangleExternParser; + +impl SingleAttributeParser for EiiMangleExternParser { + const PATH: &'static [Symbol] = &[sym::eii_mangle_extern]; + + fn on_duplicate(_cx: &AcceptContext<'_>, _first_span: Span) {} + fn convert(_cx: &AcceptContext<'_>, args: &ArgParser<'_>) -> Option { + assert!(args.no_args()); + Some(AttributeKind::EiiMangleExtern) + } +} diff --git a/compiler/rustc_attr_parsing/src/attributes/mod.rs b/compiler/rustc_attr_parsing/src/attributes/mod.rs index bac111159db53..970a1e40c2515 100644 --- a/compiler/rustc_attr_parsing/src/attributes/mod.rs +++ b/compiler/rustc_attr_parsing/src/attributes/mod.rs @@ -27,8 +27,8 @@ pub(crate) mod allow_unstable; pub(crate) mod cfg; pub(crate) mod confusables; pub(crate) mod deprecation; +pub(crate) mod eii; pub(crate) mod repr; -pub(crate) mod rustc; pub(crate) mod stability; pub(crate) mod transparency; pub(crate) mod util; diff --git a/compiler/rustc_attr_parsing/src/attributes/repr.rs b/compiler/rustc_attr_parsing/src/attributes/repr.rs index 26ca637faec68..43dfb85a7c411 100644 --- a/compiler/rustc_attr_parsing/src/attributes/repr.rs +++ b/compiler/rustc_attr_parsing/src/attributes/repr.rs @@ -96,58 +96,75 @@ fn parse_repr(cx: &AcceptContext<'_>, param: &MetaItemParser<'_>) -> Option { - cx.emit_err(session_diagnostics::InvalidReprAlignNeedArg { span: ident.span }); + match (name, args) { + (Some(sym::align), ArgParser::NoArgs) => { + cx.emit_err(session_diagnostics::InvalidReprAlignNeedArg { span: ident_span }); None } - (sym::align, ArgParser::List(l)) => parse_repr_align(cx, l, param.span(), AlignKind::Align), + (Some(sym::align), ArgParser::List(l)) => { + parse_repr_align(cx, l, param.span(), AlignKind::Align) + } - (sym::packed, ArgParser::NoArgs) => Some(ReprPacked(Align::ONE)), - (sym::packed, ArgParser::List(l)) => { + (Some(sym::packed), ArgParser::NoArgs) => Some(ReprPacked(Align::ONE)), + (Some(sym::packed), ArgParser::List(l)) => { parse_repr_align(cx, l, param.span(), AlignKind::Packed) } - (sym::align | sym::packed, ArgParser::NameValue(l)) => { + (Some(name @ sym::align | name @ sym::packed), ArgParser::NameValue(l)) => { cx.emit_err(session_diagnostics::IncorrectReprFormatGeneric { span: param.span(), // FIXME(jdonszelmann) can just be a string in the diag type - repr_arg: &ident.to_string(), + repr_arg: name, cause: IncorrectReprFormatGenericCause::from_lit_kind( param.span(), &l.value_as_lit().kind, - ident.name.as_str(), + name, ), }); None } - (sym::Rust, ArgParser::NoArgs) => Some(ReprRust), - (sym::C, ArgParser::NoArgs) => Some(ReprC), - (sym::simd, ArgParser::NoArgs) => Some(ReprSimd), - (sym::transparent, ArgParser::NoArgs) => Some(ReprTransparent), - (i @ int_pat!(), ArgParser::NoArgs) => { + (Some(sym::Rust), ArgParser::NoArgs) => Some(ReprRust), + (Some(sym::C), ArgParser::NoArgs) => Some(ReprC), + (Some(sym::simd), ArgParser::NoArgs) => Some(ReprSimd), + (Some(sym::transparent), ArgParser::NoArgs) => Some(ReprTransparent), + (Some(name @ int_pat!()), ArgParser::NoArgs) => { // int_pat!() should make sure it always parses - Some(ReprInt(int_type_of_word(i).unwrap())) + Some(ReprInt(int_type_of_word(name).unwrap())) } ( - sym::Rust | sym::C | sym::simd | sym::transparent | int_pat!(), + Some( + name @ sym::Rust + | name @ sym::C + | name @ sym::simd + | name @ sym::transparent + | name @ int_pat!(), + ), ArgParser::NameValue(_), ) => { - cx.emit_err(session_diagnostics::InvalidReprHintNoValue { - span: param.span(), - name: ident.to_string(), - }); + cx.emit_err(session_diagnostics::InvalidReprHintNoValue { span: param.span(), name }); None } - (sym::Rust | sym::C | sym::simd | sym::transparent | int_pat!(), ArgParser::List(_)) => { - cx.emit_err(session_diagnostics::InvalidReprHintNoParen { - span: param.span(), - name: ident.to_string(), - }); + ( + Some( + name @ sym::Rust + | name @ sym::C + | name @ sym::simd + | name @ sym::transparent + | name @ int_pat!(), + ), + ArgParser::List(_), + ) => { + cx.emit_err(session_diagnostics::InvalidReprHintNoParen { span: param.span(), name }); None } diff --git a/compiler/rustc_attr_parsing/src/attributes/rustc.rs b/compiler/rustc_attr_parsing/src/attributes/rustc.rs deleted file mode 100644 index bdd3bef2834bb..0000000000000 --- a/compiler/rustc_attr_parsing/src/attributes/rustc.rs +++ /dev/null @@ -1,19 +0,0 @@ -use rustc_attr_data_structures::AttributeKind; -use rustc_span::sym; - -use super::{AcceptContext, SingleAttributeParser}; -use crate::parser::ArgParser; - -pub(crate) struct RustcMacroEdition2021Parser; - -// FIXME(jdonszelmann): make these proper diagnostics -impl SingleAttributeParser for RustcMacroEdition2021Parser { - const PATH: &'static [rustc_span::Symbol] = &[sym::rustc_macro_edition_2021]; - - fn on_duplicate(_cx: &crate::context::AcceptContext<'_>, _first_span: rustc_span::Span) {} - - fn convert(_cx: &AcceptContext<'_>, args: &ArgParser<'_>) -> Option { - assert!(args.no_args()); - Some(AttributeKind::RustcMacroEdition2021) - } -} diff --git a/compiler/rustc_attr_parsing/src/attributes/stability.rs b/compiler/rustc_attr_parsing/src/attributes/stability.rs index bdad6b50186dd..cd1f21d92e7e2 100644 --- a/compiler/rustc_attr_parsing/src/attributes/stability.rs +++ b/compiler/rustc_attr_parsing/src/attributes/stability.rs @@ -242,9 +242,9 @@ pub(crate) fn parse_stability( return None; }; - match param.word_or_empty_without_args().name { - sym::feature => insert_value_into_option_or_error(cx, ¶m, &mut feature)?, - sym::since => insert_value_into_option_or_error(cx, ¶m, &mut since)?, + match param.path_without_args().word_sym() { + Some(sym::feature) => insert_value_into_option_or_error(cx, ¶m, &mut feature)?, + Some(sym::since) => insert_value_into_option_or_error(cx, ¶m, &mut since)?, _ => { cx.emit_err(session_diagnostics::UnknownMetaItem { span: param_span, @@ -310,11 +310,10 @@ pub(crate) fn parse_unstability( return None; }; - let (word, args) = param.word_or_empty(); - match word.name { - sym::feature => insert_value_into_option_or_error(cx, ¶m, &mut feature)?, - sym::reason => insert_value_into_option_or_error(cx, ¶m, &mut reason)?, - sym::issue => { + match param.path_without_args().word_sym() { + Some(sym::feature) => insert_value_into_option_or_error(cx, ¶m, &mut feature)?, + Some(sym::reason) => insert_value_into_option_or_error(cx, ¶m, &mut reason)?, + Some(sym::issue) => { insert_value_into_option_or_error(cx, ¶m, &mut issue)?; // These unwraps are safe because `insert_value_into_option_or_error` ensures the meta item @@ -328,7 +327,7 @@ pub(crate) fn parse_unstability( session_diagnostics::InvalidIssueString { span: param.span(), cause: session_diagnostics::InvalidIssueStringCause::from_int_error_kind( - args.name_value().unwrap().value_span, + param.args().name_value().unwrap().value_span, err.kind(), ), }, @@ -338,13 +337,15 @@ pub(crate) fn parse_unstability( }, }; } - sym::soft => { - if !args.no_args() { + Some(sym::soft) => { + if !param.args().no_args() { cx.emit_err(session_diagnostics::SoftNoArgs { span: param.span() }); } is_soft = true; } - sym::implied_by => insert_value_into_option_or_error(cx, ¶m, &mut implied_by)?, + Some(sym::implied_by) => { + insert_value_into_option_or_error(cx, ¶m, &mut implied_by)? + } _ => { cx.emit_err(session_diagnostics::UnknownMetaItem { span: param.span(), diff --git a/compiler/rustc_attr_parsing/src/attributes/transparency.rs b/compiler/rustc_attr_parsing/src/attributes/transparency.rs index ad83a1f7af80c..ce42b0507ed57 100644 --- a/compiler/rustc_attr_parsing/src/attributes/transparency.rs +++ b/compiler/rustc_attr_parsing/src/attributes/transparency.rs @@ -20,7 +20,7 @@ impl SingleAttributeParser for TransparencyParser { fn convert(cx: &AcceptContext<'_>, args: &ArgParser<'_>) -> Option { match args.name_value().and_then(|nv| nv.value_as_str()) { Some(sym::transparent) => Some(Transparency::Transparent), - Some(sym::semitransparent) => Some(Transparency::SemiTransparent), + Some(sym::semiopaque | sym::semitransparent) => Some(Transparency::SemiOpaque), Some(sym::opaque) => Some(Transparency::Opaque), Some(other) => { cx.dcx().span_err(cx.attr_span, format!("unknown macro transparency: `{other}`")); diff --git a/compiler/rustc_attr_parsing/src/context.rs b/compiler/rustc_attr_parsing/src/context.rs index a68d4578b40f7..fd990a02c7fe1 100644 --- a/compiler/rustc_attr_parsing/src/context.rs +++ b/compiler/rustc_attr_parsing/src/context.rs @@ -3,7 +3,7 @@ use std::collections::BTreeMap; use std::ops::Deref; use std::sync::LazyLock; -use rustc_ast::{self as ast, DelimArgs}; +use rustc_ast as ast; use rustc_attr_data_structures::AttributeKind; use rustc_errors::{DiagCtxtHandle, Diagnostic}; use rustc_feature::Features; @@ -14,8 +14,8 @@ use rustc_span::{DUMMY_SP, ErrorGuaranteed, Span, Symbol, sym}; use crate::attributes::allow_unstable::{AllowConstFnUnstableParser, AllowInternalUnstableParser}; use crate::attributes::confusables::ConfusablesParser; use crate::attributes::deprecation::DeprecationParser; +use crate::attributes::eii::EiiMangleExternParser; use crate::attributes::repr::ReprParser; -use crate::attributes::rustc::RustcMacroEdition2021Parser; use crate::attributes::stability::{ BodyStabilityParser, ConstStabilityIndirectParser, ConstStabilityParser, StabilityParser, }; @@ -77,7 +77,7 @@ attribute_groups!( // tidy-alphabetical-start Single, Single, - Single, + Single, Single, // tidy-alphabetical-end ]; @@ -211,7 +211,6 @@ impl<'sess> AttributeParser<'sess> { attrs: &'a [ast::Attribute], target_span: Span, omit_doc: OmitDoc, - lower_span: impl Copy + Fn(Span) -> Span, ) -> Vec { let mut attributes = Vec::new(); @@ -222,7 +221,7 @@ impl<'sess> AttributeParser<'sess> { // if we're only looking for a single attribute, // skip all the ones we don't care about if let Some(expected) = self.parse_only { - if attr.name_or_empty() != expected { + if !attr.has_name(expected) { continue; } } @@ -232,7 +231,7 @@ impl<'sess> AttributeParser<'sess> { // that's expanded right? But no, sometimes, when parsing attributes on macros, // we already use the lowering logic and these are still there. So, when `omit_doc` // is set we *also* want to ignore these - if omit_doc == OmitDoc::Skip && attr.name_or_empty() == sym::doc { + if omit_doc == OmitDoc::Skip && attr.has_name(sym::doc) { continue; } @@ -250,7 +249,7 @@ impl<'sess> AttributeParser<'sess> { })) } // // FIXME: make doc attributes go through a proper attribute parser - // ast::AttrKind::Normal(n) if n.name_or_empty() == sym::doc => { + // ast::AttrKind::Normal(n) if n.has_name(sym::doc) => { // let p = GenericMetaItemParser::from_attr(&n, self.dcx()); // // attributes.push(Attribute::Parsed(AttributeKind::DocComment { @@ -317,11 +316,7 @@ impl<'sess> AttributeParser<'sess> { fn lower_attr_args(&self, args: &ast::AttrArgs, lower_span: impl Fn(Span) -> Span) -> AttrArgs { match args { ast::AttrArgs::Empty => AttrArgs::Empty, - ast::AttrArgs::Delimited(args) => AttrArgs::Delimited(DelimArgs { - dspan: args.dspan, - delim: args.delim, - tokens: args.tokens.flattened(), - }), + ast::AttrArgs::Delimited(args) => AttrArgs::Delimited(args.clone()), // This is an inert key-value attribute - it will never be visible to macros // after it gets lowered to HIR. Therefore, we can extract literals to handle // nonterminals in `#[doc]` (e.g. `#[doc = $e]`). diff --git a/compiler/rustc_attr_parsing/src/lib.rs b/compiler/rustc_attr_parsing/src/lib.rs index a7465847e18bf..874fccf7ff6da 100644 --- a/compiler/rustc_attr_parsing/src/lib.rs +++ b/compiler/rustc_attr_parsing/src/lib.rs @@ -77,10 +77,10 @@ // tidy-alphabetical-start #![allow(internal_features)] -#![cfg_attr(doc, recursion_limit = "256")] // FIXME(nnethercote): will be removed by #124141 +#![cfg_attr(bootstrap, feature(let_chains))] #![doc(rust_logo)] -#![feature(let_chains)] #![feature(rustdoc_internals)] +#![recursion_limit = "256"] // tidy-alphabetical-end #[macro_use] diff --git a/compiler/rustc_attr_parsing/src/parser.rs b/compiler/rustc_attr_parsing/src/parser.rs index a8a1460591cf3..077d953cfa318 100644 --- a/compiler/rustc_attr_parsing/src/parser.rs +++ b/compiler/rustc_attr_parsing/src/parser.rs @@ -78,8 +78,8 @@ impl<'a> PathParser<'a> { (self.len() == 1).then(|| **self.segments().next().as_ref().unwrap()) } - pub fn word_or_empty(&self) -> Ident { - self.word().unwrap_or_else(Ident::empty) + pub fn word_sym(&self) -> Option { + self.word().map(|ident| ident.name) } /// Asserts that this MetaItem is some specific word. @@ -284,11 +284,6 @@ impl<'a> MetaItemParser<'a> { Some(self.word()?.0) } - /// Like [`word`](Self::word), but returns an empty symbol instead of None - pub fn word_or_empty_without_args(&self) -> Ident { - self.word_or_empty().0 - } - /// Asserts that this MetaItem starts with a word, or single segment path. /// /// Some examples: @@ -300,12 +295,6 @@ impl<'a> MetaItemParser<'a> { Some((path.word()?, args)) } - /// Like [`word`](Self::word), but returns an empty symbol instead of None - pub fn word_or_empty(&self) -> (Ident, &ArgParser<'a>) { - let (path, args) = self.deconstruct(); - (path.word().unwrap_or(Ident::empty()), args) - } - /// Asserts that this MetaItem starts with some specific word. /// /// See [`word`](Self::word) for examples of what a word is. @@ -430,9 +419,7 @@ impl<'a> MetaItemListParserContext<'a> { let span = span.with_hi(segments.last().unwrap().span.hi()); Some(AttrPath { segments: segments.into_boxed_slice(), span }) } - TokenTree::Token(Token { kind: token::OpenDelim(_) | token::CloseDelim(_), .. }, _) => { - None - } + TokenTree::Token(Token { kind, .. }, _) if kind.is_delim() => None, _ => { // malformed attributes can get here. We can't crash, but somewhere else should've // already warned for this. @@ -485,25 +472,7 @@ impl<'a> MetaItemListParserContext<'a> { } // or a path. - let path = - if let Some(TokenTree::Token(Token { kind: token::Interpolated(_), span, .. }, _)) = - self.inside_delimiters.peek() - { - self.inside_delimiters.next(); - // We go into this path if an expr ended up in an attribute that - // expansion did not turn into a literal. Say, `#[repr(align(macro!()))]` - // where the macro didn't expand to a literal. An error is already given - // for this at this point, and then we do continue. This makes this path - // reachable... - let e = self.dcx.span_delayed_bug( - *span, - "expr in place where literal is expected (builtin attr parsing)", - ); - - return Some(MetaItemOrLitParser::Err(*span, e)); - } else { - self.next_path()? - }; + let path = self.next_path()?; // Paths can be followed by: // - `(more meta items)` (another list) diff --git a/compiler/rustc_attr_parsing/src/session_diagnostics.rs b/compiler/rustc_attr_parsing/src/session_diagnostics.rs index 9d34b807ac2fe..2c434175b4b69 100644 --- a/compiler/rustc_attr_parsing/src/session_diagnostics.rs +++ b/compiler/rustc_attr_parsing/src/session_diagnostics.rs @@ -204,7 +204,7 @@ pub(crate) struct InvalidReprHintNoParen { #[primary_span] pub span: Span, - pub name: String, + pub name: Symbol, } #[derive(Diagnostic)] @@ -213,7 +213,7 @@ pub(crate) struct InvalidReprHintNoValue { #[primary_span] pub span: Span, - pub name: String, + pub name: Symbol, } /// Error code: E0565 @@ -295,21 +295,21 @@ pub(crate) struct IncorrectReprFormatExpectInteger { #[derive(Diagnostic)] #[diag(attr_parsing_incorrect_repr_format_generic, code = E0693)] -pub(crate) struct IncorrectReprFormatGeneric<'a> { +pub(crate) struct IncorrectReprFormatGeneric { #[primary_span] pub span: Span, - pub repr_arg: &'a str, + pub repr_arg: Symbol, #[subdiagnostic] - pub cause: Option>, + pub cause: Option, } #[derive(Subdiagnostic)] -pub(crate) enum IncorrectReprFormatGenericCause<'a> { +pub(crate) enum IncorrectReprFormatGenericCause { #[suggestion( attr_parsing_suggestion, - code = "{name}({int})", + code = "{name}({value})", applicability = "machine-applicable" )] Int { @@ -317,15 +317,15 @@ pub(crate) enum IncorrectReprFormatGenericCause<'a> { span: Span, #[skip_arg] - name: &'a str, + name: Symbol, #[skip_arg] - int: u128, + value: u128, }, #[suggestion( attr_parsing_suggestion, - code = "{name}({symbol})", + code = "{name}({value})", applicability = "machine-applicable" )] Symbol { @@ -333,20 +333,20 @@ pub(crate) enum IncorrectReprFormatGenericCause<'a> { span: Span, #[skip_arg] - name: &'a str, + name: Symbol, #[skip_arg] - symbol: Symbol, + value: Symbol, }, } -impl<'a> IncorrectReprFormatGenericCause<'a> { - pub(crate) fn from_lit_kind(span: Span, kind: &ast::LitKind, name: &'a str) -> Option { - match kind { - ast::LitKind::Int(int, ast::LitIntType::Unsuffixed) => { - Some(Self::Int { span, name, int: int.get() }) +impl IncorrectReprFormatGenericCause { + pub(crate) fn from_lit_kind(span: Span, kind: &ast::LitKind, name: Symbol) -> Option { + match *kind { + ast::LitKind::Int(value, ast::LitIntType::Unsuffixed) => { + Some(Self::Int { span, name, value: value.get() }) } - ast::LitKind::Str(symbol, _) => Some(Self::Symbol { span, name, symbol: *symbol }), + ast::LitKind::Str(value, _) => Some(Self::Symbol { span, name, value }), _ => None, } } diff --git a/compiler/rustc_borrowck/messages.ftl b/compiler/rustc_borrowck/messages.ftl index ada20e5c614f8..33b80c4b03d6f 100644 --- a/compiler/rustc_borrowck/messages.ftl +++ b/compiler/rustc_borrowck/messages.ftl @@ -162,13 +162,6 @@ borrowck_opaque_type_lifetime_mismatch = .prev_lifetime_label = lifetime `{$prev}` previously used here .note = if all non-lifetime generic parameters are the same, but the lifetime parameters differ, it is not possible to differentiate the opaque types -borrowck_opaque_type_non_generic_param = - expected generic {$kind} parameter, found `{$ty}` - .label = {STREQ($ty, "'static") -> - [true] cannot use static lifetime; use a bound lifetime instead or remove the lifetime parameter from the opaque type - *[other] this generic parameter must be used with a generic {$kind} parameter - } - borrowck_partial_var_move_by_use_in_closure = variable {$is_partial -> [true] partially moved diff --git a/compiler/rustc_borrowck/src/constraints/mod.rs b/compiler/rustc_borrowck/src/constraints/mod.rs index a52269df68289..514bbfe359b1d 100644 --- a/compiler/rustc_borrowck/src/constraints/mod.rs +++ b/compiler/rustc_borrowck/src/constraints/mod.rs @@ -7,7 +7,7 @@ use rustc_middle::ty::{RegionVid, TyCtxt, VarianceDiagInfo}; use rustc_span::Span; use tracing::{debug, instrument}; -use crate::region_infer::{ConstraintSccs, RegionDefinition, RegionTracker}; +use crate::region_infer::{AnnotatedSccs, ConstraintSccs, RegionDefinition, SccAnnotations}; use crate::type_check::Locations; use crate::universal_regions::UniversalRegions; @@ -61,12 +61,14 @@ impl<'tcx> OutlivesConstraintSet<'tcx> { &self, static_region: RegionVid, definitions: &IndexVec>, - ) -> ConstraintSccs { + ) -> AnnotatedSccs { let constraint_graph = self.graph(definitions.len()); let region_graph = &constraint_graph.region_graph(self, static_region); - ConstraintSccs::new_with_annotation(®ion_graph, |r| { - RegionTracker::new(r, &definitions[r]) - }) + let mut annotation_visitor = SccAnnotations::new(definitions); + ( + ConstraintSccs::new_with_annotation(®ion_graph, &mut annotation_visitor), + annotation_visitor.scc_to_annotation, + ) } /// This method handles Universe errors by rewriting the constraint @@ -79,12 +81,12 @@ impl<'tcx> OutlivesConstraintSet<'tcx> { /// eventually go away. /// /// For a more precise definition, see the documentation for - /// [`RegionTracker::has_incompatible_universes()`]. + /// [`crate::region_infer::RegionTracker`]. /// /// This edge case used to be handled during constraint propagation /// by iterating over the strongly connected components in the constraint /// graph while maintaining a set of bookkeeping mappings similar - /// to what is stored in `RegionTracker` and manually adding 'sttaic as + /// to what is stored in `RegionTracker` and manually adding 'static as /// needed. /// /// It was rewritten as part of the Polonius project with the goal of moving @@ -108,9 +110,9 @@ impl<'tcx> OutlivesConstraintSet<'tcx> { &mut self, universal_regions: &UniversalRegions<'tcx>, definitions: &IndexVec>, - ) -> ConstraintSccs { + ) -> AnnotatedSccs { let fr_static = universal_regions.fr_static; - let sccs = self.compute_sccs(fr_static, definitions); + let (sccs, annotations) = self.compute_sccs(fr_static, definitions); // Changed to `true` if we added any constraints to `self` and need to // recompute SCCs. @@ -124,7 +126,7 @@ impl<'tcx> OutlivesConstraintSet<'tcx> { continue; } - let annotation = sccs.annotation(scc); + let annotation = annotations[scc]; // If this SCC participates in a universe violation, // e.g. if it reaches a region with a universe smaller than @@ -154,7 +156,7 @@ impl<'tcx> OutlivesConstraintSet<'tcx> { self.compute_sccs(fr_static, definitions) } else { // If we didn't add any back-edges; no more work needs doing - sccs + (sccs, annotations) } } } diff --git a/compiler/rustc_borrowck/src/consumers.rs b/compiler/rustc_borrowck/src/consumers.rs index 5a89f7c351cf6..1f087b092346e 100644 --- a/compiler/rustc_borrowck/src/consumers.rs +++ b/compiler/rustc_borrowck/src/consumers.rs @@ -1,7 +1,7 @@ //! This file provides API for compiler consumers. use rustc_hir::def_id::LocalDefId; -use rustc_index::{IndexSlice, IndexVec}; +use rustc_index::IndexVec; use rustc_middle::mir::{Body, Promoted}; use rustc_middle::ty::TyCtxt; @@ -15,6 +15,7 @@ pub use super::polonius::legacy::{ RichLocation, RustcFacts, }; pub use super::region_infer::RegionInferenceContext; +use crate::{BorrowCheckRootCtxt, do_mir_borrowck}; /// Options determining the output behavior of [`get_body_with_borrowck_facts`]. /// @@ -97,11 +98,9 @@ pub struct BodyWithBorrowckFacts<'tcx> { /// * Polonius is highly unstable, so expect regular changes in its signature or other details. pub fn get_body_with_borrowck_facts( tcx: TyCtxt<'_>, - def: LocalDefId, + def_id: LocalDefId, options: ConsumerOptions, ) -> BodyWithBorrowckFacts<'_> { - let (input_body, promoted) = tcx.mir_promoted(def); - let input_body: &Body<'_> = &input_body.borrow(); - let promoted: &IndexSlice<_, _> = &promoted.borrow(); - *super::do_mir_borrowck(tcx, input_body, promoted, Some(options)).1.unwrap() + let mut root_cx = BorrowCheckRootCtxt::new(tcx, def_id); + *do_mir_borrowck(&mut root_cx, def_id, Some(options)).1.unwrap() } diff --git a/compiler/rustc_borrowck/src/def_use.rs b/compiler/rustc_borrowck/src/def_use.rs index 263f68d6a3dc7..b9ced81c46c19 100644 --- a/compiler/rustc_borrowck/src/def_use.rs +++ b/compiler/rustc_borrowck/src/def_use.rs @@ -77,6 +77,9 @@ pub(crate) fn categorize(context: PlaceContext) -> Option { // Debug info is neither def nor use. PlaceContext::NonUse(NonUseContext::VarDebugInfo) => None, + // Backwards incompatible drop hint is not a use, just a marker for linting. + PlaceContext::NonUse(NonUseContext::BackwardIncompatibleDropHint) => None, + PlaceContext::MutatingUse(MutatingUseContext::Deinit | MutatingUseContext::SetDiscriminant) => { bug!("These statements are not allowed in this MIR phase") } diff --git a/compiler/rustc_borrowck/src/diagnostics/bound_region_errors.rs b/compiler/rustc_borrowck/src/diagnostics/bound_region_errors.rs index aa968a1e40f3e..0de4bd67f0ce7 100644 --- a/compiler/rustc_borrowck/src/diagnostics/bound_region_errors.rs +++ b/compiler/rustc_borrowck/src/diagnostics/bound_region_errors.rs @@ -49,7 +49,7 @@ impl<'tcx> UniverseInfo<'tcx> { UniverseInfo::RelateTys { expected, found } } - pub(crate) fn report_error( + pub(crate) fn report_erroneous_element( &self, mbcx: &mut MirBorrowckCtxt<'_, '_, 'tcx>, placeholder: ty::PlaceholderRegion, @@ -68,7 +68,7 @@ impl<'tcx> UniverseInfo<'tcx> { mbcx.buffer_error(err); } UniverseInfo::TypeOp(ref type_op_info) => { - type_op_info.report_error(mbcx, placeholder, error_element, cause); + type_op_info.report_erroneous_element(mbcx, placeholder, error_element, cause); } UniverseInfo::Other => { // FIXME: This error message isn't great, but it doesn't show @@ -145,8 +145,11 @@ pub(crate) trait TypeOpInfo<'tcx> { error_region: Option>, ) -> Option>; + /// Constraints require that `error_element` appear in the + /// values of `placeholder`, but this cannot be proven to + /// hold. Report an error. #[instrument(level = "debug", skip(self, mbcx))] - fn report_error( + fn report_erroneous_element( &self, mbcx: &mut MirBorrowckCtxt<'_, '_, 'tcx>, placeholder: ty::PlaceholderRegion, @@ -190,12 +193,7 @@ pub(crate) trait TypeOpInfo<'tcx> { let nice_error = self.nice_error(mbcx, cause, placeholder_region, error_region); debug!(?nice_error); - - if let Some(nice_error) = nice_error { - mbcx.buffer_error(nice_error); - } else { - mbcx.buffer_error(self.fallback_error(tcx, span)); - } + mbcx.buffer_error(nice_error.unwrap_or_else(|| self.fallback_error(tcx, span))); } } @@ -406,8 +404,8 @@ impl<'tcx> TypeOpInfo<'tcx> for crate::type_check::InstantiateOpaqueType<'tcx> { // started MIR borrowchecking with, so the region // constraints have already been taken. Use the data from // our `mbcx` instead. - |vid| mbcx.regioncx.var_infos[vid].origin, - |vid| mbcx.regioncx.var_infos[vid].universe, + |vid| RegionVariableOrigin::Nll(mbcx.regioncx.definitions[vid].origin), + |vid| mbcx.regioncx.definitions[vid].universe, ) } } @@ -450,7 +448,8 @@ fn try_extract_error_from_region_constraints<'a, 'tcx>( ty::ReVar(vid) => universe_of_region(vid), _ => ty::UniverseIndex::ROOT, }; - let matches = + // Are the two regions the same? + let regions_the_same = |a_region: Region<'tcx>, b_region: Region<'tcx>| match (a_region.kind(), b_region.kind()) { (RePlaceholder(a_p), RePlaceholder(b_p)) => a_p.bound == b_p.bound, _ => a_region == b_region, @@ -459,7 +458,7 @@ fn try_extract_error_from_region_constraints<'a, 'tcx>( |constraint: &Constraint<'tcx>, cause: &SubregionOrigin<'tcx>, exact| match *constraint { Constraint::RegSubReg(sub, sup) if ((exact && sup == placeholder_region) - || (!exact && matches(sup, placeholder_region))) + || (!exact && regions_the_same(sup, placeholder_region))) && sup != sub => { Some((sub, cause.clone())) @@ -468,26 +467,24 @@ fn try_extract_error_from_region_constraints<'a, 'tcx>( if (exact && sup == placeholder_region && !universe_of_region(vid).can_name(placeholder_universe)) - || (!exact && matches(sup, placeholder_region)) => + || (!exact && regions_the_same(sup, placeholder_region)) => { Some((ty::Region::new_var(infcx.tcx, vid), cause.clone())) } _ => None, }; - let mut info = region_constraints - .constraints - .iter() - .find_map(|(constraint, cause)| check(constraint, cause, true)); - if info.is_none() { - info = region_constraints + + let mut find_culprit = |exact_match: bool| { + region_constraints .constraints .iter() - .find_map(|(constraint, cause)| check(constraint, cause, false)); - } - let (sub_region, cause) = info?; + .find_map(|(constraint, cause)| check(constraint, cause, exact_match)) + }; + + let (sub_region, cause) = find_culprit(true).or_else(|| find_culprit(false))?; debug!(?sub_region, "cause = {:#?}", cause); - let error = match (error_region, *sub_region) { + let error = match (error_region, sub_region.kind()) { (Some(error_region), ty::ReVar(vid)) => RegionResolutionError::SubSupConflict( vid, region_var_origin(vid), diff --git a/compiler/rustc_borrowck/src/diagnostics/conflict_errors.rs b/compiler/rustc_borrowck/src/diagnostics/conflict_errors.rs index 978186f76a1f0..3b7d31b1b13bd 100644 --- a/compiler/rustc_borrowck/src/diagnostics/conflict_errors.rs +++ b/compiler/rustc_borrowck/src/diagnostics/conflict_errors.rs @@ -181,7 +181,6 @@ impl<'infcx, 'tcx> MirBorrowckCtxt<'_, 'infcx, 'tcx> { let closure = self.add_moved_or_invoked_closure_note(location, used_place, &mut err); let mut is_loop_move = false; - let mut in_pattern = false; let mut seen_spans = FxIndexSet::default(); for move_site in &move_site_vec { @@ -204,7 +203,6 @@ impl<'infcx, 'tcx> MirBorrowckCtxt<'_, 'infcx, 'tcx> { self.suggest_ref_or_clone( mpi, &mut err, - &mut in_pattern, move_spans, moved_place.as_ref(), &mut has_suggest_reborrow, @@ -256,15 +254,6 @@ impl<'infcx, 'tcx> MirBorrowckCtxt<'_, 'infcx, 'tcx> { let place = &self.move_data.move_paths[mpi].place; let ty = place.ty(self.body, self.infcx.tcx).ty; - // If we're in pattern, we do nothing in favor of the previous suggestion (#80913). - // Same for if we're in a loop, see #101119. - if is_loop_move & !in_pattern && !matches!(use_spans, UseSpans::ClosureUse { .. }) { - if let ty::Ref(_, _, hir::Mutability::Mut) = ty.kind() { - // We have a `&mut` ref, we need to reborrow on each iteration (#62112). - self.suggest_reborrow(&mut err, span, moved_place); - } - } - if self.infcx.param_env.caller_bounds().iter().any(|c| { c.as_trait_clause().is_some_and(|pred| { pred.skip_binder().self_ty() == ty && self.infcx.tcx.is_fn_trait(pred.def_id()) @@ -330,7 +319,6 @@ impl<'infcx, 'tcx> MirBorrowckCtxt<'_, 'infcx, 'tcx> { &self, mpi: MovePathIndex, err: &mut Diag<'infcx>, - in_pattern: &mut bool, move_spans: UseSpans<'tcx>, moved_place: PlaceRef<'tcx>, has_suggest_reborrow: &mut bool, @@ -545,7 +533,6 @@ impl<'infcx, 'tcx> MirBorrowckCtxt<'_, 'infcx, 'tcx> { && !move_span.is_dummy() && !self.infcx.tcx.sess.source_map().is_imported(move_span) { - *in_pattern = true; let mut sugg = vec![(pat.span.shrink_to_lo(), "ref ".to_string())]; if let Some(pat) = finder.parent_pat { sugg.insert(0, (pat.span.shrink_to_lo(), "ref ".to_string())); @@ -660,7 +647,7 @@ impl<'infcx, 'tcx> MirBorrowckCtxt<'_, 'infcx, 'tcx> { && tc.polarity() == ty::PredicatePolarity::Positive && supertrait_def_ids(tcx, tc.def_id()) .flat_map(|trait_did| tcx.associated_items(trait_did).in_definition_order()) - .any(|item| item.fn_has_self_parameter) + .any(|item| item.is_method()) }) }) { return None; @@ -1278,12 +1265,7 @@ impl<'infcx, 'tcx> MirBorrowckCtxt<'_, 'infcx, 'tcx> { && let CallKind::FnCall { fn_trait_id, self_ty } = kind && let ty::Param(_) = self_ty.kind() && ty == self_ty - && [ - self.infcx.tcx.lang_items().fn_once_trait(), - self.infcx.tcx.lang_items().fn_mut_trait(), - self.infcx.tcx.lang_items().fn_trait(), - ] - .contains(&Some(fn_trait_id)) + && self.infcx.tcx.fn_trait_kind_from_def_id(fn_trait_id).is_some() { // Do not suggest `F: FnOnce() + Clone`. false @@ -1364,7 +1346,7 @@ impl<'infcx, 'tcx> MirBorrowckCtxt<'_, 'infcx, 'tcx> { } // Try to find predicates on *generic params* that would allow copying `ty` let mut suggestion = - if let Some(symbol) = tcx.hir().maybe_get_struct_pattern_shorthand_field(expr) { + if let Some(symbol) = tcx.hir_maybe_get_struct_pattern_shorthand_field(expr) { format!(": {symbol}.clone()") } else { ".clone()".to_owned() @@ -2513,11 +2495,11 @@ impl<'infcx, 'tcx> MirBorrowckCtxt<'_, 'infcx, 'tcx> { ); let ty::Tuple(params) = tupled_params.kind() else { return }; - // Find the first argument with a matching type, get its name - let Some(this_name) = params.iter().zip(tcx.hir_body_param_names(closure.body)).find_map( - |(param_ty, name)| { + // Find the first argument with a matching type and get its identifier. + let Some(this_name) = params.iter().zip(tcx.hir_body_param_idents(closure.body)).find_map( + |(param_ty, ident)| { // FIXME: also support deref for stuff like `Rc` arguments - if param_ty.peel_refs() == local_ty { name } else { None } + if param_ty.peel_refs() == local_ty { ident } else { None } }, ) else { return; @@ -2972,21 +2954,22 @@ impl<'infcx, 'tcx> MirBorrowckCtxt<'_, 'infcx, 'tcx> { } } - let mut err = self.path_does_not_live_long_enough(borrow_span, &format!("`{name}`")); + let name = format!("`{name}`"); + + let mut err = self.path_does_not_live_long_enough(borrow_span, &name); if let Some(annotation) = self.annotate_argument_and_return_for_borrow(borrow) { let region_name = annotation.emit(self, &mut err); err.span_label( borrow_span, - format!("`{name}` would have to be valid for `{region_name}`..."), + format!("{name} would have to be valid for `{region_name}`..."), ); err.span_label( drop_span, format!( - "...but `{}` will be dropped here, when the {} returns", - name, + "...but {name} will be dropped here, when the {} returns", self.infcx .tcx .opt_item_name(self.mir_def_id().to_def_id()) @@ -3024,7 +3007,7 @@ impl<'infcx, 'tcx> MirBorrowckCtxt<'_, 'infcx, 'tcx> { } } else { err.span_label(borrow_span, "borrowed value does not live long enough"); - err.span_label(drop_span, format!("`{name}` dropped here while still borrowed")); + err.span_label(drop_span, format!("{name} dropped here while still borrowed")); borrow_spans.args_subdiag(&mut err, |args_span| { crate::session_diagnostics::CaptureArgLabel::Capture { @@ -3389,10 +3372,15 @@ impl<'infcx, 'tcx> MirBorrowckCtxt<'_, 'infcx, 'tcx> { let (sugg_span, suggestion) = match tcx.sess.source_map().span_to_snippet(args_span) { Ok(string) => { - let coro_prefix = if string.starts_with("async") { - // `async` is 5 chars long. Not using `.len()` to avoid the cast from `usize` - // to `u32`. - Some(5) + let coro_prefix = if let Some(sub) = string.strip_prefix("async") { + let trimmed_sub = sub.trim_end(); + if trimmed_sub.ends_with("gen") { + // `async` is 5 chars long. + Some((trimmed_sub.len() + 5) as _) + } else { + // `async` is 5 chars long. + Some(5) + } } else if string.starts_with("gen") { // `gen` is 3 chars long Some(3) @@ -3787,7 +3775,7 @@ impl<'infcx, 'tcx> MirBorrowckCtxt<'_, 'infcx, 'tcx> { method_args, *fn_span, call_source.from_hir_call(), - self.infcx.tcx.fn_arg_names(method_did)[0], + self.infcx.tcx.fn_arg_idents(method_did)[0], ) { err.note(format!("borrow occurs due to deref coercion to `{deref_target_ty}`")); diff --git a/compiler/rustc_borrowck/src/diagnostics/explain_borrow.rs b/compiler/rustc_borrowck/src/diagnostics/explain_borrow.rs index f77dda0d386aa..a845431facac1 100644 --- a/compiler/rustc_borrowck/src/diagnostics/explain_borrow.rs +++ b/compiler/rustc_borrowck/src/diagnostics/explain_borrow.rs @@ -95,7 +95,9 @@ impl<'tcx> BorrowExplanation<'tcx> { && let hir::def::Res::Local(hir_id) = p.res && let hir::Node::Pat(pat) = tcx.hir_node(hir_id) { - err.span_label(pat.span, format!("binding `{ident}` declared here")); + if !ident.span.in_external_macro(tcx.sess.source_map()) { + err.span_label(pat.span, format!("binding `{ident}` declared here")); + } } } } diff --git a/compiler/rustc_borrowck/src/diagnostics/mod.rs b/compiler/rustc_borrowck/src/diagnostics/mod.rs index 899e145c2c049..5e3f3ffa2ea85 100644 --- a/compiler/rustc_borrowck/src/diagnostics/mod.rs +++ b/compiler/rustc_borrowck/src/diagnostics/mod.rs @@ -8,9 +8,7 @@ use rustc_errors::{Applicability, Diag, EmissionGuarantee, MultiSpan, listify}; use rustc_hir::def::{CtorKind, Namespace}; use rustc_hir::{self as hir, CoroutineKind, LangItem}; use rustc_index::IndexSlice; -use rustc_infer::infer::{ - BoundRegionConversionTime, NllRegionVariableOrigin, RegionVariableOrigin, -}; +use rustc_infer::infer::{BoundRegionConversionTime, NllRegionVariableOrigin}; use rustc_infer::traits::SelectionError; use rustc_middle::bug; use rustc_middle::mir::{ @@ -319,6 +317,14 @@ impl<'infcx, 'tcx> MirBorrowckCtxt<'_, 'infcx, 'tcx> { opt: DescribePlaceOpt, ) -> Option { let local = place.local; + if self.body.local_decls[local] + .source_info + .span + .in_external_macro(self.infcx.tcx.sess.source_map()) + { + return None; + } + let mut autoderef_index = None; let mut buf = String::new(); let mut ok = self.append_local_to_string(local, &mut buf); @@ -587,7 +593,7 @@ impl<'infcx, 'tcx> MirBorrowckCtxt<'_, 'infcx, 'tcx> { // this by hooking into the pretty printer and telling it to label the // lifetimes without names with the value `'0`. if let ty::Ref(region, ..) = ty.kind() { - match **region { + match region.kind() { ty::ReBound(_, ty::BoundRegion { kind: br, .. }) | ty::RePlaceholder(ty::PlaceholderRegion { bound: ty::BoundRegion { kind: br, .. }, @@ -607,7 +613,7 @@ impl<'infcx, 'tcx> MirBorrowckCtxt<'_, 'infcx, 'tcx> { let mut printer = ty::print::FmtPrinter::new(self.infcx.tcx, Namespace::TypeNS); let region = if let ty::Ref(region, ..) = ty.kind() { - match **region { + match region.kind() { ty::ReBound(_, ty::BoundRegion { kind: br, .. }) | ty::RePlaceholder(ty::PlaceholderRegion { bound: ty::BoundRegion { kind: br, .. }, @@ -633,9 +639,8 @@ impl<'infcx, 'tcx> MirBorrowckCtxt<'_, 'infcx, 'tcx> { ) { let predicate_span = path.iter().find_map(|constraint| { let outlived = constraint.sub; - if let Some(origin) = self.regioncx.var_infos.get(outlived) - && let RegionVariableOrigin::Nll(NllRegionVariableOrigin::Placeholder(_)) = - origin.origin + if let Some(origin) = self.regioncx.definitions.get(outlived) + && let NllRegionVariableOrigin::Placeholder(_) = origin.origin && let ConstraintCategory::Predicate(span) = constraint.category { Some(span) @@ -1029,7 +1034,7 @@ impl<'infcx, 'tcx> MirBorrowckCtxt<'_, 'infcx, 'tcx> { method_args, *fn_span, call_source.from_hir_call(), - self.infcx.tcx.fn_arg_names(method_did)[0], + self.infcx.tcx.fn_arg_idents(method_did)[0], ); return FnSelfUse { diff --git a/compiler/rustc_borrowck/src/diagnostics/move_errors.rs b/compiler/rustc_borrowck/src/diagnostics/move_errors.rs index 29cc749877b3e..0394a42ea9c77 100644 --- a/compiler/rustc_borrowck/src/diagnostics/move_errors.rs +++ b/compiler/rustc_borrowck/src/diagnostics/move_errors.rs @@ -502,7 +502,7 @@ impl<'infcx, 'tcx> MirBorrowckCtxt<'_, 'infcx, 'tcx> { let upvar = &self.upvars[upvar_field.unwrap().index()]; let upvar_hir_id = upvar.get_root_variable(); let upvar_name = upvar.to_string(tcx); - let upvar_span = tcx.hir().span(upvar_hir_id); + let upvar_span = tcx.hir_span(upvar_hir_id); let place_name = self.describe_any_place(move_place.as_ref()); diff --git a/compiler/rustc_borrowck/src/diagnostics/mutability_errors.rs b/compiler/rustc_borrowck/src/diagnostics/mutability_errors.rs index fddddf404dbc0..56cc432758511 100644 --- a/compiler/rustc_borrowck/src/diagnostics/mutability_errors.rs +++ b/compiler/rustc_borrowck/src/diagnostics/mutability_errors.rs @@ -969,7 +969,7 @@ impl<'infcx, 'tcx> MirBorrowckCtxt<'_, 'infcx, 'tcx> { } }; - // If we can detect the expression to be an function or method call where the closure was + // If we can detect the expression to be a function or method call where the closure was // an argument, we point at the function or method definition argument... if let Some((callee_def_id, call_span, call_args)) = get_call_details() { let arg_pos = call_args diff --git a/compiler/rustc_borrowck/src/diagnostics/region_errors.rs b/compiler/rustc_borrowck/src/diagnostics/region_errors.rs index 50a18b04de4ef..3bec07afa0fe0 100644 --- a/compiler/rustc_borrowck/src/diagnostics/region_errors.rs +++ b/compiler/rustc_borrowck/src/diagnostics/region_errors.rs @@ -190,7 +190,7 @@ impl<'infcx, 'tcx> MirBorrowckCtxt<'_, 'infcx, 'tcx> { where T: TypeFoldable>, { - fold_regions(tcx, ty, |region, _| match *region { + fold_regions(tcx, ty, |region, _| match region.kind() { ty::ReVar(vid) => self.to_error_region(vid).unwrap_or(region), _ => region, }) @@ -198,7 +198,8 @@ impl<'infcx, 'tcx> MirBorrowckCtxt<'_, 'infcx, 'tcx> { /// Returns `true` if a closure is inferred to be an `FnMut` closure. fn is_closure_fn_mut(&self, fr: RegionVid) -> bool { - if let Some(ty::ReLateParam(late_param)) = self.to_error_region(fr).as_deref() + if let Some(r) = self.to_error_region(fr) + && let ty::ReLateParam(late_param) = r.kind() && let ty::LateParamRegionKind::ClosureEnv = late_param.kind && let DefiningTy::Closure(_, args) = self.regioncx.universal_regions().defining_ty { @@ -404,7 +405,7 @@ impl<'infcx, 'tcx> MirBorrowckCtxt<'_, 'infcx, 'tcx> { let universe = placeholder.universe; let universe_info = self.regioncx.universe_info(universe); - universe_info.report_error(self, placeholder, error_element, cause); + universe_info.report_erroneous_element(self, placeholder, error_element, cause); } RegionErrorKind::RegionError { fr_origin, longer_fr, shorter_fr, is_reported } => { @@ -628,7 +629,7 @@ impl<'infcx, 'tcx> MirBorrowckCtxt<'_, 'infcx, 'tcx> { if let Some(def_hir) = defined_hir { let upvars_map = self.infcx.tcx.upvars_mentioned(def_id).unwrap(); - let upvar_def_span = self.infcx.tcx.hir().span(def_hir); + let upvar_def_span = self.infcx.tcx.hir_span(def_hir); let upvar_span = upvars_map.get(&def_hir).unwrap().span; diag.subdiagnostic(VarHereDenote::Defined { span: upvar_def_span }); diag.subdiagnostic(VarHereDenote::Captured { span: upvar_span }); @@ -832,7 +833,7 @@ impl<'infcx, 'tcx> MirBorrowckCtxt<'_, 'infcx, 'tcx> { if let (Some(f), Some(outlived_f)) = (self.to_error_region(fr), self.to_error_region(outlived_fr)) { - if *outlived_f != ty::ReStatic { + if outlived_f.kind() != ty::ReStatic { return; } let suitable_region = self.infcx.tcx.is_suitable_region(self.mir_def_id(), f); @@ -887,7 +888,7 @@ impl<'infcx, 'tcx> MirBorrowckCtxt<'_, 'infcx, 'tcx> { // Skip `async` desugaring `impl Future`. } if let TyKind::TraitObject(_, lt) = alias_ty.kind { - if lt.res == hir::LifetimeName::ImplicitObjectLifetimeDefault { + if lt.kind == hir::LifetimeKind::ImplicitObjectLifetimeDefault { spans_suggs.push((lt.ident.span.shrink_to_hi(), " + 'a".to_string())); } else { spans_suggs.push((lt.ident.span, "'a".to_string())); diff --git a/compiler/rustc_borrowck/src/diagnostics/region_name.rs b/compiler/rustc_borrowck/src/diagnostics/region_name.rs index 412aaf70c3f19..b08c10983bbc0 100644 --- a/compiler/rustc_borrowck/src/diagnostics/region_name.rs +++ b/compiler/rustc_borrowck/src/diagnostics/region_name.rs @@ -288,10 +288,10 @@ impl<'tcx> MirBorrowckCtxt<'_, '_, 'tcx> { let tcx = self.infcx.tcx; debug!("give_region_a_name: error_region = {:?}", error_region); - match *error_region { + match error_region.kind() { ty::ReEarlyParam(ebr) => ebr.has_name().then(|| { let def_id = tcx.generics_of(self.mir_def_id()).region_param(ebr, tcx).def_id; - let span = tcx.hir().span_if_local(def_id).unwrap_or(DUMMY_SP); + let span = tcx.hir_span_if_local(def_id).unwrap_or(DUMMY_SP); RegionName { name: ebr.name, source: RegionNameSource::NamedEarlyParamRegion(span) } }), @@ -302,7 +302,7 @@ impl<'tcx> MirBorrowckCtxt<'_, '_, 'tcx> { ty::ReLateParam(late_param) => match late_param.kind { ty::LateParamRegionKind::Named(region_def_id, name) => { // Get the span to point to, even if we don't use the name. - let span = tcx.hir().span_if_local(region_def_id).unwrap_or(DUMMY_SP); + let span = tcx.hir_span_if_local(region_def_id).unwrap_or(DUMMY_SP); debug!( "bound region named: {:?}, is_named: {:?}", name, @@ -896,7 +896,7 @@ impl<'tcx> MirBorrowckCtxt<'_, '_, 'tcx> { &self, fr: RegionVid, ) -> Option { - let ty::ReEarlyParam(region) = *self.to_error_region(fr)? else { + let ty::ReEarlyParam(region) = self.to_error_region(fr)?.kind() else { return None; }; if region.has_name() { @@ -912,7 +912,7 @@ impl<'tcx> MirBorrowckCtxt<'_, '_, 'tcx> { let found = tcx .any_free_region_meets(&tcx.type_of(region_parent).instantiate_identity(), |r| { - *r == ty::ReEarlyParam(region) + r.kind() == ty::ReEarlyParam(region) }); Some(RegionName { @@ -931,7 +931,7 @@ impl<'tcx> MirBorrowckCtxt<'_, '_, 'tcx> { &self, fr: RegionVid, ) -> Option { - let ty::ReEarlyParam(region) = *self.to_error_region(fr)? else { + let ty::ReEarlyParam(region) = self.to_error_region(fr)?.kind() else { return None; }; if region.has_name() { @@ -1007,7 +1007,7 @@ impl<'tcx> MirBorrowckCtxt<'_, '_, 'tcx> { if data.projection_term.self_ty() == ty => {} _ => return false, } - tcx.any_free_region_meets(pred, |r| *r == ty::ReEarlyParam(region)) + tcx.any_free_region_meets(pred, |r| r.kind() == ty::ReEarlyParam(region)) }) } else { false diff --git a/compiler/rustc_borrowck/src/diagnostics/var_name.rs b/compiler/rustc_borrowck/src/diagnostics/var_name.rs index 693d22abbe6c8..14ed6a27a7a15 100644 --- a/compiler/rustc_borrowck/src/diagnostics/var_name.rs +++ b/compiler/rustc_borrowck/src/diagnostics/var_name.rs @@ -70,7 +70,7 @@ impl<'tcx> RegionInferenceContext<'tcx> { debug!("get_upvar_name_and_span_for_region: upvar_hir_id={upvar_hir_id:?}"); let upvar_name = tcx.hir_name(upvar_hir_id); - let upvar_span = tcx.hir().span(upvar_hir_id); + let upvar_span = tcx.hir_span(upvar_hir_id); debug!( "get_upvar_name_and_span_for_region: upvar_name={upvar_name:?} upvar_span={upvar_span:?}", ); diff --git a/compiler/rustc_borrowck/src/lib.rs b/compiler/rustc_borrowck/src/lib.rs index 84b7b8c6a2de9..3e075d420a3a9 100644 --- a/compiler/rustc_borrowck/src/lib.rs +++ b/compiler/rustc_borrowck/src/lib.rs @@ -2,13 +2,13 @@ // tidy-alphabetical-start #![allow(internal_features)] -#![cfg_attr(doc, recursion_limit = "256")] // FIXME(nnethercote): will be removed by #124141 +#![cfg_attr(bootstrap, feature(let_chains))] #![doc(rust_logo)] #![feature(assert_matches)] #![feature(box_patterns)] #![feature(file_buffered)] #![feature(if_let_guard)] -#![feature(let_chains)] +#![feature(negative_impls)] #![feature(never_type)] #![feature(rustc_attrs)] #![feature(rustdoc_internals)] @@ -21,6 +21,8 @@ use std::cell::RefCell; use std::marker::PhantomData; use std::ops::{ControlFlow, Deref}; +use borrow_set::LocalsStateAtExit; +use root_cx::BorrowCheckRootCtxt; use rustc_abi::FieldIdx; use rustc_data_structures::fx::{FxIndexMap, FxIndexSet}; use rustc_data_structures::graph::dominators::Dominators; @@ -35,7 +37,9 @@ use rustc_infer::infer::{ }; use rustc_middle::mir::*; use rustc_middle::query::Providers; -use rustc_middle::ty::{self, ParamEnv, RegionVid, TyCtxt, TypingMode, fold_regions}; +use rustc_middle::ty::{ + self, ParamEnv, RegionVid, Ty, TyCtxt, TypeFoldable, TypeVisitable, TypingMode, fold_regions, +}; use rustc_middle::{bug, span_bug}; use rustc_mir_dataflow::impls::{ EverInitializedPlaces, MaybeInitializedPlaces, MaybeUninitializedPlaces, @@ -43,9 +47,9 @@ use rustc_mir_dataflow::impls::{ use rustc_mir_dataflow::move_paths::{ InitIndex, InitLocation, LookupResult, MoveData, MovePathIndex, }; -use rustc_mir_dataflow::{Analysis, EntryStates, Results, ResultsVisitor, visit_results}; +use rustc_mir_dataflow::{Analysis, Results, ResultsVisitor, visit_results}; use rustc_session::lint::builtin::{TAIL_EXPR_DROP_ORDER, UNUSED_MUT}; -use rustc_span::{Span, Symbol}; +use rustc_span::{ErrorGuaranteed, Span, Symbol}; use smallvec::SmallVec; use tracing::{debug, instrument}; @@ -73,7 +77,6 @@ mod def_use; mod diagnostics; mod member_constraints; mod nll; -mod opaque_types; mod path_utils; mod place_ext; mod places_conflict; @@ -81,6 +84,7 @@ mod polonius; mod prefixes; mod region_infer; mod renumber; +mod root_cx; mod session_diagnostics; mod type_check; mod universal_regions; @@ -102,66 +106,210 @@ pub fn provide(providers: &mut Providers) { *providers = Providers { mir_borrowck, ..*providers }; } -fn mir_borrowck(tcx: TyCtxt<'_>, def: LocalDefId) -> &BorrowCheckResult<'_> { - let (input_body, promoted) = tcx.mir_promoted(def); +/// Provider for `query mir_borrowck`. Similar to `typeck`, this must +/// only be called for typeck roots which will then borrowck all +/// nested bodies as well. +fn mir_borrowck( + tcx: TyCtxt<'_>, + def: LocalDefId, +) -> Result<&ConcreteOpaqueTypes<'_>, ErrorGuaranteed> { + assert!(!tcx.is_typeck_child(def.to_def_id())); + let (input_body, _) = tcx.mir_promoted(def); debug!("run query mir_borrowck: {}", tcx.def_path_str(def)); let input_body: &Body<'_> = &input_body.borrow(); + if let Some(guar) = input_body.tainted_by_errors { + debug!("Skipping borrowck because of tainted body"); + Err(guar) + } else if input_body.should_skip() { + debug!("Skipping borrowck because of injected body"); + let opaque_types = ConcreteOpaqueTypes(Default::default()); + Ok(tcx.arena.alloc(opaque_types)) + } else { + let mut root_cx = BorrowCheckRootCtxt::new(tcx, def); + // We need to manually borrowck all nested bodies from the HIR as + // we do not generate MIR for dead code. Not doing so causes us to + // never check closures in dead code. + let nested_bodies = tcx.nested_bodies_within(def); + for def_id in nested_bodies { + root_cx.get_or_insert_nested(def_id); + } - if input_body.should_skip() || input_body.tainted_by_errors.is_some() { - debug!("Skipping borrowck because of injected body or tainted body"); - // Let's make up a borrowck result! Fun times! - let result = BorrowCheckResult { - concrete_opaque_types: FxIndexMap::default(), - closure_requirements: None, - used_mut_upvars: SmallVec::new(), - tainted_by_errors: input_body.tainted_by_errors, - }; - return tcx.arena.alloc(result); + let PropagatedBorrowCheckResults { closure_requirements, used_mut_upvars } = + do_mir_borrowck(&mut root_cx, def, None).0; + debug_assert!(closure_requirements.is_none()); + debug_assert!(used_mut_upvars.is_empty()); + root_cx.finalize() } +} + +/// Data propagated to the typeck parent by nested items. +/// This should always be empty for the typeck root. +#[derive(Debug)] +struct PropagatedBorrowCheckResults<'tcx> { + closure_requirements: Option>, + used_mut_upvars: SmallVec<[FieldIdx; 8]>, +} + +/// After we borrow check a closure, we are left with various +/// requirements that we have inferred between the free regions that +/// appear in the closure's signature or on its field types. These +/// requirements are then verified and proved by the closure's +/// creating function. This struct encodes those requirements. +/// +/// The requirements are listed as being between various `RegionVid`. The 0th +/// region refers to `'static`; subsequent region vids refer to the free +/// regions that appear in the closure (or coroutine's) type, in order of +/// appearance. (This numbering is actually defined by the `UniversalRegions` +/// struct in the NLL region checker. See for example +/// `UniversalRegions::closure_mapping`.) Note the free regions in the +/// closure's signature and captures are erased. +/// +/// Example: If type check produces a closure with the closure args: +/// +/// ```text +/// ClosureArgs = [ +/// 'a, // From the parent. +/// 'b, +/// i8, // the "closure kind" +/// for<'x> fn(&' &'x u32) -> &'x u32, // the "closure signature" +/// &' String, // some upvar +/// ] +/// ``` +/// +/// We would "renumber" each free region to a unique vid, as follows: +/// +/// ```text +/// ClosureArgs = [ +/// '1, // From the parent. +/// '2, +/// i8, // the "closure kind" +/// for<'x> fn(&'3 &'x u32) -> &'x u32, // the "closure signature" +/// &'4 String, // some upvar +/// ] +/// ``` +/// +/// Now the code might impose a requirement like `'1: '2`. When an +/// instance of the closure is created, the corresponding free regions +/// can be extracted from its type and constrained to have the given +/// outlives relationship. +#[derive(Clone, Debug)] +pub struct ClosureRegionRequirements<'tcx> { + /// The number of external regions defined on the closure. In our + /// example above, it would be 3 -- one for `'static`, then `'1` + /// and `'2`. This is just used for a sanity check later on, to + /// make sure that the number of regions we see at the callsite + /// matches. + pub num_external_vids: usize, + + /// Requirements between the various free regions defined in + /// indices. + pub outlives_requirements: Vec>, +} + +/// Indicates an outlives-constraint between a type or between two +/// free regions declared on the closure. +#[derive(Copy, Clone, Debug)] +pub struct ClosureOutlivesRequirement<'tcx> { + // This region or type ... + pub subject: ClosureOutlivesSubject<'tcx>, + + // ... must outlive this one. + pub outlived_free_region: ty::RegionVid, - let borrowck_result = do_mir_borrowck(tcx, input_body, &*promoted.borrow(), None).0; - debug!("mir_borrowck done"); + // If not, report an error here ... + pub blame_span: Span, - tcx.arena.alloc(borrowck_result) + // ... due to this reason. + pub category: ConstraintCategory<'tcx>, +} + +// Make sure this enum doesn't unintentionally grow +#[cfg(target_pointer_width = "64")] +rustc_data_structures::static_assert_size!(ConstraintCategory<'_>, 16); + +/// The subject of a `ClosureOutlivesRequirement` -- that is, the thing +/// that must outlive some region. +#[derive(Copy, Clone, Debug)] +pub enum ClosureOutlivesSubject<'tcx> { + /// Subject is a type, typically a type parameter, but could also + /// be a projection. Indicates a requirement like `T: 'a` being + /// passed to the caller, where the type here is `T`. + Ty(ClosureOutlivesSubjectTy<'tcx>), + + /// Subject is a free region from the closure. Indicates a requirement + /// like `'a: 'b` being passed to the caller; the region here is `'a`. + Region(ty::RegionVid), +} + +/// Represents a `ty::Ty` for use in [`ClosureOutlivesSubject`]. +/// +/// This abstraction is necessary because the type may include `ReVar` regions, +/// which is what we use internally within NLL code, and they can't be used in +/// a query response. +#[derive(Copy, Clone, Debug)] +pub struct ClosureOutlivesSubjectTy<'tcx> { + inner: Ty<'tcx>, +} +// DO NOT implement `TypeVisitable` or `TypeFoldable` traits, because this +// type is not recognized as a binder for late-bound region. +impl<'tcx, I> !TypeVisitable for ClosureOutlivesSubjectTy<'tcx> {} +impl<'tcx, I> !TypeFoldable for ClosureOutlivesSubjectTy<'tcx> {} + +impl<'tcx> ClosureOutlivesSubjectTy<'tcx> { + /// All regions of `ty` must be of kind `ReVar` and must represent + /// universal regions *external* to the closure. + pub fn bind(tcx: TyCtxt<'tcx>, ty: Ty<'tcx>) -> Self { + let inner = fold_regions(tcx, ty, |r, depth| match r.kind() { + ty::ReVar(vid) => { + let br = ty::BoundRegion { + var: ty::BoundVar::from_usize(vid.index()), + kind: ty::BoundRegionKind::Anon, + }; + ty::Region::new_bound(tcx, depth, br) + } + _ => bug!("unexpected region in ClosureOutlivesSubjectTy: {r:?}"), + }); + + Self { inner } + } + + pub fn instantiate( + self, + tcx: TyCtxt<'tcx>, + mut map: impl FnMut(ty::RegionVid) -> ty::Region<'tcx>, + ) -> Ty<'tcx> { + fold_regions(tcx, self.inner, |r, depth| match r.kind() { + ty::ReBound(debruijn, br) => { + debug_assert_eq!(debruijn, depth); + map(ty::RegionVid::from_usize(br.var.index())) + } + _ => bug!("unexpected region {r:?}"), + }) + } } /// Perform the actual borrow checking. /// /// Use `consumer_options: None` for the default behavior of returning -/// [`BorrowCheckResult`] only. Otherwise, return [`BodyWithBorrowckFacts`] according -/// to the given [`ConsumerOptions`]. -#[instrument(skip(tcx, input_body, input_promoted), fields(id=?input_body.source.def_id()), level = "debug")] +/// [`PropagatedBorrowCheckResults`] only. Otherwise, return [`BodyWithBorrowckFacts`] +/// according to the given [`ConsumerOptions`]. +/// +/// For nested bodies this should only be called through `root_cx.get_or_insert_nested`. +#[instrument(skip(root_cx), level = "debug")] fn do_mir_borrowck<'tcx>( - tcx: TyCtxt<'tcx>, - input_body: &Body<'tcx>, - input_promoted: &IndexSlice>, + root_cx: &mut BorrowCheckRootCtxt<'tcx>, + def: LocalDefId, consumer_options: Option, -) -> (BorrowCheckResult<'tcx>, Option>>) { - let def = input_body.source.def_id().expect_local(); +) -> (PropagatedBorrowCheckResults<'tcx>, Option>>) { + let tcx = root_cx.tcx; let infcx = BorrowckInferCtxt::new(tcx, def); + let (input_body, promoted) = tcx.mir_promoted(def); + let input_body: &Body<'_> = &input_body.borrow(); + let input_promoted: &IndexSlice<_, _> = &promoted.borrow(); if let Some(e) = input_body.tainted_by_errors { infcx.set_tainted_by_errors(e); - } - - let mut local_names = IndexVec::from_elem(None, &input_body.local_decls); - for var_debug_info in &input_body.var_debug_info { - if let VarDebugInfoContents::Place(place) = var_debug_info.value { - if let Some(local) = place.as_local() { - if let Some(prev_name) = local_names[local] - && var_debug_info.name != prev_name - { - span_bug!( - var_debug_info.source_info.span, - "local {:?} has many names (`{}` vs `{}`)", - local, - prev_name, - var_debug_info.name - ); - } - local_names[local] = Some(var_debug_info.name); - } - } + root_cx.set_tainted_by_errors(e); } // Replace all regions with fresh inference variables. This @@ -170,15 +318,9 @@ fn do_mir_borrowck<'tcx>( // will have a lifetime tied to the inference context. let mut body_owned = input_body.clone(); let mut promoted = input_promoted.to_owned(); - let free_regions = nll::replace_regions_in_mir(&infcx, &mut body_owned, &mut promoted); + let universal_regions = nll::replace_regions_in_mir(&infcx, &mut body_owned, &mut promoted); let body = &body_owned; // no further changes - // FIXME(-Znext-solver): A bit dubious that we're only registering - // predefined opaques in the typeck root. - if infcx.next_trait_solver() && !infcx.tcx.is_typeck_child(body.source.def_id()) { - infcx.register_predefined_opaques_for_next_solver(def); - } - let location_table = PoloniusLocationTable::new(body); let move_data = MoveData::gather_moves(body, tcx, |_| true); @@ -193,15 +335,15 @@ fn do_mir_borrowck<'tcx>( // Compute non-lexical lifetimes. let nll::NllOutput { regioncx, - concrete_opaque_types, polonius_input, polonius_output, opt_closure_req, nll_errors, polonius_diagnostics, } = nll::compute_regions( + root_cx, &infcx, - free_regions, + universal_regions, body, &promoted, &location_table, @@ -214,31 +356,23 @@ fn do_mir_borrowck<'tcx>( // Dump MIR results into a file, if that is enabled. This lets us // write unit-tests, as well as helping with debugging. nll::dump_nll_mir(&infcx, body, ®ioncx, &opt_closure_req, &borrow_set); - - // We also have a `#[rustc_regions]` annotation that causes us to dump - // information. - let diags_buffer = &mut BorrowckDiagnosticsBuffer::default(); - nll::dump_annotation( + polonius::dump_polonius_mir( &infcx, body, ®ioncx, &opt_closure_req, - &concrete_opaque_types, - diags_buffer, + &borrow_set, + polonius_diagnostics.as_ref(), ); - let movable_coroutine = - // The first argument is the coroutine type passed by value - if let Some(local) = body.local_decls.raw.get(1) - // Get the interior types and args which typeck computed - && let ty::Coroutine(def_id, _) = *local.ty.kind() - && tcx.coroutine_movability(def_id) == hir::Movability::Movable - { - true - } else { - false - }; + // We also have a `#[rustc_regions]` annotation that causes us to dump + // information. + nll::dump_annotation(&infcx, body, ®ioncx, &opt_closure_req); + + let movable_coroutine = body.coroutine.is_some() + && tcx.coroutine_movability(def.to_def_id()) == hir::Movability::Movable; + let diags_buffer = &mut BorrowckDiagnosticsBuffer::default(); // While promoteds should mostly be correct by construction, we need to check them for // invalid moves to detect moving out of arrays:`struct S; fn main() { &([S][0]); }`. for promoted_body in &promoted { @@ -248,6 +382,7 @@ fn do_mir_borrowck<'tcx>( // this check out of `MirBorrowckCtxt`, actually doing so is far from trivial. let move_data = MoveData::gather_moves(promoted_body, tcx, |_| true); let mut promoted_mbcx = MirBorrowckCtxt { + root_cx, infcx: &infcx, body: promoted_body, move_data: &move_data, @@ -255,7 +390,6 @@ fn do_mir_borrowck<'tcx>( location_table: &location_table, movable_coroutine, fn_self_span_reported: Default::default(), - locals_are_invalidated_at_exit, access_place_error_reported: Default::default(), reservation_error_reported: Default::default(), uninitialized_error_reported: Default::default(), @@ -287,13 +421,33 @@ fn do_mir_borrowck<'tcx>( promoted_mbcx.report_move_errors(); } + let mut local_names = IndexVec::from_elem(None, &body.local_decls); + for var_debug_info in &body.var_debug_info { + if let VarDebugInfoContents::Place(place) = var_debug_info.value { + if let Some(local) = place.as_local() { + if let Some(prev_name) = local_names[local] + && var_debug_info.name != prev_name + { + span_bug!( + var_debug_info.source_info.span, + "local {:?} has many names (`{}` vs `{}`)", + local, + prev_name, + var_debug_info.name + ); + } + local_names[local] = Some(var_debug_info.name); + } + } + } + let mut mbcx = MirBorrowckCtxt { + root_cx, infcx: &infcx, body, move_data: &move_data, location_table: &location_table, movable_coroutine, - locals_are_invalidated_at_exit, fn_self_span_reported: Default::default(), access_place_error_reported: Default::default(), reservation_error_reported: Default::default(), @@ -306,35 +460,27 @@ fn do_mir_borrowck<'tcx>( local_names, region_names: RefCell::default(), next_region_name: RefCell::new(1), - polonius_output, move_errors: Vec::new(), diags_buffer, + polonius_output: polonius_output.as_deref(), polonius_diagnostics: polonius_diagnostics.as_ref(), }; // Compute and report region errors, if any. mbcx.report_region_errors(nll_errors); - let mut flow_results = get_flow_results(tcx, body, &move_data, &borrow_set, ®ioncx); + let (mut flow_analysis, flow_entry_states) = + get_flow_results(tcx, body, &move_data, &borrow_set, ®ioncx); visit_results( body, traversal::reverse_postorder(body).map(|(bb, _)| bb), - &mut flow_results, + &mut flow_analysis, + &flow_entry_states, &mut mbcx, ); mbcx.report_move_errors(); - // If requested, dump polonius MIR. - polonius::dump_polonius_mir( - &infcx, - body, - ®ioncx, - &borrow_set, - polonius_diagnostics.as_ref(), - &opt_closure_req, - ); - // For each non-user used mutable variable, check if it's been assigned from // a user-declared local. If so, then put that local into the used_mut set. // Note that this set is expected to be small - only upvars from closures @@ -355,17 +501,16 @@ fn do_mir_borrowck<'tcx>( debug!("mbcx.used_mut: {:?}", mbcx.used_mut); mbcx.lint_unused_mut(); - let tainted_by_errors = mbcx.emit_errors(); + if let Some(guar) = mbcx.emit_errors() { + mbcx.root_cx.set_tainted_by_errors(guar); + } - let result = BorrowCheckResult { - concrete_opaque_types: concrete_opaque_types.into_inner(), + let result = PropagatedBorrowCheckResults { closure_requirements: opt_closure_req, used_mut_upvars: mbcx.used_mut_upvars, - tainted_by_errors, }; let body_with_facts = if consumer_options.is_some() { - let output_facts = mbcx.polonius_output; Some(Box::new(BodyWithBorrowckFacts { body: body_owned, promoted, @@ -373,7 +518,7 @@ fn do_mir_borrowck<'tcx>( region_inference_context: regioncx, location_table: polonius_input.as_ref().map(|_| location_table), input_facts: polonius_input, - output_facts, + output_facts: polonius_output, })) } else { None @@ -390,7 +535,7 @@ fn get_flow_results<'a, 'tcx>( move_data: &'a MoveData<'tcx>, borrow_set: &'a BorrowSet<'tcx>, regioncx: &RegionInferenceContext<'tcx>, -) -> Results<'tcx, Borrowck<'a, 'tcx>> { +) -> (Borrowck<'a, 'tcx>, Results) { // We compute these three analyses individually, but them combine them into // a single results so that `mbcx` can visit them all together. let borrows = Borrows::new(tcx, body, regioncx, borrow_set).iterate_to_fixpoint( @@ -415,14 +560,14 @@ fn get_flow_results<'a, 'tcx>( ever_inits: ever_inits.analysis, }; - assert_eq!(borrows.entry_states.len(), uninits.entry_states.len()); - assert_eq!(borrows.entry_states.len(), ever_inits.entry_states.len()); - let entry_states: EntryStates<'_, Borrowck<'_, '_>> = - itertools::izip!(borrows.entry_states, uninits.entry_states, ever_inits.entry_states) + assert_eq!(borrows.results.len(), uninits.results.len()); + assert_eq!(borrows.results.len(), ever_inits.results.len()); + let results: Results<_> = + itertools::izip!(borrows.results, uninits.results, ever_inits.results) .map(|(borrows, uninits, ever_inits)| BorrowckDomain { borrows, uninits, ever_inits }) .collect(); - Results { analysis, entry_states } + (analysis, results) } pub(crate) struct BorrowckInferCtxt<'tcx> { @@ -433,7 +578,12 @@ pub(crate) struct BorrowckInferCtxt<'tcx> { impl<'tcx> BorrowckInferCtxt<'tcx> { pub(crate) fn new(tcx: TyCtxt<'tcx>, def_id: LocalDefId) -> Self { - let infcx = tcx.infer_ctxt().build(TypingMode::analysis_in_body(tcx, def_id)); + let typing_mode = if tcx.use_typing_mode_borrowck() { + TypingMode::borrowck(tcx, def_id) + } else { + TypingMode::analysis_in_body(tcx, def_id) + }; + let infcx = tcx.infer_ctxt().build(typing_mode); let param_env = tcx.param_env(def_id); BorrowckInferCtxt { infcx, reg_var_to_origin: RefCell::new(Default::default()), param_env } } @@ -480,28 +630,6 @@ impl<'tcx> BorrowckInferCtxt<'tcx> { next_region } - - /// With the new solver we prepopulate the opaque type storage during - /// MIR borrowck with the hidden types from HIR typeck. This is necessary - /// to avoid ambiguities as earlier goals can rely on the hidden type - /// of an opaque which is only constrained by a later goal. - fn register_predefined_opaques_for_next_solver(&self, def_id: LocalDefId) { - let tcx = self.tcx; - // OK to use the identity arguments for each opaque type key, since - // we remap opaques from HIR typeck back to their definition params. - for data in tcx.typeck(def_id).concrete_opaque_types.iter().map(|(k, v)| (*k, *v)) { - // HIR typeck did not infer the regions of the opaque, so we instantiate - // them with fresh inference variables. - let (key, hidden_ty) = fold_regions(tcx, data, |_, _| { - self.next_nll_region_var_in_universe( - NllRegionVariableOrigin::Existential { from_forall: false }, - ty::UniverseIndex::ROOT, - ) - }); - - self.inject_new_hidden_type_unchecked(key, hidden_ty); - } - } } impl<'tcx> Deref for BorrowckInferCtxt<'tcx> { @@ -513,6 +641,7 @@ impl<'tcx> Deref for BorrowckInferCtxt<'tcx> { } struct MirBorrowckCtxt<'a, 'infcx, 'tcx> { + root_cx: &'a mut BorrowCheckRootCtxt<'tcx>, infcx: &'infcx BorrowckInferCtxt<'tcx>, body: &'a Body<'tcx>, move_data: &'a MoveData<'tcx>, @@ -522,13 +651,6 @@ struct MirBorrowckCtxt<'a, 'infcx, 'tcx> { location_table: &'a PoloniusLocationTable, movable_coroutine: bool, - /// This keeps track of whether local variables are free-ed when the function - /// exits even without a `StorageDead`, which appears to be the case for - /// constants. - /// - /// I'm not sure this is the right approach - @eddyb could you try and - /// figure this out? - locals_are_invalidated_at_exit: bool, /// This field keeps track of when borrow errors are reported in the access_place function /// so that there is no duplicate reporting. This field cannot also be used for the conflicting /// borrow errors that is handled by the `reservation_error_reported` field as the inclusion @@ -576,12 +698,11 @@ struct MirBorrowckCtxt<'a, 'infcx, 'tcx> { /// The counter for generating new region names. next_region_name: RefCell, - /// Results of Polonius analysis. - polonius_output: Option>, - diags_buffer: &'a mut BorrowckDiagnosticsBuffer<'infcx, 'tcx>, move_errors: Vec>, + /// Results of Polonius analysis. + polonius_output: Option<&'a PoloniusOutput>, /// When using `-Zpolonius=next`: the data used to compute errors and diagnostics. polonius_diagnostics: Option<&'a PoloniusDiagnosticsContext>, } @@ -591,12 +712,12 @@ struct MirBorrowckCtxt<'a, 'infcx, 'tcx> { // 2. loans made in overlapping scopes do not conflict // 3. assignments do not affect things loaned out as immutable // 4. moves do not affect things loaned out in any way -impl<'a, 'tcx> ResultsVisitor<'a, 'tcx, Borrowck<'a, 'tcx>> for MirBorrowckCtxt<'a, '_, 'tcx> { +impl<'a, 'tcx> ResultsVisitor<'tcx, Borrowck<'a, 'tcx>> for MirBorrowckCtxt<'a, '_, 'tcx> { fn visit_after_early_statement_effect( &mut self, - _results: &mut Results<'tcx, Borrowck<'a, 'tcx>>, + _analysis: &mut Borrowck<'a, 'tcx>, state: &BorrowckDomain, - stmt: &'a Statement<'tcx>, + stmt: &Statement<'tcx>, location: Location, ) { debug!("MirBorrowckCtxt::process_statement({:?}, {:?}): {:?}", location, stmt, state); @@ -670,9 +791,9 @@ impl<'a, 'tcx> ResultsVisitor<'a, 'tcx, Borrowck<'a, 'tcx>> for MirBorrowckCtxt< fn visit_after_early_terminator_effect( &mut self, - _results: &mut Results<'tcx, Borrowck<'a, 'tcx>>, + _analysis: &mut Borrowck<'a, 'tcx>, state: &BorrowckDomain, - term: &'a Terminator<'tcx>, + term: &Terminator<'tcx>, loc: Location, ) { debug!("MirBorrowckCtxt::process_terminator({:?}, {:?}): {:?}", loc, term, state); @@ -684,7 +805,14 @@ impl<'a, 'tcx> ResultsVisitor<'a, 'tcx, Borrowck<'a, 'tcx>> for MirBorrowckCtxt< TerminatorKind::SwitchInt { discr, targets: _ } => { self.consume_operand(loc, (discr, span), state); } - TerminatorKind::Drop { place, target: _, unwind: _, replace } => { + TerminatorKind::Drop { + place, + target: _, + unwind: _, + replace, + drop: _, + async_fut: _, + } => { debug!( "visit_terminator_drop \ loc: {:?} term: {:?} place: {:?} span: {:?}", @@ -783,9 +911,9 @@ impl<'a, 'tcx> ResultsVisitor<'a, 'tcx, Borrowck<'a, 'tcx>> for MirBorrowckCtxt< fn visit_after_primary_terminator_effect( &mut self, - _results: &mut Results<'tcx, Borrowck<'a, 'tcx>>, + _analysis: &mut Borrowck<'a, 'tcx>, state: &BorrowckDomain, - term: &'a Terminator<'tcx>, + term: &Terminator<'tcx>, loc: Location, ) { let span = term.source_info.span; @@ -805,13 +933,20 @@ impl<'a, 'tcx> ResultsVisitor<'a, 'tcx, Borrowck<'a, 'tcx>> for MirBorrowckCtxt< | TerminatorKind::Return | TerminatorKind::TailCall { .. } | TerminatorKind::CoroutineDrop => { - // Returning from the function implicitly kills storage for all locals and statics. - // Often, the storage will already have been killed by an explicit - // StorageDead, but we don't always emit those (notably on unwind paths), - // so this "extra check" serves as a kind of backup. - for i in state.borrows.iter() { - let borrow = &self.borrow_set[i]; - self.check_for_invalidation_at_exit(loc, borrow, span); + match self.borrow_set.locals_state_at_exit() { + LocalsStateAtExit::AllAreInvalidated => { + // Returning from the function implicitly kills storage for all locals and statics. + // Often, the storage will already have been killed by an explicit + // StorageDead, but we don't always emit those (notably on unwind paths), + // so this "extra check" serves as a kind of backup. + for i in state.borrows.iter() { + let borrow = &self.borrow_set[i]; + self.check_for_invalidation_at_exit(loc, borrow, span); + } + } + // If we do not implicitly invalidate all locals on exit, + // we check for conflicts when dropping or moving this local. + LocalsStateAtExit::SomeAreInvalidated { has_storage_dead_or_moved: _ } => {} } } @@ -1168,7 +1303,7 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, '_, 'tcx> { error_reported } - /// Through #123739, backward incompatible drops (BIDs) are introduced. + /// Through #123739, `BackwardIncompatibleDropHint`s (BIDs) are introduced. /// We would like to emit lints whether borrow checking fails at these future drop locations. #[instrument(level = "debug", skip(self, state))] fn check_backward_incompatible_drop( @@ -1245,7 +1380,7 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, '_, 'tcx> { fn consume_rvalue( &mut self, location: Location, - (rvalue, span): (&'a Rvalue<'tcx>, Span), + (rvalue, span): (&Rvalue<'tcx>, Span), state: &BorrowckDomain, ) { match rvalue { @@ -1386,11 +1521,13 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, '_, 'tcx> { | AggregateKind::CoroutineClosure(def_id, _) | AggregateKind::Coroutine(def_id, _) => { let def_id = def_id.expect_local(); - let BorrowCheckResult { used_mut_upvars, .. } = - self.infcx.tcx.mir_borrowck(def_id); + let used_mut_upvars = self.root_cx.used_mut_upvars(def_id); debug!("{:?} used_mut_upvars={:?}", def_id, used_mut_upvars); - for field in used_mut_upvars { - self.propagate_closure_used_mut_upvar(&operands[*field]); + // FIXME: We're cloning the `SmallVec` here to avoid borrowing `root_cx` + // when calling `propagate_closure_used_mut_upvar`. This should ideally + // be unnecessary. + for field in used_mut_upvars.clone() { + self.propagate_closure_used_mut_upvar(&operands[field]); } } AggregateKind::Adt(..) @@ -1516,7 +1653,7 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, '_, 'tcx> { fn consume_operand( &mut self, location: Location, - (operand, span): (&'a Operand<'tcx>, Span), + (operand, span): (&Operand<'tcx>, Span), state: &BorrowckDomain, ) { match *operand { @@ -1581,22 +1718,15 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, '_, 'tcx> { // we'll have a memory leak) and assume that all statics have a destructor. // // FIXME: allow thread-locals to borrow other thread locals? - - let (might_be_alive, will_be_dropped) = - if self.body.local_decls[root_place.local].is_ref_to_thread_local() { - // Thread-locals might be dropped after the function exits - // We have to dereference the outer reference because - // borrows don't conflict behind shared references. - root_place.projection = TyCtxtConsts::DEREF_PROJECTION; - (true, true) - } else { - (false, self.locals_are_invalidated_at_exit) - }; - - if !will_be_dropped { - debug!("place_is_invalidated_at_exit({:?}) - won't be dropped", place); - return; - } + let might_be_alive = if self.body.local_decls[root_place.local].is_ref_to_thread_local() { + // Thread-locals might be dropped after the function exits + // We have to dereference the outer reference because + // borrows don't conflict behind shared references. + root_place.projection = TyCtxtConsts::DEREF_PROJECTION; + true + } else { + false + }; let sd = if might_be_alive { Deep } else { Shallow(None) }; diff --git a/compiler/rustc_borrowck/src/nll.rs b/compiler/rustc_borrowck/src/nll.rs index 8e7b6f083acaa..fe899bb054fa9 100644 --- a/compiler/rustc_borrowck/src/nll.rs +++ b/compiler/rustc_borrowck/src/nll.rs @@ -8,10 +8,7 @@ use std::str::FromStr; use polonius_engine::{Algorithm, Output}; use rustc_index::IndexSlice; use rustc_middle::mir::pretty::{PrettyPrintMirOptions, dump_mir_with_options}; -use rustc_middle::mir::{ - Body, ClosureOutlivesSubject, ClosureRegionRequirements, PassWhere, Promoted, create_dump_file, - dump_enabled, dump_mir, -}; +use rustc_middle::mir::{Body, PassWhere, Promoted, create_dump_file, dump_enabled, dump_mir}; use rustc_middle::ty::print::with_no_trimmed_paths; use rustc_middle::ty::{self, TyCtxt}; use rustc_mir_dataflow::ResultsCursor; @@ -24,8 +21,7 @@ use tracing::{debug, instrument}; use crate::borrow_set::BorrowSet; use crate::consumers::ConsumerOptions; -use crate::diagnostics::{BorrowckDiagnosticsBuffer, RegionErrors}; -use crate::opaque_types::ConcreteOpaqueTypes; +use crate::diagnostics::RegionErrors; use crate::polonius::PoloniusDiagnosticsContext; use crate::polonius::legacy::{ PoloniusFacts, PoloniusFactsExt, PoloniusLocationTable, PoloniusOutput, @@ -33,13 +29,15 @@ use crate::polonius::legacy::{ use crate::region_infer::RegionInferenceContext; use crate::type_check::{self, MirTypeckResults}; use crate::universal_regions::UniversalRegions; -use crate::{BorrowckInferCtxt, polonius, renumber}; +use crate::{ + BorrowCheckRootCtxt, BorrowckInferCtxt, ClosureOutlivesSubject, ClosureRegionRequirements, + polonius, renumber, +}; /// The output of `nll::compute_regions`. This includes the computed `RegionInferenceContext`, any /// closure requirements to propagate, and any generated errors. pub(crate) struct NllOutput<'tcx> { pub regioncx: RegionInferenceContext<'tcx>, - pub concrete_opaque_types: ConcreteOpaqueTypes<'tcx>, pub polonius_input: Option>, pub polonius_output: Option>, pub opt_closure_req: Option>, @@ -78,6 +76,7 @@ pub(crate) fn replace_regions_in_mir<'tcx>( /// /// This may result in errors being reported. pub(crate) fn compute_regions<'a, 'tcx>( + root_cx: &mut BorrowCheckRootCtxt<'tcx>, infcx: &BorrowckInferCtxt<'tcx>, universal_regions: UniversalRegions<'tcx>, body: &Body<'tcx>, @@ -98,8 +97,6 @@ pub(crate) fn compute_regions<'a, 'tcx>( let location_map = Rc::new(DenseLocationMap::new(body)); - let mut concrete_opaque_types = ConcreteOpaqueTypes::default(); - // Run the MIR type-checker. let MirTypeckResults { constraints, @@ -107,6 +104,7 @@ pub(crate) fn compute_regions<'a, 'tcx>( opaque_type_values, polonius_context, } = type_check::type_check( + root_cx, infcx, body, promoted, @@ -117,14 +115,8 @@ pub(crate) fn compute_regions<'a, 'tcx>( flow_inits, move_data, Rc::clone(&location_map), - &mut concrete_opaque_types, ); - // Create the region inference context, taking ownership of the - // region inference data that was contained in `infcx`, and the - // base constraints generated by the type-check. - let var_infos = infcx.get_region_var_infos(); - // If requested, emit legacy polonius facts. polonius::legacy::emit_facts( &mut polonius_facts, @@ -137,13 +129,8 @@ pub(crate) fn compute_regions<'a, 'tcx>( &constraints, ); - let mut regioncx = RegionInferenceContext::new( - infcx, - var_infos, - constraints, - universal_region_relations, - location_map, - ); + let mut regioncx = + RegionInferenceContext::new(infcx, constraints, universal_region_relations, location_map); // If requested for `-Zpolonius=next`, convert NLL constraints to localized outlives constraints // and use them to compute loan liveness. @@ -181,11 +168,10 @@ pub(crate) fn compute_regions<'a, 'tcx>( infcx.set_tainted_by_errors(guar); } - regioncx.infer_opaque_types(infcx, opaque_type_values, &mut concrete_opaque_types); + regioncx.infer_opaque_types(root_cx, infcx, opaque_type_values); NllOutput { regioncx, - concrete_opaque_types, polonius_input: polonius_facts.map(Box::new), polonius_output, opt_closure_req: closure_region_requirements, @@ -301,8 +287,6 @@ pub(super) fn dump_annotation<'tcx, 'infcx>( body: &Body<'tcx>, regioncx: &RegionInferenceContext<'tcx>, closure_region_requirements: &Option>, - concrete_opaque_types: &ConcreteOpaqueTypes<'tcx>, - diagnostics_buffer: &mut BorrowckDiagnosticsBuffer<'infcx, 'tcx>, ) { let tcx = infcx.tcx; let base_def_id = tcx.typeck_root_def_id(body.source.def_id()); @@ -318,7 +302,7 @@ pub(super) fn dump_annotation<'tcx, 'infcx>( // better. let def_span = tcx.def_span(body.source.def_id()); - let mut err = if let Some(closure_region_requirements) = closure_region_requirements { + let err = if let Some(closure_region_requirements) = closure_region_requirements { let mut err = infcx.dcx().struct_span_note(def_span, "external requirements"); regioncx.annotate(tcx, &mut err); @@ -340,15 +324,11 @@ pub(super) fn dump_annotation<'tcx, 'infcx>( } else { let mut err = infcx.dcx().struct_span_note(def_span, "no external requirements"); regioncx.annotate(tcx, &mut err); - err }; - if !concrete_opaque_types.is_empty() { - err.note(format!("Inferred opaque type values:\n{concrete_opaque_types:#?}")); - } - - diagnostics_buffer.buffer_non_error(err); + // FIXME(@lcnr): We currently don't dump the inferred hidden types here. + err.emit(); } fn for_each_region_constraint<'tcx>( diff --git a/compiler/rustc_borrowck/src/opaque_types.rs b/compiler/rustc_borrowck/src/opaque_types.rs deleted file mode 100644 index 5c78814abdd2d..0000000000000 --- a/compiler/rustc_borrowck/src/opaque_types.rs +++ /dev/null @@ -1,55 +0,0 @@ -use rustc_data_structures::fx::FxIndexMap; -use rustc_hir::def_id::LocalDefId; -use rustc_middle::ty::{OpaqueHiddenType, Ty, TyCtxt}; - -#[derive(Debug, Default)] -pub(super) struct ConcreteOpaqueTypes<'tcx> { - concrete_opaque_types: FxIndexMap>, -} - -impl<'tcx> ConcreteOpaqueTypes<'tcx> { - pub(super) fn is_empty(&self) -> bool { - self.concrete_opaque_types.is_empty() - } - - pub(super) fn into_inner(self) -> FxIndexMap> { - self.concrete_opaque_types - } - - /// Insert an opaque type into the list of opaque types defined by this function - /// after mapping the hidden type to the generic parameters of the opaque type - /// definition. - pub(super) fn insert( - &mut self, - tcx: TyCtxt<'tcx>, - def_id: LocalDefId, - hidden_ty: OpaqueHiddenType<'tcx>, - ) { - // Sometimes two opaque types are the same only after we remap the generic parameters - // back to the opaque type definition. E.g. we may have `OpaqueType` mapped to - // `(X, Y)` and `OpaqueType` mapped to `(Y, X)`, and those are the same, but we - // only know that once we convert the generic parameters to those of the opaque type. - if let Some(prev) = self.concrete_opaque_types.get_mut(&def_id) { - if prev.ty != hidden_ty.ty { - let (Ok(guar) | Err(guar)) = - prev.build_mismatch_error(&hidden_ty, tcx).map(|d| d.emit()); - prev.ty = Ty::new_error(tcx, guar); - } - // Pick a better span if there is one. - // FIXME(oli-obk): collect multiple spans for better diagnostics down the road. - prev.span = prev.span.substitute_dummy(hidden_ty.span); - } else { - self.concrete_opaque_types.insert(def_id, hidden_ty); - } - } - - pub(super) fn extend_from_nested_body( - &mut self, - tcx: TyCtxt<'tcx>, - nested_body: &FxIndexMap>, - ) { - for (&def_id, &hidden_ty) in nested_body { - self.insert(tcx, def_id, hidden_ty); - } - } -} diff --git a/compiler/rustc_borrowck/src/polonius/dump.rs b/compiler/rustc_borrowck/src/polonius/dump.rs index aa64a7c4e2a68..6a943e1920821 100644 --- a/compiler/rustc_borrowck/src/polonius/dump.rs +++ b/compiler/rustc_borrowck/src/polonius/dump.rs @@ -5,7 +5,7 @@ use rustc_index::IndexVec; use rustc_middle::mir::pretty::{ PassWhere, PrettyPrintMirOptions, create_dump_file, dump_enabled, dump_mir_to_writer, }; -use rustc_middle::mir::{Body, ClosureRegionRequirements, Location}; +use rustc_middle::mir::{Body, Location}; use rustc_middle::ty::{RegionVid, TyCtxt}; use rustc_mir_dataflow::points::PointIndex; use rustc_session::config::MirIncludeSpans; @@ -17,16 +17,16 @@ use crate::polonius::{ }; use crate::region_infer::values::LivenessValues; use crate::type_check::Locations; -use crate::{BorrowckInferCtxt, RegionInferenceContext}; +use crate::{BorrowckInferCtxt, ClosureRegionRequirements, RegionInferenceContext}; /// `-Zdump-mir=polonius` dumps MIR annotated with NLL and polonius specific information. pub(crate) fn dump_polonius_mir<'tcx>( infcx: &BorrowckInferCtxt<'tcx>, body: &Body<'tcx>, regioncx: &RegionInferenceContext<'tcx>, + closure_region_requirements: &Option>, borrow_set: &BorrowSet<'tcx>, polonius_diagnostics: Option<&PoloniusDiagnosticsContext>, - closure_region_requirements: &Option>, ) { let tcx = infcx.tcx; if !tcx.sess.opts.unstable_opts.polonius.is_next_enabled() { @@ -334,7 +334,7 @@ fn emit_mermaid_nll_regions<'tcx>( writeln!(out, "flowchart TD")?; // Emit the region nodes. - for region in regioncx.var_infos.indices() { + for region in regioncx.definitions.indices() { write!(out, "{}[\"", region.as_usize())?; render_region(region, regioncx, out)?; writeln!(out, "\"]")?; @@ -387,7 +387,7 @@ fn emit_mermaid_nll_sccs<'tcx>( // Gather and emit the SCC nodes. let mut nodes_per_scc: IndexVec<_, _> = regioncx.constraint_sccs().all_sccs().map(|_| Vec::new()).collect(); - for region in regioncx.var_infos.indices() { + for region in regioncx.definitions.indices() { let scc = regioncx.constraint_sccs().scc(region); nodes_per_scc[scc].push(region); } diff --git a/compiler/rustc_borrowck/src/polonius/legacy/loan_invalidations.rs b/compiler/rustc_borrowck/src/polonius/legacy/loan_invalidations.rs index 0d1d8642bcacc..99dd0b2dd4664 100644 --- a/compiler/rustc_borrowck/src/polonius/legacy/loan_invalidations.rs +++ b/compiler/rustc_borrowck/src/polonius/legacy/loan_invalidations.rs @@ -101,7 +101,14 @@ impl<'a, 'tcx> Visitor<'tcx> for LoanInvalidationsGenerator<'a, 'tcx> { TerminatorKind::SwitchInt { discr, targets: _ } => { self.consume_operand(location, discr); } - TerminatorKind::Drop { place: drop_place, target: _, unwind: _, replace } => { + TerminatorKind::Drop { + place: drop_place, + target: _, + unwind: _, + replace, + drop: _, + async_fut: _, + } => { let write_kind = if *replace { WriteKind::Replace } else { WriteKind::StorageDeadOrDrop }; self.access_place( diff --git a/compiler/rustc_borrowck/src/region_infer/mod.rs b/compiler/rustc_borrowck/src/region_infer/mod.rs index a80d74d9e370a..b4ff3d66f3d5b 100644 --- a/compiler/rustc_borrowck/src/region_infer/mod.rs +++ b/compiler/rustc_borrowck/src/region_infer/mod.rs @@ -9,22 +9,20 @@ use rustc_errors::Diag; use rustc_hir::def_id::CRATE_DEF_ID; use rustc_index::IndexVec; use rustc_infer::infer::outlives::test_type_match; -use rustc_infer::infer::region_constraints::{GenericKind, VarInfos, VerifyBound, VerifyIfEq}; +use rustc_infer::infer::region_constraints::{GenericKind, VerifyBound, VerifyIfEq}; use rustc_infer::infer::{InferCtxt, NllRegionVariableOrigin, RegionVariableOrigin}; use rustc_middle::bug; use rustc_middle::mir::{ - AnnotationSource, BasicBlock, Body, ClosureOutlivesRequirement, ClosureOutlivesSubject, - ClosureOutlivesSubjectTy, ClosureRegionRequirements, ConstraintCategory, Local, Location, - ReturnConstraint, TerminatorKind, + AnnotationSource, BasicBlock, Body, ConstraintCategory, Local, Location, ReturnConstraint, + TerminatorKind, }; use rustc_middle::traits::{ObligationCause, ObligationCauseCode}; use rustc_middle::ty::{self, RegionVid, Ty, TyCtxt, TypeFoldable, UniverseIndex, fold_regions}; use rustc_mir_dataflow::points::DenseLocationMap; use rustc_span::hygiene::DesugaringKind; use rustc_span::{DUMMY_SP, Span}; -use tracing::{debug, instrument, trace}; +use tracing::{Level, debug, enabled, instrument, trace}; -use crate::BorrowckInferCtxt; use crate::constraints::graph::{self, NormalConstraintGraph, RegionGraph}; use crate::constraints::{ConstraintSccIndex, OutlivesConstraint, OutlivesConstraintSet}; use crate::dataflow::BorrowIndex; @@ -37,6 +35,10 @@ use crate::region_infer::values::{LivenessValues, RegionElement, RegionValues, T use crate::type_check::free_region_relations::UniversalRegionRelations; use crate::type_check::{Locations, MirTypeckRegionConstraints}; use crate::universal_regions::UniversalRegions; +use crate::{ + BorrowckInferCtxt, ClosureOutlivesRequirement, ClosureOutlivesSubject, + ClosureOutlivesSubjectTy, ClosureRegionRequirements, +}; mod dump_mir; mod graphviz; @@ -45,12 +47,13 @@ mod reverse_sccs; pub(crate) mod values; -pub(crate) type ConstraintSccs = Sccs; +pub(crate) type ConstraintSccs = Sccs; +pub(crate) type AnnotatedSccs = (ConstraintSccs, IndexVec); /// An annotation for region graph SCCs that tracks -/// the values of its elements. +/// the values of its elements. This annotates a single SCC. #[derive(Copy, Debug, Clone)] -pub struct RegionTracker { +pub(crate) struct RegionTracker { /// The largest universe of a placeholder reached from this SCC. /// This includes placeholders within this SCC. max_placeholder_universe_reached: UniverseIndex, @@ -95,6 +98,32 @@ impl scc::Annotation for RegionTracker { } } +/// A Visitor for SCC annotation construction. +pub(crate) struct SccAnnotations<'d, 'tcx, A: scc::Annotation> { + pub(crate) scc_to_annotation: IndexVec, + definitions: &'d IndexVec>, +} + +impl<'d, 'tcx, A: scc::Annotation> SccAnnotations<'d, 'tcx, A> { + pub(crate) fn new(definitions: &'d IndexVec>) -> Self { + Self { scc_to_annotation: IndexVec::new(), definitions } + } +} + +impl scc::Annotations for SccAnnotations<'_, '_, RegionTracker> { + fn new(&self, element: RegionVid) -> RegionTracker { + RegionTracker::new(element, &self.definitions[element]) + } + + fn annotate_scc(&mut self, scc: ConstraintSccIndex, annotation: RegionTracker) { + let idx = self.scc_to_annotation.push(annotation); + assert!(idx == scc); + } + + type Ann = RegionTracker; + type SccIdx = ConstraintSccIndex; +} + impl RegionTracker { pub(crate) fn new(rvid: RegionVid, definition: &RegionDefinition<'_>) -> Self { let (representative_is_placeholder, representative_is_existential) = match definition.origin @@ -139,13 +168,11 @@ impl RegionTracker { } pub struct RegionInferenceContext<'tcx> { - pub var_infos: VarInfos, - /// Contains the definition for every region variable. Region /// variables are identified by their index (`RegionVid`). The /// definition contains information about where the region came /// from as well as its final inferred value. - definitions: IndexVec>, + pub(crate) definitions: Frozen>>, /// The liveness constraints added to each region. For most /// regions, these start out empty and steadily grow, though for @@ -166,6 +193,8 @@ pub struct RegionInferenceContext<'tcx> { /// compute the values of each region. constraint_sccs: ConstraintSccs, + scc_annotations: IndexVec, + /// Reverse of the SCC constraint graph -- i.e., an edge `A -> B` exists if /// `B: A`. This is used to compute the universal regions that are required /// to outlive a given SCC. Computed lazily. @@ -327,31 +356,34 @@ fn sccs_info<'tcx>(infcx: &BorrowckInferCtxt<'tcx>, sccs: &ConstraintSccs) { let mut var_to_origin_sorted = var_to_origin.clone().into_iter().collect::>(); var_to_origin_sorted.sort_by_key(|vto| vto.0); - let mut reg_vars_to_origins_str = "region variables to origins:\n".to_string(); - for (reg_var, origin) in var_to_origin_sorted.into_iter() { - reg_vars_to_origins_str.push_str(&format!("{reg_var:?}: {origin:?}\n")); + if enabled!(Level::DEBUG) { + let mut reg_vars_to_origins_str = "region variables to origins:\n".to_string(); + for (reg_var, origin) in var_to_origin_sorted.into_iter() { + reg_vars_to_origins_str.push_str(&format!("{reg_var:?}: {origin:?}\n")); + } + debug!("{}", reg_vars_to_origins_str); } - debug!("{}", reg_vars_to_origins_str); let num_components = sccs.num_sccs(); let mut components = vec![FxIndexSet::default(); num_components]; - for (reg_var_idx, scc_idx) in sccs.scc_indices().iter().enumerate() { - let reg_var = ty::RegionVid::from_usize(reg_var_idx); + for (reg_var, scc_idx) in sccs.scc_indices().iter_enumerated() { let origin = var_to_origin.get(®_var).unwrap_or(&RegionCtxt::Unknown); components[scc_idx.as_usize()].insert((reg_var, *origin)); } - let mut components_str = "strongly connected components:".to_string(); - for (scc_idx, reg_vars_origins) in components.iter().enumerate() { - let regions_info = reg_vars_origins.clone().into_iter().collect::>(); - components_str.push_str(&format!( - "{:?}: {:?},\n)", - ConstraintSccIndex::from_usize(scc_idx), - regions_info, - )) + if enabled!(Level::DEBUG) { + let mut components_str = "strongly connected components:".to_string(); + for (scc_idx, reg_vars_origins) in components.iter().enumerate() { + let regions_info = reg_vars_origins.clone().into_iter().collect::>(); + components_str.push_str(&format!( + "{:?}: {:?},\n)", + ConstraintSccIndex::from_usize(scc_idx), + regions_info, + )) + } + debug!("{}", components_str); } - debug!("{}", components_str); // calculate the best representative for each component let components_representatives = components @@ -381,6 +413,26 @@ fn sccs_info<'tcx>(infcx: &BorrowckInferCtxt<'tcx>, sccs: &ConstraintSccs) { debug!("SCC edges {:#?}", scc_node_to_edges); } +fn create_definitions<'tcx>( + infcx: &BorrowckInferCtxt<'tcx>, + universal_regions: &UniversalRegions<'tcx>, +) -> Frozen>> { + // Create a RegionDefinition for each inference variable. + let mut definitions: IndexVec<_, _> = infcx + .get_region_var_infos() + .iter() + .map(|info| RegionDefinition::new(info.universe, info.origin)) + .collect(); + + // Add the external name for all universal regions. + for (external_name, variable) in universal_regions.named_universal_regions_iter() { + debug!("region {variable:?} has external name {external_name:?}"); + definitions[variable].external_name = Some(external_name); + } + + Frozen::freeze(definitions) +} + impl<'tcx> RegionInferenceContext<'tcx> { /// Creates a new region inference context with a total of /// `num_region_variables` valid inference variables; the first N @@ -391,7 +443,6 @@ impl<'tcx> RegionInferenceContext<'tcx> { /// of constraints produced by the MIR type check. pub(crate) fn new( infcx: &BorrowckInferCtxt<'tcx>, - var_infos: VarInfos, constraints: MirTypeckRegionConstraints<'tcx>, universal_region_relations: Frozen>, location_map: Rc, @@ -422,13 +473,9 @@ impl<'tcx> RegionInferenceContext<'tcx> { infcx.set_tainted_by_errors(guar); } - // Create a RegionDefinition for each inference variable. - let definitions: IndexVec<_, _> = var_infos - .iter() - .map(|info| RegionDefinition::new(info.universe, info.origin)) - .collect(); + let definitions = create_definitions(infcx, &universal_regions); - let constraint_sccs = + let (constraint_sccs, scc_annotations) = outlives_constraints.add_outlives_static(&universal_regions, &definitions); let constraints = Frozen::freeze(outlives_constraints); let constraint_graph = Frozen::freeze(constraints.graph(definitions.len())); @@ -449,12 +496,12 @@ impl<'tcx> RegionInferenceContext<'tcx> { Rc::new(member_constraints.into_mapped(|r| constraint_sccs.scc(r))); let mut result = Self { - var_infos, definitions, liveness_constraints, constraints, constraint_graph, constraint_sccs, + scc_annotations, rev_scc_graph: None, member_constraints, member_constraints_applied: Vec::new(), @@ -523,18 +570,6 @@ impl<'tcx> RegionInferenceContext<'tcx> { /// means that the `R1: !1` constraint here will cause /// `R1` to become `'static`. fn init_free_and_bound_regions(&mut self) { - // Update the names (if any) - // This iterator has unstable order but we collect it all into an IndexVec - for (external_name, variable) in - self.universal_region_relations.universal_regions.named_universal_regions_iter() - { - debug!( - "init_free_and_bound_regions: region {:?} has external name {:?}", - variable, external_name - ); - self.definitions[variable].external_name = Some(external_name); - } - for variable in self.definitions.indices() { let scc = self.constraint_sccs.scc(variable); @@ -793,7 +828,7 @@ impl<'tcx> RegionInferenceContext<'tcx> { // If the member region lives in a higher universe, we currently choose // the most conservative option by leaving it unchanged. - if !self.constraint_sccs().annotation(scc).min_universe().is_root() { + if !self.scc_universe(scc).is_root() { return; } @@ -869,8 +904,8 @@ impl<'tcx> RegionInferenceContext<'tcx> { /// in `scc_a`. Used during constraint propagation, and only once /// the value of `scc_b` has been computed. fn universe_compatible(&self, scc_b: ConstraintSccIndex, scc_a: ConstraintSccIndex) -> bool { - let a_annotation = self.constraint_sccs().annotation(scc_a); - let b_annotation = self.constraint_sccs().annotation(scc_b); + let a_annotation = self.scc_annotations[scc_a]; + let b_annotation = self.scc_annotations[scc_b]; let a_universe = a_annotation.min_universe(); // If scc_b's declared universe is a subset of @@ -986,7 +1021,7 @@ impl<'tcx> RegionInferenceContext<'tcx> { "lower_bound = {:?} r_scc={:?} universe={:?}", lower_bound, r_scc, - self.constraint_sccs.annotation(r_scc).min_universe() + self.scc_universe(r_scc) ); // If the type test requires that `T: 'a` where `'a` is a // placeholder from another universe, that effectively requires @@ -1467,7 +1502,7 @@ impl<'tcx> RegionInferenceContext<'tcx> { /// The minimum universe of any variable reachable from this /// SCC, inside or outside of it. fn scc_universe(&self, scc: ConstraintSccIndex) -> UniverseIndex { - self.constraint_sccs().annotation(scc).min_universe() + self.scc_annotations[scc].min_universe() } /// Checks the final value for the free region `fr` to see if it @@ -1623,30 +1658,23 @@ impl<'tcx> RegionInferenceContext<'tcx> { let longer_fr_scc = self.constraint_sccs.scc(longer_fr); debug!("check_bound_universal_region: longer_fr_scc={:?}", longer_fr_scc,); - for error_element in self.scc_values.elements_contained_in(longer_fr_scc) { - match error_element { - RegionElement::Location(_) | RegionElement::RootUniversalRegion(_) => {} - // If we have some bound universal region `'a`, then the only - // elements it can contain is itself -- we don't know anything - // else about it! - RegionElement::PlaceholderRegion(placeholder1) => { - if placeholder == placeholder1 { - continue; - } - } - } - + // If we have some bound universal region `'a`, then the only + // elements it can contain is itself -- we don't know anything + // else about it! + if let Some(error_element) = self + .scc_values + .elements_contained_in(longer_fr_scc) + .find(|e| *e != RegionElement::PlaceholderRegion(placeholder)) + { + // Stop after the first error, it gets too noisy otherwise, and does not provide more information. errors_buffer.push(RegionErrorKind::BoundUniversalRegionError { longer_fr, error_element, placeholder, }); - - // Stop after the first error, it gets too noisy otherwise, and does not provide more - // information. - break; + } else { + debug!("check_bound_universal_region: all bounds satisfied"); } - debug!("check_bound_universal_region: all bounds satisfied"); } #[instrument(level = "debug", skip(self, infcx, errors_buffer))] @@ -2066,7 +2094,7 @@ impl<'tcx> RegionInferenceContext<'tcx> { constraint.category }; - match category { + let interest = match category { // Returns usually provide a type to blame and have specially written diagnostics, // so prioritize them. ConstraintCategory::Return(_) => 0, @@ -2118,9 +2146,13 @@ impl<'tcx> RegionInferenceContext<'tcx> { // specific, and are not used for relations that would make sense to blame. ConstraintCategory::BoringNoLocation => 6, // Do not blame internal constraints. - ConstraintCategory::Internal => 7, - ConstraintCategory::IllegalUniverse => 8, - } + ConstraintCategory::IllegalUniverse => 7, + ConstraintCategory::Internal => 8, + }; + + debug!("constraint {constraint:?} category: {category:?}, interest: {interest:?}"); + + interest }; let best_choice = if blame_source { @@ -2214,7 +2246,7 @@ impl<'tcx> RegionInferenceContext<'tcx> { /// they *must* be equal (though not having the same repr does not /// mean they are unequal). fn scc_representative(&self, scc: ConstraintSccIndex) -> RegionVid { - self.constraint_sccs.annotation(scc).representative + self.scc_annotations[scc].representative } pub(crate) fn liveness_constraints(&self) -> &LivenessValues { diff --git a/compiler/rustc_borrowck/src/region_infer/opaque_types.rs b/compiler/rustc_borrowck/src/region_infer/opaque_types.rs index ca8b9fb4e9d82..25cbd579ea1cd 100644 --- a/compiler/rustc_borrowck/src/region_infer/opaque_types.rs +++ b/compiler/rustc_borrowck/src/region_infer/opaque_types.rs @@ -1,22 +1,17 @@ use rustc_data_structures::fx::FxIndexMap; -use rustc_errors::ErrorGuaranteed; -use rustc_hir::OpaqueTyOrigin; -use rustc_hir::def_id::LocalDefId; -use rustc_infer::infer::outlives::env::OutlivesEnvironment; -use rustc_infer::infer::{InferCtxt, NllRegionVariableOrigin, TyCtxtInferExt as _}; +use rustc_infer::infer::{InferCtxt, NllRegionVariableOrigin}; use rustc_macros::extension; use rustc_middle::ty::{ - self, GenericArgKind, GenericArgs, OpaqueHiddenType, OpaqueTypeKey, Ty, TyCtxt, TypeFoldable, - TypeVisitableExt, TypingMode, fold_regions, + self, DefiningScopeKind, OpaqueHiddenType, OpaqueTypeKey, Ty, TyCtxt, TypeFoldable, + TypeVisitableExt, fold_regions, }; use rustc_span::Span; -use rustc_trait_selection::regions::OutlivesEnvironmentBuildExt; -use rustc_trait_selection::traits::ObligationCtxt; +use rustc_trait_selection::opaque_types::check_opaque_type_parameter_valid; use tracing::{debug, instrument}; use super::RegionInferenceContext; -use crate::opaque_types::ConcreteOpaqueTypes; -use crate::session_diagnostics::{LifetimeMismatchOpaqueParam, NonGenericOpaqueTypeParam}; +use crate::BorrowCheckRootCtxt; +use crate::session_diagnostics::LifetimeMismatchOpaqueParam; use crate::universal_regions::RegionClassification; impl<'tcx> RegionInferenceContext<'tcx> { @@ -63,12 +58,12 @@ impl<'tcx> RegionInferenceContext<'tcx> { /// /// [rustc-dev-guide chapter]: /// https://rustc-dev-guide.rust-lang.org/opaque-types-region-infer-restrictions.html - #[instrument(level = "debug", skip(self, infcx), ret)] + #[instrument(level = "debug", skip(self, root_cx, infcx), ret)] pub(crate) fn infer_opaque_types( &self, + root_cx: &mut BorrowCheckRootCtxt<'tcx>, infcx: &InferCtxt<'tcx>, opaque_ty_decls: FxIndexMap, OpaqueHiddenType<'tcx>>, - concrete_opaque_types: &mut ConcreteOpaqueTypes<'tcx>, ) { let mut decls_modulo_regions: FxIndexMap, (OpaqueTypeKey<'tcx>, Span)> = FxIndexMap::default(); @@ -145,11 +140,11 @@ impl<'tcx> RegionInferenceContext<'tcx> { } } - concrete_opaque_types.insert( - infcx.tcx, + root_cx.add_concrete_opaque_type( opaque_type_key.def_id, - OpaqueHiddenType { ty, span: concrete_type.span }, + OpaqueHiddenType { span: concrete_type.span, ty }, ); + // Check that all opaque types have the same region parameters if they have the same // non-region parameters. This is necessary because within the new solver we perform // various query operations modulo regions, and thus could unsoundly select some impls @@ -191,7 +186,7 @@ impl<'tcx> RegionInferenceContext<'tcx> { where T: TypeFoldable>, { - fold_regions(tcx, ty, |region, _| match *region { + fold_regions(tcx, ty, |region, _| match region.kind() { ty::ReVar(vid) => { let scc = self.constraint_sccs.scc(vid); @@ -272,14 +267,21 @@ impl<'tcx> InferCtxt<'tcx> { return Ty::new_error(self.tcx, e); } - if let Err(guar) = - check_opaque_type_parameter_valid(self, opaque_type_key, instantiated_ty.span) - { - return Ty::new_error(self.tcx, guar); + if let Err(err) = check_opaque_type_parameter_valid( + self, + opaque_type_key, + instantiated_ty.span, + DefiningScopeKind::MirBorrowck, + ) { + return Ty::new_error(self.tcx, err.report(self)); } let definition_ty = instantiated_ty - .remap_generic_params_to_declaration_params(opaque_type_key, self.tcx, false) + .remap_generic_params_to_declaration_params( + opaque_type_key, + self.tcx, + DefiningScopeKind::MirBorrowck, + ) .ty; if let Err(e) = definition_ty.error_reported() { @@ -289,156 +291,3 @@ impl<'tcx> InferCtxt<'tcx> { definition_ty } } - -/// Opaque type parameter validity check as documented in the [rustc-dev-guide chapter]. -/// -/// [rustc-dev-guide chapter]: -/// https://rustc-dev-guide.rust-lang.org/opaque-types-region-infer-restrictions.html -fn check_opaque_type_parameter_valid<'tcx>( - infcx: &InferCtxt<'tcx>, - opaque_type_key: OpaqueTypeKey<'tcx>, - span: Span, -) -> Result<(), ErrorGuaranteed> { - let tcx = infcx.tcx; - let opaque_generics = tcx.generics_of(opaque_type_key.def_id); - let opaque_env = LazyOpaqueTyEnv::new(tcx, opaque_type_key.def_id); - let mut seen_params: FxIndexMap<_, Vec<_>> = FxIndexMap::default(); - - for (i, arg) in opaque_type_key.iter_captured_args(tcx) { - let arg_is_param = match arg.unpack() { - GenericArgKind::Type(ty) => matches!(ty.kind(), ty::Param(_)), - GenericArgKind::Lifetime(lt) => { - matches!(*lt, ty::ReEarlyParam(_) | ty::ReLateParam(_)) - || (lt.is_static() && opaque_env.param_equal_static(i)) - } - GenericArgKind::Const(ct) => matches!(ct.kind(), ty::ConstKind::Param(_)), - }; - - if arg_is_param { - // Register if the same lifetime appears multiple times in the generic args. - // There is an exception when the opaque type *requires* the lifetimes to be equal. - // See [rustc-dev-guide chapter] § "An exception to uniqueness rule". - let seen_where = seen_params.entry(arg).or_default(); - if !seen_where.first().is_some_and(|&prev_i| opaque_env.params_equal(i, prev_i)) { - seen_where.push(i); - } - } else { - // Prevent `fn foo() -> Foo` from being defining. - let opaque_param = opaque_generics.param_at(i, tcx); - let kind = opaque_param.kind.descr(); - - opaque_env.param_is_error(i)?; - - return Err(infcx.dcx().emit_err(NonGenericOpaqueTypeParam { - ty: arg, - kind, - span, - param_span: tcx.def_span(opaque_param.def_id), - })); - } - } - - for (_, indices) in seen_params { - if indices.len() > 1 { - let descr = opaque_generics.param_at(indices[0], tcx).kind.descr(); - let spans: Vec<_> = indices - .into_iter() - .map(|i| tcx.def_span(opaque_generics.param_at(i, tcx).def_id)) - .collect(); - #[allow(rustc::diagnostic_outside_of_impl)] - #[allow(rustc::untranslatable_diagnostic)] - return Err(infcx - .dcx() - .struct_span_err(span, "non-defining opaque type use in defining scope") - .with_span_note(spans, format!("{descr} used multiple times")) - .emit()); - } - } - - Ok(()) -} - -/// Computes if an opaque type requires a lifetime parameter to be equal to -/// another one or to the `'static` lifetime. -/// These requirements are derived from the explicit and implied bounds. -struct LazyOpaqueTyEnv<'tcx> { - tcx: TyCtxt<'tcx>, - def_id: LocalDefId, - - /// Equal parameters will have the same name. Computed Lazily. - /// Example: - /// `type Opaque<'a: 'static, 'b: 'c, 'c: 'b> = impl Sized;` - /// Identity args: `['a, 'b, 'c]` - /// Canonical args: `['static, 'b, 'b]` - canonical_args: std::cell::OnceCell>, -} - -impl<'tcx> LazyOpaqueTyEnv<'tcx> { - fn new(tcx: TyCtxt<'tcx>, def_id: LocalDefId) -> Self { - Self { tcx, def_id, canonical_args: std::cell::OnceCell::new() } - } - - fn param_equal_static(&self, param_index: usize) -> bool { - self.get_canonical_args()[param_index].expect_region().is_static() - } - - fn params_equal(&self, param1: usize, param2: usize) -> bool { - let canonical_args = self.get_canonical_args(); - canonical_args[param1] == canonical_args[param2] - } - - fn param_is_error(&self, param_index: usize) -> Result<(), ErrorGuaranteed> { - self.get_canonical_args()[param_index].error_reported() - } - - fn get_canonical_args(&self) -> ty::GenericArgsRef<'tcx> { - if let Some(&canonical_args) = self.canonical_args.get() { - return canonical_args; - } - - let &Self { tcx, def_id, .. } = self; - let origin = tcx.local_opaque_ty_origin(def_id); - let parent = match origin { - OpaqueTyOrigin::FnReturn { parent, .. } - | OpaqueTyOrigin::AsyncFn { parent, .. } - | OpaqueTyOrigin::TyAlias { parent, .. } => parent, - }; - let param_env = tcx.param_env(parent); - let args = GenericArgs::identity_for_item(tcx, parent).extend_to( - tcx, - def_id.to_def_id(), - |param, _| { - tcx.map_opaque_lifetime_to_parent_lifetime(param.def_id.expect_local()).into() - }, - ); - - // FIXME(#132279): It feels wrong to use `non_body_analysis` here given that we're - // in a body here. - let infcx = tcx.infer_ctxt().build(TypingMode::non_body_analysis()); - let ocx = ObligationCtxt::new(&infcx); - - let wf_tys = ocx.assumed_wf_types(param_env, parent).unwrap_or_else(|_| { - tcx.dcx().span_delayed_bug(tcx.def_span(def_id), "error getting implied bounds"); - Default::default() - }); - let outlives_env = OutlivesEnvironment::new(&infcx, parent, param_env, wf_tys); - - let mut seen = vec![tcx.lifetimes.re_static]; - let canonical_args = fold_regions(tcx, args, |r1, _| { - if r1.is_error() { - r1 - } else if let Some(&r2) = seen.iter().find(|&&r2| { - let free_regions = outlives_env.free_region_map(); - free_regions.sub_free_regions(tcx, r1, r2) - && free_regions.sub_free_regions(tcx, r2, r1) - }) { - r2 - } else { - seen.push(r1); - r1 - } - }); - self.canonical_args.set(canonical_args).unwrap(); - canonical_args - } -} diff --git a/compiler/rustc_borrowck/src/region_infer/reverse_sccs.rs b/compiler/rustc_borrowck/src/region_infer/reverse_sccs.rs index b2ed8a3582796..8e04791461b26 100644 --- a/compiler/rustc_borrowck/src/region_infer/reverse_sccs.rs +++ b/compiler/rustc_borrowck/src/region_infer/reverse_sccs.rs @@ -7,6 +7,8 @@ use rustc_middle::ty::RegionVid; use crate::RegionInferenceContext; use crate::constraints::ConstraintSccIndex; +use crate::region_infer::ConstraintSccs; +use crate::universal_regions::UniversalRegions; pub(crate) struct ReverseSccGraph { graph: VecGraph, @@ -19,6 +21,29 @@ pub(crate) struct ReverseSccGraph { } impl ReverseSccGraph { + pub(super) fn compute( + constraint_sccs: &ConstraintSccs, + universal_regions: &UniversalRegions<'_>, + ) -> Self { + let graph = constraint_sccs.reverse(); + let mut paired_scc_regions = universal_regions + .universal_regions_iter() + .map(|region| (constraint_sccs.scc(region), region)) + .collect::>(); + paired_scc_regions.sort(); + let universal_regions = paired_scc_regions.iter().map(|&(_, region)| region).collect(); + + let mut scc_regions = FxIndexMap::default(); + let mut start = 0; + for chunk in paired_scc_regions.chunk_by(|&(scc1, _), &(scc2, _)| scc1 == scc2) { + let (scc, _) = chunk[0]; + + scc_regions.insert(scc, start..start + chunk.len()); + start += chunk.len(); + } + ReverseSccGraph { graph, scc_regions, universal_regions } + } + /// Find all universal regions that are required to outlive the given SCC. pub(super) fn upper_bounds(&self, scc0: ConstraintSccIndex) -> impl Iterator { let mut duplicates = FxIndexSet::default(); @@ -40,23 +65,7 @@ impl RegionInferenceContext<'_> { return; } - let graph = self.constraint_sccs.reverse(); - let mut paired_scc_regions = self - .universal_regions() - .universal_regions_iter() - .map(|region| (self.constraint_sccs.scc(region), region)) - .collect::>(); - paired_scc_regions.sort(); - let universal_regions = paired_scc_regions.iter().map(|&(_, region)| region).collect(); - - let mut scc_regions = FxIndexMap::default(); - let mut start = 0; - for chunk in paired_scc_regions.chunk_by(|&(scc1, _), &(scc2, _)| scc1 == scc2) { - let (scc, _) = chunk[0]; - scc_regions.insert(scc, start..start + chunk.len()); - start += chunk.len(); - } - - self.rev_scc_graph = Some(ReverseSccGraph { graph, scc_regions, universal_regions }); + self.rev_scc_graph = + Some(ReverseSccGraph::compute(&self.constraint_sccs, self.universal_regions())); } } diff --git a/compiler/rustc_borrowck/src/region_infer/values.rs b/compiler/rustc_borrowck/src/region_infer/values.rs index d9ac5b5cb132a..f1427218cdb02 100644 --- a/compiler/rustc_borrowck/src/region_infer/values.rs +++ b/compiler/rustc_borrowck/src/region_infer/values.rs @@ -21,7 +21,7 @@ rustc_index::newtype_index! { /// An individual element in a region value -- the value of a /// particular region variable consists of a set of these elements. -#[derive(Debug, Clone)] +#[derive(Debug, Clone, PartialEq)] pub(crate) enum RegionElement { /// A point in the control-flow graph. Location(Location), diff --git a/compiler/rustc_borrowck/src/root_cx.rs b/compiler/rustc_borrowck/src/root_cx.rs new file mode 100644 index 0000000000000..66b526fa02a50 --- /dev/null +++ b/compiler/rustc_borrowck/src/root_cx.rs @@ -0,0 +1,104 @@ +use rustc_abi::FieldIdx; +use rustc_data_structures::fx::FxHashMap; +use rustc_hir::def_id::LocalDefId; +use rustc_middle::bug; +use rustc_middle::ty::{OpaqueHiddenType, Ty, TyCtxt, TypeVisitableExt}; +use rustc_span::ErrorGuaranteed; +use smallvec::SmallVec; + +use crate::{ClosureRegionRequirements, ConcreteOpaqueTypes, PropagatedBorrowCheckResults}; + +/// The shared context used by both the root as well as all its nested +/// items. +pub(super) struct BorrowCheckRootCtxt<'tcx> { + pub tcx: TyCtxt<'tcx>, + root_def_id: LocalDefId, + concrete_opaque_types: ConcreteOpaqueTypes<'tcx>, + nested_bodies: FxHashMap>, + tainted_by_errors: Option, +} + +impl<'tcx> BorrowCheckRootCtxt<'tcx> { + pub(super) fn new(tcx: TyCtxt<'tcx>, root_def_id: LocalDefId) -> BorrowCheckRootCtxt<'tcx> { + BorrowCheckRootCtxt { + tcx, + root_def_id, + concrete_opaque_types: Default::default(), + nested_bodies: Default::default(), + tainted_by_errors: None, + } + } + + /// Collect all defining uses of opaque types inside of this typeck root. This + /// expects the hidden type to be mapped to the definition parameters of the opaque + /// and errors if we end up with distinct hidden types. + pub(super) fn add_concrete_opaque_type( + &mut self, + def_id: LocalDefId, + hidden_ty: OpaqueHiddenType<'tcx>, + ) { + // Sometimes two opaque types are the same only after we remap the generic parameters + // back to the opaque type definition. E.g. we may have `OpaqueType` mapped to + // `(X, Y)` and `OpaqueType` mapped to `(Y, X)`, and those are the same, but we + // only know that once we convert the generic parameters to those of the opaque type. + if let Some(prev) = self.concrete_opaque_types.0.get_mut(&def_id) { + if prev.ty != hidden_ty.ty { + let guar = hidden_ty.ty.error_reported().err().unwrap_or_else(|| { + let (Ok(e) | Err(e)) = + prev.build_mismatch_error(&hidden_ty, self.tcx).map(|d| d.emit()); + e + }); + prev.ty = Ty::new_error(self.tcx, guar); + } + // Pick a better span if there is one. + // FIXME(oli-obk): collect multiple spans for better diagnostics down the road. + prev.span = prev.span.substitute_dummy(hidden_ty.span); + } else { + self.concrete_opaque_types.0.insert(def_id, hidden_ty); + } + } + + pub(super) fn set_tainted_by_errors(&mut self, guar: ErrorGuaranteed) { + self.tainted_by_errors = Some(guar); + } + + pub(super) fn get_or_insert_nested( + &mut self, + def_id: LocalDefId, + ) -> &PropagatedBorrowCheckResults<'tcx> { + debug_assert_eq!( + self.tcx.typeck_root_def_id(def_id.to_def_id()), + self.root_def_id.to_def_id() + ); + if !self.nested_bodies.contains_key(&def_id) { + let result = super::do_mir_borrowck(self, def_id, None).0; + if let Some(prev) = self.nested_bodies.insert(def_id, result) { + bug!("unexpected previous nested body: {prev:?}"); + } + } + + self.nested_bodies.get(&def_id).unwrap() + } + + pub(super) fn closure_requirements( + &mut self, + nested_body_def_id: LocalDefId, + ) -> &Option> { + &self.get_or_insert_nested(nested_body_def_id).closure_requirements + } + + pub(super) fn used_mut_upvars( + &mut self, + nested_body_def_id: LocalDefId, + ) -> &SmallVec<[FieldIdx; 8]> { + &self.get_or_insert_nested(nested_body_def_id).used_mut_upvars + } + + pub(super) fn finalize(self) -> Result<&'tcx ConcreteOpaqueTypes<'tcx>, ErrorGuaranteed> { + if let Some(guar) = self.tainted_by_errors { + Err(guar) + } else { + Ok(self.tcx.arena.alloc(self.concrete_opaque_types)) + } + } +} diff --git a/compiler/rustc_borrowck/src/session_diagnostics.rs b/compiler/rustc_borrowck/src/session_diagnostics.rs index 4be5d0dbf4284..5143b2fa20598 100644 --- a/compiler/rustc_borrowck/src/session_diagnostics.rs +++ b/compiler/rustc_borrowck/src/session_diagnostics.rs @@ -294,17 +294,6 @@ pub(crate) struct MoveBorrow<'a> { pub borrow_span: Span, } -#[derive(Diagnostic)] -#[diag(borrowck_opaque_type_non_generic_param, code = E0792)] -pub(crate) struct NonGenericOpaqueTypeParam<'a, 'tcx> { - pub ty: GenericArg<'tcx>, - pub kind: &'a str, - #[primary_span] - pub span: Span, - #[label] - pub param_span: Span, -} - #[derive(Diagnostic)] #[diag(borrowck_opaque_type_lifetime_mismatch)] pub(crate) struct LifetimeMismatchOpaqueParam<'tcx> { diff --git a/compiler/rustc_borrowck/src/type_check/constraint_conversion.rs b/compiler/rustc_borrowck/src/type_check/constraint_conversion.rs index 6fbe1db6330e2..57516565147eb 100644 --- a/compiler/rustc_borrowck/src/type_check/constraint_conversion.rs +++ b/compiler/rustc_borrowck/src/type_check/constraint_conversion.rs @@ -6,7 +6,6 @@ use rustc_infer::infer::region_constraints::{GenericKind, VerifyBound}; use rustc_infer::infer::{self, InferCtxt, SubregionOrigin}; use rustc_infer::traits::query::type_op::DeeplyNormalize; use rustc_middle::bug; -use rustc_middle::mir::{ClosureOutlivesSubject, ClosureRegionRequirements, ConstraintCategory}; use rustc_middle::ty::{ self, GenericArgKind, Ty, TyCtxt, TypeFoldable, TypeVisitableExt, fold_regions, }; @@ -18,10 +17,10 @@ use crate::constraints::OutlivesConstraint; use crate::region_infer::TypeTest; use crate::type_check::{Locations, MirTypeckRegionConstraints}; use crate::universal_regions::UniversalRegions; +use crate::{ClosureOutlivesSubject, ClosureRegionRequirements, ConstraintCategory}; pub(crate) struct ConstraintConversion<'a, 'tcx> { infcx: &'a InferCtxt<'tcx>, - tcx: TyCtxt<'tcx>, universal_regions: &'a UniversalRegions<'tcx>, /// Each RBP `GK: 'a` is assumed to be true. These encode /// relationships like `T: 'a` that are added via implicit bounds @@ -34,7 +33,6 @@ pub(crate) struct ConstraintConversion<'a, 'tcx> { /// logic expecting to see (e.g.) `ReStatic`, and if we supplied /// our special inference variable there, we would mess that up. region_bound_pairs: &'a RegionBoundPairs<'tcx>, - implicit_region_bound: ty::Region<'tcx>, param_env: ty::ParamEnv<'tcx>, known_type_outlives_obligations: &'a [ty::PolyTypeOutlivesPredicate<'tcx>], locations: Locations, @@ -49,7 +47,6 @@ impl<'a, 'tcx> ConstraintConversion<'a, 'tcx> { infcx: &'a InferCtxt<'tcx>, universal_regions: &'a UniversalRegions<'tcx>, region_bound_pairs: &'a RegionBoundPairs<'tcx>, - implicit_region_bound: ty::Region<'tcx>, param_env: ty::ParamEnv<'tcx>, known_type_outlives_obligations: &'a [ty::PolyTypeOutlivesPredicate<'tcx>], locations: Locations, @@ -59,10 +56,8 @@ impl<'a, 'tcx> ConstraintConversion<'a, 'tcx> { ) -> Self { Self { infcx, - tcx: infcx.tcx, universal_regions, region_bound_pairs, - implicit_region_bound, param_env, known_type_outlives_obligations, locations, @@ -96,7 +91,7 @@ impl<'a, 'tcx> ConstraintConversion<'a, 'tcx> { // into a vector. These are the regions that we will be // relating to one another. let closure_mapping = &UniversalRegions::closure_mapping( - self.tcx, + self.infcx.tcx, closure_args, closure_requirements.num_external_vids, closure_def_id, @@ -111,7 +106,7 @@ impl<'a, 'tcx> ConstraintConversion<'a, 'tcx> { let subject = match outlives_requirement.subject { ClosureOutlivesSubject::Region(re) => closure_mapping[re].into(), ClosureOutlivesSubject::Ty(subject_ty) => { - subject_ty.instantiate(self.tcx, |vid| closure_mapping[vid]).into() + subject_ty.instantiate(self.infcx.tcx, |vid| closure_mapping[vid]).into() } }; @@ -127,14 +122,14 @@ impl<'a, 'tcx> ConstraintConversion<'a, 'tcx> { predicate: ty::OutlivesPredicate<'tcx, ty::GenericArg<'tcx>>, constraint_category: ConstraintCategory<'tcx>, ) { + let tcx = self.infcx.tcx; debug!("generate: constraints at: {:#?}", self.locations); // Extract out various useful fields we'll need below. let ConstraintConversion { - tcx, infcx, + universal_regions, region_bound_pairs, - implicit_region_bound, known_type_outlives_obligations, .. } = *self; @@ -145,7 +140,7 @@ impl<'a, 'tcx> ConstraintConversion<'a, 'tcx> { break; } - if !self.tcx.recursion_limit().value_within_limit(iteration) { + if !tcx.recursion_limit().value_within_limit(iteration) { bug!( "FIXME(-Znext-solver): Overflowed when processing region obligations: {outlives_predicates:#?}" ); @@ -170,10 +165,11 @@ impl<'a, 'tcx> ConstraintConversion<'a, 'tcx> { ); } + let implicit_region_bound = + ty::Region::new_var(tcx, universal_regions.implicit_region_bound()); // we don't actually use this for anything, but // the `TypeOutlives` code needs an origin. let origin = infer::RelateParamBound(self.span, t1, None); - TypeOutlives::new( &mut *self, tcx, @@ -205,7 +201,7 @@ impl<'a, 'tcx> ConstraintConversion<'a, 'tcx> { /// are dealt with during trait solving. fn replace_placeholders_with_nll>>(&mut self, value: T) -> T { if value.has_placeholders() { - fold_regions(self.tcx, value, |r, _| match *r { + fold_regions(self.infcx.tcx, value, |r, _| match r.kind() { ty::RePlaceholder(placeholder) => { self.constraints.placeholder_region(self.infcx, placeholder) } @@ -227,7 +223,7 @@ impl<'a, 'tcx> ConstraintConversion<'a, 'tcx> { } fn to_region_vid(&mut self, r: ty::Region<'tcx>) -> ty::RegionVid { - if let ty::RePlaceholder(placeholder) = *r { + if let ty::RePlaceholder(placeholder) = r.kind() { self.constraints.placeholder_region(self.infcx, placeholder).as_var() } else { self.universal_regions.to_region_vid(r) diff --git a/compiler/rustc_borrowck/src/type_check/free_region_relations.rs b/compiler/rustc_borrowck/src/type_check/free_region_relations.rs index eaac633b512d6..92732aba29ba3 100644 --- a/compiler/rustc_borrowck/src/type_check/free_region_relations.rs +++ b/compiler/rustc_borrowck/src/type_check/free_region_relations.rs @@ -41,22 +41,20 @@ type NormalizedInputsAndOutput<'tcx> = Vec>; pub(crate) struct CreateResult<'tcx> { pub(crate) universal_region_relations: Frozen>, - pub(crate) region_bound_pairs: RegionBoundPairs<'tcx>, - pub(crate) known_type_outlives_obligations: Vec>, + pub(crate) region_bound_pairs: Frozen>, + pub(crate) known_type_outlives_obligations: Frozen>>, pub(crate) normalized_inputs_and_output: NormalizedInputsAndOutput<'tcx>, } pub(crate) fn create<'tcx>( infcx: &InferCtxt<'tcx>, param_env: ty::ParamEnv<'tcx>, - implicit_region_bound: ty::Region<'tcx>, universal_regions: UniversalRegions<'tcx>, constraints: &mut MirTypeckRegionConstraints<'tcx>, ) -> CreateResult<'tcx> { UniversalRegionRelationsBuilder { infcx, param_env, - implicit_region_bound, constraints, universal_regions, region_bound_pairs: Default::default(), @@ -181,7 +179,6 @@ struct UniversalRegionRelationsBuilder<'a, 'tcx> { infcx: &'a InferCtxt<'tcx>, param_env: ty::ParamEnv<'tcx>, universal_regions: UniversalRegions<'tcx>, - implicit_region_bound: ty::Region<'tcx>, constraints: &'a mut MirTypeckRegionConstraints<'tcx>, // outputs: @@ -320,7 +317,6 @@ impl<'tcx> UniversalRegionRelationsBuilder<'_, 'tcx> { self.infcx, &self.universal_regions, &self.region_bound_pairs, - self.implicit_region_bound, param_env, &known_type_outlives_obligations, Locations::All(span), @@ -337,8 +333,8 @@ impl<'tcx> UniversalRegionRelationsBuilder<'_, 'tcx> { outlives: self.outlives.freeze(), inverse_outlives: self.inverse_outlives.freeze(), }), - known_type_outlives_obligations, - region_bound_pairs: self.region_bound_pairs, + known_type_outlives_obligations: Frozen::freeze(known_type_outlives_obligations), + region_bound_pairs: Frozen::freeze(self.region_bound_pairs), normalized_inputs_and_output, } } diff --git a/compiler/rustc_borrowck/src/type_check/mod.rs b/compiler/rustc_borrowck/src/type_check/mod.rs index f6144a25938ce..8c51225712093 100644 --- a/compiler/rustc_borrowck/src/type_check/mod.rs +++ b/compiler/rustc_borrowck/src/type_check/mod.rs @@ -45,7 +45,6 @@ use crate::borrow_set::BorrowSet; use crate::constraints::{OutlivesConstraint, OutlivesConstraintSet}; use crate::diagnostics::UniverseInfo; use crate::member_constraints::MemberConstraintSet; -use crate::opaque_types::ConcreteOpaqueTypes; use crate::polonius::legacy::{PoloniusFacts, PoloniusLocationTable}; use crate::polonius::{PoloniusContext, PoloniusLivenessContext}; use crate::region_infer::TypeTest; @@ -53,7 +52,7 @@ use crate::region_infer::values::{LivenessValues, PlaceholderIndex, PlaceholderI use crate::session_diagnostics::{MoveUnsized, SimdIntrinsicArgConst}; use crate::type_check::free_region_relations::{CreateResult, UniversalRegionRelations}; use crate::universal_regions::{DefiningTy, UniversalRegions}; -use crate::{BorrowckInferCtxt, path_utils}; +use crate::{BorrowCheckRootCtxt, BorrowckInferCtxt, path_utils}; macro_rules! span_mirbug { ($context:expr, $elem:expr, $($message:tt)*) => ({ @@ -102,6 +101,7 @@ mod relate_tys; /// - `move_data` -- move-data constructed when performing the maybe-init dataflow analysis /// - `location_map` -- map between MIR `Location` and `PointIndex` pub(crate) fn type_check<'a, 'tcx>( + root_cx: &mut BorrowCheckRootCtxt<'tcx>, infcx: &BorrowckInferCtxt<'tcx>, body: &Body<'tcx>, promoted: &IndexSlice>, @@ -112,9 +112,7 @@ pub(crate) fn type_check<'a, 'tcx>( flow_inits: ResultsCursor<'a, 'tcx, MaybeInitializedPlaces<'a, 'tcx>>, move_data: &MoveData<'tcx>, location_map: Rc, - concrete_opaque_types: &mut ConcreteOpaqueTypes<'tcx>, ) -> MirTypeckResults<'tcx> { - let implicit_region_bound = ty::Region::new_var(infcx.tcx, universal_regions.fr_fn_body); let mut constraints = MirTypeckRegionConstraints { placeholder_indices: PlaceholderIndices::default(), placeholder_index_to_region: IndexVec::default(), @@ -130,13 +128,7 @@ pub(crate) fn type_check<'a, 'tcx>( region_bound_pairs, normalized_inputs_and_output, known_type_outlives_obligations, - } = free_region_relations::create( - infcx, - infcx.param_env, - implicit_region_bound, - universal_regions, - &mut constraints, - ); + } = free_region_relations::create(infcx, infcx.param_env, universal_regions, &mut constraints); let pre_obligations = infcx.take_registered_region_obligations(); assert!( @@ -153,21 +145,20 @@ pub(crate) fn type_check<'a, 'tcx>( }; let mut typeck = TypeChecker { + root_cx, infcx, last_span: body.span, body, promoted, user_type_annotations: &body.user_type_annotations, - region_bound_pairs, - known_type_outlives_obligations, - implicit_region_bound, + region_bound_pairs: ®ion_bound_pairs, + known_type_outlives_obligations: &known_type_outlives_obligations, reported_errors: Default::default(), universal_regions: &universal_region_relations.universal_regions, location_table, polonius_facts, borrow_set, constraints: &mut constraints, - concrete_opaque_types, polonius_liveness, }; @@ -215,6 +206,7 @@ enum FieldAccessError { /// way, it accrues region constraints -- these can later be used by /// NLL region checking. struct TypeChecker<'a, 'tcx> { + root_cx: &'a mut BorrowCheckRootCtxt<'tcx>, infcx: &'a BorrowckInferCtxt<'tcx>, last_span: Span, body: &'a Body<'tcx>, @@ -224,16 +216,14 @@ struct TypeChecker<'a, 'tcx> { /// User type annotations are shared between the main MIR and the MIR of /// all of the promoted items. user_type_annotations: &'a CanonicalUserTypeAnnotations<'tcx>, - region_bound_pairs: RegionBoundPairs<'tcx>, - known_type_outlives_obligations: Vec>, - implicit_region_bound: ty::Region<'tcx>, + region_bound_pairs: &'a RegionBoundPairs<'tcx>, + known_type_outlives_obligations: &'a [ty::PolyTypeOutlivesPredicate<'tcx>], reported_errors: FxIndexSet<(Ty<'tcx>, Span)>, universal_regions: &'a UniversalRegions<'tcx>, location_table: &'a PoloniusLocationTable, polonius_facts: &'a mut Option, borrow_set: &'a BorrowSet<'tcx>, constraints: &'a mut MirTypeckRegionConstraints<'tcx>, - concrete_opaque_types: &'a mut ConcreteOpaqueTypes<'tcx>, /// When using `-Zpolonius=next`, the liveness helper data used to create polonius constraints. polonius_liveness: Option, } @@ -422,10 +412,9 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> { constraint_conversion::ConstraintConversion::new( self.infcx, self.universal_regions, - &self.region_bound_pairs, - self.implicit_region_bound, + self.region_bound_pairs, self.infcx.param_env, - &self.known_type_outlives_obligations, + self.known_type_outlives_obligations, locations, locations.span(self.body), category, @@ -1568,11 +1557,15 @@ impl<'a, 'tcx> Visitor<'tcx> for TypeChecker<'a, 'tcx> { } } CastKind::Transmute => { - span_mirbug!( - self, - rvalue, - "Unexpected CastKind::Transmute, which is not permitted in Analysis MIR", - ); + let ty_from = op.ty(self.body, tcx); + match ty_from.kind() { + ty::Pat(base, _) if base == ty => {} + _ => span_mirbug!( + self, + rvalue, + "Unexpected CastKind::Transmute {ty_from:?} -> {ty:?}, which is not permitted in Analysis MIR", + ), + } } } } @@ -2086,8 +2079,14 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> { } } TerminatorKind::Unreachable => {} - TerminatorKind::Drop { target, unwind, .. } - | TerminatorKind::Assert { target, unwind, .. } => { + TerminatorKind::Drop { target, unwind, drop, .. } => { + self.assert_iscleanup(block_data, target, is_cleanup); + self.assert_iscleanup_unwind(block_data, unwind, is_cleanup); + if let Some(drop) = drop { + self.assert_iscleanup(block_data, drop, is_cleanup); + } + } + TerminatorKind::Assert { target, unwind, .. } => { self.assert_iscleanup(block_data, target, is_cleanup); self.assert_iscleanup_unwind(block_data, unwind, is_cleanup); } @@ -2503,18 +2502,13 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> { args: GenericArgsRef<'tcx>, locations: Locations, ) -> ty::InstantiatedPredicates<'tcx> { - let closure_borrowck_results = tcx.mir_borrowck(def_id); - self.concrete_opaque_types - .extend_from_nested_body(tcx, &closure_borrowck_results.concrete_opaque_types); - - if let Some(closure_requirements) = &closure_borrowck_results.closure_requirements { + if let Some(closure_requirements) = &self.root_cx.closure_requirements(def_id) { constraint_conversion::ConstraintConversion::new( self.infcx, self.universal_regions, - &self.region_bound_pairs, - self.implicit_region_bound, + self.region_bound_pairs, self.infcx.param_env, - &self.known_type_outlives_obligations, + self.known_type_outlives_obligations, locations, self.body.span, // irrelevant; will be overridden. ConstraintCategory::Boring, // same as above. diff --git a/compiler/rustc_borrowck/src/type_check/opaque_types.rs b/compiler/rustc_borrowck/src/type_check/opaque_types.rs index 8bab979a72464..341c50c37f6db 100644 --- a/compiler/rustc_borrowck/src/type_check/opaque_types.rs +++ b/compiler/rustc_borrowck/src/type_check/opaque_types.rs @@ -266,12 +266,8 @@ impl<'tcx, OP> TypeVisitor> for ConstrainOpaqueTypeRegionVisitor<'t where OP: FnMut(ty::Region<'tcx>), { - fn visit_binder>>(&mut self, t: &ty::Binder<'tcx, T>) { - t.super_visit_with(self); - } - fn visit_region(&mut self, r: ty::Region<'tcx>) { - match *r { + match r.kind() { // ignore bound regions, keep visiting ty::ReBound(_, _) => {} _ => (self.op)(r), diff --git a/compiler/rustc_borrowck/src/universal_regions.rs b/compiler/rustc_borrowck/src/universal_regions.rs index 8f6b405fcef2b..c11e14d214c42 100644 --- a/compiler/rustc_borrowck/src/universal_regions.rs +++ b/compiler/rustc_borrowck/src/universal_regions.rs @@ -438,6 +438,10 @@ impl<'tcx> UniversalRegions<'tcx> { } } + pub(crate) fn implicit_region_bound(&self) -> RegionVid { + self.fr_fn_body + } + pub(crate) fn tainted_by_errors(&self) -> Option { self.indices.tainted_by_errors.get() } @@ -909,19 +913,19 @@ impl<'tcx> UniversalRegionIndices<'tcx> { /// if it is a placeholder. Handling placeholders requires access to the /// `MirTypeckRegionConstraints`. fn to_region_vid(&self, r: ty::Region<'tcx>) -> RegionVid { - if let ty::ReVar(..) = *r { - r.as_var() - } else if let ty::ReError(guar) = *r { - self.tainted_by_errors.set(Some(guar)); - // We use the `'static` `RegionVid` because `ReError` doesn't actually exist in the - // `UniversalRegionIndices`. This is fine because 1) it is a fallback only used if - // errors are being emitted and 2) it leaves the happy path unaffected. - self.fr_static - } else { - *self + match r.kind() { + ty::ReVar(..) => r.as_var(), + ty::ReError(guar) => { + self.tainted_by_errors.set(Some(guar)); + // We use the `'static` `RegionVid` because `ReError` doesn't actually exist in the + // `UniversalRegionIndices`. This is fine because 1) it is a fallback only used if + // errors are being emitted and 2) it leaves the happy path unaffected. + self.fr_static + } + _ => *self .indices .get(&r) - .unwrap_or_else(|| bug!("cannot convert `{:?}` to a region vid", r)) + .unwrap_or_else(|| bug!("cannot convert `{:?}` to a region vid", r)), } } diff --git a/compiler/rustc_builtin_macros/messages.ftl b/compiler/rustc_builtin_macros/messages.ftl index 3f03834f8d781..19190f1e267e0 100644 --- a/compiler/rustc_builtin_macros/messages.ftl +++ b/compiler/rustc_builtin_macros/messages.ftl @@ -79,6 +79,7 @@ builtin_macros_autodiff_ret_activity = invalid return activity {$act} in {$mode} builtin_macros_autodiff_ty_activity = {$act} can not be used for this type builtin_macros_autodiff_unknown_activity = did not recognize Activity: `{$act}` +builtin_macros_autodiff_width = autodiff width must fit u32, but is {$width} builtin_macros_bad_derive_target = `derive` may only be applied to `struct`s, `enum`s and `union`s .label = not applicable here .label2 = not a `struct`, `enum` or `union` @@ -149,6 +150,13 @@ builtin_macros_derive_path_args_list = traits in `#[derive(...)]` don't accept a builtin_macros_derive_path_args_value = traits in `#[derive(...)]` don't accept values .suggestion = remove the value +builtin_macros_eii_macro_expected_function = `#[{$name}]` is only valid on functions +builtin_macros_eii_macro_for_expected_list = `#[eii_macro_for(...)]` expects a list of one or two elements +builtin_macros_eii_macro_for_expected_macro = `#[eii_macro_for(...)]` is only valid on macros +builtin_macros_eii_macro_for_expected_max_one_argument = `#[{$name}]` expected no arguments or a single argument: `#[{$name}(default)]` +builtin_macros_eii_macro_for_expected_unsafe = expected this argument to be "unsafe" + .note = the second argument is optional + builtin_macros_env_not_defined = environment variable `{$var}` not defined at compile time .cargo = Cargo sets build script variables at run time. Use `std::env::var({$var_expr})` instead .custom = use `std::env::var({$var_expr})` to read the variable at run time @@ -230,8 +238,6 @@ builtin_macros_format_unused_args = multiple unused formatting arguments builtin_macros_format_use_positional = consider using a positional formatting argument instead -builtin_macros_invalid_crate_attribute = invalid crate attribute - builtin_macros_multiple_default_attrs = multiple `#[default]` attributes .note = only one `#[default]` attribute is needed .label = `#[default]` used here @@ -248,9 +254,9 @@ builtin_macros_multiple_defaults = multiple declared defaults .suggestion = make `{$ident}` default builtin_macros_naked_functions_testing_attribute = - cannot use `#[naked]` with testing attributes + cannot use `#[unsafe(naked)]` with testing attributes .label = function marked with testing attribute here - .naked_attribute = `#[naked]` is incompatible with testing attributes + .naked_attribute = `#[unsafe(naked)]` is incompatible with testing attributes builtin_macros_no_default_variant = `#[derive(Default)]` on enum with no `#[default]` .label = this enum needs a unit variant marked with `#[default]` diff --git a/compiler/rustc_builtin_macros/src/alloc_error_handler.rs b/compiler/rustc_builtin_macros/src/alloc_error_handler.rs index 1c1b2c88f76ee..e637ff1451c41 100644 --- a/compiler/rustc_builtin_macros/src/alloc_error_handler.rs +++ b/compiler/rustc_builtin_macros/src/alloc_error_handler.rs @@ -21,15 +21,15 @@ pub(crate) fn expand( // Allow using `#[alloc_error_handler]` on an item statement // FIXME - if we get deref patterns, use them to reduce duplication here - let (item, is_stmt, sig_span) = if let Annotatable::Item(item) = &item + let (item, ident, is_stmt, sig_span) = if let Annotatable::Item(item) = &item && let ItemKind::Fn(fn_kind) = &item.kind { - (item, false, ecx.with_def_site_ctxt(fn_kind.sig.span)) + (item, fn_kind.ident, false, ecx.with_def_site_ctxt(fn_kind.sig.span)) } else if let Annotatable::Stmt(stmt) = &item && let StmtKind::Item(item) = &stmt.kind && let ItemKind::Fn(fn_kind) = &item.kind { - (item, true, ecx.with_def_site_ctxt(fn_kind.sig.span)) + (item, fn_kind.ident, true, ecx.with_def_site_ctxt(fn_kind.sig.span)) } else { ecx.dcx().emit_err(errors::AllocErrorMustBeFn { span: item.span() }); return vec![orig_item]; @@ -39,7 +39,7 @@ pub(crate) fn expand( let span = ecx.with_def_site_ctxt(item.span); // Generate item statements for the allocator methods. - let stmts = thin_vec![generate_handler(ecx, item.ident, span, sig_span)]; + let stmts = thin_vec![generate_handler(ecx, ident, span, sig_span)]; // Generate anonymous constant serving as container for the allocator methods. let const_ty = ecx.ty(sig_span, TyKind::Tup(ThinVec::new())); @@ -85,14 +85,16 @@ fn generate_handler(cx: &ExtCtxt<'_>, handler: Ident, span: Span, sig_span: Span let kind = ItemKind::Fn(Box::new(Fn { defaultness: ast::Defaultness::Final, sig, + ident: Ident::from_str_and_span("__rg_oom", span), generics: Generics::default(), contract: None, body, define_opaque: None, + eii_impl: ThinVec::new(), })); let attrs = thin_vec![cx.attr_word(sym::rustc_std_internal_symbol, span)]; - let item = cx.item(span, Ident::from_str_and_span("__rg_oom", span), attrs, kind); + let item = cx.item(span, attrs, kind); cx.stmt_item(sig_span, item) } diff --git a/compiler/rustc_builtin_macros/src/asm.rs b/compiler/rustc_builtin_macros/src/asm.rs index eb5b345e49ecd..3e8ddb8abd43f 100644 --- a/compiler/rustc_builtin_macros/src/asm.rs +++ b/compiler/rustc_builtin_macros/src/asm.rs @@ -10,7 +10,7 @@ use rustc_index::bit_set::GrowableBitSet; use rustc_parse::exp; use rustc_parse::parser::{ExpKeywordPair, Parser}; use rustc_session::lint; -use rustc_span::{ErrorGuaranteed, Ident, InnerSpan, Span, Symbol, kw}; +use rustc_span::{ErrorGuaranteed, InnerSpan, Span, Symbol, kw}; use rustc_target::asm::InlineAsmArch; use smallvec::smallvec; use {rustc_ast as ast, rustc_parse_format as parse}; @@ -888,7 +888,6 @@ pub(super) fn expand_global_asm<'cx>( }; match mac { Ok(inline_asm) => MacEager::items(smallvec![P(ast::Item { - ident: Ident::empty(), attrs: ast::AttrVec::new(), id: ast::DUMMY_NODE_ID, kind: ast::ItemKind::GlobalAsm(Box::new(inline_asm)), diff --git a/compiler/rustc_builtin_macros/src/assert/context.rs b/compiler/rustc_builtin_macros/src/assert/context.rs index a949ab94f3ad7..ea7248ca5393a 100644 --- a/compiler/rustc_builtin_macros/src/assert/context.rs +++ b/compiler/rustc_builtin_macros/src/assert/context.rs @@ -112,7 +112,6 @@ impl<'cx, 'a> Context<'cx, 'a> { self.span, self.cx.item( self.span, - Ident::empty(), thin_vec![self.cx.attr_nested_word(sym::allow, sym::unused_imports, self.span)], ItemKind::Use(UseTree { prefix: self.cx.path(self.span, self.cx.std_path(&[sym::asserting])), diff --git a/compiler/rustc_builtin_macros/src/autodiff.rs b/compiler/rustc_builtin_macros/src/autodiff.rs index be11711757e4b..1712775d0a191 100644 --- a/compiler/rustc_builtin_macros/src/autodiff.rs +++ b/compiler/rustc_builtin_macros/src/autodiff.rs @@ -12,12 +12,12 @@ mod llvm_enzyme { valid_ty_for_activity, }; use rustc_ast::ptr::P; - use rustc_ast::token::{Token, TokenKind}; + use rustc_ast::token::{Lit, LitKind, Token, TokenKind}; use rustc_ast::tokenstream::*; use rustc_ast::visit::AssocCtxt::*; use rustc_ast::{ - self as ast, AssocItemKind, BindingMode, FnRetTy, FnSig, Generics, ItemKind, MetaItemInner, - PatKind, TyKind, + self as ast, AssocItemKind, BindingMode, ExprKind, FnRetTy, FnSig, Generics, ItemKind, + MetaItemInner, PatKind, QSelf, TyKind, Visibility, }; use rustc_expand::base::{Annotatable, ExtCtxt}; use rustc_span::{Ident, Span, Symbol, kw, sym}; @@ -45,6 +45,16 @@ mod llvm_enzyme { } } fn first_ident(x: &MetaItemInner) -> rustc_span::Ident { + if let Some(l) = x.lit() { + match l.kind { + ast::LitKind::Int(val, _) => { + // get an Ident from a lit + return rustc_span::Ident::from_str(val.get().to_string().as_str()); + } + _ => {} + } + } + let segments = &x.meta_item().unwrap().path.segments; assert!(segments.len() == 1); segments[0].ident @@ -54,6 +64,24 @@ mod llvm_enzyme { first_ident(x).name.to_string() } + fn width(x: &MetaItemInner) -> Option { + let lit = x.lit()?; + match lit.kind { + ast::LitKind::Int(x, _) => Some(x.get()), + _ => return None, + } + } + + // Get information about the function the macro is applied to + fn extract_item_info(iitem: &P) -> Option<(Visibility, FnSig, Ident)> { + match &iitem.kind { + ItemKind::Fn(box ast::Fn { sig, ident, .. }) => { + Some((iitem.vis.clone(), sig.clone(), ident.clone())) + } + _ => None, + } + } + pub(crate) fn from_ast( ecx: &mut ExtCtxt<'_>, meta_item: &ThinVec, @@ -65,9 +93,32 @@ mod llvm_enzyme { dcx.emit_err(errors::AutoDiffInvalidMode { span: meta_item[1].span(), mode }); return AutoDiffAttrs::error(); }; + + // Now we check, whether the user wants autodiff in batch/vector mode, or scalar mode. + // If he doesn't specify an integer (=width), we default to scalar mode, thus width=1. + let mut first_activity = 2; + + let width = if let [_, _, x, ..] = &meta_item[..] + && let Some(x) = width(x) + { + first_activity = 3; + match x.try_into() { + Ok(x) => x, + Err(_) => { + dcx.emit_err(errors::AutoDiffInvalidWidth { + span: meta_item[2].span(), + width: x, + }); + return AutoDiffAttrs::error(); + } + } + } else { + 1 + }; + let mut activities: Vec = vec![]; let mut errors = false; - for x in &meta_item[2..] { + for x in &meta_item[first_activity..] { let activity_str = name(&x); let res = DiffActivity::from_str(&activity_str); match res { @@ -98,7 +149,20 @@ mod llvm_enzyme { (&DiffActivity::None, activities.as_slice()) }; - AutoDiffAttrs { mode, ret_activity: *ret_activity, input_activity: input_activity.to_vec() } + AutoDiffAttrs { + mode, + width, + ret_activity: *ret_activity, + input_activity: input_activity.to_vec(), + } + } + + fn meta_item_inner_to_ts(t: &MetaItemInner, ts: &mut Vec) { + let comma: Token = Token::new(TokenKind::Comma, Span::default()); + let val = first_ident(t); + let t = Token::from_ast_ident(val); + ts.push(TokenTree::Token(t, Spacing::Joint)); + ts.push(TokenTree::Token(comma.clone(), Spacing::Alone)); } /// We expand the autodiff macro to generate a new placeholder function which passes @@ -145,38 +209,30 @@ mod llvm_enzyme { return vec![item]; } let dcx = ecx.sess.dcx(); - // first get the annotable item: - let (sig, is_impl): (FnSig, bool) = match &item { - Annotatable::Item(iitem) => { - let sig = match &iitem.kind { - ItemKind::Fn(box ast::Fn { sig, .. }) => sig, - _ => { - dcx.emit_err(errors::AutoDiffInvalidApplication { span: item.span() }); - return vec![item]; - } - }; - (sig.clone(), false) - } - Annotatable::AssocItem(assoc_item, Impl { of_trait: false }) => { - let sig = match &assoc_item.kind { - ast::AssocItemKind::Fn(box ast::Fn { sig, .. }) => sig, - _ => { - dcx.emit_err(errors::AutoDiffInvalidApplication { span: item.span() }); - return vec![item]; - } - }; - (sig.clone(), true) - } - _ => { - dcx.emit_err(errors::AutoDiffInvalidApplication { span: item.span() }); - return vec![item]; - } + + // first get information about the annotable item: + let Some((vis, sig, primal)) = (match &item { + Annotatable::Item(iitem) => extract_item_info(iitem), + Annotatable::Stmt(stmt) => match &stmt.kind { + ast::StmtKind::Item(iitem) => extract_item_info(iitem), + _ => None, + }, + Annotatable::AssocItem(assoc_item, Impl { .. }) => match &assoc_item.kind { + ast::AssocItemKind::Fn(box ast::Fn { sig, ident, .. }) => { + Some((assoc_item.vis.clone(), sig.clone(), ident.clone())) + } + _ => None, + }, + _ => None, + }) else { + dcx.emit_err(errors::AutoDiffInvalidApplication { span: item.span() }); + return vec![item]; }; let meta_item_vec: ThinVec = match meta_item.kind { ast::MetaItemKind::List(ref vec) => vec.clone(), _ => { - dcx.emit_err(errors::AutoDiffInvalidApplication { span: item.span() }); + dcx.emit_err(errors::AutoDiffMissingConfig { span: item.span() }); return vec![item]; } }; @@ -184,40 +240,51 @@ mod llvm_enzyme { let has_ret = has_ret(&sig.decl.output); let sig_span = ecx.with_call_site_ctxt(sig.span); - let (vis, primal) = match &item { - Annotatable::Item(iitem) => (iitem.vis.clone(), iitem.ident.clone()), - Annotatable::AssocItem(assoc_item, _) => { - (assoc_item.vis.clone(), assoc_item.ident.clone()) - } - _ => { - dcx.emit_err(errors::AutoDiffInvalidApplication { span: item.span() }); - return vec![item]; - } - }; - // create TokenStream from vec elemtents: // meta_item doesn't have a .tokens field - let comma: Token = Token::new(TokenKind::Comma, Span::default()); let mut ts: Vec = vec![]; if meta_item_vec.len() < 2 { // At the bare minimum, we need a fnc name and a mode, even for a dummy function with no // input and output args. dcx.emit_err(errors::AutoDiffMissingConfig { span: item.span() }); return vec![item]; + } + + meta_item_inner_to_ts(&meta_item_vec[1], &mut ts); + + // Now, if the user gave a width (vector aka batch-mode ad), then we copy it. + // If it is not given, we default to 1 (scalar mode). + let start_position; + let kind: LitKind = LitKind::Integer; + let symbol; + if meta_item_vec.len() >= 3 + && let Some(width) = width(&meta_item_vec[2]) + { + start_position = 3; + symbol = Symbol::intern(&width.to_string()); } else { - for t in meta_item_vec.clone()[1..].iter() { - let val = first_ident(t); - let t = Token::from_ast_ident(val); - ts.push(TokenTree::Token(t, Spacing::Joint)); - ts.push(TokenTree::Token(comma.clone(), Spacing::Alone)); - } + start_position = 2; + symbol = sym::integer(1); } + let l: Lit = Lit { kind, symbol, suffix: None }; + let t = Token::new(TokenKind::Literal(l), Span::default()); + let comma = Token::new(TokenKind::Comma, Span::default()); + ts.push(TokenTree::Token(t, Spacing::Joint)); + ts.push(TokenTree::Token(comma.clone(), Spacing::Alone)); + + for t in meta_item_vec.clone()[start_position..].iter() { + meta_item_inner_to_ts(t, &mut ts); + } + if !has_ret { // We don't want users to provide a return activity if the function doesn't return anything. // For simplicity, we just add a dummy token to the end of the list. let t = Token::new(TokenKind::Ident(sym::None, false.into()), Span::default()); ts.push(TokenTree::Token(t, Spacing::Joint)); + ts.push(TokenTree::Token(comma, Spacing::Alone)); } + // We remove the last, trailing comma. + ts.pop(); let ts: TokenStream = TokenStream::from_iter(ts); let x: AutoDiffAttrs = from_ast(ecx, &meta_item_vec, has_ret); @@ -237,16 +304,17 @@ mod llvm_enzyme { let d_body = gen_enzyme_body( ecx, &x, n_active, &sig, &d_sig, primal, &new_args, span, sig_span, idents, errored, ); - let d_ident = first_ident(&meta_item_vec[0]); // The first element of it is the name of the function to be generated let asdf = Box::new(ast::Fn { defaultness: ast::Defaultness::Final, sig: d_sig, + ident: first_ident(&meta_item_vec[0]), generics: Generics::default(), contract: None, body: Some(d_body), define_opaque: None, + eii_impl: ThinVec::new(), }); let mut rustc_ad_attr = P(ast::NormalAttr::from_ident(Ident::with_dummy_span(sym::rustc_autodiff))); @@ -256,9 +324,9 @@ mod llvm_enzyme { Spacing::Joint, )]; let never_arg = ast::DelimArgs { - dspan: ast::tokenstream::DelimSpan::from_single(span), + dspan: DelimSpan::from_single(span), delim: ast::token::Delimiter::Parenthesis, - tokens: ast::tokenstream::TokenStream::from_iter(ts2), + tokens: TokenStream::from_iter(ts2), }; let inline_item = ast::AttrItem { unsafety: ast::Safety::Default, @@ -296,7 +364,7 @@ mod llvm_enzyme { } Annotatable::Item(iitem.clone()) } - Annotatable::AssocItem(ref mut assoc_item, i @ Impl { of_trait: false }) => { + Annotatable::AssocItem(ref mut assoc_item, i @ Impl { .. }) => { if !assoc_item.attrs.iter().any(|a| same_attribute(&a.kind, &attr.kind)) { assoc_item.attrs.push(attr); } @@ -305,6 +373,22 @@ mod llvm_enzyme { } Annotatable::AssocItem(assoc_item.clone(), i) } + Annotatable::Stmt(ref mut stmt) => { + match stmt.kind { + ast::StmtKind::Item(ref mut iitem) => { + if !iitem.attrs.iter().any(|a| same_attribute(&a.kind, &attr.kind)) { + iitem.attrs.push(attr); + } + if !iitem.attrs.iter().any(|a| same_attribute(&a.kind, &inline_never.kind)) + { + iitem.attrs.push(inline_never.clone()); + } + } + _ => unreachable!("stmt kind checked previously"), + }; + + Annotatable::Stmt(stmt.clone()) + } _ => { unreachable!("annotatable kind checked previously") } @@ -315,24 +399,40 @@ mod llvm_enzyme { delim: rustc_ast::token::Delimiter::Parenthesis, tokens: ts, }); + let d_attr = outer_normal_attr(&rustc_ad_attr, new_id, span); - let d_annotatable = if is_impl { - let assoc_item: AssocItemKind = ast::AssocItemKind::Fn(asdf); - let d_fn = P(ast::AssocItem { - attrs: thin_vec![d_attr, inline_never], - id: ast::DUMMY_NODE_ID, - span, - vis, - ident: d_ident, - kind: assoc_item, - tokens: None, - }); - Annotatable::AssocItem(d_fn, Impl { of_trait: false }) - } else { - let mut d_fn = - ecx.item(span, d_ident, thin_vec![d_attr, inline_never], ItemKind::Fn(asdf)); - d_fn.vis = vis; - Annotatable::Item(d_fn) + let d_annotatable = match &item { + Annotatable::AssocItem(_, _) => { + let assoc_item: AssocItemKind = ast::AssocItemKind::Fn(asdf); + let d_fn = P(ast::AssocItem { + attrs: thin_vec![d_attr, inline_never], + id: ast::DUMMY_NODE_ID, + span, + vis, + kind: assoc_item, + tokens: None, + }); + Annotatable::AssocItem(d_fn, Impl { of_trait: false }) + } + Annotatable::Item(_) => { + let mut d_fn = ecx.item(span, thin_vec![d_attr, inline_never], ItemKind::Fn(asdf)); + d_fn.vis = vis; + + Annotatable::Item(d_fn) + } + Annotatable::Stmt(_) => { + let mut d_fn = ecx.item(span, thin_vec![d_attr, inline_never], ItemKind::Fn(asdf)); + d_fn.vis = vis; + + Annotatable::Stmt(P(ast::Stmt { + id: ast::DUMMY_NODE_ID, + kind: ast::StmtKind::Item(d_fn), + span, + })) + } + _ => { + unreachable!("item kind checked previously") + } }; return vec![orig_annotatable, d_annotatable]; @@ -474,6 +574,8 @@ mod llvm_enzyme { return body; } + // Everything from here onwards just tries to fullfil the return type. Fun! + // having an active-only return means we'll drop the original return type. // So that can be treated identical to not having one in the first place. let primal_ret = has_ret(&sig.decl.output) && !x.has_active_only_ret(); @@ -493,94 +595,71 @@ mod llvm_enzyme { } }; let arg = ty.kind.is_simple_path().unwrap(); - let sl: Vec = vec![arg, kw::Default]; - let tmp = ecx.def_site_path(&sl); + let tmp = ecx.def_site_path(&[arg, kw::Default]); let default_call_expr = ecx.expr_path(ecx.path(span, tmp)); let default_call_expr = ecx.expr_call(new_decl_span, default_call_expr, thin_vec![]); body.stmts.push(ecx.stmt_expr(default_call_expr)); return body; } - let mut exprs = ThinVec::>::new(); - if primal_ret { - // We have both primal ret and active floats. - // primal ret is first, by construction. - exprs.push(primal_call); - } - - // Now construct default placeholder for each active float. - // Is there something nicer than f32::default() and f64::default()? + let mut exprs: P = primal_call; let d_ret_ty = match d_sig.decl.output { FnRetTy::Ty(ref ty) => ty.clone(), FnRetTy::Default(span) => { panic!("Did not expect Default ret ty: {:?}", span); } }; - let mut d_ret_ty = match d_ret_ty.kind.clone() { - TyKind::Tup(ref tys) => tys.clone(), - TyKind::Path(_, rustc_ast::Path { segments, .. }) => { - if let [segment] = &segments[..] - && segment.args.is_none() - { - let id = vec![segments[0].ident]; - let kind = TyKind::Path(None, ecx.path(span, id)); - let ty = P(rustc_ast::Ty { kind, id: ast::DUMMY_NODE_ID, span, tokens: None }); - thin_vec![ty] - } else { - panic!("Expected tuple or simple path return type"); - } - } - _ => { - // We messed up construction of d_sig - panic!("Did not expect non-tuple ret ty: {:?}", d_ret_ty); - } - }; - if x.mode.is_fwd() && x.ret_activity == DiffActivity::Dual { - assert!(d_ret_ty.len() == 2); - // both should be identical, by construction - let arg = d_ret_ty[0].kind.is_simple_path().unwrap(); - let arg2 = d_ret_ty[1].kind.is_simple_path().unwrap(); - assert!(arg == arg2); - let sl: Vec = vec![arg, kw::Default]; - let tmp = ecx.def_site_path(&sl); - let default_call_expr = ecx.expr_path(ecx.path(span, tmp)); - let default_call_expr = ecx.expr_call(new_decl_span, default_call_expr, thin_vec![]); - exprs.push(default_call_expr); - } else if x.mode.is_rev() { - if primal_ret { - // We have extra handling above for the primal ret - d_ret_ty = d_ret_ty[1..].to_vec().into(); - } - - for arg in d_ret_ty.iter() { - let arg = arg.kind.is_simple_path().unwrap(); - let sl: Vec = vec![arg, kw::Default]; - let tmp = ecx.def_site_path(&sl); - let default_call_expr = ecx.expr_path(ecx.path(span, tmp)); + if x.mode.is_fwd() { + // Fwd mode is easy. If the return activity is Const, we support arbitrary types. + // Otherwise, we only support a scalar, a pair of scalars, or an array of scalars. + // We checked that (on a best-effort base) in the preceding gen_enzyme_decl function. + // In all three cases, we can return `std::hint::black_box(::default())`. + if x.ret_activity == DiffActivity::Const { + // Here we call the primal function, since our dummy function has the same return + // type due to the Const return activity. + exprs = ecx.expr_call(new_decl_span, bb_call_expr, thin_vec![exprs]); + } else { + let q = QSelf { ty: d_ret_ty, path_span: span, position: 0 }; + let y = + ExprKind::Path(Some(P(q)), ecx.path_ident(span, Ident::from_str("default"))); + let default_call_expr = ecx.expr(span, y); let default_call_expr = ecx.expr_call(new_decl_span, default_call_expr, thin_vec![]); - exprs.push(default_call_expr); - } - } - - let ret: P; - match &exprs[..] { - [] => { - assert!(!has_ret(&d_sig.decl.output)); - // We don't have to match the return type. - return body; + exprs = ecx.expr_call(new_decl_span, bb_call_expr, thin_vec![default_call_expr]); } - [arg] => { - ret = ecx.expr_call(new_decl_span, bb_call_expr, thin_vec![arg.clone()]); - } - args => { - let ret_tuple: P = ecx.expr_tuple(span, args.into()); - ret = ecx.expr_call(new_decl_span, bb_call_expr, thin_vec![ret_tuple]); + } else if x.mode.is_rev() { + if x.width == 1 { + // We either have `-> ArbitraryType` or `-> (ArbitraryType, repeated_float_scalars)`. + match d_ret_ty.kind { + TyKind::Tup(ref args) => { + // We have a tuple return type. We need to create a tuple of the same size + // and fill it with default values. + let mut exprs2 = thin_vec![exprs]; + for arg in args.iter().skip(1) { + let arg = arg.kind.is_simple_path().unwrap(); + let tmp = ecx.def_site_path(&[arg, kw::Default]); + let default_call_expr = ecx.expr_path(ecx.path(span, tmp)); + let default_call_expr = + ecx.expr_call(new_decl_span, default_call_expr, thin_vec![]); + exprs2.push(default_call_expr); + } + exprs = ecx.expr_tuple(new_decl_span, exprs2); + } + _ => { + // Interestingly, even the `-> ArbitraryType` case + // ends up getting matched and handled correctly above, + // so we don't have to handle any other case for now. + panic!("Unsupported return type: {:?}", d_ret_ty); + } + } } + exprs = ecx.expr_call(new_decl_span, bb_call_expr, thin_vec![exprs]); + } else { + unreachable!("Unsupported mode: {:?}", x.mode); } - assert!(has_ret(&d_sig.decl.output)); - body.stmts.push(ecx.stmt_expr(ret)); + + body.stmts.push(ecx.stmt_expr(exprs)); body } @@ -688,55 +767,71 @@ mod llvm_enzyme { match activity { DiffActivity::Active => { act_ret.push(arg.ty.clone()); + // if width =/= 1, then push [arg.ty; width] to act_ret } DiffActivity::ActiveOnly => { // We will add the active scalar to the return type. // This is handled later. } DiffActivity::Duplicated | DiffActivity::DuplicatedOnly => { - let mut shadow_arg = arg.clone(); - // We += into the shadow in reverse mode. - shadow_arg.ty = P(assure_mut_ref(&arg.ty)); - let old_name = if let PatKind::Ident(_, ident, _) = arg.pat.kind { - ident.name - } else { - debug!("{:#?}", &shadow_arg.pat); - panic!("not an ident?"); - }; - let name: String = format!("d{}", old_name); - new_inputs.push(name.clone()); - let ident = Ident::from_str_and_span(&name, shadow_arg.pat.span); - shadow_arg.pat = P(ast::Pat { - id: ast::DUMMY_NODE_ID, - kind: PatKind::Ident(BindingMode::NONE, ident, None), - span: shadow_arg.pat.span, - tokens: shadow_arg.pat.tokens.clone(), - }); - d_inputs.push(shadow_arg); + for i in 0..x.width { + let mut shadow_arg = arg.clone(); + // We += into the shadow in reverse mode. + shadow_arg.ty = P(assure_mut_ref(&arg.ty)); + let old_name = if let PatKind::Ident(_, ident, _) = arg.pat.kind { + ident.name + } else { + debug!("{:#?}", &shadow_arg.pat); + panic!("not an ident?"); + }; + let name: String = format!("d{}_{}", old_name, i); + new_inputs.push(name.clone()); + let ident = Ident::from_str_and_span(&name, shadow_arg.pat.span); + shadow_arg.pat = P(ast::Pat { + id: ast::DUMMY_NODE_ID, + kind: PatKind::Ident(BindingMode::NONE, ident, None), + span: shadow_arg.pat.span, + tokens: shadow_arg.pat.tokens.clone(), + }); + d_inputs.push(shadow_arg.clone()); + } } - DiffActivity::Dual | DiffActivity::DualOnly => { - let mut shadow_arg = arg.clone(); - let old_name = if let PatKind::Ident(_, ident, _) = arg.pat.kind { - ident.name - } else { - debug!("{:#?}", &shadow_arg.pat); - panic!("not an ident?"); - }; - let name: String = format!("b{}", old_name); - new_inputs.push(name.clone()); - let ident = Ident::from_str_and_span(&name, shadow_arg.pat.span); - shadow_arg.pat = P(ast::Pat { - id: ast::DUMMY_NODE_ID, - kind: PatKind::Ident(BindingMode::NONE, ident, None), - span: shadow_arg.pat.span, - tokens: shadow_arg.pat.tokens.clone(), - }); - d_inputs.push(shadow_arg); + DiffActivity::Dual + | DiffActivity::DualOnly + | DiffActivity::Dualv + | DiffActivity::DualvOnly => { + // the *v variants get lowered to enzyme_dupv and enzyme_dupnoneedv, which cause + // Enzyme to not expect N arguments, but one argument (which is instead larger). + let iterations = + if matches!(activity, DiffActivity::Dualv | DiffActivity::DualvOnly) { + 1 + } else { + x.width + }; + for i in 0..iterations { + let mut shadow_arg = arg.clone(); + let old_name = if let PatKind::Ident(_, ident, _) = arg.pat.kind { + ident.name + } else { + debug!("{:#?}", &shadow_arg.pat); + panic!("not an ident?"); + }; + let name: String = format!("b{}_{}", old_name, i); + new_inputs.push(name.clone()); + let ident = Ident::from_str_and_span(&name, shadow_arg.pat.span); + shadow_arg.pat = P(ast::Pat { + id: ast::DUMMY_NODE_ID, + kind: PatKind::Ident(BindingMode::NONE, ident, None), + span: shadow_arg.pat.span, + tokens: shadow_arg.pat.tokens.clone(), + }); + d_inputs.push(shadow_arg.clone()); + } } DiffActivity::Const => { // Nothing to do here. } - DiffActivity::None | DiffActivity::FakeActivitySize => { + DiffActivity::None | DiffActivity::FakeActivitySize(_) => { panic!("Should not happen"); } } @@ -787,23 +882,48 @@ mod llvm_enzyme { d_decl.inputs = d_inputs.into(); if x.mode.is_fwd() { - if let DiffActivity::Dual = x.ret_activity { - let ty = match d_decl.output { - FnRetTy::Ty(ref ty) => ty.clone(), - FnRetTy::Default(span) => { - panic!("Did not expect Default ret ty: {:?}", span); - } + let ty = match d_decl.output { + FnRetTy::Ty(ref ty) => ty.clone(), + FnRetTy::Default(span) => { + // We want to return std::hint::black_box(()). + let kind = TyKind::Tup(ThinVec::new()); + let ty = P(rustc_ast::Ty { kind, id: ast::DUMMY_NODE_ID, span, tokens: None }); + d_decl.output = FnRetTy::Ty(ty.clone()); + assert!(matches!(x.ret_activity, DiffActivity::None)); + // this won't be used below, so any type would be fine. + ty + } + }; + + if matches!(x.ret_activity, DiffActivity::Dual | DiffActivity::Dualv) { + let kind = if x.width == 1 || matches!(x.ret_activity, DiffActivity::Dualv) { + // Dual can only be used for f32/f64 ret. + // In that case we return now a tuple with two floats. + TyKind::Tup(thin_vec![ty.clone(), ty.clone()]) + } else { + // We have to return [T; width+1], +1 for the primal return. + let anon_const = rustc_ast::AnonConst { + id: ast::DUMMY_NODE_ID, + value: ecx.expr_usize(span, 1 + x.width as usize), + }; + TyKind::Array(ty.clone(), anon_const) }; - // Dual can only be used for f32/f64 ret. - // In that case we return now a tuple with two floats. - let kind = TyKind::Tup(thin_vec![ty.clone(), ty.clone()]); let ty = P(rustc_ast::Ty { kind, id: ty.id, span: ty.span, tokens: None }); d_decl.output = FnRetTy::Ty(ty); } - if let DiffActivity::DualOnly = x.ret_activity { + if matches!(x.ret_activity, DiffActivity::DualOnly | DiffActivity::DualvOnly) { // No need to change the return type, - // we will just return the shadow in place - // of the primal return. + // we will just return the shadow in place of the primal return. + // However, if we have a width > 1, then we don't return -> T, but -> [T; width] + if x.width > 1 { + let anon_const = rustc_ast::AnonConst { + id: ast::DUMMY_NODE_ID, + value: ecx.expr_usize(span, x.width as usize), + }; + let kind = TyKind::Array(ty.clone(), anon_const); + let ty = P(rustc_ast::Ty { kind, id: ty.id, span: ty.span, tokens: None }); + d_decl.output = FnRetTy::Ty(ty); + } } } diff --git a/compiler/rustc_builtin_macros/src/cfg_eval.rs b/compiler/rustc_builtin_macros/src/cfg_eval.rs index b3ba90731184d..da01e3e9607bb 100644 --- a/compiler/rustc_builtin_macros/src/cfg_eval.rs +++ b/compiler/rustc_builtin_macros/src/cfg_eval.rs @@ -92,11 +92,7 @@ impl CfgEval<'_> { // the location of `#[cfg]` and `#[cfg_attr]` in the token stream. The tokenization // process is lossless, so this process is invisible to proc-macros. - // 'Flatten' all nonterminals (i.e. `TokenKind::Interpolated`) - // to `None`-delimited groups containing the corresponding tokens. This - // is normally delayed until the proc-macro server actually needs to - // provide a `TokenKind::Interpolated` to a proc-macro. We do this earlier, - // so that we can handle cases like: + // Interesting cases: // // ```rust // #[cfg_eval] #[cfg] $item @@ -104,8 +100,8 @@ impl CfgEval<'_> { // // where `$item` is `#[cfg_attr] struct Foo {}`. We want to make // sure to evaluate *all* `#[cfg]` and `#[cfg_attr]` attributes - the simplest - // way to do this is to do a single parse of a stream without any nonterminals. - let orig_tokens = annotatable.to_tokens().flattened(); + // way to do this is to do a single parse of the token stream. + let orig_tokens = annotatable.to_tokens(); // Re-parse the tokens, setting the `capture_cfg` flag to save extra information // to the captured `AttrTokenStream` (specifically, we capture diff --git a/compiler/rustc_builtin_macros/src/cmdline_attrs.rs b/compiler/rustc_builtin_macros/src/cmdline_attrs.rs index 6afd8c4b43b9a..423b6a15b646d 100644 --- a/compiler/rustc_builtin_macros/src/cmdline_attrs.rs +++ b/compiler/rustc_builtin_macros/src/cmdline_attrs.rs @@ -1,44 +1,37 @@ //! Attributes injected into the crate root from command line using `-Z crate-attr`. -use rustc_ast::attr::mk_attr; -use rustc_ast::{self as ast, AttrItem, AttrStyle, token}; -use rustc_parse::parser::ForceCollect; -use rustc_parse::{new_parser_from_source_str, unwrap_or_emit_fatal}; +use rustc_ast::{self as ast}; +use rustc_errors::Diag; +use rustc_parse::parser::attr::InnerAttrPolicy; +use rustc_parse::{parse_in, source_str_to_stream}; use rustc_session::parse::ParseSess; use rustc_span::FileName; -use crate::errors; - pub fn inject(krate: &mut ast::Crate, psess: &ParseSess, attrs: &[String]) { for raw_attr in attrs { - let mut parser = unwrap_or_emit_fatal(new_parser_from_source_str( - psess, - FileName::cli_crate_attr_source_code(raw_attr), - raw_attr.clone(), - )); - - let start_span = parser.token.span; - let AttrItem { unsafety, path, args, tokens: _ } = - match parser.parse_attr_item(ForceCollect::No) { - Ok(ai) => ai, - Err(err) => { + let source = format!("#![{raw_attr}]"); + let parse = || -> Result>> { + let tokens = source_str_to_stream( + psess, + FileName::cli_crate_attr_source_code(raw_attr), + source, + None, + )?; + parse_in(psess, tokens, "", |p| { + p.parse_attribute(InnerAttrPolicy::Permitted) + }) + .map_err(|e| vec![e]) + }; + let meta = match parse() { + Ok(meta) => meta, + Err(errs) => { + for err in errs { err.emit(); - continue; } - }; - let end_span = parser.token.span; - if parser.token != token::Eof { - psess.dcx().emit_err(errors::InvalidCrateAttr { span: start_span.to(end_span) }); - continue; - } + continue; + } + }; - krate.attrs.push(mk_attr( - &psess.attr_id_generator, - AttrStyle::Inner, - unsafety, - path, - args, - start_span.to(end_span), - )); + krate.attrs.push(meta); } } diff --git a/compiler/rustc_builtin_macros/src/deriving/clone.rs b/compiler/rustc_builtin_macros/src/deriving/clone.rs index c3656e8244fe0..44cf215c66227 100644 --- a/compiler/rustc_builtin_macros/src/deriving/clone.rs +++ b/compiler/rustc_builtin_macros/src/deriving/clone.rs @@ -34,8 +34,8 @@ pub(crate) fn expand_deriving_clone( let is_simple; match item { Annotatable::Item(annitem) => match &annitem.kind { - ItemKind::Struct(_, Generics { params, .. }) - | ItemKind::Enum(_, Generics { params, .. }) => { + ItemKind::Struct(_, _, Generics { params, .. }) + | ItemKind::Enum(_, _, Generics { params, .. }) => { let container_id = cx.current_expansion.id.expn_data().parent.expect_local(); let has_derive_copy = cx.resolver.has_derive_copy(container_id); if has_derive_copy diff --git a/compiler/rustc_builtin_macros/src/deriving/cmp/partial_ord.rs b/compiler/rustc_builtin_macros/src/deriving/cmp/partial_ord.rs index 7958e037555d5..aa01da3151eb4 100644 --- a/compiler/rustc_builtin_macros/src/deriving/cmp/partial_ord.rs +++ b/compiler/rustc_builtin_macros/src/deriving/cmp/partial_ord.rs @@ -21,7 +21,7 @@ pub(crate) fn expand_deriving_partial_ord( // Order in which to perform matching let discr_then_data = if let Annotatable::Item(item) = item - && let ItemKind::Enum(def, _) = &item.kind + && let ItemKind::Enum(_, def, _) = &item.kind { let dataful: Vec = def.variants.iter().map(|v| !v.data.fields().is_empty()).collect(); match dataful.iter().filter(|&&b| b).count() { diff --git a/compiler/rustc_builtin_macros/src/deriving/coerce_pointee.rs b/compiler/rustc_builtin_macros/src/deriving/coerce_pointee.rs index 46b79e0978082..446d8afeedd7f 100644 --- a/compiler/rustc_builtin_macros/src/deriving/coerce_pointee.rs +++ b/compiler/rustc_builtin_macros/src/deriving/coerce_pointee.rs @@ -30,7 +30,7 @@ pub(crate) fn expand_deriving_coerce_pointee( item.visit_with(&mut DetectNonGenericPointeeAttr { cx }); let (name_ident, generics) = if let Annotatable::Item(aitem) = item - && let ItemKind::Struct(struct_data, g) = &aitem.kind + && let ItemKind::Struct(ident, struct_data, g) = &aitem.kind { if !matches!( struct_data, @@ -40,7 +40,7 @@ pub(crate) fn expand_deriving_coerce_pointee( cx.dcx().emit_err(RequireOneField { span }); return; } - (aitem.ident, g) + (*ident, g) } else { cx.dcx().emit_err(RequireTransparent { span }); return; @@ -108,7 +108,6 @@ pub(crate) fn expand_deriving_coerce_pointee( push(Annotatable::Item( cx.item( span, - Ident::empty(), attrs.clone(), ast::ItemKind::Impl(Box::new(ast::Impl { safety: ast::Safety::Default, @@ -153,7 +152,6 @@ pub(crate) fn expand_deriving_coerce_pointee( let trait_ref = cx.trait_ref(trait_path); let item = cx.item( span, - Ident::empty(), attrs.clone(), ast::ItemKind::Impl(Box::new(ast::Impl { safety: ast::Safety::Default, diff --git a/compiler/rustc_builtin_macros/src/deriving/generic/mod.rs b/compiler/rustc_builtin_macros/src/deriving/generic/mod.rs index 03ee59de70e12..f293320ef98a2 100644 --- a/compiler/rustc_builtin_macros/src/deriving/generic/mod.rs +++ b/compiler/rustc_builtin_macros/src/deriving/generic/mod.rs @@ -487,28 +487,28 @@ impl<'a> TraitDef<'a> { ); let newitem = match &item.kind { - ast::ItemKind::Struct(struct_def, generics) => self.expand_struct_def( + ast::ItemKind::Struct(ident, struct_def, generics) => self.expand_struct_def( cx, struct_def, - item.ident, + *ident, generics, from_scratch, is_packed, ), - ast::ItemKind::Enum(enum_def, generics) => { + ast::ItemKind::Enum(ident, enum_def, generics) => { // We ignore `is_packed` here, because `repr(packed)` // enums cause an error later on. // // This can only cause further compilation errors // downstream in blatantly illegal code, so it is fine. - self.expand_enum_def(cx, enum_def, item.ident, generics, from_scratch) + self.expand_enum_def(cx, enum_def, *ident, generics, from_scratch) } - ast::ItemKind::Union(struct_def, generics) => { + ast::ItemKind::Union(ident, struct_def, generics) => { if self.supports_unions { self.expand_struct_def( cx, struct_def, - item.ident, + *ident, generics, from_scratch, is_packed, @@ -527,15 +527,14 @@ impl<'a> TraitDef<'a> { item.attrs .iter() .filter(|a| { - [ + a.has_any_name(&[ sym::allow, sym::warn, sym::deny, sym::forbid, sym::stable, sym::unstable, - ] - .contains(&a.name_or_empty()) + ]) }) .cloned(), ); @@ -596,7 +595,6 @@ impl<'a> TraitDef<'a> { P(ast::AssocItem { id: ast::DUMMY_NODE_ID, span: self.span, - ident, vis: ast::Visibility { span: self.span.shrink_to_lo(), kind: ast::VisibilityKind::Inherited, @@ -605,6 +603,7 @@ impl<'a> TraitDef<'a> { attrs: ast::AttrVec::new(), kind: ast::AssocItemKind::Type(Box::new(ast::TyAlias { defaultness: ast::Defaultness::Final, + ident, generics: Generics::default(), where_clauses: ast::TyAliasWhereClauses::default(), bounds: Vec::new(), @@ -789,7 +788,6 @@ impl<'a> TraitDef<'a> { cx.item( self.span, - Ident::empty(), attrs, ast::ItemKind::Impl(Box::new(ast::Impl { safety: ast::Safety::Default, @@ -1033,14 +1031,15 @@ impl<'a> MethodDef<'a> { kind: ast::VisibilityKind::Inherited, tokens: None, }, - ident: method_ident, kind: ast::AssocItemKind::Fn(Box::new(ast::Fn { defaultness, sig, + ident: method_ident, generics: fn_generics, contract: None, body: Some(body_block), define_opaque: None, + eii_impl: ThinVec::new(), })), tokens: None, }) diff --git a/compiler/rustc_builtin_macros/src/eii.rs b/compiler/rustc_builtin_macros/src/eii.rs new file mode 100644 index 0000000000000..1d206ac42d339 --- /dev/null +++ b/compiler/rustc_builtin_macros/src/eii.rs @@ -0,0 +1,423 @@ +use rustc_ast::ptr::P; +use rustc_ast::token::{Delimiter, TokenKind}; +use rustc_ast::tokenstream::{DelimSpacing, DelimSpan, Spacing, TokenStream, TokenTree}; +use rustc_ast::{ + DUMMY_NODE_ID, EIIImpl, EIIMacroFor, ItemKind, Stmt, StmtKind, ast, token, tokenstream, +}; +use rustc_ast_pretty::pprust::path_to_string; +use rustc_expand::base::{Annotatable, ExtCtxt}; +use rustc_span::{Ident, Span, kw, sym}; +use thin_vec::{ThinVec, thin_vec}; + +// ```rust +// #[eii] +// fn panic_handler(); +// +// // or: +// +// #[eii(panic_handler)] +// fn panic_handler(); +// +// // expansion: +// +// extern "Rust" { +// fn panic_handler(); +// } +// +// #[rustc_builtin_macro(eii_macro)] +// #[eii_macro_for(panic_handler)] +// macro panic_handler() {} +// ``` +pub(crate) fn eii( + ecx: &mut ExtCtxt<'_>, + span: Span, + meta_item: &ast::MetaItem, + item: Annotatable, +) -> Vec { + eii_(ecx, span, meta_item, item, false) +} + +pub(crate) fn unsafe_eii( + ecx: &mut ExtCtxt<'_>, + span: Span, + meta_item: &ast::MetaItem, + item: Annotatable, +) -> Vec { + eii_(ecx, span, meta_item, item, true) +} + +fn eii_( + ecx: &mut ExtCtxt<'_>, + span: Span, + meta_item: &ast::MetaItem, + item: Annotatable, + impl_unsafe: bool, +) -> Vec { + let span = ecx.with_def_site_ctxt(span); + + let (item, stmt) = if let Annotatable::Item(item) = item { + (item, false) + } else if let Annotatable::Stmt(ref stmt) = item + && let StmtKind::Item(ref item) = stmt.kind + { + (item.clone(), true) + } else { + ecx.dcx() + .emit_err(EIIMacroExpectedFunction { span, name: path_to_string(&meta_item.path) }); + return vec![item]; + }; + + let orig_item = item.clone(); + + let item = item.into_inner(); + + let ast::Item { + mut attrs, + id: _, + span: item_span, + vis, + kind: ItemKind::Fn(mut func), + tokens: _, + } = item + else { + ecx.dcx() + .emit_err(EIIMacroExpectedFunction { span, name: path_to_string(&meta_item.path) }); + return vec![Annotatable::Item(P(item))]; + }; + + let macro_name = if meta_item.is_word() { + func.ident + } else if let Some([first]) = meta_item.meta_item_list() + && let Some(m) = first.meta_item() + && m.path.segments.len() == 1 + { + m.path.segments[0].ident + } else { + ecx.dcx().emit_err(EIIMacroExpectedMaxOneArgument { + span: meta_item.span, + name: path_to_string(&meta_item.path), + }); + return vec![Annotatable::Item(orig_item)]; + }; + + let mut return_items = Vec::new(); + + if func.body.is_some() { + let mut default_func = func.clone(); + func.body = None; + default_func.eii_impl.push(ast::EIIImpl { + node_id: DUMMY_NODE_ID, + eii_macro_path: ast::Path::from_ident(macro_name), + impl_safety: if impl_unsafe { ast::Safety::Unsafe(span) } else { ast::Safety::Default }, + span, + inner_span: macro_name.span, + is_default: true, // important! + }); + + return_items.push(P(ast::Item { + attrs: ThinVec::new(), + id: ast::DUMMY_NODE_ID, + span, + vis: ast::Visibility { span, kind: ast::VisibilityKind::Inherited, tokens: None }, + kind: ast::ItemKind::Const(Box::new(ast::ConstItem { + ident: Ident { name: kw::Underscore, span }, + defaultness: ast::Defaultness::Final, + generics: ast::Generics::default(), + ty: P(ast::Ty { + id: DUMMY_NODE_ID, + kind: ast::TyKind::Tup(ThinVec::new()), + span, + tokens: None, + }), + expr: Some(P(ast::Expr { + id: DUMMY_NODE_ID, + kind: ast::ExprKind::Block( + P(ast::Block { + stmts: thin_vec![ast::Stmt { + id: DUMMY_NODE_ID, + kind: ast::StmtKind::Item(P(ast::Item { + attrs: thin_vec![], // FIXME: re-add some original attrs + id: DUMMY_NODE_ID, + span: item_span, + vis: ast::Visibility { + span, + kind: ast::VisibilityKind::Inherited, + tokens: None + }, + kind: ItemKind::Fn(default_func), + tokens: None, + })), + span + }], + id: DUMMY_NODE_ID, + rules: ast::BlockCheckMode::Default, + span, + tokens: None, + }), + None, + ), + span, + attrs: ThinVec::new(), + tokens: None, + })), + define_opaque: None, + })), + tokens: None, + })) + } + + let decl_span = span.to(func.sig.span); + + let abi = match func.sig.header.ext { + // extern "X" fn => extern "X" {} + ast::Extern::Explicit(lit, _) => Some(lit), + // extern fn => extern {} + ast::Extern::Implicit(_) => None, + // fn => extern "Rust" {} + ast::Extern::None => Some(ast::StrLit { + symbol: sym::Rust, + suffix: None, + symbol_unescaped: sym::Rust, + style: ast::StrStyle::Cooked, + span, + }), + }; + + // ABI has been moved to the extern {} block, so we remove it from the fn item. + func.sig.header.ext = ast::Extern::None; + + // And mark safe functions explicitly as `safe fn`. + if func.sig.header.safety == ast::Safety::Default { + func.sig.header.safety = ast::Safety::Safe(func.sig.span); + } + + // extern "…" { safe fn item(); } + // #[eii_mangle_extern] + attrs.push(ast::Attribute { + kind: ast::AttrKind::Normal(P(ast::NormalAttr { + item: ast::AttrItem { + unsafety: ast::Safety::Default, + path: ast::Path::from_ident(Ident::new(sym::eii_mangle_extern, span)), + args: ast::AttrArgs::Empty, + tokens: None, + }, + tokens: None, + })), + id: ecx.sess.psess.attr_id_generator.mk_attr_id(), + style: ast::AttrStyle::Outer, + span, + }); + let extern_block = P(ast::Item { + attrs: ast::AttrVec::default(), + id: ast::DUMMY_NODE_ID, + span, + vis: ast::Visibility { span, kind: ast::VisibilityKind::Inherited, tokens: None }, + kind: ast::ItemKind::ForeignMod(ast::ForeignMod { + extern_span: span, + safety: ast::Safety::Unsafe(span), + abi, + items: From::from([P(ast::ForeignItem { + attrs: attrs.clone(), + id: ast::DUMMY_NODE_ID, + span: item_span, + vis, + kind: ast::ForeignItemKind::Fn(func.clone()), + tokens: None, + })]), + }), + tokens: None, + }); + + let mut macro_attrs = attrs.clone(); + macro_attrs.push( + // #[builtin_macro(eii_macro)] + ast::Attribute { + kind: ast::AttrKind::Normal(P(ast::NormalAttr { + item: ast::AttrItem { + unsafety: ast::Safety::Default, + path: ast::Path::from_ident(Ident::new(sym::rustc_builtin_macro, span)), + args: ast::AttrArgs::Delimited(ast::DelimArgs { + dspan: DelimSpan::from_single(span), + delim: Delimiter::Parenthesis, + tokens: TokenStream::new(vec![tokenstream::TokenTree::token_alone( + token::TokenKind::Ident(sym::eii_macro, token::IdentIsRaw::No), + span, + )]), + }), + tokens: None, + }, + tokens: None, + })), + id: ecx.sess.psess.attr_id_generator.mk_attr_id(), + style: ast::AttrStyle::Outer, + span, + }, + ); + + let macro_def = P(ast::Item { + attrs: macro_attrs, + id: ast::DUMMY_NODE_ID, + span, + // pub + vis: ast::Visibility { span, kind: ast::VisibilityKind::Public, tokens: None }, + kind: ast::ItemKind::MacroDef( + // macro macro_name + macro_name, + ast::MacroDef { + // { () => {} } + body: P(ast::DelimArgs { + dspan: DelimSpan::from_single(span), + delim: Delimiter::Brace, + tokens: TokenStream::from_iter([ + TokenTree::Delimited( + DelimSpan::from_single(span), + DelimSpacing::new(Spacing::Alone, Spacing::Alone), + Delimiter::Parenthesis, + TokenStream::default(), + ), + TokenTree::token_alone(TokenKind::FatArrow, span), + TokenTree::Delimited( + DelimSpan::from_single(span), + DelimSpacing::new(Spacing::Alone, Spacing::Alone), + Delimiter::Brace, + TokenStream::default(), + ), + ]), + }), + macro_rules: false, + // #[eii_macro_for(func.ident)] + eii_macro_for: Some(ast::EIIMacroFor { + extern_item_path: ast::Path::from_ident(func.ident), + impl_unsafe, + span: decl_span, + }), + }, + ), + tokens: None, + }); + + return_items.push(extern_block); + return_items.push(macro_def); + + if stmt { + return_items + .into_iter() + .map(|i| { + Annotatable::Stmt(P(Stmt { id: DUMMY_NODE_ID, kind: StmtKind::Item(i), span })) + }) + .collect() + } else { + return_items.into_iter().map(|i| Annotatable::Item(i)).collect() + } +} + +use crate::errors::{ + EIIMacroExpectedFunction, EIIMacroExpectedMaxOneArgument, EIIMacroForExpectedList, + EIIMacroForExpectedMacro, EIIMacroForExpectedUnsafe, +}; + +pub(crate) fn eii_macro_for( + ecx: &mut ExtCtxt<'_>, + span: Span, + meta_item: &ast::MetaItem, + mut item: Annotatable, +) -> Vec { + let i = if let Annotatable::Item(ref mut item) = item { + item + } else if let Annotatable::Stmt(ref mut stmt) = item + && let StmtKind::Item(ref mut item) = stmt.kind + { + item + } else { + ecx.dcx().emit_err(EIIMacroForExpectedMacro { span }); + return vec![item]; + }; + + let ItemKind::MacroDef(_, d) = &mut i.kind else { + ecx.dcx().emit_err(EIIMacroForExpectedMacro { span }); + return vec![item]; + }; + + let Some(list) = meta_item.meta_item_list() else { + ecx.dcx().emit_err(EIIMacroForExpectedList { span: meta_item.span }); + return vec![item]; + }; + + if list.len() > 2 { + ecx.dcx().emit_err(EIIMacroForExpectedList { span: meta_item.span }); + return vec![item]; + } + + let Some(extern_item_path) = list.get(0).and_then(|i| i.meta_item()).map(|i| i.path.clone()) + else { + ecx.dcx().emit_err(EIIMacroForExpectedList { span: meta_item.span }); + return vec![item]; + }; + + let impl_unsafe = if let Some(i) = list.get(1) { + if i.lit().and_then(|i| i.kind.str()).is_some_and(|i| i == kw::Unsafe) { + true + } else { + ecx.dcx().emit_err(EIIMacroForExpectedUnsafe { span: i.span() }); + return vec![item]; + } + } else { + false + }; + + d.eii_macro_for = Some(EIIMacroFor { extern_item_path, impl_unsafe, span }); + + // Return the original item and the new methods. + vec![item] +} + +pub(crate) fn eii_macro( + ecx: &mut ExtCtxt<'_>, + span: Span, + meta_item: &ast::MetaItem, + mut item: Annotatable, +) -> Vec { + let i = if let Annotatable::Item(ref mut item) = item { + item + } else if let Annotatable::Stmt(ref mut stmt) = item + && let StmtKind::Item(ref mut item) = stmt.kind + { + item + } else { + ecx.dcx() + .emit_err(EIIMacroExpectedFunction { span, name: path_to_string(&meta_item.path) }); + return vec![item]; + }; + + let ItemKind::Fn(f) = &mut i.kind else { + ecx.dcx() + .emit_err(EIIMacroExpectedFunction { span, name: path_to_string(&meta_item.path) }); + return vec![item]; + }; + + let is_default = if meta_item.is_word() { + false + } else if let Some([first]) = meta_item.meta_item_list() + && let Some(m) = first.meta_item() + && m.path.segments.len() == 1 + { + m.path.segments[0].ident.name == kw::Default + } else { + ecx.dcx().emit_err(EIIMacroExpectedMaxOneArgument { + span: meta_item.span, + name: path_to_string(&meta_item.path), + }); + return vec![item]; + }; + + f.eii_impl.push(EIIImpl { + node_id: DUMMY_NODE_ID, + eii_macro_path: meta_item.path.clone(), + impl_safety: meta_item.unsafety, + span, + inner_span: meta_item.path.span, + is_default, + }); + + vec![item] +} diff --git a/compiler/rustc_builtin_macros/src/errors.rs b/compiler/rustc_builtin_macros/src/errors.rs index 30597944124cb..68c3a55a3521f 100644 --- a/compiler/rustc_builtin_macros/src/errors.rs +++ b/compiler/rustc_builtin_macros/src/errors.rs @@ -1,7 +1,7 @@ use rustc_errors::codes::*; use rustc_errors::{ Diag, DiagCtxtHandle, Diagnostic, EmissionGuarantee, Level, MultiSpan, SingleLabelManySpans, - SubdiagMessageOp, Subdiagnostic, + Subdiagnostic, }; use rustc_macros::{Diagnostic, Subdiagnostic}; use rustc_span::{Ident, Span, Symbol}; @@ -109,13 +109,6 @@ pub(crate) struct ProcMacro { pub(crate) span: Span, } -#[derive(Diagnostic)] -#[diag(builtin_macros_invalid_crate_attribute)] -pub(crate) struct InvalidCrateAttr { - #[primary_span] - pub(crate) span: Span, -} - #[derive(Diagnostic)] #[diag(builtin_macros_non_abi)] pub(crate) struct NonABI { @@ -202,6 +195,14 @@ mod autodiff { pub(crate) mode: String, } + #[derive(Diagnostic)] + #[diag(builtin_macros_autodiff_width)] + pub(crate) struct AutoDiffInvalidWidth { + #[primary_span] + pub(crate) span: Span, + pub(crate) width: u128, + } + #[derive(Diagnostic)] #[diag(builtin_macros_autodiff)] pub(crate) struct AutoDiffInvalidApplication { @@ -683,13 +684,9 @@ pub(crate) struct FormatUnusedArg { // Allow the singular form to be a subdiagnostic of the multiple-unused // form of diagnostic. impl Subdiagnostic for FormatUnusedArg { - fn add_to_diag_with>( - self, - diag: &mut Diag<'_, G>, - f: &F, - ) { + fn add_to_diag(self, diag: &mut Diag<'_, G>) { diag.arg("named", self.named); - let msg = f(diag, crate::fluent_generated::builtin_macros_format_unused_arg.into()); + let msg = diag.eagerly_translate(crate::fluent_generated::builtin_macros_format_unused_arg); diag.span_label(self.span, msg); } } @@ -1030,3 +1027,41 @@ pub(crate) struct NonGenericPointee { #[primary_span] pub span: Span, } + +#[derive(Diagnostic)] +#[diag(builtin_macros_eii_macro_for_expected_macro)] +pub(crate) struct EIIMacroForExpectedMacro { + #[primary_span] + pub span: Span, +} + +#[derive(Diagnostic)] +#[diag(builtin_macros_eii_macro_for_expected_list)] +pub(crate) struct EIIMacroForExpectedList { + #[primary_span] + pub span: Span, +} + +#[derive(Diagnostic)] +#[diag(builtin_macros_eii_macro_for_expected_unsafe)] +pub(crate) struct EIIMacroForExpectedUnsafe { + #[primary_span] + #[note] + pub span: Span, +} + +#[derive(Diagnostic)] +#[diag(builtin_macros_eii_macro_expected_function)] +pub(crate) struct EIIMacroExpectedFunction { + #[primary_span] + pub span: Span, + pub name: String, +} + +#[derive(Diagnostic)] +#[diag(builtin_macros_eii_macro_for_expected_max_one_argument)] +pub(crate) struct EIIMacroExpectedMaxOneArgument { + #[primary_span] + pub span: Span, + pub name: String, +} diff --git a/compiler/rustc_builtin_macros/src/format.rs b/compiler/rustc_builtin_macros/src/format.rs index 12654001a1e28..39f9d5f900514 100644 --- a/compiler/rustc_builtin_macros/src/format.rs +++ b/compiler/rustc_builtin_macros/src/format.rs @@ -1,3 +1,5 @@ +use std::ops::Range; + use parse::Position::ArgumentNamed; use rustc_ast::ptr::P; use rustc_ast::tokenstream::TokenStream; @@ -335,7 +337,7 @@ fn make_format_args( return ExpandResult::Ready(Err(guar)); } - let to_span = |inner_span: parse::InnerSpan| { + let to_span = |inner_span: Range| { is_source_literal.then(|| { fmt_span.from_inner(InnerSpan { start: inner_span.start, end: inner_span.end }) }) @@ -407,7 +409,7 @@ fn make_format_args( let mut placeholder_index = 0; for piece in &pieces { - match *piece { + match piece.clone() { parse::Piece::Lit(s) => { unfinished_literal.push_str(s); } @@ -417,7 +419,8 @@ fn make_format_args( unfinished_literal.clear(); } - let span = parser.arg_places.get(placeholder_index).and_then(|&s| to_span(s)); + let span = + parser.arg_places.get(placeholder_index).and_then(|s| to_span(s.clone())); placeholder_index += 1; let position_span = to_span(position_span); @@ -609,7 +612,7 @@ fn make_format_args( fn invalid_placeholder_type_error( ecx: &ExtCtxt<'_>, ty: &str, - ty_span: Option, + ty_span: Option>, fmt_span: Span, ) { let sp = ty_span.map(|sp| fmt_span.from_inner(InnerSpan::new(sp.start, sp.end))); diff --git a/compiler/rustc_builtin_macros/src/global_allocator.rs b/compiler/rustc_builtin_macros/src/global_allocator.rs index 90d79235820f4..23ed3b983d622 100644 --- a/compiler/rustc_builtin_macros/src/global_allocator.rs +++ b/compiler/rustc_builtin_macros/src/global_allocator.rs @@ -25,15 +25,15 @@ pub(crate) fn expand( // Allow using `#[global_allocator]` on an item statement // FIXME - if we get deref patterns, use them to reduce duplication here - let (item, is_stmt, ty_span) = if let Annotatable::Item(item) = &item - && let ItemKind::Static(box ast::StaticItem { ty, .. }) = &item.kind + let (item, ident, is_stmt, ty_span) = if let Annotatable::Item(item) = &item + && let ItemKind::Static(box ast::StaticItem { ident, ty, .. }) = &item.kind { - (item, false, ecx.with_def_site_ctxt(ty.span)) + (item, *ident, false, ecx.with_def_site_ctxt(ty.span)) } else if let Annotatable::Stmt(stmt) = &item && let StmtKind::Item(item) = &stmt.kind - && let ItemKind::Static(box ast::StaticItem { ty, .. }) = &item.kind + && let ItemKind::Static(box ast::StaticItem { ident, ty, .. }) = &item.kind { - (item, true, ecx.with_def_site_ctxt(ty.span)) + (item, *ident, true, ecx.with_def_site_ctxt(ty.span)) } else { ecx.dcx().emit_err(errors::AllocMustStatics { span: item.span() }); return vec![orig_item]; @@ -41,7 +41,7 @@ pub(crate) fn expand( // Generate a bunch of new items using the AllocFnFactory let span = ecx.with_def_site_ctxt(item.span); - let f = AllocFnFactory { span, ty_span, global: item.ident, cx: ecx }; + let f = AllocFnFactory { span, ty_span, global: ident, cx: ecx }; // Generate item statements for the allocator methods. let stmts = ALLOCATOR_METHODS.iter().map(|method| f.allocator_fn(method)).collect(); @@ -80,17 +80,14 @@ impl AllocFnFactory<'_, '_> { let kind = ItemKind::Fn(Box::new(Fn { defaultness: ast::Defaultness::Final, sig, + ident: Ident::from_str_and_span(&global_fn_name(method.name), self.span), generics: Generics::default(), contract: None, body, define_opaque: None, + eii_impl: ThinVec::new(), })); - let item = self.cx.item( - self.span, - Ident::from_str_and_span(&global_fn_name(method.name), self.span), - self.attrs(), - kind, - ); + let item = self.cx.item(self.span, self.attrs(), kind); self.cx.stmt_item(self.ty_span, item) } diff --git a/compiler/rustc_builtin_macros/src/lib.rs b/compiler/rustc_builtin_macros/src/lib.rs index 606e85577f7e7..61cc87b6a87dd 100644 --- a/compiler/rustc_builtin_macros/src/lib.rs +++ b/compiler/rustc_builtin_macros/src/lib.rs @@ -5,7 +5,7 @@ #![allow(internal_features)] #![allow(rustc::diagnostic_outside_of_impl)] #![allow(rustc::untranslatable_diagnostic)] -#![cfg_attr(doc, recursion_limit = "256")] // FIXME(nnethercote): will be removed by #124141 +#![cfg_attr(bootstrap, feature(let_chains))] #![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")] #![doc(rust_logo)] #![feature(assert_matches)] @@ -13,12 +13,12 @@ #![feature(box_patterns)] #![feature(decl_macro)] #![feature(if_let_guard)] -#![feature(let_chains)] #![feature(proc_macro_internals)] #![feature(proc_macro_quote)] #![feature(rustdoc_internals)] #![feature(string_from_utf8_lossy_owned)] #![feature(try_blocks)] +#![recursion_limit = "256"] // tidy-alphabetical-end extern crate proc_macro; @@ -45,6 +45,7 @@ mod define_opaque; mod derive; mod deriving; mod edition_panic; +mod eii; mod env; mod errors; mod format; @@ -123,6 +124,10 @@ pub fn register_builtin_macros(resolver: &mut dyn ResolverExpand) { global_allocator: global_allocator::expand, test: test::expand_test, test_case: test::expand_test_case, + eii: eii::eii, + unsafe_eii: eii::unsafe_eii, + eii_macro_for: eii::eii_macro_for, + eii_macro: eii::eii_macro, } register_derive! { diff --git a/compiler/rustc_builtin_macros/src/pattern_type.rs b/compiler/rustc_builtin_macros/src/pattern_type.rs index a55c7e962d098..3529e5525fcd2 100644 --- a/compiler/rustc_builtin_macros/src/pattern_type.rs +++ b/compiler/rustc_builtin_macros/src/pattern_type.rs @@ -1,9 +1,10 @@ use rustc_ast::ptr::P; use rustc_ast::tokenstream::TokenStream; -use rustc_ast::{AnonConst, DUMMY_NODE_ID, Ty, TyPat, TyPatKind, ast}; +use rustc_ast::{AnonConst, DUMMY_NODE_ID, Ty, TyPat, TyPatKind, ast, token}; use rustc_errors::PResult; use rustc_expand::base::{self, DummyResult, ExpandResult, ExtCtxt, MacroExpanderResult}; use rustc_parse::exp; +use rustc_parse::parser::{CommaRecoveryMode, RecoverColon, RecoverComma}; use rustc_span::Span; pub(crate) fn expand<'cx>( @@ -26,19 +27,42 @@ fn parse_pat_ty<'a>(cx: &mut ExtCtxt<'a>, stream: TokenStream) -> PResult<'a, (P let ty = parser.parse_ty()?; parser.expect_keyword(exp!(Is))?; - let pat = parser.parse_pat_no_top_alt(None, None)?.into_inner(); + let pat = pat_to_ty_pat( + cx, + parser + .parse_pat_no_top_guard( + None, + RecoverComma::No, + RecoverColon::No, + CommaRecoveryMode::EitherTupleOrPipe, + )? + .into_inner(), + ); + + if parser.token != token::Eof { + parser.unexpected()?; + } + + Ok((ty, pat)) +} + +fn ty_pat(kind: TyPatKind, span: Span) -> P { + P(TyPat { id: DUMMY_NODE_ID, kind, span, tokens: None }) +} + +fn pat_to_ty_pat(cx: &mut ExtCtxt<'_>, pat: ast::Pat) -> P { let kind = match pat.kind { ast::PatKind::Range(start, end, include_end) => TyPatKind::Range( start.map(|value| P(AnonConst { id: DUMMY_NODE_ID, value })), end.map(|value| P(AnonConst { id: DUMMY_NODE_ID, value })), include_end, ), + ast::PatKind::Or(variants) => TyPatKind::Or( + variants.into_iter().map(|pat| pat_to_ty_pat(cx, pat.into_inner())).collect(), + ), ast::PatKind::Err(guar) => TyPatKind::Err(guar), _ => TyPatKind::Err(cx.dcx().span_err(pat.span, "pattern not supported in pattern types")), }; - - let pat = P(TyPat { id: pat.id, kind, span: pat.span, tokens: pat.tokens }); - - Ok((ty, pat)) + ty_pat(kind, pat.span) } diff --git a/compiler/rustc_builtin_macros/src/proc_macro_harness.rs b/compiler/rustc_builtin_macros/src/proc_macro_harness.rs index ee6475c8b8e91..a91f2d38a93ae 100644 --- a/compiler/rustc_builtin_macros/src/proc_macro_harness.rs +++ b/compiler/rustc_builtin_macros/src/proc_macro_harness.rs @@ -20,14 +20,14 @@ use crate::errors; struct ProcMacroDerive { id: NodeId, trait_name: Symbol, - function_name: Ident, + function_ident: Ident, span: Span, attrs: Vec, } struct ProcMacroDef { id: NodeId, - function_name: Ident, + function_ident: Ident, span: Span, } @@ -92,7 +92,12 @@ impl<'a> CollectProcMacros<'a> { } } - fn collect_custom_derive(&mut self, item: &'a ast::Item, attr: &'a ast::Attribute) { + fn collect_custom_derive( + &mut self, + item: &'a ast::Item, + function_ident: Ident, + attr: &'a ast::Attribute, + ) { let Some((trait_name, proc_attrs)) = parse_macro_name_and_helper_attrs(self.dcx, attr, "derive") else { @@ -104,7 +109,7 @@ impl<'a> CollectProcMacros<'a> { id: item.id, span: item.span, trait_name, - function_name: item.ident, + function_ident, attrs: proc_attrs, })); } else { @@ -118,12 +123,12 @@ impl<'a> CollectProcMacros<'a> { } } - fn collect_attr_proc_macro(&mut self, item: &'a ast::Item) { + fn collect_attr_proc_macro(&mut self, item: &'a ast::Item, function_ident: Ident) { if self.in_root && item.vis.kind.is_pub() { self.macros.push(ProcMacro::Attr(ProcMacroDef { id: item.id, span: item.span, - function_name: item.ident, + function_ident, })); } else { let msg = if !self.in_root { @@ -136,12 +141,12 @@ impl<'a> CollectProcMacros<'a> { } } - fn collect_bang_proc_macro(&mut self, item: &'a ast::Item) { + fn collect_bang_proc_macro(&mut self, item: &'a ast::Item, function_ident: Ident) { if self.in_root && item.vis.kind.is_pub() { self.macros.push(ProcMacro::Bang(ProcMacroDef { id: item.id, span: item.span, - function_name: item.ident, + function_ident, })); } else { let msg = if !self.in_root { @@ -165,12 +170,6 @@ impl<'a> Visitor<'a> for CollectProcMacros<'a> { } } - // First up, make sure we're checking a bare function. If we're not then - // we're just not interested in this item. - // - // If we find one, try to locate a `#[proc_macro_derive]` attribute on it. - let is_fn = matches!(item.kind, ast::ItemKind::Fn(..)); - let mut found_attr: Option<&'a ast::Attribute> = None; for attr in &item.attrs { @@ -214,7 +213,11 @@ impl<'a> Visitor<'a> for CollectProcMacros<'a> { return; }; - if !is_fn { + // Make sure we're checking a bare function. If we're not then we're + // just not interested any further in this item. + let fn_ident = if let ast::ItemKind::Fn(fn_) = &item.kind { + fn_.ident + } else { self.dcx .create_err(errors::AttributeOnlyBeUsedOnBareFunctions { span: attr.span, @@ -222,7 +225,7 @@ impl<'a> Visitor<'a> for CollectProcMacros<'a> { }) .emit(); return; - } + }; if self.is_test_crate { return; @@ -238,12 +241,13 @@ impl<'a> Visitor<'a> for CollectProcMacros<'a> { return; } + // Try to locate a `#[proc_macro_derive]` attribute. if attr.has_name(sym::proc_macro_derive) { - self.collect_custom_derive(item, attr); + self.collect_custom_derive(item, fn_ident, attr); } else if attr.has_name(sym::proc_macro_attribute) { - self.collect_attr_proc_macro(item); + self.collect_attr_proc_macro(item, fn_ident); } else if attr.has_name(sym::proc_macro) { - self.collect_bang_proc_macro(item); + self.collect_bang_proc_macro(item, fn_ident); }; let prev_in_root = mem::replace(&mut self.in_root, false); @@ -278,7 +282,7 @@ fn mk_decls(cx: &mut ExtCtxt<'_>, macros: &[ProcMacro]) -> P { let span = DUMMY_SP.with_def_site_ctxt(expn_id.to_expn_id()); let proc_macro = Ident::new(sym::proc_macro, span); - let krate = cx.item(span, proc_macro, ast::AttrVec::new(), ast::ItemKind::ExternCrate(None)); + let krate = cx.item(span, ast::AttrVec::new(), ast::ItemKind::ExternCrate(None, proc_macro)); let bridge = Ident::new(sym::bridge, span); let client = Ident::new(sym::client, span); @@ -299,7 +303,7 @@ fn mk_decls(cx: &mut ExtCtxt<'_>, macros: &[ProcMacro]) -> P { ProcMacro::Derive(m) => m.span, ProcMacro::Attr(m) | ProcMacro::Bang(m) => m.span, }; - let local_path = |cx: &ExtCtxt<'_>, name| cx.expr_path(cx.path(span, vec![name])); + let local_path = |cx: &ExtCtxt<'_>, ident| cx.expr_path(cx.path(span, vec![ident])); let proc_macro_ty_method_path = |cx: &ExtCtxt<'_>, method| { cx.expr_path(cx.path( span.with_ctxt(harness_span.ctxt()), @@ -323,7 +327,7 @@ fn mk_decls(cx: &mut ExtCtxt<'_>, macros: &[ProcMacro]) -> P { .map(|&s| cx.expr_str(span, s)) .collect::>(), ), - local_path(cx, cd.function_name), + local_path(cx, cd.function_ident), ], ) } @@ -341,8 +345,8 @@ fn mk_decls(cx: &mut ExtCtxt<'_>, macros: &[ProcMacro]) -> P { harness_span, proc_macro_ty_method_path(cx, ident), thin_vec![ - cx.expr_str(span, ca.function_name.name), - local_path(cx, ca.function_name), + cx.expr_str(span, ca.function_ident.name), + local_path(cx, ca.function_ident), ], ) } diff --git a/compiler/rustc_builtin_macros/src/standard_library_imports.rs b/compiler/rustc_builtin_macros/src/standard_library_imports.rs index ba63b185e0967..a1ee53b7ca21f 100644 --- a/compiler/rustc_builtin_macros/src/standard_library_imports.rs +++ b/compiler/rustc_builtin_macros/src/standard_library_imports.rs @@ -43,9 +43,8 @@ pub fn inject( let item = cx.item( span, - Ident::new(name, ident_span), thin_vec![cx.attr_word(sym::macro_use, span)], - ast::ItemKind::ExternCrate(None), + ast::ItemKind::ExternCrate(None, Ident::new(name, ident_span)), ); krate.items.insert(0, item); @@ -68,7 +67,6 @@ pub fn inject( // Inject the relevant crate's prelude. let use_item = cx.item( span, - Ident::empty(), thin_vec![cx.attr_word(sym::prelude_import, span)], ast::ItemKind::Use(ast::UseTree { prefix: cx.path(span, import_path), diff --git a/compiler/rustc_builtin_macros/src/test.rs b/compiler/rustc_builtin_macros/src/test.rs index 239f8657284d5..1cef4f9514cd7 100644 --- a/compiler/rustc_builtin_macros/src/test.rs +++ b/compiler/rustc_builtin_macros/src/test.rs @@ -51,21 +51,28 @@ pub(crate) fn expand_test_case( return vec![]; } }; - item = item.map(|mut item| { - let test_path_symbol = Symbol::intern(&item_path( - // skip the name of the root module - &ecx.current_expansion.module.mod_path[1..], - &item.ident, - )); - item.vis = ast::Visibility { - span: item.vis.span, - kind: ast::VisibilityKind::Public, - tokens: None, - }; - item.ident.span = item.ident.span.with_ctxt(sp.ctxt()); - item.attrs.push(ecx.attr_name_value_str(sym::rustc_test_marker, test_path_symbol, sp)); - item - }); + + // `#[test_case]` is valid on functions, consts, and statics. Only modify + // the item in those cases. + match &mut item.kind { + ast::ItemKind::Fn(box ast::Fn { ident, .. }) + | ast::ItemKind::Const(box ast::ConstItem { ident, .. }) + | ast::ItemKind::Static(box ast::StaticItem { ident, .. }) => { + ident.span = ident.span.with_ctxt(sp.ctxt()); + let test_path_symbol = Symbol::intern(&item_path( + // skip the name of the root module + &ecx.current_expansion.module.mod_path[1..], + ident, + )); + item.vis = ast::Visibility { + span: item.vis.span, + kind: ast::VisibilityKind::Public, + tokens: None, + }; + item.attrs.push(ecx.attr_name_value_str(sym::rustc_test_marker, test_path_symbol, sp)); + } + _ => {} + } let ret = if is_stmt { Annotatable::Stmt(P(ecx.stmt_item(item.span, item))) @@ -162,17 +169,17 @@ pub(crate) fn expand_test_or_bench( let ret_ty_sp = cx.with_def_site_ctxt(fn_.sig.decl.output.span()); let attr_sp = cx.with_def_site_ctxt(attr_sp); - let test_id = Ident::new(sym::test, attr_sp); + let test_ident = Ident::new(sym::test, attr_sp); // creates test::$name - let test_path = |name| cx.path(ret_ty_sp, vec![test_id, Ident::from_str_and_span(name, sp)]); + let test_path = |name| cx.path(ret_ty_sp, vec![test_ident, Ident::from_str_and_span(name, sp)]); // creates test::ShouldPanic::$name let should_panic_path = |name| { cx.path( sp, vec![ - test_id, + test_ident, Ident::from_str_and_span("ShouldPanic", sp), Ident::from_str_and_span(name, sp), ], @@ -184,7 +191,7 @@ pub(crate) fn expand_test_or_bench( cx.path( sp, vec![ - test_id, + test_ident, Ident::from_str_and_span("TestType", sp), Ident::from_str_and_span(name, sp), ], @@ -223,7 +230,7 @@ pub(crate) fn expand_test_or_bench( // super::$test_fn(b) cx.expr_call( ret_ty_sp, - cx.expr_path(cx.path(sp, vec![item.ident])), + cx.expr_path(cx.path(sp, vec![fn_.ident])), thin_vec![cx.expr_ident(sp, b)], ), ], @@ -249,7 +256,7 @@ pub(crate) fn expand_test_or_bench( // $test_fn() cx.expr_call( ret_ty_sp, - cx.expr_path(cx.path(sp, vec![item.ident])), + cx.expr_path(cx.path(sp, vec![fn_.ident])), ThinVec::new(), ), // ) ], @@ -262,15 +269,14 @@ pub(crate) fn expand_test_or_bench( let test_path_symbol = Symbol::intern(&item_path( // skip the name of the root module &cx.current_expansion.module.mod_path[1..], - &item.ident, + &fn_.ident, )); - let location_info = get_location_info(cx, &item); + let location_info = get_location_info(cx, &fn_); let mut test_const = cx.item( sp, - Ident::new(item.ident.name, sp), thin_vec![ // #[cfg(test)] cx.attr_nested_word(sym::cfg, sym::test, attr_sp), @@ -283,6 +289,7 @@ pub(crate) fn expand_test_or_bench( ast::ItemKind::Const( ast::ConstItem { defaultness: ast::Defaultness::Final, + ident: Ident::new(fn_.ident.name, sp), generics: ast::Generics::default(), ty: cx.ty(sp, ast::TyKind::Path(None, test_path("TestDescAndFn"))), define_opaque: None, @@ -380,7 +387,8 @@ pub(crate) fn expand_test_or_bench( }); // extern crate test - let test_extern = cx.item(sp, test_id, ast::AttrVec::new(), ast::ItemKind::ExternCrate(None)); + let test_extern = + cx.item(sp, ast::AttrVec::new(), ast::ItemKind::ExternCrate(None, test_ident)); debug!("synthetic test item:\n{}\n", pprust::item_to_string(&test_const)); @@ -434,8 +442,8 @@ fn not_testable_error(cx: &ExtCtxt<'_>, attr_sp: Span, item: Option<&ast::Item>) .emit(); } -fn get_location_info(cx: &ExtCtxt<'_>, item: &ast::Item) -> (Symbol, usize, usize, usize, usize) { - let span = item.ident.span; +fn get_location_info(cx: &ExtCtxt<'_>, fn_: &ast::Fn) -> (Symbol, usize, usize, usize, usize) { + let span = fn_.ident.span; let (source_file, lo_line, lo_col, hi_line, hi_col) = cx.sess.source_map().span_to_location_info(span); diff --git a/compiler/rustc_builtin_macros/src/test_harness.rs b/compiler/rustc_builtin_macros/src/test_harness.rs index 768b459ec5e30..aefbcc72e219f 100644 --- a/compiler/rustc_builtin_macros/src/test_harness.rs +++ b/compiler/rustc_builtin_macros/src/test_harness.rs @@ -134,27 +134,21 @@ impl<'a> MutVisitor for TestHarnessGenerator<'a> { if let Some(name) = get_test_name(&item) { debug!("this is a test item"); - let test = Test { span: item.span, ident: item.ident, name }; + // `unwrap` is ok because only functions, consts, and static should reach here. + let test = Test { span: item.span, ident: item.kind.ident().unwrap(), name }; self.tests.push(test); } // We don't want to recurse into anything other than mods, since // mods or tests inside of functions will break things if let ast::ItemKind::Mod( + _, _, ModKind::Loaded(.., ast::ModSpans { inner_span: span, .. }, _), ) = item.kind { let prev_tests = mem::take(&mut self.tests); - walk_item_kind( - &mut item.kind, - item.span, - item.id, - &mut item.ident, - &mut item.vis, - (), - self, - ); + walk_item_kind(&mut item.kind, item.span, item.id, &mut item.vis, (), self); self.add_test_cases(item.id, span, prev_tests); } else { // But in those cases, we emit a lint to warn the user of these missing tests. @@ -181,9 +175,9 @@ impl<'a> Visitor<'a> for InnerItemLinter<'_> { } fn entry_point_type(item: &ast::Item, at_root: bool) -> EntryPointType { - match item.kind { - ast::ItemKind::Fn(..) => { - rustc_ast::entry::entry_point_type(&item.attrs, at_root, Some(item.ident.name)) + match &item.kind { + ast::ItemKind::Fn(fn_) => { + rustc_ast::entry::entry_point_type(&item.attrs, at_root, Some(fn_.ident.name)) } _ => EntryPointType::None, } @@ -295,7 +289,7 @@ fn generate_test_harness( fn mk_main(cx: &mut TestCtxt<'_>) -> P { let sp = cx.def_site; let ecx = &cx.ext_cx; - let test_id = Ident::new(sym::test, sp); + let test_ident = Ident::new(sym::test, sp); let runner_name = match cx.panic_strategy { PanicStrategy::Unwind => "test_main_static", @@ -303,10 +297,9 @@ fn mk_main(cx: &mut TestCtxt<'_>) -> P { }; // test::test_main_static(...) - let mut test_runner = cx - .test_runner - .clone() - .unwrap_or_else(|| ecx.path(sp, vec![test_id, Ident::from_str_and_span(runner_name, sp)])); + let mut test_runner = cx.test_runner.clone().unwrap_or_else(|| { + ecx.path(sp, vec![test_ident, Ident::from_str_and_span(runner_name, sp)]) + }); test_runner.span = sp; @@ -317,7 +310,7 @@ fn mk_main(cx: &mut TestCtxt<'_>) -> P { // extern crate test let test_extern_stmt = ecx.stmt_item( sp, - ecx.item(sp, test_id, ast::AttrVec::new(), ast::ItemKind::ExternCrate(None)), + ecx.item(sp, ast::AttrVec::new(), ast::ItemKind::ExternCrate(None, test_ident)), ); // #[rustc_main] @@ -340,23 +333,25 @@ fn mk_main(cx: &mut TestCtxt<'_>) -> P { let decl = ecx.fn_decl(ThinVec::new(), ast::FnRetTy::Ty(main_ret_ty)); let sig = ast::FnSig { decl, header: ast::FnHeader::default(), span: sp }; let defaultness = ast::Defaultness::Final; + + // Honor the reexport_test_harness_main attribute + let main_ident = match cx.reexport_test_harness_main { + Some(sym) => Ident::new(sym, sp.with_ctxt(SyntaxContext::root())), + None => Ident::new(sym::main, sp), + }; + let main = ast::ItemKind::Fn(Box::new(ast::Fn { defaultness, sig, + ident: main_ident, generics: ast::Generics::default(), contract: None, body: Some(main_body), define_opaque: None, + eii_impl: ThinVec::new(), })); - // Honor the reexport_test_harness_main attribute - let main_id = match cx.reexport_test_harness_main { - Some(sym) => Ident::new(sym, sp.with_ctxt(SyntaxContext::root())), - None => Ident::new(sym::main, sp), - }; - let main = P(ast::Item { - ident: main_id, attrs: thin_vec![main_attr, coverage_attr, doc_hidden_attr], id: ast::DUMMY_NODE_ID, kind: main, diff --git a/compiler/rustc_codegen_cranelift/build_system/tests.rs b/compiler/rustc_codegen_cranelift/build_system/tests.rs index 122b541fa35f8..eec89c026b26a 100644 --- a/compiler/rustc_codegen_cranelift/build_system/tests.rs +++ b/compiler/rustc_codegen_cranelift/build_system/tests.rs @@ -99,6 +99,34 @@ const BASE_SYSROOT_SUITE: &[TestCase] = &[ runner.run_out_command("gen_block_iterate", &[]); }), TestCase::build_bin_and_run("aot.raw-dylib", "example/raw-dylib.rs", &[]), + TestCase::custom("test.sysroot", &|runner| { + apply_patches( + &runner.dirs, + "sysroot_tests", + &runner.stdlib_source.join("library"), + &SYSROOT_TESTS_SRC.to_path(&runner.dirs), + ); + + SYSROOT_TESTS.clean(&runner.dirs); + + let mut target_compiler = runner.target_compiler.clone(); + // coretests and alloctests produce a bunch of warnings. When running + // in rust's CI warnings are denied, so we have to override that here. + target_compiler.rustflags.push("--cap-lints=allow".to_owned()); + // The standard library may have been compiled with -Zrandomize-layout. + target_compiler.rustflags.extend(["--cfg".to_owned(), "randomized_layouts".to_owned()]); + + if runner.is_native { + let mut test_cmd = SYSROOT_TESTS.test(&target_compiler, &runner.dirs); + test_cmd.args(["-p", "coretests", "-p", "alloctests", "--tests", "--", "-q"]); + spawn_and_wait(test_cmd); + } else { + eprintln!("Cross-Compiling: Not running tests"); + let mut build_cmd = SYSROOT_TESTS.build(&target_compiler, &runner.dirs); + build_cmd.args(["-p", "coretests", "-p", "alloctests", "--tests"]); + spawn_and_wait(build_cmd); + } + }), ]; pub(crate) static RAND_REPO: GitRepo = GitRepo::github( @@ -146,27 +174,6 @@ const EXTENDED_SYSROOT_SUITE: &[TestCase] = &[ spawn_and_wait(build_cmd); } }), - TestCase::custom("test.sysroot", &|runner| { - apply_patches( - &runner.dirs, - "sysroot_tests", - &runner.stdlib_source.join("library"), - &SYSROOT_TESTS_SRC.to_path(&runner.dirs), - ); - - SYSROOT_TESTS.clean(&runner.dirs); - - if runner.is_native { - let mut test_cmd = SYSROOT_TESTS.test(&runner.target_compiler, &runner.dirs); - test_cmd.args(["-p", "coretests", "-p", "alloctests", "--", "-q"]); - spawn_and_wait(test_cmd); - } else { - eprintln!("Cross-Compiling: Not running tests"); - let mut build_cmd = SYSROOT_TESTS.build(&runner.target_compiler, &runner.dirs); - build_cmd.args(["-p", "coretests", "-p", "alloctests", "--tests"]); - spawn_and_wait(build_cmd); - } - }), TestCase::custom("test.regex", &|runner| { REGEX_REPO.patch(&runner.dirs); diff --git a/compiler/rustc_codegen_cranelift/build_system/utils.rs b/compiler/rustc_codegen_cranelift/build_system/utils.rs index c2114caf8692e..f239976845964 100644 --- a/compiler/rustc_codegen_cranelift/build_system/utils.rs +++ b/compiler/rustc_codegen_cranelift/build_system/utils.rs @@ -105,7 +105,11 @@ impl CargoProject { .arg(self.manifest_path(dirs)) .arg("--target-dir") .arg(self.target_dir(dirs)) - .arg("--locked"); + .arg("--locked") + // bootstrap sets both RUSTC and RUSTC_WRAPPER to the same wrapper. RUSTC is already + // respected by the rustc-clif wrapper, but RUSTC_WRAPPER will misinterpret rustc-clif + // as filename, so we need to unset it. + .env_remove("RUSTC_WRAPPER"); if dirs.frozen { cmd.arg("--frozen"); diff --git a/compiler/rustc_codegen_cranelift/config.txt b/compiler/rustc_codegen_cranelift/config.txt index 714414fe8d685..6ae4767adfdf5 100644 --- a/compiler/rustc_codegen_cranelift/config.txt +++ b/compiler/rustc_codegen_cranelift/config.txt @@ -32,9 +32,9 @@ aot.issue-59326 aot.neon aot.gen_block_iterate aot.raw-dylib +test.sysroot testsuite.extended_sysroot test.rust-random/rand -test.sysroot test.regex test.portable-simd diff --git a/compiler/rustc_codegen_cranelift/example/example.rs b/compiler/rustc_codegen_cranelift/example/example.rs index 1ef2aa5dd8ea4..aeb38331edb02 100644 --- a/compiler/rustc_codegen_cranelift/example/example.rs +++ b/compiler/rustc_codegen_cranelift/example/example.rs @@ -1,6 +1,6 @@ #![feature(no_core, unboxed_closures)] #![no_core] -#![allow(dead_code)] +#![allow(dead_code, unnecessary_transmutes)] extern crate mini_core; diff --git a/compiler/rustc_codegen_cranelift/example/gen_block_iterate.rs b/compiler/rustc_codegen_cranelift/example/gen_block_iterate.rs index 25bfe542d228a..de9a3d550eccd 100644 --- a/compiler/rustc_codegen_cranelift/example/gen_block_iterate.rs +++ b/compiler/rustc_codegen_cranelift/example/gen_block_iterate.rs @@ -6,16 +6,25 @@ #![feature(gen_blocks)] fn foo() -> impl Iterator { - gen { yield 42; for x in 3..6 { yield x } } + gen { + yield 42; + for x in 3..6 { + yield x + } + } } fn moved() -> impl Iterator { let mut x = "foo".to_string(); gen move { yield 42; - if x == "foo" { return } + if x == "foo" { + return; + } x.clear(); - for x in 3..6 { yield x } + for x in 3..6 { + yield x + } } } @@ -32,5 +41,4 @@ fn main() { let mut iter = moved(); assert_eq!(iter.next(), Some(42)); assert_eq!(iter.next(), None); - } diff --git a/compiler/rustc_codegen_cranelift/example/mini_core_hello_world.rs b/compiler/rustc_codegen_cranelift/example/mini_core_hello_world.rs index 09d5b73fd3d9d..93ca2e0e42188 100644 --- a/compiler/rustc_codegen_cranelift/example/mini_core_hello_world.rs +++ b/compiler/rustc_codegen_cranelift/example/mini_core_hello_world.rs @@ -1,13 +1,4 @@ -#![feature( - no_core, - lang_items, - never_type, - linkage, - extern_types, - naked_functions, - thread_local, - repr_simd -)] +#![feature(no_core, lang_items, never_type, linkage, extern_types, thread_local, repr_simd)] #![no_core] #![allow(dead_code, non_camel_case_types, internal_features)] @@ -387,11 +378,9 @@ global_asm! { } #[cfg(all(not(jit), target_arch = "x86_64"))] -#[naked] +#[unsafe(naked)] extern "C" fn naked_test() { - unsafe { - naked_asm!("ret"); - } + naked_asm!("ret") } #[repr(C)] diff --git a/compiler/rustc_codegen_cranelift/example/std_example.rs b/compiler/rustc_codegen_cranelift/example/std_example.rs index ffdc6a7d48491..2d9de2a5b8d6b 100644 --- a/compiler/rustc_codegen_cranelift/example/std_example.rs +++ b/compiler/rustc_codegen_cranelift/example/std_example.rs @@ -8,9 +8,6 @@ unboxed_closures )] #![allow(internal_features)] -// FIXME once abi_unsupported_vector_types is a hard error disable the foo test when the respective -// target feature is not enabled. -#![allow(abi_unsupported_vector_types)] #[cfg(target_arch = "x86_64")] use std::arch::x86_64::*; diff --git a/compiler/rustc_codegen_cranelift/rustfmt.toml b/compiler/rustc_codegen_cranelift/rustfmt.toml index f31fa9c76abc6..35c92663eb904 100644 --- a/compiler/rustc_codegen_cranelift/rustfmt.toml +++ b/compiler/rustc_codegen_cranelift/rustfmt.toml @@ -1,7 +1,3 @@ -ignore = [ - "example/gen_block_iterate.rs", # uses edition 2024 -] - # Matches rustfmt.toml of rustc style_edition = "2024" use_small_heuristics = "Max" diff --git a/compiler/rustc_codegen_cranelift/src/abi/mod.rs b/compiler/rustc_codegen_cranelift/src/abi/mod.rs index e8076ce77abcf..5f631405a9a4a 100644 --- a/compiler/rustc_codegen_cranelift/src/abi/mod.rs +++ b/compiler/rustc_codegen_cranelift/src/abi/mod.rs @@ -441,7 +441,9 @@ pub(crate) fn codegen_terminator_call<'tcx>( Err(instance) => Some(instance), } } - InstanceKind::DropGlue(_, None) | ty::InstanceKind::AsyncDropGlueCtorShim(_, None) => { + // We don't need AsyncDropGlueCtorShim here because it is not `noop func`, + // it is `func returning noop future` + InstanceKind::DropGlue(_, None) => { // empty drop glue - a nop. let dest = target.expect("Non terminating drop_in_place_real???"); let ret_block = fx.get_block(dest); @@ -641,7 +643,7 @@ pub(crate) fn codegen_terminator_call<'tcx>( .flat_map(|arg_abi| arg_abi.get_abi_param(fx.tcx).into_iter()), ); - if fx.tcx.sess.target.is_like_osx && fx.tcx.sess.target.arch == "aarch64" { + if fx.tcx.sess.target.is_like_darwin && fx.tcx.sess.target.arch == "aarch64" { // Add any padding arguments needed for Apple AArch64. // There's no need to pad the argument list unless variadic arguments are actually being // passed. @@ -707,9 +709,8 @@ pub(crate) fn codegen_drop<'tcx>( let ty = drop_place.layout().ty; let drop_instance = Instance::resolve_drop_in_place(fx.tcx, ty); - if let ty::InstanceKind::DropGlue(_, None) | ty::InstanceKind::AsyncDropGlueCtorShim(_, None) = - drop_instance.def - { + // AsyncDropGlueCtorShim can't be here + if let ty::InstanceKind::DropGlue(_, None) = drop_instance.def { // we don't actually need to drop anything } else { match ty.kind() { diff --git a/compiler/rustc_codegen_cranelift/src/base.rs b/compiler/rustc_codegen_cranelift/src/base.rs index adaa754491e56..524e0d9fe35ed 100644 --- a/compiler/rustc_codegen_cranelift/src/base.rs +++ b/compiler/rustc_codegen_cranelift/src/base.rs @@ -8,8 +8,6 @@ use rustc_ast::InlineAsmOptions; use rustc_codegen_ssa::base::is_call_from_compiler_builtins_to_upstream_monomorphization; use rustc_data_structures::profiling::SelfProfilerRef; use rustc_index::IndexVec; -use rustc_middle::middle::codegen_fn_attrs::CodegenFnAttrFlags; -use rustc_middle::mir::InlineAsmMacro; use rustc_middle::ty::TypeVisitableExt; use rustc_middle::ty::adjustment::PointerCoercion; use rustc_middle::ty::layout::{FnAbiOf, HasTypingEnv}; @@ -18,7 +16,6 @@ use rustc_middle::ty::print::with_no_trimmed_paths; use crate::constant::ConstantCx; use crate::debuginfo::{FunctionDebugContext, TypeDebugContext}; use crate::enable_verifier; -use crate::inline_asm::codegen_naked_asm; use crate::prelude::*; use crate::pretty_clif::CommentWriter; @@ -37,7 +34,7 @@ pub(crate) fn codegen_fn<'tcx>( cached_func: Function, module: &mut dyn Module, instance: Instance<'tcx>, -) -> Option { +) -> CodegenedFunction { debug_assert!(!instance.args.has_infer()); let symbol_name = tcx.symbol_name(instance).name.to_string(); @@ -54,38 +51,6 @@ pub(crate) fn codegen_fn<'tcx>( String::from_utf8_lossy(&buf).into_owned() }); - if tcx.codegen_fn_attrs(instance.def_id()).flags.contains(CodegenFnAttrFlags::NAKED) { - assert_eq!(mir.basic_blocks.len(), 1); - assert!(mir.basic_blocks[START_BLOCK].statements.is_empty()); - - match &mir.basic_blocks[START_BLOCK].terminator().kind { - TerminatorKind::InlineAsm { - asm_macro: InlineAsmMacro::NakedAsm, - template, - operands, - options, - line_spans: _, - targets: _, - unwind: _, - } => { - codegen_naked_asm( - tcx, - cx, - module, - instance, - mir.basic_blocks[START_BLOCK].terminator().source_info.span, - &symbol_name, - template, - operands, - *options, - ); - } - _ => unreachable!(), - } - - return None; - } - // Declare function let sig = get_function_sig(tcx, module.target_config().default_call_conv, instance); let func_id = module.declare_function(&symbol_name, Linkage::Local, &sig).unwrap(); @@ -166,7 +131,7 @@ pub(crate) fn codegen_fn<'tcx>( // Verify function verify_func(tcx, &clif_comments, &func); - Some(CodegenedFunction { symbol_name, func_id, func, clif_comments, func_debug_cx }) + CodegenedFunction { symbol_name, func_id, func, clif_comments, func_debug_cx } } pub(crate) fn compile_fn( @@ -565,7 +530,11 @@ fn codegen_fn_body(fx: &mut FunctionCx<'_, '_, '_>, start_block: Block) { | TerminatorKind::CoroutineDrop => { bug!("shouldn't exist at codegen {:?}", bb_data.terminator()); } - TerminatorKind::Drop { place, target, unwind: _, replace: _ } => { + TerminatorKind::Drop { place, target, unwind: _, replace: _, drop, async_fut } => { + assert!( + async_fut.is_none() && drop.is_none(), + "Async Drop must be expanded or reset to sync before codegen" + ); let drop_place = codegen_place(fx, *place); crate::abi::codegen_drop(fx, source_info, drop_place, *target); } diff --git a/compiler/rustc_codegen_cranelift/src/constant.rs b/compiler/rustc_codegen_cranelift/src/constant.rs index bcc70f4567fbd..c8527c3a57dfe 100644 --- a/compiler/rustc_codegen_cranelift/src/constant.rs +++ b/compiler/rustc_codegen_cranelift/src/constant.rs @@ -391,7 +391,7 @@ fn define_all_allocs(tcx: TyCtxt<'_>, module: &mut dyn Module, cx: &mut Constant data.set_align(alloc.align.bytes()); if let Some(section_name) = section_name { - let (segment_name, section_name) = if tcx.sess.target.is_like_osx { + let (segment_name, section_name) = if tcx.sess.target.is_like_darwin { // See https://github.com/llvm/llvm-project/blob/main/llvm/lib/MC/MCSectionMachO.cpp let mut parts = section_name.as_str().split(','); let Some(segment_name) = parts.next() else { diff --git a/compiler/rustc_codegen_cranelift/src/debuginfo/mod.rs b/compiler/rustc_codegen_cranelift/src/debuginfo/mod.rs index bba6567774d7e..286e02b986b3c 100644 --- a/compiler/rustc_codegen_cranelift/src/debuginfo/mod.rs +++ b/compiler/rustc_codegen_cranelift/src/debuginfo/mod.rs @@ -58,7 +58,7 @@ impl DebugContext { // FIXME this should be configurable // macOS doesn't seem to support DWARF > 3 // 5 version is required for md5 file hash - version: if tcx.sess.target.is_like_osx { + version: if tcx.sess.target.is_like_darwin { 3 } else { // FIXME change to version 5 once the gdb and lldb shipping with the latest debian diff --git a/compiler/rustc_codegen_cranelift/src/driver/aot.rs b/compiler/rustc_codegen_cranelift/src/driver/aot.rs index 444dc4412868f..5d07c94859f33 100644 --- a/compiler/rustc_codegen_cranelift/src/driver/aot.rs +++ b/compiler/rustc_codegen_cranelift/src/driver/aot.rs @@ -22,7 +22,10 @@ use rustc_data_structures::sync::{IntoDynSyncSend, par_map}; use rustc_metadata::EncodedMetadata; use rustc_metadata::fs::copy_to_stdout; use rustc_middle::dep_graph::{WorkProduct, WorkProductId}; -use rustc_middle::mir::mono::{CodegenUnit, MonoItem}; +use rustc_middle::middle::codegen_fn_attrs::CodegenFnAttrFlags; +use rustc_middle::mir::mono::{ + CodegenUnit, Linkage as RLinkage, MonoItem, MonoItemData, Visibility, +}; use rustc_session::Session; use rustc_session::config::{DebugInfo, OutFileName, OutputFilenames, OutputType}; @@ -30,7 +33,7 @@ use crate::CodegenCx; use crate::base::CodegenedFunction; use crate::concurrency_limiter::{ConcurrencyLimiter, ConcurrencyLimiterToken}; use crate::debuginfo::TypeDebugContext; -use crate::global_asm::GlobalAsmConfig; +use crate::global_asm::{GlobalAsmConfig, GlobalAsmContext}; use crate::prelude::*; use crate::unwind_module::UnwindModule; @@ -169,8 +172,11 @@ fn produce_final_output_artifacts( if codegen_results.modules.len() == 1 { // 1) Only one codegen unit. In this case it's no difficulty // to copy `foo.0.x` to `foo.x`. - let module_name = Some(&codegen_results.modules[0].name[..]); - let path = crate_output.temp_path(output_type, module_name); + let path = crate_output.temp_path_for_cgu( + output_type, + &codegen_results.modules[0].name, + sess.invocation_temp.as_deref(), + ); let output = crate_output.path(output_type); if !output_type.is_text_output() && output.is_tty() { sess.dcx() @@ -183,22 +189,16 @@ fn produce_final_output_artifacts( ensure_removed(sess.dcx(), &path); } } else { - let extension = crate_output - .temp_path(output_type, None) - .extension() - .unwrap() - .to_str() - .unwrap() - .to_owned(); - if crate_output.outputs.contains_explicit_name(&output_type) { // 2) Multiple codegen units, with `--emit foo=some_name`. We have // no good solution for this case, so warn the user. - sess.dcx().emit_warn(ssa_errors::IgnoringEmitPath { extension }); + sess.dcx() + .emit_warn(ssa_errors::IgnoringEmitPath { extension: output_type.extension() }); } else if crate_output.single_output_file.is_some() { // 3) Multiple codegen units, with `-o some_name`. We have // no good solution for this case, so warn the user. - sess.dcx().emit_warn(ssa_errors::IgnoringOutput { extension }); + sess.dcx() + .emit_warn(ssa_errors::IgnoringOutput { extension: output_type.extension() }); } else { // 4) Multiple codegen units, but no explicit name. We // just leave the `foo.0.x` files in place. @@ -351,6 +351,7 @@ fn make_module(sess: &Session, name: String) -> UnwindModule { fn emit_cgu( output_filenames: &OutputFilenames, + invocation_temp: Option<&str>, prof: &SelfProfilerRef, name: String, module: UnwindModule, @@ -366,6 +367,7 @@ fn emit_cgu( let module_regular = emit_module( output_filenames, + invocation_temp, prof, product.object, ModuleKind::Regular, @@ -391,6 +393,7 @@ fn emit_cgu( fn emit_module( output_filenames: &OutputFilenames, + invocation_temp: Option<&str>, prof: &SelfProfilerRef, mut object: cranelift_object::object::write::Object<'_>, kind: ModuleKind, @@ -409,7 +412,7 @@ fn emit_module( object.set_section_data(comment_section, producer, 1); } - let tmp_file = output_filenames.temp_path(OutputType::Object, Some(&name)); + let tmp_file = output_filenames.temp_path_for_cgu(OutputType::Object, &name, invocation_temp); let file = match File::create(&tmp_file) { Ok(file) => file, Err(err) => return Err(format!("error creating object file: {}", err)), @@ -449,8 +452,11 @@ fn reuse_workproduct_for_cgu( cgu: &CodegenUnit<'_>, ) -> Result { let work_product = cgu.previous_work_product(tcx); - let obj_out_regular = - tcx.output_filenames(()).temp_path(OutputType::Object, Some(cgu.name().as_str())); + let obj_out_regular = tcx.output_filenames(()).temp_path_for_cgu( + OutputType::Object, + cgu.name().as_str(), + tcx.sess.invocation_temp.as_deref(), + ); let source_file_regular = rustc_incremental::in_incr_comp_dir_sess( &tcx.sess, &work_product.saved_files.get("o").expect("no saved object file in work product"), @@ -527,19 +533,35 @@ fn codegen_cgu_content( let mut type_dbg = TypeDebugContext::default(); super::predefine_mono_items(tcx, module, &mono_items); let mut codegened_functions = vec![]; - for (mono_item, _) in mono_items { + for (mono_item, item_data) in mono_items { match mono_item { - MonoItem::Fn(inst) => { - if let Some(codegened_function) = crate::base::codegen_fn( + MonoItem::Fn(instance) => { + if tcx.codegen_fn_attrs(instance.def_id()).flags.contains(CodegenFnAttrFlags::NAKED) + { + rustc_codegen_ssa::mir::naked_asm::codegen_naked_asm( + &mut GlobalAsmContext { tcx, global_asm: &mut cx.global_asm }, + instance, + MonoItemData { + linkage: RLinkage::External, + visibility: if item_data.linkage == RLinkage::Internal { + Visibility::Hidden + } else { + item_data.visibility + }, + ..item_data + }, + ); + continue; + } + let codegened_function = crate::base::codegen_fn( tcx, &mut cx, &mut type_dbg, Function::new(), module, - inst, - ) { - codegened_functions.push(codegened_function); - } + instance, + ); + codegened_functions.push(codegened_function); } MonoItem::Static(def_id) => { let data_id = crate::constant::codegen_static(tcx, module, def_id); @@ -548,7 +570,10 @@ fn codegen_cgu_content( } } MonoItem::GlobalAsm(item_id) => { - crate::global_asm::codegen_global_asm_item(tcx, &mut cx.global_asm, item_id); + rustc_codegen_ssa::base::codegen_global_asm( + &mut GlobalAsmContext { tcx, global_asm: &mut cx.global_asm }, + item_id, + ); } } } @@ -595,13 +620,19 @@ fn module_codegen( let global_asm_object_file = profiler.generic_activity_with_arg("compile assembly", &*cgu_name).run(|| { - crate::global_asm::compile_global_asm(&global_asm_config, &cgu_name, &cx.global_asm) + crate::global_asm::compile_global_asm( + &global_asm_config, + &cgu_name, + &cx.global_asm, + cx.invocation_temp.as_deref(), + ) })?; let codegen_result = profiler.generic_activity_with_arg("write object file", &*cgu_name).run(|| { emit_cgu( &global_asm_config.output_filenames, + cx.invocation_temp.as_deref(), &profiler, cgu_name, module, @@ -626,8 +657,11 @@ fn emit_metadata_module(tcx: TyCtxt<'_>, metadata: &EncodedMetadata) -> Compiled .as_str() .to_string(); - let tmp_file = - tcx.output_filenames(()).temp_path(OutputType::Metadata, Some(&metadata_cgu_name)); + let tmp_file = tcx.output_filenames(()).temp_path_for_cgu( + OutputType::Metadata, + &metadata_cgu_name, + tcx.sess.invocation_temp.as_deref(), + ); let symbol_name = rustc_middle::middle::exported_symbols::metadata_symbol_name(tcx); let obj = create_compressed_metadata_file(tcx.sess, metadata, &symbol_name); @@ -657,6 +691,7 @@ fn emit_allocator_module(tcx: TyCtxt<'_>) -> Option { match emit_module( tcx.output_filenames(()), + tcx.sess.invocation_temp.as_deref(), &tcx.sess.prof, product.object, ModuleKind::Allocator, @@ -728,26 +763,27 @@ pub(crate) fn run_aot( let concurrency_limiter = IntoDynSyncSend(ConcurrencyLimiter::new(todo_cgus.len())); - let modules = tcx.sess.time("codegen mono items", || { - let mut modules: Vec<_> = par_map(todo_cgus, |(_, cgu)| { - let dep_node = cgu.codegen_dep_node(tcx); - tcx.dep_graph - .with_task( + let modules: Vec<_> = + tcx.sess.time("codegen mono items", || { + let modules: Vec<_> = par_map(todo_cgus, |(_, cgu)| { + let dep_node = cgu.codegen_dep_node(tcx); + let (module, _) = tcx.dep_graph.with_task( dep_node, tcx, (global_asm_config.clone(), cgu.name(), concurrency_limiter.acquire(tcx.dcx())), module_codegen, Some(rustc_middle::dep_graph::hash_result), - ) - .0 - }); - modules.extend( - done_cgus + ); + IntoDynSyncSend(module) + }); + modules .into_iter() - .map(|(_, cgu)| OngoingModuleCodegen::Sync(reuse_workproduct_for_cgu(tcx, cgu))), - ); - modules - }); + .map(|module| module.0) + .chain(done_cgus.into_iter().map(|(_, cgu)| { + OngoingModuleCodegen::Sync(reuse_workproduct_for_cgu(tcx, cgu)) + })) + .collect() + }); let allocator_module = emit_allocator_module(tcx); diff --git a/compiler/rustc_codegen_cranelift/src/driver/jit.rs b/compiler/rustc_codegen_cranelift/src/driver/jit.rs index 41f8bb9161ca2..e368cf4386d01 100644 --- a/compiler/rustc_codegen_cranelift/src/driver/jit.rs +++ b/compiler/rustc_codegen_cranelift/src/driver/jit.rs @@ -126,6 +126,11 @@ pub(crate) fn codegen_and_compile_fn<'tcx>( module: &mut dyn Module, instance: Instance<'tcx>, ) { + if tcx.codegen_fn_attrs(instance.def_id()).flags.contains(CodegenFnAttrFlags::NAKED) { + tcx.dcx() + .span_fatal(tcx.def_span(instance.def_id()), "Naked asm is not supported in JIT mode"); + } + cranelift_codegen::timing::set_thread_profiler(Box::new(super::MeasuremeProfiler( tcx.prof.clone(), ))); @@ -135,16 +140,15 @@ pub(crate) fn codegen_and_compile_fn<'tcx>( crate::PrintOnPanic(|| format!("{:?} {}", instance, tcx.symbol_name(instance).name)); let cached_func = std::mem::replace(&mut cached_context.func, Function::new()); - if let Some(codegened_func) = crate::base::codegen_fn( + let codegened_func = crate::base::codegen_fn( tcx, cx, &mut TypeDebugContext::default(), cached_func, module, instance, - ) { - crate::base::compile_fn(cx, &tcx.prof, cached_context, module, codegened_func); - } + ); + crate::base::compile_fn(cx, &tcx.prof, cached_context, module, codegened_func); }); } diff --git a/compiler/rustc_codegen_cranelift/src/global_asm.rs b/compiler/rustc_codegen_cranelift/src/global_asm.rs index 9ea92c300f898..203b443269fa7 100644 --- a/compiler/rustc_codegen_cranelift/src/global_asm.rs +++ b/compiler/rustc_codegen_cranelift/src/global_asm.rs @@ -7,102 +7,139 @@ use std::process::{Command, Stdio}; use std::sync::Arc; use rustc_ast::{InlineAsmOptions, InlineAsmTemplatePiece}; -use rustc_hir::{InlineAsmOperand, ItemId}; -use rustc_middle::mir::interpret::ErrorHandled; +use rustc_codegen_ssa::traits::{AsmCodegenMethods, GlobalAsmOperandRef}; +use rustc_middle::ty::TyCtxt; +use rustc_middle::ty::layout::{ + FnAbiError, FnAbiOfHelpers, FnAbiRequest, HasTyCtxt, HasTypingEnv, LayoutError, LayoutOfHelpers, +}; use rustc_session::config::{OutputFilenames, OutputType}; use rustc_target::asm::InlineAsmArch; use crate::prelude::*; -pub(crate) fn codegen_global_asm_item(tcx: TyCtxt<'_>, global_asm: &mut String, item_id: ItemId) { - let item = tcx.hir_item(item_id); - if let rustc_hir::ItemKind::GlobalAsm { asm, .. } = item.kind { - let is_x86 = - matches!(tcx.sess.asm_arch.unwrap(), InlineAsmArch::X86 | InlineAsmArch::X86_64); - - if is_x86 { - if !asm.options.contains(InlineAsmOptions::ATT_SYNTAX) { - global_asm.push_str("\n.intel_syntax noprefix\n"); - } else { - global_asm.push_str("\n.att_syntax\n"); - } +pub(crate) struct GlobalAsmContext<'a, 'tcx> { + pub tcx: TyCtxt<'tcx>, + pub global_asm: &'a mut String, +} + +impl<'tcx> AsmCodegenMethods<'tcx> for GlobalAsmContext<'_, 'tcx> { + fn codegen_global_asm( + &mut self, + template: &[InlineAsmTemplatePiece], + operands: &[GlobalAsmOperandRef<'tcx>], + options: InlineAsmOptions, + _line_spans: &[Span], + ) { + codegen_global_asm_inner(self.tcx, self.global_asm, template, operands, options); + } + + fn mangled_name(&self, instance: Instance<'tcx>) -> String { + let symbol_name = self.tcx.symbol_name(instance).name.to_owned(); + if self.tcx.sess.target.is_like_darwin { format!("_{symbol_name}") } else { symbol_name } + } +} + +impl<'tcx> LayoutOfHelpers<'tcx> for GlobalAsmContext<'_, 'tcx> { + #[inline] + fn handle_layout_err(&self, err: LayoutError<'tcx>, span: Span, ty: Ty<'tcx>) -> ! { + if let LayoutError::SizeOverflow(_) | LayoutError::ReferencesError(_) = err { + self.tcx.sess.dcx().span_fatal(span, err.to_string()) + } else { + self.tcx + .sess + .dcx() + .span_fatal(span, format!("failed to get layout for `{}`: {}", ty, err)) } - for piece in asm.template { - match *piece { - InlineAsmTemplatePiece::String(ref s) => global_asm.push_str(s), - InlineAsmTemplatePiece::Placeholder { operand_idx, modifier: _, span: op_sp } => { - match asm.operands[operand_idx].0 { - InlineAsmOperand::Const { ref anon_const } => { - match tcx.const_eval_poly(anon_const.def_id.to_def_id()) { - Ok(const_value) => { - let ty = tcx - .typeck_body(anon_const.body) - .node_type(anon_const.hir_id); - let string = rustc_codegen_ssa::common::asm_const_to_str( - tcx, - op_sp, - const_value, - FullyMonomorphizedLayoutCx(tcx).layout_of(ty), - ); - global_asm.push_str(&string); - } - Err(ErrorHandled::Reported { .. }) => { - // An error has already been reported and compilation is - // guaranteed to fail if execution hits this path. - } - Err(ErrorHandled::TooGeneric(_)) => { - span_bug!(op_sp, "asm const cannot be resolved; too generic"); - } - } - } - InlineAsmOperand::SymFn { expr } => { - if cfg!(not(feature = "inline_asm_sym")) { - tcx.dcx().span_err( - item.span, - "asm! and global_asm! sym operands are not yet supported", - ); - } - - let ty = tcx.typeck(item_id.owner_id).expr_ty(expr); - let instance = match ty.kind() { - &ty::FnDef(def_id, args) => Instance::new(def_id, args), - _ => span_bug!(op_sp, "asm sym is not a function"), - }; - let symbol = tcx.symbol_name(instance); - // FIXME handle the case where the function was made private to the - // current codegen unit - global_asm.push_str(symbol.name); - } - InlineAsmOperand::SymStatic { path: _, def_id } => { - if cfg!(not(feature = "inline_asm_sym")) { - tcx.dcx().span_err( - item.span, - "asm! and global_asm! sym operands are not yet supported", - ); - } - - let instance = Instance::mono(tcx, def_id); - let symbol = tcx.symbol_name(instance); - global_asm.push_str(symbol.name); + } +} + +impl<'tcx> FnAbiOfHelpers<'tcx> for GlobalAsmContext<'_, 'tcx> { + #[inline] + fn handle_fn_abi_err( + &self, + err: FnAbiError<'tcx>, + span: Span, + fn_abi_request: FnAbiRequest<'tcx>, + ) -> ! { + FullyMonomorphizedLayoutCx(self.tcx).handle_fn_abi_err(err, span, fn_abi_request) + } +} + +impl<'tcx> HasTyCtxt<'tcx> for GlobalAsmContext<'_, 'tcx> { + fn tcx<'b>(&'b self) -> TyCtxt<'tcx> { + self.tcx + } +} + +impl<'tcx> rustc_abi::HasDataLayout for GlobalAsmContext<'_, 'tcx> { + fn data_layout(&self) -> &rustc_abi::TargetDataLayout { + &self.tcx.data_layout + } +} + +impl<'tcx> HasTypingEnv<'tcx> for GlobalAsmContext<'_, 'tcx> { + fn typing_env(&self) -> ty::TypingEnv<'tcx> { + ty::TypingEnv::fully_monomorphized() + } +} + +fn codegen_global_asm_inner<'tcx>( + tcx: TyCtxt<'tcx>, + global_asm: &mut String, + template: &[InlineAsmTemplatePiece], + operands: &[GlobalAsmOperandRef<'tcx>], + options: InlineAsmOptions, +) { + let is_x86 = matches!(tcx.sess.asm_arch.unwrap(), InlineAsmArch::X86 | InlineAsmArch::X86_64); + + if is_x86 { + if !options.contains(InlineAsmOptions::ATT_SYNTAX) { + global_asm.push_str("\n.intel_syntax noprefix\n"); + } else { + global_asm.push_str("\n.att_syntax\n"); + } + } + for piece in template { + match *piece { + InlineAsmTemplatePiece::String(ref s) => global_asm.push_str(s), + InlineAsmTemplatePiece::Placeholder { operand_idx, modifier: _, span } => { + match operands[operand_idx] { + GlobalAsmOperandRef::Const { ref string } => { + global_asm.push_str(string); + } + GlobalAsmOperandRef::SymFn { instance } => { + if cfg!(not(feature = "inline_asm_sym")) { + tcx.dcx().span_err( + span, + "asm! and global_asm! sym operands are not yet supported", + ); } - InlineAsmOperand::In { .. } - | InlineAsmOperand::Out { .. } - | InlineAsmOperand::InOut { .. } - | InlineAsmOperand::SplitInOut { .. } - | InlineAsmOperand::Label { .. } => { - span_bug!(op_sp, "invalid operand type for global_asm!") + + let symbol = tcx.symbol_name(instance); + // FIXME handle the case where the function was made private to the + // current codegen unit + global_asm.push_str(symbol.name); + } + GlobalAsmOperandRef::SymStatic { def_id } => { + if cfg!(not(feature = "inline_asm_sym")) { + tcx.dcx().span_err( + span, + "asm! and global_asm! sym operands are not yet supported", + ); } + + let instance = Instance::mono(tcx, def_id); + let symbol = tcx.symbol_name(instance); + global_asm.push_str(symbol.name); } } } } + } - global_asm.push('\n'); - if is_x86 { - global_asm.push_str(".att_syntax\n\n"); - } - } else { - bug!("Expected GlobalAsm found {:?}", item); + global_asm.push('\n'); + if is_x86 { + global_asm.push_str(".att_syntax\n\n"); } } @@ -132,6 +169,7 @@ pub(crate) fn compile_global_asm( config: &GlobalAsmConfig, cgu_name: &str, global_asm: &str, + invocation_temp: Option<&str>, ) -> Result, String> { if global_asm.is_empty() { return Ok(None); @@ -146,7 +184,7 @@ pub(crate) fn compile_global_asm( global_asm.push('\n'); let global_asm_object_file = add_file_stem_postfix( - config.output_filenames.temp_path(OutputType::Object, Some(cgu_name)), + config.output_filenames.temp_path_for_cgu(OutputType::Object, cgu_name, invocation_temp), ".asm", ); diff --git a/compiler/rustc_codegen_cranelift/src/inline_asm.rs b/compiler/rustc_codegen_cranelift/src/inline_asm.rs index fbc33a642853c..afee50955497c 100644 --- a/compiler/rustc_codegen_cranelift/src/inline_asm.rs +++ b/compiler/rustc_codegen_cranelift/src/inline_asm.rs @@ -161,7 +161,6 @@ pub(crate) fn codegen_inline_asm_inner<'tcx>( stack_slots_input: Vec::new(), stack_slots_output: Vec::new(), stack_slot_size: Size::from_bytes(0), - is_naked: false, }; asm_gen.allocate_registers(); asm_gen.allocate_stack_slots(); @@ -201,114 +200,6 @@ pub(crate) fn codegen_inline_asm_inner<'tcx>( call_inline_asm(fx, &asm_name, asm_gen.stack_slot_size, inputs, outputs); } -pub(crate) fn codegen_naked_asm<'tcx>( - tcx: TyCtxt<'tcx>, - cx: &mut crate::CodegenCx, - module: &mut dyn Module, - instance: Instance<'tcx>, - span: Span, - symbol_name: &str, - template: &[InlineAsmTemplatePiece], - operands: &[InlineAsmOperand<'tcx>], - options: InlineAsmOptions, -) { - // FIXME add .eh_frame unwind info directives - - let operands = operands - .iter() - .map(|operand| match *operand { - InlineAsmOperand::In { .. } - | InlineAsmOperand::Out { .. } - | InlineAsmOperand::InOut { .. } => { - span_bug!(span, "invalid operand type for naked asm") - } - InlineAsmOperand::Const { ref value } => { - let cv = instance.instantiate_mir_and_normalize_erasing_regions( - tcx, - ty::TypingEnv::fully_monomorphized(), - ty::EarlyBinder::bind(value.const_), - ); - let const_value = cv - .eval(tcx, ty::TypingEnv::fully_monomorphized(), value.span) - .expect("erroneous constant missed by mono item collection"); - - let value = rustc_codegen_ssa::common::asm_const_to_str( - tcx, - span, - const_value, - FullyMonomorphizedLayoutCx(tcx).layout_of(cv.ty()), - ); - CInlineAsmOperand::Const { value } - } - InlineAsmOperand::SymFn { ref value } => { - if cfg!(not(feature = "inline_asm_sym")) { - tcx.dcx() - .span_err(span, "asm! and global_asm! sym operands are not yet supported"); - } - - let const_ = instance.instantiate_mir_and_normalize_erasing_regions( - tcx, - ty::TypingEnv::fully_monomorphized(), - ty::EarlyBinder::bind(value.const_), - ); - if let ty::FnDef(def_id, args) = *const_.ty().kind() { - let instance = ty::Instance::resolve_for_fn_ptr( - tcx, - ty::TypingEnv::fully_monomorphized(), - def_id, - args, - ) - .unwrap(); - let symbol = tcx.symbol_name(instance); - - // Pass a wrapper rather than the function itself as the function itself may not - // be exported from the main codegen unit and may thus be unreachable from the - // object file created by an external assembler. - let wrapper_name = format!( - "__inline_asm_{}_wrapper_n{}", - cx.cgu_name.as_str().replace('.', "__").replace('-', "_"), - cx.inline_asm_index - ); - cx.inline_asm_index += 1; - let sig = - get_function_sig(tcx, module.target_config().default_call_conv, instance); - create_wrapper_function(module, sig, &wrapper_name, symbol.name); - - CInlineAsmOperand::Symbol { symbol: wrapper_name } - } else { - span_bug!(span, "invalid type for asm sym (fn)"); - } - } - InlineAsmOperand::SymStatic { def_id } => { - assert!(tcx.is_static(def_id)); - let instance = Instance::mono(tcx, def_id); - CInlineAsmOperand::Symbol { symbol: tcx.symbol_name(instance).name.to_owned() } - } - InlineAsmOperand::Label { .. } => { - span_bug!(span, "asm! label operands are not yet supported"); - } - }) - .collect::>(); - - let asm_gen = InlineAssemblyGenerator { - tcx, - arch: tcx.sess.asm_arch.unwrap(), - enclosing_def_id: instance.def_id(), - template, - operands: &operands, - options, - registers: Vec::new(), - stack_slots_clobber: Vec::new(), - stack_slots_input: Vec::new(), - stack_slots_output: Vec::new(), - stack_slot_size: Size::from_bytes(0), - is_naked: true, - }; - - let generated_asm = asm_gen.generate_asm_wrapper(symbol_name); - cx.global_asm.push_str(&generated_asm); -} - struct InlineAssemblyGenerator<'a, 'tcx> { tcx: TyCtxt<'tcx>, arch: InlineAsmArch, @@ -321,13 +212,10 @@ struct InlineAssemblyGenerator<'a, 'tcx> { stack_slots_input: Vec>, stack_slots_output: Vec>, stack_slot_size: Size, - is_naked: bool, } impl<'tcx> InlineAssemblyGenerator<'_, 'tcx> { fn allocate_registers(&mut self) { - assert!(!self.is_naked); - let sess = self.tcx.sess; let map = allocatable_registers( self.arch, @@ -451,8 +339,6 @@ impl<'tcx> InlineAssemblyGenerator<'_, 'tcx> { } fn allocate_stack_slots(&mut self) { - assert!(!self.is_naked); - let mut slot_size = Size::from_bytes(0); let mut slots_clobber = vec![None; self.operands.len()]; let mut slots_input = vec![None; self.operands.len()]; @@ -582,32 +468,31 @@ impl<'tcx> InlineAssemblyGenerator<'_, 'tcx> { if is_x86 { generated_asm.push_str(".intel_syntax noprefix\n"); } - if !self.is_naked { - Self::prologue(&mut generated_asm, self.arch); - - // Save clobbered registers - if !self.options.contains(InlineAsmOptions::NORETURN) { - for (reg, slot) in self - .registers - .iter() - .zip(self.stack_slots_clobber.iter().copied()) - .filter_map(|(r, s)| r.zip(s)) - { - Self::save_register(&mut generated_asm, self.arch, reg, slot); - } - } - // Write input registers + Self::prologue(&mut generated_asm, self.arch); + + // Save clobbered registers + if !self.options.contains(InlineAsmOptions::NORETURN) { for (reg, slot) in self .registers .iter() - .zip(self.stack_slots_input.iter().copied()) + .zip(self.stack_slots_clobber.iter().copied()) .filter_map(|(r, s)| r.zip(s)) { - Self::restore_register(&mut generated_asm, self.arch, reg, slot); + Self::save_register(&mut generated_asm, self.arch, reg, slot); } } + // Write input registers + for (reg, slot) in self + .registers + .iter() + .zip(self.stack_slots_input.iter().copied()) + .filter_map(|(r, s)| r.zip(s)) + { + Self::restore_register(&mut generated_asm, self.arch, reg, slot); + } + if is_x86 && self.options.contains(InlineAsmOptions::ATT_SYNTAX) { generated_asm.push_str(".att_syntax\n"); } @@ -701,32 +586,30 @@ impl<'tcx> InlineAssemblyGenerator<'_, 'tcx> { generated_asm.push_str(".intel_syntax noprefix\n"); } - if !self.is_naked { - if !self.options.contains(InlineAsmOptions::NORETURN) { - // Read output registers - for (reg, slot) in self - .registers - .iter() - .zip(self.stack_slots_output.iter().copied()) - .filter_map(|(r, s)| r.zip(s)) - { - Self::save_register(&mut generated_asm, self.arch, reg, slot); - } - - // Restore clobbered registers - for (reg, slot) in self - .registers - .iter() - .zip(self.stack_slots_clobber.iter().copied()) - .filter_map(|(r, s)| r.zip(s)) - { - Self::restore_register(&mut generated_asm, self.arch, reg, slot); - } + if !self.options.contains(InlineAsmOptions::NORETURN) { + // Read output registers + for (reg, slot) in self + .registers + .iter() + .zip(self.stack_slots_output.iter().copied()) + .filter_map(|(r, s)| r.zip(s)) + { + Self::save_register(&mut generated_asm, self.arch, reg, slot); + } - Self::epilogue(&mut generated_asm, self.arch); - } else { - Self::epilogue_noreturn(&mut generated_asm, self.arch); + // Restore clobbered registers + for (reg, slot) in self + .registers + .iter() + .zip(self.stack_slots_clobber.iter().copied()) + .filter_map(|(r, s)| r.zip(s)) + { + Self::restore_register(&mut generated_asm, self.arch, reg, slot); } + + Self::epilogue(&mut generated_asm, self.arch); + } else { + Self::epilogue_noreturn(&mut generated_asm, self.arch); } if is_x86 { diff --git a/compiler/rustc_codegen_cranelift/src/intrinsics/mod.rs b/compiler/rustc_codegen_cranelift/src/intrinsics/mod.rs index 75f3a3c19724f..e866b8962551a 100644 --- a/compiler/rustc_codegen_cranelift/src/intrinsics/mod.rs +++ b/compiler/rustc_codegen_cranelift/src/intrinsics/mod.rs @@ -1,5 +1,4 @@ -//! Codegen of intrinsics. This includes `extern "rust-intrinsic"`, -//! functions marked with the `#[rustc_intrinsic]` attribute +//! Codegen of intrinsics. This includes functions marked with the `#[rustc_intrinsic]` attribute //! and LLVM intrinsics that have symbol names starting with `llvm.`. macro_rules! intrinsic_args { @@ -1283,7 +1282,7 @@ fn codegen_regular_intrinsic_call<'tcx>( intrinsic.name, ); } - return Err(Instance::new(instance.def_id(), instance.args)); + return Err(Instance::new_raw(instance.def_id(), instance.args)); } } diff --git a/compiler/rustc_codegen_cranelift/src/lib.rs b/compiler/rustc_codegen_cranelift/src/lib.rs index e7afaff3b428f..ab09a6f8b38e1 100644 --- a/compiler/rustc_codegen_cranelift/src/lib.rs +++ b/compiler/rustc_codegen_cranelift/src/lib.rs @@ -41,8 +41,8 @@ use std::sync::Arc; use cranelift_codegen::isa::TargetIsa; use cranelift_codegen::settings::{self, Configurable}; -use rustc_codegen_ssa::CodegenResults; use rustc_codegen_ssa::traits::CodegenBackend; +use rustc_codegen_ssa::{CodegenResults, TargetConfig}; use rustc_metadata::EncodedMetadata; use rustc_middle::dep_graph::{WorkProduct, WorkProductId}; use rustc_session::Session; @@ -124,6 +124,7 @@ impl String> Drop for PrintOnPanic { /// inside a single codegen unit with the exception of the Cranelift [`Module`](cranelift_module::Module). struct CodegenCx { output_filenames: Arc, + invocation_temp: Option, should_write_ir: bool, global_asm: String, inline_asm_index: usize, @@ -142,6 +143,7 @@ impl CodegenCx { }; CodegenCx { output_filenames: tcx.output_filenames(()).clone(), + invocation_temp: tcx.sess.invocation_temp.clone(), should_write_ir: crate::pretty_clif::should_write_ir(tcx), global_asm: String::new(), inline_asm_index: 0, @@ -176,7 +178,7 @@ impl CodegenBackend for CraneliftCodegenBackend { } } - fn target_features_cfg(&self, sess: &Session) -> (Vec, Vec) { + fn target_config(&self, sess: &Session) -> TargetConfig { // FIXME return the actually used target features. this is necessary for #[cfg(target_feature)] let target_features = if sess.target.arch == "x86_64" && sess.target.os != "none" { // x86_64 mandates SSE2 support and rustc requires the x87 feature to be enabled @@ -195,7 +197,16 @@ impl CodegenBackend for CraneliftCodegenBackend { }; // FIXME do `unstable_target_features` properly let unstable_target_features = target_features.clone(); - (target_features, unstable_target_features) + + TargetConfig { + target_features, + unstable_target_features, + // Cranelift does not yet support f16 or f128 + has_reliable_f16: false, + has_reliable_f16_math: false, + has_reliable_f128: false, + has_reliable_f128_math: false, + } } fn print_version(&self) { diff --git a/compiler/rustc_codegen_cranelift/src/main_shim.rs b/compiler/rustc_codegen_cranelift/src/main_shim.rs index 6d5df2b00437b..6eef97c14dd28 100644 --- a/compiler/rustc_codegen_cranelift/src/main_shim.rs +++ b/compiler/rustc_codegen_cranelift/src/main_shim.rs @@ -1,6 +1,6 @@ use cranelift_frontend::{FunctionBuilder, FunctionBuilderContext}; use rustc_hir::LangItem; -use rustc_middle::ty::{AssocKind, GenericArg}; +use rustc_middle::ty::{AssocTag, GenericArg}; use rustc_session::config::EntryFnType; use rustc_span::{DUMMY_SP, Ident}; @@ -104,10 +104,10 @@ pub(crate) fn maybe_create_entry_wrapper( let termination_trait = tcx.require_lang_item(LangItem::Termination, None); let report = tcx .associated_items(termination_trait) - .find_by_name_and_kind( + .find_by_ident_and_kind( tcx, Ident::from_str("report"), - AssocKind::Fn, + AssocTag::Fn, termination_trait, ) .unwrap(); diff --git a/compiler/rustc_codegen_gcc/.github/workflows/ci.yml b/compiler/rustc_codegen_gcc/.github/workflows/ci.yml index f96912e6b7a83..ef024258ffc86 100644 --- a/compiler/rustc_codegen_gcc/.github/workflows/ci.yml +++ b/compiler/rustc_codegen_gcc/.github/workflows/ci.yml @@ -1,8 +1,10 @@ name: CI on: - - push - - pull_request + push: + branches: + - master + pull_request: permissions: contents: read @@ -121,3 +123,22 @@ jobs: run: | cd build_system cargo test + + # Summary job for the merge queue. + # ALL THE PREVIOUS JOBS NEED TO BE ADDED TO THE `needs` SECTION OF THIS JOB! + success: + needs: [build, duplicates, build_system] + # We need to ensure this job does *not* get skipped if its dependencies fail, + # because a skipped job is considered a success by GitHub. So we have to + # overwrite `if:`. We use `!cancelled()` to ensure the job does still not get run + # when the workflow is canceled manually. + if: ${{ !cancelled() }} + runs-on: ubuntu-latest + steps: + # Manually check the status of all dependencies. `if: failure()` does not work. + - name: Conclusion + run: | + # Print the dependent jobs to see them in the CI log + jq -C <<< '${{ toJson(needs) }}' + # Check if all jobs that we depend on (in the needs array) were successful. + jq --exit-status 'all(.result == "success")' <<< '${{ toJson(needs) }}' diff --git a/compiler/rustc_codegen_gcc/.github/workflows/failures.yml b/compiler/rustc_codegen_gcc/.github/workflows/failures.yml index d080bbfe91fe6..bc42eb1468ea2 100644 --- a/compiler/rustc_codegen_gcc/.github/workflows/failures.yml +++ b/compiler/rustc_codegen_gcc/.github/workflows/failures.yml @@ -2,7 +2,10 @@ name: Failures on: - - pull_request + push: + branches: + - master + pull_request: permissions: contents: read @@ -108,3 +111,22 @@ jobs: echo "Error: 'the compiler unexpectedly panicked' found in output logs. CI Error!!" exit 1 fi + + # Summary job for the merge queue. + # ALL THE PREVIOUS JOBS NEED TO BE ADDED TO THE `needs` SECTION OF THIS JOB! + success_failures: + needs: [build] + # We need to ensure this job does *not* get skipped if its dependencies fail, + # because a skipped job is considered a success by GitHub. So we have to + # overwrite `if:`. We use `!cancelled()` to ensure the job does still not get run + # when the workflow is canceled manually. + if: ${{ !cancelled() }} + runs-on: ubuntu-latest + steps: + # Manually check the status of all dependencies. `if: failure()` does not work. + - name: Conclusion + run: | + # Print the dependent jobs to see them in the CI log + jq -C <<< '${{ toJson(needs) }}' + # Check if all jobs that we depend on (in the needs array) were successful. + jq --exit-status 'all(.result == "success")' <<< '${{ toJson(needs) }}' diff --git a/compiler/rustc_codegen_gcc/.github/workflows/gcc12.yml b/compiler/rustc_codegen_gcc/.github/workflows/gcc12.yml index bb9e020dc6a4c..da9a1506855c3 100644 --- a/compiler/rustc_codegen_gcc/.github/workflows/gcc12.yml +++ b/compiler/rustc_codegen_gcc/.github/workflows/gcc12.yml @@ -1,8 +1,10 @@ name: CI libgccjit 12 on: - - push - - pull_request + push: + branches: + - master + pull_request: permissions: contents: read @@ -85,3 +87,22 @@ jobs: #- name: Run tests #run: | #./y.sh test --release --clean --build-sysroot ${{ matrix.commands }} --no-default-features + + # Summary job for the merge queue. + # ALL THE PREVIOUS JOBS NEED TO BE ADDED TO THE `needs` SECTION OF THIS JOB! + success_gcc12: + needs: [build] + # We need to ensure this job does *not* get skipped if its dependencies fail, + # because a skipped job is considered a success by GitHub. So we have to + # overwrite `if:`. We use `!cancelled()` to ensure the job does still not get run + # when the workflow is canceled manually. + if: ${{ !cancelled() }} + runs-on: ubuntu-latest + steps: + # Manually check the status of all dependencies. `if: failure()` does not work. + - name: Conclusion + run: | + # Print the dependent jobs to see them in the CI log + jq -C <<< '${{ toJson(needs) }}' + # Check if all jobs that we depend on (in the needs array) were successful. + jq --exit-status 'all(.result == "success")' <<< '${{ toJson(needs) }}' diff --git a/compiler/rustc_codegen_gcc/.github/workflows/m68k.yml b/compiler/rustc_codegen_gcc/.github/workflows/m68k.yml index ed1fc02bd9131..21731f7087e2c 100644 --- a/compiler/rustc_codegen_gcc/.github/workflows/m68k.yml +++ b/compiler/rustc_codegen_gcc/.github/workflows/m68k.yml @@ -3,8 +3,10 @@ name: m68k CI on: - - push - - pull_request + push: + branches: + - master + pull_request: permissions: contents: read @@ -105,3 +107,22 @@ jobs: - name: Run tests run: | ./y.sh test --release --clean --build-sysroot --sysroot-features compiler_builtins/no-f16-f128 ${{ matrix.commands }} + + # Summary job for the merge queue. + # ALL THE PREVIOUS JOBS NEED TO BE ADDED TO THE `needs` SECTION OF THIS JOB! + success_m68k: + needs: [build] + # We need to ensure this job does *not* get skipped if its dependencies fail, + # because a skipped job is considered a success by GitHub. So we have to + # overwrite `if:`. We use `!cancelled()` to ensure the job does still not get run + # when the workflow is canceled manually. + if: ${{ !cancelled() }} + runs-on: ubuntu-latest + steps: + # Manually check the status of all dependencies. `if: failure()` does not work. + - name: Conclusion + run: | + # Print the dependent jobs to see them in the CI log + jq -C <<< '${{ toJson(needs) }}' + # Check if all jobs that we depend on (in the needs array) were successful. + jq --exit-status 'all(.result == "success")' <<< '${{ toJson(needs) }}' diff --git a/compiler/rustc_codegen_gcc/.github/workflows/release.yml b/compiler/rustc_codegen_gcc/.github/workflows/release.yml index 886ce90b4713e..47a40286554e4 100644 --- a/compiler/rustc_codegen_gcc/.github/workflows/release.yml +++ b/compiler/rustc_codegen_gcc/.github/workflows/release.yml @@ -1,8 +1,10 @@ name: CI with sysroot compiled in release mode on: - - push - - pull_request + push: + branches: + - master + pull_request: permissions: contents: read @@ -82,3 +84,22 @@ jobs: echo "Test is done with LTO enabled, hence inlining should occur across crates" exit 1 fi + + # Summary job for the merge queue. + # ALL THE PREVIOUS JOBS NEED TO BE ADDED TO THE `needs` SECTION OF THIS JOB! + success_release: + needs: [build] + # We need to ensure this job does *not* get skipped if its dependencies fail, + # because a skipped job is considered a success by GitHub. So we have to + # overwrite `if:`. We use `!cancelled()` to ensure the job does still not get run + # when the workflow is canceled manually. + if: ${{ !cancelled() }} + runs-on: ubuntu-latest + steps: + # Manually check the status of all dependencies. `if: failure()` does not work. + - name: Conclusion + run: | + # Print the dependent jobs to see them in the CI log + jq -C <<< '${{ toJson(needs) }}' + # Check if all jobs that we depend on (in the needs array) were successful. + jq --exit-status 'all(.result == "success")' <<< '${{ toJson(needs) }}' diff --git a/compiler/rustc_codegen_gcc/.github/workflows/stdarch.yml b/compiler/rustc_codegen_gcc/.github/workflows/stdarch.yml index d5ae6144496f1..4b9f48e7b1835 100644 --- a/compiler/rustc_codegen_gcc/.github/workflows/stdarch.yml +++ b/compiler/rustc_codegen_gcc/.github/workflows/stdarch.yml @@ -1,8 +1,10 @@ name: stdarch tests with sysroot compiled in release mode on: - - push - - pull_request + push: + branches: + - master + pull_request: permissions: contents: read @@ -102,3 +104,22 @@ jobs: # TODO: remove --skip test_mm512_stream_ps when stdarch is updated in rustc. # TODO: remove --skip test_tile_ when it's implemented. STDARCH_TEST_EVERYTHING=1 CHANNEL=release CARGO_TARGET_X86_64_UNKNOWN_LINUX_GNU_RUNNER="${{ matrix.cargo_runner }}" TARGET=x86_64-unknown-linux-gnu CG_RUSTFLAGS="-Ainternal_features --cfg stdarch_intel_sde" ./y.sh cargo test --manifest-path build/build_sysroot/sysroot_src/library/stdarch/Cargo.toml -- --skip rtm --skip tbm --skip sse4a --skip test_mm512_stream_ps --skip test_tile_ + + # Summary job for the merge queue. + # ALL THE PREVIOUS JOBS NEED TO BE ADDED TO THE `needs` SECTION OF THIS JOB! + success_stdarch: + needs: [build] + # We need to ensure this job does *not* get skipped if its dependencies fail, + # because a skipped job is considered a success by GitHub. So we have to + # overwrite `if:`. We use `!cancelled()` to ensure the job does still not get run + # when the workflow is canceled manually. + if: ${{ !cancelled() }} + runs-on: ubuntu-latest + steps: + # Manually check the status of all dependencies. `if: failure()` does not work. + - name: Conclusion + run: | + # Print the dependent jobs to see them in the CI log + jq -C <<< '${{ toJson(needs) }}' + # Check if all jobs that we depend on (in the needs array) were successful. + jq --exit-status 'all(.result == "success")' <<< '${{ toJson(needs) }}' diff --git a/compiler/rustc_codegen_gcc/Cargo.lock b/compiler/rustc_codegen_gcc/Cargo.lock index 636e75b94a3f2..832603aa79252 100644 --- a/compiler/rustc_codegen_gcc/Cargo.lock +++ b/compiler/rustc_codegen_gcc/Cargo.lock @@ -56,18 +56,18 @@ dependencies = [ [[package]] name = "gccjit" -version = "2.4.0" +version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "72fd91f4adbf02b53cfc73c97bc33c5f253009043f30c56a5ec08dd5c8094dc8" +checksum = "2895ddec764de7ac76fe6c056050c4801a80109c066f177a00a9cc8dee02b29b" dependencies = [ "gccjit_sys", ] [[package]] name = "gccjit_sys" -version = "0.5.0" +version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0fb7b8f48a75e2cfe78c3d9a980b32771c34ffd12d196021ab3f98c49fbd2f0d" +checksum = "ac133db68db8a6a8b2c51ef4b18d8ea16682d5814c4641272fe37bbbc223d5f3" dependencies = [ "libc", ] diff --git a/compiler/rustc_codegen_gcc/Cargo.toml b/compiler/rustc_codegen_gcc/Cargo.toml index 63d37358561e6..b50f2a626d57c 100644 --- a/compiler/rustc_codegen_gcc/Cargo.toml +++ b/compiler/rustc_codegen_gcc/Cargo.toml @@ -22,7 +22,7 @@ master = ["gccjit/master"] default = ["master"] [dependencies] -gccjit = "2.4" +gccjit = "2.5" #gccjit = { git = "https://github.com/rust-lang/gccjit.rs" } # Local copy. diff --git a/compiler/rustc_codegen_gcc/Readme.md b/compiler/rustc_codegen_gcc/Readme.md index e92c16ece2f10..d0e4dbba6d355 100644 --- a/compiler/rustc_codegen_gcc/Readme.md +++ b/compiler/rustc_codegen_gcc/Readme.md @@ -23,7 +23,7 @@ A secondary goal is to check if using the gcc backend will provide any run-time ## Building **This requires a patched libgccjit in order to work. -You need to use my [fork of gcc](https://github.com/antoyo/gcc) which already includes these patches.** +You need to use my [fork of gcc](https://github.com/rust-lang/gcc) which already includes these patches.** ```bash $ cp config.example.toml config.toml @@ -40,7 +40,7 @@ to do a few more things. To build it (most of these instructions come from [here](https://gcc.gnu.org/onlinedocs/jit/internals/index.html), so don't hesitate to take a look there if you encounter an issue): ```bash -$ git clone https://github.com/antoyo/gcc +$ git clone https://github.com/rust-lang/gcc $ sudo apt install flex libmpfr-dev libgmp-dev libmpc3 libmpc-dev $ mkdir gcc-build gcc-install $ cd gcc-build diff --git a/compiler/rustc_codegen_gcc/build_system/src/clone_gcc.rs b/compiler/rustc_codegen_gcc/build_system/src/clone_gcc.rs index e28ee873eb6be..b49dd47f35219 100644 --- a/compiler/rustc_codegen_gcc/build_system/src/clone_gcc.rs +++ b/compiler/rustc_codegen_gcc/build_system/src/clone_gcc.rs @@ -61,7 +61,7 @@ pub fn run() -> Result<(), String> { return Ok(()); }; - let result = git_clone("https://github.com/antoyo/gcc", Some(&args.out_path), false)?; + let result = git_clone("https://github.com/rust-lang/gcc", Some(&args.out_path), false)?; if result.ran_clone { let gcc_commit = args.config_info.get_gcc_commit()?; println!("Checking out GCC commit `{}`...", gcc_commit); diff --git a/compiler/rustc_codegen_gcc/build_system/src/test.rs b/compiler/rustc_codegen_gcc/build_system/src/test.rs index 6c29c7d1825b9..df4ac85233b02 100644 --- a/compiler/rustc_codegen_gcc/build_system/src/test.rs +++ b/compiler/rustc_codegen_gcc/build_system/src/test.rs @@ -529,20 +529,21 @@ fn asm_tests(env: &Env, args: &TestArg) -> Result<(), String> { env.insert("COMPILETEST_FORCE_STAGE0".to_string(), "1".to_string()); - let extra = - if args.is_using_gcc_master_branch() { "" } else { " -Csymbol-mangling-version=v0" }; - - let rustc_args = &format!( - r#"-Zpanic-abort-tests \ - -Zcodegen-backend="{pwd}/target/{channel}/librustc_codegen_gcc.{dylib_ext}" \ - --sysroot "{sysroot_dir}" -Cpanic=abort{extra}"#, + let codegen_backend_path = format!( + "{pwd}/target/{channel}/librustc_codegen_gcc.{dylib_ext}", pwd = std::env::current_dir() .map_err(|error| format!("`current_dir` failed: {:?}", error))? .display(), channel = args.config_info.channel.as_str(), dylib_ext = args.config_info.dylib_ext, - sysroot_dir = args.config_info.sysroot_path, - extra = extra, + ); + + let extra = + if args.is_using_gcc_master_branch() { "" } else { " -Csymbol-mangling-version=v0" }; + + let rustc_args = format!( + "-Zpanic-abort-tests -Zcodegen-backend={codegen_backend_path} --sysroot {} -Cpanic=abort{extra}", + args.config_info.sysroot_path ); run_command_with_env( @@ -677,7 +678,7 @@ fn test_projects(env: &Env, args: &TestArg) -> Result<(), String> { fn test_libcore(env: &Env, args: &TestArg) -> Result<(), String> { // FIXME: create a function "display_if_not_quiet" or something along the line. println!("[TEST] libcore"); - let path = get_sysroot_dir().join("sysroot_src/library/core/tests"); + let path = get_sysroot_dir().join("sysroot_src/library/coretests"); let _ = remove_dir_all(path.join("target")); run_cargo_command(&[&"test"], Some(&path), env, args)?; Ok(()) diff --git a/compiler/rustc_codegen_gcc/doc/add-attribute.md b/compiler/rustc_codegen_gcc/doc/add-attribute.md index ae3bcc5e2ebe2..267c181952556 100644 --- a/compiler/rustc_codegen_gcc/doc/add-attribute.md +++ b/compiler/rustc_codegen_gcc/doc/add-attribute.md @@ -14,4 +14,4 @@ Finally, you need to update this repository by calling the relevant API you adde To test it, build `gcc`, run `cargo update -p gccjit` and then you can test the generated output for a given Rust crate. -[gccjit.rs]: https://github.com/antoyo/gccjit.rs +[gccjit.rs]: https://github.com/rust-lang/gccjit.rs diff --git a/compiler/rustc_codegen_gcc/example/example.rs b/compiler/rustc_codegen_gcc/example/example.rs index 03470b74d0a13..888fa89201e13 100644 --- a/compiler/rustc_codegen_gcc/example/example.rs +++ b/compiler/rustc_codegen_gcc/example/example.rs @@ -1,6 +1,6 @@ #![feature(no_core, unboxed_closures)] #![no_core] -#![allow(dead_code)] +#![allow(dead_code, unnecessary_transmutes)] extern crate mini_core; @@ -11,11 +11,7 @@ fn abc(a: u8) -> u8 { } fn bcd(b: bool, a: u8) -> u8 { - if b { - a * 2 - } else { - a * 3 - } + if b { a * 2 } else { a * 3 } } fn call() { diff --git a/compiler/rustc_codegen_gcc/example/mini_core.rs b/compiler/rustc_codegen_gcc/example/mini_core.rs index 5544aee9eaf16..c554a87b8256c 100644 --- a/compiler/rustc_codegen_gcc/example/mini_core.rs +++ b/compiler/rustc_codegen_gcc/example/mini_core.rs @@ -51,6 +51,10 @@ impl LegacyReceiver for &T {} impl LegacyReceiver for &mut T {} impl LegacyReceiver for Box {} +#[lang = "receiver"] +trait Receiver { +} + #[lang = "copy"] pub trait Copy {} @@ -134,6 +138,14 @@ impl Mul for u8 { } } +impl Mul for i32 { + type Output = Self; + + fn mul(self, rhs: Self) -> Self::Output { + self * rhs + } +} + impl Mul for usize { type Output = Self; @@ -142,6 +154,14 @@ impl Mul for usize { } } +impl Mul for isize { + type Output = Self; + + fn mul(self, rhs: Self) -> Self::Output { + self * rhs + } +} + #[lang = "add"] pub trait Add { type Output; @@ -165,6 +185,14 @@ impl Add for i8 { } } +impl Add for i32 { + type Output = Self; + + fn add(self, rhs: Self) -> Self { + self + rhs + } +} + impl Add for usize { type Output = Self; @@ -196,6 +224,14 @@ impl Sub for usize { } } +impl Sub for isize { + type Output = Self; + + fn sub(self, rhs: Self) -> Self { + self - rhs + } +} + impl Sub for u8 { type Output = Self; @@ -220,6 +256,14 @@ impl Sub for i16 { } } +impl Sub for i32 { + type Output = Self; + + fn sub(self, rhs: Self) -> Self { + self - rhs + } +} + #[lang = "rem"] pub trait Rem { type Output; @@ -628,6 +672,10 @@ pub mod libc { pub fn memcpy(dst: *mut u8, src: *const u8, size: usize); pub fn memmove(dst: *mut u8, src: *const u8, size: usize); pub fn strncpy(dst: *mut u8, src: *const u8, size: usize); + pub fn fflush(stream: *mut i32) -> i32; + pub fn exit(status: i32); + + pub static stdout: *mut i32; } } diff --git a/compiler/rustc_codegen_gcc/libgccjit.version b/compiler/rustc_codegen_gcc/libgccjit.version index 417fd5b03935d..125b04004b07f 100644 --- a/compiler/rustc_codegen_gcc/libgccjit.version +++ b/compiler/rustc_codegen_gcc/libgccjit.version @@ -1 +1 @@ -e607be166673a8de9fc07f6f02c60426e556c5f2 +0ea98a1365b81f7488073512c850e8ee951a4afd diff --git a/compiler/rustc_codegen_gcc/patches/0022-core-Disable-not-compiling-tests.patch b/compiler/rustc_codegen_gcc/patches/0022-core-Disable-not-compiling-tests.patch deleted file mode 100644 index 70e3e2ba7fee1..0000000000000 --- a/compiler/rustc_codegen_gcc/patches/0022-core-Disable-not-compiling-tests.patch +++ /dev/null @@ -1,44 +0,0 @@ -From af0e237f056fa838c77463381a19b0dc993c0a35 Mon Sep 17 00:00:00 2001 -From: None -Date: Sun, 1 Sep 2024 11:42:17 -0400 -Subject: [PATCH] Disable not compiling tests - ---- - library/core/tests/Cargo.toml | 14 ++++++++++++++ - library/core/tests/lib.rs | 1 + - 2 files changed, 15 insertions(+) - create mode 100644 library/core/tests/Cargo.toml - -diff --git a/library/core/tests/Cargo.toml b/library/core/tests/Cargo.toml -new file mode 100644 -index 0000000..ca326ac ---- /dev/null -+++ b/library/core/tests/Cargo.toml -@@ -0,0 +1,14 @@ -+[workspace] -+ -+[package] -+name = "coretests" -+version = "0.0.0" -+edition = "2021" -+ -+[lib] -+name = "coretests" -+path = "lib.rs" -+ -+[dependencies] -+rand = { version = "0.8.5", default-features = false } -+rand_xorshift = { version = "0.3.0", default-features = false } -diff --git a/library/core/tests/lib.rs b/library/core/tests/lib.rs -index a4a7946..ecfe43f 100644 ---- a/library/core/tests/lib.rs -+++ b/library/core/tests/lib.rs -@@ -1,4 +1,5 @@ - // tidy-alphabetical-start -+#![cfg(test)] - #![cfg_attr(target_has_atomic = "128", feature(integer_atomics))] - #![cfg_attr(test, feature(cfg_match))] - #![feature(alloc_layout_extra)] --- -2.47.1 - diff --git a/compiler/rustc_codegen_gcc/patches/0028-core-Disable-long-running-tests.patch b/compiler/rustc_codegen_gcc/patches/0028-core-Disable-long-running-tests.patch index dc1beae6d2e71..20df4245cfdf2 100644 --- a/compiler/rustc_codegen_gcc/patches/0028-core-Disable-long-running-tests.patch +++ b/compiler/rustc_codegen_gcc/patches/0028-core-Disable-long-running-tests.patch @@ -1,17 +1,17 @@ -From eb703e627e7a84f1cd8d0d87f0f69da1f0acf765 Mon Sep 17 00:00:00 2001 -From: bjorn3 -Date: Fri, 3 Dec 2021 12:16:30 +0100 +From ec2d0dc77fb484d926b45bb626b0db6a4bb0ab5c Mon Sep 17 00:00:00 2001 +From: None +Date: Thu, 27 Mar 2025 09:20:41 -0400 Subject: [PATCH] Disable long running tests --- - library/core/tests/slice.rs | 2 ++ + library/coretests/tests/slice.rs | 2 ++ 1 file changed, 2 insertions(+) -diff --git a/library/core/tests/slice.rs b/library/core/tests/slice.rs -index 8402833..84592e0 100644 ---- a/library/core/tests/slice.rs -+++ b/library/core/tests/slice.rs -@@ -2462,6 +2462,7 @@ take_tests! { +diff --git a/library/coretests/tests/slice.rs b/library/coretests/tests/slice.rs +index d17e681..fba5cd6 100644 +--- a/library/coretests/tests/slice.rs ++++ b/library/coretests/tests/slice.rs +@@ -2486,6 +2486,7 @@ split_off_tests! { #[cfg(not(miri))] // unused in Miri const EMPTY_MAX: &'static [()] = &[(); usize::MAX]; @@ -19,14 +19,14 @@ index 8402833..84592e0 100644 // can't be a constant due to const mutability rules #[cfg(not(miri))] // unused in Miri macro_rules! empty_max_mut { -@@ -2485,6 +2486,7 @@ take_tests! { - (take_mut_oob_max_range_to_inclusive, (..=usize::MAX), None, empty_max_mut!()), - (take_mut_in_bounds_max_range_from, (usize::MAX..), Some(&mut [] as _), empty_max_mut!()), +@@ -2509,6 +2510,7 @@ split_off_tests! { + (split_off_mut_oob_max_range_to_inclusive, (..=usize::MAX), None, empty_max_mut!()), + (split_off_mut_in_bounds_max_range_from, (usize::MAX..), Some(&mut [] as _), empty_max_mut!()), } +*/ #[test] fn test_slice_from_ptr_range() { -- -2.26.2.7.g19db9cfb68 +2.49.0 diff --git a/compiler/rustc_codegen_gcc/patches/cross_patches/0001-Disable-libstd-and-libtest-dylib.patch b/compiler/rustc_codegen_gcc/patches/cross_patches/0001-Disable-libstd-and-libtest-dylib.patch index c220f53040f05..fa360fe9e74e7 100644 --- a/compiler/rustc_codegen_gcc/patches/cross_patches/0001-Disable-libstd-and-libtest-dylib.patch +++ b/compiler/rustc_codegen_gcc/patches/cross_patches/0001-Disable-libstd-and-libtest-dylib.patch @@ -1,19 +1,18 @@ -From 966beefe08be6045bfcca26079b76a7a80413080 Mon Sep 17 00:00:00 2001 +From b2911e732d1bf0e28872495c4c47af1dad3c7911 Mon Sep 17 00:00:00 2001 From: None -Date: Thu, 28 Sep 2023 17:37:38 -0400 +Date: Thu, 27 Mar 2025 14:30:10 -0400 Subject: [PATCH] Disable libstd and libtest dylib --- - library/std/Cargo.toml | 2 +- - library/test/Cargo.toml | 2 +- - 2 files changed, 2 insertions(+), 2 deletions(-) + library/std/Cargo.toml | 2 +- + 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/library/std/Cargo.toml b/library/std/Cargo.toml -index 5b21355..cb0c49b 100644 +index 176da60..c183cdb 100644 --- a/library/std/Cargo.toml +++ b/library/std/Cargo.toml -@@ -9,7 +9,7 @@ description = "The Rust Standard Library" - edition = "2021" +@@ -10,7 +10,7 @@ edition = "2024" + autobenches = false [lib] -crate-type = ["dylib", "rlib"] @@ -21,3 +20,6 @@ index 5b21355..cb0c49b 100644 [dependencies] alloc = { path = "../alloc", public = true } +-- +2.49.0 + diff --git a/compiler/rustc_codegen_gcc/patches/libgccjit12/0001-core-Disable-portable-simd-test.patch b/compiler/rustc_codegen_gcc/patches/libgccjit12/0001-core-Disable-portable-simd-test.patch index 9ef5e0e4f4672..9d5b2dc537d2c 100644 --- a/compiler/rustc_codegen_gcc/patches/libgccjit12/0001-core-Disable-portable-simd-test.patch +++ b/compiler/rustc_codegen_gcc/patches/libgccjit12/0001-core-Disable-portable-simd-test.patch @@ -1,25 +1,17 @@ -From 124a11ce086952a5794d5cfbaa45175809497b81 Mon Sep 17 00:00:00 2001 +From 1a8f6b8e39f343959d4d2e6b6957a6d780ac3fc0 Mon Sep 17 00:00:00 2001 From: None -Date: Sat, 18 Nov 2023 10:50:36 -0500 -Subject: [PATCH] [core] Disable portable-simd test +Date: Thu, 27 Mar 2025 14:32:14 -0400 +Subject: [PATCH] Disable portable-simd test --- - library/core/tests/lib.rs | 2 -- - 1 file changed, 2 deletions(-) + library/coretests/tests/lib.rs | 1 - + 1 file changed, 1 deletion(-) -diff --git a/library/core/tests/lib.rs b/library/core/tests/lib.rs -index b71786c..cf484d5 100644 ---- a/library/core/tests/lib.rs -+++ b/library/core/tests/lib.rs -@@ -87,7 +87,6 @@ - #![feature(numfmt)] - #![feature(pattern)] - #![feature(pointer_is_aligned_to)] --#![feature(portable_simd)] - #![feature(ptr_metadata)] - #![feature(slice_from_ptr_range)] - #![feature(slice_internals)] -@@ -155,7 +154,6 @@ mod pin; +diff --git a/library/coretests/tests/lib.rs b/library/coretests/tests/lib.rs +index 79022fe..9223b2f 100644 +--- a/library/coretests/tests/lib.rs ++++ b/library/coretests/tests/lib.rs +@@ -165,7 +165,6 @@ mod pin; mod pin_macro; mod ptr; mod result; @@ -27,4 +19,6 @@ index b71786c..cf484d5 100644 mod slice; mod str; mod str_lossy; --- 2.45.2 +-- +2.49.0 + diff --git a/compiler/rustc_codegen_gcc/rust-toolchain b/compiler/rustc_codegen_gcc/rust-toolchain index 940b3de9f7453..452d3f22dc518 100644 --- a/compiler/rustc_codegen_gcc/rust-toolchain +++ b/compiler/rustc_codegen_gcc/rust-toolchain @@ -1,3 +1,3 @@ [toolchain] -channel = "nightly-2025-01-12" +channel = "nightly-2025-04-25" components = ["rust-src", "rustc-dev", "llvm-tools-preview"] diff --git a/compiler/rustc_codegen_gcc/src/abi.rs b/compiler/rustc_codegen_gcc/src/abi.rs index 9fe6baa3d2573..a96b18e01c087 100644 --- a/compiler/rustc_codegen_gcc/src/abi.rs +++ b/compiler/rustc_codegen_gcc/src/abi.rs @@ -9,6 +9,8 @@ use rustc_middle::ty::Ty; use rustc_middle::ty::layout::LayoutOf; #[cfg(feature = "master")] use rustc_session::config; +#[cfg(feature = "master")] +use rustc_target::callconv::Conv; use rustc_target::callconv::{ArgAttributes, CastTarget, FnAbi, PassMode}; use crate::builder::Builder; @@ -105,6 +107,8 @@ pub trait FnAbiGccExt<'gcc, 'tcx> { // TODO(antoyo): return a function pointer type instead? fn gcc_type(&self, cx: &CodegenCx<'gcc, 'tcx>) -> FnAbiGcc<'gcc>; fn ptr_to_gcc_type(&self, cx: &CodegenCx<'gcc, 'tcx>) -> Type<'gcc>; + #[cfg(feature = "master")] + fn gcc_cconv(&self, cx: &CodegenCx<'gcc, 'tcx>) -> Option>; } impl<'gcc, 'tcx> FnAbiGccExt<'gcc, 'tcx> for FnAbi<'tcx, Ty<'tcx>> { @@ -227,4 +231,47 @@ impl<'gcc, 'tcx> FnAbiGccExt<'gcc, 'tcx> for FnAbi<'tcx, Ty<'tcx>> { ); pointer_type } + + #[cfg(feature = "master")] + fn gcc_cconv(&self, cx: &CodegenCx<'gcc, 'tcx>) -> Option> { + conv_to_fn_attribute(self.conv, &cx.tcx.sess.target.arch) + } +} + +#[cfg(feature = "master")] +pub fn conv_to_fn_attribute<'gcc>(conv: Conv, arch: &str) -> Option> { + // TODO: handle the calling conventions returning None. + let attribute = match conv { + Conv::C + | Conv::Rust + | Conv::CCmseNonSecureCall + | Conv::CCmseNonSecureEntry + | Conv::RiscvInterrupt { .. } => return None, + Conv::Cold => return None, + Conv::PreserveMost => return None, + Conv::PreserveAll => return None, + Conv::GpuKernel => { + // TODO(antoyo): remove clippy allow attribute when this is implemented. + #[allow(clippy::if_same_then_else)] + if arch == "amdgpu" { + return None; + } else if arch == "nvptx64" { + return None; + } else { + panic!("Architecture {} does not support GpuKernel calling convention", arch); + } + } + Conv::AvrInterrupt => return None, + Conv::AvrNonBlockingInterrupt => return None, + Conv::ArmAapcs => return None, + Conv::Msp430Intr => return None, + Conv::X86Fastcall => return None, + Conv::X86Intr => return None, + Conv::X86Stdcall => return None, + Conv::X86ThisCall => return None, + Conv::X86VectorCall => return None, + Conv::X86_64SysV => FnAttribute::SysvAbi, + Conv::X86_64Win64 => FnAttribute::MsAbi, + }; + Some(attribute) } diff --git a/compiler/rustc_codegen_gcc/src/asm.rs b/compiler/rustc_codegen_gcc/src/asm.rs index 415f8affab901..c35337ae7ce0c 100644 --- a/compiler/rustc_codegen_gcc/src/asm.rs +++ b/compiler/rustc_codegen_gcc/src/asm.rs @@ -36,7 +36,8 @@ use crate::type_of::LayoutGccExt; // // 3. Clobbers. GCC has a separate list of clobbers, and clobbers don't have indexes. // Contrary, Rust expresses clobbers through "out" operands that aren't tied to -// a variable (`_`), and such "clobbers" do have index. +// a variable (`_`), and such "clobbers" do have index. Input operands cannot also +// be clobbered. // // 4. Furthermore, GCC Extended Asm does not support explicit register constraints // (like `out("eax")`) directly, offering so-called "local register variables" @@ -161,6 +162,16 @@ impl<'a, 'gcc, 'tcx> AsmBuilderMethods<'tcx> for Builder<'a, 'gcc, 'tcx> { // Also, we don't emit any asm operands immediately; we save them to // the one of the buffers to be emitted later. + let mut input_registers = vec![]; + + for op in rust_operands { + if let InlineAsmOperandRef::In { reg, .. } = *op + && let ConstraintOrRegister::Register(reg_name) = reg_to_gcc(reg) + { + input_registers.push(reg_name); + } + } + // 1. Normal variables (and saving operands to buffers). for (rust_idx, op) in rust_operands.iter().enumerate() { match *op { @@ -183,25 +194,39 @@ impl<'a, 'gcc, 'tcx> AsmBuilderMethods<'tcx> for Builder<'a, 'gcc, 'tcx> { continue; } (Register(reg_name), None) => { - // `clobber_abi` can add lots of clobbers that are not supported by the target, - // such as AVX-512 registers, so we just ignore unsupported registers - let is_target_supported = - reg.reg_class().supported_types(asm_arch, true).iter().any( - |&(_, feature)| { - if let Some(feature) = feature { - self.tcx - .asm_target_features(instance.def_id()) - .contains(&feature) - } else { - true // Register class is unconditionally supported - } - }, - ); - - if is_target_supported && !clobbers.contains(®_name) { - clobbers.push(reg_name); + if input_registers.contains(®_name) { + // the `clobber_abi` operand is converted into a series of + // `lateout("reg") _` operands. Of course, a user could also + // explicitly define such an output operand. + // + // GCC does not allow input registers to be clobbered, so if this out register + // is also used as an in register, do not add it to the clobbers list. + // it will be treated as a lateout register with `out_place: None` + if !late { + bug!("input registers can only be used as lateout regisers"); + } + ("r", dummy_output_type(self.cx, reg.reg_class())) + } else { + // `clobber_abi` can add lots of clobbers that are not supported by the target, + // such as AVX-512 registers, so we just ignore unsupported registers + let is_target_supported = + reg.reg_class().supported_types(asm_arch, true).iter().any( + |&(_, feature)| { + if let Some(feature) = feature { + self.tcx + .asm_target_features(instance.def_id()) + .contains(&feature) + } else { + true // Register class is unconditionally supported + } + }, + ); + + if is_target_supported && !clobbers.contains(®_name) { + clobbers.push(reg_name); + } + continue; } - continue; } }; @@ -230,13 +255,10 @@ impl<'a, 'gcc, 'tcx> AsmBuilderMethods<'tcx> for Builder<'a, 'gcc, 'tcx> { } InlineAsmOperandRef::InOut { reg, late, in_value, out_place } => { - let constraint = - if let ConstraintOrRegister::Constraint(constraint) = reg_to_gcc(reg) { - constraint - } else { - // left for the next pass - continue; - }; + let ConstraintOrRegister::Constraint(constraint) = reg_to_gcc(reg) else { + // left for the next pass + continue; + }; // Rustc frontend guarantees that input and output types are "compatible", // so we can just use input var's type for the output variable. @@ -589,114 +611,127 @@ fn estimate_template_length( } /// Converts a register class to a GCC constraint code. -fn reg_to_gcc(reg: InlineAsmRegOrRegClass) -> ConstraintOrRegister { - let constraint = match reg { - // For vector registers LLVM wants the register name to match the type size. +fn reg_to_gcc(reg_or_reg_class: InlineAsmRegOrRegClass) -> ConstraintOrRegister { + match reg_or_reg_class { InlineAsmRegOrRegClass::Reg(reg) => { - match reg { - InlineAsmReg::X86(_) => { - // TODO(antoyo): add support for vector register. - // - // // For explicit registers, we have to create a register variable: https://stackoverflow.com/a/31774784/389119 - return ConstraintOrRegister::Register(match reg.name() { - // Some of registers' names does not map 1-1 from rust to gcc - "st(0)" => "st", + ConstraintOrRegister::Register(explicit_reg_to_gcc(reg)) + } + InlineAsmRegOrRegClass::RegClass(reg_class) => { + ConstraintOrRegister::Constraint(reg_class_to_gcc(reg_class)) + } + } +} - name => name, - }); +fn explicit_reg_to_gcc(reg: InlineAsmReg) -> &'static str { + // For explicit registers, we have to create a register variable: https://stackoverflow.com/a/31774784/389119 + match reg { + InlineAsmReg::X86(reg) => { + // TODO(antoyo): add support for vector register. + match reg.reg_class() { + X86InlineAsmRegClass::reg_byte => { + // GCC does not support the `b` suffix, so we just strip it + // see https://github.com/rust-lang/rustc_codegen_gcc/issues/485 + reg.name().trim_end_matches('b') } + _ => match reg.name() { + // Some of registers' names does not map 1-1 from rust to gcc + "st(0)" => "st", - _ => unimplemented!(), + name => name, + }, } } - // They can be retrieved from https://gcc.gnu.org/onlinedocs/gcc/Machine-Constraints.html - InlineAsmRegOrRegClass::RegClass(reg) => match reg { - InlineAsmRegClass::AArch64(AArch64InlineAsmRegClass::reg) => "r", - InlineAsmRegClass::AArch64(AArch64InlineAsmRegClass::vreg) => "w", - InlineAsmRegClass::AArch64(AArch64InlineAsmRegClass::vreg_low16) => "x", - InlineAsmRegClass::AArch64(AArch64InlineAsmRegClass::preg) => { - unreachable!("clobber-only") - } - InlineAsmRegClass::Arm(ArmInlineAsmRegClass::reg) => "r", - InlineAsmRegClass::Arm(ArmInlineAsmRegClass::sreg) - | InlineAsmRegClass::Arm(ArmInlineAsmRegClass::dreg_low16) - | InlineAsmRegClass::Arm(ArmInlineAsmRegClass::qreg_low8) - | InlineAsmRegClass::Arm(ArmInlineAsmRegClass::sreg_low16) - | InlineAsmRegClass::Arm(ArmInlineAsmRegClass::dreg_low8) - | InlineAsmRegClass::Arm(ArmInlineAsmRegClass::qreg_low4) - | InlineAsmRegClass::Arm(ArmInlineAsmRegClass::dreg) - | InlineAsmRegClass::Arm(ArmInlineAsmRegClass::qreg) => "t", - InlineAsmRegClass::Avr(AvrInlineAsmRegClass::reg) => "r", - InlineAsmRegClass::Avr(AvrInlineAsmRegClass::reg_upper) => "d", - InlineAsmRegClass::Avr(AvrInlineAsmRegClass::reg_pair) => "r", - InlineAsmRegClass::Avr(AvrInlineAsmRegClass::reg_iw) => "w", - InlineAsmRegClass::Avr(AvrInlineAsmRegClass::reg_ptr) => "e", - InlineAsmRegClass::Bpf(BpfInlineAsmRegClass::reg) => "r", - InlineAsmRegClass::Bpf(BpfInlineAsmRegClass::wreg) => "w", - InlineAsmRegClass::Hexagon(HexagonInlineAsmRegClass::reg) => "r", - InlineAsmRegClass::Hexagon(HexagonInlineAsmRegClass::preg) => { - unreachable!("clobber-only") - } - InlineAsmRegClass::LoongArch(LoongArchInlineAsmRegClass::reg) => "r", - InlineAsmRegClass::LoongArch(LoongArchInlineAsmRegClass::freg) => "f", - InlineAsmRegClass::M68k(M68kInlineAsmRegClass::reg) => "r", - InlineAsmRegClass::M68k(M68kInlineAsmRegClass::reg_addr) => "a", - InlineAsmRegClass::M68k(M68kInlineAsmRegClass::reg_data) => "d", - InlineAsmRegClass::CSKY(CSKYInlineAsmRegClass::reg) => "r", - InlineAsmRegClass::CSKY(CSKYInlineAsmRegClass::freg) => "f", - InlineAsmRegClass::Mips(MipsInlineAsmRegClass::reg) => "d", // more specific than "r" - InlineAsmRegClass::Mips(MipsInlineAsmRegClass::freg) => "f", - InlineAsmRegClass::Msp430(Msp430InlineAsmRegClass::reg) => "r", - // https://github.com/gcc-mirror/gcc/blob/master/gcc/config/nvptx/nvptx.md -> look for - // "define_constraint". - InlineAsmRegClass::Nvptx(NvptxInlineAsmRegClass::reg16) => "h", - InlineAsmRegClass::Nvptx(NvptxInlineAsmRegClass::reg32) => "r", - InlineAsmRegClass::Nvptx(NvptxInlineAsmRegClass::reg64) => "l", - - InlineAsmRegClass::PowerPC(PowerPCInlineAsmRegClass::reg) => "r", - InlineAsmRegClass::PowerPC(PowerPCInlineAsmRegClass::reg_nonzero) => "b", - InlineAsmRegClass::PowerPC(PowerPCInlineAsmRegClass::freg) => "f", - InlineAsmRegClass::PowerPC(PowerPCInlineAsmRegClass::vreg) => "v", - InlineAsmRegClass::PowerPC(PowerPCInlineAsmRegClass::cr) - | InlineAsmRegClass::PowerPC(PowerPCInlineAsmRegClass::xer) => { - unreachable!("clobber-only") - } - InlineAsmRegClass::RiscV(RiscVInlineAsmRegClass::reg) => "r", - InlineAsmRegClass::RiscV(RiscVInlineAsmRegClass::freg) => "f", - InlineAsmRegClass::RiscV(RiscVInlineAsmRegClass::vreg) => { - unreachable!("clobber-only") - } - InlineAsmRegClass::X86(X86InlineAsmRegClass::reg) => "r", - InlineAsmRegClass::X86(X86InlineAsmRegClass::reg_abcd) => "Q", - InlineAsmRegClass::X86(X86InlineAsmRegClass::reg_byte) => "q", - InlineAsmRegClass::X86(X86InlineAsmRegClass::xmm_reg) - | InlineAsmRegClass::X86(X86InlineAsmRegClass::ymm_reg) => "x", - InlineAsmRegClass::X86(X86InlineAsmRegClass::zmm_reg) => "v", - InlineAsmRegClass::X86(X86InlineAsmRegClass::kreg) => "Yk", - InlineAsmRegClass::X86( - X86InlineAsmRegClass::kreg0 - | X86InlineAsmRegClass::x87_reg - | X86InlineAsmRegClass::mmx_reg - | X86InlineAsmRegClass::tmm_reg, - ) => unreachable!("clobber-only"), - InlineAsmRegClass::SpirV(SpirVInlineAsmRegClass::reg) => { - bug!("GCC backend does not support SPIR-V") - } - InlineAsmRegClass::Wasm(WasmInlineAsmRegClass::local) => "r", - InlineAsmRegClass::S390x(S390xInlineAsmRegClass::reg) => "r", - InlineAsmRegClass::S390x(S390xInlineAsmRegClass::reg_addr) => "a", - InlineAsmRegClass::S390x(S390xInlineAsmRegClass::freg) => "f", - InlineAsmRegClass::S390x(S390xInlineAsmRegClass::vreg) => "v", - InlineAsmRegClass::S390x(S390xInlineAsmRegClass::areg) => { - unreachable!("clobber-only") - } - InlineAsmRegClass::Sparc(SparcInlineAsmRegClass::reg) => "r", - InlineAsmRegClass::Sparc(SparcInlineAsmRegClass::yreg) => unreachable!("clobber-only"), - InlineAsmRegClass::Err => unreachable!(), - }, - }; - ConstraintOrRegister::Constraint(constraint) + _ => unimplemented!(), + } +} + +/// They can be retrieved from https://gcc.gnu.org/onlinedocs/gcc/Machine-Constraints.html +fn reg_class_to_gcc(reg_class: InlineAsmRegClass) -> &'static str { + match reg_class { + InlineAsmRegClass::AArch64(AArch64InlineAsmRegClass::reg) => "r", + InlineAsmRegClass::AArch64(AArch64InlineAsmRegClass::vreg) => "w", + InlineAsmRegClass::AArch64(AArch64InlineAsmRegClass::vreg_low16) => "x", + InlineAsmRegClass::AArch64(AArch64InlineAsmRegClass::preg) => { + unreachable!("clobber-only") + } + InlineAsmRegClass::Arm(ArmInlineAsmRegClass::reg) => "r", + InlineAsmRegClass::Arm(ArmInlineAsmRegClass::sreg) + | InlineAsmRegClass::Arm(ArmInlineAsmRegClass::dreg_low16) + | InlineAsmRegClass::Arm(ArmInlineAsmRegClass::qreg_low8) + | InlineAsmRegClass::Arm(ArmInlineAsmRegClass::sreg_low16) + | InlineAsmRegClass::Arm(ArmInlineAsmRegClass::dreg_low8) + | InlineAsmRegClass::Arm(ArmInlineAsmRegClass::qreg_low4) + | InlineAsmRegClass::Arm(ArmInlineAsmRegClass::dreg) + | InlineAsmRegClass::Arm(ArmInlineAsmRegClass::qreg) => "t", + InlineAsmRegClass::Avr(AvrInlineAsmRegClass::reg) => "r", + InlineAsmRegClass::Avr(AvrInlineAsmRegClass::reg_upper) => "d", + InlineAsmRegClass::Avr(AvrInlineAsmRegClass::reg_pair) => "r", + InlineAsmRegClass::Avr(AvrInlineAsmRegClass::reg_iw) => "w", + InlineAsmRegClass::Avr(AvrInlineAsmRegClass::reg_ptr) => "e", + InlineAsmRegClass::Bpf(BpfInlineAsmRegClass::reg) => "r", + InlineAsmRegClass::Bpf(BpfInlineAsmRegClass::wreg) => "w", + InlineAsmRegClass::Hexagon(HexagonInlineAsmRegClass::reg) => "r", + InlineAsmRegClass::Hexagon(HexagonInlineAsmRegClass::preg) => { + unreachable!("clobber-only") + } + InlineAsmRegClass::LoongArch(LoongArchInlineAsmRegClass::reg) => "r", + InlineAsmRegClass::LoongArch(LoongArchInlineAsmRegClass::freg) => "f", + InlineAsmRegClass::M68k(M68kInlineAsmRegClass::reg) => "r", + InlineAsmRegClass::M68k(M68kInlineAsmRegClass::reg_addr) => "a", + InlineAsmRegClass::M68k(M68kInlineAsmRegClass::reg_data) => "d", + InlineAsmRegClass::CSKY(CSKYInlineAsmRegClass::reg) => "r", + InlineAsmRegClass::CSKY(CSKYInlineAsmRegClass::freg) => "f", + InlineAsmRegClass::Mips(MipsInlineAsmRegClass::reg) => "d", // more specific than "r" + InlineAsmRegClass::Mips(MipsInlineAsmRegClass::freg) => "f", + InlineAsmRegClass::Msp430(Msp430InlineAsmRegClass::reg) => "r", + // https://github.com/gcc-mirror/gcc/blob/master/gcc/config/nvptx/nvptx.md -> look for + // "define_constraint". + InlineAsmRegClass::Nvptx(NvptxInlineAsmRegClass::reg16) => "h", + InlineAsmRegClass::Nvptx(NvptxInlineAsmRegClass::reg32) => "r", + InlineAsmRegClass::Nvptx(NvptxInlineAsmRegClass::reg64) => "l", + + InlineAsmRegClass::PowerPC(PowerPCInlineAsmRegClass::reg) => "r", + InlineAsmRegClass::PowerPC(PowerPCInlineAsmRegClass::reg_nonzero) => "b", + InlineAsmRegClass::PowerPC(PowerPCInlineAsmRegClass::freg) => "f", + InlineAsmRegClass::PowerPC(PowerPCInlineAsmRegClass::vreg) => "v", + InlineAsmRegClass::PowerPC(PowerPCInlineAsmRegClass::cr) + | InlineAsmRegClass::PowerPC(PowerPCInlineAsmRegClass::xer) => { + unreachable!("clobber-only") + } + InlineAsmRegClass::RiscV(RiscVInlineAsmRegClass::reg) => "r", + InlineAsmRegClass::RiscV(RiscVInlineAsmRegClass::freg) => "f", + InlineAsmRegClass::RiscV(RiscVInlineAsmRegClass::vreg) => { + unreachable!("clobber-only") + } + InlineAsmRegClass::X86(X86InlineAsmRegClass::reg) => "r", + InlineAsmRegClass::X86(X86InlineAsmRegClass::reg_abcd) => "Q", + InlineAsmRegClass::X86(X86InlineAsmRegClass::reg_byte) => "q", + InlineAsmRegClass::X86(X86InlineAsmRegClass::xmm_reg) + | InlineAsmRegClass::X86(X86InlineAsmRegClass::ymm_reg) => "x", + InlineAsmRegClass::X86(X86InlineAsmRegClass::zmm_reg) => "v", + InlineAsmRegClass::X86(X86InlineAsmRegClass::kreg) => "Yk", + InlineAsmRegClass::X86( + X86InlineAsmRegClass::kreg0 + | X86InlineAsmRegClass::x87_reg + | X86InlineAsmRegClass::mmx_reg + | X86InlineAsmRegClass::tmm_reg, + ) => unreachable!("clobber-only"), + InlineAsmRegClass::SpirV(SpirVInlineAsmRegClass::reg) => { + bug!("GCC backend does not support SPIR-V") + } + InlineAsmRegClass::Wasm(WasmInlineAsmRegClass::local) => "r", + InlineAsmRegClass::S390x(S390xInlineAsmRegClass::reg) => "r", + InlineAsmRegClass::S390x(S390xInlineAsmRegClass::reg_addr) => "a", + InlineAsmRegClass::S390x(S390xInlineAsmRegClass::freg) => "f", + InlineAsmRegClass::S390x(S390xInlineAsmRegClass::vreg) => "v", + InlineAsmRegClass::S390x(S390xInlineAsmRegClass::areg) => { + unreachable!("clobber-only") + } + InlineAsmRegClass::Sparc(SparcInlineAsmRegClass::reg) => "r", + InlineAsmRegClass::Sparc(SparcInlineAsmRegClass::yreg) => unreachable!("clobber-only"), + InlineAsmRegClass::Err => unreachable!(), + } } /// Type to use for outputs that are discarded. It doesn't really matter what @@ -794,7 +829,7 @@ fn dummy_output_type<'gcc, 'tcx>(cx: &CodegenCx<'gcc, 'tcx>, reg: InlineAsmRegCl impl<'gcc, 'tcx> AsmCodegenMethods<'tcx> for CodegenCx<'gcc, 'tcx> { fn codegen_global_asm( - &self, + &mut self, template: &[InlineAsmTemplatePiece], operands: &[GlobalAsmOperandRef<'tcx>], options: InlineAsmOptions, diff --git a/compiler/rustc_codegen_gcc/src/back/lto.rs b/compiler/rustc_codegen_gcc/src/back/lto.rs index e5221c7da3197..faeb2643ecb88 100644 --- a/compiler/rustc_codegen_gcc/src/back/lto.rs +++ b/compiler/rustc_codegen_gcc/src/back/lto.rs @@ -44,7 +44,11 @@ use crate::{GccCodegenBackend, GccContext, SyncContext, to_gcc_opt_level}; pub fn crate_type_allows_lto(crate_type: CrateType) -> bool { match crate_type { - CrateType::Executable | CrateType::Dylib | CrateType::Staticlib | CrateType::Cdylib => true, + CrateType::Executable + | CrateType::Dylib + | CrateType::Staticlib + | CrateType::Cdylib + | CrateType::Sdylib => true, CrateType::Rlib | CrateType::ProcMacro => false, } } diff --git a/compiler/rustc_codegen_gcc/src/back/write.rs b/compiler/rustc_codegen_gcc/src/back/write.rs index 51c5ba73e32bc..16c895322e88a 100644 --- a/compiler/rustc_codegen_gcc/src/back/write.rs +++ b/compiler/rustc_codegen_gcc/src/back/write.rs @@ -24,19 +24,23 @@ pub(crate) unsafe fn codegen( { let context = &module.module_llvm.context; - let module_name = module.name.clone(); - let should_combine_object_files = module.module_llvm.should_combine_object_files; - let module_name = Some(&module_name[..]); - // NOTE: Only generate object files with GIMPLE when this environment variable is set for // now because this requires a particular setup (same gcc/lto1/lto-wrapper commit as libgccjit). // TODO(antoyo): remove this environment variable. let fat_lto = env::var("EMBED_LTO_BITCODE").as_deref() == Ok("1"); - let bc_out = cgcx.output_filenames.temp_path(OutputType::Bitcode, module_name); - let obj_out = cgcx.output_filenames.temp_path(OutputType::Object, module_name); + let bc_out = cgcx.output_filenames.temp_path_for_cgu( + OutputType::Bitcode, + &module.name, + cgcx.invocation_temp.as_deref(), + ); + let obj_out = cgcx.output_filenames.temp_path_for_cgu( + OutputType::Object, + &module.name, + cgcx.invocation_temp.as_deref(), + ); if config.bitcode_needed() { if fat_lto { @@ -117,14 +121,22 @@ pub(crate) unsafe fn codegen( } if config.emit_ir { - let out = cgcx.output_filenames.temp_path(OutputType::LlvmAssembly, module_name); + let out = cgcx.output_filenames.temp_path_for_cgu( + OutputType::LlvmAssembly, + &module.name, + cgcx.invocation_temp.as_deref(), + ); std::fs::write(out, "").expect("write file"); } if config.emit_asm { let _timer = cgcx.prof.generic_activity_with_arg("GCC_module_codegen_emit_asm", &*module.name); - let path = cgcx.output_filenames.temp_path(OutputType::Assembly, module_name); + let path = cgcx.output_filenames.temp_path_for_cgu( + OutputType::Assembly, + &module.name, + cgcx.invocation_temp.as_deref(), + ); context.compile_to_file(OutputKind::Assembler, path.to_str().expect("path to str")); } @@ -238,6 +250,7 @@ pub(crate) unsafe fn codegen( config.emit_asm, config.emit_ir, &cgcx.output_filenames, + cgcx.invocation_temp.as_deref(), )) } diff --git a/compiler/rustc_codegen_gcc/src/base.rs b/compiler/rustc_codegen_gcc/src/base.rs index 9b495174a3fab..a9d7808c833bb 100644 --- a/compiler/rustc_codegen_gcc/src/base.rs +++ b/compiler/rustc_codegen_gcc/src/base.rs @@ -206,7 +206,7 @@ pub fn compile_codegen_unit( let f128_type_supported = target_info.supports_target_dependent_type(CType::Float128); let u128_type_supported = target_info.supports_target_dependent_type(CType::UInt128t); // TODO: improve this to avoid passing that many arguments. - let cx = CodegenCx::new( + let mut cx = CodegenCx::new( &context, cgu, tcx, @@ -223,8 +223,8 @@ pub fn compile_codegen_unit( } // ... and now that we have everything pre-defined, fill out those definitions. - for &(mono_item, _) in &mono_items { - mono_item.define::>(&cx); + for &(mono_item, item_data) in &mono_items { + mono_item.define::>(&mut cx, item_data); } // If this codegen unit contains the main function, also create the diff --git a/compiler/rustc_codegen_gcc/src/builder.rs b/compiler/rustc_codegen_gcc/src/builder.rs index 6573b5b165e61..6720f6186d16e 100644 --- a/compiler/rustc_codegen_gcc/src/builder.rs +++ b/compiler/rustc_codegen_gcc/src/builder.rs @@ -45,7 +45,7 @@ enum ExtremumOperation { Min, } -pub struct Builder<'a: 'gcc, 'gcc, 'tcx> { +pub struct Builder<'a, 'gcc, 'tcx> { pub cx: &'a CodegenCx<'gcc, 'tcx>, pub block: Block<'gcc>, pub location: Option>, @@ -368,16 +368,8 @@ impl<'a, 'gcc, 'tcx> Builder<'a, 'gcc, 'tcx> { let previous_arg_count = args.len(); let orig_args = args; let args = { - let function_address_names = self.function_address_names.borrow(); - let original_function_name = function_address_names.get(&func_ptr); func_ptr = llvm::adjust_function(self.context, &func_name, func_ptr, args); - llvm::adjust_intrinsic_arguments( - self, - gcc_func, - args.into(), - &func_name, - original_function_name, - ) + llvm::adjust_intrinsic_arguments(self, gcc_func, args.into(), &func_name) }; let args_adjusted = args.len() != previous_arg_count; let args = self.check_ptr_call("call", func_ptr, &args); @@ -1271,7 +1263,50 @@ impl<'a, 'gcc, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'gcc, 'tcx> { } fn fcmp(&mut self, op: RealPredicate, lhs: RValue<'gcc>, rhs: RValue<'gcc>) -> RValue<'gcc> { - self.context.new_comparison(self.location, op.to_gcc_comparison(), lhs, rhs) + // LLVM has a concept of "unordered compares", where eg ULT returns true if either the two + // arguments are unordered (i.e. either is NaN), or the lhs is less than the rhs. GCC does + // not natively have this concept, so in some cases we must manually handle NaNs + let must_handle_nan = match op { + RealPredicate::RealPredicateFalse => unreachable!(), + RealPredicate::RealOEQ => false, + RealPredicate::RealOGT => false, + RealPredicate::RealOGE => false, + RealPredicate::RealOLT => false, + RealPredicate::RealOLE => false, + RealPredicate::RealONE => false, + RealPredicate::RealORD => unreachable!(), + RealPredicate::RealUNO => unreachable!(), + RealPredicate::RealUEQ => false, + RealPredicate::RealUGT => true, + RealPredicate::RealUGE => true, + RealPredicate::RealULT => true, + RealPredicate::RealULE => true, + RealPredicate::RealUNE => false, + RealPredicate::RealPredicateTrue => unreachable!(), + }; + + let cmp = self.context.new_comparison(self.location, op.to_gcc_comparison(), lhs, rhs); + + if must_handle_nan { + let is_nan = self.context.new_binary_op( + self.location, + BinaryOp::LogicalOr, + self.cx.bool_type, + // compare a value to itself to check whether it is NaN + self.context.new_comparison(self.location, ComparisonOp::NotEquals, lhs, lhs), + self.context.new_comparison(self.location, ComparisonOp::NotEquals, rhs, rhs), + ); + + self.context.new_binary_op( + self.location, + BinaryOp::LogicalOr, + self.cx.bool_type, + is_nan, + cmp, + ) + } else { + cmp + } } /* Miscellaneous instructions */ diff --git a/compiler/rustc_codegen_gcc/src/common.rs b/compiler/rustc_codegen_gcc/src/common.rs index a63da6b6e27d8..918195364ffee 100644 --- a/compiler/rustc_codegen_gcc/src/common.rs +++ b/compiler/rustc_codegen_gcc/src/common.rs @@ -33,12 +33,11 @@ impl<'gcc, 'tcx> CodegenCx<'gcc, 'tcx> { } pub fn const_bitcast(&self, value: RValue<'gcc>, typ: Type<'gcc>) -> RValue<'gcc> { - if value.get_type() == self.bool_type.make_pointer() { - if let Some(pointee) = typ.get_pointee() { - if pointee.dyncast_vector().is_some() { - panic!() - } - } + if value.get_type() == self.bool_type.make_pointer() + && let Some(pointee) = typ.get_pointee() + && pointee.dyncast_vector().is_some() + { + panic!() } // NOTE: since bitcast makes a value non-constant, don't bitcast if not necessary as some // SIMD builtins require a constant value. diff --git a/compiler/rustc_codegen_gcc/src/consts.rs b/compiler/rustc_codegen_gcc/src/consts.rs index 474475f311f71..0a67bd7bc71af 100644 --- a/compiler/rustc_codegen_gcc/src/consts.rs +++ b/compiler/rustc_codegen_gcc/src/consts.rs @@ -131,7 +131,7 @@ impl<'gcc, 'tcx> StaticCodegenMethods for CodegenCx<'gcc, 'tcx> { // will use load-unaligned instructions instead, and thus avoiding the crash. // // We could remove this hack whenever we decide to drop macOS 10.10 support. - if self.tcx.sess.target.options.is_like_osx { + if self.tcx.sess.target.options.is_like_darwin { // The `inspect` method is okay here because we checked for provenance, and // because we are doing this access to inspect the final interpreter state // (not as part of the interpreter execution). @@ -242,10 +242,10 @@ impl<'gcc, 'tcx> CodegenCx<'gcc, 'tcx> { let fn_attrs = self.tcx.codegen_fn_attrs(def_id); let global = if def_id.is_local() && !self.tcx.is_foreign_item(def_id) { - if let Some(global) = self.get_declared_value(sym) { - if self.val_ty(global) != self.type_ptr_to(gcc_type) { - span_bug!(self.tcx.def_span(def_id), "Conflicting types for static"); - } + if let Some(global) = self.get_declared_value(sym) + && self.val_ty(global) != self.type_ptr_to(gcc_type) + { + span_bug!(self.tcx.def_span(def_id), "Conflicting types for static"); } let is_tls = fn_attrs.flags.contains(CodegenFnAttrFlags::THREAD_LOCAL); diff --git a/compiler/rustc_codegen_gcc/src/context.rs b/compiler/rustc_codegen_gcc/src/context.rs index 1e1f577bb3a15..73718994e6417 100644 --- a/compiler/rustc_codegen_gcc/src/context.rs +++ b/compiler/rustc_codegen_gcc/src/context.rs @@ -23,6 +23,8 @@ use rustc_target::spec::{ HasTargetSpec, HasWasmCAbiOpt, HasX86AbiOpt, Target, TlsModel, WasmCAbi, X86Abi, }; +#[cfg(feature = "master")] +use crate::abi::conv_to_fn_attribute; use crate::callee::get_fn; use crate::common::SignType; @@ -213,33 +215,7 @@ impl<'gcc, 'tcx> CodegenCx<'gcc, 'tcx> { let bool_type = context.new_type::(); let mut functions = FxHashMap::default(); - let builtins = [ - "__builtin_unreachable", - "abort", - "__builtin_expect", /*"__builtin_expect_with_probability",*/ - "__builtin_constant_p", - "__builtin_add_overflow", - "__builtin_mul_overflow", - "__builtin_saddll_overflow", - /*"__builtin_sadd_overflow",*/ - "__builtin_smulll_overflow", /*"__builtin_smul_overflow",*/ - "__builtin_ssubll_overflow", - /*"__builtin_ssub_overflow",*/ "__builtin_sub_overflow", - "__builtin_uaddll_overflow", - "__builtin_uadd_overflow", - "__builtin_umulll_overflow", - "__builtin_umul_overflow", - "__builtin_usubll_overflow", - "__builtin_usub_overflow", - "__builtin_powif", - "__builtin_powi", - "fabsf", - "fabs", - "copysignf", - "copysign", - "nearbyintf", - "nearbyint", - ]; + let builtins = ["abort"]; for builtin in builtins.iter() { functions.insert(builtin.to_string(), context.get_builtin_function(builtin)); @@ -509,7 +485,11 @@ impl<'gcc, 'tcx> MiscCodegenMethods<'tcx> for CodegenCx<'gcc, 'tcx> { fn declare_c_main(&self, fn_type: Self::Type) -> Option { let entry_name = self.sess().target.entry_name.as_ref(); if !self.functions.borrow().contains_key(entry_name) { - Some(self.declare_entry_fn(entry_name, fn_type, ())) + #[cfg(feature = "master")] + let conv = conv_to_fn_attribute(self.sess().target.entry_abi, &self.sess().target.arch); + #[cfg(not(feature = "master"))] + let conv = None; + Some(self.declare_entry_fn(entry_name, fn_type, conv)) } else { // If the symbol already exists, it is an error: for example, the user wrote // #[no_mangle] extern "C" fn main(..) {..} @@ -605,7 +585,10 @@ impl<'b, 'tcx> CodegenCx<'b, 'tcx> { let mut name = String::with_capacity(prefix.len() + 6); name.push_str(prefix); name.push('.'); - name.push_str(&(idx as u64).to_base(ALPHANUMERIC_ONLY)); + // Offset the index by the base so that always at least two characters + // are generated. This avoids cases where the suffix is interpreted as + // size by the assembler (for m68k: .b, .w, .l). + name.push_str(&(idx as u64 + ALPHANUMERIC_ONLY as u64).to_base(ALPHANUMERIC_ONLY)); name } } diff --git a/compiler/rustc_codegen_gcc/src/debuginfo.rs b/compiler/rustc_codegen_gcc/src/debuginfo.rs index 55e01687400aa..f3ced86439527 100644 --- a/compiler/rustc_codegen_gcc/src/debuginfo.rs +++ b/compiler/rustc_codegen_gcc/src/debuginfo.rs @@ -126,14 +126,15 @@ fn make_mir_scope<'gcc, 'tcx>( return; }; - if let Some(ref vars) = *variables { - if !vars.contains(scope) && scope_data.inlined.is_none() { - // Do not create a DIScope if there are no variables defined in this - // MIR `SourceScope`, and it's not `inlined`, to avoid debuginfo bloat. - debug_context.scopes[scope] = parent_scope; - instantiated.insert(scope); - return; - } + if let Some(ref vars) = *variables + && !vars.contains(scope) + && scope_data.inlined.is_none() + { + // Do not create a DIScope if there are no variables defined in this + // MIR `SourceScope`, and it's not `inlined`, to avoid debuginfo bloat. + debug_context.scopes[scope] = parent_scope; + instantiated.insert(scope); + return; } let loc = cx.lookup_debug_loc(scope_data.span.lo()); diff --git a/compiler/rustc_codegen_gcc/src/declare.rs b/compiler/rustc_codegen_gcc/src/declare.rs index 7cdbe3c0c6290..c1ca3eb849e88 100644 --- a/compiler/rustc_codegen_gcc/src/declare.rs +++ b/compiler/rustc_codegen_gcc/src/declare.rs @@ -58,7 +58,7 @@ impl<'gcc, 'tcx> CodegenCx<'gcc, 'tcx> { variadic: bool, ) -> Function<'gcc> { self.linkage.set(FunctionType::Extern); - declare_raw_fn(self, name, () /*llvm::CCallConv*/, return_type, params, variadic) + declare_raw_fn(self, name, None, return_type, params, variadic) } pub fn declare_global( @@ -92,7 +92,8 @@ impl<'gcc, 'tcx> CodegenCx<'gcc, 'tcx> { &self, name: &str, _fn_type: Type<'gcc>, - callconv: (), /*llvm::CCallConv*/ + #[cfg(feature = "master")] callconv: Option>, + #[cfg(not(feature = "master"))] callconv: Option<()>, ) -> RValue<'gcc> { // TODO(antoyo): use the fn_type parameter. let const_string = self.context.new_type::().make_pointer().make_pointer(); @@ -123,14 +124,11 @@ impl<'gcc, 'tcx> CodegenCx<'gcc, 'tcx> { #[cfg(feature = "master")] fn_attributes, } = fn_abi.gcc_type(self); - let func = declare_raw_fn( - self, - name, - (), /*fn_abi.llvm_cconv()*/ - return_type, - &arguments_type, - is_c_variadic, - ); + #[cfg(feature = "master")] + let conv = fn_abi.gcc_cconv(self); + #[cfg(not(feature = "master"))] + let conv = None; + let func = declare_raw_fn(self, name, conv, return_type, &arguments_type, is_c_variadic); self.on_stack_function_params.borrow_mut().insert(func, on_stack_param_indices); #[cfg(feature = "master")] for fn_attr in fn_attributes { @@ -162,7 +160,8 @@ impl<'gcc, 'tcx> CodegenCx<'gcc, 'tcx> { fn declare_raw_fn<'gcc>( cx: &CodegenCx<'gcc, '_>, name: &str, - _callconv: (), /*llvm::CallConv*/ + #[cfg(feature = "master")] callconv: Option>, + #[cfg(not(feature = "master"))] _callconv: Option<()>, return_type: Type<'gcc>, param_types: &[Type<'gcc>], variadic: bool, @@ -192,6 +191,10 @@ fn declare_raw_fn<'gcc>( let name = &mangle_name(name); let func = cx.context.new_function(None, cx.linkage.get(), return_type, ¶ms, name, variadic); + #[cfg(feature = "master")] + if let Some(attribute) = callconv { + func.add_attribute(attribute); + } cx.functions.borrow_mut().insert(name.to_string(), func); #[cfg(feature = "master")] diff --git a/compiler/rustc_codegen_gcc/src/gcc_util.rs b/compiler/rustc_codegen_gcc/src/gcc_util.rs index 6eae0c24f48ad..2b053abdd190a 100644 --- a/compiler/rustc_codegen_gcc/src/gcc_util.rs +++ b/compiler/rustc_codegen_gcc/src/gcc_util.rs @@ -55,7 +55,7 @@ pub(crate) fn global_gcc_features(sess: &Session, diagnostics: bool) -> Vec. all_rust_features.push((false, feature)); } else if !feature.is_empty() && diagnostics { @@ -136,14 +136,12 @@ pub(crate) fn global_gcc_features(sess: &Session, diagnostics: bool) -> Vec(sess: &Session, s: &'a str) -> SmallVec<[&'a str; 2]> fn arch_to_gcc(name: &str) -> &str { match name { + "M68000" => "68000", "M68020" => "68020", _ => name, } diff --git a/compiler/rustc_codegen_gcc/src/int.rs b/compiler/rustc_codegen_gcc/src/int.rs index f3552d9b12fcb..9b5b0fde6e2f1 100644 --- a/compiler/rustc_codegen_gcc/src/int.rs +++ b/compiler/rustc_codegen_gcc/src/int.rs @@ -404,7 +404,7 @@ impl<'a, 'gcc, 'tcx> Builder<'a, 'gcc, 'tcx> { let ret_indirect = matches!(fn_abi.ret.mode, PassMode::Indirect { .. }); - let result = if ret_indirect { + let call = if ret_indirect { let res_value = self.current_func().new_local(self.location, res_type, "result_value"); let res_addr = res_value.get_address(self.location); let res_param_type = res_type.make_pointer(); @@ -432,8 +432,17 @@ impl<'a, 'gcc, 'tcx> Builder<'a, 'gcc, 'tcx> { ); self.context.new_call(self.location, func, &[lhs, rhs, overflow_addr]) }; - - (result, self.context.new_cast(self.location, overflow_value, self.bool_type).to_rvalue()) + // NOTE: we must assign the result of the operation to a variable at this point to make + // sure it will be evaluated by libgccjit now. + // Otherwise, it will only be evaluated when the rvalue for the call is used somewhere else + // and overflow_value will not be initialized at the correct point in the program. + let result = self.current_func().new_local(self.location, res_type, "result"); + self.block.add_assignment(self.location, result, call); + + ( + result.to_rvalue(), + self.context.new_cast(self.location, overflow_value, self.bool_type).to_rvalue(), + ) } pub fn gcc_icmp( @@ -865,6 +874,7 @@ impl<'gcc, 'tcx> CodegenCx<'gcc, 'tcx> { let value_type = value.get_type(); if self.is_native_int_type_or_bool(dest_typ) && self.is_native_int_type_or_bool(value_type) { + // TODO: use self.location. self.context.new_cast(None, value, dest_typ) } else if self.is_native_int_type_or_bool(dest_typ) { self.context.new_cast(None, self.low(value), dest_typ) @@ -905,6 +915,7 @@ impl<'gcc, 'tcx> CodegenCx<'gcc, 'tcx> { let name_suffix = match self.type_kind(dest_typ) { TypeKind::Float => "tisf", TypeKind::Double => "tidf", + TypeKind::FP128 => "tixf", kind => panic!("cannot cast a non-native integer to type {:?}", kind), }; let sign = if signed { "" } else { "un" }; diff --git a/compiler/rustc_codegen_gcc/src/intrinsic/llvm.rs b/compiler/rustc_codegen_gcc/src/intrinsic/llvm.rs index 2d731f88d7d36..0eebd21001a90 100644 --- a/compiler/rustc_codegen_gcc/src/intrinsic/llvm.rs +++ b/compiler/rustc_codegen_gcc/src/intrinsic/llvm.rs @@ -1,11 +1,90 @@ use std::borrow::Cow; -use gccjit::{CType, Context, Function, FunctionPtrType, RValue, ToRValue, UnaryOp}; +use gccjit::{CType, Context, Field, Function, FunctionPtrType, RValue, ToRValue, Type}; use rustc_codegen_ssa::traits::BuilderMethods; use crate::builder::Builder; use crate::context::CodegenCx; +fn encode_key_128_type<'a, 'gcc, 'tcx>( + builder: &Builder<'a, 'gcc, 'tcx>, +) -> (Type<'gcc>, Field<'gcc>, Field<'gcc>) { + let m128i = builder.context.new_vector_type(builder.i64_type, 2); + let field1 = builder.context.new_field(None, builder.u32_type, "field1"); + let field2 = builder.context.new_field(None, m128i, "field2"); + let field3 = builder.context.new_field(None, m128i, "field3"); + let field4 = builder.context.new_field(None, m128i, "field4"); + let field5 = builder.context.new_field(None, m128i, "field5"); + let field6 = builder.context.new_field(None, m128i, "field6"); + let field7 = builder.context.new_field(None, m128i, "field7"); + let encode_type = builder.context.new_struct_type( + None, + "EncodeKey128Output", + &[field1, field2, field3, field4, field5, field6, field7], + ); + #[cfg(feature = "master")] + encode_type.as_type().set_packed(); + (encode_type.as_type(), field1, field2) +} + +fn encode_key_256_type<'a, 'gcc, 'tcx>( + builder: &Builder<'a, 'gcc, 'tcx>, +) -> (Type<'gcc>, Field<'gcc>, Field<'gcc>) { + let m128i = builder.context.new_vector_type(builder.i64_type, 2); + let field1 = builder.context.new_field(None, builder.u32_type, "field1"); + let field2 = builder.context.new_field(None, m128i, "field2"); + let field3 = builder.context.new_field(None, m128i, "field3"); + let field4 = builder.context.new_field(None, m128i, "field4"); + let field5 = builder.context.new_field(None, m128i, "field5"); + let field6 = builder.context.new_field(None, m128i, "field6"); + let field7 = builder.context.new_field(None, m128i, "field7"); + let field8 = builder.context.new_field(None, m128i, "field8"); + let encode_type = builder.context.new_struct_type( + None, + "EncodeKey256Output", + &[field1, field2, field3, field4, field5, field6, field7, field8], + ); + #[cfg(feature = "master")] + encode_type.as_type().set_packed(); + (encode_type.as_type(), field1, field2) +} + +fn aes_output_type<'a, 'gcc, 'tcx>( + builder: &Builder<'a, 'gcc, 'tcx>, +) -> (Type<'gcc>, Field<'gcc>, Field<'gcc>) { + let m128i = builder.context.new_vector_type(builder.i64_type, 2); + let field1 = builder.context.new_field(None, builder.u8_type, "field1"); + let field2 = builder.context.new_field(None, m128i, "field2"); + let aes_output_type = builder.context.new_struct_type(None, "AesOutput", &[field1, field2]); + let typ = aes_output_type.as_type(); + #[cfg(feature = "master")] + typ.set_packed(); + (typ, field1, field2) +} + +fn wide_aes_output_type<'a, 'gcc, 'tcx>( + builder: &Builder<'a, 'gcc, 'tcx>, +) -> (Type<'gcc>, Field<'gcc>, Field<'gcc>) { + let m128i = builder.context.new_vector_type(builder.i64_type, 2); + let field1 = builder.context.new_field(None, builder.u8_type, "field1"); + let field2 = builder.context.new_field(None, m128i, "field2"); + let field3 = builder.context.new_field(None, m128i, "field3"); + let field4 = builder.context.new_field(None, m128i, "field4"); + let field5 = builder.context.new_field(None, m128i, "field5"); + let field6 = builder.context.new_field(None, m128i, "field6"); + let field7 = builder.context.new_field(None, m128i, "field7"); + let field8 = builder.context.new_field(None, m128i, "field8"); + let field9 = builder.context.new_field(None, m128i, "field9"); + let aes_output_type = builder.context.new_struct_type( + None, + "WideAesOutput", + &[field1, field2, field3, field4, field5, field6, field7, field8, field9], + ); + #[cfg(feature = "master")] + aes_output_type.as_type().set_packed(); + (aes_output_type.as_type(), field1, field2) +} + #[cfg_attr(not(feature = "master"), allow(unused_variables))] pub fn adjust_function<'gcc>( context: &'gcc Context<'gcc>, @@ -43,7 +122,6 @@ pub fn adjust_intrinsic_arguments<'a, 'b, 'gcc, 'tcx>( gcc_func: FunctionPtrType<'gcc>, mut args: Cow<'b, [RValue<'gcc>]>, func_name: &str, - original_function_name: Option<&String>, ) -> Cow<'b, [RValue<'gcc>]> { // TODO: this might not be a good way to workaround the missing tile builtins. if func_name == "__builtin_trap" { @@ -504,6 +582,72 @@ pub fn adjust_intrinsic_arguments<'a, 'b, 'gcc, 'tcx>( let arg4 = builder.context.new_rvalue_from_int(arg4_type, -1); args = vec![a, b, c, arg4, new_args[3]].into(); } + "__builtin_ia32_encodekey128_u32" => { + let mut new_args = args.to_vec(); + let m128i = builder.context.new_vector_type(builder.i64_type, 2); + let array_type = builder.context.new_array_type(None, m128i, 6); + let result = builder.current_func().new_local(None, array_type, "result"); + new_args.push(result.get_address(None)); + args = new_args.into(); + } + "__builtin_ia32_encodekey256_u32" => { + let mut new_args = args.to_vec(); + let m128i = builder.context.new_vector_type(builder.i64_type, 2); + let array_type = builder.context.new_array_type(None, m128i, 7); + let result = builder.current_func().new_local(None, array_type, "result"); + new_args.push(result.get_address(None)); + args = new_args.into(); + } + "__builtin_ia32_aesenc128kl_u8" + | "__builtin_ia32_aesdec128kl_u8" + | "__builtin_ia32_aesenc256kl_u8" + | "__builtin_ia32_aesdec256kl_u8" => { + let mut new_args = vec![]; + let m128i = builder.context.new_vector_type(builder.i64_type, 2); + let result = builder.current_func().new_local(None, m128i, "result"); + new_args.push(result.get_address(None)); + new_args.extend(args.to_vec()); + args = new_args.into(); + } + "__builtin_ia32_aesencwide128kl_u8" + | "__builtin_ia32_aesdecwide128kl_u8" + | "__builtin_ia32_aesencwide256kl_u8" + | "__builtin_ia32_aesdecwide256kl_u8" => { + let mut new_args = vec![]; + + let mut old_args = args.to_vec(); + let handle = old_args.swap_remove(0); // Called __P in GCC. + let first_value = old_args.swap_remove(0); + + let element_type = first_value.get_type(); + let array_type = builder.context.new_array_type(None, element_type, 8); + let result = builder.current_func().new_local(None, array_type, "result"); + new_args.push(result.get_address(None)); + + let array = builder.current_func().new_local(None, array_type, "array"); + let input = builder.context.new_array_constructor( + None, + array_type, + &[ + first_value, + old_args.swap_remove(0), + old_args.swap_remove(0), + old_args.swap_remove(0), + old_args.swap_remove(0), + old_args.swap_remove(0), + old_args.swap_remove(0), + old_args.swap_remove(0), + ], + ); + builder.llbb().add_assignment(None, array, input); + let input_ptr = array.get_address(None); + let arg2_type = gcc_func.get_param_type(1); + let input_ptr = builder.context.new_cast(None, input_ptr, arg2_type); + new_args.push(input_ptr); + + new_args.push(handle); + args = new_args.into(); + } _ => (), } } else { @@ -541,33 +685,6 @@ pub fn adjust_intrinsic_arguments<'a, 'b, 'gcc, 'tcx>( let c = builder.context.new_rvalue_from_vector(None, arg3_type, &[new_args[2]; 2]); args = vec![a, b, c, new_args[3]].into(); } - "__builtin_ia32_vfmaddsubpd256" - | "__builtin_ia32_vfmaddsubps" - | "__builtin_ia32_vfmaddsubps256" - | "__builtin_ia32_vfmaddsubpd" => { - if let Some(original_function_name) = original_function_name { - match &**original_function_name { - "llvm.x86.fma.vfmsubadd.pd.256" - | "llvm.x86.fma.vfmsubadd.ps" - | "llvm.x86.fma.vfmsubadd.ps.256" - | "llvm.x86.fma.vfmsubadd.pd" => { - // NOTE: since both llvm.x86.fma.vfmsubadd.ps and llvm.x86.fma.vfmaddsub.ps maps to - // __builtin_ia32_vfmaddsubps, only add minus if this comes from a - // subadd LLVM intrinsic, e.g. _mm256_fmsubadd_pd. - let mut new_args = args.to_vec(); - let arg3 = &mut new_args[2]; - *arg3 = builder.context.new_unary_op( - None, - UnaryOp::Minus, - arg3.get_type(), - *arg3, - ); - args = new_args.into(); - } - _ => (), - } - } - } "__builtin_ia32_ldmxcsr" => { // The builtin __builtin_ia32_ldmxcsr takes an integer value while llvm.x86.sse.ldmxcsr takes a pointer, // so dereference the pointer. @@ -728,6 +845,96 @@ pub fn adjust_intrinsic_return_value<'a, 'gcc, 'tcx>( let f16_type = builder.context.new_c_type(CType::Float16); return_value = builder.context.new_cast(None, return_value, f16_type); } + "__builtin_ia32_encodekey128_u32" => { + // The builtin __builtin_ia32_encodekey128_u32 writes the result in its pointer argument while + // llvm.x86.encodekey128 returns a value. + // We added a result pointer argument and now need to assign its value to the return_value expected by + // the LLVM intrinsic. + let (encode_type, field1, field2) = encode_key_128_type(builder); + let result = builder.current_func().new_local(None, encode_type, "result"); + let field1 = result.access_field(None, field1); + builder.llbb().add_assignment(None, field1, return_value); + let field2 = result.access_field(None, field2); + let field2_type = field2.to_rvalue().get_type(); + let array_type = builder.context.new_array_type(None, field2_type, 6); + let ptr = builder.context.new_cast(None, args[2], array_type.make_pointer()); + let field2_ptr = + builder.context.new_cast(None, field2.get_address(None), array_type.make_pointer()); + builder.llbb().add_assignment( + None, + field2_ptr.dereference(None), + ptr.dereference(None), + ); + return_value = result.to_rvalue(); + } + "__builtin_ia32_encodekey256_u32" => { + // The builtin __builtin_ia32_encodekey256_u32 writes the result in its pointer argument while + // llvm.x86.encodekey256 returns a value. + // We added a result pointer argument and now need to assign its value to the return_value expected by + // the LLVM intrinsic. + let (encode_type, field1, field2) = encode_key_256_type(builder); + let result = builder.current_func().new_local(None, encode_type, "result"); + let field1 = result.access_field(None, field1); + builder.llbb().add_assignment(None, field1, return_value); + let field2 = result.access_field(None, field2); + let field2_type = field2.to_rvalue().get_type(); + let array_type = builder.context.new_array_type(None, field2_type, 7); + let ptr = builder.context.new_cast(None, args[3], array_type.make_pointer()); + let field2_ptr = + builder.context.new_cast(None, field2.get_address(None), array_type.make_pointer()); + builder.llbb().add_assignment( + None, + field2_ptr.dereference(None), + ptr.dereference(None), + ); + return_value = result.to_rvalue(); + } + "__builtin_ia32_aesdec128kl_u8" + | "__builtin_ia32_aesenc128kl_u8" + | "__builtin_ia32_aesdec256kl_u8" + | "__builtin_ia32_aesenc256kl_u8" => { + // The builtin for aesdec/aesenc writes the result in its pointer argument while + // llvm.x86.aesdec128kl returns a value. + // We added a result pointer argument and now need to assign its value to the return_value expected by + // the LLVM intrinsic. + let (aes_output_type, field1, field2) = aes_output_type(builder); + let result = builder.current_func().new_local(None, aes_output_type, "result"); + let field1 = result.access_field(None, field1); + builder.llbb().add_assignment(None, field1, return_value); + let field2 = result.access_field(None, field2); + let ptr = builder.context.new_cast( + None, + args[0], + field2.to_rvalue().get_type().make_pointer(), + ); + builder.llbb().add_assignment(None, field2, ptr.dereference(None)); + return_value = result.to_rvalue(); + } + "__builtin_ia32_aesencwide128kl_u8" + | "__builtin_ia32_aesdecwide128kl_u8" + | "__builtin_ia32_aesencwide256kl_u8" + | "__builtin_ia32_aesdecwide256kl_u8" => { + // The builtin for aesdecwide/aesencwide writes the result in its pointer argument while + // llvm.x86.aesencwide128kl returns a value. + // We added a result pointer argument and now need to assign its value to the return_value expected by + // the LLVM intrinsic. + let (aes_output_type, field1, field2) = wide_aes_output_type(builder); + let result = builder.current_func().new_local(None, aes_output_type, "result"); + let field1 = result.access_field(None, field1); + builder.llbb().add_assignment(None, field1, return_value); + let field2 = result.access_field(None, field2); + let field2_type = field2.to_rvalue().get_type(); + let array_type = builder.context.new_array_type(None, field2_type, 8); + let ptr = builder.context.new_cast(None, args[0], array_type.make_pointer()); + let field2_ptr = + builder.context.new_cast(None, field2.get_address(None), array_type.make_pointer()); + builder.llbb().add_assignment( + None, + field2_ptr.dereference(None), + ptr.dereference(None), + ); + return_value = result.to_rvalue(); + } _ => (), } @@ -915,16 +1122,6 @@ pub fn intrinsic<'gcc, 'tcx>(name: &str, cx: &CodegenCx<'gcc, 'tcx>) -> Function "llvm.ctlz.v4i64" => "__builtin_ia32_vplzcntq_256_mask", "llvm.ctlz.v2i64" => "__builtin_ia32_vplzcntq_128_mask", "llvm.ctpop.v32i16" => "__builtin_ia32_vpopcountw_v32hi", - "llvm.x86.fma.vfmsub.sd" => "__builtin_ia32_vfmsubsd3", - "llvm.x86.fma.vfmsub.ss" => "__builtin_ia32_vfmsubss3", - "llvm.x86.fma.vfmsubadd.pd" => "__builtin_ia32_vfmaddsubpd", - "llvm.x86.fma.vfmsubadd.pd.256" => "__builtin_ia32_vfmaddsubpd256", - "llvm.x86.fma.vfmsubadd.ps" => "__builtin_ia32_vfmaddsubps", - "llvm.x86.fma.vfmsubadd.ps.256" => "__builtin_ia32_vfmaddsubps256", - "llvm.x86.fma.vfnmadd.sd" => "__builtin_ia32_vfnmaddsd3", - "llvm.x86.fma.vfnmadd.ss" => "__builtin_ia32_vfnmaddss3", - "llvm.x86.fma.vfnmsub.sd" => "__builtin_ia32_vfnmsubsd3", - "llvm.x86.fma.vfnmsub.ss" => "__builtin_ia32_vfnmsubss3", "llvm.x86.avx512.conflict.d.512" => "__builtin_ia32_vpconflictsi_512_mask", "llvm.x86.avx512.conflict.d.256" => "__builtin_ia32_vpconflictsi_256_mask", "llvm.x86.avx512.conflict.d.128" => "__builtin_ia32_vpconflictsi_128_mask", @@ -1002,8 +1199,6 @@ pub fn intrinsic<'gcc, 'tcx>(name: &str, cx: &CodegenCx<'gcc, 'tcx>) -> Function "llvm.fshr.v32i16" => "__builtin_ia32_vpshrdv_v32hi", "llvm.fshr.v16i16" => "__builtin_ia32_vpshrdv_v16hi", "llvm.fshr.v8i16" => "__builtin_ia32_vpshrdv_v8hi", - "llvm.x86.fma.vfmadd.sd" => "__builtin_ia32_vfmaddsd3", - "llvm.x86.fma.vfmadd.ss" => "__builtin_ia32_vfmaddss3", "llvm.x86.rdrand.64" => "__builtin_ia32_rdrand64_step", // The above doc points to unknown builtins for the following, so override them: @@ -1324,6 +1519,16 @@ pub fn intrinsic<'gcc, 'tcx>(name: &str, cx: &CodegenCx<'gcc, 'tcx>) -> Function "llvm.x86.avx512fp16.mask.vfmadd.cph.256" => "__builtin_ia32_vfmaddcph256_mask3", "llvm.x86.avx512fp16.mask.vfcmadd.cph.128" => "__builtin_ia32_vfcmaddcph128_mask3", "llvm.x86.avx512fp16.mask.vfmadd.cph.128" => "__builtin_ia32_vfmaddcph128_mask3", + "llvm.x86.encodekey128" => "__builtin_ia32_encodekey128_u32", + "llvm.x86.encodekey256" => "__builtin_ia32_encodekey256_u32", + "llvm.x86.aesenc128kl" => "__builtin_ia32_aesenc128kl_u8", + "llvm.x86.aesdec128kl" => "__builtin_ia32_aesdec128kl_u8", + "llvm.x86.aesenc256kl" => "__builtin_ia32_aesenc256kl_u8", + "llvm.x86.aesdec256kl" => "__builtin_ia32_aesdec256kl_u8", + "llvm.x86.aesencwide128kl" => "__builtin_ia32_aesencwide128kl_u8", + "llvm.x86.aesdecwide128kl" => "__builtin_ia32_aesdecwide128kl_u8", + "llvm.x86.aesencwide256kl" => "__builtin_ia32_aesencwide256kl_u8", + "llvm.x86.aesdecwide256kl" => "__builtin_ia32_aesdecwide256kl_u8", // TODO: support the tile builtins: "llvm.x86.ldtilecfg" => "__builtin_trap", diff --git a/compiler/rustc_codegen_gcc/src/intrinsic/mod.rs b/compiler/rustc_codegen_gcc/src/intrinsic/mod.rs index f38622074f18b..2ed5ec4381edf 100644 --- a/compiler/rustc_codegen_gcc/src/intrinsic/mod.rs +++ b/compiler/rustc_codegen_gcc/src/intrinsic/mod.rs @@ -78,6 +78,7 @@ fn get_simple_intrinsic<'gcc, 'tcx>( sym::maxnumf64 => "fmax", sym::copysignf32 => "copysignf", sym::copysignf64 => "copysign", + sym::copysignf128 => "copysignl", sym::floorf32 => "floorf", sym::floorf64 => "floor", sym::ceilf32 => "ceilf", @@ -398,7 +399,7 @@ impl<'a, 'gcc, 'tcx> IntrinsicCallBuilderMethods<'tcx> for Builder<'a, 'gcc, 'tc } // Fall back to default body - _ => return Err(Instance::new(instance.def_id(), instance.args)), + _ => return Err(Instance::new_raw(instance.def_id(), instance.args)), }; if !fn_abi.ret.is_ignore() { diff --git a/compiler/rustc_codegen_gcc/src/intrinsic/simd.rs b/compiler/rustc_codegen_gcc/src/intrinsic/simd.rs index 8b454ab2a4241..b897d07924914 100644 --- a/compiler/rustc_codegen_gcc/src/intrinsic/simd.rs +++ b/compiler/rustc_codegen_gcc/src/intrinsic/simd.rs @@ -399,7 +399,7 @@ pub fn generic_simd_intrinsic<'a, 'gcc, 'tcx>( } #[cfg(feature = "master")] - if name == sym::simd_insert { + if name == sym::simd_insert || name == sym::simd_insert_dyn { require!( in_elem == arg_tys[2], InvalidMonomorphization::InsertedType { @@ -410,6 +410,8 @@ pub fn generic_simd_intrinsic<'a, 'gcc, 'tcx>( out_ty: arg_tys[2] } ); + + // TODO(antoyo): For simd_insert, check if the index is a constant of the correct size. let vector = args[0].immediate(); let index = args[1].immediate(); let value = args[2].immediate(); @@ -422,13 +424,15 @@ pub fn generic_simd_intrinsic<'a, 'gcc, 'tcx>( } #[cfg(feature = "master")] - if name == sym::simd_extract { + if name == sym::simd_extract || name == sym::simd_extract_dyn { require!( ret_ty == in_elem, InvalidMonomorphization::ReturnType { span, name, in_elem, in_ty, ret_ty } ); + // TODO(antoyo): For simd_extract, check if the index is a constant of the correct size. let vector = args[0].immediate(); - return Ok(bx.context.new_vector_access(None, vector, args[1].immediate()).to_rvalue()); + let index = args[1].immediate(); + return Ok(bx.context.new_vector_access(None, vector, index).to_rvalue()); } if name == sym::simd_select { @@ -443,9 +447,14 @@ pub fn generic_simd_intrinsic<'a, 'gcc, 'tcx>( m_len == v_len, InvalidMonomorphization::MismatchedLengths { span, name, m_len, v_len } ); + // TODO: also support unsigned integers. match *m_elem_ty.kind() { ty::Int(_) => {} - _ => return_error!(InvalidMonomorphization::MaskType { span, name, ty: m_elem_ty }), + _ => return_error!(InvalidMonomorphization::MaskWrongElementType { + span, + name, + ty: m_elem_ty + }), } return Ok(bx.vector_select(args[0].immediate(), args[1].immediate(), args[2].immediate())); } @@ -987,19 +996,15 @@ pub fn generic_simd_intrinsic<'a, 'gcc, 'tcx>( assert_eq!(pointer_count - 1, ptr_count(element_ty0)); assert_eq!(underlying_ty, non_ptr(element_ty0)); - // The element type of the third argument must be a signed integer type of any width: + // The element type of the third argument must be an integer type of any width: + // TODO: also support unsigned integers. let (_, element_ty2) = arg_tys[2].simd_size_and_type(bx.tcx()); match *element_ty2.kind() { ty::Int(_) => (), _ => { require!( false, - InvalidMonomorphization::ThirdArgElementType { - span, - name, - expected_element: element_ty2, - third_arg: arg_tys[2] - } + InvalidMonomorphization::MaskWrongElementType { span, name, ty: element_ty2 } ); } } @@ -1105,17 +1110,13 @@ pub fn generic_simd_intrinsic<'a, 'gcc, 'tcx>( assert_eq!(underlying_ty, non_ptr(element_ty0)); // The element type of the third argument must be a signed integer type of any width: + // TODO: also support unsigned integers. match *element_ty2.kind() { ty::Int(_) => (), _ => { require!( false, - InvalidMonomorphization::ThirdArgElementType { - span, - name, - expected_element: element_ty2, - third_arg: arg_tys[2] - } + InvalidMonomorphization::MaskWrongElementType { span, name, ty: element_ty2 } ); } } diff --git a/compiler/rustc_codegen_gcc/src/lib.rs b/compiler/rustc_codegen_gcc/src/lib.rs index bfa23174a19d9..2c5a787168381 100644 --- a/compiler/rustc_codegen_gcc/src/lib.rs +++ b/compiler/rustc_codegen_gcc/src/lib.rs @@ -22,7 +22,7 @@ #![warn(rust_2018_idioms)] #![warn(unused_lifetimes)] #![deny(clippy::pattern_type_mismatch)] -#![allow(clippy::needless_lifetimes)] +#![allow(clippy::needless_lifetimes, clippy::uninlined_format_args)] // Some "regular" crates we want to share with rustc extern crate object; @@ -102,7 +102,7 @@ use rustc_codegen_ssa::back::write::{ }; use rustc_codegen_ssa::base::codegen_crate; use rustc_codegen_ssa::traits::{CodegenBackend, ExtraBackendMethods, WriteBackendMethods}; -use rustc_codegen_ssa::{CodegenResults, CompiledModule, ModuleCodegen}; +use rustc_codegen_ssa::{CodegenResults, CompiledModule, ModuleCodegen, TargetConfig}; use rustc_data_structures::fx::FxIndexMap; use rustc_data_structures::sync::IntoDynSyncSend; use rustc_errors::DiagCtxtHandle; @@ -188,10 +188,10 @@ impl CodegenBackend for GccCodegenBackend { crate::DEFAULT_LOCALE_RESOURCE } - fn init(&self, sess: &Session) { + fn init(&self, _sess: &Session) { #[cfg(feature = "master")] { - let target_cpu = target_cpu(sess); + let target_cpu = target_cpu(_sess); // Get the second TargetInfo with the correct CPU features by setting the arch. let context = Context::default(); @@ -260,8 +260,8 @@ impl CodegenBackend for GccCodegenBackend { .join(sess) } - fn target_features_cfg(&self, sess: &Session) -> (Vec, Vec) { - target_features_cfg(sess, &self.target_info) + fn target_config(&self, sess: &Session) -> TargetConfig { + target_config(sess, &self.target_info) } } @@ -485,10 +485,7 @@ fn to_gcc_opt_level(optlevel: Option) -> OptimizationLevel { } /// Returns the features that should be set in `cfg(target_feature)`. -fn target_features_cfg( - sess: &Session, - target_info: &LockedTargetInfo, -) -> (Vec, Vec) { +fn target_config(sess: &Session, target_info: &LockedTargetInfo) -> TargetConfig { // TODO(antoyo): use global_gcc_features. let f = |allow_unstable| { sess.target @@ -523,5 +520,14 @@ fn target_features_cfg( let target_features = f(false); let unstable_target_features = f(true); - (target_features, unstable_target_features) + + TargetConfig { + target_features, + unstable_target_features, + // There are no known bugs with GCC support for f16 or f128 + has_reliable_f16: true, + has_reliable_f16_math: true, + has_reliable_f128: true, + has_reliable_f128_math: true, + } } diff --git a/compiler/rustc_codegen_gcc/src/type_of.rs b/compiler/rustc_codegen_gcc/src/type_of.rs index ae98b3d0b56e2..5745acce6fee7 100644 --- a/compiler/rustc_codegen_gcc/src/type_of.rs +++ b/compiler/rustc_codegen_gcc/src/type_of.rs @@ -102,10 +102,10 @@ fn uncached_gcc_type<'gcc, 'tcx>( let mut name = with_no_trimmed_paths!(layout.ty.to_string()); if let (&ty::Adt(def, _), &Variants::Single { index }) = (layout.ty.kind(), &layout.variants) + && def.is_enum() + && !def.variants().is_empty() { - if def.is_enum() && !def.variants().is_empty() { - write!(&mut name, "::{}", def.variant(index).name).unwrap(); - } + write!(&mut name, "::{}", def.variant(index).name).unwrap(); } if let (&ty::Coroutine(_, _), &Variants::Single { index }) = (layout.ty.kind(), &layout.variants) @@ -264,10 +264,10 @@ impl<'tcx> LayoutGccExt<'tcx> for TyAndLayout<'tcx> { } fn immediate_gcc_type<'gcc>(&self, cx: &CodegenCx<'gcc, 'tcx>) -> Type<'gcc> { - if let BackendRepr::Scalar(ref scalar) = self.backend_repr { - if scalar.is_bool() { - return cx.type_i1(); - } + if let BackendRepr::Scalar(ref scalar) = self.backend_repr + && scalar.is_bool() + { + return cx.type_i1(); } self.gcc_type(cx) } diff --git a/compiler/rustc_codegen_gcc/tests/failing-ui-tests.txt b/compiler/rustc_codegen_gcc/tests/failing-ui-tests.txt index 082958bfe1f85..499c1a962311c 100644 --- a/compiler/rustc_codegen_gcc/tests/failing-ui-tests.txt +++ b/compiler/rustc_codegen_gcc/tests/failing-ui-tests.txt @@ -1,11 +1,9 @@ tests/ui/allocator/no_std-alloc-error-handler-custom.rs tests/ui/allocator/no_std-alloc-error-handler-default.rs tests/ui/asm/may_unwind.rs -tests/ui/asm/x86_64/multiple-clobber-abi.rs tests/ui/functions-closures/parallel-codegen-closures.rs tests/ui/linkage-attr/linkage1.rs tests/ui/lto/dylib-works.rs -tests/ui/numbers-arithmetic/saturating-float-casts.rs tests/ui/sepcomp/sepcomp-cci.rs tests/ui/sepcomp/sepcomp-extern.rs tests/ui/sepcomp/sepcomp-fns-backwards.rs @@ -33,7 +31,6 @@ tests/ui/unwind-no-uwtable.rs tests/ui/parser/unclosed-delimiter-in-dep.rs tests/ui/consts/missing_span_in_backtrace.rs tests/ui/drop/dynamic-drop.rs -tests/ui/issues/issue-40883.rs tests/ui/issues/issue-43853.rs tests/ui/issues/issue-47364.rs tests/ui/macros/rfc-2011-nicer-assert-messages/assert-without-captures-does-not-create-unnecessary-code.rs @@ -102,14 +99,12 @@ tests/ui/codegen/equal-pointers-unequal/as-cast/basic.rs tests/ui/codegen/equal-pointers-unequal/as-cast/inline1.rs tests/ui/codegen/equal-pointers-unequal/as-cast/print.rs tests/ui/codegen/equal-pointers-unequal/as-cast/inline2.rs -tests/ui/codegen/equal-pointers-unequal/as-cast/print3.rs tests/ui/codegen/equal-pointers-unequal/as-cast/segfault.rs tests/ui/codegen/equal-pointers-unequal/exposed-provenance/function.rs tests/ui/codegen/equal-pointers-unequal/exposed-provenance/basic.rs tests/ui/codegen/equal-pointers-unequal/as-cast/zero.rs tests/ui/codegen/equal-pointers-unequal/exposed-provenance/inline1.rs tests/ui/codegen/equal-pointers-unequal/exposed-provenance/print.rs -tests/ui/codegen/equal-pointers-unequal/exposed-provenance/print3.rs tests/ui/codegen/equal-pointers-unequal/exposed-provenance/inline2.rs tests/ui/codegen/equal-pointers-unequal/exposed-provenance/segfault.rs tests/ui/codegen/equal-pointers-unequal/exposed-provenance/zero.rs @@ -117,8 +112,9 @@ tests/ui/codegen/equal-pointers-unequal/strict-provenance/basic.rs tests/ui/codegen/equal-pointers-unequal/strict-provenance/function.rs tests/ui/codegen/equal-pointers-unequal/strict-provenance/print.rs tests/ui/codegen/equal-pointers-unequal/strict-provenance/inline1.rs -tests/ui/codegen/equal-pointers-unequal/strict-provenance/print3.rs tests/ui/codegen/equal-pointers-unequal/strict-provenance/inline2.rs tests/ui/codegen/equal-pointers-unequal/strict-provenance/segfault.rs tests/ui/codegen/equal-pointers-unequal/strict-provenance/zero.rs tests/ui/simd/simd-bitmask-notpow2.rs +tests/ui/codegen/StackColoring-not-blowup-stack-issue-40883.rs +tests/ui/uninhabited/uninhabited-transparent-return-abi.rs diff --git a/compiler/rustc_codegen_gcc/tests/lang_tests_common.rs b/compiler/rustc_codegen_gcc/tests/lang_tests_common.rs index 64c932a265819..d5a0d71c4b298 100644 --- a/compiler/rustc_codegen_gcc/tests/lang_tests_common.rs +++ b/compiler/rustc_codegen_gcc/tests/lang_tests_common.rs @@ -1,5 +1,7 @@ //! The common code for `tests/lang_tests_*.rs` +#![allow(clippy::uninlined_format_args)] + use std::env::{self, current_dir}; use std::path::{Path, PathBuf}; use std::process::Command; diff --git a/compiler/rustc_codegen_gcc/tests/run/abort1.rs b/compiler/rustc_codegen_gcc/tests/run/abort1.rs index fe46d9ae41849..ff2bb75ece22a 100644 --- a/compiler/rustc_codegen_gcc/tests/run/abort1.rs +++ b/compiler/rustc_codegen_gcc/tests/run/abort1.rs @@ -3,45 +3,13 @@ // Run-time: // status: signal -#![feature(auto_traits, lang_items, no_core, intrinsics, rustc_attrs)] -#![allow(internal_features)] - +#![feature(no_core)] #![no_std] #![no_core] #![no_main] -/* - * Core - */ - -// Because we don't have core yet. -#[lang = "sized"] -pub trait Sized {} - -#[lang = "copy"] -trait Copy { -} - -impl Copy for isize {} - -#[lang = "receiver"] -trait Receiver { -} - -#[lang = "freeze"] -pub(crate) unsafe auto trait Freeze {} - -mod intrinsics { - use super::Sized; - - #[rustc_nounwind] - #[rustc_intrinsic] - pub fn abort() -> !; -} - -/* - * Code - */ +extern crate mini_core; +use mini_core::*; fn test_fail() -> ! { unsafe { intrinsics::abort() }; diff --git a/compiler/rustc_codegen_gcc/tests/run/abort2.rs b/compiler/rustc_codegen_gcc/tests/run/abort2.rs index 4123f4f4beebc..781f518e0b222 100644 --- a/compiler/rustc_codegen_gcc/tests/run/abort2.rs +++ b/compiler/rustc_codegen_gcc/tests/run/abort2.rs @@ -3,45 +3,13 @@ // Run-time: // status: signal -#![feature(auto_traits, lang_items, no_core, intrinsics, rustc_attrs)] -#![allow(internal_features)] - +#![feature(no_core)] #![no_std] #![no_core] #![no_main] -/* - * Core - */ - -// Because we don't have core yet. -#[lang = "sized"] -pub trait Sized {} - -#[lang = "copy"] -trait Copy { -} - -impl Copy for isize {} - -#[lang = "receiver"] -trait Receiver { -} - -#[lang = "freeze"] -pub(crate) unsafe auto trait Freeze {} - -mod intrinsics { - use super::Sized; - - #[rustc_nounwind] - #[rustc_intrinsic] - pub fn abort() -> !; -} - -/* - * Code - */ +extern crate mini_core; +use mini_core::*; fn fail() -> i32 { unsafe { intrinsics::abort() }; diff --git a/compiler/rustc_codegen_gcc/tests/run/array.rs b/compiler/rustc_codegen_gcc/tests/run/array.rs index e18a4ced6bc46..3ab0c309fdeae 100644 --- a/compiler/rustc_codegen_gcc/tests/run/array.rs +++ b/compiler/rustc_codegen_gcc/tests/run/array.rs @@ -8,20 +8,12 @@ // 10 #![feature(no_core)] - #![no_std] #![no_core] #![no_main] extern crate mini_core; - -mod libc { - #[link(name = "c")] - extern "C" { - pub fn printf(format: *const i8, ...) -> i32; - pub fn puts(s: *const u8) -> i32; - } -} +use mini_core::*; static mut ONE: usize = 1; diff --git a/compiler/rustc_codegen_gcc/tests/run/asm.rs b/compiler/rustc_codegen_gcc/tests/run/asm.rs index 4e05d026868e7..2dbf43be664dc 100644 --- a/compiler/rustc_codegen_gcc/tests/run/asm.rs +++ b/compiler/rustc_codegen_gcc/tests/run/asm.rs @@ -174,6 +174,59 @@ fn asm() { mem_cpy(array2.as_mut_ptr(), array1.as_ptr(), 3); } assert_eq!(array1, array2); + + // in and clobber registers cannot overlap. This tests that the lateout register without an + // output place (indicated by the `_`) is not added to the list of clobbered registers + let x = 8; + let y: i32; + unsafe { + asm!( + "mov rax, rdi", + in("rdi") x, + lateout("rdi") _, + out("rax") y, + ); + } + assert_eq!((x, y), (8, 8)); + + // sysv64 is the default calling convention on unix systems. The rdi register is + // used to pass arguments in the sysv64 calling convention, so this register will be clobbered + #[cfg(unix)] + { + let x = 16; + let y: i32; + unsafe { + asm!( + "mov rax, rdi", + in("rdi") x, + out("rax") y, + clobber_abi("sysv64"), + ); + } + assert_eq!((x, y), (16, 16)); + } + + // the `b` suffix for registers in the `reg_byte` register class is not supported in GCC + // and needs to be stripped in order to use these registers. + unsafe { + core::arch::asm!( + "", + out("al") _, + out("bl") _, + out("cl") _, + out("dl") _, + out("sil") _, + out("dil") _, + out("r8b") _, + out("r9b") _, + out("r10b") _, + out("r11b") _, + out("r12b") _, + out("r13b") _, + out("r14b") _, + out("r15b") _, + ); + } } #[cfg(not(target_arch = "x86_64"))] diff --git a/compiler/rustc_codegen_gcc/tests/run/assign.rs b/compiler/rustc_codegen_gcc/tests/run/assign.rs index 286155852d50f..4535ab5778e93 100644 --- a/compiler/rustc_codegen_gcc/tests/run/assign.rs +++ b/compiler/rustc_codegen_gcc/tests/run/assign.rs @@ -5,130 +5,13 @@ // 7 8 // 10 -#![allow(internal_features, unused_attributes)] -#![feature(auto_traits, lang_items, no_core, intrinsics, rustc_attrs, track_caller)] - +#![feature(no_core)] #![no_std] #![no_core] #![no_main] -/* - * Core - */ - -// Because we don't have core yet. -#[lang = "sized"] -pub trait Sized {} - -#[lang = "copy"] -trait Copy { -} - -impl Copy for isize {} -impl Copy for *mut i32 {} -impl Copy for usize {} -impl Copy for u8 {} -impl Copy for i8 {} -impl Copy for i32 {} - -#[lang = "receiver"] -trait Receiver { -} - -#[lang = "freeze"] -pub(crate) unsafe auto trait Freeze {} - -#[lang = "panic_location"] -struct PanicLocation { - file: &'static str, - line: u32, - column: u32, -} - -mod libc { - #[link(name = "c")] - extern "C" { - pub fn puts(s: *const u8) -> i32; - pub fn fflush(stream: *mut i32) -> i32; - pub fn printf(format: *const i8, ...) -> i32; - - pub static stdout: *mut i32; - } -} - -mod intrinsics { - #[rustc_nounwind] - #[rustc_intrinsic] - pub fn abort() -> !; -} - -#[lang = "panic"] -#[track_caller] -#[no_mangle] -pub fn panic(_msg: &'static str) -> ! { - unsafe { - libc::puts("Panicking\0" as *const str as *const u8); - libc::fflush(libc::stdout); - intrinsics::abort(); - } -} - -#[lang = "add"] -trait Add { - type Output; - - fn add(self, rhs: RHS) -> Self::Output; -} - -impl Add for u8 { - type Output = Self; - - fn add(self, rhs: Self) -> Self { - self + rhs - } -} - -impl Add for i8 { - type Output = Self; - - fn add(self, rhs: Self) -> Self { - self + rhs - } -} - -impl Add for i32 { - type Output = Self; - - fn add(self, rhs: Self) -> Self { - self + rhs - } -} - -impl Add for usize { - type Output = Self; - - fn add(self, rhs: Self) -> Self { - self + rhs - } -} - -impl Add for isize { - type Output = Self; - - fn add(self, rhs: Self) -> Self { - self + rhs - } -} - -#[track_caller] -#[lang = "panic_const_add_overflow"] -pub fn panic_const_add_overflow() -> ! { - panic("attempt to add with overflow"); -} - -/* - * Code - */ +extern crate mini_core; +use mini_core::*; fn inc_ref(num: &mut isize) -> isize { *num = *num + 5; @@ -139,9 +22,8 @@ fn inc(num: isize) -> isize { num + 1 } - #[no_mangle] -extern "C" fn main(argc: i32, _argv: *const *const u8) -> i32 { +extern "C" fn main(mut argc: isize, _argv: *const *const u8) -> i32 { argc = inc(argc); unsafe { libc::printf(b"%ld\n\0" as *const u8 as *const i8, argc); diff --git a/compiler/rustc_codegen_gcc/tests/run/closure.rs b/compiler/rustc_codegen_gcc/tests/run/closure.rs index c7a236f74f9e6..a8a3fadfed47b 100644 --- a/compiler/rustc_codegen_gcc/tests/run/closure.rs +++ b/compiler/rustc_codegen_gcc/tests/run/closure.rs @@ -9,55 +9,38 @@ // Both args: 11 #![feature(no_core)] - #![no_std] #![no_core] #![no_main] extern crate mini_core; - -mod libc { - #[link(name = "c")] - extern "C" { - pub fn printf(format: *const i8, ...) -> i32; - } -} +use mini_core::*; #[no_mangle] -extern "C" fn main(argc: i32, _argv: *const *const u8) -> i32 { +extern "C" fn main(argc: isize, _argv: *const *const u8) -> i32 { let string = "Arg: %d\n\0"; - let mut closure = || { - unsafe { - libc::printf(string as *const str as *const i8, argc); - } + let mut closure = || unsafe { + libc::printf(string as *const str as *const i8, argc); }; closure(); - let mut closure = || { - unsafe { - libc::printf("Argument: %d\n\0" as *const str as *const i8, argc); - } + let mut closure = || unsafe { + libc::printf("Argument: %d\n\0" as *const str as *const i8, argc); }; closure(); - let mut closure = |string| { - unsafe { - libc::printf(string as *const str as *const i8, argc); - } + let mut closure = |string| unsafe { + libc::printf(string as *const str as *const i8, argc); }; closure("String arg: %d\n\0"); - let mut closure = |arg: isize| { - unsafe { - libc::printf("Int argument: %d\n\0" as *const str as *const i8, arg); - } + let mut closure = |arg: isize| unsafe { + libc::printf("Int argument: %d\n\0" as *const str as *const i8, arg); }; closure(argc + 1); - let mut closure = |string, arg: isize| { - unsafe { - libc::printf(string as *const str as *const i8, arg); - } + let mut closure = |string, arg: isize| unsafe { + libc::printf(string as *const str as *const i8, arg); }; closure("Both args: %d\n\0", argc + 10); diff --git a/compiler/rustc_codegen_gcc/tests/run/condition.rs b/compiler/rustc_codegen_gcc/tests/run/condition.rs index b02359702ed2e..bd3b6f7497fbd 100644 --- a/compiler/rustc_codegen_gcc/tests/run/condition.rs +++ b/compiler/rustc_codegen_gcc/tests/run/condition.rs @@ -6,19 +6,12 @@ // 1 #![feature(no_core)] - #![no_std] #![no_core] #![no_main] extern crate mini_core; - -mod libc { - #[link(name = "c")] - extern "C" { - pub fn printf(format: *const i8, ...) -> i32; - } -} +use mini_core::*; #[no_mangle] extern "C" fn main(argc: i32, _argv: *const *const u8) -> i32 { @@ -27,15 +20,14 @@ extern "C" fn main(argc: i32, _argv: *const *const u8) -> i32 { libc::printf(b"true\n\0" as *const u8 as *const i8); } - let string = - match argc { - 1 => b"1\n\0", - 2 => b"2\n\0", - 3 => b"3\n\0", - 4 => b"4\n\0", - 5 => b"5\n\0", - _ => b"_\n\0", - }; + let string = match argc { + 1 => b"1\n\0", + 2 => b"2\n\0", + 3 => b"3\n\0", + 4 => b"4\n\0", + 5 => b"5\n\0", + _ => b"_\n\0", + }; libc::printf(string as *const u8 as *const i8); } 0 diff --git a/compiler/rustc_codegen_gcc/tests/run/empty_main.rs b/compiler/rustc_codegen_gcc/tests/run/empty_main.rs index 042e44080c53a..fe3df5a2389ca 100644 --- a/compiler/rustc_codegen_gcc/tests/run/empty_main.rs +++ b/compiler/rustc_codegen_gcc/tests/run/empty_main.rs @@ -3,37 +3,13 @@ // Run-time: // status: 0 -#![feature(auto_traits, lang_items, no_core)] -#![allow(internal_features)] - +#![feature(no_core)] #![no_std] #![no_core] #![no_main] -/* - * Core - */ - -// Because we don't have core yet. -#[lang = "sized"] -pub trait Sized {} - -#[lang = "copy"] -trait Copy { -} - -impl Copy for isize {} - -#[lang = "receiver"] -trait Receiver { -} - -#[lang = "freeze"] -pub(crate) unsafe auto trait Freeze {} - -/* - * Code - */ +extern crate mini_core; +use mini_core::*; #[no_mangle] extern "C" fn main(argc: i32, _argv: *const *const u8) -> i32 { diff --git a/compiler/rustc_codegen_gcc/tests/run/exit.rs b/compiler/rustc_codegen_gcc/tests/run/exit.rs index 9a7c91c0adb20..e0a59174bd383 100644 --- a/compiler/rustc_codegen_gcc/tests/run/exit.rs +++ b/compiler/rustc_codegen_gcc/tests/run/exit.rs @@ -3,44 +3,13 @@ // Run-time: // status: 2 -#![feature(auto_traits, lang_items, no_core, intrinsics)] -#![allow(internal_features)] - +#![feature(no_core)] #![no_std] #![no_core] #![no_main] -mod libc { - #[link(name = "c")] - extern "C" { - pub fn exit(status: i32); - } -} - -/* - * Core - */ - -// Because we don't have core yet. -#[lang = "sized"] -pub trait Sized {} - -#[lang = "copy"] -trait Copy { -} - -impl Copy for isize {} - -#[lang = "receiver"] -trait Receiver { -} - -#[lang = "freeze"] -pub(crate) unsafe auto trait Freeze {} - -/* - * Code - */ +extern crate mini_core; +use mini_core::*; #[no_mangle] extern "C" fn main(argc: i32, _argv: *const *const u8) -> i32 { diff --git a/compiler/rustc_codegen_gcc/tests/run/exit_code.rs b/compiler/rustc_codegen_gcc/tests/run/exit_code.rs index c50d2b0d7107c..376824da845c3 100644 --- a/compiler/rustc_codegen_gcc/tests/run/exit_code.rs +++ b/compiler/rustc_codegen_gcc/tests/run/exit_code.rs @@ -3,37 +3,13 @@ // Run-time: // status: 1 -#![feature(auto_traits, lang_items, no_core)] -#![allow(internal_features)] - +#![feature(no_core)] #![no_std] #![no_core] #![no_main] -/* - * Core - */ - -// Because we don't have core yet. -#[lang = "sized"] -pub trait Sized {} - -#[lang = "copy"] -trait Copy { -} - -impl Copy for isize {} - -#[lang = "receiver"] -trait Receiver { -} - -#[lang = "freeze"] -pub(crate) unsafe auto trait Freeze {} - -/* - * Code - */ +extern crate mini_core; +use mini_core::*; #[no_mangle] extern "C" fn main(argc: i32, _argv: *const *const u8) -> i32 { diff --git a/compiler/rustc_codegen_gcc/tests/run/float.rs b/compiler/rustc_codegen_gcc/tests/run/float.rs new file mode 100644 index 0000000000000..424fa1cf4ad53 --- /dev/null +++ b/compiler/rustc_codegen_gcc/tests/run/float.rs @@ -0,0 +1,28 @@ +// Compiler: +// +// Run-time: +// status: 0 + +#![feature(const_black_box)] + +fn main() { + use std::hint::black_box; + + macro_rules! check { + ($ty:ty, $expr:expr) => {{ + const EXPECTED: $ty = $expr; + assert_eq!($expr, EXPECTED); + }}; + } + + check!(i32, (black_box(0.0f32) as i32)); + + check!(u64, (black_box(f32::NAN) as u64)); + check!(u128, (black_box(f32::NAN) as u128)); + + check!(i64, (black_box(f64::NAN) as i64)); + check!(u64, (black_box(f64::NAN) as u64)); + + check!(i16, (black_box(f32::MIN) as i16)); + check!(i16, (black_box(f32::MAX) as i16)); +} diff --git a/compiler/rustc_codegen_gcc/tests/run/fun_ptr.rs b/compiler/rustc_codegen_gcc/tests/run/fun_ptr.rs index 98b351e504495..93b9baee1b24a 100644 --- a/compiler/rustc_codegen_gcc/tests/run/fun_ptr.rs +++ b/compiler/rustc_codegen_gcc/tests/run/fun_ptr.rs @@ -5,19 +5,12 @@ // stdout: 1 #![feature(no_core)] - #![no_std] #![no_core] #![no_main] extern crate mini_core; - -mod libc { - #[link(name = "c")] - extern "C" { - pub fn printf(format: *const i8, ...) -> i32; - } -} +use mini_core::*; fn i16_as_i8(a: i16) -> i8 { a as i8 diff --git a/compiler/rustc_codegen_gcc/tests/run/int.rs b/compiler/rustc_codegen_gcc/tests/run/int.rs index 58a26801b678c..47b5dea46f8de 100644 --- a/compiler/rustc_codegen_gcc/tests/run/int.rs +++ b/compiler/rustc_codegen_gcc/tests/run/int.rs @@ -3,9 +3,7 @@ // Run-time: // status: 0 -/* - * Code - */ +#![feature(const_black_box)] fn main() { use std::hint::black_box; diff --git a/compiler/rustc_codegen_gcc/tests/run/mut_ref.rs b/compiler/rustc_codegen_gcc/tests/run/mut_ref.rs index b0215860406e5..fa50d5bc5d3d3 100644 --- a/compiler/rustc_codegen_gcc/tests/run/mut_ref.rs +++ b/compiler/rustc_codegen_gcc/tests/run/mut_ref.rs @@ -1,4 +1,3 @@ - // Compiler: // // Run-time: @@ -7,139 +6,20 @@ // 6 // 11 -#![allow(internal_features, unused_attributes)] -#![feature(auto_traits, lang_items, no_core, intrinsics, rustc_attrs, track_caller)] - +#![feature(no_core)] #![no_std] #![no_core] #![no_main] -/* - * Core - */ - -// Because we don't have core yet. -#[lang = "sized"] -pub trait Sized {} - -#[lang = "copy"] -trait Copy { -} - -impl Copy for isize {} -impl Copy for *mut i32 {} -impl Copy for usize {} -impl Copy for u8 {} -impl Copy for i8 {} -impl Copy for i32 {} - -#[lang = "receiver"] -trait Receiver { -} - -#[lang = "freeze"] -pub(crate) unsafe auto trait Freeze {} - -#[lang = "panic_location"] -struct PanicLocation { - file: &'static str, - line: u32, - column: u32, -} - -mod libc { - #[link(name = "c")] - extern "C" { - pub fn puts(s: *const u8) -> i32; - pub fn fflush(stream: *mut i32) -> i32; - pub fn printf(format: *const i8, ...) -> i32; - - pub static stdout: *mut i32; - } -} - -mod intrinsics { - #[rustc_nounwind] - #[rustc_intrinsic] - pub fn abort() -> !; -} - -#[lang = "panic"] -#[track_caller] -#[no_mangle] -pub fn panic(_msg: &'static str) -> ! { - unsafe { - libc::puts("Panicking\0" as *const str as *const u8); - libc::fflush(libc::stdout); - intrinsics::abort(); - } -} - -#[lang = "add"] -trait Add { - type Output; - - fn add(self, rhs: RHS) -> Self::Output; -} - -impl Add for u8 { - type Output = Self; - - fn add(self, rhs: Self) -> Self { - self + rhs - } -} - -impl Add for i8 { - type Output = Self; - - fn add(self, rhs: Self) -> Self { - self + rhs - } -} - -impl Add for i32 { - type Output = Self; - - fn add(self, rhs: Self) -> Self { - self + rhs - } -} - -impl Add for usize { - type Output = Self; - - fn add(self, rhs: Self) -> Self { - self + rhs - } -} - -impl Add for isize { - type Output = Self; - - fn add(self, rhs: Self) -> Self { - self + rhs - } -} - -#[track_caller] -#[lang = "panic_const_add_overflow"] -pub fn panic_const_add_overflow() -> ! { - panic("attempt to add with overflow"); -} - -/* - * Code - */ +extern crate mini_core; +use mini_core::*; struct Test { field: isize, } fn test(num: isize) -> Test { - Test { - field: num + 1, - } + Test { field: num + 1 } } fn update_num(num: &mut isize) { @@ -147,7 +27,7 @@ fn update_num(num: &mut isize) { } #[no_mangle] -extern "C" fn main(argc: i32, _argv: *const *const u8) -> i32 { +extern "C" fn main(mut argc: isize, _argv: *const *const u8) -> i32 { let mut test = test(argc); unsafe { libc::printf(b"%ld\n\0" as *const u8 as *const i8, test.field); diff --git a/compiler/rustc_codegen_gcc/tests/run/operations.rs b/compiler/rustc_codegen_gcc/tests/run/operations.rs index 8ba7a4c5ed8ce..a1b0772f76b62 100644 --- a/compiler/rustc_codegen_gcc/tests/run/operations.rs +++ b/compiler/rustc_codegen_gcc/tests/run/operations.rs @@ -5,229 +5,13 @@ // 39 // 10 -#![allow(internal_features, unused_attributes)] -#![feature(auto_traits, lang_items, no_core, intrinsics, arbitrary_self_types, rustc_attrs)] - +#![feature(no_core)] #![no_std] #![no_core] #![no_main] -/* - * Core - */ - -// Because we don't have core yet. -#[lang = "sized"] -pub trait Sized {} - -#[lang = "copy"] -trait Copy { -} - -impl Copy for isize {} -impl Copy for *mut i32 {} -impl Copy for usize {} -impl Copy for u8 {} -impl Copy for i8 {} -impl Copy for i16 {} -impl Copy for i32 {} - -#[lang = "deref"] -pub trait Deref { - type Target: ?Sized; - - fn deref(&self) -> &Self::Target; -} - -#[lang = "legacy_receiver"] -trait LegacyReceiver { -} - -#[lang = "freeze"] -pub(crate) unsafe auto trait Freeze {} - -#[lang = "panic_location"] -struct PanicLocation { - file: &'static str, - line: u32, - column: u32, -} - -mod libc { - #[link(name = "c")] - extern "C" { - pub fn printf(format: *const i8, ...) -> i32; - pub fn puts(s: *const u8) -> i32; - pub fn fflush(stream: *mut i32) -> i32; - - pub static stdout: *mut i32; - } -} - -mod intrinsics { - #[rustc_nounwind] - #[rustc_intrinsic] - pub fn abort() -> !; -} - -#[lang = "panic"] -#[track_caller] -#[no_mangle] -pub fn panic(_msg: &'static str) -> ! { - unsafe { - libc::puts("Panicking\0" as *const str as *const u8); - libc::fflush(libc::stdout); - intrinsics::abort(); - } -} - -#[lang = "add"] -trait Add { - type Output; - - fn add(self, rhs: RHS) -> Self::Output; -} - -impl Add for u8 { - type Output = Self; - - fn add(self, rhs: Self) -> Self { - self + rhs - } -} - -impl Add for i8 { - type Output = Self; - - fn add(self, rhs: Self) -> Self { - self + rhs - } -} - -impl Add for i32 { - type Output = Self; - - fn add(self, rhs: Self) -> Self { - self + rhs - } -} - -impl Add for usize { - type Output = Self; - - fn add(self, rhs: Self) -> Self { - self + rhs - } -} - -impl Add for isize { - type Output = Self; - - fn add(self, rhs: Self) -> Self { - self + rhs - } -} - -#[lang = "sub"] -pub trait Sub { - type Output; - - fn sub(self, rhs: RHS) -> Self::Output; -} - -impl Sub for usize { - type Output = Self; - - fn sub(self, rhs: Self) -> Self { - self - rhs - } -} - -impl Sub for isize { - type Output = Self; - - fn sub(self, rhs: Self) -> Self { - self - rhs - } -} - -impl Sub for u8 { - type Output = Self; - - fn sub(self, rhs: Self) -> Self { - self - rhs - } -} - -impl Sub for i8 { - type Output = Self; - - fn sub(self, rhs: Self) -> Self { - self - rhs - } -} - -impl Sub for i16 { - type Output = Self; - - fn sub(self, rhs: Self) -> Self { - self - rhs - } -} - -#[lang = "mul"] -pub trait Mul { - type Output; - - #[must_use] - fn mul(self, rhs: RHS) -> Self::Output; -} - -impl Mul for u8 { - type Output = Self; - - fn mul(self, rhs: Self) -> Self::Output { - self * rhs - } -} - -impl Mul for usize { - type Output = Self; - - fn mul(self, rhs: Self) -> Self::Output { - self * rhs - } -} - -impl Mul for isize { - type Output = Self; - - fn mul(self, rhs: Self) -> Self::Output { - self * rhs - } -} - -#[track_caller] -#[lang = "panic_const_add_overflow"] -pub fn panic_const_add_overflow() -> ! { - panic("attempt to add with overflow"); -} - -#[track_caller] -#[lang = "panic_const_sub_overflow"] -pub fn panic_const_sub_overflow() -> ! { - panic("attempt to subtract with overflow"); -} - -#[track_caller] -#[lang = "panic_const_mul_overflow"] -pub fn panic_const_mul_overflow() -> ! { - panic("attempt to multiply with overflow"); -} - -/* - * Code - */ +extern crate mini_core; +use mini_core::*; #[no_mangle] extern "C" fn main(argc: i32, _argv: *const *const u8) -> i32 { diff --git a/compiler/rustc_codegen_gcc/tests/run/ptr_cast.rs b/compiler/rustc_codegen_gcc/tests/run/ptr_cast.rs index 0ba49e7187fca..e627886a9d575 100644 --- a/compiler/rustc_codegen_gcc/tests/run/ptr_cast.rs +++ b/compiler/rustc_codegen_gcc/tests/run/ptr_cast.rs @@ -2,32 +2,36 @@ // // Run-time: // status: 0 -// stdout: 1 +// stdout: 10 +// 10 +// 42 +// 1 #![feature(no_core)] - #![no_std] #![no_core] #![no_main] extern crate mini_core; +use mini_core::*; -mod libc { - #[link(name = "c")] - extern "C" { - pub fn printf(format: *const i8, ...) -> i32; - } +fn int_cast(a: u16, b: i16) -> (u8, u16, u32, usize, i8, i16, i32, isize, u8, u32) { + ( + a as u8, a as u16, a as u32, a as usize, a as i8, a as i16, a as i32, a as isize, b as u8, + b as u32, + ) } static mut ONE: usize = 1; -fn make_array() -> [u8; 3] { - [42, 10, 5] -} - #[no_mangle] extern "C" fn main(argc: i32, _argv: *const *const u8) -> i32 { + let (a, b, c, d, e, f, g, h, i, j) = int_cast(10, 42); unsafe { + libc::printf(b"%d\n\0" as *const u8 as *const i8, c); + libc::printf(b"%ld\n\0" as *const u8 as *const i8, d); + libc::printf(b"%ld\n\0" as *const u8 as *const i8, j); + let ptr = ONE as *mut usize; let value = ptr as usize; libc::printf(b"%ld\n\0" as *const u8 as *const i8, value); diff --git a/compiler/rustc_codegen_gcc/tests/run/return-tuple.rs b/compiler/rustc_codegen_gcc/tests/run/return-tuple.rs index 3cc1e274001e7..c1254c51ce91d 100644 --- a/compiler/rustc_codegen_gcc/tests/run/return-tuple.rs +++ b/compiler/rustc_codegen_gcc/tests/run/return-tuple.rs @@ -6,54 +6,13 @@ // 10 // 42 -#![feature(auto_traits, lang_items, no_core, intrinsics)] -#![allow(internal_features)] - +#![feature(no_core)] #![no_std] #![no_core] #![no_main] -#[lang = "copy"] -pub unsafe trait Copy {} - -impl Copy for bool {} -impl Copy for u8 {} -impl Copy for u16 {} -impl Copy for u32 {} -impl Copy for u64 {} -impl Copy for usize {} -impl Copy for i8 {} -impl Copy for i16 {} -impl Copy for i32 {} -impl Copy for isize {} -impl Copy for f32 {} -impl Copy for char {} - -mod libc { - #[link(name = "c")] - extern "C" { - pub fn printf(format: *const i8, ...) -> i32; - } -} - -/* - * Core - */ - -// Because we don't have core yet. -#[lang = "sized"] -pub trait Sized {} - -#[lang = "legacy_receiver"] -trait LegacyReceiver { -} - -#[lang = "freeze"] -pub(crate) unsafe auto trait Freeze {} - -/* - * Code - */ +extern crate mini_core; +use mini_core::*; fn int_cast(a: u16, b: i16) -> (u8, u16, u32, usize, i8, i16, i32, isize, u8, u32) { ( diff --git a/compiler/rustc_codegen_gcc/tests/run/slice.rs b/compiler/rustc_codegen_gcc/tests/run/slice.rs index 825fcb8a081e7..449ccabef7fe3 100644 --- a/compiler/rustc_codegen_gcc/tests/run/slice.rs +++ b/compiler/rustc_codegen_gcc/tests/run/slice.rs @@ -5,26 +5,17 @@ // stdout: 5 #![feature(no_core)] - #![no_std] #![no_core] #![no_main] extern crate mini_core; - -mod libc { - #[link(name = "c")] - extern "C" { - pub fn printf(format: *const i8, ...) -> i32; - } -} +use mini_core::*; static mut TWO: usize = 2; fn index_slice(s: &[u32]) -> u32 { - unsafe { - s[TWO] - } + unsafe { s[TWO] } } #[no_mangle] diff --git a/compiler/rustc_codegen_gcc/tests/run/static.rs b/compiler/rustc_codegen_gcc/tests/run/static.rs index c3c8121b1e195..1e36cf4f3d316 100644 --- a/compiler/rustc_codegen_gcc/tests/run/static.rs +++ b/compiler/rustc_codegen_gcc/tests/run/static.rs @@ -9,70 +9,13 @@ // 12 // 1 -#![feature(auto_traits, lang_items, no_core, intrinsics, rustc_attrs)] -#![allow(internal_features)] - +#![feature(no_core)] #![no_std] #![no_core] #![no_main] -/* - * Core - */ - -// Because we don't have core yet. -#[lang = "sized"] -pub trait Sized {} - -#[lang = "destruct"] -pub trait Destruct {} - -#[lang = "drop"] -pub trait Drop {} - -#[lang = "copy"] -trait Copy { -} - -impl Copy for isize {} -impl Copy for *mut T {} - -#[lang = "receiver"] -trait Receiver { -} - -#[lang = "freeze"] -pub(crate) unsafe auto trait Freeze {} - -mod intrinsics { - use super::Sized; - - #[rustc_nounwind] - #[rustc_intrinsic] - pub fn abort() -> !; -} - -mod libc { - #[link(name = "c")] - extern "C" { - pub fn printf(format: *const i8, ...) -> i32; - } -} - -#[lang = "structural_peq"] -pub trait StructuralPartialEq {} - -#[lang = "drop_in_place"] -#[allow(unconditional_recursion)] -pub unsafe fn drop_in_place(to_drop: *mut T) { - // Code here does not matter - this is replaced by the - // real drop glue by the compiler. - drop_in_place(to_drop); -} - -/* - * Code - */ +extern crate mini_core; +use mini_core::*; struct Test { field: isize, @@ -84,20 +27,14 @@ struct WithRef { static mut CONSTANT: isize = 10; -static mut TEST: Test = Test { - field: 12, -}; +static mut TEST: Test = Test { field: 12 }; -static mut TEST2: Test = Test { - field: 14, -}; +static mut TEST2: Test = Test { field: 14 }; -static mut WITH_REF: WithRef = WithRef { - refe: unsafe { &TEST }, -}; +static mut WITH_REF: WithRef = WithRef { refe: unsafe { &TEST } }; #[no_mangle] -extern "C" fn main(argc: i32, _argv: *const *const u8) -> i32 { +extern "C" fn main(argc: isize, _argv: *const *const u8) -> i32 { unsafe { libc::printf(b"%ld\n\0" as *const u8 as *const i8, CONSTANT); libc::printf(b"%ld\n\0" as *const u8 as *const i8, TEST2.field); diff --git a/compiler/rustc_codegen_gcc/tests/run/structs.rs b/compiler/rustc_codegen_gcc/tests/run/structs.rs index 59b8f358863f2..da73cbed9ae97 100644 --- a/compiler/rustc_codegen_gcc/tests/run/structs.rs +++ b/compiler/rustc_codegen_gcc/tests/run/structs.rs @@ -5,44 +5,13 @@ // stdout: 1 // 2 -#![feature(auto_traits, lang_items, no_core, intrinsics)] -#![allow(internal_features)] - +#![feature(no_core)] #![no_std] #![no_core] #![no_main] -/* - * Core - */ - -// Because we don't have core yet. -#[lang = "sized"] -pub trait Sized {} - -#[lang = "copy"] -trait Copy { -} - -impl Copy for isize {} - -#[lang = "receiver"] -trait Receiver { -} - -#[lang = "freeze"] -pub(crate) unsafe auto trait Freeze {} - -mod libc { - #[link(name = "c")] - extern "C" { - pub fn printf(format: *const i8, ...) -> i32; - } -} - -/* - * Code - */ +extern crate mini_core; +use mini_core::*; struct Test { field: isize, diff --git a/compiler/rustc_codegen_gcc/tests/run/tuple.rs b/compiler/rustc_codegen_gcc/tests/run/tuple.rs index ed60a56a68c4c..e0f2e95f6289b 100644 --- a/compiler/rustc_codegen_gcc/tests/run/tuple.rs +++ b/compiler/rustc_codegen_gcc/tests/run/tuple.rs @@ -4,44 +4,13 @@ // status: 0 // stdout: 3 -#![feature(auto_traits, lang_items, no_core, intrinsics)] -#![allow(internal_features)] - +#![feature(no_core)] #![no_std] #![no_core] #![no_main] -/* - * Core - */ - -// Because we don't have core yet. -#[lang = "sized"] -pub trait Sized {} - -#[lang = "copy"] -trait Copy { -} - -impl Copy for isize {} - -#[lang = "receiver"] -trait Receiver { -} - -#[lang = "freeze"] -pub(crate) unsafe auto trait Freeze {} - -mod libc { - #[link(name = "c")] - extern "C" { - pub fn printf(format: *const i8, ...) -> i32; - } -} - -/* - * Code - */ +extern crate mini_core; +use mini_core::*; #[no_mangle] extern "C" fn main(argc: i32, _argv: *const *const u8) -> i32 { diff --git a/compiler/rustc_codegen_llvm/messages.ftl b/compiler/rustc_codegen_llvm/messages.ftl index 17f2e7ca9f702..41391b096cca8 100644 --- a/compiler/rustc_codegen_llvm/messages.ftl +++ b/compiler/rustc_codegen_llvm/messages.ftl @@ -56,6 +56,8 @@ codegen_llvm_prepare_thin_lto_module_with_llvm_err = failed to prepare thin LTO codegen_llvm_run_passes = failed to run LLVM passes codegen_llvm_run_passes_with_llvm_err = failed to run LLVM passes: {$llvm_err} +codegen_llvm_sanitizer_kcfi_arity_requires_llvm_21_0_0 = `-Zsanitizer-kcfi-arity` requires LLVM 21.0.0 or later. + codegen_llvm_sanitizer_memtag_requires_mte = `-Zsanitizer=memtag` requires `-Ctarget-feature=+mte` diff --git a/compiler/rustc_codegen_llvm/src/abi.rs b/compiler/rustc_codegen_llvm/src/abi.rs index 7105933815138..8294e29d07df6 100644 --- a/compiler/rustc_codegen_llvm/src/abi.rs +++ b/compiler/rustc_codegen_llvm/src/abi.rs @@ -17,14 +17,13 @@ use rustc_target::callconv::{ use rustc_target::spec::SanitizerSet; use smallvec::SmallVec; -use crate::attributes::llfn_attrs_from_instance; +use crate::attributes::{self, llfn_attrs_from_instance}; use crate::builder::Builder; use crate::context::CodegenCx; use crate::llvm::{self, Attribute, AttributePlace}; use crate::type_::Type; use crate::type_of::LayoutLlvmExt; use crate::value::Value; -use crate::{attributes, llvm_util}; trait ArgAttributesExt { fn apply_attrs_to_llfn(&self, idx: AttributePlace, cx: &CodegenCx<'_, '_>, llfn: &Value); @@ -437,7 +436,6 @@ impl<'ll, 'tcx> FnAbiLlvmExt<'ll, 'tcx> for FnAbi<'tcx, Ty<'tcx>> { let apply_range_attr = |idx: AttributePlace, scalar: rustc_abi::Scalar| { if cx.sess().opts.optimize != config::OptLevel::No - && llvm_util::get_version() >= (19, 0, 0) && matches!(scalar.primitive(), Primitive::Int(..)) // If the value is a boolean, the range is 0..2 and that ultimately // become 0..0 when the type becomes i1, which would be rejected @@ -571,19 +569,6 @@ impl<'ll, 'tcx> FnAbiLlvmExt<'ll, 'tcx> for FnAbi<'tcx, Ty<'tcx>> { } _ => {} } - if bx.cx.sess().opts.optimize != config::OptLevel::No - && llvm_util::get_version() < (19, 0, 0) - && let BackendRepr::Scalar(scalar) = self.ret.layout.backend_repr - && matches!(scalar.primitive(), Primitive::Int(..)) - // If the value is a boolean, the range is 0..2 and that ultimately - // become 0..0 when the type becomes i1, which would be rejected - // by the LLVM verifier. - && !scalar.is_bool() - // LLVM also rejects full range. - && !scalar.is_always_valid(bx) - { - bx.range_metadata(callsite, scalar.valid_range(bx)); - } for arg in self.args.iter() { match &arg.mode { PassMode::Ignore => {} diff --git a/compiler/rustc_codegen_llvm/src/asm.rs b/compiler/rustc_codegen_llvm/src/asm.rs index 88daa02574048..e481b99afcc67 100644 --- a/compiler/rustc_codegen_llvm/src/asm.rs +++ b/compiler/rustc_codegen_llvm/src/asm.rs @@ -376,7 +376,7 @@ impl<'ll, 'tcx> AsmBuilderMethods<'tcx> for Builder<'_, 'll, 'tcx> { impl<'tcx> AsmCodegenMethods<'tcx> for CodegenCx<'_, 'tcx> { fn codegen_global_asm( - &self, + &mut self, template: &[InlineAsmTemplatePiece], operands: &[GlobalAsmOperandRef<'tcx>], options: InlineAsmOptions, diff --git a/compiler/rustc_codegen_llvm/src/attributes.rs b/compiler/rustc_codegen_llvm/src/attributes.rs index 3d7afa17bdf3d..176fb72dfdc5e 100644 --- a/compiler/rustc_codegen_llvm/src/attributes.rs +++ b/compiler/rustc_codegen_llvm/src/attributes.rs @@ -1,5 +1,4 @@ //! Set and unset common attributes on LLVM values. - use rustc_attr_parsing::{InlineAttr, InstructionSetAttr, OptimizeAttr}; use rustc_codegen_ssa::traits::*; use rustc_hir::def_id::DefId; @@ -28,6 +27,22 @@ pub(crate) fn apply_to_callsite(callsite: &Value, idx: AttributePlace, attrs: &[ } } +pub(crate) fn has_attr(llfn: &Value, idx: AttributePlace, attr: AttributeKind) -> bool { + llvm::HasAttributeAtIndex(llfn, idx, attr) +} + +pub(crate) fn has_string_attr(llfn: &Value, name: &str) -> bool { + llvm::HasStringAttribute(llfn, name) +} + +pub(crate) fn remove_from_llfn(llfn: &Value, place: AttributePlace, kind: AttributeKind) { + llvm::RemoveRustEnumAttributeAtIndex(llfn, place, kind); +} + +pub(crate) fn remove_string_attr_from_llfn(llfn: &Value, name: &str) { + llvm::RemoveStringAttrFromFn(llfn, name); +} + /// Get LLVM attribute for the provided inline heuristic. #[inline] fn inline_attr<'ll>(cx: &CodegenCx<'ll, '_>, inline: InlineAttr) -> Option<&'ll Attribute> { @@ -407,30 +422,28 @@ pub(crate) fn llfn_attrs_from_instance<'ll, 'tcx>( // Do not set sanitizer attributes for naked functions. to_add.extend(sanitize_attrs(cx, codegen_fn_attrs.no_sanitize)); - if llvm_util::get_version() >= (19, 0, 0) { - // For non-naked functions, set branch protection attributes on aarch64. - if let Some(BranchProtection { bti, pac_ret }) = - cx.sess().opts.unstable_opts.branch_protection - { - assert!(cx.sess().target.arch == "aarch64"); - if bti { - to_add.push(llvm::CreateAttrString(cx.llcx, "branch-target-enforcement")); - } - if let Some(PacRet { leaf, pc, key }) = pac_ret { - if pc { - to_add.push(llvm::CreateAttrString(cx.llcx, "branch-protection-pauth-lr")); - } - to_add.push(llvm::CreateAttrStringValue( - cx.llcx, - "sign-return-address", - if leaf { "all" } else { "non-leaf" }, - )); - to_add.push(llvm::CreateAttrStringValue( - cx.llcx, - "sign-return-address-key", - if key == PAuthKey::A { "a_key" } else { "b_key" }, - )); + // For non-naked functions, set branch protection attributes on aarch64. + if let Some(BranchProtection { bti, pac_ret }) = + cx.sess().opts.unstable_opts.branch_protection + { + assert!(cx.sess().target.arch == "aarch64"); + if bti { + to_add.push(llvm::CreateAttrString(cx.llcx, "branch-target-enforcement")); + } + if let Some(PacRet { leaf, pc, key }) = pac_ret { + if pc { + to_add.push(llvm::CreateAttrString(cx.llcx, "branch-protection-pauth-lr")); } + to_add.push(llvm::CreateAttrStringValue( + cx.llcx, + "sign-return-address", + if leaf { "all" } else { "non-leaf" }, + )); + to_add.push(llvm::CreateAttrStringValue( + cx.llcx, + "sign-return-address-key", + if key == PAuthKey::A { "a_key" } else { "b_key" }, + )); } } } @@ -510,12 +523,6 @@ pub(crate) fn llfn_attrs_from_instance<'ll, 'tcx>( InstructionSetAttr::ArmA32 => "-thumb-mode".to_string(), InstructionSetAttr::ArmT32 => "+thumb-mode".to_string(), })) - // HACK: LLVM versions 19+ do not have the FPMR feature and treat it as always enabled - // It only exists as a feature in LLVM 18, cannot be passed down for any other version - .chain(match &*cx.tcx.sess.target.arch { - "aarch64" if llvm_util::get_version().0 == 18 => vec!["+fpmr".to_string()], - _ => vec![], - }) .collect::>(); if cx.tcx.sess.target.is_like_wasm { diff --git a/compiler/rustc_codegen_llvm/src/back/lto.rs b/compiler/rustc_codegen_llvm/src/back/lto.rs index f083cfbd7d306..cb329323f5d72 100644 --- a/compiler/rustc_codegen_llvm/src/back/lto.rs +++ b/compiler/rustc_codegen_llvm/src/back/lto.rs @@ -28,8 +28,9 @@ use crate::back::write::{ use crate::errors::{ DynamicLinkingWithLTO, LlvmError, LtoBitcodeFromRlib, LtoDisallowed, LtoDylib, LtoProcMacro, }; +use crate::llvm::AttributePlace::Function; use crate::llvm::{self, build_string}; -use crate::{LlvmCodegenBackend, ModuleLlvm}; +use crate::{LlvmCodegenBackend, ModuleLlvm, SimpleCx, attributes}; /// We keep track of the computed LTO cache keys from the previous /// session to determine which CGUs we can reuse. @@ -41,7 +42,8 @@ fn crate_type_allows_lto(crate_type: CrateType) -> bool { | CrateType::Dylib | CrateType::Staticlib | CrateType::Cdylib - | CrateType::ProcMacro => true, + | CrateType::ProcMacro + | CrateType::Sdylib => true, CrateType::Rlib => false, } } @@ -584,12 +586,10 @@ fn thin_lto( } } -fn enable_autodiff_settings(ad: &[config::AutoDiff], module: &mut ModuleCodegen) { +fn enable_autodiff_settings(ad: &[config::AutoDiff]) { for &val in ad { + // We intentionally don't use a wildcard, to not forget handling anything new. match val { - config::AutoDiff::PrintModBefore => { - unsafe { llvm::LLVMDumpModule(module.module_llvm.llmod()) }; - } config::AutoDiff::PrintPerf => { llvm::set_print_perf(true); } @@ -603,15 +603,23 @@ fn enable_autodiff_settings(ad: &[config::AutoDiff], module: &mut ModuleCodegen< llvm::set_inline(true); } config::AutoDiff::LooseTypes => { - llvm::set_loose_types(false); + llvm::set_loose_types(true); } config::AutoDiff::PrintSteps => { llvm::set_print(true); } - // We handle this below + // We handle this in the PassWrapper.cpp + config::AutoDiff::PrintPasses => {} + // We handle this in the PassWrapper.cpp + config::AutoDiff::PrintModBefore => {} + // We handle this in the PassWrapper.cpp config::AutoDiff::PrintModAfter => {} + // We handle this in the PassWrapper.cpp + config::AutoDiff::PrintModFinal => {} // This is required and already checked config::AutoDiff::Enable => {} + // We handle this below + config::AutoDiff::NoPostopt => {} } } // This helps with handling enums for now. @@ -645,26 +653,57 @@ pub(crate) fn run_pass_manager( // We then run the llvm_optimize function a second time, to optimize the code which we generated // in the enzyme differentiation pass. let enable_ad = config.autodiff.contains(&config::AutoDiff::Enable); - let stage = - if enable_ad { write::AutodiffStage::DuringAD } else { write::AutodiffStage::PostAD }; + let stage = if thin { + write::AutodiffStage::PreAD + } else { + if enable_ad { write::AutodiffStage::DuringAD } else { write::AutodiffStage::PostAD } + }; if enable_ad { - enable_autodiff_settings(&config.autodiff, module); + enable_autodiff_settings(&config.autodiff); } unsafe { write::llvm_optimize(cgcx, dcx, module, None, config, opt_level, opt_stage, stage)?; } - if cfg!(llvm_enzyme) && enable_ad { + if cfg!(llvm_enzyme) && enable_ad && !thin { + let cx = + SimpleCx::new(module.module_llvm.llmod(), &module.module_llvm.llcx, cgcx.pointer_size); + + for function in cx.get_functions() { + let enzyme_marker = "enzyme_marker"; + if attributes::has_string_attr(function, enzyme_marker) { + // Sanity check: Ensure 'noinline' is present before replacing it. + assert!( + !attributes::has_attr(function, Function, llvm::AttributeKind::NoInline), + "Expected __enzyme function to have 'noinline' before adding 'alwaysinline'" + ); + + attributes::remove_from_llfn(function, Function, llvm::AttributeKind::NoInline); + attributes::remove_string_attr_from_llfn(function, enzyme_marker); + + assert!( + !attributes::has_string_attr(function, enzyme_marker), + "Expected function to not have 'enzyme_marker'" + ); + + let always_inline = llvm::AttributeKind::AlwaysInline.create_attr(cx.llcx); + attributes::apply_to_llfn(function, Function, &[always_inline]); + } + } + let opt_stage = llvm::OptStage::FatLTO; let stage = write::AutodiffStage::PostAD; - unsafe { - write::llvm_optimize(cgcx, dcx, module, None, config, opt_level, opt_stage, stage)?; + if !config.autodiff.contains(&config::AutoDiff::NoPostopt) { + unsafe { + write::llvm_optimize(cgcx, dcx, module, None, config, opt_level, opt_stage, stage)?; + } } - // This is the final IR, so people should be able to inspect the optimized autodiff output. - if config.autodiff.contains(&config::AutoDiff::PrintModAfter) { + // This is the final IR, so people should be able to inspect the optimized autodiff output, + // for manual inspection. + if config.autodiff.contains(&config::AutoDiff::PrintModFinal) { unsafe { llvm::LLVMDumpModule(module.module_llvm.llmod()) }; } } diff --git a/compiler/rustc_codegen_llvm/src/back/write.rs b/compiler/rustc_codegen_llvm/src/back/write.rs index bead4c82a8120..4ac77c8f7f165 100644 --- a/compiler/rustc_codegen_llvm/src/back/write.rs +++ b/compiler/rustc_codegen_llvm/src/back/write.rs @@ -119,14 +119,18 @@ pub(crate) fn create_target_machine(tcx: TyCtxt<'_>, mod_name: &str) -> OwnedTar tcx.output_filenames(()).split_dwarf_path( tcx.sess.split_debuginfo(), tcx.sess.opts.unstable_opts.split_dwarf_kind, - Some(mod_name), + mod_name, + tcx.sess.invocation_temp.as_deref(), ) } else { None }; - let output_obj_file = - Some(tcx.output_filenames(()).temp_path(OutputType::Object, Some(mod_name))); + let output_obj_file = Some(tcx.output_filenames(()).temp_path_for_cgu( + OutputType::Object, + mod_name, + tcx.sess.invocation_temp.as_deref(), + )); let config = TargetMachineFactoryConfig { split_dwarf_file, output_obj_file }; target_machine_factory( @@ -330,8 +334,11 @@ pub(crate) fn save_temp_bitcode( return; } let ext = format!("{name}.bc"); - let cgu = Some(&module.name[..]); - let path = cgcx.output_filenames.temp_path_ext(&ext, cgu); + let path = cgcx.output_filenames.temp_path_ext_for_cgu( + &ext, + &module.name, + cgcx.invocation_temp.as_deref(), + ); write_bitcode_to_file(module, &path) } @@ -439,12 +446,9 @@ fn report_inline_asm( let span = if cookie == 0 || matches!(cgcx.lto, Lto::Fat | Lto::Thin) { SpanData::default() } else { - let lo = BytePos::from_u32(cookie as u32); - let hi = BytePos::from_u32((cookie >> 32) as u32); SpanData { - lo, - // LLVM version < 19 silently truncates the cookie to 32 bits in some situations. - hi: if hi.to_u32() != 0 { hi } else { lo }, + lo: BytePos::from_u32(cookie as u32), + hi: BytePos::from_u32((cookie >> 32) as u32), ctxt: SyntaxContext::root(), parent: None, } @@ -568,6 +572,10 @@ pub(crate) unsafe fn llvm_optimize( let consider_ad = cfg!(llvm_enzyme) && config.autodiff.contains(&config::AutoDiff::Enable); let run_enzyme = autodiff_stage == AutodiffStage::DuringAD; + let print_before_enzyme = config.autodiff.contains(&config::AutoDiff::PrintModBefore); + let print_after_enzyme = config.autodiff.contains(&config::AutoDiff::PrintModAfter); + let print_passes = config.autodiff.contains(&config::AutoDiff::PrintPasses); + let merge_functions; let unroll_loops; let vectorize_slp; let vectorize_loop; @@ -575,13 +583,20 @@ pub(crate) unsafe fn llvm_optimize( // When we build rustc with enzyme/autodiff support, we want to postpone size-increasing // optimizations until after differentiation. Our pipeline is thus: (opt + enzyme), (full opt). // We therefore have two calls to llvm_optimize, if autodiff is used. + // + // We also must disable merge_functions, since autodiff placeholder/dummy bodies tend to be + // identical. We run opts before AD, so there is a chance that LLVM will merge our dummies. + // In that case, we lack some dummy bodies and can't replace them with the real AD code anymore. + // We then would need to abort compilation. This was especially common in test cases. if consider_ad && autodiff_stage != AutodiffStage::PostAD { + merge_functions = false; unroll_loops = false; vectorize_slp = false; vectorize_loop = false; } else { unroll_loops = opt_level != config::OptLevel::Size && opt_level != config::OptLevel::SizeMin; + merge_functions = config.merge_functions; vectorize_slp = config.vectorize_slp; vectorize_loop = config.vectorize_loop; } @@ -659,13 +674,16 @@ pub(crate) unsafe fn llvm_optimize( thin_lto_buffer, config.emit_thin_lto, config.emit_thin_lto_summary, - config.merge_functions, + merge_functions, unroll_loops, vectorize_slp, vectorize_loop, config.no_builtins, config.emit_lifetime_markers, run_enzyme, + print_before_enzyme, + print_after_enzyme, + print_passes, sanitizer_options.as_ref(), pgo_gen_path.as_ref().map_or(std::ptr::null(), |s| s.as_ptr()), pgo_use_path.as_ref().map_or(std::ptr::null(), |s| s.as_ptr()), @@ -697,11 +715,12 @@ pub(crate) unsafe fn optimize( let llcx = &*module.module_llvm.llcx; let _handlers = DiagnosticHandlers::new(cgcx, dcx, llcx, module, CodegenDiagnosticsStage::Opt); - let module_name = module.name.clone(); - let module_name = Some(&module_name[..]); - if config.emit_no_opt_bc { - let out = cgcx.output_filenames.temp_path_ext("no-opt.bc", module_name); + let out = cgcx.output_filenames.temp_path_ext_for_cgu( + "no-opt.bc", + &module.name, + cgcx.invocation_temp.as_deref(), + ); write_bitcode_to_file(module, &out) } @@ -746,8 +765,11 @@ pub(crate) unsafe fn optimize( if let Some(thin_lto_buffer) = thin_lto_buffer { let thin_lto_buffer = unsafe { ThinBuffer::from_raw_ptr(thin_lto_buffer) }; module.thin_lto_buffer = Some(thin_lto_buffer.data().to_vec()); - let bc_summary_out = - cgcx.output_filenames.temp_path(OutputType::ThinLinkBitcode, module_name); + let bc_summary_out = cgcx.output_filenames.temp_path_for_cgu( + OutputType::ThinLinkBitcode, + &module.name, + cgcx.invocation_temp.as_deref(), + ); if config.emit_thin_lto_summary && let Some(thin_link_bitcode_filename) = bc_summary_out.file_name() { @@ -804,8 +826,6 @@ pub(crate) unsafe fn codegen( let llmod = module.module_llvm.llmod(); let llcx = &*module.module_llvm.llcx; let tm = &*module.module_llvm.tm; - let module_name = module.name.clone(); - let module_name = Some(&module_name[..]); let _handlers = DiagnosticHandlers::new(cgcx, dcx, llcx, &module, CodegenDiagnosticsStage::Codegen); @@ -817,8 +837,16 @@ pub(crate) unsafe fn codegen( // copy it to the .o file, and delete the bitcode if it wasn't // otherwise requested. - let bc_out = cgcx.output_filenames.temp_path(OutputType::Bitcode, module_name); - let obj_out = cgcx.output_filenames.temp_path(OutputType::Object, module_name); + let bc_out = cgcx.output_filenames.temp_path_for_cgu( + OutputType::Bitcode, + &module.name, + cgcx.invocation_temp.as_deref(), + ); + let obj_out = cgcx.output_filenames.temp_path_for_cgu( + OutputType::Object, + &module.name, + cgcx.invocation_temp.as_deref(), + ); if config.bitcode_needed() { if config.emit_bc || config.emit_obj == EmitObj::Bitcode { @@ -860,7 +888,11 @@ pub(crate) unsafe fn codegen( if config.emit_ir { let _timer = cgcx.prof.generic_activity_with_arg("LLVM_module_codegen_emit_ir", &*module.name); - let out = cgcx.output_filenames.temp_path(OutputType::LlvmAssembly, module_name); + let out = cgcx.output_filenames.temp_path_for_cgu( + OutputType::LlvmAssembly, + &module.name, + cgcx.invocation_temp.as_deref(), + ); let out_c = path_to_c_string(&out); extern "C" fn demangle_callback( @@ -902,7 +934,11 @@ pub(crate) unsafe fn codegen( if config.emit_asm { let _timer = cgcx.prof.generic_activity_with_arg("LLVM_module_codegen_emit_asm", &*module.name); - let path = cgcx.output_filenames.temp_path(OutputType::Assembly, module_name); + let path = cgcx.output_filenames.temp_path_for_cgu( + OutputType::Assembly, + &module.name, + cgcx.invocation_temp.as_deref(), + ); // We can't use the same module for asm and object code output, // because that triggers various errors like invalid IR or broken @@ -932,7 +968,9 @@ pub(crate) unsafe fn codegen( .prof .generic_activity_with_arg("LLVM_module_codegen_emit_obj", &*module.name); - let dwo_out = cgcx.output_filenames.temp_path_dwo(module_name); + let dwo_out = cgcx + .output_filenames + .temp_path_dwo_for_cgu(&module.name, cgcx.invocation_temp.as_deref()); let dwo_out = match (cgcx.split_debuginfo, cgcx.split_dwarf_kind) { // Don't change how DWARF is emitted when disabled. (SplitDebuginfo::Off, _) => None, @@ -997,6 +1035,7 @@ pub(crate) unsafe fn codegen( config.emit_asm, config.emit_ir, &cgcx.output_filenames, + cgcx.invocation_temp.as_deref(), )) } @@ -1024,7 +1063,7 @@ fn create_section_with_flags_asm(section_name: &str, section_flags: &str, data: } pub(crate) fn bitcode_section_name(cgcx: &CodegenContext) -> &'static CStr { - if cgcx.target_is_like_osx { + if cgcx.target_is_like_darwin { c"__LLVM,__bitcode" } else if cgcx.target_is_like_aix { c".ipa" @@ -1077,7 +1116,7 @@ unsafe fn embed_bitcode( // and COFF we emit the sections using module level inline assembly for that // reason (see issue #90326 for historical background). unsafe { - if cgcx.target_is_like_osx + if cgcx.target_is_like_darwin || cgcx.target_is_like_aix || cgcx.target_arch == "wasm32" || cgcx.target_arch == "wasm64" @@ -1096,7 +1135,7 @@ unsafe fn embed_bitcode( let llglobal = llvm::add_global(llmod, common::val_ty(llconst), c"rustc.embedded.cmdline"); llvm::set_initializer(llglobal, llconst); - let section = if cgcx.target_is_like_osx { + let section = if cgcx.target_is_like_darwin { c"__LLVM,__cmdline" } else if cgcx.target_is_like_aix { c".info" diff --git a/compiler/rustc_codegen_llvm/src/base.rs b/compiler/rustc_codegen_llvm/src/base.rs index 6bd27914dbd16..e4fac35aa4499 100644 --- a/compiler/rustc_codegen_llvm/src/base.rs +++ b/compiler/rustc_codegen_llvm/src/base.rs @@ -83,15 +83,15 @@ pub(crate) fn compile_codegen_unit( // Instantiate monomorphizations without filling out definitions yet... let llvm_module = ModuleLlvm::new(tcx, cgu_name.as_str()); { - let cx = CodegenCx::new(tcx, cgu, &llvm_module); + let mut cx = CodegenCx::new(tcx, cgu, &llvm_module); let mono_items = cx.codegen_unit.items_in_deterministic_order(cx.tcx); for &(mono_item, data) in &mono_items { mono_item.predefine::>(&cx, data.linkage, data.visibility); } // ... and now that we have everything pre-defined, fill out those definitions. - for &(mono_item, _) in &mono_items { - mono_item.define::>(&cx); + for &(mono_item, item_data) in &mono_items { + mono_item.define::>(&mut cx, item_data); } // If this codegen unit contains the main function, also create the diff --git a/compiler/rustc_codegen_llvm/src/builder.rs b/compiler/rustc_codegen_llvm/src/builder.rs index 297f104d124ad..04c8118b6160c 100644 --- a/compiler/rustc_codegen_llvm/src/builder.rs +++ b/compiler/rustc_codegen_llvm/src/builder.rs @@ -30,6 +30,7 @@ use smallvec::SmallVec; use tracing::{debug, instrument}; use crate::abi::FnAbiLlvmExt; +use crate::attributes; use crate::common::Funclet; use crate::context::{CodegenCx, FullCx, GenericCx, SCx}; use crate::llvm::{ @@ -38,7 +39,6 @@ use crate::llvm::{ use crate::type_::Type; use crate::type_of::LayoutLlvmExt; use crate::value::Value; -use crate::{attributes, llvm_util}; #[must_use] pub(crate) struct GenericBuilder<'a, 'll, CX: Borrow>> { @@ -123,7 +123,7 @@ impl<'a, 'll, CX: Borrow>> GenericBuilder<'a, 'll, CX> { /// Empty string, to be used where LLVM expects an instruction name, indicating /// that the instruction is to be left unnamed (i.e. numbered, in textual IR). // FIXME(eddyb) pass `&CStr` directly to FFI once it's a thin pointer. -const UNNAMED: *const c_char = c"".as_ptr(); +pub(crate) const UNNAMED: *const c_char = c"".as_ptr(); impl<'ll, CX: Borrow>> BackendTypes for GenericBuilder<'_, 'll, CX> { type Value = as BackendTypes>::Value; @@ -594,6 +594,7 @@ impl<'a, 'll, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> { fn load(&mut self, ty: &'ll Type, ptr: &'ll Value, align: Align) -> &'ll Value { unsafe { let load = llvm::LLVMBuildLoad2(self.llbuilder, ty, ptr, UNNAMED); + let align = align.min(self.cx().tcx.sess.target.max_reliable_alignment()); llvm::LLVMSetAlignment(load, align.bytes() as c_uint); load } @@ -807,6 +808,7 @@ impl<'a, 'll, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> { assert_eq!(self.cx.type_kind(self.cx.val_ty(ptr)), TypeKind::Pointer); unsafe { let store = llvm::LLVMBuildStore(self.llbuilder, val, ptr); + let align = align.min(self.cx().tcx.sess.target.max_reliable_alignment()); let align = if flags.contains(MemFlags::UNALIGNED) { 1 } else { align.bytes() as c_uint }; llvm::LLVMSetAlignment(store, align); @@ -927,11 +929,9 @@ impl<'a, 'll, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> { debug_assert_ne!(self.val_ty(val), dest_ty); let trunc = self.trunc(val, dest_ty); - if llvm_util::get_version() >= (19, 0, 0) { - unsafe { - if llvm::LLVMIsAInstruction(trunc).is_some() { - llvm::LLVMSetNUW(trunc, True); - } + unsafe { + if llvm::LLVMIsAInstruction(trunc).is_some() { + llvm::LLVMSetNUW(trunc, True); } } trunc @@ -941,11 +941,9 @@ impl<'a, 'll, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> { debug_assert_ne!(self.val_ty(val), dest_ty); let trunc = self.trunc(val, dest_ty); - if llvm_util::get_version() >= (19, 0, 0) { - unsafe { - if llvm::LLVMIsAInstruction(trunc).is_some() { - llvm::LLVMSetNSW(trunc, True); - } + unsafe { + if llvm::LLVMIsAInstruction(trunc).is_some() { + llvm::LLVMSetNSW(trunc, True); } } trunc @@ -1899,10 +1897,6 @@ impl<'a, 'll, 'tcx> Builder<'a, 'll, 'tcx> { hash: &'ll Value, bitmap_bits: &'ll Value, ) { - assert!( - crate::llvm_util::get_version() >= (19, 0, 0), - "MCDC intrinsics require LLVM 19 or later" - ); self.call_intrinsic("llvm.instrprof.mcdc.parameters", &[fn_name, hash, bitmap_bits]); } @@ -1914,10 +1908,6 @@ impl<'a, 'll, 'tcx> Builder<'a, 'll, 'tcx> { bitmap_index: &'ll Value, mcdc_temp: &'ll Value, ) { - assert!( - crate::llvm_util::get_version() >= (19, 0, 0), - "MCDC intrinsics require LLVM 19 or later" - ); let args = &[fn_name, hash, bitmap_index, mcdc_temp]; self.call_intrinsic("llvm.instrprof.mcdc.tvbitmap.update", args); } @@ -1929,10 +1919,6 @@ impl<'a, 'll, 'tcx> Builder<'a, 'll, 'tcx> { #[instrument(level = "debug", skip(self))] pub(crate) fn mcdc_condbitmap_update(&mut self, cond_index: &'ll Value, mcdc_temp: &'ll Value) { - assert!( - crate::llvm_util::get_version() >= (19, 0, 0), - "MCDC intrinsics require LLVM 19 or later" - ); let align = self.tcx.data_layout.i32_align.abi; let current_tv_index = self.load(self.cx.type_i32(), mcdc_temp, align); let new_tv_index = self.add(current_tv_index, cond_index); diff --git a/compiler/rustc_codegen_llvm/src/builder/autodiff.rs b/compiler/rustc_codegen_llvm/src/builder/autodiff.rs index 7cd4ee539d875..c5c13ac097a27 100644 --- a/compiler/rustc_codegen_llvm/src/builder/autodiff.rs +++ b/compiler/rustc_codegen_llvm/src/builder/autodiff.rs @@ -3,12 +3,14 @@ use std::ptr; use rustc_ast::expand::autodiff_attrs::{AutoDiffAttrs, AutoDiffItem, DiffActivity, DiffMode}; use rustc_codegen_ssa::ModuleCodegen; use rustc_codegen_ssa::back::write::ModuleConfig; -use rustc_codegen_ssa::traits::BaseTypeCodegenMethods as _; +use rustc_codegen_ssa::common::TypeKind; +use rustc_codegen_ssa::traits::BaseTypeCodegenMethods; use rustc_errors::FatalError; +use rustc_middle::bug; use tracing::{debug, trace}; use crate::back::write::llvm_err; -use crate::builder::SBuilder; +use crate::builder::{SBuilder, UNNAMED}; use crate::context::SimpleCx; use crate::declare::declare_simple_fn; use crate::errors::{AutoDiffWithoutEnable, LlvmError}; @@ -18,21 +20,43 @@ use crate::value::Value; use crate::{CodegenContext, LlvmCodegenBackend, ModuleLlvm, attributes, llvm}; fn get_params(fnc: &Value) -> Vec<&Value> { + let param_num = llvm::LLVMCountParams(fnc) as usize; + let mut fnc_args: Vec<&Value> = vec![]; + fnc_args.reserve(param_num); unsafe { - let param_num = llvm::LLVMCountParams(fnc) as usize; - let mut fnc_args: Vec<&Value> = vec![]; - fnc_args.reserve(param_num); llvm::LLVMGetParams(fnc, fnc_args.as_mut_ptr()); fnc_args.set_len(param_num); - fnc_args } + fnc_args } +fn has_sret(fnc: &Value) -> bool { + let num_args = llvm::LLVMCountParams(fnc) as usize; + if num_args == 0 { + false + } else { + unsafe { llvm::LLVMRustHasAttributeAtIndex(fnc, 0, llvm::AttributeKind::StructRet) } + } +} + +// When we call the `__enzyme_autodiff` or `__enzyme_fwddiff` function, we need to pass all the +// original inputs, as well as metadata and the additional shadow arguments. +// This function matches the arguments from the outer function to the inner enzyme call. +// +// This function also considers that Rust level arguments not always match the llvm-ir level +// arguments. A slice, `&[f32]`, for example, is represented as a pointer and a length on +// llvm-ir level. The number of activities matches the number of Rust level arguments, so we +// need to match those. +// FIXME(ZuseZ4): This logic is a bit more complicated than it should be, can we simplify it +// using iterators and peek()? fn match_args_from_caller_to_enzyme<'ll>( cx: &SimpleCx<'ll>, + builder: &SBuilder<'ll, 'll>, + width: u32, args: &mut Vec<&'ll llvm::Value>, inputs: &[DiffActivity], outer_args: &[&'ll llvm::Value], + has_sret: bool, ) { debug!("matching autodiff arguments"); // We now handle the issue that Rust level arguments not always match the llvm-ir level @@ -44,10 +68,20 @@ fn match_args_from_caller_to_enzyme<'ll>( let mut outer_pos: usize = 0; let mut activity_pos = 0; + if has_sret { + // Then the first outer arg is the sret pointer. Enzyme doesn't know about sret, so the + // inner function will still return something. We increase our outer_pos by one, + // and once we're done with all other args we will take the return of the inner call and + // update the sret pointer with it + outer_pos = 1; + } + let enzyme_const = cx.create_metadata("enzyme_const".to_string()).unwrap(); let enzyme_out = cx.create_metadata("enzyme_out".to_string()).unwrap(); let enzyme_dup = cx.create_metadata("enzyme_dup".to_string()).unwrap(); + let enzyme_dupv = cx.create_metadata("enzyme_dupv".to_string()).unwrap(); let enzyme_dupnoneed = cx.create_metadata("enzyme_dupnoneed".to_string()).unwrap(); + let enzyme_dupnoneedv = cx.create_metadata("enzyme_dupnoneedv".to_string()).unwrap(); while activity_pos < inputs.len() { let diff_activity = inputs[activity_pos as usize]; @@ -59,13 +93,34 @@ fn match_args_from_caller_to_enzyme<'ll>( DiffActivity::Active => (enzyme_out, false), DiffActivity::ActiveOnly => (enzyme_out, false), DiffActivity::Dual => (enzyme_dup, true), + DiffActivity::Dualv => (enzyme_dupv, true), DiffActivity::DualOnly => (enzyme_dupnoneed, true), + DiffActivity::DualvOnly => (enzyme_dupnoneedv, true), DiffActivity::Duplicated => (enzyme_dup, true), DiffActivity::DuplicatedOnly => (enzyme_dupnoneed, true), - DiffActivity::FakeActivitySize => (enzyme_const, false), + DiffActivity::FakeActivitySize(_) => (enzyme_const, false), }; let outer_arg = outer_args[outer_pos]; args.push(cx.get_metadata_value(activity)); + if matches!(diff_activity, DiffActivity::Dualv) { + let next_outer_arg = outer_args[outer_pos + 1]; + let elem_bytes_size: u64 = match inputs[activity_pos + 1] { + DiffActivity::FakeActivitySize(Some(s)) => s.into(), + _ => bug!("incorrect Dualv handling recognized."), + }; + // stride: sizeof(T) * n_elems. + // n_elems is the next integer. + // Now we multiply `4 * next_outer_arg` to get the stride. + let mul = unsafe { + llvm::LLVMBuildMul( + builder.llbuilder, + cx.get_const_i64(elem_bytes_size), + next_outer_arg, + UNNAMED, + ) + }; + args.push(mul); + } args.push(outer_arg); if duplicated { // We know that duplicated args by construction have a following argument, @@ -83,7 +138,7 @@ fn match_args_from_caller_to_enzyme<'ll>( } else { let next_activity = inputs[activity_pos + 1]; // We analyze the MIR types and add this dummy activity if we visit a slice. - next_activity == DiffActivity::FakeActivitySize + matches!(next_activity, DiffActivity::FakeActivitySize(_)) } }; if slice { @@ -92,23 +147,23 @@ fn match_args_from_caller_to_enzyme<'ll>( // (..., metadata! enzyme_dup, ptr, ptr, int1, ...). // FIXME(ZuseZ4): We will upstream a safety check later which asserts that // int2 >= int1, which means the shadow vector is large enough to store the gradient. - assert!(unsafe { - llvm::LLVMRustGetTypeKind(next_outer_ty) == llvm::TypeKind::Integer - }); - let next_outer_arg2 = outer_args[outer_pos + 2]; - let next_outer_ty2 = cx.val_ty(next_outer_arg2); - assert!(unsafe { - llvm::LLVMRustGetTypeKind(next_outer_ty2) == llvm::TypeKind::Pointer - }); - let next_outer_arg3 = outer_args[outer_pos + 3]; - let next_outer_ty3 = cx.val_ty(next_outer_arg3); - assert!(unsafe { - llvm::LLVMRustGetTypeKind(next_outer_ty3) == llvm::TypeKind::Integer - }); - args.push(next_outer_arg2); + assert_eq!(cx.type_kind(next_outer_ty), TypeKind::Integer); + + let iterations = + if matches!(diff_activity, DiffActivity::Dualv) { 1 } else { width as usize }; + + for i in 0..iterations { + let next_outer_arg2 = outer_args[outer_pos + 2 * (i + 1)]; + let next_outer_ty2 = cx.val_ty(next_outer_arg2); + assert_eq!(cx.type_kind(next_outer_ty2), TypeKind::Pointer); + let next_outer_arg3 = outer_args[outer_pos + 2 * (i + 1) + 1]; + let next_outer_ty3 = cx.val_ty(next_outer_arg3); + assert_eq!(cx.type_kind(next_outer_ty3), TypeKind::Integer); + args.push(next_outer_arg2); + } args.push(cx.get_metadata_value(enzyme_const)); args.push(next_outer_arg); - outer_pos += 4; + outer_pos += 2 + 2 * iterations; activity_pos += 2; } else { // A duplicated pointer will have the following two outer_fn arguments: @@ -116,15 +171,19 @@ fn match_args_from_caller_to_enzyme<'ll>( // (..., metadata! enzyme_dup, ptr, ptr, ...). if matches!(diff_activity, DiffActivity::Duplicated | DiffActivity::DuplicatedOnly) { - assert!( - unsafe { llvm::LLVMRustGetTypeKind(next_outer_ty) } - == llvm::TypeKind::Pointer - ); + assert_eq!(cx.type_kind(next_outer_ty), TypeKind::Pointer); } // In the case of Dual we don't have assumptions, e.g. f32 would be valid. args.push(next_outer_arg); outer_pos += 2; activity_pos += 1; + + // Now, if width > 1, we need to account for that + for _ in 1..width { + let next_outer_arg = outer_args[outer_pos]; + args.push(next_outer_arg); + outer_pos += 1; + } } } else { // We do not differentiate with resprect to this argument. @@ -135,6 +194,92 @@ fn match_args_from_caller_to_enzyme<'ll>( } } +// On LLVM-IR, we can luckily declare __enzyme_ functions without specifying the input +// arguments. We do however need to declare them with their correct return type. +// We already figured the correct return type out in our frontend, when generating the outer_fn, +// so we can now just go ahead and use that. This is not always trivial, e.g. because sret. +// Beyond sret, this article describes our challenges nicely: +// +// I.e. (i32, f32) will get merged into i64, but we don't handle that yet. +fn compute_enzyme_fn_ty<'ll>( + cx: &SimpleCx<'ll>, + attrs: &AutoDiffAttrs, + fn_to_diff: &'ll Value, + outer_fn: &'ll Value, +) -> &'ll llvm::Type { + let fn_ty = cx.get_type_of_global(outer_fn); + let mut ret_ty = cx.get_return_type(fn_ty); + + let has_sret = has_sret(outer_fn); + + if has_sret { + // Now we don't just forward the return type, so we have to figure it out based on the + // primal return type, in combination with the autodiff settings. + let fn_ty = cx.get_type_of_global(fn_to_diff); + let inner_ret_ty = cx.get_return_type(fn_ty); + + let void_ty = unsafe { llvm::LLVMVoidTypeInContext(cx.llcx) }; + if inner_ret_ty == void_ty { + // This indicates that even the inner function has an sret. + // Right now I only look for an sret in the outer function. + // This *probably* needs some extra handling, but I never ran + // into such a case. So I'll wait for user reports to have a test case. + bug!("sret in inner function"); + } + + if attrs.width == 1 { + // Enzyme returns a struct of style: + // `{ original_ret(if requested), float, float, ... }` + let mut struct_elements = vec![]; + if attrs.has_primal_ret() { + struct_elements.push(inner_ret_ty); + } + // Next, we push the list of active floats, since they will be lowered to `enzyme_out`, + // and therefore part of the return struct. + let param_tys = cx.func_params_types(fn_ty); + for (act, param_ty) in attrs.input_activity.iter().zip(param_tys) { + if matches!(act, DiffActivity::Active) { + // Now find the float type at position i based on the fn_ty, + // to know what (f16/f32/f64/...) to add to the struct. + struct_elements.push(param_ty); + } + } + ret_ty = cx.type_struct(&struct_elements, false); + } else { + // First we check if we also have to deal with the primal return. + match attrs.mode { + DiffMode::Forward => match attrs.ret_activity { + DiffActivity::Dual => { + let arr_ty = + unsafe { llvm::LLVMArrayType2(inner_ret_ty, attrs.width as u64 + 1) }; + ret_ty = arr_ty; + } + DiffActivity::DualOnly => { + let arr_ty = + unsafe { llvm::LLVMArrayType2(inner_ret_ty, attrs.width as u64) }; + ret_ty = arr_ty; + } + DiffActivity::Const => { + todo!("Not sure, do we need to do something here?"); + } + _ => { + bug!("unreachable"); + } + }, + DiffMode::Reverse => { + todo!("Handle sret for reverse mode"); + } + _ => { + bug!("unreachable"); + } + } + } + } + + // LLVM can figure out the input types on it's own, so we take a shortcut here. + unsafe { llvm::LLVMFunctionType(ret_ty, ptr::null(), 0, True) } +} + /// When differentiating `fn_to_diff`, take a `outer_fn` and generate another /// function with expected naming and calling conventions[^1] which will be /// discovered by the enzyme LLVM pass and its body populated with the differentiated @@ -197,17 +342,9 @@ fn generate_enzyme_call<'ll>( // } // ``` unsafe { - // On LLVM-IR, we can luckily declare __enzyme_ functions without specifying the input - // arguments. We do however need to declare them with their correct return type. - // We already figured the correct return type out in our frontend, when generating the outer_fn, - // so we can now just go ahead and use that. FIXME(ZuseZ4): This doesn't handle sret yet. - let fn_ty = llvm::LLVMGlobalGetValueType(outer_fn); - let ret_ty = llvm::LLVMGetReturnType(fn_ty); - - // LLVM can figure out the input types on it's own, so we take a shortcut here. - let enzyme_ty = llvm::LLVMFunctionType(ret_ty, ptr::null(), 0, True); + let enzyme_ty = compute_enzyme_fn_ty(cx, &attrs, fn_to_diff, outer_fn); - //FIXME(ZuseZ4): the CC/Addr/Vis values are best effort guesses, we should look at tests and + // FIXME(ZuseZ4): the CC/Addr/Vis values are best effort guesses, we should look at tests and // think a bit more about what should go here. let cc = llvm::LLVMGetFunctionCallConv(outer_fn); let ad_fn = declare_simple_fn( @@ -224,6 +361,11 @@ fn generate_enzyme_call<'ll>( let attr = llvm::AttributeKind::NoInline.create_attr(cx.llcx); attributes::apply_to_llfn(ad_fn, Function, &[attr]); + // We add a made-up attribute just such that we can recognize it after AD to update + // (no)-inline attributes. We'll then also remove this attribute. + let enzyme_marker_attr = llvm::CreateAttrString(cx.llcx, "enzyme_marker"); + attributes::apply_to_llfn(outer_fn, Function, &[enzyme_marker_attr]); + // first, remove all calls from fnc let entry = llvm::LLVMGetFirstBasicBlock(outer_fn); let br = llvm::LLVMRustGetTerminator(entry); @@ -240,14 +382,28 @@ fn generate_enzyme_call<'ll>( if matches!(attrs.ret_activity, DiffActivity::Dual | DiffActivity::Active) { args.push(cx.get_metadata_value(enzyme_primal_ret)); } + if attrs.width > 1 { + let enzyme_width = cx.create_metadata("enzyme_width".to_string()).unwrap(); + args.push(cx.get_metadata_value(enzyme_width)); + args.push(cx.get_const_i64(attrs.width as u64)); + } + let has_sret = has_sret(outer_fn); let outer_args: Vec<&llvm::Value> = get_params(outer_fn); - match_args_from_caller_to_enzyme(&cx, &mut args, &attrs.input_activity, &outer_args); + match_args_from_caller_to_enzyme( + &cx, + &builder, + attrs.width, + &mut args, + &attrs.input_activity, + &outer_args, + has_sret, + ); let call = builder.call(enzyme_ty, ad_fn, &args, None); // This part is a bit iffy. LLVM requires that a call to an inlineable function has some - // metadata attachted to it, but we just created this code oota. Given that the + // metadata attached to it, but we just created this code oota. Given that the // differentiated function already has partly confusing metadata, and given that this // affects nothing but the auttodiff IR, we take a shortcut and just steal metadata from the // dummy code which we inserted at a higher level. @@ -268,7 +424,26 @@ fn generate_enzyme_call<'ll>( // Now that we copied the metadata, get rid of dummy code. llvm::LLVMRustEraseInstUntilInclusive(entry, last_inst); - if cx.val_ty(call) == cx.type_void() { + if cx.val_ty(call) == cx.type_void() || has_sret { + if has_sret { + // This is what we already have in our outer_fn (shortened): + // define void @_foo(ptr <..> sret([32 x i8]) initializes((0, 32)) %0, <...>) { + // %7 = call [4 x double] (...) @__enzyme_fwddiff_foo(ptr @square, metadata !"enzyme_width", i64 4, <...>) + // + // store [4 x double] %7, ptr %0, align 8 + // ret void + // } + + // now store the result of the enzyme call into the sret pointer. + let sret_ptr = outer_args[0]; + let call_ty = cx.val_ty(call); + if attrs.width == 1 { + assert_eq!(cx.type_kind(call_ty), TypeKind::Struct); + } else { + assert_eq!(cx.type_kind(call_ty), TypeKind::Array); + } + llvm::LLVMBuildStore(&builder.llbuilder, call, sret_ptr); + } builder.ret_void(); } else { builder.ret(call); @@ -300,11 +475,10 @@ pub(crate) fn differentiate<'ll>( if !diff_items.is_empty() && !cgcx.opts.unstable_opts.autodiff.contains(&rustc_session::config::AutoDiff::Enable) { - let dcx = cgcx.create_dcx(); - return Err(dcx.handle().emit_almost_fatal(AutoDiffWithoutEnable)); + return Err(diag_handler.handle().emit_almost_fatal(AutoDiffWithoutEnable)); } - // Before dumping the module, we want all the TypeTrees to become part of the module. + // Here we replace the placeholder code with the actual autodiff code, which calls Enzyme. for item in diff_items.iter() { let name = item.source.clone(); let fn_def: Option<&llvm::Value> = cx.get_function(&name); diff --git a/compiler/rustc_codegen_llvm/src/common.rs b/compiler/rustc_codegen_llvm/src/common.rs index 457e5452ce946..a6f277e4455b9 100644 --- a/compiler/rustc_codegen_llvm/src/common.rs +++ b/compiler/rustc_codegen_llvm/src/common.rs @@ -4,8 +4,8 @@ use std::borrow::Borrow; use libc::{c_char, c_uint}; use rustc_abi as abi; +use rustc_abi::HasDataLayout; use rustc_abi::Primitive::Pointer; -use rustc_abi::{AddressSpace, HasDataLayout}; use rustc_ast::Mutability; use rustc_codegen_ssa::common::TypeKind; use rustc_codegen_ssa::traits::*; @@ -269,7 +269,8 @@ impl<'ll, 'tcx> ConstCodegenMethods for CodegenCx<'ll, 'tcx> { } Scalar::Ptr(ptr, _size) => { let (prov, offset) = ptr.into_parts(); - let (base_addr, base_addr_space) = match self.tcx.global_alloc(prov.alloc_id()) { + let global_alloc = self.tcx.global_alloc(prov.alloc_id()); + let base_addr = match global_alloc { GlobalAlloc::Memory(alloc) => { // For ZSTs directly codegen an aligned pointer. // This avoids generating a zero-sized constant value and actually needing a @@ -301,12 +302,10 @@ impl<'ll, 'tcx> ConstCodegenMethods for CodegenCx<'ll, 'tcx> { format!("alloc_{hash:032x}").as_bytes(), ); } - (value, AddressSpace::DATA) + value } } - GlobalAlloc::Function { instance, .. } => { - (self.get_fn_addr(instance), self.data_layout().instruction_address_space) - } + GlobalAlloc::Function { instance, .. } => self.get_fn_addr(instance), GlobalAlloc::VTable(ty, dyn_ty) => { let alloc = self .tcx @@ -319,14 +318,15 @@ impl<'ll, 'tcx> ConstCodegenMethods for CodegenCx<'ll, 'tcx> { .unwrap_memory(); let init = const_alloc_to_llvm(self, alloc, /*static*/ false); let value = self.static_addr_of_impl(init, alloc.inner().align, None); - (value, AddressSpace::DATA) + value } GlobalAlloc::Static(def_id) => { assert!(self.tcx.is_static(def_id)); assert!(!self.tcx.is_thread_local_static(def_id)); - (self.get_static(def_id), AddressSpace::DATA) + self.get_static(def_id) } }; + let base_addr_space = global_alloc.address_space(self); let llval = unsafe { llvm::LLVMConstInBoundsGEP2( self.type_i8(), diff --git a/compiler/rustc_codegen_llvm/src/consts.rs b/compiler/rustc_codegen_llvm/src/consts.rs index 7675e75338a3e..cbac55c715310 100644 --- a/compiler/rustc_codegen_llvm/src/consts.rs +++ b/compiler/rustc_codegen_llvm/src/consts.rs @@ -364,7 +364,12 @@ impl<'ll> CodegenCx<'ll, '_> { if !def_id.is_local() { let needs_dll_storage_attr = self.use_dll_storage_attrs - && !self.tcx.is_foreign_item(def_id) + // If the symbol is a foreign item, then don't automatically apply DLLImport, as + // we'll rely on the #[link] attribute instead. BUT, if this is an internal symbol + // then it may be generated by the compiler in some crate, so we do need to apply + // DLLImport when linking with the MSVC linker. + && (!self.tcx.is_foreign_item(def_id) + || (self.sess().target.is_like_msvc && fn_attrs.flags.contains(CodegenFnAttrFlags::RUSTC_STD_INTERNAL_SYMBOL))) // Local definitions can never be imported, so we must not apply // the DLLImport annotation. && !dso_local @@ -430,7 +435,7 @@ impl<'ll> CodegenCx<'ll, '_> { let val_llty = self.val_ty(v); let g = self.get_static_inner(def_id, val_llty); - let llty = llvm::LLVMGlobalGetValueType(g); + let llty = self.get_type_of_global(g); let g = if val_llty == llty { g diff --git a/compiler/rustc_codegen_llvm/src/context.rs b/compiler/rustc_codegen_llvm/src/context.rs index f7b096ff976a6..ed50515b70716 100644 --- a/compiler/rustc_codegen_llvm/src/context.rs +++ b/compiler/rustc_codegen_llvm/src/context.rs @@ -8,6 +8,7 @@ use std::str; use rustc_abi::{HasDataLayout, Size, TargetDataLayout, VariantIdx}; use rustc_codegen_ssa::back::versioned_llvm_target; use rustc_codegen_ssa::base::{wants_msvc_seh, wants_wasm_eh}; +use rustc_codegen_ssa::common::TypeKind; use rustc_codegen_ssa::errors as ssa_errors; use rustc_codegen_ssa::traits::*; use rustc_data_structures::base_n::{ALPHANUMERIC_ONLY, ToBaseN}; @@ -38,7 +39,7 @@ use crate::debuginfo::metadata::apply_vcall_visibility_metadata; use crate::llvm::Metadata; use crate::type_::Type; use crate::value::Value; -use crate::{attributes, coverageinfo, debuginfo, llvm, llvm_util}; +use crate::{attributes, common, coverageinfo, debuginfo, llvm, llvm_util}; /// `TyCtxt` (and related cache datastructures) can't be move between threads. /// However, there are various cx related functions which we want to be available to the builder and @@ -163,23 +164,6 @@ pub(crate) unsafe fn create_module<'ll>( let mut target_data_layout = sess.target.data_layout.to_string(); let llvm_version = llvm_util::get_version(); - if llvm_version < (19, 0, 0) { - if sess.target.arch == "aarch64" || sess.target.arch.starts_with("arm64") { - // LLVM 19 sets -Fn32 in its data layout string for 64-bit ARM - // Earlier LLVMs leave this default, so remove it. - // See https://github.com/llvm/llvm-project/pull/90702 - target_data_layout = target_data_layout.replace("-Fn32", ""); - } - } - - if llvm_version < (19, 0, 0) { - if sess.target.arch == "loongarch64" { - // LLVM 19 updates the LoongArch64 data layout. - // See https://github.com/llvm/llvm-project/pull/93814 - target_data_layout = target_data_layout.replace("-n32:64", "-n64"); - } - } - if llvm_version < (20, 0, 0) { if sess.target.arch == "aarch64" || sess.target.arch.starts_with("arm64") { // LLVM 20 defines three additional address spaces for alternate @@ -327,6 +311,22 @@ pub(crate) unsafe fn create_module<'ll>( pfe.prefix().into(), ); } + + // Add "kcfi-arity" module flag if KCFI arity indicator is enabled. (See + // https://github.com/llvm/llvm-project/pull/117121.) + if sess.is_sanitizer_kcfi_arity_enabled() { + // KCFI arity indicator requires LLVM 21.0.0 or later. + if llvm_version < (21, 0, 0) { + tcx.dcx().emit_err(crate::errors::SanitizerKcfiArityRequiresLLVM2100); + } + + llvm::add_module_flag_u32( + llmod, + llvm::ModuleFlagMergeBehavior::Override, + "kcfi-arity", + 1, + ); + } } // Control Flow Guard is currently only supported by MSVC and LLVM on Windows. @@ -643,7 +643,18 @@ impl<'ll, 'tcx> CodegenCx<'ll, 'tcx> { llvm::set_section(g, c"llvm.metadata"); } } - +impl<'ll> SimpleCx<'ll> { + pub(crate) fn get_return_type(&self, ty: &'ll Type) -> &'ll Type { + assert_eq!(self.type_kind(ty), TypeKind::Function); + unsafe { llvm::LLVMGetReturnType(ty) } + } + pub(crate) fn get_type_of_global(&self, val: &'ll Value) -> &'ll Type { + unsafe { llvm::LLVMGlobalGetValueType(val) } + } + pub(crate) fn val_ty(&self, v: &'ll Value) -> &'ll Type { + common::val_ty(v) + } +} impl<'ll> SimpleCx<'ll> { pub(crate) fn new( llmod: &'ll llvm::Module, @@ -660,6 +671,13 @@ impl<'ll, CX: Borrow>> GenericCx<'ll, CX> { llvm::LLVMMetadataAsValue(self.llcx(), metadata) } + // FIXME(autodiff): We should split `ConstCodegenMethods` to pull the reusable parts + // onto a trait that is also implemented for GenericCx. + pub(crate) fn get_const_i64(&self, n: u64) -> &'ll Value { + let ty = unsafe { llvm::LLVMInt64TypeInContext(self.llcx()) }; + unsafe { llvm::LLVMConstInt(ty, n, llvm::False) } + } + pub(crate) fn get_function(&self, name: &str) -> Option<&'ll Value> { let name = SmallCStr::new(name); unsafe { llvm::LLVMGetNamedFunction((**self).borrow().llmod, name.as_ptr()) } @@ -680,6 +698,16 @@ impl<'ll, CX: Borrow>> GenericCx<'ll, CX> { llvm::LLVMMDStringInContext2(self.llcx(), name.as_ptr() as *const c_char, name.len()) }) } + + pub(crate) fn get_functions(&self) -> Vec<&'ll Value> { + let mut functions = vec![]; + let mut func = unsafe { llvm::LLVMGetFirstFunction(self.llmod()) }; + while let Some(f) = func { + functions.push(f); + func = unsafe { llvm::LLVMGetNextFunction(f) } + } + functions + } } impl<'ll, 'tcx> MiscCodegenMethods<'tcx> for CodegenCx<'ll, 'tcx> { @@ -1183,10 +1211,8 @@ impl<'ll> CodegenCx<'ll, '_> { if self.sess().instrument_coverage() { ifn!("llvm.instrprof.increment", fn(ptr, t_i64, t_i32, t_i32) -> void); - if crate::llvm_util::get_version() >= (19, 0, 0) { - ifn!("llvm.instrprof.mcdc.parameters", fn(ptr, t_i64, t_i32) -> void); - ifn!("llvm.instrprof.mcdc.tvbitmap.update", fn(ptr, t_i64, t_i32, ptr) -> void); - } + ifn!("llvm.instrprof.mcdc.parameters", fn(ptr, t_i64, t_i32) -> void); + ifn!("llvm.instrprof.mcdc.tvbitmap.update", fn(ptr, t_i64, t_i32, ptr) -> void); } ifn!("llvm.type.test", fn(ptr, t_metadata) -> i1); diff --git a/compiler/rustc_codegen_llvm/src/coverageinfo/mapgen.rs b/compiler/rustc_codegen_llvm/src/coverageinfo/mapgen.rs index 9a2473d6cf23d..55b1e728b70db 100644 --- a/compiler/rustc_codegen_llvm/src/coverageinfo/mapgen.rs +++ b/compiler/rustc_codegen_llvm/src/coverageinfo/mapgen.rs @@ -5,15 +5,11 @@ use rustc_abi::Align; use rustc_codegen_ssa::traits::{ BaseTypeCodegenMethods, ConstCodegenMethods, StaticCodegenMethods, }; -use rustc_data_structures::fx::{FxHashSet, FxIndexMap}; -use rustc_hir::def_id::{DefId, LocalDefId}; +use rustc_data_structures::fx::FxIndexMap; use rustc_index::IndexVec; -use rustc_middle::mir; -use rustc_middle::mir::mono::MonoItemPartitions; -use rustc_middle::ty::{self, TyCtxt}; +use rustc_middle::ty::TyCtxt; use rustc_session::RemapFileNameExt; use rustc_session::config::RemapPathScopeComponents; -use rustc_span::def_id::DefIdSet; use rustc_span::{SourceFile, StableSourceFileId}; use tracing::debug; @@ -24,6 +20,7 @@ use crate::llvm; mod covfun; mod spans; +mod unused; /// Generates and exports the coverage map, which is embedded in special /// linker sections in the final binary. @@ -56,13 +53,6 @@ pub(crate) fn finalize(cx: &CodegenCx<'_, '_>) { None => return, }; - // The order of entries in this global file table needs to be deterministic, - // and ideally should also be independent of the details of stable-hashing, - // because coverage tests snapshots (`.cov-map`) can observe the order and - // would need to be re-blessed if it changes. As long as those requirements - // are satisfied, the order can be arbitrary. - let mut global_file_table = GlobalFileTable::new(); - let mut covfun_records = instances_used .iter() .copied() @@ -70,18 +60,13 @@ pub(crate) fn finalize(cx: &CodegenCx<'_, '_>) { // order that doesn't depend on the stable-hash-based order in which // instances were visited during codegen. .sorted_by_cached_key(|&instance| tcx.symbol_name(instance).name) - .filter_map(|instance| prepare_covfun_record(tcx, &mut global_file_table, instance, true)) + .filter_map(|instance| prepare_covfun_record(tcx, instance, true)) .collect::>(); // In a single designated CGU, also prepare covfun records for functions // in this crate that were instrumented for coverage, but are unused. if cx.codegen_unit.is_code_coverage_dead_code_cgu() { - let mut unused_instances = gather_unused_function_instances(cx); - // Sort the unused instances by symbol name, for the same reason as the used ones. - unused_instances.sort_by_cached_key(|&instance| tcx.symbol_name(instance).name); - covfun_records.extend(unused_instances.into_iter().filter_map(|instance| { - prepare_covfun_record(tcx, &mut global_file_table, instance, false) - })); + unused::prepare_covfun_records_for_unused_functions(cx, &mut covfun_records); } // If there are no covfun records for this CGU, don't generate a covmap record. @@ -93,91 +78,88 @@ pub(crate) fn finalize(cx: &CodegenCx<'_, '_>) { return; } - // Encode all filenames referenced by coverage mappings in this CGU. - let filenames_buffer = global_file_table.make_filenames_buffer(tcx); - // The `llvm-cov` tool uses this hash to associate each covfun record with - // its corresponding filenames table, since the final binary will typically - // contain multiple covmap records from different compilation units. - let filenames_hash = llvm_cov::hash_bytes(&filenames_buffer); - - let mut unused_function_names = vec![]; + // Prepare the global file table for this CGU, containing all paths needed + // by one or more covfun records. + let global_file_table = + GlobalFileTable::build(tcx, covfun_records.iter().flat_map(|c| c.all_source_files())); for covfun in &covfun_records { - unused_function_names.extend(covfun.mangled_function_name_if_unused()); - - covfun::generate_covfun_record(cx, filenames_hash, covfun) - } - - // For unused functions, we need to take their mangled names and store them - // in a specially-named global array. LLVM's `InstrProfiling` pass will - // detect this global and include those names in its `__llvm_prf_names` - // section. (See `llvm/lib/Transforms/Instrumentation/InstrProfiling.cpp`.) - if !unused_function_names.is_empty() { - assert!(cx.codegen_unit.is_code_coverage_dead_code_cgu()); - - let name_globals = unused_function_names - .into_iter() - .map(|mangled_function_name| cx.const_str(mangled_function_name).0) - .collect::>(); - let initializer = cx.const_array(cx.type_ptr(), &name_globals); - - let array = llvm::add_global(cx.llmod, cx.val_ty(initializer), c"__llvm_coverage_names"); - llvm::set_global_constant(array, true); - llvm::set_linkage(array, llvm::Linkage::InternalLinkage); - llvm::set_initializer(array, initializer); + covfun::generate_covfun_record(cx, &global_file_table, covfun) } // Generate the coverage map header, which contains the filenames used by // this CGU's coverage mappings, and store it in a well-known global. // (This is skipped if we returned early due to having no covfun records.) - generate_covmap_record(cx, covmap_version, &filenames_buffer); + generate_covmap_record(cx, covmap_version, &global_file_table.filenames_buffer); } -/// Maps "global" (per-CGU) file ID numbers to their underlying source files. +/// Maps "global" (per-CGU) file ID numbers to their underlying source file paths. +#[derive(Debug)] struct GlobalFileTable { /// This "raw" table doesn't include the working dir, so a file's /// global ID is its index in this set **plus one**. - raw_file_table: FxIndexMap>, + raw_file_table: FxIndexMap, + + /// The file table in encoded form (possibly compressed), which can be + /// included directly in this CGU's `__llvm_covmap` record. + filenames_buffer: Vec, + + /// Truncated hash of the bytes in `filenames_buffer`. + /// + /// The `llvm-cov` tool uses this hash to associate each covfun record with + /// its corresponding filenames table, since the final binary will typically + /// contain multiple covmap records from different compilation units. + filenames_hash: u64, } impl GlobalFileTable { - fn new() -> Self { - Self { raw_file_table: FxIndexMap::default() } - } + /// Builds a "global file table" for this CGU, mapping numeric IDs to + /// path strings. + fn build<'a>(tcx: TyCtxt<'_>, all_files: impl Iterator) -> Self { + let mut raw_file_table = FxIndexMap::default(); + + for file in all_files { + raw_file_table.entry(file.stable_id).or_insert_with(|| { + file.name + .for_scope(tcx.sess, RemapPathScopeComponents::MACRO) + .to_string_lossy() + .into_owned() + }); + } - fn global_file_id_for_file(&mut self, file: &Arc) -> GlobalFileId { - // Ensure the given file has a table entry, and get its index. - let entry = self.raw_file_table.entry(file.stable_id); - let raw_id = entry.index(); - entry.or_insert_with(|| Arc::clone(file)); + // FIXME(Zalathar): Consider sorting the file table here, but maybe + // only after adding filename support to coverage-dump, so that the + // table order isn't directly visible in `.coverage-map` snapshots. - // The raw file table doesn't include an entry for the working dir - // (which has ID 0), so add 1 to get the correct ID. - GlobalFileId::from_usize(raw_id + 1) - } + let mut table = Vec::with_capacity(raw_file_table.len() + 1); - fn make_filenames_buffer(&self, tcx: TyCtxt<'_>) -> Vec { - let mut table = Vec::with_capacity(self.raw_file_table.len() + 1); - - // LLVM Coverage Mapping Format version 6 (zero-based encoded as 5) - // requires setting the first filename to the compilation directory. - // Since rustc generates coverage maps with relative paths, the - // compilation directory can be combined with the relative paths - // to get absolute paths, if needed. - table.push( - tcx.sess - .opts - .working_dir - .for_scope(tcx.sess, RemapPathScopeComponents::MACRO) - .to_string_lossy(), - ); + // Since version 6 of the LLVM coverage mapping format, the first entry + // in the global file table is treated as a base directory, used to + // resolve any other entries that are stored as relative paths. + let base_dir = tcx + .sess + .opts + .working_dir + .for_scope(tcx.sess, RemapPathScopeComponents::MACRO) + .to_string_lossy(); + table.push(base_dir.as_ref()); // Add the regular entries after the base directory. - table.extend(self.raw_file_table.values().map(|file| { - file.name.for_scope(tcx.sess, RemapPathScopeComponents::MACRO).to_string_lossy() - })); + table.extend(raw_file_table.values().map(|name| name.as_str())); - llvm_cov::write_filenames_to_buffer(&table) + // Encode the file table into a buffer, and get the hash of its encoded + // bytes, so that we can embed that hash in `__llvm_covfun` records. + let filenames_buffer = llvm_cov::write_filenames_to_buffer(&table); + let filenames_hash = llvm_cov::hash_bytes(&filenames_buffer); + + Self { raw_file_table, filenames_buffer, filenames_hash } + } + + fn get_existing_id(&self, file: &SourceFile) -> Option { + let raw_id = self.raw_file_table.get_index_of(&file.stable_id)?; + // The raw file table doesn't include an entry for the base dir + // (which has ID 0), so add 1 to get the correct ID. + Some(GlobalFileId::from_usize(raw_id + 1)) } } @@ -193,26 +175,31 @@ rustc_index::newtype_index! { struct LocalFileId {} } -/// Holds a mapping from "local" (per-function) file IDs to "global" (per-CGU) -/// file IDs. +/// Holds a mapping from "local" (per-function) file IDs to their corresponding +/// source files. #[derive(Debug, Default)] struct VirtualFileMapping { - local_to_global: IndexVec, - global_to_local: FxIndexMap, + local_file_table: IndexVec>, } impl VirtualFileMapping { - fn local_id_for_global(&mut self, global_file_id: GlobalFileId) -> LocalFileId { - *self - .global_to_local - .entry(global_file_id) - .or_insert_with(|| self.local_to_global.push(global_file_id)) + fn push_file(&mut self, source_file: &Arc) -> LocalFileId { + self.local_file_table.push(Arc::clone(source_file)) } - fn to_vec(&self) -> Vec { - // This clone could be avoided by transmuting `&[GlobalFileId]` to `&[u32]`, - // but it isn't hot or expensive enough to justify the extra unsafety. - self.local_to_global.iter().map(|&global| GlobalFileId::as_u32(global)).collect() + /// Resolves all of the filenames in this local file mapping to a list of + /// global file IDs in its CGU, for inclusion in this function's + /// `__llvm_covfun` record. + /// + /// The global file IDs are returned as `u32` to make FFI easier. + fn resolve_all(&self, global_file_table: &GlobalFileTable) -> Option> { + self.local_file_table + .iter() + .map(|file| try { + let id = global_file_table.get_existing_id(file)?; + GlobalFileId::as_u32(id) + }) + .collect::>>() } } @@ -249,121 +236,3 @@ fn generate_covmap_record<'ll>(cx: &CodegenCx<'ll, '_>, version: u32, filenames_ cx.add_used_global(covmap_global); } - -/// Each CGU will normally only emit coverage metadata for the functions that it actually generates. -/// But since we don't want unused functions to disappear from coverage reports, we also scan for -/// functions that were instrumented but are not participating in codegen. -/// -/// These unused functions don't need to be codegenned, but we do need to add them to the function -/// coverage map (in a single designated CGU) so that we still emit coverage mappings for them. -/// We also end up adding their symbol names to a special global array that LLVM will include in -/// its embedded coverage data. -fn gather_unused_function_instances<'tcx>(cx: &CodegenCx<'_, 'tcx>) -> Vec> { - assert!(cx.codegen_unit.is_code_coverage_dead_code_cgu()); - - let tcx = cx.tcx; - let usage = prepare_usage_sets(tcx); - - let is_unused_fn = |def_id: LocalDefId| -> bool { - // Usage sets expect `DefId`, so convert from `LocalDefId`. - let d: DefId = LocalDefId::to_def_id(def_id); - // To be potentially eligible for "unused function" mappings, a definition must: - // - Be eligible for coverage instrumentation - // - Not participate directly in codegen (or have lost all its coverage statements) - // - Not have any coverage statements inlined into codegenned functions - tcx.is_eligible_for_coverage(def_id) - && (!usage.all_mono_items.contains(&d) || usage.missing_own_coverage.contains(&d)) - && !usage.used_via_inlining.contains(&d) - }; - - // FIXME(#79651): Consider trying to filter out dummy instantiations of - // unused generic functions from library crates, because they can produce - // "unused instantiation" in coverage reports even when they are actually - // used by some downstream crate in the same binary. - - tcx.mir_keys(()) - .iter() - .copied() - .filter(|&def_id| is_unused_fn(def_id)) - .map(|def_id| make_dummy_instance(tcx, def_id)) - .collect::>() -} - -struct UsageSets<'tcx> { - all_mono_items: &'tcx DefIdSet, - used_via_inlining: FxHashSet, - missing_own_coverage: FxHashSet, -} - -/// Prepare sets of definitions that are relevant to deciding whether something -/// is an "unused function" for coverage purposes. -fn prepare_usage_sets<'tcx>(tcx: TyCtxt<'tcx>) -> UsageSets<'tcx> { - let MonoItemPartitions { all_mono_items, codegen_units, .. } = - tcx.collect_and_partition_mono_items(()); - - // Obtain a MIR body for each function participating in codegen, via an - // arbitrary instance. - let mut def_ids_seen = FxHashSet::default(); - let def_and_mir_for_all_mono_fns = codegen_units - .iter() - .flat_map(|cgu| cgu.items().keys()) - .filter_map(|item| match item { - mir::mono::MonoItem::Fn(instance) => Some(instance), - mir::mono::MonoItem::Static(_) | mir::mono::MonoItem::GlobalAsm(_) => None, - }) - // We only need one arbitrary instance per definition. - .filter(move |instance| def_ids_seen.insert(instance.def_id())) - .map(|instance| { - // We don't care about the instance, just its underlying MIR. - let body = tcx.instance_mir(instance.def); - (instance.def_id(), body) - }); - - // Functions whose coverage statements were found inlined into other functions. - let mut used_via_inlining = FxHashSet::default(); - // Functions that were instrumented, but had all of their coverage statements - // removed by later MIR transforms (e.g. UnreachablePropagation). - let mut missing_own_coverage = FxHashSet::default(); - - for (def_id, body) in def_and_mir_for_all_mono_fns { - let mut saw_own_coverage = false; - - // Inspect every coverage statement in the function's MIR. - for stmt in body - .basic_blocks - .iter() - .flat_map(|block| &block.statements) - .filter(|stmt| matches!(stmt.kind, mir::StatementKind::Coverage(_))) - { - if let Some(inlined) = stmt.source_info.scope.inlined_instance(&body.source_scopes) { - // This coverage statement was inlined from another function. - used_via_inlining.insert(inlined.def_id()); - } else { - // Non-inlined coverage statements belong to the enclosing function. - saw_own_coverage = true; - } - } - - if !saw_own_coverage && body.function_coverage_info.is_some() { - missing_own_coverage.insert(def_id); - } - } - - UsageSets { all_mono_items, used_via_inlining, missing_own_coverage } -} - -fn make_dummy_instance<'tcx>(tcx: TyCtxt<'tcx>, local_def_id: LocalDefId) -> ty::Instance<'tcx> { - let def_id = local_def_id.to_def_id(); - - // Make a dummy instance that fills in all generics with placeholders. - ty::Instance::new( - def_id, - ty::GenericArgs::for_item(tcx, def_id, |param, _| { - if let ty::GenericParamDefKind::Lifetime = param.kind { - tcx.lifetimes.re_erased.into() - } else { - tcx.mk_param_from_def(param) - } - }), - ) -} diff --git a/compiler/rustc_codegen_llvm/src/coverageinfo/mapgen/covfun.rs b/compiler/rustc_codegen_llvm/src/coverageinfo/mapgen/covfun.rs index 048e1988c3278..7bdbc68595290 100644 --- a/compiler/rustc_codegen_llvm/src/coverageinfo/mapgen/covfun.rs +++ b/compiler/rustc_codegen_llvm/src/coverageinfo/mapgen/covfun.rs @@ -5,6 +5,7 @@ //! [^win]: On Windows the section name is `.lcovfun`. use std::ffi::CString; +use std::sync::Arc; use rustc_abi::Align; use rustc_codegen_ssa::traits::{ @@ -15,7 +16,7 @@ use rustc_middle::mir::coverage::{ MappingKind, Op, }; use rustc_middle::ty::{Instance, TyCtxt}; -use rustc_span::Span; +use rustc_span::{SourceFile, Span}; use rustc_target::spec::HasTargetSpec; use tracing::debug; @@ -38,16 +39,15 @@ pub(crate) struct CovfunRecord<'tcx> { } impl<'tcx> CovfunRecord<'tcx> { - /// FIXME(Zalathar): Make this the responsibility of the code that determines - /// which functions are unused. - pub(crate) fn mangled_function_name_if_unused(&self) -> Option<&'tcx str> { - (!self.is_used).then_some(self.mangled_function_name) + /// Iterator that yields all source files referred to by this function's + /// coverage mappings. Used to build the global file table for the CGU. + pub(crate) fn all_source_files(&self) -> impl Iterator { + self.virtual_file_mapping.local_file_table.iter().map(Arc::as_ref) } } pub(crate) fn prepare_covfun_record<'tcx>( tcx: TyCtxt<'tcx>, - global_file_table: &mut GlobalFileTable, instance: Instance<'tcx>, is_used: bool, ) -> Option> { @@ -65,7 +65,7 @@ pub(crate) fn prepare_covfun_record<'tcx>( regions: ffi::Regions::default(), }; - fill_region_tables(tcx, global_file_table, fn_cov_info, ids_info, &mut covfun); + fill_region_tables(tcx, fn_cov_info, ids_info, &mut covfun); if covfun.regions.has_no_regions() { debug!(?covfun, "function has no mappings to embed; skipping"); @@ -100,7 +100,6 @@ fn prepare_expressions(ids_info: &CoverageIdsInfo) -> Vec( tcx: TyCtxt<'tcx>, - global_file_table: &mut GlobalFileTable, fn_cov_info: &'tcx FunctionCoverageInfo, ids_info: &'tcx CoverageIdsInfo, covfun: &mut CovfunRecord<'tcx>, @@ -114,11 +113,7 @@ fn fill_region_tables<'tcx>( }; let source_file = source_map.lookup_source_file(first_span.lo()); - // Look up the global file ID for that file. - let global_file_id = global_file_table.global_file_id_for_file(&source_file); - - // Associate that global file ID with a local file ID for this function. - let local_file_id = covfun.virtual_file_mapping.local_id_for_global(global_file_id); + let local_file_id = covfun.virtual_file_mapping.push_file(&source_file); // In rare cases, _all_ of a function's spans are discarded, and coverage // codegen needs to handle that gracefully to avoid #133606. @@ -187,7 +182,7 @@ fn fill_region_tables<'tcx>( /// as a global variable in the `__llvm_covfun` section. pub(crate) fn generate_covfun_record<'tcx>( cx: &CodegenCx<'_, 'tcx>, - filenames_hash: u64, + global_file_table: &GlobalFileTable, covfun: &CovfunRecord<'tcx>, ) { let &CovfunRecord { @@ -199,12 +194,19 @@ pub(crate) fn generate_covfun_record<'tcx>( ref regions, } = covfun; + let Some(local_file_table) = virtual_file_mapping.resolve_all(global_file_table) else { + debug_assert!( + false, + "all local files should be present in the global file table: \ + global_file_table = {global_file_table:?}, \ + virtual_file_mapping = {virtual_file_mapping:?}" + ); + return; + }; + // Encode the function's coverage mappings into a buffer. - let coverage_mapping_buffer = llvm_cov::write_function_mappings_to_buffer( - &virtual_file_mapping.to_vec(), - expressions, - regions, - ); + let coverage_mapping_buffer = + llvm_cov::write_function_mappings_to_buffer(&local_file_table, expressions, regions); // A covfun record consists of four target-endian integers, followed by the // encoded mapping data in bytes. Note that the length field is 32 bits. @@ -217,7 +219,7 @@ pub(crate) fn generate_covfun_record<'tcx>( cx.const_u64(func_name_hash), cx.const_u32(coverage_mapping_buffer.len() as u32), cx.const_u64(source_hash), - cx.const_u64(filenames_hash), + cx.const_u64(global_file_table.filenames_hash), cx.const_bytes(&coverage_mapping_buffer), ], // This struct needs to be packed, so that the 32-bit length field diff --git a/compiler/rustc_codegen_llvm/src/coverageinfo/mapgen/unused.rs b/compiler/rustc_codegen_llvm/src/coverageinfo/mapgen/unused.rs new file mode 100644 index 0000000000000..fe3a7a1580b53 --- /dev/null +++ b/compiler/rustc_codegen_llvm/src/coverageinfo/mapgen/unused.rs @@ -0,0 +1,170 @@ +use rustc_codegen_ssa::traits::{BaseTypeCodegenMethods, ConstCodegenMethods}; +use rustc_data_structures::fx::FxHashSet; +use rustc_hir::def_id::{DefId, LocalDefId}; +use rustc_middle::mir; +use rustc_middle::mir::mono::MonoItemPartitions; +use rustc_middle::ty::{self, TyCtxt}; +use rustc_span::def_id::DefIdSet; + +use crate::common::CodegenCx; +use crate::coverageinfo::mapgen::covfun::{CovfunRecord, prepare_covfun_record}; +use crate::llvm; + +/// Each CGU will normally only emit coverage metadata for the functions that it actually generates. +/// But since we don't want unused functions to disappear from coverage reports, we also scan for +/// functions that were instrumented but are not participating in codegen. +/// +/// These unused functions don't need to be codegenned, but we do need to add them to the function +/// coverage map (in a single designated CGU) so that we still emit coverage mappings for them. +/// We also end up adding their symbol names to a special global array that LLVM will include in +/// its embedded coverage data. +pub(crate) fn prepare_covfun_records_for_unused_functions<'tcx>( + cx: &CodegenCx<'_, 'tcx>, + covfun_records: &mut Vec>, +) { + assert!(cx.codegen_unit.is_code_coverage_dead_code_cgu()); + + let mut unused_instances = gather_unused_function_instances(cx); + // Sort the unused instances by symbol name, so that their order isn't hash-sensitive. + unused_instances.sort_by_key(|instance| instance.symbol_name); + + // Try to create a covfun record for each unused function. + let mut name_globals = Vec::with_capacity(unused_instances.len()); + covfun_records.extend(unused_instances.into_iter().filter_map(|unused| try { + let record = prepare_covfun_record(cx.tcx, unused.instance, false)?; + // If successful, also store its symbol name in a global constant. + name_globals.push(cx.const_str(unused.symbol_name.name).0); + record + })); + + // Store the names of unused functions in a specially-named global array. + // LLVM's `InstrProfilling` pass will detect this array, and include the + // referenced names in its `__llvm_prf_names` section. + // (See `llvm/lib/Transforms/Instrumentation/InstrProfiling.cpp`.) + if !name_globals.is_empty() { + let initializer = cx.const_array(cx.type_ptr(), &name_globals); + + let array = llvm::add_global(cx.llmod, cx.val_ty(initializer), c"__llvm_coverage_names"); + llvm::set_global_constant(array, true); + llvm::set_linkage(array, llvm::Linkage::InternalLinkage); + llvm::set_initializer(array, initializer); + } +} + +/// Holds a dummy function instance along with its symbol name, to avoid having +/// to repeatedly query for the name. +struct UnusedInstance<'tcx> { + instance: ty::Instance<'tcx>, + symbol_name: ty::SymbolName<'tcx>, +} + +fn gather_unused_function_instances<'tcx>(cx: &CodegenCx<'_, 'tcx>) -> Vec> { + assert!(cx.codegen_unit.is_code_coverage_dead_code_cgu()); + + let tcx = cx.tcx; + let usage = prepare_usage_sets(tcx); + + let is_unused_fn = |def_id: LocalDefId| -> bool { + // Usage sets expect `DefId`, so convert from `LocalDefId`. + let d: DefId = LocalDefId::to_def_id(def_id); + // To be potentially eligible for "unused function" mappings, a definition must: + // - Be eligible for coverage instrumentation + // - Not participate directly in codegen (or have lost all its coverage statements) + // - Not have any coverage statements inlined into codegenned functions + tcx.is_eligible_for_coverage(def_id) + && (!usage.all_mono_items.contains(&d) || usage.missing_own_coverage.contains(&d)) + && !usage.used_via_inlining.contains(&d) + }; + + // FIXME(#79651): Consider trying to filter out dummy instantiations of + // unused generic functions from library crates, because they can produce + // "unused instantiation" in coverage reports even when they are actually + // used by some downstream crate in the same binary. + + tcx.mir_keys(()) + .iter() + .copied() + .filter(|&def_id| is_unused_fn(def_id)) + .map(|def_id| make_dummy_instance(tcx, def_id)) + .map(|instance| UnusedInstance { instance, symbol_name: tcx.symbol_name(instance) }) + .collect::>() +} + +struct UsageSets<'tcx> { + all_mono_items: &'tcx DefIdSet, + used_via_inlining: FxHashSet, + missing_own_coverage: FxHashSet, +} + +/// Prepare sets of definitions that are relevant to deciding whether something +/// is an "unused function" for coverage purposes. +fn prepare_usage_sets<'tcx>(tcx: TyCtxt<'tcx>) -> UsageSets<'tcx> { + let MonoItemPartitions { all_mono_items, codegen_units, .. } = + tcx.collect_and_partition_mono_items(()); + + // Obtain a MIR body for each function participating in codegen, via an + // arbitrary instance. + let mut def_ids_seen = FxHashSet::default(); + let def_and_mir_for_all_mono_fns = codegen_units + .iter() + .flat_map(|cgu| cgu.items().keys()) + .filter_map(|item| match item { + mir::mono::MonoItem::Fn(instance) => Some(instance), + mir::mono::MonoItem::Static(_) | mir::mono::MonoItem::GlobalAsm(_) => None, + }) + // We only need one arbitrary instance per definition. + .filter(move |instance| def_ids_seen.insert(instance.def_id())) + .map(|instance| { + // We don't care about the instance, just its underlying MIR. + let body = tcx.instance_mir(instance.def); + (instance.def_id(), body) + }); + + // Functions whose coverage statements were found inlined into other functions. + let mut used_via_inlining = FxHashSet::default(); + // Functions that were instrumented, but had all of their coverage statements + // removed by later MIR transforms (e.g. UnreachablePropagation). + let mut missing_own_coverage = FxHashSet::default(); + + for (def_id, body) in def_and_mir_for_all_mono_fns { + let mut saw_own_coverage = false; + + // Inspect every coverage statement in the function's MIR. + for stmt in body + .basic_blocks + .iter() + .flat_map(|block| &block.statements) + .filter(|stmt| matches!(stmt.kind, mir::StatementKind::Coverage(_))) + { + if let Some(inlined) = stmt.source_info.scope.inlined_instance(&body.source_scopes) { + // This coverage statement was inlined from another function. + used_via_inlining.insert(inlined.def_id()); + } else { + // Non-inlined coverage statements belong to the enclosing function. + saw_own_coverage = true; + } + } + + if !saw_own_coverage && body.function_coverage_info.is_some() { + missing_own_coverage.insert(def_id); + } + } + + UsageSets { all_mono_items, used_via_inlining, missing_own_coverage } +} + +fn make_dummy_instance<'tcx>(tcx: TyCtxt<'tcx>, local_def_id: LocalDefId) -> ty::Instance<'tcx> { + let def_id = local_def_id.to_def_id(); + + // Make a dummy instance that fills in all generics with placeholders. + ty::Instance::new_raw( + def_id, + ty::GenericArgs::for_item(tcx, def_id, |param, _| { + if let ty::GenericParamDefKind::Lifetime = param.kind { + tcx.lifetimes.re_erased.into() + } else { + tcx.mk_param_from_def(param) + } + }), + ) +} diff --git a/compiler/rustc_codegen_llvm/src/debuginfo/create_scope_map.rs b/compiler/rustc_codegen_llvm/src/debuginfo/create_scope_map.rs index f52991b369797..d2591139d6edc 100644 --- a/compiler/rustc_codegen_llvm/src/debuginfo/create_scope_map.rs +++ b/compiler/rustc_codegen_llvm/src/debuginfo/create_scope_map.rs @@ -3,7 +3,6 @@ use std::collections::hash_map::Entry; use rustc_codegen_ssa::mir::debuginfo::{DebugScope, FunctionDebugContext}; use rustc_codegen_ssa::traits::*; use rustc_data_structures::fx::FxHashMap; -use rustc_index::Idx; use rustc_index::bit_set::DenseBitSet; use rustc_middle::mir::{Body, SourceScope}; use rustc_middle::ty::layout::{FnAbiOf, HasTypingEnv}; @@ -43,8 +42,7 @@ pub(crate) fn compute_mir_scopes<'ll, 'tcx>( let mut instantiated = DenseBitSet::new_empty(mir.source_scopes.len()); let mut discriminators = FxHashMap::default(); // Instantiate all scopes. - for idx in 0..mir.source_scopes.len() { - let scope = SourceScope::new(idx); + for scope in mir.source_scopes.indices() { make_mir_scope( cx, instance, diff --git a/compiler/rustc_codegen_llvm/src/debuginfo/gdb.rs b/compiler/rustc_codegen_llvm/src/debuginfo/gdb.rs index 4ffe551df09b5..8f0948b8183bf 100644 --- a/compiler/rustc_codegen_llvm/src/debuginfo/gdb.rs +++ b/compiler/rustc_codegen_llvm/src/debuginfo/gdb.rs @@ -95,7 +95,11 @@ pub(crate) fn needs_gdb_debug_scripts_section(cx: &CodegenCx<'_, '_>) -> bool { // in the `.debug_gdb_scripts` section. For that reason, we make sure that the // section is only emitted for leaf crates. let embed_visualizers = cx.tcx.crate_types().iter().any(|&crate_type| match crate_type { - CrateType::Executable | CrateType::Dylib | CrateType::Cdylib | CrateType::Staticlib => { + CrateType::Executable + | CrateType::Dylib + | CrateType::Cdylib + | CrateType::Staticlib + | CrateType::Sdylib => { // These are crate types for which we will embed pretty printers since they // are treated as leaf crates. true diff --git a/compiler/rustc_codegen_llvm/src/debuginfo/metadata.rs b/compiler/rustc_codegen_llvm/src/debuginfo/metadata.rs index 2eaaf127e41ea..7f3e486ca310d 100644 --- a/compiler/rustc_codegen_llvm/src/debuginfo/metadata.rs +++ b/compiler/rustc_codegen_llvm/src/debuginfo/metadata.rs @@ -910,7 +910,8 @@ pub(crate) fn build_compile_unit_di_node<'ll, 'tcx>( && let Some(f) = output_filenames.split_dwarf_path( tcx.sess.split_debuginfo(), tcx.sess.opts.unstable_opts.split_dwarf_kind, - Some(codegen_unit_name), + codegen_unit_name, + tcx.sess.invocation_temp.as_deref(), ) { // We get a path relative to the working directory from split_dwarf_path Some(tcx.sess.source_map().path_mapping().to_real_filename(f)) @@ -1314,31 +1315,21 @@ fn build_generic_type_param_di_nodes<'ll, 'tcx>( ty: Ty<'tcx>, ) -> SmallVec> { if let ty::Adt(def, args) = *ty.kind() { - let generics = cx.tcx.generics_of(def.did()); - return get_template_parameters(cx, generics, args); - } - - return smallvec![]; -} - -pub(super) fn get_template_parameters<'ll, 'tcx>( - cx: &CodegenCx<'ll, 'tcx>, - generics: &ty::Generics, - args: ty::GenericArgsRef<'tcx>, -) -> SmallVec> { - if args.types().next().is_some() { - let names = get_parameter_names(cx, generics); - let template_params: SmallVec<_> = iter::zip(args, names) - .filter_map(|(kind, name)| { - kind.as_type().map(|ty| { - let actual_type = cx.tcx.normalize_erasing_regions(cx.typing_env(), ty); - let actual_type_di_node = type_di_node(cx, actual_type); - Some(cx.create_template_type_parameter(name.as_str(), actual_type_di_node)) + if args.types().next().is_some() { + let generics = cx.tcx.generics_of(def.did()); + let names = get_parameter_names(cx, generics); + let template_params: SmallVec<_> = iter::zip(args, names) + .filter_map(|(kind, name)| { + kind.as_type().map(|ty| { + let actual_type = cx.tcx.normalize_erasing_regions(cx.typing_env(), ty); + let actual_type_di_node = type_di_node(cx, actual_type); + Some(cx.create_template_type_parameter(name.as_str(), actual_type_di_node)) + }) }) - }) - .collect(); + .collect(); - return template_params; + return template_params; + } } return smallvec![]; diff --git a/compiler/rustc_codegen_llvm/src/debuginfo/metadata/enums/cpp_like.rs b/compiler/rustc_codegen_llvm/src/debuginfo/metadata/enums/cpp_like.rs index 07075be55fa1f..e9574108696ba 100644 --- a/compiler/rustc_codegen_llvm/src/debuginfo/metadata/enums/cpp_like.rs +++ b/compiler/rustc_codegen_llvm/src/debuginfo/metadata/enums/cpp_like.rs @@ -721,8 +721,7 @@ fn build_union_fields_for_direct_tag_coroutine<'ll, 'tcx>( _ => unreachable!(), }; - let coroutine_layout = - cx.tcx.coroutine_layout(coroutine_def_id, coroutine_args.kind_ty()).unwrap(); + let coroutine_layout = cx.tcx.coroutine_layout(coroutine_def_id, coroutine_args.args).unwrap(); let common_upvar_names = cx.tcx.closure_saved_names_of_captured_variables(coroutine_def_id); let variant_range = coroutine_args.variant_range(coroutine_def_id, cx.tcx); diff --git a/compiler/rustc_codegen_llvm/src/debuginfo/metadata/enums/mod.rs b/compiler/rustc_codegen_llvm/src/debuginfo/metadata/enums/mod.rs index 6792c307fdc45..7c701926d2c5e 100644 --- a/compiler/rustc_codegen_llvm/src/debuginfo/metadata/enums/mod.rs +++ b/compiler/rustc_codegen_llvm/src/debuginfo/metadata/enums/mod.rs @@ -363,7 +363,6 @@ fn build_coroutine_variant_struct_type_di_node<'ll, 'tcx>( state_specific_fields.into_iter().chain(common_fields).collect() }, - // FIXME: this is a no-op. `build_generic_type_param_di_nodes` only works for Adts. |cx| build_generic_type_param_di_nodes(cx, coroutine_type_and_layout.ty), ) .di_node diff --git a/compiler/rustc_codegen_llvm/src/debuginfo/metadata/enums/native.rs b/compiler/rustc_codegen_llvm/src/debuginfo/metadata/enums/native.rs index bfd131cfd3dbb..20a841f2287ae 100644 --- a/compiler/rustc_codegen_llvm/src/debuginfo/metadata/enums/native.rs +++ b/compiler/rustc_codegen_llvm/src/debuginfo/metadata/enums/native.rs @@ -174,10 +174,8 @@ pub(super) fn build_coroutine_di_node<'ll, 'tcx>( DIFlags::FlagZero, ), |cx, coroutine_type_di_node| { - let coroutine_layout = cx - .tcx - .coroutine_layout(coroutine_def_id, coroutine_args.as_coroutine().kind_ty()) - .unwrap(); + let coroutine_layout = + cx.tcx.coroutine_layout(coroutine_def_id, coroutine_args).unwrap(); let Variants::Multiple { tag_encoding: TagEncoding::Direct, ref variants, .. } = coroutine_type_and_layout.variants diff --git a/compiler/rustc_codegen_llvm/src/debuginfo/metadata/type_map.rs b/compiler/rustc_codegen_llvm/src/debuginfo/metadata/type_map.rs index ae2ab32ef533c..56fb12d3c22ea 100644 --- a/compiler/rustc_codegen_llvm/src/debuginfo/metadata/type_map.rs +++ b/compiler/rustc_codegen_llvm/src/debuginfo/metadata/type_map.rs @@ -247,6 +247,16 @@ pub(super) fn stub<'ll, 'tcx>( StubInfo { metadata, unique_type_id } } +struct AdtStackPopGuard<'ll, 'tcx, 'a> { + cx: &'a CodegenCx<'ll, 'tcx>, +} + +impl<'ll, 'tcx, 'a> Drop for AdtStackPopGuard<'ll, 'tcx, 'a> { + fn drop(&mut self) { + debug_context(self.cx).adt_stack.borrow_mut().pop(); + } +} + /// This function enables creating debuginfo nodes that can recursively refer to themselves. /// It will first insert the given stub into the type map and only then execute the `members` /// and `generics` closures passed in. These closures have access to the stub so they can @@ -261,6 +271,44 @@ pub(super) fn build_type_with_children<'ll, 'tcx>( ) -> DINodeCreationResult<'ll> { assert_eq!(debug_context(cx).type_map.di_node_for_unique_id(stub_info.unique_type_id), None); + let mut _adt_stack_pop_guard = None; + if let UniqueTypeId::Ty(ty, ..) = stub_info.unique_type_id + && let ty::Adt(adt_def, args) = ty.kind() + { + let def_id = adt_def.did(); + // If any sub type reference the original type definition and the sub type has a type + // parameter that strictly contains the original parameter, the original type is a recursive + // type that can expanding indefinitely. Example, + // ``` + // enum Recursive { + // Recurse(*const Recursive>), + // Item(T), + // } + // ``` + let is_expanding_recursive = + debug_context(cx).adt_stack.borrow().iter().any(|(parent_def_id, parent_args)| { + if def_id == *parent_def_id { + args.iter().zip(parent_args.iter()).any(|(arg, parent_arg)| { + if let (Some(arg), Some(parent_arg)) = (arg.as_type(), parent_arg.as_type()) + { + arg != parent_arg && arg.contains(parent_arg) + } else { + false + } + }) + } else { + false + } + }); + if is_expanding_recursive { + // FIXME: indicate that this is an expanding recursive type in stub metadata? + return DINodeCreationResult::new(stub_info.metadata, false); + } else { + debug_context(cx).adt_stack.borrow_mut().push((def_id, args)); + _adt_stack_pop_guard = Some(AdtStackPopGuard { cx }); + } + } + debug_context(cx).type_map.insert(stub_info.unique_type_id, stub_info.metadata); let members: SmallVec<_> = diff --git a/compiler/rustc_codegen_llvm/src/debuginfo/mod.rs b/compiler/rustc_codegen_llvm/src/debuginfo/mod.rs index ae7d080db66f7..c508592792347 100644 --- a/compiler/rustc_codegen_llvm/src/debuginfo/mod.rs +++ b/compiler/rustc_codegen_llvm/src/debuginfo/mod.rs @@ -2,8 +2,8 @@ use std::cell::{OnceCell, RefCell}; use std::ops::Range; -use std::ptr; use std::sync::Arc; +use std::{iter, ptr}; use libc::c_uint; use metadata::create_subroutine_type; @@ -66,6 +66,7 @@ pub(crate) struct CodegenUnitDebugContext<'ll, 'tcx> { created_files: RefCell, &'ll DIFile>>, type_map: metadata::TypeMap<'ll, 'tcx>, + adt_stack: RefCell)>>, namespace_map: RefCell>, recursion_marker_type: OnceCell<&'ll DIType>, } @@ -80,6 +81,7 @@ impl<'ll, 'tcx> CodegenUnitDebugContext<'ll, 'tcx> { builder, created_files: Default::default(), type_map: Default::default(), + adt_stack: Default::default(), namespace_map: RefCell::new(Default::default()), recursion_marker_type: OnceCell::new(), } @@ -486,10 +488,40 @@ impl<'ll, 'tcx> DebugInfoCodegenMethods<'tcx> for CodegenCx<'ll, 'tcx> { generics: &ty::Generics, args: GenericArgsRef<'tcx>, ) -> &'ll DIArray { - let template_params = metadata::get_template_parameters(cx, generics, args); + if args.types().next().is_none() { + return create_DIArray(DIB(cx), &[]); + } + + // Again, only create type information if full debuginfo is enabled + let template_params: Vec<_> = if cx.sess().opts.debuginfo == DebugInfo::Full { + let names = get_parameter_names(cx, generics); + iter::zip(args, names) + .filter_map(|(kind, name)| { + kind.as_type().map(|ty| { + let actual_type = cx.tcx.normalize_erasing_regions(cx.typing_env(), ty); + let actual_type_metadata = type_di_node(cx, actual_type); + Some(cx.create_template_type_parameter( + name.as_str(), + actual_type_metadata, + )) + }) + }) + .collect() + } else { + vec![] + }; + create_DIArray(DIB(cx), &template_params) } + fn get_parameter_names(cx: &CodegenCx<'_, '_>, generics: &ty::Generics) -> Vec { + let mut names = generics.parent.map_or_else(Vec::new, |def_id| { + get_parameter_names(cx, cx.tcx.generics_of(def_id)) + }); + names.extend(generics.own_params.iter().map(|param| param.name)); + names + } + /// Returns a scope, plus `true` if that's a type scope for "class" methods, /// otherwise `false` for plain namespace scopes. fn get_containing_scope<'ll, 'tcx>( diff --git a/compiler/rustc_codegen_llvm/src/errors.rs b/compiler/rustc_codegen_llvm/src/errors.rs index 4c5a78ca74fe4..ecf108f988f0d 100644 --- a/compiler/rustc_codegen_llvm/src/errors.rs +++ b/compiler/rustc_codegen_llvm/src/errors.rs @@ -217,3 +217,7 @@ pub(crate) struct MismatchedDataLayout<'a> { pub(crate) struct FixedX18InvalidArch<'a> { pub arch: &'a str, } + +#[derive(Diagnostic)] +#[diag(codegen_llvm_sanitizer_kcfi_arity_requires_llvm_21_0_0)] +pub(crate) struct SanitizerKcfiArityRequiresLLVM2100; diff --git a/compiler/rustc_codegen_llvm/src/intrinsic.rs b/compiler/rustc_codegen_llvm/src/intrinsic.rs index 67135fcc3080d..bfaad8f2f1ef0 100644 --- a/compiler/rustc_codegen_llvm/src/intrinsic.rs +++ b/compiler/rustc_codegen_llvm/src/intrinsic.rs @@ -613,7 +613,7 @@ impl<'ll, 'tcx> IntrinsicCallBuilderMethods<'tcx> for Builder<'_, 'll, 'tcx> { _ => { debug!("unknown intrinsic '{}' -- falling back to default body", name); // Call the fallback body instead of generating the intrinsic code - return Err(ty::Instance::new(instance.def_id(), instance.args)); + return Err(ty::Instance::new_raw(instance.def_id(), instance.args)); } }; @@ -1184,18 +1184,6 @@ fn generic_simd_intrinsic<'ll, 'tcx>( }}; } - /// Returns the bitwidth of the `$ty` argument if it is an `Int` type. - macro_rules! require_int_ty { - ($ty: expr, $diag: expr) => { - match $ty { - ty::Int(i) => i.bit_width().unwrap_or_else(|| bx.data_layout().pointer_size.bits()), - _ => { - return_error!($diag); - } - } - }; - } - /// Returns the bitwidth of the `$ty` argument if it is an `Int` or `Uint` type. macro_rules! require_int_or_uint_ty { ($ty: expr, $diag: expr) => { @@ -1421,7 +1409,7 @@ fn generic_simd_intrinsic<'ll, 'tcx>( return Ok(bx.shuffle_vector(args[0].immediate(), args[1].immediate(), indices)); } - if name == sym::simd_insert { + if name == sym::simd_insert || name == sym::simd_insert_dyn { require!( in_elem == arg_tys[2], InvalidMonomorphization::InsertedType { @@ -1432,40 +1420,49 @@ fn generic_simd_intrinsic<'ll, 'tcx>( out_ty: arg_tys[2] } ); - let idx = bx - .const_to_opt_u128(args[1].immediate(), false) - .expect("typeck should have ensure that this is a const"); - if idx >= in_len.into() { - return_error!(InvalidMonomorphization::SimdIndexOutOfBounds { - span, - name, - arg_idx: 1, - total_len: in_len.into(), - }); - } - return Ok(bx.insert_element( - args[0].immediate(), - args[2].immediate(), - bx.const_i32(idx as i32), - )); + + let index_imm = if name == sym::simd_insert { + let idx = bx + .const_to_opt_u128(args[1].immediate(), false) + .expect("typeck should have ensure that this is a const"); + if idx >= in_len.into() { + return_error!(InvalidMonomorphization::SimdIndexOutOfBounds { + span, + name, + arg_idx: 1, + total_len: in_len.into(), + }); + } + bx.const_i32(idx as i32) + } else { + args[1].immediate() + }; + + return Ok(bx.insert_element(args[0].immediate(), args[2].immediate(), index_imm)); } - if name == sym::simd_extract { + if name == sym::simd_extract || name == sym::simd_extract_dyn { require!( ret_ty == in_elem, InvalidMonomorphization::ReturnType { span, name, in_elem, in_ty, ret_ty } ); - let idx = bx - .const_to_opt_u128(args[1].immediate(), false) - .expect("typeck should have ensure that this is a const"); - if idx >= in_len.into() { - return_error!(InvalidMonomorphization::SimdIndexOutOfBounds { - span, - name, - arg_idx: 1, - total_len: in_len.into(), - }); - } - return Ok(bx.extract_element(args[0].immediate(), bx.const_i32(idx as i32))); + let index_imm = if name == sym::simd_extract { + let idx = bx + .const_to_opt_u128(args[1].immediate(), false) + .expect("typeck should have ensure that this is a const"); + if idx >= in_len.into() { + return_error!(InvalidMonomorphization::SimdIndexOutOfBounds { + span, + name, + arg_idx: 1, + total_len: in_len.into(), + }); + } + bx.const_i32(idx as i32) + } else { + args[1].immediate() + }; + + return Ok(bx.extract_element(args[0].immediate(), index_imm)); } if name == sym::simd_select { @@ -1476,9 +1473,9 @@ fn generic_simd_intrinsic<'ll, 'tcx>( m_len == v_len, InvalidMonomorphization::MismatchedLengths { span, name, m_len, v_len } ); - let in_elem_bitwidth = require_int_ty!( + let in_elem_bitwidth = require_int_or_uint_ty!( m_elem_ty.kind(), - InvalidMonomorphization::MaskType { span, name, ty: m_elem_ty } + InvalidMonomorphization::MaskWrongElementType { span, name, ty: m_elem_ty } ); let m_i1s = vector_mask_to_bitmask(bx, args[0].immediate(), in_elem_bitwidth, m_len); return Ok(bx.select(m_i1s, args[1].immediate(), args[2].immediate())); @@ -1499,7 +1496,7 @@ fn generic_simd_intrinsic<'ll, 'tcx>( // Integer vector : let in_elem_bitwidth = require_int_or_uint_ty!( in_elem.kind(), - InvalidMonomorphization::VectorArgument { span, name, in_ty, in_elem } + InvalidMonomorphization::MaskWrongElementType { span, name, ty: in_elem } ); let i1xn = vector_mask_to_bitmask(bx, args[0].immediate(), in_elem_bitwidth, in_len); @@ -1723,14 +1720,9 @@ fn generic_simd_intrinsic<'ll, 'tcx>( } ); - let mask_elem_bitwidth = require_int_ty!( + let mask_elem_bitwidth = require_int_or_uint_ty!( element_ty2.kind(), - InvalidMonomorphization::ThirdArgElementType { - span, - name, - expected_element: element_ty2, - third_arg: arg_tys[2] - } + InvalidMonomorphization::MaskWrongElementType { span, name, ty: element_ty2 } ); // Alignment of T, must be a constant integer value: @@ -1825,14 +1817,9 @@ fn generic_simd_intrinsic<'ll, 'tcx>( } ); - let m_elem_bitwidth = require_int_ty!( + let m_elem_bitwidth = require_int_or_uint_ty!( mask_elem.kind(), - InvalidMonomorphization::ThirdArgElementType { - span, - name, - expected_element: values_elem, - third_arg: mask_ty, - } + InvalidMonomorphization::MaskWrongElementType { span, name, ty: mask_elem } ); let mask = vector_mask_to_bitmask(bx, args[0].immediate(), m_elem_bitwidth, mask_len); @@ -1915,14 +1902,9 @@ fn generic_simd_intrinsic<'ll, 'tcx>( } ); - let m_elem_bitwidth = require_int_ty!( + let m_elem_bitwidth = require_int_or_uint_ty!( mask_elem.kind(), - InvalidMonomorphization::ThirdArgElementType { - span, - name, - expected_element: values_elem, - third_arg: mask_ty, - } + InvalidMonomorphization::MaskWrongElementType { span, name, ty: mask_elem } ); let mask = vector_mask_to_bitmask(bx, args[0].immediate(), m_elem_bitwidth, mask_len); @@ -2010,15 +1992,10 @@ fn generic_simd_intrinsic<'ll, 'tcx>( } ); - // The element type of the third argument must be a signed integer type of any width: - let mask_elem_bitwidth = require_int_ty!( + // The element type of the third argument must be an integer type of any width: + let mask_elem_bitwidth = require_int_or_uint_ty!( element_ty2.kind(), - InvalidMonomorphization::ThirdArgElementType { - span, - name, - expected_element: element_ty2, - third_arg: arg_tys[2] - } + InvalidMonomorphization::MaskWrongElementType { span, name, ty: element_ty2 } ); // Alignment of T, must be a constant integer value: diff --git a/compiler/rustc_codegen_llvm/src/lib.rs b/compiler/rustc_codegen_llvm/src/lib.rs index 425381b0ffab7..e8010ec9fc495 100644 --- a/compiler/rustc_codegen_llvm/src/lib.rs +++ b/compiler/rustc_codegen_llvm/src/lib.rs @@ -6,6 +6,7 @@ // tidy-alphabetical-start #![allow(internal_features)] +#![cfg_attr(bootstrap, feature(let_chains))] #![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")] #![doc(rust_logo)] #![feature(assert_matches)] @@ -15,7 +16,6 @@ #![feature(if_let_guard)] #![feature(impl_trait_in_assoc_type)] #![feature(iter_intersperse)] -#![feature(let_chains)] #![feature(rustdoc_internals)] #![feature(slice_as_array)] #![feature(try_blocks)] @@ -29,7 +29,7 @@ use back::owned_target_machine::OwnedTargetMachine; use back::write::{create_informational_target_machine, create_target_machine}; use context::SimpleCx; use errors::{AutoDiffWithoutLTO, ParseTargetMachineConfig}; -use llvm_util::target_features_cfg; +use llvm_util::target_config; use rustc_ast::expand::allocator::AllocatorKind; use rustc_ast::expand::autodiff_attrs::AutoDiffItem; use rustc_codegen_ssa::back::lto::{LtoModuleCodegen, SerializedModule, ThinModule}; @@ -37,7 +37,7 @@ use rustc_codegen_ssa::back::write::{ CodegenContext, FatLtoInput, ModuleConfig, TargetMachineFactoryConfig, TargetMachineFactoryFn, }; use rustc_codegen_ssa::traits::*; -use rustc_codegen_ssa::{CodegenResults, CompiledModule, ModuleCodegen}; +use rustc_codegen_ssa::{CodegenResults, CompiledModule, ModuleCodegen, TargetConfig}; use rustc_data_structures::fx::FxIndexMap; use rustc_errors::{DiagCtxtHandle, FatalError}; use rustc_metadata::EncodedMetadata; @@ -338,8 +338,8 @@ impl CodegenBackend for LlvmCodegenBackend { llvm_util::print_version(); } - fn target_features_cfg(&self, sess: &Session) -> (Vec, Vec) { - target_features_cfg(sess) + fn target_config(&self, sess: &Session) -> TargetConfig { + target_config(sess) } fn codegen_crate<'tcx>( diff --git a/compiler/rustc_codegen_llvm/src/llvm/enzyme_ffi.rs b/compiler/rustc_codegen_llvm/src/llvm/enzyme_ffi.rs index 79e4cc8aa7744..2ad39fc853819 100644 --- a/compiler/rustc_codegen_llvm/src/llvm/enzyme_ffi.rs +++ b/compiler/rustc_codegen_llvm/src/llvm/enzyme_ffi.rs @@ -4,7 +4,7 @@ use libc::{c_char, c_uint}; use super::MetadataKindId; -use super::ffi::{BasicBlock, Metadata, Module, Type, Value}; +use super::ffi::{AttributeKind, BasicBlock, Metadata, Module, Type, Value}; use crate::llvm::Bool; #[link(name = "llvm-wrapper", kind = "static")] @@ -17,6 +17,21 @@ unsafe extern "C" { pub(crate) fn LLVMRustEraseInstFromParent(V: &Value); pub(crate) fn LLVMRustGetTerminator<'a>(B: &BasicBlock) -> &'a Value; pub(crate) fn LLVMRustVerifyFunction(V: &Value, action: LLVMRustVerifierFailureAction) -> Bool; + pub(crate) fn LLVMRustHasAttributeAtIndex(V: &Value, i: c_uint, Kind: AttributeKind) -> bool; + pub(crate) fn LLVMRustGetArrayNumElements(Ty: &Type) -> u64; + pub(crate) fn LLVMRustHasFnAttribute( + F: &Value, + Name: *const c_char, + NameLen: libc::size_t, + ) -> bool; + pub(crate) fn LLVMRustRemoveFnAttribute(F: &Value, Name: *const c_char, NameLen: libc::size_t); + pub(crate) fn LLVMGetFirstFunction(M: &Module) -> Option<&Value>; + pub(crate) fn LLVMGetNextFunction(Fn: &Value) -> Option<&Value>; + pub(crate) fn LLVMRustRemoveEnumAttributeAtIndex( + Fn: &Value, + index: c_uint, + kind: AttributeKind, + ); } unsafe extern "C" { diff --git a/compiler/rustc_codegen_llvm/src/llvm/ffi.rs b/compiler/rustc_codegen_llvm/src/llvm/ffi.rs index 3ce3761944b3a..ffb490dcdc22b 100644 --- a/compiler/rustc_codegen_llvm/src/llvm/ffi.rs +++ b/compiler/rustc_codegen_llvm/src/llvm/ffi.rs @@ -1180,7 +1180,7 @@ unsafe extern "C" { // Operations on parameters pub(crate) fn LLVMIsAArgument(Val: &Value) -> Option<&Value>; - pub(crate) fn LLVMCountParams(Fn: &Value) -> c_uint; + pub(crate) safe fn LLVMCountParams(Fn: &Value) -> c_uint; pub(crate) fn LLVMGetParam(Fn: &Value, Index: c_uint) -> &Value; // Operations on basic blocks @@ -2454,6 +2454,9 @@ unsafe extern "C" { DisableSimplifyLibCalls: bool, EmitLifetimeMarkers: bool, RunEnzyme: bool, + PrintBeforeEnzyme: bool, + PrintAfterEnzyme: bool, + PrintPasses: bool, SanitizerOptions: Option<&SanitizerOptions>, PGOGenPath: *const c_char, PGOUsePath: *const c_char, diff --git a/compiler/rustc_codegen_llvm/src/llvm/mod.rs b/compiler/rustc_codegen_llvm/src/llvm/mod.rs index 6ca81c651ed42..d14aab060731a 100644 --- a/compiler/rustc_codegen_llvm/src/llvm/mod.rs +++ b/compiler/rustc_codegen_llvm/src/llvm/mod.rs @@ -41,6 +41,32 @@ pub(crate) fn AddFunctionAttributes<'ll>( } } +pub(crate) fn HasAttributeAtIndex<'ll>( + llfn: &'ll Value, + idx: AttributePlace, + kind: AttributeKind, +) -> bool { + unsafe { LLVMRustHasAttributeAtIndex(llfn, idx.as_uint(), kind) } +} + +pub(crate) fn HasStringAttribute<'ll>(llfn: &'ll Value, name: &str) -> bool { + unsafe { LLVMRustHasFnAttribute(llfn, name.as_c_char_ptr(), name.len()) } +} + +pub(crate) fn RemoveStringAttrFromFn<'ll>(llfn: &'ll Value, name: &str) { + unsafe { LLVMRustRemoveFnAttribute(llfn, name.as_c_char_ptr(), name.len()) } +} + +pub(crate) fn RemoveRustEnumAttributeAtIndex( + llfn: &Value, + place: AttributePlace, + kind: AttributeKind, +) { + unsafe { + LLVMRustRemoveEnumAttributeAtIndex(llfn, place.as_uint(), kind); + } +} + pub(crate) fn AddCallSiteAttributes<'ll>( callsite: &'ll Value, idx: AttributePlace, diff --git a/compiler/rustc_codegen_llvm/src/llvm_util.rs b/compiler/rustc_codegen_llvm/src/llvm_util.rs index 5bf931965c703..8f57f0983abb9 100644 --- a/compiler/rustc_codegen_llvm/src/llvm_util.rs +++ b/compiler/rustc_codegen_llvm/src/llvm_util.rs @@ -6,6 +6,7 @@ use std::sync::Once; use std::{ptr, slice, str}; use libc::c_int; +use rustc_codegen_ssa::TargetConfig; use rustc_codegen_ssa::base::wants_wasm_eh; use rustc_codegen_ssa::codegen_attrs::check_tied_features; use rustc_data_structures::fx::{FxHashMap, FxHashSet}; @@ -18,6 +19,7 @@ use rustc_session::config::{PrintKind, PrintRequest}; use rustc_span::Symbol; use rustc_target::spec::{MergeFunctions, PanicStrategy, SmallDataThresholdSupport}; use rustc_target::target_features::{RUSTC_SPECIAL_FEATURES, RUSTC_SPECIFIC_FEATURES}; +use smallvec::{SmallVec, smallvec}; use crate::back::write::create_informational_target_machine; use crate::errors::{ @@ -179,27 +181,27 @@ impl<'a> TargetFeatureFoldStrength<'a> { pub(crate) struct LLVMFeature<'a> { llvm_feature_name: &'a str, - dependency: Option>, + dependencies: SmallVec<[TargetFeatureFoldStrength<'a>; 1]>, } impl<'a> LLVMFeature<'a> { fn new(llvm_feature_name: &'a str) -> Self { - Self { llvm_feature_name, dependency: None } + Self { llvm_feature_name, dependencies: SmallVec::new() } } - fn with_dependency( + fn with_dependencies( llvm_feature_name: &'a str, - dependency: TargetFeatureFoldStrength<'a>, + dependencies: SmallVec<[TargetFeatureFoldStrength<'a>; 1]>, ) -> Self { - Self { llvm_feature_name, dependency: Some(dependency) } + Self { llvm_feature_name, dependencies } } - fn contains(&self, feat: &str) -> bool { + fn contains(&'a self, feat: &str) -> bool { self.iter().any(|dep| dep == feat) } fn iter(&'a self) -> impl Iterator { - let dependencies = self.dependency.iter().map(|feat| feat.as_str()); + let dependencies = self.dependencies.iter().map(|feat| feat.as_str()); std::iter::once(self.llvm_feature_name).chain(dependencies) } } @@ -209,7 +211,7 @@ impl<'a> IntoIterator for LLVMFeature<'a> { type IntoIter = impl Iterator; fn into_iter(self) -> Self::IntoIter { - let dependencies = self.dependency.into_iter().map(|feat| feat.as_str()); + let dependencies = self.dependencies.into_iter().map(|feat| feat.as_str()); std::iter::once(self.llvm_feature_name).chain(dependencies) } } @@ -239,9 +241,9 @@ pub(crate) fn to_llvm_features<'a>(sess: &Session, s: &'a str) -> Option Some(LLVMFeature::with_dependency( + ("x86", "sse4.2") => Some(LLVMFeature::with_dependencies( "sse4.2", - TargetFeatureFoldStrength::EnableOnly("crc32"), + smallvec![TargetFeatureFoldStrength::EnableOnly("crc32")], )), ("x86", "pclmulqdq") => Some(LLVMFeature::new("pclmul")), ("x86", "rdrand") => Some(LLVMFeature::new("rdrnd")), @@ -256,50 +258,67 @@ pub(crate) fn to_llvm_features<'a>(sess: &Session, s: &'a str) -> Option Some(LLVMFeature::new("perfmon")), ("aarch64", "paca") => Some(LLVMFeature::new("pauth")), ("aarch64", "pacg") => Some(LLVMFeature::new("pauth")), - ("aarch64", "pauth-lr") if get_version().0 < 19 => None, // Before LLVM 20 those two features were packaged together as b16b16 ("aarch64", "sve-b16b16") if get_version().0 < 20 => Some(LLVMFeature::new("b16b16")), ("aarch64", "sme-b16b16") if get_version().0 < 20 => Some(LLVMFeature::new("b16b16")), ("aarch64", "flagm2") => Some(LLVMFeature::new("altnzcv")), // Rust ties fp and neon together. - ("aarch64", "neon") => { - Some(LLVMFeature::with_dependency("neon", TargetFeatureFoldStrength::Both("fp-armv8"))) - } + ("aarch64", "neon") => Some(LLVMFeature::with_dependencies( + "neon", + smallvec![TargetFeatureFoldStrength::Both("fp-armv8")], + )), // In LLVM neon implicitly enables fp, but we manually enable // neon when a feature only implicitly enables fp ("aarch64", "fhm") => Some(LLVMFeature::new("fp16fml")), ("aarch64", "fp16") => Some(LLVMFeature::new("fullfp16")), // Filter out features that are not supported by the current LLVM version - ("aarch64", "fpmr") if get_version().0 != 18 => None, + ("aarch64", "fpmr") => None, // only existed in 18 ("arm", "fp16") => Some(LLVMFeature::new("fullfp16")), - // In LLVM 18, `unaligned-scalar-mem` was merged with `unaligned-vector-mem` into a single - // feature called `fast-unaligned-access`. In LLVM 19, it was split back out. - ("riscv32" | "riscv64", "unaligned-scalar-mem" | "unaligned-vector-mem") - if get_version().0 == 18 => + // Filter out features that are not supported by the current LLVM version + ("loongarch64", "div32" | "lam-bh" | "lamcas" | "ld-seq-sa" | "scq") + if get_version().0 < 20 => { - Some(LLVMFeature::new("fast-unaligned-access")) + None } // Filter out features that are not supported by the current LLVM version - ("riscv32" | "riscv64", "zaamo") if get_version().0 < 19 => None, - ("riscv32" | "riscv64", "zabha") if get_version().0 < 19 => None, - ("riscv32" | "riscv64", "zalrsc") if get_version().0 < 19 => None, - ("riscv32" | "riscv64", "zama16b") if get_version().0 < 19 => None, ("riscv32" | "riscv64", "zacas") if get_version().0 < 20 => None, // Enable the evex512 target feature if an avx512 target feature is enabled. - ("x86", s) if s.starts_with("avx512") => { - Some(LLVMFeature::with_dependency(s, TargetFeatureFoldStrength::EnableOnly("evex512"))) - } + ("x86", s) if s.starts_with("avx512") => Some(LLVMFeature::with_dependencies( + s, + smallvec![TargetFeatureFoldStrength::EnableOnly("evex512")], + )), // Support for `wide-arithmetic` will first land in LLVM 20 as part of // llvm/llvm-project#111598 ("wasm32" | "wasm64", "wide-arithmetic") if get_version() < (20, 0, 0) => None, ("sparc", "leoncasa") => Some(LLVMFeature::new("hasleoncasa")), // In LLVM 19, there is no `v8plus` feature and `v9` means "SPARC-V9 instruction available and SPARC-V8+ ABI used". // https://github.com/llvm/llvm-project/blob/llvmorg-19.1.0/llvm/lib/Target/Sparc/MCTargetDesc/SparcELFObjectWriter.cpp#L27-L28 - // Before LLVM 19, there is no `v8plus` feature and `v9` means "SPARC-V9 instruction available". + // Before LLVM 19, there was no `v8plus` feature and `v9` means "SPARC-V9 instruction available". // https://github.com/llvm/llvm-project/blob/llvmorg-18.1.0/llvm/lib/Target/Sparc/MCTargetDesc/SparcELFObjectWriter.cpp#L26 ("sparc", "v8plus") if get_version().0 == 19 => Some(LLVMFeature::new("v9")), - ("sparc", "v8plus") if get_version().0 < 19 => None, ("powerpc", "power8-crypto") => Some(LLVMFeature::new("crypto")), + // These new `amx` variants and `movrs` were introduced in LLVM20 + ("x86", "amx-avx512" | "amx-fp8" | "amx-movrs" | "amx-tf32" | "amx-transpose") + if get_version().0 < 20 => + { + None + } + ("x86", "movrs") if get_version().0 < 20 => None, + ("x86", "avx10.1") => Some(LLVMFeature::new("avx10.1-512")), + ("x86", "avx10.2") if get_version().0 < 20 => None, + ("x86", "avx10.2") if get_version().0 >= 20 => Some(LLVMFeature::new("avx10.2-512")), + ("x86", "apxf") => Some(LLVMFeature::with_dependencies( + "egpr", + smallvec![ + TargetFeatureFoldStrength::Both("push2pop2"), + TargetFeatureFoldStrength::Both("ppx"), + TargetFeatureFoldStrength::Both("ndd"), + TargetFeatureFoldStrength::Both("ccmp"), + TargetFeatureFoldStrength::Both("cf"), + TargetFeatureFoldStrength::Both("nf"), + TargetFeatureFoldStrength::Both("zu"), + ], + )), (_, s) => Some(LLVMFeature::new(s)), } } @@ -308,7 +327,7 @@ pub(crate) fn to_llvm_features<'a>(sess: &Session, s: &'a str) -> Option (Vec, Vec) { +pub(crate) fn target_config(sess: &Session) -> TargetConfig { // Add base features for the target. // We do *not* add the -Ctarget-features there, and instead duplicate the logic for that below. // The reason is that if LLVM considers a feature implied but we do not, we don't want that to @@ -408,7 +427,85 @@ pub(crate) fn target_features_cfg(sess: &Session) -> (Vec, Vec) let target_features = f(false); let unstable_target_features = f(true); - (target_features, unstable_target_features) + let mut cfg = TargetConfig { + target_features, + unstable_target_features, + has_reliable_f16: true, + has_reliable_f16_math: true, + has_reliable_f128: true, + has_reliable_f128_math: true, + }; + + update_target_reliable_float_cfg(sess, &mut cfg); + cfg +} + +/// Determine whether or not experimental float types are reliable based on known bugs. +fn update_target_reliable_float_cfg(sess: &Session, cfg: &mut TargetConfig) { + let target_arch = sess.target.arch.as_ref(); + let target_os = sess.target.options.os.as_ref(); + let target_env = sess.target.options.env.as_ref(); + let target_abi = sess.target.options.abi.as_ref(); + let target_pointer_width = sess.target.pointer_width; + + cfg.has_reliable_f16 = match (target_arch, target_os) { + // Selection failure + ("s390x", _) => false, + // Unsupported + ("arm64ec", _) => false, + // MinGW ABI bugs + ("x86_64", "windows") if target_env == "gnu" && target_abi != "llvm" => false, + // Infinite recursion + ("csky", _) => false, + ("hexagon", _) => false, + ("powerpc" | "powerpc64", _) => false, + ("sparc" | "sparc64", _) => false, + ("wasm32" | "wasm64", _) => false, + // `f16` support only requires that symbols converting to and from `f32` are available. We + // provide these in `compiler-builtins`, so `f16` should be available on all platforms that + // do not have other ABI issues or LLVM crashes. + _ => true, + }; + + cfg.has_reliable_f128 = match (target_arch, target_os) { + // Unsupported + ("arm64ec", _) => false, + // Selection bug + ("mips64" | "mips64r6", _) => false, + // Selection bug + ("nvptx64", _) => false, + // ABI bugs et al. (full + // list at ) + ("powerpc" | "powerpc64", _) => false, + // ABI unsupported + ("sparc", _) => false, + // Stack alignment bug . NB: tests may + // not fail if our compiler-builtins is linked. + ("x86", _) => false, + // MinGW ABI bugs + ("x86_64", "windows") if target_env == "gnu" && target_abi != "llvm" => false, + // There are no known problems on other platforms, so the only requirement is that symbols + // are available. `compiler-builtins` provides all symbols required for core `f128` + // support, so this should work for everything else. + _ => true, + }; + + // Assume that working `f16` means working `f16` math for most platforms, since + // operations just go through `f32`. + cfg.has_reliable_f16_math = cfg.has_reliable_f16; + + cfg.has_reliable_f128_math = match (target_arch, target_os) { + // LLVM lowers `fp128` math to `long double` symbols even on platforms where + // `long double` is not IEEE binary128. See + // . + // + // This rules out anything that doesn't have `long double` = `binary128`; <= 32 bits + // (ld is `f64`), anything other than Linux (Windows and MacOS use `f64`), and `x86` + // (ld is 80-bit extended precision). + ("x86_64", _) => false, + (_, "linux") if target_pointer_width == 64 => true, + _ => false, + } && cfg.has_reliable_f128; } pub(crate) fn print_version() { @@ -692,7 +789,7 @@ pub(crate) fn global_llvm_features( ) } else if let Some(feature) = feature.strip_prefix('-') { // FIXME: Why do we not remove implied features on "-" here? - // We do the equivalent above in `target_features_cfg`. + // We do the equivalent above in `target_config`. // See . all_rust_features.push((false, feature)); } else if !feature.is_empty() { @@ -771,7 +868,7 @@ pub(crate) fn global_llvm_features( "{}{}", enable_disable, llvm_feature.llvm_feature_name )) - .chain(llvm_feature.dependency.into_iter().filter_map( + .chain(llvm_feature.dependencies.into_iter().filter_map( move |feat| match (enable, feat) { (_, TargetFeatureFoldStrength::Both(f)) | (true, TargetFeatureFoldStrength::EnableOnly(f)) => { diff --git a/compiler/rustc_codegen_llvm/src/mono_item.rs b/compiler/rustc_codegen_llvm/src/mono_item.rs index a64627eaf5986..fdf62a08065c2 100644 --- a/compiler/rustc_codegen_llvm/src/mono_item.rs +++ b/compiler/rustc_codegen_llvm/src/mono_item.rs @@ -120,7 +120,7 @@ impl CodegenCx<'_, '_> { } // Match clang by only supporting COFF and ELF for now. - if self.tcx.sess.target.is_like_osx { + if self.tcx.sess.target.is_like_darwin { return false; } diff --git a/compiler/rustc_codegen_llvm/src/type_.rs b/compiler/rustc_codegen_llvm/src/type_.rs index b89ce90d1a1dc..169036f515298 100644 --- a/compiler/rustc_codegen_llvm/src/type_.rs +++ b/compiler/rustc_codegen_llvm/src/type_.rs @@ -128,6 +128,10 @@ impl<'ll, CX: Borrow>> GenericCx<'ll, CX> { (**self).borrow().llcx } + pub(crate) fn llmod(&self) -> &'ll llvm::Module { + (**self).borrow().llmod + } + pub(crate) fn isize_ty(&self) -> &'ll Type { (**self).borrow().isize_ty } diff --git a/compiler/rustc_codegen_llvm/src/va_arg.rs b/compiler/rustc_codegen_llvm/src/va_arg.rs index 8baa69cefe1e2..c216f0f4a09d0 100644 --- a/compiler/rustc_codegen_llvm/src/va_arg.rs +++ b/compiler/rustc_codegen_llvm/src/va_arg.rs @@ -399,7 +399,7 @@ pub(super) fn emit_va_arg<'ll, 'tcx>( emit_ptr_va_arg(bx, addr, target_ty, false, Align::from_bytes(8).unwrap(), false) } // macOS / iOS AArch64 - "aarch64" if target.is_like_osx => { + "aarch64" if target.is_like_darwin => { emit_ptr_va_arg(bx, addr, target_ty, false, Align::from_bytes(8).unwrap(), true) } "aarch64" => emit_aapcs_va_arg(bx, addr, target_ty), diff --git a/compiler/rustc_codegen_ssa/messages.ftl b/compiler/rustc_codegen_ssa/messages.ftl index 954a601480935..2621935eecf94 100644 --- a/compiler/rustc_codegen_ssa/messages.ftl +++ b/compiler/rustc_codegen_ssa/messages.ftl @@ -4,12 +4,6 @@ codegen_ssa_add_native_library = failed to add native library {$library_path}: { codegen_ssa_aix_strip_not_used = using host's `strip` binary to cross-compile to AIX which is not guaranteed to work -codegen_ssa_apple_deployment_target_invalid = - failed to parse deployment target specified in {$env_var}: {$error} - -codegen_ssa_apple_deployment_target_too_low = - deployment target in {$env_var} was set to {$version}, but the minimum supported by `rustc` is {$os_min} - codegen_ssa_archive_build_failure = failed to build archive at `{$path}`: {$error} codegen_ssa_atomic_compare_exchange = Atomic compare-exchange intrinsic missing failure memory ordering @@ -131,8 +125,7 @@ codegen_ssa_invalid_monomorphization_inserted_type = invalid monomorphization of codegen_ssa_invalid_monomorphization_invalid_bitmask = invalid monomorphization of `{$name}` intrinsic: invalid bitmask `{$mask_ty}`, expected `u{$expected_int_bits}` or `[u8; {$expected_bytes}]` -codegen_ssa_invalid_monomorphization_mask_type = invalid monomorphization of `{$name}` intrinsic: found mask element type is `{$ty}`, expected a signed integer type - .note = the mask may be widened, which only has the correct behavior for signed integers +codegen_ssa_invalid_monomorphization_mask_wrong_element_type = invalid monomorphization of `{$name}` intrinsic: expected mask element type to be an integer, found `{$ty}` codegen_ssa_invalid_monomorphization_mismatched_lengths = invalid monomorphization of `{$name}` intrinsic: mismatched lengths: mask length `{$m_len}` != other vector length `{$v_len}` @@ -164,8 +157,6 @@ codegen_ssa_invalid_monomorphization_simd_shuffle = invalid monomorphization of codegen_ssa_invalid_monomorphization_simd_third = invalid monomorphization of `{$name}` intrinsic: expected SIMD third type, found non-SIMD `{$ty}` -codegen_ssa_invalid_monomorphization_third_arg_element_type = invalid monomorphization of `{$name}` intrinsic: expected element type `{$expected_element}` of third argument `{$third_arg}` to be a signed integer type - codegen_ssa_invalid_monomorphization_third_argument_length = invalid monomorphization of `{$name}` intrinsic: expected third argument with length {$in_len} (same as input type `{$in_ty}`), found `{$arg_ty}` with length {$out_len} codegen_ssa_invalid_monomorphization_unrecognized_intrinsic = invalid monomorphization of `{$name}` intrinsic: unrecognized intrinsic `{$name}` @@ -178,8 +169,6 @@ codegen_ssa_invalid_monomorphization_unsupported_symbol = invalid monomorphizati codegen_ssa_invalid_monomorphization_unsupported_symbol_of_size = invalid monomorphization of `{$name}` intrinsic: unsupported {$symbol} from `{$in_ty}` with element `{$in_elem}` of size `{$size}` to `{$ret_ty}` -codegen_ssa_invalid_monomorphization_vector_argument = invalid monomorphization of `{$name}` intrinsic: vector argument `{$in_ty}`'s element type `{$in_elem}`, expected integer element type - codegen_ssa_invalid_no_sanitize = invalid argument for `no_sanitize` .note = expected one of: `address`, `cfi`, `hwaddress`, `kcfi`, `memory`, `memtag`, `shadow-call-stack`, or `thread` diff --git a/compiler/rustc_codegen_ssa/src/back/apple.rs b/compiler/rustc_codegen_ssa/src/back/apple.rs index 2c8b0ec418dd8..d242efaf4fd42 100644 --- a/compiler/rustc_codegen_ssa/src/back/apple.rs +++ b/compiler/rustc_codegen_ssa/src/back/apple.rs @@ -1,7 +1,4 @@ -use std::env; use std::ffi::OsString; -use std::fmt::{Display, from_fn}; -use std::num::ParseIntError; use std::path::PathBuf; use std::process::Command; @@ -9,9 +6,10 @@ use itertools::Itertools; use rustc_middle::middle::exported_symbols::SymbolExportKind; use rustc_session::Session; use rustc_target::spec::Target; +pub(super) use rustc_target::spec::apple::OSVersion; use tracing::debug; -use crate::errors::{AppleDeploymentTarget, XcrunError, XcrunSdkPathWarning}; +use crate::errors::{XcrunError, XcrunSdkPathWarning}; use crate::fluent_generated as fluent; #[cfg(test)] @@ -134,124 +132,6 @@ pub(super) fn add_data_and_relocation( Ok(()) } -/// Deployment target or SDK version. -/// -/// The size of the numbers in here are limited by Mach-O's `LC_BUILD_VERSION`. -type OSVersion = (u16, u8, u8); - -/// Parse an OS version triple (SDK version or deployment target). -fn parse_version(version: &str) -> Result { - if let Some((major, minor)) = version.split_once('.') { - let major = major.parse()?; - if let Some((minor, patch)) = minor.split_once('.') { - Ok((major, minor.parse()?, patch.parse()?)) - } else { - Ok((major, minor.parse()?, 0)) - } - } else { - Ok((version.parse()?, 0, 0)) - } -} - -pub fn pretty_version(version: OSVersion) -> impl Display { - let (major, minor, patch) = version; - from_fn(move |f| { - write!(f, "{major}.{minor}")?; - if patch != 0 { - write!(f, ".{patch}")?; - } - Ok(()) - }) -} - -/// Minimum operating system versions currently supported by `rustc`. -fn os_minimum_deployment_target(os: &str) -> OSVersion { - // When bumping a version in here, remember to update the platform-support docs too. - // - // NOTE: The defaults may change in future `rustc` versions, so if you are looking for the - // default deployment target, prefer: - // ``` - // $ rustc --print deployment-target - // ``` - match os { - "macos" => (10, 12, 0), - "ios" => (10, 0, 0), - "tvos" => (10, 0, 0), - "watchos" => (5, 0, 0), - "visionos" => (1, 0, 0), - _ => unreachable!("tried to get deployment target for non-Apple platform"), - } -} - -/// The deployment target for the given target. -/// -/// This is similar to `os_minimum_deployment_target`, except that on certain targets it makes sense -/// to raise the minimum OS version. -/// -/// This matches what LLVM does, see in part: -/// -fn minimum_deployment_target(target: &Target) -> OSVersion { - match (&*target.os, &*target.arch, &*target.abi) { - ("macos", "aarch64", _) => (11, 0, 0), - ("ios", "aarch64", "macabi") => (14, 0, 0), - ("ios", "aarch64", "sim") => (14, 0, 0), - ("ios", _, _) if target.llvm_target.starts_with("arm64e") => (14, 0, 0), - // Mac Catalyst defaults to 13.1 in Clang. - ("ios", _, "macabi") => (13, 1, 0), - ("tvos", "aarch64", "sim") => (14, 0, 0), - ("watchos", "aarch64", "sim") => (7, 0, 0), - (os, _, _) => os_minimum_deployment_target(os), - } -} - -/// Name of the environment variable used to fetch the deployment target on the given OS. -pub fn deployment_target_env_var(os: &str) -> &'static str { - match os { - "macos" => "MACOSX_DEPLOYMENT_TARGET", - "ios" => "IPHONEOS_DEPLOYMENT_TARGET", - "watchos" => "WATCHOS_DEPLOYMENT_TARGET", - "tvos" => "TVOS_DEPLOYMENT_TARGET", - "visionos" => "XROS_DEPLOYMENT_TARGET", - _ => unreachable!("tried to get deployment target env var for non-Apple platform"), - } -} - -/// Get the deployment target based on the standard environment variables, or fall back to the -/// minimum version supported by `rustc`. -pub fn deployment_target(sess: &Session) -> OSVersion { - let min = minimum_deployment_target(&sess.target); - let env_var = deployment_target_env_var(&sess.target.os); - - if let Ok(deployment_target) = env::var(env_var) { - match parse_version(&deployment_target) { - Ok(version) => { - let os_min = os_minimum_deployment_target(&sess.target.os); - // It is common that the deployment target is set a bit too low, for example on - // macOS Aarch64 to also target older x86_64. So we only want to warn when variable - // is lower than the minimum OS supported by rustc, not when the variable is lower - // than the minimum for a specific target. - if version < os_min { - sess.dcx().emit_warn(AppleDeploymentTarget::TooLow { - env_var, - version: pretty_version(version).to_string(), - os_min: pretty_version(os_min).to_string(), - }); - } - - // Raise the deployment target to the minimum supported. - version.max(min) - } - Err(error) => { - sess.dcx().emit_err(AppleDeploymentTarget::Invalid { env_var, error }); - min - } - } - } else { - // If no deployment target variable is set, default to the minimum found above. - min - } -} - pub(super) fn add_version_to_llvm_target( llvm_target: &str, deployment_target: OSVersion, @@ -263,18 +143,17 @@ pub(super) fn add_version_to_llvm_target( let environment = components.next(); assert_eq!(components.next(), None, "too many LLVM triple components"); - let (major, minor, patch) = deployment_target; - assert!( !os.contains(|c: char| c.is_ascii_digit()), "LLVM target must not already be versioned" ); + let version = deployment_target.fmt_full(); if let Some(env) = environment { // Insert version into OS, before environment - format!("{arch}-{vendor}-{os}{major}.{minor}.{patch}-{env}") + format!("{arch}-{vendor}-{os}{version}-{env}") } else { - format!("{arch}-{vendor}-{os}{major}.{minor}.{patch}") + format!("{arch}-{vendor}-{os}{version}") } } diff --git a/compiler/rustc_codegen_ssa/src/back/apple/tests.rs b/compiler/rustc_codegen_ssa/src/back/apple/tests.rs index 8df740a4bcf7d..5afe79b71954a 100644 --- a/compiler/rustc_codegen_ssa/src/back/apple/tests.rs +++ b/compiler/rustc_codegen_ssa/src/back/apple/tests.rs @@ -3,23 +3,15 @@ use super::*; #[test] fn test_add_version_to_llvm_target() { assert_eq!( - add_version_to_llvm_target("aarch64-apple-macosx", (10, 14, 1)), + add_version_to_llvm_target("aarch64-apple-macosx", OSVersion::new(10, 14, 1)), "aarch64-apple-macosx10.14.1" ); assert_eq!( - add_version_to_llvm_target("aarch64-apple-ios-simulator", (16, 1, 0)), + add_version_to_llvm_target("aarch64-apple-ios-simulator", OSVersion::new(16, 1, 0)), "aarch64-apple-ios16.1.0-simulator" ); } -#[test] -fn test_parse_version() { - assert_eq!(parse_version("10"), Ok((10, 0, 0))); - assert_eq!(parse_version("10.12"), Ok((10, 12, 0))); - assert_eq!(parse_version("10.12.6"), Ok((10, 12, 6))); - assert_eq!(parse_version("9999.99.99"), Ok((9999, 99, 99))); -} - #[test] #[cfg_attr(not(target_os = "macos"), ignore = "xcode-select is only available on macOS")] fn lookup_developer_dir() { diff --git a/compiler/rustc_codegen_ssa/src/back/archive.rs b/compiler/rustc_codegen_ssa/src/back/archive.rs index 34c84c64070d2..1e1bdfb5977af 100644 --- a/compiler/rustc_codegen_ssa/src/back/archive.rs +++ b/compiler/rustc_codegen_ssa/src/back/archive.rs @@ -13,9 +13,9 @@ use object::read::archive::ArchiveFile; use object::read::macho::FatArch; use rustc_data_structures::fx::FxIndexSet; use rustc_data_structures::memmap::Mmap; +use rustc_fs_util::TempDirBuilder; use rustc_session::Session; use rustc_span::Symbol; -use tempfile::Builder as TempFileBuilder; use tracing::trace; use super::metadata::search_for_section; @@ -501,7 +501,7 @@ impl<'a> ArArchiveBuilder<'a> { // it creates. We need it to be the default mode for back compat reasons however. (See // #107495) To handle this we are telling tempfile to create a temporary directory instead // and then inside this directory create a file using File::create. - let archive_tmpdir = TempFileBuilder::new() + let archive_tmpdir = TempDirBuilder::new() .suffix(".temp-archive") .tempdir_in(output.parent().unwrap_or_else(|| Path::new(""))) .map_err(|err| { diff --git a/compiler/rustc_codegen_ssa/src/back/link.rs b/compiler/rustc_codegen_ssa/src/back/link.rs index b59d73a9aae09..159c17b0af757 100644 --- a/compiler/rustc_codegen_ssa/src/back/link.rs +++ b/compiler/rustc_codegen_ssa/src/back/link.rs @@ -18,7 +18,7 @@ use rustc_data_structures::fx::FxIndexSet; use rustc_data_structures::memmap::Mmap; use rustc_data_structures::temp_dir::MaybeTempDir; use rustc_errors::{DiagCtxtHandle, LintDiagnostic}; -use rustc_fs_util::{fix_windows_verbatim_for_gcc, try_canonicalize}; +use rustc_fs_util::{TempDirBuilder, fix_windows_verbatim_for_gcc, try_canonicalize}; use rustc_hir::def_id::{CrateNum, LOCAL_CRATE}; use rustc_macros::LintDiagnostic; use rustc_metadata::fs::{METADATA_FILENAME, copy_to_stdout, emit_wrapper_file}; @@ -48,7 +48,6 @@ use rustc_target::spec::{ LinkerFeatures, LinkerFlavor, LinkerFlavorCli, Lld, PanicStrategy, RelocModel, RelroLevel, SanitizerSet, SplitDebuginfo, }; -use tempfile::Builder as TempFileBuilder; use tracing::{debug, info, warn}; use super::archive::{ArchiveBuilder, ArchiveBuilderBuilder}; @@ -100,7 +99,7 @@ pub fn link_binary( }); if outputs.outputs.should_link() { - let tmpdir = TempFileBuilder::new() + let tmpdir = TempDirBuilder::new() .prefix("rustc") .tempdir() .unwrap_or_else(|error| sess.dcx().emit_fatal(errors::CreateTempDir { error })); @@ -112,8 +111,12 @@ pub fn link_binary( codegen_results.crate_info.local_crate_name, ); let crate_name = format!("{}", codegen_results.crate_info.local_crate_name); - let out_filename = - output.file_for_writing(outputs, OutputType::Exe, Some(crate_name.as_str())); + let out_filename = output.file_for_writing( + outputs, + OutputType::Exe, + &crate_name, + sess.invocation_temp.as_deref(), + ); match crate_type { CrateType::Rlib => { let _timer = sess.timer("link_rlib"); @@ -294,7 +297,7 @@ fn link_rlib<'a>( let (metadata, metadata_position) = create_wrapper_file( sess, ".rmeta".to_string(), - codegen_results.metadata.raw_data(), + codegen_results.metadata.stub_or_full(), ); let metadata = emit_wrapper_file(sess, &metadata, tmpdir, METADATA_FILENAME); match metadata_position { @@ -959,9 +962,9 @@ fn link_natively( } } - let (level, src) = codegen_results.crate_info.lint_levels.linker_messages; + let level = codegen_results.crate_info.lint_levels.linker_messages; let lint = |msg| { - lint_level(sess, LINKER_MESSAGES, level, src, None, |diag| { + lint_level(sess, LINKER_MESSAGES, level, None, |diag| { LinkerOutput { inner: msg }.decorate_lint(diag) }) }; @@ -1012,7 +1015,7 @@ fn link_natively( // On macOS the external `dsymutil` tool is used to create the packed // debug information. Note that this will read debug information from // the objects on the filesystem which we'll clean up later. - SplitDebuginfo::Packed if sess.target.is_like_osx => { + SplitDebuginfo::Packed if sess.target.is_like_darwin => { let prog = Command::new("dsymutil").arg(out_filename).output(); match prog { Ok(prog) => { @@ -1043,16 +1046,17 @@ fn link_natively( let strip = sess.opts.cg.strip; - if sess.target.is_like_osx { + if sess.target.is_like_darwin { let stripcmd = "rust-objcopy"; match (strip, crate_type) { (Strip::Debuginfo, _) => { strip_with_external_utility(sess, stripcmd, out_filename, &["--strip-debug"]) } // Per the manpage, `-x` is the maximum safe strip level for dynamic libraries. (#93988) - (Strip::Symbols, CrateType::Dylib | CrateType::Cdylib | CrateType::ProcMacro) => { - strip_with_external_utility(sess, stripcmd, out_filename, &["-x"]) - } + ( + Strip::Symbols, + CrateType::Dylib | CrateType::Cdylib | CrateType::ProcMacro | CrateType::Sdylib, + ) => strip_with_external_utility(sess, stripcmd, out_filename, &["-x"]), (Strip::Symbols, _) => { strip_with_external_utility(sess, stripcmd, out_filename, &["--strip-all"]) } @@ -1240,8 +1244,10 @@ fn add_sanitizer_libraries( // which should be linked to both executables and dynamic libraries. // Everywhere else the runtimes are currently distributed as static // libraries which should be linked to executables only. - if matches!(crate_type, CrateType::Dylib | CrateType::Cdylib | CrateType::ProcMacro) - && !(sess.target.is_like_osx || sess.target.is_like_msvc) + if matches!( + crate_type, + CrateType::Dylib | CrateType::Cdylib | CrateType::ProcMacro | CrateType::Sdylib + ) && !(sess.target.is_like_darwin || sess.target.is_like_msvc) { return; } @@ -1294,7 +1300,7 @@ fn link_sanitizer_runtime( let channel = option_env!("CFG_RELEASE_CHANNEL").map(|channel| format!("-{channel}")).unwrap_or_default(); - if sess.target.is_like_osx { + if sess.target.is_like_darwin { // On Apple platforms, the sanitizer is always built as a dylib, and // LLVM will link to `@rpath/*.dylib`, so we need to specify an // rpath to the library as well (the rpath should be absolute, see @@ -1935,6 +1941,7 @@ fn add_late_link_args( codegen_results: &CodegenResults, ) { let any_dynamic_crate = crate_type == CrateType::Dylib + || crate_type == CrateType::Sdylib || codegen_results.crate_info.dependency_formats.iter().any(|(ty, list)| { *ty == crate_type && list.iter().any(|&linkage| linkage == Linkage::Dynamic) }); @@ -2011,6 +2018,12 @@ fn add_linked_symbol_object( file.set_mangling(object::write::Mangling::None); } + if file.format() == object::BinaryFormat::MachO { + // Divide up the sections into sub-sections via symbols for dead code stripping. + // Without this flag, unused `#[no_mangle]` or `#[used]` cannot be discard on MachO targets. + file.set_subsections_via_symbols(); + } + // ld64 requires a relocation to load undefined symbols, see below. // Not strictly needed if linking with lld, but might as well do it there too. let ld64_section_helper = if file.format() == object::BinaryFormat::MachO { @@ -2182,7 +2195,7 @@ fn add_rpath_args( let rpath_config = RPathConfig { libs: &*libs, out_filename: out_filename.to_path_buf(), - is_like_osx: sess.target.is_like_osx, + is_like_darwin: sess.target.is_like_darwin, linker_is_gnu: sess.target.linker_flavor.is_gnu(), }; cmd.link_args(&rpath::get_rpath_linker_args(&rpath_config)); @@ -3044,7 +3057,7 @@ pub(crate) fn are_upstream_rust_objects_already_included(sess: &Session) -> bool /// - The deployment target. /// - The SDK version. fn add_apple_link_args(cmd: &mut dyn Linker, sess: &Session, flavor: LinkerFlavor) { - if !sess.target.is_like_osx { + if !sess.target.is_like_darwin { return; } let LinkerFlavor::Darwin(cc, _) = flavor else { @@ -3115,8 +3128,7 @@ fn add_apple_link_args(cmd: &mut dyn Linker, sess: &Session, flavor: LinkerFlavo _ => bug!("invalid OS/ABI combination for Apple target: {target_os}, {target_abi}"), }; - let (major, minor, patch) = apple::deployment_target(sess); - let min_version = format!("{major}.{minor}.{patch}"); + let min_version = sess.apple_deployment_target().fmt_full().to_string(); // The SDK version is used at runtime when compiling with a newer SDK / version of Xcode: // - By dyld to give extra warnings and errors, see e.g.: @@ -3185,10 +3197,10 @@ fn add_apple_link_args(cmd: &mut dyn Linker, sess: &Session, flavor: LinkerFlavo // The presence of `-mmacosx-version-min` makes CC default to // macOS, and it sets the deployment target. - let (major, minor, patch) = apple::deployment_target(sess); + let version = sess.apple_deployment_target().fmt_full(); // Intentionally pass this as a single argument, Clang doesn't // seem to like it otherwise. - cmd.cc_arg(&format!("-mmacosx-version-min={major}.{minor}.{patch}")); + cmd.cc_arg(&format!("-mmacosx-version-min={version}")); // macOS has no environment, so with these two, we've told CC the // four desired parameters. diff --git a/compiler/rustc_codegen_ssa/src/back/linker.rs b/compiler/rustc_codegen_ssa/src/back/linker.rs index bcf18cf57be23..80ee8ea22288a 100644 --- a/compiler/rustc_codegen_ssa/src/back/linker.rs +++ b/compiler/rustc_codegen_ssa/src/back/linker.rs @@ -337,7 +337,12 @@ pub(crate) trait Linker { fn debuginfo(&mut self, strip: Strip, natvis_debugger_visualizers: &[PathBuf]); fn no_crt_objects(&mut self); fn no_default_libraries(&mut self); - fn export_symbols(&mut self, tmpdir: &Path, crate_type: CrateType, symbols: &[String]); + fn export_symbols( + &mut self, + tmpdir: &Path, + crate_type: CrateType, + symbols: &[(String, SymbolExportKind)], + ); fn subsystem(&mut self, subsystem: &str); fn linker_plugin_lto(&mut self); fn add_eh_frame_header(&mut self) {} @@ -373,7 +378,7 @@ impl<'a> GccLinker<'a> { // * On OSX they have their own linker, not binutils' // * For WebAssembly the only functional linker is LLD, which doesn't // support hint flags - !self.sess.target.is_like_osx && !self.sess.target.is_like_wasm + !self.sess.target.is_like_darwin && !self.sess.target.is_like_wasm } // Some platforms take hints about whether a library is static or dynamic. @@ -425,7 +430,7 @@ impl<'a> GccLinker<'a> { fn build_dylib(&mut self, crate_type: CrateType, out_filename: &Path) { // On mac we need to tell the linker to let this library be rpathed - if self.sess.target.is_like_osx { + if self.sess.target.is_like_darwin { if self.is_cc() { // `-dynamiclib` makes `cc` pass `-dylib` to the linker. self.cc_arg("-dynamiclib"); @@ -471,7 +476,7 @@ impl<'a> GccLinker<'a> { fn with_as_needed(&mut self, as_needed: bool, f: impl FnOnce(&mut Self)) { if !as_needed { - if self.sess.target.is_like_osx { + if self.sess.target.is_like_darwin { // FIXME(81490): ld64 doesn't support these flags but macOS 11 // has -needed-l{} / -needed_library {} // but we have no way to detect that here. @@ -486,7 +491,7 @@ impl<'a> GccLinker<'a> { f(self); if !as_needed { - if self.sess.target.is_like_osx { + if self.sess.target.is_like_darwin { // See above FIXME comment } else if self.is_gnu && !self.sess.target.is_like_windows { self.link_arg("--as-needed"); @@ -619,7 +624,7 @@ impl<'a> Linker for GccLinker<'a> { let colon = if verbatim && self.is_gnu { ":" } else { "" }; if !whole_archive { self.link_or_cc_arg(format!("-l{colon}{name}")); - } else if self.sess.target.is_like_osx { + } else if self.sess.target.is_like_darwin { // -force_load is the macOS equivalent of --whole-archive, but it // involves passing the full path to the library to link. self.link_arg("-force_load"); @@ -635,7 +640,7 @@ impl<'a> Linker for GccLinker<'a> { self.hint_static(); if !whole_archive { self.link_or_cc_arg(path); - } else if self.sess.target.is_like_osx { + } else if self.sess.target.is_like_darwin { self.link_arg("-force_load").link_arg(path); } else { self.link_arg("--whole-archive").link_arg(path).link_arg("--no-whole-archive"); @@ -670,7 +675,7 @@ impl<'a> Linker for GccLinker<'a> { // -dead_strip can't be part of the pre_link_args because it's also used // for partial linking when using multiple codegen units (-r). So we // insert it here. - if self.sess.target.is_like_osx { + if self.sess.target.is_like_darwin { self.link_arg("-dead_strip"); // If we're building a dylib, we don't use --gc-sections because LLVM @@ -728,7 +733,7 @@ impl<'a> Linker for GccLinker<'a> { fn debuginfo(&mut self, strip: Strip, _: &[PathBuf]) { // MacOS linker doesn't support stripping symbols directly anymore. - if self.sess.target.is_like_osx { + if self.sess.target.is_like_darwin { return; } @@ -770,7 +775,12 @@ impl<'a> Linker for GccLinker<'a> { } } - fn export_symbols(&mut self, tmpdir: &Path, crate_type: CrateType, symbols: &[String]) { + fn export_symbols( + &mut self, + tmpdir: &Path, + crate_type: CrateType, + symbols: &[(String, SymbolExportKind)], + ) { // Symbol visibility in object files typically takes care of this. if crate_type == CrateType::Executable { let should_export_executable_symbols = @@ -795,11 +805,11 @@ impl<'a> Linker for GccLinker<'a> { debug!("EXPORTED SYMBOLS:"); - if self.sess.target.is_like_osx { + if self.sess.target.is_like_darwin { // Write a plain, newline-separated list of symbols let res: io::Result<()> = try { let mut f = File::create_buffered(&path)?; - for sym in symbols { + for (sym, _) in symbols { debug!(" _{sym}"); writeln!(f, "_{sym}")?; } @@ -814,9 +824,12 @@ impl<'a> Linker for GccLinker<'a> { // .def file similar to MSVC one but without LIBRARY section // because LD doesn't like when it's empty writeln!(f, "EXPORTS")?; - for symbol in symbols { + for (symbol, kind) in symbols { + let kind_marker = if *kind == SymbolExportKind::Data { " DATA" } else { "" }; debug!(" _{symbol}"); - writeln!(f, " {symbol}")?; + // Quote the name in case it's reserved by linker in some way + // (this accounts for names with dots in particular). + writeln!(f, " \"{symbol}\"{kind_marker}")?; } }; if let Err(error) = res { @@ -829,7 +842,7 @@ impl<'a> Linker for GccLinker<'a> { writeln!(f, "{{")?; if !symbols.is_empty() { writeln!(f, " global:")?; - for sym in symbols { + for (sym, _) in symbols { debug!(" {sym};"); writeln!(f, " {sym};")?; } @@ -841,7 +854,7 @@ impl<'a> Linker for GccLinker<'a> { } } - if self.sess.target.is_like_osx { + if self.sess.target.is_like_darwin { self.link_arg("-exported_symbols_list").link_arg(path); } else if self.sess.target.is_like_solaris { self.link_arg("-M").link_arg(path); @@ -1096,7 +1109,12 @@ impl<'a> Linker for MsvcLinker<'a> { // crates. Upstream rlibs may be linked statically to this dynamic library, // in which case they may continue to transitively be used and hence need // their symbols exported. - fn export_symbols(&mut self, tmpdir: &Path, crate_type: CrateType, symbols: &[String]) { + fn export_symbols( + &mut self, + tmpdir: &Path, + crate_type: CrateType, + symbols: &[(String, SymbolExportKind)], + ) { // Symbol visibility takes care of this typically if crate_type == CrateType::Executable { let should_export_executable_symbols = @@ -1114,9 +1132,10 @@ impl<'a> Linker for MsvcLinker<'a> { // straight to exports. writeln!(f, "LIBRARY")?; writeln!(f, "EXPORTS")?; - for symbol in symbols { + for (symbol, kind) in symbols { + let kind_marker = if *kind == SymbolExportKind::Data { " DATA" } else { "" }; debug!(" _{symbol}"); - writeln!(f, " {symbol}")?; + writeln!(f, " {symbol}{kind_marker}")?; } }; if let Err(error) = res { @@ -1257,14 +1276,19 @@ impl<'a> Linker for EmLinker<'a> { self.cc_arg("-nodefaultlibs"); } - fn export_symbols(&mut self, _tmpdir: &Path, _crate_type: CrateType, symbols: &[String]) { + fn export_symbols( + &mut self, + _tmpdir: &Path, + _crate_type: CrateType, + symbols: &[(String, SymbolExportKind)], + ) { debug!("EXPORTED SYMBOLS:"); self.cc_arg("-s"); let mut arg = OsString::from("EXPORTED_FUNCTIONS="); let encoded = serde_json::to_string( - &symbols.iter().map(|sym| "_".to_owned() + sym).collect::>(), + &symbols.iter().map(|(sym, _)| "_".to_owned() + sym).collect::>(), ) .unwrap(); debug!("{encoded}"); @@ -1426,8 +1450,13 @@ impl<'a> Linker for WasmLd<'a> { fn no_default_libraries(&mut self) {} - fn export_symbols(&mut self, _tmpdir: &Path, _crate_type: CrateType, symbols: &[String]) { - for sym in symbols { + fn export_symbols( + &mut self, + _tmpdir: &Path, + _crate_type: CrateType, + symbols: &[(String, SymbolExportKind)], + ) { + for (sym, _) in symbols { self.link_args(&["--export", sym]); } @@ -1561,7 +1590,7 @@ impl<'a> Linker for L4Bender<'a> { self.cc_arg("-nostdlib"); } - fn export_symbols(&mut self, _: &Path, _: CrateType, _: &[String]) { + fn export_symbols(&mut self, _: &Path, _: CrateType, _: &[(String, SymbolExportKind)]) { // ToDo, not implemented, copy from GCC self.sess.dcx().emit_warn(errors::L4BenderExportingSymbolsUnimplemented); } @@ -1718,12 +1747,17 @@ impl<'a> Linker for AixLinker<'a> { fn no_default_libraries(&mut self) {} - fn export_symbols(&mut self, tmpdir: &Path, _crate_type: CrateType, symbols: &[String]) { + fn export_symbols( + &mut self, + tmpdir: &Path, + _crate_type: CrateType, + symbols: &[(String, SymbolExportKind)], + ) { let path = tmpdir.join("list.exp"); let res: io::Result<()> = try { let mut f = File::create_buffered(&path)?; // FIXME: use llvm-nm to generate export list. - for symbol in symbols { + for (symbol, _) in symbols { debug!(" _{symbol}"); writeln!(f, " {symbol}")?; } @@ -1767,9 +1801,12 @@ fn for_each_exported_symbols_include_dep<'tcx>( } } -pub(crate) fn exported_symbols(tcx: TyCtxt<'_>, crate_type: CrateType) -> Vec { +pub(crate) fn exported_symbols( + tcx: TyCtxt<'_>, + crate_type: CrateType, +) -> Vec<(String, SymbolExportKind)> { if let Some(ref exports) = tcx.sess.target.override_export_symbols { - return exports.iter().map(ToString::to_string).collect(); + return exports.iter().map(|name| (name.to_string(), SymbolExportKind::Text)).collect(); } if let CrateType::ProcMacro = crate_type { @@ -1779,7 +1816,10 @@ pub(crate) fn exported_symbols(tcx: TyCtxt<'_>, crate_type: CrateType) -> Vec, crate_type: CrateType) -> Vec { +fn exported_symbols_for_non_proc_macro( + tcx: TyCtxt<'_>, + crate_type: CrateType, +) -> Vec<(String, SymbolExportKind)> { let mut symbols = Vec::new(); let export_threshold = symbol_export::crates_export_threshold(&[crate_type]); for_each_exported_symbols_include_dep(tcx, crate_type, |symbol, info, cnum| { @@ -1787,17 +1827,18 @@ fn exported_symbols_for_non_proc_macro(tcx: TyCtxt<'_>, crate_type: CrateType) - // from any cdylib. The latter doesn't work anyway as we use hidden visibility for // compiler-builtins. Most linkers silently ignore it, but ld64 gives a warning. if info.level.is_below_threshold(export_threshold) && !tcx.is_compiler_builtins(cnum) { - symbols.push(symbol_export::exporting_symbol_name_for_instance_in_crate( - tcx, symbol, cnum, + symbols.push(( + symbol_export::exporting_symbol_name_for_instance_in_crate(tcx, symbol, cnum), + info.kind, )); - symbol_export::extend_exported_symbols(&mut symbols, tcx, symbol, cnum); + symbol_export::extend_exported_symbols(&mut symbols, tcx, symbol, info, cnum); } }); symbols } -fn exported_symbols_for_proc_macro_crate(tcx: TyCtxt<'_>) -> Vec { +fn exported_symbols_for_proc_macro_crate(tcx: TyCtxt<'_>) -> Vec<(String, SymbolExportKind)> { // `exported_symbols` will be empty when !should_codegen. if !tcx.sess.opts.output_types.should_codegen() { return Vec::new(); @@ -1807,7 +1848,10 @@ fn exported_symbols_for_proc_macro_crate(tcx: TyCtxt<'_>) -> Vec { let proc_macro_decls_name = tcx.sess.generate_proc_macro_decls_symbol(stable_crate_id); let metadata_symbol_name = exported_symbols::metadata_symbol_name(tcx); - vec![proc_macro_decls_name, metadata_symbol_name] + vec![ + (proc_macro_decls_name, SymbolExportKind::Text), + (metadata_symbol_name, SymbolExportKind::Text), + ] } pub(crate) fn linked_symbols( @@ -1815,7 +1859,7 @@ pub(crate) fn linked_symbols( crate_type: CrateType, ) -> Vec<(String, SymbolExportKind)> { match crate_type { - CrateType::Executable | CrateType::Cdylib | CrateType::Dylib => (), + CrateType::Executable | CrateType::Cdylib | CrateType::Dylib | CrateType::Sdylib => (), CrateType::Staticlib | CrateType::ProcMacro | CrateType::Rlib => { return Vec::new(); } @@ -1829,7 +1873,9 @@ pub(crate) fn linked_symbols( || info.used { symbols.push(( - symbol_export::linking_symbol_name_for_instance_in_crate(tcx, symbol, cnum), + symbol_export::linking_symbol_name_for_instance_in_crate( + tcx, symbol, info.kind, cnum, + ), info.kind, )); } @@ -1904,7 +1950,13 @@ impl<'a> Linker for PtxLinker<'a> { fn ehcont_guard(&mut self) {} - fn export_symbols(&mut self, _tmpdir: &Path, _crate_type: CrateType, _symbols: &[String]) {} + fn export_symbols( + &mut self, + _tmpdir: &Path, + _crate_type: CrateType, + _symbols: &[(String, SymbolExportKind)], + ) { + } fn subsystem(&mut self, _subsystem: &str) {} @@ -1973,10 +2025,15 @@ impl<'a> Linker for LlbcLinker<'a> { fn ehcont_guard(&mut self) {} - fn export_symbols(&mut self, _tmpdir: &Path, _crate_type: CrateType, symbols: &[String]) { + fn export_symbols( + &mut self, + _tmpdir: &Path, + _crate_type: CrateType, + symbols: &[(String, SymbolExportKind)], + ) { match _crate_type { CrateType::Cdylib => { - for sym in symbols { + for (sym, _) in symbols { self.link_args(&["--export-symbol", sym]); } } @@ -2050,11 +2107,16 @@ impl<'a> Linker for BpfLinker<'a> { fn ehcont_guard(&mut self) {} - fn export_symbols(&mut self, tmpdir: &Path, _crate_type: CrateType, symbols: &[String]) { + fn export_symbols( + &mut self, + tmpdir: &Path, + _crate_type: CrateType, + symbols: &[(String, SymbolExportKind)], + ) { let path = tmpdir.join("symbols"); let res: io::Result<()> = try { let mut f = File::create_buffered(&path)?; - for sym in symbols { + for (sym, _) in symbols { writeln!(f, "{sym}")?; } }; diff --git a/compiler/rustc_codegen_ssa/src/back/metadata.rs b/compiler/rustc_codegen_ssa/src/back/metadata.rs index 68b453ff42425..ec46c71b0e401 100644 --- a/compiler/rustc_codegen_ssa/src/back/metadata.rs +++ b/compiler/rustc_codegen_ssa/src/back/metadata.rs @@ -214,7 +214,7 @@ pub(crate) fn create_object_file(sess: &Session) -> Option u8 { pub(super) fn elf_e_flags(architecture: Architecture, sess: &Session) -> u32 { match architecture { - Architecture::Mips => { - let arch = match sess.target.options.cpu.as_ref() { - "mips1" => elf::EF_MIPS_ARCH_1, - "mips2" => elf::EF_MIPS_ARCH_2, + Architecture::Mips | Architecture::Mips64 | Architecture::Mips64_N32 => { + // "N32" indicates an "ILP32" data model on a 64-bit MIPS CPU + // like SPARC's "v8+", x86_64's "x32", or the watchOS "arm64_32". + let is_32bit = architecture == Architecture::Mips; + let mut e_flags = match sess.target.options.cpu.as_ref() { + "mips1" if is_32bit => elf::EF_MIPS_ARCH_1, + "mips2" if is_32bit => elf::EF_MIPS_ARCH_2, "mips3" => elf::EF_MIPS_ARCH_3, "mips4" => elf::EF_MIPS_ARCH_4, "mips5" => elf::EF_MIPS_ARCH_5, - s if s.contains("r6") => elf::EF_MIPS_ARCH_32R6, - _ => elf::EF_MIPS_ARCH_32R2, + "mips32r2" if is_32bit => elf::EF_MIPS_ARCH_32R2, + "mips32r6" if is_32bit => elf::EF_MIPS_ARCH_32R6, + "mips64r2" if !is_32bit => elf::EF_MIPS_ARCH_64R2, + "mips64r6" if !is_32bit => elf::EF_MIPS_ARCH_64R6, + s if s.starts_with("mips32") && !is_32bit => { + sess.dcx().fatal(format!("invalid CPU `{}` for 64-bit MIPS target", s)) + } + s if s.starts_with("mips64") && is_32bit => { + sess.dcx().fatal(format!("invalid CPU `{}` for 32-bit MIPS target", s)) + } + _ if is_32bit => elf::EF_MIPS_ARCH_32R2, + _ => elf::EF_MIPS_ARCH_64R2, }; - let mut e_flags = elf::EF_MIPS_CPIC | arch; - - // If the ABI is explicitly given, use it or default to O32. - match sess.target.options.llvm_abiname.to_lowercase().as_str() { - "n32" => e_flags |= elf::EF_MIPS_ABI2, - "o32" => e_flags |= elf::EF_MIPS_ABI_O32, - _ => e_flags |= elf::EF_MIPS_ABI_O32, + // If the ABI is explicitly given, use it, or default to O32 on 32-bit MIPS, + // which is the only "true" 32-bit option that LLVM supports. + match sess.target.options.llvm_abiname.as_ref() { + "o32" if is_32bit => e_flags |= elf::EF_MIPS_ABI_O32, + "n32" if !is_32bit => e_flags |= elf::EF_MIPS_ABI2, + "n64" if !is_32bit => {} + "" if is_32bit => e_flags |= elf::EF_MIPS_ABI_O32, + "" => sess.dcx().fatal("LLVM ABI must be specifed for 64-bit MIPS targets"), + s if is_32bit => { + sess.dcx().fatal(format!("invalid LLVM ABI `{}` for 32-bit MIPS target", s)) + } + s => sess.dcx().fatal(format!("invalid LLVM ABI `{}` for 64-bit MIPS target", s)), }; if sess.target.options.relocation_model != RelocModel::Static { - e_flags |= elf::EF_MIPS_PIC; + // PIC means position-independent code. CPIC means "calls PIC". + // CPIC was mutually exclusive with PIC according to + // the SVR4 MIPS ABI https://refspecs.linuxfoundation.org/elf/mipsabi.pdf + // and should have only appeared on static objects with dynamically calls. + // At some point someone (GCC?) decided to set CPIC even for PIC. + // Nowadays various things expect both set on the same object file + // and may even error if you mix CPIC and non-CPIC object files, + // despite that being the entire point of the CPIC ABI extension! + // As we are in Rome, we do as the Romans do. + e_flags |= elf::EF_MIPS_PIC | elf::EF_MIPS_CPIC; } if sess.target.options.cpu.contains("r6") { e_flags |= elf::EF_MIPS_NAN2008; } e_flags } - Architecture::Mips64 => { - // copied from `mips64el-linux-gnuabi64-gcc foo.c -c` - let e_flags = elf::EF_MIPS_CPIC - | elf::EF_MIPS_PIC - | if sess.target.options.cpu.contains("r6") { - elf::EF_MIPS_ARCH_64R6 | elf::EF_MIPS_NAN2008 - } else { - elf::EF_MIPS_ARCH_64R2 - }; - e_flags - } Architecture::Riscv32 | Architecture::Riscv64 => { // Source: https://github.com/riscv-non-isa/riscv-elf-psabi-doc/blob/079772828bd10933d34121117a222b4cc0ee2200/riscv-elf.adoc let mut e_flags: u32 = 0x0; @@ -388,13 +404,13 @@ pub(super) fn elf_e_flags(architecture: Architecture, sess: &Session) -> u32 { fn macho_object_build_version_for_target(sess: &Session) -> object::write::MachOBuildVersion { /// The `object` crate demands "X.Y.Z encoded in nibbles as xxxx.yy.zz" /// e.g. minOS 14.0 = 0x000E0000, or SDK 16.2 = 0x00100200 - fn pack_version((major, minor, patch): (u16, u8, u8)) -> u32 { + fn pack_version(apple::OSVersion { major, minor, patch }: apple::OSVersion) -> u32 { let (major, minor, patch) = (major as u32, minor as u32, patch as u32); (major << 16) | (minor << 8) | patch } let platform = apple::macho_platform(&sess.target); - let min_os = apple::deployment_target(sess); + let min_os = sess.apple_deployment_target(); let mut build_version = object::write::MachOBuildVersion::default(); build_version.platform = platform; @@ -540,8 +556,8 @@ pub fn create_compressed_metadata_file( symbol_name: &str, ) -> Vec { let mut packed_metadata = rustc_metadata::METADATA_HEADER.to_vec(); - packed_metadata.write_all(&(metadata.raw_data().len() as u64).to_le_bytes()).unwrap(); - packed_metadata.extend(metadata.raw_data()); + packed_metadata.write_all(&(metadata.stub_or_full().len() as u64).to_le_bytes()).unwrap(); + packed_metadata.extend(metadata.stub_or_full()); let Some(mut file) = create_object_file(sess) else { if sess.target.is_like_wasm { diff --git a/compiler/rustc_codegen_ssa/src/back/mod.rs b/compiler/rustc_codegen_ssa/src/back/mod.rs index 64b5d4569ecce..8d1adb9993038 100644 --- a/compiler/rustc_codegen_ssa/src/back/mod.rs +++ b/compiler/rustc_codegen_ssa/src/back/mod.rs @@ -19,8 +19,8 @@ pub mod write; /// /// Certain optimizations also depend on the deployment target. pub fn versioned_llvm_target(sess: &Session) -> Cow<'_, str> { - if sess.target.is_like_osx { - apple::add_version_to_llvm_target(&sess.target.llvm_target, apple::deployment_target(sess)) + if sess.target.is_like_darwin { + apple::add_version_to_llvm_target(&sess.target.llvm_target, sess.apple_deployment_target()) .into() } else { // FIXME(madsmtm): Certain other targets also include a version, diff --git a/compiler/rustc_codegen_ssa/src/back/rpath.rs b/compiler/rustc_codegen_ssa/src/back/rpath.rs index d633cc98ac87e..7bb8979e8820f 100644 --- a/compiler/rustc_codegen_ssa/src/back/rpath.rs +++ b/compiler/rustc_codegen_ssa/src/back/rpath.rs @@ -9,7 +9,7 @@ use tracing::debug; pub(super) struct RPathConfig<'a> { pub libs: &'a [&'a Path], pub out_filename: PathBuf, - pub is_like_osx: bool, + pub is_like_darwin: bool, pub linker_is_gnu: bool, } @@ -63,7 +63,7 @@ fn get_rpaths_relative_to_output(config: &RPathConfig<'_>) -> Vec { fn get_rpath_relative_to_output(config: &RPathConfig<'_>, lib: &Path) -> OsString { // Mac doesn't appear to support $ORIGIN - let prefix = if config.is_like_osx { "@loader_path" } else { "$ORIGIN" }; + let prefix = if config.is_like_darwin { "@loader_path" } else { "$ORIGIN" }; // Strip filenames let lib = lib.parent().unwrap(); diff --git a/compiler/rustc_codegen_ssa/src/back/rpath/tests.rs b/compiler/rustc_codegen_ssa/src/back/rpath/tests.rs index f1a30105c5908..ab8fbedb81297 100644 --- a/compiler/rustc_codegen_ssa/src/back/rpath/tests.rs +++ b/compiler/rustc_codegen_ssa/src/back/rpath/tests.rs @@ -28,7 +28,7 @@ fn test_rpath_relative() { if cfg!(target_os = "macos") { let config = &mut RPathConfig { libs: &[], - is_like_osx: true, + is_like_darwin: true, linker_is_gnu: false, out_filename: PathBuf::from("bin/rustc"), }; @@ -38,7 +38,7 @@ fn test_rpath_relative() { let config = &mut RPathConfig { libs: &[], out_filename: PathBuf::from("bin/rustc"), - is_like_osx: false, + is_like_darwin: false, linker_is_gnu: true, }; let res = get_rpath_relative_to_output(config, Path::new("lib/libstd.so")); @@ -51,7 +51,7 @@ fn test_rpath_relative_issue_119571() { let config = &mut RPathConfig { libs: &[], out_filename: PathBuf::from("rustc"), - is_like_osx: false, + is_like_darwin: false, linker_is_gnu: true, }; // Should not panic when out_filename only contains filename. diff --git a/compiler/rustc_codegen_ssa/src/back/symbol_export.rs b/compiler/rustc_codegen_ssa/src/back/symbol_export.rs index fd06c50eb8145..5f139ea1ac510 100644 --- a/compiler/rustc_codegen_ssa/src/back/symbol_export.rs +++ b/compiler/rustc_codegen_ssa/src/back/symbol_export.rs @@ -29,7 +29,7 @@ fn crate_export_threshold(crate_type: CrateType) -> SymbolExportLevel { CrateType::Executable | CrateType::Staticlib | CrateType::ProcMacro | CrateType::Cdylib => { SymbolExportLevel::C } - CrateType::Rlib | CrateType::Dylib => SymbolExportLevel::Rust, + CrateType::Rlib | CrateType::Dylib | CrateType::Sdylib => SymbolExportLevel::Rust, } } @@ -45,7 +45,7 @@ pub fn crates_export_threshold(crate_types: &[CrateType]) -> SymbolExportLevel { } fn reachable_non_generics_provider(tcx: TyCtxt<'_>, _: LocalCrate) -> DefIdMap { - if !tcx.sess.opts.output_types.should_codegen() { + if !tcx.sess.opts.output_types.should_codegen() && !tcx.is_sdylib_interface_build() { return Default::default(); } @@ -59,6 +59,8 @@ fn reachable_non_generics_provider(tcx: TyCtxt<'_>, _: LocalCrate) -> DefIdMap = tcx .reachable_set(()) .items() @@ -99,6 +101,17 @@ fn reachable_non_generics_provider(tcx: TyCtxt<'_>, _: LocalCrate) -> DefIdMap, def_id: DefId) -> b tcx.reachable_non_generics(def_id.krate).contains_key(&def_id) } -fn exported_symbols_provider_local( - tcx: TyCtxt<'_>, +fn exported_symbols_provider_local<'tcx>( + tcx: TyCtxt<'tcx>, _: LocalCrate, -) -> &[(ExportedSymbol<'_>, SymbolExportInfo)] { - if !tcx.sess.opts.output_types.should_codegen() { +) -> &'tcx [(ExportedSymbol<'tcx>, SymbolExportInfo)] { + if !tcx.sess.opts.output_types.should_codegen() && !tcx.is_sdylib_interface_build() { return &[]; } @@ -321,6 +334,38 @@ fn exported_symbols_provider_local( let cgus = tcx.collect_and_partition_mono_items(()).codegen_units; + // Do not export symbols that cannot be instantiated by downstream crates. + let reachable_set = tcx.reachable_set(()); + let is_local_to_current_crate = |ty: Ty<'_>| { + let no_refs = ty.peel_refs(); + let root_def_id = match no_refs.kind() { + ty::Closure(closure, _) => *closure, + ty::FnDef(def_id, _) => *def_id, + ty::Coroutine(def_id, _) => *def_id, + ty::CoroutineClosure(def_id, _) => *def_id, + ty::CoroutineWitness(def_id, _) => *def_id, + _ => return false, + }; + let Some(root_def_id) = root_def_id.as_local() else { + return false; + }; + + let is_local = !reachable_set.contains(&root_def_id); + is_local + }; + + let is_instantiable_downstream = + |did: Option, generic_args: GenericArgsRef<'tcx>| { + generic_args + .types() + .chain(did.into_iter().map(move |did| tcx.type_of(did).skip_binder())) + .all(move |arg| { + arg.walk().all(|ty| { + ty.as_type().map_or(true, |ty| !is_local_to_current_crate(ty)) + }) + }) + }; + // The symbols created in this loop are sorted below it #[allow(rustc::potential_query_instability)] for (mono_item, data) in cgus.iter().flat_map(|cgu| cgu.items().iter()) { @@ -349,7 +394,12 @@ fn exported_symbols_provider_local( match *mono_item { MonoItem::Fn(Instance { def: InstanceKind::Item(def), args }) => { - if args.non_erasable_generics().next().is_some() { + let has_generics = args.non_erasable_generics().next().is_some(); + + let should_export = + has_generics && is_instantiable_downstream(Some(def), &args); + + if should_export { let symbol = ExportedSymbol::Generic(def, args); symbols.push(( symbol, @@ -364,8 +414,33 @@ fn exported_symbols_provider_local( MonoItem::Fn(Instance { def: InstanceKind::DropGlue(_, Some(ty)), args }) => { // A little sanity-check assert_eq!(args.non_erasable_generics().next(), Some(GenericArgKind::Type(ty))); + + // Drop glue did is always going to be non-local outside of libcore, thus we don't need to check it's locality (which includes invoking `type_of` query). + let should_export = match ty.kind() { + ty::Adt(_, args) => is_instantiable_downstream(None, args), + ty::Closure(_, args) => is_instantiable_downstream(None, args), + _ => true, + }; + + if should_export { + symbols.push(( + ExportedSymbol::DropGlue(ty), + SymbolExportInfo { + level: SymbolExportLevel::Rust, + kind: SymbolExportKind::Text, + used: false, + }, + )); + } + } + MonoItem::Fn(Instance { + def: InstanceKind::AsyncDropGlueCtorShim(_, ty), + args, + }) => { + // A little sanity-check + assert_eq!(args.non_erasable_generics().next(), Some(GenericArgKind::Type(ty))); symbols.push(( - ExportedSymbol::DropGlue(ty), + ExportedSymbol::AsyncDropGlueCtorShim(ty), SymbolExportInfo { level: SymbolExportLevel::Rust, kind: SymbolExportKind::Text, @@ -373,14 +448,9 @@ fn exported_symbols_provider_local( }, )); } - MonoItem::Fn(Instance { - def: InstanceKind::AsyncDropGlueCtorShim(_, Some(ty)), - args, - }) => { - // A little sanity-check - assert_eq!(args.non_erasable_generics().next(), Some(GenericArgKind::Type(ty))); + MonoItem::Fn(Instance { def: InstanceKind::AsyncDropGlue(def, ty), args: _ }) => { symbols.push(( - ExportedSymbol::AsyncDropGlueCtorShim(ty), + ExportedSymbol::AsyncDropGlue(def, ty), SymbolExportInfo { level: SymbolExportLevel::Rust, kind: SymbolExportKind::Text, @@ -429,14 +499,14 @@ fn upstream_monomorphizations_provider( if let Some(async_drop_in_place_fn_def_id) = async_drop_in_place_fn_def_id { (async_drop_in_place_fn_def_id, tcx.mk_args(&[ty.into()])) } else { - // `drop_in_place` in place does not exist, don't try - // to use it. continue; } } + ExportedSymbol::AsyncDropGlue(def_id, ty) => (def_id, tcx.mk_args(&[ty.into()])), ExportedSymbol::NonGeneric(..) | ExportedSymbol::ThreadLocalShim(..) - | ExportedSymbol::NoDefId(..) => { + | ExportedSymbol::NoDefId(..) + | ExportedSymbol::Alias { .. } => { // These are no monomorphizations continue; } @@ -556,7 +626,7 @@ pub(crate) fn symbol_name_for_instance_in_crate<'tcx>( ExportedSymbol::Generic(def_id, args) => { rustc_symbol_mangling::symbol_name_for_instance_in_crate( tcx, - Instance::new(def_id, args), + Instance::new_raw(def_id, args), instantiating_crate, ) } @@ -582,6 +652,14 @@ pub(crate) fn symbol_name_for_instance_in_crate<'tcx>( instantiating_crate, ) } + ExportedSymbol::AsyncDropGlue(def_id, ty) => { + rustc_symbol_mangling::symbol_name_for_instance_in_crate( + tcx, + Instance::resolve_async_drop_in_place_poll(tcx, def_id, ty), + instantiating_crate, + ) + } + ExportedSymbol::Alias { original: _, alternative_symbol } => alternative_symbol.to_string(), ExportedSymbol::NoDefId(symbol_name) => symbol_name.to_string(), } } @@ -597,17 +675,22 @@ fn calling_convention_for_symbol<'tcx>( None } ExportedSymbol::NonGeneric(def_id) => Some(Instance::mono(tcx, def_id)), - ExportedSymbol::Generic(def_id, args) => Some(Instance::new(def_id, args)), + ExportedSymbol::Generic(def_id, args) => Some(Instance::new_raw(def_id, args)), // DropGlue always use the Rust calling convention and thus follow the target's default // symbol decoration scheme. ExportedSymbol::DropGlue(..) => None, // AsyncDropGlueCtorShim always use the Rust calling convention and thus follow the // target's default symbol decoration scheme. ExportedSymbol::AsyncDropGlueCtorShim(..) => None, + ExportedSymbol::AsyncDropGlue(..) => None, // NoDefId always follow the target's default symbol decoration scheme. ExportedSymbol::NoDefId(..) => None, // ThreadLocalShim always follow the target's default symbol decoration scheme. ExportedSymbol::ThreadLocalShim(..) => None, + // Aliases have the same calling convention as the thing they alias. + ExportedSymbol::Alias { original, alternative_symbol: _ } => { + Some(Instance::mono(tcx, original)) + } }; instance @@ -628,6 +711,7 @@ fn calling_convention_for_symbol<'tcx>( pub(crate) fn linking_symbol_name_for_instance_in_crate<'tcx>( tcx: TyCtxt<'tcx>, symbol: ExportedSymbol<'tcx>, + export_kind: SymbolExportKind, instantiating_crate: CrateNum, ) -> String { let mut undecorated = symbol_name_for_instance_in_crate(tcx, symbol, instantiating_crate); @@ -648,8 +732,9 @@ pub(crate) fn linking_symbol_name_for_instance_in_crate<'tcx>( let prefix = match &target.arch[..] { "x86" => Some('_'), "x86_64" => None, - "arm64ec" => Some('#'), - // Only x86/64 use symbol decorations. + // Only functions are decorated for arm64ec. + "arm64ec" if export_kind == SymbolExportKind::Text => Some('#'), + // Only x86/64 and arm64ec use symbol decorations. _ => return undecorated, }; @@ -689,9 +774,10 @@ pub(crate) fn exporting_symbol_name_for_instance_in_crate<'tcx>( /// Add it to the symbols list for all kernel functions, so that it is exported in the linked /// object. pub(crate) fn extend_exported_symbols<'tcx>( - symbols: &mut Vec, + symbols: &mut Vec<(String, SymbolExportKind)>, tcx: TyCtxt<'tcx>, symbol: ExportedSymbol<'tcx>, + info: SymbolExportInfo, instantiating_crate: CrateNum, ) { let (conv, _) = calling_convention_for_symbol(tcx, symbol); @@ -703,7 +789,7 @@ pub(crate) fn extend_exported_symbols<'tcx>( let undecorated = symbol_name_for_instance_in_crate(tcx, symbol, instantiating_crate); // Add the symbol for the kernel descriptor (with .kd suffix) - symbols.push(format!("{undecorated}.kd")); + symbols.push((format!("{undecorated}.kd"), info.kind)); } fn maybe_emutls_symbol_name<'tcx>( diff --git a/compiler/rustc_codegen_ssa/src/back/write.rs b/compiler/rustc_codegen_ssa/src/back/write.rs index ccc0273280fe3..0fd4ed8475b49 100644 --- a/compiler/rustc_codegen_ssa/src/back/write.rs +++ b/compiler/rustc_codegen_ssa/src/back/write.rs @@ -306,14 +306,18 @@ impl TargetMachineFactoryConfig { cgcx.output_filenames.split_dwarf_path( cgcx.split_debuginfo, cgcx.split_dwarf_kind, - Some(module_name), + module_name, + cgcx.invocation_temp.as_deref(), ) } else { None }; - let output_obj_file = - Some(cgcx.output_filenames.temp_path(OutputType::Object, Some(module_name))); + let output_obj_file = Some(cgcx.output_filenames.temp_path_for_cgu( + OutputType::Object, + module_name, + cgcx.invocation_temp.as_deref(), + )); TargetMachineFactoryConfig { split_dwarf_file, output_obj_file } } } @@ -344,6 +348,7 @@ pub struct CodegenContext { pub crate_types: Vec, pub each_linked_rlib_for_lto: Vec<(CrateNum, PathBuf)>, pub output_filenames: Arc, + pub invocation_temp: Option, pub regular_module_config: Arc, pub metadata_module_config: Arc, pub allocator_module_config: Arc, @@ -352,7 +357,7 @@ pub struct CodegenContext { pub is_pe_coff: bool, pub target_can_use_split_dwarf: bool, pub target_arch: String, - pub target_is_like_osx: bool, + pub target_is_like_darwin: bool, pub target_is_like_aix: bool, pub split_debuginfo: rustc_target::spec::SplitDebuginfo, pub split_dwarf_kind: rustc_session::config::SplitDwarfKind, @@ -582,8 +587,11 @@ fn produce_final_output_artifacts( if let [module] = &compiled_modules.modules[..] { // 1) Only one codegen unit. In this case it's no difficulty // to copy `foo.0.x` to `foo.x`. - let module_name = Some(&module.name[..]); - let path = crate_output.temp_path(output_type, module_name); + let path = crate_output.temp_path_for_cgu( + output_type, + &module.name, + sess.invocation_temp.as_deref(), + ); let output = crate_output.path(output_type); if !output_type.is_text_output() && output.is_tty() { sess.dcx() @@ -596,22 +604,15 @@ fn produce_final_output_artifacts( ensure_removed(sess.dcx(), &path); } } else { - let extension = crate_output - .temp_path(output_type, None) - .extension() - .unwrap() - .to_str() - .unwrap() - .to_owned(); - if crate_output.outputs.contains_explicit_name(&output_type) { // 2) Multiple codegen units, with `--emit foo=some_name`. We have // no good solution for this case, so warn the user. - sess.dcx().emit_warn(errors::IgnoringEmitPath { extension }); + sess.dcx() + .emit_warn(errors::IgnoringEmitPath { extension: output_type.extension() }); } else if crate_output.single_output_file.is_some() { // 3) Multiple codegen units, with `-o some_name`. We have // no good solution for this case, so warn the user. - sess.dcx().emit_warn(errors::IgnoringOutput { extension }); + sess.dcx().emit_warn(errors::IgnoringOutput { extension: output_type.extension() }); } else { // 4) Multiple codegen units, but no explicit name. We // just leave the `foo.0.x` files in place. @@ -967,7 +968,12 @@ fn execute_copy_from_cache_work_item( module.source.saved_files.get("dwo").as_ref().and_then(|saved_dwarf_object_file| { let dwarf_obj_out = cgcx .output_filenames - .split_dwarf_path(cgcx.split_debuginfo, cgcx.split_dwarf_kind, Some(&module.name)) + .split_dwarf_path( + cgcx.split_debuginfo, + cgcx.split_dwarf_kind, + &module.name, + cgcx.invocation_temp.as_deref(), + ) .expect( "saved dwarf object in work product but `split_dwarf_path` returned `None`", ); @@ -977,7 +983,11 @@ fn execute_copy_from_cache_work_item( let mut load_from_incr_cache = |perform, output_type: OutputType| { if perform { let saved_file = module.source.saved_files.get(output_type.extension())?; - let output_path = cgcx.output_filenames.temp_path(output_type, Some(&module.name)); + let output_path = cgcx.output_filenames.temp_path_for_cgu( + output_type, + &module.name, + cgcx.invocation_temp.as_deref(), + ); load_from_incr_comp_dir(output_path, &saved_file) } else { None @@ -1216,12 +1226,13 @@ fn start_executing_work( is_pe_coff: tcx.sess.target.is_like_windows, target_can_use_split_dwarf: tcx.sess.target_can_use_split_dwarf(), target_arch: tcx.sess.target.arch.to_string(), - target_is_like_osx: tcx.sess.target.is_like_osx, + target_is_like_darwin: tcx.sess.target.is_like_darwin, target_is_like_aix: tcx.sess.target.is_like_aix, split_debuginfo: tcx.sess.split_debuginfo(), split_dwarf_kind: tcx.sess.opts.unstable_opts.split_dwarf_kind, parallel: backend.supports_parallel() && !sess.opts.unstable_opts.no_parallel_backend, pointer_size: tcx.data_layout.pointer_size, + invocation_temp: sess.invocation_temp.clone(), }; // This is the "main loop" of parallel work happening for parallel codegen. diff --git a/compiler/rustc_codegen_ssa/src/base.rs b/compiler/rustc_codegen_ssa/src/base.rs index 1985b3b717063..93cbd4cbb7cc9 100644 --- a/compiler/rustc_codegen_ssa/src/base.rs +++ b/compiler/rustc_codegen_ssa/src/base.rs @@ -10,21 +10,23 @@ use rustc_ast::expand::allocator::{ALLOCATOR_METHODS, AllocatorKind, global_fn_n use rustc_attr_parsing::OptimizeAttr; use rustc_data_structures::fx::{FxHashMap, FxIndexSet}; use rustc_data_structures::profiling::{get_resident_set_size, print_time_passes_entry}; -use rustc_data_structures::sync::par_map; +use rustc_data_structures::sync::{IntoDynSyncSend, par_map}; use rustc_data_structures::unord::UnordMap; use rustc_hir::def_id::{DefId, LOCAL_CRATE}; use rustc_hir::lang_items::LangItem; +use rustc_hir::{ItemId, Target}; use rustc_metadata::EncodedMetadata; -use rustc_middle::bug; use rustc_middle::middle::codegen_fn_attrs::CodegenFnAttrs; use rustc_middle::middle::debugger_visualizer::{DebuggerVisualizerFile, DebuggerVisualizerType}; use rustc_middle::middle::exported_symbols::SymbolExportKind; use rustc_middle::middle::{exported_symbols, lang_items}; use rustc_middle::mir::BinOp; +use rustc_middle::mir::interpret::ErrorHandled; use rustc_middle::mir::mono::{CodegenUnit, CodegenUnitNameBuilder, MonoItem, MonoItemPartitions}; use rustc_middle::query::Providers; use rustc_middle::ty::layout::{HasTyCtxt, HasTypingEnv, LayoutOf, TyAndLayout}; use rustc_middle::ty::{self, Instance, Ty, TyCtxt}; +use rustc_middle::{bug, span_bug}; use rustc_session::Session; use rustc_session::config::{self, CrateType, EntryFnType, OutputType}; use rustc_span::{DUMMY_SP, Symbol, sym}; @@ -417,6 +419,75 @@ pub(crate) fn codegen_instance<'a, 'tcx: 'a, Bx: BuilderMethods<'a, 'tcx>>( mir::codegen_mir::(cx, instance); } +pub fn codegen_global_asm<'tcx, Cx>(cx: &mut Cx, item_id: ItemId) +where + Cx: LayoutOf<'tcx, LayoutOfResult = TyAndLayout<'tcx>> + AsmCodegenMethods<'tcx>, +{ + let item = cx.tcx().hir_item(item_id); + if let rustc_hir::ItemKind::GlobalAsm { asm, .. } = item.kind { + let operands: Vec<_> = asm + .operands + .iter() + .map(|(op, op_sp)| match *op { + rustc_hir::InlineAsmOperand::Const { ref anon_const } => { + match cx.tcx().const_eval_poly(anon_const.def_id.to_def_id()) { + Ok(const_value) => { + let ty = + cx.tcx().typeck_body(anon_const.body).node_type(anon_const.hir_id); + let string = common::asm_const_to_str( + cx.tcx(), + *op_sp, + const_value, + cx.layout_of(ty), + ); + GlobalAsmOperandRef::Const { string } + } + Err(ErrorHandled::Reported { .. }) => { + // An error has already been reported and + // compilation is guaranteed to fail if execution + // hits this path. So an empty string instead of + // a stringified constant value will suffice. + GlobalAsmOperandRef::Const { string: String::new() } + } + Err(ErrorHandled::TooGeneric(_)) => { + span_bug!(*op_sp, "asm const cannot be resolved; too generic") + } + } + } + rustc_hir::InlineAsmOperand::SymFn { expr } => { + let ty = cx.tcx().typeck(item_id.owner_id).expr_ty(expr); + let instance = match ty.kind() { + &ty::FnDef(def_id, args) => Instance::expect_resolve( + cx.tcx(), + ty::TypingEnv::fully_monomorphized(), + def_id, + args, + expr.span, + ), + _ => span_bug!(*op_sp, "asm sym is not a function"), + }; + + GlobalAsmOperandRef::SymFn { instance } + } + rustc_hir::InlineAsmOperand::SymStatic { path: _, def_id } => { + GlobalAsmOperandRef::SymStatic { def_id } + } + rustc_hir::InlineAsmOperand::In { .. } + | rustc_hir::InlineAsmOperand::Out { .. } + | rustc_hir::InlineAsmOperand::InOut { .. } + | rustc_hir::InlineAsmOperand::SplitInOut { .. } + | rustc_hir::InlineAsmOperand::Label { .. } => { + span_bug!(*op_sp, "invalid operand type for global_asm!") + } + }) + .collect(); + + cx.codegen_global_asm(asm.template, &operands, asm.options, asm.line_spans); + } else { + span_bug!(item.span, "Mismatch between hir::Item type and MonoItem type") + } +} + /// Creates the `main` function which will initialize the rust runtime and call /// users main function. pub fn maybe_create_entry_wrapper<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>( @@ -640,8 +711,11 @@ pub fn codegen_crate( let metadata_cgu_name = cgu_name_builder.build_cgu_name(LOCAL_CRATE, &["crate"], Some("metadata")).to_string(); tcx.sess.time("write_compressed_metadata", || { - let file_name = - tcx.output_filenames(()).temp_path(OutputType::Metadata, Some(&metadata_cgu_name)); + let file_name = tcx.output_filenames(()).temp_path_for_cgu( + OutputType::Metadata, + &metadata_cgu_name, + tcx.sess.invocation_temp.as_deref(), + ); let data = create_compressed_metadata_file( tcx.sess, &metadata, @@ -757,7 +831,7 @@ pub fn codegen_crate( let pre_compiled_cgus = par_map(cgus, |(i, _)| { let module = backend.compile_codegen_unit(tcx, codegen_units[i].name()); - (i, module) + (i, IntoDynSyncSend(module)) }); total_codegen_time += start_time.elapsed(); @@ -777,7 +851,7 @@ pub fn codegen_crate( match cgu_reuse { CguReuse::No => { let (module, cost) = if let Some(cgu) = pre_compiled_cgus.remove(&i) { - cgu + cgu.0 } else { let start_time = Instant::now(); let module = backend.compile_codegen_unit(tcx, cgu.name()); @@ -964,21 +1038,35 @@ impl CrateInfo { // by the compiler, but that's ok because all this stuff is unstable anyway. let target = &tcx.sess.target; if !are_upstream_rust_objects_already_included(tcx.sess) { - let missing_weak_lang_items: FxIndexSet = info + let add_prefix = match (target.is_like_windows, target.arch.as_ref()) { + (true, "x86") => |name: String, _: SymbolExportKind| format!("_{name}"), + (true, "arm64ec") => { + // Only functions are decorated for arm64ec. + |name: String, export_kind: SymbolExportKind| match export_kind { + SymbolExportKind::Text => format!("#{name}"), + _ => name, + } + } + _ => |name: String, _: SymbolExportKind| name, + }; + let missing_weak_lang_items: FxIndexSet<(Symbol, SymbolExportKind)> = info .used_crates .iter() .flat_map(|&cnum| tcx.missing_lang_items(cnum)) .filter(|l| l.is_weak()) .filter_map(|&l| { let name = l.link_name()?; - lang_items::required(tcx, l).then_some(name) + let export_kind = match l.target() { + Target::Fn => SymbolExportKind::Text, + Target::Static => SymbolExportKind::Data, + _ => bug!( + "Don't know what the export kind is for lang item of kind {:?}", + l.target() + ), + }; + lang_items::required(tcx, l).then_some((name, export_kind)) }) .collect(); - let prefix = match (target.is_like_windows, target.arch.as_ref()) { - (true, "x86") => "_", - (true, "arm64ec") => "#", - _ => "", - }; // This loop only adds new items to values of the hash map, so the order in which we // iterate over the values is not important. @@ -991,10 +1079,13 @@ impl CrateInfo { .for_each(|(_, linked_symbols)| { let mut symbols = missing_weak_lang_items .iter() - .map(|item| { + .map(|(item, export_kind)| { ( - format!("{prefix}{}", mangle_internal_symbol(tcx, item.as_str())), - SymbolExportKind::Text, + add_prefix( + mangle_internal_symbol(tcx, item.as_str()), + *export_kind, + ), + *export_kind, ) }) .collect::>(); @@ -1009,12 +1100,12 @@ impl CrateInfo { // errors. linked_symbols.extend(ALLOCATOR_METHODS.iter().map(|method| { ( - format!( - "{prefix}{}", + add_prefix( mangle_internal_symbol( tcx, - global_fn_name(method.name).as_str() - ) + global_fn_name(method.name).as_str(), + ), + SymbolExportKind::Text, ), SymbolExportKind::Text, ) @@ -1024,7 +1115,7 @@ impl CrateInfo { } let embed_visualizers = tcx.crate_types().iter().any(|&crate_type| match crate_type { - CrateType::Executable | CrateType::Dylib | CrateType::Cdylib => { + CrateType::Executable | CrateType::Dylib | CrateType::Cdylib | CrateType::Sdylib => { // These are crate types for which we invoke the linker and can embed // NatVis visualizers. true diff --git a/compiler/rustc_codegen_ssa/src/codegen_attrs.rs b/compiler/rustc_codegen_ssa/src/codegen_attrs.rs index a85d032f36eee..967a1305c90e2 100644 --- a/compiler/rustc_codegen_ssa/src/codegen_attrs.rs +++ b/compiler/rustc_codegen_ssa/src/codegen_attrs.rs @@ -2,7 +2,7 @@ use std::str::FromStr; use rustc_abi::ExternAbi; use rustc_ast::expand::autodiff_attrs::{AutoDiffAttrs, DiffActivity, DiffMode}; -use rustc_ast::{MetaItem, MetaItemInner, attr}; +use rustc_ast::{LitKind, MetaItem, MetaItemInner, attr}; use rustc_attr_parsing::ReprAttr::ReprAlign; use rustc_attr_parsing::{AttributeKind, InlineAttr, InstructionSetAttr, OptimizeAttr}; use rustc_data_structures::fx::FxHashMap; @@ -51,7 +51,7 @@ fn linkage_by_name(tcx: TyCtxt<'_>, def_id: LocalDefId, name: &str) -> Linkage { } } -fn codegen_fn_attrs(tcx: TyCtxt<'_>, did: LocalDefId) -> CodegenFnAttrs { +fn codegen_fn_attrs<'tcx>(tcx: TyCtxt<'tcx>, did: LocalDefId) -> CodegenFnAttrs { if cfg!(debug_assertions) { let def_kind = tcx.def_kind(did); assert!( @@ -87,6 +87,7 @@ fn codegen_fn_attrs(tcx: TyCtxt<'_>, did: LocalDefId) -> CodegenFnAttrs { let mut link_ordinal_span = None; let mut no_sanitize_span = None; let mut mixed_export_name_no_mangle_lint_state = MixedExportNameAndNoMangleState::default(); + let mut no_mangle_span = None; for attr in attrs.iter() { // In some cases, attribute are only valid on functions, but it's the `check_attr` @@ -111,12 +112,15 @@ fn codegen_fn_attrs(tcx: TyCtxt<'_>, did: LocalDefId) -> CodegenFnAttrs { if let hir::Attribute::Parsed(p) = attr { match p { + AttributeKind::EiiMangleExtern => { + codegen_fn_attrs.flags |= CodegenFnAttrFlags::EII_MANGLE_EXTERN; + } AttributeKind::Repr(reprs) => { codegen_fn_attrs.alignment = reprs .iter() - .find_map(|(r, _)| if let ReprAlign(x) = r { Some(*x) } else { None }); + .filter_map(|(r, _)| if let ReprAlign(x) = r { Some(*x) } else { None }) + .max(); } - _ => {} } } @@ -138,6 +142,7 @@ fn codegen_fn_attrs(tcx: TyCtxt<'_>, did: LocalDefId) -> CodegenFnAttrs { } sym::naked => codegen_fn_attrs.flags |= CodegenFnAttrFlags::NAKED, sym::no_mangle => { + no_mangle_span = Some(attr.span()); if tcx.opt_item_name(did.to_def_id()).is_some() { codegen_fn_attrs.flags |= CodegenFnAttrFlags::NO_MANGLE; mixed_export_name_no_mangle_lint_state.track_no_mangle( @@ -213,7 +218,7 @@ fn codegen_fn_attrs(tcx: TyCtxt<'_>, did: LocalDefId) -> CodegenFnAttrs { // somewhat, and is subject to change in the future (which // is a good thing, because this would ideally be a bit // more firmed up). - let is_like_elf = !(tcx.sess.target.is_like_osx + let is_like_elf = !(tcx.sess.target.is_like_darwin || tcx.sess.target.is_like_windows || tcx.sess.target.is_like_wasm); codegen_fn_attrs.flags |= if is_like_elf { @@ -345,20 +350,26 @@ fn codegen_fn_attrs(tcx: TyCtxt<'_>, did: LocalDefId) -> CodegenFnAttrs { no_sanitize_span = Some(attr.span()); if let Some(list) = attr.meta_item_list() { for item in list.iter() { - match item.name_or_empty() { - sym::address => { + match item.name() { + Some(sym::address) => { codegen_fn_attrs.no_sanitize |= SanitizerSet::ADDRESS | SanitizerSet::KERNELADDRESS } - sym::cfi => codegen_fn_attrs.no_sanitize |= SanitizerSet::CFI, - sym::kcfi => codegen_fn_attrs.no_sanitize |= SanitizerSet::KCFI, - sym::memory => codegen_fn_attrs.no_sanitize |= SanitizerSet::MEMORY, - sym::memtag => codegen_fn_attrs.no_sanitize |= SanitizerSet::MEMTAG, - sym::shadow_call_stack => { + Some(sym::cfi) => codegen_fn_attrs.no_sanitize |= SanitizerSet::CFI, + Some(sym::kcfi) => codegen_fn_attrs.no_sanitize |= SanitizerSet::KCFI, + Some(sym::memory) => { + codegen_fn_attrs.no_sanitize |= SanitizerSet::MEMORY + } + Some(sym::memtag) => { + codegen_fn_attrs.no_sanitize |= SanitizerSet::MEMTAG + } + Some(sym::shadow_call_stack) => { codegen_fn_attrs.no_sanitize |= SanitizerSet::SHADOWCALLSTACK } - sym::thread => codegen_fn_attrs.no_sanitize |= SanitizerSet::THREAD, - sym::hwaddress => { + Some(sym::thread) => { + codegen_fn_attrs.no_sanitize |= SanitizerSet::THREAD + } + Some(sym::hwaddress) => { codegen_fn_attrs.no_sanitize |= SanitizerSet::HWADDRESS } _ => { @@ -419,9 +430,9 @@ fn codegen_fn_attrs(tcx: TyCtxt<'_>, did: LocalDefId) -> CodegenFnAttrs { continue; }; - let attrib_to_write = match meta_item.name_or_empty() { - sym::prefix_nops => &mut prefix, - sym::entry_nops => &mut entry, + let attrib_to_write = match meta_item.name() { + Some(sym::prefix_nops) => &mut prefix, + Some(sym::entry_nops) => &mut entry, _ => { tcx.dcx().emit_err(errors::UnexpectedParameterName { span: item.span(), @@ -614,6 +625,34 @@ fn codegen_fn_attrs(tcx: TyCtxt<'_>, did: LocalDefId) -> CodegenFnAttrs { } check_link_name_xor_ordinal(tcx, &codegen_fn_attrs, link_ordinal_span); + if codegen_fn_attrs.flags.contains(CodegenFnAttrFlags::RUSTC_STD_INTERNAL_SYMBOL) + && codegen_fn_attrs.flags.contains(CodegenFnAttrFlags::NO_MANGLE) + { + let lang_item = + lang_items::extract(attrs).map_or(None, |(name, _span)| LangItem::from_name(name)); + let mut err = tcx + .dcx() + .struct_span_err( + no_mangle_span.unwrap_or_default(), + "`#[no_mangle]` cannot be used on internal language items", + ) + .with_note("Rustc requires this item to have a specific mangled name.") + .with_span_label(tcx.def_span(did), "should be the internal language item"); + if let Some(lang_item) = lang_item { + if let Some(link_name) = lang_item.link_name() { + err = err + .with_note("If you are trying to prevent mangling to ease debugging, many") + .with_note(format!( + "debuggers support a command such as `rbreak {link_name}` to" + )) + .with_note(format!( + "match `.*{link_name}.*` instead of `break {link_name}` on a specific name" + )) + } + } + err.emit(); + } + // Any linkage to LLVM intrinsics for now forcibly marks them all as never // unwinds since LLVM sometimes can't handle codegen which `invoke`s // intrinsic functions. @@ -718,8 +757,8 @@ fn check_link_ordinal(tcx: TyCtxt<'_>, attr: &hir::Attribute) -> Option { } } -fn check_link_name_xor_ordinal( - tcx: TyCtxt<'_>, +fn check_link_name_xor_ordinal<'tcx>( + tcx: TyCtxt<'tcx>, codegen_fn_attrs: &CodegenFnAttrs, inline_span: Option, ) { @@ -785,8 +824,7 @@ impl<'a> MixedExportNameAndNoMangleState<'a> { fn autodiff_attrs(tcx: TyCtxt<'_>, id: DefId) -> Option { let attrs = tcx.get_attrs(id, sym::rustc_autodiff); - let attrs = - attrs.filter(|attr| attr.name_or_empty() == sym::rustc_autodiff).collect::>(); + let attrs = attrs.filter(|attr| attr.has_name(sym::rustc_autodiff)).collect::>(); // check for exactly one autodiff attribute on placeholder functions. // There should only be one, since we generate a new placeholder per ad macro. @@ -805,8 +843,8 @@ fn autodiff_attrs(tcx: TyCtxt<'_>, id: DefId) -> Option { return Some(AutoDiffAttrs::source()); } - let [mode, input_activities @ .., ret_activity] = &list[..] else { - span_bug!(attr.span(), "rustc_autodiff attribute must contain mode and activities"); + let [mode, width_meta, input_activities @ .., ret_activity] = &list[..] else { + span_bug!(attr.span(), "rustc_autodiff attribute must contain mode, width and activities"); }; let mode = if let MetaItemInner::MetaItem(MetaItem { path: p1, .. }) = mode { p1.segments.first().unwrap().ident @@ -823,6 +861,30 @@ fn autodiff_attrs(tcx: TyCtxt<'_>, id: DefId) -> Option { } }; + let width: u32 = match width_meta { + MetaItemInner::MetaItem(MetaItem { path: p1, .. }) => { + let w = p1.segments.first().unwrap().ident; + match w.as_str().parse() { + Ok(val) => val, + Err(_) => { + span_bug!(w.span, "rustc_autodiff width should fit u32"); + } + } + } + MetaItemInner::Lit(lit) => { + if let LitKind::Int(val, _) = lit.kind { + match val.get().try_into() { + Ok(val) => val, + Err(_) => { + span_bug!(lit.span, "rustc_autodiff width should fit u32"); + } + } + } else { + span_bug!(lit.span, "rustc_autodiff width should be an integer"); + } + } + }; + // First read the ret symbol from the attribute let ret_symbol = if let MetaItemInner::MetaItem(MetaItem { path: p1, .. }) = ret_activity { p1.segments.first().unwrap().ident @@ -860,7 +922,7 @@ fn autodiff_attrs(tcx: TyCtxt<'_>, id: DefId) -> Option { } } - Some(AutoDiffAttrs { mode, ret_activity, input_activity: arg_activities }) + Some(AutoDiffAttrs { mode, width, ret_activity, input_activity: arg_activities }) } pub(crate) fn provide(providers: &mut Providers) { diff --git a/compiler/rustc_codegen_ssa/src/errors.rs b/compiler/rustc_codegen_ssa/src/errors.rs index f52d29baf9dc0..c2064397855f0 100644 --- a/compiler/rustc_codegen_ssa/src/errors.rs +++ b/compiler/rustc_codegen_ssa/src/errors.rs @@ -3,7 +3,6 @@ use std::borrow::Cow; use std::ffi::OsString; use std::io::Error; -use std::num::ParseIntError; use std::path::{Path, PathBuf}; use std::process::ExitStatus; @@ -278,13 +277,13 @@ pub struct BinaryOutputToTty { #[derive(Diagnostic)] #[diag(codegen_ssa_ignoring_emit_path)] pub struct IgnoringEmitPath { - pub extension: String, + pub extension: &'static str, } #[derive(Diagnostic)] #[diag(codegen_ssa_ignoring_output)] pub struct IgnoringOutput { - pub extension: String, + pub extension: &'static str, } #[derive(Diagnostic)] @@ -738,14 +737,6 @@ pub enum ExtractBundledLibsError<'a> { ExtractSection { rlib: &'a Path, error: Box }, } -#[derive(Diagnostic)] -pub(crate) enum AppleDeploymentTarget { - #[diag(codegen_ssa_apple_deployment_target_invalid)] - Invalid { env_var: &'static str, error: ParseIntError }, - #[diag(codegen_ssa_apple_deployment_target_too_low)] - TooLow { env_var: &'static str, version: String, os_min: String }, -} - #[derive(Diagnostic)] #[diag(codegen_ssa_read_file)] pub(crate) struct ReadFileError { @@ -1046,24 +1037,14 @@ pub enum InvalidMonomorphization<'tcx> { v_len: u64, }, - #[diag(codegen_ssa_invalid_monomorphization_mask_type, code = E0511)] - #[note] - MaskType { + #[diag(codegen_ssa_invalid_monomorphization_mask_wrong_element_type, code = E0511)] + MaskWrongElementType { #[primary_span] span: Span, name: Symbol, ty: Ty<'tcx>, }, - #[diag(codegen_ssa_invalid_monomorphization_vector_argument, code = E0511)] - VectorArgument { - #[primary_span] - span: Span, - name: Symbol, - in_ty: Ty<'tcx>, - in_elem: Ty<'tcx>, - }, - #[diag(codegen_ssa_invalid_monomorphization_cannot_return, code = E0511)] CannotReturn { #[primary_span] @@ -1086,15 +1067,6 @@ pub enum InvalidMonomorphization<'tcx> { mutability: ExpectedPointerMutability, }, - #[diag(codegen_ssa_invalid_monomorphization_third_arg_element_type, code = E0511)] - ThirdArgElementType { - #[primary_span] - span: Span, - name: Symbol, - expected_element: Ty<'tcx>, - third_arg: Ty<'tcx>, - }, - #[diag(codegen_ssa_invalid_monomorphization_unsupported_symbol_of_size, code = E0511)] UnsupportedSymbolOfSize { #[primary_span] diff --git a/compiler/rustc_codegen_ssa/src/lib.rs b/compiler/rustc_codegen_ssa/src/lib.rs index d26d6edf3149d..4f9757f198b84 100644 --- a/compiler/rustc_codegen_ssa/src/lib.rs +++ b/compiler/rustc_codegen_ssa/src/lib.rs @@ -2,20 +2,19 @@ #![allow(internal_features)] #![allow(rustc::diagnostic_outside_of_impl)] #![allow(rustc::untranslatable_diagnostic)] -#![cfg_attr(doc, recursion_limit = "256")] // FIXME(nnethercote): will be removed by #124141 +#![cfg_attr(bootstrap, feature(let_chains))] #![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")] #![doc(rust_logo)] #![feature(assert_matches)] #![feature(box_patterns)] -#![feature(debug_closure_helpers)] #![feature(file_buffered)] #![feature(if_let_guard)] -#![feature(let_chains)] #![feature(negative_impls)] #![feature(rustdoc_internals)] #![feature(string_from_utf8_lossy_owned)] #![feature(trait_alias)] #![feature(try_blocks)] +#![recursion_limit = "256"] // tidy-alphabetical-end //! This crate contains codegen code that is used by all codegen backends (LLVM and others). @@ -34,7 +33,7 @@ use rustc_hir::CRATE_HIR_ID; use rustc_hir::def_id::CrateNum; use rustc_macros::{Decodable, Encodable, HashStable}; use rustc_middle::dep_graph::WorkProduct; -use rustc_middle::lint::LintLevelSource; +use rustc_middle::lint::LevelAndSource; use rustc_middle::middle::debugger_visualizer::DebuggerVisualizerFile; use rustc_middle::middle::dependency_format::Dependencies; use rustc_middle::middle::exported_symbols::SymbolExportKind; @@ -45,7 +44,6 @@ use rustc_serialize::{Decodable, Decoder, Encodable, Encoder}; use rustc_session::Session; use rustc_session::config::{CrateType, OutputFilenames, OutputType, RUST_CGU_EXT}; use rustc_session::cstore::{self, CrateSource}; -use rustc_session::lint::Level; use rustc_session::lint::builtin::LINKER_MESSAGES; use rustc_session::utils::NativeLibKind; use rustc_span::Symbol; @@ -107,13 +105,19 @@ impl ModuleCodegen { emit_asm: bool, emit_ir: bool, outputs: &OutputFilenames, + invocation_temp: Option<&str>, ) -> CompiledModule { - let object = emit_obj.then(|| outputs.temp_path(OutputType::Object, Some(&self.name))); - let dwarf_object = emit_dwarf_obj.then(|| outputs.temp_path_dwo(Some(&self.name))); - let bytecode = emit_bc.then(|| outputs.temp_path(OutputType::Bitcode, Some(&self.name))); - let assembly = emit_asm.then(|| outputs.temp_path(OutputType::Assembly, Some(&self.name))); - let llvm_ir = - emit_ir.then(|| outputs.temp_path(OutputType::LlvmAssembly, Some(&self.name))); + let object = emit_obj + .then(|| outputs.temp_path_for_cgu(OutputType::Object, &self.name, invocation_temp)); + let dwarf_object = + emit_dwarf_obj.then(|| outputs.temp_path_dwo_for_cgu(&self.name, invocation_temp)); + let bytecode = emit_bc + .then(|| outputs.temp_path_for_cgu(OutputType::Bitcode, &self.name, invocation_temp)); + let assembly = emit_asm + .then(|| outputs.temp_path_for_cgu(OutputType::Assembly, &self.name, invocation_temp)); + let llvm_ir = emit_ir.then(|| { + outputs.temp_path_for_cgu(OutputType::LlvmAssembly, &self.name, invocation_temp) + }); CompiledModule { name: self.name.clone(), @@ -215,7 +219,7 @@ pub struct CrateInfo { pub target_cpu: String, pub target_features: Vec, pub crate_types: Vec, - pub exported_symbols: UnordMap>, + pub exported_symbols: UnordMap>, pub linked_symbols: FxIndexMap>, pub local_crate_name: Symbol, pub compiler_builtins: Option, @@ -232,6 +236,24 @@ pub struct CrateInfo { pub lint_levels: CodegenLintLevels, } +/// Target-specific options that get set in `cfg(...)`. +/// +/// RUSTC_SPECIFIC_FEATURES should be skipped here, those are handled outside codegen. +pub struct TargetConfig { + /// Options to be set in `cfg(target_features)`. + pub target_features: Vec, + /// Options to be set in `cfg(target_features)`, but including unstable features. + pub unstable_target_features: Vec, + /// Option for `cfg(target_has_reliable_f16)`, true if `f16` basic arithmetic works. + pub has_reliable_f16: bool, + /// Option for `cfg(target_has_reliable_f16_math)`, true if `f16` math calls work. + pub has_reliable_f16_math: bool, + /// Option for `cfg(target_has_reliable_f128)`, true if `f128` basic arithmetic works. + pub has_reliable_f128: bool, + /// Option for `cfg(target_has_reliable_f128_math)`, true if `f128` math calls work. + pub has_reliable_f128_math: bool, +} + #[derive(Encodable, Decodable)] pub struct CodegenResults { pub modules: Vec, @@ -341,7 +363,7 @@ impl CodegenResults { /// Instead, encode exactly the information we need. #[derive(Copy, Clone, Debug, Encodable, Decodable)] pub struct CodegenLintLevels { - linker_messages: (Level, LintLevelSource), + linker_messages: LevelAndSource, } impl CodegenLintLevels { diff --git a/compiler/rustc_codegen_ssa/src/mir/block.rs b/compiler/rustc_codegen_ssa/src/mir/block.rs index d184ce3d61dea..950f19a6f0f4e 100644 --- a/compiler/rustc_codegen_ssa/src/mir/block.rs +++ b/compiler/rustc_codegen_ssa/src/mir/block.rs @@ -926,10 +926,9 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { let def = instance.map(|i| i.def); - if let Some( - ty::InstanceKind::DropGlue(_, None) | ty::InstanceKind::AsyncDropGlueCtorShim(_, None), - ) = def - { + // We don't need AsyncDropGlueCtorShim here because it is not `noop func`, + // it is `func returning noop future` + if let Some(ty::InstanceKind::DropGlue(_, None)) = def { // Empty drop glue; a no-op. let target = target.unwrap(); return helper.funclet_br(self, bx, target, mergeable_succ); @@ -1386,8 +1385,12 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { MergingSucc::False } - mir::TerminatorKind::Drop { place, target, unwind, replace: _ } => self - .codegen_drop_terminator( + mir::TerminatorKind::Drop { place, target, unwind, replace: _, drop, async_fut } => { + assert!( + async_fut.is_none() && drop.is_none(), + "Async Drop must be expanded or reset to sync before codegen" + ); + self.codegen_drop_terminator( helper, bx, &terminator.source_info, @@ -1395,7 +1398,8 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { target, unwind, mergeable_succ(), - ), + ) + } mir::TerminatorKind::Assert { ref cond, expected, ref msg, target, unwind } => self .codegen_assert_terminator( diff --git a/compiler/rustc_codegen_ssa/src/mir/mod.rs b/compiler/rustc_codegen_ssa/src/mir/mod.rs index 0758e5d045673..96a04473aba2e 100644 --- a/compiler/rustc_codegen_ssa/src/mir/mod.rs +++ b/compiler/rustc_codegen_ssa/src/mir/mod.rs @@ -3,7 +3,7 @@ use std::iter; use rustc_index::IndexVec; use rustc_index::bit_set::DenseBitSet; use rustc_middle::middle::codegen_fn_attrs::CodegenFnAttrFlags; -use rustc_middle::mir::{Local, UnwindTerminateReason, traversal}; +use rustc_middle::mir::{Body, Local, UnwindTerminateReason, traversal}; use rustc_middle::ty::layout::{FnAbiOf, HasTyCtxt, HasTypingEnv, TyAndLayout}; use rustc_middle::ty::{self, Instance, Ty, TyCtxt, TypeFoldable, TypeVisitableExt}; use rustc_middle::{bug, mir, span_bug}; @@ -20,7 +20,7 @@ mod coverageinfo; pub mod debuginfo; mod intrinsic; mod locals; -mod naked_asm; +pub mod naked_asm; pub mod operand; pub mod place; mod rvalue; @@ -170,19 +170,24 @@ pub fn codegen_mir<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>( ) { assert!(!instance.args.has_infer()); + let tcx = cx.tcx(); let llfn = cx.get_fn(instance); - let mir = cx.tcx().instance_mir(instance.def); + let mut mir = tcx.instance_mir(instance.def); let fn_abi = cx.fn_abi_of_instance(instance, ty::List::empty()); debug!("fn_abi: {:?}", fn_abi); - if cx.tcx().codegen_fn_attrs(instance.def_id()).flags.contains(CodegenFnAttrFlags::NAKED) { - crate::mir::naked_asm::codegen_naked_asm::(cx, &mir, instance); - return; + if tcx.features().ergonomic_clones() { + let monomorphized_mir = instance.instantiate_mir_and_normalize_erasing_regions( + tcx, + ty::TypingEnv::fully_monomorphized(), + ty::EarlyBinder::bind(mir.clone()), + ); + mir = tcx.arena.alloc(optimize_use_clone::(cx, monomorphized_mir)); } - let debug_context = cx.create_function_debug_context(instance, fn_abi, llfn, mir); + let debug_context = cx.create_function_debug_context(instance, fn_abi, llfn, &mir); let start_llbb = Bx::append_block(cx, llfn, "start"); let mut start_bx = Bx::build(cx, start_llbb); @@ -194,7 +199,7 @@ pub fn codegen_mir<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>( } let cleanup_kinds = - base::wants_new_eh_instructions(cx.tcx().sess).then(|| analyze::cleanup_kinds(mir)); + base::wants_new_eh_instructions(tcx.sess).then(|| analyze::cleanup_kinds(&mir)); let cached_llbbs: IndexVec> = mir.basic_blocks @@ -217,7 +222,7 @@ pub fn codegen_mir<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>( cleanup_kinds, landing_pads: IndexVec::from_elem(None, &mir.basic_blocks), funclets: IndexVec::from_fn_n(|_| None, mir.basic_blocks.len()), - cold_blocks: find_cold_blocks(cx.tcx(), mir), + cold_blocks: find_cold_blocks(tcx, mir), locals: locals::Locals::empty(), debug_context, per_local_var_debug_info: None, @@ -233,7 +238,7 @@ pub fn codegen_mir<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>( fx.compute_per_local_var_debug_info(&mut start_bx).unzip(); fx.per_local_var_debug_info = per_local_var_debug_info; - let traversal_order = traversal::mono_reachable_reverse_postorder(mir, cx.tcx(), instance); + let traversal_order = traversal::mono_reachable_reverse_postorder(mir, tcx, instance); let memory_locals = analyze::non_ssa_locals(&fx, &traversal_order); // Allocate variable and temp allocas @@ -310,6 +315,61 @@ pub fn codegen_mir<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>( } } +// FIXME: Move this function to mir::transform when post-mono MIR passes land. +fn optimize_use_clone<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>( + cx: &'a Bx::CodegenCx, + mut mir: Body<'tcx>, +) -> Body<'tcx> { + let tcx = cx.tcx(); + + if tcx.features().ergonomic_clones() { + for bb in mir.basic_blocks.as_mut() { + let mir::TerminatorKind::Call { + args, + destination, + target, + call_source: mir::CallSource::Use, + .. + } = &bb.terminator().kind + else { + continue; + }; + + // CallSource::Use calls always use 1 argument. + assert_eq!(args.len(), 1); + let arg = &args[0]; + + // These types are easily available from locals, so check that before + // doing DefId lookups to figure out what we're actually calling. + let arg_ty = arg.node.ty(&mir.local_decls, tcx); + + let ty::Ref(_region, inner_ty, mir::Mutability::Not) = *arg_ty.kind() else { continue }; + + if !tcx.type_is_copy_modulo_regions(cx.typing_env(), inner_ty) { + continue; + } + + let Some(arg_place) = arg.node.place() else { continue }; + + let destination_block = target.unwrap(); + + bb.statements.push(mir::Statement { + source_info: bb.terminator().source_info, + kind: mir::StatementKind::Assign(Box::new(( + *destination, + mir::Rvalue::Use(mir::Operand::Copy( + arg_place.project_deeper(&[mir::ProjectionElem::Deref], tcx), + )), + ))), + }); + + bb.terminator_mut().kind = mir::TerminatorKind::Goto { target: destination_block }; + } + } + + mir +} + /// Produces, for each argument, a `Value` pointing at the /// argument's value. As arguments are places, these are always /// indirect. diff --git a/compiler/rustc_codegen_ssa/src/mir/naked_asm.rs b/compiler/rustc_codegen_ssa/src/mir/naked_asm.rs index 3a6b1f8d4efc9..d2a687359e0bc 100644 --- a/compiler/rustc_codegen_ssa/src/mir/naked_asm.rs +++ b/compiler/rustc_codegen_ssa/src/mir/naked_asm.rs @@ -1,23 +1,33 @@ use rustc_abi::{BackendRepr, Float, Integer, Primitive, RegKind}; use rustc_attr_parsing::InstructionSetAttr; use rustc_hir::def_id::DefId; -use rustc_middle::mir::mono::{Linkage, MonoItem, MonoItemData, Visibility}; -use rustc_middle::mir::{Body, InlineAsmOperand}; -use rustc_middle::ty::layout::{FnAbiOf, HasTyCtxt, HasTypingEnv, LayoutOf}; -use rustc_middle::ty::{Instance, Ty, TyCtxt}; +use rustc_middle::mir::mono::{Linkage, MonoItemData, Visibility}; +use rustc_middle::mir::{InlineAsmOperand, START_BLOCK}; +use rustc_middle::ty::layout::{FnAbiOf, LayoutOf, TyAndLayout}; +use rustc_middle::ty::{Instance, Ty, TyCtxt, TypeVisitableExt}; use rustc_middle::{bug, span_bug, ty}; use rustc_span::sym; use rustc_target::callconv::{ArgAbi, FnAbi, PassMode}; use rustc_target::spec::{BinaryFormat, WasmCAbi}; use crate::common; -use crate::traits::{AsmCodegenMethods, BuilderMethods, GlobalAsmOperandRef, MiscCodegenMethods}; - -pub(crate) fn codegen_naked_asm<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>( - cx: &'a Bx::CodegenCx, - mir: &Body<'tcx>, +use crate::mir::AsmCodegenMethods; +use crate::traits::GlobalAsmOperandRef; + +pub fn codegen_naked_asm< + 'a, + 'tcx, + Cx: LayoutOf<'tcx, LayoutOfResult = TyAndLayout<'tcx>> + + FnAbiOf<'tcx, FnAbiOfResult = &'tcx FnAbi<'tcx, Ty<'tcx>>> + + AsmCodegenMethods<'tcx>, +>( + cx: &'a mut Cx, instance: Instance<'tcx>, + item_data: MonoItemData, ) { + assert!(!instance.args.has_infer()); + let mir = cx.tcx().instance_mir(instance.def); + let rustc_middle::mir::TerminatorKind::InlineAsm { asm_macro: _, template, @@ -26,15 +36,14 @@ pub(crate) fn codegen_naked_asm<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>( line_spans, targets: _, unwind: _, - } = mir.basic_blocks.iter().next().unwrap().terminator().kind + } = mir.basic_blocks[START_BLOCK].terminator().kind else { bug!("#[naked] functions should always terminate with an asm! block") }; let operands: Vec<_> = - operands.iter().map(|op| inline_to_global_operand::(cx, instance, op)).collect(); + operands.iter().map(|op| inline_to_global_operand::(cx, instance, op)).collect(); - let item_data = cx.codegen_unit().items().get(&MonoItem::Fn(instance)).unwrap(); let name = cx.mangled_name(instance); let fn_abi = cx.fn_abi_of_instance(instance, ty::List::empty()); let (begin, end) = prefix_and_suffix(cx.tcx(), instance, &name, item_data, fn_abi); @@ -47,8 +56,8 @@ pub(crate) fn codegen_naked_asm<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>( cx.codegen_global_asm(&template_vec, &operands, options, line_spans); } -fn inline_to_global_operand<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>( - cx: &'a Bx::CodegenCx, +fn inline_to_global_operand<'a, 'tcx, Cx: LayoutOf<'tcx, LayoutOfResult = TyAndLayout<'tcx>>>( + cx: &'a Cx, instance: Instance<'tcx>, op: &InlineAsmOperand<'tcx>, ) -> GlobalAsmOperandRef<'tcx> { @@ -86,7 +95,9 @@ fn inline_to_global_operand<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>( ); let instance = match mono_type.kind() { - &ty::FnDef(def_id, args) => Instance::new(def_id, args), + &ty::FnDef(def_id, args) => { + Instance::expect_resolve(cx.tcx(), cx.typing_env(), def_id, args, value.span) + } _ => bug!("asm sym is not a function"), }; @@ -108,7 +119,7 @@ fn prefix_and_suffix<'tcx>( tcx: TyCtxt<'tcx>, instance: Instance<'tcx>, asm_name: &str, - item_data: &MonoItemData, + item_data: MonoItemData, fn_abi: &FnAbi<'tcx, Ty<'tcx>>, ) -> (String, String) { use std::fmt::Write; @@ -210,8 +221,10 @@ fn prefix_and_suffix<'tcx>( writeln!(begin, ".pushsection {section},\"ax\", {progbits}").unwrap(); writeln!(begin, ".balign {align_bytes}").unwrap(); write_linkage(&mut begin).unwrap(); - if let Visibility::Hidden = item_data.visibility { - writeln!(begin, ".hidden {asm_name}").unwrap(); + match item_data.visibility { + Visibility::Default => {} + Visibility::Protected => writeln!(begin, ".protected {asm_name}").unwrap(), + Visibility::Hidden => writeln!(begin, ".hidden {asm_name}").unwrap(), } writeln!(begin, ".type {asm_name}, {function}").unwrap(); if !arch_prefix.is_empty() { @@ -231,8 +244,9 @@ fn prefix_and_suffix<'tcx>( writeln!(begin, ".pushsection {},regular,pure_instructions", section).unwrap(); writeln!(begin, ".balign {align_bytes}").unwrap(); write_linkage(&mut begin).unwrap(); - if let Visibility::Hidden = item_data.visibility { - writeln!(begin, ".private_extern {asm_name}").unwrap(); + match item_data.visibility { + Visibility::Default | Visibility::Protected => {} + Visibility::Hidden => writeln!(begin, ".private_extern {asm_name}").unwrap(), } writeln!(begin, "{asm_name}:").unwrap(); diff --git a/compiler/rustc_codegen_ssa/src/mir/operand.rs b/compiler/rustc_codegen_ssa/src/mir/operand.rs index 7e355b6406aed..eade9e52de95a 100644 --- a/compiler/rustc_codegen_ssa/src/mir/operand.rs +++ b/compiler/rustc_codegen_ssa/src/mir/operand.rs @@ -9,6 +9,7 @@ use rustc_middle::mir::{self, ConstValue}; use rustc_middle::ty::Ty; use rustc_middle::ty::layout::{LayoutOf, TyAndLayout}; use rustc_middle::{bug, span_bug}; +use rustc_session::config::OptLevel; use tracing::{debug, instrument}; use super::place::{PlaceRef, PlaceValue}; @@ -496,6 +497,18 @@ impl<'a, 'tcx, V: CodegenObject> OperandRef<'tcx, V> { _ => (tag_imm, bx.cx().immediate_backend_type(tag_op.layout)), }; + // Layout ensures that we only get here for cases where the discriminant + // value and the variant index match, since that's all `Niche` can encode. + // But for emphasis and debugging, let's double-check one anyway. + debug_assert_eq!( + self.layout + .ty + .discriminant_for_variant(bx.tcx(), untagged_variant) + .unwrap() + .val, + u128::from(untagged_variant.as_u32()), + ); + let relative_max = niche_variants.end().as_u32() - niche_variants.start().as_u32(); // We have a subrange `niche_start..=niche_end` inside `range`. @@ -537,6 +550,21 @@ impl<'a, 'tcx, V: CodegenObject> OperandRef<'tcx, V> { relative_discr, bx.cx().const_uint(tag_llty, relative_max as u64), ); + + // Thanks to parameter attributes and load metadata, LLVM already knows + // the general valid range of the tag. It's possible, though, for there + // to be an impossible value *in the middle*, which those ranges don't + // communicate, so it's worth an `assume` to let the optimizer know. + if niche_variants.contains(&untagged_variant) + && bx.cx().sess().opts.optimize != OptLevel::No + { + let impossible = + u64::from(untagged_variant.as_u32() - niche_variants.start().as_u32()); + let impossible = bx.cx().const_uint(tag_llty, impossible); + let ne = bx.icmp(IntPredicate::IntNE, relative_discr, impossible); + bx.assume(ne); + } + (is_niche, cast_tag, niche_variants.start().as_u32() as u128) }; @@ -553,7 +581,9 @@ impl<'a, 'tcx, V: CodegenObject> OperandRef<'tcx, V> { ); // In principle we could insert assumes on the possible range of `discr`, but - // currently in LLVM this seems to be a pessimization. + // currently in LLVM this isn't worth it because the original `tag` will + // have either a `range` parameter attribute or `!range` metadata, + // or come from a `transmute` that already `assume`d it. discr } diff --git a/compiler/rustc_codegen_ssa/src/mono_item.rs b/compiler/rustc_codegen_ssa/src/mono_item.rs index f6af889fd6ecb..c2067e52afecd 100644 --- a/compiler/rustc_codegen_ssa/src/mono_item.rs +++ b/compiler/rustc_codegen_ssa/src/mono_item.rs @@ -1,17 +1,18 @@ -use rustc_hir as hir; use rustc_middle::middle::codegen_fn_attrs::CodegenFnAttrFlags; -use rustc_middle::mir::interpret::ErrorHandled; -use rustc_middle::mir::mono::{Linkage, MonoItem, Visibility}; -use rustc_middle::ty::Instance; -use rustc_middle::ty::layout::{HasTyCtxt, LayoutOf}; -use rustc_middle::{span_bug, ty}; +use rustc_middle::mir::mono::{Linkage, MonoItem, MonoItemData, Visibility}; +use rustc_middle::ty::layout::HasTyCtxt; use tracing::debug; +use crate::base; +use crate::mir::naked_asm; use crate::traits::*; -use crate::{base, common}; pub trait MonoItemExt<'a, 'tcx> { - fn define>(&self, cx: &'a Bx::CodegenCx); + fn define>( + &self, + cx: &'a mut Bx::CodegenCx, + item_data: MonoItemData, + ); fn predefine>( &self, cx: &'a Bx::CodegenCx, @@ -22,7 +23,11 @@ pub trait MonoItemExt<'a, 'tcx> { } impl<'a, 'tcx: 'a> MonoItemExt<'a, 'tcx> for MonoItem<'tcx> { - fn define>(&self, cx: &'a Bx::CodegenCx) { + fn define>( + &self, + cx: &'a mut Bx::CodegenCx, + item_data: MonoItemData, + ) { debug!( "BEGIN IMPLEMENTING '{} ({})' in cgu {}", self, @@ -35,71 +40,19 @@ impl<'a, 'tcx: 'a> MonoItemExt<'a, 'tcx> for MonoItem<'tcx> { cx.codegen_static(def_id); } MonoItem::GlobalAsm(item_id) => { - let item = cx.tcx().hir_item(item_id); - if let hir::ItemKind::GlobalAsm { asm, .. } = item.kind { - let operands: Vec<_> = asm - .operands - .iter() - .map(|(op, op_sp)| match *op { - hir::InlineAsmOperand::Const { ref anon_const } => { - match cx.tcx().const_eval_poly(anon_const.def_id.to_def_id()) { - Ok(const_value) => { - let ty = cx - .tcx() - .typeck_body(anon_const.body) - .node_type(anon_const.hir_id); - let string = common::asm_const_to_str( - cx.tcx(), - *op_sp, - const_value, - cx.layout_of(ty), - ); - GlobalAsmOperandRef::Const { string } - } - Err(ErrorHandled::Reported { .. }) => { - // An error has already been reported and - // compilation is guaranteed to fail if execution - // hits this path. So an empty string instead of - // a stringified constant value will suffice. - GlobalAsmOperandRef::Const { string: String::new() } - } - Err(ErrorHandled::TooGeneric(_)) => { - span_bug!( - *op_sp, - "asm const cannot be resolved; too generic" - ) - } - } - } - hir::InlineAsmOperand::SymFn { expr } => { - let ty = cx.tcx().typeck(item_id.owner_id).expr_ty(expr); - let instance = match ty.kind() { - &ty::FnDef(def_id, args) => Instance::new(def_id, args), - _ => span_bug!(*op_sp, "asm sym is not a function"), - }; - - GlobalAsmOperandRef::SymFn { instance } - } - hir::InlineAsmOperand::SymStatic { path: _, def_id } => { - GlobalAsmOperandRef::SymStatic { def_id } - } - hir::InlineAsmOperand::In { .. } - | hir::InlineAsmOperand::Out { .. } - | hir::InlineAsmOperand::InOut { .. } - | hir::InlineAsmOperand::SplitInOut { .. } - | hir::InlineAsmOperand::Label { .. } => { - span_bug!(*op_sp, "invalid operand type for global_asm!") - } - }) - .collect(); - - cx.codegen_global_asm(asm.template, &operands, asm.options, asm.line_spans); - } else { - span_bug!(item.span, "Mismatch between hir::Item type and MonoItem type") - } + base::codegen_global_asm(cx, item_id); } MonoItem::Fn(instance) => { - base::codegen_instance::(cx, instance); + if cx + .tcx() + .codegen_fn_attrs(instance.def_id()) + .flags + .contains(CodegenFnAttrFlags::NAKED) + { + naked_asm::codegen_naked_asm::(cx, instance, item_data); + } else { + base::codegen_instance::(cx, instance); + } } } diff --git a/compiler/rustc_codegen_ssa/src/traits/asm.rs b/compiler/rustc_codegen_ssa/src/traits/asm.rs index 7767bffbfbfd6..cc7a6a3f19e9e 100644 --- a/compiler/rustc_codegen_ssa/src/traits/asm.rs +++ b/compiler/rustc_codegen_ssa/src/traits/asm.rs @@ -62,7 +62,7 @@ pub trait AsmBuilderMethods<'tcx>: BackendTypes { pub trait AsmCodegenMethods<'tcx> { fn codegen_global_asm( - &self, + &mut self, template: &[InlineAsmTemplatePiece], operands: &[GlobalAsmOperandRef<'tcx>], options: InlineAsmOptions, diff --git a/compiler/rustc_codegen_ssa/src/traits/backend.rs b/compiler/rustc_codegen_ssa/src/traits/backend.rs index 65fd843e7a59e..e2f1458d06232 100644 --- a/compiler/rustc_codegen_ssa/src/traits/backend.rs +++ b/compiler/rustc_codegen_ssa/src/traits/backend.rs @@ -18,7 +18,7 @@ use super::write::WriteBackendMethods; use crate::back::archive::ArArchiveBuilderBuilder; use crate::back::link::link_binary; use crate::back::write::TargetMachineFactoryFn; -use crate::{CodegenResults, ModuleCodegen}; +use crate::{CodegenResults, ModuleCodegen, TargetConfig}; pub trait BackendTypes { type Value: CodegenObject; @@ -45,13 +45,19 @@ pub trait CodegenBackend { fn print(&self, _req: &PrintRequest, _out: &mut String, _sess: &Session) {} - /// Returns two feature sets: - /// - The first has the features that should be set in `cfg(target_features)`. - /// - The second is like the first, but also includes unstable features. - /// - /// RUSTC_SPECIFIC_FEATURES should be skipped here, those are handled outside codegen. - fn target_features_cfg(&self, _sess: &Session) -> (Vec, Vec) { - (vec![], vec![]) + /// Collect target-specific options that should be set in `cfg(...)`, including + /// `target_feature` and support for unstable float types. + fn target_config(&self, _sess: &Session) -> TargetConfig { + TargetConfig { + target_features: vec![], + unstable_target_features: vec![], + // `true` is used as a default so backends need to acknowledge when they do not + // support the float types, rather than accidentally quietly skipping all tests. + has_reliable_f16: true, + has_reliable_f16_math: true, + has_reliable_f128: true, + has_reliable_f128_math: true, + } } fn print_passes(&self) {} diff --git a/compiler/rustc_const_eval/messages.ftl b/compiler/rustc_const_eval/messages.ftl index dd481e04abbdc..f4defd2aa1343 100644 --- a/compiler/rustc_const_eval/messages.ftl +++ b/compiler/rustc_const_eval/messages.ftl @@ -12,6 +12,27 @@ const_eval_already_reported = const_eval_assume_false = `assume` called with `false` +const_eval_bad_pointer_op = {$operation -> + [MemoryAccess] memory access failed + [InboundsPointerArithmetic] in-bounds pointer arithmetic failed + *[Dereferenceable] pointer not dereferenceable +} +const_eval_bad_pointer_op_attempting = {const_eval_bad_pointer_op}: {$operation -> + [MemoryAccess] attempting to access {$inbounds_size -> + [1] 1 byte + *[x] {$inbounds_size} bytes + } + [InboundsPointerArithmetic] attempting to offset pointer by {$inbounds_size -> + [1] 1 byte + *[x] {$inbounds_size} bytes + } + *[Dereferenceable] pointer must {$inbounds_size -> + [0] point to some allocation + [1] be dereferenceable for 1 byte + *[x] be dereferenceable for {$inbounds_size} bytes + } + } + const_eval_bounds_check_failed = indexing out of bounds: the len is {$len} but the index is {$index} const_eval_call_nonzero_intrinsic = @@ -39,9 +60,9 @@ const_eval_copy_nonoverlapping_overlapping = `copy_nonoverlapping` called on overlapping ranges const_eval_dangling_int_pointer = - {$bad_pointer_message}: {const_eval_expected_inbounds_pointer}, but got {$pointer} which is a dangling pointer (it has no provenance) + {const_eval_bad_pointer_op_attempting}, but got {$pointer} which is a dangling pointer (it has no provenance) const_eval_dangling_null_pointer = - {$bad_pointer_message}: {const_eval_expected_inbounds_pointer}, but got a null pointer + {const_eval_bad_pointer_op_attempting}, but got null pointer const_eval_dangling_ptr_in_final = encountered dangling pointer in final value of {const_eval_intern_kind} const_eval_dead_local = @@ -77,21 +98,6 @@ const_eval_error = {$error_kind -> const_eval_exact_div_has_remainder = exact_div: {$a} cannot be divided by {$b} without remainder -const_eval_expected_inbounds_pointer = - expected a pointer to {$inbounds_size_abs -> - [0] some allocation - *[x] {$inbounds_size_is_neg -> - [false] {$inbounds_size_abs -> - [1] 1 byte of memory - *[x] {$inbounds_size_abs} bytes of memory - } - *[true] the end of {$inbounds_size_abs -> - [1] 1 byte of memory - *[x] {$inbounds_size_abs} bytes of memory - } - } - } - const_eval_extern_static = cannot access extern static `{$did}` const_eval_extern_type_field = `extern type` field does not have a known offset @@ -111,7 +117,6 @@ const_eval_frame_note_inner = inside {$where_ -> const_eval_frame_note_last = the failure occurred here -const_eval_in_bounds_test = out-of-bounds pointer use const_eval_incompatible_calling_conventions = calling a function with calling convention {$callee_conv} using calling convention {$caller_conv} @@ -206,7 +211,6 @@ const_eval_long_running = const_eval_max_num_nodes_in_const = maximum number of nodes exceeded in constant {$global_const_id} -const_eval_memory_access_test = memory access failed const_eval_memory_exhausted = tried to allocate more memory than available to compiler @@ -287,8 +291,6 @@ const_eval_offset_from_out_of_bounds = `{$name}` called on two different pointers where the memory range between them is not in-bounds of an allocation const_eval_offset_from_overflow = `{$name}` called when first pointer is too far ahead of second -const_eval_offset_from_test = - out-of-bounds `offset_from` origin const_eval_offset_from_underflow = `{$name}` called when first pointer is too far before second const_eval_offset_from_unsigned_overflow = @@ -312,27 +314,25 @@ const_eval_partial_pointer_overwrite = unable to overwrite parts of a pointer in memory at {$ptr} const_eval_pointer_arithmetic_overflow = overflowing pointer arithmetic: the total offset in bytes does not fit in an `isize` -const_eval_pointer_arithmetic_test = out-of-bounds pointer arithmetic + const_eval_pointer_out_of_bounds = - {$bad_pointer_message}: {const_eval_expected_inbounds_pointer}, but got {$pointer} {$ptr_offset_is_neg -> - [true] which points to before the beginning of the allocation - *[false] {$inbounds_size_is_neg -> - [true] {$ptr_offset_abs -> - [0] which is at the beginning of the allocation - *[other] which does not have enough space to the beginning of the allocation - } - *[false] {$alloc_size_minus_ptr_offset -> - [0] which is at or beyond the end of the allocation of size {$alloc_size -> + {const_eval_bad_pointer_op_attempting}, but got {$pointer} which {$inbounds_size_is_neg -> + [false] {$alloc_size_minus_ptr_offset -> + [0] is at or beyond the end of the allocation of size {$alloc_size -> [1] 1 byte *[x] {$alloc_size} bytes } - [1] which is only 1 byte from the end of the allocation - *[x] which is only {$alloc_size_minus_ptr_offset} bytes from the end of the allocation + [1] is only 1 byte from the end of the allocation + *[x] is only {$alloc_size_minus_ptr_offset} bytes from the end of the allocation + } + *[true] {$ptr_offset_abs -> + [0] is at the beginning of the allocation + *[other] is only {$ptr_offset_abs} bytes from the beginning of the allocation } - } } + const_eval_pointer_use_after_free = - {$bad_pointer_message}: {$alloc_id} has been freed, so this pointer is dangling + {const_eval_bad_pointer_op}: {$alloc_id} has been freed, so this pointer is dangling const_eval_ptr_as_bytes_1 = this code performed an operation that depends on the underlying bytes representing a pointer const_eval_ptr_as_bytes_2 = diff --git a/compiler/rustc_const_eval/src/check_consts/check.rs b/compiler/rustc_const_eval/src/check_consts/check.rs index 90002d3f10905..b600b8918dd01 100644 --- a/compiler/rustc_const_eval/src/check_consts/check.rs +++ b/compiler/rustc_const_eval/src/check_consts/check.rs @@ -34,7 +34,7 @@ use crate::check_consts::is_fn_or_trait_safe_to_expose_on_stable; use crate::errors; type QualifResults<'mir, 'tcx, Q> = - rustc_mir_dataflow::ResultsCursor<'mir, 'tcx, FlowSensitiveAnalysis<'mir, 'mir, 'tcx, Q>>; + rustc_mir_dataflow::ResultsCursor<'mir, 'tcx, FlowSensitiveAnalysis<'mir, 'tcx, Q>>; #[derive(Copy, Clone, PartialEq, Eq, Debug)] enum ConstConditionsHold { @@ -335,7 +335,7 @@ impl<'mir, 'tcx> Checker<'mir, 'tcx> { self.tcx.dcx().span_bug(span, "tls access is checked in `Rvalue::ThreadLocalRef`"); } if let Some(def_id) = def_id.as_local() - && let Err(guar) = self.tcx.at(span).check_well_formed(hir::OwnerId { def_id }) + && let Err(guar) = self.tcx.ensure_ok().check_well_formed(hir::OwnerId { def_id }) { self.error_emitted = Some(guar); } diff --git a/compiler/rustc_const_eval/src/check_consts/ops.rs b/compiler/rustc_const_eval/src/check_consts/ops.rs index 7756e51c4c5f2..1e5b98675c4f0 100644 --- a/compiler/rustc_const_eval/src/check_consts/ops.rs +++ b/compiler/rustc_const_eval/src/check_consts/ops.rs @@ -352,7 +352,7 @@ fn build_error_for_const_call<'tcx>( ); err } - _ if tcx.opt_parent(callee) == tcx.get_diagnostic_item(sym::ArgumentMethods) => { + _ if tcx.opt_parent(callee) == tcx.get_diagnostic_item(sym::FmtArgumentsNew) => { ccx.dcx().create_err(errors::NonConstFmtMacroCall { span, kind: ccx.const_kind(), diff --git a/compiler/rustc_const_eval/src/check_consts/resolver.rs b/compiler/rustc_const_eval/src/check_consts/resolver.rs index 8cee282311f03..9f7fcc509a587 100644 --- a/compiler/rustc_const_eval/src/check_consts/resolver.rs +++ b/compiler/rustc_const_eval/src/check_consts/resolver.rs @@ -22,17 +22,17 @@ use super::{ConstCx, Qualif, qualifs}; /// qualified immediately after it is borrowed or its address escapes. The borrow must allow for /// mutation, which includes shared borrows of places with interior mutability. The type of /// borrowed place must contain the qualif. -struct TransferFunction<'a, 'mir, 'tcx, Q> { - ccx: &'a ConstCx<'mir, 'tcx>, - state: &'a mut State, +struct TransferFunction<'mir, 'tcx, Q> { + ccx: &'mir ConstCx<'mir, 'tcx>, + state: &'mir mut State, _qualif: PhantomData, } -impl<'a, 'mir, 'tcx, Q> TransferFunction<'a, 'mir, 'tcx, Q> +impl<'mir, 'tcx, Q> TransferFunction<'mir, 'tcx, Q> where Q: Qualif, { - fn new(ccx: &'a ConstCx<'mir, 'tcx>, state: &'a mut State) -> Self { + fn new(ccx: &'mir ConstCx<'mir, 'tcx>, state: &'mir mut State) -> Self { TransferFunction { ccx, state, _qualif: PhantomData } } @@ -124,7 +124,7 @@ where } } -impl<'tcx, Q> Visitor<'tcx> for TransferFunction<'_, '_, 'tcx, Q> +impl<'tcx, Q> Visitor<'tcx> for TransferFunction<'_, 'tcx, Q> where Q: Qualif, { @@ -228,20 +228,20 @@ where } /// The dataflow analysis used to propagate qualifs on arbitrary CFGs. -pub(super) struct FlowSensitiveAnalysis<'a, 'mir, 'tcx, Q> { - ccx: &'a ConstCx<'mir, 'tcx>, +pub(super) struct FlowSensitiveAnalysis<'mir, 'tcx, Q> { + ccx: &'mir ConstCx<'mir, 'tcx>, _qualif: PhantomData, } -impl<'a, 'mir, 'tcx, Q> FlowSensitiveAnalysis<'a, 'mir, 'tcx, Q> +impl<'mir, 'tcx, Q> FlowSensitiveAnalysis<'mir, 'tcx, Q> where Q: Qualif, { - pub(super) fn new(_: Q, ccx: &'a ConstCx<'mir, 'tcx>) -> Self { + pub(super) fn new(_: Q, ccx: &'mir ConstCx<'mir, 'tcx>) -> Self { FlowSensitiveAnalysis { ccx, _qualif: PhantomData } } - fn transfer_function(&self, state: &'a mut State) -> TransferFunction<'a, 'mir, 'tcx, Q> { + fn transfer_function(&self, state: &'mir mut State) -> TransferFunction<'mir, 'tcx, Q> { TransferFunction::::new(self.ccx, state) } } @@ -313,7 +313,7 @@ impl JoinSemiLattice for State { } } -impl<'tcx, Q> Analysis<'tcx> for FlowSensitiveAnalysis<'_, '_, 'tcx, Q> +impl<'tcx, Q> Analysis<'tcx> for FlowSensitiveAnalysis<'_, 'tcx, Q> where Q: Qualif, { diff --git a/compiler/rustc_const_eval/src/const_eval/fn_queries.rs b/compiler/rustc_const_eval/src/const_eval/fn_queries.rs index 35c3e3ed3150e..a5572810b335c 100644 --- a/compiler/rustc_const_eval/src/const_eval/fn_queries.rs +++ b/compiler/rustc_const_eval/src/const_eval/fn_queries.rs @@ -39,6 +39,8 @@ fn constness(tcx: TyCtxt<'_>, def_id: LocalDefId) -> hir::Constness { // If the function itself is not annotated with `const`, it may still be a `const fn` // if it resides in a const trait impl. parent_impl_or_trait_constness(tcx, def_id) + } else if tcx.get_externally_implementable_item_impls(()).contains_key(&def_id) { + hir::Constness::NotConst } else { tcx.dcx().span_bug( tcx.def_span(def_id), diff --git a/compiler/rustc_const_eval/src/const_eval/machine.rs b/compiler/rustc_const_eval/src/const_eval/machine.rs index 496f6c86f7196..7c7daed525b2d 100644 --- a/compiler/rustc_const_eval/src/const_eval/machine.rs +++ b/compiler/rustc_const_eval/src/const_eval/machine.rs @@ -502,6 +502,7 @@ impl<'tcx> interpret::Machine<'tcx> for CompileTimeMachine<'tcx> { RemainderByZero(op) => RemainderByZero(eval_to_int(op)?), ResumedAfterReturn(coroutine_kind) => ResumedAfterReturn(*coroutine_kind), ResumedAfterPanic(coroutine_kind) => ResumedAfterPanic(*coroutine_kind), + ResumedAfterDrop(coroutine_kind) => ResumedAfterDrop(*coroutine_kind), MisalignedPointerDereference { required, found } => MisalignedPointerDereference { required: eval_to_int(required)?, found: eval_to_int(found)?, @@ -546,7 +547,7 @@ impl<'tcx> interpret::Machine<'tcx> for CompileTimeMachine<'tcx> { rustc_session::lint::builtin::LONG_RUNNING_CONST_EVAL, hir_id, ) - .0 + .level .is_error(); let span = ecx.cur_span(); ecx.tcx.emit_node_span_lint( diff --git a/compiler/rustc_const_eval/src/const_eval/valtrees.rs b/compiler/rustc_const_eval/src/const_eval/valtrees.rs index 3776fb55c2e5f..34239ae1d1527 100644 --- a/compiler/rustc_const_eval/src/const_eval/valtrees.rs +++ b/compiler/rustc_const_eval/src/const_eval/valtrees.rs @@ -256,7 +256,7 @@ pub(crate) fn eval_to_valtree<'tcx>( Err(err) => { let did = cid.instance.def_id(); let global_const_id = cid.display(tcx); - let span = tcx.hir().span_if_local(did); + let span = tcx.hir_span_if_local(did); match err { ValTreeCreationError::NodesOverflow => { let handled = diff --git a/compiler/rustc_const_eval/src/errors.rs b/compiler/rustc_const_eval/src/errors.rs index e2675e2f4c900..826ea0e58ecca 100644 --- a/compiler/rustc_const_eval/src/errors.rs +++ b/compiler/rustc_const_eval/src/errors.rs @@ -5,15 +5,14 @@ use either::Either; use rustc_abi::WrappingRange; use rustc_errors::codes::*; use rustc_errors::{ - Diag, DiagArgValue, DiagCtxtHandle, DiagMessage, Diagnostic, EmissionGuarantee, Level, - MultiSpan, SubdiagMessageOp, Subdiagnostic, + Diag, DiagArgValue, DiagMessage, Diagnostic, EmissionGuarantee, Level, MultiSpan, Subdiagnostic, }; use rustc_hir::ConstContext; use rustc_macros::{Diagnostic, LintDiagnostic, Subdiagnostic}; use rustc_middle::mir::interpret::{ - CheckInAllocMsg, CtfeProvenance, ExpectedKind, InterpErrorKind, InvalidMetaKind, - InvalidProgramInfo, Misalignment, Pointer, PointerKind, ResourceExhaustionInfo, - UndefinedBehaviorInfo, UnsupportedOpInfo, ValidationErrorInfo, + CtfeProvenance, ExpectedKind, InterpErrorKind, InvalidMetaKind, InvalidProgramInfo, + Misalignment, Pointer, PointerKind, ResourceExhaustionInfo, UndefinedBehaviorInfo, + UnsupportedOpInfo, ValidationErrorInfo, }; use rustc_middle::ty::{self, Mutability, Ty}; use rustc_span::{Span, Symbol}; @@ -290,11 +289,7 @@ pub struct FrameNote { } impl Subdiagnostic for FrameNote { - fn add_to_diag_with>( - self, - diag: &mut Diag<'_, G>, - f: &F, - ) { + fn add_to_diag(self, diag: &mut Diag<'_, G>) { diag.arg("times", self.times); diag.arg("where_", self.where_); diag.arg("instance", self.instance); @@ -302,7 +297,7 @@ impl Subdiagnostic for FrameNote { if self.has_label && !self.span.is_dummy() { span.push_span_label(self.span, fluent::const_eval_frame_note_last); } - let msg = f(diag, fluent::const_eval_frame_note.into()); + let msg = diag.eagerly_translate(fluent::const_eval_frame_note); diag.span_note(span, msg); } } @@ -502,19 +497,6 @@ pub trait ReportErrorExt { } } -fn bad_pointer_message(msg: CheckInAllocMsg, dcx: DiagCtxtHandle<'_>) -> String { - use crate::fluent_generated::*; - - let msg = match msg { - CheckInAllocMsg::MemoryAccessTest => const_eval_memory_access_test, - CheckInAllocMsg::PointerArithmeticTest => const_eval_pointer_arithmetic_test, - CheckInAllocMsg::OffsetFromTest => const_eval_offset_from_test, - CheckInAllocMsg::InboundsTest => const_eval_in_bounds_test, - }; - - dcx.eagerly_translate_to_string(msg, [].into_iter()) -} - impl<'a> ReportErrorExt for UndefinedBehaviorInfo<'a> { fn diagnostic_message(&self) -> DiagMessage { use UndefinedBehaviorInfo::*; @@ -568,7 +550,6 @@ impl<'a> ReportErrorExt for UndefinedBehaviorInfo<'a> { fn add_args(self, diag: &mut Diag<'_, G>) { use UndefinedBehaviorInfo::*; - let dcx = diag.dcx; match self { Ub(_) => {} Custom(custom) => { @@ -616,12 +597,10 @@ impl<'a> ReportErrorExt for UndefinedBehaviorInfo<'a> { diag.arg("vtable_dyn_type", vtable_dyn_type.to_string()); } PointerUseAfterFree(alloc_id, msg) => { - diag.arg("alloc_id", alloc_id) - .arg("bad_pointer_message", bad_pointer_message(msg, dcx)); + diag.arg("alloc_id", alloc_id).arg("operation", format!("{:?}", msg)); } PointerOutOfBounds { alloc_id, alloc_size, ptr_offset, inbounds_size, msg } => { diag.arg("alloc_size", alloc_size.bytes()); - diag.arg("bad_pointer_message", bad_pointer_message(msg, dcx)); diag.arg("pointer", { let mut out = format!("{:?}", alloc_id); if ptr_offset > 0 { @@ -631,14 +610,17 @@ impl<'a> ReportErrorExt for UndefinedBehaviorInfo<'a> { } out }); + diag.arg("inbounds_size", inbounds_size); diag.arg("inbounds_size_is_neg", inbounds_size < 0); diag.arg("inbounds_size_abs", inbounds_size.unsigned_abs()); + diag.arg("ptr_offset", ptr_offset); diag.arg("ptr_offset_is_neg", ptr_offset < 0); diag.arg("ptr_offset_abs", ptr_offset.unsigned_abs()); diag.arg( "alloc_size_minus_ptr_offset", alloc_size.bytes().saturating_sub(ptr_offset as u64), ); + diag.arg("operation", format!("{:?}", msg)); } DanglingIntPointer { addr, inbounds_size, msg } => { if addr != 0 { @@ -648,9 +630,10 @@ impl<'a> ReportErrorExt for UndefinedBehaviorInfo<'a> { ); } + diag.arg("inbounds_size", inbounds_size); diag.arg("inbounds_size_is_neg", inbounds_size < 0); diag.arg("inbounds_size_abs", inbounds_size.unsigned_abs()); - diag.arg("bad_pointer_message", bad_pointer_message(msg, dcx)); + diag.arg("operation", format!("{:?}", msg)); } AlignmentCheckFailed(Misalignment { required, has }, msg) => { diag.arg("required", required.bytes()); diff --git a/compiler/rustc_const_eval/src/interpret/call.rs b/compiler/rustc_const_eval/src/interpret/call.rs index 29f819cca1fb6..a58e1241a054d 100644 --- a/compiler/rustc_const_eval/src/interpret/call.rs +++ b/compiler/rustc_const_eval/src/interpret/call.rs @@ -570,6 +570,9 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> { | ty::InstanceKind::FnPtrAddrShim(..) | ty::InstanceKind::ThreadLocalShim(..) | ty::InstanceKind::AsyncDropGlueCtorShim(..) + | ty::InstanceKind::AsyncDropGlue(..) + | ty::InstanceKind::FutureDropPollShim(..) + | ty::InstanceKind::EiiShim { .. } | ty::InstanceKind::Item(_) => { // We need MIR for this fn. // Note that this can be an intrinsic, if we are executing its fallback body. diff --git a/compiler/rustc_const_eval/src/interpret/eval_context.rs b/compiler/rustc_const_eval/src/interpret/eval_context.rs index c1948e9f31f10..b69bc0918be82 100644 --- a/compiler/rustc_const_eval/src/interpret/eval_context.rs +++ b/compiler/rustc_const_eval/src/interpret/eval_context.rs @@ -268,7 +268,7 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> { /// Call this on things you got out of the MIR (so it is as generic as the current /// stack frame), to bring it into the proper environment for this interpreter. - pub(super) fn instantiate_from_current_frame_and_normalize_erasing_regions< + pub fn instantiate_from_current_frame_and_normalize_erasing_regions< T: TypeFoldable>, >( &self, @@ -279,9 +279,7 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> { /// Call this on things you got out of the MIR (so it is as generic as the provided /// stack frame), to bring it into the proper environment for this interpreter. - pub(super) fn instantiate_from_frame_and_normalize_erasing_regions< - T: TypeFoldable>, - >( + pub fn instantiate_from_frame_and_normalize_erasing_regions>>( &self, frame: &Frame<'tcx, M::Provenance, M::FrameExtra>, value: T, diff --git a/compiler/rustc_const_eval/src/interpret/intern.rs b/compiler/rustc_const_eval/src/interpret/intern.rs index e4b2fe5d153e1..1dd96297d1fda 100644 --- a/compiler/rustc_const_eval/src/interpret/intern.rs +++ b/compiler/rustc_const_eval/src/interpret/intern.rs @@ -17,19 +17,20 @@ use hir::def::DefKind; use rustc_ast::Mutability; use rustc_data_structures::fx::{FxHashSet, FxIndexMap}; use rustc_hir as hir; +use rustc_hir::definitions::{DefPathData, DisambiguatorState}; use rustc_middle::middle::codegen_fn_attrs::CodegenFnAttrs; use rustc_middle::mir::interpret::{ConstAllocation, CtfeProvenance, InterpResult}; use rustc_middle::query::TyCtxtAt; use rustc_middle::span_bug; use rustc_middle::ty::layout::TyAndLayout; use rustc_span::def_id::LocalDefId; -use rustc_span::sym; use tracing::{instrument, trace}; use super::{ AllocId, Allocation, InterpCx, MPlaceTy, Machine, MemoryKind, PlaceTy, err_ub, interp_ok, }; use crate::const_eval; +use crate::const_eval::DummyMachine; use crate::errors::NestedStaticInThreadLocal; pub trait CompileTimeMachine<'tcx, T> = Machine< @@ -65,6 +66,7 @@ fn intern_shallow<'tcx, T, M: CompileTimeMachine<'tcx, T>>( ecx: &mut InterpCx<'tcx, M>, alloc_id: AllocId, mutability: Mutability, + disambiguator: Option<&mut DisambiguatorState>, ) -> Result + 'tcx, ()> { trace!("intern_shallow {:?}", alloc_id); // remove allocation @@ -87,7 +89,13 @@ fn intern_shallow<'tcx, T, M: CompileTimeMachine<'tcx, T>>( // link the alloc id to the actual allocation let alloc = ecx.tcx.mk_const_alloc(alloc); if let Some(static_id) = ecx.machine.static_def_id() { - intern_as_new_static(ecx.tcx, static_id, alloc_id, alloc); + intern_as_new_static( + ecx.tcx, + static_id, + alloc_id, + alloc, + disambiguator.expect("disambiguator needed"), + ); } else { ecx.tcx.set_alloc_id_memory(alloc_id, alloc); } @@ -101,11 +109,18 @@ fn intern_as_new_static<'tcx>( static_id: LocalDefId, alloc_id: AllocId, alloc: ConstAllocation<'tcx>, + disambiguator: &mut DisambiguatorState, ) { + // `intern_const_alloc_recursive` is called once per static and it contains the `DisambiguatorState`. + // The `::{{nested}}` path is thus unique to `intern_const_alloc_recursive` and the + // `DisambiguatorState` ensures the generated path is unique for this call as we generate + // `::{{nested#n}}` where `n` is the `n`th `intern_as_new_static` call. let feed = tcx.create_def( static_id, - Some(sym::nested), + None, DefKind::Static { safety: hir::Safety::Safe, mutability: alloc.0.mutability, nested: true }, + Some(DefPathData::NestedStatic), + disambiguator, ); tcx.set_nested_alloc_id_static(alloc_id, feed.def_id()); @@ -153,6 +168,8 @@ pub fn intern_const_alloc_recursive<'tcx, M: CompileTimeMachine<'tcx, const_eval intern_kind: InternKind, ret: &MPlaceTy<'tcx>, ) -> Result<(), InternResult> { + let mut disambiguator = DisambiguatorState::new(); + // We are interning recursively, and for mutability we are distinguishing the "root" allocation // that we are starting in, and all other allocations that we are encountering recursively. let (base_mutability, inner_mutability, is_static) = match intern_kind { @@ -196,7 +213,9 @@ pub fn intern_const_alloc_recursive<'tcx, M: CompileTimeMachine<'tcx, const_eval alloc.1.mutability = base_mutability; alloc.1.provenance().ptrs().iter().map(|&(_, prov)| prov).collect() } else { - intern_shallow(ecx, base_alloc_id, base_mutability).unwrap().collect() + intern_shallow(ecx, base_alloc_id, base_mutability, Some(&mut disambiguator)) + .unwrap() + .collect() }; // We need to distinguish "has just been interned" from "was already in `tcx`", // so we track this in a separate set. @@ -290,7 +309,7 @@ pub fn intern_const_alloc_recursive<'tcx, M: CompileTimeMachine<'tcx, const_eval // okay with losing some potential for immutability here. This can anyway only affect // `static mut`. just_interned.insert(alloc_id); - match intern_shallow(ecx, alloc_id, inner_mutability) { + match intern_shallow(ecx, alloc_id, inner_mutability, Some(&mut disambiguator)) { Ok(nested) => todo.extend(nested), Err(()) => { ecx.tcx.dcx().delayed_bug("found dangling pointer during const interning"); @@ -312,8 +331,9 @@ pub fn intern_const_alloc_for_constprop<'tcx, T, M: CompileTimeMachine<'tcx, T>> return interp_ok(()); } // Move allocation to `tcx`. - if let Some(_) = - (intern_shallow(ecx, alloc_id, Mutability::Not).map_err(|()| err_ub!(DeadLocal))?).next() + if let Some(_) = intern_shallow(ecx, alloc_id, Mutability::Not, None) + .map_err(|()| err_ub!(DeadLocal))? + .next() { // We are not doing recursive interning, so we don't currently support provenance. // (If this assertion ever triggers, we should just implement a @@ -323,20 +343,23 @@ pub fn intern_const_alloc_for_constprop<'tcx, T, M: CompileTimeMachine<'tcx, T>> interp_ok(()) } -impl<'tcx, M: super::intern::CompileTimeMachine<'tcx, !>> InterpCx<'tcx, M> { +impl<'tcx> InterpCx<'tcx, DummyMachine> { /// A helper function that allocates memory for the layout given and gives you access to mutate /// it. Once your own mutation code is done, the backing `Allocation` is removed from the /// current `Memory` and interned as read-only into the global memory. pub fn intern_with_temp_alloc( &mut self, layout: TyAndLayout<'tcx>, - f: impl FnOnce(&mut InterpCx<'tcx, M>, &PlaceTy<'tcx, M::Provenance>) -> InterpResult<'tcx, ()>, + f: impl FnOnce( + &mut InterpCx<'tcx, DummyMachine>, + &PlaceTy<'tcx, CtfeProvenance>, + ) -> InterpResult<'tcx, ()>, ) -> InterpResult<'tcx, AllocId> { // `allocate` picks a fresh AllocId that we will associate with its data below. let dest = self.allocate(layout, MemoryKind::Stack)?; f(self, &dest.clone().into())?; let alloc_id = dest.ptr().provenance.unwrap().alloc_id(); // this was just allocated, it must have provenance - for prov in intern_shallow(self, alloc_id, Mutability::Not).unwrap() { + for prov in intern_shallow(self, alloc_id, Mutability::Not, None).unwrap() { // We are not doing recursive interning, so we don't currently support provenance. // (If this assertion ever triggers, we should just implement a // proper recursive interning loop -- or just call `intern_const_alloc_recursive`. diff --git a/compiler/rustc_const_eval/src/interpret/intrinsics.rs b/compiler/rustc_const_eval/src/interpret/intrinsics.rs index 4ca317e3a1e53..04a8ed1e0f1e9 100644 --- a/compiler/rustc_const_eval/src/interpret/intrinsics.rs +++ b/compiler/rustc_const_eval/src/interpret/intrinsics.rs @@ -61,16 +61,21 @@ pub(crate) fn eval_nullary_intrinsic<'tcx>( ensure_monomorphic_enough(tcx, tp_ty)?; ConstValue::from_u128(tcx.type_id_hash(tp_ty).as_u128()) } - sym::variant_count => match tp_ty.kind() { + sym::variant_count => match match tp_ty.kind() { + // Pattern types have the same number of variants as their base type. + // Even if we restrict e.g. which variants are valid, the variants are essentially just uninhabited. + // And `Result<(), !>` still has two variants according to `variant_count`. + ty::Pat(base, _) => *base, + _ => tp_ty, + } + .kind() + { // Correctly handles non-monomorphic calls, so there is no need for ensure_monomorphic_enough. ty::Adt(adt, _) => ConstValue::from_target_usize(adt.variants().len() as u64, &tcx), ty::Alias(..) | ty::Param(_) | ty::Placeholder(_) | ty::Infer(_) => { throw_inval!(TooGeneric) } - ty::Pat(_, pat) => match **pat { - ty::PatternKind::Range { .. } => ConstValue::from_target_usize(0u64, &tcx), - // Future pattern kinds may have more variants - }, + ty::Pat(..) => unreachable!(), ty::Bound(_, _) => bug!("bound ty during ctfe"), ty::Bool | ty::Char @@ -158,6 +163,30 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> { self.copy_op(&val, dest)?; } + sym::fadd_algebraic + | sym::fsub_algebraic + | sym::fmul_algebraic + | sym::fdiv_algebraic + | sym::frem_algebraic => { + let a = self.read_immediate(&args[0])?; + let b = self.read_immediate(&args[1])?; + + let op = match intrinsic_name { + sym::fadd_algebraic => BinOp::Add, + sym::fsub_algebraic => BinOp::Sub, + sym::fmul_algebraic => BinOp::Mul, + sym::fdiv_algebraic => BinOp::Div, + sym::frem_algebraic => BinOp::Rem, + + _ => bug!(), + }; + + let res = self.binary_op(op, &a, &b)?; + // `binary_op` already called `generate_nan` if needed. + let res = M::apply_float_nondet(self, res)?; + self.write_immediate(*res, dest)?; + } + sym::ctpop | sym::cttz | sym::cttz_nonzero @@ -319,7 +348,7 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> { // Check that the memory between them is dereferenceable at all, starting from the // origin pointer: `dist` is `a - b`, so it is based on `b`. - self.check_ptr_access_signed(b, dist, CheckInAllocMsg::OffsetFromTest) + self.check_ptr_access_signed(b, dist, CheckInAllocMsg::Dereferenceable) .map_err_kind(|_| { // This could mean they point to different allocations, or they point to the same allocation // but not the entire range between the pointers is in-bounds. @@ -343,7 +372,7 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> { self.check_ptr_access_signed( a, dist.checked_neg().unwrap(), // i64::MIN is impossible as no allocation can be that large - CheckInAllocMsg::OffsetFromTest, + CheckInAllocMsg::Dereferenceable, ) .map_err_kind(|_| { // Make the error more specific. @@ -622,7 +651,11 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> { offset_bytes: i64, ) -> InterpResult<'tcx, Pointer>> { // The offset must be in bounds starting from `ptr`. - self.check_ptr_access_signed(ptr, offset_bytes, CheckInAllocMsg::PointerArithmeticTest)?; + self.check_ptr_access_signed( + ptr, + offset_bytes, + CheckInAllocMsg::InboundsPointerArithmetic, + )?; // This also implies that there is no overflow, so we are done. interp_ok(ptr.wrapping_signed_offset(offset_bytes, self)) } diff --git a/compiler/rustc_const_eval/src/interpret/machine.rs b/compiler/rustc_const_eval/src/interpret/machine.rs index e5026eff21f46..a1386b4e1be49 100644 --- a/compiler/rustc_const_eval/src/interpret/machine.rs +++ b/compiler/rustc_const_eval/src/interpret/machine.rs @@ -276,6 +276,14 @@ pub trait Machine<'tcx>: Sized { F2::NAN } + /// Apply non-determinism to float operations that do not return a precise result. + fn apply_float_nondet( + _ecx: &mut InterpCx<'tcx, Self>, + val: ImmTy<'tcx, Self::Provenance>, + ) -> InterpResult<'tcx, ImmTy<'tcx, Self::Provenance>> { + interp_ok(val) + } + /// Determines the result of `min`/`max` on floats when the arguments are equal. fn equal_float_min_max(_ecx: &InterpCx<'tcx, Self>, a: F, _b: F) -> F { // By default, we pick the left argument. diff --git a/compiler/rustc_const_eval/src/interpret/memory.rs b/compiler/rustc_const_eval/src/interpret/memory.rs index 8f286971e6389..43bf48a9b9612 100644 --- a/compiler/rustc_const_eval/src/interpret/memory.rs +++ b/compiler/rustc_const_eval/src/interpret/memory.rs @@ -8,8 +8,9 @@ use std::assert_matches::assert_matches; use std::borrow::{Borrow, Cow}; +use std::cell::Cell; use std::collections::VecDeque; -use std::{fmt, mem, ptr}; +use std::{fmt, ptr}; use rustc_abi::{Align, HasDataLayout, Size}; use rustc_ast::Mutability; @@ -131,7 +132,7 @@ pub struct Memory<'tcx, M: Machine<'tcx>> { /// This stores whether we are currently doing reads purely for the purpose of validation. /// Those reads do not trigger the machine's hooks for memory reads. /// Needless to say, this must only be set with great care! - validation_in_progress: bool, + validation_in_progress: Cell, } /// A reference to some allocation that was already bounds-checked for the given region @@ -158,7 +159,7 @@ impl<'tcx, M: Machine<'tcx>> Memory<'tcx, M> { alloc_map: M::MemoryMap::default(), extra_fn_ptr_map: FxIndexMap::default(), dead_alloc_map: FxIndexMap::default(), - validation_in_progress: false, + validation_in_progress: Cell::new(false), } } @@ -350,7 +351,7 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> { kind = "static_mem" ) } - None => err_ub!(PointerUseAfterFree(alloc_id, CheckInAllocMsg::MemoryAccessTest)), + None => err_ub!(PointerUseAfterFree(alloc_id, CheckInAllocMsg::MemoryAccess)), }) .into(); }; @@ -413,10 +414,10 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> { self, ptr, size, - CheckInAllocMsg::MemoryAccessTest, + CheckInAllocMsg::MemoryAccess, |this, alloc_id, offset, prov| { - let (size, align) = this - .get_live_alloc_size_and_align(alloc_id, CheckInAllocMsg::MemoryAccessTest)?; + let (size, align) = + this.get_live_alloc_size_and_align(alloc_id, CheckInAllocMsg::MemoryAccess)?; interp_ok((size, align, (alloc_id, offset, prov))) }, ) @@ -612,7 +613,7 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> { } Some(GlobalAlloc::Function { .. }) => throw_ub!(DerefFunctionPointer(id)), Some(GlobalAlloc::VTable(..)) => throw_ub!(DerefVTablePointer(id)), - None => throw_ub!(PointerUseAfterFree(id, CheckInAllocMsg::MemoryAccessTest)), + None => throw_ub!(PointerUseAfterFree(id, CheckInAllocMsg::MemoryAccess)), Some(GlobalAlloc::Static(def_id)) => { assert!(self.tcx.is_static(def_id)); // Thread-local statics do not have a constant address. They *must* be accessed via @@ -706,7 +707,7 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> { self, ptr, size_i64, - CheckInAllocMsg::MemoryAccessTest, + CheckInAllocMsg::MemoryAccess, |this, alloc_id, offset, prov| { let alloc = this.get_alloc_raw(alloc_id)?; interp_ok((alloc.size(), alloc.align, (alloc_id, offset, prov, alloc))) @@ -715,7 +716,7 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> { // We want to call the hook on *all* accesses that involve an AllocId, including zero-sized // accesses. That means we cannot rely on the closure above or the `Some` branch below. We // do this after `check_and_deref_ptr` to ensure some basic sanity has already been checked. - if !self.memory.validation_in_progress { + if !self.memory.validation_in_progress.get() { if let Ok((alloc_id, ..)) = self.ptr_try_get_alloc_id(ptr, size_i64) { M::before_alloc_read(self, alloc_id)?; } @@ -723,7 +724,7 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> { if let Some((alloc_id, offset, prov, alloc)) = ptr_and_alloc { let range = alloc_range(offset, size); - if !self.memory.validation_in_progress { + if !self.memory.validation_in_progress.get() { M::before_memory_read( self.tcx, &self.machine, @@ -801,14 +802,14 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> { ) -> InterpResult<'tcx, Option>> { let tcx = self.tcx; - let validation_in_progress = self.memory.validation_in_progress; + let validation_in_progress = self.memory.validation_in_progress.get(); let size_i64 = i64::try_from(size.bytes()).unwrap(); // it would be an error to even ask for more than isize::MAX bytes let ptr_and_alloc = Self::check_and_deref_ptr( self, ptr, size_i64, - CheckInAllocMsg::MemoryAccessTest, + CheckInAllocMsg::MemoryAccess, |this, alloc_id, offset, prov| { let (alloc, machine) = this.get_alloc_raw_mut(alloc_id)?; interp_ok((alloc.size(), alloc.align, (alloc_id, offset, prov, alloc, machine))) @@ -871,8 +872,21 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> { // # Function pointers // (both global from `alloc_map` and local from `extra_fn_ptr_map`) - if self.get_fn_alloc(id).is_some() { - return AllocInfo::new(Size::ZERO, Align::ONE, AllocKind::Function, Mutability::Not); + if let Some(fn_val) = self.get_fn_alloc(id) { + let align = match fn_val { + FnVal::Instance(instance) => { + // Function alignment can be set globally with the `-Zmin-function-alignment=` flag; + // the alignment from a `#[repr(align())]` is used if it specifies a higher alignment. + let fn_align = self.tcx.codegen_fn_attrs(instance.def_id()).alignment; + let global_align = self.tcx.sess.opts.unstable_opts.min_function_alignment; + + Ord::max(global_align, fn_align).unwrap_or(Align::ONE) + } + // Machine-specific extra functions currently do not support alignment restrictions. + FnVal::Other(_) => Align::ONE, + }; + + return AllocInfo::new(Size::ZERO, align, AllocKind::Function, Mutability::Not); } // # Global allocations @@ -1087,23 +1101,43 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> { /// /// We do this so Miri's allocation access tracking does not show the validation /// reads as spurious accesses. - pub fn run_for_validation(&mut self, f: impl FnOnce(&mut Self) -> R) -> R { + pub fn run_for_validation_mut(&mut self, f: impl FnOnce(&mut Self) -> R) -> R { + // This deliberately uses `==` on `bool` to follow the pattern + // `assert!(val.replace(new) == old)`. + assert!( + self.memory.validation_in_progress.replace(true) == false, + "`validation_in_progress` was already set" + ); + let res = f(self); + assert!( + self.memory.validation_in_progress.replace(false) == true, + "`validation_in_progress` was unset by someone else" + ); + res + } + + /// Runs the closure in "validation" mode, which means the machine's memory read hooks will be + /// suppressed. Needless to say, this must only be set with great care! Cannot be nested. + /// + /// We do this so Miri's allocation access tracking does not show the validation + /// reads as spurious accesses. + pub fn run_for_validation_ref(&self, f: impl FnOnce(&Self) -> R) -> R { // This deliberately uses `==` on `bool` to follow the pattern // `assert!(val.replace(new) == old)`. assert!( - mem::replace(&mut self.memory.validation_in_progress, true) == false, + self.memory.validation_in_progress.replace(true) == false, "`validation_in_progress` was already set" ); let res = f(self); assert!( - mem::replace(&mut self.memory.validation_in_progress, false) == true, + self.memory.validation_in_progress.replace(false) == true, "`validation_in_progress` was unset by someone else" ); res } pub(super) fn validation_in_progress(&self) -> bool { - self.memory.validation_in_progress + self.memory.validation_in_progress.get() } } @@ -1375,7 +1409,7 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> { }; let src_alloc = self.get_alloc_raw(src_alloc_id)?; let src_range = alloc_range(src_offset, size); - assert!(!self.memory.validation_in_progress, "we can't be copying during validation"); + assert!(!self.memory.validation_in_progress.get(), "we can't be copying during validation"); // For the overlapping case, it is crucial that we trigger the read hook // before the write hook -- the aliasing model cares about the order. M::before_memory_read( @@ -1581,7 +1615,7 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> { err_ub!(DanglingIntPointer { addr: offset, inbounds_size: size, - msg: CheckInAllocMsg::InboundsTest + msg: CheckInAllocMsg::Dereferenceable }) }) .into() diff --git a/compiler/rustc_const_eval/src/interpret/step.rs b/compiler/rustc_const_eval/src/interpret/step.rs index ddf2d65914f6c..363ceee1970ee 100644 --- a/compiler/rustc_const_eval/src/interpret/step.rs +++ b/compiler/rustc_const_eval/src/interpret/step.rs @@ -539,7 +539,11 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> { } } - Drop { place, target, unwind, replace: _ } => { + Drop { place, target, unwind, replace: _, drop, async_fut } => { + assert!( + async_fut.is_none() && drop.is_none(), + "Async Drop must be expanded or reset to sync in runtime MIR" + ); let place = self.eval_place(place)?; let instance = Instance::resolve_drop_in_place(*self.tcx, place.layout.ty); if let ty::InstanceKind::DropGlue(_, None) = instance.def { diff --git a/compiler/rustc_const_eval/src/interpret/validity.rs b/compiler/rustc_const_eval/src/interpret/validity.rs index eb3f552cd2787..8f39afa642aef 100644 --- a/compiler/rustc_const_eval/src/interpret/validity.rs +++ b/compiler/rustc_const_eval/src/interpret/validity.rs @@ -510,7 +510,7 @@ impl<'rt, 'tcx, M: Machine<'tcx>> ValidityVisitor<'rt, 'tcx, M> { self.ecx.check_ptr_access( place.ptr(), size, - CheckInAllocMsg::InboundsTest, // will anyway be replaced by validity message + CheckInAllocMsg::Dereferenceable, // will anyway be replaced by validity message ), self.path, Ub(DanglingIntPointer { addr: 0, .. }) => NullPtr { ptr_kind }, @@ -1248,6 +1248,14 @@ impl<'rt, 'tcx, M: Machine<'tcx>> ValueVisitor<'tcx, M> for ValidityVisitor<'rt, // Range patterns are precisely reflected into `valid_range` and thus // handled fully by `visit_scalar` (called below). ty::PatternKind::Range { .. } => {}, + + // FIXME(pattern_types): check that the value is covered by one of the variants. + // For now, we rely on layout computation setting the scalar's `valid_range` to + // match the pattern. However, this cannot always work; the layout may + // pessimistically cover actually illegal ranges and Miri would miss that UB. + // The consolation here is that codegen also will miss that UB, so at least + // we won't see optimizations actually breaking such programs. + ty::PatternKind::Or(_patterns) => {} } } _ => { @@ -1322,7 +1330,7 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> { trace!("validate_operand_internal: {:?}, {:?}", *val, val.layout.ty); // Run the visitor. - self.run_for_validation(|ecx| { + self.run_for_validation_mut(|ecx| { let reset_padding = reset_provenance_and_padding && { // Check if `val` is actually stored in memory. If not, padding is not even // represented and we need not reset it. diff --git a/compiler/rustc_const_eval/src/lib.rs b/compiler/rustc_const_eval/src/lib.rs index e03849c32f94f..7a0c2543c3002 100644 --- a/compiler/rustc_const_eval/src/lib.rs +++ b/compiler/rustc_const_eval/src/lib.rs @@ -1,13 +1,12 @@ // tidy-alphabetical-start #![allow(internal_features)] #![allow(rustc::diagnostic_outside_of_impl)] -#![cfg_attr(doc, recursion_limit = "256")] // FIXME(nnethercote): will be removed by #124141 +#![cfg_attr(bootstrap, feature(let_chains))] #![doc(rust_logo)] #![feature(assert_matches)] #![feature(box_patterns)] #![feature(decl_macro)] #![feature(if_let_guard)] -#![feature(let_chains)] #![feature(never_type)] #![feature(rustdoc_internals)] #![feature(slice_ptr_get)] diff --git a/compiler/rustc_const_eval/src/util/type_name.rs b/compiler/rustc_const_eval/src/util/type_name.rs index e14cd603c5828..30e96ae414359 100644 --- a/compiler/rustc_const_eval/src/util/type_name.rs +++ b/compiler/rustc_const_eval/src/util/type_name.rs @@ -56,7 +56,7 @@ impl<'tcx> Printer<'tcx> for AbsolutePathPrinter<'tcx> { | ty::Coroutine(def_id, args) => self.print_def_path(def_id, args), ty::Foreign(def_id) => self.print_def_path(def_id, &[]), - ty::Alias(ty::Weak, _) => bug!("type_name: unexpected weak projection"), + ty::Alias(ty::Free, _) => bug!("type_name: unexpected free alias"), ty::Alias(ty::Inherent, _) => bug!("type_name: unexpected inherent projection"), ty::CoroutineWitness(..) => bug!("type_name: unexpected `CoroutineWitness`"), } diff --git a/compiler/rustc_data_structures/Cargo.toml b/compiler/rustc_data_structures/Cargo.toml index fcaf2750507dd..f48c73b13b961 100644 --- a/compiler/rustc_data_structures/Cargo.toml +++ b/compiler/rustc_data_structures/Cargo.toml @@ -14,7 +14,7 @@ indexmap = "2.4.0" jobserver_crate = { version = "0.1.28", package = "jobserver" } measureme = "12.0.1" rustc-hash = "2.0.0" -rustc-rayon = { version = "0.5.1", features = ["indexmap"] } +rustc-rayon-core = { version = "0.5.0" } rustc-stable-hash = { version = "0.1.0", features = ["nightly"] } rustc_arena = { path = "../rustc_arena" } rustc_graphviz = { path = "../rustc_graphviz" } diff --git a/compiler/rustc_data_structures/src/fx.rs b/compiler/rustc_data_structures/src/fx.rs index 80e72250470c0..f0db9623b674e 100644 --- a/compiler/rustc_data_structures/src/fx.rs +++ b/compiler/rustc_data_structures/src/fx.rs @@ -9,6 +9,8 @@ pub type FxIndexSet = indexmap::IndexSet>; pub type IndexEntry<'a, K, V> = indexmap::map::Entry<'a, K, V>; pub type IndexOccupiedEntry<'a, K, V> = indexmap::map::OccupiedEntry<'a, K, V>; +pub use indexmap::set::MutableValues; + #[macro_export] macro_rules! define_id_collections { ($map_name:ident, $set_name:ident, $entry_name:ident, $key:ty) => { diff --git a/compiler/rustc_data_structures/src/graph/implementation/mod.rs b/compiler/rustc_data_structures/src/graph/implementation/mod.rs deleted file mode 100644 index a80365938b96c..0000000000000 --- a/compiler/rustc_data_structures/src/graph/implementation/mod.rs +++ /dev/null @@ -1,347 +0,0 @@ -//! A graph module for use in dataflow, region resolution, and elsewhere. -//! -//! # Interface details -//! -//! You customize the graph by specifying a "node data" type `N` and an -//! "edge data" type `E`. You can then later gain access (mutable or -//! immutable) to these "user-data" bits. Currently, you can only add -//! nodes or edges to the graph. You cannot remove or modify them once -//! added. This could be changed if we have a need. -//! -//! # Implementation details -//! -//! The main tricky thing about this code is the way that edges are -//! stored. The edges are stored in a central array, but they are also -//! threaded onto two linked lists for each node, one for incoming edges -//! and one for outgoing edges. Note that every edge is a member of some -//! incoming list and some outgoing list. Basically you can load the -//! first index of the linked list from the node data structures (the -//! field `first_edge`) and then, for each edge, load the next index from -//! the field `next_edge`). Each of those fields is an array that should -//! be indexed by the direction (see the type `Direction`). - -use std::fmt::Debug; - -use rustc_index::bit_set::DenseBitSet; -use tracing::debug; - -#[cfg(test)] -mod tests; - -pub struct Graph { - nodes: Vec>, - edges: Vec>, -} - -pub struct Node { - first_edge: [EdgeIndex; 2], // see module comment - pub data: N, -} - -#[derive(Debug)] -pub struct Edge { - next_edge: [EdgeIndex; 2], // see module comment - source: NodeIndex, - target: NodeIndex, - pub data: E, -} - -#[derive(Copy, Clone, PartialEq, Debug)] -pub struct NodeIndex(pub usize); - -#[derive(Copy, Clone, PartialEq, Debug)] -pub struct EdgeIndex(pub usize); - -pub const INVALID_EDGE_INDEX: EdgeIndex = EdgeIndex(usize::MAX); - -// Use a private field here to guarantee no more instances are created: -#[derive(Copy, Clone, Debug, PartialEq)] -pub struct Direction { - repr: usize, -} - -pub const OUTGOING: Direction = Direction { repr: 0 }; - -pub const INCOMING: Direction = Direction { repr: 1 }; - -impl NodeIndex { - /// Returns unique ID (unique with respect to the graph holding associated node). - pub fn node_id(self) -> usize { - self.0 - } -} - -impl Graph { - pub fn new() -> Graph { - Graph { nodes: Vec::new(), edges: Vec::new() } - } - - pub fn with_capacity(nodes: usize, edges: usize) -> Graph { - Graph { nodes: Vec::with_capacity(nodes), edges: Vec::with_capacity(edges) } - } - - // # Simple accessors - - #[inline] - pub fn all_nodes(&self) -> &[Node] { - &self.nodes - } - - #[inline] - pub fn len_nodes(&self) -> usize { - self.nodes.len() - } - - #[inline] - pub fn all_edges(&self) -> &[Edge] { - &self.edges - } - - #[inline] - pub fn len_edges(&self) -> usize { - self.edges.len() - } - - // # Node construction - - pub fn next_node_index(&self) -> NodeIndex { - NodeIndex(self.nodes.len()) - } - - pub fn add_node(&mut self, data: N) -> NodeIndex { - let idx = self.next_node_index(); - self.nodes.push(Node { first_edge: [INVALID_EDGE_INDEX, INVALID_EDGE_INDEX], data }); - idx - } - - pub fn mut_node_data(&mut self, idx: NodeIndex) -> &mut N { - &mut self.nodes[idx.0].data - } - - pub fn node_data(&self, idx: NodeIndex) -> &N { - &self.nodes[idx.0].data - } - - pub fn node(&self, idx: NodeIndex) -> &Node { - &self.nodes[idx.0] - } - - // # Edge construction and queries - - pub fn next_edge_index(&self) -> EdgeIndex { - EdgeIndex(self.edges.len()) - } - - pub fn add_edge(&mut self, source: NodeIndex, target: NodeIndex, data: E) -> EdgeIndex { - debug!("graph: add_edge({:?}, {:?}, {:?})", source, target, data); - - let idx = self.next_edge_index(); - - // read current first of the list of edges from each node - let source_first = self.nodes[source.0].first_edge[OUTGOING.repr]; - let target_first = self.nodes[target.0].first_edge[INCOMING.repr]; - - // create the new edge, with the previous firsts from each node - // as the next pointers - self.edges.push(Edge { next_edge: [source_first, target_first], source, target, data }); - - // adjust the firsts for each node target be the next object. - self.nodes[source.0].first_edge[OUTGOING.repr] = idx; - self.nodes[target.0].first_edge[INCOMING.repr] = idx; - - idx - } - - pub fn edge(&self, idx: EdgeIndex) -> &Edge { - &self.edges[idx.0] - } - - // # Iterating over nodes, edges - - pub fn enumerated_nodes(&self) -> impl Iterator)> { - self.nodes.iter().enumerate().map(|(idx, n)| (NodeIndex(idx), n)) - } - - pub fn enumerated_edges(&self) -> impl Iterator)> { - self.edges.iter().enumerate().map(|(idx, e)| (EdgeIndex(idx), e)) - } - - pub fn each_node<'a>(&'a self, mut f: impl FnMut(NodeIndex, &'a Node) -> bool) -> bool { - //! Iterates over all edges defined in the graph. - self.enumerated_nodes().all(|(node_idx, node)| f(node_idx, node)) - } - - pub fn each_edge<'a>(&'a self, mut f: impl FnMut(EdgeIndex, &'a Edge) -> bool) -> bool { - //! Iterates over all edges defined in the graph - self.enumerated_edges().all(|(edge_idx, edge)| f(edge_idx, edge)) - } - - pub fn outgoing_edges(&self, source: NodeIndex) -> AdjacentEdges<'_, N, E> { - self.adjacent_edges(source, OUTGOING) - } - - pub fn incoming_edges(&self, source: NodeIndex) -> AdjacentEdges<'_, N, E> { - self.adjacent_edges(source, INCOMING) - } - - pub fn adjacent_edges( - &self, - source: NodeIndex, - direction: Direction, - ) -> AdjacentEdges<'_, N, E> { - let first_edge = self.node(source).first_edge[direction.repr]; - AdjacentEdges { graph: self, direction, next: first_edge } - } - - pub fn successor_nodes(&self, source: NodeIndex) -> impl Iterator { - self.outgoing_edges(source).targets() - } - - pub fn predecessor_nodes(&self, target: NodeIndex) -> impl Iterator { - self.incoming_edges(target).sources() - } - - pub fn depth_traverse( - &self, - start: NodeIndex, - direction: Direction, - ) -> DepthFirstTraversal<'_, N, E> { - DepthFirstTraversal::with_start_node(self, start, direction) - } - - pub fn nodes_in_postorder( - &self, - direction: Direction, - entry_node: NodeIndex, - ) -> Vec { - let mut visited = DenseBitSet::new_empty(self.len_nodes()); - let mut stack = vec![]; - let mut result = Vec::with_capacity(self.len_nodes()); - let mut push_node = |stack: &mut Vec<_>, node: NodeIndex| { - if visited.insert(node.0) { - stack.push((node, self.adjacent_edges(node, direction))); - } - }; - - for node in - Some(entry_node).into_iter().chain(self.enumerated_nodes().map(|(node, _)| node)) - { - push_node(&mut stack, node); - while let Some((node, mut iter)) = stack.pop() { - if let Some((_, child)) = iter.next() { - let target = child.source_or_target(direction); - // the current node needs more processing, so - // add it back to the stack - stack.push((node, iter)); - // and then push the new node - push_node(&mut stack, target); - } else { - result.push(node); - } - } - } - - assert_eq!(result.len(), self.len_nodes()); - result - } -} - -// # Iterators - -pub struct AdjacentEdges<'g, N, E> { - graph: &'g Graph, - direction: Direction, - next: EdgeIndex, -} - -impl<'g, N: Debug, E: Debug> AdjacentEdges<'g, N, E> { - fn targets(self) -> impl Iterator { - self.map(|(_, edge)| edge.target) - } - - fn sources(self) -> impl Iterator { - self.map(|(_, edge)| edge.source) - } -} - -impl<'g, N: Debug, E: Debug> Iterator for AdjacentEdges<'g, N, E> { - type Item = (EdgeIndex, &'g Edge); - - fn next(&mut self) -> Option<(EdgeIndex, &'g Edge)> { - let edge_index = self.next; - if edge_index == INVALID_EDGE_INDEX { - return None; - } - - let edge = self.graph.edge(edge_index); - self.next = edge.next_edge[self.direction.repr]; - Some((edge_index, edge)) - } - - fn size_hint(&self) -> (usize, Option) { - // At most, all the edges in the graph. - (0, Some(self.graph.len_edges())) - } -} - -pub struct DepthFirstTraversal<'g, N, E> { - graph: &'g Graph, - stack: Vec, - visited: DenseBitSet, - direction: Direction, -} - -impl<'g, N: Debug, E: Debug> DepthFirstTraversal<'g, N, E> { - pub fn with_start_node( - graph: &'g Graph, - start_node: NodeIndex, - direction: Direction, - ) -> Self { - let mut visited = DenseBitSet::new_empty(graph.len_nodes()); - visited.insert(start_node.node_id()); - DepthFirstTraversal { graph, stack: vec![start_node], visited, direction } - } - - fn visit(&mut self, node: NodeIndex) { - if self.visited.insert(node.node_id()) { - self.stack.push(node); - } - } -} - -impl<'g, N: Debug, E: Debug> Iterator for DepthFirstTraversal<'g, N, E> { - type Item = NodeIndex; - - fn next(&mut self) -> Option { - let next = self.stack.pop(); - if let Some(idx) = next { - for (_, edge) in self.graph.adjacent_edges(idx, self.direction) { - let target = edge.source_or_target(self.direction); - self.visit(target); - } - } - next - } - - fn size_hint(&self) -> (usize, Option) { - // We will visit every node in the graph exactly once. - let remaining = self.graph.len_nodes() - self.visited.count(); - (remaining, Some(remaining)) - } -} - -impl<'g, N: Debug, E: Debug> ExactSizeIterator for DepthFirstTraversal<'g, N, E> {} - -impl Edge { - pub fn source(&self) -> NodeIndex { - self.source - } - - pub fn target(&self) -> NodeIndex { - self.target - } - - pub fn source_or_target(&self, direction: Direction) -> NodeIndex { - if direction == OUTGOING { self.target } else { self.source } - } -} diff --git a/compiler/rustc_data_structures/src/graph/implementation/tests.rs b/compiler/rustc_data_structures/src/graph/implementation/tests.rs deleted file mode 100644 index 32a6d9ec881a9..0000000000000 --- a/compiler/rustc_data_structures/src/graph/implementation/tests.rs +++ /dev/null @@ -1,132 +0,0 @@ -use tracing::debug; - -use crate::graph::implementation::*; - -type TestGraph = Graph<&'static str, &'static str>; - -fn create_graph() -> TestGraph { - let mut graph = Graph::new(); - - // Create a simple graph - // - // F - // | - // V - // A --> B --> C - // | ^ - // v | - // D --> E - - let a = graph.add_node("A"); - let b = graph.add_node("B"); - let c = graph.add_node("C"); - let d = graph.add_node("D"); - let e = graph.add_node("E"); - let f = graph.add_node("F"); - - graph.add_edge(a, b, "AB"); - graph.add_edge(b, c, "BC"); - graph.add_edge(b, d, "BD"); - graph.add_edge(d, e, "DE"); - graph.add_edge(e, c, "EC"); - graph.add_edge(f, b, "FB"); - - return graph; -} - -#[test] -fn each_node() { - let graph = create_graph(); - let expected = ["A", "B", "C", "D", "E", "F"]; - graph.each_node(|idx, node| { - assert_eq!(&expected[idx.0], graph.node_data(idx)); - assert_eq!(expected[idx.0], node.data); - true - }); -} - -#[test] -fn each_edge() { - let graph = create_graph(); - let expected = ["AB", "BC", "BD", "DE", "EC", "FB"]; - graph.each_edge(|idx, edge| { - assert_eq!(expected[idx.0], edge.data); - true - }); -} - -fn test_adjacent_edges( - graph: &Graph, - start_index: NodeIndex, - start_data: N, - expected_incoming: &[(E, N)], - expected_outgoing: &[(E, N)], -) { - assert!(graph.node_data(start_index) == &start_data); - - let mut counter = 0; - for (edge_index, edge) in graph.incoming_edges(start_index) { - assert!(counter < expected_incoming.len()); - debug!( - "counter={:?} expected={:?} edge_index={:?} edge={:?}", - counter, expected_incoming[counter], edge_index, edge - ); - match &expected_incoming[counter] { - (e, n) => { - assert!(e == &edge.data); - assert!(n == graph.node_data(edge.source())); - assert!(start_index == edge.target); - } - } - counter += 1; - } - assert_eq!(counter, expected_incoming.len()); - - let mut counter = 0; - for (edge_index, edge) in graph.outgoing_edges(start_index) { - assert!(counter < expected_outgoing.len()); - debug!( - "counter={:?} expected={:?} edge_index={:?} edge={:?}", - counter, expected_outgoing[counter], edge_index, edge - ); - match &expected_outgoing[counter] { - (e, n) => { - assert!(e == &edge.data); - assert!(start_index == edge.source); - assert!(n == graph.node_data(edge.target)); - } - } - counter += 1; - } - assert_eq!(counter, expected_outgoing.len()); -} - -#[test] -fn each_adjacent_from_a() { - let graph = create_graph(); - test_adjacent_edges(&graph, NodeIndex(0), "A", &[], &[("AB", "B")]); -} - -#[test] -fn each_adjacent_from_b() { - let graph = create_graph(); - test_adjacent_edges( - &graph, - NodeIndex(1), - "B", - &[("FB", "F"), ("AB", "A")], - &[("BD", "D"), ("BC", "C")], - ); -} - -#[test] -fn each_adjacent_from_c() { - let graph = create_graph(); - test_adjacent_edges(&graph, NodeIndex(2), "C", &[("EC", "E"), ("BC", "B")], &[]); -} - -#[test] -fn each_adjacent_from_d() { - let graph = create_graph(); - test_adjacent_edges(&graph, NodeIndex(3), "D", &[("BD", "B")], &[("DE", "E")]); -} diff --git a/compiler/rustc_data_structures/src/graph/linked_graph/mod.rs b/compiler/rustc_data_structures/src/graph/linked_graph/mod.rs new file mode 100644 index 0000000000000..ecb0095626b4a --- /dev/null +++ b/compiler/rustc_data_structures/src/graph/linked_graph/mod.rs @@ -0,0 +1,363 @@ +//! See [`LinkedGraph`]. +//! +//! # Interface details +//! +//! You customize the graph by specifying a "node data" type `N` and an +//! "edge data" type `E`. You can then later gain access (mutable or +//! immutable) to these "user-data" bits. Currently, you can only add +//! nodes or edges to the graph. You cannot remove or modify them once +//! added. This could be changed if we have a need. +//! +//! # Implementation details +//! +//! The main tricky thing about this code is the way that edges are +//! stored. The edges are stored in a central array, but they are also +//! threaded onto two linked lists for each node, one for incoming edges +//! and one for outgoing edges. Note that every edge is a member of some +//! incoming list and some outgoing list. Basically you can load the +//! first index of the linked list from the node data structures (the +//! field `first_edge`) and then, for each edge, load the next index from +//! the field `next_edge`). Each of those fields is an array that should +//! be indexed by the direction (see the type `Direction`). + +use std::fmt::Debug; + +use rustc_index::bit_set::DenseBitSet; +use tracing::debug; + +#[cfg(test)] +mod tests; + +/// A concrete graph implementation that supports: +/// - Nodes and/or edges labelled with custom data types (`N` and `E` respectively). +/// - Incremental addition of new nodes/edges (but not removal). +/// - Flat storage of node/edge data in a pair of vectors. +/// - Iteration over any node's out-edges or in-edges, via linked lists +/// threaded through the node/edge data. +/// +/// # Caution +/// This is an older graph implementation that is still used by some pieces +/// of diagnostic/debugging code. New code that needs a graph data structure +/// should consider using `VecGraph` instead, or implementing its own +/// special-purpose graph with the specific features needed. +/// +/// This graph implementation predates the later [graph traits](crate::graph), +/// and does not implement those traits, so it has its own implementations of a +/// few basic graph algorithms. +pub struct LinkedGraph { + nodes: Vec>, + edges: Vec>, +} + +pub struct Node { + first_edge: [EdgeIndex; 2], // see module comment + pub data: N, +} + +#[derive(Debug)] +pub struct Edge { + next_edge: [EdgeIndex; 2], // see module comment + source: NodeIndex, + target: NodeIndex, + pub data: E, +} + +#[derive(Copy, Clone, PartialEq, Debug)] +pub struct NodeIndex(pub usize); + +#[derive(Copy, Clone, PartialEq, Debug)] +pub struct EdgeIndex(pub usize); + +pub const INVALID_EDGE_INDEX: EdgeIndex = EdgeIndex(usize::MAX); + +// Use a private field here to guarantee no more instances are created: +#[derive(Copy, Clone, Debug, PartialEq)] +pub struct Direction { + repr: usize, +} + +pub const OUTGOING: Direction = Direction { repr: 0 }; + +pub const INCOMING: Direction = Direction { repr: 1 }; + +impl NodeIndex { + /// Returns unique ID (unique with respect to the graph holding associated node). + pub fn node_id(self) -> usize { + self.0 + } +} + +impl LinkedGraph { + pub fn new() -> Self { + Self { nodes: Vec::new(), edges: Vec::new() } + } + + pub fn with_capacity(nodes: usize, edges: usize) -> Self { + Self { nodes: Vec::with_capacity(nodes), edges: Vec::with_capacity(edges) } + } + + // # Simple accessors + + #[inline] + pub fn all_nodes(&self) -> &[Node] { + &self.nodes + } + + #[inline] + pub fn len_nodes(&self) -> usize { + self.nodes.len() + } + + #[inline] + pub fn all_edges(&self) -> &[Edge] { + &self.edges + } + + #[inline] + pub fn len_edges(&self) -> usize { + self.edges.len() + } + + // # Node construction + + pub fn next_node_index(&self) -> NodeIndex { + NodeIndex(self.nodes.len()) + } + + pub fn add_node(&mut self, data: N) -> NodeIndex { + let idx = self.next_node_index(); + self.nodes.push(Node { first_edge: [INVALID_EDGE_INDEX, INVALID_EDGE_INDEX], data }); + idx + } + + pub fn mut_node_data(&mut self, idx: NodeIndex) -> &mut N { + &mut self.nodes[idx.0].data + } + + pub fn node_data(&self, idx: NodeIndex) -> &N { + &self.nodes[idx.0].data + } + + pub fn node(&self, idx: NodeIndex) -> &Node { + &self.nodes[idx.0] + } + + // # Edge construction and queries + + pub fn next_edge_index(&self) -> EdgeIndex { + EdgeIndex(self.edges.len()) + } + + pub fn add_edge(&mut self, source: NodeIndex, target: NodeIndex, data: E) -> EdgeIndex { + debug!("graph: add_edge({:?}, {:?}, {:?})", source, target, data); + + let idx = self.next_edge_index(); + + // read current first of the list of edges from each node + let source_first = self.nodes[source.0].first_edge[OUTGOING.repr]; + let target_first = self.nodes[target.0].first_edge[INCOMING.repr]; + + // create the new edge, with the previous firsts from each node + // as the next pointers + self.edges.push(Edge { next_edge: [source_first, target_first], source, target, data }); + + // adjust the firsts for each node target be the next object. + self.nodes[source.0].first_edge[OUTGOING.repr] = idx; + self.nodes[target.0].first_edge[INCOMING.repr] = idx; + + idx + } + + pub fn edge(&self, idx: EdgeIndex) -> &Edge { + &self.edges[idx.0] + } + + // # Iterating over nodes, edges + + pub fn enumerated_nodes(&self) -> impl Iterator)> { + self.nodes.iter().enumerate().map(|(idx, n)| (NodeIndex(idx), n)) + } + + pub fn enumerated_edges(&self) -> impl Iterator)> { + self.edges.iter().enumerate().map(|(idx, e)| (EdgeIndex(idx), e)) + } + + pub fn each_node<'a>(&'a self, mut f: impl FnMut(NodeIndex, &'a Node) -> bool) -> bool { + //! Iterates over all edges defined in the graph. + self.enumerated_nodes().all(|(node_idx, node)| f(node_idx, node)) + } + + pub fn each_edge<'a>(&'a self, mut f: impl FnMut(EdgeIndex, &'a Edge) -> bool) -> bool { + //! Iterates over all edges defined in the graph + self.enumerated_edges().all(|(edge_idx, edge)| f(edge_idx, edge)) + } + + pub fn outgoing_edges(&self, source: NodeIndex) -> AdjacentEdges<'_, N, E> { + self.adjacent_edges(source, OUTGOING) + } + + pub fn incoming_edges(&self, source: NodeIndex) -> AdjacentEdges<'_, N, E> { + self.adjacent_edges(source, INCOMING) + } + + pub fn adjacent_edges( + &self, + source: NodeIndex, + direction: Direction, + ) -> AdjacentEdges<'_, N, E> { + let first_edge = self.node(source).first_edge[direction.repr]; + AdjacentEdges { graph: self, direction, next: first_edge } + } + + pub fn successor_nodes(&self, source: NodeIndex) -> impl Iterator { + self.outgoing_edges(source).targets() + } + + pub fn predecessor_nodes(&self, target: NodeIndex) -> impl Iterator { + self.incoming_edges(target).sources() + } + + pub fn depth_traverse( + &self, + start: NodeIndex, + direction: Direction, + ) -> DepthFirstTraversal<'_, N, E> { + DepthFirstTraversal::with_start_node(self, start, direction) + } + + pub fn nodes_in_postorder( + &self, + direction: Direction, + entry_node: NodeIndex, + ) -> Vec { + let mut visited = DenseBitSet::new_empty(self.len_nodes()); + let mut stack = vec![]; + let mut result = Vec::with_capacity(self.len_nodes()); + let mut push_node = |stack: &mut Vec<_>, node: NodeIndex| { + if visited.insert(node.0) { + stack.push((node, self.adjacent_edges(node, direction))); + } + }; + + for node in + Some(entry_node).into_iter().chain(self.enumerated_nodes().map(|(node, _)| node)) + { + push_node(&mut stack, node); + while let Some((node, mut iter)) = stack.pop() { + if let Some((_, child)) = iter.next() { + let target = child.source_or_target(direction); + // the current node needs more processing, so + // add it back to the stack + stack.push((node, iter)); + // and then push the new node + push_node(&mut stack, target); + } else { + result.push(node); + } + } + } + + assert_eq!(result.len(), self.len_nodes()); + result + } +} + +// # Iterators + +pub struct AdjacentEdges<'g, N, E> { + graph: &'g LinkedGraph, + direction: Direction, + next: EdgeIndex, +} + +impl<'g, N: Debug, E: Debug> AdjacentEdges<'g, N, E> { + fn targets(self) -> impl Iterator { + self.map(|(_, edge)| edge.target) + } + + fn sources(self) -> impl Iterator { + self.map(|(_, edge)| edge.source) + } +} + +impl<'g, N: Debug, E: Debug> Iterator for AdjacentEdges<'g, N, E> { + type Item = (EdgeIndex, &'g Edge); + + fn next(&mut self) -> Option<(EdgeIndex, &'g Edge)> { + let edge_index = self.next; + if edge_index == INVALID_EDGE_INDEX { + return None; + } + + let edge = self.graph.edge(edge_index); + self.next = edge.next_edge[self.direction.repr]; + Some((edge_index, edge)) + } + + fn size_hint(&self) -> (usize, Option) { + // At most, all the edges in the graph. + (0, Some(self.graph.len_edges())) + } +} + +pub struct DepthFirstTraversal<'g, N, E> { + graph: &'g LinkedGraph, + stack: Vec, + visited: DenseBitSet, + direction: Direction, +} + +impl<'g, N: Debug, E: Debug> DepthFirstTraversal<'g, N, E> { + pub fn with_start_node( + graph: &'g LinkedGraph, + start_node: NodeIndex, + direction: Direction, + ) -> Self { + let mut visited = DenseBitSet::new_empty(graph.len_nodes()); + visited.insert(start_node.node_id()); + DepthFirstTraversal { graph, stack: vec![start_node], visited, direction } + } + + fn visit(&mut self, node: NodeIndex) { + if self.visited.insert(node.node_id()) { + self.stack.push(node); + } + } +} + +impl<'g, N: Debug, E: Debug> Iterator for DepthFirstTraversal<'g, N, E> { + type Item = NodeIndex; + + fn next(&mut self) -> Option { + let next = self.stack.pop(); + if let Some(idx) = next { + for (_, edge) in self.graph.adjacent_edges(idx, self.direction) { + let target = edge.source_or_target(self.direction); + self.visit(target); + } + } + next + } + + fn size_hint(&self) -> (usize, Option) { + // We will visit every node in the graph exactly once. + let remaining = self.graph.len_nodes() - self.visited.count(); + (remaining, Some(remaining)) + } +} + +impl<'g, N: Debug, E: Debug> ExactSizeIterator for DepthFirstTraversal<'g, N, E> {} + +impl Edge { + pub fn source(&self) -> NodeIndex { + self.source + } + + pub fn target(&self) -> NodeIndex { + self.target + } + + pub fn source_or_target(&self, direction: Direction) -> NodeIndex { + if direction == OUTGOING { self.target } else { self.source } + } +} diff --git a/compiler/rustc_data_structures/src/graph/linked_graph/tests.rs b/compiler/rustc_data_structures/src/graph/linked_graph/tests.rs new file mode 100644 index 0000000000000..357aa81a57ca3 --- /dev/null +++ b/compiler/rustc_data_structures/src/graph/linked_graph/tests.rs @@ -0,0 +1,132 @@ +use tracing::debug; + +use super::{Debug, LinkedGraph, NodeIndex}; + +type TestGraph = LinkedGraph<&'static str, &'static str>; + +fn create_graph() -> TestGraph { + let mut graph = LinkedGraph::new(); + + // Create a simple graph + // + // F + // | + // V + // A --> B --> C + // | ^ + // v | + // D --> E + + let a = graph.add_node("A"); + let b = graph.add_node("B"); + let c = graph.add_node("C"); + let d = graph.add_node("D"); + let e = graph.add_node("E"); + let f = graph.add_node("F"); + + graph.add_edge(a, b, "AB"); + graph.add_edge(b, c, "BC"); + graph.add_edge(b, d, "BD"); + graph.add_edge(d, e, "DE"); + graph.add_edge(e, c, "EC"); + graph.add_edge(f, b, "FB"); + + return graph; +} + +#[test] +fn each_node() { + let graph = create_graph(); + let expected = ["A", "B", "C", "D", "E", "F"]; + graph.each_node(|idx, node| { + assert_eq!(&expected[idx.0], graph.node_data(idx)); + assert_eq!(expected[idx.0], node.data); + true + }); +} + +#[test] +fn each_edge() { + let graph = create_graph(); + let expected = ["AB", "BC", "BD", "DE", "EC", "FB"]; + graph.each_edge(|idx, edge| { + assert_eq!(expected[idx.0], edge.data); + true + }); +} + +fn test_adjacent_edges( + graph: &LinkedGraph, + start_index: NodeIndex, + start_data: N, + expected_incoming: &[(E, N)], + expected_outgoing: &[(E, N)], +) { + assert!(graph.node_data(start_index) == &start_data); + + let mut counter = 0; + for (edge_index, edge) in graph.incoming_edges(start_index) { + assert!(counter < expected_incoming.len()); + debug!( + "counter={:?} expected={:?} edge_index={:?} edge={:?}", + counter, expected_incoming[counter], edge_index, edge + ); + match &expected_incoming[counter] { + (e, n) => { + assert!(e == &edge.data); + assert!(n == graph.node_data(edge.source())); + assert!(start_index == edge.target); + } + } + counter += 1; + } + assert_eq!(counter, expected_incoming.len()); + + let mut counter = 0; + for (edge_index, edge) in graph.outgoing_edges(start_index) { + assert!(counter < expected_outgoing.len()); + debug!( + "counter={:?} expected={:?} edge_index={:?} edge={:?}", + counter, expected_outgoing[counter], edge_index, edge + ); + match &expected_outgoing[counter] { + (e, n) => { + assert!(e == &edge.data); + assert!(start_index == edge.source); + assert!(n == graph.node_data(edge.target)); + } + } + counter += 1; + } + assert_eq!(counter, expected_outgoing.len()); +} + +#[test] +fn each_adjacent_from_a() { + let graph = create_graph(); + test_adjacent_edges(&graph, NodeIndex(0), "A", &[], &[("AB", "B")]); +} + +#[test] +fn each_adjacent_from_b() { + let graph = create_graph(); + test_adjacent_edges( + &graph, + NodeIndex(1), + "B", + &[("FB", "F"), ("AB", "A")], + &[("BD", "D"), ("BC", "C")], + ); +} + +#[test] +fn each_adjacent_from_c() { + let graph = create_graph(); + test_adjacent_edges(&graph, NodeIndex(2), "C", &[("EC", "E"), ("BC", "B")], &[]); +} + +#[test] +fn each_adjacent_from_d() { + let graph = create_graph(); + test_adjacent_edges(&graph, NodeIndex(3), "D", &[("BD", "B")], &[("DE", "E")]); +} diff --git a/compiler/rustc_data_structures/src/graph/mod.rs b/compiler/rustc_data_structures/src/graph/mod.rs index 4a1e5db6768db..20416b472b210 100644 --- a/compiler/rustc_data_structures/src/graph/mod.rs +++ b/compiler/rustc_data_structures/src/graph/mod.rs @@ -1,8 +1,8 @@ use rustc_index::Idx; pub mod dominators; -pub mod implementation; pub mod iterate; +pub mod linked_graph; mod reference; pub mod reversed; pub mod scc; diff --git a/compiler/rustc_data_structures/src/graph/scc/mod.rs b/compiler/rustc_data_structures/src/graph/scc/mod.rs index e7c4ea3daae45..518817ea0f53a 100644 --- a/compiler/rustc_data_structures/src/graph/scc/mod.rs +++ b/compiler/rustc_data_structures/src/graph/scc/mod.rs @@ -10,10 +10,11 @@ use std::assert_matches::debug_assert_matches; use std::fmt::Debug; +use std::marker::PhantomData; use std::ops::Range; use rustc_index::{Idx, IndexSlice, IndexVec}; -use tracing::{debug, instrument}; +use tracing::{debug, instrument, trace}; use crate::fx::FxHashSet; use crate::graph::vec_graph::VecGraph; @@ -48,6 +49,25 @@ pub trait Annotation: Debug + Copy { } } +/// An accumulator for annotations. +pub trait Annotations { + type Ann: Annotation; + type SccIdx: Idx + Ord; + + fn new(&self, element: N) -> Self::Ann; + fn annotate_scc(&mut self, scc: Self::SccIdx, annotation: Self::Ann); +} + +/// The nil annotation accumulator, which does nothing. +struct NoAnnotations(PhantomData); + +impl Annotations for NoAnnotations { + type SccIdx = S; + type Ann = (); + fn new(&self, _element: N) {} + fn annotate_scc(&mut self, _scc: S, _annotation: ()) {} +} + /// The empty annotation, which does nothing. impl Annotation for () { fn merge_reached(self, _other: Self) -> Self { @@ -62,23 +82,20 @@ impl Annotation for () { /// the index type for the graph nodes and `S` is the index type for /// the SCCs. We can map from each node to the SCC that it /// participates in, and we also have the successors of each SCC. -pub struct Sccs { +pub struct Sccs { /// For each node, what is the SCC index of the SCC to which it /// belongs. scc_indices: IndexVec, /// Data about all the SCCs. - scc_data: SccData, + scc_data: SccData, } /// Information about an invidividual SCC node. -struct SccDetails { +struct SccDetails { /// For this SCC, the range of `all_successors` where its /// successors can be found. range: Range, - - /// User-specified metadata about the SCC. - annotation: A, } // The name of this struct should discourage you from making it public and leaking @@ -87,10 +104,10 @@ struct SccDetails { // is difficult when it's publicly inspectable. // // Obey the law of Demeter! -struct SccData { +struct SccData { /// Maps SCC indices to their metadata, including /// offsets into `all_successors`. - scc_details: IndexVec>, + scc_details: IndexVec, /// Contains the successors for all the Sccs, concatenated. The /// range of indices corresponding to a given SCC is found in its @@ -98,24 +115,18 @@ struct SccData { all_successors: Vec, } -impl Sccs { +impl Sccs { /// Compute SCCs without annotations. pub fn new(graph: &impl Successors) -> Self { - Self::new_with_annotation(graph, |_| ()) + Self::new_with_annotation(graph, &mut NoAnnotations(PhantomData::)) } -} -impl Sccs { /// Compute SCCs and annotate them with a user-supplied annotation - pub fn new_with_annotation A>( + pub fn new_with_annotation>( graph: &impl Successors, - to_annotation: F, + annotations: &mut A, ) -> Self { - SccsConstruction::construct(graph, to_annotation) - } - - pub fn annotation(&self, scc: S) -> A { - self.scc_data.annotation(scc) + SccsConstruction::construct(graph, annotations) } pub fn scc_indices(&self) -> &IndexSlice { @@ -160,7 +171,7 @@ impl Sccs { } } -impl DirectedGraph for Sccs { +impl DirectedGraph for Sccs { type Node = S; fn num_nodes(&self) -> usize { @@ -168,19 +179,19 @@ impl DirectedGraph for Sccs { } } -impl NumEdges for Sccs { +impl NumEdges for Sccs { fn num_edges(&self) -> usize { self.scc_data.all_successors.len() } } -impl Successors for Sccs { +impl Successors for Sccs { fn successors(&self, node: S) -> impl Iterator { self.successors(node).iter().cloned() } } -impl SccData { +impl SccData { /// Number of SCCs, fn len(&self) -> usize { self.scc_details.len() @@ -192,9 +203,8 @@ impl SccData { } /// Creates a new SCC with `successors` as its successors and - /// the maximum weight of its internal nodes `scc_max_weight` and /// returns the resulting index. - fn create_scc(&mut self, successors: impl IntoIterator, annotation: A) -> S { + fn create_scc(&mut self, successors: impl IntoIterator) -> S { // Store the successors on `scc_successors_vec`, remembering // the range of indices. let all_successors_start = self.all_successors.len(); @@ -202,35 +212,28 @@ impl SccData { let all_successors_end = self.all_successors.len(); debug!( - "create_scc({:?}) successors={:?}, annotation={:?}", + "create_scc({:?}) successors={:?}", self.len(), &self.all_successors[all_successors_start..all_successors_end], - annotation ); let range = all_successors_start..all_successors_end; - let metadata = SccDetails { range, annotation }; + let metadata = SccDetails { range }; self.scc_details.push(metadata) } - - fn annotation(&self, scc: S) -> A { - self.scc_details[scc].annotation - } } -struct SccsConstruction<'c, G, S, A, F> +struct SccsConstruction<'c, 'a, G, A> where G: DirectedGraph + Successors, - S: Idx, - A: Annotation, - F: Fn(G::Node) -> A, + A: Annotations, { graph: &'c G, /// The state of each node; used during walk to record the stack /// and after walk to record what cycle each node ended up being /// in. - node_states: IndexVec>, + node_states: IndexVec>, /// The stack of nodes that we are visiting as part of the DFS. node_stack: Vec, @@ -239,23 +242,21 @@ where /// position in this stack, and when we encounter a successor SCC, /// we push it on the stack. When we complete an SCC, we can pop /// everything off the stack that was found along the way. - successors_stack: Vec, + successors_stack: Vec, /// A set used to strip duplicates. As we accumulate successors /// into the successors_stack, we sometimes get duplicate entries. /// We use this set to remove those -- we also keep its storage /// around between successors to amortize memory allocation costs. - duplicate_set: FxHashSet, + duplicate_set: FxHashSet, - scc_data: SccData, + scc_data: SccData, - /// A function that constructs an initial SCC annotation - /// out of a single node. - to_annotation: F, + annotations: &'a mut A, } #[derive(Copy, Clone, Debug)] -enum NodeState { +enum NodeState { /// This node has not yet been visited as part of the DFS. /// /// After SCC construction is complete, this state ought to be @@ -286,7 +287,7 @@ enum NodeState { /// The state of walking a given node. #[derive(Copy, Clone, Debug)] -enum WalkReturn { +enum WalkReturn { /// The walk found a cycle, but the entire component is not known to have /// been fully walked yet. We only know the minimum depth of this /// component in a minimum spanning tree of the graph. This component @@ -299,12 +300,10 @@ enum WalkReturn { Complete { scc_index: S, annotation: A }, } -impl<'c, G, S, A, F> SccsConstruction<'c, G, S, A, F> +impl<'c, 'a, G, A> SccsConstruction<'c, 'a, G, A> where G: DirectedGraph + Successors, - S: Idx, - F: Fn(G::Node) -> A, - A: Annotation, + A: Annotations, { /// Identifies SCCs in the graph `G` and computes the resulting /// DAG. This uses a variant of [Tarjan's @@ -320,7 +319,7 @@ where /// Additionally, we keep track of a current annotation of the SCC. /// /// [wikipedia]: https://bit.ly/2EZIx84 - fn construct(graph: &'c G, to_annotation: F) -> Sccs { + fn construct(graph: &'c G, annotations: &'a mut A) -> Sccs { let num_nodes = graph.num_nodes(); let mut this = Self { @@ -330,7 +329,7 @@ where successors_stack: Vec::new(), scc_data: SccData { scc_details: IndexVec::new(), all_successors: Vec::new() }, duplicate_set: FxHashSet::default(), - to_annotation, + annotations, }; let scc_indices = graph @@ -346,7 +345,7 @@ where Sccs { scc_indices, scc_data: this.scc_data } } - fn start_walk_from(&mut self, node: G::Node) -> WalkReturn { + fn start_walk_from(&mut self, node: G::Node) -> WalkReturn { self.inspect_node(node).unwrap_or_else(|| self.walk_unvisited_node(node)) } @@ -362,7 +361,7 @@ where /// Otherwise, we are looking at a node that has already been /// completely visited. We therefore return `WalkReturn::Complete` /// with its associated SCC index. - fn inspect_node(&mut self, node: G::Node) -> Option> { + fn inspect_node(&mut self, node: G::Node) -> Option> { Some(match self.find_state(node) { NodeState::InCycle { scc_index, annotation } => { WalkReturn::Complete { scc_index, annotation } @@ -385,7 +384,7 @@ where /// of `r2` (and updates `r` to reflect current result). This is /// basically the "find" part of a standard union-find algorithm /// (with path compression). - fn find_state(&mut self, mut node: G::Node) -> NodeState { + fn find_state(&mut self, mut node: G::Node) -> NodeState { // To avoid recursion we temporarily reuse the `parent` of each // InCycleWith link to encode a downwards link while compressing // the path. After we have found the root or deepest node being @@ -408,7 +407,7 @@ where // a potentially derived version of the root state for non-root nodes in the chain. let (root_state, assigned_state) = { loop { - debug!("find_state(r = {node:?} in state {:?})", self.node_states[node]); + trace!("find_state(r = {node:?} in state {:?})", self.node_states[node]); match self.node_states[node] { // This must have been the first and only state since it is unexplored*; // no update needed! * Unless there is a bug :') @@ -482,7 +481,7 @@ where if previous_node == node { return root_state; } - debug!("Compressing {node:?} down to {previous_node:?} with state {assigned_state:?}"); + trace!("Compressing {node:?} down to {previous_node:?} with state {assigned_state:?}"); // Update to previous node in the link. match self.node_states[previous_node] { @@ -507,9 +506,9 @@ where /// Call this method when `inspect_node` has returned `None`. Having the /// caller decide avoids mutual recursion between the two methods and allows /// us to maintain an allocated stack for nodes on the path between calls. - #[instrument(skip(self, initial), level = "debug")] - fn walk_unvisited_node(&mut self, initial: G::Node) -> WalkReturn { - debug!("Walk unvisited node: {initial:?}"); + #[instrument(skip(self, initial), level = "trace")] + fn walk_unvisited_node(&mut self, initial: G::Node) -> WalkReturn { + trace!("Walk unvisited node: {initial:?}"); struct VisitingNodeFrame { node: G::Node, successors: Option, @@ -537,7 +536,7 @@ where successors_len: 0, min_cycle_root: initial, successor_node: initial, - current_component_annotation: (self.to_annotation)(initial), + current_component_annotation: self.annotations.new(initial), }]; let mut return_value = None; @@ -556,11 +555,7 @@ where let node = *node; let depth = *depth; - // node is definitely in the current component, add it to the annotation. - if node != initial { - current_component_annotation.update_scc((self.to_annotation)(node)); - } - debug!( + trace!( "Visiting {node:?} at depth {depth:?}, annotation: {current_component_annotation:?}" ); @@ -568,7 +563,7 @@ where Some(successors) => successors, None => { // This None marks that we still have the initialize this node's frame. - debug!(?depth, ?node); + trace!(?depth, ?node); debug_assert_matches!(self.node_states[node], NodeState::NotVisited); @@ -598,7 +593,7 @@ where return_value.take().into_iter().map(|walk| (*successor_node, Some(walk))); let successor_walk = successors.map(|successor_node| { - debug!(?node, ?successor_node); + trace!(?node, ?successor_node); (successor_node, self.inspect_node(successor_node)) }); for (successor_node, walk) in returned_walk.chain(successor_walk) { @@ -609,13 +604,13 @@ where min_depth: successor_min_depth, annotation: successor_annotation, }) => { - debug!( + trace!( "Cycle found from {node:?}, minimum depth: {successor_min_depth:?}, annotation: {successor_annotation:?}" ); // Track the minimum depth we can reach. assert!(successor_min_depth <= depth); if successor_min_depth < *min_depth { - debug!(?node, ?successor_min_depth); + trace!(?node, ?successor_min_depth); *min_depth = successor_min_depth; *min_cycle_root = successor_node; } @@ -627,20 +622,20 @@ where scc_index: successor_scc_index, annotation: successor_annotation, }) => { - debug!( + trace!( "Complete; {node:?} is root of complete-visited SCC idx {successor_scc_index:?} with annotation {successor_annotation:?}" ); // Push the completed SCC indices onto // the `successors_stack` for later. - debug!(?node, ?successor_scc_index); + trace!(?node, ?successor_scc_index); successors_stack.push(successor_scc_index); current_component_annotation.update_reachable(successor_annotation); } // `node` has no more (direct) successors; search recursively. None => { let depth = depth + 1; - debug!("Recursing down into {successor_node:?} at depth {depth:?}"); - debug!(?depth, ?successor_node); + trace!("Recursing down into {successor_node:?} at depth {depth:?}"); + trace!(?depth, ?successor_node); // Remember which node the return value will come from. frame.successor_node = successor_node; // Start a new stack frame, then step into it. @@ -652,14 +647,14 @@ where min_depth: depth, min_cycle_root: successor_node, successor_node, - current_component_annotation: (self.to_annotation)(successor_node), + current_component_annotation: self.annotations.new(successor_node), }); continue 'recurse; } } } - debug!("Finished walk from {node:?} with annotation: {current_component_annotation:?}"); + trace!("Finished walk from {node:?} with annotation: {current_component_annotation:?}"); // Completed walk, remove `node` from the stack. let r = self.node_stack.pop(); @@ -691,8 +686,9 @@ where debug!("Creating SCC rooted in {node:?} with successor {:?}", frame.successor_node); - let scc_index = - self.scc_data.create_scc(deduplicated_successors, current_component_annotation); + let scc_index = self.scc_data.create_scc(deduplicated_successors); + + self.annotations.annotate_scc(scc_index, current_component_annotation); self.node_states[node] = NodeState::InCycle { scc_index, annotation: current_component_annotation }; diff --git a/compiler/rustc_data_structures/src/graph/scc/tests.rs b/compiler/rustc_data_structures/src/graph/scc/tests.rs index 373f87bfdbcfa..8f04baf51f355 100644 --- a/compiler/rustc_data_structures/src/graph/scc/tests.rs +++ b/compiler/rustc_data_structures/src/graph/scc/tests.rs @@ -5,8 +5,31 @@ use crate::graph::tests::TestGraph; #[derive(Copy, Clone, Debug)] struct MaxReached(usize); -type UsizeSccs = Sccs; -type MaxReachedSccs = Sccs; +struct Maxes(IndexVec, fn(usize) -> usize); +type UsizeSccs = Sccs; + +impl Annotations for Maxes { + fn new(&self, element: usize) -> MaxReached { + MaxReached(self.1(element)) + } + + fn annotate_scc(&mut self, scc: usize, annotation: MaxReached) { + let i = self.0.push(annotation); + assert!(i == scc); + } + + type Ann = MaxReached; + type SccIdx = usize; +} + +impl Maxes { + fn annotation(&self, scc: usize) -> MaxReached { + self.0[scc] + } + fn new(mapping: fn(usize) -> usize) -> Self { + Self(IndexVec::new(), mapping) + } +} impl Annotation for MaxReached { fn merge_scc(self, other: Self) -> Self { @@ -14,7 +37,7 @@ impl Annotation for MaxReached { } fn merge_reached(self, other: Self) -> Self { - self.merge_scc(other) + Self(std::cmp::max(other.0, self.0)) } } @@ -24,17 +47,32 @@ impl PartialEq for MaxReached { } } -impl MaxReached { - fn from_usize(nr: usize) -> Self { - Self(nr) - } -} - #[derive(Copy, Clone, Debug)] struct MinMaxIn { min: usize, max: usize, } +struct MinMaxes(IndexVec, fn(usize) -> MinMaxIn); + +impl MinMaxes { + fn annotation(&self, scc: usize) -> MinMaxIn { + self.0[scc] + } +} + +impl Annotations for MinMaxes { + fn new(&self, element: usize) -> MinMaxIn { + self.1(element) + } + + fn annotate_scc(&mut self, scc: usize, annotation: MinMaxIn) { + let i = self.0.push(annotation); + assert!(i == scc); + } + + type Ann = MinMaxIn; + type SccIdx = usize; +} impl Annotation for MinMaxIn { fn merge_scc(self, other: Self) -> Self { @@ -261,67 +299,68 @@ fn bench_sccc(b: &mut test::Bencher) { #[test] fn test_max_self_loop() { let graph = TestGraph::new(0, &[(0, 0)]); - let sccs: MaxReachedSccs = - Sccs::new_with_annotation(&graph, |n| if n == 0 { MaxReached(17) } else { MaxReached(0) }); - assert_eq!(sccs.annotation(0), 17); + let mut annotations = Maxes(IndexVec::new(), |n| if n == 0 { 17 } else { 0 }); + Sccs::new_with_annotation(&graph, &mut annotations); + assert_eq!(annotations.0[0], 17); } #[test] fn test_max_branch() { let graph = TestGraph::new(0, &[(0, 1), (0, 2), (1, 3), (2, 4)]); - let sccs: MaxReachedSccs = Sccs::new_with_annotation(&graph, MaxReached::from_usize); - assert_eq!(sccs.annotation(sccs.scc(0)), 4); - assert_eq!(sccs.annotation(sccs.scc(1)), 3); - assert_eq!(sccs.annotation(sccs.scc(2)), 4); + let mut annotations = Maxes(IndexVec::new(), |n| n); + let sccs = Sccs::new_with_annotation(&graph, &mut annotations); + assert_eq!(annotations.0[sccs.scc(0)], 4); + assert_eq!(annotations.0[sccs.scc(1)], 3); + assert_eq!(annotations.0[sccs.scc(2)], 4); } + #[test] fn test_single_cycle_max() { let graph = TestGraph::new(0, &[(0, 2), (2, 3), (2, 4), (4, 1), (1, 2)]); - let sccs: MaxReachedSccs = Sccs::new_with_annotation(&graph, MaxReached::from_usize); - assert_eq!(sccs.annotation(sccs.scc(2)), 4); - assert_eq!(sccs.annotation(sccs.scc(0)), 4); -} - -#[test] -fn test_simple_cycle_max() { - let graph = TestGraph::new(0, &[(0, 1), (1, 2), (2, 0)]); - let sccs: MaxReachedSccs = Sccs::new_with_annotation(&graph, MaxReached::from_usize); - assert_eq!(sccs.num_sccs(), 1); + let mut annotations = Maxes(IndexVec::new(), |n| n); + let sccs = Sccs::new_with_annotation(&graph, &mut annotations); + assert_eq!(annotations.0[sccs.scc(2)], 4); + assert_eq!(annotations.0[sccs.scc(0)], 4); } #[test] fn test_double_cycle_max() { let graph = TestGraph::new(0, &[(0, 1), (1, 2), (1, 4), (2, 3), (2, 4), (3, 5), (4, 1), (5, 4)]); - let sccs: MaxReachedSccs = - Sccs::new_with_annotation(&graph, |n| if n == 5 { MaxReached(2) } else { MaxReached(1) }); + let mut annotations = Maxes(IndexVec::new(), |n| if n == 5 { 2 } else { 1 }); + + let sccs = Sccs::new_with_annotation(&graph, &mut annotations); - assert_eq!(sccs.annotation(sccs.scc(0)).0, 2); + assert_eq!(annotations.0[sccs.scc(0)].0, 2); } #[test] fn test_bug_minimised() { let graph = TestGraph::new(0, &[(0, 3), (0, 1), (3, 2), (2, 3), (1, 4), (4, 5), (5, 4)]); - let sccs: MaxReachedSccs = Sccs::new_with_annotation(&graph, |n| match n { - 3 => MaxReached(1), - _ => MaxReached(0), + let mut annotations = Maxes(IndexVec::new(), |n| match n { + 3 => 1, + _ => 0, }); - assert_eq!(sccs.annotation(sccs.scc(2)), 1); - assert_eq!(sccs.annotation(sccs.scc(1)), 0); - assert_eq!(sccs.annotation(sccs.scc(4)), 0); + + let sccs = Sccs::new_with_annotation(&graph, &mut annotations); + assert_eq!(annotations.annotation(sccs.scc(2)), 1); + assert_eq!(annotations.annotation(sccs.scc(1)), 0); + assert_eq!(annotations.annotation(sccs.scc(4)), 0); } #[test] fn test_bug_max_leak_minimised() { let graph = TestGraph::new(0, &[(0, 1), (0, 2), (1, 3), (3, 0), (3, 4), (4, 3)]); - let sccs: MaxReachedSccs = Sccs::new_with_annotation(&graph, |w| match w { - 4 => MaxReached(1), - _ => MaxReached(0), + let mut annotations = Maxes(IndexVec::new(), |w| match w { + 4 => 1, + _ => 0, }); - assert_eq!(sccs.annotation(sccs.scc(2)), 0); - assert_eq!(sccs.annotation(sccs.scc(3)), 1); - assert_eq!(sccs.annotation(sccs.scc(0)), 1); + let sccs = Sccs::new_with_annotation(&graph, &mut annotations); + + assert_eq!(annotations.annotation(sccs.scc(2)), 0); + assert_eq!(annotations.annotation(sccs.scc(3)), 1); + assert_eq!(annotations.annotation(sccs.scc(0)), 1); } #[test] @@ -369,48 +408,49 @@ fn test_bug_max_leak() { (23, 24), ], ); - let sccs: MaxReachedSccs = Sccs::new_with_annotation(&graph, |w| match w { - 22 => MaxReached(1), - 24 => MaxReached(2), - 27 => MaxReached(2), - _ => MaxReached(0), + let mut annotations = Maxes::new(|w| match w { + 22 => 1, + 24 => 2, + 27 => 2, + _ => 0, }); - - assert_eq!(sccs.annotation(sccs.scc(2)), 0); - assert_eq!(sccs.annotation(sccs.scc(7)), 0); - assert_eq!(sccs.annotation(sccs.scc(8)), 2); - assert_eq!(sccs.annotation(sccs.scc(23)), 2); - assert_eq!(sccs.annotation(sccs.scc(3)), 2); - assert_eq!(sccs.annotation(sccs.scc(0)), 2); + let sccs = Sccs::new_with_annotation(&graph, &mut annotations); + + assert_eq!(annotations.annotation(sccs.scc(2)), 0); + assert_eq!(annotations.annotation(sccs.scc(7)), 0); + assert_eq!(annotations.annotation(sccs.scc(8)), 2); + assert_eq!(annotations.annotation(sccs.scc(23)), 2); + assert_eq!(annotations.annotation(sccs.scc(3)), 2); + assert_eq!(annotations.annotation(sccs.scc(0)), 2); } #[test] fn test_bug_max_zero_stick_shape() { let graph = TestGraph::new(0, &[(0, 1), (1, 2), (2, 3), (3, 2), (3, 4)]); - - let sccs: MaxReachedSccs = Sccs::new_with_annotation(&graph, |w| match w { - 4 => MaxReached(1), - _ => MaxReached(0), + let mut annotations = Maxes::new(|w| match w { + 4 => 1, + _ => 0, }); + let sccs = Sccs::new_with_annotation(&graph, &mut annotations); - assert_eq!(sccs.annotation(sccs.scc(0)), 1); - assert_eq!(sccs.annotation(sccs.scc(1)), 1); - assert_eq!(sccs.annotation(sccs.scc(2)), 1); - assert_eq!(sccs.annotation(sccs.scc(3)), 1); - assert_eq!(sccs.annotation(sccs.scc(4)), 1); + assert_eq!(annotations.annotation(sccs.scc(0)), 1); + assert_eq!(annotations.annotation(sccs.scc(1)), 1); + assert_eq!(annotations.annotation(sccs.scc(2)), 1); + assert_eq!(annotations.annotation(sccs.scc(3)), 1); + assert_eq!(annotations.annotation(sccs.scc(4)), 1); } #[test] fn test_min_max_in() { let graph = TestGraph::new(0, &[(0, 1), (0, 2), (1, 3), (3, 0), (3, 4), (4, 3), (3, 5)]); - let sccs: Sccs = - Sccs::new_with_annotation(&graph, |w| MinMaxIn { min: w, max: w }); - - assert_eq!(sccs.annotation(sccs.scc(2)).min, 2); - assert_eq!(sccs.annotation(sccs.scc(2)).max, 2); - assert_eq!(sccs.annotation(sccs.scc(0)).min, 0); - assert_eq!(sccs.annotation(sccs.scc(0)).max, 4); - assert_eq!(sccs.annotation(sccs.scc(3)).min, 0); - assert_eq!(sccs.annotation(sccs.scc(3)).max, 4); - assert_eq!(sccs.annotation(sccs.scc(5)).min, 5); + let mut annotations = MinMaxes(IndexVec::new(), |w| MinMaxIn { min: w, max: w }); + let sccs = Sccs::new_with_annotation(&graph, &mut annotations); + + assert_eq!(annotations.annotation(sccs.scc(2)).min, 2); + assert_eq!(annotations.annotation(sccs.scc(2)).max, 2); + assert_eq!(annotations.annotation(sccs.scc(0)).min, 0); + assert_eq!(annotations.annotation(sccs.scc(0)).max, 4); + assert_eq!(annotations.annotation(sccs.scc(3)).min, 0); + assert_eq!(annotations.annotation(sccs.scc(3)).max, 4); + assert_eq!(annotations.annotation(sccs.scc(5)).min, 5); } diff --git a/compiler/rustc_data_structures/src/jobserver.rs b/compiler/rustc_data_structures/src/jobserver.rs index 1204f2d692d6c..3ed1ea7543f40 100644 --- a/compiler/rustc_data_structures/src/jobserver.rs +++ b/compiler/rustc_data_structures/src/jobserver.rs @@ -1,7 +1,8 @@ -use std::sync::{LazyLock, OnceLock}; +use std::sync::{Arc, LazyLock, OnceLock}; pub use jobserver_crate::{Acquired, Client, HelperThread}; use jobserver_crate::{FromEnv, FromEnvErrorKind}; +use parking_lot::{Condvar, Mutex}; // We can only call `from_env_ext` once per process @@ -71,10 +72,93 @@ pub fn client() -> Client { GLOBAL_CLIENT_CHECKED.get().expect(ACCESS_ERROR).clone() } -pub fn acquire_thread() { - GLOBAL_CLIENT_CHECKED.get().expect(ACCESS_ERROR).acquire_raw().ok(); +struct ProxyData { + /// The number of tokens assigned to threads. + /// If this is 0, a single token is still assigned to this process, but is unused. + used: u16, + + /// The number of threads requesting a token + pending: u16, +} + +/// This is a jobserver proxy used to ensure that we hold on to at least one token. +pub struct Proxy { + client: Client, + data: Mutex, + + /// Threads which are waiting on a token will wait on this. + wake_pending: Condvar, + + helper: OnceLock, } -pub fn release_thread() { - GLOBAL_CLIENT_CHECKED.get().expect(ACCESS_ERROR).release_raw().ok(); +impl Proxy { + pub fn new() -> Arc { + let proxy = Arc::new(Proxy { + client: client(), + data: Mutex::new(ProxyData { used: 1, pending: 0 }), + wake_pending: Condvar::new(), + helper: OnceLock::new(), + }); + let proxy_ = Arc::clone(&proxy); + let helper = proxy + .client + .clone() + .into_helper_thread(move |token| { + if let Ok(token) = token { + let mut data = proxy_.data.lock(); + if data.pending > 0 { + // Give the token to a waiting thread + token.drop_without_releasing(); + assert!(data.used > 0); + data.used += 1; + data.pending -= 1; + proxy_.wake_pending.notify_one(); + } else { + // The token is no longer needed, drop it. + drop(data); + drop(token); + } + } + }) + .expect("failed to create helper thread"); + proxy.helper.set(helper).unwrap(); + proxy + } + + pub fn acquire_thread(&self) { + let mut data = self.data.lock(); + + if data.used == 0 { + // There was a free token around. This can + // happen when all threads release their token. + assert_eq!(data.pending, 0); + data.used += 1; + } else { + // Request a token from the helper thread. We can't directly use `acquire_raw` + // as we also need to be able to wait for the final token in the process which + // does not get a corresponding `release_raw` call. + self.helper.get().unwrap().request_token(); + data.pending += 1; + self.wake_pending.wait(&mut data); + } + } + + pub fn release_thread(&self) { + let mut data = self.data.lock(); + + if data.pending > 0 { + // Give the token to a waiting thread + data.pending -= 1; + self.wake_pending.notify_one(); + } else { + data.used -= 1; + + // Release the token unless it's the last one in the process + if data.used > 0 { + drop(data); + self.client.release_raw().ok(); + } + } + } } diff --git a/compiler/rustc_data_structures/src/marker.rs b/compiler/rustc_data_structures/src/marker.rs index 64c64bfa3c296..e0df1b232e134 100644 --- a/compiler/rustc_data_structures/src/marker.rs +++ b/compiler/rustc_data_structures/src/marker.rs @@ -1,13 +1,13 @@ use std::alloc::Allocator; -#[rustc_on_unimplemented(message = "`{Self}` doesn't implement `DynSend`. \ +#[diagnostic::on_unimplemented(message = "`{Self}` doesn't implement `DynSend`. \ Add it to `rustc_data_structures::marker` or use `IntoDynSyncSend` if it's already `Send`")] // This is an auto trait for types which can be sent across threads if `sync::is_dyn_thread_safe()` // is true. These types can be wrapped in a `FromDyn` to get a `Send` type. Wrapping a // `Send` type in `IntoDynSyncSend` will create a `DynSend` type. pub unsafe auto trait DynSend {} -#[rustc_on_unimplemented(message = "`{Self}` doesn't implement `DynSync`. \ +#[diagnostic::on_unimplemented(message = "`{Self}` doesn't implement `DynSync`. \ Add it to `rustc_data_structures::marker` or use `IntoDynSyncSend` if it's already `Sync`")] // This is an auto trait for types which can be shared across threads if `sync::is_dyn_thread_safe()` // is true. These types can be wrapped in a `FromDyn` to get a `Sync` type. Wrapping a @@ -39,8 +39,15 @@ impls_dyn_send_neg!( [std::io::StderrLock<'_>] ); -#[cfg(any(unix, target_os = "hermit", target_os = "wasi", target_os = "solid_asp3"))] -// Consistent with `std`, `os_imp::Env` is `!Sync` in these platforms +#[cfg(any( + unix, + target_os = "hermit", + all(target_vendor = "fortanix", target_env = "sgx"), + target_os = "solid_asp3", + target_os = "wasi", + target_os = "xous" +))] +// Consistent with `std`, `env_imp::Env` is `!Sync` in these platforms impl !DynSend for std::env::VarsOs {} macro_rules! already_send { @@ -52,8 +59,8 @@ macro_rules! already_send { // These structures are already `Send`. already_send!( [std::backtrace::Backtrace][std::io::Stdout][std::io::Stderr][std::io::Error][std::fs::File] - [rustc_arena::DroplessArena][crate::memmap::Mmap][crate::profiling::SelfProfiler] - [crate::owned_slice::OwnedSlice] + [rustc_arena::DroplessArena][jobserver_crate::Client][jobserver_crate::HelperThread] + [crate::memmap::Mmap][crate::profiling::SelfProfiler][crate::owned_slice::OwnedSlice] ); macro_rules! impl_dyn_send { @@ -106,8 +113,15 @@ impls_dyn_sync_neg!( [std::sync::mpsc::Sender where T] ); -#[cfg(any(unix, target_os = "hermit", target_os = "wasi", target_os = "solid_asp3"))] -// Consistent with `std`, `os_imp::Env` is `!Sync` in these platforms +#[cfg(any( + unix, + target_os = "hermit", + all(target_vendor = "fortanix", target_env = "sgx"), + target_os = "solid_asp3", + target_os = "wasi", + target_os = "xous" +))] +// Consistent with `std`, `env_imp::Env` is `!Sync` in these platforms impl !DynSync for std::env::VarsOs {} macro_rules! already_sync { @@ -120,8 +134,8 @@ macro_rules! already_sync { already_sync!( [std::sync::atomic::AtomicBool][std::sync::atomic::AtomicUsize][std::sync::atomic::AtomicU8] [std::sync::atomic::AtomicU32][std::backtrace::Backtrace][std::io::Error][std::fs::File] - [jobserver_crate::Client][crate::memmap::Mmap][crate::profiling::SelfProfiler] - [crate::owned_slice::OwnedSlice] + [jobserver_crate::Client][jobserver_crate::HelperThread][crate::memmap::Mmap] + [crate::profiling::SelfProfiler][crate::owned_slice::OwnedSlice] ); // Use portable AtomicU64 for targets without native 64-bit atomics @@ -179,6 +193,12 @@ impl FromDyn { FromDyn(val) } + #[inline(always)] + pub fn derive(&self, val: O) -> FromDyn { + // We already did the check for `sync::is_dyn_thread_safe()` when creating `Self` + FromDyn(val) + } + #[inline(always)] pub fn into_inner(self) -> T { self.0 @@ -200,6 +220,13 @@ impl std::ops::Deref for FromDyn { } } +impl std::ops::DerefMut for FromDyn { + #[inline(always)] + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} + // A wrapper to convert a struct that is already a `Send` or `Sync` into // an instance of `DynSend` and `DynSync`, since the compiler cannot infer // it automatically in some cases. (e.g. Box) diff --git a/compiler/rustc_data_structures/src/obligation_forest/mod.rs b/compiler/rustc_data_structures/src/obligation_forest/mod.rs index f63b201742d9a..2c62034c6e87e 100644 --- a/compiler/rustc_data_structures/src/obligation_forest/mod.rs +++ b/compiler/rustc_data_structures/src/obligation_forest/mod.rs @@ -315,7 +315,7 @@ mod helper { use super::*; pub(super) type ObligationTreeIdGenerator = impl Iterator; impl ObligationForest { - #[cfg_attr(not(bootstrap), define_opaque(ObligationTreeIdGenerator))] + #[define_opaque(ObligationTreeIdGenerator)] pub fn new() -> ObligationForest { ObligationForest { nodes: vec![], diff --git a/compiler/rustc_data_structures/src/sync.rs b/compiler/rustc_data_structures/src/sync.rs index 616a18a72ab7e..b28c333d860c6 100644 --- a/compiler/rustc_data_structures/src/sync.rs +++ b/compiler/rustc_data_structures/src/sync.rs @@ -43,7 +43,7 @@ pub use self::freeze::{FreezeLock, FreezeReadGuard, FreezeWriteGuard}; pub use self::lock::{Lock, LockGuard, Mode}; pub use self::mode::{is_dyn_thread_safe, set_dyn_thread_safe_mode}; pub use self::parallel::{ - join, par_for_each_in, par_map, parallel_guard, scope, try_par_for_each_in, + broadcast, join, par_for_each_in, par_map, parallel_guard, scope, spawn, try_par_for_each_in, }; pub use self::vec::{AppendOnlyIndexVec, AppendOnlyVec}; pub use self::worker_local::{Registry, WorkerLocal}; diff --git a/compiler/rustc_data_structures/src/sync/freeze.rs b/compiler/rustc_data_structures/src/sync/freeze.rs index 9720b22ea7d1b..6338afb92c34b 100644 --- a/compiler/rustc_data_structures/src/sync/freeze.rs +++ b/compiler/rustc_data_structures/src/sync/freeze.rs @@ -88,7 +88,7 @@ impl FreezeLock { #[inline] #[track_caller] pub fn write(&self) -> FreezeWriteGuard<'_, T> { - self.try_write().expect("still mutable") + self.try_write().expect("data should not be frozen if we're still attempting to mutate it") } #[inline] diff --git a/compiler/rustc_data_structures/src/sync/parallel.rs b/compiler/rustc_data_structures/src/sync/parallel.rs index 8ef8a3f358569..ab65c7f3a6b5f 100644 --- a/compiler/rustc_data_structures/src/sync/parallel.rs +++ b/compiler/rustc_data_structures/src/sync/parallel.rs @@ -7,7 +7,6 @@ use std::any::Any; use std::panic::{AssertUnwindSafe, catch_unwind, resume_unwind}; use parking_lot::Mutex; -use rayon::iter::{FromParallelIterator, IntoParallelIterator, ParallelIterator}; use crate::FatalErrorMarker; use crate::sync::{DynSend, DynSync, FromDyn, IntoDynSyncSend, mode}; @@ -94,14 +93,25 @@ macro_rules! parallel { }; } +pub fn spawn(func: impl FnOnce() + DynSend + 'static) { + if mode::is_dyn_thread_safe() { + let func = FromDyn::from(func); + rayon_core::spawn(|| { + (func.into_inner())(); + }); + } else { + func() + } +} + // This function only works when `mode::is_dyn_thread_safe()`. pub fn scope<'scope, OP, R>(op: OP) -> R where - OP: FnOnce(&rayon::Scope<'scope>) -> R + DynSend, + OP: FnOnce(&rayon_core::Scope<'scope>) -> R + DynSend, R: DynSend, { let op = FromDyn::from(op); - rayon::scope(|s| FromDyn::from(op.into_inner()(s))).into_inner() + rayon_core::scope(|s| FromDyn::from(op.into_inner()(s))).into_inner() } #[inline] @@ -114,7 +124,7 @@ where let oper_a = FromDyn::from(oper_a); let oper_b = FromDyn::from(oper_b); let (a, b) = parallel_guard(|guard| { - rayon::join( + rayon_core::join( move || guard.run(move || FromDyn::from(oper_a.into_inner()())), move || guard.run(move || FromDyn::from(oper_b.into_inner()())), ) @@ -125,58 +135,115 @@ where } } -pub fn par_for_each_in + IntoParallelIterator>( +fn par_slice( + items: &mut [I], + guard: &ParallelGuard, + for_each: impl Fn(&mut I) + DynSync + DynSend, +) { + struct State<'a, F> { + for_each: FromDyn, + guard: &'a ParallelGuard, + group: usize, + } + + fn par_rec( + items: &mut [I], + state: &State<'_, F>, + ) { + if items.len() <= state.group { + for item in items { + state.guard.run(|| (state.for_each)(item)); + } + } else { + let (left, right) = items.split_at_mut(items.len() / 2); + let mut left = state.for_each.derive(left); + let mut right = state.for_each.derive(right); + rayon_core::join(move || par_rec(*left, state), move || par_rec(*right, state)); + } + } + + let state = State { + for_each: FromDyn::from(for_each), + guard, + group: std::cmp::max(items.len() / 128, 1), + }; + par_rec(items, &state) +} + +pub fn par_for_each_in>( t: T, - for_each: impl Fn(I) + DynSync + DynSend, + for_each: impl Fn(&I) + DynSync + DynSend, ) { parallel_guard(|guard| { if mode::is_dyn_thread_safe() { - let for_each = FromDyn::from(for_each); - t.into_par_iter().for_each(|i| { - guard.run(|| for_each(i)); - }); + let mut items: Vec<_> = t.into_iter().collect(); + par_slice(&mut items, guard, |i| for_each(&*i)) } else { t.into_iter().for_each(|i| { - guard.run(|| for_each(i)); + guard.run(|| for_each(&i)); }); } }); } -pub fn try_par_for_each_in< - T: IntoIterator + IntoParallelIterator::Item>, - E: Send, ->( +/// This runs `for_each` in parallel for each iterator item. If one or more of the +/// `for_each` calls returns `Err`, the function will also return `Err`. The error returned +/// will be non-deterministic, but this is expected to be used with `ErrorGuaranteed` which +/// are all equivalent. +pub fn try_par_for_each_in( t: T, - for_each: impl Fn(::Item) -> Result<(), E> + DynSync + DynSend, -) -> Result<(), E> { + for_each: impl Fn(&::Item) -> Result<(), E> + DynSync + DynSend, +) -> Result<(), E> +where + ::Item: DynSend, +{ parallel_guard(|guard| { if mode::is_dyn_thread_safe() { - let for_each = FromDyn::from(for_each); - t.into_par_iter() - .filter_map(|i| guard.run(|| for_each(i))) - .reduce(|| Ok(()), Result::and) + let mut items: Vec<_> = t.into_iter().collect(); + + let error = Mutex::new(None); + + par_slice(&mut items, guard, |i| { + if let Err(err) = for_each(&*i) { + *error.lock() = Some(err); + } + }); + + if let Some(err) = error.into_inner() { Err(err) } else { Ok(()) } } else { - t.into_iter().filter_map(|i| guard.run(|| for_each(i))).fold(Ok(()), Result::and) + t.into_iter().filter_map(|i| guard.run(|| for_each(&i))).fold(Ok(()), Result::and) } }) } -pub fn par_map< - I, - T: IntoIterator + IntoParallelIterator, - R: std::marker::Send, - C: FromIterator + FromParallelIterator, ->( +pub fn par_map, R: DynSend, C: FromIterator>( t: T, map: impl Fn(I) -> R + DynSync + DynSend, ) -> C { parallel_guard(|guard| { if mode::is_dyn_thread_safe() { let map = FromDyn::from(map); - t.into_par_iter().filter_map(|i| guard.run(|| map(i))).collect() + + let mut items: Vec<(Option, Option)> = + t.into_iter().map(|i| (Some(i), None)).collect(); + + par_slice(&mut items, guard, |i| { + i.1 = Some(map(i.0.take().unwrap())); + }); + + items.into_iter().filter_map(|i| i.1).collect() } else { t.into_iter().filter_map(|i| guard.run(|| map(i))).collect() } }) } + +pub fn broadcast(op: impl Fn(usize) -> R + DynSync) -> Vec { + if mode::is_dyn_thread_safe() { + let op = FromDyn::from(op); + let results = rayon_core::broadcast(|context| op.derive(op(context.index()))); + results.into_iter().map(|r| r.into_inner()).collect() + } else { + vec![op(0)] + } +} diff --git a/compiler/rustc_data_structures/src/unord.rs b/compiler/rustc_data_structures/src/unord.rs index baa66cd7c8520..3d44fb1fd48db 100644 --- a/compiler/rustc_data_structures/src/unord.rs +++ b/compiler/rustc_data_structures/src/unord.rs @@ -109,6 +109,16 @@ impl> UnordItems { pub fn collect>>(self) -> C { self.into() } + + /// If the iterator has only one element, returns it, otherwise returns `None`. + #[track_caller] + pub fn get_only(mut self) -> Option { + let item = self.0.next(); + if self.0.next().is_some() { + return None; + } + item + } } impl UnordItems> { diff --git a/compiler/rustc_driver/src/lib.rs b/compiler/rustc_driver/src/lib.rs index 381309f83b2c1..a03834c519d59 100644 --- a/compiler/rustc_driver/src/lib.rs +++ b/compiler/rustc_driver/src/lib.rs @@ -3,7 +3,6 @@ // tidy-alphabetical-start #![allow(internal_features)] -#![cfg_attr(doc, recursion_limit = "256")] // FIXME(nnethercote): will be removed by #124141 #![doc(rust_logo)] #![feature(rustdoc_internals)] // tidy-alphabetical-end diff --git a/compiler/rustc_driver_impl/Cargo.toml b/compiler/rustc_driver_impl/Cargo.toml index de643355f5f44..9da4f2dbc2730 100644 --- a/compiler/rustc_driver_impl/Cargo.toml +++ b/compiler/rustc_driver_impl/Cargo.toml @@ -4,6 +4,7 @@ version = "0.0.0" edition = "2024" [dependencies] +jiff = { version = "0.2.5", default-features = false, features = ["std"] } # tidy-alphabetical-start rustc_abi = { path = "../rustc_abi" } rustc_ast = { path = "../rustc_ast" } @@ -43,14 +44,13 @@ rustc_privacy = { path = "../rustc_privacy" } rustc_query_system = { path = "../rustc_query_system" } rustc_resolve = { path = "../rustc_resolve" } rustc_session = { path = "../rustc_session" } -rustc_smir = { path = "../rustc_smir" } rustc_span = { path = "../rustc_span" } rustc_target = { path = "../rustc_target" } rustc_trait_selection = { path = "../rustc_trait_selection" } rustc_ty_utils = { path = "../rustc_ty_utils" } serde_json = "1.0.59" shlex = "1.0" -time = { version = "0.3.36", default-features = false, features = ["alloc", "formatting", "macros"] } +stable_mir = { path = "../stable_mir", features = ["rustc_internal"] } tracing = { version = "0.1.35" } # tidy-alphabetical-end diff --git a/compiler/rustc_driver_impl/src/lib.rs b/compiler/rustc_driver_impl/src/lib.rs index 37755e7d61db1..95cfe221d3fe1 100644 --- a/compiler/rustc_driver_impl/src/lib.rs +++ b/compiler/rustc_driver_impl/src/lib.rs @@ -7,11 +7,10 @@ // tidy-alphabetical-start #![allow(internal_features)] #![allow(rustc::untranslatable_diagnostic)] // FIXME: make this translatable -#![cfg_attr(doc, recursion_limit = "256")] // FIXME(nnethercote): will be removed by #124141 +#![cfg_attr(bootstrap, feature(let_chains))] #![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")] #![doc(rust_logo)] #![feature(decl_macro)] -#![feature(let_chains)] #![feature(panic_backtrace_config)] #![feature(panic_update_hook)] #![feature(result_flattening)] @@ -30,11 +29,10 @@ use std::path::{Path, PathBuf}; use std::process::{self, Command, Stdio}; use std::sync::OnceLock; use std::sync::atomic::{AtomicBool, Ordering}; -use std::time::{Instant, SystemTime}; +use std::time::Instant; use std::{env, str}; use rustc_ast as ast; -use rustc_codegen_ssa::back::apple; use rustc_codegen_ssa::traits::CodegenBackend; use rustc_codegen_ssa::{CodegenErrors, CodegenResults}; use rustc_data_structures::profiling::{ @@ -56,18 +54,17 @@ use rustc_metadata::locator; use rustc_middle::ty::TyCtxt; use rustc_parse::{new_parser_from_file, new_parser_from_source_str, unwrap_or_emit_fatal}; use rustc_session::config::{ - CG_OPTIONS, ErrorOutputType, Input, OptionDesc, OutFileName, OutputType, UnstableOptions, - Z_OPTIONS, nightly_options, parse_target_triple, + CG_OPTIONS, CrateType, ErrorOutputType, Input, OptionDesc, OutFileName, OutputType, + UnstableOptions, Z_OPTIONS, nightly_options, parse_target_triple, }; use rustc_session::getopts::{self, Matches}; use rustc_session::lint::{Lint, LintId}; use rustc_session::output::{CRATE_TYPES, collect_crate_types, invalid_output_for_target}; use rustc_session::{EarlyDiagCtxt, Session, config, filesearch}; use rustc_span::FileName; +use rustc_span::def_id::LOCAL_CRATE; use rustc_target::json::ToJson; use rustc_target::spec::{Target, TargetTuple}; -use time::OffsetDateTime; -use time::macros::format_description; use tracing::trace; #[allow(unused_macros)] @@ -264,6 +261,7 @@ pub fn run_compiler(at_args: &[String], callbacks: &mut (dyn Callbacks + Send)) hash_untracked_state: None, register_lints: None, override_queries: None, + extra_symbols: Vec::new(), make_codegen_backend: None, registry: diagnostics_registry(), using_internal_features: &USING_INTERNAL_FEATURES, @@ -348,16 +346,14 @@ pub fn run_compiler(at_args: &[String], callbacks: &mut (dyn Callbacks + Send)) // Make sure name resolution and macro expansion is run. let _ = tcx.resolver_for_lowering(); - if let Some(metrics_dir) = &sess.opts.unstable_opts.metrics_dir { - dump_feature_usage_metrics(tcx, metrics_dir); - } - if callbacks.after_expansion(compiler, tcx) == Compilation::Stop { return early_exit(); } passes::write_dep_info(tcx); + passes::write_interface(tcx); + if sess.opts.output_types.contains_key(&OutputType::DepInfo) && sess.opts.output_types.len() == 1 { @@ -370,6 +366,10 @@ pub fn run_compiler(at_args: &[String], callbacks: &mut (dyn Callbacks + Send)) tcx.ensure_ok().analysis(()); + if let Some(metrics_dir) = &sess.opts.unstable_opts.metrics_dir { + dump_feature_usage_metrics(tcx, metrics_dir); + } + if callbacks.after_analysis(compiler, tcx) == Compilation::Stop { return early_exit(); } @@ -392,14 +392,10 @@ pub fn run_compiler(at_args: &[String], callbacks: &mut (dyn Callbacks + Send)) } fn dump_feature_usage_metrics(tcxt: TyCtxt<'_>, metrics_dir: &Path) { - let output_filenames = tcxt.output_filenames(()); - let mut metrics_file_name = std::ffi::OsString::from("unstable_feature_usage_metrics-"); - let mut metrics_path = output_filenames.with_directory_and_extension(metrics_dir, "json"); - let metrics_file_stem = - metrics_path.file_name().expect("there should be a valid default output filename"); - metrics_file_name.push(metrics_file_stem); - metrics_path.pop(); - metrics_path.push(metrics_file_name); + let hash = tcxt.crate_hash(LOCAL_CRATE); + let crate_name = tcxt.crate_name(LOCAL_CRATE); + let metrics_file_name = format!("unstable_feature_usage_metrics-{crate_name}-{hash}.json"); + let metrics_path = metrics_dir.join(metrics_file_name); if let Err(error) = tcxt.features().dump_feature_usage_metrics(metrics_path) { // FIXME(yaahc): once metrics can be enabled by default we will want "failure to emit // default metrics" to only produce a warning when metrics are enabled by default and emit @@ -467,6 +463,7 @@ fn handle_explain(early_dcx: &EarlyDiagCtxt, registry: Registry, code: &str, col // Allow "E0123" or "0123" form. let upper_cased_code = code.to_ascii_uppercase(); if let Ok(code) = upper_cased_code.strip_prefix('E').unwrap_or(&upper_cased_code).parse::() + && code <= ErrCode::MAX_AS_U32 && let Ok(description) = registry.try_find_description(ErrCode::from_u32(code)) { let mut is_in_code_block = false; @@ -691,6 +688,34 @@ fn print_crate_info( }; println_info!("{}", passes::get_crate_name(sess, attrs)); } + CrateRootLintLevels => { + let Some(attrs) = attrs.as_ref() else { + // no crate attributes, print out an error and exit + return Compilation::Continue; + }; + let crate_name = passes::get_crate_name(sess, attrs); + let lint_store = crate::unerased_lint_store(sess); + let registered_tools = rustc_resolve::registered_tools_ast(sess.dcx(), attrs); + let features = rustc_expand::config::features(sess, attrs, crate_name); + let lint_levels = rustc_lint::LintLevelsBuilder::crate_root( + sess, + &features, + true, + lint_store, + ®istered_tools, + attrs, + ); + for lint in lint_store.get_lints() { + if let Some(feature_symbol) = lint.feature_gate + && !features.enabled(feature_symbol) + { + // lint is unstable and feature gate isn't active, don't print + continue; + } + let level = lint_levels.lint_level(lint).level; + println_info!("{}={}", lint.name_lower(), level.as_str()); + } + } Cfg => { let mut cfgs = sess .psess @@ -779,11 +804,11 @@ fn print_crate_info( } } DeploymentTarget => { - if sess.target.is_like_osx { + if sess.target.is_like_darwin { println_info!( "{}={}", - apple::deployment_target_env_var(&sess.target.os), - apple::pretty_version(apple::deployment_target(sess)), + rustc_target::spec::apple::deployment_target_env_var(&sess.target.os), + sess.apple_deployment_target().fmt_pretty(), ) } else { #[allow(rustc::diagnostic_outside_of_impl)] @@ -794,6 +819,7 @@ fn print_crate_info( let supported_crate_types = CRATE_TYPES .iter() .filter(|(_, crate_type)| !invalid_output_for_target(&sess, *crate_type)) + .filter(|(_, crate_type)| *crate_type != CrateType::Sdylib) .map(|(crate_type_sym, _)| *crate_type_sym) .collect::>(); for supported_crate_type in supported_crate_types { @@ -1276,13 +1302,8 @@ fn ice_path_with_config(config: Option<&UnstableOptions>) -> &'static Option Contains for Foo { Please note that unconstrained lifetime parameters are not supported if they are being used by an associated type. -In cases where the associated type's lifetime is meant to be tied to the the +In cases where the associated type's lifetime is meant to be tied to the self type, and none of the methods on the trait need ownership or different mutability, then an option is to implement the trait on a borrowed type: diff --git a/compiler/rustc_error_codes/src/error_codes/E0253.md b/compiler/rustc_error_codes/src/error_codes/E0253.md index 705d1bfc53e59..628f5e252fbb6 100644 --- a/compiler/rustc_error_codes/src/error_codes/E0253.md +++ b/compiler/rustc_error_codes/src/error_codes/E0253.md @@ -1,9 +1,13 @@ +#### Note: this error code is no longer emitted by the compiler. + Attempt was made to import an unimportable type. This can happen when trying to import a type from a trait. Erroneous code example: -```compile_fail,E0253 +``` +#![feature(import_trait_associated_functions)] + mod foo { pub trait MyTrait { type SomeType; diff --git a/compiler/rustc_error_codes/src/error_codes/E0264.md b/compiler/rustc_error_codes/src/error_codes/E0264.md index 33ddf3405acca..e17b72061582d 100644 --- a/compiler/rustc_error_codes/src/error_codes/E0264.md +++ b/compiler/rustc_error_codes/src/error_codes/E0264.md @@ -1,3 +1,5 @@ +#### this error code is no longer emitted by the compiler. + An unknown external lang item was used. Erroneous code example: diff --git a/compiler/rustc_error_codes/src/error_codes/E0622.md b/compiler/rustc_error_codes/src/error_codes/E0622.md index 4cb605b636d2e..9b8131a061e39 100644 --- a/compiler/rustc_error_codes/src/error_codes/E0622.md +++ b/compiler/rustc_error_codes/src/error_codes/E0622.md @@ -1,13 +1,16 @@ +#### Note: this error code is no longer emitted by the compiler. + An intrinsic was declared without being a function. Erroneous code example: -```compile_fail,E0622 +```no_run #![feature(intrinsics)] #![allow(internal_features)] -extern "rust-intrinsic" { - pub static atomic_singlethreadfence_seqcst: fn(); +extern "C" { + #[rustc_intrinsic] + pub static atomic_singlethreadfence_seqcst: unsafe fn(); // error: intrinsic must be a function } @@ -22,9 +25,8 @@ error, just declare a function. Example: #![feature(intrinsics)] #![allow(internal_features)] -extern "rust-intrinsic" { - pub fn atomic_singlethreadfence_seqcst(); // ok! -} +#[rustc_intrinsic] +pub unsafe fn atomic_singlethreadfence_seqcst(); // ok! fn main() { unsafe { atomic_singlethreadfence_seqcst(); } } ``` diff --git a/compiler/rustc_error_codes/src/error_codes/E0736.md b/compiler/rustc_error_codes/src/error_codes/E0736.md index cb7633b7068a3..66d5fbb80cf29 100644 --- a/compiler/rustc_error_codes/src/error_codes/E0736.md +++ b/compiler/rustc_error_codes/src/error_codes/E0736.md @@ -11,7 +11,7 @@ Erroneous code example: ```compile_fail,E0736 #[inline] -#[naked] +#[unsafe(naked)] fn foo() {} ``` diff --git a/compiler/rustc_error_codes/src/error_codes/E0755.md b/compiler/rustc_error_codes/src/error_codes/E0755.md index 88b7f48496906..b67f078c78ec7 100644 --- a/compiler/rustc_error_codes/src/error_codes/E0755.md +++ b/compiler/rustc_error_codes/src/error_codes/E0755.md @@ -5,7 +5,7 @@ Erroneous code example: ```compile_fail,E0755 #![feature(ffi_pure)] -#[ffi_pure] // error! +#[unsafe(ffi_pure)] // error! pub fn foo() {} # fn main() {} ``` @@ -17,7 +17,7 @@ side effects or infinite loops: #![feature(ffi_pure)] extern "C" { - #[ffi_pure] // ok! + #[unsafe(ffi_pure)] // ok! pub fn strlen(s: *const i8) -> isize; } # fn main() {} diff --git a/compiler/rustc_error_codes/src/error_codes/E0756.md b/compiler/rustc_error_codes/src/error_codes/E0756.md index ffdc421aab584..aadde038d12c9 100644 --- a/compiler/rustc_error_codes/src/error_codes/E0756.md +++ b/compiler/rustc_error_codes/src/error_codes/E0756.md @@ -6,7 +6,7 @@ Erroneous code example: ```compile_fail,E0756 #![feature(ffi_const)] -#[ffi_const] // error! +#[unsafe(ffi_const)] // error! pub fn foo() {} # fn main() {} ``` @@ -18,7 +18,7 @@ which have no side effects except for their return value: #![feature(ffi_const)] extern "C" { - #[ffi_const] // ok! + #[unsafe(ffi_const)] // ok! pub fn strlen(s: *const i8) -> i32; } # fn main() {} diff --git a/compiler/rustc_error_codes/src/error_codes/E0757.md b/compiler/rustc_error_codes/src/error_codes/E0757.md index 41b06b23c4f2b..fb75b028f45c8 100644 --- a/compiler/rustc_error_codes/src/error_codes/E0757.md +++ b/compiler/rustc_error_codes/src/error_codes/E0757.md @@ -6,8 +6,9 @@ Erroneous code example: #![feature(ffi_const, ffi_pure)] extern "C" { - #[ffi_const] - #[ffi_pure] // error: `#[ffi_const]` function cannot be `#[ffi_pure]` + #[unsafe(ffi_const)] + #[unsafe(ffi_pure)] + //~^ ERROR `#[ffi_const]` function cannot be `#[ffi_pure]` pub fn square(num: i32) -> i32; } ``` @@ -19,7 +20,7 @@ As `ffi_const` provides stronger guarantees than `ffi_pure`, remove the #![feature(ffi_const)] extern "C" { - #[ffi_const] + #[unsafe(ffi_const)] pub fn square(num: i32) -> i32; } ``` diff --git a/compiler/rustc_error_codes/src/error_codes/E0787.md b/compiler/rustc_error_codes/src/error_codes/E0787.md index f5c5faa066b6b..b7f92c8feb587 100644 --- a/compiler/rustc_error_codes/src/error_codes/E0787.md +++ b/compiler/rustc_error_codes/src/error_codes/E0787.md @@ -3,9 +3,7 @@ An unsupported naked function definition. Erroneous code example: ```compile_fail,E0787 -#![feature(naked_functions)] - -#[naked] +#[unsafe(naked)] pub extern "C" fn f() -> u32 { 42 } diff --git a/compiler/rustc_error_codes/src/error_codes/E0805.md b/compiler/rustc_error_codes/src/error_codes/E0805.md new file mode 100644 index 0000000000000..f5dc13b14b812 --- /dev/null +++ b/compiler/rustc_error_codes/src/error_codes/E0805.md @@ -0,0 +1,64 @@ +An externally implementable item is not compatible with its declaration. + +Erroneous code example: + +```rust,edition2021,compile_fail,E0805 +#![feature(eii)] + +#[eii(foo)] +fn x(); + +#[foo] +fn y(a: u64) -> u64 { +//~^ ERROR E0805 + a +} + + +fn main() {} +``` + +To fix this, `y`'s signature must match that of `x`: + +```rust,edition2021 +#![feature(eii)] + +#[eii(foo)] +fn x(); + +#[foo] +fn y() {} + + +fn main() {} +``` + +One common way this can be triggered is by using the wrong +signature for `#[panic_handler]`. +The signature is provided by `core`. + +```rust,edition2021,ignore +#![no_std] + +#[panic_handler] +fn on_panic() -> ! { +//~^ ERROR E0805 + + loop {} +} + +fn main() {} +``` + +Should be: + +```rust,edition2021,ignore +#![no_std] + +#[panic_handler] +fn on_panic(info: &core::panic::PanicInfo<'_>) -> ! { + loop {} +} + +fn main() {} +``` diff --git a/compiler/rustc_error_codes/src/lib.rs b/compiler/rustc_error_codes/src/lib.rs index dfeef5a957d69..6f5e4829802e9 100644 --- a/compiler/rustc_error_codes/src/lib.rs +++ b/compiler/rustc_error_codes/src/lib.rs @@ -397,7 +397,7 @@ E0618: 0618, E0619: 0619, E0620: 0620, E0621: 0621, -E0622: 0622, +E0622: 0622, // REMOVED: rustc-intrinsic ABI was removed E0623: 0623, E0624: 0624, E0625: 0625, @@ -547,6 +547,7 @@ E0801: 0801, E0802: 0802, E0803: 0803, E0804: 0804, +E0805: 0805, ); ) } diff --git a/compiler/rustc_error_messages/src/lib.rs b/compiler/rustc_error_messages/src/lib.rs index 39e78ae88416e..3c6df147b1ba5 100644 --- a/compiler/rustc_error_messages/src/lib.rs +++ b/compiler/rustc_error_messages/src/lib.rs @@ -208,7 +208,7 @@ pub type LazyFallbackBundle = Arc Fluent /// Return the default `FluentBundle` with standard "en-US" diagnostic messages. #[instrument(level = "trace", skip(resources))] -#[cfg_attr(not(bootstrap), define_opaque(LazyFallbackBundle))] +#[define_opaque(LazyFallbackBundle)] pub fn fallback_fluent_bundle( resources: Vec<&'static str>, with_directionality_markers: bool, diff --git a/compiler/rustc_errors/src/annotate_snippet_emitter_writer.rs b/compiler/rustc_errors/src/annotate_snippet_emitter_writer.rs index f0636b600b70b..f3aeb8d224b9d 100644 --- a/compiler/rustc_errors/src/annotate_snippet_emitter_writer.rs +++ b/compiler/rustc_errors/src/annotate_snippet_emitter_writer.rs @@ -91,13 +91,13 @@ fn annotation_level_for_level(level: Level) -> annotate_snippets::Level { Level::Bug | Level::Fatal | Level::Error | Level::DelayedBug => { annotate_snippets::Level::Error } - Level::ForceWarning(_) | Level::Warning => annotate_snippets::Level::Warning, + Level::ForceWarning | Level::Warning => annotate_snippets::Level::Warning, Level::Note | Level::OnceNote => annotate_snippets::Level::Note, Level::Help | Level::OnceHelp => annotate_snippets::Level::Help, // FIXME(#59346): Not sure how to map this level Level::FailureNote => annotate_snippets::Level::Error, Level::Allow => panic!("Should not call with Allow"), - Level::Expect(_) => panic!("Should not call with Expect"), + Level::Expect => panic!("Should not call with Expect"), } } diff --git a/compiler/rustc_errors/src/diagnostic.rs b/compiler/rustc_errors/src/diagnostic.rs index 9f4d2ea5c1ad6..539ecfbb42e2c 100644 --- a/compiler/rustc_errors/src/diagnostic.rs +++ b/compiler/rustc_errors/src/diagnostic.rs @@ -9,7 +9,7 @@ use std::thread::panicking; use rustc_data_structures::fx::FxIndexMap; use rustc_error_messages::{FluentValue, fluent_value_from_str_list_sep_by_and}; -use rustc_lint_defs::Applicability; +use rustc_lint_defs::{Applicability, LintExpectationId}; use rustc_macros::{Decodable, Encodable}; use rustc_span::source_map::Spanned; use rustc_span::{DUMMY_SP, Span, Symbol}; @@ -181,22 +181,9 @@ where Self: Sized, { /// Add a subdiagnostic to an existing diagnostic. - fn add_to_diag(self, diag: &mut Diag<'_, G>) { - self.add_to_diag_with(diag, &|_, m| m); - } - - /// Add a subdiagnostic to an existing diagnostic where `f` is invoked on every message used - /// (to optionally perform eager translation). - fn add_to_diag_with>( - self, - diag: &mut Diag<'_, G>, - f: &F, - ); + fn add_to_diag(self, diag: &mut Diag<'_, G>); } -pub trait SubdiagMessageOp = - Fn(&mut Diag<'_, G>, SubdiagMessage) -> SubdiagMessage; - /// Trait implemented by lint types. This should not be implemented manually. Instead, use /// `#[derive(LintDiagnostic)]` -- see [rustc_macros::LintDiagnostic]. #[rustc_diagnostic_item = "LintDiagnostic"] @@ -296,6 +283,7 @@ pub struct DiagInner { pub messages: Vec<(DiagMessage, Style)>, pub code: Option, + pub lint_id: Option, pub span: MultiSpan, pub children: Vec, pub suggestions: Suggestions, @@ -324,6 +312,7 @@ impl DiagInner { pub fn new_with_messages(level: Level, messages: Vec<(DiagMessage, Style)>) -> Self { DiagInner { level, + lint_id: None, messages, code: None, span: MultiSpan::new(), @@ -346,7 +335,7 @@ impl DiagInner { match self.level { Level::Bug | Level::Fatal | Level::Error | Level::DelayedBug => true, - Level::ForceWarning(_) + Level::ForceWarning | Level::Warning | Level::Note | Level::OnceNote @@ -354,7 +343,7 @@ impl DiagInner { | Level::OnceHelp | Level::FailureNote | Level::Allow - | Level::Expect(_) => false, + | Level::Expect => false, } } @@ -365,7 +354,7 @@ impl DiagInner { pub(crate) fn is_force_warn(&self) -> bool { match self.level { - Level::ForceWarning(_) => { + Level::ForceWarning => { assert!(self.is_lint.is_some()); true } @@ -645,9 +634,9 @@ impl<'a, G: EmissionGuarantee> Diag<'a, G> { #[rustc_lint_diagnostics] pub fn note_expected_found( &mut self, - expected_label: &dyn fmt::Display, + expected_label: &str, expected: DiagStyledString, - found_label: &dyn fmt::Display, + found_label: &str, found: DiagStyledString, ) -> &mut Self { self.note_expected_found_extra( @@ -663,9 +652,9 @@ impl<'a, G: EmissionGuarantee> Diag<'a, G> { #[rustc_lint_diagnostics] pub fn note_expected_found_extra( &mut self, - expected_label: &dyn fmt::Display, + expected_label: &str, expected: DiagStyledString, - found_label: &dyn fmt::Display, + found_label: &str, found: DiagStyledString, expected_extra: DiagStyledString, found_extra: DiagStyledString, @@ -1225,15 +1214,21 @@ impl<'a, G: EmissionGuarantee> Diag<'a, G> { /// interpolated variables). #[rustc_lint_diagnostics] pub fn subdiagnostic(&mut self, subdiagnostic: impl Subdiagnostic) -> &mut Self { - let dcx = self.dcx; - subdiagnostic.add_to_diag_with(self, &|diag, msg| { - let args = diag.args.iter(); - let msg = diag.subdiagnostic_message_to_diagnostic_message(msg); - dcx.eagerly_translate(msg, args) - }); + subdiagnostic.add_to_diag(self); self } + /// Fluent variables are not namespaced from each other, so when + /// `Diagnostic`s and `Subdiagnostic`s use the same variable name, + /// one value will clobber the other. Eagerly translating the + /// diagnostic uses the variables defined right then, before the + /// clobbering occurs. + pub fn eagerly_translate(&self, msg: impl Into) -> SubdiagMessage { + let args = self.args.iter(); + let msg = self.subdiagnostic_message_to_diagnostic_message(msg.into()); + self.dcx.eagerly_translate(msg, args) + } + with_fn! { with_span, /// Add a span. #[rustc_lint_diagnostics] @@ -1259,6 +1254,17 @@ impl<'a, G: EmissionGuarantee> Diag<'a, G> { self } } + with_fn! { with_lint_id, + /// Add an argument. + #[rustc_lint_diagnostics] + pub fn lint_id( + &mut self, + id: LintExpectationId, + ) -> &mut Self { + self.lint_id = Some(id); + self + } } + with_fn! { with_primary_message, /// Add a primary message. #[rustc_lint_diagnostics] diff --git a/compiler/rustc_errors/src/diagnostic_impls.rs b/compiler/rustc_errors/src/diagnostic_impls.rs index cb2e1769fa1cf..8b59ba9984c10 100644 --- a/compiler/rustc_errors/src/diagnostic_impls.rs +++ b/compiler/rustc_errors/src/diagnostic_impls.rs @@ -19,7 +19,7 @@ use {rustc_ast as ast, rustc_hir as hir}; use crate::diagnostic::DiagLocation; use crate::{ Diag, DiagArgValue, DiagCtxtHandle, Diagnostic, EmissionGuarantee, ErrCode, IntoDiagArg, Level, - SubdiagMessageOp, Subdiagnostic, fluent_generated as fluent, + Subdiagnostic, fluent_generated as fluent, }; pub struct DiagArgFromDisplay<'a>(pub &'a dyn fmt::Display); @@ -384,11 +384,7 @@ pub struct SingleLabelManySpans { pub label: &'static str, } impl Subdiagnostic for SingleLabelManySpans { - fn add_to_diag_with>( - self, - diag: &mut Diag<'_, G>, - _: &F, - ) { + fn add_to_diag(self, diag: &mut Diag<'_, G>) { diag.span_labels(self.spans, self.label); } } diff --git a/compiler/rustc_errors/src/json.rs b/compiler/rustc_errors/src/json.rs index 7d7f364fec236..a6583407b7e7e 100644 --- a/compiler/rustc_errors/src/json.rs +++ b/compiler/rustc_errors/src/json.rs @@ -144,7 +144,7 @@ impl Emitter for JsonEmitter { // // So to avoid ICEs and confused users we "upgrade" the lint level for // those `FutureBreakageItem` to warn. - if matches!(diag.level, crate::Level::Allow | crate::Level::Expect(..)) { + if matches!(diag.level, crate::Level::Allow | crate::Level::Expect) { diag.level = crate::Level::Warning; } FutureBreakageItem { diff --git a/compiler/rustc_errors/src/lib.rs b/compiler/rustc_errors/src/lib.rs index 80e43ede4453a..6f37bad9bb462 100644 --- a/compiler/rustc_errors/src/lib.rs +++ b/compiler/rustc_errors/src/lib.rs @@ -7,6 +7,7 @@ #![allow(internal_features)] #![allow(rustc::diagnostic_outside_of_impl)] #![allow(rustc::untranslatable_diagnostic)] +#![cfg_attr(bootstrap, feature(let_chains))] #![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")] #![doc(rust_logo)] #![feature(array_windows)] @@ -17,7 +18,6 @@ #![feature(default_field_values)] #![feature(error_reporter)] #![feature(if_let_guard)] -#![feature(let_chains)] #![feature(negative_impls)] #![feature(never_type)] #![feature(rustc_attrs)] @@ -47,7 +47,7 @@ pub use codes::*; pub use diagnostic::{ BugAbort, Diag, DiagArg, DiagArgMap, DiagArgName, DiagArgValue, DiagInner, DiagStyledString, Diagnostic, EmissionGuarantee, FatalAbort, IntoDiagArg, LintDiagnostic, StringPart, Subdiag, - SubdiagMessageOp, Subdiagnostic, + Subdiagnostic, }; pub use diagnostic_impls::{ DiagArgFromDisplay, DiagSymbolList, ElidedLifetimeInPathSubdiag, ExpectedLifetimeParameter, @@ -589,7 +589,8 @@ struct DiagCtxtInner { /// add more information). All stashed diagnostics must be emitted with /// `emit_stashed_diagnostics` by the time the `DiagCtxtInner` is dropped, /// otherwise an assertion failure will occur. - stashed_diagnostics: FxIndexMap<(Span, StashKey), (DiagInner, Option)>, + stashed_diagnostics: + FxIndexMap)>>, future_breakage_diagnostics: Vec, @@ -905,15 +906,19 @@ impl<'a> DiagCtxtHandle<'a> { DelayedBug => { return self.inner.borrow_mut().emit_diagnostic(diag, self.tainted_with_errors); } - ForceWarning(_) | Warning | Note | OnceNote | Help | OnceHelp | FailureNote | Allow - | Expect(_) => None, + ForceWarning | Warning | Note | OnceNote | Help | OnceHelp | FailureNote | Allow + | Expect => None, }; // FIXME(Centril, #69537): Consider reintroducing panic on overwriting a stashed diagnostic // if/when we have a more robust macro-friendly replacement for `(span, key)` as a key. // See the PR for a discussion. - let key = (span.with_parent(None), key); - self.inner.borrow_mut().stashed_diagnostics.insert(key, (diag, guar)); + self.inner + .borrow_mut() + .stashed_diagnostics + .entry(key) + .or_default() + .insert(span.with_parent(None), (diag, guar)); guar } @@ -922,9 +927,10 @@ impl<'a> DiagCtxtHandle<'a> { /// and [`StashKey`] as the key. Panics if the found diagnostic is an /// error. pub fn steal_non_err(self, span: Span, key: StashKey) -> Option> { - let key = (span.with_parent(None), key); // FIXME(#120456) - is `swap_remove` correct? - let (diag, guar) = self.inner.borrow_mut().stashed_diagnostics.swap_remove(&key)?; + let (diag, guar) = self.inner.borrow_mut().stashed_diagnostics.get_mut(&key).and_then( + |stashed_diagnostics| stashed_diagnostics.swap_remove(&span.with_parent(None)), + )?; assert!(!diag.is_error()); assert!(guar.is_none()); Some(Diag::new_diagnostic(self, diag)) @@ -943,9 +949,10 @@ impl<'a> DiagCtxtHandle<'a> { where F: FnMut(&mut Diag<'_>), { - let key = (span.with_parent(None), key); // FIXME(#120456) - is `swap_remove` correct? - let err = self.inner.borrow_mut().stashed_diagnostics.swap_remove(&key); + let err = self.inner.borrow_mut().stashed_diagnostics.get_mut(&key).and_then( + |stashed_diagnostics| stashed_diagnostics.swap_remove(&span.with_parent(None)), + ); err.map(|(err, guar)| { // The use of `::` is safe because level is `Level::Error`. assert_eq!(err.level, Error); @@ -966,9 +973,10 @@ impl<'a> DiagCtxtHandle<'a> { key: StashKey, new_err: Diag<'_>, ) -> ErrorGuaranteed { - let key = (span.with_parent(None), key); // FIXME(#120456) - is `swap_remove` correct? - let old_err = self.inner.borrow_mut().stashed_diagnostics.swap_remove(&key); + let old_err = self.inner.borrow_mut().stashed_diagnostics.get_mut(&key).and_then( + |stashed_diagnostics| stashed_diagnostics.swap_remove(&span.with_parent(None)), + ); match old_err { Some((old_err, guar)) => { assert_eq!(old_err.level, Error); @@ -983,7 +991,14 @@ impl<'a> DiagCtxtHandle<'a> { } pub fn has_stashed_diagnostic(&self, span: Span, key: StashKey) -> bool { - self.inner.borrow().stashed_diagnostics.get(&(span.with_parent(None), key)).is_some() + let inner = self.inner.borrow(); + if let Some(stashed_diagnostics) = inner.stashed_diagnostics.get(&key) + && !stashed_diagnostics.is_empty() + { + stashed_diagnostics.contains_key(&span.with_parent(None)) + } else { + false + } } /// Emit all stashed diagnostics. @@ -997,7 +1012,11 @@ impl<'a> DiagCtxtHandle<'a> { let inner = self.inner.borrow(); inner.err_guars.len() + inner.lint_err_guars.len() - + inner.stashed_diagnostics.values().filter(|(_diag, guar)| guar.is_some()).count() + + inner + .stashed_diagnostics + .values() + .map(|a| a.values().filter(|(_, guar)| guar.is_some()).count()) + .sum::() } /// This excludes lint errors and delayed bugs. Unless absolutely @@ -1045,7 +1064,7 @@ impl<'a> DiagCtxtHandle<'a> { // Use `ForceWarning` rather than `Warning` to guarantee emission, e.g. with a // configuration like `--cap-lints allow --force-warn bare_trait_objects`. inner.emit_diagnostic( - DiagInner::new(ForceWarning(None), DiagMessage::Str(warnings)), + DiagInner::new(ForceWarning, DiagMessage::Str(warnings)), None, ); } @@ -1450,7 +1469,7 @@ impl<'a> DiagCtxtHandle<'a> { #[rustc_lint_diagnostics] #[track_caller] pub fn struct_expect(self, msg: impl Into, id: LintExpectationId) -> Diag<'a, ()> { - Diag::new(self, Expect(id), msg) + Diag::new(self, Expect, msg).with_lint_id(id) } } @@ -1486,16 +1505,18 @@ impl DiagCtxtInner { fn emit_stashed_diagnostics(&mut self) -> Option { let mut guar = None; let has_errors = !self.err_guars.is_empty(); - for (_, (diag, _guar)) in std::mem::take(&mut self.stashed_diagnostics).into_iter() { - if !diag.is_error() { - // Unless they're forced, don't flush stashed warnings when - // there are errors, to avoid causing warning overload. The - // stash would've been stolen already if it were important. - if !diag.is_force_warn() && has_errors { - continue; + for (_, stashed_diagnostics) in std::mem::take(&mut self.stashed_diagnostics).into_iter() { + for (_, (diag, _guar)) in stashed_diagnostics { + if !diag.is_error() { + // Unless they're forced, don't flush stashed warnings when + // there are errors, to avoid causing warning overload. The + // stash would've been stolen already if it were important. + if !diag.is_force_warn() && has_errors { + continue; + } } + guar = guar.or(self.emit_diagnostic(diag, None)); } - guar = guar.or(self.emit_diagnostic(diag, None)); } guar } @@ -1510,7 +1531,7 @@ impl DiagCtxtInner { // Future breakages aren't emitted if they're `Level::Allow` or // `Level::Expect`, but they still need to be constructed and // stashed below, so they'll trigger the must_produce_diag check. - assert_matches!(diagnostic.level, Error | Warning | Allow | Expect(_)); + assert_matches!(diagnostic.level, Error | Warning | Allow | Expect); self.future_breakage_diagnostics.push(diagnostic.clone()); } @@ -1558,7 +1579,7 @@ impl DiagCtxtInner { }; } } - ForceWarning(None) => {} // `ForceWarning(Some(...))` is below, with `Expect` + ForceWarning if diagnostic.lint_id.is_none() => {} // `ForceWarning(Some(...))` is below, with `Expect` Warning => { if !self.flags.can_emit_warnings { // We are not emitting warnings. @@ -1580,9 +1601,9 @@ impl DiagCtxtInner { } return None; } - Expect(expect_id) | ForceWarning(Some(expect_id)) => { - self.fulfilled_expectations.insert(expect_id); - if let Expect(_) = diagnostic.level { + Expect | ForceWarning => { + self.fulfilled_expectations.insert(diagnostic.lint_id.unwrap()); + if let Expect = diagnostic.level { // Nothing emitted here for expected lints. TRACK_DIAGNOSTIC(diagnostic, &mut |_| None); self.suppressed_expected_diag = true; @@ -1631,7 +1652,7 @@ impl DiagCtxtInner { if is_error { self.deduplicated_err_count += 1; - } else if matches!(diagnostic.level, ForceWarning(_) | Warning) { + } else if matches!(diagnostic.level, ForceWarning | Warning) { self.deduplicated_warn_count += 1; } self.has_printed = true; @@ -1688,6 +1709,7 @@ impl DiagCtxtInner { if let Some((_diag, guar)) = self .stashed_diagnostics .values() + .flat_map(|stashed_diagnostics| stashed_diagnostics.values()) .find(|(diag, guar)| guar.is_some() && diag.is_lint.is_none()) { *guar @@ -1700,13 +1722,9 @@ impl DiagCtxtInner { fn has_errors(&self) -> Option { self.err_guars.get(0).copied().or_else(|| self.lint_err_guars.get(0).copied()).or_else( || { - if let Some((_diag, guar)) = - self.stashed_diagnostics.values().find(|(_diag, guar)| guar.is_some()) - { - *guar - } else { - None - } + self.stashed_diagnostics.values().find_map(|stashed_diagnostics| { + stashed_diagnostics.values().find_map(|(_, guar)| *guar) + }) }, ) } @@ -1899,9 +1917,9 @@ pub enum Level { /// A `force-warn` lint warning about the code being compiled. Does not prevent compilation /// from finishing. /// - /// The [`LintExpectationId`] is used for expected lint diagnostics. In all other cases this + /// Requires a [`LintExpectationId`] for expected lint diagnostics. In all other cases this /// should be `None`. - ForceWarning(Option), + ForceWarning, /// A warning about the code being compiled. Does not prevent compilation from finishing. /// Will be skipped if `can_emit_warnings` is false. @@ -1926,8 +1944,8 @@ pub enum Level { /// Only used for lints. Allow, - /// Only used for lints. - Expect(LintExpectationId), + /// Only used for lints. Requires a [`LintExpectationId`] for silencing the lints. + Expect, } impl fmt::Display for Level { @@ -1943,7 +1961,7 @@ impl Level { Bug | Fatal | Error | DelayedBug => { spec.set_fg(Some(Color::Red)).set_intense(true); } - ForceWarning(_) | Warning => { + ForceWarning | Warning => { spec.set_fg(Some(Color::Yellow)).set_intense(cfg!(windows)); } Note | OnceNote => { @@ -1953,7 +1971,7 @@ impl Level { spec.set_fg(Some(Color::Cyan)).set_intense(true); } FailureNote => {} - Allow | Expect(_) => unreachable!(), + Allow | Expect => unreachable!(), } spec } @@ -1962,11 +1980,11 @@ impl Level { match self { Bug | DelayedBug => "error: internal compiler error", Fatal | Error => "error", - ForceWarning(_) | Warning => "warning", + ForceWarning | Warning => "warning", Note | OnceNote => "note", Help | OnceHelp => "help", FailureNote => "failure-note", - Allow | Expect(_) => unreachable!(), + Allow | Expect => unreachable!(), } } @@ -1977,8 +1995,7 @@ impl Level { // Can this level be used in a subdiagnostic message? fn can_be_subdiag(&self) -> bool { match self { - Bug | DelayedBug | Fatal | Error | ForceWarning(_) | FailureNote | Allow - | Expect(_) => false, + Bug | DelayedBug | Fatal | Error | ForceWarning | FailureNote | Allow | Expect => false, Warning | Note | Help | OnceNote | OnceHelp => true, } diff --git a/compiler/rustc_expand/src/base.rs b/compiler/rustc_expand/src/base.rs index 990d0f2e028aa..f5eaf7d616b22 100644 --- a/compiler/rustc_expand/src/base.rs +++ b/compiler/rustc_expand/src/base.rs @@ -824,10 +824,10 @@ impl SyntaxExtension { return Err(item.span); } - match item.name_or_empty() { - sym::no => Ok(CollapseMacroDebuginfo::No), - sym::external => Ok(CollapseMacroDebuginfo::External), - sym::yes => Ok(CollapseMacroDebuginfo::Yes), + match item.name() { + Some(sym::no) => Ok(CollapseMacroDebuginfo::No), + Some(sym::external) => Ok(CollapseMacroDebuginfo::External), + Some(sym::yes) => Ok(CollapseMacroDebuginfo::Yes), _ => Err(item.path.span), } } @@ -1102,7 +1102,7 @@ pub trait ResolverExpand { /// HIR proc macros items back to their harness items. fn declare_proc_macro(&mut self, id: NodeId); - fn append_stripped_cfg_item(&mut self, parent_node: NodeId, name: Ident, cfg: ast::MetaItem); + fn append_stripped_cfg_item(&mut self, parent_node: NodeId, ident: Ident, cfg: ast::MetaItem); /// Tools registered with `#![register_tool]` and used by tool attributes and lints. fn registered_tools(&self) -> &RegisteredTools; @@ -1424,12 +1424,11 @@ pub fn parse_macro_name_and_helper_attrs( /// See #73345 and #83125 for more details. /// FIXME(#73933): Remove this eventually. fn pretty_printing_compatibility_hack(item: &Item, psess: &ParseSess) { - let name = item.ident.name; - if name == sym::ProceduralMasqueradeDummyType - && let ast::ItemKind::Enum(enum_def, _) = &item.kind + if let ast::ItemKind::Enum(ident, enum_def, _) = &item.kind + && ident.name == sym::ProceduralMasqueradeDummyType && let [variant] = &*enum_def.variants && variant.ident.name == sym::Input - && let FileName::Real(real) = psess.source_map().span_to_filename(item.ident.span) + && let FileName::Real(real) = psess.source_map().span_to_filename(ident.span) && let Some(c) = real .local_path() .unwrap_or(Path::new("")) diff --git a/compiler/rustc_expand/src/build.rs b/compiler/rustc_expand/src/build.rs index 89a750bb39f0c..14b8cc90d97d6 100644 --- a/compiler/rustc_expand/src/build.rs +++ b/compiler/rustc_expand/src/build.rs @@ -1,8 +1,10 @@ use rustc_ast::ptr::P; +use rustc_ast::token::Delimiter; +use rustc_ast::tokenstream::TokenStream; use rustc_ast::util::literal; use rustc_ast::{ self as ast, AnonConst, AttrVec, BlockCheckMode, Expr, LocalKind, MatchKind, PatKind, UnOp, - attr, token, + attr, token, tokenstream, }; use rustc_span::source_map::Spanned; use rustc_span::{DUMMY_SP, Ident, Span, Symbol, kw, sym}; @@ -55,13 +57,13 @@ impl<'a> ExtCtxt<'a> { &self, span: Span, path: ast::Path, - delim: ast::token::Delimiter, - tokens: ast::tokenstream::TokenStream, + delim: Delimiter, + tokens: TokenStream, ) -> P { P(ast::MacCall { path, args: P(ast::DelimArgs { - dspan: ast::tokenstream::DelimSpan { open: span, close: span }, + dspan: tokenstream::DelimSpan { open: span, close: span }, delim, tokens, }), @@ -230,6 +232,7 @@ impl<'a> ExtCtxt<'a> { self.pat_ident(sp, ident) }; let local = P(ast::Local { + super_: None, pat, ty, id: ast::DUMMY_NODE_ID, @@ -245,6 +248,7 @@ impl<'a> ExtCtxt<'a> { /// Generates `let _: Type;`, which is usually used for type assertions. pub fn stmt_let_type_only(&self, span: Span, ty: P) -> ast::Stmt { let local = P(ast::Local { + super_: None, pat: self.pat_wild(span), ty: Some(ty), id: ast::DUMMY_NODE_ID, @@ -478,8 +482,8 @@ impl<'a> ExtCtxt<'a> { span, [sym::std, sym::unreachable].map(|s| Ident::new(s, span)).to_vec(), ), - ast::token::Delimiter::Parenthesis, - ast::tokenstream::TokenStream::default(), + Delimiter::Parenthesis, + TokenStream::default(), ), ) } @@ -662,15 +666,8 @@ impl<'a> ExtCtxt<'a> { P(ast::FnDecl { inputs, output }) } - pub fn item( - &self, - span: Span, - name: Ident, - attrs: ast::AttrVec, - kind: ast::ItemKind, - ) -> P { + pub fn item(&self, span: Span, attrs: ast::AttrVec, kind: ast::ItemKind) -> P { P(ast::Item { - ident: name, attrs, id: ast::DUMMY_NODE_ID, kind, @@ -687,17 +684,17 @@ impl<'a> ExtCtxt<'a> { pub fn item_static( &self, span: Span, - name: Ident, + ident: Ident, ty: P, mutability: ast::Mutability, expr: P, ) -> P { self.item( span, - name, AttrVec::new(), ast::ItemKind::Static( ast::StaticItem { + ident, ty, safety: ast::Safety::Default, mutability, @@ -712,18 +709,18 @@ impl<'a> ExtCtxt<'a> { pub fn item_const( &self, span: Span, - name: Ident, + ident: Ident, ty: P, expr: P, ) -> P { let defaultness = ast::Defaultness::Final; self.item( span, - name, AttrVec::new(), ast::ItemKind::Const( ast::ConstItem { defaultness, + ident, // FIXME(generic_const_items): Pass the generics as a parameter. generics: ast::Generics::default(), ty, diff --git a/compiler/rustc_expand/src/config.rs b/compiler/rustc_expand/src/config.rs index bcc2703c39b39..0994813ecb9c5 100644 --- a/compiler/rustc_expand/src/config.rs +++ b/compiler/rustc_expand/src/config.rs @@ -162,7 +162,7 @@ pub(crate) fn attr_into_trace(mut attr: Attribute, trace_name: Symbol) -> Attrib let NormalAttr { item, tokens } = &mut **normal; item.path.segments[0].ident.name = trace_name; // This makes the trace attributes unobservable to token-based proc macros. - *tokens = Some(LazyAttrTokenStream::new(AttrTokenStream::default())); + *tokens = Some(LazyAttrTokenStream::new_direct(AttrTokenStream::default())); } AttrKind::DocComment(..) => unreachable!(), } @@ -192,7 +192,7 @@ impl<'a> StripUnconfigured<'a> { if self.config_tokens { if let Some(Some(tokens)) = node.tokens_mut() { let attr_stream = tokens.to_attr_token_stream(); - *tokens = LazyAttrTokenStream::new(self.configure_tokens(&attr_stream)); + *tokens = LazyAttrTokenStream::new_direct(self.configure_tokens(&attr_stream)); } } } @@ -223,7 +223,7 @@ impl<'a> StripUnconfigured<'a> { target.attrs.flat_map_in_place(|attr| self.process_cfg_attr(&attr)); if self.in_cfg(&target.attrs) { - target.tokens = LazyAttrTokenStream::new( + target.tokens = LazyAttrTokenStream::new_direct( self.configure_tokens(&target.tokens.to_attr_token_stream()), ); Some(AttrTokenTree::AttrsTarget(target)) @@ -237,22 +237,7 @@ impl<'a> StripUnconfigured<'a> { inner = self.configure_tokens(&inner); Some(AttrTokenTree::Delimited(sp, spacing, delim, inner)) } - AttrTokenTree::Token( - Token { - kind: - TokenKind::NtIdent(..) - | TokenKind::NtLifetime(..) - | TokenKind::Interpolated(..), - .. - }, - _, - ) => { - panic!("Nonterminal should have been flattened: {:?}", tree); - } - AttrTokenTree::Token( - Token { kind: TokenKind::OpenDelim(_) | TokenKind::CloseDelim(_), .. }, - _, - ) => { + AttrTokenTree::Token(Token { kind, .. }, _) if kind.is_delim() => { panic!("Should be `AttrTokenTree::Delimited`, not delim tokens: {:?}", tree); } AttrTokenTree::Token(token, spacing) => Some(AttrTokenTree::Token(token, spacing)), @@ -289,7 +274,12 @@ impl<'a> StripUnconfigured<'a> { /// is in the original source file. Gives a compiler error if the syntax of /// the attribute is incorrect. pub(crate) fn expand_cfg_attr(&self, cfg_attr: &Attribute, recursive: bool) -> Vec { - validate_attr::check_attribute_safety(&self.sess.psess, AttributeSafety::Normal, &cfg_attr); + validate_attr::check_attribute_safety( + &self.sess.psess, + Some(AttributeSafety::Normal), + &cfg_attr, + ast::CRATE_NODE_ID, + ); // A trace attribute left in AST in place of the original `cfg_attr` attribute. // It can later be used by lints or other diagnostics. @@ -376,7 +366,7 @@ impl<'a> StripUnconfigured<'a> { .to_attr_token_stream(), )); - let tokens = Some(LazyAttrTokenStream::new(AttrTokenStream::new(trees))); + let tokens = Some(LazyAttrTokenStream::new_direct(AttrTokenStream::new(trees))); let attr = ast::attr::mk_attr_from_item( &self.sess.psess.attr_id_generator, item, diff --git a/compiler/rustc_expand/src/expand.rs b/compiler/rustc_expand/src/expand.rs index 22da1179feb98..d4853d1357f30 100644 --- a/compiler/rustc_expand/src/expand.rs +++ b/compiler/rustc_expand/src/expand.rs @@ -7,13 +7,12 @@ use std::{iter, mem}; use rustc_ast as ast; use rustc_ast::mut_visit::*; use rustc_ast::ptr::P; -use rustc_ast::token::{self, Delimiter}; use rustc_ast::tokenstream::TokenStream; use rustc_ast::visit::{self, AssocCtxt, Visitor, VisitorResult, try_visit, walk_list}; use rustc_ast::{ AssocItemKind, AstNodeWrapper, AttrArgs, AttrStyle, AttrVec, ExprKind, ForeignItemKind, HasAttrs, HasNodeId, Inline, ItemKind, MacStmtStyle, MetaItemInner, MetaItemKind, ModKind, - NodeId, PatKind, StmtKind, TyKind, + NodeId, PatKind, StmtKind, TyKind, token, }; use rustc_ast_pretty::pprust; use rustc_data_structures::flat_map_in_place::FlatMapInPlace; @@ -743,6 +742,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> { && matches!( item_inner.kind, ItemKind::Mod( + _, _, ModKind::Unloaded | ModKind::Loaded(_, Inline::No, _, _), ) @@ -911,7 +911,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> { impl<'ast, 'a> Visitor<'ast> for GateProcMacroInput<'a> { fn visit_item(&mut self, item: &'ast ast::Item) { match &item.kind { - ItemKind::Mod(_, mod_kind) + ItemKind::Mod(_, _, mod_kind) if !matches!(mod_kind, ModKind::Loaded(_, Inline::Yes, _, _)) => { feature_err( @@ -1003,7 +1003,7 @@ pub fn parse_ast_fragment<'a>( AstFragmentKind::Stmts => { let mut stmts = SmallVec::new(); // Won't make progress on a `}`. - while this.token != token::Eof && this.token != token::CloseDelim(Delimiter::Brace) { + while this.token != token::Eof && this.token != token::CloseBrace { if let Some(stmt) = this.parse_full_stmt(AttemptLocalParseRecovery::Yes)? { stmts.push(stmt); } @@ -1168,9 +1168,9 @@ trait InvocationCollectorNode: HasAttrs + HasNodeId + Sized { collector.cx.dcx().emit_err(RemoveNodeNotSupported { span, descr: Self::descr() }); } - /// All of the names (items) declared by this node. + /// All of the identifiers (items) declared by this node. /// This is an approximation and should only be used for diagnostics. - fn declared_names(&self) -> Vec { + fn declared_idents(&self) -> Vec { vec![] } } @@ -1221,9 +1221,8 @@ impl InvocationCollectorNode for P { } // Work around borrow checker not seeing through `P`'s deref. - let (ident, span, mut attrs) = (node.ident, node.span, mem::take(&mut node.attrs)); - let ItemKind::Mod(_, mod_kind) = &mut node.kind else { unreachable!() }; - + let (span, mut attrs) = (node.span, mem::take(&mut node.attrs)); + let ItemKind::Mod(_, ident, ref mut mod_kind) = node.kind else { unreachable!() }; let ecx = &mut collector.cx; let (file_path, dir_path, dir_ownership) = match mod_kind { ModKind::Loaded(_, inline, _, _) => { @@ -1305,7 +1304,8 @@ impl InvocationCollectorNode for P { collector.cx.current_expansion.module = orig_module; res } - fn declared_names(&self) -> Vec { + + fn declared_idents(&self) -> Vec { if let ItemKind::Use(ut) = &self.kind { fn collect_use_tree_leaves(ut: &ast::UseTree, idents: &mut Vec) { match &ut.kind { @@ -1324,7 +1324,7 @@ impl InvocationCollectorNode for P { return idents; } - vec![self.ident] + if let Some(ident) = self.kind.ident() { vec![ident] } else { vec![] } } } @@ -1844,11 +1844,11 @@ fn build_single_delegations<'a, Node: InvocationCollectorNode>( id: ast::DUMMY_NODE_ID, span: if from_glob { item_span } else { ident.span }, vis: item.vis.clone(), - ident: rename.unwrap_or(ident), kind: Node::delegation_item_kind(Box::new(ast::Delegation { id: ast::DUMMY_NODE_ID, qself: deleg.qself.clone(), path, + ident: rename.unwrap_or(ident), rename, body: deleg.body.clone(), from_glob, @@ -1983,7 +1983,11 @@ impl<'a, 'b> InvocationCollector<'a, 'b> { let mut span: Option = None; while let Some(attr) = attrs.next() { rustc_ast_passes::feature_gate::check_attribute(attr, self.cx.sess, features); - validate_attr::check_attr(&self.cx.sess.psess, attr); + validate_attr::check_attr( + &self.cx.sess.psess, + attr, + self.cx.current_expansion.lint_node_id, + ); let current_span = if let Some(sp) = span { sp.to(attr.span) } else { attr.span }; span = Some(current_span); @@ -2052,25 +2056,25 @@ impl<'a, 'b> InvocationCollector<'a, 'b> { ) -> Node::OutputTy { loop { return match self.take_first_attr(&mut node) { - Some((attr, pos, derives)) => match attr.name_or_empty() { - sym::cfg => { + Some((attr, pos, derives)) => match attr.name() { + Some(sym::cfg) => { let (res, meta_item) = self.expand_cfg_true(&mut node, attr, pos); if res { continue; } if let Some(meta_item) = meta_item { - for name in node.declared_names() { + for ident in node.declared_idents() { self.cx.resolver.append_stripped_cfg_item( self.cx.current_expansion.lint_node_id, - name, + ident, meta_item.clone(), ) } } Default::default() } - sym::cfg_attr => { + Some(sym::cfg_attr) => { self.expand_cfg_attr(&mut node, &attr, pos); continue; } @@ -2143,8 +2147,8 @@ impl<'a, 'b> InvocationCollector<'a, 'b> { ) { loop { return match self.take_first_attr(node) { - Some((attr, pos, derives)) => match attr.name_or_empty() { - sym::cfg => { + Some((attr, pos, derives)) => match attr.name() { + Some(sym::cfg) => { let span = attr.span; if self.expand_cfg_true(node, attr, pos).0 { continue; @@ -2153,7 +2157,7 @@ impl<'a, 'b> InvocationCollector<'a, 'b> { node.expand_cfg_false(self, pos, span); continue; } - sym::cfg_attr => { + Some(sym::cfg_attr) => { self.expand_cfg_attr(node, &attr, pos); continue; } diff --git a/compiler/rustc_expand/src/lib.rs b/compiler/rustc_expand/src/lib.rs index 4222c9fe90616..79f838e2e33f2 100644 --- a/compiler/rustc_expand/src/lib.rs +++ b/compiler/rustc_expand/src/lib.rs @@ -1,11 +1,11 @@ // tidy-alphabetical-start #![allow(internal_features)] #![allow(rustc::diagnostic_outside_of_impl)] +#![cfg_attr(bootstrap, feature(let_chains))] #![doc(rust_logo)] #![feature(array_windows)] #![feature(associated_type_defaults)] #![feature(if_let_guard)] -#![feature(let_chains)] #![feature(macro_metavar_expr)] #![feature(map_try_insert)] #![feature(proc_macro_diagnostic)] diff --git a/compiler/rustc_expand/src/mbe/diagnostics.rs b/compiler/rustc_expand/src/mbe/diagnostics.rs index e60a9e83184fb..698492f42e28f 100644 --- a/compiler/rustc_expand/src/mbe/diagnostics.rs +++ b/compiler/rustc_expand/src/mbe/diagnostics.rs @@ -1,6 +1,6 @@ use std::borrow::Cow; -use rustc_ast::token::{self, Delimiter, Token, TokenKind}; +use rustc_ast::token::{self, Token}; use rustc_ast::tokenstream::TokenStream; use rustc_errors::{Applicability, Diag, DiagCtxtHandle, DiagMessage}; use rustc_macros::Subdiagnostic; @@ -66,10 +66,8 @@ pub(super) fn failed_to_match_macro( } if let MatcherLoc::Token { token: expected_token } = &remaining_matcher - && (matches!(expected_token.kind, TokenKind::Interpolated(_)) - || matches!(token.kind, TokenKind::Interpolated(_)) - || matches!(expected_token.kind, TokenKind::OpenDelim(Delimiter::Invisible(_))) - || matches!(token.kind, TokenKind::OpenDelim(Delimiter::Invisible(_)))) + && (matches!(expected_token.kind, token::OpenInvisible(_)) + || matches!(token.kind, token::OpenInvisible(_))) { err.note("captured metavariables except for `:tt`, `:ident` and `:lifetime` cannot be compared to other tokens"); err.note("see for more information"); @@ -162,7 +160,7 @@ impl<'dcx, 'matcher> Tracker<'matcher> for CollectTrackerAndEmitter<'dcx, 'match .is_none_or(|failure| failure.is_better_position(*approx_position)) { self.best_failure = Some(BestFailure { - token: token.clone(), + token: *token, position_in_tokenstream: *approx_position, msg, remaining_matcher: self diff --git a/compiler/rustc_expand/src/mbe/macro_parser.rs b/compiler/rustc_expand/src/mbe/macro_parser.rs index d709fd792817a..c78beb40688fe 100644 --- a/compiler/rustc_expand/src/mbe/macro_parser.rs +++ b/compiler/rustc_expand/src/mbe/macro_parser.rs @@ -179,11 +179,11 @@ pub(super) fn compute_locs(matcher: &[TokenTree]) -> Vec { for tt in tts { match tt { TokenTree::Token(token) => { - locs.push(MatcherLoc::Token { token: token.clone() }); + locs.push(MatcherLoc::Token { token: *token }); } TokenTree::Delimited(span, _, delimited) => { - let open_token = Token::new(token::OpenDelim(delimited.delim), span.open); - let close_token = Token::new(token::CloseDelim(delimited.delim), span.close); + let open_token = Token::new(delimited.delim.as_open_token_kind(), span.open); + let close_token = Token::new(delimited.delim.as_close_token_kind(), span.close); locs.push(MatcherLoc::Delimited); locs.push(MatcherLoc::Token { token: open_token }); @@ -648,7 +648,7 @@ impl TtParser { // There are no possible next positions AND we aren't waiting for the black-box // parser: syntax error. return Failure(T::build_failure( - parser.token.clone(), + parser.token, parser.approx_token_stream_pos(), "no rules expected this token in macro call", )); diff --git a/compiler/rustc_expand/src/mbe/macro_rules.rs b/compiler/rustc_expand/src/mbe/macro_rules.rs index 77ec598e62a17..93604a149f1d9 100644 --- a/compiler/rustc_expand/src/mbe/macro_rules.rs +++ b/compiler/rustc_expand/src/mbe/macro_rules.rs @@ -6,13 +6,13 @@ use std::{mem, slice}; use ast::token::IdentIsRaw; use rustc_ast::token::NtPatKind::*; use rustc_ast::token::TokenKind::*; -use rustc_ast::token::{self, Delimiter, NonterminalKind, Token, TokenKind}; +use rustc_ast::token::{self, NonterminalKind, Token, TokenKind}; use rustc_ast::tokenstream::{DelimSpan, TokenStream}; use rustc_ast::{self as ast, DUMMY_NODE_ID, NodeId}; use rustc_ast_pretty::pprust; use rustc_attr_parsing::{AttributeKind, find_attr}; use rustc_data_structures::fx::{FxHashMap, FxIndexMap}; -use rustc_errors::{Applicability, ErrorGuaranteed}; +use rustc_errors::{Applicability, Diag, ErrorGuaranteed}; use rustc_feature::Features; use rustc_hir as hir; use rustc_lint_defs::BuiltinLintDiag; @@ -27,19 +27,18 @@ use rustc_span::hygiene::Transparency; use rustc_span::{Ident, MacroRulesNormalizedIdent, Span, kw, sym}; use tracing::{debug, instrument, trace, trace_span}; -use super::diagnostics; use super::macro_parser::{NamedMatches, NamedParseResult}; +use super::{SequenceRepetition, diagnostics}; use crate::base::{ DummyResult, ExpandResult, ExtCtxt, MacResult, MacroExpanderResult, SyntaxExtension, SyntaxExtensionKind, TTMacroExpander, }; use crate::expand::{AstFragment, AstFragmentKind, ensure_complete_parse, parse_ast_fragment}; -use crate::mbe; use crate::mbe::diagnostics::{annotate_doc_comment, parse_failure_msg}; -use crate::mbe::macro_check; use crate::mbe::macro_parser::NamedMatch::*; use crate::mbe::macro_parser::{Error, ErrorReported, Failure, MatcherLoc, Success, TtParser}; use crate::mbe::transcribe::transcribe; +use crate::mbe::{self, KleeneOp, macro_check}; pub(crate) struct ParserAnyMacro<'a> { parser: Parser<'a>, @@ -640,6 +639,37 @@ fn is_empty_token_tree(sess: &Session, seq: &mbe::SequenceRepetition) -> bool { } } +/// Checks if a `vis` nonterminal fragment is unnecessarily wrapped in an optional repetition. +/// +/// When a `vis` fragment (which can already be empty) is wrapped in `$(...)?`, +/// this suggests removing the redundant repetition syntax since it provides no additional benefit. +fn check_redundant_vis_repetition( + err: &mut Diag<'_>, + sess: &Session, + seq: &SequenceRepetition, + span: &DelimSpan, +) { + let is_zero_or_one: bool = seq.kleene.op == KleeneOp::ZeroOrOne; + let is_vis = seq.tts.first().map_or(false, |tt| { + matches!(tt, mbe::TokenTree::MetaVarDecl(_, _, Some(NonterminalKind::Vis))) + }); + + if is_vis && is_zero_or_one { + err.note("a `vis` fragment can already be empty"); + err.multipart_suggestion( + "remove the `$(` and `)?`", + vec![ + ( + sess.source_map().span_extend_to_prev_char_before(span.open, '$', true), + "".to_string(), + ), + (span.close.with_hi(seq.kleene.span.hi()), "".to_string()), + ], + Applicability::MaybeIncorrect, + ); + } +} + /// Checks that the lhs contains no repetition which could match an empty token /// tree, because then the matcher would hang indefinitely. fn check_lhs_no_empty_seq(sess: &Session, tts: &[mbe::TokenTree]) -> Result<(), ErrorGuaranteed> { @@ -654,8 +684,10 @@ fn check_lhs_no_empty_seq(sess: &Session, tts: &[mbe::TokenTree]) -> Result<(), TokenTree::Sequence(span, seq) => { if is_empty_token_tree(sess, seq) { let sp = span.entire(); - let guar = sess.dcx().span_err(sp, "repetition matches empty token tree"); - return Err(guar); + let mut err = + sess.dcx().struct_span_err(sp, "repetition matches empty token tree"); + check_redundant_vis_repetition(&mut err, sess, seq, span); + return Err(err.emit()); } check_lhs_no_empty_seq(sess, &seq.tts)? } @@ -752,7 +784,7 @@ impl<'tt> FirstSets<'tt> { TokenTree::Delimited(span, _, delimited) => { build_recur(sets, &delimited.tts); first.replace_with(TtHandle::from_token_kind( - token::OpenDelim(delimited.delim), + delimited.delim.as_open_token_kind(), span.open, )); } @@ -778,7 +810,7 @@ impl<'tt> FirstSets<'tt> { // token could be the separator token itself. if let (Some(sep), true) = (&seq_rep.separator, subfirst.maybe_empty) { - first.add_one_maybe(TtHandle::from_token(sep.clone())); + first.add_one_maybe(TtHandle::from_token(*sep)); } // Reverse scan: Sequence comes before `first`. @@ -820,7 +852,7 @@ impl<'tt> FirstSets<'tt> { } TokenTree::Delimited(span, _, delimited) => { first.add_one(TtHandle::from_token_kind( - token::OpenDelim(delimited.delim), + delimited.delim.as_open_token_kind(), span.open, )); return first; @@ -841,7 +873,7 @@ impl<'tt> FirstSets<'tt> { // If the sequence contents can be empty, then the first // token could be the separator token itself. if let (Some(sep), true) = (&seq_rep.separator, subfirst.maybe_empty) { - first.add_one_maybe(TtHandle::from_token(sep.clone())); + first.add_one_maybe(TtHandle::from_token(*sep)); } assert!(first.maybe_empty); @@ -917,7 +949,7 @@ impl<'tt> Clone for TtHandle<'tt> { // This variant *must* contain a `mbe::TokenTree::Token`, and not // any other variant of `mbe::TokenTree`. TtHandle::Token(mbe::TokenTree::Token(tok)) => { - TtHandle::Token(mbe::TokenTree::Token(tok.clone())) + TtHandle::Token(mbe::TokenTree::Token(*tok)) } _ => unreachable!(), @@ -1067,7 +1099,7 @@ fn check_matcher_core<'tt>( } TokenTree::Delimited(span, _, d) => { let my_suffix = TokenSet::singleton(TtHandle::from_token_kind( - token::CloseDelim(d.delim), + d.delim.as_close_token_kind(), span.close, )); check_matcher_core(sess, node_id, first_sets, &d.tts, &my_suffix)?; @@ -1093,7 +1125,7 @@ fn check_matcher_core<'tt>( let mut new; let my_suffix = if let Some(sep) = &seq_rep.separator { new = suffix_first.clone(); - new.add_one_maybe(TtHandle::from_token(sep.clone())); + new.add_one_maybe(TtHandle::from_token(*sep)); &new } else { &suffix_first @@ -1267,7 +1299,9 @@ enum IsInFollow { fn is_in_follow(tok: &mbe::TokenTree, kind: NonterminalKind) -> IsInFollow { use mbe::TokenTree; - if let TokenTree::Token(Token { kind: token::CloseDelim(_), .. }) = *tok { + if let TokenTree::Token(Token { kind, .. }) = tok + && kind.close_delim().is_some() + { // closing a token tree can never be matched by any fragment; // iow, we always require that `(` and `)` match, etc. IsInFollow::Yes @@ -1326,16 +1360,8 @@ fn is_in_follow(tok: &mbe::TokenTree, kind: NonterminalKind) -> IsInFollow { ]; match tok { TokenTree::Token(token) => match token.kind { - OpenDelim(Delimiter::Brace) - | OpenDelim(Delimiter::Bracket) - | Comma - | FatArrow - | Colon - | Eq - | Gt - | Shr - | Semi - | Or => IsInFollow::Yes, + OpenBrace | OpenBracket | Comma | FatArrow | Colon | Eq | Gt | Shr + | Semi | Or => IsInFollow::Yes, Ident(name, IdentIsRaw::No) if name == kw::As || name == kw::Where => { IsInFollow::Yes } diff --git a/compiler/rustc_expand/src/mbe/quoted.rs b/compiler/rustc_expand/src/mbe/quoted.rs index 0ea53627fe786..0c2362f23bcfc 100644 --- a/compiler/rustc_expand/src/mbe/quoted.rs +++ b/compiler/rustc_expand/src/mbe/quoted.rs @@ -181,7 +181,10 @@ fn parse_tree<'a>( if delim != Delimiter::Parenthesis { span_dollar_dollar_or_metavar_in_the_lhs_err( sess, - &Token { kind: token::OpenDelim(delim), span: delim_span.entire() }, + &Token { + kind: delim.as_open_token_kind(), + span: delim_span.entire(), + }, ); } } else { @@ -217,7 +220,8 @@ fn parse_tree<'a>( } Delimiter::Parenthesis => {} _ => { - let token = pprust::token_kind_to_string(&token::OpenDelim(delim)); + let token = + pprust::token_kind_to_string(&delim.as_open_token_kind()); sess.dcx().emit_err(errors::ExpectedParenOrBrace { span: delim_span.entire(), token, @@ -283,7 +287,7 @@ fn parse_tree<'a>( } // `tree` is an arbitrary token. Keep it. - tokenstream::TokenTree::Token(token, _) => TokenTree::Token(token.clone()), + tokenstream::TokenTree::Token(token, _) => TokenTree::Token(*token), // `tree` is the beginning of a delimited set of tokens (e.g., `(` or `{`). We need to // descend into the delimited set and further parse it. @@ -321,7 +325,7 @@ fn parse_kleene_op( match iter.next() { Some(tokenstream::TokenTree::Token(token, _)) => match kleene_op(token) { Some(op) => Ok(Ok((op, token.span))), - None => Ok(Err(token.clone())), + None => Ok(Err(*token)), }, tree => Err(tree.map_or(span, tokenstream::TokenTree::span)), } diff --git a/compiler/rustc_expand/src/mbe/transcribe.rs b/compiler/rustc_expand/src/mbe/transcribe.rs index cffa4af6ac3d0..2d3fd7702da5b 100644 --- a/compiler/rustc_expand/src/mbe/transcribe.rs +++ b/compiler/rustc_expand/src/mbe/transcribe.rs @@ -1,13 +1,10 @@ use std::mem; -use std::sync::Arc; -use rustc_ast::mut_visit::{self, MutVisitor}; use rustc_ast::token::{ - self, Delimiter, IdentIsRaw, InvisibleOrigin, Lit, LitKind, MetaVarKind, Nonterminal, Token, - TokenKind, + self, Delimiter, IdentIsRaw, InvisibleOrigin, Lit, LitKind, MetaVarKind, Token, TokenKind, }; use rustc_ast::tokenstream::{DelimSpacing, DelimSpan, Spacing, TokenStream, TokenTree}; -use rustc_ast::{ExprKind, StmtKind, TyKind}; +use rustc_ast::{ExprKind, StmtKind, TyKind, UnOp}; use rustc_data_structures::fx::FxHashMap; use rustc_errors::{Diag, DiagCtxtHandle, PResult, pluralize}; use rustc_parse::lexer::nfc_normalize; @@ -31,10 +28,8 @@ use crate::mbe::{self, KleeneOp, MetaVarExpr}; // A Marker adds the given mark to the syntax context. struct Marker(LocalExpnId, Transparency, FxHashMap); -impl MutVisitor for Marker { - const VISIT_TOKENS: bool = true; - - fn visit_span(&mut self, span: &mut Span) { +impl Marker { + fn mark_span(&mut self, span: &mut Span) { // `apply_mark` is a relatively expensive operation, both due to taking hygiene lock, and // by itself. All tokens in a macro body typically have the same syntactic context, unless // it's some advanced case with macro-generated macros. So if we cache the marked version @@ -166,7 +161,7 @@ pub(super) fn transcribe<'a>( if repeat_idx < repeat_len { frame.idx = 0; if let Some(sep) = sep { - result.push(TokenTree::Token(sep.clone(), Spacing::Alone)); + result.push(TokenTree::Token(*sep, Spacing::Alone)); } continue; } @@ -294,7 +289,7 @@ pub(super) fn transcribe<'a>( // Emit as a token stream within `Delimiter::Invisible` to maintain // parsing priorities. - marker.visit_span(&mut sp); + marker.mark_span(&mut sp); with_metavar_spans(|mspans| mspans.insert(mk_span, sp)); // Both the open delim and close delim get the same span, which covers the // `$foo` in the decl macro RHS. @@ -308,17 +303,19 @@ pub(super) fn transcribe<'a>( let tt = match cur_matched { MatchedSingle(ParseNtResult::Tt(tt)) => { // `tt`s are emitted into the output stream directly as "raw tokens", - // without wrapping them into groups. + // without wrapping them into groups. Other variables are emitted into + // the output stream as groups with `Delimiter::Invisible` to maintain + // parsing priorities. maybe_use_metavar_location(psess, &stack, sp, tt, &mut marker) } MatchedSingle(ParseNtResult::Ident(ident, is_raw)) => { - marker.visit_span(&mut sp); + marker.mark_span(&mut sp); with_metavar_spans(|mspans| mspans.insert(ident.span, sp)); let kind = token::NtIdent(*ident, *is_raw); TokenTree::token_alone(kind, sp) } MatchedSingle(ParseNtResult::Lifetime(ident, is_raw)) => { - marker.visit_span(&mut sp); + marker.mark_span(&mut sp); with_metavar_spans(|mspans| mspans.insert(ident.span, sp)); let kind = token::NtLifetime(*ident, *is_raw); TokenTree::token_alone(kind, sp) @@ -326,6 +323,11 @@ pub(super) fn transcribe<'a>( MatchedSingle(ParseNtResult::Item(item)) => { mk_delimited(item.span, MetaVarKind::Item, TokenStream::from_ast(item)) } + MatchedSingle(ParseNtResult::Block(block)) => mk_delimited( + block.span, + MetaVarKind::Block, + TokenStream::from_ast(block), + ), MatchedSingle(ParseNtResult::Stmt(stmt)) => { let stream = if let StmtKind::Empty = stmt.kind { // FIXME: Properly collect tokens for empty statements. @@ -340,6 +342,30 @@ pub(super) fn transcribe<'a>( MetaVarKind::Pat(*pat_kind), TokenStream::from_ast(pat), ), + MatchedSingle(ParseNtResult::Expr(expr, kind)) => { + let (can_begin_literal_maybe_minus, can_begin_string_literal) = + match &expr.kind { + ExprKind::Lit(_) => (true, true), + ExprKind::Unary(UnOp::Neg, e) + if matches!(&e.kind, ExprKind::Lit(_)) => + { + (true, false) + } + _ => (false, false), + }; + mk_delimited( + expr.span, + MetaVarKind::Expr { + kind: *kind, + can_begin_literal_maybe_minus, + can_begin_string_literal, + }, + TokenStream::from_ast(expr), + ) + } + MatchedSingle(ParseNtResult::Literal(lit)) => { + mk_delimited(lit.span, MetaVarKind::Literal, TokenStream::from_ast(lit)) + } MatchedSingle(ParseNtResult::Ty(ty)) => { let is_path = matches!(&ty.kind, TyKind::Path(None, _path)); mk_delimited( @@ -362,15 +388,6 @@ pub(super) fn transcribe<'a>( MatchedSingle(ParseNtResult::Vis(vis)) => { mk_delimited(vis.span, MetaVarKind::Vis, TokenStream::from_ast(vis)) } - MatchedSingle(ParseNtResult::Nt(nt)) => { - // Other variables are emitted into the output stream as groups with - // `Delimiter::Invisible` to maintain parsing priorities. - // `Interpolated` is currently used for such groups in rustc parser. - marker.visit_span(&mut sp); - let use_span = nt.use_span(); - with_metavar_spans(|mspans| mspans.insert(use_span, sp)); - TokenTree::token_alone(token::Interpolated(Arc::clone(nt)), sp) - } MatchedSeq(..) => { // We were unable to descend far enough. This is an error. return Err(dcx.create_err(VarStillRepeating { span: sp, ident })); @@ -380,8 +397,8 @@ pub(super) fn transcribe<'a>( } else { // If we aren't able to match the meta-var, we push it back into the result but // with modified syntax context. (I believe this supports nested macros). - marker.visit_span(&mut sp); - marker.visit_ident(&mut original_ident); + marker.mark_span(&mut sp); + marker.mark_span(&mut original_ident.span); result.push(TokenTree::token_joint_hidden(token::Dollar, sp)); result.push(TokenTree::Token( Token::from_ast_ident(original_ident), @@ -410,16 +427,19 @@ pub(super) fn transcribe<'a>( // jump back out of the Delimited, pop the result_stack and add the new results back to // the previous results (from outside the Delimited). &mbe::TokenTree::Delimited(mut span, ref spacing, ref delimited) => { - mut_visit::visit_delim_span(&mut marker, &mut span); + marker.mark_span(&mut span.open); + marker.mark_span(&mut span.close); stack.push(Frame::new_delimited(delimited, span, *spacing)); result_stack.push(mem::take(&mut result)); } // Nothing much to do here. Just push the token to the result, being careful to // preserve syntax context. - mbe::TokenTree::Token(token) => { - let mut token = token.clone(); - mut_visit::visit_token(&mut marker, &mut token); + &mbe::TokenTree::Token(mut token) => { + marker.mark_span(&mut token.span); + if let token::NtIdent(ident, _) | token::NtLifetime(ident, _) = &mut token.kind { + marker.mark_span(&mut ident.span); + } let tt = TokenTree::Token(token, Spacing::Alone); result.push(tt); } @@ -484,7 +504,7 @@ fn maybe_use_metavar_location( return orig_tt.clone(); } - marker.visit_span(&mut metavar_span); + marker.mark_span(&mut metavar_span); let no_collision = match orig_tt { TokenTree::Token(token, ..) => { with_metavar_spans(|mspans| mspans.insert(token.span, metavar_span)) @@ -754,7 +774,7 @@ fn transcribe_metavar_expr<'a>( ) -> PResult<'a, ()> { let mut visited_span = || { let mut span = sp.entire(); - marker.visit_span(&mut span); + marker.mark_span(&mut span); span }; match *expr { @@ -869,10 +889,8 @@ fn extract_symbol_from_pnr<'a>( }, _, )) => Ok(*symbol), - ParseNtResult::Nt(nt) - if let Nonterminal::NtLiteral(expr) = &**nt - && let ExprKind::Lit(Lit { kind: LitKind::Str, symbol, suffix: None }) = - &expr.kind => + ParseNtResult::Literal(expr) + if let ExprKind::Lit(Lit { kind: LitKind::Str, symbol, suffix: None }) = &expr.kind => { Ok(*symbol) } diff --git a/compiler/rustc_expand/src/placeholders.rs b/compiler/rustc_expand/src/placeholders.rs index a60a87244cc6e..0136292decbcf 100644 --- a/compiler/rustc_expand/src/placeholders.rs +++ b/compiler/rustc_expand/src/placeholders.rs @@ -26,7 +26,7 @@ pub(crate) fn placeholder( }) } - let ident = Ident::empty(); + let ident = Ident::dummy(); let attrs = ast::AttrVec::new(); let vis = vis.unwrap_or(ast::Visibility { span: DUMMY_SP, @@ -62,7 +62,6 @@ pub(crate) fn placeholder( AstFragmentKind::Items => AstFragment::Items(smallvec![P(ast::Item { id, span, - ident, vis, attrs, kind: ast::ItemKind::MacCall(mac_placeholder()), @@ -71,7 +70,6 @@ pub(crate) fn placeholder( AstFragmentKind::TraitItems => AstFragment::TraitItems(smallvec![P(ast::AssocItem { id, span, - ident, vis, attrs, kind: ast::AssocItemKind::MacCall(mac_placeholder()), @@ -80,7 +78,6 @@ pub(crate) fn placeholder( AstFragmentKind::ImplItems => AstFragment::ImplItems(smallvec![P(ast::AssocItem { id, span, - ident, vis, attrs, kind: ast::AssocItemKind::MacCall(mac_placeholder()), @@ -90,7 +87,6 @@ pub(crate) fn placeholder( AstFragment::TraitImplItems(smallvec![P(ast::AssocItem { id, span, - ident, vis, attrs, kind: ast::AssocItemKind::MacCall(mac_placeholder()), @@ -101,7 +97,6 @@ pub(crate) fn placeholder( AstFragment::ForeignItems(smallvec![P(ast::ForeignItem { id, span, - ident, vis, attrs, kind: ast::ForeignItemKind::MacCall(mac_placeholder()), diff --git a/compiler/rustc_expand/src/proc_macro_server.rs b/compiler/rustc_expand/src/proc_macro_server.rs index ee6306e396103..f00201ad202af 100644 --- a/compiler/rustc_expand/src/proc_macro_server.rs +++ b/compiler/rustc_expand/src/proc_macro_server.rs @@ -1,5 +1,4 @@ use std::ops::{Bound, Range}; -use std::sync::Arc; use ast::token::IdentIsRaw; use pm::bridge::{ @@ -18,7 +17,7 @@ use rustc_parse::parser::Parser; use rustc_parse::{exp, new_parser_from_source_str, source_str_to_stream, unwrap_or_emit_fatal}; use rustc_session::parse::ParseSess; use rustc_span::def_id::CrateNum; -use rustc_span::{BytePos, FileName, Pos, SourceFile, Span, Symbol, sym}; +use rustc_span::{BytePos, FileName, Pos, Span, Symbol, sym}; use smallvec::{SmallVec, smallvec}; use crate::base::ExtCtxt; @@ -309,17 +308,8 @@ impl FromInternal<(TokenStream, &mut Rustc<'_, '_>)> for Vec { - let stream = TokenStream::from_nonterminal_ast(&nt); - trees.push(TokenTree::Group(Group { - delimiter: pm::Delimiter::None, - stream: Some(stream), - span: DelimSpan::from_single(span), - })) - } - - OpenDelim(..) | CloseDelim(..) => unreachable!(), - Eof => unreachable!(), + OpenParen | CloseParen | OpenBrace | CloseBrace | OpenBracket | CloseBracket + | OpenInvisible(_) | CloseInvisible(_) | Eof => unreachable!(), } } trees @@ -467,7 +457,6 @@ impl<'a, 'b> Rustc<'a, 'b> { impl server::Types for Rustc<'_, '_> { type FreeFunctions = FreeFunctions; type TokenStream = TokenStream; - type SourceFile = Arc; type Span = Span; type Symbol = Symbol; } @@ -673,28 +662,6 @@ impl server::TokenStream for Rustc<'_, '_> { } } -impl server::SourceFile for Rustc<'_, '_> { - fn eq(&mut self, file1: &Self::SourceFile, file2: &Self::SourceFile) -> bool { - Arc::ptr_eq(file1, file2) - } - - fn path(&mut self, file: &Self::SourceFile) -> String { - match &file.name { - FileName::Real(name) => name - .local_path() - .expect("attempting to get a file path in an imported file in `proc_macro::SourceFile::path`") - .to_str() - .expect("non-UTF8 file path in `proc_macro::SourceFile::path`") - .to_string(), - _ => file.name.prefer_local().to_string(), - } - } - - fn is_real(&mut self, file: &Self::SourceFile) -> bool { - file.is_real_file() - } -} - impl server::Span for Rustc<'_, '_> { fn debug(&mut self, span: Self::Span) -> String { if self.ecx.ecfg.span_debug { @@ -704,8 +671,29 @@ impl server::Span for Rustc<'_, '_> { } } - fn source_file(&mut self, span: Self::Span) -> Self::SourceFile { - self.psess().source_map().lookup_char_pos(span.lo()).file + fn file(&mut self, span: Self::Span) -> String { + self.psess() + .source_map() + .lookup_char_pos(span.lo()) + .file + .name + .prefer_remapped_unconditionaly() + .to_string() + } + + fn local_file(&mut self, span: Self::Span) -> Option { + self.psess() + .source_map() + .lookup_char_pos(span.lo()) + .file + .name + .clone() + .into_local_path() + .map(|p| { + p.to_str() + .expect("non-UTF8 file path in `proc_macro::SourceFile::path`") + .to_string() + }) } fn parent(&mut self, span: Self::Span) -> Option { diff --git a/compiler/rustc_feature/src/accepted.rs b/compiler/rustc_feature/src/accepted.rs index 88e6593572bc6..e3e4eefe5e10a 100644 --- a/compiler/rustc_feature/src/accepted.rs +++ b/compiler/rustc_feature/src/accepted.rs @@ -63,7 +63,7 @@ declare_features! ( /// Allows using `const` operands in inline assembly. (accepted, asm_const, "1.82.0", Some(93332)), /// Allows using `label` operands in inline assembly. - (accepted, asm_goto, "CURRENT_RUSTC_VERSION", Some(119364)), + (accepted, asm_goto, "1.87.0", Some(119364)), /// Allows using `sym` operands in inline assembly. (accepted, asm_sym, "1.66.0", Some(93333)), /// Allows the definition of associated constants in `trait` or `impl` blocks. @@ -95,6 +95,8 @@ declare_features! ( (accepted, c_unwind, "1.81.0", Some(74990)), /// Allows `#[cfg_attr(predicate, multiple, attributes, here)]`. (accepted, cfg_attr_multi, "1.33.0", Some(54881)), + /// Allows the use of `#[cfg()]`. + (accepted, cfg_boolean_literals, "CURRENT_RUSTC_VERSION", Some(131204)), /// Allows the use of `#[cfg(doctest)]`, set when rustdoc is collecting doctests. (accepted, cfg_doctest, "1.40.0", Some(62210)), /// Enables `#[cfg(panic = "...")]` config key. @@ -298,6 +300,8 @@ declare_features! ( /// Allows patterns with concurrent by-move and by-ref bindings. /// For example, you can write `Foo(a, ref b)` where `a` is by-move and `b` is by-ref. (accepted, move_ref_pattern, "1.49.0", Some(68354)), + /// Allows using `#[naked]` on functions. + (accepted, naked_functions, "CURRENT_RUSTC_VERSION", Some(90957)), /// Allows specifying modifiers in the link attribute: `#[link(modifiers = "...")]` (accepted, native_link_modifiers, "1.61.0", Some(81490)), /// Allows specifying the bundle link modifier @@ -332,7 +336,7 @@ declare_features! ( /// Allows `use<'a, 'b, A, B>` in `impl Trait + use<...>` for precise capture of generic args. (accepted, precise_capturing, "1.82.0", Some(123432)), /// Allows `use<..>` precise capturign on impl Trait in traits. - (accepted, precise_capturing_in_traits, "CURRENT_RUSTC_VERSION", Some(130044)), + (accepted, precise_capturing_in_traits, "1.87.0", Some(130044)), /// Allows procedural macros in `proc-macro` crates. (accepted, proc_macro, "1.29.0", Some(38356)), /// Allows multi-segment paths in attributes and derives. diff --git a/compiler/rustc_feature/src/builtin_attrs.rs b/compiler/rustc_feature/src/builtin_attrs.rs index 1e33e2e9393f7..a657022068ff4 100644 --- a/compiler/rustc_feature/src/builtin_attrs.rs +++ b/compiler/rustc_feature/src/builtin_attrs.rs @@ -6,6 +6,7 @@ use AttributeDuplicates::*; use AttributeGate::*; use AttributeType::*; use rustc_data_structures::fx::FxHashMap; +use rustc_span::edition::Edition; use rustc_span::{Symbol, sym}; use crate::{Features, Stability}; @@ -39,6 +40,26 @@ const GATED_CFGS: &[GatedCfg] = &[ // this is consistent with naming of the compiler flag it's for (sym::fmt_debug, sym::fmt_debug, Features::fmt_debug), (sym::emscripten_wasm_eh, sym::cfg_emscripten_wasm_eh, Features::cfg_emscripten_wasm_eh), + ( + sym::target_has_reliable_f16, + sym::cfg_target_has_reliable_f16_f128, + Features::cfg_target_has_reliable_f16_f128, + ), + ( + sym::target_has_reliable_f16_math, + sym::cfg_target_has_reliable_f16_f128, + Features::cfg_target_has_reliable_f16_f128, + ), + ( + sym::target_has_reliable_f128, + sym::cfg_target_has_reliable_f16_f128, + Features::cfg_target_has_reliable_f16_f128, + ), + ( + sym::target_has_reliable_f128_math, + sym::cfg_target_has_reliable_f16_f128, + Features::cfg_target_has_reliable_f16_f128, + ), ]; /// Find a gated cfg determined by the `pred`icate which is given the cfg's name. @@ -65,9 +86,12 @@ pub enum AttributeSafety { /// Normal attribute that does not need `#[unsafe(...)]` Normal, - /// Unsafe attribute that requires safety obligations - /// to be discharged - Unsafe, + /// Unsafe attribute that requires safety obligations to be discharged. + /// + /// An error is emitted when `#[unsafe(...)]` is omitted, except when the attribute's edition + /// is less than the one stored in `unsafe_since`. This handles attributes that were safe in + /// earlier editions, but become unsafe in later ones. + Unsafe { unsafe_since: Option }, } #[derive(Clone, Copy)] @@ -187,12 +211,23 @@ macro_rules! template { } macro_rules! ungated { + (unsafe($edition:ident) $attr:ident, $typ:expr, $tpl:expr, $duplicates:expr, $encode_cross_crate:expr $(,)?) => { + BuiltinAttribute { + name: sym::$attr, + encode_cross_crate: $encode_cross_crate, + type_: $typ, + safety: AttributeSafety::Unsafe { unsafe_since: Some(Edition::$edition) }, + template: $tpl, + gate: Ungated, + duplicates: $duplicates, + } + }; (unsafe $attr:ident, $typ:expr, $tpl:expr, $duplicates:expr, $encode_cross_crate:expr $(,)?) => { BuiltinAttribute { name: sym::$attr, encode_cross_crate: $encode_cross_crate, type_: $typ, - safety: AttributeSafety::Unsafe, + safety: AttributeSafety::Unsafe { unsafe_since: None }, template: $tpl, gate: Ungated, duplicates: $duplicates, @@ -217,7 +252,7 @@ macro_rules! gated { name: sym::$attr, encode_cross_crate: $encode_cross_crate, type_: $typ, - safety: AttributeSafety::Unsafe, + safety: AttributeSafety::Unsafe { unsafe_since: None }, template: $tpl, duplicates: $duplicates, gate: Gated(Stability::Unstable, sym::$gate, $msg, Features::$gate), @@ -228,7 +263,7 @@ macro_rules! gated { name: sym::$attr, encode_cross_crate: $encode_cross_crate, type_: $typ, - safety: AttributeSafety::Unsafe, + safety: AttributeSafety::Unsafe { unsafe_since: None }, template: $tpl, duplicates: $duplicates, gate: Gated(Stability::Unstable, sym::$attr, $msg, Features::$attr), @@ -423,11 +458,12 @@ pub static BUILTIN_ATTRIBUTES: &[BuiltinAttribute] = &[ ), ungated!(no_link, Normal, template!(Word), WarnFollowing, EncodeCrossCrate::No), ungated!(repr, Normal, template!(List: "C"), DuplicatesOk, EncodeCrossCrate::No), - ungated!(unsafe export_name, Normal, template!(NameValueStr: "name"), FutureWarnPreceding, EncodeCrossCrate::No), - ungated!(unsafe link_section, Normal, template!(NameValueStr: "name"), FutureWarnPreceding, EncodeCrossCrate::No), - ungated!(unsafe no_mangle, Normal, template!(Word), WarnFollowing, EncodeCrossCrate::No), + ungated!(unsafe(Edition2024) export_name, Normal, template!(NameValueStr: "name"), FutureWarnPreceding, EncodeCrossCrate::No), + ungated!(unsafe(Edition2024) link_section, Normal, template!(NameValueStr: "name"), FutureWarnPreceding, EncodeCrossCrate::No), + ungated!(unsafe(Edition2024) no_mangle, Normal, template!(Word), WarnFollowing, EncodeCrossCrate::No), ungated!(used, Normal, template!(Word, List: "compiler|linker"), WarnFollowing, EncodeCrossCrate::No), ungated!(link_ordinal, Normal, template!(List: "ordinal"), ErrorPreceding, EncodeCrossCrate::Yes), + ungated!(unsafe naked, Normal, template!(Word), WarnFollowing, EncodeCrossCrate::No), // Limits: ungated!( @@ -458,7 +494,6 @@ pub static BUILTIN_ATTRIBUTES: &[BuiltinAttribute] = &[ template!(NameValueStr: "windows|console"), FutureWarnFollowing, EncodeCrossCrate::No ), - ungated!(panic_handler, Normal, template!(Word), WarnFollowing, EncodeCrossCrate::Yes), // RFC 2070 // Code generation: ungated!(inline, Normal, template!(Word, List: "always|never"), FutureWarnFollowing, EncodeCrossCrate::No), @@ -502,8 +537,8 @@ pub static BUILTIN_ATTRIBUTES: &[BuiltinAttribute] = &[ // Linking: gated!( - naked, Normal, template!(Word), WarnFollowing, EncodeCrossCrate::No, - naked_functions, experimental!(naked) + export_stable, Normal, template!(Word), WarnFollowing, + EncodeCrossCrate::No, experimental!(export_stable) ), // Testing: @@ -661,14 +696,6 @@ pub static BUILTIN_ATTRIBUTES: &[BuiltinAttribute] = &[ "`rustc_never_type_options` is used to experiment with never type fallback and work on \ never type stabilization, and will never be stable" ), - rustc_attr!( - rustc_macro_edition_2021, - Normal, - template!(Word), - ErrorFollowing, - EncodeCrossCrate::No, - "makes spans in this macro edition 2021" - ), // ========================================================================== // Internal attributes: Runtime related: @@ -752,7 +779,7 @@ pub static BUILTIN_ATTRIBUTES: &[BuiltinAttribute] = &[ ), rustc_attr!( rustc_macro_transparency, Normal, - template!(NameValueStr: "transparent|semitransparent|opaque"), ErrorFollowing, + template!(NameValueStr: "transparent|semiopaque|opaque"), ErrorFollowing, EncodeCrossCrate::Yes, "used internally for testing macro hygiene", ), rustc_attr!( @@ -916,6 +943,10 @@ pub static BUILTIN_ATTRIBUTES: &[BuiltinAttribute] = &[ EncodeCrossCrate::Yes, "#[rustc_never_returns_null_ptr] is used to mark functions returning non-null pointers." ), + rustc_attr!( + rustc_no_implicit_autorefs, AttributeType::Normal, template!(Word), ErrorFollowing, EncodeCrossCrate::Yes, + "`#[rustc_no_implicit_autorefs]` is used to mark functions for which an autoref to the dereference of a raw pointer should not be used as an argument." + ), rustc_attr!( rustc_coherence_is_core, AttributeType::CrateLevel, template!(Word), ErrorFollowing, EncodeCrossCrate::No, "#![rustc_coherence_is_core] allows inherent methods on builtin types, only intended to be used in `core`." @@ -1040,6 +1071,11 @@ pub static BUILTIN_ATTRIBUTES: &[BuiltinAttribute] = &[ rustc_force_inline, Normal, template!(Word, NameValueStr: "reason"), WarnFollowing, EncodeCrossCrate::Yes, "#[rustc_force_inline] forces a free function to be inlined" ), + gated!( + // Used in resolve: + eii_mangle_extern, Normal, template!(Word), ErrorFollowing, + EncodeCrossCrate::Yes, eii_internals, "`#[eii_mangle_extern]` is for use by rustc only", + ), // ========================================================================== // Internal attributes, Testing: diff --git a/compiler/rustc_feature/src/removed.rs b/compiler/rustc_feature/src/removed.rs index 47e4f9a2fe885..402e18c5d1482 100644 --- a/compiler/rustc_feature/src/removed.rs +++ b/compiler/rustc_feature/src/removed.rs @@ -142,6 +142,9 @@ declare_features! ( /// Allows inferring `'static` outlives requirements (RFC 2093). (removed, infer_static_outlives_requirements, "1.63.0", Some(54185), Some("removed as it caused some confusion and discussion was inactive for years")), + /// Allow anonymous constants from an inline `const` block in pattern position + (removed, inline_const_pat, "CURRENT_RUSTC_VERSION", Some(76001), + Some("removed due to implementation concerns as it requires significant refactorings")), /// Lazily evaluate constants. This allows constants to depend on type parameters. (removed, lazy_normalization_consts, "1.46.0", Some(72219), Some("superseded by `generic_const_exprs`")), /// Changes `impl Trait` to capture all lifetimes in scope. @@ -244,7 +247,7 @@ declare_features! ( /// Allows unnamed fields of struct and union type (removed, unnamed_fields, "1.83.0", Some(49804), Some("feature needs redesign")), (removed, unsafe_no_drop_flag, "1.0.0", None, None), - (removed, unsized_tuple_coercion, "CURRENT_RUSTC_VERSION", Some(42877), + (removed, unsized_tuple_coercion, "1.87.0", Some(42877), Some("The feature restricts possible layouts for tuples, and this restriction is not worth it.")), /// Allows `union` fields that don't implement `Copy` as long as they don't have any drop glue. (removed, untagged_unions, "1.13.0", Some(55149), diff --git a/compiler/rustc_feature/src/unstable.rs b/compiler/rustc_feature/src/unstable.rs index 72468dd4714d7..84755fd5047af 100644 --- a/compiler/rustc_feature/src/unstable.rs +++ b/compiler/rustc_feature/src/unstable.rs @@ -205,13 +205,17 @@ declare_features! ( (unstable, anonymous_lifetime_in_impl_trait, "1.63.0", None), /// Allows access to the emscripten_wasm_eh config, used by panic_unwind and unwind (internal, cfg_emscripten_wasm_eh, "1.86.0", None), + /// Allows checking whether or not the backend correctly supports unstable float types. + (internal, cfg_target_has_reliable_f16_f128, "CURRENT_RUSTC_VERSION", None), /// Allows identifying the `compiler_builtins` crate. (internal, compiler_builtins, "1.13.0", None), /// Allows writing custom MIR (internal, custom_mir, "1.65.0", None), + /// Implementation details of externally implementatble items + (unstable, eii_internals, "CURRENT_RUSTC_VERSION", None), /// Outputs useful `assert!` messages (unstable, generic_assert, "1.63.0", None), - /// Allows using the `rust-intrinsic`'s "ABI". + /// Allows using the #[rustc_intrinsic] attribute. (internal, intrinsics, "1.0.0", None), /// Allows using `#[lang = ".."]` attribute for linking items to special compiler logic. (internal, lang_items, "1.0.0", None), @@ -314,6 +318,7 @@ declare_features! ( // Unstable `#[target_feature]` directives. (unstable, aarch64_unstable_target_feature, "1.82.0", Some(44839)), (unstable, aarch64_ver_target_feature, "1.27.0", Some(44839)), + (unstable, apx_target_feature, "CURRENT_RUSTC_VERSION", Some(139284)), (unstable, arm_target_feature, "1.27.0", Some(44839)), (unstable, avx512_target_feature, "1.27.0", Some(44839)), (unstable, bpf_target_feature, "1.54.0", Some(44839)), @@ -324,6 +329,7 @@ declare_features! ( (unstable, loongarch_target_feature, "1.73.0", Some(44839)), (unstable, m68k_target_feature, "1.85.0", Some(134328)), (unstable, mips_target_feature, "1.27.0", Some(44839)), + (unstable, movrs_target_feature, "CURRENT_RUSTC_VERSION", Some(137976)), (unstable, powerpc_target_feature, "1.27.0", Some(44839)), (unstable, prfchw_target_feature, "1.78.0", Some(44839)), (unstable, riscv_target_feature, "1.45.0", Some(44839)), @@ -380,6 +386,8 @@ declare_features! ( (unstable, associated_const_equality, "1.58.0", Some(92827)), /// Allows associated type defaults. (unstable, associated_type_defaults, "1.2.0", Some(29661)), + /// Allows implementing `AsyncDrop`. + (incomplete, async_drop, "CURRENT_RUSTC_VERSION", Some(126482)), /// Allows async functions to be called from `dyn Trait`. (incomplete, async_fn_in_dyn_trait, "1.85.0", Some(133119)), /// Allows `#[track_caller]` on async functions. @@ -388,10 +396,10 @@ declare_features! ( (unstable, async_for_loop, "1.77.0", Some(118898)), /// Allows `async` trait bound modifier. (unstable, async_trait_bounds, "1.85.0", Some(62290)), + /// Allows using Intel AVX10 target features and intrinsics + (unstable, avx10_target_feature, "CURRENT_RUSTC_VERSION", Some(138843)), /// Allows using C-variadics. (unstable, c_variadic, "1.34.0", Some(44930)), - /// Allows the use of `#[cfg()]`. - (unstable, cfg_boolean_literals, "1.83.0", Some(131204)), /// Allows the use of `#[cfg(contract_checks)` to check if contract checks are enabled. (unstable, cfg_contract_checks, "1.86.0", Some(128044)), /// Allows the use of `#[cfg(overflow_checks)` to check if integer overflow behaviour. @@ -472,12 +480,18 @@ declare_features! ( (unstable, doc_masked, "1.21.0", Some(44027)), /// Allows `dyn* Trait` objects. (incomplete, dyn_star, "1.65.0", Some(102425)), + /// Externally implementatble items + (unstable, eii, "CURRENT_RUSTC_VERSION", Some(125418)), /// Allows the .use postfix syntax `x.use` and use closures `use |x| { ... }` - (incomplete, ergonomic_clones, "CURRENT_RUSTC_VERSION", Some(132290)), + (incomplete, ergonomic_clones, "1.87.0", Some(132290)), /// Allows exhaustive pattern matching on types that contain uninhabited types. (unstable, exhaustive_patterns, "1.13.0", Some(51085)), + /// Disallows `extern` without an explicit ABI. + (unstable, explicit_extern_abis, "CURRENT_RUSTC_VERSION", Some(134986)), /// Allows explicit tail calls via `become` expression. (incomplete, explicit_tail_calls, "1.72.0", Some(112788)), + /// Allows using `#[export_stable]` which indicates that an item is exportable. + (incomplete, export_stable, "CURRENT_RUSTC_VERSION", Some(139939)), /// Allows using `aapcs`, `efiapi`, `sysv64` and `win64` as calling conventions /// for functions with varargs. (unstable, extended_varargs_abi_support, "1.65.0", Some(100189)), @@ -501,6 +515,8 @@ declare_features! ( (incomplete, fn_delegation, "1.76.0", Some(118212)), /// Allows impls for the Freeze trait. (internal, freeze_impls, "1.78.0", Some(121675)), + /// Frontmatter `---` blocks for use by external tools. + (unstable, frontmatter, "CURRENT_RUSTC_VERSION", Some(136889)), /// Allows defining gen blocks and `gen fn`. (unstable, gen_blocks, "1.75.0", Some(117078)), /// Infer generic args for both consts and types. @@ -510,7 +526,7 @@ declare_features! ( /// Allows generic parameters and where-clauses on free & associated const items. (incomplete, generic_const_items, "1.73.0", Some(113521)), /// Allows the type of const generics to depend on generic parameters - (incomplete, generic_const_parameter_types, "CURRENT_RUSTC_VERSION", Some(137626)), + (incomplete, generic_const_parameter_types, "1.87.0", Some(137626)), /// Allows any generic constants being used as pattern type range ends (incomplete, generic_pattern_types, "1.86.0", Some(136574)), /// Allows registering static items globally, possibly across crates, to iterate over at runtime. @@ -531,8 +547,6 @@ declare_features! ( (unstable, import_trait_associated_functions, "1.86.0", Some(134691)), /// Allows associated types in inherent impls. (incomplete, inherent_associated_types, "1.52.0", Some(8995)), - /// Allow anonymous constants from an inline `const` block in pattern position - (unstable, inline_const_pat, "1.58.0", Some(76001)), /// Allows using `pointer` and `reference` in intra-doc links (unstable, intra_doc_pointers, "1.51.0", Some(80896)), // Allows using the `kl` and `widekl` target features and the associated intrinsics @@ -564,8 +578,8 @@ declare_features! ( (unstable, must_not_suspend, "1.57.0", Some(83310)), /// Allows `mut ref` and `mut ref mut` identifier patterns. (incomplete, mut_ref, "1.79.0", Some(123076)), - /// Allows using `#[naked]` on functions. - (unstable, naked_functions, "1.9.0", Some(90957)), + /// Allows using `#[naked]` on `extern "Rust"` functions. + (unstable, naked_functions_rustic_abi, "CURRENT_RUSTC_VERSION", Some(138997)), /// Allows using `#[target_feature(enable = "...")]` on `#[naked]` on functions. (unstable, naked_functions_target_feature, "1.86.0", Some(138568)), /// Allows specifying the as-needed link modifier @@ -603,7 +617,7 @@ declare_features! ( /// Allows macro attributes on expressions, statements and non-inline modules. (unstable, proc_macro_hygiene, "1.30.0", Some(54727)), /// Allows the use of raw-dylibs on ELF platforms - (incomplete, raw_dylib_elf, "CURRENT_RUSTC_VERSION", Some(135694)), + (incomplete, raw_dylib_elf, "1.87.0", Some(135694)), /// Makes `&` and `&mut` patterns eat only one layer of references in Rust 2024. (incomplete, ref_pat_eat_one_layer_2024, "1.79.0", Some(123076)), /// Makes `&` and `&mut` patterns eat only one layer of references in Rust 2024—structural variant @@ -630,6 +644,8 @@ declare_features! ( (unstable, strict_provenance_lints, "1.61.0", Some(130351)), /// Allows string patterns to dereference values to match them. (unstable, string_deref_patterns, "1.67.0", Some(87121)), + /// Allows `super let` statements. + (unstable, super_let, "CURRENT_RUSTC_VERSION", Some(139076)), /// Allows subtrait items to shadow supertrait items. (unstable, supertrait_item_shadowing, "1.86.0", Some(89151)), /// Allows using `#[thread_local]` on `static` items. @@ -663,14 +679,14 @@ declare_features! ( /// Allows using the `#[used(linker)]` (or `#[used(compiler)]`) attribute. (unstable, used_with_arg, "1.60.0", Some(93798)), /// Allows use of attributes in `where` clauses. - (unstable, where_clause_attrs, "CURRENT_RUSTC_VERSION", Some(115590)), + (unstable, where_clause_attrs, "1.87.0", Some(115590)), /// Allows use of x86 `AMX` target-feature attributes and intrinsics (unstable, x86_amx_intrinsics, "1.81.0", Some(126622)), /// Allows use of the `xop` target-feature (unstable, xop_target_feature, "1.81.0", Some(127208)), /// Allows `do yeet` expressions (unstable, yeet_expr, "1.62.0", Some(96373)), - (unstable, yield_expr, "CURRENT_RUSTC_VERSION", Some(43122)), + (unstable, yield_expr, "1.87.0", Some(43122)), // !!!! !!!! !!!! !!!! !!!! !!!! !!!! !!!! !!!! !!!! !!!! // Features are listed in alphabetical order. Tidy will fail if you don't keep it this way. // !!!! !!!! !!!! !!!! !!!! !!!! !!!! !!!! !!!! !!!! !!!! diff --git a/compiler/rustc_fluent_macro/src/fluent.rs b/compiler/rustc_fluent_macro/src/fluent.rs index b04fd1b48f7ad..c96bb48a0368c 100644 --- a/compiler/rustc_fluent_macro/src/fluent.rs +++ b/compiler/rustc_fluent_macro/src/fluent.rs @@ -25,7 +25,10 @@ fn invocation_relative_path_to_absolute(span: Span, path: &str) -> PathBuf { path.to_path_buf() } else { // `/a/b/c/foo/bar.rs` contains the current macro invocation + #[cfg(bootstrap)] let mut source_file_path = span.source_file().path(); + #[cfg(not(bootstrap))] + let mut source_file_path = span.local_file().unwrap(); // `/a/b/c/foo/` source_file_path.pop(); // `/a/b/c/foo/../locales/en-US/example.ftl` diff --git a/compiler/rustc_fs_util/Cargo.toml b/compiler/rustc_fs_util/Cargo.toml index baca3bc7d49eb..90a6acade8b03 100644 --- a/compiler/rustc_fs_util/Cargo.toml +++ b/compiler/rustc_fs_util/Cargo.toml @@ -5,4 +5,5 @@ edition = "2024" [dependencies] # tidy-alphabetical-start +tempfile = "3.7.1" # tidy-alphabetical-end diff --git a/compiler/rustc_fs_util/src/lib.rs b/compiler/rustc_fs_util/src/lib.rs index 0df1b243d697e..7a883a13b72da 100644 --- a/compiler/rustc_fs_util/src/lib.rs +++ b/compiler/rustc_fs_util/src/lib.rs @@ -1,6 +1,8 @@ -use std::ffi::CString; +use std::ffi::{CString, OsStr}; use std::path::{Path, PathBuf, absolute}; -use std::{fs, io}; +use std::{env, fs, io}; + +use tempfile::TempDir; // Unfortunately, on windows, it looks like msvcrt.dll is silently translating // verbatim paths under the hood to non-verbatim paths! This manifests itself as @@ -102,3 +104,43 @@ pub fn path_to_c_string(p: &Path) -> CString { pub fn try_canonicalize>(path: P) -> io::Result { fs::canonicalize(&path).or_else(|_| absolute(&path)) } + +pub struct TempDirBuilder<'a, 'b> { + builder: tempfile::Builder<'a, 'b>, +} + +impl<'a, 'b> TempDirBuilder<'a, 'b> { + pub fn new() -> Self { + Self { builder: tempfile::Builder::new() } + } + + pub fn prefix + ?Sized>(&mut self, prefix: &'a S) -> &mut Self { + self.builder.prefix(prefix); + self + } + + pub fn suffix + ?Sized>(&mut self, suffix: &'b S) -> &mut Self { + self.builder.suffix(suffix); + self + } + + pub fn tempdir_in>(&self, dir: P) -> io::Result { + let dir = dir.as_ref(); + // On Windows in CI, we had been getting fairly frequent "Access is denied" + // errors when creating temporary directories. + // So this implements a simple retry with backoff loop. + #[cfg(windows)] + for wait in 1..11 { + match self.builder.tempdir_in(dir) { + Err(e) if e.kind() == io::ErrorKind::PermissionDenied => {} + t => return t, + } + std::thread::sleep(std::time::Duration::from_millis(1 << wait)); + } + self.builder.tempdir_in(dir) + } + + pub fn tempdir(&self) -> io::Result { + self.tempdir_in(env::temp_dir()) + } +} diff --git a/compiler/rustc_hir/src/def.rs b/compiler/rustc_hir/src/def.rs index 5f8941d4754e6..77a522814f882 100644 --- a/compiler/rustc_hir/src/def.rs +++ b/compiler/rustc_hir/src/def.rs @@ -267,15 +267,12 @@ impl DefKind { | DefKind::ForeignTy | DefKind::TraitAlias | DefKind::TyParam - | DefKind::ExternCrate => DefPathData::TypeNs(Some(name.unwrap())), + | DefKind::ExternCrate => DefPathData::TypeNs(name.unwrap()), - // An associated type names will be missing for an RPITIT. It will - // later be given a name with `synthetic` in it, if necessary. - DefKind::AssocTy => DefPathData::TypeNs(name), + // An associated type name will be missing for an RPITIT (DefPathData::AnonAssocTy), + // but those provide their own DefPathData. + DefKind::AssocTy => DefPathData::TypeNs(name.unwrap()), - // It's not exactly an anon const, but wrt DefPathData, there - // is no difference. - DefKind::Static { nested: true, .. } => DefPathData::AnonConst, DefKind::Fn | DefKind::Const | DefKind::ConstParam @@ -294,7 +291,7 @@ impl DefKind { DefKind::GlobalAsm => DefPathData::GlobalAsm, DefKind::Impl { .. } => DefPathData::Impl, DefKind::Closure => DefPathData::Closure, - DefKind::SyntheticCoroutineBody => DefPathData::Closure, + DefKind::SyntheticCoroutineBody => DefPathData::SyntheticCoroutineBody, } } @@ -451,7 +448,7 @@ pub enum Res { // FIXME(generic_const_exprs): Remove this bodge once that feature is stable. forbid_generic: bool, - /// Is this within an `impl Foo for bar`? + /// Is this within an `impl Foo for bar`?: is_trait_impl: bool, }, diff --git a/compiler/rustc_hir/src/definitions.rs b/compiler/rustc_hir/src/definitions.rs index 61f5efd9978c3..f93b9e5af5345 100644 --- a/compiler/rustc_hir/src/definitions.rs +++ b/compiler/rustc_hir/src/definitions.rs @@ -47,12 +47,9 @@ impl DefPathTable { debug_assert_eq!(self.stable_crate_id, def_path_hash.stable_crate_id()); let local_hash = def_path_hash.local_hash(); - let index = { - let index = DefIndex::from(self.index_to_key.len()); - debug!("DefPathTable::insert() - {:?} <-> {:?}", key, index); - self.index_to_key.push(key); - index - }; + let index = self.index_to_key.push(key); + debug!("DefPathTable::insert() - {key:?} <-> {index:?}"); + self.def_path_hashes.push(local_hash); debug_assert!(self.def_path_hashes.len() == self.index_to_key.len()); @@ -71,7 +68,7 @@ impl DefPathTable { // // See the documentation for DefPathHash for more information. panic!( - "found DefPathHash collision between {def_path1:?} and {def_path2:?}. \ + "found DefPathHash collision between {def_path1:#?} and {def_path2:#?}. \ Compilation cannot continue." ); } @@ -100,13 +97,31 @@ impl DefPathTable { } } +#[derive(Debug)] +pub struct DisambiguatorState { + next: UnordMap<(LocalDefId, DefPathData), u32>, +} + +impl DisambiguatorState { + pub fn new() -> Self { + Self { next: Default::default() } + } + + /// Creates a `DisambiguatorState` where the next allocated `(LocalDefId, DefPathData)` pair + /// will have `index` as the disambiguator. + pub fn with(def_id: LocalDefId, data: DefPathData, index: u32) -> Self { + let mut this = Self::new(); + this.next.insert((def_id, data), index); + this + } +} + /// The definition table containing node definitions. /// It holds the `DefPathTable` for `LocalDefId`s/`DefPath`s. /// It also stores mappings to convert `LocalDefId`s to/from `HirId`s. #[derive(Debug)] pub struct Definitions { table: DefPathTable, - next_disambiguator: UnordMap<(LocalDefId, DefPathData), u32>, } /// A unique identifier that we can use to lookup a definition @@ -130,7 +145,7 @@ impl DefKey { let DisambiguatedDefPathData { ref data, disambiguator } = self.disambiguated_data; std::mem::discriminant(data).hash(&mut hasher); - if let Some(name) = data.get_opt_name() { + if let Some(name) = data.hashed_symbol() { // Get a stable hash by considering the symbol chars rather than // the symbol index. name.as_str().hash(&mut hasher); @@ -176,7 +191,11 @@ impl DisambiguatedDefPathData { } } DefPathDataName::Anon { namespace } => { - write!(writer, "{{{}#{}}}", namespace, self.disambiguator) + if let DefPathData::AnonAssocTy(method) = self.data { + write!(writer, "{}::{{{}#{}}}", method, namespace, self.disambiguator) + } else { + write!(writer, "{{{}#{}}}", namespace, self.disambiguator) + } } } } @@ -271,9 +290,8 @@ pub enum DefPathData { Use, /// A global asm item. GlobalAsm, - /// Something in the type namespace. Will be empty for RPITIT associated - /// types, which are given a synthetic name later, if necessary. - TypeNs(Option), + /// Something in the type namespace. + TypeNs(Symbol), /// Something in the value namespace. ValueNs(Symbol), /// Something in the macro namespace. @@ -291,6 +309,15 @@ pub enum DefPathData { /// An existential `impl Trait` type node. /// Argument position `impl Trait` have a `TypeNs` with their pretty-printed name. OpaqueTy, + /// Used for remapped captured lifetimes in an existential `impl Trait` type node. + OpaqueLifetime(Symbol), + /// An anonymous associated type from an RPITIT. The symbol refers to the name of the method + /// that defined the type. + AnonAssocTy(Symbol), + /// A synthetic body for a coroutine's by-move body. + SyntheticCoroutineBody, + /// Additional static data referred to by a static. + NestedStatic, } impl Definitions { @@ -342,11 +369,20 @@ impl Definitions { let root = LocalDefId { local_def_index: table.allocate(key, def_path_hash) }; assert_eq!(root.local_def_index, CRATE_DEF_INDEX); - Definitions { table, next_disambiguator: Default::default() } + Definitions { table } } - /// Adds a definition with a parent definition. - pub fn create_def(&mut self, parent: LocalDefId, data: DefPathData) -> LocalDefId { + /// Creates a definition with a parent definition. + /// If there are multiple definitions with the same DefPathData and the same parent, use + /// `disambiguator` to differentiate them. Distinct `DisambiguatorState` instances are not + /// guaranteed to generate unique disambiguators and should instead ensure that the `parent` + /// and `data` pair is distinct from other instances. + pub fn create_def( + &mut self, + parent: LocalDefId, + data: DefPathData, + disambiguator: &mut DisambiguatorState, + ) -> LocalDefId { // We can't use `Debug` implementation for `LocalDefId` here, since it tries to acquire a // reference to `Definitions` and we're already holding a mutable reference. debug!( @@ -354,12 +390,12 @@ impl Definitions { self.def_path(parent).to_string_no_crate_verbose(), ); - // The root node must be created with `create_root_def()`. + // The root node must be created in `new()`. assert!(data != DefPathData::CrateRoot); // Find the next free disambiguator for this key. let disambiguator = { - let next_disamb = self.next_disambiguator.entry((parent, data)).or_insert(0); + let next_disamb = disambiguator.next.entry((parent, data)).or_insert(0); let disambiguator = *next_disamb; *next_disamb = next_disamb.checked_add(1).expect("disambiguator overflow"); disambiguator @@ -411,26 +447,49 @@ impl DefPathData { pub fn get_opt_name(&self) -> Option { use self::DefPathData::*; match *self { - TypeNs(name) => name, - - ValueNs(name) | MacroNs(name) | LifetimeNs(name) => Some(name), + TypeNs(name) | ValueNs(name) | MacroNs(name) | LifetimeNs(name) + | OpaqueLifetime(name) => Some(name), + + Impl + | ForeignMod + | CrateRoot + | Use + | GlobalAsm + | Closure + | Ctor + | AnonConst + | OpaqueTy + | AnonAssocTy(..) + | SyntheticCoroutineBody + | NestedStatic => None, + } + } - Impl | ForeignMod | CrateRoot | Use | GlobalAsm | Closure | Ctor | AnonConst - | OpaqueTy => None, + fn hashed_symbol(&self) -> Option { + use self::DefPathData::*; + match *self { + TypeNs(name) | ValueNs(name) | MacroNs(name) | LifetimeNs(name) | AnonAssocTy(name) + | OpaqueLifetime(name) => Some(name), + + Impl + | ForeignMod + | CrateRoot + | Use + | GlobalAsm + | Closure + | Ctor + | AnonConst + | OpaqueTy + | SyntheticCoroutineBody + | NestedStatic => None, } } pub fn name(&self) -> DefPathDataName { use self::DefPathData::*; match *self { - TypeNs(name) => { - if let Some(name) = name { - DefPathDataName::Named(name) - } else { - DefPathDataName::Anon { namespace: sym::synthetic } - } - } - ValueNs(name) | MacroNs(name) | LifetimeNs(name) => DefPathDataName::Named(name), + TypeNs(name) | ValueNs(name) | MacroNs(name) | LifetimeNs(name) + | OpaqueLifetime(name) => DefPathDataName::Named(name), // Note that this does not show up in user print-outs. CrateRoot => DefPathDataName::Anon { namespace: kw::Crate }, Impl => DefPathDataName::Anon { namespace: kw::Impl }, @@ -441,6 +500,9 @@ impl DefPathData { Ctor => DefPathDataName::Anon { namespace: sym::constructor }, AnonConst => DefPathDataName::Anon { namespace: sym::constant }, OpaqueTy => DefPathDataName::Anon { namespace: sym::opaque }, + AnonAssocTy(..) => DefPathDataName::Anon { namespace: sym::anon_assoc }, + SyntheticCoroutineBody => DefPathDataName::Anon { namespace: sym::synthetic }, + NestedStatic => DefPathDataName::Anon { namespace: sym::nested }, } } } diff --git a/compiler/rustc_hir/src/hir.rs b/compiler/rustc_hir/src/hir.rs index e3e96894ed1f7..2f7620a1ffc62 100644 --- a/compiler/rustc_hir/src/hir.rs +++ b/compiler/rustc_hir/src/hir.rs @@ -10,9 +10,9 @@ use rustc_ast::{ LitKind, TraitObjectSyntax, UintTy, UnsafeBinderCastKind, }; pub use rustc_ast::{ - AttrId, AttrStyle, BinOp, BinOpKind, BindingMode, BorrowKind, BoundConstness, BoundPolarity, - ByRef, CaptureBy, DelimArgs, ImplPolarity, IsAuto, MetaItemInner, MetaItemLit, Movability, - Mutability, UnOp, + AssignOp, AssignOpKind, AttrId, AttrStyle, BinOp, BinOpKind, BindingMode, BorrowKind, + BoundConstness, BoundPolarity, ByRef, CaptureBy, DelimArgs, ImplPolarity, IsAuto, + MetaItemInner, MetaItemLit, Movability, Mutability, UnOp, }; use rustc_attr_data_structures::AttributeKind; use rustc_data_structures::fingerprint::Fingerprint; @@ -36,43 +36,116 @@ pub(crate) use crate::hir_id::{HirId, ItemLocalId, ItemLocalMap, OwnerId}; use crate::intravisit::{FnKind, VisitorExt}; #[derive(Debug, Copy, Clone, PartialEq, Eq, HashStable_Generic)] -pub enum IsAnonInPath { - No, - Yes, +pub enum AngleBrackets { + /// E.g. `Path`. + Missing, + /// E.g. `Path<>`. + Empty, + /// E.g. `Path`. + Full, } -/// A lifetime. The valid field combinations are non-obvious. The following -/// example shows some of them. See also the comments on `LifetimeName`. +#[derive(Debug, Copy, Clone, PartialEq, Eq, HashStable_Generic)] +pub enum LifetimeSource { + /// E.g. `&Type`, `&'_ Type`, `&'a Type`, `&mut Type`, `&'_ mut Type`, `&'a mut Type` + Reference, + + /// E.g. `ContainsLifetime`, `ContainsLifetime<>`, `ContainsLifetime<'_>`, + /// `ContainsLifetime<'a>` + Path { angle_brackets: AngleBrackets }, + + /// E.g. `impl Trait + '_`, `impl Trait + 'a` + OutlivesBound, + + /// E.g. `impl Trait + use<'_>`, `impl Trait + use<'a>` + PreciseCapturing, + + /// Other usages which have not yet been categorized. Feel free to + /// add new sources that you find useful. + /// + /// Some non-exhaustive examples: + /// - `where T: 'a` + /// - `fn(_: dyn Trait + 'a)` + Other, +} + +#[derive(Debug, Copy, Clone, PartialEq, Eq, HashStable_Generic)] +pub enum LifetimeSyntax { + /// E.g. `&Type`, `ContainsLifetime` + Hidden, + + /// E.g. `&'_ Type`, `ContainsLifetime<'_>`, `impl Trait + '_`, `impl Trait + use<'_>` + Anonymous, + + /// E.g. `&'a Type`, `ContainsLifetime<'a>`, `impl Trait + 'a`, `impl Trait + use<'a>` + Named, +} + +impl From for LifetimeSyntax { + fn from(ident: Ident) -> Self { + let name = ident.name; + + if name == kw::Empty { + unreachable!("A lifetime name should never be empty"); + } else if name == kw::UnderscoreLifetime { + LifetimeSyntax::Anonymous + } else { + debug_assert!(name.as_str().starts_with('\'')); + LifetimeSyntax::Named + } + } +} + +/// A lifetime. The valid field combinations are non-obvious and not all +/// combinations are possible. The following example shows some of +/// them. See also the comments on `LifetimeKind` and `LifetimeSource`. +/// /// ``` /// #[repr(C)] -/// struct S<'a>(&'a u32); // res=Param, name='a, IsAnonInPath::No +/// struct S<'a>(&'a u32); // res=Param, name='a, source=Reference, syntax=Named /// unsafe extern "C" { -/// fn f1(s: S); // res=Param, name='_, IsAnonInPath::Yes -/// fn f2(s: S<'_>); // res=Param, name='_, IsAnonInPath::No -/// fn f3<'a>(s: S<'a>); // res=Param, name='a, IsAnonInPath::No +/// fn f1(s: S); // res=Param, name='_, source=Path, syntax=Hidden +/// fn f2(s: S<'_>); // res=Param, name='_, source=Path, syntax=Anonymous +/// fn f3<'a>(s: S<'a>); // res=Param, name='a, source=Path, syntax=Named /// } /// -/// struct St<'a> { x: &'a u32 } // res=Param, name='a, IsAnonInPath::No +/// struct St<'a> { x: &'a u32 } // res=Param, name='a, source=Reference, syntax=Named /// fn f() { -/// _ = St { x: &0 }; // res=Infer, name='_, IsAnonInPath::Yes -/// _ = St::<'_> { x: &0 }; // res=Infer, name='_, IsAnonInPath::No +/// _ = St { x: &0 }; // res=Infer, name='_, source=Path, syntax=Hidden +/// _ = St::<'_> { x: &0 }; // res=Infer, name='_, source=Path, syntax=Anonymous /// } /// -/// struct Name<'a>(&'a str); // res=Param, name='a, IsAnonInPath::No -/// const A: Name = Name("a"); // res=Static, name='_, IsAnonInPath::Yes -/// const B: &str = ""; // res=Static, name='_, IsAnonInPath::No -/// static C: &'_ str = ""; // res=Static, name='_, IsAnonInPath::No -/// static D: &'static str = ""; // res=Static, name='static, IsAnonInPath::No +/// struct Name<'a>(&'a str); // res=Param, name='a, source=Reference, syntax=Named +/// const A: Name = Name("a"); // res=Static, name='_, source=Path, syntax=Hidden +/// const B: &str = ""; // res=Static, name='_, source=Reference, syntax=Hidden +/// static C: &'_ str = ""; // res=Static, name='_, source=Reference, syntax=Anonymous +/// static D: &'static str = ""; // res=Static, name='static, source=Reference, syntax=Named /// /// trait Tr {} -/// fn tr(_: Box) {} // res=ImplicitObjectLifetimeDefault, name='_, IsAnonInPath::No +/// fn tr(_: Box) {} // res=ImplicitObjectLifetimeDefault, name='_, source=Other, syntax=Hidden +/// +/// fn capture_outlives<'a>() -> +/// impl FnOnce() + 'a // res=Param, ident='a, source=OutlivesBound, syntax=Named +/// { +/// || {} +/// } +/// +/// fn capture_precise<'a>() -> +/// impl FnOnce() + use<'a> // res=Param, ident='a, source=PreciseCapturing, syntax=Named +/// { +/// || {} +/// } /// /// // (commented out because these cases trigger errors) -/// // struct S1<'a>(&'a str); // res=Param, name='a, IsAnonInPath::No -/// // struct S2(S1); // res=Error, name='_, IsAnonInPath::Yes -/// // struct S3(S1<'_>); // res=Error, name='_, IsAnonInPath::No -/// // struct S4(S1<'a>); // res=Error, name='a, IsAnonInPath::No +/// // struct S1<'a>(&'a str); // res=Param, name='a, source=Reference, syntax=Named +/// // struct S2(S1); // res=Error, name='_, source=Path, syntax=Hidden +/// // struct S3(S1<'_>); // res=Error, name='_, source=Path, syntax=Anonymous +/// // struct S4(S1<'a>); // res=Error, name='a, source=Path, syntax=Named /// ``` +/// +/// Some combinations that cannot occur are `LifetimeSyntax::Hidden` with +/// `LifetimeSource::OutlivesBound` or `LifetimeSource::PreciseCapturing` +/// — there's no way to "elide" these lifetimes. #[derive(Debug, Copy, Clone, HashStable_Generic)] pub struct Lifetime { #[stable_hasher(ignore)] @@ -84,11 +157,15 @@ pub struct Lifetime { pub ident: Ident, /// Semantics of this lifetime. - pub res: LifetimeName, + pub kind: LifetimeKind, + + /// The context in which the lifetime occurred. See `Lifetime::suggestion` + /// for example use. + pub source: LifetimeSource, - /// Is the lifetime anonymous and in a path? Used only for error - /// suggestions. See `Lifetime::suggestion` for example use. - pub is_anon_in_path: IsAnonInPath, + /// The syntax that the user used to declare this lifetime. See + /// `Lifetime::suggestion` for example use. + pub syntax: LifetimeSyntax, } #[derive(Debug, Copy, Clone, HashStable_Generic)] @@ -130,7 +207,7 @@ impl ParamName { } #[derive(Debug, Copy, Clone, PartialEq, Eq, HashStable_Generic)] -pub enum LifetimeName { +pub enum LifetimeKind { /// User-given names or fresh (synthetic) names. Param(LocalDefId), @@ -160,16 +237,16 @@ pub enum LifetimeName { Static, } -impl LifetimeName { +impl LifetimeKind { fn is_elided(&self) -> bool { match self { - LifetimeName::ImplicitObjectLifetimeDefault | LifetimeName::Infer => true, + LifetimeKind::ImplicitObjectLifetimeDefault | LifetimeKind::Infer => true, // It might seem surprising that `Fresh` counts as not *elided* // -- but this is because, as far as the code in the compiler is // concerned -- `Fresh` variants act equivalently to "some fresh name". // They correspond to early-bound regions on an impl, in other words. - LifetimeName::Error | LifetimeName::Param(..) | LifetimeName::Static => false, + LifetimeKind::Error | LifetimeKind::Param(..) | LifetimeKind::Static => false, } } } @@ -184,10 +261,11 @@ impl Lifetime { pub fn new( hir_id: HirId, ident: Ident, - res: LifetimeName, - is_anon_in_path: IsAnonInPath, + kind: LifetimeKind, + source: LifetimeSource, + syntax: LifetimeSyntax, ) -> Lifetime { - let lifetime = Lifetime { hir_id, ident, res, is_anon_in_path }; + let lifetime = Lifetime { hir_id, ident, kind, source, syntax }; // Sanity check: elided lifetimes form a strict subset of anonymous lifetimes. #[cfg(debug_assertions)] @@ -202,30 +280,56 @@ impl Lifetime { } pub fn is_elided(&self) -> bool { - self.res.is_elided() + self.kind.is_elided() } pub fn is_anonymous(&self) -> bool { self.ident.name == kw::UnderscoreLifetime } + pub fn is_syntactically_hidden(&self) -> bool { + matches!(self.syntax, LifetimeSyntax::Hidden) + } + + pub fn is_syntactically_anonymous(&self) -> bool { + matches!(self.syntax, LifetimeSyntax::Anonymous) + } + + pub fn is_static(&self) -> bool { + self.kind == LifetimeKind::Static + } + pub fn suggestion(&self, new_lifetime: &str) -> (Span, String) { + use LifetimeSource::*; + use LifetimeSyntax::*; + debug_assert!(new_lifetime.starts_with('\'')); - match (self.is_anon_in_path, self.ident.span.is_empty()) { + match (self.syntax, self.source) { + // The user wrote `'a` or `'_`. + (Named | Anonymous, _) => (self.ident.span, format!("{new_lifetime}")), + // The user wrote `Path`, and omitted the `'_,`. - (IsAnonInPath::Yes, true) => (self.ident.span, format!("{new_lifetime}, ")), + (Hidden, Path { angle_brackets: AngleBrackets::Full }) => { + (self.ident.span, format!("{new_lifetime}, ")) + } + + // The user wrote `Path<>`, and omitted the `'_`.. + (Hidden, Path { angle_brackets: AngleBrackets::Empty }) => { + (self.ident.span, format!("{new_lifetime}")) + } // The user wrote `Path` and omitted the `<'_>`. - (IsAnonInPath::Yes, false) => { + (Hidden, Path { angle_brackets: AngleBrackets::Missing }) => { (self.ident.span.shrink_to_hi(), format!("<{new_lifetime}>")) } // The user wrote `&type` or `&mut type`. - (IsAnonInPath::No, true) => (self.ident.span, format!("{new_lifetime} ")), + (Hidden, Reference) => (self.ident.span, format!("{new_lifetime} ")), - // The user wrote `'a` or `'_`. - (IsAnonInPath::No, false) => (self.ident.span, format!("{new_lifetime}")), + (Hidden, source) => { + unreachable!("can't suggest for a hidden lifetime of {source:?}") + } } } } @@ -1014,7 +1118,7 @@ pub struct WhereRegionPredicate<'hir> { impl<'hir> WhereRegionPredicate<'hir> { /// Returns `true` if `param_def_id` matches the `lifetime` of this predicate. fn is_param_bound(&self, param_def_id: LocalDefId) -> bool { - self.lifetime.res == LifetimeName::Param(param_def_id) + self.lifetime.kind == LifetimeKind::Param(param_def_id) } } @@ -1237,7 +1341,7 @@ impl AttributeExt for Attribute { Attribute::Parsed(AttributeKind::DocComment { kind, comment, .. }) => { Some((*comment, *kind)) } - Attribute::Unparsed(_) if self.name_or_empty() == sym::doc => { + Attribute::Unparsed(_) if self.has_name(sym::doc) => { self.value_str().map(|s| (s, CommentKind::Line)) } _ => None, @@ -1262,8 +1366,8 @@ impl Attribute { } #[inline] - pub fn name_or_empty(&self) -> Symbol { - AttributeExt::name_or_empty(self) + pub fn name(&self) -> Option { + AttributeExt::name(self) } #[inline] @@ -1301,6 +1405,11 @@ impl Attribute { AttributeExt::has_name(self, name) } + #[inline] + pub fn has_any_name(&self, names: &[Symbol]) -> bool { + AttributeExt::has_any_name(self, names) + } + #[inline] pub fn span(&self) -> Span { AttributeExt::span(self) @@ -1555,6 +1664,7 @@ impl<'hir> Pat<'hir> { use PatKind::*; match self.kind { + Missing => unreachable!(), Wild | Never | Expr(_) | Range(..) | Binding(.., None) | Err(_) => true, Box(s) | Deref(s) | Ref(s, _) | Binding(.., Some(s)) | Guard(s, _) => s.walk_short_(it), Struct(_, fields, _) => fields.iter().all(|field| field.pat.walk_short_(it)), @@ -1582,7 +1692,7 @@ impl<'hir> Pat<'hir> { use PatKind::*; match self.kind { - Wild | Never | Expr(_) | Range(..) | Binding(.., None) | Err(_) => {} + Missing | Wild | Never | Expr(_) | Range(..) | Binding(.., None) | Err(_) => {} Box(s) | Deref(s) | Ref(s, _) | Binding(.., Some(s)) | Guard(s, _) => s.walk_(it), Struct(_, fields, _) => fields.iter().for_each(|field| field.pat.walk_(it)), TupleStruct(_, s, _) | Tuple(s, _) | Or(s) => s.iter().for_each(|p| p.walk_(it)), @@ -1714,12 +1824,18 @@ pub enum TyPatKind<'hir> { /// A range pattern (e.g., `1..=2` or `1..2`). Range(&'hir ConstArg<'hir>, &'hir ConstArg<'hir>), + /// A list of patterns where only one needs to be satisfied + Or(&'hir [TyPat<'hir>]), + /// A placeholder for a pattern that wasn't well formed in some way. Err(ErrorGuaranteed), } #[derive(Debug, Clone, Copy, HashStable_Generic)] pub enum PatKind<'hir> { + /// A missing pattern, e.g. for an anonymous param in a bare fn like `fn f(u32)`. + Missing, + /// Represents a wildcard pattern (i.e., `_`). Wild, @@ -1752,7 +1868,7 @@ pub enum PatKind<'hir> { Never, /// A tuple pattern (e.g., `(a, b)`). - /// If the `..` pattern fragment is present, then `Option` denotes its position. + /// If the `..` pattern fragment is present, then `DotDotPos` denotes its position. /// `0 <= position <= subpats.len()` Tuple(&'hir [Pat<'hir>], DotDotPos), @@ -1817,6 +1933,8 @@ pub enum StmtKind<'hir> { /// Represents a `let` statement (i.e., `let : = ;`). #[derive(Debug, Clone, Copy, HashStable_Generic)] pub struct LetStmt<'hir> { + /// Span of `super` in `super let`. + pub super_: Option, pub pat: &'hir Pat<'hir>, /// Type annotation, if any (otherwise the type will be inferred). pub ty: Option<&'hir Ty<'hir>>, @@ -2123,7 +2241,7 @@ pub type Lit = Spanned; /// explicit discriminant values for enum variants. /// /// You can check if this anon const is a default in a const param -/// `const N: usize = { ... }` with `tcx.hir().opt_const_param_default_param_def_id(..)` +/// `const N: usize = { ... }` with `tcx.hir_opt_const_param_default_param_def_id(..)` #[derive(Copy, Clone, Debug, HashStable_Generic)] pub struct AnonConst { #[stable_hasher(ignore)] @@ -2623,6 +2741,9 @@ pub enum ExprKind<'hir> { /// An `if` block, with an optional else block. /// /// I.e., `if { } else { }`. + /// + /// The "then" expr is always `ExprKind::Block`. If present, the "else" expr is always + /// `ExprKind::Block` (for `else`) or `ExprKind::If` (for `else if`). If(&'hir Expr<'hir>, &'hir Expr<'hir>, Option<&'hir Expr<'hir>>), /// A conditionless loop (can be exited with `break`, `continue`, or `return`). /// @@ -2648,7 +2769,7 @@ pub enum ExprKind<'hir> { /// An assignment with an operator. /// /// E.g., `a += 1`. - AssignOp(BinOp, &'hir Expr<'hir>, &'hir Expr<'hir>), + AssignOp(AssignOp, &'hir Expr<'hir>, &'hir Expr<'hir>), /// Access of a named (e.g., `obj.foo`) or unnamed (e.g., `obj.0`) struct or tuple field. Field(&'hir Expr<'hir>, Ident), /// An indexing operation (`foo[2]`). @@ -3393,9 +3514,9 @@ pub struct BareFnTy<'hir> { pub abi: ExternAbi, pub generic_params: &'hir [GenericParam<'hir>], pub decl: &'hir FnDecl<'hir>, - // `Option` because bare fn parameter names are optional. We also end up + // `Option` because bare fn parameter identifiers are optional. We also end up // with `None` in some error cases, e.g. invalid parameter patterns. - pub param_names: &'hir [Option], + pub param_idents: &'hir [Option], } #[derive(Debug, Clone, Copy, HashStable_Generic)] @@ -3992,8 +4113,7 @@ impl<'hir> Item<'hir> { expect_fn, (Ident, &FnSig<'hir>, &'hir Generics<'hir>, BodyId), ItemKind::Fn { ident, sig, generics, body, .. }, (*ident, sig, generics, *body); - expect_macro, (Ident, &ast::MacroDef, MacroKind), - ItemKind::Macro(ident, def, mk), (*ident, def, *mk); + expect_macro, (Ident, &ast::MacroDef, MacroKind), ItemKind::Macro(ident, def, mk), (*ident, def, *mk); expect_mod, (Ident, &'hir Mod<'hir>), ItemKind::Mod(ident, m), (*ident, m); @@ -4850,7 +4970,7 @@ mod size_asserts { static_assert_size!(ImplItemKind<'_>, 40); static_assert_size!(Item<'_>, 88); static_assert_size!(ItemKind<'_>, 64); - static_assert_size!(LetStmt<'_>, 64); + static_assert_size!(LetStmt<'_>, 72); static_assert_size!(Param<'_>, 32); static_assert_size!(Pat<'_>, 72); static_assert_size!(Path<'_>, 40); diff --git a/compiler/rustc_hir/src/hir/tests.rs b/compiler/rustc_hir/src/hir/tests.rs index 62ef02d2f500c..8684adee29c99 100644 --- a/compiler/rustc_hir/src/hir/tests.rs +++ b/compiler/rustc_hir/src/hir/tests.rs @@ -50,20 +50,14 @@ fn trait_object_roundtrips() { } fn trait_object_roundtrips_impl(syntax: TraitObjectSyntax) { - let unambig = TyKind::TraitObject::<'_, ()>( - &[], - TaggedRef::new( - &const { - Lifetime { - hir_id: HirId::INVALID, - ident: Ident::new(sym::name, DUMMY_SP), - res: LifetimeName::Static, - is_anon_in_path: IsAnonInPath::No, - } - }, - syntax, - ), - ); + let lt = Lifetime { + hir_id: HirId::INVALID, + ident: Ident::new(sym::name, DUMMY_SP), + kind: LifetimeKind::Static, + source: LifetimeSource::Other, + syntax: LifetimeSyntax::Hidden, + }; + let unambig = TyKind::TraitObject::<'_, ()>(&[], TaggedRef::new(<, syntax)); let unambig_to_ambig = unsafe { std::mem::transmute::<_, TyKind<'_, AmbigArg>>(unambig) }; match unambig_to_ambig { diff --git a/compiler/rustc_hir/src/hir_id.rs b/compiler/rustc_hir/src/hir_id.rs index 3fa06620ea8d1..b48a081d3714d 100644 --- a/compiler/rustc_hir/src/hir_id.rs +++ b/compiler/rustc_hir/src/hir_id.rs @@ -83,6 +83,12 @@ pub struct HirId { pub local_id: ItemLocalId, } +// To ensure correctness of incremental compilation, +// `HirId` must not implement `Ord` or `PartialOrd`. +// See https://github.com/rust-lang/rust/issues/90317. +impl !Ord for HirId {} +impl !PartialOrd for HirId {} + impl Debug for HirId { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { // Example: HirId(DefId(0:1 ~ aa[7697]::{use#0}).10) @@ -116,10 +122,6 @@ impl HirId { pub fn make_owner(owner: LocalDefId) -> Self { Self { owner: OwnerId { def_id: owner }, local_id: ItemLocalId::ZERO } } - - pub fn index(self) -> (usize, usize) { - (rustc_index::Idx::index(self.owner.def_id), rustc_index::Idx::index(self.local_id)) - } } impl fmt::Display for HirId { @@ -128,18 +130,6 @@ impl fmt::Display for HirId { } } -impl Ord for HirId { - fn cmp(&self, other: &Self) -> std::cmp::Ordering { - (self.index()).cmp(&(other.index())) - } -} - -impl PartialOrd for HirId { - fn partial_cmp(&self, other: &Self) -> Option { - Some(self.cmp(other)) - } -} - rustc_data_structures::define_stable_id_collections!(HirIdMap, HirIdSet, HirIdMapEntry, HirId); rustc_data_structures::define_id_collections!( ItemLocalMap, diff --git a/compiler/rustc_hir/src/intravisit.rs b/compiler/rustc_hir/src/intravisit.rs index 506358341b501..6a34728c8366a 100644 --- a/compiler/rustc_hir/src/intravisit.rs +++ b/compiler/rustc_hir/src/intravisit.rs @@ -6,7 +6,7 @@ //! 1. **Shallow visit**: Get a simple callback for every item (or item-like thing) in the HIR. //! - Example: find all items with a `#[foo]` attribute on them. //! - How: Use the `hir_crate_items` or `hir_module_items` query to traverse over item-like ids -//! (ItemId, TraitItemId, etc.) and use tcx.def_kind and `tcx.hir().item*(id)` to filter and +//! (ItemId, TraitItemId, etc.) and use tcx.def_kind and `tcx.hir_item*(id)` to filter and //! access actual item-like thing, respectively. //! - Pro: Efficient; just walks the lists of item ids and gives users control whether to access //! the hir_owners themselves or not. @@ -566,8 +566,8 @@ pub fn walk_item<'v, V: Visitor<'v>>(visitor: &mut V, item: &'v Item<'v>) -> V:: item.owner_id.def_id, )); } - ItemKind::Macro(ident, _def, _kind) => { - try_visit!(visitor.visit_ident(ident)); + ItemKind::Macro(name, _def, _kind) => { + try_visit!(visitor.visit_ident(name)); } ItemKind::Mod(ident, ref module) => { try_visit!(visitor.visit_ident(ident)); @@ -652,10 +652,10 @@ pub fn walk_foreign_item<'v, V: Visitor<'v>>( try_visit!(visitor.visit_ident(foreign_item.ident)); match foreign_item.kind { - ForeignItemKind::Fn(ref sig, param_names, ref generics) => { + ForeignItemKind::Fn(ref sig, param_idents, ref generics) => { try_visit!(visitor.visit_generics(generics)); try_visit!(visitor.visit_fn_decl(sig.decl)); - for ident in param_names.iter().copied() { + for ident in param_idents.iter().copied() { visit_opt!(visitor, visit_ident, ident); } } @@ -710,6 +710,7 @@ pub fn walk_ty_pat<'v, V: Visitor<'v>>(visitor: &mut V, pattern: &'v TyPat<'v>) try_visit!(visitor.visit_const_arg_unambig(lower_bound)); try_visit!(visitor.visit_const_arg_unambig(upper_bound)); } + TyPatKind::Or(patterns) => walk_list!(visitor, visit_pattern_type_pattern, patterns), TyPatKind::Err(_) => (), } V::Result::output() @@ -744,7 +745,7 @@ pub fn walk_pat<'v, V: Visitor<'v>>(visitor: &mut V, pattern: &'v Pat<'v>) -> V: visit_opt!(visitor, visit_pat_expr, lower_bound); visit_opt!(visitor, visit_pat_expr, upper_bound); } - PatKind::Never | PatKind::Wild | PatKind::Err(_) => (), + PatKind::Missing | PatKind::Never | PatKind::Wild | PatKind::Err(_) => (), PatKind::Slice(prepatterns, ref slice_pattern, postpatterns) => { walk_list!(visitor, visit_pat, prepatterns); visit_opt!(visitor, visit_pat, slice_pattern); @@ -1169,9 +1170,9 @@ pub fn walk_trait_item<'v, V: Visitor<'v>>( try_visit!(visitor.visit_ty_unambig(ty)); visit_opt!(visitor, visit_nested_body, default); } - TraitItemKind::Fn(ref sig, TraitFn::Required(param_names)) => { + TraitItemKind::Fn(ref sig, TraitFn::Required(param_idents)) => { try_visit!(visitor.visit_fn_decl(sig.decl)); - for ident in param_names.iter().copied() { + for ident in param_idents.iter().copied() { visit_opt!(visitor, visit_ident, ident); } } diff --git a/compiler/rustc_hir/src/lang_items.rs b/compiler/rustc_hir/src/lang_items.rs index e625514e9ffa1..6db9fb6543408 100644 --- a/compiler/rustc_hir/src/lang_items.rs +++ b/compiler/rustc_hir/src/lang_items.rs @@ -151,12 +151,10 @@ impl HashStable for LangItem { } /// Extracts the first `lang = "$name"` out of a list of attributes. -/// The `#[panic_handler]` attribute is also extracted out when found. pub fn extract(attrs: &[impl AttributeExt]) -> Option<(Symbol, Span)> { attrs.iter().find_map(|attr| { Some(match attr { _ if attr.has_name(sym::lang) => (attr.value_str()?, attr.span()), - _ if attr.has_name(sym::panic_handler) => (sym::panic_impl, attr.span()), _ => return None, }) }) @@ -182,25 +180,15 @@ language_item_table! { DynMetadata, sym::dyn_metadata, dyn_metadata, Target::Struct, GenericRequirement::None; Freeze, sym::freeze, freeze_trait, Target::Trait, GenericRequirement::Exact(0); + UnsafeUnpin, sym::unsafe_unpin, unsafe_unpin_trait, Target::Trait, GenericRequirement::Exact(0); FnPtrTrait, sym::fn_ptr_trait, fn_ptr_trait, Target::Trait, GenericRequirement::Exact(0); FnPtrAddr, sym::fn_ptr_addr, fn_ptr_addr, Target::Method(MethodKind::Trait { body: false }), GenericRequirement::None; Drop, sym::drop, drop_trait, Target::Trait, GenericRequirement::None; Destruct, sym::destruct, destruct_trait, Target::Trait, GenericRequirement::None; - - AsyncDrop, sym::async_drop, async_drop_trait, Target::Trait, GenericRequirement::Exact(0); - AsyncDestruct, sym::async_destruct, async_destruct_trait, Target::Trait, GenericRequirement::Exact(0); + AsyncDrop, sym::async_drop, async_drop_trait, Target::Trait, GenericRequirement::None; AsyncDropInPlace, sym::async_drop_in_place, async_drop_in_place_fn, Target::Fn, GenericRequirement::Exact(1); - SurfaceAsyncDropInPlace, sym::surface_async_drop_in_place, surface_async_drop_in_place_fn, Target::Fn, GenericRequirement::Exact(1); - AsyncDropSurfaceDropInPlace, sym::async_drop_surface_drop_in_place, async_drop_surface_drop_in_place_fn, Target::Fn, GenericRequirement::Exact(1); - AsyncDropSlice, sym::async_drop_slice, async_drop_slice_fn, Target::Fn, GenericRequirement::Exact(1); - AsyncDropChain, sym::async_drop_chain, async_drop_chain_fn, Target::Fn, GenericRequirement::Exact(2); - AsyncDropNoop, sym::async_drop_noop, async_drop_noop_fn, Target::Fn, GenericRequirement::Exact(0); - AsyncDropDeferredDropInPlace, sym::async_drop_deferred_drop_in_place, async_drop_deferred_drop_in_place_fn, Target::Fn, GenericRequirement::Exact(1); - AsyncDropFuse, sym::async_drop_fuse, async_drop_fuse_fn, Target::Fn, GenericRequirement::Exact(1); - AsyncDropDefer, sym::async_drop_defer, async_drop_defer_fn, Target::Fn, GenericRequirement::Exact(1); - AsyncDropEither, sym::async_drop_either, async_drop_either_fn, Target::Fn, GenericRequirement::Exact(3); CoerceUnsized, sym::coerce_unsized, coerce_unsized_trait, Target::Trait, GenericRequirement::Minimum(1); DispatchFromDyn, sym::dispatch_from_dyn, dispatch_from_dyn_trait, Target::Trait, GenericRequirement::Minimum(1); @@ -235,6 +223,8 @@ language_item_table! { IndexMut, sym::index_mut, index_mut_trait, Target::Trait, GenericRequirement::Exact(1); UnsafeCell, sym::unsafe_cell, unsafe_cell_type, Target::Struct, GenericRequirement::None; + UnsafePinned, sym::unsafe_pinned, unsafe_pinned_type, Target::Struct, GenericRequirement::None; + VaList, sym::va_list, va_list, Target::Struct, GenericRequirement::None; Deref, sym::deref, deref_trait, Target::Trait, GenericRequirement::Exact(0); @@ -295,6 +285,10 @@ language_item_table! { PanicMisalignedPointerDereference, sym::panic_misaligned_pointer_dereference, panic_misaligned_pointer_dereference_fn, Target::Fn, GenericRequirement::Exact(0); PanicInfo, sym::panic_info, panic_info, Target::Struct, GenericRequirement::None; PanicLocation, sym::panic_location, panic_location, Target::Struct, GenericRequirement::None; + /// Note: used to mark an extern item but now marks an externally implementable item. This means + /// that the PanicImpl used to be marked to be specially treated in the compiler, while it now + /// is only marked so we can check if it exists. There's no other reason for this lang item + /// anymore. PanicImpl, sym::panic_impl, panic_impl, Target::Fn, GenericRequirement::None; PanicCannotUnwind, sym::panic_cannot_unwind, panic_cannot_unwind, Target::Fn, GenericRequirement::Exact(0); PanicInCleanup, sym::panic_in_cleanup, panic_in_cleanup, Target::Fn, GenericRequirement::Exact(0); @@ -318,6 +312,10 @@ language_item_table! { PanicAsyncGenFnResumedPanic, sym::panic_const_async_gen_fn_resumed_panic, panic_const_async_gen_fn_resumed_panic, Target::Fn, GenericRequirement::None; PanicGenFnNonePanic, sym::panic_const_gen_fn_none_panic, panic_const_gen_fn_none_panic, Target::Fn, GenericRequirement::None; PanicNullPointerDereference, sym::panic_null_pointer_dereference, panic_null_pointer_dereference, Target::Fn, GenericRequirement::None; + PanicCoroutineResumedDrop, sym::panic_const_coroutine_resumed_drop, panic_const_coroutine_resumed_drop, Target::Fn, GenericRequirement::None; + PanicAsyncFnResumedDrop, sym::panic_const_async_fn_resumed_drop, panic_const_async_fn_resumed_drop, Target::Fn, GenericRequirement::None; + PanicAsyncGenFnResumedDrop, sym::panic_const_async_gen_fn_resumed_drop, panic_const_async_gen_fn_resumed_drop, Target::Fn, GenericRequirement::None; + PanicGenFnNoneDrop, sym::panic_const_gen_fn_none_drop, panic_const_gen_fn_none_drop, Target::Fn, GenericRequirement::None; /// libstd panic entry point. Necessary for const eval to be able to catch it BeginPanic, sym::begin_panic, begin_panic_fn, Target::Fn, GenericRequirement::None; @@ -330,7 +328,6 @@ language_item_table! { ExchangeMalloc, sym::exchange_malloc, exchange_malloc_fn, Target::Fn, GenericRequirement::None; DropInPlace, sym::drop_in_place, drop_in_place_fn, Target::Fn, GenericRequirement::Minimum(1); - FallbackSurfaceDrop, sym::fallback_surface_drop, fallback_surface_drop_fn, Target::Fn, GenericRequirement::None; AllocLayout, sym::alloc_layout, alloc_layout, Target::Struct, GenericRequirement::None; /// For all binary crates without `#![no_main]`, Rust will generate a "main" function. @@ -433,6 +430,14 @@ language_item_table! { // Experimental lang items for implementing contract pre- and post-condition checking. ContractBuildCheckEnsures, sym::contract_build_check_ensures, contract_build_check_ensures_fn, Target::Fn, GenericRequirement::None; ContractCheckRequires, sym::contract_check_requires, contract_check_requires_fn, Target::Fn, GenericRequirement::None; + + // Experimental lang items for `MCP: Low level components for async drop`(https://github.com/rust-lang/compiler-team/issues/727) + DefaultTrait4, sym::default_trait4, default_trait4_trait, Target::Trait, GenericRequirement::None; + DefaultTrait3, sym::default_trait3, default_trait3_trait, Target::Trait, GenericRequirement::None; + DefaultTrait2, sym::default_trait2, default_trait2_trait, Target::Trait, GenericRequirement::None; + DefaultTrait1, sym::default_trait1, default_trait1_trait, Target::Trait, GenericRequirement::None; + + ContractCheckEnsures, sym::contract_check_ensures, contract_check_ensures_fn, Target::Fn, GenericRequirement::None; } /// The requirement imposed on the generics of a lang item diff --git a/compiler/rustc_hir/src/lib.rs b/compiler/rustc_hir/src/lib.rs index 4a839d4057181..5533920aee4ab 100644 --- a/compiler/rustc_hir/src/lib.rs +++ b/compiler/rustc_hir/src/lib.rs @@ -4,16 +4,17 @@ // tidy-alphabetical-start #![allow(internal_features)] -#![cfg_attr(doc, recursion_limit = "256")] // FIXME(nnethercote): will be removed by #124141 +#![cfg_attr(bootstrap, feature(let_chains))] #![feature(associated_type_defaults)] #![feature(box_patterns)] #![feature(closure_track_caller)] #![feature(debug_closure_helpers)] #![feature(exhaustive_patterns)] -#![feature(let_chains)] +#![feature(negative_impls)] #![feature(never_type)] #![feature(rustc_attrs)] #![feature(variant_count)] +#![recursion_limit = "256"] // tidy-alphabetical-end extern crate self as rustc_hir; diff --git a/compiler/rustc_hir/src/target.rs b/compiler/rustc_hir/src/target.rs index 601898023fc39..7e428b97b8491 100644 --- a/compiler/rustc_hir/src/target.rs +++ b/compiler/rustc_hir/src/target.rs @@ -109,7 +109,7 @@ impl Target { ItemKind::Static { .. } => Target::Static, ItemKind::Const(..) => Target::Const, ItemKind::Fn { .. } => Target::Fn, - ItemKind::Macro(..) => Target::MacroDef, + ItemKind::Macro { .. } => Target::MacroDef, ItemKind::Mod(..) => Target::Mod, ItemKind::ForeignMod { .. } => Target::ForeignMod, ItemKind::GlobalAsm { .. } => Target::GlobalAsm, diff --git a/compiler/rustc_hir/src/tests.rs b/compiler/rustc_hir/src/tests.rs index 0837444ffdbe5..18c2bfdac8ce1 100644 --- a/compiler/rustc_hir/src/tests.rs +++ b/compiler/rustc_hir/src/tests.rs @@ -17,7 +17,7 @@ fn def_path_hash_depends_on_crate_id() { // the crate by changing the crate disambiguator (e.g. via bumping the // crate's version number). - create_session_globals_then(Edition::Edition2024, None, || { + create_session_globals_then(Edition::Edition2024, &[], None, || { let id0 = StableCrateId::new(Symbol::intern("foo"), false, vec!["1".to_string()], ""); let id1 = StableCrateId::new(Symbol::intern("foo"), false, vec!["2".to_string()], ""); diff --git a/compiler/rustc_hir/src/weak_lang_items.rs b/compiler/rustc_hir/src/weak_lang_items.rs index b4e548effd46d..66458500a5caa 100644 --- a/compiler/rustc_hir/src/weak_lang_items.rs +++ b/compiler/rustc_hir/src/weak_lang_items.rs @@ -24,7 +24,6 @@ macro_rules! weak_lang_items { } weak_lang_items! { - PanicImpl, rust_begin_unwind; EhPersonality, rust_eh_personality; EhCatchTypeinfo, rust_eh_catch_typeinfo; } diff --git a/compiler/rustc_hir_analysis/Cargo.toml b/compiler/rustc_hir_analysis/Cargo.toml index e5017794d8f29..58213c4f4e462 100644 --- a/compiler/rustc_hir_analysis/Cargo.toml +++ b/compiler/rustc_hir_analysis/Cargo.toml @@ -26,7 +26,6 @@ rustc_macros = { path = "../rustc_macros" } rustc_middle = { path = "../rustc_middle" } rustc_session = { path = "../rustc_session" } rustc_span = { path = "../rustc_span" } -rustc_target = { path = "../rustc_target" } rustc_trait_selection = { path = "../rustc_trait_selection" } smallvec = { version = "1.8.1", features = ["union", "may_dangle"] } tracing = "0.1" diff --git a/compiler/rustc_hir_analysis/messages.ftl b/compiler/rustc_hir_analysis/messages.ftl index 194f2cd04e468..24443c5099440 100644 --- a/compiler/rustc_hir_analysis/messages.ftl +++ b/compiler/rustc_hir_analysis/messages.ftl @@ -1,5 +1,5 @@ -hir_analysis_ambiguous_assoc_item = ambiguous associated {$assoc_kind} `{$assoc_name}` in bounds of `{$qself}` - .label = ambiguous associated {$assoc_kind} `{$assoc_name}` +hir_analysis_ambiguous_assoc_item = ambiguous associated {$assoc_kind} `{$assoc_ident}` in bounds of `{$qself}` + .label = ambiguous associated {$assoc_kind} `{$assoc_ident}` hir_analysis_ambiguous_lifetime_bound = ambiguous lifetime bound, explicit lifetime bound required @@ -12,13 +12,13 @@ hir_analysis_assoc_item_is_private = {$kind} `{$name}` is private .label = private {$kind} .defined_here_label = the {$kind} is defined here -hir_analysis_assoc_item_not_found = associated {$assoc_kind} `{$assoc_name}` not found for `{$qself}` +hir_analysis_assoc_item_not_found = associated {$assoc_kind} `{$assoc_ident}` not found for `{$qself}` hir_analysis_assoc_item_not_found_found_in_other_trait_label = there is {$identically_named -> [true] an *[false] a similarly named } associated {$assoc_kind} `{$suggested_name}` in the trait `{$trait_name}` -hir_analysis_assoc_item_not_found_label = associated {$assoc_kind} `{$assoc_name}` not found +hir_analysis_assoc_item_not_found_label = associated {$assoc_kind} `{$assoc_ident}` not found hir_analysis_assoc_item_not_found_other_sugg = `{$qself}` has the following associated {$assoc_kind} hir_analysis_assoc_item_not_found_similar_in_other_trait_qpath_sugg = consider fully qualifying{$identically_named -> @@ -159,6 +159,10 @@ hir_analysis_drop_impl_reservation = reservation `Drop` impls are not supported hir_analysis_duplicate_precise_capture = cannot capture parameter `{$name}` twice .label = parameter captured again here +hir_analysis_eii_with_generics = + #[{$eii_name}] cannot have generic parameters other than lifetimes + .label = required by this attribute + hir_analysis_empty_specialization = specialization impl does not specialize any associated items .note = impl is a specialization of this impl @@ -294,6 +298,13 @@ hir_analysis_lifetime_not_captured = `impl Trait` captures lifetime parameter, b .label = lifetime captured due to being mentioned in the bounds of the `impl Trait` .param_label = this lifetime parameter is captured +hir_analysis_lifetimes_or_bounds_mismatch_on_eii = + lifetime parameters or bounds of `{$ident}` do not match the declaration + .label = lifetimes do not match + .generics_label = lifetimes in impl do not match this signature + .where_label = this `where` clause might not match the one in the declaration + .bounds_label = this bound might be missing in the implementation + hir_analysis_lifetimes_or_bounds_mismatch_on_trait = lifetime parameters or bounds on {$item_kind} `{$ident}` do not match the trait declaration .label = lifetimes do not match {$item_kind} in trait @@ -450,9 +461,6 @@ hir_analysis_recursive_generic_parameter = {$param_def_kind} `{$param_name}` is hir_analysis_redundant_lifetime_args = unnecessary lifetime parameter `{$victim}` .note = you can use the `{$candidate}` lifetime directly, in place of `{$victim}` -hir_analysis_register_type_unstable = - type `{$ty}` cannot be used with this register class in stable - hir_analysis_requires_note = the `{$trait_name}` impl for `{$ty}` requires that `{$error_predicate}` hir_analysis_return_type_notation_equality_bound = @@ -486,6 +494,9 @@ hir_analysis_self_in_impl_self = `Self` is not valid in the self type of an impl block .note = replace `Self` with a different type +hir_analysis_self_in_type_alias = `Self` is not allowed in type aliases + .label = `Self` is only available in impls, traits, and concrete type definitions + hir_analysis_self_ty_not_captured = `impl Trait` must mention the `Self` type of the trait in `use<...>` .label = `Self` type parameter is implicitly captured by this `impl Trait` .note = currently, all type parameters are required to be mentioned in the precise captures list diff --git a/compiler/rustc_hir_analysis/src/autoderef.rs b/compiler/rustc_hir_analysis/src/autoderef.rs index b3eade8c8ae47..99e495d926690 100644 --- a/compiler/rustc_hir_analysis/src/autoderef.rs +++ b/compiler/rustc_hir_analysis/src/autoderef.rs @@ -2,8 +2,8 @@ use rustc_infer::infer::InferCtxt; use rustc_infer::traits::PredicateObligations; use rustc_middle::ty::{self, Ty, TyCtxt, TypeVisitableExt}; use rustc_session::Limit; -use rustc_span::Span; use rustc_span::def_id::{LOCAL_CRATE, LocalDefId}; +use rustc_span::{ErrorGuaranteed, Span}; use rustc_trait_selection::traits::ObligationCtxt; use tracing::{debug, instrument}; @@ -259,7 +259,11 @@ impl<'a, 'tcx> Autoderef<'a, 'tcx> { } } -pub fn report_autoderef_recursion_limit_error<'tcx>(tcx: TyCtxt<'tcx>, span: Span, ty: Ty<'tcx>) { +pub fn report_autoderef_recursion_limit_error<'tcx>( + tcx: TyCtxt<'tcx>, + span: Span, + ty: Ty<'tcx>, +) -> ErrorGuaranteed { // We've reached the recursion limit, error gracefully. let suggested_limit = match tcx.recursion_limit() { Limit(0) => Limit(2), @@ -270,5 +274,5 @@ pub fn report_autoderef_recursion_limit_error<'tcx>(tcx: TyCtxt<'tcx>, span: Spa ty, suggested_limit, crate_name: tcx.crate_name(LOCAL_CRATE), - }); + }) } diff --git a/compiler/rustc_hir_analysis/src/check/always_applicable.rs b/compiler/rustc_hir_analysis/src/check/always_applicable.rs index 8a841a1155674..58c3020f60ede 100644 --- a/compiler/rustc_hir_analysis/src/check/always_applicable.rs +++ b/compiler/rustc_hir_analysis/src/check/always_applicable.rs @@ -36,10 +36,8 @@ use crate::hir::def_id::{DefId, LocalDefId}; /// cannot do `struct S; impl Drop for S { ... }`). pub(crate) fn check_drop_impl( tcx: TyCtxt<'_>, - drop_impl_did: DefId, + drop_impl_did: LocalDefId, ) -> Result<(), ErrorGuaranteed> { - let drop_impl_did = drop_impl_did.expect_local(); - match tcx.impl_polarity(drop_impl_did) { ty::ImplPolarity::Positive => {} ty::ImplPolarity::Negative => { @@ -56,9 +54,9 @@ pub(crate) fn check_drop_impl( tcx.ensure_ok().orphan_check_impl(drop_impl_did)?; - let dtor_impl_trait_ref = tcx.impl_trait_ref(drop_impl_did).unwrap().instantiate_identity(); + let self_ty = tcx.type_of(drop_impl_did).instantiate_identity(); - match dtor_impl_trait_ref.self_ty().kind() { + match self_ty.kind() { ty::Adt(adt_def, adt_to_impl_args) => { ensure_impl_params_and_item_params_correspond( tcx, diff --git a/compiler/rustc_hir_analysis/src/check/check.rs b/compiler/rustc_hir_analysis/src/check/check.rs index 07b5837bd871d..f92b2aea160a1 100644 --- a/compiler/rustc_hir_analysis/src/check/check.rs +++ b/compiler/rustc_hir_analysis/src/check/check.rs @@ -397,8 +397,11 @@ fn best_definition_site_of_opaque<'tcx>( return ControlFlow::Continue(()); } - if let Some(hidden_ty) = - self.tcx.mir_borrowck(item_def_id).concrete_opaque_types.get(&self.opaque_def_id) + if let Some(hidden_ty) = self + .tcx + .mir_borrowck(item_def_id) + .ok() + .and_then(|opaque_types| opaque_types.0.get(&self.opaque_def_id)) { ControlFlow::Break((hidden_ty.span, item_def_id)) } else { @@ -413,9 +416,6 @@ fn best_definition_site_of_opaque<'tcx>( self.tcx } fn visit_expr(&mut self, ex: &'tcx hir::Expr<'tcx>) -> Self::Result { - if let hir::ExprKind::Closure(closure) = ex.kind { - self.check(closure.def_id)?; - } intravisit::walk_expr(self, ex) } fn visit_item(&mut self, it: &'tcx hir::Item<'tcx>) -> Self::Result { @@ -443,13 +443,13 @@ fn best_definition_site_of_opaque<'tcx>( let impl_def_id = tcx.local_parent(parent); for assoc in tcx.associated_items(impl_def_id).in_definition_order() { match assoc.kind { - ty::AssocKind::Const | ty::AssocKind::Fn => { + ty::AssocKind::Const { .. } | ty::AssocKind::Fn { .. } => { if let ControlFlow::Break(span) = locator.check(assoc.def_id.expect_local()) { return Some(span); } } - ty::AssocKind::Type => {} + ty::AssocKind::Type { .. } => {} } } @@ -578,10 +578,8 @@ fn check_opaque_precise_captures<'tcx>(tcx: TyCtxt<'tcx>, opaque_def_id: LocalDe } } _ => { - tcx.dcx().span_delayed_bug( - tcx.hir().span(hir_id), - "parameter should have been resolved", - ); + tcx.dcx() + .span_delayed_bug(tcx.hir_span(hir_id), "parameter should have been resolved"); } } } @@ -721,7 +719,6 @@ pub(crate) fn check_item_type(tcx: TyCtxt<'_>, def_id: LocalDefId) { def_id, tcx.def_ident_span(def_id).unwrap(), i.name, - ExternAbi::Rust, ) } } @@ -733,7 +730,6 @@ pub(crate) fn check_item_type(tcx: TyCtxt<'_>, def_id: LocalDefId) { .is_ok() { check_impl_items_against_trait(tcx, def_id, impl_trait_header); - check_on_unimplemented(tcx, def_id); } } } @@ -743,11 +739,7 @@ pub(crate) fn check_item_type(tcx: TyCtxt<'_>, def_id: LocalDefId) { for &assoc_item in assoc_items.in_definition_order() { match assoc_item.kind { - ty::AssocKind::Fn => { - let abi = tcx.fn_sig(assoc_item.def_id).skip_binder().abi(); - forbid_intrinsic_abi(tcx, assoc_item.ident(tcx).span, abi); - } - ty::AssocKind::Type if assoc_item.defaultness(tcx).has_value() => { + ty::AssocKind::Type { .. } if assoc_item.defaultness(tcx).has_value() => { let trait_args = GenericArgs::identity_for_item(tcx, def_id); let _: Result<_, rustc_errors::ErrorGuaranteed> = check_type_bounds( tcx, @@ -790,65 +782,49 @@ pub(crate) fn check_item_type(tcx: TyCtxt<'_>, def_id: LocalDefId) { }; check_abi(tcx, it.span, abi); - match abi { - ExternAbi::RustIntrinsic => { - for item in items { - intrinsic::check_intrinsic_type( - tcx, - item.id.owner_id.def_id, - item.span, - item.ident.name, - abi, - ); - } + for item in items { + let def_id = item.id.owner_id.def_id; + + let generics = tcx.generics_of(def_id); + let own_counts = generics.own_counts(); + if generics.own_params.len() - own_counts.lifetimes != 0 { + let (kinds, kinds_pl, egs) = match (own_counts.types, own_counts.consts) { + (_, 0) => ("type", "types", Some("u32")), + // We don't specify an example value, because we can't generate + // a valid value for any type. + (0, _) => ("const", "consts", None), + _ => ("type or const", "types or consts", None), + }; + struct_span_code_err!( + tcx.dcx(), + item.span, + E0044, + "foreign items may not have {kinds} parameters", + ) + .with_span_label(item.span, format!("can't have {kinds} parameters")) + .with_help( + // FIXME: once we start storing spans for type arguments, turn this + // into a suggestion. + format!( + "replace the {} parameters with concrete {}{}", + kinds, + kinds_pl, + egs.map(|egs| format!(" like `{egs}`")).unwrap_or_default(), + ), + ) + .emit(); } - _ => { - for item in items { - let def_id = item.id.owner_id.def_id; - let generics = tcx.generics_of(def_id); - let own_counts = generics.own_counts(); - if generics.own_params.len() - own_counts.lifetimes != 0 { - let (kinds, kinds_pl, egs) = match (own_counts.types, own_counts.consts) - { - (_, 0) => ("type", "types", Some("u32")), - // We don't specify an example value, because we can't generate - // a valid value for any type. - (0, _) => ("const", "consts", None), - _ => ("type or const", "types or consts", None), - }; - struct_span_code_err!( - tcx.dcx(), - item.span, - E0044, - "foreign items may not have {kinds} parameters", - ) - .with_span_label(item.span, format!("can't have {kinds} parameters")) - .with_help( - // FIXME: once we start storing spans for type arguments, turn this - // into a suggestion. - format!( - "replace the {} parameters with concrete {}{}", - kinds, - kinds_pl, - egs.map(|egs| format!(" like `{egs}`")).unwrap_or_default(), - ), - ) - .emit(); - } - - let item = tcx.hir_foreign_item(item.id); - match &item.kind { - hir::ForeignItemKind::Fn(sig, _, _) => { - require_c_abi_if_c_variadic(tcx, sig.decl, abi, item.span); - } - hir::ForeignItemKind::Static(..) => { - check_static_inhabited(tcx, def_id); - check_static_linkage(tcx, def_id); - } - _ => {} - } + let item = tcx.hir_foreign_item(item.id); + match &item.kind { + hir::ForeignItemKind::Fn(sig, _, _) => { + require_c_abi_if_c_variadic(tcx, sig.decl, abi, item.span); } + hir::ForeignItemKind::Static(..) => { + check_static_inhabited(tcx, def_id); + check_static_linkage(tcx, def_id); + } + _ => {} } } } @@ -949,31 +925,7 @@ fn check_impl_items_against_trait<'tcx>( let trait_def = tcx.trait_def(trait_ref.def_id); - let infcx = tcx.infer_ctxt().ignoring_regions().build(TypingMode::non_body_analysis()); - - let ocx = ObligationCtxt::new_with_diagnostics(&infcx); - let cause = ObligationCause::misc(tcx.def_span(impl_id), impl_id); - let param_env = tcx.param_env(impl_id); - - let self_is_guaranteed_unsized = match tcx - .struct_tail_raw( - trait_ref.self_ty(), - |ty| { - ocx.structurally_normalize_ty(&cause, param_env, ty).unwrap_or_else(|_| { - Ty::new_error_with_message( - tcx, - tcx.def_span(impl_id), - "struct tail should be computable", - ) - }) - }, - || (), - ) - .kind() - { - ty::Dynamic(_, _, ty::DynKind::Dyn) | ty::Slice(_) | ty::Str => true, - _ => false, - }; + let self_is_guaranteed_unsize_self = tcx.impl_self_is_guaranteed_unsized(impl_id); for &impl_item in impl_item_refs { let ty_impl_item = tcx.associated_item(impl_item); @@ -989,7 +941,7 @@ fn check_impl_items_against_trait<'tcx>( if res.is_ok() { match ty_impl_item.kind { - ty::AssocKind::Fn => { + ty::AssocKind::Fn { .. } => { compare_impl_item::refine::check_refining_return_position_impl_trait_in_trait( tcx, ty_impl_item, @@ -999,12 +951,12 @@ fn check_impl_items_against_trait<'tcx>( .instantiate_identity(), ); } - ty::AssocKind::Const => {} - ty::AssocKind::Type => {} + ty::AssocKind::Const { .. } => {} + ty::AssocKind::Type { .. } => {} } } - if self_is_guaranteed_unsized && tcx.generics_require_sized_self(ty_trait_item.def_id) { + if self_is_guaranteed_unsize_self && tcx.generics_require_sized_self(ty_trait_item.def_id) { tcx.emit_node_span_lint( rustc_lint_defs::builtin::DEAD_CODE, tcx.local_def_id_to_hir_id(ty_impl_item.def_id.expect_local()), @@ -1039,7 +991,7 @@ fn check_impl_items_against_trait<'tcx>( if !is_implemented && tcx.defaultness(impl_id).is_final() // unsized types don't need to implement methods that have `Self: Sized` bounds. - && !(self_is_guaranteed_unsized && tcx.generics_require_sized_self(trait_item_id)) + && !(self_is_guaranteed_unsize_self && tcx.generics_require_sized_self(trait_item_id)) { missing_items.push(tcx.associated_item(trait_item_id)); } @@ -1049,7 +1001,7 @@ fn check_impl_items_against_trait<'tcx>( leaf_def.as_ref().is_some_and(|node_item| !node_item.defining_node.is_from_trait()); if !is_implemented_here { - let full_impl_span = tcx.hir().span_with_body(tcx.local_def_id_to_hir_id(impl_id)); + let full_impl_span = tcx.hir_span_with_body(tcx.local_def_id_to_hir_id(impl_id)); match tcx.eval_default_body_stability(trait_item_id, full_impl_span) { EvalResult::Deny { feature, reason, issue, .. } => default_body_is_unstable( tcx, @@ -1105,7 +1057,7 @@ fn check_impl_items_against_trait<'tcx>( } if !missing_items.is_empty() { - let full_impl_span = tcx.hir().span_with_body(tcx.local_def_id_to_hir_id(impl_id)); + let full_impl_span = tcx.hir_span_with_body(tcx.local_def_id_to_hir_id(impl_id)); missing_items_err(tcx, impl_id, &missing_items, full_impl_span); } @@ -1321,7 +1273,7 @@ pub(super) fn check_transparent<'tcx>(tcx: TyCtxt<'tcx>, adt: ty::AdtDef<'tcx>) let typing_env = ty::TypingEnv::non_body_analysis(tcx, field.did); let layout = tcx.layout_of(typing_env.as_query_input(ty)); // We are currently checking the type this field came from, so it must be local - let span = tcx.hir().span_if_local(field.did).unwrap(); + let span = tcx.hir_span_if_local(field.did).unwrap(); let trivial = layout.is_ok_and(|layout| layout.is_1zst()); if !trivial { return (span, trivial, None); diff --git a/compiler/rustc_hir_analysis/src/check/compare_eii.rs b/compiler/rustc_hir_analysis/src/check/compare_eii.rs new file mode 100644 index 0000000000000..66d55eab6fff3 --- /dev/null +++ b/compiler/rustc_hir_analysis/src/check/compare_eii.rs @@ -0,0 +1,429 @@ +use std::borrow::Cow; +use std::iter; + +use rustc_data_structures::fx::FxIndexSet; +use rustc_errors::{Applicability, E0805, struct_span_code_err}; +use rustc_hir::def_id::{DefId, LocalDefId}; +use rustc_hir::{self as hir, FnSig, HirId, ItemKind}; +use rustc_infer::infer::{self, InferCtxt, TyCtxtInferExt}; +use rustc_infer::traits::{ObligationCause, ObligationCauseCode}; +use rustc_middle::ty::error::{ExpectedFound, TypeError}; +use rustc_middle::ty::{self, TyCtxt, TypingMode}; +use rustc_span::{ErrorGuaranteed, Ident, Span, Symbol}; +use rustc_trait_selection::error_reporting::InferCtxtErrorExt; +use rustc_trait_selection::regions::InferCtxtRegionExt; +use rustc_trait_selection::traits::ObligationCtxt; +use tracing::{debug, instrument}; + +use super::potentially_plural_count; +use crate::errors::{EiiWithGenerics, LifetimesOrBoundsMismatchOnEII}; + +/// Checks a bunch of different properties of the impl/trait methods for +/// compatibility, such as asyncness, number of argument, self receiver kind, +/// and number of early- and late-bound generics. +fn check_is_structurally_compatible<'tcx>( + tcx: TyCtxt<'tcx>, + external_impl: LocalDefId, + declaration: DefId, + eii_name: Symbol, + eii_attr_span: Span, +) -> Result<(), ErrorGuaranteed> { + check_no_generics(tcx, external_impl, declaration, eii_name, eii_attr_span)?; + compare_number_of_method_arguments(tcx, external_impl, declaration, eii_name, eii_attr_span)?; + check_region_bounds_on_impl_item(tcx, external_impl, declaration, eii_attr_span)?; + Ok(()) +} + +fn check_no_generics<'tcx>( + tcx: TyCtxt<'tcx>, + external_impl: LocalDefId, + _declaration: DefId, + eii_name: Symbol, + eii_attr_span: Span, +) -> Result<(), ErrorGuaranteed> { + let generics = tcx.generics_of(external_impl); + if generics.own_requires_monomorphization() { + tcx.dcx().emit_err(EiiWithGenerics { + span: tcx.def_span(external_impl), + attr: eii_attr_span, + eii_name, + }); + } + + Ok(()) +} + +fn check_region_bounds_on_impl_item<'tcx>( + tcx: TyCtxt<'tcx>, + external_impl: LocalDefId, + declaration: DefId, + eii_attr_span: Span, +) -> Result<(), ErrorGuaranteed> { + let external_impl_generics = tcx.generics_of(external_impl.to_def_id()); + let external_impl_params = external_impl_generics.own_counts().lifetimes; + + let declaration_generics = tcx.generics_of(declaration); + let declaration_params = declaration_generics.own_counts().lifetimes; + + debug!(?declaration_generics, ?external_impl_generics); + + // Must have same number of early-bound lifetime parameters. + // Unfortunately, if the user screws up the bounds, then this + // will change classification between early and late. E.g., + // if in trait we have `<'a,'b:'a>`, and in impl we just have + // `<'a,'b>`, then we have 2 early-bound lifetime parameters + // in trait but 0 in the impl. But if we report "expected 2 + // but found 0" it's confusing, because it looks like there + // are zero. Since I don't quite know how to phrase things at + // the moment, give a kind of vague error message. + if declaration_params != external_impl_params { + let span = tcx + .hir_get_generics(external_impl) + .expect("expected impl item to have generics or else we can't compare them") + .span; + + let mut generics_span = None; + let mut bounds_span = vec![]; + let mut where_span = None; + + if let Some(declaration_node) = tcx.hir_get_if_local(declaration) + && let Some(declaration_generics) = declaration_node.generics() + { + generics_span = Some(declaration_generics.span); + // FIXME: we could potentially look at the impl's bounds to not point at bounds that + // *are* present in the impl. + for p in declaration_generics.predicates { + if let hir::WherePredicateKind::BoundPredicate(pred) = p.kind { + for b in pred.bounds { + if let hir::GenericBound::Outlives(lt) = b { + bounds_span.push(lt.ident.span); + } + } + } + } + if let Some(implementation_generics) = tcx.hir_get_generics(external_impl) { + let mut impl_bounds = 0; + for p in implementation_generics.predicates { + if let hir::WherePredicateKind::BoundPredicate(pred) = p.kind { + for b in pred.bounds { + if let hir::GenericBound::Outlives(_) = b { + impl_bounds += 1; + } + } + } + } + if impl_bounds == bounds_span.len() { + bounds_span = vec![]; + } else if implementation_generics.has_where_clause_predicates { + where_span = Some(implementation_generics.where_clause_span); + } + } + } + let mut diag = tcx.dcx().create_err(LifetimesOrBoundsMismatchOnEII { + span, + ident: tcx.item_name(external_impl.to_def_id()), + generics_span, + bounds_span, + where_span, + }); + + diag.span_label(eii_attr_span, format!("required because of this attribute")); + return Err(diag.emit()); + } + + Ok(()) +} + +fn compare_number_of_method_arguments<'tcx>( + tcx: TyCtxt<'tcx>, + external_impl: LocalDefId, + declaration: DefId, + eii_name: Symbol, + eii_attr_span: Span, +) -> Result<(), ErrorGuaranteed> { + let external_impl_fty = tcx.fn_sig(external_impl); + let declaration_fty = tcx.fn_sig(declaration); + let declaration_number_args = declaration_fty.skip_binder().inputs().skip_binder().len(); + let external_impl_number_args = external_impl_fty.skip_binder().inputs().skip_binder().len(); + let external_impl_name = tcx.item_name(external_impl.to_def_id()); + + if declaration_number_args != external_impl_number_args { + let declaration_span = declaration + .as_local() + .and_then(|def_id| { + let declaration_sig = get_declaration_sig(tcx, def_id).expect("foreign item sig"); + let pos = declaration_number_args.saturating_sub(1); + declaration_sig.decl.inputs.get(pos).map(|arg| { + if pos == 0 { + arg.span + } else { + arg.span.with_lo(declaration_sig.decl.inputs[0].span.lo()) + } + }) + }) + .or_else(|| tcx.hir_span_if_local(declaration)) + .unwrap_or_else(|| tcx.def_span(declaration)); + + let (_, external_impl_sig, _, _) = &tcx.hir_expect_item(external_impl).expect_fn(); + let pos = external_impl_number_args.saturating_sub(1); + let impl_span = external_impl_sig + .decl + .inputs + .get(pos) + .map(|arg| { + if pos == 0 { + arg.span + } else { + arg.span.with_lo(external_impl_sig.decl.inputs[0].span.lo()) + } + }) + .unwrap_or_else(|| tcx.def_span(external_impl)); + + let mut err = struct_span_code_err!( + tcx.dcx(), + impl_span, + E0805, + "`{external_impl_name}` has {} but #[{eii_name}] requires it to have {}", + potentially_plural_count(external_impl_number_args, "parameter"), + declaration_number_args + ); + + // if let Some(declaration_span) = declaration_span { + err.span_label( + declaration_span, + format!("requires {}", potentially_plural_count(declaration_number_args, "parameter")), + ); + // } + + err.span_label( + impl_span, + format!( + "expected {}, found {}", + potentially_plural_count(declaration_number_args, "parameter"), + external_impl_number_args + ), + ); + + err.span_label(eii_attr_span, format!("required because of this attribute")); + + return Err(err.emit()); + } + + Ok(()) +} + +// checks whether the signature of some `external_impl`, matches +// the signature of `declaration`, which it is supposed to be compatible +// with in order to implement the item. +pub(crate) fn compare_eii_function_types<'tcx>( + tcx: TyCtxt<'tcx>, + external_impl: LocalDefId, + declaration: DefId, + eii_name: Symbol, + eii_attr_span: Span, +) -> Result<(), ErrorGuaranteed> { + check_is_structurally_compatible(tcx, external_impl, declaration, eii_name, eii_attr_span)?; + + let external_impl_span = tcx.def_span(external_impl); + let cause = ObligationCause::new( + external_impl_span, + external_impl, + ObligationCauseCode::CompareEII { external_impl, declaration }, + ); + + // no trait bounds + let param_env = ty::ParamEnv::empty(); + + let infcx = &tcx.infer_ctxt().build(TypingMode::non_body_analysis()); + let ocx = ObligationCtxt::new_with_diagnostics(infcx); + + // We now need to check that the signature of the impl method is + // compatible with that of the trait method. We do this by + // checking that `impl_fty <: trait_fty`. + // + // FIXME. Unfortunately, this doesn't quite work right now because + // associated type normalization is not integrated into subtype + // checks. For the comparison to be valid, we need to + // normalize the associated types in the impl/trait methods + // first. However, because function types bind regions, just + // calling `FnCtxt::normalize` would have no effect on + // any associated types appearing in the fn arguments or return + // type. + + let wf_tys = FxIndexSet::default(); + let norm_cause = ObligationCause::misc(external_impl_span, external_impl); + + let declaration_sig = tcx.fn_sig(declaration).instantiate_identity(); + let declaration_sig = infcx.enter_forall_and_leak_universe(declaration_sig); + let declaration_sig = ocx.normalize(&norm_cause, param_env, declaration_sig); + + let external_impl_sig = infcx.instantiate_binder_with_fresh_vars( + external_impl_span, + infer::HigherRankedType, + tcx.fn_sig(external_impl).instantiate( + tcx, + infcx.fresh_args_for_item(external_impl_span, external_impl.to_def_id()), + ), + ); + let external_impl_sig = ocx.normalize(&norm_cause, param_env, external_impl_sig); + debug!(?external_impl_sig); + + // FIXME: We'd want to keep more accurate spans than "the method signature" when + // processing the comparison between the trait and impl fn, but we sadly lose them + // and point at the whole signature when a trait bound or specific input or output + // type would be more appropriate. In other places we have a `Vec` + // corresponding to their `Vec`, but we don't have that here. + // Fixing this would improve the output of test `issue-83765.rs`. + let result = ocx.sup(&cause, param_env, declaration_sig, external_impl_sig); + + if let Err(terr) = result { + debug!(?external_impl_sig, ?declaration_sig, ?terr, "sub_types failed"); + + let emitted = report_eii_mismatch( + infcx, + cause, + param_env, + terr, + (declaration, declaration_sig), + (external_impl, external_impl_sig), + eii_attr_span, + eii_name, + ); + return Err(emitted); + } + + // Check that all obligations are satisfied by the implementation's + // version. + let errors = ocx.select_all_or_error(); + if !errors.is_empty() { + let reported = infcx.err_ctxt().report_fulfillment_errors(errors); + return Err(reported); + } + + // Finally, resolve all regions. This catches wily misuses of + // lifetime parameters. + let errors = infcx.resolve_regions(external_impl, param_env, wf_tys); + if !errors.is_empty() { + return Err(infcx + .tainted_by_errors() + .unwrap_or_else(|| infcx.err_ctxt().report_region_errors(external_impl, &errors))); + } + + Ok(()) +} + +fn report_eii_mismatch<'tcx>( + infcx: &InferCtxt<'tcx>, + mut cause: ObligationCause<'tcx>, + param_env: ty::ParamEnv<'tcx>, + terr: TypeError<'tcx>, + (declaration_did, declaration_sig): (DefId, ty::FnSig<'tcx>), + (external_impl_did, external_impl_sig): (LocalDefId, ty::FnSig<'tcx>), + eii_attr_span: Span, + eii_name: Symbol, +) -> ErrorGuaranteed { + let tcx = infcx.tcx; + let (impl_err_span, trait_err_span, external_impl_name) = + extract_spans_for_error_reporting(infcx, terr, &cause, declaration_did, external_impl_did); + + let mut diag = struct_span_code_err!( + tcx.dcx(), + impl_err_span, + E0805, + "function `{}` has a type that is incompatible with the declaration of `#[{eii_name}]`", + external_impl_name + ); + + diag.span_note(eii_attr_span, "expected this because of this attribute"); + + match &terr { + TypeError::ArgumentMutability(i) | TypeError::ArgumentSorts(_, i) => { + if declaration_sig.inputs().len() == *i { + // Suggestion to change output type. We do not suggest in `async` functions + // to avoid complex logic or incorrect output. + if let ItemKind::Fn { sig, .. } = &tcx.hir_expect_item(external_impl_did).kind + && !sig.header.asyncness.is_async() + { + let msg = "change the output type to match the declaration"; + let ap = Applicability::MachineApplicable; + match sig.decl.output { + hir::FnRetTy::DefaultReturn(sp) => { + let sugg = format!(" -> {}", declaration_sig.output()); + diag.span_suggestion_verbose(sp, msg, sugg, ap); + } + hir::FnRetTy::Return(hir_ty) => { + let sugg = declaration_sig.output(); + diag.span_suggestion_verbose(hir_ty.span, msg, sugg, ap); + } + }; + }; + } else if let Some(trait_ty) = declaration_sig.inputs().get(*i) { + diag.span_suggestion_verbose( + impl_err_span, + "change the parameter type to match the declaration", + trait_ty, + Applicability::MachineApplicable, + ); + } + } + _ => {} + } + + cause.span = impl_err_span; + infcx.err_ctxt().note_type_err( + &mut diag, + &cause, + trait_err_span.map(|sp| (sp, Cow::from("type in declaration"), false)), + Some(param_env.and(infer::ValuePairs::PolySigs(ExpectedFound { + expected: ty::Binder::dummy(declaration_sig), + found: ty::Binder::dummy(external_impl_sig), + }))), + terr, + false, + None, + ); + + diag.emit() +} + +#[instrument(level = "debug", skip(infcx))] +fn extract_spans_for_error_reporting<'tcx>( + infcx: &infer::InferCtxt<'tcx>, + terr: TypeError<'_>, + cause: &ObligationCause<'tcx>, + declaration: DefId, + external_impl: LocalDefId, +) -> (Span, Option, Ident) { + let tcx = infcx.tcx; + let (mut external_impl_args, external_impl_name) = { + let item = tcx.hir_expect_item(external_impl); + let (ident, sig, _, _) = item.expect_fn(); + (sig.decl.inputs.iter().map(|t| t.span).chain(iter::once(sig.decl.output.span())), ident) + }; + + let declaration_args = declaration.as_local().map(|def_id| { + if let Some(sig) = get_declaration_sig(tcx, def_id) { + sig.decl.inputs.iter().map(|t| t.span).chain(iter::once(sig.decl.output.span())) + } else { + panic!("expected {def_id:?} to be a foreign function"); + } + }); + + match terr { + TypeError::ArgumentMutability(i) | TypeError::ArgumentSorts(ExpectedFound { .. }, i) => ( + external_impl_args.nth(i).unwrap(), + declaration_args.and_then(|mut args| args.nth(i)), + external_impl_name, + ), + _ => ( + cause.span, + tcx.hir_span_if_local(declaration).or_else(|| Some(tcx.def_span(declaration))), + external_impl_name, + ), + } +} + +fn get_declaration_sig<'tcx>(tcx: TyCtxt<'tcx>, def_id: LocalDefId) -> Option<&'tcx FnSig<'tcx>> { + let hir_id: HirId = tcx.local_def_id_to_hir_id(def_id); + tcx.hir_fn_sig_by_hir_id(hir_id) +} diff --git a/compiler/rustc_hir_analysis/src/check/compare_impl_item.rs b/compiler/rustc_hir_analysis/src/check/compare_impl_item.rs index 84d07c711fa40..fb67f2fd22350 100644 --- a/compiler/rustc_hir_analysis/src/check/compare_impl_item.rs +++ b/compiler/rustc_hir_analysis/src/check/compare_impl_item.rs @@ -5,20 +5,19 @@ use std::iter; use hir::def_id::{DefId, DefIdMap, LocalDefId}; use rustc_data_structures::fx::{FxHashSet, FxIndexMap, FxIndexSet}; use rustc_errors::codes::*; -use rustc_errors::{Applicability, ErrorGuaranteed, pluralize, struct_span_code_err}; +use rustc_errors::{Applicability, ErrorGuaranteed, MultiSpan, pluralize, struct_span_code_err}; use rustc_hir::def::{DefKind, Res}; use rustc_hir::intravisit::VisitorExt; use rustc_hir::{self as hir, AmbigArg, GenericParamKind, ImplItemKind, intravisit}; use rustc_infer::infer::{self, InferCtxt, TyCtxtInferExt}; use rustc_infer::traits::util; use rustc_middle::ty::error::{ExpectedFound, TypeError}; -use rustc_middle::ty::util::ExplicitSelf; use rustc_middle::ty::{ self, BottomUpFolder, GenericArgs, GenericParamDefKind, Ty, TyCtxt, TypeFoldable, TypeFolder, - TypeSuperFoldable, TypeVisitableExt, TypingMode, Upcast, + TypeSuperFoldable, TypeVisitable, TypeVisitableExt, TypeVisitor, TypingMode, Upcast, }; use rustc_middle::{bug, span_bug}; -use rustc_span::Span; +use rustc_span::{DUMMY_SP, Span}; use rustc_trait_selection::error_reporting::InferCtxtErrorExt; use rustc_trait_selection::infer::InferCtxtExt; use rustc_trait_selection::regions::InferCtxtRegionExt; @@ -44,9 +43,11 @@ pub(super) fn compare_impl_item( debug!(?impl_trait_ref); match impl_item.kind { - ty::AssocKind::Fn => compare_impl_method(tcx, impl_item, trait_item, impl_trait_ref), - ty::AssocKind::Type => compare_impl_ty(tcx, impl_item, trait_item, impl_trait_ref), - ty::AssocKind::Const => compare_impl_const(tcx, impl_item, trait_item, impl_trait_ref), + ty::AssocKind::Fn { .. } => compare_impl_method(tcx, impl_item, trait_item, impl_trait_ref), + ty::AssocKind::Type { .. } => compare_impl_ty(tcx, impl_item, trait_item, impl_trait_ref), + ty::AssocKind::Const { .. } => { + compare_impl_const(tcx, impl_item, trait_item, impl_trait_ref) + } } } @@ -378,13 +379,13 @@ fn compare_method_predicate_entailment<'tcx>( // Annoyingly, asking for the WF predicates of an array (with an unevaluated const (only?)) // will give back the well-formed predicate of the same array. let mut wf_args_seen: FxHashSet<_> = wf_args.iter().copied().collect(); - while let Some(arg) = wf_args.pop() { + while let Some(term) = wf_args.pop() { let Some(obligations) = rustc_trait_selection::traits::wf::obligations( infcx, param_env, impl_m_def_id, 0, - arg, + term, impl_m_span, ) else { continue; @@ -401,9 +402,9 @@ fn compare_method_predicate_entailment<'tcx>( | ty::ClauseKind::TypeOutlives(..) | ty::ClauseKind::Projection(..), ) => ocx.register_obligation(obligation), - ty::PredicateKind::Clause(ty::ClauseKind::WellFormed(arg)) => { - if wf_args_seen.insert(arg) { - wf_args.push(arg) + ty::PredicateKind::Clause(ty::ClauseKind::WellFormed(term)) => { + if wf_args_seen.insert(term) { + wf_args.push(term) } } _ => {} @@ -443,7 +444,7 @@ impl<'tcx> TypeFolder> for RemapLateParam<'tcx> { } fn fold_region(&mut self, r: ty::Region<'tcx>) -> ty::Region<'tcx> { - if let ty::ReLateParam(fr) = *r { + if let ty::ReLateParam(fr) = r.kind() { ty::Region::new_late_param( self.tcx, fr.scope, @@ -607,7 +608,7 @@ pub(super) fn collect_return_position_impl_trait_in_trait_tys<'tcx>( // with placeholders, which imply nothing about outlives bounds, and then // prove below that the hidden types are well formed. let universe = infcx.create_next_universe(); - let mut idx = 0; + let mut idx = ty::BoundVar::ZERO; let mapping: FxIndexMap<_, _> = collector .types .iter() @@ -624,10 +625,7 @@ pub(super) fn collect_return_position_impl_trait_in_trait_tys<'tcx>( tcx, ty::Placeholder { universe, - bound: ty::BoundTy { - var: ty::BoundVar::from_usize(idx), - kind: ty::BoundTyKind::Anon, - }, + bound: ty::BoundTy { var: idx, kind: ty::BoundTyKind::Anon }, }, ), ) @@ -655,7 +653,7 @@ pub(super) fn collect_return_position_impl_trait_in_trait_tys<'tcx>( cause.span, E0053, "method `{}` has an incompatible return type for trait", - trait_m.name + trait_m.name() ); infcx.err_ctxt().note_type_err( &mut diag, @@ -995,6 +993,26 @@ impl<'tcx> ty::FallibleTypeFolder> for RemapHiddenTyRegions<'tcx> { } } +/// Gets the string for an explicit self declaration, e.g. "self", "&self", +/// etc. +fn get_self_string<'tcx, P>(self_arg_ty: Ty<'tcx>, is_self_ty: P) -> String +where + P: Fn(Ty<'tcx>) -> bool, +{ + if is_self_ty(self_arg_ty) { + "self".to_owned() + } else if let ty::Ref(_, ty, mutbl) = self_arg_ty.kind() + && is_self_ty(*ty) + { + match mutbl { + hir::Mutability::Not => "&self".to_owned(), + hir::Mutability::Mut => "&mut self".to_owned(), + } + } else { + format!("self: {self_arg_ty}") + } +} + fn report_trait_method_mismatch<'tcx>( infcx: &InferCtxt<'tcx>, mut cause: ObligationCause<'tcx>, @@ -1013,30 +1031,25 @@ fn report_trait_method_mismatch<'tcx>( impl_err_span, E0053, "method `{}` has an incompatible type for trait", - trait_m.name + trait_m.name() ); match &terr { TypeError::ArgumentMutability(0) | TypeError::ArgumentSorts(_, 0) - if trait_m.fn_has_self_parameter => + if trait_m.is_method() => { let ty = trait_sig.inputs()[0]; - let sugg = match ExplicitSelf::determine(ty, |ty| ty == impl_trait_ref.self_ty()) { - ExplicitSelf::ByValue => "self".to_owned(), - ExplicitSelf::ByReference(_, hir::Mutability::Not) => "&self".to_owned(), - ExplicitSelf::ByReference(_, hir::Mutability::Mut) => "&mut self".to_owned(), - _ => format!("self: {ty}"), - }; + let sugg = get_self_string(ty, |ty| ty == impl_trait_ref.self_ty()); // When the `impl` receiver is an arbitrary self type, like `self: Box`, the // span points only at the type `Box, but we want to cover the whole // argument pattern and type. let (sig, body) = tcx.hir_expect_impl_item(impl_m.def_id.expect_local()).expect_fn(); let span = tcx - .hir_body_param_names(body) + .hir_body_param_idents(body) .zip(sig.decl.inputs.iter()) - .map(|(param_name, ty)| { - if let Some(param_name) = param_name { - param_name.span.to(ty.span) + .map(|(param_ident, ty)| { + if let Some(param_ident) = param_ident { + param_ident.span.to(ty.span) } else { ty.span } @@ -1124,65 +1137,319 @@ fn check_region_bounds_on_impl_item<'tcx>( // but found 0" it's confusing, because it looks like there // are zero. Since I don't quite know how to phrase things at // the moment, give a kind of vague error message. - if trait_params != impl_params { - let span = tcx - .hir_get_generics(impl_m.def_id.expect_local()) - .expect("expected impl item to have generics or else we can't compare them") - .span; - - let mut generics_span = None; - let mut bounds_span = vec![]; - let mut where_span = None; - if let Some(trait_node) = tcx.hir_get_if_local(trait_m.def_id) - && let Some(trait_generics) = trait_node.generics() - { - generics_span = Some(trait_generics.span); - // FIXME: we could potentially look at the impl's bounds to not point at bounds that - // *are* present in the impl. - for p in trait_generics.predicates { - if let hir::WherePredicateKind::BoundPredicate(pred) = p.kind { - for b in pred.bounds { + if trait_params == impl_params { + return Ok(()); + } + + if !delay && let Some(guar) = check_region_late_boundedness(tcx, impl_m, trait_m) { + return Err(guar); + } + + let span = tcx + .hir_get_generics(impl_m.def_id.expect_local()) + .expect("expected impl item to have generics or else we can't compare them") + .span; + + let mut generics_span = None; + let mut bounds_span = vec![]; + let mut where_span = None; + + if let Some(trait_node) = tcx.hir_get_if_local(trait_m.def_id) + && let Some(trait_generics) = trait_node.generics() + { + generics_span = Some(trait_generics.span); + // FIXME: we could potentially look at the impl's bounds to not point at bounds that + // *are* present in the impl. + for p in trait_generics.predicates { + match p.kind { + hir::WherePredicateKind::BoundPredicate(hir::WhereBoundPredicate { + bounds, + .. + }) + | hir::WherePredicateKind::RegionPredicate(hir::WhereRegionPredicate { + bounds, + .. + }) => { + for b in *bounds { if let hir::GenericBound::Outlives(lt) = b { bounds_span.push(lt.ident.span); } } } + _ => {} } - if let Some(impl_node) = tcx.hir_get_if_local(impl_m.def_id) - && let Some(impl_generics) = impl_node.generics() - { - let mut impl_bounds = 0; - for p in impl_generics.predicates { - if let hir::WherePredicateKind::BoundPredicate(pred) = p.kind { - for b in pred.bounds { + } + if let Some(impl_node) = tcx.hir_get_if_local(impl_m.def_id) + && let Some(impl_generics) = impl_node.generics() + { + let mut impl_bounds = 0; + for p in impl_generics.predicates { + match p.kind { + hir::WherePredicateKind::BoundPredicate(hir::WhereBoundPredicate { + bounds, + .. + }) + | hir::WherePredicateKind::RegionPredicate(hir::WhereRegionPredicate { + bounds, + .. + }) => { + for b in *bounds { if let hir::GenericBound::Outlives(_) = b { impl_bounds += 1; } } } + _ => {} } - if impl_bounds == bounds_span.len() { - bounds_span = vec![]; - } else if impl_generics.has_where_clause_predicates { - where_span = Some(impl_generics.where_clause_span); + } + if impl_bounds == bounds_span.len() { + bounds_span = vec![]; + } else if impl_generics.has_where_clause_predicates { + where_span = Some(impl_generics.where_clause_span); + } + } + } + + let reported = tcx + .dcx() + .create_err(LifetimesOrBoundsMismatchOnTrait { + span, + item_kind: impl_m.descr(), + ident: impl_m.ident(tcx), + generics_span, + bounds_span, + where_span, + }) + .emit_unless(delay); + + Err(reported) +} + +#[allow(unused)] +enum LateEarlyMismatch<'tcx> { + EarlyInImpl(DefId, DefId, ty::Region<'tcx>), + LateInImpl(DefId, DefId, ty::Region<'tcx>), +} + +fn check_region_late_boundedness<'tcx>( + tcx: TyCtxt<'tcx>, + impl_m: ty::AssocItem, + trait_m: ty::AssocItem, +) -> Option { + if !impl_m.is_fn() { + return None; + } + + let (infcx, param_env) = tcx + .infer_ctxt() + .build_with_typing_env(ty::TypingEnv::non_body_analysis(tcx, impl_m.def_id)); + + let impl_m_args = infcx.fresh_args_for_item(DUMMY_SP, impl_m.def_id); + let impl_m_sig = tcx.fn_sig(impl_m.def_id).instantiate(tcx, impl_m_args); + let impl_m_sig = tcx.liberate_late_bound_regions(impl_m.def_id, impl_m_sig); + + let trait_m_args = infcx.fresh_args_for_item(DUMMY_SP, trait_m.def_id); + let trait_m_sig = tcx.fn_sig(trait_m.def_id).instantiate(tcx, trait_m_args); + let trait_m_sig = tcx.liberate_late_bound_regions(impl_m.def_id, trait_m_sig); + + let ocx = ObligationCtxt::new(&infcx); + + // Equate the signatures so that we can infer whether a late-bound param was present where + // an early-bound param was expected, since we replace the late-bound lifetimes with + // `ReLateParam`, and early-bound lifetimes with infer vars, so the early-bound args will + // resolve to `ReLateParam` if there is a mismatch. + let Ok(()) = ocx.eq( + &ObligationCause::dummy(), + param_env, + ty::Binder::dummy(trait_m_sig), + ty::Binder::dummy(impl_m_sig), + ) else { + return None; + }; + + let errors = ocx.select_where_possible(); + if !errors.is_empty() { + return None; + } + + let mut mismatched = vec![]; + + let impl_generics = tcx.generics_of(impl_m.def_id); + for (id_arg, arg) in + std::iter::zip(ty::GenericArgs::identity_for_item(tcx, impl_m.def_id), impl_m_args) + { + if let ty::GenericArgKind::Lifetime(r) = arg.unpack() + && let ty::ReVar(vid) = r.kind() + && let r = infcx + .inner + .borrow_mut() + .unwrap_region_constraints() + .opportunistic_resolve_var(tcx, vid) + && let ty::ReLateParam(ty::LateParamRegion { + kind: ty::LateParamRegionKind::Named(trait_param_def_id, _), + .. + }) = r.kind() + && let ty::ReEarlyParam(ebr) = id_arg.expect_region().kind() + { + mismatched.push(LateEarlyMismatch::EarlyInImpl( + impl_generics.region_param(ebr, tcx).def_id, + trait_param_def_id, + id_arg.expect_region(), + )); + } + } + + let trait_generics = tcx.generics_of(trait_m.def_id); + for (id_arg, arg) in + std::iter::zip(ty::GenericArgs::identity_for_item(tcx, trait_m.def_id), trait_m_args) + { + if let ty::GenericArgKind::Lifetime(r) = arg.unpack() + && let ty::ReVar(vid) = r.kind() + && let r = infcx + .inner + .borrow_mut() + .unwrap_region_constraints() + .opportunistic_resolve_var(tcx, vid) + && let ty::ReLateParam(ty::LateParamRegion { + kind: ty::LateParamRegionKind::Named(impl_param_def_id, _), + .. + }) = r.kind() + && let ty::ReEarlyParam(ebr) = id_arg.expect_region().kind() + { + mismatched.push(LateEarlyMismatch::LateInImpl( + impl_param_def_id, + trait_generics.region_param(ebr, tcx).def_id, + id_arg.expect_region(), + )); + } + } + + if mismatched.is_empty() { + return None; + } + + let spans: Vec<_> = mismatched + .iter() + .map(|param| { + let (LateEarlyMismatch::EarlyInImpl(impl_param_def_id, ..) + | LateEarlyMismatch::LateInImpl(impl_param_def_id, ..)) = param; + tcx.def_span(impl_param_def_id) + }) + .collect(); + + let mut diag = tcx + .dcx() + .struct_span_err(spans, "lifetime parameters do not match the trait definition") + .with_note("lifetime parameters differ in whether they are early- or late-bound") + .with_code(E0195); + for mismatch in mismatched { + match mismatch { + LateEarlyMismatch::EarlyInImpl( + impl_param_def_id, + trait_param_def_id, + early_bound_region, + ) => { + let mut multispan = MultiSpan::from_spans(vec![ + tcx.def_span(impl_param_def_id), + tcx.def_span(trait_param_def_id), + ]); + multispan + .push_span_label(tcx.def_span(tcx.parent(impl_m.def_id)), "in this impl..."); + multispan + .push_span_label(tcx.def_span(tcx.parent(trait_m.def_id)), "in this trait..."); + multispan.push_span_label( + tcx.def_span(impl_param_def_id), + format!("`{}` is early-bound", tcx.item_name(impl_param_def_id)), + ); + multispan.push_span_label( + tcx.def_span(trait_param_def_id), + format!("`{}` is late-bound", tcx.item_name(trait_param_def_id)), + ); + if let Some(span) = + find_region_in_predicates(tcx, impl_m.def_id, early_bound_region) + { + multispan.push_span_label( + span, + format!( + "this lifetime bound makes `{}` early-bound", + tcx.item_name(impl_param_def_id) + ), + ); } + diag.span_note( + multispan, + format!( + "`{}` differs between the trait and impl", + tcx.item_name(impl_param_def_id) + ), + ); + } + LateEarlyMismatch::LateInImpl( + impl_param_def_id, + trait_param_def_id, + early_bound_region, + ) => { + let mut multispan = MultiSpan::from_spans(vec![ + tcx.def_span(impl_param_def_id), + tcx.def_span(trait_param_def_id), + ]); + multispan + .push_span_label(tcx.def_span(tcx.parent(impl_m.def_id)), "in this impl..."); + multispan + .push_span_label(tcx.def_span(tcx.parent(trait_m.def_id)), "in this trait..."); + multispan.push_span_label( + tcx.def_span(impl_param_def_id), + format!("`{}` is late-bound", tcx.item_name(impl_param_def_id)), + ); + multispan.push_span_label( + tcx.def_span(trait_param_def_id), + format!("`{}` is early-bound", tcx.item_name(trait_param_def_id)), + ); + if let Some(span) = + find_region_in_predicates(tcx, trait_m.def_id, early_bound_region) + { + multispan.push_span_label( + span, + format!( + "this lifetime bound makes `{}` early-bound", + tcx.item_name(trait_param_def_id) + ), + ); + } + diag.span_note( + multispan, + format!( + "`{}` differs between the trait and impl", + tcx.item_name(impl_param_def_id) + ), + ); } } - let reported = tcx - .dcx() - .create_err(LifetimesOrBoundsMismatchOnTrait { - span, - item_kind: impl_m.descr(), - ident: impl_m.ident(tcx), - generics_span, - bounds_span, - where_span, - }) - .emit_unless(delay); - return Err(reported); } - Ok(()) + Some(diag.emit()) +} + +fn find_region_in_predicates<'tcx>( + tcx: TyCtxt<'tcx>, + def_id: DefId, + early_bound_region: ty::Region<'tcx>, +) -> Option { + for (pred, span) in tcx.explicit_predicates_of(def_id).instantiate_identity(tcx) { + if pred.visit_with(&mut FindRegion(early_bound_region)).is_break() { + return Some(span); + } + } + + struct FindRegion<'tcx>(ty::Region<'tcx>); + impl<'tcx> TypeVisitor> for FindRegion<'tcx> { + type Result = ControlFlow<()>; + fn visit_region(&mut self, r: ty::Region<'tcx>) -> Self::Result { + if r == self.0 { ControlFlow::Break(()) } else { ControlFlow::Continue(()) } + } + } + + None } #[instrument(level = "debug", skip(infcx))] @@ -1208,7 +1475,7 @@ fn extract_spans_for_error_reporting<'tcx>( TypeError::ArgumentMutability(i) | TypeError::ArgumentSorts(ExpectedFound { .. }, i) => { (impl_args.nth(i).unwrap(), trait_args.and_then(|mut args| args.nth(i))) } - _ => (cause.span, tcx.hir().span_if_local(trait_m.def_id)), + _ => (cause.span, tcx.hir_span_if_local(trait_m.def_id)), } } @@ -1238,15 +1505,10 @@ fn compare_self_type<'tcx>( .build_with_typing_env(ty::TypingEnv::non_body_analysis(tcx, method.def_id)); let self_arg_ty = tcx.liberate_late_bound_regions(method.def_id, self_arg_ty); let can_eq_self = |ty| infcx.can_eq(param_env, untransformed_self_ty, ty); - match ExplicitSelf::determine(self_arg_ty, can_eq_self) { - ExplicitSelf::ByValue => "self".to_owned(), - ExplicitSelf::ByReference(_, hir::Mutability::Not) => "&self".to_owned(), - ExplicitSelf::ByReference(_, hir::Mutability::Mut) => "&mut self".to_owned(), - _ => format!("self: {self_arg_ty}"), - } + get_self_string(self_arg_ty, can_eq_self) }; - match (trait_m.fn_has_self_parameter, impl_m.fn_has_self_parameter) { + match (trait_m.is_method(), impl_m.is_method()) { (false, false) | (true, true) => {} (false, true) => { @@ -1257,14 +1519,14 @@ fn compare_self_type<'tcx>( impl_m_span, E0185, "method `{}` has a `{}` declaration in the impl, but not in the trait", - trait_m.name, + trait_m.name(), self_descr ); err.span_label(impl_m_span, format!("`{self_descr}` used in impl")); - if let Some(span) = tcx.hir().span_if_local(trait_m.def_id) { + if let Some(span) = tcx.hir_span_if_local(trait_m.def_id) { err.span_label(span, format!("trait method declared without `{self_descr}`")); } else { - err.note_trait_signature(trait_m.name, trait_m.signature(tcx)); + err.note_trait_signature(trait_m.name(), trait_m.signature(tcx)); } return Err(err.emit_unless(delay)); } @@ -1277,14 +1539,14 @@ fn compare_self_type<'tcx>( impl_m_span, E0186, "method `{}` has a `{}` declaration in the trait, but not in the impl", - trait_m.name, + trait_m.name(), self_descr ); err.span_label(impl_m_span, format!("expected `{self_descr}` in impl")); - if let Some(span) = tcx.hir().span_if_local(trait_m.def_id) { + if let Some(span) = tcx.hir_span_if_local(trait_m.def_id) { err.span_label(span, format!("`{self_descr}` used in trait")); } else { - err.note_trait_signature(trait_m.name, trait_m.signature(tcx)); + err.note_trait_signature(trait_m.name(), trait_m.signature(tcx)); } return Err(err.emit_unless(delay)); @@ -1354,7 +1616,7 @@ fn compare_number_of_generics<'tcx>( let mut err_occurred = None; for (kind, trait_count, impl_count) in matchings { if impl_count != trait_count { - let arg_spans = |kind: ty::AssocKind, generics: &hir::Generics<'_>| { + let arg_spans = |item: &ty::AssocItem, generics: &hir::Generics<'_>| { let mut spans = generics .params .iter() @@ -1364,7 +1626,7 @@ fn compare_number_of_generics<'tcx>( } => { // A fn can have an arbitrary number of extra elided lifetimes for the // same signature. - !matches!(kind, ty::AssocKind::Fn) + !item.is_fn() } _ => true, }) @@ -1377,7 +1639,7 @@ fn compare_number_of_generics<'tcx>( }; let (trait_spans, impl_trait_spans) = if let Some(def_id) = trait_.def_id.as_local() { let trait_item = tcx.hir_expect_trait_item(def_id); - let arg_spans: Vec = arg_spans(trait_.kind, trait_item.generics); + let arg_spans: Vec = arg_spans(&trait_, trait_item.generics); let impl_trait_spans: Vec = trait_item .generics .params @@ -1389,7 +1651,7 @@ fn compare_number_of_generics<'tcx>( .collect(); (Some(arg_spans), impl_trait_spans) } else { - let trait_span = tcx.hir().span_if_local(trait_.def_id); + let trait_span = tcx.hir_span_if_local(trait_.def_id); (trait_span.map(|s| vec![s]), vec![]) }; @@ -1403,7 +1665,7 @@ fn compare_number_of_generics<'tcx>( _ => None, }) .collect(); - let spans = arg_spans(impl_.kind, impl_item.generics); + let spans = arg_spans(&impl_, impl_item.generics); let span = spans.first().copied(); let mut err = tcx.dcx().struct_span_err( @@ -1412,7 +1674,7 @@ fn compare_number_of_generics<'tcx>( "{} `{}` has {} {kind} parameter{} but its trait \ declaration has {} {kind} parameter{}", item_kind, - trait_.name, + trait_.name(), impl_count, pluralize!(impl_count), trait_count, @@ -1481,7 +1743,7 @@ fn compare_number_of_method_arguments<'tcx>( } }) }) - .or_else(|| tcx.hir().span_if_local(trait_m.def_id)); + .or_else(|| tcx.hir_span_if_local(trait_m.def_id)); let (impl_m_sig, _) = &tcx.hir_expect_impl_item(impl_m.def_id.expect_local()).expect_fn(); let pos = impl_number_args.saturating_sub(1); @@ -1503,7 +1765,7 @@ fn compare_number_of_method_arguments<'tcx>( impl_span, E0050, "method `{}` has {} but the declaration in trait `{}` has {}", - trait_m.name, + trait_m.name(), potentially_plural_count(impl_number_args, "parameter"), tcx.def_path_str(trait_m.def_id), trait_number_args @@ -1518,7 +1780,7 @@ fn compare_number_of_method_arguments<'tcx>( ), ); } else { - err.note_trait_signature(trait_m.name, trait_m.signature(tcx)); + err.note_trait_signature(trait_m.name(), trait_m.signature(tcx)); } err.span_label( @@ -1572,7 +1834,7 @@ fn compare_synthetic_generics<'tcx>( impl_span, E0643, "method `{}` has incompatible signature for trait", - trait_m.name + trait_m.name() ); err.span_label(trait_span, "declaration in trait here"); if impl_synthetic { @@ -1694,7 +1956,7 @@ fn compare_generic_param_kinds<'tcx>( trait_item: ty::AssocItem, delay: bool, ) -> Result<(), ErrorGuaranteed> { - assert_eq!(impl_item.kind, trait_item.kind); + assert_eq!(impl_item.as_tag(), trait_item.as_tag()); let ty_const_params_of = |def_id| { tcx.generics_of(def_id).own_params.iter().filter(|param| { @@ -1732,7 +1994,7 @@ fn compare_generic_param_kinds<'tcx>( E0053, "{} `{}` has an incompatible generic parameter for trait `{}`", impl_item.descr(), - trait_item.name, + trait_item.name(), &tcx.def_path_str(tcx.parent(trait_item.def_id)) ); @@ -1868,7 +2130,7 @@ fn compare_const_predicate_entailment<'tcx>( cause.span, E0326, "implemented const `{}` has an incompatible type for trait", - trait_ct.name + trait_ct.name() ); let trait_c_span = trait_ct.def_id.as_local().map(|trait_ct_def_id| { @@ -2226,16 +2488,19 @@ fn param_env_with_gat_bounds<'tcx>( // of the RPITITs associated with the same body. This is because checking // the item bounds of RPITITs often involves nested RPITITs having to prove // bounds about themselves. - let impl_tys_to_install = match impl_ty.opt_rpitit_info { - None => vec![impl_ty], - Some( - ty::ImplTraitInTraitData::Impl { fn_def_id } - | ty::ImplTraitInTraitData::Trait { fn_def_id, .. }, - ) => tcx + let impl_tys_to_install = match impl_ty.kind { + ty::AssocKind::Type { + data: + ty::AssocTypeData::Rpitit( + ty::ImplTraitInTraitData::Impl { fn_def_id } + | ty::ImplTraitInTraitData::Trait { fn_def_id, .. }, + ), + } => tcx .associated_types_for_impl_traits_in_associated_fn(fn_def_id) .iter() .map(|def_id| tcx.associated_item(*def_id)) .collect(), + _ => vec![impl_ty], }; for impl_ty in impl_tys_to_install { @@ -2366,7 +2631,7 @@ fn try_report_async_mismatch<'tcx>( return Err(tcx.sess.dcx().emit_err(MethodShouldReturnFuture { span: tcx.def_span(impl_m.def_id), method_name: tcx.item_ident(impl_m.def_id), - trait_item_span: tcx.hir().span_if_local(trait_m.def_id), + trait_item_span: tcx.hir_span_if_local(trait_m.def_id), })); } } diff --git a/compiler/rustc_hir_analysis/src/check/intrinsic.rs b/compiler/rustc_hir_analysis/src/check/intrinsic.rs index 42d785c8dd0fe..692784bf1714d 100644 --- a/compiler/rustc_hir_analysis/src/check/intrinsic.rs +++ b/compiler/rustc_hir_analysis/src/check/intrinsic.rs @@ -1,10 +1,8 @@ -//! Type-checking for the rust-intrinsic intrinsics that the compiler exposes. +//! Type-checking for the `#[rustc_intrinsic]` intrinsics that the compiler exposes. use rustc_abi::ExternAbi; -use rustc_errors::codes::*; -use rustc_errors::{DiagMessage, struct_span_code_err}; -use rustc_hir::{self as hir, Safety}; -use rustc_middle::bug; +use rustc_errors::DiagMessage; +use rustc_hir::{self as hir, LangItem}; use rustc_middle::traits::{ObligationCause, ObligationCauseCode}; use rustc_middle::ty::{self, Ty, TyCtxt}; use rustc_span::def_id::LocalDefId; @@ -26,17 +24,10 @@ fn equate_intrinsic_type<'tcx>( sig: ty::PolyFnSig<'tcx>, ) { let (generics, span) = match tcx.hir_node_by_def_id(def_id) { - hir::Node::Item(hir::Item { kind: hir::ItemKind::Fn { generics, .. }, .. }) - | hir::Node::ForeignItem(hir::ForeignItem { - kind: hir::ForeignItemKind::Fn(_, _, generics), - .. - }) => (tcx.generics_of(def_id), generics.span), - _ => { - struct_span_code_err!(tcx.dcx(), span, E0622, "intrinsic must be a function") - .with_span_label(span, "expected a function") - .emit(); - return; + hir::Node::Item(hir::Item { kind: hir::ItemKind::Fn { generics, .. }, .. }) => { + (tcx.generics_of(def_id), generics.span) } + _ => tcx.dcx().span_bug(span, "intrinsic must be a function"), }; let own_counts = generics.own_counts(); @@ -70,13 +61,7 @@ fn equate_intrinsic_type<'tcx>( } /// Returns the unsafety of the given intrinsic. -pub fn intrinsic_operation_unsafety(tcx: TyCtxt<'_>, intrinsic_id: LocalDefId) -> hir::Safety { - let has_safe_attr = if tcx.has_attr(intrinsic_id, sym::rustc_intrinsic) { - tcx.fn_sig(intrinsic_id).skip_binder().safety() - } else { - // Old-style intrinsics are never safe - Safety::Unsafe - }; +fn intrinsic_operation_unsafety(tcx: TyCtxt<'_>, intrinsic_id: LocalDefId) -> hir::Safety { let is_in_list = match tcx.item_name(intrinsic_id.into()) { // When adding a new intrinsic to this list, // it's usually worth updating that intrinsic's documentation @@ -148,7 +133,7 @@ pub fn intrinsic_operation_unsafety(tcx: TyCtxt<'_>, intrinsic_id: LocalDefId) - _ => hir::Safety::Unsafe, }; - if has_safe_attr != is_in_list { + if tcx.fn_sig(intrinsic_id).skip_binder().safety() != is_in_list { tcx.dcx().struct_span_err( tcx.def_span(intrinsic_id), DiagMessage::from(format!( @@ -163,12 +148,11 @@ pub fn intrinsic_operation_unsafety(tcx: TyCtxt<'_>, intrinsic_id: LocalDefId) - /// Remember to add all intrinsics here, in `compiler/rustc_codegen_llvm/src/intrinsic.rs`, /// and in `library/core/src/intrinsics.rs`. -pub fn check_intrinsic_type( +pub(crate) fn check_intrinsic_type( tcx: TyCtxt<'_>, intrinsic_id: LocalDefId, span: Span, intrinsic_name: Symbol, - abi: ExternAbi, ) { let generics = tcx.generics_of(intrinsic_id); let param = |n| { @@ -188,23 +172,22 @@ pub fn check_intrinsic_type( ty::BoundVariableKind::Region(ty::BoundRegionKind::ClosureEnv), ]); let mk_va_list_ty = |mutbl| { - tcx.lang_items().va_list().map(|did| { - let region = ty::Region::new_bound( - tcx, - ty::INNERMOST, - ty::BoundRegion { var: ty::BoundVar::ZERO, kind: ty::BoundRegionKind::Anon }, - ); - let env_region = ty::Region::new_bound( - tcx, - ty::INNERMOST, - ty::BoundRegion { - var: ty::BoundVar::from_u32(2), - kind: ty::BoundRegionKind::ClosureEnv, - }, - ); - let va_list_ty = tcx.type_of(did).instantiate(tcx, &[region.into()]); - (Ty::new_ref(tcx, env_region, va_list_ty, mutbl), va_list_ty) - }) + let did = tcx.require_lang_item(LangItem::VaList, Some(span)); + let region = ty::Region::new_bound( + tcx, + ty::INNERMOST, + ty::BoundRegion { var: ty::BoundVar::ZERO, kind: ty::BoundRegionKind::Anon }, + ); + let env_region = ty::Region::new_bound( + tcx, + ty::INNERMOST, + ty::BoundRegion { + var: ty::BoundVar::from_u32(2), + kind: ty::BoundRegionKind::ClosureEnv, + }, + ); + let va_list_ty = tcx.type_of(did).instantiate(tcx, &[region.into()]); + (Ty::new_ref(tcx, env_region, va_list_ty, mutbl), va_list_ty) }; let (n_tps, n_lts, n_cts, inputs, output, safety) = if name_str.starts_with("atomic_") { @@ -232,15 +215,11 @@ pub fn check_intrinsic_type( }; (n_tps, 0, 0, inputs, output, hir::Safety::Unsafe) } else if intrinsic_name == sym::contract_check_ensures { - // contract_check_ensures::<'a, Ret, C>(&'a Ret, C) - // where C: impl Fn(&'a Ret) -> bool, + // contract_check_ensures::(Ret, C) -> Ret + // where C: for<'a> Fn(&'a Ret) -> bool, // - // so: two type params, one lifetime param, 0 const params, two inputs, no return - - let p = generics.param_at(0, tcx); - let r = ty::Region::new_early_param(tcx, p.to_early_bound_region_data()); - let ref_ret = Ty::new_imm_ref(tcx, r, param(1)); - (2, 1, 0, vec![ref_ret, param(2)], tcx.types.unit, hir::Safety::Safe) + // so: two type params, 0 lifetime param, 0 const params, two inputs, no return + (2, 0, 0, vec![param(0), param(1)], param(1), hir::Safety::Safe) } else { let safety = intrinsic_operation_unsafety(tcx, intrinsic_id); let (n_tps, n_cts, inputs, output) = match intrinsic_name { @@ -567,23 +546,17 @@ pub fn check_intrinsic_type( ) } - sym::va_start | sym::va_end => match mk_va_list_ty(hir::Mutability::Mut) { - Some((va_list_ref_ty, _)) => (0, 0, vec![va_list_ref_ty], tcx.types.unit), - None => bug!("`va_list` lang item needed for C-variadic intrinsics"), - }, + sym::va_start | sym::va_end => { + (0, 0, vec![mk_va_list_ty(hir::Mutability::Mut).0], tcx.types.unit) + } - sym::va_copy => match mk_va_list_ty(hir::Mutability::Not) { - Some((va_list_ref_ty, va_list_ty)) => { - let va_list_ptr_ty = Ty::new_mut_ptr(tcx, va_list_ty); - (0, 0, vec![va_list_ptr_ty, va_list_ref_ty], tcx.types.unit) - } - None => bug!("`va_list` lang item needed for C-variadic intrinsics"), - }, + sym::va_copy => { + let (va_list_ref_ty, va_list_ty) = mk_va_list_ty(hir::Mutability::Not); + let va_list_ptr_ty = Ty::new_mut_ptr(tcx, va_list_ty); + (0, 0, vec![va_list_ptr_ty, va_list_ref_ty], tcx.types.unit) + } - sym::va_arg => match mk_va_list_ty(hir::Mutability::Mut) { - Some((va_list_ref_ty, _)) => (1, 0, vec![va_list_ref_ty], param(0)), - None => bug!("`va_list` lang item needed for C-variadic intrinsics"), - }, + sym::va_arg => (1, 0, vec![mk_va_list_ty(hir::Mutability::Mut).0], param(0)), sym::nontemporal_store => { (1, 0, vec![Ty::new_mut_ptr(tcx, param(0)), param(0)], tcx.types.unit) @@ -674,8 +647,12 @@ pub fn check_intrinsic_type( sym::simd_masked_load => (3, 0, vec![param(0), param(1), param(2)], param(2)), sym::simd_masked_store => (3, 0, vec![param(0), param(1), param(2)], tcx.types.unit), sym::simd_scatter => (3, 0, vec![param(0), param(1), param(2)], tcx.types.unit), - sym::simd_insert => (2, 0, vec![param(0), tcx.types.u32, param(1)], param(0)), - sym::simd_extract => (2, 0, vec![param(0), tcx.types.u32], param(1)), + sym::simd_insert | sym::simd_insert_dyn => { + (2, 0, vec![param(0), tcx.types.u32, param(1)], param(0)) + } + sym::simd_extract | sym::simd_extract_dyn => { + (2, 0, vec![param(0), tcx.types.u32], param(1)) + } sym::simd_cast | sym::simd_as | sym::simd_cast_ptr @@ -706,7 +683,7 @@ pub fn check_intrinsic_type( }; (n_tps, 0, n_cts, inputs, output, safety) }; - let sig = tcx.mk_fn_sig(inputs, output, false, safety, abi); + let sig = tcx.mk_fn_sig(inputs, output, false, safety, ExternAbi::Rust); let sig = ty::Binder::bind_with_vars(sig, bound_vars); equate_intrinsic_type(tcx, span, intrinsic_id, n_tps, n_lts, n_cts, sig) } diff --git a/compiler/rustc_hir_analysis/src/check/intrinsicck.rs b/compiler/rustc_hir_analysis/src/check/intrinsicck.rs deleted file mode 100644 index d63165f0f1698..0000000000000 --- a/compiler/rustc_hir_analysis/src/check/intrinsicck.rs +++ /dev/null @@ -1,547 +0,0 @@ -use rustc_abi::FieldIdx; -use rustc_ast::InlineAsmTemplatePiece; -use rustc_data_structures::fx::FxIndexSet; -use rustc_hir::def_id::DefId; -use rustc_hir::{self as hir, LangItem}; -use rustc_infer::infer::InferCtxt; -use rustc_middle::bug; -use rustc_middle::ty::{ - self, Article, FloatTy, IntTy, Ty, TyCtxt, TypeVisitableExt, TypeckResults, UintTy, -}; -use rustc_session::lint; -use rustc_span::def_id::LocalDefId; -use rustc_span::{Symbol, sym}; -use rustc_target::asm::{ - InlineAsmReg, InlineAsmRegClass, InlineAsmRegOrRegClass, InlineAsmType, ModifierInfo, -}; - -use crate::errors::RegisterTypeUnstable; - -pub struct InlineAsmCtxt<'a, 'tcx> { - typing_env: ty::TypingEnv<'tcx>, - target_features: &'tcx FxIndexSet, - infcx: &'a InferCtxt<'tcx>, - typeck_results: &'a TypeckResults<'tcx>, -} - -enum NonAsmTypeReason<'tcx> { - UnevaluatedSIMDArrayLength(DefId, ty::Const<'tcx>), - Invalid(Ty<'tcx>), - InvalidElement(DefId, Ty<'tcx>), - NotSizedPtr(Ty<'tcx>), -} - -impl<'a, 'tcx> InlineAsmCtxt<'a, 'tcx> { - pub fn new( - def_id: LocalDefId, - infcx: &'a InferCtxt<'tcx>, - typing_env: ty::TypingEnv<'tcx>, - typeck_results: &'a TypeckResults<'tcx>, - ) -> Self { - InlineAsmCtxt { - typing_env, - target_features: infcx.tcx.asm_target_features(def_id), - infcx, - typeck_results, - } - } - - fn tcx(&self) -> TyCtxt<'tcx> { - self.infcx.tcx - } - - fn expr_ty(&self, expr: &hir::Expr<'tcx>) -> Ty<'tcx> { - let ty = self.typeck_results.expr_ty_adjusted(expr); - let ty = self.infcx.resolve_vars_if_possible(ty); - if ty.has_non_region_infer() { - Ty::new_misc_error(self.tcx()) - } else { - self.tcx().erase_regions(ty) - } - } - - // FIXME(compiler-errors): This could use `<$ty as Pointee>::Metadata == ()` - fn is_thin_ptr_ty(&self, ty: Ty<'tcx>) -> bool { - // Type still may have region variables, but `Sized` does not depend - // on those, so just erase them before querying. - if ty.is_sized(self.tcx(), self.typing_env) { - return true; - } - if let ty::Foreign(..) = ty.kind() { - return true; - } - false - } - - fn get_asm_ty(&self, ty: Ty<'tcx>) -> Result> { - let asm_ty_isize = match self.tcx().sess.target.pointer_width { - 16 => InlineAsmType::I16, - 32 => InlineAsmType::I32, - 64 => InlineAsmType::I64, - width => bug!("unsupported pointer width: {width}"), - }; - - match *ty.kind() { - ty::Int(IntTy::I8) | ty::Uint(UintTy::U8) => Ok(InlineAsmType::I8), - ty::Int(IntTy::I16) | ty::Uint(UintTy::U16) => Ok(InlineAsmType::I16), - ty::Int(IntTy::I32) | ty::Uint(UintTy::U32) => Ok(InlineAsmType::I32), - ty::Int(IntTy::I64) | ty::Uint(UintTy::U64) => Ok(InlineAsmType::I64), - ty::Int(IntTy::I128) | ty::Uint(UintTy::U128) => Ok(InlineAsmType::I128), - ty::Int(IntTy::Isize) | ty::Uint(UintTy::Usize) => Ok(asm_ty_isize), - ty::Float(FloatTy::F16) => Ok(InlineAsmType::F16), - ty::Float(FloatTy::F32) => Ok(InlineAsmType::F32), - ty::Float(FloatTy::F64) => Ok(InlineAsmType::F64), - ty::Float(FloatTy::F128) => Ok(InlineAsmType::F128), - ty::FnPtr(..) => Ok(asm_ty_isize), - ty::RawPtr(elem_ty, _) => { - if self.is_thin_ptr_ty(elem_ty) { - Ok(asm_ty_isize) - } else { - Err(NonAsmTypeReason::NotSizedPtr(ty)) - } - } - ty::Adt(adt, args) if adt.repr().simd() => { - let fields = &adt.non_enum_variant().fields; - let field = &fields[FieldIdx::ZERO]; - let elem_ty = field.ty(self.tcx(), args); - - let (size, ty) = match elem_ty.kind() { - ty::Array(ty, len) => { - let len = self.tcx().normalize_erasing_regions(self.typing_env, *len); - if let Some(len) = len.try_to_target_usize(self.tcx()) { - (len, *ty) - } else { - return Err(NonAsmTypeReason::UnevaluatedSIMDArrayLength( - field.did, len, - )); - } - } - _ => (fields.len() as u64, elem_ty), - }; - - match ty.kind() { - ty::Int(IntTy::I8) | ty::Uint(UintTy::U8) => Ok(InlineAsmType::VecI8(size)), - ty::Int(IntTy::I16) | ty::Uint(UintTy::U16) => Ok(InlineAsmType::VecI16(size)), - ty::Int(IntTy::I32) | ty::Uint(UintTy::U32) => Ok(InlineAsmType::VecI32(size)), - ty::Int(IntTy::I64) | ty::Uint(UintTy::U64) => Ok(InlineAsmType::VecI64(size)), - ty::Int(IntTy::I128) | ty::Uint(UintTy::U128) => { - Ok(InlineAsmType::VecI128(size)) - } - ty::Int(IntTy::Isize) | ty::Uint(UintTy::Usize) => { - Ok(match self.tcx().sess.target.pointer_width { - 16 => InlineAsmType::VecI16(size), - 32 => InlineAsmType::VecI32(size), - 64 => InlineAsmType::VecI64(size), - width => bug!("unsupported pointer width: {width}"), - }) - } - ty::Float(FloatTy::F16) => Ok(InlineAsmType::VecF16(size)), - ty::Float(FloatTy::F32) => Ok(InlineAsmType::VecF32(size)), - ty::Float(FloatTy::F64) => Ok(InlineAsmType::VecF64(size)), - ty::Float(FloatTy::F128) => Ok(InlineAsmType::VecF128(size)), - _ => Err(NonAsmTypeReason::InvalidElement(field.did, ty)), - } - } - ty::Infer(_) => bug!("unexpected infer ty in asm operand"), - _ => Err(NonAsmTypeReason::Invalid(ty)), - } - } - - fn check_asm_operand_type( - &self, - idx: usize, - reg: InlineAsmRegOrRegClass, - expr: &'tcx hir::Expr<'tcx>, - template: &[InlineAsmTemplatePiece], - is_input: bool, - tied_input: Option<(&'tcx hir::Expr<'tcx>, Option)>, - ) -> Option { - let ty = self.expr_ty(expr); - if ty.has_non_region_infer() { - bug!("inference variable in asm operand ty: {:?} {:?}", expr, ty); - } - - let asm_ty = match *ty.kind() { - // `!` is allowed for input but not for output (issue #87802) - ty::Never if is_input => return None, - _ if ty.references_error() => return None, - ty::Adt(adt, args) if self.tcx().is_lang_item(adt.did(), LangItem::MaybeUninit) => { - let fields = &adt.non_enum_variant().fields; - let ty = fields[FieldIdx::from_u32(1)].ty(self.tcx(), args); - // FIXME: Are we just trying to map to the `T` in `MaybeUninit`? - // If so, just get it from the args. - let ty::Adt(ty, args) = ty.kind() else { - unreachable!("expected first field of `MaybeUninit` to be an ADT") - }; - assert!( - ty.is_manually_drop(), - "expected first field of `MaybeUninit` to be `ManuallyDrop`" - ); - let fields = &ty.non_enum_variant().fields; - let ty = fields[FieldIdx::ZERO].ty(self.tcx(), args); - self.get_asm_ty(ty) - } - _ => self.get_asm_ty(ty), - }; - let asm_ty = match asm_ty { - Ok(asm_ty) => asm_ty, - Err(reason) => { - match reason { - NonAsmTypeReason::UnevaluatedSIMDArrayLength(did, len) => { - let msg = format!("cannot evaluate SIMD vector length `{len}`"); - self.infcx - .dcx() - .struct_span_err(self.tcx().def_span(did), msg) - .with_span_note( - expr.span, - "SIMD vector length needs to be known statically for use in `asm!`", - ) - .emit(); - } - NonAsmTypeReason::Invalid(ty) => { - let msg = format!("cannot use value of type `{ty}` for inline assembly"); - self.infcx.dcx().struct_span_err(expr.span, msg).with_note( - "only integers, floats, SIMD vectors, pointers and function pointers \ - can be used as arguments for inline assembly", - ).emit(); - } - NonAsmTypeReason::NotSizedPtr(ty) => { - let msg = format!( - "cannot use value of unsized pointer type `{ty}` for inline assembly" - ); - self.infcx - .dcx() - .struct_span_err(expr.span, msg) - .with_note("only sized pointers can be used in inline assembly") - .emit(); - } - NonAsmTypeReason::InvalidElement(did, ty) => { - let msg = format!( - "cannot use SIMD vector with element type `{ty}` for inline assembly" - ); - self.infcx.dcx() - .struct_span_err(self.tcx().def_span(did), msg).with_span_note( - expr.span, - "only integers, floats, SIMD vectors, pointers and function pointers \ - can be used as arguments for inline assembly", - ).emit(); - } - } - return None; - } - }; - - // Check that the type implements Copy. The only case where this can - // possibly fail is for SIMD types which don't #[derive(Copy)]. - if !self.tcx().type_is_copy_modulo_regions(self.typing_env, ty) { - let msg = "arguments for inline assembly must be copyable"; - self.infcx - .dcx() - .struct_span_err(expr.span, msg) - .with_note(format!("`{ty}` does not implement the Copy trait")) - .emit(); - } - - // Ideally we wouldn't need to do this, but LLVM's register allocator - // really doesn't like it when tied operands have different types. - // - // This is purely an LLVM limitation, but we have to live with it since - // there is no way to hide this with implicit conversions. - // - // For the purposes of this check we only look at the `InlineAsmType`, - // which means that pointers and integers are treated as identical (modulo - // size). - if let Some((in_expr, Some(in_asm_ty))) = tied_input { - if in_asm_ty != asm_ty { - let msg = "incompatible types for asm inout argument"; - let in_expr_ty = self.expr_ty(in_expr); - self.infcx - .dcx() - .struct_span_err(vec![in_expr.span, expr.span], msg) - .with_span_label(in_expr.span, format!("type `{in_expr_ty}`")) - .with_span_label(expr.span, format!("type `{ty}`")) - .with_note( - "asm inout arguments must have the same type, \ - unless they are both pointers or integers of the same size", - ) - .emit(); - } - - // All of the later checks have already been done on the input, so - // let's not emit errors and warnings twice. - return Some(asm_ty); - } - - // Check the type against the list of types supported by the selected - // register class. - let asm_arch = self.tcx().sess.asm_arch.unwrap(); - let allow_experimental_reg = self.tcx().features().asm_experimental_reg(); - let reg_class = reg.reg_class(); - let supported_tys = reg_class.supported_types(asm_arch, allow_experimental_reg); - let Some((_, feature)) = supported_tys.iter().find(|&&(t, _)| t == asm_ty) else { - let mut err = if !allow_experimental_reg - && reg_class.supported_types(asm_arch, true).iter().any(|&(t, _)| t == asm_ty) - { - self.tcx().sess.create_feature_err( - RegisterTypeUnstable { span: expr.span, ty }, - sym::asm_experimental_reg, - ) - } else { - let msg = format!("type `{ty}` cannot be used with this register class"); - let mut err = self.infcx.dcx().struct_span_err(expr.span, msg); - let supported_tys: Vec<_> = - supported_tys.iter().map(|(t, _)| t.to_string()).collect(); - err.note(format!( - "register class `{}` supports these types: {}", - reg_class.name(), - supported_tys.join(", "), - )); - err - }; - if let Some(suggest) = reg_class.suggest_class(asm_arch, asm_ty) { - err.help(format!("consider using the `{}` register class instead", suggest.name())); - } - err.emit(); - return Some(asm_ty); - }; - - // Check whether the selected type requires a target feature. Note that - // this is different from the feature check we did earlier. While the - // previous check checked that this register class is usable at all - // with the currently enabled features, some types may only be usable - // with a register class when a certain feature is enabled. We check - // this here since it depends on the results of typeck. - // - // Also note that this check isn't run when the operand type is never - // (!). In that case we still need the earlier check to verify that the - // register class is usable at all. - if let Some(feature) = feature { - if !self.target_features.contains(feature) { - let msg = format!("`{feature}` target feature is not enabled"); - self.infcx - .dcx() - .struct_span_err(expr.span, msg) - .with_note(format!( - "this is required to use type `{}` with register class `{}`", - ty, - reg_class.name(), - )) - .emit(); - return Some(asm_ty); - } - } - - // Check whether a modifier is suggested for using this type. - if let Some(ModifierInfo { - modifier: suggested_modifier, - result: suggested_result, - size: suggested_size, - }) = reg_class.suggest_modifier(asm_arch, asm_ty) - { - // Search for any use of this operand without a modifier and emit - // the suggestion for them. - let mut spans = vec![]; - for piece in template { - if let &InlineAsmTemplatePiece::Placeholder { operand_idx, modifier, span } = piece - { - if operand_idx == idx && modifier.is_none() { - spans.push(span); - } - } - } - if !spans.is_empty() { - let ModifierInfo { - modifier: default_modifier, - result: default_result, - size: default_size, - } = reg_class.default_modifier(asm_arch).unwrap(); - self.tcx().node_span_lint( - lint::builtin::ASM_SUB_REGISTER, - expr.hir_id, - spans, - |lint| { - lint.primary_message("formatting may not be suitable for sub-register argument"); - lint.span_label(expr.span, "for this argument"); - lint.help(format!( - "use `{{{idx}:{suggested_modifier}}}` to have the register formatted as `{suggested_result}` (for {suggested_size}-bit values)", - )); - lint.help(format!( - "or use `{{{idx}:{default_modifier}}}` to keep the default formatting of `{default_result}` (for {default_size}-bit values)", - )); - }, - ); - } - } - - Some(asm_ty) - } - - pub fn check_asm(&self, asm: &hir::InlineAsm<'tcx>) { - let Some(asm_arch) = self.tcx().sess.asm_arch else { - self.infcx.dcx().delayed_bug("target architecture does not support asm"); - return; - }; - let allow_experimental_reg = self.tcx().features().asm_experimental_reg(); - for (idx, &(op, op_sp)) in asm.operands.iter().enumerate() { - // Validate register classes against currently enabled target - // features. We check that at least one type is available for - // the enabled features. - // - // We ignore target feature requirements for clobbers: if the - // feature is disabled then the compiler doesn't care what we - // do with the registers. - // - // Note that this is only possible for explicit register - // operands, which cannot be used in the asm string. - if let Some(reg) = op.reg() { - // Some explicit registers cannot be used depending on the - // target. Reject those here. - if let InlineAsmRegOrRegClass::Reg(reg) = reg { - if let InlineAsmReg::Err = reg { - // `validate` will panic on `Err`, as an error must - // already have been reported. - continue; - } - if let Err(msg) = reg.validate( - asm_arch, - self.tcx().sess.relocation_model(), - self.target_features, - &self.tcx().sess.target, - op.is_clobber(), - ) { - let msg = format!("cannot use register `{}`: {}", reg.name(), msg); - self.infcx.dcx().span_err(op_sp, msg); - continue; - } - } - - if !op.is_clobber() { - let mut missing_required_features = vec![]; - let reg_class = reg.reg_class(); - if let InlineAsmRegClass::Err = reg_class { - continue; - } - for &(_, feature) in reg_class.supported_types(asm_arch, allow_experimental_reg) - { - match feature { - Some(feature) => { - if self.target_features.contains(&feature) { - missing_required_features.clear(); - break; - } else { - missing_required_features.push(feature); - } - } - None => { - missing_required_features.clear(); - break; - } - } - } - - // We are sorting primitive strs here and can use unstable sort here - missing_required_features.sort_unstable(); - missing_required_features.dedup(); - match &missing_required_features[..] { - [] => {} - [feature] => { - let msg = format!( - "register class `{}` requires the `{}` target feature", - reg_class.name(), - feature - ); - self.infcx.dcx().span_err(op_sp, msg); - // register isn't enabled, don't do more checks - continue; - } - features => { - let msg = format!( - "register class `{}` requires at least one of the following target features: {}", - reg_class.name(), - features - .iter() - .map(|f| f.as_str()) - .intersperse(", ") - .collect::(), - ); - self.infcx.dcx().span_err(op_sp, msg); - // register isn't enabled, don't do more checks - continue; - } - } - } - } - - match op { - hir::InlineAsmOperand::In { reg, expr } => { - self.check_asm_operand_type(idx, reg, expr, asm.template, true, None); - } - hir::InlineAsmOperand::Out { reg, late: _, expr } => { - if let Some(expr) = expr { - self.check_asm_operand_type(idx, reg, expr, asm.template, false, None); - } - } - hir::InlineAsmOperand::InOut { reg, late: _, expr } => { - self.check_asm_operand_type(idx, reg, expr, asm.template, false, None); - } - hir::InlineAsmOperand::SplitInOut { reg, late: _, in_expr, out_expr } => { - let in_ty = - self.check_asm_operand_type(idx, reg, in_expr, asm.template, true, None); - if let Some(out_expr) = out_expr { - self.check_asm_operand_type( - idx, - reg, - out_expr, - asm.template, - false, - Some((in_expr, in_ty)), - ); - } - } - hir::InlineAsmOperand::Const { anon_const } => { - let ty = self.expr_ty(self.tcx().hir_body(anon_const.body).value); - match ty.kind() { - ty::Error(_) => {} - _ if ty.is_integral() => {} - _ => { - self.infcx - .dcx() - .struct_span_err(op_sp, "invalid type for `const` operand") - .with_span_label( - self.tcx().def_span(anon_const.def_id), - format!("is {} `{}`", ty.kind().article(), ty), - ) - .with_help("`const` operands must be of an integer type") - .emit(); - } - } - } - // Typeck has checked that SymFn refers to a function. - hir::InlineAsmOperand::SymFn { expr } => { - let ty = self.expr_ty(expr); - match ty.kind() { - ty::FnDef(..) => {} - ty::Error(_) => {} - _ => { - self.infcx - .dcx() - .struct_span_err(op_sp, "invalid `sym` operand") - .with_span_label( - expr.span, - format!("is {} `{}`", ty.kind().article(), ty), - ) - .with_help( - "`sym` operands must refer to either a function or a static", - ) - .emit(); - } - } - } - // AST lowering guarantees that SymStatic points to a static. - hir::InlineAsmOperand::SymStatic { .. } => {} - // No special checking is needed for labels. - hir::InlineAsmOperand::Label { .. } => {} - } - } - } -} diff --git a/compiler/rustc_hir_analysis/src/check/mod.rs b/compiler/rustc_hir_analysis/src/check/mod.rs index d8ae421452759..70791a70fab3c 100644 --- a/compiler/rustc_hir_analysis/src/check/mod.rs +++ b/compiler/rustc_hir_analysis/src/check/mod.rs @@ -64,10 +64,10 @@ a type parameter). pub mod always_applicable; mod check; +mod compare_eii; mod compare_impl_item; mod entry; pub mod intrinsic; -pub mod intrinsicck; mod region; pub mod wfcheck; @@ -114,11 +114,11 @@ pub fn provide(providers: &mut Providers) { } fn adt_destructor(tcx: TyCtxt<'_>, def_id: LocalDefId) -> Option { - tcx.calculate_dtor(def_id.to_def_id(), always_applicable::check_drop_impl) + tcx.calculate_dtor(def_id, always_applicable::check_drop_impl) } fn adt_async_destructor(tcx: TyCtxt<'_>, def_id: LocalDefId) -> Option { - tcx.calculate_async_dtor(def_id.to_def_id(), always_applicable::check_drop_impl) + tcx.calculate_async_dtor(def_id, always_applicable::check_drop_impl) } /// Given a `DefId` for an opaque type in return position, find its parent item's return @@ -137,15 +137,6 @@ fn get_owner_return_paths( }) } -/// Forbid defining intrinsics in Rust code, -/// as they must always be defined by the compiler. -// FIXME: Move this to a more appropriate place. -pub fn forbid_intrinsic_abi(tcx: TyCtxt<'_>, sp: Span, abi: ExternAbi) { - if let ExternAbi::RustIntrinsic = abi { - tcx.dcx().span_err(sp, "intrinsic must be in `extern \"rust-intrinsic\" { ... }` block"); - } -} - pub(super) fn maybe_check_static_with_link_section(tcx: TyCtxt<'_>, id: LocalDefId) { // Only restricted on wasm target for now if !tcx.sess.target.is_like_wasm { @@ -214,7 +205,7 @@ fn missing_items_err( let missing_items_msg = missing_items .clone() - .map(|trait_item| trait_item.name.to_string()) + .map(|trait_item| trait_item.name().to_string()) .collect::>() .join("`, `"); @@ -243,9 +234,9 @@ fn missing_items_err( tcx.impl_trait_ref(impl_def_id).unwrap().instantiate_identity(), )); let code = format!("{padding}{snippet}\n{padding}"); - if let Some(span) = tcx.hir().span_if_local(trait_item.def_id) { + if let Some(span) = tcx.hir_span_if_local(trait_item.def_id) { missing_trait_item_label - .push(errors::MissingTraitItemLabel { span, item: trait_item.name }); + .push(errors::MissingTraitItemLabel { span, item: trait_item.name() }); missing_trait_item.push(errors::MissingTraitItemSuggestion { span: sugg_sp, code, @@ -341,9 +332,8 @@ fn bounds_from_generic_predicates<'tcx>( ty::ClauseKind::Trait(trait_predicate) => { let entry = types.entry(trait_predicate.self_ty()).or_default(); let def_id = trait_predicate.def_id(); - if Some(def_id) != tcx.lang_items().sized_trait() { - // Type params are `Sized` by default, do not add that restriction to the list - // if it is a positive requirement. + if !tcx.is_default_trait(def_id) { + // Do not add that restriction to the list if it is a positive requirement. entry.push(trait_predicate.def_id()); } } @@ -417,14 +407,14 @@ fn fn_sig_suggestion<'tcx>( .enumerate() .map(|(i, ty)| { Some(match ty.kind() { - ty::Param(_) if assoc.fn_has_self_parameter && i == 0 => "self".to_string(), + ty::Param(_) if assoc.is_method() && i == 0 => "self".to_string(), ty::Ref(reg, ref_ty, mutability) if i == 0 => { let reg = format!("{reg} "); let reg = match ®[..] { "'_ " | " " => "", reg => reg, }; - if assoc.fn_has_self_parameter { + if assoc.is_method() { match ref_ty.kind() { ty::Param(param) if param.name == kw::SelfUpper => { format!("&{}{}self", reg, mutability.prefix_str()) @@ -437,7 +427,7 @@ fn fn_sig_suggestion<'tcx>( } } _ => { - if assoc.fn_has_self_parameter && i == 0 { + if assoc.is_method() && i == 0 { format!("self: {ty}") } else { format!("_: {ty}") @@ -499,7 +489,7 @@ fn suggestion_signature<'tcx>( ); match assoc.kind { - ty::AssocKind::Fn => fn_sig_suggestion( + ty::AssocKind::Fn { .. } => fn_sig_suggestion( tcx, tcx.liberate_late_bound_regions( assoc.def_id, @@ -509,14 +499,14 @@ fn suggestion_signature<'tcx>( tcx.predicates_of(assoc.def_id).instantiate_own(tcx, args), assoc, ), - ty::AssocKind::Type => { + ty::AssocKind::Type { .. } => { let (generics, where_clauses) = bounds_from_generic_predicates( tcx, tcx.predicates_of(assoc.def_id).instantiate_own(tcx, args), ); - format!("type {}{generics} = /* Type */{where_clauses};", assoc.name) + format!("type {}{generics} = /* Type */{where_clauses};", assoc.name()) } - ty::AssocKind::Const => { + ty::AssocKind::Const { name } => { let ty = tcx.type_of(assoc.def_id).instantiate_identity(); let val = tcx .infer_ctxt() @@ -524,7 +514,7 @@ fn suggestion_signature<'tcx>( .err_ctxt() .ty_kind_suggestion(tcx.param_env(assoc.def_id), ty) .unwrap_or_else(|| "value".to_string()); - format!("const {}: {} = {};", assoc.name, ty, val) + format!("const {}: {} = {};", name, ty, val) } } } @@ -534,7 +524,7 @@ fn bad_variant_count<'tcx>(tcx: TyCtxt<'tcx>, adt: ty::AdtDef<'tcx>, sp: Span, d let variant_spans: Vec<_> = adt .variants() .iter() - .map(|variant| tcx.hir().span_if_local(variant.def_id).unwrap()) + .map(|variant| tcx.hir_span_if_local(variant.def_id).unwrap()) .collect(); let (mut spans, mut many) = (Vec::new(), None); if let [start @ .., end] = &*variant_spans { diff --git a/compiler/rustc_hir_analysis/src/check/region.rs b/compiler/rustc_hir_analysis/src/check/region.rs index cf66ab708bb9e..7cb31a64e13e6 100644 --- a/compiler/rustc_hir_analysis/src/check/region.rs +++ b/compiler/rustc_hir_analysis/src/check/region.rs @@ -8,6 +8,7 @@ use std::mem; +use rustc_data_structures::fx::FxHashMap; use rustc_hir as hir; use rustc_hir::def_id::DefId; use rustc_hir::intravisit::{self, Visitor}; @@ -44,6 +45,8 @@ struct ScopeResolutionVisitor<'tcx> { scope_tree: ScopeTree, cx: Context, + + extended_super_lets: FxHashMap>, } /// Records the lifetime of a local variable as `cx.var_parent` @@ -214,18 +217,29 @@ fn resolve_stmt<'tcx>(visitor: &mut ScopeResolutionVisitor<'tcx>, stmt: &'tcx hi let stmt_id = stmt.hir_id.local_id; debug!("resolve_stmt(stmt.id={:?})", stmt_id); - // Every statement will clean up the temporaries created during - // execution of that statement. Therefore each statement has an - // associated destruction scope that represents the scope of the - // statement plus its destructors, and thus the scope for which - // regions referenced by the destructors need to survive. + if let hir::StmtKind::Let(LetStmt { super_: Some(_), .. }) = stmt.kind { + // `super let` statement does not start a new scope, such that + // + // { super let x = identity(&temp()); &x }.method(); + // + // behaves exactly as + // + // (&identity(&temp()).method(); + intravisit::walk_stmt(visitor, stmt); + } else { + // Every statement will clean up the temporaries created during + // execution of that statement. Therefore each statement has an + // associated destruction scope that represents the scope of the + // statement plus its destructors, and thus the scope for which + // regions referenced by the destructors need to survive. - let prev_parent = visitor.cx.parent; - visitor.enter_node_scope_with_dtor(stmt_id, true); + let prev_parent = visitor.cx.parent; + visitor.enter_node_scope_with_dtor(stmt_id, true); - intravisit::walk_stmt(visitor, stmt); + intravisit::walk_stmt(visitor, stmt); - visitor.cx.parent = prev_parent; + visitor.cx.parent = prev_parent; + } } fn resolve_expr<'tcx>( @@ -446,14 +460,11 @@ fn resolve_expr<'tcx>( // Mark this expr's scope and all parent scopes as containing `yield`. let mut scope = Scope { local_id: expr.hir_id.local_id, data: ScopeData::Node }; loop { - let span = match expr.kind { - hir::ExprKind::Yield(expr, hir::YieldSource::Await { .. }) => { - expr.span.shrink_to_hi().to(expr.span) - } - _ => expr.span, + let data = YieldData { + span: expr.span, + expr_and_pat_count: visitor.expr_and_pat_count, + source: *source, }; - let data = - YieldData { span, expr_and_pat_count: visitor.expr_and_pat_count, source: *source }; match visitor.scope_tree.yield_in_scope.get_mut(&scope) { Some(yields) => yields.push(data), None => { @@ -481,14 +492,19 @@ fn resolve_expr<'tcx>( visitor.cx = prev_cx; } +#[derive(Copy, Clone, PartialEq, Eq, Debug)] +enum LetKind { + Regular, + Super, +} + fn resolve_local<'tcx>( visitor: &mut ScopeResolutionVisitor<'tcx>, pat: Option<&'tcx hir::Pat<'tcx>>, init: Option<&'tcx hir::Expr<'tcx>>, + let_kind: LetKind, ) { - debug!("resolve_local(pat={:?}, init={:?})", pat, init); - - let blk_scope = visitor.cx.var_parent; + debug!("resolve_local(pat={:?}, init={:?}, let_kind={:?})", pat, init, let_kind); // As an exception to the normal rules governing temporary // lifetimes, initializers in a let have a temporary lifetime @@ -546,14 +562,50 @@ fn resolve_local<'tcx>( // A, but the inner rvalues `a()` and `b()` have an extended lifetime // due to rule C. + if let_kind == LetKind::Super { + if let Some(scope) = visitor.extended_super_lets.remove(&pat.unwrap().hir_id.local_id) { + // This expression was lifetime-extended by a parent let binding. E.g. + // + // let a = { + // super let b = temp(); + // &b + // }; + // + // (Which needs to behave exactly as: let a = &temp();) + // + // Processing of `let a` will have already decided to extend the lifetime of this + // `super let` to its own var_scope. We use that scope. + visitor.cx.var_parent = scope; + } else { + // This `super let` is not subject to lifetime extension from a parent let binding. E.g. + // + // identity({ super let x = temp(); &x }).method(); + // + // (Which needs to behave exactly as: identity(&temp()).method();) + // + // Iterate up to the enclosing destruction scope to find the same scope that will also + // be used for the result of the block itself. + while let Some(s) = visitor.cx.var_parent { + let parent = visitor.scope_tree.parent_map.get(&s).cloned(); + if let Some(Scope { data: ScopeData::Destruction, .. }) = parent { + break; + } + visitor.cx.var_parent = parent; + } + } + } + if let Some(expr) = init { - record_rvalue_scope_if_borrow_expr(visitor, expr, blk_scope); + record_rvalue_scope_if_borrow_expr(visitor, expr, visitor.cx.var_parent); if let Some(pat) = pat { if is_binding_pat(pat) { visitor.scope_tree.record_rvalue_candidate( expr.hir_id, - RvalueCandidate { target: expr.hir_id.local_id, lifetime: blk_scope }, + RvalueCandidate { + target: expr.hir_id.local_id, + lifetime: visitor.cx.var_parent, + }, ); } } @@ -565,6 +617,7 @@ fn resolve_local<'tcx>( if let Some(expr) = init { visitor.visit_expr(expr); } + if let Some(pat) = pat { visitor.visit_pat(pat); } @@ -626,6 +679,7 @@ fn resolve_local<'tcx>( PatKind::Ref(_, _) | PatKind::Binding(hir::BindingMode(hir::ByRef::No, _), ..) + | PatKind::Missing | PatKind::Wild | PatKind::Never | PatKind::Expr(_) @@ -642,6 +696,7 @@ fn resolve_local<'tcx>( /// | [ ..., E&, ... ] /// | ( ..., E&, ... ) /// | {...; E&} + /// | { super let ... = E&; ... } /// | if _ { ...; E& } else { ...; E& } /// | match _ { ..., _ => E&, ... } /// | box E& @@ -678,6 +733,13 @@ fn resolve_local<'tcx>( if let Some(subexpr) = block.expr { record_rvalue_scope_if_borrow_expr(visitor, subexpr, blk_id); } + for stmt in block.stmts { + if let hir::StmtKind::Let(local) = stmt.kind + && let Some(_) = local.super_ + { + visitor.extended_super_lets.insert(local.pat.hir_id.local_id, blk_id); + } + } } hir::ExprKind::If(_, then_block, else_block) => { record_rvalue_scope_if_borrow_expr(visitor, then_block, blk_id); @@ -803,7 +865,7 @@ impl<'tcx> Visitor<'tcx> for ScopeResolutionVisitor<'tcx> { local_id: body.value.hir_id.local_id, data: ScopeData::Destruction, }); - resolve_local(this, None, Some(body.value)); + resolve_local(this, None, Some(body.value), LetKind::Regular); } }) } @@ -821,7 +883,11 @@ impl<'tcx> Visitor<'tcx> for ScopeResolutionVisitor<'tcx> { resolve_expr(self, ex, false); } fn visit_local(&mut self, l: &'tcx LetStmt<'tcx>) { - resolve_local(self, Some(l.pat), l.init) + let let_kind = match l.super_ { + Some(_) => LetKind::Super, + None => LetKind::Regular, + }; + resolve_local(self, Some(l.pat), l.init, let_kind); } fn visit_inline_const(&mut self, c: &'tcx hir::ConstBlock) { let body = self.tcx.hir_body(c.body); @@ -850,6 +916,7 @@ pub(crate) fn region_scope_tree(tcx: TyCtxt<'_>, def_id: DefId) -> &ScopeTree { cx: Context { parent: None, var_parent: None }, pessimistic_yield: false, fixup_scopes: vec![], + extended_super_lets: Default::default(), }; visitor.scope_tree.root_body = Some(body.value.hir_id); diff --git a/compiler/rustc_hir_analysis/src/check/wfcheck.rs b/compiler/rustc_hir_analysis/src/check/wfcheck.rs index 83d095ab72e82..b384603680a64 100644 --- a/compiler/rustc_hir_analysis/src/check/wfcheck.rs +++ b/compiler/rustc_hir_analysis/src/check/wfcheck.rs @@ -3,6 +3,7 @@ use std::ops::{ControlFlow, Deref}; use hir::intravisit::{self, Visitor}; use rustc_abi::ExternAbi; +use rustc_attr_parsing::{AttributeKind, EIIDecl, EIIImpl, find_attr}; use rustc_data_structures::fx::{FxHashSet, FxIndexMap, FxIndexSet}; use rustc_errors::codes::*; use rustc_errors::{Applicability, ErrorGuaranteed, pluralize, struct_span_code_err}; @@ -39,6 +40,7 @@ use rustc_trait_selection::traits::{ use tracing::{debug, instrument}; use {rustc_ast as ast, rustc_hir as hir}; +use super::compare_eii::compare_eii_function_types; use crate::autoderef::Autoderef; use crate::collect::CollectItemTypesVisitor; use crate::constrained_generic_params::{Parameter, identify_constrained_generic_params}; @@ -76,12 +78,37 @@ impl<'tcx> WfCheckingCtxt<'_, 'tcx> { ) } - fn register_wf_obligation( - &self, - span: Span, - loc: Option, - arg: ty::GenericArg<'tcx>, - ) { + /// Convenience function to *deeply* normalize during wfcheck. In the old solver, + /// this just dispatches to [`WfCheckingCtxt::normalize`], but in the new solver + /// this calls `deeply_normalize` and reports errors if they are encountered. + /// + /// This function should be called in favor of `normalize` in cases where we will + /// then check the well-formedness of the type, since we only use the normalized + /// signature types for implied bounds when checking regions. + // FIXME(-Znext-solver): This should be removed when we compute implied outlives + // bounds using the unnormalized signature of the function we're checking. + fn deeply_normalize(&self, span: Span, loc: Option, value: T) -> T + where + T: TypeFoldable>, + { + if self.infcx.next_trait_solver() { + match self.ocx.deeply_normalize( + &ObligationCause::new(span, self.body_def_id, ObligationCauseCode::WellFormed(loc)), + self.param_env, + value.clone(), + ) { + Ok(value) => value, + Err(errors) => { + self.infcx.err_ctxt().report_fulfillment_errors(errors); + value + } + } + } else { + self.normalize(span, loc, value) + } + } + + fn register_wf_obligation(&self, span: Span, loc: Option, term: ty::Term<'tcx>) { let cause = traits::ObligationCause::new( span, self.body_def_id, @@ -91,7 +118,7 @@ impl<'tcx> WfCheckingCtxt<'_, 'tcx> { self.tcx(), cause, self.param_env, - ty::Binder::dummy(ty::PredicateKind::Clause(ty::ClauseKind::WellFormed(arg))), + ty::ClauseKind::WellFormed(term), )); } } @@ -302,7 +329,8 @@ fn check_item<'tcx>(tcx: TyCtxt<'tcx>, item: &'tcx hir::Item<'tcx>) -> Result<() { let res = enter_wf_checking_ctxt(tcx, item.span, def_id, |wfcx| { let ty = tcx.type_of(def_id).instantiate_identity(); - let item_ty = wfcx.normalize(hir_ty.span, Some(WellFormedLoc::Ty(def_id)), ty); + let item_ty = + wfcx.deeply_normalize(hir_ty.span, Some(WellFormedLoc::Ty(def_id)), ty); wfcx.register_wf_obligation( hir_ty.span, Some(WellFormedLoc::Ty(def_id)), @@ -408,7 +436,7 @@ fn check_gat_where_clauses(tcx: TyCtxt<'_>, trait_def_id: LocalDefId) { let gat_def_id = gat_item.def_id.expect_local(); let gat_item = tcx.associated_item(gat_def_id); // If this item is not an assoc ty, or has no args, then it's not a GAT - if gat_item.kind != ty::AssocKind::Type { + if !gat_item.is_type() { continue; } let gat_generics = tcx.generics_of(gat_def_id); @@ -432,7 +460,7 @@ fn check_gat_where_clauses(tcx: TyCtxt<'_>, trait_def_id: LocalDefId) { let item_required_bounds = match tcx.associated_item(item_def_id).kind { // In our example, this corresponds to `into_iter` method - ty::AssocKind::Fn => { + ty::AssocKind::Fn { .. } => { // For methods, we check the function signature's return type for any GATs // to constrain. In the `into_iter` case, we see that the return type // `Self::Iter<'a>` is a GAT we want to gather any potential missing bounds from. @@ -453,7 +481,7 @@ fn check_gat_where_clauses(tcx: TyCtxt<'_>, trait_def_id: LocalDefId) { ) } // In our example, this corresponds to the `Iter` and `Item` associated types - ty::AssocKind::Type => { + ty::AssocKind::Type { .. } => { // If our associated item is a GAT with missing bounds, add them to // the param-env here. This allows this GAT to propagate missing bounds // to other GATs. @@ -474,7 +502,7 @@ fn check_gat_where_clauses(tcx: TyCtxt<'_>, trait_def_id: LocalDefId) { gat_generics, ) } - ty::AssocKind::Const => None, + ty::AssocKind::Const { .. } => None, }; if let Some(item_required_bounds) = item_required_bounds { @@ -631,7 +659,7 @@ fn gather_gat_bounds<'tcx, T: TypeFoldable>>( // Ignore `'static` lifetimes for the purpose of this lint: it's // because we know it outlives everything and so doesn't give meaningful // clues. Also ignore `ReError`, to avoid knock-down errors. - if let ty::ReStatic | ty::ReError(_) = **region_a { + if let ty::ReStatic | ty::ReError(_) = region_a.kind() { continue; } // For each region argument (e.g., `'a` in our example), check for a @@ -672,7 +700,7 @@ fn gather_gat_bounds<'tcx, T: TypeFoldable>>( // Again, skip `'static` because it outlives everything. Also, we trivially // know that a region outlives itself. Also ignore `ReError`, to avoid // knock-down errors. - if matches!(**region_b, ty::ReStatic | ty::ReError(_)) || region_a == region_b { + if matches!(region_b.kind(), ty::ReStatic | ty::ReError(_)) || region_a == region_b { continue; } if region_known_to_outlive(tcx, item_def_id, param_env, wf_tys, *region_a, *region_b) { @@ -1076,9 +1104,9 @@ fn check_associated_item( }; match item.kind { - ty::AssocKind::Const => { + ty::AssocKind::Const { .. } => { let ty = tcx.type_of(item.def_id).instantiate_identity(); - let ty = wfcx.normalize(span, Some(WellFormedLoc::Ty(item_id)), ty); + let ty = wfcx.deeply_normalize(span, Some(WellFormedLoc::Ty(item_id)), ty); wfcx.register_wf_obligation(span, loc, ty.into()); check_sized_if_body( wfcx, @@ -1089,7 +1117,7 @@ fn check_associated_item( ); Ok(()) } - ty::AssocKind::Fn => { + ty::AssocKind::Fn { .. } => { let sig = tcx.fn_sig(item.def_id).instantiate_identity(); let hir_sig = sig_if_method.expect("bad signature for method"); check_fn_or_method( @@ -1101,13 +1129,13 @@ fn check_associated_item( ); check_method_receiver(wfcx, hir_sig, item, self_ty) } - ty::AssocKind::Type => { + ty::AssocKind::Type { .. } => { if let ty::AssocItemContainer::Trait = item.container { check_associated_type_bounds(wfcx, item, span) } if item.defaultness(tcx).has_value() { let ty = tcx.type_of(item.def_id).instantiate_identity(); - let ty = wfcx.normalize(span, Some(WellFormedLoc::Ty(item_id)), ty); + let ty = wfcx.deeply_normalize(span, Some(WellFormedLoc::Ty(item_id)), ty); wfcx.register_wf_obligation(span, loc, ty.into()); } Ok(()) @@ -1154,7 +1182,7 @@ fn check_type_defn<'tcx>( let field_id = field.did.expect_local(); let hir::FieldDef { ty: hir_ty, .. } = tcx.hir_node_by_def_id(field_id).expect_field(); - let ty = wfcx.normalize( + let ty = wfcx.deeply_normalize( hir_ty.span, None, tcx.type_of(field.did).instantiate_identity(), @@ -1294,6 +1322,31 @@ fn check_item_fn( decl: &hir::FnDecl<'_>, ) -> Result<(), ErrorGuaranteed> { enter_wf_checking_ctxt(tcx, span, def_id, |wfcx| { + // does the function have an EiiImpl attribute? that contains the defid of a *macro* + // that was used to mark the implementation. This is a two step process. + for EIIImpl { eii_macro, span, .. } in + find_attr!(tcx.get_all_attrs(def_id), AttributeKind::EiiImpl(impls) => impls) + .into_iter() + .flatten() + { + // we expect this macro to have the `EiiMacroFor` attribute, that points to a function + // signature that we'd like to compare the function we're currently checking with + if let Some(eii_extern_item) = find_attr!(tcx.get_all_attrs(*eii_macro), AttributeKind::EiiMacroFor(EIIDecl {eii_extern_item, ..}) => *eii_extern_item) + { + let _ = compare_eii_function_types( + tcx, + def_id, + eii_extern_item, + tcx.item_name(*eii_macro), + *span, + ); + } else { + panic!( + "EII impl macro {eii_macro:?} did not have an eii macro for attribute pointing to a function" + ) + } + } + let sig = tcx.fn_sig(def_id).instantiate_identity(); check_fn_or_method(wfcx, ident.span, sig, decl, def_id); Ok(()) @@ -1315,7 +1368,7 @@ fn check_item_type( enter_wf_checking_ctxt(tcx, ty_span, item_id, |wfcx| { let ty = tcx.type_of(item_id).instantiate_identity(); - let item_ty = wfcx.normalize(ty_span, Some(WellFormedLoc::Ty(item_id)), ty); + let item_ty = wfcx.deeply_normalize(ty_span, Some(WellFormedLoc::Ty(item_id)), ty); let forbid_unsized = match unsized_handling { UnsizedHandling::Forbid => true, @@ -1380,7 +1433,7 @@ fn check_impl<'tcx>( // other `Foo` impls are incoherent. tcx.ensure_ok().coherent_trait(trait_ref.def_id)?; let trait_span = hir_trait_ref.path.span; - let trait_ref = wfcx.normalize( + let trait_ref = wfcx.deeply_normalize( trait_span, Some(WellFormedLoc::Ty(item.hir_id().expect_owner().def_id)), trait_ref, @@ -1440,7 +1493,7 @@ fn check_impl<'tcx>( } None => { let self_ty = tcx.type_of(item.owner_id).instantiate_identity(); - let self_ty = wfcx.normalize( + let self_ty = wfcx.deeply_normalize( item.span, Some(WellFormedLoc::Ty(item.hir_id().expect_owner().def_id)), self_ty, @@ -1486,8 +1539,41 @@ fn check_where_clauses<'tcx>(wfcx: &WfCheckingCtxt<'_, 'tcx>, span: Span, def_id tcx.def_span(param.def_id), matches!(param.kind, GenericParamDefKind::Type { .. }) .then(|| WellFormedLoc::Ty(param.def_id.expect_local())), - default, + default.as_term().unwrap(), ); + } else { + // If we've got a generic const parameter we still want to check its + // type is correct in case both it and the param type are fully concrete. + let GenericArgKind::Const(ct) = default.unpack() else { + continue; + }; + + let ct_ty = match ct.kind() { + ty::ConstKind::Infer(_) + | ty::ConstKind::Placeholder(_) + | ty::ConstKind::Bound(_, _) => unreachable!(), + ty::ConstKind::Error(_) | ty::ConstKind::Expr(_) => continue, + ty::ConstKind::Value(cv) => cv.ty, + ty::ConstKind::Unevaluated(uv) => { + infcx.tcx.type_of(uv.def).instantiate(infcx.tcx, uv.args) + } + ty::ConstKind::Param(param_ct) => param_ct.find_ty_from_env(wfcx.param_env), + }; + + let param_ty = tcx.type_of(param.def_id).instantiate_identity(); + if !ct_ty.has_param() && !param_ty.has_param() { + let cause = traits::ObligationCause::new( + tcx.def_span(param.def_id), + wfcx.body_def_id, + ObligationCauseCode::WellFormed(None), + ); + wfcx.register_obligation(Obligation::new( + tcx, + cause, + wfcx.param_env, + ty::ClauseKind::ConstArgHasType(ct, param_ty), + )); + } } } } @@ -1612,7 +1698,7 @@ fn check_fn_or_method<'tcx>( sig.inputs_and_output = tcx.mk_type_list_from_iter(sig.inputs_and_output.iter().enumerate().map(|(idx, ty)| { - wfcx.normalize( + wfcx.deeply_normalize( arg_span(idx), Some(WellFormedLoc::Param { function: def_id, @@ -1716,7 +1802,7 @@ fn check_method_receiver<'tcx>( ) -> Result<(), ErrorGuaranteed> { let tcx = wfcx.tcx(); - if !method.fn_has_self_parameter { + if !method.is_method() { return Ok(()); } @@ -1991,7 +2077,7 @@ fn check_variances_for_type_defn<'tcx>( ItemKind::TyAlias(..) => { assert!( tcx.type_alias_is_lazy(item.owner_id), - "should not be computing variance of non-weak type alias" + "should not be computing variance of non-free type alias" ); } kind => span_bug!(item.span, "cannot compute the variances of {kind:?}"), @@ -2223,7 +2309,7 @@ impl<'tcx> TypeVisitor> for IsProbablyCyclical<'tcx> { fn visit_ty(&mut self, ty: Ty<'tcx>) -> ControlFlow<(), ()> { let def_id = match ty.kind() { ty::Adt(adt_def, _) => Some(adt_def.did()), - ty::Alias(ty::Weak, alias_ty) => Some(alias_ty.def_id), + ty::Alias(ty::Free, alias_ty) => Some(alias_ty.def_id), _ => None, }; if let Some(def_id) = def_id { diff --git a/compiler/rustc_hir_analysis/src/coherence/builtin.rs b/compiler/rustc_hir_analysis/src/coherence/builtin.rs index 1f3f0b754bb1e..52656fc2d9001 100644 --- a/compiler/rustc_hir_analysis/src/coherence/builtin.rs +++ b/compiler/rustc_hir_analysis/src/coherence/builtin.rs @@ -656,7 +656,7 @@ fn infringing_fields_error<'tcx>( .entry((ty.clone(), predicate.clone())) .or_default() .push(origin.span()); - if let ty::RegionKind::ReEarlyParam(ebr) = *b + if let ty::RegionKind::ReEarlyParam(ebr) = b.kind() && ebr.has_name() { bounds.push((b.to_string(), a.to_string(), None)); @@ -750,7 +750,7 @@ fn visit_implementation_of_pointer_like(checker: &Checker<'_>) -> Result<(), Err ObligationCause::misc(impl_span, checker.impl_def_id), param_env, nontrivial_field_ty, - tcx.lang_items().pointer_like().unwrap(), + tcx.require_lang_item(LangItem::PointerLike, Some(impl_span)), ); // FIXME(dyn-star): We should regionck this implementation. if ocx.select_all_or_error().is_empty() { diff --git a/compiler/rustc_hir_analysis/src/coherence/inherent_impls.rs b/compiler/rustc_hir_analysis/src/coherence/inherent_impls.rs index c9a9180c5c9dc..bd25b4a326086 100644 --- a/compiler/rustc_hir_analysis/src/coherence/inherent_impls.rs +++ b/compiler/rustc_hir_analysis/src/coherence/inherent_impls.rs @@ -150,7 +150,7 @@ impl<'tcx> InherentCollect<'tcx> { let id = id.owner_id.def_id; let item_span = self.tcx.def_span(id); let self_ty = self.tcx.type_of(id).instantiate_identity(); - let mut self_ty = self.tcx.peel_off_weak_alias_tys(self_ty); + let mut self_ty = self.tcx.peel_off_free_alias_tys(self_ty); // We allow impls on pattern types exactly when we allow impls on the base type. // FIXME(pattern_types): Figure out the exact coherence rules we want here. while let ty::Pat(base, _) = *self_ty.kind() { @@ -188,7 +188,7 @@ impl<'tcx> InherentCollect<'tcx> { | ty::CoroutineClosure(..) | ty::Coroutine(..) | ty::CoroutineWitness(..) - | ty::Alias(ty::Weak, _) + | ty::Alias(ty::Free, _) | ty::Bound(..) | ty::Placeholder(_) | ty::Infer(_) => { diff --git a/compiler/rustc_hir_analysis/src/coherence/inherent_impls_overlap.rs b/compiler/rustc_hir_analysis/src/coherence/inherent_impls_overlap.rs index dc616576c9c1b..242639125b19a 100644 --- a/compiler/rustc_hir_analysis/src/coherence/inherent_impls_overlap.rs +++ b/compiler/rustc_hir_analysis/src/coherence/inherent_impls_overlap.rs @@ -51,7 +51,7 @@ impl<'tcx> InherentOverlapChecker<'tcx> { for &item1 in impl_items1.in_definition_order() { let collision = impl_items2 - .filter_by_name_unhygienic(item1.name) + .filter_by_name_unhygienic(item1.name()) .any(|&item2| self.compare_hygienically(item1, item2)); if collision { @@ -64,7 +64,7 @@ impl<'tcx> InherentOverlapChecker<'tcx> { fn compare_hygienically(&self, item1: ty::AssocItem, item2: ty::AssocItem) -> bool { // Symbols and namespace match, compare hygienically. - item1.kind.namespace() == item2.kind.namespace() + item1.namespace() == item2.namespace() && item1.ident(self.tcx).normalize_to_macros_2_0() == item2.ident(self.tcx).normalize_to_macros_2_0() } @@ -113,7 +113,7 @@ impl<'tcx> InherentOverlapChecker<'tcx> { let mut res = Ok(()); for &item1 in impl_items1.in_definition_order() { let collision = impl_items2 - .filter_by_name_unhygienic(item1.name) + .filter_by_name_unhygienic(item1.name()) .find(|&&item2| self.compare_hygienically(item1, item2)); if let Some(item2) = collision { @@ -230,11 +230,11 @@ impl<'tcx> InherentOverlapChecker<'tcx> { let mut ids = impl_items .in_definition_order() .filter_map(|item| { - let entry = connected_region_ids.entry(item.name); + let entry = connected_region_ids.entry(item.name()); if let IndexEntry::Occupied(e) = &entry { Some(*e.get()) } else { - idents_to_add.push(item.name); + idents_to_add.push(item.name()); None } }) diff --git a/compiler/rustc_hir_analysis/src/coherence/mod.rs b/compiler/rustc_hir_analysis/src/coherence/mod.rs index 15e0a72fdcbd5..16bac4304910c 100644 --- a/compiler/rustc_hir_analysis/src/coherence/mod.rs +++ b/compiler/rustc_hir_analysis/src/coherence/mod.rs @@ -153,9 +153,12 @@ pub(crate) fn provide(providers: &mut Providers) { } fn coherent_trait(tcx: TyCtxt<'_>, def_id: DefId) -> Result<(), ErrorGuaranteed> { + let impls = tcx.local_trait_impls(def_id); // If there are no impls for the trait, then "all impls" are trivially coherent and we won't check anything // anyway. Thus we bail out even before the specialization graph, avoiding the dep_graph edge. - let Some(impls) = tcx.all_local_trait_impls(()).get(&def_id) else { return Ok(()) }; + if impls.is_empty() { + return Ok(()); + } // Trigger building the specialization graph for the trait. This will detect and report any // overlap errors. let mut res = tcx.ensure_ok().specialization_graph_of(def_id); diff --git a/compiler/rustc_hir_analysis/src/coherence/orphan.rs b/compiler/rustc_hir_analysis/src/coherence/orphan.rs index 74ba4ffe25ea1..c75fef9f716d6 100644 --- a/compiler/rustc_hir_analysis/src/coherence/orphan.rs +++ b/compiler/rustc_hir_analysis/src/coherence/orphan.rs @@ -189,7 +189,7 @@ pub(crate) fn orphan_check_impl( ty::Projection => "associated type", // type Foo = (impl Sized, bool) // impl AutoTrait for Foo {} - ty::Weak => "type alias", + ty::Free => "type alias", // type Opaque = impl Trait; // impl AutoTrait for Opaque {} ty::Opaque => "opaque type", diff --git a/compiler/rustc_hir_analysis/src/collect.rs b/compiler/rustc_hir_analysis/src/collect.rs index 075abc3259449..49d90127e5539 100644 --- a/compiler/rustc_hir_analysis/src/collect.rs +++ b/compiler/rustc_hir_analysis/src/collect.rs @@ -14,6 +14,7 @@ //! At present, however, we do run collection across all items in the //! crate as a kind of pass. This should eventually be factored away. +use std::assert_matches::assert_matches; use std::cell::Cell; use std::iter; use std::ops::Bound; @@ -32,6 +33,7 @@ use rustc_hir::{self as hir, GenericParamKind, HirId, Node, PreciseCapturingArgK use rustc_infer::infer::{InferCtxt, TyCtxtInferExt}; use rustc_infer::traits::ObligationCause; use rustc_middle::hir::nested_filter; +use rustc_middle::middle::eii::EiiMapping; use rustc_middle::query::Providers; use rustc_middle::ty::util::{Discr, IntTypeExt}; use rustc_middle::ty::{self, AdtKind, Const, IsSuggestable, Ty, TyCtxt, TypingMode, fold_regions}; @@ -42,9 +44,8 @@ use rustc_trait_selection::infer::InferCtxtExt; use rustc_trait_selection::traits::ObligationCtxt; use tracing::{debug, instrument}; -use crate::check::intrinsic::intrinsic_operation_unsafety; use crate::errors; -use crate::hir_ty_lowering::errors::assoc_kind_str; +use crate::hir_ty_lowering::errors::assoc_tag_str; use crate::hir_ty_lowering::{FeedConstTy, HirTyLowerer, RegionInferReason}; pub(crate) mod dump; @@ -61,6 +62,7 @@ pub(crate) fn provide(providers: &mut Providers) { *providers = Providers { type_of: type_of::type_of, type_of_opaque: type_of::type_of_opaque, + type_of_opaque_hir_typeck: type_of::type_of_opaque_hir_typeck, type_alias_is_lazy: type_of::type_alias_is_lazy, item_bounds: item_bounds::item_bounds, explicit_item_bounds: item_bounds::explicit_item_bounds, @@ -438,9 +440,9 @@ impl<'tcx> HirTyLowerer<'tcx> for ItemCtxt<'tcx> { &self, span: Span, def_id: LocalDefId, - assoc_name: Ident, + assoc_ident: Ident, ) -> ty::EarlyBinder<'tcx, &'tcx [(ty::Clause<'tcx>, Span)]> { - self.tcx.at(span).type_param_predicates((self.item_def_id, def_id, assoc_name)) + self.tcx.at(span).type_param_predicates((self.item_def_id, def_id, assoc_ident)) } fn lower_assoc_shared( @@ -449,7 +451,7 @@ impl<'tcx> HirTyLowerer<'tcx> for ItemCtxt<'tcx> { item_def_id: DefId, item_segment: &rustc_hir::PathSegment<'tcx>, poly_trait_ref: ty::PolyTraitRef<'tcx>, - kind: ty::AssocKind, + assoc_tag: ty::AssocTag, ) -> Result<(DefId, ty::GenericArgsRef<'tcx>), ErrorGuaranteed> { if let Some(trait_ref) = poly_trait_ref.no_bound_vars() { let item_args = self.lowerer().lower_generic_args_of_assoc_item( @@ -524,7 +526,7 @@ impl<'tcx> HirTyLowerer<'tcx> for ItemCtxt<'tcx> { inferred_sugg, bound, mpart_sugg, - what: assoc_kind_str(kind), + what: assoc_tag_str(assoc_tag), })) } } @@ -675,7 +677,7 @@ fn lower_item(tcx: TyCtxt<'_>, item_id: hir::ItemId) { // These don't define types. hir::ItemKind::ExternCrate(..) | hir::ItemKind::Use(..) - | hir::ItemKind::Macro(..) + | hir::ItemKind::Macro { .. } | hir::ItemKind::Mod(..) | hir::ItemKind::GlobalAsm { .. } => {} hir::ItemKind::ForeignMod { items, .. } => { @@ -1344,7 +1346,8 @@ fn fn_sig(tcx: TyCtxt<'_>, def_id: LocalDefId) -> ty::EarlyBinder<'_, ty::PolyFn compute_sig_of_foreign_fn_decl(tcx, def_id, sig.decl, abi, sig.header.safety()) } - Ctor(data) | Variant(hir::Variant { data, .. }) if data.ctor().is_some() => { + Ctor(data) => { + assert_matches!(data.ctor(), Some(_)); let adt_def_id = tcx.hir_get_parent_item(hir_id).def_id.to_def_id(); let ty = tcx.type_of(adt_def_id).instantiate_identity(); let inputs = data.fields().iter().map(|f| tcx.type_of(f.def_id).instantiate_identity()); @@ -1370,6 +1373,16 @@ fn fn_sig(tcx: TyCtxt<'_>, def_id: LocalDefId) -> ty::EarlyBinder<'_, ty::PolyFn bug!("to get the signature of a closure, use `args.as_closure().sig()` not `fn_sig()`",); } + x @ Synthetic => { + if let Some(EiiMapping { extern_item, .. }) = + tcx.get_externally_implementable_item_impls(()).get(&def_id) + { + return tcx.fn_sig(extern_item); + } else { + bug!("unexpected sort of node in fn_sig(): {:?}", x); + } + } + x => { bug!("unexpected sort of node in fn_sig(): {:?}", x); } @@ -1416,7 +1429,7 @@ fn recover_infer_ret_ty<'tcx>( GenericParamKind::Lifetime { .. } => true, _ => false, }); - let fn_sig = fold_regions(tcx, fn_sig, |r, _| match *r { + let fn_sig = fold_regions(tcx, fn_sig, |r, _| match r.kind() { ty::ReErased => { if has_region_params { ty::Region::new_error_with_message( @@ -1703,18 +1716,13 @@ fn compute_sig_of_foreign_fn_decl<'tcx>( abi: ExternAbi, safety: hir::Safety, ) -> ty::PolyFnSig<'tcx> { - let safety = if abi == ExternAbi::RustIntrinsic { - intrinsic_operation_unsafety(tcx, def_id) - } else { - safety - }; let hir_id = tcx.local_def_id_to_hir_id(def_id); let fty = ItemCtxt::new(tcx, def_id).lowerer().lower_fn_ty(hir_id, safety, abi, decl, None, None); // Feature gate SIMD types in FFI, since I am not sure that the // ABIs are handled at all correctly. -huonw - if abi != ExternAbi::RustIntrinsic && !tcx.features().simd_ffi() { + if !tcx.features().simd_ffi() { let check = |hir_ty: &hir::Ty<'_>, ty: Ty<'_>| { if ty.is_simd() { let snip = tcx diff --git a/compiler/rustc_hir_analysis/src/collect/generics_of.rs b/compiler/rustc_hir_analysis/src/collect/generics_of.rs index a153ce8ea902d..2bed28d7b710c 100644 --- a/compiler/rustc_hir_analysis/src/collect/generics_of.rs +++ b/compiler/rustc_hir_analysis/src/collect/generics_of.rs @@ -116,8 +116,7 @@ pub(super) fn generics_of(tcx: TyCtxt<'_>, def_id: LocalDefId) -> ty::Generics { { // enum variant discriminants are not allowed to use any kind of generics None - } else if let Some(param_id) = - tcx.hir().opt_const_param_default_param_def_id(hir_id) + } else if let Some(param_id) = tcx.hir_opt_const_param_default_param_def_id(hir_id) { // If the def_id we are calling generics_of on is an anon ct default i.e: // diff --git a/compiler/rustc_hir_analysis/src/collect/item_bounds.rs b/compiler/rustc_hir_analysis/src/collect/item_bounds.rs index 6e07f0ff53c39..279b1e82a716f 100644 --- a/compiler/rustc_hir_analysis/src/collect/item_bounds.rs +++ b/compiler/rustc_hir_analysis/src/collect/item_bounds.rs @@ -38,13 +38,13 @@ fn associated_type_bounds<'tcx>( let icx = ItemCtxt::new(tcx, assoc_item_def_id); let mut bounds = Vec::new(); icx.lowerer().lower_bounds(item_ty, hir_bounds, &mut bounds, ty::List::empty(), filter); - // Associated types are implicitly sized unless a `?Sized` bound is found + // Implicit bounds are added to associated types unless a `?Trait` bound is found match filter { PredicateFilter::All | PredicateFilter::SelfOnly | PredicateFilter::SelfTraitThatDefines(_) | PredicateFilter::SelfAndAssociatedTypeBounds => { - icx.lowerer().add_sized_bound(&mut bounds, item_ty, hir_bounds, None, span); + icx.lowerer().add_default_traits(&mut bounds, item_ty, hir_bounds, None, span); } // `ConstIfConst` is only interested in `~const` bounds. PredicateFilter::ConstIfConst | PredicateFilter::SelfConstIfConst => {} @@ -327,14 +327,13 @@ fn opaque_type_bounds<'tcx>( let icx = ItemCtxt::new(tcx, opaque_def_id); let mut bounds = Vec::new(); icx.lowerer().lower_bounds(item_ty, hir_bounds, &mut bounds, ty::List::empty(), filter); - // Opaque types are implicitly sized unless a `?Sized` bound is found + // Implicit bounds are added to opaque types unless a `?Trait` bound is found match filter { PredicateFilter::All | PredicateFilter::SelfOnly | PredicateFilter::SelfTraitThatDefines(_) | PredicateFilter::SelfAndAssociatedTypeBounds => { - // Associated types are implicitly sized unless a `?Sized` bound is found - icx.lowerer().add_sized_bound(&mut bounds, item_ty, hir_bounds, None, span); + icx.lowerer().add_default_traits(&mut bounds, item_ty, hir_bounds, None, span); } //`ConstIfConst` is only interested in `~const` bounds. PredicateFilter::ConstIfConst | PredicateFilter::SelfConstIfConst => {} diff --git a/compiler/rustc_hir_analysis/src/collect/predicates_of.rs b/compiler/rustc_hir_analysis/src/collect/predicates_of.rs index 4bd89861a9e5a..ce0f83d0ec288 100644 --- a/compiler/rustc_hir_analysis/src/collect/predicates_of.rs +++ b/compiler/rustc_hir_analysis/src/collect/predicates_of.rs @@ -165,12 +165,18 @@ fn gather_explicit_predicates_of(tcx: TyCtxt<'_>, def_id: LocalDefId) -> ty::Gen ItemKind::Trait(_, _, _, _, self_bounds, ..) | ItemKind::TraitAlias(_, _, self_bounds) => { - is_trait = Some(self_bounds); + is_trait = Some((self_bounds, item.span)); } _ => {} } }; + if let Node::TraitItem(item) = node { + let mut bounds = Vec::new(); + icx.lowerer().add_default_trait_item_bounds(item, &mut bounds); + predicates.extend(bounds); + } + let generics = tcx.generics_of(def_id); // Below we'll consider the bounds on the type parameters (including `Self`) @@ -181,11 +187,18 @@ fn gather_explicit_predicates_of(tcx: TyCtxt<'_>, def_id: LocalDefId) -> ty::Gen let mut bounds = Vec::new(); icx.lowerer().lower_bounds( tcx.types.self_param, - self_bounds, + self_bounds.0, &mut bounds, ty::List::empty(), PredicateFilter::All, ); + icx.lowerer().add_default_super_traits( + def_id, + &mut bounds, + self_bounds.0, + hir_generics, + self_bounds.1, + ); predicates.extend(bounds); } @@ -210,8 +223,8 @@ fn gather_explicit_predicates_of(tcx: TyCtxt<'_>, def_id: LocalDefId) -> ty::Gen GenericParamKind::Type { .. } => { let param_ty = icx.lowerer().lower_ty_param(param.hir_id); let mut bounds = Vec::new(); - // Params are implicitly sized unless a `?Sized` bound is found - icx.lowerer().add_sized_bound( + // Implicit bounds are added to type params unless a `?Trait` bound is found + icx.lowerer().add_default_traits( &mut bounds, param_ty, &[], @@ -346,7 +359,7 @@ fn compute_bidirectional_outlives_predicates<'tcx>( ) { for param in opaque_own_params { let orig_lifetime = tcx.map_opaque_lifetime_to_parent_lifetime(param.def_id.expect_local()); - if let ty::ReEarlyParam(..) = *orig_lifetime { + if let ty::ReEarlyParam(..) = orig_lifetime.kind() { let dup_lifetime = ty::Region::new_early_param( tcx, ty::EarlyParamRegion { index: param.index, name: param.name }, @@ -508,7 +521,7 @@ pub(super) fn explicit_predicates_of<'tcx>( if matches!(def_kind, DefKind::AnonConst) && tcx.features().generic_const_exprs() && let Some(defaulted_param_def_id) = - tcx.hir().opt_const_param_default_param_def_id(tcx.local_def_id_to_hir_id(def_id)) + tcx.hir_opt_const_param_default_param_def_id(tcx.local_def_id_to_hir_id(def_id)) { // In `generics_of` we set the generics' parent to be our parent's parent which means that // we lose out on the predicates of our actual parent if we dont return those predicates here. @@ -571,12 +584,12 @@ pub(super) fn explicit_super_predicates_of<'tcx>( pub(super) fn explicit_supertraits_containing_assoc_item<'tcx>( tcx: TyCtxt<'tcx>, - (trait_def_id, assoc_name): (DefId, Ident), + (trait_def_id, assoc_ident): (DefId, Ident), ) -> ty::EarlyBinder<'tcx, &'tcx [(ty::Clause<'tcx>, Span)]> { implied_predicates_with_filter( tcx, trait_def_id, - PredicateFilter::SelfTraitThatDefines(assoc_name), + PredicateFilter::SelfTraitThatDefines(assoc_ident), ) } @@ -604,7 +617,7 @@ pub(super) fn implied_predicates_with_filter<'tcx>( filter: PredicateFilter, ) -> ty::EarlyBinder<'tcx, &'tcx [(ty::Clause<'tcx>, Span)]> { let Some(trait_def_id) = trait_def_id.as_local() else { - // if `assoc_name` is None, then the query should've been redirected to an + // if `assoc_ident` is None, then the query should've been redirected to an // external provider assert_matches!(filter, PredicateFilter::SelfTraitThatDefines(_)); return tcx.explicit_super_predicates_of(trait_def_id); @@ -625,6 +638,22 @@ pub(super) fn implied_predicates_with_filter<'tcx>( let self_param_ty = tcx.types.self_param; let mut bounds = Vec::new(); icx.lowerer().lower_bounds(self_param_ty, superbounds, &mut bounds, ty::List::empty(), filter); + match filter { + PredicateFilter::All + | PredicateFilter::SelfOnly + | PredicateFilter::SelfTraitThatDefines(_) + | PredicateFilter::SelfAndAssociatedTypeBounds => { + icx.lowerer().add_default_super_traits( + trait_def_id, + &mut bounds, + superbounds, + generics, + item.span, + ); + } + //`ConstIfConst` is only interested in `~const` bounds. + PredicateFilter::ConstIfConst | PredicateFilter::SelfConstIfConst => {} + } let where_bounds_that_match = icx.probe_ty_param_bounds_in_generics(generics, item.owner_id.def_id, filter); @@ -805,11 +834,11 @@ pub(super) fn assert_only_contains_predicates_from<'tcx>( #[instrument(level = "trace", skip(tcx))] pub(super) fn type_param_predicates<'tcx>( tcx: TyCtxt<'tcx>, - (item_def_id, def_id, assoc_name): (LocalDefId, LocalDefId, Ident), + (item_def_id, def_id, assoc_ident): (LocalDefId, LocalDefId, Ident), ) -> ty::EarlyBinder<'tcx, &'tcx [(ty::Clause<'tcx>, Span)]> { match tcx.opt_rpitit_info(item_def_id.to_def_id()) { Some(ty::ImplTraitInTraitData::Trait { opaque_def_id, .. }) => { - return tcx.type_param_predicates((opaque_def_id.expect_local(), def_id, assoc_name)); + return tcx.type_param_predicates((opaque_def_id.expect_local(), def_id, assoc_ident)); } Some(ty::ImplTraitInTraitData::Impl { .. }) => { unreachable!("should not be lowering bounds on RPITIT in impl") @@ -834,7 +863,7 @@ pub(super) fn type_param_predicates<'tcx>( let result = if let Some(parent) = parent { let icx = ItemCtxt::new(tcx, parent); - icx.probe_ty_param_bounds(DUMMY_SP, def_id, assoc_name) + icx.probe_ty_param_bounds(DUMMY_SP, def_id, assoc_ident) } else { ty::EarlyBinder::bind(&[] as &[_]) }; @@ -860,7 +889,7 @@ pub(super) fn type_param_predicates<'tcx>( let extra_predicates = extend.into_iter().chain(icx.probe_ty_param_bounds_in_generics( hir_generics, def_id, - PredicateFilter::SelfTraitThatDefines(assoc_name), + PredicateFilter::SelfTraitThatDefines(assoc_ident), )); let bounds = @@ -879,7 +908,7 @@ pub(super) fn type_param_predicates<'tcx>( _ => unreachable!(), }; assert_only_contains_predicates_from( - PredicateFilter::SelfTraitThatDefines(assoc_name), + PredicateFilter::SelfTraitThatDefines(assoc_ident), bounds, self_ty, ); diff --git a/compiler/rustc_hir_analysis/src/collect/resolve_bound_vars.rs b/compiler/rustc_hir_analysis/src/collect/resolve_bound_vars.rs index 9b0d57bd75b11..eaf5a41c26d23 100644 --- a/compiler/rustc_hir_analysis/src/collect/resolve_bound_vars.rs +++ b/compiler/rustc_hir_analysis/src/collect/resolve_bound_vars.rs @@ -14,9 +14,10 @@ use rustc_ast::visit::walk_list; use rustc_data_structures::fx::{FxHashSet, FxIndexMap, FxIndexSet}; use rustc_errors::ErrorGuaranteed; use rustc_hir::def::{DefKind, Res}; +use rustc_hir::definitions::{DefPathData, DisambiguatorState}; use rustc_hir::intravisit::{self, InferKind, Visitor, VisitorExt}; use rustc_hir::{ - self as hir, AmbigArg, GenericArg, GenericParam, GenericParamKind, HirId, LifetimeName, Node, + self as hir, AmbigArg, GenericArg, GenericParam, GenericParamKind, HirId, LifetimeKind, Node, }; use rustc_macros::extension; use rustc_middle::hir::nested_filter; @@ -63,6 +64,7 @@ impl ResolvedArg { struct BoundVarContext<'a, 'tcx> { tcx: TyCtxt<'tcx>, rbv: &'a mut ResolveBoundVars, + disambiguator: &'a mut DisambiguatorState, scope: ScopeRef<'a>, } @@ -245,8 +247,12 @@ pub(crate) fn provide(providers: &mut Providers) { #[instrument(level = "debug", skip(tcx))] fn resolve_bound_vars(tcx: TyCtxt<'_>, local_def_id: hir::OwnerId) -> ResolveBoundVars { let mut rbv = ResolveBoundVars::default(); - let mut visitor = - BoundVarContext { tcx, rbv: &mut rbv, scope: &Scope::Root { opt_parent_item: None } }; + let mut visitor = BoundVarContext { + tcx, + rbv: &mut rbv, + scope: &Scope::Root { opt_parent_item: None }, + disambiguator: &mut DisambiguatorState::new(), + }; match tcx.hir_owner_node(local_def_id) { hir::OwnerNode::Item(item) => visitor.visit_item(item), hir::OwnerNode::ForeignItem(item) => visitor.visit_foreign_item(item), @@ -515,9 +521,10 @@ impl<'a, 'tcx> Visitor<'tcx> for BoundVarContext<'a, 'tcx> { let capture_all_in_scope_lifetimes = opaque_captures_all_in_scope_lifetimes(opaque); if capture_all_in_scope_lifetimes { + let tcx = self.tcx; let lifetime_ident = |def_id: LocalDefId| { - let name = self.tcx.item_name(def_id.to_def_id()); - let span = self.tcx.def_span(def_id); + let name = tcx.item_name(def_id.to_def_id()); + let span = tcx.def_span(def_id); Ident::new(name, span) }; @@ -619,7 +626,7 @@ impl<'a, 'tcx> Visitor<'tcx> for BoundVarContext<'a, 'tcx> { hir::ItemKind::ExternCrate(..) | hir::ItemKind::Use(..) - | hir::ItemKind::Macro(..) + | hir::ItemKind::Macro { .. } | hir::ItemKind::Mod(..) | hir::ItemKind::ForeignMod { .. } | hir::ItemKind::Static(..) @@ -646,14 +653,14 @@ impl<'a, 'tcx> Visitor<'tcx> for BoundVarContext<'a, 'tcx> { arg: &'tcx hir::PreciseCapturingArg<'tcx>, ) -> Self::Result { match *arg { - hir::PreciseCapturingArg::Lifetime(lt) => match lt.res { - LifetimeName::Param(def_id) => { + hir::PreciseCapturingArg::Lifetime(lt) => match lt.kind { + LifetimeKind::Param(def_id) => { self.resolve_lifetime_ref(def_id, lt); } - LifetimeName::Error => {} - LifetimeName::ImplicitObjectLifetimeDefault - | LifetimeName::Infer - | LifetimeName::Static => { + LifetimeKind::Error => {} + LifetimeKind::ImplicitObjectLifetimeDefault + | LifetimeKind::Infer + | LifetimeKind::Static => { self.tcx.dcx().emit_err(errors::BadPreciseCapture { span: lt.ident.span, kind: "lifetime", @@ -774,26 +781,26 @@ impl<'a, 'tcx> Visitor<'tcx> for BoundVarContext<'a, 'tcx> { ); } }); - match lifetime.res { - LifetimeName::ImplicitObjectLifetimeDefault => { + match lifetime.kind { + LifetimeKind::ImplicitObjectLifetimeDefault => { // If the user does not write *anything*, we // use the object lifetime defaulting // rules. So e.g., `Box` becomes // `Box`. self.resolve_object_lifetime_default(&*lifetime) } - LifetimeName::Infer => { + LifetimeKind::Infer => { // If the user writes `'_`, we use the *ordinary* elision // rules. So the `'_` in e.g., `Box` will be // resolved the same as the `'_` in `&'_ Foo`. // // cc #48468 } - LifetimeName::Param(..) | LifetimeName::Static => { + LifetimeKind::Param(..) | LifetimeKind::Static => { // If the user wrote an explicit name, use that. self.visit_lifetime(&*lifetime); } - LifetimeName::Error => {} + LifetimeKind::Error => {} } } hir::TyKind::Ref(lifetime_ref, ref mt) => { @@ -873,17 +880,17 @@ impl<'a, 'tcx> Visitor<'tcx> for BoundVarContext<'a, 'tcx> { #[instrument(level = "debug", skip(self))] fn visit_lifetime(&mut self, lifetime_ref: &'tcx hir::Lifetime) { - match lifetime_ref.res { - hir::LifetimeName::Static => { + match lifetime_ref.kind { + hir::LifetimeKind::Static => { self.insert_lifetime(lifetime_ref, ResolvedArg::StaticLifetime) } - hir::LifetimeName::Param(param_def_id) => { + hir::LifetimeKind::Param(param_def_id) => { self.resolve_lifetime_ref(param_def_id, lifetime_ref) } // If we've already reported an error, just ignore `lifetime_ref`. - hir::LifetimeName::Error => {} + hir::LifetimeKind::Error => {} // Those will be resolved by typechecking. - hir::LifetimeName::ImplicitObjectLifetimeDefault | hir::LifetimeName::Infer => {} + hir::LifetimeKind::ImplicitObjectLifetimeDefault | hir::LifetimeKind::Infer => {} } } @@ -1063,15 +1070,15 @@ fn object_lifetime_default(tcx: TyCtxt<'_>, param_def_id: LocalDefId) -> ObjectL for bound in bound.bounds { if let hir::GenericBound::Outlives(lifetime) = bound { - set.insert(lifetime.res); + set.insert(lifetime.kind); } } } match set { Set1::Empty => ObjectLifetimeDefault::Empty, - Set1::One(hir::LifetimeName::Static) => ObjectLifetimeDefault::Static, - Set1::One(hir::LifetimeName::Param(param_def_id)) => { + Set1::One(hir::LifetimeKind::Static) => ObjectLifetimeDefault::Static, + Set1::One(hir::LifetimeKind::Param(param_def_id)) => { ObjectLifetimeDefault::Param(param_def_id.to_def_id()) } _ => ObjectLifetimeDefault::Ambiguous, @@ -1091,8 +1098,8 @@ impl<'a, 'tcx> BoundVarContext<'a, 'tcx> { where F: for<'b> FnOnce(&mut BoundVarContext<'b, 'tcx>), { - let BoundVarContext { tcx, rbv, .. } = self; - let mut this = BoundVarContext { tcx: *tcx, rbv, scope: &wrap_scope }; + let BoundVarContext { tcx, rbv, disambiguator, .. } = self; + let mut this = BoundVarContext { tcx: *tcx, rbv, disambiguator, scope: &wrap_scope }; let span = debug_span!("scope", scope = ?this.scope.debug_truncated()); { let _enter = span.enter(); @@ -1241,7 +1248,7 @@ impl<'a, 'tcx> BoundVarContext<'a, 'tcx> { // Fresh lifetimes in APIT used to be allowed in async fns and forbidden in // regular fns. if let Some(hir::PredicateOrigin::ImplTrait) = where_bound_origin - && let hir::LifetimeName::Param(param_id) = lifetime_ref.res + && let hir::LifetimeKind::Param(param_id) = lifetime_ref.kind && let Some(generics) = self.tcx.hir_get_generics(self.tcx.local_parent(param_id)) && let Some(param) = generics.params.iter().find(|p| p.def_id == param_id) @@ -1446,7 +1453,7 @@ impl<'a, 'tcx> BoundVarContext<'a, 'tcx> { #[instrument(level = "trace", skip(self, opaque_capture_scopes), ret)] fn remap_opaque_captures( - &self, + &mut self, opaque_capture_scopes: &Vec<(LocalDefId, &RefCell>)>, mut lifetime: ResolvedArg, ident: Ident, @@ -1462,8 +1469,17 @@ impl<'a, 'tcx> BoundVarContext<'a, 'tcx> { for &(opaque_def_id, captures) in opaque_capture_scopes.iter().rev() { let mut captures = captures.borrow_mut(); let remapped = *captures.entry(lifetime).or_insert_with(|| { - let feed = - self.tcx.create_def(opaque_def_id, Some(ident.name), DefKind::LifetimeParam); + // `opaque_def_id` is unique to the `BoundVarContext` pass which is executed once + // per `resolve_bound_vars` query. This is the only location that creates + // `OpaqueLifetime` paths. `::OpaqueLifetime(..)` is thus unique + // to this query and duplicates within the query are handled by `self.disambiguator`. + let feed = self.tcx.create_def( + opaque_def_id, + None, + DefKind::LifetimeParam, + Some(DefPathData::OpaqueLifetime(ident.name)), + &mut self.disambiguator, + ); feed.def_span(ident.span); feed.def_ident_span(Some(ident.span)); feed.def_id() @@ -1529,7 +1545,7 @@ impl<'a, 'tcx> BoundVarContext<'a, 'tcx> { if let ResolvedArg::LateBound(..) = def && let Some(what) = crossed_late_boundary { - let use_span = self.tcx.hir().span(hir_id); + let use_span = self.tcx.hir_span(hir_id); let def_span = self.tcx.def_span(param_def_id); let guar = match self.tcx.def_kind(param_def_id) { DefKind::ConstParam => { @@ -1576,11 +1592,11 @@ impl<'a, 'tcx> BoundVarContext<'a, 'tcx> { } => { let guar = self.tcx.dcx().emit_err(match self.tcx.def_kind(param_def_id) { DefKind::TyParam => errors::LateBoundInApit::Type { - span: self.tcx.hir().span(hir_id), + span: self.tcx.hir_span(hir_id), param_span: self.tcx.def_span(param_def_id), }, DefKind::ConstParam => errors::LateBoundInApit::Const { - span: self.tcx.hir().span(hir_id), + span: self.tcx.hir_span(hir_id), param_span: self.tcx.def_span(param_def_id), }, kind => { @@ -1605,7 +1621,7 @@ impl<'a, 'tcx> BoundVarContext<'a, 'tcx> { self.tcx .dcx() - .span_bug(self.tcx.hir().span(hir_id), format!("could not resolve {param_def_id:?}")); + .span_bug(self.tcx.hir_span(hir_id), format!("could not resolve {param_def_id:?}")); } #[instrument(level = "debug", skip(self))] @@ -1811,7 +1827,7 @@ impl<'a, 'tcx> BoundVarContext<'a, 'tcx> { self.tcx, type_def_id, constraint.ident, - ty::AssocKind::Fn, + ty::AssocTag::Fn, ) { bound_vars.extend( self.tcx @@ -1843,7 +1859,7 @@ impl<'a, 'tcx> BoundVarContext<'a, 'tcx> { self.tcx, type_def_id, constraint.ident, - ty::AssocKind::Type, + ty::AssocTag::Type, ) .map(|(bound_vars, _)| bound_vars); self.with(scope, |this| { @@ -1874,14 +1890,14 @@ impl<'a, 'tcx> BoundVarContext<'a, 'tcx> { fn supertrait_hrtb_vars( tcx: TyCtxt<'tcx>, def_id: DefId, - assoc_name: Ident, - assoc_kind: ty::AssocKind, + assoc_ident: Ident, + assoc_tag: ty::AssocTag, ) -> Option<(Vec, &'tcx ty::AssocItem)> { let trait_defines_associated_item_named = |trait_def_id: DefId| { - tcx.associated_items(trait_def_id).find_by_name_and_kind( + tcx.associated_items(trait_def_id).find_by_ident_and_kind( tcx, - assoc_name, - assoc_kind, + assoc_ident, + assoc_tag, trait_def_id, ) }; @@ -1894,8 +1910,8 @@ impl<'a, 'tcx> BoundVarContext<'a, 'tcx> { let Some((def_id, bound_vars)) = stack.pop() else { break None; }; - // See issue #83753. If someone writes an associated type on a non-trait, just treat it as - // there being no supertrait HRTBs. + // See issue #83753. If someone writes an associated type on a non-trait, just treat it + // as there being no supertrait HRTBs. match tcx.def_kind(def_id) { DefKind::Trait | DefKind::TraitAlias | DefKind::Impl { .. } => {} _ => break None, @@ -1904,7 +1920,7 @@ impl<'a, 'tcx> BoundVarContext<'a, 'tcx> { if let Some(assoc_item) = trait_defines_associated_item_named(def_id) { break Some((bound_vars.into_iter().collect(), assoc_item)); } - let predicates = tcx.explicit_supertraits_containing_assoc_item((def_id, assoc_name)); + let predicates = tcx.explicit_supertraits_containing_assoc_item((def_id, assoc_ident)); let obligations = predicates.iter_identity_copied().filter_map(|(pred, _)| { let bound_predicate = pred.kind(); match bound_predicate.skip_binder() { @@ -2067,7 +2083,7 @@ impl<'a, 'tcx> BoundVarContext<'a, 'tcx> { self.tcx, trait_def_id, item_segment.ident, - ty::AssocKind::Fn, + ty::AssocTag::Fn, ) }); @@ -2112,7 +2128,7 @@ impl<'a, 'tcx> BoundVarContext<'a, 'tcx> { self.tcx, trait_def_id, item_segment.ident, - ty::AssocKind::Fn, + ty::AssocTag::Fn, ) else { return; }; @@ -2440,7 +2456,7 @@ fn is_late_bound_map( } fn visit_lifetime(&mut self, lifetime_ref: &'v hir::Lifetime) { - if let hir::LifetimeName::Param(def_id) = lifetime_ref.res { + if let hir::LifetimeKind::Param(def_id) = lifetime_ref.kind { self.regions.insert(def_id); } } @@ -2453,7 +2469,7 @@ fn is_late_bound_map( impl<'tcx> Visitor<'tcx> for AllCollector { fn visit_lifetime(&mut self, lifetime_ref: &'tcx hir::Lifetime) { - if let hir::LifetimeName::Param(def_id) = lifetime_ref.res { + if let hir::LifetimeKind::Param(def_id) = lifetime_ref.kind { self.regions.insert(def_id); } } diff --git a/compiler/rustc_hir_analysis/src/collect/type_of.rs b/compiler/rustc_hir_analysis/src/collect/type_of.rs index afda2c142e228..817975c242f5f 100644 --- a/compiler/rustc_hir_analysis/src/collect/type_of.rs +++ b/compiler/rustc_hir_analysis/src/collect/type_of.rs @@ -7,7 +7,9 @@ use rustc_hir::{self as hir, AmbigArg, HirId}; use rustc_middle::query::plumbing::CyclePlaceholder; use rustc_middle::ty::print::with_forced_trimmed_paths; use rustc_middle::ty::util::IntTypeExt; -use rustc_middle::ty::{self, IsSuggestable, Ty, TyCtxt, TypeVisitableExt, fold_regions}; +use rustc_middle::ty::{ + self, DefiningScopeKind, IsSuggestable, Ty, TyCtxt, TypeVisitableExt, fold_regions, +}; use rustc_middle::{bug, span_bug}; use rustc_span::{DUMMY_SP, Ident, Span}; @@ -92,10 +94,12 @@ fn const_arg_anon_type_of<'tcx>(icx: &ItemCtxt<'tcx>, arg_hir_id: HirId, span: S } Node::TyPat(pat) => { - let hir::TyKind::Pat(ty, p) = tcx.parent_hir_node(pat.hir_id).expect_ty().kind else { - bug!() + let node = match tcx.parent_hir_node(pat.hir_id) { + // Or patterns can be nested one level deep + Node::TyPat(p) => tcx.parent_hir_node(p.hir_id), + other => other, }; - assert_eq!(p.hir_id, pat.hir_id); + let hir::TyKind::Pat(ty, _) = node.expect_ty().kind else { bug!() }; icx.lower_ty(ty) } @@ -252,7 +256,7 @@ pub(super) fn type_of(tcx: TyCtxt<'_>, def_id: LocalDefId) -> ty::EarlyBinder<'_ ItemKind::GlobalAsm { .. } => tcx.typeck(def_id).node_type(hir_id), ItemKind::Trait(..) | ItemKind::TraitAlias(..) - | ItemKind::Macro(..) + | ItemKind::Macro { .. } | ItemKind::Mod(..) | ItemKind::ForeignMod { .. } | ItemKind::ExternCrate(..) @@ -324,10 +328,18 @@ pub(super) fn type_of_opaque( if let Some(def_id) = def_id.as_local() { Ok(ty::EarlyBinder::bind(match tcx.hir_node_by_def_id(def_id).expect_opaque_ty().origin { hir::OpaqueTyOrigin::TyAlias { in_assoc_ty: false, .. } => { - opaque::find_opaque_ty_constraints_for_tait(tcx, def_id) + opaque::find_opaque_ty_constraints_for_tait( + tcx, + def_id, + DefiningScopeKind::MirBorrowck, + ) } hir::OpaqueTyOrigin::TyAlias { in_assoc_ty: true, .. } => { - opaque::find_opaque_ty_constraints_for_impl_trait_in_assoc_type(tcx, def_id) + opaque::find_opaque_ty_constraints_for_impl_trait_in_assoc_type( + tcx, + def_id, + DefiningScopeKind::MirBorrowck, + ) } // Opaque types desugared from `impl Trait`. hir::OpaqueTyOrigin::FnReturn { parent: owner, in_trait_or_impl } @@ -340,7 +352,12 @@ pub(super) fn type_of_opaque( "tried to get type of this RPITIT with no definition" ); } - opaque::find_opaque_ty_constraints_for_rpit(tcx, def_id, owner) + opaque::find_opaque_ty_constraints_for_rpit( + tcx, + def_id, + owner, + DefiningScopeKind::MirBorrowck, + ) } })) } else { @@ -350,6 +367,42 @@ pub(super) fn type_of_opaque( } } +pub(super) fn type_of_opaque_hir_typeck( + tcx: TyCtxt<'_>, + def_id: LocalDefId, +) -> ty::EarlyBinder<'_, Ty<'_>> { + ty::EarlyBinder::bind(match tcx.hir_node_by_def_id(def_id).expect_opaque_ty().origin { + hir::OpaqueTyOrigin::TyAlias { in_assoc_ty: false, .. } => { + opaque::find_opaque_ty_constraints_for_tait(tcx, def_id, DefiningScopeKind::HirTypeck) + } + hir::OpaqueTyOrigin::TyAlias { in_assoc_ty: true, .. } => { + opaque::find_opaque_ty_constraints_for_impl_trait_in_assoc_type( + tcx, + def_id, + DefiningScopeKind::HirTypeck, + ) + } + // Opaque types desugared from `impl Trait`. + hir::OpaqueTyOrigin::FnReturn { parent: owner, in_trait_or_impl } + | hir::OpaqueTyOrigin::AsyncFn { parent: owner, in_trait_or_impl } => { + if in_trait_or_impl == Some(hir::RpitContext::Trait) + && !tcx.defaultness(owner).has_value() + { + span_bug!( + tcx.def_span(def_id), + "tried to get type of this RPITIT with no definition" + ); + } + opaque::find_opaque_ty_constraints_for_rpit( + tcx, + def_id, + owner, + DefiningScopeKind::HirTypeck, + ) + } + }) +} + fn infer_placeholder_type<'tcx>( cx: &dyn HirTyLowerer<'tcx>, def_id: LocalDefId, diff --git a/compiler/rustc_hir_analysis/src/collect/type_of/opaque.rs b/compiler/rustc_hir_analysis/src/collect/type_of/opaque.rs index 3dec1e286b4ef..50e20a19edaef 100644 --- a/compiler/rustc_hir_analysis/src/collect/type_of/opaque.rs +++ b/compiler/rustc_hir_analysis/src/collect/type_of/opaque.rs @@ -3,8 +3,7 @@ use rustc_hir::def_id::LocalDefId; use rustc_hir::{self as hir, Expr, ImplItem, Item, Node, TraitItem, def, intravisit}; use rustc_middle::bug; use rustc_middle::hir::nested_filter; -use rustc_middle::ty::{self, Ty, TyCtxt, TypeVisitableExt}; -use rustc_span::DUMMY_SP; +use rustc_middle::ty::{self, DefiningScopeKind, Ty, TyCtxt, TypeVisitableExt}; use tracing::{debug, instrument, trace}; use crate::errors::{TaitForwardCompat2, UnconstrainedOpaqueType}; @@ -15,6 +14,7 @@ use crate::errors::{TaitForwardCompat2, UnconstrainedOpaqueType}; pub(super) fn find_opaque_ty_constraints_for_impl_trait_in_assoc_type( tcx: TyCtxt<'_>, def_id: LocalDefId, + opaque_types_from: DefiningScopeKind, ) -> Ty<'_> { let mut parent_def_id = def_id; while tcx.def_kind(parent_def_id) == def::DefKind::OpaqueTy { @@ -27,37 +27,28 @@ pub(super) fn find_opaque_ty_constraints_for_impl_trait_in_assoc_type( other => bug!("invalid impl trait in assoc type parent: {other:?}"), } - let mut locator = TaitConstraintLocator { def_id, tcx, found: None, typeck_types: vec![] }; + let mut locator = TaitConstraintLocator { def_id, tcx, found: None, opaque_types_from }; for &assoc_id in tcx.associated_item_def_ids(impl_def_id) { let assoc = tcx.associated_item(assoc_id); match assoc.kind { - ty::AssocKind::Const | ty::AssocKind::Fn => locator.check(assoc_id.expect_local()), + ty::AssocKind::Const { .. } | ty::AssocKind::Fn { .. } => { + locator.check(assoc_id.expect_local()) + } // Associated types don't have bodies, so they can't constrain hidden types - ty::AssocKind::Type => {} + ty::AssocKind::Type { .. } => {} } } if let Some(hidden) = locator.found { - // Only check against typeck if we didn't already error - if !hidden.ty.references_error() { - for concrete_type in locator.typeck_types { - if concrete_type.ty != tcx.erase_regions(hidden.ty) { - if let Ok(d) = hidden.build_mismatch_error(&concrete_type, tcx) { - d.emit(); - } - } - } - } - hidden.ty } else { - let reported = tcx.dcx().emit_err(UnconstrainedOpaqueType { + let guar = tcx.dcx().emit_err(UnconstrainedOpaqueType { span: tcx.def_span(def_id), name: tcx.item_ident(parent_def_id.to_def_id()), what: "impl", }); - Ty::new_error(tcx, reported) + Ty::new_error(tcx, guar) } } @@ -80,23 +71,16 @@ pub(super) fn find_opaque_ty_constraints_for_impl_trait_in_assoc_type( /// fn b() -> Foo { .. } /// ``` #[instrument(skip(tcx), level = "debug")] -pub(super) fn find_opaque_ty_constraints_for_tait(tcx: TyCtxt<'_>, def_id: LocalDefId) -> Ty<'_> { - let mut locator = TaitConstraintLocator { def_id, tcx, found: None, typeck_types: vec![] }; +pub(super) fn find_opaque_ty_constraints_for_tait( + tcx: TyCtxt<'_>, + def_id: LocalDefId, + opaque_types_from: DefiningScopeKind, +) -> Ty<'_> { + let mut locator = TaitConstraintLocator { def_id, tcx, found: None, opaque_types_from }; tcx.hir_walk_toplevel_module(&mut locator); if let Some(hidden) = locator.found { - // Only check against typeck if we didn't already error - if !hidden.ty.references_error() { - for concrete_type in locator.typeck_types { - if concrete_type.ty != tcx.erase_regions(hidden.ty) { - if let Ok(d) = hidden.build_mismatch_error(&concrete_type, tcx) { - d.emit(); - } - } - } - } - hidden.ty } else { let mut parent_def_id = def_id; @@ -104,12 +88,12 @@ pub(super) fn find_opaque_ty_constraints_for_tait(tcx: TyCtxt<'_>, def_id: Local // Account for `type Alias = impl Trait;` (#116031) parent_def_id = tcx.local_parent(parent_def_id); } - let reported = tcx.dcx().emit_err(UnconstrainedOpaqueType { + let guar = tcx.dcx().emit_err(UnconstrainedOpaqueType { span: tcx.def_span(def_id), name: tcx.item_ident(parent_def_id.to_def_id()), what: "crate", }); - Ty::new_error(tcx, reported) + Ty::new_error(tcx, guar) } } @@ -126,22 +110,44 @@ struct TaitConstraintLocator<'tcx> { /// type). found: Option>, - /// In the presence of dead code, typeck may figure out a hidden type - /// while borrowck will not. We collect these cases here and check at - /// the end that we actually found a type that matches (modulo regions). - typeck_types: Vec>, + opaque_types_from: DefiningScopeKind, } -impl TaitConstraintLocator<'_> { +impl<'tcx> TaitConstraintLocator<'tcx> { + fn insert_found(&mut self, hidden_ty: ty::OpaqueHiddenType<'tcx>) { + if let Some(prev) = &mut self.found { + if hidden_ty.ty != prev.ty { + let (Ok(guar) | Err(guar)) = + prev.build_mismatch_error(&hidden_ty, self.tcx).map(|d| d.emit()); + prev.ty = Ty::new_error(self.tcx, guar); + } + } else { + self.found = Some(hidden_ty); + } + } + + fn non_defining_use_in_defining_scope(&mut self, item_def_id: LocalDefId) { + let guar = self.tcx.dcx().emit_err(TaitForwardCompat2 { + span: self + .tcx + .def_ident_span(item_def_id) + .unwrap_or_else(|| self.tcx.def_span(item_def_id)), + opaque_type_span: self.tcx.def_span(self.def_id), + opaque_type: self.tcx.def_path_str(self.def_id), + }); + self.insert_found(ty::OpaqueHiddenType::new_error(self.tcx, guar)); + } + #[instrument(skip(self), level = "debug")] fn check(&mut self, item_def_id: LocalDefId) { // Don't try to check items that cannot possibly constrain the type. - if !self.tcx.has_typeck_results(item_def_id) { + let tcx = self.tcx; + if !tcx.has_typeck_results(item_def_id) { debug!("no constraint: no typeck results"); return; } - let opaque_types_defined_by = self.tcx.opaque_types_defined_by(item_def_id); + let opaque_types_defined_by = tcx.opaque_types_defined_by(item_def_id); // Don't try to check items that cannot possibly constrain the type. if !opaque_types_defined_by.contains(&self.def_id) { debug!("no constraint: no opaque types defined"); @@ -152,7 +158,7 @@ impl TaitConstraintLocator<'_> { // "non-defining use" errors for them. // Note that we use `Node::fn_sig` instead of `Node::fn_decl` here, because the former // excludes closures, which are allowed to have `_` in their return type. - let hir_node = self.tcx.hir_node_by_def_id(item_def_id); + let hir_node = tcx.hir_node_by_def_id(item_def_id); debug_assert!( !matches!(hir_node, Node::ForeignItem(..)), "foreign items cannot constrain opaque types", @@ -164,89 +170,38 @@ impl TaitConstraintLocator<'_> { hir_sig.decl.output.span(), "inferring return types and opaque types do not mix well", ); - self.found = - Some(ty::OpaqueHiddenType { span: DUMMY_SP, ty: Ty::new_error(self.tcx, guar) }); + self.found = Some(ty::OpaqueHiddenType::new_error(tcx, guar)); return; } - // Calling `mir_borrowck` can lead to cycle errors through - // const-checking, avoid calling it if we don't have to. - // ```rust - // type Foo = impl Fn() -> usize; // when computing type for this - // const fn bar() -> Foo { - // || 0usize - // } - // const BAZR: Foo = bar(); // we would mir-borrowck this, causing cycles - // // because we again need to reveal `Foo` so we can check whether the - // // constant does not contain interior mutability. - // ``` - let tables = self.tcx.typeck(item_def_id); - if let Some(guar) = tables.tainted_by_errors { - self.found = - Some(ty::OpaqueHiddenType { span: DUMMY_SP, ty: Ty::new_error(self.tcx, guar) }); - return; - } - - let mut constrained = false; - for (&opaque_type_key, &hidden_type) in &tables.concrete_opaque_types { - if opaque_type_key.def_id != self.def_id { - continue; - } - constrained = true; - - let concrete_type = - self.tcx.erase_regions(hidden_type.remap_generic_params_to_declaration_params( - opaque_type_key, - self.tcx, - true, - )); - if self.typeck_types.iter().all(|prev| prev.ty != concrete_type.ty) { - self.typeck_types.push(concrete_type); - } - } - - if !constrained { - debug!("no constraints in typeck results"); - if opaque_types_defined_by.contains(&self.def_id) { - let guar = self.tcx.dcx().emit_err(TaitForwardCompat2 { - span: self - .tcx - .def_ident_span(item_def_id) - .unwrap_or_else(|| self.tcx.def_span(item_def_id)), - opaque_type_span: self.tcx.def_span(self.def_id), - opaque_type: self.tcx.def_path_str(self.def_id), - }); - // Avoid "opaque type not constrained" errors on the opaque itself. - self.found = Some(ty::OpaqueHiddenType { - span: DUMMY_SP, - ty: Ty::new_error(self.tcx, guar), - }); - } - return; - }; - - // Use borrowck to get the type with unerased regions. - let borrowck_results = &self.tcx.mir_borrowck(item_def_id); - - // If the body was tainted, then assume the opaque may have been constrained and just set it to error. - if let Some(guar) = borrowck_results.tainted_by_errors { - self.found = - Some(ty::OpaqueHiddenType { span: DUMMY_SP, ty: Ty::new_error(self.tcx, guar) }); - return; - } - - debug!(?borrowck_results.concrete_opaque_types); - if let Some(&concrete_type) = borrowck_results.concrete_opaque_types.get(&self.def_id) { - debug!(?concrete_type, "found constraint"); - if let Some(prev) = &mut self.found { - if concrete_type.ty != prev.ty { - let (Ok(guar) | Err(guar)) = - prev.build_mismatch_error(&concrete_type, self.tcx).map(|d| d.emit()); - prev.ty = Ty::new_error(self.tcx, guar); + match self.opaque_types_from { + DefiningScopeKind::HirTypeck => { + let tables = tcx.typeck(item_def_id); + if let Some(guar) = tables.tainted_by_errors { + self.insert_found(ty::OpaqueHiddenType::new_error(tcx, guar)); + } else if let Some(&hidden_type) = tables.concrete_opaque_types.get(&self.def_id) { + self.insert_found(hidden_type); + } else { + self.non_defining_use_in_defining_scope(item_def_id); } - } else { - self.found = Some(concrete_type); } + DefiningScopeKind::MirBorrowck => match tcx.mir_borrowck(item_def_id) { + Err(guar) => self.insert_found(ty::OpaqueHiddenType::new_error(tcx, guar)), + Ok(concrete_opaque_types) => { + if let Some(&hidden_type) = concrete_opaque_types.0.get(&self.def_id) { + debug!(?hidden_type, "found constraint"); + self.insert_found(hidden_type); + } else if let Err(guar) = tcx + .type_of_opaque_hir_typeck(self.def_id) + .instantiate_identity() + .error_reported() + { + self.insert_found(ty::OpaqueHiddenType::new_error(tcx, guar)); + } else { + self.non_defining_use_in_defining_scope(item_def_id); + } + } + }, } } } @@ -287,126 +242,42 @@ pub(super) fn find_opaque_ty_constraints_for_rpit<'tcx>( tcx: TyCtxt<'tcx>, def_id: LocalDefId, owner_def_id: LocalDefId, + opaque_types_from: DefiningScopeKind, ) -> Ty<'tcx> { - let tables = tcx.typeck(owner_def_id); - - // Check that all of the opaques we inferred during HIR are compatible. - // FIXME: We explicitly don't check that the types inferred during HIR - // typeck are compatible with the one that we infer during borrowck, - // because that one actually sometimes has consts evaluated eagerly so - // using strict type equality will fail. - let mut hir_opaque_ty: Option> = None; - if tables.tainted_by_errors.is_none() { - for (&opaque_type_key, &hidden_type) in &tables.concrete_opaque_types { - if opaque_type_key.def_id != def_id { - continue; - } - let concrete_type = tcx.erase_regions( - hidden_type.remap_generic_params_to_declaration_params(opaque_type_key, tcx, true), - ); - if let Some(prev) = &mut hir_opaque_ty { - if concrete_type.ty != prev.ty { - if let Ok(d) = prev.build_mismatch_error(&concrete_type, tcx) { - d.emit(); - } - } + match opaque_types_from { + DefiningScopeKind::HirTypeck => { + let tables = tcx.typeck(owner_def_id); + if let Some(guar) = tables.tainted_by_errors { + Ty::new_error(tcx, guar) + } else if let Some(hidden_ty) = tables.concrete_opaque_types.get(&def_id) { + hidden_ty.ty } else { - hir_opaque_ty = Some(concrete_type); + // FIXME(-Znext-solver): This should not be necessary and we should + // instead rely on inference variable fallback inside of typeck itself. + + // We failed to resolve the opaque type or it + // resolves to itself. We interpret this as the + // no values of the hidden type ever being constructed, + // so we can just make the hidden type be `!`. + // For backwards compatibility reasons, we fall back to + // `()` until we the diverging default is changed. + Ty::new_diverging_default(tcx) } } - } - - let mir_opaque_ty = tcx.mir_borrowck(owner_def_id).concrete_opaque_types.get(&def_id).copied(); - if let Some(mir_opaque_ty) = mir_opaque_ty { - if mir_opaque_ty.references_error() { - return mir_opaque_ty.ty; - } - - debug!(?owner_def_id); - let mut locator = RpitConstraintChecker { def_id, tcx, found: mir_opaque_ty }; - - match tcx.hir_node_by_def_id(owner_def_id) { - Node::Item(it) => intravisit::walk_item(&mut locator, it), - Node::ImplItem(it) => intravisit::walk_impl_item(&mut locator, it), - Node::TraitItem(it) => intravisit::walk_trait_item(&mut locator, it), - other => bug!("{:?} is not a valid scope for an opaque type item", other), - } - - mir_opaque_ty.ty - } else if let Some(guar) = tables.tainted_by_errors { - // Some error in the owner fn prevented us from populating - // the `concrete_opaque_types` table. - Ty::new_error(tcx, guar) - } else { - // Fall back to the RPIT we inferred during HIR typeck - if let Some(hir_opaque_ty) = hir_opaque_ty { - hir_opaque_ty.ty - } else { - // We failed to resolve the opaque type or it - // resolves to itself. We interpret this as the - // no values of the hidden type ever being constructed, - // so we can just make the hidden type be `!`. - // For backwards compatibility reasons, we fall back to - // `()` until we the diverging default is changed. - Ty::new_diverging_default(tcx) - } - } -} - -struct RpitConstraintChecker<'tcx> { - tcx: TyCtxt<'tcx>, - - /// def_id of the opaque type whose defining uses are being checked - def_id: LocalDefId, - - found: ty::OpaqueHiddenType<'tcx>, -} - -impl RpitConstraintChecker<'_> { - #[instrument(skip(self), level = "debug")] - fn check(&self, def_id: LocalDefId) { - // Use borrowck to get the type with unerased regions. - let concrete_opaque_types = &self.tcx.mir_borrowck(def_id).concrete_opaque_types; - debug!(?concrete_opaque_types); - if let Some(&concrete_type) = concrete_opaque_types.get(&self.def_id) { - debug!(?concrete_type, "found constraint"); - if concrete_type.ty != self.found.ty { - if let Ok(d) = self.found.build_mismatch_error(&concrete_type, self.tcx) { - d.emit(); + DefiningScopeKind::MirBorrowck => match tcx.mir_borrowck(owner_def_id) { + Ok(concrete_opaque_types) => { + if let Some(hidden_ty) = concrete_opaque_types.0.get(&def_id) { + hidden_ty.ty + } else { + let hir_ty = tcx.type_of_opaque_hir_typeck(def_id).instantiate_identity(); + if let Err(guar) = hir_ty.error_reported() { + Ty::new_error(tcx, guar) + } else { + hir_ty + } } } - } - } -} - -impl<'tcx> intravisit::Visitor<'tcx> for RpitConstraintChecker<'tcx> { - type NestedFilter = nested_filter::OnlyBodies; - - fn maybe_tcx(&mut self) -> Self::MaybeTyCtxt { - self.tcx - } - fn visit_expr(&mut self, ex: &'tcx Expr<'tcx>) { - intravisit::walk_expr(self, ex); - } - fn visit_item(&mut self, it: &'tcx Item<'tcx>) { - trace!(?it.owner_id); - // The opaque type itself or its children are not within its reveal scope. - if it.owner_id.def_id != self.def_id { - self.check(it.owner_id.def_id); - intravisit::walk_item(self, it); - } - } - fn visit_impl_item(&mut self, it: &'tcx ImplItem<'tcx>) { - trace!(?it.owner_id); - // The opaque type itself or its children are not within its reveal scope. - if it.owner_id.def_id != self.def_id { - self.check(it.owner_id.def_id); - intravisit::walk_impl_item(self, it); - } - } - fn visit_trait_item(&mut self, it: &'tcx TraitItem<'tcx>) { - trace!(?it.owner_id); - self.check(it.owner_id.def_id); - intravisit::walk_trait_item(self, it); + Err(guar) => Ty::new_error(tcx, guar), + }, } } diff --git a/compiler/rustc_hir_analysis/src/constrained_generic_params.rs b/compiler/rustc_hir_analysis/src/constrained_generic_params.rs index 53866aa27b166..366b3943a0589 100644 --- a/compiler/rustc_hir_analysis/src/constrained_generic_params.rs +++ b/compiler/rustc_hir_analysis/src/constrained_generic_params.rs @@ -49,7 +49,7 @@ pub(crate) fn parameters_for<'tcx>( include_nonconstraining: bool, ) -> Vec { let mut collector = ParameterCollector { parameters: vec![], include_nonconstraining }; - let value = if !include_nonconstraining { tcx.expand_weak_alias_tys(value) } else { value }; + let value = if !include_nonconstraining { tcx.expand_free_alias_tys(value) } else { value }; value.visit_with(&mut collector); collector.parameters } @@ -68,9 +68,9 @@ impl<'tcx> TypeVisitor> for ParameterCollector { { return; } - // All weak alias types should've been expanded beforehand. - ty::Alias(ty::Weak, _) if !self.include_nonconstraining => { - bug!("unexpected weak alias type") + // All free alias types should've been expanded beforehand. + ty::Alias(ty::Free, _) if !self.include_nonconstraining => { + bug!("unexpected free alias type") } ty::Param(param) => self.parameters.push(Parameter::from(param)), _ => {} @@ -80,7 +80,7 @@ impl<'tcx> TypeVisitor> for ParameterCollector { } fn visit_region(&mut self, r: ty::Region<'tcx>) { - if let ty::ReEarlyParam(data) = *r { + if let ty::ReEarlyParam(data) = r.kind() { self.parameters.push(Parameter::from(data)); } } diff --git a/compiler/rustc_hir_analysis/src/errors.rs b/compiler/rustc_hir_analysis/src/errors.rs index f2560f22874bc..638ead4a2d520 100644 --- a/compiler/rustc_hir_analysis/src/errors.rs +++ b/compiler/rustc_hir_analysis/src/errors.rs @@ -23,7 +23,7 @@ pub(crate) struct AmbiguousAssocItem<'a> { #[label] pub span: Span, pub assoc_kind: &'static str, - pub assoc_name: Ident, + pub assoc_ident: Ident, pub qself: &'a str, } @@ -75,7 +75,7 @@ pub(crate) struct AssocItemIsPrivate { pub(crate) struct AssocItemNotFound<'a> { #[primary_span] pub span: Span, - pub assoc_name: Ident, + pub assoc_ident: Ident, pub assoc_kind: &'static str, pub qself: &'a str, #[subdiagnostic] @@ -207,6 +207,31 @@ pub(crate) struct LifetimesOrBoundsMismatchOnTrait { pub ident: Ident, } +#[derive(Diagnostic)] +#[diag(hir_analysis_lifetimes_or_bounds_mismatch_on_eii)] +pub(crate) struct LifetimesOrBoundsMismatchOnEII { + #[primary_span] + #[label] + pub span: Span, + #[label(hir_analysis_generics_label)] + pub generics_span: Option, + #[label(hir_analysis_where_label)] + pub where_span: Option, + #[label(hir_analysis_bounds_label)] + pub bounds_span: Vec, + pub ident: Symbol, +} + +#[derive(Diagnostic)] +#[diag(hir_analysis_eii_with_generics)] +pub(crate) struct EiiWithGenerics { + #[primary_span] + pub span: Span, + #[label] + pub attr: Span, + pub eii_name: Symbol, +} + #[derive(Diagnostic)] #[diag(hir_analysis_drop_impl_on_wrong_item, code = E0120)] pub(crate) struct DropImplOnWrongItem { @@ -1675,14 +1700,6 @@ pub(crate) struct CmseEntryGeneric { pub span: Span, } -#[derive(Diagnostic)] -#[diag(hir_analysis_register_type_unstable)] -pub(crate) struct RegisterTypeUnstable<'a> { - #[primary_span] - pub span: Span, - pub ty: Ty<'a>, -} - #[derive(LintDiagnostic)] #[diag(hir_analysis_supertrait_item_shadowing)] pub(crate) struct SupertraitItemShadowing { @@ -1707,3 +1724,11 @@ pub(crate) enum SupertraitItemShadowee { traits: DiagSymbolList, }, } + +#[derive(Diagnostic)] +#[diag(hir_analysis_self_in_type_alias, code = E0411)] +pub(crate) struct SelfInTypeAlias { + #[primary_span] + #[label] + pub span: Span, +} diff --git a/compiler/rustc_hir_analysis/src/errors/wrong_number_of_generic_args.rs b/compiler/rustc_hir_analysis/src/errors/wrong_number_of_generic_args.rs index 610b293a114e6..a3c8ce620b366 100644 --- a/compiler/rustc_hir_analysis/src/errors/wrong_number_of_generic_args.rs +++ b/compiler/rustc_hir_analysis/src/errors/wrong_number_of_generic_args.rs @@ -4,7 +4,7 @@ use GenericArgsInfo::*; use rustc_errors::codes::*; use rustc_errors::{Applicability, Diag, Diagnostic, EmissionGuarantee, MultiSpan, pluralize}; use rustc_hir as hir; -use rustc_middle::ty::{self as ty, AssocItems, AssocKind, TyCtxt}; +use rustc_middle::ty::{self as ty, AssocItems, TyCtxt}; use rustc_span::def_id::DefId; use tracing::debug; @@ -486,15 +486,15 @@ impl<'a, 'tcx> WrongNumberOfGenericArgs<'a, 'tcx> { let items: &AssocItems = self.tcx.associated_items(self.def_id); items .in_definition_order() - .filter(|item| item.kind == AssocKind::Type) .filter(|item| { - !self - .gen_args - .constraints - .iter() - .any(|constraint| constraint.ident.name == item.name) + item.is_type() + && !item.is_impl_trait_in_trait() + && !self + .gen_args + .constraints + .iter() + .any(|constraint| constraint.ident.name == item.name()) }) - .filter(|item| !item.is_impl_trait_in_trait()) .map(|item| self.tcx.item_ident(item.def_id).to_string()) .collect() } else { diff --git a/compiler/rustc_hir_analysis/src/hir_ty_lowering/bounds.rs b/compiler/rustc_hir_analysis/src/hir_ty_lowering/bounds.rs index f0dffd780bcc7..4419d5dc7d663 100644 --- a/compiler/rustc_hir_analysis/src/hir_ty_lowering/bounds.rs +++ b/compiler/rustc_hir_analysis/src/hir_ty_lowering/bounds.rs @@ -4,13 +4,13 @@ use rustc_data_structures::fx::{FxIndexMap, FxIndexSet}; use rustc_errors::codes::*; use rustc_errors::struct_span_code_err; use rustc_hir as hir; -use rustc_hir::HirId; use rustc_hir::def::{DefKind, Res}; use rustc_hir::def_id::{DefId, LocalDefId}; +use rustc_hir::{AmbigArg, HirId}; use rustc_middle::bug; use rustc_middle::ty::{ - self as ty, IsSuggestable, Ty, TyCtxt, TypeSuperVisitable, TypeVisitable, TypeVisitableExt, - TypeVisitor, Upcast, + self as ty, IsSuggestable, Ty, TyCtxt, TypeFoldable, TypeSuperVisitable, TypeVisitable, + TypeVisitableExt, TypeVisitor, Upcast, }; use rustc_span::{ErrorGuaranteed, Ident, Span, Symbol, kw, sym}; use rustc_trait_selection::traits; @@ -24,25 +24,231 @@ use crate::hir_ty_lowering::{ }; impl<'tcx> dyn HirTyLowerer<'tcx> + '_ { - /// Add a `Sized` bound to the `bounds` if appropriate. - /// - /// Doesn't add the bound if the HIR bounds contain any of `Sized`, `?Sized` or `!Sized`. - pub(crate) fn add_sized_bound( + pub(crate) fn add_default_traits( &self, bounds: &mut Vec<(ty::Clause<'tcx>, Span)>, self_ty: Ty<'tcx>, + hir_bounds: &[hir::GenericBound<'tcx>], + self_ty_where_predicates: Option<(LocalDefId, &'tcx [hir::WherePredicate<'tcx>])>, + span: Span, + ) { + self.add_default_traits_with_filter( + bounds, + self_ty, + hir_bounds, + self_ty_where_predicates, + span, + |_| true, + ); + } + + /// Checks whether `Self: DefaultAutoTrait` bounds should be added on trait super bounds + /// or associated items. + /// + /// To keep backward compatibility with existing code, `experimental_default_bounds` bounds + /// should be added everywhere, including super bounds. However this causes a huge performance + /// costs. For optimization purposes instead of adding default supertraits, bounds + /// are added to the associated items: + /// + /// ```ignore(illustrative) + /// // Default bounds are generated in the following way: + /// trait Trait { + /// fn foo(&self) where Self: Leak {} + /// } + /// + /// // instead of this: + /// trait Trait: Leak { + /// fn foo(&self) {} + /// } + /// ``` + /// It is not always possible to do this because of backward compatibility: + /// + /// ```ignore(illustrative) + /// pub trait Trait {} + /// pub trait Trait1 : Trait {} + /// //~^ ERROR: `Rhs` requires `DefaultAutoTrait`, but `Self` is not `DefaultAutoTrait` + /// ``` + /// + /// or: + /// + /// ```ignore(illustrative) + /// trait Trait { + /// type Type where Self: Sized; + /// } + /// trait Trait2 : Trait {} + /// //~^ ERROR: `DefaultAutoTrait` required for `Trait2`, by implicit `Self: DefaultAutoTrait` in `Trait::Type` + /// ``` + /// + /// Therefore, `experimental_default_bounds` are still being added to supertraits if + /// the `SelfTyParam` or `AssocItemConstraint` were found in a trait header. + fn requires_default_supertraits( + &self, + hir_bounds: &'tcx [hir::GenericBound<'tcx>], + hir_generics: &'tcx hir::Generics<'tcx>, + ) -> bool { + struct TraitInfoCollector; + + impl<'tcx> hir::intravisit::Visitor<'tcx> for TraitInfoCollector { + type Result = ControlFlow<()>; + + fn visit_assoc_item_constraint( + &mut self, + _constraint: &'tcx hir::AssocItemConstraint<'tcx>, + ) -> Self::Result { + ControlFlow::Break(()) + } + + fn visit_ty(&mut self, t: &'tcx hir::Ty<'tcx, AmbigArg>) -> Self::Result { + if matches!( + &t.kind, + hir::TyKind::Path(hir::QPath::Resolved( + _, + hir::Path { res: hir::def::Res::SelfTyParam { .. }, .. }, + )) + ) { + return ControlFlow::Break(()); + } + hir::intravisit::walk_ty(self, t) + } + } + + let mut found = false; + for bound in hir_bounds { + found |= hir::intravisit::walk_param_bound(&mut TraitInfoCollector, bound).is_break(); + } + found |= hir::intravisit::walk_generics(&mut TraitInfoCollector, hir_generics).is_break(); + found + } + + /// Implicitly add `Self: DefaultAutoTrait` clauses on trait associated items if + /// they are not added as super trait bounds to the trait itself. See + /// `requires_default_supertraits` for more information. + pub(crate) fn add_default_trait_item_bounds( + &self, + trait_item: &hir::TraitItem<'tcx>, + bounds: &mut Vec<(ty::Clause<'tcx>, Span)>, + ) { + let tcx = self.tcx(); + if !tcx.sess.opts.unstable_opts.experimental_default_bounds { + return; + } + + let parent = tcx.local_parent(trait_item.hir_id().owner.def_id); + let hir::Node::Item(parent_trait) = tcx.hir_node_by_def_id(parent) else { + unreachable!(); + }; + + let (trait_generics, trait_bounds) = match parent_trait.kind { + hir::ItemKind::Trait(_, _, _, generics, supertraits, _) => (generics, supertraits), + hir::ItemKind::TraitAlias(_, generics, supertraits) => (generics, supertraits), + _ => unreachable!(), + }; + + if !self.requires_default_supertraits(trait_bounds, trait_generics) { + let self_ty_where_predicates = (parent, trait_item.generics.predicates); + self.add_default_traits_with_filter( + bounds, + tcx.types.self_param, + &[], + Some(self_ty_where_predicates), + trait_item.span, + |tr| tr != hir::LangItem::Sized, + ); + } + } + + /// Lazily sets `experimental_default_bounds` to true on trait super bounds. + /// See `requires_default_supertraits` for more information. + pub(crate) fn add_default_super_traits( + &self, + trait_def_id: LocalDefId, + bounds: &mut Vec<(ty::Clause<'tcx>, Span)>, hir_bounds: &'tcx [hir::GenericBound<'tcx>], + hir_generics: &'tcx hir::Generics<'tcx>, + span: Span, + ) { + if !self.tcx().sess.opts.unstable_opts.experimental_default_bounds { + return; + } + + assert!(matches!(self.tcx().def_kind(trait_def_id), DefKind::Trait | DefKind::TraitAlias)); + if self.requires_default_supertraits(hir_bounds, hir_generics) { + let self_ty_where_predicates = (trait_def_id, hir_generics.predicates); + self.add_default_traits_with_filter( + bounds, + self.tcx().types.self_param, + hir_bounds, + Some(self_ty_where_predicates), + span, + |default_trait| default_trait != hir::LangItem::Sized, + ); + } + } + + pub(crate) fn add_default_traits_with_filter( + &self, + bounds: &mut Vec<(ty::Clause<'tcx>, Span)>, + self_ty: Ty<'tcx>, + hir_bounds: &[hir::GenericBound<'tcx>], self_ty_where_predicates: Option<(LocalDefId, &'tcx [hir::WherePredicate<'tcx>])>, span: Span, + f: impl Fn(hir::LangItem) -> bool, ) { + self.tcx().default_traits().iter().filter(|&&default_trait| f(default_trait)).for_each( + |default_trait| { + self.add_default_trait( + *default_trait, + bounds, + self_ty, + hir_bounds, + self_ty_where_predicates, + span, + ); + }, + ); + } + + /// Add a `Sized` or `experimental_default_bounds` bounds to the `bounds` if appropriate. + /// + /// Doesn't add the bound if the HIR bounds contain any of `Trait`, `?Trait` or `!Trait`. + pub(crate) fn add_default_trait( + &self, + trait_: hir::LangItem, + bounds: &mut Vec<(ty::Clause<'tcx>, Span)>, + self_ty: Ty<'tcx>, + hir_bounds: &[hir::GenericBound<'tcx>], + self_ty_where_predicates: Option<(LocalDefId, &'tcx [hir::WherePredicate<'tcx>])>, + span: Span, + ) { + let trait_id = self.tcx().lang_items().get(trait_); + if let Some(trait_id) = trait_id + && self.do_not_provide_default_trait_bound( + trait_id, + hir_bounds, + self_ty_where_predicates, + ) + { + // There was no `?Trait` or `!Trait` bound; + // add `Trait` if it's available. + let trait_ref = ty::TraitRef::new(self.tcx(), trait_id, [self_ty]); + // Preferable to put this obligation first, since we report better errors for sized ambiguity. + bounds.insert(0, (trait_ref.upcast(self.tcx()), span)); + } + } + + fn do_not_provide_default_trait_bound<'a>( + &self, + trait_def_id: DefId, + hir_bounds: &'a [hir::GenericBound<'tcx>], + self_ty_where_predicates: Option<(LocalDefId, &'tcx [hir::WherePredicate<'tcx>])>, + ) -> bool { let tcx = self.tcx(); - let sized_def_id = tcx.lang_items().sized_trait(); - let mut seen_negative_sized_bound = false; - let mut seen_positive_sized_bound = false; + let mut seen_negative_bound = false; + let mut seen_positive_bound = false; // Try to find an unbound in bounds. let mut unbounds: SmallVec<[_; 1]> = SmallVec::new(); - let mut search_bounds = |hir_bounds: &'tcx [hir::GenericBound<'tcx>]| { + let mut search_bounds = |hir_bounds: &'a [hir::GenericBound<'tcx>]| { for hir_bound in hir_bounds { let hir::GenericBound::Trait(ptr) = hir_bound else { continue; @@ -50,17 +256,13 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ { match ptr.modifiers.polarity { hir::BoundPolarity::Maybe(_) => unbounds.push(ptr), hir::BoundPolarity::Negative(_) => { - if let Some(sized_def_id) = sized_def_id - && ptr.trait_ref.path.res == Res::Def(DefKind::Trait, sized_def_id) - { - seen_negative_sized_bound = true; + if ptr.trait_ref.path.res == Res::Def(DefKind::Trait, trait_def_id) { + seen_negative_bound = true; } } hir::BoundPolarity::Positive => { - if let Some(sized_def_id) = sized_def_id - && ptr.trait_ref.path.res == Res::Def(DefKind::Trait, sized_def_id) - { - seen_positive_sized_bound = true; + if ptr.trait_ref.path.res == Res::Def(DefKind::Trait, trait_def_id) { + seen_positive_bound = true; } } } @@ -95,32 +297,35 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ { }; } - let mut seen_sized_unbound = false; + let mut seen_unbound = false; for unbound in unbounds { - if let Some(sized_def_id) = sized_def_id - && unbound.trait_ref.path.res == Res::Def(DefKind::Trait, sized_def_id) - { - seen_sized_unbound = true; - continue; + let unbound_def_id = unbound.trait_ref.trait_def_id(); + if unbound_def_id == Some(trait_def_id) { + seen_unbound = true; + } + let emit_relax_err = || { + let unbound_traits = match tcx.sess.opts.unstable_opts.experimental_default_bounds { + true => "`?Sized` and `experimental_default_bounds`", + false => "`?Sized`", + }; + // There was a `?Trait` bound, but it was neither `?Sized` nor `experimental_default_bounds`. + tcx.dcx().span_err( + unbound.span, + format!( + "relaxing a default bound only does something for {}; \ + all other traits are not bound by default", + unbound_traits + ), + ); + }; + match unbound_def_id { + Some(def_id) if !tcx.is_default_trait(def_id) => emit_relax_err(), + None => emit_relax_err(), + _ => {} } - // There was a `?Trait` bound, but it was not `?Sized` - self.dcx().span_err( - unbound.span, - "relaxing a default bound only does something for `?Sized`; \ - all other traits are not bound by default", - ); } - if seen_sized_unbound || seen_negative_sized_bound || seen_positive_sized_bound { - // There was in fact a `?Sized`, `!Sized` or explicit `Sized` bound; - // we don't need to do anything. - } else if let Some(sized_def_id) = sized_def_id { - // There was no `?Sized`, `!Sized` or explicit `Sized` bound; - // add `Sized` if it's available. - let trait_ref = ty::TraitRef::new(tcx, sized_def_id, [self_ty]); - // Preferable to put this obligation first, since we report better errors for sized ambiguity. - bounds.insert(0, (trait_ref.upcast(tcx), span)); - } + !(seen_unbound || seen_negative_bound || seen_positive_bound) } /// Lower HIR bounds into `bounds` given the self type `param_ty` and the overarching late-bound vars if any. @@ -158,10 +363,10 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ { for hir_bound in hir_bounds { // In order to avoid cycles, when we're lowering `SelfTraitThatDefines`, // we skip over any traits that don't define the given associated type. - if let PredicateFilter::SelfTraitThatDefines(assoc_name) = predicate_filter { + if let PredicateFilter::SelfTraitThatDefines(assoc_ident) = predicate_filter { if let Some(trait_ref) = hir_bound.trait_ref() && let Some(trait_did) = trait_ref.trait_def_id() - && self.tcx().trait_may_define_assoc_item(trait_did, assoc_name) + && self.tcx().trait_may_define_assoc_item(trait_did, assoc_ident) { // Okay } else { @@ -226,16 +431,16 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ { ) -> Result<(), ErrorGuaranteed> { let tcx = self.tcx(); - let assoc_kind = if constraint.gen_args.parenthesized + let assoc_tag = if constraint.gen_args.parenthesized == hir::GenericArgsParentheses::ReturnTypeNotation { - ty::AssocKind::Fn + ty::AssocTag::Fn } else if let hir::AssocItemConstraintKind::Equality { term: hir::Term::Const(_) } = constraint.kind { - ty::AssocKind::Const + ty::AssocTag::Const } else { - ty::AssocKind::Type + ty::AssocTag::Type }; // Given something like `U: Trait`, we want to produce a predicate like @@ -248,7 +453,7 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ { // trait SuperTrait { type T; } let candidate = if self.probe_trait_that_defines_assoc_item( trait_ref.def_id(), - assoc_kind, + assoc_tag, constraint.ident, ) { // Simple case: The assoc item is defined in the current trait. @@ -259,7 +464,7 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ { self.probe_single_bound_for_assoc_item( || traits::supertraits(tcx, trait_ref), AssocItemQSelf::Trait(trait_ref.def_id()), - assoc_kind, + assoc_tag, constraint.ident, path_span, Some(constraint), @@ -269,7 +474,7 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ { let assoc_item = self .probe_assoc_item( constraint.ident, - assoc_kind, + assoc_tag, hir_ref_id, constraint.span, candidate.def_id(), @@ -288,7 +493,7 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ { }) .or_insert(constraint.span); - let projection_term = if let ty::AssocKind::Fn = assoc_kind { + let projection_term = if let ty::AssocTag::Fn = assoc_tag { let bound_vars = tcx.late_bound_vars(constraint.hir_id); ty::Binder::bind_with_vars( self.lower_return_type_notation_ty(candidate, assoc_item.def_id, path_span)?.into(), @@ -337,7 +542,7 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ { }; match constraint.kind { - hir::AssocItemConstraintKind::Equality { .. } if let ty::AssocKind::Fn = assoc_kind => { + hir::AssocItemConstraintKind::Equality { .. } if let ty::AssocTag::Fn = assoc_tag => { return Err(self.dcx().emit_err(crate::errors::ReturnTypeNotationEqualityBound { span: constraint.span, })); @@ -474,7 +679,7 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ { trait_def_id, hir_ty.span, item_segment, - ty::AssocKind::Type, + ty::AssocTag::Type, ); return Ty::new_error(tcx, guar); }; @@ -566,7 +771,7 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ { ) }, AssocItemQSelf::SelfTyAlias, - ty::AssocKind::Fn, + ty::AssocTag::Fn, assoc_ident, span, None, @@ -578,7 +783,7 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ { ) => self.probe_single_ty_param_bound_for_assoc_item( param_did.expect_local(), qself.span, - ty::AssocKind::Fn, + ty::AssocTag::Fn, assoc_ident, span, )?, @@ -618,7 +823,7 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ { let trait_def_id = bound.def_id(); let assoc_ty = self - .probe_assoc_item(assoc_ident, ty::AssocKind::Fn, qpath_hir_id, span, trait_def_id) + .probe_assoc_item(assoc_ident, ty::AssocTag::Fn, qpath_hir_id, span, trait_def_id) .expect("failed to find associated type"); Ok((bound, assoc_ty.def_id)) @@ -684,7 +889,7 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ { return Err(self.dcx().emit_err(crate::errors::ReturnTypeNotationOnNonRpitit { span: path_span, ty: tcx.liberate_late_bound_regions(item_def_id, output), - fn_span: tcx.hir().span_if_local(item_def_id), + fn_span: tcx.hir_span_if_local(item_def_id), note: (), })); }; @@ -791,7 +996,7 @@ struct GenericParamAndBoundVarCollector<'a, 'tcx> { impl<'tcx> TypeVisitor> for GenericParamAndBoundVarCollector<'_, 'tcx> { type Result = ControlFlow; - fn visit_binder>>( + fn visit_binder>>( &mut self, binder: &ty::Binder<'tcx, T>, ) -> Self::Result { diff --git a/compiler/rustc_hir_analysis/src/hir_ty_lowering/cmse.rs b/compiler/rustc_hir_analysis/src/hir_ty_lowering/cmse.rs index 170500c7a1628..d1ee5a5494c00 100644 --- a/compiler/rustc_hir_analysis/src/hir_ty_lowering/cmse.rs +++ b/compiler/rustc_hir_analysis/src/hir_ty_lowering/cmse.rs @@ -32,7 +32,7 @@ pub(crate) fn validate_cmse_abi<'tcx>( span, .. }) => *span, - _ => tcx.hir().span(hir_id), + _ => tcx.hir_span(hir_id), }; struct_span_code_err!( tcx.dcx(), @@ -49,13 +49,13 @@ pub(crate) fn validate_cmse_abi<'tcx>( Ok(Err(index)) => { // fn(x: u32, u32, u32, u16, y: u16) -> u32, // ^^^^^^ - let span = if let Some(ident) = bare_fn_ty.param_names[index] { + let span = if let Some(ident) = bare_fn_ty.param_idents[index] { ident.span.to(bare_fn_ty.decl.inputs[index].span) } else { bare_fn_ty.decl.inputs[index].span } .to(bare_fn_ty.decl.inputs.last().unwrap().span); - let plural = bare_fn_ty.param_names.len() - index != 1; + let plural = bare_fn_ty.param_idents.len() - index != 1; dcx.emit_err(errors::CmseInputsStackSpill { span, plural, abi }); } Err(layout_err) => { diff --git a/compiler/rustc_hir_analysis/src/hir_ty_lowering/dyn_compatibility.rs b/compiler/rustc_hir_analysis/src/hir_ty_lowering/dyn_compatibility.rs index 27643e715e6b1..f6e5149bd2b17 100644 --- a/compiler/rustc_hir_analysis/src/hir_ty_lowering/dyn_compatibility.rs +++ b/compiler/rustc_hir_analysis/src/hir_ty_lowering/dyn_compatibility.rs @@ -16,6 +16,7 @@ use smallvec::{SmallVec, smallvec}; use tracing::{debug, instrument}; use super::HirTyLowerer; +use crate::errors::SelfInTypeAlias; use crate::hir_ty_lowering::{ GenericArgCountMismatch, GenericArgCountResult, PredicateFilter, RegionInferReason, }; @@ -57,6 +58,18 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ { } } + let ast_bounds: Vec<_> = + hir_bounds.iter().map(|&trait_ref| hir::GenericBound::Trait(trait_ref)).collect(); + + self.add_default_traits_with_filter( + &mut user_written_bounds, + dummy_self, + &ast_bounds, + None, + span, + |tr| tr != hir::LangItem::Sized, + ); + let (elaborated_trait_bounds, elaborated_projection_bounds) = traits::expand_trait_aliases(tcx, user_written_bounds.iter().copied()); let (regular_traits, mut auto_traits): (Vec<_>, Vec<_>) = elaborated_trait_bounds @@ -113,6 +126,19 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ { // ``` let mut projection_bounds = FxIndexMap::default(); for (proj, proj_span) in elaborated_projection_bounds { + let proj = proj.map_bound(|mut b| { + if let Some(term_ty) = &b.term.as_type() { + let references_self = term_ty.walk().any(|arg| arg == dummy_self.into()); + if references_self { + // With trait alias and type alias combined, type resolver + // may not be able to catch all illegal `Self` usages (issue 139082) + let guar = tcx.dcx().emit_err(SelfInTypeAlias { span }); + b.term = replace_dummy_self_with_error(tcx, b.term, guar); + } + } + b + }); + let key = ( proj.skip_binder().projection_term.def_id, tcx.anonymize_bound_vars( @@ -146,7 +172,14 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ { let principal_trait = regular_traits.into_iter().next(); - let mut needed_associated_types = vec![]; + // A stable ordering of associated types from the principal trait and all its + // supertraits. We use this to ensure that different substitutions of a trait + // don't result in `dyn Trait` types with different projections lists, which + // can be unsound: . + // We achieve a stable ordering by walking over the unsubstituted principal + // trait ref. + let mut ordered_associated_types = vec![]; + if let Some((principal_trait, ref spans)) = principal_trait { let principal_trait = principal_trait.map_bound(|trait_pred| { assert_eq!(trait_pred.polarity, ty::PredicatePolarity::Positive); @@ -171,16 +204,13 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ { // FIXME(negative_bounds): Handle this correctly... let trait_ref = tcx.anonymize_bound_vars(bound_predicate.rebind(pred.trait_ref)); - needed_associated_types.extend( + ordered_associated_types.extend( tcx.associated_items(pred.trait_ref.def_id) .in_definition_order() // We only care about associated types. - .filter(|item| item.kind == ty::AssocKind::Type) + .filter(|item| item.is_type()) // No RPITITs -- they're not dyn-compatible for now. .filter(|item| !item.is_impl_trait_in_trait()) - // If the associated type has a `where Self: Sized` bound, - // we do not need to constrain the associated type. - .filter(|item| !tcx.generics_require_sized_self(item.def_id)) .map(|item| (item.def_id, trait_ref)), ); } @@ -252,14 +282,26 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ { } } + // We compute the list of projection bounds taking the ordered associated types, + // and check if there was an entry in the collected `projection_bounds`. Those + // are computed by first taking the user-written associated types, then elaborating + // the principal trait ref, and only using those if there was no user-written. + // See note below about how we handle missing associated types with `Self: Sized`, + // which are not required to be provided, but are still used if they are provided. let mut missing_assoc_types = FxIndexSet::default(); - let projection_bounds: Vec<_> = needed_associated_types + let projection_bounds: Vec<_> = ordered_associated_types .into_iter() .filter_map(|key| { if let Some(assoc) = projection_bounds.get(&key) { Some(*assoc) } else { - missing_assoc_types.insert(key); + // If the associated type has a `where Self: Sized` bound, then + // we do not need to provide the associated type. This results in + // a `dyn Trait` type that has a different number of projection + // bounds, which may lead to type mismatches. + if !tcx.generics_require_sized_self(key.0) { + missing_assoc_types.insert(key); + } None } }) @@ -389,7 +431,7 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ { self.lower_lifetime(lifetime, RegionInferReason::ExplicitObjectLifetime) } else { let reason = - if let hir::LifetimeName::ImplicitObjectLifetimeDefault = lifetime.res { + if let hir::LifetimeKind::ImplicitObjectLifetimeDefault = lifetime.kind { if let hir::Node::Ty(hir::Ty { kind: hir::TyKind::Ref(parent_lifetime, _), .. diff --git a/compiler/rustc_hir_analysis/src/hir_ty_lowering/errors.rs b/compiler/rustc_hir_analysis/src/hir_ty_lowering/errors.rs index 3b007c7719880..3759a224ff75b 100644 --- a/compiler/rustc_hir_analysis/src/hir_ty_lowering/errors.rs +++ b/compiler/rustc_hir_analysis/src/hir_ty_lowering/errors.rs @@ -116,8 +116,8 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ { &self, all_candidates: impl Fn() -> I, qself: AssocItemQSelf, - assoc_kind: ty::AssocKind, - assoc_name: Ident, + assoc_tag: ty::AssocTag, + assoc_ident: Ident, span: Span, constraint: Option<&hir::AssocItemConstraint<'tcx>>, ) -> ErrorGuaranteed @@ -129,31 +129,35 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ { // First and foremost, provide a more user-friendly & “intuitive” error on kind mismatches. if let Some(assoc_item) = all_candidates().find_map(|r| { tcx.associated_items(r.def_id()) - .filter_by_name_unhygienic(assoc_name.name) - .find(|item| tcx.hygienic_eq(assoc_name, item.ident(tcx), r.def_id())) + .filter_by_name_unhygienic(assoc_ident.name) + .find(|item| tcx.hygienic_eq(assoc_ident, item.ident(tcx), r.def_id())) }) { return self.complain_about_assoc_kind_mismatch( - assoc_item, assoc_kind, assoc_name, span, constraint, + assoc_item, + assoc_tag, + assoc_ident, + span, + constraint, ); } - let assoc_kind_str = assoc_kind_str(assoc_kind); + let assoc_kind_str = assoc_tag_str(assoc_tag); let qself_str = qself.to_string(tcx); // The fallback span is needed because `assoc_name` might be an `Fn()`'s `Output` without a // valid span, so we point at the whole path segment instead. - let is_dummy = assoc_name.span == DUMMY_SP; + let is_dummy = assoc_ident.span == DUMMY_SP; let mut err = errors::AssocItemNotFound { - span: if is_dummy { span } else { assoc_name.span }, - assoc_name, + span: if is_dummy { span } else { assoc_ident.span }, + assoc_ident, assoc_kind: assoc_kind_str, qself: &qself_str, label: None, sugg: None, // Try to get the span of the identifier within the path's syntax context // (if that's different). - within_macro_span: assoc_name.span.within_macro(span, tcx.sess.source_map()), + within_macro_span: assoc_ident.span.within_macro(span, tcx.sess.source_map()), }; if is_dummy { @@ -164,15 +168,19 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ { let all_candidate_names: Vec<_> = all_candidates() .flat_map(|r| tcx.associated_items(r.def_id()).in_definition_order()) .filter_map(|item| { - (!item.is_impl_trait_in_trait() && item.kind == assoc_kind).then_some(item.name) + if !item.is_impl_trait_in_trait() && item.as_tag() == assoc_tag { + item.opt_name() + } else { + None + } }) .collect(); if let Some(suggested_name) = - find_best_match_for_name(&all_candidate_names, assoc_name.name, None) + find_best_match_for_name(&all_candidate_names, assoc_ident.name, None) { err.sugg = Some(errors::AssocItemNotFoundSugg::Similar { - span: assoc_name.span, + span: assoc_ident.span, assoc_kind: assoc_kind_str, suggested_name, }); @@ -196,12 +204,12 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ { .iter() .flat_map(|trait_def_id| tcx.associated_items(*trait_def_id).in_definition_order()) .filter_map(|item| { - (!item.is_impl_trait_in_trait() && item.kind == assoc_kind).then_some(item.name) + (!item.is_impl_trait_in_trait() && item.as_tag() == assoc_tag).then(|| item.name()) }) .collect(); if let Some(suggested_name) = - find_best_match_for_name(&wider_candidate_names, assoc_name.name, None) + find_best_match_for_name(&wider_candidate_names, assoc_ident.name, None) { if let [best_trait] = visible_traits .iter() @@ -209,17 +217,17 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ { .filter(|trait_def_id| { tcx.associated_items(trait_def_id) .filter_by_name_unhygienic(suggested_name) - .any(|item| item.kind == assoc_kind) + .any(|item| item.as_tag() == assoc_tag) }) .collect::>()[..] { let trait_name = tcx.def_path_str(best_trait); err.label = Some(errors::AssocItemNotFoundLabel::FoundInOtherTrait { - span: assoc_name.span, + span: assoc_ident.span, assoc_kind: assoc_kind_str, trait_name: &trait_name, suggested_name, - identically_named: suggested_name == assoc_name.name, + identically_named: suggested_name == assoc_ident.name, }); if let AssocItemQSelf::TyParam(ty_param_def_id, ty_param_span) = qself // Not using `self.item_def_id()` here as that would yield the opaque type itself if we're @@ -246,7 +254,7 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ { // The type param already has a bound for `trait_name`, we just need to // change the associated item. err.sugg = Some(errors::AssocItemNotFoundSugg::SimilarInOtherTrait { - span: assoc_name.span, + span: assoc_ident.span, assoc_kind: assoc_kind_str, suggested_name, }); @@ -265,7 +273,7 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ { Applicability::MaybeIncorrect }; - let identically_named = suggested_name == assoc_name.name; + let identically_named = suggested_name == assoc_ident.name; if let DefKind::TyAlias = tcx.def_kind(item_def_id) && !tcx.type_alias_is_lazy(item_def_id) @@ -273,7 +281,7 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ { err.sugg = Some(errors::AssocItemNotFoundSugg::SimilarInOtherTraitQPath { lo: ty_param_span.shrink_to_lo(), mi: ty_param_span.shrink_to_hi(), - hi: (!identically_named).then_some(assoc_name.span), + hi: (!identically_named).then_some(assoc_ident.span), trait_ref, identically_named, suggested_name, @@ -294,7 +302,7 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ { // We suggested constraining a type parameter, but the associated item on it // was also not an exact match, so we also suggest changing it. err.span_suggestion_verbose( - assoc_name.span, + assoc_ident.span, fluent::hir_analysis_assoc_item_not_found_similar_in_other_trait_with_bound_sugg, suggested_name, Applicability::MaybeIncorrect, @@ -311,13 +319,13 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ { // suggest using it. if let [candidate_name] = all_candidate_names.as_slice() { err.sugg = Some(errors::AssocItemNotFoundSugg::Other { - span: assoc_name.span, + span: assoc_ident.span, qself: &qself_str, assoc_kind: assoc_kind_str, suggested_name: *candidate_name, }); } else { - err.label = Some(errors::AssocItemNotFoundLabel::NotFound { span: assoc_name.span }); + err.label = Some(errors::AssocItemNotFoundLabel::NotFound { span: assoc_ident.span }); } self.dcx().emit_err(err) @@ -326,14 +334,14 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ { fn complain_about_assoc_kind_mismatch( &self, assoc_item: &ty::AssocItem, - assoc_kind: ty::AssocKind, + assoc_tag: ty::AssocTag, ident: Ident, span: Span, constraint: Option<&hir::AssocItemConstraint<'tcx>>, ) -> ErrorGuaranteed { let tcx = self.tcx(); - let bound_on_assoc_const_label = if let ty::AssocKind::Const = assoc_item.kind + let bound_on_assoc_const_label = if let ty::AssocKind::Const { .. } = assoc_item.kind && let Some(constraint) = constraint && let hir::AssocItemConstraintKind::Bound { .. } = constraint.kind { @@ -371,17 +379,17 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ { hir::Term::Ty(ty) => ty.span, hir::Term::Const(ct) => ct.span(), }; - (span, Some(ident.span), assoc_item.kind, assoc_kind) + (span, Some(ident.span), assoc_item.as_tag(), assoc_tag) } else { - (ident.span, None, assoc_kind, assoc_item.kind) + (ident.span, None, assoc_tag, assoc_item.as_tag()) }; self.dcx().emit_err(errors::AssocKindMismatch { span, - expected: assoc_kind_str(expected), - got: assoc_kind_str(got), + expected: assoc_tag_str(expected), + got: assoc_tag_str(got), expected_because_label, - assoc_kind: assoc_kind_str(assoc_item.kind), + assoc_kind: assoc_tag_str(assoc_item.as_tag()), def_span: tcx.def_span(assoc_item.def_id), bound_on_assoc_const_label, wrap_in_braces_sugg, @@ -394,9 +402,9 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ { types: &[String], traits: &[String], name: Symbol, - kind: ty::AssocKind, + assoc_tag: ty::AssocTag, ) -> ErrorGuaranteed { - let kind_str = assoc_kind_str(kind); + let kind_str = assoc_tag_str(assoc_tag); let mut err = struct_span_code_err!(self.dcx(), span, E0223, "ambiguous associated {kind_str}"); if self @@ -565,7 +573,7 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ { candidates: Vec<(DefId, (DefId, DefId))>, fulfillment_errors: Vec>, span: Span, - kind: ty::AssocKind, + assoc_tag: ty::AssocTag, ) -> ErrorGuaranteed { // FIXME(fmease): This was copied in parts from an old version of `rustc_hir_typeck::method::suggest`. // Either @@ -575,14 +583,14 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ { let tcx = self.tcx(); - let kind_str = assoc_kind_str(kind); + let assoc_tag_str = assoc_tag_str(assoc_tag); let adt_did = self_ty.ty_adt_def().map(|def| def.did()); let add_def_label = |err: &mut Diag<'_>| { if let Some(did) = adt_did { err.span_label( tcx.def_span(did), format!( - "associated {kind_str} `{name}` not found for this {}", + "associated {assoc_tag_str} `{name}` not found for this {}", tcx.def_descr(did) ), ); @@ -611,11 +619,11 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ { self.dcx(), name.span, E0220, - "associated {kind_str} `{name}` not found for `{self_ty}` in the current scope" + "associated {assoc_tag_str} `{name}` not found for `{self_ty}` in the current scope" ); err.span_label(name.span, format!("associated item not found in `{self_ty}`")); err.note(format!( - "the associated {kind_str} was found for\n{type_candidates}{additional_types}", + "the associated {assoc_tag_str} was found for\n{type_candidates}{additional_types}", )); add_def_label(&mut err); return err.emit(); @@ -696,7 +704,7 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ { let mut err = self.dcx().struct_span_err( name.span, - format!("the associated {kind_str} `{name}` exists for `{self_ty}`, but its trait bounds were not satisfied") + format!("the associated {assoc_tag_str} `{name}` exists for `{self_ty}`, but its trait bounds were not satisfied") ); if !bounds.is_empty() { err.note(format!( @@ -706,7 +714,7 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ { } err.span_label( name.span, - format!("associated {kind_str} cannot be referenced on `{self_ty}` due to unsatisfied trait bounds") + format!("associated {assoc_tag_str} cannot be referenced on `{self_ty}` due to unsatisfied trait bounds") ); for (span, mut bounds) in bound_spans { @@ -757,7 +765,7 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ { // `issue-22560.rs`. let mut dyn_compatibility_violations = Ok(()); for (assoc_item, trait_ref) in &missing_assoc_types { - names.entry(trait_ref).or_default().push(assoc_item.name); + names.entry(trait_ref).or_default().push(assoc_item.name()); names_len += 1; let violations = @@ -805,10 +813,10 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ { return None; }; - let assoc_item = tcx.associated_items(trait_def).find_by_name_and_kind( + let assoc_item = tcx.associated_items(trait_def).find_by_ident_and_kind( tcx, ident, - ty::AssocKind::Type, + ty::AssocTag::Type, trait_def, ); @@ -848,16 +856,17 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ { let mut names: UnordMap<_, usize> = Default::default(); for (item, _) in &missing_assoc_types { types_count += 1; - *names.entry(item.name).or_insert(0) += 1; + *names.entry(item.name()).or_insert(0) += 1; } let mut dupes = false; let mut shadows = false; for (item, trait_ref) in &missing_assoc_types { - let prefix = if names[&item.name] > 1 { + let name = item.name(); + let prefix = if names[&name] > 1 { let trait_def_id = trait_ref.def_id(); dupes = true; format!("{}::", tcx.def_path_str(trait_def_id)) - } else if bound_names.get(&item.name).is_some_and(|x| *x != item) { + } else if bound_names.get(&name).is_some_and(|x| *x != item) { let trait_def_id = trait_ref.def_id(); shadows = true; format!("{}::", tcx.def_path_str(trait_def_id)) @@ -867,7 +876,7 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ { let mut is_shadowed = false; - if let Some(assoc_item) = bound_names.get(&item.name) + if let Some(assoc_item) = bound_names.get(&name) && *assoc_item != item { is_shadowed = true; @@ -876,17 +885,14 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ { if assoc_item.def_id.is_local() { ", consider renaming it" } else { "" }; err.span_label( tcx.def_span(assoc_item.def_id), - format!("`{}{}` shadowed here{}", prefix, item.name, rename_message), + format!("`{}{}` shadowed here{}", prefix, name, rename_message), ); } let rename_message = if is_shadowed { ", consider renaming it" } else { "" }; - if let Some(sp) = tcx.hir().span_if_local(item.def_id) { - err.span_label( - sp, - format!("`{}{}` defined here{}", prefix, item.name, rename_message), - ); + if let Some(sp) = tcx.hir_span_if_local(item.def_id) { + err.span_label(sp, format!("`{}{}` defined here{}", prefix, name, rename_message)); } } if potential_assoc_types.len() == missing_assoc_types.len() { @@ -899,7 +905,7 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ { { let types: Vec<_> = missing_assoc_types .iter() - .map(|(item, _)| format!("{} = Type", item.name)) + .map(|(item, _)| format!("{} = Type", item.name())) .collect(); let code = if let Some(snippet) = snippet.strip_suffix('>') { // The user wrote `Trait<'a>` or similar and we don't have a type we can @@ -934,16 +940,17 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ { let mut names: FxIndexMap<_, usize> = FxIndexMap::default(); for (item, _) in &missing_assoc_types { types_count += 1; - *names.entry(item.name).or_insert(0) += 1; + *names.entry(item.name()).or_insert(0) += 1; } let mut label = vec![]; for (item, trait_ref) in &missing_assoc_types { - let postfix = if names[&item.name] > 1 { + let name = item.name(); + let postfix = if names[&name] > 1 { format!(" (from trait `{}`)", trait_ref.print_trait_sugared()) } else { String::new() }; - label.push(format!("`{}`{}", item.name, postfix)); + label.push(format!("`{}`{}", name, postfix)); } if !label.is_empty() { err.span_label( @@ -1018,12 +1025,13 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ { .map(|simple_ty| tcx.incoherent_impls(simple_ty)) }) && let name = Symbol::intern(&format!("{ident2}_{ident3}")) - && let Some(ty::AssocItem { kind: ty::AssocKind::Fn, .. }) = inherent_impls + && let Some(item) = inherent_impls .iter() .flat_map(|inherent_impl| { tcx.associated_items(inherent_impl).filter_by_name_unhygienic(name) }) .next() + && item.is_fn() { Err(struct_span_code_err!(self.dcx(), span, E0223, "ambiguous associated type") .with_span_suggestion_verbose( @@ -1381,7 +1389,7 @@ pub(crate) fn fn_trait_to_string( .find_map(|c| { if c.ident.name == sym::Output && let Some(ty) = c.ty() - && ty.span != tcx.hir().span(trait_segment.hir_id) + && ty.span != tcx.hir_span(trait_segment.hir_id) { tcx.sess.source_map().span_to_snippet(ty.span).ok() } else { @@ -1625,10 +1633,10 @@ fn generics_args_err_extend<'a>( } } -pub(crate) fn assoc_kind_str(kind: ty::AssocKind) -> &'static str { - match kind { - ty::AssocKind::Fn => "function", - ty::AssocKind::Const => "constant", - ty::AssocKind::Type => "type", +pub(crate) fn assoc_tag_str(assoc_tag: ty::AssocTag) -> &'static str { + match assoc_tag { + ty::AssocTag::Fn => "function", + ty::AssocTag::Const => "constant", + ty::AssocTag::Type => "type", } } diff --git a/compiler/rustc_hir_analysis/src/hir_ty_lowering/generics.rs b/compiler/rustc_hir_analysis/src/hir_ty_lowering/generics.rs index 60a60f6415a22..21f0f9648ea30 100644 --- a/compiler/rustc_hir_analysis/src/hir_ty_lowering/generics.rs +++ b/compiler/rustc_hir_analysis/src/hir_ty_lowering/generics.rs @@ -92,7 +92,7 @@ fn generic_arg_mismatch_err( GenericArg::Type(hir::Ty { kind: hir::TyKind::Array(_, len), .. }), GenericParamDefKind::Const { .. }, ) if tcx.type_of(param.def_id).skip_binder() == tcx.types.usize => { - let snippet = sess.source_map().span_to_snippet(tcx.hir().span(len.hir_id)); + let snippet = sess.source_map().span_to_snippet(tcx.hir_span(len.hir_id)); if let Ok(snippet) = snippet { err.span_suggestion( arg.span(), diff --git a/compiler/rustc_hir_analysis/src/hir_ty_lowering/lint.rs b/compiler/rustc_hir_analysis/src/hir_ty_lowering/lint.rs index 5588631228441..483b61add3380 100644 --- a/compiler/rustc_hir_analysis/src/hir_ty_lowering/lint.rs +++ b/compiler/rustc_hir_analysis/src/hir_ty_lowering/lint.rs @@ -86,6 +86,7 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ { "expected a type, found a trait" ); if self_ty.span.can_be_used_for_suggestions() + && poly_trait_ref.trait_ref.trait_def_id().is_some() && !self.maybe_suggest_impl_trait(self_ty, &mut diag) && !self.maybe_suggest_dyn_trait(self_ty, sugg, &mut diag) { @@ -500,8 +501,8 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ { let names: Vec<_> = tcx .associated_items(trait_def_id) .in_definition_order() - .filter(|assoc| assoc.kind.namespace() == Namespace::ValueNS) - .map(|cand| cand.name) + .filter(|assoc| assoc.namespace() == Namespace::ValueNS) + .map(|cand| cand.name()) .collect(); if let Some(typo) = find_best_match_for_name(&names, segment.ident.name, None) { diag.span_suggestion_verbose( diff --git a/compiler/rustc_hir_analysis/src/hir_ty_lowering/mod.rs b/compiler/rustc_hir_analysis/src/hir_ty_lowering/mod.rs index 76a880da41855..5e79e93201531 100644 --- a/compiler/rustc_hir_analysis/src/hir_ty_lowering/mod.rs +++ b/compiler/rustc_hir_analysis/src/hir_ty_lowering/mod.rs @@ -29,7 +29,7 @@ use rustc_errors::codes::*; use rustc_errors::{ Applicability, Diag, DiagCtxtHandle, ErrorGuaranteed, FatalError, struct_span_code_err, }; -use rustc_hir::def::{CtorKind, CtorOf, DefKind, Namespace, Res}; +use rustc_hir::def::{CtorKind, CtorOf, DefKind, Res}; use rustc_hir::def_id::{DefId, LocalDefId}; use rustc_hir::{self as hir, AnonConst, GenericArg, GenericArgs, HirId}; use rustc_infer::infer::{InferCtxt, TyCtxtInferExt}; @@ -38,8 +38,8 @@ use rustc_middle::middle::stability::AllowUnstable; use rustc_middle::mir::interpret::LitToConstInput; use rustc_middle::ty::print::PrintPolyTraitRefExt as _; use rustc_middle::ty::{ - self, Const, GenericArgKind, GenericArgsRef, GenericParamDefKind, ParamEnv, Ty, TyCtxt, - TypeVisitableExt, TypingMode, Upcast, fold_regions, + self, AssocTag, Const, GenericArgKind, GenericArgsRef, GenericParamDefKind, ParamEnv, Ty, + TyCtxt, TypeVisitableExt, TypingMode, Upcast, fold_regions, }; use rustc_middle::{bug, span_bug}; use rustc_session::lint::builtin::AMBIGUOUS_ASSOCIATED_ITEMS; @@ -51,7 +51,7 @@ use rustc_trait_selection::traits::wf::object_region_bounds; use rustc_trait_selection::traits::{self, ObligationCtxt}; use tracing::{debug, instrument}; -use self::errors::assoc_kind_str; +use self::errors::assoc_tag_str; use crate::check::check_abi_fn_ptr; use crate::errors::{AmbiguousLifetimeBound, BadReturnTypeNotation, NoVariantNamed}; use crate::hir_ty_lowering::errors::{GenericsArgsErrExtend, prohibit_assoc_item_constraint}; @@ -147,7 +147,7 @@ pub trait HirTyLowerer<'tcx> { &self, span: Span, def_id: LocalDefId, - assoc_name: Ident, + assoc_ident: Ident, ) -> ty::EarlyBinder<'tcx, &'tcx [(ty::Clause<'tcx>, Span)]>; /// Lower an associated type/const (from a trait) to a projection. @@ -168,7 +168,7 @@ pub trait HirTyLowerer<'tcx> { item_def_id: DefId, item_segment: &hir::PathSegment<'tcx>, poly_trait_ref: ty::PolyTraitRef<'tcx>, - kind: ty::AssocKind, + assoc_tag: ty::AssocTag, ) -> Result<(DefId, GenericArgsRef<'tcx>), ErrorGuaranteed>; fn lower_fn_sig( @@ -251,10 +251,10 @@ enum LowerAssocMode { } impl LowerAssocMode { - fn kind(self) -> ty::AssocKind { + fn assoc_tag(self) -> ty::AssocTag { match self { - LowerAssocMode::Type { .. } => ty::AssocKind::Type, - LowerAssocMode::Const => ty::AssocKind::Const, + LowerAssocMode::Type { .. } => ty::AssocTag::Type, + LowerAssocMode::Const => ty::AssocTag::Const, } } @@ -268,7 +268,8 @@ impl LowerAssocMode { fn permit_variants(self) -> bool { match self { LowerAssocMode::Type { permit_variants } => permit_variants, - // FIXME(mgca): Support paths like `Option::::None` or `Option::::Some` which resolve to const ctors/fn items respectively + // FIXME(mgca): Support paths like `Option::::None` or `Option::::Some` which + // resolve to const ctors/fn items respectively. LowerAssocMode::Const => false, } } @@ -838,7 +839,7 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ { | PredicateFilter::SelfOnly | PredicateFilter::SelfAndAssociatedTypeBounds => { match constness { - hir::BoundConstness::Always(span) => { + hir::BoundConstness::Always(_) => { if polarity == ty::PredicatePolarity::Positive { bounds.push(( poly_trait_ref @@ -864,7 +865,7 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ { // in `lower_assoc_item_constraint`. PredicateFilter::ConstIfConst | PredicateFilter::SelfConstIfConst => { match constness { - hir::BoundConstness::Maybe(span) => { + hir::BoundConstness::Maybe(_) => { if polarity == ty::PredicatePolarity::Positive { bounds.push(( poly_trait_ref @@ -932,12 +933,12 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ { fn probe_trait_that_defines_assoc_item( &self, trait_def_id: DefId, - assoc_kind: ty::AssocKind, - assoc_name: Ident, + assoc_tag: AssocTag, + assoc_ident: Ident, ) -> bool { self.tcx() .associated_items(trait_def_id) - .find_by_name_and_kind(self.tcx(), assoc_name, assoc_kind, trait_def_id) + .find_by_ident_and_kind(self.tcx(), assoc_ident, assoc_tag, trait_def_id) .is_some() } @@ -957,14 +958,14 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ { // feature `lazy_type_alias` enabled get encoded as a type alias that normalization will // then actually instantiate the where bounds of. let alias_ty = ty::AliasTy::new_from_args(tcx, did, args); - Ty::new_alias(tcx, ty::Weak, alias_ty) + Ty::new_alias(tcx, ty::Free, alias_ty) } else { tcx.at(span).type_of(did).instantiate(tcx, args) } } /// Search for a trait bound on a type parameter whose trait defines the associated item - /// given by `assoc_name` and `kind`. + /// given by `assoc_ident` and `kind`. /// /// This fails if there is no such bound in the list of candidates or if there are multiple /// candidates in which case it reports ambiguity. @@ -975,14 +976,14 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ { &self, ty_param_def_id: LocalDefId, ty_param_span: Span, - kind: ty::AssocKind, - assoc_name: Ident, + assoc_tag: AssocTag, + assoc_ident: Ident, span: Span, ) -> Result, ErrorGuaranteed> { - debug!(?ty_param_def_id, ?assoc_name, ?span); + debug!(?ty_param_def_id, ?assoc_ident, ?span); let tcx = self.tcx(); - let predicates = &self.probe_ty_param_bounds(span, ty_param_def_id, assoc_name); + let predicates = &self.probe_ty_param_bounds(span, ty_param_def_id, assoc_ident); debug!("predicates={:#?}", predicates); self.probe_single_bound_for_assoc_item( @@ -990,17 +991,18 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ { let trait_refs = predicates .iter_identity_copied() .filter_map(|(p, _)| Some(p.as_trait_clause()?.map_bound(|t| t.trait_ref))); - traits::transitive_bounds_that_define_assoc_item(tcx, trait_refs, assoc_name) + traits::transitive_bounds_that_define_assoc_item(tcx, trait_refs, assoc_ident) }, AssocItemQSelf::TyParam(ty_param_def_id, ty_param_span), - kind, - assoc_name, + assoc_tag, + assoc_ident, span, None, ) } - /// Search for a single trait bound whose trait defines the associated item given by `assoc_name`. + /// Search for a single trait bound whose trait defines the associated item given by + /// `assoc_ident`. /// /// This fails if there is no such bound in the list of candidates or if there are multiple /// candidates in which case it reports ambiguity. @@ -1009,8 +1011,8 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ { &self, all_candidates: impl Fn() -> I, qself: AssocItemQSelf, - assoc_kind: ty::AssocKind, - assoc_name: Ident, + assoc_tag: AssocTag, + assoc_ident: Ident, span: Span, constraint: Option<&hir::AssocItemConstraint<'tcx>>, ) -> Result, ErrorGuaranteed> @@ -1020,15 +1022,15 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ { let tcx = self.tcx(); let mut matching_candidates = all_candidates().filter(|r| { - self.probe_trait_that_defines_assoc_item(r.def_id(), assoc_kind, assoc_name) + self.probe_trait_that_defines_assoc_item(r.def_id(), assoc_tag, assoc_ident) }); let Some(bound) = matching_candidates.next() else { let reported = self.complain_about_assoc_item_not_found( all_candidates, qself, - assoc_kind, - assoc_name, + assoc_tag, + assoc_ident, span, constraint, ); @@ -1039,12 +1041,12 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ { if let Some(bound2) = matching_candidates.next() { debug!(?bound2); - let assoc_kind_str = errors::assoc_kind_str(assoc_kind); + let assoc_kind_str = errors::assoc_tag_str(assoc_tag); let qself_str = qself.to_string(tcx); let mut err = self.dcx().create_err(crate::errors::AmbiguousAssocItem { span, assoc_kind: assoc_kind_str, - assoc_name, + assoc_ident, qself: &qself_str, }); // Provide a more specific error code index entry for equality bindings. @@ -1058,20 +1060,21 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ { }, ); - // FIXME(#97583): Print associated item bindings properly (i.e., not as equality predicates!). + // FIXME(#97583): Print associated item bindings properly (i.e., not as equality + // predicates!). // FIXME: Turn this into a structured, translateable & more actionable suggestion. let mut where_bounds = vec![]; for bound in [bound, bound2].into_iter().chain(matching_candidates) { let bound_id = bound.def_id(); let bound_span = tcx .associated_items(bound_id) - .find_by_name_and_kind(tcx, assoc_name, assoc_kind, bound_id) - .and_then(|item| tcx.hir().span_if_local(item.def_id)); + .find_by_ident_and_kind(tcx, assoc_ident, assoc_tag, bound_id) + .and_then(|item| tcx.hir_span_if_local(item.def_id)); if let Some(bound_span) = bound_span { err.span_label( bound_span, - format!("ambiguous `{assoc_name}` from `{}`", bound.print_trait_sugared(),), + format!("ambiguous `{assoc_ident}` from `{}`", bound.print_trait_sugared(),), ); if let Some(constraint) = constraint { match constraint.kind { @@ -1087,7 +1090,7 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ { } // FIXME(#97583): This isn't syntactically well-formed! where_bounds.push(format!( - " T: {trait}::{assoc_name} = {term}", + " T: {trait}::{assoc_ident} = {term}", trait = bound.print_only_trait_path(), )); } @@ -1096,7 +1099,7 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ { } } else { err.span_suggestion_verbose( - span.with_hi(assoc_name.span.lo()), + span.with_hi(assoc_ident.span.lo()), "use fully-qualified syntax to disambiguate", format!("<{qself_str} as {}>::", bound.print_only_trait_path()), Applicability::MaybeIncorrect, @@ -1104,7 +1107,7 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ { } } else { err.note(format!( - "associated {assoc_kind_str} `{assoc_name}` could derive from `{}`", + "associated {assoc_kind_str} `{assoc_ident}` could derive from `{}`", bound.print_only_trait_path(), )); } @@ -1264,7 +1267,7 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ { qself_ty, hir_ref_id, span, - mode.kind(), + mode.assoc_tag(), )? { return Ok(LoweredAssoc::Term(did, args)); } @@ -1295,7 +1298,7 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ { ) }, AssocItemQSelf::SelfTyAlias, - mode.kind(), + mode.assoc_tag(), assoc_ident, span, None, @@ -1307,12 +1310,12 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ { ) => self.probe_single_ty_param_bound_for_assoc_item( param_did.expect_local(), qself.span, - mode.kind(), + mode.assoc_tag(), assoc_ident, span, )?, _ => { - let kind_str = assoc_kind_str(mode.kind()); + let kind_str = assoc_tag_str(mode.assoc_tag()); let reported = if variant_resolution.is_some() { // Variant in type position let msg = format!("expected {kind_str}, found variant `{assoc_ident}`"); @@ -1400,7 +1403,7 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ { ); } - if let Some(sp) = tcx.hir().span_if_local(adt_def.did()) { + if let Some(sp) = tcx.hir_span_if_local(adt_def.did()) { err.span_label(sp, format!("variant `{assoc_ident}` not found here")); } @@ -1419,7 +1422,7 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ { &[qself_ty.to_string()], &traits, assoc_ident.name, - mode.kind(), + mode.assoc_tag(), ) }; return Err(reported); @@ -1428,10 +1431,15 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ { let trait_did = bound.def_id(); let assoc_item = self - .probe_assoc_item(assoc_ident, mode.kind(), hir_ref_id, span, trait_did) + .probe_assoc_item(assoc_ident, mode.assoc_tag(), hir_ref_id, span, trait_did) .expect("failed to find associated item"); - let (def_id, args) = - self.lower_assoc_shared(span, assoc_item.def_id, assoc_segment, bound, mode.kind())?; + let (def_id, args) = self.lower_assoc_shared( + span, + assoc_item.def_id, + assoc_segment, + bound, + mode.assoc_tag(), + )?; let result = LoweredAssoc::Term(def_id, args); if let Some(variant_def_id) = variant_resolution { @@ -1468,20 +1476,21 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ { self_ty: Ty<'tcx>, block: HirId, span: Span, - kind: ty::AssocKind, + assoc_tag: ty::AssocTag, ) -> Result)>, ErrorGuaranteed> { let tcx = self.tcx(); if !tcx.features().inherent_associated_types() { - match kind { - // Don't attempt to look up inherent associated types when the feature is not enabled. - // Theoretically it'd be fine to do so since we feature-gate their definition site. - // However, due to current limitations of the implementation (caused by us performing - // selection during HIR ty lowering instead of in the trait solver), IATs can lead to cycle - // errors (#108491) which mask the feature-gate error, needlessly confusing users - // who use IATs by accident (#113265). - ty::AssocKind::Type => return Ok(None), - ty::AssocKind::Const => { + match assoc_tag { + // Don't attempt to look up inherent associated types when the feature is not + // enabled. Theoretically it'd be fine to do so since we feature-gate their + // definition site. However, due to current limitations of the implementation + // (caused by us performing selection during HIR ty lowering instead of in the + // trait solver), IATs can lead to cycle errors (#108491) which mask the + // feature-gate error, needlessly confusing users who use IATs by accident + // (#113265). + ty::AssocTag::Type => return Ok(None), + ty::AssocTag::Const => { // We also gate the mgca codepath for type-level uses of inherent consts // with the inherent_associated_types feature gate since it relies on the // same machinery and has similar rough edges. @@ -1493,7 +1502,7 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ { ) .emit()); } - ty::AssocKind::Fn => unreachable!(), + ty::AssocTag::Fn => unreachable!(), } } @@ -1502,7 +1511,8 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ { .inherent_impls(adt_did) .iter() .filter_map(|&impl_| { - let (item, scope) = self.probe_assoc_item_unchecked(name, kind, block, impl_)?; + let (item, scope) = + self.probe_assoc_item_unchecked(name, assoc_tag, block, impl_)?; Some((impl_, (item.def_id, scope))) }) .collect(); @@ -1541,7 +1551,7 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ { self_ty, |self_ty| { self.select_inherent_assoc_candidates( - infcx, name, span, self_ty, param_env, candidates, kind, + infcx, name, span, self_ty, param_env, candidates, assoc_tag, ) }, )?; @@ -1569,7 +1579,7 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ { self_ty: Ty<'tcx>, param_env: ParamEnv<'tcx>, candidates: Vec<(DefId, (DefId, DefId))>, - kind: ty::AssocKind, + assoc_tag: ty::AssocTag, ) -> Result<(DefId, (DefId, DefId)), ErrorGuaranteed> { let tcx = self.tcx(); let mut fulfillment_errors = Vec::new(); @@ -1620,7 +1630,7 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ { candidates, fulfillment_errors, span, - kind, + assoc_tag, )), &[applicable_candidate] => Ok(applicable_candidate), @@ -1639,12 +1649,12 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ { fn probe_assoc_item( &self, ident: Ident, - kind: ty::AssocKind, + assoc_tag: ty::AssocTag, block: HirId, span: Span, scope: DefId, ) -> Option { - let (item, scope) = self.probe_assoc_item_unchecked(ident, kind, block, scope)?; + let (item, scope) = self.probe_assoc_item_unchecked(ident, assoc_tag, block, scope)?; self.check_assoc_item(item.def_id, ident, scope, block, span); Some(item) } @@ -1656,7 +1666,7 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ { fn probe_assoc_item_unchecked( &self, ident: Ident, - kind: ty::AssocKind, + assoc_tag: ty::AssocTag, block: HirId, scope: DefId, ) -> Option<(ty::AssocItem, /*scope*/ DefId)> { @@ -1669,7 +1679,7 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ { let item = tcx .associated_items(scope) .filter_by_name_unhygienic(ident.name) - .find(|i| i.kind == kind && i.ident(tcx).normalize_to_macros_2_0() == ident)?; + .find(|i| i.as_tag() == assoc_tag && i.ident(tcx).normalize_to_macros_2_0() == ident)?; Some((*item, def_scope)) } @@ -1721,34 +1731,32 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ { tcx.associated_items(*trait_def_id) .in_definition_order() .any(|i| { - i.kind.namespace() == Namespace::TypeNS + i.is_type() + && !i.is_impl_trait_in_trait() && i.ident(tcx).normalize_to_macros_2_0() == assoc_ident - && matches!(i.kind, ty::AssocKind::Type) }) // Consider only accessible traits && tcx.visibility(*trait_def_id) .is_accessible_from(self.item_def_id(), tcx) && tcx.all_impls(*trait_def_id) .any(|impl_def_id| { - let impl_header = tcx.impl_trait_header(impl_def_id); - impl_header.is_some_and(|header| { - let trait_ref = header.trait_ref.instantiate( - tcx, - infcx.fresh_args_for_item(DUMMY_SP, impl_def_id), - ); - - let value = fold_regions(tcx, qself_ty, |_, _| tcx.lifetimes.re_erased); - // FIXME: Don't bother dealing with non-lifetime binders here... - if value.has_escaping_bound_vars() { - return false; - } - infcx - .can_eq( - ty::ParamEnv::empty(), - trait_ref.self_ty(), - value, - ) && header.polarity != ty::ImplPolarity::Negative - }) + let header = tcx.impl_trait_header(impl_def_id).unwrap(); + let trait_ref = header.trait_ref.instantiate( + tcx, + infcx.fresh_args_for_item(DUMMY_SP, impl_def_id), + ); + + let value = fold_regions(tcx, qself_ty, |_, _| tcx.lifetimes.re_erased); + // FIXME: Don't bother dealing with non-lifetime binders here... + if value.has_escaping_bound_vars() { + return false; + } + infcx + .can_eq( + ty::ParamEnv::empty(), + trait_ref.self_ty(), + value, + ) && header.polarity != ty::ImplPolarity::Negative }) }) .map(|trait_def_id| tcx.def_path_str(trait_def_id)) @@ -1762,7 +1770,7 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ { span: Span, opt_self_ty: Option>, item_def_id: DefId, - trait_segment: &hir::PathSegment<'tcx>, + trait_segment: Option<&hir::PathSegment<'tcx>>, item_segment: &hir::PathSegment<'tcx>, ) -> Ty<'tcx> { match self.lower_qpath_shared( @@ -1771,7 +1779,7 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ { item_def_id, trait_segment, item_segment, - ty::AssocKind::Type, + ty::AssocTag::Type, ) { Ok((item_def_id, item_args)) => { Ty::new_projection_from_args(self.tcx(), item_def_id, item_args) @@ -1787,7 +1795,7 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ { span: Span, opt_self_ty: Option>, item_def_id: DefId, - trait_segment: &hir::PathSegment<'tcx>, + trait_segment: Option<&hir::PathSegment<'tcx>>, item_segment: &hir::PathSegment<'tcx>, ) -> Const<'tcx> { match self.lower_qpath_shared( @@ -1796,7 +1804,7 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ { item_def_id, trait_segment, item_segment, - ty::AssocKind::Const, + ty::AssocTag::Const, ) { Ok((item_def_id, item_args)) => { let uv = ty::UnevaluatedConst::new(item_def_id, item_args); @@ -1812,9 +1820,9 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ { span: Span, opt_self_ty: Option>, item_def_id: DefId, - trait_segment: &hir::PathSegment<'tcx>, + trait_segment: Option<&hir::PathSegment<'tcx>>, item_segment: &hir::PathSegment<'tcx>, - kind: ty::AssocKind, + assoc_tag: ty::AssocTag, ) -> Result<(DefId, GenericArgsRef<'tcx>), ErrorGuaranteed> { let tcx = self.tcx(); @@ -1822,12 +1830,17 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ { debug!(?trait_def_id); let Some(self_ty) = opt_self_ty else { - return Err(self.error_missing_qpath_self_ty(trait_def_id, span, item_segment, kind)); + return Err(self.error_missing_qpath_self_ty( + trait_def_id, + span, + item_segment, + assoc_tag, + )); }; debug!(?self_ty); let trait_ref = - self.lower_mono_trait_ref(span, trait_def_id, self_ty, trait_segment, false); + self.lower_mono_trait_ref(span, trait_def_id, self_ty, trait_segment.unwrap(), false); debug!(?trait_ref); let item_args = @@ -1841,7 +1854,7 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ { trait_def_id: DefId, span: Span, item_segment: &hir::PathSegment<'tcx>, - kind: ty::AssocKind, + assoc_tag: ty::AssocTag, ) -> ErrorGuaranteed { let tcx = self.tcx(); let path_str = tcx.def_path_str(trait_def_id); @@ -1878,7 +1891,13 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ { // FIXME: also look at `tcx.generics_of(self.item_def_id()).params` any that // references the trait. Relevant for the first case in // `src/test/ui/associated-types/associated-types-in-ambiguous-context.rs` - self.report_ambiguous_assoc(span, &type_names, &[path_str], item_segment.ident.name, kind) + self.report_ambiguous_assoc( + span, + &type_names, + &[path_str], + item_segment.ident.name, + assoc_tag, + ) } pub fn prohibit_generic_args<'a>( @@ -2177,16 +2196,17 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ { } } Res::Def(DefKind::AssocTy, def_id) => { - debug_assert!(path.segments.len() >= 2); - let _ = self.prohibit_generic_args( - path.segments[..path.segments.len() - 2].iter(), - GenericsArgsErrExtend::None, - ); + let trait_segment = if let [modules @ .., trait_, _item] = path.segments { + let _ = self.prohibit_generic_args(modules.iter(), GenericsArgsErrExtend::None); + Some(trait_) + } else { + None + }; self.lower_qpath_ty( span, opt_self_ty, def_id, - &path.segments[path.segments.len() - 2], + trait_segment, path.segments.last().unwrap(), ) } @@ -2394,16 +2414,17 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ { ty::Const::new_unevaluated(tcx, ty::UnevaluatedConst::new(did, args)) } Res::Def(DefKind::AssocConst, did) => { - debug_assert!(path.segments.len() >= 2); - let _ = self.prohibit_generic_args( - path.segments[..path.segments.len() - 2].iter(), - GenericsArgsErrExtend::None, - ); + let trait_segment = if let [modules @ .., trait_, _item] = path.segments { + let _ = self.prohibit_generic_args(modules.iter(), GenericsArgsErrExtend::None); + Some(trait_) + } else { + None + }; self.lower_qpath_const( span, opt_self_ty, did, - &path.segments[path.segments.len() - 2], + trait_segment, path.segments.last().unwrap(), ) } @@ -2696,30 +2717,9 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ { hir::TyKind::Pat(ty, pat) => { let ty_span = ty.span; let ty = self.lower_ty(ty); - let pat_ty = match pat.kind { - hir::TyPatKind::Range(start, end) => { - let (ty, start, end) = match ty.kind() { - // Keep this list of types in sync with the list of types that - // the `RangePattern` trait is implemented for. - ty::Int(_) | ty::Uint(_) | ty::Char => { - let start = self.lower_const_arg(start, FeedConstTy::No); - let end = self.lower_const_arg(end, FeedConstTy::No); - (ty, start, end) - } - _ => { - let guar = self.dcx().span_delayed_bug( - ty_span, - "invalid base type for range pattern", - ); - let errc = ty::Const::new_error(tcx, guar); - (Ty::new_error(tcx, guar), errc, errc) - } - }; - - let pat = tcx.mk_pat(ty::PatternKind::Range { start, end }); - Ty::new_pat(tcx, ty, pat) - } - hir::TyPatKind::Err(e) => Ty::new_error(tcx, e), + let pat_ty = match self.lower_pat_ty_pat(ty, ty_span, pat) { + Ok(kind) => Ty::new_pat(tcx, ty, tcx.mk_pat(kind)), + Err(guar) => Ty::new_error(tcx, guar), }; self.record_ty(pat.hir_id, ty, pat.span); pat_ty @@ -2731,6 +2731,39 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ { result_ty } + fn lower_pat_ty_pat( + &self, + ty: Ty<'tcx>, + ty_span: Span, + pat: &hir::TyPat<'tcx>, + ) -> Result, ErrorGuaranteed> { + let tcx = self.tcx(); + match pat.kind { + hir::TyPatKind::Range(start, end) => { + match ty.kind() { + // Keep this list of types in sync with the list of types that + // the `RangePattern` trait is implemented for. + ty::Int(_) | ty::Uint(_) | ty::Char => { + let start = self.lower_const_arg(start, FeedConstTy::No); + let end = self.lower_const_arg(end, FeedConstTy::No); + Ok(ty::PatternKind::Range { start, end }) + } + _ => Err(self + .dcx() + .span_delayed_bug(ty_span, "invalid base type for range pattern")), + } + } + hir::TyPatKind::Or(patterns) => { + self.tcx() + .mk_patterns_from_iter(patterns.iter().map(|pat| { + self.lower_pat_ty_pat(ty, ty_span, pat).map(|pat| tcx.mk_pat(pat)) + })) + .map(ty::PatternKind::Or) + } + hir::TyPatKind::Err(e) => Err(e), + } + } + /// Lower an opaque type (i.e., an existential impl-Trait type) from the HIR. #[instrument(level = "debug", skip(self), ret)] fn lower_opaque_ty(&self, def_id: LocalDefId, in_trait: bool) -> Ty<'tcx> { @@ -2860,10 +2893,10 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ { let trait_ref = self.lower_impl_trait_ref(i.of_trait.as_ref()?, self.lower_ty(i.self_ty)); - let assoc = tcx.associated_items(trait_ref.def_id).find_by_name_and_kind( + let assoc = tcx.associated_items(trait_ref.def_id).find_by_ident_and_kind( tcx, *ident, - ty::AssocKind::Fn, + ty::AssocTag::Fn, trait_ref.def_id, )?; diff --git a/compiler/rustc_hir_analysis/src/hir_wf_check.rs b/compiler/rustc_hir_analysis/src/hir_wf_check.rs index e27a81d4976f4..64c1a78bd1c8b 100644 --- a/compiler/rustc_hir_analysis/src/hir_wf_check.rs +++ b/compiler/rustc_hir_analysis/src/hir_wf_check.rs @@ -4,7 +4,7 @@ use rustc_infer::infer::TyCtxtInferExt; use rustc_infer::traits::{ObligationCause, WellFormedLoc}; use rustc_middle::bug; use rustc_middle::query::Providers; -use rustc_middle::ty::{self, TyCtxt, TypingMode, fold_regions}; +use rustc_middle::ty::{self, TyCtxt, TypeVisitableExt, TypingMode, fold_regions}; use rustc_span::def_id::LocalDefId; use rustc_trait_selection::traits::{self, ObligationCtxt}; use tracing::debug; @@ -77,6 +77,15 @@ fn diagnostic_hir_wf_check<'tcx>( let tcx_ty = fold_regions(self.tcx, tcx_ty, |r, _| { if r.is_bound() { self.tcx.lifetimes.re_erased } else { r } }); + + // We may be checking the WFness of a type in an opaque with a non-lifetime bound. + // Perhaps we could rebind all the escaping bound vars, but they're coming from + // arbitrary debruijn indices and aren't particularly important anyways, since they + // are only coming from `feature(non_lifetime_binders)` anyways. + if tcx_ty.has_escaping_bound_vars() { + return; + } + let cause = traits::ObligationCause::new( ty.span, self.def_id, @@ -170,7 +179,7 @@ fn diagnostic_hir_wf_check<'tcx>( .. }) => vec![*ty], hir::Node::AnonConst(_) => { - if let Some(const_param_id) = tcx.hir().opt_const_param_default_param_def_id(hir_id) + if let Some(const_param_id) = tcx.hir_opt_const_param_default_param_def_id(hir_id) && let hir::Node::GenericParam(hir::GenericParam { kind: hir::GenericParamKind::Const { ty, .. }, .. diff --git a/compiler/rustc_hir_analysis/src/impl_wf_check.rs b/compiler/rustc_hir_analysis/src/impl_wf_check.rs index c30b39dfe7656..cbdc501291bc8 100644 --- a/compiler/rustc_hir_analysis/src/impl_wf_check.rs +++ b/compiler/rustc_hir_analysis/src/impl_wf_check.rs @@ -112,14 +112,14 @@ pub(crate) fn enforce_impl_lifetime_params_are_constrained( .flat_map(|def_id| { let item = tcx.associated_item(def_id); match item.kind { - ty::AssocKind::Type => { + ty::AssocKind::Type { .. } => { if item.defaultness(tcx).has_value() { cgp::parameters_for(tcx, tcx.type_of(def_id).instantiate_identity(), true) } else { vec![] } } - ty::AssocKind::Fn | ty::AssocKind::Const => vec![], + ty::AssocKind::Fn { .. } | ty::AssocKind::Const { .. } => vec![], } }) .collect(); diff --git a/compiler/rustc_hir_analysis/src/impl_wf_check/min_specialization.rs b/compiler/rustc_hir_analysis/src/impl_wf_check/min_specialization.rs index af1107b499f4e..309221f9a127a 100644 --- a/compiler/rustc_hir_analysis/src/impl_wf_check/min_specialization.rs +++ b/compiler/rustc_hir_analysis/src/impl_wf_check/min_specialization.rs @@ -374,9 +374,12 @@ fn check_predicates<'tcx>( // Include the well-formed predicates of the type parameters of the impl. for arg in tcx.impl_trait_ref(impl1_def_id).unwrap().instantiate_identity().args { + let Some(term) = arg.as_term() else { + continue; + }; let infcx = &tcx.infer_ctxt().build(TypingMode::non_body_analysis()); let obligations = - wf::obligations(infcx, tcx.param_env(impl1_def_id), impl1_def_id, 0, arg, span) + wf::obligations(infcx, tcx.param_env(impl1_def_id), impl1_def_id, 0, term, span) .unwrap(); assert!(!obligations.has_infer()); diff --git a/compiler/rustc_hir_analysis/src/lib.rs b/compiler/rustc_hir_analysis/src/lib.rs index e7ecd727a852f..91dde13be550e 100644 --- a/compiler/rustc_hir_analysis/src/lib.rs +++ b/compiler/rustc_hir_analysis/src/lib.rs @@ -59,7 +59,7 @@ This API is completely unstable and subject to change. #![allow(internal_features)] #![allow(rustc::diagnostic_outside_of_impl)] #![allow(rustc::untranslatable_diagnostic)] -#![cfg_attr(doc, recursion_limit = "256")] // FIXME(nnethercote): will be removed by #124141 +#![cfg_attr(bootstrap, feature(let_chains))] #![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")] #![doc(rust_logo)] #![feature(assert_matches)] @@ -68,7 +68,6 @@ This API is completely unstable and subject to change. #![feature(if_let_guard)] #![feature(iter_from_coroutine)] #![feature(iter_intersperse)] -#![feature(let_chains)] #![feature(never_type)] #![feature(rustdoc_internals)] #![feature(slice_partition_dedup)] @@ -217,7 +216,7 @@ pub fn check_crate(tcx: TyCtxt<'_>) { check::maybe_check_static_with_link_section(tcx, item_def_id); } DefKind::Const if tcx.generics_of(item_def_id).is_empty() => { - let instance = ty::Instance::new(item_def_id.into(), ty::GenericArgs::empty()); + let instance = ty::Instance::new_raw(item_def_id.into(), ty::GenericArgs::empty()); let cid = GlobalId { instance, promoted: None }; let typing_env = ty::TypingEnv::fully_monomorphized(); tcx.ensure_ok().eval_to_const_value_raw(typing_env.as_query_input(cid)); diff --git a/compiler/rustc_hir_analysis/src/outlives/implicit_infer.rs b/compiler/rustc_hir_analysis/src/outlives/implicit_infer.rs index a0faa5e8429e5..c99eb12efcca2 100644 --- a/compiler/rustc_hir_analysis/src/outlives/implicit_infer.rs +++ b/compiler/rustc_hir_analysis/src/outlives/implicit_infer.rs @@ -24,8 +24,8 @@ pub(super) fn infer_predicates( // If new predicates were added then we need to re-calculate // all crates since there could be new implied predicates. - loop { - let mut predicates_added = false; + for i in 0.. { + let mut predicates_added = vec![]; // Visit all the crates and infer predicates for id in tcx.hir_free_items() { @@ -83,14 +83,27 @@ pub(super) fn infer_predicates( .get(&item_did.to_def_id()) .map_or(0, |p| p.as_ref().skip_binder().len()); if item_required_predicates.len() > item_predicates_len { - predicates_added = true; + predicates_added.push(item_did); global_inferred_outlives .insert(item_did.to_def_id(), ty::EarlyBinder::bind(item_required_predicates)); } } - if !predicates_added { + if predicates_added.is_empty() { + // We've reached a fixed point. break; + } else if !tcx.recursion_limit().value_within_limit(i) { + let msg = if let &[id] = &predicates_added[..] { + format!("overflow computing implied lifetime bounds for `{}`", tcx.def_path_str(id),) + } else { + "overflow computing implied lifetime bounds".to_string() + }; + tcx.dcx() + .struct_span_fatal( + predicates_added.iter().map(|id| tcx.def_span(*id)).collect::>(), + msg, + ) + .emit(); } } @@ -144,10 +157,10 @@ fn insert_required_predicates_to_be_wf<'tcx>( ); } - ty::Alias(ty::Weak, alias) => { + ty::Alias(ty::Free, alias) => { // This corresponds to a type like `Type<'a, T>`. // We check inferred and explicit predicates. - debug!("Weak"); + debug!("Free"); check_inferred_predicates( tcx, alias.def_id, diff --git a/compiler/rustc_hir_analysis/src/outlives/mod.rs b/compiler/rustc_hir_analysis/src/outlives/mod.rs index 32b05dcc569ef..daa908c8c78e4 100644 --- a/compiler/rustc_hir_analysis/src/outlives/mod.rs +++ b/compiler/rustc_hir_analysis/src/outlives/mod.rs @@ -25,7 +25,7 @@ fn inferred_outlives_of(tcx: TyCtxt<'_>, item_def_id: LocalDefId) -> &[(ty::Clau } DefKind::AnonConst if tcx.features().generic_const_exprs() => { let id = tcx.local_def_id_to_hir_id(item_def_id); - if tcx.hir().opt_const_param_default_param_def_id(id).is_some() { + if tcx.hir_opt_const_param_default_param_def_id(id).is_some() { // In `generics_of` we set the generics' parent to be our parent's parent which means that // we lose out on the predicates of our actual parent if we dont return those predicates here. // (See comment in `generics_of` for more information on why the parent shenanigans is necessary) diff --git a/compiler/rustc_hir_analysis/src/outlives/utils.rs b/compiler/rustc_hir_analysis/src/outlives/utils.rs index d0a2a2230ab77..044fb64ca8216 100644 --- a/compiler/rustc_hir_analysis/src/outlives/utils.rs +++ b/compiler/rustc_hir_analysis/src/outlives/utils.rs @@ -146,7 +146,7 @@ pub(crate) fn insert_outlives_predicate<'tcx>( fn is_free_region(region: Region<'_>) -> bool { // First, screen for regions that might appear in a type header. - match *region { + match region.kind() { // These correspond to `T: 'a` relationships: // // struct Foo<'a, T> { diff --git a/compiler/rustc_hir_analysis/src/variance/constraints.rs b/compiler/rustc_hir_analysis/src/variance/constraints.rs index 8475903c68fde..92cfece77c473 100644 --- a/compiler/rustc_hir_analysis/src/variance/constraints.rs +++ b/compiler/rustc_hir_analysis/src/variance/constraints.rs @@ -107,7 +107,7 @@ impl<'a, 'tcx> ConstraintContext<'a, 'tcx> { let current_item = &CurrentItem { inferred_start }; let ty = tcx.type_of(def_id).instantiate_identity(); - // The type as returned by `type_of` is the underlying type and generally not a weak projection. + // The type as returned by `type_of` is the underlying type and generally not a free alias. // Therefore we need to check the `DefKind` first. if let DefKind::TyAlias = tcx.def_kind(def_id) && tcx.type_alias_is_lazy(def_id) @@ -251,12 +251,7 @@ impl<'a, 'tcx> ConstraintContext<'a, 'tcx> { } ty::Pat(typ, pat) => { - match *pat { - ty::PatternKind::Range { start, end } => { - self.add_constraints_from_const(current, start, variance); - self.add_constraints_from_const(current, end, variance); - } - } + self.add_constraints_from_pat(current, variance, pat); self.add_constraints_from_ty(current, typ, variance); } @@ -282,7 +277,7 @@ impl<'a, 'tcx> ConstraintContext<'a, 'tcx> { self.add_constraints_from_invariant_args(current, data.args, variance); } - ty::Alias(ty::Weak, ref data) => { + ty::Alias(ty::Free, ref data) => { self.add_constraints_from_args(current, data.def_id, data.args, variance); } @@ -334,6 +329,25 @@ impl<'a, 'tcx> ConstraintContext<'a, 'tcx> { } } + fn add_constraints_from_pat( + &mut self, + current: &CurrentItem, + variance: VarianceTermPtr<'a>, + pat: ty::Pattern<'tcx>, + ) { + match *pat { + ty::PatternKind::Range { start, end } => { + self.add_constraints_from_const(current, start, variance); + self.add_constraints_from_const(current, end, variance); + } + ty::PatternKind::Or(patterns) => { + for pat in patterns { + self.add_constraints_from_pat(current, variance, pat) + } + } + } + } + /// Adds constraints appropriate for a nominal type (enum, struct, /// object, etc) appearing in a context with ambient variance `variance` fn add_constraints_from_args( @@ -428,7 +442,7 @@ impl<'a, 'tcx> ConstraintContext<'a, 'tcx> { region: ty::Region<'tcx>, variance: VarianceTermPtr<'a>, ) { - match *region { + match region.kind() { ty::ReEarlyParam(ref data) => { self.add_constraint(current, data.index, variance); } diff --git a/compiler/rustc_hir_analysis/src/variance/mod.rs b/compiler/rustc_hir_analysis/src/variance/mod.rs index 0800d99e9452e..dbba45dc7bb58 100644 --- a/compiler/rustc_hir_analysis/src/variance/mod.rs +++ b/compiler/rustc_hir_analysis/src/variance/mod.rs @@ -44,13 +44,13 @@ fn variances_of(tcx: TyCtxt<'_>, item_def_id: LocalDefId) -> &[ty::Variance] { return &[]; } - match tcx.def_kind(item_def_id) { + let kind = tcx.def_kind(item_def_id); + match kind { DefKind::Fn | DefKind::AssocFn | DefKind::Enum | DefKind::Struct | DefKind::Union - | DefKind::Variant | DefKind::Ctor(..) => { // These are inferred. let crate_map = tcx.crate_variances(()); @@ -89,7 +89,11 @@ fn variances_of(tcx: TyCtxt<'_>, item_def_id: LocalDefId) -> &[ty::Variance] { } // Variance not relevant. - span_bug!(tcx.def_span(item_def_id), "asked to compute variance for wrong kind of item"); + span_bug!( + tcx.def_span(item_def_id), + "asked to compute variance for {}", + kind.descr(item_def_id.to_def_id()) + ); } #[derive(Debug, Copy, Clone)] diff --git a/compiler/rustc_hir_pretty/src/lib.rs b/compiler/rustc_hir_pretty/src/lib.rs index 1c23761b2e5bf..09bf84ab64fbe 100644 --- a/compiler/rustc_hir_pretty/src/lib.rs +++ b/compiler/rustc_hir_pretty/src/lib.rs @@ -2,7 +2,7 @@ //! the definitions in this file have equivalents in `rustc_ast_pretty`. // tidy-alphabetical-start -#![feature(let_chains)] +#![cfg_attr(bootstrap, feature(let_chains))] #![recursion_limit = "256"] // tidy-alphabetical-end @@ -13,7 +13,7 @@ use rustc_abi::ExternAbi; use rustc_ast::util::parser::{self, ExprPrecedence, Fixity}; use rustc_ast::{AttrStyle, DUMMY_NODE_ID, DelimArgs}; use rustc_ast_pretty::pp::Breaks::{Consistent, Inconsistent}; -use rustc_ast_pretty::pp::{self, Breaks}; +use rustc_ast_pretty::pp::{self, BoxMarker, Breaks}; use rustc_ast_pretty::pprust::state::MacHeader; use rustc_ast_pretty::pprust::{Comments, PrintState}; use rustc_attr_data_structures::{AttributeKind, PrintAttribute}; @@ -110,6 +110,7 @@ impl<'a> State<'a> { } self.print_attr_item(&unparsed, unparsed.span); self.word("]"); + self.hardbreak() } hir::Attribute::Parsed(AttributeKind::DocComment { style, kind, comment, .. }) => { self.word(rustc_ast_pretty::pprust::state::doc_comment_to_string( @@ -127,7 +128,7 @@ impl<'a> State<'a> { } fn print_attr_item(&mut self, item: &hir::AttrItem, span: Span) { - self.ibox(0); + let ib = self.ibox(0); let path = ast::Path { span, segments: item @@ -146,6 +147,7 @@ impl<'a> State<'a> { false, None, *delim, + None, &tokens, true, span, @@ -161,7 +163,7 @@ impl<'a> State<'a> { self.word(token_str); } } - self.end(); + self.end(ib); } fn print_node(&mut self, node: Node<'_>) { @@ -182,7 +184,7 @@ impl<'a> State<'a> { Node::Ty(a) => self.print_type(a), Node::AssocItemConstraint(a) => self.print_assoc_item_constraint(a), Node::TraitRef(a) => self.print_trait_ref(a), - Node::OpaqueTy(o) => self.print_opaque_ty(o), + Node::OpaqueTy(_) => panic!("cannot print Node::OpaqueTy"), Node::Pat(a) => self.print_pat(a), Node::TyPat(a) => self.print_ty_pat(a), Node::PatField(a) => self.print_patfield(a), @@ -192,10 +194,10 @@ impl<'a> State<'a> { Node::PreciseCapturingNonLifetimeArg(param) => self.print_ident(param.ident), Node::Block(a) => { // Containing cbox, will be closed by print-block at `}`. - self.cbox(INDENT_UNIT); + let cb = self.cbox(INDENT_UNIT); // Head-ibox, will be closed by print-block after `{`. - self.ibox(0); - self.print_block(a); + let ib = self.ibox(0); + self.print_block(a, cb, ib); } Node::Lifetime(a) => self.print_lifetime(a), Node::GenericParam(_) => panic!("cannot print Node::GenericParam"), @@ -315,17 +317,17 @@ pub fn item_to_string(ann: &dyn PpAnn, pat: &hir::Item<'_>) -> String { } impl<'a> State<'a> { - fn bclose_maybe_open(&mut self, span: rustc_span::Span, close_box: bool) { + fn bclose_maybe_open(&mut self, span: rustc_span::Span, cb: Option) { self.maybe_print_comment(span.hi()); self.break_offset_if_not_bol(1, -INDENT_UNIT); self.word("}"); - if close_box { - self.end(); // close the outer-box + if let Some(cb) = cb { + self.end(cb); } } - fn bclose(&mut self, span: rustc_span::Span) { - self.bclose_maybe_open(span, true) + fn bclose(&mut self, span: rustc_span::Span, cb: BoxMarker) { + self.bclose_maybe_open(span, Some(cb)) } fn commasep_cmnt(&mut self, b: Breaks, elts: &[T], mut op: F, mut get_span: G) @@ -333,7 +335,7 @@ impl<'a> State<'a> { F: FnMut(&mut State<'_>, &T), G: FnMut(&T) -> rustc_span::Span, { - self.rbox(0, b); + let rb = self.rbox(0, b); let len = elts.len(); let mut i = 0; for elt in elts { @@ -346,7 +348,7 @@ impl<'a> State<'a> { self.space_if_not_bol(); } } - self.end(); + self.end(rb); } fn commasep_exprs(&mut self, b: Breaks, exprs: &[hir::Expr<'_>]) { @@ -369,7 +371,7 @@ impl<'a> State<'a> { fn print_type(&mut self, ty: &hir::Ty<'_>) { self.maybe_print_comment(ty.span.lo()); - self.ibox(0); + let ib = self.ibox(0); match ty.kind { hir::TyKind::Slice(ty) => { self.word("["); @@ -397,7 +399,7 @@ impl<'a> State<'a> { self.pclose(); } hir::TyKind::BareFn(f) => { - self.print_ty_fn(f.abi, f.safety, f.decl, None, f.generic_params, f.param_names); + self.print_ty_fn(f.abi, f.safety, f.decl, None, f.generic_params, f.param_idents); } hir::TyKind::UnsafeBinder(unsafe_binder) => { self.print_unsafe_binder(unsafe_binder); @@ -456,16 +458,16 @@ impl<'a> State<'a> { self.print_ty_pat(pat); } } - self.end() + self.end(ib) } fn print_unsafe_binder(&mut self, unsafe_binder: &hir::UnsafeBinderTy<'_>) { - self.ibox(INDENT_UNIT); + let ib = self.ibox(INDENT_UNIT); self.word("unsafe"); self.print_generic_params(unsafe_binder.generic_params); self.nbsp(); self.print_type(unsafe_binder.inner_ty); - self.end(); + self.end(ib); } fn print_foreign_item(&mut self, item: &hir::ForeignItem<'_>) { @@ -473,23 +475,23 @@ impl<'a> State<'a> { self.maybe_print_comment(item.span.lo()); self.print_attrs_as_outer(self.attrs(item.hir_id())); match item.kind { - hir::ForeignItemKind::Fn(sig, arg_names, generics) => { - self.head(""); + hir::ForeignItemKind::Fn(sig, arg_idents, generics) => { + let (cb, ib) = self.head(""); self.print_fn( sig.decl, sig.header, Some(item.ident.name), generics, - arg_names, + arg_idents, None, ); - self.end(); // end head-ibox + self.end(ib); self.word(";"); - self.end() // end the outer fn box + self.end(cb) } hir::ForeignItemKind::Static(t, m, safety) => { self.print_safety(safety); - self.head("static"); + let (cb, ib) = self.head("static"); if m.is_mut() { self.word_space("mut"); } @@ -497,15 +499,15 @@ impl<'a> State<'a> { self.word_space(":"); self.print_type(t); self.word(";"); - self.end(); // end the head-ibox - self.end() // end the outer cbox + self.end(ib); + self.end(cb) } hir::ForeignItemKind::Type => { - self.head("type"); + let (cb, ib) = self.head("type"); self.print_ident(item.ident); self.word(";"); - self.end(); // end the head-ibox - self.end() // end the outer cbox + self.end(ib); + self.end(cb) } } } @@ -561,7 +563,7 @@ impl<'a> State<'a> { self.ann.pre(self, AnnNode::Item(item)); match item.kind { hir::ItemKind::ExternCrate(orig_name, ident) => { - self.head("extern crate"); + let (cb, ib) = self.head("extern crate"); if let Some(orig_name) = orig_name { self.print_name(orig_name); self.space(); @@ -570,11 +572,11 @@ impl<'a> State<'a> { } self.print_ident(ident); self.word(";"); - self.end(); // end inner head-block - self.end(); // end outer head-block + self.end(ib); + self.end(cb); } hir::ItemKind::Use(path, kind) => { - self.head("use"); + let (cb, ib) = self.head("use"); self.print_path(path, false); match kind { @@ -589,11 +591,11 @@ impl<'a> State<'a> { hir::UseKind::Glob => self.word("::*;"), hir::UseKind::ListStem => self.word("::{};"), } - self.end(); // end inner head-block - self.end(); // end outer head-block + self.end(ib); + self.end(cb); } hir::ItemKind::Static(ident, ty, m, expr) => { - self.head("static"); + let (cb, ib) = self.head("static"); if m.is_mut() { self.word_space("mut"); } @@ -601,85 +603,87 @@ impl<'a> State<'a> { self.word_space(":"); self.print_type(ty); self.space(); - self.end(); // end the head-ibox + self.end(ib); self.word_space("="); self.ann.nested(self, Nested::Body(expr)); self.word(";"); - self.end(); // end the outer cbox + self.end(cb); } hir::ItemKind::Const(ident, ty, generics, expr) => { - self.head("const"); + let (cb, ib) = self.head("const"); self.print_ident(ident); self.print_generic_params(generics.params); self.word_space(":"); self.print_type(ty); self.space(); - self.end(); // end the head-ibox + self.end(ib); self.word_space("="); self.ann.nested(self, Nested::Body(expr)); self.print_where_clause(generics); self.word(";"); - self.end(); // end the outer cbox + self.end(cb); } hir::ItemKind::Fn { ident, sig, generics, body, .. } => { - self.head(""); + let (cb, ib) = self.head(""); self.print_fn(sig.decl, sig.header, Some(ident.name), generics, &[], Some(body)); self.word(" "); - self.end(); // need to close a box - self.end(); // need to close a box + self.end(ib); + self.end(cb); self.ann.nested(self, Nested::Body(body)); } hir::ItemKind::Macro(ident, macro_def, _) => { self.print_mac_def(macro_def, &ident, item.span, |_| {}); } hir::ItemKind::Mod(ident, mod_) => { - self.head("mod"); + let (cb, ib) = self.head("mod"); self.print_ident(ident); self.nbsp(); - self.bopen(); + self.bopen(ib); self.print_mod(mod_, attrs); - self.bclose(item.span); + self.bclose(item.span, cb); } hir::ItemKind::ForeignMod { abi, items } => { - self.head("extern"); + let (cb, ib) = self.head("extern"); self.word_nbsp(abi.to_string()); - self.bopen(); + self.bopen(ib); self.print_attrs_as_inner(self.attrs(item.hir_id())); for item in items { self.ann.nested(self, Nested::ForeignItem(item.id)); } - self.bclose(item.span); + self.bclose(item.span, cb); } hir::ItemKind::GlobalAsm { asm, .. } => { - self.head("global_asm!"); + let (cb, ib) = self.head("global_asm!"); self.print_inline_asm(asm); - self.end() + self.word(";"); + self.end(cb); + self.end(ib); } hir::ItemKind::TyAlias(ident, ty, generics) => { - self.head("type"); + let (cb, ib) = self.head("type"); self.print_ident(ident); self.print_generic_params(generics.params); - self.end(); // end the inner ibox + self.end(ib); self.print_where_clause(generics); self.space(); self.word_space("="); self.print_type(ty); self.word(";"); - self.end(); // end the outer ibox + self.end(cb); } hir::ItemKind::Enum(ident, ref enum_definition, params) => { self.print_enum_def(enum_definition, params, ident.name, item.span); } hir::ItemKind::Struct(ident, ref struct_def, generics) => { - self.head("struct"); - self.print_struct(struct_def, generics, ident.name, item.span, true); + let (cb, ib) = self.head("struct"); + self.print_struct(struct_def, generics, ident.name, item.span, true, cb, ib); } hir::ItemKind::Union(ident, ref struct_def, generics) => { - self.head("union"); - self.print_struct(struct_def, generics, ident.name, item.span, true); + let (cb, ib) = self.head("union"); + self.print_struct(struct_def, generics, ident.name, item.span, true, cb, ib); } hir::ItemKind::Impl(&hir::Impl { constness, @@ -692,7 +696,7 @@ impl<'a> State<'a> { self_ty, items, }) => { - self.head(""); + let (cb, ib) = self.head(""); self.print_defaultness(defaultness); self.print_safety(safety); self.word_nbsp("impl"); @@ -720,15 +724,15 @@ impl<'a> State<'a> { self.print_where_clause(generics); self.space(); - self.bopen(); + self.bopen(ib); self.print_attrs_as_inner(attrs); for impl_item in items { self.ann.nested(self, Nested::ImplItem(impl_item.id)); } - self.bclose(item.span); + self.bclose(item.span, cb); } hir::ItemKind::Trait(is_auto, safety, ident, generics, bounds, trait_items) => { - self.head(""); + let (cb, ib) = self.head(""); self.print_is_auto(is_auto); self.print_safety(safety); self.word_nbsp("trait"); @@ -737,22 +741,22 @@ impl<'a> State<'a> { self.print_bounds(":", bounds); self.print_where_clause(generics); self.word(" "); - self.bopen(); + self.bopen(ib); for trait_item in trait_items { self.ann.nested(self, Nested::TraitItem(trait_item.id)); } - self.bclose(item.span); + self.bclose(item.span, cb); } hir::ItemKind::TraitAlias(ident, generics, bounds) => { - self.head("trait"); + let (cb, ib) = self.head("trait"); self.print_ident(ident); self.print_generic_params(generics.params); self.nbsp(); self.print_bounds("=", bounds); self.print_where_clause(generics); self.word(";"); - self.end(); // end inner head-block - self.end(); // end outer head-block + self.end(ib); + self.end(cb); } } self.ann.post(self, AnnNode::Item(item)) @@ -762,13 +766,6 @@ impl<'a> State<'a> { self.print_path(t.path, false); } - fn print_opaque_ty(&mut self, o: &hir::OpaqueTy<'_>) { - self.head("opaque"); - self.word("{"); - self.print_bounds("impl", o.bounds); - self.word("}"); - } - fn print_formal_generic_params(&mut self, generic_params: &[hir::GenericParam<'_>]) { if !generic_params.is_empty() { self.word("for"); @@ -800,27 +797,33 @@ impl<'a> State<'a> { name: Symbol, span: rustc_span::Span, ) { - self.head("enum"); + let (cb, ib) = self.head("enum"); self.print_name(name); self.print_generic_params(generics.params); self.print_where_clause(generics); self.space(); - self.print_variants(enum_definition.variants, span); + self.print_variants(enum_definition.variants, span, cb, ib); } - fn print_variants(&mut self, variants: &[hir::Variant<'_>], span: rustc_span::Span) { - self.bopen(); + fn print_variants( + &mut self, + variants: &[hir::Variant<'_>], + span: rustc_span::Span, + cb: BoxMarker, + ib: BoxMarker, + ) { + self.bopen(ib); for v in variants { self.space_if_not_bol(); self.maybe_print_comment(v.span.lo()); self.print_attrs_as_outer(self.attrs(v.hir_id)); - self.ibox(INDENT_UNIT); + let ib = self.ibox(INDENT_UNIT); self.print_variant(v); self.word(","); - self.end(); + self.end(ib); self.maybe_print_trailing_comment(v.span, None); } - self.bclose(span) + self.bclose(span, cb) } fn print_defaultness(&mut self, defaultness: hir::Defaultness) { @@ -837,6 +840,8 @@ impl<'a> State<'a> { name: Symbol, span: rustc_span::Span, print_finalizer: bool, + cb: BoxMarker, + ib: BoxMarker, ) { self.print_name(name); self.print_generic_params(generics.params); @@ -855,38 +860,34 @@ impl<'a> State<'a> { if print_finalizer { self.word(";"); } - self.end(); - self.end() // close the outer-box + self.end(ib); + self.end(cb); } hir::VariantData::Struct { .. } => { self.print_where_clause(generics); - self.print_variant_struct(span, struct_def.fields()) - } - } - } - - fn print_variant_struct(&mut self, span: rustc_span::Span, fields: &[hir::FieldDef<'_>]) { - self.nbsp(); - self.bopen(); - self.hardbreak_if_not_bol(); + self.nbsp(); + self.bopen(ib); + self.hardbreak_if_not_bol(); + + for field in struct_def.fields() { + self.hardbreak_if_not_bol(); + self.maybe_print_comment(field.span.lo()); + self.print_attrs_as_outer(self.attrs(field.hir_id)); + self.print_ident(field.ident); + self.word_nbsp(":"); + self.print_type(field.ty); + self.word(","); + } - for field in fields { - self.hardbreak_if_not_bol(); - self.maybe_print_comment(field.span.lo()); - self.print_attrs_as_outer(self.attrs(field.hir_id)); - self.print_ident(field.ident); - self.word_nbsp(":"); - self.print_type(field.ty); - self.word(","); + self.bclose(span, cb) + } } - - self.bclose(span) } pub fn print_variant(&mut self, v: &hir::Variant<'_>) { - self.head(""); + let (cb, ib) = self.head(""); let generics = hir::Generics::empty(); - self.print_struct(&v.data, generics, v.ident.name, v.span, false); + self.print_struct(&v.data, generics, v.ident.name, v.span, false, cb, ib); if let Some(ref d) = v.disr_expr { self.space(); self.word_space("="); @@ -899,10 +900,10 @@ impl<'a> State<'a> { ident: Ident, m: &hir::FnSig<'_>, generics: &hir::Generics<'_>, - arg_names: &[Option], + arg_idents: &[Option], body_id: Option, ) { - self.print_fn(m.decl, m.header, Some(ident.name), generics, arg_names, body_id); + self.print_fn(m.decl, m.header, Some(ident.name), generics, arg_idents, body_id); } fn print_trait_item(&mut self, ti: &hir::TraitItem<'_>) { @@ -914,16 +915,16 @@ impl<'a> State<'a> { hir::TraitItemKind::Const(ty, default) => { self.print_associated_const(ti.ident, ti.generics, ty, default); } - hir::TraitItemKind::Fn(ref sig, hir::TraitFn::Required(arg_names)) => { - self.print_method_sig(ti.ident, sig, ti.generics, arg_names, None); + hir::TraitItemKind::Fn(ref sig, hir::TraitFn::Required(arg_idents)) => { + self.print_method_sig(ti.ident, sig, ti.generics, arg_idents, None); self.word(";"); } hir::TraitItemKind::Fn(ref sig, hir::TraitFn::Provided(body)) => { - self.head(""); + let (cb, ib) = self.head(""); self.print_method_sig(ti.ident, sig, ti.generics, &[], Some(body)); self.nbsp(); - self.end(); // need to close a box - self.end(); // need to close a box + self.end(ib); + self.end(cb); self.ann.nested(self, Nested::Body(body)); } hir::TraitItemKind::Type(bounds, default) => { @@ -944,11 +945,11 @@ impl<'a> State<'a> { self.print_associated_const(ii.ident, ii.generics, ty, Some(expr)); } hir::ImplItemKind::Fn(ref sig, body) => { - self.head(""); + let (cb, ib) = self.head(""); self.print_method_sig(ii.ident, sig, ii.generics, &[], Some(body)); self.nbsp(); - self.end(); // need to close a box - self.end(); // need to close a box + self.end(ib); + self.end(cb); self.ann.nested(self, Nested::Body(body)); } hir::ImplItemKind::Type(ty) => { @@ -960,17 +961,21 @@ impl<'a> State<'a> { fn print_local( &mut self, + super_: bool, init: Option<&hir::Expr<'_>>, els: Option<&hir::Block<'_>>, decl: impl Fn(&mut Self), ) { self.space_if_not_bol(); - self.ibox(INDENT_UNIT); + let ibm1 = self.ibox(INDENT_UNIT); + if super_ { + self.word_nbsp("super"); + } self.word_nbsp("let"); - self.ibox(INDENT_UNIT); + let ibm2 = self.ibox(INDENT_UNIT); decl(self); - self.end(); + self.end(ibm2); if let Some(init) = init { self.nbsp(); @@ -982,20 +987,22 @@ impl<'a> State<'a> { self.nbsp(); self.word_space("else"); // containing cbox, will be closed by print-block at `}` - self.cbox(0); + let cb = self.cbox(0); // head-box, will be closed by print-block after `{` - self.ibox(0); - self.print_block(els); + let ib = self.ibox(0); + self.print_block(els, cb, ib); } - self.end() + self.end(ibm1) } fn print_stmt(&mut self, st: &hir::Stmt<'_>) { self.maybe_print_comment(st.span.lo()); match st.kind { hir::StmtKind::Let(loc) => { - self.print_local(loc.init, loc.els, |this| this.print_local_decl(loc)); + self.print_local(loc.super_.is_some(), loc.init, loc.els, |this| { + this.print_local_decl(loc) + }); } hir::StmtKind::Item(item) => self.ann.nested(self, Nested::Item(item)), hir::StmtKind::Expr(expr) => { @@ -1014,23 +1021,30 @@ impl<'a> State<'a> { self.maybe_print_trailing_comment(st.span, None) } - fn print_block(&mut self, blk: &hir::Block<'_>) { - self.print_block_with_attrs(blk, &[]) + fn print_block(&mut self, blk: &hir::Block<'_>, cb: BoxMarker, ib: BoxMarker) { + self.print_block_with_attrs(blk, &[], cb, ib) } - fn print_block_unclosed(&mut self, blk: &hir::Block<'_>) { - self.print_block_maybe_unclosed(blk, &[], false) + fn print_block_unclosed(&mut self, blk: &hir::Block<'_>, ib: BoxMarker) { + self.print_block_maybe_unclosed(blk, &[], None, ib) } - fn print_block_with_attrs(&mut self, blk: &hir::Block<'_>, attrs: &[hir::Attribute]) { - self.print_block_maybe_unclosed(blk, attrs, true) + fn print_block_with_attrs( + &mut self, + blk: &hir::Block<'_>, + attrs: &[hir::Attribute], + cb: BoxMarker, + ib: BoxMarker, + ) { + self.print_block_maybe_unclosed(blk, attrs, Some(cb), ib) } fn print_block_maybe_unclosed( &mut self, blk: &hir::Block<'_>, attrs: &[hir::Attribute], - close_box: bool, + cb: Option, + ib: BoxMarker, ) { match blk.rules { hir::BlockCheckMode::UnsafeBlock(..) => self.word_space("unsafe"), @@ -1038,7 +1052,7 @@ impl<'a> State<'a> { } self.maybe_print_comment(blk.span.lo()); self.ann.pre(self, AnnNode::Block(blk)); - self.bopen(); + self.bopen(ib); self.print_attrs_as_inner(attrs); @@ -1050,7 +1064,7 @@ impl<'a> State<'a> { self.print_expr(expr); self.maybe_print_trailing_comment(expr.span, Some(blk.span.hi())); } - self.bclose_maybe_open(blk.span, close_box); + self.bclose_maybe_open(blk.span, cb); self.ann.post(self, AnnNode::Block(blk)) } @@ -1058,21 +1072,21 @@ impl<'a> State<'a> { if let Some(els_inner) = els { match els_inner.kind { // Another `else if` block. - hir::ExprKind::If(i, then, e) => { - self.cbox(INDENT_UNIT - 1); - self.ibox(0); + hir::ExprKind::If(i, hir::Expr { kind: hir::ExprKind::Block(t, None), .. }, e) => { + let cb = self.cbox(0); + let ib = self.ibox(0); self.word(" else if "); self.print_expr_as_cond(i); self.space(); - self.print_expr(then); + self.print_block(t, cb, ib); self.print_else(e); } // Final `else` block. - hir::ExprKind::Block(b, _) => { - self.cbox(INDENT_UNIT - 1); - self.ibox(0); + hir::ExprKind::Block(b, None) => { + let cb = self.cbox(0); + let ib = self.ibox(0); self.word(" else "); - self.print_block(b); + self.print_block(b, cb, ib); } // Constraints would be great here! _ => { @@ -1088,11 +1102,18 @@ impl<'a> State<'a> { blk: &hir::Expr<'_>, elseopt: Option<&hir::Expr<'_>>, ) { - self.head("if"); - self.print_expr_as_cond(test); - self.space(); - self.print_expr(blk); - self.print_else(elseopt) + match blk.kind { + hir::ExprKind::Block(blk, None) => { + let cb = self.cbox(0); + let ib = self.ibox(0); + self.word_nbsp("if"); + self.print_expr_as_cond(test); + self.space(); + self.print_block(blk, cb, ib); + self.print_else(elseopt) + } + _ => panic!("non-block then expr"), + } } fn print_anon_const(&mut self, constant: &hir::AnonConst) { @@ -1162,28 +1183,28 @@ impl<'a> State<'a> { } fn print_expr_vec(&mut self, exprs: &[hir::Expr<'_>]) { - self.ibox(INDENT_UNIT); + let ib = self.ibox(INDENT_UNIT); self.word("["); self.commasep_exprs(Inconsistent, exprs); self.word("]"); - self.end() + self.end(ib) } fn print_inline_const(&mut self, constant: &hir::ConstBlock) { - self.ibox(INDENT_UNIT); + let ib = self.ibox(INDENT_UNIT); self.word_space("const"); self.ann.nested(self, Nested::Body(constant.body)); - self.end() + self.end(ib) } fn print_expr_repeat(&mut self, element: &hir::Expr<'_>, count: &hir::ConstArg<'_>) { - self.ibox(INDENT_UNIT); + let ib = self.ibox(INDENT_UNIT); self.word("["); self.print_expr(element); self.word_space(";"); self.print_const_arg(count); self.word("]"); - self.end() + self.end(ib) } fn print_expr_struct( @@ -1198,23 +1219,23 @@ impl<'a> State<'a> { self.commasep_cmnt(Consistent, fields, |s, field| s.print_expr_field(field), |f| f.span); match wth { hir::StructTailExpr::Base(expr) => { - self.ibox(INDENT_UNIT); + let ib = self.ibox(INDENT_UNIT); if !fields.is_empty() { self.word(","); self.space(); } self.word(".."); self.print_expr(expr); - self.end(); + self.end(ib); } hir::StructTailExpr::DefaultFields(_) => { - self.ibox(INDENT_UNIT); + let ib = self.ibox(INDENT_UNIT); if !fields.is_empty() { self.word(","); self.space(); } self.word(".."); - self.end(); + self.end(ib); } hir::StructTailExpr::None => {} } @@ -1223,14 +1244,14 @@ impl<'a> State<'a> { } fn print_expr_field(&mut self, field: &hir::ExprField<'_>) { - self.cbox(INDENT_UNIT); + let cb = self.cbox(INDENT_UNIT); self.print_attrs_as_outer(self.attrs(field.hir_id)); if !field.is_shorthand { self.print_ident(field.ident); self.word_space(":"); } self.print_expr(field.expr); - self.end() + self.end(cb) } fn print_expr_tup(&mut self, exprs: &[hir::Expr<'_>]) { @@ -1271,18 +1292,18 @@ impl<'a> State<'a> { self.print_call_post(base_args) } - fn print_expr_binary(&mut self, op: hir::BinOp, lhs: &hir::Expr<'_>, rhs: &hir::Expr<'_>) { - let binop_prec = op.node.precedence(); + fn print_expr_binary(&mut self, op: hir::BinOpKind, lhs: &hir::Expr<'_>, rhs: &hir::Expr<'_>) { + let binop_prec = op.precedence(); let left_prec = lhs.precedence(); let right_prec = rhs.precedence(); - let (mut left_needs_paren, right_needs_paren) = match op.node.fixity() { + let (mut left_needs_paren, right_needs_paren) = match op.fixity() { Fixity::Left => (left_prec < binop_prec, right_prec <= binop_prec), Fixity::Right => (left_prec <= binop_prec, right_prec < binop_prec), Fixity::None => (left_prec <= binop_prec, right_prec <= binop_prec), }; - match (&lhs.kind, op.node) { + match (&lhs.kind, op) { // These cases need parens: `x as i32 < y` has the parser thinking that `i32 < y` is // the beginning of a path type. It starts trying to parse `x as (i32 < y ...` instead // of `(x as i32) < ...`. We need to convince it _not_ to do that. @@ -1297,7 +1318,7 @@ impl<'a> State<'a> { self.print_expr_cond_paren(lhs, left_needs_paren); self.space(); - self.word_space(op.node.as_str()); + self.word_space(op.as_str()); self.print_expr_cond_paren(rhs, right_needs_paren); } @@ -1403,8 +1424,8 @@ impl<'a> State<'a> { s.print_qpath(path, true); } hir::InlineAsmOperand::Label { block } => { - s.head("label"); - s.print_block(block); + let (cb, ib) = s.head("label"); + s.print_block(block, cb, ib); } }, AsmArg::Options(opts) => { @@ -1422,7 +1443,7 @@ impl<'a> State<'a> { fn print_expr(&mut self, expr: &hir::Expr<'_>) { self.maybe_print_comment(expr.span.lo()); self.print_attrs_as_outer(self.attrs(expr.hir_id)); - self.ibox(INDENT_UNIT); + let ib = self.ibox(INDENT_UNIT); self.ann.pre(self, AnnNode::Expr(expr)); match expr.kind { hir::ExprKind::Array(exprs) => { @@ -1451,7 +1472,7 @@ impl<'a> State<'a> { self.word(".use"); } hir::ExprKind::Binary(op, lhs, rhs) => { - self.print_expr_binary(op, lhs, rhs); + self.print_expr_binary(op.node, lhs, rhs); } hir::ExprKind::Unary(op, expr) => { self.print_expr_unary(op, expr); @@ -1470,25 +1491,25 @@ impl<'a> State<'a> { } hir::ExprKind::Type(expr, ty) => { self.word("type_ascribe!("); - self.ibox(0); + let ib = self.ibox(0); self.print_expr(expr); self.word(","); self.space_if_not_bol(); self.print_type(ty); - self.end(); + self.end(ib); self.word(")"); } hir::ExprKind::DropTemps(init) => { // Print `{`: - self.cbox(INDENT_UNIT); - self.ibox(0); - self.bopen(); + let cb = self.cbox(0); + let ib = self.ibox(0); + self.bopen(ib); // Print `let _t = $init;`: let temp = Ident::from_str("_t"); - self.print_local(Some(init), None, |this| this.print_ident(temp)); + self.print_local(false, Some(init), None, |this| this.print_ident(temp)); self.word(";"); // Print `_t`: @@ -1496,7 +1517,7 @@ impl<'a> State<'a> { self.print_ident(temp); // Print `}`: - self.bclose_maybe_open(expr.span, true); + self.bclose_maybe_open(expr.span, Some(cb)); } hir::ExprKind::Let(&hir::LetExpr { pat, ty, init, .. }) => { self.print_let(pat, ty, init); @@ -1505,24 +1526,26 @@ impl<'a> State<'a> { self.print_if(test, blk, elseopt); } hir::ExprKind::Loop(blk, opt_label, _, _) => { + let cb = self.cbox(0); + let ib = self.ibox(0); if let Some(label) = opt_label { self.print_ident(label.ident); self.word_space(":"); } - self.head("loop"); - self.print_block(blk); + self.word_nbsp("loop"); + self.print_block(blk, cb, ib); } hir::ExprKind::Match(expr, arms, _) => { - self.cbox(INDENT_UNIT); - self.ibox(INDENT_UNIT); + let cb = self.cbox(0); + let ib = self.ibox(0); self.word_nbsp("match"); self.print_expr_as_cond(expr); self.space(); - self.bopen(); + self.bopen(ib); for arm in arms { self.print_arm(arm); } - self.bclose(expr.span); + self.bclose(expr.span, cb); } hir::ExprKind::Closure(&hir::Closure { binder, @@ -1545,12 +1568,6 @@ impl<'a> State<'a> { // This is a bare expression. self.ann.nested(self, Nested::Body(body)); - self.end(); // need to close a box - - // A box will be closed by `print_expr`, but we didn't want an overall - // wrapper so we closed the corresponding opening. so create an - // empty box to satisfy the close. - self.ibox(0); } hir::ExprKind::Block(blk, opt_label) => { if let Some(label) = opt_label { @@ -1558,10 +1575,10 @@ impl<'a> State<'a> { self.word_space(":"); } // containing cbox, will be closed by print-block at `}` - self.cbox(0); + let cb = self.cbox(0); // head-box, will be closed by print-block after `{` - self.ibox(0); - self.print_block(blk); + let ib = self.ibox(0); + self.print_block(blk, cb, ib); } hir::ExprKind::Assign(lhs, rhs, _) => { self.print_expr_cond_paren(lhs, lhs.precedence() <= ExprPrecedence::Assign); @@ -1572,8 +1589,7 @@ impl<'a> State<'a> { hir::ExprKind::AssignOp(op, lhs, rhs) => { self.print_expr_cond_paren(lhs, lhs.precedence() <= ExprPrecedence::Assign); self.space(); - self.word(op.node.as_str()); - self.word_space("="); + self.word_space(op.node.as_str()); self.print_expr_cond_paren(rhs, rhs.precedence() < ExprPrecedence::Assign); } hir::ExprKind::Field(expr, ident) => { @@ -1663,7 +1679,7 @@ impl<'a> State<'a> { } } self.ann.post(self, AnnNode::Expr(expr)); - self.end() + self.end(ib) } fn print_local_decl(&mut self, loc: &hir::LetStmt<'_>) { @@ -1854,6 +1870,19 @@ impl<'a> State<'a> { self.word("..="); self.print_const_arg(end); } + TyPatKind::Or(patterns) => { + self.popen(); + let mut first = true; + for pat in patterns { + if first { + first = false; + } else { + self.word(" | "); + } + self.print_ty_pat(pat); + } + self.pclose(); + } TyPatKind::Err(_) => { self.popen(); self.word("/*ERROR*/"); @@ -1866,9 +1895,11 @@ impl<'a> State<'a> { fn print_pat(&mut self, pat: &hir::Pat<'_>) { self.maybe_print_comment(pat.span.lo()); self.ann.pre(self, AnnNode::Pat(pat)); - // Pat isn't normalized, but the beauty of it - // is that it doesn't matter + // Pat isn't normalized, but the beauty of it is that it doesn't matter. match pat.kind { + // Printing `_` isn't ideal for a missing pattern, but it's easy and good enough. + // E.g. `fn(u32)` gets printed as `fn(_: u32)`. + PatKind::Missing => self.word("_"), PatKind::Wild => self.word("_"), PatKind::Never => self.word("!"), PatKind::Binding(BindingMode(by_ref, mutbl), _, ident, sub) => { @@ -2029,14 +2060,14 @@ impl<'a> State<'a> { if self.attrs(field.hir_id).is_empty() { self.space(); } - self.cbox(INDENT_UNIT); + let cb = self.cbox(INDENT_UNIT); self.print_attrs_as_outer(self.attrs(field.hir_id)); if !field.is_shorthand { self.print_ident(field.ident); self.word_nbsp(":"); } self.print_pat(field.pat); - self.end(); + self.end(cb); } fn print_param(&mut self, arg: &hir::Param<'_>) { @@ -2072,9 +2103,9 @@ impl<'a> State<'a> { if self.attrs(arm.hir_id).is_empty() { self.space(); } - self.cbox(INDENT_UNIT); + let cb = self.cbox(INDENT_UNIT); self.ann.pre(self, AnnNode::Arm(arm)); - self.ibox(0); + let ib = self.ibox(0); self.print_attrs_as_outer(self.attrs(arm.hir_id)); self.print_pat(arm.pat); self.space(); @@ -2091,8 +2122,7 @@ impl<'a> State<'a> { self.print_ident(label.ident); self.word_space(":"); } - // the block will close the pattern's ibox - self.print_block_unclosed(blk); + self.print_block_unclosed(blk, ib); // If it is a user-provided unsafe block, print a comma after it if let hir::BlockCheckMode::UnsafeBlock(hir::UnsafeSource::UserProvided) = blk.rules @@ -2101,13 +2131,13 @@ impl<'a> State<'a> { } } _ => { - self.end(); // close the ibox for the pattern + self.end(ib); self.print_expr(arm.body); self.word(","); } } self.ann.post(self, AnnNode::Arm(arm)); - self.end() // close enclosing cbox + self.end(cb) } fn print_fn( @@ -2116,7 +2146,7 @@ impl<'a> State<'a> { header: hir::FnHeader, name: Option, generics: &hir::Generics<'_>, - arg_names: &[Option], + arg_idents: &[Option], body_id: Option, ) { self.print_fn_header_info(header); @@ -2128,16 +2158,16 @@ impl<'a> State<'a> { self.print_generic_params(generics.params); self.popen(); - // Make sure we aren't supplied *both* `arg_names` and `body_id`. - assert!(arg_names.is_empty() || body_id.is_none()); + // Make sure we aren't supplied *both* `arg_idents` and `body_id`. + assert!(arg_idents.is_empty() || body_id.is_none()); let mut i = 0; let mut print_arg = |s: &mut Self, ty: Option<&hir::Ty<'_>>| { if i == 0 && decl.implicit_self.has_implicit_self() { s.print_implicit_self(&decl.implicit_self); } else { - if let Some(arg_name) = arg_names.get(i) { - if let Some(arg_name) = arg_name { - s.word(arg_name.to_string()); + if let Some(arg_ident) = arg_idents.get(i) { + if let Some(arg_ident) = arg_ident { + s.word(arg_ident.to_string()); s.word(":"); s.space(); } @@ -2153,12 +2183,14 @@ impl<'a> State<'a> { i += 1; }; self.commasep(Inconsistent, decl.inputs, |s, ty| { - s.ibox(INDENT_UNIT); + let ib = s.ibox(INDENT_UNIT); print_arg(s, Some(ty)); - s.end(); + s.end(ib); }); if decl.c_variadic { - self.word(", "); + if !decl.inputs.is_empty() { + self.word(", "); + } print_arg(self, None); self.word("..."); } @@ -2172,7 +2204,7 @@ impl<'a> State<'a> { self.word("|"); let mut i = 0; self.commasep(Inconsistent, decl.inputs, |s, ty| { - s.ibox(INDENT_UNIT); + let ib = s.ibox(INDENT_UNIT); s.ann.nested(s, Nested::BodyParamPat(body_id, i)); i += 1; @@ -2184,7 +2216,7 @@ impl<'a> State<'a> { s.space(); s.print_type(ty); } - s.end(); + s.end(ib); }); self.word("|"); @@ -2426,16 +2458,16 @@ impl<'a> State<'a> { match decl.output { hir::FnRetTy::Return(ty) => { self.space_if_not_bol(); - self.ibox(INDENT_UNIT); + let ib = self.ibox(INDENT_UNIT); self.word_space("->"); self.print_type(ty); - } - hir::FnRetTy::DefaultReturn(..) => return, - } - self.end(); + self.end(ib); - if let hir::FnRetTy::Return(output) = decl.output { - self.maybe_print_comment(output.span.lo()); + if let hir::FnRetTy::Return(output) = decl.output { + self.maybe_print_comment(output.span.lo()); + } + } + hir::FnRetTy::DefaultReturn(..) => {} } } @@ -2446,9 +2478,9 @@ impl<'a> State<'a> { decl: &hir::FnDecl<'_>, name: Option, generic_params: &[hir::GenericParam<'_>], - arg_names: &[Option], + arg_idents: &[Option], ) { - self.ibox(INDENT_UNIT); + let ib = self.ibox(INDENT_UNIT); self.print_formal_generic_params(generic_params); let generics = hir::Generics::empty(); self.print_fn( @@ -2461,10 +2493,10 @@ impl<'a> State<'a> { }, name, generics, - arg_names, + arg_idents, None, ); - self.end(); + self.end(ib); } fn print_fn_header_info(&mut self, header: hir::FnHeader) { diff --git a/compiler/rustc_hir_typeck/Cargo.toml b/compiler/rustc_hir_typeck/Cargo.toml index b2b90cb29e36f..f00125c3e090a 100644 --- a/compiler/rustc_hir_typeck/Cargo.toml +++ b/compiler/rustc_hir_typeck/Cargo.toml @@ -22,6 +22,7 @@ rustc_macros = { path = "../rustc_macros" } rustc_middle = { path = "../rustc_middle" } rustc_session = { path = "../rustc_session" } rustc_span = { path = "../rustc_span" } +rustc_target = { path = "../rustc_target" } rustc_trait_selection = { path = "../rustc_trait_selection" } smallvec = { version = "1.8.1", features = ["union", "may_dangle"] } tracing = "0.1" diff --git a/compiler/rustc_hir_typeck/messages.ftl b/compiler/rustc_hir_typeck/messages.ftl index 872861d6289d9..23309102c4da5 100644 --- a/compiler/rustc_hir_typeck/messages.ftl +++ b/compiler/rustc_hir_typeck/messages.ftl @@ -148,7 +148,7 @@ hir_typeck_never_type_fallback_flowing_into_unsafe_path = never type fallback af hir_typeck_never_type_fallback_flowing_into_unsafe_union_field = never type fallback affects this union access .help = specify the type explicitly -hir_typeck_no_associated_item = no {$item_kind} named `{$item_name}` found for {$ty_prefix} `{$ty_str}`{$trait_missing_method -> +hir_typeck_no_associated_item = no {$item_kind} named `{$item_ident}` found for {$ty_prefix} `{$ty_str}`{$trait_missing_method -> [true] {""} *[other] {" "}in the current scope } @@ -179,6 +179,9 @@ hir_typeck_ptr_cast_add_auto_to_object = cannot add {$traits_len -> .help = use `transmute` if you're sure this is sound .label = unsupported cast +hir_typeck_register_type_unstable = + type `{$ty}` cannot be used with this register class in stable + hir_typeck_remove_semi_for_coerce = you might have meant to return the `match` expression hir_typeck_remove_semi_for_coerce_expr = this could be implicitly returned but it is a statement, not a tail expression hir_typeck_remove_semi_for_coerce_ret = the `match` arms can conform to this return type diff --git a/compiler/rustc_hir_typeck/src/_match.rs b/compiler/rustc_hir_typeck/src/_match.rs index 38319862334a8..61dd8c5730734 100644 --- a/compiler/rustc_hir_typeck/src/_match.rs +++ b/compiler/rustc_hir_typeck/src/_match.rs @@ -11,7 +11,7 @@ use rustc_trait_selection::traits::{ use tracing::{debug, instrument}; use crate::coercion::{AsCoercionSite, CoerceMany}; -use crate::{Diverges, Expectation, FnCtxt, Needs}; +use crate::{Diverges, Expectation, FnCtxt, GatherLocalsVisitor, Needs}; impl<'a, 'tcx> FnCtxt<'a, 'tcx> { #[instrument(skip(self), level = "debug", ret)] @@ -43,6 +43,8 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { // #55810: Type check patterns first so we get types for all bindings. let scrut_span = scrut.span.find_ancestor_inside(expr.span).unwrap_or(scrut.span); for arm in arms { + GatherLocalsVisitor::gather_from_arm(self, arm); + self.check_pat_top(arm.pat, scrutinee_ty, Some(scrut_span), Some(scrut), None); } @@ -601,7 +603,8 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { // FIXME(-Znext-solver): Remove this branch once `replace_opaque_types_with_infer` is gone. ty::Infer(ty::TyVar(_)) => self .inner - .borrow() + .borrow_mut() + .opaque_types() .iter_opaque_types() .find(|(_, v)| v.ty == expected_ty) .map(|(k, _)| (k.def_id, k.args))?, diff --git a/compiler/rustc_hir_typeck/src/callee.rs b/compiler/rustc_hir_typeck/src/callee.rs index 5e2daa69628f4..f555d116c52db 100644 --- a/compiler/rustc_hir_typeck/src/callee.rs +++ b/compiler/rustc_hir_typeck/src/callee.rs @@ -14,7 +14,7 @@ use rustc_middle::ty::adjustment::{ use rustc_middle::ty::{self, GenericArgsRef, Ty, TyCtxt, TypeVisitableExt}; use rustc_middle::{bug, span_bug}; use rustc_span::def_id::LocalDefId; -use rustc_span::{Ident, Span, sym}; +use rustc_span::{Span, sym}; use rustc_trait_selection::error_reporting::traits::DefIdOrName; use rustc_trait_selection::infer::InferCtxtExt as _; use rustc_trait_selection::traits::query::evaluate_obligation::InferCtxtExt as _; @@ -34,11 +34,9 @@ pub(crate) fn check_legal_trait_for_method_call( receiver: Option, expr_span: Span, trait_id: DefId, - body_id: DefId, + _body_id: DefId, ) -> Result<(), ErrorGuaranteed> { - if tcx.is_lang_item(trait_id, LangItem::Drop) - && tcx.lang_items().fallback_surface_drop_fn() != Some(body_id) - { + if tcx.is_lang_item(trait_id, LangItem::Drop) { let sugg = if let Some(receiver) = receiver.filter(|s| !s.is_empty()) { errors::ExplicitDestructorCallSugg::Snippet { lo: expr_span.shrink_to_lo(), @@ -89,14 +87,29 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { let output = match result { None => { - // this will report an error since original_callee_ty is not a fn - self.confirm_builtin_call( - call_expr, - callee_expr, - original_callee_ty, - arg_exprs, - expected, - ) + // Check all of the arg expressions, but with no expectations + // since we don't have a signature to compare them to. + for arg in arg_exprs { + self.check_expr(arg); + } + + if let hir::ExprKind::Path(hir::QPath::Resolved(_, path)) = &callee_expr.kind + && let [segment] = path.segments + { + self.dcx().try_steal_modify_and_emit_err( + segment.ident.span, + StashKey::CallIntoMethod, + |err| { + // Try suggesting `foo(a)` -> `a.foo()` if possible. + self.suggest_call_as_method( + err, segment, arg_exprs, call_expr, expected, + ); + }, + ); + } + + let guar = self.report_invalid_callee(call_expr, callee_expr, expr_ty, arg_exprs); + Ty::new_error(self.tcx, guar) } Some(CallStep::Builtin(callee_ty)) => { @@ -298,9 +311,9 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { Ty::new_tup_from_iter(self.tcx, arg_exprs.iter().map(|e| self.next_ty_var(e.span))) }); - if let Some(ok) = self.lookup_method_in_trait( + if let Some(ok) = self.lookup_method_for_operator( self.misc(call_expr.span), - Ident::with_dummy_span(method_name), + method_name, trait_def_id, adjusted_ty, opt_input_type, @@ -463,32 +476,11 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { } (fn_sig, Some(def_id)) } + // FIXME(const_trait_impl): these arms should error because we can't enforce them ty::FnPtr(sig_tys, hdr) => (sig_tys.with(hdr), None), - _ => { - for arg in arg_exprs { - self.check_expr(arg); - } - if let hir::ExprKind::Path(hir::QPath::Resolved(_, path)) = &callee_expr.kind - && let [segment] = path.segments - { - self.dcx().try_steal_modify_and_emit_err( - segment.ident.span, - StashKey::CallIntoMethod, - |err| { - // Try suggesting `foo(a)` -> `a.foo()` if possible. - self.suggest_call_as_method( - err, segment, arg_exprs, call_expr, expected, - ); - }, - ); - } - - let err = self.report_invalid_callee(call_expr, callee_expr, callee_ty, arg_exprs); - - return Ty::new_error(self.tcx, err); - } + _ => unreachable!(), }; // Replace any late-bound regions that appear in the function @@ -771,7 +763,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { format!("this {descr} returns an unsized value `{output_ty}`, so it cannot be called") ); if let DefIdOrName::DefId(def_id) = maybe_def - && let Some(def_span) = self.tcx.hir().span_if_local(def_id) + && let Some(def_span) = self.tcx.hir_span_if_local(def_id) { err.span_label(def_span, "the callable type is defined here"); } @@ -780,7 +772,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { } } - if let Some(span) = self.tcx.hir().res_span(def) { + if let Some(span) = self.tcx.hir_res_span(def) { let callee_ty = callee_ty.to_string(); let label = match (unit_variant, inner_callee_path) { (Some((_, kind, path)), _) => { @@ -910,19 +902,23 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { call_expr: &'tcx hir::Expr<'tcx>, arg_exprs: &'tcx [hir::Expr<'tcx>], expected: Expectation<'tcx>, - method_callee: MethodCallee<'tcx>, + method: MethodCallee<'tcx>, ) -> Ty<'tcx> { - let output_type = self.check_method_argument_types( + self.check_argument_types( call_expr.span, call_expr, - Ok(method_callee), + &method.sig.inputs()[1..], + method.sig.output(), + expected, arg_exprs, + method.sig.c_variadic, TupleArgumentsFlag::TupleArguments, - expected, + Some(method.def_id), ); - self.write_method_call_and_enforce_effects(call_expr.hir_id, call_expr.span, method_callee); - output_type + self.write_method_call_and_enforce_effects(call_expr.hir_id, call_expr.span, method); + + method.sig.output() } } diff --git a/compiler/rustc_hir_typeck/src/cast.rs b/compiler/rustc_hir_typeck/src/cast.rs index 8f5fddd19d7f6..5bfc3e810d9fd 100644 --- a/compiler/rustc_hir_typeck/src/cast.rs +++ b/compiler/rustc_hir_typeck/src/cast.rs @@ -501,12 +501,25 @@ impl<'a, 'tcx> CastCheck<'tcx> { .must_apply_modulo_regions() { label = false; - err.span_suggestion( - self.span, - "consider using the `From` trait instead", - format!("{}::from({})", self.cast_ty, snippet), - Applicability::MaybeIncorrect, - ); + if let ty::Adt(def, args) = self.cast_ty.kind() { + err.span_suggestion_verbose( + self.span, + "consider using the `From` trait instead", + format!( + "{}::from({})", + fcx.tcx.value_path_str_with_args(def.did(), args), + snippet + ), + Applicability::MaybeIncorrect, + ); + } else { + err.span_suggestion( + self.span, + "consider using the `From` trait instead", + format!("{}::from({})", self.cast_ty, snippet), + Applicability::MaybeIncorrect, + ); + }; } } @@ -1038,34 +1051,34 @@ impl<'a, 'tcx> CastCheck<'tcx> { fn check_ref_cast( &self, fcx: &FnCtxt<'a, 'tcx>, - m_expr: ty::TypeAndMut<'tcx>, - m_cast: ty::TypeAndMut<'tcx>, + mut m_expr: ty::TypeAndMut<'tcx>, + mut m_cast: ty::TypeAndMut<'tcx>, ) -> Result> { // array-ptr-cast: allow mut-to-mut, mut-to-const, const-to-const - if m_expr.mutbl >= m_cast.mutbl { - if let ty::Array(ety, _) = m_expr.ty.kind() { - // Due to the limitations of LLVM global constants, - // region pointers end up pointing at copies of - // vector elements instead of the original values. - // To allow raw pointers to work correctly, we - // need to special-case obtaining a raw pointer - // from a region pointer to a vector. - - // Coerce to a raw pointer so that we generate RawPtr in MIR. - let array_ptr_type = Ty::new_ptr(fcx.tcx, m_expr.ty, m_expr.mutbl); - fcx.coerce(self.expr, self.expr_ty, array_ptr_type, AllowTwoPhase::No, None) - .unwrap_or_else(|_| { - bug!( + m_expr.ty = fcx.try_structurally_resolve_type(self.expr_span, m_expr.ty); + m_cast.ty = fcx.try_structurally_resolve_type(self.cast_span, m_cast.ty); + + if m_expr.mutbl >= m_cast.mutbl + && let ty::Array(ety, _) = m_expr.ty.kind() + && fcx.can_eq(fcx.param_env, *ety, m_cast.ty) + { + // Due to historical reasons we allow directly casting references of + // arrays into raw pointers of their element type. + + // Coerce to a raw pointer so that we generate RawPtr in MIR. + let array_ptr_type = Ty::new_ptr(fcx.tcx, m_expr.ty, m_expr.mutbl); + fcx.coerce(self.expr, self.expr_ty, array_ptr_type, AllowTwoPhase::No, None) + .unwrap_or_else(|_| { + bug!( "could not cast from reference to array to pointer to array ({:?} to {:?})", self.expr_ty, array_ptr_type, ) - }); + }); - // this will report a type mismatch if needed - fcx.demand_eqtype(self.span, *ety, m_cast.ty); - return Ok(CastKind::ArrayPtrCast); - } + // this will report a type mismatch if needed + fcx.demand_eqtype(self.span, *ety, m_cast.ty); + return Ok(CastKind::ArrayPtrCast); } Err(CastError::IllegalCast) diff --git a/compiler/rustc_hir_typeck/src/check.rs b/compiler/rustc_hir_typeck/src/check.rs index dabae7b1d094c..e9e380a1541f4 100644 --- a/compiler/rustc_hir_typeck/src/check.rs +++ b/compiler/rustc_hir_typeck/src/check.rs @@ -2,10 +2,8 @@ use std::cell::RefCell; use rustc_abi::ExternAbi; use rustc_hir as hir; -use rustc_hir::def::DefKind; -use rustc_hir::intravisit::Visitor; use rustc_hir::lang_items::LangItem; -use rustc_hir_analysis::check::{check_function_signature, forbid_intrinsic_abi}; +use rustc_hir_analysis::check::check_function_signature; use rustc_infer::infer::RegionVariableOrigin; use rustc_infer::traits::WellFormedLoc; use rustc_middle::ty::{self, Binder, Ty, TyCtxt}; @@ -50,9 +48,9 @@ pub(super) fn check_fn<'a, 'tcx>( let span = body.value.span; - forbid_intrinsic_abi(tcx, span, fn_sig.abi); - - GatherLocalsVisitor::new(fcx).visit_body(body); + for param in body.params { + GatherLocalsVisitor::gather_from_param(fcx, param); + } // C-variadic fns also have a `VaList` input that's not listed in `fn_sig` // (as it's created inside the body itself, not passed in from outside). @@ -151,11 +149,6 @@ pub(super) fn check_fn<'a, 'tcx>( // we have a recursive call site and do the sadly stabilized fallback to `()`. fcx.demand_suptype(span, ret_ty, actual_return_ty); - // Check that a function marked as `#[panic_handler]` has signature `fn(&PanicInfo) -> !` - if tcx.is_lang_item(fn_def_id.to_def_id(), LangItem::PanicImpl) { - check_panic_info_fn(tcx, fn_def_id, fn_sig); - } - if tcx.is_lang_item(fn_def_id.to_def_id(), LangItem::Start) { check_lang_start_fn(tcx, fn_sig, fn_def_id); } @@ -163,60 +156,6 @@ pub(super) fn check_fn<'a, 'tcx>( fcx.coroutine_types } -fn check_panic_info_fn(tcx: TyCtxt<'_>, fn_id: LocalDefId, fn_sig: ty::FnSig<'_>) { - let span = tcx.def_span(fn_id); - - let DefKind::Fn = tcx.def_kind(fn_id) else { - tcx.dcx().span_err(span, "should be a function"); - return; - }; - - let generic_counts = tcx.generics_of(fn_id).own_counts(); - if generic_counts.types != 0 { - tcx.dcx().span_err(span, "should have no type parameters"); - } - if generic_counts.consts != 0 { - tcx.dcx().span_err(span, "should have no const parameters"); - } - - let panic_info_did = tcx.require_lang_item(hir::LangItem::PanicInfo, Some(span)); - - // build type `for<'a, 'b> fn(&'a PanicInfo<'b>) -> !` - let panic_info_ty = tcx.type_of(panic_info_did).instantiate( - tcx, - &[ty::GenericArg::from(ty::Region::new_bound( - tcx, - ty::INNERMOST, - ty::BoundRegion { var: ty::BoundVar::from_u32(1), kind: ty::BoundRegionKind::Anon }, - ))], - ); - let panic_info_ref_ty = Ty::new_imm_ref( - tcx, - ty::Region::new_bound( - tcx, - ty::INNERMOST, - ty::BoundRegion { var: ty::BoundVar::ZERO, kind: ty::BoundRegionKind::Anon }, - ), - panic_info_ty, - ); - - let bounds = tcx.mk_bound_variable_kinds(&[ - ty::BoundVariableKind::Region(ty::BoundRegionKind::Anon), - ty::BoundVariableKind::Region(ty::BoundRegionKind::Anon), - ]); - let expected_sig = ty::Binder::bind_with_vars( - tcx.mk_fn_sig([panic_info_ref_ty], tcx.types.never, false, fn_sig.safety, ExternAbi::Rust), - bounds, - ); - - let _ = check_function_signature( - tcx, - ObligationCause::new(span, fn_id, ObligationCauseCode::LangFunctionType(sym::panic_impl)), - fn_id.into(), - expected_sig, - ); -} - fn check_lang_start_fn<'tcx>(tcx: TyCtxt<'tcx>, fn_sig: ty::FnSig<'tcx>, def_id: LocalDefId) { // build type `fn(main: fn() -> T, argc: isize, argv: *const *const u8, sigpipe: u8)` diff --git a/compiler/rustc_hir_typeck/src/closure.rs b/compiler/rustc_hir_typeck/src/closure.rs index a4776338f6c39..8fd59999fce5e 100644 --- a/compiler/rustc_hir_typeck/src/closure.rs +++ b/compiler/rustc_hir_typeck/src/closure.rs @@ -163,7 +163,15 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { // Resume type defaults to `()` if the coroutine has no argument. let resume_ty = liberated_sig.inputs().get(0).copied().unwrap_or(tcx.types.unit); - let interior = self.next_ty_var(expr_span); + // In the new solver, we can just instantiate this eagerly + // with the witness. This will ensure that goals that don't need + // to stall on interior types will get processed eagerly. + let interior = if self.next_trait_solver() { + Ty::new_coroutine_witness(tcx, expr_def_id.to_def_id(), parent_args) + } else { + self.next_ty_var(expr_span) + }; + self.deferred_coroutine_interiors.borrow_mut().push((expr_def_id, interior)); // Coroutines that come from coroutine closures have not yet determined @@ -970,7 +978,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { self.typeck_results.borrow_mut().user_provided_sigs.insert(expr_def_id, c_result); // Normalize only after registering in `user_provided_sigs`. - self.normalize(self.tcx.hir().span(hir_id), result) + self.normalize(self.tcx.def_span(expr_def_id), result) } /// Invoked when we are translating the coroutine that results diff --git a/compiler/rustc_hir_typeck/src/coercion.rs b/compiler/rustc_hir_typeck/src/coercion.rs index f42ca3af2b35c..fd899425f62d2 100644 --- a/compiler/rustc_hir_typeck/src/coercion.rs +++ b/compiler/rustc_hir_typeck/src/coercion.rs @@ -37,7 +37,6 @@ use std::ops::Deref; -use rustc_abi::ExternAbi; use rustc_attr_parsing::InlineAttr; use rustc_errors::codes::*; use rustc_errors::{Applicability, Diag, struct_span_code_err}; @@ -104,15 +103,6 @@ fn coerce_mutbls<'tcx>( if from_mutbl >= to_mutbl { Ok(()) } else { Err(TypeError::Mutability) } } -/// Do not require any adjustments, i.e. coerce `x -> x`. -fn identity(_: Ty<'_>) -> Vec> { - vec![] -} - -fn simple<'tcx>(kind: Adjust) -> impl FnOnce(Ty<'tcx>) -> Vec> { - move |target| vec![Adjustment { kind, target }] -} - /// This always returns `Ok(...)`. fn success<'tcx>( adj: Vec>, @@ -132,7 +122,7 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> { Coerce { fcx, cause, allow_two_phase, use_lub: false, coerce_never } } - fn unify(&self, a: Ty<'tcx>, b: Ty<'tcx>) -> InferResult<'tcx, Ty<'tcx>> { + fn unify_raw(&self, a: Ty<'tcx>, b: Ty<'tcx>) -> InferResult<'tcx, Ty<'tcx>> { debug!("unify(a: {:?}, b: {:?}, use_lub: {})", a, b, self.use_lub); self.commit_if_ok(|_| { let at = self.at(&self.cause, self.fcx.param_env); @@ -162,13 +152,30 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> { }) } + /// Unify two types (using sub or lub). + fn unify(&self, a: Ty<'tcx>, b: Ty<'tcx>) -> CoerceResult<'tcx> { + self.unify_raw(a, b) + .and_then(|InferOk { value: ty, obligations }| success(vec![], ty, obligations)) + } + /// Unify two types (using sub or lub) and produce a specific coercion. - fn unify_and(&self, a: Ty<'tcx>, b: Ty<'tcx>, f: F) -> CoerceResult<'tcx> - where - F: FnOnce(Ty<'tcx>) -> Vec>, - { - self.unify(a, b) - .and_then(|InferOk { value: ty, obligations }| success(f(ty), ty, obligations)) + fn unify_and( + &self, + a: Ty<'tcx>, + b: Ty<'tcx>, + adjustments: impl IntoIterator>, + final_adjustment: Adjust, + ) -> CoerceResult<'tcx> { + self.unify_raw(a, b).and_then(|InferOk { value: ty, obligations }| { + success( + adjustments + .into_iter() + .chain(std::iter::once(Adjustment { target: ty, kind: final_adjustment })) + .collect(), + ty, + obligations, + ) + }) } #[instrument(skip(self))] @@ -181,10 +188,14 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> { // Coercing from `!` to any type is allowed: if a.is_never() { if self.coerce_never { - return success(simple(Adjust::NeverToAny)(b), b, PredicateObligations::new()); + return success( + vec![Adjustment { kind: Adjust::NeverToAny, target: b }], + b, + PredicateObligations::new(), + ); } else { // Otherwise the only coercion we can do is unification. - return self.unify_and(a, b, identity); + return self.unify(a, b); } } @@ -192,7 +203,7 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> { // we have no information about the source type. This will always // ultimately fall back to some form of subtyping. if a.is_ty_var() { - return self.coerce_from_inference_variable(a, b, identity); + return self.coerce_from_inference_variable(a, b); } // Consider coercing the subtype to a DST @@ -248,7 +259,7 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> { ty::FnPtr(a_sig_tys, a_hdr) => { // We permit coercion of fn pointers to drop the // unsafe qualifier. - self.coerce_from_fn_pointer(a, a_sig_tys.with(a_hdr), b) + self.coerce_from_fn_pointer(a_sig_tys.with(a_hdr), b) } ty::Closure(closure_def_id_a, args_a) => { // Non-capturing closures are coercible to @@ -258,7 +269,7 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> { } _ => { // Otherwise, just use unification rules. - self.unify_and(a, b, identity) + self.unify(a, b) } } } @@ -266,12 +277,7 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> { /// Coercing *from* an inference variable. In this case, we have no information /// about the source type, so we can't really do a true coercion and we always /// fall back to subtyping (`unify_and`). - fn coerce_from_inference_variable( - &self, - a: Ty<'tcx>, - b: Ty<'tcx>, - make_adjustments: impl FnOnce(Ty<'tcx>) -> Vec>, - ) -> CoerceResult<'tcx> { + fn coerce_from_inference_variable(&self, a: Ty<'tcx>, b: Ty<'tcx>) -> CoerceResult<'tcx> { debug!("coerce_from_inference_variable(a={:?}, b={:?})", a, b); assert!(a.is_ty_var() && self.shallow_resolve(a) == a); assert!(self.shallow_resolve(b) == b); @@ -299,12 +305,11 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> { "coerce_from_inference_variable: two inference variables, target_ty={:?}, obligations={:?}", target_ty, obligations ); - let adjustments = make_adjustments(target_ty); - InferResult::Ok(InferOk { value: (adjustments, target_ty), obligations }) + success(vec![], target_ty, obligations) } else { // One unresolved type variable: just apply subtyping, we may be able // to do something useful. - self.unify_and(a, b, make_adjustments) + self.unify(a, b) } } @@ -332,7 +337,7 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> { coerce_mutbls(mt_a.mutbl, mutbl_b)?; (r_a, mt_a) } - _ => return self.unify_and(a, b, identity), + _ => return self.unify(a, b), }; let span = self.cause.span; @@ -438,7 +443,7 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> { referent_ty, mutbl_b, // [1] above ); - match self.unify(derefd_ty_a, b) { + match self.unify_raw(derefd_ty_a, b) { Ok(ok) => { found = Some(ok); break; @@ -580,13 +585,13 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> { // We only have the latter, so we use an inference variable // for the former and let type inference do the rest. let coerce_target = self.next_ty_var(self.cause.span); - let mut coercion = self.unify_and(coerce_target, target, |target| { - let unsize = Adjustment { kind: Adjust::Pointer(PointerCoercion::Unsize), target }; - match reborrow { - None => vec![unsize], - Some((ref deref, ref autoref)) => vec![deref.clone(), autoref.clone(), unsize], - } - })?; + + let mut coercion = self.unify_and( + coerce_target, + target, + reborrow.into_iter().flat_map(|(deref, autoref)| [deref, autoref]), + Adjust::Pointer(PointerCoercion::Unsize), + )?; let mut selcx = traits::SelectionContext::new(self); @@ -709,7 +714,7 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> { && let ty::Dynamic(b_data, _, ty::DynStar) = b.kind() && a_data.principal_def_id() == b_data.principal_def_id() { - return self.unify_and(a, b, |_| vec![]); + return self.unify(a, b); } // Check the obligations of the cast -- for example, when casting @@ -809,23 +814,15 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> { // To complete the reborrow, we need to make sure we can unify the inner types, and if so we // add the adjustments. - self.unify_and(a, b, |_inner_ty| { - vec![Adjustment { kind: Adjust::ReborrowPin(mut_b), target: b }] - }) + self.unify_and(a, b, [], Adjust::ReborrowPin(mut_b)) } - fn coerce_from_safe_fn( + fn coerce_from_safe_fn( &self, - a: Ty<'tcx>, fn_ty_a: ty::PolyFnSig<'tcx>, b: Ty<'tcx>, - to_unsafe: F, - normal: G, - ) -> CoerceResult<'tcx> - where - F: FnOnce(Ty<'tcx>) -> Vec>, - G: FnOnce(Ty<'tcx>) -> Vec>, - { + adjustment: Option, + ) -> CoerceResult<'tcx> { self.commit_if_ok(|snapshot| { let outer_universe = self.infcx.universe(); @@ -834,9 +831,19 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> { && hdr_b.safety.is_unsafe() { let unsafe_a = self.tcx.safe_to_unsafe_fn_ty(fn_ty_a); - self.unify_and(unsafe_a, b, to_unsafe) + self.unify_and( + unsafe_a, + b, + adjustment + .map(|kind| Adjustment { kind, target: Ty::new_fn_ptr(self.tcx, fn_ty_a) }), + Adjust::Pointer(PointerCoercion::UnsafeFnPointer), + ) } else { - self.unify_and(a, b, normal) + let a = Ty::new_fn_ptr(self.tcx, fn_ty_a); + match adjustment { + Some(adjust) => self.unify_and(a, b, [], adjust), + None => self.unify(a, b), + } }; // FIXME(#73154): This is a hack. Currently LUB can generate @@ -853,7 +860,6 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> { fn coerce_from_fn_pointer( &self, - a: Ty<'tcx>, fn_ty_a: ty::PolyFnSig<'tcx>, b: Ty<'tcx>, ) -> CoerceResult<'tcx> { @@ -862,15 +868,9 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> { //! let b = self.shallow_resolve(b); - debug!("coerce_from_fn_pointer(a={:?}, b={:?})", a, b); - - self.coerce_from_safe_fn( - a, - fn_ty_a, - b, - simple(Adjust::Pointer(PointerCoercion::UnsafeFnPointer)), - identity, - ) + debug!(?fn_ty_a, ?b, "coerce_from_fn_pointer"); + + self.coerce_from_safe_fn(fn_ty_a, b, None) } fn coerce_from_fn_item(&self, a: Ty<'tcx>, b: Ty<'tcx>) -> CoerceResult<'tcx> { @@ -917,30 +917,16 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> { self.at(&self.cause, self.param_env).normalize(a_sig); obligations.extend(o1); - let a_fn_pointer = Ty::new_fn_ptr(self.tcx, a_sig); let InferOk { value, obligations: o2 } = self.coerce_from_safe_fn( - a_fn_pointer, a_sig, b, - |unsafe_ty| { - vec![ - Adjustment { - kind: Adjust::Pointer(PointerCoercion::ReifyFnPointer), - target: a_fn_pointer, - }, - Adjustment { - kind: Adjust::Pointer(PointerCoercion::UnsafeFnPointer), - target: unsafe_ty, - }, - ] - }, - simple(Adjust::Pointer(PointerCoercion::ReifyFnPointer)), + Some(Adjust::Pointer(PointerCoercion::ReifyFnPointer)), )?; obligations.extend(o2); Ok(InferOk { value, obligations }) } - _ => self.unify_and(a, b, identity), + _ => self.unify(a, b), } } @@ -984,10 +970,11 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> { self.unify_and( pointer_ty, b, - simple(Adjust::Pointer(PointerCoercion::ClosureFnPointer(safety))), + [], + Adjust::Pointer(PointerCoercion::ClosureFnPointer(safety)), ) } - _ => self.unify_and(a, b, identity), + _ => self.unify(a, b), } } @@ -1002,7 +989,7 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> { let (is_ref, mt_a) = match *a.kind() { ty::Ref(_, ty, mutbl) => (true, ty::TypeAndMut { ty, mutbl }), ty::RawPtr(ty, mutbl) => (false, ty::TypeAndMut { ty, mutbl }), - _ => return self.unify_and(a, b, identity), + _ => return self.unify(a, b), }; coerce_mutbls(mt_a.mutbl, mutbl_b)?; @@ -1012,16 +999,16 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> { // representation, we still register an Adjust::DerefRef so that // regionck knows that the region for `a` must be valid here. if is_ref { - self.unify_and(a_raw, b, |target| { - vec![ - Adjustment { kind: Adjust::Deref(None), target: mt_a.ty }, - Adjustment { kind: Adjust::Borrow(AutoBorrow::RawPtr(mutbl_b)), target }, - ] - }) + self.unify_and( + a_raw, + b, + [Adjustment { kind: Adjust::Deref(None), target: mt_a.ty }], + Adjust::Borrow(AutoBorrow::RawPtr(mutbl_b)), + ) } else if mt_a.mutbl != mutbl_b { - self.unify_and(a_raw, b, simple(Adjust::Pointer(PointerCoercion::MutToConstPointer))) + self.unify_and(a_raw, b, [], Adjust::Pointer(PointerCoercion::MutToConstPointer)) } else { - self.unify_and(a_raw, b, identity) + self.unify(a_raw, b) } } } @@ -1119,9 +1106,9 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { let cause = self.cause(DUMMY_SP, ObligationCauseCode::ExprAssignable); // We don't ever need two-phase here since we throw out the result of the coercion. let coerce = Coerce::new(self, cause, AllowTwoPhase::No, true); - coerce - .autoderef(DUMMY_SP, expr_ty) - .find_map(|(ty, steps)| self.probe(|_| coerce.unify(ty, target)).ok().map(|_| steps)) + coerce.autoderef(DUMMY_SP, expr_ty).find_map(|(ty, steps)| { + self.probe(|_| coerce.unify_raw(ty, target)).ok().map(|_| steps) + }) } /// Given a type, this function will calculate and return the type given @@ -1240,10 +1227,6 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { } }; if let (Some(a_sig), Some(b_sig)) = (a_sig, b_sig) { - // Intrinsics are not coercible to function pointers. - if a_sig.abi() == ExternAbi::RustIntrinsic || b_sig.abi() == ExternAbi::RustIntrinsic { - return Err(TypeError::IntrinsicCast); - } // The signature must match. let (a_sig, b_sig) = self.normalize(new.span, (a_sig, b_sig)); let sig = self diff --git a/compiler/rustc_hir_typeck/src/demand.rs b/compiler/rustc_hir_typeck/src/demand.rs index 41bdd0ca43ef1..d1bc54ed73ead 100644 --- a/compiler/rustc_hir_typeck/src/demand.rs +++ b/compiler/rustc_hir_typeck/src/demand.rs @@ -865,10 +865,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { // `&'name Ty` -> `&'name mut Ty` or `&Ty` -> `&mut Ty` vec![( ty_ref.1.ty.span.shrink_to_lo(), - format!( - "{}mut ", - if ty_ref.0.ident.span.lo() == ty_ref.0.ident.span.hi() { "" } else { " " }, - ), + format!("{}mut ", if ty_ref.0.ident.span.is_empty() { "" } else { " " },), )] }; sugg.extend([ @@ -999,10 +996,8 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { let container = with_no_trimmed_paths!(self.tcx.def_path_str(container_id)); for def_id in pick.import_ids { let hir_id = self.tcx.local_def_id_to_hir_id(def_id); - path_span.push_span_label( - self.tcx.hir().span(hir_id), - format!("`{container}` imported here"), - ); + path_span + .push_span_label(self.tcx.hir_span(hir_id), format!("`{container}` imported here")); } let tail = with_no_trimmed_paths!(match &other_methods_in_scope[..] { [] => return, @@ -1094,14 +1089,8 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { /// This function checks whether the method is not static and does not accept other parameters than `self`. fn has_only_self_parameter(&self, method: &AssocItem) -> bool { - match method.kind { - ty::AssocKind::Fn => { - method.fn_has_self_parameter - && self.tcx.fn_sig(method.def_id).skip_binder().inputs().skip_binder().len() - == 1 - } - _ => false, - } + method.is_method() + && self.tcx.fn_sig(method.def_id).skip_binder().inputs().skip_binder().len() == 1 } /// If the given `HirId` corresponds to a block with a trailing expression, return that expression diff --git a/compiler/rustc_hir_typeck/src/errors.rs b/compiler/rustc_hir_typeck/src/errors.rs index b73cd26927a5c..732795535087e 100644 --- a/compiler/rustc_hir_typeck/src/errors.rs +++ b/compiler/rustc_hir_typeck/src/errors.rs @@ -5,7 +5,7 @@ use std::borrow::Cow; use rustc_errors::codes::*; use rustc_errors::{ Applicability, Diag, DiagArgValue, DiagSymbolList, EmissionGuarantee, IntoDiagArg, MultiSpan, - SubdiagMessageOp, Subdiagnostic, + Subdiagnostic, }; use rustc_macros::{Diagnostic, LintDiagnostic, Subdiagnostic}; use rustc_middle::ty::{self, Ty}; @@ -270,11 +270,7 @@ pub(crate) struct SuggestAnnotations { pub suggestions: Vec, } impl Subdiagnostic for SuggestAnnotations { - fn add_to_diag_with>( - self, - diag: &mut Diag<'_, G>, - _: &F, - ) { + fn add_to_diag(self, diag: &mut Diag<'_, G>) { if self.suggestions.is_empty() { return; } @@ -337,11 +333,7 @@ pub(crate) struct TypeMismatchFruTypo { } impl Subdiagnostic for TypeMismatchFruTypo { - fn add_to_diag_with>( - self, - diag: &mut Diag<'_, G>, - _f: &F, - ) { + fn add_to_diag(self, diag: &mut Diag<'_, G>) { diag.arg("expr", self.expr.as_deref().unwrap_or("NONE")); // Only explain that `a ..b` is a range if it's split up @@ -599,11 +591,7 @@ pub(crate) struct RemoveSemiForCoerce { } impl Subdiagnostic for RemoveSemiForCoerce { - fn add_to_diag_with>( - self, - diag: &mut Diag<'_, G>, - _f: &F, - ) { + fn add_to_diag(self, diag: &mut Diag<'_, G>) { let mut multispan: MultiSpan = self.semi.into(); multispan.push_span_label(self.expr, fluent::hir_typeck_remove_semi_for_coerce_expr); multispan.push_span_label(self.ret, fluent::hir_typeck_remove_semi_for_coerce_ret); @@ -727,7 +715,7 @@ pub(crate) struct NoAssociatedItem { #[primary_span] pub span: Span, pub item_kind: &'static str, - pub item_name: Ident, + pub item_ident: Ident, pub ty_prefix: Cow<'static, str>, pub ty_str: String, pub trait_missing_method: bool, @@ -778,20 +766,16 @@ pub(crate) enum CastUnknownPointerSub { } impl rustc_errors::Subdiagnostic for CastUnknownPointerSub { - fn add_to_diag_with>( - self, - diag: &mut Diag<'_, G>, - f: &F, - ) { + fn add_to_diag(self, diag: &mut Diag<'_, G>) { match self { CastUnknownPointerSub::To(span) => { - let msg = f(diag, crate::fluent_generated::hir_typeck_label_to); + let msg = diag.eagerly_translate(fluent::hir_typeck_label_to); diag.span_label(span, msg); - let msg = f(diag, crate::fluent_generated::hir_typeck_note); + let msg = diag.eagerly_translate(fluent::hir_typeck_note); diag.note(msg); } CastUnknownPointerSub::From(span) => { - let msg = f(diag, crate::fluent_generated::hir_typeck_label_from); + let msg = diag.eagerly_translate(fluent::hir_typeck_label_from); diag.span_label(span, msg); } } @@ -979,3 +963,11 @@ pub(crate) enum SupertraitItemShadowee { traits: DiagSymbolList, }, } + +#[derive(Diagnostic)] +#[diag(hir_typeck_register_type_unstable)] +pub(crate) struct RegisterTypeUnstable<'a> { + #[primary_span] + pub span: Span, + pub ty: Ty<'a>, +} diff --git a/compiler/rustc_hir_typeck/src/expr.rs b/compiler/rustc_hir_typeck/src/expr.rs index a75f6f4caac91..2c28ffd1fe3df 100644 --- a/compiler/rustc_hir_typeck/src/expr.rs +++ b/compiler/rustc_hir_typeck/src/expr.rs @@ -16,7 +16,6 @@ use rustc_errors::{ }; use rustc_hir::def::{CtorKind, DefKind, Res}; use rustc_hir::def_id::DefId; -use rustc_hir::intravisit::Visitor; use rustc_hir::lang_items::LangItem; use rustc_hir::{ExprKind, HirId, QPath}; use rustc_hir_analysis::NoVariantNamed; @@ -40,7 +39,6 @@ use tracing::{debug, instrument, trace}; use {rustc_ast as ast, rustc_hir as hir}; use crate::Expectation::{self, ExpectCastableToType, ExpectHasType, NoExpectation}; -use crate::TupleArgumentsFlag::DontTupleArguments; use crate::coercion::{CoerceMany, DynamicCoerceMany}; use crate::errors::{ AddressOfTemporaryTaken, BaseExpressionDoubleDot, BaseExpressionDoubleDotAddExpr, @@ -51,8 +49,8 @@ use crate::errors::{ YieldExprOutsideOfCoroutine, }; use crate::{ - BreakableCtxt, CoroutineTypes, Diverges, FnCtxt, Needs, cast, fatally_break_rust, - report_unexpected_variant_res, type_error_struct, + BreakableCtxt, CoroutineTypes, Diverges, FnCtxt, GatherLocalsVisitor, Needs, + TupleArgumentsFlag, cast, fatally_break_rust, report_unexpected_variant_res, type_error_struct, }; impl<'a, 'tcx> FnCtxt<'a, 'tcx> { @@ -482,7 +480,8 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { // All of these constitute a read, or match on something that isn't `!`, // which would require a `NeverToAny` coercion. - hir::PatKind::Binding(_, _, _, _) + hir::PatKind::Missing + | hir::PatKind::Binding(_, _, _, _) | hir::PatKind::Struct(_, _, _) | hir::PatKind::TupleStruct(_, _, _) | hir::PatKind::Tuple(_, _) @@ -512,7 +511,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { self.check_expr_assign(expr, expected, lhs, rhs, span) } ExprKind::AssignOp(op, lhs, rhs) => { - self.check_expr_binop_assign(expr, op, lhs, rhs, expected) + self.check_expr_assign_op(expr, op, lhs, rhs, expected) } ExprKind::Unary(unop, oprnd) => self.check_expr_unop(unop, oprnd, expected, expr), ExprKind::AddrOf(kind, mutbl, oprnd) => { @@ -1518,11 +1517,15 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { let_expr: &'tcx hir::LetExpr<'tcx>, hir_id: HirId, ) -> Ty<'tcx> { + GatherLocalsVisitor::gather_from_let_expr(self, let_expr, hir_id); + // for let statements, this is done in check_stmt let init = let_expr.init; self.warn_if_unreachable(init.hir_id, init.span, "block in `let` expression"); + // otherwise check exactly as a let statement self.check_decl((let_expr, hir_id).into()); + // but return a bool, for this is a boolean expression if let ast::Recovered::Yes(error_guaranteed) = let_expr.recovered { self.set_tainted_by_errors(error_guaranteed); @@ -1590,32 +1593,45 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { // no need to check for bot/err -- callee does that let rcvr_t = self.structurally_resolve_type(rcvr.span, rcvr_t); - let method = match self.lookup_method(rcvr_t, segment, segment.ident.span, expr, rcvr, args) - { + match self.lookup_method(rcvr_t, segment, segment.ident.span, expr, rcvr, args) { Ok(method) => { - // We could add a "consider `foo::`" suggestion here, but I wasn't able to - // trigger this codepath causing `structurally_resolve_type` to emit an error. self.write_method_call_and_enforce_effects(expr.hir_id, expr.span, method); - Ok(method) + + self.check_argument_types( + segment.ident.span, + expr, + &method.sig.inputs()[1..], + method.sig.output(), + expected, + args, + method.sig.c_variadic, + TupleArgumentsFlag::DontTupleArguments, + Some(method.def_id), + ); + + method.sig.output() } Err(error) => { - if segment.ident.name == kw::Empty { - span_bug!(rcvr.span, "empty method name") - } else { - Err(self.report_method_error(expr.hir_id, rcvr_t, error, expected, false)) - } - } - }; + let guar = self.report_method_error(expr.hir_id, rcvr_t, error, expected, false); - // Call the generic checker. - self.check_method_argument_types( - segment.ident.span, - expr, - method, - args, - DontTupleArguments, - expected, - ) + let err_inputs = self.err_args(args.len(), guar); + let err_output = Ty::new_error(self.tcx, guar); + + self.check_argument_types( + segment.ident.span, + expr, + &err_inputs, + err_output, + NoExpectation, + args, + false, + TupleArgumentsFlag::DontTupleArguments, + None, + ); + + err_output + } + } } /// Checks use `x.use`. @@ -1814,7 +1830,6 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { // Create a new function context. let def_id = block.def_id; let fcx = FnCtxt::new(self, self.param_env, def_id); - crate::GatherLocalsVisitor::new(&fcx).visit_body(body); let ty = fcx.check_expr_with_expectation(body.value, expected); fcx.require_type_is_sized(ty, body.value.span, ObligationCauseCode::SizedConstOrStatic); @@ -2204,8 +2219,12 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { let fields = listify(&missing_mandatory_fields, |f| format!("`{f}`")).unwrap(); self.dcx() .struct_span_err( - span.shrink_to_hi(), - format!("missing mandatory field{s} {fields}"), + span.shrink_to_lo(), + format!("missing field{s} {fields} in initializer"), + ) + .with_span_label( + span.shrink_to_lo(), + "fields that do not have a defaulted value must be provided explicitly", ) .emit(); return; @@ -2583,9 +2602,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { .into_iter() .flat_map(|i| self.tcx.associated_items(i).in_definition_order()) // Only assoc fn with no receivers. - .filter(|item| { - matches!(item.kind, ty::AssocKind::Fn) && !item.fn_has_self_parameter - }) + .filter(|item| item.is_fn() && !item.is_method()) .filter_map(|item| { // Only assoc fns that return `Self` let fn_sig = self.tcx.fn_sig(item.def_id).skip_binder(); @@ -2598,8 +2615,9 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { return None; } let input_len = fn_sig.inputs().skip_binder().len(); - let order = !item.name.as_str().starts_with("new"); - Some((order, item.name, input_len)) + let name = item.name(); + let order = !name.as_str().starts_with("new"); + Some((order, name, input_len)) }) .collect::>(); items.sort_by_key(|(order, _, _)| *order); @@ -2915,8 +2933,13 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { } // We failed to check the expression, report an error. - // Emits an error if we deref an infer variable, like calling `.field` on a base type of &_. - self.structurally_resolve_type(autoderef.span(), autoderef.final_ty(false)); + // Emits an error if we deref an infer variable, like calling `.field` on a base type + // of `&_`. We can also use this to suppress unnecessary "missing field" errors that + // will follow ambiguity errors. + let final_ty = self.structurally_resolve_type(autoderef.span(), autoderef.final_ty(false)); + if let ty::Error(_) = final_ty.kind() { + return final_ty; + } if let Some((adjustments, did)) = private_candidate { // (#90483) apply adjustments to avoid ExprUseVisitor from @@ -2932,9 +2955,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { return Ty::new_error(self.tcx(), guar); } - let guar = if field.name == kw::Empty { - self.dcx().span_bug(field.span, "field name with no name") - } else if self.method_exists_for_diagnostic( + let guar = if self.method_exists_for_diagnostic( field, base_ty, expr.hir_id, @@ -3239,7 +3260,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { Some(x) => self.tcx.local_def_id_to_hir_id(x), None => return, }; - let param_span = self.tcx.hir().span(param_hir_id); + let param_span = self.tcx.hir_span(param_hir_id); let param_name = self.tcx.hir_ty_param_name(param_def_id.expect_local()); err.span_label(param_span, format!("type parameter '{param_name}' declared here")); @@ -3286,8 +3307,8 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { err.multipart_suggestion( format!("{val} is a raw pointer; try dereferencing it"), vec![ - (base.span.shrink_to_lo(), "(*".to_string()), - (base.span.shrink_to_hi(), ")".to_string()), + (base.span.shrink_to_lo(), "(*".into()), + (base.span.between(field.span), format!(").")), ], Applicability::MaybeIncorrect, ); diff --git a/compiler/rustc_hir_typeck/src/expr_use_visitor.rs b/compiler/rustc_hir_typeck/src/expr_use_visitor.rs index 6fb289235de96..3493d359028d9 100644 --- a/compiler/rustc_hir_typeck/src/expr_use_visitor.rs +++ b/compiler/rustc_hir_typeck/src/expr_use_visitor.rs @@ -158,7 +158,7 @@ pub trait TypeInformationCtxt<'tcx> { fn resolve_vars_if_possible>>(&self, t: T) -> T; - fn try_structurally_resolve_type(&self, span: Span, ty: Ty<'tcx>) -> Ty<'tcx>; + fn structurally_resolve_type(&self, span: Span, ty: Ty<'tcx>) -> Ty<'tcx>; fn report_bug(&self, span: Span, msg: impl ToString) -> Self::Error; @@ -191,8 +191,8 @@ impl<'tcx> TypeInformationCtxt<'tcx> for &FnCtxt<'_, 'tcx> { self.infcx.resolve_vars_if_possible(t) } - fn try_structurally_resolve_type(&self, sp: Span, ty: Ty<'tcx>) -> Ty<'tcx> { - (**self).try_structurally_resolve_type(sp, ty) + fn structurally_resolve_type(&self, sp: Span, ty: Ty<'tcx>) -> Ty<'tcx> { + (**self).structurally_resolve_type(sp, ty) } fn report_bug(&self, span: Span, msg: impl ToString) -> Self::Error { @@ -236,7 +236,7 @@ impl<'tcx> TypeInformationCtxt<'tcx> for (&LateContext<'tcx>, LocalDefId) { self.0.maybe_typeck_results().expect("expected typeck results") } - fn try_structurally_resolve_type(&self, _span: Span, ty: Ty<'tcx>) -> Ty<'tcx> { + fn structurally_resolve_type(&self, _span: Span, ty: Ty<'tcx>) -> Ty<'tcx> { // FIXME: Maybe need to normalize here. ty } @@ -611,6 +611,7 @@ impl<'tcx, Cx: TypeInformationCtxt<'tcx>, D: Delegate<'tcx>> ExprUseVisitor<'tcx for pat in pats { self.cat_pattern(discr_place.clone(), pat, &mut |place, pat| { match &pat.kind { + PatKind::Missing => unreachable!(), PatKind::Binding(.., opt_sub_pat) => { // If the opt_sub_pat is None, then the binding does not count as // a wildcard for the purpose of borrowing discr. @@ -775,7 +776,7 @@ impl<'tcx, Cx: TypeInformationCtxt<'tcx>, D: Delegate<'tcx>> ExprUseVisitor<'tcx // Select just those fields of the `with` // expression that will actually be used - match self.cx.try_structurally_resolve_type(with_expr.span, with_place.place.ty()).kind() { + match self.cx.structurally_resolve_type(with_expr.span, with_place.place.ty()).kind() { ty::Adt(adt, args) if adt.is_struct() => { // Consume those fields of the with expression that are needed. for (f_index, with_field) in adt.non_enum_variant().fields.iter_enumerated() { @@ -999,11 +1000,15 @@ impl<'tcx, Cx: TypeInformationCtxt<'tcx>, D: Delegate<'tcx>> ExprUseVisitor<'tcx // determines whether to borrow *at the level of the deref pattern* rather than // borrowing the bound place (since that inner place is inside the temporary that // stores the result of calling `deref()`/`deref_mut()` so can't be captured). - let mutable = self.cx.typeck_results().pat_has_ref_mut_binding(subpattern); - let mutability = - if mutable { hir::Mutability::Mut } else { hir::Mutability::Not }; - let bk = ty::BorrowKind::from_mutbl(mutability); - self.delegate.borrow_mut().borrow(place, discr_place.hir_id, bk); + // Deref patterns on boxes don't borrow, so we ignore them here. + // HACK: this could be a fake pattern corresponding to a deref inserted by match + // ergonomics, in which case `pat.hir_id` will be the id of the subpattern. + if let hir::ByRef::Yes(mutability) = + self.cx.typeck_results().deref_pat_borrow_mode(place.place.ty(), subpattern) + { + let bk = ty::BorrowKind::from_mutbl(mutability); + self.delegate.borrow_mut().borrow(place, discr_place.hir_id, bk); + } } PatKind::Never => { // A `!` pattern always counts as an immutable read of the discriminant, @@ -1171,7 +1176,7 @@ impl<'tcx, Cx: TypeInformationCtxt<'tcx>, D: Delegate<'tcx>> ExprUseVisitor<'tcx /// two operations: a dereference to reach the array data and then an index to /// jump forward to the relevant item. impl<'tcx, Cx: TypeInformationCtxt<'tcx>, D: Delegate<'tcx>> ExprUseVisitor<'tcx, Cx, D> { - fn resolve_type_vars_or_bug( + fn expect_and_resolve_type( &self, id: HirId, ty: Option>, @@ -1180,14 +1185,7 @@ impl<'tcx, Cx: TypeInformationCtxt<'tcx>, D: Delegate<'tcx>> ExprUseVisitor<'tcx Some(ty) => { let ty = self.cx.resolve_vars_if_possible(ty); self.cx.error_reported_in_ty(ty)?; - if ty.is_ty_var() { - debug!("resolve_type_vars_or_bug: infer var from {:?}", ty); - Err(self - .cx - .report_bug(self.cx.tcx().hir().span(id), "encountered type variable")) - } else { - Ok(ty) - } + Ok(ty) } None => { // FIXME: We shouldn't be relying on the infcx being tainted. @@ -1198,15 +1196,15 @@ impl<'tcx, Cx: TypeInformationCtxt<'tcx>, D: Delegate<'tcx>> ExprUseVisitor<'tcx } fn node_ty(&self, hir_id: HirId) -> Result, Cx::Error> { - self.resolve_type_vars_or_bug(hir_id, self.cx.typeck_results().node_type_opt(hir_id)) + self.expect_and_resolve_type(hir_id, self.cx.typeck_results().node_type_opt(hir_id)) } fn expr_ty(&self, expr: &hir::Expr<'_>) -> Result, Cx::Error> { - self.resolve_type_vars_or_bug(expr.hir_id, self.cx.typeck_results().expr_ty_opt(expr)) + self.expect_and_resolve_type(expr.hir_id, self.cx.typeck_results().expr_ty_opt(expr)) } fn expr_ty_adjusted(&self, expr: &hir::Expr<'_>) -> Result, Cx::Error> { - self.resolve_type_vars_or_bug( + self.expect_and_resolve_type( expr.hir_id, self.cx.typeck_results().expr_ty_adjusted_opt(expr), ) @@ -1228,9 +1226,9 @@ impl<'tcx, Cx: TypeInformationCtxt<'tcx>, D: Delegate<'tcx>> ExprUseVisitor<'tcx // actually this is somewhat "disjoint" from the code below // that aims to account for `ref x`. if let Some(vec) = self.cx.typeck_results().pat_adjustments().get(pat.hir_id) { - if let Some(first_ty) = vec.first() { - debug!("pat_ty(pat={:?}) found adjusted ty `{:?}`", pat, first_ty); - return Ok(*first_ty); + if let Some(first_adjust) = vec.first() { + debug!("pat_ty(pat={:?}) found adjustment `{:?}`", pat, first_adjust); + return Ok(first_adjust.source); } } else if let PatKind::Ref(subpat, _) = pat.kind && self.cx.typeck_results().skipped_ref_pats().contains(pat.hir_id) @@ -1261,10 +1259,7 @@ impl<'tcx, Cx: TypeInformationCtxt<'tcx>, D: Delegate<'tcx>> ExprUseVisitor<'tcx // a bind-by-ref means that the base_ty will be the type of the ident itself, // but what we want here is the type of the underlying value being borrowed. // So peel off one-level, turning the &T into T. - match self - .cx - .try_structurally_resolve_type(pat.span, base_ty) - .builtin_deref(false) + match self.cx.structurally_resolve_type(pat.span, base_ty).builtin_deref(false) { Some(ty) => Ok(ty), None => { @@ -1503,19 +1498,18 @@ impl<'tcx, Cx: TypeInformationCtxt<'tcx>, D: Delegate<'tcx>> ExprUseVisitor<'tcx let mut projections = base_place.place.projections; let node_ty = self.cx.typeck_results().node_type(node); - // Opaque types can't have field projections, but we can instead convert - // the current place in-place (heh) to the hidden type, and then apply all - // follow up projections on that. - if node_ty != place_ty - && self - .cx - .try_structurally_resolve_type( - self.cx.tcx().hir().span(base_place.hir_id), - place_ty, - ) - .is_impl_trait() - { - projections.push(Projection { kind: ProjectionKind::OpaqueCast, ty: node_ty }); + if !self.cx.tcx().next_trait_solver_globally() { + // Opaque types can't have field projections, but we can instead convert + // the current place in-place (heh) to the hidden type, and then apply all + // follow up projections on that. + if node_ty != place_ty + && self + .cx + .structurally_resolve_type(self.cx.tcx().hir_span(base_place.hir_id), place_ty) + .is_impl_trait() + { + projections.push(Projection { kind: ProjectionKind::OpaqueCast, ty: node_ty }); + } } projections.push(Projection { kind, ty }); PlaceWithHirId::new(node, base_place.place.base_ty, base_place.place.base, projections) @@ -1533,7 +1527,7 @@ impl<'tcx, Cx: TypeInformationCtxt<'tcx>, D: Delegate<'tcx>> ExprUseVisitor<'tcx let base_ty = self.expr_ty_adjusted(base)?; let ty::Ref(region, _, mutbl) = - *self.cx.try_structurally_resolve_type(base.span, base_ty).kind() + *self.cx.structurally_resolve_type(base.span, base_ty).kind() else { span_bug!(expr.span, "cat_overloaded_place: base is not a reference"); }; @@ -1551,17 +1545,14 @@ impl<'tcx, Cx: TypeInformationCtxt<'tcx>, D: Delegate<'tcx>> ExprUseVisitor<'tcx let base_curr_ty = base_place.place.ty(); let deref_ty = match self .cx - .try_structurally_resolve_type( - self.cx.tcx().hir().span(base_place.hir_id), - base_curr_ty, - ) + .structurally_resolve_type(self.cx.tcx().hir_span(base_place.hir_id), base_curr_ty) .builtin_deref(true) { Some(ty) => ty, None => { debug!("explicit deref of non-derefable type: {:?}", base_curr_ty); return Err(self.cx.report_bug( - self.cx.tcx().hir().span(node), + self.cx.tcx().hir_span(node), "explicit deref of non-derefable type", )); } @@ -1582,7 +1573,7 @@ impl<'tcx, Cx: TypeInformationCtxt<'tcx>, D: Delegate<'tcx>> ExprUseVisitor<'tcx ) -> Result { let res = self.cx.typeck_results().qpath_res(qpath, pat_hir_id); let ty = self.cx.typeck_results().node_type(pat_hir_id); - let ty::Adt(adt_def, _) = self.cx.try_structurally_resolve_type(span, ty).kind() else { + let ty::Adt(adt_def, _) = self.cx.structurally_resolve_type(span, ty).kind() else { return Err(self .cx .report_bug(span, "struct or tuple struct pattern not applied to an ADT")); @@ -1614,7 +1605,7 @@ impl<'tcx, Cx: TypeInformationCtxt<'tcx>, D: Delegate<'tcx>> ExprUseVisitor<'tcx span: Span, ) -> Result { let ty = self.cx.typeck_results().node_type(pat_hir_id); - match self.cx.try_structurally_resolve_type(span, ty).kind() { + match self.cx.structurally_resolve_type(span, ty).kind() { ty::Adt(adt_def, _) => Ok(adt_def.variant(variant_index).fields.len()), _ => { self.cx @@ -1629,7 +1620,7 @@ impl<'tcx, Cx: TypeInformationCtxt<'tcx>, D: Delegate<'tcx>> ExprUseVisitor<'tcx /// Here `pat_hir_id` is the HirId of the pattern itself. fn total_fields_in_tuple(&self, pat_hir_id: HirId, span: Span) -> Result { let ty = self.cx.typeck_results().node_type(pat_hir_id); - match self.cx.try_structurally_resolve_type(span, ty).kind() { + match self.cx.structurally_resolve_type(span, ty).kind() { ty::Tuple(args) => Ok(args.len()), _ => Err(self.cx.report_bug(span, "tuple pattern not applied to a tuple")), } @@ -1682,12 +1673,32 @@ impl<'tcx, Cx: TypeInformationCtxt<'tcx>, D: Delegate<'tcx>> ExprUseVisitor<'tcx // Then we see that to get the same result, we must start with // `deref { deref { place_foo }}` instead of `place_foo` since the pattern is now `Some(x,)` // and not `&&Some(x,)`, even though its assigned type is that of `&&Some(x,)`. - for _ in - 0..self.cx.typeck_results().pat_adjustments().get(pat.hir_id).map_or(0, |v| v.len()) - { + let typeck_results = self.cx.typeck_results(); + let adjustments: &[adjustment::PatAdjustment<'tcx>] = + typeck_results.pat_adjustments().get(pat.hir_id).map_or(&[], |v| &**v); + let mut adjusts = adjustments.iter().peekable(); + while let Some(adjust) = adjusts.next() { debug!("applying adjustment to place_with_id={:?}", place_with_id); - place_with_id = self.cat_deref(pat.hir_id, place_with_id)?; + place_with_id = match adjust.kind { + adjustment::PatAdjust::BuiltinDeref => self.cat_deref(pat.hir_id, place_with_id)?, + adjustment::PatAdjust::OverloadedDeref => { + // This adjustment corresponds to an overloaded deref; unless it's on a box, it + // borrows the scrutinee to call `Deref::deref` or `DerefMut::deref_mut`. Invoke + // the callback before setting `place_with_id` to the temporary storing the + // result of the deref. + // HACK(dianne): giving the callback a fake deref pattern makes sure it behaves the + // same as it would if this were an explicit deref pattern (including for boxes). + op(&place_with_id, &hir::Pat { kind: PatKind::Deref(pat), ..*pat })?; + let target_ty = match adjusts.peek() { + Some(&&next_adjust) => next_adjust.source, + // At the end of the deref chain, we get `pat`'s scrutinee. + None => self.pat_ty_unadjusted(pat)?, + }; + self.pat_deref_place(pat.hir_id, place_with_id, pat, target_ty)? + } + }; } + drop(typeck_results); // explicitly release borrow of typeck results, just in case. let place_with_id = place_with_id; // lose mutability debug!("applied adjustment derefs to get place_with_id={:?}", place_with_id); @@ -1790,21 +1801,15 @@ impl<'tcx, Cx: TypeInformationCtxt<'tcx>, D: Delegate<'tcx>> ExprUseVisitor<'tcx self.cat_pattern(subplace, subpat, op)?; } PatKind::Deref(subpat) => { - let mutable = self.cx.typeck_results().pat_has_ref_mut_binding(subpat); - let mutability = if mutable { hir::Mutability::Mut } else { hir::Mutability::Not }; - let re_erased = self.cx.tcx().lifetimes.re_erased; let ty = self.pat_ty_adjusted(subpat)?; - let ty = Ty::new_ref(self.cx.tcx(), re_erased, ty, mutability); - // A deref pattern generates a temporary. - let base = self.cat_rvalue(pat.hir_id, ty); - let place = self.cat_deref(pat.hir_id, base)?; + let place = self.pat_deref_place(pat.hir_id, place_with_id, subpat, ty)?; self.cat_pattern(place, subpat, op)?; } PatKind::Slice(before, ref slice, after) => { let Some(element_ty) = self .cx - .try_structurally_resolve_type(pat.span, place_with_id.place.ty()) + .structurally_resolve_type(pat.span, place_with_id.place.ty()) .builtin_index() else { debug!("explicit index of non-indexable type {:?}", place_with_id); @@ -1840,6 +1845,7 @@ impl<'tcx, Cx: TypeInformationCtxt<'tcx>, D: Delegate<'tcx>> ExprUseVisitor<'tcx | PatKind::Expr(..) | PatKind::Range(..) | PatKind::Never + | PatKind::Missing | PatKind::Wild | PatKind::Err(_) => { // always ok @@ -1849,8 +1855,31 @@ impl<'tcx, Cx: TypeInformationCtxt<'tcx>, D: Delegate<'tcx>> ExprUseVisitor<'tcx Ok(()) } + /// Represents the place matched on by a deref pattern's interior. + fn pat_deref_place( + &self, + hir_id: HirId, + base_place: PlaceWithHirId<'tcx>, + inner: &hir::Pat<'_>, + target_ty: Ty<'tcx>, + ) -> Result, Cx::Error> { + match self.cx.typeck_results().deref_pat_borrow_mode(base_place.place.ty(), inner) { + // Deref patterns on boxes are lowered using a built-in deref. + hir::ByRef::No => self.cat_deref(hir_id, base_place), + // For other types, we create a temporary to match on. + hir::ByRef::Yes(mutability) => { + let re_erased = self.cx.tcx().lifetimes.re_erased; + let ty = Ty::new_ref(self.cx.tcx(), re_erased, target_ty, mutability); + // A deref pattern stores the result of `Deref::deref` or `DerefMut::deref_mut` ... + let base = self.cat_rvalue(hir_id, ty); + // ... and the inner pattern matches on the place behind that reference. + self.cat_deref(hir_id, base) + } + } + } + fn is_multivariant_adt(&self, ty: Ty<'tcx>, span: Span) -> bool { - if let ty::Adt(def, _) = self.cx.try_structurally_resolve_type(span, ty).kind() { + if let ty::Adt(def, _) = self.cx.structurally_resolve_type(span, ty).kind() { // Note that if a non-exhaustive SingleVariant is defined in another crate, we need // to assume that more cases will be added to the variant in the future. This mean // that we should handle non-exhaustive SingleVariant the same way we would handle diff --git a/compiler/rustc_hir_typeck/src/fn_ctxt/_impl.rs b/compiler/rustc_hir_typeck/src/fn_ctxt/_impl.rs index d75c2853ba080..362c7d8efac08 100644 --- a/compiler/rustc_hir_typeck/src/fn_ctxt/_impl.rs +++ b/compiler/rustc_hir_typeck/src/fn_ctxt/_impl.rs @@ -22,14 +22,14 @@ use rustc_infer::infer::{DefineOpaqueTypes, InferResult}; use rustc_lint::builtin::SELF_CONSTRUCTOR_FROM_OUTER_ITEM; use rustc_middle::ty::adjustment::{Adjust, Adjustment, AutoBorrow, AutoBorrowMutability}; use rustc_middle::ty::{ - self, AdtKind, CanonicalUserType, GenericArgKind, GenericArgsRef, GenericParamDefKind, - IsIdentity, Ty, TyCtxt, TypeFoldable, TypeVisitable, TypeVisitableExt, UserArgs, UserSelfTy, + self, AdtKind, CanonicalUserType, GenericArgsRef, GenericParamDefKind, IsIdentity, Ty, TyCtxt, + TypeFoldable, TypeVisitable, TypeVisitableExt, UserArgs, UserSelfTy, }; use rustc_middle::{bug, span_bug}; use rustc_session::lint; +use rustc_span::Span; use rustc_span::def_id::LocalDefId; use rustc_span::hygiene::DesugaringKind; -use rustc_span::{Span, kw}; use rustc_trait_selection::error_reporting::infer::need_type_info::TypeAnnotationNeeded; use rustc_trait_selection::traits::{ self, NormalizeExt, ObligationCauseCode, StructurallyNormalizeExt, @@ -157,7 +157,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { // Lots of that diagnostics code relies on subtle effects of re-lowering, so we'll // let it keep doing that and just ensure that compilation won't succeed. self.dcx().span_delayed_bug( - self.tcx.hir().span(id), + self.tcx.hir_span(id), format!("`{prev}` overridden by `{ty}` for {id:?} in {:?}", self.body_id), ); } @@ -532,7 +532,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { let ct = self.lowerer().lower_const_arg(const_arg, feed); self.register_wf_obligation( ct.into(), - self.tcx.hir().span(const_arg.hir_id), + self.tcx.hir_span(const_arg.hir_id), ObligationCauseCode::WellFormed(None), ); ct @@ -573,7 +573,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { /// Registers an obligation for checking later, during regionck, that `arg` is well-formed. pub(crate) fn register_wf_obligation( &self, - arg: ty::GenericArg<'tcx>, + term: ty::Term<'tcx>, span: Span, code: traits::ObligationCauseCode<'tcx>, ) { @@ -583,16 +583,14 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { self.tcx, cause, self.param_env, - ty::Binder::dummy(ty::PredicateKind::Clause(ty::ClauseKind::WellFormed(arg))), + ty::ClauseKind::WellFormed(term), )); } /// Registers obligations that all `args` are well-formed. pub(crate) fn add_wf_bounds(&self, args: GenericArgsRef<'tcx>, span: Span) { - for arg in args.iter().filter(|arg| { - matches!(arg.unpack(), GenericArgKind::Type(..) | GenericArgKind::Const(..)) - }) { - self.register_wf_obligation(arg, span, ObligationCauseCode::WellFormed(None)); + for term in args.iter().filter_map(ty::GenericArg::as_term) { + self.register_wf_obligation(term, span, ObligationCauseCode::WellFormed(None)); } } @@ -635,34 +633,39 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { let mut obligations = vec![]; - for &(coroutine_def_id, interior) in coroutines.iter() { - debug!(?coroutine_def_id); + if !self.next_trait_solver() { + for &(coroutine_def_id, interior) in coroutines.iter() { + debug!(?coroutine_def_id); - // Create the `CoroutineWitness` type that we will unify with `interior`. - let args = ty::GenericArgs::identity_for_item( - self.tcx, - self.tcx.typeck_root_def_id(coroutine_def_id.to_def_id()), - ); - let witness = Ty::new_coroutine_witness(self.tcx, coroutine_def_id.to_def_id(), args); - - // Unify `interior` with `witness` and collect all the resulting obligations. - let span = self.tcx.hir_body_owned_by(coroutine_def_id).value.span; - let ty::Infer(ty::InferTy::TyVar(_)) = interior.kind() else { - span_bug!(span, "coroutine interior witness not infer: {:?}", interior.kind()) - }; - let ok = self - .at(&self.misc(span), self.param_env) - // Will never define opaque types, as all we do is instantiate a type variable. - .eq(DefineOpaqueTypes::Yes, interior, witness) - .expect("Failed to unify coroutine interior type"); - - obligations.extend(ok.obligations); + // Create the `CoroutineWitness` type that we will unify with `interior`. + let args = ty::GenericArgs::identity_for_item( + self.tcx, + self.tcx.typeck_root_def_id(coroutine_def_id.to_def_id()), + ); + let witness = + Ty::new_coroutine_witness(self.tcx, coroutine_def_id.to_def_id(), args); + + // Unify `interior` with `witness` and collect all the resulting obligations. + let span = self.tcx.hir_body_owned_by(coroutine_def_id).value.span; + let ty::Infer(ty::InferTy::TyVar(_)) = interior.kind() else { + span_bug!(span, "coroutine interior witness not infer: {:?}", interior.kind()) + }; + let ok = self + .at(&self.misc(span), self.param_env) + // Will never define opaque types, as all we do is instantiate a type variable. + .eq(DefineOpaqueTypes::Yes, interior, witness) + .expect("Failed to unify coroutine interior type"); + + obligations.extend(ok.obligations); + } } - // FIXME: Use a real visitor for unstalled obligations in the new solver. if !coroutines.is_empty() { - obligations - .extend(self.fulfillment_cx.borrow_mut().drain_unstalled_obligations(&self.infcx)); + obligations.extend( + self.fulfillment_cx + .borrow_mut() + .drain_stalled_obligations_for_coroutines(&self.infcx), + ); } self.typeck_results @@ -833,7 +836,6 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { let trait_missing_method = matches!(error, method::MethodError::NoMatch(_)) && ty.normalized.is_trait(); - assert_ne!(item_name.name, kw::Empty); self.report_method_error( hir_id, ty.normalized, @@ -1316,27 +1318,16 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { infer_args: bool, ) -> ty::GenericArg<'tcx> { let tcx = self.fcx.tcx(); - match param.kind { - GenericParamDefKind::Lifetime => self - .fcx - .re_infer( - self.span, - rustc_hir_analysis::hir_ty_lowering::RegionInferReason::Param(param), - ) - .into(), - GenericParamDefKind::Type { .. } | GenericParamDefKind::Const { .. } => { - if !infer_args && let Some(default) = param.default_value(tcx) { - // If we have a default, then it doesn't matter that we're not inferring - // the type/const arguments: We provide the default where any is missing. - return default.instantiate(tcx, preceding_args); - } - // If no type/const arguments were provided, we have to infer them. - // This case also occurs as a result of some malformed input, e.g., - // a lifetime argument being given instead of a type/const parameter. - // Using inference instead of `Error` gives better error messages. - self.fcx.var_for_def(self.span, param) - } + if !infer_args && let Some(default) = param.default_value(tcx) { + // If we have a default, then it doesn't matter that we're not inferring + // the type/const arguments: We provide the default where any is missing. + return default.instantiate(tcx, preceding_args); } + // If no type/const arguments were provided, we have to infer them. + // This case also occurs as a result of some malformed input, e.g., + // a lifetime argument being given instead of a type/const parameter. + // Using inference instead of `Error` gives better error messages. + self.fcx.var_for_def(self.span, param) } } diff --git a/compiler/rustc_hir_typeck/src/fn_ctxt/checks.rs b/compiler/rustc_hir_typeck/src/fn_ctxt/checks.rs index f4bd7ec701f8a..6cc7e82bbf735 100644 --- a/compiler/rustc_hir_typeck/src/fn_ctxt/checks.rs +++ b/compiler/rustc_hir_typeck/src/fn_ctxt/checks.rs @@ -8,7 +8,6 @@ use rustc_hir::def::{CtorOf, DefKind, Res}; use rustc_hir::def_id::DefId; use rustc_hir::intravisit::Visitor; use rustc_hir::{ExprKind, HirId, Node, QPath}; -use rustc_hir_analysis::check::intrinsicck::InlineAsmCtxt; use rustc_hir_analysis::check::potentially_plural_count; use rustc_hir_analysis::hir_ty_lowering::HirTyLowerer; use rustc_index::IndexVec; @@ -33,13 +32,13 @@ use crate::errors::SuggestPtrNullMut; use crate::fn_ctxt::arg_matrix::{ArgMatrix, Compatibility, Error, ExpectedIdx, ProvidedIdx}; use crate::fn_ctxt::infer::FnCall; use crate::gather_locals::Declaration; -use crate::method::MethodCallee; +use crate::inline_asm::InlineAsmCtxt; use crate::method::probe::IsSuggestion; use crate::method::probe::Mode::MethodCall; use crate::method::probe::ProbeScope::TraitsInScope; use crate::{ - BreakableCtxt, Diverges, Expectation, FnCtxt, LoweredTy, Needs, TupleArgumentsFlag, errors, - struct_span_code_err, + BreakableCtxt, Diverges, Expectation, FnCtxt, GatherLocalsVisitor, LoweredTy, Needs, + TupleArgumentsFlag, errors, struct_span_code_err, }; rustc_index::newtype_index! { @@ -98,13 +97,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { debug!("FnCtxt::check_asm: {} deferred checks", deferred_asm_checks.len()); for (asm, hir_id) in deferred_asm_checks.drain(..) { let enclosing_id = self.tcx.hir_enclosing_body_owner(hir_id); - InlineAsmCtxt::new( - enclosing_id, - &self.infcx, - self.typing_env(self.param_env), - &*self.typeck_results.borrow(), - ) - .check_asm(asm); + InlineAsmCtxt::new(self, enclosing_id).check_asm(asm); } } @@ -133,61 +126,6 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { } } - pub(in super::super) fn check_method_argument_types( - &self, - sp: Span, - expr: &'tcx hir::Expr<'tcx>, - method: Result, ErrorGuaranteed>, - args_no_rcvr: &'tcx [hir::Expr<'tcx>], - tuple_arguments: TupleArgumentsFlag, - expected: Expectation<'tcx>, - ) -> Ty<'tcx> { - let has_error = match method { - Ok(method) => method.args.error_reported().and(method.sig.error_reported()), - Err(guar) => Err(guar), - }; - if let Err(guar) = has_error { - let err_inputs = self.err_args( - method.map_or(args_no_rcvr.len(), |method| method.sig.inputs().len() - 1), - guar, - ); - let err_output = Ty::new_error(self.tcx, guar); - - let err_inputs = match tuple_arguments { - DontTupleArguments => err_inputs, - TupleArguments => vec![Ty::new_tup(self.tcx, &err_inputs)], - }; - - self.check_argument_types( - sp, - expr, - &err_inputs, - err_output, - NoExpectation, - args_no_rcvr, - false, - tuple_arguments, - method.ok().map(|method| method.def_id), - ); - return err_output; - } - - let method = method.unwrap(); - self.check_argument_types( - sp, - expr, - &method.sig.inputs()[1..], - method.sig.output(), - expected, - args_no_rcvr, - method.sig.c_variadic, - tuple_arguments, - Some(method.def_id), - ); - - method.sig.output() - } - /// Generic function that factors out common logic from function calls, /// method calls and overloaded operators. pub(in super::super) fn check_argument_types( @@ -616,7 +554,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { if let Some((DefKind::AssocFn, def_id)) = self.typeck_results.borrow().type_dependent_def(call_expr.hir_id) && let Some(assoc) = tcx.opt_associated_item(def_id) - && assoc.fn_has_self_parameter + && assoc.is_method() { Some(*receiver) } else { @@ -642,8 +580,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { TraitsInScope, |mut ctxt| ctxt.probe_for_similar_candidate(), ) - && let ty::AssocKind::Fn = assoc.kind - && assoc.fn_has_self_parameter + && assoc.is_method() { let args = self.infcx.fresh_args_for_item(call_name.span, assoc.def_id); let fn_sig = tcx.fn_sig(assoc.def_id).instantiate(tcx, args); @@ -684,10 +621,11 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { .all(|(expected, found)| self.may_coerce(*expected, *found)) && fn_sig.inputs()[1..].len() == input_types.len() { + let assoc_name = assoc.name(); err.span_suggestion_verbose( call_name.span, - format!("you might have meant to use `{}`", assoc.name), - assoc.name, + format!("you might have meant to use `{}`", assoc_name), + assoc_name, Applicability::MaybeIncorrect, ); return; @@ -707,7 +645,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { tcx.def_span(assoc.def_id), format!( "there's is a method with similar name `{}`, but the arguments don't match", - assoc.name, + assoc.name(), ), ); return; @@ -719,7 +657,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { format!( "there's is a method with similar name `{}`, but their argument count \ doesn't match", - assoc.name, + assoc.name(), ), ); return; @@ -925,7 +863,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { let detect_dotdot = |err: &mut Diag<'_>, ty: Ty<'_>, expr: &hir::Expr<'_>| { if let ty::Adt(adt, _) = ty.kind() - && self.tcx().lang_items().get(hir::LangItem::RangeFull) == Some(adt.did()) + && self.tcx().is_lang_item(adt.did(), hir::LangItem::RangeFull) && let hir::ExprKind::Struct( hir::QPath::LangItem(hir::LangItem::RangeFull, _), [], @@ -1136,7 +1074,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { && let self_implicit = matches!(call_expr.kind, hir::ExprKind::MethodCall(..)) as usize && let Some(Some(arg)) = - self.tcx.fn_arg_names(fn_def_id).get(expected_idx.as_usize() + self_implicit) + self.tcx.fn_arg_idents(fn_def_id).get(expected_idx.as_usize() + self_implicit) && arg.name != kw::SelfLower { format!("/* {} */", arg.name) @@ -1827,6 +1765,8 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { /// Type check a `let` statement. fn check_decl_local(&self, local: &'tcx hir::LetStmt<'tcx>) { + GatherLocalsVisitor::gather_from_local(self, local); + let ty = self.check_decl(local.into()); self.write_ty(local.hir_id, ty); if local.pat.is_never_pattern() { @@ -2619,7 +2559,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { is_method: bool, ) -> Option<(IndexVec, FnParam<'_>)>, &hir::Generics<'_>)> { - let (sig, generics, body_id, param_names) = match self.tcx.hir_get_if_local(def_id)? { + let (sig, generics, body_id, params) = match self.tcx.hir_get_if_local(def_id)? { hir::Node::TraitItem(&hir::TraitItem { generics, kind: hir::TraitItemKind::Fn(sig, trait_fn), @@ -2661,7 +2601,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { None } }); - match (body_id, param_names) { + match (body_id, params) { (Some(_), Some(_)) | (None, None) => unreachable!(), (Some(body), None) => { let params = self.tcx.hir_body(body).params; @@ -2678,7 +2618,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { params.get(is_method as usize..params.len() - sig.decl.c_variadic as usize)?; debug_assert_eq!(params.len(), fn_inputs.len()); Some(( - fn_inputs.zip(params.iter().map(|&ident| FnParam::Name(ident))).collect(), + fn_inputs.zip(params.iter().map(|&ident| FnParam::Ident(ident))).collect(), generics, )) } @@ -2709,14 +2649,14 @@ impl<'tcx> Visitor<'tcx> for FindClosureArg<'tcx> { #[derive(Clone, Copy)] enum FnParam<'hir> { Param(&'hir hir::Param<'hir>), - Name(Option), + Ident(Option), } impl FnParam<'_> { fn span(&self) -> Span { match self { Self::Param(param) => param.span, - Self::Name(ident) => { + Self::Ident(ident) => { if let Some(ident) = ident { ident.span } else { @@ -2738,7 +2678,7 @@ impl FnParam<'_> { { Some(ident.name) } - FnParam::Name(ident) + FnParam::Ident(ident) if let Some(ident) = ident && ident.name != kw::Underscore => { diff --git a/compiler/rustc_hir_typeck/src/fn_ctxt/mod.rs b/compiler/rustc_hir_typeck/src/fn_ctxt/mod.rs index e14f1528d2c4c..de189b301092c 100644 --- a/compiler/rustc_hir_typeck/src/fn_ctxt/mod.rs +++ b/compiler/rustc_hir_typeck/src/fn_ctxt/mod.rs @@ -314,7 +314,7 @@ impl<'tcx> HirTyLowerer<'tcx> for FnCtxt<'_, 'tcx> { item_def_id: DefId, item_segment: &rustc_hir::PathSegment<'tcx>, poly_trait_ref: ty::PolyTraitRef<'tcx>, - _kind: ty::AssocKind, + _assoc_tag: ty::AssocTag, ) -> Result<(DefId, ty::GenericArgsRef<'tcx>), ErrorGuaranteed> { let trait_ref = self.instantiate_binder_with_fresh_vars( span, @@ -337,7 +337,7 @@ impl<'tcx> HirTyLowerer<'tcx> for FnCtxt<'_, 'tcx> { match ty.kind() { ty::Adt(adt_def, _) => Some(*adt_def), // FIXME(#104767): Should we handle bound regions here? - ty::Alias(ty::Projection | ty::Inherent | ty::Weak, _) + ty::Alias(ty::Projection | ty::Inherent | ty::Free, _) if !ty.has_escaping_bound_vars() => { if self.next_trait_solver() { @@ -357,7 +357,7 @@ impl<'tcx> HirTyLowerer<'tcx> for FnCtxt<'_, 'tcx> { // WF obligations that are registered elsewhere, but they have a // better cause code assigned to them in `add_required_obligations_for_hir`. // This means that they should shadow obligations with worse spans. - if let ty::Alias(ty::Projection | ty::Weak, ty::AliasTy { args, def_id, .. }) = + if let ty::Alias(ty::Projection | ty::Free, ty::AliasTy { args, def_id, .. }) = ty.kind() { self.add_required_obligations_for_hir(span, *def_id, args, hir_id); @@ -488,7 +488,7 @@ fn parse_never_type_options_attr( item.span(), format!( "unknown or duplicate never type option: `{}` (supported: `fallback`, `diverging_block_default`)", - item.name_or_empty() + item.name().unwrap() ), ); } diff --git a/compiler/rustc_hir_typeck/src/fn_ctxt/suggestions.rs b/compiler/rustc_hir_typeck/src/fn_ctxt/suggestions.rs index 264719ca56916..91eb1989864ff 100644 --- a/compiler/rustc_hir_typeck/src/fn_ctxt/suggestions.rs +++ b/compiler/rustc_hir_typeck/src/fn_ctxt/suggestions.rs @@ -351,7 +351,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { if self.suggest_fn_call(err, expr, found, |output| self.may_coerce(output, expected)) && let ty::FnDef(def_id, ..) = *found.kind() - && let Some(sp) = self.tcx.hir().span_if_local(def_id) + && let Some(sp) = self.tcx.hir_span_if_local(def_id) { let name = self.tcx.item_name(def_id); let kind = self.tcx.def_kind(def_id); @@ -381,9 +381,10 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { let mut suggestions = methods .iter() .filter_map(|conversion_method| { + let conversion_method_name = conversion_method.name(); let receiver_method_ident = expr.method_ident(); if let Some(method_ident) = receiver_method_ident - && method_ident.name == conversion_method.name + && method_ident.name == conversion_method_name { return None; // do not suggest code that is already there (#53348) } @@ -391,23 +392,23 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { let method_call_list = [sym::to_vec, sym::to_string]; let mut sugg = if let ExprKind::MethodCall(receiver_method, ..) = expr.kind && receiver_method.ident.name == sym::clone - && method_call_list.contains(&conversion_method.name) + && method_call_list.contains(&conversion_method_name) // If receiver is `.clone()` and found type has one of those methods, // we guess that the user wants to convert from a slice type (`&[]` or `&str`) // to an owned type (`Vec` or `String`). These conversions clone internally, // so we remove the user's `clone` call. { - vec![(receiver_method.ident.span, conversion_method.name.to_string())] + vec![(receiver_method.ident.span, conversion_method_name.to_string())] } else if expr.precedence() < ExprPrecedence::Unambiguous { vec![ (expr.span.shrink_to_lo(), "(".to_string()), - (expr.span.shrink_to_hi(), format!(").{}()", conversion_method.name)), + (expr.span.shrink_to_hi(), format!(").{}()", conversion_method_name)), ] } else { - vec![(expr.span.shrink_to_hi(), format!(".{}()", conversion_method.name))] + vec![(expr.span.shrink_to_hi(), format!(".{}()", conversion_method_name))] }; let struct_pat_shorthand_field = - self.tcx.hir().maybe_get_struct_pattern_shorthand_field(expr); + self.tcx.hir_maybe_get_struct_pattern_shorthand_field(expr); if let Some(name) = struct_pat_shorthand_field { sugg.insert(0, (expr.span.shrink_to_lo(), format!("{name}: "))); } @@ -449,7 +450,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { }); let prefix_wrap = |sugg: &str| { - if let Some(name) = self.tcx.hir().maybe_get_struct_pattern_shorthand_field(expr) { + if let Some(name) = self.tcx.hir_maybe_get_struct_pattern_shorthand_field(expr) { format!(": {}{}", name, sugg) } else { sugg.to_string() @@ -671,7 +672,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { } _ => { let prefix = if let Some(name) = - self.tcx.hir().maybe_get_struct_pattern_shorthand_field(expr) + self.tcx.hir_maybe_get_struct_pattern_shorthand_field(expr) { format!("{}: ", name) } else { @@ -1153,7 +1154,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { }; if can_return && let Some(span) = expr.span.find_ancestor_inside( - self.tcx.hir().span_with_body(self.tcx.local_def_id_to_hir_id(fn_id)), + self.tcx.hir_span_with_body(self.tcx.local_def_id_to_hir_id(fn_id)), ) { // When the expr is in a match arm's body, we shouldn't add semicolon ';' at the end. @@ -1275,7 +1276,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { ) .must_apply_modulo_regions() { - let suggestion = match self.tcx.hir().maybe_get_struct_pattern_shorthand_field(expr) { + let suggestion = match self.tcx.hir_maybe_get_struct_pattern_shorthand_field(expr) { Some(ident) => format!(": {ident}.clone()"), None => ".clone()".to_string(), }; @@ -1381,7 +1382,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { (span.shrink_to_hi(), ").into()".to_owned()), ] }; - if let Some(name) = self.tcx.hir().maybe_get_struct_pattern_shorthand_field(expr) { + if let Some(name) = self.tcx.hir_maybe_get_struct_pattern_shorthand_field(expr) { sugg.insert(0, (expr.span.shrink_to_lo(), format!("{}: ", name))); } diag.multipart_suggestion( @@ -1436,7 +1437,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { return false; } - let suggestion = match self.tcx.hir().maybe_get_struct_pattern_shorthand_field(expr) { + let suggestion = match self.tcx.hir_maybe_get_struct_pattern_shorthand_field(expr) { Some(ident) => format!(": {ident}.is_some()"), None => ".is_some()".to_string(), }; @@ -2032,7 +2033,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { ) }; - let sugg = match self.tcx.hir().maybe_get_struct_pattern_shorthand_field(expr) { + let sugg = match self.tcx.hir_maybe_get_struct_pattern_shorthand_field(expr) { Some(ident) => format!(": {ident}{sugg}"), None => sugg.to_string(), }; @@ -2289,7 +2290,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { // Unroll desugaring, to make sure this works for `for` loops etc. loop { parent = self.tcx.parent_hir_id(id); - let parent_span = self.tcx.hir().span(parent); + let parent_span = self.tcx.hir_span(parent); if parent_span.find_ancestor_inside(expr.span).is_some() { // The parent node is part of the same span, so is the result of the // same expansion/desugaring and not the 'real' parent node. @@ -2378,7 +2379,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { .collect(); let suggestions_for = |variant: &_, ctor_kind, field_name| { - let prefix = match self.tcx.hir().maybe_get_struct_pattern_shorthand_field(expr) { + let prefix = match self.tcx.hir_maybe_get_struct_pattern_shorthand_field(expr) { Some(ident) => format!("{ident}: "), None => String::new(), }; @@ -2700,8 +2701,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { )); } - let prefix = match self.tcx.hir().maybe_get_struct_pattern_shorthand_field(expr) - { + let prefix = match self.tcx.hir_maybe_get_struct_pattern_shorthand_field(expr) { Some(ident) => format!("{ident}: "), None => String::new(), }; @@ -2911,7 +2911,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { }; let prefix = - match self.tcx.hir().maybe_get_struct_pattern_shorthand_field(expr) { + match self.tcx.hir_maybe_get_struct_pattern_shorthand_field(expr) { Some(ident) => format!("{ident}: "), None => String::new(), }; @@ -3478,30 +3478,24 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { lhs_ty: Ty<'tcx>, rhs_expr: &'tcx hir::Expr<'tcx>, lhs_expr: &'tcx hir::Expr<'tcx>, - op: hir::BinOp, ) { - match op.node { - hir::BinOpKind::Eq => { - if let Some(partial_eq_def_id) = self.infcx.tcx.lang_items().eq_trait() - && self - .infcx - .type_implements_trait(partial_eq_def_id, [rhs_ty, lhs_ty], self.param_env) - .must_apply_modulo_regions() - { - let sm = self.tcx.sess.source_map(); - if let Ok(rhs_snippet) = sm.span_to_snippet(rhs_expr.span) - && let Ok(lhs_snippet) = sm.span_to_snippet(lhs_expr.span) - { - err.note(format!("`{rhs_ty}` implements `PartialEq<{lhs_ty}>`")); - err.multipart_suggestion( - "consider swapping the equality", - vec![(lhs_expr.span, rhs_snippet), (rhs_expr.span, lhs_snippet)], - Applicability::MaybeIncorrect, - ); - } - } + if let Some(partial_eq_def_id) = self.infcx.tcx.lang_items().eq_trait() + && self + .infcx + .type_implements_trait(partial_eq_def_id, [rhs_ty, lhs_ty], self.param_env) + .must_apply_modulo_regions() + { + let sm = self.tcx.sess.source_map(); + if let Ok(rhs_snippet) = sm.span_to_snippet(rhs_expr.span) + && let Ok(lhs_snippet) = sm.span_to_snippet(lhs_expr.span) + { + err.note(format!("`{rhs_ty}` implements `PartialEq<{lhs_ty}>`")); + err.multipart_suggestion( + "consider swapping the equality", + vec![(lhs_expr.span, rhs_snippet), (rhs_expr.span, lhs_snippet)], + Applicability::MaybeIncorrect, + ); } - _ => {} } } } diff --git a/compiler/rustc_hir_typeck/src/gather_locals.rs b/compiler/rustc_hir_typeck/src/gather_locals.rs index 48fd5f1f98249..a8bbc89dbded5 100644 --- a/compiler/rustc_hir_typeck/src/gather_locals.rs +++ b/compiler/rustc_hir_typeck/src/gather_locals.rs @@ -43,7 +43,7 @@ pub(super) struct Declaration<'a> { impl<'a> From<&'a hir::LetStmt<'a>> for Declaration<'a> { fn from(local: &'a hir::LetStmt<'a>) -> Self { - let hir::LetStmt { hir_id, pat, ty, span, init, els, source: _ } = *local; + let hir::LetStmt { hir_id, super_: _, pat, ty, span, init, els, source: _ } = *local; Declaration { hir_id, pat, ty, span, init, origin: DeclOrigin::LocalDecl { els } } } } @@ -55,6 +55,14 @@ impl<'a> From<(&'a hir::LetExpr<'a>, HirId)> for Declaration<'a> { } } +/// The `GatherLocalsVisitor` is responsible for initializing local variable types +/// in the [`ty::TypeckResults`] for all subpatterns in statements and expressions +/// like `let`, `match`, and params of function bodies. It also adds `Sized` bounds +/// for these types (with exceptions for unsized feature gates like `unsized_fn_params`). +/// +/// Failure to visit locals will cause an ICE in writeback when the local's type is +/// resolved. Visiting locals twice will ICE in the `GatherLocalsVisitor`, since it +/// will overwrite the type previously stored in the local. pub(super) struct GatherLocalsVisitor<'a, 'tcx> { fcx: &'a FnCtxt<'a, 'tcx>, // parameters are special cases of patterns, but we want to handle them as @@ -63,9 +71,37 @@ pub(super) struct GatherLocalsVisitor<'a, 'tcx> { outermost_fn_param_pat: Option<(Span, HirId)>, } +// N.B. additional `gather_*` functions should be careful to only walk the pattern +// for new expressions, since visiting sub-expressions or nested bodies may initialize +// locals which are not conceptually owned by the gathered statement or expression. impl<'a, 'tcx> GatherLocalsVisitor<'a, 'tcx> { - pub(super) fn new(fcx: &'a FnCtxt<'a, 'tcx>) -> Self { - Self { fcx, outermost_fn_param_pat: None } + pub(crate) fn gather_from_local(fcx: &'a FnCtxt<'a, 'tcx>, local: &'tcx hir::LetStmt<'tcx>) { + let mut visitor = GatherLocalsVisitor { fcx, outermost_fn_param_pat: None }; + visitor.declare(local.into()); + visitor.visit_pat(local.pat); + } + + pub(crate) fn gather_from_let_expr( + fcx: &'a FnCtxt<'a, 'tcx>, + let_expr: &'tcx hir::LetExpr<'tcx>, + expr_hir_id: hir::HirId, + ) { + let mut visitor = GatherLocalsVisitor { fcx, outermost_fn_param_pat: None }; + visitor.declare((let_expr, expr_hir_id).into()); + visitor.visit_pat(let_expr.pat); + } + + pub(crate) fn gather_from_param(fcx: &'a FnCtxt<'a, 'tcx>, param: &'tcx hir::Param<'tcx>) { + let mut visitor = GatherLocalsVisitor { + fcx, + outermost_fn_param_pat: Some((param.ty_span, param.hir_id)), + }; + visitor.visit_pat(param.pat); + } + + pub(crate) fn gather_from_arm(fcx: &'a FnCtxt<'a, 'tcx>, local: &'tcx hir::Arm<'tcx>) { + let mut visitor = GatherLocalsVisitor { fcx, outermost_fn_param_pat: None }; + visitor.visit_pat(local.pat); } fn assign(&mut self, span: Span, nid: HirId, ty_opt: Option>) -> Ty<'tcx> { @@ -73,12 +109,12 @@ impl<'a, 'tcx> GatherLocalsVisitor<'a, 'tcx> { None => { // Infer the variable's type. let var_ty = self.fcx.next_ty_var(span); - self.fcx.locals.borrow_mut().insert(nid, var_ty); + assert_eq!(self.fcx.locals.borrow_mut().insert(nid, var_ty), None); var_ty } Some(typ) => { // Take type that the user specified. - self.fcx.locals.borrow_mut().insert(nid, typ); + assert_eq!(self.fcx.locals.borrow_mut().insert(nid, typ), None); typ } } @@ -133,13 +169,6 @@ impl<'a, 'tcx> Visitor<'tcx> for GatherLocalsVisitor<'a, 'tcx> { intravisit::walk_expr(self, expr) } - fn visit_param(&mut self, param: &'tcx hir::Param<'tcx>) { - let old_outermost_fn_param_pat = - self.outermost_fn_param_pat.replace((param.ty_span, param.hir_id)); - intravisit::walk_param(self, param); - self.outermost_fn_param_pat = old_outermost_fn_param_pat; - } - // Add pattern bindings. fn visit_pat(&mut self, p: &'tcx hir::Pat<'tcx>) { if let PatKind::Binding(_, _, ident, _) = p.kind { diff --git a/compiler/rustc_hir_typeck/src/inline_asm.rs b/compiler/rustc_hir_typeck/src/inline_asm.rs new file mode 100644 index 0000000000000..6399f0a78ae2f --- /dev/null +++ b/compiler/rustc_hir_typeck/src/inline_asm.rs @@ -0,0 +1,555 @@ +use rustc_abi::FieldIdx; +use rustc_ast::InlineAsmTemplatePiece; +use rustc_data_structures::fx::FxIndexSet; +use rustc_hir::def_id::DefId; +use rustc_hir::{self as hir, LangItem}; +use rustc_middle::bug; +use rustc_middle::ty::{self, Article, FloatTy, IntTy, Ty, TyCtxt, TypeVisitableExt, UintTy}; +use rustc_session::lint; +use rustc_span::def_id::LocalDefId; +use rustc_span::{Span, Symbol, sym}; +use rustc_target::asm::{ + InlineAsmReg, InlineAsmRegClass, InlineAsmRegOrRegClass, InlineAsmType, ModifierInfo, +}; +use rustc_trait_selection::infer::InferCtxtExt; + +use crate::FnCtxt; +use crate::errors::RegisterTypeUnstable; + +pub(crate) struct InlineAsmCtxt<'a, 'tcx> { + target_features: &'tcx FxIndexSet, + fcx: &'a FnCtxt<'a, 'tcx>, +} + +enum NonAsmTypeReason<'tcx> { + UnevaluatedSIMDArrayLength(DefId, ty::Const<'tcx>), + Invalid(Ty<'tcx>), + InvalidElement(DefId, Ty<'tcx>), + NotSizedPtr(Ty<'tcx>), + EmptySIMDArray(Ty<'tcx>), +} + +impl<'a, 'tcx> InlineAsmCtxt<'a, 'tcx> { + pub(crate) fn new(fcx: &'a FnCtxt<'a, 'tcx>, def_id: LocalDefId) -> Self { + InlineAsmCtxt { target_features: fcx.tcx.asm_target_features(def_id), fcx } + } + + fn tcx(&self) -> TyCtxt<'tcx> { + self.fcx.tcx + } + + fn expr_ty(&self, expr: &hir::Expr<'tcx>) -> Ty<'tcx> { + let ty = self.fcx.typeck_results.borrow().expr_ty_adjusted(expr); + let ty = self.fcx.try_structurally_resolve_type(expr.span, ty); + if ty.has_non_region_infer() { + Ty::new_misc_error(self.tcx()) + } else { + self.tcx().erase_regions(ty) + } + } + + // FIXME(compiler-errors): This could use `<$ty as Pointee>::Metadata == ()` + fn is_thin_ptr_ty(&self, span: Span, ty: Ty<'tcx>) -> bool { + // Type still may have region variables, but `Sized` does not depend + // on those, so just erase them before querying. + if self.fcx.type_is_sized_modulo_regions(self.fcx.param_env, ty) { + return true; + } + if let ty::Foreign(..) = self.fcx.try_structurally_resolve_type(span, ty).kind() { + return true; + } + false + } + + fn get_asm_ty( + &self, + span: Span, + ty: Ty<'tcx>, + ) -> Result> { + let asm_ty_isize = match self.tcx().sess.target.pointer_width { + 16 => InlineAsmType::I16, + 32 => InlineAsmType::I32, + 64 => InlineAsmType::I64, + width => bug!("unsupported pointer width: {width}"), + }; + + match *ty.kind() { + ty::Int(IntTy::I8) | ty::Uint(UintTy::U8) => Ok(InlineAsmType::I8), + ty::Int(IntTy::I16) | ty::Uint(UintTy::U16) => Ok(InlineAsmType::I16), + ty::Int(IntTy::I32) | ty::Uint(UintTy::U32) => Ok(InlineAsmType::I32), + ty::Int(IntTy::I64) | ty::Uint(UintTy::U64) => Ok(InlineAsmType::I64), + ty::Int(IntTy::I128) | ty::Uint(UintTy::U128) => Ok(InlineAsmType::I128), + ty::Int(IntTy::Isize) | ty::Uint(UintTy::Usize) => Ok(asm_ty_isize), + ty::Float(FloatTy::F16) => Ok(InlineAsmType::F16), + ty::Float(FloatTy::F32) => Ok(InlineAsmType::F32), + ty::Float(FloatTy::F64) => Ok(InlineAsmType::F64), + ty::Float(FloatTy::F128) => Ok(InlineAsmType::F128), + ty::FnPtr(..) => Ok(asm_ty_isize), + ty::RawPtr(elem_ty, _) => { + if self.is_thin_ptr_ty(span, elem_ty) { + Ok(asm_ty_isize) + } else { + Err(NonAsmTypeReason::NotSizedPtr(ty)) + } + } + ty::Adt(adt, args) if adt.repr().simd() => { + let fields = &adt.non_enum_variant().fields; + if fields.is_empty() { + return Err(NonAsmTypeReason::EmptySIMDArray(ty)); + } + let field = &fields[FieldIdx::ZERO]; + let elem_ty = field.ty(self.tcx(), args); + + let (size, ty) = match *elem_ty.kind() { + ty::Array(ty, len) => { + // FIXME: `try_structurally_resolve_const` doesn't eval consts + // in the old solver. + let len = if self.fcx.next_trait_solver() { + self.fcx.try_structurally_resolve_const(span, len) + } else { + self.fcx.tcx.normalize_erasing_regions( + self.fcx.typing_env(self.fcx.param_env), + len, + ) + }; + if let Some(len) = len.try_to_target_usize(self.tcx()) { + (len, ty) + } else { + return Err(NonAsmTypeReason::UnevaluatedSIMDArrayLength( + field.did, len, + )); + } + } + _ => (fields.len() as u64, elem_ty), + }; + + match ty.kind() { + ty::Int(IntTy::I8) | ty::Uint(UintTy::U8) => Ok(InlineAsmType::VecI8(size)), + ty::Int(IntTy::I16) | ty::Uint(UintTy::U16) => Ok(InlineAsmType::VecI16(size)), + ty::Int(IntTy::I32) | ty::Uint(UintTy::U32) => Ok(InlineAsmType::VecI32(size)), + ty::Int(IntTy::I64) | ty::Uint(UintTy::U64) => Ok(InlineAsmType::VecI64(size)), + ty::Int(IntTy::I128) | ty::Uint(UintTy::U128) => { + Ok(InlineAsmType::VecI128(size)) + } + ty::Int(IntTy::Isize) | ty::Uint(UintTy::Usize) => { + Ok(match self.tcx().sess.target.pointer_width { + 16 => InlineAsmType::VecI16(size), + 32 => InlineAsmType::VecI32(size), + 64 => InlineAsmType::VecI64(size), + width => bug!("unsupported pointer width: {width}"), + }) + } + ty::Float(FloatTy::F16) => Ok(InlineAsmType::VecF16(size)), + ty::Float(FloatTy::F32) => Ok(InlineAsmType::VecF32(size)), + ty::Float(FloatTy::F64) => Ok(InlineAsmType::VecF64(size)), + ty::Float(FloatTy::F128) => Ok(InlineAsmType::VecF128(size)), + _ => Err(NonAsmTypeReason::InvalidElement(field.did, ty)), + } + } + ty::Infer(_) => bug!("unexpected infer ty in asm operand"), + _ => Err(NonAsmTypeReason::Invalid(ty)), + } + } + + fn check_asm_operand_type( + &self, + idx: usize, + reg: InlineAsmRegOrRegClass, + expr: &'tcx hir::Expr<'tcx>, + template: &[InlineAsmTemplatePiece], + is_input: bool, + tied_input: Option<(&'tcx hir::Expr<'tcx>, Option)>, + ) -> Option { + let ty = self.expr_ty(expr); + if ty.has_non_region_infer() { + bug!("inference variable in asm operand ty: {:?} {:?}", expr, ty); + } + + let asm_ty = match *ty.kind() { + // `!` is allowed for input but not for output (issue #87802) + ty::Never if is_input => return None, + _ if ty.references_error() => return None, + ty::Adt(adt, args) if self.tcx().is_lang_item(adt.did(), LangItem::MaybeUninit) => { + let fields = &adt.non_enum_variant().fields; + let ty = fields[FieldIdx::from_u32(1)].ty(self.tcx(), args); + // FIXME: Are we just trying to map to the `T` in `MaybeUninit`? + // If so, just get it from the args. + let ty::Adt(ty, args) = ty.kind() else { + unreachable!("expected first field of `MaybeUninit` to be an ADT") + }; + assert!( + ty.is_manually_drop(), + "expected first field of `MaybeUninit` to be `ManuallyDrop`" + ); + let fields = &ty.non_enum_variant().fields; + let ty = fields[FieldIdx::ZERO].ty(self.tcx(), args); + self.get_asm_ty(expr.span, ty) + } + _ => self.get_asm_ty(expr.span, ty), + }; + let asm_ty = match asm_ty { + Ok(asm_ty) => asm_ty, + Err(reason) => { + match reason { + NonAsmTypeReason::UnevaluatedSIMDArrayLength(did, len) => { + let msg = format!("cannot evaluate SIMD vector length `{len}`"); + self.fcx + .dcx() + .struct_span_err(self.tcx().def_span(did), msg) + .with_span_note( + expr.span, + "SIMD vector length needs to be known statically for use in `asm!`", + ) + .emit(); + } + NonAsmTypeReason::Invalid(ty) => { + let msg = format!("cannot use value of type `{ty}` for inline assembly"); + self.fcx.dcx().struct_span_err(expr.span, msg).with_note( + "only integers, floats, SIMD vectors, pointers and function pointers \ + can be used as arguments for inline assembly", + ).emit(); + } + NonAsmTypeReason::NotSizedPtr(ty) => { + let msg = format!( + "cannot use value of unsized pointer type `{ty}` for inline assembly" + ); + self.fcx + .dcx() + .struct_span_err(expr.span, msg) + .with_note("only sized pointers can be used in inline assembly") + .emit(); + } + NonAsmTypeReason::InvalidElement(did, ty) => { + let msg = format!( + "cannot use SIMD vector with element type `{ty}` for inline assembly" + ); + self.fcx.dcx() + .struct_span_err(self.tcx().def_span(did), msg).with_span_note( + expr.span, + "only integers, floats, SIMD vectors, pointers and function pointers \ + can be used as arguments for inline assembly", + ).emit(); + } + NonAsmTypeReason::EmptySIMDArray(ty) => { + let msg = format!("use of empty SIMD vector `{ty}`"); + self.fcx.dcx().struct_span_err(expr.span, msg).emit(); + } + } + return None; + } + }; + + // Check that the type implements Copy. The only case where this can + // possibly fail is for SIMD types which don't #[derive(Copy)]. + if !self.fcx.type_is_copy_modulo_regions(self.fcx.param_env, ty) { + let msg = "arguments for inline assembly must be copyable"; + self.fcx + .dcx() + .struct_span_err(expr.span, msg) + .with_note(format!("`{ty}` does not implement the Copy trait")) + .emit(); + } + + // Ideally we wouldn't need to do this, but LLVM's register allocator + // really doesn't like it when tied operands have different types. + // + // This is purely an LLVM limitation, but we have to live with it since + // there is no way to hide this with implicit conversions. + // + // For the purposes of this check we only look at the `InlineAsmType`, + // which means that pointers and integers are treated as identical (modulo + // size). + if let Some((in_expr, Some(in_asm_ty))) = tied_input { + if in_asm_ty != asm_ty { + let msg = "incompatible types for asm inout argument"; + let in_expr_ty = self.expr_ty(in_expr); + self.fcx + .dcx() + .struct_span_err(vec![in_expr.span, expr.span], msg) + .with_span_label(in_expr.span, format!("type `{in_expr_ty}`")) + .with_span_label(expr.span, format!("type `{ty}`")) + .with_note( + "asm inout arguments must have the same type, \ + unless they are both pointers or integers of the same size", + ) + .emit(); + } + + // All of the later checks have already been done on the input, so + // let's not emit errors and warnings twice. + return Some(asm_ty); + } + + // Check the type against the list of types supported by the selected + // register class. + let asm_arch = self.tcx().sess.asm_arch.unwrap(); + let allow_experimental_reg = self.tcx().features().asm_experimental_reg(); + let reg_class = reg.reg_class(); + let supported_tys = reg_class.supported_types(asm_arch, allow_experimental_reg); + let Some((_, feature)) = supported_tys.iter().find(|&&(t, _)| t == asm_ty) else { + let mut err = if !allow_experimental_reg + && reg_class.supported_types(asm_arch, true).iter().any(|&(t, _)| t == asm_ty) + { + self.tcx().sess.create_feature_err( + RegisterTypeUnstable { span: expr.span, ty }, + sym::asm_experimental_reg, + ) + } else { + let msg = format!("type `{ty}` cannot be used with this register class"); + let mut err = self.fcx.dcx().struct_span_err(expr.span, msg); + let supported_tys: Vec<_> = + supported_tys.iter().map(|(t, _)| t.to_string()).collect(); + err.note(format!( + "register class `{}` supports these types: {}", + reg_class.name(), + supported_tys.join(", "), + )); + err + }; + if let Some(suggest) = reg_class.suggest_class(asm_arch, asm_ty) { + err.help(format!("consider using the `{}` register class instead", suggest.name())); + } + err.emit(); + return Some(asm_ty); + }; + + // Check whether the selected type requires a target feature. Note that + // this is different from the feature check we did earlier. While the + // previous check checked that this register class is usable at all + // with the currently enabled features, some types may only be usable + // with a register class when a certain feature is enabled. We check + // this here since it depends on the results of typeck. + // + // Also note that this check isn't run when the operand type is never + // (!). In that case we still need the earlier check to verify that the + // register class is usable at all. + if let Some(feature) = feature { + if !self.target_features.contains(feature) { + let msg = format!("`{feature}` target feature is not enabled"); + self.fcx + .dcx() + .struct_span_err(expr.span, msg) + .with_note(format!( + "this is required to use type `{}` with register class `{}`", + ty, + reg_class.name(), + )) + .emit(); + return Some(asm_ty); + } + } + + // Check whether a modifier is suggested for using this type. + if let Some(ModifierInfo { + modifier: suggested_modifier, + result: suggested_result, + size: suggested_size, + }) = reg_class.suggest_modifier(asm_arch, asm_ty) + { + // Search for any use of this operand without a modifier and emit + // the suggestion for them. + let mut spans = vec![]; + for piece in template { + if let &InlineAsmTemplatePiece::Placeholder { operand_idx, modifier, span } = piece + { + if operand_idx == idx && modifier.is_none() { + spans.push(span); + } + } + } + if !spans.is_empty() { + let ModifierInfo { + modifier: default_modifier, + result: default_result, + size: default_size, + } = reg_class.default_modifier(asm_arch).unwrap(); + self.tcx().node_span_lint( + lint::builtin::ASM_SUB_REGISTER, + expr.hir_id, + spans, + |lint| { + lint.primary_message("formatting may not be suitable for sub-register argument"); + lint.span_label(expr.span, "for this argument"); + lint.help(format!( + "use `{{{idx}:{suggested_modifier}}}` to have the register formatted as `{suggested_result}` (for {suggested_size}-bit values)", + )); + lint.help(format!( + "or use `{{{idx}:{default_modifier}}}` to keep the default formatting of `{default_result}` (for {default_size}-bit values)", + )); + }, + ); + } + } + + Some(asm_ty) + } + + pub(crate) fn check_asm(&self, asm: &hir::InlineAsm<'tcx>) { + let Some(asm_arch) = self.tcx().sess.asm_arch else { + self.fcx.dcx().delayed_bug("target architecture does not support asm"); + return; + }; + let allow_experimental_reg = self.tcx().features().asm_experimental_reg(); + for (idx, &(op, op_sp)) in asm.operands.iter().enumerate() { + // Validate register classes against currently enabled target + // features. We check that at least one type is available for + // the enabled features. + // + // We ignore target feature requirements for clobbers: if the + // feature is disabled then the compiler doesn't care what we + // do with the registers. + // + // Note that this is only possible for explicit register + // operands, which cannot be used in the asm string. + if let Some(reg) = op.reg() { + // Some explicit registers cannot be used depending on the + // target. Reject those here. + if let InlineAsmRegOrRegClass::Reg(reg) = reg { + if let InlineAsmReg::Err = reg { + // `validate` will panic on `Err`, as an error must + // already have been reported. + continue; + } + if let Err(msg) = reg.validate( + asm_arch, + self.tcx().sess.relocation_model(), + self.target_features, + &self.tcx().sess.target, + op.is_clobber(), + ) { + let msg = format!("cannot use register `{}`: {}", reg.name(), msg); + self.fcx.dcx().span_err(op_sp, msg); + continue; + } + } + + if !op.is_clobber() { + let mut missing_required_features = vec![]; + let reg_class = reg.reg_class(); + if let InlineAsmRegClass::Err = reg_class { + continue; + } + for &(_, feature) in reg_class.supported_types(asm_arch, allow_experimental_reg) + { + match feature { + Some(feature) => { + if self.target_features.contains(&feature) { + missing_required_features.clear(); + break; + } else { + missing_required_features.push(feature); + } + } + None => { + missing_required_features.clear(); + break; + } + } + } + + // We are sorting primitive strs here and can use unstable sort here + missing_required_features.sort_unstable(); + missing_required_features.dedup(); + match &missing_required_features[..] { + [] => {} + [feature] => { + let msg = format!( + "register class `{}` requires the `{}` target feature", + reg_class.name(), + feature + ); + self.fcx.dcx().span_err(op_sp, msg); + // register isn't enabled, don't do more checks + continue; + } + features => { + let msg = format!( + "register class `{}` requires at least one of the following target features: {}", + reg_class.name(), + features + .iter() + .map(|f| f.as_str()) + .intersperse(", ") + .collect::(), + ); + self.fcx.dcx().span_err(op_sp, msg); + // register isn't enabled, don't do more checks + continue; + } + } + } + } + + match op { + hir::InlineAsmOperand::In { reg, expr } => { + self.check_asm_operand_type(idx, reg, expr, asm.template, true, None); + } + hir::InlineAsmOperand::Out { reg, late: _, expr } => { + if let Some(expr) = expr { + self.check_asm_operand_type(idx, reg, expr, asm.template, false, None); + } + } + hir::InlineAsmOperand::InOut { reg, late: _, expr } => { + self.check_asm_operand_type(idx, reg, expr, asm.template, false, None); + } + hir::InlineAsmOperand::SplitInOut { reg, late: _, in_expr, out_expr } => { + let in_ty = + self.check_asm_operand_type(idx, reg, in_expr, asm.template, true, None); + if let Some(out_expr) = out_expr { + self.check_asm_operand_type( + idx, + reg, + out_expr, + asm.template, + false, + Some((in_expr, in_ty)), + ); + } + } + hir::InlineAsmOperand::Const { anon_const } => { + let ty = self.expr_ty(self.tcx().hir_body(anon_const.body).value); + match ty.kind() { + ty::Error(_) => {} + _ if ty.is_integral() => {} + _ => { + self.fcx + .dcx() + .struct_span_err(op_sp, "invalid type for `const` operand") + .with_span_label( + self.tcx().def_span(anon_const.def_id), + format!("is {} `{}`", ty.kind().article(), ty), + ) + .with_help("`const` operands must be of an integer type") + .emit(); + } + } + } + // Typeck has checked that SymFn refers to a function. + hir::InlineAsmOperand::SymFn { expr } => { + let ty = self.expr_ty(expr); + match ty.kind() { + ty::FnDef(..) => {} + ty::Error(_) => {} + _ => { + self.fcx + .dcx() + .struct_span_err(op_sp, "invalid `sym` operand") + .with_span_label( + expr.span, + format!("is {} `{}`", ty.kind().article(), ty), + ) + .with_help( + "`sym` operands must refer to either a function or a static", + ) + .emit(); + } + } + } + // AST lowering guarantees that SymStatic points to a static. + hir::InlineAsmOperand::SymStatic { .. } => {} + // No special checking is needed for labels. + hir::InlineAsmOperand::Label { .. } => {} + } + } + } +} diff --git a/compiler/rustc_hir_typeck/src/intrinsicck.rs b/compiler/rustc_hir_typeck/src/intrinsicck.rs index 54e9e699353fa..194e420b606ea 100644 --- a/compiler/rustc_hir_typeck/src/intrinsicck.rs +++ b/compiler/rustc_hir_typeck/src/intrinsicck.rs @@ -45,7 +45,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { pub(crate) fn check_transmute(&self, from: Ty<'tcx>, to: Ty<'tcx>, hir_id: HirId) { let tcx = self.tcx; let dl = &tcx.data_layout; - let span = tcx.hir().span(hir_id); + let span = tcx.hir_span(hir_id); let normalize = |ty| { let ty = self.resolve_vars_if_possible(ty); if let Ok(ty) = diff --git a/compiler/rustc_hir_typeck/src/lib.rs b/compiler/rustc_hir_typeck/src/lib.rs index 4968998fd512b..60187abd55897 100644 --- a/compiler/rustc_hir_typeck/src/lib.rs +++ b/compiler/rustc_hir_typeck/src/lib.rs @@ -1,12 +1,11 @@ // tidy-alphabetical-start #![allow(rustc::diagnostic_outside_of_impl)] #![allow(rustc::untranslatable_diagnostic)] -#![cfg_attr(doc, recursion_limit = "256")] // FIXME(nnethercote): will be removed by #124141 +#![cfg_attr(bootstrap, feature(let_chains))] #![feature(array_windows)] #![feature(box_patterns)] #![feature(if_let_guard)] #![feature(iter_intersperse)] -#![feature(let_chains)] #![feature(never_type)] #![feature(try_blocks)] // tidy-alphabetical-end @@ -24,6 +23,7 @@ mod diverges; mod errors; mod expectation; mod expr; +mod inline_asm; // Used by clippy; pub mod expr_use_visitor; mod fallback; @@ -32,6 +32,7 @@ mod gather_locals; mod intrinsicck; mod method; mod op; +mod opaque_types; mod pat; mod place_op; mod rvalue_scopes; @@ -46,7 +47,6 @@ use rustc_errors::codes::*; use rustc_errors::{Applicability, ErrorGuaranteed, pluralize, struct_span_code_err}; use rustc_hir as hir; use rustc_hir::def::{DefKind, Res}; -use rustc_hir::intravisit::Visitor; use rustc_hir::{HirId, HirIdMap, Node}; use rustc_hir_analysis::check::check_abi; use rustc_hir_analysis::hir_ty_lowering::HirTyLowerer; @@ -117,7 +117,7 @@ fn typeck_with_inspect<'tcx>( let id = tcx.local_def_id_to_hir_id(def_id); let node = tcx.hir_node(id); - let span = tcx.hir().span(id); + let span = tcx.def_span(def_id); // Figure out what primary body this item has. let body_id = node.body_id().unwrap_or_else(|| { @@ -191,9 +191,6 @@ fn typeck_with_inspect<'tcx>( let wf_code = ObligationCauseCode::WellFormed(Some(WellFormedLoc::Ty(def_id))); fcx.register_wf_obligation(expected_type.into(), body.value.span, wf_code); - // Gather locals in statics (because of block expressions). - GatherLocalsVisitor::new(&fcx).visit_body(body); - fcx.check_expr_coercible_to_type(body.value, expected_type, None); fcx.write_ty(id, expected_type); @@ -249,9 +246,7 @@ fn typeck_with_inspect<'tcx>( let typeck_results = fcx.resolve_type_vars_in_body(body); - // We clone the defined opaque types during writeback in the new solver - // because we have to use them during normalization. - let _ = fcx.infcx.take_opaque_types(); + fcx.detect_opaque_types_added_during_writeback(); // Consistency check our TypeckResults instance can hold all ItemLocalIds // it will need to hold. @@ -266,7 +261,7 @@ fn infer_type_if_missing<'tcx>(fcx: &FnCtxt<'_, 'tcx>, node: Node<'tcx>) -> Opti let expected_type = if let Some(&hir::Ty { kind: hir::TyKind::Infer(()), span, .. }) = node.ty() { if let Some(item) = tcx.opt_associated_item(def_id.into()) - && let ty::AssocKind::Const = item.kind + && let ty::AssocKind::Const { .. } = item.kind && let ty::AssocItemContainer::Impl = item.container && let Some(trait_item_def_id) = item.trait_item_def_id { diff --git a/compiler/rustc_hir_typeck/src/method/mod.rs b/compiler/rustc_hir_typeck/src/method/mod.rs index 4008021c3a857..34bbb7d7c05e6 100644 --- a/compiler/rustc_hir_typeck/src/method/mod.rs +++ b/compiler/rustc_hir_typeck/src/method/mod.rs @@ -19,7 +19,7 @@ use rustc_middle::ty::{ self, GenericArgs, GenericArgsRef, GenericParamDefKind, Ty, TypeVisitableExt, }; use rustc_middle::{bug, span_bug}; -use rustc_span::{ErrorGuaranteed, Ident, Span}; +use rustc_span::{ErrorGuaranteed, Ident, Span, Symbol}; use rustc_trait_selection::traits::query::evaluate_obligation::InferCtxtExt; use rustc_trait_selection::traits::{self, NormalizeExt}; use tracing::{debug, instrument}; @@ -329,10 +329,10 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { /// an obligation for a particular trait with the given self type and checks /// whether that trait is implemented. #[instrument(level = "debug", skip(self))] - pub(super) fn lookup_method_in_trait( + pub(super) fn lookup_method_for_operator( &self, cause: ObligationCause<'tcx>, - m_name: Ident, + method_name: Symbol, trait_def_id: DefId, self_ty: Ty<'tcx>, opt_rhs_ty: Option>, @@ -374,13 +374,20 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { // Trait must have a method named `m_name` and it should not have // type parameters or early-bound regions. let tcx = self.tcx; - let Some(method_item) = self.associated_value(trait_def_id, m_name) else { + // We use `Ident::with_dummy_span` since no built-in operator methods have + // any macro-specific hygeine, so the span's context doesn't really matter. + let Some(method_item) = + self.associated_value(trait_def_id, Ident::with_dummy_span(method_name)) + else { bug!("expected associated item for operator trait") }; let def_id = method_item.def_id; - if method_item.kind != ty::AssocKind::Fn { - span_bug!(tcx.def_span(def_id), "expected `{m_name}` to be an associated function"); + if !method_item.is_fn() { + span_bug!( + tcx.def_span(def_id), + "expected `{method_name}` to be an associated function" + ); } debug!("lookup_in_trait_adjusted: method_item={:?}", method_item); @@ -529,17 +536,17 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { } } - let def_kind = pick.item.kind.as_def_kind(); + let def_kind = pick.item.as_def_kind(); tcx.check_stability(pick.item.def_id, Some(expr_id), span, Some(method_name.span)); Ok((def_kind, pick.item.def_id)) } - /// Finds item with name `item_name` defined in impl/trait `def_id` + /// Finds item with name `item_ident` defined in impl/trait `def_id` /// and return it, or `None`, if no such item was defined there. - fn associated_value(&self, def_id: DefId, item_name: Ident) -> Option { + fn associated_value(&self, def_id: DefId, item_ident: Ident) -> Option { self.tcx .associated_items(def_id) - .find_by_name_and_namespace(self.tcx, item_name, Namespace::ValueNS, def_id) + .find_by_ident_and_namespace(self.tcx, item_ident, Namespace::ValueNS, def_id) .copied() } } diff --git a/compiler/rustc_hir_typeck/src/method/probe.rs b/compiler/rustc_hir_typeck/src/method/probe.rs index 0a01ec89a327d..bda051f156084 100644 --- a/compiler/rustc_hir_typeck/src/method/probe.rs +++ b/compiler/rustc_hir_typeck/src/method/probe.rs @@ -992,7 +992,7 @@ impl<'a, 'tcx> ProbeContext<'a, 'tcx> { fn matches_return_type(&self, method: ty::AssocItem, expected: Ty<'tcx>) -> bool { match method.kind { - ty::AssocKind::Fn => self.probe(|_| { + ty::AssocKind::Fn { .. } => self.probe(|_| { let args = self.fresh_args_for_item(self.span, method.def_id); let fty = self.tcx.fn_sig(method.def_id).instantiate(self.tcx, args); let fty = self.instantiate_binder_with_fresh_vars(self.span, infer::FnCall, fty); @@ -1213,7 +1213,7 @@ impl<'a, 'tcx> ProbeContext<'a, 'tcx> { debug!("pick_all_method: step={:?}", step); // skip types that are from a type error or that would require dereferencing // a raw pointer - !step.self_ty.references_error() && !step.from_unsafe_deref + !step.self_ty.value.references_error() && !step.from_unsafe_deref }) .find_map(|step| { let InferOk { value: self_ty, obligations: _ } = self @@ -1583,7 +1583,7 @@ impl<'a, 'tcx> ProbeContext<'a, 'tcx> { }, None, ) { - self.private_candidate.set(Some((pick.item.kind.as_def_kind(), pick.item.def_id))); + self.private_candidate.set(Some((pick.item.as_def_kind(), pick.item.def_id))); } } None @@ -1671,16 +1671,7 @@ impl<'tcx> Pick<'tcx> { /// Do not use for type checking. pub(crate) fn differs_from(&self, other: &Self) -> bool { let Self { - item: - AssocItem { - def_id, - name: _, - kind: _, - container: _, - trait_item_def_id: _, - fn_has_self_parameter: _, - opt_rpitit_info: _, - }, + item: AssocItem { def_id, kind: _, container: _, trait_item_def_id: _ }, kind: _, import_ids: _, autoderefs: _, @@ -1703,7 +1694,7 @@ impl<'tcx> Pick<'tcx> { if self.unstable_candidates.is_empty() { return; } - let def_kind = self.item.kind.as_def_kind(); + let def_kind = self.item.as_def_kind(); tcx.node_span_lint(lint::builtin::UNSTABLE_NAME_COLLISIONS, scope_expr_id, span, |lint| { lint.primary_message(format!( "{} {} with this name may be added to the standard library in the future", @@ -1712,7 +1703,7 @@ impl<'tcx> Pick<'tcx> { )); match (self.item.kind, self.item.container) { - (ty::AssocKind::Fn, _) => { + (ty::AssocKind::Fn { .. }, _) => { // FIXME: This should be a `span_suggestion` instead of `help` // However `self.span` only // highlights the method name, so we can't use it. Also consider reusing @@ -1723,17 +1714,12 @@ impl<'tcx> Pick<'tcx> { tcx.def_path_str(self.item.def_id), )); } - (ty::AssocKind::Const, ty::AssocItemContainer::Trait) => { + (ty::AssocKind::Const { name }, ty::AssocItemContainer::Trait) => { let def_id = self.item.container_id(tcx); lint.span_suggestion( span, "use the fully qualified path to the associated const", - format!( - "<{} as {}>::{}", - self.self_ty, - tcx.def_path_str(def_id), - self.item.name - ), + format!("<{} as {}>::{}", self.self_ty, tcx.def_path_str(def_id), name), Applicability::MachineApplicable, ); } @@ -2222,7 +2208,7 @@ impl<'a, 'tcx> ProbeContext<'a, 'tcx> { let best_name = { let names = applicable_close_candidates .iter() - .map(|cand| cand.name) + .map(|cand| cand.name()) .collect::>(); find_best_match_for_name_with_substrings( &names, @@ -2234,10 +2220,12 @@ impl<'a, 'tcx> ProbeContext<'a, 'tcx> { applicable_close_candidates .iter() .find(|cand| self.matches_by_doc_alias(cand.def_id)) - .map(|cand| cand.name) + .map(|cand| cand.name()) }); Ok(best_name.and_then(|best_name| { - applicable_close_candidates.into_iter().find(|method| method.name == best_name) + applicable_close_candidates + .into_iter() + .find(|method| method.name() == best_name) })) } }) @@ -2252,10 +2240,10 @@ impl<'a, 'tcx> ProbeContext<'a, 'tcx> { // In Path mode (i.e., resolving a value like `T::next`), consider any // associated value (i.e., methods, constants) but not types. match self.mode { - Mode::MethodCall => item.fn_has_self_parameter, + Mode::MethodCall => item.is_method(), Mode::Path => match item.kind { - ty::AssocKind::Type => false, - ty::AssocKind::Fn | ty::AssocKind::Const => true, + ty::AssocKind::Type { .. } => false, + ty::AssocKind::Fn { .. } | ty::AssocKind::Const { .. } => true, }, } // FIXME -- check for types that deref to `Self`, @@ -2277,7 +2265,7 @@ impl<'a, 'tcx> ProbeContext<'a, 'tcx> { impl_ty: Ty<'tcx>, args: GenericArgsRef<'tcx>, ) -> (Ty<'tcx>, Option>) { - if item.kind == ty::AssocKind::Fn && self.mode == Mode::MethodCall { + if item.is_fn() && self.mode == Mode::MethodCall { let sig = self.xform_method_sig(item.def_id, args); (sig.inputs()[0], Some(sig.output())) } else { @@ -2328,8 +2316,8 @@ impl<'a, 'tcx> ProbeContext<'a, 'tcx> { /// Determine if the given associated item type is relevant in the current context. fn is_relevant_kind_for_mode(&self, kind: ty::AssocKind) -> bool { match (self.mode, kind) { - (Mode::MethodCall, ty::AssocKind::Fn) => true, - (Mode::Path, ty::AssocKind::Const | ty::AssocKind::Fn) => true, + (Mode::MethodCall, ty::AssocKind::Fn { .. }) => true, + (Mode::Path, ty::AssocKind::Const { .. } | ty::AssocKind::Fn { .. }) => true, _ => false, } } @@ -2346,8 +2334,9 @@ impl<'a, 'tcx> ProbeContext<'a, 'tcx> { let hir_id = self.fcx.tcx.local_def_id_to_hir_id(local_def_id); let attrs = self.fcx.tcx.hir_attrs(hir_id); for attr in attrs { - if sym::doc == attr.name_or_empty() { - } else if sym::rustc_confusables == attr.name_or_empty() { + if attr.has_name(sym::doc) { + // do nothing + } else if attr.has_name(sym::rustc_confusables) { let Some(confusables) = attr.meta_item_list() else { continue; }; @@ -2367,7 +2356,7 @@ impl<'a, 'tcx> ProbeContext<'a, 'tcx> { continue; }; for v in values { - if v.name_or_empty() != sym::alias { + if !v.has_name(sym::alias) { continue; } if let Some(nested) = v.meta_item_list() { @@ -2411,7 +2400,7 @@ impl<'a, 'tcx> ProbeContext<'a, 'tcx> { } match edit_distance_with_substrings( name.as_str(), - x.name.as_str(), + x.name().as_str(), max_dist, ) { Some(d) => d > 0, diff --git a/compiler/rustc_hir_typeck/src/method/suggest.rs b/compiler/rustc_hir_typeck/src/method/suggest.rs index 908c3ee2eb8e3..6a9fd7cdd483b 100644 --- a/compiler/rustc_hir_typeck/src/method/suggest.rs +++ b/compiler/rustc_hir_typeck/src/method/suggest.rs @@ -25,6 +25,7 @@ use rustc_middle::bug; use rustc_middle::ty::fast_reject::{DeepRejectCtxt, TreatParams, simplify_type}; use rustc_middle::ty::print::{ PrintTraitRefExt as _, with_crate_prefix, with_forced_trimmed_paths, + with_no_visible_paths_if_doc_hidden, }; use rustc_middle::ty::{self, GenericArgKind, IsSuggestable, Ty, TyCtxt, TypeVisitableExt}; use rustc_span::def_id::DefIdSet; @@ -254,11 +255,8 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { item_name ); err.span_label(item_name.span, format!("private {kind}")); - let sp = self - .tcx - .hir() - .span_if_local(def_id) - .unwrap_or_else(|| self.tcx.def_span(def_id)); + let sp = + self.tcx.hir_span_if_local(def_id).unwrap_or_else(|| self.tcx.def_span(def_id)); err.span_label(sp, format!("private {kind} defined here")); if let Some(within_macro_span) = within_macro_span { err.span_label(within_macro_span, "due to this macro variable"); @@ -566,7 +564,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { span.push_span_label(sugg_let.span, format!("`{rcvr_name}` of type `{self_ty}` that has method `{method_name}` defined earlier here")); span.push_span_label( - self.tcx.hir().span(recv_id), + self.tcx.hir_span(recv_id), format!( "earlier `{rcvr_name}` shadowed here with type `{ty_str_reported}`" ), @@ -587,7 +585,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { &self, mut span: Span, rcvr_ty: Ty<'tcx>, - item_name: Ident, + item_ident: Ident, expr_id: hir::HirId, source: SelfSource<'tcx>, args: Option<&'tcx [hir::Expr<'tcx>]>, @@ -618,7 +616,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { } else if rcvr_ty.is_enum() { "variant or associated item" } else { - match (item_name.as_str().chars().next(), rcvr_ty.is_fresh_ty()) { + match (item_ident.as_str().chars().next(), rcvr_ty.is_fresh_ty()) { (Some(name), false) if name.is_lowercase() => "function or associated item", (Some(_), false) => "associated item", (Some(_), true) | (None, false) => "variant or associated item", @@ -633,7 +631,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { rcvr_ty, source, span, - item_name, + item_ident, &short_ty_str, &mut ty_file, ) { @@ -645,13 +643,13 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { source, span, item_kind, - item_name, + item_ident, &short_ty_str, &mut ty_file, ) { return guar; } - span = item_name.span; + span = item_ident.span; // Don't show generic arguments when the method can't be found in any implementation (#81576). let mut ty_str_reported = ty_str.clone(); @@ -663,7 +661,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { self.tcx .inherent_impls(adt_def.did()) .into_iter() - .any(|def_id| self.associated_value(*def_id, item_name).is_some()) + .any(|def_id| self.associated_value(*def_id, item_ident).is_some()) } else { false } @@ -680,14 +678,14 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { let is_write = sugg_span.ctxt().outer_expn_data().macro_def_id.is_some_and(|def_id| { tcx.is_diagnostic_item(sym::write_macro, def_id) || tcx.is_diagnostic_item(sym::writeln_macro, def_id) - }) && item_name.name == sym::write_fmt; + }) && item_ident.name == sym::write_fmt; let mut err = if is_write && let SelfSource::MethodCall(rcvr_expr) = source { self.suggest_missing_writer(rcvr_ty, rcvr_expr) } else { let mut err = self.dcx().create_err(NoAssociatedItem { span, item_kind, - item_name, + item_ident, ty_prefix: if trait_missing_method { // FIXME(mu001999) E0599 maybe not suitable here because it is for types Cow::from("trait") @@ -701,7 +699,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { if is_method { self.suggest_use_shadowed_binding_with_method( source, - item_name, + item_ident, &ty_str_reported, &mut err, ); @@ -712,10 +710,10 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { && let hir::TyKind::Path(hir::QPath::Resolved(_, path)) = ty.kind && let Res::SelfTyAlias { alias_to: impl_def_id, .. } = path.res && let DefKind::Impl { .. } = self.tcx.def_kind(impl_def_id) - && let Some(candidate) = tcx.associated_items(impl_def_id).find_by_name_and_kind( + && let Some(candidate) = tcx.associated_items(impl_def_id).find_by_ident_and_kind( self.tcx, - item_name, - ty::AssocKind::Type, + item_ident, + ty::AssocTag::Type, impl_def_id, ) && let Some(adt_def) = tcx.type_of(candidate.def_id).skip_binder().ty_adt_def() @@ -724,7 +722,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { { let def_path = tcx.def_path_str(adt_def.did()); err.span_suggestion( - ty.span.to(item_name.span), + ty.span.to(item_ident.span), format!("to construct a value of type `{}`, use the explicit path", def_path), def_path, Applicability::MachineApplicable, @@ -752,7 +750,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { self.find_builder_fn(&mut err, rcvr_ty, expr_id); } - if tcx.ty_is_opaque_future(rcvr_ty) && item_name.name == sym::poll { + if tcx.ty_is_opaque_future(rcvr_ty) && item_ident.name == sym::poll { err.help(format!( "method `poll` found on `Pin<&mut {ty_str}>`, \ see documentation for `std::pin::Pin`" @@ -767,7 +765,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { { self.suggest_await_before_method( &mut err, - item_name, + item_ident, rcvr_ty, cal, span, @@ -789,7 +787,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { if let SelfSource::MethodCall(rcvr_expr) = source && let ty::RawPtr(ty, ptr_mutbl) = *rcvr_ty.kind() && let Ok(pick) = self.lookup_probe_for_diagnostic( - item_name, + item_ident, Ty::new_ref(tcx, ty::Region::new_error_misc(tcx), ty, ptr_mutbl), self.tcx.hir_expect_expr(self.tcx.parent_hir_id(rcvr_expr.hir_id)), ProbeScope::TraitsInScope, @@ -810,7 +808,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { }; err.span_note( tcx.def_span(pick.item.def_id), - format!("the method `{item_name}` exists on the type `{ty}`", ty = pick.self_ty), + format!("the method `{item_ident}` exists on the type `{ty}`", ty = pick.self_ty), ); let mut_str = ptr_mutbl.ptr_str(); err.note(format!( @@ -836,7 +834,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { self.suggest_fn_call(&mut err, rcvr_expr, rcvr_ty, |output_ty| { let call_expr = self.tcx.hir_expect_expr(self.tcx.parent_hir_id(rcvr_expr.hir_id)); let probe = self.lookup_probe_for_diagnostic( - item_name, + item_ident, output_ty, call_expr, ProbeScope::AllTraits, @@ -875,13 +873,13 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { static_candidates, rcvr_ty, source, - item_name, + item_ident, args, sugg_span, ); self.note_candidates_on_method_error( rcvr_ty, - item_name, + item_ident, source, args, span, @@ -892,7 +890,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { } else if static_candidates.len() > 1 { self.note_candidates_on_method_error( rcvr_ty, - item_name, + item_ident, source, args, span, @@ -906,7 +904,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { let mut restrict_type_params = false; let mut suggested_derive = false; let mut unsatisfied_bounds = false; - if item_name.name == sym::count && self.is_slice_ty(rcvr_ty, span) { + if item_ident.name == sym::count && self.is_slice_ty(rcvr_ty, span) { let msg = "consider using `len` instead"; if let SelfSource::MethodCall(_expr) = source { err.span_suggestion_short(span, msg, "len", Applicability::MachineApplicable); @@ -1351,7 +1349,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { }; let primary_message = primary_message.unwrap_or_else(|| { format!( - "the {item_kind} `{item_name}` exists for {actual_prefix} `{ty_str}`, \ + "the {item_kind} `{item_ident}` exists for {actual_prefix} `{ty_str}`, \ but its trait bounds were not satisfied" ) }); @@ -1381,7 +1379,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { // `Pin<&Self>`. if targs.len() == 1 { let mut item_segment = hir::PathSegment::invalid(); - item_segment.ident = item_name; + item_segment.ident = item_ident; for t in [Ty::new_mut_ref, Ty::new_imm_ref, |_, _, t| t] { let new_args = tcx.mk_args_from_iter(targs.iter().map(|arg| match arg.as_type() { @@ -1425,9 +1423,9 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { ty::Adt(adt, _) => self.tcx.is_lang_item(adt.did(), LangItem::String), _ => false, }; - if is_string_or_ref_str && item_name.name == sym::iter { + if is_string_or_ref_str && item_ident.name == sym::iter { err.span_suggestion_verbose( - item_name.span, + item_ident.span, "because of the in-memory representation of `&str`, to obtain \ an `Iterator` over each of its codepoint use method `chars`", "chars", @@ -1441,10 +1439,10 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { .into_iter() .copied() .filter(|def_id| { - if let Some(assoc) = self.associated_value(*def_id, item_name) { + if let Some(assoc) = self.associated_value(*def_id, item_ident) { // Check for both mode is the same so we avoid suggesting // incorrect associated item. - match (mode, assoc.fn_has_self_parameter, source) { + match (mode, assoc.is_method(), source) { (Mode::MethodCall, true, SelfSource::MethodCall(_)) => { // We check that the suggest type is actually // different from the received one @@ -1502,7 +1500,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { // If the method name is the name of a field with a function or closure type, // give a helping note that it has to be called as `(x.f)(...)`. if let SelfSource::MethodCall(expr) = source { - if !self.suggest_calling_field_as_fn(span, rcvr_ty, expr, item_name, &mut err) + if !self.suggest_calling_field_as_fn(span, rcvr_ty, expr, item_ident, &mut err) && similar_candidate.is_none() && !custom_span_label { @@ -1515,7 +1513,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { let confusable_suggested = self.confusable_method_name( &mut err, rcvr_ty, - item_name, + item_ident, args.map(|args| { args.iter() .map(|expr| { @@ -1533,12 +1531,12 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { source, span, rcvr_ty, - item_name, + item_ident, expected.only_has_type(self), ); } - self.suggest_unwrapping_inner_self(&mut err, source, rcvr_ty, item_name); + self.suggest_unwrapping_inner_self(&mut err, source, rcvr_ty, item_ident); for (span, mut bounds) in bound_spans { if !tcx.sess.source_map().is_span_accessible(span) { @@ -1549,7 +1547,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { let pre = if Some(span) == ty_span { ty_span.take(); format!( - "{item_kind} `{item_name}` not found for this {} because it ", + "{item_kind} `{item_ident}` not found for this {} because it ", rcvr_ty.prefix_string(self.tcx) ) } else { @@ -1569,7 +1567,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { err.span_label( span, format!( - "{item_kind} `{item_name}` not found for this {}", + "{item_kind} `{item_ident}` not found for this {}", rcvr_ty.prefix_string(self.tcx) ), ); @@ -1581,7 +1579,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { &mut err, span, rcvr_ty, - item_name, + item_ident, args.map(|args| args.len() + 1), source, no_match_data.out_of_scope_traits.clone(), @@ -1598,7 +1596,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { let adt_def = rcvr_ty.ty_adt_def().expect("enum is not an ADT"); if let Some(var_name) = edit_distance::find_best_match_for_name( &adt_def.variants().iter().map(|s| s.name).collect::>(), - item_name.name, + item_ident.name, None, ) && let Some(variant) = adt_def.variants().iter().find(|s| s.name == var_name) { @@ -1724,7 +1722,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { // that had unsatisfied trait bounds if unsatisfied_predicates.is_empty() // ...or if we already suggested that name because of `rustc_confusable` annotation. - && Some(similar_candidate.name) != confusable_suggested + && Some(similar_candidate.name()) != confusable_suggested { self.find_likely_intended_associated_item( &mut err, @@ -1739,14 +1737,14 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { if !find_candidate_for_method { self.lookup_segments_chain_for_no_match_method( &mut err, - item_name, + item_ident, item_kind, source, no_match_data, ); } - self.note_derefed_ty_has_method(&mut err, source, rcvr_ty, item_name, expected); + self.note_derefed_ty_has_method(&mut err, source, rcvr_ty, item_ident, expected); err.emit() } @@ -1821,12 +1819,13 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { mode: Mode, ) { let tcx = self.tcx; - let def_kind = similar_candidate.kind.as_def_kind(); + let def_kind = similar_candidate.as_def_kind(); let an = self.tcx.def_kind_descr_article(def_kind, similar_candidate.def_id); + let similar_candidate_name = similar_candidate.name(); let msg = format!( "there is {an} {} `{}` with a similar name", self.tcx.def_kind_descr(def_kind, similar_candidate.def_id), - similar_candidate.name, + similar_candidate_name, ); // Methods are defined within the context of a struct and their first parameter // is always `self`, which represents the instance of the struct the method is @@ -1836,7 +1835,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { let ty_args = self.infcx.fresh_args_for_item(span, similar_candidate.def_id); let fn_sig = tcx.fn_sig(similar_candidate.def_id).instantiate(tcx, ty_args); let fn_sig = self.instantiate_binder_with_fresh_vars(span, infer::FnCall, fn_sig); - if similar_candidate.fn_has_self_parameter { + if similar_candidate.is_method() { if let Some(args) = args && fn_sig.inputs()[1..].len() == args.len() { @@ -1845,7 +1844,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { err.span_suggestion_verbose( span, msg, - similar_candidate.name, + similar_candidate_name, Applicability::MaybeIncorrect, ); } else { @@ -1867,7 +1866,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { err.span_suggestion_verbose( span, msg, - similar_candidate.name, + similar_candidate_name, Applicability::MaybeIncorrect, ); } else { @@ -1880,7 +1879,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { err.span_suggestion_verbose( span, msg, - similar_candidate.name, + similar_candidate_name, Applicability::MaybeIncorrect, ); } else { @@ -1904,7 +1903,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { { if let Some(candidates) = find_attr!(self.tcx.get_all_attrs(inherent_method.def_id), AttributeKind::Confusables{symbols, ..} => symbols) && candidates.contains(&item_name.name) - && let ty::AssocKind::Fn = inherent_method.kind + && inherent_method.is_fn() { let args = ty::GenericArgs::identity_for_item(self.tcx, inherent_method.def_id) @@ -1920,6 +1919,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { infer::FnCall, fn_sig, ); + let name = inherent_method.name(); if let Some(ref args) = call_args && fn_sig.inputs()[1..] .iter() @@ -1929,20 +1929,17 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { { err.span_suggestion_verbose( item_name.span, - format!("you might have meant to use `{}`", inherent_method.name), - inherent_method.name, + format!("you might have meant to use `{}`", name), + name, Applicability::MaybeIncorrect, ); - return Some(inherent_method.name); + return Some(name); } else if let None = call_args { err.span_note( self.tcx.def_span(inherent_method.def_id), - format!( - "you might have meant to use method `{}`", - inherent_method.name, - ), + format!("you might have meant to use method `{}`", name), ); - return Some(inherent_method.name); + return Some(name); } } } @@ -2118,8 +2115,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { // Only assoc fn with no receivers and only if // they are resolvable .filter(|item| { - matches!(item.kind, ty::AssocKind::Fn) - && !item.fn_has_self_parameter + matches!(item.kind, ty::AssocKind::Fn { has_self: false, .. }) && self .probe_for_name( Mode::Path, @@ -2263,7 +2259,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { }; let assoc = self.associated_value(assoc_did, item_name)?; - if assoc.kind != ty::AssocKind::Fn { + if !assoc.is_fn() { return None; } @@ -2561,7 +2557,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { ExprKind::Path(QPath::Resolved(_, path)) => { // local binding if let hir::def::Res::Local(hir_id) = path.res { - let span = tcx.hir().span(hir_id); + let span = tcx.hir_span(hir_id); let filename = tcx.sess.source_map().span_to_filename(span); let parent_node = self.tcx.parent_hir_node(hir_id); @@ -3210,7 +3206,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { // If this method receives `&self`, then the provided // argument _should_ coerce, so it's valid to suggest // just changing the path. - && pick.item.fn_has_self_parameter + && pick.item.is_method() && let Some(self_ty) = self.tcx.fn_sig(pick.item.def_id).instantiate_identity().inputs().skip_binder().get(0) && self_ty.is_ref() @@ -3331,7 +3327,9 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { let path_strings = candidates.iter().map(|trait_did| { format!( "{prefix}{}{postfix}\n", - with_crate_prefix!(self.tcx.def_path_str(*trait_did)), + with_no_visible_paths_if_doc_hidden!(with_crate_prefix!( + self.tcx.def_path_str(*trait_did) + )), ) }); @@ -3339,7 +3337,9 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { let parent_did = parent_map.get(trait_did).unwrap(); format!( "{prefix}{}::*{postfix} // trait {}\n", - with_crate_prefix!(self.tcx.def_path_str(*parent_did)), + with_no_visible_paths_if_doc_hidden!(with_crate_prefix!( + self.tcx.def_path_str(*parent_did) + )), self.tcx.item_name(*trait_did), ) }); @@ -3558,7 +3558,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { || (("Pin::new" == *pre) && ((sym::as_ref == item_name.name) || !unpin)) || inputs_len.is_some_and(|inputs_len| { - pick.item.kind == ty::AssocKind::Fn + pick.item.is_fn() && self .tcx .fn_sig(pick.item.def_id) @@ -3616,7 +3616,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { && pick.autoderefs == 0 // Check that the method of the same name that was found on the new `Pin` // receiver has the same number of arguments that appear in the user's code. - && inputs_len.is_some_and(|inputs_len| pick.item.kind == ty::AssocKind::Fn && self.tcx.fn_sig(pick.item.def_id).skip_binder().skip_binder().inputs().len() == inputs_len) + && inputs_len.is_some_and(|inputs_len| pick.item.is_fn() && self.tcx.fn_sig(pick.item.def_id).skip_binder().skip_binder().inputs().len() == inputs_len) { let indent = self .tcx @@ -3754,7 +3754,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { && self .associated_value(info.def_id, item_name) .filter(|item| { - if let ty::AssocKind::Fn = item.kind { + if item.is_fn() { let id = item .def_id .as_local() @@ -4277,17 +4277,17 @@ fn print_disambiguation_help<'tcx>( item: ty::AssocItem, ) -> Option { let trait_impl_type = trait_ref.self_ty().peel_refs(); - let trait_ref = if item.fn_has_self_parameter { + let trait_ref = if item.is_method() { trait_ref.print_only_trait_name().to_string() } else { format!("<{} as {}>", trait_ref.args[0], trait_ref.print_only_trait_name()) }; Some( - if matches!(item.kind, ty::AssocKind::Fn) + if item.is_fn() && let SelfSource::MethodCall(receiver) = source && let Some(args) = args { - let def_kind_descr = tcx.def_kind_descr(item.kind.as_def_kind(), item.def_id); + let def_kind_descr = tcx.def_kind_descr(item.as_def_kind(), item.def_id); let item_name = item.ident(tcx); let first_input = tcx.fn_sig(item.def_id).instantiate_identity().skip_binder().inputs().get(0); @@ -4302,7 +4302,7 @@ fn print_disambiguation_help<'tcx>( let args = if let Some(first_arg_type) = first_arg_type && (first_arg_type == tcx.types.self_param || first_arg_type == trait_impl_type - || item.fn_has_self_parameter) + || item.is_method()) { Some(receiver) } else { diff --git a/compiler/rustc_hir_typeck/src/op.rs b/compiler/rustc_hir_typeck/src/op.rs index a473e14b24445..7f7921b66b572 100644 --- a/compiler/rustc_hir_typeck/src/op.rs +++ b/compiler/rustc_hir_typeck/src/op.rs @@ -4,15 +4,15 @@ use rustc_data_structures::packed::Pu128; use rustc_errors::codes::*; use rustc_errors::{Applicability, Diag, struct_span_code_err}; use rustc_infer::traits::ObligationCauseCode; +use rustc_middle::bug; use rustc_middle::ty::adjustment::{ Adjust, Adjustment, AllowTwoPhase, AutoBorrow, AutoBorrowMutability, }; use rustc_middle::ty::print::with_no_trimmed_paths; use rustc_middle::ty::{self, IsSuggestable, Ty, TyCtxt, TypeVisitableExt}; -use rustc_middle::{bug, span_bug}; use rustc_session::errors::ExprParenthesesNeeded; use rustc_span::source_map::Spanned; -use rustc_span::{Ident, Span, sym}; +use rustc_span::{Span, Symbol, sym}; use rustc_trait_selection::infer::InferCtxtExt; use rustc_trait_selection::traits::{FulfillmentError, Obligation, ObligationCtxt}; use tracing::debug; @@ -24,24 +24,27 @@ use crate::Expectation; impl<'a, 'tcx> FnCtxt<'a, 'tcx> { /// Checks a `a = b` - pub(crate) fn check_expr_binop_assign( + pub(crate) fn check_expr_assign_op( &self, expr: &'tcx hir::Expr<'tcx>, - op: hir::BinOp, + op: hir::AssignOp, lhs: &'tcx hir::Expr<'tcx>, rhs: &'tcx hir::Expr<'tcx>, expected: Expectation<'tcx>, ) -> Ty<'tcx> { let (lhs_ty, rhs_ty, return_ty) = - self.check_overloaded_binop(expr, lhs, rhs, op, IsAssign::Yes, expected); - - let ty = - if !lhs_ty.is_ty_var() && !rhs_ty.is_ty_var() && is_builtin_binop(lhs_ty, rhs_ty, op) { - self.enforce_builtin_binop_types(lhs.span, lhs_ty, rhs.span, rhs_ty, op); - self.tcx.types.unit - } else { - return_ty - }; + self.check_overloaded_binop(expr, lhs, rhs, Op::AssignOp(op), expected); + + let category = BinOpCategory::from(op.node); + let ty = if !lhs_ty.is_ty_var() + && !rhs_ty.is_ty_var() + && is_builtin_binop(lhs_ty, rhs_ty, category) + { + self.enforce_builtin_binop_types(lhs.span, lhs_ty, rhs.span, rhs_ty, category); + self.tcx.types.unit + } else { + return_ty + }; self.check_lhs_assignable(lhs, E0067, op.span, |err| { if let Some(lhs_deref_ty) = self.deref_once_mutably_for_diagnostic(lhs_ty) { @@ -49,7 +52,8 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { .lookup_op_method( (lhs, lhs_deref_ty), Some((rhs, rhs_ty)), - Op::Binary(op, IsAssign::Yes), + lang_item_for_binop(self.tcx, Op::AssignOp(op)), + op.span, expected, ) .is_ok() @@ -60,7 +64,8 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { .lookup_op_method( (lhs, lhs_ty), Some((rhs, rhs_ty)), - Op::Binary(op, IsAssign::Yes), + lang_item_for_binop(self.tcx, Op::AssignOp(op)), + op.span, expected, ) .is_err() @@ -98,7 +103,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { expr.hir_id, expr, op, lhs_expr, rhs_expr ); - match BinOpCategory::from(op) { + match BinOpCategory::from(op.node) { BinOpCategory::Shortcircuit => { // && and || are a simple case. self.check_expr_coercible_to_type(lhs_expr, tcx.types.bool, None); @@ -114,14 +119,8 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { // Otherwise, we always treat operators as if they are // overloaded. This is the way to be most flexible w/r/t // types that get inferred. - let (lhs_ty, rhs_ty, return_ty) = self.check_overloaded_binop( - expr, - lhs_expr, - rhs_expr, - op, - IsAssign::No, - expected, - ); + let (lhs_ty, rhs_ty, return_ty) = + self.check_overloaded_binop(expr, lhs_expr, rhs_expr, Op::BinOp(op), expected); // Supply type inference hints if relevant. Probably these // hints should be enforced during select as part of the @@ -135,16 +134,17 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { // deduce that the result type should be `u32`, even // though we don't know yet what type 2 has and hence // can't pin this down to a specific impl. + let category = BinOpCategory::from(op.node); if !lhs_ty.is_ty_var() && !rhs_ty.is_ty_var() - && is_builtin_binop(lhs_ty, rhs_ty, op) + && is_builtin_binop(lhs_ty, rhs_ty, category) { let builtin_return_ty = self.enforce_builtin_binop_types( lhs_expr.span, lhs_ty, rhs_expr.span, rhs_ty, - op, + category, ); self.demand_eqtype(expr.span, builtin_return_ty, return_ty); builtin_return_ty @@ -161,16 +161,16 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { lhs_ty: Ty<'tcx>, rhs_span: Span, rhs_ty: Ty<'tcx>, - op: hir::BinOp, + category: BinOpCategory, ) -> Ty<'tcx> { - debug_assert!(is_builtin_binop(lhs_ty, rhs_ty, op)); + debug_assert!(is_builtin_binop(lhs_ty, rhs_ty, category)); // Special-case a single layer of referencing, so that things like `5.0 + &6.0f32` work. // (See https://github.com/rust-lang/rust/issues/57447.) let (lhs_ty, rhs_ty) = (deref_ty_if_possible(lhs_ty), deref_ty_if_possible(rhs_ty)); let tcx = self.tcx; - match BinOpCategory::from(op) { + match category { BinOpCategory::Shortcircuit => { self.demand_suptype(lhs_span, tcx.types.bool, lhs_ty); self.demand_suptype(rhs_span, tcx.types.bool, rhs_ty); @@ -201,17 +201,13 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { expr: &'tcx hir::Expr<'tcx>, lhs_expr: &'tcx hir::Expr<'tcx>, rhs_expr: &'tcx hir::Expr<'tcx>, - op: hir::BinOp, - is_assign: IsAssign, + op: Op, expected: Expectation<'tcx>, ) -> (Ty<'tcx>, Ty<'tcx>, Ty<'tcx>) { - debug!( - "check_overloaded_binop(expr.hir_id={}, op={:?}, is_assign={:?})", - expr.hir_id, op, is_assign - ); + debug!("check_overloaded_binop(expr.hir_id={}, op={:?})", expr.hir_id, op); - let lhs_ty = match is_assign { - IsAssign::No => { + let lhs_ty = match op { + Op::BinOp(_) => { // Find a suitable supertype of the LHS expression's type, by coercing to // a type variable, to pass as the `Self` to the trait, avoiding invariant // trait matching creating lifetime constraints that are too strict. @@ -221,7 +217,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { let fresh_var = self.next_ty_var(lhs_expr.span); self.demand_coerce(lhs_expr, lhs_ty, fresh_var, Some(rhs_expr), AllowTwoPhase::No) } - IsAssign::Yes => { + Op::AssignOp(_) => { // rust-lang/rust#52126: We have to use strict // equivalence on the LHS of an assign-op like `+=`; // overwritten or mutably-borrowed places cannot be @@ -238,11 +234,11 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { // us do better coercions than we would be able to do otherwise, // particularly for things like `String + &String`. let rhs_ty_var = self.next_ty_var(rhs_expr.span); - let result = self.lookup_op_method( (lhs_expr, lhs_ty), Some((rhs_expr, rhs_ty_var)), - Op::Binary(op, is_assign), + lang_item_for_binop(self.tcx, op), + op.span(), expected, ); @@ -252,15 +248,19 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { rhs_ty_var, Some(lhs_expr), |err, ty| { - self.suggest_swapping_lhs_and_rhs(err, ty, lhs_ty, rhs_expr, lhs_expr, op); + if let Op::BinOp(binop) = op + && binop.node == hir::BinOpKind::Eq + { + self.suggest_swapping_lhs_and_rhs(err, ty, lhs_ty, rhs_expr, lhs_expr); + } }, ); let rhs_ty = self.resolve_vars_with_obligations(rhs_ty); let return_ty = match result { Ok(method) => { - let by_ref_binop = !op.node.is_by_value(); - if is_assign == IsAssign::Yes || by_ref_binop { + let by_ref_binop = !op.is_by_value(); + if matches!(op, Op::AssignOp(_)) || by_ref_binop { if let ty::Ref(_, _, mutbl) = method.sig.inputs()[0].kind() { let mutbl = AutoBorrowMutability::new(*mutbl, AllowTwoPhase::Yes); let autoref = Adjustment { @@ -301,32 +301,32 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { Ty::new_misc_error(self.tcx) } Err(errors) => { - let (_, trait_def_id) = - lang_item_for_op(self.tcx, Op::Binary(op, is_assign), op.span); + let (_, trait_def_id) = lang_item_for_binop(self.tcx, op); let missing_trait = trait_def_id .map(|def_id| with_no_trimmed_paths!(self.tcx.def_path_str(def_id))); let mut path = None; let lhs_ty_str = self.tcx.short_string(lhs_ty, &mut path); let rhs_ty_str = self.tcx.short_string(rhs_ty, &mut path); - let (mut err, output_def_id) = match is_assign { - IsAssign::Yes => { + let (mut err, output_def_id) = match op { + Op::AssignOp(assign_op) => { + let s = assign_op.node.as_str(); let mut err = struct_span_code_err!( self.dcx(), expr.span, E0368, - "binary assignment operation `{}=` cannot be applied to type `{}`", - op.node.as_str(), + "binary assignment operation `{}` cannot be applied to type `{}`", + s, lhs_ty_str, ); err.span_label( lhs_expr.span, - format!("cannot use `{}=` on type `{}`", op.node.as_str(), lhs_ty_str), + format!("cannot use `{}` on type `{}`", s, lhs_ty_str), ); self.note_unmet_impls_on_type(&mut err, errors, false); (err, None) } - IsAssign::No => { - let message = match op.node { + Op::BinOp(bin_op) => { + let message = match bin_op.node { hir::BinOpKind::Add => { format!("cannot add `{rhs_ty_str}` to `{lhs_ty_str}`") } @@ -362,8 +362,8 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { } _ => format!( "binary operation `{}` cannot be applied to type `{}`", - op.node.as_str(), - lhs_ty_str, + bin_op.node.as_str(), + lhs_ty_str ), }; let output_def_id = trait_def_id.and_then(|def_id| { @@ -371,12 +371,12 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { .associated_item_def_ids(def_id) .iter() .find(|item_def_id| { - self.tcx.associated_item(*item_def_id).name == sym::Output + self.tcx.associated_item(*item_def_id).name() == sym::Output }) .cloned() }); let mut err = - struct_span_code_err!(self.dcx(), op.span, E0369, "{message}"); + struct_span_code_err!(self.dcx(), bin_op.span, E0369, "{message}"); if !lhs_expr.span.eq(&rhs_expr.span) { err.span_label(lhs_expr.span, lhs_ty_str.clone()); err.span_label(rhs_expr.span, rhs_ty_str); @@ -409,18 +409,15 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { .lookup_op_method( (lhs_expr, lhs_deref_ty), Some((rhs_expr, rhs_ty)), - Op::Binary(op, is_assign), + lang_item_for_binop(self.tcx, op), + op.span(), expected, ) .is_ok() { let msg = format!( - "`{}{}` can be used on `{}` if you dereference the left-hand side", - op.node.as_str(), - match is_assign { - IsAssign::Yes => "=", - IsAssign::No => "", - }, + "`{}` can be used on `{}` if you dereference the left-hand side", + op.as_str(), self.tcx.short_string(lhs_deref_ty, err.long_ty_path()), ); err.span_suggestion_verbose( @@ -442,14 +439,15 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { .lookup_op_method( (lhs_expr, lhs_adjusted_ty), Some((rhs_expr, rhs_adjusted_ty)), - Op::Binary(op, is_assign), + lang_item_for_binop(self.tcx, op), + op.span(), expected, ) .is_ok() { let lhs = self.tcx.short_string(lhs_adjusted_ty, err.long_ty_path()); let rhs = self.tcx.short_string(rhs_adjusted_ty, err.long_ty_path()); - let op = op.node.as_str(); + let op = op.as_str(); err.note(format!("an implementation for `{lhs} {op} {rhs}` exists")); if let Some(lhs_new_mutbl) = lhs_new_mutbl @@ -499,7 +497,8 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { self.lookup_op_method( (lhs_expr, lhs_ty), Some((rhs_expr, rhs_ty)), - Op::Binary(op, is_assign), + lang_item_for_binop(self.tcx, op), + op.span(), expected, ) .is_ok() @@ -511,13 +510,13 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { // We should suggest `a + b` => `*a + b` if `a` is copy, and suggest // `a += b` => `*a += b` if a is a mut ref. - if !op.span.can_be_used_for_suggestions() { + if !op.span().can_be_used_for_suggestions() { // Suppress suggestions when lhs and rhs are not in the same span as the error - } else if is_assign == IsAssign::Yes + } else if let Op::AssignOp(_) = op && let Some(lhs_deref_ty) = self.deref_once_mutably_for_diagnostic(lhs_ty) { suggest_deref_binop(&mut err, lhs_deref_ty); - } else if is_assign == IsAssign::No + } else if let Op::BinOp(_) = op && let ty::Ref(region, lhs_deref_ty, mutbl) = lhs_ty.kind() { if self.type_is_copy_modulo_regions(self.param_env, *lhs_deref_ty) { @@ -572,10 +571,12 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { } if let Some(missing_trait) = missing_trait { - if op.node == hir::BinOpKind::Add - && self.check_str_addition( - lhs_expr, rhs_expr, lhs_ty, rhs_ty, &mut err, is_assign, op, - ) + if matches!( + op, + Op::BinOp(Spanned { node: hir::BinOpKind::Add, .. }) + | Op::AssignOp(Spanned { node: hir::AssignOpKind::AddAssign, .. }) + ) && self + .check_str_addition(lhs_expr, rhs_expr, lhs_ty, rhs_ty, &mut err, op) { // This has nothing here because it means we did string // concatenation (e.g., "Hello " + "World!"). This means @@ -592,7 +593,8 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { .lookup_op_method( (lhs_expr, lhs_ty), Some((rhs_expr, rhs_ty)), - Op::Binary(op, is_assign), + lang_item_for_binop(self.tcx, op), + op.span(), expected, ) .unwrap_err(); @@ -642,9 +644,11 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { // Suggest using `add`, `offset` or `offset_from` for pointer - {integer}, // pointer + {integer} or pointer - pointer. - if op.span.can_be_used_for_suggestions() { - match op.node { - hir::BinOpKind::Add if lhs_ty.is_raw_ptr() && rhs_ty.is_integral() => { + if op.span().can_be_used_for_suggestions() { + match op { + Op::BinOp(Spanned { node: hir::BinOpKind::Add, .. }) + if lhs_ty.is_raw_ptr() && rhs_ty.is_integral() => + { err.multipart_suggestion( "consider using `wrapping_add` or `add` for pointer + {integer}", vec![ @@ -657,7 +661,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { Applicability::MaybeIncorrect, ); } - hir::BinOpKind::Sub => { + Op::BinOp(Spanned { node: hir::BinOpKind::Sub, .. }) => { if lhs_ty.is_raw_ptr() && rhs_ty.is_integral() { err.multipart_suggestion( "consider using `wrapping_sub` or `sub` for \ @@ -693,6 +697,57 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { } } + let lhs_name_str = match lhs_expr.kind { + hir::ExprKind::Path(hir::QPath::Resolved(_, path)) => { + path.segments.last().map_or("_".to_string(), |s| s.ident.to_string()) + } + _ => self + .tcx + .sess + .source_map() + .span_to_snippet(lhs_expr.span) + .unwrap_or("_".to_string()), + }; + + if op.span().can_be_used_for_suggestions() { + match op { + Op::AssignOp(Spanned { node: hir::AssignOpKind::AddAssign, .. }) + if lhs_ty.is_raw_ptr() && rhs_ty.is_integral() => + { + err.multipart_suggestion( + "consider using `add` or `wrapping_add` to do pointer arithmetic", + vec![ + (lhs_expr.span.shrink_to_lo(), format!("{} = ", lhs_name_str)), + ( + lhs_expr.span.between(rhs_expr.span), + ".wrapping_add(".to_owned(), + ), + (rhs_expr.span.shrink_to_hi(), ")".to_owned()), + ], + Applicability::MaybeIncorrect, + ); + } + Op::AssignOp(Spanned { node: hir::AssignOpKind::SubAssign, .. }) => { + if lhs_ty.is_raw_ptr() && rhs_ty.is_integral() { + err.multipart_suggestion( + "consider using `sub` or `wrapping_sub` to do pointer arithmetic", + vec![ + (lhs_expr.span.shrink_to_lo(), format!("{} = ", lhs_name_str)), + ( + lhs_expr.span.between(rhs_expr.span), + ".wrapping_sub(".to_owned(), + + ), + (rhs_expr.span.shrink_to_hi(), ")".to_owned()), + ], + Applicability::MaybeIncorrect, + ); + } + } + _ => {} + } + } + let reported = err.emit(); Ty::new_error(self.tcx, reported) } @@ -713,8 +768,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { lhs_ty: Ty<'tcx>, rhs_ty: Ty<'tcx>, err: &mut Diag<'_>, - is_assign: IsAssign, - op: hir::BinOp, + op: Op, ) -> bool { let str_concat_note = "string concatenation requires an owned `String` on the left"; let rm_borrow_msg = "remove the borrow to obtain an owned `String`"; @@ -733,8 +787,11 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { r_ty.kind(), ty::Ref(_, inner_ty, _) if *inner_ty.kind() == ty::Str )) => { - if let IsAssign::No = is_assign { // Do not supply this message if `&str += &str` - err.span_label(op.span, "`+` cannot be used to concatenate two `&str` strings"); + if let Op::BinOp(_) = op { // Do not supply this message if `&str += &str` + err.span_label( + op.span(), + "`+` cannot be used to concatenate two `&str` strings" + ); err.note(str_concat_note); if let hir::ExprKind::AddrOf(_, _, lhs_inner_expr) = lhs_expr.kind { err.span_suggestion_verbose( @@ -758,11 +815,11 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { if (*l_ty.kind() == ty::Str || is_std_string(l_ty)) && is_std_string(rhs_ty) => { err.span_label( - op.span, + op.span(), "`+` cannot be used to concatenate a `&str` with a `String`", ); - match is_assign { - IsAssign::No => { + match op { + Op::BinOp(_) => { let sugg_msg; let lhs_sugg = if let hir::ExprKind::AddrOf(_, _, lhs_inner_expr) = lhs_expr.kind { sugg_msg = "remove the borrow on the left and add one on the right"; @@ -781,7 +838,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { Applicability::MachineApplicable, ); } - IsAssign::Yes => { + Op::AssignOp(_) => { err.note(str_concat_note); } } @@ -799,7 +856,13 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { expected: Expectation<'tcx>, ) -> Ty<'tcx> { assert!(op.is_by_value()); - match self.lookup_op_method((ex, operand_ty), None, Op::Unary(op, ex.span), expected) { + match self.lookup_op_method( + (ex, operand_ty), + None, + lang_item_for_unop(self.tcx, op), + ex.span, + expected, + ) { Ok(method) => { self.write_method_call_and_enforce_effects(ex.hir_id, ex.span, method); method.sig.output() @@ -898,24 +961,20 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { &self, (lhs_expr, lhs_ty): (&'tcx hir::Expr<'tcx>, Ty<'tcx>), opt_rhs: Option<(&'tcx hir::Expr<'tcx>, Ty<'tcx>)>, - op: Op, + (opname, trait_did): (Symbol, Option), + span: Span, expected: Expectation<'tcx>, ) -> Result, Vec>> { - let span = match op { - Op::Binary(op, _) => op.span, - Op::Unary(_, span) => span, - }; - let (opname, Some(trait_did)) = lang_item_for_op(self.tcx, op, span) else { + let Some(trait_did) = trait_did else { // Bail if the operator trait is not defined. return Err(vec![]); }; debug!( - "lookup_op_method(lhs_ty={:?}, op={:?}, opname={:?}, trait_did={:?})", - lhs_ty, op, opname, trait_did + "lookup_op_method(lhs_ty={:?}, opname={:?}, trait_did={:?})", + lhs_ty, opname, trait_did ); - let opname = Ident::with_dummy_span(opname); let (opt_rhs_expr, opt_rhs_ty) = opt_rhs.unzip(); let cause = self.cause( span, @@ -930,7 +989,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { ); let method = - self.lookup_method_in_trait(cause.clone(), opname, trait_did, lhs_ty, opt_rhs_ty); + self.lookup_method_for_operator(cause.clone(), opname, trait_did, lhs_ty, opt_rhs_ty); match method { Some(ok) => { let method = self.register_infer_ok_obligations(ok); @@ -980,37 +1039,22 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { } } -fn lang_item_for_op( - tcx: TyCtxt<'_>, - op: Op, - span: Span, -) -> (rustc_span::Symbol, Option) { +fn lang_item_for_binop(tcx: TyCtxt<'_>, op: Op) -> (Symbol, Option) { let lang = tcx.lang_items(); - if let Op::Binary(op, IsAssign::Yes) = op { - match op.node { - hir::BinOpKind::Add => (sym::add_assign, lang.add_assign_trait()), - hir::BinOpKind::Sub => (sym::sub_assign, lang.sub_assign_trait()), - hir::BinOpKind::Mul => (sym::mul_assign, lang.mul_assign_trait()), - hir::BinOpKind::Div => (sym::div_assign, lang.div_assign_trait()), - hir::BinOpKind::Rem => (sym::rem_assign, lang.rem_assign_trait()), - hir::BinOpKind::BitXor => (sym::bitxor_assign, lang.bitxor_assign_trait()), - hir::BinOpKind::BitAnd => (sym::bitand_assign, lang.bitand_assign_trait()), - hir::BinOpKind::BitOr => (sym::bitor_assign, lang.bitor_assign_trait()), - hir::BinOpKind::Shl => (sym::shl_assign, lang.shl_assign_trait()), - hir::BinOpKind::Shr => (sym::shr_assign, lang.shr_assign_trait()), - hir::BinOpKind::Lt - | hir::BinOpKind::Le - | hir::BinOpKind::Ge - | hir::BinOpKind::Gt - | hir::BinOpKind::Eq - | hir::BinOpKind::Ne - | hir::BinOpKind::And - | hir::BinOpKind::Or => { - span_bug!(span, "impossible assignment operation: {}=", op.node.as_str()) - } - } - } else if let Op::Binary(op, IsAssign::No) = op { - match op.node { + match op { + Op::AssignOp(op) => match op.node { + hir::AssignOpKind::AddAssign => (sym::add_assign, lang.add_assign_trait()), + hir::AssignOpKind::SubAssign => (sym::sub_assign, lang.sub_assign_trait()), + hir::AssignOpKind::MulAssign => (sym::mul_assign, lang.mul_assign_trait()), + hir::AssignOpKind::DivAssign => (sym::div_assign, lang.div_assign_trait()), + hir::AssignOpKind::RemAssign => (sym::rem_assign, lang.rem_assign_trait()), + hir::AssignOpKind::BitXorAssign => (sym::bitxor_assign, lang.bitxor_assign_trait()), + hir::AssignOpKind::BitAndAssign => (sym::bitand_assign, lang.bitand_assign_trait()), + hir::AssignOpKind::BitOrAssign => (sym::bitor_assign, lang.bitor_assign_trait()), + hir::AssignOpKind::ShlAssign => (sym::shl_assign, lang.shl_assign_trait()), + hir::AssignOpKind::ShrAssign => (sym::shr_assign, lang.shr_assign_trait()), + }, + Op::BinOp(op) => match op.node { hir::BinOpKind::Add => (sym::add, lang.add_trait()), hir::BinOpKind::Sub => (sym::sub, lang.sub_trait()), hir::BinOpKind::Mul => (sym::mul, lang.mul_trait()), @@ -1028,20 +1072,24 @@ fn lang_item_for_op( hir::BinOpKind::Eq => (sym::eq, lang.eq_trait()), hir::BinOpKind::Ne => (sym::ne, lang.eq_trait()), hir::BinOpKind::And | hir::BinOpKind::Or => { - span_bug!(span, "&& and || are not overloadable") + bug!("&& and || are not overloadable") } - } - } else if let Op::Unary(hir::UnOp::Not, _) = op { - (sym::not, lang.not_trait()) - } else if let Op::Unary(hir::UnOp::Neg, _) = op { - (sym::neg, lang.neg_trait()) - } else { - bug!("lookup_op_method: op not supported: {:?}", op) + }, + } +} + +fn lang_item_for_unop(tcx: TyCtxt<'_>, op: hir::UnOp) -> (Symbol, Option) { + let lang = tcx.lang_items(); + match op { + hir::UnOp::Not => (sym::not, lang.not_trait()), + hir::UnOp::Neg => (sym::neg, lang.neg_trait()), + hir::UnOp::Deref => bug!("Deref is not overloadable"), } } // Binary operator categories. These categories summarize the behavior // with respect to the builtin operations supported. +#[derive(Clone, Copy)] enum BinOpCategory { /// &&, || -- cannot be overridden Shortcircuit, @@ -1063,44 +1111,58 @@ enum BinOpCategory { Comparison, } -impl BinOpCategory { - fn from(op: hir::BinOp) -> BinOpCategory { - match op.node { - hir::BinOpKind::Shl | hir::BinOpKind::Shr => BinOpCategory::Shift, - - hir::BinOpKind::Add - | hir::BinOpKind::Sub - | hir::BinOpKind::Mul - | hir::BinOpKind::Div - | hir::BinOpKind::Rem => BinOpCategory::Math, - - hir::BinOpKind::BitXor | hir::BinOpKind::BitAnd | hir::BinOpKind::BitOr => { - BinOpCategory::Bitwise - } - - hir::BinOpKind::Eq - | hir::BinOpKind::Ne - | hir::BinOpKind::Lt - | hir::BinOpKind::Le - | hir::BinOpKind::Ge - | hir::BinOpKind::Gt => BinOpCategory::Comparison, +impl From for BinOpCategory { + fn from(op: hir::BinOpKind) -> BinOpCategory { + use hir::BinOpKind::*; + match op { + Shl | Shr => BinOpCategory::Shift, + Add | Sub | Mul | Div | Rem => BinOpCategory::Math, + BitXor | BitAnd | BitOr => BinOpCategory::Bitwise, + Eq | Ne | Lt | Le | Ge | Gt => BinOpCategory::Comparison, + And | Or => BinOpCategory::Shortcircuit, + } + } +} - hir::BinOpKind::And | hir::BinOpKind::Or => BinOpCategory::Shortcircuit, +impl From for BinOpCategory { + fn from(op: hir::AssignOpKind) -> BinOpCategory { + use hir::AssignOpKind::*; + match op { + ShlAssign | ShrAssign => BinOpCategory::Shift, + AddAssign | SubAssign | MulAssign | DivAssign | RemAssign => BinOpCategory::Math, + BitXorAssign | BitAndAssign | BitOrAssign => BinOpCategory::Bitwise, } } } -/// Whether the binary operation is an assignment (`a += b`), or not (`a + b`) +/// An assignment op (e.g. `a += b`), or a binary op (e.g. `a + b`). #[derive(Clone, Copy, Debug, PartialEq)] -enum IsAssign { - No, - Yes, +enum Op { + BinOp(hir::BinOp), + AssignOp(hir::AssignOp), } -#[derive(Clone, Copy, Debug)] -enum Op { - Binary(hir::BinOp, IsAssign), - Unary(hir::UnOp, Span), +impl Op { + fn span(&self) -> Span { + match self { + Op::BinOp(op) => op.span, + Op::AssignOp(op) => op.span, + } + } + + fn as_str(&self) -> &'static str { + match self { + Op::BinOp(op) => op.node.as_str(), + Op::AssignOp(op) => op.node.as_str(), + } + } + + fn is_by_value(&self) -> bool { + match self { + Op::BinOp(op) => op.node.is_by_value(), + Op::AssignOp(op) => op.node.is_by_value(), + } + } } /// Dereferences a single level of immutable referencing. @@ -1127,27 +1189,24 @@ fn deref_ty_if_possible(ty: Ty<'_>) -> Ty<'_> { /// Reason #2 is the killer. I tried for a while to always use /// overloaded logic and just check the types in constants/codegen after /// the fact, and it worked fine, except for SIMD types. -nmatsakis -fn is_builtin_binop<'tcx>(lhs: Ty<'tcx>, rhs: Ty<'tcx>, op: hir::BinOp) -> bool { +fn is_builtin_binop<'tcx>(lhs: Ty<'tcx>, rhs: Ty<'tcx>, category: BinOpCategory) -> bool { // Special-case a single layer of referencing, so that things like `5.0 + &6.0f32` work. // (See https://github.com/rust-lang/rust/issues/57447.) let (lhs, rhs) = (deref_ty_if_possible(lhs), deref_ty_if_possible(rhs)); - match BinOpCategory::from(op) { + match category.into() { BinOpCategory::Shortcircuit => true, - BinOpCategory::Shift => { lhs.references_error() || rhs.references_error() || lhs.is_integral() && rhs.is_integral() } - BinOpCategory::Math => { lhs.references_error() || rhs.references_error() || lhs.is_integral() && rhs.is_integral() || lhs.is_floating_point() && rhs.is_floating_point() } - BinOpCategory::Bitwise => { lhs.references_error() || rhs.references_error() @@ -1155,7 +1214,6 @@ fn is_builtin_binop<'tcx>(lhs: Ty<'tcx>, rhs: Ty<'tcx>, op: hir::BinOp) -> bool || lhs.is_floating_point() && rhs.is_floating_point() || lhs.is_bool() && rhs.is_bool() } - BinOpCategory::Comparison => { lhs.references_error() || rhs.references_error() || lhs.is_scalar() && rhs.is_scalar() } diff --git a/compiler/rustc_hir_typeck/src/opaque_types.rs b/compiler/rustc_hir_typeck/src/opaque_types.rs new file mode 100644 index 0000000000000..e0224f8c6e1b4 --- /dev/null +++ b/compiler/rustc_hir_typeck/src/opaque_types.rs @@ -0,0 +1,26 @@ +use super::FnCtxt; +impl<'tcx> FnCtxt<'_, 'tcx> { + /// We may in theory add further uses of an opaque after cloning the opaque + /// types storage during writeback when computing the defining uses. + /// + /// Silently ignoring them is dangerous and could result in ICE or even in + /// unsoundness, so we make sure we catch such cases here. There's currently + /// no known code where this actually happens, even with the new solver which + /// does normalize types in writeback after cloning the opaque type storage. + /// + /// FIXME(@lcnr): I believe this should be possible in theory and would like + /// an actual test here. After playing around with this for an hour, I wasn't + /// able to do anything which didn't already try to normalize the opaque before + /// then, either allowing compilation to succeed or causing an ambiguity error. + pub(super) fn detect_opaque_types_added_during_writeback(&self) { + let num_entries = self.checked_opaque_types_storage_entries.take().unwrap(); + for (key, hidden_type) in + self.inner.borrow_mut().opaque_types().opaque_types_added_since(num_entries) + { + let opaque_type_string = self.tcx.def_path_str(key.def_id); + let msg = format!("unexpected cyclic definition of `{opaque_type_string}`"); + self.dcx().span_delayed_bug(hidden_type.span, msg); + } + let _ = self.take_opaque_types(); + } +} diff --git a/compiler/rustc_hir_typeck/src/pat.rs b/compiler/rustc_hir_typeck/src/pat.rs index f1f956779c94f..17d48184dd971 100644 --- a/compiler/rustc_hir_typeck/src/pat.rs +++ b/compiler/rustc_hir_typeck/src/pat.rs @@ -9,11 +9,13 @@ use rustc_errors::{ Applicability, Diag, ErrorGuaranteed, MultiSpan, pluralize, struct_span_code_err, }; use rustc_hir::def::{CtorKind, DefKind, Res}; +use rustc_hir::def_id::DefId; use rustc_hir::pat_util::EnumerateAndAdjustIterator; use rustc_hir::{ self as hir, BindingMode, ByRef, ExprKind, HirId, LangItem, Mutability, Pat, PatExpr, PatExprKind, PatKind, expr_needs_parens, }; +use rustc_hir_analysis::autoderef::report_autoderef_recursion_limit_error; use rustc_infer::infer; use rustc_middle::traits::PatternOriginExpr; use rustc_middle::ty::{self, Ty, TypeVisitableExt}; @@ -29,11 +31,12 @@ use rustc_trait_selection::infer::InferCtxtExt; use rustc_trait_selection::traits::{ObligationCause, ObligationCauseCode}; use tracing::{debug, instrument, trace}; use ty::VariantDef; +use ty::adjustment::{PatAdjust, PatAdjustment}; use super::report_unexpected_variant_res; use crate::expectation::Expectation; use crate::gather_locals::DeclOrigin; -use crate::{FnCtxt, LoweredTy, errors}; +use crate::{FnCtxt, errors}; const CANNOT_IMPLICITLY_DEREF_POINTER_TRAIT_OBJ: &str = "\ This error indicates that a pointer to a trait type cannot be implicitly dereferenced by a \ @@ -161,15 +164,39 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { /// Mode for adjusting the expected type and binding mode. #[derive(Clone, Copy, Debug, PartialEq, Eq)] enum AdjustMode { - /// Peel off all immediate reference types. - Peel, - /// Reset binding mode to the initial mode. - /// Used for destructuring assignment, where we don't want any match ergonomics. - Reset, + /// Peel off all immediate reference types. If the `deref_patterns` feature is enabled, this + /// also peels smart pointer ADTs. + Peel { kind: PeelKind }, /// Pass on the input binding mode and expected type. Pass, } +/// Restrictions on what types to peel when adjusting the expected type and binding mode. +#[derive(Clone, Copy, Debug, PartialEq, Eq)] +enum PeelKind { + /// Only peel reference types. This is used for explicit `deref!(_)` patterns, which dereference + /// any number of `&`/`&mut` references, plus a single smart pointer. + ExplicitDerefPat, + /// Implicitly peel references, and if `deref_patterns` is enabled, smart pointer ADTs. + Implicit { + /// The ADT the pattern is a constructor for, if applicable, so that we don't peel it. See + /// [`ResolvedPat`] for more information. + until_adt: Option, + /// The number of references at the head of the pattern's type, so we can leave that many + /// untouched. This is `1` for string literals, and `0` for most patterns. + pat_ref_layers: usize, + }, +} + +impl AdjustMode { + const fn peel_until_adt(opt_adt_def: Option) -> AdjustMode { + AdjustMode::Peel { kind: PeelKind::Implicit { until_adt: opt_adt_def, pat_ref_layers: 0 } } + } + const fn peel_all() -> AdjustMode { + AdjustMode::peel_until_adt(None) + } +} + /// `ref mut` bindings (explicit or match-ergonomics) are not allowed behind an `&` reference. /// Normally, the borrow checker enforces this, but for (currently experimental) match ergonomics, /// we track this when typing patterns for two purposes: @@ -245,6 +272,47 @@ enum InheritedRefMatchRule { }, } +/// When checking patterns containing paths, we need to know the path's resolution to determine +/// whether to apply match ergonomics and implicitly dereference the scrutinee. For instance, when +/// the `deref_patterns` feature is enabled and we're matching against a scrutinee of type +/// `Cow<'a, Option>`, we insert an implicit dereference to allow the pattern `Some(_)` to type, +/// but we must not dereference it when checking the pattern `Cow::Borrowed(_)`. +/// +/// `ResolvedPat` contains the information from resolution needed to determine match ergonomics +/// adjustments, and to finish checking the pattern once we know its adjusted type. +#[derive(Clone, Copy, Debug)] +struct ResolvedPat<'tcx> { + /// The type of the pattern, to be checked against the type of the scrutinee after peeling. This + /// is also used to avoid peeling the scrutinee's constructors (see the `Cow` example above). + ty: Ty<'tcx>, + kind: ResolvedPatKind<'tcx>, +} + +#[derive(Clone, Copy, Debug)] +enum ResolvedPatKind<'tcx> { + Path { res: Res, pat_res: Res, segments: &'tcx [hir::PathSegment<'tcx>] }, + Struct { variant: &'tcx VariantDef }, + TupleStruct { res: Res, variant: &'tcx VariantDef }, +} + +impl<'tcx> ResolvedPat<'tcx> { + fn adjust_mode(&self) -> AdjustMode { + if let ResolvedPatKind::Path { res, .. } = self.kind + && matches!(res, Res::Def(DefKind::Const | DefKind::AssocConst, _)) + { + // These constants can be of a reference type, e.g. `const X: &u8 = &0;`. + // Peeling the reference types too early will cause type checking failures. + // Although it would be possible to *also* peel the types of the constants too. + AdjustMode::Pass + } else { + // The remaining possible resolutions for path, struct, and tuple struct patterns are + // ADT constructors. As such, we may peel references freely, but we must not peel the + // ADT itself from the scrutinee if it's a smart pointer. + AdjustMode::peel_until_adt(self.ty.ty_adt_def().map(|adt| adt.did())) + } + } +} + impl<'a, 'tcx> FnCtxt<'a, 'tcx> { /// Experimental pattern feature: after matching against a shared reference, do we limit the /// default binding mode in subpatterns to be `ref` when it would otherwise be `ref mut`? @@ -321,79 +389,35 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { /// Conversely, inside this module, `check_pat_top` should never be used. #[instrument(level = "debug", skip(self, pat_info))] fn check_pat(&self, pat: &'tcx Pat<'tcx>, expected: Ty<'tcx>, pat_info: PatInfo<'tcx>) { - let PatInfo { binding_mode, max_ref_mutbl, top_info: ti, current_depth, .. } = pat_info; - - let path_res = match pat.kind { + // For patterns containing paths, we need the path's resolution to determine whether to + // implicitly dereference the scrutinee before matching. + let opt_path_res = match pat.kind { PatKind::Expr(PatExpr { kind: PatExprKind::Path(qpath), hir_id, span }) => { - Some(self.resolve_ty_and_res_fully_qualified_call(qpath, *hir_id, *span)) + Some(self.resolve_pat_path(*hir_id, *span, qpath)) } + PatKind::Struct(ref qpath, ..) => Some(self.resolve_pat_struct(pat, qpath)), + PatKind::TupleStruct(ref qpath, ..) => Some(self.resolve_pat_tuple_struct(pat, qpath)), _ => None, }; - let adjust_mode = self.calc_adjust_mode(pat, path_res.map(|(res, ..)| res)); - let (expected, binding_mode, max_ref_mutbl) = - self.calc_default_binding_mode(pat, expected, binding_mode, adjust_mode, max_ref_mutbl); - let pat_info = PatInfo { - binding_mode, - max_ref_mutbl, - top_info: ti, - decl_origin: pat_info.decl_origin, - current_depth: current_depth + 1, - }; - - let ty = match pat.kind { - PatKind::Wild | PatKind::Err(_) => expected, - // We allow any type here; we ensure that the type is uninhabited during match checking. - PatKind::Never => expected, - PatKind::Expr(PatExpr { kind: PatExprKind::Path(qpath), hir_id, span }) => { - let ty = self.check_pat_path( - *hir_id, - pat.hir_id, - *span, - qpath, - path_res.unwrap(), - expected, - &pat_info.top_info, - ); - self.write_ty(*hir_id, ty); - ty - } - PatKind::Expr(lt) => self.check_pat_lit(pat.span, lt, expected, &pat_info.top_info), - PatKind::Range(lhs, rhs, _) => { - self.check_pat_range(pat.span, lhs, rhs, expected, &pat_info.top_info) - } - PatKind::Binding(ba, var_id, ident, sub) => { - self.check_pat_ident(pat, ba, var_id, ident, sub, expected, pat_info) - } - PatKind::TupleStruct(ref qpath, subpats, ddpos) => { - self.check_pat_tuple_struct(pat, qpath, subpats, ddpos, expected, pat_info) - } - PatKind::Struct(ref qpath, fields, has_rest_pat) => { - self.check_pat_struct(pat, qpath, fields, has_rest_pat, expected, pat_info) - } - PatKind::Guard(pat, cond) => { - self.check_pat(pat, expected, pat_info); - self.check_expr_has_type_or_error(cond, self.tcx.types.bool, |_| {}); - expected - } - PatKind::Or(pats) => { - for pat in pats { - self.check_pat(pat, expected, pat_info); - } - expected - } - PatKind::Tuple(elements, ddpos) => { - self.check_pat_tuple(pat.span, elements, ddpos, expected, pat_info) - } - PatKind::Box(inner) => self.check_pat_box(pat.span, inner, expected, pat_info), - PatKind::Deref(inner) => self.check_pat_deref(pat.span, inner, expected, pat_info), - PatKind::Ref(inner, mutbl) => self.check_pat_ref(pat, inner, mutbl, expected, pat_info), - PatKind::Slice(before, slice, after) => { - self.check_pat_slice(pat.span, before, slice, after, expected, pat_info) - } - }; - + let adjust_mode = self.calc_adjust_mode(pat, opt_path_res); + let ty = self.check_pat_inner(pat, opt_path_res, adjust_mode, expected, pat_info); self.write_ty(pat.hir_id, ty); + // If we implicitly inserted overloaded dereferences before matching, check the pattern to + // see if the dereferenced types need `DerefMut` bounds. + if let Some(derefed_tys) = self.typeck_results.borrow().pat_adjustments().get(pat.hir_id) + && derefed_tys.iter().any(|adjust| adjust.kind == PatAdjust::OverloadedDeref) + { + self.register_deref_mut_bounds_if_needed( + pat.span, + pat, + derefed_tys.iter().filter_map(|adjust| match adjust.kind { + PatAdjust::OverloadedDeref => Some(adjust.source), + PatAdjust::BuiltinDeref => None, + }), + ); + } + // (note_1): In most of the cases where (note_1) is referenced // (literals and constants being the exception), we relate types // using strict equality, even though subtyping would be sufficient. @@ -437,77 +461,257 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { // `regions-relate-bound-regions-on-closures-to-inference-variables.rs`, } - /// Compute the new expected type and default binding mode from the old ones - /// as well as the pattern form we are currently checking. - fn calc_default_binding_mode( + // Helper to avoid resolving the same path pattern several times. + fn check_pat_inner( &self, pat: &'tcx Pat<'tcx>, - expected: Ty<'tcx>, - def_br: ByRef, + opt_path_res: Option, ErrorGuaranteed>>, adjust_mode: AdjustMode, - max_ref_mutbl: MutblCap, - ) -> (Ty<'tcx>, ByRef, MutblCap) { + expected: Ty<'tcx>, + pat_info: PatInfo<'tcx>, + ) -> Ty<'tcx> { #[cfg(debug_assertions)] - if def_br == ByRef::Yes(Mutability::Mut) - && max_ref_mutbl != MutblCap::Mut + if pat_info.binding_mode == ByRef::Yes(Mutability::Mut) + && pat_info.max_ref_mutbl != MutblCap::Mut && self.downgrade_mut_inside_shared() { span_bug!(pat.span, "Pattern mutability cap violated!"); } - match adjust_mode { - AdjustMode::Pass => (expected, def_br, max_ref_mutbl), - AdjustMode::Reset => (expected, ByRef::No, MutblCap::Mut), - AdjustMode::Peel => self.peel_off_references(pat, expected, def_br, max_ref_mutbl), + + // Resolve type if needed. + let expected = if let AdjustMode::Peel { .. } = adjust_mode + && pat.default_binding_modes + { + self.try_structurally_resolve_type(pat.span, expected) + } else { + expected + }; + let old_pat_info = pat_info; + let pat_info = PatInfo { current_depth: old_pat_info.current_depth + 1, ..old_pat_info }; + + match pat.kind { + // Peel off a `&` or `&mut` from the scrutinee type. See the examples in + // `tests/ui/rfcs/rfc-2005-default-binding-mode`. + _ if let AdjustMode::Peel { kind: peel_kind } = adjust_mode + && pat.default_binding_modes + && let ty::Ref(_, inner_ty, inner_mutability) = *expected.kind() + && self.should_peel_ref(peel_kind, expected) => + { + debug!("inspecting {:?}", expected); + + debug!("current discriminant is Ref, inserting implicit deref"); + // Preserve the reference type. We'll need it later during THIR lowering. + self.typeck_results + .borrow_mut() + .pat_adjustments_mut() + .entry(pat.hir_id) + .or_default() + .push(PatAdjustment { kind: PatAdjust::BuiltinDeref, source: expected }); + + let mut binding_mode = ByRef::Yes(match pat_info.binding_mode { + // If default binding mode is by value, make it `ref` or `ref mut` + // (depending on whether we observe `&` or `&mut`). + ByRef::No | + // When `ref mut`, stay a `ref mut` (on `&mut`) or downgrade to `ref` (on `&`). + ByRef::Yes(Mutability::Mut) => inner_mutability, + // Once a `ref`, always a `ref`. + // This is because a `& &mut` cannot mutate the underlying value. + ByRef::Yes(Mutability::Not) => Mutability::Not, + }); + + let mut max_ref_mutbl = pat_info.max_ref_mutbl; + if self.downgrade_mut_inside_shared() { + binding_mode = binding_mode.cap_ref_mutability(max_ref_mutbl.as_mutbl()); + } + if binding_mode == ByRef::Yes(Mutability::Not) { + max_ref_mutbl = MutblCap::Not; + } + debug!("default binding mode is now {:?}", binding_mode); + + // Use the old pat info to keep `current_depth` to its old value. + let new_pat_info = PatInfo { binding_mode, max_ref_mutbl, ..old_pat_info }; + // Recurse with the new expected type. + self.check_pat_inner(pat, opt_path_res, adjust_mode, inner_ty, new_pat_info) + } + // If `deref_patterns` is enabled, peel a smart pointer from the scrutinee type. See the + // examples in `tests/ui/pattern/deref_patterns/`. + _ if self.tcx.features().deref_patterns() + && let AdjustMode::Peel { kind: peel_kind } = adjust_mode + && pat.default_binding_modes + && self.should_peel_smart_pointer(peel_kind, expected) => + { + debug!("scrutinee ty {expected:?} is a smart pointer, inserting overloaded deref"); + // The scrutinee is a smart pointer; implicitly dereference it. This adds a + // requirement that `expected: DerefPure`. + let mut inner_ty = self.deref_pat_target(pat.span, expected); + // Once we've checked `pat`, we'll add a `DerefMut` bound if it contains any + // `ref mut` bindings. See `Self::register_deref_mut_bounds_if_needed`. + + let mut typeck_results = self.typeck_results.borrow_mut(); + let mut pat_adjustments_table = typeck_results.pat_adjustments_mut(); + let pat_adjustments = pat_adjustments_table.entry(pat.hir_id).or_default(); + // We may reach the recursion limit if a user matches on a type `T` satisfying + // `T: Deref`; error gracefully in this case. + // FIXME(deref_patterns): If `deref_patterns` stabilizes, it may make sense to move + // this check out of this branch. Alternatively, this loop could be implemented with + // autoderef and this check removed. For now though, don't break code compiling on + // stable with lots of `&`s and a low recursion limit, if anyone's done that. + if self.tcx.recursion_limit().value_within_limit(pat_adjustments.len()) { + // Preserve the smart pointer type for THIR lowering and closure upvar analysis. + pat_adjustments + .push(PatAdjustment { kind: PatAdjust::OverloadedDeref, source: expected }); + } else { + let guar = report_autoderef_recursion_limit_error(self.tcx, pat.span, expected); + inner_ty = Ty::new_error(self.tcx, guar); + } + drop(typeck_results); + + // Recurse, using the old pat info to keep `current_depth` to its old value. + // Peeling smart pointers does not update the default binding mode. + self.check_pat_inner(pat, opt_path_res, adjust_mode, inner_ty, old_pat_info) + } + PatKind::Missing | PatKind::Wild | PatKind::Err(_) => expected, + // We allow any type here; we ensure that the type is uninhabited during match checking. + PatKind::Never => expected, + PatKind::Expr(PatExpr { kind: PatExprKind::Path(_), hir_id, .. }) => { + let ty = match opt_path_res.unwrap() { + Ok(ref pr) => { + self.check_pat_path(pat.hir_id, pat.span, pr, expected, &pat_info.top_info) + } + Err(guar) => Ty::new_error(self.tcx, guar), + }; + self.write_ty(*hir_id, ty); + ty + } + PatKind::Expr(lt) => self.check_pat_lit(pat.span, lt, expected, &pat_info.top_info), + PatKind::Range(lhs, rhs, _) => { + self.check_pat_range(pat.span, lhs, rhs, expected, &pat_info.top_info) + } + PatKind::Binding(ba, var_id, ident, sub) => { + self.check_pat_ident(pat, ba, var_id, ident, sub, expected, pat_info) + } + PatKind::TupleStruct(ref qpath, subpats, ddpos) => match opt_path_res.unwrap() { + Ok(ResolvedPat { ty, kind: ResolvedPatKind::TupleStruct { res, variant } }) => self + .check_pat_tuple_struct( + pat, qpath, subpats, ddpos, res, ty, variant, expected, pat_info, + ), + Err(guar) => { + let ty_err = Ty::new_error(self.tcx, guar); + for subpat in subpats { + self.check_pat(subpat, ty_err, pat_info); + } + ty_err + } + Ok(pr) => span_bug!(pat.span, "tuple struct pattern resolved to {pr:?}"), + }, + PatKind::Struct(_, fields, has_rest_pat) => match opt_path_res.unwrap() { + Ok(ResolvedPat { ty, kind: ResolvedPatKind::Struct { variant } }) => self + .check_pat_struct(pat, fields, has_rest_pat, ty, variant, expected, pat_info), + Err(guar) => { + let ty_err = Ty::new_error(self.tcx, guar); + for field in fields { + self.check_pat(field.pat, ty_err, pat_info); + } + ty_err + } + Ok(pr) => span_bug!(pat.span, "struct pattern resolved to {pr:?}"), + }, + PatKind::Guard(pat, cond) => { + self.check_pat(pat, expected, pat_info); + self.check_expr_has_type_or_error(cond, self.tcx.types.bool, |_| {}); + expected + } + PatKind::Or(pats) => { + for pat in pats { + self.check_pat(pat, expected, pat_info); + } + expected + } + PatKind::Tuple(elements, ddpos) => { + self.check_pat_tuple(pat.span, elements, ddpos, expected, pat_info) + } + PatKind::Box(inner) => self.check_pat_box(pat.span, inner, expected, pat_info), + PatKind::Deref(inner) => self.check_pat_deref(pat.span, inner, expected, pat_info), + PatKind::Ref(inner, mutbl) => self.check_pat_ref(pat, inner, mutbl, expected, pat_info), + PatKind::Slice(before, slice, after) => { + self.check_pat_slice(pat.span, before, slice, after, expected, pat_info) + } } } /// How should the binding mode and expected type be adjusted? /// - /// When the pattern is a path pattern, `opt_path_res` must be `Some(res)`. - fn calc_adjust_mode(&self, pat: &'tcx Pat<'tcx>, opt_path_res: Option) -> AdjustMode { - // When we perform destructuring assignment, we disable default match bindings, which are - // unintuitive in this context. - if !pat.default_binding_modes { - return AdjustMode::Reset; - } + /// When the pattern contains a path, `opt_path_res` must be `Some(path_res)`. + fn calc_adjust_mode( + &self, + pat: &'tcx Pat<'tcx>, + opt_path_res: Option, ErrorGuaranteed>>, + ) -> AdjustMode { match &pat.kind { // Type checking these product-like types successfully always require // that the expected type be of those types and not reference types. + PatKind::Tuple(..) | PatKind::Range(..) | PatKind::Slice(..) => AdjustMode::peel_all(), + // When checking an explicit deref pattern, only peel reference types. + // FIXME(deref_patterns): If box patterns and deref patterns need to coexist, box + // patterns may want `PeelKind::Implicit`, stopping on encountering a box. + PatKind::Box(_) | PatKind::Deref(_) => { + AdjustMode::Peel { kind: PeelKind::ExplicitDerefPat } + } + // A never pattern behaves somewhat like a literal or unit variant. + PatKind::Never => AdjustMode::peel_all(), + // For patterns with paths, how we peel the scrutinee depends on the path's resolution. PatKind::Struct(..) | PatKind::TupleStruct(..) - | PatKind::Tuple(..) - | PatKind::Box(_) - | PatKind::Deref(_) - | PatKind::Range(..) - | PatKind::Slice(..) => AdjustMode::Peel, - // A never pattern behaves somewhat like a literal or unit variant. - PatKind::Never => AdjustMode::Peel, - PatKind::Expr(PatExpr { kind: PatExprKind::Path(_), .. }) => match opt_path_res.unwrap() { - // These constants can be of a reference type, e.g. `const X: &u8 = &0;`. - // Peeling the reference types too early will cause type checking failures. - // Although it would be possible to *also* peel the types of the constants too. - Res::Def(DefKind::Const | DefKind::AssocConst, _) => AdjustMode::Pass, - // In the `ValueNS`, we have `SelfCtor(..) | Ctor(_, Const), _)` remaining which - // could successfully compile. The former being `Self` requires a unit struct. - // In either case, and unlike constants, the pattern itself cannot be - // a reference type wherefore peeling doesn't give up any expressiveness. - _ => AdjustMode::Peel, - }, + | PatKind::Expr(PatExpr { kind: PatExprKind::Path(_), .. }) => { + // If there was an error resolving the path, default to peeling everything. + opt_path_res.unwrap().map_or(AdjustMode::peel_all(), |pr| pr.adjust_mode()) + } // String and byte-string literals result in types `&str` and `&[u8]` respectively. // All other literals result in non-reference types. - // As a result, we allow `if let 0 = &&0 {}` but not `if let "foo" = &&"foo" {}`. - // - // Call `resolve_vars_if_possible` here for inline const blocks. - PatKind::Expr(lt) => match self.resolve_vars_if_possible(self.check_pat_expr_unadjusted(lt)).kind() { - ty::Ref(..) => AdjustMode::Pass, - _ => AdjustMode::Peel, - }, + // As a result, we allow `if let 0 = &&0 {}` but not `if let "foo" = &&"foo" {}` unless + // `deref_patterns` is enabled. + PatKind::Expr(lt) => { + // Path patterns have already been handled, and inline const blocks currently + // aren't possible to write, so any handling for them would be untested. + if cfg!(debug_assertions) + && self.tcx.features().deref_patterns() + && !matches!(lt.kind, PatExprKind::Lit { .. }) + { + span_bug!( + lt.span, + "FIXME(deref_patterns): adjust mode unimplemented for {:?}", + lt.kind + ); + } + // Call `resolve_vars_if_possible` here for inline const blocks. + let lit_ty = self.resolve_vars_if_possible(self.check_pat_expr_unadjusted(lt)); + // If `deref_patterns` is enabled, allow `if let "foo" = &&"foo" {}`. + if self.tcx.features().deref_patterns() { + let mut peeled_ty = lit_ty; + let mut pat_ref_layers = 0; + while let ty::Ref(_, inner_ty, mutbl) = + *self.try_structurally_resolve_type(pat.span, peeled_ty).kind() + { + // We rely on references at the head of constants being immutable. + debug_assert!(mutbl.is_not()); + pat_ref_layers += 1; + peeled_ty = inner_ty; + } + AdjustMode::Peel { + kind: PeelKind::Implicit { until_adt: None, pat_ref_layers }, + } + } else { + if lit_ty.is_ref() { AdjustMode::Pass } else { AdjustMode::peel_all() } + } + } // Ref patterns are complicated, we handle them in `check_pat_ref`. - PatKind::Ref(..) => AdjustMode::Pass, + PatKind::Ref(..) + // No need to do anything on a missing pattern. + | PatKind::Missing // A `_` pattern works with any expected type, so there's no need to do anything. - PatKind::Wild + | PatKind::Wild // A malformed pattern doesn't have an expected type, so let's just accept any type. | PatKind::Err(_) // Bindings also work with whatever the expected type is, @@ -524,62 +728,65 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { } } - /// Peel off as many immediately nested `& mut?` from the expected type as possible - /// and return the new expected type and binding default binding mode. - /// The adjustments vector, if non-empty is stored in a table. - fn peel_off_references( - &self, - pat: &'tcx Pat<'tcx>, - expected: Ty<'tcx>, - mut def_br: ByRef, - mut max_ref_mutbl: MutblCap, - ) -> (Ty<'tcx>, ByRef, MutblCap) { - let mut expected = self.try_structurally_resolve_type(pat.span, expected); - // Peel off as many `&` or `&mut` from the scrutinee type as possible. For example, - // for `match &&&mut Some(5)` the loop runs three times, aborting when it reaches - // the `Some(5)` which is not of type Ref. - // - // For each ampersand peeled off, update the binding mode and push the original - // type into the adjustments vector. - // - // See the examples in `ui/match-defbm*.rs`. - let mut pat_adjustments = vec![]; - while let ty::Ref(_, inner_ty, inner_mutability) = *expected.kind() { - debug!("inspecting {:?}", expected); - - debug!("current discriminant is Ref, inserting implicit deref"); - // Preserve the reference type. We'll need it later during THIR lowering. - pat_adjustments.push(expected); - - expected = self.try_structurally_resolve_type(pat.span, inner_ty); - def_br = ByRef::Yes(match def_br { - // If default binding mode is by value, make it `ref` or `ref mut` - // (depending on whether we observe `&` or `&mut`). - ByRef::No | - // When `ref mut`, stay a `ref mut` (on `&mut`) or downgrade to `ref` (on `&`). - ByRef::Yes(Mutability::Mut) => inner_mutability, - // Once a `ref`, always a `ref`. - // This is because a `& &mut` cannot mutate the underlying value. - ByRef::Yes(Mutability::Not) => Mutability::Not, - }); - } + /// Assuming `expected` is a reference type, determine whether to peel it before matching. + fn should_peel_ref(&self, peel_kind: PeelKind, mut expected: Ty<'tcx>) -> bool { + debug_assert!(expected.is_ref()); + let pat_ref_layers = match peel_kind { + PeelKind::ExplicitDerefPat => 0, + PeelKind::Implicit { pat_ref_layers, .. } => pat_ref_layers, + }; - if self.downgrade_mut_inside_shared() { - def_br = def_br.cap_ref_mutability(max_ref_mutbl.as_mutbl()); - } - if def_br == ByRef::Yes(Mutability::Not) { - max_ref_mutbl = MutblCap::Not; + // Most patterns don't have reference types, so we'll want to peel all references from the + // scrutinee before matching. To optimize for the common case, return early. + if pat_ref_layers == 0 { + return true; } + debug_assert!( + self.tcx.features().deref_patterns(), + "Peeling for patterns with reference types is gated by `deref_patterns`." + ); - if !pat_adjustments.is_empty() { - debug!("default binding mode is now {:?}", def_br); - self.typeck_results - .borrow_mut() - .pat_adjustments_mut() - .insert(pat.hir_id, pat_adjustments); + // If the pattern has as many or more layers of reference as the expected type, we can match + // without peeling more, unless we find a smart pointer or `&mut` that we also need to peel. + // We don't treat `&` and `&mut` as interchangeable, but by peeling `&mut`s before matching, + // we can still, e.g., match on a `&mut str` with a string literal pattern. This is because + // string literal patterns may be used where `str` is expected. + let mut expected_ref_layers = 0; + while let ty::Ref(_, inner_ty, mutbl) = *expected.kind() { + if mutbl.is_mut() { + // Mutable references can't be in the final value of constants, thus they can't be + // at the head of their types, thus we should always peel `&mut`. + return true; + } + expected_ref_layers += 1; + expected = inner_ty; } + pat_ref_layers < expected_ref_layers || self.should_peel_smart_pointer(peel_kind, expected) + } - (expected, def_br, max_ref_mutbl) + /// Determine whether `expected` is a smart pointer type that should be peeled before matching. + fn should_peel_smart_pointer(&self, peel_kind: PeelKind, expected: Ty<'tcx>) -> bool { + // Explicit `deref!(_)` patterns match against smart pointers; don't peel in that case. + if let PeelKind::Implicit { until_adt, .. } = peel_kind + // For simplicity, only apply overloaded derefs if `expected` is a known ADT. + // FIXME(deref_patterns): we'll get better diagnostics for users trying to + // implicitly deref generics if we allow them here, but primitives, tuples, and + // inference vars definitely should be stopped. Figure out what makes most sense. + && let ty::Adt(scrutinee_adt, _) = *expected.kind() + // Don't peel if the pattern type already matches the scrutinee. E.g., stop here if + // matching on a `Cow<'a, T>` scrutinee with a `Cow::Owned(_)` pattern. + && until_adt != Some(scrutinee_adt.did()) + // At this point, the pattern isn't able to match `expected` without peeling. Check + // that it implements `Deref` before assuming it's a smart pointer, to get a normal + // type error instead of a missing impl error if not. This only checks for `Deref`, + // not `DerefPure`: we require that too, but we want a trait error if it's missing. + && let Some(deref_trait) = self.tcx.lang_items().deref_trait() + && self.type_implements_trait(deref_trait, [expected], self.param_env).may_apply() + { + true + } else { + false + } } fn check_pat_expr_unadjusted(&self, lt: &'tcx hir::PatExpr<'tcx>) -> Ty<'tcx> { @@ -621,26 +828,54 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { // Byte string patterns behave the same way as array patterns // They can denote both statically and dynamically-sized byte arrays. + // Additionally, when `deref_patterns` is enabled, byte string literal patterns may have + // types `[u8]` or `[u8; N]`, in order to type, e.g., `deref!(b"..."): Vec`. let mut pat_ty = ty; if let hir::PatExprKind::Lit { lit: Spanned { node: ast::LitKind::ByteStr(..), .. }, .. } = lt.kind { + let tcx = self.tcx; let expected = self.structurally_resolve_type(span, expected); - if let ty::Ref(_, inner_ty, _) = *expected.kind() - && self.try_structurally_resolve_type(span, inner_ty).is_slice() - { - let tcx = self.tcx; - trace!(?lt.hir_id.local_id, "polymorphic byte string lit"); - self.typeck_results - .borrow_mut() - .treat_byte_string_as_slice - .insert(lt.hir_id.local_id); - pat_ty = - Ty::new_imm_ref(tcx, tcx.lifetimes.re_static, Ty::new_slice(tcx, tcx.types.u8)); + match *expected.kind() { + // Allow `b"...": &[u8]` + ty::Ref(_, inner_ty, _) + if self.try_structurally_resolve_type(span, inner_ty).is_slice() => + { + trace!(?lt.hir_id.local_id, "polymorphic byte string lit"); + pat_ty = Ty::new_imm_ref( + tcx, + tcx.lifetimes.re_static, + Ty::new_slice(tcx, tcx.types.u8), + ); + } + // Allow `b"...": [u8; 3]` for `deref_patterns` + ty::Array(..) if tcx.features().deref_patterns() => { + pat_ty = match *ty.kind() { + ty::Ref(_, inner_ty, _) => inner_ty, + _ => span_bug!(span, "found byte string literal with non-ref type {ty:?}"), + } + } + // Allow `b"...": [u8]` for `deref_patterns` + ty::Slice(..) if tcx.features().deref_patterns() => { + pat_ty = Ty::new_slice(tcx, tcx.types.u8); + } + // Otherwise, `b"...": &[u8; 3]` + _ => {} } } + // When `deref_patterns` is enabled, in order to allow `deref!("..."): String`, we allow + // string literal patterns to have type `str`. This is accounted for when lowering to MIR. + if self.tcx.features().deref_patterns() + && let hir::PatExprKind::Lit { + lit: Spanned { node: ast::LitKind::Str(..), .. }, .. + } = lt.kind + && self.try_structurally_resolve_type(span, expected).is_str() + { + pat_ty = self.tcx.types.str_; + } + if self.tcx.features().string_deref_patterns() && let hir::PatExprKind::Lit { lit: Spanned { node: ast::LitKind::Str(..), .. }, .. @@ -700,6 +935,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { // be peeled to `str` while ty here is still `&str`, if we don't // err early here, a rather confusing unification error will be // emitted instead). + let ty = self.try_structurally_resolve_type(expr.span, ty); let fail = !(ty.is_numeric() || ty.is_char() || ty.is_ty_var() || ty.references_error()); Some((fail, ty, expr.span)) @@ -944,10 +1180,9 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { ) { let var_ty = self.local_ty(span, var_id); if let Err(mut err) = self.demand_eqtype_pat_diag(span, var_ty, ty, ti) { - let hir = self.tcx.hir(); let var_ty = self.resolve_vars_if_possible(var_ty); let msg = format!("first introduced with type `{var_ty}` here"); - err.span_label(hir.span(var_id), msg); + err.span_label(self.tcx.hir_span(var_id), msg); let in_match = self.tcx.hir_parent_iter(var_id).any(|(_, n)| { matches!( n, @@ -1037,7 +1272,8 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { | PatKind::Tuple(..) | PatKind::Slice(..) => "binding", - PatKind::Wild + PatKind::Missing + | PatKind::Wild | PatKind::Never | PatKind::Binding(..) | PatKind::Box(..) @@ -1144,27 +1380,26 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { Ok(()) } - fn check_pat_struct( + fn resolve_pat_struct( &self, pat: &'tcx Pat<'tcx>, qpath: &hir::QPath<'tcx>, + ) -> Result, ErrorGuaranteed> { + // Resolve the path and check the definition for errors. + let (variant, pat_ty) = self.check_struct_path(qpath, pat.hir_id)?; + Ok(ResolvedPat { ty: pat_ty, kind: ResolvedPatKind::Struct { variant } }) + } + + fn check_pat_struct( + &self, + pat: &'tcx Pat<'tcx>, fields: &'tcx [hir::PatField<'tcx>], has_rest_pat: bool, + pat_ty: Ty<'tcx>, + variant: &'tcx VariantDef, expected: Ty<'tcx>, pat_info: PatInfo<'tcx>, ) -> Ty<'tcx> { - // Resolve the path and check the definition for errors. - let (variant, pat_ty) = match self.check_struct_path(qpath, pat.hir_id) { - Ok(data) => data, - Err(guar) => { - let err = Ty::new_error(self.tcx, guar); - for field in fields { - self.check_pat(field.pat, err, pat_info); - } - return err; - } - }; - // Type-check the path. let _ = self.demand_eqtype_pat(pat.span, expected, pat_ty, &pat_info.top_info); @@ -1175,31 +1410,27 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { } } - fn check_pat_path( + fn resolve_pat_path( &self, path_id: HirId, - pat_id_for_diag: HirId, span: Span, - qpath: &hir::QPath<'_>, - path_resolution: (Res, Option>, &'tcx [hir::PathSegment<'tcx>]), - expected: Ty<'tcx>, - ti: &TopInfo<'tcx>, - ) -> Ty<'tcx> { + qpath: &'tcx hir::QPath<'_>, + ) -> Result, ErrorGuaranteed> { let tcx = self.tcx; - // We have already resolved the path. - let (res, opt_ty, segments) = path_resolution; + let (res, opt_ty, segments) = + self.resolve_ty_and_res_fully_qualified_call(qpath, path_id, span); match res { Res::Err => { let e = self.dcx().span_delayed_bug(qpath.span(), "`Res::Err` but no error emitted"); self.set_tainted_by_errors(e); - return Ty::new_error(tcx, e); + return Err(e); } Res::Def(DefKind::AssocFn | DefKind::Ctor(_, CtorKind::Fn) | DefKind::Variant, _) => { let expected = "unit struct, unit variant or constant"; let e = report_unexpected_variant_res(tcx, res, None, qpath, span, E0533, expected); - return Ty::new_error(tcx, e); + return Err(e); } Res::SelfCtor(def_id) => { if let ty::Adt(adt_def, _) = *tcx.type_of(def_id).skip_binder().kind() @@ -1217,7 +1448,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { E0533, "unit struct", ); - return Ty::new_error(tcx, e); + return Err(e); } } Res::Def( @@ -1230,15 +1461,26 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { _ => bug!("unexpected pattern resolution: {:?}", res), } - // Type-check the path. + // Find the type of the path pattern, for later checking. let (pat_ty, pat_res) = self.instantiate_value_path(segments, opt_ty, res, span, span, path_id); + Ok(ResolvedPat { ty: pat_ty, kind: ResolvedPatKind::Path { res, pat_res, segments } }) + } + + fn check_pat_path( + &self, + pat_id_for_diag: HirId, + span: Span, + resolved: &ResolvedPat<'tcx>, + expected: Ty<'tcx>, + ti: &TopInfo<'tcx>, + ) -> Ty<'tcx> { if let Err(err) = - self.demand_suptype_with_origin(&self.pattern_cause(ti, span), expected, pat_ty) + self.demand_suptype_with_origin(&self.pattern_cause(ti, span), expected, resolved.ty) { - self.emit_bad_pat_path(err, pat_id_for_diag, span, res, pat_res, pat_ty, segments); + self.emit_bad_pat_path(err, pat_id_for_diag, span, resolved); } - pat_ty + resolved.ty } fn maybe_suggest_range_literal( @@ -1255,7 +1497,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { })) => match self.tcx.hir_node(body_id.hir_id) { hir::Node::Expr(expr) => { if hir::is_range_literal(expr) { - let span = self.tcx.hir().span(body_id.hir_id); + let span = self.tcx.hir_span(body_id.hir_id); if let Ok(snip) = self.tcx.sess.source_map().span_to_snippet(span) { e.span_suggestion_verbose( ident.span, @@ -1281,12 +1523,13 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { mut e: Diag<'_>, hir_id: HirId, pat_span: Span, - res: Res, - pat_res: Res, - pat_ty: Ty<'tcx>, - segments: &'tcx [hir::PathSegment<'tcx>], + resolved_pat: &ResolvedPat<'tcx>, ) { - if let Some(span) = self.tcx.hir().res_span(pat_res) { + let ResolvedPatKind::Path { res, pat_res, segments } = resolved_pat.kind else { + span_bug!(pat_span, "unexpected resolution for path pattern: {resolved_pat:?}"); + }; + + if let Some(span) = self.tcx.hir_res_span(pat_res) { e.span_label(span, format!("{} defined here", res.descr())); if let [hir::PathSegment { ident, .. }] = &*segments { e.span_label( @@ -1308,7 +1551,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { ); } _ => { - let (type_def_id, item_def_id) = match pat_ty.kind() { + let (type_def_id, item_def_id) = match resolved_pat.ty.kind() { ty::Adt(def, _) => match res { Res::Def(DefKind::Const, def_id) => (Some(def.did()), Some(def_id)), _ => (None, None), @@ -1316,15 +1559,18 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { _ => (None, None), }; - let ranges = &[ - self.tcx.lang_items().range_struct(), - self.tcx.lang_items().range_from_struct(), - self.tcx.lang_items().range_to_struct(), - self.tcx.lang_items().range_full_struct(), - self.tcx.lang_items().range_inclusive_struct(), - self.tcx.lang_items().range_to_inclusive_struct(), - ]; - if type_def_id != None && ranges.contains(&type_def_id) { + let is_range = match type_def_id.and_then(|id| self.tcx.as_lang_item(id)) { + Some( + LangItem::Range + | LangItem::RangeFrom + | LangItem::RangeTo + | LangItem::RangeFull + | LangItem::RangeInclusiveStruct + | LangItem::RangeToInclusive, + ) => true, + _ => false, + }; + if is_range { if !self.maybe_suggest_range_literal(&mut e, item_def_id, *ident) { let msg = "constants only support matching by type, \ if you meant to match against a range of values, \ @@ -1348,26 +1594,16 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { e.emit(); } - fn check_pat_tuple_struct( + fn resolve_pat_tuple_struct( &self, pat: &'tcx Pat<'tcx>, qpath: &'tcx hir::QPath<'tcx>, - subpats: &'tcx [Pat<'tcx>], - ddpos: hir::DotDotPos, - expected: Ty<'tcx>, - pat_info: PatInfo<'tcx>, - ) -> Ty<'tcx> { + ) -> Result, ErrorGuaranteed> { let tcx = self.tcx; - let on_error = |e| { - for pat in subpats { - self.check_pat(pat, Ty::new_error(tcx, e), pat_info); - } - }; let report_unexpected_res = |res: Res| { let expected = "tuple struct or tuple variant"; let e = report_unexpected_variant_res(tcx, res, None, qpath, pat.span, E0164, expected); - on_error(e); - e + Err(e) }; // Resolve the path and check the definition for errors. @@ -1376,16 +1612,14 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { if res == Res::Err { let e = self.dcx().span_delayed_bug(pat.span, "`Res::Err` but no error emitted"); self.set_tainted_by_errors(e); - on_error(e); - return Ty::new_error(tcx, e); + return Err(e); } // Type-check the path. let (pat_ty, res) = self.instantiate_value_path(segments, opt_ty, res, pat.span, pat.span, pat.hir_id); if !pat_ty.is_fn() { - let e = report_unexpected_res(res); - return Ty::new_error(tcx, e); + return report_unexpected_res(res); } let variant = match res { @@ -1393,8 +1627,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { self.dcx().span_bug(pat.span, "`Res::Err` but no error emitted"); } Res::Def(DefKind::AssocConst | DefKind::AssocFn, _) => { - let e = report_unexpected_res(res); - return Ty::new_error(tcx, e); + return report_unexpected_res(res); } Res::Def(DefKind::Ctor(_, CtorKind::Fn), _) => tcx.expect_variant_res(res), _ => bug!("unexpected pattern resolution: {:?}", res), @@ -1404,6 +1637,28 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { let pat_ty = pat_ty.fn_sig(tcx).output(); let pat_ty = pat_ty.no_bound_vars().expect("expected fn type"); + Ok(ResolvedPat { ty: pat_ty, kind: ResolvedPatKind::TupleStruct { res, variant } }) + } + + fn check_pat_tuple_struct( + &self, + pat: &'tcx Pat<'tcx>, + qpath: &'tcx hir::QPath<'tcx>, + subpats: &'tcx [Pat<'tcx>], + ddpos: hir::DotDotPos, + res: Res, + pat_ty: Ty<'tcx>, + variant: &'tcx VariantDef, + expected: Ty<'tcx>, + pat_info: PatInfo<'tcx>, + ) -> Ty<'tcx> { + let tcx = self.tcx; + let on_error = |e| { + for pat in subpats { + self.check_pat(pat, Ty::new_error(tcx, e), pat_info); + } + }; + // Type-check the tuple struct pattern against the expected type. let diag = self.demand_eqtype_pat_diag(pat.span, expected, pat_ty, &pat_info.top_info); let had_err = diag.map_err(|diag| diag.emit()); @@ -2287,36 +2542,49 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { expected: Ty<'tcx>, pat_info: PatInfo<'tcx>, ) -> Ty<'tcx> { - let tcx = self.tcx; + let target_ty = self.deref_pat_target(span, expected); + self.check_pat(inner, target_ty, pat_info); + self.register_deref_mut_bounds_if_needed(span, inner, [expected]); + expected + } + + fn deref_pat_target(&self, span: Span, source_ty: Ty<'tcx>) -> Ty<'tcx> { // Register a `DerefPure` bound, which is required by all `deref!()` pats. + let tcx = self.tcx; self.register_bound( - expected, + source_ty, tcx.require_lang_item(hir::LangItem::DerefPure, Some(span)), self.misc(span), ); - // ::Target - let ty = Ty::new_projection( + // The expected type for the deref pat's inner pattern is `::Target`. + let target_ty = Ty::new_projection( tcx, tcx.require_lang_item(hir::LangItem::DerefTarget, Some(span)), - [expected], + [source_ty], ); - let ty = self.normalize(span, ty); - let ty = self.try_structurally_resolve_type(span, ty); - self.check_pat(inner, ty, pat_info); - - // Check if the pattern has any `ref mut` bindings, which would require - // `DerefMut` to be emitted in MIR building instead of just `Deref`. - // We do this *after* checking the inner pattern, since we want to make - // sure to apply any match-ergonomics adjustments. + let target_ty = self.normalize(span, target_ty); + self.try_structurally_resolve_type(span, target_ty) + } + + /// Check if the interior of a deref pattern (either explicit or implicit) has any `ref mut` + /// bindings, which would require `DerefMut` to be emitted in MIR building instead of just + /// `Deref`. We do this *after* checking the inner pattern, since we want to make sure to + /// account for `ref mut` binding modes inherited from implicitly dereferencing `&mut` refs. + fn register_deref_mut_bounds_if_needed( + &self, + span: Span, + inner: &'tcx Pat<'tcx>, + derefed_tys: impl IntoIterator>, + ) { if self.typeck_results.borrow().pat_has_ref_mut_binding(inner) { - self.register_bound( - expected, - tcx.require_lang_item(hir::LangItem::DerefMut, Some(span)), - self.misc(span), - ); + for mutably_derefed_ty in derefed_tys { + self.register_bound( + mutably_derefed_ty, + self.tcx.require_lang_item(hir::LangItem::DerefMut, Some(span)), + self.misc(span), + ); + } } - - expected } // Precondition: Pat is Ref(inner) diff --git a/compiler/rustc_hir_typeck/src/place_op.rs b/compiler/rustc_hir_typeck/src/place_op.rs index 4fc903cf68b88..fedc75abe4927 100644 --- a/compiler/rustc_hir_typeck/src/place_op.rs +++ b/compiler/rustc_hir_typeck/src/place_op.rs @@ -8,7 +8,7 @@ use rustc_middle::ty::adjustment::{ PointerCoercion, }; use rustc_middle::ty::{self, Ty}; -use rustc_span::{Ident, Span, sym}; +use rustc_span::{Span, sym}; use tracing::debug; use {rustc_ast as ast, rustc_hir as hir}; @@ -211,13 +211,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { return None; }; - self.lookup_method_in_trait( - self.misc(span), - Ident::with_dummy_span(imm_op), - imm_tr, - base_ty, - opt_rhs_ty, - ) + self.lookup_method_for_operator(self.misc(span), imm_op, imm_tr, base_ty, opt_rhs_ty) } fn try_mutable_overloaded_place_op( @@ -237,13 +231,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { return None; }; - self.lookup_method_in_trait( - self.misc(span), - Ident::with_dummy_span(mut_op), - mut_tr, - base_ty, - opt_rhs_ty, - ) + self.lookup_method_for_operator(self.misc(span), mut_op, mut_tr, base_ty, opt_rhs_ty) } /// Convert auto-derefs, indices, etc of an expression from `Deref` and `Index` diff --git a/compiler/rustc_hir_typeck/src/typeck_root_ctxt.rs b/compiler/rustc_hir_typeck/src/typeck_root_ctxt.rs index 5b4fc51cec885..26be5fc6d1904 100644 --- a/compiler/rustc_hir_typeck/src/typeck_root_ctxt.rs +++ b/compiler/rustc_hir_typeck/src/typeck_root_ctxt.rs @@ -1,11 +1,10 @@ -use std::cell::RefCell; +use std::cell::{Cell, RefCell}; use std::ops::Deref; use rustc_data_structures::unord::{UnordMap, UnordSet}; -use rustc_hir as hir; use rustc_hir::def_id::LocalDefId; -use rustc_hir::{HirId, HirIdMap}; -use rustc_infer::infer::{InferCtxt, InferOk, TyCtxtInferExt}; +use rustc_hir::{self as hir, HirId, HirIdMap, LangItem}; +use rustc_infer::infer::{InferCtxt, InferOk, OpaqueTypeStorageEntries, TyCtxtInferExt}; use rustc_middle::span_bug; use rustc_middle::ty::{self, Ty, TyCtxt, TypeVisitableExt, TypingMode}; use rustc_span::Span; @@ -38,6 +37,11 @@ pub(crate) struct TypeckRootCtxt<'tcx> { pub(super) fulfillment_cx: RefCell>>>, + // Used to detect opaque types uses added after we've already checked them. + // + // See [FnCtxt::detect_opaque_types_added_during_writeback] for more details. + pub(super) checked_opaque_types_storage_entries: Cell>, + /// Some additional `Sized` obligations badly affect type inference. /// These obligations are added in a later stage of typeck. /// Removing these may also cause additional complications, see #101066. @@ -84,14 +88,16 @@ impl<'tcx> TypeckRootCtxt<'tcx> { let hir_owner = tcx.local_def_id_to_hir_id(def_id).owner; let infcx = - tcx.infer_ctxt().ignoring_regions().build(TypingMode::analysis_in_body(tcx, def_id)); + tcx.infer_ctxt().ignoring_regions().build(TypingMode::typeck_for_body(tcx, def_id)); let typeck_results = RefCell::new(ty::TypeckResults::new(hir_owner)); + let fulfillment_cx = RefCell::new(>::new(&infcx)); TypeckRootCtxt { - typeck_results, - fulfillment_cx: RefCell::new(>::new(&infcx)), infcx, + typeck_results, locals: RefCell::new(Default::default()), + fulfillment_cx, + checked_opaque_types_storage_entries: Cell::new(None), deferred_sized_obligations: RefCell::new(Vec::new()), deferred_call_resolutions: RefCell::new(Default::default()), deferred_cast_checks: RefCell::new(Vec::new()), @@ -137,7 +143,7 @@ impl<'tcx> TypeckRootCtxt<'tcx> { obligation.predicate.kind().skip_binder() && let Some(ty) = self.shallow_resolve(tpred.self_ty()).ty_vid().map(|t| self.root_var(t)) - && self.tcx.lang_items().sized_trait().is_some_and(|st| st != tpred.trait_ref.def_id) + && !self.tcx.is_lang_item(tpred.trait_ref.def_id, LangItem::Sized) { let new_self_ty = self.tcx.types.unit; diff --git a/compiler/rustc_hir_typeck/src/upvar.rs b/compiler/rustc_hir_typeck/src/upvar.rs index d07bfade15708..8ab71e5220bb7 100644 --- a/compiler/rustc_hir_typeck/src/upvar.rs +++ b/compiler/rustc_hir_typeck/src/upvar.rs @@ -85,7 +85,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { /// Intermediate format to store the hir_id pointing to the use that resulted in the /// corresponding place being captured and a String which contains the captured value's /// name (i.e: a.b.c) -#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)] +#[derive(Clone, Debug, PartialEq, Eq, Hash)] enum UpvarMigrationInfo { /// We previously captured all of `x`, but now we capture some sub-path. CapturingPrecise { source_expr: Option, var_name: String }, @@ -635,7 +635,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { let (place, capture_kind) = truncate_capture_for_optimization(place, capture_kind); let usage_span = if let Some(usage_expr) = capture_info.path_expr_id { - self.tcx.hir().span(usage_expr) + self.tcx.hir_span(usage_expr) } else { unreachable!() }; @@ -986,7 +986,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { for lint_note in diagnostics_info.iter() { match &lint_note.captures_info { UpvarMigrationInfo::CapturingPrecise { source_expr: Some(capture_expr_id), var_name: captured_name } => { - let cause_span = self.tcx.hir().span(*capture_expr_id); + let cause_span = self.tcx.hir_span(*capture_expr_id); lint.span_label(cause_span, format!("in Rust 2018, this closure captures all of `{}`, but in Rust 2021, it will only capture `{}`", self.tcx.hir_name(*var_hir_id), captured_name, @@ -1047,13 +1047,13 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { "add a dummy let to cause {migrated_variables_concat} to be fully captured" ); - let closure_span = self.tcx.hir().span_with_body(closure_hir_id); + let closure_span = self.tcx.hir_span_with_body(closure_hir_id); let mut closure_body_span = { // If the body was entirely expanded from a macro // invocation, i.e. the body is not contained inside the // closure span, then we walk up the expansion until we // find the span before the expansion. - let s = self.tcx.hir().span_with_body(body_id.hir_id); + let s = self.tcx.hir_span_with_body(body_id.hir_id); s.find_ancestor_inside(closure_span).unwrap_or(s) }; @@ -1396,14 +1396,19 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { FxIndexSet::default() }; - // Combine all the captures responsible for needing migrations into one HashSet + // Combine all the captures responsible for needing migrations into one IndexSet let mut capture_diagnostic = drop_reorder_diagnostic.clone(); for key in auto_trait_diagnostic.keys() { capture_diagnostic.insert(key.clone()); } let mut capture_diagnostic = capture_diagnostic.into_iter().collect::>(); - capture_diagnostic.sort(); + capture_diagnostic.sort_by_cached_key(|info| match info { + UpvarMigrationInfo::CapturingPrecise { source_expr: _, var_name } => { + (0, Some(var_name.clone())) + } + UpvarMigrationInfo::CapturingNothing { use_span: _ } => (1, None), + }); for captures_info in capture_diagnostic { // Get the auto trait reasons of why migration is needed because of that capture, if there are any let capture_trait_reasons = @@ -1752,8 +1757,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { let capture_str = construct_capture_info_string(self.tcx, place, capture_info); let output_str = format!("Capturing {capture_str}"); - let span = - capture_info.path_expr_id.map_or(closure_span, |e| self.tcx.hir().span(e)); + let span = capture_info.path_expr_id.map_or(closure_span, |e| self.tcx.hir_span(e)); diag.span_note(span, output_str); } diag.emit(); @@ -1780,10 +1784,10 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { if capture.info.path_expr_id != capture.info.capture_kind_expr_id { let path_span = capture_info .path_expr_id - .map_or(closure_span, |e| self.tcx.hir().span(e)); + .map_or(closure_span, |e| self.tcx.hir_span(e)); let capture_kind_span = capture_info .capture_kind_expr_id - .map_or(closure_span, |e| self.tcx.hir().span(e)); + .map_or(closure_span, |e| self.tcx.hir_span(e)); let mut multi_span: MultiSpan = MultiSpan::from_spans(vec![path_span, capture_kind_span]); @@ -1799,7 +1803,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { } else { let span = capture_info .path_expr_id - .map_or(closure_span, |e| self.tcx.hir().span(e)); + .map_or(closure_span, |e| self.tcx.hir_span(e)); diag.span_note(span, output_str); }; @@ -1828,8 +1832,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { let mut is_mutbl = bm.1; for pointer_ty in place.deref_tys() { - match self.structurally_resolve_type(self.tcx.hir().span(var_hir_id), pointer_ty).kind() - { + match self.structurally_resolve_type(self.tcx.hir_span(var_hir_id), pointer_ty).kind() { // We don't capture derefs of raw ptrs ty::RawPtr(_, _) => unreachable!(), @@ -1844,7 +1847,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { ty::Adt(def, ..) if def.is_box() => {} unexpected_ty => span_bug!( - self.tcx.hir().span(var_hir_id), + self.tcx.hir_span(var_hir_id), "deref of unexpected pointer type {:?}", unexpected_ty ), @@ -1975,14 +1978,14 @@ fn drop_location_span(tcx: TyCtxt<'_>, hir_id: HirId) -> Span { let owner_node = tcx.hir_node(owner_id); let owner_span = match owner_node { hir::Node::Item(item) => match item.kind { - hir::ItemKind::Fn { body: owner_id, .. } => tcx.hir().span(owner_id.hir_id), + hir::ItemKind::Fn { body: owner_id, .. } => tcx.hir_span(owner_id.hir_id), _ => { bug!("Drop location span error: need to handle more ItemKind '{:?}'", item.kind); } }, - hir::Node::Block(block) => tcx.hir().span(block.hir_id), - hir::Node::TraitItem(item) => tcx.hir().span(item.hir_id()), - hir::Node::ImplItem(item) => tcx.hir().span(item.hir_id()), + hir::Node::Block(block) => tcx.hir_span(block.hir_id), + hir::Node::TraitItem(item) => tcx.hir_span(item.hir_id()), + hir::Node::ImplItem(item) => tcx.hir_span(item.hir_id()), _ => { bug!("Drop location span error: need to handle more Node '{:?}'", owner_node); } @@ -2325,8 +2328,9 @@ fn should_do_rust_2021_incompatible_closure_captures_analysis( return false; } - let (level, _) = - tcx.lint_level_at_node(lint::builtin::RUST_2021_INCOMPATIBLE_CLOSURE_CAPTURES, closure_id); + let level = tcx + .lint_level_at_node(lint::builtin::RUST_2021_INCOMPATIBLE_CLOSURE_CAPTURES, closure_id) + .level; !matches!(level, lint::Level::Allow) } diff --git a/compiler/rustc_hir_typeck/src/writeback.rs b/compiler/rustc_hir_typeck/src/writeback.rs index b63c0b6ab7e09..9be041f75d769 100644 --- a/compiler/rustc_hir_typeck/src/writeback.rs +++ b/compiler/rustc_hir_typeck/src/writeback.rs @@ -1,6 +1,12 @@ -// Type resolution: the phase that finds all the types in the AST with -// unresolved type variables and replaces "ty_var" types with their -// generic parameters. +//! During type inference, partially inferred terms are +//! represented using inference variables (ty::Infer). These don't appear in +//! the final [`ty::TypeckResults`] since all of the types should have been +//! inferred once typeck is done. +//! +//! When type inference is running however, having to update the typeck results +//! every time a new type is inferred would be unreasonably slow, so instead all +//! of the replacement happens at the end in [`FnCtxt::resolve_type_vars_in_body`], +//! which creates a new `TypeckResults` which doesn't contain any inference variables. use std::mem; @@ -8,14 +14,16 @@ use rustc_data_structures::unord::ExtendUnord; use rustc_errors::ErrorGuaranteed; use rustc_hir::intravisit::{self, InferKind, Visitor}; use rustc_hir::{self as hir, AmbigArg, HirId}; -use rustc_middle::span_bug; +use rustc_infer::traits::solve::Goal; use rustc_middle::traits::ObligationCause; use rustc_middle::ty::adjustment::{Adjust, Adjustment, PointerCoercion}; use rustc_middle::ty::{ - self, Ty, TyCtxt, TypeFoldable, TypeFolder, TypeSuperFoldable, TypeVisitableExt, fold_regions, + self, DefiningScopeKind, Ty, TyCtxt, TypeFoldable, TypeFolder, TypeSuperFoldable, + TypeVisitableExt, fold_regions, }; use rustc_span::{Span, sym}; use rustc_trait_selection::error_reporting::infer::need_type_info::TypeAnnotationNeeded; +use rustc_trait_selection::opaque_types::check_opaque_type_parameter_valid; use rustc_trait_selection::solve; use tracing::{debug, instrument}; @@ -24,15 +32,6 @@ use crate::FnCtxt; /////////////////////////////////////////////////////////////////////////// // Entry point -// During type inference, partially inferred types are -// represented using Type variables (ty::Infer). These don't appear in -// the final TypeckResults since all of the types should have been -// inferred once typeck is done. -// When type inference is running however, having to update the typeck -// typeck results every time a new type is inferred would be unreasonably slow, -// so instead all of the replacement happens at the end in -// resolve_type_vars_in_body, which creates a new TypeTables which -// doesn't contain any inference types. impl<'a, 'tcx> FnCtxt<'a, 'tcx> { pub(crate) fn resolve_type_vars_in_body( &self, @@ -81,23 +80,17 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { debug!("used_trait_imports({:?}) = {:?}", item_def_id, used_trait_imports); wbcx.typeck_results.used_trait_imports = used_trait_imports; - wbcx.typeck_results.treat_byte_string_as_slice = - mem::take(&mut self.typeck_results.borrow_mut().treat_byte_string_as_slice); - debug!("writeback: typeck results for {:?} are {:#?}", item_def_id, wbcx.typeck_results); self.tcx.arena.alloc(wbcx.typeck_results) } } -/////////////////////////////////////////////////////////////////////////// -// The Writeback context. This visitor walks the HIR, checking the -// fn-specific typeck results to find references to types or regions. It -// resolves those regions to remove inference variables and writes the -// final result back into the master typeck results in the tcx. Here and -// there, it applies a few ad-hoc checks that were not convenient to -// do elsewhere. - +/// The Writeback context. This visitor walks the HIR, checking the +/// fn-specific typeck results to find inference variables. It resolves +/// those inference variables and writes the final result into the +/// `TypeckResults`. It also applies a few ad-hoc checks that were not +/// convenient to do elsewhere. struct WritebackCx<'cx, 'tcx> { fcx: &'cx FnCtxt<'cx, 'tcx>, @@ -160,7 +153,7 @@ impl<'cx, 'tcx> WritebackCx<'cx, 'tcx> { self.typeck_results.node_args_mut().remove(e.hir_id); } } - hir::ExprKind::Binary(ref op, lhs, rhs) | hir::ExprKind::AssignOp(ref op, lhs, rhs) => { + hir::ExprKind::Binary(ref op, lhs, rhs) => { let lhs_ty = self.typeck_results.node_type(lhs.hir_id); let rhs_ty = self.typeck_results.node_type(rhs.hir_id); @@ -168,25 +161,27 @@ impl<'cx, 'tcx> WritebackCx<'cx, 'tcx> { self.typeck_results.type_dependent_defs_mut().remove(e.hir_id); self.typeck_results.node_args_mut().remove(e.hir_id); - match e.kind { - hir::ExprKind::Binary(..) => { - if !op.node.is_by_value() { - let mut adjustments = self.typeck_results.adjustments_mut(); - if let Some(a) = adjustments.get_mut(lhs.hir_id) { - a.pop(); - } - if let Some(a) = adjustments.get_mut(rhs.hir_id) { - a.pop(); - } - } + if !op.node.is_by_value() { + let mut adjustments = self.typeck_results.adjustments_mut(); + if let Some(a) = adjustments.get_mut(lhs.hir_id) { + a.pop(); } - hir::ExprKind::AssignOp(..) - if let Some(a) = - self.typeck_results.adjustments_mut().get_mut(lhs.hir_id) => - { + if let Some(a) = adjustments.get_mut(rhs.hir_id) { a.pop(); } - _ => {} + } + } + } + hir::ExprKind::AssignOp(_, lhs, rhs) => { + let lhs_ty = self.typeck_results.node_type(lhs.hir_id); + let rhs_ty = self.typeck_results.node_type(rhs.hir_id); + + if lhs_ty.is_scalar() && rhs_ty.is_scalar() { + self.typeck_results.type_dependent_defs_mut().remove(e.hir_id); + self.typeck_results.node_args_mut().remove(e.hir_id); + + if let Some(a) = self.typeck_results.adjustments_mut().get_mut(lhs.hir_id) { + a.pop(); } } } @@ -491,7 +486,7 @@ impl<'cx, 'tcx> WritebackCx<'cx, 'tcx> { if let ty::UserTypeKind::TypeOf(_, user_args) = c_ty.value.kind { // This is a unit-testing mechanism. - let span = self.tcx().hir().span(hir_id); + let span = self.tcx().hir_span(hir_id); // We need to buffer the errors in order to guarantee a consistent // order when emitting them. let err = @@ -511,15 +506,6 @@ impl<'cx, 'tcx> WritebackCx<'cx, 'tcx> { self.typeck_results.user_provided_types_mut().extend( fcx_typeck_results.user_provided_types().items().map(|(local_id, c_ty)| { let hir_id = HirId { owner: common_hir_owner, local_id }; - - if cfg!(debug_assertions) && c_ty.has_infer() { - span_bug!( - hir_id.to_span(self.fcx.tcx), - "writeback: `{:?}` has inference variables", - c_ty - ); - }; - (hir_id, *c_ty) }), ); @@ -530,17 +516,7 @@ impl<'cx, 'tcx> WritebackCx<'cx, 'tcx> { assert_eq!(fcx_typeck_results.hir_owner, self.typeck_results.hir_owner); self.typeck_results.user_provided_sigs.extend_unord( - fcx_typeck_results.user_provided_sigs.items().map(|(&def_id, c_sig)| { - if cfg!(debug_assertions) && c_sig.has_infer() { - span_bug!( - self.fcx.tcx.def_span(def_id), - "writeback: `{:?}` has inference variables", - c_sig - ); - }; - - (def_id, *c_sig) - }), + fcx_typeck_results.user_provided_sigs.items().map(|(def_id, c_sig)| (*def_id, *c_sig)), ); } @@ -556,15 +532,13 @@ impl<'cx, 'tcx> WritebackCx<'cx, 'tcx> { #[instrument(skip(self), level = "debug")] fn visit_opaque_types(&mut self) { + let tcx = self.tcx(); // We clone the opaques instead of stealing them here as they are still used for // normalization in the next generation trait solver. - // - // FIXME(-Znext-solver): Opaque types defined after this would simply get dropped - // at the end of typeck. While this seems unlikely to happen in practice this - // should still get fixed. Either by preventing writeback from defining new opaque - // types or by using this function at the end of writeback and running it as a - // fixpoint. let opaque_types = self.fcx.infcx.clone_opaque_types(); + let num_entries = self.fcx.inner.borrow_mut().opaque_types().num_entries(); + let prev = self.fcx.checked_opaque_types_storage_entries.replace(Some(num_entries)); + debug_assert_eq!(prev, None); for (opaque_type_key, hidden_type) in opaque_types { let hidden_type = self.resolve(hidden_type, &hidden_type.span); let opaque_type_key = self.resolve(opaque_type_key, &hidden_type.span); @@ -578,16 +552,47 @@ impl<'cx, 'tcx> WritebackCx<'cx, 'tcx> { } } - // Here we only detect impl trait definition conflicts when they - // are equal modulo regions. - if let Some(last_opaque_ty) = - self.typeck_results.concrete_opaque_types.insert(opaque_type_key, hidden_type) - && last_opaque_ty.ty != hidden_type.ty + if let Err(err) = check_opaque_type_parameter_valid( + &self.fcx, + opaque_type_key, + hidden_type.span, + DefiningScopeKind::HirTypeck, + ) { + self.typeck_results.concrete_opaque_types.insert( + opaque_type_key.def_id, + ty::OpaqueHiddenType::new_error(tcx, err.report(self.fcx)), + ); + } + + let hidden_type = hidden_type.remap_generic_params_to_declaration_params( + opaque_type_key, + tcx, + DefiningScopeKind::HirTypeck, + ); + + if let Some(prev) = self + .typeck_results + .concrete_opaque_types + .insert(opaque_type_key.def_id, hidden_type) { - assert!(!self.fcx.next_trait_solver()); - if let Ok(d) = hidden_type.build_mismatch_error(&last_opaque_ty, self.tcx()) { - d.emit(); + let entry = &mut self + .typeck_results + .concrete_opaque_types + .get_mut(&opaque_type_key.def_id) + .unwrap(); + if prev.ty != hidden_type.ty { + if let Some(guar) = self.typeck_results.tainted_by_errors { + entry.ty = Ty::new_error(tcx, guar); + } else { + let (Ok(guar) | Err(guar)) = + prev.build_mismatch_error(&hidden_type, tcx).map(|d| d.emit()); + entry.ty = Ty::new_error(tcx, guar); + } } + + // Pick a better span if there is one. + // FIXME(oli-obk): collect multiple spans for better diagnostics down the road. + entry.span = prev.span.substitute_dummy(hidden_type.span); } } } @@ -731,7 +736,32 @@ impl<'cx, 'tcx> WritebackCx<'cx, 'tcx> { T: TypeFoldable>, { let value = self.fcx.resolve_vars_if_possible(value); - let value = value.fold_with(&mut Resolver::new(self.fcx, span, self.body, true)); + + let mut goals = vec![]; + let value = + value.fold_with(&mut Resolver::new(self.fcx, span, self.body, true, &mut goals)); + + // Ensure that we resolve goals we get from normalizing coroutine interiors, + // but we shouldn't expect those goals to need normalizing (or else we'd get + // into a somewhat awkward fixpoint situation, and we don't need it anyways). + let mut unexpected_goals = vec![]; + self.typeck_results.coroutine_stalled_predicates.extend( + goals + .into_iter() + .map(|pred| { + self.fcx.resolve_vars_if_possible(pred).fold_with(&mut Resolver::new( + self.fcx, + span, + self.body, + false, + &mut unexpected_goals, + )) + }) + // FIXME: throwing away the param-env :( + .map(|goal| (goal.predicate, self.fcx.misc(span.to_span(self.fcx.tcx)))), + ); + assert_eq!(unexpected_goals, vec![]); + assert!(!value.has_infer()); // We may have introduced e.g. `ty::Error`, if inference failed, make sure @@ -749,7 +779,12 @@ impl<'cx, 'tcx> WritebackCx<'cx, 'tcx> { T: TypeFoldable>, { let value = self.fcx.resolve_vars_if_possible(value); - let value = value.fold_with(&mut Resolver::new(self.fcx, span, self.body, false)); + + let mut goals = vec![]; + let value = + value.fold_with(&mut Resolver::new(self.fcx, span, self.body, false, &mut goals)); + assert_eq!(goals, vec![]); + assert!(!value.has_infer()); // We may have introduced e.g. `ty::Error`, if inference failed, make sure @@ -775,7 +810,7 @@ impl Locatable for Span { impl Locatable for HirId { fn to_span(&self, tcx: TyCtxt<'_>) -> Span { - tcx.hir().span(*self) + tcx.hir_span(*self) } } @@ -786,6 +821,7 @@ struct Resolver<'cx, 'tcx> { /// Whether we should normalize using the new solver, disabled /// both when using the old solver and when resolving predicates. should_normalize: bool, + nested_goals: &'cx mut Vec>>, } impl<'cx, 'tcx> Resolver<'cx, 'tcx> { @@ -794,11 +830,12 @@ impl<'cx, 'tcx> Resolver<'cx, 'tcx> { span: &'cx dyn Locatable, body: &'tcx hir::Body<'tcx>, should_normalize: bool, + nested_goals: &'cx mut Vec>>, ) -> Resolver<'cx, 'tcx> { - Resolver { fcx, span, body, should_normalize } + Resolver { fcx, span, body, nested_goals, should_normalize } } - fn report_error(&self, p: impl Into>) -> ErrorGuaranteed { + fn report_error(&self, p: impl Into>) -> ErrorGuaranteed { if let Some(guar) = self.fcx.tainted_by_errors() { guar } else { @@ -822,7 +859,7 @@ impl<'cx, 'tcx> Resolver<'cx, 'tcx> { new_err: impl Fn(TyCtxt<'tcx>, ErrorGuaranteed) -> T, ) -> T where - T: Into> + TypeSuperFoldable> + Copy, + T: Into> + TypeSuperFoldable> + Copy, { let tcx = self.fcx.tcx; // We must deeply normalize in the new solver, since later lints expect @@ -832,12 +869,18 @@ impl<'cx, 'tcx> Resolver<'cx, 'tcx> { let cause = ObligationCause::misc(self.span.to_span(tcx), body_id); let at = self.fcx.at(&cause, self.fcx.param_env); let universes = vec![None; outer_exclusive_binder(value).as_usize()]; - solve::deeply_normalize_with_skipped_universes(at, value, universes).unwrap_or_else( - |errors| { + match solve::deeply_normalize_with_skipped_universes_and_ambiguous_coroutine_goals( + at, value, universes, + ) { + Ok((value, goals)) => { + self.nested_goals.extend(goals); + value + } + Err(errors) => { let guar = self.fcx.err_ctxt().report_fulfillment_errors(errors); new_err(tcx, guar) - }, - ) + } + } } else { value }; diff --git a/compiler/rustc_incremental/messages.ftl b/compiler/rustc_incremental/messages.ftl index 2a65101d360d3..bbc1fab05dfeb 100644 --- a/compiler/rustc_incremental/messages.ftl +++ b/compiler/rustc_incremental/messages.ftl @@ -93,7 +93,7 @@ incremental_undefined_clean_dirty_assertions = incremental_undefined_clean_dirty_assertions_item = clean/dirty auto-assertions not yet defined for Node::Item.node={$kind} -incremental_unknown_item = unknown item `{$name}` +incremental_unknown_rustc_clean_argument = unknown `rustc_clean` argument incremental_unrecognized_depnode = unrecognized `DepNode` variant: {$name} diff --git a/compiler/rustc_incremental/src/assert_dep_graph.rs b/compiler/rustc_incremental/src/assert_dep_graph.rs index 1b2056f541f3e..0e04a2a784ec8 100644 --- a/compiler/rustc_incremental/src/assert_dep_graph.rs +++ b/compiler/rustc_incremental/src/assert_dep_graph.rs @@ -38,7 +38,7 @@ use std::fs::{self, File}; use std::io::Write; use rustc_data_structures::fx::FxIndexSet; -use rustc_data_structures::graph::implementation::{Direction, INCOMING, NodeIndex, OUTGOING}; +use rustc_data_structures::graph::linked_graph::{Direction, INCOMING, NodeIndex, OUTGOING}; use rustc_hir::def_id::{CRATE_DEF_ID, DefId, LocalDefId}; use rustc_hir::intravisit::{self, Visitor}; use rustc_middle::dep_graph::{ diff --git a/compiler/rustc_incremental/src/errors.rs b/compiler/rustc_incremental/src/errors.rs index b4a207386dc44..dbc72d085be99 100644 --- a/compiler/rustc_incremental/src/errors.rs +++ b/compiler/rustc_incremental/src/errors.rs @@ -107,11 +107,10 @@ pub(crate) struct NotLoaded<'a> { } #[derive(Diagnostic)] -#[diag(incremental_unknown_item)] -pub(crate) struct UnknownItem { +#[diag(incremental_unknown_rustc_clean_argument)] +pub(crate) struct UnknownRustcCleanArgument { #[primary_span] pub span: Span, - pub name: Symbol, } #[derive(Diagnostic)] diff --git a/compiler/rustc_incremental/src/lib.rs b/compiler/rustc_incremental/src/lib.rs index dabfb6a90cade..299ee4876389c 100644 --- a/compiler/rustc_incremental/src/lib.rs +++ b/compiler/rustc_incremental/src/lib.rs @@ -2,7 +2,6 @@ // tidy-alphabetical-start #![allow(internal_features)] -#![cfg_attr(doc, recursion_limit = "256")] // FIXME(nnethercote): will be removed by #124141 #![deny(missing_docs)] #![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")] #![doc(rust_logo)] diff --git a/compiler/rustc_incremental/src/persist/dirty_clean.rs b/compiler/rustc_incremental/src/persist/dirty_clean.rs index d40a0d514f6f9..64166255fa485 100644 --- a/compiler/rustc_incremental/src/persist/dirty_clean.rs +++ b/compiler/rustc_incremental/src/persist/dirty_clean.rs @@ -405,8 +405,7 @@ fn check_config(tcx: TyCtxt<'_>, attr: &Attribute) -> bool { debug!("check_config: searching for cfg {:?}", value); cfg = Some(config.contains(&(value, None))); } else if !(item.has_name(EXCEPT) || item.has_name(LOADED_FROM_DISK)) { - tcx.dcx() - .emit_err(errors::UnknownItem { span: attr.span(), name: item.name_or_empty() }); + tcx.dcx().emit_err(errors::UnknownRustcCleanArgument { span: item.span() }); } } diff --git a/compiler/rustc_incremental/src/persist/fs.rs b/compiler/rustc_incremental/src/persist/fs.rs index 76a1ff3cf3828..f0d24d27e85a3 100644 --- a/compiler/rustc_incremental/src/persist/fs.rs +++ b/compiler/rustc_incremental/src/persist/fs.rs @@ -290,7 +290,7 @@ pub(crate) fn prepare_session_directory(sess: &Session, crate_name: Symbol) { // Try to remove the session directory we just allocated. We don't // know if there's any garbage in it from the failed copy action. - if let Err(err) = safe_remove_dir_all(&session_dir) { + if let Err(err) = std_fs::remove_dir_all(&session_dir) { sess.dcx().emit_warn(errors::DeletePartial { path: &session_dir, err }); } @@ -324,7 +324,7 @@ pub fn finalize_session_directory(sess: &Session, svh: Option) { incr_comp_session_dir.display() ); - if let Err(err) = safe_remove_dir_all(&*incr_comp_session_dir) { + if let Err(err) = std_fs::remove_dir_all(&*incr_comp_session_dir) { sess.dcx().emit_warn(errors::DeleteFull { path: &incr_comp_session_dir, err }); } @@ -715,7 +715,7 @@ pub(crate) fn garbage_collect_session_directories(sess: &Session) -> io::Result< for directory_name in session_directories { if !lock_file_to_session_dir.items().any(|(_, dir)| *dir == directory_name) { let path = crate_directory.join(directory_name); - if let Err(err) = safe_remove_dir_all(&path) { + if let Err(err) = std_fs::remove_dir_all(&path) { sess.dcx().emit_warn(errors::InvalidGcFailed { path: &path, err }); } } @@ -821,7 +821,7 @@ pub(crate) fn garbage_collect_session_directories(sess: &Session) -> io::Result< all_except_most_recent(deletion_candidates).into_items().all(|(path, lock)| { debug!("garbage_collect_session_directories() - deleting `{}`", path.display()); - if let Err(err) = safe_remove_dir_all(&path) { + if let Err(err) = std_fs::remove_dir_all(&path) { sess.dcx().emit_warn(errors::FinalizedGcFailed { path: &path, err }); } else { delete_session_dir_lock_file(sess, &lock_file_path(&path)); @@ -839,7 +839,7 @@ pub(crate) fn garbage_collect_session_directories(sess: &Session) -> io::Result< fn delete_old(sess: &Session, path: &Path) { debug!("garbage_collect_session_directories() - deleting `{}`", path.display()); - if let Err(err) = safe_remove_dir_all(path) { + if let Err(err) = std_fs::remove_dir_all(path) { sess.dcx().emit_warn(errors::SessionGcFailed { path, err }); } else { delete_session_dir_lock_file(sess, &lock_file_path(path)); @@ -862,30 +862,8 @@ fn all_except_most_recent( } } -/// Since paths of artifacts within session directories can get quite long, we -/// need to support deleting files with very long paths. The regular -/// WinApi functions only support paths up to 260 characters, however. In order -/// to circumvent this limitation, we canonicalize the path of the directory -/// before passing it to std::fs::remove_dir_all(). This will convert the path -/// into the '\\?\' format, which supports much longer paths. -fn safe_remove_dir_all(p: &Path) -> io::Result<()> { - let canonicalized = match try_canonicalize(p) { - Ok(canonicalized) => canonicalized, - Err(err) if err.kind() == io::ErrorKind::NotFound => return Ok(()), - Err(err) => return Err(err), - }; - - std_fs::remove_dir_all(canonicalized) -} - fn safe_remove_file(p: &Path) -> io::Result<()> { - let canonicalized = match try_canonicalize(p) { - Ok(canonicalized) => canonicalized, - Err(err) if err.kind() == io::ErrorKind::NotFound => return Ok(()), - Err(err) => return Err(err), - }; - - match std_fs::remove_file(canonicalized) { + match std_fs::remove_file(p) { Err(err) if err.kind() == io::ErrorKind::NotFound => Ok(()), result => result, } diff --git a/compiler/rustc_incremental/src/persist/save.rs b/compiler/rustc_incremental/src/persist/save.rs index 94ce6d9fa81f1..58fea3278a839 100644 --- a/compiler/rustc_incremental/src/persist/save.rs +++ b/compiler/rustc_incremental/src/persist/save.rs @@ -44,10 +44,6 @@ pub(crate) fn save_dep_graph(tcx: TyCtxt<'_>) { sess.time("assert_dep_graph", || assert_dep_graph(tcx)); sess.time("check_dirty_clean", || dirty_clean::check_dirty_clean_annotations(tcx)); - if sess.opts.unstable_opts.incremental_info { - tcx.dep_graph.print_incremental_info() - } - join( move || { sess.time("incr_comp_persist_dep_graph", || { @@ -172,12 +168,5 @@ pub(crate) fn build_dep_graph( // First encode the commandline arguments hash sess.opts.dep_tracking_hash(false).encode(&mut encoder); - Some(DepGraph::new( - sess, - prev_graph, - prev_work_products, - encoder, - sess.opts.unstable_opts.query_dep_graph, - sess.opts.unstable_opts.incremental_info, - )) + Some(DepGraph::new(sess, prev_graph, prev_work_products, encoder)) } diff --git a/compiler/rustc_index/src/slice.rs b/compiler/rustc_index/src/slice.rs index 67ac805c2bfe5..d2702bdb0571d 100644 --- a/compiler/rustc_index/src/slice.rs +++ b/compiler/rustc_index/src/slice.rs @@ -1,6 +1,6 @@ use std::fmt; use std::marker::PhantomData; -use std::ops::{Index, IndexMut}; +use std::ops::{Index, IndexMut, RangeBounds}; use std::slice::GetDisjointMutError::*; use std::slice::{self, SliceIndex}; @@ -104,6 +104,17 @@ impl IndexSlice { self.raw.swap(a.index(), b.index()) } + #[inline] + pub fn copy_within( + &mut self, + src: impl IntoSliceIdx>, + dest: I, + ) where + T: Copy, + { + self.raw.copy_within(src.into_slice_idx(), dest.index()); + } + #[inline] pub fn get>( &self, diff --git a/compiler/rustc_index_macros/src/newtype.rs b/compiler/rustc_index_macros/src/newtype.rs index f0b58eabbff9a..eedbe630cf2c4 100644 --- a/compiler/rustc_index_macros/src/newtype.rs +++ b/compiler/rustc_index_macros/src/newtype.rs @@ -257,6 +257,13 @@ impl Parse for Newtype { } } + impl std::ops::AddAssign for #name { + #[inline] + fn add_assign(&mut self, other: usize) { + *self = *self + other; + } + } + impl rustc_index::Idx for #name { #[inline] fn new(value: usize) -> Self { diff --git a/compiler/rustc_infer/src/infer/canonical/canonicalizer.rs b/compiler/rustc_infer/src/infer/canonical/canonicalizer.rs index 307110d9fbc2c..a1a0926cd8188 100644 --- a/compiler/rustc_infer/src/infer/canonical/canonicalizer.rs +++ b/compiler/rustc_infer/src/infer/canonical/canonicalizer.rs @@ -159,7 +159,7 @@ impl CanonicalizeMode for CanonicalizeQueryResponse { ) -> ty::Region<'tcx> { let infcx = canonicalizer.infcx.unwrap(); - if let ty::ReVar(vid) = *r { + if let ty::ReVar(vid) = r.kind() { r = infcx .inner .borrow_mut() @@ -171,7 +171,7 @@ impl CanonicalizeMode for CanonicalizeQueryResponse { ); }; - match *r { + match r.kind() { ty::ReLateParam(_) | ty::ReErased | ty::ReStatic | ty::ReEarlyParam(..) => r, ty::RePlaceholder(placeholder) => canonicalizer.canonical_var_for_region( @@ -227,7 +227,7 @@ impl CanonicalizeMode for CanonicalizeUserTypeAnnotation { canonicalizer: &mut Canonicalizer<'_, 'tcx>, r: ty::Region<'tcx>, ) -> ty::Region<'tcx> { - match *r { + match r.kind() { ty::ReEarlyParam(_) | ty::ReLateParam(_) | ty::ReErased @@ -321,7 +321,7 @@ impl<'cx, 'tcx> TypeFolder> for Canonicalizer<'cx, 'tcx> { } fn fold_region(&mut self, r: ty::Region<'tcx>) -> ty::Region<'tcx> { - match *r { + match r.kind() { ty::ReBound(index, ..) => { if index >= self.binder_index { bug!("escaping late-bound region during canonicalization"); diff --git a/compiler/rustc_infer/src/infer/canonical/query_response.rs b/compiler/rustc_infer/src/infer/canonical/query_response.rs index d53f631cc07a9..1ae864c454f28 100644 --- a/compiler/rustc_infer/src/infer/canonical/query_response.rs +++ b/compiler/rustc_infer/src/infer/canonical/query_response.rs @@ -132,7 +132,13 @@ impl<'tcx> InferCtxt<'tcx> { let certainty = if errors.is_empty() { Certainty::Proven } else { Certainty::Ambiguous }; - let opaque_types = self.take_opaque_types_for_query_response(); + let opaque_types = self + .inner + .borrow_mut() + .opaque_type_storage + .take_opaque_types() + .map(|(k, v)| (k, v.ty)) + .collect(); Ok(QueryResponse { var_values: inference_vars, @@ -143,24 +149,6 @@ impl<'tcx> InferCtxt<'tcx> { }) } - /// Used by the new solver as that one takes the opaque types at the end of a probe - /// to deal with multiple candidates without having to recompute them. - pub fn clone_opaque_types_for_query_response( - &self, - ) -> Vec<(ty::OpaqueTypeKey<'tcx>, Ty<'tcx>)> { - self.inner - .borrow() - .opaque_type_storage - .opaque_types - .iter() - .map(|(k, v)| (*k, v.ty)) - .collect() - } - - fn take_opaque_types_for_query_response(&self) -> Vec<(ty::OpaqueTypeKey<'tcx>, Ty<'tcx>)> { - self.take_opaque_types().into_iter().map(|(k, v)| (k, v.ty)).collect() - } - /// Given the (canonicalized) result to a canonical query, /// instantiates the result so it can be used, plugging in the /// values from the canonical query. (Note that the result may @@ -432,7 +420,7 @@ impl<'tcx> InferCtxt<'tcx> { } GenericArgKind::Lifetime(result_value) => { // e.g., here `result_value` might be `'?1` in the example above... - if let ty::ReBound(debruijn, br) = *result_value { + if let ty::ReBound(debruijn, br) = result_value.kind() { // ... in which case we would set `canonical_vars[0]` to `Some('static)`. // We only allow a `ty::INNERMOST` index in generic parameters. diff --git a/compiler/rustc_infer/src/infer/context.rs b/compiler/rustc_infer/src/infer/context.rs index 75affa1397705..359b9da11ced6 100644 --- a/compiler/rustc_infer/src/infer/context.rs +++ b/compiler/rustc_infer/src/infer/context.rs @@ -6,7 +6,10 @@ use rustc_middle::ty::relate::combine::PredicateEmittingRelation; use rustc_middle::ty::{self, Ty, TyCtxt, TypeFoldable}; use rustc_span::{DUMMY_SP, ErrorGuaranteed, Span}; -use super::{BoundRegionConversionTime, InferCtxt, RegionVariableOrigin, SubregionOrigin}; +use super::{ + BoundRegionConversionTime, InferCtxt, OpaqueTypeStorageEntries, RegionVariableOrigin, + SubregionOrigin, +}; impl<'tcx> rustc_type_ir::InferCtxtLike for InferCtxt<'tcx> { type Interner = TyCtxt<'tcx>; @@ -121,19 +124,19 @@ impl<'tcx> rustc_type_ir::InferCtxtLike for InferCtxt<'tcx> { self.enter_forall(value, f) } - fn equate_ty_vids_raw(&self, a: rustc_type_ir::TyVid, b: rustc_type_ir::TyVid) { + fn equate_ty_vids_raw(&self, a: ty::TyVid, b: ty::TyVid) { self.inner.borrow_mut().type_variables().equate(a, b); } - fn equate_int_vids_raw(&self, a: rustc_type_ir::IntVid, b: rustc_type_ir::IntVid) { + fn equate_int_vids_raw(&self, a: ty::IntVid, b: ty::IntVid) { self.inner.borrow_mut().int_unification_table().union(a, b); } - fn equate_float_vids_raw(&self, a: rustc_type_ir::FloatVid, b: rustc_type_ir::FloatVid) { + fn equate_float_vids_raw(&self, a: ty::FloatVid, b: ty::FloatVid) { self.inner.borrow_mut().float_unification_table().union(a, b); } - fn equate_const_vids_raw(&self, a: rustc_type_ir::ConstVid, b: rustc_type_ir::ConstVid) { + fn equate_const_vids_raw(&self, a: ty::ConstVid, b: ty::ConstVid) { self.inner.borrow_mut().const_unification_table().union(a, b); } @@ -141,8 +144,8 @@ impl<'tcx> rustc_type_ir::InferCtxtLike for InferCtxt<'tcx> { &self, relation: &mut R, target_is_expected: bool, - target_vid: rustc_type_ir::TyVid, - instantiation_variance: rustc_type_ir::Variance, + target_vid: ty::TyVid, + instantiation_variance: ty::Variance, source_ty: Ty<'tcx>, ) -> RelateResult<'tcx, ()> { self.instantiate_ty_var( @@ -154,19 +157,11 @@ impl<'tcx> rustc_type_ir::InferCtxtLike for InferCtxt<'tcx> { ) } - fn instantiate_int_var_raw( - &self, - vid: rustc_type_ir::IntVid, - value: rustc_type_ir::IntVarValue, - ) { + fn instantiate_int_var_raw(&self, vid: ty::IntVid, value: ty::IntVarValue) { self.inner.borrow_mut().int_unification_table().union_value(vid, value); } - fn instantiate_float_var_raw( - &self, - vid: rustc_type_ir::FloatVid, - value: rustc_type_ir::FloatVarValue, - ) { + fn instantiate_float_var_raw(&self, vid: ty::FloatVid, value: ty::FloatVarValue) { self.inner.borrow_mut().float_unification_table().union_value(vid, value); } @@ -174,7 +169,7 @@ impl<'tcx> rustc_type_ir::InferCtxtLike for InferCtxt<'tcx> { &self, relation: &mut R, target_is_expected: bool, - target_vid: rustc_type_ir::ConstVid, + target_vid: ty::ConstVid, source_ct: ty::Const<'tcx>, ) -> RelateResult<'tcx, ()> { self.instantiate_const_var(relation, target_is_expected, target_vid, source_ct) @@ -221,4 +216,58 @@ impl<'tcx> rustc_type_ir::InferCtxtLike for InferCtxt<'tcx> { fn register_ty_outlives(&self, ty: Ty<'tcx>, r: ty::Region<'tcx>, span: Span) { self.register_region_obligation_with_cause(ty, r, &ObligationCause::dummy_with_span(span)); } + + type OpaqueTypeStorageEntries = OpaqueTypeStorageEntries; + fn opaque_types_storage_num_entries(&self) -> OpaqueTypeStorageEntries { + self.inner.borrow_mut().opaque_types().num_entries() + } + fn clone_opaque_types_lookup_table(&self) -> Vec<(ty::OpaqueTypeKey<'tcx>, Ty<'tcx>)> { + self.inner.borrow_mut().opaque_types().iter_lookup_table().map(|(k, h)| (k, h.ty)).collect() + } + fn clone_duplicate_opaque_types(&self) -> Vec<(ty::OpaqueTypeKey<'tcx>, Ty<'tcx>)> { + self.inner + .borrow_mut() + .opaque_types() + .iter_duplicate_entries() + .map(|(k, h)| (k, h.ty)) + .collect() + } + fn clone_opaque_types_added_since( + &self, + prev_entries: OpaqueTypeStorageEntries, + ) -> Vec<(ty::OpaqueTypeKey<'tcx>, Ty<'tcx>)> { + self.inner + .borrow_mut() + .opaque_types() + .opaque_types_added_since(prev_entries) + .map(|(k, h)| (k, h.ty)) + .collect() + } + + fn register_hidden_type_in_storage( + &self, + opaque_type_key: ty::OpaqueTypeKey<'tcx>, + hidden_ty: Ty<'tcx>, + span: Span, + ) -> Option> { + self.register_hidden_type_in_storage( + opaque_type_key, + ty::OpaqueHiddenType { span, ty: hidden_ty }, + ) + } + fn add_duplicate_opaque_type( + &self, + opaque_type_key: ty::OpaqueTypeKey<'tcx>, + hidden_ty: Ty<'tcx>, + span: Span, + ) { + self.inner + .borrow_mut() + .opaque_types() + .add_duplicate(opaque_type_key, ty::OpaqueHiddenType { span, ty: hidden_ty }) + } + + fn reset_opaque_types(&self) { + let _ = self.take_opaque_types(); + } } diff --git a/compiler/rustc_infer/src/infer/freshen.rs b/compiler/rustc_infer/src/infer/freshen.rs index f2bb66ff73689..cae674165f0ef 100644 --- a/compiler/rustc_infer/src/infer/freshen.rs +++ b/compiler/rustc_infer/src/infer/freshen.rs @@ -109,7 +109,7 @@ impl<'a, 'tcx> TypeFolder> for TypeFreshener<'a, 'tcx> { } fn fold_region(&mut self, r: ty::Region<'tcx>) -> ty::Region<'tcx> { - match *r { + match r.kind() { ty::ReBound(..) => { // leave bound regions alone r diff --git a/compiler/rustc_infer/src/infer/lexical_region_resolve/mod.rs b/compiler/rustc_infer/src/infer/lexical_region_resolve/mod.rs index 91595de97f7d2..2185886901e57 100644 --- a/compiler/rustc_infer/src/infer/lexical_region_resolve/mod.rs +++ b/compiler/rustc_infer/src/infer/lexical_region_resolve/mod.rs @@ -3,8 +3,8 @@ use std::fmt; use rustc_data_structures::fx::FxHashSet; -use rustc_data_structures::graph::implementation::{ - Direction, Graph, INCOMING, NodeIndex, OUTGOING, +use rustc_data_structures::graph::linked_graph::{ + Direction, INCOMING, LinkedGraph, NodeIndex, OUTGOING, }; use rustc_data_structures::intern::Interned; use rustc_data_structures::unord::UnordSet; @@ -118,7 +118,7 @@ struct RegionAndOrigin<'tcx> { origin: SubregionOrigin<'tcx>, } -type RegionGraph<'tcx> = Graph<(), Constraint<'tcx>>; +type RegionGraph<'tcx> = LinkedGraph<(), Constraint<'tcx>>; struct LexicalResolver<'cx, 'tcx> { region_rels: &'cx RegionRelations<'cx, 'tcx>, @@ -218,7 +218,7 @@ impl<'cx, 'tcx> LexicalResolver<'cx, 'tcx> { true } VarValue::Value(cur_region) => { - match *cur_region { + match cur_region.kind() { // If this empty region is from a universe that can name the // placeholder universe, then the LUB is the Placeholder region // (which is the cur_region). Otherwise, the LUB is the Static @@ -310,7 +310,7 @@ impl<'cx, 'tcx> LexicalResolver<'cx, 'tcx> { match *b_data { VarValue::Empty(empty_ui) => { - let lub = match *a_region { + let lub = match a_region.kind() { RePlaceholder(placeholder) => { // If this empty region is from a universe that can // name the placeholder, then the placeholder is @@ -350,7 +350,7 @@ impl<'cx, 'tcx> LexicalResolver<'cx, 'tcx> { // tighter bound than `'static`. // // (This might e.g. arise from being asked to prove `for<'a> { 'b: 'a }`.) - if let ty::RePlaceholder(p) = *lub + if let ty::RePlaceholder(p) = lub.kind() && b_universe.cannot_name(p.universe) { lub = self.tcx().lifetimes.re_static; @@ -377,7 +377,7 @@ impl<'cx, 'tcx> LexicalResolver<'cx, 'tcx> { a_ui.min(b_ui) == b_ui } (VarValue::Value(a), VarValue::Empty(_)) => { - match *a { + match a.kind() { // this is always on an error path, // so it doesn't really matter if it's shorter or longer than an empty region ReError(_) => false, @@ -410,7 +410,7 @@ impl<'cx, 'tcx> LexicalResolver<'cx, 'tcx> { } } (VarValue::Empty(a_ui), VarValue::Value(b)) => { - match *b { + match b.kind() { // this is always on an error path, // so it doesn't really matter if it's shorter or longer than an empty region ReError(_) => false, @@ -479,7 +479,7 @@ impl<'cx, 'tcx> LexicalResolver<'cx, 'tcx> { /// term "concrete regions"). #[instrument(level = "trace", skip(self), ret)] fn lub_concrete_regions(&self, a: Region<'tcx>, b: Region<'tcx>) -> Region<'tcx> { - match (*a, *b) { + match (a.kind(), b.kind()) { (ReBound(..), _) | (_, ReBound(..)) | (ReErased, _) | (_, ReErased) => { bug!("cannot relate region: LUB({:?}, {:?})", a, b); } @@ -668,7 +668,7 @@ impl<'cx, 'tcx> LexicalResolver<'cx, 'tcx> { fn construct_graph(&self) -> RegionGraph<'tcx> { let num_vars = self.num_vars(); - let mut graph = Graph::new(); + let mut graph = LinkedGraph::new(); for _ in 0..num_vars { graph.add_node(()); @@ -725,7 +725,7 @@ impl<'cx, 'tcx> LexicalResolver<'cx, 'tcx> { // SubSupConflict(ReLateParam, ReLateParam) when reporting error, and so // the user will more likely get a specific suggestion. fn region_order_key(x: &RegionAndOrigin<'_>) -> u8 { - match *x.region { + match x.region.kind() { ReEarlyParam(_) => 0, ReLateParam(_) => 1, _ => 2, @@ -737,7 +737,7 @@ impl<'cx, 'tcx> LexicalResolver<'cx, 'tcx> { let node_universe = self.var_infos[node_idx].universe; for lower_bound in &lower_bounds { - let effective_lower_bound = if let ty::RePlaceholder(p) = *lower_bound.region { + let effective_lower_bound = if let ty::RePlaceholder(p) = lower_bound.region.kind() { if node_universe.cannot_name(p.universe) { self.tcx().lifetimes.re_static } else { @@ -785,7 +785,7 @@ impl<'cx, 'tcx> LexicalResolver<'cx, 'tcx> { .expect("lower_vid_bounds should at least include `node_idx`"); for upper_bound in &upper_bounds { - if let ty::RePlaceholder(p) = *upper_bound.region { + if let ty::RePlaceholder(p) = upper_bound.region.kind() { if min_universe.cannot_name(p.universe) { let origin = self.var_infos[node_idx].origin; errors.push(RegionResolutionError::UpperBoundUniverseConflict( @@ -913,7 +913,7 @@ impl<'cx, 'tcx> LexicalResolver<'cx, 'tcx> { generic_ty: Ty<'tcx>, min: ty::Region<'tcx>, ) -> bool { - if let ty::ReError(_) = *min { + if let ty::ReError(_) = min.kind() { return true; } @@ -931,18 +931,18 @@ impl<'cx, 'tcx> LexicalResolver<'cx, 'tcx> { } VerifyBound::OutlivedBy(r) => { - let a = match *min { + let a = match min.kind() { ty::ReVar(rid) => var_values.values[rid], _ => VarValue::Value(min), }; - let b = match **r { + let b = match r.kind() { ty::ReVar(rid) => var_values.values[rid], _ => VarValue::Value(*r), }; self.sub_region_values(a, b) } - VerifyBound::IsEmpty => match *min { + VerifyBound::IsEmpty => match min.kind() { ty::ReVar(rid) => match var_values.values[rid] { VarValue::ErrorValue => false, VarValue::Empty(_) => true, @@ -989,7 +989,7 @@ impl<'tcx> LexicalRegionResolutions<'tcx> { tcx: TyCtxt<'tcx>, r: ty::Region<'tcx>, ) -> ty::Region<'tcx> { - let result = match *r { + let result = match r.kind() { ty::ReVar(rid) => match self.values[rid] { VarValue::Empty(_) => r, VarValue::Value(r) => r, diff --git a/compiler/rustc_infer/src/infer/mod.rs b/compiler/rustc_infer/src/infer/mod.rs index fa8dea064daaa..b408d76010d7b 100644 --- a/compiler/rustc_infer/src/infer/mod.rs +++ b/compiler/rustc_infer/src/infer/mod.rs @@ -9,7 +9,7 @@ use free_regions::RegionRelations; pub use freshen::TypeFreshener; use lexical_region_resolve::LexicalRegionResolutions; pub use lexical_region_resolve::RegionResolutionError; -use opaque_types::OpaqueTypeStorage; +pub use opaque_types::{OpaqueTypeStorage, OpaqueTypeStorageEntries, OpaqueTypeTable}; use region_constraints::{ GenericKind, RegionConstraintCollector, RegionConstraintStorage, VarInfos, VerifyBound, }; @@ -27,12 +27,13 @@ use rustc_middle::bug; use rustc_middle::infer::canonical::{CanonicalQueryInput, CanonicalVarValues}; use rustc_middle::mir::ConstraintCategory; use rustc_middle::traits::select; +use rustc_middle::traits::solve::Goal; use rustc_middle::ty::error::{ExpectedFound, TypeError}; use rustc_middle::ty::{ self, BoundVarReplacerDelegate, ConstVid, FloatVid, GenericArg, GenericArgKind, GenericArgs, - GenericArgsRef, GenericParamDefKind, InferConst, IntVid, PseudoCanonicalInput, Ty, TyCtxt, - TyVid, TypeFoldable, TypeFolder, TypeSuperFoldable, TypeVisitable, TypeVisitableExt, TypingEnv, - TypingMode, fold_regions, + GenericArgsRef, GenericParamDefKind, InferConst, IntVid, OpaqueHiddenType, OpaqueTypeKey, + PseudoCanonicalInput, Term, TermKind, Ty, TyCtxt, TyVid, TypeFoldable, TypeFolder, + TypeSuperFoldable, TypeVisitable, TypeVisitableExt, TypingEnv, TypingMode, fold_regions, }; use rustc_span::{Span, Symbol}; use snapshot::undo_log::InferCtxtUndoLogs; @@ -197,7 +198,7 @@ impl<'tcx> InferCtxtInner<'tcx> { } #[inline] - fn opaque_types(&mut self) -> opaque_types::OpaqueTypeTable<'_, 'tcx> { + pub fn opaque_types(&mut self) -> opaque_types::OpaqueTypeTable<'_, 'tcx> { self.opaque_type_storage.with_log(&mut self.undo_log) } @@ -223,15 +224,6 @@ impl<'tcx> InferCtxtInner<'tcx> { .expect("region constraints already solved") .with_log(&mut self.undo_log) } - - // Iterates through the opaque type definitions without taking them; this holds the - // `InferCtxtInner` lock, so make sure to not do anything with `InferCtxt` side-effects - // while looping through this. - pub fn iter_opaque_types( - &self, - ) -> impl Iterator, ty::OpaqueHiddenType<'tcx>)> { - self.opaque_type_storage.opaque_types.iter().map(|(&k, &v)| (k, v)) - } } pub struct InferCtxt<'tcx> { @@ -268,7 +260,7 @@ pub struct InferCtxt<'tcx> { /// The set of predicates on which errors have been reported, to /// avoid reporting the same error twice. pub reported_trait_errors: - RefCell>, ErrorGuaranteed)>>, + RefCell>>, ErrorGuaranteed)>>, pub reported_signature_mismatch: RefCell)>>, @@ -953,20 +945,23 @@ impl<'tcx> InferCtxt<'tcx> { } #[instrument(level = "debug", skip(self), ret)] - pub fn take_opaque_types(&self) -> opaque_types::OpaqueTypeMap<'tcx> { - std::mem::take(&mut self.inner.borrow_mut().opaque_type_storage.opaque_types) + pub fn take_opaque_types(&self) -> Vec<(OpaqueTypeKey<'tcx>, OpaqueHiddenType<'tcx>)> { + self.inner.borrow_mut().opaque_type_storage.take_opaque_types().collect() } #[instrument(level = "debug", skip(self), ret)] - pub fn clone_opaque_types(&self) -> opaque_types::OpaqueTypeMap<'tcx> { - self.inner.borrow().opaque_type_storage.opaque_types.clone() + pub fn clone_opaque_types(&self) -> Vec<(OpaqueTypeKey<'tcx>, OpaqueHiddenType<'tcx>)> { + self.inner.borrow_mut().opaque_type_storage.iter_opaque_types().collect() } #[inline(always)] pub fn can_define_opaque_ty(&self, id: impl Into) -> bool { debug_assert!(!self.next_trait_solver()); match self.typing_mode() { - TypingMode::Analysis { defining_opaque_types } => { + TypingMode::Analysis { + defining_opaque_types_and_generators: defining_opaque_types, + } + | TypingMode::Borrowck { defining_opaque_types } => { id.into().as_local().is_some_and(|def_id| defining_opaque_types.contains(&def_id)) } // FIXME(#132279): This function is quite weird in post-analysis @@ -1260,7 +1255,8 @@ impl<'tcx> InferCtxt<'tcx> { // to handle them without proper canonicalization. This means we may cause cycle // errors and fail to reveal opaques while inside of bodies. We should rename this // function and require explicit comments on all use-sites in the future. - ty::TypingMode::Analysis { defining_opaque_types: _ } => { + ty::TypingMode::Analysis { defining_opaque_types_and_generators: _ } + | ty::TypingMode::Borrowck { defining_opaque_types: _ } => { TypingMode::non_body_analysis() } mode @ (ty::TypingMode::Coherence @@ -1396,6 +1392,16 @@ impl<'tcx> TyOrConstInferVar { } } + /// Tries to extract an inference variable from a type or a constant, returns `None` + /// for types other than `ty::Infer(_)` (or `InferTy::Fresh*`) and + /// for constants other than `ty::ConstKind::Infer(_)` (or `InferConst::Fresh`). + pub fn maybe_from_term(term: Term<'tcx>) -> Option { + match term.unpack() { + TermKind::Ty(ty) => Self::maybe_from_ty(ty), + TermKind::Const(ct) => Self::maybe_from_const(ct), + } + } + /// Tries to extract an inference variable from a type, returns `None` /// for types other than `ty::Infer(_)` (or `InferTy::Fresh*`). fn maybe_from_ty(ty: Ty<'tcx>) -> Option { diff --git a/compiler/rustc_infer/src/infer/opaque_types/mod.rs b/compiler/rustc_infer/src/infer/opaque_types/mod.rs index 215b133372664..220d5e9bda2d1 100644 --- a/compiler/rustc_infer/src/infer/opaque_types/mod.rs +++ b/compiler/rustc_infer/src/infer/opaque_types/mod.rs @@ -1,5 +1,4 @@ use hir::def_id::{DefId, LocalDefId}; -use rustc_data_structures::fx::FxIndexMap; use rustc_hir as hir; use rustc_middle::bug; use rustc_middle::traits::ObligationCause; @@ -12,15 +11,14 @@ use rustc_middle::ty::{ use rustc_span::Span; use tracing::{debug, instrument}; -use super::DefineOpaqueTypes; +use super::{DefineOpaqueTypes, RegionVariableOrigin}; use crate::errors::OpaqueHiddenTypeDiag; use crate::infer::{InferCtxt, InferOk}; use crate::traits::{self, Obligation, PredicateObligations}; mod table; -pub(crate) type OpaqueTypeMap<'tcx> = FxIndexMap, OpaqueHiddenType<'tcx>>; -pub(crate) use table::{OpaqueTypeStorage, OpaqueTypeTable}; +pub use table::{OpaqueTypeStorage, OpaqueTypeStorageEntries, OpaqueTypeTable}; impl<'tcx> InferCtxt<'tcx> { /// This is a backwards compatibility hack to prevent breaking changes from @@ -198,13 +196,12 @@ impl<'tcx> InferCtxt<'tcx> { /// it hasn't previously been defined. This does not emit any /// constraints and it's the responsibility of the caller to make /// sure that the item bounds of the opaque are checked. - pub fn inject_new_hidden_type_unchecked( + pub fn register_hidden_type_in_storage( &self, opaque_type_key: OpaqueTypeKey<'tcx>, hidden_ty: OpaqueHiddenType<'tcx>, - ) { - let prev = self.inner.borrow_mut().opaque_types().register(opaque_type_key, hidden_ty); - assert_eq!(prev, None); + ) -> Option> { + self.inner.borrow_mut().opaque_types().register(opaque_type_key, hidden_ty) } /// Insert a hidden type into the opaque type storage, equating it @@ -222,6 +219,7 @@ impl<'tcx> InferCtxt<'tcx> { hidden_ty: Ty<'tcx>, goals: &mut Vec>>, ) -> Result<(), TypeError<'tcx>> { + let tcx = self.tcx; // Ideally, we'd get the span where *this specific `ty` came // from*, but right now we just use the span from the overall // value being folded. In simple cases like `-> impl Foo`, @@ -232,7 +230,7 @@ impl<'tcx> InferCtxt<'tcx> { // During intercrate we do not define opaque types but instead always // force ambiguity unless the hidden type is known to not implement // our trait. - goals.push(Goal::new(self.tcx, param_env, ty::PredicateKind::Ambiguous)); + goals.push(Goal::new(tcx, param_env, ty::PredicateKind::Ambiguous)); } ty::TypingMode::Analysis { .. } => { let prev = self @@ -250,6 +248,36 @@ impl<'tcx> InferCtxt<'tcx> { ); } } + ty::TypingMode::Borrowck { .. } => { + let prev = self + .inner + .borrow_mut() + .opaque_types() + .register(opaque_type_key, OpaqueHiddenType { ty: hidden_ty, span }); + + // We either equate the new hidden type with the previous entry or with the type + // inferred by HIR typeck. + let actual = prev.unwrap_or_else(|| { + let actual = tcx + .type_of_opaque_hir_typeck(opaque_type_key.def_id) + .instantiate(self.tcx, opaque_type_key.args); + let actual = ty::fold_regions(tcx, actual, |re, _dbi| match re.kind() { + ty::ReErased => { + self.next_region_var(RegionVariableOrigin::MiscVariable(span)) + } + _ => re, + }); + actual + }); + + goals.extend( + self.at(&ObligationCause::dummy_with_span(span), param_env) + .eq(DefineOpaqueTypes::Yes, hidden_ty, actual)? + .obligations + .into_iter() + .map(|obligation| obligation.as_goal()), + ); + } mode @ (ty::TypingMode::PostBorrowckAnalysis { .. } | ty::TypingMode::PostAnalysis) => { bug!("insert hidden type in {mode:?}") } diff --git a/compiler/rustc_infer/src/infer/opaque_types/table.rs b/compiler/rustc_infer/src/infer/opaque_types/table.rs index ba6cc0d783dd3..46752840e1bab 100644 --- a/compiler/rustc_infer/src/infer/opaque_types/table.rs +++ b/compiler/rustc_infer/src/infer/opaque_types/table.rs @@ -1,18 +1,27 @@ +use std::ops::Deref; + +use rustc_data_structures::fx::FxIndexMap; use rustc_data_structures::undo_log::UndoLogs; use rustc_middle::bug; use rustc_middle::ty::{self, OpaqueHiddenType, OpaqueTypeKey, Ty}; use tracing::instrument; -use super::OpaqueTypeMap; use crate::infer::snapshot::undo_log::{InferCtxtUndoLogs, UndoLog}; #[derive(Default, Debug, Clone)] -pub(crate) struct OpaqueTypeStorage<'tcx> { - /// Opaque types found in explicit return types and their - /// associated fresh inference variable. Writeback resolves these - /// variables to get the concrete type, which can be used to - /// 'de-opaque' OpaqueHiddenType, after typeck is done with all functions. - pub opaque_types: OpaqueTypeMap<'tcx>, +pub struct OpaqueTypeStorage<'tcx> { + opaque_types: FxIndexMap, OpaqueHiddenType<'tcx>>, + duplicate_entries: Vec<(OpaqueTypeKey<'tcx>, OpaqueHiddenType<'tcx>)>, +} + +/// The number of entries in the opaque type storage at a given point. +/// +/// Used to check that we haven't added any new opaque types after checking +/// the opaque types currently in the storage. +#[derive(Default, Debug, Clone, Copy, PartialEq, Eq)] +pub struct OpaqueTypeStorageEntries { + opaque_types: usize, + duplicate_entries: usize, } impl<'tcx> OpaqueTypeStorage<'tcx> { @@ -33,6 +42,70 @@ impl<'tcx> OpaqueTypeStorage<'tcx> { } } + pub(crate) fn pop_duplicate_entry(&mut self) { + let entry = self.duplicate_entries.pop(); + assert!(entry.is_some()); + } + + pub(crate) fn is_empty(&self) -> bool { + let OpaqueTypeStorage { opaque_types, duplicate_entries } = self; + opaque_types.is_empty() && duplicate_entries.is_empty() + } + + pub(crate) fn take_opaque_types( + &mut self, + ) -> impl Iterator, OpaqueHiddenType<'tcx>)> { + let OpaqueTypeStorage { opaque_types, duplicate_entries } = self; + std::mem::take(opaque_types).into_iter().chain(std::mem::take(duplicate_entries)) + } + + pub fn num_entries(&self) -> OpaqueTypeStorageEntries { + OpaqueTypeStorageEntries { + opaque_types: self.opaque_types.len(), + duplicate_entries: self.duplicate_entries.len(), + } + } + + pub fn opaque_types_added_since( + &self, + prev_entries: OpaqueTypeStorageEntries, + ) -> impl Iterator, OpaqueHiddenType<'tcx>)> { + self.opaque_types + .iter() + .skip(prev_entries.opaque_types) + .map(|(k, v)| (*k, *v)) + .chain(self.duplicate_entries.iter().skip(prev_entries.duplicate_entries).copied()) + } + + /// Only returns the opaque types from the lookup table. These are used + /// when normalizing opaque types and have a unique key. + /// + /// Outside of canonicalization one should generally use `iter_opaque_types` + /// to also consider duplicate entries. + pub fn iter_lookup_table( + &self, + ) -> impl Iterator, OpaqueHiddenType<'tcx>)> { + self.opaque_types.iter().map(|(k, v)| (*k, *v)) + } + + /// Only returns the opaque types which are stored in `duplicate_entries`. + /// + /// These have to considered when checking all opaque type uses but are e.g. + /// irrelevant for canonical inputs as nested queries never meaningfully + /// accesses them. + pub fn iter_duplicate_entries( + &self, + ) -> impl Iterator, OpaqueHiddenType<'tcx>)> { + self.duplicate_entries.iter().copied() + } + + pub fn iter_opaque_types( + &self, + ) -> impl Iterator, OpaqueHiddenType<'tcx>)> { + let OpaqueTypeStorage { opaque_types, duplicate_entries } = self; + opaque_types.iter().map(|(k, v)| (*k, *v)).chain(duplicate_entries.iter().copied()) + } + #[inline] pub(crate) fn with_log<'a>( &'a mut self, @@ -44,21 +117,27 @@ impl<'tcx> OpaqueTypeStorage<'tcx> { impl<'tcx> Drop for OpaqueTypeStorage<'tcx> { fn drop(&mut self) { - if !self.opaque_types.is_empty() { + if !self.is_empty() { ty::tls::with(|tcx| tcx.dcx().delayed_bug(format!("{:?}", self.opaque_types))); } } } -pub(crate) struct OpaqueTypeTable<'a, 'tcx> { +pub struct OpaqueTypeTable<'a, 'tcx> { storage: &'a mut OpaqueTypeStorage<'tcx>, undo_log: &'a mut InferCtxtUndoLogs<'tcx>, } +impl<'tcx> Deref for OpaqueTypeTable<'_, 'tcx> { + type Target = OpaqueTypeStorage<'tcx>; + fn deref(&self) -> &Self::Target { + self.storage + } +} impl<'a, 'tcx> OpaqueTypeTable<'a, 'tcx> { #[instrument(skip(self), level = "debug")] - pub(crate) fn register( + pub fn register( &mut self, key: OpaqueTypeKey<'tcx>, hidden_type: OpaqueHiddenType<'tcx>, @@ -72,4 +151,9 @@ impl<'a, 'tcx> OpaqueTypeTable<'a, 'tcx> { self.undo_log.push(UndoLog::OpaqueTypes(key, None)); None } + + pub fn add_duplicate(&mut self, key: OpaqueTypeKey<'tcx>, hidden_type: OpaqueHiddenType<'tcx>) { + self.storage.duplicate_entries.push((key, hidden_type)); + self.undo_log.push(UndoLog::DuplicateOpaqueType); + } } diff --git a/compiler/rustc_infer/src/infer/outlives/env.rs b/compiler/rustc_infer/src/infer/outlives/env.rs index e924c974a02c9..cb5a33c5c972a 100644 --- a/compiler/rustc_infer/src/infer/outlives/env.rs +++ b/compiler/rustc_infer/src/infer/outlives/env.rs @@ -69,7 +69,7 @@ impl<'tcx> OutlivesEnvironment<'tcx> { region_bound_pairs .insert(ty::OutlivesPredicate(GenericKind::Alias(alias_b), r_a)); } - OutlivesBound::RegionSubRegion(r_a, r_b) => match (*r_a, *r_b) { + OutlivesBound::RegionSubRegion(r_a, r_b) => match (r_a.kind(), r_b.kind()) { ( ty::ReStatic | ty::ReEarlyParam(_) | ty::ReLateParam(_), ty::ReStatic | ty::ReEarlyParam(_) | ty::ReLateParam(_), diff --git a/compiler/rustc_infer/src/infer/outlives/for_liveness.rs b/compiler/rustc_infer/src/infer/outlives/for_liveness.rs index 379410641fe5b..2a4b9776f68cb 100644 --- a/compiler/rustc_infer/src/infer/outlives/for_liveness.rs +++ b/compiler/rustc_infer/src/infer/outlives/for_liveness.rs @@ -24,12 +24,8 @@ impl<'tcx, OP> TypeVisitor> for FreeRegionsVisitor<'tcx, OP> where OP: FnMut(ty::Region<'tcx>), { - fn visit_binder>>(&mut self, t: &ty::Binder<'tcx, T>) { - t.super_visit_with(self); - } - fn visit_region(&mut self, r: ty::Region<'tcx>) { - match *r { + match r.kind() { // ignore bound regions, keep visiting ty::ReBound(_, _) => {} _ => (self.op)(r), diff --git a/compiler/rustc_infer/src/infer/outlives/obligations.rs b/compiler/rustc_infer/src/infer/outlives/obligations.rs index a89cef50c9b40..8dde99c45cfa8 100644 --- a/compiler/rustc_infer/src/infer/outlives/obligations.rs +++ b/compiler/rustc_infer/src/infer/outlives/obligations.rs @@ -63,11 +63,11 @@ use rustc_data_structures::undo_log::UndoLogs; use rustc_middle::bug; use rustc_middle::mir::ConstraintCategory; use rustc_middle::traits::query::NoSolution; +use rustc_middle::ty::outlives::{Component, push_outlives_components}; use rustc_middle::ty::{ self, GenericArgKind, GenericArgsRef, PolyTypeOutlivesPredicate, Region, Ty, TyCtxt, TypeFoldable as _, TypeVisitableExt, }; -use rustc_type_ir::outlives::{Component, push_outlives_components}; use smallvec::smallvec; use tracing::{debug, instrument}; diff --git a/compiler/rustc_infer/src/infer/outlives/verify.rs b/compiler/rustc_infer/src/infer/outlives/verify.rs index c14c288c6e4e6..69feecfe30a49 100644 --- a/compiler/rustc_infer/src/infer/outlives/verify.rs +++ b/compiler/rustc_infer/src/infer/outlives/verify.rs @@ -1,7 +1,7 @@ use std::assert_matches::assert_matches; +use rustc_middle::ty::outlives::{Component, compute_alias_components_recursive}; use rustc_middle::ty::{self, OutlivesPredicate, Ty, TyCtxt}; -use rustc_type_ir::outlives::{Component, compute_alias_components_recursive}; use smallvec::smallvec; use tracing::{debug, instrument, trace}; diff --git a/compiler/rustc_infer/src/infer/projection.rs b/compiler/rustc_infer/src/infer/projection.rs index 1bee9632110bd..2a4f9db8963c8 100644 --- a/compiler/rustc_infer/src/infer/projection.rs +++ b/compiler/rustc_infer/src/infer/projection.rs @@ -1,7 +1,8 @@ use rustc_middle::traits::ObligationCause; -use rustc_middle::ty::{self, Ty}; +use rustc_middle::ty; use super::InferCtxt; +use crate::infer::Term; use crate::traits::{Obligation, PredicateObligations}; impl<'tcx> InferCtxt<'tcx> { @@ -11,24 +12,32 @@ impl<'tcx> InferCtxt<'tcx> { /// of the given projection. This allows us to proceed with projections /// while they cannot be resolved yet due to missing information or /// simply due to the lack of access to the trait resolution machinery. - pub fn projection_ty_to_infer( + pub fn projection_term_to_infer( &self, param_env: ty::ParamEnv<'tcx>, - projection_ty: ty::AliasTy<'tcx>, + alias_term: ty::AliasTerm<'tcx>, cause: ObligationCause<'tcx>, recursion_depth: usize, obligations: &mut PredicateObligations<'tcx>, - ) -> Ty<'tcx> { + ) -> Term<'tcx> { debug_assert!(!self.next_trait_solver()); - let ty_var = self.next_ty_var(self.tcx.def_span(projection_ty.def_id)); + + let span = self.tcx.def_span(alias_term.def_id); + let infer_var = if alias_term.kind(self.tcx).is_type() { + self.next_ty_var(span).into() + } else { + self.next_const_var(span).into() + }; + let projection = ty::PredicateKind::Clause(ty::ClauseKind::Projection(ty::ProjectionPredicate { - projection_term: projection_ty.into(), - term: ty_var.into(), + projection_term: alias_term, + term: infer_var, })); let obligation = Obligation::with_depth(self.tcx, cause, recursion_depth, param_env, projection); obligations.push(obligation); - ty_var + + infer_var } } diff --git a/compiler/rustc_infer/src/infer/region_constraints/leak_check.rs b/compiler/rustc_infer/src/infer/region_constraints/leak_check.rs index 3cfc58dea05bd..e332b6d0447a9 100644 --- a/compiler/rustc_infer/src/infer/region_constraints/leak_check.rs +++ b/compiler/rustc_infer/src/infer/region_constraints/leak_check.rs @@ -155,7 +155,7 @@ impl<'a, 'tcx> LeakCheck<'a, 'tcx> { self.scc_universes[scc].take_min(universe, *region); // Detect those SCCs that directly contain a placeholder - if let ty::RePlaceholder(placeholder) = **region { + if let ty::RePlaceholder(placeholder) = region.kind() { if self.outer_universe.cannot_name(placeholder.universe) { // Update `scc_placeholders` to account for the fact that `P: S` must hold. match self.scc_placeholders[scc] { diff --git a/compiler/rustc_infer/src/infer/region_constraints/mod.rs b/compiler/rustc_infer/src/infer/region_constraints/mod.rs index 57555db37abd5..40e2e654b2ea7 100644 --- a/compiler/rustc_infer/src/infer/region_constraints/mod.rs +++ b/compiler/rustc_infer/src/infer/region_constraints/mod.rs @@ -463,7 +463,7 @@ impl<'tcx> RegionConstraintCollector<'_, 'tcx> { // cannot add constraints once regions are resolved debug!("origin = {:#?}", origin); - match (*sub, *sup) { + match (sub.kind(), sup.kind()) { (ReBound(..), _) | (_, ReBound(..)) => { span_bug!(origin.span(), "cannot relate bound region: {:?} <= {:?}", sub, sup); } @@ -595,7 +595,7 @@ impl<'tcx> RegionConstraintCollector<'_, 'tcx> { } pub fn universe(&mut self, region: Region<'tcx>) -> ty::UniverseIndex { - match *region { + match region.kind() { ty::ReStatic | ty::ReErased | ty::ReLateParam(..) @@ -618,9 +618,7 @@ impl<'tcx> RegionConstraintCollector<'_, 'tcx> { RegionVid::from(value_count)..RegionVid::from(self.storage.unification_table.len()); ( range.clone(), - (range.start.index()..range.end.index()) - .map(|index| self.storage.var_infos[ty::RegionVid::from(index)].origin) - .collect(), + (range.start..range.end).map(|index| self.storage.var_infos[index].origin).collect(), ) } diff --git a/compiler/rustc_infer/src/infer/relate/generalize.rs b/compiler/rustc_infer/src/infer/relate/generalize.rs index e16212955ffee..210b8f37d883d 100644 --- a/compiler/rustc_infer/src/infer/relate/generalize.rs +++ b/compiler/rustc_infer/src/infer/relate/generalize.rs @@ -113,7 +113,7 @@ impl<'tcx> InferCtxt<'tcx> { }]); } // The old solver only accepts projection predicates for associated types. - ty::Alias(ty::Inherent | ty::Weak | ty::Opaque, _) => { + ty::Alias(ty::Inherent | ty::Free | ty::Opaque, _) => { return Err(TypeError::CyclicTy(source_ty)); } _ => bug!("generalized `{source_ty:?} to infer, not an alias"), @@ -571,7 +571,7 @@ impl<'tcx> TypeRelation> for Generalizer<'_, 'tcx> { ) -> RelateResult<'tcx, ty::Region<'tcx>> { assert_eq!(r, r2); // we are misusing TypeRelation here; both LHS and RHS ought to be == - match *r { + match r.kind() { // Never make variables for regions bound within the type itself, // nor for erased regions. ty::ReBound(..) | ty::ReErased => { diff --git a/compiler/rustc_infer/src/infer/relate/mod.rs b/compiler/rustc_infer/src/infer/relate/mod.rs index e6d1003cab61e..6d25dfeb85933 100644 --- a/compiler/rustc_infer/src/infer/relate/mod.rs +++ b/compiler/rustc_infer/src/infer/relate/mod.rs @@ -2,9 +2,8 @@ //! (except for some relations used for diagnostics and heuristics in the compiler). //! As well as the implementation of `Relate` for interned things (`Ty`/`Const`/etc). -pub use rustc_middle::ty::relate::RelateResult; -pub use rustc_type_ir::relate::combine::PredicateEmittingRelation; -pub use rustc_type_ir::relate::*; +pub use rustc_middle::ty::relate::combine::PredicateEmittingRelation; +pub use rustc_middle::ty::relate::{RelateResult, *}; mod generalize; mod higher_ranked; diff --git a/compiler/rustc_infer/src/infer/relate/type_relating.rs b/compiler/rustc_infer/src/infer/relate/type_relating.rs index 009271a8378f3..04ff776594e66 100644 --- a/compiler/rustc_infer/src/infer/relate/type_relating.rs +++ b/compiler/rustc_infer/src/infer/relate/type_relating.rs @@ -3,9 +3,8 @@ use rustc_middle::ty::relate::combine::{super_combine_consts, super_combine_tys} use rustc_middle::ty::relate::{ Relate, RelateResult, TypeRelation, relate_args_invariantly, relate_args_with_variances, }; -use rustc_middle::ty::{self, Ty, TyCtxt, TyVar}; +use rustc_middle::ty::{self, DelayedSet, Ty, TyCtxt, TyVar}; use rustc_span::Span; -use rustc_type_ir::data_structures::DelayedSet; use tracing::{debug, instrument}; use crate::infer::BoundRegionConversionTime::HigherRankedType; diff --git a/compiler/rustc_infer/src/infer/resolve.rs b/compiler/rustc_infer/src/infer/resolve.rs index 4a99c2209755d..4b0ace8c554d6 100644 --- a/compiler/rustc_infer/src/infer/resolve.rs +++ b/compiler/rustc_infer/src/infer/resolve.rs @@ -1,9 +1,8 @@ use rustc_middle::bug; use rustc_middle::ty::{ - self, Const, FallibleTypeFolder, InferConst, Ty, TyCtxt, TypeFoldable, TypeFolder, + self, Const, DelayedMap, FallibleTypeFolder, InferConst, Ty, TyCtxt, TypeFoldable, TypeFolder, TypeSuperFoldable, TypeVisitableExt, }; -use rustc_type_ir::data_structures::DelayedMap; use super::{FixupError, FixupResult, InferCtxt}; @@ -89,7 +88,7 @@ impl<'a, 'tcx> TypeFolder> for OpportunisticRegionResolver<'a, 'tcx } fn fold_region(&mut self, r: ty::Region<'tcx>) -> ty::Region<'tcx> { - match *r { + match r.kind() { ty::ReVar(vid) => self .infcx .inner @@ -153,7 +152,7 @@ impl<'a, 'tcx> FallibleTypeFolder> for FullTypeResolver<'a, 'tcx> { } fn try_fold_region(&mut self, r: ty::Region<'tcx>) -> Result, Self::Error> { - match *r { + match r.kind() { ty::ReVar(_) => Ok(self .infcx .lexical_region_resolutions diff --git a/compiler/rustc_infer/src/infer/snapshot/fudge.rs b/compiler/rustc_infer/src/infer/snapshot/fudge.rs index b5d3c26b05e0b..e210479581ba6 100644 --- a/compiler/rustc_infer/src/infer/snapshot/fudge.rs +++ b/compiler/rustc_infer/src/infer/snapshot/fudge.rs @@ -3,9 +3,8 @@ use std::ops::Range; use rustc_data_structures::{snapshot_vec as sv, unify as ut}; use rustc_middle::ty::{ self, ConstVid, FloatVid, IntVid, RegionVid, Ty, TyCtxt, TyVid, TypeFoldable, TypeFolder, - TypeSuperFoldable, + TypeSuperFoldable, TypeVisitableExt, }; -use rustc_type_ir::TypeVisitableExt; use tracing::instrument; use ut::UnifyKey; @@ -30,11 +29,12 @@ fn const_vars_since_snapshot<'tcx>( snapshot_var_len: usize, ) -> (Range, Vec) { let range = vars_since_snapshot(table, snapshot_var_len); + let range = range.start.vid..range.end.vid; ( - range.start.vid..range.end.vid, - (range.start.index()..range.end.index()) - .map(|index| match table.probe_value(ConstVid::from_u32(index)) { + range.clone(), + range + .map(|index| match table.probe_value(index) { ConstVariableValue::Known { value: _ } => { ConstVariableOrigin { param_def_id: None, span: rustc_span::DUMMY_SP } } diff --git a/compiler/rustc_infer/src/infer/snapshot/undo_log.rs b/compiler/rustc_infer/src/infer/snapshot/undo_log.rs index ba7d8f588e68e..b7412d3d6a6da 100644 --- a/compiler/rustc_infer/src/infer/snapshot/undo_log.rs +++ b/compiler/rustc_infer/src/infer/snapshot/undo_log.rs @@ -17,6 +17,7 @@ pub struct Snapshot<'tcx> { /// Records the "undo" data for a single operation that affects some form of inference variable. #[derive(Clone)] pub(crate) enum UndoLog<'tcx> { + DuplicateOpaqueType, OpaqueTypes(OpaqueTypeKey<'tcx>, Option>), TypeVariables(sv::UndoLog>>), ConstUnificationTable(sv::UndoLog>>), @@ -58,6 +59,7 @@ impl_from! { impl<'tcx> Rollback> for InferCtxtInner<'tcx> { fn reverse(&mut self, undo: UndoLog<'tcx>) { match undo { + UndoLog::DuplicateOpaqueType => self.opaque_type_storage.pop_duplicate_entry(), UndoLog::OpaqueTypes(key, idx) => self.opaque_type_storage.remove(key, idx), UndoLog::TypeVariables(undo) => self.type_variable_storage.reverse(undo), UndoLog::ConstUnificationTable(undo) => self.const_unification_storage.reverse(undo), diff --git a/compiler/rustc_infer/src/lib.rs b/compiler/rustc_infer/src/lib.rs index ece18f4ea64ee..8b2aab4204228 100644 --- a/compiler/rustc_infer/src/lib.rs +++ b/compiler/rustc_infer/src/lib.rs @@ -16,12 +16,12 @@ #![allow(internal_features)] #![allow(rustc::diagnostic_outside_of_impl)] #![allow(rustc::untranslatable_diagnostic)] +#![cfg_attr(bootstrap, feature(let_chains))] #![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")] #![doc(rust_logo)] #![feature(assert_matches)] #![feature(extend_one)] #![feature(iterator_try_collect)] -#![feature(let_chains)] #![feature(rustdoc_internals)] #![recursion_limit = "512"] // For rustdoc // tidy-alphabetical-end diff --git a/compiler/rustc_infer/src/traits/engine.rs b/compiler/rustc_infer/src/traits/engine.rs index 1eae10673b62b..9e51a53ae95fa 100644 --- a/compiler/rustc_infer/src/traits/engine.rs +++ b/compiler/rustc_infer/src/traits/engine.rs @@ -94,7 +94,7 @@ pub trait TraitEngine<'tcx, E: 'tcx>: 'tcx { /// Among all pending obligations, collect those are stalled on a inference variable which has /// changed since the last call to `select_where_possible`. Those obligations are marked as /// successful and returned. - fn drain_unstalled_obligations( + fn drain_stalled_obligations_for_coroutines( &mut self, infcx: &InferCtxt<'tcx>, ) -> PredicateObligations<'tcx>; diff --git a/compiler/rustc_infer/src/traits/mod.rs b/compiler/rustc_infer/src/traits/mod.rs index b537750f1b51b..6d5ad96e31c90 100644 --- a/compiler/rustc_infer/src/traits/mod.rs +++ b/compiler/rustc_infer/src/traits/mod.rs @@ -12,6 +12,7 @@ use std::hash::{Hash, Hasher}; use hir::def_id::LocalDefId; use rustc_hir as hir; +use rustc_macros::{TypeFoldable, TypeVisitable}; use rustc_middle::traits::query::NoSolution; use rustc_middle::traits::solve::Certainty; pub use rustc_middle::traits::*; @@ -35,9 +36,11 @@ use crate::infer::InferCtxt; /// either identifying an `impl` (e.g., `impl Eq for i32`) that /// satisfies the obligation, or else finding a bound that is in /// scope. The eventual result is usually a `Selection` (defined below). -#[derive(Clone)] +#[derive(Clone, TypeFoldable, TypeVisitable)] pub struct Obligation<'tcx, T> { /// The reason we have to prove this thing. + #[type_foldable(identity)] + #[type_visitable(ignore)] pub cause: ObligationCause<'tcx>, /// The environment in which we should prove this thing. @@ -51,6 +54,8 @@ pub struct Obligation<'tcx, T> { /// If this goes over a certain threshold, we abort compilation -- /// in such cases, we can not say whether or not the predicate /// holds for certain. Stupid halting problem; such a drag. + #[type_foldable(identity)] + #[type_visitable(ignore)] pub recursion_depth: usize, } diff --git a/compiler/rustc_infer/src/traits/structural_impls.rs b/compiler/rustc_infer/src/traits/structural_impls.rs index 4335073d9bc6b..03661ebf7ec53 100644 --- a/compiler/rustc_infer/src/traits/structural_impls.rs +++ b/compiler/rustc_infer/src/traits/structural_impls.rs @@ -1,8 +1,6 @@ use std::fmt; -use rustc_middle::ty::{ - self, FallibleTypeFolder, TyCtxt, TypeFoldable, TypeVisitable, TypeVisitor, try_visit, -}; +use rustc_middle::ty; use crate::traits; use crate::traits::project::Normalized; @@ -34,31 +32,3 @@ impl<'tcx> fmt::Debug for traits::MismatchedProjectionTypes<'tcx> { write!(f, "MismatchedProjectionTypes({:?})", self.err) } } - -/////////////////////////////////////////////////////////////////////////// -// TypeFoldable implementations. - -impl<'tcx, O: TypeFoldable>> TypeFoldable> - for traits::Obligation<'tcx, O> -{ - fn try_fold_with>>( - self, - folder: &mut F, - ) -> Result { - Ok(traits::Obligation { - cause: self.cause, - recursion_depth: self.recursion_depth, - predicate: self.predicate.try_fold_with(folder)?, - param_env: self.param_env.try_fold_with(folder)?, - }) - } -} - -impl<'tcx, O: TypeVisitable>> TypeVisitable> - for traits::Obligation<'tcx, O> -{ - fn visit_with>>(&self, visitor: &mut V) -> V::Result { - try_visit!(self.predicate.visit_with(visitor)); - self.param_env.visit_with(visitor) - } -} diff --git a/compiler/rustc_infer/src/traits/util.rs b/compiler/rustc_infer/src/traits/util.rs index 66ed49fe32676..6461fbe0d33bb 100644 --- a/compiler/rustc_infer/src/traits/util.rs +++ b/compiler/rustc_infer/src/traits/util.rs @@ -1,7 +1,7 @@ use rustc_data_structures::fx::FxHashSet; +pub use rustc_middle::ty::elaborate::*; use rustc_middle::ty::{self, TyCtxt}; use rustc_span::{Ident, Span}; -pub use rustc_type_ir::elaborate::*; use crate::traits::{self, Obligation, ObligationCauseCode, PredicateObligation}; diff --git a/compiler/rustc_interface/Cargo.toml b/compiler/rustc_interface/Cargo.toml index e36739356648a..ff28dbeaee698 100644 --- a/compiler/rustc_interface/Cargo.toml +++ b/compiler/rustc_interface/Cargo.toml @@ -5,7 +5,6 @@ edition = "2024" [dependencies] # tidy-alphabetical-start -rustc-rayon = { version = "0.5.0" } rustc-rayon-core = { version = "0.5.0" } rustc_ast = { path = "../rustc_ast" } rustc_ast_lowering = { path = "../rustc_ast_lowering" } diff --git a/compiler/rustc_interface/src/interface.rs b/compiler/rustc_interface/src/interface.rs index 3f87b1a547be5..cf494f8d686e8 100644 --- a/compiler/rustc_interface/src/interface.rs +++ b/compiler/rustc_interface/src/interface.rs @@ -5,7 +5,7 @@ use std::sync::Arc; use rustc_ast::{LitKind, MetaItemKind, token}; use rustc_codegen_ssa::traits::CodegenBackend; use rustc_data_structures::fx::{FxHashMap, FxHashSet}; -use rustc_data_structures::jobserver; +use rustc_data_structures::jobserver::{self, Proxy}; use rustc_data_structures::stable_hasher::StableHasher; use rustc_errors::registry::Registry; use rustc_errors::{DiagCtxtHandle, ErrorGuaranteed}; @@ -40,7 +40,12 @@ pub struct Compiler { pub sess: Session, pub codegen_backend: Box, pub(crate) override_queries: Option, + + /// A reference to the current `GlobalCtxt` which we pass on to `GlobalCtxt`. pub(crate) current_gcx: CurrentGcx, + + /// A jobserver reference which we pass on to `GlobalCtxt`. + pub(crate) jobserver_proxy: Arc, } /// Converts strings provided as `--cfg [cfgspec]` into a `Cfg`. @@ -204,6 +209,14 @@ pub(crate) fn parse_check_cfg(dcx: DiagCtxtHandle<'_>, specs: Vec) -> Ch error!("`cfg()` names cannot be after values"); } names.push(ident); + } else if let Some(boolean) = arg.boolean_literal() { + if values_specified { + error!("`cfg()` names cannot be after values"); + } + names.push(rustc_span::Ident::new( + if boolean { rustc_span::kw::True } else { rustc_span::kw::False }, + arg.span(), + )); } else if arg.has_name(sym::any) && let Some(args) = arg.meta_item_list() { @@ -340,6 +353,10 @@ pub struct Config { /// the list of queries. pub override_queries: Option, + /// An extra set of symbols to add to the symbol interner, the symbol indices + /// will start at [`PREDEFINED_SYMBOLS_COUNT`](rustc_span::symbol::PREDEFINED_SYMBOLS_COUNT) + pub extra_symbols: Vec<&'static str>, + /// This is a callback from the driver that is called to create a codegen backend. /// /// Has no uses within this repository, but is used by bjorn3 for "the @@ -401,8 +418,9 @@ pub fn run_compiler(config: Config, f: impl FnOnce(&Compiler) -> R + Se &early_dcx, config.opts.edition, config.opts.unstable_opts.threads, + &config.extra_symbols, SourceMapInputs { file_loader, path_mapping, hash_kind, checksum_hash_kind }, - |current_gcx| { + |current_gcx, jobserver_proxy| { // The previous `early_dcx` can't be reused here because it doesn't // impl `Send`. Creating a new one is fine. let early_dcx = EarlyDiagCtxt::new(config.opts.error_format); @@ -498,6 +516,7 @@ pub fn run_compiler(config: Config, f: impl FnOnce(&Compiler) -> R + Se codegen_backend, override_queries: config.override_queries, current_gcx, + jobserver_proxy, }; // There are two paths out of `f`. diff --git a/compiler/rustc_interface/src/lib.rs b/compiler/rustc_interface/src/lib.rs index 67e0be93523d9..4128070718308 100644 --- a/compiler/rustc_interface/src/lib.rs +++ b/compiler/rustc_interface/src/lib.rs @@ -1,8 +1,8 @@ // tidy-alphabetical-start +#![cfg_attr(bootstrap, feature(let_chains))] #![feature(decl_macro)] #![feature(file_buffered)] #![feature(iter_intersperse)] -#![feature(let_chains)] #![feature(try_blocks)] // tidy-alphabetical-end diff --git a/compiler/rustc_interface/src/passes.rs b/compiler/rustc_interface/src/passes.rs index 93013c8b3f612..4993b923b2919 100644 --- a/compiler/rustc_interface/src/passes.rs +++ b/compiler/rustc_interface/src/passes.rs @@ -7,6 +7,7 @@ use std::{env, fs, iter}; use rustc_ast as ast; use rustc_codegen_ssa::traits::CodegenBackend; +use rustc_data_structures::jobserver::Proxy; use rustc_data_structures::parallel; use rustc_data_structures::steal::Steal; use rustc_data_structures::sync::{AppendOnlyIndexVec, FreezeLock, WorkerLocal}; @@ -30,10 +31,11 @@ use rustc_resolve::Resolver; use rustc_session::config::{CrateType, Input, OutFileName, OutputFilenames, OutputType}; use rustc_session::cstore::Untracked; use rustc_session::output::{collect_crate_types, filename_for_input}; +use rustc_session::parse::feature_err; use rustc_session::search_paths::PathKind; use rustc_session::{Limit, Session}; use rustc_span::{ - ErrorGuaranteed, FileName, SourceFileHash, SourceFileHashAlgorithm, Span, Symbol, sym, + DUMMY_SP, ErrorGuaranteed, FileName, SourceFileHash, SourceFileHashAlgorithm, Span, Symbol, sym, }; use rustc_target::spec::PanicStrategy; use rustc_trait_selection::traits; @@ -236,6 +238,7 @@ fn configure_and_expand( sess, features, &krate, + tcx.is_sdylib_interface_build(), resolver.lint_buffer(), ) }); @@ -252,6 +255,9 @@ fn configure_and_expand( sess.dcx().emit_err(errors::MixedProcMacroCrate); } } + if crate_types.contains(&CrateType::Sdylib) && !tcx.features().export_stable() { + feature_err(sess, sym::export_stable, DUMMY_SP, "`sdylib` crate type is unstable").emit(); + } if is_proc_macro_crate && sess.panic_strategy() == PanicStrategy::Abort { sess.dcx().emit_warn(errors::ProcMacroCratePanicAbort); @@ -741,6 +747,25 @@ pub fn write_dep_info(tcx: TyCtxt<'_>) { } } +pub fn write_interface<'tcx>(tcx: TyCtxt<'tcx>) { + if !tcx.crate_types().contains(&rustc_session::config::CrateType::Sdylib) { + return; + } + let _timer = tcx.sess.timer("write_interface"); + let (_, krate) = &*tcx.resolver_for_lowering().borrow(); + + let krate = rustc_ast_pretty::pprust::print_crate_as_interface( + krate, + tcx.sess.psess.edition, + &tcx.sess.psess.attr_id_generator, + ); + let export_output = tcx.output_filenames(()).interface_path(); + let mut file = fs::File::create_buffered(export_output).unwrap(); + if let Err(err) = write!(file, "{}", krate) { + tcx.dcx().fatal(format!("error writing interface file: {}", err)); + } +} + pub static DEFAULT_QUERY_PROVIDERS: LazyLock = LazyLock::new(|| { let providers = &mut Providers::default(); providers.analysis = analysis; @@ -800,6 +825,7 @@ pub fn create_and_enter_global_ctxt FnOnce(TyCtxt<'tcx>) -> T>( sess.opts.cg.metadata.clone(), sess.cfg_version, ); + let outputs = util::build_output_filenames(&pre_configured_attrs, sess); let dep_type = DepsType { dep_names: rustc_query_impl::dep_kind_names() }; @@ -840,12 +866,13 @@ pub fn create_and_enter_global_ctxt FnOnce(TyCtxt<'tcx>) -> T>( dyn for<'tcx> FnOnce( &'tcx Session, CurrentGcx, + Arc, &'tcx OnceLock>, &'tcx WorkerLocal>, &'tcx WorkerLocal>, F, ) -> T, - > = Box::new(move |sess, current_gcx, gcx_cell, arena, hir_arena, f| { + > = Box::new(move |sess, current_gcx, jobserver_proxy, gcx_cell, arena, hir_arena, f| { TyCtxt::create_global_ctxt( gcx_cell, sess, @@ -864,6 +891,7 @@ pub fn create_and_enter_global_ctxt FnOnce(TyCtxt<'tcx>) -> T>( ), providers.hooks, current_gcx, + jobserver_proxy, |tcx| { let feed = tcx.create_crate_num(stable_crate_id).unwrap(); assert_eq!(feed.key(), LOCAL_CRATE); @@ -886,7 +914,15 @@ pub fn create_and_enter_global_ctxt FnOnce(TyCtxt<'tcx>) -> T>( ) }); - inner(&compiler.sess, compiler.current_gcx.clone(), &gcx_cell, &arena, &hir_arena, f) + inner( + &compiler.sess, + compiler.current_gcx.clone(), + Arc::clone(&compiler.jobserver_proxy), + &gcx_cell, + &arena, + &hir_arena, + f, + ) } /// Runs all analyses that we guarantee to run, even if errors were reported in earlier analyses. @@ -899,11 +935,20 @@ fn run_required_analyses(tcx: TyCtxt<'_>) { // is not defined. So we need to cfg it out. #[cfg(all(not(doc), debug_assertions))] rustc_passes::hir_id_validator::check_crate(tcx); + + // Prefetch this to prevent multiple threads from blocking on it later. + // This is needed since the `hir_id_validator::check_crate` call above is not guaranteed + // to use `hir_crate`. + tcx.ensure_done().hir_crate(()); + let sess = tcx.sess; sess.time("misc_checking_1", || { parallel!( { sess.time("looking_for_entry_point", || tcx.ensure_ok().entry_fn(())); + sess.time("check_externally_implementable_items", || { + tcx.ensure_ok().get_externally_implementable_item_impls(()) + }); sess.time("looking_for_derive_registrar", || { tcx.ensure_ok().proc_macro_decls_static(()) @@ -912,6 +957,8 @@ fn run_required_analyses(tcx: TyCtxt<'_>) { CStore::from_tcx(tcx).report_unused_deps(tcx); }, { + tcx.ensure_ok().exportable_items(LOCAL_CRATE); + tcx.ensure_ok().stable_order_of_exportable_impls(LOCAL_CRATE); tcx.par_hir_for_each_module(|module| { tcx.ensure_ok().check_mod_loops(module); tcx.ensure_ok().check_mod_attrs(module); @@ -955,7 +1002,9 @@ fn run_required_analyses(tcx: TyCtxt<'_>) { // Run unsafety check because it's responsible for stealing and // deallocating THIR. tcx.ensure_ok().check_unsafety(def_id); - tcx.ensure_ok().mir_borrowck(def_id) + if !tcx.is_typeck_child(def_id.to_def_id()) { + tcx.ensure_ok().mir_borrowck(def_id) + } }); }); sess.time("MIR_effect_checking", || { @@ -976,14 +1025,16 @@ fn run_required_analyses(tcx: TyCtxt<'_>) { tcx.par_hir_body_owners(|def_id| { if tcx.is_coroutine(def_id.to_def_id()) { tcx.ensure_ok().mir_coroutine_witnesses(def_id); - tcx.ensure_ok().check_coroutine_obligations( + let _ = tcx.ensure_ok().check_coroutine_obligations( tcx.typeck_root_def_id(def_id.to_def_id()).expect_local(), ); - // Eagerly check the unsubstituted layout for cycles. - tcx.ensure_ok().layout_of( - ty::TypingEnv::post_analysis(tcx, def_id.to_def_id()) - .as_query_input(tcx.type_of(def_id).instantiate_identity()), - ); + if !tcx.is_async_drop_in_place_coroutine(def_id.to_def_id()) { + // Eagerly check the unsubstituted layout for cycles. + tcx.ensure_ok().layout_of( + ty::TypingEnv::post_analysis(tcx, def_id.to_def_id()) + .as_query_input(tcx.type_of(def_id).instantiate_identity()), + ); + } } }); }); diff --git a/compiler/rustc_interface/src/tests.rs b/compiler/rustc_interface/src/tests.rs index b44be1710edf7..0ceda2201344e 100644 --- a/compiler/rustc_interface/src/tests.rs +++ b/compiler/rustc_interface/src/tests.rs @@ -1,7 +1,7 @@ #![allow(rustc::bad_opt_access)] -use std::collections::{BTreeMap, BTreeSet}; +use std::collections::BTreeMap; use std::num::NonZero; -use std::path::{Path, PathBuf}; +use std::path::PathBuf; use std::sync::atomic::AtomicBool; use rustc_abi::Align; @@ -53,7 +53,7 @@ where checksum_hash_kind, }); - rustc_span::create_session_globals_then(DEFAULT_EDITION, sm_inputs, || { + rustc_span::create_session_globals_then(DEFAULT_EDITION, &[], sm_inputs, || { let temps_dir = sessopts.unstable_opts.temps_dir.as_deref().map(PathBuf::from); let io = CompilerIO { input: Input::Str { name: FileName::Custom(String::new()), input: String::new() }, @@ -89,8 +89,8 @@ where S: Into, I: IntoIterator, { - let locations: BTreeSet = - locations.into_iter().map(|s| CanonicalizedPath::new(Path::new(&s.into()))).collect(); + let locations = + locations.into_iter().map(|s| CanonicalizedPath::new(PathBuf::from(s.into()))).collect(); ExternEntry { location: ExternLocation::ExactPaths(locations), @@ -614,6 +614,7 @@ fn test_codegen_options_tracking_hash() { tracked!(control_flow_guard, CFGuard::Checks); tracked!(debug_assertions, Some(true)); tracked!(debuginfo, DebugInfo::Limited); + tracked!(dwarf_version, Some(5)); tracked!(embed_bitcode, false); tracked!(force_frame_pointers, FramePointer::Always); tracked!(force_unwind_tables, Some(true)); @@ -787,6 +788,7 @@ fn test_unstable_options_tracking_hash() { tracked!(direct_access_external_data, Some(true)); tracked!(dual_proc_macros, true); tracked!(dwarf_version, Some(5)); + tracked!(embed_metadata, false); tracked!(embed_source, true); tracked!(emit_thin_lto, false); tracked!(emscripten_wasm_eh, true); @@ -816,8 +818,8 @@ fn test_unstable_options_tracking_hash() { tracked!(min_function_alignment, Some(Align::EIGHT)); tracked!(mir_emit_retag, true); tracked!(mir_enable_passes, vec![("DestProp".to_string(), false)]); - tracked!(mir_keep_place_mention, true); tracked!(mir_opt_level, Some(4)); + tracked!(mir_preserve_ub, true); tracked!(move_size_limit, Some(4096)); tracked!(mutable_noalias, false); tracked!(next_solver, NextSolverConfig { coherence: true, globally: true }); @@ -852,6 +854,7 @@ fn test_unstable_options_tracking_hash() { tracked!(sanitizer_cfi_generalize_pointers, Some(true)); tracked!(sanitizer_cfi_normalize_integers, Some(true)); tracked!(sanitizer_dataflow_abilist, vec![String::from("/rustc/abc")]); + tracked!(sanitizer_kcfi_arity, Some(true)); tracked!(sanitizer_memory_track_origins, 2); tracked!(sanitizer_recover, SanitizerSet::ADDRESS); tracked!(saturating_float_casts, Some(true)); diff --git a/compiler/rustc_interface/src/util.rs b/compiler/rustc_interface/src/util.rs index 83d80938b4e30..087b11fdf9d9c 100644 --- a/compiler/rustc_interface/src/util.rs +++ b/compiler/rustc_interface/src/util.rs @@ -1,11 +1,12 @@ use std::env::consts::{DLL_PREFIX, DLL_SUFFIX}; use std::path::{Path, PathBuf}; -use std::sync::OnceLock; use std::sync::atomic::{AtomicBool, Ordering}; +use std::sync::{Arc, OnceLock}; use std::{env, iter, thread}; use rustc_ast as ast; use rustc_codegen_ssa::traits::CodegenBackend; +use rustc_data_structures::jobserver::Proxy; use rustc_data_structures::sync; use rustc_metadata::{DylibError, load_symbol_from_dylib}; use rustc_middle::ty::CurrentGcx; @@ -38,14 +39,25 @@ pub(crate) fn add_configuration( codegen_backend: &dyn CodegenBackend, ) { let tf = sym::target_feature; + let tf_cfg = codegen_backend.target_config(sess); - let (target_features, unstable_target_features) = codegen_backend.target_features_cfg(sess); + sess.unstable_target_features.extend(tf_cfg.unstable_target_features.iter().copied()); + sess.target_features.extend(tf_cfg.target_features.iter().copied()); - sess.unstable_target_features.extend(unstable_target_features.iter().copied()); + cfg.extend(tf_cfg.target_features.into_iter().map(|feat| (tf, Some(feat)))); - sess.target_features.extend(target_features.iter().copied()); - - cfg.extend(target_features.into_iter().map(|feat| (tf, Some(feat)))); + if tf_cfg.has_reliable_f16 { + cfg.insert((sym::target_has_reliable_f16, None)); + } + if tf_cfg.has_reliable_f16_math { + cfg.insert((sym::target_has_reliable_f16_math, None)); + } + if tf_cfg.has_reliable_f128 { + cfg.insert((sym::target_has_reliable_f128, None)); + } + if tf_cfg.has_reliable_f128_math { + cfg.insert((sym::target_has_reliable_f128_math, None)); + } if sess.crt_static(None) { cfg.insert((tf, Some(sym::crt_dash_static))); @@ -113,10 +125,11 @@ fn init_stack_size(early_dcx: &EarlyDiagCtxt) -> usize { }) } -fn run_in_thread_with_globals R + Send, R: Send>( +fn run_in_thread_with_globals) -> R + Send, R: Send>( thread_stack_size: usize, edition: Edition, sm_inputs: SourceMapInputs, + extra_symbols: &[&'static str], f: F, ) -> R { // The "thread pool" is a single spawned thread in the non-parallel @@ -134,9 +147,12 @@ fn run_in_thread_with_globals R + Send, R: Send>( // name contains null bytes. let r = builder .spawn_scoped(s, move || { - rustc_span::create_session_globals_then(edition, Some(sm_inputs), || { - f(CurrentGcx::new()) - }) + rustc_span::create_session_globals_then( + edition, + extra_symbols, + Some(sm_inputs), + || f(CurrentGcx::new(), Proxy::new()), + ) }) .unwrap() .join(); @@ -148,17 +164,21 @@ fn run_in_thread_with_globals R + Send, R: Send>( }) } -pub(crate) fn run_in_thread_pool_with_globals R + Send, R: Send>( +pub(crate) fn run_in_thread_pool_with_globals< + F: FnOnce(CurrentGcx, Arc) -> R + Send, + R: Send, +>( thread_builder_diag: &EarlyDiagCtxt, edition: Edition, threads: usize, + extra_symbols: &[&'static str], sm_inputs: SourceMapInputs, f: F, ) -> R { use std::process; + use rustc_data_structures::defer; use rustc_data_structures::sync::FromDyn; - use rustc_data_structures::{defer, jobserver}; use rustc_middle::ty::tls; use rustc_query_impl::QueryCtxt; use rustc_query_system::query::{QueryContext, break_query_cycles}; @@ -168,21 +188,31 @@ pub(crate) fn run_in_thread_pool_with_globals R + Send, let registry = sync::Registry::new(std::num::NonZero::new(threads).unwrap()); if !sync::is_dyn_thread_safe() { - return run_in_thread_with_globals(thread_stack_size, edition, sm_inputs, |current_gcx| { - // Register the thread for use with the `WorkerLocal` type. - registry.register(); - - f(current_gcx) - }); + return run_in_thread_with_globals( + thread_stack_size, + edition, + sm_inputs, + extra_symbols, + |current_gcx, jobserver_proxy| { + // Register the thread for use with the `WorkerLocal` type. + registry.register(); + + f(current_gcx, jobserver_proxy) + }, + ); } let current_gcx = FromDyn::from(CurrentGcx::new()); let current_gcx2 = current_gcx.clone(); - let builder = rayon::ThreadPoolBuilder::new() + let proxy = Proxy::new(); + + let proxy_ = Arc::clone(&proxy); + let proxy__ = Arc::clone(&proxy); + let builder = rayon_core::ThreadPoolBuilder::new() .thread_name(|_| "rustc".to_string()) - .acquire_thread_handler(jobserver::acquire_thread) - .release_thread_handler(jobserver::release_thread) + .acquire_thread_handler(move || proxy_.acquire_thread()) + .release_thread_handler(move || proxy__.release_thread()) .num_threads(threads) .deadlock_handler(move || { // On deadlock, creates a new thread and forwards information in thread @@ -230,13 +260,13 @@ pub(crate) fn run_in_thread_pool_with_globals R + Send, // pool. Upon creation, each worker thread created gets a copy of the // session globals in TLS. This is possible because `SessionGlobals` impls // `Send` in the parallel compiler. - rustc_span::create_session_globals_then(edition, Some(sm_inputs), || { + rustc_span::create_session_globals_then(edition, extra_symbols, Some(sm_inputs), || { rustc_span::with_session_globals(|session_globals| { let session_globals = FromDyn::from(session_globals); builder .build_scoped( // Initialize each new worker thread when created. - move |thread: rayon::ThreadBuilder| { + move |thread: rayon_core::ThreadBuilder| { // Register the thread for use with the `WorkerLocal` type. registry.register(); @@ -245,7 +275,9 @@ pub(crate) fn run_in_thread_pool_with_globals R + Send, }) }, // Run `f` on the first thread in the thread pool. - move |pool: &rayon::ThreadPool| pool.install(|| f(current_gcx.into_inner())), + move |pool: &rayon_core::ThreadPool| { + pool.install(|| f(current_gcx.into_inner(), proxy)) + }, ) .unwrap() }) diff --git a/compiler/rustc_lexer/src/cursor.rs b/compiler/rustc_lexer/src/cursor.rs index e0e3bd0e30b16..526693d3de1f0 100644 --- a/compiler/rustc_lexer/src/cursor.rs +++ b/compiler/rustc_lexer/src/cursor.rs @@ -1,5 +1,10 @@ use std::str::Chars; +pub enum FrontmatterAllowed { + Yes, + No, +} + /// Peekable iterator over a char sequence. /// /// Next characters can be peeked via `first` method, @@ -8,6 +13,7 @@ pub struct Cursor<'a> { len_remaining: usize, /// Iterator over chars. Slightly faster than a &str. chars: Chars<'a>, + pub(crate) frontmatter_allowed: FrontmatterAllowed, #[cfg(debug_assertions)] prev: char, } @@ -15,10 +21,11 @@ pub struct Cursor<'a> { pub(crate) const EOF_CHAR: char = '\0'; impl<'a> Cursor<'a> { - pub fn new(input: &'a str) -> Cursor<'a> { + pub fn new(input: &'a str, frontmatter_allowed: FrontmatterAllowed) -> Cursor<'a> { Cursor { len_remaining: input.len(), chars: input.chars(), + frontmatter_allowed, #[cfg(debug_assertions)] prev: EOF_CHAR, } @@ -95,6 +102,11 @@ impl<'a> Cursor<'a> { Some(c) } + /// Moves to a substring by a number of bytes. + pub(crate) fn bump_bytes(&mut self, n: usize) { + self.chars = self.as_str()[n..].chars(); + } + /// Eats symbols while predicate returns true or until the end of file is reached. pub(crate) fn eat_while(&mut self, mut predicate: impl FnMut(char) -> bool) { // It was tried making optimized version of this for eg. line comments, but diff --git a/compiler/rustc_lexer/src/lib.rs b/compiler/rustc_lexer/src/lib.rs index 61638e45253fd..2374f38825099 100644 --- a/compiler/rustc_lexer/src/lib.rs +++ b/compiler/rustc_lexer/src/lib.rs @@ -26,7 +26,6 @@ // tidy-alphabetical-end mod cursor; -pub mod unescape; #[cfg(test)] mod tests; @@ -36,8 +35,8 @@ pub use unicode_xid::UNICODE_VERSION as UNICODE_XID_VERSION; use self::LiteralKind::*; use self::TokenKind::*; -pub use crate::cursor::Cursor; use crate::cursor::EOF_CHAR; +pub use crate::cursor::{Cursor, FrontmatterAllowed}; /// Parsed token. /// It doesn't contain information about data that has been parsed, @@ -58,17 +57,27 @@ impl Token { #[derive(Clone, Copy, Debug, PartialEq, Eq)] pub enum TokenKind { /// A line comment, e.g. `// comment`. - LineComment { doc_style: Option }, + LineComment { + doc_style: Option, + }, /// A block comment, e.g. `/* block comment */`. /// /// Block comments can be recursive, so a sequence like `/* /* */` /// will not be considered terminated and will result in a parsing error. - BlockComment { doc_style: Option, terminated: bool }, + BlockComment { + doc_style: Option, + terminated: bool, + }, /// Any whitespace character sequence. Whitespace, + Frontmatter { + has_invalid_preceding_whitespace: bool, + invalid_infostring: bool, + }, + /// An identifier or keyword, e.g. `ident` or `continue`. Ident, @@ -110,10 +119,15 @@ pub enum TokenKind { /// this type will need to check for and reject that case. /// /// See [LiteralKind] for more details. - Literal { kind: LiteralKind, suffix_start: u32 }, + Literal { + kind: LiteralKind, + suffix_start: u32, + }, /// A lifetime, e.g. `'a`. - Lifetime { starts_with_number: bool }, + Lifetime { + starts_with_number: bool, + }, /// `;` Semi, @@ -281,7 +295,7 @@ pub fn strip_shebang(input: &str) -> Option { #[inline] pub fn validate_raw_str(input: &str, prefix_len: u32) -> Result<(), RawStrError> { debug_assert!(!input.is_empty()); - let mut cursor = Cursor::new(input); + let mut cursor = Cursor::new(input, FrontmatterAllowed::No); // Move past the leading `r` or `br`. for _ in 0..prefix_len { cursor.bump().unwrap(); @@ -291,7 +305,7 @@ pub fn validate_raw_str(input: &str, prefix_len: u32) -> Result<(), RawStrError> /// Creates an iterator that produces tokens from the input string. pub fn tokenize(input: &str) -> impl Iterator { - let mut cursor = Cursor::new(input); + let mut cursor = Cursor::new(input, FrontmatterAllowed::No); std::iter::from_fn(move || { let token = cursor.advance_token(); if token.kind != TokenKind::Eof { Some(token) } else { None } @@ -362,7 +376,34 @@ impl Cursor<'_> { Some(c) => c, None => return Token::new(TokenKind::Eof, 0), }; + let token_kind = match first_char { + c if matches!(self.frontmatter_allowed, FrontmatterAllowed::Yes) + && is_whitespace(c) => + { + let mut last = first_char; + while is_whitespace(self.first()) { + let Some(c) = self.bump() else { + break; + }; + last = c; + } + // invalid frontmatter opening as whitespace preceding it isn't newline. + // combine the whitespace and the frontmatter to a single token as we shall + // error later. + if last != '\n' && self.as_str().starts_with("---") { + self.bump(); + self.frontmatter(true) + } else { + Whitespace + } + } + '-' if matches!(self.frontmatter_allowed, FrontmatterAllowed::Yes) + && self.as_str().starts_with("--") => + { + // happy path + self.frontmatter(false) + } // Slash, comment or block comment. '/' => match self.first() { '/' => self.line_comment(), @@ -465,11 +506,110 @@ impl Cursor<'_> { c if !c.is_ascii() && c.is_emoji_char() => self.invalid_ident(), _ => Unknown, }; + if matches!(self.frontmatter_allowed, FrontmatterAllowed::Yes) + && !matches!(token_kind, Whitespace) + { + // stop allowing frontmatters after first non-whitespace token + self.frontmatter_allowed = FrontmatterAllowed::No; + } let res = Token::new(token_kind, self.pos_within_token()); self.reset_pos_within_token(); res } + /// Given that one `-` was eaten, eat the rest of the frontmatter. + fn frontmatter(&mut self, has_invalid_preceding_whitespace: bool) -> TokenKind { + debug_assert_eq!('-', self.prev()); + + let pos = self.pos_within_token(); + self.eat_while(|c| c == '-'); + + // one `-` is eaten by the caller. + let length_opening = self.pos_within_token() - pos + 1; + + // must be ensured by the caller + debug_assert!(length_opening >= 3); + + // whitespace between the opening and the infostring. + self.eat_while(|ch| ch != '\n' && is_whitespace(ch)); + + // copied from `eat_identifier`, but allows `.` in infostring to allow something like + // `---Cargo.toml` as a valid opener + if is_id_start(self.first()) { + self.bump(); + self.eat_while(|c| is_id_continue(c) || c == '.'); + } + + self.eat_while(|ch| ch != '\n' && is_whitespace(ch)); + let invalid_infostring = self.first() != '\n'; + + let mut s = self.as_str(); + let mut found = false; + while let Some(closing) = s.find(&"-".repeat(length_opening as usize)) { + let preceding_chars_start = s[..closing].rfind("\n").map_or(0, |i| i + 1); + if s[preceding_chars_start..closing].chars().all(is_whitespace) { + // candidate found + self.bump_bytes(closing); + // in case like + // ---cargo + // --- blahblah + // or + // ---cargo + // ---- + // combine those stuff into this frontmatter token such that it gets detected later. + self.eat_until(b'\n'); + found = true; + break; + } else { + s = &s[closing + length_opening as usize..]; + } + } + + if !found { + // recovery strategy: a closing statement might have precending whitespace/newline + // but not have enough dashes to properly close. In this case, we eat until there, + // and report a mismatch in the parser. + let mut rest = self.as_str(); + // We can look for a shorter closing (starting with four dashes but closing with three) + // and other indications that Rust has started and the infostring has ended. + let mut potential_closing = rest + .find("\n---") + // n.b. only in the case where there are dashes, we move the index to the line where + // the dashes start as we eat to include that line. For other cases those are Rust code + // and not included in the frontmatter. + .map(|x| x + 1) + .or_else(|| rest.find("\nuse ")) + .or_else(|| rest.find("\n//!")) + .or_else(|| rest.find("\n#![")); + + if potential_closing.is_none() { + // a less fortunate recovery if all else fails which finds any dashes preceded by whitespace + // on a standalone line. Might be wrong. + while let Some(closing) = rest.find("---") { + let preceding_chars_start = rest[..closing].rfind("\n").map_or(0, |i| i + 1); + if rest[preceding_chars_start..closing].chars().all(is_whitespace) { + // candidate found + potential_closing = Some(closing); + break; + } else { + rest = &rest[closing + 3..]; + } + } + } + + if let Some(potential_closing) = potential_closing { + // bump to the potential closing, and eat everything on that line. + self.bump_bytes(potential_closing); + self.eat_until(b'\n'); + } else { + // eat everything. this will get reported as an unclosed frontmatter. + self.eat_while(|_| true); + } + } + + Frontmatter { has_invalid_preceding_whitespace, invalid_infostring } + } + fn line_comment(&mut self) -> TokenKind { debug_assert!(self.prev() == '/' && self.first() == '/'); self.bump(); diff --git a/compiler/rustc_lexer/src/tests.rs b/compiler/rustc_lexer/src/tests.rs index 8203ae70b0700..fc8d9b9d57bc4 100644 --- a/compiler/rustc_lexer/src/tests.rs +++ b/compiler/rustc_lexer/src/tests.rs @@ -4,7 +4,7 @@ use super::*; fn check_raw_str(s: &str, expected: Result) { let s = &format!("r{}", s); - let mut cursor = Cursor::new(s); + let mut cursor = Cursor::new(s, FrontmatterAllowed::No); cursor.bump(); let res = cursor.raw_double_quoted_string(0); assert_eq!(res, expected); diff --git a/compiler/rustc_lexer/src/unescape.rs b/compiler/rustc_lexer/src/unescape.rs deleted file mode 100644 index d6ea4249247f3..0000000000000 --- a/compiler/rustc_lexer/src/unescape.rs +++ /dev/null @@ -1,438 +0,0 @@ -//! Utilities for validating string and char literals and turning them into -//! values they represent. - -use std::ops::Range; -use std::str::Chars; - -use Mode::*; - -#[cfg(test)] -mod tests; - -/// Errors and warnings that can occur during string unescaping. They mostly -/// relate to malformed escape sequences, but there are a few that are about -/// other problems. -#[derive(Debug, PartialEq, Eq)] -pub enum EscapeError { - /// Expected 1 char, but 0 were found. - ZeroChars, - /// Expected 1 char, but more than 1 were found. - MoreThanOneChar, - - /// Escaped '\' character without continuation. - LoneSlash, - /// Invalid escape character (e.g. '\z'). - InvalidEscape, - /// Raw '\r' encountered. - BareCarriageReturn, - /// Raw '\r' encountered in raw string. - BareCarriageReturnInRawString, - /// Unescaped character that was expected to be escaped (e.g. raw '\t'). - EscapeOnlyChar, - - /// Numeric character escape is too short (e.g. '\x1'). - TooShortHexEscape, - /// Invalid character in numeric escape (e.g. '\xz') - InvalidCharInHexEscape, - /// Character code in numeric escape is non-ascii (e.g. '\xFF'). - OutOfRangeHexEscape, - - /// '\u' not followed by '{'. - NoBraceInUnicodeEscape, - /// Non-hexadecimal value in '\u{..}'. - InvalidCharInUnicodeEscape, - /// '\u{}' - EmptyUnicodeEscape, - /// No closing brace in '\u{..}', e.g. '\u{12'. - UnclosedUnicodeEscape, - /// '\u{_12}' - LeadingUnderscoreUnicodeEscape, - /// More than 6 characters in '\u{..}', e.g. '\u{10FFFF_FF}' - OverlongUnicodeEscape, - /// Invalid in-bound unicode character code, e.g. '\u{DFFF}'. - LoneSurrogateUnicodeEscape, - /// Out of bounds unicode character code, e.g. '\u{FFFFFF}'. - OutOfRangeUnicodeEscape, - - /// Unicode escape code in byte literal. - UnicodeEscapeInByte, - /// Non-ascii character in byte literal, byte string literal, or raw byte string literal. - NonAsciiCharInByte, - - // `\0` in a C string literal. - NulInCStr, - - /// After a line ending with '\', the next line contains whitespace - /// characters that are not skipped. - UnskippedWhitespaceWarning, - - /// After a line ending with '\', multiple lines are skipped. - MultipleSkippedLinesWarning, -} - -impl EscapeError { - /// Returns true for actual errors, as opposed to warnings. - pub fn is_fatal(&self) -> bool { - !matches!( - self, - EscapeError::UnskippedWhitespaceWarning | EscapeError::MultipleSkippedLinesWarning - ) - } -} - -/// Takes the contents of a unicode-only (non-mixed-utf8) literal (without -/// quotes) and produces a sequence of escaped characters or errors. -/// -/// Values are returned by invoking `callback`. For `Char` and `Byte` modes, -/// the callback will be called exactly once. -pub fn unescape_unicode(src: &str, mode: Mode, callback: &mut F) -where - F: FnMut(Range, Result), -{ - match mode { - Char | Byte => { - let mut chars = src.chars(); - let res = unescape_char_or_byte(&mut chars, mode); - callback(0..(src.len() - chars.as_str().len()), res); - } - Str | ByteStr => unescape_non_raw_common(src, mode, callback), - RawStr | RawByteStr => check_raw_common(src, mode, callback), - RawCStr => check_raw_common(src, mode, &mut |r, mut result| { - if let Ok('\0') = result { - result = Err(EscapeError::NulInCStr); - } - callback(r, result) - }), - CStr => unreachable!(), - } -} - -/// Used for mixed utf8 string literals, i.e. those that allow both unicode -/// chars and high bytes. -pub enum MixedUnit { - /// Used for ASCII chars (written directly or via `\x00`..`\x7f` escapes) - /// and Unicode chars (written directly or via `\u` escapes). - /// - /// For example, if '¥' appears in a string it is represented here as - /// `MixedUnit::Char('¥')`, and it will be appended to the relevant byte - /// string as the two-byte UTF-8 sequence `[0xc2, 0xa5]` - Char(char), - - /// Used for high bytes (`\x80`..`\xff`). - /// - /// For example, if `\xa5` appears in a string it is represented here as - /// `MixedUnit::HighByte(0xa5)`, and it will be appended to the relevant - /// byte string as the single byte `0xa5`. - HighByte(u8), -} - -impl From for MixedUnit { - fn from(c: char) -> Self { - MixedUnit::Char(c) - } -} - -impl From for MixedUnit { - fn from(n: u8) -> Self { - if n.is_ascii() { MixedUnit::Char(n as char) } else { MixedUnit::HighByte(n) } - } -} - -/// Takes the contents of a mixed-utf8 literal (without quotes) and produces -/// a sequence of escaped characters or errors. -/// -/// Values are returned by invoking `callback`. -pub fn unescape_mixed(src: &str, mode: Mode, callback: &mut F) -where - F: FnMut(Range, Result), -{ - match mode { - CStr => unescape_non_raw_common(src, mode, &mut |r, mut result| { - if let Ok(MixedUnit::Char('\0')) = result { - result = Err(EscapeError::NulInCStr); - } - callback(r, result) - }), - Char | Byte | Str | RawStr | ByteStr | RawByteStr | RawCStr => unreachable!(), - } -} - -/// Takes a contents of a char literal (without quotes), and returns an -/// unescaped char or an error. -pub fn unescape_char(src: &str) -> Result { - unescape_char_or_byte(&mut src.chars(), Char) -} - -/// Takes a contents of a byte literal (without quotes), and returns an -/// unescaped byte or an error. -pub fn unescape_byte(src: &str) -> Result { - unescape_char_or_byte(&mut src.chars(), Byte).map(byte_from_char) -} - -/// What kind of literal do we parse. -#[derive(Debug, Clone, Copy, PartialEq)] -pub enum Mode { - Char, - - Byte, - - Str, - RawStr, - - ByteStr, - RawByteStr, - - CStr, - RawCStr, -} - -impl Mode { - pub fn in_double_quotes(self) -> bool { - match self { - Str | RawStr | ByteStr | RawByteStr | CStr | RawCStr => true, - Char | Byte => false, - } - } - - /// Are `\x80`..`\xff` allowed? - fn allow_high_bytes(self) -> bool { - match self { - Char | Str => false, - Byte | ByteStr | CStr => true, - RawStr | RawByteStr | RawCStr => unreachable!(), - } - } - - /// Are unicode (non-ASCII) chars allowed? - #[inline] - fn allow_unicode_chars(self) -> bool { - match self { - Byte | ByteStr | RawByteStr => false, - Char | Str | RawStr | CStr | RawCStr => true, - } - } - - /// Are unicode escapes (`\u`) allowed? - fn allow_unicode_escapes(self) -> bool { - match self { - Byte | ByteStr => false, - Char | Str | CStr => true, - RawByteStr | RawStr | RawCStr => unreachable!(), - } - } - - pub fn prefix_noraw(self) -> &'static str { - match self { - Char | Str | RawStr => "", - Byte | ByteStr | RawByteStr => "b", - CStr | RawCStr => "c", - } - } -} - -fn scan_escape + From>( - chars: &mut Chars<'_>, - mode: Mode, -) -> Result { - // Previous character was '\\', unescape what follows. - let res: char = match chars.next().ok_or(EscapeError::LoneSlash)? { - '"' => '"', - 'n' => '\n', - 'r' => '\r', - 't' => '\t', - '\\' => '\\', - '\'' => '\'', - '0' => '\0', - 'x' => { - // Parse hexadecimal character code. - - let hi = chars.next().ok_or(EscapeError::TooShortHexEscape)?; - let hi = hi.to_digit(16).ok_or(EscapeError::InvalidCharInHexEscape)?; - - let lo = chars.next().ok_or(EscapeError::TooShortHexEscape)?; - let lo = lo.to_digit(16).ok_or(EscapeError::InvalidCharInHexEscape)?; - - let value = (hi * 16 + lo) as u8; - - return if !mode.allow_high_bytes() && !value.is_ascii() { - Err(EscapeError::OutOfRangeHexEscape) - } else { - // This may be a high byte, but that will only happen if `T` is - // `MixedUnit`, because of the `allow_high_bytes` check above. - Ok(T::from(value)) - }; - } - 'u' => return scan_unicode(chars, mode.allow_unicode_escapes()).map(T::from), - _ => return Err(EscapeError::InvalidEscape), - }; - Ok(T::from(res)) -} - -fn scan_unicode(chars: &mut Chars<'_>, allow_unicode_escapes: bool) -> Result { - // We've parsed '\u', now we have to parse '{..}'. - - if chars.next() != Some('{') { - return Err(EscapeError::NoBraceInUnicodeEscape); - } - - // First character must be a hexadecimal digit. - let mut n_digits = 1; - let mut value: u32 = match chars.next().ok_or(EscapeError::UnclosedUnicodeEscape)? { - '_' => return Err(EscapeError::LeadingUnderscoreUnicodeEscape), - '}' => return Err(EscapeError::EmptyUnicodeEscape), - c => c.to_digit(16).ok_or(EscapeError::InvalidCharInUnicodeEscape)?, - }; - - // First character is valid, now parse the rest of the number - // and closing brace. - loop { - match chars.next() { - None => return Err(EscapeError::UnclosedUnicodeEscape), - Some('_') => continue, - Some('}') => { - if n_digits > 6 { - return Err(EscapeError::OverlongUnicodeEscape); - } - - // Incorrect syntax has higher priority for error reporting - // than unallowed value for a literal. - if !allow_unicode_escapes { - return Err(EscapeError::UnicodeEscapeInByte); - } - - break std::char::from_u32(value).ok_or({ - if value > 0x10FFFF { - EscapeError::OutOfRangeUnicodeEscape - } else { - EscapeError::LoneSurrogateUnicodeEscape - } - }); - } - Some(c) => { - let digit: u32 = c.to_digit(16).ok_or(EscapeError::InvalidCharInUnicodeEscape)?; - n_digits += 1; - if n_digits > 6 { - // Stop updating value since we're sure that it's incorrect already. - continue; - } - value = value * 16 + digit; - } - }; - } -} - -#[inline] -fn ascii_check(c: char, allow_unicode_chars: bool) -> Result { - if allow_unicode_chars || c.is_ascii() { Ok(c) } else { Err(EscapeError::NonAsciiCharInByte) } -} - -fn unescape_char_or_byte(chars: &mut Chars<'_>, mode: Mode) -> Result { - let c = chars.next().ok_or(EscapeError::ZeroChars)?; - let res = match c { - '\\' => scan_escape(chars, mode), - '\n' | '\t' | '\'' => Err(EscapeError::EscapeOnlyChar), - '\r' => Err(EscapeError::BareCarriageReturn), - _ => ascii_check(c, mode.allow_unicode_chars()), - }?; - if chars.next().is_some() { - return Err(EscapeError::MoreThanOneChar); - } - Ok(res) -} - -/// Takes a contents of a string literal (without quotes) and produces a -/// sequence of escaped characters or errors. -fn unescape_non_raw_common + From>(src: &str, mode: Mode, callback: &mut F) -where - F: FnMut(Range, Result), -{ - let mut chars = src.chars(); - let allow_unicode_chars = mode.allow_unicode_chars(); // get this outside the loop - - // The `start` and `end` computation here is complicated because - // `skip_ascii_whitespace` makes us to skip over chars without counting - // them in the range computation. - while let Some(c) = chars.next() { - let start = src.len() - chars.as_str().len() - c.len_utf8(); - let res = match c { - '\\' => { - match chars.clone().next() { - Some('\n') => { - // Rust language specification requires us to skip whitespaces - // if unescaped '\' character is followed by '\n'. - // For details see [Rust language reference] - // (https://doc.rust-lang.org/reference/tokens.html#string-literals). - skip_ascii_whitespace(&mut chars, start, &mut |range, err| { - callback(range, Err(err)) - }); - continue; - } - _ => scan_escape::(&mut chars, mode), - } - } - '"' => Err(EscapeError::EscapeOnlyChar), - '\r' => Err(EscapeError::BareCarriageReturn), - _ => ascii_check(c, allow_unicode_chars).map(T::from), - }; - let end = src.len() - chars.as_str().len(); - callback(start..end, res); - } -} - -fn skip_ascii_whitespace(chars: &mut Chars<'_>, start: usize, callback: &mut F) -where - F: FnMut(Range, EscapeError), -{ - let tail = chars.as_str(); - let first_non_space = tail - .bytes() - .position(|b| b != b' ' && b != b'\t' && b != b'\n' && b != b'\r') - .unwrap_or(tail.len()); - if tail[1..first_non_space].contains('\n') { - // The +1 accounts for the escaping slash. - let end = start + first_non_space + 1; - callback(start..end, EscapeError::MultipleSkippedLinesWarning); - } - let tail = &tail[first_non_space..]; - if let Some(c) = tail.chars().next() { - if c.is_whitespace() { - // For error reporting, we would like the span to contain the character that was not - // skipped. The +1 is necessary to account for the leading \ that started the escape. - let end = start + first_non_space + c.len_utf8() + 1; - callback(start..end, EscapeError::UnskippedWhitespaceWarning); - } - } - *chars = tail.chars(); -} - -/// Takes a contents of a string literal (without quotes) and produces a -/// sequence of characters or errors. -/// NOTE: Raw strings do not perform any explicit character escaping, here we -/// only produce errors on bare CR. -fn check_raw_common(src: &str, mode: Mode, callback: &mut F) -where - F: FnMut(Range, Result), -{ - let mut chars = src.chars(); - let allow_unicode_chars = mode.allow_unicode_chars(); // get this outside the loop - - // The `start` and `end` computation here matches the one in - // `unescape_non_raw_common` for consistency, even though this function - // doesn't have to worry about skipping any chars. - while let Some(c) = chars.next() { - let start = src.len() - chars.as_str().len() - c.len_utf8(); - let res = match c { - '\r' => Err(EscapeError::BareCarriageReturnInRawString), - _ => ascii_check(c, allow_unicode_chars), - }; - let end = src.len() - chars.as_str().len(); - callback(start..end, res); - } -} - -#[inline] -pub fn byte_from_char(c: char) -> u8 { - let res = c as u32; - debug_assert!(res <= u8::MAX as u32, "guaranteed because of ByteStr"); - res as u8 -} diff --git a/compiler/rustc_lexer/src/unescape/tests.rs b/compiler/rustc_lexer/src/unescape/tests.rs deleted file mode 100644 index 5b99495f47581..0000000000000 --- a/compiler/rustc_lexer/src/unescape/tests.rs +++ /dev/null @@ -1,286 +0,0 @@ -use super::*; - -#[test] -fn test_unescape_char_bad() { - fn check(literal_text: &str, expected_error: EscapeError) { - assert_eq!(unescape_char(literal_text), Err(expected_error)); - } - - check("", EscapeError::ZeroChars); - check(r"\", EscapeError::LoneSlash); - - check("\n", EscapeError::EscapeOnlyChar); - check("\t", EscapeError::EscapeOnlyChar); - check("'", EscapeError::EscapeOnlyChar); - check("\r", EscapeError::BareCarriageReturn); - - check("spam", EscapeError::MoreThanOneChar); - check(r"\x0ff", EscapeError::MoreThanOneChar); - check(r#"\"a"#, EscapeError::MoreThanOneChar); - check(r"\na", EscapeError::MoreThanOneChar); - check(r"\ra", EscapeError::MoreThanOneChar); - check(r"\ta", EscapeError::MoreThanOneChar); - check(r"\\a", EscapeError::MoreThanOneChar); - check(r"\'a", EscapeError::MoreThanOneChar); - check(r"\0a", EscapeError::MoreThanOneChar); - check(r"\u{0}x", EscapeError::MoreThanOneChar); - check(r"\u{1F63b}}", EscapeError::MoreThanOneChar); - - check(r"\v", EscapeError::InvalidEscape); - check(r"\💩", EscapeError::InvalidEscape); - check(r"\●", EscapeError::InvalidEscape); - check("\\\r", EscapeError::InvalidEscape); - - check(r"\x", EscapeError::TooShortHexEscape); - check(r"\x0", EscapeError::TooShortHexEscape); - check(r"\xf", EscapeError::TooShortHexEscape); - check(r"\xa", EscapeError::TooShortHexEscape); - check(r"\xx", EscapeError::InvalidCharInHexEscape); - check(r"\xы", EscapeError::InvalidCharInHexEscape); - check(r"\x🦀", EscapeError::InvalidCharInHexEscape); - check(r"\xtt", EscapeError::InvalidCharInHexEscape); - check(r"\xff", EscapeError::OutOfRangeHexEscape); - check(r"\xFF", EscapeError::OutOfRangeHexEscape); - check(r"\x80", EscapeError::OutOfRangeHexEscape); - - check(r"\u", EscapeError::NoBraceInUnicodeEscape); - check(r"\u[0123]", EscapeError::NoBraceInUnicodeEscape); - check(r"\u{0x}", EscapeError::InvalidCharInUnicodeEscape); - check(r"\u{", EscapeError::UnclosedUnicodeEscape); - check(r"\u{0000", EscapeError::UnclosedUnicodeEscape); - check(r"\u{}", EscapeError::EmptyUnicodeEscape); - check(r"\u{_0000}", EscapeError::LeadingUnderscoreUnicodeEscape); - check(r"\u{0000000}", EscapeError::OverlongUnicodeEscape); - check(r"\u{FFFFFF}", EscapeError::OutOfRangeUnicodeEscape); - check(r"\u{ffffff}", EscapeError::OutOfRangeUnicodeEscape); - check(r"\u{ffffff}", EscapeError::OutOfRangeUnicodeEscape); - - check(r"\u{DC00}", EscapeError::LoneSurrogateUnicodeEscape); - check(r"\u{DDDD}", EscapeError::LoneSurrogateUnicodeEscape); - check(r"\u{DFFF}", EscapeError::LoneSurrogateUnicodeEscape); - - check(r"\u{D800}", EscapeError::LoneSurrogateUnicodeEscape); - check(r"\u{DAAA}", EscapeError::LoneSurrogateUnicodeEscape); - check(r"\u{DBFF}", EscapeError::LoneSurrogateUnicodeEscape); -} - -#[test] -fn test_unescape_char_good() { - fn check(literal_text: &str, expected_char: char) { - assert_eq!(unescape_char(literal_text), Ok(expected_char)); - } - - check("a", 'a'); - check("ы", 'ы'); - check("🦀", '🦀'); - - check(r#"\""#, '"'); - check(r"\n", '\n'); - check(r"\r", '\r'); - check(r"\t", '\t'); - check(r"\\", '\\'); - check(r"\'", '\''); - check(r"\0", '\0'); - - check(r"\x00", '\0'); - check(r"\x5a", 'Z'); - check(r"\x5A", 'Z'); - check(r"\x7f", 127 as char); - - check(r"\u{0}", '\0'); - check(r"\u{000000}", '\0'); - check(r"\u{41}", 'A'); - check(r"\u{0041}", 'A'); - check(r"\u{00_41}", 'A'); - check(r"\u{4__1__}", 'A'); - check(r"\u{1F63b}", '😻'); -} - -#[test] -fn test_unescape_str_warn() { - fn check(literal: &str, expected: &[(Range, Result)]) { - let mut unescaped = Vec::with_capacity(literal.len()); - unescape_unicode(literal, Mode::Str, &mut |range, res| unescaped.push((range, res))); - assert_eq!(unescaped, expected); - } - - // Check we can handle escaped newlines at the end of a file. - check("\\\n", &[]); - check("\\\n ", &[]); - - check( - "\\\n \u{a0} x", - &[ - (0..5, Err(EscapeError::UnskippedWhitespaceWarning)), - (3..5, Ok('\u{a0}')), - (5..6, Ok(' ')), - (6..7, Ok('x')), - ], - ); - check("\\\n \n x", &[(0..7, Err(EscapeError::MultipleSkippedLinesWarning)), (7..8, Ok('x'))]); -} - -#[test] -fn test_unescape_str_good() { - fn check(literal_text: &str, expected: &str) { - let mut buf = Ok(String::with_capacity(literal_text.len())); - unescape_unicode(literal_text, Mode::Str, &mut |range, c| { - if let Ok(b) = &mut buf { - match c { - Ok(c) => b.push(c), - Err(e) => buf = Err((range, e)), - } - } - }); - assert_eq!(buf.as_deref(), Ok(expected)) - } - - check("foo", "foo"); - check("", ""); - check(" \t\n", " \t\n"); - - check("hello \\\n world", "hello world"); - check("thread's", "thread's") -} - -#[test] -fn test_unescape_byte_bad() { - fn check(literal_text: &str, expected_error: EscapeError) { - assert_eq!(unescape_byte(literal_text), Err(expected_error)); - } - - check("", EscapeError::ZeroChars); - check(r"\", EscapeError::LoneSlash); - - check("\n", EscapeError::EscapeOnlyChar); - check("\t", EscapeError::EscapeOnlyChar); - check("'", EscapeError::EscapeOnlyChar); - check("\r", EscapeError::BareCarriageReturn); - - check("spam", EscapeError::MoreThanOneChar); - check(r"\x0ff", EscapeError::MoreThanOneChar); - check(r#"\"a"#, EscapeError::MoreThanOneChar); - check(r"\na", EscapeError::MoreThanOneChar); - check(r"\ra", EscapeError::MoreThanOneChar); - check(r"\ta", EscapeError::MoreThanOneChar); - check(r"\\a", EscapeError::MoreThanOneChar); - check(r"\'a", EscapeError::MoreThanOneChar); - check(r"\0a", EscapeError::MoreThanOneChar); - - check(r"\v", EscapeError::InvalidEscape); - check(r"\💩", EscapeError::InvalidEscape); - check(r"\●", EscapeError::InvalidEscape); - - check(r"\x", EscapeError::TooShortHexEscape); - check(r"\x0", EscapeError::TooShortHexEscape); - check(r"\xa", EscapeError::TooShortHexEscape); - check(r"\xf", EscapeError::TooShortHexEscape); - check(r"\xx", EscapeError::InvalidCharInHexEscape); - check(r"\xы", EscapeError::InvalidCharInHexEscape); - check(r"\x🦀", EscapeError::InvalidCharInHexEscape); - check(r"\xtt", EscapeError::InvalidCharInHexEscape); - - check(r"\u", EscapeError::NoBraceInUnicodeEscape); - check(r"\u[0123]", EscapeError::NoBraceInUnicodeEscape); - check(r"\u{0x}", EscapeError::InvalidCharInUnicodeEscape); - check(r"\u{", EscapeError::UnclosedUnicodeEscape); - check(r"\u{0000", EscapeError::UnclosedUnicodeEscape); - check(r"\u{}", EscapeError::EmptyUnicodeEscape); - check(r"\u{_0000}", EscapeError::LeadingUnderscoreUnicodeEscape); - check(r"\u{0000000}", EscapeError::OverlongUnicodeEscape); - - check("ы", EscapeError::NonAsciiCharInByte); - check("🦀", EscapeError::NonAsciiCharInByte); - - check(r"\u{0}", EscapeError::UnicodeEscapeInByte); - check(r"\u{000000}", EscapeError::UnicodeEscapeInByte); - check(r"\u{41}", EscapeError::UnicodeEscapeInByte); - check(r"\u{0041}", EscapeError::UnicodeEscapeInByte); - check(r"\u{00_41}", EscapeError::UnicodeEscapeInByte); - check(r"\u{4__1__}", EscapeError::UnicodeEscapeInByte); - check(r"\u{1F63b}", EscapeError::UnicodeEscapeInByte); - check(r"\u{0}x", EscapeError::UnicodeEscapeInByte); - check(r"\u{1F63b}}", EscapeError::UnicodeEscapeInByte); - check(r"\u{FFFFFF}", EscapeError::UnicodeEscapeInByte); - check(r"\u{ffffff}", EscapeError::UnicodeEscapeInByte); - check(r"\u{ffffff}", EscapeError::UnicodeEscapeInByte); - check(r"\u{DC00}", EscapeError::UnicodeEscapeInByte); - check(r"\u{DDDD}", EscapeError::UnicodeEscapeInByte); - check(r"\u{DFFF}", EscapeError::UnicodeEscapeInByte); - check(r"\u{D800}", EscapeError::UnicodeEscapeInByte); - check(r"\u{DAAA}", EscapeError::UnicodeEscapeInByte); - check(r"\u{DBFF}", EscapeError::UnicodeEscapeInByte); -} - -#[test] -fn test_unescape_byte_good() { - fn check(literal_text: &str, expected_byte: u8) { - assert_eq!(unescape_byte(literal_text), Ok(expected_byte)); - } - - check("a", b'a'); - - check(r#"\""#, b'"'); - check(r"\n", b'\n'); - check(r"\r", b'\r'); - check(r"\t", b'\t'); - check(r"\\", b'\\'); - check(r"\'", b'\''); - check(r"\0", b'\0'); - - check(r"\x00", b'\0'); - check(r"\x5a", b'Z'); - check(r"\x5A", b'Z'); - check(r"\x7f", 127); - check(r"\x80", 128); - check(r"\xff", 255); - check(r"\xFF", 255); -} - -#[test] -fn test_unescape_byte_str_good() { - fn check(literal_text: &str, expected: &[u8]) { - let mut buf = Ok(Vec::with_capacity(literal_text.len())); - unescape_unicode(literal_text, Mode::ByteStr, &mut |range, c| { - if let Ok(b) = &mut buf { - match c { - Ok(c) => b.push(byte_from_char(c)), - Err(e) => buf = Err((range, e)), - } - } - }); - assert_eq!(buf.as_deref(), Ok(expected)) - } - - check("foo", b"foo"); - check("", b""); - check(" \t\n", b" \t\n"); - - check("hello \\\n world", b"hello world"); - check("thread's", b"thread's") -} - -#[test] -fn test_unescape_raw_str() { - fn check(literal: &str, expected: &[(Range, Result)]) { - let mut unescaped = Vec::with_capacity(literal.len()); - unescape_unicode(literal, Mode::RawStr, &mut |range, res| unescaped.push((range, res))); - assert_eq!(unescaped, expected); - } - - check("\r", &[(0..1, Err(EscapeError::BareCarriageReturnInRawString))]); - check("\rx", &[(0..1, Err(EscapeError::BareCarriageReturnInRawString)), (1..2, Ok('x'))]); -} - -#[test] -fn test_unescape_raw_byte_str() { - fn check(literal: &str, expected: &[(Range, Result)]) { - let mut unescaped = Vec::with_capacity(literal.len()); - unescape_unicode(literal, Mode::RawByteStr, &mut |range, res| unescaped.push((range, res))); - assert_eq!(unescaped, expected); - } - - check("\r", &[(0..1, Err(EscapeError::BareCarriageReturnInRawString))]); - check("🦀", &[(0..4, Err(EscapeError::NonAsciiCharInByte))]); - check("🦀a", &[(0..4, Err(EscapeError::NonAsciiCharInByte)), (4..5, Ok('a'))]); -} diff --git a/compiler/rustc_lint/messages.ftl b/compiler/rustc_lint/messages.ftl index 782d328a95102..99b42ee548029 100644 --- a/compiler/rustc_lint/messages.ftl +++ b/compiler/rustc_lint/messages.ftl @@ -271,7 +271,7 @@ lint_expectation = this lint expectation is unfulfilled lint_extern_crate_not_idiomatic = `extern crate` is not idiomatic in the new edition .suggestion = convert it to a `use` -lint_extern_without_abi = extern declarations without an explicit ABI are deprecated +lint_extern_without_abi = `extern` declarations without an explicit ABI are deprecated .label = ABI should be specified here .suggestion = explicitly specify the {$default_abi} ABI @@ -360,6 +360,10 @@ lint_impl_trait_overcaptures = `{$self_ty}` will capture more lifetimes than pos lint_impl_trait_redundant_captures = all possible in-scope parameters are already captured, so `use<...>` syntax is redundant .suggestion = remove the `use<...>` syntax +lint_implicit_unsafe_autorefs = implicit autoref creates a reference to the dereference of a raw pointer + .note = creating a reference requires the pointer target to be valid and imposes aliasing requirements + .suggestion = try using a raw pointer method instead; or if this reference is intentional, make it explicit + lint_improper_ctypes = `extern` {$desc} uses type `{$ty}`, which is not FFI-safe .label = not FFI-safe .note = the type is defined here @@ -757,7 +761,7 @@ lint_single_use_lifetime = lifetime parameter `{$ident}` only used once lint_span_use_eq_ctxt = use `.eq_ctxt()` instead of `.ctxt() == .ctxt()` -lint_static_mut_refs_lint = creating a {$shared_label}reference to mutable static is discouraged +lint_static_mut_refs_lint = creating a {$shared_label}reference to mutable static .label = {$shared_label}reference to mutable static .suggestion = use `&raw const` instead to create a raw pointer .suggestion_mut = use `&raw mut` instead to create a raw pointer diff --git a/compiler/rustc_lint/src/autorefs.rs b/compiler/rustc_lint/src/autorefs.rs new file mode 100644 index 0000000000000..91d58d92466ac --- /dev/null +++ b/compiler/rustc_lint/src/autorefs.rs @@ -0,0 +1,160 @@ +use rustc_ast::{BorrowKind, UnOp}; +use rustc_hir::{Expr, ExprKind, Mutability}; +use rustc_middle::ty::adjustment::{Adjust, Adjustment, AutoBorrow, OverloadedDeref}; +use rustc_session::{declare_lint, declare_lint_pass}; +use rustc_span::sym; + +use crate::lints::{ImplicitUnsafeAutorefsDiag, ImplicitUnsafeAutorefsSuggestion}; +use crate::{LateContext, LateLintPass, LintContext}; + +declare_lint! { + /// The `dangerous_implicit_autorefs` lint checks for implicitly taken references + /// to dereferences of raw pointers. + /// + /// ### Example + /// + /// ```rust + /// unsafe fn fun(ptr: *mut [u8]) -> *mut [u8] { + /// unsafe { &raw mut (*ptr)[..16] } + /// // ^^^^^^ this calls `IndexMut::index_mut(&mut ..., ..16)`, + /// // implicitly creating a reference + /// } + /// ``` + /// + /// {{produces}} + /// + /// ### Explanation + /// + /// When working with raw pointers it's usually undesirable to create references, + /// since they inflict additional safety requirements. Unfortunately, it's possible + /// to take a reference to the dereference of a raw pointer implicitly, which inflicts + /// the usual reference requirements. + /// + /// If you are sure that you can soundly take a reference, then you can take it explicitly: + /// + /// ```rust + /// unsafe fn fun(ptr: *mut [u8]) -> *mut [u8] { + /// unsafe { &raw mut (&mut *ptr)[..16] } + /// } + /// ``` + /// + /// Otherwise try to find an alternative way to achive your goals using only raw pointers: + /// + /// ```rust + /// use std::ptr; + /// + /// fn fun(ptr: *mut [u8]) -> *mut [u8] { + /// ptr::slice_from_raw_parts_mut(ptr.cast(), 16) + /// } + /// ``` + pub DANGEROUS_IMPLICIT_AUTOREFS, + Warn, + "implicit reference to a dereference of a raw pointer", + report_in_external_macro +} + +declare_lint_pass!(ImplicitAutorefs => [DANGEROUS_IMPLICIT_AUTOREFS]); + +impl<'tcx> LateLintPass<'tcx> for ImplicitAutorefs { + fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) { + // This logic has mostly been taken from + // + + // 5. Either of the following: + // a. A deref followed by any non-deref place projection (that intermediate + // deref will typically be auto-inserted). + // b. A method call annotated with `#[rustc_no_implicit_refs]`. + // c. A deref followed by a `&raw const` or `&raw mut`. + let mut is_coming_from_deref = false; + let inner = match expr.kind { + ExprKind::AddrOf(BorrowKind::Raw, _, inner) => match inner.kind { + ExprKind::Unary(UnOp::Deref, inner) => { + is_coming_from_deref = true; + inner + } + _ => return, + }, + ExprKind::Index(base, _, _) => base, + ExprKind::MethodCall(_, inner, _, _) => { + // PERF: Checking of `#[rustc_no_implicit_refs]` is deferred below + // because checking for attribute is a bit costly. + inner + } + ExprKind::Field(inner, _) => inner, + _ => return, + }; + + let typeck = cx.typeck_results(); + let adjustments_table = typeck.adjustments(); + + if let Some(adjustments) = adjustments_table.get(inner.hir_id) + // 4. Any number of automatically inserted deref/derefmut calls. + && let adjustments = peel_derefs_adjustments(&**adjustments) + // 3. An automatically inserted reference (might come from a deref). + && let [adjustment] = adjustments + && let Some(borrow_mutbl) = has_implicit_borrow(adjustment) + && let ExprKind::Unary(UnOp::Deref, dereferenced) = + // 2. Any number of place projections. + peel_place_mappers(inner).kind + // 1. Deref of a raw pointer. + && typeck.expr_ty(dereferenced).is_raw_ptr() + // PERF: 5. b. A method call annotated with `#[rustc_no_implicit_refs]` + && match expr.kind { + ExprKind::MethodCall(..) => matches!( + cx.typeck_results().type_dependent_def_id(expr.hir_id), + Some(def_id) if cx.tcx.has_attr(def_id, sym::rustc_no_implicit_autorefs) + ), + _ => true, + } + { + cx.emit_span_lint( + DANGEROUS_IMPLICIT_AUTOREFS, + expr.span.source_callsite(), + ImplicitUnsafeAutorefsDiag { + suggestion: ImplicitUnsafeAutorefsSuggestion { + mutbl: borrow_mutbl.ref_prefix_str(), + deref: if is_coming_from_deref { "*" } else { "" }, + start_span: inner.span.shrink_to_lo(), + end_span: inner.span.shrink_to_hi(), + }, + }, + ) + } + } +} + +/// Peels expressions from `expr` that can map a place. +fn peel_place_mappers<'tcx>(mut expr: &'tcx Expr<'tcx>) -> &'tcx Expr<'tcx> { + loop { + match expr.kind { + ExprKind::Index(base, _idx, _) => expr = &base, + ExprKind::Field(e, _) => expr = &e, + _ => break expr, + } + } +} + +/// Peel derefs adjustments until the last last element. +fn peel_derefs_adjustments<'a>(mut adjs: &'a [Adjustment<'a>]) -> &'a [Adjustment<'a>] { + while let [Adjustment { kind: Adjust::Deref(_), .. }, end @ ..] = adjs + && !end.is_empty() + { + adjs = end; + } + adjs +} + +/// Test if some adjustment has some implicit borrow. +/// +/// Returns `Some(mutability)` if the argument adjustment has implicit borrow in it. +fn has_implicit_borrow(Adjustment { kind, .. }: &Adjustment<'_>) -> Option { + match kind { + &Adjust::Deref(Some(OverloadedDeref { mutbl, .. })) => Some(mutbl), + &Adjust::Borrow(AutoBorrow::Ref(mutbl)) => Some(mutbl.into()), + Adjust::NeverToAny + | Adjust::Pointer(..) + | Adjust::ReborrowPin(..) + | Adjust::Deref(None) + | Adjust::Borrow(AutoBorrow::RawPtr(..)) => None, + } +} diff --git a/compiler/rustc_lint/src/builtin.rs b/compiler/rustc_lint/src/builtin.rs index 9dccd4a0552c3..41b43f6479867 100644 --- a/compiler/rustc_lint/src/builtin.rs +++ b/compiler/rustc_lint/src/builtin.rs @@ -29,6 +29,7 @@ use rustc_hir::def_id::{CRATE_DEF_ID, DefId, LocalDefId}; use rustc_hir::intravisit::FnKind as HirFnKind; use rustc_hir::{Body, FnDecl, GenericParamKind, PatKind, PredicateOrigin}; use rustc_middle::bug; +use rustc_middle::lint::LevelAndSource; use rustc_middle::ty::layout::LayoutOf; use rustc_middle::ty::print::with_no_trimmed_paths; use rustc_middle::ty::{self, Ty, TyCtxt, TypeVisitableExt, Upcast, VariantDef}; @@ -330,7 +331,6 @@ impl EarlyLintPass for UnsafeCode { if let FnKind::Fn( ctxt, _, - _, ast::Fn { sig: ast::FnSig { header: ast::FnHeader { safety: ast::Safety::Unsafe(_), .. }, .. }, body, @@ -695,7 +695,8 @@ impl<'tcx> LateLintPass<'tcx> for MissingDebugImplementations { } // Avoid listing trait impls if the trait is allowed. - let (level, _) = cx.tcx.lint_level_at_node(MISSING_DEBUG_IMPLEMENTATIONS, item.hir_id()); + let LevelAndSource { level, .. } = + cx.tcx.lint_level_at_node(MISSING_DEBUG_IMPLEMENTATIONS, item.hir_id()); if level == Level::Allow { return; } @@ -778,21 +779,19 @@ impl EarlyLintPass for AnonymousParameters { } if let ast::AssocItemKind::Fn(box Fn { ref sig, .. }) = it.kind { for arg in sig.decl.inputs.iter() { - if let ast::PatKind::Ident(_, ident, None) = arg.pat.kind { - if ident.name == kw::Empty { - let ty_snip = cx.sess().source_map().span_to_snippet(arg.ty.span); + if let ast::PatKind::Missing = arg.pat.kind { + let ty_snip = cx.sess().source_map().span_to_snippet(arg.ty.span); - let (ty_snip, appl) = if let Ok(ref snip) = ty_snip { - (snip.as_str(), Applicability::MachineApplicable) - } else { - ("", Applicability::HasPlaceholders) - }; - cx.emit_span_lint( - ANONYMOUS_PARAMETERS, - arg.pat.span, - BuiltinAnonymousParams { suggestion: (arg.pat.span, appl), ty_snip }, - ); - } + let (ty_snip, appl) = if let Ok(ref snip) = ty_snip { + (snip.as_str(), Applicability::MachineApplicable) + } else { + ("", Applicability::HasPlaceholders) + }; + cx.emit_span_lint( + ANONYMOUS_PARAMETERS, + arg.pat.span, + BuiltinAnonymousParams { suggestion: (arg.pat.span, appl), ty_snip }, + ); } } } @@ -949,7 +948,7 @@ declare_lint! { /// /// ### Example /// - /// ```rust,compile_fail + /// ```rust,compile_fail,edition2021 /// #[no_mangle] /// const FOO: i32 = 5; /// ``` @@ -1989,7 +1988,7 @@ impl ExplicitOutlivesRequirements { inferred_outlives .filter_map(|(clause, _)| match clause.kind().skip_binder() { - ty::ClauseKind::RegionOutlives(ty::OutlivesPredicate(a, b)) => match *a { + ty::ClauseKind::RegionOutlives(ty::OutlivesPredicate(a, b)) => match a.kind() { ty::ReEarlyParam(ebr) if item_generics.region_param(ebr, tcx).def_id == lifetime.to_def_id() => { @@ -2039,7 +2038,7 @@ impl ExplicitOutlivesRequirements { let is_inferred = match tcx.named_bound_var(lifetime.hir_id) { Some(ResolvedArg::EarlyBound(def_id)) => inferred_outlives .iter() - .any(|r| matches!(**r, ty::ReEarlyParam(ebr) if { item_generics.region_param(ebr, tcx).def_id == def_id.to_def_id() })), + .any(|r| matches!(r.kind(), ty::ReEarlyParam(ebr) if { item_generics.region_param(ebr, tcx).def_id == def_id.to_def_id() })), _ => false, }; @@ -3116,6 +3115,7 @@ impl EarlyLintPass for SpecialModuleName { for item in &krate.items { if let ast::ItemKind::Mod( _, + ident, ast::ModKind::Unloaded | ast::ModKind::Loaded(_, ast::Inline::No, _, _), ) = item.kind { @@ -3123,7 +3123,7 @@ impl EarlyLintPass for SpecialModuleName { continue; } - match item.ident.name.as_str() { + match ident.name.as_str() { "lib" => cx.emit_span_lint( SPECIAL_MODULE_NAME, item.span, diff --git a/compiler/rustc_lint/src/context.rs b/compiler/rustc_lint/src/context.rs index 017ae943e9161..5679d4566dcd4 100644 --- a/compiler/rustc_lint/src/context.rs +++ b/compiler/rustc_lint/src/context.rs @@ -17,13 +17,12 @@ use rustc_hir::def_id::{CrateNum, DefId}; use rustc_hir::definitions::{DefPathData, DisambiguatedDefPathData}; use rustc_hir::{Pat, PatKind}; use rustc_middle::bug; +use rustc_middle::lint::LevelAndSource; use rustc_middle::middle::privacy::EffectiveVisibilities; use rustc_middle::ty::layout::{LayoutError, LayoutOfHelpers, TyAndLayout}; use rustc_middle::ty::print::{PrintError, PrintTraitRefExt as _, Printer, with_no_trimmed_paths}; use rustc_middle::ty::{self, GenericArg, RegisteredTools, Ty, TyCtxt, TypingEnv, TypingMode}; -use rustc_session::lint::{ - FutureIncompatibleInfo, Level, Lint, LintBuffer, LintExpectationId, LintId, -}; +use rustc_session::lint::{FutureIncompatibleInfo, Lint, LintBuffer, LintExpectationId, LintId}; use rustc_session::{LintStoreMarker, Session}; use rustc_span::edit_distance::find_best_match_for_names; use rustc_span::{Ident, Span, Symbol, sym}; @@ -84,11 +83,6 @@ enum TargetLint { Ignored, } -pub enum FindLintError { - NotFound, - Removed, -} - struct LintAlias { name: &'static str, /// Whether deprecation warnings should be suppressed for this alias. @@ -232,13 +226,24 @@ impl LintStore { } } - pub fn register_group_alias(&mut self, lint_name: &'static str, alias: &'static str) { - self.lint_groups.insert( + fn insert_group(&mut self, name: &'static str, group: LintGroup) { + let previous = self.lint_groups.insert(name, group); + if previous.is_some() { + bug!("group {name:?} already exists"); + } + } + + pub fn register_group_alias(&mut self, group_name: &'static str, alias: &'static str) { + let Some(LintGroup { lint_ids, .. }) = self.lint_groups.get(group_name) else { + bug!("group alias {alias:?} points to unregistered group {group_name:?}") + }; + + self.insert_group( alias, LintGroup { - lint_ids: vec![], + lint_ids: lint_ids.clone(), is_externally_loaded: false, - depr: Some(LintAlias { name: lint_name, silent: true }), + depr: Some(LintAlias { name: group_name, silent: true }), }, ); } @@ -250,24 +255,17 @@ impl LintStore { deprecated_name: Option<&'static str>, to: Vec, ) { - let new = self - .lint_groups - .insert(name, LintGroup { lint_ids: to, is_externally_loaded, depr: None }) - .is_none(); if let Some(deprecated) = deprecated_name { - self.lint_groups.insert( + self.insert_group( deprecated, LintGroup { - lint_ids: vec![], + lint_ids: to.clone(), is_externally_loaded, depr: Some(LintAlias { name, silent: false }), }, ); } - - if !new { - bug!("duplicate specification of lint group {}", name); - } + self.insert_group(name, LintGroup { lint_ids: to, is_externally_loaded, depr: None }); } /// This lint should give no warning and have no effect. @@ -293,23 +291,15 @@ impl LintStore { self.by_name.insert(name.into(), Removed(reason.into())); } - pub fn find_lints(&self, mut lint_name: &str) -> Result, FindLintError> { + pub fn find_lints(&self, lint_name: &str) -> Option<&[LintId]> { match self.by_name.get(lint_name) { - Some(&Id(lint_id)) => Ok(vec![lint_id]), - Some(&Renamed(_, lint_id)) => Ok(vec![lint_id]), - Some(&Removed(_)) => Err(FindLintError::Removed), - Some(&Ignored) => Ok(vec![]), - None => loop { - return match self.lint_groups.get(lint_name) { - Some(LintGroup { lint_ids, depr, .. }) => { - if let Some(LintAlias { name, .. }) = depr { - lint_name = name; - continue; - } - Ok(lint_ids.clone()) - } - None => Err(FindLintError::Removed), - }; + Some(Id(lint_id)) => Some(slice::from_ref(lint_id)), + Some(Renamed(_, lint_id)) => Some(slice::from_ref(lint_id)), + Some(Removed(_)) => None, + Some(Ignored) => Some(&[]), + None => match self.lint_groups.get(lint_name) { + Some(LintGroup { lint_ids, .. }) => Some(lint_ids), + None => None, }, } } @@ -375,8 +365,12 @@ impl LintStore { CheckLintNameResult::MissingTool }; } - Some(LintGroup { lint_ids, .. }) => { - return CheckLintNameResult::Tool(lint_ids, None); + Some(LintGroup { lint_ids, depr, .. }) => { + return if let &Some(LintAlias { name, silent: false }) = depr { + CheckLintNameResult::Tool(lint_ids, Some(name.to_string())) + } else { + CheckLintNameResult::Tool(lint_ids, None) + }; } }, Some(Id(id)) => return CheckLintNameResult::Tool(slice::from_ref(id), None), @@ -394,15 +388,11 @@ impl LintStore { None => self.check_tool_name_for_backwards_compat(&complete_name, "clippy"), Some(LintGroup { lint_ids, depr, .. }) => { // Check if the lint group name is deprecated - if let Some(LintAlias { name, silent }) = depr { - let LintGroup { lint_ids, .. } = self.lint_groups.get(name).unwrap(); - return if *silent { - CheckLintNameResult::Ok(lint_ids) - } else { - CheckLintNameResult::Tool(lint_ids, Some((*name).to_string())) - }; + if let &Some(LintAlias { name, silent: false }) = depr { + CheckLintNameResult::Tool(lint_ids, Some(name.to_string())) + } else { + CheckLintNameResult::Ok(lint_ids) } - CheckLintNameResult::Ok(lint_ids) } }, Some(Id(id)) => CheckLintNameResult::Ok(slice::from_ref(id)), @@ -413,7 +403,7 @@ impl LintStore { fn no_lint_suggestion(&self, lint_name: &str, tool_name: &str) -> CheckLintNameResult<'_> { let name_lower = lint_name.to_lowercase(); - if lint_name.chars().any(char::is_uppercase) && self.find_lints(&name_lower).is_ok() { + if lint_name.chars().any(char::is_uppercase) && self.find_lints(&name_lower).is_some() { // First check if the lint name is (partly) in upper case instead of lower case... return CheckLintNameResult::NoLint(Some((Symbol::intern(&name_lower), false))); } @@ -456,18 +446,8 @@ impl LintStore { None => match self.lint_groups.get(&*complete_name) { // Now we are sure, that this lint exists nowhere None => self.no_lint_suggestion(lint_name, tool_name), - Some(LintGroup { lint_ids, depr, .. }) => { - // Reaching this would be weird, but let's cover this case anyway - if let Some(LintAlias { name, silent }) = depr { - let LintGroup { lint_ids, .. } = self.lint_groups.get(name).unwrap(); - if *silent { - CheckLintNameResult::Tool(lint_ids, Some(complete_name)) - } else { - CheckLintNameResult::Tool(lint_ids, Some((*name).to_string())) - } - } else { - CheckLintNameResult::Tool(lint_ids, Some(complete_name)) - } + Some(LintGroup { lint_ids, .. }) => { + CheckLintNameResult::Tool(lint_ids, Some(complete_name)) } }, Some(Id(id)) => CheckLintNameResult::Tool(slice::from_ref(id), Some(complete_name)), @@ -573,7 +553,7 @@ pub trait LintContext { } /// This returns the lint level for the given lint at the current location. - fn get_lint_level(&self, lint: &'static Lint) -> Level; + fn get_lint_level(&self, lint: &'static Lint) -> LevelAndSource; /// This function can be used to manually fulfill an expectation. This can /// be used for lints which contain several spans, and should be suppressed, @@ -642,8 +622,8 @@ impl<'tcx> LintContext for LateContext<'tcx> { } } - fn get_lint_level(&self, lint: &'static Lint) -> Level { - self.tcx.lint_level_at_node(lint, self.last_node_with_lint_attrs).0 + fn get_lint_level(&self, lint: &'static Lint) -> LevelAndSource { + self.tcx.lint_level_at_node(lint, self.last_node_with_lint_attrs) } } @@ -663,8 +643,8 @@ impl LintContext for EarlyContext<'_> { self.builder.opt_span_lint(lint, span.map(|s| s.into()), decorate) } - fn get_lint_level(&self, lint: &'static Lint) -> Level { - self.builder.lint_level(lint).0 + fn get_lint_level(&self, lint: &'static Lint) -> LevelAndSource { + self.builder.lint_level(lint) } } @@ -832,7 +812,10 @@ impl<'tcx> LateContext<'tcx> { return Ok(()); } - self.path.push(Symbol::intern(&disambiguated_data.data.to_string())); + self.path.push(match disambiguated_data.data.get_opt_name() { + Some(sym) => sym, + None => Symbol::intern(&disambiguated_data.data.to_string()), + }); Ok(()) } @@ -856,11 +839,11 @@ impl<'tcx> LateContext<'tcx> { &self, self_ty: Ty<'tcx>, trait_id: DefId, - name: &str, + name: Symbol, ) -> Option> { let tcx = self.tcx; tcx.associated_items(trait_id) - .find_by_name_and_kind(tcx, Ident::from_str(name), ty::AssocKind::Type, trait_id) + .find_by_ident_and_kind(tcx, Ident::with_dummy_span(name), ty::AssocTag::Type, trait_id) .and_then(|assoc| { let proj = Ty::new_projection(tcx, assoc.def_id, [self_ty]); tcx.try_normalize_erasing_regions(self.typing_env(), proj).ok() diff --git a/compiler/rustc_lint/src/deref_into_dyn_supertrait.rs b/compiler/rustc_lint/src/deref_into_dyn_supertrait.rs index ec8f84415759f..5989ef9519cd7 100644 --- a/compiler/rustc_lint/src/deref_into_dyn_supertrait.rs +++ b/compiler/rustc_lint/src/deref_into_dyn_supertrait.rs @@ -69,7 +69,7 @@ impl<'tcx> LateLintPass<'tcx> for DerefIntoDynSupertrait { && let ty::Dynamic(data, _, ty::Dyn) = self_ty.kind() && let Some(self_principal) = data.principal() // `::Target` is `dyn target_principal` - && let Some(target) = cx.get_associated_type(self_ty, did, "Target") + && let Some(target) = cx.get_associated_type(self_ty, did, sym::Target) && let ty::Dynamic(data, _, ty::Dyn) = target.kind() && let Some(target_principal) = data.principal() // `target_principal` is a supertrait of `t_principal` diff --git a/compiler/rustc_lint/src/errors.rs b/compiler/rustc_lint/src/errors.rs index d109a5c90305e..586e55c8055c9 100644 --- a/compiler/rustc_lint/src/errors.rs +++ b/compiler/rustc_lint/src/errors.rs @@ -1,5 +1,5 @@ use rustc_errors::codes::*; -use rustc_errors::{Diag, EmissionGuarantee, SubdiagMessageOp, Subdiagnostic}; +use rustc_errors::{Diag, EmissionGuarantee, Subdiagnostic}; use rustc_macros::{Diagnostic, Subdiagnostic}; use rustc_session::lint::Level; use rustc_span::{Span, Symbol}; @@ -26,11 +26,7 @@ pub(crate) enum OverruledAttributeSub { } impl Subdiagnostic for OverruledAttributeSub { - fn add_to_diag_with>( - self, - diag: &mut Diag<'_, G>, - _f: &F, - ) { + fn add_to_diag(self, diag: &mut Diag<'_, G>) { match self { OverruledAttributeSub::DefaultSource { id } => { diag.note(fluent::lint_default_source); diff --git a/compiler/rustc_lint/src/for_loops_over_fallibles.rs b/compiler/rustc_lint/src/for_loops_over_fallibles.rs index 757fc1f58bd51..a56b753bda726 100644 --- a/compiler/rustc_lint/src/for_loops_over_fallibles.rs +++ b/compiler/rustc_lint/src/for_loops_over_fallibles.rs @@ -49,6 +49,8 @@ impl<'tcx> LateLintPass<'tcx> for ForLoopsOverFallibles { fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) { let Some((pat, arg)) = extract_for_loop(expr) else { return }; + let arg_span = arg.span.source_callsite(); + let ty = cx.typeck_results().expr_ty(arg); let (adt, args, ref_mutability) = match ty.kind() { @@ -78,27 +80,27 @@ impl<'tcx> LateLintPass<'tcx> for ForLoopsOverFallibles { && let Ok(recv_snip) = cx.sess().source_map().span_to_snippet(recv.span) { ForLoopsOverFalliblesLoopSub::RemoveNext { - suggestion: recv.span.between(arg.span.shrink_to_hi()), + suggestion: recv.span.between(arg_span.shrink_to_hi()), recv_snip, } } else { ForLoopsOverFalliblesLoopSub::UseWhileLet { start_span: expr.span.with_hi(pat.span.lo()), - end_span: pat.span.between(arg.span), + end_span: pat.span.between(arg_span), var, } }; let question_mark = suggest_question_mark(cx, adt, args, expr.span) - .then(|| ForLoopsOverFalliblesQuestionMark { suggestion: arg.span.shrink_to_hi() }); + .then(|| ForLoopsOverFalliblesQuestionMark { suggestion: arg_span.shrink_to_hi() }); let suggestion = ForLoopsOverFalliblesSuggestion { var, start_span: expr.span.with_hi(pat.span.lo()), - end_span: pat.span.between(arg.span), + end_span: pat.span.between(arg_span), }; cx.emit_span_lint( FOR_LOOPS_OVER_FALLIBLES, - arg.span, + arg_span, ForLoopsOverFalliblesDiag { article, ref_prefix, ty, sub, question_mark, suggestion }, ); } diff --git a/compiler/rustc_lint/src/foreign_modules.rs b/compiler/rustc_lint/src/foreign_modules.rs index 0494c78a7a97c..d0668794198ac 100644 --- a/compiler/rustc_lint/src/foreign_modules.rs +++ b/compiler/rustc_lint/src/foreign_modules.rs @@ -104,7 +104,7 @@ impl ClashingExternDeclarations { /// for the item, return its HirId without updating the set. fn insert(&mut self, tcx: TyCtxt<'_>, fi: hir::ForeignItemId) -> Option { let did = fi.owner_id.to_def_id(); - let instance = Instance::new(did, ty::List::identity_for_item(tcx, did)); + let instance = Instance::new_raw(did, ty::List::identity_for_item(tcx, did)); let name = Symbol::intern(tcx.symbol_name(instance).name); if let Some(&existing_id) = self.seen_decls.get(&name) { // Avoid updating the map with the new entry when we do find a collision. We want to diff --git a/compiler/rustc_lint/src/if_let_rescope.rs b/compiler/rustc_lint/src/if_let_rescope.rs index 39ea8d8e3246c..a9b04511c6b47 100644 --- a/compiler/rustc_lint/src/if_let_rescope.rs +++ b/compiler/rustc_lint/src/if_let_rescope.rs @@ -3,9 +3,7 @@ use std::ops::ControlFlow; use hir::intravisit::{self, Visitor}; use rustc_ast::Recovered; -use rustc_errors::{ - Applicability, Diag, EmissionGuarantee, SubdiagMessageOp, Subdiagnostic, SuggestionStyle, -}; +use rustc_errors::{Applicability, Diag, EmissionGuarantee, Subdiagnostic, SuggestionStyle}; use rustc_hir::{self as hir, HirIdSet}; use rustc_macros::{LintDiagnostic, Subdiagnostic}; use rustc_middle::ty::adjustment::Adjust; @@ -327,11 +325,7 @@ struct IfLetRescopeRewrite { } impl Subdiagnostic for IfLetRescopeRewrite { - fn add_to_diag_with>( - self, - diag: &mut Diag<'_, G>, - f: &F, - ) { + fn add_to_diag(self, diag: &mut Diag<'_, G>) { let mut suggestions = vec![]; for match_head in self.match_heads { match match_head { @@ -360,7 +354,7 @@ impl Subdiagnostic for IfLetRescopeRewrite { .chain(repeat('}').take(closing_brackets.count)) .collect(), )); - let msg = f(diag, crate::fluent_generated::lint_suggestion); + let msg = diag.eagerly_translate(crate::fluent_generated::lint_suggestion); diag.multipart_suggestion_with_style( msg, suggestions, diff --git a/compiler/rustc_lint/src/impl_trait_overcaptures.rs b/compiler/rustc_lint/src/impl_trait_overcaptures.rs index c2404a7b84326..a8f45d043be97 100644 --- a/compiler/rustc_lint/src/impl_trait_overcaptures.rs +++ b/compiler/rustc_lint/src/impl_trait_overcaptures.rs @@ -15,7 +15,8 @@ use rustc_middle::ty::relate::{ Relate, RelateResult, TypeRelation, structurally_relate_consts, structurally_relate_tys, }; use rustc_middle::ty::{ - self, Ty, TyCtxt, TypeSuperVisitable, TypeVisitable, TypeVisitableExt, TypeVisitor, + self, Ty, TyCtxt, TypeFoldable, TypeSuperVisitable, TypeVisitable, TypeVisitableExt, + TypeVisitor, }; use rustc_middle::{bug, span_bug}; use rustc_session::lint::FutureIncompatibilityReason; @@ -41,7 +42,7 @@ declare_lint! { /// /// ### Example /// - /// ```rust,compile_fail + /// ```rust,compile_fail,edition2021 /// # #![deny(impl_trait_overcaptures)] /// # use std::fmt::Display; /// let mut x = vec![]; @@ -209,7 +210,7 @@ where VarFn: FnOnce() -> FxHashMap, OutlivesFn: FnOnce() -> OutlivesEnvironment<'tcx>, { - fn visit_binder>>(&mut self, t: &ty::Binder<'tcx, T>) { + fn visit_binder>>(&mut self, t: &ty::Binder<'tcx, T>) { // When we get into a binder, we need to add its own bound vars to the scope. let mut added = vec![]; for arg in t.bound_vars() { @@ -392,7 +393,7 @@ where } _ => { self.tcx.dcx().span_delayed_bug( - self.tcx.hir().span(arg.hir_id()), + self.tcx.hir_span(arg.hir_id()), "no valid for captured arg", ); } @@ -461,7 +462,7 @@ fn extract_def_id_from_arg<'tcx>( arg: ty::GenericArg<'tcx>, ) -> DefId { match arg.unpack() { - ty::GenericArgKind::Lifetime(re) => match *re { + ty::GenericArgKind::Lifetime(re) => match re.kind() { ty::ReEarlyParam(ebr) => generics.region_param(ebr, tcx).def_id, ty::ReBound( _, @@ -530,7 +531,7 @@ impl<'tcx> TypeRelation> for FunctionalVariances<'tcx> { a: ty::Region<'tcx>, _: ty::Region<'tcx>, ) -> RelateResult<'tcx, ty::Region<'tcx>> { - let def_id = match *a { + let def_id = match a.kind() { ty::ReEarlyParam(ebr) => self.generics.region_param(ebr, self.tcx).def_id, ty::ReBound( _, diff --git a/compiler/rustc_lint/src/levels.rs b/compiler/rustc_lint/src/levels.rs index 8718fb807ecf5..00775647ac61a 100644 --- a/compiler/rustc_lint/src/levels.rs +++ b/compiler/rustc_lint/src/levels.rs @@ -1,10 +1,11 @@ use rustc_ast::attr::AttributeExt; use rustc_ast_pretty::pprust; -use rustc_data_structures::fx::{FxIndexMap, FxIndexSet}; +use rustc_data_structures::fx::{FxHashSet, FxIndexMap}; +use rustc_data_structures::unord::UnordSet; use rustc_errors::{Diag, LintDiagnostic, MultiSpan}; use rustc_feature::{Features, GateIssue}; +use rustc_hir::HirId; use rustc_hir::intravisit::{self, Visitor}; -use rustc_hir::{CRATE_HIR_ID, HirId}; use rustc_index::IndexVec; use rustc_middle::bug; use rustc_middle::hir::nested_filter; @@ -84,10 +85,10 @@ impl LintLevelSets { ) -> LevelAndSource { let lint = LintId::of(lint); let (level, mut src) = self.raw_lint_id_level(lint, idx, aux); - let level = reveal_actual_level(level, &mut src, sess, lint, |id| { + let (level, lint_id) = reveal_actual_level(level, &mut src, sess, lint, |id| { self.raw_lint_id_level(id, idx, aux) }); - (level, src) + LevelAndSource { level, lint_id, src } } fn raw_lint_id_level( @@ -95,17 +96,17 @@ impl LintLevelSets { id: LintId, mut idx: LintStackIndex, aux: Option<&FxIndexMap>, - ) -> (Option, LintLevelSource) { + ) -> (Option<(Level, Option)>, LintLevelSource) { if let Some(specs) = aux - && let Some(&(level, src)) = specs.get(&id) + && let Some(&LevelAndSource { level, lint_id, src }) = specs.get(&id) { - return (Some(level), src); + return (Some((level, lint_id)), src); } loop { let LintSet { ref specs, parent } = self.list[idx]; - if let Some(&(level, src)) = specs.get(&id) { - return (Some(level), src); + if let Some(&LevelAndSource { level, lint_id, src }) = specs.get(&id) { + return (Some((level, lint_id)), src); } if idx == COMMAND_LINE { return (None, LintLevelSource::Default); @@ -115,12 +116,11 @@ impl LintLevelSets { } } -fn lints_that_dont_need_to_run(tcx: TyCtxt<'_>, (): ()) -> FxIndexSet { +fn lints_that_dont_need_to_run(tcx: TyCtxt<'_>, (): ()) -> UnordSet { let store = unerased_lint_store(&tcx.sess); + let root_map = tcx.shallow_lint_levels_on(hir::CRATE_OWNER_ID); - let map = tcx.shallow_lint_levels_on(rustc_hir::CRATE_OWNER_ID); - - let dont_need_to_run: FxIndexSet = store + let mut dont_need_to_run: FxHashSet = store .get_lints() .into_iter() .filter(|lint| { @@ -129,24 +129,31 @@ fn lints_that_dont_need_to_run(tcx: TyCtxt<'_>, (): ()) -> FxIndexSet { lint.future_incompatible.is_some_and(|fut| fut.reason.has_future_breakage()); !has_future_breakage && !lint.eval_always }) - .filter_map(|lint| { - let lint_level = map.lint_level_id_at_node(tcx, LintId::of(lint), CRATE_HIR_ID); - if matches!(lint_level, (Level::Allow, ..)) - || (matches!(lint_level, (.., LintLevelSource::Default))) - && lint.default_level(tcx.sess.edition()) == Level::Allow - { - Some(LintId::of(lint)) - } else { - None - } + .filter(|lint| { + let lint_level = + root_map.lint_level_id_at_node(tcx, LintId::of(lint), hir::CRATE_HIR_ID); + // Only include lints that are allowed at crate root or by default. + matches!(lint_level.level, Level::Allow) + || (matches!(lint_level.src, LintLevelSource::Default) + && lint.default_level(tcx.sess.edition()) == Level::Allow) }) + .map(|lint| LintId::of(*lint)) .collect(); - let mut visitor = LintLevelMaximum { tcx, dont_need_to_run }; - visitor.process_opts(); - tcx.hir_walk_attributes(&mut visitor); + for owner in tcx.hir_crate_items(()).owners() { + let map = tcx.shallow_lint_levels_on(owner); - visitor.dont_need_to_run + // All lints that appear with a non-allow level must be run. + for (_, specs) in map.specs.iter() { + for (lint, level_and_source) in specs.iter() { + if !matches!(level_and_source.level, Level::Allow) { + dont_need_to_run.remove(lint); + } + } + } + } + + dont_need_to_run.into() } #[instrument(level = "trace", skip(tcx), ret)] @@ -340,82 +347,6 @@ impl<'tcx> Visitor<'tcx> for LintLevelsBuilder<'_, LintLevelQueryMap<'tcx>> { } } -/// Visitor with the only function of visiting every item-like in a crate and -/// computing the highest level that every lint gets put to. -/// -/// E.g., if a crate has a global #![allow(lint)] attribute, but a single item -/// uses #[warn(lint)], this visitor will set that lint level as `Warn` -struct LintLevelMaximum<'tcx> { - tcx: TyCtxt<'tcx>, - /// The actual list of detected lints. - dont_need_to_run: FxIndexSet, -} - -impl<'tcx> LintLevelMaximum<'tcx> { - fn process_opts(&mut self) { - let store = unerased_lint_store(self.tcx.sess); - for (lint_group, level) in &self.tcx.sess.opts.lint_opts { - if *level != Level::Allow { - let Ok(lints) = store.find_lints(lint_group) else { - return; - }; - for lint in lints { - self.dont_need_to_run.swap_remove(&lint); - } - } - } - } -} - -impl<'tcx> Visitor<'tcx> for LintLevelMaximum<'tcx> { - type NestedFilter = nested_filter::All; - - fn maybe_tcx(&mut self) -> Self::MaybeTyCtxt { - self.tcx - } - - /// FIXME(blyxyas): In a future revision, we should also graph #![allow]s, - /// but that is handled with more care - fn visit_attribute(&mut self, attribute: &'tcx hir::Attribute) { - if matches!( - Level::from_attr(attribute), - Some( - Level::Warn - | Level::Deny - | Level::Forbid - | Level::Expect(..) - | Level::ForceWarn(..), - ) - ) { - let store = unerased_lint_store(self.tcx.sess); - // Lint attributes are always a metalist inside a - // metalist (even with just one lint). - let Some(meta_item_list) = attribute.meta_item_list() else { return }; - - for meta_list in meta_item_list { - // Convert Path to String - let Some(meta_item) = meta_list.meta_item() else { return }; - let ident: &str = &meta_item - .path - .segments - .iter() - .map(|segment| segment.ident.as_str()) - .collect::>() - .join("::"); - let Ok(lints) = store.find_lints( - // Lint attributes can only have literals - ident, - ) else { - return; - }; - for lint in lints { - self.dont_need_to_run.swap_remove(&lint); - } - } - } - } -} - pub struct LintLevelsBuilder<'s, P> { sess: &'s Session, features: &'s Features, @@ -450,6 +381,19 @@ impl<'s> LintLevelsBuilder<'s, TopDown> { builder } + pub fn crate_root( + sess: &'s Session, + features: &'s Features, + lint_added_lints: bool, + store: &'s LintStore, + registered_tools: &'s RegisteredTools, + crate_attrs: &[ast::Attribute], + ) -> Self { + let mut builder = Self::new(sess, features, lint_added_lints, store, registered_tools); + builder.add(crate_attrs, true, None); + builder + } + fn process_command_line(&mut self) { self.provider.cur = self .provider @@ -528,9 +472,7 @@ impl<'s, P: LintLevelsProvider> LintLevelsBuilder<'s, P> { for &(ref lint_name, level) in &self.sess.opts.lint_opts { // Checks the validity of lint names derived from the command line. let (tool_name, lint_name_only) = parse_lint_and_tool_name(lint_name); - if lint_name_only == crate::WARNINGS.name_lower() - && matches!(level, Level::ForceWarn(_)) - { + if lint_name_only == crate::WARNINGS.name_lower() && matches!(level, Level::ForceWarn) { self.sess .dcx() .emit_err(UnsupportedGroup { lint_group: crate::WARNINGS.name_lower() }); @@ -573,24 +515,23 @@ impl<'s, P: LintLevelsProvider> LintLevelsBuilder<'s, P> { _ => {} }; - let orig_level = level; let lint_flag_val = Symbol::intern(lint_name); - let Ok(ids) = self.store.find_lints(lint_name) else { + let Some(ids) = self.store.find_lints(lint_name) else { // errors already handled above continue; }; - for id in ids { + for &id in ids { // ForceWarn and Forbid cannot be overridden - if let Some((Level::ForceWarn(_) | Level::Forbid, _)) = + if let Some(LevelAndSource { level: Level::ForceWarn | Level::Forbid, .. }) = self.current_specs().get(&id) { continue; } if self.check_gated_lint(id, DUMMY_SP, true) { - let src = LintLevelSource::CommandLine(lint_flag_val, orig_level); - self.insert(id, (level, src)); + let src = LintLevelSource::CommandLine(lint_flag_val, level); + self.insert(id, LevelAndSource { level, lint_id: None, src }); } } } @@ -599,8 +540,9 @@ impl<'s, P: LintLevelsProvider> LintLevelsBuilder<'s, P> { /// Attempts to insert the `id` to `level_src` map entry. If unsuccessful /// (e.g. if a forbid was already inserted on the same scope), then emits a /// diagnostic with no change to `specs`. - fn insert_spec(&mut self, id: LintId, (level, src): LevelAndSource) { - let (old_level, old_src) = self.provider.get_lint_level(id.lint, self.sess); + fn insert_spec(&mut self, id: LintId, LevelAndSource { level, lint_id, src }: LevelAndSource) { + let LevelAndSource { level: old_level, src: old_src, .. } = + self.provider.get_lint_level(id.lint, self.sess); // Setting to a non-forbid level is an error if the lint previously had // a forbid level. Note that this is not necessarily true even with a @@ -672,7 +614,7 @@ impl<'s, P: LintLevelsProvider> LintLevelsBuilder<'s, P> { // The lint `unfulfilled_lint_expectations` can't be expected, as it would suppress itself. // Handling expectations of this lint would add additional complexity with little to no // benefit. The expect level for this lint will therefore be ignored. - if let Level::Expect(_) = level + if let Level::Expect = level && id == LintId::of(UNFULFILLED_LINT_EXPECTATIONS) { return; @@ -680,13 +622,16 @@ impl<'s, P: LintLevelsProvider> LintLevelsBuilder<'s, P> { match (old_level, level) { // If the new level is an expectation store it in `ForceWarn` - (Level::ForceWarn(_), Level::Expect(expectation_id)) => { - self.insert(id, (Level::ForceWarn(Some(expectation_id)), old_src)) + (Level::ForceWarn, Level::Expect) => { + self.insert(id, LevelAndSource { level: Level::ForceWarn, lint_id, src: old_src }) } // Keep `ForceWarn` level but drop the expectation - (Level::ForceWarn(_), _) => self.insert(id, (Level::ForceWarn(None), old_src)), + (Level::ForceWarn, _) => self.insert( + id, + LevelAndSource { level: Level::ForceWarn, lint_id: None, src: old_src }, + ), // Set the lint level as normal - _ => self.insert(id, (level, src)), + _ => self.insert(id, LevelAndSource { level, lint_id, src }), }; } @@ -701,7 +646,11 @@ impl<'s, P: LintLevelsProvider> LintLevelsBuilder<'s, P> { if attr.has_name(sym::automatically_derived) { self.insert( LintId::of(SINGLE_USE_LIFETIMES), - (Level::Allow, LintLevelSource::Default), + LevelAndSource { + level: Level::Allow, + lint_id: None, + src: LintLevelSource::Default, + }, ); continue; } @@ -712,15 +661,22 @@ impl<'s, P: LintLevelsProvider> LintLevelsBuilder<'s, P> { .meta_item_list() .is_some_and(|l| ast::attr::list_contains_name(&l, sym::hidden)) { - self.insert(LintId::of(MISSING_DOCS), (Level::Allow, LintLevelSource::Default)); + self.insert( + LintId::of(MISSING_DOCS), + LevelAndSource { + level: Level::Allow, + lint_id: None, + src: LintLevelSource::Default, + }, + ); continue; } - let level = match Level::from_attr(attr) { + let (level, lint_id) = match Level::from_attr(attr) { None => continue, // This is the only lint level with a `LintExpectationId` that can be created from // an attribute. - Some(Level::Expect(unstable_id)) if let Some(hir_id) = source_hir_id => { + Some((Level::Expect, Some(unstable_id))) if let Some(hir_id) = source_hir_id => { let LintExpectationId::Unstable { lint_index: None, attr_id: _ } = unstable_id else { bug!("stable id Level::from_attr") @@ -732,9 +688,9 @@ impl<'s, P: LintLevelsProvider> LintLevelsBuilder<'s, P> { lint_index: None, }; - Level::Expect(stable_id) + (Level::Expect, Some(stable_id)) } - Some(lvl) => lvl, + Some((lvl, id)) => (lvl, id), }; let Some(mut metas) = attr.meta_item_list() else { continue }; @@ -782,13 +738,10 @@ impl<'s, P: LintLevelsProvider> LintLevelsBuilder<'s, P> { } for (lint_index, li) in metas.iter_mut().enumerate() { - let level = match level { - Level::Expect(mut id) => { - id.set_lint_index(Some(lint_index as u16)); - Level::Expect(id) - } - level => level, - }; + let mut lint_id = lint_id; + if let Some(id) = &mut lint_id { + id.set_lint_index(Some(lint_index as u16)); + } let sp = li.span(); let meta_item = match li { @@ -920,7 +873,7 @@ impl<'s, P: LintLevelsProvider> LintLevelsBuilder<'s, P> { let src = LintLevelSource::Node { name, span: sp, reason }; for &id in ids { if self.check_gated_lint(id, sp, false) { - self.insert_spec(id, (level, src)); + self.insert_spec(id, LevelAndSource { level, lint_id, src }); } } @@ -929,7 +882,7 @@ impl<'s, P: LintLevelsProvider> LintLevelsBuilder<'s, P> { // overriding the lint level but instead add an expectation that can't be // fulfilled. The lint message will include an explanation, that the // `unfulfilled_lint_expectations` lint can't be expected. - if let Level::Expect(expect_id) = level { + if let (Level::Expect, Some(expect_id)) = (level, lint_id) { // The `unfulfilled_lint_expectations` lint is not part of any lint // groups. Therefore. we only need to check the slice if it contains a // single lint. @@ -951,7 +904,7 @@ impl<'s, P: LintLevelsProvider> LintLevelsBuilder<'s, P> { } if self.lint_added_lints && !is_crate_node { - for (id, &(level, ref src)) in self.current_specs().iter() { + for (id, &LevelAndSource { level, ref src, .. }) in self.current_specs().iter() { if !id.lint.crate_level_only { continue; } @@ -989,10 +942,10 @@ impl<'s, P: LintLevelsProvider> LintLevelsBuilder<'s, P> { if self.lint_added_lints { let lint = builtin::UNKNOWN_LINTS; - let (level, src) = self.lint_level(builtin::UNKNOWN_LINTS); + let level = self.lint_level(builtin::UNKNOWN_LINTS); // FIXME: make this translatable #[allow(rustc::diagnostic_outside_of_impl)] - lint_level(self.sess, lint, level, src, Some(span.into()), |lint| { + lint_level(self.sess, lint, level, Some(span.into()), |lint| { lint.primary_message(fluent::lint_unknown_gated_lint); lint.arg("name", lint_id.lint.name_lower()); lint.note(fluent::lint_note); @@ -1027,8 +980,8 @@ impl<'s, P: LintLevelsProvider> LintLevelsBuilder<'s, P> { span: Option, decorate: impl for<'a, 'b> FnOnce(&'b mut Diag<'a, ()>), ) { - let (level, src) = self.lint_level(lint); - lint_level(self.sess, lint, level, src, span, decorate) + let level = self.lint_level(lint); + lint_level(self.sess, lint, level, span, decorate) } #[track_caller] @@ -1038,16 +991,16 @@ impl<'s, P: LintLevelsProvider> LintLevelsBuilder<'s, P> { span: MultiSpan, decorate: impl for<'a> LintDiagnostic<'a, ()>, ) { - let (level, src) = self.lint_level(lint); - lint_level(self.sess, lint, level, src, Some(span), |lint| { + let level = self.lint_level(lint); + lint_level(self.sess, lint, level, Some(span), |lint| { decorate.decorate_lint(lint); }); } #[track_caller] pub fn emit_lint(&self, lint: &'static Lint, decorate: impl for<'a> LintDiagnostic<'a, ()>) { - let (level, src) = self.lint_level(lint); - lint_level(self.sess, lint, level, src, None, |lint| { + let level = self.lint_level(lint); + lint_level(self.sess, lint, level, None, |lint| { decorate.decorate_lint(lint); }); } diff --git a/compiler/rustc_lint/src/lib.rs b/compiler/rustc_lint/src/lib.rs index 25878c7ac8144..b910d6a138e11 100644 --- a/compiler/rustc_lint/src/lib.rs +++ b/compiler/rustc_lint/src/lib.rs @@ -21,7 +21,7 @@ // tidy-alphabetical-start #![allow(internal_features)] -#![cfg_attr(doc, recursion_limit = "256")] // FIXME(nnethercote): will be removed by #124141 +#![cfg_attr(bootstrap, feature(let_chains))] #![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")] #![doc(rust_logo)] #![feature(array_windows)] @@ -29,7 +29,6 @@ #![feature(box_patterns)] #![feature(if_let_guard)] #![feature(iter_order_by)] -#![feature(let_chains)] #![feature(rustc_attrs)] #![feature(rustdoc_internals)] #![feature(try_blocks)] @@ -37,6 +36,7 @@ mod async_closures; mod async_fn_in_trait; +mod autorefs; pub mod builtin; mod context; mod dangling; @@ -84,6 +84,7 @@ mod utils; use async_closures::AsyncClosureUsage; use async_fn_in_trait::AsyncFnInTrait; +use autorefs::*; use builtin::*; use dangling::*; use default_could_be_derived::DefaultCouldBeDerived; @@ -125,11 +126,10 @@ use unused::*; #[rustfmt::skip] pub use builtin::{MissingDoc, SoftLints}; -pub use context::{ - CheckLintNameResult, EarlyContext, FindLintError, LateContext, LintContext, LintStore, -}; +pub use context::{CheckLintNameResult, EarlyContext, LateContext, LintContext, LintStore}; pub use early::{EarlyCheckNode, check_ast_node}; pub use late::{check_crate, late_lint_mod, unerased_lint_store}; +pub use levels::LintLevelsBuilder; pub use passes::{EarlyLintPass, LateLintPass}; pub use rustc_session::lint::Level::{self, *}; pub use rustc_session::lint::{ @@ -202,6 +202,7 @@ late_lint_methods!( PathStatements: PathStatements, LetUnderscore: LetUnderscore, InvalidReferenceCasting: InvalidReferenceCasting, + ImplicitAutorefs: ImplicitAutorefs, // Depends on referenced function signatures in expressions UnusedResults: UnusedResults, UnitBindings: UnitBindings, @@ -605,6 +606,16 @@ fn register_builtins(store: &mut LintStore) { "converted into hard error, see issue #127323 \ for more information", ); + store.register_removed( + "undefined_naked_function_abi", + "converted into hard error, see PR #139001 \ + for more information", + ); + store.register_removed( + "abi_unsupported_vector_types", + "converted into hard error, \ + see for more information", + ); } fn register_internals(store: &mut LintStore) { diff --git a/compiler/rustc_lint/src/lints.rs b/compiler/rustc_lint/src/lints.rs index 55d010e6d34aa..487184b836a43 100644 --- a/compiler/rustc_lint/src/lints.rs +++ b/compiler/rustc_lint/src/lints.rs @@ -6,7 +6,7 @@ use rustc_abi::ExternAbi; use rustc_errors::codes::*; use rustc_errors::{ Applicability, Diag, DiagArgValue, DiagMessage, DiagStyledString, ElidedLifetimeInPathSubdiag, - EmissionGuarantee, LintDiagnostic, MultiSpan, SubdiagMessageOp, Subdiagnostic, SuggestionStyle, + EmissionGuarantee, LintDiagnostic, MultiSpan, Subdiagnostic, SuggestionStyle, }; use rustc_hir::def::Namespace; use rustc_hir::def_id::DefId; @@ -55,6 +55,26 @@ pub(crate) enum ShadowedIntoIterDiagSub { }, } +// autorefs.rs +#[derive(LintDiagnostic)] +#[diag(lint_implicit_unsafe_autorefs)] +#[note] +pub(crate) struct ImplicitUnsafeAutorefsDiag { + #[subdiagnostic] + pub suggestion: ImplicitUnsafeAutorefsSuggestion, +} + +#[derive(Subdiagnostic)] +#[multipart_suggestion(lint_suggestion, applicability = "maybe-incorrect")] +pub(crate) struct ImplicitUnsafeAutorefsSuggestion { + pub mutbl: &'static str, + pub deref: &'static str, + #[suggestion_part(code = "({mutbl}{deref}")] + pub start_span: Span, + #[suggestion_part(code = ")")] + pub end_span: Span, +} + // builtin.rs #[derive(LintDiagnostic)] #[diag(lint_builtin_while_true)] @@ -449,11 +469,7 @@ pub(crate) struct BuiltinUnpermittedTypeInitSub { } impl Subdiagnostic for BuiltinUnpermittedTypeInitSub { - fn add_to_diag_with>( - self, - diag: &mut Diag<'_, G>, - _f: &F, - ) { + fn add_to_diag(self, diag: &mut Diag<'_, G>) { let mut err = self.err; loop { if let Some(span) = err.span { @@ -504,16 +520,12 @@ pub(crate) struct BuiltinClashingExternSub<'a> { } impl Subdiagnostic for BuiltinClashingExternSub<'_> { - fn add_to_diag_with>( - self, - diag: &mut Diag<'_, G>, - _f: &F, - ) { + fn add_to_diag(self, diag: &mut Diag<'_, G>) { let mut expected_str = DiagStyledString::new(); expected_str.push(self.expected.fn_sig(self.tcx).to_string(), false); let mut found_str = DiagStyledString::new(); found_str.push(self.found.fn_sig(self.tcx).to_string(), true); - diag.note_expected_found(&"", expected_str, &"", found_str); + diag.note_expected_found("", expected_str, "", found_str); } } @@ -824,11 +836,7 @@ pub(crate) struct HiddenUnicodeCodepointsDiagLabels { } impl Subdiagnostic for HiddenUnicodeCodepointsDiagLabels { - fn add_to_diag_with>( - self, - diag: &mut Diag<'_, G>, - _f: &F, - ) { + fn add_to_diag(self, diag: &mut Diag<'_, G>) { for (c, span) in self.spans { diag.span_label(span, format!("{c:?}")); } @@ -842,11 +850,7 @@ pub(crate) enum HiddenUnicodeCodepointsDiagSub { // Used because of multiple multipart_suggestion and note impl Subdiagnostic for HiddenUnicodeCodepointsDiagSub { - fn add_to_diag_with>( - self, - diag: &mut Diag<'_, G>, - _f: &F, - ) { + fn add_to_diag(self, diag: &mut Diag<'_, G>) { match self { HiddenUnicodeCodepointsDiagSub::Escape { spans } => { diag.multipart_suggestion_with_style( @@ -1015,11 +1019,7 @@ pub(crate) struct NonBindingLetSub { } impl Subdiagnostic for NonBindingLetSub { - fn add_to_diag_with>( - self, - diag: &mut Diag<'_, G>, - _f: &F, - ) { + fn add_to_diag(self, diag: &mut Diag<'_, G>) { let can_suggest_binding = self.drop_fn_start_end.is_some() || !self.is_assign_desugar; if can_suggest_binding { @@ -1303,11 +1303,7 @@ pub(crate) enum NonSnakeCaseDiagSub { } impl Subdiagnostic for NonSnakeCaseDiagSub { - fn add_to_diag_with>( - self, - diag: &mut Diag<'_, G>, - _f: &F, - ) { + fn add_to_diag(self, diag: &mut Diag<'_, G>) { match self { NonSnakeCaseDiagSub::Label { span } => { diag.span_label(span, fluent::lint_label); @@ -1629,11 +1625,7 @@ pub(crate) enum OverflowingBinHexSign { } impl Subdiagnostic for OverflowingBinHexSign { - fn add_to_diag_with>( - self, - diag: &mut Diag<'_, G>, - _f: &F, - ) { + fn add_to_diag(self, diag: &mut Diag<'_, G>) { match self { OverflowingBinHexSign::Positive => { diag.note(fluent::lint_positive_note); diff --git a/compiler/rustc_lint/src/non_ascii_idents.rs b/compiler/rustc_lint/src/non_ascii_idents.rs index 66e207a451ef0..9c11fb41aa6d6 100644 --- a/compiler/rustc_lint/src/non_ascii_idents.rs +++ b/compiler/rustc_lint/src/non_ascii_idents.rs @@ -159,12 +159,13 @@ impl EarlyLintPass for NonAsciiIdents { use rustc_span::Span; use unicode_security::GeneralSecurityProfile; - let check_non_ascii_idents = cx.builder.lint_level(NON_ASCII_IDENTS).0 != Level::Allow; + let check_non_ascii_idents = cx.builder.lint_level(NON_ASCII_IDENTS).level != Level::Allow; let check_uncommon_codepoints = - cx.builder.lint_level(UNCOMMON_CODEPOINTS).0 != Level::Allow; - let check_confusable_idents = cx.builder.lint_level(CONFUSABLE_IDENTS).0 != Level::Allow; + cx.builder.lint_level(UNCOMMON_CODEPOINTS).level != Level::Allow; + let check_confusable_idents = + cx.builder.lint_level(CONFUSABLE_IDENTS).level != Level::Allow; let check_mixed_script_confusables = - cx.builder.lint_level(MIXED_SCRIPT_CONFUSABLES).0 != Level::Allow; + cx.builder.lint_level(MIXED_SCRIPT_CONFUSABLES).level != Level::Allow; if !check_non_ascii_idents && !check_uncommon_codepoints diff --git a/compiler/rustc_lint/src/nonstandard_style.rs b/compiler/rustc_lint/src/nonstandard_style.rs index 752636ccaf061..d1138e8f1fa53 100644 --- a/compiler/rustc_lint/src/nonstandard_style.rs +++ b/compiler/rustc_lint/src/nonstandard_style.rs @@ -172,20 +172,22 @@ impl EarlyLintPass for NonCamelCaseTypes { } match &it.kind { - ast::ItemKind::TyAlias(..) - | ast::ItemKind::Enum(..) - | ast::ItemKind::Struct(..) - | ast::ItemKind::Union(..) => self.check_case(cx, "type", &it.ident), - ast::ItemKind::Trait(..) => self.check_case(cx, "trait", &it.ident), - ast::ItemKind::TraitAlias(..) => self.check_case(cx, "trait alias", &it.ident), + ast::ItemKind::TyAlias(box ast::TyAlias { ident, .. }) + | ast::ItemKind::Enum(ident, ..) + | ast::ItemKind::Struct(ident, ..) + | ast::ItemKind::Union(ident, ..) => self.check_case(cx, "type", ident), + ast::ItemKind::Trait(box ast::Trait { ident, .. }) => { + self.check_case(cx, "trait", ident) + } + ast::ItemKind::TraitAlias(ident, _, _) => self.check_case(cx, "trait alias", ident), // N.B. This check is only for inherent associated types, so that we don't lint against // trait impls where we should have warned for the trait definition already. ast::ItemKind::Impl(box ast::Impl { of_trait: None, items, .. }) => { for it in items { // FIXME: this doesn't respect `#[allow(..)]` on the item itself. - if let ast::AssocItemKind::Type(..) = it.kind { - self.check_case(cx, "associated type", &it.ident); + if let ast::AssocItemKind::Type(alias) = &it.kind { + self.check_case(cx, "associated type", &alias.ident); } } } @@ -194,8 +196,8 @@ impl EarlyLintPass for NonCamelCaseTypes { } fn check_trait_item(&mut self, cx: &EarlyContext<'_>, it: &ast::AssocItem) { - if let ast::AssocItemKind::Type(..) = it.kind { - self.check_case(cx, "associated type", &it.ident); + if let ast::AssocItemKind::Type(alias) = &it.kind { + self.check_case(cx, "associated type", &alias.ident); } } @@ -420,12 +422,22 @@ impl<'tcx> LateLintPass<'tcx> for NonSnakeCase { } } + fn check_ty(&mut self, cx: &LateContext<'_>, ty: &hir::Ty<'_, hir::AmbigArg>) { + if let hir::TyKind::BareFn(hir::BareFnTy { param_idents, .. }) = &ty.kind { + for param_ident in *param_idents { + if let Some(param_ident) = param_ident { + self.check_snake_case(cx, "variable", param_ident); + } + } + } + } + fn check_trait_item(&mut self, cx: &LateContext<'_>, item: &hir::TraitItem<'_>) { - if let hir::TraitItemKind::Fn(_, hir::TraitFn::Required(pnames)) = item.kind { + if let hir::TraitItemKind::Fn(_, hir::TraitFn::Required(param_idents)) = item.kind { self.check_snake_case(cx, "trait method", &item.ident); - for param_name in pnames { - if let Some(param_name) = param_name { - self.check_snake_case(cx, "variable", param_name); + for param_ident in param_idents { + if let Some(param_ident) = param_ident { + self.check_snake_case(cx, "variable", param_ident); } } } diff --git a/compiler/rustc_lint/src/shadowed_into_iter.rs b/compiler/rustc_lint/src/shadowed_into_iter.rs index 571cab934fd6a..00fa0499556cb 100644 --- a/compiler/rustc_lint/src/shadowed_into_iter.rs +++ b/compiler/rustc_lint/src/shadowed_into_iter.rs @@ -1,4 +1,4 @@ -use rustc_hir as hir; +use rustc_hir::{self as hir, LangItem}; use rustc_middle::ty::{self, Ty}; use rustc_session::lint::FutureIncompatibilityReason; use rustc_session::{declare_lint, impl_lint_pass}; @@ -81,7 +81,7 @@ impl<'tcx> LateLintPass<'tcx> for ShadowedIntoIter { let Some(method_def_id) = cx.typeck_results().type_dependent_def_id(expr.hir_id) else { return; }; - if Some(method_def_id) != cx.tcx.lang_items().into_iter_fn() { + if !cx.tcx.is_lang_item(method_def_id, LangItem::IntoIterIntoIter) { return; } diff --git a/compiler/rustc_lint/src/static_mut_refs.rs b/compiler/rustc_lint/src/static_mut_refs.rs index 50021157ddab7..4dda3c7951b87 100644 --- a/compiler/rustc_lint/src/static_mut_refs.rs +++ b/compiler/rustc_lint/src/static_mut_refs.rs @@ -51,7 +51,7 @@ declare_lint! { /// This lint is "warn" by default on editions up to 2021, in 2024 is "deny". pub STATIC_MUT_REFS, Warn, - "shared references or mutable references of mutable static is discouraged", + "creating a shared reference to mutable static", @future_incompatible = FutureIncompatibleInfo { reason: FutureIncompatibilityReason::EditionError(Edition::Edition2024), reference: "", diff --git a/compiler/rustc_lint/src/types.rs b/compiler/rustc_lint/src/types.rs index 6966985bdf059..5f1f1ed5db491 100644 --- a/compiler/rustc_lint/src/types.rs +++ b/compiler/rustc_lint/src/types.rs @@ -1,7 +1,9 @@ use std::iter; use std::ops::ControlFlow; -use rustc_abi::{BackendRepr, TagEncoding, VariantIdx, Variants, WrappingRange}; +use rustc_abi::{ + BackendRepr, Integer, IntegerType, TagEncoding, VariantIdx, Variants, WrappingRange, +}; use rustc_data_structures::fx::FxHashSet; use rustc_errors::DiagMessage; use rustc_hir::intravisit::VisitorExt; @@ -14,7 +16,7 @@ use rustc_middle::ty::{ }; use rustc_session::{declare_lint, declare_lint_pass, impl_lint_pass}; use rustc_span::def_id::LocalDefId; -use rustc_span::{Span, Symbol, source_map, sym}; +use rustc_span::{Span, Symbol, sym}; use tracing::debug; use {rustc_ast as ast, rustc_hir as hir}; @@ -223,7 +225,7 @@ impl TypeLimits { fn lint_nan<'tcx>( cx: &LateContext<'tcx>, e: &'tcx hir::Expr<'tcx>, - binop: hir::BinOp, + binop: hir::BinOpKind, l: &'tcx hir::Expr<'tcx>, r: &'tcx hir::Expr<'tcx>, ) { @@ -262,19 +264,19 @@ fn lint_nan<'tcx>( InvalidNanComparisons::EqNe { suggestion } } - let lint = match binop.node { + let lint = match binop { hir::BinOpKind::Eq | hir::BinOpKind::Ne if is_nan(cx, l) => { eq_ne(e, l, r, |l_span, r_span| InvalidNanComparisonsSuggestion::Spanful { nan_plus_binop: l_span.until(r_span), float: r_span.shrink_to_hi(), - neg: (binop.node == hir::BinOpKind::Ne).then(|| r_span.shrink_to_lo()), + neg: (binop == hir::BinOpKind::Ne).then(|| r_span.shrink_to_lo()), }) } hir::BinOpKind::Eq | hir::BinOpKind::Ne if is_nan(cx, r) => { eq_ne(e, l, r, |l_span, r_span| InvalidNanComparisonsSuggestion::Spanful { nan_plus_binop: l_span.shrink_to_hi().to(r_span), float: l_span.shrink_to_hi(), - neg: (binop.node == hir::BinOpKind::Ne).then(|| l_span.shrink_to_lo()), + neg: (binop == hir::BinOpKind::Ne).then(|| l_span.shrink_to_lo()), }) } hir::BinOpKind::Lt | hir::BinOpKind::Le | hir::BinOpKind::Gt | hir::BinOpKind::Ge @@ -560,11 +562,11 @@ impl<'tcx> LateLintPass<'tcx> for TypeLimits { } } hir::ExprKind::Binary(binop, ref l, ref r) => { - if is_comparison(binop) { - if !check_limits(cx, binop, l, r) { + if is_comparison(binop.node) { + if !check_limits(cx, binop.node, l, r) { cx.emit_span_lint(UNUSED_COMPARISONS, e.span, UnusedComparisons); } else { - lint_nan(cx, e, binop, l, r); + lint_nan(cx, e, binop.node, l, r); let cmpop = ComparisonOp::BinOp(binop.node); lint_wide_pointer(cx, e, cmpop, l, r); lint_fn_pointer(cx, e, cmpop, l, r); @@ -591,8 +593,8 @@ impl<'tcx> LateLintPass<'tcx> for TypeLimits { _ => {} }; - fn is_valid(binop: hir::BinOp, v: T, min: T, max: T) -> bool { - match binop.node { + fn is_valid(binop: hir::BinOpKind, v: T, min: T, max: T) -> bool { + match binop { hir::BinOpKind::Lt => v > min && v <= max, hir::BinOpKind::Le => v >= min && v < max, hir::BinOpKind::Gt => v >= min && v < max, @@ -602,22 +604,19 @@ impl<'tcx> LateLintPass<'tcx> for TypeLimits { } } - fn rev_binop(binop: hir::BinOp) -> hir::BinOp { - source_map::respan( - binop.span, - match binop.node { - hir::BinOpKind::Lt => hir::BinOpKind::Gt, - hir::BinOpKind::Le => hir::BinOpKind::Ge, - hir::BinOpKind::Gt => hir::BinOpKind::Lt, - hir::BinOpKind::Ge => hir::BinOpKind::Le, - _ => return binop, - }, - ) + fn rev_binop(binop: hir::BinOpKind) -> hir::BinOpKind { + match binop { + hir::BinOpKind::Lt => hir::BinOpKind::Gt, + hir::BinOpKind::Le => hir::BinOpKind::Ge, + hir::BinOpKind::Gt => hir::BinOpKind::Lt, + hir::BinOpKind::Ge => hir::BinOpKind::Le, + _ => binop, + } } fn check_limits( cx: &LateContext<'_>, - binop: hir::BinOp, + binop: hir::BinOpKind, l: &hir::Expr<'_>, r: &hir::Expr<'_>, ) -> bool { @@ -659,9 +658,9 @@ impl<'tcx> LateLintPass<'tcx> for TypeLimits { } } - fn is_comparison(binop: hir::BinOp) -> bool { + fn is_comparison(binop: hir::BinOpKind) -> bool { matches!( - binop.node, + binop, hir::BinOpKind::Eq | hir::BinOpKind::Lt | hir::BinOpKind::Le @@ -756,10 +755,10 @@ declare_lint! { /// *subsequent* fields of the associated structs to use an alignment value /// where the floating-point type is aligned on a 4-byte boundary. /// - /// The power alignment rule for structs needed for C compatibility is - /// unimplementable within `repr(C)` in the compiler without building in - /// handling of references to packed fields and infectious nested layouts, - /// so a warning is produced in these situations. + /// Effectively, subsequent floating-point fields act as-if they are `repr(packed(4))`. This + /// would be unsound to do in a `repr(C)` type without all the restrictions that come with + /// `repr(packed)`. Rust instead chooses a layout that maintains soundness of Rust code, at the + /// expense of incompatibility with C code. /// /// ### Example /// @@ -791,8 +790,10 @@ declare_lint! { /// - offset_of!(Floats, a) == 0 /// - offset_of!(Floats, b) == 8 /// - offset_of!(Floats, c) == 12 - /// However, rust currently aligns `c` at offset_of!(Floats, c) == 16. - /// Thus, a warning should be produced for the above struct in this case. + /// + /// However, Rust currently aligns `c` at `offset_of!(Floats, c) == 16`. + /// Using offset 12 would be unsound since `f64` generally must be 8-aligned on this target. + /// Thus, a warning is produced for the above struct. USES_POWER_ALIGNMENT, Warn, "Structs do not follow the power alignment rule under repr(C)" @@ -867,8 +868,8 @@ fn ty_is_known_nonnull<'tcx>( return true; } - // `UnsafeCell` has its niche hidden. - if def.is_unsafe_cell() { + // `UnsafeCell` and `UnsafePinned` have their niche hidden. + if def.is_unsafe_cell() || def.is_unsafe_pinned() { return false; } @@ -879,25 +880,36 @@ fn ty_is_known_nonnull<'tcx>( } ty::Pat(base, pat) => { ty_is_known_nonnull(tcx, typing_env, *base, mode) - || Option::unwrap_or_default( - try { - match **pat { - ty::PatternKind::Range { start, end } => { - let start = start.try_to_value()?.try_to_bits(tcx, typing_env)?; - let end = end.try_to_value()?.try_to_bits(tcx, typing_env)?; - - // This also works for negative numbers, as we just need - // to ensure we aren't wrapping over zero. - start > 0 && end >= start - } - } - }, - ) + || pat_ty_is_known_nonnull(tcx, typing_env, *pat) } _ => false, } } +fn pat_ty_is_known_nonnull<'tcx>( + tcx: TyCtxt<'tcx>, + typing_env: ty::TypingEnv<'tcx>, + pat: ty::Pattern<'tcx>, +) -> bool { + Option::unwrap_or_default( + try { + match *pat { + ty::PatternKind::Range { start, end } => { + let start = start.try_to_value()?.try_to_bits(tcx, typing_env)?; + let end = end.try_to_value()?.try_to_bits(tcx, typing_env)?; + + // This also works for negative numbers, as we just need + // to ensure we aren't wrapping over zero. + start > 0 && end >= start + } + ty::PatternKind::Or(patterns) => { + patterns.iter().all(|pat| pat_ty_is_known_nonnull(tcx, typing_env, pat)) + } + } + }, + ) +} + /// Given a non-null scalar (or transparent) type `ty`, return the nullable version of that type. /// If the type passed in was not scalar, returns None. fn get_nullable_type<'tcx>( @@ -1039,13 +1051,29 @@ pub(crate) fn repr_nullable_ptr<'tcx>( } None } - ty::Pat(base, pat) => match **pat { - ty::PatternKind::Range { .. } => get_nullable_type(tcx, typing_env, *base), - }, + ty::Pat(base, pat) => get_nullable_type_from_pat(tcx, typing_env, *base, *pat), _ => None, } } +fn get_nullable_type_from_pat<'tcx>( + tcx: TyCtxt<'tcx>, + typing_env: ty::TypingEnv<'tcx>, + base: Ty<'tcx>, + pat: ty::Pattern<'tcx>, +) -> Option> { + match *pat { + ty::PatternKind::Range { .. } => get_nullable_type(tcx, typing_env, base), + ty::PatternKind::Or(patterns) => { + let first = get_nullable_type_from_pat(tcx, typing_env, base, patterns[0])?; + for &pat in &patterns[1..] { + assert_eq!(first, get_nullable_type_from_pat(tcx, typing_env, base, pat)?); + } + Some(first) + } + } +} + impl<'a, 'tcx> ImproperCTypesVisitor<'a, 'tcx> { /// Check if the type is array and emit an unsafe type lint. fn check_for_array_ty(&mut self, sp: Span, ty: Ty<'tcx>) -> bool { @@ -1246,6 +1274,14 @@ impl<'a, 'tcx> ImproperCTypesVisitor<'a, 'tcx> { }; } + if let Some(IntegerType::Fixed(Integer::I128, _)) = def.repr().int { + return FfiUnsafe { + ty, + reason: fluent::lint_improper_ctypes_128bit, + help: None, + }; + } + use improper_ctypes::check_non_exhaustive_variant; let non_exhaustive = def.variant_list_has_applicable_non_exhaustive(); @@ -1374,7 +1410,7 @@ impl<'a, 'tcx> ImproperCTypesVisitor<'a, 'tcx> { ty::UnsafeBinder(_) => todo!("FIXME(unsafe_binder)"), ty::Param(..) - | ty::Alias(ty::Projection | ty::Inherent | ty::Weak, ..) + | ty::Alias(ty::Projection | ty::Inherent | ty::Free, ..) | ty::Infer(..) | ty::Bound(..) | ty::Error(_) @@ -1403,7 +1439,7 @@ impl<'a, 'tcx> ImproperCTypesVisitor<'a, 'tcx> { CItemKind::Definition => "fn", }; let span_note = if let ty::Adt(def, _) = ty.kind() - && let Some(sp) = self.cx.tcx.hir().span_if_local(def.did()) + && let Some(sp) = self.cx.tcx.hir_span_if_local(def.did()) { Some(sp) } else { @@ -1621,15 +1657,13 @@ impl ImproperCTypesDefinitions { cx: &LateContext<'tcx>, ty: Ty<'tcx>, ) -> bool { + assert!(cx.tcx.sess.target.os == "aix"); // Structs (under repr(C)) follow the power alignment rule if: // - the first field of the struct is a floating-point type that // is greater than 4-bytes, or // - the first field of the struct is an aggregate whose // recursively first field is a floating-point type greater than // 4 bytes. - if cx.tcx.sess.target.os != "aix" { - return false; - } if ty.is_floating_point() && ty.primitive_size(cx.tcx).bytes() > 4 { return true; } else if let Adt(adt_def, _) = ty.kind() @@ -1667,21 +1701,14 @@ impl ImproperCTypesDefinitions { && !adt_def.all_fields().next().is_none() { let struct_variant_data = item.expect_struct().1; - for (index, ..) in struct_variant_data.fields().iter().enumerate() { + for field_def in struct_variant_data.fields().iter().skip(1) { // Struct fields (after the first field) are checked for the // power alignment rule, as fields after the first are likely // to be the fields that are misaligned. - if index != 0 { - let first_field_def = struct_variant_data.fields()[index]; - let def_id = first_field_def.def_id; - let ty = cx.tcx.type_of(def_id).instantiate_identity(); - if self.check_arg_for_power_alignment(cx, ty) { - cx.emit_span_lint( - USES_POWER_ALIGNMENT, - first_field_def.span, - UsesPowerAlignment, - ); - } + let def_id = field_def.def_id; + let ty = cx.tcx.type_of(def_id).instantiate_identity(); + if self.check_arg_for_power_alignment(cx, ty) { + cx.emit_span_lint(USES_POWER_ALIGNMENT, field_def.span, UsesPowerAlignment); } } } @@ -1723,7 +1750,7 @@ impl<'tcx> LateLintPass<'tcx> for ImproperCTypesDefinitions { | hir::ItemKind::GlobalAsm { .. } | hir::ItemKind::ForeignMod { .. } | hir::ItemKind::Mod(..) - | hir::ItemKind::Macro(..) + | hir::ItemKind::Macro { .. } | hir::ItemKind::Use(..) | hir::ItemKind::ExternCrate(..) => {} } diff --git a/compiler/rustc_lint/src/unused.rs b/compiler/rustc_lint/src/unused.rs index 7b43aac90c741..50a27d7e84f58 100644 --- a/compiler/rustc_lint/src/unused.rs +++ b/compiler/rustc_lint/src/unused.rs @@ -942,6 +942,22 @@ trait UnusedDelimLint { match s.kind { StmtKind::Let(ref local) if Self::LINT_EXPR_IN_PATTERN_MATCHING_CTX => { if let Some((init, els)) = local.kind.init_else_opt() { + if els.is_some() + && let ExprKind::Paren(paren) = &init.kind + && !init.span.eq_ctxt(paren.span) + { + // This branch prevents cases where parentheses wrap an expression + // resulting from macro expansion, such as: + // ``` + // macro_rules! x { + // () => { None:: }; + // } + // let Some(_) = (x!{}) else { return }; + // // -> let Some(_) = (None::) else { return }; + // // ~ ~ No Lint + // ``` + return; + } let ctx = match els { None => UnusedDelimsCtx::AssignedValue, Some(_) => UnusedDelimsCtx::AssignedValueLetElse, @@ -1201,7 +1217,8 @@ impl EarlyLintPass for UnusedParens { // Do not lint on `(..)` as that will result in the other arms being useless. Paren(_) // The other cases do not contain sub-patterns. - | Wild | Never | Rest | Expr(..) | MacCall(..) | Range(..) | Ident(.., None) | Path(..) | Err(_) => {}, + | Missing | Wild | Never | Rest | Expr(..) | MacCall(..) | Range(..) | Ident(.., None) + | Path(..) | Err(_) => {}, // These are list-like patterns; parens can always be removed. TupleStruct(_, _, ps) | Tuple(ps) | Slice(ps) | Or(ps) => for p in ps { self.check_unused_parens_pat(cx, p, false, false, keep_space); diff --git a/compiler/rustc_lint_defs/src/builtin.rs b/compiler/rustc_lint_defs/src/builtin.rs index 8a761a0a0969b..3cea24634feee 100644 --- a/compiler/rustc_lint_defs/src/builtin.rs +++ b/compiler/rustc_lint_defs/src/builtin.rs @@ -16,7 +16,6 @@ declare_lint_pass! { /// that are used by other parts of the compiler. HardwiredLints => [ // tidy-alphabetical-start - ABI_UNSUPPORTED_VECTOR_TYPES, ABSOLUTE_PATHS_NOT_STARTING_WITH_CRATE, AMBIGUOUS_ASSOCIATED_ITEMS, AMBIGUOUS_GLOB_IMPORTS, @@ -110,7 +109,6 @@ declare_lint_pass! { UNCONDITIONAL_PANIC, UNCONDITIONAL_RECURSION, UNCOVERED_PARAM_IN_PROJECTION, - UNDEFINED_NAKED_FUNCTION_ABI, UNEXPECTED_CFGS, UNFULFILLED_LINT_EXPECTATIONS, UNINHABITED_STATIC, @@ -119,6 +117,7 @@ declare_lint_pass! { UNKNOWN_OR_MALFORMED_DIAGNOSTIC_ATTRIBUTES, UNNAMEABLE_TEST_ITEMS, UNNAMEABLE_TYPES, + UNNECESSARY_TRANSMUTES, UNREACHABLE_CODE, UNREACHABLE_PATTERNS, UNSAFE_ATTR_OUTSIDE_UNSAFE, @@ -1425,7 +1424,7 @@ declare_lint! { /// /// ### Example /// - /// ```rust,compile_fail + /// ```rust,compile_fail,edition2021 /// macro_rules! foo { /// () => {}; /// ($name) => { }; @@ -2353,37 +2352,11 @@ declare_lint! { } declare_lint! { - /// The `soft_unstable` lint detects unstable features that were - /// unintentionally allowed on stable. - /// - /// ### Example - /// - /// ```rust,compile_fail - /// #[cfg(test)] - /// extern crate test; - /// - /// #[bench] - /// fn name(b: &mut test::Bencher) { - /// b.iter(|| 123) - /// } - /// ``` - /// - /// {{produces}} - /// - /// ### Explanation - /// - /// The [`bench` attribute] was accidentally allowed to be specified on - /// the [stable release channel]. Turning this to a hard error would have - /// broken some projects. This lint allows those projects to continue to - /// build correctly when [`--cap-lints`] is used, but otherwise signal an - /// error that `#[bench]` should not be used on the stable channel. This - /// is a [future-incompatible] lint to transition this to a hard error in - /// the future. See [issue #64266] for more details. + /// The `soft_unstable` lint detects unstable features that were unintentionally allowed on + /// stable. This is a [future-incompatible] lint to transition this to a hard error in the + /// future. See [issue #64266] for more details. /// /// [issue #64266]: https://github.com/rust-lang/rust/issues/64266 - /// [`bench` attribute]: https://doc.rust-lang.org/nightly/unstable-book/library-features/test.html - /// [stable release channel]: https://doc.rust-lang.org/book/appendix-07-nightly-rust.html - /// [`--cap-lints`]: https://doc.rust-lang.org/rustc/lints/levels.html#capping-lints /// [future-incompatible]: ../index.md#future-incompatible-lints pub SOFT_UNSTABLE, Deny, @@ -2830,39 +2803,6 @@ declare_lint! { "detects deprecation attributes with no effect", } -declare_lint! { - /// The `undefined_naked_function_abi` lint detects naked function definitions that - /// either do not specify an ABI or specify the Rust ABI. - /// - /// ### Example - /// - /// ```rust - /// #![feature(asm_experimental_arch, naked_functions)] - /// - /// use std::arch::naked_asm; - /// - /// #[naked] - /// pub fn default_abi() -> u32 { - /// unsafe { naked_asm!(""); } - /// } - /// - /// #[naked] - /// pub extern "Rust" fn rust_abi() -> u32 { - /// unsafe { naked_asm!(""); } - /// } - /// ``` - /// - /// {{produces}} - /// - /// ### Explanation - /// - /// The Rust ABI is currently undefined. Therefore, naked functions should - /// specify a non-Rust ABI. - pub UNDEFINED_NAKED_FUNCTION_ABI, - Warn, - "undefined naked function ABI" -} - declare_lint! { /// The `ineffective_unstable_trait_impl` lint detects `#[unstable]` attributes which are not used. /// @@ -4162,7 +4102,7 @@ declare_lint! { /// /// ### Example /// - /// ```rust,compile_fail + /// ```rust,compile_fail,edition2021 /// #![deny(dependency_on_unit_never_type_fallback)] /// fn main() { /// if true { @@ -4944,6 +4884,30 @@ declare_lint! { "detects pointer to integer transmutes in const functions and associated constants", } +declare_lint! { + /// The `unnecessary_transmutes` lint detects transmutations that have safer alternatives. + /// + /// ### Example + /// + /// ```rust + /// fn bytes_at_home(x: [u8; 4]) -> u32 { + /// unsafe { std::mem::transmute(x) } + /// } + /// ``` + /// + /// {{produces}} + /// + /// ### Explanation + /// + /// Using an explicit method is preferable over calls to + /// [`transmute`](https://doc.rust-lang.org/std/mem/fn.transmute.html) as + /// they more clearly communicate the intent, are easier to review, and + /// are less likely to accidentally result in unsoundness. + pub UNNECESSARY_TRANSMUTES, + Warn, + "detects transmutes that are shadowed by std methods" +} + declare_lint! { /// The `tail_expr_drop_order` lint looks for those values generated at the tail expression location, /// that runs a custom `Drop` destructor. @@ -5061,74 +5025,6 @@ declare_lint! { crate_level_only } -declare_lint! { - /// The `abi_unsupported_vector_types` lint detects function definitions and calls - /// whose ABI depends on enabling certain target features, but those features are not enabled. - /// - /// ### Example - /// - /// ```rust,ignore (fails on non-x86_64) - /// extern "C" fn missing_target_feature(_: std::arch::x86_64::__m256) { - /// todo!() - /// } - /// - /// #[target_feature(enable = "avx")] - /// unsafe extern "C" fn with_target_feature(_: std::arch::x86_64::__m256) { - /// todo!() - /// } - /// - /// fn main() { - /// let v = unsafe { std::mem::zeroed() }; - /// unsafe { with_target_feature(v); } - /// } - /// ``` - /// - /// This will produce: - /// - /// ```text - /// warning: ABI error: this function call uses a avx vector type, which is not enabled in the caller - /// --> lint_example.rs:18:12 - /// | - /// | unsafe { with_target_feature(v); } - /// | ^^^^^^^^^^^^^^^^^^^^^^ function called here - /// | - /// = warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release! - /// = note: for more information, see issue #116558 - /// = help: consider enabling it globally (-C target-feature=+avx) or locally (#[target_feature(enable="avx")]) - /// = note: `#[warn(abi_unsupported_vector_types)]` on by default - /// - /// - /// warning: ABI error: this function definition uses a avx vector type, which is not enabled - /// --> lint_example.rs:3:1 - /// | - /// | pub extern "C" fn with_target_feature(_: std::arch::x86_64::__m256) { - /// | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ function defined here - /// | - /// = warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release! - /// = note: for more information, see issue #116558 - /// = help: consider enabling it globally (-C target-feature=+avx) or locally (#[target_feature(enable="avx")]) - /// ``` - /// - /// - /// - /// ### Explanation - /// - /// The C ABI for `__m256` requires the value to be passed in an AVX register, - /// which is only possible when the `avx` target feature is enabled. - /// Therefore, `missing_target_feature` cannot be compiled without that target feature. - /// A similar (but complementary) message is triggered when `with_target_feature` is called - /// by a function that does not enable the `avx` target feature. - /// - /// Note that this lint is very similar to the `-Wpsabi` warning in `gcc`/`clang`. - pub ABI_UNSUPPORTED_VECTOR_TYPES, - Warn, - "this function call or definition uses a vector type which is not enabled", - @future_incompatible = FutureIncompatibleInfo { - reason: FutureIncompatibilityReason::FutureReleaseErrorReportInDeps, - reference: "issue #116558 ", - }; -} - declare_lint! { /// The `wasm_c_abi` lint detects usage of the `extern "C"` ABI of wasm that is affected /// by a planned ABI change that has the goal of aligning Rust with the standard C ABI diff --git a/compiler/rustc_lint_defs/src/lib.rs b/compiler/rustc_lint_defs/src/lib.rs index 46b4b1d438386..b4069b317bfa1 100644 --- a/compiler/rustc_lint_defs/src/lib.rs +++ b/compiler/rustc_lint_defs/src/lib.rs @@ -8,7 +8,8 @@ use rustc_data_structures::stable_hasher::{ }; use rustc_error_messages::{DiagMessage, MultiSpan}; use rustc_hir::def::Namespace; -use rustc_hir::{HashStableContext, HirId, MissingLifetimeKind}; +use rustc_hir::def_id::DefPathHash; +use rustc_hir::{HashStableContext, HirId, ItemLocalId, MissingLifetimeKind}; use rustc_macros::{Decodable, Encodable, HashStable_Generic}; pub use rustc_span::edition::Edition; use rustc_span::{Ident, MacroRulesNormalizedIdent, Span, Symbol, sym}; @@ -102,7 +103,7 @@ pub enum Applicability { /// The index values have a type of `u16` to reduce the size of the `LintExpectationId`. /// It's reasonable to assume that no user will define 2^16 attributes on one node or /// have that amount of lints listed. `u16` values should therefore suffice. -#[derive(Clone, Copy, PartialEq, PartialOrd, Eq, Ord, Debug, Hash, Encodable, Decodable)] +#[derive(Clone, Copy, PartialEq, Eq, Debug, Hash, Encodable, Decodable)] pub enum LintExpectationId { /// Used for lints emitted during the `EarlyLintPass`. This id is not /// hash stable and should not be cached. @@ -156,13 +157,14 @@ impl HashStable for LintExpectationId { } impl ToStableHashKey for LintExpectationId { - type KeyType = (HirId, u16, u16); + type KeyType = (DefPathHash, ItemLocalId, u16, u16); #[inline] - fn to_stable_hash_key(&self, _: &HCX) -> Self::KeyType { + fn to_stable_hash_key(&self, hcx: &HCX) -> Self::KeyType { match self { LintExpectationId::Stable { hir_id, attr_index, lint_index: Some(lint_index) } => { - (*hir_id, *attr_index, *lint_index) + let (def_path_hash, lint_idx) = hir_id.to_stable_hash_key(hcx); + (def_path_hash, lint_idx, *attr_index, *lint_index) } _ => { unreachable!("HashStable should only be called for a filled `LintExpectationId`") @@ -199,9 +201,9 @@ pub enum Level { /// /// See RFC 2383. /// - /// The [`LintExpectationId`] is used to later link a lint emission to the actual + /// Requires a [`LintExpectationId`] to later link a lint emission to the actual /// expectation. It can be ignored in most cases. - Expect(LintExpectationId), + Expect, /// The `warn` level will produce a warning if the lint was violated, however the /// compiler will continue with its execution. Warn, @@ -209,9 +211,9 @@ pub enum Level { /// to ensure that a lint can't be suppressed. This lint level can currently only be set /// via the console and is therefore session specific. /// - /// The [`LintExpectationId`] is intended to fulfill expectations marked via the + /// Requires a [`LintExpectationId`] to fulfill expectations marked via the /// `#[expect]` attribute, that will still be suppressed due to the level. - ForceWarn(Option), + ForceWarn, /// The `deny` level will produce an error and stop further execution after the lint /// pass is complete. Deny, @@ -225,9 +227,9 @@ impl Level { pub fn as_str(self) -> &'static str { match self { Level::Allow => "allow", - Level::Expect(_) => "expect", + Level::Expect => "expect", Level::Warn => "warn", - Level::ForceWarn(_) => "force-warn", + Level::ForceWarn => "force-warn", Level::Deny => "deny", Level::Forbid => "forbid", } @@ -246,24 +248,30 @@ impl Level { } /// Converts an `Attribute` to a level. - pub fn from_attr(attr: &impl AttributeExt) -> Option { - Self::from_symbol(attr.name_or_empty(), || Some(attr.id())) + pub fn from_attr(attr: &impl AttributeExt) -> Option<(Self, Option)> { + attr.name().and_then(|name| Self::from_symbol(name, || Some(attr.id()))) } /// Converts a `Symbol` to a level. - pub fn from_symbol(s: Symbol, id: impl FnOnce() -> Option) -> Option { + pub fn from_symbol( + s: Symbol, + id: impl FnOnce() -> Option, + ) -> Option<(Self, Option)> { match s { - sym::allow => Some(Level::Allow), + sym::allow => Some((Level::Allow, None)), sym::expect => { if let Some(attr_id) = id() { - Some(Level::Expect(LintExpectationId::Unstable { attr_id, lint_index: None })) + Some(( + Level::Expect, + Some(LintExpectationId::Unstable { attr_id, lint_index: None }), + )) } else { None } } - sym::warn => Some(Level::Warn), - sym::deny => Some(Level::Deny), - sym::forbid => Some(Level::Forbid), + sym::warn => Some((Level::Warn, None)), + sym::deny => Some((Level::Deny, None)), + sym::forbid => Some((Level::Forbid, None)), _ => None, } } @@ -274,8 +282,8 @@ impl Level { Level::Deny => "-D", Level::Forbid => "-F", Level::Allow => "-A", - Level::ForceWarn(_) => "--force-warn", - Level::Expect(_) => { + Level::ForceWarn => "--force-warn", + Level::Expect => { unreachable!("the expect level does not have a commandline flag") } } @@ -283,17 +291,10 @@ impl Level { pub fn is_error(self) -> bool { match self { - Level::Allow | Level::Expect(_) | Level::Warn | Level::ForceWarn(_) => false, + Level::Allow | Level::Expect | Level::Warn | Level::ForceWarn => false, Level::Deny | Level::Forbid => true, } } - - pub fn get_expectation_id(&self) -> Option { - match self { - Level::Expect(id) | Level::ForceWarn(Some(id)) => Some(*id), - _ => None, - } - } } /// Specification of a single lint. diff --git a/compiler/rustc_llvm/llvm-wrapper/CoverageMappingWrapper.cpp b/compiler/rustc_llvm/llvm-wrapper/CoverageMappingWrapper.cpp index b8884486c3330..4695de8ea09a3 100644 --- a/compiler/rustc_llvm/llvm-wrapper/CoverageMappingWrapper.cpp +++ b/compiler/rustc_llvm/llvm-wrapper/CoverageMappingWrapper.cpp @@ -47,7 +47,6 @@ struct LLVMRustMCDCBranchParameters { int16_t ConditionIDs[2]; }; -#if LLVM_VERSION_GE(19, 0) static coverage::mcdc::BranchParameters fromRust(LLVMRustMCDCBranchParameters Params) { return coverage::mcdc::BranchParameters( @@ -59,7 +58,6 @@ fromRust(LLVMRustMCDCDecisionParameters Params) { return coverage::mcdc::DecisionParameters(Params.BitmapIdx, Params.NumConditions); } -#endif // Must match the layout of // `rustc_codegen_llvm::coverageinfo::ffi::CoverageSpan`. @@ -203,7 +201,6 @@ extern "C" void LLVMRustCoverageWriteFunctionMappingsToBuffer( Region.Span.LineEnd, Region.Span.ColumnEnd)); } -#if LLVM_VERSION_GE(19, 0) // MC/DC branch regions: for (const auto &Region : ArrayRef(MCDCBranchRegions, NumMCDCBranchRegions)) { MappingRegions.push_back(coverage::CounterMappingRegion::makeBranchRegion( @@ -221,7 +218,6 @@ extern "C" void LLVMRustCoverageWriteFunctionMappingsToBuffer( Region.Span.LineStart, Region.Span.ColumnStart, Region.Span.LineEnd, Region.Span.ColumnEnd)); } -#endif // Write the converted expressions and mappings to a byte buffer. auto CoverageMappingWriter = coverage::CoverageMappingWriter( diff --git a/compiler/rustc_llvm/llvm-wrapper/PassWrapper.cpp b/compiler/rustc_llvm/llvm-wrapper/PassWrapper.cpp index 86f1bcc46eea1..d4a05fbbbc5d1 100644 --- a/compiler/rustc_llvm/llvm-wrapper/PassWrapper.cpp +++ b/compiler/rustc_llvm/llvm-wrapper/PassWrapper.cpp @@ -14,6 +14,7 @@ #include "llvm/IR/LegacyPassManager.h" #include "llvm/IR/PassManager.h" #include "llvm/IR/Verifier.h" +#include "llvm/IRPrinter/IRPrintingPasses.h" #include "llvm/LTO/LTO.h" #include "llvm/MC/MCSubtargetInfo.h" #include "llvm/MC/TargetRegistry.h" @@ -47,10 +48,7 @@ #include // Conditional includes prevent clang-format from fully sorting the list, -// so keep them separate. -#if LLVM_VERSION_GE(19, 0) -#include "llvm/Support/PGOOptions.h" -#endif +// so if any are needed, keep them separate down here. using namespace llvm; @@ -432,31 +430,15 @@ extern "C" LLVMTargetMachineRef LLVMRustCreateTargetMachine( } if (!strcmp("zlib", DebugInfoCompression) && llvm::compression::zlib::isAvailable()) { -#if LLVM_VERSION_GE(19, 0) Options.MCOptions.CompressDebugSections = DebugCompressionType::Zlib; -#else - Options.CompressDebugSections = DebugCompressionType::Zlib; -#endif } else if (!strcmp("zstd", DebugInfoCompression) && llvm::compression::zstd::isAvailable()) { -#if LLVM_VERSION_GE(19, 0) Options.MCOptions.CompressDebugSections = DebugCompressionType::Zstd; -#else - Options.CompressDebugSections = DebugCompressionType::Zstd; -#endif } else if (!strcmp("none", DebugInfoCompression)) { -#if LLVM_VERSION_GE(19, 0) Options.MCOptions.CompressDebugSections = DebugCompressionType::None; -#else - Options.CompressDebugSections = DebugCompressionType::None; -#endif } -#if LLVM_VERSION_GE(19, 0) Options.MCOptions.X86RelaxRelocations = RelaxELFRelocations; -#else - Options.RelaxELFRelocations = RelaxELFRelocations; -#endif Options.UseInitArray = UseInitArray; Options.EmulatedTLS = UseEmulatedTls; @@ -530,8 +512,13 @@ extern "C" LLVMTargetMachineRef LLVMRustCreateTargetMachine( #endif } +#if LLVM_VERSION_GE(21, 0) + TargetMachine *TM = TheTarget->createTargetMachine(Trip, CPU, Feature, + Options, RM, CM, OptLevel); +#else TargetMachine *TM = TheTarget->createTargetMachine( Trip.getTriple(), CPU, Feature, Options, RM, CM, OptLevel); +#endif return wrap(TM); } @@ -722,7 +709,8 @@ extern "C" LLVMRustResult LLVMRustOptimize( bool LintIR, LLVMRustThinLTOBuffer **ThinLTOBufferRef, bool EmitThinLTO, bool EmitThinLTOSummary, bool MergeFunctions, bool UnrollLoops, bool SLPVectorize, bool LoopVectorize, bool DisableSimplifyLibCalls, - bool EmitLifetimeMarkers, bool RunEnzyme, + bool EmitLifetimeMarkers, bool RunEnzyme, bool PrintBeforeEnzyme, + bool PrintAfterEnzyme, bool PrintPasses, LLVMRustSanitizerOptions *SanitizerOptions, const char *PGOGenPath, const char *PGOUsePath, bool InstrumentCoverage, const char *InstrProfileOutput, const char *PGOSampleUsePath, @@ -753,34 +741,23 @@ extern "C" LLVMRustResult LLVMRustOptimize( auto FS = vfs::getRealFileSystem(); if (PGOGenPath) { assert(!PGOUsePath && !PGOSampleUsePath); - PGOOpt = PGOOptions(PGOGenPath, "", "", "", FS, PGOOptions::IRInstr, - PGOOptions::NoCSAction, -#if LLVM_VERSION_GE(19, 0) - PGOOptions::ColdFuncOpt::Default, -#endif - DebugInfoForProfiling); + PGOOpt = PGOOptions( + PGOGenPath, "", "", "", FS, PGOOptions::IRInstr, PGOOptions::NoCSAction, + PGOOptions::ColdFuncOpt::Default, DebugInfoForProfiling); } else if (PGOUsePath) { assert(!PGOSampleUsePath); - PGOOpt = PGOOptions(PGOUsePath, "", "", "", FS, PGOOptions::IRUse, - PGOOptions::NoCSAction, -#if LLVM_VERSION_GE(19, 0) - PGOOptions::ColdFuncOpt::Default, -#endif - DebugInfoForProfiling); + PGOOpt = PGOOptions( + PGOUsePath, "", "", "", FS, PGOOptions::IRUse, PGOOptions::NoCSAction, + PGOOptions::ColdFuncOpt::Default, DebugInfoForProfiling); } else if (PGOSampleUsePath) { - PGOOpt = PGOOptions(PGOSampleUsePath, "", "", "", FS, PGOOptions::SampleUse, - PGOOptions::NoCSAction, -#if LLVM_VERSION_GE(19, 0) - PGOOptions::ColdFuncOpt::Default, -#endif - DebugInfoForProfiling); + PGOOpt = + PGOOptions(PGOSampleUsePath, "", "", "", FS, PGOOptions::SampleUse, + PGOOptions::NoCSAction, PGOOptions::ColdFuncOpt::Default, + DebugInfoForProfiling); } else if (DebugInfoForProfiling) { - PGOOpt = PGOOptions("", "", "", "", FS, PGOOptions::NoAction, - PGOOptions::NoCSAction, -#if LLVM_VERSION_GE(19, 0) - PGOOptions::ColdFuncOpt::Default, -#endif - DebugInfoForProfiling); + PGOOpt = PGOOptions( + "", "", "", "", FS, PGOOptions::NoAction, PGOOptions::NoCSAction, + PGOOptions::ColdFuncOpt::Default, DebugInfoForProfiling); } auto PB = PassBuilder(TM, PTO, PGOOpt, &PIC); @@ -855,10 +832,15 @@ extern "C" LLVMRustResult LLVMRustOptimize( } if (LintIR) { - PipelineStartEPCallbacks.push_back( - [](ModulePassManager &MPM, OptimizationLevel Level) { - MPM.addPass(createModuleToFunctionPassAdaptor(LintPass())); - }); + PipelineStartEPCallbacks.push_back([](ModulePassManager &MPM, + OptimizationLevel Level) { +#if LLVM_VERSION_GE(21, 0) + MPM.addPass( + createModuleToFunctionPassAdaptor(LintPass(/*AbortOnError=*/true))); +#else + MPM.addPass(createModuleToFunctionPassAdaptor(LintPass())); +#endif + }); } if (InstrumentCoverage) { @@ -1073,14 +1055,38 @@ extern "C" LLVMRustResult LLVMRustOptimize( // now load "-enzyme" pass: #ifdef ENZYME if (RunEnzyme) { - registerEnzymeAndPassPipeline(PB, true); + + if (PrintBeforeEnzyme) { + // Handle the Rust flag `-Zautodiff=PrintModBefore`. + std::string Banner = "Module before EnzymeNewPM"; + MPM.addPass(PrintModulePass(outs(), Banner, true, false)); + } + + registerEnzymeAndPassPipeline(PB, false); if (auto Err = PB.parsePassPipeline(MPM, "enzyme")) { std::string ErrMsg = toString(std::move(Err)); LLVMRustSetLastError(ErrMsg.c_str()); return LLVMRustResult::Failure; } + + if (PrintAfterEnzyme) { + // Handle the Rust flag `-Zautodiff=PrintModAfter`. + std::string Banner = "Module after EnzymeNewPM"; + MPM.addPass(PrintModulePass(outs(), Banner, true, false)); + } } #endif + if (PrintPasses) { + // Print all passes from the PM: + std::string Pipeline; + raw_string_ostream SOS(Pipeline); + MPM.printPipeline(SOS, [&PIC](StringRef ClassName) { + auto PassName = PIC.getPassNameForClassName(ClassName); + return PassName.empty() ? ClassName : PassName; + }); + outs() << Pipeline; + outs() << "\n"; + } // Upgrade all calls to old intrinsics first. for (Module::iterator I = TheModule->begin(), E = TheModule->end(); I != E;) @@ -1364,7 +1370,12 @@ LLVMRustCreateThinLTOData(LLVMRustThinLTOModule *modules, size_t num_modules, // Convert the preserved symbols set from string to GUID, this is then needed // for internalization. for (size_t i = 0; i < num_symbols; i++) { +#if LLVM_VERSION_GE(21, 0) + auto GUID = + GlobalValue::getGUIDAssumingExternalLinkage(preserved_symbols[i]); +#else auto GUID = GlobalValue::getGUID(preserved_symbols[i]); +#endif Ret->GUIDPreservedSymbols.insert(GUID); } @@ -1684,11 +1695,11 @@ extern "C" void LLVMRustComputeLTOCacheKey(RustStringRef KeyOut, // Based on the 'InProcessThinBackend' constructor in LLVM #if LLVM_VERSION_GE(21, 0) for (auto &Name : Data->Index.cfiFunctionDefs().symbols()) - CfiFunctionDefs.insert( - GlobalValue::getGUID(GlobalValue::dropLLVMManglingEscape(Name))); + CfiFunctionDefs.insert(GlobalValue::getGUIDAssumingExternalLinkage( + GlobalValue::dropLLVMManglingEscape(Name))); for (auto &Name : Data->Index.cfiFunctionDecls().symbols()) - CfiFunctionDecls.insert( - GlobalValue::getGUID(GlobalValue::dropLLVMManglingEscape(Name))); + CfiFunctionDecls.insert(GlobalValue::getGUIDAssumingExternalLinkage( + GlobalValue::dropLLVMManglingEscape(Name))); #else for (auto &Name : Data->Index.cfiFunctionDefs()) CfiFunctionDefs.insert( diff --git a/compiler/rustc_llvm/llvm-wrapper/RustWrapper.cpp b/compiler/rustc_llvm/llvm-wrapper/RustWrapper.cpp index 53df59930f4fd..72369ab7b692a 100644 --- a/compiler/rustc_llvm/llvm-wrapper/RustWrapper.cpp +++ b/compiler/rustc_llvm/llvm-wrapper/RustWrapper.cpp @@ -384,6 +384,12 @@ static inline void AddAttributes(T *t, unsigned Index, LLVMAttributeRef *Attrs, t->setAttributes(PALNew); } +extern "C" bool LLVMRustHasAttributeAtIndex(LLVMValueRef Fn, unsigned Index, + LLVMRustAttributeKind RustAttr) { + Function *F = unwrap(Fn); + return F->hasParamAttribute(Index, fromRust(RustAttr)); +} + extern "C" void LLVMRustAddFunctionAttributes(LLVMValueRef Fn, unsigned Index, LLVMAttributeRef *Attrs, size_t AttrsLen) { @@ -467,12 +473,8 @@ extern "C" LLVMAttributeRef LLVMRustCreateRangeAttribute(LLVMContextRef C, unsigned NumBits, const uint64_t LowerWords[], const uint64_t UpperWords[]) { -#if LLVM_VERSION_GE(19, 0) return LLVMCreateConstantRangeAttribute(C, Attribute::Range, NumBits, LowerWords, UpperWords); -#else - report_fatal_error("LLVM 19.0 is required for Range Attribute"); -#endif } // These values **must** match ffi::AllocKindFlags. @@ -636,6 +638,10 @@ static InlineAsm::AsmDialect fromRust(LLVMRustAsmDialect Dialect) { } } +extern "C" uint64_t LLVMRustGetArrayNumElements(LLVMTypeRef Ty) { + return unwrap(Ty)->getArrayNumElements(); +} + extern "C" LLVMValueRef LLVMRustInlineAsm(LLVMTypeRef Ty, char *AsmString, size_t AsmStringLen, char *Constraints, size_t ConstraintsLen, @@ -967,6 +973,27 @@ extern "C" LLVMMetadataRef LLVMRustDIGetInstMetadata(LLVMValueRef x) { return nullptr; } +extern "C" void +LLVMRustRemoveEnumAttributeAtIndex(LLVMValueRef F, size_t index, + LLVMRustAttributeKind RustAttr) { + LLVMRemoveEnumAttributeAtIndex(F, index, fromRust(RustAttr)); +} + +extern "C" bool LLVMRustHasFnAttribute(LLVMValueRef F, const char *Name, + size_t NameLen) { + if (auto *Fn = dyn_cast(unwrap(F))) { + return Fn->hasFnAttribute(StringRef(Name, NameLen)); + } + return false; +} + +extern "C" void LLVMRustRemoveFnAttribute(LLVMValueRef Fn, const char *Name, + size_t NameLen) { + if (auto *F = dyn_cast(unwrap(Fn))) { + F->removeFnAttr(StringRef(Name, NameLen)); + } +} + extern "C" void LLVMRustGlobalAddMetadata(LLVMValueRef Global, unsigned Kind, LLVMMetadataRef MD) { unwrap(Global)->addMetadata(Kind, *unwrap(MD)); @@ -1591,43 +1618,6 @@ extern "C" LLVMValueRef LLVMRustBuildMemSet(LLVMBuilderRef B, LLVMValueRef Dst, MaybeAlign(DstAlign), IsVolatile)); } -// Polyfill for `LLVMBuildCallBr`, which was added in LLVM 19. -// -// FIXME: Remove when Rust's minimum supported LLVM version reaches 19. -#if LLVM_VERSION_LT(19, 0) -DEFINE_SIMPLE_CONVERSION_FUNCTIONS(OperandBundleDef, LLVMOperandBundleRef) - -extern "C" LLVMValueRef -LLVMBuildCallBr(LLVMBuilderRef B, LLVMTypeRef Ty, LLVMValueRef Fn, - LLVMBasicBlockRef DefaultDest, LLVMBasicBlockRef *IndirectDests, - unsigned NumIndirectDests, LLVMValueRef *Args, unsigned NumArgs, - LLVMOperandBundleRef *Bundles, unsigned NumBundles, - const char *Name) { - Value *Callee = unwrap(Fn); - FunctionType *FTy = unwrap(Ty); - - // FIXME: Is there a way around this? - std::vector IndirectDestsUnwrapped; - IndirectDestsUnwrapped.reserve(NumIndirectDests); - for (unsigned i = 0; i < NumIndirectDests; ++i) { - IndirectDestsUnwrapped.push_back(unwrap(IndirectDests[i])); - } - - // FIXME: Is there a way around this? - SmallVector OpBundles; - OpBundles.reserve(NumBundles); - for (unsigned i = 0; i < NumBundles; ++i) { - OpBundles.push_back(*unwrap(Bundles[i])); - } - - return wrap( - unwrap(B)->CreateCallBr(FTy, Callee, unwrap(DefaultDest), - ArrayRef(IndirectDestsUnwrapped), - ArrayRef(unwrap(Args), NumArgs), - ArrayRef(OpBundles), Name)); -} -#endif - extern "C" void LLVMRustPositionBuilderAtStart(LLVMBuilderRef B, LLVMBasicBlockRef BB) { auto Point = unwrap(BB)->getFirstInsertionPt(); @@ -1771,24 +1761,6 @@ extern "C" LLVMValueRef LLVMRustBuildMaxNum(LLVMBuilderRef B, LLVMValueRef LHS, return wrap(unwrap(B)->CreateMaxNum(unwrap(LHS), unwrap(RHS))); } -#if LLVM_VERSION_LT(19, 0) -enum { - LLVMGEPFlagInBounds = (1 << 0), - LLVMGEPFlagNUSW = (1 << 1), - LLVMGEPFlagNUW = (1 << 2), -}; -extern "C" LLVMValueRef -LLVMBuildGEPWithNoWrapFlags(LLVMBuilderRef B, LLVMTypeRef Ty, - LLVMValueRef Pointer, LLVMValueRef *Indices, - unsigned NumIndices, const char *Name, - unsigned NoWrapFlags) { - if (NoWrapFlags & LLVMGEPFlagInBounds) - return LLVMBuildInBoundsGEP2(B, Ty, Pointer, Indices, NumIndices, Name); - else - return LLVMBuildGEP2(B, Ty, Pointer, Indices, NumIndices, Name); -} -#endif - // Transfers ownership of DiagnosticHandler unique_ptr to the caller. extern "C" DiagnosticHandler * LLVMRustContextGetDiagnosticHandler(LLVMContextRef C) { @@ -1856,11 +1828,7 @@ extern "C" void LLVMRustContextConfigureDiagnosticHandler( } } if (DiagnosticHandlerCallback) { -#if LLVM_VERSION_GE(19, 0) DiagnosticHandlerCallback(&DI, DiagnosticHandlerContext); -#else - DiagnosticHandlerCallback(DI, DiagnosticHandlerContext); -#endif return true; } return false; @@ -2008,21 +1976,3 @@ extern "C" void LLVMRustSetNoSanitizeHWAddress(LLVMValueRef Global) { MD.NoHWAddress = true; GV.setSanitizerMetadata(MD); } - -// Operations on composite constants. -// These are clones of LLVM api functions that will become available in future -// releases. They can be removed once Rust's minimum supported LLVM version -// supports them. See https://github.com/rust-lang/rust/issues/121868 See -// https://llvm.org/doxygen/group__LLVMCCoreValueConstantComposite.html - -// FIXME: Remove when Rust's minimum supported LLVM version reaches 19. -// https://github.com/llvm/llvm-project/commit/e1405e4f71c899420ebf8262d5e9745598419df8 -#if LLVM_VERSION_LT(19, 0) -extern "C" LLVMValueRef LLVMConstStringInContext2(LLVMContextRef C, - const char *Str, - size_t Length, - bool DontNullTerminate) { - return wrap(ConstantDataArray::getString(*unwrap(C), StringRef(Str, Length), - !DontNullTerminate)); -} -#endif diff --git a/compiler/rustc_llvm/src/lib.rs b/compiler/rustc_llvm/src/lib.rs index 68058250a2671..ed5edeef1617d 100644 --- a/compiler/rustc_llvm/src/lib.rs +++ b/compiler/rustc_llvm/src/lib.rs @@ -229,6 +229,7 @@ pub fn initialize_available_targets() { LLVMInitializeXtensaTargetInfo, LLVMInitializeXtensaTarget, LLVMInitializeXtensaTargetMC, + LLVMInitializeXtensaAsmPrinter, LLVMInitializeXtensaAsmParser ); init_target!( diff --git a/compiler/rustc_log/src/lib.rs b/compiler/rustc_log/src/lib.rs index 49dd388f14cc8..1bb502ca3d062 100644 --- a/compiler/rustc_log/src/lib.rs +++ b/compiler/rustc_log/src/lib.rs @@ -37,6 +37,7 @@ use std::env::{self, VarError}; use std::fmt::{self, Display}; use std::io::{self, IsTerminal}; +use tracing::dispatcher::SetGlobalDefaultError; use tracing_core::{Event, Subscriber}; use tracing_subscriber::filter::{Directive, EnvFilter, LevelFilter}; use tracing_subscriber::fmt::FmtContext; @@ -131,10 +132,10 @@ pub fn init_logger(cfg: LoggerConfig) -> Result<(), Error> { .without_time() .event_format(BacktraceFormatter { backtrace_target }); let subscriber = subscriber.with(fmt_layer); - tracing::subscriber::set_global_default(subscriber).unwrap(); + tracing::subscriber::set_global_default(subscriber)?; } Err(_) => { - tracing::subscriber::set_global_default(subscriber).unwrap(); + tracing::subscriber::set_global_default(subscriber)?; } }; @@ -180,6 +181,7 @@ pub enum Error { InvalidColorValue(String), NonUnicodeColorValue, InvalidWraptree(String), + AlreadyInit(SetGlobalDefaultError), } impl std::error::Error for Error {} @@ -199,6 +201,13 @@ impl Display for Error { formatter, "invalid log WRAPTREE value '{value}': expected a non-negative integer", ), + Error::AlreadyInit(tracing_error) => Display::fmt(tracing_error, formatter), } } } + +impl From for Error { + fn from(tracing_error: SetGlobalDefaultError) -> Self { + Error::AlreadyInit(tracing_error) + } +} diff --git a/compiler/rustc_macros/src/diagnostics/mod.rs b/compiler/rustc_macros/src/diagnostics/mod.rs index 91398f1a9da9d..55228248188e5 100644 --- a/compiler/rustc_macros/src/diagnostics/mod.rs +++ b/compiler/rustc_macros/src/diagnostics/mod.rs @@ -55,8 +55,7 @@ use synstructure::Structure; /// /// See rustc dev guide for more examples on using the `#[derive(Diagnostic)]`: /// -pub(super) fn diagnostic_derive(mut s: Structure<'_>) -> TokenStream { - s.underscore_const(true); +pub(super) fn diagnostic_derive(s: Structure<'_>) -> TokenStream { DiagnosticDerive::new(s).into_tokens() } @@ -102,8 +101,7 @@ pub(super) fn diagnostic_derive(mut s: Structure<'_>) -> TokenStream { /// /// See rustc dev guide for more examples on using the `#[derive(LintDiagnostic)]`: /// -pub(super) fn lint_diagnostic_derive(mut s: Structure<'_>) -> TokenStream { - s.underscore_const(true); +pub(super) fn lint_diagnostic_derive(s: Structure<'_>) -> TokenStream { LintDiagnosticDerive::new(s).into_tokens() } @@ -153,7 +151,6 @@ pub(super) fn lint_diagnostic_derive(mut s: Structure<'_>) -> TokenStream { /// /// diag.subdiagnostic(RawIdentifierSuggestion { span, applicability, ident }); /// ``` -pub(super) fn subdiagnostic_derive(mut s: Structure<'_>) -> TokenStream { - s.underscore_const(true); +pub(super) fn subdiagnostic_derive(s: Structure<'_>) -> TokenStream { SubdiagnosticDerive::new().into_tokens(s) } diff --git a/compiler/rustc_macros/src/diagnostics/subdiagnostic.rs b/compiler/rustc_macros/src/diagnostics/subdiagnostic.rs index 909083d5e8652..bc9516b2e0c67 100644 --- a/compiler/rustc_macros/src/diagnostics/subdiagnostic.rs +++ b/compiler/rustc_macros/src/diagnostics/subdiagnostic.rs @@ -20,14 +20,12 @@ use crate::diagnostics::utils::{ /// The central struct for constructing the `add_to_diag` method from an annotated struct. pub(crate) struct SubdiagnosticDerive { diag: syn::Ident, - f: syn::Ident, } impl SubdiagnosticDerive { pub(crate) fn new() -> Self { let diag = format_ident!("diag"); - let f = format_ident!("f"); - Self { diag, f } + Self { diag } } pub(crate) fn into_tokens(self, mut structure: Structure<'_>) -> TokenStream { @@ -86,19 +84,16 @@ impl SubdiagnosticDerive { }; let diag = &self.diag; - let f = &self.f; // FIXME(edition_2024): Fix the `keyword_idents_2024` lint to not trigger here? #[allow(keyword_idents_2024)] let ret = structure.gen_impl(quote! { gen impl rustc_errors::Subdiagnostic for @Self { - fn add_to_diag_with<__G, __F>( + fn add_to_diag<__G>( self, #diag: &mut rustc_errors::Diag<'_, __G>, - #f: &__F ) where __G: rustc_errors::EmissionGuarantee, - __F: rustc_errors::SubdiagMessageOp<__G>, { #implementation } @@ -384,11 +379,10 @@ impl<'parent, 'a> SubdiagnosticDeriveVariantBuilder<'parent, 'a> { Ok(quote! {}) } "subdiagnostic" => { - let f = &self.parent.f; let diag = &self.parent.diag; let binding = &info.binding; self.has_subdiagnostic = true; - Ok(quote! { #binding.add_to_diag_with(#diag, #f); }) + Ok(quote! { #binding.add_to_diag(#diag); }) } _ => { let mut span_attrs = vec![]; @@ -531,12 +525,11 @@ impl<'parent, 'a> SubdiagnosticDeriveVariantBuilder<'parent, 'a> { let span_field = self.span_field.value_ref(); let diag = &self.parent.diag; - let f = &self.parent.f; let mut calls = TokenStream::new(); for (kind, slug, no_span) in kind_slugs { let message = format_ident!("__message"); calls.extend( - quote! { let #message = #f(#diag, crate::fluent_generated::#slug.into()); }, + quote! { let #message = #diag.eagerly_translate(crate::fluent_generated::#slug); }, ); let name = format_ident!( diff --git a/compiler/rustc_macros/src/hash_stable.rs b/compiler/rustc_macros/src/hash_stable.rs index 6b3210cad7be6..a6396ba687d11 100644 --- a/compiler/rustc_macros/src/hash_stable.rs +++ b/compiler/rustc_macros/src/hash_stable.rs @@ -74,8 +74,6 @@ fn hash_stable_derive_with_mode( HashStableMode::Generic | HashStableMode::NoContext => parse_quote!(__CTX), }; - s.underscore_const(true); - // no_context impl is able to derive by-field, which is closer to a perfect derive. s.add_bounds(match mode { HashStableMode::Normal | HashStableMode::Generic => synstructure::AddBounds::Generics, diff --git a/compiler/rustc_macros/src/lib.rs b/compiler/rustc_macros/src/lib.rs index edb25e799045a..62ca7ce3ca995 100644 --- a/compiler/rustc_macros/src/lib.rs +++ b/compiler/rustc_macros/src/lib.rs @@ -1,7 +1,7 @@ // tidy-alphabetical-start #![allow(rustc::default_hash_types)] +#![cfg_attr(bootstrap, feature(let_chains))] #![feature(if_let_guard)] -#![feature(let_chains)] #![feature(never_type)] #![feature(proc_macro_diagnostic)] #![feature(proc_macro_span)] diff --git a/compiler/rustc_macros/src/lift.rs b/compiler/rustc_macros/src/lift.rs index 341447f7e6f26..03ea396a42c75 100644 --- a/compiler/rustc_macros/src/lift.rs +++ b/compiler/rustc_macros/src/lift.rs @@ -4,7 +4,6 @@ use syn::parse_quote; pub(super) fn lift_derive(mut s: synstructure::Structure<'_>) -> proc_macro2::TokenStream { s.add_bounds(synstructure::AddBounds::Generics); s.bind_with(|_| synstructure::BindStyle::Move); - s.underscore_const(true); let tcx: syn::Lifetime = parse_quote!('tcx); let newtcx: syn::GenericParam = parse_quote!('__lifted); diff --git a/compiler/rustc_macros/src/query.rs b/compiler/rustc_macros/src/query.rs index 62bf34ad5adce..33fb13e23bf89 100644 --- a/compiler/rustc_macros/src/query.rs +++ b/compiler/rustc_macros/src/query.rs @@ -407,11 +407,23 @@ pub(super) fn rustc_queries(input: TokenStream) -> TokenStream { } TokenStream::from(quote! { + /// Higher-order macro that invokes the specified macro with a prepared + /// list of all query signatures (including modifiers). + /// + /// This allows multiple simpler macros to each have access to the list + /// of queries. #[macro_export] - macro_rules! rustc_query_append { - ($macro:ident! $( [$($other:tt)*] )?) => { + macro_rules! rustc_with_all_queries { + ( + // The macro to invoke once, on all queries (plus extras). + $macro:ident! + + // Within [], an optional list of extra "query" signatures to + // pass to the given macro, in addition to the actual queries. + $( [$($extra_fake_queries:tt)*] )? + ) => { $macro! { - $( $($other)* )? + $( $($extra_fake_queries)* )? #query_stream } } diff --git a/compiler/rustc_macros/src/serialize.rs b/compiler/rustc_macros/src/serialize.rs index 673e6cd618ff8..c7aaaf0da4679 100644 --- a/compiler/rustc_macros/src/serialize.rs +++ b/compiler/rustc_macros/src/serialize.rs @@ -12,7 +12,6 @@ pub(super) fn type_decodable_derive( let decoder_ty = quote! { __D }; s.add_impl_generic(parse_quote! { #decoder_ty: ::rustc_middle::ty::codec::TyDecoder<'tcx> }); s.add_bounds(synstructure::AddBounds::Fields); - s.underscore_const(true); decodable_body(s, decoder_ty) } @@ -26,7 +25,6 @@ pub(super) fn meta_decodable_derive( s.add_impl_generic(parse_quote! { '__a }); let decoder_ty = quote! { DecodeContext<'__a, 'tcx> }; s.add_bounds(synstructure::AddBounds::Generics); - s.underscore_const(true); decodable_body(s, decoder_ty) } @@ -35,7 +33,6 @@ pub(super) fn decodable_derive(mut s: synstructure::Structure<'_>) -> proc_macro let decoder_ty = quote! { __D }; s.add_impl_generic(parse_quote! { #decoder_ty: ::rustc_span::SpanDecoder }); s.add_bounds(synstructure::AddBounds::Generics); - s.underscore_const(true); decodable_body(s, decoder_ty) } @@ -46,13 +43,12 @@ pub(super) fn decodable_nocontext_derive( let decoder_ty = quote! { __D }; s.add_impl_generic(parse_quote! { #decoder_ty: ::rustc_serialize::Decoder }); s.add_bounds(synstructure::AddBounds::Fields); - s.underscore_const(true); decodable_body(s, decoder_ty) } fn decodable_body( - mut s: synstructure::Structure<'_>, + s: synstructure::Structure<'_>, decoder_ty: TokenStream, ) -> proc_macro2::TokenStream { if let syn::Data::Union(_) = s.ast().data { @@ -98,7 +94,6 @@ fn decodable_body( } } }; - s.underscore_const(true); s.bound_impl( quote!(::rustc_serialize::Decodable<#decoder_ty>), @@ -133,7 +128,6 @@ pub(super) fn type_encodable_derive( } s.add_impl_generic(parse_quote! { #encoder_ty: ::rustc_middle::ty::codec::TyEncoder<'tcx> }); s.add_bounds(synstructure::AddBounds::Fields); - s.underscore_const(true); encodable_body(s, encoder_ty, false) } @@ -147,7 +141,6 @@ pub(super) fn meta_encodable_derive( s.add_impl_generic(parse_quote! { '__a }); let encoder_ty = quote! { EncodeContext<'__a, 'tcx> }; s.add_bounds(synstructure::AddBounds::Generics); - s.underscore_const(true); encodable_body(s, encoder_ty, true) } @@ -156,7 +149,6 @@ pub(super) fn encodable_derive(mut s: synstructure::Structure<'_>) -> proc_macro let encoder_ty = quote! { __E }; s.add_impl_generic(parse_quote! { #encoder_ty: ::rustc_span::SpanEncoder }); s.add_bounds(synstructure::AddBounds::Generics); - s.underscore_const(true); encodable_body(s, encoder_ty, false) } @@ -167,7 +159,6 @@ pub(super) fn encodable_nocontext_derive( let encoder_ty = quote! { __E }; s.add_impl_generic(parse_quote! { #encoder_ty: ::rustc_serialize::Encoder }); s.add_bounds(synstructure::AddBounds::Fields); - s.underscore_const(true); encodable_body(s, encoder_ty, false) } @@ -181,7 +172,6 @@ fn encodable_body( panic!("cannot derive on union") } - s.underscore_const(true); s.bind_with(|binding| { // Handle the lack of a blanket reference impl. if let syn::Type::Reference(_) = binding.ast().ty { diff --git a/compiler/rustc_macros/src/symbols.rs b/compiler/rustc_macros/src/symbols.rs index 37200f62eb5a2..0400de622740e 100644 --- a/compiler/rustc_macros/src/symbols.rs +++ b/compiler/rustc_macros/src/symbols.rs @@ -142,13 +142,13 @@ pub(super) fn symbols(input: TokenStream) -> TokenStream { output } -struct Preinterned { +struct Predefined { idx: u32, span_of_name: Span, } struct Entries { - map: HashMap, + map: HashMap, } impl Entries { @@ -163,7 +163,7 @@ impl Entries { prev.idx } else { let idx = self.len(); - self.map.insert(s.to_string(), Preinterned { idx, span_of_name: span }); + self.map.insert(s.to_string(), Predefined { idx, span_of_name: span }); idx } } @@ -295,10 +295,14 @@ fn symbols_with_errors(input: TokenStream) -> (TokenStream, Vec) { } let symbol_digits_base = entries.map["0"].idx; - let preinterned_symbols_count = entries.len(); + let predefined_symbols_count = entries.len(); let output = quote! { const SYMBOL_DIGITS_BASE: u32 = #symbol_digits_base; - const PREINTERNED_SYMBOLS_COUNT: u32 = #preinterned_symbols_count; + + /// The number of predefined symbols; this is the the first index for + /// extra pre-interned symbols in an Interner created via + /// [`Interner::with_extra_symbols`]. + pub const PREDEFINED_SYMBOLS_COUNT: u32 = #predefined_symbols_count; #[doc(hidden)] #[allow(non_upper_case_globals)] @@ -315,10 +319,13 @@ fn symbols_with_errors(input: TokenStream) -> (TokenStream, Vec) { } impl Interner { - pub(crate) fn fresh() -> Self { - Interner::prefill(&[ - #prefill_stream - ]) + /// Creates an `Interner` with the predefined symbols from the `symbols!` macro and + /// any extra symbols provided by external drivers such as Clippy + pub(crate) fn with_extra_symbols(extra_symbols: &[&'static str]) -> Self { + Interner::prefill( + &[#prefill_stream], + extra_symbols, + ) } } }; diff --git a/compiler/rustc_macros/src/type_foldable.rs b/compiler/rustc_macros/src/type_foldable.rs index c4f584dca430a..91b747f18569d 100644 --- a/compiler/rustc_macros/src/type_foldable.rs +++ b/compiler/rustc_macros/src/type_foldable.rs @@ -6,35 +6,19 @@ pub(super) fn type_foldable_derive(mut s: synstructure::Structure<'_>) -> proc_m panic!("cannot derive on union") } - s.underscore_const(true); - if !s.ast().generics.lifetimes().any(|lt| lt.lifetime.ident == "tcx") { s.add_impl_generic(parse_quote! { 'tcx }); } s.add_bounds(synstructure::AddBounds::Generics); s.bind_with(|_| synstructure::BindStyle::Move); - let body_fold = s.each_variant(|vi| { + let try_body_fold = s.each_variant(|vi| { let bindings = vi.bindings(); vi.construct(|_, index| { let bind = &bindings[index]; - let mut fixed = false; - // retain value of fields with #[type_foldable(identity)] - bind.ast().attrs.iter().for_each(|x| { - if !x.path().is_ident("type_foldable") { - return; - } - let _ = x.parse_nested_meta(|nested| { - if nested.path.is_ident("identity") { - fixed = true; - } - Ok(()) - }); - }); - - if fixed { + if has_ignore_attr(&bind.ast().attrs, "type_foldable", "identity") { bind.to_token_stream() } else { quote! { @@ -44,6 +28,22 @@ pub(super) fn type_foldable_derive(mut s: synstructure::Structure<'_>) -> proc_m }) }); + let body_fold = s.each_variant(|vi| { + let bindings = vi.bindings(); + vi.construct(|_, index| { + let bind = &bindings[index]; + + // retain value of fields with #[type_foldable(identity)] + if has_ignore_attr(&bind.ast().attrs, "type_foldable", "identity") { + bind.to_token_stream() + } else { + quote! { + ::rustc_middle::ty::TypeFoldable::fold_with(#bind, __folder) + } + } + }) + }); + s.bound_impl( quote!(::rustc_middle::ty::TypeFoldable<::rustc_middle::ty::TyCtxt<'tcx>>), quote! { @@ -51,8 +51,32 @@ pub(super) fn type_foldable_derive(mut s: synstructure::Structure<'_>) -> proc_m self, __folder: &mut __F ) -> Result { - Ok(match self { #body_fold }) + Ok(match self { #try_body_fold }) + } + + fn fold_with<__F: ::rustc_middle::ty::TypeFolder<::rustc_middle::ty::TyCtxt<'tcx>>>( + self, + __folder: &mut __F + ) -> Self { + match self { #body_fold } } }, ) } + +fn has_ignore_attr(attrs: &[syn::Attribute], name: &'static str, meta: &'static str) -> bool { + let mut ignored = false; + attrs.iter().for_each(|attr| { + if !attr.path().is_ident(name) { + return; + } + let _ = attr.parse_nested_meta(|nested| { + if nested.path.is_ident(meta) { + ignored = true; + } + Ok(()) + }); + }); + + ignored +} diff --git a/compiler/rustc_macros/src/type_visitable.rs b/compiler/rustc_macros/src/type_visitable.rs index fb37e1a39edbe..f99c5113a6053 100644 --- a/compiler/rustc_macros/src/type_visitable.rs +++ b/compiler/rustc_macros/src/type_visitable.rs @@ -8,8 +8,6 @@ pub(super) fn type_visitable_derive( panic!("cannot derive on union") } - s.underscore_const(true); - // ignore fields with #[type_visitable(ignore)] s.filter(|bi| { let mut ignored = false; diff --git a/compiler/rustc_metadata/Cargo.toml b/compiler/rustc_metadata/Cargo.toml index 08dcc3d519a2b..26878c488b74a 100644 --- a/compiler/rustc_metadata/Cargo.toml +++ b/compiler/rustc_metadata/Cargo.toml @@ -26,7 +26,7 @@ rustc_serialize = { path = "../rustc_serialize" } rustc_session = { path = "../rustc_session" } rustc_span = { path = "../rustc_span" } rustc_target = { path = "../rustc_target" } -tempfile = "3.2" +tempfile = "3.7.1" tracing = "0.1" # tidy-alphabetical-end diff --git a/compiler/rustc_metadata/messages.ftl b/compiler/rustc_metadata/messages.ftl index 9adbcabcf4506..d997ba198aca8 100644 --- a/compiler/rustc_metadata/messages.ftl +++ b/compiler/rustc_metadata/messages.ftl @@ -97,6 +97,10 @@ metadata_found_staticlib = found staticlib `{$crate_name}` instead of rlib or dylib{$add_info} .help = please recompile that crate using --crate-type lib +metadata_full_metadata_not_found = + only metadata stub found for `{$flavor}` dependency `{$crate_name}` + please provide path to the corresponding .rmeta file with full metadata + metadata_global_alloc_required = no global memory allocator found but one is required; link to std or add `#[global_allocator]` to a static item that implements the GlobalAlloc trait diff --git a/compiler/rustc_metadata/src/creader.rs b/compiler/rustc_metadata/src/creader.rs index b7f13e0afdcde..07fb2de8a3e0c 100644 --- a/compiler/rustc_metadata/src/creader.rs +++ b/compiler/rustc_metadata/src/creader.rs @@ -148,7 +148,7 @@ impl<'a> std::fmt::Debug for CrateDump<'a> { writeln!(fmt, " hash: {}", data.hash())?; writeln!(fmt, " reqd: {:?}", data.dep_kind())?; writeln!(fmt, " priv: {:?}", data.is_private_dep())?; - let CrateSource { dylib, rlib, rmeta } = data.source(); + let CrateSource { dylib, rlib, rmeta, sdylib_interface } = data.source(); if let Some(dylib) = dylib { writeln!(fmt, " dylib: {}", dylib.0.display())?; } @@ -158,6 +158,9 @@ impl<'a> std::fmt::Debug for CrateDump<'a> { if let Some(rmeta) = rmeta { writeln!(fmt, " rmeta: {}", rmeta.0.display())?; } + if let Some(sdylib_interface) = sdylib_interface { + writeln!(fmt, " sdylib interface: {}", sdylib_interface.0.display())?; + } } Ok(()) } @@ -340,7 +343,7 @@ impl CStore { } let level = tcx .lint_level_at_node(lint::builtin::UNUSED_CRATE_DEPENDENCIES, rustc_hir::CRATE_HIR_ID) - .0; + .level; if level != lint::Level::Allow { let unused_externs = self.unused_externs.iter().map(|ident| ident.to_ident_string()).collect::>(); @@ -1032,14 +1035,19 @@ impl<'a, 'tcx> CrateLoader<'a, 'tcx> { } fn inject_allocator_crate(&mut self, krate: &ast::Crate) { - self.cstore.has_global_allocator = match &*global_allocator_spans(krate) { - [span1, span2, ..] => { - self.dcx().emit_err(errors::NoMultipleGlobalAlloc { span2: *span2, span1: *span1 }); - true - } - spans => !spans.is_empty(), - }; - self.cstore.has_alloc_error_handler = match &*alloc_error_handler_spans(krate) { + self.cstore.has_global_allocator = + match &*fn_spans(krate, Symbol::intern(&global_fn_name(sym::alloc))) { + [span1, span2, ..] => { + self.dcx() + .emit_err(errors::NoMultipleGlobalAlloc { span2: *span2, span1: *span1 }); + true + } + spans => !spans.is_empty(), + }; + self.cstore.has_alloc_error_handler = match &*fn_spans( + krate, + Symbol::intern(alloc_error_handler_name(AllocatorKind::Global)), + ) { [span1, span2, ..] => { self.dcx() .emit_err(errors::NoMultipleAllocErrorHandler { span2: *span2, span1: *span1 }); @@ -1310,17 +1318,14 @@ impl<'a, 'tcx> CrateLoader<'a, 'tcx> { definitions: &Definitions, ) -> Option { match item.kind { - ast::ItemKind::ExternCrate(orig_name) => { - debug!( - "resolving extern crate stmt. ident: {} orig_name: {:?}", - item.ident, orig_name - ); + ast::ItemKind::ExternCrate(orig_name, ident) => { + debug!("resolving extern crate stmt. ident: {} orig_name: {:?}", ident, orig_name); let name = match orig_name { Some(orig_name) => { validate_crate_name(self.sess, orig_name, Some(item.span)); orig_name } - None => item.ident.name, + None => ident.name, }; let dep_kind = if attr::contains_name(&item.attrs, sym::no_link) { CrateDepKind::MacrosOnly @@ -1368,36 +1373,15 @@ impl<'a, 'tcx> CrateLoader<'a, 'tcx> { } } -fn global_allocator_spans(krate: &ast::Crate) -> Vec { - struct Finder { - name: Symbol, - spans: Vec, - } - impl<'ast> visit::Visitor<'ast> for Finder { - fn visit_item(&mut self, item: &'ast ast::Item) { - if item.ident.name == self.name - && attr::contains_name(&item.attrs, sym::rustc_std_internal_symbol) - { - self.spans.push(item.span); - } - visit::walk_item(self, item) - } - } - - let name = Symbol::intern(&global_fn_name(sym::alloc)); - let mut f = Finder { name, spans: Vec::new() }; - visit::walk_crate(&mut f, krate); - f.spans -} - -fn alloc_error_handler_spans(krate: &ast::Crate) -> Vec { +fn fn_spans(krate: &ast::Crate, name: Symbol) -> Vec { struct Finder { name: Symbol, spans: Vec, } impl<'ast> visit::Visitor<'ast> for Finder { fn visit_item(&mut self, item: &'ast ast::Item) { - if item.ident.name == self.name + if let Some(ident) = item.kind.ident() + && ident.name == self.name && attr::contains_name(&item.attrs, sym::rustc_std_internal_symbol) { self.spans.push(item.span); @@ -1406,7 +1390,6 @@ fn alloc_error_handler_spans(krate: &ast::Crate) -> Vec { } } - let name = Symbol::intern(alloc_error_handler_name(AllocatorKind::Global)); let mut f = Finder { name, spans: Vec::new() }; visit::walk_crate(&mut f, krate); f.spans diff --git a/compiler/rustc_metadata/src/dependency_format.rs b/compiler/rustc_metadata/src/dependency_format.rs index be31aa629c86e..fcae33c73c9c0 100644 --- a/compiler/rustc_metadata/src/dependency_format.rs +++ b/compiler/rustc_metadata/src/dependency_format.rs @@ -88,45 +88,42 @@ fn calculate_type(tcx: TyCtxt<'_>, ty: CrateType) -> DependencyList { return IndexVec::new(); } - let preferred_linkage = match ty { - // Generating a dylib without `-C prefer-dynamic` means that we're going - // to try to eagerly statically link all dependencies. This is normally - // done for end-product dylibs, not intermediate products. - // - // Treat cdylibs and staticlibs similarly. If `-C prefer-dynamic` is set, - // the caller may be code-size conscious, but without it, it makes sense - // to statically link a cdylib or staticlib. For staticlibs we use - // `-Z staticlib-prefer-dynamic` for now. This may be merged into - // `-C prefer-dynamic` in the future. - CrateType::Dylib | CrateType::Cdylib => { - if sess.opts.cg.prefer_dynamic { - Linkage::Dynamic - } else { - Linkage::Static + let preferred_linkage = + match ty { + // Generating a dylib without `-C prefer-dynamic` means that we're going + // to try to eagerly statically link all dependencies. This is normally + // done for end-product dylibs, not intermediate products. + // + // Treat cdylibs and staticlibs similarly. If `-C prefer-dynamic` is set, + // the caller may be code-size conscious, but without it, it makes sense + // to statically link a cdylib or staticlib. For staticlibs we use + // `-Z staticlib-prefer-dynamic` for now. This may be merged into + // `-C prefer-dynamic` in the future. + CrateType::Dylib | CrateType::Cdylib | CrateType::Sdylib => { + if sess.opts.cg.prefer_dynamic { Linkage::Dynamic } else { Linkage::Static } } - } - CrateType::Staticlib => { - if sess.opts.unstable_opts.staticlib_prefer_dynamic { - Linkage::Dynamic - } else { - Linkage::Static + CrateType::Staticlib => { + if sess.opts.unstable_opts.staticlib_prefer_dynamic { + Linkage::Dynamic + } else { + Linkage::Static + } } - } - // If the global prefer_dynamic switch is turned off, or the final - // executable will be statically linked, prefer static crate linkage. - CrateType::Executable if !sess.opts.cg.prefer_dynamic || sess.crt_static(Some(ty)) => { - Linkage::Static - } - CrateType::Executable => Linkage::Dynamic, + // If the global prefer_dynamic switch is turned off, or the final + // executable will be statically linked, prefer static crate linkage. + CrateType::Executable if !sess.opts.cg.prefer_dynamic || sess.crt_static(Some(ty)) => { + Linkage::Static + } + CrateType::Executable => Linkage::Dynamic, - // proc-macro crates are mostly cdylibs, but we also need metadata. - CrateType::ProcMacro => Linkage::Static, + // proc-macro crates are mostly cdylibs, but we also need metadata. + CrateType::ProcMacro => Linkage::Static, - // No linkage happens with rlibs, we just needed the metadata (which we - // got long ago), so don't bother with anything. - CrateType::Rlib => Linkage::NotLinked, - }; + // No linkage happens with rlibs, we just needed the metadata (which we + // got long ago), so don't bother with anything. + CrateType::Rlib => Linkage::NotLinked, + }; let mut unavailable_as_static = Vec::new(); @@ -165,7 +162,9 @@ fn calculate_type(tcx: TyCtxt<'_>, ty: CrateType) -> DependencyList { let all_dylibs = || { tcx.crates(()).iter().filter(|&&cnum| { - !tcx.dep_kind(cnum).macros_only() && tcx.used_crate_source(cnum).dylib.is_some() + !tcx.dep_kind(cnum).macros_only() + && (tcx.used_crate_source(cnum).dylib.is_some() + || tcx.used_crate_source(cnum).sdylib_interface.is_some()) }) }; @@ -273,7 +272,7 @@ fn calculate_type(tcx: TyCtxt<'_>, ty: CrateType) -> DependencyList { match *kind { Linkage::NotLinked | Linkage::IncludedFromDylib => {} Linkage::Static if src.rlib.is_some() => continue, - Linkage::Dynamic if src.dylib.is_some() => continue, + Linkage::Dynamic if src.dylib.is_some() || src.sdylib_interface.is_some() => continue, kind => { let kind = match kind { Linkage::Static => "rlib", diff --git a/compiler/rustc_metadata/src/eii.rs b/compiler/rustc_metadata/src/eii.rs new file mode 100644 index 0000000000000..e25415e2d5d48 --- /dev/null +++ b/compiler/rustc_metadata/src/eii.rs @@ -0,0 +1,40 @@ +use rustc_attr_parsing::{AttributeKind, EIIDecl, EIIImpl, find_attr}; +use rustc_data_structures::fx::FxIndexMap; +use rustc_hir::def_id::DefId; +use rustc_middle::query::LocalCrate; +use rustc_middle::ty::TyCtxt; + +type EIIMap = FxIndexMap< + DefId, // the defid of the macro that declared the eii + ( + EIIDecl, // the corresponding declaration + FxIndexMap, // all the given implementations, indexed by defid. + // We expect there to be only one, but collect them all to give errors if there are more + // (or if there are none) in the final crate we build. + ), +>; + +pub(crate) fn collect<'tcx>(tcx: TyCtxt<'tcx>, LocalCrate: LocalCrate) -> EIIMap { + let mut eiis = EIIMap::default(); + + // now we've seen all EIIs declared and maybe even implemented in dependencies. Let's look at + // the current crate! + for id in tcx.hir_crate_items(()).definitions() { + for i in + find_attr!(tcx.get_all_attrs(id), AttributeKind::EiiImpl(e) => e).into_iter().flatten() + { + eiis.entry(i.eii_macro) + .or_insert_with(|| { + // find the decl for this one if it wasn't in yet (maybe it's from the local crate? not very useful but not illegal) + (find_attr!(tcx.get_all_attrs(i.eii_macro), AttributeKind::EiiMacroFor(d) => *d).unwrap(), Default::default()) + }).1.insert(id.into(), *i); + } + + // if we find a new declaration, add it to the list without a known implementation + if let Some(decl) = find_attr!(tcx.get_all_attrs(id), AttributeKind::EiiMacroFor(d) => *d) { + eiis.entry(id.into()).or_insert((decl, Default::default())); + } + } + + eiis +} diff --git a/compiler/rustc_metadata/src/errors.rs b/compiler/rustc_metadata/src/errors.rs index 0c54628598c48..c45daeda85dbc 100644 --- a/compiler/rustc_metadata/src/errors.rs +++ b/compiler/rustc_metadata/src/errors.rs @@ -525,6 +525,15 @@ impl Diagnostic<'_, G> for MultipleCandidates { } } +#[derive(Diagnostic)] +#[diag(metadata_full_metadata_not_found)] +pub(crate) struct FullMetadataNotFound { + #[primary_span] + pub span: Span, + pub flavor: CrateFlavor, + pub crate_name: Symbol, +} + #[derive(Diagnostic)] #[diag(metadata_symbol_conflicts_current, code = E0519)] pub struct SymbolConflictsCurrent { diff --git a/compiler/rustc_metadata/src/fs.rs b/compiler/rustc_metadata/src/fs.rs index 4450d050c8e1f..e57534b847ef0 100644 --- a/compiler/rustc_metadata/src/fs.rs +++ b/compiler/rustc_metadata/src/fs.rs @@ -2,11 +2,11 @@ use std::path::{Path, PathBuf}; use std::{fs, io}; use rustc_data_structures::temp_dir::MaybeTempDir; +use rustc_fs_util::TempDirBuilder; use rustc_middle::ty::TyCtxt; -use rustc_session::config::{OutFileName, OutputType}; +use rustc_session::config::{CrateType, OutFileName, OutputType}; use rustc_session::output::filename_for_metadata; use rustc_session::{MetadataKind, Session}; -use tempfile::Builder as TempFileBuilder; use crate::errors::{ BinaryOutputToTty, FailedCopyToStdout, FailedCreateEncodedMetadata, FailedCreateFile, @@ -45,12 +45,19 @@ pub fn encode_and_write_metadata(tcx: TyCtxt<'_>) -> (EncodedMetadata, bool) { // final destination, with an `fs::rename` call. In order for the rename to // always succeed, the temporary file needs to be on the same filesystem, // which is why we create it inside the output directory specifically. - let metadata_tmpdir = TempFileBuilder::new() + let metadata_tmpdir = TempDirBuilder::new() .prefix("rmeta") .tempdir_in(out_filename.parent().unwrap_or_else(|| Path::new(""))) .unwrap_or_else(|err| tcx.dcx().emit_fatal(FailedCreateTempdir { err })); let metadata_tmpdir = MaybeTempDir::new(metadata_tmpdir, tcx.sess.opts.cg.save_temps); - let metadata_filename = metadata_tmpdir.as_ref().join(METADATA_FILENAME); + let metadata_filename = metadata_tmpdir.as_ref().join("full.rmeta"); + let metadata_stub_filename = if !tcx.sess.opts.unstable_opts.embed_metadata + && !tcx.crate_types().contains(&CrateType::ProcMacro) + { + Some(metadata_tmpdir.as_ref().join("stub.rmeta")) + } else { + None + }; // Always create a file at `metadata_filename`, even if we have nothing to write to it. // This simplifies the creation of the output `out_filename` when requested. @@ -60,9 +67,15 @@ pub fn encode_and_write_metadata(tcx: TyCtxt<'_>) -> (EncodedMetadata, bool) { std::fs::File::create(&metadata_filename).unwrap_or_else(|err| { tcx.dcx().emit_fatal(FailedCreateFile { filename: &metadata_filename, err }); }); + if let Some(metadata_stub_filename) = &metadata_stub_filename { + std::fs::File::create(metadata_stub_filename).unwrap_or_else(|err| { + tcx.dcx() + .emit_fatal(FailedCreateFile { filename: &metadata_stub_filename, err }); + }); + } } MetadataKind::Uncompressed | MetadataKind::Compressed => { - encode_metadata(tcx, &metadata_filename); + encode_metadata(tcx, &metadata_filename, metadata_stub_filename.as_deref()) } }; @@ -100,9 +113,10 @@ pub fn encode_and_write_metadata(tcx: TyCtxt<'_>) -> (EncodedMetadata, bool) { // Load metadata back to memory: codegen may need to include it in object files. let metadata = - EncodedMetadata::from_path(metadata_filename, metadata_tmpdir).unwrap_or_else(|err| { - tcx.dcx().emit_fatal(FailedCreateEncodedMetadata { err }); - }); + EncodedMetadata::from_path(metadata_filename, metadata_stub_filename, metadata_tmpdir) + .unwrap_or_else(|err| { + tcx.dcx().emit_fatal(FailedCreateEncodedMetadata { err }); + }); let need_metadata_module = metadata_kind == MetadataKind::Compressed; diff --git a/compiler/rustc_metadata/src/lib.rs b/compiler/rustc_metadata/src/lib.rs index 028d5c8b60996..aa0acb5568e0b 100644 --- a/compiler/rustc_metadata/src/lib.rs +++ b/compiler/rustc_metadata/src/lib.rs @@ -1,6 +1,6 @@ // tidy-alphabetical-start #![allow(internal_features)] -#![cfg_attr(doc, recursion_limit = "256")] // FIXME(nnethercote): will be removed by #124141 +#![cfg_attr(bootstrap, feature(let_chains))] #![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")] #![doc(rust_logo)] #![feature(coroutines)] @@ -9,7 +9,6 @@ #![feature(file_buffered)] #![feature(if_let_guard)] #![feature(iter_from_coroutine)] -#![feature(let_chains)] #![feature(macro_metavar_expr)] #![feature(min_specialization)] #![feature(never_type)] @@ -23,6 +22,7 @@ extern crate proc_macro; pub use rmeta::provide; mod dependency_format; +mod eii; mod foreign_modules; mod native_libs; mod rmeta; diff --git a/compiler/rustc_metadata/src/locator.rs b/compiler/rustc_metadata/src/locator.rs index d5dd5059aacc6..10123cb9a9ddb 100644 --- a/compiler/rustc_metadata/src/locator.rs +++ b/compiler/rustc_metadata/src/locator.rs @@ -220,7 +220,7 @@ use std::{cmp, fmt}; use rustc_data_structures::fx::{FxHashSet, FxIndexMap}; use rustc_data_structures::memmap::Mmap; -use rustc_data_structures::owned_slice::slice_owned; +use rustc_data_structures::owned_slice::{OwnedSlice, slice_owned}; use rustc_data_structures::svh::Svh; use rustc_errors::{DiagArgValue, IntoDiagArg}; use rustc_fs_util::try_canonicalize; @@ -231,6 +231,7 @@ use rustc_session::search_paths::PathKind; use rustc_session::utils::CanonicalizedPath; use rustc_span::{Span, Symbol}; use rustc_target::spec::{Target, TargetTuple}; +use tempfile::Builder as TempFileBuilder; use tracing::{debug, info}; use crate::creader::{Library, MetadataLoader}; @@ -277,6 +278,7 @@ pub(crate) enum CrateFlavor { Rlib, Rmeta, Dylib, + SDylib, } impl fmt::Display for CrateFlavor { @@ -285,6 +287,7 @@ impl fmt::Display for CrateFlavor { CrateFlavor::Rlib => "rlib", CrateFlavor::Rmeta => "rmeta", CrateFlavor::Dylib => "dylib", + CrateFlavor::SDylib => "sdylib", }) } } @@ -295,6 +298,7 @@ impl IntoDiagArg for CrateFlavor { CrateFlavor::Rlib => DiagArgValue::Str(Cow::Borrowed("rlib")), CrateFlavor::Rmeta => DiagArgValue::Str(Cow::Borrowed("rmeta")), CrateFlavor::Dylib => DiagArgValue::Str(Cow::Borrowed("dylib")), + CrateFlavor::SDylib => DiagArgValue::Str(Cow::Borrowed("sdylib")), } } } @@ -379,14 +383,18 @@ impl<'a> CrateLocator<'a> { &format!("{}{}{}", self.target.dll_prefix, self.crate_name, extra_prefix); let staticlib_prefix = &format!("{}{}{}", self.target.staticlib_prefix, self.crate_name, extra_prefix); + let interface_prefix = rmeta_prefix; let rmeta_suffix = ".rmeta"; let rlib_suffix = ".rlib"; let dylib_suffix = &self.target.dll_suffix; let staticlib_suffix = &self.target.staticlib_suffix; + let interface_suffix = ".rs"; - let mut candidates: FxIndexMap<_, (FxIndexMap<_, _>, FxIndexMap<_, _>, FxIndexMap<_, _>)> = - Default::default(); + let mut candidates: FxIndexMap< + _, + (FxIndexMap<_, _>, FxIndexMap<_, _>, FxIndexMap<_, _>, FxIndexMap<_, _>), + > = Default::default(); // First, find all possible candidate rlibs and dylibs purely based on // the name of the files themselves. We're trying to match against an @@ -417,6 +425,7 @@ impl<'a> CrateLocator<'a> { (rlib_prefix.as_str(), rlib_suffix, CrateFlavor::Rlib), (rmeta_prefix.as_str(), rmeta_suffix, CrateFlavor::Rmeta), (dylib_prefix, dylib_suffix, CrateFlavor::Dylib), + (interface_prefix, interface_suffix, CrateFlavor::SDylib), ] { if prefix == staticlib_prefix && suffix == staticlib_suffix { should_check_staticlibs = false; @@ -425,18 +434,28 @@ impl<'a> CrateLocator<'a> { for (hash, spf) in matches { info!("lib candidate: {}", spf.path.display()); - let (rlibs, rmetas, dylibs) = + let (rlibs, rmetas, dylibs, interfaces) = candidates.entry(hash.to_string()).or_default(); - let path = - try_canonicalize(&spf.path).unwrap_or_else(|_| spf.path.to_path_buf()); - if seen_paths.contains(&path) { - continue; - }; - seen_paths.insert(path.clone()); + { + // As a perforamnce optimisation we canonicalize the path and skip + // ones we've already seeen. This allows us to ignore crates + // we know are exactual equal to ones we've already found. + // Going to the same crate through different symlinks does not change the result. + let path = try_canonicalize(&spf.path) + .unwrap_or_else(|_| spf.path.to_path_buf()); + if seen_paths.contains(&path) { + continue; + }; + seen_paths.insert(path); + } + // Use the original path (potentially with unresolved symlinks), + // filesystem code should not care, but this is nicer for diagnostics. + let path = spf.path.to_path_buf(); match kind { CrateFlavor::Rlib => rlibs.insert(path, search_path.kind), CrateFlavor::Rmeta => rmetas.insert(path, search_path.kind), CrateFlavor::Dylib => dylibs.insert(path, search_path.kind), + CrateFlavor::SDylib => interfaces.insert(path, search_path.kind), }; } } @@ -463,8 +482,8 @@ impl<'a> CrateLocator<'a> { // libraries corresponds to the crate id and hash criteria that this // search is being performed for. let mut libraries = FxIndexMap::default(); - for (_hash, (rlibs, rmetas, dylibs)) in candidates { - if let Some((svh, lib)) = self.extract_lib(rlibs, rmetas, dylibs)? { + for (_hash, (rlibs, rmetas, dylibs, interfaces)) in candidates { + if let Some((svh, lib)) = self.extract_lib(rlibs, rmetas, dylibs, interfaces)? { libraries.insert(svh, lib); } } @@ -499,6 +518,7 @@ impl<'a> CrateLocator<'a> { rlibs: FxIndexMap, rmetas: FxIndexMap, dylibs: FxIndexMap, + interfaces: FxIndexMap, ) -> Result, CrateError> { let mut slot = None; // Order here matters, rmeta should come first. @@ -506,12 +526,17 @@ impl<'a> CrateLocator<'a> { // Make sure there's at most one rlib and at most one dylib. // // See comment in `extract_one` below. - let source = CrateSource { - rmeta: self.extract_one(rmetas, CrateFlavor::Rmeta, &mut slot)?, - rlib: self.extract_one(rlibs, CrateFlavor::Rlib, &mut slot)?, - dylib: self.extract_one(dylibs, CrateFlavor::Dylib, &mut slot)?, - }; - Ok(slot.map(|(svh, metadata, _)| (svh, Library { source, metadata }))) + let rmeta = self.extract_one(rmetas, CrateFlavor::Rmeta, &mut slot)?; + let rlib = self.extract_one(rlibs, CrateFlavor::Rlib, &mut slot)?; + let sdylib_interface = self.extract_one(interfaces, CrateFlavor::SDylib, &mut slot)?; + let dylib = self.extract_one(dylibs, CrateFlavor::Dylib, &mut slot)?; + + if sdylib_interface.is_some() && dylib.is_none() { + return Err(CrateError::FullMetadataNotFound(self.crate_name, CrateFlavor::SDylib)); + } + + let source = CrateSource { rmeta, rlib, dylib, sdylib_interface }; + Ok(slot.map(|(svh, metadata, _, _)| (svh, Library { source, metadata }))) } fn needs_crate_flavor(&self, flavor: CrateFlavor) -> bool { @@ -541,7 +566,7 @@ impl<'a> CrateLocator<'a> { &mut self, m: FxIndexMap, flavor: CrateFlavor, - slot: &mut Option<(Svh, MetadataBlob, PathBuf)>, + slot: &mut Option<(Svh, MetadataBlob, PathBuf, CrateFlavor)>, ) -> Result, CrateError> { // If we are producing an rlib, and we've already loaded metadata, then // we should not attempt to discover further crate sources (unless we're @@ -577,6 +602,7 @@ impl<'a> CrateLocator<'a> { &lib, self.metadata_loader, self.cfg_version, + Some(self.crate_name), ) { Ok(blob) => { if let Some(h) = self.crate_matches(&blob, &lib) { @@ -601,6 +627,11 @@ impl<'a> CrateLocator<'a> { } Err(MetadataError::LoadFailure(err)) => { info!("no metadata found: {}", err); + // Metadata was loaded from interface file earlier. + if let Some((.., CrateFlavor::SDylib)) = slot { + ret = Some((lib, kind)); + continue; + } // The file was present and created by the same compiler version, but we // couldn't load it for some reason. Give a hard error instead of silently // ignoring it, but only if we would have given an error anyway. @@ -654,7 +685,24 @@ impl<'a> CrateLocator<'a> { continue; } } - *slot = Some((hash, metadata, lib.clone())); + + // We error eagerly here. If we're locating a rlib, then in theory the full metadata + // could still be in a (later resolved) dylib. In practice, if the rlib and dylib + // were produced in a way where one has full metadata and the other hasn't, it would + // mean that they were compiled using different compiler flags and probably also have + // a different SVH value. + if metadata.get_header().is_stub { + // `is_stub` should never be true for .rmeta files. + assert_ne!(flavor, CrateFlavor::Rmeta); + + // Because rmeta files are resolved before rlib/dylib files, if this is a stub and + // we haven't found a slot already, it means that the full metadata is missing. + if slot.is_none() { + return Err(CrateError::FullMetadataNotFound(self.crate_name, flavor)); + } + } else { + *slot = Some((hash, metadata, lib.clone(), flavor)); + } ret = Some((lib, kind)); } @@ -710,6 +758,7 @@ impl<'a> CrateLocator<'a> { let mut rlibs = FxIndexMap::default(); let mut rmetas = FxIndexMap::default(); let mut dylibs = FxIndexMap::default(); + let mut sdylib_interfaces = FxIndexMap::default(); for loc in &self.exact_paths { let loc_canon = loc.canonicalized(); let loc_orig = loc.original(); @@ -728,41 +777,33 @@ impl<'a> CrateLocator<'a> { let Some(file) = loc_orig.file_name().and_then(|s| s.to_str()) else { return Err(CrateError::ExternLocationNotFile(self.crate_name, loc_orig.clone())); }; - // FnMut cannot return reference to captured value, so references - // must be taken outside the closure. - let rlibs = &mut rlibs; - let rmetas = &mut rmetas; - let dylibs = &mut dylibs; - let type_via_filename = (|| { - if file.starts_with("lib") { - if file.ends_with(".rlib") { - return Some(rlibs); - } - if file.ends_with(".rmeta") { - return Some(rmetas); - } - } - let dll_prefix = self.target.dll_prefix.as_ref(); - let dll_suffix = self.target.dll_suffix.as_ref(); - if file.starts_with(dll_prefix) && file.ends_with(dll_suffix) { - return Some(dylibs); + if file.starts_with("lib") { + if file.ends_with(".rlib") { + rlibs.insert(loc_canon.clone(), PathKind::ExternFlag); + continue; } - None - })(); - match type_via_filename { - Some(type_via_filename) => { - type_via_filename.insert(loc_canon.clone(), PathKind::ExternFlag); + if file.ends_with(".rmeta") { + rmetas.insert(loc_canon.clone(), PathKind::ExternFlag); + continue; } - None => { - self.crate_rejections - .via_filename - .push(CrateMismatch { path: loc_orig.clone(), got: String::new() }); + if file.ends_with(".rs") { + sdylib_interfaces.insert(loc_canon.clone(), PathKind::ExternFlag); } } + let dll_prefix = self.target.dll_prefix.as_ref(); + let dll_suffix = self.target.dll_suffix.as_ref(); + if file.starts_with(dll_prefix) && file.ends_with(dll_suffix) { + dylibs.insert(loc_canon.clone(), PathKind::ExternFlag); + continue; + } + self.crate_rejections + .via_filename + .push(CrateMismatch { path: loc_orig.clone(), got: String::new() }); } // Extract the dylib/rlib/rmeta triple. - self.extract_lib(rlibs, rmetas, dylibs).map(|opt| opt.map(|(_, lib)| lib)) + self.extract_lib(rlibs, rmetas, dylibs, sdylib_interfaces) + .map(|opt| opt.map(|(_, lib)| lib)) } pub(crate) fn into_error(self, dep_root: Option) -> CrateError { @@ -783,6 +824,7 @@ fn get_metadata_section<'p>( filename: &'p Path, loader: &dyn MetadataLoader, cfg_version: &'static str, + crate_name: Option, ) -> Result> { if !filename.exists() { return Err(MetadataError::NotPresent(filename)); @@ -791,6 +833,55 @@ fn get_metadata_section<'p>( CrateFlavor::Rlib => { loader.get_rlib_metadata(target, filename).map_err(MetadataError::LoadFailure)? } + CrateFlavor::SDylib => { + let compiler = std::env::current_exe().map_err(|_err| { + MetadataError::LoadFailure( + "couldn't obtain current compiler binary when loading sdylib interface" + .to_string(), + ) + })?; + + let tmp_path = match TempFileBuilder::new().prefix("rustc").tempdir() { + Ok(tmp_path) => tmp_path, + Err(error) => { + return Err(MetadataError::LoadFailure(format!( + "couldn't create a temp dir: {}", + error + ))); + } + }; + + let crate_name = crate_name.unwrap(); + debug!("compiling {}", filename.display()); + // FIXME: This will need to be done either within the current compiler session or + // as a separate compiler session in the same process. + let res = std::process::Command::new(compiler) + .arg(&filename) + .arg("--emit=metadata") + .arg(format!("--crate-name={}", crate_name)) + .arg(format!("--out-dir={}", tmp_path.path().display())) + .arg("-Zbuild-sdylib-interface") + .output() + .map_err(|err| { + MetadataError::LoadFailure(format!("couldn't compile interface: {}", err)) + })?; + + if !res.status.success() { + return Err(MetadataError::LoadFailure(format!( + "couldn't compile interface: {}", + std::str::from_utf8(&res.stderr).unwrap_or_default() + ))); + } + + // Load interface metadata instead of crate metadata. + let interface_metadata_name = format!("lib{}.rmeta", crate_name); + let rmeta_file = tmp_path.path().join(interface_metadata_name); + debug!("loading interface metadata from {}", rmeta_file.display()); + let rmeta = get_rmeta_metadata_section(&rmeta_file)?; + let _ = std::fs::remove_file(rmeta_file); + + rmeta + } CrateFlavor::Dylib => { let buf = loader.get_dylib_metadata(target, filename).map_err(MetadataError::LoadFailure)?; @@ -820,24 +911,7 @@ fn get_metadata_section<'p>( // Header is okay -> inflate the actual metadata buf.slice(|buf| &buf[data_start..(data_start + metadata_len)]) } - CrateFlavor::Rmeta => { - // mmap the file, because only a small fraction of it is read. - let file = std::fs::File::open(filename).map_err(|_| { - MetadataError::LoadFailure(format!( - "failed to open rmeta metadata: '{}'", - filename.display() - )) - })?; - let mmap = unsafe { Mmap::map(file) }; - let mmap = mmap.map_err(|_| { - MetadataError::LoadFailure(format!( - "failed to mmap rmeta metadata: '{}'", - filename.display() - )) - })?; - - slice_owned(mmap, Deref::deref) - } + CrateFlavor::Rmeta => get_rmeta_metadata_section(filename)?, }; let Ok(blob) = MetadataBlob::new(raw_bytes) else { return Err(MetadataError::LoadFailure(format!( @@ -863,6 +937,25 @@ fn get_metadata_section<'p>( } } +fn get_rmeta_metadata_section<'a, 'p>(filename: &'p Path) -> Result> { + // mmap the file, because only a small fraction of it is read. + let file = std::fs::File::open(filename).map_err(|_| { + MetadataError::LoadFailure(format!( + "failed to open rmeta metadata: '{}'", + filename.display() + )) + })?; + let mmap = unsafe { Mmap::map(file) }; + let mmap = mmap.map_err(|_| { + MetadataError::LoadFailure(format!( + "failed to mmap rmeta metadata: '{}'", + filename.display() + )) + })?; + + Ok(slice_owned(mmap, Deref::deref)) +} + /// A diagnostic function for dumping crate metadata to an output stream. pub fn list_file_metadata( target: &Target, @@ -873,7 +966,7 @@ pub fn list_file_metadata( cfg_version: &'static str, ) -> IoResult<()> { let flavor = get_flavor_from_path(path); - match get_metadata_section(target, flavor, path, metadata_loader, cfg_version) { + match get_metadata_section(target, flavor, path, metadata_loader, cfg_version, None) { Ok(metadata) => metadata.list_crate_metadata(out, ls_kinds), Err(msg) => write!(out, "{msg}\n"), } @@ -928,6 +1021,7 @@ pub(crate) enum CrateError { ExternLocationNotExist(Symbol, PathBuf), ExternLocationNotFile(Symbol, PathBuf), MultipleCandidates(Symbol, CrateFlavor, Vec), + FullMetadataNotFound(Symbol, CrateFlavor), SymbolConflictsCurrent(Symbol), StableCrateIdCollision(Symbol, Symbol), DlOpen(String, String), @@ -978,6 +1072,9 @@ impl CrateError { CrateError::MultipleCandidates(crate_name, flavor, candidates) => { dcx.emit_err(errors::MultipleCandidates { span, crate_name, flavor, candidates }); } + CrateError::FullMetadataNotFound(crate_name, flavor) => { + dcx.emit_err(errors::FullMetadataNotFound { span, crate_name, flavor }); + } CrateError::SymbolConflictsCurrent(root_name) => { dcx.emit_err(errors::SymbolConflictsCurrent { span, crate_name: root_name }); } diff --git a/compiler/rustc_metadata/src/native_libs.rs b/compiler/rustc_metadata/src/native_libs.rs index f63ae8079dcd4..cee9cff077503 100644 --- a/compiler/rustc_metadata/src/native_libs.rs +++ b/compiler/rustc_metadata/src/native_libs.rs @@ -73,7 +73,7 @@ pub fn walk_native_lib_search_dirs( || sess.target.os == "linux" || sess.target.os == "fuchsia" || sess.target.is_like_aix - || sess.target.is_like_osx && !sess.opts.unstable_opts.sanitizer.is_empty() + || sess.target.is_like_darwin && !sess.opts.unstable_opts.sanitizer.is_empty() { f(&sess.target_tlib_path.dir, false)?; } @@ -207,7 +207,7 @@ impl<'tcx> Collector<'tcx> { let sess = self.tcx.sess; - if matches!(abi, ExternAbi::Rust | ExternAbi::RustIntrinsic) { + if matches!(abi, ExternAbi::Rust) { return; } @@ -226,8 +226,8 @@ impl<'tcx> Collector<'tcx> { let mut wasm_import_module = None; let mut import_name_type = None; for item in items.iter() { - match item.name_or_empty() { - sym::name => { + match item.name() { + Some(sym::name) => { if name.is_some() { sess.dcx().emit_err(errors::MultipleNamesInLink { span: item.span() }); continue; @@ -242,7 +242,7 @@ impl<'tcx> Collector<'tcx> { } name = Some((link_name, span)); } - sym::kind => { + Some(sym::kind) => { if kind.is_some() { sess.dcx().emit_err(errors::MultipleKindsInLink { span: item.span() }); continue; @@ -257,7 +257,7 @@ impl<'tcx> Collector<'tcx> { "static" => NativeLibKind::Static { bundle: None, whole_archive: None }, "dylib" => NativeLibKind::Dylib { as_needed: None }, "framework" => { - if !sess.target.is_like_osx { + if !sess.target.is_like_darwin { sess.dcx().emit_err(errors::LinkFrameworkApple { span }); } NativeLibKind::Framework { as_needed: None } @@ -304,7 +304,7 @@ impl<'tcx> Collector<'tcx> { }; kind = Some(link_kind); } - sym::modifiers => { + Some(sym::modifiers) => { if modifiers.is_some() { sess.dcx() .emit_err(errors::MultipleLinkModifiers { span: item.span() }); @@ -316,7 +316,7 @@ impl<'tcx> Collector<'tcx> { }; modifiers = Some((link_modifiers, item.name_value_literal_span().unwrap())); } - sym::cfg => { + Some(sym::cfg) => { if cfg.is_some() { sess.dcx().emit_err(errors::MultipleCfgs { span: item.span() }); continue; @@ -346,7 +346,7 @@ impl<'tcx> Collector<'tcx> { } cfg = Some(link_cfg.clone()); } - sym::wasm_import_module => { + Some(sym::wasm_import_module) => { if wasm_import_module.is_some() { sess.dcx().emit_err(errors::MultipleWasmImport { span: item.span() }); continue; @@ -357,7 +357,7 @@ impl<'tcx> Collector<'tcx> { }; wasm_import_module = Some((link_wasm_import_module, item.span())); } - sym::import_name_type => { + Some(sym::import_name_type) => { if import_name_type.is_some() { sess.dcx() .emit_err(errors::MultipleImportNameType { span: item.span() }); @@ -531,7 +531,7 @@ impl<'tcx> Collector<'tcx> { let mut renames = FxHashSet::default(); for lib in &self.tcx.sess.opts.libs { if let NativeLibKind::Framework { .. } = lib.kind - && !self.tcx.sess.target.is_like_osx + && !self.tcx.sess.target.is_like_darwin { // Cannot check this when parsing options because the target is not yet available. self.tcx.dcx().emit_err(errors::LibFrameworkApple); diff --git a/compiler/rustc_metadata/src/rmeta/decoder.rs b/compiler/rustc_metadata/src/rmeta/decoder.rs index 4610a571da088..36ad4fea3a7a2 100644 --- a/compiler/rustc_metadata/src/rmeta/decoder.rs +++ b/compiler/rustc_metadata/src/rmeta/decoder.rs @@ -562,9 +562,9 @@ impl<'a, 'tcx> SpanDecoder for DecodeContext<'a, 'tcx> { Symbol::intern(s) }) } - SYMBOL_PREINTERNED => { + SYMBOL_PREDEFINED => { let symbol_index = self.read_u32(); - Symbol::new_from_decoded(symbol_index) + Symbol::new(symbol_index) } _ => unreachable!(), } @@ -782,10 +782,9 @@ impl MetadataBlob { )?; writeln!( out, - "has_global_allocator {} has_alloc_error_handler {} has_panic_handler {} has_default_lib_allocator {}", + "has_global_allocator {} has_alloc_error_handler {} has_default_lib_allocator {}", root.has_global_allocator, root.has_alloc_error_handler, - root.has_panic_handler, root.has_default_lib_allocator )?; writeln!( @@ -1313,7 +1312,7 @@ impl<'a> CrateMetadataRef<'a> { fn get_fn_has_self_parameter(self, id: DefIndex, sess: &'a Session) -> bool { self.root .tables - .fn_arg_names + .fn_arg_idents .get(self, id) .expect("argument names not encoded for a function") .decode((self, sess)) @@ -1332,29 +1331,30 @@ impl<'a> CrateMetadataRef<'a> { } fn get_associated_item(self, id: DefIndex, sess: &'a Session) -> ty::AssocItem { - let name = if self.root.tables.opt_rpitit_info.get(self, id).is_some() { - kw::Empty - } else { - self.item_name(id) - }; - let (kind, has_self) = match self.def_kind(id) { - DefKind::AssocConst => (ty::AssocKind::Const, false), - DefKind::AssocFn => (ty::AssocKind::Fn, self.get_fn_has_self_parameter(id, sess)), - DefKind::AssocTy => (ty::AssocKind::Type, false), + let kind = match self.def_kind(id) { + DefKind::AssocConst => ty::AssocKind::Const { name: self.item_name(id) }, + DefKind::AssocFn => ty::AssocKind::Fn { + name: self.item_name(id), + has_self: self.get_fn_has_self_parameter(id, sess), + }, + DefKind::AssocTy => { + let data = if let Some(rpitit_info) = self.root.tables.opt_rpitit_info.get(self, id) + { + ty::AssocTypeData::Rpitit(rpitit_info.decode(self)) + } else { + ty::AssocTypeData::Normal(self.item_name(id)) + }; + ty::AssocKind::Type { data } + } _ => bug!("cannot get associated-item of `{:?}`", self.def_key(id)), }; let container = self.root.tables.assoc_container.get(self, id).unwrap(); - let opt_rpitit_info = - self.root.tables.opt_rpitit_info.get(self, id).map(|d| d.decode(self)); ty::AssocItem { - name, kind, def_id: self.local_def_id(id), trait_item_def_id: self.get_trait_item_def_id(id), container, - fn_has_self_parameter: has_self, - opt_rpitit_info, } } @@ -1472,6 +1472,13 @@ impl<'a> CrateMetadataRef<'a> { self.root.foreign_modules.decode((self, sess)) } + fn get_externally_implementable_items( + self, + sess: &'a Session, + ) -> impl Iterator))> { + self.root.externally_implementable_items.decode((self, sess)) + } + fn get_dylib_dependency_formats<'tcx>( self, tcx: TyCtxt<'tcx>, @@ -1488,6 +1495,17 @@ impl<'a> CrateMetadataRef<'a> { tcx.arena.alloc_from_iter(self.root.lang_items_missing.decode(self)) } + fn get_exportable_items(self) -> impl Iterator { + self.root.exportable_items.decode(self).map(move |index| self.local_def_id(index)) + } + + fn get_stable_order_of_exportable_impls(self) -> impl Iterator { + self.root + .stable_order_of_exportable_impls + .decode(self) + .map(move |v| (self.local_def_id(v.0), v.1)) + } + fn exported_symbols<'tcx>( self, tcx: TyCtxt<'tcx>, @@ -1501,7 +1519,7 @@ impl<'a> CrateMetadataRef<'a> { let macro_rules = self.root.tables.is_macro_rules.get(self, id); let body = self.root.tables.macro_definition.get(self, id).unwrap().decode((self, sess)); - ast::MacroDef { macro_rules, body: ast::ptr::P(body) } + ast::MacroDef { macro_rules, body: ast::ptr::P(body), eii_macro_for: None } } _ => bug!(), } diff --git a/compiler/rustc_metadata/src/rmeta/decoder/cstore_impl.rs b/compiler/rustc_metadata/src/rmeta/decoder/cstore_impl.rs index 776b081a4630f..79e18fe790ffd 100644 --- a/compiler/rustc_metadata/src/rmeta/decoder/cstore_impl.rs +++ b/compiler/rustc_metadata/src/rmeta/decoder/cstore_impl.rs @@ -24,7 +24,7 @@ use super::{Decodable, DecodeContext, DecodeIterator}; use crate::creader::{CStore, LoadedMacro}; use crate::rmeta::AttrFlags; use crate::rmeta::table::IsDefault; -use crate::{foreign_modules, native_libs}; +use crate::{eii, foreign_modules, native_libs}; trait ProcessQueryValue<'tcx, T> { fn process_decoded(self, _tcx: TyCtxt<'tcx>, _err: impl Fn() -> !) -> T; @@ -286,7 +286,7 @@ provide! { tcx, def_id, other, cdata, rendered_const => { table } rendered_precise_capturing_args => { table } asyncness => { table_direct } - fn_arg_names => { table } + fn_arg_idents => { table } coroutine_kind => { table_direct } coroutine_for_closure => { table } coroutine_by_move_body_def_id => { table } @@ -330,14 +330,8 @@ provide! { tcx, def_id, other, cdata, visibility => { cdata.get_visibility(def_id.index) } adt_def => { cdata.get_adt_def(def_id.index, tcx) } - adt_destructor => { - let _ = cdata; - tcx.calculate_dtor(def_id, |_,_| Ok(())) - } - adt_async_destructor => { - let _ = cdata; - tcx.calculate_async_dtor(def_id, |_,_| Ok(())) - } + adt_destructor => { table } + adt_async_destructor => { table } associated_item_def_ids => { tcx.arena.alloc_from_iter(cdata.get_associated_item_or_field_def_ids(def_id.index)) } @@ -354,7 +348,6 @@ provide! { tcx, def_id, other, cdata, is_compiler_builtins => { cdata.root.compiler_builtins } has_global_allocator => { cdata.root.has_global_allocator } has_alloc_error_handler => { cdata.root.has_alloc_error_handler } - has_panic_handler => { cdata.root.has_panic_handler } is_profiler_runtime => { cdata.root.profiler_runtime } required_panic_strategy => { cdata.root.required_panic_strategy } panic_in_drop_strategy => { cdata.root.panic_in_drop_strategy } @@ -379,6 +372,13 @@ provide! { tcx, def_id, other, cdata, } native_libraries => { cdata.get_native_libraries(tcx.sess).collect() } foreign_modules => { cdata.get_foreign_modules(tcx.sess).map(|m| (m.def_id, m)).collect() } + externally_implementable_items => { + cdata.get_externally_implementable_items(tcx.sess) + .map(|(decl_did, (decl, impls))| ( + decl_did, + (decl, impls.into_iter().collect()) + )).collect() + } crate_hash => { cdata.root.header.hash } crate_host_hash => { cdata.host_hash } crate_name => { cdata.root.header.name } @@ -412,6 +412,8 @@ provide! { tcx, def_id, other, cdata, used_crate_source => { Arc::clone(&cdata.source) } debugger_visualizers => { cdata.get_debugger_visualizers() } + exportable_items => { tcx.arena.alloc_from_iter(cdata.get_exportable_items()) } + stable_order_of_exportable_impls => { tcx.arena.alloc(cdata.get_stable_order_of_exportable_impls().collect()) } exported_symbols => { let syms = cdata.exported_symbols(tcx); @@ -454,6 +456,7 @@ pub(in crate::rmeta) fn provide(providers: &mut Providers) { }, native_libraries: native_libs::collect, foreign_modules: foreign_modules::collect, + externally_implementable_items: eii::collect, // Returns a map from a sufficiently visible external item (i.e., an // external item that is visible from at least one local module) to a diff --git a/compiler/rustc_metadata/src/rmeta/encoder.rs b/compiler/rustc_metadata/src/rmeta/encoder.rs index 7ab3d432bdf81..f2aa9cd1a8940 100644 --- a/compiler/rustc_metadata/src/rmeta/encoder.rs +++ b/compiler/rustc_metadata/src/rmeta/encoder.rs @@ -201,9 +201,9 @@ impl<'a, 'tcx> SpanEncoder for EncodeContext<'a, 'tcx> { } fn encode_symbol(&mut self, symbol: Symbol) { - // if symbol preinterned, emit tag and symbol index - if symbol.is_preinterned() { - self.opaque.emit_u8(SYMBOL_PREINTERNED); + // if symbol predefined, emit tag and symbol index + if symbol.is_predefined() { + self.opaque.emit_u8(SYMBOL_PREDEFINED); self.opaque.emit_u32(symbol.as_u32()); } else { // otherwise write it as string or as offset to it @@ -551,8 +551,6 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { match source_file.name { FileName::Real(ref original_file_name) => { - // FIXME: This should probably to conditionally remapped under - // a RemapPathScopeComponents but which one? let adapted_file_name = source_map .path_mapping() .to_embeddable_absolute_path(original_file_name.clone(), working_directory); @@ -606,6 +604,9 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { // We have already encoded some things. Get their combined size from the current position. stats.push(("preamble", self.position())); + let externally_implementable_items = stat!("externally-implementable-items", || self + .encode_externally_implementable_items()); + let (crate_deps, dylib_dependency_formats) = stat!("dep", || (self.encode_crate_deps(), self.encode_dylib_dependency_formats())); @@ -673,6 +674,11 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { let debugger_visualizers = stat!("debugger-visualizers", || self.encode_debugger_visualizers()); + let exportable_items = stat!("exportable-items", || self.encode_exportable_items()); + + let stable_order_of_exportable_impls = + stat!("exportable-items", || self.encode_stable_order_of_exportable_impls()); + // Encode exported symbols info. This is prefetched in `encode_metadata`. let exported_symbols = stat!("exported-symbols", || { self.encode_exported_symbols(tcx.exported_symbols(LOCAL_CRATE)) @@ -701,6 +707,7 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { triple: tcx.sess.opts.target_triple.clone(), hash: tcx.crate_hash(LOCAL_CRATE), is_proc_macro_crate: proc_macro_data.is_some(), + is_stub: false, }, extra_filename: tcx.sess.opts.cg.extra_filename.clone(), stable_crate_id: tcx.def_path_hash(LOCAL_CRATE.as_def_id()).stable_crate_id(), @@ -709,7 +716,8 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { edition: tcx.sess.edition(), has_global_allocator: tcx.has_global_allocator(LOCAL_CRATE), has_alloc_error_handler: tcx.has_alloc_error_handler(LOCAL_CRATE), - has_panic_handler: tcx.has_panic_handler(LOCAL_CRATE), + externally_implementable_items, + has_default_lib_allocator: ast::attr::contains_name( attrs, sym::default_lib_allocator, @@ -739,6 +747,8 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { traits, impls, incoherent_impls, + exportable_items, + stable_order_of_exportable_impls, exported_symbols, interpret_alloc_index, tables, @@ -820,7 +830,9 @@ struct AnalyzeAttrState<'a> { #[inline] fn analyze_attr(attr: &impl AttributeExt, state: &mut AnalyzeAttrState<'_>) -> bool { let mut should_encode = false; - if !rustc_feature::encode_cross_crate(attr.name_or_empty()) { + if let Some(name) = attr.name() + && !rustc_feature::encode_cross_crate(name) + { // Attributes not marked encode-cross-crate don't need to be encoded for downstream crates. } else if attr.doc_str().is_some() { // We keep all doc comments reachable to rustdoc because they might be "imported" into @@ -1098,7 +1110,6 @@ fn should_encode_variances<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId, def_kind: Def DefKind::Struct | DefKind::Union | DefKind::Enum - | DefKind::Variant | DefKind::OpaqueTy | DefKind::Fn | DefKind::Ctor(..) @@ -1108,6 +1119,7 @@ fn should_encode_variances<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId, def_kind: Def matches!(tcx.opt_rpitit_info(def_id), Some(ty::ImplTraitInTraitData::Trait { .. })) } DefKind::Mod + | DefKind::Variant | DefKind::Field | DefKind::AssocConst | DefKind::TyParam @@ -1337,7 +1349,7 @@ fn should_encode_const(def_kind: DefKind) -> bool { fn should_encode_fn_impl_trait_in_trait<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId) -> bool { if let Some(assoc_item) = tcx.opt_associated_item(def_id) && assoc_item.container == ty::AssocItemContainer::Trait - && assoc_item.kind == ty::AssocKind::Fn + && assoc_item.is_fn() { true } else { @@ -1468,7 +1480,7 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { } if let DefKind::Fn | DefKind::AssocFn = def_kind { self.tables.asyncness.set_some(def_id.index, tcx.asyncness(def_id)); - record_array!(self.tables.fn_arg_names[def_id] <- tcx.fn_arg_names(def_id)); + record_array!(self.tables.fn_arg_idents[def_id] <- tcx.fn_arg_idents(def_id)); } if let Some(name) = tcx.intrinsic(def_id) { record!(self.tables.intrinsic[def_id] <- name); @@ -1633,6 +1645,14 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { record!(self.tables.fn_sig[variant.def_id] <- fn_sig); } } + + if let Some(destructor) = tcx.adt_destructor(local_def_id) { + record!(self.tables.adt_destructor[def_id] <- destructor); + } + + if let Some(destructor) = tcx.adt_async_destructor(local_def_id) { + record!(self.tables.adt_async_destructor[def_id] <- destructor); + } } #[instrument(level = "debug", skip(self))] @@ -1682,7 +1702,7 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { match item.container { AssocItemContainer::Trait => { - if let ty::AssocKind::Type = item.kind { + if item.is_type() { self.encode_explicit_item_bounds(def_id); self.encode_explicit_item_self_bounds(def_id); if tcx.is_conditionally_const(def_id) { @@ -1697,7 +1717,7 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { } } } - if let Some(rpitit_info) = item.opt_rpitit_info { + if let ty::AssocKind::Type { data: ty::AssocTypeData::Rpitit(rpitit_info) } = item.kind { record!(self.tables.opt_rpitit_info[def_id] <- rpitit_info); if matches!(rpitit_info, ty::ImplTraitInTraitData::Trait { .. }) { record_array!( @@ -1835,7 +1855,9 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { fn encode_info_for_macro(&mut self, def_id: LocalDefId) { let tcx = self.tcx; - let (_, macro_def, _) = tcx.hir_expect_item(def_id).expect_macro(); + let hir::ItemKind::Macro(_, macro_def, _) = tcx.hir_expect_item(def_id).kind else { + bug!() + }; self.tables.is_macro_rules.set(def_id.local_def_index, macro_def.macro_rules); record!(self.tables.macro_definition[def_id.to_def_id()] <- &*macro_def.body); } @@ -1852,6 +1874,17 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { self.lazy_array(foreign_modules.iter().map(|(_, m)| m).cloned()) } + fn encode_externally_implementable_items( + &mut self, + ) -> LazyArray<(DefId, (EIIDecl, Vec<(DefId, EIIImpl)>))> { + empty_proc_macro!(self); + let externally_implementable_items = self.tcx.externally_implementable_items(LOCAL_CRATE); + + self.lazy_array(externally_implementable_items.iter().map(|(decl_did, (decl, impls))| { + (*decl_did, (decl.clone(), impls.iter().map(|(impl_did, i)| (*impl_did, *i)).collect())) + })) + } + fn encode_hygiene(&mut self) -> (SyntaxContextTable, ExpnDataTable, ExpnHashTable) { let mut syntax_contexts: TableBuilder<_, _> = Default::default(); let mut expn_data_table: TableBuilder<_, _> = Default::default(); @@ -1881,8 +1914,6 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { let is_proc_macro = self.tcx.crate_types().contains(&CrateType::ProcMacro); if is_proc_macro { let tcx = self.tcx; - let hir = tcx.hir(); - let proc_macro_decls_static = tcx.proc_macro_decls_static(()).unwrap().local_def_index; let stability = tcx.lookup_stability(CRATE_DEF_ID); let macros = @@ -1915,7 +1946,7 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { let id = proc_macro; let proc_macro = tcx.local_def_id_to_hir_id(proc_macro); let mut name = tcx.hir_name(proc_macro); - let span = hir.span(proc_macro); + let span = tcx.hir_span(proc_macro); // Proc-macros may have attributes like `#[allow_internal_unstable]`, // so downstream crates need access to them. let attrs = tcx.hir_attrs(proc_macro); @@ -2140,6 +2171,20 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { self.lazy_array(&all_impls) } + fn encode_exportable_items(&mut self) -> LazyArray { + empty_proc_macro!(self); + self.lazy_array(self.tcx.exportable_items(LOCAL_CRATE).iter().map(|def_id| def_id.index)) + } + + fn encode_stable_order_of_exportable_impls(&mut self) -> LazyArray<(DefIndex, usize)> { + empty_proc_macro!(self); + let stable_order_of_exportable_impls = + self.tcx.stable_order_of_exportable_impls(LOCAL_CRATE); + self.lazy_array( + stable_order_of_exportable_impls.iter().map(|(def_id, idx)| (def_id.index, *idx)), + ) + } + // Encodes all symbols exported from this crate into the metadata. // // This pass is seeded off the reachability list calculated in the @@ -2192,7 +2237,7 @@ fn prefetch_mir(tcx: TyCtxt<'_>) { } let reachable_set = tcx.reachable_set(()); - par_for_each_in(tcx.mir_keys(()), |&def_id| { + par_for_each_in(tcx.mir_keys(()), |&&def_id| { let (encode_const, encode_opt) = should_encode_mir(tcx, reachable_set, def_id); if encode_const { @@ -2231,8 +2276,12 @@ fn prefetch_mir(tcx: TyCtxt<'_>) { // generated regardless of trailing bytes that end up in it. pub struct EncodedMetadata { - // The declaration order matters because `mmap` should be dropped before `_temp_dir`. - mmap: Option, + // The declaration order matters because `full_metadata` should be dropped + // before `_temp_dir`. + full_metadata: Option, + // This is an optional stub metadata containing only the crate header. + // The header should be very small, so we load it directly into memory. + stub_metadata: Option>, // We need to carry MaybeTempDir to avoid deleting the temporary // directory while accessing the Mmap. _temp_dir: Option, @@ -2240,33 +2289,50 @@ pub struct EncodedMetadata { impl EncodedMetadata { #[inline] - pub fn from_path(path: PathBuf, temp_dir: Option) -> std::io::Result { + pub fn from_path( + path: PathBuf, + stub_path: Option, + temp_dir: Option, + ) -> std::io::Result { let file = std::fs::File::open(&path)?; let file_metadata = file.metadata()?; if file_metadata.len() == 0 { - return Ok(Self { mmap: None, _temp_dir: None }); + return Ok(Self { full_metadata: None, stub_metadata: None, _temp_dir: None }); } - let mmap = unsafe { Some(Mmap::map(file)?) }; - Ok(Self { mmap, _temp_dir: temp_dir }) + let full_mmap = unsafe { Some(Mmap::map(file)?) }; + + let stub = + if let Some(stub_path) = stub_path { Some(std::fs::read(stub_path)?) } else { None }; + + Ok(Self { full_metadata: full_mmap, stub_metadata: stub, _temp_dir: temp_dir }) } #[inline] - pub fn raw_data(&self) -> &[u8] { - self.mmap.as_deref().unwrap_or_default() + pub fn full(&self) -> &[u8] { + &self.full_metadata.as_deref().unwrap_or_default() + } + + #[inline] + pub fn stub_or_full(&self) -> &[u8] { + self.stub_metadata.as_deref().unwrap_or(self.full()) } } impl Encodable for EncodedMetadata { fn encode(&self, s: &mut S) { - let slice = self.raw_data(); + self.stub_metadata.encode(s); + + let slice = self.full(); slice.encode(s) } } impl Decodable for EncodedMetadata { fn decode(d: &mut D) -> Self { + let stub = >>::decode(d); + let len = d.read_usize(); - let mmap = if len > 0 { + let full_metadata = if len > 0 { let mut mmap = MmapMut::map_anon(len).unwrap(); mmap.copy_from_slice(d.read_raw_bytes(len)); Some(mmap.make_read_only().unwrap()) @@ -2274,11 +2340,11 @@ impl Decodable for EncodedMetadata { None }; - Self { mmap, _temp_dir: None } + Self { full_metadata, stub_metadata: stub, _temp_dir: None } } } -pub fn encode_metadata(tcx: TyCtxt<'_>, path: &Path) { +pub fn encode_metadata(tcx: TyCtxt<'_>, path: &Path, ref_path: Option<&Path>) { let _prof_timer = tcx.prof.verbose_generic_activity("generate_crate_metadata"); // Since encoding metadata is not in a query, and nothing is cached, @@ -2292,6 +2358,42 @@ pub fn encode_metadata(tcx: TyCtxt<'_>, path: &Path) { join(|| prefetch_mir(tcx), || tcx.exported_symbols(LOCAL_CRATE)); } + with_encode_metadata_header(tcx, path, |ecx| { + // Encode all the entries and extra information in the crate, + // culminating in the `CrateRoot` which points to all of it. + let root = ecx.encode_crate_root(); + + // Flush buffer to ensure backing file has the correct size. + ecx.opaque.flush(); + // Record metadata size for self-profiling + tcx.prof.artifact_size( + "crate_metadata", + "crate_metadata", + ecx.opaque.file().metadata().unwrap().len(), + ); + + root.position.get() + }); + + if let Some(ref_path) = ref_path { + with_encode_metadata_header(tcx, ref_path, |ecx| { + let header: LazyValue = ecx.lazy(CrateHeader { + name: tcx.crate_name(LOCAL_CRATE), + triple: tcx.sess.opts.target_triple.clone(), + hash: tcx.crate_hash(LOCAL_CRATE), + is_proc_macro_crate: false, + is_stub: true, + }); + header.position.get() + }); + } +} + +fn with_encode_metadata_header( + tcx: TyCtxt<'_>, + path: &Path, + f: impl FnOnce(&mut EncodeContext<'_, '_>) -> usize, +) { let mut encoder = opaque::FileEncoder::new(path) .unwrap_or_else(|err| tcx.dcx().emit_fatal(FailCreateFileEncoder { err })); encoder.emit_raw_bytes(METADATA_HEADER); @@ -2326,9 +2428,7 @@ pub fn encode_metadata(tcx: TyCtxt<'_>, path: &Path) { // Encode the rustc version string in a predictable location. rustc_version(tcx.sess.cfg_version).encode(&mut ecx); - // Encode all the entries and extra information in the crate, - // culminating in the `CrateRoot` which points to all of it. - let root = ecx.encode_crate_root(); + let root_position = f(&mut ecx); // Make sure we report any errors from writing to the file. // If we forget this, compilation can succeed with an incomplete rmeta file, @@ -2338,12 +2438,9 @@ pub fn encode_metadata(tcx: TyCtxt<'_>, path: &Path) { } let file = ecx.opaque.file(); - if let Err(err) = encode_root_position(file, root.position.get()) { + if let Err(err) = encode_root_position(file, root_position) { tcx.dcx().emit_fatal(FailWriteFile { path: ecx.opaque.path(), err }); } - - // Record metadata size for self-profiling - tcx.prof.artifact_size("crate_metadata", "crate_metadata", file.metadata().unwrap().len()); } fn encode_root_position(mut file: &File, pos: usize) -> Result<(), std::io::Error> { diff --git a/compiler/rustc_metadata/src/rmeta/mod.rs b/compiler/rustc_metadata/src/rmeta/mod.rs index dc453b1e747ca..af2e767de21ef 100644 --- a/compiler/rustc_metadata/src/rmeta/mod.rs +++ b/compiler/rustc_metadata/src/rmeta/mod.rs @@ -8,6 +8,7 @@ use encoder::EncodeContext; pub use encoder::{EncodedMetadata, encode_metadata, rendered_const}; use rustc_abi::{FieldIdx, ReprOptions, VariantIdx}; use rustc_ast::expand::StrippedCfgItem; +use rustc_attr_parsing::{EIIDecl, EIIImpl}; use rustc_data_structures::fx::FxHashMap; use rustc_data_structures::svh::Svh; use rustc_hir::PreciseCapturingArgKind; @@ -36,7 +37,7 @@ use rustc_serialize::opaque::FileEncoder; use rustc_session::config::{SymbolManglingVersion, TargetModifier}; use rustc_session::cstore::{CrateDepKind, ForeignModule, LinkagePreference, NativeLib}; use rustc_span::edition::Edition; -use rustc_span::hygiene::{ExpnIndex, MacroKind, SyntaxContextData}; +use rustc_span::hygiene::{ExpnIndex, MacroKind, SyntaxContextKey}; use rustc_span::{self, ExpnData, ExpnHash, ExpnId, Ident, Span, Symbol}; use rustc_target::spec::{PanicStrategy, TargetTuple}; use table::TableBuilder; @@ -56,7 +57,7 @@ pub(crate) fn rustc_version(cfg_version: &'static str) -> String { /// Metadata encoding version. /// N.B., increment this if you change the format of metadata such that /// the rustc version can't be found to compare with `rustc_version()`. -const METADATA_VERSION: u8 = 9; +const METADATA_VERSION: u8 = 10; /// Metadata header which includes `METADATA_VERSION`. /// @@ -193,7 +194,7 @@ enum LazyState { Previous(NonZero), } -type SyntaxContextTable = LazyTable>>; +type SyntaxContextTable = LazyTable>>; type ExpnDataTable = LazyTable>>; type ExpnHashTable = LazyTable>>; @@ -221,6 +222,12 @@ pub(crate) struct CrateHeader { /// This is separate from [`ProcMacroData`] to avoid having to update [`METADATA_VERSION`] every /// time ProcMacroData changes. pub(crate) is_proc_macro_crate: bool, + /// Whether this crate metadata section is just a stub. + /// Stubs do not contain the full metadata (it will be typically stored + /// in a separate rmeta file). + /// + /// This is used inside rlibs and dylibs when using `-Zembed-metadata=no`. + pub(crate) is_stub: bool, } /// Serialized `.rmeta` data for a crate. @@ -250,10 +257,12 @@ pub(crate) struct CrateRoot { required_panic_strategy: Option, panic_in_drop_strategy: PanicStrategy, edition: Edition, + + // FIXME(jdonszelmann): these booleans can be replaced by the entries in `externally_implementable_items` has_global_allocator: bool, has_alloc_error_handler: bool, - has_panic_handler: bool, has_default_lib_allocator: bool, + externally_implementable_items: LazyArray<(DefId, (EIIDecl, Vec<(DefId, EIIImpl)>))>, crate_deps: LazyArray, dylib_dependency_formats: LazyArray>, @@ -274,6 +283,8 @@ pub(crate) struct CrateRoot { tables: LazyTables, debugger_visualizers: LazyArray, + exportable_items: LazyArray, + stable_order_of_exportable_impls: LazyArray<(DefIndex, usize)>, exported_symbols: LazyArray<(ExportedSymbol<'static>, SymbolExportInfo)>, syntax_contexts: SyntaxContextTable, @@ -443,9 +454,11 @@ define_tables! { rendered_const: Table>, rendered_precise_capturing_args: Table>>, asyncness: Table, - fn_arg_names: Table>>, + fn_arg_idents: Table>>, coroutine_kind: Table, coroutine_for_closure: Table, + adt_destructor: Table>, + adt_async_destructor: Table>, coroutine_by_move_body_def_id: Table, eval_static_initializer: Table>>, trait_def: Table>, @@ -576,7 +589,7 @@ impl SpanTag { // Tags for encoding Symbol's const SYMBOL_STR: u8 = 0; const SYMBOL_OFFSET: u8 = 1; -const SYMBOL_PREINTERNED: u8 = 2; +const SYMBOL_PREDEFINED: u8 = 2; pub fn provide(providers: &mut Providers) { encoder::provide(providers); diff --git a/compiler/rustc_middle/messages.ftl b/compiler/rustc_middle/messages.ftl index 0b3c0be1a4e1a..3d27e587b6cb4 100644 --- a/compiler/rustc_middle/messages.ftl +++ b/compiler/rustc_middle/messages.ftl @@ -1,7 +1,11 @@ +middle_assert_async_resume_after_drop = `async fn` resumed after async drop + middle_assert_async_resume_after_panic = `async fn` resumed after panicking middle_assert_async_resume_after_return = `async fn` resumed after completion +middle_assert_coroutine_resume_after_drop = coroutine resumed after async drop + middle_assert_coroutine_resume_after_panic = coroutine resumed after panicking middle_assert_coroutine_resume_after_return = coroutine resumed after completion @@ -9,6 +13,8 @@ middle_assert_coroutine_resume_after_return = coroutine resumed after completion middle_assert_divide_by_zero = attempt to divide `{$val}` by zero +middle_assert_gen_resume_after_drop = `gen` fn or block cannot be further iterated on after it async dropped + middle_assert_gen_resume_after_panic = `gen` fn or block cannot be further iterated on after it panicked middle_assert_misaligned_ptr_deref = diff --git a/compiler/rustc_middle/src/arena.rs b/compiler/rustc_middle/src/arena.rs index aef56ea46e957..3520eb428bc32 100644 --- a/compiler/rustc_middle/src/arena.rs +++ b/compiler/rustc_middle/src/arena.rs @@ -9,6 +9,7 @@ macro_rules! arena_types { ($macro:path) => ( $macro!([ [] layout: rustc_abi::LayoutData, + [] proxy_coroutine_layout: rustc_middle::mir::CoroutineLayout<'tcx>, [] fn_abi: rustc_target::callconv::FnAbi<'tcx, rustc_middle::ty::Ty<'tcx>>, // AdtDef are interned and compared by address [decode] adt_def: rustc_middle::ty::AdtDefData, @@ -28,7 +29,7 @@ macro_rules! arena_types { rustc_middle::mir::Body<'tcx> >, [decode] typeck_results: rustc_middle::ty::TypeckResults<'tcx>, - [decode] borrowck_result: rustc_middle::mir::BorrowCheckResult<'tcx>, + [decode] borrowck_result: rustc_middle::mir::ConcreteOpaqueTypes<'tcx>, [] resolver: rustc_data_structures::steal::Steal<( rustc_middle::ty::ResolverAstLowering, std::sync::Arc, @@ -89,8 +90,9 @@ macro_rules! arena_types { [] name_set: rustc_data_structures::unord::UnordSet, [] autodiff_item: rustc_ast::expand::autodiff_attrs::AutoDiffItem, [] ordered_name_set: rustc_data_structures::fx::FxIndexSet, - [] pats: rustc_middle::ty::PatternKind<'tcx>, [] valtree: rustc_middle::ty::ValTreeKind<'tcx>, + [] stable_order_of_exportable_impls: + rustc_data_structures::fx::FxIndexMap, // Note that this deliberately duplicates items in the `rustc_hir::arena`, // since we need to allocate this type on both the `rustc_hir` arena @@ -116,6 +118,7 @@ macro_rules! arena_types { [decode] specialization_graph: rustc_middle::traits::specialization_graph::Graph, [] crate_inherent_impls: rustc_middle::ty::CrateInherentImpls, [] hir_owner_nodes: rustc_hir::OwnerNodes<'tcx>, + [] get_externally_implementable_item_impls: rustc_middle::middle::eii::EiiMap, ]); ) } diff --git a/compiler/rustc_middle/src/dep_graph/dep_node.rs b/compiler/rustc_middle/src/dep_graph/dep_node.rs index be34c7ef4bd52..0c998a2cbb38b 100644 --- a/compiler/rustc_middle/src/dep_graph/dep_node.rs +++ b/compiler/rustc_middle/src/dep_graph/dep_node.rs @@ -13,8 +13,11 @@ use crate::ty::TyCtxt; macro_rules! define_dep_nodes { ( - $($(#[$attr:meta])* - [$($modifiers:tt)*] fn $variant:ident($($K:tt)*) -> $V:ty,)*) => { + $( + $(#[$attr:meta])* + [$($modifiers:tt)*] fn $variant:ident($($K:tt)*) -> $V:ty, + )* + ) => { #[macro_export] macro_rules! make_dep_kind_array { @@ -83,12 +86,15 @@ macro_rules! define_dep_nodes { }; } -rustc_query_append!(define_dep_nodes![ +// Create various data structures for each query, and also for a few things +// that aren't queries. +rustc_with_all_queries!(define_dep_nodes![ /// We use this for most things when incr. comp. is turned off. [] fn Null() -> (), /// We use this to create a forever-red node. [] fn Red() -> (), [] fn SideEffect() -> (), + [] fn AnonZeroDeps() -> (), [] fn TraitSelect() -> (), [] fn CompileCodegenUnit() -> (), [] fn CompileMonoItem() -> (), diff --git a/compiler/rustc_middle/src/dep_graph/mod.rs b/compiler/rustc_middle/src/dep_graph/mod.rs index 739c0be1a91d6..931d67087acab 100644 --- a/compiler/rustc_middle/src/dep_graph/mod.rs +++ b/compiler/rustc_middle/src/dep_graph/mod.rs @@ -53,6 +53,7 @@ impl Deps for DepsType { const DEP_KIND_NULL: DepKind = dep_kinds::Null; const DEP_KIND_RED: DepKind = dep_kinds::Red; const DEP_KIND_SIDE_EFFECT: DepKind = dep_kinds::SideEffect; + const DEP_KIND_ANON_ZERO_DEPS: DepKind = dep_kinds::AnonZeroDeps; const DEP_KIND_MAX: u16 = dep_node::DEP_KIND_VARIANTS - 1; } diff --git a/compiler/rustc_middle/src/hir/map.rs b/compiler/rustc_middle/src/hir/map.rs index 52f155a16b868..400f272772cd2 100644 --- a/compiler/rustc_middle/src/hir/map.rs +++ b/compiler/rustc_middle/src/hir/map.rs @@ -1,3 +1,7 @@ +//! This module used to contain a type called `Map`. That type has since been +//! eliminated, and all its methods are now on `TyCtxt`. But the module name +//! stays as `map` because there isn't an obviously better name for it. + use rustc_abi::ExternAbi; use rustc_ast::visit::{VisitorResult, walk_list}; use rustc_data_structures::fingerprint::Fingerprint; @@ -15,18 +19,10 @@ use rustc_span::{ErrorGuaranteed, Ident, Span, Symbol, kw, sym, with_metavar_spa use crate::hir::{ModuleItems, nested_filter}; use crate::middle::debugger_visualizer::DebuggerVisualizerFile; +use crate::middle::eii::EiiMapping; use crate::query::LocalCrate; use crate::ty::TyCtxt; -// FIXME: the structure was necessary in the past but now it -// only serves as "namespace" for HIR-related methods, and can be -// removed if all the methods are reasonably renamed and moved to tcx -// (https://github.com/rust-lang/rust/pull/118256#issuecomment-1826442834). -#[derive(Copy, Clone)] -pub struct Map<'hir> { - pub(super) tcx: TyCtxt<'hir>, -} - /// An iterator that walks up the ancestor tree of a given `HirId`. /// Constructed using `tcx.hir_parent_iter(hir_id)`. struct ParentHirIterator<'tcx> { @@ -273,14 +269,14 @@ impl<'tcx> TyCtxt<'tcx> { self.hir_maybe_body_owned_by(id).unwrap_or_else(|| { let hir_id = self.local_def_id_to_hir_id(id); span_bug!( - self.hir().span(hir_id), + self.hir_span(hir_id), "body_owned_by: {} has no associated body", self.hir_id_to_string(hir_id) ); }) } - pub fn hir_body_param_names(self, id: BodyId) -> impl Iterator> { + pub fn hir_body_param_idents(self, id: BodyId) -> impl Iterator> { self.hir_body(id).params.iter().map(|param| match param.pat.kind { PatKind::Binding(_, _, ident, _) => Some(ident), PatKind::Wild => Some(Ident::new(kw::Underscore, param.pat.span)), @@ -334,7 +330,7 @@ impl<'tcx> TyCtxt<'tcx> { /// Returns an iterator of the `DefId`s for all body-owners in this /// crate. If you would prefer to iterate over the bodies - /// themselves, you can do `self.hir().krate().body_ids.iter()`. + /// themselves, you can do `self.hir_crate(()).body_ids.iter()`. #[inline] pub fn hir_body_owners(self) -> impl Iterator { self.hir_crate_items(()).body_owners.iter().copied() @@ -342,7 +338,7 @@ impl<'tcx> TyCtxt<'tcx> { #[inline] pub fn par_hir_body_owners(self, f: impl Fn(LocalDefId) + DynSend + DynSync) { - par_for_each_in(&self.hir_crate_items(()).body_owners[..], |&def_id| f(def_id)); + par_for_each_in(&self.hir_crate_items(()).body_owners[..], |&&def_id| f(def_id)); } pub fn hir_ty_param_owner(self, def_id: LocalDefId) -> LocalDefId { @@ -367,10 +363,6 @@ impl<'tcx> TyCtxt<'tcx> { } } - pub fn hir_trait_impls(self, trait_did: DefId) -> &'tcx [LocalDefId] { - self.all_local_trait_impls(()).get(&trait_did).map_or(&[], |xs| &xs[..]) - } - /// Gets the attributes on the crate. This is preferable to /// invoking `krate.attrs` because it registers a tighter /// dep-graph access. @@ -681,9 +673,8 @@ impl<'tcx> TyCtxt<'tcx> { pub fn hir_id_to_string(self, id: HirId) -> String { let path_str = |def_id: LocalDefId| self.def_path_str(def_id); - let span_str = || { - self.sess.source_map().span_to_snippet(Map { tcx: self }.span(id)).unwrap_or_default() - }; + let span_str = + || self.sess.source_map().span_to_snippet(self.hir_span(id)).unwrap_or_default(); let node_str = |prefix| format!("{id} ({prefix} `{}`)", span_str()); match self.hir_node(id) { @@ -879,12 +870,10 @@ impl<'tcx> TyCtxt<'tcx> { pub fn hir_attrs(self, id: HirId) -> &'tcx [Attribute] { self.hir_attr_map(id.owner).get(id.local_id) } -} -impl<'hir> Map<'hir> { /// Gets the span of the definition of the specified HIR node. /// This is used by `tcx.def_span`. - pub fn span(self, hir_id: HirId) -> Span { + pub fn hir_span(self, hir_id: HirId) -> Span { fn until_within(outer: Span, end: Span) -> Span { if let Some(end) = end.find_ancestor_inside(outer) { outer.with_hi(end.hi()) @@ -904,7 +893,7 @@ impl<'hir> Map<'hir> { span } - let span = match self.tcx.hir_node(hir_id) { + let span = match self.hir_node(hir_id) { // Function-like. Node::Item(Item { kind: ItemKind::Fn { sig, .. }, span: outer_span, .. }) | Node::TraitItem(TraitItem { @@ -984,7 +973,7 @@ impl<'hir> Map<'hir> { Node::Variant(variant) => named_span(variant.span, variant.ident, None), Node::ImplItem(item) => named_span(item.span, item.ident, Some(item.generics)), Node::ForeignItem(item) => named_span(item.span, item.ident, None), - Node::Ctor(_) => return self.span(self.tcx.parent_hir_id(hir_id)), + Node::Ctor(_) => return self.hir_span(self.parent_hir_id(hir_id)), Node::Expr(Expr { kind: ExprKind::Closure(Closure { fn_decl_span, .. }), span, @@ -993,16 +982,16 @@ impl<'hir> Map<'hir> { // Ensure that the returned span has the item's SyntaxContext. fn_decl_span.find_ancestor_inside(*span).unwrap_or(*span) } - _ => self.span_with_body(hir_id), + _ => self.hir_span_with_body(hir_id), }; - debug_assert_eq!(span.ctxt(), self.span_with_body(hir_id).ctxt()); + debug_assert_eq!(span.ctxt(), self.hir_span_with_body(hir_id).ctxt()); span } - /// Like `hir.span()`, but includes the body of items + /// Like `hir_span()`, but includes the body of items /// (instead of just the item header) - pub fn span_with_body(self, hir_id: HirId) -> Span { - match self.tcx.hir_node(hir_id) { + pub fn hir_span_with_body(self, hir_id: HirId) -> Span { + match self.hir_node(hir_id) { Node::Param(param) => param.span, Node::Item(item) => item.span, Node::ForeignItem(foreign_item) => foreign_item.span, @@ -1011,7 +1000,7 @@ impl<'hir> Map<'hir> { Node::Variant(variant) => variant.span, Node::Field(field) => field.span, Node::AnonConst(constant) => constant.span, - Node::ConstBlock(constant) => self.tcx.hir_body(constant.body).value.span, + Node::ConstBlock(constant) => self.hir_body(constant.body).value.span, Node::ConstArg(const_arg) => const_arg.span(), Node::Expr(expr) => expr.span, Node::ExprField(field) => field.span, @@ -1031,7 +1020,7 @@ impl<'hir> Map<'hir> { Node::PatExpr(lit) => lit.span, Node::Arm(arm) => arm.span, Node::Block(block) => block.span, - Node::Ctor(..) => self.span_with_body(self.tcx.parent_hir_id(hir_id)), + Node::Ctor(..) => self.hir_span_with_body(self.parent_hir_id(hir_id)), Node::Lifetime(lifetime) => lifetime.ident.span, Node::GenericParam(param) => param.span, Node::Infer(i) => i.span, @@ -1039,28 +1028,36 @@ impl<'hir> Map<'hir> { Node::Crate(item) => item.spans.inner_span, Node::WherePredicate(pred) => pred.span, Node::PreciseCapturingNonLifetimeArg(param) => param.ident.span, - Node::Synthetic => unreachable!(), + Node::Synthetic => { + if let Some(EiiMapping { chosen_impl, .. }) = + self.get_externally_implementable_item_impls(()).get(&hir_id.owner.def_id) + { + self.def_span(chosen_impl) + } else { + unreachable!() + } + } Node::Err(span) => span, } } - pub fn span_if_local(self, id: DefId) -> Option { - id.is_local().then(|| self.tcx.def_span(id)) + pub fn hir_span_if_local(self, id: DefId) -> Option { + id.is_local().then(|| self.def_span(id)) } - pub fn res_span(self, res: Res) -> Option { + pub fn hir_res_span(self, res: Res) -> Option { match res { Res::Err => None, - Res::Local(id) => Some(self.span(id)), - res => self.span_if_local(res.opt_def_id()?), + Res::Local(id) => Some(self.hir_span(id)), + res => self.hir_span_if_local(res.opt_def_id()?), } } /// Returns the HirId of `N` in `struct Foo` when /// called with the HirId for the `{ ... }` anon const - pub fn opt_const_param_default_param_def_id(self, anon_const: HirId) -> Option { - let const_arg = self.tcx.parent_hir_id(anon_const); - match self.tcx.parent_hir_node(const_arg) { + pub fn hir_opt_const_param_default_param_def_id(self, anon_const: HirId) -> Option { + let const_arg = self.parent_hir_id(anon_const); + match self.parent_hir_node(const_arg) { Node::GenericParam(GenericParam { def_id: param_id, kind: GenericParamKind::Const { .. }, @@ -1070,7 +1067,7 @@ impl<'hir> Map<'hir> { } } - pub fn maybe_get_struct_pattern_shorthand_field(&self, expr: &Expr<'_>) -> Option { + pub fn hir_maybe_get_struct_pattern_shorthand_field(self, expr: &Expr<'_>) -> Option { let local = match expr { Expr { kind: @@ -1085,7 +1082,7 @@ impl<'hir> Map<'hir> { _ => None, }?; - match self.tcx.parent_hir_node(expr.hir_id) { + match self.parent_hir_node(expr.hir_id) { Node::ExprField(field) => { if field.ident.name == local.name && field.is_shorthand { return Some(local.name); diff --git a/compiler/rustc_middle/src/hir/mod.rs b/compiler/rustc_middle/src/hir/mod.rs index 347bc5ea31289..044c2d8b6b169 100644 --- a/compiler/rustc_middle/src/hir/mod.rs +++ b/compiler/rustc_middle/src/hir/mod.rs @@ -16,6 +16,7 @@ use rustc_hir::*; use rustc_macros::{Decodable, Encodable, HashStable}; use rustc_span::{ErrorGuaranteed, ExpnId, Span}; +use crate::middle::eii::EiiMapping; use crate::query::Providers; use crate::ty::{EarlyBinder, ImplSubject, TyCtxt}; @@ -83,44 +84,39 @@ impl ModuleItems { &self, f: impl Fn(ItemId) -> Result<(), ErrorGuaranteed> + DynSend + DynSync, ) -> Result<(), ErrorGuaranteed> { - try_par_for_each_in(&self.free_items[..], |&id| f(id)) + try_par_for_each_in(&self.free_items[..], |&&id| f(id)) } pub fn par_trait_items( &self, f: impl Fn(TraitItemId) -> Result<(), ErrorGuaranteed> + DynSend + DynSync, ) -> Result<(), ErrorGuaranteed> { - try_par_for_each_in(&self.trait_items[..], |&id| f(id)) + try_par_for_each_in(&self.trait_items[..], |&&id| f(id)) } pub fn par_impl_items( &self, f: impl Fn(ImplItemId) -> Result<(), ErrorGuaranteed> + DynSend + DynSync, ) -> Result<(), ErrorGuaranteed> { - try_par_for_each_in(&self.impl_items[..], |&id| f(id)) + try_par_for_each_in(&self.impl_items[..], |&&id| f(id)) } pub fn par_foreign_items( &self, f: impl Fn(ForeignItemId) -> Result<(), ErrorGuaranteed> + DynSend + DynSync, ) -> Result<(), ErrorGuaranteed> { - try_par_for_each_in(&self.foreign_items[..], |&id| f(id)) + try_par_for_each_in(&self.foreign_items[..], |&&id| f(id)) } pub fn par_opaques( &self, f: impl Fn(LocalDefId) -> Result<(), ErrorGuaranteed> + DynSend + DynSync, ) -> Result<(), ErrorGuaranteed> { - try_par_for_each_in(&self.opaques[..], |&id| f(id)) + try_par_for_each_in(&self.opaques[..], |&&id| f(id)) } } impl<'tcx> TyCtxt<'tcx> { - #[inline(always)] - pub fn hir(self) -> map::Map<'tcx> { - map::Map { tcx: self } - } - pub fn parent_module(self, id: HirId) -> LocalModDefId { if !id.is_owner() && self.def_kind(id.owner) == DefKind::Mod { LocalModDefId::new_unchecked(id.owner.def_id) @@ -210,15 +206,14 @@ pub fn provide(providers: &mut Providers) { providers.hir_attr_map = |tcx, id| { tcx.hir_crate(()).owners[id.def_id].as_owner().map_or(AttributeMap::EMPTY, |o| &o.attrs) }; - providers.def_span = |tcx, def_id| tcx.hir().span(tcx.local_def_id_to_hir_id(def_id)); + providers.def_span = |tcx, def_id| tcx.hir_span(tcx.local_def_id_to_hir_id(def_id)); providers.def_ident_span = |tcx, def_id| { let hir_id = tcx.local_def_id_to_hir_id(def_id); tcx.hir_opt_ident_span(hir_id) }; - providers.fn_arg_names = |tcx, def_id| { - let hir = tcx.hir(); + providers.fn_arg_idents = |tcx, def_id| { if let Some(body_id) = tcx.hir_node_by_def_id(def_id).body_id() { - tcx.arena.alloc_from_iter(tcx.hir_body_param_names(body_id)) + tcx.arena.alloc_from_iter(tcx.hir_body_param_idents(body_id)) } else if let Node::TraitItem(&TraitItem { kind: TraitItemKind::Fn(_, TraitFn::Required(idents)), .. @@ -229,15 +224,21 @@ pub fn provide(providers: &mut Providers) { }) = tcx.hir_node(tcx.local_def_id_to_hir_id(def_id)) { idents + } else if let Some(EiiMapping { chosen_impl, .. }) = + tcx.get_externally_implementable_item_impls(()).get(&def_id) + { + tcx.fn_arg_idents(chosen_impl) } else { span_bug!( - hir.span(tcx.local_def_id_to_hir_id(def_id)), - "fn_arg_names: unexpected item {:?}", + tcx.hir_span(tcx.local_def_id_to_hir_id(def_id)), + "fn_arg_idents: unexpected item {:?}", def_id ); } }; providers.all_local_trait_impls = |tcx, ()| &tcx.resolutions(()).trait_impls; + providers.local_trait_impls = + |tcx, trait_id| tcx.resolutions(()).trait_impls.get(&trait_id).map_or(&[], |xs| &xs[..]); providers.expn_that_defined = |tcx, id| tcx.resolutions(()).expn_that_defined.get(&id).copied().unwrap_or(ExpnId::root()); providers.in_scope_traits_map = |tcx, id| { diff --git a/compiler/rustc_middle/src/hir/place.rs b/compiler/rustc_middle/src/hir/place.rs index 60ce8544aa0ed..c3d10615cf10c 100644 --- a/compiler/rustc_middle/src/hir/place.rs +++ b/compiler/rustc_middle/src/hir/place.rs @@ -40,6 +40,8 @@ pub enum ProjectionKind { /// A conversion from an opaque type to its hidden type so we can /// do further projections on it. + /// + /// This is unused if `-Znext-solver` is enabled. OpaqueCast, } diff --git a/compiler/rustc_middle/src/infer/canonical.rs b/compiler/rustc_middle/src/infer/canonical.rs index 3cd148cd44278..5b8603744961c 100644 --- a/compiler/rustc_middle/src/infer/canonical.rs +++ b/compiler/rustc_middle/src/infer/canonical.rs @@ -39,15 +39,6 @@ pub type CanonicalVarInfo<'tcx> = ir::CanonicalVarInfo>; pub type CanonicalVarValues<'tcx> = ir::CanonicalVarValues>; pub type CanonicalVarInfos<'tcx> = &'tcx List>; -impl<'tcx> ty::TypeFoldable> for CanonicalVarInfos<'tcx> { - fn try_fold_with>>( - self, - folder: &mut F, - ) -> Result { - ty::util::fold_list(self, folder, |tcx, v| tcx.mk_canonical_var_infos(v)) - } -} - /// When we canonicalize a value to form a query, we wind up replacing /// various parts of it with canonical variables. This struct stores /// those replaced bits to remember for when we process the query diff --git a/compiler/rustc_middle/src/lib.rs b/compiler/rustc_middle/src/lib.rs index 1e6178144c921..979608df79c08 100644 --- a/compiler/rustc_middle/src/lib.rs +++ b/compiler/rustc_middle/src/lib.rs @@ -27,9 +27,8 @@ // tidy-alphabetical-start #![allow(internal_features)] #![allow(rustc::diagnostic_outside_of_impl)] -#![allow(rustc::potential_query_instability)] #![allow(rustc::untranslatable_diagnostic)] -#![cfg_attr(doc, recursion_limit = "256")] // FIXME(nnethercote): will be removed by #124141 +#![cfg_attr(bootstrap, feature(let_chains))] #![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")] #![doc(rust_logo)] #![feature(allocator_api)] @@ -49,7 +48,6 @@ #![feature(if_let_guard)] #![feature(intra_doc_pointers)] #![feature(iter_from_coroutine)] -#![feature(let_chains)] #![feature(min_specialization)] #![feature(negative_impls)] #![feature(never_type)] @@ -62,6 +60,7 @@ #![feature(try_trait_v2_yeet)] #![feature(type_alias_impl_trait)] #![feature(yeet_expr)] +#![recursion_limit = "256"] // tidy-alphabetical-end #[cfg(test)] diff --git a/compiler/rustc_middle/src/lint.rs b/compiler/rustc_middle/src/lint.rs index 88bf17070b9c5..d5a408fdfa6a3 100644 --- a/compiler/rustc_middle/src/lint.rs +++ b/compiler/rustc_middle/src/lint.rs @@ -51,8 +51,13 @@ impl LintLevelSource { } } -/// A tuple of a lint level and its source. -pub type LevelAndSource = (Level, LintLevelSource); +/// Convenience helper for moving things around together that frequently are paired +#[derive(Copy, Clone, Debug, HashStable, Encodable, Decodable)] +pub struct LevelAndSource { + pub level: Level, + pub lint_id: Option, + pub src: LintLevelSource, +} /// Return type for the `shallow_lint_levels_on` query. /// @@ -69,14 +74,18 @@ pub struct ShallowLintLevelMap { /// /// The return of this function is suitable for diagnostics. pub fn reveal_actual_level( - level: Option, + level: Option<(Level, Option)>, src: &mut LintLevelSource, sess: &Session, lint: LintId, - probe_for_lint_level: impl FnOnce(LintId) -> (Option, LintLevelSource), -) -> Level { + probe_for_lint_level: impl FnOnce( + LintId, + ) + -> (Option<(Level, Option)>, LintLevelSource), +) -> (Level, Option) { // If `level` is none then we actually assume the default level for this lint. - let mut level = level.unwrap_or_else(|| lint.lint.default_level(sess.edition())); + let (mut level, mut lint_id) = + level.unwrap_or_else(|| (lint.lint.default_level(sess.edition()), None)); // If we're about to issue a warning, check at the last minute for any // directives against the warnings "lint". If, for example, there's an @@ -88,16 +97,17 @@ pub fn reveal_actual_level( // future compatibility warning. if level == Level::Warn && lint != LintId::of(FORBIDDEN_LINT_GROUPS) { let (warnings_level, warnings_src) = probe_for_lint_level(LintId::of(builtin::WARNINGS)); - if let Some(configured_warning_level) = warnings_level { + if let Some((configured_warning_level, configured_lint_id)) = warnings_level { if configured_warning_level != Level::Warn { level = configured_warning_level; + lint_id = configured_lint_id; *src = warnings_src; } } } // Ensure that we never exceed the `--cap-lints` argument unless the source is a --force-warn - level = if let LintLevelSource::CommandLine(_, Level::ForceWarn(_)) = src { + level = if let LintLevelSource::CommandLine(_, Level::ForceWarn) = src { level } else { cmp::min(level, sess.opts.lint_cap.unwrap_or(Level::Forbid)) @@ -108,7 +118,7 @@ pub fn reveal_actual_level( level = cmp::min(*driver_level, level); } - level + (level, lint_id) } impl ShallowLintLevelMap { @@ -121,11 +131,11 @@ impl ShallowLintLevelMap { tcx: TyCtxt<'_>, id: LintId, start: HirId, - ) -> (Option, LintLevelSource) { + ) -> (Option<(Level, Option)>, LintLevelSource) { if let Some(map) = self.specs.get(&start.local_id) - && let Some(&(level, src)) = map.get(&id) + && let Some(&LevelAndSource { level, lint_id, src }) = map.get(&id) { - return (Some(level), src); + return (Some((level, lint_id)), src); } let mut owner = start.owner; @@ -137,9 +147,9 @@ impl ShallowLintLevelMap { specs = &tcx.shallow_lint_levels_on(owner).specs; } if let Some(map) = specs.get(&parent.local_id) - && let Some(&(level, src)) = map.get(&id) + && let Some(&LevelAndSource { level, lint_id, src }) = map.get(&id) { - return (Some(level), src); + return (Some((level, lint_id)), src); } } @@ -153,18 +163,18 @@ impl ShallowLintLevelMap { tcx: TyCtxt<'_>, lint: LintId, cur: HirId, - ) -> (Level, LintLevelSource) { + ) -> LevelAndSource { let (level, mut src) = self.probe_for_lint_level(tcx, lint, cur); - let level = reveal_actual_level(level, &mut src, tcx.sess, lint, |lint| { + let (level, lint_id) = reveal_actual_level(level, &mut src, tcx.sess, lint, |lint| { self.probe_for_lint_level(tcx, lint, cur) }); - (level, src) + LevelAndSource { level, lint_id, src } } } impl TyCtxt<'_> { /// Fetch and return the user-visible lint level for the given lint at the given HirId. - pub fn lint_level_at_node(self, lint: &'static Lint, id: HirId) -> (Level, LintLevelSource) { + pub fn lint_level_at_node(self, lint: &'static Lint, id: HirId) -> LevelAndSource { self.shallow_lint_levels_on(id.owner).lint_level_id_at_node(self, LintId::of(lint), id) } } @@ -267,8 +277,7 @@ fn explain_lint_level_source( pub fn lint_level( sess: &Session, lint: &'static Lint, - level: Level, - src: LintLevelSource, + level: LevelAndSource, span: Option, decorate: impl for<'a, 'b> FnOnce(&'b mut Diag<'a, ()>), ) { @@ -278,11 +287,12 @@ pub fn lint_level( fn lint_level_impl( sess: &Session, lint: &'static Lint, - level: Level, - src: LintLevelSource, + level: LevelAndSource, span: Option, decorate: Box FnOnce(&'b mut Diag<'a, ()>)>, ) { + let LevelAndSource { level, lint_id, src } = level; + // Check for future incompatibility lints and issue a stronger warning. let future_incompatible = lint.future_incompatible; @@ -301,7 +311,7 @@ pub fn lint_level( return; } } - Level::Expect(expect_id) => { + Level::Expect => { // This case is special as we actually allow the lint itself in this context, but // we can't return early like in the case for `Level::Allow` because we still // need the lint diagnostic to be emitted to `rustc_error::DiagCtxtInner`. @@ -309,10 +319,9 @@ pub fn lint_level( // We can also not mark the lint expectation as fulfilled here right away, as it // can still be cancelled in the decorate function. All of this means that we simply // create a `Diag` and continue as we would for warnings. - rustc_errors::Level::Expect(expect_id) + rustc_errors::Level::Expect } - Level::ForceWarn(Some(expect_id)) => rustc_errors::Level::ForceWarning(Some(expect_id)), - Level::ForceWarn(None) => rustc_errors::Level::ForceWarning(None), + Level::ForceWarn => rustc_errors::Level::ForceWarning, Level::Warn => rustc_errors::Level::Warning, Level::Deny | Level::Forbid => rustc_errors::Level::Error, }; @@ -320,6 +329,9 @@ pub fn lint_level( if let Some(span) = span { err.span(span); } + if let Some(lint_id) = lint_id { + err.lint_id(lint_id); + } // If this code originates in a foreign macro, aka something that this crate // did not itself author, then it's likely that there's nothing this crate @@ -350,7 +362,7 @@ pub fn lint_level( // the compiler. It is therefore not necessary to add any information for the user. // This will therefore directly call the decorate function which will in turn emit // the diagnostic. - if let Level::Expect(_) = level { + if let Level::Expect = level { decorate(&mut err); err.emit(); return; @@ -419,5 +431,5 @@ pub fn lint_level( explain_lint_level_source(lint, level, src, &mut err); err.emit() } - lint_level_impl(sess, lint, level, src, span, Box::new(decorate)) + lint_level_impl(sess, lint, level, span, Box::new(decorate)) } diff --git a/compiler/rustc_middle/src/middle/codegen_fn_attrs.rs b/compiler/rustc_middle/src/middle/codegen_fn_attrs.rs index 0cc72a261a5a3..a04da59d79ec3 100644 --- a/compiler/rustc_middle/src/middle/codegen_fn_attrs.rs +++ b/compiler/rustc_middle/src/middle/codegen_fn_attrs.rs @@ -108,6 +108,7 @@ bitflags::bitflags! { /// `#[no_mangle]`: an indicator that the function's name should be the same /// as its symbol. const NO_MANGLE = 1 << 5; + // FIXME(jdonszelmann): EIIs can replace this, most likely /// `#[rustc_std_internal_symbol]`: an indicator that this symbol is a /// "weird symbol" for the standard library in that it has slightly /// different linkage, visibility, and reachability rules. @@ -139,12 +140,15 @@ bitflags::bitflags! { const ALLOCATOR_ZEROED = 1 << 18; /// `#[no_builtins]`: indicates that disable implicit builtin knowledge of functions for the function. const NO_BUILTINS = 1 << 19; + /// Usually items in extern blocks aren't mangled, but for EII this is exactly what we want + /// This signals that. + const EII_MANGLE_EXTERN = 1 << 20; } } rustc_data_structures::external_bitflags_debug! { CodegenFnAttrFlags } -impl CodegenFnAttrs { - pub const EMPTY: &'static Self = &Self::new(); +impl<'tcx> CodegenFnAttrs { + pub const EMPTY: &'tcx Self = &Self::new(); pub const fn new() -> CodegenFnAttrs { CodegenFnAttrs { @@ -172,6 +176,8 @@ impl CodegenFnAttrs { /// * `#[no_mangle]` is present /// * `#[export_name(...)]` is present /// * `#[linkage]` is present + /// + /// Keep this in sync with the logic for the unused_attributes for `#[inline]` lint. pub fn contains_extern_indicator(&self) -> bool { self.flags.contains(CodegenFnAttrFlags::NO_MANGLE) || self.flags.contains(CodegenFnAttrFlags::RUSTC_STD_INTERNAL_SYMBOL) diff --git a/compiler/rustc_middle/src/middle/eii.rs b/compiler/rustc_middle/src/middle/eii.rs new file mode 100644 index 0000000000000..f8e5724c303ca --- /dev/null +++ b/compiler/rustc_middle/src/middle/eii.rs @@ -0,0 +1,13 @@ +use rustc_data_structures::fx::FxIndexMap; +use rustc_hir::def_id::{DefId, LocalDefId}; +use rustc_macros::{HashStable, TyDecodable, TyEncodable}; + +#[derive(Debug, Copy, Clone, Hash, Eq, PartialEq)] +#[derive(TyEncodable, TyDecodable, HashStable)] +pub struct EiiMapping { + pub extern_item: DefId, + pub chosen_impl: DefId, + pub weak_linkage: bool, +} + +pub type EiiMap = FxIndexMap; diff --git a/compiler/rustc_middle/src/middle/exported_symbols.rs b/compiler/rustc_middle/src/middle/exported_symbols.rs index 0bfbd39879747..d8be6a56d1536 100644 --- a/compiler/rustc_middle/src/middle/exported_symbols.rs +++ b/compiler/rustc_middle/src/middle/exported_symbols.rs @@ -22,7 +22,7 @@ impl SymbolExportLevel { } /// Kind of exported symbols. -#[derive(Eq, PartialEq, Debug, Copy, Clone, Encodable, Decodable, HashStable)] +#[derive(Eq, PartialEq, Debug, Copy, Clone, Encodable, Decodable, HashStable, Hash)] pub enum SymbolExportKind { Text, Data, @@ -44,8 +44,10 @@ pub enum ExportedSymbol<'tcx> { Generic(DefId, GenericArgsRef<'tcx>), DropGlue(Ty<'tcx>), AsyncDropGlueCtorShim(Ty<'tcx>), + AsyncDropGlue(DefId, Ty<'tcx>), ThreadLocalShim(DefId), NoDefId(ty::SymbolName<'tcx>), + Alias { original: DefId, alternative_symbol: ty::SymbolName<'tcx> }, } impl<'tcx> ExportedSymbol<'tcx> { @@ -55,7 +57,7 @@ impl<'tcx> ExportedSymbol<'tcx> { match *self { ExportedSymbol::NonGeneric(def_id) => tcx.symbol_name(ty::Instance::mono(tcx, def_id)), ExportedSymbol::Generic(def_id, args) => { - tcx.symbol_name(ty::Instance::new(def_id, args)) + tcx.symbol_name(ty::Instance::new_raw(def_id, args)) } ExportedSymbol::DropGlue(ty) => { tcx.symbol_name(ty::Instance::resolve_drop_in_place(tcx, ty)) @@ -63,11 +65,15 @@ impl<'tcx> ExportedSymbol<'tcx> { ExportedSymbol::AsyncDropGlueCtorShim(ty) => { tcx.symbol_name(ty::Instance::resolve_async_drop_in_place(tcx, ty)) } + ExportedSymbol::AsyncDropGlue(def_id, ty) => { + tcx.symbol_name(ty::Instance::resolve_async_drop_in_place_poll(tcx, def_id, ty)) + } ExportedSymbol::ThreadLocalShim(def_id) => tcx.symbol_name(ty::Instance { def: ty::InstanceKind::ThreadLocalShim(def_id), args: ty::GenericArgs::empty(), }), ExportedSymbol::NoDefId(symbol_name) => symbol_name, + ExportedSymbol::Alias { original: _, alternative_symbol } => alternative_symbol, } } } diff --git a/compiler/rustc_middle/src/middle/lang_items.rs b/compiler/rustc_middle/src/middle/lang_items.rs index 7a91bfad4836c..0f92c1910f1bb 100644 --- a/compiler/rustc_middle/src/middle/lang_items.rs +++ b/compiler/rustc_middle/src/middle/lang_items.rs @@ -35,11 +35,10 @@ impl<'tcx> TyCtxt<'tcx> { /// returns a corresponding [`ty::ClosureKind`]. /// For any other [`DefId`] return `None`. pub fn fn_trait_kind_from_def_id(self, id: DefId) -> Option { - let items = self.lang_items(); - match Some(id) { - x if x == items.fn_trait() => Some(ty::ClosureKind::Fn), - x if x == items.fn_mut_trait() => Some(ty::ClosureKind::FnMut), - x if x == items.fn_once_trait() => Some(ty::ClosureKind::FnOnce), + match self.as_lang_item(id)? { + LangItem::Fn => Some(ty::ClosureKind::Fn), + LangItem::FnMut => Some(ty::ClosureKind::FnMut), + LangItem::FnOnce => Some(ty::ClosureKind::FnOnce), _ => None, } } @@ -48,11 +47,10 @@ impl<'tcx> TyCtxt<'tcx> { /// returns a corresponding [`ty::ClosureKind`]. /// For any other [`DefId`] return `None`. pub fn async_fn_trait_kind_from_def_id(self, id: DefId) -> Option { - let items = self.lang_items(); - match Some(id) { - x if x == items.async_fn_trait() => Some(ty::ClosureKind::Fn), - x if x == items.async_fn_mut_trait() => Some(ty::ClosureKind::FnMut), - x if x == items.async_fn_once_trait() => Some(ty::ClosureKind::FnOnce), + match self.as_lang_item(id)? { + LangItem::AsyncFn => Some(ty::ClosureKind::Fn), + LangItem::AsyncFnMut => Some(ty::ClosureKind::FnMut), + LangItem::AsyncFnOnce => Some(ty::ClosureKind::FnOnce), _ => None, } } diff --git a/compiler/rustc_middle/src/middle/mod.rs b/compiler/rustc_middle/src/middle/mod.rs index 4587dcaddc487..707ae98990592 100644 --- a/compiler/rustc_middle/src/middle/mod.rs +++ b/compiler/rustc_middle/src/middle/mod.rs @@ -30,6 +30,7 @@ pub mod lib_features { } } } +pub mod eii; pub mod privacy; pub mod region; pub mod resolve_bound_vars; diff --git a/compiler/rustc_middle/src/middle/region.rs b/compiler/rustc_middle/src/middle/region.rs index ba31f775b651e..92eab59dd0274 100644 --- a/compiler/rustc_middle/src/middle/region.rs +++ b/compiler/rustc_middle/src/middle/region.rs @@ -175,7 +175,7 @@ impl Scope { let Some(hir_id) = self.hir_id(scope_tree) else { return DUMMY_SP; }; - let span = tcx.hir().span(hir_id); + let span = tcx.hir_span(hir_id); if let ScopeData::Remainder(first_statement_index) = self.data { if let Node::Block(blk) = tcx.hir_node(hir_id) { // Want span for scope starting after the diff --git a/compiler/rustc_middle/src/middle/stability.rs b/compiler/rustc_middle/src/middle/stability.rs index ec128c8c47863..9912e659b05f5 100644 --- a/compiler/rustc_middle/src/middle/stability.rs +++ b/compiler/rustc_middle/src/middle/stability.rs @@ -255,7 +255,7 @@ fn late_report_deprecation( // Calculating message for lint involves calling `self.def_path_str`, // which will by default invoke the expensive `visible_parent_map` query. // Skip all that work if the lint is allowed anyway. - if tcx.lint_level_at_node(lint, hir_id).0 == Level::Allow { + if tcx.lint_level_at_node(lint, hir_id).level == Level::Allow { return; } diff --git a/compiler/rustc_middle/src/mir/interpret/error.rs b/compiler/rustc_middle/src/mir/interpret/error.rs index 890756a17cae7..6ff3cac049b3a 100644 --- a/compiler/rustc_middle/src/mir/interpret/error.rs +++ b/compiler/rustc_middle/src/mir/interpret/error.rs @@ -221,13 +221,11 @@ pub enum InvalidProgramInfo<'tcx> { #[derive(Debug, Copy, Clone)] pub enum CheckInAllocMsg { /// We are access memory. - MemoryAccessTest, + MemoryAccess, /// We are doing pointer arithmetic. - PointerArithmeticTest, - /// We are doing pointer offset_from. - OffsetFromTest, + InboundsPointerArithmetic, /// None of the above -- generic/unspecific inbounds test. - InboundsTest, + Dereferenceable, } /// Details of which pointer is not aligned. diff --git a/compiler/rustc_middle/src/mir/interpret/queries.rs b/compiler/rustc_middle/src/mir/interpret/queries.rs index 4222a68e5447d..4a5c42c721c12 100644 --- a/compiler/rustc_middle/src/mir/interpret/queries.rs +++ b/compiler/rustc_middle/src/mir/interpret/queries.rs @@ -23,7 +23,7 @@ impl<'tcx> TyCtxt<'tcx> { // into `const_eval` which will return `ErrorHandled::TooGeneric` if any of them are // encountered. let args = GenericArgs::identity_for_item(self, def_id); - let instance = ty::Instance::new(def_id, args); + let instance = ty::Instance::new_raw(def_id, args); let cid = GlobalId { instance, promoted: None }; let typing_env = ty::TypingEnv::post_analysis(self, def_id); self.const_eval_global_id(typing_env, cid, DUMMY_SP) @@ -39,7 +39,7 @@ impl<'tcx> TyCtxt<'tcx> { // into `const_eval` which will return `ErrorHandled::TooGeneric` if any of them are // encountered. let args = GenericArgs::identity_for_item(self, def_id); - let instance = ty::Instance::new(def_id, args); + let instance = ty::Instance::new_raw(def_id, args); let cid = GlobalId { instance, promoted: None }; let typing_env = ty::TypingEnv::post_analysis(self, def_id); let inputs = self.erase_regions(typing_env.as_query_input(cid)); @@ -115,15 +115,16 @@ impl<'tcx> TyCtxt<'tcx> { // @lcnr believes that successfully evaluating even though there are // used generic parameters is a bug of evaluation, so checking for it // here does feel somewhat sensible. - if !self.features().generic_const_exprs() && ct.args.has_non_region_param() { - let def_kind = self.def_kind(instance.def_id()); - assert!( - matches!( - def_kind, - DefKind::InlineConst | DefKind::AnonConst | DefKind::AssocConst - ), - "{cid:?} is {def_kind:?}", - ); + if !self.features().generic_const_exprs() + && ct.args.has_non_region_param() + // We only FCW for anon consts as repeat expr counts with anon consts are the only place + // that we have a back compat hack for. We don't need to check this is a const argument + // as only anon consts as const args should get evaluated "for the type system". + // + // If we don't *only* FCW anon consts we can wind up incorrectly FCW'ing uses of assoc + // consts in pattern positions. #140447 + && self.def_kind(instance.def_id()) == DefKind::AnonConst + { let mir_body = self.mir_for_ctfe(instance.def_id()); if mir_body.is_polymorphic { let Some(local_def_id) = ct.def.as_local() else { return }; @@ -208,7 +209,7 @@ impl<'tcx> TyCtxtEnsureOk<'tcx> { // into `const_eval` which will return `ErrorHandled::TooGeneric` if any of them are // encountered. let args = GenericArgs::identity_for_item(self.tcx, def_id); - let instance = ty::Instance::new(def_id, self.tcx.erase_regions(args)); + let instance = ty::Instance::new_raw(def_id, self.tcx.erase_regions(args)); let cid = GlobalId { instance, promoted: None }; let typing_env = ty::TypingEnv::post_analysis(self.tcx, def_id); // Const-eval shouldn't depend on lifetimes at all, so we can erase them, which should diff --git a/compiler/rustc_middle/src/mir/mod.rs b/compiler/rustc_middle/src/mir/mod.rs index 4dfb362f3a22b..ba734a2e7d108 100644 --- a/compiler/rustc_middle/src/mir/mod.rs +++ b/compiler/rustc_middle/src/mir/mod.rs @@ -200,7 +200,13 @@ pub struct CoroutineInfo<'tcx> { /// Coroutine drop glue. This field is populated after the state transform pass. pub coroutine_drop: Option>, - /// The layout of a coroutine. This field is populated after the state transform pass. + /// Coroutine async drop glue. + pub coroutine_drop_async: Option>, + + /// When coroutine has sync drop, this is async proxy calling `coroutine_drop` sync impl. + pub coroutine_drop_proxy_async: Option>, + + /// The layout of a coroutine. Produced by the state transformation. pub coroutine_layout: Option>, /// If this is a coroutine then record the type of source expression that caused this coroutine @@ -220,6 +226,8 @@ impl<'tcx> CoroutineInfo<'tcx> { yield_ty: Some(yield_ty), resume_ty: Some(resume_ty), coroutine_drop: None, + coroutine_drop_async: None, + coroutine_drop_proxy_async: None, coroutine_layout: None, } } @@ -587,6 +595,26 @@ impl<'tcx> Body<'tcx> { self.coroutine.as_ref().and_then(|coroutine| coroutine.coroutine_drop.as_ref()) } + #[inline] + pub fn coroutine_drop_async(&self) -> Option<&Body<'tcx>> { + self.coroutine.as_ref().and_then(|coroutine| coroutine.coroutine_drop_async.as_ref()) + } + + #[inline] + pub fn coroutine_requires_async_drop(&self) -> bool { + self.coroutine_drop_async().is_some() + } + + #[inline] + pub fn future_drop_poll(&self) -> Option<&Body<'tcx>> { + self.coroutine.as_ref().and_then(|coroutine| { + coroutine + .coroutine_drop_async + .as_ref() + .or(coroutine.coroutine_drop_proxy_async.as_ref()) + }) + } + #[inline] pub fn coroutine_kind(&self) -> Option { self.coroutine.as_ref().map(|coroutine| coroutine.coroutine_kind) @@ -1636,8 +1664,8 @@ pub fn find_self_call<'tcx>( &body[block].terminator && let Operand::Constant(box ConstOperand { const_, .. }) = func && let ty::FnDef(def_id, fn_args) = *const_.ty().kind() - && let Some(ty::AssocItem { fn_has_self_parameter: true, .. }) = - tcx.opt_associated_item(def_id) + && let Some(item) = tcx.opt_associated_item(def_id) + && item.is_method() && let [Spanned { node: Operand::Move(self_place) | Operand::Copy(self_place), .. }, ..] = **args { @@ -1672,7 +1700,7 @@ mod size_asserts { // tidy-alphabetical-start static_assert_size!(BasicBlockData<'_>, 128); static_assert_size!(LocalDecl<'_>, 40); - static_assert_size!(SourceScopeData<'_>, 64); + static_assert_size!(SourceScopeData<'_>, 72); static_assert_size!(Statement<'_>, 32); static_assert_size!(Terminator<'_>, 96); static_assert_size!(VarDebugInfo<'_>, 88); diff --git a/compiler/rustc_middle/src/mir/mono.rs b/compiler/rustc_middle/src/mir/mono.rs index 83ada5c8afb3e..5b1926f154eb6 100644 --- a/compiler/rustc_middle/src/mir/mono.rs +++ b/compiler/rustc_middle/src/mir/mono.rs @@ -150,6 +150,7 @@ impl<'tcx> MonoItem<'tcx> { // If the function is #[naked] or contains any other attribute that requires exactly-once // instantiation: + // We emit an unused_attributes lint for this case, which should be kept in sync if possible. let codegen_fn_attrs = tcx.codegen_fn_attrs(instance.def_id()); if codegen_fn_attrs.contains_extern_indicator() || codegen_fn_attrs.flags.contains(CodegenFnAttrFlags::NAKED) @@ -180,6 +181,15 @@ impl<'tcx> MonoItem<'tcx> { return opt_incr_drop_glue_mode(tcx, ty); } + // Eii shims are only generated in the final crate because we need to resolve defaults. + // Specifically, only when making the final crate we know whether there was an explicit + // implementation given *somewhere* and if not we then have to decide whether there is + // a default which we need to insert. That default needs to be shared between all + // dependencies; hence globally shared. + if let InstanceKind::EiiShim { .. } = instance.def { + return InstantiationMode::GloballyShared { may_conflict: false }; + } + // We need to ensure that we do not decide the InstantiationMode of an exported symbol is // LocalCopy. Since exported symbols are computed based on the output of // cross_crate_inlinable, we are beholden to our previous decisions. @@ -317,7 +327,7 @@ impl<'tcx> fmt::Display for MonoItem<'tcx> { match *self { MonoItem::Fn(instance) => write!(f, "fn {instance}"), MonoItem::Static(def_id) => { - write!(f, "static {}", Instance::new(def_id, GenericArgs::empty())) + write!(f, "static {}", Instance::new_raw(def_id, GenericArgs::empty())) } MonoItem::GlobalAsm(..) => write!(f, "global_asm"), } @@ -373,7 +383,7 @@ pub struct MonoItemData { /// Specifies the linkage type for a `MonoItem`. /// /// See for more details about these variants. -#[derive(Copy, Clone, PartialEq, Debug, TyEncodable, TyDecodable, HashStable)] +#[derive(Copy, Clone, PartialEq, Debug, TyEncodable, TyDecodable, HashStable, Eq, Hash)] pub enum Linkage { External, AvailableExternally, @@ -529,7 +539,10 @@ impl<'tcx> CodegenUnit<'tcx> { | InstanceKind::CloneShim(..) | InstanceKind::ThreadLocalShim(..) | InstanceKind::FnPtrAddrShim(..) - | InstanceKind::AsyncDropGlueCtorShim(..) => None, + | InstanceKind::AsyncDropGlue(..) + | InstanceKind::FutureDropPollShim(..) + | InstanceKind::AsyncDropGlueCtorShim(..) + | InstanceKind::EiiShim { .. } => None, } } MonoItem::Static(def_id) => def_id.as_local().map(Idx::index), diff --git a/compiler/rustc_middle/src/mir/pretty.rs b/compiler/rustc_middle/src/mir/pretty.rs index 5a038b27337cf..57ae7dc55c5c8 100644 --- a/compiler/rustc_middle/src/mir/pretty.rs +++ b/compiler/rustc_middle/src/mir/pretty.rs @@ -253,9 +253,7 @@ fn dump_path<'tcx>( })); s } - ty::InstanceKind::AsyncDropGlueCtorShim(_, Some(ty)) => { - // Unfortunately, pretty-printed typed are not very filename-friendly. - // We dome some filtering. + ty::InstanceKind::AsyncDropGlueCtorShim(_, ty) => { let mut s = ".".to_owned(); s.extend(ty.to_string().chars().filter_map(|c| match c { ' ' => None, @@ -264,6 +262,34 @@ fn dump_path<'tcx>( })); s } + ty::InstanceKind::AsyncDropGlue(_, ty) => { + let ty::Coroutine(_, args) = ty.kind() else { + bug!(); + }; + let ty = args.first().unwrap().expect_ty(); + let mut s = ".".to_owned(); + s.extend(ty.to_string().chars().filter_map(|c| match c { + ' ' => None, + ':' | '<' | '>' => Some('_'), + c => Some(c), + })); + s + } + ty::InstanceKind::FutureDropPollShim(_, proxy_cor, impl_cor) => { + let mut s = ".".to_owned(); + s.extend(proxy_cor.to_string().chars().filter_map(|c| match c { + ' ' => None, + ':' | '<' | '>' => Some('_'), + c => Some(c), + })); + s.push('.'); + s.extend(impl_cor.to_string().chars().filter_map(|c| match c { + ' ' => None, + ':' | '<' | '>' => Some('_'), + c => Some(c), + })); + s + } _ => String::new(), }; @@ -531,12 +557,12 @@ fn write_mir_intro<'tcx>( // construct a scope tree and write it out let mut scope_tree: FxHashMap> = Default::default(); - for (index, scope_data) in body.source_scopes.iter().enumerate() { + for (index, scope_data) in body.source_scopes.iter_enumerated() { if let Some(parent) = scope_data.parent_scope { - scope_tree.entry(parent).or_default().push(SourceScope::new(index)); + scope_tree.entry(parent).or_default().push(index); } else { // Only the argument scope has no parent, because it's the root. - assert_eq!(index, OUTERMOST_SOURCE_SCOPE.index()); + assert_eq!(index, OUTERMOST_SOURCE_SCOPE); } } @@ -859,7 +885,7 @@ impl Debug for Statement<'_> { BackwardIncompatibleDropHint { ref place, reason: _ } => { // For now, we don't record the reason because there is only one use case, // which is to report breaking change in drop order by Edition 2024 - write!(fmt, "backward incompatible drop({place:?})") + write!(fmt, "BackwardIncompatibleDropHint({place:?})") } } } @@ -1050,7 +1076,13 @@ impl<'tcx> TerminatorKind<'tcx> { Call { target: None, unwind: _, .. } => vec![], Yield { drop: Some(_), .. } => vec!["resume".into(), "drop".into()], Yield { drop: None, .. } => vec!["resume".into()], - Drop { unwind: UnwindAction::Cleanup(_), .. } => vec!["return".into(), "unwind".into()], + Drop { unwind: UnwindAction::Cleanup(_), drop: Some(_), .. } => { + vec!["return".into(), "unwind".into(), "drop".into()] + } + Drop { unwind: UnwindAction::Cleanup(_), drop: None, .. } => { + vec!["return".into(), "unwind".into()] + } + Drop { unwind: _, drop: Some(_), .. } => vec!["return".into(), "drop".into()], Drop { unwind: _, .. } => vec!["return".into()], Assert { unwind: UnwindAction::Cleanup(_), .. } => { vec!["success".into(), "unwind".into()] diff --git a/compiler/rustc_middle/src/mir/query.rs b/compiler/rustc_middle/src/mir/query.rs index 5a9fe10938ae1..3fc05f2caf2ad 100644 --- a/compiler/rustc_middle/src/mir/query.rs +++ b/compiler/rustc_middle/src/mir/query.rs @@ -6,14 +6,13 @@ use rustc_abi::{FieldIdx, VariantIdx}; use rustc_data_structures::fx::FxIndexMap; use rustc_errors::ErrorGuaranteed; use rustc_hir::def_id::LocalDefId; +use rustc_index::IndexVec; use rustc_index::bit_set::BitMatrix; -use rustc_index::{Idx, IndexVec}; use rustc_macros::{HashStable, TyDecodable, TyEncodable, TypeFoldable, TypeVisitable}; use rustc_span::{Span, Symbol}; -use smallvec::SmallVec; use super::{ConstValue, SourceInfo}; -use crate::ty::{self, CoroutineArgsExt, OpaqueHiddenType, Ty, TyCtxt, fold_regions}; +use crate::ty::{self, CoroutineArgsExt, OpaqueHiddenType, Ty}; rustc_index::newtype_index! { #[derive(HashStable)] @@ -85,16 +84,11 @@ impl Debug for CoroutineLayout<'_> { } } -#[derive(Debug, TyEncodable, TyDecodable, HashStable)] -pub struct BorrowCheckResult<'tcx> { - /// All the opaque types that are restricted to concrete types - /// by this function. Unlike the value in `TypeckResults`, this has - /// unerased regions. - pub concrete_opaque_types: FxIndexMap>, - pub closure_requirements: Option>, - pub used_mut_upvars: SmallVec<[FieldIdx; 8]>, - pub tainted_by_errors: Option, -} +/// All the opaque types that are restricted to concrete types +/// by this function. Unlike the value in `TypeckResults`, this has +/// unerased regions. +#[derive(Default, Debug, TyEncodable, TyDecodable, HashStable)] +pub struct ConcreteOpaqueTypes<'tcx>(pub FxIndexMap>); /// The result of the `mir_const_qualif` query. /// @@ -108,84 +102,6 @@ pub struct ConstQualifs { pub needs_non_const_drop: bool, pub tainted_by_errors: Option, } - -/// After we borrow check a closure, we are left with various -/// requirements that we have inferred between the free regions that -/// appear in the closure's signature or on its field types. These -/// requirements are then verified and proved by the closure's -/// creating function. This struct encodes those requirements. -/// -/// The requirements are listed as being between various `RegionVid`. The 0th -/// region refers to `'static`; subsequent region vids refer to the free -/// regions that appear in the closure (or coroutine's) type, in order of -/// appearance. (This numbering is actually defined by the `UniversalRegions` -/// struct in the NLL region checker. See for example -/// `UniversalRegions::closure_mapping`.) Note the free regions in the -/// closure's signature and captures are erased. -/// -/// Example: If type check produces a closure with the closure args: -/// -/// ```text -/// ClosureArgs = [ -/// 'a, // From the parent. -/// 'b, -/// i8, // the "closure kind" -/// for<'x> fn(&' &'x u32) -> &'x u32, // the "closure signature" -/// &' String, // some upvar -/// ] -/// ``` -/// -/// We would "renumber" each free region to a unique vid, as follows: -/// -/// ```text -/// ClosureArgs = [ -/// '1, // From the parent. -/// '2, -/// i8, // the "closure kind" -/// for<'x> fn(&'3 &'x u32) -> &'x u32, // the "closure signature" -/// &'4 String, // some upvar -/// ] -/// ``` -/// -/// Now the code might impose a requirement like `'1: '2`. When an -/// instance of the closure is created, the corresponding free regions -/// can be extracted from its type and constrained to have the given -/// outlives relationship. -#[derive(Clone, Debug, TyEncodable, TyDecodable, HashStable)] -pub struct ClosureRegionRequirements<'tcx> { - /// The number of external regions defined on the closure. In our - /// example above, it would be 3 -- one for `'static`, then `'1` - /// and `'2`. This is just used for a sanity check later on, to - /// make sure that the number of regions we see at the callsite - /// matches. - pub num_external_vids: usize, - - /// Requirements between the various free regions defined in - /// indices. - pub outlives_requirements: Vec>, -} - -/// Indicates an outlives-constraint between a type or between two -/// free regions declared on the closure. -#[derive(Copy, Clone, Debug, TyEncodable, TyDecodable, HashStable)] -pub struct ClosureOutlivesRequirement<'tcx> { - // This region or type ... - pub subject: ClosureOutlivesSubject<'tcx>, - - // ... must outlive this one. - pub outlived_free_region: ty::RegionVid, - - // If not, report an error here ... - pub blame_span: Span, - - // ... due to this reason. - pub category: ConstraintCategory<'tcx>, -} - -// Make sure this enum doesn't unintentionally grow -#[cfg(target_pointer_width = "64")] -rustc_data_structures::static_assert_size!(ConstraintCategory<'_>, 16); - /// Outlives-constraints can be categorized to determine whether and why they /// are interesting (for error reporting). Order of variants indicates sort /// order of the category, thereby influencing diagnostic output. @@ -253,66 +169,6 @@ pub enum AnnotationSource { GenericArg, } -/// The subject of a `ClosureOutlivesRequirement` -- that is, the thing -/// that must outlive some region. -#[derive(Copy, Clone, Debug, TyEncodable, TyDecodable, HashStable)] -pub enum ClosureOutlivesSubject<'tcx> { - /// Subject is a type, typically a type parameter, but could also - /// be a projection. Indicates a requirement like `T: 'a` being - /// passed to the caller, where the type here is `T`. - Ty(ClosureOutlivesSubjectTy<'tcx>), - - /// Subject is a free region from the closure. Indicates a requirement - /// like `'a: 'b` being passed to the caller; the region here is `'a`. - Region(ty::RegionVid), -} - -/// Represents a `ty::Ty` for use in [`ClosureOutlivesSubject`]. -/// -/// This abstraction is necessary because the type may include `ReVar` regions, -/// which is what we use internally within NLL code, and they can't be used in -/// a query response. -/// -/// DO NOT implement `TypeVisitable` or `TypeFoldable` traits, because this -/// type is not recognized as a binder for late-bound region. -#[derive(Copy, Clone, Debug, TyEncodable, TyDecodable, HashStable)] -pub struct ClosureOutlivesSubjectTy<'tcx> { - inner: Ty<'tcx>, -} - -impl<'tcx> ClosureOutlivesSubjectTy<'tcx> { - /// All regions of `ty` must be of kind `ReVar` and must represent - /// universal regions *external* to the closure. - pub fn bind(tcx: TyCtxt<'tcx>, ty: Ty<'tcx>) -> Self { - let inner = fold_regions(tcx, ty, |r, depth| match r.kind() { - ty::ReVar(vid) => { - let br = ty::BoundRegion { - var: ty::BoundVar::new(vid.index()), - kind: ty::BoundRegionKind::Anon, - }; - ty::Region::new_bound(tcx, depth, br) - } - _ => bug!("unexpected region in ClosureOutlivesSubjectTy: {r:?}"), - }); - - Self { inner } - } - - pub fn instantiate( - self, - tcx: TyCtxt<'tcx>, - mut map: impl FnMut(ty::RegionVid) -> ty::Region<'tcx>, - ) -> Ty<'tcx> { - fold_regions(tcx, self.inner, |r, depth| match r.kind() { - ty::ReBound(debruijn, br) => { - debug_assert_eq!(debruijn, depth); - map(ty::RegionVid::new(br.var.index())) - } - _ => bug!("unexpected region {r:?}"), - }) - } -} - /// The constituent parts of a mir constant of kind ADT or array. #[derive(Copy, Clone, Debug, HashStable)] pub struct DestructuredConstant<'tcx> { diff --git a/compiler/rustc_middle/src/mir/syntax.rs b/compiler/rustc_middle/src/mir/syntax.rs index 6d6e6a1f185b5..bb068f3821db8 100644 --- a/compiler/rustc_middle/src/mir/syntax.rs +++ b/compiler/rustc_middle/src/mir/syntax.rs @@ -77,6 +77,8 @@ pub enum MirPhase { /// exception is fields of packed structs. In analysis MIR, `Drop(P)` for a `P` that might be /// misaligned for this reason implicitly moves `P` to a temporary before dropping. Runtime /// MIR has no such rules, and dropping a misaligned place is simply UB. + /// - Async drops: after drop elaboration some drops may become async (`drop`, `async_fut` fields). + /// StateTransform pass will expand those async drops or reset to sync. /// - Unwinding: in analysis MIR, unwinding from a function which may not unwind aborts. In /// runtime MIR, this is UB. /// - Retags: If `-Zmir-emit-retag` is enabled, analysis MIR has "implicit" retags in the same @@ -652,6 +654,8 @@ pub enum CallSource { /// Other types of desugaring that did not come from the HIR, but we don't care about /// for diagnostics (yet). Misc, + /// Use of value, generating a clone function call + Use, /// Normal function call, no special source Normal, } @@ -766,7 +770,34 @@ pub enum TerminatorKind<'tcx> { /// The `replace` flag indicates whether this terminator was created as part of an assignment. /// This should only be used for diagnostic purposes, and does not have any operational /// meaning. - Drop { place: Place<'tcx>, target: BasicBlock, unwind: UnwindAction, replace: bool }, + /// + /// Async drop processing: + /// In compiler/rustc_mir_build/src/build/scope.rs we detect possible async drop: + /// drop of object with `needs_async_drop`. + /// Async drop later, in StateTransform pass, may be expanded into additional yield-point + /// for poll-loop of async drop future. + /// So we need prepared 'drop' target block in the similar way as for `Yield` terminator + /// (see `drops.build_mir::` in scopes.rs). + /// In compiler/rustc_mir_transform/src/elaborate_drops.rs for object implementing `AsyncDrop` trait + /// we need to prepare async drop feature - resolve `AsyncDrop::drop` and codegen call. + /// `async_fut` is set to the corresponding local. + /// For coroutine drop we don't need this logic because coroutine drop works with the same + /// layout object as coroutine itself. So `async_fut` will be `None` for coroutine drop. + /// Both `drop` and `async_fut` fields are only used in compiler/rustc_mir_transform/src/coroutine.rs, + /// StateTransform pass. In `expand_async_drops` async drops are expanded + /// into one or two yield points with poll ready/pending switch. + /// When a coroutine has any internal async drop, the coroutine drop function will be async + /// (generated by `create_coroutine_drop_shim_async`, not `create_coroutine_drop_shim`). + Drop { + place: Place<'tcx>, + target: BasicBlock, + unwind: UnwindAction, + replace: bool, + /// Cleanup to be done if the coroutine is dropped at this suspend point (for async drop). + drop: Option, + /// Prepared async future local (for async drop) + async_fut: Option, + }, /// Roughly speaking, evaluates the `func` operand and the arguments, and starts execution of /// the referred to function. The operand types must match the argument types of the function. @@ -929,6 +960,8 @@ pub enum TerminatorKind<'tcx> { asm_macro: InlineAsmMacro, /// The template for the inline assembly, with placeholders. + #[type_foldable(identity)] + #[type_visitable(ignore)] template: &'tcx [InlineAsmTemplatePiece], /// The operands for the inline assembly, as `Operand`s or `Place`s. @@ -939,6 +972,8 @@ pub enum TerminatorKind<'tcx> { /// Source spans for each line of the inline assembly code. These are /// used to map assembler errors back to the line in the source code. + #[type_foldable(identity)] + #[type_visitable(ignore)] line_spans: &'tcx [Span], /// Valid targets for the inline assembly. @@ -1037,6 +1072,7 @@ pub enum AssertKind { RemainderByZero(O), ResumedAfterReturn(CoroutineKind), ResumedAfterPanic(CoroutineKind), + ResumedAfterDrop(CoroutineKind), MisalignedPointerDereference { required: O, found: O }, NullPointerDereference, } @@ -1236,6 +1272,8 @@ pub enum ProjectionElem { /// Like an explicit cast from an opaque type to a concrete type, but without /// requiring an intermediate variable. + /// + /// This is unused with `-Znext-solver`. OpaqueCast(T), /// A transmute from an unsafe binder to the type that it wraps. This is a projection @@ -1668,6 +1706,42 @@ pub enum BinOp { Offset, } +// Assignment operators, e.g. `+=`. See comments on the corresponding variants +// in `BinOp` for details. +#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, HashStable)] +pub enum AssignOp { + AddAssign, + SubAssign, + MulAssign, + DivAssign, + RemAssign, + BitXorAssign, + BitAndAssign, + BitOrAssign, + ShlAssign, + ShrAssign, +} + +// Sometimes `BinOp` and `AssignOp` need the same treatment. The operations +// covered by `AssignOp` are a subset of those covered by `BinOp`, so it makes +// sense to convert `AssignOp` to `BinOp`. +impl From for BinOp { + fn from(op: AssignOp) -> BinOp { + match op { + AssignOp::AddAssign => BinOp::Add, + AssignOp::SubAssign => BinOp::Sub, + AssignOp::MulAssign => BinOp::Mul, + AssignOp::DivAssign => BinOp::Div, + AssignOp::RemAssign => BinOp::Rem, + AssignOp::BitXorAssign => BinOp::BitXor, + AssignOp::BitAndAssign => BinOp::BitAnd, + AssignOp::BitOrAssign => BinOp::BitOr, + AssignOp::ShlAssign => BinOp::Shl, + AssignOp::ShrAssign => BinOp::Shr, + } + } +} + // Some nodes are used a lot. Make sure they don't unintentionally get bigger. #[cfg(target_pointer_width = "64")] mod size_asserts { diff --git a/compiler/rustc_middle/src/mir/terminator.rs b/compiler/rustc_middle/src/mir/terminator.rs index b2c51ad88645c..2ce76d7b7368d 100644 --- a/compiler/rustc_middle/src/mir/terminator.rs +++ b/compiler/rustc_middle/src/mir/terminator.rs @@ -208,6 +208,16 @@ impl AssertKind { LangItem::PanicGenFnNonePanic } NullPointerDereference => LangItem::PanicNullPointerDereference, + ResumedAfterDrop(CoroutineKind::Coroutine(_)) => LangItem::PanicCoroutineResumedDrop, + ResumedAfterDrop(CoroutineKind::Desugared(CoroutineDesugaring::Async, _)) => { + LangItem::PanicAsyncFnResumedDrop + } + ResumedAfterDrop(CoroutineKind::Desugared(CoroutineDesugaring::AsyncGen, _)) => { + LangItem::PanicAsyncGenFnResumedDrop + } + ResumedAfterDrop(CoroutineKind::Desugared(CoroutineDesugaring::Gen, _)) => { + LangItem::PanicGenFnNoneDrop + } BoundsCheck { .. } | MisalignedPointerDereference { .. } => { bug!("Unexpected AssertKind") @@ -215,6 +225,12 @@ impl AssertKind { } } + /// Generally do not use this and use `panic_function` instead. + /// Gives the lang item that is required to exist for this assertion + /// to be emitted. This sometimes causes the assertion not to be emitted + /// if a lang item isn't there. + pub fn required_lang_item(&self) {} + /// Format the message arguments for the `assert(cond, msg..)` terminator in MIR printing. /// /// Needs to be kept in sync with the run-time behavior (which is defined by @@ -298,6 +314,18 @@ impl AssertKind { ResumedAfterPanic(CoroutineKind::Desugared(CoroutineDesugaring::Gen, _)) => { write!(f, "\"`gen fn` should just keep returning `None` after panicking\"") } + ResumedAfterDrop(CoroutineKind::Coroutine(_)) => { + write!(f, "\"coroutine resumed after async drop\"") + } + ResumedAfterDrop(CoroutineKind::Desugared(CoroutineDesugaring::Async, _)) => { + write!(f, "\"`async fn` resumed after async drop\"") + } + ResumedAfterDrop(CoroutineKind::Desugared(CoroutineDesugaring::AsyncGen, _)) => { + write!(f, "\"`async gen fn` resumed after async drop\"") + } + ResumedAfterDrop(CoroutineKind::Desugared(CoroutineDesugaring::Gen, _)) => { + write!(f, "\"`gen fn` resumed after drop\"") + } } } @@ -345,6 +373,19 @@ impl AssertKind { middle_assert_coroutine_resume_after_panic } NullPointerDereference => middle_assert_null_ptr_deref, + ResumedAfterDrop(CoroutineKind::Desugared(CoroutineDesugaring::Async, _)) => { + middle_assert_async_resume_after_drop + } + ResumedAfterDrop(CoroutineKind::Desugared(CoroutineDesugaring::AsyncGen, _)) => { + todo!() + } + ResumedAfterDrop(CoroutineKind::Desugared(CoroutineDesugaring::Gen, _)) => { + middle_assert_gen_resume_after_drop + } + ResumedAfterDrop(CoroutineKind::Coroutine(_)) => { + middle_assert_coroutine_resume_after_drop + } + MisalignedPointerDereference { .. } => middle_assert_misaligned_ptr_deref, } } @@ -377,7 +418,10 @@ impl AssertKind { add!("left", format!("{left:#?}")); add!("right", format!("{right:#?}")); } - ResumedAfterReturn(_) | ResumedAfterPanic(_) | NullPointerDereference => {} + ResumedAfterReturn(_) + | ResumedAfterPanic(_) + | NullPointerDereference + | ResumedAfterDrop(_) => {} MisalignedPointerDereference { required, found } => { add!("required", format!("{required:#?}")); add!("found", format!("{found:#?}")); @@ -399,8 +443,8 @@ impl<'tcx> Terminator<'tcx> { } #[inline] - pub fn successors_mut(&mut self) -> SuccessorsMut<'_> { - self.kind.successors_mut() + pub fn successors_mut<'a>(&'a mut self, f: impl FnMut(&'a mut BasicBlock)) { + self.kind.successors_mut(f) } #[inline] @@ -448,32 +492,41 @@ pub use helper::*; mod helper { use super::*; pub type Successors<'a> = impl DoubleEndedIterator + 'a; - pub type SuccessorsMut<'a> = impl DoubleEndedIterator + 'a; impl SwitchTargets { /// Like [`SwitchTargets::target_for_value`], but returning the same type as /// [`Terminator::successors`]. #[inline] - #[cfg_attr(not(bootstrap), define_opaque(Successors))] + #[define_opaque(Successors)] pub fn successors_for_value(&self, value: u128) -> Successors<'_> { let target = self.target_for_value(value); - (&[]).into_iter().copied().chain(Some(target)) + (&[]).into_iter().copied().chain(Some(target).into_iter().chain(None)) } } impl<'tcx> TerminatorKind<'tcx> { #[inline] - #[cfg_attr(not(bootstrap), define_opaque(Successors))] + #[define_opaque(Successors)] pub fn successors(&self) -> Successors<'_> { use self::TerminatorKind::*; match *self { + // 3-successors for async drop: target, unwind, dropline (parent coroutine drop) + Drop { target: ref t, unwind: UnwindAction::Cleanup(u), drop: Some(d), .. } => { + slice::from_ref(t) + .into_iter() + .copied() + .chain(Some(u).into_iter().chain(Some(d))) + } + // 2-successors Call { target: Some(ref t), unwind: UnwindAction::Cleanup(u), .. } | Yield { resume: ref t, drop: Some(u), .. } - | Drop { target: ref t, unwind: UnwindAction::Cleanup(u), .. } + | Drop { target: ref t, unwind: UnwindAction::Cleanup(u), drop: None, .. } + | Drop { target: ref t, unwind: _, drop: Some(u), .. } | Assert { target: ref t, unwind: UnwindAction::Cleanup(u), .. } | FalseUnwind { real_target: ref t, unwind: UnwindAction::Cleanup(u) } => { - slice::from_ref(t).into_iter().copied().chain(Some(u)) + slice::from_ref(t).into_iter().copied().chain(Some(u).into_iter().chain(None)) } + // single successor Goto { target: ref t } | Call { target: None, unwind: UnwindAction::Cleanup(ref t), .. } | Call { target: Some(ref t), unwind: _, .. } @@ -481,64 +534,94 @@ mod helper { | Drop { target: ref t, unwind: _, .. } | Assert { target: ref t, unwind: _, .. } | FalseUnwind { real_target: ref t, unwind: _ } => { - slice::from_ref(t).into_iter().copied().chain(None) + slice::from_ref(t).into_iter().copied().chain(None.into_iter().chain(None)) } + // No successors UnwindResume | UnwindTerminate(_) | CoroutineDrop | Return | Unreachable | TailCall { .. } - | Call { target: None, unwind: _, .. } => (&[]).into_iter().copied().chain(None), + | Call { target: None, unwind: _, .. } => { + (&[]).into_iter().copied().chain(None.into_iter().chain(None)) + } + // Multiple successors InlineAsm { ref targets, unwind: UnwindAction::Cleanup(u), .. } => { - targets.iter().copied().chain(Some(u)) + targets.iter().copied().chain(Some(u).into_iter().chain(None)) } - InlineAsm { ref targets, unwind: _, .. } => targets.iter().copied().chain(None), - SwitchInt { ref targets, .. } => targets.targets.iter().copied().chain(None), - FalseEdge { ref real_target, imaginary_target } => { - slice::from_ref(real_target).into_iter().copied().chain(Some(imaginary_target)) + InlineAsm { ref targets, unwind: _, .. } => { + targets.iter().copied().chain(None.into_iter().chain(None)) } + SwitchInt { ref targets, .. } => { + targets.targets.iter().copied().chain(None.into_iter().chain(None)) + } + // FalseEdge + FalseEdge { ref real_target, imaginary_target } => slice::from_ref(real_target) + .into_iter() + .copied() + .chain(Some(imaginary_target).into_iter().chain(None)), } } #[inline] - #[cfg_attr(not(bootstrap), define_opaque(SuccessorsMut))] - pub fn successors_mut(&mut self) -> SuccessorsMut<'_> { + pub fn successors_mut<'a>(&'a mut self, mut f: impl FnMut(&'a mut BasicBlock)) { use self::TerminatorKind::*; - match *self { - Call { - target: Some(ref mut t), unwind: UnwindAction::Cleanup(ref mut u), .. + match self { + Drop { target, unwind, drop, .. } => { + f(target); + if let UnwindAction::Cleanup(u) = unwind { + f(u) + } + if let Some(d) = drop { + f(d) + } + } + Call { target, unwind, .. } => { + if let Some(target) = target { + f(target); + } + if let UnwindAction::Cleanup(u) = unwind { + f(u) + } + } + Yield { resume, drop, .. } => { + f(resume); + if let Some(d) = drop { + f(d) + } + } + Assert { target, unwind, .. } | FalseUnwind { real_target: target, unwind } => { + f(target); + if let UnwindAction::Cleanup(u) = unwind { + f(u) + } } - | Yield { resume: ref mut t, drop: Some(ref mut u), .. } - | Drop { target: ref mut t, unwind: UnwindAction::Cleanup(ref mut u), .. } - | Assert { target: ref mut t, unwind: UnwindAction::Cleanup(ref mut u), .. } - | FalseUnwind { - real_target: ref mut t, - unwind: UnwindAction::Cleanup(ref mut u), - } => slice::from_mut(t).into_iter().chain(Some(u)), - Goto { target: ref mut t } - | Call { target: None, unwind: UnwindAction::Cleanup(ref mut t), .. } - | Call { target: Some(ref mut t), unwind: _, .. } - | Yield { resume: ref mut t, drop: None, .. } - | Drop { target: ref mut t, unwind: _, .. } - | Assert { target: ref mut t, unwind: _, .. } - | FalseUnwind { real_target: ref mut t, unwind: _ } => { - slice::from_mut(t).into_iter().chain(None) + Goto { target } => { + f(target); } UnwindResume | UnwindTerminate(_) | CoroutineDrop | Return | Unreachable - | TailCall { .. } - | Call { target: None, unwind: _, .. } => (&mut []).into_iter().chain(None), - InlineAsm { ref mut targets, unwind: UnwindAction::Cleanup(ref mut u), .. } => { - targets.iter_mut().chain(Some(u)) + | TailCall { .. } => {} + InlineAsm { targets, unwind, .. } => { + for target in targets { + f(target); + } + if let UnwindAction::Cleanup(u) = unwind { + f(u) + } + } + SwitchInt { targets, .. } => { + for target in &mut targets.targets { + f(target); + } } - InlineAsm { ref mut targets, unwind: _, .. } => targets.iter_mut().chain(None), - SwitchInt { ref mut targets, .. } => targets.targets.iter_mut().chain(None), - FalseEdge { ref mut real_target, ref mut imaginary_target } => { - slice::from_mut(real_target).into_iter().chain(Some(imaginary_target)) + FalseEdge { real_target, imaginary_target } => { + f(real_target); + f(imaginary_target); } } } @@ -671,8 +754,10 @@ impl<'tcx> TerminatorKind<'tcx> { Goto { target } => TerminatorEdges::Single(target), + // FIXME: Maybe we need also TerminatorEdges::Trio for async drop + // (target + unwind + dropline) Assert { target, unwind, expected: _, msg: _, cond: _ } - | Drop { target, unwind, place: _, replace: _ } + | Drop { target, unwind, place: _, replace: _, drop: _, async_fut: _ } | FalseUnwind { real_target: target, unwind } => match unwind { UnwindAction::Cleanup(unwind) => TerminatorEdges::Double(target, unwind), UnwindAction::Continue | UnwindAction::Terminate(_) | UnwindAction::Unreachable => { diff --git a/compiler/rustc_middle/src/mir/visit.rs b/compiler/rustc_middle/src/mir/visit.rs index 3c83d962900ae..bdec91e2c57f6 100644 --- a/compiler/rustc_middle/src/mir/visit.rs +++ b/compiler/rustc_middle/src/mir/visit.rs @@ -353,17 +353,22 @@ macro_rules! make_mir_visitor { coroutine_closure_def_id: _def_id, receiver_by_ref: _, } - | ty::InstanceKind::AsyncDropGlueCtorShim(_def_id, None) - | ty::InstanceKind::DropGlue(_def_id, None) => {} + | ty::InstanceKind::DropGlue(_def_id, None) + | ty::InstanceKind::EiiShim { def_id: _def_id, extern_item: _, chosen_impl: _, weak_linkage: _ } => {} ty::InstanceKind::FnPtrShim(_def_id, ty) | ty::InstanceKind::DropGlue(_def_id, Some(ty)) | ty::InstanceKind::CloneShim(_def_id, ty) | ty::InstanceKind::FnPtrAddrShim(_def_id, ty) - | ty::InstanceKind::AsyncDropGlueCtorShim(_def_id, Some(ty)) => { + | ty::InstanceKind::AsyncDropGlue(_def_id, ty) + | ty::InstanceKind::AsyncDropGlueCtorShim(_def_id, ty) => { // FIXME(eddyb) use a better `TyContext` here. self.visit_ty($(& $mutability)? *ty, TyContext::Location(location)); } + ty::InstanceKind::FutureDropPollShim(_def_id, proxy_ty, impl_ty) => { + self.visit_ty($(& $mutability)? *proxy_ty, TyContext::Location(location)); + self.visit_ty($(& $mutability)? *impl_ty, TyContext::Location(location)); + } } self.visit_args(callee_args, location); } @@ -457,9 +462,15 @@ macro_rules! make_mir_visitor { } } } + StatementKind::BackwardIncompatibleDropHint { place, .. } => { + self.visit_place( + place, + PlaceContext::NonUse(NonUseContext::BackwardIncompatibleDropHint), + location + ); + } StatementKind::ConstEvalCounter => {} StatementKind::Nop => {} - StatementKind::BackwardIncompatibleDropHint { .. } => {} } } @@ -515,7 +526,14 @@ macro_rules! make_mir_visitor { self.visit_operand(discr, location); } - TerminatorKind::Drop { place, target: _, unwind: _, replace: _ } => { + TerminatorKind::Drop { + place, + target: _, + unwind: _, + replace: _, + drop: _, + async_fut: _, + } => { self.visit_place( place, PlaceContext::MutatingUse(MutatingUseContext::Drop), @@ -628,7 +646,7 @@ macro_rules! make_mir_visitor { OverflowNeg(op) | DivisionByZero(op) | RemainderByZero(op) => { self.visit_operand(op, location); } - ResumedAfterReturn(_) | ResumedAfterPanic(_) | NullPointerDereference => { + ResumedAfterReturn(_) | ResumedAfterPanic(_) | NullPointerDereference | ResumedAfterDrop(_) => { // Nothing to visit } MisalignedPointerDereference { required, found } => { @@ -1348,6 +1366,8 @@ pub enum NonUseContext { AscribeUserTy(ty::Variance), /// The data of a user variable, for debug info. VarDebugInfo, + /// A `BackwardIncompatibleDropHint` statement, meant for edition 2024 lints. + BackwardIncompatibleDropHint, } #[derive(Copy, Clone, Debug, PartialEq, Eq)] @@ -1422,7 +1442,9 @@ impl PlaceContext { use NonUseContext::*; match self { PlaceContext::MutatingUse(_) => ty::Invariant, - PlaceContext::NonUse(StorageDead | StorageLive | VarDebugInfo) => ty::Invariant, + PlaceContext::NonUse( + StorageDead | StorageLive | VarDebugInfo | BackwardIncompatibleDropHint, + ) => ty::Invariant, PlaceContext::NonMutatingUse( Inspect | Copy | Move | PlaceMention | SharedBorrow | FakeBorrow | RawBorrow | Projection, diff --git a/compiler/rustc_middle/src/query/erase.rs b/compiler/rustc_middle/src/query/erase.rs index 6c6b9a5510c69..5bd111fa2f22d 100644 --- a/compiler/rustc_middle/src/query/erase.rs +++ b/compiler/rustc_middle/src/query/erase.rs @@ -25,7 +25,7 @@ pub trait EraseType: Copy { pub type Erase = Erased; #[inline(always)] -#[cfg_attr(not(bootstrap), define_opaque(Erase))] +#[define_opaque(Erase)] pub fn erase(src: T) -> Erase { // Ensure the sizes match const { @@ -49,7 +49,7 @@ pub fn erase(src: T) -> Erase { /// Restores an erased value. #[inline(always)] -#[cfg_attr(not(bootstrap), define_opaque(Erase))] +#[define_opaque(Erase)] pub fn restore(value: Erase) -> T { let value: Erased<::Result> = value; // See comment in `erase` for why we use `transmute_unchecked`. diff --git a/compiler/rustc_middle/src/query/keys.rs b/compiler/rustc_middle/src/query/keys.rs index c382bcd726ffa..9ed1f10455ad3 100644 --- a/compiler/rustc_middle/src/query/keys.rs +++ b/compiler/rustc_middle/src/query/keys.rs @@ -586,7 +586,7 @@ impl Key for HirId { type Cache = DefaultCache; fn default_span(&self, tcx: TyCtxt<'_>) -> Span { - tcx.hir().span(*self) + tcx.hir_span(*self) } #[inline(always)] @@ -599,7 +599,7 @@ impl Key for (LocalDefId, HirId) { type Cache = DefaultCache; fn default_span(&self, tcx: TyCtxt<'_>) -> Span { - tcx.hir().span(self.1) + tcx.hir_span(self.1) } #[inline(always)] diff --git a/compiler/rustc_middle/src/query/mod.rs b/compiler/rustc_middle/src/query/mod.rs index d7ed703f4ae30..53f5114f1fefb 100644 --- a/compiler/rustc_middle/src/query/mod.rs +++ b/compiler/rustc_middle/src/query/mod.rs @@ -14,6 +14,7 @@ use std::sync::Arc; use rustc_arena::TypedArena; use rustc_ast::expand::StrippedCfgItem; use rustc_ast::expand::allocator::AllocatorKind; +use rustc_attr_data_structures::{EIIDecl, EIIImpl}; use rustc_data_structures::fingerprint::Fingerprint; use rustc_data_structures::fx::{FxIndexMap, FxIndexSet}; use rustc_data_structures::sorted_map::SortedMap; @@ -51,6 +52,7 @@ use crate::lint::LintExpectation; use crate::metadata::ModChild; use crate::middle::codegen_fn_attrs::CodegenFnAttrs; use crate::middle::debugger_visualizer::DebuggerVisualizerFile; +use crate::middle::eii::EiiMap; use crate::middle::exported_symbols::{ExportedSymbol, SymbolExportInfo}; use crate::middle::lib_features::LibFeatures; use crate::middle::privacy::EffectiveVisibilities; @@ -161,11 +163,11 @@ rustc_queries! { /// Represents crate as a whole (as distinct from the top-level crate module). /// - /// If you call `hir_crate` (e.g., indirectly by calling `tcx.hir_crate()`), - /// we will have to assume that any change means that you need to be recompiled. - /// This is because the `hir_crate` query gives you access to all other items. - /// To avoid this fate, do not call `tcx.hir_crate()`; instead, - /// prefer wrappers like [`TyCtxt::hir_visit_all_item_likes_in_crate`]. + /// If you call `tcx.hir_crate(())` we will have to assume that any change + /// means that you need to be recompiled. This is because the `hir_crate` + /// query gives you access to all other items. To avoid this fate, do not + /// call `tcx.hir_crate(())`; instead, prefer wrappers like + /// [`TyCtxt::hir_visit_all_item_likes_in_crate`]. query hir_crate(key: ()) -> &'tcx Crate<'tcx> { arena_cache eval_always @@ -197,7 +199,7 @@ rustc_queries! { /// Gives access to the HIR node's parent for the HIR owner `key`. /// - /// This can be conveniently accessed by methods on `tcx.hir()`. + /// This can be conveniently accessed by `tcx.hir_*` methods. /// Avoid calling this query directly. query hir_owner_parent(key: hir::OwnerId) -> hir::HirId { desc { |tcx| "getting HIR parent of `{}`", tcx.def_path_str(key) } @@ -205,7 +207,7 @@ rustc_queries! { /// Gives access to the HIR nodes and bodies inside `key` if it's a HIR owner. /// - /// This can be conveniently accessed by methods on `tcx.hir()`. + /// This can be conveniently accessed by `tcx.hir_*` methods. /// Avoid calling this query directly. query opt_hir_owner_nodes(key: LocalDefId) -> Option<&'tcx hir::OwnerNodes<'tcx>> { desc { |tcx| "getting HIR owner items in `{}`", tcx.def_path_str(key) } @@ -214,7 +216,7 @@ rustc_queries! { /// Gives access to the HIR attributes inside the HIR owner `key`. /// - /// This can be conveniently accessed by methods on `tcx.hir()`. + /// This can be conveniently accessed by `tcx.hir_*` methods. /// Avoid calling this query directly. query hir_attr_map(key: hir::OwnerId) -> &'tcx hir::AttributeMap<'tcx> { desc { |tcx| "getting HIR owner attributes in `{}`", tcx.def_path_str(key) } @@ -267,6 +269,8 @@ rustc_queries! { /// /// This is a specialized instance of [`Self::type_of`] that detects query cycles. /// Unless `CyclePlaceholder` needs to be handled separately, call [`Self::type_of`] instead. + /// This is used to improve the error message in cases where revealing the hidden type + /// for auto-trait leakage cycles. /// /// # Panics /// @@ -278,10 +282,16 @@ rustc_queries! { } cycle_stash } + query type_of_opaque_hir_typeck(key: LocalDefId) -> ty::EarlyBinder<'tcx, Ty<'tcx>> { + desc { |tcx| + "computing type of opaque `{path}` via HIR typeck", + path = tcx.def_path_str(key), + } + } /// Returns whether the type alias given by `DefId` is lazy. /// - /// I.e., if the type alias expands / ought to expand to a [weak] [alias type] + /// I.e., if the type alias expands / ought to expand to a [free] [alias type] /// instead of the underyling aliased type. /// /// Relevant for features `lazy_type_alias` and `type_alias_impl_trait`. @@ -290,7 +300,7 @@ rustc_queries! { /// /// This query *may* panic if the given definition is not a type alias. /// - /// [weak]: rustc_middle::ty::Weak + /// [free]: rustc_middle::ty::Free /// [alias type]: rustc_middle::ty::AliasTy query type_alias_is_lazy(key: DefId) -> bool { desc { |tcx| @@ -379,6 +389,15 @@ rustc_queries! { } } + query nested_bodies_within( + key: LocalDefId + ) -> &'tcx ty::List { + desc { + |tcx| "computing the coroutines defined within `{}`", + tcx.def_path_str(key.to_def_id()) + } + } + /// Returns the explicitly user-written *bounds* on the associated or opaque type given by `DefId` /// that must be proven true at definition site (and which can be assumed at usage sites). /// @@ -476,7 +495,7 @@ rustc_queries! { desc { "computing `#[expect]`ed lints in this crate" } } - query lints_that_dont_need_to_run(_: ()) -> &'tcx FxIndexSet { + query lints_that_dont_need_to_run(_: ()) -> &'tcx UnordSet { arena_cache desc { "Computing all lints that are explicitly enabled or with a default level greater than Allow" } } @@ -612,6 +631,7 @@ rustc_queries! { query check_coroutine_obligations(key: LocalDefId) -> Result<(), ErrorGuaranteed> { desc { |tcx| "verify auto trait bounds for coroutine interior type `{}`", tcx.def_path_str(key) } + return_result_from_ensure_ok } /// MIR after our optimization passes have run. This is MIR that is ready @@ -1017,6 +1037,13 @@ rustc_queries! { separate_provide_extern } + /// Given an `impl_def_id`, return true if the self type is guaranteed to be unsized due + /// to either being one of the built-in unsized types (str/slice/dyn) or to be a struct + /// whose tail is one of those types. + query impl_self_is_guaranteed_unsized(impl_def_id: DefId) -> bool { + desc { |tcx| "computing whether `{}` has a guaranteed unsized self type", tcx.def_path_str(impl_def_id) } + } + /// Maps a `DefId` of a type to a list of its inherent impls. /// Contains implementations of methods that are inherent to a type. /// Methods in these implementations don't need to be exported. @@ -1033,13 +1060,12 @@ rustc_queries! { /// Unsafety-check this `LocalDefId`. query check_unsafety(key: LocalDefId) { desc { |tcx| "unsafety-checking `{}`", tcx.def_path_str(key) } - cache_on_disk_if { true } } /// Checks well-formedness of tail calls (`become f()`). query check_tail_calls(key: LocalDefId) -> Result<(), rustc_errors::ErrorGuaranteed> { desc { |tcx| "tail-call-checking `{}`", tcx.def_path_str(key) } - cache_on_disk_if { true } + return_result_from_ensure_ok } /// Returns the types assumed to be well formed while "inside" of the given item. @@ -1089,6 +1115,10 @@ rustc_queries! { desc { |tcx| "checking loops in {}", describe_as_module(key, tcx) } } + query get_externally_implementable_item_impls(_: ()) -> &'tcx EiiMap { + desc { "check externally implementable items" } + } + query check_mod_naked_functions(key: LocalModDefId) { desc { |tcx| "checking naked functions in {}", describe_as_module(key, tcx) } } @@ -1145,11 +1175,10 @@ rustc_queries! { return_result_from_ensure_ok } - /// Borrow-checks the function body. If this is a closure, returns - /// additional requirements that the closure's creator must verify. - query mir_borrowck(key: LocalDefId) -> &'tcx mir::BorrowCheckResult<'tcx> { + /// Borrow-checks the given typeck root, e.g. functions, const/static items, + /// and its children, e.g. closures, inline consts. + query mir_borrowck(key: LocalDefId) -> Result<&'tcx mir::ConcreteOpaqueTypes<'tcx>, ErrorGuaranteed> { desc { |tcx| "borrow-checking `{}`", tcx.def_path_str(key) } - cache_on_disk_if(tcx) { tcx.is_typeck_child(key.to_def_id()) } } /// Gets a complete map from all types to their inherent impls. @@ -1308,7 +1337,7 @@ rustc_queries! { query check_match(key: LocalDefId) -> Result<(), rustc_errors::ErrorGuaranteed> { desc { |tcx| "match-checking `{}`", tcx.def_path_str(key) } - cache_on_disk_if { true } + return_result_from_ensure_ok } /// Performs part of the privacy check and computes effective visibilities. @@ -1336,7 +1365,11 @@ rustc_queries! { /// Generates a MIR body for the shim. query mir_shims(key: ty::InstanceKind<'tcx>) -> &'tcx mir::Body<'tcx> { arena_cache - desc { |tcx| "generating MIR shim for `{}`", tcx.def_path_str(key.def_id()) } + desc { + |tcx| "generating MIR shim for `{}`, instance={:?}", + tcx.def_path_str(key.def_id()), + key + } } /// The `symbol_name` query provides the symbol name for calling a @@ -1428,8 +1461,8 @@ rustc_queries! { desc { |tcx| "computing target features for inline asm of `{}`", tcx.def_path_str(def_id) } } - query fn_arg_names(def_id: DefId) -> &'tcx [Option] { - desc { |tcx| "looking up function parameter names for `{}`", tcx.def_path_str(def_id) } + query fn_arg_idents(def_id: DefId) -> &'tcx [Option] { + desc { |tcx| "looking up function parameter identifiers for `{}`", tcx.def_path_str(def_id) } separate_provide_extern } @@ -1502,6 +1535,11 @@ rustc_queries! { desc { "finding local trait impls" } } + /// Return all `impl` blocks of the given trait in the current crate. + query local_trait_impls(trait_id: DefId) -> &'tcx [LocalDefId] { + desc { "finding local trait impls of `{}`", tcx.def_path_str(trait_id) } + } + /// Given a trait `trait_id`, return all known `impl` blocks. query trait_impls_of(trait_id: DefId) -> &'tcx ty::trait_def::TraitImpls { arena_cache @@ -1562,6 +1600,10 @@ rustc_queries! { query is_unpin_raw(env: ty::PseudoCanonicalInput<'tcx, Ty<'tcx>>) -> bool { desc { "computing whether `{}` is `Unpin`", env.value } } + /// Query backing `Ty::is_async_drop`. + query is_async_drop_raw(env: ty::PseudoCanonicalInput<'tcx, Ty<'tcx>>) -> bool { + desc { "computing whether `{}` is `AsyncDrop`", env.value } + } /// Query backing `Ty::needs_drop`. query needs_drop_raw(env: ty::PseudoCanonicalInput<'tcx, Ty<'tcx>>) -> bool { desc { "computing whether `{}` needs drop", env.value } @@ -1594,6 +1636,14 @@ rustc_queries! { cache_on_disk_if { true } } + /// A list of types where the ADT requires async drop if and only if any of + /// those types require async drop. If the ADT is known to always need async drop + /// then `Err(AlwaysRequiresDrop)` is returned. + query adt_async_drop_tys(def_id: DefId) -> Result<&'tcx ty::List>, AlwaysRequiresDrop> { + desc { |tcx| "computing when `{}` needs async drop", tcx.def_path_str(def_id) } + cache_on_disk_if { true } + } + /// A list of types where the ADT requires drop if and only if any of those types /// has significant drop. A type marked with the attribute `rustc_insignificant_dtor` /// is considered to not be significant. A drop is significant if it is implemented @@ -1602,7 +1652,6 @@ rustc_queries! { /// `Err(AlwaysRequiresDrop)` is returned. query adt_significant_drop_tys(def_id: DefId) -> Result<&'tcx ty::List>, AlwaysRequiresDrop> { desc { |tcx| "computing when `{}` has a significant destructor", tcx.def_path_str(def_id) } - cache_on_disk_if { false } } /// Returns a list of types which (a) have a potentially significant destructor @@ -1624,7 +1673,6 @@ rustc_queries! { /// Otherwise, there is a risk of query cycles. query list_significant_drop_tys(ty: ty::PseudoCanonicalInput<'tcx, Ty<'tcx>>) -> &'tcx ty::List> { desc { |tcx| "computing when `{}` has a significant destructor", ty.value } - cache_on_disk_if { false } } /// Computes the layout of a type. Note that this implicitly @@ -1689,11 +1737,6 @@ rustc_queries! { desc { "checking if the crate has_alloc_error_handler" } separate_provide_extern } - query has_panic_handler(_: CrateNum) -> bool { - fatal_cycle - desc { "checking if the crate has_panic_handler" } - separate_provide_extern - } query is_profiler_runtime(_: CrateNum) -> bool { fatal_cycle desc { "checking if a crate is `#![profiler_runtime]`" } @@ -1865,6 +1908,13 @@ rustc_queries! { separate_provide_extern } + /// Returns a list of all `externally implementable items` crate. + query externally_implementable_items(_: CrateNum) -> &'tcx FxIndexMap)> { + arena_cache + desc { "looking up the externally implementable items of a crate" } + separate_provide_extern + } + /// Lint against `extern fn` declarations having incompatible types. query clashing_extern_declarations(_: ()) { desc { "checking `extern fn` declarations are compatible" } @@ -1883,6 +1933,11 @@ rustc_queries! { // The macro which defines `rustc_metadata::provide_extern` depends on this query's name. // Changing the name should cause a compiler error, but in case that changes, be aware. + // + // The hash should not be calculated before the `analysis` pass is complete, specifically + // until `tcx.untracked().definitions.freeze()` has been called, otherwise if incremental + // compilation is enabled calculating this hash can freeze this structure too early in + // compilation and cause subsequent crashes when attempting to write to `definitions` query crate_hash(_: CrateNum) -> Svh { eval_always desc { "looking up the hash a crate" } @@ -2158,7 +2213,7 @@ rustc_queries! { query maybe_unused_trait_imports(_: ()) -> &'tcx FxIndexSet { desc { "fetching potentially unused trait imports" } } - query names_imported_by_glob_use(def_id: LocalDefId) -> &'tcx UnordSet { + query names_imported_by_glob_use(def_id: LocalDefId) -> &'tcx FxIndexSet { desc { |tcx| "finding names imported by glob use for `{}`", tcx.def_path_str(def_id) } } @@ -2192,6 +2247,16 @@ rustc_queries! { separate_provide_extern } + query stable_order_of_exportable_impls(_: CrateNum) -> &'tcx FxIndexMap { + desc { "fetching the stable impl's order" } + separate_provide_extern + } + + query exportable_items(_: CrateNum) -> &'tcx [DefId] { + desc { "fetching all exportable items in a crate" } + separate_provide_extern + } + /// The list of symbols exported from the given crate. /// /// - All names contained in `exported_symbols(cnum)` are guaranteed to @@ -2249,7 +2314,7 @@ rustc_queries! { /// Do not call this query directly: Invoke `normalize` instead. /// /// - query normalize_canonicalized_weak_ty( + query normalize_canonicalized_free_alias( goal: CanonicalAliasGoal<'tcx> ) -> Result< &'tcx Canonical<'tcx, canonical::QueryResponse<'tcx, NormalizationResult<'tcx>>>, @@ -2424,7 +2489,7 @@ rustc_queries! { query resolve_instance_raw( key: ty::PseudoCanonicalInput<'tcx, (DefId, GenericArgsRef<'tcx>)> ) -> Result>, ErrorGuaranteed> { - desc { "resolving instance `{}`", ty::Instance::new(key.value.0, key.value.1) } + desc { "resolving instance `{}`", ty::Instance::new_raw(key.value.0, key.value.1) } } query reveal_opaque_types_in_bounds(key: ty::Clauses<'tcx>) -> ty::Clauses<'tcx> { @@ -2512,7 +2577,6 @@ rustc_queries! { /// monomorphized. query check_mono_item(key: ty::Instance<'tcx>) { desc { "monomorphization-time checking" } - cache_on_disk_if { true } } /// Builds the set of functions that should be skipped for the move-size check. @@ -2532,5 +2596,5 @@ rustc_queries! { } } -rustc_query_append! { define_callbacks! } +rustc_with_all_queries! { define_callbacks! } rustc_feedable_queries! { define_feedable! } diff --git a/compiler/rustc_middle/src/query/on_disk_cache.rs b/compiler/rustc_middle/src/query/on_disk_cache.rs index 14e3ce8bef6b2..e1876f8f0f9b6 100644 --- a/compiler/rustc_middle/src/query/on_disk_cache.rs +++ b/compiler/rustc_middle/src/query/on_disk_cache.rs @@ -2,7 +2,7 @@ use std::collections::hash_map::Entry; use std::mem; use std::sync::Arc; -use rustc_data_structures::fx::{FxHashMap, FxIndexSet}; +use rustc_data_structures::fx::{FxHashMap, FxIndexMap, FxIndexSet}; use rustc_data_structures::memmap::Mmap; use rustc_data_structures::sync::{HashMapExt, Lock, RwLock}; use rustc_data_structures::unhash::UnhashMap; @@ -16,7 +16,7 @@ use rustc_serialize::opaque::{FileEncodeResult, FileEncoder, IntEncodedWithFixed use rustc_serialize::{Decodable, Decoder, Encodable, Encoder}; use rustc_session::Session; use rustc_span::hygiene::{ - ExpnId, HygieneDecodeContext, HygieneEncodeContext, SyntaxContext, SyntaxContextData, + ExpnId, HygieneDecodeContext, HygieneEncodeContext, SyntaxContext, SyntaxContextKey, }; use rustc_span::source_map::Spanned; use rustc_span::{ @@ -45,7 +45,7 @@ const TAG_EXPN_DATA: u8 = 1; // Tags for encoding Symbol's const SYMBOL_STR: u8 = 0; const SYMBOL_OFFSET: u8 = 1; -const SYMBOL_PREINTERNED: u8 = 2; +const SYMBOL_PREDEFINED: u8 = 2; /// Provides an interface to incremental compilation data cached from the /// previous compilation session. This data will eventually include the results @@ -57,7 +57,7 @@ pub struct OnDiskCache { // Collects all `QuerySideEffect` created during the current compilation // session. - current_side_effects: Lock>, + current_side_effects: Lock>, file_index_to_stable_id: FxHashMap, @@ -566,7 +566,7 @@ impl<'a, 'tcx> SpanDecoder for CacheDecoder<'a, 'tcx> { // We look up the position of the associated `SyntaxData` and decode it. let pos = syntax_contexts.get(&id).unwrap(); this.with_position(pos.to_usize(), |decoder| { - let data: SyntaxContextData = decode_tagged(decoder, TAG_SYNTAX_CONTEXT); + let data: SyntaxContextKey = decode_tagged(decoder, TAG_SYNTAX_CONTEXT); data }) }) @@ -673,9 +673,9 @@ impl<'a, 'tcx> SpanDecoder for CacheDecoder<'a, 'tcx> { Symbol::intern(s) }) } - SYMBOL_PREINTERNED => { + SYMBOL_PREDEFINED => { let symbol_index = self.read_u32(); - Symbol::new_from_decoded(symbol_index) + Symbol::new(symbol_index) } _ => unreachable!(), } @@ -891,9 +891,9 @@ impl<'a, 'tcx> SpanEncoder for CacheEncoder<'a, 'tcx> { // copy&paste impl from rustc_metadata fn encode_symbol(&mut self, symbol: Symbol) { - // if symbol preinterned, emit tag and symbol index - if symbol.is_preinterned() { - self.encoder.emit_u8(SYMBOL_PREINTERNED); + // if symbol predefined, emit tag and symbol index + if symbol.is_predefined() { + self.encoder.emit_u8(SYMBOL_PREDEFINED); self.encoder.emit_u32(symbol.as_u32()); } else { // otherwise write it as string or as offset to it diff --git a/compiler/rustc_middle/src/query/plumbing.rs b/compiler/rustc_middle/src/query/plumbing.rs index a099f77041709..769df1ffd6f91 100644 --- a/compiler/rustc_middle/src/query/plumbing.rs +++ b/compiler/rustc_middle/src/query/plumbing.rs @@ -313,8 +313,11 @@ macro_rules! separate_provide_extern_default { macro_rules! define_callbacks { ( - $($(#[$attr:meta])* - [$($modifiers:tt)*] fn $name:ident($($K:tt)*) -> $V:ty,)*) => { + $( + $(#[$attr:meta])* + [$($modifiers:tt)*] fn $name:ident($($K:tt)*) -> $V:ty, + )* + ) => { #[allow(unused_lifetimes)] pub mod queries { @@ -366,7 +369,7 @@ macro_rules! define_callbacks { pub type Storage<'tcx> = <$($K)* as keys::Key>::Cache>; - // Ensure that keys grow no larger than 80 bytes by accident. + // Ensure that keys grow no larger than 88 bytes by accident. // Increase this limit if necessary, but do try to keep the size low if possible #[cfg(target_pointer_width = "64")] const _: () = { diff --git a/compiler/rustc_middle/src/thir.rs b/compiler/rustc_middle/src/thir.rs index 6783bbf8bf42f..b9a014d14c0d0 100644 --- a/compiler/rustc_middle/src/thir.rs +++ b/compiler/rustc_middle/src/thir.rs @@ -27,7 +27,7 @@ use tracing::instrument; use crate::middle::region; use crate::mir::interpret::AllocId; -use crate::mir::{self, BinOp, BorrowKind, FakeReadCause, UnOp}; +use crate::mir::{self, AssignOp, BinOp, BorrowKind, FakeReadCause, UnOp}; use crate::thir::visit::for_each_immediate_subpat; use crate::ty::adjustment::PointerCoercion; use crate::ty::layout::IntegerExt; @@ -292,7 +292,10 @@ pub enum ExprKind<'tcx> { If { if_then_scope: region::Scope, cond: ExprId, + /// `then` is always `ExprKind::Block`. then: ExprId, + /// If present, the `else_opt` expr is always `ExprKind::Block` (for + /// `else`) or `ExprKind::If` (for `else if`). else_opt: Option, }, /// A function call. Method calls and overloaded operators are converted to plain function calls. @@ -403,7 +406,7 @@ pub enum ExprKind<'tcx> { }, /// A *non-overloaded* operation assignment, e.g. `lhs += rhs`. AssignOp { - op: BinOp, + op: AssignOp, lhs: ExprId, rhs: ExprId, }, @@ -747,6 +750,9 @@ pub struct Ascription<'tcx> { #[derive(Clone, Debug, HashStable, TypeVisitable)] pub enum PatKind<'tcx> { + /// A missing pattern, e.g. for an anonymous param in a bare fn like `fn f(u32)`. + Missing, + /// A wildcard pattern: `_`. Wild, @@ -796,7 +802,12 @@ pub enum PatKind<'tcx> { /// Deref pattern, written `box P` for now. DerefPattern { subpattern: Box>, - mutability: hir::Mutability, + /// Whether the pattern scrutinee needs to be borrowed in order to call `Deref::deref` or + /// `DerefMut::deref_mut`, and if so, which. This is `ByRef::No` for deref patterns on + /// boxes; they are lowered using a built-in deref rather than a method call, thus they + /// don't borrow the scrutinee. + #[type_visitable(ignore)] + borrow: ByRef, }, /// One of the following: @@ -812,23 +823,17 @@ pub enum PatKind<'tcx> { }, /// Pattern obtained by converting a constant (inline or named) to its pattern - /// representation using `const_to_pat`. + /// representation using `const_to_pat`. This is used for unsafety checking. ExpandedConstant { - /// [DefId] of the constant, we need this so that we have a - /// reference that can be used by unsafety checking to visit nested - /// unevaluated constants and for diagnostics. If the `DefId` doesn't - /// correspond to a local crate, it points at the `const` item. + /// [DefId] of the constant item. def_id: DefId, - /// If `false`, then `def_id` points at a `const` item, otherwise it - /// corresponds to a local inline const. - is_inline: bool, - /// If the inline constant is used in a range pattern, this subpattern - /// represents the range (if both ends are inline constants, there will - /// be multiple InlineConstant wrappers). + /// The pattern that the constant lowered to. /// - /// Otherwise, the actual pattern that the constant lowered to. As with - /// other constants, inline constants are matched structurally where - /// possible. + /// HACK: we need to keep the `DefId` of inline constants around for unsafety checking; + /// therefore when a range pattern contains inline constants, we re-wrap the range pattern + /// with the `ExpandedConstant` nodes that correspond to the range endpoints. Hence + /// `subpattern` may actually be a range pattern, and `def_id` be the constant for one of + /// its endpoints. subpattern: Box>, }, diff --git a/compiler/rustc_middle/src/thir/visit.rs b/compiler/rustc_middle/src/thir/visit.rs index 7d62ab7970d01..f3da2a5cc8e4e 100644 --- a/compiler/rustc_middle/src/thir/visit.rs +++ b/compiler/rustc_middle/src/thir/visit.rs @@ -250,7 +250,8 @@ pub(crate) fn for_each_immediate_subpat<'a, 'tcx>( mut callback: impl FnMut(&'a Pat<'tcx>), ) { match &pat.kind { - PatKind::Wild + PatKind::Missing + | PatKind::Wild | PatKind::Binding { subpattern: None, .. } | PatKind::Constant { value: _ } | PatKind::Range(_) diff --git a/compiler/rustc_middle/src/traits/mod.rs b/compiler/rustc_middle/src/traits/mod.rs index 7e6151745e2f8..aa33964ef0706 100644 --- a/compiler/rustc_middle/src/traits/mod.rs +++ b/compiler/rustc_middle/src/traits/mod.rs @@ -309,6 +309,13 @@ pub enum ObligationCauseCode<'tcx> { kind: ty::AssocKind, }, + /// Error derived when checking an impl item is compatible with + /// its corresponding trait item's definition + CompareEII { + external_impl: LocalDefId, + declaration: DefId, + }, + /// Checking that the bounds of a trait's associated type hold for a given impl CheckAssociatedTypeBounds { impl_item_def_id: LocalDefId, @@ -404,7 +411,7 @@ pub enum ObligationCauseCode<'tcx> { /// Requirement for a `const N: Ty` to implement `Ty: ConstParamTy` ConstParam(Ty<'tcx>), - /// Obligations emitted during the normalization of a weak type alias. + /// Obligations emitted during the normalization of a free type alias. TypeAlias(ObligationCauseCodeHandle<'tcx>, Span, DefId), } diff --git a/compiler/rustc_middle/src/traits/query.rs b/compiler/rustc_middle/src/traits/query.rs index 76f3d2bab9cf4..3f6faa1a572d9 100644 --- a/compiler/rustc_middle/src/traits/query.rs +++ b/compiler/rustc_middle/src/traits/query.rs @@ -181,7 +181,7 @@ pub struct MethodAutoderefBadTy<'tcx> { pub ty: Canonical<'tcx, QueryResponse<'tcx, Ty<'tcx>>>, } -/// Result of the `normalize_canonicalized_{{,inherent_}projection,weak}_ty` queries. +/// Result of the `normalize_canonicalized_{{,inherent_}projection,free}_ty` queries. #[derive(Clone, Debug, HashStable, TypeFoldable, TypeVisitable)] pub struct NormalizationResult<'tcx> { /// Result of the normalization. diff --git a/compiler/rustc_middle/src/traits/select.rs b/compiler/rustc_middle/src/traits/select.rs index 811bd8fb4588a..aa2ee756bc502 100644 --- a/compiler/rustc_middle/src/traits/select.rs +++ b/compiler/rustc_middle/src/traits/select.rs @@ -95,10 +95,16 @@ pub type EvaluationCache<'tcx, ENV> = Cache<(ENV, ty::PolyTraitPredicate<'tcx>), /// parameter environment. #[derive(PartialEq, Eq, Debug, Clone, TypeVisitable)] pub enum SelectionCandidate<'tcx> { + /// A built-in implementation for the `Sized` trait. This is preferred + /// over all other candidates. + SizedCandidate { + has_nested: bool, + }, + /// A builtin implementation for some specific traits, used in cases /// where we cannot rely an ordinary library implementations. /// - /// The most notable examples are `sized`, `Copy` and `Clone`. This is also + /// The most notable examples are `Copy` and `Clone`. This is also /// used for the `DiscriminantKind` and `Pointee` trait, both of which have /// an associated type. BuiltinCandidate { diff --git a/compiler/rustc_middle/src/ty/adjustment.rs b/compiler/rustc_middle/src/ty/adjustment.rs index f8ab555305f05..a61a6c571a2cd 100644 --- a/compiler/rustc_middle/src/ty/adjustment.rs +++ b/compiler/rustc_middle/src/ty/adjustment.rs @@ -5,7 +5,7 @@ use rustc_hir::lang_items::LangItem; use rustc_macros::{HashStable, TyDecodable, TyEncodable, TypeFoldable, TypeVisitable}; use rustc_span::Span; -use crate::ty::{self, Ty, TyCtxt}; +use crate::ty::{Ty, TyCtxt}; #[derive(Clone, Copy, Debug, PartialEq, Eq, TyEncodable, TyDecodable, Hash, HashStable)] pub enum PointerCoercion { @@ -133,7 +133,7 @@ impl OverloadedDeref { }; tcx.associated_items(trait_def_id) .in_definition_order() - .find(|m| m.kind == ty::AssocKind::Fn) + .find(|item| item.is_fn()) .unwrap() .def_id } @@ -214,3 +214,25 @@ pub enum CustomCoerceUnsized { /// Records the index of the field being coerced. Struct(FieldIdx), } + +/// Represents an implicit coercion applied to the scrutinee of a match before testing a pattern +/// against it. Currently, this is used only for implicit dereferences. +#[derive(Clone, Copy, TyEncodable, TyDecodable, HashStable, TypeFoldable, TypeVisitable)] +pub struct PatAdjustment<'tcx> { + pub kind: PatAdjust, + /// The type of the scrutinee before the adjustment is applied, or the "adjusted type" of the + /// pattern. + pub source: Ty<'tcx>, +} + +/// Represents implicit coercions of patterns' types, rather than values' types. +#[derive(Clone, Copy, PartialEq, Debug, TyEncodable, TyDecodable, HashStable)] +#[derive(TypeFoldable, TypeVisitable)] +pub enum PatAdjust { + /// An implicit dereference before matching, such as when matching the pattern `0` against a + /// scrutinee of type `&u8` or `&mut u8`. + BuiltinDeref, + /// An implicit call to `Deref(Mut)::deref(_mut)` before matching, such as when matching the + /// pattern `[..]` against a scrutinee of type `Vec`. + OverloadedDeref, +} diff --git a/compiler/rustc_middle/src/ty/adt.rs b/compiler/rustc_middle/src/ty/adt.rs index cb245c0aec40a..d92b4f9c06beb 100644 --- a/compiler/rustc_middle/src/ty/adt.rs +++ b/compiler/rustc_middle/src/ty/adt.rs @@ -53,6 +53,8 @@ bitflags::bitflags! { const IS_VARIANT_LIST_NON_EXHAUSTIVE = 1 << 8; /// Indicates whether the type is `UnsafeCell`. const IS_UNSAFE_CELL = 1 << 9; + /// Indicates whether the type is `UnsafePinned`. + const IS_UNSAFE_PINNED = 1 << 10; } } rustc_data_structures::external_bitflags_debug! { AdtFlags } @@ -236,7 +238,7 @@ impl<'tcx> rustc_type_ir::inherent::AdtDef> for AdtDef<'tcx> { } fn destructor(self, tcx: TyCtxt<'tcx>) -> Option { - Some(match self.destructor(tcx)?.constness { + Some(match tcx.constness(self.destructor(tcx)?.did) { hir::Constness::Const => AdtDestructorKind::Const, hir::Constness::NotConst => AdtDestructorKind::NotConst, }) @@ -302,6 +304,9 @@ impl AdtDefData { if tcx.is_lang_item(did, LangItem::UnsafeCell) { flags |= AdtFlags::IS_UNSAFE_CELL; } + if tcx.is_lang_item(did, LangItem::UnsafePinned) { + flags |= AdtFlags::IS_UNSAFE_PINNED; + } AdtDefData { did, variants, flags, repr } } @@ -405,6 +410,12 @@ impl<'tcx> AdtDef<'tcx> { self.flags().contains(AdtFlags::IS_UNSAFE_CELL) } + /// Returns `true` if this is `UnsafePinned`. + #[inline] + pub fn is_unsafe_pinned(self) -> bool { + self.flags().contains(AdtFlags::IS_UNSAFE_PINNED) + } + /// Returns `true` if this is `ManuallyDrop`. #[inline] pub fn is_manually_drop(self) -> bool { diff --git a/compiler/rustc_middle/src/ty/assoc.rs b/compiler/rustc_middle/src/ty/assoc.rs index ce4c08aa485e5..78b2e265b488c 100644 --- a/compiler/rustc_middle/src/ty/assoc.rs +++ b/compiler/rustc_middle/src/ty/assoc.rs @@ -18,27 +18,33 @@ pub enum AssocItemContainer { #[derive(Copy, Clone, Debug, PartialEq, HashStable, Eq, Hash, Encodable, Decodable)] pub struct AssocItem { pub def_id: DefId, - pub name: Symbol, pub kind: AssocKind, pub container: AssocItemContainer, /// If this is an item in an impl of a trait then this is the `DefId` of /// the associated item on the trait that this implements. pub trait_item_def_id: Option, - - /// Whether this is a method with an explicit self - /// as its first parameter, allowing method calls. - pub fn_has_self_parameter: bool, - - /// `Some` if the associated item (an associated type) comes from the - /// return-position `impl Trait` in trait desugaring. The `ImplTraitInTraitData` - /// provides additional information about its source. - pub opt_rpitit_info: Option, } impl AssocItem { + // Gets the identifier, if it has one. + pub fn opt_name(&self) -> Option { + match self.kind { + ty::AssocKind::Type { data: AssocTypeData::Normal(name) } => Some(name), + ty::AssocKind::Type { data: AssocTypeData::Rpitit(_) } => None, + ty::AssocKind::Const { name } => Some(name), + ty::AssocKind::Fn { name, .. } => Some(name), + } + } + + // Gets the identifier name. Aborts if it lacks one, i.e. is an RPITIT + // associated type. + pub fn name(&self) -> Symbol { + self.opt_name().expect("name of non-Rpitit assoc item") + } + pub fn ident(&self, tcx: TyCtxt<'_>) -> Ident { - Ident::new(self.name, tcx.def_ident_span(self.def_id).unwrap()) + Ident::new(self.name(), tcx.def_ident_span(self.def_id).unwrap()) } /// Gets the defaultness of the associated item. @@ -78,35 +84,65 @@ impl AssocItem { pub fn signature(&self, tcx: TyCtxt<'_>) -> String { match self.kind { - ty::AssocKind::Fn => { + ty::AssocKind::Fn { .. } => { // We skip the binder here because the binder would deanonymize all // late-bound regions, and we don't want method signatures to show up // `as for<'r> fn(&'r MyType)`. Pretty-printing handles late-bound // regions just fine, showing `fn(&MyType)`. tcx.fn_sig(self.def_id).instantiate_identity().skip_binder().to_string() } - ty::AssocKind::Type => format!("type {};", self.name), - ty::AssocKind::Const => { - format!( - "const {}: {:?};", - self.name, - tcx.type_of(self.def_id).instantiate_identity() - ) + ty::AssocKind::Type { .. } => format!("type {};", self.name()), + ty::AssocKind::Const { name } => { + format!("const {}: {:?};", name, tcx.type_of(self.def_id).instantiate_identity()) } } } pub fn descr(&self) -> &'static str { match self.kind { - ty::AssocKind::Const => "const", - ty::AssocKind::Fn if self.fn_has_self_parameter => "method", - ty::AssocKind::Fn => "associated function", - ty::AssocKind::Type => "type", + ty::AssocKind::Const { .. } => "associated const", + ty::AssocKind::Fn { has_self: true, .. } => "method", + ty::AssocKind::Fn { has_self: false, .. } => "associated function", + ty::AssocKind::Type { .. } => "associated type", + } + } + + pub fn namespace(&self) -> Namespace { + match self.kind { + ty::AssocKind::Type { .. } => Namespace::TypeNS, + ty::AssocKind::Const { .. } | ty::AssocKind::Fn { .. } => Namespace::ValueNS, + } + } + + pub fn as_def_kind(&self) -> DefKind { + match self.kind { + AssocKind::Const { .. } => DefKind::AssocConst, + AssocKind::Fn { .. } => DefKind::AssocFn, + AssocKind::Type { .. } => DefKind::AssocTy, + } + } + pub fn is_type(&self) -> bool { + matches!(self.kind, ty::AssocKind::Type { .. }) + } + + pub fn is_fn(&self) -> bool { + matches!(self.kind, ty::AssocKind::Fn { .. }) + } + + pub fn is_method(&self) -> bool { + matches!(self.kind, ty::AssocKind::Fn { has_self: true, .. }) + } + + pub fn as_tag(&self) -> AssocTag { + match self.kind { + AssocKind::Const { .. } => AssocTag::Const, + AssocKind::Fn { .. } => AssocTag::Fn, + AssocKind::Type { .. } => AssocTag::Type, } } pub fn is_impl_trait_in_trait(&self) -> bool { - self.opt_rpitit_info.is_some() + matches!(self.kind, AssocKind::Type { data: AssocTypeData::Rpitit(_) }) } /// Returns true if: @@ -114,7 +150,7 @@ impl AssocItem { /// - If it is in a trait impl, the item from the original trait has this attribute, or /// - It is an inherent assoc const. pub fn is_type_const_capable(&self, tcx: TyCtxt<'_>) -> bool { - if self.kind != ty::AssocKind::Const { + if !matches!(self.kind, ty::AssocKind::Const { .. }) { return false; } @@ -128,26 +164,35 @@ impl AssocItem { } } +#[derive(Copy, Clone, PartialEq, Debug, HashStable, Eq, Hash, Encodable, Decodable)] +pub enum AssocTypeData { + Normal(Symbol), + /// The associated type comes from an RPITIT. It has no name, and the + /// `ImplTraitInTraitData` provides additional information about its + /// source. + Rpitit(ty::ImplTraitInTraitData), +} + #[derive(Copy, Clone, PartialEq, Debug, HashStable, Eq, Hash, Encodable, Decodable)] pub enum AssocKind { - Const, - Fn, - Type, + Const { name: Symbol }, + Fn { name: Symbol, has_self: bool }, + Type { data: AssocTypeData }, } impl AssocKind { pub fn namespace(&self) -> Namespace { match *self { - ty::AssocKind::Type => Namespace::TypeNS, - ty::AssocKind::Const | ty::AssocKind::Fn => Namespace::ValueNS, + ty::AssocKind::Type { .. } => Namespace::TypeNS, + ty::AssocKind::Const { .. } | ty::AssocKind::Fn { .. } => Namespace::ValueNS, } } pub fn as_def_kind(&self) -> DefKind { match self { - AssocKind::Const => DefKind::AssocConst, - AssocKind::Fn => DefKind::AssocFn, - AssocKind::Type => DefKind::AssocTy, + AssocKind::Const { .. } => DefKind::AssocConst, + AssocKind::Fn { .. } => DefKind::AssocFn, + AssocKind::Type { .. } => DefKind::AssocTy, } } } @@ -155,13 +200,22 @@ impl AssocKind { impl std::fmt::Display for AssocKind { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { - AssocKind::Fn => write!(f, "method"), - AssocKind::Const => write!(f, "associated const"), - AssocKind::Type => write!(f, "associated type"), + AssocKind::Fn { has_self: true, .. } => write!(f, "method"), + AssocKind::Fn { has_self: false, .. } => write!(f, "associated function"), + AssocKind::Const { .. } => write!(f, "associated const"), + AssocKind::Type { .. } => write!(f, "associated type"), } } } +// Like `AssocKind`, but just the tag, no fields. Used in various kinds of matching. +#[derive(Clone, Copy, Debug, PartialEq, Eq)] +pub enum AssocTag { + Const, + Fn, + Type, +} + /// A list of `ty::AssocItem`s in definition order that allows for efficient lookup by name. /// /// When doing lookup by name, we try to postpone hygienic comparison for as long as possible since @@ -169,17 +223,17 @@ impl std::fmt::Display for AssocKind { /// done only on items with the same name. #[derive(Debug, Clone, PartialEq, HashStable)] pub struct AssocItems { - items: SortedIndexMultiMap, + items: SortedIndexMultiMap, ty::AssocItem>, } impl AssocItems { /// Constructs an `AssociatedItems` map from a series of `ty::AssocItem`s in definition order. pub fn new(items_in_def_order: impl IntoIterator) -> Self { - let items = items_in_def_order.into_iter().map(|item| (item.name, item)).collect(); + let items = items_in_def_order.into_iter().map(|item| (item.opt_name(), item)).collect(); AssocItems { items } } - /// Returns a slice of associated items in the order they were defined. + /// Returns an iterator over associated items in the order they were defined. /// /// New code should avoid relying on definition order. If you need a particular associated item /// for a known trait, make that trait a lang item instead of indexing this array. @@ -192,40 +246,33 @@ impl AssocItems { } /// Returns an iterator over all associated items with the given name, ignoring hygiene. + /// + /// Panics if `name.is_empty()` returns `true`. pub fn filter_by_name_unhygienic( &self, name: Symbol, ) -> impl '_ + Iterator { - self.items.get_by_key(name) + assert!(!name.is_empty()); + self.items.get_by_key(Some(name)) } - /// Returns the associated item with the given name and `AssocKind`, if one exists. - pub fn find_by_name_and_kind( + /// Returns the associated item with the given identifier and `AssocKind`, if one exists. + /// The identifier is matched hygienically. + pub fn find_by_ident_and_kind( &self, tcx: TyCtxt<'_>, ident: Ident, - kind: AssocKind, + assoc_tag: AssocTag, parent_def_id: DefId, ) -> Option<&ty::AssocItem> { self.filter_by_name_unhygienic(ident.name) - .filter(|item| item.kind == kind) + .filter(|item| item.as_tag() == assoc_tag) .find(|item| tcx.hygienic_eq(ident, item.ident(tcx), parent_def_id)) } - /// Returns the associated item with the given name and any of `AssocKind`, if one exists. - pub fn find_by_name_and_kinds( - &self, - tcx: TyCtxt<'_>, - ident: Ident, - // Sorted in order of what kinds to look at - kinds: &[AssocKind], - parent_def_id: DefId, - ) -> Option<&ty::AssocItem> { - kinds.iter().find_map(|kind| self.find_by_name_and_kind(tcx, ident, *kind, parent_def_id)) - } - - /// Returns the associated item with the given name in the given `Namespace`, if one exists. - pub fn find_by_name_and_namespace( + /// Returns the associated item with the given identifier in the given `Namespace`, if one + /// exists. The identifier is matched hygienically. + pub fn find_by_ident_and_namespace( &self, tcx: TyCtxt<'_>, ident: Ident, @@ -233,7 +280,7 @@ impl AssocItems { parent_def_id: DefId, ) -> Option<&ty::AssocItem> { self.filter_by_name_unhygienic(ident.name) - .filter(|item| item.kind.namespace() == ns) + .filter(|item| item.namespace() == ns) .find(|item| tcx.hygienic_eq(ident, item.ident(tcx), parent_def_id)) } } diff --git a/compiler/rustc_middle/src/ty/closure.rs b/compiler/rustc_middle/src/ty/closure.rs index 703b6ce92471f..ff9096695d4db 100644 --- a/compiler/rustc_middle/src/ty/closure.rs +++ b/compiler/rustc_middle/src/ty/closure.rs @@ -150,9 +150,9 @@ impl<'tcx> CapturedPlace<'tcx> { /// Return span pointing to use that resulted in selecting the captured path pub fn get_path_span(&self, tcx: TyCtxt<'tcx>) -> Span { if let Some(path_expr_id) = self.info.path_expr_id { - tcx.hir().span(path_expr_id) + tcx.hir_span(path_expr_id) } else if let Some(capture_kind_expr_id) = self.info.capture_kind_expr_id { - tcx.hir().span(capture_kind_expr_id) + tcx.hir_span(capture_kind_expr_id) } else { // Fallback on upvars mentioned if neither path or capture expr id is captured @@ -166,9 +166,9 @@ impl<'tcx> CapturedPlace<'tcx> { /// Return span pointing to use that resulted in selecting the current capture kind pub fn get_capture_kind_span(&self, tcx: TyCtxt<'tcx>) -> Span { if let Some(capture_kind_expr_id) = self.info.capture_kind_expr_id { - tcx.hir().span(capture_kind_expr_id) + tcx.hir_span(capture_kind_expr_id) } else if let Some(path_expr_id) = self.info.path_expr_id { - tcx.hir().span(path_expr_id) + tcx.hir_span(path_expr_id) } else { // Fallback on upvars mentioned if neither path or capture expr id is captured diff --git a/compiler/rustc_middle/src/ty/codec.rs b/compiler/rustc_middle/src/ty/codec.rs index 74b34afe616b9..5ff87959a800d 100644 --- a/compiler/rustc_middle/src/ty/codec.rs +++ b/compiler/rustc_middle/src/ty/codec.rs @@ -442,6 +442,15 @@ impl<'tcx, D: TyDecoder<'tcx>> RefDecodable<'tcx, D> for ty::List> RefDecodable<'tcx, D> for ty::List> { + fn decode(decoder: &mut D) -> &'tcx Self { + let len = decoder.read_usize(); + decoder.interner().mk_patterns_from_iter( + (0..len).map::, _>(|_| Decodable::decode(decoder)), + ) + } +} + impl<'tcx, D: TyDecoder<'tcx>> RefDecodable<'tcx, D> for ty::List> { fn decode(decoder: &mut D) -> &'tcx Self { let len = decoder.read_usize(); @@ -501,8 +510,9 @@ impl_decodable_via_ref! { &'tcx ty::List>, &'tcx traits::ImplSource<'tcx, ()>, &'tcx mir::Body<'tcx>, - &'tcx mir::BorrowCheckResult<'tcx>, + &'tcx mir::ConcreteOpaqueTypes<'tcx>, &'tcx ty::List, + &'tcx ty::List>, &'tcx ty::ListWithCachedTypeInfo>, &'tcx ty::List, &'tcx ty::List<(VariantIdx, FieldIdx)>, diff --git a/compiler/rustc_middle/src/ty/consts.rs b/compiler/rustc_middle/src/ty/consts.rs index ae1c6c670cbca..dc5fe2d8f8b06 100644 --- a/compiler/rustc_middle/src/ty/consts.rs +++ b/compiler/rustc_middle/src/ty/consts.rs @@ -3,6 +3,7 @@ use std::borrow::Cow; use rustc_data_structures::intern::Interned; use rustc_error_messages::MultiSpan; use rustc_macros::HashStable; +use rustc_type_ir::walk::TypeWalker; use rustc_type_ir::{self as ir, TypeFlags, WithCachedTypeInfo}; use crate::ty::{self, Ty, TyCtxt}; @@ -243,4 +244,18 @@ impl<'tcx> Const<'tcx> { pub fn is_ct_infer(self) -> bool { matches!(self.kind(), ty::ConstKind::Infer(_)) } + + /// Iterator that walks `self` and any types reachable from + /// `self`, in depth-first order. Note that just walks the types + /// that appear in `self`, it does not descend into the fields of + /// structs or variants. For example: + /// + /// ```text + /// isize => { isize } + /// Foo> => { Foo>, Bar, isize } + /// [isize] => { [isize], isize } + /// ``` + pub fn walk(self) -> TypeWalker> { + TypeWalker::new(self.into()) + } } diff --git a/compiler/rustc_middle/src/ty/consts/valtree.rs b/compiler/rustc_middle/src/ty/consts/valtree.rs index 72263d8458085..2f21d19e03c70 100644 --- a/compiler/rustc_middle/src/ty/consts/valtree.rs +++ b/compiler/rustc_middle/src/ty/consts/valtree.rs @@ -33,7 +33,7 @@ pub enum ValTreeKind<'tcx> { /// The fields of any kind of aggregate. Structs, tuples and arrays are represented by /// listing their fields' values in order. /// - /// Enums are represented by storing their discriminant as a field, followed by all + /// Enums are represented by storing their variant index as a u32 field, followed by all /// the fields of the variant. /// /// ZST types are represented as an empty slice. diff --git a/compiler/rustc_middle/src/ty/context.rs b/compiler/rustc_middle/src/ty/context.rs index 08d4c1f9cf2f3..8871c7bb0d244 100644 --- a/compiler/rustc_middle/src/ty/context.rs +++ b/compiler/rustc_middle/src/ty/context.rs @@ -21,6 +21,7 @@ use rustc_data_structures::defer; use rustc_data_structures::fingerprint::Fingerprint; use rustc_data_structures::fx::FxHashMap; use rustc_data_structures::intern::Interned; +use rustc_data_structures::jobserver::Proxy; use rustc_data_structures::profiling::SelfProfilerRef; use rustc_data_structures::sharded::{IntoPointer, ShardedHashMap}; use rustc_data_structures::stable_hasher::{HashStable, StableHasher}; @@ -28,13 +29,12 @@ use rustc_data_structures::steal::Steal; use rustc_data_structures::sync::{ self, DynSend, DynSync, FreezeReadGuard, Lock, RwLock, WorkerLocal, }; -use rustc_data_structures::unord::UnordSet; use rustc_errors::{ Applicability, Diag, DiagCtxtHandle, ErrorGuaranteed, LintDiagnostic, MultiSpan, }; use rustc_hir::def::{CtorKind, DefKind}; use rustc_hir::def_id::{CrateNum, DefId, LOCAL_CRATE, LocalDefId}; -use rustc_hir::definitions::Definitions; +use rustc_hir::definitions::{DefPathData, Definitions, DisambiguatorState}; use rustc_hir::intravisit::VisitorExt; use rustc_hir::lang_items::LangItem; use rustc_hir::{self as hir, Attribute, HirId, Node, TraitCandidate}; @@ -106,7 +106,7 @@ impl<'tcx> Interner for TyCtxt<'tcx> { ) -> Self::PredefinedOpaques { self.mk_predefined_opaques_in_body(data) } - type DefiningOpaqueTypes = &'tcx ty::List; + type LocalDefIds = &'tcx ty::List; type CanonicalVars = CanonicalVarInfos<'tcx>; fn mk_canonical_var_infos(self, infos: &[ty::CanonicalVarInfo]) -> Self::CanonicalVars { self.mk_canonical_var_infos(infos) @@ -136,6 +136,7 @@ impl<'tcx> Interner for TyCtxt<'tcx> { type AllocId = crate::mir::interpret::AllocId; type Pat = Pattern<'tcx>; + type PatList = &'tcx List>; type Safety = hir::Safety; type Abi = ExternAbi; type Const = ty::Const<'tcx>; @@ -206,6 +207,9 @@ impl<'tcx> Interner for TyCtxt<'tcx> { fn type_of(self, def_id: DefId) -> ty::EarlyBinder<'tcx, Ty<'tcx>> { self.type_of(def_id) } + fn type_of_opaque_hir_typeck(self, def_id: LocalDefId) -> ty::EarlyBinder<'tcx, Ty<'tcx>> { + self.type_of_opaque_hir_typeck(def_id) + } type AdtDef = ty::AdtDef<'tcx>; fn adt_def(self, adt_def_id: DefId) -> Self::AdtDef { @@ -223,7 +227,7 @@ impl<'tcx> Interner for TyCtxt<'tcx> { } } DefKind::OpaqueTy => ty::Opaque, - DefKind::TyAlias => ty::Weak, + DefKind::TyAlias => ty::Free, kind => bug!("unexpected DefKind in AliasTy: {kind:?}"), } } @@ -238,10 +242,18 @@ impl<'tcx> Interner for TyCtxt<'tcx> { ty::AliasTermKind::ProjectionTy } } + DefKind::AssocConst => { + if let DefKind::Impl { of_trait: false } = self.def_kind(self.parent(alias.def_id)) + { + ty::AliasTermKind::InherentConst + } else { + ty::AliasTermKind::ProjectionConst + } + } DefKind::OpaqueTy => ty::AliasTermKind::OpaqueTy, - DefKind::TyAlias => ty::AliasTermKind::WeakTy, - DefKind::AssocConst => ty::AliasTermKind::ProjectionConst, - DefKind::AnonConst | DefKind::Const | DefKind::Ctor(_, CtorKind::Const) => { + DefKind::TyAlias => ty::AliasTermKind::FreeTy, + DefKind::Const => ty::AliasTermKind::FreeConst, + DefKind::AnonConst | DefKind::Ctor(_, CtorKind::Const) => { ty::AliasTermKind::UnevaluatedConst } kind => bug!("unexpected DefKind in AliasTy: {kind:?}"), @@ -434,6 +446,10 @@ impl<'tcx> Interner for TyCtxt<'tcx> { ) } + fn impl_self_is_guaranteed_unsized(self, impl_def_id: DefId) -> bool { + self.impl_self_is_guaranteed_unsized(impl_def_id) + } + fn has_target_features(self, def_id: DefId) -> bool { !self.codegen_fn_attrs(def_id).target_features.is_empty() } @@ -446,6 +462,10 @@ impl<'tcx> Interner for TyCtxt<'tcx> { self.is_lang_item(def_id, trait_lang_item_to_lang_item(lang_item)) } + fn is_default_trait(self, def_id: DefId) -> bool { + self.is_default_trait(def_id) + } + fn as_lang_item(self, def_id: DefId) -> Option { lang_item_to_trait_lang_item(self.lang_items().from_def_id(def_id)?) } @@ -453,7 +473,7 @@ impl<'tcx> Interner for TyCtxt<'tcx> { fn associated_type_def_ids(self, def_id: DefId) -> impl IntoIterator { self.associated_items(def_id) .in_definition_order() - .filter(|assoc_item| matches!(assoc_item.kind, ty::AssocKind::Type)) + .filter(|assoc_item| assoc_item.is_type()) .map(|assoc_item| assoc_item.def_id) } @@ -579,6 +599,10 @@ impl<'tcx> Interner for TyCtxt<'tcx> { self.defaultness(def_id).has_value() } + fn impl_specializes(self, impl_def_id: Self::DefId, victim_def_id: Self::DefId) -> bool { + self.specializes((impl_def_id, victim_def_id)) + } + fn impl_is_default(self, impl_def_id: DefId) -> bool { self.defaultness(impl_def_id).is_default() } @@ -663,9 +687,26 @@ impl<'tcx> Interner for TyCtxt<'tcx> { self.anonymize_bound_vars(binder) } - fn opaque_types_defined_by(self, defining_anchor: LocalDefId) -> Self::DefiningOpaqueTypes { + fn opaque_types_defined_by(self, defining_anchor: LocalDefId) -> Self::LocalDefIds { self.opaque_types_defined_by(defining_anchor) } + + fn opaque_types_and_coroutines_defined_by( + self, + defining_anchor: Self::LocalDefId, + ) -> Self::LocalDefIds { + if self.next_trait_solver_globally() { + let coroutines_defined_by = self + .nested_bodies_within(defining_anchor) + .iter() + .filter(|def_id| self.is_coroutine(def_id.to_def_id())); + self.mk_local_def_ids_from_iter( + self.opaque_types_defined_by(defining_anchor).iter().chain(coroutines_defined_by), + ) + } else { + self.opaque_types_defined_by(defining_anchor) + } + } } macro_rules! bidirectional_lang_item_map { @@ -687,7 +728,6 @@ macro_rules! bidirectional_lang_item_map { bidirectional_lang_item_map! { // tidy-alphabetical-start - AsyncDestruct, AsyncFn, AsyncFnKindHelper, AsyncFnKindUpvars, @@ -813,6 +853,7 @@ pub struct CtxtInterners<'tcx> { captures: InternedSet<'tcx, List<&'tcx ty::CapturedPlace<'tcx>>>, offset_of: InternedSet<'tcx, List<(VariantIdx, FieldIdx)>>, valtree: InternedSet<'tcx, ty::ValTreeKind<'tcx>>, + patterns: InternedSet<'tcx, List>>, } impl<'tcx> CtxtInterners<'tcx> { @@ -849,6 +890,7 @@ impl<'tcx> CtxtInterners<'tcx> { captures: InternedSet::with_capacity(N), offset_of: InternedSet::with_capacity(N), valtree: InternedSet::with_capacity(N), + patterns: InternedSet::with_capacity(N), } } @@ -859,7 +901,7 @@ impl<'tcx> CtxtInterners<'tcx> { Ty(Interned::new_unchecked( self.type_ .intern(kind, |kind| { - let flags = super::flags::FlagComputation::for_kind(&kind); + let flags = ty::FlagComputation::>::for_kind(&kind); let stable_hash = self.stable_hash(&flags, sess, untracked, &kind); InternedInSet(self.arena.alloc(WithCachedTypeInfo { @@ -885,7 +927,7 @@ impl<'tcx> CtxtInterners<'tcx> { Const(Interned::new_unchecked( self.const_ .intern(kind, |kind: ty::ConstKind<'_>| { - let flags = super::flags::FlagComputation::for_const_kind(&kind); + let flags = ty::FlagComputation::>::for_const_kind(&kind); let stable_hash = self.stable_hash(&flags, sess, untracked, &kind); InternedInSet(self.arena.alloc(WithCachedTypeInfo { @@ -901,7 +943,7 @@ impl<'tcx> CtxtInterners<'tcx> { fn stable_hash<'a, T: HashStable>>( &self, - flags: &ty::flags::FlagComputation, + flags: &ty::FlagComputation>, sess: &'a Session, untracked: &'a Untracked, val: &T, @@ -929,7 +971,7 @@ impl<'tcx> CtxtInterners<'tcx> { Predicate(Interned::new_unchecked( self.predicate .intern(kind, |kind| { - let flags = super::flags::FlagComputation::for_predicate(kind); + let flags = ty::FlagComputation::>::for_predicate(kind); let stable_hash = self.stable_hash(&flags, sess, untracked, &kind); @@ -950,7 +992,7 @@ impl<'tcx> CtxtInterners<'tcx> { } else { self.clauses .intern_ref(clauses, || { - let flags = super::flags::FlagComputation::for_clauses(clauses); + let flags = ty::FlagComputation::>::for_clauses(clauses); InternedInSet(ListWithCachedTypeInfo::from_arena( &*self.arena, @@ -1409,6 +1451,9 @@ pub struct GlobalCtxt<'tcx> { pub(crate) alloc_map: interpret::AllocMap<'tcx>, current_gcx: CurrentGcx, + + /// A jobserver reference used to release then acquire a token while waiting on a query. + pub jobserver_proxy: Arc, } impl<'tcx> GlobalCtxt<'tcx> { @@ -1539,6 +1584,25 @@ impl<'tcx> TyCtxt<'tcx> { self.reserve_and_set_memory_dedup(alloc, salt) } + pub fn default_traits(self) -> &'static [rustc_hir::LangItem] { + match self.sess.opts.unstable_opts.experimental_default_bounds { + true => &[ + LangItem::Sized, + LangItem::DefaultTrait1, + LangItem::DefaultTrait2, + LangItem::DefaultTrait3, + LangItem::DefaultTrait4, + ], + false => &[LangItem::Sized], + } + } + + pub fn is_default_trait(self, def_id: DefId) -> bool { + self.default_traits() + .iter() + .any(|&default_trait| self.lang_items().get(default_trait) == Some(def_id)) + } + /// Returns a range of the start/end indices specified with the /// `rustc_layout_scalar_valid_range` attribute. // FIXME(eddyb) this is an awkward spot for this method, maybe move it? @@ -1594,6 +1658,7 @@ impl<'tcx> TyCtxt<'tcx> { query_system: QuerySystem<'tcx>, hooks: crate::hooks::Providers, current_gcx: CurrentGcx, + jobserver_proxy: Arc, f: impl FnOnce(TyCtxt<'tcx>) -> T, ) -> T { let data_layout = s.target.parse_data_layout().unwrap_or_else(|err| { @@ -1628,6 +1693,7 @@ impl<'tcx> TyCtxt<'tcx> { data_layout, alloc_map: interpret::AllocMap::new(), current_gcx, + jobserver_proxy, }); // This is a separate function to work around a crash with parallel rustc (#135870) @@ -1666,6 +1732,10 @@ impl<'tcx> TyCtxt<'tcx> { self.coroutine_kind(def_id).is_some() } + pub fn is_async_drop_in_place_coroutine(self, def_id: DefId) -> bool { + self.is_lang_item(self.parent(def_id), LangItem::AsyncDropInPlace) + } + /// Returns the movability of the coroutine of `def_id`, or panics /// if given a `def_id` that is not a coroutine. pub fn coroutine_movability(self, def_id: DefId) -> hir::Movability { @@ -1759,9 +1829,10 @@ impl<'tcx> TyCtxt<'tcx> { self.crate_types() .iter() .map(|ty| match *ty { - CrateType::Executable | CrateType::Staticlib | CrateType::Cdylib => { - MetadataKind::None - } + CrateType::Executable + | CrateType::Staticlib + | CrateType::Cdylib + | CrateType::Sdylib => MetadataKind::None, CrateType::Rlib => MetadataKind::Uncompressed, CrateType::Dylib | CrateType::ProcMacro => MetadataKind::Compressed, }) @@ -1781,10 +1852,15 @@ impl<'tcx> TyCtxt<'tcx> { // - needs_metadata: for putting into crate metadata. // - instrument_coverage: for putting into coverage data (see // `hash_mir_source`). + // - metrics_dir: metrics use the strict version hash in the filenames + // for dumped metrics files to prevent overwriting distinct metrics + // for similar source builds (may change in the future, this is part + // of the proof of concept impl for the metrics initiative project goal) cfg!(debug_assertions) || self.sess.opts.incremental.is_some() || self.needs_metadata() || self.sess.instrument_coverage() + || self.sess.opts.unstable_opts.metrics_dir.is_some() } #[inline] @@ -1903,8 +1979,11 @@ impl<'tcx> TyCtxtAt<'tcx> { parent: LocalDefId, name: Option, def_kind: DefKind, + override_def_path_data: Option, + disambiguator: &mut DisambiguatorState, ) -> TyCtxtFeed<'tcx, LocalDefId> { - let feed = self.tcx.create_def(parent, name, def_kind); + let feed = + self.tcx.create_def(parent, name, def_kind, override_def_path_data, disambiguator); feed.def_span(self.span); feed @@ -1918,8 +1997,10 @@ impl<'tcx> TyCtxt<'tcx> { parent: LocalDefId, name: Option, def_kind: DefKind, + override_def_path_data: Option, + disambiguator: &mut DisambiguatorState, ) -> TyCtxtFeed<'tcx, LocalDefId> { - let data = def_kind.def_path_data(name); + let data = override_def_path_data.unwrap_or_else(|| def_kind.def_path_data(name)); // The following call has the side effect of modifying the tables inside `definitions`. // These very tables are relied on by the incr. comp. engine to decode DepNodes and to // decode the on-disk cache. @@ -1929,12 +2010,7 @@ impl<'tcx> TyCtxt<'tcx> { // - has been created by this call to `create_def`. // As a consequence, this LocalDefId is always re-created before it is needed by the incr. // comp. engine itself. - // - // This call also writes to the value of `source_span` and `expn_that_defined` queries. - // This is fine because: - // - those queries are `eval_always` so we won't miss their result changing; - // - this write will have happened before these queries are called. - let def_id = self.untracked.definitions.write().create_def(parent, data); + let def_id = self.untracked.definitions.write().create_def(parent, data, disambiguator); // This function modifies `self.definitions` using a side-effect. // We need to ensure that these side effects are re-run by the incr. comp. engine. @@ -2059,7 +2135,8 @@ impl<'tcx> TyCtxt<'tcx> { CrateType::Executable | CrateType::Staticlib | CrateType::ProcMacro - | CrateType::Cdylib => false, + | CrateType::Cdylib + | CrateType::Sdylib => false, // FIXME rust-lang/rust#64319, rust-lang/rust#64872: // We want to block export of generics from dylibs, @@ -2112,7 +2189,7 @@ impl<'tcx> TyCtxt<'tcx> { return vec![]; }; - let mut v = TraitObjectVisitor(vec![], self.hir()); + let mut v = TraitObjectVisitor(vec![]); v.visit_ty_unambig(hir_output); v.0 } @@ -2125,7 +2202,7 @@ impl<'tcx> TyCtxt<'tcx> { scope_def_id: LocalDefId, ) -> Option<(Vec<&'tcx hir::Ty<'tcx>>, Span, Option)> { let hir_id = self.local_def_id_to_hir_id(scope_def_id); - let mut v = TraitObjectVisitor(vec![], self.hir()); + let mut v = TraitObjectVisitor(vec![]); // when the return type is a type alias if let Some(hir::FnDecl { output: hir::FnRetTy::Return(hir_output), .. }) = self.hir_fn_decl_by_hir_id(hir_id) && let hir::TyKind::Path(hir::QPath::Resolved( @@ -2347,6 +2424,8 @@ macro_rules! sty_debug_print { $(let mut $variant = total;)* for shard in tcx.interners.type_.lock_shards() { + // It seems that ordering doesn't affect anything here. + #[allow(rustc::potential_query_instability)] let types = shard.iter(); for &(InternedInSet(t), ()) in types { let variant = match t.internee { @@ -2605,6 +2684,7 @@ slice_interners!( local_def_ids: intern_local_def_ids(LocalDefId), captures: intern_captures(&'tcx ty::CapturedPlace<'tcx>), offset_of: pub mk_offset_of((VariantIdx, FieldIdx)), + patterns: pub mk_patterns(Pattern<'tcx>), ); impl<'tcx> TyCtxt<'tcx> { @@ -2871,11 +2951,19 @@ impl<'tcx> TyCtxt<'tcx> { self.interners.intern_clauses(clauses) } - pub fn mk_local_def_ids(self, clauses: &[LocalDefId]) -> &'tcx List { + pub fn mk_local_def_ids(self, def_ids: &[LocalDefId]) -> &'tcx List { // FIXME consider asking the input slice to be sorted to avoid // re-interning permutations, in which case that would be asserted // here. - self.intern_local_def_ids(clauses) + self.intern_local_def_ids(def_ids) + } + + pub fn mk_patterns_from_iter(self, iter: I) -> T::Output + where + I: Iterator, + T: CollectAndApply, &'tcx List>>, + { + T::collect_and_apply(iter, |xs| self.mk_patterns(xs)) } pub fn mk_local_def_ids_from_iter(self, iter: I) -> T::Output @@ -3022,8 +3110,8 @@ impl<'tcx> TyCtxt<'tcx> { span: impl Into, decorator: impl for<'a> LintDiagnostic<'a, ()>, ) { - let (level, src) = self.lint_level_at_node(lint, hir_id); - lint_level(self.sess, lint, level, src, Some(span.into()), |lint| { + let level = self.lint_level_at_node(lint, hir_id); + lint_level(self.sess, lint, level, Some(span.into()), |lint| { decorator.decorate_lint(lint); }) } @@ -3040,8 +3128,8 @@ impl<'tcx> TyCtxt<'tcx> { span: impl Into, decorate: impl for<'a, 'b> FnOnce(&'b mut Diag<'a, ()>), ) { - let (level, src) = self.lint_level_at_node(lint, hir_id); - lint_level(self.sess, lint, level, src, Some(span.into()), decorate); + let level = self.lint_level_at_node(lint, hir_id); + lint_level(self.sess, lint, level, Some(span.into()), decorate); } /// Find the crate root and the appropriate span where `use` and outer attributes can be @@ -3108,8 +3196,8 @@ impl<'tcx> TyCtxt<'tcx> { id: HirId, decorate: impl for<'a, 'b> FnOnce(&'b mut Diag<'a, ()>), ) { - let (level, src) = self.lint_level_at_node(lint, id); - lint_level(self.sess, lint, level, src, None, decorate); + let level = self.lint_level_at_node(lint, id); + lint_level(self.sess, lint, level, None, decorate); } pub fn in_scope_traits(self, id: HirId) -> Option<&'tcx [TraitCandidate]> { @@ -3232,6 +3320,10 @@ impl<'tcx> TyCtxt<'tcx> { && self.impl_trait_header(def_id).unwrap().constness == hir::Constness::Const } + pub fn is_sdylib_interface_build(self) -> bool { + self.sess.opts.unstable_opts.build_sdylib_interface + } + pub fn intrinsic(self, def_id: impl IntoQueryParam + Copy) -> Option { match self.def_kind(def_id) { DefKind::Fn | DefKind::AssocFn => {} @@ -3248,6 +3340,11 @@ impl<'tcx> TyCtxt<'tcx> { self.sess.opts.unstable_opts.next_solver.coherence } + #[allow(rustc::bad_opt_access)] + pub fn use_typing_mode_borrowck(self) -> bool { + self.next_trait_solver_globally() || self.sess.opts.unstable_opts.typing_mode_borrowck + } + pub fn is_impl_trait_in_trait(self, def_id: DefId) -> bool { self.opt_rpitit_info(def_id).is_some() } @@ -3319,9 +3416,7 @@ pub fn provide(providers: &mut Providers) { providers.maybe_unused_trait_imports = |tcx, ()| &tcx.resolutions(()).maybe_unused_trait_imports; providers.names_imported_by_glob_use = |tcx, id| { - tcx.arena.alloc(UnordSet::from( - tcx.resolutions(()).glob_map.get(&id).cloned().unwrap_or_default(), - )) + tcx.arena.alloc(tcx.resolutions(()).glob_map.get(&id).cloned().unwrap_or_default()) }; providers.extern_mod_stmt_cnum = @@ -3330,10 +3425,6 @@ pub fn provide(providers: &mut Providers) { |tcx, LocalCrate| contains_name(tcx.hir_krate_attrs(), sym::panic_runtime); providers.is_compiler_builtins = |tcx, LocalCrate| contains_name(tcx.hir_krate_attrs(), sym::compiler_builtins); - providers.has_panic_handler = |tcx, LocalCrate| { - // We want to check if the panic handler was defined in this crate - tcx.lang_items().panic_impl().is_some_and(|did| did.is_local()) - }; providers.source_span = |tcx, def_id| tcx.untracked.source_span.get(def_id).unwrap_or(DUMMY_SP); } diff --git a/compiler/rustc_middle/src/ty/diagnostics.rs b/compiler/rustc_middle/src/ty/diagnostics.rs index d3abb3d64b8cb..b122ada0925d6 100644 --- a/compiler/rustc_middle/src/ty/diagnostics.rs +++ b/compiler/rustc_middle/src/ty/diagnostics.rs @@ -3,7 +3,7 @@ use std::fmt::Write; use std::ops::ControlFlow; -use rustc_data_structures::fx::FxHashMap; +use rustc_data_structures::fx::FxIndexMap; use rustc_errors::{ Applicability, Diag, DiagArgValue, IntoDiagArg, into_diag_arg_using_display, listify, pluralize, }; @@ -287,7 +287,7 @@ pub fn suggest_constraining_type_params<'a>( param_names_and_constraints: impl Iterator)>, span_to_replace: Option, ) -> bool { - let mut grouped = FxHashMap::default(); + let mut grouped = FxIndexMap::default(); let mut unstable_suggestion = false; param_names_and_constraints.for_each(|(param_name, constraint, def_id)| { let stable = match def_id { @@ -571,15 +571,15 @@ pub fn suggest_constraining_type_params<'a>( } /// Collect al types that have an implicit `'static` obligation that we could suggest `'_` for. -pub struct TraitObjectVisitor<'tcx>(pub Vec<&'tcx hir::Ty<'tcx>>, pub crate::hir::map::Map<'tcx>); +pub(crate) struct TraitObjectVisitor<'tcx>(pub(crate) Vec<&'tcx hir::Ty<'tcx>>); impl<'v> hir::intravisit::Visitor<'v> for TraitObjectVisitor<'v> { fn visit_ty(&mut self, ty: &'v hir::Ty<'v, AmbigArg>) { match ty.kind { hir::TyKind::TraitObject(_, tagged_ptr) if let hir::Lifetime { - res: - hir::LifetimeName::ImplicitObjectLifetimeDefault | hir::LifetimeName::Static, + kind: + hir::LifetimeKind::ImplicitObjectLifetimeDefault | hir::LifetimeKind::Static, .. } = tagged_ptr.pointer() => { @@ -592,18 +592,6 @@ impl<'v> hir::intravisit::Visitor<'v> for TraitObjectVisitor<'v> { } } -/// Collect al types that have an implicit `'static` obligation that we could suggest `'_` for. -pub struct StaticLifetimeVisitor<'tcx>(pub Vec, pub crate::hir::map::Map<'tcx>); - -impl<'v> hir::intravisit::Visitor<'v> for StaticLifetimeVisitor<'v> { - fn visit_lifetime(&mut self, lt: &'v hir::Lifetime) { - if let hir::LifetimeName::ImplicitObjectLifetimeDefault | hir::LifetimeName::Static = lt.res - { - self.0.push(lt.ident.span); - } - } -} - pub struct IsSuggestableVisitor<'tcx> { tcx: TyCtxt<'tcx>, infer_suggestable: bool, diff --git a/compiler/rustc_middle/src/ty/erase_regions.rs b/compiler/rustc_middle/src/ty/erase_regions.rs index f25c48cf42ab7..45a0b1288db87 100644 --- a/compiler/rustc_middle/src/ty/erase_regions.rs +++ b/compiler/rustc_middle/src/ty/erase_regions.rs @@ -44,7 +44,13 @@ impl<'tcx> TypeFolder> for RegionEraserVisitor<'tcx> { } fn fold_ty(&mut self, ty: Ty<'tcx>) -> Ty<'tcx> { - if ty.has_infer() { ty.super_fold_with(self) } else { self.tcx.erase_regions_ty(ty) } + if !ty.has_type_flags(TypeFlags::HAS_BINDER_VARS | TypeFlags::HAS_FREE_REGIONS) { + ty + } else if ty.has_infer() { + ty.super_fold_with(self) + } else { + self.tcx.erase_regions_ty(ty) + } } fn fold_binder(&mut self, t: ty::Binder<'tcx, T>) -> ty::Binder<'tcx, T> @@ -59,9 +65,25 @@ impl<'tcx> TypeFolder> for RegionEraserVisitor<'tcx> { // We must not erase bound regions. `for<'a> fn(&'a ())` and // `fn(&'free ())` are different types: they may implement different // traits and have a different `TypeId`. - match *r { + match r.kind() { ty::ReBound(..) => r, _ => self.tcx.lifetimes.re_erased, } } + + fn fold_const(&mut self, ct: ty::Const<'tcx>) -> ty::Const<'tcx> { + if ct.has_type_flags(TypeFlags::HAS_BINDER_VARS | TypeFlags::HAS_FREE_REGIONS) { + ct.super_fold_with(self) + } else { + ct + } + } + + fn fold_predicate(&mut self, p: ty::Predicate<'tcx>) -> ty::Predicate<'tcx> { + if p.has_type_flags(TypeFlags::HAS_BINDER_VARS | TypeFlags::HAS_FREE_REGIONS) { + p.super_fold_with(self) + } else { + p + } + } } diff --git a/compiler/rustc_middle/src/ty/error.rs b/compiler/rustc_middle/src/ty/error.rs index a0e67929c5289..13723874ad3a1 100644 --- a/compiler/rustc_middle/src/ty/error.rs +++ b/compiler/rustc_middle/src/ty/error.rs @@ -205,7 +205,7 @@ impl<'tcx> Ty<'tcx> { ty::Placeholder(..) => "higher-ranked type".into(), ty::Bound(..) => "bound type variable".into(), ty::Alias(ty::Projection | ty::Inherent, _) => "associated type".into(), - ty::Alias(ty::Weak, _) => "type alias".into(), + ty::Alias(ty::Free, _) => "type alias".into(), ty::Param(_) => "type parameter".into(), ty::Alias(ty::Opaque, ..) => "opaque type".into(), } @@ -279,7 +279,7 @@ impl<'tcx> TyCtxt<'tcx> { p.hash(&mut s); let hash = s.finish(); *path = Some(path.take().unwrap_or_else(|| { - self.output_filenames(()).temp_path_ext(&format!("long-type-{hash}.txt"), None) + self.output_filenames(()).temp_path_for_diagnostic(&format!("long-type-{hash}.txt")) })); let Ok(mut file) = File::options().create(true).read(true).append(true).open(&path.as_ref().unwrap()) diff --git a/compiler/rustc_middle/src/ty/flags.rs b/compiler/rustc_middle/src/ty/flags.rs deleted file mode 100644 index b0c442d28f0a0..0000000000000 --- a/compiler/rustc_middle/src/ty/flags.rs +++ /dev/null @@ -1,359 +0,0 @@ -use std::slice; - -use crate::ty::{self, GenericArg, GenericArgKind, InferConst, Ty, TypeFlags}; - -#[derive(Debug)] -pub struct FlagComputation { - pub flags: TypeFlags, - - /// see `Ty::outer_exclusive_binder` for details - pub outer_exclusive_binder: ty::DebruijnIndex, -} - -impl FlagComputation { - fn new() -> FlagComputation { - FlagComputation { flags: TypeFlags::empty(), outer_exclusive_binder: ty::INNERMOST } - } - - #[allow(rustc::usage_of_ty_tykind)] - pub fn for_kind(kind: &ty::TyKind<'_>) -> FlagComputation { - let mut result = FlagComputation::new(); - result.add_kind(kind); - result - } - - pub fn for_predicate(binder: ty::Binder<'_, ty::PredicateKind<'_>>) -> FlagComputation { - let mut result = FlagComputation::new(); - result.add_predicate(binder); - result - } - - pub fn for_const_kind(kind: &ty::ConstKind<'_>) -> FlagComputation { - let mut result = FlagComputation::new(); - result.add_const_kind(kind); - result - } - - pub fn for_clauses(clauses: &[ty::Clause<'_>]) -> FlagComputation { - let mut result = FlagComputation::new(); - for c in clauses { - result.add_flags(c.as_predicate().flags()); - result.add_exclusive_binder(c.as_predicate().outer_exclusive_binder()); - } - result - } - - fn add_flags(&mut self, flags: TypeFlags) { - self.flags = self.flags | flags; - } - - /// indicates that `self` refers to something at binding level `binder` - fn add_bound_var(&mut self, binder: ty::DebruijnIndex) { - let exclusive_binder = binder.shifted_in(1); - self.add_exclusive_binder(exclusive_binder); - } - - /// indicates that `self` refers to something *inside* binding - /// level `binder` -- not bound by `binder`, but bound by the next - /// binder internal to it - fn add_exclusive_binder(&mut self, exclusive_binder: ty::DebruijnIndex) { - self.outer_exclusive_binder = self.outer_exclusive_binder.max(exclusive_binder); - } - - /// Adds the flags/depth from a set of types that appear within the current type, but within a - /// region binder. - fn bound_computation(&mut self, value: ty::Binder<'_, T>, f: F) - where - F: FnOnce(&mut Self, T), - { - let mut computation = FlagComputation::new(); - - if !value.bound_vars().is_empty() { - computation.add_flags(TypeFlags::HAS_BINDER_VARS); - } - - f(&mut computation, value.skip_binder()); - - self.add_flags(computation.flags); - - // The types that contributed to `computation` occurred within - // a region binder, so subtract one from the region depth - // within when adding the depth to `self`. - let outer_exclusive_binder = computation.outer_exclusive_binder; - if outer_exclusive_binder > ty::INNERMOST { - self.add_exclusive_binder(outer_exclusive_binder.shifted_out(1)); - } // otherwise, this binder captures nothing - } - - #[allow(rustc::usage_of_ty_tykind)] - fn add_kind(&mut self, kind: &ty::TyKind<'_>) { - match kind { - &ty::Bool - | &ty::Char - | &ty::Int(_) - | &ty::Float(_) - | &ty::Uint(_) - | &ty::Never - | &ty::Str - | &ty::Foreign(..) => {} - - &ty::Error(_) => self.add_flags(TypeFlags::HAS_ERROR), - - &ty::Param(_) => { - self.add_flags(TypeFlags::HAS_TY_PARAM); - } - - &ty::Closure(_, args) - | &ty::Coroutine(_, args) - | &ty::CoroutineClosure(_, args) - | &ty::CoroutineWitness(_, args) => { - self.add_args(args); - } - - &ty::Bound(debruijn, _) => { - self.add_bound_var(debruijn); - self.add_flags(TypeFlags::HAS_TY_BOUND); - } - - &ty::Placeholder(..) => { - self.add_flags(TypeFlags::HAS_TY_PLACEHOLDER); - } - - &ty::Infer(infer) => match infer { - ty::FreshTy(_) | ty::FreshIntTy(_) | ty::FreshFloatTy(_) => { - self.add_flags(TypeFlags::HAS_TY_FRESH) - } - - ty::TyVar(_) | ty::IntVar(_) | ty::FloatVar(_) => { - self.add_flags(TypeFlags::HAS_TY_INFER) - } - }, - - &ty::Adt(_, args) => { - self.add_args(args); - } - - &ty::Alias(kind, data) => { - self.add_flags(match kind { - ty::Projection => TypeFlags::HAS_TY_PROJECTION, - ty::Weak => TypeFlags::HAS_TY_WEAK, - ty::Opaque => TypeFlags::HAS_TY_OPAQUE, - ty::Inherent => TypeFlags::HAS_TY_INHERENT, - }); - - self.add_alias_ty(data); - } - - &ty::Dynamic(obj, r, _) => { - for predicate in obj.iter() { - self.bound_computation(predicate, |computation, predicate| match predicate { - ty::ExistentialPredicate::Trait(tr) => computation.add_args(tr.args), - ty::ExistentialPredicate::Projection(p) => { - computation.add_existential_projection(&p); - } - ty::ExistentialPredicate::AutoTrait(_) => {} - }); - } - - self.add_region(r); - } - - &ty::Array(tt, len) => { - self.add_ty(tt); - self.add_const(len); - } - - &ty::Pat(ty, pat) => { - self.add_ty(ty); - match *pat { - ty::PatternKind::Range { start, end } => { - self.add_const(start); - self.add_const(end); - } - } - } - - &ty::Slice(tt) => self.add_ty(tt), - - &ty::RawPtr(ty, _) => { - self.add_ty(ty); - } - - &ty::Ref(r, ty, _) => { - self.add_region(r); - self.add_ty(ty); - } - - &ty::Tuple(types) => { - self.add_tys(types); - } - - &ty::FnDef(_, args) => { - self.add_args(args); - } - - &ty::FnPtr(sig_tys, _) => self.bound_computation(sig_tys, |computation, sig_tys| { - computation.add_tys(sig_tys.inputs_and_output); - }), - - &ty::UnsafeBinder(bound_ty) => { - self.bound_computation(bound_ty.into(), |computation, ty| { - computation.add_ty(ty); - }) - } - } - } - - fn add_predicate(&mut self, binder: ty::Binder<'_, ty::PredicateKind<'_>>) { - self.bound_computation(binder, |computation, atom| computation.add_predicate_atom(atom)); - } - - fn add_predicate_atom(&mut self, atom: ty::PredicateKind<'_>) { - match atom { - ty::PredicateKind::Clause(ty::ClauseKind::Trait(trait_pred)) => { - self.add_args(trait_pred.trait_ref.args); - } - ty::PredicateKind::Clause(ty::ClauseKind::HostEffect(ty::HostEffectPredicate { - trait_ref, - constness: _, - })) => { - self.add_args(trait_ref.args); - } - ty::PredicateKind::Clause(ty::ClauseKind::RegionOutlives(ty::OutlivesPredicate( - a, - b, - ))) => { - self.add_region(a); - self.add_region(b); - } - ty::PredicateKind::Clause(ty::ClauseKind::TypeOutlives(ty::OutlivesPredicate( - ty, - region, - ))) => { - self.add_ty(ty); - self.add_region(region); - } - ty::PredicateKind::Clause(ty::ClauseKind::ConstArgHasType(ct, ty)) => { - self.add_const(ct); - self.add_ty(ty); - } - ty::PredicateKind::Subtype(ty::SubtypePredicate { a_is_expected: _, a, b }) => { - self.add_ty(a); - self.add_ty(b); - } - ty::PredicateKind::Coerce(ty::CoercePredicate { a, b }) => { - self.add_ty(a); - self.add_ty(b); - } - ty::PredicateKind::Clause(ty::ClauseKind::Projection(ty::ProjectionPredicate { - projection_term, - term, - })) => { - self.add_alias_term(projection_term); - self.add_term(term); - } - ty::PredicateKind::Clause(ty::ClauseKind::WellFormed(arg)) => { - self.add_args(slice::from_ref(&arg)); - } - ty::PredicateKind::DynCompatible(_def_id) => {} - ty::PredicateKind::Clause(ty::ClauseKind::ConstEvaluatable(uv)) => { - self.add_const(uv); - } - ty::PredicateKind::ConstEquate(expected, found) => { - self.add_const(expected); - self.add_const(found); - } - ty::PredicateKind::Ambiguous => {} - ty::PredicateKind::NormalizesTo(ty::NormalizesTo { alias, term }) => { - self.add_alias_term(alias); - self.add_term(term); - } - ty::PredicateKind::AliasRelate(t1, t2, _) => { - self.add_term(t1); - self.add_term(t2); - } - } - } - - fn add_ty(&mut self, ty: Ty<'_>) { - self.add_flags(ty.flags()); - self.add_exclusive_binder(ty.outer_exclusive_binder()); - } - - fn add_tys(&mut self, tys: &[Ty<'_>]) { - for &ty in tys { - self.add_ty(ty); - } - } - - fn add_region(&mut self, r: ty::Region<'_>) { - self.add_flags(r.type_flags()); - if let ty::ReBound(debruijn, _) = *r { - self.add_bound_var(debruijn); - } - } - - fn add_const(&mut self, c: ty::Const<'_>) { - self.add_flags(c.flags()); - self.add_exclusive_binder(c.outer_exclusive_binder()); - } - - fn add_const_kind(&mut self, c: &ty::ConstKind<'_>) { - match *c { - ty::ConstKind::Unevaluated(uv) => { - self.add_args(uv.args); - self.add_flags(TypeFlags::HAS_CT_PROJECTION); - } - ty::ConstKind::Infer(infer) => match infer { - InferConst::Fresh(_) => self.add_flags(TypeFlags::HAS_CT_FRESH), - InferConst::Var(_) => self.add_flags(TypeFlags::HAS_CT_INFER), - }, - ty::ConstKind::Bound(debruijn, _) => { - self.add_bound_var(debruijn); - self.add_flags(TypeFlags::HAS_CT_BOUND); - } - ty::ConstKind::Param(_) => { - self.add_flags(TypeFlags::HAS_CT_PARAM); - } - ty::ConstKind::Placeholder(_) => { - self.add_flags(TypeFlags::HAS_CT_PLACEHOLDER); - } - ty::ConstKind::Value(cv) => self.add_ty(cv.ty), - ty::ConstKind::Expr(e) => self.add_args(e.args()), - ty::ConstKind::Error(_) => self.add_flags(TypeFlags::HAS_ERROR), - } - } - - fn add_existential_projection(&mut self, projection: &ty::ExistentialProjection<'_>) { - self.add_args(projection.args); - match projection.term.unpack() { - ty::TermKind::Ty(ty) => self.add_ty(ty), - ty::TermKind::Const(ct) => self.add_const(ct), - } - } - - fn add_alias_ty(&mut self, alias_ty: ty::AliasTy<'_>) { - self.add_args(alias_ty.args); - } - - fn add_alias_term(&mut self, alias_term: ty::AliasTerm<'_>) { - self.add_args(alias_term.args); - } - - fn add_args(&mut self, args: &[GenericArg<'_>]) { - for kind in args { - match kind.unpack() { - GenericArgKind::Type(ty) => self.add_ty(ty), - GenericArgKind::Lifetime(lt) => self.add_region(lt), - GenericArgKind::Const(ct) => self.add_const(ct), - } - } - } - - fn add_term(&mut self, term: ty::Term<'_>) { - match term.unpack() { - ty::TermKind::Ty(ty) => self.add_ty(ty), - ty::TermKind::Const(ct) => self.add_const(ct), - } - } -} diff --git a/compiler/rustc_middle/src/ty/fold.rs b/compiler/rustc_middle/src/ty/fold.rs index dc2c9e3d9f119..8d6871d2f1fee 100644 --- a/compiler/rustc_middle/src/ty/fold.rs +++ b/compiler/rustc_middle/src/ty/fold.rs @@ -145,10 +145,10 @@ where } fn fold_region(&mut self, r: ty::Region<'tcx>) -> ty::Region<'tcx> { - match *r { + match r.kind() { ty::ReBound(debruijn, br) if debruijn == self.current_index => { let region = self.delegate.replace_region(br); - if let ty::ReBound(debruijn1, br) = *region { + if let ty::ReBound(debruijn1, br) = region.kind() { // If the callback returns a bound region, // that region should always use the INNERMOST // debruijn index. Then we adjust it to the @@ -278,7 +278,7 @@ impl<'tcx> TyCtxt<'tcx> { where T: TypeFoldable>, { - let shift_bv = |bv: ty::BoundVar| ty::BoundVar::from_usize(bv.as_usize() + bound_vars); + let shift_bv = |bv: ty::BoundVar| bv + bound_vars; self.replace_escaping_bound_vars_uncached( value, FnMutDelegate { diff --git a/compiler/rustc_middle/src/ty/generic_args.rs b/compiler/rustc_middle/src/ty/generic_args.rs index e87859a55eddd..542c0b3e6ebb4 100644 --- a/compiler/rustc_middle/src/ty/generic_args.rs +++ b/compiler/rustc_middle/src/ty/generic_args.rs @@ -11,12 +11,13 @@ use rustc_hir::def_id::DefId; use rustc_macros::{HashStable, TyDecodable, TyEncodable, extension}; use rustc_serialize::{Decodable, Encodable}; use rustc_type_ir::WithCachedTypeInfo; +use rustc_type_ir::walk::TypeWalker; use smallvec::SmallVec; use crate::ty::codec::{TyDecoder, TyEncoder}; use crate::ty::{ self, ClosureArgs, CoroutineArgs, CoroutineClosureArgs, FallibleTypeFolder, InlineConstArgs, - Lift, List, Ty, TyCtxt, TypeFoldable, TypeVisitable, TypeVisitor, VisitorResult, + Lift, List, Ty, TyCtxt, TypeFoldable, TypeFolder, TypeVisitable, TypeVisitor, VisitorResult, walk_visitable_list, }; @@ -249,17 +250,17 @@ impl<'tcx> GenericArg<'tcx> { } #[inline] - pub fn as_type(self) -> Option> { + pub fn as_region(self) -> Option> { match self.unpack() { - GenericArgKind::Type(ty) => Some(ty), + GenericArgKind::Lifetime(re) => Some(re), _ => None, } } #[inline] - pub fn as_region(self) -> Option> { + pub fn as_type(self) -> Option> { match self.unpack() { - GenericArgKind::Lifetime(re) => Some(re), + GenericArgKind::Type(ty) => Some(ty), _ => None, } } @@ -272,6 +273,15 @@ impl<'tcx> GenericArg<'tcx> { } } + #[inline] + pub fn as_term(self) -> Option> { + match self.unpack() { + GenericArgKind::Lifetime(_) => None, + GenericArgKind::Type(ty) => Some(ty.into()), + GenericArgKind::Const(ct) => Some(ct.into()), + } + } + /// Unpack the `GenericArg` as a region when it is known certainly to be a region. pub fn expect_region(self) -> ty::Region<'tcx> { self.as_region().unwrap_or_else(|| bug!("expected a region, but found another kind")) @@ -297,6 +307,20 @@ impl<'tcx> GenericArg<'tcx> { GenericArgKind::Const(ct) => ct.is_ct_infer(), } } + + /// Iterator that walks `self` and any types reachable from + /// `self`, in depth-first order. Note that just walks the types + /// that appear in `self`, it does not descend into the fields of + /// structs or variants. For example: + /// + /// ```text + /// isize => { isize } + /// Foo> => { Foo>, Bar, isize } + /// [isize] => { [isize], isize } + /// ``` + pub fn walk(self) -> TypeWalker> { + TypeWalker::new(self) + } } impl<'a, 'tcx> Lift> for GenericArg<'a> { @@ -322,6 +346,14 @@ impl<'tcx> TypeFoldable> for GenericArg<'tcx> { GenericArgKind::Const(ct) => ct.try_fold_with(folder).map(Into::into), } } + + fn fold_with>>(self, folder: &mut F) -> Self { + match self.unpack() { + GenericArgKind::Lifetime(lt) => lt.fold_with(folder).into(), + GenericArgKind::Type(ty) => ty.fold_with(folder).into(), + GenericArgKind::Const(ct) => ct.fold_with(folder).into(), + } + } } impl<'tcx> TypeVisitable> for GenericArg<'tcx> { @@ -396,14 +428,14 @@ impl<'tcx> GenericArgs<'tcx> { InlineConstArgs { args: self } } - /// Creates an `GenericArgs` that maps each generic parameter to itself. + /// Creates a [`GenericArgs`] that maps each generic parameter to itself. pub fn identity_for_item(tcx: TyCtxt<'tcx>, def_id: impl Into) -> GenericArgsRef<'tcx> { Self::for_item(tcx, def_id.into(), |param, _| tcx.mk_param_from_def(param)) } - /// Creates an `GenericArgs` for generic parameter definitions, + /// Creates a [`GenericArgs`] for generic parameter definitions, /// by calling closures to obtain each kind. - /// The closures get to observe the `GenericArgs` as they're + /// The closures get to observe the [`GenericArgs`] as they're /// being built, which can be used to correctly /// replace defaults of generic parameters. pub fn for_item(tcx: TyCtxt<'tcx>, def_id: DefId, mut mk_kind: F) -> GenericArgsRef<'tcx> @@ -591,6 +623,27 @@ impl<'tcx> TypeFoldable> for GenericArgsRef<'tcx> { } } 0 => Ok(self), + _ => ty::util::try_fold_list(self, folder, |tcx, v| tcx.mk_args(v)), + } + } + + fn fold_with>>(self, folder: &mut F) -> Self { + // See justification for this behavior in `try_fold_with`. + match self.len() { + 1 => { + let param0 = self[0].fold_with(folder); + if param0 == self[0] { self } else { folder.cx().mk_args(&[param0]) } + } + 2 => { + let param0 = self[0].fold_with(folder); + let param1 = self[1].fold_with(folder); + if param0 == self[0] && param1 == self[1] { + self + } else { + folder.cx().mk_args(&[param0, param1]) + } + } + 0 => self, _ => ty::util::fold_list(self, folder, |tcx, v| tcx.mk_args(v)), } } @@ -626,6 +679,22 @@ impl<'tcx> TypeFoldable> for &'tcx ty::List> { Ok(folder.cx().mk_type_list(&[param0, param1])) } } + _ => ty::util::try_fold_list(self, folder, |tcx, v| tcx.mk_type_list(v)), + } + } + + fn fold_with>>(self, folder: &mut F) -> Self { + // See comment justifying behavior in `try_fold_with`. + match self.len() { + 2 => { + let param0 = self[0].fold_with(folder); + let param1 = self[1].fold_with(folder); + if param0 == self[0] && param1 == self[1] { + self + } else { + folder.cx().mk_type_list(&[param0, param1]) + } + } _ => ty::util::fold_list(self, folder, |tcx, v| tcx.mk_type_list(v)), } } diff --git a/compiler/rustc_middle/src/ty/inhabitedness/mod.rs b/compiler/rustc_middle/src/ty/inhabitedness/mod.rs index 32988965a35bc..d8bab58545fcc 100644 --- a/compiler/rustc_middle/src/ty/inhabitedness/mod.rs +++ b/compiler/rustc_middle/src/ty/inhabitedness/mod.rs @@ -127,7 +127,7 @@ impl<'tcx> Ty<'tcx> { InhabitedPredicate::True } Never => InhabitedPredicate::False, - Param(_) | Alias(ty::Projection | ty::Weak, _) => InhabitedPredicate::GenericType(self), + Param(_) | Alias(ty::Projection | ty::Free, _) => InhabitedPredicate::GenericType(self), Alias(ty::Opaque, alias_ty) => { match alias_ty.def_id.as_local() { // Foreign opaque is considered inhabited. diff --git a/compiler/rustc_middle/src/ty/instance.rs b/compiler/rustc_middle/src/ty/instance.rs index e3b3eccffb550..848a940c0e0ee 100644 --- a/compiler/rustc_middle/src/ty/instance.rs +++ b/compiler/rustc_middle/src/ty/instance.rs @@ -71,7 +71,7 @@ pub enum InstanceKind<'tcx> { /// - coroutines Item(DefId), - /// An intrinsic `fn` item (with `"rust-intrinsic"` ABI). + /// An intrinsic `fn` item (with`#[rustc_instrinsic]`). /// /// Alongside `Virtual`, this is the only `InstanceKind` that does not have its own callable MIR. /// Instead, codegen and const eval "magically" evaluate calls to intrinsics purely in the @@ -147,6 +147,9 @@ pub enum InstanceKind<'tcx> { /// native support. ThreadLocalShim(DefId), + /// Proxy shim for async drop of future (def_id, proxy_cor_ty, impl_cor_ty) + FutureDropPollShim(DefId, Ty<'tcx>, Ty<'tcx>), + /// `core::ptr::drop_in_place::`. /// /// The `DefId` is for `core::ptr::drop_in_place`. @@ -173,7 +176,18 @@ pub enum InstanceKind<'tcx> { /// /// The `DefId` is for `core::future::async_drop::async_drop_in_place`, the `Ty` /// is the type `T`. - AsyncDropGlueCtorShim(DefId, Option>), + AsyncDropGlueCtorShim(DefId, Ty<'tcx>), + + /// `core::future::async_drop::async_drop_in_place::<'_, T>::{closure}`. + /// + /// async_drop_in_place poll function implementation (for generated coroutine). + /// `Ty` here is `async_drop_in_place::{closure}` coroutine type, not just `T` + AsyncDropGlue(DefId, Ty<'tcx>), + + /// Generated by externally implementable items. This function adds indirection so we can choose + /// in the final crate whether to call an explicit implementation or, if none are given, call the + /// default. + EiiShim { def_id: DefId, extern_item: DefId, chosen_impl: DefId, weak_linkage: bool }, } impl<'tcx> Instance<'tcx> { @@ -221,7 +235,9 @@ impl<'tcx> Instance<'tcx> { .upstream_monomorphizations_for(def) .and_then(|monos| monos.get(&self.args).cloned()), InstanceKind::DropGlue(_, Some(_)) => tcx.upstream_drop_glue_for(self.args), - InstanceKind::AsyncDropGlueCtorShim(_, Some(_)) => { + InstanceKind::AsyncDropGlue(_, _) => None, + InstanceKind::FutureDropPollShim(_, _, _) => None, + InstanceKind::AsyncDropGlueCtorShim(_, _) => { tcx.upstream_async_drop_glue_for(self.args) } _ => None, @@ -248,7 +264,10 @@ impl<'tcx> InstanceKind<'tcx> { | InstanceKind::DropGlue(def_id, _) | InstanceKind::CloneShim(def_id, _) | InstanceKind::FnPtrAddrShim(def_id, _) - | InstanceKind::AsyncDropGlueCtorShim(def_id, _) => def_id, + | InstanceKind::FutureDropPollShim(def_id, _, _) + | InstanceKind::AsyncDropGlue(def_id, _) + | InstanceKind::AsyncDropGlueCtorShim(def_id, _) + | InstanceKind::EiiShim { def_id, .. } => def_id, } } @@ -257,7 +276,9 @@ impl<'tcx> InstanceKind<'tcx> { match self { ty::InstanceKind::Item(def) => Some(def), ty::InstanceKind::DropGlue(def_id, Some(_)) - | InstanceKind::AsyncDropGlueCtorShim(def_id, Some(_)) + | InstanceKind::AsyncDropGlueCtorShim(def_id, _) + | InstanceKind::AsyncDropGlue(def_id, _) + | InstanceKind::FutureDropPollShim(def_id, ..) | InstanceKind::ThreadLocalShim(def_id) => Some(def_id), InstanceKind::VTableShim(..) | InstanceKind::ReifyShim(..) @@ -267,9 +288,9 @@ impl<'tcx> InstanceKind<'tcx> { | InstanceKind::ClosureOnceShim { .. } | ty::InstanceKind::ConstructCoroutineInClosureShim { .. } | InstanceKind::DropGlue(..) - | InstanceKind::AsyncDropGlueCtorShim(..) | InstanceKind::CloneShim(..) - | InstanceKind::FnPtrAddrShim(..) => None, + | InstanceKind::FnPtrAddrShim(..) + | InstanceKind::EiiShim { .. } => None, } } @@ -292,7 +313,9 @@ impl<'tcx> InstanceKind<'tcx> { let def_id = match *self { ty::InstanceKind::Item(def) => def, ty::InstanceKind::DropGlue(_, Some(_)) => return false, - ty::InstanceKind::AsyncDropGlueCtorShim(_, Some(_)) => return false, + ty::InstanceKind::AsyncDropGlueCtorShim(_, ty) => return ty.is_coroutine(), + ty::InstanceKind::FutureDropPollShim(_, _, _) => return false, + ty::InstanceKind::AsyncDropGlue(_, _) => return false, ty::InstanceKind::ThreadLocalShim(_) => return false, _ => return true, }; @@ -325,11 +348,13 @@ impl<'tcx> InstanceKind<'tcx> { | InstanceKind::FnPtrAddrShim(..) | InstanceKind::FnPtrShim(..) | InstanceKind::DropGlue(_, Some(_)) - | InstanceKind::AsyncDropGlueCtorShim(_, Some(_)) => false, + | InstanceKind::FutureDropPollShim(..) + | InstanceKind::AsyncDropGlue(_, _) + | InstanceKind::AsyncDropGlueCtorShim(_, _) + | InstanceKind::EiiShim { .. } => false, InstanceKind::ClosureOnceShim { .. } | InstanceKind::ConstructCoroutineInClosureShim { .. } | InstanceKind::DropGlue(..) - | InstanceKind::AsyncDropGlueCtorShim(..) | InstanceKind::Item(_) | InstanceKind::Intrinsic(..) | InstanceKind::ReifyShim(..) @@ -406,8 +431,17 @@ pub fn fmt_instance( InstanceKind::DropGlue(_, Some(ty)) => write!(f, " - shim(Some({ty}))"), InstanceKind::CloneShim(_, ty) => write!(f, " - shim({ty})"), InstanceKind::FnPtrAddrShim(_, ty) => write!(f, " - shim({ty})"), - InstanceKind::AsyncDropGlueCtorShim(_, None) => write!(f, " - shim(None)"), - InstanceKind::AsyncDropGlueCtorShim(_, Some(ty)) => write!(f, " - shim(Some({ty}))"), + InstanceKind::FutureDropPollShim(_, proxy_ty, impl_ty) => { + write!(f, " - dropshim({proxy_ty}-{impl_ty})") + } + InstanceKind::AsyncDropGlue(_, ty) => write!(f, " - shim({ty})"), + InstanceKind::AsyncDropGlueCtorShim(_, ty) => write!(f, " - shim(Some({ty}))"), + InstanceKind::EiiShim { def_id: _, extern_item, chosen_impl, weak_linkage: true } => { + write!(f, " - shim(eii: {extern_item:?} -> {chosen_impl:?} [weak]") + } + InstanceKind::EiiShim { def_id: _, extern_item, chosen_impl, weak_linkage: false } => { + write!(f, " - shim(eii: {extern_item:?} -> {chosen_impl:?})") + } } } @@ -425,8 +459,61 @@ impl<'tcx> fmt::Display for Instance<'tcx> { } } +// async_drop_in_place::coroutine.poll, when T is a standart coroutine, +// should be resolved to this coroutine's future_drop_poll (through FutureDropPollShim proxy). +// async_drop_in_place::coroutine>::coroutine.poll, +// when T is a standart coroutine, should be resolved to this coroutine's future_drop_poll. +// async_drop_in_place::coroutine>::coroutine.poll, +// when T is not a coroutine, should be resolved to the innermost +// async_drop_in_place::coroutine's poll function (through FutureDropPollShim proxy) +fn resolve_async_drop_poll<'tcx>(mut cor_ty: Ty<'tcx>) -> Instance<'tcx> { + let first_cor = cor_ty; + let ty::Coroutine(poll_def_id, proxy_args) = first_cor.kind() else { + bug!(); + }; + let poll_def_id = *poll_def_id; + let mut child_ty = cor_ty; + loop { + if let ty::Coroutine(child_def, child_args) = child_ty.kind() { + cor_ty = child_ty; + if *child_def == poll_def_id { + child_ty = child_args.first().unwrap().expect_ty(); + continue; + } else { + return Instance { + def: ty::InstanceKind::FutureDropPollShim(poll_def_id, first_cor, cor_ty), + args: proxy_args, + }; + } + } else { + let ty::Coroutine(_, child_args) = cor_ty.kind() else { + bug!(); + }; + if first_cor != cor_ty { + return Instance { + def: ty::InstanceKind::FutureDropPollShim(poll_def_id, first_cor, cor_ty), + args: proxy_args, + }; + } else { + return Instance { + def: ty::InstanceKind::AsyncDropGlue(poll_def_id, cor_ty), + args: child_args, + }; + } + } + } +} + impl<'tcx> Instance<'tcx> { - pub fn new(def_id: DefId, args: GenericArgsRef<'tcx>) -> Instance<'tcx> { + /// Creates a new [`InstanceKind::Item`] from the `def_id` and `args`. + /// + /// Note that this item corresponds to the body of `def_id` directly, which + /// likely does not make sense for trait items which need to be resolved to an + /// implementation, and which may not even have a body themselves. Usages of + /// this function should probably use [`Instance::expect_resolve`], or if run + /// in a polymorphic environment or within a lint (that may encounter ambiguity) + /// [`Instance::try_resolve`] instead. + pub fn new_raw(def_id: DefId, args: GenericArgsRef<'tcx>) -> Instance<'tcx> { assert!( !args.has_escaping_bound_vars(), "args of instance {def_id:?} has escaping bound vars: {args:?}" @@ -445,7 +532,7 @@ impl<'tcx> Instance<'tcx> { } }); - Instance::new(def_id, args) + Instance::new_raw(def_id, args) } #[inline] @@ -538,7 +625,7 @@ impl<'tcx> Instance<'tcx> { let type_length = type_length(args); if !tcx.type_length_limit().value_within_limit(type_length) { let (shrunk, written_to_path) = - shrunk_instance_name(tcx, Instance::new(def_id, args)); + shrunk_instance_name(tcx, Instance::new_raw(def_id, args)); let mut path = PathBuf::new(); let was_written = if let Some(path2) = written_to_path { path = path2; @@ -708,7 +795,7 @@ impl<'tcx> Instance<'tcx> { match needs_fn_once_adapter_shim(actual_kind, requested_kind) { Ok(true) => Instance::fn_once_adapter_instance(tcx, def_id, args), - _ => Instance::new(def_id, args), + _ => Instance::new_raw(def_id, args), } } @@ -720,7 +807,7 @@ impl<'tcx> Instance<'tcx> { ty::TypingEnv::fully_monomorphized(), def_id, args, - ty.ty_adt_def().and_then(|adt| tcx.hir().span_if_local(adt.did())).unwrap_or(DUMMY_SP), + ty.ty_adt_def().and_then(|adt| tcx.hir_span_if_local(adt.did())).unwrap_or(DUMMY_SP), ) } @@ -732,10 +819,19 @@ impl<'tcx> Instance<'tcx> { ty::TypingEnv::fully_monomorphized(), def_id, args, - ty.ty_adt_def().and_then(|adt| tcx.hir().span_if_local(adt.did())).unwrap_or(DUMMY_SP), + ty.ty_adt_def().and_then(|adt| tcx.hir_span_if_local(adt.did())).unwrap_or(DUMMY_SP), ) } + pub fn resolve_async_drop_in_place_poll( + tcx: TyCtxt<'tcx>, + def_id: DefId, + ty: Ty<'tcx>, + ) -> ty::Instance<'tcx> { + let args = tcx.mk_args(&[ty.into()]); + Instance::expect_resolve(tcx, ty::TypingEnv::fully_monomorphized(), def_id, args, DUMMY_SP) + } + #[instrument(level = "debug", skip(tcx), ret)] pub fn fn_once_adapter_instance( tcx: TyCtxt<'tcx>, @@ -746,7 +842,7 @@ impl<'tcx> Instance<'tcx> { let call_once = tcx .associated_items(fn_once) .in_definition_order() - .find(|it| it.kind == ty::AssocKind::Fn) + .find(|it| it.is_fn()) .unwrap() .def_id; let track_caller = @@ -800,6 +896,9 @@ impl<'tcx> Instance<'tcx> { }; if tcx.is_lang_item(trait_item_id, coroutine_callable_item) { + if tcx.is_async_drop_in_place_coroutine(coroutine_def_id) { + return Some(resolve_async_drop_poll(rcvr_args.type_at(0))); + } let ty::Coroutine(_, id_args) = *tcx.type_of(coroutine_def_id).skip_binder().kind() else { bug!() @@ -822,7 +921,7 @@ impl<'tcx> Instance<'tcx> { // This is important for `Iterator`'s combinators, but also useful for // adding future default methods to `Future`, for instance. debug_assert!(tcx.defaultness(trait_item_id).has_value()); - Some(Instance::new(trait_item_id, rcvr_args)) + Some(Instance::new_raw(trait_item_id, rcvr_args)) } } diff --git a/compiler/rustc_middle/src/ty/layout.rs b/compiler/rustc_middle/src/ty/layout.rs index ebb6a8c08a54c..7ebfebea44e56 100644 --- a/compiler/rustc_middle/src/ty/layout.rs +++ b/compiler/rustc_middle/src/ty/layout.rs @@ -1265,9 +1265,7 @@ pub fn fn_can_unwind(tcx: TyCtxt<'_>, fn_def_id: Option, abi: ExternAbi) | CCmseNonSecureCall | CCmseNonSecureEntry | Unadjusted => false, - Rust | RustCall | RustCold | RustIntrinsic => { - tcx.sess.panic_strategy() == PanicStrategy::Unwind - } + Rust | RustCall | RustCold => tcx.sess.panic_strategy() == PanicStrategy::Unwind, } } diff --git a/compiler/rustc_middle/src/ty/list.rs b/compiler/rustc_middle/src/ty/list.rs index 0fd370a56195a..0cf5820959ee5 100644 --- a/compiler/rustc_middle/src/ty/list.rs +++ b/compiler/rustc_middle/src/ty/list.rs @@ -7,9 +7,9 @@ use std::{fmt, iter, mem, ptr, slice}; use rustc_data_structures::aligned::{Aligned, align_of}; use rustc_data_structures::sync::DynSync; use rustc_serialize::{Encodable, Encoder}; +use rustc_type_ir::FlagComputation; -use super::flags::FlagComputation; -use super::{DebruijnIndex, TypeFlags}; +use super::{DebruijnIndex, TyCtxt, TypeFlags}; use crate::arena::Arena; /// `List` is a bit like `&[T]`, but with some critical differences. @@ -299,8 +299,8 @@ impl TypeInfo { } } -impl From for TypeInfo { - fn from(computation: FlagComputation) -> TypeInfo { +impl<'tcx> From>> for TypeInfo { + fn from(computation: FlagComputation>) -> TypeInfo { TypeInfo { flags: computation.flags, outer_exclusive_binder: computation.outer_exclusive_binder, diff --git a/compiler/rustc_middle/src/ty/mod.rs b/compiler/rustc_middle/src/ty/mod.rs index ac98cbc8d6cba..bbc8892c28236 100644 --- a/compiler/rustc_middle/src/ty/mod.rs +++ b/compiler/rustc_middle/src/ty/mod.rs @@ -17,7 +17,7 @@ use std::hash::{Hash, Hasher}; use std::marker::PhantomData; use std::num::NonZero; use std::ptr::NonNull; -use std::{fmt, str}; +use std::{fmt, iter, str}; pub use adt::*; pub use assoc::*; @@ -38,7 +38,9 @@ use rustc_errors::{Diag, ErrorGuaranteed}; use rustc_hir::LangItem; use rustc_hir::def::{CtorKind, CtorOf, DefKind, DocLinkResMap, LifetimeRes, Res}; use rustc_hir::def_id::{CrateNum, DefId, DefIdMap, LocalDefId, LocalDefIdMap}; +use rustc_hir::definitions::DisambiguatorState; use rustc_index::IndexVec; +use rustc_index::bit_set::BitMatrix; use rustc_macros::{ Decodable, Encodable, HashStable, TyDecodable, TyEncodable, TypeFoldable, TypeVisitable, extension, @@ -48,9 +50,18 @@ use rustc_serialize::{Decodable, Encodable}; use rustc_session::lint::LintBuffer; pub use rustc_session::lint::RegisteredTools; use rustc_span::hygiene::MacroKind; -use rustc_span::{ExpnId, ExpnKind, Ident, Span, Symbol, kw, sym}; +use rustc_span::{DUMMY_SP, ExpnId, ExpnKind, Ident, Span, Symbol, kw, sym}; +pub use rustc_type_ir::data_structures::{DelayedMap, DelayedSet}; +#[allow( + hidden_glob_reexports, + rustc::usage_of_type_ir_inherent, + rustc::non_glob_import_of_type_ir_inherent +)] +use rustc_type_ir::inherent; pub use rustc_type_ir::relate::VarianceDiagInfo; pub use rustc_type_ir::*; +#[allow(hidden_glob_reexports, unused_imports)] +use rustc_type_ir::{InferCtxtLike, Interner}; use tracing::{debug, instrument}; pub use vtable::*; use {rustc_ast as ast, rustc_attr_data_structures as attr, rustc_hir as hir}; @@ -103,13 +114,14 @@ pub use self::visit::*; use crate::error::{OpaqueHiddenTypeMismatch, TypeMismatchReason}; use crate::metadata::ModChild; use crate::middle::privacy::EffectiveVisibilities; -use crate::mir::{Body, CoroutineLayout}; +use crate::mir::{Body, CoroutineLayout, CoroutineSavedLocal, SourceInfo}; use crate::query::{IntoQueryParam, Providers}; use crate::ty; use crate::ty::codec::{TyDecoder, TyEncoder}; pub use crate::ty::diagnostics::*; use crate::ty::fast_reject::SimplifiedType; use crate::ty::util::Discr; +use crate::ty::walk::TypeWalker; pub mod abstract_const; pub mod adjustment; @@ -117,7 +129,6 @@ pub mod cast; pub mod codec; pub mod error; pub mod fast_reject; -pub mod flags; pub mod inhabitedness; pub mod layout; pub mod normalize_erasing_regions; @@ -128,7 +139,6 @@ pub mod significant_drop_order; pub mod trait_def; pub mod util; pub mod vtable; -pub mod walk; mod adt; mod assoc; @@ -172,7 +182,7 @@ pub struct ResolverGlobalCtxt { pub extern_crate_map: UnordMap, pub maybe_unused_trait_imports: FxIndexSet, pub module_children: LocalDefIdMap>, - pub glob_map: FxHashMap>, + pub glob_map: FxIndexMap>, pub main_def: Option, pub trait_impls: FxIndexMap>, /// A list of proc macro LocalDefIds, written out in the order in which @@ -208,6 +218,8 @@ pub struct ResolverAstLowering { pub node_id_to_def_id: NodeMap, + pub disambiguator: DisambiguatorState, + pub trait_map: NodeMap>, /// List functions and methods for which lifetime elision was successful. pub lifetime_elision_allowed: FxHashSet, @@ -297,7 +309,10 @@ impl Visibility { } else if restricted_id == tcx.parent_module_from_def_id(def_id).to_local_def_id() { "pub(self)".to_string() } else { - format!("pub({})", tcx.item_name(restricted_id.to_def_id())) + format!( + "pub(in crate{})", + tcx.def_path(restricted_id.to_def_id()).to_string_no_crate_verbose() + ) } } ty::Visibility::Public => "pub".to_string(), @@ -539,6 +554,13 @@ impl<'tcx> TypeFoldable> for Term<'tcx> { ty::TermKind::Const(ct) => ct.try_fold_with(folder).map(Into::into), } } + + fn fold_with>>(self, folder: &mut F) -> Self { + match self.unpack() { + ty::TermKind::Ty(ty) => ty.fold_with(folder).into(), + ty::TermKind::Const(ct) => ct.fold_with(folder).into(), + } + } } impl<'tcx> TypeVisitable> for Term<'tcx> { @@ -626,6 +648,20 @@ impl<'tcx> Term<'tcx> { TermKind::Const(ct) => ct.is_ct_infer(), } } + + /// Iterator that walks `self` and any types reachable from + /// `self`, in depth-first order. Note that just walks the types + /// that appear in `self`, it does not descend into the fields of + /// structs or variants. For example: + /// + /// ```text + /// isize => { isize } + /// Foo> => { Foo>, Bar, isize } + /// [isize] => { [isize], isize } + /// ``` + pub fn walk(self) -> TypeWalker> { + TypeWalker::new(self.into()) + } } const TAG_MASK: usize = 0b11; @@ -782,7 +818,22 @@ pub struct OpaqueHiddenType<'tcx> { pub ty: Ty<'tcx>, } +/// Whether we're currently in HIR typeck or MIR borrowck. +#[derive(Debug, Clone, Copy)] +pub enum DefiningScopeKind { + /// During writeback in typeck, we don't care about regions and simply + /// erase them. This means we also don't check whether regions are + /// universal in the opaque type key. This will only be checked in + /// MIR borrowck. + HirTypeck, + MirBorrowck, +} + impl<'tcx> OpaqueHiddenType<'tcx> { + pub fn new_error(tcx: TyCtxt<'tcx>, guar: ErrorGuaranteed) -> OpaqueHiddenType<'tcx> { + OpaqueHiddenType { span: DUMMY_SP, ty: Ty::new_error(tcx, guar) } + } + pub fn build_mismatch_error( &self, other: &Self, @@ -808,8 +859,7 @@ impl<'tcx> OpaqueHiddenType<'tcx> { self, opaque_type_key: OpaqueTypeKey<'tcx>, tcx: TyCtxt<'tcx>, - // typeck errors have subpar spans for opaque types, so delay error reporting until borrowck. - ignore_errors: bool, + defining_scope_kind: DefiningScopeKind, ) -> Self { let OpaqueTypeKey { def_id, args } = opaque_type_key; @@ -828,10 +878,19 @@ impl<'tcx> OpaqueHiddenType<'tcx> { let map = args.iter().zip(id_args).collect(); debug!("map = {:#?}", map); - // Convert the type from the function into a type valid outside - // the function, by replacing invalid regions with 'static, - // after producing an error for each of them. - self.fold_with(&mut opaque_types::ReverseMapper::new(tcx, map, self.span, ignore_errors)) + // Convert the type from the function into a type valid outside by mapping generic + // parameters to into the context of the opaque. + // + // We erase regions when doing this during HIR typeck. + let this = match defining_scope_kind { + DefiningScopeKind::HirTypeck => tcx.erase_regions(self), + DefiningScopeKind::MirBorrowck => self, + }; + let result = this.fold_with(&mut opaque_types::ReverseMapper::new(tcx, map, self.span)); + if cfg!(debug_assertions) && matches!(defining_scope_kind, DefiningScopeKind::HirTypeck) { + assert_eq!(result.ty, tcx.erase_regions(result.ty)); + } + result } } @@ -953,7 +1012,7 @@ impl<'tcx> rustc_type_ir::Flags for Clauses<'tcx> { /// environment. `ParamEnv` is the type that represents this information. See the /// [dev guide chapter][param_env_guide] for more information. /// -/// [param_env_guide]: https://rustc-dev-guide.rust-lang.org/param_env/param_env_summary.html +/// [param_env_guide]: https://rustc-dev-guide.rust-lang.org/typing_parameter_envs.html #[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] #[derive(HashStable, TypeVisitable, TypeFoldable)] pub struct ParamEnv<'tcx> { @@ -977,7 +1036,7 @@ impl<'tcx> ParamEnv<'tcx> { /// to use an empty environment. See the [dev guide section][param_env_guide] /// for information on what a `ParamEnv` is and how to acquire one. /// - /// [param_env_guide]: https://rustc-dev-guide.rust-lang.org/param_env/param_env_summary.html + /// [param_env_guide]: https://rustc-dev-guide.rust-lang.org/typing_parameter_envs.html #[inline] pub fn empty() -> Self { Self::new(ListWithCachedTypeInfo::empty()) @@ -1119,17 +1178,13 @@ pub struct PseudoCanonicalInput<'tcx, T> { pub struct Destructor { /// The `DefId` of the destructor method pub did: DefId, - /// The constness of the destructor method - pub constness: hir::Constness, } // FIXME: consider combining this definition with regular `Destructor` #[derive(Copy, Clone, Debug, HashStable, Encodable, Decodable)] pub struct AsyncDestructor { - /// The `DefId` of the async destructor future constructor - pub ctor: DefId, - /// The `DefId` of the async destructor future type - pub future: DefId, + /// The `DefId` of the `impl AsyncDrop` + pub impl_did: LocalDefId, } #[derive(Clone, Copy, PartialEq, Eq, HashStable, TyEncodable, TyDecodable)] @@ -1445,7 +1500,7 @@ impl<'tcx> TyCtxt<'tcx> { pub fn provided_trait_methods(self, id: DefId) -> impl 'tcx + Iterator { self.associated_items(id) .in_definition_order() - .filter(move |item| item.kind == AssocKind::Fn && item.defaultness(self).has_value()) + .filter(move |item| item.is_fn() && item.defaultness(self).has_value()) } pub fn repr_options_of_def(self, did: LocalDefId) -> ReprOptions { @@ -1591,8 +1646,11 @@ impl<'tcx> TyCtxt<'tcx> { /// return-position `impl Trait` from a trait, then provide the source info /// about where that RPITIT came from. pub fn opt_rpitit_info(self, def_id: DefId) -> Option { - if let DefKind::AssocTy = self.def_kind(def_id) { - self.associated_item(def_id).opt_rpitit_info + if let DefKind::AssocTy = self.def_kind(def_id) + && let AssocKind::Type { data: AssocTypeData::Rpitit(rpitit_info) } = + self.associated_item(def_id).kind + { + Some(rpitit_info) } else { None } @@ -1673,23 +1731,25 @@ impl<'tcx> TyCtxt<'tcx> { /// Returns the possibly-auto-generated MIR of a [`ty::InstanceKind`]. #[instrument(skip(self), level = "debug")] pub fn instance_mir(self, instance: ty::InstanceKind<'tcx>) -> &'tcx Body<'tcx> { - match instance { - ty::InstanceKind::Item(def) => { - debug!("calling def_kind on def: {:?}", def); - let def_kind = self.def_kind(def); - debug!("returned from def_kind: {:?}", def_kind); - match def_kind { - DefKind::Const - | DefKind::Static { .. } - | DefKind::AssocConst - | DefKind::Ctor(..) - | DefKind::AnonConst - | DefKind::InlineConst => self.mir_for_ctfe(def), - // If the caller wants `mir_for_ctfe` of a function they should not be using - // `instance_mir`, so we'll assume const fn also wants the optimized version. - _ => self.optimized_mir(def), - } + let item_mir = |def| { + debug!("calling def_kind on def: {:?}", def); + let def_kind = self.def_kind(def); + debug!("returned from def_kind: {:?}", def_kind); + match def_kind { + DefKind::Const + | DefKind::Static { .. } + | DefKind::AssocConst + | DefKind::Ctor(..) + | DefKind::AnonConst + | DefKind::InlineConst => self.mir_for_ctfe(def), + // If the caller wants `mir_for_ctfe` of a function they should not be using + // `instance_mir`, so we'll assume const fn also wants the optimized version. + _ => self.optimized_mir(def), } + }; + + match instance { + ty::InstanceKind::Item(def) => item_mir(def), ty::InstanceKind::VTableShim(..) | ty::InstanceKind::ReifyShim(..) | ty::InstanceKind::Intrinsic(..) @@ -1697,11 +1757,14 @@ impl<'tcx> TyCtxt<'tcx> { | ty::InstanceKind::Virtual(..) | ty::InstanceKind::ClosureOnceShim { .. } | ty::InstanceKind::ConstructCoroutineInClosureShim { .. } + | ty::InstanceKind::FutureDropPollShim(..) | ty::InstanceKind::DropGlue(..) | ty::InstanceKind::CloneShim(..) | ty::InstanceKind::ThreadLocalShim(..) | ty::InstanceKind::FnPtrAddrShim(..) - | ty::InstanceKind::AsyncDropGlueCtorShim(..) => self.mir_shims(instance), + | ty::InstanceKind::AsyncDropGlueCtorShim(..) + | ty::InstanceKind::AsyncDropGlue(..) + | ty::InstanceKind::EiiShim { .. } => self.mir_shims(instance), } } @@ -1817,12 +1880,12 @@ impl<'tcx> TyCtxt<'tcx> { self.def_kind(trait_def_id) == DefKind::TraitAlias } - /// Returns layout of a coroutine. Layout might be unavailable if the + /// Returns layout of a non-async-drop coroutine. Layout might be unavailable if the /// coroutine is tainted by errors. /// /// Takes `coroutine_kind` which can be acquired from the `CoroutineArgs::kind_ty`, /// e.g. `args.as_coroutine().kind_ty()`. - pub fn coroutine_layout( + fn ordinary_coroutine_layout( self, def_id: DefId, coroutine_kind_ty: Ty<'tcx>, @@ -1856,6 +1919,55 @@ impl<'tcx> TyCtxt<'tcx> { } } + /// Returns layout of a `async_drop_in_place::{closure}` coroutine + /// (returned from `async fn async_drop_in_place(..)`). + /// Layout might be unavailable if the coroutine is tainted by errors. + fn async_drop_coroutine_layout( + self, + def_id: DefId, + args: GenericArgsRef<'tcx>, + ) -> Option<&'tcx CoroutineLayout<'tcx>> { + let instance = InstanceKind::AsyncDropGlue(def_id, Ty::new_coroutine(self, def_id, args)); + self.mir_shims(instance).coroutine_layout_raw() + } + + /// Returns layout of a coroutine. Layout might be unavailable if the + /// coroutine is tainted by errors. + pub fn coroutine_layout( + self, + def_id: DefId, + args: GenericArgsRef<'tcx>, + ) -> Option<&'tcx CoroutineLayout<'tcx>> { + if self.is_async_drop_in_place_coroutine(def_id) { + // layout of `async_drop_in_place::{closure}` in case, + // when T is a coroutine, contains this internal coroutine's ptr in upvars + // and doesn't require any locals. Here is an `empty coroutine's layout` + let arg_cor_ty = args.first().unwrap().expect_ty(); + if arg_cor_ty.is_coroutine() { + let span = self.def_span(def_id); + let source_info = SourceInfo::outermost(span); + // Even minimal, empty coroutine has 3 states (RESERVED_VARIANTS), + // so variant_fields and variant_source_info should have 3 elements. + let variant_fields: IndexVec> = + iter::repeat(IndexVec::new()).take(CoroutineArgs::RESERVED_VARIANTS).collect(); + let variant_source_info: IndexVec = + iter::repeat(source_info).take(CoroutineArgs::RESERVED_VARIANTS).collect(); + let proxy_layout = CoroutineLayout { + field_tys: [].into(), + field_names: [].into(), + variant_fields, + variant_source_info, + storage_conflicts: BitMatrix::new(0, 0), + }; + return Some(self.arena.alloc(proxy_layout)); + } else { + self.async_drop_coroutine_layout(def_id, args) + } + } else { + self.ordinary_coroutine_layout(def_id, args.as_coroutine().kind_ty()) + } + } + /// Given the `DefId` of an impl, returns the `DefId` of the trait it implements. /// If it implements no trait, returns `None`. pub fn trait_id_of_impl(self, def_id: DefId) -> Option { @@ -1887,6 +1999,10 @@ impl<'tcx> TyCtxt<'tcx> { None } + pub fn is_exportable(self, def_id: DefId) -> bool { + self.exportable_items(def_id.krate).contains(&def_id) + } + /// Check if the given `DefId` is `#\[automatically_derived\]`, *and* /// whether it was produced by expanding a builtin derive macro. pub fn is_builtin_derived(self, def_id: DefId) -> bool { @@ -1920,15 +2036,15 @@ impl<'tcx> TyCtxt<'tcx> { /// Hygienically compares a use-site name (`use_name`) for a field or an associated item with /// its supposed definition name (`def_name`). The method also needs `DefId` of the supposed /// definition's parent/scope to perform comparison. - pub fn hygienic_eq(self, use_name: Ident, def_name: Ident, def_parent_def_id: DefId) -> bool { - // We could use `Ident::eq` here, but we deliberately don't. The name + pub fn hygienic_eq(self, use_ident: Ident, def_ident: Ident, def_parent_def_id: DefId) -> bool { + // We could use `Ident::eq` here, but we deliberately don't. The identifier // comparison fails frequently, and we want to avoid the expensive // `normalize_to_macros_2_0()` calls required for the span comparison whenever possible. - use_name.name == def_name.name - && use_name + use_ident.name == def_ident.name + && use_ident .span .ctxt() - .hygienic_eq(def_name.span.ctxt(), self.expn_that_defined(def_parent_def_id)) + .hygienic_eq(def_ident.span.ctxt(), self.expn_that_defined(def_parent_def_id)) } pub fn adjust_ident(self, mut ident: Ident, scope: DefId) -> Ident { diff --git a/compiler/rustc_middle/src/ty/opaque_types.rs b/compiler/rustc_middle/src/ty/opaque_types.rs index 56c44c8a84c04..9445a18ad76b1 100644 --- a/compiler/rustc_middle/src/ty/opaque_types.rs +++ b/compiler/rustc_middle/src/ty/opaque_types.rs @@ -20,12 +20,6 @@ pub(super) struct ReverseMapper<'tcx> { /// for an explanation of this field. do_not_error: bool, - /// We do not want to emit any errors in typeck because - /// the spans in typeck are subpar at the moment. - /// Borrowck will do the same work again (this time with - /// lifetime information) and thus report better errors. - ignore_errors: bool, - /// Span of function being checked. span: Span, } @@ -35,9 +29,8 @@ impl<'tcx> ReverseMapper<'tcx> { tcx: TyCtxt<'tcx>, map: FxHashMap, GenericArg<'tcx>>, span: Span, - ignore_errors: bool, ) -> Self { - Self { tcx, map, do_not_error: false, ignore_errors, span } + Self { tcx, map, do_not_error: false, span } } fn fold_kind_no_missing_regions_error(&mut self, kind: GenericArg<'tcx>) -> GenericArg<'tcx> { @@ -102,7 +95,7 @@ impl<'tcx> TypeFolder> for ReverseMapper<'tcx> { #[instrument(skip(self), level = "debug")] fn fold_region(&mut self, r: ty::Region<'tcx>) -> ty::Region<'tcx> { - match *r { + match r.kind() { // Ignore bound regions and `'static` regions that appear in the // type, we only need to remap regions that reference lifetimes // from the function declaration. @@ -176,20 +169,18 @@ impl<'tcx> TypeFolder> for ReverseMapper<'tcx> { Some(u) => panic!("type mapped to unexpected kind: {u:?}"), None => { debug!(?param, ?self.map); - if !self.ignore_errors { - self.tcx - .dcx() - .struct_span_err( - self.span, - format!( - "type parameter `{ty}` is part of concrete type but not \ + let guar = self + .tcx + .dcx() + .struct_span_err( + self.span, + format!( + "type parameter `{ty}` is part of concrete type but not \ used in parameter list for the `impl Trait` type alias" - ), - ) - .emit(); - } - - Ty::new_misc_error(self.tcx) + ), + ) + .emit(); + Ty::new_error(self.tcx, guar) } } } @@ -217,8 +208,7 @@ impl<'tcx> TypeFolder> for ReverseMapper<'tcx> { ct: ct.to_string(), span: self.span, }) - .emit_unless(self.ignore_errors); - + .emit(); ty::Const::new_error(self.tcx, guar) } } diff --git a/compiler/rustc_middle/src/ty/parameterized.rs b/compiler/rustc_middle/src/ty/parameterized.rs index 19e2b57456327..d55d057c6d32f 100644 --- a/compiler/rustc_middle/src/ty/parameterized.rs +++ b/compiler/rustc_middle/src/ty/parameterized.rs @@ -27,6 +27,10 @@ impl ParameterizedOverTcx for IndexVe type Value<'tcx> = IndexVec>; } +impl ParameterizedOverTcx for Vec { + type Value<'tcx> = Vec>; +} + impl ParameterizedOverTcx for UnordMap { type Value<'tcx> = UnordMap>; } @@ -65,9 +69,11 @@ trivially_parameterized_over_tcx! { crate::middle::lib_features::FeatureStability, crate::middle::resolve_bound_vars::ObjectLifetimeDefault, crate::mir::ConstQualifs, + ty::AsyncDestructor, ty::AssocItemContainer, ty::Asyncness, ty::DeducedParamAttrs, + ty::Destructor, ty::Generics, ty::ImplPolarity, ty::ImplTraitInTraitData, @@ -85,6 +91,8 @@ trivially_parameterized_over_tcx! { rustc_attr_data_structures::DefaultBodyStability, rustc_attr_data_structures::Deprecation, rustc_attr_data_structures::Stability, + rustc_attr_data_structures::EIIDecl, + rustc_attr_data_structures::EIIImpl, rustc_hir::Constness, rustc_hir::Defaultness, rustc_hir::Safety, @@ -111,7 +119,7 @@ trivially_parameterized_over_tcx! { rustc_span::Span, rustc_span::Symbol, rustc_span::def_id::DefPathHash, - rustc_span::hygiene::SyntaxContextData, + rustc_span::hygiene::SyntaxContextKey, rustc_span::Ident, rustc_type_ir::Variance, rustc_hir::Attribute, diff --git a/compiler/rustc_middle/src/ty/pattern.rs b/compiler/rustc_middle/src/ty/pattern.rs index 4cad1ab209916..5af9b17dd7777 100644 --- a/compiler/rustc_middle/src/ty/pattern.rs +++ b/compiler/rustc_middle/src/ty/pattern.rs @@ -1,14 +1,54 @@ use std::fmt; use rustc_data_structures::intern::Interned; -use rustc_macros::{HashStable, TyDecodable, TyEncodable, TypeFoldable, TypeVisitable}; +use rustc_macros::HashStable; +use rustc_type_ir::ir_print::IrPrint; +use rustc_type_ir::{ + FlagComputation, Flags, {self as ir}, +}; +use super::TyCtxt; use crate::ty; +pub type PatternKind<'tcx> = ir::PatternKind>; + #[derive(Copy, Clone, PartialEq, Eq, Hash, HashStable)] #[rustc_pass_by_value] pub struct Pattern<'tcx>(pub Interned<'tcx, PatternKind<'tcx>>); +impl<'tcx> Flags for Pattern<'tcx> { + fn flags(&self) -> rustc_type_ir::TypeFlags { + match &**self { + ty::PatternKind::Range { start, end } => { + FlagComputation::for_const_kind(&start.kind()).flags + | FlagComputation::for_const_kind(&end.kind()).flags + } + ty::PatternKind::Or(pats) => { + let mut flags = pats[0].flags(); + for pat in pats[1..].iter() { + flags |= pat.flags(); + } + flags + } + } + } + + fn outer_exclusive_binder(&self) -> rustc_type_ir::DebruijnIndex { + match &**self { + ty::PatternKind::Range { start, end } => { + start.outer_exclusive_binder().max(end.outer_exclusive_binder()) + } + ty::PatternKind::Or(pats) => { + let mut idx = pats[0].outer_exclusive_binder(); + for pat in pats[1..].iter() { + idx = idx.max(pat.outer_exclusive_binder()); + } + idx + } + } + } +} + impl<'tcx> std::ops::Deref for Pattern<'tcx> { type Target = PatternKind<'tcx>; @@ -23,9 +63,9 @@ impl<'tcx> fmt::Debug for Pattern<'tcx> { } } -impl<'tcx> fmt::Debug for PatternKind<'tcx> { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match *self { +impl<'tcx> IrPrint> for TyCtxt<'tcx> { + fn print(t: &PatternKind<'tcx>, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match *t { PatternKind::Range { start, end } => { write!(f, "{start}")?; @@ -51,12 +91,30 @@ impl<'tcx> fmt::Debug for PatternKind<'tcx> { write!(f, "..={end}") } + PatternKind::Or(patterns) => { + write!(f, "(")?; + let mut first = true; + for pat in patterns { + if first { + first = false + } else { + write!(f, " | ")?; + } + write!(f, "{pat:?}")?; + } + write!(f, ")") + } } } + + fn print_debug(t: &PatternKind<'tcx>, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { + Self::print(t, fmt) + } } -#[derive(Clone, PartialEq, Eq, Hash)] -#[derive(HashStable, TyEncodable, TyDecodable, TypeVisitable, TypeFoldable)] -pub enum PatternKind<'tcx> { - Range { start: ty::Const<'tcx>, end: ty::Const<'tcx> }, +impl<'tcx> rustc_type_ir::inherent::IntoKind for Pattern<'tcx> { + type Kind = PatternKind<'tcx>; + fn kind(self) -> Self::Kind { + *self + } } diff --git a/compiler/rustc_middle/src/ty/predicate.rs b/compiler/rustc_middle/src/ty/predicate.rs index 02e316dfc3db7..551d816941b6e 100644 --- a/compiler/rustc_middle/src/ty/predicate.rs +++ b/compiler/rustc_middle/src/ty/predicate.rs @@ -121,11 +121,10 @@ impl<'tcx> Predicate<'tcx> { /// unsoundly accept some programs. See #91068. #[inline] pub fn allow_normalization(self) -> bool { - // Keep this in sync with the one in `rustc_type_ir::inherent`! match self.kind().skip_binder() { - PredicateKind::Clause(ClauseKind::WellFormed(_)) - | PredicateKind::AliasRelate(..) - | PredicateKind::NormalizesTo(..) => false, + PredicateKind::Clause(ClauseKind::WellFormed(_)) | PredicateKind::AliasRelate(..) => { + false + } PredicateKind::Clause(ClauseKind::Trait(_)) | PredicateKind::Clause(ClauseKind::HostEffect(..)) | PredicateKind::Clause(ClauseKind::RegionOutlives(_)) @@ -137,6 +136,7 @@ impl<'tcx> Predicate<'tcx> { | PredicateKind::Coerce(_) | PredicateKind::Clause(ClauseKind::ConstEvaluatable(_)) | PredicateKind::ConstEquate(_, _) + | PredicateKind::NormalizesTo(..) | PredicateKind::Ambiguous => true, } } diff --git a/compiler/rustc_middle/src/ty/print/mod.rs b/compiler/rustc_middle/src/ty/print/mod.rs index dc2040aa5cf85..9172c5d3ab752 100644 --- a/compiler/rustc_middle/src/ty/print/mod.rs +++ b/compiler/rustc_middle/src/ty/print/mod.rs @@ -139,8 +139,7 @@ pub trait Printer<'tcx>: Sized { match key.disambiguated_data.data { DefPathData::Closure => { - // FIXME(async_closures): This is somewhat ugly. - // We need to additionally print the `kind` field of a closure if + // We need to additionally print the `kind` field of a coroutine if // it is desugared from a coroutine-closure. if let Some(hir::CoroutineKind::Desugared( _, @@ -156,6 +155,10 @@ pub trait Printer<'tcx>: Sized { // Closures' own generics are only captures, don't print them. } } + DefPathData::SyntheticCoroutineBody => { + // Synthetic coroutine bodies have no distinct generics, since like + // closures they're all just internal state of the coroutine. + } // This covers both `DefKind::AnonConst` and `DefKind::InlineConst`. // Anon consts doesn't have their own generics, and inline consts' own // generics are their inferred types, so don't print them. @@ -379,7 +382,7 @@ pub fn shrunk_instance_name<'tcx>( return (s, None); } - let path = tcx.output_filenames(()).temp_path_ext("long-type.txt", None); + let path = tcx.output_filenames(()).temp_path_for_diagnostic("long-type.txt"); let written_to_path = std::fs::write(&path, s).ok().map(|_| path); (shrunk, written_to_path) diff --git a/compiler/rustc_middle/src/ty/print/pretty.rs b/compiler/rustc_middle/src/ty/print/pretty.rs index 3281cb4135a0a..0250c777faf79 100644 --- a/compiler/rustc_middle/src/ty/print/pretty.rs +++ b/compiler/rustc_middle/src/ty/print/pretty.rs @@ -6,7 +6,7 @@ use std::ops::{Deref, DerefMut}; use rustc_abi::{ExternAbi, Size}; use rustc_apfloat::Float; use rustc_apfloat::ieee::{Double, Half, Quad, Single}; -use rustc_data_structures::fx::{FxHashMap, FxIndexMap}; +use rustc_data_structures::fx::{FxIndexMap, IndexEntry}; use rustc_data_structures::unord::UnordMap; use rustc_hir as hir; use rustc_hir::LangItem; @@ -63,6 +63,7 @@ thread_local! { static FORCE_TRIMMED_PATH: Cell = const { Cell::new(false) }; static REDUCED_QUERIES: Cell = const { Cell::new(false) }; static NO_VISIBLE_PATH: Cell = const { Cell::new(false) }; + static NO_VISIBLE_PATH_IF_DOC_HIDDEN: Cell = const { Cell::new(false) }; static RTN_MODE: Cell = const { Cell::new(RtnMode::ForDiagnostic) }; } @@ -134,6 +135,8 @@ define_helper!( /// Prevent selection of visible paths. `Display` impl of DefId will prefer /// visible (public) reexports of types as paths. fn with_no_visible_paths(NoVisibleGuard, NO_VISIBLE_PATH); + /// Prevent selection of visible paths if the paths are through a doc hidden path. + fn with_no_visible_paths_if_doc_hidden(NoVisibleIfDocHiddenGuard, NO_VISIBLE_PATH_IF_DOC_HIDDEN); ); #[must_use] @@ -569,6 +572,10 @@ pub trait PrettyPrinter<'tcx>: Printer<'tcx> + fmt::Write { return Ok(false); }; + if self.tcx().is_doc_hidden(visible_parent) && with_no_visible_paths_if_doc_hidden() { + return Ok(false); + } + let actual_parent = self.tcx().opt_parent(def_id); debug!( "try_print_visible_def_path: visible_parent={:?} actual_parent={:?}", @@ -613,7 +620,7 @@ pub trait PrettyPrinter<'tcx>: Printer<'tcx> + fmt::Write { // the children of the visible parent (as was done when computing // `visible_parent_map`), looking for the specific child we currently have and then // have access to the re-exported name. - DefPathData::TypeNs(Some(ref mut name)) if Some(visible_parent) != actual_parent => { + DefPathData::TypeNs(ref mut name) if Some(visible_parent) != actual_parent => { // Item might be re-exported several times, but filter for the one // that's public and whose identifier isn't `_`. let reexport = self @@ -634,7 +641,7 @@ pub trait PrettyPrinter<'tcx>: Printer<'tcx> + fmt::Write { } // Re-exported `extern crate` (#43189). DefPathData::CrateRoot => { - data = DefPathData::TypeNs(Some(self.tcx().crate_name(def_id.krate))); + data = DefPathData::TypeNs(self.tcx().crate_name(def_id.krate)); } _ => {} } @@ -813,7 +820,7 @@ pub trait PrettyPrinter<'tcx>: Printer<'tcx> + fmt::Write { ty::Foreign(def_id) => { p!(print_def_path(def_id, &[])); } - ty::Alias(ty::Projection | ty::Inherent | ty::Weak, ref data) => { + ty::Alias(ty::Projection | ty::Inherent | ty::Free, ref data) => { p!(print(data)) } ty::Placeholder(placeholder) => match placeholder.bound.kind { @@ -1207,7 +1214,7 @@ pub trait PrettyPrinter<'tcx>: Printer<'tcx> + fmt::Write { && assoc .trait_container(tcx) .is_some_and(|def_id| tcx.is_lang_item(def_id, LangItem::Coroutine)) - && assoc.name == rustc_span::sym::Return + && assoc.opt_name() == Some(rustc_span::sym::Return) { if let ty::Coroutine(_, args) = args.type_at(0).kind() { let return_ty = args.as_coroutine().return_ty(); @@ -1230,7 +1237,7 @@ pub trait PrettyPrinter<'tcx>: Printer<'tcx> + fmt::Write { p!(", "); } - p!(write("{} = ", tcx.associated_item(assoc_item_def_id).name)); + p!(write("{} = ", tcx.associated_item(assoc_item_def_id).name())); match term.unpack() { TermKind::Ty(ty) => p!(print(ty)), @@ -2520,7 +2527,7 @@ impl<'tcx> PrettyPrinter<'tcx> for FmtPrinter<'_, 'tcx> { let identify_regions = self.tcx.sess.opts.unstable_opts.identify_regions; - match *region { + match region.kind() { ty::ReEarlyParam(ref data) => data.has_name(), ty::ReLateParam(ty::LateParamRegion { kind, .. }) => kind.is_named(), @@ -2590,7 +2597,7 @@ impl<'tcx> FmtPrinter<'_, 'tcx> { // the user might want to diagnose an error, but there is basically no way // to fit that into a short string. Hence the recommendation to use // `explain_region()` or `note_and_explain_region()`. - match *region { + match region.kind() { ty::ReEarlyParam(data) => { p!(write("{}", data.name)); return Ok(()); @@ -2680,7 +2687,7 @@ impl<'a, 'tcx> ty::TypeFolder> for RegionFolder<'a, 'tcx> { fn fold_region(&mut self, r: ty::Region<'tcx>) -> ty::Region<'tcx> { let name = &mut self.name; - let region = match *r { + let region = match r.kind() { ty::ReBound(db, br) if db >= self.current_index => { *self.region_map.entry(br).or_insert_with(|| name(Some(db), self.current_index, br)) } @@ -2704,7 +2711,7 @@ impl<'a, 'tcx> ty::TypeFolder> for RegionFolder<'a, 'tcx> { } _ => return r, }; - if let ty::ReBound(debruijn1, br) = *region { + if let ty::ReBound(debruijn1, br) = region.kind() { assert_eq!(debruijn1, ty::INNERMOST); ty::Region::new_bound(self.tcx, self.current_index, br) } else { @@ -2927,7 +2934,7 @@ impl<'tcx> FmtPrinter<'_, 'tcx> { fn prepare_region_info(&mut self, value: &ty::Binder<'tcx, T>) where - T: TypeVisitable>, + T: TypeFoldable>, { struct RegionNameCollector<'tcx> { used_region_names: FxHashSet, @@ -3188,7 +3195,7 @@ define_print! { ty::AliasTerm<'tcx> { match self.kind(cx.tcx()) { - ty::AliasTermKind::InherentTy => p!(pretty_print_inherent_projection(*self)), + ty::AliasTermKind::InherentTy | ty::AliasTermKind::InherentConst => p!(pretty_print_inherent_projection(*self)), ty::AliasTermKind::ProjectionTy => { if !(cx.should_print_verbose() || with_reduced_queries()) && cx.tcx().is_impl_trait_in_trait(self.def_id) @@ -3198,7 +3205,8 @@ define_print! { p!(print_def_path(self.def_id, self.args)); } } - | ty::AliasTermKind::WeakTy + ty::AliasTermKind::FreeTy + | ty::AliasTermKind::FreeConst | ty::AliasTermKind::OpaqueTy | ty::AliasTermKind::UnevaluatedConst | ty::AliasTermKind::ProjectionConst => { @@ -3240,7 +3248,7 @@ define_print! { ty::ClauseKind::ConstArgHasType(ct, ty) => { p!("the constant `", print(ct), "` has type `", print(ty), "`") }, - ty::ClauseKind::WellFormed(arg) => p!(print(arg), " well-formed"), + ty::ClauseKind::WellFormed(term) => p!(print(term), " well-formed"), ty::ClauseKind::ConstEvaluatable(ct) => { p!("the constant `", print(ct), "` can be evaluated") } @@ -3284,7 +3292,7 @@ define_print! { } ty::ExistentialProjection<'tcx> { - let name = cx.tcx().associated_item(self.def_id).name; + let name = cx.tcx().associated_item(self.def_id).name(); // The args don't contain the self ty (as it has been erased) but the corresp. // generics do as the trait always has a self ty param. We need to offset. let args = &self.args[cx.tcx().generics_of(self.def_id).parent_count - 1..]; @@ -3489,8 +3497,8 @@ pub fn trimmed_def_paths(tcx: TyCtxt<'_>, (): ()) -> DefIdMap { // Once constructed, unique namespace+symbol pairs will have a `Some(_)` entry, while // non-unique pairs will have a `None` entry. - let unique_symbols_rev: &mut FxHashMap<(Namespace, Symbol), Option> = - &mut FxHashMap::default(); + let unique_symbols_rev: &mut FxIndexMap<(Namespace, Symbol), Option> = + &mut FxIndexMap::default(); for symbol_set in tcx.resolutions(()).glob_map.values() { for symbol in symbol_set { @@ -3500,27 +3508,23 @@ pub fn trimmed_def_paths(tcx: TyCtxt<'_>, (): ()) -> DefIdMap { } } - for_each_def(tcx, |ident, ns, def_id| { - use std::collections::hash_map::Entry::{Occupied, Vacant}; - - match unique_symbols_rev.entry((ns, ident.name)) { - Occupied(mut v) => match v.get() { - None => {} - Some(existing) => { - if *existing != def_id { - v.insert(None); - } + for_each_def(tcx, |ident, ns, def_id| match unique_symbols_rev.entry((ns, ident.name)) { + IndexEntry::Occupied(mut v) => match v.get() { + None => {} + Some(existing) => { + if *existing != def_id { + v.insert(None); } - }, - Vacant(v) => { - v.insert(Some(def_id)); } + }, + IndexEntry::Vacant(v) => { + v.insert(Some(def_id)); } }); // Put the symbol from all the unique namespace+symbol pairs into `map`. let mut map: DefIdMap = Default::default(); - for ((_, symbol), opt_def_id) in unique_symbols_rev.drain() { + for ((_, symbol), opt_def_id) in unique_symbols_rev.drain(..) { use std::collections::hash_map::Entry::{Occupied, Vacant}; if let Some(def_id) = opt_def_id { diff --git a/compiler/rustc_middle/src/ty/region.rs b/compiler/rustc_middle/src/ty/region.rs index c78306f2ca379..3e4f7a79d5394 100644 --- a/compiler/rustc_middle/src/ty/region.rs +++ b/compiler/rustc_middle/src/ty/region.rs @@ -1,5 +1,3 @@ -use std::ops::Deref; - use rustc_data_structures::intern::Interned; use rustc_errors::MultiSpan; use rustc_hir::def_id::DefId; @@ -22,7 +20,7 @@ impl<'tcx> rustc_type_ir::inherent::IntoKind for Region<'tcx> { type Kind = RegionKind<'tcx>; fn kind(self) -> RegionKind<'tcx> { - *self + *self.0.0 } } @@ -32,7 +30,7 @@ impl<'tcx> rustc_type_ir::Flags for Region<'tcx> { } fn outer_exclusive_binder(&self) -> ty::DebruijnIndex { - match **self { + match self.kind() { ty::ReBound(debruijn, _) => debruijn.shifted_in(1), _ => ty::INNERMOST, } @@ -163,7 +161,7 @@ impl<'tcx> Region<'tcx> { pub fn get_name(self) -> Option { if self.has_name() { - match *self { + match self.kind() { ty::ReEarlyParam(ebr) => Some(ebr.name), ty::ReBound(_, br) => br.kind.get_name(), ty::ReLateParam(fr) => fr.kind.get_name(), @@ -185,7 +183,7 @@ impl<'tcx> Region<'tcx> { /// Is this region named by the user? pub fn has_name(self) -> bool { - match *self { + match self.kind() { ty::ReEarlyParam(ebr) => ebr.has_name(), ty::ReBound(_, br) => br.kind.is_named(), ty::ReLateParam(fr) => fr.kind.is_named(), @@ -199,32 +197,32 @@ impl<'tcx> Region<'tcx> { #[inline] pub fn is_error(self) -> bool { - matches!(*self, ty::ReError(_)) + matches!(self.kind(), ty::ReError(_)) } #[inline] pub fn is_static(self) -> bool { - matches!(*self, ty::ReStatic) + matches!(self.kind(), ty::ReStatic) } #[inline] pub fn is_erased(self) -> bool { - matches!(*self, ty::ReErased) + matches!(self.kind(), ty::ReErased) } #[inline] pub fn is_bound(self) -> bool { - matches!(*self, ty::ReBound(..)) + matches!(self.kind(), ty::ReBound(..)) } #[inline] pub fn is_placeholder(self) -> bool { - matches!(*self, ty::RePlaceholder(..)) + matches!(self.kind(), ty::RePlaceholder(..)) } #[inline] pub fn bound_at_or_above_binder(self, index: ty::DebruijnIndex) -> bool { - match *self { + match self.kind() { ty::ReBound(debruijn, _) => debruijn >= index, _ => false, } @@ -233,7 +231,7 @@ impl<'tcx> Region<'tcx> { pub fn type_flags(self) -> TypeFlags { let mut flags = TypeFlags::empty(); - match *self { + match self.kind() { ty::ReVar(..) => { flags = flags | TypeFlags::HAS_FREE_REGIONS; flags = flags | TypeFlags::HAS_FREE_LOCAL_REGIONS; @@ -275,14 +273,14 @@ impl<'tcx> Region<'tcx> { /// True for free regions other than `'static`. pub fn is_param(self) -> bool { - matches!(*self, ty::ReEarlyParam(_) | ty::ReLateParam(_)) + matches!(self.kind(), ty::ReEarlyParam(_) | ty::ReLateParam(_)) } /// True for free region in the current context. /// /// This is the case for `'static` and param regions. pub fn is_free(self) -> bool { - match *self { + match self.kind() { ty::ReStatic | ty::ReEarlyParam(..) | ty::ReLateParam(..) => true, ty::ReVar(..) | ty::RePlaceholder(..) @@ -319,15 +317,6 @@ impl<'tcx> Region<'tcx> { } } -impl<'tcx> Deref for Region<'tcx> { - type Target = RegionKind<'tcx>; - - #[inline] - fn deref(&self) -> &RegionKind<'tcx> { - self.0.0 - } -} - #[derive(Copy, Clone, PartialEq, Eq, Hash, TyEncodable, TyDecodable)] #[derive(HashStable)] pub struct EarlyParamRegion { diff --git a/compiler/rustc_middle/src/ty/relate.rs b/compiler/rustc_middle/src/ty/relate.rs index b1dfcb80bde57..6ad4e5276b253 100644 --- a/compiler/rustc_middle/src/ty/relate.rs +++ b/compiler/rustc_middle/src/ty/relate.rs @@ -49,6 +49,7 @@ impl<'tcx> Relate> for ty::Pattern<'tcx> { a: Self, b: Self, ) -> RelateResult<'tcx, Self> { + let tcx = relation.cx(); match (&*a, &*b) { ( &ty::PatternKind::Range { start: start_a, end: end_a }, @@ -56,8 +57,17 @@ impl<'tcx> Relate> for ty::Pattern<'tcx> { ) => { let start = relation.relate(start_a, start_b)?; let end = relation.relate(end_a, end_b)?; - Ok(relation.cx().mk_pat(ty::PatternKind::Range { start, end })) + Ok(tcx.mk_pat(ty::PatternKind::Range { start, end })) + } + (&ty::PatternKind::Or(a), &ty::PatternKind::Or(b)) => { + if a.len() != b.len() { + return Err(TypeError::Mismatch); + } + let v = iter::zip(a, b).map(|(a, b)| relation.relate(a, b)); + let patterns = tcx.mk_patterns_from_iter(v)?; + Ok(tcx.mk_pat(ty::PatternKind::Or(patterns))) } + (ty::PatternKind::Range { .. } | ty::PatternKind::Or(_), _) => Err(TypeError::Mismatch), } } } diff --git a/compiler/rustc_middle/src/ty/significant_drop_order.rs b/compiler/rustc_middle/src/ty/significant_drop_order.rs index 2d9e0331451fd..561f84192b42b 100644 --- a/compiler/rustc_middle/src/ty/significant_drop_order.rs +++ b/compiler/rustc_middle/src/ty/significant_drop_order.rs @@ -26,7 +26,7 @@ fn true_significant_drop_ty<'tcx>( name_rev.push(tcx.crate_name(did.krate)); } rustc_hir::definitions::DefPathData::TypeNs(symbol) => { - name_rev.push(symbol.unwrap()); + name_rev.push(symbol); } _ => return None, } @@ -143,25 +143,11 @@ pub fn ty_dtor_span<'tcx>(tcx: TyCtxt<'tcx>, ty: Ty<'tcx>) -> Option { | ty::UnsafeBinder(_) => None, ty::Adt(adt_def, _) => { - let did = adt_def.did(); - let try_local_did_span = |did: DefId| { - if let Some(local) = did.as_local() { - tcx.source_span(local) - } else { - tcx.def_span(did) - } - }; - let dtor = if let Some(dtor) = tcx.adt_destructor(did) { - dtor.did - } else if let Some(dtor) = tcx.adt_async_destructor(did) { - dtor.future + if let Some(dtor) = tcx.adt_destructor(adt_def.did()) { + Some(tcx.def_span(tcx.parent(dtor.did))) } else { - return Some(try_local_did_span(did)); - }; - let def_key = tcx.def_key(dtor); - let Some(parent_index) = def_key.parent else { return Some(try_local_did_span(dtor)) }; - let parent_did = DefId { index: parent_index, krate: dtor.krate }; - Some(try_local_did_span(parent_did)) + Some(tcx.def_span(adt_def.did())) + } } ty::Coroutine(did, _) | ty::CoroutineWitness(did, _) diff --git a/compiler/rustc_middle/src/ty/structural_impls.rs b/compiler/rustc_middle/src/ty/structural_impls.rs index 60fd531b4d0ee..58f7bc75054bb 100644 --- a/compiler/rustc_middle/src/ty/structural_impls.rs +++ b/compiler/rustc_middle/src/ty/structural_impls.rs @@ -6,20 +6,18 @@ use std::fmt::{self, Debug}; use rustc_abi::TyAndLayout; -use rustc_ast::InlineAsmTemplatePiece; use rustc_hir::def::Namespace; use rustc_hir::def_id::LocalDefId; -use rustc_span::Span; use rustc_span::source_map::Spanned; -use rustc_type_ir::{ConstKind, VisitorResult, try_visit}; +use rustc_type_ir::{ConstKind, TypeFolder, VisitorResult, try_visit}; use super::print::PrettyPrinter; use super::{GenericArg, GenericArgKind, Pattern, Region}; use crate::mir::PlaceElem; use crate::ty::print::{FmtPrinter, Printer, with_no_trimmed_paths}; use crate::ty::{ - self, FallibleTypeFolder, InferConst, Lift, Term, TermKind, Ty, TyCtxt, TypeFoldable, - TypeSuperFoldable, TypeSuperVisitable, TypeVisitable, TypeVisitor, + self, FallibleTypeFolder, Lift, Term, TermKind, Ty, TyCtxt, TypeFoldable, TypeSuperFoldable, + TypeSuperVisitable, TypeVisitable, TypeVisitor, }; impl fmt::Debug for ty::TraitDef { @@ -61,6 +59,12 @@ impl<'tcx> fmt::Debug for ty::adjustment::Adjustment<'tcx> { } } +impl<'tcx> fmt::Debug for ty::adjustment::PatAdjustment<'tcx> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "{} -> {:?}", self.source, self.kind) + } +} + impl fmt::Debug for ty::BoundRegionKind { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match *self { @@ -86,15 +90,15 @@ impl fmt::Debug for ty::LateParamRegion { impl fmt::Debug for ty::LateParamRegionKind { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match *self { - ty::LateParamRegionKind::Anon(idx) => write!(f, "BrAnon({idx})"), + ty::LateParamRegionKind::Anon(idx) => write!(f, "LateAnon({idx})"), ty::LateParamRegionKind::Named(did, name) => { if did.is_crate_root() { - write!(f, "BrNamed({name})") + write!(f, "LateNamed({name})") } else { - write!(f, "BrNamed({did:?}, {name})") + write!(f, "LateNamed({did:?}, {name})") } } - ty::LateParamRegionKind::ClosureEnv => write!(f, "BrEnv"), + ty::LateParamRegionKind::ClosureEnv => write!(f, "LateEnv"), } } } @@ -271,6 +275,7 @@ TrivialTypeTraversalImpls! { crate::ty::AssocKind, crate::ty::BoundRegion, crate::ty::BoundVar, + crate::ty::InferConst, crate::ty::Placeholder, crate::ty::Placeholder, crate::ty::Placeholder, @@ -337,24 +342,6 @@ impl<'tcx> TypeVisitable> for ty::AdtDef<'tcx> { } } -impl<'tcx> TypeFoldable> for &'tcx ty::List> { - fn try_fold_with>>( - self, - folder: &mut F, - ) -> Result { - ty::util::fold_list(self, folder, |tcx, v| tcx.mk_poly_existential_predicates(v)) - } -} - -impl<'tcx> TypeFoldable> for &'tcx ty::List> { - fn try_fold_with>>( - self, - folder: &mut F, - ) -> Result { - ty::util::fold_list(self, folder, |tcx, v| tcx.mk_const_list(v)) - } -} - impl<'tcx> TypeFoldable> for Pattern<'tcx> { fn try_fold_with>>( self, @@ -363,6 +350,11 @@ impl<'tcx> TypeFoldable> for Pattern<'tcx> { let pat = (*self).clone().try_fold_with(folder)?; Ok(if pat == *self { self } else { folder.cx().mk_pat(pat) }) } + + fn fold_with>>(self, folder: &mut F) -> Self { + let pat = (*self).clone().fold_with(folder); + if pat == *self { self } else { folder.cx().mk_pat(pat) } + } } impl<'tcx> TypeVisitable> for Pattern<'tcx> { @@ -378,6 +370,10 @@ impl<'tcx> TypeFoldable> for Ty<'tcx> { ) -> Result { folder.try_fold_ty(self) } + + fn fold_with>>(self, folder: &mut F) -> Self { + folder.fold_ty(self) + } } impl<'tcx> TypeVisitable> for Ty<'tcx> { @@ -436,6 +432,45 @@ impl<'tcx> TypeSuperFoldable> for Ty<'tcx> { Ok(if *self.kind() == kind { self } else { folder.cx().mk_ty_from_kind(kind) }) } + + fn super_fold_with>>(self, folder: &mut F) -> Self { + let kind = match *self.kind() { + ty::RawPtr(ty, mutbl) => ty::RawPtr(ty.fold_with(folder), mutbl), + ty::Array(typ, sz) => ty::Array(typ.fold_with(folder), sz.fold_with(folder)), + ty::Slice(typ) => ty::Slice(typ.fold_with(folder)), + ty::Adt(tid, args) => ty::Adt(tid, args.fold_with(folder)), + ty::Dynamic(trait_ty, region, representation) => { + ty::Dynamic(trait_ty.fold_with(folder), region.fold_with(folder), representation) + } + ty::Tuple(ts) => ty::Tuple(ts.fold_with(folder)), + ty::FnDef(def_id, args) => ty::FnDef(def_id, args.fold_with(folder)), + ty::FnPtr(sig_tys, hdr) => ty::FnPtr(sig_tys.fold_with(folder), hdr), + ty::UnsafeBinder(f) => ty::UnsafeBinder(f.fold_with(folder)), + ty::Ref(r, ty, mutbl) => ty::Ref(r.fold_with(folder), ty.fold_with(folder), mutbl), + ty::Coroutine(did, args) => ty::Coroutine(did, args.fold_with(folder)), + ty::CoroutineWitness(did, args) => ty::CoroutineWitness(did, args.fold_with(folder)), + ty::Closure(did, args) => ty::Closure(did, args.fold_with(folder)), + ty::CoroutineClosure(did, args) => ty::CoroutineClosure(did, args.fold_with(folder)), + ty::Alias(kind, data) => ty::Alias(kind, data.fold_with(folder)), + ty::Pat(ty, pat) => ty::Pat(ty.fold_with(folder), pat.fold_with(folder)), + + ty::Bool + | ty::Char + | ty::Str + | ty::Int(_) + | ty::Uint(_) + | ty::Float(_) + | ty::Error(_) + | ty::Infer(_) + | ty::Param(..) + | ty::Bound(..) + | ty::Placeholder(..) + | ty::Never + | ty::Foreign(..) => return self, + }; + + if *self.kind() == kind { self } else { folder.cx().mk_ty_from_kind(kind) } + } } impl<'tcx> TypeSuperVisitable> for Ty<'tcx> { @@ -496,6 +531,10 @@ impl<'tcx> TypeFoldable> for ty::Region<'tcx> { ) -> Result { folder.try_fold_region(self) } + + fn fold_with>>(self, folder: &mut F) -> Self { + folder.fold_region(self) + } } impl<'tcx> TypeVisitable> for ty::Region<'tcx> { @@ -511,6 +550,10 @@ impl<'tcx> TypeFoldable> for ty::Predicate<'tcx> { ) -> Result { folder.try_fold_predicate(self) } + + fn fold_with>>(self, folder: &mut F) -> Self { + folder.fold_predicate(self) + } } // FIXME(clause): This is wonky @@ -521,6 +564,10 @@ impl<'tcx> TypeFoldable> for ty::Clause<'tcx> { ) -> Result { Ok(folder.try_fold_predicate(self.as_predicate())?.expect_clause()) } + + fn fold_with>>(self, folder: &mut F) -> Self { + folder.fold_predicate(self.as_predicate()).expect_clause() + } } impl<'tcx> TypeVisitable> for ty::Predicate<'tcx> { @@ -543,6 +590,11 @@ impl<'tcx> TypeSuperFoldable> for ty::Predicate<'tcx> { let new = self.kind().try_fold_with(folder)?; Ok(folder.cx().reuse_or_mk_predicate(self, new)) } + + fn super_fold_with>>(self, folder: &mut F) -> Self { + let new = self.kind().fold_with(folder); + folder.cx().reuse_or_mk_predicate(self, new) + } } impl<'tcx> TypeSuperVisitable> for ty::Predicate<'tcx> { @@ -563,15 +615,6 @@ impl<'tcx> TypeSuperVisitable> for ty::Clauses<'tcx> { } } -impl<'tcx> TypeFoldable> for ty::Clauses<'tcx> { - fn try_fold_with>>( - self, - folder: &mut F, - ) -> Result { - ty::util::fold_list(self, folder, |tcx, v| tcx.mk_clauses(v)) - } -} - impl<'tcx> TypeFoldable> for ty::Const<'tcx> { fn try_fold_with>>( self, @@ -579,6 +622,10 @@ impl<'tcx> TypeFoldable> for ty::Const<'tcx> { ) -> Result { folder.try_fold_const(self) } + + fn fold_with>>(self, folder: &mut F) -> Self { + folder.fold_const(self) + } } impl<'tcx> TypeVisitable> for ty::Const<'tcx> { @@ -606,6 +653,20 @@ impl<'tcx> TypeSuperFoldable> for ty::Const<'tcx> { }; if kind != self.kind() { Ok(folder.cx().mk_ct_from_kind(kind)) } else { Ok(self) } } + + fn super_fold_with>>(self, folder: &mut F) -> Self { + let kind = match self.kind() { + ConstKind::Param(p) => ConstKind::Param(p.fold_with(folder)), + ConstKind::Infer(i) => ConstKind::Infer(i.fold_with(folder)), + ConstKind::Bound(d, b) => ConstKind::Bound(d.fold_with(folder), b.fold_with(folder)), + ConstKind::Placeholder(p) => ConstKind::Placeholder(p.fold_with(folder)), + ConstKind::Unevaluated(uv) => ConstKind::Unevaluated(uv.fold_with(folder)), + ConstKind::Value(v) => ConstKind::Value(v.fold_with(folder)), + ConstKind::Error(e) => ConstKind::Error(e.fold_with(folder)), + ConstKind::Expr(e) => ConstKind::Expr(e.fold_with(folder)), + }; + if kind != self.kind() { folder.cx().mk_ct_from_kind(kind) } else { self } + } } impl<'tcx> TypeSuperVisitable> for ty::Const<'tcx> { @@ -639,20 +700,9 @@ impl<'tcx> TypeFoldable> for rustc_span::ErrorGuaranteed { ) -> Result { Ok(self) } -} - -impl<'tcx> TypeFoldable> for InferConst { - fn try_fold_with>>( - self, - _folder: &mut F, - ) -> Result { - Ok(self) - } -} -impl<'tcx> TypeVisitable> for InferConst { - fn visit_with>>(&self, _visitor: &mut V) -> V::Result { - V::Result::output() + fn fold_with>>(self, _folder: &mut F) -> Self { + self } } @@ -683,40 +733,50 @@ impl<'tcx, T: TypeFoldable> + Debug + Clone> TypeFoldable TypeFoldable> for &'tcx [InlineAsmTemplatePiece] { - fn try_fold_with>>( - self, - _folder: &mut F, - ) -> Result { - Ok(self) + fn fold_with>>(self, folder: &mut F) -> Self { + Spanned { node: self.node.fold_with(folder), span: self.span.fold_with(folder) } } } -impl<'tcx> TypeFoldable> for &'tcx [Span] { +impl<'tcx> TypeFoldable> for &'tcx ty::List { fn try_fold_with>>( self, _folder: &mut F, ) -> Result { Ok(self) } -} -impl<'tcx> TypeFoldable> for &'tcx ty::List { - fn try_fold_with>>( - self, - _folder: &mut F, - ) -> Result { - Ok(self) + fn fold_with>>(self, _folder: &mut F) -> Self { + self } } -impl<'tcx> TypeFoldable> for &'tcx ty::List> { - fn try_fold_with>>( - self, - folder: &mut F, - ) -> Result { - ty::util::fold_list(self, folder, |tcx, v| tcx.mk_place_elems(v)) +macro_rules! list_fold { + ($($ty:ty : $mk:ident),+ $(,)?) => { + $( + impl<'tcx> TypeFoldable> for $ty { + fn try_fold_with>>( + self, + folder: &mut F, + ) -> Result { + ty::util::try_fold_list(self, folder, |tcx, v| tcx.$mk(v)) + } + + fn fold_with>>( + self, + folder: &mut F, + ) -> Self { + ty::util::fold_list(self, folder, |tcx, v| tcx.$mk(v)) + } + } + )* } } + +list_fold! { + ty::Clauses<'tcx> : mk_clauses, + &'tcx ty::List> : mk_poly_existential_predicates, + &'tcx ty::List> : mk_place_elems, + &'tcx ty::List> : mk_patterns, +} diff --git a/compiler/rustc_middle/src/ty/sty.rs b/compiler/rustc_middle/src/ty/sty.rs index 74a94d8278453..c31ce1bc63070 100644 --- a/compiler/rustc_middle/src/ty/sty.rs +++ b/compiler/rustc_middle/src/ty/sty.rs @@ -4,11 +4,10 @@ use std::assert_matches::debug_assert_matches; use std::borrow::Cow; -use std::iter; use std::ops::{ControlFlow, Range}; use hir::def::{CtorKind, DefKind}; -use rustc_abi::{ExternAbi, FIRST_VARIANT, FieldIdx, VariantIdx}; +use rustc_abi::{FIRST_VARIANT, FieldIdx, VariantIdx}; use rustc_errors::{ErrorGuaranteed, MultiSpan}; use rustc_hir as hir; use rustc_hir::LangItem; @@ -16,9 +15,10 @@ use rustc_hir::def_id::DefId; use rustc_macros::{HashStable, TyDecodable, TyEncodable, TypeFoldable, extension}; use rustc_span::{DUMMY_SP, Span, Symbol, sym}; use rustc_type_ir::TyKind::*; +use rustc_type_ir::walk::TypeWalker; use rustc_type_ir::{self as ir, BoundVar, CollectAndApply, DynKind, TypeVisitableExt, elaborate}; use tracing::instrument; -use ty::util::{AsyncDropGlueMorphology, IntTypeExt}; +use ty::util::IntTypeExt; use super::GenericParamDefKind; use crate::infer::canonical::Canonical; @@ -77,8 +77,7 @@ impl<'tcx> ty::CoroutineArgs> { #[inline] fn variant_range(&self, def_id: DefId, tcx: TyCtxt<'tcx>) -> Range { // FIXME requires optimized MIR - FIRST_VARIANT - ..tcx.coroutine_layout(def_id, tcx.types.unit).unwrap().variant_fields.next_index() + FIRST_VARIANT..tcx.coroutine_layout(def_id, self.args).unwrap().variant_fields.next_index() } /// The discriminant for the given variant. Panics if the `variant_index` is @@ -138,10 +137,14 @@ impl<'tcx> ty::CoroutineArgs> { def_id: DefId, tcx: TyCtxt<'tcx>, ) -> impl Iterator>> { - let layout = tcx.coroutine_layout(def_id, self.kind_ty()).unwrap(); + let layout = tcx.coroutine_layout(def_id, self.args).unwrap(); layout.variant_fields.iter().map(move |variant| { variant.iter().map(move |field| { - ty::EarlyBinder::bind(layout.field_tys[*field].ty).instantiate(tcx, self.args) + if tcx.is_async_drop_in_place_coroutine(def_id) { + layout.field_tys[*field].ty + } else { + ty::EarlyBinder::bind(layout.field_tys[*field].ty).instantiate(tcx, self.args) + } }) }) } @@ -488,7 +491,7 @@ impl<'tcx> Ty<'tcx> { (kind, tcx.def_kind(alias_ty.def_id)), (ty::Opaque, DefKind::OpaqueTy) | (ty::Projection | ty::Inherent, DefKind::AssocTy) - | (ty::Weak, DefKind::TyAlias) + | (ty::Free, DefKind::TyAlias) ); Ty::new(tcx, Alias(kind, alias_ty)) } @@ -720,7 +723,10 @@ impl<'tcx> Ty<'tcx> { repr: DynKind, ) -> Ty<'tcx> { if cfg!(debug_assertions) { - let projection_count = obj.projection_bounds().count(); + let projection_count = obj + .projection_bounds() + .filter(|item| !tcx.generics_require_sized_self(item.item_def_id())) + .count(); let expected_count: usize = obj .principal_def_id() .into_iter() @@ -734,7 +740,7 @@ impl<'tcx> Ty<'tcx> { .map(|principal| { tcx.associated_items(principal.def_id()) .in_definition_order() - .filter(|item| item.kind == ty::AssocKind::Type) + .filter(|item| item.is_type()) .filter(|item| !item.is_impl_trait_in_trait()) .filter(|item| !tcx.generics_require_sized_self(item.def_id)) .count() @@ -1045,10 +1051,6 @@ impl<'tcx> rustc_type_ir::inherent::Ty> for Ty<'tcx> { self.discriminant_ty(interner) } - fn async_destructor_ty(self, interner: TyCtxt<'tcx>) -> Ty<'tcx> { - self.async_destructor_ty(interner) - } - fn has_unsafe_fields(self) -> bool { Ty::has_unsafe_fields(self) } @@ -1418,6 +1420,34 @@ impl<'tcx> Ty<'tcx> { cf.is_break() } + /// Returns the deepest `async_drop_in_place::{closure}` implementation. + /// + /// `async_drop_in_place::{closure}`, when T is a coroutine, is a proxy-impl + /// to call async drop poll from impl coroutine. + pub fn find_async_drop_impl_coroutine)>( + self, + tcx: TyCtxt<'tcx>, + mut f: F, + ) -> Ty<'tcx> { + assert!(self.is_coroutine()); + let mut cor_ty = self; + let mut ty = cor_ty; + loop { + if let ty::Coroutine(def_id, args) = ty.kind() { + cor_ty = ty; + f(ty); + if tcx.is_async_drop_in_place_coroutine(*def_id) { + ty = args.first().unwrap().expect_ty(); + continue; + } else { + return cor_ty; + } + } else { + return cor_ty; + } + } + } + /// Returns the type and mutability of `*ty`. /// /// The parameter `explicit` indicates if this is an *explicit* dereference. @@ -1441,23 +1471,7 @@ impl<'tcx> Ty<'tcx> { #[tracing::instrument(level = "trace", skip(tcx))] pub fn fn_sig(self, tcx: TyCtxt<'tcx>) -> PolyFnSig<'tcx> { - match self.kind() { - FnDef(def_id, args) => tcx.fn_sig(*def_id).instantiate(tcx, args), - FnPtr(sig_tys, hdr) => sig_tys.with(*hdr), - Error(_) => { - // ignore errors (#54954) - Binder::dummy(ty::FnSig { - inputs_and_output: ty::List::empty(), - c_variadic: false, - safety: hir::Safety::Safe, - abi: ExternAbi::Rust, - }) - } - Closure(..) => bug!( - "to get the signature of a closure, use `args.as_closure().sig()` not `fn_sig()`", - ), - _ => bug!("Ty::fn_sig() called on non-fn type: {:?}", self), - } + self.kind().fn_sig(tcx) } #[inline] @@ -1575,125 +1589,6 @@ impl<'tcx> Ty<'tcx> { } } - /// Returns the type of the async destructor of this type. - pub fn async_destructor_ty(self, tcx: TyCtxt<'tcx>) -> Ty<'tcx> { - match self.async_drop_glue_morphology(tcx) { - AsyncDropGlueMorphology::Noop => { - return Ty::async_destructor_combinator(tcx, LangItem::AsyncDropNoop) - .instantiate_identity(); - } - AsyncDropGlueMorphology::DeferredDropInPlace => { - let drop_in_place = - Ty::async_destructor_combinator(tcx, LangItem::AsyncDropDeferredDropInPlace) - .instantiate(tcx, &[self.into()]); - return Ty::async_destructor_combinator(tcx, LangItem::AsyncDropFuse) - .instantiate(tcx, &[drop_in_place.into()]); - } - AsyncDropGlueMorphology::Custom => (), - } - - match *self.kind() { - ty::Param(_) | ty::Alias(..) | ty::Infer(ty::TyVar(_)) => { - let assoc_items = tcx - .associated_item_def_ids(tcx.require_lang_item(LangItem::AsyncDestruct, None)); - Ty::new_projection(tcx, assoc_items[0], [self]) - } - - ty::Array(elem_ty, _) | ty::Slice(elem_ty) => { - let dtor = Ty::async_destructor_combinator(tcx, LangItem::AsyncDropSlice) - .instantiate(tcx, &[elem_ty.into()]); - Ty::async_destructor_combinator(tcx, LangItem::AsyncDropFuse) - .instantiate(tcx, &[dtor.into()]) - } - - ty::Adt(adt_def, args) if adt_def.is_enum() || adt_def.is_struct() => self - .adt_async_destructor_ty( - tcx, - adt_def.variants().iter().map(|v| v.fields.iter().map(|f| f.ty(tcx, args))), - ), - ty::Tuple(tys) => self.adt_async_destructor_ty(tcx, iter::once(tys)), - ty::Closure(_, args) => { - self.adt_async_destructor_ty(tcx, iter::once(args.as_closure().upvar_tys())) - } - ty::CoroutineClosure(_, args) => self - .adt_async_destructor_ty(tcx, iter::once(args.as_coroutine_closure().upvar_tys())), - - ty::Adt(adt_def, _) => { - assert!(adt_def.is_union()); - - let surface_drop = self.surface_async_dropper_ty(tcx).unwrap(); - - Ty::async_destructor_combinator(tcx, LangItem::AsyncDropFuse) - .instantiate(tcx, &[surface_drop.into()]) - } - - ty::Bound(..) - | ty::Foreign(_) - | ty::Placeholder(_) - | ty::Infer(ty::FreshTy(_) | ty::FreshIntTy(_) | ty::FreshFloatTy(_)) => { - bug!("`async_destructor_ty` applied to unexpected type: {self:?}") - } - - _ => bug!("`async_destructor_ty` is not yet implemented for type: {self:?}"), - } - } - - fn adt_async_destructor_ty(self, tcx: TyCtxt<'tcx>, variants: I) -> Ty<'tcx> - where - I: Iterator + ExactSizeIterator, - I::Item: IntoIterator>, - { - debug_assert_eq!(self.async_drop_glue_morphology(tcx), AsyncDropGlueMorphology::Custom); - - let defer = Ty::async_destructor_combinator(tcx, LangItem::AsyncDropDefer); - let chain = Ty::async_destructor_combinator(tcx, LangItem::AsyncDropChain); - - let noop = - Ty::async_destructor_combinator(tcx, LangItem::AsyncDropNoop).instantiate_identity(); - let either = Ty::async_destructor_combinator(tcx, LangItem::AsyncDropEither); - - let variants_dtor = variants - .into_iter() - .map(|variant| { - variant - .into_iter() - .map(|ty| defer.instantiate(tcx, &[ty.into()])) - .reduce(|acc, next| chain.instantiate(tcx, &[acc.into(), next.into()])) - .unwrap_or(noop) - }) - .reduce(|other, matched| { - either.instantiate(tcx, &[other.into(), matched.into(), self.into()]) - }) - .unwrap(); - - let dtor = if let Some(dropper_ty) = self.surface_async_dropper_ty(tcx) { - Ty::async_destructor_combinator(tcx, LangItem::AsyncDropChain) - .instantiate(tcx, &[dropper_ty.into(), variants_dtor.into()]) - } else { - variants_dtor - }; - - Ty::async_destructor_combinator(tcx, LangItem::AsyncDropFuse) - .instantiate(tcx, &[dtor.into()]) - } - - fn surface_async_dropper_ty(self, tcx: TyCtxt<'tcx>) -> Option> { - let adt_def = self.ty_adt_def()?; - let dropper = adt_def - .async_destructor(tcx) - .map(|_| LangItem::SurfaceAsyncDropInPlace) - .or_else(|| adt_def.destructor(tcx).map(|_| LangItem::AsyncDropSurfaceDropInPlace))?; - Some(Ty::async_destructor_combinator(tcx, dropper).instantiate(tcx, &[self.into()])) - } - - fn async_destructor_combinator( - tcx: TyCtxt<'tcx>, - lang_item: LangItem, - ) -> ty::EarlyBinder<'tcx, Ty<'tcx>> { - tcx.fn_sig(tcx.require_lang_item(lang_item, None)) - .map_bound(|fn_sig| fn_sig.output().no_bound_vars().unwrap()) - } - /// Returns the type of metadata for (potentially wide) pointers to this type, /// or the struct tail if the metadata type cannot be determined. pub fn ptr_metadata_ty_or_tail( @@ -1789,9 +1684,7 @@ impl<'tcx> Ty<'tcx> { match pointee_ty.ptr_metadata_ty_or_tail(tcx, |x| x) { Ok(metadata_ty) => metadata_ty, Err(tail_ty) => { - let Some(metadata_def_id) = tcx.lang_items().metadata_type() else { - bug!("No metadata_type lang item while looking at {self:?}") - }; + let metadata_def_id = tcx.require_lang_item(LangItem::Metadata, None); Ty::new_projection(tcx, metadata_def_id, [tail_ty]) } } @@ -1888,14 +1781,14 @@ impl<'tcx> Ty<'tcx> { /// Fast path helper for testing if a type is `Sized`. /// - /// Returning true means the type is known to be sized. Returning - /// `false` means nothing -- could be sized, might not be. + /// Returning true means the type is known to implement `Sized`. Returning `false` means + /// nothing -- could be sized, might not be. /// - /// Note that we could never rely on the fact that a type such as `[_]` is - /// trivially `!Sized` because we could be in a type environment with a - /// bound such as `[_]: Copy`. A function with such a bound obviously never - /// can be called, but that doesn't mean it shouldn't typecheck. This is why - /// this method doesn't return `Option`. + /// Note that we could never rely on the fact that a type such as `[_]` is trivially `!Sized` + /// because we could be in a type environment with a bound such as `[_]: Copy`. A function with + /// such a bound obviously never can be called, but that doesn't mean it shouldn't typecheck. + /// This is why this method doesn't return `Option`. + #[instrument(skip(tcx), level = "debug")] pub fn is_trivially_sized(self, tcx: TyCtxt<'tcx>) -> bool { match self.kind() { ty::Infer(ty::IntVar(_) | ty::FloatVar(_)) @@ -2037,38 +1930,34 @@ impl<'tcx> Ty<'tcx> { } } + pub fn is_async_drop_in_place_coroutine(self, tcx: TyCtxt<'_>) -> bool { + match self.kind() { + ty::Coroutine(def, ..) => tcx.is_async_drop_in_place_coroutine(*def), + _ => false, + } + } + /// Returns `true` when the outermost type cannot be further normalized, /// resolved, or instantiated. This includes all primitive types, but also /// things like ADTs and trait objects, since even if their arguments or /// nested types may be further simplified, the outermost [`TyKind`] or /// type constructor remains the same. pub fn is_known_rigid(self) -> bool { - match self.kind() { - Bool - | Char - | Int(_) - | Uint(_) - | Float(_) - | Adt(_, _) - | Foreign(_) - | Str - | Array(_, _) - | Pat(_, _) - | Slice(_) - | RawPtr(_, _) - | Ref(_, _, _) - | FnDef(_, _) - | FnPtr(..) - | Dynamic(_, _, _) - | Closure(_, _) - | CoroutineClosure(_, _) - | Coroutine(_, _) - | CoroutineWitness(..) - | Never - | Tuple(_) - | UnsafeBinder(_) => true, - Error(_) | Infer(_) | Alias(_, _) | Param(_) | Bound(_, _) | Placeholder(_) => false, - } + self.kind().is_known_rigid() + } + + /// Iterator that walks `self` and any types reachable from + /// `self`, in depth-first order. Note that just walks the types + /// that appear in `self`, it does not descend into the fields of + /// structs or variants. For example: + /// + /// ```text + /// isize => { isize } + /// Foo> => { Foo>, Bar, isize } + /// [isize] => { [isize], isize } + /// ``` + pub fn walk(self) -> TypeWalker> { + TypeWalker::new(self.into()) } } diff --git a/compiler/rustc_middle/src/ty/trait_def.rs b/compiler/rustc_middle/src/ty/trait_def.rs index 8fa1c569737a0..ea25ce65f7727 100644 --- a/compiler/rustc_middle/src/ty/trait_def.rs +++ b/compiler/rustc_middle/src/ty/trait_def.rs @@ -129,21 +129,6 @@ impl<'tcx> TraitDef { } impl<'tcx> TyCtxt<'tcx> { - /// `trait_def_id` MUST BE the `DefId` of a trait. - pub fn for_each_impl(self, trait_def_id: DefId, mut f: F) { - let impls = self.trait_impls_of(trait_def_id); - - for &impl_def_id in impls.blanket_impls.iter() { - f(impl_def_id); - } - - for v in impls.non_blanket_impls.values() { - for &impl_def_id in v { - f(impl_def_id); - } - } - } - /// Iterate over every impl that could possibly match the self type `self_ty`. /// /// `trait_def_id` MUST BE the `DefId` of a trait. @@ -235,7 +220,7 @@ pub(super) fn trait_impls_of_provider(tcx: TyCtxt<'_>, trait_id: DefId) -> Trait } } - for &impl_def_id in tcx.hir_trait_impls(trait_id) { + for &impl_def_id in tcx.local_trait_impls(trait_id) { let impl_def_id = impl_def_id.to_def_id(); let impl_self_ty = tcx.type_of(impl_def_id).instantiate_identity(); diff --git a/compiler/rustc_middle/src/ty/typeck_results.rs b/compiler/rustc_middle/src/ty/typeck_results.rs index 06054e22e7601..c6a45f8468690 100644 --- a/compiler/rustc_middle/src/ty/typeck_results.rs +++ b/compiler/rustc_middle/src/ty/typeck_results.rs @@ -77,8 +77,8 @@ pub struct TypeckResults<'tcx> { /// to a form valid in all Editions, either as a lint diagnostic or hard error. rust_2024_migration_desugared_pats: ItemLocalMap, - /// Stores the types which were implicitly dereferenced in pattern binding modes - /// for later usage in THIR lowering. For example, + /// Stores the types which were implicitly dereferenced in pattern binding modes or deref + /// patterns for later usage in THIR lowering. For example, /// /// ``` /// match &&Some(5i32) { @@ -86,11 +86,20 @@ pub struct TypeckResults<'tcx> { /// _ => {}, /// } /// ``` - /// leads to a `vec![&&Option, &Option]`. Empty vectors are not stored. + /// leads to a `vec![&&Option, &Option]` and + /// + /// ``` + /// #![feature(deref_patterns)] + /// match &Box::new(Some(5i32)) { + /// Some(n) => {}, + /// _ => {}, + /// } + /// ``` + /// leads to a `vec![&Box>, Box>]`. Empty vectors are not stored. /// /// See: /// - pat_adjustments: ItemLocalMap>>, + pat_adjustments: ItemLocalMap>>, /// Set of reference patterns that match against a match-ergonomics inserted reference /// (as opposed to against a reference in the scrutinee type). @@ -158,7 +167,7 @@ pub struct TypeckResults<'tcx> { /// We also store the type here, so that the compiler can use it as a hint /// for figuring out hidden types, even if they are only set in dead code /// (which doesn't show up in MIR). - pub concrete_opaque_types: FxIndexMap, ty::OpaqueHiddenType<'tcx>>, + pub concrete_opaque_types: FxIndexMap>, /// Tracks the minimum captures required for a closure; /// see `MinCaptureInformationMap` for more details. @@ -197,12 +206,6 @@ pub struct TypeckResults<'tcx> { /// formatting modified file tests/ui/coroutine/retain-resume-ref.rs pub coroutine_stalled_predicates: FxIndexSet<(ty::Predicate<'tcx>, ObligationCause<'tcx>)>, - /// We sometimes treat byte string literals (which are of type `&[u8; N]`) - /// as `&[u8]`, depending on the pattern in which they are used. - /// This hashset records all instances where we behave - /// like this to allow `const_to_pat` to reliably handle this situation. - pub treat_byte_string_as_slice: ItemLocalSet, - /// Contains the data for evaluating the effect of feature `capture_disjoint_fields` /// on closure size. pub closure_size_eval: LocalDefIdMap>, @@ -237,7 +240,6 @@ impl<'tcx> TypeckResults<'tcx> { closure_fake_reads: Default::default(), rvalue_scopes: Default::default(), coroutine_stalled_predicates: Default::default(), - treat_byte_string_as_slice: Default::default(), closure_size_eval: Default::default(), offset_of_data: Default::default(), } @@ -410,11 +412,15 @@ impl<'tcx> TypeckResults<'tcx> { LocalTableInContextMut { hir_owner: self.hir_owner, data: &mut self.pat_binding_modes } } - pub fn pat_adjustments(&self) -> LocalTableInContext<'_, Vec>> { + pub fn pat_adjustments( + &self, + ) -> LocalTableInContext<'_, Vec>> { LocalTableInContext { hir_owner: self.hir_owner, data: &self.pat_adjustments } } - pub fn pat_adjustments_mut(&mut self) -> LocalTableInContextMut<'_, Vec>> { + pub fn pat_adjustments_mut( + &mut self, + ) -> LocalTableInContextMut<'_, Vec>> { LocalTableInContextMut { hir_owner: self.hir_owner, data: &mut self.pat_adjustments } } @@ -469,6 +475,21 @@ impl<'tcx> TypeckResults<'tcx> { has_ref_mut } + /// How should a deref pattern find the place for its inner pattern to match on? + /// + /// In most cases, if the pattern recursively contains a `ref mut` binding, we find the inner + /// pattern's scrutinee by calling `DerefMut::deref_mut`, and otherwise we call `Deref::deref`. + /// However, for boxes we can use a built-in deref instead, which doesn't borrow the scrutinee; + /// in this case, we return `ByRef::No`. + pub fn deref_pat_borrow_mode(&self, pointer_ty: Ty<'_>, inner: &hir::Pat<'_>) -> ByRef { + if pointer_ty.is_box() { + ByRef::No + } else { + let mutable = self.pat_has_ref_mut_binding(inner); + ByRef::Yes(if mutable { Mutability::Mut } else { Mutability::Not }) + } + } + /// For a given closure, returns the iterator of `ty::CapturedPlace`s that are captured /// by the closure. pub fn closure_min_captures_flattened( @@ -695,6 +716,8 @@ pub type CanonicalUserTypeAnnotations<'tcx> = #[derive(Clone, Debug, TyEncodable, TyDecodable, HashStable, TypeFoldable, TypeVisitable)] pub struct CanonicalUserTypeAnnotation<'tcx> { + #[type_foldable(identity)] + #[type_visitable(ignore)] pub user_ty: Box>, pub span: Span, pub inferred_ty: Ty<'tcx>, @@ -765,7 +788,7 @@ impl<'tcx> IsIdentity for CanonicalUserType<'tcx> { _ => false, }, - GenericArgKind::Lifetime(r) => match *r { + GenericArgKind::Lifetime(r) => match r.kind() { ty::ReBound(debruijn, br) => { // We only allow a `ty::INNERMOST` index in generic parameters. assert_eq!(debruijn, ty::INNERMOST); diff --git a/compiler/rustc_middle/src/ty/util.rs b/compiler/rustc_middle/src/ty/util.rs index c0d4130336e52..6fe5927c29fcb 100644 --- a/compiler/rustc_middle/src/ty/util.rs +++ b/compiler/rustc_middle/src/ty/util.rs @@ -2,7 +2,7 @@ use std::{fmt, iter}; -use rustc_abi::{ExternAbi, Float, Integer, IntegerType, Size}; +use rustc_abi::{Float, Integer, IntegerType, Size}; use rustc_apfloat::Float as _; use rustc_data_structures::fx::{FxHashMap, FxHashSet}; use rustc_data_structures::stable_hasher::{HashStable, StableHasher}; @@ -389,96 +389,83 @@ impl<'tcx> TyCtxt<'tcx> { /// Calculate the destructor of a given type. pub fn calculate_dtor( self, - adt_did: DefId, - validate: impl Fn(Self, DefId) -> Result<(), ErrorGuaranteed>, + adt_did: LocalDefId, + validate: impl Fn(Self, LocalDefId) -> Result<(), ErrorGuaranteed>, ) -> Option { let drop_trait = self.lang_items().drop_trait()?; self.ensure_ok().coherent_trait(drop_trait).ok()?; - let ty = self.type_of(adt_did).instantiate_identity(); let mut dtor_candidate = None; - self.for_each_relevant_impl(drop_trait, ty, |impl_did| { + // `Drop` impls can only be written in the same crate as the adt, and cannot be blanket impls + for &impl_did in self.local_trait_impls(drop_trait) { + let Some(adt_def) = self.type_of(impl_did).skip_binder().ty_adt_def() else { continue }; + if adt_def.did() != adt_did.to_def_id() { + continue; + } + if validate(self, impl_did).is_err() { // Already `ErrorGuaranteed`, no need to delay a span bug here. - return; + continue; } let Some(item_id) = self.associated_item_def_ids(impl_did).first() else { self.dcx() .span_delayed_bug(self.def_span(impl_did), "Drop impl without drop function"); - return; + continue; }; - if let Some((old_item_id, _)) = dtor_candidate { + if self.def_kind(item_id) != DefKind::AssocFn { + self.dcx().span_delayed_bug(self.def_span(item_id), "drop is not a function"); + continue; + } + + if let Some(old_item_id) = dtor_candidate { self.dcx() .struct_span_err(self.def_span(item_id), "multiple drop impls found") .with_span_note(self.def_span(old_item_id), "other impl here") .delay_as_bug(); } - dtor_candidate = Some((*item_id, self.impl_trait_header(impl_did).unwrap().constness)); - }); + dtor_candidate = Some(*item_id); + } - let (did, constness) = dtor_candidate?; - Some(ty::Destructor { did, constness }) + let did = dtor_candidate?; + Some(ty::Destructor { did }) } /// Calculate the async destructor of a given type. pub fn calculate_async_dtor( self, - adt_did: DefId, - validate: impl Fn(Self, DefId) -> Result<(), ErrorGuaranteed>, + adt_did: LocalDefId, + validate: impl Fn(Self, LocalDefId) -> Result<(), ErrorGuaranteed>, ) -> Option { let async_drop_trait = self.lang_items().async_drop_trait()?; self.ensure_ok().coherent_trait(async_drop_trait).ok()?; - let ty = self.type_of(adt_did).instantiate_identity(); let mut dtor_candidate = None; - self.for_each_relevant_impl(async_drop_trait, ty, |impl_did| { + // `AsyncDrop` impls can only be written in the same crate as the adt, and cannot be blanket impls + for &impl_did in self.local_trait_impls(async_drop_trait) { + let Some(adt_def) = self.type_of(impl_did).skip_binder().ty_adt_def() else { continue }; + if adt_def.did() != adt_did.to_def_id() { + continue; + } + if validate(self, impl_did).is_err() { // Already `ErrorGuaranteed`, no need to delay a span bug here. - return; + continue; } - let [future, ctor] = self.associated_item_def_ids(impl_did) else { - self.dcx().span_delayed_bug( - self.def_span(impl_did), - "AsyncDrop impl without async_drop function or Dropper type", - ); - return; - }; - - if let Some((_, _, old_impl_did)) = dtor_candidate { + if let Some(old_impl_did) = dtor_candidate { self.dcx() .struct_span_err(self.def_span(impl_did), "multiple async drop impls found") .with_span_note(self.def_span(old_impl_did), "other impl here") .delay_as_bug(); } - dtor_candidate = Some((*future, *ctor, impl_did)); - }); - - let (future, ctor, _) = dtor_candidate?; - Some(ty::AsyncDestructor { future, ctor }) - } - - /// Returns async drop glue morphology for a definition. To get async drop - /// glue morphology for a type see [`Ty::async_drop_glue_morphology`]. - // - // FIXME: consider making this a query - pub fn async_drop_glue_morphology(self, did: DefId) -> AsyncDropGlueMorphology { - let ty: Ty<'tcx> = self.type_of(did).instantiate_identity(); - - // Async drop glue morphology is an internal detail, so - // using `TypingMode::PostAnalysis` probably should be fine. - let typing_env = ty::TypingEnv::fully_monomorphized(); - if ty.needs_async_drop(self, typing_env) { - AsyncDropGlueMorphology::Custom - } else if ty.needs_drop(self, typing_env) { - AsyncDropGlueMorphology::DeferredDropInPlace - } else { - AsyncDropGlueMorphology::Noop + dtor_candidate = Some(impl_did); } + + Some(ty::AsyncDestructor { impl_did: dtor_candidate? }) } /// Returns the set of types that are required to be alive in @@ -813,7 +800,7 @@ impl<'tcx> TyCtxt<'tcx> { /// Get an English description for the item's kind. pub fn def_kind_descr(self, def_kind: DefKind, def_id: DefId) -> &'static str { match def_kind { - DefKind::AssocFn if self.associated_item(def_id).fn_has_self_parameter => "method", + DefKind::AssocFn if self.associated_item(def_id).is_method() => "method", DefKind::Closure if let Some(coroutine_kind) = self.coroutine_kind(def_id) => { match coroutine_kind { hir::CoroutineKind::Desugared( @@ -867,7 +854,7 @@ impl<'tcx> TyCtxt<'tcx> { /// Gets an English article for the [`TyCtxt::def_kind_descr`]. pub fn def_kind_descr_article(self, def_kind: DefKind, def_id: DefId) -> &'static str { match def_kind { - DefKind::AssocFn if self.associated_item(def_id).fn_has_self_parameter => "a", + DefKind::AssocFn if self.associated_item(def_id).is_method() => "a", DefKind::Closure if let Some(coroutine_kind) = self.coroutine_kind(def_id) => { match coroutine_kind { hir::CoroutineKind::Desugared(hir::CoroutineDesugaring::Async, ..) => "an", @@ -905,7 +892,7 @@ impl<'tcx> TyCtxt<'tcx> { || self.extern_crate(key).is_some_and(|e| e.is_direct()) } - /// Expand any [weak alias types][weak] contained within the given `value`. + /// Expand any [free alias types][free] contained within the given `value`. /// /// This should be used over other normalization routines in situations where /// it's important not to normalize other alias types and where the predicates @@ -920,19 +907,19 @@ impl<'tcx> TyCtxt<'tcx> { ///
/// This delays a bug on overflow! Therefore you need to be certain that the /// contained types get fully normalized at a later stage. Note that even on - /// overflow all well-behaved weak alias types get expanded correctly, so the + /// overflow all well-behaved free alias types get expanded correctly, so the /// result is still useful. ///
/// - /// [weak]: ty::Weak - pub fn expand_weak_alias_tys>>(self, value: T) -> T { - value.fold_with(&mut WeakAliasTypeExpander { tcx: self, depth: 0 }) + /// [free]: ty::Free + pub fn expand_free_alias_tys>>(self, value: T) -> T { + value.fold_with(&mut FreeAliasTypeExpander { tcx: self, depth: 0 }) } - /// Peel off all [weak alias types] in this type until there are none left. + /// Peel off all [free alias types] in this type until there are none left. /// - /// This only expands weak alias types in “head” / outermost positions. It can - /// be used over [expand_weak_alias_tys] as an optimization in situations where + /// This only expands free alias types in “head” / outermost positions. It can + /// be used over [expand_free_alias_tys] as an optimization in situations where /// one only really cares about the *kind* of the final aliased type but not /// the types the other constituent types alias. /// @@ -941,17 +928,17 @@ impl<'tcx> TyCtxt<'tcx> { /// type gets fully normalized at a later stage. /// /// - /// [weak]: ty::Weak - /// [expand_weak_alias_tys]: Self::expand_weak_alias_tys - pub fn peel_off_weak_alias_tys(self, mut ty: Ty<'tcx>) -> Ty<'tcx> { - let ty::Alias(ty::Weak, _) = ty.kind() else { return ty }; + /// [free]: ty::Free + /// [expand_free_alias_tys]: Self::expand_free_alias_tys + pub fn peel_off_free_alias_tys(self, mut ty: Ty<'tcx>) -> Ty<'tcx> { + let ty::Alias(ty::Free, _) = ty.kind() else { return ty }; let limit = self.recursion_limit(); let mut depth = 0; - while let ty::Alias(ty::Weak, alias) = ty.kind() { + while let ty::Alias(ty::Free, alias) = ty.kind() { if !limit.value_within_limit(depth) { - let guar = self.dcx().delayed_bug("overflow expanding weak alias type"); + let guar = self.dcx().delayed_bug("overflow expanding free alias type"); return Ty::new_error(self, guar); } @@ -979,7 +966,9 @@ impl<'tcx> TyCtxt<'tcx> { } ty::AliasTermKind::OpaqueTy => Some(self.variances_of(def_id)), ty::AliasTermKind::InherentTy - | ty::AliasTermKind::WeakTy + | ty::AliasTermKind::InherentConst + | ty::AliasTermKind::FreeTy + | ty::AliasTermKind::FreeConst | ty::AliasTermKind::UnevaluatedConst | ty::AliasTermKind::ProjectionConst => None, } @@ -1072,25 +1061,25 @@ impl<'tcx> TypeFolder> for OpaqueTypeExpander<'tcx> { } } -struct WeakAliasTypeExpander<'tcx> { +struct FreeAliasTypeExpander<'tcx> { tcx: TyCtxt<'tcx>, depth: usize, } -impl<'tcx> TypeFolder> for WeakAliasTypeExpander<'tcx> { +impl<'tcx> TypeFolder> for FreeAliasTypeExpander<'tcx> { fn cx(&self) -> TyCtxt<'tcx> { self.tcx } fn fold_ty(&mut self, ty: Ty<'tcx>) -> Ty<'tcx> { - if !ty.has_type_flags(ty::TypeFlags::HAS_TY_WEAK) { + if !ty.has_type_flags(ty::TypeFlags::HAS_TY_FREE_ALIAS) { return ty; } - let ty::Alias(ty::Weak, alias) = ty.kind() else { + let ty::Alias(ty::Free, alias) = ty.kind() else { return ty.super_fold_with(self); }; if !self.tcx.recursion_limit().value_within_limit(self.depth) { - let guar = self.tcx.dcx().delayed_bug("overflow expanding weak alias type"); + let guar = self.tcx.dcx().delayed_bug("overflow expanding free alias type"); return Ty::new_error(self.tcx, guar); } @@ -1101,25 +1090,13 @@ impl<'tcx> TypeFolder> for WeakAliasTypeExpander<'tcx> { } fn fold_const(&mut self, ct: ty::Const<'tcx>) -> ty::Const<'tcx> { - if !ct.has_type_flags(ty::TypeFlags::HAS_TY_WEAK) { + if !ct.has_type_flags(ty::TypeFlags::HAS_TY_FREE_ALIAS) { return ct; } ct.super_fold_with(self) } } -/// Indicates the form of `AsyncDestruct::Destructor`. Used to simplify async -/// drop glue for types not using async drop. -#[derive(Clone, Copy, PartialEq, Eq, Debug)] -pub enum AsyncDropGlueMorphology { - /// Async destructor simply does nothing - Noop, - /// Async destructor simply runs `drop_in_place` - DeferredDropInPlace, - /// Async destructor has custom logic - Custom, -} - impl<'tcx> Ty<'tcx> { /// Returns the `Size` for primitive types (bool, uint, int, char, float). pub fn primitive_size(self, tcx: TyCtxt<'tcx>) -> Size { @@ -1289,16 +1266,17 @@ impl<'tcx> Ty<'tcx> { } } - /// Get morphology of the async drop glue, needed for types which do not - /// use async drop. To get async drop glue morphology for a definition see - /// [`TyCtxt::async_drop_glue_morphology`]. Used for `AsyncDestruct::Destructor` - /// type construction. - // - // FIXME: implement optimization to not instantiate a certain morphology of - // async drop glue too soon to allow per type optimizations, see array case - // for more info. Perhaps then remove this method and use `needs_(async_)drop` - // instead. - pub fn async_drop_glue_morphology(self, tcx: TyCtxt<'tcx>) -> AsyncDropGlueMorphology { + /// Checks whether values of this type `T` implement the `AsyncDrop` trait. + pub fn is_async_drop(self, tcx: TyCtxt<'tcx>, typing_env: ty::TypingEnv<'tcx>) -> bool { + !self.is_trivially_not_async_drop() + && tcx.is_async_drop_raw(typing_env.as_query_input(self)) + } + + /// Fast path helper for testing if a type is `AsyncDrop`. + /// + /// Returning true means the type is known to be `!AsyncDrop`. Returning + /// `false` means nothing -- could be `AsyncDrop`, might not be. + fn is_trivially_not_async_drop(self) -> bool { match self.kind() { ty::Int(_) | ty::Uint(_) @@ -1310,46 +1288,26 @@ impl<'tcx> Ty<'tcx> { | ty::Ref(..) | ty::RawPtr(..) | ty::FnDef(..) - | ty::FnPtr(..) - | ty::Infer(ty::FreshIntTy(_)) - | ty::Infer(ty::FreshFloatTy(_)) => AsyncDropGlueMorphology::Noop, - + | ty::Error(_) + | ty::FnPtr(..) => true, // FIXME(unsafe_binders): ty::UnsafeBinder(_) => todo!(), - - ty::Tuple(tys) if tys.is_empty() => AsyncDropGlueMorphology::Noop, - ty::Adt(adt_def, _) if adt_def.is_manually_drop() => AsyncDropGlueMorphology::Noop, - - // Foreign types can never have destructors. - ty::Foreign(_) => AsyncDropGlueMorphology::Noop, - - // FIXME: implement dynamic types async drops - ty::Error(_) | ty::Dynamic(..) => AsyncDropGlueMorphology::DeferredDropInPlace, - - ty::Tuple(_) | ty::Array(_, _) | ty::Slice(_) => { - // Assume worst-case scenario, because we can instantiate async - // destructors in different orders: - // - // 1. Instantiate [T; N] with T = String and N = 0 - // 2. Instantiate <[String; 0] as AsyncDestruct>::Destructor - // - // And viceversa, thus we cannot rely on String not using async - // drop or array having zero (0) elements - AsyncDropGlueMorphology::Custom - } - ty::Pat(ty, _) => ty.async_drop_glue_morphology(tcx), - - ty::Adt(adt_def, _) => tcx.async_drop_glue_morphology(adt_def.did()), - - ty::Closure(did, _) - | ty::CoroutineClosure(did, _) - | ty::Coroutine(did, _) - | ty::CoroutineWitness(did, _) => tcx.async_drop_glue_morphology(*did), - - ty::Alias(..) | ty::Param(_) | ty::Bound(..) | ty::Placeholder(..) | ty::Infer(_) => { - // No specifics, but would usually mean forwarding async drop glue - AsyncDropGlueMorphology::Custom + ty::Tuple(fields) => fields.iter().all(Self::is_trivially_not_async_drop), + ty::Pat(elem_ty, _) | ty::Slice(elem_ty) | ty::Array(elem_ty, _) => { + elem_ty.is_trivially_not_async_drop() } + ty::Adt(..) + | ty::Bound(..) + | ty::Closure(..) + | ty::CoroutineClosure(..) + | ty::Dynamic(..) + | ty::Foreign(_) + | ty::Coroutine(..) + | ty::CoroutineWitness(..) + | ty::Infer(_) + | ty::Alias(..) + | ty::Param(_) + | ty::Placeholder(_) => false, } } @@ -1395,9 +1353,6 @@ impl<'tcx> Ty<'tcx> { /// (Note that this implies that if `ty` has an async destructor attached, /// then `needs_async_drop` will definitely return `true` for `ty`.) /// - /// When constructing `AsyncDestruct::Destructor` type, use - /// [`Ty::async_drop_glue_morphology`] instead. - // // FIXME(zetanumbers): Note that this method is used to check eligible types // in unions. #[inline] @@ -1546,55 +1501,6 @@ impl<'tcx> Ty<'tcx> { } } -pub enum ExplicitSelf<'tcx> { - ByValue, - ByReference(ty::Region<'tcx>, hir::Mutability), - ByRawPointer(hir::Mutability), - ByBox, - Other, -} - -impl<'tcx> ExplicitSelf<'tcx> { - /// Categorizes an explicit self declaration like `self: SomeType` - /// into either `self`, `&self`, `&mut self`, `Box`, or - /// `Other`. - /// This is mainly used to require the arbitrary_self_types feature - /// in the case of `Other`, to improve error messages in the common cases, - /// and to make `Other` dyn-incompatible. - /// - /// Examples: - /// - /// ```ignore (illustrative) - /// impl<'a> Foo for &'a T { - /// // Legal declarations: - /// fn method1(self: &&'a T); // ExplicitSelf::ByReference - /// fn method2(self: &'a T); // ExplicitSelf::ByValue - /// fn method3(self: Box<&'a T>); // ExplicitSelf::ByBox - /// fn method4(self: Rc<&'a T>); // ExplicitSelf::Other - /// - /// // Invalid cases will be caught by `check_method_receiver`: - /// fn method_err1(self: &'a mut T); // ExplicitSelf::Other - /// fn method_err2(self: &'static T) // ExplicitSelf::ByValue - /// fn method_err3(self: &&T) // ExplicitSelf::ByReference - /// } - /// ``` - /// - pub fn determine
", "DIR"), + opt(Stable, Opt, "o", "", "Write output to FILENAME", ""), + opt(Stable, Opt, "", "out-dir", "Write output to compiler-chosen filename in DIR", ""), opt( Stable, Opt, "", "explain", "Provide a detailed explanation of an error message", - "OPT", + "", ), opt(Stable, Flag, "", "test", "Build a test harness", ""), - opt(Stable, Opt, "", "target", "Target triple for which the code is compiled", "TARGET"), - opt(Stable, Multi, "A", "allow", "Set lint allowed", "LINT"), - opt(Stable, Multi, "W", "warn", "Set lint warnings", "LINT"), - opt(Stable, Multi, "", "force-warn", "Set lint force-warn", "LINT"), - opt(Stable, Multi, "D", "deny", "Set lint denied", "LINT"), - opt(Stable, Multi, "F", "forbid", "Set lint forbidden", "LINT"), + opt(Stable, Opt, "", "target", "Target triple for which the code is compiled", ""), + opt(Stable, Multi, "A", "allow", "Set lint allowed", ""), + opt(Stable, Multi, "W", "warn", "Set lint warnings", ""), + opt(Stable, Multi, "", "force-warn", "Set lint force-warn", ""), + opt(Stable, Multi, "D", "deny", "Set lint denied", ""), + opt(Stable, Multi, "F", "forbid", "Set lint forbidden", ""), opt( Stable, Multi, "", "cap-lints", "Set the most restrictive lint level. More restrictive lints are capped at this level", - "LEVEL", + "", ), - opt(Stable, Multi, "C", "codegen", "Set a codegen option", "OPT[=VALUE]"), + opt(Stable, Multi, "C", "codegen", "Set a codegen option", "[=]"), opt(Stable, Flag, "V", "version", "Print version info and exit", ""), opt(Stable, Flag, "v", "verbose", "Use verbose output", ""), ]; @@ -1641,29 +1777,29 @@ pub fn rustc_optgroups() -> Vec { "", "extern", "Specify where an external rust library is located", - "NAME[=PATH]", + "[=]", ), - opt(Stable, Opt, "", "sysroot", "Override the system root", "PATH"), - opt(Unstable, Multi, "Z", "", "Set unstable / perma-unstable options", "FLAG"), + opt(Stable, Opt, "", "sysroot", "Override the system root", ""), + opt(Unstable, Multi, "Z", "", "Set unstable / perma-unstable options", ""), opt( Stable, Opt, "", "error-format", "How errors and other messages are produced", - "human|json|short", + "", ), - opt(Stable, Multi, "", "json", "Configure the JSON output of the compiler", "CONFIG"), + opt(Stable, Multi, "", "json", "Configure the JSON output of the compiler", ""), opt( Stable, Opt, "", "color", "Configure coloring of output: - auto = colorize, if output goes to a tty (default); - always = always colorize output; - never = never colorize output", - "auto|always|never", + * auto = colorize, if output goes to a tty (default); + * always = always colorize output; + * never = never colorize output", + "", ), opt( Stable, @@ -1671,7 +1807,7 @@ pub fn rustc_optgroups() -> Vec { "", "diagnostic-width", "Inform rustc of the width of the output so that diagnostics can be truncated to fit", - "WIDTH", + "", ), opt( Stable, @@ -1679,9 +1815,9 @@ pub fn rustc_optgroups() -> Vec { "", "remap-path-prefix", "Remap source names in all output (compiler messages and output files)", - "FROM=TO", + "=", ), - opt(Unstable, Multi, "", "env-set", "Inject an environment variable", "VAR=VALUE"), + opt(Unstable, Multi, "", "env-set", "Inject an environment variable", "="), ]; options.extend(verbose_only.into_iter().map(|mut opt| { opt.is_verbose_help_only = true; @@ -1698,7 +1834,7 @@ pub fn get_cmd_lint_options( let mut lint_opts_with_position = vec![]; let mut describe_lints = false; - for level in [lint::Allow, lint::Warn, lint::ForceWarn(None), lint::Deny, lint::Forbid] { + for level in [lint::Allow, lint::Warn, lint::ForceWarn, lint::Deny, lint::Forbid] { for (arg_pos, lint_name) in matches.opt_strs_pos(level.as_str()) { if lint_name == "help" { describe_lints = true; @@ -2040,7 +2176,8 @@ fn collect_print_requests( check_print_request_stability(early_dcx, unstable_opts, (print_name, *print_kind)); *print_kind } else { - emit_unknown_print_request_help(early_dcx, req) + let is_nightly = nightly_options::match_is_nightly_build(matches); + emit_unknown_print_request_help(early_dcx, req, is_nightly) }; let out = out.unwrap_or(OutFileName::Stdout); @@ -2064,24 +2201,37 @@ fn check_print_request_stability( unstable_opts: &UnstableOptions, (print_name, print_kind): (&str, PrintKind), ) { + if !is_print_request_stable(print_kind) && !unstable_opts.unstable_options { + early_dcx.early_fatal(format!( + "the `-Z unstable-options` flag must also be passed to enable the `{print_name}` \ + print option" + )); + } +} + +fn is_print_request_stable(print_kind: PrintKind) -> bool { match print_kind { PrintKind::AllTargetSpecsJson | PrintKind::CheckCfg + | PrintKind::CrateRootLintLevels | PrintKind::SupportedCrateTypes - | PrintKind::TargetSpecJson - if !unstable_opts.unstable_options => - { - early_dcx.early_fatal(format!( - "the `-Z unstable-options` flag must also be passed to enable the `{print_name}` \ - print option" - )); - } - _ => {} + | PrintKind::TargetSpecJson => false, + _ => true, } } -fn emit_unknown_print_request_help(early_dcx: &EarlyDiagCtxt, req: &str) -> ! { - let prints = PRINT_KINDS.iter().map(|(name, _)| format!("`{name}`")).collect::>(); +fn emit_unknown_print_request_help(early_dcx: &EarlyDiagCtxt, req: &str, is_nightly: bool) -> ! { + let prints = PRINT_KINDS + .iter() + .filter_map(|(name, kind)| { + // If we're not on nightly, we don't want to print unstable options + if !is_nightly && !is_print_request_stable(*kind) { + None + } else { + Some(format!("`{name}`")) + } + }) + .collect::>(); let prints = prints.join(", "); let mut diag = early_dcx.early_struct_fatal(format!("unknown print request: `{req}`")); @@ -2181,44 +2331,11 @@ pub fn parse_externs( matches: &getopts::Matches, unstable_opts: &UnstableOptions, ) -> Externs { - fn is_ascii_ident(string: &str) -> bool { - let mut chars = string.chars(); - if let Some(start) = chars.next() - && (start.is_ascii_alphabetic() || start == '_') - { - chars.all(|char| char.is_ascii_alphanumeric() || char == '_') - } else { - false - } - } - let is_unstable_enabled = unstable_opts.unstable_options; let mut externs: BTreeMap = BTreeMap::new(); for arg in matches.opt_strs("extern") { - let (name, path) = match arg.split_once('=') { - None => (arg, None), - Some((name, path)) => (name.to_string(), Some(Path::new(path))), - }; - let (options, name) = match name.split_once(':') { - None => (None, name), - Some((opts, name)) => (Some(opts), name.to_string()), - }; - - if !is_ascii_ident(&name) { - let mut error = early_dcx.early_struct_fatal(format!( - "crate name `{name}` passed to `--extern` is not a valid ASCII identifier" - )); - let adjusted_name = name.replace('-', "_"); - if is_ascii_ident(&adjusted_name) { - #[allow(rustc::diagnostic_outside_of_impl)] // FIXME - error.help(format!( - "consider replacing the dashes with underscores: `{adjusted_name}`" - )); - } - error.emit(); - } - - let path = path.map(|p| CanonicalizedPath::new(p)); + let ExternOpt { crate_name: name, path, options } = + split_extern_opt(early_dcx, unstable_opts, &arg).unwrap_or_else(|e| e.emit()); let entry = externs.entry(name.to_owned()); @@ -2226,6 +2343,7 @@ pub fn parse_externs( let entry = if let Some(path) = path { // --extern prelude_name=some_file.rlib + let path = CanonicalizedPath::new(path); match entry { Entry::Vacant(vacant) => { let files = BTreeSet::from_iter(iter::once(path)); @@ -2698,7 +2816,7 @@ pub fn make_crate_type_option() -> RustcOptGroup { "crate-type", "Comma separated list of types of crates for the compiler to emit", - "[bin|lib|rlib|dylib|cdylib|staticlib|proc-macro]", + "", ) } @@ -2714,6 +2832,7 @@ pub fn parse_crate_types_from_list(list_list: Vec) -> Result CrateType::Cdylib, "bin" => CrateType::Executable, "proc-macro" => CrateType::ProcMacro, + "sdylib" => CrateType::Sdylib, _ => { return Err(format!( "unknown crate type: `{part}`, expected one of: \ @@ -2811,6 +2930,7 @@ impl fmt::Display for CrateType { CrateType::Staticlib => "staticlib".fmt(f), CrateType::Cdylib => "cdylib".fmt(f), CrateType::ProcMacro => "proc-macro".fmt(f), + CrateType::Sdylib => "sdylib".fmt(f), } } } diff --git a/compiler/rustc_session/src/config/cfg.rs b/compiler/rustc_session/src/config/cfg.rs index 231ca434962e6..cbfe9e0da6adf 100644 --- a/compiler/rustc_session/src/config/cfg.rs +++ b/compiler/rustc_session/src/config/cfg.rs @@ -142,6 +142,10 @@ pub(crate) fn disallow_cfgs(sess: &Session, user_cfgs: &Cfg) { | (sym::target_has_atomic, Some(_)) | (sym::target_has_atomic_equal_alignment, Some(_)) | (sym::target_has_atomic_load_store, Some(_)) + | (sym::target_has_reliable_f16, None | Some(_)) + | (sym::target_has_reliable_f16_math, None | Some(_)) + | (sym::target_has_reliable_f128, None | Some(_)) + | (sym::target_has_reliable_f128_math, None | Some(_)) | (sym::target_thread_local, None) => disallow(cfg, "--target"), (sym::fmt_debug, None | Some(_)) => disallow(cfg, "-Z fmt-debug"), (sym::emscripten_wasm_eh, None | Some(_)) => disallow(cfg, "-Z emscripten_wasm_eh"), diff --git a/compiler/rustc_session/src/config/externs.rs b/compiler/rustc_session/src/config/externs.rs new file mode 100644 index 0000000000000..1420ee38bf214 --- /dev/null +++ b/compiler/rustc_session/src/config/externs.rs @@ -0,0 +1,79 @@ +//! This module contains code to help parse and manipulate `--extern` arguments. + +use std::path::PathBuf; + +use rustc_errors::{Diag, FatalAbort}; + +use super::UnstableOptions; +use crate::EarlyDiagCtxt; + +#[cfg(test)] +mod tests; + +/// Represents the pieces of an `--extern` argument. +pub(crate) struct ExternOpt { + pub(crate) crate_name: String, + pub(crate) path: Option, + pub(crate) options: Option, +} + +/// Breaks out the major components of an `--extern` argument. +/// +/// The options field will be a string containing comma-separated options that will need further +/// parsing and processing. +pub(crate) fn split_extern_opt<'a>( + early_dcx: &'a EarlyDiagCtxt, + unstable_opts: &UnstableOptions, + extern_opt: &str, +) -> Result> { + let (name, path) = match extern_opt.split_once('=') { + None => (extern_opt.to_string(), None), + Some((name, path)) => (name.to_string(), Some(PathBuf::from(path))), + }; + let (options, crate_name) = match name.split_once(':') { + None => (None, name), + Some((opts, crate_name)) => { + if unstable_opts.namespaced_crates && crate_name.starts_with(':') { + // If the name starts with `:`, we know this was actually something like `foo::bar` and + // not a set of options. We can just use the original name as the crate name. + (None, name) + } else { + (Some(opts.to_string()), crate_name.to_string()) + } + } + }; + + if !valid_crate_name(&crate_name, unstable_opts) { + let mut error = early_dcx.early_struct_fatal(format!( + "crate name `{crate_name}` passed to `--extern` is not a valid ASCII identifier" + )); + let adjusted_name = crate_name.replace('-', "_"); + if is_ascii_ident(&adjusted_name) { + #[allow(rustc::diagnostic_outside_of_impl)] // FIXME + error + .help(format!("consider replacing the dashes with underscores: `{adjusted_name}`")); + } + return Err(error); + } + + Ok(ExternOpt { crate_name, path, options }) +} + +fn valid_crate_name(name: &str, unstable_opts: &UnstableOptions) -> bool { + match name.split_once("::") { + Some((a, b)) if unstable_opts.namespaced_crates => is_ascii_ident(a) && is_ascii_ident(b), + Some(_) => false, + None => is_ascii_ident(name), + } +} + +fn is_ascii_ident(string: &str) -> bool { + let mut chars = string.chars(); + if let Some(start) = chars.next() + && (start.is_ascii_alphabetic() || start == '_') + { + chars.all(|char| char.is_ascii_alphanumeric() || char == '_') + } else { + false + } +} diff --git a/compiler/rustc_session/src/config/externs/tests.rs b/compiler/rustc_session/src/config/externs/tests.rs new file mode 100644 index 0000000000000..6544886951572 --- /dev/null +++ b/compiler/rustc_session/src/config/externs/tests.rs @@ -0,0 +1,92 @@ +use std::path::PathBuf; + +use super::split_extern_opt; +use crate::EarlyDiagCtxt; +use crate::config::UnstableOptions; + +/// Verifies split_extern_opt handles the supported cases. +#[test] +fn test_split_extern_opt() { + let early_dcx = EarlyDiagCtxt::new(<_>::default()); + let unstable_opts = &UnstableOptions::default(); + + let extern_opt = + split_extern_opt(&early_dcx, unstable_opts, "priv,noprelude:foo=libbar.rlib").unwrap(); + assert_eq!(extern_opt.crate_name, "foo"); + assert_eq!(extern_opt.path, Some(PathBuf::from("libbar.rlib"))); + assert_eq!(extern_opt.options, Some("priv,noprelude".to_string())); + + let extern_opt = split_extern_opt(&early_dcx, unstable_opts, "priv,noprelude:foo").unwrap(); + assert_eq!(extern_opt.crate_name, "foo"); + assert_eq!(extern_opt.path, None); + assert_eq!(extern_opt.options, Some("priv,noprelude".to_string())); + + let extern_opt = split_extern_opt(&early_dcx, unstable_opts, "foo=libbar.rlib").unwrap(); + assert_eq!(extern_opt.crate_name, "foo"); + assert_eq!(extern_opt.path, Some(PathBuf::from("libbar.rlib"))); + assert_eq!(extern_opt.options, None); + + let extern_opt = split_extern_opt(&early_dcx, unstable_opts, "foo").unwrap(); + assert_eq!(extern_opt.crate_name, "foo"); + assert_eq!(extern_opt.path, None); + assert_eq!(extern_opt.options, None); +} + +/// Tests some invalid cases for split_extern_opt. +#[test] +fn test_split_extern_opt_invalid() { + let early_dcx = EarlyDiagCtxt::new(<_>::default()); + let unstable_opts = &UnstableOptions::default(); + + // too many `:`s + let result = split_extern_opt(&early_dcx, unstable_opts, "priv:noprelude:foo=libbar.rlib"); + assert!(result.is_err()); + let _ = result.map_err(|e| e.cancel()); + + // can't nest externs without the unstable flag + let result = split_extern_opt(&early_dcx, unstable_opts, "noprelude:foo::bar=libbar.rlib"); + assert!(result.is_err()); + let _ = result.map_err(|e| e.cancel()); +} + +/// Tests some cases for split_extern_opt with nested crates like `foo::bar`. +#[test] +fn test_split_extern_opt_nested() { + let early_dcx = EarlyDiagCtxt::new(<_>::default()); + let unstable_opts = &UnstableOptions { namespaced_crates: true, ..Default::default() }; + + let extern_opt = + split_extern_opt(&early_dcx, unstable_opts, "priv,noprelude:foo::bar=libbar.rlib").unwrap(); + assert_eq!(extern_opt.crate_name, "foo::bar"); + assert_eq!(extern_opt.path, Some(PathBuf::from("libbar.rlib"))); + assert_eq!(extern_opt.options, Some("priv,noprelude".to_string())); + + let extern_opt = + split_extern_opt(&early_dcx, unstable_opts, "priv,noprelude:foo::bar").unwrap(); + assert_eq!(extern_opt.crate_name, "foo::bar"); + assert_eq!(extern_opt.path, None); + assert_eq!(extern_opt.options, Some("priv,noprelude".to_string())); + + let extern_opt = split_extern_opt(&early_dcx, unstable_opts, "foo::bar=libbar.rlib").unwrap(); + assert_eq!(extern_opt.crate_name, "foo::bar"); + assert_eq!(extern_opt.path, Some(PathBuf::from("libbar.rlib"))); + assert_eq!(extern_opt.options, None); + + let extern_opt = split_extern_opt(&early_dcx, unstable_opts, "foo::bar").unwrap(); + assert_eq!(extern_opt.crate_name, "foo::bar"); + assert_eq!(extern_opt.path, None); + assert_eq!(extern_opt.options, None); +} + +/// Tests some invalid cases for split_extern_opt with nested crates like `foo::bar`. +#[test] +fn test_split_extern_opt_nested_invalid() { + let early_dcx = EarlyDiagCtxt::new(<_>::default()); + let unstable_opts = &UnstableOptions { namespaced_crates: true, ..Default::default() }; + + // crates can only be nested one deep. + let result = + split_extern_opt(&early_dcx, unstable_opts, "priv,noprelude:foo::bar::baz=libbar.rlib"); + assert!(result.is_err()); + let _ = result.map_err(|e| e.cancel()); +} diff --git a/compiler/rustc_session/src/cstore.rs b/compiler/rustc_session/src/cstore.rs index c8a5c22ad1230..4cfc745dec28a 100644 --- a/compiler/rustc_session/src/cstore.rs +++ b/compiler/rustc_session/src/cstore.rs @@ -27,6 +27,7 @@ pub struct CrateSource { pub dylib: Option<(PathBuf, PathKind)>, pub rlib: Option<(PathBuf, PathKind)>, pub rmeta: Option<(PathBuf, PathKind)>, + pub sdylib_interface: Option<(PathBuf, PathKind)>, } impl CrateSource { diff --git a/compiler/rustc_session/src/errors.rs b/compiler/rustc_session/src/errors.rs index 71d8dbe44fed0..bf95014843d23 100644 --- a/compiler/rustc_session/src/errors.rs +++ b/compiler/rustc_session/src/errors.rs @@ -1,4 +1,4 @@ -use std::num::NonZero; +use std::num::{NonZero, ParseIntError}; use rustc_ast::token; use rustc_ast::util::literal::LitError; @@ -14,6 +14,14 @@ use rustc_target::spec::{SplitDebuginfo, StackProtector, TargetTuple}; use crate::config::CrateType; use crate::parse::ParseSess; +#[derive(Diagnostic)] +pub(crate) enum AppleDeploymentTarget { + #[diag(session_apple_deployment_target_invalid)] + Invalid { env_var: &'static str, error: ParseIntError }, + #[diag(session_apple_deployment_target_too_low)] + TooLow { env_var: &'static str, version: String, os_min: String }, +} + pub(crate) struct FeatureGateError { pub(crate) span: MultiSpan, pub(crate) explain: DiagMessage, @@ -147,6 +155,10 @@ pub(crate) struct SanitizerCfiGeneralizePointersRequiresCfi; #[diag(session_sanitizer_cfi_normalize_integers_requires_cfi)] pub(crate) struct SanitizerCfiNormalizeIntegersRequiresCfi; +#[derive(Diagnostic)] +#[diag(session_sanitizer_kcfi_arity_requires_kcfi)] +pub(crate) struct SanitizerKcfiArityRequiresKcfi; + #[derive(Diagnostic)] #[diag(session_sanitizer_kcfi_requires_panic_abort)] pub(crate) struct SanitizerKcfiRequiresPanicAbort; diff --git a/compiler/rustc_session/src/lib.rs b/compiler/rustc_session/src/lib.rs index 0e19b982a133e..ec8e9898dc71e 100644 --- a/compiler/rustc_session/src/lib.rs +++ b/compiler/rustc_session/src/lib.rs @@ -1,8 +1,8 @@ // tidy-alphabetical-start #![allow(internal_features)] +#![cfg_attr(bootstrap, feature(let_chains))] #![feature(default_field_values)] #![feature(iter_intersperse)] -#![feature(let_chains)] #![feature(rustc_attrs)] // To generate CodegenOptionsTargetModifiers and UnstableOptionsTargetModifiers enums // with macro_rules, it is necessary to use recursive mechanic ("Incremental TT Munchers"). diff --git a/compiler/rustc_session/src/options.rs b/compiler/rustc_session/src/options.rs index 4cc666b3e37d2..b95ebfbe89f24 100644 --- a/compiler/rustc_session/src/options.rs +++ b/compiler/rustc_session/src/options.rs @@ -711,7 +711,7 @@ mod desc { pub(crate) const parse_list: &str = "a space-separated list of strings"; pub(crate) const parse_list_with_polarity: &str = "a comma-separated list of strings, with elements beginning with + or -"; - pub(crate) const parse_autodiff: &str = "a comma separated list of settings: `Enable`, `PrintSteps`, `PrintTA`, `PrintAA`, `PrintPerf`, `PrintModBefore`, `PrintModAfter`, `LooseTypes`, `Inline`"; + pub(crate) const parse_autodiff: &str = "a comma separated list of settings: `Enable`, `PrintSteps`, `PrintTA`, `PrintAA`, `PrintPerf`, `PrintModBefore`, `PrintModAfter`, `PrintModFinal`, `PrintPasses`, `NoPostopt`, `LooseTypes`, `Inline`"; pub(crate) const parse_comma_list: &str = "a comma-separated list of strings"; pub(crate) const parse_opt_comma_list: &str = parse_comma_list; pub(crate) const parse_number: &str = "a number"; @@ -1359,6 +1359,9 @@ pub mod parse { "PrintSteps" => AutoDiff::PrintSteps, "PrintModBefore" => AutoDiff::PrintModBefore, "PrintModAfter" => AutoDiff::PrintModAfter, + "PrintModFinal" => AutoDiff::PrintModFinal, + "NoPostopt" => AutoDiff::NoPostopt, + "PrintPasses" => AutoDiff::PrintPasses, "LooseTypes" => AutoDiff::LooseTypes, "Inline" => AutoDiff::Inline, _ => { @@ -1955,6 +1958,9 @@ options! { "allow the linker to link its default libraries (default: no)"), dlltool: Option = (None, parse_opt_pathbuf, [UNTRACKED], "import library generation tool (ignored except when targeting windows-gnu)"), + #[rustc_lint_opt_deny_field_access("use `Session::dwarf_version` instead of this field")] + dwarf_version: Option = (None, parse_opt_number, [TRACKED], + "version of DWARF debug information to emit (default: 2 or 4, depending on platform)"), embed_bitcode: bool = (true, parse_bool, [TRACKED], "emit bitcode in rlibs (default: yes)"), extra_filename: String = (String::new(), parse_string, [UNTRACKED], @@ -2093,6 +2099,9 @@ options! { `=PrintSteps` `=PrintModBefore` `=PrintModAfter` + `=PrintModFinal` + `=PrintPasses`, + `=NoPostopt` `=LooseTypes` `=Inline` Multiple options can be combined with commas."), @@ -2104,6 +2113,8 @@ options! { "emit noalias metadata for box (default: yes)"), branch_protection: Option = (None, parse_branch_protection, [TRACKED], "set options for branch target identification and pointer authentication on AArch64"), + build_sdylib_interface: bool = (false, parse_bool, [UNTRACKED], + "whether the stable interface is being built"), cf_protection: CFProtection = (CFProtection::None, parse_cfprotection, [TRACKED], "instrument control-flow architecture protection"), check_cfg_all_expected: bool = (false, parse_bool, [UNTRACKED], @@ -2175,6 +2186,8 @@ options! { them only if an error has not been emitted"), ehcont_guard: bool = (false, parse_bool, [TRACKED], "generate Windows EHCont Guard tables"), + embed_metadata: bool = (true, parse_bool, [TRACKED], + "embed metadata in rlibs and dylibs (default: yes)"), embed_source: bool = (false, parse_bool, [TRACKED], "embed source text in DWARF debug sections (default: no)"), emit_stack_sizes: bool = (false, parse_bool, [UNTRACKED], @@ -2185,6 +2198,8 @@ options! { "Use WebAssembly error handling for wasm32-unknown-emscripten"), enforce_type_length_limit: bool = (false, parse_bool, [TRACKED], "enforce the type length limit when monomorphizing instances in codegen"), + experimental_default_bounds: bool = (false, parse_bool, [TRACKED], + "enable default bounds for experimental group of auto traits"), export_executable_symbols: bool = (false, parse_bool, [TRACKED], "export symbols from executables, as if they were dynamic libraries"), external_clangrt: bool = (false, parse_bool, [UNTRACKED], @@ -2195,7 +2210,7 @@ options! { fewer_names: Option = (None, parse_opt_bool, [TRACKED], "reduce memory use by retaining fewer names within compilation artifacts (LLVM-IR) \ (default: no)"), - fixed_x18: bool = (false, parse_bool, [TRACKED], + fixed_x18: bool = (false, parse_bool, [TRACKED TARGET_MODIFIER], "make the x18 register reserved on AArch64 (default: no)"), flatten_format_args: bool = (true, parse_bool, [TRACKED], "flatten nested format_args!() and literals into a simplified format_args!() call \ @@ -2313,18 +2328,20 @@ options! { mir_include_spans: MirIncludeSpans = (MirIncludeSpans::default(), parse_mir_include_spans, [UNTRACKED], "include extra comments in mir pretty printing, like line numbers and statement indices, \ details about types, etc. (boolean for all passes, 'nll' to enable in NLL MIR only, default: 'nll')"), - mir_keep_place_mention: bool = (false, parse_bool, [TRACKED], - "keep place mention MIR statements, interpreted e.g., by miri; implies -Zmir-opt-level=0 \ - (default: no)"), #[rustc_lint_opt_deny_field_access("use `Session::mir_opt_level` instead of this field")] mir_opt_level: Option = (None, parse_opt_number, [TRACKED], "MIR optimization level (0-4; default: 1 in non optimized builds and 2 in optimized builds)"), + mir_preserve_ub: bool = (false, parse_bool, [TRACKED], + "keep place mention statements and reads in trivial SwitchInt terminators, which are interpreted \ + e.g., by miri; implies -Zmir-opt-level=0 (default: no)"), mir_strip_debuginfo: MirStripDebugInfo = (MirStripDebugInfo::None, parse_mir_strip_debuginfo, [TRACKED], "Whether to remove some of the MIR debug info from methods. Default: None"), move_size_limit: Option = (None, parse_opt_number, [TRACKED], "the size at which the `large_assignments` lint starts to be emitted"), mutable_noalias: bool = (true, parse_bool, [TRACKED], "emit noalias metadata for mutable references (default: yes)"), + namespaced_crates: bool = (false, parse_bool, [TRACKED], + "allow crates to be namespaced by other crates (default: no)"), next_solver: NextSolverConfig = (NextSolverConfig::default(), parse_next_solver_config, [TRACKED], "enable and configure the next generation trait solver used by rustc"), nll_facts: bool = (false, parse_bool, [UNTRACKED], @@ -2439,6 +2456,8 @@ written to standard error output)"), "enable normalizing integer types (default: no)"), sanitizer_dataflow_abilist: Vec = (Vec::new(), parse_comma_list, [TRACKED], "additional ABI list files that control how shadow parameters are passed (comma separated)"), + sanitizer_kcfi_arity: Option = (None, parse_opt_bool, [TRACKED], + "enable KCFI arity indicator (default: no)"), sanitizer_memory_track_origins: usize = (0, parse_sanitizer_memory_track_origins, [TRACKED], "enable origins tracking in MemorySanitizer"), sanitizer_recover: SanitizerSet = (SanitizerSet::empty(), parse_sanitizers, [TRACKED], @@ -2551,6 +2570,9 @@ written to standard error output)"), "in diagnostics, use heuristics to shorten paths referring to items"), tune_cpu: Option = (None, parse_opt_string, [TRACKED], "select processor to schedule for (`rustc --print target-cpus` for details)"), + #[rustc_lint_opt_deny_field_access("use `TyCtxt::use_typing_mode_borrowck` instead of this field")] + typing_mode_borrowck: bool = (false, parse_bool, [TRACKED], + "enable `TypingMode::Borrowck`, changing the way opaque types are handled during MIR borrowck"), #[rustc_lint_opt_deny_field_access("use `Session::ub_checks` instead of this field")] ub_checks: Option = (None, parse_opt_bool, [TRACKED], "emit runtime checks for Undefined Behavior (default: -Cdebug-assertions)"), diff --git a/compiler/rustc_session/src/output.rs b/compiler/rustc_session/src/output.rs index a24919e434cc5..cba70b5bd5d17 100644 --- a/compiler/rustc_session/src/output.rs +++ b/compiler/rustc_session/src/output.rs @@ -98,7 +98,7 @@ pub fn filename_for_input( CrateType::Rlib => { OutFileName::Real(outputs.out_directory.join(&format!("lib{libname}.rlib"))) } - CrateType::Cdylib | CrateType::ProcMacro | CrateType::Dylib => { + CrateType::Cdylib | CrateType::ProcMacro | CrateType::Dylib | CrateType::Sdylib => { let (prefix, suffix) = (&sess.target.dll_prefix, &sess.target.dll_suffix); OutFileName::Real(outputs.out_directory.join(&format!("{prefix}{libname}{suffix}"))) } @@ -167,6 +167,7 @@ pub const CRATE_TYPES: &[(Symbol, CrateType)] = &[ (sym::staticlib, CrateType::Staticlib), (sym::proc_dash_macro, CrateType::ProcMacro), (sym::bin, CrateType::Executable), + (sym::sdylib, CrateType::Sdylib), ]; pub fn categorize_crate_type(s: Symbol) -> Option { @@ -177,9 +178,21 @@ pub fn collect_crate_types(session: &Session, attrs: &[ast::Attribute]) -> Vec, } #[derive(PartialEq, Eq, PartialOrd, Ord)] @@ -381,6 +391,10 @@ impl Session { self.opts.unstable_opts.sanitizer_cfi_normalize_integers == Some(true) } + pub fn is_sanitizer_kcfi_arity_enabled(&self) -> bool { + self.opts.unstable_opts.sanitizer_kcfi_arity == Some(true) + } + pub fn is_sanitizer_kcfi_enabled(&self) -> bool { self.opts.unstable_opts.sanitizer.contains(SanitizerSet::KCFI) } @@ -750,7 +764,11 @@ impl Session { /// Returns the DWARF version passed on the CLI or the default for the target. pub fn dwarf_version(&self) -> u32 { - self.opts.unstable_opts.dwarf_version.unwrap_or(self.target.default_dwarf_version) + self.opts + .cg + .dwarf_version + .or(self.opts.unstable_opts.dwarf_version) + .unwrap_or(self.target.default_dwarf_version) } pub fn stack_protector(&self) -> StackProtector { @@ -891,6 +909,45 @@ impl Session { FileNameDisplayPreference::Local } } + + /// Get the deployment target on Apple platforms based on the standard environment variables, + /// or fall back to the minimum version supported by `rustc`. + /// + /// This should be guarded behind `if sess.target.is_like_darwin`. + pub fn apple_deployment_target(&self) -> apple::OSVersion { + let min = apple::OSVersion::minimum_deployment_target(&self.target); + let env_var = apple::deployment_target_env_var(&self.target.os); + + // FIXME(madsmtm): Track changes to this. + if let Ok(deployment_target) = env::var(env_var) { + match apple::OSVersion::from_str(&deployment_target) { + Ok(version) => { + let os_min = apple::OSVersion::os_minimum_deployment_target(&self.target.os); + // It is common that the deployment target is set a bit too low, for example on + // macOS Aarch64 to also target older x86_64. So we only want to warn when variable + // is lower than the minimum OS supported by rustc, not when the variable is lower + // than the minimum for a specific target. + if version < os_min { + self.dcx().emit_warn(errors::AppleDeploymentTarget::TooLow { + env_var, + version: version.fmt_pretty().to_string(), + os_min: os_min.fmt_pretty().to_string(), + }); + } + + // Raise the deployment target to the minimum supported. + version.max(min) + } + Err(error) => { + self.dcx().emit_err(errors::AppleDeploymentTarget::Invalid { env_var, error }); + min + } + } + } else { + // If no deployment target variable is set, default to the minimum found above. + min + } + } } // JUSTIFICATION: part of session construction @@ -1074,6 +1131,12 @@ pub fn build_session( let target_filesearch = filesearch::FileSearch::new(&sopts.search_paths, &target_tlib_path, &target); let host_filesearch = filesearch::FileSearch::new(&sopts.search_paths, &host_tlib_path, &host); + + let invocation_temp = sopts + .incremental + .as_ref() + .map(|_| rng().next_u32().to_base_fixed_len(CASE_INSENSITIVE).to_string()); + let sess = Session { target, host, @@ -1097,6 +1160,7 @@ pub fn build_session( expanded_args, target_filesearch, host_filesearch, + invocation_temp, }; validate_commandline_args_with_session_available(&sess); @@ -1211,6 +1275,11 @@ fn validate_commandline_args_with_session_available(sess: &Session) { } } + // KCFI arity indicator requires KCFI. + if sess.is_sanitizer_kcfi_arity_enabled() && !sess.is_sanitizer_kcfi_enabled() { + sess.dcx().emit_err(errors::SanitizerKcfiArityRequiresKcfi); + } + // LLVM CFI pointer generalization requires CFI or KCFI. if sess.is_sanitizer_cfi_generalize_pointers_enabled() { if !(sess.is_sanitizer_cfi_enabled() || sess.is_sanitizer_kcfi_enabled()) { @@ -1262,7 +1331,9 @@ fn validate_commandline_args_with_session_available(sess: &Session) { sess.dcx().emit_err(errors::BranchProtectionRequiresAArch64); } - if let Some(dwarf_version) = sess.opts.unstable_opts.dwarf_version { + if let Some(dwarf_version) = + sess.opts.cg.dwarf_version.or(sess.opts.unstable_opts.dwarf_version) + { // DWARF 1 is not supported by LLVM and DWARF 6 is not yet finalized. if dwarf_version < 2 || dwarf_version > 5 { sess.dcx().emit_err(errors::UnsupportedDwarfVersion { dwarf_version }); diff --git a/compiler/rustc_session/src/utils.rs b/compiler/rustc_session/src/utils.rs index 2243e831b66ec..e9ddd66b5e8b3 100644 --- a/compiler/rustc_session/src/utils.rs +++ b/compiler/rustc_session/src/utils.rs @@ -1,4 +1,4 @@ -use std::path::{Path, PathBuf}; +use std::path::PathBuf; use std::sync::OnceLock; use rustc_data_structures::profiling::VerboseTimingGuard; @@ -104,8 +104,8 @@ pub struct CanonicalizedPath { } impl CanonicalizedPath { - pub fn new(path: &Path) -> Self { - Self { original: path.to_owned(), canonicalized: try_canonicalize(path).ok() } + pub fn new(path: PathBuf) -> Self { + Self { canonicalized: try_canonicalize(&path).ok(), original: path } } pub fn canonicalized(&self) -> &PathBuf { diff --git a/compiler/rustc_smir/Cargo.toml b/compiler/rustc_smir/Cargo.toml index b2149a03a8ef3..fc9f411ac3cd4 100644 --- a/compiler/rustc_smir/Cargo.toml +++ b/compiler/rustc_smir/Cargo.toml @@ -14,6 +14,6 @@ rustc_session = { path = "../rustc_session" } rustc_span = { path = "../rustc_span" } rustc_target = { path = "../rustc_target" } scoped-tls = "1.0" -stable_mir = {path = "../stable_mir" } +serde = { version = "1.0.125", features = [ "derive" ] } tracing = "0.1" # tidy-alphabetical-end diff --git a/compiler/rustc_smir/src/lib.rs b/compiler/rustc_smir/src/lib.rs index eaba14bbf30f5..771ff98d58d5a 100644 --- a/compiler/rustc_smir/src/lib.rs +++ b/compiler/rustc_smir/src/lib.rs @@ -9,7 +9,6 @@ // tidy-alphabetical-start #![allow(internal_features)] #![allow(rustc::usage_of_ty_tykind)] -#![cfg_attr(doc, recursion_limit = "256")] // FIXME(nnethercote): will be removed by #124141 #![doc( html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/", test(attr(allow(unused_variables), deny(warnings))) @@ -20,5 +19,6 @@ pub mod rustc_internal; -// Make this module private for now since external users should not call these directly. -mod rustc_smir; +pub mod rustc_smir; + +pub mod stable_mir; diff --git a/compiler/rustc_smir/src/rustc_internal/internal.rs b/compiler/rustc_smir/src/rustc_internal/internal.rs index bb2b2dea2f37f..6e13b87c41d73 100644 --- a/compiler/rustc_smir/src/rustc_internal/internal.rs +++ b/compiler/rustc_smir/src/rustc_internal/internal.rs @@ -21,6 +21,7 @@ use stable_mir::{CrateItem, CrateNum, DefId}; use super::RustcInternal; use crate::rustc_smir::Tables; +use crate::stable_mir; impl RustcInternal for CrateItem { type T<'tcx> = rustc_span::def_id::DefId; @@ -490,7 +491,6 @@ impl RustcInternal for Abi { Abi::CCmseNonSecureCall => rustc_abi::ExternAbi::CCmseNonSecureCall, Abi::CCmseNonSecureEntry => rustc_abi::ExternAbi::CCmseNonSecureEntry, Abi::System { unwind } => rustc_abi::ExternAbi::System { unwind }, - Abi::RustIntrinsic => rustc_abi::ExternAbi::RustIntrinsic, Abi::RustCall => rustc_abi::ExternAbi::RustCall, Abi::Unadjusted => rustc_abi::ExternAbi::Unadjusted, Abi::RustCold => rustc_abi::ExternAbi::RustCold, diff --git a/compiler/rustc_smir/src/rustc_internal/mod.rs b/compiler/rustc_smir/src/rustc_internal/mod.rs index ad38ea228bf53..2982a920b03d1 100644 --- a/compiler/rustc_smir/src/rustc_internal/mod.rs +++ b/compiler/rustc_smir/src/rustc_internal/mod.rs @@ -18,10 +18,12 @@ use rustc_span::def_id::{CrateNum, DefId}; use scoped_tls::scoped_thread_local; use stable_mir::Error; use stable_mir::abi::Layout; +use stable_mir::compiler_interface::SmirInterface; use stable_mir::ty::IndexedVal; -use crate::rustc_smir::context::TablesWrapper; +use crate::rustc_smir::context::SmirCtxt; use crate::rustc_smir::{Stable, Tables}; +use crate::stable_mir; mod internal; pub mod pretty; @@ -147,6 +149,14 @@ impl<'tcx> Tables<'tcx> { stable_mir::ty::CoroutineWitnessDef(self.create_def_id(did)) } + pub fn assoc_def(&mut self, did: DefId) -> stable_mir::ty::AssocDef { + stable_mir::ty::AssocDef(self.create_def_id(did)) + } + + pub fn opaque_def(&mut self, did: DefId) -> stable_mir::ty::OpaqueDef { + stable_mir::ty::OpaqueDef(self.create_def_id(did)) + } + pub fn prov(&mut self, aid: AllocId) -> stable_mir::ty::Prov { stable_mir::ty::Prov(self.create_alloc_id(aid)) } @@ -187,12 +197,12 @@ pub fn crate_num(item: &stable_mir::Crate) -> CrateNum { // datastructures and stable MIR datastructures scoped_thread_local! (static TLV: Cell<*const ()>); -pub(crate) fn init<'tcx, F, T>(tables: &TablesWrapper<'tcx>, f: F) -> T +pub(crate) fn init<'tcx, F, T>(cx: &SmirCtxt<'tcx>, f: F) -> T where F: FnOnce() -> T, { assert!(!TLV.is_set()); - let ptr = tables as *const _ as *const (); + let ptr = cx as *const _ as *const (); TLV.set(&Cell::new(ptr), || f()) } @@ -203,8 +213,8 @@ pub(crate) fn with_tables(f: impl for<'tcx> FnOnce(&mut Tables<'tcx>) -> R) - TLV.with(|tlv| { let ptr = tlv.get(); assert!(!ptr.is_null()); - let wrapper = ptr as *const TablesWrapper<'_>; - let mut tables = unsafe { (*wrapper).0.borrow_mut() }; + let context = ptr as *const SmirCtxt<'_>; + let mut tables = unsafe { (*context).0.borrow_mut() }; f(&mut *tables) }) } @@ -213,7 +223,7 @@ pub fn run(tcx: TyCtxt<'_>, f: F) -> Result where F: FnOnce() -> T, { - let tables = TablesWrapper(RefCell::new(Tables { + let tables = SmirCtxt(RefCell::new(Tables { tcx, def_ids: IndexMap::default(), alloc_ids: IndexMap::default(), @@ -224,7 +234,12 @@ where mir_consts: IndexMap::default(), layouts: IndexMap::default(), })); - stable_mir::compiler_interface::run(&tables, || init(&tables, f)) + + let interface = SmirInterface { cx: tables }; + + // Pass the `SmirInterface` to compiler_interface::run + // and initialize the rustc-specific TLS with tables. + stable_mir::compiler_interface::run(&interface, || init(&interface.cx, f)) } /// Instantiate and run the compiler with the provided arguments and callback. @@ -235,6 +250,7 @@ where /// ```ignore(needs-extern-crate) /// # extern crate rustc_driver; /// # extern crate rustc_interface; +/// # extern crate rustc_middle; /// # #[macro_use] /// # extern crate rustc_smir; /// # extern crate stable_mir; @@ -246,7 +262,7 @@ where /// // Your code goes in here. /// # ControlFlow::Continue(()) /// } -/// # let args = vec!["--verbose".to_string()]; +/// # let args = &["--verbose".to_string()]; /// let result = run!(args, analyze_code); /// # assert_eq!(result, Err(CompilerError::Skipped)) /// # } @@ -255,6 +271,7 @@ where /// ```ignore(needs-extern-crate) /// # extern crate rustc_driver; /// # extern crate rustc_interface; +/// # extern crate rustc_middle; /// # #[macro_use] /// # extern crate rustc_smir; /// # extern crate stable_mir; @@ -267,7 +284,7 @@ where /// // Your code goes in here. /// # ControlFlow::Continue(()) /// } -/// # let args = vec!["--verbose".to_string()]; +/// # let args = &["--verbose".to_string()]; /// # let extra_args = vec![]; /// let result = run!(args, || analyze_code(extra_args)); /// # assert_eq!(result, Err(CompilerError::Skipped)) @@ -319,6 +336,7 @@ macro_rules! run_driver { use rustc_driver::{Callbacks, Compilation, run_compiler}; use rustc_middle::ty::TyCtxt; use rustc_interface::interface; + use rustc_smir::rustc_internal; use stable_mir::CompilerError; use std::ops::ControlFlow; @@ -328,7 +346,6 @@ macro_rules! run_driver { C: Send, F: FnOnce($(optional!($with_tcx TyCtxt))?) -> ControlFlow + Send, { - args: Vec, callback: Option, result: Option>, } @@ -340,14 +357,14 @@ macro_rules! run_driver { F: FnOnce($(optional!($with_tcx TyCtxt))?) -> ControlFlow + Send, { /// Creates a new `StableMir` instance, with given test_function and arguments. - pub fn new(args: Vec, callback: F) -> Self { - StableMir { args, callback: Some(callback), result: None } + pub fn new(callback: F) -> Self { + StableMir { callback: Some(callback), result: None } } /// Runs the compiler against given target and tests it with `test_function` - pub fn run(&mut self) -> Result> { + pub fn run(&mut self, args: &[String]) -> Result> { let compiler_result = rustc_driver::catch_fatal_errors(|| -> interface::Result::<()> { - run_compiler(&self.args.clone(), self); + run_compiler(&args, self); Ok(()) }); match (compiler_result, self.result.take()) { @@ -397,7 +414,7 @@ macro_rules! run_driver { } } - StableMir::new($args, $callback).run() + StableMir::new($callback).run($args) }}; } diff --git a/compiler/rustc_smir/src/rustc_internal/pretty.rs b/compiler/rustc_smir/src/rustc_internal/pretty.rs index b752ad71ecc64..0710c18746afd 100644 --- a/compiler/rustc_smir/src/rustc_internal/pretty.rs +++ b/compiler/rustc_smir/src/rustc_internal/pretty.rs @@ -3,6 +3,7 @@ use std::io; use rustc_middle::ty::TyCtxt; use super::run; +use crate::stable_mir; pub fn write_smir_pretty<'tcx, W: io::Write>(tcx: TyCtxt<'tcx>, w: &mut W) -> io::Result<()> { writeln!( diff --git a/compiler/rustc_smir/src/rustc_smir/alloc.rs b/compiler/rustc_smir/src/rustc_smir/alloc.rs index 52c5b425c14f6..9cb89634c52e5 100644 --- a/compiler/rustc_smir/src/rustc_smir/alloc.rs +++ b/compiler/rustc_smir/src/rustc_smir/alloc.rs @@ -6,6 +6,7 @@ use stable_mir::mir::Mutability; use stable_mir::ty::{Allocation, ProvenanceMap}; use crate::rustc_smir::{Stable, Tables}; +use crate::stable_mir; /// Creates new empty `Allocation` from given `Align`. fn new_empty_allocation(align: Align) -> Allocation { @@ -27,7 +28,7 @@ pub(crate) fn new_allocation<'tcx>( tables: &mut Tables<'tcx>, ) -> Allocation { try_new_allocation(ty, const_value, tables) - .expect(&format!("Failed to convert: {const_value:?} to {ty:?}")) + .unwrap_or_else(|_| panic!("Failed to convert: {const_value:?} to {ty:?}")) } #[allow(rustc::usage_of_qualified_ty)] @@ -36,39 +37,30 @@ pub(crate) fn try_new_allocation<'tcx>( const_value: ConstValue<'tcx>, tables: &mut Tables<'tcx>, ) -> Result { + let layout = tables + .tcx + .layout_of(rustc_middle::ty::TypingEnv::fully_monomorphized().as_query_input(ty)) + .map_err(|e| e.stable(tables))?; Ok(match const_value { ConstValue::Scalar(scalar) => { let size = scalar.size(); - let align = tables - .tcx - .layout_of(rustc_middle::ty::TypingEnv::fully_monomorphized().as_query_input(ty)) - .map_err(|e| e.stable(tables))? - .align; - let mut allocation = - rustc_middle::mir::interpret::Allocation::new(size, align.abi, AllocInit::Uninit); + let mut allocation = rustc_middle::mir::interpret::Allocation::new( + size, + layout.align.abi, + AllocInit::Uninit, + ); allocation .write_scalar(&tables.tcx, alloc_range(Size::ZERO, size), scalar) .map_err(|e| e.stable(tables))?; allocation.stable(tables) } - ConstValue::ZeroSized => { - let align = tables - .tcx - .layout_of(rustc_middle::ty::TypingEnv::fully_monomorphized().as_query_input(ty)) - .map_err(|e| e.stable(tables))? - .align; - new_empty_allocation(align.abi) - } + ConstValue::ZeroSized => new_empty_allocation(layout.align.abi), ConstValue::Slice { data, meta } => { let alloc_id = tables.tcx.reserve_and_set_memory_alloc(data); let ptr = Pointer::new(alloc_id.into(), Size::ZERO); let scalar_ptr = rustc_middle::mir::interpret::Scalar::from_pointer(ptr, &tables.tcx); let scalar_meta = rustc_middle::mir::interpret::Scalar::from_target_usize(meta, &tables.tcx); - let layout = tables - .tcx - .layout_of(rustc_middle::ty::TypingEnv::fully_monomorphized().as_query_input(ty)) - .map_err(|e| e.stable(tables))?; let mut allocation = rustc_middle::mir::interpret::Allocation::new( layout.size, layout.align.abi, @@ -92,12 +84,7 @@ pub(crate) fn try_new_allocation<'tcx>( } ConstValue::Indirect { alloc_id, offset } => { let alloc = tables.tcx.global_alloc(alloc_id).unwrap_memory(); - let ty_size = tables - .tcx - .layout_of(rustc_middle::ty::TypingEnv::fully_monomorphized().as_query_input(ty)) - .map_err(|e| e.stable(tables))? - .size; - allocation_filter(&alloc.0, alloc_range(offset, ty_size), tables) + allocation_filter(&alloc.0, alloc_range(offset, layout.size), tables) } }) } diff --git a/compiler/rustc_smir/src/rustc_smir/builder.rs b/compiler/rustc_smir/src/rustc_smir/builder.rs index 2eb0cea0e8536..40e6d21c06378 100644 --- a/compiler/rustc_smir/src/rustc_smir/builder.rs +++ b/compiler/rustc_smir/src/rustc_smir/builder.rs @@ -10,6 +10,7 @@ use rustc_middle::mir::visit::MutVisitor; use rustc_middle::ty::{self, TyCtxt}; use crate::rustc_smir::{Stable, Tables}; +use crate::stable_mir; /// Builds a monomorphic body for a given instance. pub(crate) struct BodyBuilder<'tcx> { @@ -21,7 +22,7 @@ impl<'tcx> BodyBuilder<'tcx> { pub(crate) fn new(tcx: TyCtxt<'tcx>, instance: ty::Instance<'tcx>) -> Self { let instance = match instance.def { // To get the fallback body of an intrinsic, we need to convert it to an item. - ty::InstanceKind::Intrinsic(def_id) => ty::Instance::new(def_id, instance.args), + ty::InstanceKind::Intrinsic(def_id) => ty::Instance::new_raw(def_id, instance.args), _ => instance, }; BodyBuilder { tcx, instance } diff --git a/compiler/rustc_smir/src/rustc_smir/context.rs b/compiler/rustc_smir/src/rustc_smir/context.rs index aa1921fc8e784..bac5c9066f1f6 100644 --- a/compiler/rustc_smir/src/rustc_smir/context.rs +++ b/compiler/rustc_smir/src/rustc_smir/context.rs @@ -1,7 +1,4 @@ -//! Implementation of `[stable_mir::compiler_interface::Context]` trait. -//! -//! This trait is currently the main interface between the Rust compiler, -//! and the `stable_mir` crate. +//! Implementation of StableMIR Context. #![allow(rustc::usage_of_qualified_ty)] @@ -20,7 +17,6 @@ use rustc_middle::ty::{ use rustc_middle::{mir, ty}; use rustc_span::def_id::LOCAL_CRATE; use stable_mir::abi::{FnAbi, Layout, LayoutShape}; -use stable_mir::compiler_interface::Context; use stable_mir::mir::alloc::GlobalAlloc; use stable_mir::mir::mono::{InstanceDef, StaticDef}; use stable_mir::mir::{BinOp, Body, Place, UnOp}; @@ -35,9 +31,16 @@ use stable_mir::{Crate, CrateDef, CrateItem, CrateNum, DefId, Error, Filename, I use crate::rustc_internal::RustcInternal; use crate::rustc_smir::builder::BodyBuilder; use crate::rustc_smir::{Stable, Tables, alloc, filter_def_ids, new_item_kind, smir_crate}; +use crate::stable_mir; -impl<'tcx> Context for TablesWrapper<'tcx> { - fn target_info(&self) -> MachineInfo { +/// Provides direct access to rustc's internal queries. +/// +/// The [`crate::stable_mir::compiler_interface::SmirInterface`] must go through +/// this context to obtain rustc-level information. +pub struct SmirCtxt<'tcx>(pub RefCell>); + +impl<'tcx> SmirCtxt<'tcx> { + pub fn target_info(&self) -> MachineInfo { let mut tables = self.0.borrow_mut(); MachineInfo { endian: tables.tcx.data_layout.endian.stable(&mut *tables), @@ -47,31 +50,35 @@ impl<'tcx> Context for TablesWrapper<'tcx> { } } - fn entry_fn(&self) -> Option { + pub fn entry_fn(&self) -> Option { let mut tables = self.0.borrow_mut(); let tcx = tables.tcx; Some(tables.crate_item(tcx.entry_fn(())?.0)) } - fn all_local_items(&self) -> stable_mir::CrateItems { + /// Retrieve all items of the local crate that have a MIR associated with them. + pub fn all_local_items(&self) -> stable_mir::CrateItems { let mut tables = self.0.borrow_mut(); tables.tcx.mir_keys(()).iter().map(|item| tables.crate_item(item.to_def_id())).collect() } - fn mir_body(&self, item: stable_mir::DefId) -> stable_mir::mir::Body { + /// Retrieve the body of a function. + /// This function will panic if the body is not available. + pub fn mir_body(&self, item: stable_mir::DefId) -> stable_mir::mir::Body { let mut tables = self.0.borrow_mut(); let def_id = tables[item]; tables.tcx.instance_mir(rustc_middle::ty::InstanceKind::Item(def_id)).stable(&mut tables) } - fn has_body(&self, def: DefId) -> bool { + /// Check whether the body of a function is available. + pub fn has_body(&self, def: DefId) -> bool { let mut tables = self.0.borrow_mut(); let tcx = tables.tcx; let def_id = def.internal(&mut *tables, tcx); tables.item_has_body(def_id) } - fn foreign_modules(&self, crate_num: CrateNum) -> Vec { + pub fn foreign_modules(&self, crate_num: CrateNum) -> Vec { let mut tables = self.0.borrow_mut(); let tcx = tables.tcx; tcx.foreign_modules(crate_num.internal(&mut *tables, tcx)) @@ -80,21 +87,23 @@ impl<'tcx> Context for TablesWrapper<'tcx> { .collect() } - fn crate_functions(&self, crate_num: CrateNum) -> Vec { + /// Retrieve all functions defined in this crate. + pub fn crate_functions(&self, crate_num: CrateNum) -> Vec { let mut tables = self.0.borrow_mut(); let tcx = tables.tcx; let krate = crate_num.internal(&mut *tables, tcx); filter_def_ids(tcx, krate, |def_id| tables.to_fn_def(def_id)) } - fn crate_statics(&self, crate_num: CrateNum) -> Vec { + /// Retrieve all static items defined in this crate. + pub fn crate_statics(&self, crate_num: CrateNum) -> Vec { let mut tables = self.0.borrow_mut(); let tcx = tables.tcx; let krate = crate_num.internal(&mut *tables, tcx); filter_def_ids(tcx, krate, |def_id| tables.to_static(def_id)) } - fn foreign_module( + pub fn foreign_module( &self, mod_def: stable_mir::ty::ForeignModuleDef, ) -> stable_mir::ty::ForeignModule { @@ -104,7 +113,7 @@ impl<'tcx> Context for TablesWrapper<'tcx> { mod_def.stable(&mut *tables) } - fn foreign_items(&self, mod_def: stable_mir::ty::ForeignModuleDef) -> Vec { + pub fn foreign_items(&self, mod_def: stable_mir::ty::ForeignModuleDef) -> Vec { let mut tables = self.0.borrow_mut(); let def_id = tables[mod_def.def_id()]; tables @@ -118,12 +127,12 @@ impl<'tcx> Context for TablesWrapper<'tcx> { .collect() } - fn all_trait_decls(&self) -> stable_mir::TraitDecls { + pub fn all_trait_decls(&self) -> stable_mir::TraitDecls { let mut tables = self.0.borrow_mut(); tables.tcx.all_traits().map(|trait_def_id| tables.trait_def(trait_def_id)).collect() } - fn trait_decls(&self, crate_num: CrateNum) -> stable_mir::TraitDecls { + pub fn trait_decls(&self, crate_num: CrateNum) -> stable_mir::TraitDecls { let mut tables = self.0.borrow_mut(); let tcx = tables.tcx; tcx.traits(crate_num.internal(&mut *tables, tcx)) @@ -132,14 +141,14 @@ impl<'tcx> Context for TablesWrapper<'tcx> { .collect() } - fn trait_decl(&self, trait_def: &stable_mir::ty::TraitDef) -> stable_mir::ty::TraitDecl { + pub fn trait_decl(&self, trait_def: &stable_mir::ty::TraitDef) -> stable_mir::ty::TraitDecl { let mut tables = self.0.borrow_mut(); let def_id = tables[trait_def.0]; let trait_def = tables.tcx.trait_def(def_id); trait_def.stable(&mut *tables) } - fn all_trait_impls(&self) -> stable_mir::ImplTraitDecls { + pub fn all_trait_impls(&self) -> stable_mir::ImplTraitDecls { let mut tables = self.0.borrow_mut(); let tcx = tables.tcx; iter::once(LOCAL_CRATE) @@ -149,7 +158,7 @@ impl<'tcx> Context for TablesWrapper<'tcx> { .collect() } - fn trait_impls(&self, crate_num: CrateNum) -> stable_mir::ImplTraitDecls { + pub fn trait_impls(&self, crate_num: CrateNum) -> stable_mir::ImplTraitDecls { let mut tables = self.0.borrow_mut(); let tcx = tables.tcx; tcx.trait_impls_in_crate(crate_num.internal(&mut *tables, tcx)) @@ -158,21 +167,21 @@ impl<'tcx> Context for TablesWrapper<'tcx> { .collect() } - fn trait_impl(&self, impl_def: &stable_mir::ty::ImplDef) -> stable_mir::ty::ImplTrait { + pub fn trait_impl(&self, impl_def: &stable_mir::ty::ImplDef) -> stable_mir::ty::ImplTrait { let mut tables = self.0.borrow_mut(); let def_id = tables[impl_def.0]; let impl_trait = tables.tcx.impl_trait_ref(def_id).unwrap(); impl_trait.stable(&mut *tables) } - fn generics_of(&self, def_id: stable_mir::DefId) -> stable_mir::ty::Generics { + pub fn generics_of(&self, def_id: stable_mir::DefId) -> stable_mir::ty::Generics { let mut tables = self.0.borrow_mut(); let def_id = tables[def_id]; let generics = tables.tcx.generics_of(def_id); generics.stable(&mut *tables) } - fn predicates_of(&self, def_id: stable_mir::DefId) -> stable_mir::ty::GenericPredicates { + pub fn predicates_of(&self, def_id: stable_mir::DefId) -> stable_mir::ty::GenericPredicates { let mut tables = self.0.borrow_mut(); let def_id = tables[def_id]; let GenericPredicates { parent, predicates } = tables.tcx.predicates_of(def_id); @@ -190,7 +199,7 @@ impl<'tcx> Context for TablesWrapper<'tcx> { } } - fn explicit_predicates_of( + pub fn explicit_predicates_of( &self, def_id: stable_mir::DefId, ) -> stable_mir::ty::GenericPredicates { @@ -211,17 +220,20 @@ impl<'tcx> Context for TablesWrapper<'tcx> { } } - fn local_crate(&self) -> stable_mir::Crate { + /// Get information about the local crate. + pub fn local_crate(&self) -> stable_mir::Crate { let tables = self.0.borrow(); smir_crate(tables.tcx, LOCAL_CRATE) } - fn external_crates(&self) -> Vec { + /// Retrieve a list of all external crates. + pub fn external_crates(&self) -> Vec { let tables = self.0.borrow(); tables.tcx.crates(()).iter().map(|crate_num| smir_crate(tables.tcx, *crate_num)).collect() } - fn find_crates(&self, name: &str) -> Vec { + /// Find a crate with the given name. + pub fn find_crates(&self, name: &str) -> Vec { let tables = self.0.borrow(); let crates: Vec = [LOCAL_CRATE] .iter() @@ -234,7 +246,8 @@ impl<'tcx> Context for TablesWrapper<'tcx> { crates } - fn def_name(&self, def_id: stable_mir::DefId, trimmed: bool) -> Symbol { + /// Returns the name of given `DefId`. + pub fn def_name(&self, def_id: stable_mir::DefId, trimmed: bool) -> Symbol { let tables = self.0.borrow(); if trimmed { with_forced_trimmed_paths!(tables.tcx.def_path_str(tables[def_id])) @@ -243,7 +256,14 @@ impl<'tcx> Context for TablesWrapper<'tcx> { } } - fn tool_attrs( + /// Return registered tool attributes with the given attribute name. + /// + /// FIXME(jdonszelmann): may panic on non-tool attributes. After more attribute work, non-tool + /// attributes will simply return an empty list. + /// + /// Single segmented name like `#[clippy]` is specified as `&["clippy".to_string()]`. + /// Multi-segmented name like `#[rustfmt::skip]` is specified as `&["rustfmt".to_string(), "skip".to_string()]`. + pub fn tool_attrs( &self, def_id: stable_mir::DefId, attr: &[stable_mir::Symbol], @@ -267,7 +287,11 @@ impl<'tcx> Context for TablesWrapper<'tcx> { .collect() } - fn all_tool_attrs(&self, def_id: stable_mir::DefId) -> Vec { + /// Get all tool attributes of a definition. + pub fn all_tool_attrs( + &self, + def_id: stable_mir::DefId, + ) -> Vec { let mut tables = self.0.borrow_mut(); let tcx = tables.tcx; let did = tables[def_id]; @@ -291,12 +315,14 @@ impl<'tcx> Context for TablesWrapper<'tcx> { .collect() } - fn span_to_string(&self, span: stable_mir::ty::Span) -> String { + /// Returns printable, human readable form of `Span`. + pub fn span_to_string(&self, span: stable_mir::ty::Span) -> String { let tables = self.0.borrow(); tables.tcx.sess.source_map().span_to_diagnostic_string(tables[span]) } - fn get_filename(&self, span: &Span) -> Filename { + /// Return filename from given `Span`, for diagnostic purposes. + pub fn get_filename(&self, span: &Span) -> Filename { let tables = self.0.borrow(); tables .tcx @@ -307,23 +333,27 @@ impl<'tcx> Context for TablesWrapper<'tcx> { .to_string() } - fn get_lines(&self, span: &Span) -> LineInfo { + /// Return lines corresponding to this `Span`. + pub fn get_lines(&self, span: &Span) -> LineInfo { let tables = self.0.borrow(); let lines = &tables.tcx.sess.source_map().span_to_location_info(tables[*span]); LineInfo { start_line: lines.1, start_col: lines.2, end_line: lines.3, end_col: lines.4 } } - fn item_kind(&self, item: CrateItem) -> ItemKind { + /// Returns the `kind` of given `DefId`. + pub fn item_kind(&self, item: CrateItem) -> ItemKind { let tables = self.0.borrow(); new_item_kind(tables.tcx.def_kind(tables[item.0])) } - fn is_foreign_item(&self, item: DefId) -> bool { + /// Returns whether this is a foreign item. + pub fn is_foreign_item(&self, item: DefId) -> bool { let tables = self.0.borrow(); tables.tcx.is_foreign_item(tables[item]) } - fn foreign_item_kind(&self, def: ForeignDef) -> ForeignItemKind { + /// Returns the kind of a given foreign item. + pub fn foreign_item_kind(&self, def: ForeignDef) -> ForeignItemKind { let mut tables = self.0.borrow_mut(); let def_id = tables[def.def_id()]; let tcx = tables.tcx; @@ -338,32 +368,37 @@ impl<'tcx> Context for TablesWrapper<'tcx> { } } - fn adt_kind(&self, def: AdtDef) -> AdtKind { + /// Returns the kind of a given algebraic data type. + pub fn adt_kind(&self, def: AdtDef) -> AdtKind { let mut tables = self.0.borrow_mut(); let tcx = tables.tcx; def.internal(&mut *tables, tcx).adt_kind().stable(&mut *tables) } - fn adt_is_box(&self, def: AdtDef) -> bool { + /// Returns if the ADT is a box. + pub fn adt_is_box(&self, def: AdtDef) -> bool { let mut tables = self.0.borrow_mut(); let tcx = tables.tcx; def.internal(&mut *tables, tcx).is_box() } - fn adt_is_simd(&self, def: AdtDef) -> bool { + /// Returns whether this ADT is simd. + pub fn adt_is_simd(&self, def: AdtDef) -> bool { let mut tables = self.0.borrow_mut(); let tcx = tables.tcx; def.internal(&mut *tables, tcx).repr().simd() } - fn adt_is_cstr(&self, def: AdtDef) -> bool { + /// Returns whether this definition is a C string. + pub fn adt_is_cstr(&self, def: AdtDef) -> bool { let mut tables = self.0.borrow_mut(); let tcx = tables.tcx; let def_id = def.0.internal(&mut *tables, tcx); tables.tcx.is_lang_item(def_id, LangItem::CStr) } - fn fn_sig(&self, def: FnDef, args: &GenericArgs) -> PolyFnSig { + /// Retrieve the function signature for the given generic arguments. + pub fn fn_sig(&self, def: FnDef, args: &GenericArgs) -> PolyFnSig { let mut tables = self.0.borrow_mut(); let tcx = tables.tcx; let def_id = def.0.internal(&mut *tables, tcx); @@ -372,7 +407,8 @@ impl<'tcx> Context for TablesWrapper<'tcx> { sig.stable(&mut *tables) } - fn intrinsic(&self, def: DefId) -> Option { + /// Retrieve the intrinsic definition if the item corresponds one. + pub fn intrinsic(&self, def: DefId) -> Option { let mut tables = self.0.borrow_mut(); let tcx = tables.tcx; let def_id = def.internal(&mut *tables, tcx); @@ -380,14 +416,16 @@ impl<'tcx> Context for TablesWrapper<'tcx> { intrinsic.map(|_| IntrinsicDef(def)) } - fn intrinsic_name(&self, def: IntrinsicDef) -> Symbol { + /// Retrieve the plain function name of an intrinsic. + pub fn intrinsic_name(&self, def: IntrinsicDef) -> Symbol { let mut tables = self.0.borrow_mut(); let tcx = tables.tcx; let def_id = def.0.internal(&mut *tables, tcx); tcx.intrinsic(def_id).unwrap().name.to_string() } - fn closure_sig(&self, args: &GenericArgs) -> PolyFnSig { + /// Retrieve the closure signature for the given generic arguments. + pub fn closure_sig(&self, args: &GenericArgs) -> PolyFnSig { let mut tables = self.0.borrow_mut(); let tcx = tables.tcx; let args_ref = args.internal(&mut *tables, tcx); @@ -395,25 +433,28 @@ impl<'tcx> Context for TablesWrapper<'tcx> { sig.stable(&mut *tables) } - fn adt_variants_len(&self, def: AdtDef) -> usize { + /// The number of variants in this ADT. + pub fn adt_variants_len(&self, def: AdtDef) -> usize { let mut tables = self.0.borrow_mut(); let tcx = tables.tcx; def.internal(&mut *tables, tcx).variants().len() } - fn variant_name(&self, def: VariantDef) -> Symbol { + /// The name of a variant. + pub fn variant_name(&self, def: VariantDef) -> Symbol { let mut tables = self.0.borrow_mut(); let tcx = tables.tcx; def.internal(&mut *tables, tcx).name.to_string() } - fn variant_fields(&self, def: VariantDef) -> Vec { + pub fn variant_fields(&self, def: VariantDef) -> Vec { let mut tables = self.0.borrow_mut(); let tcx = tables.tcx; def.internal(&mut *tables, tcx).fields.iter().map(|f| f.stable(&mut *tables)).collect() } - fn eval_target_usize(&self, cnst: &MirConst) -> Result { + /// Evaluate constant as a target usize. + pub fn eval_target_usize(&self, cnst: &MirConst) -> Result { let mut tables = self.0.borrow_mut(); let tcx = tables.tcx; let mir_const = cnst.internal(&mut *tables, tcx); @@ -421,7 +462,7 @@ impl<'tcx> Context for TablesWrapper<'tcx> { .try_eval_target_usize(tables.tcx, ty::TypingEnv::fully_monomorphized()) .ok_or_else(|| Error::new(format!("Const `{cnst:?}` cannot be encoded as u64"))) } - fn eval_target_usize_ty(&self, cnst: &TyConst) -> Result { + pub fn eval_target_usize_ty(&self, cnst: &TyConst) -> Result { let mut tables = self.0.borrow_mut(); let tcx = tables.tcx; let mir_const = cnst.internal(&mut *tables, tcx); @@ -430,7 +471,8 @@ impl<'tcx> Context for TablesWrapper<'tcx> { .ok_or_else(|| Error::new(format!("Const `{cnst:?}` cannot be encoded as u64"))) } - fn try_new_const_zst(&self, ty: Ty) -> Result { + /// Create a new zero-sized constant. + pub fn try_new_const_zst(&self, ty: Ty) -> Result { let mut tables = self.0.borrow_mut(); let tcx = tables.tcx; let ty_internal = ty.internal(&mut *tables, tcx); @@ -455,7 +497,8 @@ impl<'tcx> Context for TablesWrapper<'tcx> { .stable(&mut *tables)) } - fn new_const_str(&self, value: &str) -> MirConst { + /// Create a new constant that represents the given string value. + pub fn new_const_str(&self, value: &str) -> MirConst { let mut tables = self.0.borrow_mut(); let tcx = tables.tcx; let ty = ty::Ty::new_static_str(tcx); @@ -466,12 +509,14 @@ impl<'tcx> Context for TablesWrapper<'tcx> { mir::Const::from_value(val, ty).stable(&mut tables) } - fn new_const_bool(&self, value: bool) -> MirConst { + /// Create a new constant that represents the given boolean value. + pub fn new_const_bool(&self, value: bool) -> MirConst { let mut tables = self.0.borrow_mut(); mir::Const::from_bool(tables.tcx, value).stable(&mut tables) } - fn try_new_const_uint(&self, value: u128, uint_ty: UintTy) -> Result { + /// Create a new constant that represents the given value. + pub fn try_new_const_uint(&self, value: u128, uint_ty: UintTy) -> Result { let mut tables = self.0.borrow_mut(); let tcx = tables.tcx; let ty = ty::Ty::new_uint(tcx, uint_ty.internal(&mut *tables, tcx)); @@ -486,7 +531,7 @@ impl<'tcx> Context for TablesWrapper<'tcx> { Ok(mir::Const::from_scalar(tcx, mir::interpret::Scalar::Int(scalar), ty) .stable(&mut tables)) } - fn try_new_ty_const_uint( + pub fn try_new_ty_const_uint( &self, value: u128, uint_ty: UintTy, @@ -508,27 +553,35 @@ impl<'tcx> Context for TablesWrapper<'tcx> { .stable(&mut *tables)) } - fn new_rigid_ty(&self, kind: RigidTy) -> stable_mir::ty::Ty { + /// Create a new type from the given kind. + pub fn new_rigid_ty(&self, kind: RigidTy) -> stable_mir::ty::Ty { let mut tables = self.0.borrow_mut(); let tcx = tables.tcx; let internal_kind = kind.internal(&mut *tables, tcx); tables.tcx.mk_ty_from_kind(internal_kind).stable(&mut *tables) } - fn new_box_ty(&self, ty: stable_mir::ty::Ty) -> stable_mir::ty::Ty { + /// Create a new box type, `Box`, for the given inner type `T`. + pub fn new_box_ty(&self, ty: stable_mir::ty::Ty) -> stable_mir::ty::Ty { let mut tables = self.0.borrow_mut(); let tcx = tables.tcx; let inner = ty.internal(&mut *tables, tcx); ty::Ty::new_box(tables.tcx, inner).stable(&mut *tables) } - fn def_ty(&self, item: stable_mir::DefId) -> stable_mir::ty::Ty { + /// Returns the type of given crate item. + pub fn def_ty(&self, item: stable_mir::DefId) -> stable_mir::ty::Ty { let mut tables = self.0.borrow_mut(); let tcx = tables.tcx; tcx.type_of(item.internal(&mut *tables, tcx)).instantiate_identity().stable(&mut *tables) } - fn def_ty_with_args(&self, item: stable_mir::DefId, args: &GenericArgs) -> stable_mir::ty::Ty { + /// Returns the type of given definition instantiated with the given arguments. + pub fn def_ty_with_args( + &self, + item: stable_mir::DefId, + args: &GenericArgs, + ) -> stable_mir::ty::Ty { let mut tables = self.0.borrow_mut(); let tcx = tables.tcx; let args = args.internal(&mut *tables, tcx); @@ -543,33 +596,38 @@ impl<'tcx> Context for TablesWrapper<'tcx> { .stable(&mut *tables) } - fn mir_const_pretty(&self, cnst: &stable_mir::ty::MirConst) -> String { + /// Returns literal value of a const as a string. + pub fn mir_const_pretty(&self, cnst: &stable_mir::ty::MirConst) -> String { let mut tables = self.0.borrow_mut(); let tcx = tables.tcx; cnst.internal(&mut *tables, tcx).to_string() } - fn span_of_an_item(&self, def_id: stable_mir::DefId) -> Span { + /// `Span` of an item. + pub fn span_of_an_item(&self, def_id: stable_mir::DefId) -> Span { let mut tables = self.0.borrow_mut(); tables.tcx.def_span(tables[def_id]).stable(&mut *tables) } - fn ty_pretty(&self, ty: stable_mir::ty::Ty) -> String { + /// Obtain the representation of a type. + pub fn ty_pretty(&self, ty: stable_mir::ty::Ty) -> String { let tables = self.0.borrow_mut(); tables.types[ty].to_string() } - fn ty_kind(&self, ty: stable_mir::ty::Ty) -> TyKind { + /// Obtain the representation of a type. + pub fn ty_kind(&self, ty: stable_mir::ty::Ty) -> TyKind { let mut tables = self.0.borrow_mut(); tables.types[ty].kind().stable(&mut *tables) } - fn ty_const_pretty(&self, ct: stable_mir::ty::TyConstId) -> String { + pub fn ty_const_pretty(&self, ct: stable_mir::ty::TyConstId) -> String { let tables = self.0.borrow_mut(); tables.ty_consts[ct].to_string() } - fn rigid_ty_discriminant_ty(&self, ty: &RigidTy) -> stable_mir::ty::Ty { + /// Get the discriminant Ty for this Ty if there's one. + pub fn rigid_ty_discriminant_ty(&self, ty: &RigidTy) -> stable_mir::ty::Ty { let mut tables = self.0.borrow_mut(); let tcx = tables.tcx; let internal_kind = ty.internal(&mut *tables, tcx); @@ -577,7 +635,8 @@ impl<'tcx> Context for TablesWrapper<'tcx> { internal_ty.discriminant_ty(tables.tcx).stable(&mut *tables) } - fn instance_body(&self, def: InstanceDef) -> Option { + /// Get the body of an Instance which is already monomorphized. + pub fn instance_body(&self, def: InstanceDef) -> Option { let mut tables = self.0.borrow_mut(); let instance = tables.instances[def]; tables @@ -585,63 +644,67 @@ impl<'tcx> Context for TablesWrapper<'tcx> { .then(|| BodyBuilder::new(tables.tcx, instance).build(&mut *tables)) } - fn instance_ty(&self, def: InstanceDef) -> stable_mir::ty::Ty { + /// Get the instance type with generic instantiations applied and lifetimes erased. + pub fn instance_ty(&self, def: InstanceDef) -> stable_mir::ty::Ty { let mut tables = self.0.borrow_mut(); let instance = tables.instances[def]; assert!(!instance.has_non_region_param(), "{instance:?} needs further instantiation"); instance.ty(tables.tcx, ty::TypingEnv::fully_monomorphized()).stable(&mut *tables) } - fn instance_args(&self, def: InstanceDef) -> GenericArgs { + /// Get the instantiation types. + pub fn instance_args(&self, def: InstanceDef) -> GenericArgs { let mut tables = self.0.borrow_mut(); let instance = tables.instances[def]; instance.args.stable(&mut *tables) } - fn instance_abi(&self, def: InstanceDef) -> Result { + /// Get an instance ABI. + pub fn instance_abi(&self, def: InstanceDef) -> Result { let mut tables = self.0.borrow_mut(); let instance = tables.instances[def]; Ok(tables.fn_abi_of_instance(instance, List::empty())?.stable(&mut *tables)) } - fn fn_ptr_abi(&self, fn_ptr: PolyFnSig) -> Result { + /// Get the ABI of a function pointer. + pub fn fn_ptr_abi(&self, fn_ptr: PolyFnSig) -> Result { let mut tables = self.0.borrow_mut(); let tcx = tables.tcx; let sig = fn_ptr.internal(&mut *tables, tcx); Ok(tables.fn_abi_of_fn_ptr(sig, List::empty())?.stable(&mut *tables)) } - fn instance_def_id(&self, def: InstanceDef) -> stable_mir::DefId { + /// Get the instance. + pub fn instance_def_id(&self, def: InstanceDef) -> stable_mir::DefId { let mut tables = self.0.borrow_mut(); let def_id = tables.instances[def].def_id(); tables.create_def_id(def_id) } - fn instance_mangled_name(&self, instance: InstanceDef) -> Symbol { + /// Get the instance mangled name. + pub fn instance_mangled_name(&self, instance: InstanceDef) -> Symbol { let tables = self.0.borrow_mut(); let instance = tables.instances[instance]; tables.tcx.symbol_name(instance).name.to_string() } - fn is_empty_drop_shim(&self, def: InstanceDef) -> bool { + /// Check if this is an empty DropGlue shim. + pub fn is_empty_drop_shim(&self, def: InstanceDef) -> bool { let tables = self.0.borrow_mut(); let instance = tables.instances[def]; matches!(instance.def, ty::InstanceKind::DropGlue(_, None)) } - fn is_empty_async_drop_ctor_shim(&self, def: InstanceDef) -> bool { - let tables = self.0.borrow_mut(); - let instance = tables.instances[def]; - matches!(instance.def, ty::InstanceKind::AsyncDropGlueCtorShim(_, None)) - } - - fn mono_instance(&self, def_id: stable_mir::DefId) -> stable_mir::mir::mono::Instance { + /// Convert a non-generic crate item into an instance. + /// This function will panic if the item is generic. + pub fn mono_instance(&self, def_id: stable_mir::DefId) -> stable_mir::mir::mono::Instance { let mut tables = self.0.borrow_mut(); let def_id = tables[def_id]; Instance::mono(tables.tcx, def_id).stable(&mut *tables) } - fn requires_monomorphization(&self, def_id: stable_mir::DefId) -> bool { + /// Item requires monomorphization. + pub fn requires_monomorphization(&self, def_id: stable_mir::DefId) -> bool { let tables = self.0.borrow(); let def_id = tables[def_id]; let generics = tables.tcx.generics_of(def_id); @@ -649,7 +712,8 @@ impl<'tcx> Context for TablesWrapper<'tcx> { result } - fn resolve_instance( + /// Resolve an instance from the given function definition and generic arguments. + pub fn resolve_instance( &self, def: stable_mir::ty::FnDef, args: &stable_mir::ty::GenericArgs, @@ -669,7 +733,8 @@ impl<'tcx> Context for TablesWrapper<'tcx> { } } - fn resolve_drop_in_place(&self, ty: stable_mir::ty::Ty) -> stable_mir::mir::mono::Instance { + /// Resolve an instance for drop_in_place for the given type. + pub fn resolve_drop_in_place(&self, ty: stable_mir::ty::Ty) -> stable_mir::mir::mono::Instance { let mut tables = self.0.borrow_mut(); let tcx = tables.tcx; let internal_ty = ty.internal(&mut *tables, tcx); @@ -677,7 +742,8 @@ impl<'tcx> Context for TablesWrapper<'tcx> { instance.stable(&mut *tables) } - fn resolve_for_fn_ptr( + /// Resolve instance for a function pointer. + pub fn resolve_for_fn_ptr( &self, def: FnDef, args: &GenericArgs, @@ -695,7 +761,8 @@ impl<'tcx> Context for TablesWrapper<'tcx> { .stable(&mut *tables) } - fn resolve_closure( + /// Resolve instance for a closure with the requested type. + pub fn resolve_closure( &self, def: ClosureDef, args: &GenericArgs, @@ -712,7 +779,8 @@ impl<'tcx> Context for TablesWrapper<'tcx> { ) } - fn eval_instance(&self, def: InstanceDef, const_ty: Ty) -> Result { + /// Try to evaluate an instance into a constant. + pub fn eval_instance(&self, def: InstanceDef, const_ty: Ty) -> Result { let mut tables = self.0.borrow_mut(); let instance = tables.instances[def]; let tcx = tables.tcx; @@ -732,21 +800,24 @@ impl<'tcx> Context for TablesWrapper<'tcx> { .map_err(|e| e.stable(&mut *tables))? } - fn eval_static_initializer(&self, def: StaticDef) -> Result { + /// Evaluate a static's initializer. + pub fn eval_static_initializer(&self, def: StaticDef) -> Result { let mut tables = self.0.borrow_mut(); let tcx = tables.tcx; let def_id = def.0.internal(&mut *tables, tcx); tables.tcx.eval_static_initializer(def_id).stable(&mut *tables) } - fn global_alloc(&self, alloc: stable_mir::mir::alloc::AllocId) -> GlobalAlloc { + /// Retrieve global allocation for the given allocation ID. + pub fn global_alloc(&self, alloc: stable_mir::mir::alloc::AllocId) -> GlobalAlloc { let mut tables = self.0.borrow_mut(); let tcx = tables.tcx; let alloc_id = alloc.internal(&mut *tables, tcx); tables.tcx.global_alloc(alloc_id).stable(&mut *tables) } - fn vtable_allocation( + /// Retrieve the id for the virtual table. + pub fn vtable_allocation( &self, global_alloc: &GlobalAlloc, ) -> Option { @@ -764,7 +835,7 @@ impl<'tcx> Context for TablesWrapper<'tcx> { Some(alloc_id.stable(&mut *tables)) } - fn krate(&self, def_id: stable_mir::DefId) -> Crate { + pub fn krate(&self, def_id: stable_mir::DefId) -> Crate { let tables = self.0.borrow(); smir_crate(tables.tcx, tables[def_id].krate) } @@ -772,7 +843,7 @@ impl<'tcx> Context for TablesWrapper<'tcx> { /// Retrieve the instance name for diagnostic messages. /// /// This will return the specialized name, e.g., `Vec::new`. - fn instance_name(&self, def: InstanceDef, trimmed: bool) -> Symbol { + pub fn instance_name(&self, def: InstanceDef, trimmed: bool) -> Symbol { let tables = self.0.borrow_mut(); let instance = tables.instances[def]; if trimmed { @@ -786,7 +857,8 @@ impl<'tcx> Context for TablesWrapper<'tcx> { } } - fn ty_layout(&self, ty: Ty) -> Result { + /// Get the layout of a type. + pub fn ty_layout(&self, ty: Ty) -> Result { let mut tables = self.0.borrow_mut(); let tcx = tables.tcx; let ty = ty.internal(&mut *tables, tcx); @@ -794,19 +866,22 @@ impl<'tcx> Context for TablesWrapper<'tcx> { Ok(layout.stable(&mut *tables)) } - fn layout_shape(&self, id: Layout) -> LayoutShape { + /// Get the layout shape. + pub fn layout_shape(&self, id: Layout) -> LayoutShape { let mut tables = self.0.borrow_mut(); let tcx = tables.tcx; id.internal(&mut *tables, tcx).0.stable(&mut *tables) } - fn place_pretty(&self, place: &Place) -> String { + /// Get a debug string representation of a place. + pub fn place_pretty(&self, place: &Place) -> String { let mut tables = self.0.borrow_mut(); let tcx = tables.tcx; format!("{:?}", place.internal(&mut *tables, tcx)) } - fn binop_ty(&self, bin_op: BinOp, rhs: Ty, lhs: Ty) -> Ty { + /// Get the resulting type of binary operation. + pub fn binop_ty(&self, bin_op: BinOp, rhs: Ty, lhs: Ty) -> Ty { let mut tables = self.0.borrow_mut(); let tcx = tables.tcx; let rhs_internal = rhs.internal(&mut *tables, tcx); @@ -815,16 +890,31 @@ impl<'tcx> Context for TablesWrapper<'tcx> { ty.stable(&mut *tables) } - fn unop_ty(&self, un_op: UnOp, arg: Ty) -> Ty { + /// Get the resulting type of unary operation. + pub fn unop_ty(&self, un_op: UnOp, arg: Ty) -> Ty { let mut tables = self.0.borrow_mut(); let tcx = tables.tcx; let arg_internal = arg.internal(&mut *tables, tcx); let ty = un_op.internal(&mut *tables, tcx).ty(tcx, arg_internal); ty.stable(&mut *tables) } -} -pub(crate) struct TablesWrapper<'tcx>(pub RefCell>); + /// Get all associated items of a definition. + pub fn associated_items(&self, def_id: stable_mir::DefId) -> stable_mir::AssocItems { + let mut tables = self.0.borrow_mut(); + let tcx = tables.tcx; + let def_id = tables[def_id]; + let assoc_items = if tcx.is_trait_alias(def_id) { + Vec::new() + } else { + tcx.associated_item_def_ids(def_id) + .iter() + .map(|did| tcx.associated_item(*did).stable(&mut *tables)) + .collect() + }; + assoc_items + } +} /// Implement error handling for extracting function ABI information. impl<'tcx> FnAbiOfHelpers<'tcx> for Tables<'tcx> { diff --git a/compiler/rustc_smir/src/rustc_smir/convert/abi.rs b/compiler/rustc_smir/src/rustc_smir/convert/abi.rs index 62cbab9b723cc..7ccc785a40026 100644 --- a/compiler/rustc_smir/src/rustc_smir/convert/abi.rs +++ b/compiler/rustc_smir/src/rustc_smir/convert/abi.rs @@ -14,6 +14,7 @@ use stable_mir::target::MachineSize as Size; use stable_mir::ty::{Align, IndexedVal, VariantIdx}; use crate::rustc_smir::{Stable, Tables}; +use crate::stable_mir; impl<'tcx> Stable<'tcx> for rustc_abi::VariantIdx { type T = VariantIdx; diff --git a/compiler/rustc_smir/src/rustc_smir/convert/error.rs b/compiler/rustc_smir/src/rustc_smir/convert/error.rs index 82ecfa630ddb0..2cde5542483db 100644 --- a/compiler/rustc_smir/src/rustc_smir/convert/error.rs +++ b/compiler/rustc_smir/src/rustc_smir/convert/error.rs @@ -6,6 +6,7 @@ use rustc_middle::mir::interpret::AllocError; use rustc_middle::ty::layout::LayoutError; use crate::rustc_smir::{Stable, Tables}; +use crate::stable_mir; impl<'tcx> Stable<'tcx> for LayoutError<'tcx> { type T = stable_mir::Error; diff --git a/compiler/rustc_smir/src/rustc_smir/convert/mir.rs b/compiler/rustc_smir/src/rustc_smir/convert/mir.rs index bdd6e16a7c171..42b3e59b73ab9 100644 --- a/compiler/rustc_smir/src/rustc_smir/convert/mir.rs +++ b/compiler/rustc_smir/src/rustc_smir/convert/mir.rs @@ -9,6 +9,7 @@ use stable_mir::ty::{Allocation, ConstantKind, MirConst}; use stable_mir::{Error, opaque}; use crate::rustc_smir::{Stable, Tables, alloc}; +use crate::stable_mir; impl<'tcx> Stable<'tcx> for mir::Body<'tcx> { type T = stable_mir::mir::Body; @@ -493,6 +494,9 @@ impl<'tcx> Stable<'tcx> for mir::AssertMessage<'tcx> { AssertKind::ResumedAfterPanic(coroutine) => { stable_mir::mir::AssertMessage::ResumedAfterPanic(coroutine.stable(tables)) } + AssertKind::ResumedAfterDrop(coroutine) => { + stable_mir::mir::AssertMessage::ResumedAfterDrop(coroutine.stable(tables)) + } AssertKind::MisalignedPointerDereference { required, found } => { stable_mir::mir::AssertMessage::MisalignedPointerDereference { required: required.stable(tables), @@ -647,13 +651,18 @@ impl<'tcx> Stable<'tcx> for mir::TerminatorKind<'tcx> { mir::TerminatorKind::UnwindTerminate(_) => TerminatorKind::Abort, mir::TerminatorKind::Return => TerminatorKind::Return, mir::TerminatorKind::Unreachable => TerminatorKind::Unreachable, - mir::TerminatorKind::Drop { place, target, unwind, replace: _ } => { - TerminatorKind::Drop { - place: place.stable(tables), - target: target.as_usize(), - unwind: unwind.stable(tables), - } - } + mir::TerminatorKind::Drop { + place, + target, + unwind, + replace: _, + drop: _, + async_fut: _, + } => TerminatorKind::Drop { + place: place.stable(tables), + target: target.as_usize(), + unwind: unwind.stable(tables), + }, mir::TerminatorKind::Call { func, args, diff --git a/compiler/rustc_smir/src/rustc_smir/convert/mod.rs b/compiler/rustc_smir/src/rustc_smir/convert/mod.rs index a3da563af50d4..3494de62d835b 100644 --- a/compiler/rustc_smir/src/rustc_smir/convert/mod.rs +++ b/compiler/rustc_smir/src/rustc_smir/convert/mod.rs @@ -3,6 +3,7 @@ use rustc_abi::FieldIdx; use crate::rustc_smir::{Stable, Tables}; +use crate::stable_mir; mod abi; mod error; diff --git a/compiler/rustc_smir/src/rustc_smir/convert/ty.rs b/compiler/rustc_smir/src/rustc_smir/convert/ty.rs index aa0eac628dd0f..62e5485cde37f 100644 --- a/compiler/rustc_smir/src/rustc_smir/convert/ty.rs +++ b/compiler/rustc_smir/src/rustc_smir/convert/ty.rs @@ -7,6 +7,7 @@ use stable_mir::ty::{ }; use crate::rustc_smir::{Stable, Tables, alloc}; +use crate::stable_mir; impl<'tcx> Stable<'tcx> for ty::AliasTyKind { type T = stable_mir::ty::AliasKind; @@ -15,7 +16,7 @@ impl<'tcx> Stable<'tcx> for ty::AliasTyKind { ty::Projection => stable_mir::ty::AliasKind::Projection, ty::Inherent => stable_mir::ty::AliasKind::Inherent, ty::Opaque => stable_mir::ty::AliasKind::Opaque, - ty::Weak => stable_mir::ty::AliasKind::Weak, + ty::Free => stable_mir::ty::AliasKind::Free, } } } @@ -411,6 +412,7 @@ impl<'tcx> Stable<'tcx> for ty::Pattern<'tcx> { end: Some(end.stable(tables)), include_end: true, }, + ty::PatternKind::Or(_) => todo!(), } } } @@ -637,8 +639,8 @@ impl<'tcx> Stable<'tcx> for ty::ClauseKind<'tcx> { const_.stable(tables), ty.stable(tables), ), - ClauseKind::WellFormed(generic_arg) => { - stable_mir::ty::ClauseKind::WellFormed(generic_arg.unpack().stable(tables)) + ClauseKind::WellFormed(term) => { + stable_mir::ty::ClauseKind::WellFormed(term.unpack().stable(tables)) } ClauseKind::ConstEvaluatable(const_) => { stable_mir::ty::ClauseKind::ConstEvaluatable(const_.stable(tables)) @@ -812,6 +814,9 @@ impl<'tcx> Stable<'tcx> for ty::Instance<'tcx> { | ty::InstanceKind::DropGlue(..) | ty::InstanceKind::CloneShim(..) | ty::InstanceKind::FnPtrShim(..) + | ty::InstanceKind::FutureDropPollShim(..) + | ty::InstanceKind::AsyncDropGlue(..) + | ty::InstanceKind::EiiShim { .. } | ty::InstanceKind::AsyncDropGlueCtorShim(..) => { stable_mir::mir::mono::InstanceKind::Shim } @@ -870,7 +875,6 @@ impl<'tcx> Stable<'tcx> for rustc_abi::ExternAbi { ExternAbi::CCmseNonSecureCall => Abi::CCmseNonSecureCall, ExternAbi::CCmseNonSecureEntry => Abi::CCmseNonSecureEntry, ExternAbi::System { unwind } => Abi::System { unwind }, - ExternAbi::RustIntrinsic => Abi::RustIntrinsic, ExternAbi::RustCall => Abi::RustCall, ExternAbi::Unadjusted => Abi::Unadjusted, ExternAbi::RustCold => Abi::RustCold, @@ -890,3 +894,69 @@ impl<'tcx> Stable<'tcx> for rustc_session::cstore::ForeignModule { } } } + +impl<'tcx> Stable<'tcx> for ty::AssocKind { + type T = stable_mir::ty::AssocKind; + + fn stable(&self, tables: &mut Tables<'_>) -> Self::T { + use stable_mir::ty::{AssocKind, AssocTypeData}; + match *self { + ty::AssocKind::Const { name } => AssocKind::Const { name: name.to_string() }, + ty::AssocKind::Fn { name, has_self } => { + AssocKind::Fn { name: name.to_string(), has_self } + } + ty::AssocKind::Type { data } => AssocKind::Type { + data: match data { + ty::AssocTypeData::Normal(name) => AssocTypeData::Normal(name.to_string()), + ty::AssocTypeData::Rpitit(rpitit) => { + AssocTypeData::Rpitit(rpitit.stable(tables)) + } + }, + }, + } + } +} + +impl<'tcx> Stable<'tcx> for ty::AssocItemContainer { + type T = stable_mir::ty::AssocItemContainer; + + fn stable(&self, _tables: &mut Tables<'_>) -> Self::T { + use stable_mir::ty::AssocItemContainer; + match self { + ty::AssocItemContainer::Trait => AssocItemContainer::Trait, + ty::AssocItemContainer::Impl => AssocItemContainer::Impl, + } + } +} + +impl<'tcx> Stable<'tcx> for ty::AssocItem { + type T = stable_mir::ty::AssocItem; + + fn stable(&self, tables: &mut Tables<'_>) -> Self::T { + stable_mir::ty::AssocItem { + def_id: tables.assoc_def(self.def_id), + kind: self.kind.stable(tables), + container: self.container.stable(tables), + trait_item_def_id: self.trait_item_def_id.map(|did| tables.assoc_def(did)), + } + } +} + +impl<'tcx> Stable<'tcx> for ty::ImplTraitInTraitData { + type T = stable_mir::ty::ImplTraitInTraitData; + + fn stable(&self, tables: &mut Tables<'_>) -> Self::T { + use stable_mir::ty::ImplTraitInTraitData; + match self { + ty::ImplTraitInTraitData::Trait { fn_def_id, opaque_def_id } => { + ImplTraitInTraitData::Trait { + fn_def_id: tables.fn_def(*fn_def_id), + opaque_def_id: tables.opaque_def(*opaque_def_id), + } + } + ty::ImplTraitInTraitData::Impl { fn_def_id } => { + ImplTraitInTraitData::Impl { fn_def_id: tables.fn_def(*fn_def_id) } + } + } + } +} diff --git a/compiler/rustc_smir/src/rustc_smir/mod.rs b/compiler/rustc_smir/src/rustc_smir/mod.rs index c5d33f090a05b..b5003baaf633c 100644 --- a/compiler/rustc_smir/src/rustc_smir/mod.rs +++ b/compiler/rustc_smir/src/rustc_smir/mod.rs @@ -21,10 +21,11 @@ use stable_mir::{CtorKind, ItemKind}; use tracing::debug; use crate::rustc_internal::IndexMap; +use crate::stable_mir; mod alloc; mod builder; -pub(crate) mod context; +pub mod context; mod convert; pub struct Tables<'tcx> { diff --git a/compiler/stable_mir/src/abi.rs b/compiler/rustc_smir/src/stable_mir/abi.rs similarity index 97% rename from compiler/stable_mir/src/abi.rs rename to compiler/rustc_smir/src/stable_mir/abi.rs index 091f3e1a95e95..3842cb7e653e8 100644 --- a/compiler/stable_mir/src/abi.rs +++ b/compiler/rustc_smir/src/stable_mir/abi.rs @@ -3,12 +3,13 @@ use std::num::NonZero; use std::ops::RangeInclusive; use serde::Serialize; +use stable_mir::compiler_interface::with; +use stable_mir::mir::FieldIdx; +use stable_mir::target::{MachineInfo, MachineSize as Size}; +use stable_mir::ty::{Align, IndexedVal, Ty, VariantIdx}; +use stable_mir::{Error, Opaque, error}; -use crate::compiler_interface::with; -use crate::mir::FieldIdx; -use crate::target::{MachineInfo, MachineSize as Size}; -use crate::ty::{Align, IndexedVal, Ty, VariantIdx}; -use crate::{Error, Opaque, error}; +use crate::stable_mir; /// A function ABI definition. #[derive(Clone, Debug, PartialEq, Eq, Hash, Serialize)] @@ -149,7 +150,7 @@ pub enum FieldsShape { Arbitrary { /// Offsets for the first byte of each field, /// ordered to match the source definition order. - /// I.e.: It follows the same order as [crate::ty::VariantDef::fields()]. + /// I.e.: It follows the same order as [super::ty::VariantDef::fields()]. /// This vector does not go in increasing order. offsets: Vec, }, diff --git a/compiler/rustc_smir/src/stable_mir/compiler_interface.rs b/compiler/rustc_smir/src/stable_mir/compiler_interface.rs new file mode 100644 index 0000000000000..bb35e23a72884 --- /dev/null +++ b/compiler/rustc_smir/src/stable_mir/compiler_interface.rs @@ -0,0 +1,497 @@ +//! Define the interface with the Rust compiler. +//! +//! StableMIR users should not use any of the items in this module directly. +//! These APIs have no stability guarantee. + +use std::cell::Cell; + +use rustc_smir::context::SmirCtxt; +use stable_mir::abi::{FnAbi, Layout, LayoutShape}; +use stable_mir::crate_def::Attribute; +use stable_mir::mir::alloc::{AllocId, GlobalAlloc}; +use stable_mir::mir::mono::{Instance, InstanceDef, StaticDef}; +use stable_mir::mir::{BinOp, Body, Place, UnOp}; +use stable_mir::target::MachineInfo; +use stable_mir::ty::{ + AdtDef, AdtKind, Allocation, ClosureDef, ClosureKind, FieldDef, FnDef, ForeignDef, + ForeignItemKind, ForeignModule, ForeignModuleDef, GenericArgs, GenericPredicates, Generics, + ImplDef, ImplTrait, IntrinsicDef, LineInfo, MirConst, PolyFnSig, RigidTy, Span, TraitDecl, + TraitDef, Ty, TyConst, TyConstId, TyKind, UintTy, VariantDef, +}; +use stable_mir::{ + AssocItems, Crate, CrateItem, CrateItems, CrateNum, DefId, Error, Filename, ImplTraitDecls, + ItemKind, Symbol, TraitDecls, mir, +}; + +use crate::{rustc_smir, stable_mir}; + +/// Stable public API for querying compiler information. +/// +/// All queries are delegated to an internal [`SmirCtxt`] that provides +/// similar APIs but based on internal rustc constructs. +/// +/// Do not use this directly. This is currently used in the macro expansion. +pub(crate) struct SmirInterface<'tcx> { + pub(crate) cx: SmirCtxt<'tcx>, +} + +impl<'tcx> SmirInterface<'tcx> { + pub(crate) fn entry_fn(&self) -> Option { + self.cx.entry_fn() + } + + /// Retrieve all items of the local crate that have a MIR associated with them. + pub(crate) fn all_local_items(&self) -> CrateItems { + self.cx.all_local_items() + } + + /// Retrieve the body of a function. + /// This function will panic if the body is not available. + pub(crate) fn mir_body(&self, item: DefId) -> mir::Body { + self.cx.mir_body(item) + } + + /// Check whether the body of a function is available. + pub(crate) fn has_body(&self, item: DefId) -> bool { + self.cx.has_body(item) + } + + pub(crate) fn foreign_modules(&self, crate_num: CrateNum) -> Vec { + self.cx.foreign_modules(crate_num) + } + + /// Retrieve all functions defined in this crate. + pub(crate) fn crate_functions(&self, crate_num: CrateNum) -> Vec { + self.cx.crate_functions(crate_num) + } + + /// Retrieve all static items defined in this crate. + pub(crate) fn crate_statics(&self, crate_num: CrateNum) -> Vec { + self.cx.crate_statics(crate_num) + } + + pub(crate) fn foreign_module(&self, mod_def: ForeignModuleDef) -> ForeignModule { + self.cx.foreign_module(mod_def) + } + + pub(crate) fn foreign_items(&self, mod_def: ForeignModuleDef) -> Vec { + self.cx.foreign_items(mod_def) + } + + pub(crate) fn all_trait_decls(&self) -> TraitDecls { + self.cx.all_trait_decls() + } + + pub(crate) fn trait_decls(&self, crate_num: CrateNum) -> TraitDecls { + self.cx.trait_decls(crate_num) + } + + pub(crate) fn trait_decl(&self, trait_def: &TraitDef) -> TraitDecl { + self.cx.trait_decl(trait_def) + } + + pub(crate) fn all_trait_impls(&self) -> ImplTraitDecls { + self.cx.all_trait_impls() + } + + pub(crate) fn trait_impls(&self, crate_num: CrateNum) -> ImplTraitDecls { + self.cx.trait_impls(crate_num) + } + + pub(crate) fn trait_impl(&self, trait_impl: &ImplDef) -> ImplTrait { + self.cx.trait_impl(trait_impl) + } + + pub(crate) fn generics_of(&self, def_id: DefId) -> Generics { + self.cx.generics_of(def_id) + } + + pub(crate) fn predicates_of(&self, def_id: DefId) -> GenericPredicates { + self.cx.predicates_of(def_id) + } + + pub(crate) fn explicit_predicates_of(&self, def_id: DefId) -> GenericPredicates { + self.cx.explicit_predicates_of(def_id) + } + + /// Get information about the local crate. + pub(crate) fn local_crate(&self) -> Crate { + self.cx.local_crate() + } + + /// Retrieve a list of all external crates. + pub(crate) fn external_crates(&self) -> Vec { + self.cx.external_crates() + } + + /// Find a crate with the given name. + pub(crate) fn find_crates(&self, name: &str) -> Vec { + self.cx.find_crates(name) + } + + /// Returns the name of given `DefId`. + pub(crate) fn def_name(&self, def_id: DefId, trimmed: bool) -> Symbol { + self.cx.def_name(def_id, trimmed) + } + + /// Return registered tool attributes with the given attribute name. + /// + /// FIXME(jdonszelmann): may panic on non-tool attributes. After more attribute work, non-tool + /// attributes will simply return an empty list. + /// + /// Single segmented name like `#[clippy]` is specified as `&["clippy".to_string()]`. + /// Multi-segmented name like `#[rustfmt::skip]` is specified as `&["rustfmt".to_string(), "skip".to_string()]`. + pub(crate) fn tool_attrs(&self, def_id: DefId, attr: &[Symbol]) -> Vec { + self.cx.tool_attrs(def_id, attr) + } + + /// Get all tool attributes of a definition. + pub(crate) fn all_tool_attrs(&self, def_id: DefId) -> Vec { + self.cx.all_tool_attrs(def_id) + } + + /// Returns printable, human readable form of `Span`. + pub(crate) fn span_to_string(&self, span: Span) -> String { + self.cx.span_to_string(span) + } + + /// Return filename from given `Span`, for diagnostic purposes. + pub(crate) fn get_filename(&self, span: &Span) -> Filename { + self.cx.get_filename(span) + } + + /// Return lines corresponding to this `Span`. + pub(crate) fn get_lines(&self, span: &Span) -> LineInfo { + self.cx.get_lines(span) + } + + /// Returns the `kind` of given `DefId`. + pub(crate) fn item_kind(&self, item: CrateItem) -> ItemKind { + self.cx.item_kind(item) + } + + /// Returns whether this is a foreign item. + pub(crate) fn is_foreign_item(&self, item: DefId) -> bool { + self.cx.is_foreign_item(item) + } + + /// Returns the kind of a given foreign item. + pub(crate) fn foreign_item_kind(&self, def: ForeignDef) -> ForeignItemKind { + self.cx.foreign_item_kind(def) + } + + /// Returns the kind of a given algebraic data type. + pub(crate) fn adt_kind(&self, def: AdtDef) -> AdtKind { + self.cx.adt_kind(def) + } + + /// Returns if the ADT is a box. + pub(crate) fn adt_is_box(&self, def: AdtDef) -> bool { + self.cx.adt_is_box(def) + } + + /// Returns whether this ADT is simd. + pub(crate) fn adt_is_simd(&self, def: AdtDef) -> bool { + self.cx.adt_is_simd(def) + } + + /// Returns whether this definition is a C string. + pub(crate) fn adt_is_cstr(&self, def: AdtDef) -> bool { + self.cx.adt_is_cstr(def) + } + + /// Retrieve the function signature for the given generic arguments. + pub(crate) fn fn_sig(&self, def: FnDef, args: &GenericArgs) -> PolyFnSig { + self.cx.fn_sig(def, args) + } + + /// Retrieve the intrinsic definition if the item corresponds one. + pub(crate) fn intrinsic(&self, item: DefId) -> Option { + self.cx.intrinsic(item) + } + + /// Retrieve the plain function name of an intrinsic. + pub(crate) fn intrinsic_name(&self, def: IntrinsicDef) -> Symbol { + self.cx.intrinsic_name(def) + } + + /// Retrieve the closure signature for the given generic arguments. + pub(crate) fn closure_sig(&self, args: &GenericArgs) -> PolyFnSig { + self.cx.closure_sig(args) + } + + /// The number of variants in this ADT. + pub(crate) fn adt_variants_len(&self, def: AdtDef) -> usize { + self.cx.adt_variants_len(def) + } + + /// The name of a variant. + pub(crate) fn variant_name(&self, def: VariantDef) -> Symbol { + self.cx.variant_name(def) + } + + pub(crate) fn variant_fields(&self, def: VariantDef) -> Vec { + self.cx.variant_fields(def) + } + + /// Evaluate constant as a target usize. + pub(crate) fn eval_target_usize(&self, cnst: &MirConst) -> Result { + self.cx.eval_target_usize(cnst) + } + + pub(crate) fn eval_target_usize_ty(&self, cnst: &TyConst) -> Result { + self.cx.eval_target_usize_ty(cnst) + } + + /// Create a new zero-sized constant. + pub(crate) fn try_new_const_zst(&self, ty: Ty) -> Result { + self.cx.try_new_const_zst(ty) + } + + /// Create a new constant that represents the given string value. + pub(crate) fn new_const_str(&self, value: &str) -> MirConst { + self.cx.new_const_str(value) + } + + /// Create a new constant that represents the given boolean value. + pub(crate) fn new_const_bool(&self, value: bool) -> MirConst { + self.cx.new_const_bool(value) + } + + /// Create a new constant that represents the given value. + pub(crate) fn try_new_const_uint( + &self, + value: u128, + uint_ty: UintTy, + ) -> Result { + self.cx.try_new_const_uint(value, uint_ty) + } + + pub(crate) fn try_new_ty_const_uint( + &self, + value: u128, + uint_ty: UintTy, + ) -> Result { + self.cx.try_new_ty_const_uint(value, uint_ty) + } + + /// Create a new type from the given kind. + pub(crate) fn new_rigid_ty(&self, kind: RigidTy) -> Ty { + self.cx.new_rigid_ty(kind) + } + + /// Create a new box type, `Box`, for the given inner type `T`. + pub(crate) fn new_box_ty(&self, ty: Ty) -> Ty { + self.cx.new_box_ty(ty) + } + + /// Returns the type of given crate item. + pub(crate) fn def_ty(&self, item: DefId) -> Ty { + self.cx.def_ty(item) + } + + /// Returns the type of given definition instantiated with the given arguments. + pub(crate) fn def_ty_with_args(&self, item: DefId, args: &GenericArgs) -> Ty { + self.cx.def_ty_with_args(item, args) + } + + /// Returns literal value of a const as a string. + pub(crate) fn mir_const_pretty(&self, cnst: &MirConst) -> String { + self.cx.mir_const_pretty(cnst) + } + + /// `Span` of an item. + pub(crate) fn span_of_an_item(&self, def_id: DefId) -> Span { + self.cx.span_of_an_item(def_id) + } + + pub(crate) fn ty_const_pretty(&self, ct: TyConstId) -> String { + self.cx.ty_const_pretty(ct) + } + + /// Obtain the representation of a type. + pub(crate) fn ty_pretty(&self, ty: Ty) -> String { + self.cx.ty_pretty(ty) + } + + /// Obtain the representation of a type. + pub(crate) fn ty_kind(&self, ty: Ty) -> TyKind { + self.cx.ty_kind(ty) + } + + /// Get the discriminant Ty for this Ty if there's one. + pub(crate) fn rigid_ty_discriminant_ty(&self, ty: &RigidTy) -> Ty { + self.cx.rigid_ty_discriminant_ty(ty) + } + + /// Get the body of an Instance which is already monomorphized. + pub(crate) fn instance_body(&self, instance: InstanceDef) -> Option { + self.cx.instance_body(instance) + } + + /// Get the instance type with generic instantiations applied and lifetimes erased. + pub(crate) fn instance_ty(&self, instance: InstanceDef) -> Ty { + self.cx.instance_ty(instance) + } + + /// Get the instantiation types. + pub(crate) fn instance_args(&self, def: InstanceDef) -> GenericArgs { + self.cx.instance_args(def) + } + + /// Get the instance. + pub(crate) fn instance_def_id(&self, instance: InstanceDef) -> DefId { + self.cx.instance_def_id(instance) + } + + /// Get the instance mangled name. + pub(crate) fn instance_mangled_name(&self, instance: InstanceDef) -> Symbol { + self.cx.instance_mangled_name(instance) + } + + /// Check if this is an empty DropGlue shim. + pub(crate) fn is_empty_drop_shim(&self, def: InstanceDef) -> bool { + self.cx.is_empty_drop_shim(def) + } + + /// Convert a non-generic crate item into an instance. + /// This function will panic if the item is generic. + pub(crate) fn mono_instance(&self, def_id: DefId) -> Instance { + self.cx.mono_instance(def_id) + } + + /// Item requires monomorphization. + pub(crate) fn requires_monomorphization(&self, def_id: DefId) -> bool { + self.cx.requires_monomorphization(def_id) + } + + /// Resolve an instance from the given function definition and generic arguments. + pub(crate) fn resolve_instance(&self, def: FnDef, args: &GenericArgs) -> Option { + self.cx.resolve_instance(def, args) + } + + /// Resolve an instance for drop_in_place for the given type. + pub(crate) fn resolve_drop_in_place(&self, ty: Ty) -> Instance { + self.cx.resolve_drop_in_place(ty) + } + + /// Resolve instance for a function pointer. + pub(crate) fn resolve_for_fn_ptr(&self, def: FnDef, args: &GenericArgs) -> Option { + self.cx.resolve_for_fn_ptr(def, args) + } + + /// Resolve instance for a closure with the requested type. + pub(crate) fn resolve_closure( + &self, + def: ClosureDef, + args: &GenericArgs, + kind: ClosureKind, + ) -> Option { + self.cx.resolve_closure(def, args, kind) + } + + /// Evaluate a static's initializer. + pub(crate) fn eval_static_initializer(&self, def: StaticDef) -> Result { + self.cx.eval_static_initializer(def) + } + + /// Try to evaluate an instance into a constant. + pub(crate) fn eval_instance( + &self, + def: InstanceDef, + const_ty: Ty, + ) -> Result { + self.cx.eval_instance(def, const_ty) + } + + /// Retrieve global allocation for the given allocation ID. + pub(crate) fn global_alloc(&self, id: AllocId) -> GlobalAlloc { + self.cx.global_alloc(id) + } + + /// Retrieve the id for the virtual table. + pub(crate) fn vtable_allocation(&self, global_alloc: &GlobalAlloc) -> Option { + self.cx.vtable_allocation(global_alloc) + } + + pub(crate) fn krate(&self, def_id: DefId) -> Crate { + self.cx.krate(def_id) + } + + pub(crate) fn instance_name(&self, def: InstanceDef, trimmed: bool) -> Symbol { + self.cx.instance_name(def, trimmed) + } + + /// Return information about the target machine. + pub(crate) fn target_info(&self) -> MachineInfo { + self.cx.target_info() + } + + /// Get an instance ABI. + pub(crate) fn instance_abi(&self, def: InstanceDef) -> Result { + self.cx.instance_abi(def) + } + + /// Get the ABI of a function pointer. + pub(crate) fn fn_ptr_abi(&self, fn_ptr: PolyFnSig) -> Result { + self.cx.fn_ptr_abi(fn_ptr) + } + + /// Get the layout of a type. + pub(crate) fn ty_layout(&self, ty: Ty) -> Result { + self.cx.ty_layout(ty) + } + + /// Get the layout shape. + pub(crate) fn layout_shape(&self, id: Layout) -> LayoutShape { + self.cx.layout_shape(id) + } + + /// Get a debug string representation of a place. + pub(crate) fn place_pretty(&self, place: &Place) -> String { + self.cx.place_pretty(place) + } + + /// Get the resulting type of binary operation. + pub(crate) fn binop_ty(&self, bin_op: BinOp, rhs: Ty, lhs: Ty) -> Ty { + self.cx.binop_ty(bin_op, rhs, lhs) + } + + /// Get the resulting type of unary operation. + pub(crate) fn unop_ty(&self, un_op: UnOp, arg: Ty) -> Ty { + self.cx.unop_ty(un_op, arg) + } + + /// Get all associated items of a definition. + pub(crate) fn associated_items(&self, def_id: DefId) -> AssocItems { + self.cx.associated_items(def_id) + } +} + +// A thread local variable that stores a pointer to [`SmirInterface`]. +scoped_tls::scoped_thread_local!(static TLV: Cell<*const ()>); + +pub(crate) fn run<'tcx, T, F>(interface: &SmirInterface<'tcx>, f: F) -> Result +where + F: FnOnce() -> T, +{ + if TLV.is_set() { + Err(Error::from("StableMIR already running")) + } else { + let ptr: *const () = (interface as *const SmirInterface<'tcx>) as *const (); + TLV.set(&Cell::new(ptr), || Ok(f())) + } +} + +/// Execute the given function with access the [`SmirInterface`]. +/// +/// I.e., This function will load the current interface and calls a function with it. +/// Do not nest these, as that will ICE. +pub(crate) fn with(f: impl FnOnce(&SmirInterface<'_>) -> R) -> R { + assert!(TLV.is_set()); + TLV.with(|tlv| { + let ptr = tlv.get(); + assert!(!ptr.is_null()); + f(unsafe { &*(ptr as *const SmirInterface<'_>) }) + }) +} diff --git a/compiler/stable_mir/src/crate_def.rs b/compiler/rustc_smir/src/stable_mir/crate_def.rs similarity index 90% rename from compiler/stable_mir/src/crate_def.rs rename to compiler/rustc_smir/src/stable_mir/crate_def.rs index 2577c281ca4f2..64f7ef9b314ff 100644 --- a/compiler/stable_mir/src/crate_def.rs +++ b/compiler/rustc_smir/src/stable_mir/crate_def.rs @@ -2,9 +2,10 @@ //! such as, a function, a trait, an enum, and any other definitions. use serde::Serialize; +use stable_mir::ty::{GenericArgs, Span, Ty}; +use stable_mir::{AssocItems, Crate, Symbol, with}; -use crate::ty::{GenericArgs, Span, Ty}; -use crate::{Crate, Symbol, with}; +use crate::stable_mir; /// A unique identification number for each item accessible for the current compilation unit. #[derive(Clone, Copy, PartialEq, Eq, Hash, Serialize)] @@ -103,6 +104,14 @@ pub trait CrateDefType: CrateDef { } } +/// A trait for retrieving all items from a definition within a crate. +pub trait CrateDefItems: CrateDef { + /// Retrieve all associated items from a definition. + fn associated_items(&self) -> AssocItems { + with(|cx| cx.associated_items(self.def_id())) + } +} + #[derive(Clone, Debug, PartialEq, Eq)] pub struct Attribute { value: String, @@ -158,3 +167,9 @@ macro_rules! crate_def_with_ty { impl CrateDefType for $name {} }; } + +macro_rules! impl_crate_def_items { + ( $name:ident $(;)? ) => { + impl CrateDefItems for $name {} + }; +} diff --git a/compiler/stable_mir/src/error.rs b/compiler/rustc_smir/src/stable_mir/error.rs similarity index 100% rename from compiler/stable_mir/src/error.rs rename to compiler/rustc_smir/src/stable_mir/error.rs diff --git a/compiler/stable_mir/src/mir.rs b/compiler/rustc_smir/src/stable_mir/mir.rs similarity index 100% rename from compiler/stable_mir/src/mir.rs rename to compiler/rustc_smir/src/stable_mir/mir.rs diff --git a/compiler/stable_mir/src/mir/alloc.rs b/compiler/rustc_smir/src/stable_mir/mir/alloc.rs similarity index 92% rename from compiler/stable_mir/src/mir/alloc.rs rename to compiler/rustc_smir/src/stable_mir/mir/alloc.rs index 023807b76aec6..782f52888b73d 100644 --- a/compiler/stable_mir/src/mir/alloc.rs +++ b/compiler/rustc_smir/src/stable_mir/mir/alloc.rs @@ -3,11 +3,12 @@ use std::io::Read; use serde::Serialize; +use stable_mir::mir::mono::{Instance, StaticDef}; +use stable_mir::target::{Endian, MachineInfo}; +use stable_mir::ty::{Allocation, Binder, ExistentialTraitRef, IndexedVal, Ty}; +use stable_mir::{Error, with}; -use crate::mir::mono::{Instance, StaticDef}; -use crate::target::{Endian, MachineInfo}; -use crate::ty::{Allocation, Binder, ExistentialTraitRef, IndexedVal, Ty}; -use crate::{Error, with}; +use crate::stable_mir; /// An allocation in the SMIR global memory can be either a function pointer, /// a static, or a "real" allocation with some data in it. diff --git a/compiler/rustc_smir/src/stable_mir/mir/body.rs b/compiler/rustc_smir/src/stable_mir/mir/body.rs new file mode 100644 index 0000000000000..660cd7db0800d --- /dev/null +++ b/compiler/rustc_smir/src/stable_mir/mir/body.rs @@ -0,0 +1,1126 @@ +use std::io; + +use serde::Serialize; +use stable_mir::compiler_interface::with; +use stable_mir::mir::pretty::function_body; +use stable_mir::ty::{ + AdtDef, ClosureDef, CoroutineClosureDef, CoroutineDef, GenericArgs, MirConst, Movability, + Region, RigidTy, Ty, TyConst, TyKind, VariantIdx, +}; +use stable_mir::{Error, Opaque, Span, Symbol}; + +use crate::stable_mir; + +/// The SMIR representation of a single function. +#[derive(Clone, Debug, Serialize)] +pub struct Body { + pub blocks: Vec, + + /// Declarations of locals within the function. + /// + /// The first local is the return value pointer, followed by `arg_count` + /// locals for the function arguments, followed by any user-declared + /// variables and temporaries. + pub(super) locals: LocalDecls, + + /// The number of arguments this function takes. + pub(super) arg_count: usize, + + /// Debug information pertaining to user variables, including captures. + pub var_debug_info: Vec, + + /// Mark an argument (which must be a tuple) as getting passed as its individual components. + /// + /// This is used for the "rust-call" ABI such as closures. + pub(super) spread_arg: Option, + + /// The span that covers the entire function body. + pub span: Span, +} + +pub type BasicBlockIdx = usize; + +impl Body { + /// Constructs a `Body`. + /// + /// A constructor is required to build a `Body` from outside the crate + /// because the `arg_count` and `locals` fields are private. + pub fn new( + blocks: Vec, + locals: LocalDecls, + arg_count: usize, + var_debug_info: Vec, + spread_arg: Option, + span: Span, + ) -> Self { + // If locals doesn't contain enough entries, it can lead to panics in + // `ret_local`, `arg_locals`, and `inner_locals`. + assert!( + locals.len() > arg_count, + "A Body must contain at least a local for the return value and each of the function's arguments" + ); + Self { blocks, locals, arg_count, var_debug_info, spread_arg, span } + } + + /// Return local that holds this function's return value. + pub fn ret_local(&self) -> &LocalDecl { + &self.locals[RETURN_LOCAL] + } + + /// Locals in `self` that correspond to this function's arguments. + pub fn arg_locals(&self) -> &[LocalDecl] { + &self.locals[1..][..self.arg_count] + } + + /// Inner locals for this function. These are the locals that are + /// neither the return local nor the argument locals. + pub fn inner_locals(&self) -> &[LocalDecl] { + &self.locals[self.arg_count + 1..] + } + + /// Returns a mutable reference to the local that holds this function's return value. + pub(crate) fn ret_local_mut(&mut self) -> &mut LocalDecl { + &mut self.locals[RETURN_LOCAL] + } + + /// Returns a mutable slice of locals corresponding to this function's arguments. + pub(crate) fn arg_locals_mut(&mut self) -> &mut [LocalDecl] { + &mut self.locals[1..][..self.arg_count] + } + + /// Returns a mutable slice of inner locals for this function. + /// Inner locals are those that are neither the return local nor the argument locals. + pub(crate) fn inner_locals_mut(&mut self) -> &mut [LocalDecl] { + &mut self.locals[self.arg_count + 1..] + } + + /// Convenience function to get all the locals in this function. + /// + /// Locals are typically accessed via the more specific methods `ret_local`, + /// `arg_locals`, and `inner_locals`. + pub fn locals(&self) -> &[LocalDecl] { + &self.locals + } + + /// Get the local declaration for this local. + pub fn local_decl(&self, local: Local) -> Option<&LocalDecl> { + self.locals.get(local) + } + + /// Get an iterator for all local declarations. + pub fn local_decls(&self) -> impl Iterator { + self.locals.iter().enumerate() + } + + /// Emit the body using the provided name for the signature. + pub fn dump(&self, w: &mut W, fn_name: &str) -> io::Result<()> { + function_body(w, self, fn_name) + } + + pub fn spread_arg(&self) -> Option { + self.spread_arg + } +} + +type LocalDecls = Vec; + +#[derive(Clone, Debug, Eq, PartialEq, Serialize)] +pub struct LocalDecl { + pub ty: Ty, + pub span: Span, + pub mutability: Mutability, +} + +#[derive(Clone, PartialEq, Eq, Debug, Serialize)] +pub struct BasicBlock { + pub statements: Vec, + pub terminator: Terminator, +} + +#[derive(Clone, Debug, Eq, PartialEq, Serialize)] +pub struct Terminator { + pub kind: TerminatorKind, + pub span: Span, +} + +impl Terminator { + pub fn successors(&self) -> Successors { + self.kind.successors() + } +} + +pub type Successors = Vec; + +#[derive(Clone, Debug, Eq, PartialEq, Serialize)] +pub enum TerminatorKind { + Goto { + target: BasicBlockIdx, + }, + SwitchInt { + discr: Operand, + targets: SwitchTargets, + }, + Resume, + Abort, + Return, + Unreachable, + Drop { + place: Place, + target: BasicBlockIdx, + unwind: UnwindAction, + }, + Call { + func: Operand, + args: Vec, + destination: Place, + target: Option, + unwind: UnwindAction, + }, + Assert { + cond: Operand, + expected: bool, + msg: AssertMessage, + target: BasicBlockIdx, + unwind: UnwindAction, + }, + InlineAsm { + template: String, + operands: Vec, + options: String, + line_spans: String, + destination: Option, + unwind: UnwindAction, + }, +} + +impl TerminatorKind { + pub fn successors(&self) -> Successors { + use self::TerminatorKind::*; + match *self { + Call { target: Some(t), unwind: UnwindAction::Cleanup(u), .. } + | Drop { target: t, unwind: UnwindAction::Cleanup(u), .. } + | Assert { target: t, unwind: UnwindAction::Cleanup(u), .. } + | InlineAsm { destination: Some(t), unwind: UnwindAction::Cleanup(u), .. } => { + vec![t, u] + } + Goto { target: t } + | Call { target: None, unwind: UnwindAction::Cleanup(t), .. } + | Call { target: Some(t), unwind: _, .. } + | Drop { target: t, unwind: _, .. } + | Assert { target: t, unwind: _, .. } + | InlineAsm { destination: None, unwind: UnwindAction::Cleanup(t), .. } + | InlineAsm { destination: Some(t), unwind: _, .. } => { + vec![t] + } + + Return + | Resume + | Abort + | Unreachable + | Call { target: None, unwind: _, .. } + | InlineAsm { destination: None, unwind: _, .. } => { + vec![] + } + SwitchInt { ref targets, .. } => targets.all_targets(), + } + } + + pub fn unwind(&self) -> Option<&UnwindAction> { + match *self { + TerminatorKind::Goto { .. } + | TerminatorKind::Return + | TerminatorKind::Unreachable + | TerminatorKind::Resume + | TerminatorKind::Abort + | TerminatorKind::SwitchInt { .. } => None, + TerminatorKind::Call { ref unwind, .. } + | TerminatorKind::Assert { ref unwind, .. } + | TerminatorKind::Drop { ref unwind, .. } + | TerminatorKind::InlineAsm { ref unwind, .. } => Some(unwind), + } + } +} + +#[derive(Clone, Debug, Eq, PartialEq, Serialize)] +pub struct InlineAsmOperand { + pub in_value: Option, + pub out_place: Option, + // This field has a raw debug representation of MIR's InlineAsmOperand. + // For now we care about place/operand + the rest in a debug format. + pub raw_rpr: String, +} + +#[derive(Copy, Clone, Debug, Eq, PartialEq, Serialize)] +pub enum UnwindAction { + Continue, + Unreachable, + Terminate, + Cleanup(BasicBlockIdx), +} + +#[derive(Clone, Debug, Eq, PartialEq, Serialize)] +pub enum AssertMessage { + BoundsCheck { len: Operand, index: Operand }, + Overflow(BinOp, Operand, Operand), + OverflowNeg(Operand), + DivisionByZero(Operand), + RemainderByZero(Operand), + ResumedAfterReturn(CoroutineKind), + ResumedAfterPanic(CoroutineKind), + ResumedAfterDrop(CoroutineKind), + MisalignedPointerDereference { required: Operand, found: Operand }, + NullPointerDereference, +} + +impl AssertMessage { + pub fn description(&self) -> Result<&'static str, Error> { + match self { + AssertMessage::Overflow(BinOp::Add, _, _) => Ok("attempt to add with overflow"), + AssertMessage::Overflow(BinOp::Sub, _, _) => Ok("attempt to subtract with overflow"), + AssertMessage::Overflow(BinOp::Mul, _, _) => Ok("attempt to multiply with overflow"), + AssertMessage::Overflow(BinOp::Div, _, _) => Ok("attempt to divide with overflow"), + AssertMessage::Overflow(BinOp::Rem, _, _) => { + Ok("attempt to calculate the remainder with overflow") + } + AssertMessage::OverflowNeg(_) => Ok("attempt to negate with overflow"), + AssertMessage::Overflow(BinOp::Shr, _, _) => Ok("attempt to shift right with overflow"), + AssertMessage::Overflow(BinOp::Shl, _, _) => Ok("attempt to shift left with overflow"), + AssertMessage::Overflow(op, _, _) => Err(error!("`{:?}` cannot overflow", op)), + AssertMessage::DivisionByZero(_) => Ok("attempt to divide by zero"), + AssertMessage::RemainderByZero(_) => { + Ok("attempt to calculate the remainder with a divisor of zero") + } + AssertMessage::ResumedAfterReturn(CoroutineKind::Coroutine(_)) => { + Ok("coroutine resumed after completion") + } + AssertMessage::ResumedAfterReturn(CoroutineKind::Desugared( + CoroutineDesugaring::Async, + _, + )) => Ok("`async fn` resumed after completion"), + AssertMessage::ResumedAfterReturn(CoroutineKind::Desugared( + CoroutineDesugaring::Gen, + _, + )) => Ok("`async gen fn` resumed after completion"), + AssertMessage::ResumedAfterReturn(CoroutineKind::Desugared( + CoroutineDesugaring::AsyncGen, + _, + )) => Ok("`gen fn` should just keep returning `AssertMessage::None` after completion"), + AssertMessage::ResumedAfterPanic(CoroutineKind::Coroutine(_)) => { + Ok("coroutine resumed after panicking") + } + AssertMessage::ResumedAfterPanic(CoroutineKind::Desugared( + CoroutineDesugaring::Async, + _, + )) => Ok("`async fn` resumed after panicking"), + AssertMessage::ResumedAfterPanic(CoroutineKind::Desugared( + CoroutineDesugaring::Gen, + _, + )) => Ok("`async gen fn` resumed after panicking"), + AssertMessage::ResumedAfterPanic(CoroutineKind::Desugared( + CoroutineDesugaring::AsyncGen, + _, + )) => Ok("`gen fn` should just keep returning `AssertMessage::None` after panicking"), + + AssertMessage::ResumedAfterDrop(CoroutineKind::Coroutine(_)) => { + Ok("coroutine resumed after async drop") + } + AssertMessage::ResumedAfterDrop(CoroutineKind::Desugared( + CoroutineDesugaring::Async, + _, + )) => Ok("`async fn` resumed after async drop"), + AssertMessage::ResumedAfterDrop(CoroutineKind::Desugared( + CoroutineDesugaring::Gen, + _, + )) => Ok("`async gen fn` resumed after async drop"), + AssertMessage::ResumedAfterDrop(CoroutineKind::Desugared( + CoroutineDesugaring::AsyncGen, + _, + )) => Ok("`gen fn` should just keep returning `AssertMessage::None` after async drop"), + + AssertMessage::BoundsCheck { .. } => Ok("index out of bounds"), + AssertMessage::MisalignedPointerDereference { .. } => { + Ok("misaligned pointer dereference") + } + AssertMessage::NullPointerDereference => Ok("null pointer dereference occurred"), + } + } +} + +#[derive(Copy, Clone, Debug, Eq, PartialEq, Serialize)] +pub enum BinOp { + Add, + AddUnchecked, + Sub, + SubUnchecked, + Mul, + MulUnchecked, + Div, + Rem, + BitXor, + BitAnd, + BitOr, + Shl, + ShlUnchecked, + Shr, + ShrUnchecked, + Eq, + Lt, + Le, + Ne, + Ge, + Gt, + Cmp, + Offset, +} + +impl BinOp { + /// Return the type of this operation for the given input Ty. + /// This function does not perform type checking, and it currently doesn't handle SIMD. + pub fn ty(&self, lhs_ty: Ty, rhs_ty: Ty) -> Ty { + with(|ctx| ctx.binop_ty(*self, lhs_ty, rhs_ty)) + } +} + +#[derive(Copy, Clone, Debug, Eq, PartialEq, Serialize)] +pub enum UnOp { + Not, + Neg, + PtrMetadata, +} + +impl UnOp { + /// Return the type of this operation for the given input Ty. + /// This function does not perform type checking, and it currently doesn't handle SIMD. + pub fn ty(&self, arg_ty: Ty) -> Ty { + with(|ctx| ctx.unop_ty(*self, arg_ty)) + } +} + +#[derive(Clone, Debug, Eq, PartialEq, Serialize)] +pub enum CoroutineKind { + Desugared(CoroutineDesugaring, CoroutineSource), + Coroutine(Movability), +} + +#[derive(Copy, Clone, Debug, Eq, PartialEq, Serialize)] +pub enum CoroutineSource { + Block, + Closure, + Fn, +} + +#[derive(Copy, Clone, Debug, Eq, PartialEq, Serialize)] +pub enum CoroutineDesugaring { + Async, + + Gen, + + AsyncGen, +} + +pub(crate) type LocalDefId = Opaque; +/// The rustc coverage data structures are heavily tied to internal details of the +/// coverage implementation that are likely to change, and are unlikely to be +/// useful to third-party tools for the foreseeable future. +pub(crate) type Coverage = Opaque; + +/// The FakeReadCause describes the type of pattern why a FakeRead statement exists. +#[derive(Clone, Debug, Eq, PartialEq, Serialize)] +pub enum FakeReadCause { + ForMatchGuard, + ForMatchedPlace(LocalDefId), + ForGuardBinding, + ForLet(LocalDefId), + ForIndex, +} + +/// Describes what kind of retag is to be performed +#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash, Serialize)] +pub enum RetagKind { + FnEntry, + TwoPhase, + Raw, + Default, +} + +#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash, Serialize)] +pub enum Variance { + Covariant, + Invariant, + Contravariant, + Bivariant, +} + +#[derive(Clone, Debug, Eq, PartialEq, Serialize)] +pub struct CopyNonOverlapping { + pub src: Operand, + pub dst: Operand, + pub count: Operand, +} + +#[derive(Clone, Debug, Eq, PartialEq, Serialize)] +pub enum NonDivergingIntrinsic { + Assume(Operand), + CopyNonOverlapping(CopyNonOverlapping), +} + +#[derive(Clone, Debug, Eq, PartialEq, Serialize)] +pub struct Statement { + pub kind: StatementKind, + pub span: Span, +} + +#[derive(Clone, Debug, Eq, PartialEq, Serialize)] +pub enum StatementKind { + Assign(Place, Rvalue), + FakeRead(FakeReadCause, Place), + SetDiscriminant { place: Place, variant_index: VariantIdx }, + Deinit(Place), + StorageLive(Local), + StorageDead(Local), + Retag(RetagKind, Place), + PlaceMention(Place), + AscribeUserType { place: Place, projections: UserTypeProjection, variance: Variance }, + Coverage(Coverage), + Intrinsic(NonDivergingIntrinsic), + ConstEvalCounter, + Nop, +} + +#[derive(Clone, Debug, Eq, PartialEq, Serialize)] +pub enum Rvalue { + /// Creates a pointer with the indicated mutability to the place. + /// + /// This is generated by pointer casts like `&v as *const _` or raw address of expressions like + /// `&raw v` or `addr_of!(v)`. + AddressOf(RawPtrKind, Place), + + /// Creates an aggregate value, like a tuple or struct. + /// + /// This is needed because dataflow analysis needs to distinguish + /// `dest = Foo { x: ..., y: ... }` from `dest.x = ...; dest.y = ...;` in the case that `Foo` + /// has a destructor. + /// + /// Disallowed after deaggregation for all aggregate kinds except `Array` and `Coroutine`. After + /// coroutine lowering, `Coroutine` aggregate kinds are disallowed too. + Aggregate(AggregateKind, Vec), + + /// * `Offset` has the same semantics as `<*const T>::offset`, except that the second + /// parameter may be a `usize` as well. + /// * The comparison operations accept `bool`s, `char`s, signed or unsigned integers, floats, + /// raw pointers, or function pointers and return a `bool`. The types of the operands must be + /// matching, up to the usual caveat of the lifetimes in function pointers. + /// * Left and right shift operations accept signed or unsigned integers not necessarily of the + /// same type and return a value of the same type as their LHS. Like in Rust, the RHS is + /// truncated as needed. + /// * The `Bit*` operations accept signed integers, unsigned integers, or bools with matching + /// types and return a value of that type. + /// * The remaining operations accept signed integers, unsigned integers, or floats with + /// matching types and return a value of that type. + BinaryOp(BinOp, Operand, Operand), + + /// Performs essentially all of the casts that can be performed via `as`. + /// + /// This allows for casts from/to a variety of types. + Cast(CastKind, Operand, Ty), + + /// Same as `BinaryOp`, but yields `(T, bool)` with a `bool` indicating an error condition. + /// + /// For addition, subtraction, and multiplication on integers the error condition is set when + /// the infinite precision result would not be equal to the actual result. + CheckedBinaryOp(BinOp, Operand, Operand), + + /// A CopyForDeref is equivalent to a read from a place. + /// When such a read happens, it is guaranteed that the only use of the returned value is a + /// deref operation, immediately followed by one or more projections. + CopyForDeref(Place), + + /// Computes the discriminant of the place, returning it as an integer. + /// Returns zero for types without discriminant. + /// + /// The validity requirements for the underlying value are undecided for this rvalue, see + /// [#91095]. Note too that the value of the discriminant is not the same thing as the + /// variant index; + /// + /// [#91095]: https://github.com/rust-lang/rust/issues/91095 + Discriminant(Place), + + /// Yields the length of the place, as a `usize`. + /// + /// If the type of the place is an array, this is the array length. For slices (`[T]`, not + /// `&[T]`) this accesses the place's metadata to determine the length. This rvalue is + /// ill-formed for places of other types. + Len(Place), + + /// Creates a reference to the place. + Ref(Region, BorrowKind, Place), + + /// Creates an array where each element is the value of the operand. + /// + /// This is the cause of a bug in the case where the repetition count is zero because the value + /// is not dropped, see [#74836]. + /// + /// Corresponds to source code like `[x; 32]`. + /// + /// [#74836]: https://github.com/rust-lang/rust/issues/74836 + Repeat(Operand, TyConst), + + /// Transmutes a `*mut u8` into shallow-initialized `Box`. + /// + /// This is different from a normal transmute because dataflow analysis will treat the box as + /// initialized but its content as uninitialized. Like other pointer casts, this in general + /// affects alias analysis. + ShallowInitBox(Operand, Ty), + + /// Creates a pointer/reference to the given thread local. + /// + /// The yielded type is a `*mut T` if the static is mutable, otherwise if the static is extern a + /// `*const T`, and if neither of those apply a `&T`. + /// + /// **Note:** This is a runtime operation that actually executes code and is in this sense more + /// like a function call. Also, eliminating dead stores of this rvalue causes `fn main() {}` to + /// SIGILL for some reason that I (JakobDegen) never got a chance to look into. + /// + /// **Needs clarification**: Are there weird additional semantics here related to the runtime + /// nature of this operation? + ThreadLocalRef(stable_mir::CrateItem), + + /// Computes a value as described by the operation. + NullaryOp(NullOp, Ty), + + /// Exactly like `BinaryOp`, but less operands. + /// + /// Also does two's-complement arithmetic. Negation requires a signed integer or a float; + /// bitwise not requires a signed integer, unsigned integer, or bool. Both operation kinds + /// return a value with the same type as their operand. + UnaryOp(UnOp, Operand), + + /// Yields the operand unchanged + Use(Operand), +} + +impl Rvalue { + pub fn ty(&self, locals: &[LocalDecl]) -> Result { + match self { + Rvalue::Use(operand) => operand.ty(locals), + Rvalue::Repeat(operand, count) => { + Ok(Ty::new_array_with_const_len(operand.ty(locals)?, count.clone())) + } + Rvalue::ThreadLocalRef(did) => Ok(did.ty()), + Rvalue::Ref(reg, bk, place) => { + let place_ty = place.ty(locals)?; + Ok(Ty::new_ref(reg.clone(), place_ty, bk.to_mutable_lossy())) + } + Rvalue::AddressOf(mutability, place) => { + let place_ty = place.ty(locals)?; + Ok(Ty::new_ptr(place_ty, mutability.to_mutable_lossy())) + } + Rvalue::Len(..) => Ok(Ty::usize_ty()), + Rvalue::Cast(.., ty) => Ok(*ty), + Rvalue::BinaryOp(op, lhs, rhs) => { + let lhs_ty = lhs.ty(locals)?; + let rhs_ty = rhs.ty(locals)?; + Ok(op.ty(lhs_ty, rhs_ty)) + } + Rvalue::CheckedBinaryOp(op, lhs, rhs) => { + let lhs_ty = lhs.ty(locals)?; + let rhs_ty = rhs.ty(locals)?; + let ty = op.ty(lhs_ty, rhs_ty); + Ok(Ty::new_tuple(&[ty, Ty::bool_ty()])) + } + Rvalue::UnaryOp(op, operand) => { + let arg_ty = operand.ty(locals)?; + Ok(op.ty(arg_ty)) + } + Rvalue::Discriminant(place) => { + let place_ty = place.ty(locals)?; + place_ty + .kind() + .discriminant_ty() + .ok_or_else(|| error!("Expected a `RigidTy` but found: {place_ty:?}")) + } + Rvalue::NullaryOp(NullOp::SizeOf | NullOp::AlignOf | NullOp::OffsetOf(..), _) => { + Ok(Ty::usize_ty()) + } + Rvalue::NullaryOp(NullOp::ContractChecks, _) + | Rvalue::NullaryOp(NullOp::UbChecks, _) => Ok(Ty::bool_ty()), + Rvalue::Aggregate(ak, ops) => match *ak { + AggregateKind::Array(ty) => Ty::try_new_array(ty, ops.len() as u64), + AggregateKind::Tuple => Ok(Ty::new_tuple( + &ops.iter().map(|op| op.ty(locals)).collect::, _>>()?, + )), + AggregateKind::Adt(def, _, ref args, _, _) => Ok(def.ty_with_args(args)), + AggregateKind::Closure(def, ref args) => Ok(Ty::new_closure(def, args.clone())), + AggregateKind::Coroutine(def, ref args, mov) => { + Ok(Ty::new_coroutine(def, args.clone(), mov)) + } + AggregateKind::CoroutineClosure(def, ref args) => { + Ok(Ty::new_coroutine_closure(def, args.clone())) + } + AggregateKind::RawPtr(ty, mutability) => Ok(Ty::new_ptr(ty, mutability)), + }, + Rvalue::ShallowInitBox(_, ty) => Ok(Ty::new_box(*ty)), + Rvalue::CopyForDeref(place) => place.ty(locals), + } + } +} + +#[derive(Clone, Debug, Eq, PartialEq, Serialize)] +pub enum AggregateKind { + Array(Ty), + Tuple, + Adt(AdtDef, VariantIdx, GenericArgs, Option, Option), + Closure(ClosureDef, GenericArgs), + // FIXME(stable_mir): Movability here is redundant + Coroutine(CoroutineDef, GenericArgs, Movability), + CoroutineClosure(CoroutineClosureDef, GenericArgs), + RawPtr(Ty, Mutability), +} + +#[derive(Clone, Debug, Eq, PartialEq, Serialize)] +pub enum Operand { + Copy(Place), + Move(Place), + Constant(ConstOperand), +} + +#[derive(Clone, Eq, PartialEq, Serialize)] +pub struct Place { + pub local: Local, + /// projection out of a place (access a field, deref a pointer, etc) + pub projection: Vec, +} + +impl From for Place { + fn from(local: Local) -> Self { + Place { local, projection: vec![] } + } +} + +#[derive(Clone, Debug, Eq, PartialEq, Serialize)] +pub struct ConstOperand { + pub span: Span, + pub user_ty: Option, + pub const_: MirConst, +} + +/// Debug information pertaining to a user variable. +#[derive(Clone, Debug, Eq, PartialEq, Serialize)] +pub struct VarDebugInfo { + /// The variable name. + pub name: Symbol, + + /// Source info of the user variable, including the scope + /// within which the variable is visible (to debuginfo). + pub source_info: SourceInfo, + + /// The user variable's data is split across several fragments, + /// each described by a `VarDebugInfoFragment`. + pub composite: Option, + + /// Where the data for this user variable is to be found. + pub value: VarDebugInfoContents, + + /// When present, indicates what argument number this variable is in the function that it + /// originated from (starting from 1). Note, if MIR inlining is enabled, then this is the + /// argument number in the original function before it was inlined. + pub argument_index: Option, +} + +impl VarDebugInfo { + /// Return a local variable if this info is related to one. + pub fn local(&self) -> Option { + match &self.value { + VarDebugInfoContents::Place(place) if place.projection.is_empty() => Some(place.local), + VarDebugInfoContents::Place(_) | VarDebugInfoContents::Const(_) => None, + } + } + + /// Return a constant if this info is related to one. + pub fn constant(&self) -> Option<&ConstOperand> { + match &self.value { + VarDebugInfoContents::Place(_) => None, + VarDebugInfoContents::Const(const_op) => Some(const_op), + } + } +} + +pub type SourceScope = u32; + +#[derive(Clone, Debug, Eq, PartialEq, Serialize)] +pub struct SourceInfo { + pub span: Span, + pub scope: SourceScope, +} + +#[derive(Clone, Debug, Eq, PartialEq, Serialize)] +pub struct VarDebugInfoFragment { + pub ty: Ty, + pub projection: Vec, +} + +#[derive(Clone, Debug, Eq, PartialEq, Serialize)] +pub enum VarDebugInfoContents { + Place(Place), + Const(ConstOperand), +} + +// In MIR ProjectionElem is parameterized on the second Field argument and the Index argument. This +// is so it can be used for both Places (for which the projection elements are of type +// ProjectionElem) and user-provided type annotations (for which the projection elements +// are of type ProjectionElem<(), ()>). In SMIR we don't need this generality, so we just use +// ProjectionElem for Places. +#[derive(Clone, Debug, Eq, PartialEq, Serialize)] +pub enum ProjectionElem { + /// Dereference projections (e.g. `*_1`) project to the address referenced by the base place. + Deref, + + /// A field projection (e.g., `f` in `_1.f`) project to a field in the base place. The field is + /// referenced by source-order index rather than the name of the field. The fields type is also + /// given. + Field(FieldIdx, Ty), + + /// Index into a slice/array. The value of the index is computed at runtime using the `V` + /// argument. + /// + /// Note that this does not also dereference, and so it does not exactly correspond to slice + /// indexing in Rust. In other words, in the below Rust code: + /// + /// ```rust + /// let x = &[1, 2, 3, 4]; + /// let i = 2; + /// x[i]; + /// ``` + /// + /// The `x[i]` is turned into a `Deref` followed by an `Index`, not just an `Index`. The same + /// thing is true of the `ConstantIndex` and `Subslice` projections below. + Index(Local), + + /// Index into a slice/array given by offsets. + /// + /// These indices are generated by slice patterns. Easiest to explain by example: + /// + /// ```ignore (illustrative) + /// [X, _, .._, _, _] => { offset: 0, min_length: 4, from_end: false }, + /// [_, X, .._, _, _] => { offset: 1, min_length: 4, from_end: false }, + /// [_, _, .._, X, _] => { offset: 2, min_length: 4, from_end: true }, + /// [_, _, .._, _, X] => { offset: 1, min_length: 4, from_end: true }, + /// ``` + ConstantIndex { + /// index or -index (in Python terms), depending on from_end + offset: u64, + /// The thing being indexed must be at least this long -- otherwise, the + /// projection is UB. + /// + /// For arrays this is always the exact length. + min_length: u64, + /// Counting backwards from end? This is always false when indexing an + /// array. + from_end: bool, + }, + + /// Projects a slice from the base place. + /// + /// These indices are generated by slice patterns. If `from_end` is true, this represents + /// `slice[from..slice.len() - to]`. Otherwise it represents `array[from..to]`. + Subslice { + from: u64, + to: u64, + /// Whether `to` counts from the start or end of the array/slice. + from_end: bool, + }, + + /// "Downcast" to a variant of an enum or a coroutine. + Downcast(VariantIdx), + + /// Like an explicit cast from an opaque type to a concrete type, but without + /// requiring an intermediate variable. + OpaqueCast(Ty), + + /// A `Subtype(T)` projection is applied to any `StatementKind::Assign` where + /// type of lvalue doesn't match the type of rvalue, the primary goal is making subtyping + /// explicit during optimizations and codegen. + /// + /// This projection doesn't impact the runtime behavior of the program except for potentially changing + /// some type metadata of the interpreter or codegen backend. + Subtype(Ty), +} + +#[derive(Clone, Debug, Eq, PartialEq, Serialize)] +pub struct UserTypeProjection { + pub base: UserTypeAnnotationIndex, + + pub projection: Opaque, +} + +pub type Local = usize; + +pub const RETURN_LOCAL: Local = 0; + +/// The source-order index of a field in a variant. +/// +/// For example, in the following types, +/// ```ignore(illustrative) +/// enum Demo1 { +/// Variant0 { a: bool, b: i32 }, +/// Variant1 { c: u8, d: u64 }, +/// } +/// struct Demo2 { e: u8, f: u16, g: u8 } +/// ``` +/// `a`'s `FieldIdx` is `0`, +/// `b`'s `FieldIdx` is `1`, +/// `c`'s `FieldIdx` is `0`, and +/// `g`'s `FieldIdx` is `2`. +pub type FieldIdx = usize; + +type UserTypeAnnotationIndex = usize; + +/// The possible branch sites of a [TerminatorKind::SwitchInt]. +#[derive(Clone, Debug, Eq, PartialEq, Serialize)] +pub struct SwitchTargets { + /// The conditional branches where the first element represents the value that guards this + /// branch, and the second element is the branch target. + branches: Vec<(u128, BasicBlockIdx)>, + /// The `otherwise` branch which will be taken in case none of the conditional branches are + /// satisfied. + otherwise: BasicBlockIdx, +} + +impl SwitchTargets { + /// All possible targets including the `otherwise` target. + pub fn all_targets(&self) -> Successors { + self.branches.iter().map(|(_, target)| *target).chain(Some(self.otherwise)).collect() + } + + /// The `otherwise` branch target. + pub fn otherwise(&self) -> BasicBlockIdx { + self.otherwise + } + + /// The conditional targets which are only taken if the pattern matches the given value. + pub fn branches(&self) -> impl Iterator { + self.branches.iter().copied() + } + + /// The number of targets including `otherwise`. + pub fn len(&self) -> usize { + self.branches.len() + 1 + } + + /// Create a new SwitchTargets from the given branches and `otherwise` target. + pub fn new(branches: Vec<(u128, BasicBlockIdx)>, otherwise: BasicBlockIdx) -> SwitchTargets { + SwitchTargets { branches, otherwise } + } +} + +#[derive(Copy, Clone, Debug, Eq, PartialEq, Serialize)] +pub enum BorrowKind { + /// Data must be immutable and is aliasable. + Shared, + + /// An immutable, aliasable borrow that is discarded after borrow-checking. Can behave either + /// like a normal shared borrow or like a special shallow borrow (see [`FakeBorrowKind`]). + Fake(FakeBorrowKind), + + /// Data is mutable and not aliasable. + Mut { + /// `true` if this borrow arose from method-call auto-ref + kind: MutBorrowKind, + }, +} + +impl BorrowKind { + pub fn to_mutable_lossy(self) -> Mutability { + match self { + BorrowKind::Mut { .. } => Mutability::Mut, + BorrowKind::Shared => Mutability::Not, + // FIXME: There's no type corresponding to a shallow borrow, so use `&` as an approximation. + BorrowKind::Fake(_) => Mutability::Not, + } + } +} + +#[derive(Copy, Clone, Debug, Eq, PartialEq, Serialize)] +pub enum RawPtrKind { + Mut, + Const, + FakeForPtrMetadata, +} + +impl RawPtrKind { + pub fn to_mutable_lossy(self) -> Mutability { + match self { + RawPtrKind::Mut { .. } => Mutability::Mut, + RawPtrKind::Const => Mutability::Not, + // FIXME: There's no type corresponding to a shallow borrow, so use `&` as an approximation. + RawPtrKind::FakeForPtrMetadata => Mutability::Not, + } + } +} + +#[derive(Copy, Clone, Debug, Eq, PartialEq, Serialize)] +pub enum MutBorrowKind { + Default, + TwoPhaseBorrow, + ClosureCapture, +} + +#[derive(Copy, Clone, Debug, Eq, PartialEq, Serialize)] +pub enum FakeBorrowKind { + /// A shared (deep) borrow. Data must be immutable and is aliasable. + Deep, + /// The immediately borrowed place must be immutable, but projections from + /// it don't need to be. This is used to prevent match guards from replacing + /// the scrutinee. For example, a fake borrow of `a.b` doesn't + /// conflict with a mutable borrow of `a.b.c`. + Shallow, +} + +#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, Serialize)] +pub enum Mutability { + Not, + Mut, +} + +#[derive(Copy, Clone, Debug, Eq, PartialEq, Serialize)] +pub enum Safety { + Safe, + Unsafe, +} + +#[derive(Copy, Clone, Debug, Eq, PartialEq, Serialize)] +pub enum PointerCoercion { + /// Go from a fn-item type to a fn-pointer type. + ReifyFnPointer, + + /// Go from a safe fn pointer to an unsafe fn pointer. + UnsafeFnPointer, + + /// Go from a non-capturing closure to a fn pointer or an unsafe fn pointer. + /// It cannot convert a closure that requires unsafe. + ClosureFnPointer(Safety), + + /// Go from a mut raw pointer to a const raw pointer. + MutToConstPointer, + + /// Go from `*const [T; N]` to `*const T` + ArrayToPointer, + + /// Unsize a pointer/reference value, e.g., `&[T; n]` to + /// `&[T]`. Note that the source could be a thin or wide pointer. + /// This will do things like convert thin pointers to wide + /// pointers, or convert structs containing thin pointers to + /// structs containing wide pointers, or convert between wide + /// pointers. + Unsize, +} + +#[derive(Copy, Clone, Debug, Eq, PartialEq, Serialize)] +pub enum CastKind { + // FIXME(smir-rename): rename this to PointerExposeProvenance + PointerExposeAddress, + PointerWithExposedProvenance, + PointerCoercion(PointerCoercion), + // FIXME(smir-rename): change this to PointerCoercion(DynStar) + DynStar, + IntToInt, + FloatToInt, + FloatToFloat, + IntToFloat, + PtrToPtr, + FnPtrToPtr, + Transmute, +} + +#[derive(Clone, Debug, Eq, PartialEq, Serialize)] +pub enum NullOp { + /// Returns the size of a value of that type. + SizeOf, + /// Returns the minimum alignment of a type. + AlignOf, + /// Returns the offset of a field. + OffsetOf(Vec<(VariantIdx, FieldIdx)>), + /// cfg!(ub_checks), but at codegen time + UbChecks, + /// cfg!(contract_checks), but at codegen time + ContractChecks, +} + +impl Operand { + /// Get the type of an operand relative to the local declaration. + /// + /// In order to retrieve the correct type, the `locals` argument must match the list of all + /// locals from the function body where this operand originates from. + /// + /// Errors indicate a malformed operand or incompatible locals list. + pub fn ty(&self, locals: &[LocalDecl]) -> Result { + match self { + Operand::Copy(place) | Operand::Move(place) => place.ty(locals), + Operand::Constant(c) => Ok(c.ty()), + } + } +} + +impl ConstOperand { + pub fn ty(&self) -> Ty { + self.const_.ty() + } +} + +impl Place { + /// Resolve down the chain of projections to get the type referenced at the end of it. + /// E.g.: + /// Calling `ty()` on `var.field` should return the type of `field`. + /// + /// In order to retrieve the correct type, the `locals` argument must match the list of all + /// locals from the function body where this place originates from. + pub fn ty(&self, locals: &[LocalDecl]) -> Result { + self.projection.iter().try_fold(locals[self.local].ty, |place_ty, elem| elem.ty(place_ty)) + } +} + +impl ProjectionElem { + /// Get the expected type after applying this projection to a given place type. + pub fn ty(&self, place_ty: Ty) -> Result { + let ty = place_ty; + match &self { + ProjectionElem::Deref => Self::deref_ty(ty), + ProjectionElem::Field(_idx, fty) => Ok(*fty), + ProjectionElem::Index(_) | ProjectionElem::ConstantIndex { .. } => Self::index_ty(ty), + ProjectionElem::Subslice { from, to, from_end } => { + Self::subslice_ty(ty, *from, *to, *from_end) + } + ProjectionElem::Downcast(_) => Ok(ty), + ProjectionElem::OpaqueCast(ty) | ProjectionElem::Subtype(ty) => Ok(*ty), + } + } + + fn index_ty(ty: Ty) -> Result { + ty.kind().builtin_index().ok_or_else(|| error!("Cannot index non-array type: {ty:?}")) + } + + fn subslice_ty(ty: Ty, from: u64, to: u64, from_end: bool) -> Result { + let ty_kind = ty.kind(); + match ty_kind { + TyKind::RigidTy(RigidTy::Slice(..)) => Ok(ty), + TyKind::RigidTy(RigidTy::Array(inner, _)) if !from_end => Ty::try_new_array( + inner, + to.checked_sub(from).ok_or_else(|| error!("Subslice overflow: {from}..{to}"))?, + ), + TyKind::RigidTy(RigidTy::Array(inner, size)) => { + let size = size.eval_target_usize()?; + let len = size - from - to; + Ty::try_new_array(inner, len) + } + _ => Err(Error(format!("Cannot subslice non-array type: `{ty_kind:?}`"))), + } + } + + fn deref_ty(ty: Ty) -> Result { + let deref_ty = ty + .kind() + .builtin_deref(true) + .ok_or_else(|| error!("Cannot dereference type: {ty:?}"))?; + Ok(deref_ty.ty) + } +} diff --git a/compiler/stable_mir/src/mir/mono.rs b/compiler/rustc_smir/src/stable_mir/mir/mono.rs similarity index 88% rename from compiler/stable_mir/src/mir/mono.rs rename to compiler/rustc_smir/src/stable_mir/mir/mono.rs index 22507a49411f9..f5239cccae1eb 100644 --- a/compiler/stable_mir/src/mir/mono.rs +++ b/compiler/rustc_smir/src/stable_mir/mir/mono.rs @@ -2,12 +2,13 @@ use std::fmt::{Debug, Formatter}; use std::io; use serde::Serialize; +use stable_mir::abi::FnAbi; +use stable_mir::crate_def::CrateDef; +use stable_mir::mir::Body; +use stable_mir::ty::{Allocation, ClosureDef, ClosureKind, FnDef, GenericArgs, IndexedVal, Ty}; +use stable_mir::{CrateItem, DefId, Error, ItemKind, Opaque, Symbol, with}; -use crate::abi::FnAbi; -use crate::crate_def::CrateDef; -use crate::mir::Body; -use crate::ty::{Allocation, ClosureDef, ClosureKind, FnDef, GenericArgs, IndexedVal, Ty}; -use crate::{CrateItem, DefId, Error, ItemKind, Opaque, Symbol, with}; +use crate::stable_mir; #[derive(Clone, Debug, PartialEq, Eq, Hash, Serialize)] pub enum MonoItem { @@ -117,11 +118,11 @@ impl Instance { } /// Resolve an instance starting from a function definition and generic arguments. - pub fn resolve(def: FnDef, args: &GenericArgs) -> Result { + pub fn resolve(def: FnDef, args: &GenericArgs) -> Result { with(|context| { - context.resolve_instance(def, args).ok_or_else(|| { - crate::Error::new(format!("Failed to resolve `{def:?}` with `{args:?}`")) - }) + context + .resolve_instance(def, args) + .ok_or_else(|| Error::new(format!("Failed to resolve `{def:?}` with `{args:?}`"))) }) } @@ -131,11 +132,11 @@ impl Instance { } /// Resolve an instance for a given function pointer. - pub fn resolve_for_fn_ptr(def: FnDef, args: &GenericArgs) -> Result { + pub fn resolve_for_fn_ptr(def: FnDef, args: &GenericArgs) -> Result { with(|context| { - context.resolve_for_fn_ptr(def, args).ok_or_else(|| { - crate::Error::new(format!("Failed to resolve `{def:?}` with `{args:?}`")) - }) + context + .resolve_for_fn_ptr(def, args) + .ok_or_else(|| Error::new(format!("Failed to resolve `{def:?}` with `{args:?}`"))) }) } @@ -144,11 +145,11 @@ impl Instance { def: ClosureDef, args: &GenericArgs, kind: ClosureKind, - ) -> Result { + ) -> Result { with(|context| { - context.resolve_closure(def, args, kind).ok_or_else(|| { - crate::Error::new(format!("Failed to resolve `{def:?}` with `{args:?}`")) - }) + context + .resolve_closure(def, args, kind) + .ok_or_else(|| Error::new(format!("Failed to resolve `{def:?}` with `{args:?}`"))) }) } @@ -162,10 +163,7 @@ impl Instance { /// When generating code for a Drop terminator, users can ignore an empty drop glue. /// These shims are only needed to generate a valid Drop call done via VTable. pub fn is_empty_shim(&self) -> bool { - self.kind == InstanceKind::Shim - && with(|cx| { - cx.is_empty_drop_shim(self.def) || cx.is_empty_async_drop_ctor_shim(self.def) - }) + self.kind == InstanceKind::Shim && with(|cx| cx.is_empty_drop_shim(self.def)) } /// Try to constant evaluate the instance into a constant with the given type. @@ -195,7 +193,7 @@ impl Debug for Instance { /// Try to convert a crate item into an instance. /// The item cannot be generic in order to be converted into an instance. impl TryFrom for Instance { - type Error = crate::Error; + type Error = stable_mir::Error; fn try_from(item: CrateItem) -> Result { with(|context| { @@ -212,7 +210,7 @@ impl TryFrom for Instance { /// Try to convert an instance into a crate item. /// Only user defined instances can be converted. impl TryFrom for CrateItem { - type Error = crate::Error; + type Error = stable_mir::Error; fn try_from(value: Instance) -> Result { with(|context| { @@ -259,7 +257,7 @@ crate_def! { } impl TryFrom for StaticDef { - type Error = crate::Error; + type Error = stable_mir::Error; fn try_from(value: CrateItem) -> Result { if matches!(value.kind(), ItemKind::Static) { @@ -271,7 +269,7 @@ impl TryFrom for StaticDef { } impl TryFrom for StaticDef { - type Error = crate::Error; + type Error = stable_mir::Error; fn try_from(value: Instance) -> Result { StaticDef::try_from(CrateItem::try_from(value)?) diff --git a/compiler/rustc_smir/src/stable_mir/mir/pretty.rs b/compiler/rustc_smir/src/stable_mir/mir/pretty.rs new file mode 100644 index 0000000000000..ba20651f993d1 --- /dev/null +++ b/compiler/rustc_smir/src/stable_mir/mir/pretty.rs @@ -0,0 +1,466 @@ +//! Implement methods to pretty print stable MIR body. +use std::fmt::Debug; +use std::io::Write; +use std::{fmt, io, iter}; + +use fmt::{Display, Formatter}; +use stable_mir::mir::{ + Operand, Place, RawPtrKind, Rvalue, StatementKind, UnwindAction, VarDebugInfoContents, +}; +use stable_mir::ty::{AdtKind, AssocKind, IndexedVal, MirConst, Ty, TyConst}; +use stable_mir::{Body, CrateDef, Mutability, with}; + +use super::{AggregateKind, AssertMessage, BinOp, BorrowKind, FakeBorrowKind, TerminatorKind}; +use crate::stable_mir; + +impl Display for Ty { + fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { + with(|ctx| write!(f, "{}", ctx.ty_pretty(*self))) + } +} + +impl Display for AssocKind { + fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { + match self { + AssocKind::Fn { has_self: true, .. } => write!(f, "method"), + AssocKind::Fn { has_self: false, .. } => write!(f, "associated function"), + AssocKind::Const { .. } => write!(f, "associated const"), + AssocKind::Type { .. } => write!(f, "associated type"), + } + } +} + +impl Debug for Place { + fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { + with(|ctx| write!(f, "{}", ctx.place_pretty(self))) + } +} + +pub(crate) fn function_body(writer: &mut W, body: &Body, name: &str) -> io::Result<()> { + write!(writer, "fn {name}(")?; + let mut sep = ""; + for (index, local) in body.arg_locals().iter().enumerate() { + write!(writer, "{}_{}: {}", sep, index + 1, local.ty)?; + sep = ", "; + } + write!(writer, ")")?; + + let return_local = body.ret_local(); + writeln!(writer, " -> {} {{", return_local.ty)?; + + body.locals().iter().enumerate().try_for_each(|(index, local)| -> io::Result<()> { + if index == 0 || index > body.arg_count { + writeln!(writer, " let {}_{}: {};", pretty_mut(local.mutability), index, local.ty) + } else { + Ok(()) + } + })?; + + body.var_debug_info.iter().try_for_each(|info| { + let content = match &info.value { + VarDebugInfoContents::Place(place) => { + format!("{place:?}") + } + VarDebugInfoContents::Const(constant) => pretty_mir_const(&constant.const_), + }; + writeln!(writer, " debug {} => {};", info.name, content) + })?; + + body.blocks + .iter() + .enumerate() + .map(|(index, block)| -> io::Result<()> { + writeln!(writer, " bb{index}: {{")?; + let _ = block + .statements + .iter() + .map(|statement| -> io::Result<()> { + pretty_statement(writer, &statement.kind)?; + Ok(()) + }) + .collect::>(); + pretty_terminator(writer, &block.terminator.kind)?; + writeln!(writer, " }}").unwrap(); + Ok(()) + }) + .collect::, _>>()?; + writeln!(writer, "}}")?; + Ok(()) +} + +fn pretty_statement(writer: &mut W, statement: &StatementKind) -> io::Result<()> { + const INDENT: &str = " "; + match statement { + StatementKind::Assign(place, rval) => { + write!(writer, "{INDENT}{place:?} = ")?; + pretty_rvalue(writer, rval)?; + writeln!(writer, ";") + } + // FIXME: Add rest of the statements + StatementKind::FakeRead(cause, place) => { + writeln!(writer, "{INDENT}FakeRead({cause:?}, {place:?});") + } + StatementKind::SetDiscriminant { place, variant_index } => { + writeln!(writer, "{INDENT}discriminant({place:?} = {};", variant_index.to_index()) + } + StatementKind::Deinit(place) => writeln!(writer, "Deinit({place:?};"), + StatementKind::StorageLive(local) => { + writeln!(writer, "{INDENT}StorageLive(_{local});") + } + StatementKind::StorageDead(local) => { + writeln!(writer, "{INDENT}StorageDead(_{local});") + } + StatementKind::Retag(kind, place) => writeln!(writer, "Retag({kind:?}, {place:?});"), + StatementKind::PlaceMention(place) => { + writeln!(writer, "{INDENT}PlaceMention({place:?};") + } + StatementKind::ConstEvalCounter => { + writeln!(writer, "{INDENT}ConstEvalCounter;") + } + StatementKind::Nop => writeln!(writer, "{INDENT}nop;"), + StatementKind::AscribeUserType { .. } + | StatementKind::Coverage(_) + | StatementKind::Intrinsic(_) => { + // FIX-ME: Make them pretty. + writeln!(writer, "{INDENT}{statement:?};") + } + } +} + +fn pretty_terminator(writer: &mut W, terminator: &TerminatorKind) -> io::Result<()> { + pretty_terminator_head(writer, terminator)?; + let successors = terminator.successors(); + let successor_count = successors.len(); + let labels = pretty_successor_labels(terminator); + + let show_unwind = !matches!(terminator.unwind(), None | Some(UnwindAction::Cleanup(_))); + let fmt_unwind = |w: &mut W| -> io::Result<()> { + write!(w, "unwind ")?; + match terminator.unwind() { + None | Some(UnwindAction::Cleanup(_)) => unreachable!(), + Some(UnwindAction::Continue) => write!(w, "continue"), + Some(UnwindAction::Unreachable) => write!(w, "unreachable"), + Some(UnwindAction::Terminate) => write!(w, "terminate"), + } + }; + + match (successor_count, show_unwind) { + (0, false) => {} + (0, true) => { + write!(writer, " -> ")?; + fmt_unwind(writer)?; + } + (1, false) => write!(writer, " -> bb{:?}", successors[0])?, + _ => { + write!(writer, " -> [")?; + for (i, target) in successors.iter().enumerate() { + if i > 0 { + write!(writer, ", ")?; + } + write!(writer, "{}: bb{:?}", labels[i], target)?; + } + if show_unwind { + write!(writer, ", ")?; + fmt_unwind(writer)?; + } + write!(writer, "]")?; + } + }; + + writeln!(writer, ";") +} + +fn pretty_terminator_head(writer: &mut W, terminator: &TerminatorKind) -> io::Result<()> { + use self::TerminatorKind::*; + const INDENT: &str = " "; + match terminator { + Goto { .. } => write!(writer, "{INDENT}goto"), + SwitchInt { discr, .. } => { + write!(writer, "{INDENT}switchInt({})", pretty_operand(discr)) + } + Resume => write!(writer, "{INDENT}resume"), + Abort => write!(writer, "{INDENT}abort"), + Return => write!(writer, "{INDENT}return"), + Unreachable => write!(writer, "{INDENT}unreachable"), + Drop { place, .. } => write!(writer, "{INDENT}drop({place:?})"), + Call { func, args, destination, .. } => { + write!(writer, "{INDENT}{:?} = {}(", destination, pretty_operand(func))?; + let mut args_iter = args.iter(); + args_iter.next().map_or(Ok(()), |arg| write!(writer, "{}", pretty_operand(arg)))?; + args_iter.try_for_each(|arg| write!(writer, ", {}", pretty_operand(arg)))?; + write!(writer, ")") + } + Assert { cond, expected, msg, target: _, unwind: _ } => { + write!(writer, "{INDENT}assert(")?; + if !expected { + write!(writer, "!")?; + } + write!(writer, "{}, ", pretty_operand(cond))?; + pretty_assert_message(writer, msg)?; + write!(writer, ")") + } + InlineAsm { .. } => write!(writer, "{INDENT}InlineAsm"), + } +} + +fn pretty_successor_labels(terminator: &TerminatorKind) -> Vec { + use self::TerminatorKind::*; + match terminator { + Call { target: None, unwind: UnwindAction::Cleanup(_), .. } + | InlineAsm { destination: None, .. } => vec!["unwind".into()], + Resume | Abort | Return | Unreachable | Call { target: None, unwind: _, .. } => vec![], + Goto { .. } => vec!["".to_string()], + SwitchInt { targets, .. } => targets + .branches() + .map(|(val, _target)| format!("{val}")) + .chain(iter::once("otherwise".into())) + .collect(), + Drop { unwind: UnwindAction::Cleanup(_), .. } => vec!["return".into(), "unwind".into()], + Call { target: Some(_), unwind: UnwindAction::Cleanup(_), .. } => { + vec!["return".into(), "unwind".into()] + } + Drop { unwind: _, .. } | Call { target: Some(_), unwind: _, .. } => vec!["return".into()], + Assert { unwind: UnwindAction::Cleanup(_), .. } => { + vec!["success".into(), "unwind".into()] + } + Assert { unwind: _, .. } => vec!["success".into()], + InlineAsm { destination: Some(_), .. } => vec!["goto".into(), "unwind".into()], + } +} + +fn pretty_assert_message(writer: &mut W, msg: &AssertMessage) -> io::Result<()> { + match msg { + AssertMessage::BoundsCheck { len, index } => { + let pretty_len = pretty_operand(len); + let pretty_index = pretty_operand(index); + write!( + writer, + "\"index out of bounds: the length is {{}} but the index is {{}}\", {pretty_len}, {pretty_index}" + ) + } + AssertMessage::Overflow(BinOp::Add, l, r) => { + let pretty_l = pretty_operand(l); + let pretty_r = pretty_operand(r); + write!( + writer, + "\"attempt to compute `{{}} + {{}}`, which would overflow\", {pretty_l}, {pretty_r}" + ) + } + AssertMessage::Overflow(BinOp::Sub, l, r) => { + let pretty_l = pretty_operand(l); + let pretty_r = pretty_operand(r); + write!( + writer, + "\"attempt to compute `{{}} - {{}}`, which would overflow\", {pretty_l}, {pretty_r}" + ) + } + AssertMessage::Overflow(BinOp::Mul, l, r) => { + let pretty_l = pretty_operand(l); + let pretty_r = pretty_operand(r); + write!( + writer, + "\"attempt to compute `{{}} * {{}}`, which would overflow\", {pretty_l}, {pretty_r}" + ) + } + AssertMessage::Overflow(BinOp::Div, l, r) => { + let pretty_l = pretty_operand(l); + let pretty_r = pretty_operand(r); + write!( + writer, + "\"attempt to compute `{{}} / {{}}`, which would overflow\", {pretty_l}, {pretty_r}" + ) + } + AssertMessage::Overflow(BinOp::Rem, l, r) => { + let pretty_l = pretty_operand(l); + let pretty_r = pretty_operand(r); + write!( + writer, + "\"attempt to compute `{{}} % {{}}`, which would overflow\", {pretty_l}, {pretty_r}" + ) + } + AssertMessage::Overflow(BinOp::Shr, _, r) => { + let pretty_r = pretty_operand(r); + write!(writer, "\"attempt to shift right by `{{}}`, which would overflow\", {pretty_r}") + } + AssertMessage::Overflow(BinOp::Shl, _, r) => { + let pretty_r = pretty_operand(r); + write!(writer, "\"attempt to shift left by `{{}}`, which would overflow\", {pretty_r}") + } + AssertMessage::Overflow(op, _, _) => unreachable!("`{:?}` cannot overflow", op), + AssertMessage::OverflowNeg(op) => { + let pretty_op = pretty_operand(op); + write!(writer, "\"attempt to negate `{{}}`, which would overflow\", {pretty_op}") + } + AssertMessage::DivisionByZero(op) => { + let pretty_op = pretty_operand(op); + write!(writer, "\"attempt to divide `{{}}` by zero\", {pretty_op}") + } + AssertMessage::RemainderByZero(op) => { + let pretty_op = pretty_operand(op); + write!( + writer, + "\"attempt to calculate the remainder of `{{}}` with a divisor of zero\", {pretty_op}" + ) + } + AssertMessage::MisalignedPointerDereference { required, found } => { + let pretty_required = pretty_operand(required); + let pretty_found = pretty_operand(found); + write!( + writer, + "\"misaligned pointer dereference: address must be a multiple of {{}} but is {{}}\",{pretty_required}, {pretty_found}" + ) + } + AssertMessage::NullPointerDereference => { + write!(writer, "\"null pointer dereference occurred\"") + } + AssertMessage::ResumedAfterReturn(_) + | AssertMessage::ResumedAfterPanic(_) + | AssertMessage::ResumedAfterDrop(_) => { + write!(writer, "{}", msg.description().unwrap()) + } + } +} + +fn pretty_operand(operand: &Operand) -> String { + match operand { + Operand::Copy(copy) => { + format!("{copy:?}") + } + Operand::Move(mv) => { + format!("move {mv:?}") + } + Operand::Constant(cnst) => pretty_mir_const(&cnst.const_), + } +} + +fn pretty_mir_const(literal: &MirConst) -> String { + with(|cx| cx.mir_const_pretty(literal)) +} + +fn pretty_ty_const(ct: &TyConst) -> String { + with(|cx| cx.ty_const_pretty(ct.id)) +} + +fn pretty_rvalue(writer: &mut W, rval: &Rvalue) -> io::Result<()> { + match rval { + Rvalue::AddressOf(mutability, place) => { + write!(writer, "&raw {} {:?}", pretty_raw_ptr_kind(*mutability), place) + } + Rvalue::Aggregate(aggregate_kind, operands) => { + // FIXME: Add pretty_aggregate function that returns a pretty string + pretty_aggregate(writer, aggregate_kind, operands) + } + Rvalue::BinaryOp(bin, op1, op2) => { + write!(writer, "{:?}({}, {})", bin, pretty_operand(op1), pretty_operand(op2)) + } + Rvalue::Cast(_, op, ty) => { + write!(writer, "{} as {}", pretty_operand(op), ty) + } + Rvalue::CheckedBinaryOp(bin, op1, op2) => { + write!(writer, "Checked{:?}({}, {})", bin, pretty_operand(op1), pretty_operand(op2)) + } + Rvalue::CopyForDeref(deref) => { + write!(writer, "CopyForDeref({deref:?})") + } + Rvalue::Discriminant(place) => { + write!(writer, "discriminant({place:?})") + } + Rvalue::Len(len) => { + write!(writer, "len({len:?})") + } + Rvalue::Ref(_, borrowkind, place) => { + let kind = match borrowkind { + BorrowKind::Shared => "&", + BorrowKind::Fake(FakeBorrowKind::Deep) => "&fake ", + BorrowKind::Fake(FakeBorrowKind::Shallow) => "&fake shallow ", + BorrowKind::Mut { .. } => "&mut ", + }; + write!(writer, "{kind}{place:?}") + } + Rvalue::Repeat(op, cnst) => { + write!(writer, "[{}; {}]", pretty_operand(op), pretty_ty_const(cnst)) + } + Rvalue::ShallowInitBox(_, _) => Ok(()), + Rvalue::ThreadLocalRef(item) => { + write!(writer, "thread_local_ref{item:?}") + } + Rvalue::NullaryOp(nul, ty) => { + write!(writer, "{nul:?}::<{ty}>() \" \"") + } + Rvalue::UnaryOp(un, op) => { + write!(writer, "{:?}({})", un, pretty_operand(op)) + } + Rvalue::Use(op) => write!(writer, "{}", pretty_operand(op)), + } +} + +fn pretty_aggregate( + writer: &mut W, + aggregate_kind: &AggregateKind, + operands: &Vec, +) -> io::Result<()> { + let suffix = match aggregate_kind { + AggregateKind::Array(_) => { + write!(writer, "[")?; + "]" + } + AggregateKind::Tuple => { + write!(writer, "(")?; + ")" + } + AggregateKind::Adt(def, var, _, _, _) => { + if def.kind() == AdtKind::Enum { + write!(writer, "{}::{}", def.name(), def.variant(*var).unwrap().name())?; + } else { + write!(writer, "{}", def.variant(*var).unwrap().name())?; + } + if operands.is_empty() { + return Ok(()); + } + // FIXME: Change this once we have CtorKind in StableMIR. + write!(writer, "(")?; + ")" + } + AggregateKind::Closure(def, _) => { + write!(writer, "{{closure@{}}}(", def.span().diagnostic())?; + ")" + } + AggregateKind::Coroutine(def, _, _) => { + write!(writer, "{{coroutine@{}}}(", def.span().diagnostic())?; + ")" + } + AggregateKind::CoroutineClosure(def, _) => { + write!(writer, "{{coroutine-closure@{}}}(", def.span().diagnostic())?; + ")" + } + AggregateKind::RawPtr(ty, mutability) => { + write!( + writer, + "*{} {ty} from (", + if *mutability == Mutability::Mut { "mut" } else { "const" } + )?; + ")" + } + }; + let mut separator = ""; + for op in operands { + write!(writer, "{}{}", separator, pretty_operand(op))?; + separator = ", "; + } + write!(writer, "{suffix}") +} + +fn pretty_mut(mutability: Mutability) -> &'static str { + match mutability { + Mutability::Not => " ", + Mutability::Mut => "mut ", + } +} + +fn pretty_raw_ptr_kind(kind: RawPtrKind) -> &'static str { + match kind { + RawPtrKind::Const => "const", + RawPtrKind::Mut => "mut", + RawPtrKind::FakeForPtrMetadata => "const (fake)", + } +} diff --git a/compiler/stable_mir/src/mir/visit.rs b/compiler/rustc_smir/src/stable_mir/mir/visit.rs similarity index 98% rename from compiler/stable_mir/src/mir/visit.rs rename to compiler/rustc_smir/src/stable_mir/mir/visit.rs index 9d2368ba33202..e21dc11eea9ca 100644 --- a/compiler/stable_mir/src/mir/visit.rs +++ b/compiler/rustc_smir/src/stable_mir/mir/visit.rs @@ -35,9 +35,11 @@ //! The only place that `_` is acceptable is to match a field (or //! variant argument) that does not require visiting. -use crate::mir::*; -use crate::ty::{GenericArgs, MirConst, Region, Ty, TyConst}; -use crate::{Error, Opaque, Span}; +use stable_mir::mir::*; +use stable_mir::ty::{GenericArgs, MirConst, Region, Ty, TyConst}; +use stable_mir::{Error, Opaque, Span}; + +use crate::stable_mir; macro_rules! make_mir_visitor { ($visitor_trait_name:ident, $($mutability:ident)?) => { @@ -370,7 +372,8 @@ macro_rules! make_mir_visitor { } AssertMessage::ResumedAfterReturn(_) | AssertMessage::ResumedAfterPanic(_) - | AssertMessage::NullPointerDereference => { + | AssertMessage::NullPointerDereference + | AssertMessage::ResumedAfterDrop(_) => { //nothing to visit } AssertMessage::MisalignedPointerDereference { required, found } => { diff --git a/compiler/rustc_smir/src/stable_mir/mod.rs b/compiler/rustc_smir/src/stable_mir/mod.rs new file mode 100644 index 0000000000000..c59758d4ad3f2 --- /dev/null +++ b/compiler/rustc_smir/src/stable_mir/mod.rs @@ -0,0 +1,239 @@ +//! Module that is temporarily parasitic on the `rustc_smir` crate, +//! +//! This module is designed to resolve circular dependency that would happen +//! if we gradually invert the dependency order between `rustc_smir` and `stable_mir`. +//! +//! Once refactoring is complete, we will migrate it back to the `stable_mir` crate. + +//! The WIP stable interface to rustc internals. +//! +//! For more information see +//! +//! # Note +//! +//! This API is still completely unstable and subject to change. + +// #![doc( +// html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/", +// test(attr(allow(unused_variables), deny(warnings))) +// )] +//! +//! This crate shall contain all type definitions and APIs that we expect third-party tools to invoke to +//! interact with the compiler. +//! +//! The goal is to eventually be published on +//! [crates.io](https://crates.io). + +use std::fmt::Debug; +use std::{fmt, io}; + +use serde::Serialize; +use stable_mir::compiler_interface::with; +pub use stable_mir::crate_def::{CrateDef, CrateDefItems, CrateDefType, DefId}; +pub use stable_mir::error::*; +use stable_mir::mir::mono::StaticDef; +use stable_mir::mir::{Body, Mutability}; +use stable_mir::ty::{AssocItem, FnDef, ForeignModuleDef, ImplDef, IndexedVal, Span, TraitDef, Ty}; + +use crate::stable_mir; + +pub mod abi; +#[macro_use] +pub mod crate_def; +pub mod compiler_interface; +#[macro_use] +pub mod error; +pub mod mir; +pub mod target; +pub mod ty; +pub mod visitor; + +/// Use String for now but we should replace it. +pub type Symbol = String; + +/// The number that identifies a crate. +pub type CrateNum = usize; + +impl Debug for DefId { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_struct("DefId").field("id", &self.0).field("name", &self.name()).finish() + } +} + +impl IndexedVal for DefId { + fn to_val(index: usize) -> Self { + DefId(index) + } + + fn to_index(&self) -> usize { + self.0 + } +} + +/// A list of crate items. +pub type CrateItems = Vec; + +/// A list of trait decls. +pub type TraitDecls = Vec; + +/// A list of impl trait decls. +pub type ImplTraitDecls = Vec; + +/// A list of associated items. +pub type AssocItems = Vec; + +/// Holds information about a crate. +#[derive(Clone, PartialEq, Eq, Debug, Serialize)] +pub struct Crate { + pub id: CrateNum, + pub name: Symbol, + pub is_local: bool, +} + +impl Crate { + /// The list of foreign modules in this crate. + pub fn foreign_modules(&self) -> Vec { + with(|cx| cx.foreign_modules(self.id)) + } + + /// The list of traits declared in this crate. + pub fn trait_decls(&self) -> TraitDecls { + with(|cx| cx.trait_decls(self.id)) + } + + /// The list of trait implementations in this crate. + pub fn trait_impls(&self) -> ImplTraitDecls { + with(|cx| cx.trait_impls(self.id)) + } + + /// Return a list of function definitions from this crate independent on their visibility. + pub fn fn_defs(&self) -> Vec { + with(|cx| cx.crate_functions(self.id)) + } + + /// Return a list of static items defined in this crate independent on their visibility. + pub fn statics(&self) -> Vec { + with(|cx| cx.crate_statics(self.id)) + } +} + +#[derive(Copy, Clone, PartialEq, Eq, Debug, Hash, Serialize)] +pub enum ItemKind { + Fn, + Static, + Const, + Ctor(CtorKind), +} + +#[derive(Copy, Clone, PartialEq, Eq, Debug, Hash, Serialize)] +pub enum CtorKind { + Const, + Fn, +} + +pub type Filename = String; + +crate_def_with_ty! { + /// Holds information about an item in a crate. + #[derive(Serialize)] + pub CrateItem; +} + +impl CrateItem { + /// This will return the body of an item or panic if it's not available. + pub fn expect_body(&self) -> mir::Body { + with(|cx| cx.mir_body(self.0)) + } + + /// Return the body of an item if available. + pub fn body(&self) -> Option { + with(|cx| cx.has_body(self.0).then(|| cx.mir_body(self.0))) + } + + /// Check if a body is available for this item. + pub fn has_body(&self) -> bool { + with(|cx| cx.has_body(self.0)) + } + + pub fn span(&self) -> Span { + with(|cx| cx.span_of_an_item(self.0)) + } + + pub fn kind(&self) -> ItemKind { + with(|cx| cx.item_kind(*self)) + } + + pub fn requires_monomorphization(&self) -> bool { + with(|cx| cx.requires_monomorphization(self.0)) + } + + pub fn ty(&self) -> Ty { + with(|cx| cx.def_ty(self.0)) + } + + pub fn is_foreign_item(&self) -> bool { + with(|cx| cx.is_foreign_item(self.0)) + } + + /// Emit MIR for this item body. + pub fn emit_mir(&self, w: &mut W) -> io::Result<()> { + self.body() + .ok_or_else(|| io::Error::other(format!("No body found for `{}`", self.name())))? + .dump(w, &self.name()) + } +} + +/// Return the function where execution starts if the current +/// crate defines that. This is usually `main`, but could be +/// `start` if the crate is a no-std crate. +pub fn entry_fn() -> Option { + with(|cx| cx.entry_fn()) +} + +/// Access to the local crate. +pub fn local_crate() -> Crate { + with(|cx| cx.local_crate()) +} + +/// Try to find a crate or crates if multiple crates exist from given name. +pub fn find_crates(name: &str) -> Vec { + with(|cx| cx.find_crates(name)) +} + +/// Try to find a crate with the given name. +pub fn external_crates() -> Vec { + with(|cx| cx.external_crates()) +} + +/// Retrieve all items in the local crate that have a MIR associated with them. +pub fn all_local_items() -> CrateItems { + with(|cx| cx.all_local_items()) +} + +pub fn all_trait_decls() -> TraitDecls { + with(|cx| cx.all_trait_decls()) +} + +pub fn all_trait_impls() -> ImplTraitDecls { + with(|cx| cx.all_trait_impls()) +} + +/// A type that provides internal information but that can still be used for debug purpose. +#[derive(Clone, PartialEq, Eq, Hash, Serialize)] +pub struct Opaque(String); + +impl std::fmt::Display for Opaque { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{}", self.0) + } +} + +impl std::fmt::Debug for Opaque { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{}", self.0) + } +} + +pub fn opaque(value: &T) -> Opaque { + Opaque(format!("{value:?}")) +} diff --git a/compiler/stable_mir/src/target.rs b/compiler/rustc_smir/src/stable_mir/target.rs similarity index 95% rename from compiler/stable_mir/src/target.rs rename to compiler/rustc_smir/src/stable_mir/target.rs index 32c3a2a9122e3..6cf1e9feb015a 100644 --- a/compiler/stable_mir/src/target.rs +++ b/compiler/rustc_smir/src/stable_mir/target.rs @@ -1,8 +1,9 @@ //! Provide information about the machine that this is being compiled into. use serde::Serialize; +use stable_mir::compiler_interface::with; -use crate::compiler_interface::with; +use crate::stable_mir; /// The properties of the target machine being compiled into. #[derive(Clone, PartialEq, Eq, Serialize)] diff --git a/compiler/stable_mir/src/ty.rs b/compiler/rustc_smir/src/stable_mir/ty.rs similarity index 95% rename from compiler/stable_mir/src/ty.rs rename to compiler/rustc_smir/src/stable_mir/ty.rs index b857a735b7259..e331e5934716a 100644 --- a/compiler/stable_mir/src/ty.rs +++ b/compiler/rustc_smir/src/stable_mir/ty.rs @@ -2,15 +2,16 @@ use std::fmt::{self, Debug, Display, Formatter}; use std::ops::Range; use serde::Serialize; +use stable_mir::abi::{FnAbi, Layout}; +use stable_mir::crate_def::{CrateDef, CrateDefItems, CrateDefType}; +use stable_mir::mir::alloc::{AllocId, read_target_int, read_target_uint}; +use stable_mir::mir::mono::StaticDef; +use stable_mir::target::MachineInfo; +use stable_mir::{Filename, Opaque}; use super::mir::{Body, Mutability, Safety}; use super::{DefId, Error, Symbol, with}; -use crate::abi::{FnAbi, Layout}; -use crate::crate_def::{CrateDef, CrateDefType}; -use crate::mir::alloc::{AllocId, read_target_int, read_target_uint}; -use crate::mir::mono::StaticDef; -use crate::target::MachineInfo; -use crate::{Filename, Opaque}; +use crate::stable_mir; #[derive(Copy, Clone, Eq, PartialEq, Hash, Serialize)] pub struct Ty(usize); @@ -588,7 +589,7 @@ pub enum IntTy { impl IntTy { pub fn num_bytes(self) -> usize { match self { - IntTy::Isize => crate::target::MachineInfo::target_pointer_width().bytes(), + IntTy::Isize => MachineInfo::target_pointer_width().bytes(), IntTy::I8 => 1, IntTy::I16 => 2, IntTy::I32 => 4, @@ -611,7 +612,7 @@ pub enum UintTy { impl UintTy { pub fn num_bytes(self) -> usize { match self { - UintTy::Usize => crate::target::MachineInfo::target_pointer_width().bytes(), + UintTy::Usize => MachineInfo::target_pointer_width().bytes(), UintTy::U8 => 1, UintTy::U16 => 2, UintTy::U32 => 4, @@ -910,6 +911,10 @@ crate_def! { pub TraitDef; } +impl_crate_def_items! { + TraitDef; +} + impl TraitDef { pub fn declaration(trait_def: &TraitDef) -> TraitDecl { with(|cx| cx.trait_decl(trait_def)) @@ -932,6 +937,10 @@ crate_def! { pub ImplDef; } +impl_crate_def_items! { + ImplDef; +} + impl ImplDef { /// Retrieve information about this implementation. pub fn trait_impl(&self) -> ImplTrait { @@ -1017,7 +1026,7 @@ pub enum AliasKind { Projection, Inherent, Opaque, - Weak, + Free, } #[derive(Clone, Debug, Eq, PartialEq, Serialize)] @@ -1084,7 +1093,6 @@ pub enum Abi { CCmseNonSecureCall, CCmseNonSecureEntry, System { unwind: bool }, - RustIntrinsic, RustCall, Unadjusted, RustCold, @@ -1454,7 +1462,7 @@ pub enum ClauseKind { TypeOutlives(TypeOutlivesPredicate), Projection(ProjectionPredicate), ConstArgHasType(TyConst, Ty), - WellFormed(GenericArgKind), + WellFormed(TermKind), ConstEvaluatable(TyConst), } @@ -1555,3 +1563,59 @@ index_impl!(Span); pub struct VariantIdx(usize); index_impl!(VariantIdx); + +crate_def! { + /// Hold infomation about an Opaque definition, particularly useful in `RPITIT`. + #[derive(Serialize)] + pub OpaqueDef; +} + +crate_def! { + #[derive(Serialize)] + pub AssocDef; +} + +#[derive(Clone, Debug, Eq, PartialEq, Serialize)] +pub struct AssocItem { + pub def_id: AssocDef, + pub kind: AssocKind, + pub container: AssocItemContainer, + + /// If this is an item in an impl of a trait then this is the `DefId` of + /// the associated item on the trait that this implements. + pub trait_item_def_id: Option, +} + +#[derive(Clone, PartialEq, Debug, Eq, Serialize)] +pub enum AssocTypeData { + Normal(Symbol), + /// The associated type comes from an RPITIT. It has no name, and the + /// `ImplTraitInTraitData` provides additional information about its + /// source. + Rpitit(ImplTraitInTraitData), +} + +#[derive(Clone, Debug, Eq, PartialEq, Serialize)] +pub enum AssocKind { + Const { name: Symbol }, + Fn { name: Symbol, has_self: bool }, + Type { data: AssocTypeData }, +} + +#[derive(Clone, Debug, Eq, PartialEq, Serialize)] +pub enum AssocItemContainer { + Trait, + Impl, +} + +#[derive(Clone, Copy, PartialEq, Eq, Debug, Hash, Serialize)] +pub enum ImplTraitInTraitData { + Trait { fn_def_id: FnDef, opaque_def_id: OpaqueDef }, + Impl { fn_def_id: FnDef }, +} + +impl AssocItem { + pub fn is_impl_trait_in_trait(&self) -> bool { + matches!(self.kind, AssocKind::Type { data: AssocTypeData::Rpitit(_) }) + } +} diff --git a/compiler/stable_mir/src/visitor.rs b/compiler/rustc_smir/src/stable_mir/visitor.rs similarity index 95% rename from compiler/stable_mir/src/visitor.rs rename to compiler/rustc_smir/src/stable_mir/visitor.rs index 8463174f9a468..31a53d1b19d96 100644 --- a/compiler/stable_mir/src/visitor.rs +++ b/compiler/rustc_smir/src/stable_mir/visitor.rs @@ -1,11 +1,13 @@ use std::ops::ControlFlow; +use stable_mir::Opaque; +use stable_mir::ty::TyConst; + use super::ty::{ Allocation, Binder, ConstDef, ExistentialPredicate, FnSig, GenericArgKind, GenericArgs, MirConst, Promoted, Region, RigidTy, TermKind, Ty, UnevaluatedConst, }; -use crate::Opaque; -use crate::ty::TyConst; +use crate::stable_mir; pub trait Visitor: Sized { type Break; @@ -47,13 +49,13 @@ impl Visitable for TyConst { } fn super_visit(&self, visitor: &mut V) -> ControlFlow { match &self.kind { - crate::ty::TyConstKind::Param(_) | crate::ty::TyConstKind::Bound(_, _) => {} - crate::ty::TyConstKind::Unevaluated(_, args) => args.visit(visitor)?, - crate::ty::TyConstKind::Value(ty, alloc) => { + super::ty::TyConstKind::Param(_) | super::ty::TyConstKind::Bound(_, _) => {} + super::ty::TyConstKind::Unevaluated(_, args) => args.visit(visitor)?, + super::ty::TyConstKind::Value(ty, alloc) => { alloc.visit(visitor)?; ty.visit(visitor)?; } - crate::ty::TyConstKind::ZSTValue(ty) => ty.visit(visitor)?, + super::ty::TyConstKind::ZSTValue(ty) => ty.visit(visitor)?, } ControlFlow::Continue(()) } diff --git a/compiler/rustc_span/src/edition.rs b/compiler/rustc_span/src/edition.rs index da298080ed2f6..28335734f4dec 100644 --- a/compiler/rustc_span/src/edition.rs +++ b/compiler/rustc_span/src/edition.rs @@ -45,7 +45,7 @@ pub const ALL_EDITIONS: &[Edition] = &[ Edition::EditionFuture, ]; -pub const EDITION_NAME_LIST: &str = "2015|2018|2021|2024"; +pub const EDITION_NAME_LIST: &str = "<2015|2018|2021|2024|future>"; pub const DEFAULT_EDITION: Edition = Edition::Edition2015; diff --git a/compiler/rustc_span/src/hygiene.rs b/compiler/rustc_span/src/hygiene.rs index e7a8dee27f568..b621920d62ba6 100644 --- a/compiler/rustc_span/src/hygiene.rs +++ b/compiler/rustc_span/src/hygiene.rs @@ -24,9 +24,6 @@ // because getting it wrong can lead to nested `HygieneData::with` calls that // trigger runtime aborts. (Fortunately these are obvious and easy to fix.) -use std::cell::RefCell; -use std::collections::hash_map::Entry; -use std::collections::hash_set::Entry as SetEntry; use std::hash::Hash; use std::sync::Arc; use std::{fmt, iter, mem}; @@ -34,7 +31,7 @@ use std::{fmt, iter, mem}; use rustc_data_structures::fingerprint::Fingerprint; use rustc_data_structures::fx::{FxHashMap, FxHashSet}; use rustc_data_structures::stable_hasher::{HashStable, HashingControls, StableHasher}; -use rustc_data_structures::sync::{Lock, WorkerLocal}; +use rustc_data_structures::sync::Lock; use rustc_data_structures::unhash::UnhashMap; use rustc_hashes::Hash64; use rustc_index::IndexVec; @@ -44,6 +41,7 @@ use tracing::{debug, trace}; use crate::def_id::{CRATE_DEF_ID, CrateNum, DefId, LOCAL_CRATE, StableCrateId}; use crate::edition::Edition; +use crate::source_map::SourceMap; use crate::symbol::{Symbol, kw, sym}; use crate::{DUMMY_SP, HashStableContext, Span, SpanDecoder, SpanEncoder, with_session_globals}; @@ -59,56 +57,33 @@ impl !PartialOrd for SyntaxContext {} /// If this part of two syntax contexts is equal, then the whole syntax contexts should be equal. /// The other fields are only for caching. -type SyntaxContextKey = (SyntaxContext, ExpnId, Transparency); +pub type SyntaxContextKey = (SyntaxContext, ExpnId, Transparency); -#[derive(Clone, Copy, PartialEq, Debug, Encodable, Decodable)] -pub struct SyntaxContextData { +#[derive(Clone, Copy, Debug)] +struct SyntaxContextData { outer_expn: ExpnId, outer_transparency: Transparency, parent: SyntaxContext, - /// This context, but with all transparent and semi-transparent expansions filtered away. + /// This context, but with all transparent and semi-opaque expansions filtered away. opaque: SyntaxContext, /// This context, but with all transparent expansions filtered away. - opaque_and_semitransparent: SyntaxContext, + opaque_and_semiopaque: SyntaxContext, /// Name of the crate to which `$crate` with this context would resolve. dollar_crate_name: Symbol, } impl SyntaxContextData { - fn new( - (parent, outer_expn, outer_transparency): SyntaxContextKey, - opaque: SyntaxContext, - opaque_and_semitransparent: SyntaxContext, - ) -> SyntaxContextData { - SyntaxContextData { - outer_expn, - outer_transparency, - parent, - opaque, - opaque_and_semitransparent, - dollar_crate_name: kw::DollarCrate, - } - } - fn root() -> SyntaxContextData { SyntaxContextData { outer_expn: ExpnId::root(), outer_transparency: Transparency::Opaque, parent: SyntaxContext::root(), opaque: SyntaxContext::root(), - opaque_and_semitransparent: SyntaxContext::root(), + opaque_and_semiopaque: SyntaxContext::root(), dollar_crate_name: kw::DollarCrate, } } - fn decode_placeholder() -> SyntaxContextData { - SyntaxContextData { dollar_crate_name: kw::Empty, ..SyntaxContextData::root() } - } - - fn is_decode_placeholder(&self) -> bool { - self.dollar_crate_name == kw::Empty - } - fn key(&self) -> SyntaxContextKey { (self.parent, self.outer_expn, self.outer_transparency) } @@ -151,7 +126,7 @@ impl !PartialOrd for LocalExpnId {} /// with a non-default mode. With this check in place, we can avoid the need /// to maintain separate versions of `ExpnData` hashes for each permutation /// of `HashingControls` settings. -fn assert_default_hashing_controls(ctx: &CTX, msg: &str) { +fn assert_default_hashing_controls(ctx: &impl HashStableContext, msg: &str) { match ctx.hashing_controls() { // Note that we require that `hash_spans` be set according to the global // `-Z incremental-ignore-spans` option. Normally, this option is disabled, @@ -207,13 +182,13 @@ pub enum Transparency { /// Identifier produced by a transparent expansion is always resolved at call-site. /// Call-site spans in procedural macros, hygiene opt-out in `macro` should use this. Transparent, - /// Identifier produced by a semi-transparent expansion may be resolved + /// Identifier produced by a semi-opaque expansion may be resolved /// either at call-site or at definition-site. /// If it's a local variable, label or `$crate` then it's resolved at def-site. /// Otherwise it's resolved at call-site. /// `macro_rules` macros behave like this, built-in macros currently behave like this too, /// but that's an implementation detail. - SemiTransparent, + SemiOpaque, /// Identifier produced by an opaque expansion is always resolved at definition-site. /// Def-site spans in procedural macros, identifiers from `macro` by default use this. Opaque, @@ -221,7 +196,7 @@ pub enum Transparency { impl Transparency { pub fn fallback(macro_rules: bool) -> Self { - if macro_rules { Transparency::SemiTransparent } else { Transparency::Opaque } + if macro_rules { Transparency::SemiOpaque } else { Transparency::Opaque } } } @@ -419,7 +394,7 @@ impl HygieneData { } } - fn with T>(f: F) -> T { + fn with(f: impl FnOnce(&mut HygieneData) -> R) -> R { with_session_globals(|session_globals| f(&mut session_globals.hygiene_data.borrow_mut())) } @@ -463,28 +438,23 @@ impl HygieneData { } fn normalize_to_macros_2_0(&self, ctxt: SyntaxContext) -> SyntaxContext { - debug_assert!(!self.syntax_context_data[ctxt.0 as usize].is_decode_placeholder()); self.syntax_context_data[ctxt.0 as usize].opaque } fn normalize_to_macro_rules(&self, ctxt: SyntaxContext) -> SyntaxContext { - debug_assert!(!self.syntax_context_data[ctxt.0 as usize].is_decode_placeholder()); - self.syntax_context_data[ctxt.0 as usize].opaque_and_semitransparent + self.syntax_context_data[ctxt.0 as usize].opaque_and_semiopaque } fn outer_expn(&self, ctxt: SyntaxContext) -> ExpnId { - debug_assert!(!self.syntax_context_data[ctxt.0 as usize].is_decode_placeholder()); self.syntax_context_data[ctxt.0 as usize].outer_expn } fn outer_mark(&self, ctxt: SyntaxContext) -> (ExpnId, Transparency) { - debug_assert!(!self.syntax_context_data[ctxt.0 as usize].is_decode_placeholder()); let data = &self.syntax_context_data[ctxt.0 as usize]; (data.outer_expn, data.outer_transparency) } fn parent_ctxt(&self, ctxt: SyntaxContext) -> SyntaxContext { - debug_assert!(!self.syntax_context_data[ctxt.0 as usize].is_decode_placeholder()); self.syntax_context_data[ctxt.0 as usize].parent } @@ -562,7 +532,7 @@ impl HygieneData { } let call_site_ctxt = self.expn_data(expn_id).call_site.ctxt(); - let mut call_site_ctxt = if transparency == Transparency::SemiTransparent { + let mut call_site_ctxt = if transparency == Transparency::SemiOpaque { self.normalize_to_macros_2_0(call_site_ctxt) } else { self.normalize_to_macro_rules(call_site_ctxt) @@ -595,8 +565,6 @@ impl HygieneData { expn_id: ExpnId, transparency: Transparency, ) -> SyntaxContext { - debug_assert!(!self.syntax_context_data[parent.0 as usize].is_decode_placeholder()); - // Look into the cache first. let key = (parent, expn_id, transparency); if let Some(ctxt) = self.syntax_context_map.get(&key) { @@ -604,37 +572,46 @@ impl HygieneData { } // Reserve a new syntax context. + // The inserted dummy data can only be potentially accessed by nested `alloc_ctxt` calls, + // the assert below ensures that it doesn't happen. let ctxt = SyntaxContext::from_usize(self.syntax_context_data.len()); - self.syntax_context_data.push(SyntaxContextData::decode_placeholder()); + self.syntax_context_data + .push(SyntaxContextData { dollar_crate_name: sym::dummy, ..SyntaxContextData::root() }); self.syntax_context_map.insert(key, ctxt); - // Opaque and semi-transparent versions of the parent. Note that they may be equal to the + // Opaque and semi-opaque versions of the parent. Note that they may be equal to the // parent itself. E.g. `parent_opaque` == `parent` if the expn chain contains only opaques, - // and `parent_opaque_and_semitransparent` == `parent` if the expn contains only opaques - // and semi-transparents. - let parent_opaque = self.syntax_context_data[parent.0 as usize].opaque; - let parent_opaque_and_semitransparent = - self.syntax_context_data[parent.0 as usize].opaque_and_semitransparent; - - // Evaluate opaque and semi-transparent versions of the new syntax context. - let (opaque, opaque_and_semitransparent) = match transparency { - Transparency::Transparent => (parent_opaque, parent_opaque_and_semitransparent), - Transparency::SemiTransparent => ( + // and `parent_opaque_and_semiopaque` == `parent` if the expn contains only (semi-)opaques. + let parent_data = &self.syntax_context_data[parent.0 as usize]; + assert_ne!(parent_data.dollar_crate_name, sym::dummy); + let parent_opaque = parent_data.opaque; + let parent_opaque_and_semiopaque = parent_data.opaque_and_semiopaque; + + // Evaluate opaque and semi-opaque versions of the new syntax context. + let (opaque, opaque_and_semiopaque) = match transparency { + Transparency::Transparent => (parent_opaque, parent_opaque_and_semiopaque), + Transparency::SemiOpaque => ( parent_opaque, - // Will be the same as `ctxt` if the expn chain contains only opaques and semi-transparents. - self.alloc_ctxt(parent_opaque_and_semitransparent, expn_id, transparency), + // Will be the same as `ctxt` if the expn chain contains only (semi-)opaques. + self.alloc_ctxt(parent_opaque_and_semiopaque, expn_id, transparency), ), Transparency::Opaque => ( // Will be the same as `ctxt` if the expn chain contains only opaques. self.alloc_ctxt(parent_opaque, expn_id, transparency), - // Will be the same as `ctxt` if the expn chain contains only opaques and semi-transparents. - self.alloc_ctxt(parent_opaque_and_semitransparent, expn_id, transparency), + // Will be the same as `ctxt` if the expn chain contains only (semi-)opaques. + self.alloc_ctxt(parent_opaque_and_semiopaque, expn_id, transparency), ), }; // Fill the full data, now that we have it. - self.syntax_context_data[ctxt.as_u32() as usize] = - SyntaxContextData::new(key, opaque, opaque_and_semitransparent); + self.syntax_context_data[ctxt.as_u32() as usize] = SyntaxContextData { + outer_expn: expn_id, + outer_transparency: transparency, + parent, + opaque, + opaque_and_semiopaque, + dollar_crate_name: kw::DollarCrate, + }; ctxt } } @@ -654,13 +631,12 @@ pub fn walk_chain_collapsed(span: Span, to: Span) -> Span { pub fn update_dollar_crate_names(mut get_name: impl FnMut(SyntaxContext) -> Symbol) { // The new contexts that need updating are at the end of the list and have `$crate` as a name. - // Also decoding placeholders can be encountered among both old and new contexts. let mut to_update = vec![]; HygieneData::with(|data| { for (idx, scdata) in data.syntax_context_data.iter().enumerate().rev() { if scdata.dollar_crate_name == kw::DollarCrate { to_update.push((idx, kw::DollarCrate)); - } else if !scdata.is_decode_placeholder() { + } else { break; } } @@ -926,15 +902,36 @@ impl SyntaxContext { } pub(crate) fn dollar_crate_name(self) -> Symbol { - HygieneData::with(|data| { - debug_assert!(!data.syntax_context_data[self.0 as usize].is_decode_placeholder()); - data.syntax_context_data[self.0 as usize].dollar_crate_name - }) + HygieneData::with(|data| data.syntax_context_data[self.0 as usize].dollar_crate_name) } pub fn edition(self) -> Edition { HygieneData::with(|data| data.expn_data(data.outer_expn(self)).edition) } + + /// Returns whether this context originates in a foreign crate's external macro. + /// + /// This is used to test whether a lint should not even begin to figure out whether it should + /// be reported on the current node. + pub fn in_external_macro(self, sm: &SourceMap) -> bool { + let expn_data = self.outer_expn_data(); + match expn_data.kind { + ExpnKind::Root + | ExpnKind::Desugaring( + DesugaringKind::ForLoop + | DesugaringKind::WhileLoop + | DesugaringKind::OpaqueTy + | DesugaringKind::Async + | DesugaringKind::Await, + ) => false, + ExpnKind::AstPass(_) | ExpnKind::Desugaring(_) => true, // well, it's "external" + ExpnKind::Macro(MacroKind::Bang, _) => { + // Dummy span for the `def_site` means it's an external macro. + expn_data.def_site.is_dummy() || sm.is_imported(expn_data.def_site) + } + ExpnKind::Macro { .. } => true, // definitely a plugin + } + } } impl fmt::Debug for SyntaxContext { @@ -1236,6 +1233,25 @@ impl DesugaringKind { DesugaringKind::PatTyRange => "pattern type", } } + + /// For use with `rustc_unimplemented` to support conditions + /// like `from_desugaring = "QuestionMark"` + pub fn matches(&self, value: &str) -> bool { + match self { + DesugaringKind::CondTemporary => value == "CondTemporary", + DesugaringKind::Async => value == "Async", + DesugaringKind::Await => value == "Await", + DesugaringKind::QuestionMark => value == "QuestionMark", + DesugaringKind::TryBlock => value == "TryBlock", + DesugaringKind::YeetExpr => value == "YeetExpr", + DesugaringKind::OpaqueTy => value == "OpaqueTy", + DesugaringKind::ForLoop => value == "ForLoop", + DesugaringKind::WhileLoop => value == "WhileLoop", + DesugaringKind::BoundModifier => value == "BoundModifier", + DesugaringKind::Contract => value == "Contract", + DesugaringKind::PatTyRange => value == "PatTyRange", + } + } } #[derive(Default)] @@ -1266,7 +1282,7 @@ impl HygieneEncodeContext { pub fn encode( &self, encoder: &mut T, - mut encode_ctxt: impl FnMut(&mut T, u32, &SyntaxContextData), + mut encode_ctxt: impl FnMut(&mut T, u32, &SyntaxContextKey), mut encode_expn: impl FnMut(&mut T, ExpnId, &ExpnData, ExpnHash), ) { // When we serialize a `SyntaxContextData`, we may end up serializing @@ -1278,55 +1294,47 @@ impl HygieneEncodeContext { self.latest_ctxts ); - // Consume the current round of SyntaxContexts. - // Drop the lock() temporary early - let latest_ctxts = { mem::take(&mut *self.latest_ctxts.lock()) }; - - // It's fine to iterate over a HashMap, because the serialization - // of the table that we insert data into doesn't depend on insertion - // order + // Consume the current round of syntax contexts. + // Drop the lock() temporary early. + // It's fine to iterate over a HashMap, because the serialization of the table + // that we insert data into doesn't depend on insertion order. #[allow(rustc::potential_query_instability)] - for_all_ctxts_in(latest_ctxts.into_iter(), |index, ctxt, data| { + let latest_ctxts = { mem::take(&mut *self.latest_ctxts.lock()) }.into_iter(); + let all_ctxt_data: Vec<_> = HygieneData::with(|data| { + latest_ctxts + .map(|ctxt| (ctxt, data.syntax_context_data[ctxt.0 as usize].key())) + .collect() + }); + for (ctxt, ctxt_key) in all_ctxt_data { if self.serialized_ctxts.lock().insert(ctxt) { - encode_ctxt(encoder, index, data); + encode_ctxt(encoder, ctxt.0, &ctxt_key); } - }); - - let latest_expns = { mem::take(&mut *self.latest_expns.lock()) }; + } - // Same as above, this is fine as we are inserting into a order-independent hashset + // Same as above, but for expansions instead of syntax contexts. #[allow(rustc::potential_query_instability)] - for_all_expns_in(latest_expns.into_iter(), |expn, data, hash| { + let latest_expns = { mem::take(&mut *self.latest_expns.lock()) }.into_iter(); + let all_expn_data: Vec<_> = HygieneData::with(|data| { + latest_expns + .map(|expn| (expn, data.expn_data(expn).clone(), data.expn_hash(expn))) + .collect() + }); + for (expn, expn_data, expn_hash) in all_expn_data { if self.serialized_expns.lock().insert(expn) { - encode_expn(encoder, expn, data, hash); + encode_expn(encoder, expn, &expn_data, expn_hash); } - }); + } } debug!("encode_hygiene: Done serializing SyntaxContextData"); } } -#[derive(Default)] /// Additional information used to assist in decoding hygiene data -struct HygieneDecodeContextInner { - // Maps serialized `SyntaxContext` ids to a `SyntaxContext` in the current - // global `HygieneData`. When we deserialize a `SyntaxContext`, we need to create - // a new id in the global `HygieneData`. This map tracks the ID we end up picking, - // so that multiple occurrences of the same serialized id are decoded to the same - // `SyntaxContext`. This only stores `SyntaxContext`s which are completely decoded. - remapped_ctxts: Vec>, - - /// Maps serialized `SyntaxContext` ids that are currently being decoded to a `SyntaxContext`. - decoding: FxHashMap, -} - #[derive(Default)] -/// Additional information used to assist in decoding hygiene data pub struct HygieneDecodeContext { - inner: Lock, - - /// A set of serialized `SyntaxContext` ids that are currently being decoded on each thread. - local_in_progress: WorkerLocal>>, + // A cache mapping raw serialized per-crate syntax context ids to corresponding decoded + // `SyntaxContext`s in the current global `HygieneData`. + remapped_ctxts: Lock>>, } /// Register an expansion which has been decoded from the on-disk-cache for the local crate. @@ -1397,10 +1405,10 @@ pub fn decode_expn_id( // to track which `SyntaxContext`s we have already decoded. // The provided closure will be invoked to deserialize a `SyntaxContextData` // if we haven't already seen the id of the `SyntaxContext` we are deserializing. -pub fn decode_syntax_context SyntaxContextData>( +pub fn decode_syntax_context( d: &mut D, context: &HygieneDecodeContext, - decode_data: F, + decode_data: impl FnOnce(&mut D, u32) -> SyntaxContextKey, ) -> SyntaxContext { let raw_id: u32 = Decodable::decode(d); if raw_id == 0 { @@ -1409,129 +1417,24 @@ pub fn decode_syntax_context SyntaxContext return SyntaxContext::root(); } - let pending_ctxt = { - let mut inner = context.inner.lock(); - - // Reminder: `HygieneDecodeContext` is per-crate, so there are no collisions between - // raw ids from different crate metadatas. - if let Some(ctxt) = inner.remapped_ctxts.get(raw_id as usize).copied().flatten() { - // This has already been decoded. - return ctxt; - } - - match inner.decoding.entry(raw_id) { - Entry::Occupied(ctxt_entry) => { - let pending_ctxt = *ctxt_entry.get(); - match context.local_in_progress.borrow_mut().entry(raw_id) { - // We're decoding this already on the current thread. Return here and let the - // function higher up the stack finish decoding to handle recursive cases. - // Hopefully having a `SyntaxContext` that refers to an incorrect data is ok - // during reminder of the decoding process, it's certainly not ok after the - // top level decoding function returns. - SetEntry::Occupied(..) => return pending_ctxt, - // Some other thread is currently decoding this. - // Race with it (alternatively we could wait here). - // We cannot return this value, unlike in the recursive case above, because it - // may expose a `SyntaxContext` pointing to incorrect data to arbitrary code. - SetEntry::Vacant(entry) => { - entry.insert(); - pending_ctxt - } - } - } - Entry::Vacant(entry) => { - // We are the first thread to start decoding. Mark the current thread as being progress. - context.local_in_progress.borrow_mut().insert(raw_id); - - // Allocate and store SyntaxContext id *before* calling the decoder function, - // as the SyntaxContextData may reference itself. - let new_ctxt = HygieneData::with(|hygiene_data| { - // Push a dummy SyntaxContextData to ensure that nobody else can get the - // same ID as us. This will be overwritten after call `decode_data`. - hygiene_data.syntax_context_data.push(SyntaxContextData::decode_placeholder()); - SyntaxContext::from_usize(hygiene_data.syntax_context_data.len() - 1) - }); - entry.insert(new_ctxt); - new_ctxt - } - } - }; + // Look into the cache first. + // Reminder: `HygieneDecodeContext` is per-crate, so there are no collisions between + // raw ids from different crate metadatas. + if let Some(Some(ctxt)) = context.remapped_ctxts.lock().get(raw_id) { + return *ctxt; + } // Don't try to decode data while holding the lock, since we need to // be able to recursively decode a SyntaxContext - let ctxt_data = decode_data(d, raw_id); - let ctxt_key = ctxt_data.key(); - - let ctxt = HygieneData::with(|hygiene_data| { - match hygiene_data.syntax_context_map.get(&ctxt_key) { - // Ensure that syntax contexts are unique. - // If syntax contexts with the given key already exists, reuse it instead of - // using `pending_ctxt`. - // `pending_ctxt` will leave an unused hole in the vector of syntax contexts. - // Hopefully its value isn't stored anywhere during decoding and its dummy data - // is never accessed later. The `is_decode_placeholder` asserts on all - // accesses to syntax context data attempt to ensure it. - Some(&ctxt) => ctxt, - // This is a completely new context. - // Overwrite its placeholder data with our decoded data. - None => { - let ctxt_data_ref = - &mut hygiene_data.syntax_context_data[pending_ctxt.as_u32() as usize]; - let prev_ctxt_data = mem::replace(ctxt_data_ref, ctxt_data); - // Reset `dollar_crate_name` so that it will be updated by `update_dollar_crate_names`. - // We don't care what the encoding crate set this to - we want to resolve it - // from the perspective of the current compilation session. - ctxt_data_ref.dollar_crate_name = kw::DollarCrate; - // Make sure nothing weird happened while `decode_data` was running. - if !prev_ctxt_data.is_decode_placeholder() { - // Another thread may have already inserted the decoded data, - // but the decoded data should match. - assert_eq!(prev_ctxt_data, *ctxt_data_ref); - } - hygiene_data.syntax_context_map.insert(ctxt_key, pending_ctxt); - pending_ctxt - } - } - }); + let (parent, expn_id, transparency) = decode_data(d, raw_id); + let ctxt = + HygieneData::with(|hygiene_data| hygiene_data.alloc_ctxt(parent, expn_id, transparency)); - // Mark the context as completed - context.local_in_progress.borrow_mut().remove(&raw_id); - - let mut inner = context.inner.lock(); - let new_len = raw_id as usize + 1; - if inner.remapped_ctxts.len() < new_len { - inner.remapped_ctxts.resize(new_len, None); - } - inner.remapped_ctxts[raw_id as usize] = Some(ctxt); - inner.decoding.remove(&raw_id); + context.remapped_ctxts.lock().insert(raw_id, ctxt); ctxt } -fn for_all_ctxts_in( - ctxts: impl Iterator, - mut f: F, -) { - let all_data: Vec<_> = HygieneData::with(|data| { - ctxts.map(|ctxt| (ctxt, data.syntax_context_data[ctxt.0 as usize].clone())).collect() - }); - for (ctxt, data) in all_data.into_iter() { - f(ctxt.0, ctxt, &data); - } -} - -fn for_all_expns_in( - expns: impl Iterator, - mut f: impl FnMut(ExpnId, &ExpnData, ExpnHash), -) { - let all_data: Vec<_> = HygieneData::with(|data| { - expns.map(|expn| (expn, data.expn_data(expn).clone(), data.expn_hash(expn))).collect() - }); - for (expn, data, hash) in all_data.into_iter() { - f(expn, &data, hash); - } -} - impl Encodable for LocalExpnId { fn encode(&self, e: &mut E) { self.to_expn_id().encode(e); @@ -1544,10 +1447,10 @@ impl Decodable for LocalExpnId { } } -pub fn raw_encode_syntax_context( +pub fn raw_encode_syntax_context( ctxt: SyntaxContext, context: &HygieneEncodeContext, - e: &mut E, + e: &mut impl Encoder, ) { if !context.serialized_ctxts.lock().contains(&ctxt) { context.latest_ctxts.lock().insert(ctxt); diff --git a/compiler/rustc_span/src/lib.rs b/compiler/rustc_span/src/lib.rs index 9e6ba2e1b9ce2..61c96e67c17f8 100644 --- a/compiler/rustc_span/src/lib.rs +++ b/compiler/rustc_span/src/lib.rs @@ -17,6 +17,7 @@ // tidy-alphabetical-start #![allow(internal_features)] +#![cfg_attr(bootstrap, feature(let_chains))] #![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")] #![doc(rust_logo)] #![feature(array_windows)] @@ -24,14 +25,12 @@ #![feature(core_io_borrowed_buf)] #![feature(hash_set_entry)] #![feature(if_let_guard)] -#![feature(let_chains)] #![feature(map_try_insert)] #![feature(negative_impls)] #![feature(read_buf)] #![feature(round_char_boundary)] #![feature(rustc_attrs)] #![feature(rustdoc_internals)] -#![feature(slice_as_chunks)] // tidy-alphabetical-end // The code produced by the `Encodable`/`Decodable` derive macros refer to @@ -116,9 +115,13 @@ pub struct SessionGlobals { } impl SessionGlobals { - pub fn new(edition: Edition, sm_inputs: Option) -> SessionGlobals { + pub fn new( + edition: Edition, + extra_symbols: &[&'static str], + sm_inputs: Option, + ) -> SessionGlobals { SessionGlobals { - symbol_interner: symbol::Interner::fresh(), + symbol_interner: symbol::Interner::with_extra_symbols(extra_symbols), span_interner: Lock::new(span_encoding::SpanInterner::default()), metavar_spans: Default::default(), hygiene_data: Lock::new(hygiene::HygieneData::new(edition)), @@ -129,6 +132,7 @@ impl SessionGlobals { pub fn create_session_globals_then( edition: Edition, + extra_symbols: &[&'static str], sm_inputs: Option, f: impl FnOnce() -> R, ) -> R { @@ -137,7 +141,7 @@ pub fn create_session_globals_then( "SESSION_GLOBALS should never be overwritten! \ Use another thread if you need another SessionGlobals" ); - let session_globals = SessionGlobals::new(edition, sm_inputs); + let session_globals = SessionGlobals::new(edition, extra_symbols, sm_inputs); SESSION_GLOBALS.set(&session_globals, f) } @@ -156,7 +160,7 @@ where F: FnOnce(&SessionGlobals) -> R, { if !SESSION_GLOBALS.is_set() { - let session_globals = SessionGlobals::new(edition, None); + let session_globals = SessionGlobals::new(edition, &[], None); SESSION_GLOBALS.set(&session_globals, || SESSION_GLOBALS.with(f)) } else { SESSION_GLOBALS.with(f) @@ -172,7 +176,7 @@ where /// Default edition, no source map. pub fn create_default_session_globals_then(f: impl FnOnce() -> R) -> R { - create_session_globals_then(edition::DEFAULT_EDITION, None, f) + create_session_globals_then(edition::DEFAULT_EDITION, &[], None, f) } // If this ever becomes non thread-local, `decode_syntax_context` @@ -220,7 +224,7 @@ pub fn with_metavar_spans(f: impl FnOnce(&MetavarSpansMap) -> R) -> R { // FIXME: We should use this enum or something like it to get rid of the // use of magic `/rust/1.x/...` paths across the board. -#[derive(Debug, Eq, PartialEq, Clone, Ord, PartialOrd, Decodable)] +#[derive(Debug, Eq, PartialEq, Clone, Ord, PartialOrd, Decodable, Encodable)] pub enum RealFileName { LocalPath(PathBuf), /// For remapped paths (namely paths into libstd that have been mapped @@ -246,28 +250,6 @@ impl Hash for RealFileName { } } -// This is functionally identical to #[derive(Encodable)], with the exception of -// an added assert statement -impl Encodable for RealFileName { - fn encode(&self, encoder: &mut S) { - match *self { - RealFileName::LocalPath(ref local_path) => { - encoder.emit_u8(0); - local_path.encode(encoder); - } - - RealFileName::Remapped { ref local_path, ref virtual_name } => { - encoder.emit_u8(1); - // For privacy and build reproducibility, we must not embed host-dependant path - // in artifacts if they have been remapped by --remap-path-prefix - assert!(local_path.is_none()); - local_path.encode(encoder); - virtual_name.encode(encoder); - } - } - } -} - impl RealFileName { /// Returns the path suitable for reading from the file system on the local host, /// if this information exists. @@ -364,6 +346,16 @@ impl From for FileName { } } +#[derive(Clone, Copy, Eq, PartialEq, Hash, Debug)] +pub enum FileNameEmbeddablePreference { + /// If a remapped path is available, only embed the `virtual_path` and omit the `local_path`. + /// + /// Otherwise embed the local-path into the `virtual_path`. + RemappedOnly, + /// Embed the original path as well as its remapped `virtual_path` component if available. + LocalAndRemapped, +} + #[derive(Clone, Copy, Eq, PartialEq, Hash, Debug)] pub enum FileNameDisplayPreference { /// Display the path after the application of rewrite rules provided via `--remap-path-prefix`. @@ -602,28 +594,13 @@ impl Span { !self.is_dummy() && sm.is_span_accessible(self) } - /// Returns whether `span` originates in a foreign crate's external macro. + /// Returns whether this span originates in a foreign crate's external macro. /// /// This is used to test whether a lint should not even begin to figure out whether it should /// be reported on the current node. + #[inline] pub fn in_external_macro(self, sm: &SourceMap) -> bool { - let expn_data = self.ctxt().outer_expn_data(); - match expn_data.kind { - ExpnKind::Root - | ExpnKind::Desugaring( - DesugaringKind::ForLoop - | DesugaringKind::WhileLoop - | DesugaringKind::OpaqueTy - | DesugaringKind::Async - | DesugaringKind::Await, - ) => false, - ExpnKind::AstPass(_) | ExpnKind::Desugaring(_) => true, // well, it's "external" - ExpnKind::Macro(MacroKind::Bang, _) => { - // Dummy span for the `def_site` means it's an external macro. - expn_data.def_site.is_dummy() || sm.is_imported(expn_data.def_site) - } - ExpnKind::Macro { .. } => true, // definitely a plugin - } + self.ctxt().in_external_macro(sm) } /// Returns `true` if `span` originates in a derive-macro's expansion. @@ -1112,7 +1089,7 @@ impl Span { /// Equivalent of `Span::mixed_site` from the proc macro API, /// except that the location is taken from the `self` span. pub fn with_mixed_site_ctxt(self, expn_id: ExpnId) -> Span { - self.with_ctxt_from_mark(expn_id, Transparency::SemiTransparent) + self.with_ctxt_from_mark(expn_id, Transparency::SemiOpaque) } /// Produces a span with the same location as `self` and context produced by a macro with the diff --git a/compiler/rustc_span/src/source_map.rs b/compiler/rustc_span/src/source_map.rs index 6fdf8e46fec65..8a3644163caf3 100644 --- a/compiler/rustc_span/src/source_map.rs +++ b/compiler/rustc_span/src/source_map.rs @@ -633,6 +633,24 @@ impl SourceMap { sp } + /// Extends the given `Span` to just before the previous occurrence of `c`. Return the same span + /// if an error occurred while retrieving the code snippet. + pub fn span_extend_to_prev_char_before( + &self, + sp: Span, + c: char, + accept_newlines: bool, + ) -> Span { + if let Ok(prev_source) = self.span_to_prev_source(sp) { + let prev_source = prev_source.rsplit(c).next().unwrap_or(""); + if accept_newlines || !prev_source.contains('\n') { + return sp.with_lo(BytePos(sp.lo().0 - prev_source.len() as u32 - 1_u32)); + } + } + + sp + } + /// Extends the given `Span` to just after the previous occurrence of `pat` when surrounded by /// whitespace. Returns None if the pattern could not be found or if an error occurred while /// retrieving the code snippet. @@ -1090,18 +1108,28 @@ pub fn get_source_map() -> Option> { pub struct FilePathMapping { mapping: Vec<(PathBuf, PathBuf)>, filename_display_for_diagnostics: FileNameDisplayPreference, + filename_embeddable_preference: FileNameEmbeddablePreference, } impl FilePathMapping { pub fn empty() -> FilePathMapping { - FilePathMapping::new(Vec::new(), FileNameDisplayPreference::Local) + FilePathMapping::new( + Vec::new(), + FileNameDisplayPreference::Local, + FileNameEmbeddablePreference::RemappedOnly, + ) } pub fn new( mapping: Vec<(PathBuf, PathBuf)>, filename_display_for_diagnostics: FileNameDisplayPreference, + filename_embeddable_preference: FileNameEmbeddablePreference, ) -> FilePathMapping { - FilePathMapping { mapping, filename_display_for_diagnostics } + FilePathMapping { + mapping, + filename_display_for_diagnostics, + filename_embeddable_preference, + } } /// Applies any path prefix substitution as defined by the mapping. @@ -1199,11 +1227,13 @@ impl FilePathMapping { ) -> RealFileName { match file_path { // Anything that's already remapped we don't modify, except for erasing - // the `local_path` portion. - RealFileName::Remapped { local_path: _, virtual_name } => { + // the `local_path` portion (if desired). + RealFileName::Remapped { local_path, virtual_name } => { RealFileName::Remapped { - // We do not want any local path to be exported into metadata - local_path: None, + local_path: match self.filename_embeddable_preference { + FileNameEmbeddablePreference::RemappedOnly => None, + FileNameEmbeddablePreference::LocalAndRemapped => local_path, + }, // We use the remapped name verbatim, even if it looks like a relative // path. The assumption is that the user doesn't want us to further // process paths that have gone through remapping. @@ -1213,12 +1243,18 @@ impl FilePathMapping { RealFileName::LocalPath(unmapped_file_path) => { // If no remapping has been applied yet, try to do so - let (new_path, was_remapped) = self.map_prefix(unmapped_file_path); + let (new_path, was_remapped) = self.map_prefix(&unmapped_file_path); if was_remapped { // It was remapped, so don't modify further return RealFileName::Remapped { - local_path: None, virtual_name: new_path.into_owned(), + // But still provide the local path if desired + local_path: match self.filename_embeddable_preference { + FileNameEmbeddablePreference::RemappedOnly => None, + FileNameEmbeddablePreference::LocalAndRemapped => { + Some(unmapped_file_path) + } + }, }; } @@ -1234,17 +1270,23 @@ impl FilePathMapping { match working_directory { RealFileName::LocalPath(unmapped_working_dir_abs) => { - let file_path_abs = unmapped_working_dir_abs.join(unmapped_file_path_rel); + let unmapped_file_path_abs = + unmapped_working_dir_abs.join(unmapped_file_path_rel); // Although neither `working_directory` nor the file name were subject // to path remapping, the concatenation between the two may be. Hence // we need to do a remapping here. - let (file_path_abs, was_remapped) = self.map_prefix(file_path_abs); + let (file_path_abs, was_remapped) = + self.map_prefix(&unmapped_file_path_abs); if was_remapped { RealFileName::Remapped { - // Erase the actual path - local_path: None, virtual_name: file_path_abs.into_owned(), + local_path: match self.filename_embeddable_preference { + FileNameEmbeddablePreference::RemappedOnly => None, + FileNameEmbeddablePreference::LocalAndRemapped => { + Some(unmapped_file_path_abs) + } + }, } } else { // No kind of remapping applied to this path, so @@ -1253,15 +1295,20 @@ impl FilePathMapping { } } RealFileName::Remapped { - local_path: _, + local_path, virtual_name: remapped_working_dir_abs, } => { // If working_directory has been remapped, then we emit // Remapped variant as the expanded path won't be valid RealFileName::Remapped { - local_path: None, virtual_name: Path::new(remapped_working_dir_abs) - .join(unmapped_file_path_rel), + .join(&unmapped_file_path_rel), + local_path: match self.filename_embeddable_preference { + FileNameEmbeddablePreference::RemappedOnly => None, + FileNameEmbeddablePreference::LocalAndRemapped => local_path + .as_ref() + .map(|local_path| local_path.join(unmapped_file_path_rel)), + }, } } } @@ -1269,27 +1316,6 @@ impl FilePathMapping { } } - /// Expand a relative path to an absolute path **without** remapping taken into account. - /// - /// The resulting `RealFileName` will have its `virtual_path` portion erased if - /// possible (i.e. if there's also a remapped path). - pub fn to_local_embeddable_absolute_path( - &self, - file_path: RealFileName, - working_directory: &RealFileName, - ) -> RealFileName { - let file_path = file_path.local_path_if_available(); - if file_path.is_absolute() { - // No remapping has applied to this path and it is absolute, - // so the working directory cannot influence it either, so - // we are done. - return RealFileName::LocalPath(file_path.to_path_buf()); - } - debug_assert!(file_path.is_relative()); - let working_directory = working_directory.local_path_if_available(); - RealFileName::LocalPath(Path::new(working_directory).join(file_path)) - } - /// Attempts to (heuristically) reverse a prefix mapping. /// /// Returns [`Some`] if there is exactly one mapping where the "to" part is diff --git a/compiler/rustc_span/src/source_map/tests.rs b/compiler/rustc_span/src/source_map/tests.rs index 957f55e39138e..589c2a3635481 100644 --- a/compiler/rustc_span/src/source_map/tests.rs +++ b/compiler/rustc_span/src/source_map/tests.rs @@ -305,6 +305,7 @@ fn path_prefix_remapping() { let mapping = &FilePathMapping::new( vec![(path("abc/def"), path("foo"))], FileNameDisplayPreference::Remapped, + FileNameEmbeddablePreference::RemappedOnly, ); assert_eq!(map_path_prefix(mapping, "abc/def/src/main.rs"), path_str("foo/src/main.rs")); @@ -316,6 +317,7 @@ fn path_prefix_remapping() { let mapping = &FilePathMapping::new( vec![(path("abc/def"), path("/foo"))], FileNameDisplayPreference::Remapped, + FileNameEmbeddablePreference::RemappedOnly, ); assert_eq!(map_path_prefix(mapping, "abc/def/src/main.rs"), path_str("/foo/src/main.rs")); @@ -327,6 +329,7 @@ fn path_prefix_remapping() { let mapping = &FilePathMapping::new( vec![(path("/abc/def"), path("foo"))], FileNameDisplayPreference::Remapped, + FileNameEmbeddablePreference::RemappedOnly, ); assert_eq!(map_path_prefix(mapping, "/abc/def/src/main.rs"), path_str("foo/src/main.rs")); @@ -338,6 +341,7 @@ fn path_prefix_remapping() { let mapping = &FilePathMapping::new( vec![(path("/abc/def"), path("/foo"))], FileNameDisplayPreference::Remapped, + FileNameEmbeddablePreference::RemappedOnly, ); assert_eq!(map_path_prefix(mapping, "/abc/def/src/main.rs"), path_str("/foo/src/main.rs")); @@ -351,6 +355,7 @@ fn path_prefix_remapping_expand_to_absolute() { let mapping = &FilePathMapping::new( vec![(path("/foo"), path("FOO")), (path("/bar"), path("BAR"))], FileNameDisplayPreference::Remapped, + FileNameEmbeddablePreference::RemappedOnly, ); let working_directory = path("/foo"); let working_directory = RealFileName::Remapped { @@ -448,6 +453,71 @@ fn path_prefix_remapping_expand_to_absolute() { ); } +#[test] +fn path_prefix_remapping_expand_to_absolute_and_local() { + // "virtual" working directory is relative path + let mapping = &FilePathMapping::new( + vec![(path("/foo"), path("FOO")), (path("/bar"), path("BAR"))], + FileNameDisplayPreference::Remapped, + FileNameEmbeddablePreference::LocalAndRemapped, + ); + let working_directory = path("/foo"); + let working_directory = RealFileName::Remapped { + local_path: Some(working_directory.clone()), + virtual_name: mapping.map_prefix(working_directory).0.into_owned(), + }; + + assert_eq!(working_directory.remapped_path_if_available(), path("FOO")); + + // Unmapped absolute path + assert_eq!( + mapping.to_embeddable_absolute_path( + RealFileName::LocalPath(path("/foo/src/main.rs")), + &working_directory + ), + RealFileName::Remapped { + local_path: Some(path("/foo/src/main.rs")), + virtual_name: path("FOO/src/main.rs") + } + ); + + // Unmapped absolute path with unrelated working directory + assert_eq!( + mapping.to_embeddable_absolute_path( + RealFileName::LocalPath(path("/bar/src/main.rs")), + &working_directory + ), + RealFileName::Remapped { + local_path: Some(path("/bar/src/main.rs")), + virtual_name: path("BAR/src/main.rs") + } + ); + + // Already remapped absolute path, with unrelated working directory + assert_eq!( + mapping.to_embeddable_absolute_path( + RealFileName::Remapped { + local_path: Some(path("/bar/src/main.rs")), + virtual_name: path("BAR/src/main.rs"), + }, + &working_directory + ), + RealFileName::Remapped { + local_path: Some(path("/bar/src/main.rs")), + virtual_name: path("BAR/src/main.rs") + } + ); + + // Already remapped relative path + assert_eq!( + mapping.to_embeddable_absolute_path( + RealFileName::Remapped { local_path: None, virtual_name: path("XYZ/src/main.rs") }, + &working_directory + ), + RealFileName::Remapped { local_path: None, virtual_name: path("XYZ/src/main.rs") } + ); +} + #[test] fn path_prefix_remapping_reverse() { // Ignores options without alphanumeric chars. @@ -455,6 +525,7 @@ fn path_prefix_remapping_reverse() { let mapping = &FilePathMapping::new( vec![(path("abc"), path("/")), (path("def"), path("."))], FileNameDisplayPreference::Remapped, + FileNameEmbeddablePreference::RemappedOnly, ); assert_eq!(reverse_map_prefix(mapping, "/hello.rs"), None); @@ -466,6 +537,7 @@ fn path_prefix_remapping_reverse() { let mapping = &FilePathMapping::new( vec![(path("abc"), path("/redacted")), (path("def"), path("/redacted"))], FileNameDisplayPreference::Remapped, + FileNameEmbeddablePreference::RemappedOnly, ); assert_eq!(reverse_map_prefix(mapping, "/redacted/hello.rs"), None); @@ -476,6 +548,7 @@ fn path_prefix_remapping_reverse() { let mapping = &FilePathMapping::new( vec![(path("abc"), path("/redacted")), (path("def/ghi"), path("/fake/dir"))], FileNameDisplayPreference::Remapped, + FileNameEmbeddablePreference::RemappedOnly, ); assert_eq!( diff --git a/compiler/rustc_span/src/span_encoding.rs b/compiler/rustc_span/src/span_encoding.rs index 9d6c7d2a42a38..a4a47dc99b084 100644 --- a/compiler/rustc_span/src/span_encoding.rs +++ b/compiler/rustc_span/src/span_encoding.rs @@ -306,8 +306,21 @@ impl Span { /// Returns `true` if this span comes from any kind of macro, desugaring or inlining. #[inline] pub fn from_expansion(self) -> bool { - // If the span is fully inferred then ctxt > MAX_CTXT - self.inline_ctxt().map_or(true, |ctxt| !ctxt.is_root()) + let ctxt = match_span_kind! { + self, + // All branches here, except `InlineParent`, actually return `span.ctxt_or_parent_or_marker`. + // Since `Interned` is selected if the field contains `CTXT_INTERNED_MARKER` returning that value + // as the context allows the compiler to optimize out the branch that selects between either + // `Interned` and `PartiallyInterned`. + // + // Interned contexts can never be the root context and `CTXT_INTERNED_MARKER` has a different value + // than the root context so this works for checking is this is an expansion. + InlineCtxt(span) => SyntaxContext::from_u16(span.ctxt), + InlineParent(_span) => SyntaxContext::root(), + PartiallyInterned(span) => SyntaxContext::from_u16(span.ctxt), + Interned(_span) => SyntaxContext::from_u16(CTXT_INTERNED_MARKER), + }; + !ctxt.is_root() } /// Returns `true` if this is a dummy span with any hygienic context. diff --git a/compiler/rustc_span/src/symbol.rs b/compiler/rustc_span/src/symbol.rs index 47dd80c432ead..068d737b6ec3a 100644 --- a/compiler/rustc_span/src/symbol.rs +++ b/compiler/rustc_span/src/symbol.rs @@ -131,7 +131,7 @@ symbols! { // tidy-alphabetical-end // Weak keywords, have special meaning only in specific contexts. - // Matching predicates: none + // Matching predicates: `is_weak` // tidy-alphabetical-start Auto: "auto", Builtin: "builtin", @@ -174,7 +174,6 @@ symbols! { Arc, ArcWeak, Argument, - ArgumentMethods, ArrayIntoIter, AsMut, AsRef, @@ -249,6 +248,7 @@ symbols! { Error, File, FileType, + FmtArgumentsNew, Fn, FnMut, FnOnce, @@ -372,6 +372,7 @@ symbols! { SyncUnsafeCell, T, Target, + This, ToOwned, ToString, TokenStream, @@ -454,9 +455,11 @@ symbols! { and_then, anon, anon_adt, + anon_assoc, anonymous_lifetime_in_impl_trait, any, append_const_msg, + apx_target_feature, arbitrary_enum_discriminant, arbitrary_self_types, arbitrary_self_types_pointers, @@ -500,17 +503,8 @@ symbols! { async_call_mut, async_call_once, async_closure, - async_destruct, async_drop, - async_drop_chain, - async_drop_defer, - async_drop_deferred_drop_in_place, - async_drop_either, - async_drop_fuse, async_drop_in_place, - async_drop_noop, - async_drop_slice, - async_drop_surface_drop_in_place, async_fn, async_fn_in_dyn_trait, async_fn_in_trait, @@ -538,6 +532,7 @@ symbols! { autodiff, automatically_derived, avx, + avx10_target_feature, avx512_target_feature, avx512bw, avx512f, @@ -621,6 +616,7 @@ symbols! { cfg_target_feature, cfg_target_has_atomic, cfg_target_has_atomic_equal_alignment, + cfg_target_has_reliable_f16_f128, cfg_target_thread_local, cfg_target_vendor, cfg_trace: "", // must not be a valid identifier @@ -800,6 +796,15 @@ symbols! { default_fn, default_lib_allocator, default_method_body_is_const, + // -------------------------- + // Lang items which are used only for experiments with auto traits with default bounds. + // These lang items are not actually defined in core/std. Experiment is a part of + // `MCP: Low level components for async drop`(https://github.com/rust-lang/compiler-team/issues/727) + default_trait1, + default_trait2, + default_trait3, + default_trait4, + // -------------------------- default_type_parameter_fallback, default_type_params, define_opaque, @@ -870,6 +875,12 @@ symbols! { effects, eh_catch_typeinfo, eh_personality, + eii, + eii_impl, + eii_internals, + eii_macro, + eii_macro_for, + eii_mangle_extern, emit, emit_enum, emit_enum_variant, @@ -906,9 +917,11 @@ symbols! { expf16, expf32, expf64, + explicit_extern_abis, explicit_generic_args_with_impl_trait, explicit_tail_calls, export_name, + export_stable, expr, expr_2021, expr_fragment_specifier_2024, @@ -971,7 +984,6 @@ symbols! { fadd_fast, fake_variadic, fallback, - fallback_surface_drop, fdiv_algebraic, fdiv_fast, feature, @@ -1044,6 +1056,7 @@ symbols! { from_u16, from_usize, from_yeet, + frontmatter, fs_create_dir, fsub_algebraic, fsub_fast, @@ -1052,13 +1065,13 @@ symbols! { fundamental, fused_iterator, future, + future_drop_poll, future_output, future_trait, gdb_script_file, ge, gen_blocks, gen_future, - gen_kill, generator_clone, generators, generic_arg_infer, @@ -1176,6 +1189,7 @@ symbols! { instruction_set, integer_: "integer", // underscore to avoid clashing with the function `sym::integer` below integral, + internal_features, into_async_iter_into_iter, into_future, into_iter, @@ -1378,6 +1392,7 @@ symbols! { movbe_target_feature, move_ref_pattern, move_size_limit, + movrs_target_feature, mul, mul_assign, mul_with_overflow, @@ -1389,6 +1404,7 @@ symbols! { naked, naked_asm, naked_functions, + naked_functions_rustic_abi, naked_functions_target_feature, name, names, @@ -1508,14 +1524,18 @@ symbols! { panic_cannot_unwind, panic_const_add_overflow, panic_const_async_fn_resumed, + panic_const_async_fn_resumed_drop, panic_const_async_fn_resumed_panic, panic_const_async_gen_fn_resumed, + panic_const_async_gen_fn_resumed_drop, panic_const_async_gen_fn_resumed_panic, panic_const_coroutine_resumed, + panic_const_coroutine_resumed_drop, panic_const_coroutine_resumed_panic, panic_const_div_by_zero, panic_const_div_overflow, panic_const_gen_fn_none, + panic_const_gen_fn_none_drop, panic_const_gen_fn_none_panic, panic_const_mul_overflow, panic_const_neg_overflow, @@ -1810,13 +1830,13 @@ symbols! { rustc_lint_opt_ty, rustc_lint_query_instability, rustc_lint_untracked_query_information, - rustc_macro_edition_2021, rustc_macro_transparency, rustc_main, rustc_mir, rustc_must_implement_one_of, rustc_never_returns_null_ptr, rustc_never_type_options, + rustc_no_implicit_autorefs, rustc_no_mir_inline, rustc_nonnull_optimization_guaranteed, rustc_nounwind, @@ -1867,10 +1887,12 @@ symbols! { saturating_add, saturating_div, saturating_sub, + sdylib, search_unbox, select_unpredictable, self_in_typedefs, self_struct_ctor, + semiopaque, semitransparent, sha2, sha3, @@ -1903,6 +1925,7 @@ symbols! { simd_eq, simd_expose_provenance, simd_extract, + simd_extract_dyn, simd_fabs, simd_fcos, simd_fexp, @@ -1921,6 +1944,7 @@ symbols! { simd_ge, simd_gt, simd_insert, + simd_insert_dyn, simd_le, simd_lt, simd_masked_load, @@ -2039,8 +2063,8 @@ symbols! { sub_assign, sub_with_overflow, suggestion, + super_let, supertrait_item_shadowing, - surface_async_drop_in_place, sym, sync, synthetic, @@ -2056,6 +2080,10 @@ symbols! { target_has_atomic, target_has_atomic_equal_alignment, target_has_atomic_load_store, + target_has_reliable_f128, + target_has_reliable_f128_math, + target_has_reliable_f16, + target_has_reliable_f16_math, target_os, target_pointer_width, target_thread_local, @@ -2197,10 +2225,12 @@ symbols! { unsafe_block_in_unsafe_fn, unsafe_cell, unsafe_cell_raw_get, + unsafe_eii, unsafe_extern_blocks, unsafe_fields, unsafe_no_drop_flag, - unsafe_pin_internals, + unsafe_pinned, + unsafe_unpin, unsize, unsized_const_param_ty, unsized_const_params, @@ -2319,6 +2349,9 @@ pub const STDLIB_STABLE_CRATES: &[Symbol] = &[sym::std, sym::core, sym::alloc, s #[derive(Copy, Clone, Eq, HashStable_Generic, Encodable, Decodable)] pub struct Ident { + // `name` should never be the empty symbol. If you are considering that, + // you are probably conflating "empty identifer with "no identifier" and + // you should use `Option` instead. pub name: Symbol, pub span: Span, } @@ -2326,28 +2359,21 @@ pub struct Ident { impl Ident { #[inline] /// Constructs a new identifier from a symbol and a span. - pub const fn new(name: Symbol, span: Span) -> Ident { + pub fn new(name: Symbol, span: Span) -> Ident { + assert_ne!(name, kw::Empty); Ident { name, span } } /// Constructs a new identifier with a dummy span. #[inline] - pub const fn with_dummy_span(name: Symbol) -> Ident { + pub fn with_dummy_span(name: Symbol) -> Ident { Ident::new(name, DUMMY_SP) } - /// This is best avoided, because it blurs the lines between "empty - /// identifier" and "no identifier". Using `Option` is preferable, - /// where possible, because that is unambiguous. - #[inline] - pub fn empty() -> Ident { - Ident::with_dummy_span(kw::Empty) - } - // For dummy identifiers that are never used and absolutely must be - // present, it's better to use `Ident::dummy` than `Ident::Empty`, because - // it's clearer that it's intended as a dummy value, and more likely to be - // detected if it accidentally does get used. + // present. Note that this does *not* use the empty symbol; `sym::dummy` + // makes it clear that it's intended as a dummy value, and is more likely + // to be detected if it accidentally does get used. #[inline] pub fn dummy() -> Ident { Ident::with_dummy_span(sym::dummy) @@ -2527,15 +2553,10 @@ rustc_index::newtype_index! { } impl Symbol { - const fn new(n: u32) -> Self { + pub const fn new(n: u32) -> Self { Symbol(SymbolIndex::from_u32(n)) } - /// for use in Decoder only - pub fn new_from_decoded(n: u32) -> Self { - Self::new(n) - } - /// Maps a string to its interned representation. #[rustc_diagnostic_item = "SymbolIntern"] pub fn intern(string: &str) -> Self { @@ -2621,11 +2642,14 @@ struct InternerInner { } impl Interner { - fn prefill(init: &[&'static str]) -> Self { - Interner(Lock::new(InternerInner { - arena: Default::default(), - strings: init.iter().copied().collect(), - })) + fn prefill(init: &[&'static str], extra: &[&'static str]) -> Self { + let strings = FxIndexSet::from_iter(init.iter().copied().chain(extra.iter().copied())); + assert_eq!( + strings.len(), + init.len() + extra.len(), + "`init` or `extra` contain duplicate symbols", + ); + Interner(Lock::new(InternerInner { arena: Default::default(), strings })) } #[inline] @@ -2725,6 +2749,10 @@ impl Symbol { || self.is_unused_keyword_conditional(edition) } + pub fn is_weak(self) -> bool { + self >= kw::Auto && self <= kw::Yeet + } + /// A keyword or reserved identifier that can be used as a path segment. pub fn is_path_segment_keyword(self) -> bool { self == kw::Super @@ -2745,9 +2773,9 @@ impl Symbol { self != kw::Empty && self != kw::Underscore && !self.is_path_segment_keyword() } - /// Is this symbol was interned in compiler's `symbols!` macro - pub fn is_preinterned(self) -> bool { - self.as_u32() < PREINTERNED_SYMBOLS_COUNT + /// Was this symbol predefined in the compiler's `symbols!` macro + pub fn is_predefined(self) -> bool { + self.as_u32() < PREDEFINED_SYMBOLS_COUNT } } diff --git a/compiler/rustc_span/src/symbol/tests.rs b/compiler/rustc_span/src/symbol/tests.rs index c6aa7627b2b54..660d0d7179afa 100644 --- a/compiler/rustc_span/src/symbol/tests.rs +++ b/compiler/rustc_span/src/symbol/tests.rs @@ -3,7 +3,7 @@ use crate::create_default_session_globals_then; #[test] fn interner_tests() { - let i = Interner::prefill(&[]); + let i = Interner::prefill(&[], &[]); // first one is zero: assert_eq!(i.intern("dog"), Symbol::new(0)); // re-use gets the same entry: diff --git a/compiler/rustc_symbol_mangling/src/export.rs b/compiler/rustc_symbol_mangling/src/export.rs new file mode 100644 index 0000000000000..770401fc8cfea --- /dev/null +++ b/compiler/rustc_symbol_mangling/src/export.rs @@ -0,0 +1,181 @@ +use std::assert_matches::debug_assert_matches; + +use rustc_abi::IntegerType; +use rustc_data_structures::stable_hasher::StableHasher; +use rustc_hashes::Hash128; +use rustc_hir::def::DefKind; +use rustc_middle::ty::{self, Instance, Ty, TyCtxt}; +use rustc_span::symbol::{Symbol, sym}; + +trait AbiHashStable<'tcx> { + fn abi_hash(&self, tcx: TyCtxt<'tcx>, hasher: &mut StableHasher); +} +macro_rules! default_hash_impl { + ($($t:ty,)+) => { + $(impl<'tcx> AbiHashStable<'tcx> for $t { + #[inline] + fn abi_hash(&self, _tcx: TyCtxt<'tcx>, hasher: &mut StableHasher) { + ::std::hash::Hash::hash(self, hasher); + } + })* + }; +} + +default_hash_impl! { i8, i16, i32, i64, i128, isize, u8, u16, u32, u64, u128, usize, } + +impl<'tcx> AbiHashStable<'tcx> for bool { + #[inline] + fn abi_hash(&self, tcx: TyCtxt<'tcx>, hasher: &mut StableHasher) { + (if *self { 1u8 } else { 0u8 }).abi_hash(tcx, hasher); + } +} + +impl<'tcx> AbiHashStable<'tcx> for str { + #[inline] + fn abi_hash(&self, tcx: TyCtxt<'tcx>, hasher: &mut StableHasher) { + self.as_bytes().abi_hash(tcx, hasher); + } +} + +impl<'tcx> AbiHashStable<'tcx> for String { + #[inline] + fn abi_hash(&self, tcx: TyCtxt<'tcx>, hasher: &mut StableHasher) { + self[..].abi_hash(tcx, hasher); + } +} + +impl<'tcx> AbiHashStable<'tcx> for Symbol { + #[inline] + fn abi_hash(&self, tcx: TyCtxt<'tcx>, hasher: &mut StableHasher) { + self.as_str().abi_hash(tcx, hasher); + } +} + +impl<'tcx, T: AbiHashStable<'tcx>> AbiHashStable<'tcx> for [T] { + fn abi_hash(&self, tcx: TyCtxt<'tcx>, hasher: &mut StableHasher) { + self.len().abi_hash(tcx, hasher); + for item in self { + item.abi_hash(tcx, hasher); + } + } +} + +impl<'tcx> AbiHashStable<'tcx> for Ty<'tcx> { + fn abi_hash(&self, tcx: TyCtxt<'tcx>, hasher: &mut StableHasher) { + match self.kind() { + ty::Bool => sym::bool.abi_hash(tcx, hasher), + ty::Char => sym::char.abi_hash(tcx, hasher), + ty::Int(int_ty) => int_ty.name_str().abi_hash(tcx, hasher), + ty::Uint(uint_ty) => uint_ty.name_str().abi_hash(tcx, hasher), + ty::Float(float_ty) => float_ty.name_str().abi_hash(tcx, hasher), + + ty::Adt(adt_def, args) => { + adt_def.is_struct().abi_hash(tcx, hasher); + adt_def.is_enum().abi_hash(tcx, hasher); + adt_def.is_union().abi_hash(tcx, hasher); + + if let Some(align) = adt_def.repr().align { + align.bits().abi_hash(tcx, hasher); + } + + if let Some(integer) = adt_def.repr().int { + match integer { + IntegerType::Pointer(sign) => sign.abi_hash(tcx, hasher), + IntegerType::Fixed(integer, sign) => { + integer.int_ty_str().abi_hash(tcx, hasher); + sign.abi_hash(tcx, hasher); + } + } + } + + if let Some(pack) = adt_def.repr().pack { + pack.bits().abi_hash(tcx, hasher); + } + + adt_def.repr().c().abi_hash(tcx, hasher); + + for variant in adt_def.variants() { + variant.name.abi_hash(tcx, hasher); + for field in &variant.fields { + field.name.abi_hash(tcx, hasher); + let field_ty = tcx.type_of(field.did).instantiate_identity(); + field_ty.abi_hash(tcx, hasher); + } + } + args.abi_hash(tcx, hasher); + } + + ty::Tuple(args) if args.len() == 0 => {} + + // FIXME: Not yet supported. + ty::Foreign(_) + | ty::Ref(_, _, _) + | ty::Str + | ty::Array(_, _) + | ty::Pat(_, _) + | ty::Slice(_) + | ty::RawPtr(_, _) + | ty::FnDef(_, _) + | ty::FnPtr(_, _) + | ty::Dynamic(_, _, _) + | ty::Closure(_, _) + | ty::CoroutineClosure(_, _) + | ty::Coroutine(_, _) + | ty::CoroutineWitness(_, _) + | ty::Never + | ty::Tuple(_) + | ty::Alias(_, _) + | ty::Param(_) + | ty::Bound(_, _) + | ty::Placeholder(_) + | ty::Infer(_) + | ty::UnsafeBinder(_) => unreachable!(), + + ty::Error(_) => {} + } + } +} + +impl<'tcx> AbiHashStable<'tcx> for ty::FnSig<'tcx> { + fn abi_hash(&self, tcx: TyCtxt<'tcx>, hasher: &mut StableHasher) { + for ty in self.inputs_and_output { + ty.abi_hash(tcx, hasher); + } + self.safety.is_safe().abi_hash(tcx, hasher); + } +} + +impl<'tcx> AbiHashStable<'tcx> for ty::GenericArg<'tcx> { + fn abi_hash(&self, tcx: TyCtxt<'tcx>, hasher: &mut StableHasher) { + self.unpack().abi_hash(tcx, hasher); + } +} + +impl<'tcx> AbiHashStable<'tcx> for ty::GenericArgKind<'tcx> { + fn abi_hash(&self, tcx: TyCtxt<'tcx>, hasher: &mut StableHasher) { + match self { + ty::GenericArgKind::Type(t) => t.abi_hash(tcx, hasher), + ty::GenericArgKind::Lifetime(_) | ty::GenericArgKind::Const(_) => unimplemented!(), + } + } +} + +pub(crate) fn compute_hash_of_export_fn<'tcx>( + tcx: TyCtxt<'tcx>, + instance: Instance<'tcx>, +) -> String { + let def_id = instance.def_id(); + debug_assert_matches!(tcx.def_kind(def_id), DefKind::Fn | DefKind::AssocFn); + + let args = instance.args; + let sig_ty = tcx.fn_sig(def_id).instantiate(tcx, args); + let sig_ty = tcx.instantiate_bound_regions_with_erased(sig_ty); + + let hash = { + let mut hasher = StableHasher::new(); + sig_ty.abi_hash(tcx, &mut hasher); + hasher.finish::() + }; + + hash.as_u128().to_string() +} diff --git a/compiler/rustc_symbol_mangling/src/legacy.rs b/compiler/rustc_symbol_mangling/src/legacy.rs index 88754f1f15b46..db102abda7fa3 100644 --- a/compiler/rustc_symbol_mangling/src/legacy.rs +++ b/compiler/rustc_symbol_mangling/src/legacy.rs @@ -28,7 +28,10 @@ pub(super) fn mangle<'tcx>( loop { let key = tcx.def_key(ty_def_id); match key.disambiguated_data.data { - DefPathData::TypeNs(_) | DefPathData::ValueNs(_) | DefPathData::Closure => { + DefPathData::TypeNs(_) + | DefPathData::ValueNs(_) + | DefPathData::Closure + | DefPathData::SyntheticCoroutineBody => { instance_ty = tcx.type_of(ty_def_id).instantiate_identity(); debug!(?instance_ty); break; @@ -60,10 +63,17 @@ pub(super) fn mangle<'tcx>( .print_def_path( def_id, if let ty::InstanceKind::DropGlue(_, _) - | ty::InstanceKind::AsyncDropGlueCtorShim(_, _) = instance.def + | ty::InstanceKind::AsyncDropGlueCtorShim(_, _) + | ty::InstanceKind::FutureDropPollShim(_, _, _) = instance.def { // Add the name of the dropped type to the symbol name &*instance.args + } else if let ty::InstanceKind::AsyncDropGlue(_, ty) = instance.def { + let ty::Coroutine(_, cor_args) = ty.kind() else { + bug!(); + }; + let drop_ty = cor_args.first().unwrap().expect_ty(); + tcx.mk_args(&[GenericArg::from(drop_ty)]) } else { &[] }, @@ -96,6 +106,10 @@ pub(super) fn mangle<'tcx>( _ => {} } + if let ty::InstanceKind::FutureDropPollShim(..) = instance.def { + let _ = printer.write_str("{{drop-shim}}"); + } + printer.path.finish(hash) } diff --git a/compiler/rustc_symbol_mangling/src/lib.rs b/compiler/rustc_symbol_mangling/src/lib.rs index c9b15151a2cb2..d0418a49eb3f4 100644 --- a/compiler/rustc_symbol_mangling/src/lib.rs +++ b/compiler/rustc_symbol_mangling/src/lib.rs @@ -89,22 +89,24 @@ // tidy-alphabetical-start #![allow(internal_features)] -#![cfg_attr(doc, recursion_limit = "256")] // FIXME(nnethercote): will be removed by #124141 +#![cfg_attr(bootstrap, feature(let_chains))] #![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")] #![doc(rust_logo)] -#![feature(let_chains)] +#![feature(assert_matches)] #![feature(rustdoc_internals)] // tidy-alphabetical-end use rustc_hir::def::DefKind; use rustc_hir::def_id::{CrateNum, LOCAL_CRATE}; use rustc_middle::middle::codegen_fn_attrs::{CodegenFnAttrFlags, CodegenFnAttrs}; +use rustc_middle::middle::eii::EiiMapping; use rustc_middle::mir::mono::{InstantiationMode, MonoItem}; use rustc_middle::query::Providers; use rustc_middle::ty::{self, Instance, TyCtxt}; use rustc_session::config::SymbolManglingVersion; use tracing::debug; +mod export; mod hashed; mod legacy; mod v0; @@ -239,6 +241,7 @@ fn compute_symbol_name<'tcx>( if tcx.is_foreign_item(def_id) && (!tcx.sess.target.is_like_wasm || !tcx.wasm_import_module_map(def_id.krate).contains_key(&def_id)) + && !attrs.flags.contains(CodegenFnAttrFlags::EII_MANGLE_EXTERN) { if let Some(name) = attrs.link_name { return name.to_string(); @@ -256,6 +259,19 @@ fn compute_symbol_name<'tcx>( return tcx.item_name(def_id).to_string(); } + // if this is an EII shim, it has a kind of fake defid. It has one because it has to have one, + // but when we generate a symbol for it the name must actually match the name of the extern + // generated as part of the declaration of the EII. So, we use an instance of `extern_item` as + // the instance used for ocmputing the symbol name. + let eii_map = tcx.get_externally_implementable_item_impls(()); + let instance = if let Some(EiiMapping { extern_item, .. }) = + instance.def_id().as_local().and_then(|x| eii_map.get(&x)).copied() + { + Instance::mono(tcx, extern_item) + } else { + instance + }; + // If we're dealing with an instance of a function that's inlined from // another crate but we're marking it as globally shared to our // compilation (aka we're not making an internal copy in each of our @@ -297,12 +313,21 @@ fn compute_symbol_name<'tcx>( tcx.symbol_mangling_version(mangling_version_crate) }; - let symbol = match mangling_version { - SymbolManglingVersion::Legacy => legacy::mangle(tcx, instance, instantiating_crate), - SymbolManglingVersion::V0 => v0::mangle(tcx, instance, instantiating_crate), - SymbolManglingVersion::Hashed => hashed::mangle(tcx, instance, instantiating_crate, || { - v0::mangle(tcx, instance, instantiating_crate) - }), + let symbol = match tcx.is_exportable(def_id) { + true => format!( + "{}.{}", + v0::mangle(tcx, instance, instantiating_crate, true), + export::compute_hash_of_export_fn(tcx, instance) + ), + false => match mangling_version { + SymbolManglingVersion::Legacy => legacy::mangle(tcx, instance, instantiating_crate), + SymbolManglingVersion::V0 => v0::mangle(tcx, instance, instantiating_crate, false), + SymbolManglingVersion::Hashed => { + hashed::mangle(tcx, instance, instantiating_crate, || { + v0::mangle(tcx, instance, instantiating_crate, false) + }) + } + }, }; debug_assert!( diff --git a/compiler/rustc_symbol_mangling/src/test.rs b/compiler/rustc_symbol_mangling/src/test.rs index ddeeadff13d17..0c6d1495e39cf 100644 --- a/compiler/rustc_symbol_mangling/src/test.rs +++ b/compiler/rustc_symbol_mangling/src/test.rs @@ -56,7 +56,7 @@ impl SymbolNamesTest<'_> { // some subset. for attr in tcx.get_attrs(def_id, SYMBOL_NAME) { let def_id = def_id.to_def_id(); - let instance = Instance::new( + let instance = Instance::new_raw( def_id, tcx.erase_regions(GenericArgs::identity_for_item(tcx, def_id)), ); diff --git a/compiler/rustc_symbol_mangling/src/v0.rs b/compiler/rustc_symbol_mangling/src/v0.rs index d824a23279b1b..4a99ce09b39ab 100644 --- a/compiler/rustc_symbol_mangling/src/v0.rs +++ b/compiler/rustc_symbol_mangling/src/v0.rs @@ -26,6 +26,7 @@ pub(super) fn mangle<'tcx>( tcx: TyCtxt<'tcx>, instance: Instance<'tcx>, instantiating_crate: Option, + is_exportable: bool, ) -> String { let def_id = instance.def_id(); // FIXME(eddyb) this should ideally not be needed. @@ -35,6 +36,7 @@ pub(super) fn mangle<'tcx>( let mut cx: SymbolMangler<'_> = SymbolMangler { tcx, start_offset: prefix.len(), + is_exportable, paths: FxHashMap::default(), types: FxHashMap::default(), consts: FxHashMap::default(), @@ -58,11 +60,17 @@ pub(super) fn mangle<'tcx>( ty::InstanceKind::ConstructCoroutineInClosureShim { receiver_by_ref: false, .. } => { Some("by_ref") } - + ty::InstanceKind::FutureDropPollShim(_, _, _) => Some("drop"), _ => None, }; - if let Some(shim_kind) = shim_kind { + if let ty::InstanceKind::AsyncDropGlue(_, ty) = instance.def { + let ty::Coroutine(_, cor_args) = ty.kind() else { + bug!(); + }; + let drop_ty = cor_args.first().unwrap().expect_ty(); + cx.print_def_path(def_id, tcx.mk_args(&[GenericArg::from(drop_ty)])).unwrap() + } else if let Some(shim_kind) = shim_kind { cx.path_append_ns(|cx| cx.print_def_path(def_id, args), 'S', 0, shim_kind).unwrap() } else { cx.print_def_path(def_id, args).unwrap() @@ -87,6 +95,7 @@ pub fn mangle_internal_symbol<'tcx>(tcx: TyCtxt<'tcx>, item_name: &str) -> Strin let mut cx: SymbolMangler<'_> = SymbolMangler { tcx, start_offset: prefix.len(), + is_exportable: false, paths: FxHashMap::default(), types: FxHashMap::default(), consts: FxHashMap::default(), @@ -129,6 +138,7 @@ pub(super) fn mangle_typeid_for_trait_ref<'tcx>( let mut cx = SymbolMangler { tcx, start_offset: 0, + is_exportable: false, paths: FxHashMap::default(), types: FxHashMap::default(), consts: FxHashMap::default(), @@ -157,6 +167,7 @@ struct SymbolMangler<'tcx> { tcx: TyCtxt<'tcx>, binders: Vec, out: String, + is_exportable: bool, /// The length of the prefix in `out` (e.g. 2 for `_R`). start_offset: usize, @@ -247,6 +258,22 @@ impl<'tcx> SymbolMangler<'tcx> { Ok(()) } + + fn print_pat(&mut self, pat: ty::Pattern<'tcx>) -> Result<(), std::fmt::Error> { + Ok(match *pat { + ty::PatternKind::Range { start, end } => { + let consts = [start, end]; + for ct in consts { + Ty::new_array_with_const_len(self.tcx, self.tcx.types.unit, ct).print(self)?; + } + } + ty::PatternKind::Or(patterns) => { + for pat in patterns { + self.print_pat(pat)?; + } + } + }) + } } impl<'tcx> Printer<'tcx> for SymbolMangler<'tcx> { @@ -354,7 +381,14 @@ impl<'tcx> Printer<'tcx> for SymbolMangler<'tcx> { args, )?; } else { - self.push_disambiguator(key.disambiguated_data.disambiguator as u64); + let exported_impl_order = self.tcx.stable_order_of_exportable_impls(impl_def_id.krate); + let disambiguator = match self.is_exportable { + true => exported_impl_order[&impl_def_id] as u64, + false => { + exported_impl_order.len() as u64 + key.disambiguated_data.disambiguator as u64 + } + }; + self.push_disambiguator(disambiguator); self.print_def_path(parent_def_id, &[])?; } @@ -368,7 +402,7 @@ impl<'tcx> Printer<'tcx> for SymbolMangler<'tcx> { } fn print_region(&mut self, region: ty::Region<'_>) -> Result<(), PrintError> { - let i = match *region { + let i = match region.kind() { // Erased lifetimes use the index 0, for a // shorter mangling of `L_`. ty::ReErased => 0, @@ -463,20 +497,14 @@ impl<'tcx> Printer<'tcx> for SymbolMangler<'tcx> { ty.print(self)?; } - ty::Pat(ty, pat) => match *pat { - ty::PatternKind::Range { start, end } => { - let consts = [start, end]; - // HACK: Represent as tuple until we have something better. - // HACK: constants are used in arrays, even if the types don't match. - self.push("T"); - ty.print(self)?; - for ct in consts { - Ty::new_array_with_const_len(self.tcx, self.tcx.types.unit, ct) - .print(self)?; - } - self.push("E"); - } - }, + ty::Pat(ty, pat) => { + // HACK: Represent as tuple until we have something better. + // HACK: constants are used in arrays, even if the types don't match. + self.push("T"); + ty.print(self)?; + self.print_pat(pat)?; + self.push("E"); + } ty::Array(ty, len) => { self.push("A"); @@ -615,7 +643,7 @@ impl<'tcx> Printer<'tcx> for SymbolMangler<'tcx> { cx.print_def_path(trait_ref.def_id, trait_ref.args)?; } ty::ExistentialPredicate::Projection(projection) => { - let name = cx.tcx.associated_item(projection.def_id).name; + let name = cx.tcx.associated_item(projection.def_id).name(); cx.push("p"); cx.push_ident(name.as_str()); match projection.term.unpack() { @@ -776,7 +804,7 @@ impl<'tcx> Printer<'tcx> for SymbolMangler<'tcx> { self.push_disambiguator( disambiguated_field.disambiguator as u64, ); - self.push_ident(field_name.unwrap_or(kw::Empty).as_str()); + self.push_ident(field_name.unwrap().as_str()); field.print(self)?; } @@ -802,8 +830,10 @@ impl<'tcx> Printer<'tcx> for SymbolMangler<'tcx> { fn path_crate(&mut self, cnum: CrateNum) -> Result<(), PrintError> { self.push("C"); - let stable_crate_id = self.tcx.def_path_hash(cnum.as_def_id()).stable_crate_id(); - self.push_disambiguator(stable_crate_id.as_u64()); + if !self.is_exportable { + let stable_crate_id = self.tcx.def_path_hash(cnum.as_def_id()).stable_crate_id(); + self.push_disambiguator(stable_crate_id.as_u64()); + } let name = self.tcx.crate_name(cnum); self.push_ident(name.as_str()); Ok(()) @@ -850,6 +880,8 @@ impl<'tcx> Printer<'tcx> for SymbolMangler<'tcx> { DefPathData::Ctor => 'c', DefPathData::AnonConst => 'k', DefPathData::OpaqueTy => 'i', + DefPathData::SyntheticCoroutineBody => 's', + DefPathData::NestedStatic => 'n', // These should never show up as `path_append` arguments. DefPathData::CrateRoot @@ -857,7 +889,9 @@ impl<'tcx> Printer<'tcx> for SymbolMangler<'tcx> { | DefPathData::GlobalAsm | DefPathData::Impl | DefPathData::MacroNs(_) - | DefPathData::LifetimeNs(_) => { + | DefPathData::LifetimeNs(_) + | DefPathData::OpaqueLifetime(_) + | DefPathData::AnonAssocTy(..) => { bug!("symbol_names: unexpected DefPathData: {:?}", disambiguated_data.data) } }; diff --git a/compiler/rustc_target/src/asm/aarch64.rs b/compiler/rustc_target/src/asm/aarch64.rs index cdccb3e5d728e..43a8d9ca119df 100644 --- a/compiler/rustc_target/src/asm/aarch64.rs +++ b/compiler/rustc_target/src/asm/aarch64.rs @@ -78,7 +78,7 @@ pub(crate) fn target_reserves_x18(target: &Target, target_features: &FxIndexSet< target.os == "android" || target.os == "fuchsia" || target.env == "ohos" - || target.is_like_osx + || target.is_like_darwin || target.is_like_windows || target_features.contains(&sym::reserve_x18) } diff --git a/compiler/rustc_target/src/asm/arm.rs b/compiler/rustc_target/src/asm/arm.rs index ff0cbddecf78b..7fea10ff067bf 100644 --- a/compiler/rustc_target/src/asm/arm.rs +++ b/compiler/rustc_target/src/asm/arm.rs @@ -68,7 +68,7 @@ impl ArmInlineAsmRegClass { // This uses the same logic as useR7AsFramePointer in LLVM fn frame_pointer_is_r7(target_features: &FxIndexSet, target: &Target) -> bool { - target.is_like_osx || (!target.is_like_windows && target_features.contains(&sym::thumb_mode)) + target.is_like_darwin || (!target.is_like_windows && target_features.contains(&sym::thumb_mode)) } fn frame_pointer_r11( diff --git a/compiler/rustc_target/src/callconv/loongarch.rs b/compiler/rustc_target/src/callconv/loongarch.rs index 209d7483e612a..c779720f97b9c 100644 --- a/compiler/rustc_target/src/callconv/loongarch.rs +++ b/compiler/rustc_target/src/callconv/loongarch.rs @@ -1,6 +1,6 @@ use rustc_abi::{ - BackendRepr, ExternAbi, FieldsShape, HasDataLayout, Primitive, Reg, RegKind, Size, - TyAbiInterface, TyAndLayout, Variants, + BackendRepr, FieldsShape, HasDataLayout, Primitive, Reg, RegKind, Size, TyAbiInterface, + TyAndLayout, Variants, }; use crate::callconv::{ArgAbi, ArgExtension, CastTarget, FnAbi, PassMode, Uniform}; @@ -364,15 +364,11 @@ where } } -pub(crate) fn compute_rust_abi_info<'a, Ty, C>(cx: &C, fn_abi: &mut FnAbi<'a, Ty>, abi: ExternAbi) +pub(crate) fn compute_rust_abi_info<'a, Ty, C>(cx: &C, fn_abi: &mut FnAbi<'a, Ty>) where Ty: TyAbiInterface<'a, C> + Copy, C: HasDataLayout + HasTargetSpec, { - if abi == ExternAbi::RustIntrinsic { - return; - } - let grlen = cx.data_layout().pointer_size.bits(); for arg in fn_abi.args.iter_mut() { diff --git a/compiler/rustc_target/src/callconv/mod.rs b/compiler/rustc_target/src/callconv/mod.rs index a52b2b76bc1ee..ae366e29e3232 100644 --- a/compiler/rustc_target/src/callconv/mod.rs +++ b/compiler/rustc_target/src/callconv/mod.rs @@ -144,6 +144,7 @@ pub struct ArgAttributes { /// (corresponding to LLVM's dereferenceable_or_null attributes, i.e., it is okay for this to be /// set on a null pointer, but all non-null pointers must be dereferenceable). pub pointee_size: Size, + /// The minimum alignment of the pointee, if any. pub pointee_align: Option, } @@ -670,7 +671,7 @@ impl<'a, Ty> FnAbi<'a, Ty> { } }, "aarch64" | "arm64ec" => { - let kind = if cx.target_spec().is_like_osx { + let kind = if cx.target_spec().is_like_darwin { aarch64::AbiKind::DarwinPCS } else if cx.target_spec().is_like_windows { aarch64::AbiKind::Win64 @@ -717,16 +718,16 @@ impl<'a, Ty> FnAbi<'a, Ty> { } } - pub fn adjust_for_rust_abi(&mut self, cx: &C, abi: ExternAbi) + pub fn adjust_for_rust_abi(&mut self, cx: &C) where Ty: TyAbiInterface<'a, C> + Copy, C: HasDataLayout + HasTargetSpec, { let spec = cx.target_spec(); match &*spec.arch { - "x86" => x86::compute_rust_abi_info(cx, self, abi), - "riscv32" | "riscv64" => riscv::compute_rust_abi_info(cx, self, abi), - "loongarch64" => loongarch::compute_rust_abi_info(cx, self, abi), + "x86" => x86::compute_rust_abi_info(cx, self), + "riscv32" | "riscv64" => riscv::compute_rust_abi_info(cx, self), + "loongarch64" => loongarch::compute_rust_abi_info(cx, self), "aarch64" => aarch64::compute_rust_abi_info(cx, self), _ => {} }; @@ -850,10 +851,7 @@ impl<'a, Ty> FnAbi<'a, Ty> { // // Note that the intrinsic ABI is exempt here as those are not // real functions anyway, and the backend expects very specific types. - if abi != ExternAbi::RustIntrinsic - && spec.simd_types_indirect - && !can_pass_simd_directly(arg) - { + if spec.simd_types_indirect && !can_pass_simd_directly(arg) { arg.make_indirect(); } } diff --git a/compiler/rustc_target/src/callconv/riscv.rs b/compiler/rustc_target/src/callconv/riscv.rs index 7368e225efa74..cd1d3cd1eee05 100644 --- a/compiler/rustc_target/src/callconv/riscv.rs +++ b/compiler/rustc_target/src/callconv/riscv.rs @@ -5,8 +5,8 @@ // https://github.com/llvm/llvm-project/blob/8e780252a7284be45cf1ba224cabd884847e8e92/clang/lib/CodeGen/TargetInfo.cpp#L9311-L9773 use rustc_abi::{ - BackendRepr, ExternAbi, FieldsShape, HasDataLayout, Primitive, Reg, RegKind, Size, - TyAbiInterface, TyAndLayout, Variants, + BackendRepr, FieldsShape, HasDataLayout, Primitive, Reg, RegKind, Size, TyAbiInterface, + TyAndLayout, Variants, }; use crate::callconv::{ArgAbi, ArgExtension, CastTarget, FnAbi, PassMode, Uniform}; @@ -370,15 +370,11 @@ where } } -pub(crate) fn compute_rust_abi_info<'a, Ty, C>(cx: &C, fn_abi: &mut FnAbi<'a, Ty>, abi: ExternAbi) +pub(crate) fn compute_rust_abi_info<'a, Ty, C>(cx: &C, fn_abi: &mut FnAbi<'a, Ty>) where Ty: TyAbiInterface<'a, C> + Copy, C: HasDataLayout + HasTargetSpec, { - if abi == ExternAbi::RustIntrinsic { - return; - } - let xlen = cx.data_layout().pointer_size.bits(); for arg in fn_abi.args.iter_mut() { diff --git a/compiler/rustc_target/src/callconv/x86.rs b/compiler/rustc_target/src/callconv/x86.rs index 6f112b4940057..8328f818f9b8f 100644 --- a/compiler/rustc_target/src/callconv/x86.rs +++ b/compiler/rustc_target/src/callconv/x86.rs @@ -1,6 +1,6 @@ use rustc_abi::{ - AddressSpace, Align, BackendRepr, ExternAbi, HasDataLayout, Primitive, Reg, RegKind, - TyAbiInterface, TyAndLayout, + AddressSpace, Align, BackendRepr, HasDataLayout, Primitive, Reg, RegKind, TyAbiInterface, + TyAndLayout, }; use crate::callconv::{ArgAttribute, FnAbi, PassMode}; @@ -104,7 +104,7 @@ where let byval_align = if arg.layout.align.abi < align_4 { // (1.) align_4 - } else if t.is_like_osx && contains_vector(cx, arg.layout) { + } else if t.is_like_darwin && contains_vector(cx, arg.layout) { // (3.) align_16 } else { @@ -193,7 +193,7 @@ pub(crate) fn fill_inregs<'a, Ty, C>( } } -pub(crate) fn compute_rust_abi_info<'a, Ty, C>(cx: &C, fn_abi: &mut FnAbi<'a, Ty>, abi: ExternAbi) +pub(crate) fn compute_rust_abi_info<'a, Ty, C>(cx: &C, fn_abi: &mut FnAbi<'a, Ty>) where Ty: TyAbiInterface<'a, C> + Copy, C: HasDataLayout + HasTargetSpec, @@ -201,10 +201,7 @@ where // Avoid returning floats in x87 registers on x86 as loading and storing from x87 // registers will quiet signalling NaNs. Also avoid using SSE registers since they // are not always available (depending on target features). - if !fn_abi.ret.is_ignore() - // Intrinsics themselves are not "real" functions, so theres no need to change their ABIs. - && abi != ExternAbi::RustIntrinsic - { + if !fn_abi.ret.is_ignore() { let has_float = match fn_abi.ret.layout.backend_repr { BackendRepr::Scalar(s) => matches!(s.primitive(), Primitive::Float(_)), BackendRepr::ScalarPair(s1, s2) => { diff --git a/compiler/rustc_target/src/lib.rs b/compiler/rustc_target/src/lib.rs index a8d7da5692de4..922c18448d51a 100644 --- a/compiler/rustc_target/src/lib.rs +++ b/compiler/rustc_target/src/lib.rs @@ -9,11 +9,12 @@ // tidy-alphabetical-start #![allow(internal_features)] +#![cfg_attr(bootstrap, feature(let_chains))] #![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")] #![doc(rust_logo)] #![feature(assert_matches)] +#![feature(debug_closure_helpers)] #![feature(iter_intersperse)] -#![feature(let_chains)] #![feature(rustc_attrs)] #![feature(rustdoc_internals)] // tidy-alphabetical-end diff --git a/compiler/rustc_target/src/spec/base/apple/mod.rs b/compiler/rustc_target/src/spec/base/apple/mod.rs index 66c85146c2944..46fcd7d5c5198 100644 --- a/compiler/rustc_target/src/spec/base/apple/mod.rs +++ b/compiler/rustc_target/src/spec/base/apple/mod.rs @@ -1,9 +1,12 @@ use std::borrow::Cow; use std::env; +use std::fmt::{Display, from_fn}; +use std::num::ParseIntError; +use std::str::FromStr; use crate::spec::{ BinaryFormat, Cc, DebuginfoKind, FloatAbi, FramePointer, LinkerFlavor, Lld, RustcAbi, - SplitDebuginfo, StackProbeType, StaticCow, TargetOptions, cvs, + SplitDebuginfo, StackProbeType, StaticCow, Target, TargetOptions, cvs, }; #[cfg(test)] @@ -115,7 +118,7 @@ pub(crate) fn base( function_sections: false, dynamic_linking: true, families: cvs!["unix"], - is_like_osx: true, + is_like_darwin: true, binary_format: BinaryFormat::MachO, // LLVM notes that macOS 10.11+ and iOS 9+ default // to v4, so we do the same. @@ -222,3 +225,107 @@ fn link_env_remove(os: &'static str) -> StaticCow<[StaticCow]> { cvs!["MACOSX_DEPLOYMENT_TARGET"] } } + +/// Deployment target or SDK version. +/// +/// The size of the numbers in here are limited by Mach-O's `LC_BUILD_VERSION`. +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)] +pub struct OSVersion { + pub major: u16, + pub minor: u8, + pub patch: u8, +} + +impl FromStr for OSVersion { + type Err = ParseIntError; + + /// Parse an OS version triple (SDK version or deployment target). + fn from_str(version: &str) -> Result { + if let Some((major, minor)) = version.split_once('.') { + let major = major.parse()?; + if let Some((minor, patch)) = minor.split_once('.') { + Ok(Self { major, minor: minor.parse()?, patch: patch.parse()? }) + } else { + Ok(Self { major, minor: minor.parse()?, patch: 0 }) + } + } else { + Ok(Self { major: version.parse()?, minor: 0, patch: 0 }) + } + } +} + +impl OSVersion { + pub fn new(major: u16, minor: u8, patch: u8) -> Self { + Self { major, minor, patch } + } + + pub fn fmt_pretty(self) -> impl Display { + let Self { major, minor, patch } = self; + from_fn(move |f| { + write!(f, "{major}.{minor}")?; + if patch != 0 { + write!(f, ".{patch}")?; + } + Ok(()) + }) + } + + pub fn fmt_full(self) -> impl Display { + let Self { major, minor, patch } = self; + from_fn(move |f| write!(f, "{major}.{minor}.{patch}")) + } + + /// Minimum operating system versions currently supported by `rustc`. + pub fn os_minimum_deployment_target(os: &str) -> Self { + // When bumping a version in here, remember to update the platform-support docs too. + // + // NOTE: The defaults may change in future `rustc` versions, so if you are looking for the + // default deployment target, prefer: + // ``` + // $ rustc --print deployment-target + // ``` + let (major, minor, patch) = match os { + "macos" => (10, 12, 0), + "ios" => (10, 0, 0), + "tvos" => (10, 0, 0), + "watchos" => (5, 0, 0), + "visionos" => (1, 0, 0), + _ => unreachable!("tried to get deployment target for non-Apple platform"), + }; + Self { major, minor, patch } + } + + /// The deployment target for the given target. + /// + /// This is similar to `os_minimum_deployment_target`, except that on certain targets it makes sense + /// to raise the minimum OS version. + /// + /// This matches what LLVM does, see in part: + /// + pub fn minimum_deployment_target(target: &Target) -> Self { + let (major, minor, patch) = match (&*target.os, &*target.arch, &*target.abi) { + ("macos", "aarch64", _) => (11, 0, 0), + ("ios", "aarch64", "macabi") => (14, 0, 0), + ("ios", "aarch64", "sim") => (14, 0, 0), + ("ios", _, _) if target.llvm_target.starts_with("arm64e") => (14, 0, 0), + // Mac Catalyst defaults to 13.1 in Clang. + ("ios", _, "macabi") => (13, 1, 0), + ("tvos", "aarch64", "sim") => (14, 0, 0), + ("watchos", "aarch64", "sim") => (7, 0, 0), + (os, _, _) => return Self::os_minimum_deployment_target(os), + }; + Self { major, minor, patch } + } +} + +/// Name of the environment variable used to fetch the deployment target on the given OS. +pub fn deployment_target_env_var(os: &str) -> &'static str { + match os { + "macos" => "MACOSX_DEPLOYMENT_TARGET", + "ios" => "IPHONEOS_DEPLOYMENT_TARGET", + "watchos" => "WATCHOS_DEPLOYMENT_TARGET", + "tvos" => "TVOS_DEPLOYMENT_TARGET", + "visionos" => "XROS_DEPLOYMENT_TARGET", + _ => unreachable!("tried to get deployment target env var for non-Apple platform"), + } +} diff --git a/compiler/rustc_target/src/spec/base/apple/tests.rs b/compiler/rustc_target/src/spec/base/apple/tests.rs index 7a985ad4dc056..391f347010436 100644 --- a/compiler/rustc_target/src/spec/base/apple/tests.rs +++ b/compiler/rustc_target/src/spec/base/apple/tests.rs @@ -1,3 +1,4 @@ +use super::OSVersion; use crate::spec::targets::{ aarch64_apple_darwin, aarch64_apple_ios_sim, aarch64_apple_visionos_sim, aarch64_apple_watchos_sim, i686_apple_darwin, x86_64_apple_darwin, x86_64_apple_ios, @@ -42,3 +43,11 @@ fn macos_link_environment_unmodified() { ); } } + +#[test] +fn test_parse_version() { + assert_eq!("10".parse(), Ok(OSVersion::new(10, 0, 0))); + assert_eq!("10.12".parse(), Ok(OSVersion::new(10, 12, 0))); + assert_eq!("10.12.6".parse(), Ok(OSVersion::new(10, 12, 6))); + assert_eq!("9999.99.99".parse(), Ok(OSVersion::new(9999, 99, 99))); +} diff --git a/compiler/rustc_target/src/spec/base/linux_musl.rs b/compiler/rustc_target/src/spec/base/linux_musl.rs index 1a854fe362d50..1bef602404e56 100644 --- a/compiler/rustc_target/src/spec/base/linux_musl.rs +++ b/compiler/rustc_target/src/spec/base/linux_musl.rs @@ -1,12 +1,11 @@ use crate::spec::{LinkSelfContainedDefault, TargetOptions, base, crt_objects}; pub(crate) fn opts() -> TargetOptions { - let mut base = base::linux::opts(); - - base.env = "musl".into(); - base.pre_link_objects_self_contained = crt_objects::pre_musl_self_contained(); - base.post_link_objects_self_contained = crt_objects::post_musl_self_contained(); - base.link_self_contained = LinkSelfContainedDefault::InferredForMusl; - - base + TargetOptions { + env: "musl".into(), + pre_link_objects_self_contained: crt_objects::pre_musl_self_contained(), + post_link_objects_self_contained: crt_objects::post_musl_self_contained(), + link_self_contained: LinkSelfContainedDefault::InferredForMusl, + ..base::linux::opts() + } } diff --git a/compiler/rustc_target/src/spec/base/linux_ohos.rs b/compiler/rustc_target/src/spec/base/linux_ohos.rs index 6f4d69a996c34..1b7f1e196664f 100644 --- a/compiler/rustc_target/src/spec/base/linux_ohos.rs +++ b/compiler/rustc_target/src/spec/base/linux_ohos.rs @@ -1,12 +1,11 @@ use crate::spec::{TargetOptions, TlsModel, base}; pub(crate) fn opts() -> TargetOptions { - let mut base = base::linux::opts(); - - base.env = "ohos".into(); - base.crt_static_default = false; - base.tls_model = TlsModel::Emulated; - base.has_thread_local = false; - - base + TargetOptions { + env: "ohos".into(), + crt_static_default: false, + tls_model: TlsModel::Emulated, + has_thread_local: false, + ..base::linux::opts() + } } diff --git a/compiler/rustc_target/src/spec/base/lynxos178.rs b/compiler/rustc_target/src/spec/base/lynxos178.rs new file mode 100644 index 0000000000000..b9434ff5faaf6 --- /dev/null +++ b/compiler/rustc_target/src/spec/base/lynxos178.rs @@ -0,0 +1,31 @@ +use std::borrow::Cow; + +use crate::spec::{ + PanicStrategy, RelocModel, RelroLevel, SplitDebuginfo, StackProbeType, TargetOptions, cvs, +}; + +pub(crate) fn opts() -> TargetOptions { + TargetOptions { + os: "lynxos178".into(), + dynamic_linking: false, + families: cvs!["unix"], + position_independent_executables: false, + static_position_independent_executables: false, + relro_level: RelroLevel::Full, + has_thread_local: false, + crt_static_respected: true, + panic_strategy: PanicStrategy::Abort, + linker: Some(Cow::Borrowed("x86_64-lynx-lynxos178-gcc")), + no_default_libraries: false, + eh_frame_header: false, // GNU ld (GNU Binutils) 2.37.50 does not support --eh-frame-hdr + max_atomic_width: Some(64), + supported_split_debuginfo: Cow::Borrowed(&[ + SplitDebuginfo::Packed, + SplitDebuginfo::Unpacked, + SplitDebuginfo::Off, + ]), + relocation_model: RelocModel::Static, + stack_probes: StackProbeType::Inline, + ..Default::default() + } +} diff --git a/compiler/rustc_target/src/spec/base/mod.rs b/compiler/rustc_target/src/spec/base/mod.rs index e8fdc87178539..b368d93f00726 100644 --- a/compiler/rustc_target/src/spec/base/mod.rs +++ b/compiler/rustc_target/src/spec/base/mod.rs @@ -1,6 +1,6 @@ pub(crate) mod aix; pub(crate) mod android; -pub(crate) mod apple; +pub mod apple; pub(crate) mod avr; pub(crate) mod bpf; pub(crate) mod cygwin; @@ -19,6 +19,7 @@ pub(crate) mod linux_musl; pub(crate) mod linux_ohos; pub(crate) mod linux_uclibc; pub(crate) mod linux_wasm; +pub(crate) mod lynxos178; pub(crate) mod msvc; pub(crate) mod netbsd; pub(crate) mod nto_qnx; diff --git a/compiler/rustc_target/src/spec/base/msvc.rs b/compiler/rustc_target/src/spec/base/msvc.rs index 486d7158723f8..bd59678d23665 100644 --- a/compiler/rustc_target/src/spec/base/msvc.rs +++ b/compiler/rustc_target/src/spec/base/msvc.rs @@ -5,7 +5,19 @@ use crate::spec::{BinaryFormat, DebuginfoKind, LinkerFlavor, Lld, SplitDebuginfo pub(crate) fn opts() -> TargetOptions { // Suppress the verbose logo and authorship debugging output, which would needlessly // clog any log files. - let pre_link_args = TargetOptions::link_args(LinkerFlavor::Msvc(Lld::No), &["/NOLOGO"]); + let pre_link_args = TargetOptions::link_args( + LinkerFlavor::Msvc(Lld::No), + &[ + "/NOLOGO", + // "Symbol is marked as dllimport, but defined in an object file" + // Harmless warning that flags a potential performance improvement: marking a symbol as + // dllimport indirects usage via the `__imp_` symbol, which isn't required if the symbol + // is in the current binary. This is tripped by __rust_no_alloc_shim_is_unstable as it + // is generated by the compiler, but marked as a foreign item (hence the dllimport) in + // the standard library. + "/IGNORE:4286", + ], + ); TargetOptions { linker_flavor: LinkerFlavor::Msvc(Lld::No), diff --git a/compiler/rustc_target/src/spec/json.rs b/compiler/rustc_target/src/spec/json.rs index 4b6de5e18f50f..be71da76b4a35 100644 --- a/compiler/rustc_target/src/spec/json.rs +++ b/compiler/rustc_target/src/spec/json.rs @@ -598,7 +598,7 @@ impl Target { key!(families, target_families); key!(abi_return_struct_as_int, bool); key!(is_like_aix, bool); - key!(is_like_osx, bool); + key!(is_like_darwin, bool); key!(is_like_solaris, bool); key!(is_like_windows, bool); key!(is_like_msvc, bool); @@ -777,7 +777,7 @@ impl ToJson for Target { target_option_val!(families, "target-family"); target_option_val!(abi_return_struct_as_int); target_option_val!(is_like_aix); - target_option_val!(is_like_osx); + target_option_val!(is_like_darwin); target_option_val!(is_like_solaris); target_option_val!(is_like_windows); target_option_val!(is_like_msvc); diff --git a/compiler/rustc_target/src/spec/mod.rs b/compiler/rustc_target/src/spec/mod.rs index 7234d1dc63ee1..303be54a6d786 100644 --- a/compiler/rustc_target/src/spec/mod.rs +++ b/compiler/rustc_target/src/spec/mod.rs @@ -42,7 +42,9 @@ use std::path::{Path, PathBuf}; use std::str::FromStr; use std::{fmt, io}; -use rustc_abi::{Endian, ExternAbi, Integer, Size, TargetDataLayout, TargetDataLayoutErrors}; +use rustc_abi::{ + Align, Endian, ExternAbi, Integer, Size, TargetDataLayout, TargetDataLayoutErrors, +}; use rustc_data_structures::fx::{FxHashSet, FxIndexSet}; use rustc_fs_util::try_canonicalize; use rustc_macros::{Decodable, Encodable, HashStable_Generic}; @@ -60,6 +62,7 @@ pub mod crt_objects; mod base; mod json; +pub use base::apple; pub use base::avr::ef_avr_arch; /// Linker is called through a C/C++ compiler. @@ -81,7 +84,7 @@ pub enum Lld { /// of classes that we call "linker flavors". /// /// Technically, it's not even necessary, we can nearly always infer the flavor from linker name -/// and target properties like `is_like_windows`/`is_like_osx`/etc. However, the PRs originally +/// and target properties like `is_like_windows`/`is_like_darwin`/etc. However, the PRs originally /// introducing `-Clinker-flavor` (#40018 and friends) were aiming to reduce this kind of inference /// and provide something certain and explicitly specified instead, and that design goal is still /// relevant now. @@ -2077,6 +2080,7 @@ supported_targets! { ("riscv32imafc-unknown-nuttx-elf", riscv32imafc_unknown_nuttx_elf), ("riscv64imac-unknown-nuttx-elf", riscv64imac_unknown_nuttx_elf), ("riscv64gc-unknown-nuttx-elf", riscv64gc_unknown_nuttx_elf), + ("x86_64-lynx-lynxos178", x86_64_lynx_lynxos178), ("x86_64-pc-cygwin", x86_64_pc_cygwin), } @@ -2406,7 +2410,7 @@ pub struct TargetOptions { /// in particular running dsymutil and some other stuff like `-dead_strip`. Defaults to false. /// Also indicates whether to use Apple-specific ABI changes, such as extending function /// parameters to 32-bits. - pub is_like_osx: bool, + pub is_like_darwin: bool, /// Whether the target toolchain is like Solaris's. /// Only useful for compiling against Illumos/Solaris, /// as they have a different set of linker flags. Defaults to false. @@ -2700,7 +2704,7 @@ fn add_link_args(link_args: &mut LinkArgs, flavor: LinkerFlavor, args: &[&'stati impl TargetOptions { pub fn supports_comdat(&self) -> bool { // XCOFF and MachO don't support COMDAT. - !self.is_like_aix && !self.is_like_osx + !self.is_like_aix && !self.is_like_darwin } } @@ -2804,7 +2808,7 @@ impl Default for TargetOptions { families: cvs![], abi_return_struct_as_int: false, is_like_aix: false, - is_like_osx: false, + is_like_darwin: false, is_like_solaris: false, is_like_windows: false, is_like_msvc: false, @@ -2961,14 +2965,9 @@ impl Target { pub fn is_abi_supported(&self, abi: ExternAbi) -> bool { use ExternAbi::*; match abi { - Rust - | C { .. } - | System { .. } - | RustIntrinsic - | RustCall - | Unadjusted - | Cdecl { .. } - | RustCold => true, + Rust | C { .. } | System { .. } | RustCall | Unadjusted | Cdecl { .. } | RustCold => { + true + } EfiApi => { ["arm", "aarch64", "riscv32", "riscv64", "x86", "x86_64"].contains(&&self.arch[..]) } @@ -3070,9 +3069,9 @@ impl Target { } check_eq!( - self.is_like_osx, + self.is_like_darwin, self.vendor == "apple", - "`is_like_osx` must be set if and only if `vendor` is `apple`" + "`is_like_darwin` must be set if and only if `vendor` is `apple`" ); check_eq!( self.is_like_solaris, @@ -3098,9 +3097,9 @@ impl Target { // Check that default linker flavor is compatible with some other key properties. check_eq!( - self.is_like_osx, + self.is_like_darwin, matches!(self.linker_flavor, LinkerFlavor::Darwin(..)), - "`linker_flavor` must be `darwin` if and only if `is_like_osx` is set" + "`linker_flavor` must be `darwin` if and only if `is_like_darwin` is set" ); check_eq!( self.is_like_msvc, @@ -3516,7 +3515,7 @@ impl Target { Err("the `i586-pc-windows-msvc` target has been removed. Use the `i686-pc-windows-msvc` target instead.\n\ Windows 10 (the minimum required OS version) requires a CPU baseline of at least i686 so you can safely switch".into()) } else { - Err(format!("Could not find specification for target {target_tuple:?}")) + Err(format!("could not find specification for target {target_tuple:?}")) } } TargetTuple::TargetJson { ref contents, .. } => { @@ -3568,7 +3567,19 @@ impl Target { "x86" => (Architecture::I386, None), "s390x" => (Architecture::S390x, None), "mips" | "mips32r6" => (Architecture::Mips, None), - "mips64" | "mips64r6" => (Architecture::Mips64, None), + "mips64" | "mips64r6" => ( + // While there are currently no builtin targets + // using the N32 ABI, it is possible to specify + // it using a custom target specification. N32 + // is an ILP32 ABI like the Aarch64_Ilp32 + // and X86_64_X32 cases above and below this one. + if self.options.llvm_abiname.as_ref() == "n32" { + Architecture::Mips64_N32 + } else { + Architecture::Mips64 + }, + None, + ), "x86_64" => ( if self.pointer_width == 32 { Architecture::X86_64_X32 @@ -3602,6 +3613,25 @@ impl Target { _ => return None, }) } + + /// Returns whether this target is known to have unreliable alignment: + /// native C code for the target fails to align some data to the degree + /// required by the C standard. We can't *really* do anything about that + /// since unsafe Rust code may assume alignment any time, but we can at least + /// inhibit some optimizations, and we suppress the alignment checks that + /// would detect this unsoundness. + /// + /// Every target that returns less than `Align::MAX` here is still has a soundness bug. + pub fn max_reliable_alignment(&self) -> Align { + // FIXME(#112480) MSVC on x86-32 is unsound and fails to properly align many types with + // more-than-4-byte-alignment on the stack. This makes alignments larger than 4 generally + // unreliable on 32bit Windows. + if self.is_like_windows && self.arch == "x86" { + Align::from_bytes(4).unwrap() + } else { + Align::MAX + } + } } /// Either a target tuple string or a path to a JSON file. diff --git a/compiler/rustc_target/src/spec/targets/aarch64_pc_windows_msvc.rs b/compiler/rustc_target/src/spec/targets/aarch64_pc_windows_msvc.rs index 98d78520c9838..c5704c574483f 100644 --- a/compiler/rustc_target/src/spec/targets/aarch64_pc_windows_msvc.rs +++ b/compiler/rustc_target/src/spec/targets/aarch64_pc_windows_msvc.rs @@ -1,10 +1,21 @@ -use crate::spec::{Target, TargetMetadata, base}; +use crate::spec::{FramePointer, LinkerFlavor, Lld, Target, TargetMetadata, base}; pub(crate) fn target() -> Target { let mut base = base::windows_msvc::opts(); base.max_atomic_width = Some(128); base.features = "+v8a,+neon,+fp-armv8".into(); + // Microsoft recommends enabling frame pointers on Arm64 Windows. + // From https://learn.microsoft.com/en-us/cpp/build/arm64-windows-abi-conventions?view=msvc-170#integer-registers + // "The frame pointer (x29) is required for compatibility with fast stack walking used by ETW + // and other services. It must point to the previous {x29, x30} pair on the stack." + base.frame_pointer = FramePointer::NonLeaf; + + // MSVC emits a warning about code that may trip "Cortex-A53 MPCore processor bug #843419" (see + // https://developer.arm.com/documentation/epm048406/latest) which is sometimes emitted by LLVM. + // Since Arm64 Windows 10+ isn't supported on that processor, it's safe to disable the warning. + base.add_pre_link_args(LinkerFlavor::Msvc(Lld::No), &["/arm64hazardfree"]); + Target { llvm_target: "aarch64-pc-windows-msvc".into(), metadata: TargetMetadata { diff --git a/compiler/rustc_target/src/spec/targets/armv5te_unknown_linux_gnueabi.rs b/compiler/rustc_target/src/spec/targets/armv5te_unknown_linux_gnueabi.rs index 52e786de3ed98..7b93672dbe0fa 100644 --- a/compiler/rustc_target/src/spec/targets/armv5te_unknown_linux_gnueabi.rs +++ b/compiler/rustc_target/src/spec/targets/armv5te_unknown_linux_gnueabi.rs @@ -20,6 +20,7 @@ pub(crate) fn target() -> Target { max_atomic_width: Some(32), mcount: "\u{1}__gnu_mcount_nc".into(), has_thumb_interworking: true, + llvm_mcount_intrinsic: Some("llvm.arm.gnu.eabi.mcount".into()), ..base::linux_gnu::opts() }, } diff --git a/compiler/rustc_target/src/spec/targets/armv7_sony_vita_newlibeabihf.rs b/compiler/rustc_target/src/spec/targets/armv7_sony_vita_newlibeabihf.rs index 5d292bbf8adf0..6a83835059eee 100644 --- a/compiler/rustc_target/src/spec/targets/armv7_sony_vita_newlibeabihf.rs +++ b/compiler/rustc_target/src/spec/targets/armv7_sony_vita_newlibeabihf.rs @@ -15,7 +15,7 @@ pub(crate) fn target() -> Target { ); Target { - llvm_target: "thumbv7a-vita-eabihf".into(), + llvm_target: "thumbv7a-sony-vita-eabihf".into(), metadata: TargetMetadata { description: Some( "Armv7-A Cortex-A9 Sony PlayStation Vita (requires VITASDK toolchain)".into(), diff --git a/compiler/rustc_target/src/spec/targets/armv7_unknown_linux_gnueabihf.rs b/compiler/rustc_target/src/spec/targets/armv7_unknown_linux_gnueabihf.rs index 3b5a337b4f139..a3b35d658e9da 100644 --- a/compiler/rustc_target/src/spec/targets/armv7_unknown_linux_gnueabihf.rs +++ b/compiler/rustc_target/src/spec/targets/armv7_unknown_linux_gnueabihf.rs @@ -22,6 +22,7 @@ pub(crate) fn target() -> Target { features: "+v7,+vfp3,-d32,+thumb2,-neon".into(), max_atomic_width: Some(64), mcount: "\u{1}__gnu_mcount_nc".into(), + llvm_mcount_intrinsic: Some("llvm.arm.gnu.eabi.mcount".into()), ..base::linux_gnu::opts() }, } diff --git a/compiler/rustc_target/src/spec/targets/i686_pc_windows_gnu.rs b/compiler/rustc_target/src/spec/targets/i686_pc_windows_gnu.rs index 2a26323e5147b..e775c8fc524c4 100644 --- a/compiler/rustc_target/src/spec/targets/i686_pc_windows_gnu.rs +++ b/compiler/rustc_target/src/spec/targets/i686_pc_windows_gnu.rs @@ -20,7 +20,7 @@ pub(crate) fn target() -> Target { llvm_target: "i686-pc-windows-gnu".into(), metadata: TargetMetadata { description: Some("32-bit MinGW (Windows 10+)".into()), - tier: Some(1), + tier: Some(2), host_tools: Some(true), std: Some(true), }, diff --git a/compiler/rustc_target/src/spec/targets/i686_win7_windows_msvc.rs b/compiler/rustc_target/src/spec/targets/i686_win7_windows_msvc.rs index 233a1c4fd7a54..91ab311109787 100644 --- a/compiler/rustc_target/src/spec/targets/i686_win7_windows_msvc.rs +++ b/compiler/rustc_target/src/spec/targets/i686_win7_windows_msvc.rs @@ -7,6 +7,12 @@ pub(crate) fn target() -> Target { base.cpu = "pentium4".into(); base.max_atomic_width = Some(64); base.supported_sanitizers = SanitizerSet::ADDRESS; + // On Windows 7 32-bit, the alignment characteristic of the TLS Directory + // don't appear to be respected by the PE Loader, leading to crashes. As + // a result, let's disable has_thread_local to make sure TLS goes through + // the emulation layer. + // See https://github.com/rust-lang/rust/issues/138903 + base.has_thread_local = false; base.add_pre_link_args( LinkerFlavor::Msvc(Lld::No), diff --git a/compiler/rustc_target/src/spec/targets/mips64_openwrt_linux_musl.rs b/compiler/rustc_target/src/spec/targets/mips64_openwrt_linux_musl.rs index 71b3fbe00b2fe..508abc0101841 100644 --- a/compiler/rustc_target/src/spec/targets/mips64_openwrt_linux_musl.rs +++ b/compiler/rustc_target/src/spec/targets/mips64_openwrt_linux_musl.rs @@ -27,6 +27,7 @@ pub(crate) fn target() -> Target { abi: "abi64".into(), endian: Endian::Big, mcount: "_mcount".into(), + llvm_abiname: "n64".into(), ..base }, } diff --git a/compiler/rustc_target/src/spec/targets/mips64_unknown_linux_gnuabi64.rs b/compiler/rustc_target/src/spec/targets/mips64_unknown_linux_gnuabi64.rs index b130ca29c7f03..a26350ff22509 100644 --- a/compiler/rustc_target/src/spec/targets/mips64_unknown_linux_gnuabi64.rs +++ b/compiler/rustc_target/src/spec/targets/mips64_unknown_linux_gnuabi64.rs @@ -22,6 +22,7 @@ pub(crate) fn target() -> Target { features: "+mips64r2,+xgot".into(), max_atomic_width: Some(64), mcount: "_mcount".into(), + llvm_abiname: "n64".into(), ..base::linux_gnu::opts() }, diff --git a/compiler/rustc_target/src/spec/targets/mips64_unknown_linux_muslabi64.rs b/compiler/rustc_target/src/spec/targets/mips64_unknown_linux_muslabi64.rs index 4ea7c7bff44a6..fd50950305300 100644 --- a/compiler/rustc_target/src/spec/targets/mips64_unknown_linux_muslabi64.rs +++ b/compiler/rustc_target/src/spec/targets/mips64_unknown_linux_muslabi64.rs @@ -25,6 +25,7 @@ pub(crate) fn target() -> Target { mcount: "_mcount".into(), // FIXME(compiler-team#422): musl targets should be dynamically linked by default. crt_static_default: true, + llvm_abiname: "n64".into(), ..base }, } diff --git a/compiler/rustc_target/src/spec/targets/mips64el_unknown_linux_gnuabi64.rs b/compiler/rustc_target/src/spec/targets/mips64el_unknown_linux_gnuabi64.rs index a9afea27ef340..19bceadc62232 100644 --- a/compiler/rustc_target/src/spec/targets/mips64el_unknown_linux_gnuabi64.rs +++ b/compiler/rustc_target/src/spec/targets/mips64el_unknown_linux_gnuabi64.rs @@ -19,6 +19,7 @@ pub(crate) fn target() -> Target { features: "+mips64r2,+xgot".into(), max_atomic_width: Some(64), mcount: "_mcount".into(), + llvm_abiname: "n64".into(), ..base::linux_gnu::opts() }, diff --git a/compiler/rustc_target/src/spec/targets/mips64el_unknown_linux_muslabi64.rs b/compiler/rustc_target/src/spec/targets/mips64el_unknown_linux_muslabi64.rs index 7bdd9edda70cd..aa087b1a35af8 100644 --- a/compiler/rustc_target/src/spec/targets/mips64el_unknown_linux_muslabi64.rs +++ b/compiler/rustc_target/src/spec/targets/mips64el_unknown_linux_muslabi64.rs @@ -19,6 +19,11 @@ pub(crate) fn target() -> Target { pointer_width: 64, data_layout: "e-m:e-i8:8:32-i16:16:32-i64:64-i128:128-n32:64-S128".into(), arch: "mips64".into(), - options: TargetOptions { abi: "abi64".into(), mcount: "_mcount".into(), ..base }, + options: TargetOptions { + abi: "abi64".into(), + mcount: "_mcount".into(), + llvm_abiname: "n64".into(), + ..base + }, } } diff --git a/compiler/rustc_target/src/spec/targets/mipsisa64r6_unknown_linux_gnuabi64.rs b/compiler/rustc_target/src/spec/targets/mipsisa64r6_unknown_linux_gnuabi64.rs index 3eefa27ea04b6..cdd5f6b84365a 100644 --- a/compiler/rustc_target/src/spec/targets/mipsisa64r6_unknown_linux_gnuabi64.rs +++ b/compiler/rustc_target/src/spec/targets/mipsisa64r6_unknown_linux_gnuabi64.rs @@ -22,6 +22,7 @@ pub(crate) fn target() -> Target { features: "+mips64r6".into(), max_atomic_width: Some(64), mcount: "_mcount".into(), + llvm_abiname: "n64".into(), ..base::linux_gnu::opts() }, diff --git a/compiler/rustc_target/src/spec/targets/mipsisa64r6el_unknown_linux_gnuabi64.rs b/compiler/rustc_target/src/spec/targets/mipsisa64r6el_unknown_linux_gnuabi64.rs index 0887180791c71..88879a25818b7 100644 --- a/compiler/rustc_target/src/spec/targets/mipsisa64r6el_unknown_linux_gnuabi64.rs +++ b/compiler/rustc_target/src/spec/targets/mipsisa64r6el_unknown_linux_gnuabi64.rs @@ -19,6 +19,7 @@ pub(crate) fn target() -> Target { features: "+mips64r6".into(), max_atomic_width: Some(64), mcount: "_mcount".into(), + llvm_abiname: "n64".into(), ..base::linux_gnu::opts() }, diff --git a/compiler/rustc_target/src/spec/targets/riscv32_wrs_vxworks.rs b/compiler/rustc_target/src/spec/targets/riscv32_wrs_vxworks.rs index 8a4bc58e546e8..efc17d8d083b7 100644 --- a/compiler/rustc_target/src/spec/targets/riscv32_wrs_vxworks.rs +++ b/compiler/rustc_target/src/spec/targets/riscv32_wrs_vxworks.rs @@ -16,7 +16,7 @@ pub(crate) fn target() -> Target { cpu: "generic-rv32".into(), llvm_abiname: "ilp32d".into(), max_atomic_width: Some(32), - features: "+m,+a,+f,+d,+c".into(), + features: "+m,+a,+f,+d,+c,+zicsr,+zifencei".into(), stack_probes: StackProbeType::Inline, ..base::vxworks::opts() }, diff --git a/compiler/rustc_target/src/spec/targets/riscv32gc_unknown_linux_gnu.rs b/compiler/rustc_target/src/spec/targets/riscv32gc_unknown_linux_gnu.rs index 6dda346aaaf56..5b7feef70d099 100644 --- a/compiler/rustc_target/src/spec/targets/riscv32gc_unknown_linux_gnu.rs +++ b/compiler/rustc_target/src/spec/targets/riscv32gc_unknown_linux_gnu.rs @@ -17,7 +17,7 @@ pub(crate) fn target() -> Target { options: TargetOptions { code_model: Some(CodeModel::Medium), cpu: "generic-rv32".into(), - features: "+m,+a,+f,+d,+c".into(), + features: "+m,+a,+f,+d,+c,+zicsr,+zifencei".into(), llvm_abiname: "ilp32d".into(), max_atomic_width: Some(32), supported_split_debuginfo: Cow::Borrowed(&[SplitDebuginfo::Off]), diff --git a/compiler/rustc_target/src/spec/targets/riscv32gc_unknown_linux_musl.rs b/compiler/rustc_target/src/spec/targets/riscv32gc_unknown_linux_musl.rs index ba10e3c688184..938b39b10c64e 100644 --- a/compiler/rustc_target/src/spec/targets/riscv32gc_unknown_linux_musl.rs +++ b/compiler/rustc_target/src/spec/targets/riscv32gc_unknown_linux_musl.rs @@ -19,7 +19,7 @@ pub(crate) fn target() -> Target { options: TargetOptions { code_model: Some(CodeModel::Medium), cpu: "generic-rv32".into(), - features: "+m,+a,+f,+d,+c".into(), + features: "+m,+a,+f,+d,+c,+zicsr,+zifencei".into(), llvm_abiname: "ilp32d".into(), max_atomic_width: Some(32), supported_split_debuginfo: Cow::Borrowed(&[SplitDebuginfo::Off]), diff --git a/compiler/rustc_target/src/spec/targets/riscv64_linux_android.rs b/compiler/rustc_target/src/spec/targets/riscv64_linux_android.rs index c8ef737b9e73b..b9176c939f805 100644 --- a/compiler/rustc_target/src/spec/targets/riscv64_linux_android.rs +++ b/compiler/rustc_target/src/spec/targets/riscv64_linux_android.rs @@ -19,7 +19,7 @@ pub(crate) fn target() -> Target { options: TargetOptions { code_model: Some(CodeModel::Medium), cpu: "generic-rv64".into(), - features: "+m,+a,+f,+d,+c,+zba,+zbb,+zbs,+v".into(), + features: "+m,+a,+f,+d,+c,+b,+v,+zicsr,+zifencei".into(), llvm_abiname: "lp64d".into(), supported_sanitizers: SanitizerSet::ADDRESS, max_atomic_width: Some(64), diff --git a/compiler/rustc_target/src/spec/targets/riscv64_wrs_vxworks.rs b/compiler/rustc_target/src/spec/targets/riscv64_wrs_vxworks.rs index 39aa70035e4a7..8d8c21952de5f 100644 --- a/compiler/rustc_target/src/spec/targets/riscv64_wrs_vxworks.rs +++ b/compiler/rustc_target/src/spec/targets/riscv64_wrs_vxworks.rs @@ -16,7 +16,7 @@ pub(crate) fn target() -> Target { cpu: "generic-rv64".into(), llvm_abiname: "lp64d".into(), max_atomic_width: Some(64), - features: "+m,+a,+f,+d,+c".into(), + features: "+m,+a,+f,+d,+c,+zicsr,+zifencei".into(), stack_probes: StackProbeType::Inline, ..base::vxworks::opts() }, diff --git a/compiler/rustc_target/src/spec/targets/riscv64gc_unknown_freebsd.rs b/compiler/rustc_target/src/spec/targets/riscv64gc_unknown_freebsd.rs index ecf6567753111..e628095b88a6d 100644 --- a/compiler/rustc_target/src/spec/targets/riscv64gc_unknown_freebsd.rs +++ b/compiler/rustc_target/src/spec/targets/riscv64gc_unknown_freebsd.rs @@ -15,7 +15,7 @@ pub(crate) fn target() -> Target { options: TargetOptions { code_model: Some(CodeModel::Medium), cpu: "generic-rv64".into(), - features: "+m,+a,+f,+d,+c".into(), + features: "+m,+a,+f,+d,+c,+zicsr,+zifencei".into(), llvm_abiname: "lp64d".into(), max_atomic_width: Some(64), ..base::freebsd::opts() diff --git a/compiler/rustc_target/src/spec/targets/riscv64gc_unknown_fuchsia.rs b/compiler/rustc_target/src/spec/targets/riscv64gc_unknown_fuchsia.rs index e260237ca7764..c4466e13d1439 100644 --- a/compiler/rustc_target/src/spec/targets/riscv64gc_unknown_fuchsia.rs +++ b/compiler/rustc_target/src/spec/targets/riscv64gc_unknown_fuchsia.rs @@ -4,7 +4,7 @@ pub(crate) fn target() -> Target { let mut base = base::fuchsia::opts(); base.code_model = Some(CodeModel::Medium); base.cpu = "generic-rv64".into(); - base.features = "+m,+a,+f,+d,+c".into(); + base.features = "+m,+a,+f,+d,+c,+zicsr,+zifencei".into(); base.llvm_abiname = "lp64d".into(); base.max_atomic_width = Some(64); base.stack_probes = StackProbeType::Inline; diff --git a/compiler/rustc_target/src/spec/targets/riscv64gc_unknown_hermit.rs b/compiler/rustc_target/src/spec/targets/riscv64gc_unknown_hermit.rs index 88b5dca284ae9..5c15bdd9f6454 100644 --- a/compiler/rustc_target/src/spec/targets/riscv64gc_unknown_hermit.rs +++ b/compiler/rustc_target/src/spec/targets/riscv64gc_unknown_hermit.rs @@ -14,7 +14,7 @@ pub(crate) fn target() -> Target { data_layout: "e-m:e-p:64:64-i64:64-i128:128-n32:64-S128".into(), options: TargetOptions { cpu: "generic-rv64".into(), - features: "+m,+a,+f,+d,+c".into(), + features: "+m,+a,+f,+d,+c,+zicsr,+zifencei".into(), relocation_model: RelocModel::Pic, code_model: Some(CodeModel::Medium), tls_model: TlsModel::LocalExec, diff --git a/compiler/rustc_target/src/spec/targets/riscv64gc_unknown_linux_gnu.rs b/compiler/rustc_target/src/spec/targets/riscv64gc_unknown_linux_gnu.rs index 8ffb622511db0..af2f42fa00a2f 100644 --- a/compiler/rustc_target/src/spec/targets/riscv64gc_unknown_linux_gnu.rs +++ b/compiler/rustc_target/src/spec/targets/riscv64gc_unknown_linux_gnu.rs @@ -17,7 +17,7 @@ pub(crate) fn target() -> Target { options: TargetOptions { code_model: Some(CodeModel::Medium), cpu: "generic-rv64".into(), - features: "+m,+a,+f,+d,+c".into(), + features: "+m,+a,+f,+d,+c,+zicsr,+zifencei".into(), llvm_abiname: "lp64d".into(), max_atomic_width: Some(64), supported_split_debuginfo: Cow::Borrowed(&[SplitDebuginfo::Off]), diff --git a/compiler/rustc_target/src/spec/targets/riscv64gc_unknown_linux_musl.rs b/compiler/rustc_target/src/spec/targets/riscv64gc_unknown_linux_musl.rs index 33b08fdcb0510..70c19952af063 100644 --- a/compiler/rustc_target/src/spec/targets/riscv64gc_unknown_linux_musl.rs +++ b/compiler/rustc_target/src/spec/targets/riscv64gc_unknown_linux_musl.rs @@ -17,7 +17,7 @@ pub(crate) fn target() -> Target { options: TargetOptions { code_model: Some(CodeModel::Medium), cpu: "generic-rv64".into(), - features: "+m,+a,+f,+d,+c".into(), + features: "+m,+a,+f,+d,+c,+zicsr,+zifencei".into(), llvm_abiname: "lp64d".into(), max_atomic_width: Some(64), supported_split_debuginfo: Cow::Borrowed(&[SplitDebuginfo::Off]), diff --git a/compiler/rustc_target/src/spec/targets/riscv64gc_unknown_netbsd.rs b/compiler/rustc_target/src/spec/targets/riscv64gc_unknown_netbsd.rs index 2b647e36f18a8..1f359d1e7fe6d 100644 --- a/compiler/rustc_target/src/spec/targets/riscv64gc_unknown_netbsd.rs +++ b/compiler/rustc_target/src/spec/targets/riscv64gc_unknown_netbsd.rs @@ -15,7 +15,7 @@ pub(crate) fn target() -> Target { options: TargetOptions { code_model: Some(CodeModel::Medium), cpu: "generic-rv64".into(), - features: "+m,+a,+f,+d,+c".into(), + features: "+m,+a,+f,+d,+c,+zicsr,+zifencei".into(), llvm_abiname: "lp64d".into(), max_atomic_width: Some(64), mcount: "__mcount".into(), diff --git a/compiler/rustc_target/src/spec/targets/riscv64gc_unknown_none_elf.rs b/compiler/rustc_target/src/spec/targets/riscv64gc_unknown_none_elf.rs index d6f0a5499b99c..5a5aad93efba1 100644 --- a/compiler/rustc_target/src/spec/targets/riscv64gc_unknown_none_elf.rs +++ b/compiler/rustc_target/src/spec/targets/riscv64gc_unknown_none_elf.rs @@ -22,7 +22,7 @@ pub(crate) fn target() -> Target { llvm_abiname: "lp64d".into(), cpu: "generic-rv64".into(), max_atomic_width: Some(64), - features: "+m,+a,+f,+d,+c".into(), + features: "+m,+a,+f,+d,+c,+zicsr,+zifencei".into(), panic_strategy: PanicStrategy::Abort, relocation_model: RelocModel::Static, code_model: Some(CodeModel::Medium), diff --git a/compiler/rustc_target/src/spec/targets/riscv64gc_unknown_nuttx_elf.rs b/compiler/rustc_target/src/spec/targets/riscv64gc_unknown_nuttx_elf.rs index bc6829897a423..e8abc926dd0ed 100644 --- a/compiler/rustc_target/src/spec/targets/riscv64gc_unknown_nuttx_elf.rs +++ b/compiler/rustc_target/src/spec/targets/riscv64gc_unknown_nuttx_elf.rs @@ -24,7 +24,7 @@ pub(crate) fn target() -> Target { llvm_abiname: "lp64d".into(), cpu: "generic-rv64".into(), max_atomic_width: Some(64), - features: "+m,+a,+f,+d,+c".into(), + features: "+m,+a,+f,+d,+c,+zicsr,+zifencei".into(), panic_strategy: PanicStrategy::Abort, relocation_model: RelocModel::Static, code_model: Some(CodeModel::Medium), diff --git a/compiler/rustc_target/src/spec/targets/riscv64gc_unknown_openbsd.rs b/compiler/rustc_target/src/spec/targets/riscv64gc_unknown_openbsd.rs index 75f508d8e933b..85d7dfe7865ed 100644 --- a/compiler/rustc_target/src/spec/targets/riscv64gc_unknown_openbsd.rs +++ b/compiler/rustc_target/src/spec/targets/riscv64gc_unknown_openbsd.rs @@ -15,7 +15,7 @@ pub(crate) fn target() -> Target { options: TargetOptions { code_model: Some(CodeModel::Medium), cpu: "generic-rv64".into(), - features: "+m,+a,+f,+d,+c".into(), + features: "+m,+a,+f,+d,+c,+zicsr,+zifencei".into(), llvm_abiname: "lp64d".into(), max_atomic_width: Some(64), ..base::openbsd::opts() diff --git a/compiler/rustc_target/src/spec/targets/x86_64_lynx_lynxos178.rs b/compiler/rustc_target/src/spec/targets/x86_64_lynx_lynxos178.rs new file mode 100644 index 0000000000000..654ae7c9c5bea --- /dev/null +++ b/compiler/rustc_target/src/spec/targets/x86_64_lynx_lynxos178.rs @@ -0,0 +1,34 @@ +use crate::spec::{SanitizerSet, StackProbeType, Target, base}; + +pub(crate) fn target() -> Target { + let mut base = base::lynxos178::opts(); + base.cpu = "x86-64".into(); + base.plt_by_default = false; + base.max_atomic_width = Some(64); + base.stack_probes = StackProbeType::Inline; + base.static_position_independent_executables = false; + base.supported_sanitizers = SanitizerSet::ADDRESS + | SanitizerSet::CFI + | SanitizerSet::KCFI + | SanitizerSet::DATAFLOW + | SanitizerSet::LEAK + | SanitizerSet::MEMORY + | SanitizerSet::SAFESTACK + | SanitizerSet::THREAD; + base.supports_xray = true; + + Target { + llvm_target: "x86_64-unknown-unknown-gnu".into(), + metadata: crate::spec::TargetMetadata { + description: Some("LynxOS-178".into()), + tier: Some(3), + host_tools: Some(false), + std: Some(false), + }, + pointer_width: 64, + data_layout: + "e-m:e-p270:32:32-p271:32:32-p272:64:64-i64:64-i128:128-f80:128-n8:16:32:64-S128".into(), + arch: "x86_64".into(), + options: base, + } +} diff --git a/compiler/rustc_target/src/target_features.rs b/compiler/rustc_target/src/target_features.rs index 0e6523f0880e8..5a21925ba04e7 100644 --- a/compiler/rustc_target/src/target_features.rs +++ b/compiler/rustc_target/src/target_features.rs @@ -102,6 +102,9 @@ impl Stability { // check whether they're named already elsewhere in rust // e.g. in stdarch and whether the given name matches LLVM's // if it doesn't, to_llvm_feature in llvm_util in rustc_codegen_llvm needs to be adapted. +// Additionally, if the feature is not available in older version of LLVM supported by the current +// rust, the same function must be updated to filter out these features to avoid triggering +// warnings. // // Also note that all target features listed here must be purely additive: for target_feature 1.1 to // be sound, we can never allow features like `+soft-float` (on x86) to be controlled on a @@ -380,12 +383,38 @@ static X86_FEATURES: &[(&str, Stability, ImpliedFeatures)] = &[ // tidy-alphabetical-start ("adx", Stable, &[]), ("aes", Stable, &["sse2"]), + ("amx-avx512", Unstable(sym::x86_amx_intrinsics), &["amx-tile"]), ("amx-bf16", Unstable(sym::x86_amx_intrinsics), &["amx-tile"]), ("amx-complex", Unstable(sym::x86_amx_intrinsics), &["amx-tile"]), ("amx-fp16", Unstable(sym::x86_amx_intrinsics), &["amx-tile"]), + ("amx-fp8", Unstable(sym::x86_amx_intrinsics), &["amx-tile"]), ("amx-int8", Unstable(sym::x86_amx_intrinsics), &["amx-tile"]), + ("amx-movrs", Unstable(sym::x86_amx_intrinsics), &["amx-tile"]), + ("amx-tf32", Unstable(sym::x86_amx_intrinsics), &["amx-tile"]), ("amx-tile", Unstable(sym::x86_amx_intrinsics), &[]), + ("amx-transpose", Unstable(sym::x86_amx_intrinsics), &["amx-tile"]), + ("apxf", Unstable(sym::apx_target_feature), &[]), ("avx", Stable, &["sse4.2"]), + ( + "avx10.1", + Unstable(sym::avx10_target_feature), + &[ + "avx512bf16", + "avx512bitalg", + "avx512bw", + "avx512cd", + "avx512dq", + "avx512f", + "avx512fp16", + "avx512ifma", + "avx512vbmi", + "avx512vbmi2", + "avx512vl", + "avx512vnni", + "avx512vpopcntdq", + ], + ), + ("avx10.2", Unstable(sym::avx10_target_feature), &["avx10.1"]), ("avx2", Stable, &["avx"]), ("avx512bf16", Unstable(sym::avx512_target_feature), &["avx512bw"]), ("avx512bitalg", Unstable(sym::avx512_target_feature), &["avx512bw"]), @@ -393,7 +422,7 @@ static X86_FEATURES: &[(&str, Stability, ImpliedFeatures)] = &[ ("avx512cd", Unstable(sym::avx512_target_feature), &["avx512f"]), ("avx512dq", Unstable(sym::avx512_target_feature), &["avx512f"]), ("avx512f", Unstable(sym::avx512_target_feature), &["avx2", "fma", "f16c"]), - ("avx512fp16", Unstable(sym::avx512_target_feature), &["avx512bw", "avx512vl", "avx512dq"]), + ("avx512fp16", Unstable(sym::avx512_target_feature), &["avx512bw"]), ("avx512ifma", Unstable(sym::avx512_target_feature), &["avx512f"]), ("avx512vbmi", Unstable(sym::avx512_target_feature), &["avx512bw"]), ("avx512vbmi2", Unstable(sym::avx512_target_feature), &["avx512bw"]), @@ -418,6 +447,7 @@ static X86_FEATURES: &[(&str, Stability, ImpliedFeatures)] = &[ ("lahfsahf", Unstable(sym::lahfsahf_target_feature), &[]), ("lzcnt", Stable, &[]), ("movbe", Stable, &[]), + ("movrs", Unstable(sym::movrs_target_feature), &[]), ("pclmulqdq", Stable, &["sse2"]), ("popcnt", Stable, &[]), ("prfchw", Unstable(sym::prfchw_target_feature), &[]), @@ -485,10 +515,11 @@ const MIPS_FEATURES: &[(&str, Stability, ImpliedFeatures)] = &[ static RISCV_FEATURES: &[(&str, Stability, ImpliedFeatures)] = &[ // tidy-alphabetical-start ("a", Stable, &["zaamo", "zalrsc"]), - ("c", Stable, &[]), + ("b", Unstable(sym::riscv_target_feature), &["zba", "zbb", "zbs"]), + ("c", Stable, &["zca"]), ("d", Unstable(sym::riscv_target_feature), &["f"]), ("e", Unstable(sym::riscv_target_feature), &[]), - ("f", Unstable(sym::riscv_target_feature), &[]), + ("f", Unstable(sym::riscv_target_feature), &["zicsr"]), ( "forced-atomics", Stability::Forbidden { reason: "unsound because it changes the ABI of atomic operations" }, @@ -500,7 +531,7 @@ static RISCV_FEATURES: &[(&str, Stability, ImpliedFeatures)] = &[ ("unaligned-vector-mem", Unstable(sym::riscv_target_feature), &[]), ("v", Unstable(sym::riscv_target_feature), &["zvl128b", "zve64d"]), ("za128rs", Unstable(sym::riscv_target_feature), &[]), - ("za64rs", Unstable(sym::riscv_target_feature), &[]), + ("za64rs", Unstable(sym::riscv_target_feature), &["za128rs"]), // Za64rs ⊃ Za128rs ("zaamo", Unstable(sym::riscv_target_feature), &[]), ("zabha", Unstable(sym::riscv_target_feature), &["zaamo"]), ("zacas", Unstable(sym::riscv_target_feature), &["zaamo"]), @@ -509,17 +540,38 @@ static RISCV_FEATURES: &[(&str, Stability, ImpliedFeatures)] = &[ ("zawrs", Unstable(sym::riscv_target_feature), &[]), ("zba", Stable, &[]), ("zbb", Stable, &[]), - ("zbc", Stable, &[]), + ("zbc", Stable, &["zbkc"]), // Zbc ⊃ Zbkc ("zbkb", Stable, &[]), ("zbkc", Stable, &[]), ("zbkx", Stable, &[]), ("zbs", Stable, &[]), + ("zca", Unstable(sym::riscv_target_feature), &[]), + ("zcb", Unstable(sym::riscv_target_feature), &["zca"]), + ("zcmop", Unstable(sym::riscv_target_feature), &["zca"]), ("zdinx", Unstable(sym::riscv_target_feature), &["zfinx"]), + ("zfa", Unstable(sym::riscv_target_feature), &["f"]), + ("zfbfmin", Unstable(sym::riscv_target_feature), &["f"]), // and a subset of Zfhmin ("zfh", Unstable(sym::riscv_target_feature), &["zfhmin"]), ("zfhmin", Unstable(sym::riscv_target_feature), &["f"]), - ("zfinx", Unstable(sym::riscv_target_feature), &[]), + ("zfinx", Unstable(sym::riscv_target_feature), &["zicsr"]), ("zhinx", Unstable(sym::riscv_target_feature), &["zhinxmin"]), ("zhinxmin", Unstable(sym::riscv_target_feature), &["zfinx"]), + ("zic64b", Unstable(sym::riscv_target_feature), &[]), + ("zicbom", Unstable(sym::riscv_target_feature), &[]), + ("zicbop", Unstable(sym::riscv_target_feature), &[]), + ("zicboz", Unstable(sym::riscv_target_feature), &[]), + ("ziccamoa", Unstable(sym::riscv_target_feature), &[]), + ("ziccif", Unstable(sym::riscv_target_feature), &[]), + ("zicclsm", Unstable(sym::riscv_target_feature), &[]), + ("ziccrse", Unstable(sym::riscv_target_feature), &[]), + ("zicntr", Unstable(sym::riscv_target_feature), &["zicsr"]), + ("zicond", Unstable(sym::riscv_target_feature), &[]), + ("zicsr", Unstable(sym::riscv_target_feature), &[]), + ("zifencei", Unstable(sym::riscv_target_feature), &[]), + ("zihintntl", Unstable(sym::riscv_target_feature), &[]), + ("zihintpause", Unstable(sym::riscv_target_feature), &[]), + ("zihpm", Unstable(sym::riscv_target_feature), &["zicsr"]), + ("zimop", Unstable(sym::riscv_target_feature), &[]), ("zk", Stable, &["zkn", "zkr", "zkt"]), ("zkn", Stable, &["zbkb", "zbkc", "zbkx", "zkne", "zknd", "zknh"]), ("zknd", Stable, &[]), @@ -530,14 +582,17 @@ static RISCV_FEATURES: &[(&str, Stability, ImpliedFeatures)] = &[ ("zksed", Stable, &[]), ("zksh", Stable, &[]), ("zkt", Stable, &[]), - ("zvbb", Unstable(sym::riscv_target_feature), &["zvkb"]), + ("ztso", Unstable(sym::riscv_target_feature), &[]), + ("zvbb", Unstable(sym::riscv_target_feature), &["zvkb"]), // Zvbb ⊃ Zvkb ("zvbc", Unstable(sym::riscv_target_feature), &["zve64x"]), ("zve32f", Unstable(sym::riscv_target_feature), &["zve32x", "f"]), - ("zve32x", Unstable(sym::riscv_target_feature), &["zvl32b"]), + ("zve32x", Unstable(sym::riscv_target_feature), &["zvl32b", "zicsr"]), ("zve64d", Unstable(sym::riscv_target_feature), &["zve64f", "d"]), ("zve64f", Unstable(sym::riscv_target_feature), &["zve32f", "zve64x"]), ("zve64x", Unstable(sym::riscv_target_feature), &["zve32x", "zvl64b"]), - ("zvfh", Unstable(sym::riscv_target_feature), &["zvfhmin", "zfhmin"]), + ("zvfbfmin", Unstable(sym::riscv_target_feature), &["zve32f"]), + ("zvfbfwma", Unstable(sym::riscv_target_feature), &["zfbfmin", "zvfbfmin"]), + ("zvfh", Unstable(sym::riscv_target_feature), &["zvfhmin", "zve32f", "zfhmin"]), // Zvfh ⊃ Zvfhmin ("zvfhmin", Unstable(sym::riscv_target_feature), &["zve32f"]), ("zvkb", Unstable(sym::riscv_target_feature), &["zve32x"]), ("zvkg", Unstable(sym::riscv_target_feature), &["zve32x"]), @@ -546,7 +601,7 @@ static RISCV_FEATURES: &[(&str, Stability, ImpliedFeatures)] = &[ ("zvkned", Unstable(sym::riscv_target_feature), &["zve32x"]), ("zvkng", Unstable(sym::riscv_target_feature), &["zvkn", "zvkg"]), ("zvknha", Unstable(sym::riscv_target_feature), &["zve32x"]), - ("zvknhb", Unstable(sym::riscv_target_feature), &["zve64x"]), + ("zvknhb", Unstable(sym::riscv_target_feature), &["zvknha", "zve64x"]), // Zvknhb ⊃ Zvknha ("zvks", Unstable(sym::riscv_target_feature), &["zvksed", "zvksh", "zvkb", "zvkt"]), ("zvksc", Unstable(sym::riscv_target_feature), &["zvks", "zvbc"]), ("zvksed", Unstable(sym::riscv_target_feature), &["zve32x"]), @@ -754,7 +809,7 @@ const RISCV_FEATURES_FOR_CORRECT_VECTOR_ABI: &'static [(u64, &'static str)] = &[ (32768, "zvl32768b"), (65536, "zvl65536b"), ]; -// Always warn on SPARC, as the necessary target features cannot be enabled in Rust at the moment. +// Always error on SPARC, as the necessary target features cannot be enabled in Rust at the moment. const SPARC_FEATURES_FOR_CORRECT_VECTOR_ABI: &'static [(u64, &'static str)] = &[/*(64, "vis")*/]; const HEXAGON_FEATURES_FOR_CORRECT_VECTOR_ABI: &'static [(u64, &'static str)] = @@ -942,12 +997,12 @@ impl Target { // about what the intended ABI is. match &*self.llvm_abiname { "ilp32d" | "lp64d" => { - // Requires d (which implies f), incompatible with e. - FeatureConstraints { required: &["d"], incompatible: &["e"] } + // Requires d (which implies f), incompatible with e and zfinx. + FeatureConstraints { required: &["d"], incompatible: &["e", "zfinx"] } } "ilp32f" | "lp64f" => { - // Requires f, incompatible with e. - FeatureConstraints { required: &["f"], incompatible: &["e"] } + // Requires f, incompatible with e and zfinx. + FeatureConstraints { required: &["f"], incompatible: &["e", "zfinx"] } } "ilp32" | "lp64" => { // Requires nothing, incompatible with e. diff --git a/compiler/rustc_trait_selection/Cargo.toml b/compiler/rustc_trait_selection/Cargo.toml index 1c61e23362a83..1071105522d11 100644 --- a/compiler/rustc_trait_selection/Cargo.toml +++ b/compiler/rustc_trait_selection/Cargo.toml @@ -8,7 +8,6 @@ edition = "2024" itertools = "0.12" rustc_abi = { path = "../rustc_abi" } rustc_ast = { path = "../rustc_ast" } -rustc_attr_parsing = { path = "../rustc_attr_parsing" } rustc_data_structures = { path = "../rustc_data_structures" } rustc_errors = { path = "../rustc_errors" } rustc_fluent_macro = { path = "../rustc_fluent_macro" } @@ -21,7 +20,6 @@ rustc_parse_format = { path = "../rustc_parse_format" } rustc_session = { path = "../rustc_session" } rustc_span = { path = "../rustc_span" } rustc_transmute = { path = "../rustc_transmute", features = ["rustc"] } -rustc_type_ir = { path = "../rustc_type_ir" } smallvec = { version = "1.8.1", features = ["union", "may_dangle"] } thin-vec = "0.2" tracing = "0.1" diff --git a/compiler/rustc_trait_selection/messages.ftl b/compiler/rustc_trait_selection/messages.ftl index 4db9d9915b139..762dddcaaf59e 100644 --- a/compiler/rustc_trait_selection/messages.ftl +++ b/compiler/rustc_trait_selection/messages.ftl @@ -148,9 +148,6 @@ trait_selection_dtcs_has_req_note = the used `impl` has a `'static` requirement trait_selection_dtcs_introduces_requirement = calling this method introduces the `impl`'s `'static` requirement trait_selection_dtcs_suggestion = consider relaxing the implicit `'static` requirement -trait_selection_empty_on_clause_in_rustc_on_unimplemented = empty `on`-clause in `#[rustc_on_unimplemented]` - .label = empty on-clause here - trait_selection_explicit_lifetime_required_sugg_with_ident = add explicit lifetime `{$named}` to the type of `{$simple_ident}` trait_selection_explicit_lifetime_required_sugg_with_param_type = add explicit lifetime `{$named}` to type @@ -187,9 +184,6 @@ trait_selection_inherent_projection_normalization_overflow = overflow evaluating trait_selection_invalid_format_specifier = invalid format specifier .help = no format specifier are supported in this position -trait_selection_invalid_on_clause_in_rustc_on_unimplemented = invalid `on`-clause in `#[rustc_on_unimplemented]` - .label = invalid on-clause here - trait_selection_label_bad = {$bad_kind -> *[other] cannot infer type [more_info] cannot infer {$prefix_kind -> @@ -237,10 +231,6 @@ trait_selection_negative_positive_conflict = found both positive and negative im .positive_implementation_here = positive implementation here .positive_implementation_in_crate = positive implementation in crate `{$positive_impl_cname}` -trait_selection_no_value_in_rustc_on_unimplemented = this attribute must have a valid value - .label = expected value here - .note = eg `#[rustc_on_unimplemented(message="foo")]` - trait_selection_nothing = {""} trait_selection_oc_cant_coerce_force_inline = @@ -248,10 +238,7 @@ trait_selection_oc_cant_coerce_force_inline = trait_selection_oc_cant_coerce_intrinsic = cannot coerce intrinsics to function pointers trait_selection_oc_closure_selfref = closure/coroutine type that references itself trait_selection_oc_const_compat = const not compatible with trait -trait_selection_oc_fn_lang_correct_type = {$lang_item_name -> - [panic_impl] `#[panic_handler]` - *[lang_item_name] lang item `{$lang_item_name}` - } function has wrong type +trait_selection_oc_fn_lang_correct_type = lang item `{$lang_item_name}` function has wrong type trait_selection_oc_fn_main_correct_type = `main` function has wrong type trait_selection_oc_generic = mismatched types @@ -264,8 +251,15 @@ trait_selection_oc_no_diverge = `else` clause of `let...else` does not diverge trait_selection_oc_no_else = `if` may be missing an `else` clause trait_selection_oc_try_compat = `?` operator has incompatible types trait_selection_oc_type_compat = type not compatible with trait + trait_selection_opaque_captures_lifetime = hidden type for `{$opaque_ty}` captures lifetime that does not appear in bounds .label = opaque type defined here +trait_selection_opaque_type_non_generic_param = + expected generic {$kind} parameter, found `{$arg}` + .label = {STREQ($arg, "'static") -> + [true] cannot use static lifetime; use a bound lifetime instead or remove the lifetime parameter from the opaque type + *[other] this generic parameter must be used with a generic {$kind} parameter + } trait_selection_outlives_bound = lifetime of the source pointer does not outlive lifetime bound of the object type trait_selection_outlives_content = lifetime of reference outlives lifetime of borrowed content... @@ -332,6 +326,22 @@ trait_selection_ril_introduced_by = requirement introduced by this return type trait_selection_ril_introduced_here = `'static` requirement introduced here trait_selection_ril_static_introduced_by = "`'static` lifetime requirement introduced by the return type +trait_selection_rustc_on_unimplemented_empty_on_clause = empty `on`-clause in `#[rustc_on_unimplemented]` + .label = empty `on`-clause here +trait_selection_rustc_on_unimplemented_expected_identifier = expected an identifier inside this `on`-clause + .label = expected an identifier here, not `{$path}` +trait_selection_rustc_on_unimplemented_expected_one_predicate_in_not = expected a single predicate in `not(..)` + .label = unexpected quantity of predicates here +trait_selection_rustc_on_unimplemented_invalid_flag = invalid flag in `on`-clause + .label = expected one of the `crate_local`, `direct` or `from_desugaring` flags, not `{$invalid_flag}` +trait_selection_rustc_on_unimplemented_invalid_predicate = this predicate is invalid + .label = expected one of `any`, `all` or `not` here, not `{$invalid_pred}` +trait_selection_rustc_on_unimplemented_missing_value = this attribute must have a value + .label = expected value here + .note = e.g. `#[rustc_on_unimplemented(message="foo")]` +trait_selection_rustc_on_unimplemented_unsupported_literal_in_on = literals inside `on`-clauses are not supported + .label = unexpected literal here + trait_selection_source_kind_closure_return = try giving this closure an explicit return type diff --git a/compiler/rustc_trait_selection/src/error_reporting/infer/mod.rs b/compiler/rustc_trait_selection/src/error_reporting/infer/mod.rs index 40f8af1f6913a..fdd547448f004 100644 --- a/compiler/rustc_trait_selection/src/error_reporting/infer/mod.rs +++ b/compiler/rustc_trait_selection/src/error_reporting/infer/mod.rs @@ -2334,13 +2334,13 @@ impl<'tcx> ObligationCause<'tcx> { subdiags: Vec, ) -> ObligationCauseFailureCode { match self.code() { - ObligationCauseCode::CompareImplItem { kind: ty::AssocKind::Fn, .. } => { + ObligationCauseCode::CompareImplItem { kind: ty::AssocKind::Fn { .. }, .. } => { ObligationCauseFailureCode::MethodCompat { span, subdiags } } - ObligationCauseCode::CompareImplItem { kind: ty::AssocKind::Type, .. } => { + ObligationCauseCode::CompareImplItem { kind: ty::AssocKind::Type { .. }, .. } => { ObligationCauseFailureCode::TypeCompat { span, subdiags } } - ObligationCauseCode::CompareImplItem { kind: ty::AssocKind::Const, .. } => { + ObligationCauseCode::CompareImplItem { kind: ty::AssocKind::Const { .. }, .. } => { ObligationCauseFailureCode::ConstCompat { span, subdiags } } ObligationCauseCode::BlockTailExpression(.., hir::MatchSource::TryDesugar(_)) => { @@ -2398,13 +2398,13 @@ impl<'tcx> ObligationCause<'tcx> { fn as_requirement_str(&self) -> &'static str { match self.code() { - ObligationCauseCode::CompareImplItem { kind: ty::AssocKind::Fn, .. } => { + ObligationCauseCode::CompareImplItem { kind: ty::AssocKind::Fn { .. }, .. } => { "method type is compatible with trait" } - ObligationCauseCode::CompareImplItem { kind: ty::AssocKind::Type, .. } => { + ObligationCauseCode::CompareImplItem { kind: ty::AssocKind::Type { .. }, .. } => { "associated type is compatible with trait" } - ObligationCauseCode::CompareImplItem { kind: ty::AssocKind::Const, .. } => { + ObligationCauseCode::CompareImplItem { kind: ty::AssocKind::Const { .. }, .. } => { "const is compatible with trait" } ObligationCauseCode::MainFunctionType => "`main` function has the correct type", @@ -2422,9 +2422,13 @@ pub struct ObligationCauseAsDiagArg<'tcx>(pub ObligationCause<'tcx>); impl IntoDiagArg for ObligationCauseAsDiagArg<'_> { fn into_diag_arg(self, _: &mut Option) -> rustc_errors::DiagArgValue { let kind = match self.0.code() { - ObligationCauseCode::CompareImplItem { kind: ty::AssocKind::Fn, .. } => "method_compat", - ObligationCauseCode::CompareImplItem { kind: ty::AssocKind::Type, .. } => "type_compat", - ObligationCauseCode::CompareImplItem { kind: ty::AssocKind::Const, .. } => { + ObligationCauseCode::CompareImplItem { kind: ty::AssocKind::Fn { .. }, .. } => { + "method_compat" + } + ObligationCauseCode::CompareImplItem { kind: ty::AssocKind::Type { .. }, .. } => { + "type_compat" + } + ObligationCauseCode::CompareImplItem { kind: ty::AssocKind::Const { .. }, .. } => { "const_compat" } ObligationCauseCode::MainFunctionType => "fn_main_correct_type", diff --git a/compiler/rustc_trait_selection/src/error_reporting/infer/need_type_info.rs b/compiler/rustc_trait_selection/src/error_reporting/infer/need_type_info.rs index 0bcb5f6f3b286..de9a50f196234 100644 --- a/compiler/rustc_trait_selection/src/error_reporting/infer/need_type_info.rs +++ b/compiler/rustc_trait_selection/src/error_reporting/infer/need_type_info.rs @@ -14,11 +14,10 @@ use rustc_middle::hir::nested_filter; use rustc_middle::ty::adjustment::{Adjust, Adjustment, AutoBorrow}; use rustc_middle::ty::print::{FmtPrinter, PrettyPrinter, Print, Printer}; use rustc_middle::ty::{ - self, GenericArg, GenericArgKind, GenericArgsRef, InferConst, IsSuggestable, Ty, TyCtxt, - TypeFoldable, TypeFolder, TypeSuperFoldable, TypeckResults, + self, GenericArg, GenericArgKind, GenericArgsRef, InferConst, IsSuggestable, Term, TermKind, + Ty, TyCtxt, TypeFoldable, TypeFolder, TypeSuperFoldable, TypeVisitableExt, TypeckResults, }; use rustc_span::{BytePos, DUMMY_SP, FileName, Ident, Span, sym}; -use rustc_type_ir::TypeVisitableExt; use tracing::{debug, instrument, warn}; use super::nice_region_error::placeholder_error::Highlighted; @@ -344,12 +343,12 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { /// which were stuck during inference. pub fn extract_inference_diagnostics_data( &self, - arg: GenericArg<'tcx>, + term: Term<'tcx>, highlight: ty::print::RegionHighlightMode<'tcx>, ) -> InferenceDiagnosticsData { let tcx = self.tcx; - match arg.unpack() { - GenericArgKind::Type(ty) => { + match term.unpack() { + TermKind::Ty(ty) => { if let ty::Infer(ty::TyVar(ty_vid)) = *ty.kind() { let var_origin = self.infcx.type_var_origin(ty_vid); if let Some(def_id) = var_origin.param_def_id @@ -375,7 +374,7 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { parent: None, } } - GenericArgKind::Const(ct) => { + TermKind::Const(ct) => { if let ty::ConstKind::Infer(InferConst::Var(vid)) = ct.kind() { let origin = self.const_var_origin(vid).expect("expected unresolved const var"); if let Some(def_id) = origin.param_def_id { @@ -411,7 +410,6 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { } } } - GenericArgKind::Lifetime(_) => bug!("unexpected lifetime"), } } @@ -472,13 +470,13 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { &self, body_def_id: LocalDefId, failure_span: Span, - arg: GenericArg<'tcx>, + term: Term<'tcx>, error_code: TypeAnnotationNeeded, should_label_span: bool, ) -> Diag<'a> { - let arg = self.resolve_vars_if_possible(arg); - let arg_data = - self.extract_inference_diagnostics_data(arg, ty::print::RegionHighlightMode::default()); + let term = self.resolve_vars_if_possible(term); + let arg_data = self + .extract_inference_diagnostics_data(term, ty::print::RegionHighlightMode::default()); let Some(typeck_results) = &self.typeck_results else { // If we don't have any typeck results we're outside @@ -487,7 +485,7 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { return self.bad_inference_failure_err(failure_span, arg_data, error_code); }; - let mut local_visitor = FindInferSourceVisitor::new(self, typeck_results, arg); + let mut local_visitor = FindInferSourceVisitor::new(self, typeck_results, term); if let Some(body) = self.tcx.hir_maybe_body_owned_by( self.tcx.typeck_root_def_id(body_def_id.to_def_id()).expect_local(), ) { @@ -542,7 +540,7 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { have_turbofish, } => { let generics = self.tcx.generics_of(generics_def_id); - let is_type = matches!(arg.unpack(), GenericArgKind::Type(_)); + let is_type = term.as_type().is_some(); let (parent_exists, parent_prefix, parent_name) = InferenceDiagnosticsParentData::for_parent_def_id(self.tcx, generics_def_id) @@ -811,7 +809,7 @@ struct FindInferSourceVisitor<'a, 'tcx> { tecx: &'a TypeErrCtxt<'a, 'tcx>, typeck_results: &'a TypeckResults<'tcx>, - target: GenericArg<'tcx>, + target: Term<'tcx>, attempt: usize, infer_source_cost: usize, @@ -822,7 +820,7 @@ impl<'a, 'tcx> FindInferSourceVisitor<'a, 'tcx> { fn new( tecx: &'a TypeErrCtxt<'a, 'tcx>, typeck_results: &'a TypeckResults<'tcx>, - target: GenericArg<'tcx>, + target: Term<'tcx>, ) -> Self { FindInferSourceVisitor { tecx, @@ -938,12 +936,12 @@ impl<'a, 'tcx> FindInferSourceVisitor<'a, 'tcx> { // Check whether this generic argument is the inference variable we // are looking for. fn generic_arg_is_target(&self, arg: GenericArg<'tcx>) -> bool { - if arg == self.target { + if arg == self.target.into() { return true; } match (arg.unpack(), self.target.unpack()) { - (GenericArgKind::Type(inner_ty), GenericArgKind::Type(target_ty)) => { + (GenericArgKind::Type(inner_ty), TermKind::Ty(target_ty)) => { use ty::{Infer, TyVar}; match (inner_ty.kind(), target_ty.kind()) { (&Infer(TyVar(a_vid)), &Infer(TyVar(b_vid))) => { @@ -952,7 +950,7 @@ impl<'a, 'tcx> FindInferSourceVisitor<'a, 'tcx> { _ => false, } } - (GenericArgKind::Const(inner_ct), GenericArgKind::Const(target_ct)) => { + (GenericArgKind::Const(inner_ct), TermKind::Const(target_ct)) => { use ty::InferConst::*; match (inner_ct.kind(), target_ct.kind()) { (ty::ConstKind::Infer(Var(a_vid)), ty::ConstKind::Infer(Var(b_vid))) => { @@ -1051,7 +1049,7 @@ impl<'a, 'tcx> FindInferSourceVisitor<'a, 'tcx> { None? } let args = self.node_args_opt(expr.hir_id)?; - let span = tcx.hir().span(segment.hir_id); + let span = tcx.hir_span(segment.hir_id); let insert_span = segment.ident.span.shrink_to_hi().with_hi(span.hi()); InsertableGenericArgs { insert_span, @@ -1110,7 +1108,7 @@ impl<'a, 'tcx> FindInferSourceVisitor<'a, 'tcx> { if generics.has_impl_trait() { return None; } - let span = tcx.hir().span(segment.hir_id); + let span = tcx.hir_span(segment.hir_id); let insert_span = segment.ident.span.shrink_to_hi().with_hi(span.hi()); Some(InsertableGenericArgs { insert_span, @@ -1144,7 +1142,7 @@ impl<'a, 'tcx> FindInferSourceVisitor<'a, 'tcx> { if !segment.infer_args || generics.has_impl_trait() { do yeet (); } - let span = tcx.hir().span(segment.hir_id); + let span = tcx.hir_span(segment.hir_id); let insert_span = segment.ident.span.shrink_to_hi().with_hi(span.hi()); InsertableGenericArgs { insert_span, diff --git a/compiler/rustc_trait_selection/src/error_reporting/infer/nice_region_error/static_impl_trait.rs b/compiler/rustc_trait_selection/src/error_reporting/infer/nice_region_error/static_impl_trait.rs index 083ce022238a0..eaa06d8e8b0ae 100644 --- a/compiler/rustc_trait_selection/src/error_reporting/infer/nice_region_error/static_impl_trait.rs +++ b/compiler/rustc_trait_selection/src/error_reporting/infer/nice_region_error/static_impl_trait.rs @@ -6,7 +6,7 @@ use rustc_hir::def_id::DefId; use rustc_hir::intravisit::{Visitor, VisitorExt, walk_ty}; use rustc_hir::{ self as hir, AmbigArg, GenericBound, GenericParam, GenericParamKind, Item, ItemKind, Lifetime, - LifetimeName, LifetimeParamKind, MissingLifetimeKind, Node, TyKind, + LifetimeKind, LifetimeParamKind, MissingLifetimeKind, Node, TyKind, }; use rustc_middle::ty::{self, Ty, TyCtxt, TypeSuperVisitable, TypeVisitor}; use rustc_span::def_id::LocalDefId; @@ -165,7 +165,7 @@ pub fn suggest_new_region_bound( if let Some(span) = opaque.bounds.iter().find_map(|arg| match arg { GenericBound::Outlives(Lifetime { - res: LifetimeName::Static, ident, .. + kind: LifetimeKind::Static, ident, .. }) => Some(ident.span), _ => None, }) { @@ -253,7 +253,7 @@ pub fn suggest_new_region_bound( } } TyKind::TraitObject(_, lt) => { - if let LifetimeName::ImplicitObjectLifetimeDefault = lt.res { + if let LifetimeKind::ImplicitObjectLifetimeDefault = lt.kind { err.span_suggestion_verbose( fn_return.span.shrink_to_hi(), format!("{declare} the trait object {captures}, {explicit}",), @@ -365,7 +365,7 @@ impl<'a, 'tcx> NiceRegionError<'a, 'tcx> { // obligation comes from the `impl`. Find that `impl` so that we can point // at it in the suggestion. let trait_did = trait_id.to_def_id(); - tcx.hir_trait_impls(trait_did).iter().find_map(|&impl_did| { + tcx.local_trait_impls(trait_did).iter().find_map(|&impl_did| { if let Node::Item(Item { kind: ItemKind::Impl(hir::Impl { self_ty, .. }), .. }) = tcx.hir_node_by_def_id(impl_did) @@ -414,7 +414,7 @@ pub struct HirTraitObjectVisitor<'a>(pub &'a mut Vec, pub DefId); impl<'a, 'tcx> Visitor<'tcx> for HirTraitObjectVisitor<'a> { fn visit_ty(&mut self, t: &'tcx hir::Ty<'tcx, AmbigArg>) { if let TyKind::TraitObject(poly_trait_refs, lifetime_ptr) = t.kind - && let Lifetime { res: LifetimeName::ImplicitObjectLifetimeDefault, .. } = + && let Lifetime { kind: LifetimeKind::ImplicitObjectLifetimeDefault, .. } = lifetime_ptr.pointer() { for ptr in poly_trait_refs { diff --git a/compiler/rustc_trait_selection/src/error_reporting/infer/nice_region_error/trait_impl_difference.rs b/compiler/rustc_trait_selection/src/error_reporting/infer/nice_region_error/trait_impl_difference.rs index 742059228510d..b66bd2c6ab787 100644 --- a/compiler/rustc_trait_selection/src/error_reporting/infer/nice_region_error/trait_impl_difference.rs +++ b/compiler/rustc_trait_selection/src/error_reporting/infer/nice_region_error/trait_impl_difference.rs @@ -98,7 +98,7 @@ impl<'a, 'tcx> NiceRegionError<'a, 'tcx> { let assoc_item = self.tcx().associated_item(trait_item_def_id); let mut visitor = TypeParamSpanVisitor { tcx: self.tcx(), types: vec![] }; match assoc_item.kind { - ty::AssocKind::Fn => { + ty::AssocKind::Fn { .. } => { if let Some(hir_id) = assoc_item.def_id.as_local().map(|id| self.tcx().local_def_id_to_hir_id(id)) { diff --git a/compiler/rustc_trait_selection/src/error_reporting/infer/nice_region_error/util.rs b/compiler/rustc_trait_selection/src/error_reporting/infer/nice_region_error/util.rs index 00f053fa599e0..4a71ab4e06a35 100644 --- a/compiler/rustc_trait_selection/src/error_reporting/infer/nice_region_error/util.rs +++ b/compiler/rustc_trait_selection/src/error_reporting/infer/nice_region_error/util.rs @@ -42,7 +42,7 @@ pub fn find_param_with_region<'tcx>( anon_region: Region<'tcx>, replace_region: Region<'tcx>, ) -> Option> { - let (id, kind) = match *anon_region { + let (id, kind) = match anon_region.kind() { ty::ReLateParam(late_param) => (late_param.scope, late_param.kind), ty::ReEarlyParam(ebr) => { let region_def = tcx.generics_of(generic_param_scope).region_param(ebr, tcx).def_id; @@ -51,7 +51,6 @@ pub fn find_param_with_region<'tcx>( _ => return None, // not a free region }; - let hir = &tcx.hir(); let def_id = id.as_local()?; // FIXME: use def_kind @@ -93,7 +92,7 @@ pub fn find_param_with_region<'tcx>( }); found_anon_region.then(|| { let ty_hir_id = fn_decl.inputs[index].hir_id; - let param_ty_span = hir.span(ty_hir_id); + let param_ty_span = tcx.hir_span(ty_hir_id); let is_first = index == 0; AnonymousParamInfo { param, param_ty: new_param_ty, param_ty_span, kind, is_first } }) @@ -159,6 +158,6 @@ impl<'a, 'tcx> NiceRegionError<'a, 'tcx> { && self .tcx() .opt_associated_item(scope_def_id.to_def_id()) - .is_some_and(|i| i.fn_has_self_parameter) + .is_some_and(|i| i.is_method()) } } diff --git a/compiler/rustc_trait_selection/src/error_reporting/infer/note_and_explain.rs b/compiler/rustc_trait_selection/src/error_reporting/infer/note_and_explain.rs index 5583deda99a49..be508c8cee13e 100644 --- a/compiler/rustc_trait_selection/src/error_reporting/infer/note_and_explain.rs +++ b/compiler/rustc_trait_selection/src/error_reporting/infer/note_and_explain.rs @@ -782,8 +782,8 @@ fn foo(&self) -> Self::T { String::new() } let methods: Vec<(Span, String)> = items .in_definition_order() .filter(|item| { - ty::AssocKind::Fn == item.kind - && Some(item.name) != current_method_ident + item.is_fn() + && Some(item.name()) != current_method_ident && !tcx.is_doc_hidden(item.def_id) }) .filter_map(|item| { diff --git a/compiler/rustc_trait_selection/src/error_reporting/infer/region.rs b/compiler/rustc_trait_selection/src/error_reporting/infer/region.rs index c7f0a88f951a8..b8207c4f81632 100644 --- a/compiler/rustc_trait_selection/src/error_reporting/infer/region.rs +++ b/compiler/rustc_trait_selection/src/error_reporting/infer/region.rs @@ -11,9 +11,10 @@ use rustc_hir::{self as hir, ParamName}; use rustc_middle::bug; use rustc_middle::traits::ObligationCauseCode; use rustc_middle::ty::error::TypeError; -use rustc_middle::ty::{self, IsSuggestable, Region, Ty, TyCtxt, TypeVisitableExt as _}; +use rustc_middle::ty::{ + self, IsSuggestable, Region, Ty, TyCtxt, TypeVisitableExt as _, Upcast as _, +}; use rustc_span::{BytePos, ErrorGuaranteed, Span, Symbol, kw}; -use rustc_type_ir::Upcast as _; use tracing::{debug, instrument}; use super::ObligationCauseAsDiagArg; @@ -299,7 +300,7 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { self.tcx.param_env(generic_param_scope), terr, ); - match (*sub, *sup) { + match (sub.kind(), sup.kind()) { (ty::RePlaceholder(_), ty::RePlaceholder(_)) => {} (ty::RePlaceholder(_), _) => { note_and_explain_region( @@ -391,7 +392,7 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { }) } infer::RelateParamBound(span, ty, opt_span) => { - let prefix = match *sub { + let prefix = match sub.kind() { ty::ReStatic => note_and_explain::PrefixKind::TypeSatisfy, _ => note_and_explain::PrefixKind::TypeOutlive, }; @@ -707,7 +708,7 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { ty::Projection | ty::Inherent => { format!("the associated type `{p}`") } - ty::Weak => format!("the type alias `{p}`"), + ty::Free => format!("the type alias `{p}`"), ty::Opaque => format!("the opaque type `{p}`"), }, }; @@ -850,14 +851,14 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { add_lt_suggs: &mut Vec<(Span, String)>, ) -> String { struct LifetimeReplaceVisitor<'a> { - needle: hir::LifetimeName, + needle: hir::LifetimeKind, new_lt: &'a str, add_lt_suggs: &'a mut Vec<(Span, String)>, } impl<'hir> hir::intravisit::Visitor<'hir> for LifetimeReplaceVisitor<'_> { fn visit_lifetime(&mut self, lt: &'hir hir::Lifetime) { - if lt.res == self.needle { + if lt.kind == self.needle { self.add_lt_suggs.push(lt.suggestion(self.new_lt)); } } @@ -894,7 +895,7 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { }; let mut visitor = LifetimeReplaceVisitor { - needle: hir::LifetimeName::Param(lifetime_def_id), + needle: hir::LifetimeKind::Param(lifetime_def_id), add_lt_suggs, new_lt: &new_lt, }; @@ -967,7 +968,7 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { format!("...so that the {}", sup_trace.cause.as_requirement_str()), ); - err.note_expected_found(&"", sup_expected, &"", sup_found); + err.note_expected_found("", sup_expected, "", sup_found); return if sub_region.is_error() | sup_region.is_error() { err.delay_as_bug() } else { @@ -1017,7 +1018,7 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { infer::BoundRegion(_, br, infer::AssocTypeProjection(def_id)) => format!( " for lifetime parameter {}in trait containing associated type `{}`", br_string(br), - self.tcx.associated_item(def_id).name + self.tcx.associated_item(def_id).name() ), infer::RegionParameterDefinition(_, name) => { format!(" for lifetime parameter `{name}`") @@ -1048,7 +1049,7 @@ pub(super) fn note_and_explain_region<'tcx>( suffix: &str, alt_span: Option, ) { - let (description, span) = match *region { + let (description, span) = match region.kind() { ty::ReEarlyParam(_) | ty::ReLateParam(_) | ty::RePlaceholder(_) | ty::ReStatic => { msg_span_from_named_region(tcx, generic_param_scope, region, alt_span) } @@ -1085,7 +1086,7 @@ fn msg_span_from_named_region<'tcx>( region: ty::Region<'tcx>, alt_span: Option, ) -> (String, Option) { - match *region { + match region.kind() { ty::ReEarlyParam(br) => { let param_def_id = tcx.generics_of(generic_param_scope).region_param(br, tcx).def_id; let span = tcx.def_span(param_def_id); @@ -1185,7 +1186,7 @@ pub fn unexpected_hidden_region_diagnostic<'a, 'tcx>( }); // Explain the region we are capturing. - match *hidden_region { + match hidden_region.kind() { ty::ReEarlyParam(_) | ty::ReLateParam(_) | ty::ReStatic => { // Assuming regionck succeeded (*), we ought to always be // capturing *some* region from the fn header, and hence it diff --git a/compiler/rustc_trait_selection/src/error_reporting/traits/ambiguity.rs b/compiler/rustc_trait_selection/src/error_reporting/traits/ambiguity.rs index 59c93db9c8fff..275b580d79453 100644 --- a/compiler/rustc_trait_selection/src/error_reporting/traits/ambiguity.rs +++ b/compiler/rustc_trait_selection/src/error_reporting/traits/ambiguity.rs @@ -228,13 +228,18 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { // Pick the first generic parameter that still contains inference variables as the one // we're going to emit an error for. If there are none (see above), fall back to // a more general error. - let arg = data.trait_ref.args.iter().find(|s| s.has_non_region_infer()); + let term = data + .trait_ref + .args + .iter() + .filter_map(ty::GenericArg::as_term) + .find(|s| s.has_non_region_infer()); - let mut err = if let Some(arg) = arg { + let mut err = if let Some(term) = term { self.emit_inference_failure_err( obligation.cause.body_id, span, - arg, + term, TypeAnnotationNeeded::E0283, true, ) @@ -276,7 +281,7 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { } if ambiguities.len() > 1 && ambiguities.len() < 10 && has_non_region_infer { if let Some(e) = self.tainted_by_errors() - && arg.is_none() + && term.is_none() { // If `arg.is_none()`, then this is probably two param-env // candidates or impl candidates that are equal modulo lifetimes. @@ -313,7 +318,7 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { self.suggest_fully_qualified_path(&mut err, def_id, span, trait_pred.def_id()); } - if let Some(ty::GenericArgKind::Type(_)) = arg.map(|arg| arg.unpack()) + if term.is_some_and(|term| term.as_type().is_some()) && let Some(body) = self.tcx.hir_maybe_body_owned_by(obligation.cause.body_id) { let mut expr_finder = FindExprBySpan::new(span, self.tcx); @@ -348,11 +353,11 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { && let None = self.tainted_by_errors() { let (verb, noun) = match self.tcx.associated_item(item_id).kind { - ty::AssocKind::Const => ("refer to the", "constant"), - ty::AssocKind::Fn => ("call", "function"), + ty::AssocKind::Const { .. } => ("refer to the", "constant"), + ty::AssocKind::Fn { .. } => ("call", "function"), // This is already covered by E0223, but this following single match // arm doesn't hurt here. - ty::AssocKind::Type => ("refer to the", "type"), + ty::AssocKind::Type { .. } => ("refer to the", "type"), }; // Replace the more general E0283 with a more specific error @@ -464,11 +469,11 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { err } - ty::PredicateKind::Clause(ty::ClauseKind::WellFormed(arg)) => { + ty::PredicateKind::Clause(ty::ClauseKind::WellFormed(term)) => { // Same hacky approach as above to avoid deluging user // with error messages. - if let Err(e) = arg.error_reported() { + if let Err(e) = term.error_reported() { return e; } if let Some(e) = self.tainted_by_errors() { @@ -478,7 +483,7 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { self.emit_inference_failure_err( obligation.cause.body_id, span, - arg, + term, TypeAnnotationNeeded::E0282, false, ) @@ -519,18 +524,19 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { // other `Foo` impls are incoherent. return guar; } - let arg = data + let term = data .projection_term .args .iter() - .chain(Some(data.term.into_arg())) + .filter_map(ty::GenericArg::as_term) + .chain([data.term]) .find(|g| g.has_non_region_infer()); let predicate = self.tcx.short_string(predicate, &mut file); - if let Some(arg) = arg { + if let Some(term) = term { self.emit_inference_failure_err( obligation.cause.body_id, span, - arg, + term, TypeAnnotationNeeded::E0284, true, ) @@ -554,12 +560,13 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { if let Some(e) = self.tainted_by_errors() { return e; } - let arg = data.walk().find(|g| g.is_non_region_infer()); - if let Some(arg) = arg { + let term = + data.walk().filter_map(ty::GenericArg::as_term).find(|term| term.is_infer()); + if let Some(term) = term { let err = self.emit_inference_failure_err( obligation.cause.body_id, span, - arg, + term, TypeAnnotationNeeded::E0284, true, ); diff --git a/compiler/rustc_trait_selection/src/error_reporting/traits/call_kind.rs b/compiler/rustc_trait_selection/src/error_reporting/traits/call_kind.rs index 1c3e570b67695..d8b405e904c06 100644 --- a/compiler/rustc_trait_selection/src/error_reporting/traits/call_kind.rs +++ b/compiler/rustc_trait_selection/src/error_reporting/traits/call_kind.rs @@ -81,9 +81,7 @@ pub fn call_kind<'tcx>( } }); - let fn_call = parent.and_then(|p| { - lang_items::FN_TRAITS.iter().filter_map(|&l| tcx.lang_items().get(l)).find(|&id| id == p) - }); + let fn_call = parent.filter(|&p| tcx.fn_trait_kind_from_def_id(p).is_some()); let operator = if !from_hir_call && let Some(p) = parent { lang_items::OPERATORS.iter().filter_map(|&l| tcx.lang_items().get(l)).find(|&id| id == p) diff --git a/compiler/rustc_trait_selection/src/error_reporting/traits/fulfillment_errors.rs b/compiler/rustc_trait_selection/src/error_reporting/traits/fulfillment_errors.rs index 07a67cde3be1c..970160ba212af 100644 --- a/compiler/rustc_trait_selection/src/error_reporting/traits/fulfillment_errors.rs +++ b/compiler/rustc_trait_selection/src/error_reporting/traits/fulfillment_errors.rs @@ -2,6 +2,7 @@ use core::ops::ControlFlow; use std::borrow::Cow; use std::path::PathBuf; +use rustc_abi::ExternAbi; use rustc_ast::TraitObjectSyntax; use rustc_data_structures::fx::FxHashMap; use rustc_data_structures::unord::UnordSet; @@ -14,6 +15,8 @@ use rustc_hir::def_id::{DefId, LOCAL_CRATE, LocalDefId}; use rustc_hir::intravisit::Visitor; use rustc_hir::{self as hir, LangItem, Node}; use rustc_infer::infer::{InferOk, TypeTrace}; +use rustc_infer::traits::ImplSource; +use rustc_infer::traits::solve::Goal; use rustc_middle::traits::SignatureMismatchData; use rustc_middle::traits::select::OverflowError; use rustc_middle::ty::abstract_const::NotConstEvaluatable; @@ -46,8 +49,8 @@ use crate::infer::{self, InferCtxt, InferCtxtExt as _}; use crate::traits::query::evaluate_obligation::InferCtxtExt as _; use crate::traits::{ MismatchedProjectionTypes, NormalizeExt, Obligation, ObligationCause, ObligationCauseCode, - ObligationCtxt, Overflow, PredicateObligation, SelectionError, SignatureMismatch, - TraitDynIncompatible, elaborate, + ObligationCtxt, Overflow, PredicateObligation, SelectionContext, SelectionError, + SignatureMismatch, TraitDynIncompatible, elaborate, specialization_graph, }; impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { @@ -144,7 +147,7 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { && leaf_trait_predicate.def_id() != root_pred.def_id() // The root trait is not `Unsize`, as to avoid talking about it in // `tests/ui/coercion/coerce-issue-49593-box-never.rs`. - && Some(root_pred.def_id()) != self.tcx.lang_items().unsize_trait() + && !self.tcx.is_lang_item(root_pred.def_id(), LangItem::Unsize) { ( self.resolve_vars_if_possible( @@ -811,38 +814,17 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { obligation: &PredicateObligation<'tcx>, mut trait_pred: ty::PolyTraitPredicate<'tcx>, ) -> Option { - // If `AsyncFnKindHelper` is not implemented, that means that the closure kind - // doesn't extend the goal kind. This is worth reporting, but we can only do so - // if we actually know which closure this goal comes from, so look at the cause - // to see if we can extract that information. - if self.tcx.is_lang_item(trait_pred.def_id(), LangItem::AsyncFnKindHelper) - && let Some(found_kind) = - trait_pred.skip_binder().trait_ref.args.type_at(0).to_opt_closure_kind() - && let Some(expected_kind) = - trait_pred.skip_binder().trait_ref.args.type_at(1).to_opt_closure_kind() - && !found_kind.extends(expected_kind) - { - if let Some((_, Some(parent))) = obligation.cause.code().parent_with_predicate() { - // If we have a derived obligation, then the parent will be a `AsyncFn*` goal. + // If we end up on an `AsyncFnKindHelper` goal, try to unwrap the parent + // `AsyncFn*` goal. + if self.tcx.is_lang_item(trait_pred.def_id(), LangItem::AsyncFnKindHelper) { + let mut code = obligation.cause.code(); + // Unwrap a `FunctionArg` cause, which has been refined from a derived obligation. + if let ObligationCauseCode::FunctionArg { parent_code, .. } = code { + code = &**parent_code; + } + // If we have a derived obligation, then the parent will be a `AsyncFn*` goal. + if let Some((_, Some(parent))) = code.parent_with_predicate() { trait_pred = parent; - } else if let &ObligationCauseCode::FunctionArg { arg_hir_id, .. } = - obligation.cause.code() - && let Some(typeck_results) = &self.typeck_results - && let ty::Closure(closure_def_id, _) | ty::CoroutineClosure(closure_def_id, _) = - *typeck_results.node_type(arg_hir_id).kind() - { - // Otherwise, extract the closure kind from the obligation, - // but only if we actually have an argument to deduce the - // closure type from... - let mut err = self.report_closure_error( - &obligation, - closure_def_id, - found_kind, - expected_kind, - "Async", - ); - self.note_obligation_cause(&mut err, &obligation); - return Some(err.emit()); } } @@ -930,7 +912,7 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { )) = arg.kind && let Node::Pat(pat) = self.tcx.hir_node(*hir_id) && let Some((preds, guar)) = self.reported_trait_errors.borrow().get(&pat.span) - && preds.contains(&obligation.predicate) + && preds.contains(&obligation.as_goal()) { return Err(*guar); } @@ -1236,7 +1218,7 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { ); // Only suggest derive if this isn't a derived obligation, // and the struct is local. - if let Some(span) = self.tcx.hir().span_if_local(def.did()) + if let Some(span) = self.tcx.hir_span_if_local(def.did()) && obligation.cause.code().parent().is_none() { if ty.is_structural_eq_shallow(self.tcx) { @@ -1292,6 +1274,7 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { fn can_match_trait( &self, + param_env: ty::ParamEnv<'tcx>, goal: ty::TraitPredicate<'tcx>, assumption: ty::PolyTraitPredicate<'tcx>, ) -> bool { @@ -1306,11 +1289,12 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { assumption, ); - self.can_eq(ty::ParamEnv::empty(), goal.trait_ref, trait_assumption.trait_ref) + self.can_eq(param_env, goal.trait_ref, trait_assumption.trait_ref) } fn can_match_projection( &self, + param_env: ty::ParamEnv<'tcx>, goal: ty::ProjectionPredicate<'tcx>, assumption: ty::PolyProjectionPredicate<'tcx>, ) -> bool { @@ -1320,7 +1304,6 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { assumption, ); - let param_env = ty::ParamEnv::empty(); self.can_eq(param_env, goal.projection_term, assumption.projection_term) && self.can_eq(param_env, goal.term, assumption.term) } @@ -1330,24 +1313,32 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { #[instrument(level = "debug", skip(self), ret)] pub(super) fn error_implies( &self, - cond: ty::Predicate<'tcx>, - error: ty::Predicate<'tcx>, + cond: Goal<'tcx, ty::Predicate<'tcx>>, + error: Goal<'tcx, ty::Predicate<'tcx>>, ) -> bool { if cond == error { return true; } - if let Some(error) = error.as_trait_clause() { + // FIXME: We could be smarter about this, i.e. if cond's param-env is a + // subset of error's param-env. This only matters when binders will carry + // predicates though, and obviously only matters for error reporting. + if cond.param_env != error.param_env { + return false; + } + let param_env = error.param_env; + + if let Some(error) = error.predicate.as_trait_clause() { self.enter_forall(error, |error| { - elaborate(self.tcx, std::iter::once(cond)) + elaborate(self.tcx, std::iter::once(cond.predicate)) .filter_map(|implied| implied.as_trait_clause()) - .any(|implied| self.can_match_trait(error, implied)) + .any(|implied| self.can_match_trait(param_env, error, implied)) }) - } else if let Some(error) = error.as_projection_clause() { + } else if let Some(error) = error.predicate.as_projection_clause() { self.enter_forall(error, |error| { - elaborate(self.tcx, std::iter::once(cond)) + elaborate(self.tcx, std::iter::once(cond.predicate)) .filter_map(|implied| implied.as_projection_clause()) - .any(|implied| self.can_match_projection(error, implied)) + .any(|implied| self.can_match_projection(param_env, error, implied)) }) } else { false @@ -1505,34 +1496,33 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { } } - let secondary_span = (|| { + let secondary_span = self.probe(|_| { let ty::PredicateKind::Clause(ty::ClauseKind::Projection(proj)) = predicate.kind().skip_binder() else { return None; }; - let trait_assoc_item = self.tcx.opt_associated_item(proj.projection_term.def_id)?; - let trait_assoc_ident = trait_assoc_item.ident(self.tcx); - - let mut associated_items = vec![]; - self.tcx.for_each_relevant_impl( - self.tcx.trait_of_item(proj.projection_term.def_id)?, - proj.projection_term.self_ty(), - |impl_def_id| { - associated_items.extend( - self.tcx - .associated_items(impl_def_id) - .in_definition_order() - .find(|assoc| assoc.ident(self.tcx) == trait_assoc_ident), - ); - }, - ); + let Ok(Some(ImplSource::UserDefined(impl_data))) = SelectionContext::new(self) + .poly_select(&obligation.with( + self.tcx, + predicate.kind().rebind(proj.projection_term.trait_ref(self.tcx)), + )) + else { + return None; + }; - let [associated_item]: &[ty::AssocItem] = &associated_items[..] else { + let Ok(node) = + specialization_graph::assoc_def(self.tcx, impl_data.impl_def_id, proj.def_id()) + else { return None; }; - match self.tcx.hir_get_if_local(associated_item.def_id) { + + if !node.is_final() { + return None; + } + + match self.tcx.hir_get_if_local(node.item.def_id) { Some( hir::Node::TraitItem(hir::TraitItem { kind: hir::TraitItemKind::Type(_, Some(ty)), @@ -1555,7 +1545,7 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { )), _ => None, } - })(); + }); self.note_type_err( &mut diag, @@ -1671,7 +1661,7 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { ty::Alias(ty::Projection, ..) => Some(12), ty::Alias(ty::Inherent, ..) => Some(13), ty::Alias(ty::Opaque, ..) => Some(14), - ty::Alias(ty::Weak, ..) => Some(15), + ty::Alias(ty::Free, ..) => Some(15), ty::Never => Some(16), ty::Adt(..) => Some(17), ty::Coroutine(..) => Some(18), @@ -2263,10 +2253,7 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { // auto-traits or fundamental traits that might not be exactly what // the user might expect to be presented with. Instead this is // useful for less general traits. - if peeled - && !self.tcx.trait_is_auto(def_id) - && !self.tcx.lang_items().iter().any(|(_, id)| id == def_id) - { + if peeled && !self.tcx.trait_is_auto(def_id) && self.tcx.as_lang_item(def_id).is_none() { let impl_candidates = self.find_similar_impl_candidates(trait_pred); self.report_similar_impl_candidates( &impl_candidates, @@ -2789,32 +2776,57 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { } // Note any argument mismatches - let given_ty = params.skip_binder(); + let ty::Tuple(given) = *params.skip_binder().kind() else { + return; + }; + let expected_ty = trait_pred.skip_binder().trait_ref.args.type_at(1); - if let ty::Tuple(given) = given_ty.kind() - && let ty::Tuple(expected) = expected_ty.kind() - { - if expected.len() != given.len() { - // Note number of types that were expected and given - err.note( - format!( - "expected a closure taking {} argument{}, but one taking {} argument{} was given", - given.len(), - pluralize!(given.len()), - expected.len(), - pluralize!(expected.len()), - ) - ); - } else if !self.same_type_modulo_infer(given_ty, expected_ty) { - // Print type mismatch - let (expected_args, given_args) = self.cmp(given_ty, expected_ty); - err.note_expected_found( - &"a closure with arguments", - expected_args, - &"a closure with arguments", - given_args, - ); - } + let ty::Tuple(expected) = *expected_ty.kind() else { + return; + }; + + if expected.len() != given.len() { + // Note number of types that were expected and given + err.note(format!( + "expected a closure taking {} argument{}, but one taking {} argument{} was given", + given.len(), + pluralize!(given.len()), + expected.len(), + pluralize!(expected.len()), + )); + return; + } + + let given_ty = Ty::new_fn_ptr( + self.tcx, + params.rebind(self.tcx.mk_fn_sig( + given, + self.tcx.types.unit, + false, + hir::Safety::Safe, + ExternAbi::Rust, + )), + ); + let expected_ty = Ty::new_fn_ptr( + self.tcx, + trait_pred.rebind(self.tcx.mk_fn_sig( + expected, + self.tcx.types.unit, + false, + hir::Safety::Safe, + ExternAbi::Rust, + )), + ); + + if !self.same_type_modulo_infer(given_ty, expected_ty) { + // Print type mismatch + let (expected_args, given_args) = self.cmp(expected_ty, given_ty); + err.note_expected_found( + "a closure with signature", + expected_args, + "a closure with signature", + given_args, + ); } } @@ -2943,7 +2955,7 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { }; let found_node = found_did.and_then(|did| self.tcx.hir_get_if_local(did)); - let found_span = found_did.and_then(|did| self.tcx.hir().span_if_local(did)); + let found_span = found_did.and_then(|did| self.tcx.hir_span_if_local(did)); if !self.reported_signature_mismatch.borrow_mut().insert((span, found_span)) { // We check closures twice, with obligations flowing in different directions, @@ -2977,8 +2989,7 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { // This shouldn't be common unless manually implementing one of the // traits manually, but don't make it more confusing when it does // happen. - if Some(expected_trait_ref.def_id) != self.tcx.lang_items().coroutine_trait() && not_tupled - { + if !self.tcx.is_lang_item(expected_trait_ref.def_id, LangItem::Coroutine) && not_tupled { return Ok(self.report_and_explain_type_error( TypeTrace::trait_refs(&obligation.cause, expected_trait_ref, found_trait_ref), obligation.param_env, @@ -3030,7 +3041,6 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { node: Node<'_>, ) -> Option<(Span, Option, Vec)> { let sm = self.tcx.sess.source_map(); - let hir = self.tcx.hir(); Some(match node { Node::Expr(&hir::Expr { kind: hir::ExprKind::Closure(&hir::Closure { body, fn_decl_span, fn_arg_span, .. }), @@ -3086,7 +3096,7 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { .collect::>(), ), Node::Ctor(variant_data) => { - let span = variant_data.ctor_hir_id().map_or(DUMMY_SP, |id| hir.span(id)); + let span = variant_data.ctor_hir_id().map_or(DUMMY_SP, |id| self.tcx.hir_span(id)); (span, None, vec![ArgKind::empty(); variant_data.fields().len()]) } _ => panic!("non-FnLike node found: {node:?}"), diff --git a/compiler/rustc_trait_selection/src/error_reporting/traits/mod.rs b/compiler/rustc_trait_selection/src/error_reporting/traits/mod.rs index 98df09b6f7b01..78f9287b407b3 100644 --- a/compiler/rustc_trait_selection/src/error_reporting/traits/mod.rs +++ b/compiler/rustc_trait_selection/src/error_reporting/traits/mod.rs @@ -2,6 +2,8 @@ pub mod ambiguity; pub mod call_kind; mod fulfillment_errors; pub mod on_unimplemented; +pub mod on_unimplemented_condition; +pub mod on_unimplemented_format; mod overflow; pub mod suggestions; @@ -12,6 +14,7 @@ use rustc_errors::{Applicability, Diag, E0038, E0276, MultiSpan, struct_span_cod use rustc_hir::def_id::{DefId, LocalDefId}; use rustc_hir::intravisit::Visitor; use rustc_hir::{self as hir, AmbigArg, LangItem}; +use rustc_infer::traits::solve::Goal; use rustc_infer::traits::{ DynCompatibilityViolation, Obligation, ObligationCause, ObligationCauseCode, PredicateObligation, SelectionError, @@ -144,7 +147,7 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { #[derive(Debug)] struct ErrorDescriptor<'tcx> { - predicate: ty::Predicate<'tcx>, + goal: Goal<'tcx, ty::Predicate<'tcx>>, index: Option, // None if this is an old error } @@ -152,15 +155,8 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { .reported_trait_errors .borrow() .iter() - .map(|(&span, predicates)| { - ( - span, - predicates - .0 - .iter() - .map(|&predicate| ErrorDescriptor { predicate, index: None }) - .collect(), - ) + .map(|(&span, goals)| { + (span, goals.0.iter().map(|&goal| ErrorDescriptor { goal, index: None }).collect()) }) .collect(); @@ -186,10 +182,10 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { span = expn_data.call_site; } - error_map.entry(span).or_default().push(ErrorDescriptor { - predicate: error.obligation.predicate, - index: Some(index), - }); + error_map + .entry(span) + .or_default() + .push(ErrorDescriptor { goal: error.obligation.as_goal(), index: Some(index) }); } // We do this in 2 passes because we want to display errors in order, though @@ -210,9 +206,9 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { continue; } - if self.error_implies(error2.predicate, error.predicate) + if self.error_implies(error2.goal, error.goal) && !(error2.index >= error.index - && self.error_implies(error.predicate, error2.predicate)) + && self.error_implies(error.goal, error2.goal)) { info!("skipping {:?} (implied by {:?})", error, error2); is_suppressed[index] = true; @@ -243,7 +239,7 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { .entry(span) .or_insert_with(|| (vec![], guar)) .0 - .push(error.obligation.predicate); + .push(error.obligation.as_goal()); } } } @@ -398,7 +394,7 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { ); if !self.tcx.is_impl_trait_in_trait(trait_item_def_id) { - if let Some(span) = self.tcx.hir().span_if_local(trait_item_def_id) { + if let Some(span) = self.tcx.hir_span_if_local(trait_item_def_id) { let item_name = self.tcx.item_name(impl_item_def_id.to_def_id()); err.span_label(span, format!("definition of `{item_name}` from trait")); } diff --git a/compiler/rustc_trait_selection/src/error_reporting/traits/on_unimplemented.rs b/compiler/rustc_trait_selection/src/error_reporting/traits/on_unimplemented.rs index f0c6e51f2a4c4..d5ee6e2123a19 100644 --- a/compiler/rustc_trait_selection/src/error_reporting/traits/on_unimplemented.rs +++ b/compiler/rustc_trait_selection/src/error_reporting/traits/on_unimplemented.rs @@ -1,44 +1,31 @@ use std::iter; use std::path::PathBuf; -use rustc_ast::MetaItemInner; -use rustc_data_structures::fx::FxHashMap; +use rustc_ast::{LitKind, MetaItem, MetaItemInner, MetaItemKind, MetaItemLit}; use rustc_errors::codes::*; use rustc_errors::{ErrorGuaranteed, struct_span_code_err}; +use rustc_hir as hir; use rustc_hir::def_id::{DefId, LocalDefId}; use rustc_hir::{AttrArgs, Attribute}; use rustc_macros::LintDiagnostic; use rustc_middle::bug; -use rustc_middle::ty::print::PrintTraitRefExt as _; -use rustc_middle::ty::{self, GenericArgsRef, GenericParamDefKind, TyCtxt}; -use rustc_parse_format::{ParseMode, Parser, Piece, Position}; +use rustc_middle::ty::print::PrintTraitRefExt; +use rustc_middle::ty::{self, GenericArgsRef, GenericParamDef, GenericParamDefKind, TyCtxt}; use rustc_session::lint::builtin::UNKNOWN_OR_MALFORMED_DIAGNOSTIC_ATTRIBUTES; -use rustc_span::{Ident, Span, Symbol, kw, sym}; +use rustc_span::{Span, Symbol, sym}; use tracing::{debug, info}; -use {rustc_attr_parsing as attr, rustc_hir as hir}; use super::{ObligationCauseCode, PredicateObligation}; use crate::error_reporting::TypeErrCtxt; -use crate::errors::{ - EmptyOnClauseInOnUnimplemented, InvalidOnClauseInOnUnimplemented, NoValueInOnUnimplemented, +use crate::error_reporting::traits::on_unimplemented_condition::{ + ConditionOptions, OnUnimplementedCondition, }; +use crate::error_reporting::traits::on_unimplemented_format::{ + Ctx, FormatArgs, FormatString, FormatWarning, +}; +use crate::errors::{InvalidOnClause, NoValueInOnUnimplemented}; use crate::infer::InferCtxtExt; -/// The symbols which are always allowed in a format string -static ALLOWED_FORMAT_SYMBOLS: &[Symbol] = &[ - kw::SelfUpper, - sym::ItemContext, - sym::from_desugaring, - sym::direct, - sym::cause, - sym::integral, - sym::integer_, - sym::float, - sym::_Self, - sym::crate_local, - sym::Trait, -]; - impl<'tcx> TypeErrCtxt<'_, 'tcx> { fn impl_similar_to( &self, @@ -121,86 +108,78 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> { .unwrap_or_else(|| (trait_pred.def_id(), trait_pred.skip_binder().trait_ref.args)); let trait_pred = trait_pred.skip_binder(); - let mut flags = vec![]; + let mut self_types = vec![]; + let mut generic_args: Vec<(Symbol, String)> = vec![]; + let mut crate_local = false; // FIXME(-Zlower-impl-trait-in-trait-to-assoc-ty): HIR is not present for RPITITs, // but I guess we could synthesize one here. We don't see any errors that rely on // that yet, though. - let enclosure = self.describe_enclosure(obligation.cause.body_id).map(|t| t.to_owned()); - flags.push((sym::ItemContext, enclosure)); + let item_context = self.describe_enclosure(obligation.cause.body_id).unwrap_or(""); - match obligation.cause.code() { + let direct = match obligation.cause.code() { ObligationCauseCode::BuiltinDerived(..) | ObligationCauseCode::ImplDerived(..) - | ObligationCauseCode::WellFormedDerived(..) => {} + | ObligationCauseCode::WellFormedDerived(..) => false, _ => { // this is a "direct", user-specified, rather than derived, // obligation. - flags.push((sym::direct, None)); + true } - } - - if let Some(k) = obligation.cause.span.desugaring_kind() { - flags.push((sym::from_desugaring, None)); - flags.push((sym::from_desugaring, Some(format!("{k:?}")))); - } + }; - if let ObligationCauseCode::MainFunctionType = obligation.cause.code() { - flags.push((sym::cause, Some("MainFunctionType".to_string()))); - } + let from_desugaring = obligation.cause.span.desugaring_kind(); - flags.push((sym::Trait, Some(trait_pred.trait_ref.print_trait_sugared().to_string()))); + let cause = if let ObligationCauseCode::MainFunctionType = obligation.cause.code() { + Some("MainFunctionType".to_string()) + } else { + None + }; // Add all types without trimmed paths or visible paths, ensuring they end up with // their "canonical" def path. ty::print::with_no_trimmed_paths!(ty::print::with_no_visible_paths!({ let generics = self.tcx.generics_of(def_id); let self_ty = trait_pred.self_ty(); - // This is also included through the generics list as `Self`, - // but the parser won't allow you to use it - flags.push((sym::_Self, Some(self_ty.to_string()))); + self_types.push(self_ty.to_string()); if let Some(def) = self_ty.ty_adt_def() { // We also want to be able to select self's original // signature with no type arguments resolved - flags.push(( - sym::_Self, - Some(self.tcx.type_of(def.did()).instantiate_identity().to_string()), - )); + self_types.push(self.tcx.type_of(def.did()).instantiate_identity().to_string()); } - for param in generics.own_params.iter() { - let value = match param.kind { + for GenericParamDef { name, kind, index, .. } in generics.own_params.iter() { + let value = match kind { GenericParamDefKind::Type { .. } | GenericParamDefKind::Const { .. } => { - args[param.index as usize].to_string() + args[*index as usize].to_string() } GenericParamDefKind::Lifetime => continue, }; - let name = param.name; - flags.push((name, Some(value))); + generic_args.push((*name, value)); - if let GenericParamDefKind::Type { .. } = param.kind { - let param_ty = args[param.index as usize].expect_ty(); + if let GenericParamDefKind::Type { .. } = kind { + let param_ty = args[*index as usize].expect_ty(); if let Some(def) = param_ty.ty_adt_def() { // We also want to be able to select the parameter's // original signature with no type arguments resolved - flags.push(( - name, - Some(self.tcx.type_of(def.did()).instantiate_identity().to_string()), + generic_args.push(( + *name, + self.tcx.type_of(def.did()).instantiate_identity().to_string(), )); } } } if let Some(true) = self_ty.ty_adt_def().map(|def| def.did().is_local()) { - flags.push((sym::crate_local, None)); + crate_local = true; } // Allow targeting all integers using `{integral}`, even if the exact type was resolved if self_ty.is_integral() { - flags.push((sym::_Self, Some("{integral}".to_owned()))); + self_types.push("{integral}".to_owned()); } if self_ty.is_array_slice() { - flags.push((sym::_Self, Some("&[]".to_owned()))); + self_types.push("&[]".to_owned()); } if self_ty.is_fn() { @@ -215,53 +194,51 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> { hir::Safety::Unsafe => "unsafe fn", } }; - flags.push((sym::_Self, Some(shortname.to_owned()))); + self_types.push(shortname.to_owned()); } // Slices give us `[]`, `[{ty}]` if let ty::Slice(aty) = self_ty.kind() { - flags.push((sym::_Self, Some("[]".to_string()))); + self_types.push("[]".to_owned()); if let Some(def) = aty.ty_adt_def() { // We also want to be able to select the slice's type's original // signature with no type arguments resolved - flags.push(( - sym::_Self, - Some(format!("[{}]", self.tcx.type_of(def.did()).instantiate_identity())), - )); + self_types + .push(format!("[{}]", self.tcx.type_of(def.did()).instantiate_identity())); } if aty.is_integral() { - flags.push((sym::_Self, Some("[{integral}]".to_string()))); + self_types.push("[{integral}]".to_string()); } } // Arrays give us `[]`, `[{ty}; _]` and `[{ty}; N]` if let ty::Array(aty, len) = self_ty.kind() { - flags.push((sym::_Self, Some("[]".to_string()))); + self_types.push("[]".to_string()); let len = len.try_to_target_usize(self.tcx); - flags.push((sym::_Self, Some(format!("[{aty}; _]")))); + self_types.push(format!("[{aty}; _]")); if let Some(n) = len { - flags.push((sym::_Self, Some(format!("[{aty}; {n}]")))); + self_types.push(format!("[{aty}; {n}]")); } if let Some(def) = aty.ty_adt_def() { // We also want to be able to select the array's type's original // signature with no type arguments resolved let def_ty = self.tcx.type_of(def.did()).instantiate_identity(); - flags.push((sym::_Self, Some(format!("[{def_ty}; _]")))); + self_types.push(format!("[{def_ty}; _]")); if let Some(n) = len { - flags.push((sym::_Self, Some(format!("[{def_ty}; {n}]")))); + self_types.push(format!("[{def_ty}; {n}]")); } } if aty.is_integral() { - flags.push((sym::_Self, Some("[{integral}; _]".to_string()))); + self_types.push("[{integral}; _]".to_string()); if let Some(n) = len { - flags.push((sym::_Self, Some(format!("[{{integral}}; {n}]")))); + self_types.push(format!("[{{integral}}; {n}]")); } } } if let ty::Dynamic(traits, _, _) = self_ty.kind() { for t in traits.iter() { if let ty::ExistentialPredicate::Trait(trait_ref) = t.skip_binder() { - flags.push((sym::_Self, Some(self.tcx.def_path_str(trait_ref.def_id)))) + self_types.push(self.tcx.def_path_str(trait_ref.def_id)); } } } @@ -271,34 +248,79 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> { && let ty::Slice(sty) = ref_ty.kind() && sty.is_integral() { - flags.push((sym::_Self, Some("&[{integral}]".to_owned()))); + self_types.push("&[{integral}]".to_owned()); } })); + let this = self.tcx.def_path_str(trait_pred.trait_ref.def_id); + let trait_sugared = trait_pred.trait_ref.print_trait_sugared(); + + let condition_options = ConditionOptions { + self_types, + from_desugaring, + cause, + crate_local, + direct, + generic_args, + }; + + // Unlike the generic_args earlier, + // this one is *not* collected under `with_no_trimmed_paths!` + // for printing the type to the user + // + // This includes `Self`, as it is the first parameter in `own_params`. + let generic_args = self + .tcx + .generics_of(trait_pred.trait_ref.def_id) + .own_params + .iter() + .filter_map(|param| { + let value = match param.kind { + GenericParamDefKind::Type { .. } | GenericParamDefKind::Const { .. } => { + if let Some(ty) = trait_pred.trait_ref.args[param.index as usize].as_type() + { + self.tcx.short_string(ty, long_ty_file) + } else { + trait_pred.trait_ref.args[param.index as usize].to_string() + } + } + GenericParamDefKind::Lifetime => return None, + }; + let name = param.name; + Some((name, value)) + }) + .collect(); + + let format_args = FormatArgs { this, trait_sugared, generic_args, item_context }; + if let Ok(Some(command)) = OnUnimplementedDirective::of_item(self.tcx, def_id) { - command.evaluate(self.tcx, trait_pred.trait_ref, &flags, long_ty_file) + command.evaluate(self.tcx, trait_pred.trait_ref, &condition_options, &format_args) } else { OnUnimplementedNote::default() } } } +/// Represents a format string in a on_unimplemented attribute, +/// like the "content" in `#[diagnostic::on_unimplemented(message = "content")]` #[derive(Clone, Debug)] pub struct OnUnimplementedFormatString { + /// Symbol of the format string, i.e. `"content"` symbol: Symbol, + ///The span of the format string, i.e. `"content"` span: Span, is_diagnostic_namespace_variant: bool, } #[derive(Debug)] pub struct OnUnimplementedDirective { - pub condition: Option, - pub subcommands: Vec, - pub message: Option, - pub label: Option, - pub notes: Vec, - pub parent_label: Option, - pub append_const_msg: Option, + condition: Option, + subcommands: Vec, + message: Option<(Span, OnUnimplementedFormatString)>, + label: Option<(Span, OnUnimplementedFormatString)>, + notes: Vec, + parent_label: Option, + append_const_msg: Option, } /// For the `#[rustc_on_unimplemented]` attribute @@ -329,7 +351,7 @@ pub struct MalformedOnUnimplementedAttrLint { } impl MalformedOnUnimplementedAttrLint { - fn new(span: Span) -> Self { + pub fn new(span: Span) -> Self { Self { span } } } @@ -350,7 +372,7 @@ pub struct IgnoredDiagnosticOption { } impl IgnoredDiagnosticOption { - fn maybe_emit_warning<'tcx>( + pub fn maybe_emit_warning<'tcx>( tcx: TyCtxt<'tcx>, item_def_id: DefId, new: Option, @@ -370,29 +392,11 @@ impl IgnoredDiagnosticOption { } } -#[derive(LintDiagnostic)] -#[diag(trait_selection_unknown_format_parameter_for_on_unimplemented_attr)] -#[help] -pub struct UnknownFormatParameterForOnUnimplementedAttr { - argument_name: Symbol, - trait_name: Ident, -} - -#[derive(LintDiagnostic)] -#[diag(trait_selection_disallowed_positional_argument)] -#[help] -pub struct DisallowedPositionalArgument; - -#[derive(LintDiagnostic)] -#[diag(trait_selection_invalid_format_specifier)] -#[help] -pub struct InvalidFormatSpecifier; - #[derive(LintDiagnostic)] #[diag(trait_selection_wrapped_parser_error)] pub struct WrappedParserError { - description: String, - label: String, + pub description: String, + pub label: String, } impl<'tcx> OnUnimplementedDirective { @@ -407,12 +411,12 @@ impl<'tcx> OnUnimplementedDirective { let mut errored = None; let mut item_iter = items.iter(); - let parse_value = |value_str, value_span| { + let parse_value = |value_str, span| { OnUnimplementedFormatString::try_parse( tcx, item_def_id, value_str, - value_span, + span, is_diagnostic_namespace_variant, ) .map(Some) @@ -423,18 +427,12 @@ impl<'tcx> OnUnimplementedDirective { } else { let cond = item_iter .next() - .ok_or_else(|| tcx.dcx().emit_err(EmptyOnClauseInOnUnimplemented { span }))? - .meta_item_or_bool() - .ok_or_else(|| tcx.dcx().emit_err(InvalidOnClauseInOnUnimplemented { span }))?; - attr::eval_condition(cond, &tcx.sess, Some(tcx.features()), &mut |cfg| { - if let Some(value) = cfg.value - && let Err(guar) = parse_value(value, cfg.span) - { - errored = Some(guar); - } - true - }); - Some(cond.clone()) + .ok_or_else(|| tcx.dcx().emit_err(InvalidOnClause::Empty { span }))?; + + match OnUnimplementedCondition::parse(cond) { + Ok(condition) => Some(condition), + Err(e) => return Err(tcx.dcx().emit_err(e)), + } }; let mut message = None; @@ -444,24 +442,36 @@ impl<'tcx> OnUnimplementedDirective { let mut subcommands = vec![]; let mut append_const_msg = None; + let get_value_and_span = |item: &_, key| { + if let MetaItemInner::MetaItem(MetaItem { + path, + kind: MetaItemKind::NameValue(MetaItemLit { span, kind: LitKind::Str(s, _), .. }), + .. + }) = item + && *path == key + { + Some((*s, *span)) + } else { + None + } + }; + for item in item_iter { - if item.has_name(sym::message) && message.is_none() { - if let Some(message_) = item.value_str() { - message = parse_value(message_, item.span())?; - continue; - } - } else if item.has_name(sym::label) && label.is_none() { - if let Some(label_) = item.value_str() { - label = parse_value(label_, item.span())?; + if let Some((message_, span)) = get_value_and_span(item, sym::message) + && message.is_none() + { + message = parse_value(message_, span)?.map(|l| (item.span(), l)); + continue; + } else if let Some((label_, span)) = get_value_and_span(item, sym::label) + && label.is_none() + { + label = parse_value(label_, span)?.map(|l| (item.span(), l)); + continue; + } else if let Some((note_, span)) = get_value_and_span(item, sym::note) { + if let Some(note) = parse_value(note_, span)? { + notes.push(note); continue; } - } else if item.has_name(sym::note) { - if let Some(note_) = item.value_str() { - if let Some(note) = parse_value(note_, item.span())? { - notes.push(note); - continue; - } - } } else if item.has_name(sym::parent_label) && parent_label.is_none() && !is_diagnostic_namespace_variant @@ -539,6 +549,13 @@ impl<'tcx> OnUnimplementedDirective { } pub fn of_item(tcx: TyCtxt<'tcx>, item_def_id: DefId) -> Result, ErrorGuaranteed> { + if !tcx.is_trait(item_def_id) { + // It could be a trait_alias (`trait MyTrait = SomeOtherTrait`) + // or an implementation (`impl MyTrait for Foo {}`) + // + // We don't support those. + return Ok(None); + } if let Some(attr) = tcx.get_attr(item_def_id, sym::rustc_on_unimplemented) { return Self::parse_attribute(attr, false, tcx, item_def_id); } else { @@ -554,15 +571,15 @@ impl<'tcx> OnUnimplementedDirective { IgnoredDiagnosticOption::maybe_emit_warning( tcx, item_def_id, - directive.message.as_ref().map(|f| f.span), - aggr.message.as_ref().map(|f| f.span), + directive.message.as_ref().map(|f| f.0), + aggr.message.as_ref().map(|f| f.0), "message", ); IgnoredDiagnosticOption::maybe_emit_warning( tcx, item_def_id, - directive.label.as_ref().map(|f| f.span), - aggr.label.as_ref().map(|f| f.span), + directive.label.as_ref().map(|f| f.0), + aggr.label.as_ref().map(|f| f.0), "label", ); IgnoredDiagnosticOption::maybe_emit_warning( @@ -636,13 +653,16 @@ impl<'tcx> OnUnimplementedDirective { condition: None, message: None, subcommands: vec![], - label: Some(OnUnimplementedFormatString::try_parse( - tcx, - item_def_id, - value, + label: Some(( attr.span(), - is_diagnostic_namespace_variant, - )?), + OnUnimplementedFormatString::try_parse( + tcx, + item_def_id, + value, + attr.value_span().unwrap_or(attr.span()), + is_diagnostic_namespace_variant, + )?, + )), notes: Vec::new(), parent_label: None, append_const_msg: None, @@ -698,47 +718,27 @@ impl<'tcx> OnUnimplementedDirective { result } - pub fn evaluate( + pub(crate) fn evaluate( &self, tcx: TyCtxt<'tcx>, trait_ref: ty::TraitRef<'tcx>, - options: &[(Symbol, Option)], - long_ty_file: &mut Option, + condition_options: &ConditionOptions, + args: &FormatArgs<'tcx>, ) -> OnUnimplementedNote { let mut message = None; let mut label = None; let mut notes = Vec::new(); let mut parent_label = None; let mut append_const_msg = None; - info!("evaluate({:?}, trait_ref={:?}, options={:?})", self, trait_ref, options); - - let options_map: FxHashMap = - options.iter().filter_map(|(k, v)| v.clone().map(|v| (*k, v))).collect(); + info!( + "evaluate({:?}, trait_ref={:?}, options={:?}, args ={:?})", + self, trait_ref, condition_options, args + ); for command in self.subcommands.iter().chain(Some(self)).rev() { debug!(?command); if let Some(ref condition) = command.condition - && !attr::eval_condition(condition, &tcx.sess, Some(tcx.features()), &mut |cfg| { - let value = cfg.value.map(|v| { - // `with_no_visible_paths` is also used when generating the options, - // so we need to match it here. - ty::print::with_no_visible_paths!( - OnUnimplementedFormatString { - symbol: v, - span: cfg.span, - is_diagnostic_namespace_variant: false - } - .format( - tcx, - trait_ref, - &options_map, - long_ty_file - ) - ) - }); - - options.contains(&(cfg.name, value)) - }) + && !condition.matches_predicate(condition_options) { debug!("evaluate: skipping {:?} due to condition", command); continue; @@ -762,14 +762,10 @@ impl<'tcx> OnUnimplementedDirective { } OnUnimplementedNote { - label: label.map(|l| l.format(tcx, trait_ref, &options_map, long_ty_file)), - message: message.map(|m| m.format(tcx, trait_ref, &options_map, long_ty_file)), - notes: notes - .into_iter() - .map(|n| n.format(tcx, trait_ref, &options_map, long_ty_file)) - .collect(), - parent_label: parent_label - .map(|e_s| e_s.format(tcx, trait_ref, &options_map, long_ty_file)), + label: label.map(|l| l.1.format(tcx, trait_ref, args)), + message: message.map(|m| m.1.format(tcx, trait_ref, args)), + notes: notes.into_iter().map(|n| n.format(tcx, trait_ref, args)).collect(), + parent_label: parent_label.map(|e_s| e_s.format(tcx, trait_ref, args)), append_const_msg, } } @@ -780,142 +776,95 @@ impl<'tcx> OnUnimplementedFormatString { tcx: TyCtxt<'tcx>, item_def_id: DefId, from: Symbol, - value_span: Span, + span: Span, is_diagnostic_namespace_variant: bool, ) -> Result { - let result = OnUnimplementedFormatString { - symbol: from, - span: value_span, - is_diagnostic_namespace_variant, - }; + let result = + OnUnimplementedFormatString { symbol: from, span, is_diagnostic_namespace_variant }; result.verify(tcx, item_def_id)?; Ok(result) } - fn verify(&self, tcx: TyCtxt<'tcx>, item_def_id: DefId) -> Result<(), ErrorGuaranteed> { - let trait_def_id = if tcx.is_trait(item_def_id) { - item_def_id + fn verify(&self, tcx: TyCtxt<'tcx>, trait_def_id: DefId) -> Result<(), ErrorGuaranteed> { + if !tcx.is_trait(trait_def_id) { + return Ok(()); + }; + + let ctx = if self.is_diagnostic_namespace_variant { + Ctx::DiagnosticOnUnimplemented { tcx, trait_def_id } } else { - tcx.trait_id_of_impl(item_def_id) - .expect("expected `on_unimplemented` to correspond to a trait") + Ctx::RustcOnUnimplemented { tcx, trait_def_id } }; - let trait_name = tcx.item_ident(trait_def_id); - let generics = tcx.generics_of(item_def_id); - let s = self.symbol.as_str(); - let mut parser = Parser::new(s, None, None, false, ParseMode::Format); + let mut result = Ok(()); - for token in &mut parser { - match token { - Piece::Lit(_) => (), // Normal string, no need to check it - Piece::NextArgument(a) => { - let format_spec = a.format; - if self.is_diagnostic_namespace_variant - && (format_spec.ty_span.is_some() - || format_spec.width_span.is_some() - || format_spec.precision_span.is_some() - || format_spec.fill_span.is_some()) - { - if let Some(item_def_id) = item_def_id.as_local() { - tcx.emit_node_span_lint( - UNKNOWN_OR_MALFORMED_DIAGNOSTIC_ATTRIBUTES, - tcx.local_def_id_to_hir_id(item_def_id), - self.span, - InvalidFormatSpecifier, - ); - } + + match FormatString::parse(self.symbol, self.span, &ctx) { + // Warnings about format specifiers, deprecated parameters, wrong parameters etc. + // In other words we'd like to let the author know, but we can still try to format the string later + Ok(FormatString { warnings, .. }) => { + if self.is_diagnostic_namespace_variant { + for w in warnings { + w.emit_warning(tcx, trait_def_id) } - match a.position { - Position::ArgumentNamed(s) => { - match Symbol::intern(s) { - // `{ThisTraitsName}` is allowed - s if s == trait_name.name - && !self.is_diagnostic_namespace_variant => - { - () - } - s if ALLOWED_FORMAT_SYMBOLS.contains(&s) - && !self.is_diagnostic_namespace_variant => - { - () - } - // So is `{A}` if A is a type parameter - s if generics.own_params.iter().any(|param| param.name == s) => (), - s => { - if self.is_diagnostic_namespace_variant { - if let Some(item_def_id) = item_def_id.as_local() { - tcx.emit_node_span_lint( - UNKNOWN_OR_MALFORMED_DIAGNOSTIC_ATTRIBUTES, - tcx.local_def_id_to_hir_id(item_def_id), - self.span, - UnknownFormatParameterForOnUnimplementedAttr { - argument_name: s, - trait_name, - }, - ); - } - } else { - result = Err(struct_span_code_err!( - tcx.dcx(), - self.span, - E0230, - "there is no parameter `{}` on {}", - s, - if trait_def_id == item_def_id { - format!("trait `{trait_name}`") - } else { - "impl".to_string() - } - ) - .emit()); - } - } + } else { + for w in warnings { + match w { + FormatWarning::UnknownParam { argument_name, span } => { + let reported = struct_span_code_err!( + tcx.dcx(), + span, + E0230, + "cannot find parameter {} on this trait", + argument_name, + ) + .emit(); + result = Err(reported); } - } - // `{:1}` and `{}` are not to be used - Position::ArgumentIs(..) | Position::ArgumentImplicitlyIs(_) => { - if self.is_diagnostic_namespace_variant { - if let Some(item_def_id) = item_def_id.as_local() { - tcx.emit_node_span_lint( - UNKNOWN_OR_MALFORMED_DIAGNOSTIC_ATTRIBUTES, - tcx.local_def_id_to_hir_id(item_def_id), - self.span, - DisallowedPositionalArgument, - ); - } - } else { + FormatWarning::PositionalArgument { span, .. } => { let reported = struct_span_code_err!( tcx.dcx(), - self.span, + span, E0231, - "only named generic parameters are allowed" + "positional format arguments are not allowed here" ) .emit(); result = Err(reported); } + FormatWarning::InvalidSpecifier { .. } + | FormatWarning::FutureIncompat { .. } => {} } } } } - } - // we cannot return errors from processing the format string as hard error here - // as the diagnostic namespace guarantees that malformed input cannot cause an error - // - // if we encounter any error while processing we nevertheless want to show it as warning - // so that users are aware that something is not correct - for e in parser.errors { - if self.is_diagnostic_namespace_variant { - if let Some(item_def_id) = item_def_id.as_local() { - tcx.emit_node_span_lint( - UNKNOWN_OR_MALFORMED_DIAGNOSTIC_ATTRIBUTES, - tcx.local_def_id_to_hir_id(item_def_id), - self.span, - WrappedParserError { description: e.description, label: e.label }, - ); + // Errors from the underlying `rustc_parse_format::Parser` + Err(errors) => { + // we cannot return errors from processing the format string as hard error here + // as the diagnostic namespace guarantees that malformed input cannot cause an error + // + // if we encounter any error while processing we nevertheless want to show it as warning + // so that users are aware that something is not correct + for e in errors { + if self.is_diagnostic_namespace_variant { + if let Some(trait_def_id) = trait_def_id.as_local() { + tcx.emit_node_span_lint( + UNKNOWN_OR_MALFORMED_DIAGNOSTIC_ATTRIBUTES, + tcx.local_def_id_to_hir_id(trait_def_id), + self.span, + WrappedParserError { description: e.description, label: e.label }, + ); + } + } else { + let reported = struct_span_code_err!( + tcx.dcx(), + self.span, + E0231, + "{}", + e.description, + ) + .emit(); + result = Err(reported); + } } - } else { - let reported = - struct_span_code_err!(tcx.dcx(), self.span, E0231, "{}", e.description,).emit(); - result = Err(reported); } } @@ -926,98 +875,28 @@ impl<'tcx> OnUnimplementedFormatString { &self, tcx: TyCtxt<'tcx>, trait_ref: ty::TraitRef<'tcx>, - options: &FxHashMap, - long_ty_file: &mut Option, + args: &FormatArgs<'tcx>, ) -> String { - let name = tcx.item_name(trait_ref.def_id); - let trait_str = tcx.def_path_str(trait_ref.def_id); - let generics = tcx.generics_of(trait_ref.def_id); - let generic_map = generics - .own_params - .iter() - .filter_map(|param| { - let value = match param.kind { - GenericParamDefKind::Type { .. } | GenericParamDefKind::Const { .. } => { - if let Some(ty) = trait_ref.args[param.index as usize].as_type() { - tcx.short_string(ty, long_ty_file) - } else { - trait_ref.args[param.index as usize].to_string() - } - } - GenericParamDefKind::Lifetime => return None, - }; - let name = param.name; - Some((name, value)) - }) - .collect::>(); - let empty_string = String::new(); - - let s = self.symbol.as_str(); - let mut parser = Parser::new(s, None, None, false, ParseMode::Format); - let item_context = (options.get(&sym::ItemContext)).unwrap_or(&empty_string); - let constructed_message = (&mut parser) - .map(|p| match p { - Piece::Lit(s) => s.to_owned(), - Piece::NextArgument(a) => match a.position { - Position::ArgumentNamed(arg) => { - let s = Symbol::intern(arg); - match generic_map.get(&s) { - Some(val) => val.to_string(), - None if self.is_diagnostic_namespace_variant => { - format!("{{{arg}}}") - } - None if s == name => trait_str.clone(), - None => { - if let Some(val) = options.get(&s) { - val.clone() - } else if s == sym::from_desugaring { - // don't break messages using these two arguments incorrectly - String::new() - } else if s == sym::ItemContext - && !self.is_diagnostic_namespace_variant - { - item_context.clone() - } else if s == sym::integral { - String::from("{integral}") - } else if s == sym::integer_ { - String::from("{integer}") - } else if s == sym::float { - String::from("{float}") - } else { - bug!( - "broken on_unimplemented {:?} for {:?}: \ - no argument matching {:?}", - self.symbol, - trait_ref, - s - ) - } - } - } - } - Position::ArgumentImplicitlyIs(_) if self.is_diagnostic_namespace_variant => { - String::from("{}") - } - Position::ArgumentIs(idx) if self.is_diagnostic_namespace_variant => { - format!("{{{idx}}}") - } - _ => bug!("broken on_unimplemented {:?} - bad format arg", self.symbol), - }, - }) - .collect(); - // we cannot return errors from processing the format string as hard error here - // as the diagnostic namespace guarantees that malformed input cannot cause an error - // - // if we encounter any error while processing the format string - // we don't want to show the potentially half assembled formatted string, - // therefore we fall back to just showing the input string in this case - // - // The actual parser errors are emitted earlier - // as lint warnings in OnUnimplementedFormatString::verify - if self.is_diagnostic_namespace_variant && !parser.errors.is_empty() { - String::from(s) + let trait_def_id = trait_ref.def_id; + let ctx = if self.is_diagnostic_namespace_variant { + Ctx::DiagnosticOnUnimplemented { tcx, trait_def_id } + } else { + Ctx::RustcOnUnimplemented { tcx, trait_def_id } + }; + + if let Ok(s) = FormatString::parse(self.symbol, self.span, &ctx) { + s.format(args) } else { - constructed_message + // we cannot return errors from processing the format string as hard error here + // as the diagnostic namespace guarantees that malformed input cannot cause an error + // + // if we encounter any error while processing the format string + // we don't want to show the potentially half assembled formatted string, + // therefore we fall back to just showing the input string in this case + // + // The actual parser errors are emitted earlier + // as lint warnings in OnUnimplementedFormatString::verify + self.symbol.as_str().into() } } } diff --git a/compiler/rustc_trait_selection/src/error_reporting/traits/on_unimplemented_condition.rs b/compiler/rustc_trait_selection/src/error_reporting/traits/on_unimplemented_condition.rs new file mode 100644 index 0000000000000..13753761f0923 --- /dev/null +++ b/compiler/rustc_trait_selection/src/error_reporting/traits/on_unimplemented_condition.rs @@ -0,0 +1,317 @@ +use rustc_ast::{MetaItemInner, MetaItemKind, MetaItemLit}; +use rustc_parse_format::{ParseMode, Parser, Piece, Position}; +use rustc_span::{DesugaringKind, Ident, Span, Symbol, kw, sym}; + +use crate::errors::InvalidOnClause; + +/// Represents the `on` filter in `#[rustc_on_unimplemented]`. +#[derive(Debug)] +pub(crate) struct OnUnimplementedCondition { + span: Span, + pred: Predicate, +} + +impl OnUnimplementedCondition { + pub(crate) fn span(&self) -> Span { + self.span + } + + pub(crate) fn matches_predicate(&self, options: &ConditionOptions) -> bool { + self.pred.eval(&mut |p| match p { + FlagOrNv::Flag(b) => options.has_flag(*b), + FlagOrNv::NameValue(NameValue { name, value }) => { + let value = value.format(&options.generic_args); + options.contains(*name, value) + } + }) + } + + pub(crate) fn parse(input: &MetaItemInner) -> Result { + let span = input.span(); + let pred = Predicate::parse(input)?; + Ok(OnUnimplementedCondition { span, pred }) + } +} + +/// Predicate(s) in `#[rustc_on_unimplemented]`'s `on` filter. See [`OnUnimplementedCondition`]. +/// +/// It is similar to the predicate in the `cfg` attribute, +/// and may contain nested predicates. +#[derive(Debug)] +enum Predicate { + /// A condition like `on(crate_local)`. + Flag(Flag), + /// A match, like `on(Rhs = "Whatever")`. + Match(NameValue), + /// Negation, like `on(not($pred))`. + Not(Box), + /// True if all predicates are true, like `on(all($a, $b, $c))`. + All(Vec), + /// True if any predicate is true, like `on(any($a, $b, $c))`. + Any(Vec), +} + +impl Predicate { + fn parse(input: &MetaItemInner) -> Result { + let meta_item = match input { + MetaItemInner::MetaItem(meta_item) => meta_item, + MetaItemInner::Lit(lit) => { + return Err(InvalidOnClause::UnsupportedLiteral { span: lit.span }); + } + }; + + let Some(predicate) = meta_item.ident() else { + return Err(InvalidOnClause::ExpectedIdentifier { + span: meta_item.path.span, + path: meta_item.path.clone(), + }); + }; + + match meta_item.kind { + MetaItemKind::List(ref mis) => match predicate.name { + sym::any => Ok(Predicate::Any(Predicate::parse_sequence(mis)?)), + sym::all => Ok(Predicate::All(Predicate::parse_sequence(mis)?)), + sym::not => match &**mis { + [one] => Ok(Predicate::Not(Box::new(Predicate::parse(one)?))), + [first, .., last] => Err(InvalidOnClause::ExpectedOnePredInNot { + span: first.span().to(last.span()), + }), + [] => Err(InvalidOnClause::ExpectedOnePredInNot { span: meta_item.span }), + }, + invalid_pred => { + Err(InvalidOnClause::InvalidPredicate { span: predicate.span, invalid_pred }) + } + }, + MetaItemKind::NameValue(MetaItemLit { symbol, .. }) => { + let name = Name::parse(predicate); + let value = FilterFormatString::parse(symbol); + let kv = NameValue { name, value }; + Ok(Predicate::Match(kv)) + } + MetaItemKind::Word => { + let flag = Flag::parse(predicate)?; + Ok(Predicate::Flag(flag)) + } + } + } + + fn parse_sequence(sequence: &[MetaItemInner]) -> Result, InvalidOnClause> { + sequence.iter().map(Predicate::parse).collect() + } + + fn eval(&self, eval: &mut impl FnMut(FlagOrNv<'_>) -> bool) -> bool { + match self { + Predicate::Flag(flag) => eval(FlagOrNv::Flag(flag)), + Predicate::Match(nv) => eval(FlagOrNv::NameValue(nv)), + Predicate::Not(not) => !not.eval(eval), + Predicate::All(preds) => preds.into_iter().all(|pred| pred.eval(eval)), + Predicate::Any(preds) => preds.into_iter().any(|pred| pred.eval(eval)), + } + } +} + +/// Represents a `MetaWord` in an `on`-filter. +#[derive(Debug, Clone, Copy)] +enum Flag { + /// Whether the code causing the trait bound to not be fulfilled + /// is part of the user's crate. + CrateLocal, + /// Whether the obligation is user-specified rather than derived. + Direct, + /// Whether we are in some kind of desugaring like + /// `?` or `try { .. }`. + FromDesugaring, +} + +impl Flag { + fn parse(Ident { name, span }: Ident) -> Result { + match name { + sym::crate_local => Ok(Flag::CrateLocal), + sym::direct => Ok(Flag::Direct), + sym::from_desugaring => Ok(Flag::FromDesugaring), + invalid_flag => Err(InvalidOnClause::InvalidFlag { invalid_flag, span }), + } + } +} + +/// A `MetaNameValueStr` in an `on`-filter. +/// +/// For example, `#[rustc_on_unimplemented(on(name = "value", message = "hello"))]`. +#[derive(Debug, Clone)] +struct NameValue { + name: Name, + /// Something like `"&str"` or `"alloc::string::String"`, + /// in which case it just contains a single string piece. + /// But if it is something like `"&[{A}]"` then it must be formatted later. + value: FilterFormatString, +} + +/// The valid names of the `on` filter. +#[derive(Debug, Clone, Copy)] +enum Name { + Cause, + FromDesugaring, + SelfUpper, + GenericArg(Symbol), +} + +impl Name { + fn parse(Ident { name, .. }: Ident) -> Self { + match name { + sym::_Self | kw::SelfUpper => Name::SelfUpper, + sym::from_desugaring => Name::FromDesugaring, + sym::cause => Name::Cause, + // FIXME(mejrs) Perhaps we should start checking that + // this actually is a valid generic parameter? + generic => Name::GenericArg(generic), + } + } +} + +#[derive(Debug, Clone)] +enum FlagOrNv<'p> { + Flag(&'p Flag), + NameValue(&'p NameValue), +} + +/// Represents a value inside an `on` filter. +/// +/// For example, `#[rustc_on_unimplemented(on(name = "value", message = "hello"))]`. +/// If it is a simple literal like this then `pieces` will be `[LitOrArg::Lit("value")]`. +/// The `Arg` variant is used when it contains formatting like +/// `#[rustc_on_unimplemented(on(Self = "&[{A}]", message = "hello"))]`. +#[derive(Debug, Clone)] +struct FilterFormatString { + pieces: Vec, +} + +#[derive(Debug, Clone)] +enum LitOrArg { + Lit(String), + Arg(String), +} + +impl FilterFormatString { + fn parse(input: Symbol) -> Self { + let pieces = Parser::new(input.as_str(), None, None, false, ParseMode::Format) + .map(|p| match p { + Piece::Lit(s) => LitOrArg::Lit(s.to_owned()), + // We just ignore formatspecs here + Piece::NextArgument(a) => match a.position { + // In `TypeErrCtxt::on_unimplemented_note` we substitute `"{integral}"` even + // if the integer type has been resolved, to allow targeting all integers. + // `"{integer}"` and `"{float}"` come from numerics that haven't been inferred yet, + // from the `Display` impl of `InferTy` to be precise. + // + // Don't try to format these later! + Position::ArgumentNamed(arg @ "integer" | arg @ "integral" | arg @ "float") => { + LitOrArg::Lit(format!("{{{arg}}}")) + } + + // FIXME(mejrs) We should check if these correspond to a generic of the trait. + Position::ArgumentNamed(arg) => LitOrArg::Arg(arg.to_owned()), + + // FIXME(mejrs) These should really be warnings/errors + Position::ArgumentImplicitlyIs(_) => LitOrArg::Lit(String::from("{}")), + Position::ArgumentIs(idx) => LitOrArg::Lit(format!("{{{idx}}}")), + }, + }) + .collect(); + Self { pieces } + } + + fn format(&self, generic_args: &[(Symbol, String)]) -> String { + let mut ret = String::new(); + + for piece in &self.pieces { + match piece { + LitOrArg::Lit(s) => ret.push_str(s), + LitOrArg::Arg(arg) => { + let s = Symbol::intern(arg); + match generic_args.iter().find(|(k, _)| *k == s) { + Some((_, val)) => ret.push_str(val), + None => { + // FIXME(mejrs) If we start checking as mentioned in + // FilterFormatString::parse then this shouldn't happen + let _ = std::fmt::write(&mut ret, format_args!("{{{s}}}")); + } + } + } + } + } + + ret + } +} + +/// Used with `OnUnimplementedCondition::matches_predicate` to evaluate the +/// [`OnUnimplementedCondition`]. +/// +/// For example, given a +/// ```rust,ignore (just an example) +/// #[rustc_on_unimplemented( +/// on(all(from_desugaring = "QuestionMark"), +/// message = "the `?` operator can only be used in {ItemContext} \ +/// that returns `Result` or `Option` \ +/// (or another type that implements `{FromResidual}`)", +/// label = "cannot use the `?` operator in {ItemContext} that returns `{Self}`", +/// parent_label = "this function should return `Result` or `Option` to accept `?`" +/// ), +/// )] +/// pub trait FromResidual::Residual> { +/// ... +/// } +/// +/// async fn an_async_function() -> u32 { +/// let x: Option = None; +/// x?; //~ ERROR the `?` operator +/// 22 +/// } +/// ``` +/// it will look like this: +/// +/// ```rust,ignore (just an example) +/// ConditionOptions { +/// self_types: ["u32", "{integral}"], +/// from_desugaring: Some("QuestionMark"), +/// cause: None, +/// crate_local: false, +/// direct: true, +/// generic_args: [("Self","u32"), +/// ("R", "core::option::Option"), +/// ("R", "core::option::Option" ), +/// ], +/// } +/// ``` +#[derive(Debug)] +pub(crate) struct ConditionOptions { + /// All the self types that may apply. + pub(crate) self_types: Vec, + // The kind of compiler desugaring. + pub(crate) from_desugaring: Option, + /// Match on a variant of [rustc_infer::traits::ObligationCauseCode]. + pub(crate) cause: Option, + pub(crate) crate_local: bool, + /// Is the obligation "directly" user-specified, rather than derived? + pub(crate) direct: bool, + // A list of the generic arguments and their reified types. + pub(crate) generic_args: Vec<(Symbol, String)>, +} + +impl ConditionOptions { + fn has_flag(&self, name: Flag) -> bool { + match name { + Flag::CrateLocal => self.crate_local, + Flag::Direct => self.direct, + Flag::FromDesugaring => self.from_desugaring.is_some(), + } + } + fn contains(&self, name: Name, value: String) -> bool { + match name { + Name::SelfUpper => self.self_types.contains(&value), + Name::FromDesugaring => self.from_desugaring.is_some_and(|ds| ds.matches(&value)), + Name::Cause => self.cause == Some(value), + Name::GenericArg(arg) => self.generic_args.contains(&(arg, value)), + } + } +} diff --git a/compiler/rustc_trait_selection/src/error_reporting/traits/on_unimplemented_format.rs b/compiler/rustc_trait_selection/src/error_reporting/traits/on_unimplemented_format.rs new file mode 100644 index 0000000000000..d8b90844b7d2f --- /dev/null +++ b/compiler/rustc_trait_selection/src/error_reporting/traits/on_unimplemented_format.rs @@ -0,0 +1,413 @@ +use std::fmt; +use std::ops::Range; + +use errors::*; +use rustc_middle::ty::TyCtxt; +use rustc_middle::ty::print::TraitRefPrintSugared; +use rustc_parse_format::{ + Alignment, Argument, Count, FormatSpec, ParseError, ParseMode, Parser, Piece as RpfPiece, + Position, +}; +use rustc_session::lint::builtin::UNKNOWN_OR_MALFORMED_DIAGNOSTIC_ATTRIBUTES; +use rustc_span::def_id::DefId; +use rustc_span::{BytePos, Pos, Span, Symbol, kw, sym}; + +/// Like [std::fmt::Arguments] this is a string that has been parsed into "pieces", +/// either as string pieces or dynamic arguments. +#[derive(Debug)] +pub struct FormatString { + #[allow(dead_code, reason = "Debug impl")] + input: Symbol, + span: Span, + pieces: Vec, + /// The formatting string was parsed succesfully but with warnings + pub warnings: Vec, +} + +#[derive(Debug)] +enum Piece { + Lit(String), + Arg(FormatArg), +} + +#[derive(Debug)] +enum FormatArg { + // A generic parameter, like `{T}` if we're on the `From` trait. + GenericParam { + generic_param: Symbol, + }, + // `{Self}` + SelfUpper, + /// `{This}` or `{TraitName}` + This, + /// The sugared form of the trait + Trait, + /// what we're in, like a function, method, closure etc. + ItemContext, + /// What the user typed, if it doesn't match anything we can use. + AsIs(String), +} + +pub enum Ctx<'tcx> { + // `#[rustc_on_unimplemented]` + RustcOnUnimplemented { tcx: TyCtxt<'tcx>, trait_def_id: DefId }, + // `#[diagnostic::...]` + DiagnosticOnUnimplemented { tcx: TyCtxt<'tcx>, trait_def_id: DefId }, +} + +#[derive(Debug)] +pub enum FormatWarning { + UnknownParam { argument_name: Symbol, span: Span }, + PositionalArgument { span: Span, help: String }, + InvalidSpecifier { name: String, span: Span }, + FutureIncompat { span: Span, help: String }, +} + +impl FormatWarning { + pub fn emit_warning<'tcx>(&self, tcx: TyCtxt<'tcx>, item_def_id: DefId) { + match *self { + FormatWarning::UnknownParam { argument_name, span } => { + let this = tcx.item_ident(item_def_id); + if let Some(item_def_id) = item_def_id.as_local() { + tcx.emit_node_span_lint( + UNKNOWN_OR_MALFORMED_DIAGNOSTIC_ATTRIBUTES, + tcx.local_def_id_to_hir_id(item_def_id), + span, + UnknownFormatParameterForOnUnimplementedAttr { + argument_name, + trait_name: this, + }, + ); + } + } + FormatWarning::PositionalArgument { span, .. } => { + if let Some(item_def_id) = item_def_id.as_local() { + tcx.emit_node_span_lint( + UNKNOWN_OR_MALFORMED_DIAGNOSTIC_ATTRIBUTES, + tcx.local_def_id_to_hir_id(item_def_id), + span, + DisallowedPositionalArgument, + ); + } + } + FormatWarning::InvalidSpecifier { span, .. } => { + if let Some(item_def_id) = item_def_id.as_local() { + tcx.emit_node_span_lint( + UNKNOWN_OR_MALFORMED_DIAGNOSTIC_ATTRIBUTES, + tcx.local_def_id_to_hir_id(item_def_id), + span, + InvalidFormatSpecifier, + ); + } + } + FormatWarning::FutureIncompat { .. } => { + // We've never deprecated anything in diagnostic namespace format strings + // but if we do we will emit a warning here + + // FIXME(mejrs) in a couple releases, start emitting warnings for + // #[rustc_on_unimplemented] deprecated args + } + } + } +} + +/// Arguments to fill a [FormatString] with. +/// +/// For example, given a +/// ```rust,ignore (just an example) +/// +/// #[rustc_on_unimplemented( +/// on(all(from_desugaring = "QuestionMark"), +/// message = "the `?` operator can only be used in {ItemContext} \ +/// that returns `Result` or `Option` \ +/// (or another type that implements `{FromResidual}`)", +/// label = "cannot use the `?` operator in {ItemContext} that returns `{Self}`", +/// parent_label = "this function should return `Result` or `Option` to accept `?`" +/// ), +/// )] +/// pub trait FromResidual::Residual> { +/// ... +/// } +/// +/// async fn an_async_function() -> u32 { +/// let x: Option = None; +/// x?; //~ ERROR the `?` operator +/// 22 +/// } +/// ``` +/// it will look like this: +/// +/// ```rust,ignore (just an example) +/// FormatArgs { +/// this: "FromResidual", +/// trait_sugared: "FromResidual>", +/// item_context: "an async function", +/// generic_args: [("Self", "u32"), ("R", "Option")], +/// } +/// ``` +#[derive(Debug)] +pub struct FormatArgs<'tcx> { + pub this: String, + pub trait_sugared: TraitRefPrintSugared<'tcx>, + pub item_context: &'static str, + pub generic_args: Vec<(Symbol, String)>, +} + +impl FormatString { + pub fn span(&self) -> Span { + self.span + } + + pub fn parse<'tcx>( + input: Symbol, + span: Span, + ctx: &Ctx<'tcx>, + ) -> Result> { + let s = input.as_str(); + let mut parser = Parser::new(s, None, None, false, ParseMode::Format); + let mut pieces = Vec::new(); + let mut warnings = Vec::new(); + + for piece in &mut parser { + match piece { + RpfPiece::Lit(lit) => { + pieces.push(Piece::Lit(lit.into())); + } + RpfPiece::NextArgument(arg) => { + warn_on_format_spec(arg.format.clone(), &mut warnings, span); + let arg = parse_arg(&arg, ctx, &mut warnings, span); + pieces.push(Piece::Arg(arg)); + } + } + } + + if parser.errors.is_empty() { + Ok(FormatString { input, pieces, span, warnings }) + } else { + Err(parser.errors) + } + } + + pub fn format(&self, args: &FormatArgs<'_>) -> String { + let mut ret = String::new(); + for piece in &self.pieces { + match piece { + Piece::Lit(s) | Piece::Arg(FormatArg::AsIs(s)) => ret.push_str(&s), + + // `A` if we have `trait Trait {}` and `note = "i'm the actual type of {A}"` + Piece::Arg(FormatArg::GenericParam { generic_param }) => { + // Should always be some but we can't raise errors here + let value = match args.generic_args.iter().find(|(p, _)| p == generic_param) { + Some((_, val)) => val.to_string(), + None => generic_param.to_string(), + }; + ret.push_str(&value); + } + // `{Self}` + Piece::Arg(FormatArg::SelfUpper) => { + let slf = match args.generic_args.iter().find(|(p, _)| *p == kw::SelfUpper) { + Some((_, val)) => val.to_string(), + None => "Self".to_string(), + }; + ret.push_str(&slf); + } + + // It's only `rustc_onunimplemented` from here + Piece::Arg(FormatArg::This) => ret.push_str(&args.this), + Piece::Arg(FormatArg::Trait) => { + let _ = fmt::write(&mut ret, format_args!("{}", &args.trait_sugared)); + } + Piece::Arg(FormatArg::ItemContext) => ret.push_str(args.item_context), + } + } + ret + } +} + +fn parse_arg<'tcx>( + arg: &Argument<'_>, + ctx: &Ctx<'tcx>, + warnings: &mut Vec, + input_span: Span, +) -> FormatArg { + let (Ctx::RustcOnUnimplemented { tcx, trait_def_id } + | Ctx::DiagnosticOnUnimplemented { tcx, trait_def_id }) = ctx; + let trait_name = tcx.item_ident(*trait_def_id); + let generics = tcx.generics_of(trait_def_id); + let span = slice_span(input_span, arg.position_span.clone()); + + match arg.position { + // Something like "hello {name}" + Position::ArgumentNamed(name) => match (ctx, Symbol::intern(name)) { + // accepted, but deprecated + (Ctx::RustcOnUnimplemented { .. }, sym::_Self) => { + warnings + .push(FormatWarning::FutureIncompat { span, help: String::from("use {Self}") }); + FormatArg::SelfUpper + } + ( + Ctx::RustcOnUnimplemented { .. }, + sym::from_desugaring + | sym::crate_local + | sym::direct + | sym::cause + | sym::float + | sym::integer_ + | sym::integral, + ) => { + warnings.push(FormatWarning::FutureIncompat { + span, + help: String::from("don't use this in a format string"), + }); + FormatArg::AsIs(String::new()) + } + + // Only `#[rustc_on_unimplemented]` can use these + (Ctx::RustcOnUnimplemented { .. }, sym::ItemContext) => FormatArg::ItemContext, + (Ctx::RustcOnUnimplemented { .. }, sym::This) => FormatArg::This, + (Ctx::RustcOnUnimplemented { .. }, sym::Trait) => FormatArg::Trait, + // `{ThisTraitsName}`. Some attrs in std use this, but I'd like to change it to the more general `{This}` + // because that'll be simpler to parse and extend in the future + (Ctx::RustcOnUnimplemented { .. }, name) if name == trait_name.name => { + warnings + .push(FormatWarning::FutureIncompat { span, help: String::from("use {This}") }); + FormatArg::This + } + + // Any attribute can use these + ( + Ctx::RustcOnUnimplemented { .. } | Ctx::DiagnosticOnUnimplemented { .. }, + kw::SelfUpper, + ) => FormatArg::SelfUpper, + ( + Ctx::RustcOnUnimplemented { .. } | Ctx::DiagnosticOnUnimplemented { .. }, + generic_param, + ) if generics.own_params.iter().any(|param| param.name == generic_param) => { + FormatArg::GenericParam { generic_param } + } + + (_, argument_name) => { + warnings.push(FormatWarning::UnknownParam { argument_name, span }); + FormatArg::AsIs(format!("{{{}}}", argument_name.as_str())) + } + }, + + // `{:1}` and `{}` are ignored + Position::ArgumentIs(idx) => { + warnings.push(FormatWarning::PositionalArgument { + span, + help: format!("use `{{{idx}}}` to print a number in braces"), + }); + FormatArg::AsIs(format!("{{{idx}}}")) + } + Position::ArgumentImplicitlyIs(_) => { + warnings.push(FormatWarning::PositionalArgument { + span, + help: String::from("use `{{}}` to print empty braces"), + }); + FormatArg::AsIs(String::from("{}")) + } + } +} + +/// `#[rustc_on_unimplemented]` and `#[diagnostic::...]` don't actually do anything +/// with specifiers, so emit a warning if they are used. +fn warn_on_format_spec(spec: FormatSpec<'_>, warnings: &mut Vec, input_span: Span) { + if !matches!( + spec, + FormatSpec { + fill: None, + fill_span: None, + align: Alignment::AlignUnknown, + sign: None, + alternate: false, + zero_pad: false, + debug_hex: None, + precision: Count::CountImplied, + precision_span: None, + width: Count::CountImplied, + width_span: None, + ty: _, + ty_span: _, + }, + ) { + let span = spec.ty_span.map(|inner| slice_span(input_span, inner)).unwrap_or(input_span); + warnings.push(FormatWarning::InvalidSpecifier { span, name: spec.ty.into() }) + } +} + +fn slice_span(input: Span, range: Range) -> Span { + let span = input.data(); + + Span::new( + span.lo + BytePos::from_usize(range.start), + span.lo + BytePos::from_usize(range.end), + span.ctxt, + span.parent, + ) +} + +pub mod errors { + use rustc_macros::LintDiagnostic; + use rustc_span::Ident; + + use super::*; + + #[derive(LintDiagnostic)] + #[diag(trait_selection_unknown_format_parameter_for_on_unimplemented_attr)] + #[help] + pub struct UnknownFormatParameterForOnUnimplementedAttr { + pub argument_name: Symbol, + pub trait_name: Ident, + } + + #[derive(LintDiagnostic)] + #[diag(trait_selection_disallowed_positional_argument)] + #[help] + pub struct DisallowedPositionalArgument; + + #[derive(LintDiagnostic)] + #[diag(trait_selection_invalid_format_specifier)] + #[help] + pub struct InvalidFormatSpecifier; + + #[derive(LintDiagnostic)] + #[diag(trait_selection_missing_options_for_on_unimplemented_attr)] + #[help] + pub struct MissingOptionsForOnUnimplementedAttr; + + #[derive(LintDiagnostic)] + #[diag(trait_selection_ignored_diagnostic_option)] + pub struct IgnoredDiagnosticOption { + pub option_name: &'static str, + #[label] + pub span: Span, + #[label(trait_selection_other_label)] + pub prev_span: Span, + } + + impl IgnoredDiagnosticOption { + pub fn maybe_emit_warning<'tcx>( + tcx: TyCtxt<'tcx>, + item_def_id: DefId, + new: Option, + old: Option, + option_name: &'static str, + ) { + if let (Some(new_item), Some(old_item)) = (new, old) { + if let Some(item_def_id) = item_def_id.as_local() { + tcx.emit_node_span_lint( + UNKNOWN_OR_MALFORMED_DIAGNOSTIC_ATTRIBUTES, + tcx.local_def_id_to_hir_id(item_def_id), + new_item, + IgnoredDiagnosticOption { + span: new_item, + prev_span: old_item, + option_name, + }, + ); + } + } + } + } +} diff --git a/compiler/rustc_trait_selection/src/error_reporting/traits/overflow.rs b/compiler/rustc_trait_selection/src/error_reporting/traits/overflow.rs index c5ed74420d4d2..d929ecf68bf34 100644 --- a/compiler/rustc_trait_selection/src/error_reporting/traits/overflow.rs +++ b/compiler/rustc_trait_selection/src/error_reporting/traits/overflow.rs @@ -5,10 +5,9 @@ use rustc_hir::def::Namespace; use rustc_hir::def_id::LOCAL_CRATE; use rustc_infer::traits::{Obligation, PredicateObligation}; use rustc_middle::ty::print::{FmtPrinter, Print}; -use rustc_middle::ty::{self, TyCtxt}; +use rustc_middle::ty::{self, TyCtxt, Upcast}; use rustc_session::Limit; use rustc_span::Span; -use rustc_type_ir::Upcast; use tracing::debug; use crate::error_reporting::TypeErrCtxt; diff --git a/compiler/rustc_trait_selection/src/error_reporting/traits/suggestions.rs b/compiler/rustc_trait_selection/src/error_reporting/traits/suggestions.rs index dc8022b95c313..b4ae0de1d9952 100644 --- a/compiler/rustc_trait_selection/src/error_reporting/traits/suggestions.rs +++ b/compiler/rustc_trait_selection/src/error_reporting/traits/suggestions.rs @@ -1193,7 +1193,7 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { // FIXME(compiler-errors): This is kind of a mess, but required for obligations // that come from a path expr to affect the *call* expr. c @ ObligationCauseCode::WhereClauseInExpr(_, _, hir_id, _) - if self.tcx.hir().span(*hir_id).lo() == span.lo() => + if self.tcx.hir_span(*hir_id).lo() == span.lo() => { c } @@ -1516,6 +1516,12 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { } else { expr.span.with_hi(expr.span.lo() + BytePos(1)) }; + + match self.tcx.sess.source_map().span_to_snippet(span) { + Ok(snippet) if snippet.starts_with("&") => {} + _ => break 'outer, + } + suggestions.push((span, String::new())); let ty::Ref(_, inner_ty, _) = suggested_ty.kind() else { @@ -1988,7 +1994,7 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { { let closure: Vec<_> = self .tcx - .fn_arg_names(fn_def_id) + .fn_arg_idents(fn_def_id) .iter() .enumerate() .map(|(i, ident)| { @@ -2112,16 +2118,20 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { trait_ref: DefId, ) { if let Some(assoc_item) = self.tcx.opt_associated_item(item_def_id) { - if let ty::AssocKind::Const | ty::AssocKind::Type = assoc_item.kind { + if let ty::AssocKind::Const { .. } | ty::AssocKind::Type { .. } = assoc_item.kind { err.note(format!( "{}s cannot be accessed directly on a `trait`, they can only be \ accessed through a specific `impl`", - self.tcx.def_kind_descr(assoc_item.kind.as_def_kind(), item_def_id) + self.tcx.def_kind_descr(assoc_item.as_def_kind(), item_def_id) )); err.span_suggestion( span, "use the fully qualified path to an implementation", - format!("::{}", self.tcx.def_path_str(trait_ref), assoc_item.name), + format!( + "::{}", + self.tcx.def_path_str(trait_ref), + assoc_item.name() + ), Applicability::HasPlaceholders, ); } @@ -3018,12 +3028,23 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { [] => span_bug!(ty.span, "trait object with no traits: {ty:?}"), }; let needs_parens = traits.len() != 1; - err.span_suggestion_verbose( - span, - "you can use `impl Trait` as the argument type", - "impl ", - Applicability::MaybeIncorrect, - ); + // Don't recommend impl Trait as a closure argument + if let Some(hir_id) = hir_id + && matches!( + self.tcx.parent_hir_node(hir_id), + hir::Node::Item(hir::Item { + kind: hir::ItemKind::Fn { .. }, + .. + }) + ) + { + err.span_suggestion_verbose( + span, + "you can use `impl Trait` as the argument type", + "impl ", + Applicability::MaybeIncorrect, + ); + } let sugg = if !needs_parens { vec![(span.shrink_to_lo(), format!("&{kw}"))] } else { @@ -3550,6 +3571,9 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { } err.span_note(assoc_span, msg); } + ObligationCauseCode::CompareEII { .. } => { + panic!("trait bounds on EII not yet supported ") + } ObligationCauseCode::TrivialBound => { err.help("see issue #48214"); tcx.disabled_nightly_features( @@ -3829,12 +3853,7 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { ); if let ty::PredicateKind::Clause(clause) = failed_pred.kind().skip_binder() && let ty::ClauseKind::Trait(pred) = clause - && [ - tcx.lang_items().fn_once_trait(), - tcx.lang_items().fn_mut_trait(), - tcx.lang_items().fn_trait(), - ] - .contains(&Some(pred.def_id())) + && tcx.fn_trait_kind_from_def_id(pred.def_id()).is_some() { if let [stmt, ..] = block.stmts && let hir::StmtKind::Semi(value) = stmt.kind @@ -4481,7 +4500,7 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { Obligation::new(self.tcx, obligation.cause.clone(), obligation.param_env, trait_ref); if self.predicate_must_hold_modulo_regions(&obligation) { - let arg_span = self.tcx.hir().span(*arg_hir_id); + let arg_span = self.tcx.hir_span(*arg_hir_id); err.multipart_suggestion_verbose( format!("use a unary tuple instead"), vec![(arg_span.shrink_to_lo(), "(".into()), (arg_span.shrink_to_hi(), ",)".into())], @@ -4521,7 +4540,7 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { parent_code: _, } = cause.code() { - let arg_span = self.tcx.hir().span(*arg_hir_id); + let arg_span = self.tcx.hir_span(*arg_hir_id); let mut sp: MultiSpan = arg_span.into(); sp.push_span_label( @@ -4530,7 +4549,7 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { generic types that should be inferred from this argument", ); sp.push_span_label( - self.tcx.hir().span(*call_hir_id), + self.tcx.hir_span(*call_hir_id), "add turbofish arguments to this call to \ specify the types manually, even if it's redundant", ); @@ -4939,7 +4958,7 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { .type_implements_trait(pred.def_id(), [rhs_ty, lhs_ty], param_env) .must_apply_modulo_regions() { - let lhs_span = tcx.hir().span(lhs_hir_id); + let lhs_span = tcx.hir_span(lhs_hir_id); let sm = tcx.sess.source_map(); if let Ok(rhs_snippet) = sm.span_to_snippet(rhs_span) && let Ok(lhs_snippet) = sm.span_to_snippet(lhs_span) @@ -5397,10 +5416,10 @@ fn point_at_assoc_type_restriction( ); } if let Some(new) = - tcx.associated_items(data.impl_or_alias_def_id).find_by_name_and_kind( + tcx.associated_items(data.impl_or_alias_def_id).find_by_ident_and_kind( tcx, Ident::with_dummy_span(name), - ty::AssocKind::Type, + ty::AssocTag::Type, data.impl_or_alias_def_id, ) { diff --git a/compiler/rustc_trait_selection/src/errors.rs b/compiler/rustc_trait_selection/src/errors.rs index b30390a9330eb..04cae1c9b6419 100644 --- a/compiler/rustc_trait_selection/src/errors.rs +++ b/compiler/rustc_trait_selection/src/errors.rs @@ -1,18 +1,19 @@ use std::path::PathBuf; +use rustc_ast::Path; use rustc_data_structures::fx::{FxHashSet, FxIndexSet}; use rustc_errors::codes::*; use rustc_errors::{ Applicability, Diag, DiagCtxtHandle, DiagMessage, DiagStyledString, Diagnostic, - EmissionGuarantee, IntoDiagArg, Level, MultiSpan, SubdiagMessageOp, Subdiagnostic, + EmissionGuarantee, IntoDiagArg, Level, MultiSpan, Subdiagnostic, }; use rustc_hir::def::DefKind; use rustc_hir::def_id::{DefId, LocalDefId}; use rustc_hir::intravisit::{Visitor, VisitorExt, walk_ty}; -use rustc_hir::{self as hir, AmbigArg, FnRetTy, GenericParamKind, IsAnonInPath, Node}; +use rustc_hir::{self as hir, AmbigArg, FnRetTy, GenericParamKind, Node}; use rustc_macros::{Diagnostic, Subdiagnostic}; use rustc_middle::ty::print::{PrintTraitRefExt as _, TraitRefPrintOnlyTraitPath}; -use rustc_middle::ty::{self, Binder, ClosureKind, FnSig, Region, Ty, TyCtxt}; +use rustc_middle::ty::{self, Binder, ClosureKind, FnSig, GenericArg, Region, Ty, TyCtxt}; use rustc_span::{BytePos, Ident, Span, Symbol, kw}; use crate::error_reporting::infer::ObligationCauseAsDiagArg; @@ -31,23 +32,50 @@ pub struct UnableToConstructConstantValue<'a> { } #[derive(Diagnostic)] -#[diag(trait_selection_empty_on_clause_in_rustc_on_unimplemented, code = E0232)] -pub struct EmptyOnClauseInOnUnimplemented { - #[primary_span] - #[label] - pub span: Span, -} - -#[derive(Diagnostic)] -#[diag(trait_selection_invalid_on_clause_in_rustc_on_unimplemented, code = E0232)] -pub struct InvalidOnClauseInOnUnimplemented { - #[primary_span] - #[label] - pub span: Span, +pub enum InvalidOnClause { + #[diag(trait_selection_rustc_on_unimplemented_empty_on_clause, code = E0232)] + Empty { + #[primary_span] + #[label] + span: Span, + }, + #[diag(trait_selection_rustc_on_unimplemented_expected_one_predicate_in_not, code = E0232)] + ExpectedOnePredInNot { + #[primary_span] + #[label] + span: Span, + }, + #[diag(trait_selection_rustc_on_unimplemented_unsupported_literal_in_on, code = E0232)] + UnsupportedLiteral { + #[primary_span] + #[label] + span: Span, + }, + #[diag(trait_selection_rustc_on_unimplemented_expected_identifier, code = E0232)] + ExpectedIdentifier { + #[primary_span] + #[label] + span: Span, + path: Path, + }, + #[diag(trait_selection_rustc_on_unimplemented_invalid_predicate, code = E0232)] + InvalidPredicate { + #[primary_span] + #[label] + span: Span, + invalid_pred: Symbol, + }, + #[diag(trait_selection_rustc_on_unimplemented_invalid_flag, code = E0232)] + InvalidFlag { + #[primary_span] + #[label] + span: Span, + invalid_flag: Symbol, + }, } #[derive(Diagnostic)] -#[diag(trait_selection_no_value_in_rustc_on_unimplemented, code = E0232)] +#[diag(trait_selection_rustc_on_unimplemented_missing_value, code = E0232)] #[note] pub struct NoValueInOnUnimplemented { #[primary_span] @@ -107,11 +135,7 @@ pub enum AdjustSignatureBorrow { } impl Subdiagnostic for AdjustSignatureBorrow { - fn add_to_diag_with>( - self, - diag: &mut Diag<'_, G>, - _f: &F, - ) { + fn add_to_diag(self, diag: &mut Diag<'_, G>) { match self { AdjustSignatureBorrow::Borrow { to_borrow } => { diag.arg("len", to_borrow.len()); @@ -381,11 +405,7 @@ pub enum RegionOriginNote<'a> { } impl Subdiagnostic for RegionOriginNote<'_> { - fn add_to_diag_with>( - self, - diag: &mut Diag<'_, G>, - _f: &F, - ) { + fn add_to_diag(self, diag: &mut Diag<'_, G>) { let mut label_or_note = |span, msg: DiagMessage| { let sub_count = diag.children.iter().filter(|d| d.span.is_dummy()).count(); let expanded_sub_count = diag.children.iter().filter(|d| !d.span.is_dummy()).count(); @@ -415,7 +435,7 @@ impl Subdiagnostic for RegionOriginNote<'_> { label_or_note(span, fluent::trait_selection_subtype); diag.arg("requirement", requirement); - diag.note_expected_found(&"", expected, &"", found); + diag.note_expected_found("", expected, "", found); } RegionOriginNote::WithRequirement { span, requirement, expected_found: None } => { // FIXME: this really should be handled at some earlier stage. Our @@ -446,11 +466,7 @@ pub enum LifetimeMismatchLabels { } impl Subdiagnostic for LifetimeMismatchLabels { - fn add_to_diag_with>( - self, - diag: &mut Diag<'_, G>, - _f: &F, - ) { + fn add_to_diag(self, diag: &mut Diag<'_, G>) { match self { LifetimeMismatchLabels::InRet { param_span, ret_span, span, label_var1 } => { diag.span_label(param_span, fluent::trait_selection_declared_different); @@ -495,11 +511,7 @@ pub struct AddLifetimeParamsSuggestion<'a> { } impl Subdiagnostic for AddLifetimeParamsSuggestion<'_> { - fn add_to_diag_with>( - self, - diag: &mut Diag<'_, G>, - _f: &F, - ) { + fn add_to_diag(self, diag: &mut Diag<'_, G>) { let mut mk_suggestion = || { let Some(anon_reg) = self.tcx.is_suitable_region(self.generic_param_scope, self.sub) else { @@ -567,19 +579,6 @@ impl Subdiagnostic for AddLifetimeParamsSuggestion<'_> { impl<'v> Visitor<'v> for ImplicitLifetimeFinder { fn visit_ty(&mut self, ty: &'v hir::Ty<'v, AmbigArg>) { - let make_suggestion = |lifetime: &hir::Lifetime| { - if lifetime.is_anon_in_path == IsAnonInPath::Yes - && lifetime.ident.span.is_empty() - { - format!("{}, ", self.suggestion_param_name) - } else if lifetime.ident.name == kw::UnderscoreLifetime - && lifetime.ident.span.is_empty() - { - format!("{} ", self.suggestion_param_name) - } else { - self.suggestion_param_name.clone() - } - }; match ty.kind { hir::TyKind::Path(hir::QPath::Resolved(_, path)) => { for segment in path.segments { @@ -588,7 +587,7 @@ impl Subdiagnostic for AddLifetimeParamsSuggestion<'_> { matches!( arg, hir::GenericArg::Lifetime(lifetime) - if lifetime.is_anon_in_path == IsAnonInPath::Yes + if lifetime.is_syntactically_hidden() ) }) { self.suggestions.push(( @@ -607,10 +606,10 @@ impl Subdiagnostic for AddLifetimeParamsSuggestion<'_> { if let hir::GenericArg::Lifetime(lifetime) = arg && lifetime.is_anonymous() { - self.suggestions.push(( - lifetime.ident.span, - make_suggestion(lifetime), - )); + self.suggestions.push( + lifetime + .suggestion(&self.suggestion_param_name), + ); } } } @@ -618,7 +617,7 @@ impl Subdiagnostic for AddLifetimeParamsSuggestion<'_> { } } hir::TyKind::Ref(lifetime, ..) if lifetime.is_anonymous() => { - self.suggestions.push((lifetime.ident.span, make_suggestion(lifetime))); + self.suggestions.push(lifetime.suggestion(&self.suggestion_param_name)); } _ => {} } @@ -689,11 +688,7 @@ pub struct IntroducesStaticBecauseUnmetLifetimeReq { } impl Subdiagnostic for IntroducesStaticBecauseUnmetLifetimeReq { - fn add_to_diag_with>( - mut self, - diag: &mut Diag<'_, G>, - _f: &F, - ) { + fn add_to_diag(mut self, diag: &mut Diag<'_, G>) { self.unmet_requirements .push_span_label(self.binding_span, fluent::trait_selection_msl_introduces_static); diag.span_note(self.unmet_requirements, fluent::trait_selection_msl_unmet_req); @@ -1008,17 +1003,13 @@ pub struct ConsiderBorrowingParamHelp { } impl Subdiagnostic for ConsiderBorrowingParamHelp { - fn add_to_diag_with>( - self, - diag: &mut Diag<'_, G>, - f: &F, - ) { + fn add_to_diag(self, diag: &mut Diag<'_, G>) { let mut type_param_span: MultiSpan = self.spans.clone().into(); for &span in &self.spans { // Seems like we can't call f() here as Into is required type_param_span.push_span_label(span, fluent::trait_selection_tid_consider_borrowing); } - let msg = f(diag, fluent::trait_selection_tid_param_help.into()); + let msg = diag.eagerly_translate(fluent::trait_selection_tid_param_help); diag.span_help(type_param_span, msg); } } @@ -1053,18 +1044,14 @@ pub struct DynTraitConstraintSuggestion { } impl Subdiagnostic for DynTraitConstraintSuggestion { - fn add_to_diag_with>( - self, - diag: &mut Diag<'_, G>, - f: &F, - ) { + fn add_to_diag(self, diag: &mut Diag<'_, G>) { let mut multi_span: MultiSpan = vec![self.span].into(); multi_span.push_span_label(self.span, fluent::trait_selection_dtcs_has_lifetime_req_label); multi_span .push_span_label(self.ident.span, fluent::trait_selection_dtcs_introduces_requirement); - let msg = f(diag, fluent::trait_selection_dtcs_has_req_note.into()); + let msg = diag.eagerly_translate(fluent::trait_selection_dtcs_has_req_note); diag.span_note(multi_span, msg); - let msg = f(diag, fluent::trait_selection_dtcs_suggestion.into()); + let msg = diag.eagerly_translate(fluent::trait_selection_dtcs_suggestion); diag.span_suggestion_verbose( self.span.shrink_to_hi(), msg, @@ -1101,11 +1088,7 @@ pub struct ReqIntroducedLocations { } impl Subdiagnostic for ReqIntroducedLocations { - fn add_to_diag_with>( - mut self, - diag: &mut Diag<'_, G>, - f: &F, - ) { + fn add_to_diag(mut self, diag: &mut Diag<'_, G>) { for sp in self.spans { self.span.push_span_label(sp, fluent::trait_selection_ril_introduced_here); } @@ -1114,7 +1097,7 @@ impl Subdiagnostic for ReqIntroducedLocations { self.span.push_span_label(self.fn_decl_span, fluent::trait_selection_ril_introduced_by); } self.span.push_span_label(self.cause_span, fluent::trait_selection_ril_because_of); - let msg = f(diag, fluent::trait_selection_ril_static_introduced_by.into()); + let msg = diag.eagerly_translate(fluent::trait_selection_ril_static_introduced_by); diag.span_note(self.span, msg); } } @@ -1513,13 +1496,9 @@ pub struct SuggestTuplePatternMany { } impl Subdiagnostic for SuggestTuplePatternMany { - fn add_to_diag_with>( - self, - diag: &mut Diag<'_, G>, - f: &F, - ) { + fn add_to_diag(self, diag: &mut Diag<'_, G>) { diag.arg("path", self.path); - let message = f(diag, crate::fluent_generated::trait_selection_stp_wrap_many.into()); + let message = diag.eagerly_translate(fluent::trait_selection_stp_wrap_many); diag.multipart_suggestions( message, self.compatible_variants.into_iter().map(|variant| { @@ -1752,11 +1731,7 @@ pub struct AddPreciseCapturingAndParams { } impl Subdiagnostic for AddPreciseCapturingAndParams { - fn add_to_diag_with>( - self, - diag: &mut Diag<'_, G>, - _f: &F, - ) { + fn add_to_diag(self, diag: &mut Diag<'_, G>) { diag.arg("new_lifetime", self.new_lifetime); diag.multipart_suggestion_verbose( fluent::trait_selection_precise_capturing_new_but_apit, @@ -1896,11 +1871,7 @@ pub struct AddPreciseCapturingForOvercapture { } impl Subdiagnostic for AddPreciseCapturingForOvercapture { - fn add_to_diag_with>( - self, - diag: &mut Diag<'_, G>, - _f: &F, - ) { + fn add_to_diag(self, diag: &mut Diag<'_, G>) { let applicability = if self.apit_spans.is_empty() { Applicability::MachineApplicable } else { @@ -1922,3 +1893,14 @@ impl Subdiagnostic for AddPreciseCapturingForOvercapture { } } } + +#[derive(Diagnostic)] +#[diag(trait_selection_opaque_type_non_generic_param, code = E0792)] +pub(crate) struct NonGenericOpaqueTypeParam<'a, 'tcx> { + pub arg: GenericArg<'tcx>, + pub kind: &'a str, + #[primary_span] + pub span: Span, + #[label] + pub param_span: Span, +} diff --git a/compiler/rustc_trait_selection/src/errors/note_and_explain.rs b/compiler/rustc_trait_selection/src/errors/note_and_explain.rs index 46622246a178d..84e7686fdd3fc 100644 --- a/compiler/rustc_trait_selection/src/errors/note_and_explain.rs +++ b/compiler/rustc_trait_selection/src/errors/note_and_explain.rs @@ -1,4 +1,4 @@ -use rustc_errors::{Diag, EmissionGuarantee, IntoDiagArg, SubdiagMessageOp, Subdiagnostic}; +use rustc_errors::{Diag, EmissionGuarantee, IntoDiagArg, Subdiagnostic}; use rustc_hir::def_id::LocalDefId; use rustc_middle::bug; use rustc_middle::ty::{self, TyCtxt}; @@ -20,7 +20,7 @@ impl<'a> DescriptionCtx<'a> { region: ty::Region<'tcx>, alt_span: Option, ) -> Option { - let (span, kind, arg) = match *region { + let (span, kind, arg) = match region.kind() { ty::ReEarlyParam(br) => { let scope = tcx .parent(tcx.generics_of(generic_param_scope).region_param(br, tcx).def_id) @@ -162,17 +162,13 @@ impl RegionExplanation<'_> { } impl Subdiagnostic for RegionExplanation<'_> { - fn add_to_diag_with>( - self, - diag: &mut Diag<'_, G>, - f: &F, - ) { + fn add_to_diag(self, diag: &mut Diag<'_, G>) { diag.arg("pref_kind", self.prefix); diag.arg("suff_kind", self.suffix); diag.arg("desc_kind", self.desc.kind); diag.arg("desc_arg", self.desc.arg); - let msg = f(diag, fluent::trait_selection_region_explanation.into()); + let msg = diag.eagerly_translate(fluent::trait_selection_region_explanation); if let Some(span) = self.desc.span { diag.span_note(span, msg); } else { diff --git a/compiler/rustc_trait_selection/src/infer.rs b/compiler/rustc_trait_selection/src/infer.rs index 84ac229b743d9..0dab3adadb033 100644 --- a/compiler/rustc_trait_selection/src/infer.rs +++ b/compiler/rustc_trait_selection/src/infer.rs @@ -34,7 +34,7 @@ impl<'tcx> InferCtxt<'tcx> { // FIXME(#132279): This should be removed as it causes us to incorrectly // handle opaques in their defining scope. - if !(param_env, ty).has_infer() { + if !self.next_trait_solver() && !(param_env, ty).has_infer() { return self.tcx.type_is_copy_modulo_regions(self.typing_env(param_env), ty); } diff --git a/compiler/rustc_trait_selection/src/lib.rs b/compiler/rustc_trait_selection/src/lib.rs index b18fb0fb8fd31..7613a0cef52a7 100644 --- a/compiler/rustc_trait_selection/src/lib.rs +++ b/compiler/rustc_trait_selection/src/lib.rs @@ -14,6 +14,7 @@ #![allow(internal_features)] #![allow(rustc::diagnostic_outside_of_impl)] #![allow(rustc::untranslatable_diagnostic)] +#![cfg_attr(bootstrap, feature(let_chains))] #![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")] #![doc(rust_logo)] #![feature(assert_matches)] @@ -23,7 +24,6 @@ #![feature(if_let_guard)] #![feature(iter_intersperse)] #![feature(iterator_try_reduce)] -#![feature(let_chains)] #![feature(never_type)] #![feature(rustdoc_internals)] #![feature(try_blocks)] @@ -36,6 +36,7 @@ pub mod error_reporting; pub mod errors; pub mod infer; +pub mod opaque_types; pub mod regions; pub mod solve; pub mod traits; diff --git a/compiler/rustc_trait_selection/src/opaque_types.rs b/compiler/rustc_trait_selection/src/opaque_types.rs new file mode 100644 index 0000000000000..332204a0c5f06 --- /dev/null +++ b/compiler/rustc_trait_selection/src/opaque_types.rs @@ -0,0 +1,207 @@ +use rustc_data_structures::fx::FxIndexMap; +use rustc_hir::OpaqueTyOrigin; +use rustc_hir::def_id::LocalDefId; +use rustc_infer::infer::outlives::env::OutlivesEnvironment; +use rustc_infer::infer::{InferCtxt, TyCtxtInferExt}; +use rustc_middle::ty::{ + self, DefiningScopeKind, GenericArgKind, GenericArgs, OpaqueTypeKey, TyCtxt, TypeVisitableExt, + TypingMode, fold_regions, +}; +use rustc_span::{ErrorGuaranteed, Span}; + +use crate::errors::NonGenericOpaqueTypeParam; +use crate::regions::OutlivesEnvironmentBuildExt; +use crate::traits::ObligationCtxt; + +pub enum InvalidOpaqueTypeArgs<'tcx> { + AlreadyReported(ErrorGuaranteed), + NotAParam { opaque_type_key: OpaqueTypeKey<'tcx>, param_index: usize, span: Span }, + DuplicateParam { opaque_type_key: OpaqueTypeKey<'tcx>, param_indices: Vec, span: Span }, +} +impl From for InvalidOpaqueTypeArgs<'_> { + fn from(guar: ErrorGuaranteed) -> Self { + InvalidOpaqueTypeArgs::AlreadyReported(guar) + } +} +impl<'tcx> InvalidOpaqueTypeArgs<'tcx> { + pub fn report(self, infcx: &InferCtxt<'tcx>) -> ErrorGuaranteed { + let tcx = infcx.tcx; + match self { + InvalidOpaqueTypeArgs::AlreadyReported(guar) => guar, + InvalidOpaqueTypeArgs::NotAParam { opaque_type_key, param_index, span } => { + let opaque_generics = tcx.generics_of(opaque_type_key.def_id); + let opaque_param = opaque_generics.param_at(param_index, tcx); + let kind = opaque_param.kind.descr(); + infcx.dcx().emit_err(NonGenericOpaqueTypeParam { + arg: opaque_type_key.args[param_index], + kind, + span, + param_span: tcx.def_span(opaque_param.def_id), + }) + } + InvalidOpaqueTypeArgs::DuplicateParam { opaque_type_key, param_indices, span } => { + let opaque_generics = tcx.generics_of(opaque_type_key.def_id); + let descr = opaque_generics.param_at(param_indices[0], tcx).kind.descr(); + let spans: Vec<_> = param_indices + .into_iter() + .map(|i| tcx.def_span(opaque_generics.param_at(i, tcx).def_id)) + .collect(); + infcx + .dcx() + .struct_span_err(span, "non-defining opaque type use in defining scope") + .with_span_note(spans, format!("{descr} used multiple times")) + .emit() + } + } + } +} + +/// Opaque type parameter validity check as documented in the [rustc-dev-guide chapter]. +/// +/// [rustc-dev-guide chapter]: +/// https://rustc-dev-guide.rust-lang.org/opaque-types-region-infer-restrictions.html +pub fn check_opaque_type_parameter_valid<'tcx>( + infcx: &InferCtxt<'tcx>, + opaque_type_key: OpaqueTypeKey<'tcx>, + span: Span, + defining_scope_kind: DefiningScopeKind, +) -> Result<(), InvalidOpaqueTypeArgs<'tcx>> { + let tcx = infcx.tcx; + let opaque_env = LazyOpaqueTyEnv::new(tcx, opaque_type_key.def_id); + let mut seen_params: FxIndexMap<_, Vec<_>> = FxIndexMap::default(); + + // Avoid duplicate errors in case the opaque has already been malformed in + // HIR typeck. + if let DefiningScopeKind::MirBorrowck = defining_scope_kind { + infcx + .tcx + .type_of_opaque_hir_typeck(opaque_type_key.def_id) + .instantiate_identity() + .error_reported()?; + } + + for (i, arg) in opaque_type_key.iter_captured_args(tcx) { + let arg_is_param = match arg.unpack() { + GenericArgKind::Lifetime(lt) => match defining_scope_kind { + DefiningScopeKind::HirTypeck => continue, + DefiningScopeKind::MirBorrowck => { + matches!(lt.kind(), ty::ReEarlyParam(_) | ty::ReLateParam(_)) + || (lt.is_static() && opaque_env.param_equal_static(i)) + } + }, + GenericArgKind::Type(ty) => matches!(ty.kind(), ty::Param(_)), + GenericArgKind::Const(ct) => matches!(ct.kind(), ty::ConstKind::Param(_)), + }; + + if arg_is_param { + // Register if the same lifetime appears multiple times in the generic args. + // There is an exception when the opaque type *requires* the lifetimes to be equal. + // See [rustc-dev-guide chapter] § "An exception to uniqueness rule". + let seen_where = seen_params.entry(arg).or_default(); + if !seen_where.first().is_some_and(|&prev_i| opaque_env.params_equal(i, prev_i)) { + seen_where.push(i); + } + } else { + // Prevent `fn foo() -> Foo` from being defining. + opaque_env.param_is_error(i)?; + return Err(InvalidOpaqueTypeArgs::NotAParam { opaque_type_key, param_index: i, span }); + } + } + + for (_, param_indices) in seen_params { + if param_indices.len() > 1 { + return Err(InvalidOpaqueTypeArgs::DuplicateParam { + opaque_type_key, + param_indices, + span, + }); + } + } + + Ok(()) +} + +/// Computes if an opaque type requires a lifetime parameter to be equal to +/// another one or to the `'static` lifetime. +/// These requirements are derived from the explicit and implied bounds. +struct LazyOpaqueTyEnv<'tcx> { + tcx: TyCtxt<'tcx>, + def_id: LocalDefId, + + /// Equal parameters will have the same name. Computed Lazily. + /// Example: + /// `type Opaque<'a: 'static, 'b: 'c, 'c: 'b> = impl Sized;` + /// Identity args: `['a, 'b, 'c]` + /// Canonical args: `['static, 'b, 'b]` + canonical_args: std::cell::OnceCell>, +} + +impl<'tcx> LazyOpaqueTyEnv<'tcx> { + fn new(tcx: TyCtxt<'tcx>, def_id: LocalDefId) -> Self { + Self { tcx, def_id, canonical_args: std::cell::OnceCell::new() } + } + + fn param_equal_static(&self, param_index: usize) -> bool { + self.get_canonical_args()[param_index].expect_region().is_static() + } + + fn params_equal(&self, param1: usize, param2: usize) -> bool { + let canonical_args = self.get_canonical_args(); + canonical_args[param1] == canonical_args[param2] + } + + fn param_is_error(&self, param_index: usize) -> Result<(), ErrorGuaranteed> { + self.get_canonical_args()[param_index].error_reported() + } + + fn get_canonical_args(&self) -> ty::GenericArgsRef<'tcx> { + if let Some(&canonical_args) = self.canonical_args.get() { + return canonical_args; + } + + let &Self { tcx, def_id, .. } = self; + let origin = tcx.local_opaque_ty_origin(def_id); + let parent = match origin { + OpaqueTyOrigin::FnReturn { parent, .. } + | OpaqueTyOrigin::AsyncFn { parent, .. } + | OpaqueTyOrigin::TyAlias { parent, .. } => parent, + }; + let param_env = tcx.param_env(parent); + let args = GenericArgs::identity_for_item(tcx, parent).extend_to( + tcx, + def_id.to_def_id(), + |param, _| { + tcx.map_opaque_lifetime_to_parent_lifetime(param.def_id.expect_local()).into() + }, + ); + + // FIXME(#132279): It feels wrong to use `non_body_analysis` here given that we're + // in a body here. + let infcx = tcx.infer_ctxt().build(TypingMode::non_body_analysis()); + let ocx = ObligationCtxt::new(&infcx); + + let wf_tys = ocx.assumed_wf_types(param_env, parent).unwrap_or_else(|_| { + tcx.dcx().span_delayed_bug(tcx.def_span(def_id), "error getting implied bounds"); + Default::default() + }); + let outlives_env = OutlivesEnvironment::new(&infcx, parent, param_env, wf_tys); + + let mut seen = vec![tcx.lifetimes.re_static]; + let canonical_args = fold_regions(tcx, args, |r1, _| { + if r1.is_error() { + r1 + } else if let Some(&r2) = seen.iter().find(|&&r2| { + let free_regions = outlives_env.free_region_map(); + free_regions.sub_free_regions(tcx, r1, r2) + && free_regions.sub_free_regions(tcx, r2, r1) + }) { + r2 + } else { + seen.push(r1); + r1 + } + }); + self.canonical_args.set(canonical_args).unwrap(); + canonical_args + } +} diff --git a/compiler/rustc_trait_selection/src/solve.rs b/compiler/rustc_trait_selection/src/solve.rs index d425ab50ae0fd..5a5d16167d28d 100644 --- a/compiler/rustc_trait_selection/src/solve.rs +++ b/compiler/rustc_trait_selection/src/solve.rs @@ -9,5 +9,8 @@ mod select; pub(crate) use delegate::SolverDelegate; pub use fulfill::{FulfillmentCtxt, NextSolverError}; pub(crate) use normalize::deeply_normalize_for_diagnostics; -pub use normalize::{deeply_normalize, deeply_normalize_with_skipped_universes}; +pub use normalize::{ + deeply_normalize, deeply_normalize_with_skipped_universes, + deeply_normalize_with_skipped_universes_and_ambiguous_coroutine_goals, +}; pub use select::InferCtxtSelectExt; diff --git a/compiler/rustc_trait_selection/src/solve/delegate.rs b/compiler/rustc_trait_selection/src/solve/delegate.rs index 3d9a90eb74e7a..3601c2cba9b55 100644 --- a/compiler/rustc_trait_selection/src/solve/delegate.rs +++ b/compiler/rustc_trait_selection/src/solve/delegate.rs @@ -8,10 +8,10 @@ use rustc_infer::infer::canonical::{ }; use rustc_infer::infer::{InferCtxt, RegionVariableOrigin, TyCtxtInferExt}; use rustc_infer::traits::solve::Goal; -use rustc_middle::ty::{self, Ty, TyCtxt, TypeFoldable, TypeVisitableExt as _}; +use rustc_middle::traits::query::NoSolution; +use rustc_middle::traits::solve::Certainty; +use rustc_middle::ty::{self, Ty, TyCtxt, TypeFoldable, TypeVisitableExt as _, TypingMode}; use rustc_span::{DUMMY_SP, ErrorGuaranteed, Span}; -use rustc_type_ir::TypingMode; -use rustc_type_ir::solve::{Certainty, NoSolution}; use crate::traits::{EvaluateConstErr, specialization_graph}; @@ -92,16 +92,16 @@ impl<'tcx> rustc_next_trait_solver::delegate::SolverDelegate for SolverDelegate< fn well_formed_goals( &self, param_env: ty::ParamEnv<'tcx>, - arg: ty::GenericArg<'tcx>, + term: ty::Term<'tcx>, ) -> Option>>> { - crate::traits::wf::unnormalized_obligations(&self.0, param_env, arg, DUMMY_SP, CRATE_DEF_ID) - .map(|obligations| { - obligations.into_iter().map(|obligation| obligation.as_goal()).collect() - }) - } - - fn clone_opaque_types_for_query_response(&self) -> Vec<(ty::OpaqueTypeKey<'tcx>, Ty<'tcx>)> { - self.0.clone_opaque_types_for_query_response() + crate::traits::wf::unnormalized_obligations( + &self.0, + param_env, + term, + DUMMY_SP, + CRATE_DEF_ID, + ) + .map(|obligations| obligations.into_iter().map(|obligation| obligation.as_goal()).collect()) } fn make_deduplicated_outlives_constraints( @@ -149,18 +149,6 @@ impl<'tcx> rustc_next_trait_solver::delegate::SolverDelegate for SolverDelegate< self.0.instantiate_canonical_var(span, cv_info, universe_map) } - fn insert_hidden_type( - &self, - opaque_type_key: ty::OpaqueTypeKey<'tcx>, - param_env: ty::ParamEnv<'tcx>, - hidden_ty: Ty<'tcx>, - goals: &mut Vec>>, - ) -> Result<(), NoSolution> { - self.0 - .insert_hidden_type(opaque_type_key, DUMMY_SP, param_env, hidden_ty, goals) - .map_err(|_| NoSolution) - } - fn add_item_bounds_for_hidden_type( &self, def_id: DefId, @@ -172,19 +160,6 @@ impl<'tcx> rustc_next_trait_solver::delegate::SolverDelegate for SolverDelegate< self.0.add_item_bounds_for_hidden_type(def_id, args, param_env, hidden_ty, goals); } - fn inject_new_hidden_type_unchecked( - &self, - key: ty::OpaqueTypeKey<'tcx>, - hidden_ty: Ty<'tcx>, - span: Span, - ) { - self.0.inject_new_hidden_type_unchecked(key, ty::OpaqueHiddenType { ty: hidden_ty, span }) - } - - fn reset_opaque_types(&self) { - let _ = self.take_opaque_types(); - } - fn fetch_eligible_assoc_item( &self, goal_trait_ref: ty::TraitRef<'tcx>, @@ -204,6 +179,7 @@ impl<'tcx> rustc_next_trait_solver::delegate::SolverDelegate for SolverDelegate< match self.typing_mode() { TypingMode::Coherence | TypingMode::Analysis { .. } + | TypingMode::Borrowck { .. } | TypingMode::PostBorrowckAnalysis { .. } => false, TypingMode::PostAnalysis => { let poly_trait_ref = self.resolve_vars_if_possible(goal_trait_ref); diff --git a/compiler/rustc_trait_selection/src/solve/fulfill.rs b/compiler/rustc_trait_selection/src/solve/fulfill.rs index 192e632a2d5b9..3e1cdac84dfd1 100644 --- a/compiler/rustc_trait_selection/src/solve/fulfill.rs +++ b/compiler/rustc_trait_selection/src/solve/fulfill.rs @@ -1,18 +1,25 @@ use std::marker::PhantomData; use std::mem; +use std::ops::ControlFlow; use rustc_data_structures::thinvec::ExtractIf; +use rustc_hir::def_id::LocalDefId; use rustc_infer::infer::InferCtxt; use rustc_infer::traits::query::NoSolution; use rustc_infer::traits::{ FromSolverError, PredicateObligation, PredicateObligations, TraitEngine, }; +use rustc_middle::ty::{ + self, DelayedSet, Ty, TyCtxt, TypeSuperVisitable, TypeVisitable, TypeVisitor, TypingMode, +}; use rustc_next_trait_solver::solve::{GenerateProofTree, HasChanged, SolverDelegateEvalExt as _}; +use rustc_span::Span; use tracing::instrument; use self::derive_errors::*; use super::Certainty; use super::delegate::SolverDelegate; +use super::inspect::{self, ProofTreeInferCtxtExt}; use crate::traits::{FulfillmentError, ScrubbedTraitError}; mod derive_errors; @@ -39,7 +46,7 @@ pub struct FulfillmentCtxt<'tcx, E: 'tcx> { _errors: PhantomData, } -#[derive(Default)] +#[derive(Default, Debug)] struct ObligationStorage<'tcx> { /// Obligations which resulted in an overflow in fulfillment itself. /// @@ -55,20 +62,23 @@ impl<'tcx> ObligationStorage<'tcx> { self.pending.push(obligation); } + fn has_pending_obligations(&self) -> bool { + !self.pending.is_empty() || !self.overflowed.is_empty() + } + fn clone_pending(&self) -> PredicateObligations<'tcx> { let mut obligations = self.pending.clone(); obligations.extend(self.overflowed.iter().cloned()); obligations } - fn take_pending(&mut self) -> PredicateObligations<'tcx> { - let mut obligations = mem::take(&mut self.pending); - obligations.append(&mut self.overflowed); - obligations - } - - fn unstalled_for_select(&mut self) -> impl Iterator> + 'tcx { - mem::take(&mut self.pending).into_iter() + fn drain_pending( + &mut self, + cond: impl Fn(&PredicateObligation<'tcx>) -> bool, + ) -> PredicateObligations<'tcx> { + let (unstalled, pending) = mem::take(&mut self.pending).into_iter().partition(cond); + self.pending = pending; + unstalled } fn on_fulfillment_overflow(&mut self, infcx: &InferCtxt<'tcx>) { @@ -152,15 +162,15 @@ where fn select_where_possible(&mut self, infcx: &InferCtxt<'tcx>) -> Vec { assert_eq!(self.usable_in_snapshot, infcx.num_open_snapshots()); let mut errors = Vec::new(); - for i in 0.. { - if !infcx.tcx.recursion_limit().value_within_limit(i) { - self.obligations.on_fulfillment_overflow(infcx); - // Only return true errors that we have accumulated while processing. - return errors; - } - + loop { let mut has_changed = false; - for obligation in self.obligations.unstalled_for_select() { + for mut obligation in self.obligations.drain_pending(|_| true) { + if !infcx.tcx.recursion_limit().value_within_limit(obligation.recursion_depth) { + self.obligations.on_fulfillment_overflow(infcx); + // Only return true errors that we have accumulated while processing. + return errors; + } + let goal = obligation.as_goal(); let result = <&SolverDelegate<'tcx>>::from(infcx) .evaluate_root_goal(goal, GenerateProofTree::No, obligation.cause.span) @@ -178,6 +188,13 @@ where }; if changed == HasChanged::Yes { + // We increment the recursion depth here to track the number of times + // this goal has resulted in inference progress. This doesn't precisely + // model the way that we track recursion depth in the old solver due + // to the fact that we only process root obligations, but it is a good + // approximation and should only result in fulfillment overflow in + // pathological cases. + obligation.recursion_depth += 1; has_changed = true; } @@ -196,15 +213,95 @@ where } fn has_pending_obligations(&self) -> bool { - !self.obligations.pending.is_empty() || !self.obligations.overflowed.is_empty() + self.obligations.has_pending_obligations() } fn pending_obligations(&self) -> PredicateObligations<'tcx> { self.obligations.clone_pending() } - fn drain_unstalled_obligations(&mut self, _: &InferCtxt<'tcx>) -> PredicateObligations<'tcx> { - self.obligations.take_pending() + fn drain_stalled_obligations_for_coroutines( + &mut self, + infcx: &InferCtxt<'tcx>, + ) -> PredicateObligations<'tcx> { + let stalled_generators = match infcx.typing_mode() { + TypingMode::Analysis { defining_opaque_types_and_generators } => { + defining_opaque_types_and_generators + } + TypingMode::Coherence + | TypingMode::Borrowck { defining_opaque_types: _ } + | TypingMode::PostBorrowckAnalysis { defined_opaque_types: _ } + | TypingMode::PostAnalysis => return Default::default(), + }; + + if stalled_generators.is_empty() { + return Default::default(); + } + + self.obligations.drain_pending(|obl| { + infcx.probe(|_| { + infcx + .visit_proof_tree( + obl.as_goal(), + &mut StalledOnCoroutines { + stalled_generators, + span: obl.cause.span, + cache: Default::default(), + }, + ) + .is_break() + }) + }) + } +} + +/// Detect if a goal is stalled on a coroutine that is owned by the current typeck root. +/// +/// This function can (erroneously) fail to detect a predicate, i.e. it doesn't need to +/// be complete. However, this will lead to ambiguity errors, so we want to make it +/// accurate. +/// +/// This function can be also return false positives, which will lead to poor diagnostics +/// so we want to keep this visitor *precise* too. +struct StalledOnCoroutines<'tcx> { + stalled_generators: &'tcx ty::List, + span: Span, + cache: DelayedSet>, +} + +impl<'tcx> inspect::ProofTreeVisitor<'tcx> for StalledOnCoroutines<'tcx> { + type Result = ControlFlow<()>; + + fn span(&self) -> rustc_span::Span { + self.span + } + + fn visit_goal(&mut self, inspect_goal: &super::inspect::InspectGoal<'_, 'tcx>) -> Self::Result { + inspect_goal.goal().predicate.visit_with(self)?; + + if let Some(candidate) = inspect_goal.unique_applicable_candidate() { + candidate.visit_nested_no_probe(self) + } else { + ControlFlow::Continue(()) + } + } +} + +impl<'tcx> TypeVisitor> for StalledOnCoroutines<'tcx> { + type Result = ControlFlow<()>; + + fn visit_ty(&mut self, ty: Ty<'tcx>) -> Self::Result { + if !self.cache.insert(ty) { + return ControlFlow::Continue(()); + } + + if let ty::CoroutineWitness(def_id, _) = *ty.kind() + && def_id.as_local().is_some_and(|def_id| self.stalled_generators.contains(&def_id)) + { + return ControlFlow::Break(()); + } + + ty.super_visit_with(self) } } diff --git a/compiler/rustc_trait_selection/src/solve/fulfill/derive_errors.rs b/compiler/rustc_trait_selection/src/solve/fulfill/derive_errors.rs index 3a939df25e07b..f64cd5ffebe3d 100644 --- a/compiler/rustc_trait_selection/src/solve/fulfill/derive_errors.rs +++ b/compiler/rustc_trait_selection/src/solve/fulfill/derive_errors.rs @@ -1,21 +1,22 @@ use std::ops::ControlFlow; +use rustc_hir::LangItem; use rustc_infer::infer::InferCtxt; use rustc_infer::traits::solve::{CandidateSource, GoalSource, MaybeCause}; use rustc_infer::traits::{ self, MismatchedProjectionTypes, Obligation, ObligationCause, ObligationCauseCode, PredicateObligation, SelectionError, }; +use rustc_middle::traits::query::NoSolution; use rustc_middle::ty::error::{ExpectedFound, TypeError}; use rustc_middle::ty::{self, Ty, TyCtxt}; use rustc_middle::{bug, span_bug}; use rustc_next_trait_solver::solve::{GenerateProofTree, SolverDelegateEvalExt as _}; -use rustc_type_ir::solve::NoSolution; use tracing::{instrument, trace}; -use crate::solve::Certainty; use crate::solve::delegate::SolverDelegate; use crate::solve::inspect::{self, ProofTreeInferCtxtExt, ProofTreeVisitor}; +use crate::solve::{Certainty, deeply_normalize_for_diagnostics}; use crate::traits::{FulfillmentError, FulfillmentErrorCode, wf}; pub(super) fn fulfillment_error_for_no_solution<'tcx>( @@ -98,7 +99,13 @@ pub(super) fn fulfillment_error_for_stalled<'tcx>( Ok((_, Certainty::Maybe(MaybeCause::Ambiguity))) => { (FulfillmentErrorCode::Ambiguity { overflow: None }, true) } - Ok((_, Certainty::Maybe(MaybeCause::Overflow { suggest_increasing_limit }))) => ( + Ok(( + _, + Certainty::Maybe(MaybeCause::Overflow { + suggest_increasing_limit, + keep_constraints: _, + }), + )) => ( FulfillmentErrorCode::Ambiguity { overflow: Some(suggest_increasing_limit) }, // Don't look into overflows because we treat overflows weirdly anyways. // We discard the inference constraints from overflowing goals, so @@ -109,10 +116,16 @@ pub(super) fn fulfillment_error_for_stalled<'tcx>( false, ), Ok((_, Certainty::Yes)) => { - bug!("did not expect successful goal when collecting ambiguity errors") + bug!( + "did not expect successful goal when collecting ambiguity errors for `{:?}`", + infcx.resolve_vars_if_possible(root_obligation.predicate), + ) } Err(_) => { - bug!("did not expect selection error when collecting ambiguity errors") + bug!( + "did not expect selection error when collecting ambiguity errors for `{:?}`", + infcx.resolve_vars_if_possible(root_obligation.predicate), + ) } } }); @@ -151,7 +164,7 @@ fn find_best_leaf_obligation<'tcx>( // // We should probably fix the visitor to not do so instead, as this also // means the leaf obligation may be incorrect. - infcx + let obligation = infcx .fudge_inference_if_ok(|| { infcx .visit_proof_tree( @@ -161,7 +174,8 @@ fn find_best_leaf_obligation<'tcx>( .break_value() .ok_or(()) }) - .unwrap_or(obligation) + .unwrap_or(obligation); + deeply_normalize_for_diagnostics(infcx, obligation.param_env, obligation) } struct BestObligation<'tcx> { @@ -233,13 +247,13 @@ impl<'tcx> BestObligation<'tcx> { fn visit_well_formed_goal( &mut self, candidate: &inspect::InspectCandidate<'_, 'tcx>, - arg: ty::GenericArg<'tcx>, + term: ty::Term<'tcx>, ) -> ControlFlow> { let infcx = candidate.goal().infcx(); let param_env = candidate.goal().goal().param_env; let body_id = self.obligation.cause.body_id; - for obligation in wf::unnormalized_obligations(infcx, param_env, arg, self.span(), body_id) + for obligation in wf::unnormalized_obligations(infcx, param_env, term, self.span(), body_id) .into_iter() .flatten() { @@ -291,6 +305,40 @@ impl<'tcx> BestObligation<'tcx> { } } + /// When a higher-ranked projection goal fails, check that the corresponding + /// higher-ranked trait goal holds or not. This is because the process of + /// instantiating and then re-canonicalizing the binder of the projection goal + /// forces us to be unable to see that the leak check failed in the nested + /// `NormalizesTo` goal, so we don't fall back to the rigid projection check + /// that should catch when a projection goal fails due to an unsatisfied trait + /// goal. + fn detect_trait_error_in_higher_ranked_projection( + &mut self, + goal: &inspect::InspectGoal<'_, 'tcx>, + ) -> ControlFlow> { + let tcx = goal.infcx().tcx; + if let Some(projection_clause) = goal.goal().predicate.as_projection_clause() + && !projection_clause.bound_vars().is_empty() + { + let pred = projection_clause.map_bound(|proj| proj.projection_term.trait_ref(tcx)); + let obligation = Obligation::new( + tcx, + self.obligation.cause.clone(), + goal.goal().param_env, + deeply_normalize_for_diagnostics(goal.infcx(), goal.goal().param_env, pred), + ); + self.with_derived_obligation(obligation, |this| { + goal.infcx().visit_proof_tree_at_depth( + goal.goal().with(tcx, pred), + goal.depth() + 1, + this, + ) + }) + } else { + ControlFlow::Continue(()) + } + } + /// It is likely that `NormalizesTo` failed without any applicable candidates /// because the alias is not well-formed. /// @@ -360,7 +408,8 @@ impl<'tcx> ProofTreeVisitor<'tcx> for BestObligation<'tcx> { (true, Ok(Certainty::Maybe(MaybeCause::Ambiguity))) | (false, Err(_)) => {} _ => return ControlFlow::Continue(()), } - let pred_kind = goal.goal().predicate.kind(); + + let pred = goal.goal().predicate; let candidates = self.non_trivial_candidates(goal); let candidate = match candidates.as_slice() { @@ -374,7 +423,7 @@ impl<'tcx> ProofTreeVisitor<'tcx> for BestObligation<'tcx> { source: CandidateSource::Impl(impl_def_id), result: _, } = candidate.kind() - && goal.infcx().tcx.do_not_recommend_impl(impl_def_id) + && tcx.do_not_recommend_impl(impl_def_id) { trace!("#[do_not_recommend] -> exit"); return ControlFlow::Break(self.obligation.clone()); @@ -382,12 +431,12 @@ impl<'tcx> ProofTreeVisitor<'tcx> for BestObligation<'tcx> { // FIXME: Also, what about considering >1 layer up the stack? May be necessary // for normalizes-to. - let child_mode = match pred_kind.skip_binder() { - ty::PredicateKind::Clause(ty::ClauseKind::Trait(pred)) => { - ChildMode::Trait(pred_kind.rebind(pred)) + let child_mode = match pred.kind().skip_binder() { + ty::PredicateKind::Clause(ty::ClauseKind::Trait(trait_pred)) => { + ChildMode::Trait(pred.kind().rebind(trait_pred)) } - ty::PredicateKind::Clause(ty::ClauseKind::HostEffect(pred)) => { - ChildMode::Host(pred_kind.rebind(pred)) + ty::PredicateKind::Clause(ty::ClauseKind::HostEffect(host_pred)) => { + ChildMode::Host(pred.kind().rebind(host_pred)) } ty::PredicateKind::NormalizesTo(normalizes_to) if matches!( @@ -395,13 +444,13 @@ impl<'tcx> ProofTreeVisitor<'tcx> for BestObligation<'tcx> { ty::AliasTermKind::ProjectionTy | ty::AliasTermKind::ProjectionConst ) => { - ChildMode::Trait(pred_kind.rebind(ty::TraitPredicate { + ChildMode::Trait(pred.kind().rebind(ty::TraitPredicate { trait_ref: normalizes_to.alias.trait_ref(tcx), polarity: ty::PredicatePolarity::Positive, })) } - ty::PredicateKind::Clause(ty::ClauseKind::WellFormed(arg)) => { - return self.visit_well_formed_goal(candidate, arg); + ty::PredicateKind::Clause(ty::ClauseKind::WellFormed(term)) => { + return self.visit_well_formed_goal(candidate, term); } _ => ChildMode::PassThrough, }; @@ -416,9 +465,8 @@ impl<'tcx> ProofTreeVisitor<'tcx> for BestObligation<'tcx> { // We do this as a separate loop so that we do not choose to tell the user about some nested // goal before we encounter a `T: FnPtr` nested goal. for nested_goal in &nested_goals { - if let Some(fn_ptr_trait) = tcx.lang_items().fn_ptr_trait() - && let Some(poly_trait_pred) = nested_goal.goal().predicate.as_trait_clause() - && poly_trait_pred.def_id() == fn_ptr_trait + if let Some(poly_trait_pred) = nested_goal.goal().predicate.as_trait_clause() + && tcx.is_lang_item(poly_trait_pred.def_id(), LangItem::FnPtrTrait) && let Err(NoSolution) = nested_goal.result() { return ControlFlow::Break(self.obligation.clone()); @@ -429,10 +477,12 @@ impl<'tcx> ProofTreeVisitor<'tcx> for BestObligation<'tcx> { for nested_goal in nested_goals { trace!(nested_goal = ?(nested_goal.goal(), nested_goal.source(), nested_goal.result())); + let nested_pred = nested_goal.goal().predicate; + let make_obligation = |cause| Obligation { cause, param_env: nested_goal.goal().param_env, - predicate: nested_goal.goal().predicate, + predicate: nested_pred, recursion_depth: self.obligation.recursion_depth + 1, }; @@ -482,30 +532,21 @@ impl<'tcx> ProofTreeVisitor<'tcx> for BestObligation<'tcx> { // alias-relate may fail because the lhs or rhs can't be normalized, // and therefore is treated as rigid. - if let Some(ty::PredicateKind::AliasRelate(lhs, rhs, _)) = pred_kind.no_bound_vars() { - if let Some(obligation) = goal - .infcx() - .visit_proof_tree_at_depth( - goal.goal().with(goal.infcx().tcx, ty::ClauseKind::WellFormed(lhs.into())), - goal.depth() + 1, - self, - ) - .break_value() - { - return ControlFlow::Break(obligation); - } else if let Some(obligation) = goal - .infcx() - .visit_proof_tree_at_depth( - goal.goal().with(goal.infcx().tcx, ty::ClauseKind::WellFormed(rhs.into())), - goal.depth() + 1, - self, - ) - .break_value() - { - return ControlFlow::Break(obligation); - } + if let Some(ty::PredicateKind::AliasRelate(lhs, rhs, _)) = pred.kind().no_bound_vars() { + goal.infcx().visit_proof_tree_at_depth( + goal.goal().with(tcx, ty::ClauseKind::WellFormed(lhs.into())), + goal.depth() + 1, + self, + )?; + goal.infcx().visit_proof_tree_at_depth( + goal.goal().with(tcx, ty::ClauseKind::WellFormed(rhs.into())), + goal.depth() + 1, + self, + )?; } + self.detect_trait_error_in_higher_ranked_projection(goal)?; + ControlFlow::Break(self.obligation.clone()) } } diff --git a/compiler/rustc_trait_selection/src/solve/inspect/analyse.rs b/compiler/rustc_trait_selection/src/solve/inspect/analyse.rs index 48a05ad29fbd9..9795655e84222 100644 --- a/compiler/rustc_trait_selection/src/solve/inspect/analyse.rs +++ b/compiler/rustc_trait_selection/src/solve/inspect/analyse.rs @@ -292,7 +292,7 @@ impl<'a, 'tcx> InspectGoal<'a, 'tcx> { inspect::ProbeStep::NestedProbe(ref probe) => { match probe.kind { // These never assemble candidates for the goal we're trying to solve. - inspect::ProbeKind::UpcastProjectionCompatibility + inspect::ProbeKind::ProjectionCompatibility | inspect::ProbeKind::ShadowedEnvProbing => continue, inspect::ProbeKind::NormalizedSelfTyAssembly @@ -314,8 +314,10 @@ impl<'a, 'tcx> InspectGoal<'a, 'tcx> { } match probe.kind { - inspect::ProbeKind::UpcastProjectionCompatibility - | inspect::ProbeKind::ShadowedEnvProbing => bug!(), + inspect::ProbeKind::ProjectionCompatibility + | inspect::ProbeKind::ShadowedEnvProbing => { + bug!() + } inspect::ProbeKind::NormalizedSelfTyAssembly | inspect::ProbeKind::UnsizeAssembly => {} @@ -380,7 +382,7 @@ impl<'a, 'tcx> InspectGoal<'a, 'tcx> { if let Some(term_hack) = normalizes_to_term_hack { infcx .probe(|_| term_hack.constrain(infcx, DUMMY_SP, uncanonicalized_goal.param_env)) - .map(|certainty| ok.value.certainty.unify_with(certainty)) + .map(|certainty| ok.value.certainty.and(certainty)) } else { Ok(ok.value.certainty) } diff --git a/compiler/rustc_trait_selection/src/solve/normalize.rs b/compiler/rustc_trait_selection/src/solve/normalize.rs index 232357dc71a0d..d903f94b489d3 100644 --- a/compiler/rustc_trait_selection/src/solve/normalize.rs +++ b/compiler/rustc_trait_selection/src/solve/normalize.rs @@ -1,10 +1,10 @@ use std::assert_matches::assert_matches; use std::fmt::Debug; -use std::marker::PhantomData; use rustc_data_structures::stack::ensure_sufficient_stack; use rustc_infer::infer::InferCtxt; use rustc_infer::infer::at::At; +use rustc_infer::traits::solve::Goal; use rustc_infer::traits::{FromSolverError, Obligation, TraitEngine}; use rustc_middle::traits::ObligationCause; use rustc_middle::ty::{ @@ -45,11 +45,44 @@ where T: TypeFoldable>, E: FromSolverError<'tcx, NextSolverError<'tcx>>, { - let fulfill_cx = FulfillmentCtxt::new(at.infcx); - let mut folder = - NormalizationFolder { at, fulfill_cx, depth: 0, universes, _errors: PhantomData }; + let (value, coroutine_goals) = + deeply_normalize_with_skipped_universes_and_ambiguous_coroutine_goals( + at, value, universes, + )?; + assert_eq!(coroutine_goals, vec![]); - value.try_fold_with(&mut folder) + Ok(value) +} + +/// Deeply normalize all aliases in `value`. This does not handle inference and expects +/// its input to be already fully resolved. +/// +/// Additionally takes a list of universes which represents the binders which have been +/// entered before passing `value` to the function. This is currently needed for +/// `normalize_erasing_regions`, which skips binders as it walks through a type. +/// +/// This returns a set of stalled obligations involving coroutines if the typing mode of +/// the underlying infcx has any stalled coroutine def ids. +pub fn deeply_normalize_with_skipped_universes_and_ambiguous_coroutine_goals<'tcx, T, E>( + at: At<'_, 'tcx>, + value: T, + universes: Vec>, +) -> Result<(T, Vec>>), Vec> +where + T: TypeFoldable>, + E: FromSolverError<'tcx, NextSolverError<'tcx>>, +{ + let fulfill_cx = FulfillmentCtxt::new(at.infcx); + let mut folder = NormalizationFolder { + at, + fulfill_cx, + depth: 0, + universes, + stalled_coroutine_goals: vec![], + }; + let value = value.try_fold_with(&mut folder)?; + let errors = folder.fulfill_cx.select_all_or_error(at.infcx); + if errors.is_empty() { Ok((value, folder.stalled_coroutine_goals)) } else { Err(errors) } } struct NormalizationFolder<'me, 'tcx, E> { @@ -57,7 +90,7 @@ struct NormalizationFolder<'me, 'tcx, E> { fulfill_cx: FulfillmentCtxt<'tcx, E>, depth: usize, universes: Vec>, - _errors: PhantomData, + stalled_coroutine_goals: Vec>>, } impl<'tcx, E> NormalizationFolder<'_, 'tcx, E> @@ -98,10 +131,7 @@ where ); self.fulfill_cx.register_predicate_obligation(infcx, obligation); - let errors = self.fulfill_cx.select_all_or_error(infcx); - if !errors.is_empty() { - return Err(errors); - } + self.select_all_and_stall_coroutine_predicates()?; // Alias is guaranteed to be fully structurally resolved, // so we can super fold here. @@ -139,7 +169,7 @@ where let result = if infcx.predicate_may_hold(&obligation) { self.fulfill_cx.register_predicate_obligation(infcx, obligation); - let errors = self.fulfill_cx.select_all_or_error(infcx); + let errors = self.fulfill_cx.select_where_possible(infcx); if !errors.is_empty() { return Err(errors); } @@ -152,6 +182,27 @@ where self.depth -= 1; Ok(result) } + + fn select_all_and_stall_coroutine_predicates(&mut self) -> Result<(), Vec> { + let errors = self.fulfill_cx.select_where_possible(self.at.infcx); + if !errors.is_empty() { + return Err(errors); + } + + self.stalled_coroutine_goals.extend( + self.fulfill_cx + .drain_stalled_obligations_for_coroutines(self.at.infcx) + .into_iter() + .map(|obl| obl.as_goal()), + ); + + let errors = self.fulfill_cx.collect_remaining_errors(self.at.infcx); + if !errors.is_empty() { + return Err(errors); + } + + Ok(()) + } } impl<'tcx, E> FallibleTypeFolder> for NormalizationFolder<'_, 'tcx, E> @@ -253,20 +304,32 @@ impl<'tcx> TypeFolder> for DeeplyNormalizeForDiagnosticsFolder<'_, } fn fold_ty(&mut self, ty: Ty<'tcx>) -> Ty<'tcx> { - deeply_normalize_with_skipped_universes( - self.at, - ty, - vec![None; ty.outer_exclusive_binder().as_usize()], - ) - .unwrap_or_else(|_: Vec>| ty.super_fold_with(self)) + let infcx = self.at.infcx; + let result: Result<_, Vec>> = infcx.commit_if_ok(|_| { + deeply_normalize_with_skipped_universes_and_ambiguous_coroutine_goals( + self.at, + ty, + vec![None; ty.outer_exclusive_binder().as_usize()], + ) + }); + match result { + Ok((ty, _)) => ty, + Err(_) => ty.super_fold_with(self), + } } fn fold_const(&mut self, ct: ty::Const<'tcx>) -> ty::Const<'tcx> { - deeply_normalize_with_skipped_universes( - self.at, - ct, - vec![None; ct.outer_exclusive_binder().as_usize()], - ) - .unwrap_or_else(|_: Vec>| ct.super_fold_with(self)) + let infcx = self.at.infcx; + let result: Result<_, Vec>> = infcx.commit_if_ok(|_| { + deeply_normalize_with_skipped_universes_and_ambiguous_coroutine_goals( + self.at, + ct, + vec![None; ct.outer_exclusive_binder().as_usize()], + ) + }); + match result { + Ok((ct, _)) => ct, + Err(_) => ct.super_fold_with(self), + } } } diff --git a/compiler/rustc_trait_selection/src/solve/select.rs b/compiler/rustc_trait_selection/src/solve/select.rs index 4437fc5b0295f..4fdaf740287ba 100644 --- a/compiler/rustc_trait_selection/src/solve/select.rs +++ b/compiler/rustc_trait_selection/src/solve/select.rs @@ -177,7 +177,7 @@ fn to_selection<'tcx>( }, ProbeKind::NormalizedSelfTyAssembly | ProbeKind::UnsizeAssembly - | ProbeKind::UpcastProjectionCompatibility + | ProbeKind::ProjectionCompatibility | ProbeKind::OpaqueTypeStorageLookup { result: _ } | ProbeKind::Root { result: _ } | ProbeKind::ShadowedEnvProbing diff --git a/compiler/rustc_trait_selection/src/traits/auto_trait.rs b/compiler/rustc_trait_selection/src/traits/auto_trait.rs index 1fca2f4da7eee..02521c9453d98 100644 --- a/compiler/rustc_trait_selection/src/traits/auto_trait.rs +++ b/compiler/rustc_trait_selection/src/traits/auto_trait.rs @@ -382,7 +382,7 @@ impl<'tcx> AutoTraitFinder<'tcx> { for (new_region, old_region) in iter::zip(new_args.regions(), old_args.regions()) { - match (*new_region, *old_region) { + match (new_region.kind(), old_region.kind()) { // If both predicates have an `ReBound` (a HRTB) in the // same spot, we do nothing. (ty::ReBound(_, _), ty::ReBound(_, _)) => {} diff --git a/compiler/rustc_trait_selection/src/traits/coherence.rs b/compiler/rustc_trait_selection/src/traits/coherence.rs index bcc247ba53c2b..93c7dae9c5be6 100644 --- a/compiler/rustc_trait_selection/src/traits/coherence.rs +++ b/compiler/rustc_trait_selection/src/traits/coherence.rs @@ -537,7 +537,7 @@ fn plug_infer_with_placeholders<'tcx>( } fn visit_region(&mut self, r: ty::Region<'tcx>) { - if let ty::ReVar(vid) = *r { + if let ty::ReVar(vid) = r.kind() { let r = self .infcx .inner diff --git a/compiler/rustc_trait_selection/src/traits/dyn_compatibility.rs b/compiler/rustc_trait_selection/src/traits/dyn_compatibility.rs index 78a452439836f..220a847cc230f 100644 --- a/compiler/rustc_trait_selection/src/traits/dyn_compatibility.rs +++ b/compiler/rustc_trait_selection/src/traits/dyn_compatibility.rs @@ -13,9 +13,9 @@ use rustc_middle::query::Providers; use rustc_middle::ty::{ self, EarlyBinder, GenericArgs, Ty, TyCtxt, TypeFoldable, TypeFolder, TypeSuperFoldable, TypeSuperVisitable, TypeVisitable, TypeVisitableExt, TypeVisitor, TypingMode, Upcast, + elaborate, }; use rustc_span::Span; -use rustc_type_ir::elaborate; use smallvec::SmallVec; use tracing::{debug, instrument}; @@ -188,7 +188,7 @@ fn bounds_reference_self(tcx: TyCtxt<'_>, trait_def_id: DefId) -> SmallVec<[Span tcx.associated_items(trait_def_id) .in_definition_order() // We're only looking at associated type bounds - .filter(|item| item.kind == ty::AssocKind::Type) + .filter(|item| item.is_type()) // Ignore GATs with `Self: Sized` .filter(|item| !tcx.generics_require_sized_self(item.def_id)) .flat_map(|item| tcx.explicit_item_bounds(item.def_id).iter_identity_copied()) @@ -298,31 +298,33 @@ pub fn dyn_compatibility_violations_for_assoc_item( match item.kind { // Associated consts are never dyn-compatible, as they can't have `where` bounds yet at all, // and associated const bounds in trait objects aren't a thing yet either. - ty::AssocKind::Const => { - vec![DynCompatibilityViolation::AssocConst(item.name, item.ident(tcx).span)] + ty::AssocKind::Const { name } => { + vec![DynCompatibilityViolation::AssocConst(name, item.ident(tcx).span)] } - ty::AssocKind::Fn => virtual_call_violations_for_method(tcx, trait_def_id, item) - .into_iter() - .map(|v| { - let node = tcx.hir_get_if_local(item.def_id); - // Get an accurate span depending on the violation. - let span = match (&v, node) { - (MethodViolationCode::ReferencesSelfInput(Some(span)), _) => *span, - (MethodViolationCode::UndispatchableReceiver(Some(span)), _) => *span, - (MethodViolationCode::ReferencesImplTraitInTrait(span), _) => *span, - (MethodViolationCode::ReferencesSelfOutput, Some(node)) => { - node.fn_decl().map_or(item.ident(tcx).span, |decl| decl.output.span()) - } - _ => item.ident(tcx).span, - }; + ty::AssocKind::Fn { name, .. } => { + virtual_call_violations_for_method(tcx, trait_def_id, item) + .into_iter() + .map(|v| { + let node = tcx.hir_get_if_local(item.def_id); + // Get an accurate span depending on the violation. + let span = match (&v, node) { + (MethodViolationCode::ReferencesSelfInput(Some(span)), _) => *span, + (MethodViolationCode::UndispatchableReceiver(Some(span)), _) => *span, + (MethodViolationCode::ReferencesImplTraitInTrait(span), _) => *span, + (MethodViolationCode::ReferencesSelfOutput, Some(node)) => { + node.fn_decl().map_or(item.ident(tcx).span, |decl| decl.output.span()) + } + _ => item.ident(tcx).span, + }; - DynCompatibilityViolation::Method(item.name, v, span) - }) - .collect(), + DynCompatibilityViolation::Method(name, v, span) + }) + .collect() + } // Associated types can only be dyn-compatible if they have `Self: Sized` bounds. - ty::AssocKind::Type => { + ty::AssocKind::Type { .. } => { if !tcx.generics_of(item.def_id).is_own_empty() && !item.is_impl_trait_in_trait() { - vec![DynCompatibilityViolation::GAT(item.name, item.ident(tcx).span)] + vec![DynCompatibilityViolation::GAT(item.name(), item.ident(tcx).span)] } else { // We will permit associated types if they are explicitly mentioned in the trait object. // We can't check this here, as here we only check if it is guaranteed to not be possible. @@ -344,7 +346,7 @@ fn virtual_call_violations_for_method<'tcx>( let sig = tcx.fn_sig(method.def_id).instantiate_identity(); // The method's first parameter must be named `self` - if !method.fn_has_self_parameter { + if !method.is_method() { let sugg = if let Some(hir::Node::TraitItem(hir::TraitItem { generics, kind: hir::TraitItemKind::Fn(sig, _), @@ -583,27 +585,36 @@ fn receiver_is_dispatchable<'tcx>( // create a modified param env, with `Self: Unsize` and `U: Trait` (and all of // its supertraits) added to caller bounds. `U: ?Sized` is already implied here. let param_env = { - let param_env = tcx.param_env(method.def_id); + // N.B. We generally want to emulate the construction of the `unnormalized_param_env` + // in the param-env query here. The fact that we don't just start with the clauses + // in the param-env of the method is because those are already normalized, and mixing + // normalized and unnormalized copies of predicates in `normalize_param_env_or_error` + // will cause ambiguity that the user can't really avoid. + // + // We leave out certain complexities of the param-env query here. Specifically, we: + // 1. Do not add `~const` bounds since there are no `dyn const Trait`s. + // 2. Do not add RPITIT self projection bounds for defaulted methods, since we + // are not constructing a param-env for "inside" of the body of the defaulted + // method, so we don't really care about projecting to a specific RPIT type, + // and because RPITITs are not dyn compatible (yet). + let mut predicates = tcx.predicates_of(method.def_id).instantiate_identity(tcx).predicates; // Self: Unsize let unsize_predicate = - ty::TraitRef::new(tcx, unsize_did, [tcx.types.self_param, unsized_self_ty]).upcast(tcx); + ty::TraitRef::new(tcx, unsize_did, [tcx.types.self_param, unsized_self_ty]); + predicates.push(unsize_predicate.upcast(tcx)); // U: Trait - let trait_predicate = { - let trait_def_id = method.trait_container(tcx).unwrap(); - let args = GenericArgs::for_item(tcx, trait_def_id, |param, _| { - if param.index == 0 { unsized_self_ty.into() } else { tcx.mk_param_from_def(param) } - }); - - ty::TraitRef::new_from_args(tcx, trait_def_id, args).upcast(tcx) - }; + let trait_def_id = method.trait_container(tcx).unwrap(); + let args = GenericArgs::for_item(tcx, trait_def_id, |param, _| { + if param.index == 0 { unsized_self_ty.into() } else { tcx.mk_param_from_def(param) } + }); + let trait_predicate = ty::TraitRef::new_from_args(tcx, trait_def_id, args); + predicates.push(trait_predicate.upcast(tcx)); normalize_param_env_or_error( tcx, - ty::ParamEnv::new(tcx.mk_clauses_from_iter( - param_env.caller_bounds().iter().chain([unsize_predicate, trait_predicate]), - )), + ty::ParamEnv::new(tcx.mk_clauses(&predicates)), ObligationCause::dummy_with_span(tcx.def_span(method.def_id)), ) }; @@ -788,7 +799,7 @@ impl<'tcx> TypeFolder> for EraseEscapingBoundRegions<'tcx> { } fn fold_region(&mut self, r: ty::Region<'tcx>) -> ty::Region<'tcx> { - if let ty::ReBound(debruijn, _) = *r + if let ty::ReBound(debruijn, _) = r.kind() && debruijn < self.binder { r diff --git a/compiler/rustc_trait_selection/src/traits/effects.rs b/compiler/rustc_trait_selection/src/traits/effects.rs index 3c127416cbf7c..cc5861b5a1f59 100644 --- a/compiler/rustc_trait_selection/src/traits/effects.rs +++ b/compiler/rustc_trait_selection/src/traits/effects.rs @@ -4,10 +4,10 @@ use rustc_infer::traits::{ ImplDerivedHostCause, ImplSource, Obligation, ObligationCauseCode, PredicateObligation, }; use rustc_middle::span_bug; +use rustc_middle::traits::query::NoSolution; +use rustc_middle::ty::elaborate::elaborate; use rustc_middle::ty::fast_reject::DeepRejectCtxt; use rustc_middle::ty::{self, TypingMode}; -use rustc_type_ir::elaborate::elaborate; -use rustc_type_ir::solve::NoSolution; use thin_vec::{ThinVec, thin_vec}; use super::SelectionContext; @@ -106,10 +106,6 @@ fn match_candidate<'tcx>( more_nested(selcx, &mut nested); - for nested in &mut nested { - nested.set_depth_from_parent(obligation.recursion_depth); - } - Ok(nested) } @@ -256,6 +252,9 @@ fn evaluate_host_effect_for_destruct_goal<'tcx>( let self_ty = obligation.predicate.self_ty(); let const_conditions = match *self_ty.kind() { + // `ManuallyDrop` is trivially `~const Destruct` as we do not run any drop glue on it. + ty::Adt(adt_def, _) if adt_def.is_manually_drop() => thin_vec![], + // An ADT is `~const Destruct` only if all of the fields are, // *and* if there is a `Drop` impl, that `Drop` impl is also `~const`. ty::Adt(adt_def, args) => { @@ -263,7 +262,7 @@ fn evaluate_host_effect_for_destruct_goal<'tcx>( .all_fields() .map(|field| ty::TraitRef::new(tcx, destruct_def_id, [field.ty(tcx, args)])) .collect(); - match adt_def.destructor(tcx).map(|dtor| dtor.constness) { + match adt_def.destructor(tcx).map(|dtor| tcx.constness(dtor.did)) { // `Drop` impl exists, but it's not const. Type cannot be `~const Destruct`. Some(hir::Constness::NotConst) => return Err(EvaluationFailure::NoSolution), // `Drop` impl exists, and it's const. Require `Ty: ~const Drop` to hold. @@ -378,10 +377,6 @@ fn evaluate_host_effect_from_selection_candiate<'tcx>( }), ); - for nested in &mut nested { - nested.set_depth_from_parent(obligation.recursion_depth); - } - Ok(nested) } _ => Err(EvaluationFailure::NoSolution), diff --git a/compiler/rustc_trait_selection/src/traits/engine.rs b/compiler/rustc_trait_selection/src/traits/engine.rs index 9f3178f887927..8d6e6b4a65165 100644 --- a/compiler/rustc_trait_selection/src/traits/engine.rs +++ b/compiler/rustc_trait_selection/src/traits/engine.rs @@ -14,8 +14,8 @@ use rustc_macros::extension; use rustc_middle::arena::ArenaAllocatable; use rustc_middle::traits::query::NoSolution; use rustc_middle::ty::error::TypeError; +use rustc_middle::ty::relate::Relate; use rustc_middle::ty::{self, Ty, TyCtxt, TypeFoldable, Upcast, Variance}; -use rustc_type_ir::relate::Relate; use super::{FromSolverError, FulfillmentContext, ScrubbedTraitError, TraitEngine}; use crate::error_reporting::InferCtxtErrorExt; diff --git a/compiler/rustc_trait_selection/src/traits/fulfill.rs b/compiler/rustc_trait_selection/src/traits/fulfill.rs index e39f8e673dbac..34c3c905bd977 100644 --- a/compiler/rustc_trait_selection/src/traits/fulfill.rs +++ b/compiler/rustc_trait_selection/src/traits/fulfill.rs @@ -24,10 +24,10 @@ use super::{ }; use crate::error_reporting::InferCtxtErrorExt; use crate::infer::{InferCtxt, TyOrConstInferVar}; -use crate::traits::EvaluateConstErr; use crate::traits::normalize::normalize_with_depth_to; use crate::traits::project::{PolyProjectionObligation, ProjectionCacheKeyExt as _}; use crate::traits::query::evaluate_obligation::InferCtxtExt; +use crate::traits::{EvaluateConstErr, sizedness_fast_path}; pub(crate) type PendingPredicateObligations<'tcx> = ThinVec>; @@ -162,7 +162,7 @@ where self.select(selcx) } - fn drain_unstalled_obligations( + fn drain_stalled_obligations_for_coroutines( &mut self, infcx: &InferCtxt<'tcx>, ) -> PredicateObligations<'tcx> { @@ -225,9 +225,15 @@ struct FulfillProcessor<'a, 'tcx> { selcx: SelectionContext<'a, 'tcx>, } -fn mk_pending<'tcx>(os: PredicateObligations<'tcx>) -> PendingPredicateObligations<'tcx> { +fn mk_pending<'tcx>( + parent: &PredicateObligation<'tcx>, + os: PredicateObligations<'tcx>, +) -> PendingPredicateObligations<'tcx> { os.into_iter() - .map(|o| PendingPredicateObligation { obligation: o, stalled_on: vec![] }) + .map(|mut o| { + o.set_depth_from_parent(parent.recursion_depth); + PendingPredicateObligation { obligation: o, stalled_on: vec![] } + }) .collect() } @@ -329,6 +335,10 @@ impl<'a, 'tcx> ObligationProcessor for FulfillProcessor<'a, 'tcx> { let infcx = self.selcx.infcx; + if sizedness_fast_path(infcx.tcx, obligation.predicate) { + return ProcessResult::Changed(thin_vec::thin_vec![]); + } + if obligation.predicate.has_aliases() { let mut obligations = PredicateObligations::new(); let predicate = normalize_with_depth_to( @@ -341,7 +351,7 @@ impl<'a, 'tcx> ObligationProcessor for FulfillProcessor<'a, 'tcx> { ); if predicate != obligation.predicate { obligations.push(obligation.with(infcx.tcx, predicate)); - return ProcessResult::Changed(mk_pending(obligations)); + return ProcessResult::Changed(mk_pending(obligation, obligations)); } } let binder = obligation.predicate.kind(); @@ -385,7 +395,7 @@ impl<'a, 'tcx> ObligationProcessor for FulfillProcessor<'a, 'tcx> { let mut obligations = PredicateObligations::with_capacity(1); obligations.push(obligation.with(infcx.tcx, pred)); - ProcessResult::Changed(mk_pending(obligations)) + ProcessResult::Changed(mk_pending(obligation, obligations)) } ty::PredicateKind::Ambiguous => ProcessResult::Unchanged, ty::PredicateKind::NormalizesTo(..) => { @@ -410,6 +420,7 @@ impl<'a, 'tcx> ObligationProcessor for FulfillProcessor<'a, 'tcx> { let host_obligation = obligation.with(infcx.tcx, data); self.process_host_obligation( + obligation, host_obligation, &mut pending_obligation.stalled_on, ) @@ -486,7 +497,10 @@ impl<'a, 'tcx> ObligationProcessor for FulfillProcessor<'a, 'tcx> { // `>::Output` when this is an `Expr` representing // `lhs + rhs`. ty::ConstKind::Expr(_) => { - return ProcessResult::Changed(mk_pending(PredicateObligations::new())); + return ProcessResult::Changed(mk_pending( + obligation, + PredicateObligations::new(), + )); } ty::ConstKind::Placeholder(_) => { bug!("placeholder const {:?} in old solver", ct) @@ -503,7 +517,10 @@ impl<'a, 'tcx> ObligationProcessor for FulfillProcessor<'a, 'tcx> { ct_ty, ty, ) { - Ok(inf_ok) => ProcessResult::Changed(mk_pending(inf_ok.into_obligations())), + Ok(inf_ok) => ProcessResult::Changed(mk_pending( + obligation, + inf_ok.into_obligations(), + )), Err(_) => ProcessResult::Error(FulfillmentErrorCode::Select( SelectionError::ConstArgHasWrongType { ct, ct_ty, expected_ty: ty }, )), @@ -523,21 +540,21 @@ impl<'a, 'tcx> ObligationProcessor for FulfillProcessor<'a, 'tcx> { self.selcx.infcx.err_ctxt().report_overflow_obligation(&obligation, false); } - ty::PredicateKind::Clause(ty::ClauseKind::WellFormed(arg)) => { + ty::PredicateKind::Clause(ty::ClauseKind::WellFormed(term)) => { match wf::obligations( self.selcx.infcx, obligation.param_env, obligation.cause.body_id, obligation.recursion_depth + 1, - arg, + term, obligation.cause.span, ) { None => { pending_obligation.stalled_on = - vec![TyOrConstInferVar::maybe_from_generic_arg(arg).unwrap()]; + vec![TyOrConstInferVar::maybe_from_term(term).unwrap()]; ProcessResult::Unchanged } - Some(os) => ProcessResult::Changed(mk_pending(os)), + Some(os) => ProcessResult::Changed(mk_pending(obligation, os)), } } @@ -553,11 +570,8 @@ impl<'a, 'tcx> ObligationProcessor for FulfillProcessor<'a, 'tcx> { vec![TyOrConstInferVar::Ty(a), TyOrConstInferVar::Ty(b)]; ProcessResult::Unchanged } - Ok(Ok(mut ok)) => { - for subobligation in &mut ok.obligations { - subobligation.set_depth_from_parent(obligation.recursion_depth); - } - ProcessResult::Changed(mk_pending(ok.obligations)) + Ok(Ok(ok)) => { + ProcessResult::Changed(mk_pending(obligation, ok.obligations)) } Ok(Err(err)) => { let expected_found = if subtype.a_is_expected { @@ -582,7 +596,9 @@ impl<'a, 'tcx> ObligationProcessor for FulfillProcessor<'a, 'tcx> { vec![TyOrConstInferVar::Ty(a), TyOrConstInferVar::Ty(b)]; ProcessResult::Unchanged } - Ok(Ok(ok)) => ProcessResult::Changed(mk_pending(ok.obligations)), + Ok(Ok(ok)) => { + ProcessResult::Changed(mk_pending(obligation, ok.obligations)) + } Ok(Err(err)) => { let expected_found = ExpectedFound::new(coerce.b, coerce.a); ProcessResult::Error(FulfillmentErrorCode::Subtype(expected_found, err)) @@ -645,6 +661,7 @@ impl<'a, 'tcx> ObligationProcessor for FulfillProcessor<'a, 'tcx> { ) { return ProcessResult::Changed(mk_pending( + obligation, new_obligations.into_obligations(), )); } @@ -659,6 +676,7 @@ impl<'a, 'tcx> ObligationProcessor for FulfillProcessor<'a, 'tcx> { .eq(DefineOpaqueTypes::Yes, c1, c2) { return ProcessResult::Changed(mk_pending( + obligation, new_obligations.into_obligations(), )); } @@ -704,9 +722,10 @@ impl<'a, 'tcx> ObligationProcessor for FulfillProcessor<'a, 'tcx> { c1, c2, ) { - Ok(inf_ok) => { - ProcessResult::Changed(mk_pending(inf_ok.into_obligations())) - } + Ok(inf_ok) => ProcessResult::Changed(mk_pending( + obligation, + inf_ok.into_obligations(), + )), Err(err) => { ProcessResult::Error(FulfillmentErrorCode::ConstEquate( ExpectedFound::new(c1, c2), @@ -790,7 +809,7 @@ impl<'a, 'tcx> FulfillProcessor<'a, 'tcx> { match self.selcx.poly_select(&trait_obligation) { Ok(Some(impl_source)) => { debug!("selecting trait at depth {} yielded Ok(Some)", obligation.recursion_depth); - ProcessResult::Changed(mk_pending(impl_source.nested_obligations())) + ProcessResult::Changed(mk_pending(obligation, impl_source.nested_obligations())) } Ok(None) => { debug!("selecting trait at depth {} yielded Ok(None)", obligation.recursion_depth); @@ -854,7 +873,7 @@ impl<'a, 'tcx> FulfillProcessor<'a, 'tcx> { } match project::poly_project_and_unify_term(&mut self.selcx, &project_obligation) { - ProjectAndUnifyResult::Holds(os) => ProcessResult::Changed(mk_pending(os)), + ProjectAndUnifyResult::Holds(os) => ProcessResult::Changed(mk_pending(obligation, os)), ProjectAndUnifyResult::FailedNormalization => { stalled_on.clear(); stalled_on.extend(args_infer_vars( @@ -868,7 +887,7 @@ impl<'a, 'tcx> FulfillProcessor<'a, 'tcx> { let mut obligations = PredicateObligations::with_capacity(1); obligations.push(project_obligation.with(tcx, project_obligation.predicate)); - ProcessResult::Changed(mk_pending(obligations)) + ProcessResult::Changed(mk_pending(obligation, obligations)) } ProjectAndUnifyResult::MismatchedProjectionTypes(e) => { ProcessResult::Error(FulfillmentErrorCode::Project(e)) @@ -878,11 +897,12 @@ impl<'a, 'tcx> FulfillProcessor<'a, 'tcx> { fn process_host_obligation( &mut self, + obligation: &PredicateObligation<'tcx>, host_obligation: HostEffectObligation<'tcx>, stalled_on: &mut Vec, ) -> ProcessResult, FulfillmentErrorCode<'tcx>> { match effects::evaluate_host_effect_obligation(&mut self.selcx, &host_obligation) { - Ok(nested) => ProcessResult::Changed(mk_pending(nested)), + Ok(nested) => ProcessResult::Changed(mk_pending(obligation, nested)), Err(effects::EvaluationFailure::Ambiguous) => { stalled_on.clear(); stalled_on.extend(args_infer_vars( diff --git a/compiler/rustc_trait_selection/src/traits/mod.rs b/compiler/rustc_trait_selection/src/traits/mod.rs index de337710b5ef7..5b938456e03b0 100644 --- a/compiler/rustc_trait_selection/src/traits/mod.rs +++ b/compiler/rustc_trait_selection/src/traits/mod.rs @@ -51,7 +51,7 @@ pub use self::dyn_compatibility::{ pub use self::engine::{ObligationCtxt, TraitEngineExt}; pub use self::fulfill::{FulfillmentContext, OldSolverError, PendingPredicateObligation}; pub use self::normalize::NormalizeExt; -pub use self::project::{normalize_inherent_projection, normalize_projection_ty}; +pub use self::project::{normalize_inherent_projection, normalize_projection_term}; pub use self::select::{ EvaluationCache, EvaluationResult, IntercrateAmbiguityCause, OverflowError, SelectionCache, SelectionContext, @@ -65,8 +65,8 @@ pub use self::specialize::{ pub use self::structural_normalize::StructurallyNormalizeExt; pub use self::util::{ BoundVarReplacer, PlaceholderReplacer, elaborate, expand_trait_aliases, impl_item_is_final, - supertrait_def_ids, supertraits, transitive_bounds_that_define_assoc_item, upcast_choices, - with_replaced_escaping_bound_vars, + sizedness_fast_path, supertrait_def_ids, supertraits, transitive_bounds_that_define_assoc_item, + upcast_choices, with_replaced_escaping_bound_vars, }; use crate::error_reporting::InferCtxtErrorExt; use crate::infer::outlives::env::OutlivesEnvironment; @@ -643,7 +643,7 @@ fn replace_param_and_infer_args_with_placeholder<'tcx>( ) -> GenericArgsRef<'tcx> { struct ReplaceParamAndInferWithPlaceholder<'tcx> { tcx: TyCtxt<'tcx>, - idx: u32, + idx: ty::BoundVar, } impl<'tcx> TypeFolder> for ReplaceParamAndInferWithPlaceholder<'tcx> { @@ -653,19 +653,13 @@ fn replace_param_and_infer_args_with_placeholder<'tcx>( fn fold_ty(&mut self, t: Ty<'tcx>) -> Ty<'tcx> { if let ty::Infer(_) = t.kind() { - let idx = { - let idx = self.idx; - self.idx += 1; - idx - }; + let idx = self.idx; + self.idx += 1; Ty::new_placeholder( self.tcx, ty::PlaceholderType { universe: ty::UniverseIndex::ROOT, - bound: ty::BoundTy { - var: ty::BoundVar::from_u32(idx), - kind: ty::BoundTyKind::Anon, - }, + bound: ty::BoundTy { var: idx, kind: ty::BoundTyKind::Anon }, }, ) } else { @@ -675,16 +669,11 @@ fn replace_param_and_infer_args_with_placeholder<'tcx>( fn fold_const(&mut self, c: ty::Const<'tcx>) -> ty::Const<'tcx> { if let ty::ConstKind::Infer(_) = c.kind() { + let idx = self.idx; + self.idx += 1; ty::Const::new_placeholder( self.tcx, - ty::PlaceholderConst { - universe: ty::UniverseIndex::ROOT, - bound: ty::BoundVar::from_u32({ - let idx = self.idx; - self.idx += 1; - idx - }), - }, + ty::PlaceholderConst { universe: ty::UniverseIndex::ROOT, bound: idx }, ) } else { c.super_fold_with(self) @@ -692,7 +681,7 @@ fn replace_param_and_infer_args_with_placeholder<'tcx>( } } - args.fold_with(&mut ReplaceParamAndInferWithPlaceholder { tcx, idx: 0 }) + args.fold_with(&mut ReplaceParamAndInferWithPlaceholder { tcx, idx: ty::BoundVar::ZERO }) } /// Normalizes the predicates and checks whether they hold in an empty environment. If this diff --git a/compiler/rustc_trait_selection/src/traits/normalize.rs b/compiler/rustc_trait_selection/src/traits/normalize.rs index ad62b456ad461..88a0c402702e1 100644 --- a/compiler/rustc_trait_selection/src/traits/normalize.rs +++ b/compiler/rustc_trait_selection/src/traits/normalize.rs @@ -1,6 +1,7 @@ //! Deeply normalize types using the old trait solver. use rustc_data_structures::stack::ensure_sufficient_stack; +use rustc_hir::def::DefKind; use rustc_infer::infer::at::At; use rustc_infer::infer::{InferCtxt, InferOk}; use rustc_infer::traits::{ @@ -10,15 +11,12 @@ use rustc_macros::extension; use rustc_middle::span_bug; use rustc_middle::traits::{ObligationCause, ObligationCauseCode}; use rustc_middle::ty::{ - self, Ty, TyCtxt, TypeFoldable, TypeFolder, TypeSuperFoldable, TypeVisitable, TypeVisitableExt, - TypingMode, + self, AliasTerm, Term, Ty, TyCtxt, TypeFoldable, TypeFolder, TypeSuperFoldable, TypeVisitable, + TypeVisitableExt, TypingMode, }; use tracing::{debug, instrument}; -use super::{ - BoundVarReplacer, PlaceholderReplacer, SelectionContext, project, - with_replaced_escaping_bound_vars, -}; +use super::{BoundVarReplacer, PlaceholderReplacer, SelectionContext, project}; use crate::error_reporting::InferCtxtErrorExt; use crate::error_reporting::traits::OverflowCause; use crate::solve::NextSolverError; @@ -77,7 +75,15 @@ impl<'tcx> At<'_, 'tcx> { .into_value_registering_obligations(self.infcx, &mut *fulfill_cx); let errors = fulfill_cx.select_all_or_error(self.infcx); let value = self.infcx.resolve_vars_if_possible(value); - if errors.is_empty() { Ok(value) } else { Err(errors) } + if errors.is_empty() { + Ok(value) + } else { + // Drop pending obligations, since deep normalization may happen + // in a loop and we don't want to trigger the assertion on the next + // iteration due to pending ambiguous obligations we've left over. + let _ = fulfill_cx.collect_remaining_errors(self.infcx); + Err(errors) + } } } } @@ -130,6 +136,7 @@ pub(super) fn needs_normalization<'tcx, T: TypeVisitable>>( // FIXME(#132279): We likely want to reveal opaques during post borrowck analysis TypingMode::Coherence | TypingMode::Analysis { .. } + | TypingMode::Borrowck { .. } | TypingMode::PostBorrowckAnalysis { .. } => flags.remove(ty::TypeFlags::HAS_TY_OPAQUE), TypingMode::PostAnalysis => {} } @@ -169,6 +176,163 @@ impl<'a, 'b, 'tcx> AssocTypeNormalizer<'a, 'b, 'tcx> { if !needs_normalization(self.selcx.infcx, &value) { value } else { value.fold_with(self) } } + + // FIXME(mgca): While this supports constants, it is only used for types by default right now + #[instrument(level = "debug", skip(self), ret)] + fn normalize_trait_projection(&mut self, proj: AliasTerm<'tcx>) -> Term<'tcx> { + if !proj.has_escaping_bound_vars() { + // When we don't have escaping bound vars we can normalize ambig aliases + // to inference variables (done in `normalize_projection_ty`). This would + // be wrong if there were escaping bound vars as even if we instantiated + // the bound vars with placeholders, we wouldn't be able to map them back + // after normalization succeeded. + // + // Also, as an optimization: when we don't have escaping bound vars, we don't + // need to replace them with placeholders (see branch below). + let proj = proj.fold_with(self); + project::normalize_projection_term( + self.selcx, + self.param_env, + proj, + self.cause.clone(), + self.depth, + self.obligations, + ) + } else { + // If there are escaping bound vars, we temporarily replace the + // bound vars with placeholders. Note though, that in the case + // that we still can't project for whatever reason (e.g. self + // type isn't known enough), we *can't* register an obligation + // and return an inference variable (since then that obligation + // would have bound vars and that's a can of worms). Instead, + // we just give up and fall back to pretending like we never tried! + // + // Note: this isn't necessarily the final approach here; we may + // want to figure out how to register obligations with escaping vars + // or handle this some other way. + let infcx = self.selcx.infcx; + let (proj, mapped_regions, mapped_types, mapped_consts) = + BoundVarReplacer::replace_bound_vars(infcx, &mut self.universes, proj); + let proj = proj.fold_with(self); + let normalized_term = project::opt_normalize_projection_term( + self.selcx, + self.param_env, + proj, + self.cause.clone(), + self.depth, + self.obligations, + ) + .ok() + .flatten() + .unwrap_or(proj.to_term(infcx.tcx)); + + PlaceholderReplacer::replace_placeholders( + infcx, + mapped_regions, + mapped_types, + mapped_consts, + &self.universes, + normalized_term, + ) + } + } + + // FIXME(mgca): While this supports constants, it is only used for types by default right now + #[instrument(level = "debug", skip(self), ret)] + fn normalize_inherent_projection(&mut self, inherent: AliasTerm<'tcx>) -> Term<'tcx> { + if !inherent.has_escaping_bound_vars() { + // When we don't have escaping bound vars we can normalize ambig aliases + // to inference variables (done in `normalize_projection_ty`). This would + // be wrong if there were escaping bound vars as even if we instantiated + // the bound vars with placeholders, we wouldn't be able to map them back + // after normalization succeeded. + // + // Also, as an optimization: when we don't have escaping bound vars, we don't + // need to replace them with placeholders (see branch below). + + let inherent = inherent.fold_with(self); + project::normalize_inherent_projection( + self.selcx, + self.param_env, + inherent, + self.cause.clone(), + self.depth, + self.obligations, + ) + } else { + let infcx = self.selcx.infcx; + let (inherent, mapped_regions, mapped_types, mapped_consts) = + BoundVarReplacer::replace_bound_vars(infcx, &mut self.universes, inherent); + let inherent = inherent.fold_with(self); + let inherent = project::normalize_inherent_projection( + self.selcx, + self.param_env, + inherent, + self.cause.clone(), + self.depth, + self.obligations, + ); + + PlaceholderReplacer::replace_placeholders( + infcx, + mapped_regions, + mapped_types, + mapped_consts, + &self.universes, + inherent, + ) + } + } + + // FIXME(mgca): While this supports constants, it is only used for types by default right now + #[instrument(level = "debug", skip(self), ret)] + fn normalize_free_alias(&mut self, free: AliasTerm<'tcx>) -> Term<'tcx> { + let recursion_limit = self.cx().recursion_limit(); + if !recursion_limit.value_within_limit(self.depth) { + self.selcx.infcx.err_ctxt().report_overflow_error( + OverflowCause::DeeplyNormalize(free.into()), + self.cause.span, + false, + |diag| { + diag.note(crate::fluent_generated::trait_selection_ty_alias_overflow); + }, + ); + } + + let infcx = self.selcx.infcx; + self.obligations.extend( + // FIXME(BoxyUwU): + // FIXME(lazy_type_alias): + // It seems suspicious to instantiate the predicates with arguments that might be bound vars, + // we might wind up instantiating one of these bound vars underneath a hrtb. + infcx.tcx.predicates_of(free.def_id).instantiate_own(infcx.tcx, free.args).map( + |(mut predicate, span)| { + if free.has_escaping_bound_vars() { + (predicate, ..) = BoundVarReplacer::replace_bound_vars( + infcx, + &mut self.universes, + predicate, + ); + } + let mut cause = self.cause.clone(); + cause.map_code(|code| ObligationCauseCode::TypeAlias(code, span, free.def_id)); + Obligation::new(infcx.tcx, cause, self.param_env, predicate) + }, + ), + ); + self.depth += 1; + let res = if free.kind(infcx.tcx).is_type() { + infcx.tcx.type_of(free.def_id).instantiate(infcx.tcx, free.args).fold_with(self).into() + } else { + // FIXME(mgca): once const items are actual aliases defined as equal to type system consts + // this should instead use that rather than evaluating. + super::evaluate_const(infcx, free.to_term(infcx.tcx).expect_const(), self.param_env) + .super_fold_with(self) + .into() + }; + self.depth -= 1; + res + } } impl<'a, 'b, 'tcx> TypeFolder> for AssocTypeNormalizer<'a, 'b, 'tcx> { @@ -226,6 +390,7 @@ impl<'a, 'b, 'tcx> TypeFolder> for AssocTypeNormalizer<'a, 'b, 'tcx // FIXME(#132279): We likely want to reveal opaques during post borrowck analysis TypingMode::Coherence | TypingMode::Analysis { .. } + | TypingMode::Borrowck { .. } | TypingMode::PostBorrowckAnalysis { .. } => ty.super_fold_with(self), TypingMode::PostAnalysis => { let recursion_limit = self.cx().recursion_limit(); @@ -249,183 +414,63 @@ impl<'a, 'b, 'tcx> TypeFolder> for AssocTypeNormalizer<'a, 'b, 'tcx } } - ty::Projection if !data.has_escaping_bound_vars() => { - // This branch is *mostly* just an optimization: when we don't - // have escaping bound vars, we don't need to replace them with - // placeholders (see branch below). *Also*, we know that we can - // register an obligation to *later* project, since we know - // there won't be bound vars there. - let data = data.fold_with(self); - let normalized_ty = project::normalize_projection_ty( - self.selcx, - self.param_env, - data, - self.cause.clone(), - self.depth, - self.obligations, - ); - debug!( - ?self.depth, - ?ty, - ?normalized_ty, - obligations.len = ?self.obligations.len(), - "AssocTypeNormalizer: normalized type" - ); - normalized_ty.expect_type() - } - - ty::Projection => { - // If there are escaping bound vars, we temporarily replace the - // bound vars with placeholders. Note though, that in the case - // that we still can't project for whatever reason (e.g. self - // type isn't known enough), we *can't* register an obligation - // and return an inference variable (since then that obligation - // would have bound vars and that's a can of worms). Instead, - // we just give up and fall back to pretending like we never tried! - // - // Note: this isn't necessarily the final approach here; we may - // want to figure out how to register obligations with escaping vars - // or handle this some other way. - - let infcx = self.selcx.infcx; - let (data, mapped_regions, mapped_types, mapped_consts) = - BoundVarReplacer::replace_bound_vars(infcx, &mut self.universes, data); - let data = data.fold_with(self); - let normalized_ty = project::opt_normalize_projection_term( - self.selcx, - self.param_env, - data.into(), - self.cause.clone(), - self.depth, - self.obligations, - ) - .ok() - .flatten() - .map(|term| term.expect_type()) - .map(|normalized_ty| { - PlaceholderReplacer::replace_placeholders( - infcx, - mapped_regions, - mapped_types, - mapped_consts, - &self.universes, - normalized_ty, - ) - }) - .unwrap_or_else(|| ty.super_fold_with(self)); - - debug!( - ?self.depth, - ?ty, - ?normalized_ty, - obligations.len = ?self.obligations.len(), - "AssocTypeNormalizer: normalized type" - ); - normalized_ty - } - ty::Weak => { - let recursion_limit = self.cx().recursion_limit(); - if !recursion_limit.value_within_limit(self.depth) { - self.selcx.infcx.err_ctxt().report_overflow_error( - OverflowCause::DeeplyNormalize(data.into()), - self.cause.span, - false, - |diag| { - diag.note(crate::fluent_generated::trait_selection_ty_alias_overflow); - }, - ); - } - - let infcx = self.selcx.infcx; - self.obligations.extend( - infcx.tcx.predicates_of(data.def_id).instantiate_own(infcx.tcx, data.args).map( - |(mut predicate, span)| { - if data.has_escaping_bound_vars() { - (predicate, ..) = BoundVarReplacer::replace_bound_vars( - infcx, - &mut self.universes, - predicate, - ); - } - let mut cause = self.cause.clone(); - cause.map_code(|code| { - ObligationCauseCode::TypeAlias(code, span, data.def_id) - }); - Obligation::new(infcx.tcx, cause, self.param_env, predicate) - }, - ), - ); - self.depth += 1; - let res = infcx - .tcx - .type_of(data.def_id) - .instantiate(infcx.tcx, data.args) - .fold_with(self); - self.depth -= 1; - res - } - - ty::Inherent if !data.has_escaping_bound_vars() => { - // This branch is *mostly* just an optimization: when we don't - // have escaping bound vars, we don't need to replace them with - // placeholders (see branch below). *Also*, we know that we can - // register an obligation to *later* project, since we know - // there won't be bound vars there. - - let data = data.fold_with(self); - - project::normalize_inherent_projection( - self.selcx, - self.param_env, - data, - self.cause.clone(), - self.depth, - self.obligations, - ) - } - - ty::Inherent => { - let infcx = self.selcx.infcx; - let (data, mapped_regions, mapped_types, mapped_consts) = - BoundVarReplacer::replace_bound_vars(infcx, &mut self.universes, data); - let data = data.fold_with(self); - let ty = project::normalize_inherent_projection( - self.selcx, - self.param_env, - data, - self.cause.clone(), - self.depth, - self.obligations, - ); - - PlaceholderReplacer::replace_placeholders( - infcx, - mapped_regions, - mapped_types, - mapped_consts, - &self.universes, - ty, - ) - } + ty::Projection => self.normalize_trait_projection(data.into()).expect_type(), + ty::Inherent => self.normalize_inherent_projection(data.into()).expect_type(), + ty::Free => self.normalize_free_alias(data.into()).expect_type(), } } #[instrument(skip(self), level = "debug")] - fn fold_const(&mut self, constant: ty::Const<'tcx>) -> ty::Const<'tcx> { + fn fold_const(&mut self, ct: ty::Const<'tcx>) -> ty::Const<'tcx> { let tcx = self.selcx.tcx(); - if tcx.features().generic_const_exprs() || !needs_normalization(self.selcx.infcx, &constant) - { - constant + if tcx.features().generic_const_exprs() || !needs_normalization(self.selcx.infcx, &ct) { + return ct; + } + + // Doing "proper" normalization of const aliases is inherently cyclic until const items + // are real aliases instead of having bodies. We gate proper const alias handling behind + // mgca to avoid breaking stable code, though this should become the "main" codepath long + // before mgca is stabilized. + // + // FIXME(BoxyUwU): Enabling this by default is blocked on a refactoring to how const items + // are represented. + if tcx.features().min_generic_const_args() { + let uv = match ct.kind() { + ty::ConstKind::Unevaluated(uv) => uv, + _ => return ct.super_fold_with(self), + }; + + let ct = match tcx.def_kind(uv.def) { + DefKind::AssocConst => match tcx.def_kind(tcx.parent(uv.def)) { + DefKind::Trait => self.normalize_trait_projection(uv.into()), + DefKind::Impl { of_trait: false } => { + self.normalize_inherent_projection(uv.into()) + } + kind => unreachable!( + "unexpected `DefKind` for const alias' resolution's parent def: {:?}", + kind + ), + }, + DefKind::Const | DefKind::AnonConst => self.normalize_free_alias(uv.into()), + kind => { + unreachable!("unexpected `DefKind` for const alias to resolve to: {:?}", kind) + } + }; + + // We re-fold the normalized const as the `ty` field on `ConstKind::Value` may be + // unnormalized after const evaluation returns. + ct.expect_const().super_fold_with(self) } else { - let constant = constant.super_fold_with(self); - debug!(?constant, ?self.param_env); - with_replaced_escaping_bound_vars( + let ct = ct.super_fold_with(self); + return super::with_replaced_escaping_bound_vars( self.selcx.infcx, &mut self.universes, - constant, - |constant| super::evaluate_const(self.selcx.infcx, constant, self.param_env), + ct, + |ct| super::evaluate_const(self.selcx.infcx, ct, self.param_env), ) - .super_fold_with(self) + .super_fold_with(self); + // We re-fold the normalized const as the `ty` field on `ConstKind::Value` may be + // unnormalized after const evaluation returns. } } diff --git a/compiler/rustc_trait_selection/src/traits/project.rs b/compiler/rustc_trait_selection/src/traits/project.rs index 6057b66c483f5..ca58da5ca6d55 100644 --- a/compiler/rustc_trait_selection/src/traits/project.rs +++ b/compiler/rustc_trait_selection/src/traits/project.rs @@ -5,7 +5,6 @@ use std::ops::ControlFlow; use rustc_data_structures::sso::SsoHashSet; use rustc_data_structures::stack::ensure_sufficient_stack; use rustc_errors::ErrorGuaranteed; -use rustc_hir::def::DefKind; use rustc_hir::lang_items::LangItem; use rustc_infer::infer::DefineOpaqueTypes; use rustc_infer::infer::resolve::OpportunisticRegionResolver; @@ -172,6 +171,7 @@ pub(super) enum ProjectAndUnifyResult<'tcx> { /// ``` /// If successful, this may result in additional obligations. Also returns /// the projection cache key used to track these additional obligations. +// FIXME(mgca): While this supports constants, it is only used for types by default right now #[instrument(level = "debug", skip(selcx))] pub(super) fn poly_project_and_unify_term<'cx, 'tcx>( selcx: &mut SelectionContext<'cx, 'tcx>, @@ -201,6 +201,7 @@ pub(super) fn poly_project_and_unify_term<'cx, 'tcx>( /// If successful, this may result in additional obligations. /// /// See [poly_project_and_unify_term] for an explanation of the return value. +// FIXME(mgca): While this supports constants, it is only used for types by default right now #[instrument(level = "debug", skip(selcx))] fn project_and_unify_term<'cx, 'tcx>( selcx: &mut SelectionContext<'cx, 'tcx>, @@ -258,34 +259,28 @@ fn project_and_unify_term<'cx, 'tcx>( /// there are unresolved type variables in the projection, we will /// instantiate it with a fresh type variable `$X` and generate a new /// obligation `::Item == $X` for later. -pub fn normalize_projection_ty<'a, 'b, 'tcx>( +// FIXME(mgca): While this supports constants, it is only used for types by default right now +pub fn normalize_projection_term<'a, 'b, 'tcx>( selcx: &'a mut SelectionContext<'b, 'tcx>, param_env: ty::ParamEnv<'tcx>, - projection_ty: ty::AliasTy<'tcx>, + alias_term: ty::AliasTerm<'tcx>, cause: ObligationCause<'tcx>, depth: usize, obligations: &mut PredicateObligations<'tcx>, ) -> Term<'tcx> { - opt_normalize_projection_term( - selcx, - param_env, - projection_ty.into(), - cause.clone(), - depth, - obligations, - ) - .ok() - .flatten() - .unwrap_or_else(move || { - // if we bottom out in ambiguity, create a type variable - // and a deferred predicate to resolve this when more type - // information is available. - - selcx - .infcx - .projection_ty_to_infer(param_env, projection_ty, cause, depth + 1, obligations) - .into() - }) + opt_normalize_projection_term(selcx, param_env, alias_term, cause.clone(), depth, obligations) + .ok() + .flatten() + .unwrap_or_else(move || { + // if we bottom out in ambiguity, create a type variable + // and a deferred predicate to resolve this when more type + // information is available. + + selcx + .infcx + .projection_term_to_infer(param_env, alias_term, cause, depth + 1, obligations) + .into() + }) } /// The guts of `normalize`: normalize a specific projection like `( /// often immediately appended to another obligations vector. So now this /// function takes an obligations vector and appends to it directly, which is /// slightly uglier but avoids the need for an extra short-lived allocation. +// FIXME(mgca): While this supports constants, it is only used for types by default right now #[instrument(level = "debug", skip(selcx, param_env, cause, obligations))] pub(super) fn opt_normalize_projection_term<'a, 'b, 'tcx>( selcx: &'a mut SelectionContext<'b, 'tcx>, @@ -456,6 +452,7 @@ pub(super) fn opt_normalize_projection_term<'a, 'b, 'tcx>( /// an error for this obligation, but we legitimately should not, /// because it contains `[type error]`. Yuck! (See issue #29857 for /// one case where this arose.) +// FIXME(mgca): While this supports constants, it is only used for types by default right now fn normalize_to_error<'a, 'tcx>( selcx: &SelectionContext<'a, 'tcx>, param_env: ty::ParamEnv<'tcx>, @@ -468,10 +465,11 @@ fn normalize_to_error<'a, 'tcx>( ty::AliasTermKind::ProjectionTy | ty::AliasTermKind::InherentTy | ty::AliasTermKind::OpaqueTy - | ty::AliasTermKind::WeakTy => selcx.infcx.next_ty_var(cause.span).into(), - ty::AliasTermKind::UnevaluatedConst | ty::AliasTermKind::ProjectionConst => { - selcx.infcx.next_const_var(cause.span).into() - } + | ty::AliasTermKind::FreeTy => selcx.infcx.next_ty_var(cause.span).into(), + ty::AliasTermKind::FreeConst + | ty::AliasTermKind::InherentConst + | ty::AliasTermKind::UnevaluatedConst + | ty::AliasTermKind::ProjectionConst => selcx.infcx.next_const_var(cause.span).into(), }; let mut obligations = PredicateObligations::new(); obligations.push(Obligation { @@ -484,36 +482,37 @@ fn normalize_to_error<'a, 'tcx>( } /// Confirm and normalize the given inherent projection. +// FIXME(mgca): While this supports constants, it is only used for types by default right now #[instrument(level = "debug", skip(selcx, param_env, cause, obligations))] pub fn normalize_inherent_projection<'a, 'b, 'tcx>( selcx: &'a mut SelectionContext<'b, 'tcx>, param_env: ty::ParamEnv<'tcx>, - alias_ty: ty::AliasTy<'tcx>, + alias_term: ty::AliasTerm<'tcx>, cause: ObligationCause<'tcx>, depth: usize, obligations: &mut PredicateObligations<'tcx>, -) -> Ty<'tcx> { +) -> ty::Term<'tcx> { let tcx = selcx.tcx(); if !tcx.recursion_limit().value_within_limit(depth) { // Halt compilation because it is important that overflows never be masked. tcx.dcx().emit_fatal(InherentProjectionNormalizationOverflow { span: cause.span, - ty: alias_ty.to_string(), + ty: alias_term.to_string(), }); } - let args = compute_inherent_assoc_ty_args( + let args = compute_inherent_assoc_term_args( selcx, param_env, - alias_ty, + alias_term, cause.clone(), depth, obligations, ); // Register the obligations arising from the impl and from the associated type itself. - let predicates = tcx.predicates_of(alias_ty.def_id).instantiate(tcx, args); + let predicates = tcx.predicates_of(alias_term.def_id).instantiate(tcx, args); for (predicate, span) in predicates { let predicate = normalize_with_depth_to( selcx, @@ -531,7 +530,7 @@ pub fn normalize_inherent_projection<'a, 'b, 'tcx>( // cause code, inherent projections will be printed with identity instantiation in // diagnostics which is not ideal. // Consider creating separate cause codes for this specific situation. - ObligationCauseCode::WhereClause(alias_ty.def_id, span), + ObligationCauseCode::WhereClause(alias_term.def_id, span), ); obligations.push(Obligation::with_depth( @@ -543,27 +542,33 @@ pub fn normalize_inherent_projection<'a, 'b, 'tcx>( )); } - let ty = tcx.type_of(alias_ty.def_id).instantiate(tcx, args); + let term: Term<'tcx> = if alias_term.kind(tcx).is_type() { + tcx.type_of(alias_term.def_id).instantiate(tcx, args).into() + } else { + get_associated_const_value(selcx, alias_term.to_term(tcx).expect_const(), param_env).into() + }; - let mut ty = selcx.infcx.resolve_vars_if_possible(ty); - if ty.has_aliases() { - ty = normalize_with_depth_to(selcx, param_env, cause.clone(), depth + 1, ty, obligations); + let mut term = selcx.infcx.resolve_vars_if_possible(term); + if term.has_aliases() { + term = + normalize_with_depth_to(selcx, param_env, cause.clone(), depth + 1, term, obligations); } - ty + term } -pub fn compute_inherent_assoc_ty_args<'a, 'b, 'tcx>( +// FIXME(mgca): While this supports constants, it is only used for types by default right now +pub fn compute_inherent_assoc_term_args<'a, 'b, 'tcx>( selcx: &'a mut SelectionContext<'b, 'tcx>, param_env: ty::ParamEnv<'tcx>, - alias_ty: ty::AliasTy<'tcx>, + alias_term: ty::AliasTerm<'tcx>, cause: ObligationCause<'tcx>, depth: usize, obligations: &mut PredicateObligations<'tcx>, ) -> ty::GenericArgsRef<'tcx> { let tcx = selcx.tcx(); - let impl_def_id = tcx.parent(alias_ty.def_id); + let impl_def_id = tcx.parent(alias_term.def_id); let impl_args = selcx.infcx.fresh_args_for_item(cause.span, impl_def_id); let mut impl_ty = tcx.type_of(impl_def_id).instantiate(tcx, impl_args); @@ -580,7 +585,7 @@ pub fn compute_inherent_assoc_ty_args<'a, 'b, 'tcx>( // Infer the generic parameters of the impl by unifying the // impl type with the self type of the projection. - let mut self_ty = alias_ty.self_ty(); + let mut self_ty = alias_term.self_ty(); if !selcx.infcx.next_trait_solver() { self_ty = normalize_with_depth_to( selcx, @@ -602,7 +607,7 @@ pub fn compute_inherent_assoc_ty_args<'a, 'b, 'tcx>( } } - alias_ty.rebase_inherent_args_onto_impl(impl_args, tcx) + alias_term.rebase_inherent_args_onto_impl(impl_args, tcx) } enum Projected<'tcx> { @@ -630,6 +635,7 @@ impl<'tcx> Progress<'tcx> { /// /// IMPORTANT: /// - `obligation` must be fully normalized +// FIXME(mgca): While this supports constants, it is only used for types by default right now #[instrument(level = "info", skip(selcx))] fn project<'cx, 'tcx>( selcx: &mut SelectionContext<'cx, 'tcx>, @@ -669,30 +675,11 @@ fn project<'cx, 'tcx>( match candidates { ProjectionCandidateSet::Single(candidate) => { - Ok(Projected::Progress(confirm_candidate(selcx, obligation, candidate))) + confirm_candidate(selcx, obligation, candidate) } ProjectionCandidateSet::None => { let tcx = selcx.tcx(); - let term = match tcx.def_kind(obligation.predicate.def_id) { - DefKind::AssocTy => Ty::new_projection_from_args( - tcx, - obligation.predicate.def_id, - obligation.predicate.args, - ) - .into(), - DefKind::AssocConst => ty::Const::new_unevaluated( - tcx, - ty::UnevaluatedConst::new( - obligation.predicate.def_id, - obligation.predicate.args, - ), - ) - .into(), - kind => { - bug!("unknown projection def-id: {}", kind.descr(obligation.predicate.def_id)) - } - }; - + let term = obligation.predicate.to_term(tcx); Ok(Projected::NoProgress(term)) } // Error occurred while trying to processing impls. @@ -915,7 +902,7 @@ fn assemble_candidates_from_impls<'cx, 'tcx>( ImplSource::UserDefined(impl_data) => { // We have to be careful when projecting out of an // impl because of specialization. If we are not in - // codegen (i.e., projection mode is not "any"), and the + // codegen (i.e., `TypingMode` is not `PostAnalysis`), and the // impl's type is declared as default, then we disable // projection (even if the trait ref is fully // monomorphic). In the case where trait ref is not @@ -952,6 +939,7 @@ fn assemble_candidates_from_impls<'cx, 'tcx>( match selcx.infcx.typing_mode() { TypingMode::Coherence | TypingMode::Analysis { .. } + | TypingMode::Borrowck { .. } | TypingMode::PostBorrowckAnalysis { .. } => { debug!( assoc_ty = ?selcx.tcx().def_path_str(node_item.item.def_id), @@ -983,36 +971,38 @@ fn assemble_candidates_from_impls<'cx, 'tcx>( let self_ty = selcx.infcx.shallow_resolve(obligation.predicate.self_ty()); let tcx = selcx.tcx(); - let lang_items = selcx.tcx().lang_items(); - if [ - lang_items.coroutine_trait(), - lang_items.future_trait(), - lang_items.iterator_trait(), - lang_items.async_iterator_trait(), - lang_items.fn_trait(), - lang_items.fn_mut_trait(), - lang_items.fn_once_trait(), - lang_items.async_fn_trait(), - lang_items.async_fn_mut_trait(), - lang_items.async_fn_once_trait(), - ] - .contains(&Some(trait_ref.def_id)) - { - true - } else if tcx.is_lang_item(trait_ref.def_id, LangItem::AsyncFnKindHelper) { - // FIXME(async_closures): Validity constraints here could be cleaned up. - if obligation.predicate.args.type_at(0).is_ty_var() - || obligation.predicate.args.type_at(4).is_ty_var() - || obligation.predicate.args.type_at(5).is_ty_var() - { - candidate_set.mark_ambiguous(); - true - } else { - obligation.predicate.args.type_at(0).to_opt_closure_kind().is_some() - && obligation.predicate.args.type_at(1).to_opt_closure_kind().is_some() + match selcx.tcx().as_lang_item(trait_ref.def_id) { + Some( + LangItem::Coroutine + | LangItem::Future + | LangItem::Iterator + | LangItem::AsyncIterator + | LangItem::Fn + | LangItem::FnMut + | LangItem::FnOnce + | LangItem::AsyncFn + | LangItem::AsyncFnMut + | LangItem::AsyncFnOnce, + ) => true, + Some(LangItem::AsyncFnKindHelper) => { + // FIXME(async_closures): Validity constraints here could be cleaned up. + if obligation.predicate.args.type_at(0).is_ty_var() + || obligation.predicate.args.type_at(4).is_ty_var() + || obligation.predicate.args.type_at(5).is_ty_var() + { + candidate_set.mark_ambiguous(); + true + } else { + obligation.predicate.args.type_at(0).to_opt_closure_kind().is_some() + && obligation + .predicate + .args + .type_at(1) + .to_opt_closure_kind() + .is_some() + } } - } else if tcx.is_lang_item(trait_ref.def_id, LangItem::DiscriminantKind) { - match self_ty.kind() { + Some(LangItem::DiscriminantKind) => match self_ty.kind() { ty::Bool | ty::Char | ty::Int(_) @@ -1049,138 +1039,104 @@ fn assemble_candidates_from_impls<'cx, 'tcx>( | ty::Placeholder(..) | ty::Infer(..) | ty::Error(_) => false, - } - } else if tcx.is_lang_item(trait_ref.def_id, LangItem::AsyncDestruct) { - match self_ty.kind() { - ty::Bool - | ty::Char - | ty::Int(_) - | ty::Uint(_) - | ty::Float(_) - | ty::Adt(..) - | ty::Str - | ty::Array(..) - | ty::Slice(_) - | ty::RawPtr(..) - | ty::Ref(..) - | ty::FnDef(..) - | ty::FnPtr(..) - | ty::UnsafeBinder(_) - | ty::Dynamic(..) - | ty::Closure(..) - | ty::CoroutineClosure(..) - | ty::Coroutine(..) - | ty::CoroutineWitness(..) - | ty::Pat(..) - | ty::Never - | ty::Tuple(..) - | ty::Infer(ty::InferTy::IntVar(_) | ty::InferTy::FloatVar(..)) => true, - - // type parameters, opaques, and unnormalized projections don't have - // a known async destructor and may need to be normalized further or rely - // on param env for async destructor projections - ty::Param(_) - | ty::Foreign(_) - | ty::Alias(..) - | ty::Bound(..) - | ty::Placeholder(..) - | ty::Infer(_) - | ty::Error(_) => false, - } - } else if tcx.is_lang_item(trait_ref.def_id, LangItem::PointeeTrait) { - let tail = selcx.tcx().struct_tail_raw( - self_ty, - |ty| { - // We throw away any obligations we get from this, since we normalize - // and confirm these obligations once again during confirmation - normalize_with_depth( - selcx, - obligation.param_env, - obligation.cause.clone(), - obligation.recursion_depth + 1, - ty, - ) - .value - }, - || {}, - ); + }, + Some(LangItem::PointeeTrait) => { + let tail = selcx.tcx().struct_tail_raw( + self_ty, + |ty| { + // We throw away any obligations we get from this, since we normalize + // and confirm these obligations once again during confirmation + normalize_with_depth( + selcx, + obligation.param_env, + obligation.cause.clone(), + obligation.recursion_depth + 1, + ty, + ) + .value + }, + || {}, + ); - match tail.kind() { - ty::Bool - | ty::Char - | ty::Int(_) - | ty::Uint(_) - | ty::Float(_) - | ty::Str - | ty::Array(..) - | ty::Pat(..) - | ty::Slice(_) - | ty::RawPtr(..) - | ty::Ref(..) - | ty::FnDef(..) - | ty::FnPtr(..) - | ty::Dynamic(..) - | ty::Closure(..) - | ty::CoroutineClosure(..) - | ty::Coroutine(..) - | ty::CoroutineWitness(..) - | ty::Never - // Extern types have unit metadata, according to RFC 2850 - | ty::Foreign(_) - // If returned by `struct_tail` this is a unit struct - // without any fields, or not a struct, and therefore is Sized. - | ty::Adt(..) - // If returned by `struct_tail` this is the empty tuple. - | ty::Tuple(..) - // Integers and floats are always Sized, and so have unit type metadata. - | ty::Infer(ty::InferTy::IntVar(_) | ty::InferTy::FloatVar(..)) - // This happens if we reach the recursion limit when finding the struct tail. - | ty::Error(..) => true, - - // We normalize from `Wrapper::Metadata` to `Tail::Metadata` if able. - // Otherwise, type parameters, opaques, and unnormalized projections have - // unit metadata if they're known (e.g. by the param_env) to be sized. - ty::Param(_) | ty::Alias(..) - if self_ty != tail - || selcx.infcx.predicate_must_hold_modulo_regions( - &obligation.with( - selcx.tcx(), - ty::TraitRef::new( + match tail.kind() { + ty::Bool + | ty::Char + | ty::Int(_) + | ty::Uint(_) + | ty::Float(_) + | ty::Str + | ty::Array(..) + | ty::Pat(..) + | ty::Slice(_) + | ty::RawPtr(..) + | ty::Ref(..) + | ty::FnDef(..) + | ty::FnPtr(..) + | ty::Dynamic(..) + | ty::Closure(..) + | ty::CoroutineClosure(..) + | ty::Coroutine(..) + | ty::CoroutineWitness(..) + | ty::Never + // Extern types have unit metadata, according to RFC 2850 + | ty::Foreign(_) + // If returned by `struct_tail` this is a unit struct + // without any fields, or not a struct, and therefore is Sized. + | ty::Adt(..) + // If returned by `struct_tail` this is the empty tuple. + | ty::Tuple(..) + // Integers and floats are always Sized, and so have unit type metadata. + | ty::Infer(ty::InferTy::IntVar(_) | ty::InferTy::FloatVar(..)) + // This happens if we reach the recursion limit when finding the struct tail. + | ty::Error(..) => true, + + // We normalize from `Wrapper::Metadata` to `Tail::Metadata` if able. + // Otherwise, type parameters, opaques, and unnormalized projections have + // unit metadata if they're known (e.g. by the param_env) to be sized. + ty::Param(_) | ty::Alias(..) + if self_ty != tail + || selcx.infcx.predicate_must_hold_modulo_regions( + &obligation.with( selcx.tcx(), - selcx.tcx().require_lang_item( - LangItem::Sized, - Some(obligation.cause.span), + ty::TraitRef::new( + selcx.tcx(), + selcx.tcx().require_lang_item( + LangItem::Sized, + Some(obligation.cause.span), + ), + [self_ty], ), - [self_ty], ), - ), - ) => - { - true - } + ) => + { + true + } - ty::UnsafeBinder(_) => todo!("FIXME(unsafe_binder)"), + ty::UnsafeBinder(_) => todo!("FIXME(unsafe_binder)"), - // FIXME(compiler-errors): are Bound and Placeholder types ever known sized? - ty::Param(_) - | ty::Alias(..) - | ty::Bound(..) - | ty::Placeholder(..) - | ty::Infer(..) => { - if tail.has_infer_types() { - candidate_set.mark_ambiguous(); + // FIXME(compiler-errors): are Bound and Placeholder types ever known sized? + ty::Param(_) + | ty::Alias(..) + | ty::Bound(..) + | ty::Placeholder(..) + | ty::Infer(..) => { + if tail.has_infer_types() { + candidate_set.mark_ambiguous(); + } + false } - false } } - } else if tcx.trait_is_auto(trait_ref.def_id) { - tcx.dcx().span_delayed_bug( - tcx.def_span(obligation.predicate.def_id), - "associated types not allowed on auto traits", - ); - false - } else { - bug!("unexpected builtin trait with associated type: {trait_ref:?}") + _ if tcx.trait_is_auto(trait_ref.def_id) => { + tcx.dcx().span_delayed_bug( + tcx.def_span(obligation.predicate.def_id), + "associated types not allowed on auto traits", + ); + false + } + _ => { + bug!("unexpected builtin trait with associated type: {trait_ref:?}") + } } } ImplSource::Param(..) => { @@ -1239,22 +1195,21 @@ fn assemble_candidates_from_impls<'cx, 'tcx>( }); } +// FIXME(mgca): While this supports constants, it is only used for types by default right now fn confirm_candidate<'cx, 'tcx>( selcx: &mut SelectionContext<'cx, 'tcx>, obligation: &ProjectionTermObligation<'tcx>, candidate: ProjectionCandidate<'tcx>, -) -> Progress<'tcx> { +) -> Result, ProjectionError<'tcx>> { debug!(?obligation, ?candidate, "confirm_candidate"); - let mut progress = match candidate { + let mut result = match candidate { ProjectionCandidate::ParamEnv(poly_projection) - | ProjectionCandidate::Object(poly_projection) => { - confirm_param_env_candidate(selcx, obligation, poly_projection, false) - } - - ProjectionCandidate::TraitDef(poly_projection) => { - confirm_param_env_candidate(selcx, obligation, poly_projection, true) - } - + | ProjectionCandidate::Object(poly_projection) => Ok(Projected::Progress( + confirm_param_env_candidate(selcx, obligation, poly_projection, false), + )), + ProjectionCandidate::TraitDef(poly_projection) => Ok(Projected::Progress( + confirm_param_env_candidate(selcx, obligation, poly_projection, true), + )), ProjectionCandidate::Select(impl_source) => { confirm_select_candidate(selcx, obligation, impl_source) } @@ -1265,23 +1220,27 @@ fn confirm_candidate<'cx, 'tcx>( // with new region variables, we need to resolve them to existing variables // when possible for this to work. See `auto-trait-projection-recursion.rs` // for a case where this matters. - if progress.term.has_infer_regions() { + if let Ok(Projected::Progress(progress)) = &mut result + && progress.term.has_infer_regions() + { progress.term = progress.term.fold_with(&mut OpportunisticRegionResolver::new(selcx.infcx)); } - progress + + result } +// FIXME(mgca): While this supports constants, it is only used for types by default right now fn confirm_select_candidate<'cx, 'tcx>( selcx: &mut SelectionContext<'cx, 'tcx>, obligation: &ProjectionTermObligation<'tcx>, impl_source: Selection<'tcx>, -) -> Progress<'tcx> { +) -> Result, ProjectionError<'tcx>> { match impl_source { ImplSource::UserDefined(data) => confirm_impl_candidate(selcx, obligation, data), ImplSource::Builtin(BuiltinImplSource::Misc | BuiltinImplSource::Trivial, data) => { let tcx = selcx.tcx(); let trait_def_id = obligation.predicate.trait_def_id(tcx); - if tcx.is_lang_item(trait_def_id, LangItem::Coroutine) { + let progress = if tcx.is_lang_item(trait_def_id, LangItem::Coroutine) { confirm_coroutine_candidate(selcx, obligation, data) } else if tcx.is_lang_item(trait_def_id, LangItem::Future) { confirm_future_candidate(selcx, obligation, data) @@ -1303,7 +1262,8 @@ fn confirm_select_candidate<'cx, 'tcx>( confirm_async_fn_kind_helper_candidate(selcx, obligation, data) } else { confirm_builtin_candidate(selcx, obligation, data) - } + }; + Ok(Projected::Progress(progress)) } ImplSource::Builtin(BuiltinImplSource::Object { .. }, _) | ImplSource::Param(..) @@ -1409,7 +1369,7 @@ fn confirm_future_candidate<'cx, 'tcx>( coroutine_sig, ); - debug_assert_eq!(tcx.associated_item(obligation.predicate.def_id).name, sym::Output); + debug_assert_eq!(tcx.associated_item(obligation.predicate.def_id).name(), sym::Output); let predicate = ty::ProjectionPredicate { projection_term: ty::AliasTerm::new_from_args( @@ -1455,7 +1415,7 @@ fn confirm_iterator_candidate<'cx, 'tcx>( gen_sig, ); - debug_assert_eq!(tcx.associated_item(obligation.predicate.def_id).name, sym::Item); + debug_assert_eq!(tcx.associated_item(obligation.predicate.def_id).name(), sym::Item); let predicate = ty::ProjectionPredicate { projection_term: ty::AliasTerm::new_from_args( @@ -1501,7 +1461,7 @@ fn confirm_async_iterator_candidate<'cx, 'tcx>( gen_sig, ); - debug_assert_eq!(tcx.associated_item(obligation.predicate.def_id).name, sym::Item); + debug_assert_eq!(tcx.associated_item(obligation.predicate.def_id).name(), sym::Item); let ty::Adt(_poll_adt, args) = *yield_ty.kind() else { bug!(); @@ -1540,11 +1500,6 @@ fn confirm_builtin_candidate<'cx, 'tcx>( assert_eq!(discriminant_def_id, item_def_id); (self_ty.discriminant_ty(tcx).into(), PredicateObligations::new()) - } else if tcx.is_lang_item(trait_def_id, LangItem::AsyncDestruct) { - let destructor_def_id = tcx.associated_item_def_ids(trait_def_id)[0]; - assert_eq!(destructor_def_id, item_def_id); - - (self_ty.async_destructor_ty(tcx).into(), PredicateObligations::new()) } else if tcx.is_lang_item(trait_def_id, LangItem::PointeeTrait) { let metadata_def_id = tcx.require_lang_item(LangItem::Metadata, None); assert_eq!(metadata_def_id, item_def_id); @@ -1926,6 +1881,7 @@ fn confirm_async_fn_kind_helper_candidate<'cx, 'tcx>( .with_addl_obligations(nested) } +// FIXME(mgca): While this supports constants, it is only used for types by default right now fn confirm_param_env_candidate<'cx, 'tcx>( selcx: &mut SelectionContext<'cx, 'tcx>, obligation: &ProjectionTermObligation<'tcx>, @@ -1979,9 +1935,7 @@ fn confirm_param_env_candidate<'cx, 'tcx>( ) { Ok(InferOk { value: _, obligations }) => { nested_obligations.extend(obligations); - assoc_ty_own_obligations(selcx, obligation, &mut nested_obligations); - // FIXME(associated_const_equality): Handle consts here as well? Maybe this progress type should just take - // a term instead. + assoc_term_own_obligations(selcx, obligation, &mut nested_obligations); Progress { term: cache_entry.term, obligations: nested_obligations } } Err(e) => { @@ -1995,11 +1949,12 @@ fn confirm_param_env_candidate<'cx, 'tcx>( } } +// FIXME(mgca): While this supports constants, it is only used for types by default right now fn confirm_impl_candidate<'cx, 'tcx>( selcx: &mut SelectionContext<'cx, 'tcx>, obligation: &ProjectionTermObligation<'tcx>, impl_impl_source: ImplSourceUserDefinedData<'tcx, PredicateObligation<'tcx>>, -) -> Progress<'tcx> { +) -> Result, ProjectionError<'tcx>> { let tcx = selcx.tcx(); let ImplSourceUserDefinedData { impl_def_id, args, mut nested } = impl_impl_source; @@ -2008,21 +1963,40 @@ fn confirm_impl_candidate<'cx, 'tcx>( let trait_def_id = tcx.trait_id_of_impl(impl_def_id).unwrap(); let param_env = obligation.param_env; - let assoc_ty = match specialization_graph::assoc_def(tcx, impl_def_id, assoc_item_id) { - Ok(assoc_ty) => assoc_ty, - Err(guar) => return Progress::error(tcx, guar), + let assoc_term = match specialization_graph::assoc_def(tcx, impl_def_id, assoc_item_id) { + Ok(assoc_term) => assoc_term, + Err(guar) => return Ok(Projected::Progress(Progress::error(tcx, guar))), }; - if !assoc_ty.item.defaultness(tcx).has_value() { - // This means that the impl is missing a definition for the - // associated type. This error will be reported by the type - // checker method `check_impl_items_against_trait`, so here we - // just return Error. + + // This means that the impl is missing a definition for the + // associated type. This is either because the associate item + // has impossible-to-satisfy predicates (since those were + // allowed in ), + // or because the impl is literally missing the definition. + if !assoc_term.item.defaultness(tcx).has_value() { debug!( "confirm_impl_candidate: no associated type {:?} for {:?}", - assoc_ty.item.name, obligation.predicate + assoc_term.item.name(), + obligation.predicate ); - return Progress { term: Ty::new_misc_error(tcx).into(), obligations: nested }; + if tcx.impl_self_is_guaranteed_unsized(impl_def_id) { + // We treat this projection as rigid here, which is represented via + // `Projected::NoProgress`. This will ensure that the projection is + // checked for well-formedness, and it's either satisfied by a trivial + // where clause in its env or it results in an error. + return Ok(Projected::NoProgress(obligation.predicate.to_term(tcx))); + } else { + return Ok(Projected::Progress(Progress { + term: if obligation.predicate.kind(tcx).is_type() { + Ty::new_misc_error(tcx).into() + } else { + ty::Const::new_misc_error(tcx).into() + }, + obligations: nested, + })); + } } + // If we're trying to normalize ` as X>::A` using //`impl X for Vec { type A = Box; }`, then: // @@ -2030,32 +2004,41 @@ fn confirm_impl_candidate<'cx, 'tcx>( // * `args` is `[u32]` // * `args` ends up as `[u32, S]` let args = obligation.predicate.args.rebase_onto(tcx, trait_def_id, args); - let args = translate_args(selcx.infcx, param_env, impl_def_id, args, assoc_ty.defining_node); - let is_const = matches!(tcx.def_kind(assoc_ty.item.def_id), DefKind::AssocConst); - let term: ty::EarlyBinder<'tcx, ty::Term<'tcx>> = if is_const { - let did = assoc_ty.item.def_id; - let identity_args = crate::traits::GenericArgs::identity_for_item(tcx, did); - let uv = ty::UnevaluatedConst::new(did, identity_args); - ty::EarlyBinder::bind(ty::Const::new_unevaluated(tcx, uv).into()) + let args = translate_args(selcx.infcx, param_env, impl_def_id, args, assoc_term.defining_node); + + let term = if obligation.predicate.kind(tcx).is_type() { + tcx.type_of(assoc_term.item.def_id).map_bound(|ty| ty.into()) } else { - tcx.type_of(assoc_ty.item.def_id).map_bound(|ty| ty.into()) + ty::EarlyBinder::bind( + get_associated_const_value( + selcx, + obligation.predicate.to_term(tcx).expect_const(), + param_env, + ) + .into(), + ) }; - if !tcx.check_args_compatible(assoc_ty.item.def_id, args) { - let err = Ty::new_error_with_message( - tcx, - obligation.cause.span, - "impl item and trait item have different parameters", - ); - Progress { term: err.into(), obligations: nested } + + let progress = if !tcx.check_args_compatible(assoc_term.item.def_id, args) { + let msg = "impl item and trait item have different parameters"; + let span = obligation.cause.span; + let err = if obligation.predicate.kind(tcx).is_type() { + Ty::new_error_with_message(tcx, span, msg).into() + } else { + ty::Const::new_error_with_message(tcx, span, msg).into() + }; + Progress { term: err, obligations: nested } } else { - assoc_ty_own_obligations(selcx, obligation, &mut nested); + assoc_term_own_obligations(selcx, obligation, &mut nested); Progress { term: term.instantiate(tcx, args), obligations: nested } - } + }; + Ok(Projected::Progress(progress)) } // Get obligations corresponding to the predicates from the where-clause of the // associated type itself. -fn assoc_ty_own_obligations<'cx, 'tcx>( +// FIXME(mgca): While this supports constants, it is only used for types by default right now +fn assoc_term_own_obligations<'cx, 'tcx>( selcx: &mut SelectionContext<'cx, 'tcx>, obligation: &ProjectionTermObligation<'tcx>, nested: &mut PredicateObligations<'tcx>, @@ -2125,3 +2108,15 @@ impl<'cx, 'tcx> ProjectionCacheKeyExt<'cx, 'tcx> for ProjectionCacheKey<'tcx> { }) } } + +fn get_associated_const_value<'tcx>( + selcx: &mut SelectionContext<'_, 'tcx>, + alias_ct: ty::Const<'tcx>, + param_env: ty::ParamEnv<'tcx>, +) -> ty::Const<'tcx> { + // FIXME(mgca): We shouldn't be invoking ctfe here, instead const items should be aliases to type + // system consts that we can retrieve with some `query const_arg_of_alias` query. Evaluating the + // constant is "close enough" to getting the actual rhs of the const item for now even if it might + // lead to some cycles + super::evaluate_const(selcx.infcx, alias_ct, param_env) +} diff --git a/compiler/rustc_trait_selection/src/traits/query/normalize.rs b/compiler/rustc_trait_selection/src/traits/query/normalize.rs index 165c63f3745a2..eb34cb10c68dd 100644 --- a/compiler/rustc_trait_selection/src/traits/query/normalize.rs +++ b/compiler/rustc_trait_selection/src/traits/query/normalize.rs @@ -9,7 +9,7 @@ use rustc_macros::extension; pub use rustc_middle::traits::query::NormalizationResult; use rustc_middle::ty::{ self, FallibleTypeFolder, Ty, TyCtxt, TypeFoldable, TypeSuperFoldable, TypeSuperVisitable, - TypeVisitable, TypeVisitableExt, TypeVisitor, TypingMode, + TypeVisitableExt, TypeVisitor, TypingMode, }; use rustc_span::DUMMY_SP; use tracing::{debug, info, instrument}; @@ -127,7 +127,7 @@ struct MaxEscapingBoundVarVisitor { } impl<'tcx> TypeVisitor> for MaxEscapingBoundVarVisitor { - fn visit_binder>>(&mut self, t: &ty::Binder<'tcx, T>) { + fn visit_binder>>(&mut self, t: &ty::Binder<'tcx, T>) { self.outer_index.shift_in(1); t.super_visit_with(self); self.outer_index.shift_out(1); @@ -144,7 +144,7 @@ impl<'tcx> TypeVisitor> for MaxEscapingBoundVarVisitor { #[inline] fn visit_region(&mut self, r: ty::Region<'tcx>) { - match *r { + match r.kind() { ty::ReBound(debruijn, _) if debruijn > self.outer_index => { self.escaping = self.escaping.max(debruijn.as_usize() - self.outer_index.as_usize()); @@ -216,6 +216,7 @@ impl<'a, 'tcx> FallibleTypeFolder> for QueryNormalizer<'a, 'tcx> { match self.infcx.typing_mode() { TypingMode::Coherence | TypingMode::Analysis { .. } + | TypingMode::Borrowck { .. } | TypingMode::PostBorrowckAnalysis { .. } => ty.try_super_fold_with(self)?, TypingMode::PostAnalysis => { @@ -252,7 +253,7 @@ impl<'a, 'tcx> FallibleTypeFolder> for QueryNormalizer<'a, 'tcx> { } } - ty::Projection | ty::Inherent | ty::Weak => { + ty::Projection | ty::Inherent | ty::Free => { // See note in `rustc_trait_selection::traits::project` let infcx = self.infcx; @@ -274,7 +275,7 @@ impl<'a, 'tcx> FallibleTypeFolder> for QueryNormalizer<'a, 'tcx> { debug!("QueryNormalizer: orig_values = {:#?}", orig_values); let result = match kind { ty::Projection => tcx.normalize_canonicalized_projection_ty(c_data), - ty::Weak => tcx.normalize_canonicalized_weak_ty(c_data), + ty::Free => tcx.normalize_canonicalized_free_alias(c_data), ty::Inherent => tcx.normalize_canonicalized_inherent_projection_ty(c_data), kind => unreachable!("did not expect {kind:?} due to match arm above"), }?; @@ -312,10 +313,10 @@ impl<'a, 'tcx> FallibleTypeFolder> for QueryNormalizer<'a, 'tcx> { }; // `tcx.normalize_canonicalized_projection_ty` may normalize to a type that // still has unevaluated consts, so keep normalizing here if that's the case. - // Similarly, `tcx.normalize_canonicalized_weak_ty` will only unwrap one layer + // Similarly, `tcx.normalize_canonicalized_free_alias` will only unwrap one layer // of type and we need to continue folding it to reveal the TAIT behind it. if res != ty - && (res.has_type_flags(ty::TypeFlags::HAS_CT_PROJECTION) || kind == ty::Weak) + && (res.has_type_flags(ty::TypeFlags::HAS_CT_PROJECTION) || kind == ty::Free) { res.try_fold_with(self)? } else { diff --git a/compiler/rustc_trait_selection/src/traits/query/type_op/ascribe_user_type.rs b/compiler/rustc_trait_selection/src/traits/query/type_op/ascribe_user_type.rs index 4eecde00eaa1e..81b5a131a32e0 100644 --- a/compiler/rustc_trait_selection/src/traits/query/type_op/ascribe_user_type.rs +++ b/compiler/rustc_trait_selection/src/traits/query/type_op/ascribe_user_type.rs @@ -117,8 +117,7 @@ fn relate_mir_and_user_args<'tcx>( CRATE_DEF_ID, ObligationCauseCode::AscribeUserTypeProvePredicate(predicate_span), ); - let instantiated_predicate = - ocx.normalize(&cause.clone(), param_env, instantiated_predicate); + let instantiated_predicate = ocx.normalize(&cause, param_env, instantiated_predicate); ocx.register_obligation(Obligation::new(tcx, cause, param_env, instantiated_predicate)); } @@ -132,12 +131,12 @@ fn relate_mir_and_user_args<'tcx>( // const CONST: () = { /* arbitrary code that depends on T being WF */ }; // } // ``` - for arg in args { + for term in args.iter().filter_map(ty::GenericArg::as_term) { ocx.register_obligation(Obligation::new( tcx, cause.clone(), param_env, - ty::ClauseKind::WellFormed(arg), + ty::ClauseKind::WellFormed(term), )); } diff --git a/compiler/rustc_trait_selection/src/traits/query/type_op/implied_outlives_bounds.rs b/compiler/rustc_trait_selection/src/traits/query/type_op/implied_outlives_bounds.rs index f98529860ff81..d9b57f0c67d14 100644 --- a/compiler/rustc_trait_selection/src/traits/query/type_op/implied_outlives_bounds.rs +++ b/compiler/rustc_trait_selection/src/traits/query/type_op/implied_outlives_bounds.rs @@ -6,10 +6,10 @@ use rustc_infer::traits::query::OutlivesBound; use rustc_infer::traits::query::type_op::ImpliedOutlivesBounds; use rustc_middle::infer::canonical::CanonicalQueryResponse; use rustc_middle::traits::ObligationCause; +use rustc_middle::ty::outlives::{Component, push_outlives_components}; use rustc_middle::ty::{self, ParamEnvAnd, Ty, TyCtxt, TypeVisitable, TypeVisitor}; use rustc_span::def_id::CRATE_DEF_ID; use rustc_span::{DUMMY_SP, Span, sym}; -use rustc_type_ir::outlives::{Component, push_outlives_components}; use smallvec::{SmallVec, smallvec}; use crate::traits::query::NoSolution; @@ -113,8 +113,8 @@ pub fn compute_implied_outlives_bounds_inner<'tcx>( | ty::PredicateKind::AliasRelate(..) => {} // We need to search through *all* WellFormed predicates - ty::PredicateKind::Clause(ty::ClauseKind::WellFormed(arg)) => { - wf_args.push(arg); + ty::PredicateKind::Clause(ty::ClauseKind::WellFormed(term)) => { + wf_args.push(term); } // We need to register region relationships diff --git a/compiler/rustc_trait_selection/src/traits/query/type_op/prove_predicate.rs b/compiler/rustc_trait_selection/src/traits/query/type_op/prove_predicate.rs index 4f9e2e79d624c..18971c47831a4 100644 --- a/compiler/rustc_trait_selection/src/traits/query/type_op/prove_predicate.rs +++ b/compiler/rustc_trait_selection/src/traits/query/type_op/prove_predicate.rs @@ -1,4 +1,3 @@ -use rustc_hir::LangItem; use rustc_infer::traits::Obligation; use rustc_middle::traits::ObligationCause; use rustc_middle::traits::query::NoSolution; @@ -7,7 +6,7 @@ use rustc_middle::ty::{self, ParamEnvAnd, TyCtxt}; use rustc_span::Span; use crate::infer::canonical::{CanonicalQueryInput, CanonicalQueryResponse}; -use crate::traits::ObligationCtxt; +use crate::traits::{ObligationCtxt, sizedness_fast_path}; impl<'tcx> super::QueryTypeOp<'tcx> for ProvePredicate<'tcx> { type QueryResponse = (); @@ -16,22 +15,14 @@ impl<'tcx> super::QueryTypeOp<'tcx> for ProvePredicate<'tcx> { tcx: TyCtxt<'tcx>, key: &ParamEnvAnd<'tcx, Self>, ) -> Option { - // Proving Sized, very often on "obviously sized" types like - // `&T`, accounts for about 60% percentage of the predicates - // we have to prove. No need to canonicalize and all that for - // such cases. - if let ty::PredicateKind::Clause(ty::ClauseKind::Trait(trait_ref)) = - key.value.predicate.kind().skip_binder() - && tcx.is_lang_item(trait_ref.def_id(), LangItem::Sized) - && trait_ref.self_ty().is_trivially_sized(tcx) - { + if sizedness_fast_path(tcx, key.value.predicate) { return Some(()); } - if let ty::PredicateKind::Clause(ty::ClauseKind::WellFormed(arg)) = + if let ty::PredicateKind::Clause(ty::ClauseKind::WellFormed(term)) = key.value.predicate.kind().skip_binder() { - match arg.as_type()?.kind() { + match term.as_type()?.kind() { ty::Param(_) | ty::Bool | ty::Char diff --git a/compiler/rustc_trait_selection/src/traits/select/candidate_assembly.rs b/compiler/rustc_trait_selection/src/traits/select/candidate_assembly.rs index 4cfd8149b1e95..10a2ba049d852 100644 --- a/compiler/rustc_trait_selection/src/traits/select/candidate_assembly.rs +++ b/compiler/rustc_trait_selection/src/traits/select/candidate_assembly.rs @@ -14,9 +14,8 @@ use rustc_data_structures::fx::{FxHashSet, FxIndexSet}; use rustc_hir as hir; use rustc_infer::traits::{Obligation, PolyTraitObligation, SelectionError}; use rustc_middle::ty::fast_reject::DeepRejectCtxt; -use rustc_middle::ty::{self, Ty, TypeVisitableExt, TypingMode}; +use rustc_middle::ty::{self, Ty, TypeVisitableExt, TypingMode, elaborate}; use rustc_middle::{bug, span_bug}; -use rustc_type_ir::elaborate; use tracing::{debug, instrument, trace}; use super::SelectionCandidate::*; @@ -79,17 +78,11 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { } else if tcx.is_lang_item(def_id, LangItem::DiscriminantKind) { // `DiscriminantKind` is automatically implemented for every type. candidates.vec.push(BuiltinCandidate { has_nested: false }); - } else if tcx.is_lang_item(def_id, LangItem::AsyncDestruct) { - // `AsyncDestruct` is automatically implemented for every type. - candidates.vec.push(BuiltinCandidate { has_nested: false }); } else if tcx.is_lang_item(def_id, LangItem::PointeeTrait) { // `Pointee` is automatically implemented for every type. candidates.vec.push(BuiltinCandidate { has_nested: false }); } else if tcx.is_lang_item(def_id, LangItem::Sized) { - // Sized is never implementable by end-users, it is - // always automatically computed. - let sized_conditions = self.sized_conditions(obligation); - self.assemble_builtin_bound_candidates(sized_conditions, &mut candidates); + self.assemble_builtin_sized_candidate(obligation, &mut candidates); } else if tcx.is_lang_item(def_id, LangItem::Unsize) { self.assemble_candidates_for_unsizing(obligation, &mut candidates); } else if tcx.is_lang_item(def_id, LangItem::Destruct) { @@ -243,8 +236,6 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { if !drcx.args_may_unify(obligation_args, bound_trait_ref.skip_binder().args) { continue; } - // FIXME(oli-obk): it is suspicious that we are dropping the constness and - // polarity here. let wc = self.where_clause_may_apply(stack, bound_trait_ref)?; if wc.may_apply() { candidates.vec.push(ParamCandidate(bound)); @@ -695,6 +686,23 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { let def_id = obligation.predicate.def_id(); + let mut check_impls = || { + // Only consider auto impls if there are no manual impls for the root of `self_ty`. + // + // For example, we only consider auto candidates for `&i32: Auto` if no explicit impl + // for `&SomeType: Auto` exists. Due to E0321 the only crate where impls + // for `&SomeType: Auto` can be defined is the crate where `Auto` has been defined. + // + // Generally, we have to guarantee that for all `SimplifiedType`s the only crate + // which may define impls for that type is either the crate defining the type + // or the trait. This should be guaranteed by the orphan check. + let mut has_impl = false; + self.tcx().for_each_relevant_impl(def_id, self_ty, |_| has_impl = true); + if !has_impl { + candidates.vec.push(AutoImplCandidate) + } + }; + if self.tcx().trait_is_auto(def_id) { match *self_ty.kind() { ty::Dynamic(..) => { @@ -708,9 +716,15 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { // we don't add any `..` impl. Default traits could // still be provided by a manual implementation for // this trait and type. + + // Backward compatibility for default auto traits. + // Test: ui/traits/default_auto_traits/extern-types.rs + if self.tcx().is_default_trait(def_id) { + check_impls() + } } ty::Param(..) - | ty::Alias(ty::Projection | ty::Inherent | ty::Weak, ..) + | ty::Alias(ty::Projection | ty::Inherent | ty::Free, ..) | ty::Placeholder(..) | ty::Bound(..) => { // In these cases, we don't know what the actual @@ -808,20 +822,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { return; } - // Only consider auto impls if there are no manual impls for the root of `self_ty`. - // - // For example, we only consider auto candidates for `&i32: Auto` if no explicit impl - // for `&SomeType: Auto` exists. Due to E0321 the only crate where impls - // for `&SomeType: Auto` can be defined is the crate where `Auto` has been defined. - // - // Generally, we have to guarantee that for all `SimplifiedType`s the only crate - // which may define impls for that type is either the crate defining the type - // or the trait. This should be guaranteed by the orphan check. - let mut has_impl = false; - self.tcx().for_each_relevant_impl(def_id, self_ty, |_| has_impl = true); - if !has_impl { - candidates.vec.push(AutoImplCandidate) - } + check_impls(); } ty::Error(_) => { candidates.vec.push(AutoImplCandidate); @@ -1061,6 +1062,27 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { /// Assembles the trait which are built-in to the language itself: /// `Copy`, `Clone` and `Sized`. #[instrument(level = "debug", skip(self, candidates))] + fn assemble_builtin_sized_candidate( + &mut self, + obligation: &PolyTraitObligation<'tcx>, + candidates: &mut SelectionCandidateSet<'tcx>, + ) { + match self.sized_conditions(obligation) { + BuiltinImplConditions::Where(nested) => { + candidates + .vec + .push(SizedCandidate { has_nested: !nested.skip_binder().is_empty() }); + } + BuiltinImplConditions::None => {} + BuiltinImplConditions::Ambiguous => { + candidates.ambiguous = true; + } + } + } + + /// Assembles the trait which are built-in to the language itself: + /// e.g. `Copy` and `Clone`. + #[instrument(level = "debug", skip(self, candidates))] fn assemble_builtin_bound_candidates( &mut self, conditions: BuiltinImplConditions<'tcx>, diff --git a/compiler/rustc_trait_selection/src/traits/select/confirmation.rs b/compiler/rustc_trait_selection/src/traits/select/confirmation.rs index a66c958c10978..94190cd3ae33a 100644 --- a/compiler/rustc_trait_selection/src/traits/select/confirmation.rs +++ b/compiler/rustc_trait_selection/src/traits/select/confirmation.rs @@ -15,10 +15,9 @@ use rustc_hir::lang_items::LangItem; use rustc_infer::infer::{DefineOpaqueTypes, HigherRankedType, InferOk}; use rustc_infer::traits::ObligationCauseCode; use rustc_middle::traits::{BuiltinImplSource, SignatureMismatchData}; -use rustc_middle::ty::{self, GenericArgsRef, Ty, TyCtxt, Upcast}; +use rustc_middle::ty::{self, GenericArgsRef, Ty, TyCtxt, Upcast, elaborate}; use rustc_middle::{bug, span_bug}; use rustc_span::def_id::DefId; -use rustc_type_ir::elaborate; use thin_vec::thin_vec; use tracing::{debug, instrument}; @@ -39,7 +38,12 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { obligation: &PolyTraitObligation<'tcx>, candidate: SelectionCandidate<'tcx>, ) -> Result, SelectionError<'tcx>> { - let mut impl_src = match candidate { + Ok(match candidate { + SizedCandidate { has_nested } => { + let data = self.confirm_builtin_candidate(obligation, has_nested); + ImplSource::Builtin(BuiltinImplSource::Misc, data) + } + BuiltinCandidate { has_nested } => { let data = self.confirm_builtin_candidate(obligation, has_nested); ImplSource::Builtin(BuiltinImplSource::Misc, data) @@ -134,15 +138,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { BikeshedGuaranteedNoDropCandidate => { self.confirm_bikeshed_guaranteed_no_drop_candidate(obligation) } - }; - - // The obligations returned by confirmation are recursively evaluated - // so we need to make sure they have the correct depth. - for subobligation in impl_src.borrow_nested_obligations_mut() { - subobligation.set_depth_from_parent(obligation.recursion_depth); - } - - Ok(impl_src) + }) } fn confirm_projection_candidate( @@ -320,7 +316,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { obligation.cause.clone(), obligation.recursion_depth + 1, obligation.param_env, - obligation.predicate.rebind(trait_ref), + trait_ref, ) }; @@ -346,7 +342,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { obligation.cause.clone(), obligation.recursion_depth + 1, obligation.param_env, - obligation.predicate.rebind(outlives), + outlives, ) }; @@ -407,10 +403,9 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { } } - let predicate = obligation.predicate.skip_binder(); + let predicate = self.infcx.enter_forall_and_leak_universe(obligation.predicate); let mut assume = predicate.trait_ref.args.const_at(2); - // FIXME(mgca): We should shallowly normalize this. if self.tcx().features().generic_const_exprs() { assume = crate::traits::evaluate_const(self.infcx, assume, obligation.param_env) } @@ -597,9 +592,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { // Associated types that require `Self: Sized` do not show up in the built-in // implementation of `Trait for dyn Trait`, and can be dropped here. .filter(|item| !tcx.generics_require_sized_self(item.def_id)) - .filter_map( - |item| if item.kind == ty::AssocKind::Type { Some(item.def_id) } else { None }, - ) + .filter_map(|item| if item.is_type() { Some(item.def_id) } else { None }) .collect(); for assoc_type in assoc_types { @@ -1093,26 +1086,36 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { { // See `assemble_candidates_for_unsizing` for more info. // We already checked the compatibility of auto traits within `assemble_candidates_for_unsizing`. - let iter = data_a - .principal() - .filter(|_| { - // optionally drop the principal, if we're unsizing to no principal - data_b.principal().is_some() - }) - .map(|b| b.map_bound(ty::ExistentialPredicate::Trait)) - .into_iter() - .chain( + let existential_predicates = if data_b.principal().is_some() { + tcx.mk_poly_existential_predicates_from_iter( data_a - .projection_bounds() - .map(|b| b.map_bound(ty::ExistentialPredicate::Projection)), + .principal() + .map(|b| b.map_bound(ty::ExistentialPredicate::Trait)) + .into_iter() + .chain( + data_a + .projection_bounds() + .map(|b| b.map_bound(ty::ExistentialPredicate::Projection)), + ) + .chain( + data_b + .auto_traits() + .map(ty::ExistentialPredicate::AutoTrait) + .map(ty::Binder::dummy), + ), ) - .chain( + } else { + // If we're unsizing to a dyn type that has no principal, then drop + // the principal and projections from the type. We use the auto traits + // from the RHS type since as we noted that we've checked for auto + // trait compatibility during unsizing. + tcx.mk_poly_existential_predicates_from_iter( data_b .auto_traits() .map(ty::ExistentialPredicate::AutoTrait) .map(ty::Binder::dummy), - ); - let existential_predicates = tcx.mk_poly_existential_predicates_from_iter(iter); + ) + }; let source_trait = Ty::new_dynamic(tcx, existential_predicates, r_b, dyn_a); // Require that the traits involved in this upcast are **equal**; diff --git a/compiler/rustc_trait_selection/src/traits/select/mod.rs b/compiler/rustc_trait_selection/src/traits/select/mod.rs index e439df76cd4b9..4ce37db428002 100644 --- a/compiler/rustc_trait_selection/src/traits/select/mod.rs +++ b/compiler/rustc_trait_selection/src/traits/select/mod.rs @@ -28,10 +28,9 @@ use rustc_middle::ty::error::TypeErrorToStringExt; use rustc_middle::ty::print::{PrintTraitRefExt as _, with_no_trimmed_paths}; use rustc_middle::ty::{ self, GenericArgsRef, PolyProjectionPredicate, Ty, TyCtxt, TypeFoldable, TypeVisitableExt, - TypingMode, Upcast, + TypingMode, Upcast, elaborate, }; use rustc_span::{Symbol, sym}; -use rustc_type_ir::elaborate; use tracing::{debug, instrument, trace}; use self::EvaluationResult::*; @@ -49,7 +48,9 @@ use crate::infer::{InferCtxt, InferOk, TypeFreshener}; use crate::solve::InferCtxtSelectExt as _; use crate::traits::normalize::{normalize_with_depth, normalize_with_depth_to}; use crate::traits::project::{ProjectAndUnifyResult, ProjectionCacheKeyExt}; -use crate::traits::{EvaluateConstErr, ProjectionCacheKey, Unimplemented, effects}; +use crate::traits::{ + EvaluateConstErr, ProjectionCacheKey, Unimplemented, effects, sizedness_fast_path, +}; mod _match; mod candidate_assembly; @@ -603,6 +604,10 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { None => self.check_recursion_limit(&obligation, &obligation)?, } + if sizedness_fast_path(self.tcx(), obligation.predicate) { + return Ok(EvaluatedToOk); + } + ensure_sufficient_stack(|| { let bound_predicate = obligation.predicate.kind(); match bound_predicate.skip_binder() { @@ -652,7 +657,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { } } - ty::PredicateKind::Clause(ty::ClauseKind::WellFormed(arg)) => { + ty::PredicateKind::Clause(ty::ClauseKind::WellFormed(term)) => { // So, there is a bit going on here. First, `WellFormed` predicates // are coinductive, like trait predicates with auto traits. // This means that we need to detect if we have recursively @@ -676,11 +681,11 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { let cache = previous_stack.cache; let dfn = cache.next_dfn(); - for stack_arg in previous_stack.cache.wf_args.borrow().iter().rev() { - if stack_arg.0 != arg { + for stack_term in previous_stack.cache.wf_args.borrow().iter().rev() { + if stack_term.0 != term { continue; } - debug!("WellFormed({:?}) on stack", arg); + debug!("WellFormed({:?}) on stack", term); if let Some(stack) = previous_stack.head { // Okay, let's imagine we have two different stacks: // `T: NonAutoTrait -> WF(T) -> T: NonAutoTrait` @@ -696,11 +701,11 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { // would contain `(T, 1)`. We want to check all // trait predicates greater than `1`. The previous // stack would be `T: Auto`. - let cycle = stack.iter().take_while(|s| s.depth > stack_arg.1); + let cycle = stack.iter().take_while(|s| s.depth > stack_term.1); let tcx = self.tcx(); let cycle = cycle.map(|stack| stack.obligation.predicate.upcast(tcx)); if self.coinductive_match(cycle) { - stack.update_reached_depth(stack_arg.1); + stack.update_reached_depth(stack_term.1); return Ok(EvaluatedToOk); } else { return Ok(EvaluatedToAmbigStackDependent); @@ -714,11 +719,11 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { obligation.param_env, obligation.cause.body_id, obligation.recursion_depth + 1, - arg, + term, obligation.cause.span, ) { Some(obligations) => { - cache.wf_args.borrow_mut().push((arg, previous_stack.depth())); + cache.wf_args.borrow_mut().push((term, previous_stack.depth())); let result = self.evaluate_predicates_recursively(previous_stack, obligations); cache.wf_args.borrow_mut().pop(); @@ -1446,6 +1451,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { match self.infcx.typing_mode() { TypingMode::Coherence => {} TypingMode::Analysis { .. } + | TypingMode::Borrowck { .. } | TypingMode::PostBorrowckAnalysis { .. } | TypingMode::PostAnalysis => return Ok(()), } @@ -1491,7 +1497,10 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { // However, if we disqualify *all* goals from being cached, perf suffers. // This is likely fixed by better caching in general in the new solver. // See: . - TypingMode::Analysis { defining_opaque_types } => { + TypingMode::Analysis { + defining_opaque_types_and_generators: defining_opaque_types, + } + | TypingMode::Borrowck { defining_opaque_types } => { defining_opaque_types.is_empty() || !pred.has_opaque_types() } // The hidden types of `defined_opaque_types` is not local to the current @@ -1801,17 +1810,21 @@ impl<'tcx> SelectionContext<'_, 'tcx> { return Some(candidates.pop().unwrap().candidate); } - // We prefer trivial builtin candidates, i.e. builtin impls without any nested - // requirements, over all others. This is a fix for #53123 and prevents winnowing - // from accidentally extending the lifetime of a variable. - let mut trivial_builtin = candidates - .iter() - .filter(|c| matches!(c.candidate, BuiltinCandidate { has_nested: false })); - if let Some(_trivial) = trivial_builtin.next() { - // There should only ever be a single trivial builtin candidate + // We prefer `Sized` candidates over everything. + let mut sized_candidates = + candidates.iter().filter(|c| matches!(c.candidate, SizedCandidate { has_nested: _ })); + if let Some(sized_candidate) = sized_candidates.next() { + // There should only ever be a single sized candidate // as they would otherwise overlap. - debug_assert_eq!(trivial_builtin.next(), None); - return Some(BuiltinCandidate { has_nested: false }); + debug_assert_eq!(sized_candidates.next(), None); + // Only prefer the built-in `Sized` candidate if its nested goals are certain. + // Otherwise, we may encounter failure later on if inference causes this candidate + // to not hold, but a where clause would've applied instead. + if sized_candidate.evaluation.must_apply_modulo_regions() { + return Some(sized_candidate.candidate.clone()); + } else { + return None; + } } // Before we consider where-bounds, we have to deduplicate them here and also @@ -1940,7 +1953,8 @@ impl<'tcx> SelectionContext<'_, 'tcx> { // Don't use impl candidates which overlap with other candidates. // This should pretty much only ever happen with malformed impls. if candidates.iter().all(|c| match c.candidate { - BuiltinCandidate { has_nested: _ } + SizedCandidate { has_nested: _ } + | BuiltinCandidate { has_nested: _ } | TransmutabilityCandidate | AutoImplCandidate | ClosureCandidate { .. } @@ -2294,6 +2308,11 @@ impl<'tcx> SelectionContext<'_, 'tcx> { | ty::Never | ty::Char => ty::Binder::dummy(Vec::new()), + // This branch is only for `experimental_default_bounds`. + // Other foreign types were rejected earlier in + // `assemble_candidates_from_auto_impls`. + ty::Foreign(..) => ty::Binder::dummy(Vec::new()), + // FIXME(unsafe_binders): Squash the double binder for now, I guess. ty::UnsafeBinder(_) => return Err(SelectionError::Unimplemented), @@ -2303,8 +2322,7 @@ impl<'tcx> SelectionContext<'_, 'tcx> { ty::Placeholder(..) | ty::Dynamic(..) | ty::Param(..) - | ty::Foreign(..) - | ty::Alias(ty::Projection | ty::Inherent | ty::Weak, ..) + | ty::Alias(ty::Projection | ty::Inherent | ty::Free, ..) | ty::Bound(..) | ty::Infer(ty::TyVar(_) | ty::FreshTy(_) | ty::FreshIntTy(_) | ty::FreshFloatTy(_)) => { bug!("asked to assemble constituent types of unexpected type: {:?}", t); @@ -2950,14 +2968,14 @@ struct ProvisionalEvaluationCache<'tcx> { /// means the cached value for `F`. map: RefCell, ProvisionalEvaluation>>, - /// The stack of args that we assume to be true because a `WF(arg)` predicate + /// The stack of terms that we assume to be well-formed because a `WF(term)` predicate /// is on the stack above (and because of wellformedness is coinductive). /// In an "ideal" world, this would share a stack with trait predicates in /// `TraitObligationStack`. However, trait predicates are *much* hotter than /// `WellFormed` predicates, and it's very likely that the additional matches /// will have a perf effect. The value here is the well-formed `GenericArg` /// and the depth of the trait predicate *above* that well-formed predicate. - wf_args: RefCell, usize)>>, + wf_args: RefCell, usize)>>, } /// A cache value for the provisional cache: contains the depth-first diff --git a/compiler/rustc_trait_selection/src/traits/specialize/mod.rs b/compiler/rustc_trait_selection/src/traits/specialize/mod.rs index 448ac558cad7f..b30fadd3e5b7d 100644 --- a/compiler/rustc_trait_selection/src/traits/specialize/mod.rs +++ b/compiler/rustc_trait_selection/src/traits/specialize/mod.rs @@ -18,11 +18,11 @@ use rustc_hir::def_id::{DefId, LocalDefId}; use rustc_infer::traits::Obligation; use rustc_middle::bug; use rustc_middle::query::LocalCrate; +use rustc_middle::traits::query::NoSolution; use rustc_middle::ty::print::PrintTraitRefExt as _; use rustc_middle::ty::{self, GenericArgsRef, Ty, TyCtxt, TypeVisitableExt, TypingMode}; use rustc_session::lint::builtin::COHERENCE_LEAK_CHECK; use rustc_span::{DUMMY_SP, ErrorGuaranteed, Span, sym}; -use rustc_type_ir::solve::NoSolution; use specialization_graph::GraphExt; use tracing::{debug, instrument}; diff --git a/compiler/rustc_trait_selection/src/traits/util.rs b/compiler/rustc_trait_selection/src/traits/util.rs index 15f5cf916a48b..035fd38c48aad 100644 --- a/compiler/rustc_trait_selection/src/traits/util.rs +++ b/compiler/rustc_trait_selection/src/traits/util.rs @@ -1,6 +1,7 @@ use std::collections::{BTreeMap, VecDeque}; use rustc_data_structures::fx::{FxHashSet, FxIndexMap}; +use rustc_hir::LangItem; use rustc_hir::def_id::DefId; use rustc_infer::infer::InferCtxt; pub use rustc_infer::traits::util::*; @@ -289,7 +290,7 @@ impl<'tcx> TypeFolder> for BoundVarReplacer<'_, 'tcx> { } fn fold_region(&mut self, r: ty::Region<'tcx>) -> ty::Region<'tcx> { - match *r { + match r.kind() { ty::ReBound(debruijn, _) if debruijn.as_usize() >= self.current_index.as_usize() + self.universe_indices.len() => @@ -407,7 +408,7 @@ impl<'tcx> TypeFolder> for PlaceholderReplacer<'_, 'tcx> { } fn fold_region(&mut self, r0: ty::Region<'tcx>) -> ty::Region<'tcx> { - let r1 = match *r0 { + let r1 = match r0.kind() { ty::ReVar(vid) => self .infcx .inner @@ -417,7 +418,7 @@ impl<'tcx> TypeFolder> for PlaceholderReplacer<'_, 'tcx> { _ => r0, }; - let r2 = match *r1 { + let r2 = match r1.kind() { ty::RePlaceholder(p) => { let replace_var = self.mapped_regions.get(&p); match replace_var { @@ -504,3 +505,21 @@ impl<'tcx> TypeFolder> for PlaceholderReplacer<'_, 'tcx> { } } } + +pub fn sizedness_fast_path<'tcx>(tcx: TyCtxt<'tcx>, predicate: ty::Predicate<'tcx>) -> bool { + // Proving `Sized` very often on "obviously sized" types like `&T`, accounts for about 60% + // percentage of the predicates we have to prove. No need to canonicalize and all that for + // such cases. + if let ty::PredicateKind::Clause(ty::ClauseKind::Trait(trait_ref)) = + predicate.kind().skip_binder() + { + if tcx.is_lang_item(trait_ref.def_id(), LangItem::Sized) + && trait_ref.self_ty().is_trivially_sized(tcx) + { + debug!("fast path -- trivial sizedness"); + return true; + } + } + + false +} diff --git a/compiler/rustc_trait_selection/src/traits/vtable.rs b/compiler/rustc_trait_selection/src/traits/vtable.rs index 165174c0bcc15..3565c11249ad1 100644 --- a/compiler/rustc_trait_selection/src/traits/vtable.rs +++ b/compiler/rustc_trait_selection/src/traits/vtable.rs @@ -197,10 +197,8 @@ fn own_existential_vtable_entries_iter( tcx: TyCtxt<'_>, trait_def_id: DefId, ) -> impl Iterator { - let trait_methods = tcx - .associated_items(trait_def_id) - .in_definition_order() - .filter(|item| item.kind == ty::AssocKind::Fn); + let trait_methods = + tcx.associated_items(trait_def_id).in_definition_order().filter(|item| item.is_fn()); // Now list each method's DefId (for within its trait). let own_entries = trait_methods.filter_map(move |&trait_method| { diff --git a/compiler/rustc_trait_selection/src/traits/wf.rs b/compiler/rustc_trait_selection/src/traits/wf.rs index 54b6c22b2d821..08d3b92e9b5ef 100644 --- a/compiler/rustc_trait_selection/src/traits/wf.rs +++ b/compiler/rustc_trait_selection/src/traits/wf.rs @@ -1,12 +1,18 @@ +//! Core logic responsible for determining what it means for various type system +//! primitives to be "well formed". Actually checking whether these primitives are +//! well formed is performed elsewhere (e.g. during type checking or item well formedness +//! checking). + use std::iter; use rustc_hir as hir; +use rustc_hir::def::DefKind; use rustc_hir::lang_items::LangItem; use rustc_infer::traits::{ObligationCauseCode, PredicateObligations}; use rustc_middle::bug; use rustc_middle::ty::{ - self, GenericArg, GenericArgKind, GenericArgsRef, Ty, TyCtxt, TypeSuperVisitable, - TypeVisitable, TypeVisitableExt, TypeVisitor, + self, GenericArgsRef, Term, TermKind, Ty, TyCtxt, TypeSuperVisitable, TypeVisitable, + TypeVisitableExt, TypeVisitor, }; use rustc_session::parse::feature_err; use rustc_span::def_id::{DefId, LocalDefId}; @@ -15,28 +21,29 @@ use tracing::{debug, instrument, trace}; use crate::infer::InferCtxt; use crate::traits; + /// Returns the set of obligations needed to make `arg` well-formed. /// If `arg` contains unresolved inference variables, this may include /// further WF obligations. However, if `arg` IS an unresolved /// inference variable, returns `None`, because we are not able to -/// make any progress at all. This is to prevent "livelock" where we -/// say "$0 is WF if $0 is WF". +/// make any progress at all. This is to prevent cycles where we +/// say "?0 is WF if ?0 is WF". pub fn obligations<'tcx>( infcx: &InferCtxt<'tcx>, param_env: ty::ParamEnv<'tcx>, body_id: LocalDefId, recursion_depth: usize, - arg: GenericArg<'tcx>, + term: Term<'tcx>, span: Span, ) -> Option> { - // Handle the "livelock" case (see comment above) by bailing out if necessary. - let arg = match arg.unpack() { - GenericArgKind::Type(ty) => { + // Handle the "cycle" case (see comment above) by bailing out if necessary. + let term = match term.unpack() { + TermKind::Ty(ty) => { match ty.kind() { ty::Infer(ty::TyVar(_)) => { let resolved_ty = infcx.shallow_resolve(ty); if resolved_ty == ty { - // No progress, bail out to prevent "livelock". + // No progress, bail out to prevent cycles. return None; } else { resolved_ty @@ -46,12 +53,12 @@ pub fn obligations<'tcx>( } .into() } - GenericArgKind::Const(ct) => { + TermKind::Const(ct) => { match ct.kind() { ty::ConstKind::Infer(_) => { let resolved = infcx.shallow_resolve_const(ct); if resolved == ct { - // No progress. + // No progress, bail out to prevent cycles. return None; } else { resolved @@ -61,8 +68,6 @@ pub fn obligations<'tcx>( } .into() } - // There is nothing we have to do for lifetimes. - GenericArgKind::Lifetime(..) => return Some(PredicateObligations::new()), }; let mut wf = WfPredicates { @@ -74,11 +79,11 @@ pub fn obligations<'tcx>( recursion_depth, item: None, }; - wf.compute(arg); - debug!("wf::obligations({:?}, body_id={:?}) = {:?}", arg, body_id, wf.out); + wf.add_wf_preds_for_term(term); + debug!("wf::obligations({:?}, body_id={:?}) = {:?}", term, body_id, wf.out); let result = wf.normalize(infcx); - debug!("wf::obligations({:?}, body_id={:?}) ~~> {:?}", arg, body_id, result); + debug!("wf::obligations({:?}, body_id={:?}) ~~> {:?}", term, body_id, result); Some(result) } @@ -89,23 +94,19 @@ pub fn obligations<'tcx>( pub fn unnormalized_obligations<'tcx>( infcx: &InferCtxt<'tcx>, param_env: ty::ParamEnv<'tcx>, - arg: GenericArg<'tcx>, + term: Term<'tcx>, span: Span, body_id: LocalDefId, ) -> Option> { - debug_assert_eq!(arg, infcx.resolve_vars_if_possible(arg)); + debug_assert_eq!(term, infcx.resolve_vars_if_possible(term)); // However, if `arg` IS an unresolved inference variable, returns `None`, // because we are not able to make any progress at all. This is to prevent - // "livelock" where we say "$0 is WF if $0 is WF". - if arg.is_non_region_infer() { + // cycles where we say "?0 is WF if ?0 is WF". + if term.is_infer() { return None; } - if let ty::GenericArgKind::Lifetime(..) = arg.unpack() { - return Some(PredicateObligations::new()); - } - let mut wf = WfPredicates { infcx, param_env, @@ -115,7 +116,7 @@ pub fn unnormalized_obligations<'tcx>( recursion_depth: 0, item: None, }; - wf.compute(arg); + wf.add_wf_preds_for_term(term); Some(wf.out) } @@ -140,7 +141,7 @@ pub fn trait_obligations<'tcx>( recursion_depth: 0, item: Some(item), }; - wf.compute_trait_pred(trait_pred, Elaborate::All); + wf.add_wf_preds_for_trait_pred(trait_pred, Elaborate::All); debug!(obligations = ?wf.out); wf.normalize(infcx) } @@ -171,7 +172,7 @@ pub fn clause_obligations<'tcx>( // It's ok to skip the binder here because wf code is prepared for it match clause.kind().skip_binder() { ty::ClauseKind::Trait(t) => { - wf.compute_trait_pred(t, Elaborate::None); + wf.add_wf_preds_for_trait_pred(t, Elaborate::None); } ty::ClauseKind::HostEffect(..) => { // Technically the well-formedness of this predicate is implied by @@ -179,22 +180,22 @@ pub fn clause_obligations<'tcx>( } ty::ClauseKind::RegionOutlives(..) => {} ty::ClauseKind::TypeOutlives(ty::OutlivesPredicate(ty, _reg)) => { - wf.compute(ty.into()); + wf.add_wf_preds_for_term(ty.into()); } ty::ClauseKind::Projection(t) => { - wf.compute_alias_term(t.projection_term); - wf.compute(t.term.into_arg()); + wf.add_wf_preds_for_alias_term(t.projection_term); + wf.add_wf_preds_for_term(t.term); } ty::ClauseKind::ConstArgHasType(ct, ty) => { - wf.compute(ct.into()); - wf.compute(ty.into()); + wf.add_wf_preds_for_term(ct.into()); + wf.add_wf_preds_for_term(ty.into()); } - ty::ClauseKind::WellFormed(arg) => { - wf.compute(arg); + ty::ClauseKind::WellFormed(term) => { + wf.add_wf_preds_for_term(term); } ty::ClauseKind::ConstEvaluatable(ct) => { - wf.compute(ct.into()); + wf.add_wf_preds_for_term(ct.into()); } } @@ -372,14 +373,18 @@ impl<'a, 'tcx> WfPredicates<'a, 'tcx> { } /// Pushes the obligations required for `trait_ref` to be WF into `self.out`. - fn compute_trait_pred(&mut self, trait_pred: ty::TraitPredicate<'tcx>, elaborate: Elaborate) { + fn add_wf_preds_for_trait_pred( + &mut self, + trait_pred: ty::TraitPredicate<'tcx>, + elaborate: Elaborate, + ) { let tcx = self.tcx(); let trait_ref = trait_pred.trait_ref; // Negative trait predicates don't require supertraits to hold, just // that their args are WF. if trait_pred.polarity == ty::PredicatePolarity::Negative { - self.compute_negative_trait_pred(trait_ref); + self.add_wf_preds_for_negative_trait_pred(trait_ref); return; } @@ -416,11 +421,9 @@ impl<'a, 'tcx> WfPredicates<'a, 'tcx> { .args .iter() .enumerate() - .filter(|(_, arg)| { - matches!(arg.unpack(), GenericArgKind::Type(..) | GenericArgKind::Const(..)) - }) - .filter(|(_, arg)| !arg.has_escaping_bound_vars()) - .map(|(i, arg)| { + .filter_map(|(i, arg)| arg.as_term().map(|t| (i, t))) + .filter(|(_, term)| !term.has_escaping_bound_vars()) + .map(|(i, term)| { let mut cause = traits::ObligationCause::misc(self.span, self.body_id); // The first arg is the self ty - use the correct span for it. if i == 0 { @@ -435,9 +438,7 @@ impl<'a, 'tcx> WfPredicates<'a, 'tcx> { cause, depth, param_env, - ty::Binder::dummy(ty::PredicateKind::Clause(ty::ClauseKind::WellFormed( - arg, - ))), + ty::ClauseKind::WellFormed(term), ) }), ); @@ -445,15 +446,17 @@ impl<'a, 'tcx> WfPredicates<'a, 'tcx> { // Compute the obligations that are required for `trait_ref` to be WF, // given that it is a *negative* trait predicate. - fn compute_negative_trait_pred(&mut self, trait_ref: ty::TraitRef<'tcx>) { + fn add_wf_preds_for_negative_trait_pred(&mut self, trait_ref: ty::TraitRef<'tcx>) { for arg in trait_ref.args { - self.compute(arg); + if let Some(term) = arg.as_term() { + self.add_wf_preds_for_term(term); + } } } /// Pushes the obligations required for an alias (except inherent) to be WF /// into `self.out`. - fn compute_alias_term(&mut self, data: ty::AliasTerm<'tcx>) { + fn add_wf_preds_for_alias_term(&mut self, data: ty::AliasTerm<'tcx>) { // A projection is well-formed if // // (a) its predicates hold (*) @@ -478,13 +481,13 @@ impl<'a, 'tcx> WfPredicates<'a, 'tcx> { let obligations = self.nominal_obligations(data.def_id, data.args); self.out.extend(obligations); - self.compute_projection_args(data.args); + self.add_wf_preds_for_projection_args(data.args); } /// Pushes the obligations required for an inherent alias to be WF /// into `self.out`. // FIXME(inherent_associated_types): Merge this function with `fn compute_alias`. - fn compute_inherent_projection(&mut self, data: ty::AliasTy<'tcx>) { + fn add_wf_preds_for_inherent_projection(&mut self, data: ty::AliasTerm<'tcx>) { // An inherent projection is well-formed if // // (a) its predicates hold (*) @@ -496,7 +499,7 @@ impl<'a, 'tcx> WfPredicates<'a, 'tcx> { if !data.self_ty().has_escaping_bound_vars() { // FIXME(inherent_associated_types): Should this happen inside of a snapshot? // FIXME(inherent_associated_types): This is incompatible with the new solver and lazy norm! - let args = traits::project::compute_inherent_assoc_ty_args( + let args = traits::project::compute_inherent_assoc_term_args( &mut traits::SelectionContext::new(self.infcx), self.param_env, data, @@ -511,7 +514,7 @@ impl<'a, 'tcx> WfPredicates<'a, 'tcx> { data.args.visit_with(self); } - fn compute_projection_args(&mut self, args: GenericArgsRef<'tcx>) { + fn add_wf_preds_for_projection_args(&mut self, args: GenericArgsRef<'tcx>) { let tcx = self.tcx(); let cause = self.cause(ObligationCauseCode::WellFormed(None)); let param_env = self.param_env; @@ -519,19 +522,15 @@ impl<'a, 'tcx> WfPredicates<'a, 'tcx> { self.out.extend( args.iter() - .filter(|arg| { - matches!(arg.unpack(), GenericArgKind::Type(..) | GenericArgKind::Const(..)) - }) - .filter(|arg| !arg.has_escaping_bound_vars()) - .map(|arg| { + .filter_map(|arg| arg.as_term()) + .filter(|term| !term.has_escaping_bound_vars()) + .map(|term| { traits::Obligation::with_depth( tcx, cause.clone(), depth, param_env, - ty::Binder::dummy(ty::PredicateKind::Clause(ty::ClauseKind::WellFormed( - arg, - ))), + ty::ClauseKind::WellFormed(term), ) }), ); @@ -555,10 +554,10 @@ impl<'a, 'tcx> WfPredicates<'a, 'tcx> { } } - /// Pushes all the predicates needed to validate that `ty` is WF into `out`. + /// Pushes all the predicates needed to validate that `term` is WF into `out`. #[instrument(level = "debug", skip(self))] - fn compute(&mut self, arg: GenericArg<'tcx>) { - arg.visit_with(self); + fn add_wf_preds_for_term(&mut self, term: Term<'tcx>) { + term.visit_with(self); debug!(?self.out); } @@ -596,7 +595,7 @@ impl<'a, 'tcx> WfPredicates<'a, 'tcx> { .collect() } - fn from_object_ty( + fn add_wf_preds_for_dyn_ty( &mut self, ty: Ty<'tcx>, data: &'tcx ty::List>, @@ -651,6 +650,57 @@ impl<'a, 'tcx> WfPredicates<'a, 'tcx> { outlives, )); } + + // We don't add any wf predicates corresponding to the trait ref's generic arguments + // which allows code like this to compile: + // ```rust + // trait Trait {} + // fn foo(_: &dyn Trait<[u32]>) {} + // ``` + } + } + + fn add_wf_preds_for_pat_ty(&mut self, base_ty: Ty<'tcx>, pat: ty::Pattern<'tcx>) { + let tcx = self.tcx(); + match *pat { + ty::PatternKind::Range { start, end } => { + let mut check = |c| { + let cause = self.cause(ObligationCauseCode::Misc); + self.out.push(traits::Obligation::with_depth( + tcx, + cause.clone(), + self.recursion_depth, + self.param_env, + ty::Binder::dummy(ty::PredicateKind::Clause( + ty::ClauseKind::ConstArgHasType(c, base_ty), + )), + )); + if !tcx.features().generic_pattern_types() { + if c.has_param() { + if self.span.is_dummy() { + self.tcx() + .dcx() + .delayed_bug("feature error should be reported elsewhere, too"); + } else { + feature_err( + &self.tcx().sess, + sym::generic_pattern_types, + self.span, + "wraparound pattern type ranges cause monomorphization time errors", + ) + .emit(); + } + } + } + }; + check(start); + check(end); + } + ty::PatternKind::Or(patterns) => { + for pat in patterns { + self.add_wf_preds_for_pat_ty(base_ty, pat) + } + } } } } @@ -705,43 +755,9 @@ impl<'a, 'tcx> TypeVisitor> for WfPredicates<'a, 'tcx> { )); } - ty::Pat(subty, pat) => { - self.require_sized(subty, ObligationCauseCode::Misc); - match *pat { - ty::PatternKind::Range { start, end } => { - let mut check = |c| { - let cause = self.cause(ObligationCauseCode::Misc); - self.out.push(traits::Obligation::with_depth( - tcx, - cause.clone(), - self.recursion_depth, - self.param_env, - ty::Binder::dummy(ty::PredicateKind::Clause( - ty::ClauseKind::ConstArgHasType(c, subty), - )), - )); - if !tcx.features().generic_pattern_types() { - if c.has_param() { - if self.span.is_dummy() { - self.tcx().dcx().delayed_bug( - "feature error should be reported elsewhere, too", - ); - } else { - feature_err( - &self.tcx().sess, - sym::generic_pattern_types, - self.span, - "wraparound pattern type ranges cause monomorphization time errors", - ) - .emit(); - } - } - } - }; - check(start); - check(end); - } - } + ty::Pat(base_ty, pat) => { + self.require_sized(base_ty, ObligationCauseCode::Misc); + self.add_wf_preds_for_pat_ty(base_ty, pat); } ty::Tuple(tys) => { @@ -756,12 +772,12 @@ impl<'a, 'tcx> TypeVisitor> for WfPredicates<'a, 'tcx> { // Simple cases that are WF if their type args are WF. } - ty::Alias(ty::Projection | ty::Opaque | ty::Weak, data) => { + ty::Alias(ty::Projection | ty::Opaque | ty::Free, data) => { let obligations = self.nominal_obligations(data.def_id, data.args); self.out.extend(obligations); } ty::Alias(ty::Inherent, data) => { - self.compute_inherent_projection(data); + self.add_wf_preds_for_inherent_projection(data.into()); return; // Subtree handled by compute_inherent_projection. } @@ -895,7 +911,7 @@ impl<'a, 'tcx> TypeVisitor> for WfPredicates<'a, 'tcx> { // // Here, we defer WF checking due to higher-ranked // regions. This is perhaps not ideal. - self.from_object_ty(t, data, r); + self.add_wf_preds_for_dyn_ty(t, data, r); // FIXME(#27579) RFC also considers adding trait // obligations that don't refer to Self and @@ -917,11 +933,11 @@ impl<'a, 'tcx> TypeVisitor> for WfPredicates<'a, 'tcx> { // 1. Check if they have been resolved, and if so proceed with // THAT type. // 2. If not, we've at least simplified things (e.g., we went - // from `Vec<$0>: WF` to `$0: WF`), so we can + // from `Vec?0>: WF` to `?0: WF`), so we can // register a pending obligation and keep // moving. (Goal is that an "inductive hypothesis" // is satisfied to ensure termination.) - // See also the comment on `fn obligations`, describing "livelock" + // See also the comment on `fn obligations`, describing cycle // prevention, which happens before this can be reached. ty::Infer(_) => { let cause = self.cause(ObligationCauseCode::WellFormed(None)); @@ -946,9 +962,6 @@ impl<'a, 'tcx> TypeVisitor> for WfPredicates<'a, 'tcx> { match c.kind() { ty::ConstKind::Unevaluated(uv) => { if !c.has_escaping_bound_vars() { - let obligations = self.nominal_obligations(uv.def, uv.args); - self.out.extend(obligations); - let predicate = ty::Binder::dummy(ty::PredicateKind::Clause( ty::ClauseKind::ConstEvaluatable(c), )); @@ -960,6 +973,16 @@ impl<'a, 'tcx> TypeVisitor> for WfPredicates<'a, 'tcx> { self.param_env, predicate, )); + + if tcx.def_kind(uv.def) == DefKind::AssocConst + && tcx.def_kind(tcx.parent(uv.def)) == (DefKind::Impl { of_trait: false }) + { + self.add_wf_preds_for_inherent_projection(uv.into()); + return; // Subtree is handled by above function + } else { + let obligations = self.nominal_obligations(uv.def, uv.args); + self.out.extend(obligations); + } } } ty::ConstKind::Infer(_) => { diff --git a/compiler/rustc_traits/src/codegen.rs b/compiler/rustc_traits/src/codegen.rs index 4a889abfc28f1..88f02d16c7d5b 100644 --- a/compiler/rustc_traits/src/codegen.rs +++ b/compiler/rustc_traits/src/codegen.rs @@ -6,11 +6,11 @@ use rustc_infer::infer::TyCtxtInferExt; use rustc_middle::bug; use rustc_middle::traits::CodegenObligationError; -use rustc_middle::ty::{self, PseudoCanonicalInput, TyCtxt, TypeVisitableExt}; +use rustc_middle::ty::{self, PseudoCanonicalInput, TyCtxt, TypeVisitableExt, Upcast}; use rustc_trait_selection::error_reporting::InferCtxtErrorExt; use rustc_trait_selection::traits::{ ImplSource, Obligation, ObligationCause, ObligationCtxt, ScrubbedTraitError, SelectionContext, - Unimplemented, + Unimplemented, sizedness_fast_path, }; use tracing::debug; @@ -34,6 +34,13 @@ pub(crate) fn codegen_select_candidate<'tcx>( let (infcx, param_env) = tcx.infer_ctxt().ignoring_regions().build_with_typing_env(typing_env); let mut selcx = SelectionContext::new(&infcx); + if sizedness_fast_path(tcx, trait_ref.upcast(tcx)) { + return Ok(&*tcx.arena.alloc(ImplSource::Builtin( + ty::solve::BuiltinImplSource::Trivial, + Default::default(), + ))); + } + let obligation_cause = ObligationCause::dummy(); let obligation = Obligation::new(tcx, obligation_cause, param_env, trait_ref); diff --git a/compiler/rustc_traits/src/evaluate_obligation.rs b/compiler/rustc_traits/src/evaluate_obligation.rs index c9ad096c6e9d0..7771db855d704 100644 --- a/compiler/rustc_traits/src/evaluate_obligation.rs +++ b/compiler/rustc_traits/src/evaluate_obligation.rs @@ -5,6 +5,7 @@ use rustc_span::DUMMY_SP; use rustc_trait_selection::traits::query::CanonicalPredicateGoal; use rustc_trait_selection::traits::{ EvaluationResult, Obligation, ObligationCause, OverflowError, SelectionContext, TraitQueryMode, + sizedness_fast_path, }; use tracing::debug; @@ -23,6 +24,10 @@ fn evaluate_obligation<'tcx>( debug!("evaluate_obligation: goal={:#?}", goal); let ParamEnvAnd { param_env, value: predicate } = goal; + if sizedness_fast_path(tcx, predicate) { + return Ok(EvaluationResult::EvaluatedToOk); + } + let mut selcx = SelectionContext::with_query_mode(infcx, TraitQueryMode::Canonical); let obligation = Obligation::new(tcx, ObligationCause::dummy(), param_env, predicate); diff --git a/compiler/rustc_traits/src/normalize_projection_ty.rs b/compiler/rustc_traits/src/normalize_projection_ty.rs index 4c2b7e4769ab0..e52898cc6e242 100644 --- a/compiler/rustc_traits/src/normalize_projection_ty.rs +++ b/compiler/rustc_traits/src/normalize_projection_ty.rs @@ -13,7 +13,7 @@ use tracing::debug; pub(crate) fn provide(p: &mut Providers) { *p = Providers { normalize_canonicalized_projection_ty, - normalize_canonicalized_weak_ty, + normalize_canonicalized_free_alias, normalize_canonicalized_inherent_projection_ty, ..*p }; @@ -32,8 +32,14 @@ fn normalize_canonicalized_projection_ty<'tcx>( let selcx = &mut SelectionContext::new(ocx.infcx); let cause = ObligationCause::dummy(); let mut obligations = PredicateObligations::new(); - let answer = - traits::normalize_projection_ty(selcx, param_env, goal, cause, 0, &mut obligations); + let answer = traits::normalize_projection_term( + selcx, + param_env, + goal.into(), + cause, + 0, + &mut obligations, + ); ocx.register_obligations(obligations); // #112047: With projections and opaques, we are able to create opaques that // are recursive (given some generic parameters of the opaque's type variables). @@ -63,11 +69,11 @@ fn normalize_canonicalized_projection_ty<'tcx>( ) } -fn normalize_canonicalized_weak_ty<'tcx>( +fn normalize_canonicalized_free_alias<'tcx>( tcx: TyCtxt<'tcx>, goal: CanonicalAliasGoal<'tcx>, ) -> Result<&'tcx Canonical<'tcx, QueryResponse<'tcx, NormalizationResult<'tcx>>>, NoSolution> { - debug!("normalize_canonicalized_weak_ty(goal={:#?})", goal); + debug!("normalize_canonicalized_free_alias(goal={:#?})", goal); tcx.infer_ctxt().enter_canonical_trait_query( &goal, @@ -104,14 +110,14 @@ fn normalize_canonicalized_inherent_projection_ty<'tcx>( let answer = traits::normalize_inherent_projection( selcx, param_env, - goal, + goal.into(), cause, 0, &mut obligations, ); ocx.register_obligations(obligations); - Ok(NormalizationResult { normalized_ty: answer }) + Ok(NormalizationResult { normalized_ty: answer.expect_type() }) }, ) } diff --git a/compiler/rustc_transmute/Cargo.toml b/compiler/rustc_transmute/Cargo.toml index f0c783b30020e..246b66d3d0307 100644 --- a/compiler/rustc_transmute/Cargo.toml +++ b/compiler/rustc_transmute/Cargo.toml @@ -10,9 +10,15 @@ rustc_data_structures = { path = "../rustc_data_structures" } rustc_hir = { path = "../rustc_hir", optional = true } rustc_middle = { path = "../rustc_middle", optional = true } rustc_span = { path = "../rustc_span", optional = true } +smallvec = "1.8.1" tracing = "0.1" # tidy-alphabetical-end +[dev-dependencies] +# tidy-alphabetical-start +itertools = "0.12" +# tidy-alphabetical-end + [features] rustc = [ "dep:rustc_abi", @@ -20,8 +26,3 @@ rustc = [ "dep:rustc_middle", "dep:rustc_span", ] - -[dev-dependencies] -# tidy-alphabetical-start -itertools = "0.12" -# tidy-alphabetical-end diff --git a/compiler/rustc_transmute/src/layout/dfa.rs b/compiler/rustc_transmute/src/layout/dfa.rs index af568171f911c..6d072c336af23 100644 --- a/compiler/rustc_transmute/src/layout/dfa.rs +++ b/compiler/rustc_transmute/src/layout/dfa.rs @@ -1,19 +1,19 @@ use std::fmt; +use std::iter::Peekable; use std::sync::atomic::{AtomicU32, Ordering}; -use tracing::instrument; +use super::{Byte, Ref, Tree, Uninhabited}; +use crate::{Map, Set}; -use super::{Byte, Nfa, Ref, nfa}; -use crate::Map; - -#[derive(PartialEq, Clone, Debug)] +#[derive(PartialEq)] +#[cfg_attr(test, derive(Clone))] pub(crate) struct Dfa where R: Ref, { pub(crate) transitions: Map>, pub(crate) start: State, - pub(crate) accepting: State, + pub(crate) accept: State, } #[derive(PartialEq, Clone, Debug)] @@ -21,7 +21,7 @@ pub(crate) struct Transitions where R: Ref, { - byte_transitions: Map, + byte_transitions: EdgeSet, ref_transitions: Map, } @@ -30,39 +30,19 @@ where R: Ref, { fn default() -> Self { - Self { byte_transitions: Map::default(), ref_transitions: Map::default() } - } -} - -impl Transitions -where - R: Ref, -{ - #[cfg(test)] - fn insert(&mut self, transition: Transition, state: State) { - match transition { - Transition::Byte(b) => { - self.byte_transitions.insert(b, state); - } - Transition::Ref(r) => { - self.ref_transitions.insert(r, state); - } - } + Self { byte_transitions: EdgeSet::empty(), ref_transitions: Map::default() } } } -/// The states in a `Nfa` represent byte offsets. +/// The states in a [`Dfa`] represent byte offsets. #[derive(Hash, Eq, PartialEq, PartialOrd, Ord, Copy, Clone)] -pub(crate) struct State(u32); +pub(crate) struct State(pub(crate) u32); -#[cfg(test)] -#[derive(Hash, Eq, PartialEq, Clone, Copy)] -pub(crate) enum Transition -where - R: Ref, -{ - Byte(Byte), - Ref(R), +impl State { + pub(crate) fn new() -> Self { + static COUNTER: AtomicU32 = AtomicU32::new(0); + Self(COUNTER.fetch_add(1, Ordering::SeqCst)) + } } impl fmt::Debug for State { @@ -71,112 +51,449 @@ impl fmt::Debug for State { } } -#[cfg(test)] -impl fmt::Debug for Transition -where - R: Ref, -{ - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match &self { - Self::Byte(b) => b.fmt(f), - Self::Ref(r) => r.fmt(f), - } - } -} - impl Dfa where R: Ref, { #[cfg(test)] pub(crate) fn bool() -> Self { - let mut transitions: Map> = Map::default(); + Self::from_transitions(|accept| Transitions { + byte_transitions: EdgeSet::new(Byte::new(0x00..=0x01), accept), + ref_transitions: Map::default(), + }) + } + + pub(crate) fn unit() -> Self { + let transitions: Map> = Map::default(); + let start = State::new(); + let accept = start; + + Self { transitions, start, accept } + } + + pub(crate) fn from_byte(byte: Byte) -> Self { + Self::from_transitions(|accept| Transitions { + byte_transitions: EdgeSet::new(byte, accept), + ref_transitions: Map::default(), + }) + } + + pub(crate) fn from_ref(r: R) -> Self { + Self::from_transitions(|accept| Transitions { + byte_transitions: EdgeSet::empty(), + ref_transitions: [(r, accept)].into_iter().collect(), + }) + } + + fn from_transitions(f: impl FnOnce(State) -> Transitions) -> Self { let start = State::new(); - let accepting = State::new(); + let accept = State::new(); + + Self { transitions: [(start, f(accept))].into_iter().collect(), start, accept } + } + + pub(crate) fn from_tree(tree: Tree) -> Result { + Ok(match tree { + Tree::Byte(b) => Self::from_byte(b), + Tree::Ref(r) => Self::from_ref(r), + Tree::Alt(alts) => { + // Convert and filter the inhabited alternatives. + let mut alts = alts.into_iter().map(Self::from_tree).filter_map(Result::ok); + // If there are no alternatives, return `Uninhabited`. + let dfa = alts.next().ok_or(Uninhabited)?; + // Combine the remaining alternatives with `dfa`. + alts.fold(dfa, |dfa, alt| dfa.union(alt, State::new)) + } + Tree::Seq(elts) => { + let mut dfa = Self::unit(); + for elt in elts.into_iter().map(Self::from_tree) { + dfa = dfa.concat(elt?); + } + dfa + } + }) + } + + /// Concatenate two `Dfa`s. + pub(crate) fn concat(self, other: Self) -> Self { + if self.start == self.accept { + return other; + } else if other.start == other.accept { + return self; + } - transitions.entry(start).or_default().insert(Transition::Byte(Byte::Init(0x00)), accepting); + let start = self.start; + let accept = other.accept; - transitions.entry(start).or_default().insert(Transition::Byte(Byte::Init(0x01)), accepting); + let mut transitions: Map> = self.transitions; - Self { transitions, start, accepting } + for (source, transition) in other.transitions { + let fix_state = |state| if state == other.start { self.accept } else { state }; + let byte_transitions = transition.byte_transitions.map_states(&fix_state); + let ref_transitions = transition + .ref_transitions + .into_iter() + .map(|(r, state)| (r, fix_state(state))) + .collect(); + + let old = transitions + .insert(fix_state(source), Transitions { byte_transitions, ref_transitions }); + assert!(old.is_none()); + } + + Self { transitions, start, accept } } - #[instrument(level = "debug")] - pub(crate) fn from_nfa(nfa: Nfa) -> Self { - let Nfa { transitions: nfa_transitions, start: nfa_start, accepting: nfa_accepting } = nfa; + /// Compute the union of two `Dfa`s. + pub(crate) fn union(self, other: Self, mut new_state: impl FnMut() -> State) -> Self { + // We implement `union` by lazily initializing a set of states + // corresponding to the product of states in `self` and `other`, and + // then add transitions between these states that correspond to where + // they exist between `self` and `other`. - let mut dfa_transitions: Map> = Map::default(); - let mut nfa_to_dfa: Map = Map::default(); - let dfa_start = State::new(); - nfa_to_dfa.insert(nfa_start, dfa_start); + let a = self; + let b = other; - let mut queue = vec![(nfa_start, dfa_start)]; + let accept = new_state(); - while let Some((nfa_state, dfa_state)) = queue.pop() { - if nfa_state == nfa_accepting { - continue; + let mut mapping: Map<(Option, Option), State> = Map::default(); + + let mut mapped = |(a_state, b_state)| { + if Some(a.accept) == a_state || Some(b.accept) == b_state { + // If either `a_state` or `b_state` are accepting, map to a + // common `accept` state. + accept + } else { + *mapping.entry((a_state, b_state)).or_insert_with(&mut new_state) } + }; + + let start = mapped((Some(a.start), Some(b.start))); + let mut transitions: Map> = Map::default(); + let empty_transitions = Transitions::default(); - for (nfa_transition, next_nfa_states) in nfa_transitions[&nfa_state].iter() { - let dfa_transitions = - dfa_transitions.entry(dfa_state).or_insert_with(Default::default); - - let mapped_state = next_nfa_states.iter().find_map(|x| nfa_to_dfa.get(x).copied()); - - let next_dfa_state = match nfa_transition { - &nfa::Transition::Byte(b) => *dfa_transitions - .byte_transitions - .entry(b) - .or_insert_with(|| mapped_state.unwrap_or_else(State::new)), - &nfa::Transition::Ref(r) => *dfa_transitions - .ref_transitions - .entry(r) - .or_insert_with(|| mapped_state.unwrap_or_else(State::new)), - }; - - for &next_nfa_state in next_nfa_states { - nfa_to_dfa.entry(next_nfa_state).or_insert_with(|| { - queue.push((next_nfa_state, next_dfa_state)); - next_dfa_state - }); + struct WorkQueue { + queue: Vec<(Option, Option)>, + // Track all entries ever enqueued to avoid duplicating work. This + // gives us a guarantee that a given (a_state, b_state) pair will + // only ever be visited once. + enqueued: Set<(Option, Option)>, + } + impl WorkQueue { + fn enqueue(&mut self, a_state: Option, b_state: Option) { + if self.enqueued.insert((a_state, b_state)) { + self.queue.push((a_state, b_state)); } } } + let mut queue = WorkQueue { queue: Vec::new(), enqueued: Set::default() }; + queue.enqueue(Some(a.start), Some(b.start)); + + while let Some((a_src, b_src)) = queue.queue.pop() { + let src = mapped((a_src, b_src)); + if src == accept { + // While it's possible to have a DFA whose accept state has + // out-edges, these do not affect the semantics of the DFA, and + // so there's no point in processing them. Continuing here also + // has the advantage of guaranteeing that we only ever process a + // given node in the output DFA once. In particular, with the + // exception of the accept state, we ensure that we only push a + // given node to the `queue` once. This allows the following + // code to assume that we're processing a node we've never + // processed before, which means we never need to merge two edge + // sets - we only ever need to construct a new edge set from + // whole cloth. + continue; + } + + let a_transitions = + a_src.and_then(|a_src| a.transitions.get(&a_src)).unwrap_or(&empty_transitions); + let b_transitions = + b_src.and_then(|b_src| b.transitions.get(&b_src)).unwrap_or(&empty_transitions); + + let byte_transitions = a_transitions.byte_transitions.union( + &b_transitions.byte_transitions, + |a_dst, b_dst| { + assert!(a_dst.is_some() || b_dst.is_some()); + + queue.enqueue(a_dst, b_dst); + mapped((a_dst, b_dst)) + }, + ); + + let ref_transitions = + a_transitions.ref_transitions.keys().chain(b_transitions.ref_transitions.keys()); + + let ref_transitions = ref_transitions + .map(|ref_transition| { + let a_dst = a_transitions.ref_transitions.get(ref_transition).copied(); + let b_dst = b_transitions.ref_transitions.get(ref_transition).copied(); - let dfa_accepting = nfa_to_dfa[&nfa_accepting]; + assert!(a_dst.is_some() || b_dst.is_some()); - Self { transitions: dfa_transitions, start: dfa_start, accepting: dfa_accepting } + queue.enqueue(a_dst, b_dst); + (*ref_transition, mapped((a_dst, b_dst))) + }) + .collect(); + + let old = transitions.insert(src, Transitions { byte_transitions, ref_transitions }); + // See `if src == accept { ... }` above. The comment there explains + // why this assert is valid. + assert_eq!(old, None); + } + + Self { transitions, start, accept } } - pub(crate) fn bytes_from(&self, start: State) -> Option<&Map> { - Some(&self.transitions.get(&start)?.byte_transitions) + pub(crate) fn get_uninit_edge_dst(&self, state: State) -> Option { + let transitions = self.transitions.get(&state)?; + transitions.byte_transitions.get_uninit_edge_dst() } - pub(crate) fn byte_from(&self, start: State, byte: Byte) -> Option { - self.transitions.get(&start)?.byte_transitions.get(&byte).copied() + pub(crate) fn bytes_from(&self, start: State) -> impl Iterator { + self.transitions + .get(&start) + .into_iter() + .flat_map(|transitions| transitions.byte_transitions.iter()) } - pub(crate) fn refs_from(&self, start: State) -> Option<&Map> { - Some(&self.transitions.get(&start)?.ref_transitions) + pub(crate) fn refs_from(&self, start: State) -> impl Iterator { + self.transitions + .get(&start) + .into_iter() + .flat_map(|transitions| transitions.ref_transitions.iter()) + .map(|(r, s)| (*r, *s)) } -} -impl State { - pub(crate) fn new() -> Self { - static COUNTER: AtomicU32 = AtomicU32::new(0); - Self(COUNTER.fetch_add(1, Ordering::SeqCst)) + #[cfg(test)] + pub(crate) fn from_edges>( + start: u32, + accept: u32, + edges: &[(u32, B, u32)], + ) -> Self { + let start = State(start); + let accept = State(accept); + let mut transitions: Map> = Map::default(); + + for &(src, ref edge, dst) in edges.iter() { + transitions.entry(State(src)).or_default().push((edge.clone().into(), State(dst))); + } + + let transitions = transitions + .into_iter() + .map(|(src, edges)| { + ( + src, + Transitions { + byte_transitions: EdgeSet::from_edges(edges), + ref_transitions: Map::default(), + }, + ) + }) + .collect(); + + Self { start, accept, transitions } } } -#[cfg(test)] -impl From> for Transition +/// Serialize the DFA using the Graphviz DOT format. +impl fmt::Debug for Dfa where R: Ref, { - fn from(nfa_transition: nfa::Transition) -> Self { - match nfa_transition { - nfa::Transition::Byte(byte) => Transition::Byte(byte), - nfa::Transition::Ref(r) => Transition::Ref(r), + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + writeln!(f, "digraph {{")?; + writeln!(f, " {:?} [shape = doublecircle]", self.start)?; + writeln!(f, " {:?} [shape = doublecircle]", self.accept)?; + + for (src, transitions) in self.transitions.iter() { + for (t, dst) in transitions.byte_transitions.iter() { + writeln!(f, " {src:?} -> {dst:?} [label=\"{t:?}\"]")?; + } + + for (t, dst) in transitions.ref_transitions.iter() { + writeln!(f, " {src:?} -> {dst:?} [label=\"{t:?}\"]")?; + } + } + + writeln!(f, "}}") + } +} + +use edge_set::EdgeSet; +mod edge_set { + use smallvec::SmallVec; + + use super::*; + + /// The set of outbound byte edges associated with a DFA node. + #[derive(Eq, PartialEq, Clone, Debug)] + pub(super) struct EdgeSet { + // A sequence of byte edges with contiguous byte values and a common + // destination is stored as a single run. + // + // Runs are non-empty, non-overlapping, and stored in ascending order. + runs: SmallVec<[(Byte, S); 1]>, + } + + impl EdgeSet { + pub(crate) fn new(range: Byte, dst: S) -> Self { + let mut this = Self { runs: SmallVec::new() }; + if !range.is_empty() { + this.runs.push((range, dst)); + } + this + } + + pub(crate) fn empty() -> Self { + Self { runs: SmallVec::new() } + } + + #[cfg(test)] + pub(crate) fn from_edges(mut edges: Vec<(Byte, S)>) -> Self + where + S: Ord, + { + edges.sort(); + Self { runs: edges.into() } + } + + pub(crate) fn iter(&self) -> impl Iterator + where + S: Copy, + { + self.runs.iter().copied() + } + + pub(crate) fn get_uninit_edge_dst(&self) -> Option + where + S: Copy, + { + // Uninit is ordered last. + let &(range, dst) = self.runs.last()?; + if range.contains_uninit() { Some(dst) } else { None } + } + + pub(crate) fn map_states(self, mut f: impl FnMut(S) -> SS) -> EdgeSet { + EdgeSet { + // NOTE: It appears as through ` as + // IntoIterator>::IntoIter` and `std::iter::Map` both implement + // `TrustedLen`, which in turn means that this `.collect()` + // allocates the correct number of elements once up-front [1]. + // + // [1] https://doc.rust-lang.org/1.85.0/src/alloc/vec/spec_from_iter_nested.rs.html#47 + runs: self.runs.into_iter().map(|(b, s)| (b, f(s))).collect(), + } + } + + /// Unions two edge sets together. + /// + /// If `u = a.union(b)`, then for each byte value, `u` will have an edge + /// with that byte value and with the destination `join(Some(_), None)`, + /// `join(None, Some(_))`, or `join(Some(_), Some(_))` depending on whether `a`, + /// `b`, or both have an edge with that byte value. + /// + /// If neither `a` nor `b` have an edge with a particular byte value, + /// then no edge with that value will be present in `u`. + pub(crate) fn union( + &self, + other: &Self, + mut join: impl FnMut(Option, Option) -> S, + ) -> EdgeSet + where + S: Copy + Eq, + { + let mut runs: SmallVec<[(Byte, S); 1]> = SmallVec::new(); + let xs = self.runs.iter().copied(); + let ys = other.runs.iter().copied(); + for (range, (x, y)) in union(xs, ys) { + let state = join(x, y); + match runs.last_mut() { + // Merge contiguous runs with a common destination. + Some(&mut (ref mut last_range, ref mut last_state)) + if last_range.end == range.start && *last_state == state => + { + last_range.end = range.end + } + _ => runs.push((range, state)), + } + } + EdgeSet { runs } + } + } +} + +/// Merges two sorted sequences into one sorted sequence. +pub(crate) fn union, Y: Iterator>( + xs: X, + ys: Y, +) -> UnionIter { + UnionIter { xs: xs.peekable(), ys: ys.peekable() } +} + +pub(crate) struct UnionIter { + xs: Peekable, + ys: Peekable, +} + +// FIXME(jswrenn) we'd likely benefit from specializing try_fold here. +impl, Y: Iterator> Iterator + for UnionIter +{ + type Item = (Byte, (Option, Option)); + + fn next(&mut self) -> Option { + use std::cmp::{self, Ordering}; + + let ret; + match (self.xs.peek_mut(), self.ys.peek_mut()) { + (None, None) => { + ret = None; + } + (Some(x), None) => { + ret = Some((x.0, (Some(x.1), None))); + self.xs.next(); + } + (None, Some(y)) => { + ret = Some((y.0, (None, Some(y.1)))); + self.ys.next(); + } + (Some(x), Some(y)) => { + let start; + let end; + let dst; + match x.0.start.cmp(&y.0.start) { + Ordering::Less => { + start = x.0.start; + end = cmp::min(x.0.end, y.0.start); + dst = (Some(x.1), None); + } + Ordering::Greater => { + start = y.0.start; + end = cmp::min(x.0.start, y.0.end); + dst = (None, Some(y.1)); + } + Ordering::Equal => { + start = x.0.start; + end = cmp::min(x.0.end, y.0.end); + dst = (Some(x.1), Some(y.1)); + } + } + ret = Some((Byte { start, end }, dst)); + if start == x.0.start { + x.0.start = end; + } + if start == y.0.start { + y.0.start = end; + } + if x.0.is_empty() { + self.xs.next(); + } + if y.0.is_empty() { + self.ys.next(); + } + } } + ret } } diff --git a/compiler/rustc_transmute/src/layout/mod.rs b/compiler/rustc_transmute/src/layout/mod.rs index c4c01a8fac31f..c08bf440734e2 100644 --- a/compiler/rustc_transmute/src/layout/mod.rs +++ b/compiler/rustc_transmute/src/layout/mod.rs @@ -1,34 +1,83 @@ use std::fmt::{self, Debug}; use std::hash::Hash; +use std::ops::RangeInclusive; pub(crate) mod tree; pub(crate) use tree::Tree; -pub(crate) mod nfa; -pub(crate) use nfa::Nfa; - pub(crate) mod dfa; -pub(crate) use dfa::Dfa; +pub(crate) use dfa::{Dfa, union}; #[derive(Debug)] pub(crate) struct Uninhabited; -/// An instance of a byte is either initialized to a particular value, or uninitialized. -#[derive(Hash, Eq, PartialEq, Clone, Copy)] -pub(crate) enum Byte { - Uninit, - Init(u8), +/// A range of byte values (including an uninit byte value). +#[derive(Hash, Eq, PartialEq, Ord, PartialOrd, Clone, Copy)] +pub(crate) struct Byte { + // An inclusive-exclusive range. We use this instead of `Range` because `Range: !Copy`. + // + // Uninit byte value is represented by 256. + pub(crate) start: u16, + pub(crate) end: u16, +} + +impl Byte { + const UNINIT: u16 = 256; + + #[inline] + fn new(range: RangeInclusive) -> Self { + let start: u16 = (*range.start()).into(); + let end: u16 = (*range.end()).into(); + Byte { start, end: end + 1 } + } + + #[inline] + fn from_val(val: u8) -> Self { + let val: u16 = val.into(); + Byte { start: val, end: val + 1 } + } + + #[inline] + fn uninit() -> Byte { + Byte { start: 0, end: Self::UNINIT + 1 } + } + + #[inline] + fn is_empty(&self) -> bool { + self.start == self.end + } + + #[inline] + fn contains_uninit(&self) -> bool { + self.start <= Self::UNINIT && Self::UNINIT < self.end + } } impl fmt::Debug for Byte { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match &self { - Self::Uninit => f.write_str("??u8"), - Self::Init(b) => write!(f, "{b:#04x}u8"), + if self.start == Self::UNINIT && self.end == Self::UNINIT + 1 { + write!(f, "uninit") + } else if self.start <= Self::UNINIT && self.end == Self::UNINIT + 1 { + write!(f, "{}..{}|uninit", self.start, self.end - 1) + } else { + write!(f, "{}..{}", self.start, self.end) } } } +impl From> for Byte { + fn from(src: RangeInclusive) -> Self { + Self::new(src) + } +} + +impl From for Byte { + #[inline] + fn from(src: u8) -> Self { + Self::from_val(src) + } +} + pub(crate) trait Def: Debug + Hash + Eq + PartialEq + Copy + Clone { fn has_safety_invariants(&self) -> bool; } @@ -58,6 +107,21 @@ impl Ref for ! { } } +#[cfg(test)] +impl Ref for [(); N] { + fn min_align(&self) -> usize { + N + } + + fn size(&self) -> usize { + N + } + + fn is_mutable(&self) -> bool { + false + } +} + #[cfg(feature = "rustc")] pub mod rustc { use std::fmt::{self, Write}; diff --git a/compiler/rustc_transmute/src/layout/nfa.rs b/compiler/rustc_transmute/src/layout/nfa.rs deleted file mode 100644 index 9c21fd94f03ec..0000000000000 --- a/compiler/rustc_transmute/src/layout/nfa.rs +++ /dev/null @@ -1,169 +0,0 @@ -use std::fmt; -use std::sync::atomic::{AtomicU32, Ordering}; - -use super::{Byte, Ref, Tree, Uninhabited}; -use crate::{Map, Set}; - -/// A non-deterministic finite automaton (NFA) that represents the layout of a type. -/// The transmutability of two given types is computed by comparing their `Nfa`s. -#[derive(PartialEq, Debug)] -pub(crate) struct Nfa -where - R: Ref, -{ - pub(crate) transitions: Map, Set>>, - pub(crate) start: State, - pub(crate) accepting: State, -} - -/// The states in a `Nfa` represent byte offsets. -#[derive(Hash, Eq, PartialEq, PartialOrd, Ord, Copy, Clone)] -pub(crate) struct State(u32); - -/// The transitions between states in a `Nfa` reflect bit validity. -#[derive(Hash, Eq, PartialEq, Clone, Copy)] -pub(crate) enum Transition -where - R: Ref, -{ - Byte(Byte), - Ref(R), -} - -impl fmt::Debug for State { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "S_{}", self.0) - } -} - -impl fmt::Debug for Transition -where - R: Ref, -{ - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match &self { - Self::Byte(b) => b.fmt(f), - Self::Ref(r) => r.fmt(f), - } - } -} - -impl Nfa -where - R: Ref, -{ - pub(crate) fn unit() -> Self { - let transitions: Map, Set>> = Map::default(); - let start = State::new(); - let accepting = start; - - Nfa { transitions, start, accepting } - } - - pub(crate) fn from_byte(byte: Byte) -> Self { - let mut transitions: Map, Set>> = Map::default(); - let start = State::new(); - let accepting = State::new(); - - let source = transitions.entry(start).or_default(); - let edge = source.entry(Transition::Byte(byte)).or_default(); - edge.insert(accepting); - - Nfa { transitions, start, accepting } - } - - pub(crate) fn from_ref(r: R) -> Self { - let mut transitions: Map, Set>> = Map::default(); - let start = State::new(); - let accepting = State::new(); - - let source = transitions.entry(start).or_default(); - let edge = source.entry(Transition::Ref(r)).or_default(); - edge.insert(accepting); - - Nfa { transitions, start, accepting } - } - - pub(crate) fn from_tree(tree: Tree) -> Result { - Ok(match tree { - Tree::Byte(b) => Self::from_byte(b), - Tree::Ref(r) => Self::from_ref(r), - Tree::Alt(alts) => { - let mut alts = alts.into_iter().map(Self::from_tree); - let mut nfa = alts.next().ok_or(Uninhabited)??; - for alt in alts { - nfa = nfa.union(alt?); - } - nfa - } - Tree::Seq(elts) => { - let mut nfa = Self::unit(); - for elt in elts.into_iter().map(Self::from_tree) { - nfa = nfa.concat(elt?); - } - nfa - } - }) - } - - /// Concatenate two `Nfa`s. - pub(crate) fn concat(self, other: Self) -> Self { - if self.start == self.accepting { - return other; - } else if other.start == other.accepting { - return self; - } - - let start = self.start; - let accepting = other.accepting; - - let mut transitions: Map, Set>> = self.transitions; - - for (source, transition) in other.transitions { - let fix_state = |state| if state == other.start { self.accepting } else { state }; - let entry = transitions.entry(fix_state(source)).or_default(); - for (edge, destinations) in transition { - let entry = entry.entry(edge).or_default(); - for destination in destinations { - entry.insert(fix_state(destination)); - } - } - } - - Self { transitions, start, accepting } - } - - /// Compute the union of two `Nfa`s. - pub(crate) fn union(self, other: Self) -> Self { - let start = self.start; - let accepting = self.accepting; - - let mut transitions: Map, Set>> = self.transitions.clone(); - - for (&(mut source), transition) in other.transitions.iter() { - // if source is starting state of `other`, replace with starting state of `self` - if source == other.start { - source = self.start; - } - let entry = transitions.entry(source).or_default(); - for (edge, destinations) in transition { - let entry = entry.entry(*edge).or_default(); - for &(mut destination) in destinations { - // if dest is accepting state of `other`, replace with accepting state of `self` - if destination == other.accepting { - destination = self.accepting; - } - entry.insert(destination); - } - } - } - Self { transitions, start, accepting } - } -} - -impl State { - pub(crate) fn new() -> Self { - static COUNTER: AtomicU32 = AtomicU32::new(0); - Self(COUNTER.fetch_add(1, Ordering::SeqCst)) - } -} diff --git a/compiler/rustc_transmute/src/layout/tree.rs b/compiler/rustc_transmute/src/layout/tree.rs index a21be5dda4ee0..7cf712ce9e977 100644 --- a/compiler/rustc_transmute/src/layout/tree.rs +++ b/compiler/rustc_transmute/src/layout/tree.rs @@ -1,4 +1,4 @@ -use std::ops::ControlFlow; +use std::ops::{ControlFlow, RangeInclusive}; use super::{Byte, Def, Ref}; @@ -32,6 +32,22 @@ where Byte(Byte), } +#[derive(Debug, Copy, Clone, Eq, PartialEq)] +pub(crate) enum Endian { + Little, + Big, +} + +#[cfg(feature = "rustc")] +impl From for Endian { + fn from(order: rustc_abi::Endian) -> Endian { + match order { + rustc_abi::Endian::Little => Endian::Little, + rustc_abi::Endian::Big => Endian::Big, + } + } +} + impl Tree where D: Def, @@ -54,27 +70,65 @@ where /// A `Tree` containing a single, uninitialized byte. pub(crate) fn uninit() -> Self { - Self::Byte(Byte::Uninit) + Self::Byte(Byte::uninit()) } /// A `Tree` representing the layout of `bool`. pub(crate) fn bool() -> Self { - Self::from_bits(0x00).or(Self::from_bits(0x01)) + Self::byte(0x00..=0x01) } /// A `Tree` whose layout matches that of a `u8`. pub(crate) fn u8() -> Self { - Self::Alt((0u8..=255).map(Self::from_bits).collect()) + Self::byte(0x00..=0xFF) + } + + /// A `Tree` whose layout matches that of a `char`. + pub(crate) fn char(order: Endian) -> Self { + // `char`s can be in the following ranges: + // - [0, 0xD7FF] + // - [0xE000, 10FFFF] + // + // All other `char` values are illegal. We can thus represent a `char` + // as a union of three possible layouts: + // - 00 00 [00, D7] XX + // - 00 00 [E0, FF] XX + // - 00 [01, 10] XX XX + + const _0: RangeInclusive = 0..=0; + const BYTE: RangeInclusive = 0x00..=0xFF; + let x = Self::from_big_endian(order, [_0, _0, 0x00..=0xD7, BYTE]); + let y = Self::from_big_endian(order, [_0, _0, 0xE0..=0xFF, BYTE]); + let z = Self::from_big_endian(order, [_0, 0x01..=0x10, BYTE, BYTE]); + Self::alt([x, y, z]) } - /// A `Tree` whose layout accepts exactly the given bit pattern. - pub(crate) fn from_bits(bits: u8) -> Self { - Self::Byte(Byte::Init(bits)) + /// A `Tree` whose layout matches `std::num::NonZeroXxx`. + #[allow(dead_code)] + pub(crate) fn nonzero(width_in_bytes: u64) -> Self { + const BYTE: RangeInclusive = 0x00..=0xFF; + const NONZERO: RangeInclusive = 0x01..=0xFF; + + (0..width_in_bytes) + .map(|nz_idx| { + (0..width_in_bytes) + .map(|pos| Self::byte(if pos == nz_idx { NONZERO } else { BYTE })) + .fold(Self::unit(), Self::then) + }) + .fold(Self::uninhabited(), Self::or) + } + + pub(crate) fn bytes>(bytes: [B; N]) -> Self { + Self::seq(bytes.map(B::into).map(Self::Byte)) + } + + pub(crate) fn byte(byte: impl Into) -> Self { + Self::Byte(byte.into()) } /// A `Tree` whose layout is a number of the given width. - pub(crate) fn number(width_in_bytes: usize) -> Self { - Self::Seq(vec![Self::u8(); width_in_bytes]) + pub(crate) fn number(width_in_bytes: u64) -> Self { + Self::Seq(vec![Self::u8(); width_in_bytes.try_into().unwrap()]) } /// A `Tree` whose layout is entirely padding of the given width. @@ -125,13 +179,35 @@ where Self::Byte(..) | Self::Ref(..) | Self::Def(..) => true, } } -} -impl Tree -where - D: Def, - R: Ref, -{ + /// Produces a `Tree` which represents a sequence of bytes stored in + /// `order`. + /// + /// `bytes` is taken to be in big-endian byte order, and its order will be + /// swapped if `order == Endian::Little`. + pub(crate) fn from_big_endian>( + order: Endian, + mut bytes: [B; N], + ) -> Self { + if order == Endian::Little { + (&mut bytes[..]).reverse(); + } + + Self::bytes(bytes) + } + + /// Produces a `Tree` where each of the trees in `trees` are sequenced one + /// after another. + pub(crate) fn seq(trees: [Tree; N]) -> Self { + trees.into_iter().fold(Tree::unit(), Self::then) + } + + /// Produces a `Tree` where each of the trees in `trees` are accepted as + /// alternative layouts. + pub(crate) fn alt(trees: [Tree; N]) -> Self { + trees.into_iter().fold(Tree::uninhabited(), Self::or) + } + /// Produces a new `Tree` where `other` is sequenced after `self`. pub(crate) fn then(self, other: Self) -> Self { match (self, other) { @@ -222,17 +298,17 @@ pub(crate) mod rustc { ty::Float(nty) => { let width = nty.bit_width() / 8; - Ok(Self::number(width as _)) + Ok(Self::number(width.try_into().unwrap())) } ty::Int(nty) => { let width = nty.normalize(pointer_size.bits() as _).bit_width().unwrap() / 8; - Ok(Self::number(width as _)) + Ok(Self::number(width.try_into().unwrap())) } ty::Uint(nty) => { let width = nty.normalize(pointer_size.bits() as _).bit_width().unwrap() / 8; - Ok(Self::number(width as _)) + Ok(Self::number(width.try_into().unwrap())) } ty::Tuple(members) => Self::from_tuple((ty, layout), members, cx), @@ -249,11 +325,33 @@ pub(crate) mod rustc { .fold(Tree::unit(), |tree, elt| tree.then(elt))) } - ty::Adt(adt_def, _args_ref) if !ty.is_box() => match adt_def.adt_kind() { - AdtKind::Struct => Self::from_struct((ty, layout), *adt_def, cx), - AdtKind::Enum => Self::from_enum((ty, layout), *adt_def, cx), - AdtKind::Union => Self::from_union((ty, layout), *adt_def, cx), - }, + ty::Adt(adt_def, _args_ref) if !ty.is_box() => { + let (lo, hi) = cx.tcx().layout_scalar_valid_range(adt_def.did()); + + use core::ops::Bound::*; + let is_transparent = adt_def.repr().transparent(); + match (adt_def.adt_kind(), lo, hi) { + (AdtKind::Struct, Unbounded, Unbounded) => { + Self::from_struct((ty, layout), *adt_def, cx) + } + (AdtKind::Struct, Included(1), Included(_hi)) if is_transparent => { + // FIXME(@joshlf): Support `NonZero` types: + // - Check to make sure that the first field is + // numerical + // - Check to make sure that the upper bound is the + // maximum value for the field's type + // - Construct `Self::nonzero` + Err(Err::NotYetSupported) + } + (AdtKind::Enum, Unbounded, Unbounded) => { + Self::from_enum((ty, layout), *adt_def, cx) + } + (AdtKind::Union, Unbounded, Unbounded) => { + Self::from_union((ty, layout), *adt_def, cx) + } + _ => Err(Err::NotYetSupported), + } + } ty::Ref(lifetime, ty, mutability) => { let layout = layout_of(cx, *ty)?; @@ -268,6 +366,8 @@ pub(crate) mod rustc { })) } + ty::Char => Ok(Self::char(cx.tcx().data_layout.endian.into())), + _ => Err(Err::NotYetSupported), } } @@ -450,7 +550,7 @@ pub(crate) mod rustc { &bytes[bytes.len() - size.bytes_usize()..] } }; - Self::Seq(bytes.iter().map(|&b| Self::from_bits(b)).collect()) + Self::Seq(bytes.iter().map(|&b| Self::byte(b)).collect()) } /// Constructs a `Tree` from a union. @@ -514,7 +614,7 @@ pub(crate) mod rustc { } } ty::Tuple(fields) => fields[i.as_usize()], - kind @ _ => unimplemented!( + kind => unimplemented!( "only a subset of `Ty::ty_and_layout_field`'s functionality is implemented. implementation needed for {:?}", kind ), diff --git a/compiler/rustc_transmute/src/layout/tree/tests.rs b/compiler/rustc_transmute/src/layout/tree/tests.rs index 44f50a25c939a..8c3dbbe37ab21 100644 --- a/compiler/rustc_transmute/src/layout/tree/tests.rs +++ b/compiler/rustc_transmute/src/layout/tree/tests.rs @@ -20,23 +20,18 @@ mod prune { #[test] fn seq_1() { - let layout: Tree = - Tree::def(Def::NoSafetyInvariants).then(Tree::from_bits(0x00)); - assert_eq!( - layout.prune(&|d| matches!(d, Def::HasSafetyInvariants)), - Tree::from_bits(0x00) - ); + let layout: Tree = Tree::def(Def::NoSafetyInvariants).then(Tree::byte(0x00)); + assert_eq!(layout.prune(&|d| matches!(d, Def::HasSafetyInvariants)), Tree::byte(0x00)); } #[test] fn seq_2() { - let layout: Tree = Tree::from_bits(0x00) - .then(Tree::def(Def::NoSafetyInvariants)) - .then(Tree::from_bits(0x01)); + let layout: Tree = + Tree::byte(0x00).then(Tree::def(Def::NoSafetyInvariants)).then(Tree::byte(0x01)); assert_eq!( layout.prune(&|d| matches!(d, Def::HasSafetyInvariants)), - Tree::from_bits(0x00).then(Tree::from_bits(0x01)) + Tree::byte(0x00).then(Tree::byte(0x01)) ); } } @@ -66,7 +61,7 @@ mod prune { #[test] fn invisible_def_in_seq_len_3() { let layout: Tree = Tree::def(Def::NoSafetyInvariants) - .then(Tree::from_bits(0x00)) + .then(Tree::byte(0x00)) .then(Tree::def(Def::HasSafetyInvariants)); assert_eq!( layout.prune(&|d| matches!(d, Def::HasSafetyInvariants)), @@ -94,12 +89,9 @@ mod prune { #[test] fn visible_def_in_seq_len_3() { let layout: Tree = Tree::def(Def::NoSafetyInvariants) - .then(Tree::from_bits(0x00)) + .then(Tree::byte(0x00)) .then(Tree::def(Def::NoSafetyInvariants)); - assert_eq!( - layout.prune(&|d| matches!(d, Def::HasSafetyInvariants)), - Tree::from_bits(0x00) - ); + assert_eq!(layout.prune(&|d| matches!(d, Def::HasSafetyInvariants)), Tree::byte(0x00)); } } } diff --git a/compiler/rustc_transmute/src/lib.rs b/compiler/rustc_transmute/src/lib.rs index 7d800e49ff45f..ce18dad55179c 100644 --- a/compiler/rustc_transmute/src/lib.rs +++ b/compiler/rustc_transmute/src/lib.rs @@ -1,5 +1,5 @@ // tidy-alphabetical-start -#![cfg_attr(doc, recursion_limit = "256")] // FIXME(nnethercote): will be removed by #124141 +#![cfg_attr(test, feature(test))] #![feature(never_type)] // tidy-alphabetical-end diff --git a/compiler/rustc_transmute/src/maybe_transmutable/mod.rs b/compiler/rustc_transmute/src/maybe_transmutable/mod.rs index 63fabc9c83d93..f76abe50ed343 100644 --- a/compiler/rustc_transmute/src/maybe_transmutable/mod.rs +++ b/compiler/rustc_transmute/src/maybe_transmutable/mod.rs @@ -1,10 +1,11 @@ +use rustc_data_structures::stack::ensure_sufficient_stack; use tracing::{debug, instrument, trace}; pub(crate) mod query_context; #[cfg(test)] mod tests; -use crate::layout::{self, Byte, Def, Dfa, Nfa, Ref, Tree, Uninhabited, dfa}; +use crate::layout::{self, Def, Dfa, Ref, Tree, dfa, union}; use crate::maybe_transmutable::query_context::QueryContext; use crate::{Answer, Condition, Map, Reason}; @@ -73,7 +74,7 @@ where /// Answers whether a `Tree` is transmutable into another `Tree`. /// /// This method begins by de-def'ing `src` and `dst`, and prunes private paths from `dst`, - /// then converts `src` and `dst` to `Nfa`s, and computes an answer using those NFAs. + /// then converts `src` and `dst` to `Dfa`s, and computes an answer using those DFAs. #[inline(always)] #[instrument(level = "debug", skip(self), fields(src = ?self.src, dst = ?self.dst))] pub(crate) fn answer(self) -> Answer<::Ref> { @@ -105,47 +106,30 @@ where trace!(?dst, "pruned dst"); - // Convert `src` from a tree-based representation to an NFA-based + // Convert `src` from a tree-based representation to an DFA-based // representation. If the conversion fails because `src` is uninhabited, // conclude that the transmutation is acceptable, because instances of // the `src` type do not exist. - let src = match Nfa::from_tree(src) { + let src = match Dfa::from_tree(src) { Ok(src) => src, - Err(Uninhabited) => return Answer::Yes, + Err(layout::Uninhabited) => return Answer::Yes, }; - // Convert `dst` from a tree-based representation to an NFA-based + // Convert `dst` from a tree-based representation to an DFA-based // representation. If the conversion fails because `src` is uninhabited, // conclude that the transmutation is unacceptable. Valid instances of // the `dst` type do not exist, either because it's genuinely // uninhabited, or because there are no branches of the tree that are // free of safety invariants. - let dst = match Nfa::from_tree(dst) { + let dst = match Dfa::from_tree(dst) { Ok(dst) => dst, - Err(Uninhabited) => return Answer::No(Reason::DstMayHaveSafetyInvariants), + Err(layout::Uninhabited) => return Answer::No(Reason::DstMayHaveSafetyInvariants), }; MaybeTransmutableQuery { src, dst, assume, context }.answer() } } -impl MaybeTransmutableQuery::Ref>, C> -where - C: QueryContext, -{ - /// Answers whether a `Nfa` is transmutable into another `Nfa`. - /// - /// This method converts `src` and `dst` to DFAs, then computes an answer using those DFAs. - #[inline(always)] - #[instrument(level = "debug", skip(self), fields(src = ?self.src, dst = ?self.dst))] - pub(crate) fn answer(self) -> Answer<::Ref> { - let Self { src, dst, assume, context } = self; - let src = Dfa::from_nfa(src); - let dst = Dfa::from_nfa(dst); - MaybeTransmutableQuery { src, dst, assume, context }.answer() - } -} - impl MaybeTransmutableQuery::Ref>, C> where C: QueryContext, @@ -166,146 +150,135 @@ where if let Some(answer) = cache.get(&(src_state, dst_state)) { answer.clone() } else { - debug!(?src_state, ?dst_state); - debug!(src = ?self.src); - debug!(dst = ?self.dst); - debug!( - src_transitions_len = self.src.transitions.len(), - dst_transitions_len = self.dst.transitions.len() - ); - let answer = if dst_state == self.dst.accepting { - // truncation: `size_of(Src) >= size_of(Dst)` - // - // Why is truncation OK to do? Because even though the Src is bigger, all we care about - // is whether we have enough data for the Dst to be valid in accordance with what its - // type dictates. - // For example, in a u8 to `()` transmutation, we have enough data available from the u8 - // to transmute it to a `()` (though in this case does `()` really need any data to - // begin with? It doesn't). Same thing with u8 to fieldless struct. - // Now then, why is something like u8 to bool not allowed? That is not because the bool - // is smaller in size, but rather because those 2 bits that we are re-interpreting from - // the u8 could introduce invalid states for the bool type. - // - // So, if it's possible to transmute to a smaller Dst by truncating, and we can guarantee - // that none of the actually-used data can introduce an invalid state for Dst's type, we - // are able to safely transmute, even with truncation. - Answer::Yes - } else if src_state == self.src.accepting { - // extension: `size_of(Src) >= size_of(Dst)` - if let Some(dst_state_prime) = self.dst.byte_from(dst_state, Byte::Uninit) { - self.answer_memo(cache, src_state, dst_state_prime) - } else { - Answer::No(Reason::DstIsTooBig) - } + let answer = ensure_sufficient_stack(|| self.answer_impl(cache, src_state, dst_state)); + if let Some(..) = cache.insert((src_state, dst_state), answer.clone()) { + panic!("failed to correctly cache transmutability") + } + answer + } + } + + fn answer_impl( + &self, + cache: &mut Map<(dfa::State, dfa::State), Answer<::Ref>>, + src_state: dfa::State, + dst_state: dfa::State, + ) -> Answer<::Ref> { + debug!(?src_state, ?dst_state); + debug!(src = ?self.src); + debug!(dst = ?self.dst); + debug!( + src_transitions_len = self.src.transitions.len(), + dst_transitions_len = self.dst.transitions.len() + ); + if dst_state == self.dst.accept { + // truncation: `size_of(Src) >= size_of(Dst)` + // + // Why is truncation OK to do? Because even though the Src is bigger, all we care about + // is whether we have enough data for the Dst to be valid in accordance with what its + // type dictates. + // For example, in a u8 to `()` transmutation, we have enough data available from the u8 + // to transmute it to a `()` (though in this case does `()` really need any data to + // begin with? It doesn't). Same thing with u8 to fieldless struct. + // Now then, why is something like u8 to bool not allowed? That is not because the bool + // is smaller in size, but rather because those 2 bits that we are re-interpreting from + // the u8 could introduce invalid states for the bool type. + // + // So, if it's possible to transmute to a smaller Dst by truncating, and we can guarantee + // that none of the actually-used data can introduce an invalid state for Dst's type, we + // are able to safely transmute, even with truncation. + Answer::Yes + } else if src_state == self.src.accept { + // extension: `size_of(Src) <= size_of(Dst)` + if let Some(dst_state_prime) = self.dst.get_uninit_edge_dst(dst_state) { + self.answer_memo(cache, src_state, dst_state_prime) } else { - let src_quantifier = if self.assume.validity { - // if the compiler may assume that the programmer is doing additional validity checks, - // (e.g.: that `src != 3u8` when the destination type is `bool`) - // then there must exist at least one transition out of `src_state` such that the transmute is viable... - Quantifier::ThereExists - } else { - // if the compiler cannot assume that the programmer is doing additional validity checks, - // then for all transitions out of `src_state`, such that the transmute is viable... - // then there must exist at least one transition out of `dst_state` such that the transmute is viable... - Quantifier::ForAll - }; - - let bytes_answer = src_quantifier.apply( - // for each of the byte transitions out of the `src_state`... - self.src.bytes_from(src_state).unwrap_or(&Map::default()).into_iter().map( - |(&src_validity, &src_state_prime)| { - // ...try to find a matching transition out of `dst_state`. - if let Some(dst_state_prime) = - self.dst.byte_from(dst_state, src_validity) - { - self.answer_memo(cache, src_state_prime, dst_state_prime) - } else if let Some(dst_state_prime) = - // otherwise, see if `dst_state` has any outgoing `Uninit` transitions - // (any init byte is a valid uninit byte) - self.dst.byte_from(dst_state, Byte::Uninit) + Answer::No(Reason::DstIsTooBig) + } + } else { + let src_quantifier = if self.assume.validity { + // if the compiler may assume that the programmer is doing additional validity checks, + // (e.g.: that `src != 3u8` when the destination type is `bool`) + // then there must exist at least one transition out of `src_state` such that the transmute is viable... + Quantifier::ThereExists + } else { + // if the compiler cannot assume that the programmer is doing additional validity checks, + // then for all transitions out of `src_state`, such that the transmute is viable... + // then there must exist at least one transition out of `dst_state` such that the transmute is viable... + Quantifier::ForAll + }; + + let bytes_answer = src_quantifier.apply( + union(self.src.bytes_from(src_state), self.dst.bytes_from(dst_state)).filter_map( + |(_range, (src_state_prime, dst_state_prime))| { + match (src_state_prime, dst_state_prime) { + // No matching transitions in `src`. Skip. + (None, _) => None, + // No matching transitions in `dst`. Fail. + (Some(_), None) => Some(Answer::No(Reason::DstIsBitIncompatible)), + // Matching transitions. Continue with successor states. + (Some(src_state_prime), Some(dst_state_prime)) => { + Some(self.answer_memo(cache, src_state_prime, dst_state_prime)) + } + } + }, + ), + ); + + // The below early returns reflect how this code would behave: + // if self.assume.validity { + // or(bytes_answer, refs_answer) + // } else { + // and(bytes_answer, refs_answer) + // } + // ...if `refs_answer` was computed lazily. The below early + // returns can be deleted without impacting the correctness of + // the algorithm; only its performance. + debug!(?bytes_answer); + match bytes_answer { + Answer::No(_) if !self.assume.validity => return bytes_answer, + Answer::Yes if self.assume.validity => return bytes_answer, + _ => {} + }; + + let refs_answer = src_quantifier.apply( + // for each reference transition out of `src_state`... + self.src.refs_from(src_state).map(|(src_ref, src_state_prime)| { + // ...there exists a reference transition out of `dst_state`... + Quantifier::ThereExists.apply(self.dst.refs_from(dst_state).map( + |(dst_ref, dst_state_prime)| { + if !src_ref.is_mutable() && dst_ref.is_mutable() { + Answer::No(Reason::DstIsMoreUnique) + } else if !self.assume.alignment + && src_ref.min_align() < dst_ref.min_align() { - self.answer_memo(cache, src_state_prime, dst_state_prime) + Answer::No(Reason::DstHasStricterAlignment { + src_min_align: src_ref.min_align(), + dst_min_align: dst_ref.min_align(), + }) + } else if dst_ref.size() > src_ref.size() { + Answer::No(Reason::DstRefIsTooBig { src: src_ref, dst: dst_ref }) } else { - // otherwise, we've exhausted our options. - // the DFAs, from this point onwards, are bit-incompatible. - Answer::No(Reason::DstIsBitIncompatible) - } - }, - ), - ); - - // The below early returns reflect how this code would behave: - // if self.assume.validity { - // or(bytes_answer, refs_answer) - // } else { - // and(bytes_answer, refs_answer) - // } - // ...if `refs_answer` was computed lazily. The below early - // returns can be deleted without impacting the correctness of - // the algorithm; only its performance. - debug!(?bytes_answer); - match bytes_answer { - Answer::No(_) if !self.assume.validity => return bytes_answer, - Answer::Yes if self.assume.validity => return bytes_answer, - _ => {} - }; - - let refs_answer = src_quantifier.apply( - // for each reference transition out of `src_state`... - self.src.refs_from(src_state).unwrap_or(&Map::default()).into_iter().map( - |(&src_ref, &src_state_prime)| { - // ...there exists a reference transition out of `dst_state`... - Quantifier::ThereExists.apply( - self.dst - .refs_from(dst_state) - .unwrap_or(&Map::default()) - .into_iter() - .map(|(&dst_ref, &dst_state_prime)| { - if !src_ref.is_mutable() && dst_ref.is_mutable() { - Answer::No(Reason::DstIsMoreUnique) - } else if !self.assume.alignment - && src_ref.min_align() < dst_ref.min_align() - { - Answer::No(Reason::DstHasStricterAlignment { - src_min_align: src_ref.min_align(), - dst_min_align: dst_ref.min_align(), - }) - } else if dst_ref.size() > src_ref.size() { - Answer::No(Reason::DstRefIsTooBig { - src: src_ref, - dst: dst_ref, - }) - } else { - // ...such that `src` is transmutable into `dst`, if - // `src_ref` is transmutability into `dst_ref`. - and( - Answer::If(Condition::IfTransmutable { - src: src_ref, - dst: dst_ref, - }), - self.answer_memo( - cache, - src_state_prime, - dst_state_prime, - ), - ) - } + // ...such that `src` is transmutable into `dst`, if + // `src_ref` is transmutability into `dst_ref`. + and( + Answer::If(Condition::IfTransmutable { + src: src_ref, + dst: dst_ref, }), - ) + self.answer_memo(cache, src_state_prime, dst_state_prime), + ) + } }, - ), - ); + )) + }), + ); - if self.assume.validity { - or(bytes_answer, refs_answer) - } else { - and(bytes_answer, refs_answer) - } - }; - if let Some(..) = cache.insert((src_state, dst_state), answer.clone()) { - panic!("failed to correctly cache transmutability") + if self.assume.validity { + or(bytes_answer, refs_answer) + } else { + and(bytes_answer, refs_answer) } - answer } } } diff --git a/compiler/rustc_transmute/src/maybe_transmutable/query_context.rs b/compiler/rustc_transmute/src/maybe_transmutable/query_context.rs index f8b59bdf32684..214da101be375 100644 --- a/compiler/rustc_transmute/src/maybe_transmutable/query_context.rs +++ b/compiler/rustc_transmute/src/maybe_transmutable/query_context.rs @@ -8,9 +8,17 @@ pub(crate) trait QueryContext { #[cfg(test)] pub(crate) mod test { + use std::marker::PhantomData; + use super::QueryContext; - pub(crate) struct UltraMinimal; + pub(crate) struct UltraMinimal(PhantomData); + + impl Default for UltraMinimal { + fn default() -> Self { + Self(PhantomData) + } + } #[derive(Debug, Hash, Eq, PartialEq, Clone, Copy)] pub(crate) enum Def { @@ -24,9 +32,9 @@ pub(crate) mod test { } } - impl QueryContext for UltraMinimal { + impl QueryContext for UltraMinimal { type Def = Def; - type Ref = !; + type Ref = R; } } diff --git a/compiler/rustc_transmute/src/maybe_transmutable/tests.rs b/compiler/rustc_transmute/src/maybe_transmutable/tests.rs index 4d81382eba02c..0227ad71ae660 100644 --- a/compiler/rustc_transmute/src/maybe_transmutable/tests.rs +++ b/compiler/rustc_transmute/src/maybe_transmutable/tests.rs @@ -1,100 +1,185 @@ +extern crate test; + use itertools::Itertools; use super::query_context::test::{Def, UltraMinimal}; -use crate::maybe_transmutable::MaybeTransmutableQuery; -use crate::{Reason, layout}; +use crate::{Answer, Assume, Reason, layout}; + +type Tree = layout::Tree; +type Dfa = layout::Dfa; + +trait Representation { + fn is_transmutable(src: Self, dst: Self, assume: Assume) -> Answer; +} + +impl Representation for Tree { + fn is_transmutable(src: Self, dst: Self, assume: Assume) -> Answer { + crate::maybe_transmutable::MaybeTransmutableQuery::new( + src, + dst, + assume, + UltraMinimal::default(), + ) + .answer() + } +} + +impl Representation for Dfa { + fn is_transmutable(src: Self, dst: Self, assume: Assume) -> Answer { + crate::maybe_transmutable::MaybeTransmutableQuery::new( + src, + dst, + assume, + UltraMinimal::default(), + ) + .answer() + } +} + +fn is_transmutable( + src: &R, + dst: &R, + assume: Assume, +) -> crate::Answer { + let src = src.clone(); + let dst = dst.clone(); + // The only dimension of the transmutability analysis we want to test + // here is the safety analysis. To ensure this, we disable all other + // toggleable aspects of the transmutability analysis. + R::is_transmutable(src, dst, assume) +} mod safety { use super::*; use crate::Answer; - type Tree = layout::Tree; - const DST_HAS_SAFETY_INVARIANTS: Answer = Answer::No(crate::Reason::DstMayHaveSafetyInvariants); - fn is_transmutable(src: &Tree, dst: &Tree, assume_safety: bool) -> crate::Answer { - let src = src.clone(); - let dst = dst.clone(); - // The only dimension of the transmutability analysis we want to test - // here is the safety analysis. To ensure this, we disable all other - // toggleable aspects of the transmutability analysis. - let assume = crate::Assume { - alignment: true, - lifetimes: true, - validity: true, - safety: assume_safety, - }; - crate::maybe_transmutable::MaybeTransmutableQuery::new(src, dst, assume, UltraMinimal) - .answer() - } - #[test] fn src_safe_dst_safe() { let src = Tree::Def(Def::NoSafetyInvariants).then(Tree::u8()); let dst = Tree::Def(Def::NoSafetyInvariants).then(Tree::u8()); - assert_eq!(is_transmutable(&src, &dst, false), Answer::Yes); - assert_eq!(is_transmutable(&src, &dst, true), Answer::Yes); + assert_eq!(is_transmutable(&src, &dst, Assume::default()), Answer::Yes); + assert_eq!( + is_transmutable(&src, &dst, Assume { safety: true, ..Assume::default() }), + Answer::Yes + ); } #[test] fn src_safe_dst_unsafe() { let src = Tree::Def(Def::NoSafetyInvariants).then(Tree::u8()); let dst = Tree::Def(Def::HasSafetyInvariants).then(Tree::u8()); - assert_eq!(is_transmutable(&src, &dst, false), DST_HAS_SAFETY_INVARIANTS); - assert_eq!(is_transmutable(&src, &dst, true), Answer::Yes); + assert_eq!(is_transmutable(&src, &dst, Assume::default()), DST_HAS_SAFETY_INVARIANTS); + assert_eq!( + is_transmutable(&src, &dst, Assume { safety: true, ..Assume::default() }), + Answer::Yes + ); } #[test] fn src_unsafe_dst_safe() { let src = Tree::Def(Def::HasSafetyInvariants).then(Tree::u8()); let dst = Tree::Def(Def::NoSafetyInvariants).then(Tree::u8()); - assert_eq!(is_transmutable(&src, &dst, false), Answer::Yes); - assert_eq!(is_transmutable(&src, &dst, true), Answer::Yes); + assert_eq!(is_transmutable(&src, &dst, Assume::default()), Answer::Yes); + assert_eq!( + is_transmutable(&src, &dst, Assume { safety: true, ..Assume::default() }), + Answer::Yes + ); } #[test] fn src_unsafe_dst_unsafe() { let src = Tree::Def(Def::HasSafetyInvariants).then(Tree::u8()); let dst = Tree::Def(Def::HasSafetyInvariants).then(Tree::u8()); - assert_eq!(is_transmutable(&src, &dst, false), DST_HAS_SAFETY_INVARIANTS); - assert_eq!(is_transmutable(&src, &dst, true), Answer::Yes); + assert_eq!(is_transmutable(&src, &dst, Assume::default()), DST_HAS_SAFETY_INVARIANTS); + assert_eq!( + is_transmutable(&src, &dst, Assume { safety: true, ..Assume::default() }), + Answer::Yes + ); + } +} + +mod size { + use super::*; + + #[test] + fn size() { + let small = Tree::number(1); + let large = Tree::number(2); + + for alignment in [false, true] { + for lifetimes in [false, true] { + for safety in [false, true] { + for validity in [false, true] { + let assume = Assume { alignment, lifetimes, safety, validity }; + assert_eq!( + is_transmutable(&small, &large, assume), + Answer::No(Reason::DstIsTooBig), + "assume: {assume:?}" + ); + assert_eq!( + is_transmutable(&large, &small, assume), + Answer::Yes, + "assume: {assume:?}" + ); + } + } + } + } } } mod bool { use super::*; - use crate::Answer; #[test] fn should_permit_identity_transmutation_tree() { - let answer = crate::maybe_transmutable::MaybeTransmutableQuery::new( - layout::Tree::::bool(), - layout::Tree::::bool(), - crate::Assume { alignment: false, lifetimes: false, validity: true, safety: false }, - UltraMinimal, - ) - .answer(); - assert_eq!(answer, Answer::Yes); + let src = Tree::bool(); + assert_eq!(is_transmutable(&src, &src, Assume::default()), Answer::Yes); + assert_eq!( + is_transmutable(&src, &src, Assume { validity: true, ..Assume::default() }), + Answer::Yes + ); } #[test] fn should_permit_identity_transmutation_dfa() { - let answer = crate::maybe_transmutable::MaybeTransmutableQuery::new( - layout::Dfa::::bool(), - layout::Dfa::::bool(), - crate::Assume { alignment: false, lifetimes: false, validity: true, safety: false }, - UltraMinimal, - ) - .answer(); - assert_eq!(answer, Answer::Yes); + let src = Dfa::bool(); + assert_eq!(is_transmutable(&src, &src, Assume::default()), Answer::Yes); + assert_eq!( + is_transmutable(&src, &src, Assume { validity: true, ..Assume::default() }), + Answer::Yes + ); + } + + #[test] + fn transmute_u8() { + let bool = &Tree::bool(); + let u8 = &Tree::u8(); + for (src, dst, assume_validity, answer) in [ + (bool, u8, false, Answer::Yes), + (bool, u8, true, Answer::Yes), + (u8, bool, false, Answer::No(Reason::DstIsBitIncompatible)), + (u8, bool, true, Answer::Yes), + ] { + assert_eq!( + is_transmutable( + src, + dst, + Assume { validity: assume_validity, ..Assume::default() } + ), + answer + ); + } } #[test] fn should_permit_validity_expansion_and_reject_contraction() { - let b0 = layout::Tree::::from_bits(0); - let b1 = layout::Tree::::from_bits(1); - let b2 = layout::Tree::::from_bits(2); + let b0 = layout::Tree::::byte(0); + let b1 = layout::Tree::::byte(1); + let b2 = layout::Tree::::byte(2); let alts = [b0, b1, b2]; @@ -104,7 +189,7 @@ mod bool { let into_set = |alts: Vec<_>| { #[cfg(feature = "rustc")] - let mut set = crate::Set::default(); + let mut set = rustc_data_structures::fx::FxIndexSet::default(); #[cfg(not(feature = "rustc"))] let mut set = std::collections::HashSet::new(); set.extend(alts); @@ -122,13 +207,7 @@ mod bool { if src_set.is_subset(&dst_set) { assert_eq!( Answer::Yes, - MaybeTransmutableQuery::new( - src_layout.clone(), - dst_layout.clone(), - crate::Assume { validity: false, ..crate::Assume::default() }, - UltraMinimal, - ) - .answer(), + is_transmutable(&src_layout, &dst_layout, Assume::default()), "{:?} SHOULD be transmutable into {:?}", src_layout, dst_layout @@ -136,13 +215,11 @@ mod bool { } else if !src_set.is_disjoint(&dst_set) { assert_eq!( Answer::Yes, - MaybeTransmutableQuery::new( - src_layout.clone(), - dst_layout.clone(), - crate::Assume { validity: true, ..crate::Assume::default() }, - UltraMinimal, - ) - .answer(), + is_transmutable( + &src_layout, + &dst_layout, + Assume { validity: true, ..Assume::default() } + ), "{:?} SHOULD be transmutable (assuming validity) into {:?}", src_layout, dst_layout @@ -150,13 +227,7 @@ mod bool { } else { assert_eq!( Answer::No(Reason::DstIsBitIncompatible), - MaybeTransmutableQuery::new( - src_layout.clone(), - dst_layout.clone(), - crate::Assume { validity: false, ..crate::Assume::default() }, - UltraMinimal, - ) - .answer(), + is_transmutable(&src_layout, &dst_layout, Assume::default()), "{:?} should NOT be transmutable into {:?}", src_layout, dst_layout @@ -166,3 +237,221 @@ mod bool { } } } + +mod uninit { + use super::*; + + #[test] + fn size() { + let mu = Tree::uninit(); + let u8 = Tree::u8(); + + for alignment in [false, true] { + for lifetimes in [false, true] { + for safety in [false, true] { + for validity in [false, true] { + let assume = Assume { alignment, lifetimes, safety, validity }; + + let want = if validity { + Answer::Yes + } else { + Answer::No(Reason::DstIsBitIncompatible) + }; + + assert_eq!(is_transmutable(&mu, &u8, assume), want, "assume: {assume:?}"); + assert_eq!( + is_transmutable(&u8, &mu, assume), + Answer::Yes, + "assume: {assume:?}" + ); + } + } + } + } + } +} + +mod alt { + use super::*; + use crate::Answer; + + #[test] + fn should_permit_identity_transmutation() { + type Tree = layout::Tree; + + let x = Tree::Seq(vec![Tree::byte(0), Tree::byte(0)]); + let y = Tree::Seq(vec![Tree::bool(), Tree::byte(1)]); + let layout = Tree::Alt(vec![x, y]); + + let answer = crate::maybe_transmutable::MaybeTransmutableQuery::new( + layout.clone(), + layout.clone(), + crate::Assume::default(), + UltraMinimal::default(), + ) + .answer(); + assert_eq!(answer, Answer::Yes, "layout:{:#?}", layout); + } +} + +mod union { + use super::*; + + #[test] + fn union() { + let [a, b, c, d] = [0, 1, 2, 3]; + let s = Dfa::from_edges(a, d, &[(a, 0, b), (b, 0, d), (a, 1, c), (c, 1, d)]); + + let t = Dfa::from_edges(a, c, &[(a, 1, b), (b, 0, c)]); + + let mut ctr = 0; + let new_state = || { + let state = crate::layout::dfa::State(ctr); + ctr += 1; + state + }; + + let u = s.clone().union(t.clone(), new_state); + + let expected_u = + Dfa::from_edges(b, a, &[(b, 0..=0, c), (b, 1..=1, d), (d, 0..=1, a), (c, 0..=0, a)]); + + assert_eq!(u, expected_u); + + assert_eq!(is_transmutable(&s, &u, Assume::default()), Answer::Yes); + assert_eq!(is_transmutable(&t, &u, Assume::default()), Answer::Yes); + } +} + +mod char { + use super::*; + use crate::layout::tree::Endian; + + #[test] + fn should_permit_valid_transmutation() { + for order in [Endian::Big, Endian::Little] { + use Answer::*; + let char_layout = layout::Tree::::char(order); + + // `char`s can be in the following ranges: + // - [0, 0xD7FF] + // - [0xE000, 10FFFF] + // + // This loop synthesizes a singleton-validity type for the extremes + // of each range, and for one past the end of the extremes of each + // range. + let no = No(Reason::DstIsBitIncompatible); + for (src, answer) in [ + (0u32, Yes), + (0xD7FF, Yes), + (0xD800, no.clone()), + (0xDFFF, no.clone()), + (0xE000, Yes), + (0x10FFFF, Yes), + (0x110000, no.clone()), + (0xFFFF0000, no.clone()), + (0xFFFFFFFF, no), + ] { + let src_layout = + layout::tree::Tree::::from_big_endian(order, src.to_be_bytes()); + + let a = is_transmutable(&src_layout, &char_layout, Assume::default()); + assert_eq!(a, answer, "endian:{order:?},\nsrc:{src:x}"); + } + } + } +} + +mod nonzero { + use super::*; + use crate::{Answer, Reason}; + + const NONZERO_BYTE_WIDTHS: [u64; 5] = [1, 2, 4, 8, 16]; + + #[test] + fn should_permit_identity_transmutation() { + for width in NONZERO_BYTE_WIDTHS { + let layout = layout::Tree::::nonzero(width); + assert_eq!(is_transmutable(&layout, &layout, Assume::default()), Answer::Yes); + } + } + + #[test] + fn should_permit_valid_transmutation() { + for width in NONZERO_BYTE_WIDTHS { + use Answer::*; + + let num = layout::Tree::::number(width); + let nz = layout::Tree::::nonzero(width); + + let a = is_transmutable(&num, &nz, Assume::default()); + assert_eq!(a, No(Reason::DstIsBitIncompatible), "width:{width}"); + + let a = is_transmutable(&nz, &num, Assume::default()); + assert_eq!(a, Yes, "width:{width}"); + } + } +} + +mod r#ref { + use super::*; + + #[test] + fn should_permit_identity_transmutation() { + type Tree = crate::layout::Tree; + + for validity in [false, true] { + let layout = Tree::Seq(vec![Tree::byte(0x00), Tree::Ref([()])]); + + let assume = Assume { validity, ..Assume::default() }; + + let answer = crate::maybe_transmutable::MaybeTransmutableQuery::new( + layout.clone(), + layout, + assume, + UltraMinimal::default(), + ) + .answer(); + assert_eq!( + answer, + Answer::If(crate::Condition::IfTransmutable { src: [()], dst: [()] }) + ); + } + } +} + +mod benches { + use std::hint::black_box; + + use test::Bencher; + + use super::*; + + #[bench] + fn bench_dfa_from_tree(b: &mut Bencher) { + let num = Tree::number(8).prune(&|_| false); + let num = black_box(num); + + b.iter(|| { + let _ = black_box(Dfa::from_tree(num.clone())); + }) + } + + #[bench] + fn bench_transmute(b: &mut Bencher) { + let num = Tree::number(8).prune(&|_| false); + let dfa = black_box(Dfa::from_tree(num).unwrap()); + + b.iter(|| { + let answer = crate::maybe_transmutable::MaybeTransmutableQuery::new( + dfa.clone(), + dfa.clone(), + Assume::default(), + UltraMinimal::default(), + ) + .answer(); + let answer = std::hint::black_box(answer); + assert_eq!(answer, Answer::Yes); + }) + } +} diff --git a/compiler/rustc_ty_utils/src/abi.rs b/compiler/rustc_ty_utils/src/abi.rs index 48d5a4a0fcb0d..2b49d7ac8b599 100644 --- a/compiler/rustc_ty_utils/src/abi.rs +++ b/compiler/rustc_ty_utils/src/abi.rs @@ -244,7 +244,7 @@ fn fn_sig_for_fn_abi<'tcx>( fn conv_from_spec_abi(tcx: TyCtxt<'_>, abi: ExternAbi, c_variadic: bool) -> Conv { use rustc_abi::ExternAbi::*; match tcx.sess.target.adjust_abi(abi, c_variadic) { - RustIntrinsic | Rust | RustCall => Conv::Rust, + Rust | RustCall => Conv::Rust, // This is intentionally not using `Conv::Cold`, as that has to preserve // even SIMD registers, which is generally not a good trade-off. @@ -347,7 +347,8 @@ fn adjust_for_rust_scalar<'tcx>( None }; if let Some(kind) = kind { - attrs.pointee_align = Some(pointee.align); + attrs.pointee_align = + Some(pointee.align.min(cx.tcx().sess.target.max_reliable_alignment())); // `Box` are not necessarily dereferenceable for the entire duration of the function as // they can be deallocated at any time. Same for non-frozen shared references (see @@ -550,8 +551,10 @@ fn fn_abi_new_uncached<'tcx>( extra_args }; - let is_drop_in_place = - determined_fn_def_id.is_some_and(|def_id| tcx.is_lang_item(def_id, LangItem::DropInPlace)); + let is_drop_in_place = determined_fn_def_id.is_some_and(|def_id| { + tcx.is_lang_item(def_id, LangItem::DropInPlace) + || tcx.is_lang_item(def_id, LangItem::AsyncDropInPlace) + }); let arg_of = |ty: Ty<'tcx>, arg_idx: Option| -> Result<_, &'tcx FnAbiError<'tcx>> { let span = tracing::debug_span!("arg_of"); @@ -660,7 +663,7 @@ fn fn_abi_adjust_for_abi<'tcx>( let tcx = cx.tcx(); if abi.is_rustic_abi() { - fn_abi.adjust_for_rust_abi(cx, abi); + fn_abi.adjust_for_rust_abi(cx); // Look up the deduced parameter attributes for this function, if we have its def ID and // we're optimizing in non-incremental mode. We'll tag its parameters with those attributes diff --git a/compiler/rustc_ty_utils/src/assoc.rs b/compiler/rustc_ty_utils/src/assoc.rs index b7684e85d4128..f14a45aa1e3b9 100644 --- a/compiler/rustc_ty_utils/src/assoc.rs +++ b/compiler/rustc_ty_utils/src/assoc.rs @@ -1,12 +1,12 @@ use rustc_data_structures::fx::FxIndexSet; use rustc_hir::def::DefKind; use rustc_hir::def_id::{DefId, DefIdMap, LocalDefId}; +use rustc_hir::definitions::{DefPathData, DisambiguatorState}; use rustc_hir::intravisit::{self, Visitor}; use rustc_hir::{self as hir, AmbigArg}; use rustc_middle::query::Providers; use rustc_middle::ty::{self, ImplTraitInTraitData, TyCtxt}; use rustc_middle::{bug, span_bug}; -use rustc_span::kw; pub(crate) fn provide(providers: &mut Providers) { *providers = Providers { @@ -129,39 +129,51 @@ fn associated_item(tcx: TyCtxt<'_>, def_id: LocalDefId) -> ty::AssocItem { fn associated_item_from_trait_item_ref(trait_item_ref: &hir::TraitItemRef) -> ty::AssocItem { let owner_id = trait_item_ref.id.owner_id; - let (kind, has_self) = match trait_item_ref.kind { - hir::AssocItemKind::Const => (ty::AssocKind::Const, false), - hir::AssocItemKind::Fn { has_self } => (ty::AssocKind::Fn, has_self), - hir::AssocItemKind::Type => (ty::AssocKind::Type, false), + let name = trait_item_ref.ident.name; + let kind = match trait_item_ref.kind { + hir::AssocItemKind::Const => ty::AssocKind::Const { name }, + hir::AssocItemKind::Fn { has_self } => ty::AssocKind::Fn { name, has_self }, + hir::AssocItemKind::Type => ty::AssocKind::Type { data: ty::AssocTypeData::Normal(name) }, }; ty::AssocItem { - name: trait_item_ref.ident.name, kind, def_id: owner_id.to_def_id(), trait_item_def_id: Some(owner_id.to_def_id()), container: ty::AssocItemContainer::Trait, - fn_has_self_parameter: has_self, - opt_rpitit_info: None, } } fn associated_item_from_impl_item_ref(impl_item_ref: &hir::ImplItemRef) -> ty::AssocItem { let def_id = impl_item_ref.id.owner_id; - let (kind, has_self) = match impl_item_ref.kind { - hir::AssocItemKind::Const => (ty::AssocKind::Const, false), - hir::AssocItemKind::Fn { has_self } => (ty::AssocKind::Fn, has_self), - hir::AssocItemKind::Type => (ty::AssocKind::Type, false), + let name = impl_item_ref.ident.name; + let kind = match impl_item_ref.kind { + hir::AssocItemKind::Const => ty::AssocKind::Const { name }, + hir::AssocItemKind::Fn { has_self } => ty::AssocKind::Fn { name, has_self }, + hir::AssocItemKind::Type => ty::AssocKind::Type { data: ty::AssocTypeData::Normal(name) }, }; ty::AssocItem { - name: impl_item_ref.ident.name, kind, def_id: def_id.to_def_id(), trait_item_def_id: impl_item_ref.trait_item_def_id, container: ty::AssocItemContainer::Impl, - fn_has_self_parameter: has_self, - opt_rpitit_info: None, + } +} +struct RPITVisitor { + rpits: FxIndexSet, +} + +impl<'tcx> Visitor<'tcx> for RPITVisitor { + fn visit_ty(&mut self, ty: &'tcx hir::Ty<'tcx, AmbigArg>) { + if let hir::TyKind::OpaqueDef(opaq) = ty.kind + && self.rpits.insert(opaq.def_id) + { + for bound in opaq.bounds { + intravisit::walk_param_bound(self, bound); + } + } + intravisit::walk_ty(self, ty) } } @@ -182,23 +194,6 @@ fn associated_types_for_impl_traits_in_associated_fn( match tcx.def_kind(parent_def_id) { DefKind::Trait => { - struct RPITVisitor { - rpits: FxIndexSet, - } - - impl<'tcx> Visitor<'tcx> for RPITVisitor { - fn visit_ty(&mut self, ty: &'tcx hir::Ty<'tcx, AmbigArg>) { - if let hir::TyKind::OpaqueDef(opaq) = ty.kind - && self.rpits.insert(opaq.def_id) - { - for bound in opaq.bounds { - intravisit::walk_param_bound(self, bound); - } - } - intravisit::walk_ty(self, ty) - } - } - let mut visitor = RPITVisitor { rpits: FxIndexSet::default() }; if let Some(output) = tcx.hir_get_fn_output(fn_def_id) { @@ -251,9 +246,23 @@ fn associated_type_for_impl_trait_in_trait( let trait_def_id = tcx.local_parent(fn_def_id); assert_eq!(tcx.def_kind(trait_def_id), DefKind::Trait); + // Collect all opaque types in return position for the method and use + // the index as the disambiguator to make an unique def path. + let mut visitor = RPITVisitor { rpits: FxIndexSet::default() }; + visitor.visit_fn_ret_ty(tcx.hir_get_fn_output(fn_def_id).unwrap()); + let disambiguator = visitor.rpits.get_index_of(&opaque_ty_def_id).unwrap().try_into().unwrap(); + let span = tcx.def_span(opaque_ty_def_id); - // No name because this is a synthetic associated type. - let trait_assoc_ty = tcx.at(span).create_def(trait_def_id, None, DefKind::AssocTy); + // Also use the method name to create an unique def path. + let data = DefPathData::AnonAssocTy(tcx.item_name(fn_def_id.to_def_id())); + let trait_assoc_ty = tcx.at(span).create_def( + trait_def_id, + // No name because this is an anonymous associated type. + None, + DefKind::AssocTy, + Some(data), + &mut DisambiguatorState::with(trait_def_id, data, disambiguator), + ); let local_def_id = trait_assoc_ty.def_id(); let def_id = local_def_id.to_def_id(); @@ -264,16 +273,15 @@ fn associated_type_for_impl_trait_in_trait( trait_assoc_ty.def_ident_span(Some(span)); trait_assoc_ty.associated_item(ty::AssocItem { - name: kw::Empty, - kind: ty::AssocKind::Type, + kind: ty::AssocKind::Type { + data: ty::AssocTypeData::Rpitit(ImplTraitInTraitData::Trait { + fn_def_id: fn_def_id.to_def_id(), + opaque_def_id: opaque_ty_def_id.to_def_id(), + }), + }, def_id, trait_item_def_id: None, container: ty::AssocItemContainer::Trait, - fn_has_self_parameter: false, - opt_rpitit_info: Some(ImplTraitInTraitData::Trait { - fn_def_id: fn_def_id.to_def_id(), - opaque_def_id: opaque_ty_def_id.to_def_id(), - }), }); // Copy visility of the containing function. @@ -305,8 +313,22 @@ fn associated_type_for_impl_trait_in_impl( hir::FnRetTy::DefaultReturn(_) => tcx.def_span(impl_fn_def_id), hir::FnRetTy::Return(ty) => ty.span, }; - // No name because this is a synthetic associated type. - let impl_assoc_ty = tcx.at(span).create_def(impl_local_def_id, None, DefKind::AssocTy); + + // Use the same disambiguator and method name as the anon associated type in the trait. + let disambiguated_data = tcx.def_key(trait_assoc_def_id).disambiguated_data; + let DefPathData::AnonAssocTy(name) = disambiguated_data.data else { + bug!("expected anon associated type") + }; + let data = DefPathData::AnonAssocTy(name); + + let impl_assoc_ty = tcx.at(span).create_def( + impl_local_def_id, + // No name because this is an anonymous associated type. + None, + DefKind::AssocTy, + Some(data), + &mut DisambiguatorState::with(impl_local_def_id, data, disambiguated_data.disambiguator), + ); let local_def_id = impl_assoc_ty.def_id(); let def_id = local_def_id.to_def_id(); @@ -317,13 +339,14 @@ fn associated_type_for_impl_trait_in_impl( impl_assoc_ty.def_ident_span(Some(span)); impl_assoc_ty.associated_item(ty::AssocItem { - name: kw::Empty, - kind: ty::AssocKind::Type, + kind: ty::AssocKind::Type { + data: ty::AssocTypeData::Rpitit(ImplTraitInTraitData::Impl { + fn_def_id: impl_fn_def_id.to_def_id(), + }), + }, def_id, trait_item_def_id: Some(trait_assoc_def_id), container: ty::AssocItemContainer::Impl, - fn_has_self_parameter: false, - opt_rpitit_info: Some(ImplTraitInTraitData::Impl { fn_def_id: impl_fn_def_id.to_def_id() }), }); // Copy visility of the containing function. diff --git a/compiler/rustc_ty_utils/src/common_traits.rs b/compiler/rustc_ty_utils/src/common_traits.rs index 20646cf9a826c..bb2c4172b0877 100644 --- a/compiler/rustc_ty_utils/src/common_traits.rs +++ b/compiler/rustc_ty_utils/src/common_traits.rs @@ -29,6 +29,13 @@ fn is_unpin_raw<'tcx>(tcx: TyCtxt<'tcx>, query: ty::PseudoCanonicalInput<'tcx, T is_item_raw(tcx, query, LangItem::Unpin) } +fn is_async_drop_raw<'tcx>( + tcx: TyCtxt<'tcx>, + query: ty::PseudoCanonicalInput<'tcx, Ty<'tcx>>, +) -> bool { + is_item_raw(tcx, query, LangItem::AsyncDrop) +} + fn is_item_raw<'tcx>( tcx: TyCtxt<'tcx>, query: ty::PseudoCanonicalInput<'tcx, Ty<'tcx>>, @@ -46,6 +53,7 @@ pub(crate) fn provide(providers: &mut Providers) { is_sized_raw, is_freeze_raw, is_unpin_raw, + is_async_drop_raw, ..*providers }; } diff --git a/compiler/rustc_ty_utils/src/implied_bounds.rs b/compiler/rustc_ty_utils/src/implied_bounds.rs index 088d5e76b8685..6fa763f18ef19 100644 --- a/compiler/rustc_ty_utils/src/implied_bounds.rs +++ b/compiler/rustc_ty_utils/src/implied_bounds.rs @@ -4,9 +4,9 @@ use rustc_data_structures::fx::FxHashMap; use rustc_hir as hir; use rustc_hir::def::DefKind; use rustc_hir::def_id::LocalDefId; -use rustc_middle::bug; use rustc_middle::query::Providers; use rustc_middle::ty::{self, Ty, TyCtxt, fold_regions}; +use rustc_middle::{bug, span_bug}; use rustc_span::Span; pub(crate) fn provide(providers: &mut Providers) { @@ -21,7 +21,8 @@ pub(crate) fn provide(providers: &mut Providers) { } fn assumed_wf_types<'tcx>(tcx: TyCtxt<'tcx>, def_id: LocalDefId) -> &'tcx [(Ty<'tcx>, Span)] { - match tcx.def_kind(def_id) { + let kind = tcx.def_kind(def_id); + match kind { DefKind::Fn => { let sig = tcx.fn_sig(def_id).instantiate_identity(); let liberated_sig = tcx.liberate_late_bound_regions(def_id.to_def_id(), sig); @@ -75,7 +76,7 @@ fn assumed_wf_types<'tcx>(tcx: TyCtxt<'tcx>, def_id: LocalDefId) -> &'tcx [(Ty<' { let orig_lt = tcx.map_opaque_lifetime_to_parent_lifetime(param.def_id.expect_local()); - if matches!(*orig_lt, ty::ReLateParam(..)) { + if matches!(orig_lt.kind(), ty::ReLateParam(..)) { mapping.insert( orig_lt, ty::Region::new_early_param( @@ -121,32 +122,38 @@ fn assumed_wf_types<'tcx>(tcx: TyCtxt<'tcx>, def_id: LocalDefId) -> &'tcx [(Ty<' } } DefKind::AssocConst | DefKind::AssocTy => tcx.assumed_wf_types(tcx.local_parent(def_id)), - DefKind::OpaqueTy => bug!("implied bounds are not defined for opaques"), - DefKind::Mod + DefKind::Static { .. } + | DefKind::Const + | DefKind::AnonConst + | DefKind::InlineConst | DefKind::Struct | DefKind::Union | DefKind::Enum - | DefKind::Variant | DefKind::Trait - | DefKind::TyAlias - | DefKind::ForeignTy | DefKind::TraitAlias + | DefKind::TyAlias => ty::List::empty(), + DefKind::OpaqueTy + | DefKind::Mod + | DefKind::Variant + | DefKind::ForeignTy | DefKind::TyParam - | DefKind::Const | DefKind::ConstParam - | DefKind::Static { .. } | DefKind::Ctor(_, _) | DefKind::Macro(_) | DefKind::ExternCrate | DefKind::Use | DefKind::ForeignMod - | DefKind::AnonConst - | DefKind::InlineConst | DefKind::Field | DefKind::LifetimeParam | DefKind::GlobalAsm | DefKind::Closure - | DefKind::SyntheticCoroutineBody => ty::List::empty(), + | DefKind::SyntheticCoroutineBody => { + span_bug!( + tcx.def_span(def_id), + "`assumed_wf_types` not defined for {} `{def_id:?}`", + kind.descr(def_id.to_def_id()) + ); + } } } diff --git a/compiler/rustc_ty_utils/src/instance.rs b/compiler/rustc_ty_utils/src/instance.rs index 962e1353ebcd7..166e8f1934299 100644 --- a/compiler/rustc_ty_utils/src/instance.rs +++ b/compiler/rustc_ty_utils/src/instance.rs @@ -5,7 +5,6 @@ use rustc_infer::infer::TyCtxtInferExt; use rustc_middle::bug; use rustc_middle::query::Providers; use rustc_middle::traits::{BuiltinImplSource, CodegenObligationError}; -use rustc_middle::ty::util::AsyncDropGlueMorphology; use rustc_middle::ty::{ self, ClosureKind, GenericArgsRef, Instance, PseudoCanonicalInput, TyCtxt, TypeVisitableExt, }; @@ -41,20 +40,26 @@ fn resolve_instance_raw<'tcx>( if ty.needs_drop(tcx, typing_env) { debug!(" => nontrivial drop glue"); match *ty.kind() { + ty::Coroutine(coroutine_def_id, ..) => { + // FIXME: sync drop of coroutine with async drop (generate both versions?) + // Currently just ignored + if tcx.optimized_mir(coroutine_def_id).coroutine_drop_async().is_some() { + ty::InstanceKind::DropGlue(def_id, None) + } else { + ty::InstanceKind::DropGlue(def_id, Some(ty)) + } + } ty::Closure(..) | ty::CoroutineClosure(..) - | ty::Coroutine(..) | ty::Tuple(..) | ty::Adt(..) | ty::Dynamic(..) | ty::Array(..) | ty::Slice(..) - | ty::UnsafeBinder(..) => {} + | ty::UnsafeBinder(..) => ty::InstanceKind::DropGlue(def_id, Some(ty)), // Drop shims can only be built from ADTs. _ => return Ok(None), } - - ty::InstanceKind::DropGlue(def_id, Some(ty)) } else { debug!(" => trivial drop glue"); ty::InstanceKind::DropGlue(def_id, None) @@ -62,7 +67,7 @@ fn resolve_instance_raw<'tcx>( } else if tcx.is_lang_item(def_id, LangItem::AsyncDropInPlace) { let ty = args.type_at(0); - if ty.async_drop_glue_morphology(tcx) != AsyncDropGlueMorphology::Noop { + if ty.needs_async_drop(tcx, typing_env) { match *ty.kind() { ty::Closure(..) | ty::CoroutineClosure(..) @@ -76,11 +81,14 @@ fn resolve_instance_raw<'tcx>( _ => return Ok(None), } debug!(" => nontrivial async drop glue ctor"); - ty::InstanceKind::AsyncDropGlueCtorShim(def_id, Some(ty)) + ty::InstanceKind::AsyncDropGlueCtorShim(def_id, ty) } else { debug!(" => trivial async drop glue ctor"); - ty::InstanceKind::AsyncDropGlueCtorShim(def_id, None) + ty::InstanceKind::AsyncDropGlueCtorShim(def_id, ty) } + } else if tcx.is_async_drop_in_place_coroutine(def_id) { + let ty = args.type_at(0); + ty::InstanceKind::AsyncDropGlue(def_id, ty) } else { debug!(" => free item"); ty::InstanceKind::Item(def_id) @@ -149,6 +157,7 @@ fn resolve_associated_item<'tcx>( match typing_env.typing_mode { ty::TypingMode::Coherence | ty::TypingMode::Analysis { .. } + | ty::TypingMode::Borrowck { .. } | ty::TypingMode::PostBorrowckAnalysis { .. } => false, ty::TypingMode::PostAnalysis => !trait_ref.still_further_specializable(), } @@ -226,7 +235,7 @@ fn resolve_associated_item<'tcx>( tcx.ensure_ok().compare_impl_item(leaf_def_item)?; } - Some(ty::Instance::new(leaf_def.item.def_id, args)) + Some(ty::Instance::new_raw(leaf_def.item.def_id, args)) } traits::ImplSource::Builtin(BuiltinImplSource::Object(_), _) => { let trait_ref = ty::TraitRef::from_method(tcx, trait_id, rcvr_args); @@ -271,7 +280,7 @@ fn resolve_associated_item<'tcx>( // Use the default `fn clone_from` from `trait Clone`. let args = tcx.erase_regions(rcvr_args); - Some(ty::Instance::new(trait_item_id, args)) + Some(ty::Instance::new_raw(trait_item_id, args)) } } else if tcx.is_lang_item(trait_ref.def_id, LangItem::FnPtrTrait) { if tcx.is_lang_item(trait_item_id, LangItem::FnPtrAddr) { @@ -320,7 +329,7 @@ fn resolve_associated_item<'tcx>( // sync with the built-in trait implementations (since all of the // implementations return `FnOnce::Output`). if ty::ClosureKind::FnOnce == args.as_coroutine_closure().kind() { - Some(Instance::new(coroutine_closure_def_id, args)) + Some(Instance::new_raw(coroutine_closure_def_id, args)) } else { Some(Instance { def: ty::InstanceKind::ConstructCoroutineInClosureShim { @@ -353,7 +362,7 @@ fn resolve_associated_item<'tcx>( args, }) } else { - Some(Instance::new(coroutine_closure_def_id, args)) + Some(Instance::new_raw(coroutine_closure_def_id, args)) } } ty::Closure(closure_def_id, args) => { @@ -372,7 +381,7 @@ fn resolve_associated_item<'tcx>( let name = tcx.item_name(trait_item_id); assert_eq!(name, sym::transmute); let args = tcx.erase_regions(rcvr_args); - Some(ty::Instance::new(trait_item_id, args)) + Some(ty::Instance::new_raw(trait_item_id, args)) } else { Instance::try_resolve_item_for_coroutine(tcx, trait_item_id, trait_id, rcvr_args) } diff --git a/compiler/rustc_ty_utils/src/layout.rs b/compiler/rustc_ty_utils/src/layout.rs index 0017186c1b082..908fcb14cb2fc 100644 --- a/compiler/rustc_ty_utils/src/layout.rs +++ b/compiler/rustc_ty_utils/src/layout.rs @@ -184,6 +184,10 @@ fn layout_of_uncached<'tcx>( } let tcx = cx.tcx(); + + // layout of `async_drop_in_place::{closure}` in case, + // when T is a coroutine, contains this internal coroutine's ref + let dl = cx.data_layout(); let map_layout = |result: Result<_, _>| match result { Ok(layout) => Ok(tcx.mk_layout(layout)), @@ -255,13 +259,95 @@ fn layout_of_uncached<'tcx>( }; layout.largest_niche = Some(niche); - - tcx.mk_layout(layout) } else { bug!("pattern type with range but not scalar layout: {ty:?}, {layout:?}") } } + ty::PatternKind::Or(variants) => match *variants[0] { + ty::PatternKind::Range { .. } => { + if let BackendRepr::Scalar(scalar) = &mut layout.backend_repr { + let variants: Result, _> = variants + .iter() + .map(|pat| match *pat { + ty::PatternKind::Range { start, end } => Ok(( + extract_const_value(cx, ty, start) + .unwrap() + .try_to_bits(tcx, cx.typing_env) + .ok_or_else(|| error(cx, LayoutError::Unknown(ty)))?, + extract_const_value(cx, ty, end) + .unwrap() + .try_to_bits(tcx, cx.typing_env) + .ok_or_else(|| error(cx, LayoutError::Unknown(ty)))?, + )), + ty::PatternKind::Or(_) => { + unreachable!("mixed or patterns are not allowed") + } + }) + .collect(); + let mut variants = variants?; + if !scalar.is_signed() { + let guar = tcx.dcx().err(format!( + "only signed integer base types are allowed for or-pattern pattern types at present" + )); + + return Err(error(cx, LayoutError::ReferencesError(guar))); + } + variants.sort(); + if variants.len() != 2 { + let guar = tcx + .dcx() + .err(format!("the only or-pattern types allowed are two range patterns that are directly connected at their overflow site")); + + return Err(error(cx, LayoutError::ReferencesError(guar))); + } + + // first is the one starting at the signed in range min + let mut first = variants[0]; + let mut second = variants[1]; + if second.0 + == layout.size.truncate(layout.size.signed_int_min() as u128) + { + (second, first) = (first, second); + } + + if layout.size.sign_extend(first.1) >= layout.size.sign_extend(second.0) + { + let guar = tcx.dcx().err(format!( + "only non-overlapping pattern type ranges are allowed at present" + )); + + return Err(error(cx, LayoutError::ReferencesError(guar))); + } + if layout.size.signed_int_max() as u128 != second.1 { + let guar = tcx.dcx().err(format!( + "one pattern needs to end at `{ty}::MAX`, but was {} instead", + second.1 + )); + + return Err(error(cx, LayoutError::ReferencesError(guar))); + } + + // Now generate a wrapping range (which aren't allowed in surface syntax). + scalar.valid_range_mut().start = second.0; + scalar.valid_range_mut().end = first.1; + + let niche = Niche { + offset: Size::ZERO, + value: scalar.primitive(), + valid_range: scalar.valid_range(cx), + }; + + layout.largest_niche = Some(niche); + } else { + bug!( + "pattern type with range but not scalar layout: {ty:?}, {layout:?}" + ) + } + } + ty::PatternKind::Or(..) => bug!("patterns cannot have nested or patterns"), + }, } + tcx.mk_layout(layout) } // Basic scalars. @@ -406,7 +492,7 @@ fn layout_of_uncached<'tcx>( ty::Coroutine(def_id, args) => { use rustc_middle::ty::layout::PrimitiveExt as _; - let Some(info) = tcx.coroutine_layout(def_id, args.as_coroutine().kind_ty()) else { + let Some(info) = tcx.coroutine_layout(def_id, args) else { return Err(error(cx, LayoutError::Unknown(ty))); }; @@ -514,6 +600,9 @@ fn layout_of_uncached<'tcx>( return map_layout(cx.calc.layout_of_union(&def.repr(), &variants)); } + // UnsafeCell and UnsafePinned both disable niche optimizations + let is_special_no_niche = def.is_unsafe_cell() || def.is_unsafe_pinned(); + let get_discriminant_type = |min, max| abi::Integer::repr_discr(tcx, ty, &def.repr(), min, max); @@ -542,7 +631,7 @@ fn layout_of_uncached<'tcx>( &def.repr(), &variants, def.is_enum(), - def.is_unsafe_cell(), + is_special_no_niche, tcx.layout_scalar_valid_range(def.did()), get_discriminant_type, discriminants_iter(), @@ -568,7 +657,7 @@ fn layout_of_uncached<'tcx>( &def.repr(), &variants, def.is_enum(), - def.is_unsafe_cell(), + is_special_no_niche, tcx.layout_scalar_valid_range(def.did()), get_discriminant_type, discriminants_iter(), @@ -767,7 +856,7 @@ fn variant_info_for_coroutine<'tcx>( return (vec![], None); }; - let coroutine = cx.tcx().coroutine_layout(def_id, args.as_coroutine().kind_ty()).unwrap(); + let coroutine = cx.tcx().coroutine_layout(def_id, args).unwrap(); let upvar_names = cx.tcx().closure_saved_names_of_captured_variables(def_id); let mut upvars_size = Size::ZERO; diff --git a/compiler/rustc_ty_utils/src/lib.rs b/compiler/rustc_ty_utils/src/lib.rs index 143b7d538801b..f79b6d44bfdfd 100644 --- a/compiler/rustc_ty_utils/src/lib.rs +++ b/compiler/rustc_ty_utils/src/lib.rs @@ -6,7 +6,7 @@ // tidy-alphabetical-start #![allow(internal_features)] -#![cfg_attr(doc, recursion_limit = "256")] // FIXME(nnethercote): will be removed by #124141 +#![cfg_attr(bootstrap, feature(let_chains))] #![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")] #![doc(rust_logo)] #![feature(assert_matches)] @@ -14,7 +14,6 @@ #![feature(box_patterns)] #![feature(if_let_guard)] #![feature(iterator_try_collect)] -#![feature(let_chains)] #![feature(never_type)] #![feature(rustdoc_internals)] // tidy-alphabetical-end @@ -30,6 +29,7 @@ mod implied_bounds; mod instance; mod layout; mod needs_drop; +mod nested_bodies; mod opaque_types; mod representability; pub mod sig_types; @@ -51,4 +51,5 @@ pub fn provide(providers: &mut Providers) { ty::provide(providers); instance::provide(providers); structural_match::provide(providers); + nested_bodies::provide(providers); } diff --git a/compiler/rustc_ty_utils/src/needs_drop.rs b/compiler/rustc_ty_utils/src/needs_drop.rs index 52955ec59a4a0..c3b04c20f4b67 100644 --- a/compiler/rustc_ty_utils/src/needs_drop.rs +++ b/compiler/rustc_ty_utils/src/needs_drop.rs @@ -42,11 +42,11 @@ fn needs_async_drop_raw<'tcx>( let adt_has_async_dtor = |adt_def: ty::AdtDef<'tcx>| adt_def.async_destructor(tcx).map(|_| DtorType::Significant); let res = drop_tys_helper(tcx, query.value, query.typing_env, adt_has_async_dtor, false, false) - .filter(filter_array_elements(tcx, query.typing_env)) + .filter(filter_array_elements_async(tcx, query.typing_env)) .next() .is_some(); - debug!("needs_drop_raw({:?}) = {:?}", query, res); + debug!("needs_async_drop_raw({:?}) = {:?}", query, res); res } @@ -66,6 +66,18 @@ fn filter_array_elements<'tcx>( Err(AlwaysRequiresDrop) => true, } } +fn filter_array_elements_async<'tcx>( + tcx: TyCtxt<'tcx>, + typing_env: ty::TypingEnv<'tcx>, +) -> impl Fn(&Result, AlwaysRequiresDrop>) -> bool { + move |ty| match ty { + Ok(ty) => match *ty.kind() { + ty::Array(elem, _) => tcx.needs_async_drop_raw(typing_env.as_query_input(elem)), + _ => true, + }, + Err(AlwaysRequiresDrop) => true, + } +} fn has_significant_drop_raw<'tcx>( tcx: TyCtxt<'tcx>, @@ -414,6 +426,27 @@ fn adt_drop_tys<'tcx>( .collect::, _>>() .map(|components| tcx.mk_type_list(&components)) } + +fn adt_async_drop_tys<'tcx>( + tcx: TyCtxt<'tcx>, + def_id: DefId, +) -> Result<&'tcx ty::List>, AlwaysRequiresDrop> { + // This is for the "adt_async_drop_tys" query, that considers all `AsyncDrop` impls. + let adt_has_dtor = + |adt_def: ty::AdtDef<'tcx>| adt_def.async_destructor(tcx).map(|_| DtorType::Significant); + // `tcx.type_of(def_id)` identical to `tcx.make_adt(def, identity_args)` + drop_tys_helper( + tcx, + tcx.type_of(def_id).instantiate_identity(), + ty::TypingEnv::non_body_analysis(tcx, def_id), + adt_has_dtor, + false, + false, + ) + .collect::, _>>() + .map(|components| tcx.mk_type_list(&components)) +} + // If `def_id` refers to a generic ADT, the queries above and below act as if they had been handed // a `tcx.make_ty(def, identity_args)` and as such it is legal to instantiate the generic parameters // of the ADT into the outputted `ty`s. @@ -458,6 +491,7 @@ pub(crate) fn provide(providers: &mut Providers) { needs_async_drop_raw, has_significant_drop_raw, adt_drop_tys, + adt_async_drop_tys, adt_significant_drop_tys, list_significant_drop_tys, ..*providers diff --git a/compiler/rustc_ty_utils/src/nested_bodies.rs b/compiler/rustc_ty_utils/src/nested_bodies.rs new file mode 100644 index 0000000000000..7c74d8eb63518 --- /dev/null +++ b/compiler/rustc_ty_utils/src/nested_bodies.rs @@ -0,0 +1,34 @@ +use rustc_hir as hir; +use rustc_hir::def_id::{DefId, LocalDefId}; +use rustc_hir::intravisit::Visitor; +use rustc_middle::query::Providers; +use rustc_middle::ty::{self, TyCtxt}; + +fn nested_bodies_within<'tcx>(tcx: TyCtxt<'tcx>, item: LocalDefId) -> &'tcx ty::List { + let body = tcx.hir_body_owned_by(item); + let mut collector = + NestedBodiesVisitor { tcx, root_def_id: item.to_def_id(), nested_bodies: vec![] }; + collector.visit_body(body); + tcx.mk_local_def_ids(&collector.nested_bodies) +} + +struct NestedBodiesVisitor<'tcx> { + tcx: TyCtxt<'tcx>, + root_def_id: DefId, + nested_bodies: Vec, +} + +impl<'tcx> Visitor<'tcx> for NestedBodiesVisitor<'tcx> { + fn visit_nested_body(&mut self, id: hir::BodyId) { + let body_def_id = self.tcx.hir_body_owner_def_id(id); + if self.tcx.typeck_root_def_id(body_def_id.to_def_id()) == self.root_def_id { + self.nested_bodies.push(body_def_id); + let body = self.tcx.hir_body(id); + self.visit_body(body); + } + } +} + +pub(super) fn provide(providers: &mut Providers) { + *providers = Providers { nested_bodies_within, ..*providers }; +} diff --git a/compiler/rustc_ty_utils/src/opaque_types.rs b/compiler/rustc_ty_utils/src/opaque_types.rs index 3aad97d86cca1..841f602d985cd 100644 --- a/compiler/rustc_ty_utils/src/opaque_types.rs +++ b/compiler/rustc_ty_utils/src/opaque_types.rs @@ -3,10 +3,10 @@ use rustc_hir::def::DefKind; use rustc_hir::def_id::LocalDefId; use rustc_hir::intravisit; use rustc_hir::intravisit::Visitor; -use rustc_middle::bug; use rustc_middle::query::Providers; use rustc_middle::ty::util::{CheckRegions, NotUniqueParam}; use rustc_middle::ty::{self, Ty, TyCtxt, TypeSuperVisitable, TypeVisitable, TypeVisitor}; +use rustc_middle::{bug, span_bug}; use rustc_span::Span; use tracing::{instrument, trace}; @@ -223,7 +223,7 @@ impl<'tcx> TypeVisitor> for OpaqueTypeCollector<'tcx> { } // Skips type aliases, as they are meant to be transparent. // FIXME(type_alias_impl_trait): can we require mentioning nested type aliases explicitly? - ty::Alias(ty::Weak, alias_ty) if alias_ty.def_id.is_local() => { + ty::Alias(ty::Free, alias_ty) if alias_ty.def_id.is_local() => { self.tcx .type_of(alias_ty.def_id) .instantiate(self.tcx, alias_ty.args) @@ -320,9 +320,12 @@ fn opaque_types_defined_by<'tcx>( | DefKind::AnonConst => { collector.collect_taits_declared_in_body(); } + // Closures and coroutines are type checked with their parent + DefKind::Closure | DefKind::InlineConst => { + collector.opaques.extend(tcx.opaque_types_defined_by(tcx.local_parent(item))); + } + DefKind::AssocTy | DefKind::TyAlias | DefKind::GlobalAsm => {} DefKind::OpaqueTy - | DefKind::TyAlias - | DefKind::AssocTy | DefKind::Mod | DefKind::Struct | DefKind::Union @@ -340,12 +343,13 @@ fn opaque_types_defined_by<'tcx>( | DefKind::ForeignMod | DefKind::Field | DefKind::LifetimeParam - | DefKind::GlobalAsm | DefKind::Impl { .. } - | DefKind::SyntheticCoroutineBody => {} - // Closures and coroutines are type checked with their parent - DefKind::Closure | DefKind::InlineConst => { - collector.opaques.extend(tcx.opaque_types_defined_by(tcx.local_parent(item))); + | DefKind::SyntheticCoroutineBody => { + span_bug!( + tcx.def_span(item), + "`opaque_types_defined_by` not defined for {} `{item:?}`", + kind.descr(item.to_def_id()) + ); } } tcx.mk_local_def_ids(&collector.opaques) diff --git a/compiler/rustc_ty_utils/src/sig_types.rs b/compiler/rustc_ty_utils/src/sig_types.rs index 5bb96f90029ae..dc6009116ac57 100644 --- a/compiler/rustc_ty_utils/src/sig_types.rs +++ b/compiler/rustc_ty_utils/src/sig_types.rs @@ -116,7 +116,7 @@ pub fn walk_types<'tcx, V: SpannedTypeVisitor<'tcx>>( "{kind:?} has not seen any uses of `walk_types` yet, ping oli-obk if you'd like any help" ) } - // These don't have any types. + // These don't have any types, but are visited during privacy checking. | DefKind::ExternCrate | DefKind::ForeignMod | DefKind::ForeignTy diff --git a/compiler/rustc_ty_utils/src/ty.rs b/compiler/rustc_ty_utils/src/ty.rs index 9dc4f11e456e2..0c49ddff39bc2 100644 --- a/compiler/rustc_ty_utils/src/ty.rs +++ b/compiler/rustc_ty_utils/src/ty.rs @@ -3,10 +3,12 @@ use rustc_hir as hir; use rustc_hir::LangItem; use rustc_hir::def::DefKind; use rustc_index::bit_set::DenseBitSet; +use rustc_infer::infer::TyCtxtInferExt; use rustc_middle::bug; use rustc_middle::query::Providers; use rustc_middle::ty::{ - self, Ty, TyCtxt, TypeSuperVisitable, TypeVisitable, TypeVisitor, Upcast, fold_regions, + self, Ty, TyCtxt, TypeFoldable, TypeSuperVisitable, TypeVisitable, TypeVisitor, Upcast, + fold_regions, }; use rustc_span::DUMMY_SP; use rustc_span::def_id::{CRATE_DEF_ID, DefId, LocalDefId}; @@ -184,7 +186,7 @@ struct ImplTraitInTraitFinder<'a, 'tcx> { } impl<'tcx> TypeVisitor> for ImplTraitInTraitFinder<'_, 'tcx> { - fn visit_binder>>(&mut self, binder: &ty::Binder<'tcx, T>) { + fn visit_binder>>(&mut self, binder: &ty::Binder<'tcx, T>) { self.depth.shift_in(1); binder.super_visit_with(self); self.depth.shift_out(1); @@ -312,6 +314,61 @@ fn unsizing_params_for_adt<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId) -> DenseBitSe unsizing_params } +fn impl_self_is_guaranteed_unsized<'tcx>(tcx: TyCtxt<'tcx>, impl_def_id: DefId) -> bool { + debug_assert_eq!(tcx.def_kind(impl_def_id), DefKind::Impl { of_trait: true }); + + let infcx = tcx.infer_ctxt().ignoring_regions().build(ty::TypingMode::non_body_analysis()); + + let ocx = traits::ObligationCtxt::new_with_diagnostics(&infcx); + let cause = traits::ObligationCause::dummy(); + let param_env = tcx.param_env(impl_def_id); + + let tail = tcx.struct_tail_raw( + tcx.type_of(impl_def_id).instantiate_identity(), + |ty| { + ocx.structurally_normalize_ty(&cause, param_env, ty).unwrap_or_else(|_| { + Ty::new_error_with_message( + tcx, + tcx.def_span(impl_def_id), + "struct tail should be computable", + ) + }) + }, + || (), + ); + + match tail.kind() { + ty::Dynamic(_, _, ty::Dyn) | ty::Slice(_) | ty::Str => true, + ty::Bool + | ty::Char + | ty::Int(_) + | ty::Uint(_) + | ty::Float(_) + | ty::Adt(_, _) + | ty::Foreign(_) + | ty::Array(_, _) + | ty::Pat(_, _) + | ty::RawPtr(_, _) + | ty::Ref(_, _, _) + | ty::FnDef(_, _) + | ty::FnPtr(_, _) + | ty::UnsafeBinder(_) + | ty::Closure(_, _) + | ty::CoroutineClosure(_, _) + | ty::Coroutine(_, _) + | ty::CoroutineWitness(_, _) + | ty::Never + | ty::Tuple(_) + | ty::Alias(_, _) + | ty::Param(_) + | ty::Bound(_, _) + | ty::Placeholder(_) + | ty::Infer(_) + | ty::Error(_) + | ty::Dynamic(_, _, ty::DynStar) => false, + } +} + pub(crate) fn provide(providers: &mut Providers) { *providers = Providers { asyncness, @@ -320,6 +377,7 @@ pub(crate) fn provide(providers: &mut Providers) { param_env_normalized_for_post_analysis, defaultness, unsizing_params_for_adt, + impl_self_is_guaranteed_unsized, ..*providers }; } diff --git a/compiler/rustc_type_ir/Cargo.toml b/compiler/rustc_type_ir/Cargo.toml index 4adf715792666..83d3d78298e60 100644 --- a/compiler/rustc_type_ir/Cargo.toml +++ b/compiler/rustc_type_ir/Cargo.toml @@ -7,6 +7,7 @@ edition = "2024" # tidy-alphabetical-start bitflags = "2.4.1" derive-where = "1.2.7" +ena = "0.14.3" indexmap = "2.0.0" rustc-hash = "1.1.0" rustc_ast_ir = { path = "../rustc_ast_ir", default-features = false } diff --git a/compiler/rustc_type_ir/src/binder.rs b/compiler/rustc_type_ir/src/binder.rs index e9055940310d0..000cf1e1fd8b1 100644 --- a/compiler/rustc_type_ir/src/binder.rs +++ b/compiler/rustc_type_ir/src/binder.rs @@ -122,9 +122,13 @@ impl> TypeFoldable for Binder { fn try_fold_with>(self, folder: &mut F) -> Result { folder.try_fold_binder(self) } + + fn fold_with>(self, folder: &mut F) -> Self { + folder.fold_binder(self) + } } -impl> TypeVisitable for Binder { +impl> TypeVisitable for Binder { fn visit_with>(&self, visitor: &mut V) -> V::Result { visitor.visit_binder(self) } @@ -135,11 +139,15 @@ impl> TypeSuperFoldable for Binder { self, folder: &mut F, ) -> Result { - self.try_map_bound(|ty| ty.try_fold_with(folder)) + self.try_map_bound(|t| t.try_fold_with(folder)) + } + + fn super_fold_with>(self, folder: &mut F) -> Self { + self.map_bound(|t| t.fold_with(folder)) } } -impl> TypeSuperVisitable for Binder { +impl> TypeSuperVisitable for Binder { fn super_visit_with>(&self, visitor: &mut V) -> V::Result { self.as_ref().skip_binder().visit_with(visitor) } @@ -284,7 +292,7 @@ impl ValidateBoundVars { impl TypeVisitor for ValidateBoundVars { type Result = ControlFlow<()>; - fn visit_binder>(&mut self, t: &Binder) -> Self::Result { + fn visit_binder>(&mut self, t: &Binder) -> Self::Result { self.binder_index.shift_in(1); let result = t.super_visit_with(self); self.binder_index.shift_out(1); diff --git a/compiler/rustc_type_ir/src/canonical.rs b/compiler/rustc_type_ir/src/canonical.rs index 03d3194f1065d..67b67df4b2817 100644 --- a/compiler/rustc_type_ir/src/canonical.rs +++ b/compiler/rustc_type_ir/src/canonical.rs @@ -34,7 +34,6 @@ pub struct CanonicalQueryInput { #[derive_where(Eq; I: Interner, V: Eq)] #[derive_where(Debug; I: Interner, V: fmt::Debug)] #[derive_where(Copy; I: Interner, V: Copy)] -#[derive(TypeVisitable_Generic, TypeFoldable_Generic)] #[cfg_attr( feature = "nightly", derive(Encodable_NoContext, Decodable_NoContext, HashStable_NoContext) @@ -147,7 +146,6 @@ impl CanonicalVarInfo { /// in the type-theory sense of the term -- i.e., a "meta" type system /// that analyzes type-like values. #[derive_where(Clone, Copy, Hash, PartialEq, Eq, Debug; I: Interner)] -#[derive(TypeVisitable_Generic, TypeFoldable_Generic)] #[cfg_attr( feature = "nightly", derive(Decodable_NoContext, Encodable_NoContext, HashStable_NoContext) diff --git a/compiler/rustc_type_ir/src/data_structures/mod.rs b/compiler/rustc_type_ir/src/data_structures/mod.rs index 30c67d10d0e86..a72669cbd189b 100644 --- a/compiler/rustc_type_ir/src/data_structures/mod.rs +++ b/compiler/rustc_type_ir/src/data_structures/mod.rs @@ -1,5 +1,6 @@ use std::hash::BuildHasherDefault; +pub use ena::unify::{NoError, UnifyKey, UnifyValue}; use rustc_hash::FxHasher; pub use rustc_hash::{FxHashMap as HashMap, FxHashSet as HashSet}; diff --git a/compiler/rustc_type_ir/src/flags.rs b/compiler/rustc_type_ir/src/flags.rs index 6a2498242feeb..7ed0f92b6398a 100644 --- a/compiler/rustc_type_ir/src/flags.rs +++ b/compiler/rustc_type_ir/src/flags.rs @@ -1,3 +1,7 @@ +use crate::inherent::*; +use crate::visit::Flags; +use crate::{self as ty, Interner}; + bitflags::bitflags! { /// Flags that we track on types. These flags are propagated upwards /// through the type during type construction, so that we can quickly check @@ -69,8 +73,8 @@ bitflags::bitflags! { /// Does this have `Projection`? const HAS_TY_PROJECTION = 1 << 10; - /// Does this have `Weak`? - const HAS_TY_WEAK = 1 << 11; + /// Does this have `Free` aliases? + const HAS_TY_FREE_ALIAS = 1 << 11; /// Does this have `Opaque`? const HAS_TY_OPAQUE = 1 << 12; /// Does this have `Inherent`? @@ -82,7 +86,7 @@ bitflags::bitflags! { /// /// Rephrased, could this term be normalized further? const HAS_ALIAS = TypeFlags::HAS_TY_PROJECTION.bits() - | TypeFlags::HAS_TY_WEAK.bits() + | TypeFlags::HAS_TY_FREE_ALIAS.bits() | TypeFlags::HAS_TY_OPAQUE.bits() | TypeFlags::HAS_TY_INHERENT.bits() | TypeFlags::HAS_CT_PROJECTION.bits(); @@ -128,3 +132,366 @@ bitflags::bitflags! { const HAS_BINDER_VARS = 1 << 23; } } + +#[derive(Debug)] +pub struct FlagComputation { + pub flags: TypeFlags, + + /// see `Ty::outer_exclusive_binder` for details + pub outer_exclusive_binder: ty::DebruijnIndex, + + interner: std::marker::PhantomData, +} + +impl FlagComputation { + fn new() -> FlagComputation { + FlagComputation { + flags: TypeFlags::empty(), + outer_exclusive_binder: ty::INNERMOST, + interner: std::marker::PhantomData, + } + } + + #[allow(rustc::usage_of_ty_tykind)] + pub fn for_kind(kind: &ty::TyKind) -> FlagComputation { + let mut result = FlagComputation::new(); + result.add_kind(kind); + result + } + + pub fn for_predicate(binder: ty::Binder>) -> FlagComputation { + let mut result = FlagComputation::new(); + result.add_predicate(binder); + result + } + + pub fn for_const_kind(kind: &ty::ConstKind) -> FlagComputation { + let mut result = FlagComputation::new(); + result.add_const_kind(kind); + result + } + + pub fn for_clauses(clauses: &[I::Clause]) -> FlagComputation { + let mut result = FlagComputation::new(); + for c in clauses { + result.add_flags(c.as_predicate().flags()); + result.add_exclusive_binder(c.as_predicate().outer_exclusive_binder()); + } + result + } + + fn add_flags(&mut self, flags: TypeFlags) { + self.flags = self.flags | flags; + } + + /// indicates that `self` refers to something at binding level `binder` + fn add_bound_var(&mut self, binder: ty::DebruijnIndex) { + let exclusive_binder = binder.shifted_in(1); + self.add_exclusive_binder(exclusive_binder); + } + + /// indicates that `self` refers to something *inside* binding + /// level `binder` -- not bound by `binder`, but bound by the next + /// binder internal to it + fn add_exclusive_binder(&mut self, exclusive_binder: ty::DebruijnIndex) { + self.outer_exclusive_binder = self.outer_exclusive_binder.max(exclusive_binder); + } + + /// Adds the flags/depth from a set of types that appear within the current type, but within a + /// region binder. + fn bound_computation(&mut self, value: ty::Binder, f: F) + where + F: FnOnce(&mut Self, T), + { + let mut computation = FlagComputation::new(); + + if !value.bound_vars().is_empty() { + computation.add_flags(TypeFlags::HAS_BINDER_VARS); + } + + f(&mut computation, value.skip_binder()); + + self.add_flags(computation.flags); + + // The types that contributed to `computation` occurred within + // a region binder, so subtract one from the region depth + // within when adding the depth to `self`. + let outer_exclusive_binder = computation.outer_exclusive_binder; + if outer_exclusive_binder > ty::INNERMOST { + self.add_exclusive_binder(outer_exclusive_binder.shifted_out(1)); + } // otherwise, this binder captures nothing + } + + #[allow(rustc::usage_of_ty_tykind)] + fn add_kind(&mut self, kind: &ty::TyKind) { + match *kind { + ty::Bool + | ty::Char + | ty::Int(_) + | ty::Float(_) + | ty::Uint(_) + | ty::Never + | ty::Str + | ty::Foreign(..) => {} + + ty::Error(_) => self.add_flags(TypeFlags::HAS_ERROR), + + ty::Param(_) => { + self.add_flags(TypeFlags::HAS_TY_PARAM); + } + + ty::Closure(_, args) + | ty::Coroutine(_, args) + | ty::CoroutineClosure(_, args) + | ty::CoroutineWitness(_, args) => { + self.add_args(args.as_slice()); + } + + ty::Bound(debruijn, _) => { + self.add_bound_var(debruijn); + self.add_flags(TypeFlags::HAS_TY_BOUND); + } + + ty::Placeholder(..) => { + self.add_flags(TypeFlags::HAS_TY_PLACEHOLDER); + } + + ty::Infer(infer) => match infer { + ty::FreshTy(_) | ty::FreshIntTy(_) | ty::FreshFloatTy(_) => { + self.add_flags(TypeFlags::HAS_TY_FRESH) + } + + ty::TyVar(_) | ty::IntVar(_) | ty::FloatVar(_) => { + self.add_flags(TypeFlags::HAS_TY_INFER) + } + }, + + ty::Adt(_, args) => { + self.add_args(args.as_slice()); + } + + ty::Alias(kind, data) => { + self.add_flags(match kind { + ty::Projection => TypeFlags::HAS_TY_PROJECTION, + ty::Free => TypeFlags::HAS_TY_FREE_ALIAS, + ty::Opaque => TypeFlags::HAS_TY_OPAQUE, + ty::Inherent => TypeFlags::HAS_TY_INHERENT, + }); + + self.add_alias_ty(data); + } + + ty::Dynamic(obj, r, _) => { + for predicate in obj.iter() { + self.bound_computation(predicate, |computation, predicate| match predicate { + ty::ExistentialPredicate::Trait(tr) => { + computation.add_args(tr.args.as_slice()) + } + ty::ExistentialPredicate::Projection(p) => { + computation.add_existential_projection(&p); + } + ty::ExistentialPredicate::AutoTrait(_) => {} + }); + } + + self.add_region(r); + } + + ty::Array(tt, len) => { + self.add_ty(tt); + self.add_const(len); + } + + ty::Pat(ty, pat) => { + self.add_ty(ty); + self.add_ty_pat(pat); + } + + ty::Slice(tt) => self.add_ty(tt), + + ty::RawPtr(ty, _) => { + self.add_ty(ty); + } + + ty::Ref(r, ty, _) => { + self.add_region(r); + self.add_ty(ty); + } + + ty::Tuple(types) => { + self.add_tys(types); + } + + ty::FnDef(_, args) => { + self.add_args(args.as_slice()); + } + + ty::FnPtr(sig_tys, _) => self.bound_computation(sig_tys, |computation, sig_tys| { + computation.add_tys(sig_tys.inputs_and_output); + }), + + ty::UnsafeBinder(bound_ty) => { + self.bound_computation(bound_ty.into(), |computation, ty| { + computation.add_ty(ty); + }) + } + } + } + + fn add_ty_pat(&mut self, pat: ::Pat) { + self.add_flags(pat.flags()); + } + + fn add_predicate(&mut self, binder: ty::Binder>) { + self.bound_computation(binder, |computation, atom| computation.add_predicate_atom(atom)); + } + + fn add_predicate_atom(&mut self, atom: ty::PredicateKind) { + match atom { + ty::PredicateKind::Clause(ty::ClauseKind::Trait(trait_pred)) => { + self.add_args(trait_pred.trait_ref.args.as_slice()); + } + ty::PredicateKind::Clause(ty::ClauseKind::HostEffect(ty::HostEffectPredicate { + trait_ref, + constness: _, + })) => { + self.add_args(trait_ref.args.as_slice()); + } + ty::PredicateKind::Clause(ty::ClauseKind::RegionOutlives(ty::OutlivesPredicate( + a, + b, + ))) => { + self.add_region(a); + self.add_region(b); + } + ty::PredicateKind::Clause(ty::ClauseKind::TypeOutlives(ty::OutlivesPredicate( + ty, + region, + ))) => { + self.add_ty(ty); + self.add_region(region); + } + ty::PredicateKind::Clause(ty::ClauseKind::ConstArgHasType(ct, ty)) => { + self.add_const(ct); + self.add_ty(ty); + } + ty::PredicateKind::Subtype(ty::SubtypePredicate { a_is_expected: _, a, b }) => { + self.add_ty(a); + self.add_ty(b); + } + ty::PredicateKind::Coerce(ty::CoercePredicate { a, b }) => { + self.add_ty(a); + self.add_ty(b); + } + ty::PredicateKind::Clause(ty::ClauseKind::Projection(ty::ProjectionPredicate { + projection_term, + term, + })) => { + self.add_alias_term(projection_term); + self.add_term(term); + } + ty::PredicateKind::Clause(ty::ClauseKind::WellFormed(term)) => { + self.add_term(term); + } + ty::PredicateKind::DynCompatible(_def_id) => {} + ty::PredicateKind::Clause(ty::ClauseKind::ConstEvaluatable(uv)) => { + self.add_const(uv); + } + ty::PredicateKind::ConstEquate(expected, found) => { + self.add_const(expected); + self.add_const(found); + } + ty::PredicateKind::Ambiguous => {} + ty::PredicateKind::NormalizesTo(ty::NormalizesTo { alias, term }) => { + self.add_alias_term(alias); + self.add_term(term); + } + ty::PredicateKind::AliasRelate(t1, t2, _) => { + self.add_term(t1); + self.add_term(t2); + } + } + } + + fn add_ty(&mut self, ty: I::Ty) { + self.add_flags(ty.flags()); + self.add_exclusive_binder(ty.outer_exclusive_binder()); + } + + fn add_tys(&mut self, tys: I::Tys) { + for ty in tys.iter() { + self.add_ty(ty); + } + } + + fn add_region(&mut self, r: I::Region) { + self.add_flags(r.flags()); + if let ty::ReBound(debruijn, _) = r.kind() { + self.add_bound_var(debruijn); + } + } + + fn add_const(&mut self, c: I::Const) { + self.add_flags(c.flags()); + self.add_exclusive_binder(c.outer_exclusive_binder()); + } + + fn add_const_kind(&mut self, c: &ty::ConstKind) { + match *c { + ty::ConstKind::Unevaluated(uv) => { + self.add_args(uv.args.as_slice()); + self.add_flags(TypeFlags::HAS_CT_PROJECTION); + } + ty::ConstKind::Infer(infer) => match infer { + ty::InferConst::Fresh(_) => self.add_flags(TypeFlags::HAS_CT_FRESH), + ty::InferConst::Var(_) => self.add_flags(TypeFlags::HAS_CT_INFER), + }, + ty::ConstKind::Bound(debruijn, _) => { + self.add_bound_var(debruijn); + self.add_flags(TypeFlags::HAS_CT_BOUND); + } + ty::ConstKind::Param(_) => { + self.add_flags(TypeFlags::HAS_CT_PARAM); + } + ty::ConstKind::Placeholder(_) => { + self.add_flags(TypeFlags::HAS_CT_PLACEHOLDER); + } + ty::ConstKind::Value(cv) => self.add_ty(cv.ty()), + ty::ConstKind::Expr(e) => self.add_args(e.args().as_slice()), + ty::ConstKind::Error(_) => self.add_flags(TypeFlags::HAS_ERROR), + } + } + + fn add_existential_projection(&mut self, projection: &ty::ExistentialProjection) { + self.add_args(projection.args.as_slice()); + match projection.term.kind() { + ty::TermKind::Ty(ty) => self.add_ty(ty), + ty::TermKind::Const(ct) => self.add_const(ct), + } + } + + fn add_alias_ty(&mut self, alias_ty: ty::AliasTy) { + self.add_args(alias_ty.args.as_slice()); + } + + fn add_alias_term(&mut self, alias_term: ty::AliasTerm) { + self.add_args(alias_term.args.as_slice()); + } + + fn add_args(&mut self, args: &[I::GenericArg]) { + for kind in args { + match kind.kind() { + ty::GenericArgKind::Type(ty) => self.add_ty(ty), + ty::GenericArgKind::Lifetime(lt) => self.add_region(lt), + ty::GenericArgKind::Const(ct) => self.add_const(ct), + } + } + } + + fn add_term(&mut self, term: I::Term) { + match term.kind() { + ty::TermKind::Ty(ty) => self.add_ty(ty), + ty::TermKind::Const(ct) => self.add_const(ct), + } + } +} diff --git a/compiler/rustc_type_ir/src/fold.rs b/compiler/rustc_type_ir/src/fold.rs index 0bc8b94bbf4cc..ce1188070ca7d 100644 --- a/compiler/rustc_type_ir/src/fold.rs +++ b/compiler/rustc_type_ir/src/fold.rs @@ -45,6 +45,7 @@ //! - u.fold_with(folder) //! ``` +use std::convert::Infallible; use std::mem; use std::sync::Arc; @@ -54,13 +55,7 @@ use tracing::{debug, instrument}; use crate::inherent::*; use crate::visit::{TypeVisitable, TypeVisitableExt as _}; -use crate::{self as ty, Interner}; - -#[cfg(feature = "nightly")] -type Never = !; - -#[cfg(not(feature = "nightly"))] -type Never = std::convert::Infallible; +use crate::{self as ty, Interner, TypeFlags}; /// This trait is implemented for every type that can be folded, /// providing the skeleton of the traversal. @@ -82,18 +77,24 @@ pub trait TypeFoldable: TypeVisitable + Clone { /// /// For types of interest (such as `Ty`), the implementation of this method /// calls a folder method specifically for that type (such as - /// `F::try_fold_ty`). This is where control transfers from `TypeFoldable` - /// to `TypeFolder`. + /// `F::try_fold_ty`). This is where control transfers from [`TypeFoldable`] + /// to [`FallibleTypeFolder`]. fn try_fold_with>(self, folder: &mut F) -> Result; - /// A convenient alternative to `try_fold_with` for use with infallible - /// folders. Do not override this method, to ensure coherence with - /// `try_fold_with`. - fn fold_with>(self, folder: &mut F) -> Self { - match self.try_fold_with(folder) { - Ok(t) => t, - } - } + /// The entry point for folding. To fold a value `t` with a folder `f` + /// call: `t.fold_with(f)`. + /// + /// For most types, this just traverses the value, calling `fold_with` + /// on each field/element. + /// + /// For types of interest (such as `Ty`), the implementation of this method + /// calls a folder method specifically for that type (such as + /// `F::fold_ty`). This is where control transfers from `TypeFoldable` + /// to `TypeFolder`. + /// + /// Same as [`TypeFoldable::try_fold_with`], but not fallible. Make sure to keep + /// the behavior in sync across functions. + fn fold_with>(self, folder: &mut F) -> Self; } // This trait is implemented for types of interest. @@ -112,11 +113,7 @@ pub trait TypeSuperFoldable: TypeFoldable { /// A convenient alternative to `try_super_fold_with` for use with /// infallible folders. Do not override this method, to ensure coherence /// with `try_super_fold_with`. - fn super_fold_with>(self, folder: &mut F) -> Self { - match self.try_super_fold_with(folder) { - Ok(t) => t, - } - } + fn super_fold_with>(self, folder: &mut F) -> Self; } /// This trait is implemented for every infallible folding traversal. There is @@ -128,7 +125,7 @@ pub trait TypeSuperFoldable: TypeFoldable { /// A blanket implementation of [`FallibleTypeFolder`] will defer to /// the infallible methods of this trait to ensure that the two APIs /// are coherent. -pub trait TypeFolder: FallibleTypeFolder { +pub trait TypeFolder: Sized { fn cx(&self) -> I; fn fold_binder(&mut self, t: ty::Binder) -> ty::Binder @@ -195,42 +192,6 @@ pub trait FallibleTypeFolder: Sized { } } -// This blanket implementation of the fallible trait for infallible folders -// delegates to infallible methods to ensure coherence. -impl FallibleTypeFolder for F -where - F: TypeFolder, -{ - type Error = Never; - - fn cx(&self) -> I { - TypeFolder::cx(self) - } - - fn try_fold_binder(&mut self, t: ty::Binder) -> Result, Never> - where - T: TypeFoldable, - { - Ok(self.fold_binder(t)) - } - - fn try_fold_ty(&mut self, t: I::Ty) -> Result { - Ok(self.fold_ty(t)) - } - - fn try_fold_region(&mut self, r: I::Region) -> Result { - Ok(self.fold_region(r)) - } - - fn try_fold_const(&mut self, c: I::Const) -> Result { - Ok(self.fold_const(c)) - } - - fn try_fold_predicate(&mut self, p: I::Predicate) -> Result { - Ok(self.fold_predicate(p)) - } -} - /////////////////////////////////////////////////////////////////////////// // Traversal implementations. @@ -238,6 +199,10 @@ impl, U: TypeFoldable> TypeFoldable for (T fn try_fold_with>(self, folder: &mut F) -> Result<(T, U), F::Error> { Ok((self.0.try_fold_with(folder)?, self.1.try_fold_with(folder)?)) } + + fn fold_with>(self, folder: &mut F) -> Self { + (self.0.fold_with(folder), self.1.fold_with(folder)) + } } impl, B: TypeFoldable, C: TypeFoldable> TypeFoldable @@ -253,6 +218,10 @@ impl, B: TypeFoldable, C: TypeFoldable> Ty self.2.try_fold_with(folder)?, )) } + + fn fold_with>(self, folder: &mut F) -> Self { + (self.0.fold_with(folder), self.1.fold_with(folder), self.2.fold_with(folder)) + } } impl> TypeFoldable for Option { @@ -262,6 +231,10 @@ impl> TypeFoldable for Option { None => None, }) } + + fn fold_with>(self, folder: &mut F) -> Self { + Some(self?.fold_with(folder)) + } } impl, E: TypeFoldable> TypeFoldable for Result { @@ -271,41 +244,61 @@ impl, E: TypeFoldable> TypeFoldable for Re Err(e) => Err(e.try_fold_with(folder)?), }) } + + fn fold_with>(self, folder: &mut F) -> Self { + match self { + Ok(v) => Ok(v.fold_with(folder)), + Err(e) => Err(e.fold_with(folder)), + } + } +} + +fn fold_arc( + mut arc: Arc, + fold: impl FnOnce(T) -> Result, +) -> Result, E> { + // We merely want to replace the contained `T`, if at all possible, + // so that we don't needlessly allocate a new `Arc` or indeed clone + // the contained type. + unsafe { + // First step is to ensure that we have a unique reference to + // the contained type, which `Arc::make_mut` will accomplish (by + // allocating a new `Arc` and cloning the `T` only if required). + // This is done *before* casting to `Arc>` so that + // panicking during `make_mut` does not leak the `T`. + Arc::make_mut(&mut arc); + + // Casting to `Arc>` is safe because `ManuallyDrop` + // is `repr(transparent)`. + let ptr = Arc::into_raw(arc).cast::>(); + let mut unique = Arc::from_raw(ptr); + + // Call to `Arc::make_mut` above guarantees that `unique` is the + // sole reference to the contained value, so we can avoid doing + // a checked `get_mut` here. + let slot = Arc::get_mut(&mut unique).unwrap_unchecked(); + + // Semantically move the contained type out from `unique`, fold + // it, then move the folded value back into `unique`. Should + // folding fail, `ManuallyDrop` ensures that the "moved-out" + // value is not re-dropped. + let owned = mem::ManuallyDrop::take(slot); + let folded = fold(owned)?; + *slot = mem::ManuallyDrop::new(folded); + + // Cast back to `Arc`. + Ok(Arc::from_raw(Arc::into_raw(unique).cast())) + } } impl> TypeFoldable for Arc { - fn try_fold_with>(mut self, folder: &mut F) -> Result { - // We merely want to replace the contained `T`, if at all possible, - // so that we don't needlessly allocate a new `Arc` or indeed clone - // the contained type. - unsafe { - // First step is to ensure that we have a unique reference to - // the contained type, which `Arc::make_mut` will accomplish (by - // allocating a new `Arc` and cloning the `T` only if required). - // This is done *before* casting to `Arc>` so that - // panicking during `make_mut` does not leak the `T`. - Arc::make_mut(&mut self); - - // Casting to `Arc>` is safe because `ManuallyDrop` - // is `repr(transparent)`. - let ptr = Arc::into_raw(self).cast::>(); - let mut unique = Arc::from_raw(ptr); - - // Call to `Arc::make_mut` above guarantees that `unique` is the - // sole reference to the contained value, so we can avoid doing - // a checked `get_mut` here. - let slot = Arc::get_mut(&mut unique).unwrap_unchecked(); - - // Semantically move the contained type out from `unique`, fold - // it, then move the folded value back into `unique`. Should - // folding fail, `ManuallyDrop` ensures that the "moved-out" - // value is not re-dropped. - let owned = mem::ManuallyDrop::take(slot); - let folded = owned.try_fold_with(folder)?; - *slot = mem::ManuallyDrop::new(folded); - - // Cast back to `Arc`. - Ok(Arc::from_raw(Arc::into_raw(unique).cast())) + fn try_fold_with>(self, folder: &mut F) -> Result { + fold_arc(self, |t| t.try_fold_with(folder)) + } + + fn fold_with>(self, folder: &mut F) -> Self { + match fold_arc::(self, |t| Ok(t.fold_with(folder))) { + Ok(t) => t, } } } @@ -315,30 +308,51 @@ impl> TypeFoldable for Box { *self = (*self).try_fold_with(folder)?; Ok(self) } + + fn fold_with>(mut self, folder: &mut F) -> Self { + *self = (*self).fold_with(folder); + self + } } impl> TypeFoldable for Vec { fn try_fold_with>(self, folder: &mut F) -> Result { self.into_iter().map(|t| t.try_fold_with(folder)).collect() } + + fn fold_with>(self, folder: &mut F) -> Self { + self.into_iter().map(|t| t.fold_with(folder)).collect() + } } impl> TypeFoldable for ThinVec { fn try_fold_with>(self, folder: &mut F) -> Result { self.into_iter().map(|t| t.try_fold_with(folder)).collect() } + + fn fold_with>(self, folder: &mut F) -> Self { + self.into_iter().map(|t| t.fold_with(folder)).collect() + } } impl> TypeFoldable for Box<[T]> { fn try_fold_with>(self, folder: &mut F) -> Result { Vec::from(self).try_fold_with(folder).map(Vec::into_boxed_slice) } + + fn fold_with>(self, folder: &mut F) -> Self { + Vec::into_boxed_slice(Vec::from(self).fold_with(folder)) + } } impl, Ix: Idx> TypeFoldable for IndexVec { fn try_fold_with>(self, folder: &mut F) -> Result { self.raw.try_fold_with(folder).map(IndexVec::from_raw) } + + fn fold_with>(self, folder: &mut F) -> Self { + IndexVec::from_raw(self.raw.fold_with(folder)) + } } /////////////////////////////////////////////////////////////////////////// @@ -438,12 +452,12 @@ where pub fn fold_regions( cx: I, value: T, - mut f: impl FnMut(I::Region, ty::DebruijnIndex) -> I::Region, + f: impl FnMut(I::Region, ty::DebruijnIndex) -> I::Region, ) -> T where T: TypeFoldable, { - value.fold_with(&mut RegionFolder::new(cx, &mut f)) + value.fold_with(&mut RegionFolder::new(cx, f)) } /// Folds over the substructure of a type, visiting its component @@ -453,7 +467,7 @@ where /// new bound regions which are not visited by this visitors as /// they are not free; only regions that occur free will be /// visited by `fld_r`. -pub struct RegionFolder<'a, I: Interner> { +pub struct RegionFolder { cx: I, /// Stores the index of a binder *just outside* the stuff we have @@ -464,20 +478,21 @@ pub struct RegionFolder<'a, I: Interner> { /// Callback invokes for each free region. The `DebruijnIndex` /// points to the binder *just outside* the ones we have passed /// through. - fold_region_fn: &'a mut (dyn FnMut(I::Region, ty::DebruijnIndex) -> I::Region + 'a), + fold_region_fn: F, } -impl<'a, I: Interner> RegionFolder<'a, I> { +impl RegionFolder { #[inline] - pub fn new( - cx: I, - fold_region_fn: &'a mut dyn FnMut(I::Region, ty::DebruijnIndex) -> I::Region, - ) -> RegionFolder<'a, I> { + pub fn new(cx: I, fold_region_fn: F) -> RegionFolder { RegionFolder { cx, current_index: ty::INNERMOST, fold_region_fn } } } -impl<'a, I: Interner> TypeFolder for RegionFolder<'a, I> { +impl TypeFolder for RegionFolder +where + I: Interner, + F: FnMut(I::Region, ty::DebruijnIndex) -> I::Region, +{ fn cx(&self) -> I { self.cx } @@ -502,4 +517,34 @@ impl<'a, I: Interner> TypeFolder for RegionFolder<'a, I> { } } } + + fn fold_ty(&mut self, t: I::Ty) -> I::Ty { + if t.has_type_flags( + TypeFlags::HAS_FREE_REGIONS | TypeFlags::HAS_RE_BOUND | TypeFlags::HAS_RE_ERASED, + ) { + t.super_fold_with(self) + } else { + t + } + } + + fn fold_const(&mut self, ct: I::Const) -> I::Const { + if ct.has_type_flags( + TypeFlags::HAS_FREE_REGIONS | TypeFlags::HAS_RE_BOUND | TypeFlags::HAS_RE_ERASED, + ) { + ct.super_fold_with(self) + } else { + ct + } + } + + fn fold_predicate(&mut self, p: I::Predicate) -> I::Predicate { + if p.has_type_flags( + TypeFlags::HAS_FREE_REGIONS | TypeFlags::HAS_RE_BOUND | TypeFlags::HAS_RE_ERASED, + ) { + p.super_fold_with(self) + } else { + p + } + } } diff --git a/compiler/rustc_type_ir/src/infer_ctxt.rs b/compiler/rustc_type_ir/src/infer_ctxt.rs index e512e8fc838f1..c149076211739 100644 --- a/compiler/rustc_type_ir/src/infer_ctxt.rs +++ b/compiler/rustc_type_ir/src/infer_ctxt.rs @@ -1,3 +1,5 @@ +use std::fmt::Debug; + use derive_where::derive_where; #[cfg(feature = "nightly")] use rustc_macros::{Decodable_NoContext, Encodable_NoContext, HashStable_NoContext}; @@ -65,13 +67,21 @@ pub enum TypingMode { /// let x: <() as Assoc>::Output = true; /// } /// ``` - Analysis { defining_opaque_types: I::DefiningOpaqueTypes }, + Analysis { defining_opaque_types_and_generators: I::LocalDefIds }, + /// The behavior during MIR borrowck is identical to `TypingMode::Analysis` + /// except that the initial value for opaque types is the type computed during + /// HIR typeck with unique unconstrained region inference variables. + /// + /// This is currently only used with by the new solver as it results in new + /// non-universal defining uses of opaque types, which is a breaking change. + /// See tests/ui/impl-trait/non-defining-use/as-projection-term.rs. + Borrowck { defining_opaque_types: I::LocalDefIds }, /// Any analysis after borrowck for a given body should be able to use all the /// hidden types defined by borrowck, without being able to define any new ones. /// /// This is currently only used by the new solver, but should be implemented in /// the old solver as well. - PostBorrowckAnalysis { defined_opaque_types: I::DefiningOpaqueTypes }, + PostBorrowckAnalysis { defined_opaque_types: I::LocalDefIds }, /// After analysis, mostly during codegen and MIR optimizations, we're able to /// reveal all opaque types. As the concrete type should *never* be observable /// directly by the user, this should not be used by checks which may expose @@ -86,13 +96,29 @@ pub enum TypingMode { impl TypingMode { /// Analysis outside of a body does not define any opaque types. pub fn non_body_analysis() -> TypingMode { - TypingMode::Analysis { defining_opaque_types: Default::default() } + TypingMode::Analysis { defining_opaque_types_and_generators: Default::default() } + } + + pub fn typeck_for_body(cx: I, body_def_id: I::LocalDefId) -> TypingMode { + TypingMode::Analysis { + defining_opaque_types_and_generators: cx + .opaque_types_and_coroutines_defined_by(body_def_id), + } } /// While typechecking a body, we need to be able to define the opaque /// types defined by that body. + /// + /// FIXME: This will be removed because it's generally not correct to define + /// opaques outside of HIR typeck. pub fn analysis_in_body(cx: I, body_def_id: I::LocalDefId) -> TypingMode { - TypingMode::Analysis { defining_opaque_types: cx.opaque_types_defined_by(body_def_id) } + TypingMode::Analysis { + defining_opaque_types_and_generators: cx.opaque_types_defined_by(body_def_id), + } + } + + pub fn borrowck(cx: I, body_def_id: I::LocalDefId) -> TypingMode { + TypingMode::Borrowck { defining_opaque_types: cx.opaque_types_defined_by(body_def_id) } } pub fn post_borrowck_analysis(cx: I, body_def_id: I::LocalDefId) -> TypingMode { @@ -221,4 +247,32 @@ pub trait InferCtxtLike: Sized { r: ::Region, span: ::Span, ); + + type OpaqueTypeStorageEntries: Debug + Copy + Default; + fn opaque_types_storage_num_entries(&self) -> Self::OpaqueTypeStorageEntries; + fn clone_opaque_types_lookup_table( + &self, + ) -> Vec<(ty::OpaqueTypeKey, ::Ty)>; + fn clone_duplicate_opaque_types( + &self, + ) -> Vec<(ty::OpaqueTypeKey, ::Ty)>; + fn clone_opaque_types_added_since( + &self, + prev_entries: Self::OpaqueTypeStorageEntries, + ) -> Vec<(ty::OpaqueTypeKey, ::Ty)>; + + fn register_hidden_type_in_storage( + &self, + opaque_type_key: ty::OpaqueTypeKey, + hidden_ty: ::Ty, + span: ::Span, + ) -> Option<::Ty>; + fn add_duplicate_opaque_type( + &self, + opaque_type_key: ty::OpaqueTypeKey, + hidden_ty: ::Ty, + span: ::Span, + ); + + fn reset_opaque_types(&self); } diff --git a/compiler/rustc_type_ir/src/inherent.rs b/compiler/rustc_type_ir/src/inherent.rs index d4134bdf3a782..e6e6466766beb 100644 --- a/compiler/rustc_type_ir/src/inherent.rs +++ b/compiler/rustc_type_ir/src/inherent.rs @@ -146,36 +146,18 @@ pub trait Ty>: fn has_unsafe_fields(self) -> bool; fn fn_sig(self, interner: I) -> ty::Binder> { - match self.kind() { - ty::FnPtr(sig_tys, hdr) => sig_tys.with(hdr), - ty::FnDef(def_id, args) => interner.fn_sig(def_id).instantiate(interner, args), - ty::Error(_) => { - // ignore errors (#54954) - ty::Binder::dummy(ty::FnSig { - inputs_and_output: Default::default(), - c_variadic: false, - safety: I::Safety::safe(), - abi: I::Abi::rust(), - }) - } - ty::Closure(..) => panic!( - "to get the signature of a closure, use `args.as_closure().sig()` not `fn_sig()`", - ), - _ => panic!("Ty::fn_sig() called on non-fn type: {:?}", self), - } + self.kind().fn_sig(interner) } fn discriminant_ty(self, interner: I) -> I::Ty; - fn async_destructor_ty(self, interner: I) -> I::Ty; - - /// Returns `true` when the outermost type cannot be further normalized, - /// resolved, or instantiated. This includes all primitive types, but also - /// things like ADTs and trait objects, since even if their arguments or - /// nested types may be further simplified, the outermost [`ty::TyKind`] or - /// type constructor remains the same. fn is_known_rigid(self) -> bool { + self.kind().is_known_rigid() + } + + fn is_guaranteed_unsized_raw(self) -> bool { match self.kind() { + ty::Dynamic(_, _, ty::Dyn) | ty::Slice(_) | ty::Str => true, ty::Bool | ty::Char | ty::Int(_) @@ -183,29 +165,26 @@ pub trait Ty>: | ty::Float(_) | ty::Adt(_, _) | ty::Foreign(_) - | ty::Str | ty::Array(_, _) | ty::Pat(_, _) - | ty::Slice(_) | ty::RawPtr(_, _) | ty::Ref(_, _, _) | ty::FnDef(_, _) - | ty::FnPtr(..) + | ty::FnPtr(_, _) | ty::UnsafeBinder(_) - | ty::Dynamic(_, _, _) | ty::Closure(_, _) | ty::CoroutineClosure(_, _) | ty::Coroutine(_, _) - | ty::CoroutineWitness(..) + | ty::CoroutineWitness(_, _) | ty::Never - | ty::Tuple(_) => true, - - ty::Error(_) - | ty::Infer(_) + | ty::Tuple(_) | ty::Alias(_, _) | ty::Param(_) | ty::Bound(_, _) - | ty::Placeholder(_) => false, + | ty::Placeholder(_) + | ty::Infer(_) + | ty::Error(_) + | ty::Dynamic(_, _, ty::DynStar) => false, } } } @@ -462,6 +441,14 @@ pub trait Predicate>: { fn as_clause(self) -> Option; + fn as_normalizes_to(self) -> Option>> { + let kind = self.kind(); + match kind.skip_binder() { + ty::PredicateKind::NormalizesTo(pred) => Some(kind.rebind(pred)), + _ => None, + } + } + // FIXME: Eventually uplift the impl out of rustc and make this defaulted. fn allow_normalization(self) -> bool; } @@ -603,7 +590,7 @@ pub trait Span: Copy + Debug + Hash + Eq + TypeFoldable { pub trait SliceLike: Sized + Copy { type Item: Copy; - type IntoIter: Iterator; + type IntoIter: Iterator + DoubleEndedIterator; fn iter(self) -> Self::IntoIter; diff --git a/compiler/rustc_type_ir/src/interner.rs b/compiler/rustc_type_ir/src/interner.rs index 8f86270d7dce7..0fd2d9f3ad38b 100644 --- a/compiler/rustc_type_ir/src/interner.rs +++ b/compiler/rustc_type_ir/src/interner.rs @@ -31,6 +31,7 @@ pub trait Interner: + IrPrint> + IrPrint> + IrPrint> + + IrPrint> { type DefId: DefId; type LocalDefId: Copy + Debug + Hash + Eq + Into + TypeFoldable; @@ -55,7 +56,7 @@ pub trait Interner: data: PredefinedOpaquesData, ) -> Self::PredefinedOpaques; - type DefiningOpaqueTypes: Copy + type LocalDefIds: Copy + Debug + Hash + Default @@ -104,7 +105,21 @@ pub trait Interner: type ErrorGuaranteed: Copy + Debug + Hash + Eq; type BoundExistentialPredicates: BoundExistentialPredicates; type AllocId: Copy + Debug + Hash + Eq; - type Pat: Copy + Debug + Hash + Eq + Debug + Relate; + type Pat: Copy + + Debug + + Hash + + Eq + + Debug + + Relate + + Flags + + IntoKind>; + type PatList: Copy + + Debug + + Hash + + Default + + Eq + + TypeVisitable + + SliceLike; type Safety: Safety; type Abi: Abi; @@ -149,6 +164,8 @@ pub trait Interner: ) -> Option; fn type_of(self, def_id: Self::DefId) -> ty::EarlyBinder; + fn type_of_opaque_hir_typeck(self, def_id: Self::LocalDefId) + -> ty::EarlyBinder; type AdtDef: AdtDef; fn adt_def(self, adt_def_id: Self::DefId) -> Self::AdtDef; @@ -253,12 +270,16 @@ pub trait Interner: def_id: Self::DefId, ) -> ty::EarlyBinder>>>; + fn impl_self_is_guaranteed_unsized(self, def_id: Self::DefId) -> bool; + fn has_target_features(self, def_id: Self::DefId) -> bool; fn require_lang_item(self, lang_item: TraitSolverLangItem) -> Self::DefId; fn is_lang_item(self, def_id: Self::DefId, lang_item: TraitSolverLangItem) -> bool; + fn is_default_trait(self, def_id: Self::DefId) -> bool; + fn as_lang_item(self, def_id: Self::DefId) -> Option; fn associated_type_def_ids(self, def_id: Self::DefId) -> impl IntoIterator; @@ -272,6 +293,8 @@ pub trait Interner: fn has_item_definition(self, def_id: Self::DefId) -> bool; + fn impl_specializes(self, impl_def_id: Self::DefId, victim_def_id: Self::DefId) -> bool; + fn impl_is_default(self, impl_def_id: Self::DefId) -> bool; fn impl_trait_ref(self, impl_def_id: Self::DefId) -> ty::EarlyBinder>; @@ -316,10 +339,12 @@ pub trait Interner: binder: ty::Binder, ) -> ty::Binder; - fn opaque_types_defined_by( + fn opaque_types_defined_by(self, defining_anchor: Self::LocalDefId) -> Self::LocalDefIds; + + fn opaque_types_and_coroutines_defined_by( self, defining_anchor: Self::LocalDefId, - ) -> Self::DefiningOpaqueTypes; + ) -> Self::LocalDefIds; } /// Imagine you have a function `F: FnOnce(&[T]) -> R`, plus an iterator `iter` diff --git a/compiler/rustc_type_ir/src/ir_print.rs b/compiler/rustc_type_ir/src/ir_print.rs index 0c71f3a3df2a2..388ad09cb200c 100644 --- a/compiler/rustc_type_ir/src/ir_print.rs +++ b/compiler/rustc_type_ir/src/ir_print.rs @@ -2,8 +2,8 @@ use std::fmt; use crate::{ AliasTerm, AliasTy, Binder, CoercePredicate, ExistentialProjection, ExistentialTraitRef, FnSig, - HostEffectPredicate, Interner, NormalizesTo, OutlivesPredicate, ProjectionPredicate, - SubtypePredicate, TraitPredicate, TraitRef, + HostEffectPredicate, Interner, NormalizesTo, OutlivesPredicate, PatternKind, + ProjectionPredicate, SubtypePredicate, TraitPredicate, TraitRef, }; pub trait IrPrint { @@ -57,9 +57,10 @@ define_display_via_print!( AliasTy, AliasTerm, FnSig, + PatternKind, ); -define_debug_via_print!(TraitRef, ExistentialTraitRef, ExistentialProjection); +define_debug_via_print!(TraitRef, ExistentialTraitRef, PatternKind); impl fmt::Display for OutlivesPredicate where diff --git a/compiler/rustc_type_ir/src/lang_items.rs b/compiler/rustc_type_ir/src/lang_items.rs index 65f7cdf8f922b..699dd82fb22c9 100644 --- a/compiler/rustc_type_ir/src/lang_items.rs +++ b/compiler/rustc_type_ir/src/lang_items.rs @@ -2,7 +2,6 @@ /// representation of `LangItem`s used in the underlying compiler implementation. pub enum TraitSolverLangItem { // tidy-alphabetical-start - AsyncDestruct, AsyncFn, AsyncFnKindHelper, AsyncFnKindUpvars, diff --git a/compiler/rustc_type_ir/src/lib.rs b/compiler/rustc_type_ir/src/lib.rs index 4e2baca27854f..792090effcff1 100644 --- a/compiler/rustc_type_ir/src/lib.rs +++ b/compiler/rustc_type_ir/src/lib.rs @@ -1,12 +1,12 @@ // tidy-alphabetical-start #![allow(rustc::usage_of_ty_tykind)] #![allow(rustc::usage_of_type_ir_inherent)] +#![allow(rustc::usage_of_type_ir_traits)] #![cfg_attr( feature = "nightly", feature(associated_type_defaults, never_type, rustc_attrs, negative_impls) )] #![cfg_attr(feature = "nightly", allow(internal_features))] -#![cfg_attr(not(bootstrap), allow(rustc::usage_of_type_ir_traits))] // tidy-alphabetical-end extern crate self as rustc_type_ir; @@ -31,6 +31,7 @@ pub mod outlives; pub mod relate; pub mod search_graph; pub mod solve; +pub mod walk; // These modules are not `pub` since they are glob-imported. #[macro_use] @@ -44,6 +45,7 @@ mod generic_arg; mod infer_ctxt; mod interner; mod opaque_ty; +mod pattern; mod predicate; mod predicate_kind; mod region_kind; @@ -67,6 +69,7 @@ pub use generic_arg::*; pub use infer_ctxt::*; pub use interner::*; pub use opaque_ty::*; +pub use pattern::*; pub use predicate::*; pub use predicate_kind::*; pub use region_kind::*; diff --git a/compiler/rustc_type_ir/src/pattern.rs b/compiler/rustc_type_ir/src/pattern.rs new file mode 100644 index 0000000000000..7e56565917c67 --- /dev/null +++ b/compiler/rustc_type_ir/src/pattern.rs @@ -0,0 +1,17 @@ +use derive_where::derive_where; +#[cfg(feature = "nightly")] +use rustc_macros::{Decodable_NoContext, Encodable_NoContext, HashStable_NoContext}; +use rustc_type_ir_macros::{Lift_Generic, TypeFoldable_Generic, TypeVisitable_Generic}; + +use crate::Interner; + +#[derive_where(Clone, Copy, Hash, PartialEq, Eq; I: Interner)] +#[derive(TypeVisitable_Generic, TypeFoldable_Generic, Lift_Generic)] +#[cfg_attr( + feature = "nightly", + derive(Decodable_NoContext, Encodable_NoContext, HashStable_NoContext) +)] +pub enum PatternKind { + Range { start: I::Const, end: I::Const }, + Or(I::PatList), +} diff --git a/compiler/rustc_type_ir/src/predicate.rs b/compiler/rustc_type_ir/src/predicate.rs index 22d0fa23d0c56..b59495b93c836 100644 --- a/compiler/rustc_type_ir/src/predicate.rs +++ b/compiler/rustc_type_ir/src/predicate.rs @@ -374,7 +374,7 @@ impl ty::Binder> { } /// A `ProjectionPredicate` for an `ExistentialTraitRef`. -#[derive_where(Clone, Copy, Hash, PartialEq, Eq; I: Interner)] +#[derive_where(Clone, Copy, Hash, PartialEq, Eq, Debug; I: Interner)] #[derive(TypeVisitable_Generic, TypeFoldable_Generic, Lift_Generic)] #[cfg_attr( feature = "nightly", @@ -470,14 +470,19 @@ pub enum AliasTermKind { /// An opaque type (usually from `impl Trait` in type aliases or function return types) /// Can only be normalized away in PostAnalysis mode or its defining scope. OpaqueTy, - /// A type alias that actually checks its trait bounds. + /// A free type alias that actually checks its trait bounds. /// Currently only used if the type alias references opaque types. /// Can always be normalized away. - WeakTy, - /// An unevaluated const coming from a generic const expression. + FreeTy, + + /// An unevaluated anonymous constants. UnevaluatedConst, /// An unevaluated const coming from an associated const. ProjectionConst, + /// A top level const item not part of a trait or impl. + FreeConst, + /// An associated const in an inherent `impl` + InherentConst, } impl AliasTermKind { @@ -486,11 +491,27 @@ impl AliasTermKind { AliasTermKind::ProjectionTy => "associated type", AliasTermKind::ProjectionConst => "associated const", AliasTermKind::InherentTy => "inherent associated type", + AliasTermKind::InherentConst => "inherent associated const", AliasTermKind::OpaqueTy => "opaque type", - AliasTermKind::WeakTy => "type alias", + AliasTermKind::FreeTy => "type alias", + AliasTermKind::FreeConst => "unevaluated constant", AliasTermKind::UnevaluatedConst => "unevaluated constant", } } + + pub fn is_type(self) -> bool { + match self { + AliasTermKind::ProjectionTy + | AliasTermKind::InherentTy + | AliasTermKind::OpaqueTy + | AliasTermKind::FreeTy => true, + + AliasTermKind::UnevaluatedConst + | AliasTermKind::ProjectionConst + | AliasTermKind::InherentConst + | AliasTermKind::FreeConst => false, + } + } } impl From for AliasTermKind { @@ -498,7 +519,7 @@ impl From for AliasTermKind { match value { ty::Projection => AliasTermKind::ProjectionTy, ty::Opaque => AliasTermKind::OpaqueTy, - ty::Weak => AliasTermKind::WeakTy, + ty::Free => AliasTermKind::FreeTy, ty::Inherent => AliasTermKind::InherentTy, } } @@ -565,8 +586,11 @@ impl AliasTerm { AliasTermKind::ProjectionTy | AliasTermKind::InherentTy | AliasTermKind::OpaqueTy - | AliasTermKind::WeakTy => {} - AliasTermKind::UnevaluatedConst | AliasTermKind::ProjectionConst => { + | AliasTermKind::FreeTy => {} + AliasTermKind::InherentConst + | AliasTermKind::FreeConst + | AliasTermKind::UnevaluatedConst + | AliasTermKind::ProjectionConst => { panic!("Cannot turn `UnevaluatedConst` into `AliasTy`") } } @@ -597,24 +621,25 @@ impl AliasTerm { ty::AliasTy { def_id: self.def_id, args: self.args, _use_alias_ty_new_instead: () }, ) .into(), - AliasTermKind::WeakTy => Ty::new_alias( + AliasTermKind::FreeTy => Ty::new_alias( interner, - ty::AliasTyKind::Weak, + ty::AliasTyKind::Free, ty::AliasTy { def_id: self.def_id, args: self.args, _use_alias_ty_new_instead: () }, ) .into(), - AliasTermKind::UnevaluatedConst | AliasTermKind::ProjectionConst => { - I::Const::new_unevaluated( - interner, - ty::UnevaluatedConst::new(self.def_id, self.args), - ) - .into() - } + AliasTermKind::FreeConst + | AliasTermKind::InherentConst + | AliasTermKind::UnevaluatedConst + | AliasTermKind::ProjectionConst => I::Const::new_unevaluated( + interner, + ty::UnevaluatedConst::new(self.def_id, self.args), + ) + .into(), } } } -/// The following methods work only with (trait) associated type projections. +/// The following methods work only with (trait) associated term projections. impl AliasTerm { pub fn self_ty(self) -> I::Ty { self.args.type_at(0) @@ -659,6 +684,31 @@ impl AliasTerm { } } +/// The following methods work only with inherent associated term projections. +impl AliasTerm { + /// Transform the generic parameters to have the given `impl` args as the base and the GAT args on top of that. + /// + /// Does the following transformation: + /// + /// ```text + /// [Self, P_0...P_m] -> [I_0...I_n, P_0...P_m] + /// + /// I_i impl args + /// P_j GAT args + /// ``` + pub fn rebase_inherent_args_onto_impl( + self, + impl_args: I::GenericArgs, + interner: I, + ) -> I::GenericArgs { + debug_assert!(matches!( + self.kind(interner), + AliasTermKind::InherentTy | AliasTermKind::InherentConst + )); + interner.mk_args_from_iter(impl_args.iter().chain(self.args.iter().skip(1))) + } +} + impl From> for AliasTerm { fn from(ty: ty::AliasTy) -> Self { AliasTerm { args: ty.args, def_id: ty.def_id, _use_alias_term_new_instead: () } diff --git a/compiler/rustc_type_ir/src/predicate_kind.rs b/compiler/rustc_type_ir/src/predicate_kind.rs index 847dff156fe87..4e41fd16ffd76 100644 --- a/compiler/rustc_type_ir/src/predicate_kind.rs +++ b/compiler/rustc_type_ir/src/predicate_kind.rs @@ -36,7 +36,7 @@ pub enum ClauseKind { ConstArgHasType(I::Const, I::Ty), /// No syntax: `T` well-formed. - WellFormed(I::GenericArg), + WellFormed(I::Term), /// Constant initializer must evaluate successfully. ConstEvaluatable(I::Const), diff --git a/compiler/rustc_type_ir/src/relate.rs b/compiler/rustc_type_ir/src/relate.rs index d065384b58e23..e3c4a793b37f6 100644 --- a/compiler/rustc_type_ir/src/relate.rs +++ b/compiler/rustc_type_ir/src/relate.rs @@ -273,8 +273,10 @@ impl Relate for ty::AliasTerm { false, // do not fetch `type_of(a_def_id)`, as it will cause a cycle )?, ty::AliasTermKind::ProjectionTy - | ty::AliasTermKind::WeakTy + | ty::AliasTermKind::FreeConst + | ty::AliasTermKind::FreeTy | ty::AliasTermKind::InherentTy + | ty::AliasTermKind::InherentConst | ty::AliasTermKind::UnevaluatedConst | ty::AliasTermKind::ProjectionConst => { relate_args_invariantly(relation, a.args, b.args)? diff --git a/compiler/rustc_type_ir/src/relate/combine.rs b/compiler/rustc_type_ir/src/relate/combine.rs index d49f8d3093db7..8dd7c4df24421 100644 --- a/compiler/rustc_type_ir/src/relate/combine.rs +++ b/compiler/rustc_type_ir/src/relate/combine.rs @@ -137,6 +137,7 @@ where Ok(a) } TypingMode::Analysis { .. } + | TypingMode::Borrowck { .. } | TypingMode::PostBorrowckAnalysis { .. } | TypingMode::PostAnalysis => structurally_relate_tys(relation, a, b), } diff --git a/compiler/rustc_type_ir/src/solve/inspect.rs b/compiler/rustc_type_ir/src/solve/inspect.rs index 18fb71dd290e7..b10641b287d4a 100644 --- a/compiler/rustc_type_ir/src/solve/inspect.rs +++ b/compiler/rustc_type_ir/src/solve/inspect.rs @@ -118,10 +118,12 @@ pub enum ProbeKind { /// Used in the probe that wraps normalizing the non-self type for the unsize /// trait, which is also structurally matched on. UnsizeAssembly, - /// During upcasting from some source object to target object type, used to - /// do a probe to find out what projection type(s) may be used to prove that - /// the source type upholds all of the target type's object bounds. - UpcastProjectionCompatibility, + /// Used to do a probe to find out what projection type(s) match a given + /// alias bound or projection predicate. For trait upcasting, this is used + /// to prove that the source type upholds all of the target type's object + /// bounds. For object type bounds, this is used when eagerly replacing + /// supertrait aliases. + ProjectionCompatibility, /// Looking for param-env candidates that satisfy the trait ref for a projection. ShadowedEnvProbing, /// Try to unify an opaque type with an existing key in the storage. diff --git a/compiler/rustc_type_ir/src/solve/mod.rs b/compiler/rustc_type_ir/src/solve/mod.rs index 3aec4804b2799..2e05c23a6458c 100644 --- a/compiler/rustc_type_ir/src/solve/mod.rs +++ b/compiler/rustc_type_ir/src/solve/mod.rs @@ -83,8 +83,11 @@ pub enum GoalSource { /// Instantiating a higher-ranked goal and re-proving it. InstantiateHigherRanked, /// Predicate required for an alias projection to be well-formed. - /// This is used in two places: projecting to an opaque whose hidden type - /// is already registered in the opaque type storage, and for rigid projections. + /// This is used in three places: + /// 1. projecting to an opaque whose hidden type is already registered in + /// the opaque type storage, + /// 2. for rigid projections's trait goal, + /// 3. for GAT where clauses. AliasWellFormed, /// In case normalizing aliases in nested goals cycles, eagerly normalizing these /// aliases in the context of the parent may incorrectly change the cycle kind. @@ -144,9 +147,8 @@ pub enum CandidateSource { /// For a list of all traits with builtin impls, check out the /// `EvalCtxt::assemble_builtin_impl_candidates` method. BuiltinImpl(BuiltinImplSource), - /// An assumption from the environment. - /// - /// More precisely we've used the `n-th` assumption in the `param_env`. + /// An assumption from the environment. Stores a [`ParamEnvSource`], since we + /// prefer non-global param-env candidates in candidate assembly. /// /// ## Examples /// @@ -157,7 +159,7 @@ pub enum CandidateSource { /// (x.clone(), x) /// } /// ``` - ParamEnv(usize), + ParamEnv(ParamEnvSource), /// If the self type is an alias type, e.g. an opaque type or a projection, /// we know the bounds on that alias to hold even without knowing its concrete /// underlying type. @@ -186,6 +188,14 @@ pub enum CandidateSource { CoherenceUnknowable, } +#[derive(Clone, Copy, Hash, PartialEq, Eq, Debug)] +pub enum ParamEnvSource { + /// Preferred eagerly. + NonGlobal, + // Not considered unless there are non-global param-env candidates too. + Global, +} + #[derive(Clone, Copy, Hash, PartialEq, Eq, Debug)] #[cfg_attr( feature = "nightly", @@ -263,17 +273,17 @@ impl Certainty { /// however matter for diagnostics. If `T: Foo` resulted in overflow and `T: Bar` /// in ambiguity without changing the inference state, we still want to tell the /// user that `T: Baz` results in overflow. - pub fn unify_with(self, other: Certainty) -> Certainty { + pub fn and(self, other: Certainty) -> Certainty { match (self, other) { (Certainty::Yes, Certainty::Yes) => Certainty::Yes, (Certainty::Yes, Certainty::Maybe(_)) => other, (Certainty::Maybe(_), Certainty::Yes) => self, - (Certainty::Maybe(a), Certainty::Maybe(b)) => Certainty::Maybe(a.unify_with(b)), + (Certainty::Maybe(a), Certainty::Maybe(b)) => Certainty::Maybe(a.and(b)), } } pub const fn overflow(suggest_increasing_limit: bool) -> Certainty { - Certainty::Maybe(MaybeCause::Overflow { suggest_increasing_limit }) + Certainty::Maybe(MaybeCause::Overflow { suggest_increasing_limit, keep_constraints: false }) } } @@ -286,19 +296,58 @@ pub enum MaybeCause { /// or we hit a case where we just don't bother, e.g. `?x: Trait` goals. Ambiguity, /// We gave up due to an overflow, most often by hitting the recursion limit. - Overflow { suggest_increasing_limit: bool }, + Overflow { suggest_increasing_limit: bool, keep_constraints: bool }, } impl MaybeCause { - fn unify_with(self, other: MaybeCause) -> MaybeCause { + fn and(self, other: MaybeCause) -> MaybeCause { match (self, other) { (MaybeCause::Ambiguity, MaybeCause::Ambiguity) => MaybeCause::Ambiguity, (MaybeCause::Ambiguity, MaybeCause::Overflow { .. }) => other, (MaybeCause::Overflow { .. }, MaybeCause::Ambiguity) => self, ( - MaybeCause::Overflow { suggest_increasing_limit: a }, - MaybeCause::Overflow { suggest_increasing_limit: b }, - ) => MaybeCause::Overflow { suggest_increasing_limit: a || b }, + MaybeCause::Overflow { + suggest_increasing_limit: limit_a, + keep_constraints: keep_a, + }, + MaybeCause::Overflow { + suggest_increasing_limit: limit_b, + keep_constraints: keep_b, + }, + ) => MaybeCause::Overflow { + suggest_increasing_limit: limit_a && limit_b, + keep_constraints: keep_a && keep_b, + }, + } + } + + pub fn or(self, other: MaybeCause) -> MaybeCause { + match (self, other) { + (MaybeCause::Ambiguity, MaybeCause::Ambiguity) => MaybeCause::Ambiguity, + + // When combining ambiguity + overflow, we can keep constraints. + ( + MaybeCause::Ambiguity, + MaybeCause::Overflow { suggest_increasing_limit, keep_constraints: _ }, + ) => MaybeCause::Overflow { suggest_increasing_limit, keep_constraints: true }, + ( + MaybeCause::Overflow { suggest_increasing_limit, keep_constraints: _ }, + MaybeCause::Ambiguity, + ) => MaybeCause::Overflow { suggest_increasing_limit, keep_constraints: true }, + + ( + MaybeCause::Overflow { + suggest_increasing_limit: limit_a, + keep_constraints: keep_a, + }, + MaybeCause::Overflow { + suggest_increasing_limit: limit_b, + keep_constraints: keep_b, + }, + ) => MaybeCause::Overflow { + suggest_increasing_limit: limit_a || limit_b, + keep_constraints: keep_a || keep_b, + }, } } } diff --git a/compiler/rustc_type_ir/src/ty_kind.rs b/compiler/rustc_type_ir/src/ty_kind.rs index 9bea4482b550c..cf2e4284d10da 100644 --- a/compiler/rustc_type_ir/src/ty_kind.rs +++ b/compiler/rustc_type_ir/src/ty_kind.rs @@ -6,9 +6,8 @@ use rustc_ast_ir::Mutability; #[cfg(feature = "nightly")] use rustc_data_structures::stable_hasher::{HashStable, StableHasher}; #[cfg(feature = "nightly")] -use rustc_data_structures::unify::{NoError, UnifyKey, UnifyValue}; -#[cfg(feature = "nightly")] use rustc_macros::{Decodable_NoContext, Encodable_NoContext, HashStable_NoContext}; +use rustc_type_ir::data_structures::{NoError, UnifyKey, UnifyValue}; use rustc_type_ir_macros::{Lift_Generic, TypeFoldable_Generic, TypeVisitable_Generic}; use self::TyKind::*; @@ -55,7 +54,7 @@ pub enum AliasTyKind { /// A type alias that actually checks its trait bounds. /// Currently only used if the type alias references opaque types. /// Can always be normalized away. - Weak, + Free, } impl AliasTyKind { @@ -64,7 +63,7 @@ impl AliasTyKind { AliasTyKind::Projection => "associated type", AliasTyKind::Inherent => "inherent associated type", AliasTyKind::Opaque => "opaque type", - AliasTyKind::Weak => "type alias", + AliasTyKind::Free => "type alias", } } } @@ -224,7 +223,7 @@ pub enum TyKind { /// A tuple type. For example, `(i32, bool)`. Tuple(I::Tys), - /// A projection, opaque type, weak type alias, or inherent associated type. + /// A projection, opaque type, free type alias, or inherent associated type. /// All of these types are represented as pairs of def-id and args, and can /// be normalized, so they are grouped conceptually. Alias(AliasTyKind, AliasTy), @@ -273,6 +272,68 @@ pub enum TyKind { Error(I::ErrorGuaranteed), } +impl TyKind { + pub fn fn_sig(self, interner: I) -> ty::Binder> { + match self { + ty::FnPtr(sig_tys, hdr) => sig_tys.with(hdr), + ty::FnDef(def_id, args) => interner.fn_sig(def_id).instantiate(interner, args), + ty::Error(_) => { + // ignore errors (#54954) + ty::Binder::dummy(ty::FnSig { + inputs_and_output: Default::default(), + c_variadic: false, + safety: I::Safety::safe(), + abi: I::Abi::rust(), + }) + } + ty::Closure(..) => panic!( + "to get the signature of a closure, use `args.as_closure().sig()` not `fn_sig()`", + ), + _ => panic!("Ty::fn_sig() called on non-fn type: {:?}", self), + } + } + + /// Returns `true` when the outermost type cannot be further normalized, + /// resolved, or instantiated. This includes all primitive types, but also + /// things like ADTs and trait objects, since even if their arguments or + /// nested types may be further simplified, the outermost [`ty::TyKind`] or + /// type constructor remains the same. + pub fn is_known_rigid(self) -> bool { + match self { + ty::Bool + | ty::Char + | ty::Int(_) + | ty::Uint(_) + | ty::Float(_) + | ty::Adt(_, _) + | ty::Foreign(_) + | ty::Str + | ty::Array(_, _) + | ty::Pat(_, _) + | ty::Slice(_) + | ty::RawPtr(_, _) + | ty::Ref(_, _, _) + | ty::FnDef(_, _) + | ty::FnPtr(..) + | ty::UnsafeBinder(_) + | ty::Dynamic(_, _, _) + | ty::Closure(_, _) + | ty::CoroutineClosure(_, _) + | ty::Coroutine(_, _) + | ty::CoroutineWitness(..) + | ty::Never + | ty::Tuple(_) => true, + + ty::Error(_) + | ty::Infer(_) + | ty::Alias(_, _) + | ty::Param(_) + | ty::Bound(_, _) + | ty::Placeholder(_) => false, + } + } +} + // This is manually implemented because a derive would require `I: Debug` impl fmt::Debug for TyKind { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { @@ -453,28 +514,6 @@ impl AliasTy { } } -/// The following methods work only with inherent associated type projections. -impl AliasTy { - /// Transform the generic parameters to have the given `impl` args as the base and the GAT args on top of that. - /// - /// Does the following transformation: - /// - /// ```text - /// [Self, P_0...P_m] -> [I_0...I_n, P_0...P_m] - /// - /// I_i impl args - /// P_j GAT args - /// ``` - pub fn rebase_inherent_args_onto_impl( - self, - impl_args: I::GenericArgs, - interner: I, - ) -> I::GenericArgs { - debug_assert_eq!(self.kind(interner), AliasTyKind::Inherent); - interner.mk_args_from_iter(impl_args.iter().chain(self.args.iter().skip(1))) - } -} - #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[cfg_attr( feature = "nightly", @@ -734,7 +773,6 @@ pub enum InferTy { /// Raw `TyVid` are used as the unification key for `sub_relations`; /// they carry no values. -#[cfg(feature = "nightly")] impl UnifyKey for TyVid { type Value = (); #[inline] @@ -750,7 +788,6 @@ impl UnifyKey for TyVid { } } -#[cfg(feature = "nightly")] impl UnifyValue for IntVarValue { type Error = NoError; @@ -770,7 +807,6 @@ impl UnifyValue for IntVarValue { } } -#[cfg(feature = "nightly")] impl UnifyKey for IntVid { type Value = IntVarValue; #[inline] // make this function eligible for inlining - it is quite hot. @@ -786,7 +822,6 @@ impl UnifyKey for IntVid { } } -#[cfg(feature = "nightly")] impl UnifyValue for FloatVarValue { type Error = NoError; @@ -804,7 +839,6 @@ impl UnifyValue for FloatVarValue { } } -#[cfg(feature = "nightly")] impl UnifyKey for FloatVid { type Value = FloatVarValue; #[inline] diff --git a/compiler/rustc_type_ir/src/ty_kind/closure.rs b/compiler/rustc_type_ir/src/ty_kind/closure.rs index d1ca9bdb7fbd1..8ba985d2d1931 100644 --- a/compiler/rustc_type_ir/src/ty_kind/closure.rs +++ b/compiler/rustc_type_ir/src/ty_kind/closure.rs @@ -342,7 +342,7 @@ struct HasRegionsBoundAt { // FIXME: Could be optimized to not walk into components with no escaping bound vars. impl TypeVisitor for HasRegionsBoundAt { type Result = ControlFlow<()>; - fn visit_binder>(&mut self, t: &ty::Binder) -> Self::Result { + fn visit_binder>(&mut self, t: &ty::Binder) -> Self::Result { self.binder.shift_in(1); t.super_visit_with(self)?; self.binder.shift_out(1); diff --git a/compiler/rustc_type_ir/src/visit.rs b/compiler/rustc_type_ir/src/visit.rs index 2285e0e75de04..ccb84e2591122 100644 --- a/compiler/rustc_type_ir/src/visit.rs +++ b/compiler/rustc_type_ir/src/visit.rs @@ -52,7 +52,7 @@ use smallvec::SmallVec; use thin_vec::ThinVec; use crate::inherent::*; -use crate::{self as ty, Interner, TypeFlags}; +use crate::{self as ty, Interner, TypeFlags, TypeFoldable}; /// This trait is implemented for every type that can be visited, /// providing the skeleton of the traversal. @@ -94,7 +94,7 @@ pub trait TypeVisitor: Sized { #[cfg(not(feature = "nightly"))] type Result: VisitorResult; - fn visit_binder>(&mut self, t: &ty::Binder) -> Self::Result { + fn visit_binder>(&mut self, t: &ty::Binder) -> Self::Result { t.super_visit_with(self) } @@ -401,7 +401,7 @@ impl std::fmt::Debug for HasTypeFlagsVisitor { impl TypeVisitor for HasTypeFlagsVisitor { type Result = ControlFlow; - fn visit_binder>(&mut self, t: &ty::Binder) -> Self::Result { + fn visit_binder>(&mut self, t: &ty::Binder) -> Self::Result { // If we're looking for the HAS_BINDER_VARS flag, check if the // binder has vars. This won't be present in the binder's bound // value, so we need to check here too. @@ -510,7 +510,7 @@ struct HasEscapingVarsVisitor { impl TypeVisitor for HasEscapingVarsVisitor { type Result = ControlFlow; - fn visit_binder>(&mut self, t: &ty::Binder) -> Self::Result { + fn visit_binder>(&mut self, t: &ty::Binder) -> Self::Result { self.outer_index.shift_in(1); let result = t.super_visit_with(self); self.outer_index.shift_out(1); diff --git a/compiler/rustc_type_ir/src/walk.rs b/compiler/rustc_type_ir/src/walk.rs new file mode 100644 index 0000000000000..737550eb73e99 --- /dev/null +++ b/compiler/rustc_type_ir/src/walk.rs @@ -0,0 +1,182 @@ +//! An iterator over the type substructure. +//! WARNING: this does not keep track of the region depth. + +use smallvec::{SmallVec, smallvec}; +use tracing::debug; + +use crate::data_structures::SsoHashSet; +use crate::inherent::*; +use crate::{self as ty, Interner}; + +// The TypeWalker's stack is hot enough that it's worth going to some effort to +// avoid heap allocations. +type TypeWalkerStack = SmallVec<[::GenericArg; 8]>; + +pub struct TypeWalker { + stack: TypeWalkerStack, + last_subtree: usize, + pub visited: SsoHashSet, +} + +/// An iterator for walking the type tree. +/// +/// It's very easy to produce a deeply +/// nested type tree with a lot of +/// identical subtrees. In order to work efficiently +/// in this situation walker only visits each type once. +/// It maintains a set of visited types and +/// skips any types that are already there. +impl TypeWalker { + pub fn new(root: I::GenericArg) -> Self { + Self { stack: smallvec![root], last_subtree: 1, visited: SsoHashSet::new() } + } + + /// Skips the subtree corresponding to the last type + /// returned by `next()`. + /// + /// Example: Imagine you are walking `Foo, usize>`. + /// + /// ```ignore (illustrative) + /// let mut iter: TypeWalker = ...; + /// iter.next(); // yields Foo + /// iter.next(); // yields Bar + /// iter.skip_current_subtree(); // skips i32 + /// iter.next(); // yields usize + /// ``` + pub fn skip_current_subtree(&mut self) { + self.stack.truncate(self.last_subtree); + } +} + +impl Iterator for TypeWalker { + type Item = I::GenericArg; + + fn next(&mut self) -> Option { + debug!("next(): stack={:?}", self.stack); + loop { + let next = self.stack.pop()?; + self.last_subtree = self.stack.len(); + if self.visited.insert(next) { + push_inner::(&mut self.stack, next); + debug!("next: stack={:?}", self.stack); + return Some(next); + } + } + } +} + +/// We push `GenericArg`s on the stack in reverse order so as to +/// maintain a pre-order traversal. As of the time of this +/// writing, the fact that the traversal is pre-order is not +/// known to be significant to any code, but it seems like the +/// natural order one would expect (basically, the order of the +/// types as they are written). +fn push_inner(stack: &mut TypeWalkerStack, parent: I::GenericArg) { + match parent.kind() { + ty::GenericArgKind::Type(parent_ty) => match parent_ty.kind() { + ty::Bool + | ty::Char + | ty::Int(_) + | ty::Uint(_) + | ty::Float(_) + | ty::Str + | ty::Infer(_) + | ty::Param(_) + | ty::Never + | ty::Error(_) + | ty::Placeholder(..) + | ty::Bound(..) + | ty::Foreign(..) => {} + + ty::Pat(ty, pat) => { + push_ty_pat::(stack, pat); + stack.push(ty.into()); + } + ty::Array(ty, len) => { + stack.push(len.into()); + stack.push(ty.into()); + } + ty::Slice(ty) => { + stack.push(ty.into()); + } + ty::RawPtr(ty, _) => { + stack.push(ty.into()); + } + ty::Ref(lt, ty, _) => { + stack.push(ty.into()); + stack.push(lt.into()); + } + ty::Alias(_, data) => { + stack.extend(data.args.iter().rev()); + } + ty::Dynamic(obj, lt, _) => { + stack.push(lt.into()); + stack.extend( + obj.iter() + .rev() + .filter_map(|predicate| { + let (args, opt_ty) = match predicate.skip_binder() { + ty::ExistentialPredicate::Trait(tr) => (tr.args, None), + ty::ExistentialPredicate::Projection(p) => (p.args, Some(p.term)), + ty::ExistentialPredicate::AutoTrait(_) => { + return None; + } + }; + + Some(args.iter().rev().chain(opt_ty.map(|term| match term.kind() { + ty::TermKind::Ty(ty) => ty.into(), + ty::TermKind::Const(ct) => ct.into(), + }))) + }) + .flatten(), + ); + } + ty::Adt(_, args) + | ty::Closure(_, args) + | ty::CoroutineClosure(_, args) + | ty::Coroutine(_, args) + | ty::CoroutineWitness(_, args) + | ty::FnDef(_, args) => { + stack.extend(args.iter().rev()); + } + ty::Tuple(ts) => stack.extend(ts.iter().rev().map(|ty| ty.into())), + ty::FnPtr(sig_tys, _hdr) => { + stack.extend( + sig_tys.skip_binder().inputs_and_output.iter().rev().map(|ty| ty.into()), + ); + } + ty::UnsafeBinder(bound_ty) => { + stack.push(bound_ty.skip_binder().into()); + } + }, + ty::GenericArgKind::Lifetime(_) => {} + ty::GenericArgKind::Const(parent_ct) => match parent_ct.kind() { + ty::ConstKind::Infer(_) + | ty::ConstKind::Param(_) + | ty::ConstKind::Placeholder(_) + | ty::ConstKind::Bound(..) + | ty::ConstKind::Error(_) => {} + + ty::ConstKind::Value(cv) => stack.push(cv.ty().into()), + + ty::ConstKind::Expr(expr) => stack.extend(expr.args().iter().rev()), + ty::ConstKind::Unevaluated(ct) => { + stack.extend(ct.args.iter().rev()); + } + }, + } +} + +fn push_ty_pat(stack: &mut TypeWalkerStack, pat: I::Pat) { + match pat.kind() { + ty::PatternKind::Range { start, end } => { + stack.push(end.into()); + stack.push(start.into()); + } + ty::PatternKind::Or(pats) => { + for pat in pats.iter() { + push_ty_pat::(stack, pat) + } + } + } +} diff --git a/compiler/rustc_type_ir_macros/src/lib.rs b/compiler/rustc_type_ir_macros/src/lib.rs index 8eefecdc980e5..3a10d0d41ef32 100644 --- a/compiler/rustc_type_ir_macros/src/lib.rs +++ b/compiler/rustc_type_ir_macros/src/lib.rs @@ -83,7 +83,7 @@ fn type_foldable_derive(mut s: synstructure::Structure<'_>) -> proc_macro2::Toke s.add_where_predicate(parse_quote! { I: Interner }); s.add_bounds(synstructure::AddBounds::Fields); s.bind_with(|_| synstructure::BindStyle::Move); - let body_fold = s.each_variant(|vi| { + let body_try_fold = s.each_variant(|vi| { let bindings = vi.bindings(); vi.construct(|_, index| { let bind = &bindings[index]; @@ -99,6 +99,22 @@ fn type_foldable_derive(mut s: synstructure::Structure<'_>) -> proc_macro2::Toke }) }); + let body_fold = s.each_variant(|vi| { + let bindings = vi.bindings(); + vi.construct(|_, index| { + let bind = &bindings[index]; + + // retain value of fields with #[type_foldable(identity)] + if has_ignore_attr(&bind.ast().attrs, "type_foldable", "identity") { + bind.to_token_stream() + } else { + quote! { + ::rustc_type_ir::TypeFoldable::fold_with(#bind, __folder) + } + } + }) + }); + // We filter fields which get ignored and don't require them to implement // `TypeFoldable`. We do so after generating `body_fold` as we still need // to generate code for them. @@ -111,7 +127,14 @@ fn type_foldable_derive(mut s: synstructure::Structure<'_>) -> proc_macro2::Toke self, __folder: &mut __F ) -> Result { - Ok(match self { #body_fold }) + Ok(match self { #body_try_fold }) + } + + fn fold_with<__F: ::rustc_type_ir::TypeFolder>( + self, + __folder: &mut __F + ) -> Self { + match self { #body_fold } } }, ) diff --git a/compiler/stable_mir/Cargo.toml b/compiler/stable_mir/Cargo.toml index d691a0e4f22f5..516c8e9c718b4 100644 --- a/compiler/stable_mir/Cargo.toml +++ b/compiler/stable_mir/Cargo.toml @@ -4,5 +4,10 @@ version = "0.1.0-preview" edition = "2024" [dependencies] -scoped-tls = "1.0" -serde = { version = "1.0.125", features = [ "derive" ] } +rustc_smir = { path = "../rustc_smir" } + +[features] +# Provides access to APIs that expose internals of the rust compiler. +# APIs enabled by this feature are unstable. They can be removed or modified +# at any point and they are not included in the crate's semantic versioning. +rustc_internal = [] diff --git a/compiler/stable_mir/src/compiler_interface.rs b/compiler/stable_mir/src/compiler_interface.rs deleted file mode 100644 index e82c957c34ea6..0000000000000 --- a/compiler/stable_mir/src/compiler_interface.rs +++ /dev/null @@ -1,283 +0,0 @@ -//! Define the interface with the Rust compiler. -//! -//! StableMIR users should not use any of the items in this module directly. -//! These APIs have no stability guarantee. - -use std::cell::Cell; - -use crate::abi::{FnAbi, Layout, LayoutShape}; -use crate::crate_def::Attribute; -use crate::mir::alloc::{AllocId, GlobalAlloc}; -use crate::mir::mono::{Instance, InstanceDef, StaticDef}; -use crate::mir::{BinOp, Body, Place, UnOp}; -use crate::target::MachineInfo; -use crate::ty::{ - AdtDef, AdtKind, Allocation, ClosureDef, ClosureKind, FieldDef, FnDef, ForeignDef, - ForeignItemKind, ForeignModule, ForeignModuleDef, GenericArgs, GenericPredicates, Generics, - ImplDef, ImplTrait, IntrinsicDef, LineInfo, MirConst, PolyFnSig, RigidTy, Span, TraitDecl, - TraitDef, Ty, TyConst, TyConstId, TyKind, UintTy, VariantDef, -}; -use crate::{ - Crate, CrateItem, CrateItems, CrateNum, DefId, Error, Filename, ImplTraitDecls, ItemKind, - Symbol, TraitDecls, mir, -}; - -/// This trait defines the interface between stable_mir and the Rust compiler. -/// Do not use this directly. -pub trait Context { - fn entry_fn(&self) -> Option; - /// Retrieve all items of the local crate that have a MIR associated with them. - fn all_local_items(&self) -> CrateItems; - /// Retrieve the body of a function. - /// This function will panic if the body is not available. - fn mir_body(&self, item: DefId) -> mir::Body; - /// Check whether the body of a function is available. - fn has_body(&self, item: DefId) -> bool; - fn foreign_modules(&self, crate_num: CrateNum) -> Vec; - - /// Retrieve all functions defined in this crate. - fn crate_functions(&self, crate_num: CrateNum) -> Vec; - - /// Retrieve all static items defined in this crate. - fn crate_statics(&self, crate_num: CrateNum) -> Vec; - fn foreign_module(&self, mod_def: ForeignModuleDef) -> ForeignModule; - fn foreign_items(&self, mod_def: ForeignModuleDef) -> Vec; - fn all_trait_decls(&self) -> TraitDecls; - fn trait_decls(&self, crate_num: CrateNum) -> TraitDecls; - fn trait_decl(&self, trait_def: &TraitDef) -> TraitDecl; - fn all_trait_impls(&self) -> ImplTraitDecls; - fn trait_impls(&self, crate_num: CrateNum) -> ImplTraitDecls; - fn trait_impl(&self, trait_impl: &ImplDef) -> ImplTrait; - fn generics_of(&self, def_id: DefId) -> Generics; - fn predicates_of(&self, def_id: DefId) -> GenericPredicates; - fn explicit_predicates_of(&self, def_id: DefId) -> GenericPredicates; - /// Get information about the local crate. - fn local_crate(&self) -> Crate; - /// Retrieve a list of all external crates. - fn external_crates(&self) -> Vec; - - /// Find a crate with the given name. - fn find_crates(&self, name: &str) -> Vec; - - /// Returns the name of given `DefId` - fn def_name(&self, def_id: DefId, trimmed: bool) -> Symbol; - - /// Return registered tool attributes with the given attribute name. - /// - /// FIXME(jdonszelmann): may panic on non-tool attributes. After more attribute work, non-tool - /// attributes will simply return an empty list. - /// - /// Single segmented name like `#[clippy]` is specified as `&["clippy".to_string()]`. - /// Multi-segmented name like `#[rustfmt::skip]` is specified as `&["rustfmt".to_string(), "skip".to_string()]`. - fn tool_attrs(&self, def_id: DefId, attr: &[Symbol]) -> Vec; - - /// Get all tool attributes of a definition. - fn all_tool_attrs(&self, def_id: DefId) -> Vec; - - /// Returns printable, human readable form of `Span` - fn span_to_string(&self, span: Span) -> String; - - /// Return filename from given `Span`, for diagnostic purposes - fn get_filename(&self, span: &Span) -> Filename; - - /// Return lines corresponding to this `Span` - fn get_lines(&self, span: &Span) -> LineInfo; - - /// Returns the `kind` of given `DefId` - fn item_kind(&self, item: CrateItem) -> ItemKind; - - /// Returns whether this is a foreign item. - fn is_foreign_item(&self, item: DefId) -> bool; - - /// Returns the kind of a given foreign item. - fn foreign_item_kind(&self, def: ForeignDef) -> ForeignItemKind; - - /// Returns the kind of a given algebraic data type - fn adt_kind(&self, def: AdtDef) -> AdtKind; - - /// Returns if the ADT is a box. - fn adt_is_box(&self, def: AdtDef) -> bool; - - /// Returns whether this ADT is simd. - fn adt_is_simd(&self, def: AdtDef) -> bool; - - /// Returns whether this definition is a C string. - fn adt_is_cstr(&self, def: AdtDef) -> bool; - - /// Retrieve the function signature for the given generic arguments. - fn fn_sig(&self, def: FnDef, args: &GenericArgs) -> PolyFnSig; - - /// Retrieve the intrinsic definition if the item corresponds one. - fn intrinsic(&self, item: DefId) -> Option; - - /// Retrieve the plain function name of an intrinsic. - fn intrinsic_name(&self, def: IntrinsicDef) -> Symbol; - - /// Retrieve the closure signature for the given generic arguments. - fn closure_sig(&self, args: &GenericArgs) -> PolyFnSig; - - /// The number of variants in this ADT. - fn adt_variants_len(&self, def: AdtDef) -> usize; - - /// The name of a variant. - fn variant_name(&self, def: VariantDef) -> Symbol; - fn variant_fields(&self, def: VariantDef) -> Vec; - - /// Evaluate constant as a target usize. - fn eval_target_usize(&self, cnst: &MirConst) -> Result; - fn eval_target_usize_ty(&self, cnst: &TyConst) -> Result; - - /// Create a new zero-sized constant. - fn try_new_const_zst(&self, ty: Ty) -> Result; - - /// Create a new constant that represents the given string value. - fn new_const_str(&self, value: &str) -> MirConst; - - /// Create a new constant that represents the given boolean value. - fn new_const_bool(&self, value: bool) -> MirConst; - - /// Create a new constant that represents the given value. - fn try_new_const_uint(&self, value: u128, uint_ty: UintTy) -> Result; - fn try_new_ty_const_uint(&self, value: u128, uint_ty: UintTy) -> Result; - - /// Create a new type from the given kind. - fn new_rigid_ty(&self, kind: RigidTy) -> Ty; - - /// Create a new box type, `Box`, for the given inner type `T`. - fn new_box_ty(&self, ty: Ty) -> Ty; - - /// Returns the type of given crate item. - fn def_ty(&self, item: DefId) -> Ty; - - /// Returns the type of given definition instantiated with the given arguments. - fn def_ty_with_args(&self, item: DefId, args: &GenericArgs) -> Ty; - - /// Returns literal value of a const as a string. - fn mir_const_pretty(&self, cnst: &MirConst) -> String; - - /// `Span` of an item - fn span_of_an_item(&self, def_id: DefId) -> Span; - - fn ty_const_pretty(&self, ct: TyConstId) -> String; - - /// Obtain the representation of a type. - fn ty_pretty(&self, ty: Ty) -> String; - - /// Obtain the representation of a type. - fn ty_kind(&self, ty: Ty) -> TyKind; - - // Get the discriminant Ty for this Ty if there's one. - fn rigid_ty_discriminant_ty(&self, ty: &RigidTy) -> Ty; - - /// Get the body of an Instance which is already monomorphized. - fn instance_body(&self, instance: InstanceDef) -> Option; - - /// Get the instance type with generic instantiations applied and lifetimes erased. - fn instance_ty(&self, instance: InstanceDef) -> Ty; - - /// Get the instantiation types. - fn instance_args(&self, def: InstanceDef) -> GenericArgs; - - /// Get the instance. - fn instance_def_id(&self, instance: InstanceDef) -> DefId; - - /// Get the instance mangled name. - fn instance_mangled_name(&self, instance: InstanceDef) -> Symbol; - - /// Check if this is an empty DropGlue shim. - fn is_empty_drop_shim(&self, def: InstanceDef) -> bool; - - /// Check if this is an empty AsyncDropGlueCtor shim. - fn is_empty_async_drop_ctor_shim(&self, def: InstanceDef) -> bool; - - /// Convert a non-generic crate item into an instance. - /// This function will panic if the item is generic. - fn mono_instance(&self, def_id: DefId) -> Instance; - - /// Item requires monomorphization. - fn requires_monomorphization(&self, def_id: DefId) -> bool; - - /// Resolve an instance from the given function definition and generic arguments. - fn resolve_instance(&self, def: FnDef, args: &GenericArgs) -> Option; - - /// Resolve an instance for drop_in_place for the given type. - fn resolve_drop_in_place(&self, ty: Ty) -> Instance; - - /// Resolve instance for a function pointer. - fn resolve_for_fn_ptr(&self, def: FnDef, args: &GenericArgs) -> Option; - - /// Resolve instance for a closure with the requested type. - fn resolve_closure( - &self, - def: ClosureDef, - args: &GenericArgs, - kind: ClosureKind, - ) -> Option; - - /// Evaluate a static's initializer. - fn eval_static_initializer(&self, def: StaticDef) -> Result; - - /// Try to evaluate an instance into a constant. - fn eval_instance(&self, def: InstanceDef, const_ty: Ty) -> Result; - - /// Retrieve global allocation for the given allocation ID. - fn global_alloc(&self, id: AllocId) -> GlobalAlloc; - - /// Retrieve the id for the virtual table. - fn vtable_allocation(&self, global_alloc: &GlobalAlloc) -> Option; - fn krate(&self, def_id: DefId) -> Crate; - fn instance_name(&self, def: InstanceDef, trimmed: bool) -> Symbol; - - /// Return information about the target machine. - fn target_info(&self) -> MachineInfo; - - /// Get an instance ABI. - fn instance_abi(&self, def: InstanceDef) -> Result; - - /// Get the ABI of a function pointer. - fn fn_ptr_abi(&self, fn_ptr: PolyFnSig) -> Result; - - /// Get the layout of a type. - fn ty_layout(&self, ty: Ty) -> Result; - - /// Get the layout shape. - fn layout_shape(&self, id: Layout) -> LayoutShape; - - /// Get a debug string representation of a place. - fn place_pretty(&self, place: &Place) -> String; - - /// Get the resulting type of binary operation. - fn binop_ty(&self, bin_op: BinOp, rhs: Ty, lhs: Ty) -> Ty; - - /// Get the resulting type of unary operation. - fn unop_ty(&self, un_op: UnOp, arg: Ty) -> Ty; -} - -// A thread local variable that stores a pointer to the tables mapping between TyCtxt -// datastructures and stable MIR datastructures -scoped_tls::scoped_thread_local!(static TLV: Cell<*const ()>); - -pub fn run(context: &dyn Context, f: F) -> Result -where - F: FnOnce() -> T, -{ - if TLV.is_set() { - Err(Error::from("StableMIR already running")) - } else { - let ptr: *const () = (&raw const context) as _; - TLV.set(&Cell::new(ptr), || Ok(f())) - } -} - -/// Execute the given function with access the compiler [Context]. -/// -/// I.e., This function will load the current context and calls a function with it. -/// Do not nest these, as that will ICE. -pub(crate) fn with(f: impl FnOnce(&dyn Context) -> R) -> R { - assert!(TLV.is_set()); - TLV.with(|tlv| { - let ptr = tlv.get(); - assert!(!ptr.is_null()); - f(unsafe { *(ptr as *const &dyn Context) }) - }) -} diff --git a/compiler/stable_mir/src/lib.rs b/compiler/stable_mir/src/lib.rs index 70d42dfbfcb9a..688f3936b26cc 100644 --- a/compiler/stable_mir/src/lib.rs +++ b/compiler/stable_mir/src/lib.rs @@ -1,228 +1,11 @@ -//! The WIP stable interface to rustc internals. +//! We've temporarily moved the `stable_mir` implementation to [`rustc_smir::stable_mir`], +//! during refactoring to break the circular dependency between `rustc_smir` and `stable_mir`, //! -//! For more information see -//! -//! # Note -//! -//! This API is still completely unstable and subject to change. - -#![doc( - html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/", - test(attr(allow(unused_variables), deny(warnings))) -)] -//! -//! This crate shall contain all type definitions and APIs that we expect third-party tools to invoke to -//! interact with the compiler. -//! -//! The goal is to eventually be published on -//! [crates.io](https://crates.io). - -use std::fmt::Debug; -use std::{fmt, io}; - -use serde::Serialize; - -use crate::compiler_interface::with; -pub use crate::crate_def::{CrateDef, CrateDefType, DefId}; -pub use crate::error::*; -use crate::mir::mono::StaticDef; -use crate::mir::{Body, Mutability}; -use crate::ty::{FnDef, ForeignModuleDef, ImplDef, IndexedVal, Span, TraitDef, Ty}; - -pub mod abi; -#[macro_use] -pub mod crate_def; -pub mod compiler_interface; -#[macro_use] -pub mod error; -pub mod mir; -pub mod target; -pub mod ty; -pub mod visitor; - -/// Use String for now but we should replace it. -pub type Symbol = String; - -/// The number that identifies a crate. -pub type CrateNum = usize; - -impl Debug for DefId { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.debug_struct("DefId").field("id", &self.0).field("name", &self.name()).finish() - } -} - -impl IndexedVal for DefId { - fn to_val(index: usize) -> Self { - DefId(index) - } - - fn to_index(&self) -> usize { - self.0 - } -} - -/// A list of crate items. -pub type CrateItems = Vec; - -/// A list of trait decls. -pub type TraitDecls = Vec; - -/// A list of impl trait decls. -pub type ImplTraitDecls = Vec; - -/// Holds information about a crate. -#[derive(Clone, PartialEq, Eq, Debug, Serialize)] -pub struct Crate { - pub id: CrateNum, - pub name: Symbol, - pub is_local: bool, -} - -impl Crate { - /// The list of foreign modules in this crate. - pub fn foreign_modules(&self) -> Vec { - with(|cx| cx.foreign_modules(self.id)) - } - - /// The list of traits declared in this crate. - pub fn trait_decls(&self) -> TraitDecls { - with(|cx| cx.trait_decls(self.id)) - } - - /// The list of trait implementations in this crate. - pub fn trait_impls(&self) -> ImplTraitDecls { - with(|cx| cx.trait_impls(self.id)) - } - - /// Return a list of function definitions from this crate independent on their visibility. - pub fn fn_defs(&self) -> Vec { - with(|cx| cx.crate_functions(self.id)) - } - - /// Return a list of static items defined in this crate independent on their visibility. - pub fn statics(&self) -> Vec { - with(|cx| cx.crate_statics(self.id)) - } -} - -#[derive(Copy, Clone, PartialEq, Eq, Debug, Hash, Serialize)] -pub enum ItemKind { - Fn, - Static, - Const, - Ctor(CtorKind), -} - -#[derive(Copy, Clone, PartialEq, Eq, Debug, Hash, Serialize)] -pub enum CtorKind { - Const, - Fn, -} - -pub type Filename = String; - -crate_def_with_ty! { - /// Holds information about an item in a crate. - #[derive(Serialize)] - pub CrateItem; -} - -impl CrateItem { - /// This will return the body of an item or panic if it's not available. - pub fn expect_body(&self) -> mir::Body { - with(|cx| cx.mir_body(self.0)) - } - - /// Return the body of an item if available. - pub fn body(&self) -> Option { - with(|cx| cx.has_body(self.0).then(|| cx.mir_body(self.0))) - } - - /// Check if a body is available for this item. - pub fn has_body(&self) -> bool { - with(|cx| cx.has_body(self.0)) - } - - pub fn span(&self) -> Span { - with(|cx| cx.span_of_an_item(self.0)) - } - - pub fn kind(&self) -> ItemKind { - with(|cx| cx.item_kind(*self)) - } - - pub fn requires_monomorphization(&self) -> bool { - with(|cx| cx.requires_monomorphization(self.0)) - } - - pub fn ty(&self) -> Ty { - with(|cx| cx.def_ty(self.0)) - } - - pub fn is_foreign_item(&self) -> bool { - with(|cx| cx.is_foreign_item(self.0)) - } - - /// Emit MIR for this item body. - pub fn emit_mir(&self, w: &mut W) -> io::Result<()> { - self.body() - .ok_or_else(|| io::Error::other(format!("No body found for `{}`", self.name())))? - .dump(w, &self.name()) - } -} - -/// Return the function where execution starts if the current -/// crate defines that. This is usually `main`, but could be -/// `start` if the crate is a no-std crate. -pub fn entry_fn() -> Option { - with(|cx| cx.entry_fn()) -} - -/// Access to the local crate. -pub fn local_crate() -> Crate { - with(|cx| cx.local_crate()) -} - -/// Try to find a crate or crates if multiple crates exist from given name. -pub fn find_crates(name: &str) -> Vec { - with(|cx| cx.find_crates(name)) -} - -/// Try to find a crate with the given name. -pub fn external_crates() -> Vec { - with(|cx| cx.external_crates()) -} - -/// Retrieve all items in the local crate that have a MIR associated with them. -pub fn all_local_items() -> CrateItems { - with(|cx| cx.all_local_items()) -} - -pub fn all_trait_decls() -> TraitDecls { - with(|cx| cx.all_trait_decls()) -} - -pub fn all_trait_impls() -> ImplTraitDecls { - with(|cx| cx.all_trait_impls()) -} - -/// A type that provides internal information but that can still be used for debug purpose. -#[derive(Clone, PartialEq, Eq, Hash, Serialize)] -pub struct Opaque(String); - -impl std::fmt::Display for Opaque { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - write!(f, "{}", self.0) - } -} - -impl std::fmt::Debug for Opaque { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - write!(f, "{}", self.0) - } -} +//! This is a transitional measure as described in [PR #139319](https://github.com/rust-lang/rust/pull/139319). +//! Once the refactoring is complete, the `stable_mir` implementation will be moved back here. -pub fn opaque(value: &T) -> Opaque { - Opaque(format!("{value:?}")) -} +/// Export the rustc_internal APIs. Note that this module has no stability +/// guarantees and it is not taken into account for semver. +#[cfg(feature = "rustc_internal")] +pub use rustc_smir::rustc_internal; +pub use rustc_smir::stable_mir::*; diff --git a/compiler/stable_mir/src/mir/body.rs b/compiler/stable_mir/src/mir/body.rs deleted file mode 100644 index 2a1c163de3c45..0000000000000 --- a/compiler/stable_mir/src/mir/body.rs +++ /dev/null @@ -1,1108 +0,0 @@ -use std::io; - -use serde::Serialize; - -use crate::compiler_interface::with; -use crate::mir::pretty::function_body; -use crate::ty::{ - AdtDef, ClosureDef, CoroutineClosureDef, CoroutineDef, GenericArgs, MirConst, Movability, - Region, RigidTy, Ty, TyConst, TyKind, VariantIdx, -}; -use crate::{Error, Opaque, Span, Symbol}; - -/// The SMIR representation of a single function. -#[derive(Clone, Debug, Serialize)] -pub struct Body { - pub blocks: Vec, - - /// Declarations of locals within the function. - /// - /// The first local is the return value pointer, followed by `arg_count` - /// locals for the function arguments, followed by any user-declared - /// variables and temporaries. - pub(super) locals: LocalDecls, - - /// The number of arguments this function takes. - pub(super) arg_count: usize, - - /// Debug information pertaining to user variables, including captures. - pub var_debug_info: Vec, - - /// Mark an argument (which must be a tuple) as getting passed as its individual components. - /// - /// This is used for the "rust-call" ABI such as closures. - pub(super) spread_arg: Option, - - /// The span that covers the entire function body. - pub span: Span, -} - -pub type BasicBlockIdx = usize; - -impl Body { - /// Constructs a `Body`. - /// - /// A constructor is required to build a `Body` from outside the crate - /// because the `arg_count` and `locals` fields are private. - pub fn new( - blocks: Vec, - locals: LocalDecls, - arg_count: usize, - var_debug_info: Vec, - spread_arg: Option, - span: Span, - ) -> Self { - // If locals doesn't contain enough entries, it can lead to panics in - // `ret_local`, `arg_locals`, and `inner_locals`. - assert!( - locals.len() > arg_count, - "A Body must contain at least a local for the return value and each of the function's arguments" - ); - Self { blocks, locals, arg_count, var_debug_info, spread_arg, span } - } - - /// Return local that holds this function's return value. - pub fn ret_local(&self) -> &LocalDecl { - &self.locals[RETURN_LOCAL] - } - - /// Locals in `self` that correspond to this function's arguments. - pub fn arg_locals(&self) -> &[LocalDecl] { - &self.locals[1..][..self.arg_count] - } - - /// Inner locals for this function. These are the locals that are - /// neither the return local nor the argument locals. - pub fn inner_locals(&self) -> &[LocalDecl] { - &self.locals[self.arg_count + 1..] - } - - /// Returns a mutable reference to the local that holds this function's return value. - pub(crate) fn ret_local_mut(&mut self) -> &mut LocalDecl { - &mut self.locals[RETURN_LOCAL] - } - - /// Returns a mutable slice of locals corresponding to this function's arguments. - pub(crate) fn arg_locals_mut(&mut self) -> &mut [LocalDecl] { - &mut self.locals[1..][..self.arg_count] - } - - /// Returns a mutable slice of inner locals for this function. - /// Inner locals are those that are neither the return local nor the argument locals. - pub(crate) fn inner_locals_mut(&mut self) -> &mut [LocalDecl] { - &mut self.locals[self.arg_count + 1..] - } - - /// Convenience function to get all the locals in this function. - /// - /// Locals are typically accessed via the more specific methods `ret_local`, - /// `arg_locals`, and `inner_locals`. - pub fn locals(&self) -> &[LocalDecl] { - &self.locals - } - - /// Get the local declaration for this local. - pub fn local_decl(&self, local: Local) -> Option<&LocalDecl> { - self.locals.get(local) - } - - /// Get an iterator for all local declarations. - pub fn local_decls(&self) -> impl Iterator { - self.locals.iter().enumerate() - } - - /// Emit the body using the provided name for the signature. - pub fn dump(&self, w: &mut W, fn_name: &str) -> io::Result<()> { - function_body(w, self, fn_name) - } - - pub fn spread_arg(&self) -> Option { - self.spread_arg - } -} - -type LocalDecls = Vec; - -#[derive(Clone, Debug, Eq, PartialEq, Serialize)] -pub struct LocalDecl { - pub ty: Ty, - pub span: Span, - pub mutability: Mutability, -} - -#[derive(Clone, PartialEq, Eq, Debug, Serialize)] -pub struct BasicBlock { - pub statements: Vec, - pub terminator: Terminator, -} - -#[derive(Clone, Debug, Eq, PartialEq, Serialize)] -pub struct Terminator { - pub kind: TerminatorKind, - pub span: Span, -} - -impl Terminator { - pub fn successors(&self) -> Successors { - self.kind.successors() - } -} - -pub type Successors = Vec; - -#[derive(Clone, Debug, Eq, PartialEq, Serialize)] -pub enum TerminatorKind { - Goto { - target: BasicBlockIdx, - }, - SwitchInt { - discr: Operand, - targets: SwitchTargets, - }, - Resume, - Abort, - Return, - Unreachable, - Drop { - place: Place, - target: BasicBlockIdx, - unwind: UnwindAction, - }, - Call { - func: Operand, - args: Vec, - destination: Place, - target: Option, - unwind: UnwindAction, - }, - Assert { - cond: Operand, - expected: bool, - msg: AssertMessage, - target: BasicBlockIdx, - unwind: UnwindAction, - }, - InlineAsm { - template: String, - operands: Vec, - options: String, - line_spans: String, - destination: Option, - unwind: UnwindAction, - }, -} - -impl TerminatorKind { - pub fn successors(&self) -> Successors { - use self::TerminatorKind::*; - match *self { - Call { target: Some(t), unwind: UnwindAction::Cleanup(u), .. } - | Drop { target: t, unwind: UnwindAction::Cleanup(u), .. } - | Assert { target: t, unwind: UnwindAction::Cleanup(u), .. } - | InlineAsm { destination: Some(t), unwind: UnwindAction::Cleanup(u), .. } => { - vec![t, u] - } - Goto { target: t } - | Call { target: None, unwind: UnwindAction::Cleanup(t), .. } - | Call { target: Some(t), unwind: _, .. } - | Drop { target: t, unwind: _, .. } - | Assert { target: t, unwind: _, .. } - | InlineAsm { destination: None, unwind: UnwindAction::Cleanup(t), .. } - | InlineAsm { destination: Some(t), unwind: _, .. } => { - vec![t] - } - - Return - | Resume - | Abort - | Unreachable - | Call { target: None, unwind: _, .. } - | InlineAsm { destination: None, unwind: _, .. } => { - vec![] - } - SwitchInt { ref targets, .. } => targets.all_targets(), - } - } - - pub fn unwind(&self) -> Option<&UnwindAction> { - match *self { - TerminatorKind::Goto { .. } - | TerminatorKind::Return - | TerminatorKind::Unreachable - | TerminatorKind::Resume - | TerminatorKind::Abort - | TerminatorKind::SwitchInt { .. } => None, - TerminatorKind::Call { ref unwind, .. } - | TerminatorKind::Assert { ref unwind, .. } - | TerminatorKind::Drop { ref unwind, .. } - | TerminatorKind::InlineAsm { ref unwind, .. } => Some(unwind), - } - } -} - -#[derive(Clone, Debug, Eq, PartialEq, Serialize)] -pub struct InlineAsmOperand { - pub in_value: Option, - pub out_place: Option, - // This field has a raw debug representation of MIR's InlineAsmOperand. - // For now we care about place/operand + the rest in a debug format. - pub raw_rpr: String, -} - -#[derive(Copy, Clone, Debug, Eq, PartialEq, Serialize)] -pub enum UnwindAction { - Continue, - Unreachable, - Terminate, - Cleanup(BasicBlockIdx), -} - -#[derive(Clone, Debug, Eq, PartialEq, Serialize)] -pub enum AssertMessage { - BoundsCheck { len: Operand, index: Operand }, - Overflow(BinOp, Operand, Operand), - OverflowNeg(Operand), - DivisionByZero(Operand), - RemainderByZero(Operand), - ResumedAfterReturn(CoroutineKind), - ResumedAfterPanic(CoroutineKind), - MisalignedPointerDereference { required: Operand, found: Operand }, - NullPointerDereference, -} - -impl AssertMessage { - pub fn description(&self) -> Result<&'static str, Error> { - match self { - AssertMessage::Overflow(BinOp::Add, _, _) => Ok("attempt to add with overflow"), - AssertMessage::Overflow(BinOp::Sub, _, _) => Ok("attempt to subtract with overflow"), - AssertMessage::Overflow(BinOp::Mul, _, _) => Ok("attempt to multiply with overflow"), - AssertMessage::Overflow(BinOp::Div, _, _) => Ok("attempt to divide with overflow"), - AssertMessage::Overflow(BinOp::Rem, _, _) => { - Ok("attempt to calculate the remainder with overflow") - } - AssertMessage::OverflowNeg(_) => Ok("attempt to negate with overflow"), - AssertMessage::Overflow(BinOp::Shr, _, _) => Ok("attempt to shift right with overflow"), - AssertMessage::Overflow(BinOp::Shl, _, _) => Ok("attempt to shift left with overflow"), - AssertMessage::Overflow(op, _, _) => Err(error!("`{:?}` cannot overflow", op)), - AssertMessage::DivisionByZero(_) => Ok("attempt to divide by zero"), - AssertMessage::RemainderByZero(_) => { - Ok("attempt to calculate the remainder with a divisor of zero") - } - AssertMessage::ResumedAfterReturn(CoroutineKind::Coroutine(_)) => { - Ok("coroutine resumed after completion") - } - AssertMessage::ResumedAfterReturn(CoroutineKind::Desugared( - CoroutineDesugaring::Async, - _, - )) => Ok("`async fn` resumed after completion"), - AssertMessage::ResumedAfterReturn(CoroutineKind::Desugared( - CoroutineDesugaring::Gen, - _, - )) => Ok("`async gen fn` resumed after completion"), - AssertMessage::ResumedAfterReturn(CoroutineKind::Desugared( - CoroutineDesugaring::AsyncGen, - _, - )) => Ok("`gen fn` should just keep returning `AssertMessage::None` after completion"), - AssertMessage::ResumedAfterPanic(CoroutineKind::Coroutine(_)) => { - Ok("coroutine resumed after panicking") - } - AssertMessage::ResumedAfterPanic(CoroutineKind::Desugared( - CoroutineDesugaring::Async, - _, - )) => Ok("`async fn` resumed after panicking"), - AssertMessage::ResumedAfterPanic(CoroutineKind::Desugared( - CoroutineDesugaring::Gen, - _, - )) => Ok("`async gen fn` resumed after panicking"), - AssertMessage::ResumedAfterPanic(CoroutineKind::Desugared( - CoroutineDesugaring::AsyncGen, - _, - )) => Ok("`gen fn` should just keep returning `AssertMessage::None` after panicking"), - - AssertMessage::BoundsCheck { .. } => Ok("index out of bounds"), - AssertMessage::MisalignedPointerDereference { .. } => { - Ok("misaligned pointer dereference") - } - AssertMessage::NullPointerDereference => Ok("null pointer dereference occurred"), - } - } -} - -#[derive(Copy, Clone, Debug, Eq, PartialEq, Serialize)] -pub enum BinOp { - Add, - AddUnchecked, - Sub, - SubUnchecked, - Mul, - MulUnchecked, - Div, - Rem, - BitXor, - BitAnd, - BitOr, - Shl, - ShlUnchecked, - Shr, - ShrUnchecked, - Eq, - Lt, - Le, - Ne, - Ge, - Gt, - Cmp, - Offset, -} - -impl BinOp { - /// Return the type of this operation for the given input Ty. - /// This function does not perform type checking, and it currently doesn't handle SIMD. - pub fn ty(&self, lhs_ty: Ty, rhs_ty: Ty) -> Ty { - with(|ctx| ctx.binop_ty(*self, lhs_ty, rhs_ty)) - } -} - -#[derive(Copy, Clone, Debug, Eq, PartialEq, Serialize)] -pub enum UnOp { - Not, - Neg, - PtrMetadata, -} - -impl UnOp { - /// Return the type of this operation for the given input Ty. - /// This function does not perform type checking, and it currently doesn't handle SIMD. - pub fn ty(&self, arg_ty: Ty) -> Ty { - with(|ctx| ctx.unop_ty(*self, arg_ty)) - } -} - -#[derive(Clone, Debug, Eq, PartialEq, Serialize)] -pub enum CoroutineKind { - Desugared(CoroutineDesugaring, CoroutineSource), - Coroutine(Movability), -} - -#[derive(Copy, Clone, Debug, Eq, PartialEq, Serialize)] -pub enum CoroutineSource { - Block, - Closure, - Fn, -} - -#[derive(Copy, Clone, Debug, Eq, PartialEq, Serialize)] -pub enum CoroutineDesugaring { - Async, - - Gen, - - AsyncGen, -} - -pub(crate) type LocalDefId = Opaque; -/// The rustc coverage data structures are heavily tied to internal details of the -/// coverage implementation that are likely to change, and are unlikely to be -/// useful to third-party tools for the foreseeable future. -pub(crate) type Coverage = Opaque; - -/// The FakeReadCause describes the type of pattern why a FakeRead statement exists. -#[derive(Clone, Debug, Eq, PartialEq, Serialize)] -pub enum FakeReadCause { - ForMatchGuard, - ForMatchedPlace(LocalDefId), - ForGuardBinding, - ForLet(LocalDefId), - ForIndex, -} - -/// Describes what kind of retag is to be performed -#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash, Serialize)] -pub enum RetagKind { - FnEntry, - TwoPhase, - Raw, - Default, -} - -#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash, Serialize)] -pub enum Variance { - Covariant, - Invariant, - Contravariant, - Bivariant, -} - -#[derive(Clone, Debug, Eq, PartialEq, Serialize)] -pub struct CopyNonOverlapping { - pub src: Operand, - pub dst: Operand, - pub count: Operand, -} - -#[derive(Clone, Debug, Eq, PartialEq, Serialize)] -pub enum NonDivergingIntrinsic { - Assume(Operand), - CopyNonOverlapping(CopyNonOverlapping), -} - -#[derive(Clone, Debug, Eq, PartialEq, Serialize)] -pub struct Statement { - pub kind: StatementKind, - pub span: Span, -} - -#[derive(Clone, Debug, Eq, PartialEq, Serialize)] -pub enum StatementKind { - Assign(Place, Rvalue), - FakeRead(FakeReadCause, Place), - SetDiscriminant { place: Place, variant_index: VariantIdx }, - Deinit(Place), - StorageLive(Local), - StorageDead(Local), - Retag(RetagKind, Place), - PlaceMention(Place), - AscribeUserType { place: Place, projections: UserTypeProjection, variance: Variance }, - Coverage(Coverage), - Intrinsic(NonDivergingIntrinsic), - ConstEvalCounter, - Nop, -} - -#[derive(Clone, Debug, Eq, PartialEq, Serialize)] -pub enum Rvalue { - /// Creates a pointer with the indicated mutability to the place. - /// - /// This is generated by pointer casts like `&v as *const _` or raw address of expressions like - /// `&raw v` or `addr_of!(v)`. - AddressOf(RawPtrKind, Place), - - /// Creates an aggregate value, like a tuple or struct. - /// - /// This is needed because dataflow analysis needs to distinguish - /// `dest = Foo { x: ..., y: ... }` from `dest.x = ...; dest.y = ...;` in the case that `Foo` - /// has a destructor. - /// - /// Disallowed after deaggregation for all aggregate kinds except `Array` and `Coroutine`. After - /// coroutine lowering, `Coroutine` aggregate kinds are disallowed too. - Aggregate(AggregateKind, Vec), - - /// * `Offset` has the same semantics as `<*const T>::offset`, except that the second - /// parameter may be a `usize` as well. - /// * The comparison operations accept `bool`s, `char`s, signed or unsigned integers, floats, - /// raw pointers, or function pointers and return a `bool`. The types of the operands must be - /// matching, up to the usual caveat of the lifetimes in function pointers. - /// * Left and right shift operations accept signed or unsigned integers not necessarily of the - /// same type and return a value of the same type as their LHS. Like in Rust, the RHS is - /// truncated as needed. - /// * The `Bit*` operations accept signed integers, unsigned integers, or bools with matching - /// types and return a value of that type. - /// * The remaining operations accept signed integers, unsigned integers, or floats with - /// matching types and return a value of that type. - BinaryOp(BinOp, Operand, Operand), - - /// Performs essentially all of the casts that can be performed via `as`. - /// - /// This allows for casts from/to a variety of types. - Cast(CastKind, Operand, Ty), - - /// Same as `BinaryOp`, but yields `(T, bool)` with a `bool` indicating an error condition. - /// - /// For addition, subtraction, and multiplication on integers the error condition is set when - /// the infinite precision result would not be equal to the actual result. - CheckedBinaryOp(BinOp, Operand, Operand), - - /// A CopyForDeref is equivalent to a read from a place. - /// When such a read happens, it is guaranteed that the only use of the returned value is a - /// deref operation, immediately followed by one or more projections. - CopyForDeref(Place), - - /// Computes the discriminant of the place, returning it as an integer. - /// Returns zero for types without discriminant. - /// - /// The validity requirements for the underlying value are undecided for this rvalue, see - /// [#91095]. Note too that the value of the discriminant is not the same thing as the - /// variant index; - /// - /// [#91095]: https://github.com/rust-lang/rust/issues/91095 - Discriminant(Place), - - /// Yields the length of the place, as a `usize`. - /// - /// If the type of the place is an array, this is the array length. For slices (`[T]`, not - /// `&[T]`) this accesses the place's metadata to determine the length. This rvalue is - /// ill-formed for places of other types. - Len(Place), - - /// Creates a reference to the place. - Ref(Region, BorrowKind, Place), - - /// Creates an array where each element is the value of the operand. - /// - /// This is the cause of a bug in the case where the repetition count is zero because the value - /// is not dropped, see [#74836]. - /// - /// Corresponds to source code like `[x; 32]`. - /// - /// [#74836]: https://github.com/rust-lang/rust/issues/74836 - Repeat(Operand, TyConst), - - /// Transmutes a `*mut u8` into shallow-initialized `Box`. - /// - /// This is different from a normal transmute because dataflow analysis will treat the box as - /// initialized but its content as uninitialized. Like other pointer casts, this in general - /// affects alias analysis. - ShallowInitBox(Operand, Ty), - - /// Creates a pointer/reference to the given thread local. - /// - /// The yielded type is a `*mut T` if the static is mutable, otherwise if the static is extern a - /// `*const T`, and if neither of those apply a `&T`. - /// - /// **Note:** This is a runtime operation that actually executes code and is in this sense more - /// like a function call. Also, eliminating dead stores of this rvalue causes `fn main() {}` to - /// SIGILL for some reason that I (JakobDegen) never got a chance to look into. - /// - /// **Needs clarification**: Are there weird additional semantics here related to the runtime - /// nature of this operation? - ThreadLocalRef(crate::CrateItem), - - /// Computes a value as described by the operation. - NullaryOp(NullOp, Ty), - - /// Exactly like `BinaryOp`, but less operands. - /// - /// Also does two's-complement arithmetic. Negation requires a signed integer or a float; - /// bitwise not requires a signed integer, unsigned integer, or bool. Both operation kinds - /// return a value with the same type as their operand. - UnaryOp(UnOp, Operand), - - /// Yields the operand unchanged - Use(Operand), -} - -impl Rvalue { - pub fn ty(&self, locals: &[LocalDecl]) -> Result { - match self { - Rvalue::Use(operand) => operand.ty(locals), - Rvalue::Repeat(operand, count) => { - Ok(Ty::new_array_with_const_len(operand.ty(locals)?, count.clone())) - } - Rvalue::ThreadLocalRef(did) => Ok(did.ty()), - Rvalue::Ref(reg, bk, place) => { - let place_ty = place.ty(locals)?; - Ok(Ty::new_ref(reg.clone(), place_ty, bk.to_mutable_lossy())) - } - Rvalue::AddressOf(mutability, place) => { - let place_ty = place.ty(locals)?; - Ok(Ty::new_ptr(place_ty, mutability.to_mutable_lossy())) - } - Rvalue::Len(..) => Ok(Ty::usize_ty()), - Rvalue::Cast(.., ty) => Ok(*ty), - Rvalue::BinaryOp(op, lhs, rhs) => { - let lhs_ty = lhs.ty(locals)?; - let rhs_ty = rhs.ty(locals)?; - Ok(op.ty(lhs_ty, rhs_ty)) - } - Rvalue::CheckedBinaryOp(op, lhs, rhs) => { - let lhs_ty = lhs.ty(locals)?; - let rhs_ty = rhs.ty(locals)?; - let ty = op.ty(lhs_ty, rhs_ty); - Ok(Ty::new_tuple(&[ty, Ty::bool_ty()])) - } - Rvalue::UnaryOp(op, operand) => { - let arg_ty = operand.ty(locals)?; - Ok(op.ty(arg_ty)) - } - Rvalue::Discriminant(place) => { - let place_ty = place.ty(locals)?; - place_ty - .kind() - .discriminant_ty() - .ok_or_else(|| error!("Expected a `RigidTy` but found: {place_ty:?}")) - } - Rvalue::NullaryOp(NullOp::SizeOf | NullOp::AlignOf | NullOp::OffsetOf(..), _) => { - Ok(Ty::usize_ty()) - } - Rvalue::NullaryOp(NullOp::ContractChecks, _) - | Rvalue::NullaryOp(NullOp::UbChecks, _) => Ok(Ty::bool_ty()), - Rvalue::Aggregate(ak, ops) => match *ak { - AggregateKind::Array(ty) => Ty::try_new_array(ty, ops.len() as u64), - AggregateKind::Tuple => Ok(Ty::new_tuple( - &ops.iter().map(|op| op.ty(locals)).collect::, _>>()?, - )), - AggregateKind::Adt(def, _, ref args, _, _) => Ok(def.ty_with_args(args)), - AggregateKind::Closure(def, ref args) => Ok(Ty::new_closure(def, args.clone())), - AggregateKind::Coroutine(def, ref args, mov) => { - Ok(Ty::new_coroutine(def, args.clone(), mov)) - } - AggregateKind::CoroutineClosure(def, ref args) => { - Ok(Ty::new_coroutine_closure(def, args.clone())) - } - AggregateKind::RawPtr(ty, mutability) => Ok(Ty::new_ptr(ty, mutability)), - }, - Rvalue::ShallowInitBox(_, ty) => Ok(Ty::new_box(*ty)), - Rvalue::CopyForDeref(place) => place.ty(locals), - } - } -} - -#[derive(Clone, Debug, Eq, PartialEq, Serialize)] -pub enum AggregateKind { - Array(Ty), - Tuple, - Adt(AdtDef, VariantIdx, GenericArgs, Option, Option), - Closure(ClosureDef, GenericArgs), - // FIXME(stable_mir): Movability here is redundant - Coroutine(CoroutineDef, GenericArgs, Movability), - CoroutineClosure(CoroutineClosureDef, GenericArgs), - RawPtr(Ty, Mutability), -} - -#[derive(Clone, Debug, Eq, PartialEq, Serialize)] -pub enum Operand { - Copy(Place), - Move(Place), - Constant(ConstOperand), -} - -#[derive(Clone, Eq, PartialEq, Serialize)] -pub struct Place { - pub local: Local, - /// projection out of a place (access a field, deref a pointer, etc) - pub projection: Vec, -} - -impl From for Place { - fn from(local: Local) -> Self { - Place { local, projection: vec![] } - } -} - -#[derive(Clone, Debug, Eq, PartialEq, Serialize)] -pub struct ConstOperand { - pub span: Span, - pub user_ty: Option, - pub const_: MirConst, -} - -/// Debug information pertaining to a user variable. -#[derive(Clone, Debug, Eq, PartialEq, Serialize)] -pub struct VarDebugInfo { - /// The variable name. - pub name: Symbol, - - /// Source info of the user variable, including the scope - /// within which the variable is visible (to debuginfo). - pub source_info: SourceInfo, - - /// The user variable's data is split across several fragments, - /// each described by a `VarDebugInfoFragment`. - pub composite: Option, - - /// Where the data for this user variable is to be found. - pub value: VarDebugInfoContents, - - /// When present, indicates what argument number this variable is in the function that it - /// originated from (starting from 1). Note, if MIR inlining is enabled, then this is the - /// argument number in the original function before it was inlined. - pub argument_index: Option, -} - -impl VarDebugInfo { - /// Return a local variable if this info is related to one. - pub fn local(&self) -> Option { - match &self.value { - VarDebugInfoContents::Place(place) if place.projection.is_empty() => Some(place.local), - VarDebugInfoContents::Place(_) | VarDebugInfoContents::Const(_) => None, - } - } - - /// Return a constant if this info is related to one. - pub fn constant(&self) -> Option<&ConstOperand> { - match &self.value { - VarDebugInfoContents::Place(_) => None, - VarDebugInfoContents::Const(const_op) => Some(const_op), - } - } -} - -pub type SourceScope = u32; - -#[derive(Clone, Debug, Eq, PartialEq, Serialize)] -pub struct SourceInfo { - pub span: Span, - pub scope: SourceScope, -} - -#[derive(Clone, Debug, Eq, PartialEq, Serialize)] -pub struct VarDebugInfoFragment { - pub ty: Ty, - pub projection: Vec, -} - -#[derive(Clone, Debug, Eq, PartialEq, Serialize)] -pub enum VarDebugInfoContents { - Place(Place), - Const(ConstOperand), -} - -// In MIR ProjectionElem is parameterized on the second Field argument and the Index argument. This -// is so it can be used for both Places (for which the projection elements are of type -// ProjectionElem) and user-provided type annotations (for which the projection elements -// are of type ProjectionElem<(), ()>). In SMIR we don't need this generality, so we just use -// ProjectionElem for Places. -#[derive(Clone, Debug, Eq, PartialEq, Serialize)] -pub enum ProjectionElem { - /// Dereference projections (e.g. `*_1`) project to the address referenced by the base place. - Deref, - - /// A field projection (e.g., `f` in `_1.f`) project to a field in the base place. The field is - /// referenced by source-order index rather than the name of the field. The fields type is also - /// given. - Field(FieldIdx, Ty), - - /// Index into a slice/array. The value of the index is computed at runtime using the `V` - /// argument. - /// - /// Note that this does not also dereference, and so it does not exactly correspond to slice - /// indexing in Rust. In other words, in the below Rust code: - /// - /// ```rust - /// let x = &[1, 2, 3, 4]; - /// let i = 2; - /// x[i]; - /// ``` - /// - /// The `x[i]` is turned into a `Deref` followed by an `Index`, not just an `Index`. The same - /// thing is true of the `ConstantIndex` and `Subslice` projections below. - Index(Local), - - /// Index into a slice/array given by offsets. - /// - /// These indices are generated by slice patterns. Easiest to explain by example: - /// - /// ```ignore (illustrative) - /// [X, _, .._, _, _] => { offset: 0, min_length: 4, from_end: false }, - /// [_, X, .._, _, _] => { offset: 1, min_length: 4, from_end: false }, - /// [_, _, .._, X, _] => { offset: 2, min_length: 4, from_end: true }, - /// [_, _, .._, _, X] => { offset: 1, min_length: 4, from_end: true }, - /// ``` - ConstantIndex { - /// index or -index (in Python terms), depending on from_end - offset: u64, - /// The thing being indexed must be at least this long -- otherwise, the - /// projection is UB. - /// - /// For arrays this is always the exact length. - min_length: u64, - /// Counting backwards from end? This is always false when indexing an - /// array. - from_end: bool, - }, - - /// Projects a slice from the base place. - /// - /// These indices are generated by slice patterns. If `from_end` is true, this represents - /// `slice[from..slice.len() - to]`. Otherwise it represents `array[from..to]`. - Subslice { - from: u64, - to: u64, - /// Whether `to` counts from the start or end of the array/slice. - from_end: bool, - }, - - /// "Downcast" to a variant of an enum or a coroutine. - Downcast(VariantIdx), - - /// Like an explicit cast from an opaque type to a concrete type, but without - /// requiring an intermediate variable. - OpaqueCast(Ty), - - /// A `Subtype(T)` projection is applied to any `StatementKind::Assign` where - /// type of lvalue doesn't match the type of rvalue, the primary goal is making subtyping - /// explicit during optimizations and codegen. - /// - /// This projection doesn't impact the runtime behavior of the program except for potentially changing - /// some type metadata of the interpreter or codegen backend. - Subtype(Ty), -} - -#[derive(Clone, Debug, Eq, PartialEq, Serialize)] -pub struct UserTypeProjection { - pub base: UserTypeAnnotationIndex, - - pub projection: Opaque, -} - -pub type Local = usize; - -pub const RETURN_LOCAL: Local = 0; - -/// The source-order index of a field in a variant. -/// -/// For example, in the following types, -/// ```ignore(illustrative) -/// enum Demo1 { -/// Variant0 { a: bool, b: i32 }, -/// Variant1 { c: u8, d: u64 }, -/// } -/// struct Demo2 { e: u8, f: u16, g: u8 } -/// ``` -/// `a`'s `FieldIdx` is `0`, -/// `b`'s `FieldIdx` is `1`, -/// `c`'s `FieldIdx` is `0`, and -/// `g`'s `FieldIdx` is `2`. -pub type FieldIdx = usize; - -type UserTypeAnnotationIndex = usize; - -/// The possible branch sites of a [TerminatorKind::SwitchInt]. -#[derive(Clone, Debug, Eq, PartialEq, Serialize)] -pub struct SwitchTargets { - /// The conditional branches where the first element represents the value that guards this - /// branch, and the second element is the branch target. - branches: Vec<(u128, BasicBlockIdx)>, - /// The `otherwise` branch which will be taken in case none of the conditional branches are - /// satisfied. - otherwise: BasicBlockIdx, -} - -impl SwitchTargets { - /// All possible targets including the `otherwise` target. - pub fn all_targets(&self) -> Successors { - self.branches.iter().map(|(_, target)| *target).chain(Some(self.otherwise)).collect() - } - - /// The `otherwise` branch target. - pub fn otherwise(&self) -> BasicBlockIdx { - self.otherwise - } - - /// The conditional targets which are only taken if the pattern matches the given value. - pub fn branches(&self) -> impl Iterator { - self.branches.iter().copied() - } - - /// The number of targets including `otherwise`. - pub fn len(&self) -> usize { - self.branches.len() + 1 - } - - /// Create a new SwitchTargets from the given branches and `otherwise` target. - pub fn new(branches: Vec<(u128, BasicBlockIdx)>, otherwise: BasicBlockIdx) -> SwitchTargets { - SwitchTargets { branches, otherwise } - } -} - -#[derive(Copy, Clone, Debug, Eq, PartialEq, Serialize)] -pub enum BorrowKind { - /// Data must be immutable and is aliasable. - Shared, - - /// An immutable, aliasable borrow that is discarded after borrow-checking. Can behave either - /// like a normal shared borrow or like a special shallow borrow (see [`FakeBorrowKind`]). - Fake(FakeBorrowKind), - - /// Data is mutable and not aliasable. - Mut { - /// `true` if this borrow arose from method-call auto-ref - kind: MutBorrowKind, - }, -} - -impl BorrowKind { - pub fn to_mutable_lossy(self) -> Mutability { - match self { - BorrowKind::Mut { .. } => Mutability::Mut, - BorrowKind::Shared => Mutability::Not, - // FIXME: There's no type corresponding to a shallow borrow, so use `&` as an approximation. - BorrowKind::Fake(_) => Mutability::Not, - } - } -} - -#[derive(Copy, Clone, Debug, Eq, PartialEq, Serialize)] -pub enum RawPtrKind { - Mut, - Const, - FakeForPtrMetadata, -} - -impl RawPtrKind { - pub fn to_mutable_lossy(self) -> Mutability { - match self { - RawPtrKind::Mut { .. } => Mutability::Mut, - RawPtrKind::Const => Mutability::Not, - // FIXME: There's no type corresponding to a shallow borrow, so use `&` as an approximation. - RawPtrKind::FakeForPtrMetadata => Mutability::Not, - } - } -} - -#[derive(Copy, Clone, Debug, Eq, PartialEq, Serialize)] -pub enum MutBorrowKind { - Default, - TwoPhaseBorrow, - ClosureCapture, -} - -#[derive(Copy, Clone, Debug, Eq, PartialEq, Serialize)] -pub enum FakeBorrowKind { - /// A shared (deep) borrow. Data must be immutable and is aliasable. - Deep, - /// The immediately borrowed place must be immutable, but projections from - /// it don't need to be. This is used to prevent match guards from replacing - /// the scrutinee. For example, a fake borrow of `a.b` doesn't - /// conflict with a mutable borrow of `a.b.c`. - Shallow, -} - -#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, Serialize)] -pub enum Mutability { - Not, - Mut, -} - -#[derive(Copy, Clone, Debug, Eq, PartialEq, Serialize)] -pub enum Safety { - Safe, - Unsafe, -} - -#[derive(Copy, Clone, Debug, Eq, PartialEq, Serialize)] -pub enum PointerCoercion { - /// Go from a fn-item type to a fn-pointer type. - ReifyFnPointer, - - /// Go from a safe fn pointer to an unsafe fn pointer. - UnsafeFnPointer, - - /// Go from a non-capturing closure to a fn pointer or an unsafe fn pointer. - /// It cannot convert a closure that requires unsafe. - ClosureFnPointer(Safety), - - /// Go from a mut raw pointer to a const raw pointer. - MutToConstPointer, - - /// Go from `*const [T; N]` to `*const T` - ArrayToPointer, - - /// Unsize a pointer/reference value, e.g., `&[T; n]` to - /// `&[T]`. Note that the source could be a thin or wide pointer. - /// This will do things like convert thin pointers to wide - /// pointers, or convert structs containing thin pointers to - /// structs containing wide pointers, or convert between wide - /// pointers. - Unsize, -} - -#[derive(Copy, Clone, Debug, Eq, PartialEq, Serialize)] -pub enum CastKind { - // FIXME(smir-rename): rename this to PointerExposeProvenance - PointerExposeAddress, - PointerWithExposedProvenance, - PointerCoercion(PointerCoercion), - // FIXME(smir-rename): change this to PointerCoercion(DynStar) - DynStar, - IntToInt, - FloatToInt, - FloatToFloat, - IntToFloat, - PtrToPtr, - FnPtrToPtr, - Transmute, -} - -#[derive(Clone, Debug, Eq, PartialEq, Serialize)] -pub enum NullOp { - /// Returns the size of a value of that type. - SizeOf, - /// Returns the minimum alignment of a type. - AlignOf, - /// Returns the offset of a field. - OffsetOf(Vec<(VariantIdx, FieldIdx)>), - /// cfg!(ub_checks), but at codegen time - UbChecks, - /// cfg!(contract_checks), but at codegen time - ContractChecks, -} - -impl Operand { - /// Get the type of an operand relative to the local declaration. - /// - /// In order to retrieve the correct type, the `locals` argument must match the list of all - /// locals from the function body where this operand originates from. - /// - /// Errors indicate a malformed operand or incompatible locals list. - pub fn ty(&self, locals: &[LocalDecl]) -> Result { - match self { - Operand::Copy(place) | Operand::Move(place) => place.ty(locals), - Operand::Constant(c) => Ok(c.ty()), - } - } -} - -impl ConstOperand { - pub fn ty(&self) -> Ty { - self.const_.ty() - } -} - -impl Place { - /// Resolve down the chain of projections to get the type referenced at the end of it. - /// E.g.: - /// Calling `ty()` on `var.field` should return the type of `field`. - /// - /// In order to retrieve the correct type, the `locals` argument must match the list of all - /// locals from the function body where this place originates from. - pub fn ty(&self, locals: &[LocalDecl]) -> Result { - self.projection.iter().try_fold(locals[self.local].ty, |place_ty, elem| elem.ty(place_ty)) - } -} - -impl ProjectionElem { - /// Get the expected type after applying this projection to a given place type. - pub fn ty(&self, place_ty: Ty) -> Result { - let ty = place_ty; - match &self { - ProjectionElem::Deref => Self::deref_ty(ty), - ProjectionElem::Field(_idx, fty) => Ok(*fty), - ProjectionElem::Index(_) | ProjectionElem::ConstantIndex { .. } => Self::index_ty(ty), - ProjectionElem::Subslice { from, to, from_end } => { - Self::subslice_ty(ty, *from, *to, *from_end) - } - ProjectionElem::Downcast(_) => Ok(ty), - ProjectionElem::OpaqueCast(ty) | ProjectionElem::Subtype(ty) => Ok(*ty), - } - } - - fn index_ty(ty: Ty) -> Result { - ty.kind().builtin_index().ok_or_else(|| error!("Cannot index non-array type: {ty:?}")) - } - - fn subslice_ty(ty: Ty, from: u64, to: u64, from_end: bool) -> Result { - let ty_kind = ty.kind(); - match ty_kind { - TyKind::RigidTy(RigidTy::Slice(..)) => Ok(ty), - TyKind::RigidTy(RigidTy::Array(inner, _)) if !from_end => Ty::try_new_array( - inner, - to.checked_sub(from).ok_or_else(|| error!("Subslice overflow: {from}..{to}"))?, - ), - TyKind::RigidTy(RigidTy::Array(inner, size)) => { - let size = size.eval_target_usize()?; - let len = size - from - to; - Ty::try_new_array(inner, len) - } - _ => Err(Error(format!("Cannot subslice non-array type: `{ty_kind:?}`"))), - } - } - - fn deref_ty(ty: Ty) -> Result { - let deref_ty = ty - .kind() - .builtin_deref(true) - .ok_or_else(|| error!("Cannot dereference type: {ty:?}"))?; - Ok(deref_ty.ty) - } -} diff --git a/compiler/stable_mir/src/mir/pretty.rs b/compiler/stable_mir/src/mir/pretty.rs deleted file mode 100644 index 8278afb7a2f17..0000000000000 --- a/compiler/stable_mir/src/mir/pretty.rs +++ /dev/null @@ -1,452 +0,0 @@ -//! Implement methods to pretty print stable MIR body. -use std::fmt::Debug; -use std::io::Write; -use std::{fmt, io, iter}; - -use fmt::{Display, Formatter}; - -use super::{AggregateKind, AssertMessage, BinOp, BorrowKind, FakeBorrowKind, TerminatorKind}; -use crate::mir::{ - Operand, Place, RawPtrKind, Rvalue, StatementKind, UnwindAction, VarDebugInfoContents, -}; -use crate::ty::{AdtKind, IndexedVal, MirConst, Ty, TyConst}; -use crate::{Body, CrateDef, Mutability, with}; - -impl Display for Ty { - fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { - with(|ctx| write!(f, "{}", ctx.ty_pretty(*self))) - } -} - -impl Debug for Place { - fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { - with(|ctx| write!(f, "{}", ctx.place_pretty(self))) - } -} - -pub(crate) fn function_body(writer: &mut W, body: &Body, name: &str) -> io::Result<()> { - write!(writer, "fn {name}(")?; - let mut sep = ""; - for (index, local) in body.arg_locals().iter().enumerate() { - write!(writer, "{}_{}: {}", sep, index + 1, local.ty)?; - sep = ", "; - } - write!(writer, ")")?; - - let return_local = body.ret_local(); - writeln!(writer, " -> {} {{", return_local.ty)?; - - body.locals().iter().enumerate().try_for_each(|(index, local)| -> io::Result<()> { - if index == 0 || index > body.arg_count { - writeln!(writer, " let {}_{}: {};", pretty_mut(local.mutability), index, local.ty) - } else { - Ok(()) - } - })?; - - body.var_debug_info.iter().try_for_each(|info| { - let content = match &info.value { - VarDebugInfoContents::Place(place) => { - format!("{place:?}") - } - VarDebugInfoContents::Const(constant) => pretty_mir_const(&constant.const_), - }; - writeln!(writer, " debug {} => {};", info.name, content) - })?; - - body.blocks - .iter() - .enumerate() - .map(|(index, block)| -> io::Result<()> { - writeln!(writer, " bb{index}: {{")?; - let _ = block - .statements - .iter() - .map(|statement| -> io::Result<()> { - pretty_statement(writer, &statement.kind)?; - Ok(()) - }) - .collect::>(); - pretty_terminator(writer, &block.terminator.kind)?; - writeln!(writer, " }}").unwrap(); - Ok(()) - }) - .collect::, _>>()?; - writeln!(writer, "}}")?; - Ok(()) -} - -fn pretty_statement(writer: &mut W, statement: &StatementKind) -> io::Result<()> { - const INDENT: &str = " "; - match statement { - StatementKind::Assign(place, rval) => { - write!(writer, "{INDENT}{place:?} = ")?; - pretty_rvalue(writer, rval)?; - writeln!(writer, ";") - } - // FIXME: Add rest of the statements - StatementKind::FakeRead(cause, place) => { - writeln!(writer, "{INDENT}FakeRead({cause:?}, {place:?});") - } - StatementKind::SetDiscriminant { place, variant_index } => { - writeln!(writer, "{INDENT}discriminant({place:?} = {};", variant_index.to_index()) - } - StatementKind::Deinit(place) => writeln!(writer, "Deinit({place:?};"), - StatementKind::StorageLive(local) => { - writeln!(writer, "{INDENT}StorageLive(_{local});") - } - StatementKind::StorageDead(local) => { - writeln!(writer, "{INDENT}StorageDead(_{local});") - } - StatementKind::Retag(kind, place) => writeln!(writer, "Retag({kind:?}, {place:?});"), - StatementKind::PlaceMention(place) => { - writeln!(writer, "{INDENT}PlaceMention({place:?};") - } - StatementKind::ConstEvalCounter => { - writeln!(writer, "{INDENT}ConstEvalCounter;") - } - StatementKind::Nop => writeln!(writer, "{INDENT}nop;"), - StatementKind::AscribeUserType { .. } - | StatementKind::Coverage(_) - | StatementKind::Intrinsic(_) => { - // FIX-ME: Make them pretty. - writeln!(writer, "{INDENT}{statement:?};") - } - } -} - -fn pretty_terminator(writer: &mut W, terminator: &TerminatorKind) -> io::Result<()> { - pretty_terminator_head(writer, terminator)?; - let successors = terminator.successors(); - let successor_count = successors.len(); - let labels = pretty_successor_labels(terminator); - - let show_unwind = !matches!(terminator.unwind(), None | Some(UnwindAction::Cleanup(_))); - let fmt_unwind = |w: &mut W| -> io::Result<()> { - write!(w, "unwind ")?; - match terminator.unwind() { - None | Some(UnwindAction::Cleanup(_)) => unreachable!(), - Some(UnwindAction::Continue) => write!(w, "continue"), - Some(UnwindAction::Unreachable) => write!(w, "unreachable"), - Some(UnwindAction::Terminate) => write!(w, "terminate"), - } - }; - - match (successor_count, show_unwind) { - (0, false) => {} - (0, true) => { - write!(writer, " -> ")?; - fmt_unwind(writer)?; - } - (1, false) => write!(writer, " -> bb{:?}", successors[0])?, - _ => { - write!(writer, " -> [")?; - for (i, target) in successors.iter().enumerate() { - if i > 0 { - write!(writer, ", ")?; - } - write!(writer, "{}: bb{:?}", labels[i], target)?; - } - if show_unwind { - write!(writer, ", ")?; - fmt_unwind(writer)?; - } - write!(writer, "]")?; - } - }; - - writeln!(writer, ";") -} - -fn pretty_terminator_head(writer: &mut W, terminator: &TerminatorKind) -> io::Result<()> { - use self::TerminatorKind::*; - const INDENT: &str = " "; - match terminator { - Goto { .. } => write!(writer, "{INDENT}goto"), - SwitchInt { discr, .. } => { - write!(writer, "{INDENT}switchInt({})", pretty_operand(discr)) - } - Resume => write!(writer, "{INDENT}resume"), - Abort => write!(writer, "{INDENT}abort"), - Return => write!(writer, "{INDENT}return"), - Unreachable => write!(writer, "{INDENT}unreachable"), - Drop { place, .. } => write!(writer, "{INDENT}drop({place:?})"), - Call { func, args, destination, .. } => { - write!(writer, "{INDENT}{:?} = {}(", destination, pretty_operand(func))?; - let mut args_iter = args.iter(); - args_iter.next().map_or(Ok(()), |arg| write!(writer, "{}", pretty_operand(arg)))?; - args_iter.try_for_each(|arg| write!(writer, ", {}", pretty_operand(arg)))?; - write!(writer, ")") - } - Assert { cond, expected, msg, target: _, unwind: _ } => { - write!(writer, "{INDENT}assert(")?; - if !expected { - write!(writer, "!")?; - } - write!(writer, "{}, ", pretty_operand(cond))?; - pretty_assert_message(writer, msg)?; - write!(writer, ")") - } - InlineAsm { .. } => write!(writer, "{INDENT}InlineAsm"), - } -} - -fn pretty_successor_labels(terminator: &TerminatorKind) -> Vec { - use self::TerminatorKind::*; - match terminator { - Call { target: None, unwind: UnwindAction::Cleanup(_), .. } - | InlineAsm { destination: None, .. } => vec!["unwind".into()], - Resume | Abort | Return | Unreachable | Call { target: None, unwind: _, .. } => vec![], - Goto { .. } => vec!["".to_string()], - SwitchInt { targets, .. } => targets - .branches() - .map(|(val, _target)| format!("{val}")) - .chain(iter::once("otherwise".into())) - .collect(), - Drop { unwind: UnwindAction::Cleanup(_), .. } => vec!["return".into(), "unwind".into()], - Call { target: Some(_), unwind: UnwindAction::Cleanup(_), .. } => { - vec!["return".into(), "unwind".into()] - } - Drop { unwind: _, .. } | Call { target: Some(_), unwind: _, .. } => vec!["return".into()], - Assert { unwind: UnwindAction::Cleanup(_), .. } => { - vec!["success".into(), "unwind".into()] - } - Assert { unwind: _, .. } => vec!["success".into()], - InlineAsm { destination: Some(_), .. } => vec!["goto".into(), "unwind".into()], - } -} - -fn pretty_assert_message(writer: &mut W, msg: &AssertMessage) -> io::Result<()> { - match msg { - AssertMessage::BoundsCheck { len, index } => { - let pretty_len = pretty_operand(len); - let pretty_index = pretty_operand(index); - write!( - writer, - "\"index out of bounds: the length is {{}} but the index is {{}}\", {pretty_len}, {pretty_index}" - ) - } - AssertMessage::Overflow(BinOp::Add, l, r) => { - let pretty_l = pretty_operand(l); - let pretty_r = pretty_operand(r); - write!( - writer, - "\"attempt to compute `{{}} + {{}}`, which would overflow\", {pretty_l}, {pretty_r}" - ) - } - AssertMessage::Overflow(BinOp::Sub, l, r) => { - let pretty_l = pretty_operand(l); - let pretty_r = pretty_operand(r); - write!( - writer, - "\"attempt to compute `{{}} - {{}}`, which would overflow\", {pretty_l}, {pretty_r}" - ) - } - AssertMessage::Overflow(BinOp::Mul, l, r) => { - let pretty_l = pretty_operand(l); - let pretty_r = pretty_operand(r); - write!( - writer, - "\"attempt to compute `{{}} * {{}}`, which would overflow\", {pretty_l}, {pretty_r}" - ) - } - AssertMessage::Overflow(BinOp::Div, l, r) => { - let pretty_l = pretty_operand(l); - let pretty_r = pretty_operand(r); - write!( - writer, - "\"attempt to compute `{{}} / {{}}`, which would overflow\", {pretty_l}, {pretty_r}" - ) - } - AssertMessage::Overflow(BinOp::Rem, l, r) => { - let pretty_l = pretty_operand(l); - let pretty_r = pretty_operand(r); - write!( - writer, - "\"attempt to compute `{{}} % {{}}`, which would overflow\", {pretty_l}, {pretty_r}" - ) - } - AssertMessage::Overflow(BinOp::Shr, _, r) => { - let pretty_r = pretty_operand(r); - write!(writer, "\"attempt to shift right by `{{}}`, which would overflow\", {pretty_r}") - } - AssertMessage::Overflow(BinOp::Shl, _, r) => { - let pretty_r = pretty_operand(r); - write!(writer, "\"attempt to shift left by `{{}}`, which would overflow\", {pretty_r}") - } - AssertMessage::Overflow(op, _, _) => unreachable!("`{:?}` cannot overflow", op), - AssertMessage::OverflowNeg(op) => { - let pretty_op = pretty_operand(op); - write!(writer, "\"attempt to negate `{{}}`, which would overflow\", {pretty_op}") - } - AssertMessage::DivisionByZero(op) => { - let pretty_op = pretty_operand(op); - write!(writer, "\"attempt to divide `{{}}` by zero\", {pretty_op}") - } - AssertMessage::RemainderByZero(op) => { - let pretty_op = pretty_operand(op); - write!( - writer, - "\"attempt to calculate the remainder of `{{}}` with a divisor of zero\", {pretty_op}" - ) - } - AssertMessage::MisalignedPointerDereference { required, found } => { - let pretty_required = pretty_operand(required); - let pretty_found = pretty_operand(found); - write!( - writer, - "\"misaligned pointer dereference: address must be a multiple of {{}} but is {{}}\",{pretty_required}, {pretty_found}" - ) - } - AssertMessage::NullPointerDereference => { - write!(writer, "\"null pointer dereference occurred\"") - } - AssertMessage::ResumedAfterReturn(_) | AssertMessage::ResumedAfterPanic(_) => { - write!(writer, "{}", msg.description().unwrap()) - } - } -} - -fn pretty_operand(operand: &Operand) -> String { - match operand { - Operand::Copy(copy) => { - format!("{copy:?}") - } - Operand::Move(mv) => { - format!("move {mv:?}") - } - Operand::Constant(cnst) => pretty_mir_const(&cnst.const_), - } -} - -fn pretty_mir_const(literal: &MirConst) -> String { - with(|cx| cx.mir_const_pretty(literal)) -} - -fn pretty_ty_const(ct: &TyConst) -> String { - with(|cx| cx.ty_const_pretty(ct.id)) -} - -fn pretty_rvalue(writer: &mut W, rval: &Rvalue) -> io::Result<()> { - match rval { - Rvalue::AddressOf(mutability, place) => { - write!(writer, "&raw {} {:?}", pretty_raw_ptr_kind(*mutability), place) - } - Rvalue::Aggregate(aggregate_kind, operands) => { - // FIXME: Add pretty_aggregate function that returns a pretty string - pretty_aggregate(writer, aggregate_kind, operands) - } - Rvalue::BinaryOp(bin, op1, op2) => { - write!(writer, "{:?}({}, {})", bin, pretty_operand(op1), pretty_operand(op2)) - } - Rvalue::Cast(_, op, ty) => { - write!(writer, "{} as {}", pretty_operand(op), ty) - } - Rvalue::CheckedBinaryOp(bin, op1, op2) => { - write!(writer, "Checked{:?}({}, {})", bin, pretty_operand(op1), pretty_operand(op2)) - } - Rvalue::CopyForDeref(deref) => { - write!(writer, "CopyForDeref({deref:?})") - } - Rvalue::Discriminant(place) => { - write!(writer, "discriminant({place:?})") - } - Rvalue::Len(len) => { - write!(writer, "len({len:?})") - } - Rvalue::Ref(_, borrowkind, place) => { - let kind = match borrowkind { - BorrowKind::Shared => "&", - BorrowKind::Fake(FakeBorrowKind::Deep) => "&fake ", - BorrowKind::Fake(FakeBorrowKind::Shallow) => "&fake shallow ", - BorrowKind::Mut { .. } => "&mut ", - }; - write!(writer, "{kind}{place:?}") - } - Rvalue::Repeat(op, cnst) => { - write!(writer, "[{}; {}]", pretty_operand(op), pretty_ty_const(cnst)) - } - Rvalue::ShallowInitBox(_, _) => Ok(()), - Rvalue::ThreadLocalRef(item) => { - write!(writer, "thread_local_ref{item:?}") - } - Rvalue::NullaryOp(nul, ty) => { - write!(writer, "{nul:?}::<{ty}>() \" \"") - } - Rvalue::UnaryOp(un, op) => { - write!(writer, "{:?}({})", un, pretty_operand(op)) - } - Rvalue::Use(op) => write!(writer, "{}", pretty_operand(op)), - } -} - -fn pretty_aggregate( - writer: &mut W, - aggregate_kind: &AggregateKind, - operands: &Vec, -) -> io::Result<()> { - let suffix = match aggregate_kind { - AggregateKind::Array(_) => { - write!(writer, "[")?; - "]" - } - AggregateKind::Tuple => { - write!(writer, "(")?; - ")" - } - AggregateKind::Adt(def, var, _, _, _) => { - if def.kind() == AdtKind::Enum { - write!(writer, "{}::{}", def.name(), def.variant(*var).unwrap().name())?; - } else { - write!(writer, "{}", def.variant(*var).unwrap().name())?; - } - if operands.is_empty() { - return Ok(()); - } - // FIXME: Change this once we have CtorKind in StableMIR. - write!(writer, "(")?; - ")" - } - AggregateKind::Closure(def, _) => { - write!(writer, "{{closure@{}}}(", def.span().diagnostic())?; - ")" - } - AggregateKind::Coroutine(def, _, _) => { - write!(writer, "{{coroutine@{}}}(", def.span().diagnostic())?; - ")" - } - AggregateKind::CoroutineClosure(def, _) => { - write!(writer, "{{coroutine-closure@{}}}(", def.span().diagnostic())?; - ")" - } - AggregateKind::RawPtr(ty, mutability) => { - write!( - writer, - "*{} {ty} from (", - if *mutability == Mutability::Mut { "mut" } else { "const" } - )?; - ")" - } - }; - let mut separator = ""; - for op in operands { - write!(writer, "{}{}", separator, pretty_operand(op))?; - separator = ", "; - } - write!(writer, "{suffix}") -} - -fn pretty_mut(mutability: Mutability) -> &'static str { - match mutability { - Mutability::Not => " ", - Mutability::Mut => "mut ", - } -} - -fn pretty_raw_ptr_kind(kind: RawPtrKind) -> &'static str { - match kind { - RawPtrKind::Const => "const", - RawPtrKind::Mut => "mut", - RawPtrKind::FakeForPtrMetadata => "const (fake)", - } -} diff --git a/library/Cargo.lock b/library/Cargo.lock index 6b1a0a080551f..5100b4d8176dc 100644 --- a/library/Cargo.lock +++ b/library/Cargo.lock @@ -32,12 +32,6 @@ dependencies = [ "core", ] -[[package]] -name = "allocator-api2" -version = "0.2.21" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "683d7910e743518b0e34f1186f92494becacb047c7b6bf616c96772180fef923" - [[package]] name = "alloctests" version = "0.0.0" @@ -67,9 +61,9 @@ dependencies = [ [[package]] name = "compiler_builtins" -version = "0.1.152" +version = "0.1.158" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2153cf213eb259361567720ce55f6446f17acd0ccca87fb6dc05360578228a58" +checksum = "164cdc689e4c6d69417f77a5f48be240c291e84fbef0b1281755dc754b19c809" dependencies = [ "cc", "rustc-std-workspace-core", @@ -89,9 +83,9 @@ dependencies = [ [[package]] name = "dlmalloc" -version = "0.2.7" +version = "0.2.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d9b5e0d321d61de16390ed273b647ce51605b575916d3c25e6ddf27a1e140035" +checksum = "8cff88b751e7a276c4ab0e222c3f355190adc6dde9ce39c851db39da34990df7" dependencies = [ "cfg-if", "compiler_builtins", @@ -134,11 +128,10 @@ dependencies = [ [[package]] name = "hashbrown" -version = "0.15.2" +version = "0.15.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bf151400ff0baff5465007dd2f3e717f3fe502074ca563069ce3a6629d07b289" +checksum = "84b26c544d002229e640969970a2e74021aadf6e2f96372b9c58eff97de08eb3" dependencies = [ - "allocator-api2", "compiler_builtins", "rustc-std-workspace-alloc", "rustc-std-workspace-core", @@ -157,9 +150,9 @@ dependencies = [ [[package]] name = "libc" -version = "0.2.171" +version = "0.2.172" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c19937216e9d3aa9956d9bb8dfc0b0c8beb6058fc4f7a4dc4d850edf86a237d6" +checksum = "d750af042f7ef4f724306de029d18836c26c1765a54a6a3f094cbd23a7267ffa" dependencies = [ "rustc-std-workspace-core", ] @@ -176,9 +169,9 @@ dependencies = [ [[package]] name = "miniz_oxide" -version = "0.8.3" +version = "0.8.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b8402cab7aefae129c6977bb0ff1b8fd9a04eb5b51efc50a70bea51cda0c7924" +checksum = "3be647b768db090acb35d5ec5db2b0e1f1de11133ca123b9eacf5137868f892a" dependencies = [ "adler2", "compiler_builtins", @@ -221,20 +214,12 @@ dependencies = [ "unwind", ] -[[package]] -name = "proc-macro2" -version = "1.0.93" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "60946a68e5f9d28b0dc1c21bb8a97ee7d018a8b322fa57838ba31cc878e22d99" -dependencies = [ - "unicode-ident", -] - [[package]] name = "proc_macro" version = "0.0.0" dependencies = [ "core", + "rustc-literal-escaper", "std", ] @@ -245,20 +230,11 @@ dependencies = [ "cc", ] -[[package]] -name = "quote" -version = "1.0.38" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0e4dccaaaf89514f546c693ddc140f729f958c247918a13380cccc6078391acc" -dependencies = [ - "proc-macro2", -] - [[package]] name = "r-efi" -version = "4.5.0" +version = "5.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e9e935efc5854715dfc0a4c9ef18dc69dee0ec3bf9cc3ab740db831c0fdd86a3" +checksum = "74765f6d916ee2faa39bc8e68e4f3ed8949b48cccdac59983d287a7cb71ce9c5" dependencies = [ "compiler_builtins", "rustc-std-workspace-core", @@ -266,9 +242,9 @@ dependencies = [ [[package]] name = "r-efi-alloc" -version = "1.0.0" +version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "31d6f09fe2b6ad044bc3d2c34ce4979796581afd2f1ebc185837e02421e02fd7" +checksum = "e43c53ff1a01d423d1cb762fd991de07d32965ff0ca2e4f80444ac7804198203" dependencies = [ "compiler_builtins", "r-efi", @@ -277,22 +253,18 @@ dependencies = [ [[package]] name = "rand" -version = "0.9.0" +version = "0.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3779b94aeb87e8bd4e834cee3650289ee9e0d5677f976ecdb6d219e5f4f6cd94" +checksum = "9fbfd9d094a40bf3ae768db9361049ace4c0e04a4fd6b359518bd7b73a73dd97" dependencies = [ "rand_core", - "zerocopy", ] [[package]] name = "rand_core" -version = "0.9.0" +version = "0.9.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b08f3c9802962f7e1b25113931d94f43ed9725bebc59db9d0c3e9a23b67e15ff" -dependencies = [ - "zerocopy", -] +checksum = "99d9a13982dcf210057a8a78572b2217b667c3beacbf3a0d8b454f6f82837d38" [[package]] name = "rand_xorshift" @@ -313,6 +285,15 @@ dependencies = [ "rustc-std-workspace-core", ] +[[package]] +name = "rustc-literal-escaper" +version = "0.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0041b6238913c41fe704213a4a9329e2f685a156d1781998128b4149c230ad04" +dependencies = [ + "rustc-std-workspace-std", +] + [[package]] name = "rustc-std-workspace-alloc" version = "1.99.0" @@ -380,17 +361,6 @@ dependencies = [ "rustc-std-workspace-core", ] -[[package]] -name = "syn" -version = "2.0.98" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "36147f1a48ae0ec2b5b3bc5b537d267457555a10dc06f3dbc8cb11ba3006d3b1" -dependencies = [ - "proc-macro2", - "quote", - "unicode-ident", -] - [[package]] name = "sysroot" version = "0.0.0" @@ -411,12 +381,6 @@ dependencies = [ "std", ] -[[package]] -name = "unicode-ident" -version = "1.0.16" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a210d160f08b701c8721ba1c726c11662f877ea6b7094007e1ca9a1041945034" - [[package]] name = "unicode-width" version = "0.1.14" @@ -441,9 +405,9 @@ dependencies = [ [[package]] name = "unwinding" -version = "0.2.5" +version = "0.2.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "51f06a05848f650946acef3bf525fe96612226b61f74ae23ffa4e98bfbb8ab3c" +checksum = "8393f2782b6060a807337ff353780c1ca15206f9ba2424df18cb6e733bd7b345" dependencies = [ "compiler_builtins", "gimli", @@ -537,23 +501,3 @@ name = "windows_x86_64_msvc" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" - -[[package]] -name = "zerocopy" -version = "0.8.17" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aa91407dacce3a68c56de03abe2760159582b846c6a4acd2f456618087f12713" -dependencies = [ - "zerocopy-derive", -] - -[[package]] -name = "zerocopy-derive" -version = "0.8.17" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "06718a168365cad3d5ff0bb133aad346959a2074bd4a85c121255a11304a8626" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] diff --git a/library/Cargo.toml b/library/Cargo.toml index 4d5955593ffcd..026ba1470081d 100644 --- a/library/Cargo.toml +++ b/library/Cargo.toml @@ -38,8 +38,11 @@ adler2.debug = 0 gimli.debug = 0 gimli.opt-level = "s" miniz_oxide.debug = 0 +miniz_oxide.opt-level = "s" +# `opt-level = "s"` for `object` led to a size regression when tried previously object.debug = 0 rustc-demangle.debug = 0 +rustc-demangle.opt-level = "s" [patch.crates-io] # See comments in `library/rustc-std-workspace-core/README.md` for what's going on diff --git a/library/alloc/Cargo.toml b/library/alloc/Cargo.toml index b729d5e116d2c..51ddc9bf9fc9d 100644 --- a/library/alloc/Cargo.toml +++ b/library/alloc/Cargo.toml @@ -16,7 +16,7 @@ bench = false [dependencies] core = { path = "../core", public = true } -compiler_builtins = { version = "=0.1.152", features = ['rustc-dep-of-std'] } +compiler_builtins = { version = "=0.1.158", features = ['rustc-dep-of-std'] } [features] compiler-builtins-mem = ['compiler_builtins/mem'] @@ -36,5 +36,4 @@ check-cfg = [ 'cfg(no_global_oom_handling)', 'cfg(no_rc)', 'cfg(no_sync)', - 'cfg(randomized_layouts)', ] diff --git a/library/alloc/src/alloc.rs b/library/alloc/src/alloc.rs index 1e03a191276ca..e1cc4ba25c4ea 100644 --- a/library/alloc/src/alloc.rs +++ b/library/alloc/src/alloc.rs @@ -16,22 +16,22 @@ unsafe extern "Rust" { // otherwise. #[rustc_allocator] #[rustc_nounwind] - #[cfg_attr(not(bootstrap), rustc_std_internal_symbol)] + #[rustc_std_internal_symbol] fn __rust_alloc(size: usize, align: usize) -> *mut u8; #[rustc_deallocator] #[rustc_nounwind] - #[cfg_attr(not(bootstrap), rustc_std_internal_symbol)] + #[rustc_std_internal_symbol] fn __rust_dealloc(ptr: *mut u8, size: usize, align: usize); #[rustc_reallocator] #[rustc_nounwind] - #[cfg_attr(not(bootstrap), rustc_std_internal_symbol)] + #[rustc_std_internal_symbol] fn __rust_realloc(ptr: *mut u8, old_size: usize, align: usize, new_size: usize) -> *mut u8; #[rustc_allocator_zeroed] #[rustc_nounwind] - #[cfg_attr(not(bootstrap), rustc_std_internal_symbol)] + #[rustc_std_internal_symbol] fn __rust_alloc_zeroed(size: usize, align: usize) -> *mut u8; - #[cfg_attr(not(bootstrap), rustc_std_internal_symbol)] + #[rustc_std_internal_symbol] static __rust_no_alloc_shim_is_unstable: u8; } @@ -360,7 +360,7 @@ unsafe extern "Rust" { // This is the magic symbol to call the global alloc error handler. rustc generates // it to call `__rg_oom` if there is a `#[alloc_error_handler]`, or to call the // default implementations below (`__rdl_oom`) otherwise. - #[cfg_attr(not(bootstrap), rustc_std_internal_symbol)] + #[rustc_std_internal_symbol] fn __rust_alloc_error_handler(size: usize, align: usize) -> !; } @@ -427,7 +427,7 @@ pub mod __alloc_error_handler { unsafe extern "Rust" { // This symbol is emitted by rustc next to __rust_alloc_error_handler. // Its value depends on the -Zoom={panic,abort} compiler option. - #[cfg_attr(not(bootstrap), rustc_std_internal_symbol)] + #[rustc_std_internal_symbol] static __rust_alloc_error_handler_should_panic: u8; } diff --git a/library/alloc/src/boxed.rs b/library/alloc/src/boxed.rs index 4644e37f809c1..4536f55544354 100644 --- a/library/alloc/src/boxed.rs +++ b/library/alloc/src/boxed.rs @@ -952,7 +952,7 @@ impl Box, A> { /// assert_eq!(*x, i); /// } /// ``` - #[stable(feature = "box_uninit_write", since = "CURRENT_RUSTC_VERSION")] + #[stable(feature = "box_uninit_write", since = "1.87.0")] #[inline] pub fn write(mut boxed: Self, value: T) -> Box { unsafe { diff --git a/library/alloc/src/collections/btree/map.rs b/library/alloc/src/collections/btree/map.rs index 78b7da9d6b3ee..5ca32ed741af8 100644 --- a/library/alloc/src/collections/btree/map.rs +++ b/library/alloc/src/collections/btree/map.rs @@ -1917,14 +1917,13 @@ pub struct ExtractIf< V, F, #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator + Clone = Global, -> where - F: 'a + FnMut(&K, &mut V) -> bool, -{ +> { pred: F, inner: ExtractIfInner<'a, K, V>, /// The BTreeMap will outlive this IntoIter so we don't care about drop order for `alloc`. alloc: A, } + /// Most of the implementation of ExtractIf are generic over the type /// of the predicate, thus also serving for BTreeSet::ExtractIf. pub(super) struct ExtractIfInner<'a, K, V> { @@ -1940,14 +1939,14 @@ pub(super) struct ExtractIfInner<'a, K, V> { } #[unstable(feature = "btree_extract_if", issue = "70530")] -impl fmt::Debug for ExtractIf<'_, K, V, F> +impl fmt::Debug for ExtractIf<'_, K, V, F, A> where K: fmt::Debug, V: fmt::Debug, - F: FnMut(&K, &mut V) -> bool, + A: Allocator + Clone, { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.debug_tuple("ExtractIf").field(&self.inner.peek()).finish() + f.debug_struct("ExtractIf").field("peek", &self.inner.peek()).finish_non_exhaustive() } } diff --git a/library/alloc/src/collections/btree/set.rs b/library/alloc/src/collections/btree/set.rs index 041f80c1f2c52..343934680b87a 100644 --- a/library/alloc/src/collections/btree/set.rs +++ b/library/alloc/src/collections/btree/set.rs @@ -139,7 +139,7 @@ pub struct Iter<'a, T: 'a> { #[stable(feature = "collection_debug", since = "1.17.0")] impl fmt::Debug for Iter<'_, T> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.debug_tuple("Iter").field(&self.iter.clone()).finish() + f.debug_tuple("Iter").field(&self.iter).finish() } } @@ -1556,10 +1556,7 @@ pub struct ExtractIf< T, F, #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator + Clone = Global, -> where - T: 'a, - F: 'a + FnMut(&T) -> bool, -{ +> { pred: F, inner: super::map::ExtractIfInner<'a, T, SetValZST>, /// The BTreeMap will outlive this IntoIter so we don't care about drop order for `alloc`. @@ -1567,13 +1564,15 @@ pub struct ExtractIf< } #[unstable(feature = "btree_extract_if", issue = "70530")] -impl fmt::Debug for ExtractIf<'_, T, F, A> +impl fmt::Debug for ExtractIf<'_, T, F, A> where T: fmt::Debug, - F: FnMut(&T) -> bool, + A: Allocator + Clone, { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.debug_tuple("ExtractIf").field(&self.inner.peek().map(|(k, _)| k)).finish() + f.debug_struct("ExtractIf") + .field("peek", &self.inner.peek().map(|(k, _)| k)) + .finish_non_exhaustive() } } diff --git a/library/alloc/src/collections/btree/set_val.rs b/library/alloc/src/collections/btree/set_val.rs index cf30160bfbbc2..5037b6578e80a 100644 --- a/library/alloc/src/collections/btree/set_val.rs +++ b/library/alloc/src/collections/btree/set_val.rs @@ -9,7 +9,7 @@ pub(super) struct SetValZST; /// Returns `true` only for type `SetValZST`, `false` for all other types (blanket implementation). /// `TypeId` requires a `'static` lifetime, use of this trait avoids that restriction. /// -/// [`TypeId`]: std::any::TypeId +/// [`TypeId`]: core::any::TypeId pub(super) trait IsSetVal { fn is_set_val() -> bool; } diff --git a/library/alloc/src/collections/linked_list.rs b/library/alloc/src/collections/linked_list.rs index 3183268b4b32e..00e2805d11f61 100644 --- a/library/alloc/src/collections/linked_list.rs +++ b/library/alloc/src/collections/linked_list.rs @@ -1151,7 +1151,7 @@ impl LinkedList { /// assert_eq!(evens.into_iter().collect::>(), vec![2, 4, 6, 8, 14]); /// assert_eq!(odds.into_iter().collect::>(), vec![1, 3, 5, 9, 11, 13, 15]); /// ``` - #[stable(feature = "extract_if", since = "CURRENT_RUSTC_VERSION")] + #[stable(feature = "extract_if", since = "1.87.0")] pub fn extract_if(&mut self, filter: F) -> ExtractIf<'_, T, F, A> where F: FnMut(&mut T) -> bool, @@ -1931,7 +1931,7 @@ impl<'a, T, A: Allocator> CursorMut<'a, T, A> { } /// An iterator produced by calling `extract_if` on LinkedList. -#[stable(feature = "extract_if", since = "CURRENT_RUSTC_VERSION")] +#[stable(feature = "extract_if", since = "1.87.0")] #[must_use = "iterators are lazy and do nothing unless consumed"] pub struct ExtractIf< 'a, @@ -1946,7 +1946,7 @@ pub struct ExtractIf< old_len: usize, } -#[stable(feature = "extract_if", since = "CURRENT_RUSTC_VERSION")] +#[stable(feature = "extract_if", since = "1.87.0")] impl Iterator for ExtractIf<'_, T, F, A> where F: FnMut(&mut T) -> bool, @@ -1975,10 +1975,15 @@ where } } -#[stable(feature = "extract_if", since = "CURRENT_RUSTC_VERSION")] -impl fmt::Debug for ExtractIf<'_, T, F> { +#[stable(feature = "extract_if", since = "1.87.0")] +impl fmt::Debug for ExtractIf<'_, T, F, A> +where + T: fmt::Debug, + A: Allocator, +{ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.debug_tuple("ExtractIf").field(&self.list).finish() + let peek = self.it.map(|node| unsafe { &node.as_ref().element }); + f.debug_struct("ExtractIf").field("peek", &peek).finish_non_exhaustive() } } diff --git a/library/alloc/src/collections/vec_deque/mod.rs b/library/alloc/src/collections/vec_deque/mod.rs index f8844e2d3a5cb..712f38a76c018 100644 --- a/library/alloc/src/collections/vec_deque/mod.rs +++ b/library/alloc/src/collections/vec_deque/mod.rs @@ -1188,6 +1188,73 @@ impl VecDeque { } } + /// Shortens the deque, keeping the last `len` elements and dropping + /// the rest. + /// + /// If `len` is greater or equal to the deque's current length, this has + /// no effect. + /// + /// # Examples + /// + /// ``` + /// # #![feature(vec_deque_truncate_front)] + /// use std::collections::VecDeque; + /// + /// let mut buf = VecDeque::new(); + /// buf.push_front(5); + /// buf.push_front(10); + /// buf.push_front(15); + /// assert_eq!(buf, [15, 10, 5]); + /// assert_eq!(buf.as_slices(), (&[15, 10, 5][..], &[][..])); + /// buf.truncate_front(1); + /// assert_eq!(buf.as_slices(), (&[5][..], &[][..])); + /// ``` + #[unstable(feature = "vec_deque_truncate_front", issue = "140667")] + pub fn truncate_front(&mut self, len: usize) { + /// Runs the destructor for all items in the slice when it gets dropped (normally or + /// during unwinding). + struct Dropper<'a, T>(&'a mut [T]); + + impl<'a, T> Drop for Dropper<'a, T> { + fn drop(&mut self) { + unsafe { + ptr::drop_in_place(self.0); + } + } + } + + unsafe { + if len >= self.len { + // No action is taken + return; + } + + let (front, back) = self.as_mut_slices(); + if len > back.len() { + // The 'back' slice remains unchanged. + // front.len() + back.len() == self.len, so 'end' is non-negative + // and end < front.len() + let end = front.len() - (len - back.len()); + let drop_front = front.get_unchecked_mut(..end) as *mut _; + self.head += end; + self.len = len; + ptr::drop_in_place(drop_front); + } else { + let drop_front = front as *mut _; + // 'end' is non-negative by the condition above + let end = back.len() - len; + let drop_back = back.get_unchecked_mut(..end) as *mut _; + self.head = self.to_physical_idx(self.len - len); + self.len = len; + + // Make sure the second half is dropped even when a destructor + // in the first one panics. + let _back_dropper = Dropper(&mut *drop_back); + ptr::drop_in_place(drop_front); + } + } + } + /// Returns a reference to the underlying allocator. #[unstable(feature = "allocator_api", issue = "32838")] #[inline] diff --git a/library/alloc/src/ffi/c_str.rs b/library/alloc/src/ffi/c_str.rs index f6743c6571095..8b448a18402c3 100644 --- a/library/alloc/src/ffi/c_str.rs +++ b/library/alloc/src/ffi/c_str.rs @@ -351,9 +351,14 @@ impl CString { /// # Safety /// /// This should only ever be called with a pointer that was earlier - /// obtained by calling [`CString::into_raw`]. Other usage (e.g., trying to take - /// ownership of a string that was allocated by foreign code) is likely to lead - /// to undefined behavior or allocator corruption. + /// obtained by calling [`CString::into_raw`], and the memory it points to must not be accessed + /// through any other pointer during the lifetime of reconstructed `CString`. + /// Other usage (e.g., trying to take ownership of a string that was allocated by foreign code) + /// is likely to lead to undefined behavior or allocator corruption. + /// + /// This function does not validate ownership of the raw pointer's memory. + /// A double-free may occur if the function is called twice on the same raw pointer. + /// Additionally, the caller must ensure the pointer is not dangling. /// /// It should be noted that the length isn't just "recomputed," but that /// the recomputed length must match the original length from the @@ -574,7 +579,7 @@ impl CString { #[stable(feature = "as_c_str", since = "1.20.0")] #[rustc_diagnostic_item = "cstring_as_c_str"] pub fn as_c_str(&self) -> &CStr { - &*self + unsafe { CStr::from_bytes_with_nul_unchecked(self.as_bytes_with_nul()) } } /// Converts this `CString` into a boxed [`CStr`]. @@ -705,14 +710,14 @@ impl ops::Deref for CString { #[inline] fn deref(&self) -> &CStr { - unsafe { CStr::from_bytes_with_nul_unchecked(self.as_bytes_with_nul()) } + self.as_c_str() } } #[stable(feature = "rust1", since = "1.0.0")] impl fmt::Debug for CString { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - fmt::Debug::fmt(&**self, f) + fmt::Debug::fmt(self.as_c_str(), f) } } @@ -818,6 +823,7 @@ impl From>> for CString { } } +#[stable(feature = "c_string_from_str", since = "1.85.0")] impl FromStr for CString { type Err = NulError; @@ -830,6 +836,7 @@ impl FromStr for CString { } } +#[stable(feature = "c_string_from_str", since = "1.85.0")] impl TryFrom for String { type Error = IntoStringError; @@ -1116,7 +1123,7 @@ impl CStr { /// with the corresponding &[str] slice. Otherwise, it will /// replace any invalid UTF-8 sequences with /// [`U+FFFD REPLACEMENT CHARACTER`][U+FFFD] and return a - /// [Cow]::[Owned]\(&[str]) with the result. + /// [Cow]::[Owned]\([String]) with the result. /// /// [str]: prim@str "str" /// [Borrowed]: Cow::Borrowed diff --git a/library/alloc/src/ffi/mod.rs b/library/alloc/src/ffi/mod.rs index 695d7ad07cf76..05a2763a22596 100644 --- a/library/alloc/src/ffi/mod.rs +++ b/library/alloc/src/ffi/mod.rs @@ -87,5 +87,5 @@ pub use self::c_str::CString; #[stable(feature = "alloc_c_string", since = "1.64.0")] pub use self::c_str::{FromVecWithNulError, IntoStringError, NulError}; -#[unstable(feature = "c_str_module", issue = "112134")] +#[stable(feature = "c_str_module", since = "CURRENT_RUSTC_VERSION")] pub mod c_str; diff --git a/library/alloc/src/fmt.rs b/library/alloc/src/fmt.rs index e40de13f3d4a9..30f42050ac8ac 100644 --- a/library/alloc/src/fmt.rs +++ b/library/alloc/src/fmt.rs @@ -109,7 +109,7 @@ //! parameters (corresponding to `format_spec` in [the syntax](#syntax)). These //! parameters affect the string representation of what's being formatted. //! -//! The colon `:` in format syntax divides indentifier of the input data and +//! The colon `:` in format syntax divides identifier of the input data and //! the formatting options, the colon itself does not change anything, only //! introduces the options. //! diff --git a/library/alloc/src/lib.rs b/library/alloc/src/lib.rs index f0cdb1e4e0f78..abda5aefab645 100644 --- a/library/alloc/src/lib.rs +++ b/library/alloc/src/lib.rs @@ -104,6 +104,7 @@ #![feature(async_iterator)] #![feature(bstr)] #![feature(bstr_internals)] +#![feature(char_internals)] #![feature(char_max_len)] #![feature(clone_to_uninit)] #![feature(coerce_unsized)] @@ -121,6 +122,7 @@ #![feature(fmt_internals)] #![feature(fn_traits)] #![feature(formatting_options)] +#![feature(generic_atomic)] #![feature(hasher_prefixfree_extras)] #![feature(inplace_iteration)] #![feature(iter_advance_by)] @@ -135,6 +137,7 @@ #![feature(pattern)] #![feature(pin_coerce_unsized_trait)] #![feature(pointer_like_trait)] +#![feature(ptr_alignment_type)] #![feature(ptr_internals)] #![feature(ptr_metadata)] #![feature(set_ptr_value)] diff --git a/library/alloc/src/macros.rs b/library/alloc/src/macros.rs index 214192b8c9a9b..1e6e2ae8c3675 100644 --- a/library/alloc/src/macros.rs +++ b/library/alloc/src/macros.rs @@ -105,8 +105,7 @@ macro_rules! vec { macro_rules! format { ($($arg:tt)*) => { $crate::__export::must_use({ - let res = $crate::fmt::format($crate::__export::format_args!($($arg)*)); - res + $crate::fmt::format($crate::__export::format_args!($($arg)*)) }) } } diff --git a/library/alloc/src/raw_vec/mod.rs b/library/alloc/src/raw_vec/mod.rs index 99ebc5c4bfca8..a989e5b55b3d1 100644 --- a/library/alloc/src/raw_vec/mod.rs +++ b/library/alloc/src/raw_vec/mod.rs @@ -6,7 +6,7 @@ use core::marker::PhantomData; use core::mem::{ManuallyDrop, MaybeUninit, SizedTypeProperties}; -use core::ptr::{self, NonNull, Unique}; +use core::ptr::{self, Alignment, NonNull, Unique}; use core::{cmp, hint}; #[cfg(not(no_global_oom_handling))] @@ -177,7 +177,7 @@ impl RawVec { /// the returned `RawVec`. #[inline] pub(crate) const fn new_in(alloc: A) -> Self { - Self { inner: RawVecInner::new_in(alloc, align_of::()), _marker: PhantomData } + Self { inner: RawVecInner::new_in(alloc, Alignment::of::()), _marker: PhantomData } } /// Like `with_capacity`, but parameterized over the choice of @@ -409,8 +409,8 @@ unsafe impl<#[may_dangle] T, A: Allocator> Drop for RawVec { impl RawVecInner { #[inline] - const fn new_in(alloc: A, align: usize) -> Self { - let ptr = unsafe { core::mem::transmute(align) }; + const fn new_in(alloc: A, align: Alignment) -> Self { + let ptr = Unique::from_non_null(NonNull::without_provenance(align.as_nonzero())); // `cap: 0` means "unallocated". zero-sized types are ignored. Self { ptr, cap: ZERO_CAP, alloc } } @@ -465,7 +465,7 @@ impl RawVecInner { // Don't allocate here because `Drop` will not deallocate when `capacity` is 0. if layout.size() == 0 { - return Ok(Self::new_in(alloc, elem_layout.align())); + return Ok(Self::new_in(alloc, elem_layout.alignment())); } if let Err(err) = alloc_guard(layout.size()) { diff --git a/library/alloc/src/rc.rs b/library/alloc/src/rc.rs index 619d9f258e342..4b8ea708e7e57 100644 --- a/library/alloc/src/rc.rs +++ b/library/alloc/src/rc.rs @@ -3536,11 +3536,11 @@ impl Default for Weak { } } -// NOTE: We checked_add here to deal with mem::forget safely. In particular -// if you mem::forget Rcs (or Weaks), the ref-count can overflow, and then -// you can free the allocation while outstanding Rcs (or Weaks) exist. -// We abort because this is such a degenerate scenario that we don't care about -// what happens -- no real program should ever experience this. +// NOTE: If you mem::forget Rcs (or Weaks), drop is skipped and the ref-count +// is not decremented, meaning the ref-count can overflow, and then you can +// free the allocation while outstanding Rcs (or Weaks) exist, which would be +// unsound. We abort because this is such a degenerate scenario that we don't +// care about what happens -- no real program should ever experience this. // // This should have negligible overhead since you don't actually need to // clone these much in Rust thanks to ownership and move-semantics. diff --git a/library/alloc/src/str.rs b/library/alloc/src/str.rs index 0664f2c3cf2c1..24c5d4c92f71f 100644 --- a/library/alloc/src/str.rs +++ b/library/alloc/src/str.rs @@ -603,6 +603,10 @@ impl str { /// Converts a boxed slice of bytes to a boxed string slice without checking /// that the string contains valid UTF-8. /// +/// # Safety +/// +/// * The provided bytes must contain a valid UTF-8 sequence. +/// /// # Examples /// /// ``` diff --git a/library/alloc/src/string.rs b/library/alloc/src/string.rs index 9236f5cb8d1f0..cd9e04a915aa2 100644 --- a/library/alloc/src/string.rs +++ b/library/alloc/src/string.rs @@ -1043,7 +1043,7 @@ impl String { #[inline] #[must_use = "`self` will be dropped if the result is not used"] #[stable(feature = "rust1", since = "1.0.0")] - #[rustc_const_stable(feature = "const_vec_string_slice", since = "CURRENT_RUSTC_VERSION")] + #[rustc_const_stable(feature = "const_vec_string_slice", since = "1.87.0")] #[rustc_allow_const_fn_unstable(const_precise_live_drops)] pub const fn into_bytes(self) -> Vec { self.vec @@ -1062,7 +1062,7 @@ impl String { #[must_use] #[stable(feature = "string_as_str", since = "1.7.0")] #[rustc_diagnostic_item = "string_as_str"] - #[rustc_const_stable(feature = "const_vec_string_slice", since = "CURRENT_RUSTC_VERSION")] + #[rustc_const_stable(feature = "const_vec_string_slice", since = "1.87.0")] pub const fn as_str(&self) -> &str { // SAFETY: String contents are stipulated to be valid UTF-8, invalid contents are an error // at construction. @@ -1085,7 +1085,7 @@ impl String { #[must_use] #[stable(feature = "string_as_str", since = "1.7.0")] #[rustc_diagnostic_item = "string_as_mut_str"] - #[rustc_const_stable(feature = "const_vec_string_slice", since = "CURRENT_RUSTC_VERSION")] + #[rustc_const_stable(feature = "const_vec_string_slice", since = "1.87.0")] pub const fn as_mut_str(&mut self) -> &mut str { // SAFETY: String contents are stipulated to be valid UTF-8, invalid contents are an error // at construction. @@ -1134,7 +1134,7 @@ impl String { /// assert_eq!(string, "abcdecdeabecde"); /// ``` #[cfg(not(no_global_oom_handling))] - #[stable(feature = "string_extend_from_within", since = "CURRENT_RUSTC_VERSION")] + #[stable(feature = "string_extend_from_within", since = "1.87.0")] pub fn extend_from_within(&mut self, src: R) where R: RangeBounds, @@ -1159,7 +1159,7 @@ impl String { #[inline] #[must_use] #[stable(feature = "rust1", since = "1.0.0")] - #[rustc_const_stable(feature = "const_vec_string_slice", since = "CURRENT_RUSTC_VERSION")] + #[rustc_const_stable(feature = "const_vec_string_slice", since = "1.87.0")] pub const fn capacity(&self) -> usize { self.vec.capacity() } @@ -1401,11 +1401,14 @@ impl String { #[inline] #[stable(feature = "rust1", since = "1.0.0")] pub fn push(&mut self, ch: char) { - match ch.len_utf8() { - 1 => self.vec.push(ch as u8), - _ => { - self.vec.extend_from_slice(ch.encode_utf8(&mut [0; char::MAX_LEN_UTF8]).as_bytes()) - } + let len = self.len(); + let ch_len = ch.len_utf8(); + self.reserve(ch_len); + + // SAFETY: Just reserved capacity for at least the length needed to encode `ch`. + unsafe { + core::char::encode_utf8_raw_unchecked(ch as u32, self.vec.as_mut_ptr().add(self.len())); + self.vec.set_len(len + ch_len); } } @@ -1425,7 +1428,7 @@ impl String { #[inline] #[must_use] #[stable(feature = "rust1", since = "1.0.0")] - #[rustc_const_stable(feature = "const_vec_string_slice", since = "CURRENT_RUSTC_VERSION")] + #[rustc_const_stable(feature = "const_vec_string_slice", since = "1.87.0")] pub const fn as_bytes(&self) -> &[u8] { self.vec.as_slice() } @@ -1702,24 +1705,31 @@ impl String { #[rustc_confusables("set")] pub fn insert(&mut self, idx: usize, ch: char) { assert!(self.is_char_boundary(idx)); - let mut bits = [0; char::MAX_LEN_UTF8]; - let bits = ch.encode_utf8(&mut bits).as_bytes(); + let len = self.len(); + let ch_len = ch.len_utf8(); + self.reserve(ch_len); + + // SAFETY: Move the bytes starting from `idx` to their new location `ch_len` + // bytes ahead. This is safe because sufficient capacity was reserved, and `idx` + // is a char boundary. unsafe { - self.insert_bytes(idx, bits); + ptr::copy( + self.vec.as_ptr().add(idx), + self.vec.as_mut_ptr().add(idx + ch_len), + len - idx, + ); } - } - #[cfg(not(no_global_oom_handling))] - unsafe fn insert_bytes(&mut self, idx: usize, bytes: &[u8]) { - let len = self.len(); - let amt = bytes.len(); - self.vec.reserve(amt); + // SAFETY: Encode the character into the vacated region if `idx != len`, + // or into the uninitialized spare capacity otherwise. + unsafe { + core::char::encode_utf8_raw_unchecked(ch as u32, self.vec.as_mut_ptr().add(idx)); + } + // SAFETY: Update the length to include the newly added bytes. unsafe { - ptr::copy(self.vec.as_ptr().add(idx), self.vec.as_mut_ptr().add(idx + amt), len - idx); - ptr::copy_nonoverlapping(bytes.as_ptr(), self.vec.as_mut_ptr().add(idx), amt); - self.vec.set_len(len + amt); + self.vec.set_len(len + ch_len); } } @@ -1749,8 +1759,27 @@ impl String { pub fn insert_str(&mut self, idx: usize, string: &str) { assert!(self.is_char_boundary(idx)); + let len = self.len(); + let amt = string.len(); + self.reserve(amt); + + // SAFETY: Move the bytes starting from `idx` to their new location `amt` bytes + // ahead. This is safe because sufficient capacity was just reserved, and `idx` + // is a char boundary. + unsafe { + ptr::copy(self.vec.as_ptr().add(idx), self.vec.as_mut_ptr().add(idx + amt), len - idx); + } + + // SAFETY: Copy the new string slice into the vacated region if `idx != len`, + // or into the uninitialized spare capacity otherwise. The borrow checker + // ensures that the source and destination do not overlap. + unsafe { + ptr::copy_nonoverlapping(string.as_ptr(), self.vec.as_mut_ptr().add(idx), amt); + } + + // SAFETY: Update the length to include the newly added bytes. unsafe { - self.insert_bytes(idx, string.as_bytes()); + self.vec.set_len(len + amt); } } @@ -1779,7 +1808,7 @@ impl String { /// ``` #[inline] #[stable(feature = "rust1", since = "1.0.0")] - #[rustc_const_stable(feature = "const_vec_string_slice", since = "CURRENT_RUSTC_VERSION")] + #[rustc_const_stable(feature = "const_vec_string_slice", since = "1.87.0")] pub const unsafe fn as_mut_vec(&mut self) -> &mut Vec { &mut self.vec } @@ -1801,8 +1830,9 @@ impl String { #[inline] #[must_use] #[stable(feature = "rust1", since = "1.0.0")] - #[rustc_const_stable(feature = "const_vec_string_slice", since = "CURRENT_RUSTC_VERSION")] + #[rustc_const_stable(feature = "const_vec_string_slice", since = "1.87.0")] #[rustc_confusables("length", "size")] + #[cfg_attr(not(bootstrap), rustc_no_implicit_autorefs)] pub const fn len(&self) -> usize { self.vec.len() } @@ -1821,7 +1851,8 @@ impl String { #[inline] #[must_use] #[stable(feature = "rust1", since = "1.0.0")] - #[rustc_const_stable(feature = "const_vec_string_slice", since = "CURRENT_RUSTC_VERSION")] + #[rustc_const_stable(feature = "const_vec_string_slice", since = "1.87.0")] + #[cfg_attr(not(bootstrap), rustc_no_implicit_autorefs)] pub const fn is_empty(&self) -> bool { self.len() == 0 } @@ -3140,7 +3171,7 @@ impl From for Vec { } } -#[stable(feature = "try_from_vec_u8_for_string", since = "CURRENT_RUSTC_VERSION")] +#[stable(feature = "try_from_vec_u8_for_string", since = "1.87.0")] impl TryFrom> for String { type Error = FromUtf8Error; /// Converts the given [`Vec`] into a [`String`] if it contains valid UTF-8 data. diff --git a/library/alloc/src/sync.rs b/library/alloc/src/sync.rs index be581661f4ce3..17090925cfa0c 100644 --- a/library/alloc/src/sync.rs +++ b/library/alloc/src/sync.rs @@ -26,8 +26,8 @@ use core::pin::{Pin, PinCoerceUnsized}; use core::ptr::{self, NonNull}; #[cfg(not(no_global_oom_handling))] use core::slice::from_raw_parts_mut; -use core::sync::atomic; use core::sync::atomic::Ordering::{Acquire, Relaxed, Release}; +use core::sync::atomic::{self, Atomic}; use core::{borrow, fmt, hint}; #[cfg(not(no_global_oom_handling))] @@ -369,12 +369,12 @@ impl fmt::Debug for Weak { // inner types. #[repr(C)] struct ArcInner { - strong: atomic::AtomicUsize, + strong: Atomic, // the value usize::MAX acts as a sentinel for temporarily "locking" the // ability to upgrade weak pointers or downgrade strong ones; this is used // to avoid races in `make_mut` and `get_mut`. - weak: atomic::AtomicUsize, + weak: Atomic, data: T, } @@ -2446,7 +2446,7 @@ impl Arc { #[inline] #[stable(feature = "arc_unique", since = "1.4.0")] pub fn get_mut(this: &mut Self) -> Option<&mut T> { - if this.is_unique() { + if Self::is_unique(this) { // This unsafety is ok because we're guaranteed that the pointer // returned is the *only* pointer that will ever be returned to T. Our // reference count is guaranteed to be 1 at this point, and we required @@ -2526,11 +2526,64 @@ impl Arc { unsafe { &mut (*this.ptr.as_ptr()).data } } - /// Determine whether this is the unique reference (including weak refs) to - /// the underlying data. + /// Determine whether this is the unique reference to the underlying data. /// - /// Note that this requires locking the weak ref count. - fn is_unique(&mut self) -> bool { + /// Returns `true` if there are no other `Arc` or [`Weak`] pointers to the same allocation; + /// returns `false` otherwise. + /// + /// If this function returns `true`, then is guaranteed to be safe to call [`get_mut_unchecked`] + /// on this `Arc`, so long as no clones occur in between. + /// + /// # Examples + /// + /// ``` + /// #![feature(arc_is_unique)] + /// + /// use std::sync::Arc; + /// + /// let x = Arc::new(3); + /// assert!(Arc::is_unique(&x)); + /// + /// let y = Arc::clone(&x); + /// assert!(!Arc::is_unique(&x)); + /// drop(y); + /// + /// // Weak references also count, because they could be upgraded at any time. + /// let z = Arc::downgrade(&x); + /// assert!(!Arc::is_unique(&x)); + /// ``` + /// + /// # Pointer invalidation + /// + /// This function will always return the same value as `Arc::get_mut(arc).is_some()`. However, + /// unlike that operation it does not produce any mutable references to the underlying data, + /// meaning no pointers to the data inside the `Arc` are invalidated by the call. Thus, the + /// following code is valid, even though it would be UB if it used `Arc::get_mut`: + /// + /// ``` + /// #![feature(arc_is_unique)] + /// + /// use std::sync::Arc; + /// + /// let arc = Arc::new(5); + /// let pointer: *const i32 = &*arc; + /// assert!(Arc::is_unique(&arc)); + /// assert_eq!(unsafe { *pointer }, 5); + /// ``` + /// + /// # Atomic orderings + /// + /// Concurrent drops to other `Arc` pointers to the same allocation will synchronize with this + /// call - that is, this call performs an `Acquire` operation on the underlying strong and weak + /// ref counts. This ensures that calling `get_mut_unchecked` is safe. + /// + /// Note that this operation requires locking the weak ref count, so concurrent calls to + /// `downgrade` may spin-loop for a short period of time. + /// + /// [`get_mut_unchecked`]: Self::get_mut_unchecked + #[inline] + #[unstable(feature = "arc_is_unique", issue = "138938")] + pub fn is_unique(this: &Self) -> bool { // lock the weak pointer count if we appear to be the sole weak pointer // holder. // @@ -2538,16 +2591,16 @@ impl Arc { // writes to `strong` (in particular in `Weak::upgrade`) prior to decrements // of the `weak` count (via `Weak::drop`, which uses release). If the upgraded // weak ref was never dropped, the CAS here will fail so we do not care to synchronize. - if self.inner().weak.compare_exchange(1, usize::MAX, Acquire, Relaxed).is_ok() { + if this.inner().weak.compare_exchange(1, usize::MAX, Acquire, Relaxed).is_ok() { // This needs to be an `Acquire` to synchronize with the decrement of the `strong` // counter in `drop` -- the only access that happens when any but the last reference // is being dropped. - let unique = self.inner().strong.load(Acquire) == 1; + let unique = this.inner().strong.load(Acquire) == 1; // The release write here synchronizes with a read in `downgrade`, // effectively preventing the above read of `strong` from happening // after the write. - self.inner().weak.store(1, Release); // release the lock + this.inner().weak.store(1, Release); // release the lock unique } else { false @@ -2760,8 +2813,8 @@ impl Weak { /// Helper type to allow accessing the reference counts without /// making any assertions about the data field. struct WeakInner<'a> { - weak: &'a atomic::AtomicUsize, - strong: &'a atomic::AtomicUsize, + weak: &'a Atomic, + strong: &'a Atomic, } impl Weak { diff --git a/library/alloc/src/vec/extract_if.rs b/library/alloc/src/vec/extract_if.rs index be869553ef4e1..a456d3d9e602d 100644 --- a/library/alloc/src/vec/extract_if.rs +++ b/library/alloc/src/vec/extract_if.rs @@ -1,5 +1,5 @@ use core::ops::{Range, RangeBounds}; -use core::{ptr, slice}; +use core::{fmt, ptr, slice}; use super::Vec; use crate::alloc::{Allocator, Global}; @@ -15,8 +15,7 @@ use crate::alloc::{Allocator, Global}; /// let mut v = vec![0, 1, 2]; /// let iter: std::vec::ExtractIf<'_, _, _> = v.extract_if(.., |x| *x % 2 == 0); /// ``` -#[stable(feature = "extract_if", since = "CURRENT_RUSTC_VERSION")] -#[derive(Debug)] +#[stable(feature = "extract_if", since = "1.87.0")] #[must_use = "iterators are lazy and do nothing unless consumed"] pub struct ExtractIf< 'a, @@ -57,7 +56,7 @@ impl<'a, T, F, A: Allocator> ExtractIf<'a, T, F, A> { } } -#[stable(feature = "extract_if", since = "CURRENT_RUSTC_VERSION")] +#[stable(feature = "extract_if", since = "1.87.0")] impl Iterator for ExtractIf<'_, T, F, A> where F: FnMut(&mut T) -> bool, @@ -93,7 +92,7 @@ where } } -#[stable(feature = "extract_if", since = "CURRENT_RUSTC_VERSION")] +#[stable(feature = "extract_if", since = "1.87.0")] impl Drop for ExtractIf<'_, T, F, A> { fn drop(&mut self) { unsafe { @@ -108,3 +107,15 @@ impl Drop for ExtractIf<'_, T, F, A> { } } } + +#[stable(feature = "extract_if", since = "1.87.0")] +impl fmt::Debug for ExtractIf<'_, T, F, A> +where + T: fmt::Debug, + A: Allocator, +{ + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let peek = if self.idx < self.end { self.vec.get(self.idx) } else { None }; + f.debug_struct("ExtractIf").field("peek", &peek).finish_non_exhaustive() + } +} diff --git a/library/alloc/src/vec/into_iter.rs b/library/alloc/src/vec/into_iter.rs index 3eee988b6c9d1..37df928228d9c 100644 --- a/library/alloc/src/vec/into_iter.rs +++ b/library/alloc/src/vec/into_iter.rs @@ -168,7 +168,7 @@ impl IntoIter { // SAFETY: This allocation originally came from a `Vec`, so it passes // all those checks. We have `this.buf` ≤ `this.ptr` ≤ `this.end`, - // so the `sub_ptr`s below cannot wrap, and will produce a well-formed + // so the `offset_from_unsigned`s below cannot wrap, and will produce a well-formed // range. `end` ≤ `buf + cap`, so the range will be in-bounds. // Taking `alloc` is ok because nothing else is going to look at it, // since our `Drop` impl isn't going to run so there's no more code. @@ -258,6 +258,11 @@ impl Iterator for IntoIter { self.len() } + #[inline] + fn last(mut self) -> Option { + self.next_back() + } + #[inline] fn next_chunk(&mut self) -> Result<[T; N], core::array::IntoIter> { let mut raw_ary = [const { MaybeUninit::uninit() }; N]; diff --git a/library/alloc/src/vec/mod.rs b/library/alloc/src/vec/mod.rs index 633ef717e04dc..a97912304c894 100644 --- a/library/alloc/src/vec/mod.rs +++ b/library/alloc/src/vec/mod.rs @@ -66,7 +66,7 @@ use core::ptr::{self, NonNull}; use core::slice::{self, SliceIndex}; use core::{fmt, intrinsics}; -#[stable(feature = "extract_if", since = "CURRENT_RUSTC_VERSION")] +#[stable(feature = "extract_if", since = "1.87.0")] pub use self::extract_if::ExtractIf; use crate::alloc::{Allocator, Global}; use crate::borrow::{Cow, ToOwned}; @@ -1267,7 +1267,7 @@ impl Vec { /// ``` #[inline] #[stable(feature = "rust1", since = "1.0.0")] - #[rustc_const_stable(feature = "const_vec_string_slice", since = "CURRENT_RUSTC_VERSION")] + #[rustc_const_stable(feature = "const_vec_string_slice", since = "1.87.0")] pub const fn capacity(&self) -> usize { self.buf.capacity() } @@ -1582,7 +1582,7 @@ impl Vec { #[inline] #[stable(feature = "vec_as_slice", since = "1.7.0")] #[rustc_diagnostic_item = "vec_as_slice"] - #[rustc_const_stable(feature = "const_vec_string_slice", since = "CURRENT_RUSTC_VERSION")] + #[rustc_const_stable(feature = "const_vec_string_slice", since = "1.87.0")] pub const fn as_slice(&self) -> &[T] { // SAFETY: `slice::from_raw_parts` requires pointee is a contiguous, aligned buffer of size // `len` containing properly-initialized `T`s. Data must not be mutated for the returned @@ -1614,7 +1614,7 @@ impl Vec { #[inline] #[stable(feature = "vec_as_slice", since = "1.7.0")] #[rustc_diagnostic_item = "vec_as_mut_slice"] - #[rustc_const_stable(feature = "const_vec_string_slice", since = "CURRENT_RUSTC_VERSION")] + #[rustc_const_stable(feature = "const_vec_string_slice", since = "1.87.0")] pub const fn as_mut_slice(&mut self) -> &mut [T] { // SAFETY: `slice::from_raw_parts_mut` requires pointee is a contiguous, aligned buffer of // size `len` containing properly-initialized `T`s. Data must not be accessed through any @@ -1686,7 +1686,7 @@ impl Vec { /// [`as_ptr`]: Vec::as_ptr /// [`as_non_null`]: Vec::as_non_null #[stable(feature = "vec_as_ptr", since = "1.37.0")] - #[rustc_const_stable(feature = "const_vec_string_slice", since = "CURRENT_RUSTC_VERSION")] + #[rustc_const_stable(feature = "const_vec_string_slice", since = "1.87.0")] #[rustc_never_returns_null_ptr] #[rustc_as_ptr] #[inline] @@ -1749,7 +1749,7 @@ impl Vec { /// [`as_ptr`]: Vec::as_ptr /// [`as_non_null`]: Vec::as_non_null #[stable(feature = "vec_as_ptr", since = "1.37.0")] - #[rustc_const_stable(feature = "const_vec_string_slice", since = "CURRENT_RUSTC_VERSION")] + #[rustc_const_stable(feature = "const_vec_string_slice", since = "1.87.0")] #[rustc_never_returns_null_ptr] #[rustc_as_ptr] #[inline] @@ -2588,7 +2588,7 @@ impl Vec { #[inline] #[track_caller] unsafe fn append_elements(&mut self, other: *const [T]) { - let count = unsafe { (*other).len() }; + let count = other.len(); self.reserve(count); let len = self.len(); unsafe { ptr::copy_nonoverlapping(other as *const T, self.as_mut_ptr().add(len), count) }; @@ -2700,7 +2700,7 @@ impl Vec { /// ``` #[inline] #[stable(feature = "rust1", since = "1.0.0")] - #[rustc_const_stable(feature = "const_vec_string_slice", since = "CURRENT_RUSTC_VERSION")] + #[rustc_const_stable(feature = "const_vec_string_slice", since = "1.87.0")] #[rustc_confusables("length", "size")] pub const fn len(&self) -> usize { let len = self.len; @@ -2726,7 +2726,7 @@ impl Vec { /// ``` #[stable(feature = "rust1", since = "1.0.0")] #[rustc_diagnostic_item = "vec_is_empty"] - #[rustc_const_stable(feature = "const_vec_string_slice", since = "CURRENT_RUSTC_VERSION")] + #[rustc_const_stable(feature = "const_vec_string_slice", since = "1.87.0")] pub const fn is_empty(&self) -> bool { self.len() == 0 } @@ -2803,6 +2803,10 @@ impl Vec { /// want to use the [`Default`] trait to generate values, you can /// pass [`Default::default`] as the second argument. /// + /// # Panics + /// + /// Panics if the new capacity exceeds `isize::MAX` _bytes_. + /// /// # Examples /// /// ``` @@ -3010,6 +3014,10 @@ impl Vec { /// [`Clone`]), use [`Vec::resize_with`]. /// If you only need to resize to a smaller size, use [`Vec::truncate`]. /// + /// # Panics + /// + /// Panics if the new capacity exceeds `isize::MAX` _bytes_. + /// /// # Examples /// /// ``` @@ -3651,28 +3659,34 @@ impl Vec { /// /// If the returned `ExtractIf` is not exhausted, e.g. because it is dropped without iterating /// or the iteration short-circuits, then the remaining elements will be retained. - /// Use [`retain`] with a negated predicate if you do not need the returned iterator. + /// Use [`retain_mut`] with a negated predicate if you do not need the returned iterator. /// - /// [`retain`]: Vec::retain + /// [`retain_mut`]: Vec::retain_mut /// /// Using this method is equivalent to the following code: /// /// ``` - /// # use std::cmp::min; - /// # let some_predicate = |x: &mut i32| { *x == 2 || *x == 3 || *x == 6 }; - /// # let mut vec = vec![1, 2, 3, 4, 5, 6]; - /// # let range = 1..4; + /// # let some_predicate = |x: &mut i32| { *x % 2 == 1 }; + /// # let mut vec = vec![0, 1, 2, 3, 4, 5, 6]; + /// # let mut vec2 = vec.clone(); + /// # let range = 1..5; /// let mut i = range.start; - /// while i < min(vec.len(), range.end) { + /// let end_items = vec.len() - range.end; + /// # let mut extracted = vec![]; + /// + /// while i < vec.len() - end_items { /// if some_predicate(&mut vec[i]) { /// let val = vec.remove(i); + /// # extracted.push(val); /// // your code here /// } else { /// i += 1; /// } /// } /// - /// # assert_eq!(vec, vec![1, 4, 5]); + /// # let extracted2: Vec<_> = vec2.extract_if(range, some_predicate).collect(); + /// # assert_eq!(vec, vec2); + /// # assert_eq!(extracted, extracted2); /// ``` /// /// But `extract_if` is easier to use. `extract_if` is also more efficient, @@ -3707,7 +3721,7 @@ impl Vec { /// assert_eq!(items, vec![0, 0, 0, 0, 0, 0, 0, 2, 2, 2]); /// assert_eq!(ones.len(), 3); /// ``` - #[stable(feature = "extract_if", since = "CURRENT_RUSTC_VERSION")] + #[stable(feature = "extract_if", since = "1.87.0")] pub fn extract_if(&mut self, range: R, filter: F) -> ExtractIf<'_, T, F, A> where F: FnMut(&mut T) -> bool, diff --git a/library/alloc/src/vec/splice.rs b/library/alloc/src/vec/splice.rs index ca5cb17f8bfda..ed1a0dda76d29 100644 --- a/library/alloc/src/vec/splice.rs +++ b/library/alloc/src/vec/splice.rs @@ -59,7 +59,7 @@ impl Drop for Splice<'_, I, A> { // and moving things into the final place. // Which means we can replace the slice::Iter with pointers that won't point to deallocated // memory, so that Drain::drop is still allowed to call iter.len(), otherwise it would break - // the ptr.sub_ptr contract. + // the ptr.offset_from_unsigned contract. self.drain.iter = (&[]).iter(); unsafe { diff --git a/library/alloctests/benches/string.rs b/library/alloctests/benches/string.rs index 3d79ab78c6950..0bbec12e4fdc6 100644 --- a/library/alloctests/benches/string.rs +++ b/library/alloctests/benches/string.rs @@ -4,7 +4,7 @@ use test::{Bencher, black_box}; #[bench] fn bench_with_capacity(b: &mut Bencher) { - b.iter(|| String::with_capacity(100)); + b.iter(|| String::with_capacity(black_box(100))); } #[bench] @@ -12,7 +12,8 @@ fn bench_push_str(b: &mut Bencher) { let s = "ศไทย中华Việt Nam; Mary had a little lamb, Little lamb"; b.iter(|| { let mut r = String::new(); - r.push_str(s); + black_box(&mut r).push_str(black_box(s)); + r }); } @@ -24,8 +25,9 @@ fn bench_push_str_one_byte(b: &mut Bencher) { b.iter(|| { let mut r = String::new(); for _ in 0..REPETITIONS { - r.push_str("a") + black_box(&mut r).push_str(black_box("a")); } + r }); } @@ -35,8 +37,9 @@ fn bench_push_char_one_byte(b: &mut Bencher) { b.iter(|| { let mut r = String::new(); for _ in 0..REPETITIONS { - r.push('a') + black_box(&mut r).push(black_box('a')); } + r }); } @@ -46,8 +49,9 @@ fn bench_push_char_two_bytes(b: &mut Bencher) { b.iter(|| { let mut r = String::new(); for _ in 0..REPETITIONS { - r.push('â') + black_box(&mut r).push(black_box('â')); } + r }); } @@ -57,34 +61,26 @@ fn from_utf8_lossy_100_ascii(b: &mut Bencher) { Lorem ipsum dolor sit amet, consectetur. "; assert_eq!(100, s.len()); - b.iter(|| { - let _ = String::from_utf8_lossy(s); - }); + b.iter(|| String::from_utf8_lossy(black_box(s))); } #[bench] fn from_utf8_lossy_100_multibyte(b: &mut Bencher) { let s = "𐌀𐌖𐌋𐌄𐌑𐌉ปรدولة الكويتทศไทย中华𐍅𐌿𐌻𐍆𐌹𐌻𐌰".as_bytes(); assert_eq!(100, s.len()); - b.iter(|| { - let _ = String::from_utf8_lossy(s); - }); + b.iter(|| String::from_utf8_lossy(black_box(s))); } #[bench] fn from_utf8_lossy_invalid(b: &mut Bencher) { let s = b"Hello\xC0\x80 There\xE6\x83 Goodbye"; - b.iter(|| { - let _ = String::from_utf8_lossy(s); - }); + b.iter(|| String::from_utf8_lossy(black_box(s))); } #[bench] fn from_utf8_lossy_100_invalid(b: &mut Bencher) { let s = repeat(0xf5).take(100).collect::>(); - b.iter(|| { - let _ = String::from_utf8_lossy(&s); - }); + b.iter(|| String::from_utf8_lossy(black_box(&s))); } #[bench] @@ -96,8 +92,8 @@ fn bench_exact_size_shrink_to_fit(b: &mut Bencher) { r.push_str(s); assert_eq!(r.len(), r.capacity()); b.iter(|| { - let mut r = String::with_capacity(s.len()); - r.push_str(s); + let mut r = String::with_capacity(black_box(s.len())); + r.push_str(black_box(s)); r.shrink_to_fit(); r }); @@ -107,21 +103,21 @@ fn bench_exact_size_shrink_to_fit(b: &mut Bencher) { fn bench_from_str(b: &mut Bencher) { let s = "Hello there, the quick brown fox jumped over the lazy dog! \ Lorem ipsum dolor sit amet, consectetur. "; - b.iter(|| String::from(s)) + b.iter(|| String::from(black_box(s))) } #[bench] fn bench_from(b: &mut Bencher) { let s = "Hello there, the quick brown fox jumped over the lazy dog! \ Lorem ipsum dolor sit amet, consectetur. "; - b.iter(|| String::from(s)) + b.iter(|| String::from(black_box(s))) } #[bench] fn bench_to_string(b: &mut Bencher) { let s = "Hello there, the quick brown fox jumped over the lazy dog! \ Lorem ipsum dolor sit amet, consectetur. "; - b.iter(|| s.to_string()) + b.iter(|| black_box(s).to_string()) } #[bench] @@ -129,7 +125,7 @@ fn bench_insert_char_short(b: &mut Bencher) { let s = "Hello, World!"; b.iter(|| { let mut x = String::from(s); - black_box(&mut x).insert(6, black_box(' ')); + black_box(&mut x).insert(black_box(6), black_box(' ')); x }) } @@ -139,7 +135,7 @@ fn bench_insert_char_long(b: &mut Bencher) { let s = "Hello, World!"; b.iter(|| { let mut x = String::from(s); - black_box(&mut x).insert(6, black_box('❤')); + black_box(&mut x).insert(black_box(6), black_box('❤')); x }) } @@ -149,7 +145,7 @@ fn bench_insert_str_short(b: &mut Bencher) { let s = "Hello, World!"; b.iter(|| { let mut x = String::from(s); - black_box(&mut x).insert_str(6, black_box(" ")); + black_box(&mut x).insert_str(black_box(6), black_box(" ")); x }) } @@ -159,7 +155,7 @@ fn bench_insert_str_long(b: &mut Bencher) { let s = "Hello, World!"; b.iter(|| { let mut x = String::from(s); - black_box(&mut x).insert_str(6, black_box(" rustic ")); + black_box(&mut x).insert_str(black_box(6), black_box(" rustic ")); x }) } diff --git a/library/alloctests/lib.rs b/library/alloctests/lib.rs index 6ce8a6d9ca174..56e60ed4c8448 100644 --- a/library/alloctests/lib.rs +++ b/library/alloctests/lib.rs @@ -28,6 +28,8 @@ #![feature(iter_next_chunk)] #![feature(maybe_uninit_slice)] #![feature(maybe_uninit_uninit_array_transpose)] +#![feature(nonnull_provenance)] +#![feature(ptr_alignment_type)] #![feature(ptr_internals)] #![feature(sized_type_properties)] #![feature(slice_iter_mut_as_mut_slice)] diff --git a/library/alloctests/tests/c_str2.rs b/library/alloctests/tests/c_str2.rs index 0f4c27fa12322..fe7686bd1c592 100644 --- a/library/alloctests/tests/c_str2.rs +++ b/library/alloctests/tests/c_str2.rs @@ -33,12 +33,6 @@ fn build_with_zero2() { assert!(CString::new(vec![0]).is_err()); } -#[test] -fn formatted() { - let s = CString::new(&b"abc\x01\x02\n\xE2\x80\xA6\xFF"[..]).unwrap(); - assert_eq!(format!("{s:?}"), r#""abc\x01\x02\n\xe2\x80\xa6\xff""#); -} - #[test] fn borrowed() { unsafe { diff --git a/library/alloctests/tests/fmt.rs b/library/alloctests/tests/fmt.rs index c13074c53b73d..a20e8c623360f 100644 --- a/library/alloctests/tests/fmt.rs +++ b/library/alloctests/tests/fmt.rs @@ -1,6 +1,7 @@ #![deny(warnings)] // FIXME(static_mut_refs): Do not allow `static_mut_refs` lint #![allow(static_mut_refs)] +#![cfg_attr(not(bootstrap), allow(unnecessary_transmutes))] use std::cell::RefCell; use std::fmt::{self, Write}; diff --git a/library/alloctests/tests/lib.rs b/library/alloctests/tests/lib.rs index 46c11ea150bf8..38309585fad61 100644 --- a/library/alloctests/tests/lib.rs +++ b/library/alloctests/tests/lib.rs @@ -37,6 +37,7 @@ #![feature(str_as_str)] #![feature(strict_provenance_lints)] #![feature(vec_deque_pop_if)] +#![feature(vec_deque_truncate_front)] #![feature(unique_rc_arc)] #![feature(macro_metavar_expr_concat)] #![allow(internal_features)] @@ -63,6 +64,7 @@ mod fmt; mod heap; mod linked_list; mod misc_tests; +mod num; mod rc; mod slice; mod sort; diff --git a/library/alloctests/tests/num.rs b/library/alloctests/tests/num.rs new file mode 100644 index 0000000000000..3c76e68c60640 --- /dev/null +++ b/library/alloctests/tests/num.rs @@ -0,0 +1,69 @@ +use std::fmt::{Debug, Display}; +use std::str::FromStr; + +fn assert_nb(value: Int) { + let s = value.to_string(); + let s2 = format!("s: {}.", value); + + assert_eq!(format!("s: {s}."), s2); + let Ok(ret) = Int::from_str(&s) else { + panic!("failed to convert into to string"); + }; + assert_eq!(ret, value); +} + +macro_rules! uint_to_s { + ($($fn_name:ident, $int:ident,)+) => { + $( + #[test] + fn $fn_name() { + assert_nb::<$int>($int::MIN); + assert_nb::<$int>($int::MAX); + assert_nb::<$int>(1); + assert_nb::<$int>($int::MIN / 2); + assert_nb::<$int>($int::MAX / 2); + } + )+ + } +} +macro_rules! int_to_s { + ($($fn_name:ident, $int:ident,)+) => { + $( + #[test] + fn $fn_name() { + assert_nb::<$int>($int::MIN); + assert_nb::<$int>($int::MAX); + assert_nb::<$int>(1); + assert_nb::<$int>(0); + assert_nb::<$int>(-1); + assert_nb::<$int>($int::MIN / 2); + assert_nb::<$int>($int::MAX / 2); + } + )+ + } +} + +int_to_s!( + test_i8_to_string, + i8, + test_i16_to_string, + i16, + test_i32_to_string, + i32, + test_i64_to_string, + i64, + test_i128_to_string, + i128, +); +uint_to_s!( + test_u8_to_string, + u8, + test_u16_to_string, + u16, + test_u32_to_string, + u32, + test_u64_to_string, + u64, + test_u128_to_string, + u128, +); diff --git a/library/alloctests/tests/vec_deque.rs b/library/alloctests/tests/vec_deque.rs index 1b03c29e5bda1..b77ea3a312bef 100644 --- a/library/alloctests/tests/vec_deque.rs +++ b/library/alloctests/tests/vec_deque.rs @@ -1686,6 +1686,40 @@ fn truncate_leak() { assert_eq!(unsafe { DROPS }, 7); } +#[test] +#[cfg_attr(not(panic = "unwind"), ignore = "test requires unwinding support")] +fn truncate_front_leak() { + static mut DROPS: i32 = 0; + + struct D(bool); + + impl Drop for D { + fn drop(&mut self) { + unsafe { + DROPS += 1; + } + + if self.0 { + panic!("panic in `drop`"); + } + } + } + + let mut q = VecDeque::new(); + q.push_back(D(false)); + q.push_back(D(false)); + q.push_back(D(false)); + q.push_back(D(false)); + q.push_back(D(false)); + q.push_front(D(true)); + q.push_front(D(false)); + q.push_front(D(false)); + + catch_unwind(AssertUnwindSafe(|| q.truncate_front(1))).ok(); + + assert_eq!(unsafe { DROPS }, 7); +} + #[test] #[cfg_attr(not(panic = "unwind"), ignore = "test requires unwinding support")] fn test_drain_leak() { @@ -1863,3 +1897,38 @@ fn test_collect_from_into_iter_keeps_allocation() { assert_eq!(v.capacity(), 13); } } + +#[test] +fn test_truncate_front() { + let mut v = VecDeque::with_capacity(13); + v.extend(0..7); + assert_eq!(v.as_slices(), ([0, 1, 2, 3, 4, 5, 6].as_slice(), [].as_slice())); + v.truncate_front(10); + assert_eq!(v.len(), 7); + assert_eq!(v.as_slices(), ([0, 1, 2, 3, 4, 5, 6].as_slice(), [].as_slice())); + v.truncate_front(7); + assert_eq!(v.len(), 7); + assert_eq!(v.as_slices(), ([0, 1, 2, 3, 4, 5, 6].as_slice(), [].as_slice())); + v.truncate_front(3); + assert_eq!(v.as_slices(), ([4, 5, 6].as_slice(), [].as_slice())); + assert_eq!(v.len(), 3); + v.truncate_front(0); + assert_eq!(v.as_slices(), ([].as_slice(), [].as_slice())); + assert_eq!(v.len(), 0); + + v.clear(); + v.extend(0..7); + assert_eq!(v.as_slices(), ([0, 1, 2, 3, 4, 5, 6].as_slice(), [].as_slice())); + v.push_front(9); + v.push_front(8); + v.push_front(7); + assert_eq!(v.as_slices(), ([7, 8, 9].as_slice(), [0, 1, 2, 3, 4, 5, 6].as_slice())); + v.truncate_front(12); + assert_eq!(v.as_slices(), ([7, 8, 9].as_slice(), [0, 1, 2, 3, 4, 5, 6].as_slice())); + v.truncate_front(10); + assert_eq!(v.as_slices(), ([7, 8, 9].as_slice(), [0, 1, 2, 3, 4, 5, 6].as_slice())); + v.truncate_front(8); + assert_eq!(v.as_slices(), ([9].as_slice(), [0, 1, 2, 3, 4, 5, 6].as_slice())); + v.truncate_front(5); + assert_eq!(v.as_slices(), ([2, 3, 4, 5, 6].as_slice(), [].as_slice())); +} diff --git a/library/core/Cargo.toml b/library/core/Cargo.toml index b60826ee4e6c7..99e52d0ada0a6 100644 --- a/library/core/Cargo.toml +++ b/library/core/Cargo.toml @@ -31,9 +31,6 @@ level = "warn" check-cfg = [ 'cfg(bootstrap)', 'cfg(no_fp_fmt_parse)', - 'cfg(stdarch_intel_sde)', - # #[cfg(bootstrap)] - 'cfg(target_feature, values("vector-enhancements-1"))', # core use #[path] imports to portable-simd `core_simd` crate # and to stdarch `core_arch` crate which messes-up with Cargo list # of declared features, we therefor expect any feature cfg diff --git a/library/core/src/alloc/layout.rs b/library/core/src/alloc/layout.rs index 1595a3af883d1..e8a03aadc3390 100644 --- a/library/core/src/alloc/layout.rs +++ b/library/core/src/alloc/layout.rs @@ -520,6 +520,14 @@ impl Layout { unsafe { Ok(Layout::from_size_align_unchecked(array_size, align.as_usize())) } } } + + /// Perma-unstable access to `align` as `Alignment` type. + #[unstable(issue = "none", feature = "std_internals")] + #[doc(hidden)] + #[inline] + pub const fn alignment(&self) -> Alignment { + self.align + } } #[stable(feature = "alloc_layout", since = "1.28.0")] diff --git a/library/core/src/alloc/mod.rs b/library/core/src/alloc/mod.rs index 9805cee1c331e..9d608d5e83c40 100644 --- a/library/core/src/alloc/mod.rs +++ b/library/core/src/alloc/mod.rs @@ -90,7 +90,7 @@ impl fmt::Display for AllocError { /// # Safety /// /// Memory blocks that are [*currently allocated*] by an allocator, -/// must point to valid memory, and retain their validity while until either: +/// must point to valid memory, and retain their validity until either: /// - the memory block is deallocated, or /// - the allocator is dropped. /// @@ -112,7 +112,9 @@ pub unsafe trait Allocator { /// /// The returned block of memory remains valid as long as it is [*currently allocated*] and the shorter of: /// - the borrow-checker lifetime of the allocator type itself. - /// - as long as at the allocator and all its clones has not been dropped. + /// - as long as the allocator and all its clones have not been dropped. + /// + /// [*currently allocated*]: #currently-allocated-memory /// /// # Errors /// diff --git a/library/core/src/any.rs b/library/core/src/any.rs index 10f2a11d558be..7aa3f3c6d7434 100644 --- a/library/core/src/any.rs +++ b/library/core/src/any.rs @@ -772,8 +772,8 @@ impl hash::Hash for TypeId { // (especially given the previous point about the lower 64 bits being // high quality on their own). // - It is correct to do so -- only hashing a subset of `self` is still - // with an `Eq` implementation that considers the entire value, as - // ours does. + // compatible with an `Eq` implementation that considers the entire + // value, as ours does. self.t.1.hash(state); } } diff --git a/library/core/src/arch.rs b/library/core/src/arch.rs index 81d828a971c80..f19fde2b4c733 100644 --- a/library/core/src/arch.rs +++ b/library/core/src/arch.rs @@ -32,7 +32,7 @@ pub macro asm("assembly template", $(operands,)* $(options($(option),*))?) { /// /// [Rust By Example]: https://doc.rust-lang.org/nightly/rust-by-example/unsafe/asm.html /// [reference]: https://doc.rust-lang.org/nightly/reference/inline-assembly.html -#[unstable(feature = "naked_functions", issue = "90957")] +#[stable(feature = "naked_functions", since = "CURRENT_RUSTC_VERSION")] #[rustc_builtin_macro] pub macro naked_asm("assembly template", $(operands,)* $(options($(option),*))?) { /* compiler built-in */ diff --git a/library/core/src/array/iter.rs b/library/core/src/array/iter.rs index 1edade41597f7..90f76d6d4c7be 100644 --- a/library/core/src/array/iter.rs +++ b/library/core/src/array/iter.rs @@ -1,38 +1,35 @@ //! Defines the `IntoIter` owned iterator for arrays. use crate::intrinsics::transmute_unchecked; -use crate::iter::{self, FusedIterator, TrustedLen, TrustedRandomAccessNoCoerce}; +use crate::iter::{FusedIterator, TrustedLen, TrustedRandomAccessNoCoerce}; use crate::mem::MaybeUninit; use crate::num::NonZero; -use crate::ops::{IndexRange, Range}; +use crate::ops::{IndexRange, Range, Try}; use crate::{fmt, ptr}; +mod iter_inner; + +type InnerSized = iter_inner::PolymorphicIter<[MaybeUninit; N]>; +type InnerUnsized = iter_inner::PolymorphicIter<[MaybeUninit]>; + /// A by-value [array] iterator. #[stable(feature = "array_value_iter", since = "1.51.0")] #[rustc_insignificant_dtor] #[rustc_diagnostic_item = "ArrayIntoIter"] +#[derive(Clone)] pub struct IntoIter { - /// This is the array we are iterating over. - /// - /// Elements with index `i` where `alive.start <= i < alive.end` have not - /// been yielded yet and are valid array entries. Elements with indices `i - /// < alive.start` or `i >= alive.end` have been yielded already and must - /// not be accessed anymore! Those dead elements might even be in a - /// completely uninitialized state! - /// - /// So the invariants are: - /// - `data[alive]` is alive (i.e. contains valid elements) - /// - `data[..alive.start]` and `data[alive.end..]` are dead (i.e. the - /// elements were already read and must not be touched anymore!) - data: [MaybeUninit; N], + inner: InnerSized, +} - /// The elements in `data` that have not been yielded yet. - /// - /// Invariants: - /// - `alive.end <= N` - /// - /// (And the `IndexRange` type requires `alive.start <= alive.end`.) - alive: IndexRange, +impl IntoIter { + #[inline] + fn unsize(&self) -> &InnerUnsized { + &self.inner + } + #[inline] + fn unsize_mut(&mut self) -> &mut InnerUnsized { + &mut self.inner + } } // Note: the `#[rustc_skip_during_method_dispatch(array)]` on `trait IntoIterator` @@ -53,6 +50,7 @@ impl IntoIterator for [T; N] { /// 2021 edition -- see the [array] Editions section for more information. /// /// [array]: prim@array + #[inline] fn into_iter(self) -> Self::IntoIter { // SAFETY: The transmute here is actually safe. The docs of `MaybeUninit` // promise: @@ -68,7 +66,10 @@ impl IntoIterator for [T; N] { // FIXME: If normal `transmute` ever gets smart enough to allow this // directly, use it instead of `transmute_unchecked`. let data: [MaybeUninit; N] = unsafe { transmute_unchecked(self) }; - IntoIter { data, alive: IndexRange::zero_to(N) } + // SAFETY: The original array was entirely initialized and the the alive + // range we're passing here represents that fact. + let inner = unsafe { InnerSized::new_unchecked(IndexRange::zero_to(N), data) }; + IntoIter { inner } } } @@ -136,13 +137,16 @@ impl IntoIter { /// assert_eq!(r.collect::>(), vec![10, 11, 12, 13, 14, 15]); /// ``` #[unstable(feature = "array_into_iter_constructors", issue = "91583")] + #[inline] pub const unsafe fn new_unchecked( buffer: [MaybeUninit; N], initialized: Range, ) -> Self { // SAFETY: one of our safety conditions is that the range is canonical. let alive = unsafe { IndexRange::new_unchecked(initialized.start, initialized.end) }; - Self { data: buffer, alive } + // SAFETY: one of our safety condition is that these items are initialized. + let inner = unsafe { InnerSized::new_unchecked(alive, buffer) }; + IntoIter { inner } } /// Creates an iterator over `T` which returns no elements. @@ -198,172 +202,134 @@ impl IntoIter { /// assert_eq!(get_bytes(false).collect::>(), vec![]); /// ``` #[unstable(feature = "array_into_iter_constructors", issue = "91583")] + #[inline] pub const fn empty() -> Self { - let buffer = [const { MaybeUninit::uninit() }; N]; - let initialized = 0..0; - - // SAFETY: We're telling it that none of the elements are initialized, - // which is trivially true. And ∀N: usize, 0 <= N. - unsafe { Self::new_unchecked(buffer, initialized) } + let inner = InnerSized::empty(); + IntoIter { inner } } /// Returns an immutable slice of all elements that have not been yielded /// yet. #[stable(feature = "array_value_iter", since = "1.51.0")] + #[inline] pub fn as_slice(&self) -> &[T] { - // SAFETY: We know that all elements within `alive` are properly initialized. - unsafe { - let slice = self.data.get_unchecked(self.alive.clone()); - slice.assume_init_ref() - } + self.unsize().as_slice() } /// Returns a mutable slice of all elements that have not been yielded yet. #[stable(feature = "array_value_iter", since = "1.51.0")] + #[inline] pub fn as_mut_slice(&mut self) -> &mut [T] { - // SAFETY: We know that all elements within `alive` are properly initialized. - unsafe { - let slice = self.data.get_unchecked_mut(self.alive.clone()); - slice.assume_init_mut() - } + self.unsize_mut().as_mut_slice() } } #[stable(feature = "array_value_iter_impls", since = "1.40.0")] impl Iterator for IntoIter { type Item = T; + + #[inline] fn next(&mut self) -> Option { - // Get the next index from the front. - // - // Increasing `alive.start` by 1 maintains the invariant regarding - // `alive`. However, due to this change, for a short time, the alive - // zone is not `data[alive]` anymore, but `data[idx..alive.end]`. - self.alive.next().map(|idx| { - // Read the element from the array. - // SAFETY: `idx` is an index into the former "alive" region of the - // array. Reading this element means that `data[idx]` is regarded as - // dead now (i.e. do not touch). As `idx` was the start of the - // alive-zone, the alive zone is now `data[alive]` again, restoring - // all invariants. - unsafe { self.data.get_unchecked(idx).assume_init_read() } - }) + self.unsize_mut().next() } + #[inline] fn size_hint(&self) -> (usize, Option) { - let len = self.len(); - (len, Some(len)) + self.unsize().size_hint() } #[inline] - fn fold(mut self, init: Acc, mut fold: Fold) -> Acc + fn fold(mut self, init: Acc, fold: Fold) -> Acc where Fold: FnMut(Acc, Self::Item) -> Acc, { - let data = &mut self.data; - iter::ByRefSized(&mut self.alive).fold(init, |acc, idx| { - // SAFETY: idx is obtained by folding over the `alive` range, which implies the - // value is currently considered alive but as the range is being consumed each value - // we read here will only be read once and then considered dead. - fold(acc, unsafe { data.get_unchecked(idx).assume_init_read() }) - }) + self.unsize_mut().fold(init, fold) } + #[inline] + fn try_fold(&mut self, init: B, f: F) -> R + where + Self: Sized, + F: FnMut(B, Self::Item) -> R, + R: Try, + { + self.unsize_mut().try_fold(init, f) + } + + #[inline] fn count(self) -> usize { self.len() } + #[inline] fn last(mut self) -> Option { self.next_back() } + #[inline] fn advance_by(&mut self, n: usize) -> Result<(), NonZero> { - // This also moves the start, which marks them as conceptually "dropped", - // so if anything goes bad then our drop impl won't double-free them. - let range_to_drop = self.alive.take_prefix(n); - let remaining = n - range_to_drop.len(); - - // SAFETY: These elements are currently initialized, so it's fine to drop them. - unsafe { - let slice = self.data.get_unchecked_mut(range_to_drop); - slice.assume_init_drop(); - } - - NonZero::new(remaining).map_or(Ok(()), Err) + self.unsize_mut().advance_by(n) } #[inline] unsafe fn __iterator_get_unchecked(&mut self, idx: usize) -> Self::Item { // SAFETY: The caller must provide an idx that is in bound of the remainder. - unsafe { self.data.as_ptr().add(self.alive.start()).add(idx).cast::().read() } + let elem_ref = unsafe { self.as_mut_slice().get_unchecked_mut(idx) }; + // SAFETY: We only implement `TrustedRandomAccessNoCoerce` for types + // which are actually `Copy`, so cannot have multiple-drop issues. + unsafe { ptr::read(elem_ref) } } } #[stable(feature = "array_value_iter_impls", since = "1.40.0")] impl DoubleEndedIterator for IntoIter { + #[inline] fn next_back(&mut self) -> Option { - // Get the next index from the back. - // - // Decreasing `alive.end` by 1 maintains the invariant regarding - // `alive`. However, due to this change, for a short time, the alive - // zone is not `data[alive]` anymore, but `data[alive.start..=idx]`. - self.alive.next_back().map(|idx| { - // Read the element from the array. - // SAFETY: `idx` is an index into the former "alive" region of the - // array. Reading this element means that `data[idx]` is regarded as - // dead now (i.e. do not touch). As `idx` was the end of the - // alive-zone, the alive zone is now `data[alive]` again, restoring - // all invariants. - unsafe { self.data.get_unchecked(idx).assume_init_read() } - }) + self.unsize_mut().next_back() } #[inline] - fn rfold(mut self, init: Acc, mut rfold: Fold) -> Acc + fn rfold(mut self, init: Acc, rfold: Fold) -> Acc where Fold: FnMut(Acc, Self::Item) -> Acc, { - let data = &mut self.data; - iter::ByRefSized(&mut self.alive).rfold(init, |acc, idx| { - // SAFETY: idx is obtained by folding over the `alive` range, which implies the - // value is currently considered alive but as the range is being consumed each value - // we read here will only be read once and then considered dead. - rfold(acc, unsafe { data.get_unchecked(idx).assume_init_read() }) - }) + self.unsize_mut().rfold(init, rfold) + } + + #[inline] + fn try_rfold(&mut self, init: B, f: F) -> R + where + Self: Sized, + F: FnMut(B, Self::Item) -> R, + R: Try, + { + self.unsize_mut().try_rfold(init, f) } + #[inline] fn advance_back_by(&mut self, n: usize) -> Result<(), NonZero> { - // This also moves the end, which marks them as conceptually "dropped", - // so if anything goes bad then our drop impl won't double-free them. - let range_to_drop = self.alive.take_suffix(n); - let remaining = n - range_to_drop.len(); - - // SAFETY: These elements are currently initialized, so it's fine to drop them. - unsafe { - let slice = self.data.get_unchecked_mut(range_to_drop); - slice.assume_init_drop(); - } - - NonZero::new(remaining).map_or(Ok(()), Err) + self.unsize_mut().advance_back_by(n) } } #[stable(feature = "array_value_iter_impls", since = "1.40.0")] impl Drop for IntoIter { + #[inline] fn drop(&mut self) { - // SAFETY: This is safe: `as_mut_slice` returns exactly the sub-slice - // of elements that have not been moved out yet and that remain - // to be dropped. - unsafe { ptr::drop_in_place(self.as_mut_slice()) } + // `inner` now handles this, but it'd technically be a breaking change + // to remove this `impl`, even though it's useless. } } #[stable(feature = "array_value_iter_impls", since = "1.40.0")] impl ExactSizeIterator for IntoIter { + #[inline] fn len(&self) -> usize { - self.alive.len() + self.inner.len() } + #[inline] fn is_empty(&self) -> bool { - self.alive.is_empty() + self.inner.len() == 0 } } @@ -396,32 +362,9 @@ where const MAY_HAVE_SIDE_EFFECT: bool = false; } -#[stable(feature = "array_value_iter_impls", since = "1.40.0")] -impl Clone for IntoIter { - fn clone(&self) -> Self { - // Note, we don't really need to match the exact same alive range, so - // we can just clone into offset 0 regardless of where `self` is. - let mut new = - Self { data: [const { MaybeUninit::uninit() }; N], alive: IndexRange::zero_to(0) }; - - // Clone all alive elements. - for (src, dst) in iter::zip(self.as_slice(), &mut new.data) { - // Write a clone into the new array, then update its alive range. - // If cloning panics, we'll correctly drop the previous items. - dst.write(src.clone()); - // This addition cannot overflow as we're iterating a slice - new.alive = IndexRange::zero_to(new.alive.end() + 1); - } - - new - } -} - #[stable(feature = "array_value_iter_impls", since = "1.40.0")] impl fmt::Debug for IntoIter { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - // Only print the elements that were not yielded yet: we cannot - // access the yielded elements anymore. - f.debug_tuple("IntoIter").field(&self.as_slice()).finish() + self.unsize().fmt(f) } } diff --git a/library/core/src/array/iter/iter_inner.rs b/library/core/src/array/iter/iter_inner.rs new file mode 100644 index 0000000000000..3c2343591f8cf --- /dev/null +++ b/library/core/src/array/iter/iter_inner.rs @@ -0,0 +1,281 @@ +//! Defines the `IntoIter` owned iterator for arrays. + +use crate::mem::MaybeUninit; +use crate::num::NonZero; +use crate::ops::{IndexRange, NeverShortCircuit, Try}; +use crate::{fmt, iter}; + +#[allow(private_bounds)] +trait PartialDrop { + /// # Safety + /// `self[alive]` are all initialized before the call, + /// then are never used (without reinitializing them) after it. + unsafe fn partial_drop(&mut self, alive: IndexRange); +} +impl PartialDrop for [MaybeUninit] { + unsafe fn partial_drop(&mut self, alive: IndexRange) { + // SAFETY: We know that all elements within `alive` are properly initialized. + unsafe { self.get_unchecked_mut(alive).assume_init_drop() } + } +} +impl PartialDrop for [MaybeUninit; N] { + unsafe fn partial_drop(&mut self, alive: IndexRange) { + let slice: &mut [MaybeUninit] = self; + // SAFETY: Initialized elements in the array are also initialized in the slice. + unsafe { slice.partial_drop(alive) } + } +} + +/// The internals of a by-value array iterator. +/// +/// The real `array::IntoIter` stores a `PolymorphicIter<[MaybeUninit, N]>` +/// which it unsizes to `PolymorphicIter<[MaybeUninit]>` to iterate. +#[allow(private_bounds)] +pub(super) struct PolymorphicIter +where + DATA: PartialDrop, +{ + /// The elements in `data` that have not been yielded yet. + /// + /// Invariants: + /// - `alive.end <= N` + /// + /// (And the `IndexRange` type requires `alive.start <= alive.end`.) + alive: IndexRange, + + /// This is the array we are iterating over. + /// + /// Elements with index `i` where `alive.start <= i < alive.end` have not + /// been yielded yet and are valid array entries. Elements with indices `i + /// < alive.start` or `i >= alive.end` have been yielded already and must + /// not be accessed anymore! Those dead elements might even be in a + /// completely uninitialized state! + /// + /// So the invariants are: + /// - `data[alive]` is alive (i.e. contains valid elements) + /// - `data[..alive.start]` and `data[alive.end..]` are dead (i.e. the + /// elements were already read and must not be touched anymore!) + data: DATA, +} + +#[allow(private_bounds)] +impl PolymorphicIter +where + DATA: PartialDrop, +{ + #[inline] + pub(super) const fn len(&self) -> usize { + self.alive.len() + } +} + +#[allow(private_bounds)] +impl Drop for PolymorphicIter +where + DATA: PartialDrop, +{ + #[inline] + fn drop(&mut self) { + // SAFETY: by our type invariant `self.alive` is exactly the initialized + // items, and this is drop so nothing can use the items afterwards. + unsafe { self.data.partial_drop(self.alive.clone()) } + } +} + +impl PolymorphicIter<[MaybeUninit; N]> { + #[inline] + pub(super) const fn empty() -> Self { + Self { alive: IndexRange::zero_to(0), data: [const { MaybeUninit::uninit() }; N] } + } + + /// # Safety + /// `data[alive]` are all initialized. + #[inline] + pub(super) const unsafe fn new_unchecked(alive: IndexRange, data: [MaybeUninit; N]) -> Self { + Self { alive, data } + } +} + +impl Clone for PolymorphicIter<[MaybeUninit; N]> { + #[inline] + fn clone(&self) -> Self { + // Note, we don't really need to match the exact same alive range, so + // we can just clone into offset 0 regardless of where `self` is. + let mut new = Self::empty(); + + fn clone_into_new( + source: &PolymorphicIter<[MaybeUninit]>, + target: &mut PolymorphicIter<[MaybeUninit]>, + ) { + // Clone all alive elements. + for (src, dst) in iter::zip(source.as_slice(), &mut target.data) { + // Write a clone into the new array, then update its alive range. + // If cloning panics, we'll correctly drop the previous items. + dst.write(src.clone()); + // This addition cannot overflow as we're iterating a slice, + // the length of which always fits in usize. + target.alive = IndexRange::zero_to(target.alive.end() + 1); + } + } + + clone_into_new(self, &mut new); + new + } +} + +impl PolymorphicIter<[MaybeUninit]> { + #[inline] + pub(super) fn as_slice(&self) -> &[T] { + // SAFETY: We know that all elements within `alive` are properly initialized. + unsafe { + let slice = self.data.get_unchecked(self.alive.clone()); + slice.assume_init_ref() + } + } + + #[inline] + pub(super) fn as_mut_slice(&mut self) -> &mut [T] { + // SAFETY: We know that all elements within `alive` are properly initialized. + unsafe { + let slice = self.data.get_unchecked_mut(self.alive.clone()); + slice.assume_init_mut() + } + } +} + +impl fmt::Debug for PolymorphicIter<[MaybeUninit]> { + #[inline] + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + // Only print the elements that were not yielded yet: we cannot + // access the yielded elements anymore. + f.debug_tuple("IntoIter").field(&self.as_slice()).finish() + } +} + +/// Iterator-equivalent methods. +/// +/// We don't implement the actual iterator traits because we want to implement +/// things like `try_fold` that require `Self: Sized` (which we're not). +impl PolymorphicIter<[MaybeUninit]> { + #[inline] + pub(super) fn next(&mut self) -> Option { + // Get the next index from the front. + // + // Increasing `alive.start` by 1 maintains the invariant regarding + // `alive`. However, due to this change, for a short time, the alive + // zone is not `data[alive]` anymore, but `data[idx..alive.end]`. + self.alive.next().map(|idx| { + // Read the element from the array. + // SAFETY: `idx` is an index into the former "alive" region of the + // array. Reading this element means that `data[idx]` is regarded as + // dead now (i.e. do not touch). As `idx` was the start of the + // alive-zone, the alive zone is now `data[alive]` again, restoring + // all invariants. + unsafe { self.data.get_unchecked(idx).assume_init_read() } + }) + } + + #[inline] + pub(super) fn size_hint(&self) -> (usize, Option) { + let len = self.len(); + (len, Some(len)) + } + + #[inline] + pub(super) fn advance_by(&mut self, n: usize) -> Result<(), NonZero> { + // This also moves the start, which marks them as conceptually "dropped", + // so if anything goes bad then our drop impl won't double-free them. + let range_to_drop = self.alive.take_prefix(n); + let remaining = n - range_to_drop.len(); + + // SAFETY: These elements are currently initialized, so it's fine to drop them. + unsafe { + let slice = self.data.get_unchecked_mut(range_to_drop); + slice.assume_init_drop(); + } + + NonZero::new(remaining).map_or(Ok(()), Err) + } + + #[inline] + pub(super) fn fold(&mut self, init: B, f: impl FnMut(B, T) -> B) -> B { + self.try_fold(init, NeverShortCircuit::wrap_mut_2(f)).0 + } + + #[inline] + pub(super) fn try_fold(&mut self, init: B, mut f: F) -> R + where + F: FnMut(B, T) -> R, + R: Try, + { + // `alive` is an `IndexRange`, not an arbitrary iterator, so we can + // trust that its `try_fold` isn't going to do something weird like + // call the fold-er multiple times for the same index. + let data = &mut self.data; + self.alive.try_fold(init, move |accum, idx| { + // SAFETY: `idx` has been removed from the alive range, so we're not + // going to drop it (even if `f` panics) and thus its ok to give + // out ownership of that item to `f` to handle. + let elem = unsafe { data.get_unchecked(idx).assume_init_read() }; + f(accum, elem) + }) + } + + #[inline] + pub(super) fn next_back(&mut self) -> Option { + // Get the next index from the back. + // + // Decreasing `alive.end` by 1 maintains the invariant regarding + // `alive`. However, due to this change, for a short time, the alive + // zone is not `data[alive]` anymore, but `data[alive.start..=idx]`. + self.alive.next_back().map(|idx| { + // Read the element from the array. + // SAFETY: `idx` is an index into the former "alive" region of the + // array. Reading this element means that `data[idx]` is regarded as + // dead now (i.e. do not touch). As `idx` was the end of the + // alive-zone, the alive zone is now `data[alive]` again, restoring + // all invariants. + unsafe { self.data.get_unchecked(idx).assume_init_read() } + }) + } + + #[inline] + pub(super) fn advance_back_by(&mut self, n: usize) -> Result<(), NonZero> { + // This also moves the end, which marks them as conceptually "dropped", + // so if anything goes bad then our drop impl won't double-free them. + let range_to_drop = self.alive.take_suffix(n); + let remaining = n - range_to_drop.len(); + + // SAFETY: These elements are currently initialized, so it's fine to drop them. + unsafe { + let slice = self.data.get_unchecked_mut(range_to_drop); + slice.assume_init_drop(); + } + + NonZero::new(remaining).map_or(Ok(()), Err) + } + + #[inline] + pub(super) fn rfold(&mut self, init: B, f: impl FnMut(B, T) -> B) -> B { + self.try_rfold(init, NeverShortCircuit::wrap_mut_2(f)).0 + } + + #[inline] + pub(super) fn try_rfold(&mut self, init: B, mut f: F) -> R + where + F: FnMut(B, T) -> R, + R: Try, + { + // `alive` is an `IndexRange`, not an arbitrary iterator, so we can + // trust that its `try_rfold` isn't going to do something weird like + // call the fold-er multiple times for the same index. + let data = &mut self.data; + self.alive.try_rfold(init, move |accum, idx| { + // SAFETY: `idx` has been removed from the alive range, so we're not + // going to drop it (even if `f` panics) and thus its ok to give + // out ownership of that item to `f` to handle. + let elem = unsafe { data.get_unchecked(idx).assume_init_read() }; + f(accum, elem) + }) + } +} diff --git a/library/core/src/array/mod.rs b/library/core/src/array/mod.rs index 28329bb090845..efa7bed7c8e17 100644 --- a/library/core/src/array/mod.rs +++ b/library/core/src/array/mod.rs @@ -55,12 +55,16 @@ pub fn repeat(val: T) -> [T; N] { from_trusted_iterator(repeat_n(val, N)) } -/// Creates an array of type [T; N], where each element `T` is the returned value from `cb` -/// using that element's index. +/// Creates an array where each element is produced by calling `f` with +/// that element's index while walking forward through the array. /// -/// # Arguments +/// This is essentially the same as writing +/// ```text +/// [f(0), f(1), f(2), …, f(N - 2), f(N - 1)] +/// ``` +/// and is similar to `(0..i).map(f)`, just for arrays not iterators. /// -/// * `cb`: Callback where the passed argument is the current array index. +/// If `N == 0`, this produces an empty array without ever calling `f`. /// /// # Example /// @@ -82,13 +86,30 @@ pub fn repeat(val: T) -> [T; N] { /// // indexes are: 0 1 2 3 4 /// assert_eq!(bool_arr, [true, false, true, false, true]); /// ``` +/// +/// You can also capture things, for example to create an array full of clones +/// where you can't just use `[item; N]` because it's not `Copy`: +/// ``` +/// # // TBH `array::repeat` would be better for this, but it's not stable yet. +/// let my_string = String::from("Hello"); +/// let clones: [String; 42] = std::array::from_fn(|_| my_string.clone()); +/// assert!(clones.iter().all(|x| *x == my_string)); +/// ``` +/// +/// The array is generated in ascending index order, starting from the front +/// and going towards the back, so you can use closures with mutable state: +/// ``` +/// let mut state = 1; +/// let a = std::array::from_fn(|_| { let x = state; state *= 2; x }); +/// assert_eq!(a, [1, 2, 4, 8, 16, 32]); +/// ``` #[inline] #[stable(feature = "array_from_fn", since = "1.63.0")] -pub fn from_fn(cb: F) -> [T; N] +pub fn from_fn(f: F) -> [T; N] where F: FnMut(usize) -> T, { - try_from_fn(NeverShortCircuit::wrap_mut_1(cb)).0 + try_from_fn(NeverShortCircuit::wrap_mut_1(f)).0 } /// Creates an array `[T; N]` where each fallible array element `T` is returned by the `cb` call. diff --git a/library/core/src/bool.rs b/library/core/src/bool.rs index d525ab425e60d..2016ece007eba 100644 --- a/library/core/src/bool.rs +++ b/library/core/src/bool.rs @@ -61,52 +61,4 @@ impl bool { pub fn then T>(self, f: F) -> Option { if self { Some(f()) } else { None } } - - /// Returns either `true_val` or `false_val` depending on the value of - /// `self`, with a hint to the compiler that `self` is unlikely - /// to be correctly predicted by a CPU’s branch predictor. - /// - /// This method is functionally equivalent to - /// ```ignore (this is just for illustrative purposes) - /// fn select_unpredictable(b: bool, true_val: T, false_val: T) -> T { - /// if b { true_val } else { false_val } - /// } - /// ``` - /// but might generate different assembly. In particular, on platforms with - /// a conditional move or select instruction (like `cmov` on x86 or `csel` - /// on ARM) the optimizer might use these instructions to avoid branches, - /// which can benefit performance if the branch predictor is struggling - /// with predicting `condition`, such as in an implementation of binary - /// search. - /// - /// Note however that this lowering is not guaranteed (on any platform) and - /// should not be relied upon when trying to write constant-time code. Also - /// be aware that this lowering might *decrease* performance if `condition` - /// is well-predictable. It is advisable to perform benchmarks to tell if - /// this function is useful. - /// - /// # Examples - /// - /// Distribute values evenly between two buckets: - /// ``` - /// #![feature(select_unpredictable)] - /// - /// use std::hash::BuildHasher; - /// - /// fn append(hasher: &H, v: i32, bucket_one: &mut Vec, bucket_two: &mut Vec) { - /// let hash = hasher.hash_one(&v); - /// let bucket = (hash % 2 == 0).select_unpredictable(bucket_one, bucket_two); - /// bucket.push(v); - /// } - /// # let hasher = std::collections::hash_map::RandomState::new(); - /// # let mut bucket_one = Vec::new(); - /// # let mut bucket_two = Vec::new(); - /// # append(&hasher, 42, &mut bucket_one, &mut bucket_two); - /// # assert_eq!(bucket_one.len() + bucket_two.len(), 1); - /// ``` - #[inline(always)] - #[unstable(feature = "select_unpredictable", issue = "133962")] - pub fn select_unpredictable(self, true_val: T, false_val: T) -> T { - crate::intrinsics::select_unpredictable(self, true_val, false_val) - } } diff --git a/library/core/src/bstr.rs b/library/core/src/bstr.rs deleted file mode 100644 index ae84fd8adb61c..0000000000000 --- a/library/core/src/bstr.rs +++ /dev/null @@ -1,583 +0,0 @@ -//! The `ByteStr` type and trait implementations. - -use crate::borrow::{Borrow, BorrowMut}; -use crate::cmp::Ordering; -use crate::ops::{ - Deref, DerefMut, DerefPure, Index, IndexMut, Range, RangeFrom, RangeFull, RangeInclusive, - RangeTo, RangeToInclusive, -}; -use crate::{fmt, hash}; - -/// A wrapper for `&[u8]` representing a human-readable string that's conventionally, but not -/// always, UTF-8. -/// -/// Unlike `&str`, this type permits non-UTF-8 contents, making it suitable for user input, -/// non-native filenames (as `Path` only supports native filenames), and other applications that -/// need to round-trip whatever data the user provides. -/// -/// For an owned, growable byte string buffer, use -/// [`ByteString`](../../std/bstr/struct.ByteString.html). -/// -/// `ByteStr` implements `Deref` to `[u8]`, so all methods available on `[u8]` are available on -/// `ByteStr`. -/// -/// # Representation -/// -/// A `&ByteStr` has the same representation as a `&str`. That is, a `&ByteStr` is a wide pointer -/// which includes a pointer to some bytes and a length. -/// -/// # Trait implementations -/// -/// The `ByteStr` type has a number of trait implementations, and in particular, defines equality -/// and comparisons between `&ByteStr`, `&str`, and `&[u8]`, for convenience. -/// -/// The `Debug` implementation for `ByteStr` shows its bytes as a normal string, with invalid UTF-8 -/// presented as hex escape sequences. -/// -/// The `Display` implementation behaves as if the `ByteStr` were first lossily converted to a -/// `str`, with invalid UTF-8 presented as the Unicode replacement character: � -/// -#[unstable(feature = "bstr", issue = "134915")] -#[repr(transparent)] -#[doc(alias = "BStr")] -pub struct ByteStr(pub [u8]); - -impl ByteStr { - /// Creates a `ByteStr` slice from anything that can be converted to a byte slice. - /// - /// This is a zero-cost conversion. - /// - /// # Example - /// - /// You can create a `ByteStr` from a byte array, a byte slice or a string slice: - /// - /// ``` - /// # #![feature(bstr)] - /// # use std::bstr::ByteStr; - /// let a = ByteStr::new(b"abc"); - /// let b = ByteStr::new(&b"abc"[..]); - /// let c = ByteStr::new("abc"); - /// - /// assert_eq!(a, b); - /// assert_eq!(a, c); - /// ``` - #[inline] - #[unstable(feature = "bstr", issue = "134915")] - pub fn new>(bytes: &B) -> &Self { - ByteStr::from_bytes(bytes.as_ref()) - } - - #[doc(hidden)] - #[unstable(feature = "bstr_internals", issue = "none")] - #[inline] - pub fn from_bytes(slice: &[u8]) -> &Self { - // SAFETY: `ByteStr` is a transparent wrapper around `[u8]`, so we can turn a reference to - // the wrapped type into a reference to the wrapper type. - unsafe { &*(slice as *const [u8] as *const Self) } - } - - #[doc(hidden)] - #[unstable(feature = "bstr_internals", issue = "none")] - #[inline] - pub fn from_bytes_mut(slice: &mut [u8]) -> &mut Self { - // SAFETY: `ByteStr` is a transparent wrapper around `[u8]`, so we can turn a reference to - // the wrapped type into a reference to the wrapper type. - unsafe { &mut *(slice as *mut [u8] as *mut Self) } - } - - #[doc(hidden)] - #[unstable(feature = "bstr_internals", issue = "none")] - #[inline] - pub fn as_bytes(&self) -> &[u8] { - &self.0 - } -} - -#[unstable(feature = "bstr", issue = "134915")] -impl Deref for ByteStr { - type Target = [u8]; - - #[inline] - fn deref(&self) -> &[u8] { - &self.0 - } -} - -#[unstable(feature = "bstr", issue = "134915")] -impl DerefMut for ByteStr { - #[inline] - fn deref_mut(&mut self) -> &mut [u8] { - &mut self.0 - } -} - -#[unstable(feature = "deref_pure_trait", issue = "87121")] -unsafe impl DerefPure for ByteStr {} - -#[unstable(feature = "bstr", issue = "134915")] -impl fmt::Debug for ByteStr { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "\"")?; - for chunk in self.utf8_chunks() { - for c in chunk.valid().chars() { - match c { - '\0' => write!(f, "\\0")?, - '\x01'..='\x7f' => write!(f, "{}", (c as u8).escape_ascii())?, - _ => write!(f, "{}", c.escape_debug())?, - } - } - write!(f, "{}", chunk.invalid().escape_ascii())?; - } - write!(f, "\"")?; - Ok(()) - } -} - -#[unstable(feature = "bstr", issue = "134915")] -impl fmt::Display for ByteStr { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - fn fmt_nopad(this: &ByteStr, f: &mut fmt::Formatter<'_>) -> fmt::Result { - for chunk in this.utf8_chunks() { - f.write_str(chunk.valid())?; - if !chunk.invalid().is_empty() { - f.write_str("\u{FFFD}")?; - } - } - Ok(()) - } - - let Some(align) = f.align() else { - return fmt_nopad(self, f); - }; - let nchars: usize = self - .utf8_chunks() - .map(|chunk| { - chunk.valid().chars().count() + if chunk.invalid().is_empty() { 0 } else { 1 } - }) - .sum(); - let padding = f.width().unwrap_or(0).saturating_sub(nchars); - let fill = f.fill(); - let (lpad, rpad) = match align { - fmt::Alignment::Left => (0, padding), - fmt::Alignment::Right => (padding, 0), - fmt::Alignment::Center => { - let half = padding / 2; - (half, half + padding % 2) - } - }; - for _ in 0..lpad { - write!(f, "{fill}")?; - } - fmt_nopad(self, f)?; - for _ in 0..rpad { - write!(f, "{fill}")?; - } - - Ok(()) - } -} - -#[unstable(feature = "bstr", issue = "134915")] -impl AsRef<[u8]> for ByteStr { - #[inline] - fn as_ref(&self) -> &[u8] { - &self.0 - } -} - -#[unstable(feature = "bstr", issue = "134915")] -impl AsRef for ByteStr { - #[inline] - fn as_ref(&self) -> &ByteStr { - self - } -} - -// `impl AsRef for [u8]` omitted to avoid widespread inference failures - -#[unstable(feature = "bstr", issue = "134915")] -impl AsRef for str { - #[inline] - fn as_ref(&self) -> &ByteStr { - ByteStr::new(self) - } -} - -#[unstable(feature = "bstr", issue = "134915")] -impl AsMut<[u8]> for ByteStr { - #[inline] - fn as_mut(&mut self) -> &mut [u8] { - &mut self.0 - } -} - -// `impl AsMut for [u8]` omitted to avoid widespread inference failures - -// `impl Borrow for [u8]` omitted to avoid widespread inference failures - -// `impl Borrow for str` omitted to avoid widespread inference failures - -#[unstable(feature = "bstr", issue = "134915")] -impl Borrow<[u8]> for ByteStr { - #[inline] - fn borrow(&self) -> &[u8] { - &self.0 - } -} - -// `impl BorrowMut for [u8]` omitted to avoid widespread inference failures - -#[unstable(feature = "bstr", issue = "134915")] -impl BorrowMut<[u8]> for ByteStr { - #[inline] - fn borrow_mut(&mut self) -> &mut [u8] { - &mut self.0 - } -} - -#[unstable(feature = "bstr", issue = "134915")] -impl<'a> Default for &'a ByteStr { - fn default() -> Self { - ByteStr::from_bytes(b"") - } -} - -#[unstable(feature = "bstr", issue = "134915")] -impl<'a> Default for &'a mut ByteStr { - fn default() -> Self { - ByteStr::from_bytes_mut(&mut []) - } -} - -// Omitted due to inference failures -// -// #[unstable(feature = "bstr", issue = "134915")] -// impl<'a, const N: usize> From<&'a [u8; N]> for &'a ByteStr { -// #[inline] -// fn from(s: &'a [u8; N]) -> Self { -// ByteStr::from_bytes(s) -// } -// } -// -// #[unstable(feature = "bstr", issue = "134915")] -// impl<'a> From<&'a [u8]> for &'a ByteStr { -// #[inline] -// fn from(s: &'a [u8]) -> Self { -// ByteStr::from_bytes(s) -// } -// } - -// Omitted due to slice-from-array-issue-113238: -// -// #[unstable(feature = "bstr", issue = "134915")] -// impl<'a> From<&'a ByteStr> for &'a [u8] { -// #[inline] -// fn from(s: &'a ByteStr) -> Self { -// &s.0 -// } -// } -// -// #[unstable(feature = "bstr", issue = "134915")] -// impl<'a> From<&'a mut ByteStr> for &'a mut [u8] { -// #[inline] -// fn from(s: &'a mut ByteStr) -> Self { -// &mut s.0 -// } -// } - -// Omitted due to inference failures -// -// #[unstable(feature = "bstr", issue = "134915")] -// impl<'a> From<&'a str> for &'a ByteStr { -// #[inline] -// fn from(s: &'a str) -> Self { -// ByteStr::from_bytes(s.as_bytes()) -// } -// } - -#[unstable(feature = "bstr", issue = "134915")] -impl hash::Hash for ByteStr { - #[inline] - fn hash(&self, state: &mut H) { - self.0.hash(state); - } -} - -#[unstable(feature = "bstr", issue = "134915")] -impl Index for ByteStr { - type Output = u8; - - #[inline] - fn index(&self, idx: usize) -> &u8 { - &self.0[idx] - } -} - -#[unstable(feature = "bstr", issue = "134915")] -impl Index for ByteStr { - type Output = ByteStr; - - #[inline] - fn index(&self, _: RangeFull) -> &ByteStr { - self - } -} - -#[unstable(feature = "bstr", issue = "134915")] -impl Index> for ByteStr { - type Output = ByteStr; - - #[inline] - fn index(&self, r: Range) -> &ByteStr { - ByteStr::from_bytes(&self.0[r]) - } -} - -#[unstable(feature = "bstr", issue = "134915")] -impl Index> for ByteStr { - type Output = ByteStr; - - #[inline] - fn index(&self, r: RangeInclusive) -> &ByteStr { - ByteStr::from_bytes(&self.0[r]) - } -} - -#[unstable(feature = "bstr", issue = "134915")] -impl Index> for ByteStr { - type Output = ByteStr; - - #[inline] - fn index(&self, r: RangeFrom) -> &ByteStr { - ByteStr::from_bytes(&self.0[r]) - } -} - -#[unstable(feature = "bstr", issue = "134915")] -impl Index> for ByteStr { - type Output = ByteStr; - - #[inline] - fn index(&self, r: RangeTo) -> &ByteStr { - ByteStr::from_bytes(&self.0[r]) - } -} - -#[unstable(feature = "bstr", issue = "134915")] -impl Index> for ByteStr { - type Output = ByteStr; - - #[inline] - fn index(&self, r: RangeToInclusive) -> &ByteStr { - ByteStr::from_bytes(&self.0[r]) - } -} - -#[unstable(feature = "bstr", issue = "134915")] -impl IndexMut for ByteStr { - #[inline] - fn index_mut(&mut self, idx: usize) -> &mut u8 { - &mut self.0[idx] - } -} - -#[unstable(feature = "bstr", issue = "134915")] -impl IndexMut for ByteStr { - #[inline] - fn index_mut(&mut self, _: RangeFull) -> &mut ByteStr { - self - } -} - -#[unstable(feature = "bstr", issue = "134915")] -impl IndexMut> for ByteStr { - #[inline] - fn index_mut(&mut self, r: Range) -> &mut ByteStr { - ByteStr::from_bytes_mut(&mut self.0[r]) - } -} - -#[unstable(feature = "bstr", issue = "134915")] -impl IndexMut> for ByteStr { - #[inline] - fn index_mut(&mut self, r: RangeInclusive) -> &mut ByteStr { - ByteStr::from_bytes_mut(&mut self.0[r]) - } -} - -#[unstable(feature = "bstr", issue = "134915")] -impl IndexMut> for ByteStr { - #[inline] - fn index_mut(&mut self, r: RangeFrom) -> &mut ByteStr { - ByteStr::from_bytes_mut(&mut self.0[r]) - } -} - -#[unstable(feature = "bstr", issue = "134915")] -impl IndexMut> for ByteStr { - #[inline] - fn index_mut(&mut self, r: RangeTo) -> &mut ByteStr { - ByteStr::from_bytes_mut(&mut self.0[r]) - } -} - -#[unstable(feature = "bstr", issue = "134915")] -impl IndexMut> for ByteStr { - #[inline] - fn index_mut(&mut self, r: RangeToInclusive) -> &mut ByteStr { - ByteStr::from_bytes_mut(&mut self.0[r]) - } -} - -#[unstable(feature = "bstr", issue = "134915")] -impl Eq for ByteStr {} - -#[unstable(feature = "bstr", issue = "134915")] -impl PartialEq for ByteStr { - #[inline] - fn eq(&self, other: &ByteStr) -> bool { - &self.0 == &other.0 - } -} - -#[doc(hidden)] -#[macro_export] -#[unstable(feature = "bstr_internals", issue = "none")] -macro_rules! impl_partial_eq { - ($lhs:ty, $rhs:ty) => { - #[allow(unused_lifetimes)] - impl<'a> PartialEq<$rhs> for $lhs { - #[inline] - fn eq(&self, other: &$rhs) -> bool { - let other: &[u8] = other.as_ref(); - PartialEq::eq(self.as_bytes(), other) - } - } - - #[allow(unused_lifetimes)] - impl<'a> PartialEq<$lhs> for $rhs { - #[inline] - fn eq(&self, other: &$lhs) -> bool { - let this: &[u8] = self.as_ref(); - PartialEq::eq(this, other.as_bytes()) - } - } - }; -} - -#[doc(hidden)] -#[unstable(feature = "bstr_internals", issue = "none")] -pub use impl_partial_eq; - -#[doc(hidden)] -#[macro_export] -#[unstable(feature = "bstr_internals", issue = "none")] -macro_rules! impl_partial_eq_ord { - ($lhs:ty, $rhs:ty) => { - $crate::bstr::impl_partial_eq!($lhs, $rhs); - - #[allow(unused_lifetimes)] - #[unstable(feature = "bstr", issue = "134915")] - impl<'a> PartialOrd<$rhs> for $lhs { - #[inline] - fn partial_cmp(&self, other: &$rhs) -> Option { - let other: &[u8] = other.as_ref(); - PartialOrd::partial_cmp(self.as_bytes(), other) - } - } - - #[allow(unused_lifetimes)] - #[unstable(feature = "bstr", issue = "134915")] - impl<'a> PartialOrd<$lhs> for $rhs { - #[inline] - fn partial_cmp(&self, other: &$lhs) -> Option { - let this: &[u8] = self.as_ref(); - PartialOrd::partial_cmp(this, other.as_bytes()) - } - } - }; -} - -#[doc(hidden)] -#[unstable(feature = "bstr_internals", issue = "none")] -pub use impl_partial_eq_ord; - -#[doc(hidden)] -#[macro_export] -#[unstable(feature = "bstr_internals", issue = "none")] -macro_rules! impl_partial_eq_n { - ($lhs:ty, $rhs:ty) => { - #[allow(unused_lifetimes)] - #[unstable(feature = "bstr", issue = "134915")] - impl PartialEq<$rhs> for $lhs { - #[inline] - fn eq(&self, other: &$rhs) -> bool { - let other: &[u8] = other.as_ref(); - PartialEq::eq(self.as_bytes(), other) - } - } - - #[allow(unused_lifetimes)] - #[unstable(feature = "bstr", issue = "134915")] - impl PartialEq<$lhs> for $rhs { - #[inline] - fn eq(&self, other: &$lhs) -> bool { - let this: &[u8] = self.as_ref(); - PartialEq::eq(this, other.as_bytes()) - } - } - }; -} - -#[doc(hidden)] -#[unstable(feature = "bstr_internals", issue = "none")] -pub use impl_partial_eq_n; - -// PartialOrd with `[u8]` omitted to avoid inference failures -impl_partial_eq!(ByteStr, [u8]); -// PartialOrd with `&[u8]` omitted to avoid inference failures -impl_partial_eq!(ByteStr, &[u8]); -// PartialOrd with `str` omitted to avoid inference failures -impl_partial_eq!(ByteStr, str); -// PartialOrd with `&str` omitted to avoid inference failures -impl_partial_eq!(ByteStr, &str); -// PartialOrd with `[u8; N]` omitted to avoid inference failures -impl_partial_eq_n!(ByteStr, [u8; N]); -// PartialOrd with `[u8; N]` omitted to avoid inference failures -impl_partial_eq_n!(ByteStr, &[u8; N]); - -#[unstable(feature = "bstr", issue = "134915")] -impl Ord for ByteStr { - #[inline] - fn cmp(&self, other: &ByteStr) -> Ordering { - Ord::cmp(&self.0, &other.0) - } -} - -#[unstable(feature = "bstr", issue = "134915")] -impl PartialOrd for ByteStr { - #[inline] - fn partial_cmp(&self, other: &ByteStr) -> Option { - PartialOrd::partial_cmp(&self.0, &other.0) - } -} - -#[unstable(feature = "bstr", issue = "134915")] -impl<'a> TryFrom<&'a ByteStr> for &'a str { - type Error = crate::str::Utf8Error; - - #[inline] - fn try_from(s: &'a ByteStr) -> Result { - crate::str::from_utf8(&s.0) - } -} - -#[unstable(feature = "bstr", issue = "134915")] -impl<'a> TryFrom<&'a mut ByteStr> for &'a mut str { - type Error = crate::str::Utf8Error; - - #[inline] - fn try_from(s: &'a mut ByteStr) -> Result { - crate::str::from_utf8_mut(&mut s.0) - } -} diff --git a/library/core/src/bstr/mod.rs b/library/core/src/bstr/mod.rs new file mode 100644 index 0000000000000..13127d645a257 --- /dev/null +++ b/library/core/src/bstr/mod.rs @@ -0,0 +1,323 @@ +//! The `ByteStr` type and trait implementations. + +mod traits; + +#[unstable(feature = "bstr_internals", issue = "none")] +pub use traits::{impl_partial_eq, impl_partial_eq_n, impl_partial_eq_ord}; + +use crate::borrow::{Borrow, BorrowMut}; +use crate::fmt; +use crate::ops::{Deref, DerefMut, DerefPure}; + +/// A wrapper for `&[u8]` representing a human-readable string that's conventionally, but not +/// always, UTF-8. +/// +/// Unlike `&str`, this type permits non-UTF-8 contents, making it suitable for user input, +/// non-native filenames (as `Path` only supports native filenames), and other applications that +/// need to round-trip whatever data the user provides. +/// +/// For an owned, growable byte string buffer, use +/// [`ByteString`](../../std/bstr/struct.ByteString.html). +/// +/// `ByteStr` implements `Deref` to `[u8]`, so all methods available on `[u8]` are available on +/// `ByteStr`. +/// +/// # Representation +/// +/// A `&ByteStr` has the same representation as a `&str`. That is, a `&ByteStr` is a wide pointer +/// which includes a pointer to some bytes and a length. +/// +/// # Trait implementations +/// +/// The `ByteStr` type has a number of trait implementations, and in particular, defines equality +/// and comparisons between `&ByteStr`, `&str`, and `&[u8]`, for convenience. +/// +/// The `Debug` implementation for `ByteStr` shows its bytes as a normal string, with invalid UTF-8 +/// presented as hex escape sequences. +/// +/// The `Display` implementation behaves as if the `ByteStr` were first lossily converted to a +/// `str`, with invalid UTF-8 presented as the Unicode replacement character (�). +#[unstable(feature = "bstr", issue = "134915")] +#[repr(transparent)] +#[doc(alias = "BStr")] +pub struct ByteStr(pub [u8]); + +impl ByteStr { + /// Creates a `ByteStr` slice from anything that can be converted to a byte slice. + /// + /// This is a zero-cost conversion. + /// + /// # Example + /// + /// You can create a `ByteStr` from a byte array, a byte slice or a string slice: + /// + /// ``` + /// # #![feature(bstr)] + /// # use std::bstr::ByteStr; + /// let a = ByteStr::new(b"abc"); + /// let b = ByteStr::new(&b"abc"[..]); + /// let c = ByteStr::new("abc"); + /// + /// assert_eq!(a, b); + /// assert_eq!(a, c); + /// ``` + #[inline] + #[unstable(feature = "bstr", issue = "134915")] + pub fn new>(bytes: &B) -> &Self { + ByteStr::from_bytes(bytes.as_ref()) + } + + #[doc(hidden)] + #[unstable(feature = "bstr_internals", issue = "none")] + #[inline] + pub fn from_bytes(slice: &[u8]) -> &Self { + // SAFETY: `ByteStr` is a transparent wrapper around `[u8]`, so we can turn a reference to + // the wrapped type into a reference to the wrapper type. + unsafe { &*(slice as *const [u8] as *const Self) } + } + + #[doc(hidden)] + #[unstable(feature = "bstr_internals", issue = "none")] + #[inline] + pub fn from_bytes_mut(slice: &mut [u8]) -> &mut Self { + // SAFETY: `ByteStr` is a transparent wrapper around `[u8]`, so we can turn a reference to + // the wrapped type into a reference to the wrapper type. + unsafe { &mut *(slice as *mut [u8] as *mut Self) } + } + + #[doc(hidden)] + #[unstable(feature = "bstr_internals", issue = "none")] + #[inline] + pub fn as_bytes(&self) -> &[u8] { + &self.0 + } + + #[doc(hidden)] + #[unstable(feature = "bstr_internals", issue = "none")] + #[inline] + pub fn as_bytes_mut(&mut self) -> &mut [u8] { + &mut self.0 + } +} + +#[unstable(feature = "bstr", issue = "134915")] +impl Deref for ByteStr { + type Target = [u8]; + + #[inline] + fn deref(&self) -> &[u8] { + &self.0 + } +} + +#[unstable(feature = "bstr", issue = "134915")] +impl DerefMut for ByteStr { + #[inline] + fn deref_mut(&mut self) -> &mut [u8] { + &mut self.0 + } +} + +#[unstable(feature = "deref_pure_trait", issue = "87121")] +unsafe impl DerefPure for ByteStr {} + +#[unstable(feature = "bstr", issue = "134915")] +impl fmt::Debug for ByteStr { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "\"")?; + for chunk in self.utf8_chunks() { + for c in chunk.valid().chars() { + match c { + '\0' => write!(f, "\\0")?, + '\x01'..='\x7f' => write!(f, "{}", (c as u8).escape_ascii())?, + _ => write!(f, "{}", c.escape_debug())?, + } + } + write!(f, "{}", chunk.invalid().escape_ascii())?; + } + write!(f, "\"")?; + Ok(()) + } +} + +#[unstable(feature = "bstr", issue = "134915")] +impl fmt::Display for ByteStr { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + fn fmt_nopad(this: &ByteStr, f: &mut fmt::Formatter<'_>) -> fmt::Result { + for chunk in this.utf8_chunks() { + f.write_str(chunk.valid())?; + if !chunk.invalid().is_empty() { + f.write_str("\u{FFFD}")?; + } + } + Ok(()) + } + + let Some(align) = f.align() else { + return fmt_nopad(self, f); + }; + let nchars: usize = self + .utf8_chunks() + .map(|chunk| { + chunk.valid().chars().count() + if chunk.invalid().is_empty() { 0 } else { 1 } + }) + .sum(); + let padding = f.width().unwrap_or(0).saturating_sub(nchars); + let fill = f.fill(); + let (lpad, rpad) = match align { + fmt::Alignment::Left => (0, padding), + fmt::Alignment::Right => (padding, 0), + fmt::Alignment::Center => { + let half = padding / 2; + (half, half + padding % 2) + } + }; + for _ in 0..lpad { + write!(f, "{fill}")?; + } + fmt_nopad(self, f)?; + for _ in 0..rpad { + write!(f, "{fill}")?; + } + + Ok(()) + } +} + +#[unstable(feature = "bstr", issue = "134915")] +impl AsRef<[u8]> for ByteStr { + #[inline] + fn as_ref(&self) -> &[u8] { + &self.0 + } +} + +#[unstable(feature = "bstr", issue = "134915")] +impl AsRef for ByteStr { + #[inline] + fn as_ref(&self) -> &ByteStr { + self + } +} + +// `impl AsRef for [u8]` omitted to avoid widespread inference failures + +#[unstable(feature = "bstr", issue = "134915")] +impl AsRef for str { + #[inline] + fn as_ref(&self) -> &ByteStr { + ByteStr::new(self) + } +} + +#[unstable(feature = "bstr", issue = "134915")] +impl AsMut<[u8]> for ByteStr { + #[inline] + fn as_mut(&mut self) -> &mut [u8] { + &mut self.0 + } +} + +// `impl AsMut for [u8]` omitted to avoid widespread inference failures + +// `impl Borrow for [u8]` omitted to avoid widespread inference failures + +// `impl Borrow for str` omitted to avoid widespread inference failures + +#[unstable(feature = "bstr", issue = "134915")] +impl Borrow<[u8]> for ByteStr { + #[inline] + fn borrow(&self) -> &[u8] { + &self.0 + } +} + +// `impl BorrowMut for [u8]` omitted to avoid widespread inference failures + +#[unstable(feature = "bstr", issue = "134915")] +impl BorrowMut<[u8]> for ByteStr { + #[inline] + fn borrow_mut(&mut self) -> &mut [u8] { + &mut self.0 + } +} + +#[unstable(feature = "bstr", issue = "134915")] +impl<'a> Default for &'a ByteStr { + fn default() -> Self { + ByteStr::from_bytes(b"") + } +} + +#[unstable(feature = "bstr", issue = "134915")] +impl<'a> Default for &'a mut ByteStr { + fn default() -> Self { + ByteStr::from_bytes_mut(&mut []) + } +} + +// Omitted due to inference failures +// +// #[unstable(feature = "bstr", issue = "134915")] +// impl<'a, const N: usize> From<&'a [u8; N]> for &'a ByteStr { +// #[inline] +// fn from(s: &'a [u8; N]) -> Self { +// ByteStr::from_bytes(s) +// } +// } +// +// #[unstable(feature = "bstr", issue = "134915")] +// impl<'a> From<&'a [u8]> for &'a ByteStr { +// #[inline] +// fn from(s: &'a [u8]) -> Self { +// ByteStr::from_bytes(s) +// } +// } + +// Omitted due to slice-from-array-issue-113238: +// +// #[unstable(feature = "bstr", issue = "134915")] +// impl<'a> From<&'a ByteStr> for &'a [u8] { +// #[inline] +// fn from(s: &'a ByteStr) -> Self { +// &s.0 +// } +// } +// +// #[unstable(feature = "bstr", issue = "134915")] +// impl<'a> From<&'a mut ByteStr> for &'a mut [u8] { +// #[inline] +// fn from(s: &'a mut ByteStr) -> Self { +// &mut s.0 +// } +// } + +// Omitted due to inference failures +// +// #[unstable(feature = "bstr", issue = "134915")] +// impl<'a> From<&'a str> for &'a ByteStr { +// #[inline] +// fn from(s: &'a str) -> Self { +// ByteStr::from_bytes(s.as_bytes()) +// } +// } + +#[unstable(feature = "bstr", issue = "134915")] +impl<'a> TryFrom<&'a ByteStr> for &'a str { + type Error = crate::str::Utf8Error; + + #[inline] + fn try_from(s: &'a ByteStr) -> Result { + crate::str::from_utf8(&s.0) + } +} + +#[unstable(feature = "bstr", issue = "134915")] +impl<'a> TryFrom<&'a mut ByteStr> for &'a mut str { + type Error = crate::str::Utf8Error; + + #[inline] + fn try_from(s: &'a mut ByteStr) -> Result { + crate::str::from_utf8_mut(&mut s.0) + } +} diff --git a/library/core/src/bstr/traits.rs b/library/core/src/bstr/traits.rs new file mode 100644 index 0000000000000..ff46bb13ba4eb --- /dev/null +++ b/library/core/src/bstr/traits.rs @@ -0,0 +1,277 @@ +//! Trait implementations for `ByteStr`. + +use crate::bstr::ByteStr; +use crate::cmp::Ordering; +use crate::slice::SliceIndex; +use crate::{hash, ops, range}; + +#[unstable(feature = "bstr", issue = "134915")] +impl Ord for ByteStr { + #[inline] + fn cmp(&self, other: &ByteStr) -> Ordering { + Ord::cmp(&self.0, &other.0) + } +} + +#[unstable(feature = "bstr", issue = "134915")] +impl PartialOrd for ByteStr { + #[inline] + fn partial_cmp(&self, other: &ByteStr) -> Option { + PartialOrd::partial_cmp(&self.0, &other.0) + } +} + +#[unstable(feature = "bstr", issue = "134915")] +impl PartialEq for ByteStr { + #[inline] + fn eq(&self, other: &ByteStr) -> bool { + &self.0 == &other.0 + } +} + +#[unstable(feature = "bstr", issue = "134915")] +impl Eq for ByteStr {} + +#[unstable(feature = "bstr", issue = "134915")] +impl hash::Hash for ByteStr { + #[inline] + fn hash(&self, state: &mut H) { + self.0.hash(state); + } +} + +#[doc(hidden)] +#[macro_export] +#[unstable(feature = "bstr_internals", issue = "none")] +macro_rules! impl_partial_eq { + ($lhs:ty, $rhs:ty) => { + #[allow(unused_lifetimes)] + impl<'a> PartialEq<$rhs> for $lhs { + #[inline] + fn eq(&self, other: &$rhs) -> bool { + let other: &[u8] = other.as_ref(); + PartialEq::eq(self.as_bytes(), other) + } + } + + #[allow(unused_lifetimes)] + impl<'a> PartialEq<$lhs> for $rhs { + #[inline] + fn eq(&self, other: &$lhs) -> bool { + let this: &[u8] = self.as_ref(); + PartialEq::eq(this, other.as_bytes()) + } + } + }; +} + +#[doc(hidden)] +#[unstable(feature = "bstr_internals", issue = "none")] +pub use impl_partial_eq; + +#[doc(hidden)] +#[macro_export] +#[unstable(feature = "bstr_internals", issue = "none")] +macro_rules! impl_partial_eq_ord { + ($lhs:ty, $rhs:ty) => { + $crate::bstr::impl_partial_eq!($lhs, $rhs); + + #[allow(unused_lifetimes)] + #[unstable(feature = "bstr", issue = "134915")] + impl<'a> PartialOrd<$rhs> for $lhs { + #[inline] + fn partial_cmp(&self, other: &$rhs) -> Option { + let other: &[u8] = other.as_ref(); + PartialOrd::partial_cmp(self.as_bytes(), other) + } + } + + #[allow(unused_lifetimes)] + #[unstable(feature = "bstr", issue = "134915")] + impl<'a> PartialOrd<$lhs> for $rhs { + #[inline] + fn partial_cmp(&self, other: &$lhs) -> Option { + let this: &[u8] = self.as_ref(); + PartialOrd::partial_cmp(this, other.as_bytes()) + } + } + }; +} + +#[doc(hidden)] +#[unstable(feature = "bstr_internals", issue = "none")] +pub use impl_partial_eq_ord; + +#[doc(hidden)] +#[macro_export] +#[unstable(feature = "bstr_internals", issue = "none")] +macro_rules! impl_partial_eq_n { + ($lhs:ty, $rhs:ty) => { + #[allow(unused_lifetimes)] + #[unstable(feature = "bstr", issue = "134915")] + impl PartialEq<$rhs> for $lhs { + #[inline] + fn eq(&self, other: &$rhs) -> bool { + let other: &[u8] = other.as_ref(); + PartialEq::eq(self.as_bytes(), other) + } + } + + #[allow(unused_lifetimes)] + #[unstable(feature = "bstr", issue = "134915")] + impl PartialEq<$lhs> for $rhs { + #[inline] + fn eq(&self, other: &$lhs) -> bool { + let this: &[u8] = self.as_ref(); + PartialEq::eq(this, other.as_bytes()) + } + } + }; +} + +#[doc(hidden)] +#[unstable(feature = "bstr_internals", issue = "none")] +pub use impl_partial_eq_n; + +// PartialOrd with `[u8]` omitted to avoid inference failures +impl_partial_eq!(ByteStr, [u8]); +// PartialOrd with `&[u8]` omitted to avoid inference failures +impl_partial_eq!(ByteStr, &[u8]); +// PartialOrd with `str` omitted to avoid inference failures +impl_partial_eq!(ByteStr, str); +// PartialOrd with `&str` omitted to avoid inference failures +impl_partial_eq!(ByteStr, &str); +// PartialOrd with `[u8; N]` omitted to avoid inference failures +impl_partial_eq_n!(ByteStr, [u8; N]); +// PartialOrd with `[u8; N]` omitted to avoid inference failures +impl_partial_eq_n!(ByteStr, &[u8; N]); + +#[unstable(feature = "bstr", issue = "134915")] +impl ops::Index for ByteStr +where + I: SliceIndex, +{ + type Output = I::Output; + + #[inline] + fn index(&self, index: I) -> &I::Output { + index.index(self) + } +} + +#[unstable(feature = "bstr", issue = "134915")] +impl ops::IndexMut for ByteStr +where + I: SliceIndex, +{ + #[inline] + fn index_mut(&mut self, index: I) -> &mut I::Output { + index.index_mut(self) + } +} + +#[unstable(feature = "bstr", issue = "134915")] +unsafe impl SliceIndex for ops::RangeFull { + type Output = ByteStr; + #[inline] + fn get(self, slice: &ByteStr) -> Option<&Self::Output> { + Some(slice) + } + #[inline] + fn get_mut(self, slice: &mut ByteStr) -> Option<&mut Self::Output> { + Some(slice) + } + #[inline] + unsafe fn get_unchecked(self, slice: *const ByteStr) -> *const Self::Output { + slice + } + #[inline] + unsafe fn get_unchecked_mut(self, slice: *mut ByteStr) -> *mut Self::Output { + slice + } + #[inline] + fn index(self, slice: &ByteStr) -> &Self::Output { + slice + } + #[inline] + fn index_mut(self, slice: &mut ByteStr) -> &mut Self::Output { + slice + } +} + +#[unstable(feature = "bstr", issue = "134915")] +unsafe impl SliceIndex for usize { + type Output = u8; + #[inline] + fn get(self, slice: &ByteStr) -> Option<&Self::Output> { + self.get(slice.as_bytes()) + } + #[inline] + fn get_mut(self, slice: &mut ByteStr) -> Option<&mut Self::Output> { + self.get_mut(slice.as_bytes_mut()) + } + #[inline] + unsafe fn get_unchecked(self, slice: *const ByteStr) -> *const Self::Output { + // SAFETY: the caller has to uphold the safety contract for `get_unchecked`. + unsafe { self.get_unchecked(slice as *const [u8]) } + } + #[inline] + unsafe fn get_unchecked_mut(self, slice: *mut ByteStr) -> *mut Self::Output { + // SAFETY: the caller has to uphold the safety contract for `get_unchecked_mut`. + unsafe { self.get_unchecked_mut(slice as *mut [u8]) } + } + #[inline] + fn index(self, slice: &ByteStr) -> &Self::Output { + self.index(slice.as_bytes()) + } + #[inline] + fn index_mut(self, slice: &mut ByteStr) -> &mut Self::Output { + self.index_mut(slice.as_bytes_mut()) + } +} + +macro_rules! impl_slice_index { + ($index:ty) => { + #[unstable(feature = "bstr", issue = "134915")] + unsafe impl SliceIndex for $index { + type Output = ByteStr; + #[inline] + fn get(self, slice: &ByteStr) -> Option<&Self::Output> { + self.get(slice.as_bytes()).map(ByteStr::from_bytes) + } + #[inline] + fn get_mut(self, slice: &mut ByteStr) -> Option<&mut Self::Output> { + self.get_mut(slice.as_bytes_mut()).map(ByteStr::from_bytes_mut) + } + #[inline] + unsafe fn get_unchecked(self, slice: *const ByteStr) -> *const Self::Output { + // SAFETY: the caller has to uphold the safety contract for `get_unchecked`. + unsafe { self.get_unchecked(slice as *const [u8]) as *const ByteStr } + } + #[inline] + unsafe fn get_unchecked_mut(self, slice: *mut ByteStr) -> *mut Self::Output { + // SAFETY: the caller has to uphold the safety contract for `get_unchecked_mut`. + unsafe { self.get_unchecked_mut(slice as *mut [u8]) as *mut ByteStr } + } + #[inline] + fn index(self, slice: &ByteStr) -> &Self::Output { + ByteStr::from_bytes(self.index(slice.as_bytes())) + } + #[inline] + fn index_mut(self, slice: &mut ByteStr) -> &mut Self::Output { + ByteStr::from_bytes_mut(self.index_mut(slice.as_bytes_mut())) + } + } + }; +} + +impl_slice_index!(ops::IndexRange); +impl_slice_index!(ops::Range); +impl_slice_index!(range::Range); +impl_slice_index!(ops::RangeTo); +impl_slice_index!(ops::RangeFrom); +impl_slice_index!(range::RangeFrom); +impl_slice_index!(ops::RangeInclusive); +impl_slice_index!(range::RangeInclusive); +impl_slice_index!(ops::RangeToInclusive); +impl_slice_index!((ops::Bound, ops::Bound)); diff --git a/library/core/src/cell.rs b/library/core/src/cell.rs index e789601a409af..c7657350a0d9a 100644 --- a/library/core/src/cell.rs +++ b/library/core/src/cell.rs @@ -544,31 +544,22 @@ impl Cell { unsafe { *self.value.get() } } - /// Updates the contained value using a function and returns the new value. + /// Updates the contained value using a function. /// /// # Examples /// /// ``` - /// #![feature(cell_update)] - /// /// use std::cell::Cell; /// /// let c = Cell::new(5); - /// let new = c.update(|x| x + 1); - /// - /// assert_eq!(new, 6); + /// c.update(|x| x + 1); /// assert_eq!(c.get(), 6); /// ``` #[inline] - #[unstable(feature = "cell_update", issue = "50186")] - pub fn update(&self, f: F) -> T - where - F: FnOnce(T) -> T, - { + #[stable(feature = "cell_update", since = "CURRENT_RUSTC_VERSION")] + pub fn update(&self, f: impl FnOnce(T) -> T) { let old = self.get(); - let new = f(old); - self.set(new); - new + self.set(f(old)); } } @@ -1163,7 +1154,9 @@ impl RefCell { /// Since this method borrows `RefCell` mutably, it is statically guaranteed /// that no borrows to the underlying data exist. The dynamic checks inherent /// in [`borrow_mut`] and most other methods of `RefCell` are therefore - /// unnecessary. + /// unnecessary. Note that this method does not reset the borrowing state if borrows were previously leaked + /// (e.g., via [`forget()`] on a [`Ref`] or [`RefMut`]). For that purpose, + /// consider using the unstable [`undo_leak`] method. /// /// This method can only be called if `RefCell` can be mutably borrowed, /// which in general is only the case directly after the `RefCell` has @@ -1174,6 +1167,8 @@ impl RefCell { /// Use [`borrow_mut`] to get mutable access to the underlying data then. /// /// [`borrow_mut`]: RefCell::borrow_mut() + /// [`forget()`]: mem::forget + /// [`undo_leak`]: RefCell::undo_leak() /// /// # Examples /// diff --git a/library/core/src/char/convert.rs b/library/core/src/char/convert.rs index ac808038f8900..d820965a7463e 100644 --- a/library/core/src/char/convert.rs +++ b/library/core/src/char/convert.rs @@ -21,6 +21,7 @@ pub(super) const fn from_u32(i: u32) -> Option { /// Converts a `u32` to a `char`, ignoring validity. See [`char::from_u32_unchecked`]. #[inline] #[must_use] +#[cfg_attr(not(bootstrap), allow(unnecessary_transmutes))] pub(super) const unsafe fn from_u32_unchecked(i: u32) -> char { // SAFETY: the caller must guarantee that `i` is a valid char value. unsafe { @@ -221,6 +222,7 @@ impl FromStr for char { } #[inline] +#[cfg_attr(not(bootstrap), allow(unnecessary_transmutes))] const fn char_try_from_u32(i: u32) -> Result { // This is an optimized version of the check // (i > MAX as u32) || (i >= 0xD800 && i <= 0xDFFF), diff --git a/library/core/src/char/methods.rs b/library/core/src/char/methods.rs index fa584953bed5c..042925a352f39 100644 --- a/library/core/src/char/methods.rs +++ b/library/core/src/char/methods.rs @@ -337,7 +337,7 @@ impl char { /// '1'.is_digit(1); /// ``` #[stable(feature = "rust1", since = "1.0.0")] - #[rustc_const_stable(feature = "const_char_classify", since = "CURRENT_RUSTC_VERSION")] + #[rustc_const_stable(feature = "const_char_classify", since = "1.87.0")] #[inline] pub const fn is_digit(self, radix: u32) -> bool { self.to_digit(radix).is_some() @@ -886,7 +886,7 @@ impl char { /// ``` #[must_use] #[stable(feature = "rust1", since = "1.0.0")] - #[rustc_const_stable(feature = "const_char_classify", since = "CURRENT_RUSTC_VERSION")] + #[rustc_const_stable(feature = "const_char_classify", since = "1.87.0")] #[inline] pub const fn is_whitespace(self) -> bool { match self { @@ -1806,39 +1806,71 @@ const fn len_utf16(code: u32) -> usize { #[inline] pub const fn encode_utf8_raw(code: u32, dst: &mut [u8]) -> &mut [u8] { let len = len_utf8(code); - match (len, &mut *dst) { - (1, [a, ..]) => { - *a = code as u8; - } - (2, [a, b, ..]) => { - *a = (code >> 6 & 0x1F) as u8 | TAG_TWO_B; - *b = (code & 0x3F) as u8 | TAG_CONT; - } - (3, [a, b, c, ..]) => { - *a = (code >> 12 & 0x0F) as u8 | TAG_THREE_B; - *b = (code >> 6 & 0x3F) as u8 | TAG_CONT; - *c = (code & 0x3F) as u8 | TAG_CONT; - } - (4, [a, b, c, d, ..]) => { - *a = (code >> 18 & 0x07) as u8 | TAG_FOUR_B; - *b = (code >> 12 & 0x3F) as u8 | TAG_CONT; - *c = (code >> 6 & 0x3F) as u8 | TAG_CONT; - *d = (code & 0x3F) as u8 | TAG_CONT; - } - _ => { - const_panic!( - "encode_utf8: buffer does not have enough bytes to encode code point", - "encode_utf8: need {len} bytes to encode U+{code:04X} but buffer has just {dst_len}", - code: u32 = code, - len: usize = len, - dst_len: usize = dst.len(), - ) - } - }; + if dst.len() < len { + const_panic!( + "encode_utf8: buffer does not have enough bytes to encode code point", + "encode_utf8: need {len} bytes to encode U+{code:04X} but buffer has just {dst_len}", + code: u32 = code, + len: usize = len, + dst_len: usize = dst.len(), + ); + } + + // SAFETY: `dst` is checked to be at least the length needed to encode the codepoint. + unsafe { encode_utf8_raw_unchecked(code, dst.as_mut_ptr()) }; + // SAFETY: `<&mut [u8]>::as_mut_ptr` is guaranteed to return a valid pointer and `len` has been tested to be within bounds. unsafe { slice::from_raw_parts_mut(dst.as_mut_ptr(), len) } } +/// Encodes a raw `u32` value as UTF-8 into the byte buffer pointed to by `dst`. +/// +/// Unlike `char::encode_utf8`, this method also handles codepoints in the surrogate range. +/// (Creating a `char` in the surrogate range is UB.) +/// The result is valid [generalized UTF-8] but not valid UTF-8. +/// +/// [generalized UTF-8]: https://simonsapin.github.io/wtf-8/#generalized-utf8 +/// +/// # Safety +/// +/// The behavior is undefined if the buffer pointed to by `dst` is not +/// large enough to hold the encoded codepoint. A buffer of length four +/// is large enough to encode any `char`. +/// +/// For a safe version of this function, see the [`encode_utf8_raw`] function. +#[unstable(feature = "char_internals", reason = "exposed only for libstd", issue = "none")] +#[doc(hidden)] +#[inline] +pub const unsafe fn encode_utf8_raw_unchecked(code: u32, dst: *mut u8) { + let len = len_utf8(code); + // SAFETY: The caller must guarantee that the buffer pointed to by `dst` + // is at least `len` bytes long. + unsafe { + match len { + 1 => { + *dst = code as u8; + } + 2 => { + *dst = (code >> 6 & 0x1F) as u8 | TAG_TWO_B; + *dst.add(1) = (code & 0x3F) as u8 | TAG_CONT; + } + 3 => { + *dst = (code >> 12 & 0x0F) as u8 | TAG_THREE_B; + *dst.add(1) = (code >> 6 & 0x3F) as u8 | TAG_CONT; + *dst.add(2) = (code & 0x3F) as u8 | TAG_CONT; + } + 4 => { + *dst = (code >> 18 & 0x07) as u8 | TAG_FOUR_B; + *dst.add(1) = (code >> 12 & 0x3F) as u8 | TAG_CONT; + *dst.add(2) = (code >> 6 & 0x3F) as u8 | TAG_CONT; + *dst.add(3) = (code & 0x3F) as u8 | TAG_CONT; + } + // SAFETY: `char` always takes between 1 and 4 bytes to encode in UTF-8. + _ => crate::hint::unreachable_unchecked(), + } + } +} + /// Encodes a raw `u32` value as native endian UTF-16 into the provided `u16` buffer, /// and then returns the subslice of the buffer that contains the encoded character. /// diff --git a/library/core/src/char/mod.rs b/library/core/src/char/mod.rs index 088c709f1a2af..5b9f0e2143f5d 100644 --- a/library/core/src/char/mod.rs +++ b/library/core/src/char/mod.rs @@ -38,7 +38,7 @@ pub use self::decode::{DecodeUtf16, DecodeUtf16Error}; #[unstable(feature = "char_internals", reason = "exposed only for libstd", issue = "none")] pub use self::methods::encode_utf16_raw; // perma-unstable #[unstable(feature = "char_internals", reason = "exposed only for libstd", issue = "none")] -pub use self::methods::encode_utf8_raw; // perma-unstable +pub use self::methods::{encode_utf8_raw, encode_utf8_raw_unchecked}; // perma-unstable #[rustfmt::skip] use crate::ascii; diff --git a/library/core/src/clone.rs b/library/core/src/clone.rs index e0ac0bfc5289f..c237ac84cf407 100644 --- a/library/core/src/clone.rs +++ b/library/core/src/clone.rs @@ -216,7 +216,7 @@ pub macro Clone($item:item) { /// Use closures allow captured values to be automatically used. /// This is similar to have a closure that you would call `.use` over each captured value. #[unstable(feature = "ergonomic_clones", issue = "132290")] -#[cfg_attr(not(bootstrap), lang = "use_cloned")] +#[lang = "use_cloned"] pub trait UseCloned: Clone { // Empty. } @@ -427,7 +427,7 @@ pub unsafe trait CloneToUninit { /// read or dropped, because even if it was previously valid, it may have been partially /// overwritten. /// - /// The caller may wish to to take care to deallocate the allocation pointed to by `dest`, + /// The caller may wish to take care to deallocate the allocation pointed to by `dest`, /// if applicable, to avoid a memory leak (but this is not a requirement). /// /// Implementors should avoid leaking values by, upon unwinding, dropping all component values diff --git a/library/core/src/cmp.rs b/library/core/src/cmp.rs index 0dc2cc72e06cc..c315131f4136c 100644 --- a/library/core/src/cmp.rs +++ b/library/core/src/cmp.rs @@ -2053,6 +2053,22 @@ mod impls { fn ge(&self, other: &&B) -> bool { PartialOrd::ge(*self, *other) } + #[inline] + fn __chaining_lt(&self, other: &&B) -> ControlFlow { + PartialOrd::__chaining_lt(*self, *other) + } + #[inline] + fn __chaining_le(&self, other: &&B) -> ControlFlow { + PartialOrd::__chaining_le(*self, *other) + } + #[inline] + fn __chaining_gt(&self, other: &&B) -> ControlFlow { + PartialOrd::__chaining_gt(*self, *other) + } + #[inline] + fn __chaining_ge(&self, other: &&B) -> ControlFlow { + PartialOrd::__chaining_ge(*self, *other) + } } #[stable(feature = "rust1", since = "1.0.0")] impl Ord for &A @@ -2108,6 +2124,22 @@ mod impls { fn ge(&self, other: &&mut B) -> bool { PartialOrd::ge(*self, *other) } + #[inline] + fn __chaining_lt(&self, other: &&mut B) -> ControlFlow { + PartialOrd::__chaining_lt(*self, *other) + } + #[inline] + fn __chaining_le(&self, other: &&mut B) -> ControlFlow { + PartialOrd::__chaining_le(*self, *other) + } + #[inline] + fn __chaining_gt(&self, other: &&mut B) -> ControlFlow { + PartialOrd::__chaining_gt(*self, *other) + } + #[inline] + fn __chaining_ge(&self, other: &&mut B) -> ControlFlow { + PartialOrd::__chaining_ge(*self, *other) + } } #[stable(feature = "rust1", since = "1.0.0")] impl Ord for &mut A diff --git a/library/core/src/contracts.rs b/library/core/src/contracts.rs index 8b79a3a7eba86..495f84bce4bf2 100644 --- a/library/core/src/contracts.rs +++ b/library/core/src/contracts.rs @@ -2,19 +2,23 @@ pub use crate::macros::builtin::{contracts_ensures as ensures, contracts_requires as requires}; -/// Emitted by rustc as a desugaring of `#[ensures(PRED)] fn foo() -> R { ... [return R;] ... }` -/// into: `fn foo() { let _check = build_check_ensures(|ret| PRED) ... [return _check(R);] ... }` -/// (including the implicit return of the tail expression, if any). +/// This is an identity function used as part of the desugaring of the `#[ensures]` attribute. +/// +/// This is an existing hack to allow users to omit the type of the return value in their ensures +/// attribute. +/// +/// Ideally, rustc should be able to generate the type annotation. +/// The existing lowering logic makes it rather hard to add the explicit type annotation, +/// while the function call is fairly straight forward. #[unstable(feature = "contracts_internals", issue = "128044" /* compiler-team#759 */)] +// Similar to `contract_check_requires`, we need to use the user-facing +// `contracts` feature rather than the perma-unstable `contracts_internals`. +// Const-checking doesn't honor allow_internal_unstable logic used by contract expansion. +#[rustc_const_unstable(feature = "contracts", issue = "128044")] #[lang = "contract_build_check_ensures"] -#[track_caller] -pub fn build_check_ensures(cond: C) -> impl (Fn(Ret) -> Ret) + Copy +pub const fn build_check_ensures(cond: C) -> C where - C: for<'a> Fn(&'a Ret) -> bool + Copy + 'static, + C: Fn(&Ret) -> bool + Copy + 'static, { - #[track_caller] - move |ret| { - crate::intrinsics::contract_check_ensures(&ret, cond); - ret - } + cond } diff --git a/library/core/src/ffi/c_str.rs b/library/core/src/ffi/c_str.rs index 080c0cef53304..ac07c645c0195 100644 --- a/library/core/src/ffi/c_str.rs +++ b/library/core/src/ffi/c_str.rs @@ -79,8 +79,9 @@ use crate::{fmt, ops, slice, str}; /// /// fn my_string_safe() -> String { /// let cstr = unsafe { CStr::from_ptr(my_string()) }; -/// // Get copy-on-write Cow<'_, str>, then guarantee a freshly-owned String allocation -/// String::from_utf8_lossy(cstr.to_bytes()).to_string() +/// // Get a copy-on-write Cow<'_, str>, then extract the +/// // allocated String (or allocate a fresh one if needed). +/// cstr.to_string_lossy().into_owned() /// } /// /// println!("string: {}", my_string_safe()); @@ -150,7 +151,6 @@ impl Error for FromBytesWithNulError { /// within the slice. /// /// This error is created by the [`CStr::from_bytes_until_nul`] method. -/// #[derive(Clone, PartialEq, Eq, Debug)] #[stable(feature = "cstr_from_bytes_until_nul", since = "1.69.0")] pub struct FromBytesUntilNulError(()); diff --git a/library/core/src/ffi/mod.rs b/library/core/src/ffi/mod.rs index 9bae5fd466a18..c9c73a25d899e 100644 --- a/library/core/src/ffi/mod.rs +++ b/library/core/src/ffi/mod.rs @@ -20,7 +20,7 @@ pub use self::c_str::FromBytesUntilNulError; pub use self::c_str::FromBytesWithNulError; use crate::fmt; -#[unstable(feature = "c_str_module", issue = "112134")] +#[stable(feature = "c_str_module", since = "CURRENT_RUSTC_VERSION")] pub mod c_str; #[unstable( diff --git a/library/core/src/fmt/mod.rs b/library/core/src/fmt/mod.rs index 30fd2d7815f51..4f7f8a5b84dd5 100644 --- a/library/core/src/fmt/mod.rs +++ b/library/core/src/fmt/mod.rs @@ -7,7 +7,7 @@ use crate::char::{EscapeDebugExtArgs, MAX_LEN_UTF8}; use crate::marker::PhantomData; use crate::num::fmt as numfmt; use crate::ops::Deref; -use crate::{iter, mem, result, str}; +use crate::{iter, result, str}; mod builders; #[cfg(not(no_fp_fmt_parse))] @@ -622,44 +622,9 @@ pub struct Arguments<'a> { args: &'a [rt::Argument<'a>], } -/// Used by the format_args!() macro to create a fmt::Arguments object. #[doc(hidden)] #[unstable(feature = "fmt_internals", issue = "none")] impl<'a> Arguments<'a> { - #[inline] - pub const fn new_const(pieces: &'a [&'static str; N]) -> Self { - const { assert!(N <= 1) }; - Arguments { pieces, fmt: None, args: &[] } - } - - /// When using the format_args!() macro, this function is used to generate the - /// Arguments structure. - #[inline] - pub const fn new_v1( - pieces: &'a [&'static str; P], - args: &'a [rt::Argument<'a>; A], - ) -> Arguments<'a> { - const { assert!(P >= A && P <= A + 1, "invalid args") } - Arguments { pieces, fmt: None, args } - } - - /// Specifies nonstandard formatting parameters. - /// - /// An `rt::UnsafeArg` is required because the following invariants must be held - /// in order for this function to be safe: - /// 1. The `pieces` slice must be at least as long as `fmt`. - /// 2. Every `rt::Placeholder::position` value within `fmt` must be a valid index of `args`. - /// 3. Every `rt::Count::Param` within `fmt` must contain a valid index of `args`. - #[inline] - pub const fn new_v1_formatted( - pieces: &'a [&'static str], - args: &'a [rt::Argument<'a>], - fmt: &'a [rt::Placeholder], - _unsafe_arg: rt::UnsafeArg, - ) -> Arguments<'a> { - Arguments { pieces, fmt: Some(fmt), args } - } - /// Estimates the length of the formatted text. /// /// This is intended to be used for setting initial `String` capacity @@ -743,6 +708,7 @@ impl<'a> Arguments<'a> { #[unstable(feature = "fmt_internals", reason = "internal to standard library", issue = "none")] #[must_use] #[inline] + #[doc(hidden)] pub fn as_statically_known_str(&self) -> Option<&'static str> { let s = self.as_str(); if core::intrinsics::is_val_statically_known(s.is_some()) { s } else { None } @@ -1514,19 +1480,6 @@ unsafe fn run(fmt: &mut Formatter<'_>, arg: &rt::Placeholder, args: &[rt::Argume // which guarantees the indexes are always within bounds. unsafe { (getcount(args, &arg.width), getcount(args, &arg.precision)) }; - #[cfg(bootstrap)] - let options = - *FormattingOptions { flags: flags::ALWAYS_SET | arg.flags << 21, width: 0, precision: 0 } - .align(match arg.align { - rt::Alignment::Left => Some(Alignment::Left), - rt::Alignment::Right => Some(Alignment::Right), - rt::Alignment::Center => Some(Alignment::Center), - rt::Alignment::Unknown => None, - }) - .fill(arg.fill) - .width(width) - .precision(precision); - #[cfg(not(bootstrap))] let options = FormattingOptions { flags: arg.flags, width, precision }; // Extract the correct argument @@ -1543,21 +1496,6 @@ unsafe fn run(fmt: &mut Formatter<'_>, arg: &rt::Placeholder, args: &[rt::Argume unsafe { value.fmt(fmt) } } -#[cfg(bootstrap)] -unsafe fn getcount(args: &[rt::Argument<'_>], cnt: &rt::Count) -> Option { - match *cnt { - rt::Count::Is(n) => Some(n as u16), - rt::Count::Implied => None, - rt::Count::Param(i) => { - debug_assert!(i < args.len()); - // SAFETY: cnt and args come from the same Arguments, - // which guarantees this index is always within bounds. - unsafe { args.get_unchecked(i).as_u16() } - } - } -} - -#[cfg(not(bootstrap))] unsafe fn getcount(args: &[rt::Argument<'_>], cnt: &rt::Count) -> u16 { match *cnt { rt::Count::Is(n) => n, @@ -3016,6 +2954,6 @@ impl Debug for SyncUnsafeCell { } } -// If you expected tests to be here, look instead at the core/tests/fmt.rs file, +// If you expected tests to be here, look instead at coretests/tests/fmt/; // it's a lot easier than creating all of the rt::Piece structures here. -// There are also tests in the alloc crate, for those that need allocations. +// There are also tests in alloctests/tests/fmt.rs, for those that need allocations. diff --git a/library/core/src/fmt/rt.rs b/library/core/src/fmt/rt.rs index 0b04ebccae2bd..c2a8a39bcac8f 100644 --- a/library/core/src/fmt/rt.rs +++ b/library/core/src/fmt/rt.rs @@ -1,7 +1,10 @@ #![allow(missing_debug_implementations)] #![unstable(feature = "fmt_internals", reason = "internal to format_args!", issue = "none")] -//! These are the lang items used by format_args!(). +//! All types and methods in this file are used by the compiler in +//! the expansion/lowering of format_args!(). +//! +//! Do not modify them without understanding the consequences for the format_args!() macro. use super::*; use crate::hint::unreachable_unchecked; @@ -11,10 +14,6 @@ use crate::ptr::NonNull; #[derive(Copy, Clone)] pub struct Placeholder { pub position: usize, - #[cfg(bootstrap)] - pub fill: char, - #[cfg(bootstrap)] - pub align: Alignment, pub flags: u32, pub precision: Count, pub width: Count, @@ -23,38 +22,17 @@ pub struct Placeholder { #[cfg(bootstrap)] impl Placeholder { #[inline] - pub const fn new( - position: usize, - fill: char, - align: Alignment, - flags: u32, - precision: Count, - width: Count, - ) -> Self { - Self { position, fill, align, flags, precision, width } + pub const fn new(position: usize, flags: u32, precision: Count, width: Count) -> Self { + Self { position, flags, precision, width } } } -#[cfg(bootstrap)] -#[lang = "format_alignment"] -#[derive(Copy, Clone, PartialEq, Eq)] -pub enum Alignment { - Left, - Right, - Center, - Unknown, -} - /// Used by [width](https://doc.rust-lang.org/std/fmt/#width) /// and [precision](https://doc.rust-lang.org/std/fmt/#precision) specifiers. #[lang = "format_count"] #[derive(Copy, Clone)] pub enum Count { /// Specified with a literal number, stores the value - #[cfg(bootstrap)] - Is(usize), - /// Specified with a literal number, stores the value - #[cfg(not(bootstrap))] Is(u16), /// Specified using `$` and `*` syntaxes, stores the index into `args` Param(usize), @@ -90,61 +68,91 @@ pub struct Argument<'a> { ty: ArgumentType<'a>, } -#[rustc_diagnostic_item = "ArgumentMethods"] -impl Argument<'_> { - #[inline] - const fn new<'a, T>(x: &'a T, f: fn(&T, &mut Formatter<'_>) -> Result) -> Argument<'a> { +macro_rules! argument_new { + ($t:ty, $x:expr, $f:expr) => { Argument { // INVARIANT: this creates an `ArgumentType<'a>` from a `&'a T` and // a `fn(&T, ...)`, so the invariant is maintained. ty: ArgumentType::Placeholder { - value: NonNull::from_ref(x).cast(), - // SAFETY: function pointers always have the same layout. - formatter: unsafe { mem::transmute(f) }, + value: NonNull::<$t>::from_ref($x).cast(), + // The Rust ABI considers all pointers to be equivalent, so transmuting a fn(&T) to + // fn(NonNull<()>) and calling it with a NonNull<()> that points at a T is allowed. + // However, the CFI sanitizer does not allow this, and triggers a crash when it + // happens. + // + // To avoid this crash, we use a helper function when CFI is enabled. To avoid the + // cost of this helper function (mainly code-size) when it is not needed, we + // transmute the function pointer otherwise. + // + // This is similar to what the Rust compiler does internally with vtables when KCFI + // is enabled, where it generates trampoline functions that only serve to adjust the + // expected type of the argument. `ArgumentType::Placeholder` is a bit like a + // manually constructed trait object, so it is not surprising that the same approach + // has to be applied here as well. + // + // It is still considered problematic (from the Rust side) that CFI rejects entirely + // legal Rust programs, so we do not consider anything done here a stable guarantee, + // but meanwhile we carry this work-around to keep Rust compatible with CFI and + // KCFI. + #[cfg(not(any(sanitize = "cfi", sanitize = "kcfi")))] + formatter: { + let f: fn(&$t, &mut Formatter<'_>) -> Result = $f; + // SAFETY: This is only called with `value`, which has the right type. + unsafe { core::mem::transmute(f) } + }, + #[cfg(any(sanitize = "cfi", sanitize = "kcfi"))] + formatter: |ptr: NonNull<()>, fmt: &mut Formatter<'_>| { + let func = $f; + // SAFETY: This is the same type as the `value` field. + let r = unsafe { ptr.cast::<$t>().as_ref() }; + (func)(r, fmt) + }, _lifetime: PhantomData, }, } - } + }; +} +impl Argument<'_> { #[inline] - pub fn new_display(x: &T) -> Argument<'_> { - Self::new(x, Display::fmt) + pub const fn new_display(x: &T) -> Argument<'_> { + argument_new!(T, x, ::fmt) } #[inline] - pub fn new_debug(x: &T) -> Argument<'_> { - Self::new(x, Debug::fmt) + pub const fn new_debug(x: &T) -> Argument<'_> { + argument_new!(T, x, ::fmt) } #[inline] - pub fn new_debug_noop(x: &T) -> Argument<'_> { - Self::new(x, |_, _| Ok(())) + pub const fn new_debug_noop(x: &T) -> Argument<'_> { + argument_new!(T, x, |_: &T, _| Ok(())) } #[inline] - pub fn new_octal(x: &T) -> Argument<'_> { - Self::new(x, Octal::fmt) + pub const fn new_octal(x: &T) -> Argument<'_> { + argument_new!(T, x, ::fmt) } #[inline] - pub fn new_lower_hex(x: &T) -> Argument<'_> { - Self::new(x, LowerHex::fmt) + pub const fn new_lower_hex(x: &T) -> Argument<'_> { + argument_new!(T, x, ::fmt) } #[inline] - pub fn new_upper_hex(x: &T) -> Argument<'_> { - Self::new(x, UpperHex::fmt) + pub const fn new_upper_hex(x: &T) -> Argument<'_> { + argument_new!(T, x, ::fmt) } #[inline] - pub fn new_pointer(x: &T) -> Argument<'_> { - Self::new(x, Pointer::fmt) + pub const fn new_pointer(x: &T) -> Argument<'_> { + argument_new!(T, x, ::fmt) } #[inline] - pub fn new_binary(x: &T) -> Argument<'_> { - Self::new(x, Binary::fmt) + pub const fn new_binary(x: &T) -> Argument<'_> { + argument_new!(T, x, ::fmt) } #[inline] - pub fn new_lower_exp(x: &T) -> Argument<'_> { - Self::new(x, LowerExp::fmt) + pub const fn new_lower_exp(x: &T) -> Argument<'_> { + argument_new!(T, x, ::fmt) } #[inline] - pub fn new_upper_exp(x: &T) -> Argument<'_> { - Self::new(x, UpperExp::fmt) + pub const fn new_upper_exp(x: &T) -> Argument<'_> { + argument_new!(T, x, ::fmt) } #[inline] #[track_caller] @@ -160,11 +168,6 @@ impl Argument<'_> { /// # Safety /// /// This argument must actually be a placeholder argument. - /// - // FIXME: Transmuting formatter in new and indirectly branching to/calling - // it here is an explicit CFI violation. - #[allow(inline_no_sanitize)] - #[no_sanitize(cfi, kcfi)] #[inline] pub(super) unsafe fn fmt(&self, f: &mut Formatter<'_>) -> Result { match self.ty { @@ -221,3 +224,57 @@ impl UnsafeArg { Self { _private: () } } } + +/// Used by the format_args!() macro to create a fmt::Arguments object. +#[doc(hidden)] +#[unstable(feature = "fmt_internals", issue = "none")] +#[rustc_diagnostic_item = "FmtArgumentsNew"] +impl<'a> Arguments<'a> { + #[inline] + pub const fn new_const(pieces: &'a [&'static str; N]) -> Self { + const { assert!(N <= 1) }; + Arguments { pieces, fmt: None, args: &[] } + } + + /// When using the format_args!() macro, this function is used to generate the + /// Arguments structure. + /// + /// This function should _not_ be const, to make sure we don't accept + /// format_args!() and panic!() with arguments in const, even when not evaluated: + /// + /// ```compile_fail,E0015 + /// const _: () = if false { panic!("a {}", "a") }; + /// ``` + #[inline] + pub fn new_v1( + pieces: &'a [&'static str; P], + args: &'a [rt::Argument<'a>; A], + ) -> Arguments<'a> { + const { assert!(P >= A && P <= A + 1, "invalid args") } + Arguments { pieces, fmt: None, args } + } + + /// Specifies nonstandard formatting parameters. + /// + /// An `rt::UnsafeArg` is required because the following invariants must be held + /// in order for this function to be safe: + /// 1. The `pieces` slice must be at least as long as `fmt`. + /// 2. Every `rt::Placeholder::position` value within `fmt` must be a valid index of `args`. + /// 3. Every `rt::Count::Param` within `fmt` must contain a valid index of `args`. + /// + /// This function should _not_ be const, to make sure we don't accept + /// format_args!() and panic!() with arguments in const, even when not evaluated: + /// + /// ```compile_fail,E0015 + /// const _: () = if false { panic!("a {:1}", "a") }; + /// ``` + #[inline] + pub fn new_v1_formatted( + pieces: &'a [&'static str], + args: &'a [rt::Argument<'a>], + fmt: &'a [rt::Placeholder], + _unsafe_arg: rt::UnsafeArg, + ) -> Arguments<'a> { + Arguments { pieces, fmt: Some(fmt), args } + } +} diff --git a/library/core/src/future/async_drop.rs b/library/core/src/future/async_drop.rs index f1778a4d782af..fc4f95a98b42d 100644 --- a/library/core/src/future/async_drop.rs +++ b/library/core/src/future/async_drop.rs @@ -1,284 +1,51 @@ #![unstable(feature = "async_drop", issue = "126482")] -use crate::fmt; -use crate::future::{Future, IntoFuture}; -use crate::intrinsics::discriminant_value; -use crate::marker::{DiscriminantKind, PhantomPinned}; -use crate::mem::MaybeUninit; -use crate::pin::Pin; -use crate::task::{Context, Poll, ready}; - -/// Asynchronously drops a value by running `AsyncDrop::async_drop` -/// on a value and its fields recursively. -#[unstable(feature = "async_drop", issue = "126482")] -pub fn async_drop(value: T) -> AsyncDropOwning { - AsyncDropOwning { value: MaybeUninit::new(value), dtor: None, _pinned: PhantomPinned } -} - -/// A future returned by the [`async_drop`]. -#[unstable(feature = "async_drop", issue = "126482")] -pub struct AsyncDropOwning { - value: MaybeUninit, - dtor: Option>, - _pinned: PhantomPinned, -} - -#[unstable(feature = "async_drop", issue = "126482")] -impl fmt::Debug for AsyncDropOwning { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.debug_struct("AsyncDropOwning").finish_non_exhaustive() - } -} - -#[unstable(feature = "async_drop", issue = "126482")] -impl Future for AsyncDropOwning { - type Output = (); - - fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll { - // SAFETY: Self is pinned thus it is ok to store references to self - unsafe { - let this = self.get_unchecked_mut(); - let dtor = Pin::new_unchecked( - this.dtor.get_or_insert_with(|| async_drop_in_place(this.value.as_mut_ptr())), - ); - // AsyncDestuctors are idempotent so Self gets idempotency as well - dtor.poll(cx) - } - } -} +#[allow(unused_imports)] +use core::future::Future; -#[lang = "async_drop_in_place"] -#[allow(unconditional_recursion)] -// FIXME: Consider if `#[rustc_diagnostic_item = "ptr_drop_in_place"]` is needed? -unsafe fn async_drop_in_place_raw( - to_drop: *mut T, -) -> ::AsyncDestructor { - // Code here does not matter - this is replaced by the - // real async drop glue constructor by the compiler. - - // SAFETY: see comment above - unsafe { async_drop_in_place_raw(to_drop) } -} +#[allow(unused_imports)] +use crate::pin::Pin; +#[allow(unused_imports)] +use crate::task::{Context, Poll}; -/// Creates the asynchronous destructor of the pointed-to value. -/// -/// # Safety -/// -/// Behavior is undefined if any of the following conditions are violated: -/// -/// * `to_drop` must be [valid](crate::ptr#safety) for both reads and writes. -/// -/// * `to_drop` must be properly aligned, even if `T` has size 0. +/// Async version of Drop trait. /// -/// * `to_drop` must be nonnull, even if `T` has size 0. +/// When a value is no longer needed, Rust will run a "destructor" on that value. +/// The most common way that a value is no longer needed is when it goes out of +/// scope. Destructors may still run in other circumstances, but we're going to +/// focus on scope for the examples here. To learn about some of those other cases, +/// please see [the reference] section on destructors. /// -/// * The value `to_drop` points to must be valid for async dropping, -/// which may mean it must uphold additional invariants. These -/// invariants depend on the type of the value being dropped. For -/// instance, when dropping a Box, the box's pointer to the heap must -/// be valid. +/// [the reference]: https://doc.rust-lang.org/reference/destructors.html /// -/// * While `async_drop_in_place` is executing or the returned async -/// destructor is alive, the only way to access parts of `to_drop` -/// is through the `self: Pin<&mut Self>` references supplied to -/// the `AsyncDrop::async_drop` methods that `async_drop_in_place` -/// or `AsyncDropInPlace::poll` invokes. This usually means the -/// returned future stores the `to_drop` pointer and user is required -/// to guarantee that dropped value doesn't move. +/// ## `Copy` and ([`Drop`]|`AsyncDrop`) are exclusive /// -#[unstable(feature = "async_drop", issue = "126482")] -pub unsafe fn async_drop_in_place(to_drop: *mut T) -> AsyncDropInPlace { - // SAFETY: `async_drop_in_place_raw` has the same safety requirements - unsafe { AsyncDropInPlace(async_drop_in_place_raw(to_drop)) } -} - -/// A future returned by the [`async_drop_in_place`]. -#[unstable(feature = "async_drop", issue = "126482")] -pub struct AsyncDropInPlace(::AsyncDestructor); - -#[unstable(feature = "async_drop", issue = "126482")] -impl fmt::Debug for AsyncDropInPlace { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.debug_struct("AsyncDropInPlace").finish_non_exhaustive() - } -} - -#[unstable(feature = "async_drop", issue = "126482")] -impl Future for AsyncDropInPlace { - type Output = (); - - #[inline(always)] - fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll { - // SAFETY: This code simply forwards poll call to the inner future - unsafe { Pin::new_unchecked(&mut self.get_unchecked_mut().0) }.poll(cx) - } -} - -// FIXME(zetanumbers): Add same restrictions on AsyncDrop impls as -// with Drop impls -/// Custom code within the asynchronous destructor. +/// You cannot implement both [`Copy`] and ([`Drop`]|`AsyncDrop`) on the same type. Types that +/// are `Copy` get implicitly duplicated by the compiler, making it very +/// hard to predict when, and how often destructors will be executed. As such, +/// these types cannot have destructors. +#[cfg(not(bootstrap))] #[unstable(feature = "async_drop", issue = "126482")] #[lang = "async_drop"] pub trait AsyncDrop { - /// A future returned by the [`AsyncDrop::async_drop`] to be part - /// of the async destructor. - #[unstable(feature = "async_drop", issue = "126482")] - type Dropper<'a>: Future - where - Self: 'a; - - /// Constructs the asynchronous destructor for this type. - #[unstable(feature = "async_drop", issue = "126482")] - fn async_drop(self: Pin<&mut Self>) -> Self::Dropper<'_>; -} - -#[lang = "async_destruct"] -#[rustc_deny_explicit_impl] -#[rustc_do_not_implement_via_object] -trait AsyncDestruct { - type AsyncDestructor: Future; -} - -/// Basically calls `AsyncDrop::async_drop` with pointer. Used to simplify -/// generation of the code for `async_drop_in_place_raw` -#[lang = "surface_async_drop_in_place"] -async unsafe fn surface_async_drop_in_place(ptr: *mut T) { - // SAFETY: We call this from async drop `async_drop_in_place_raw` - // which has the same safety requirements - unsafe { ::async_drop(Pin::new_unchecked(&mut *ptr)).await } -} - -/// Basically calls `Drop::drop` with pointer. Used to simplify generation -/// of the code for `async_drop_in_place_raw` -#[allow(drop_bounds)] -#[lang = "async_drop_surface_drop_in_place"] -async unsafe fn surface_drop_in_place(ptr: *mut T) { - // SAFETY: We call this from async drop `async_drop_in_place_raw` - // which has the same safety requirements - unsafe { crate::ops::fallback_surface_drop(&mut *ptr) } -} - -/// Wraps a future to continue outputting `Poll::Ready(())` once after -/// wrapped future completes by returning `Poll::Ready(())` on poll. This -/// is useful for constructing async destructors to guarantee this -/// "fuse" property -// -// FIXME: Consider optimizing combinators to not have to use fuse in majority -// of cases, perhaps by adding `#[(rustc_)idempotent(_future)]` attribute for -// async functions and blocks with the unit return type. However current layout -// optimizations currently encode `None` case into the async block's discriminant. -struct Fuse { - inner: Option, -} - -#[lang = "async_drop_fuse"] -fn fuse(inner: T) -> Fuse { - Fuse { inner: Some(inner) } -} - -impl Future for Fuse -where - T: Future, -{ - type Output = (); - - fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll { - // SAFETY: pin projection into `self.inner` - unsafe { - let this = self.get_unchecked_mut(); - if let Some(inner) = &mut this.inner { - ready!(Pin::new_unchecked(inner).poll(cx)); - this.inner = None; - } - } - Poll::Ready(()) - } -} - -/// Async destructor for arrays and slices. -#[lang = "async_drop_slice"] -async unsafe fn slice(s: *mut [T]) { - let len = s.len(); - let ptr = s.as_mut_ptr(); - for i in 0..len { - // SAFETY: we iterate over elements of `s` slice - unsafe { async_drop_in_place_raw(ptr.add(i)).await } - } -} - -/// Constructs a chain of two futures, which awaits them sequentially as -/// a future. -#[lang = "async_drop_chain"] -async fn chain(first: F, last: G) -where - F: IntoFuture, - G: IntoFuture, -{ - first.await; - last.await; -} - -/// Basically a lazy version of `async_drop_in_place`. Returns a future -/// that would call `AsyncDrop::async_drop` on a first poll. -/// -/// # Safety -/// -/// Same as `async_drop_in_place` except is lazy to avoid creating -/// multiple mutable references. -#[lang = "async_drop_defer"] -async unsafe fn defer(to_drop: *mut T) { - // SAFETY: same safety requirements as `async_drop_in_place` - unsafe { async_drop_in_place(to_drop) }.await -} - -/// If `T`'s discriminant is equal to the stored one then awaits `M` -/// otherwise awaits the `O`. -/// -/// # Safety -/// -/// Users should carefully manage the returned future, since it would -/// try creating an immutable reference from `this` and get pointee's -/// discriminant. -// FIXME(zetanumbers): Send and Sync impls -#[lang = "async_drop_either"] -async unsafe fn either, M: IntoFuture, T>( - other: O, - matched: M, - this: *mut T, - discr: ::Discriminant, -) { - // SAFETY: Guaranteed by the safety section of this funtion's documentation - if unsafe { discriminant_value(&*this) } == discr { - drop(other); - matched.await - } else { - drop(matched); - other.await - } -} - -#[lang = "async_drop_deferred_drop_in_place"] -async unsafe fn deferred_drop_in_place(to_drop: *mut T) { - // SAFETY: same safety requirements as with drop_in_place (implied by - // function's name) - unsafe { crate::ptr::drop_in_place(to_drop) } -} - -/// Used for noop async destructors. We don't use [`core::future::Ready`] -/// because it panics after its second poll, which could be potentially -/// bad if that would happen during the cleanup. -#[derive(Clone, Copy)] -struct Noop; - -#[lang = "async_drop_noop"] -fn noop() -> Noop { - Noop -} - -impl Future for Noop { - type Output = (); - - fn poll(self: Pin<&mut Self>, _: &mut Context<'_>) -> Poll { - Poll::Ready(()) - } + /// Executes the async destructor for this type. + /// + /// This method is called implicitly when the value goes out of scope, + /// and cannot be called explicitly. + /// + /// When this method has been called, `self` has not yet been deallocated. + /// That only happens after the method is over. + /// + /// # Panics + #[allow(async_fn_in_trait)] + async fn drop(self: Pin<&mut Self>); +} + +/// Async drop. +#[cfg(not(bootstrap))] +#[unstable(feature = "async_drop", issue = "126482")] +#[lang = "async_drop_in_place"] +pub async unsafe fn async_drop_in_place(_to_drop: *mut T) { + // Code here does not matter - this is replaced by the + // real implementation by the compiler. } diff --git a/library/core/src/future/mod.rs b/library/core/src/future/mod.rs index 65c0171c88d5b..4b5a2f34d3f3e 100644 --- a/library/core/src/future/mod.rs +++ b/library/core/src/future/mod.rs @@ -20,8 +20,9 @@ mod pending; mod poll_fn; mod ready; +#[cfg(not(bootstrap))] #[unstable(feature = "async_drop", issue = "126482")] -pub use async_drop::{AsyncDrop, AsyncDropInPlace, async_drop, async_drop_in_place}; +pub use async_drop::{AsyncDrop, async_drop_in_place}; #[stable(feature = "into_future", since = "1.64.0")] pub use into_future::IntoFuture; #[stable(feature = "future_readiness_fns", since = "1.48.0")] diff --git a/library/core/src/hint.rs b/library/core/src/hint.rs index 5ce282b05de73..cb83540c4eaf7 100644 --- a/library/core/src/hint.rs +++ b/library/core/src/hint.rs @@ -4,6 +4,7 @@ //! //! Hints may be compile time or runtime. +use crate::mem::MaybeUninit; use crate::{intrinsics, ub_checks}; /// Informs the compiler that the site which is calling this function is not @@ -319,6 +320,10 @@ pub fn spin_loop() { /// This also means that this function does not offer any guarantees for cryptographic or security /// purposes. /// +/// This limitation is not specific to `black_box`; there is no mechanism in the entire Rust +/// language that can provide the guarantees required for constant-time cryptography. +/// (There is also no such mechanism in LLVM, so the same is true for every other LLVM-based compiler.) +/// /// /// /// [`std::convert::identity`]: crate::convert::identity @@ -734,3 +739,61 @@ pub const fn unlikely(b: bool) -> bool { pub const fn cold_path() { crate::intrinsics::cold_path() } + +/// Returns either `true_val` or `false_val` depending on the value of +/// `condition`, with a hint to the compiler that `condition` is unlikely to be +/// correctly predicted by a CPU’s branch predictor. +/// +/// This method is functionally equivalent to +/// ```ignore (this is just for illustrative purposes) +/// fn select_unpredictable(b: bool, true_val: T, false_val: T) -> T { +/// if b { true_val } else { false_val } +/// } +/// ``` +/// but might generate different assembly. In particular, on platforms with +/// a conditional move or select instruction (like `cmov` on x86 or `csel` +/// on ARM) the optimizer might use these instructions to avoid branches, +/// which can benefit performance if the branch predictor is struggling +/// with predicting `condition`, such as in an implementation of binary +/// search. +/// +/// Note however that this lowering is not guaranteed (on any platform) and +/// should not be relied upon when trying to write cryptographic constant-time +/// code. Also be aware that this lowering might *decrease* performance if +/// `condition` is well-predictable. It is advisable to perform benchmarks to +/// tell if this function is useful. +/// +/// # Examples +/// +/// Distribute values evenly between two buckets: +/// ``` +/// use std::hash::BuildHasher; +/// use std::hint; +/// +/// fn append(hasher: &H, v: i32, bucket_one: &mut Vec, bucket_two: &mut Vec) { +/// let hash = hasher.hash_one(&v); +/// let bucket = hint::select_unpredictable(hash % 2 == 0, bucket_one, bucket_two); +/// bucket.push(v); +/// } +/// # let hasher = std::collections::hash_map::RandomState::new(); +/// # let mut bucket_one = Vec::new(); +/// # let mut bucket_two = Vec::new(); +/// # append(&hasher, 42, &mut bucket_one, &mut bucket_two); +/// # assert_eq!(bucket_one.len() + bucket_two.len(), 1); +/// ``` +#[inline(always)] +#[stable(feature = "select_unpredictable", since = "CURRENT_RUSTC_VERSION")] +pub fn select_unpredictable(condition: bool, true_val: T, false_val: T) -> T { + // FIXME(https://github.com/rust-lang/unsafe-code-guidelines/issues/245): + // Change this to use ManuallyDrop instead. + let mut true_val = MaybeUninit::new(true_val); + let mut false_val = MaybeUninit::new(false_val); + // SAFETY: The value that is not selected is dropped, and the selected one + // is returned. This is necessary because the intrinsic doesn't drop the + // value that is not selected. + unsafe { + crate::intrinsics::select_unpredictable(!condition, &mut true_val, &mut false_val) + .assume_init_drop(); + crate::intrinsics::select_unpredictable(condition, true_val, false_val).assume_init() + } +} diff --git a/library/core/src/intrinsics/mod.rs b/library/core/src/intrinsics/mod.rs index 81e59a1f349ec..5649736e40492 100644 --- a/library/core/src/intrinsics/mod.rs +++ b/library/core/src/intrinsics/mod.rs @@ -5,14 +5,11 @@ //! //! # Const intrinsics //! -//! Note: any changes to the constness of intrinsics should be discussed with the language team. -//! This includes changes in the stability of the constness. -//! -//! In order to make an intrinsic usable at compile-time, it needs to be declared in the "new" -//! style, i.e. as a `#[rustc_intrinsic]` function, not inside an `extern` block. Then copy the -//! implementation from to +//! In order to make an intrinsic unstable usable at compile-time, copy the implementation from +//! to //! -//! and make the intrinsic declaration a `const fn`. +//! and make the intrinsic declaration below a `const fn`. This should be done in coordination with +//! wg-const-eval. //! //! If an intrinsic is supposed to be used from a `const fn` with a `rustc_const_stable` attribute, //! `#[rustc_intrinsic_const_stable_indirect]` needs to be added to the intrinsic. Such a change requires @@ -1329,7 +1326,9 @@ pub const fn unlikely(b: bool) -> bool { /// Therefore, implementations must not require the user to uphold /// any safety invariants. /// -/// The public form of this instrinsic is [`bool::select_unpredictable`]. +/// The public form of this instrinsic is [`core::hint::select_unpredictable`]. +/// However unlike the public form, the intrinsic will not drop the value that +/// is not selected. #[unstable(feature = "core_intrinsics", issue = "none")] #[rustc_intrinsic] #[rustc_nounwind] @@ -1498,6 +1497,7 @@ pub const fn forget(_: T); /// Turning raw bytes (`[u8; SZ]`) into `u32`, `f64`, etc.: /// /// ``` +/// # #![cfg_attr(not(bootstrap), allow(unnecessary_transmutes))] /// let raw_bytes = [0x78, 0x56, 0x34, 0x12]; /// /// let num = unsafe { @@ -2307,20 +2307,8 @@ pub unsafe fn truncf128(x: f128) -> f128; /// [`f16::round_ties_even`](../../std/primitive.f16.html#method.round_ties_even) #[rustc_intrinsic] #[rustc_nounwind] -#[cfg(not(bootstrap))] pub fn round_ties_even_f16(x: f16) -> f16; -/// To be removed on next bootstrap bump. -#[cfg(bootstrap)] -pub fn round_ties_even_f16(x: f16) -> f16 { - #[rustc_intrinsic] - #[rustc_nounwind] - unsafe fn rintf16(x: f16) -> f16; - - // SAFETY: this intrinsic isn't actually unsafe - unsafe { rintf16(x) } -} - /// Returns the nearest integer to an `f32`. Rounds half-way cases to the number with an even /// least significant digit. /// @@ -2328,20 +2316,8 @@ pub fn round_ties_even_f16(x: f16) -> f16 { /// [`f32::round_ties_even`](../../std/primitive.f32.html#method.round_ties_even) #[rustc_intrinsic] #[rustc_nounwind] -#[cfg(not(bootstrap))] pub fn round_ties_even_f32(x: f32) -> f32; -/// To be removed on next bootstrap bump. -#[cfg(bootstrap)] -pub fn round_ties_even_f32(x: f32) -> f32 { - #[rustc_intrinsic] - #[rustc_nounwind] - unsafe fn rintf32(x: f32) -> f32; - - // SAFETY: this intrinsic isn't actually unsafe - unsafe { rintf32(x) } -} - /// Provided for compatibility with stdarch. DO NOT USE. #[inline(always)] pub unsafe fn rintf32(x: f32) -> f32 { @@ -2355,20 +2331,8 @@ pub unsafe fn rintf32(x: f32) -> f32 { /// [`f64::round_ties_even`](../../std/primitive.f64.html#method.round_ties_even) #[rustc_intrinsic] #[rustc_nounwind] -#[cfg(not(bootstrap))] pub fn round_ties_even_f64(x: f64) -> f64; -/// To be removed on next bootstrap bump. -#[cfg(bootstrap)] -pub fn round_ties_even_f64(x: f64) -> f64 { - #[rustc_intrinsic] - #[rustc_nounwind] - unsafe fn rintf64(x: f64) -> f64; - - // SAFETY: this intrinsic isn't actually unsafe - unsafe { rintf64(x) } -} - /// Provided for compatibility with stdarch. DO NOT USE. #[inline(always)] pub unsafe fn rintf64(x: f64) -> f64 { @@ -2382,20 +2346,8 @@ pub unsafe fn rintf64(x: f64) -> f64 { /// [`f128::round_ties_even`](../../std/primitive.f128.html#method.round_ties_even) #[rustc_intrinsic] #[rustc_nounwind] -#[cfg(not(bootstrap))] pub fn round_ties_even_f128(x: f128) -> f128; -/// To be removed on next bootstrap bump. -#[cfg(bootstrap)] -pub fn round_ties_even_f128(x: f128) -> f128 { - #[rustc_intrinsic] - #[rustc_nounwind] - unsafe fn rintf128(x: f128) -> f128; - - // SAFETY: this intrinsic isn't actually unsafe - unsafe { rintf128(x) } -} - /// Returns the nearest integer to an `f16`. Rounds half-way cases away from zero. /// /// The stabilized version of this intrinsic is @@ -2475,38 +2427,38 @@ pub unsafe fn float_to_int_unchecked(value: Float) -> In /// Float addition that allows optimizations based on algebraic rules. /// -/// This intrinsic does not have a stable counterpart. +/// Stabilized as [`f16::algebraic_add`], [`f32::algebraic_add`], [`f64::algebraic_add`] and [`f128::algebraic_add`]. #[rustc_nounwind] #[rustc_intrinsic] -pub fn fadd_algebraic(a: T, b: T) -> T; +pub const fn fadd_algebraic(a: T, b: T) -> T; /// Float subtraction that allows optimizations based on algebraic rules. /// -/// This intrinsic does not have a stable counterpart. +/// Stabilized as [`f16::algebraic_sub`], [`f32::algebraic_sub`], [`f64::algebraic_sub`] and [`f128::algebraic_sub`]. #[rustc_nounwind] #[rustc_intrinsic] -pub fn fsub_algebraic(a: T, b: T) -> T; +pub const fn fsub_algebraic(a: T, b: T) -> T; /// Float multiplication that allows optimizations based on algebraic rules. /// -/// This intrinsic does not have a stable counterpart. +/// Stabilized as [`f16::algebraic_mul`], [`f32::algebraic_mul`], [`f64::algebraic_mul`] and [`f128::algebraic_mul`]. #[rustc_nounwind] #[rustc_intrinsic] -pub fn fmul_algebraic(a: T, b: T) -> T; +pub const fn fmul_algebraic(a: T, b: T) -> T; /// Float division that allows optimizations based on algebraic rules. /// -/// This intrinsic does not have a stable counterpart. +/// Stabilized as [`f16::algebraic_div`], [`f32::algebraic_div`], [`f64::algebraic_div`] and [`f128::algebraic_div`]. #[rustc_nounwind] #[rustc_intrinsic] -pub fn fdiv_algebraic(a: T, b: T) -> T; +pub const fn fdiv_algebraic(a: T, b: T) -> T; /// Float remainder that allows optimizations based on algebraic rules. /// -/// This intrinsic does not have a stable counterpart. +/// Stabilized as [`f16::algebraic_rem`], [`f32::algebraic_rem`], [`f64::algebraic_rem`] and [`f128::algebraic_rem`]. #[rustc_nounwind] #[rustc_intrinsic] -pub fn frem_algebraic(a: T, b: T) -> T; +pub const fn frem_algebraic(a: T, b: T) -> T; /// Returns the number of bits set in an integer type `T` /// @@ -2679,13 +2631,15 @@ pub const fn bswap(x: T) -> T; #[rustc_intrinsic] pub const fn bitreverse(x: T) -> T; -/// Does a three-way comparison between the two integer arguments. +/// Does a three-way comparison between the two arguments, +/// which must be of character or integer (signed or unsigned) type. /// -/// This is included as an intrinsic as it's useful to let it be one thing -/// in MIR, rather than the multiple checks and switches that make its IR -/// large and difficult to optimize. +/// This was originally added because it greatly simplified the MIR in `cmp` +/// implementations, and then LLVM 20 added a backend intrinsic for it too. /// /// The stabilized version of this intrinsic is [`Ord::cmp`]. +#[rustc_intrinsic_const_stable_indirect] +#[rustc_nounwind] #[rustc_intrinsic] pub const fn three_way_compare(lhs: T, rhss: T) -> crate::cmp::Ordering; @@ -2786,6 +2740,7 @@ pub const fn carrying_mul_add, /// `x % y != 0` or `y == 0` or `x == T::MIN && y == -1` /// /// This intrinsic does not have a stable counterpart. +#[rustc_intrinsic_const_stable_indirect] #[rustc_nounwind] #[rustc_intrinsic] pub const unsafe fn exact_div(x: T, y: T) -> T; @@ -3041,7 +2996,7 @@ pub unsafe fn nontemporal_store(ptr: *mut T, val: T); #[rustc_intrinsic] pub const unsafe fn ptr_offset_from(ptr: *const T, base: *const T) -> isize; -/// See documentation of `<*const T>::sub_ptr` for details. +/// See documentation of `<*const T>::offset_from_unsigned` for details. #[rustc_nounwind] #[rustc_intrinsic] #[rustc_intrinsic_const_stable_indirect] @@ -3365,7 +3320,6 @@ pub const fn is_val_statically_known(_arg: T) -> bool { #[inline] #[rustc_intrinsic] #[rustc_intrinsic_const_stable_indirect] -#[rustc_allow_const_fn_unstable(const_swap_nonoverlapping)] // this is anyway not called since CTFE implements the intrinsic pub const unsafe fn typed_swap_nonoverlapping(x: *mut T, y: *mut T) { // SAFETY: The caller provided single non-overlapping items behind // pointers, so swapping them with `count: 1` is fine. @@ -3450,20 +3404,62 @@ pub const fn contract_checks() -> bool { /// /// By default, if `contract_checks` is enabled, this will panic with no unwind if the condition /// returns false. -#[unstable(feature = "contracts_internals", issue = "128044" /* compiler-team#759 */)] +/// +/// Note that this function is a no-op during constant evaluation. +#[unstable(feature = "contracts_internals", issue = "128044")] +// Calls to this function get inserted by an AST expansion pass, which uses the equivalent of +// `#[allow_internal_unstable]` to allow using `contracts_internals` functions. Const-checking +// doesn't honor `#[allow_internal_unstable]`, so for the const feature gate we use the user-facing +// `contracts` feature rather than the perma-unstable `contracts_internals` +#[rustc_const_unstable(feature = "contracts", issue = "128044")] #[lang = "contract_check_requires"] #[rustc_intrinsic] -pub fn contract_check_requires bool>(cond: C) { - if contract_checks() && !cond() { - // Emit no unwind panic in case this was a safety requirement. - crate::panicking::panic_nounwind("failed requires check"); - } +pub const fn contract_check_requires bool + Copy>(cond: C) { + const_eval_select!( + @capture[C: Fn() -> bool + Copy] { cond: C } : + if const { + // Do nothing + } else { + if contract_checks() && !cond() { + // Emit no unwind panic in case this was a safety requirement. + crate::panicking::panic_nounwind("failed requires check"); + } + } + ) } /// Check if the post-condition `cond` has been met. /// /// By default, if `contract_checks` is enabled, this will panic with no unwind if the condition /// returns false. +/// +/// Note that this function is a no-op during constant evaluation. +#[cfg(not(bootstrap))] +#[unstable(feature = "contracts_internals", issue = "128044")] +// Similar to `contract_check_requires`, we need to use the user-facing +// `contracts` feature rather than the perma-unstable `contracts_internals`. +// Const-checking doesn't honor allow_internal_unstable logic used by contract expansion. +#[rustc_const_unstable(feature = "contracts", issue = "128044")] +#[lang = "contract_check_ensures"] +#[rustc_intrinsic] +pub const fn contract_check_ensures bool + Copy, Ret>(cond: C, ret: Ret) -> Ret { + const_eval_select!( + @capture[C: Fn(&Ret) -> bool + Copy, Ret] { cond: C, ret: Ret } -> Ret : + if const { + // Do nothing + ret + } else { + if contract_checks() && !cond(&ret) { + // Emit no unwind panic in case this was a safety requirement. + crate::panicking::panic_nounwind("failed ensures check"); + } + ret + } + ) +} + +/// This is the old version of contract_check_ensures kept here for bootstrap only. +#[cfg(bootstrap)] #[unstable(feature = "contracts_internals", issue = "128044" /* compiler-team#759 */)] #[rustc_intrinsic] pub fn contract_check_ensures<'a, Ret, C: Fn(&'a Ret) -> bool>(ret: &'a Ret, cond: C) { @@ -3723,7 +3719,7 @@ pub const fn ptr_metadata + ?Sized, M>(ptr: *const /// [`Vec::append`]: ../../std/vec/struct.Vec.html#method.append #[doc(alias = "memcpy")] #[stable(feature = "rust1", since = "1.0.0")] -#[rustc_allowed_through_unstable_modules = "import this function via `std::mem` instead"] +#[rustc_allowed_through_unstable_modules = "import this function via `std::ptr` instead"] #[rustc_const_stable(feature = "const_intrinsic_copy", since = "1.83.0")] #[inline(always)] #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces @@ -3826,7 +3822,7 @@ pub const unsafe fn copy_nonoverlapping(src: *const T, dst: *mut T, count: us /// ``` #[doc(alias = "memmove")] #[stable(feature = "rust1", since = "1.0.0")] -#[rustc_allowed_through_unstable_modules = "import this function via `std::mem` instead"] +#[rustc_allowed_through_unstable_modules = "import this function via `std::ptr` instead"] #[rustc_const_stable(feature = "const_intrinsic_copy", since = "1.83.0")] #[inline(always)] #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces @@ -3906,7 +3902,7 @@ pub const unsafe fn copy(src: *const T, dst: *mut T, count: usize) { /// ``` #[doc(alias = "memset")] #[stable(feature = "rust1", since = "1.0.0")] -#[rustc_allowed_through_unstable_modules = "import this function via `std::mem` instead"] +#[rustc_allowed_through_unstable_modules = "import this function via `std::ptr` instead"] #[rustc_const_stable(feature = "const_ptr_write", since = "1.83.0")] #[inline(always)] #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces diff --git a/library/core/src/intrinsics/simd.rs b/library/core/src/intrinsics/simd.rs index ae6e1a779ed58..86e3f8509ee29 100644 --- a/library/core/src/intrinsics/simd.rs +++ b/library/core/src/intrinsics/simd.rs @@ -4,7 +4,7 @@ /// Inserts an element into a vector, returning the updated vector. /// -/// `T` must be a vector with element type `U`. +/// `T` must be a vector with element type `U`, and `idx` must be `const`. /// /// # Safety /// @@ -15,15 +15,48 @@ pub const unsafe fn simd_insert(x: T, idx: u32, val: U) -> T; /// Extracts an element from a vector. /// -/// `T` must be a vector with element type `U`. +/// `T` must be a vector with element type `U`, and `idx` must be `const`. /// /// # Safety /// -/// `idx` must be in-bounds of the vector. +/// `idx` must be const and in-bounds of the vector. #[rustc_intrinsic] #[rustc_nounwind] pub const unsafe fn simd_extract(x: T, idx: u32) -> U; +/// Inserts an element into a vector, returning the updated vector. +/// +/// `T` must be a vector with element type `U`. +/// +/// If the index is `const`, [`simd_insert`] may emit better assembly. +/// +/// # Safety +/// +/// `idx` must be in-bounds of the vector. +#[rustc_nounwind] +#[cfg_attr(not(bootstrap), rustc_intrinsic)] +pub unsafe fn simd_insert_dyn(mut x: T, idx: u32, val: U) -> T { + // SAFETY: `idx` must be in-bounds + unsafe { (&raw mut x).cast::().add(idx as usize).write(val) } + x +} + +/// Extracts an element from a vector. +/// +/// `T` must be a vector with element type `U`. +/// +/// If the index is `const`, [`simd_extract`] may emit better assembly. +/// +/// # Safety +/// +/// `idx` must be in-bounds of the vector. +#[rustc_nounwind] +#[cfg_attr(not(bootstrap), rustc_intrinsic)] +pub unsafe fn simd_extract_dyn(x: T, idx: u32) -> U { + // SAFETY: `idx` must be in-bounds + unsafe { (&raw const x).cast::().add(idx as usize).read() } +} + /// Adds two simd vectors elementwise. /// /// `T` must be a vector of integers or floats. @@ -271,7 +304,7 @@ pub unsafe fn simd_shuffle(x: T, y: T, idx: U) -> V; /// /// `U` must be a vector of pointers to the element type of `T`, with the same length as `T`. /// -/// `V` must be a vector of signed integers with the same length as `T` (but any element size). +/// `V` must be a vector of integers with the same length as `T` (but any element size). /// /// For each pointer in `ptr`, if the corresponding value in `mask` is `!0`, read the pointer. /// Otherwise if the corresponding value in `mask` is `0`, return the corresponding value from @@ -292,7 +325,7 @@ pub unsafe fn simd_gather(val: T, ptr: U, mask: V) -> T; /// /// `U` must be a vector of pointers to the element type of `T`, with the same length as `T`. /// -/// `V` must be a vector of signed integers with the same length as `T` (but any element size). +/// `V` must be a vector of integers with the same length as `T` (but any element size). /// /// For each pointer in `ptr`, if the corresponding value in `mask` is `!0`, write the /// corresponding value in `val` to the pointer. @@ -316,7 +349,7 @@ pub unsafe fn simd_scatter(val: T, ptr: U, mask: V); /// /// `U` must be a pointer to the element type of `T` /// -/// `V` must be a vector of signed integers with the same length as `T` (but any element size). +/// `V` must be a vector of integers with the same length as `T` (but any element size). /// /// For each element, if the corresponding value in `mask` is `!0`, read the corresponding /// pointer offset from `ptr`. @@ -339,7 +372,7 @@ pub unsafe fn simd_masked_load(mask: V, ptr: U, val: T) -> T; /// /// `U` must be a pointer to the element type of `T` /// -/// `V` must be a vector of signed integers with the same length as `T` (but any element size). +/// `V` must be a vector of integers with the same length as `T` (but any element size). /// /// For each element, if the corresponding value in `mask` is `!0`, write the corresponding /// value in `val` to the pointer offset from `ptr`. @@ -523,7 +556,7 @@ pub unsafe fn simd_bitmask(x: T) -> U; /// /// `T` must be a vector. /// -/// `M` must be a signed integer vector with the same length as `T` (but any element size). +/// `M` must be an integer vector with the same length as `T` (but any element size). /// /// For each element, if the corresponding value in `mask` is `!0`, select the element from /// `if_true`. If the corresponding value in `mask` is `0`, select the element from @@ -544,11 +577,9 @@ pub unsafe fn simd_select(mask: M, if_true: T, if_false: T) -> T; /// For each element, if the bit in `mask` is `1`, select the element from /// `if_true`. If the corresponding bit in `mask` is `0`, select the element from /// `if_false`. +/// The remaining bits of the mask are ignored. /// /// The bitmask bit order matches `simd_bitmask`. -/// -/// # Safety -/// Padding bits must be all zero. #[rustc_intrinsic] #[rustc_nounwind] pub unsafe fn simd_select_bitmask(m: M, yes: T, no: T) -> T; diff --git a/library/core/src/iter/adapters/enumerate.rs b/library/core/src/iter/adapters/enumerate.rs index f9c388e8564d3..f7b9f0b7a5e9d 100644 --- a/library/core/src/iter/adapters/enumerate.rs +++ b/library/core/src/iter/adapters/enumerate.rs @@ -23,6 +23,39 @@ impl Enumerate { pub(in crate::iter) fn new(iter: I) -> Enumerate { Enumerate { iter, count: 0 } } + + /// Retrieve the current position of the iterator. + /// + /// If the iterator has not advanced, the position returned will be 0. + /// + /// The position may also exceed the bounds of the iterator to allow for calculating + /// the displacement of the iterator from following calls to [`Iterator::next`]. + /// + /// # Examples + /// + /// ``` + /// #![feature(next_index)] + /// + /// let arr = ['a', 'b']; + /// + /// let mut iter = arr.iter().enumerate(); + /// + /// assert_eq!(iter.next_index(), 0); + /// assert_eq!(iter.next(), Some((0, &'a'))); + /// + /// assert_eq!(iter.next_index(), 1); + /// assert_eq!(iter.next_index(), 1); + /// assert_eq!(iter.next(), Some((1, &'b'))); + /// + /// assert_eq!(iter.next_index(), 2); + /// assert_eq!(iter.next(), None); + /// assert_eq!(iter.next_index(), 2); + /// ``` + #[inline] + #[unstable(feature = "next_index", issue = "130711")] + pub fn next_index(&self) -> usize { + self.count + } } #[stable(feature = "rust1", since = "1.0.0")] @@ -36,7 +69,7 @@ where /// /// The method does no guarding against overflows, so enumerating more than /// `usize::MAX` elements either produces the wrong result or panics. If - /// debug assertions are enabled, a panic is guaranteed. + /// overflow checks are enabled, a panic is guaranteed. /// /// # Panics /// diff --git a/library/core/src/iter/adapters/peekable.rs b/library/core/src/iter/adapters/peekable.rs index cc12cd9c35601..a6522659620a0 100644 --- a/library/core/src/iter/adapters/peekable.rs +++ b/library/core/src/iter/adapters/peekable.rs @@ -271,7 +271,7 @@ impl Peekable { /// assert_eq!(iter.next_if(|&x| x == 0), Some(0)); /// // The next item returned is now 1, so `next_if` will return `None`. /// assert_eq!(iter.next_if(|&x| x == 0), None); - /// // `next_if` saves the value of the next item if it was not equal to `expected`. + /// // `next_if` retains the next item if the predicate evaluates to `false` for it. /// assert_eq!(iter.next(), Some(1)); /// ``` /// @@ -304,9 +304,9 @@ impl Peekable { /// let mut iter = (0..5).peekable(); /// // The first item of the iterator is 0; consume it. /// assert_eq!(iter.next_if_eq(&0), Some(0)); - /// // The next item returned is now 1, so `next_if` will return `None`. + /// // The next item returned is now 1, so `next_if_eq` will return `None`. /// assert_eq!(iter.next_if_eq(&0), None); - /// // `next_if_eq` saves the value of the next item if it was not equal to `expected`. + /// // `next_if_eq` retains the next item if it was not equal to `expected`. /// assert_eq!(iter.next(), Some(1)); /// ``` #[stable(feature = "peekable_next_if", since = "1.51.0")] diff --git a/library/core/src/iter/traits/iterator.rs b/library/core/src/iter/traits/iterator.rs index 3bbb52fdbcb5f..d1e71f0e60f2a 100644 --- a/library/core/src/iter/traits/iterator.rs +++ b/library/core/src/iter/traits/iterator.rs @@ -56,12 +56,12 @@ pub trait Iterator { /// ``` /// let a = [1, 2, 3]; /// - /// let mut iter = a.iter(); + /// let mut iter = a.into_iter(); /// /// // A call to next() returns the next value... - /// assert_eq!(Some(&1), iter.next()); - /// assert_eq!(Some(&2), iter.next()); - /// assert_eq!(Some(&3), iter.next()); + /// assert_eq!(Some(1), iter.next()); + /// assert_eq!(Some(2), iter.next()); + /// assert_eq!(Some(3), iter.next()); /// /// // ... and then None once it's over. /// assert_eq!(None, iter.next()); @@ -199,7 +199,7 @@ pub trait Iterator { /// /// The method does no guarding against overflows, so counting elements of /// an iterator with more than [`usize::MAX`] elements either produces the - /// wrong result or panics. If debug assertions are enabled, a panic is + /// wrong result or panics. If overflow checks are enabled, a panic is /// guaranteed. /// /// # Panics @@ -239,10 +239,10 @@ pub trait Iterator { /// /// ``` /// let a = [1, 2, 3]; - /// assert_eq!(a.iter().last(), Some(&3)); + /// assert_eq!(a.into_iter().last(), Some(3)); /// /// let a = [1, 2, 3, 4, 5]; - /// assert_eq!(a.iter().last(), Some(&5)); + /// assert_eq!(a.into_iter().last(), Some(5)); /// ``` #[inline] #[stable(feature = "rust1", since = "1.0.0")] @@ -284,12 +284,12 @@ pub trait Iterator { /// use std::num::NonZero; /// /// let a = [1, 2, 3, 4]; - /// let mut iter = a.iter(); + /// let mut iter = a.into_iter(); /// /// assert_eq!(iter.advance_by(2), Ok(())); - /// assert_eq!(iter.next(), Some(&3)); + /// assert_eq!(iter.next(), Some(3)); /// assert_eq!(iter.advance_by(0), Ok(())); - /// assert_eq!(iter.advance_by(100), Err(NonZero::new(99).unwrap())); // only `&4` was skipped + /// assert_eq!(iter.advance_by(100), Err(NonZero::new(99).unwrap())); // only `4` was skipped /// ``` #[inline] #[unstable(feature = "iter_advance_by", reason = "recently added", issue = "77404")] @@ -322,7 +322,7 @@ pub trait Iterator { /// /// ``` /// let a = [1, 2, 3]; - /// assert_eq!(a.iter().nth(1), Some(&2)); + /// assert_eq!(a.into_iter().nth(1), Some(2)); /// ``` /// /// Calling `nth()` multiple times doesn't rewind the iterator: @@ -330,9 +330,9 @@ pub trait Iterator { /// ``` /// let a = [1, 2, 3]; /// - /// let mut iter = a.iter(); + /// let mut iter = a.into_iter(); /// - /// assert_eq!(iter.nth(1), Some(&2)); + /// assert_eq!(iter.nth(1), Some(2)); /// assert_eq!(iter.nth(1), None); /// ``` /// @@ -340,7 +340,7 @@ pub trait Iterator { /// /// ``` /// let a = [1, 2, 3]; - /// assert_eq!(a.iter().nth(10), None); + /// assert_eq!(a.into_iter().nth(10), None); /// ``` #[inline] #[stable(feature = "rust1", since = "1.0.0")] @@ -385,11 +385,11 @@ pub trait Iterator { /// /// ``` /// let a = [0, 1, 2, 3, 4, 5]; - /// let mut iter = a.iter().step_by(2); + /// let mut iter = a.into_iter().step_by(2); /// - /// assert_eq!(iter.next(), Some(&0)); - /// assert_eq!(iter.next(), Some(&2)); - /// assert_eq!(iter.next(), Some(&4)); + /// assert_eq!(iter.next(), Some(0)); + /// assert_eq!(iter.next(), Some(2)); + /// assert_eq!(iter.next(), Some(4)); /// assert_eq!(iter.next(), None); /// ``` #[inline] @@ -417,37 +417,37 @@ pub trait Iterator { /// Basic usage: /// /// ``` - /// let a1 = [1, 2, 3]; - /// let a2 = [4, 5, 6]; + /// let s1 = "abc".chars(); + /// let s2 = "def".chars(); /// - /// let mut iter = a1.iter().chain(a2.iter()); + /// let mut iter = s1.chain(s2); /// - /// assert_eq!(iter.next(), Some(&1)); - /// assert_eq!(iter.next(), Some(&2)); - /// assert_eq!(iter.next(), Some(&3)); - /// assert_eq!(iter.next(), Some(&4)); - /// assert_eq!(iter.next(), Some(&5)); - /// assert_eq!(iter.next(), Some(&6)); + /// assert_eq!(iter.next(), Some('a')); + /// assert_eq!(iter.next(), Some('b')); + /// assert_eq!(iter.next(), Some('c')); + /// assert_eq!(iter.next(), Some('d')); + /// assert_eq!(iter.next(), Some('e')); + /// assert_eq!(iter.next(), Some('f')); /// assert_eq!(iter.next(), None); /// ``` /// /// Since the argument to `chain()` uses [`IntoIterator`], we can pass /// anything that can be converted into an [`Iterator`], not just an - /// [`Iterator`] itself. For example, slices (`&[T]`) implement + /// [`Iterator`] itself. For example, arrays (`[T]`) implement /// [`IntoIterator`], and so can be passed to `chain()` directly: /// /// ``` - /// let s1 = &[1, 2, 3]; - /// let s2 = &[4, 5, 6]; + /// let a1 = [1, 2, 3]; + /// let a2 = [4, 5, 6]; /// - /// let mut iter = s1.iter().chain(s2); + /// let mut iter = a1.into_iter().chain(a2); /// - /// assert_eq!(iter.next(), Some(&1)); - /// assert_eq!(iter.next(), Some(&2)); - /// assert_eq!(iter.next(), Some(&3)); - /// assert_eq!(iter.next(), Some(&4)); - /// assert_eq!(iter.next(), Some(&5)); - /// assert_eq!(iter.next(), Some(&6)); + /// assert_eq!(iter.next(), Some(1)); + /// assert_eq!(iter.next(), Some(2)); + /// assert_eq!(iter.next(), Some(3)); + /// assert_eq!(iter.next(), Some(4)); + /// assert_eq!(iter.next(), Some(5)); + /// assert_eq!(iter.next(), Some(6)); /// assert_eq!(iter.next(), None); /// ``` /// @@ -496,31 +496,31 @@ pub trait Iterator { /// Basic usage: /// /// ``` - /// let a1 = [1, 2, 3]; - /// let a2 = [4, 5, 6]; + /// let s1 = "abc".chars(); + /// let s2 = "def".chars(); /// - /// let mut iter = a1.iter().zip(a2.iter()); + /// let mut iter = s1.zip(s2); /// - /// assert_eq!(iter.next(), Some((&1, &4))); - /// assert_eq!(iter.next(), Some((&2, &5))); - /// assert_eq!(iter.next(), Some((&3, &6))); + /// assert_eq!(iter.next(), Some(('a', 'd'))); + /// assert_eq!(iter.next(), Some(('b', 'e'))); + /// assert_eq!(iter.next(), Some(('c', 'f'))); /// assert_eq!(iter.next(), None); /// ``` /// /// Since the argument to `zip()` uses [`IntoIterator`], we can pass /// anything that can be converted into an [`Iterator`], not just an - /// [`Iterator`] itself. For example, slices (`&[T]`) implement + /// [`Iterator`] itself. For example, arrays (`[T]`) implement /// [`IntoIterator`], and so can be passed to `zip()` directly: /// /// ``` - /// let s1 = &[1, 2, 3]; - /// let s2 = &[4, 5, 6]; + /// let a1 = [1, 2, 3]; + /// let a2 = [4, 5, 6]; /// - /// let mut iter = s1.iter().zip(s2); + /// let mut iter = a1.into_iter().zip(a2); /// - /// assert_eq!(iter.next(), Some((&1, &4))); - /// assert_eq!(iter.next(), Some((&2, &5))); - /// assert_eq!(iter.next(), Some((&3, &6))); + /// assert_eq!(iter.next(), Some((1, 4))); + /// assert_eq!(iter.next(), Some((2, 5))); + /// assert_eq!(iter.next(), Some((3, 6))); /// assert_eq!(iter.next(), None); /// ``` /// @@ -604,12 +604,12 @@ pub trait Iterator { /// ``` /// #![feature(iter_intersperse)] /// - /// let mut a = [0, 1, 2].iter().intersperse(&100); - /// assert_eq!(a.next(), Some(&0)); // The first element from `a`. - /// assert_eq!(a.next(), Some(&100)); // The separator. - /// assert_eq!(a.next(), Some(&1)); // The next element from `a`. - /// assert_eq!(a.next(), Some(&100)); // The separator. - /// assert_eq!(a.next(), Some(&2)); // The last element from `a`. + /// let mut a = [0, 1, 2].into_iter().intersperse(100); + /// assert_eq!(a.next(), Some(0)); // The first element from `a`. + /// assert_eq!(a.next(), Some(100)); // The separator. + /// assert_eq!(a.next(), Some(1)); // The next element from `a`. + /// assert_eq!(a.next(), Some(100)); // The separator. + /// assert_eq!(a.next(), Some(2)); // The last element from `a`. /// assert_eq!(a.next(), None); // The iterator is finished. /// ``` /// @@ -617,7 +617,8 @@ pub trait Iterator { /// ``` /// #![feature(iter_intersperse)] /// - /// let hello = ["Hello", "World", "!"].iter().copied().intersperse(" ").collect::(); + /// let words = ["Hello", "World", "!"]; + /// let hello: String = words.into_iter().intersperse(" ").collect(); /// assert_eq!(hello, "Hello World !"); /// ``` /// @@ -673,7 +674,7 @@ pub trait Iterator { /// let src = ["Hello", "to", "all", "people", "!!"].iter().copied(); /// /// // The closure mutably borrows its context to generate an item. - /// let mut happy_emojis = [" ❤️ ", " 😀 "].iter().copied(); + /// let mut happy_emojis = [" ❤️ ", " 😀 "].into_iter(); /// let separator = || happy_emojis.next().unwrap_or(" 🦀 "); /// /// let result = src.intersperse_with(separator).collect::(); @@ -734,7 +735,7 @@ pub trait Iterator { /// /// // it won't even execute, as it is lazy. Rust will warn you about this. /// - /// // Instead, use for: + /// // Instead, use a for-loop: /// for x in 0..5 { /// println!("{x}"); /// } @@ -814,10 +815,10 @@ pub trait Iterator { /// ``` /// let a = [0i32, 1, 2]; /// - /// let mut iter = a.iter().filter(|x| x.is_positive()); + /// let mut iter = a.into_iter().filter(|x| x.is_positive()); /// - /// assert_eq!(iter.next(), Some(&1)); - /// assert_eq!(iter.next(), Some(&2)); + /// assert_eq!(iter.next(), Some(1)); + /// assert_eq!(iter.next(), Some(2)); /// assert_eq!(iter.next(), None); /// ``` /// @@ -826,21 +827,20 @@ pub trait Iterator { /// situation, where the type of the closure is a double reference: /// /// ``` - /// let a = [0, 1, 2]; + /// let s = &[0, 1, 2]; /// - /// let mut iter = a.iter().filter(|x| **x > 1); // need two *s! + /// let mut iter = s.iter().filter(|x| **x > 1); // needs two *s! /// /// assert_eq!(iter.next(), Some(&2)); /// assert_eq!(iter.next(), None); /// ``` /// - /// It's common to instead use destructuring on the argument to strip away - /// one: + /// It's common to instead use destructuring on the argument to strip away one: /// /// ``` - /// let a = [0, 1, 2]; + /// let s = &[0, 1, 2]; /// - /// let mut iter = a.iter().filter(|&x| *x > 1); // both & and * + /// let mut iter = s.iter().filter(|&x| *x > 1); // both & and * /// /// assert_eq!(iter.next(), Some(&2)); /// assert_eq!(iter.next(), None); @@ -849,9 +849,9 @@ pub trait Iterator { /// or both: /// /// ``` - /// let a = [0, 1, 2]; + /// let s = &[0, 1, 2]; /// - /// let mut iter = a.iter().filter(|&&x| x > 1); // two &s + /// let mut iter = s.iter().filter(|&&x| x > 1); // two &s /// /// assert_eq!(iter.next(), Some(&2)); /// assert_eq!(iter.next(), None); @@ -931,7 +931,7 @@ pub trait Iterator { /// /// The method does no guarding against overflows, so enumerating more than /// [`usize::MAX`] elements either produces the wrong result or panics. If - /// debug assertions are enabled, a panic is guaranteed. + /// overflow checks are enabled, a panic is guaranteed. /// /// # Panics /// @@ -945,11 +945,11 @@ pub trait Iterator { /// ``` /// let a = ['a', 'b', 'c']; /// - /// let mut iter = a.iter().enumerate(); + /// let mut iter = a.into_iter().enumerate(); /// - /// assert_eq!(iter.next(), Some((0, &'a'))); - /// assert_eq!(iter.next(), Some((1, &'b'))); - /// assert_eq!(iter.next(), Some((2, &'c'))); + /// assert_eq!(iter.next(), Some((0, 'a'))); + /// assert_eq!(iter.next(), Some((1, 'b'))); + /// assert_eq!(iter.next(), Some((2, 'c'))); /// assert_eq!(iter.next(), None); /// ``` #[inline] @@ -980,19 +980,19 @@ pub trait Iterator { /// ``` /// let xs = [1, 2, 3]; /// - /// let mut iter = xs.iter().peekable(); + /// let mut iter = xs.into_iter().peekable(); /// /// // peek() lets us see into the future - /// assert_eq!(iter.peek(), Some(&&1)); - /// assert_eq!(iter.next(), Some(&1)); + /// assert_eq!(iter.peek(), Some(&1)); + /// assert_eq!(iter.next(), Some(1)); /// - /// assert_eq!(iter.next(), Some(&2)); + /// assert_eq!(iter.next(), Some(2)); /// /// // we can peek() multiple times, the iterator won't advance - /// assert_eq!(iter.peek(), Some(&&3)); - /// assert_eq!(iter.peek(), Some(&&3)); + /// assert_eq!(iter.peek(), Some(&3)); + /// assert_eq!(iter.peek(), Some(&3)); /// - /// assert_eq!(iter.next(), Some(&3)); + /// assert_eq!(iter.next(), Some(3)); /// /// // after the iterator is finished, so is peek() /// assert_eq!(iter.peek(), None); @@ -1005,21 +1005,21 @@ pub trait Iterator { /// ``` /// let xs = [1, 2, 3]; /// - /// let mut iter = xs.iter().peekable(); + /// let mut iter = xs.into_iter().peekable(); /// /// // `peek_mut()` lets us see into the future - /// assert_eq!(iter.peek_mut(), Some(&mut &1)); - /// assert_eq!(iter.peek_mut(), Some(&mut &1)); - /// assert_eq!(iter.next(), Some(&1)); + /// assert_eq!(iter.peek_mut(), Some(&mut 1)); + /// assert_eq!(iter.peek_mut(), Some(&mut 1)); + /// assert_eq!(iter.next(), Some(1)); /// - /// if let Some(mut p) = iter.peek_mut() { - /// assert_eq!(*p, &2); + /// if let Some(p) = iter.peek_mut() { + /// assert_eq!(*p, 2); /// // put a value into the iterator - /// *p = &1000; + /// *p = 1000; /// } /// /// // The value reappears as the iterator continues - /// assert_eq!(iter.collect::>(), vec![&1000, &3]); + /// assert_eq!(iter.collect::>(), vec![1000, 3]); /// ``` /// [`peek`]: Peekable::peek /// [`peek_mut`]: Peekable::peek_mut @@ -1051,10 +1051,10 @@ pub trait Iterator { /// ``` /// let a = [-1i32, 0, 1]; /// - /// let mut iter = a.iter().skip_while(|x| x.is_negative()); + /// let mut iter = a.into_iter().skip_while(|x| x.is_negative()); /// - /// assert_eq!(iter.next(), Some(&0)); - /// assert_eq!(iter.next(), Some(&1)); + /// assert_eq!(iter.next(), Some(0)); + /// assert_eq!(iter.next(), Some(1)); /// assert_eq!(iter.next(), None); /// ``` /// @@ -1063,9 +1063,9 @@ pub trait Iterator { /// situation, where the type of the closure argument is a double reference: /// /// ``` - /// let a = [-1, 0, 1]; + /// let s = &[-1, 0, 1]; /// - /// let mut iter = a.iter().skip_while(|x| **x < 0); // need two *s! + /// let mut iter = s.iter().skip_while(|x| **x < 0); // need two *s! /// /// assert_eq!(iter.next(), Some(&0)); /// assert_eq!(iter.next(), Some(&1)); @@ -1077,14 +1077,14 @@ pub trait Iterator { /// ``` /// let a = [-1, 0, 1, -2]; /// - /// let mut iter = a.iter().skip_while(|x| **x < 0); + /// let mut iter = a.into_iter().skip_while(|&x| x < 0); /// - /// assert_eq!(iter.next(), Some(&0)); - /// assert_eq!(iter.next(), Some(&1)); + /// assert_eq!(iter.next(), Some(0)); + /// assert_eq!(iter.next(), Some(1)); /// /// // while this would have been false, since we already got a false, /// // skip_while() isn't used any more - /// assert_eq!(iter.next(), Some(&-2)); + /// assert_eq!(iter.next(), Some(-2)); /// /// assert_eq!(iter.next(), None); /// ``` @@ -1115,9 +1115,9 @@ pub trait Iterator { /// ``` /// let a = [-1i32, 0, 1]; /// - /// let mut iter = a.iter().take_while(|x| x.is_negative()); + /// let mut iter = a.into_iter().take_while(|x| x.is_negative()); /// - /// assert_eq!(iter.next(), Some(&-1)); + /// assert_eq!(iter.next(), Some(-1)); /// assert_eq!(iter.next(), None); /// ``` /// @@ -1126,9 +1126,9 @@ pub trait Iterator { /// situation, where the type of the closure is a double reference: /// /// ``` - /// let a = [-1, 0, 1]; + /// let s = &[-1, 0, 1]; /// - /// let mut iter = a.iter().take_while(|x| **x < 0); // need two *s! + /// let mut iter = s.iter().take_while(|x| **x < 0); // need two *s! /// /// assert_eq!(iter.next(), Some(&-1)); /// assert_eq!(iter.next(), None); @@ -1139,12 +1139,12 @@ pub trait Iterator { /// ``` /// let a = [-1, 0, 1, -2]; /// - /// let mut iter = a.iter().take_while(|x| **x < 0); + /// let mut iter = a.into_iter().take_while(|&x| x < 0); /// - /// assert_eq!(iter.next(), Some(&-1)); + /// assert_eq!(iter.next(), Some(-1)); /// /// // We have more elements that are less than zero, but since we already - /// // got a false, take_while() isn't used any more + /// // got a false, take_while() ignores the remaining elements. /// assert_eq!(iter.next(), None); /// ``` /// @@ -1154,18 +1154,15 @@ pub trait Iterator { /// /// ``` /// let a = [1, 2, 3, 4]; - /// let mut iter = a.iter(); + /// let mut iter = a.into_iter(); /// - /// let result: Vec = iter.by_ref() - /// .take_while(|n| **n != 3) - /// .cloned() - /// .collect(); + /// let result: Vec = iter.by_ref().take_while(|&n| n != 3).collect(); /// - /// assert_eq!(result, &[1, 2]); + /// assert_eq!(result, [1, 2]); /// - /// let result: Vec = iter.cloned().collect(); + /// let result: Vec = iter.collect(); /// - /// assert_eq!(result, &[4]); + /// assert_eq!(result, [4]); /// ``` /// /// The `3` is no longer there, because it was consumed in order to see if @@ -1193,7 +1190,7 @@ pub trait Iterator { /// ``` /// let a = [-1i32, 4, 0, 1]; /// - /// let mut iter = a.iter().map_while(|x| 16i32.checked_div(*x)); + /// let mut iter = a.into_iter().map_while(|x| 16i32.checked_div(x)); /// /// assert_eq!(iter.next(), Some(-16)); /// assert_eq!(iter.next(), Some(4)); @@ -1208,8 +1205,8 @@ pub trait Iterator { /// ``` /// let a = [-1i32, 4, 0, 1]; /// - /// let mut iter = a.iter() - /// .map(|x| 16i32.checked_div(*x)) + /// let mut iter = a.into_iter() + /// .map(|x| 16i32.checked_div(x)) /// .take_while(|x| x.is_some()) /// .map(|x| x.unwrap()); /// @@ -1223,12 +1220,12 @@ pub trait Iterator { /// ``` /// let a = [0, 1, 2, -3, 4, 5, -6]; /// - /// let iter = a.iter().map_while(|x| u32::try_from(*x).ok()); - /// let vec = iter.collect::>(); + /// let iter = a.into_iter().map_while(|x| u32::try_from(x).ok()); + /// let vec: Vec<_> = iter.collect(); /// - /// // We have more elements which could fit in u32 (4, 5), but `map_while` returned `None` for `-3` + /// // We have more elements that could fit in u32 (such as 4, 5), but `map_while` returned `None` for `-3` /// // (as the `predicate` returned `None`) and `collect` stops at the first `None` encountered. - /// assert_eq!(vec, vec![0, 1, 2]); + /// assert_eq!(vec, [0, 1, 2]); /// ``` /// /// Because `map_while()` needs to look at the value in order to see if it @@ -1237,17 +1234,17 @@ pub trait Iterator { /// /// ``` /// let a = [1, 2, -3, 4]; - /// let mut iter = a.iter(); + /// let mut iter = a.into_iter(); /// /// let result: Vec = iter.by_ref() - /// .map_while(|n| u32::try_from(*n).ok()) + /// .map_while(|n| u32::try_from(n).ok()) /// .collect(); /// - /// assert_eq!(result, &[1, 2]); + /// assert_eq!(result, [1, 2]); /// - /// let result: Vec = iter.cloned().collect(); + /// let result: Vec = iter.collect(); /// - /// assert_eq!(result, &[4]); + /// assert_eq!(result, [4]); /// ``` /// /// The `-3` is no longer there, because it was consumed in order to see if @@ -1255,7 +1252,7 @@ pub trait Iterator { /// /// Note that unlike [`take_while`] this iterator is **not** fused. /// It is also not specified what this iterator returns after the first [`None`] is returned. - /// If you need fused iterator, use [`fuse`]. + /// If you need a fused iterator, use [`fuse`]. /// /// [`fuse`]: Iterator::fuse #[inline] @@ -1282,9 +1279,9 @@ pub trait Iterator { /// ``` /// let a = [1, 2, 3]; /// - /// let mut iter = a.iter().skip(2); + /// let mut iter = a.into_iter().skip(2); /// - /// assert_eq!(iter.next(), Some(&3)); + /// assert_eq!(iter.next(), Some(3)); /// assert_eq!(iter.next(), None); /// ``` #[inline] @@ -1312,10 +1309,10 @@ pub trait Iterator { /// ``` /// let a = [1, 2, 3]; /// - /// let mut iter = a.iter().take(2); + /// let mut iter = a.into_iter().take(2); /// - /// assert_eq!(iter.next(), Some(&1)); - /// assert_eq!(iter.next(), Some(&2)); + /// assert_eq!(iter.next(), Some(1)); + /// assert_eq!(iter.next(), Some(2)); /// assert_eq!(iter.next(), None); /// ``` /// @@ -1340,6 +1337,25 @@ pub trait Iterator { /// assert_eq!(iter.next(), Some(2)); /// assert_eq!(iter.next(), None); /// ``` + /// + /// Use [`by_ref`] to take from the iterator without consuming it, and then + /// continue using the original iterator: + /// + /// ``` + /// let mut words = ["hello", "world", "of", "Rust"].into_iter(); + /// + /// // Take the first two words. + /// let hello_world: Vec<_> = words.by_ref().take(2).collect(); + /// assert_eq!(hello_world, vec!["hello", "world"]); + /// + /// // Collect the rest of the words. + /// // We can only do this because we used `by_ref` earlier. + /// let of_rust: Vec<_> = words.collect(); + /// assert_eq!(of_rust, vec!["of", "Rust"]); + /// ``` + /// + /// [`by_ref`]: Iterator::by_ref + #[doc(alias = "limit")] #[inline] #[stable(feature = "rust1", since = "1.0.0")] fn take(self, n: usize) -> Take @@ -1370,7 +1386,7 @@ pub trait Iterator { /// ``` /// let a = [1, 2, 3, 4]; /// - /// let mut iter = a.iter().scan(1, |state, &x| { + /// let mut iter = a.into_iter().scan(1, |state, x| { /// // each iteration, we'll multiply the state by the element ... /// *state = *state * x; /// @@ -1448,8 +1464,8 @@ pub trait Iterator { /// /// ``` /// let data = vec![vec![1, 2, 3, 4], vec![5, 6]]; - /// let flattened = data.into_iter().flatten().collect::>(); - /// assert_eq!(flattened, &[1, 2, 3, 4, 5, 6]); + /// let flattened: Vec<_> = data.into_iter().flatten().collect(); + /// assert_eq!(flattened, [1, 2, 3, 4, 5, 6]); /// ``` /// /// Mapping and then flattening: @@ -1483,11 +1499,11 @@ pub trait Iterator { /// ``` /// let options = vec![Some(123), Some(321), None, Some(231)]; /// let flattened_options: Vec<_> = options.into_iter().flatten().collect(); - /// assert_eq!(flattened_options, vec![123, 321, 231]); + /// assert_eq!(flattened_options, [123, 321, 231]); /// /// let results = vec![Ok(123), Ok(321), Err(456), Ok(231)]; /// let flattened_results: Vec<_> = results.into_iter().flatten().collect(); - /// assert_eq!(flattened_results, vec![123, 321, 231]); + /// assert_eq!(flattened_results, [123, 321, 231]); /// ``` /// /// Flattening only removes one level of nesting at a time: @@ -1495,11 +1511,11 @@ pub trait Iterator { /// ``` /// let d3 = [[[1, 2], [3, 4]], [[5, 6], [7, 8]]]; /// - /// let d2 = d3.iter().flatten().collect::>(); - /// assert_eq!(d2, [&[1, 2], &[3, 4], &[5, 6], &[7, 8]]); + /// let d2: Vec<_> = d3.into_iter().flatten().collect(); + /// assert_eq!(d2, [[1, 2], [3, 4], [5, 6], [7, 8]]); /// - /// let d1 = d3.iter().flatten().flatten().collect::>(); - /// assert_eq!(d1, [&1, &2, &3, &4, &5, &6, &7, &8]); + /// let d1: Vec<_> = d3.into_iter().flatten().flatten().collect(); + /// assert_eq!(d1, [1, 2, 3, 4, 5, 6, 7, 8]); /// ``` /// /// Here we see that `flatten()` does not perform a "deep" flatten. @@ -1886,7 +1902,7 @@ pub trait Iterator { /// let a = [1, 2, 3]; /// /// let doubled: Vec = a.iter() - /// .map(|&x| x * 2) + /// .map(|x| x * 2) /// .collect(); /// /// assert_eq!(vec![2, 4, 6], doubled); @@ -1902,7 +1918,7 @@ pub trait Iterator { /// /// let a = [1, 2, 3]; /// - /// let doubled: VecDeque = a.iter().map(|&x| x * 2).collect(); + /// let doubled: VecDeque = a.iter().map(|x| x * 2).collect(); /// /// assert_eq!(2, doubled[0]); /// assert_eq!(4, doubled[1]); @@ -1935,8 +1951,8 @@ pub trait Iterator { /// ``` /// let chars = ['g', 'd', 'k', 'k', 'n']; /// - /// let hello: String = chars.iter() - /// .map(|&x| x as u8) + /// let hello: String = chars.into_iter() + /// .map(|x| x as u8) /// .map(|x| (x + 1) as char) /// .collect(); /// @@ -1949,14 +1965,14 @@ pub trait Iterator { /// ``` /// let results = [Ok(1), Err("nope"), Ok(3), Err("bad")]; /// - /// let result: Result, &str> = results.iter().cloned().collect(); + /// let result: Result, &str> = results.into_iter().collect(); /// /// // gives us the first error /// assert_eq!(Err("nope"), result); /// /// let results = [Ok(1), Ok(3)]; /// - /// let result: Result, &str> = results.iter().cloned().collect(); + /// let result: Result, &str> = results.into_iter().collect(); /// /// // gives us the list of answers /// assert_eq!(Ok(vec![1, 3]), result); @@ -2087,8 +2103,8 @@ pub trait Iterator { /// let a = [1, 2, 3]; /// let mut vec: Vec:: = vec![0, 1]; /// - /// a.iter().map(|&x| x * 2).collect_into(&mut vec); - /// a.iter().map(|&x| x * 10).collect_into(&mut vec); + /// a.iter().map(|x| x * 2).collect_into(&mut vec); + /// a.iter().map(|x| x * 10).collect_into(&mut vec); /// /// assert_eq!(vec, vec![0, 1, 2, 4, 6, 10, 20, 30]); /// ``` @@ -2101,8 +2117,8 @@ pub trait Iterator { /// let a = [1, 2, 3]; /// let mut vec: Vec:: = Vec::with_capacity(6); /// - /// a.iter().map(|&x| x * 2).collect_into(&mut vec); - /// a.iter().map(|&x| x * 10).collect_into(&mut vec); + /// a.iter().map(|x| x * 2).collect_into(&mut vec); + /// a.iter().map(|x| x * 10).collect_into(&mut vec); /// /// assert_eq!(6, vec.capacity()); /// assert_eq!(vec, vec![2, 4, 6, 10, 20, 30]); @@ -2156,8 +2172,8 @@ pub trait Iterator { /// .into_iter() /// .partition(|n| n % 2 == 0); /// - /// assert_eq!(even, vec![2]); - /// assert_eq!(odd, vec![1, 3]); + /// assert_eq!(even, [2]); + /// assert_eq!(odd, [1, 3]); /// ``` #[stable(feature = "rust1", since = "1.0.0")] fn partition(self, f: F) -> (B, B) @@ -2215,11 +2231,11 @@ pub trait Iterator { /// let mut a = [1, 2, 3, 4, 5, 6, 7]; /// /// // Partition in-place between evens and odds - /// let i = a.iter_mut().partition_in_place(|&n| n % 2 == 0); + /// let i = a.iter_mut().partition_in_place(|n| n % 2 == 0); /// /// assert_eq!(i, 3); - /// assert!(a[..i].iter().all(|&n| n % 2 == 0)); // evens - /// assert!(a[i..].iter().all(|&n| n % 2 == 1)); // odds + /// assert!(a[..i].iter().all(|n| n % 2 == 0)); // evens + /// assert!(a[i..].iter().all(|n| n % 2 == 1)); // odds /// ``` #[unstable(feature = "iter_partition_in_place", reason = "new API", issue = "62543")] fn partition_in_place<'a, T: 'a, P>(mut self, ref mut predicate: P) -> usize @@ -2326,7 +2342,7 @@ pub trait Iterator { /// let a = [1, 2, 3]; /// /// // the checked sum of all of the elements of the array - /// let sum = a.iter().try_fold(0i8, |acc, &x| acc.checked_add(x)); + /// let sum = a.into_iter().try_fold(0i8, |acc, x| acc.checked_add(x)); /// /// assert_eq!(sum, Some(6)); /// ``` @@ -2335,16 +2351,16 @@ pub trait Iterator { /// /// ``` /// let a = [10, 20, 30, 100, 40, 50]; - /// let mut it = a.iter(); + /// let mut iter = a.into_iter(); /// /// // This sum overflows when adding the 100 element - /// let sum = it.try_fold(0i8, |acc, &x| acc.checked_add(x)); + /// let sum = iter.try_fold(0i8, |acc, x| acc.checked_add(x)); /// assert_eq!(sum, None); /// /// // Because it short-circuited, the remaining elements are still /// // available through the iterator. - /// assert_eq!(it.len(), 2); - /// assert_eq!(it.next(), Some(&40)); + /// assert_eq!(iter.len(), 2); + /// assert_eq!(iter.next(), Some(40)); /// ``` /// /// While you cannot `break` from a closure, the [`ControlFlow`] type allows @@ -2697,9 +2713,9 @@ pub trait Iterator { /// ``` /// let a = [1, 2, 3]; /// - /// assert!(a.iter().all(|&x| x > 0)); + /// assert!(a.into_iter().all(|x| x > 0)); /// - /// assert!(!a.iter().all(|&x| x > 2)); + /// assert!(!a.into_iter().all(|x| x > 2)); /// ``` /// /// Stopping at the first `false`: @@ -2707,12 +2723,12 @@ pub trait Iterator { /// ``` /// let a = [1, 2, 3]; /// - /// let mut iter = a.iter(); + /// let mut iter = a.into_iter(); /// - /// assert!(!iter.all(|&x| x != 2)); + /// assert!(!iter.all(|x| x != 2)); /// /// // we can still use `iter`, as there are more elements. - /// assert_eq!(iter.next(), Some(&3)); + /// assert_eq!(iter.next(), Some(3)); /// ``` #[inline] #[stable(feature = "rust1", since = "1.0.0")] @@ -2750,9 +2766,9 @@ pub trait Iterator { /// ``` /// let a = [1, 2, 3]; /// - /// assert!(a.iter().any(|&x| x > 0)); + /// assert!(a.into_iter().any(|x| x > 0)); /// - /// assert!(!a.iter().any(|&x| x > 5)); + /// assert!(!a.into_iter().any(|x| x > 5)); /// ``` /// /// Stopping at the first `true`: @@ -2760,12 +2776,12 @@ pub trait Iterator { /// ``` /// let a = [1, 2, 3]; /// - /// let mut iter = a.iter(); + /// let mut iter = a.into_iter(); /// - /// assert!(iter.any(|&x| x != 2)); + /// assert!(iter.any(|x| x != 2)); /// /// // we can still use `iter`, as there are more elements. - /// assert_eq!(iter.next(), Some(&2)); + /// assert_eq!(iter.next(), Some(2)); /// ``` #[inline] #[stable(feature = "rust1", since = "1.0.0")] @@ -2811,9 +2827,8 @@ pub trait Iterator { /// ``` /// let a = [1, 2, 3]; /// - /// assert_eq!(a.iter().find(|&&x| x == 2), Some(&2)); - /// - /// assert_eq!(a.iter().find(|&&x| x == 5), None); + /// assert_eq!(a.into_iter().find(|&x| x == 2), Some(2)); + /// assert_eq!(a.into_iter().find(|&x| x == 5), None); /// ``` /// /// Stopping at the first `true`: @@ -2821,12 +2836,12 @@ pub trait Iterator { /// ``` /// let a = [1, 2, 3]; /// - /// let mut iter = a.iter(); + /// let mut iter = a.into_iter(); /// - /// assert_eq!(iter.find(|&&x| x == 2), Some(&2)); + /// assert_eq!(iter.find(|&x| x == 2), Some(2)); /// /// // we can still use `iter`, as there are more elements. - /// assert_eq!(iter.next(), Some(&3)); + /// assert_eq!(iter.next(), Some(3)); /// ``` /// /// Note that `iter.find(f)` is equivalent to `iter.filter(f).next()`. @@ -2894,13 +2909,13 @@ pub trait Iterator { /// let a = ["1", "2", "lol", "NaN", "5"]; /// /// let is_my_num = |s: &str, search: i32| -> Result { - /// Ok(s.parse::()? == search) + /// Ok(s.parse::()? == search) /// }; /// - /// let result = a.iter().try_find(|&&s| is_my_num(s, 2)); - /// assert_eq!(result, Ok(Some(&"2"))); + /// let result = a.into_iter().try_find(|&s| is_my_num(s, 2)); + /// assert_eq!(result, Ok(Some("2"))); /// - /// let result = a.iter().try_find(|&&s| is_my_num(s, 5)); + /// let result = a.into_iter().try_find(|&s| is_my_num(s, 5)); /// assert!(result.is_err()); /// ``` /// @@ -2912,11 +2927,11 @@ pub trait Iterator { /// use std::num::NonZero; /// /// let a = [3, 5, 7, 4, 9, 0, 11u32]; - /// let result = a.iter().try_find(|&&x| NonZero::new(x).map(|y| y.is_power_of_two())); - /// assert_eq!(result, Some(Some(&4))); - /// let result = a.iter().take(3).try_find(|&&x| NonZero::new(x).map(|y| y.is_power_of_two())); + /// let result = a.into_iter().try_find(|&x| NonZero::new(x).map(|y| y.is_power_of_two())); + /// assert_eq!(result, Some(Some(4))); + /// let result = a.into_iter().take(3).try_find(|&x| NonZero::new(x).map(|y| y.is_power_of_two())); /// assert_eq!(result, Some(None)); - /// let result = a.iter().rev().try_find(|&&x| NonZero::new(x).map(|y| y.is_power_of_two())); + /// let result = a.into_iter().rev().try_find(|&x| NonZero::new(x).map(|y| y.is_power_of_two())); /// assert_eq!(result, None); /// ``` #[inline] @@ -2964,7 +2979,7 @@ pub trait Iterator { /// /// The method does no guarding against overflows, so if there are more /// than [`usize::MAX`] non-matching elements, it either produces the wrong - /// result or panics. If debug assertions are enabled, a panic is + /// result or panics. If overflow checks are enabled, a panic is /// guaranteed. /// /// # Panics @@ -2981,9 +2996,9 @@ pub trait Iterator { /// ``` /// let a = [1, 2, 3]; /// - /// assert_eq!(a.iter().position(|&x| x == 2), Some(1)); + /// assert_eq!(a.into_iter().position(|x| x == 2), Some(1)); /// - /// assert_eq!(a.iter().position(|&x| x == 5), None); + /// assert_eq!(a.into_iter().position(|x| x == 5), None); /// ``` /// /// Stopping at the first `true`: @@ -2991,15 +3006,15 @@ pub trait Iterator { /// ``` /// let a = [1, 2, 3, 4]; /// - /// let mut iter = a.iter(); + /// let mut iter = a.into_iter(); /// - /// assert_eq!(iter.position(|&x| x >= 2), Some(1)); + /// assert_eq!(iter.position(|x| x >= 2), Some(1)); /// /// // we can still use `iter`, as there are more elements. - /// assert_eq!(iter.next(), Some(&3)); + /// assert_eq!(iter.next(), Some(3)); /// /// // The returned index depends on iterator state - /// assert_eq!(iter.position(|&x| x == 4), Some(0)); + /// assert_eq!(iter.position(|x| x == 4), Some(0)); /// /// ``` #[inline] @@ -3049,9 +3064,9 @@ pub trait Iterator { /// ``` /// let a = [1, 2, 3]; /// - /// assert_eq!(a.iter().rposition(|&x| x == 3), Some(2)); + /// assert_eq!(a.into_iter().rposition(|x| x == 3), Some(2)); /// - /// assert_eq!(a.iter().rposition(|&x| x == 5), None); + /// assert_eq!(a.into_iter().rposition(|x| x == 5), None); /// ``` /// /// Stopping at the first `true`: @@ -3059,13 +3074,13 @@ pub trait Iterator { /// ``` /// let a = [-1, 2, 3, 4]; /// - /// let mut iter = a.iter(); + /// let mut iter = a.into_iter(); /// - /// assert_eq!(iter.rposition(|&x| x >= 2), Some(3)); + /// assert_eq!(iter.rposition(|x| x >= 2), Some(3)); /// /// // we can still use `iter`, as there are more elements. - /// assert_eq!(iter.next(), Some(&-1)); - /// assert_eq!(iter.next_back(), Some(&3)); + /// assert_eq!(iter.next(), Some(-1)); + /// assert_eq!(iter.next_back(), Some(3)); /// ``` #[inline] #[stable(feature = "rust1", since = "1.0.0")] @@ -3111,10 +3126,10 @@ pub trait Iterator { /// /// ``` /// let a = [1, 2, 3]; - /// let b: Vec = Vec::new(); + /// let b: [u32; 0] = []; /// - /// assert_eq!(a.iter().max(), Some(&3)); - /// assert_eq!(b.iter().max(), None); + /// assert_eq!(a.into_iter().max(), Some(3)); + /// assert_eq!(b.into_iter().max(), None); /// ``` #[inline] #[stable(feature = "rust1", since = "1.0.0")] @@ -3147,10 +3162,10 @@ pub trait Iterator { /// /// ``` /// let a = [1, 2, 3]; - /// let b: Vec = Vec::new(); + /// let b: [u32; 0] = []; /// - /// assert_eq!(a.iter().min(), Some(&1)); - /// assert_eq!(b.iter().min(), None); + /// assert_eq!(a.into_iter().min(), Some(1)); + /// assert_eq!(b.into_iter().min(), None); /// ``` #[inline] #[stable(feature = "rust1", since = "1.0.0")] @@ -3172,7 +3187,7 @@ pub trait Iterator { /// /// ``` /// let a = [-3_i32, 0, 1, 5, -10]; - /// assert_eq!(*a.iter().max_by_key(|x| x.abs()).unwrap(), -10); + /// assert_eq!(a.into_iter().max_by_key(|x| x.abs()).unwrap(), -10); /// ``` #[inline] #[stable(feature = "iter_cmp_by_key", since = "1.6.0")] @@ -3205,7 +3220,7 @@ pub trait Iterator { /// /// ``` /// let a = [-3_i32, 0, 1, 5, -10]; - /// assert_eq!(*a.iter().max_by(|x, y| x.cmp(y)).unwrap(), 5); + /// assert_eq!(a.into_iter().max_by(|x, y| x.cmp(y)).unwrap(), 5); /// ``` #[inline] #[stable(feature = "iter_max_by", since = "1.15.0")] @@ -3232,7 +3247,7 @@ pub trait Iterator { /// /// ``` /// let a = [-3_i32, 0, 1, 5, -10]; - /// assert_eq!(*a.iter().min_by_key(|x| x.abs()).unwrap(), 0); + /// assert_eq!(a.into_iter().min_by_key(|x| x.abs()).unwrap(), 0); /// ``` #[inline] #[stable(feature = "iter_cmp_by_key", since = "1.6.0")] @@ -3265,7 +3280,7 @@ pub trait Iterator { /// /// ``` /// let a = [-3_i32, 0, 1, 5, -10]; - /// assert_eq!(*a.iter().min_by(|x, y| x.cmp(y)).unwrap(), -10); + /// assert_eq!(a.into_iter().min_by(|x, y| x.cmp(y)).unwrap(), -10); /// ``` #[inline] #[stable(feature = "iter_min_by", since = "1.15.0")] @@ -3295,11 +3310,11 @@ pub trait Iterator { /// ``` /// let a = [1, 2, 3]; /// - /// let mut iter = a.iter().rev(); + /// let mut iter = a.into_iter().rev(); /// - /// assert_eq!(iter.next(), Some(&3)); - /// assert_eq!(iter.next(), Some(&2)); - /// assert_eq!(iter.next(), Some(&1)); + /// assert_eq!(iter.next(), Some(3)); + /// assert_eq!(iter.next(), Some(2)); + /// assert_eq!(iter.next(), Some(1)); /// /// assert_eq!(iter.next(), None); /// ``` @@ -3328,7 +3343,7 @@ pub trait Iterator { /// ``` /// let a = [(1, 2), (3, 4), (5, 6)]; /// - /// let (left, right): (Vec<_>, Vec<_>) = a.iter().cloned().unzip(); + /// let (left, right): (Vec<_>, Vec<_>) = a.into_iter().unzip(); /// /// assert_eq!(left, [1, 3, 5]); /// assert_eq!(right, [2, 4, 6]); @@ -3336,7 +3351,7 @@ pub trait Iterator { /// // you can also unzip multiple nested tuples at once /// let a = [(1, (2, 3)), (4, (5, 6))]; /// - /// let (x, (y, z)): (Vec<_>, (Vec<_>, Vec<_>)) = a.iter().cloned().unzip(); + /// let (x, (y, z)): (Vec<_>, (Vec<_>, Vec<_>)) = a.into_iter().unzip(); /// assert_eq!(x, [1, 4]); /// assert_eq!(y, [2, 5]); /// assert_eq!(z, [3, 6]); @@ -3368,8 +3383,8 @@ pub trait Iterator { /// // copied is the same as .map(|&x| x) /// let v_map: Vec<_> = a.iter().map(|&x| x).collect(); /// - /// assert_eq!(v_copied, vec![1, 2, 3]); - /// assert_eq!(v_map, vec![1, 2, 3]); + /// assert_eq!(v_copied, [1, 2, 3]); + /// assert_eq!(v_map, [1, 2, 3]); /// ``` #[stable(feature = "iter_copied", since = "1.36.0")] #[rustc_diagnostic_item = "iter_copied"] @@ -3404,8 +3419,8 @@ pub trait Iterator { /// // cloned is the same as .map(|&x| x), for integers /// let v_map: Vec<_> = a.iter().map(|&x| x).collect(); /// - /// assert_eq!(v_cloned, vec![1, 2, 3]); - /// assert_eq!(v_map, vec![1, 2, 3]); + /// assert_eq!(v_cloned, [1, 2, 3]); + /// assert_eq!(v_map, [1, 2, 3]); /// ``` /// /// To get the best performance, try to clone late: @@ -3441,15 +3456,14 @@ pub trait Iterator { /// ``` /// let a = [1, 2, 3]; /// - /// let mut it = a.iter().cycle(); + /// let mut iter = a.into_iter().cycle(); /// - /// assert_eq!(it.next(), Some(&1)); - /// assert_eq!(it.next(), Some(&2)); - /// assert_eq!(it.next(), Some(&3)); - /// assert_eq!(it.next(), Some(&1)); - /// assert_eq!(it.next(), Some(&2)); - /// assert_eq!(it.next(), Some(&3)); - /// assert_eq!(it.next(), Some(&1)); + /// loop { + /// assert_eq!(iter.next(), Some(1)); + /// assert_eq!(iter.next(), Some(2)); + /// assert_eq!(iter.next(), Some(3)); + /// # break; + /// } /// ``` #[stable(feature = "rust1", since = "1.0.0")] #[inline] @@ -3516,7 +3530,7 @@ pub trait Iterator { /// # Panics /// /// When calling `sum()` and a primitive integer type is being returned, this - /// method will panic if the computation overflows and debug assertions are + /// method will panic if the computation overflows and overflow checks are /// enabled. /// /// # Examples @@ -3550,7 +3564,7 @@ pub trait Iterator { /// # Panics /// /// When calling `product()` and a primitive integer type is being returned, - /// method will panic if the computation overflows and debug assertions are + /// method will panic if the computation overflows and overflow checks are /// enabled. /// /// # Examples @@ -3607,9 +3621,9 @@ pub trait Iterator { /// let xs = [1, 2, 3, 4]; /// let ys = [1, 4, 9, 16]; /// - /// assert_eq!(xs.iter().cmp_by(&ys, |&x, &y| x.cmp(&y)), Ordering::Less); - /// assert_eq!(xs.iter().cmp_by(&ys, |&x, &y| (x * x).cmp(&y)), Ordering::Equal); - /// assert_eq!(xs.iter().cmp_by(&ys, |&x, &y| (2 * x).cmp(&y)), Ordering::Greater); + /// assert_eq!(xs.into_iter().cmp_by(ys, |x, y| x.cmp(&y)), Ordering::Less); + /// assert_eq!(xs.into_iter().cmp_by(ys, |x, y| (x * x).cmp(&y)), Ordering::Equal); + /// assert_eq!(xs.into_iter().cmp_by(ys, |x, y| (2 * x).cmp(&y)), Ordering::Greater); /// ``` #[unstable(feature = "iter_order_by", issue = "64295")] fn cmp_by(self, other: I, cmp: F) -> Ordering @@ -3691,15 +3705,15 @@ pub trait Iterator { /// let ys = [1.0, 4.0, 9.0, 16.0]; /// /// assert_eq!( - /// xs.iter().partial_cmp_by(&ys, |&x, &y| x.partial_cmp(&y)), + /// xs.iter().partial_cmp_by(ys, |x, y| x.partial_cmp(&y)), /// Some(Ordering::Less) /// ); /// assert_eq!( - /// xs.iter().partial_cmp_by(&ys, |&x, &y| (x * x).partial_cmp(&y)), + /// xs.iter().partial_cmp_by(ys, |x, y| (x * x).partial_cmp(&y)), /// Some(Ordering::Equal) /// ); /// assert_eq!( - /// xs.iter().partial_cmp_by(&ys, |&x, &y| (2.0 * x).partial_cmp(&y)), + /// xs.iter().partial_cmp_by(ys, |x, y| (2.0 * x).partial_cmp(&y)), /// Some(Ordering::Greater) /// ); /// ``` @@ -3757,7 +3771,7 @@ pub trait Iterator { /// let xs = [1, 2, 3, 4]; /// let ys = [1, 4, 9, 16]; /// - /// assert!(xs.iter().eq_by(&ys, |&x, &y| x * x == y)); + /// assert!(xs.iter().eq_by(ys, |x, y| x * x == y)); /// ``` #[unstable(feature = "iter_order_by", issue = "64295")] fn eq_by(self, other: I, eq: F) -> bool diff --git a/library/core/src/lib.rs b/library/core/src/lib.rs index dc06aa4c38d55..41dcf43c725da 100644 --- a/library/core/src/lib.rs +++ b/library/core/src/lib.rs @@ -95,13 +95,13 @@ // // Library features: // tidy-alphabetical-start +#![cfg_attr(not(bootstrap), feature(eii))] #![feature(array_ptr_get)] #![feature(asm_experimental_arch)] #![feature(bigint_helper_methods)] #![feature(bstr)] #![feature(bstr_internals)] #![feature(cfg_match)] -#![feature(closure_track_caller)] #![feature(const_carrying_mul_add)] #![feature(const_eval_select)] #![feature(core_intrinsics)] @@ -119,7 +119,6 @@ #![feature(ptr_metadata)] #![feature(set_ptr_value)] #![feature(slice_as_array)] -#![feature(slice_as_chunks)] #![feature(slice_ptr_get)] #![feature(str_internals)] #![feature(str_split_inclusive_remainder)] @@ -127,6 +126,7 @@ #![feature(ub_checks)] #![feature(unchecked_neg)] #![feature(unchecked_shifts)] +#![feature(unsafe_pinned)] #![feature(utf16_extra)] #![feature(variant_count)] // tidy-alphabetical-end @@ -169,7 +169,6 @@ #![feature(negative_impls)] #![feature(never_type)] #![feature(no_core)] -#![feature(no_sanitize)] #![feature(optimize_attribute)] #![feature(prelude_import)] #![feature(repr_simd)] @@ -239,6 +238,9 @@ pub mod contracts; #[unstable(feature = "cfg_match", issue = "115585")] pub use crate::macros::cfg_match; +#[cfg(not(bootstrap))] +#[stable(feature = "panic_hooks", since = "1.10.0")] +pub use crate::panic::panic_handler; #[macro_use] mod internal_macros; diff --git a/library/core/src/macros/mod.rs b/library/core/src/macros/mod.rs index 5f200b31d1ae7..8e7036379ea13 100644 --- a/library/core/src/macros/mod.rs +++ b/library/core/src/macros/mod.rs @@ -237,9 +237,10 @@ pub macro assert_matches { /// ``` #[unstable(feature = "cfg_match", issue = "115585")] #[rustc_diagnostic_item = "cfg_match"] +#[rustc_macro_transparency = "semitransparent"] pub macro cfg_match { ({ $($tt:tt)* }) => {{ - cfg_match! { $($tt)* } + $crate::cfg_match! { $($tt)* } }}, (_ => { $($output:tt)* }) => { $($output)* @@ -249,10 +250,10 @@ pub macro cfg_match { $($( $rest:tt )+)? ) => { #[cfg($cfg)] - cfg_match! { _ => $output } + $crate::cfg_match! { _ => $output } $( #[cfg(not($cfg))] - cfg_match! { $($rest)+ } + $crate::cfg_match! { $($rest)+ } )? }, } @@ -1137,6 +1138,10 @@ pub(crate) mod builtin { issue = "29599", reason = "`concat_idents` is not stable enough for use and is subject to change" )] + #[deprecated( + since = "1.88.0", + note = "use `${concat(...)}` with the `macro_metavar_expr_concat` feature instead" + )] #[rustc_builtin_macro] #[macro_export] macro_rules! concat_idents { @@ -1656,7 +1661,6 @@ pub(crate) mod builtin { #[unstable( feature = "test", issue = "50297", - soft, reason = "`bench` is a part of custom test frameworks which are unstable" )] #[allow_internal_unstable(test, rustc_attrs, coverage_attribute)] @@ -1743,8 +1747,8 @@ pub(crate) mod builtin { /* compiler built-in */ } - /// Provide a list of type aliases and other opaque-type-containing type definitions. - /// This list will be used in the body of the item it is applied to define opaque + /// Provide a list of type aliases and other opaque-type-containing type definitions + /// to an item with a body. This list will be used in that body to define opaque /// types' hidden types. /// Can only be applied to things that have bodies. #[unstable( @@ -1753,7 +1757,6 @@ pub(crate) mod builtin { reason = "`type_alias_impl_trait` has open design concerns" )] #[rustc_builtin_macro] - #[cfg(not(bootstrap))] pub macro define_opaque($($tt:tt)*) { /* compiler built-in */ } @@ -1780,4 +1783,32 @@ pub(crate) mod builtin { pub macro deref($pat:pat) { builtin # deref($pat) } + + /// Externally Implementable Item: Defines an attribute macro that can override the item + /// this is applied to. + #[cfg(not(bootstrap))] + #[unstable(feature = "eii", issue = "125418")] + #[rustc_builtin_macro] + #[allow_internal_unstable(eii_internals, decl_macro, rustc_attrs)] + pub macro eii($item:item) { + /* compiler built-in */ + } + + /// Unsafely Externally Implementable Item: Defines an unsafe attribute macro that can override + /// the item this is applied to. + #[cfg(not(bootstrap))] + #[unstable(feature = "eii", issue = "125418")] + #[rustc_builtin_macro] + #[allow_internal_unstable(eii_internals, decl_macro, rustc_attrs)] + pub macro unsafe_eii($item:item) { + /* compiler built-in */ + } + + /// Impl detail of EII + #[cfg(not(bootstrap))] + #[unstable(feature = "eii_internals", issue = "none")] + #[rustc_builtin_macro] + pub macro eii_macro_for($item:item) { + /* compiler built-in */ + } } diff --git a/library/core/src/marker.rs b/library/core/src/marker.rs index 68011310d2cad..9dc20beda6c64 100644 --- a/library/core/src/marker.rs +++ b/library/core/src/marker.rs @@ -17,6 +17,7 @@ use crate::cell::UnsafeCell; use crate::cmp; use crate::fmt::Debug; use crate::hash::{Hash, Hasher}; +use crate::pin::UnsafePinned; /// Implements a given marker trait for multiple types at the same time. /// @@ -878,6 +879,23 @@ marker_impls! { {T: ?Sized} &mut T, } +/// Used to determine whether a type contains any `UnsafePinned` (or `PhantomPinned`) internally, +/// but not through an indirection. This affects, for example, whether we emit `noalias` metadata +/// for `&mut T` or not. +/// +/// This is part of [RFC 3467](https://rust-lang.github.io/rfcs/3467-unsafe-pinned.html), and is +/// tracked by [#125735](https://github.com/rust-lang/rust/issues/125735). +#[cfg_attr(not(bootstrap), lang = "unsafe_unpin")] +#[cfg_attr(bootstrap, allow(dead_code))] +pub(crate) unsafe auto trait UnsafeUnpin {} + +impl !UnsafeUnpin for UnsafePinned {} +unsafe impl UnsafeUnpin for PhantomData {} +unsafe impl UnsafeUnpin for *const T {} +unsafe impl UnsafeUnpin for *mut T {} +unsafe impl UnsafeUnpin for &T {} +unsafe impl UnsafeUnpin for &mut T {} + /// Types that do not require any pinning guarantees. /// /// For information on what "pinning" is, see the [`pin` module] documentation. @@ -953,6 +971,11 @@ pub auto trait Unpin {} /// A marker type which does not implement `Unpin`. /// /// If a type contains a `PhantomPinned`, it will not implement `Unpin` by default. +// +// FIXME(unsafe_pinned): This is *not* a stable guarantee we want to make, at least not yet. +// Note that for backwards compatibility with the new [`UnsafePinned`] wrapper type, placing this +// marker in your struct acts as if you wrapped the entire struct in an `UnsafePinned`. This type +// will likely eventually be deprecated, and all new code should be using `UnsafePinned` instead. #[stable(feature = "pin", since = "1.33.0")] #[derive(Debug, Default, Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)] pub struct PhantomPinned; @@ -960,6 +983,12 @@ pub struct PhantomPinned; #[stable(feature = "pin", since = "1.33.0")] impl !Unpin for PhantomPinned {} +// This is a small hack to allow existing code which uses PhantomPinned to opt-out of noalias to +// continue working. Ideally PhantomPinned could just wrap an `UnsafePinned<()>` to get the same +// effect, but we can't add a new field to an already stable unit struct -- that would be a breaking +// change. +impl !UnsafeUnpin for PhantomPinned {} + marker_impls! { #[stable(feature = "pin", since = "1.33.0")] Unpin for diff --git a/library/core/src/net/socket_addr.rs b/library/core/src/net/socket_addr.rs index 21753d0092497..936f9f64930d5 100644 --- a/library/core/src/net/socket_addr.rs +++ b/library/core/src/net/socket_addr.rs @@ -210,7 +210,7 @@ impl SocketAddr { /// ``` #[inline] #[stable(feature = "sockaddr_setters", since = "1.9.0")] - #[rustc_const_stable(feature = "const_sockaddr_setters", since = "CURRENT_RUSTC_VERSION")] + #[rustc_const_stable(feature = "const_sockaddr_setters", since = "1.87.0")] pub const fn set_ip(&mut self, new_ip: IpAddr) { // `match (*self, new_ip)` would have us mutate a copy of self only to throw it away. match (self, new_ip) { @@ -254,7 +254,7 @@ impl SocketAddr { /// ``` #[inline] #[stable(feature = "sockaddr_setters", since = "1.9.0")] - #[rustc_const_stable(feature = "const_sockaddr_setters", since = "CURRENT_RUSTC_VERSION")] + #[rustc_const_stable(feature = "const_sockaddr_setters", since = "1.87.0")] pub const fn set_port(&mut self, new_port: u16) { match *self { SocketAddr::V4(ref mut a) => a.set_port(new_port), @@ -360,7 +360,7 @@ impl SocketAddrV4 { /// ``` #[inline] #[stable(feature = "sockaddr_setters", since = "1.9.0")] - #[rustc_const_stable(feature = "const_sockaddr_setters", since = "CURRENT_RUSTC_VERSION")] + #[rustc_const_stable(feature = "const_sockaddr_setters", since = "1.87.0")] pub const fn set_ip(&mut self, new_ip: Ipv4Addr) { self.ip = new_ip; } @@ -396,7 +396,7 @@ impl SocketAddrV4 { /// ``` #[inline] #[stable(feature = "sockaddr_setters", since = "1.9.0")] - #[rustc_const_stable(feature = "const_sockaddr_setters", since = "CURRENT_RUSTC_VERSION")] + #[rustc_const_stable(feature = "const_sockaddr_setters", since = "1.87.0")] pub const fn set_port(&mut self, new_port: u16) { self.port = new_port; } @@ -458,7 +458,7 @@ impl SocketAddrV6 { /// ``` #[inline] #[stable(feature = "sockaddr_setters", since = "1.9.0")] - #[rustc_const_stable(feature = "const_sockaddr_setters", since = "CURRENT_RUSTC_VERSION")] + #[rustc_const_stable(feature = "const_sockaddr_setters", since = "1.87.0")] pub const fn set_ip(&mut self, new_ip: Ipv6Addr) { self.ip = new_ip; } @@ -494,7 +494,7 @@ impl SocketAddrV6 { /// ``` #[inline] #[stable(feature = "sockaddr_setters", since = "1.9.0")] - #[rustc_const_stable(feature = "const_sockaddr_setters", since = "CURRENT_RUSTC_VERSION")] + #[rustc_const_stable(feature = "const_sockaddr_setters", since = "1.87.0")] pub const fn set_port(&mut self, new_port: u16) { self.port = new_port; } @@ -542,7 +542,7 @@ impl SocketAddrV6 { /// ``` #[inline] #[stable(feature = "sockaddr_setters", since = "1.9.0")] - #[rustc_const_stable(feature = "const_sockaddr_setters", since = "CURRENT_RUSTC_VERSION")] + #[rustc_const_stable(feature = "const_sockaddr_setters", since = "1.87.0")] pub const fn set_flowinfo(&mut self, new_flowinfo: u32) { self.flowinfo = new_flowinfo; } @@ -585,7 +585,7 @@ impl SocketAddrV6 { /// ``` #[inline] #[stable(feature = "sockaddr_setters", since = "1.9.0")] - #[rustc_const_stable(feature = "const_sockaddr_setters", since = "CURRENT_RUSTC_VERSION")] + #[rustc_const_stable(feature = "const_sockaddr_setters", since = "1.87.0")] pub const fn set_scope_id(&mut self, new_scope_id: u32) { self.scope_id = new_scope_id; } diff --git a/library/core/src/num/diy_float.rs b/library/core/src/num/diy_float.rs index ce7f6475d0599..e054e7f3f10a7 100644 --- a/library/core/src/num/diy_float.rs +++ b/library/core/src/num/diy_float.rs @@ -21,61 +21,29 @@ pub struct Fp { impl Fp { /// Returns a correctly rounded product of itself and `other`. - pub fn mul(&self, other: &Fp) -> Fp { - const MASK: u64 = 0xffffffff; - let a = self.f >> 32; - let b = self.f & MASK; - let c = other.f >> 32; - let d = other.f & MASK; - let ac = a * c; - let bc = b * c; - let ad = a * d; - let bd = b * d; - let tmp = (bd >> 32) + (ad & MASK) + (bc & MASK) + (1 << 31) /* round */; - let f = ac + (ad >> 32) + (bc >> 32) + (tmp >> 32); + pub fn mul(self, other: Self) -> Self { + let (lo, hi) = self.f.widening_mul(other.f); + let f = hi + (lo >> 63) /* round */; let e = self.e + other.e + 64; - Fp { f, e } + Self { f, e } } /// Normalizes itself so that the resulting mantissa is at least `2^63`. - pub fn normalize(&self) -> Fp { - let mut f = self.f; - let mut e = self.e; - if f >> (64 - 32) == 0 { - f <<= 32; - e -= 32; - } - if f >> (64 - 16) == 0 { - f <<= 16; - e -= 16; - } - if f >> (64 - 8) == 0 { - f <<= 8; - e -= 8; - } - if f >> (64 - 4) == 0 { - f <<= 4; - e -= 4; - } - if f >> (64 - 2) == 0 { - f <<= 2; - e -= 2; - } - if f >> (64 - 1) == 0 { - f <<= 1; - e -= 1; - } + pub fn normalize(self) -> Self { + let lz = self.f.leading_zeros(); + let f = self.f << lz; + let e = self.e - lz as i16; debug_assert!(f >= (1 << 63)); - Fp { f, e } + Self { f, e } } /// Normalizes itself to have the shared exponent. /// It can only decrease the exponent (and thus increase the mantissa). - pub fn normalize_to(&self, e: i16) -> Fp { + pub fn normalize_to(self, e: i16) -> Self { let edelta = self.e - e; assert!(edelta >= 0); let edelta = edelta as usize; assert_eq!(self.f << edelta >> edelta, self.f); - Fp { f: self.f << edelta, e } + Self { f: self.f << edelta, e } } } diff --git a/library/core/src/num/f128.rs b/library/core/src/num/f128.rs index b17190971c3e8..b1119d4899bab 100644 --- a/library/core/src/num/f128.rs +++ b/library/core/src/num/f128.rs @@ -145,6 +145,9 @@ impl f128 { pub const RADIX: u32 = 2; /// Number of significant digits in base 2. + /// + /// Note that the size of the mantissa in the bitwise representation is one + /// smaller than this since the leading 1 is not stored explicitly. #[unstable(feature = "f128", issue = "116909")] pub const MANTISSA_DIGITS: u32 = 113; @@ -194,16 +197,22 @@ impl f128 { #[unstable(feature = "f128", issue = "116909")] pub const MAX: f128 = 1.18973149535723176508575932662800702e+4932_f128; - /// One greater than the minimum possible normal power of 2 exponent. + /// One greater than the minimum possible *normal* power of 2 exponent + /// for a significand bounded by 1 ≤ x < 2 (i.e. the IEEE definition). /// - /// If x = `MIN_EXP`, then normal numbers - /// ≥ 0.5 × 2x. + /// This corresponds to the exact minimum possible *normal* power of 2 exponent + /// for a significand bounded by 0.5 ≤ x < 1 (i.e. the C definition). + /// In other words, all normal numbers representable by this type are + /// greater than or equal to 0.5 × 2MIN_EXP. #[unstable(feature = "f128", issue = "116909")] pub const MIN_EXP: i32 = -16_381; - /// Maximum possible power of 2 exponent. + /// One greater than the maximum possible power of 2 exponent + /// for a significand bounded by 1 ≤ x < 2 (i.e. the IEEE definition). /// - /// If x = `MAX_EXP`, then normal numbers - /// < 1 × 2x. + /// This corresponds to the exact maximum possible power of 2 exponent + /// for a significand bounded by 0.5 ≤ x < 1 (i.e. the C definition). + /// In other words, all numbers representable by this type are + /// strictly less than 2MAX_EXP. #[unstable(feature = "f128", issue = "116909")] pub const MAX_EXP: i32 = 16_384; @@ -224,14 +233,16 @@ impl f128 { /// Not a Number (NaN). /// - /// Note that IEEE 754 doesn't define just a single NaN value; - /// a plethora of bit patterns are considered to be NaN. - /// Furthermore, the standard makes a difference - /// between a "signaling" and a "quiet" NaN, - /// and allows inspecting its "payload" (the unspecified bits in the bit pattern). - /// This constant isn't guaranteed to equal to any specific NaN bitpattern, - /// and the stability of its representation over Rust versions - /// and target platforms isn't guaranteed. + /// Note that IEEE 754 doesn't define just a single NaN value; a plethora of bit patterns are + /// considered to be NaN. Furthermore, the standard makes a difference between a "signaling" and + /// a "quiet" NaN, and allows inspecting its "payload" (the unspecified bits in the bit pattern) + /// and its sign. See the [specification of NaN bit patterns](f32#nan-bit-patterns) for more + /// info. + /// + /// This constant is guaranteed to be a quiet NaN (on targets that follow the Rust assumptions + /// that the quiet/signaling bit being set to 1 indicates a quiet NaN). Beyond that, nothing is + /// guaranteed about the specific bit pattern chosen here: both payload and sign are arbitrary. + /// The concrete bit pattern may change across Rust versions and target platforms. #[allow(clippy::eq_op)] #[rustc_diagnostic_item = "f128_nan"] #[unstable(feature = "f128", issue = "116909")] @@ -799,7 +810,7 @@ impl f128 { } } - /// Calculates the middle point of `self` and `rhs`. + /// Calculates the midpoint (average) between `self` and `rhs`. /// /// This returns NaN when *either* argument is NaN or if a combination of /// +inf and -inf is provided as arguments. @@ -816,6 +827,7 @@ impl f128 { /// # } /// ``` #[inline] + #[doc(alias = "average")] #[unstable(feature = "f128", issue = "116909")] #[rustc_const_unstable(feature = "f128", issue = "116909")] pub const fn midpoint(self, other: f128) -> f128 { @@ -898,6 +910,7 @@ impl f128 { #[inline] #[unstable(feature = "f128", issue = "116909")] #[must_use = "this returns the result of the operation, without modifying the original"] + #[cfg_attr(not(bootstrap), allow(unnecessary_transmutes))] pub const fn to_bits(self) -> u128 { // SAFETY: `u128` is a plain old datatype so we can always transmute to it. unsafe { mem::transmute(self) } @@ -945,6 +958,7 @@ impl f128 { #[inline] #[must_use] #[unstable(feature = "f128", issue = "116909")] + #[cfg_attr(not(bootstrap), allow(unnecessary_transmutes))] pub const fn from_bits(v: u128) -> Self { // It turns out the safety issues with sNaN were overblown! Hooray! // SAFETY: `u128` is a plain old datatype so we can always transmute from it. @@ -1362,4 +1376,59 @@ impl f128 { // SAFETY: this is actually a safe intrinsic unsafe { intrinsics::copysignf128(self, sign) } } + + /// Float addition that allows optimizations based on algebraic rules. + /// + /// See [algebraic operators](primitive@f32#algebraic-operators) for more info. + #[must_use = "method returns a new number and does not mutate the original value"] + #[unstable(feature = "float_algebraic", issue = "136469")] + #[rustc_const_unstable(feature = "float_algebraic", issue = "136469")] + #[inline] + pub const fn algebraic_add(self, rhs: f128) -> f128 { + intrinsics::fadd_algebraic(self, rhs) + } + + /// Float subtraction that allows optimizations based on algebraic rules. + /// + /// See [algebraic operators](primitive@f32#algebraic-operators) for more info. + #[must_use = "method returns a new number and does not mutate the original value"] + #[unstable(feature = "float_algebraic", issue = "136469")] + #[rustc_const_unstable(feature = "float_algebraic", issue = "136469")] + #[inline] + pub const fn algebraic_sub(self, rhs: f128) -> f128 { + intrinsics::fsub_algebraic(self, rhs) + } + + /// Float multiplication that allows optimizations based on algebraic rules. + /// + /// See [algebraic operators](primitive@f32#algebraic-operators) for more info. + #[must_use = "method returns a new number and does not mutate the original value"] + #[unstable(feature = "float_algebraic", issue = "136469")] + #[rustc_const_unstable(feature = "float_algebraic", issue = "136469")] + #[inline] + pub const fn algebraic_mul(self, rhs: f128) -> f128 { + intrinsics::fmul_algebraic(self, rhs) + } + + /// Float division that allows optimizations based on algebraic rules. + /// + /// See [algebraic operators](primitive@f32#algebraic-operators) for more info. + #[must_use = "method returns a new number and does not mutate the original value"] + #[unstable(feature = "float_algebraic", issue = "136469")] + #[rustc_const_unstable(feature = "float_algebraic", issue = "136469")] + #[inline] + pub const fn algebraic_div(self, rhs: f128) -> f128 { + intrinsics::fdiv_algebraic(self, rhs) + } + + /// Float remainder that allows optimizations based on algebraic rules. + /// + /// See [algebraic operators](primitive@f32#algebraic-operators) for more info. + #[must_use = "method returns a new number and does not mutate the original value"] + #[unstable(feature = "float_algebraic", issue = "136469")] + #[rustc_const_unstable(feature = "float_algebraic", issue = "136469")] + #[inline] + pub const fn algebraic_rem(self, rhs: f128) -> f128 { + intrinsics::frem_algebraic(self, rhs) + } } diff --git a/library/core/src/num/f16.rs b/library/core/src/num/f16.rs index d20677f43b417..54e38d9e1a6f1 100644 --- a/library/core/src/num/f16.rs +++ b/library/core/src/num/f16.rs @@ -140,6 +140,9 @@ impl f16 { pub const RADIX: u32 = 2; /// Number of significant digits in base 2. + /// + /// Note that the size of the mantissa in the bitwise representation is one + /// smaller than this since the leading 1 is not stored explicitly. #[unstable(feature = "f16", issue = "116909")] pub const MANTISSA_DIGITS: u32 = 11; @@ -189,16 +192,22 @@ impl f16 { #[unstable(feature = "f16", issue = "116909")] pub const MAX: f16 = 6.5504e+4_f16; - /// One greater than the minimum possible normal power of 2 exponent. + /// One greater than the minimum possible *normal* power of 2 exponent + /// for a significand bounded by 1 ≤ x < 2 (i.e. the IEEE definition). /// - /// If x = `MIN_EXP`, then normal numbers - /// ≥ 0.5 × 2x. + /// This corresponds to the exact minimum possible *normal* power of 2 exponent + /// for a significand bounded by 0.5 ≤ x < 1 (i.e. the C definition). + /// In other words, all normal numbers representable by this type are + /// greater than or equal to 0.5 × 2MIN_EXP. #[unstable(feature = "f16", issue = "116909")] pub const MIN_EXP: i32 = -13; - /// Maximum possible power of 2 exponent. + /// One greater than the maximum possible power of 2 exponent + /// for a significand bounded by 1 ≤ x < 2 (i.e. the IEEE definition). /// - /// If x = `MAX_EXP`, then normal numbers - /// < 1 × 2x. + /// This corresponds to the exact maximum possible power of 2 exponent + /// for a significand bounded by 0.5 ≤ x < 1 (i.e. the C definition). + /// In other words, all numbers representable by this type are + /// strictly less than 2MAX_EXP. #[unstable(feature = "f16", issue = "116909")] pub const MAX_EXP: i32 = 16; @@ -219,14 +228,16 @@ impl f16 { /// Not a Number (NaN). /// - /// Note that IEEE 754 doesn't define just a single NaN value; - /// a plethora of bit patterns are considered to be NaN. - /// Furthermore, the standard makes a difference - /// between a "signaling" and a "quiet" NaN, - /// and allows inspecting its "payload" (the unspecified bits in the bit pattern). - /// This constant isn't guaranteed to equal to any specific NaN bitpattern, - /// and the stability of its representation over Rust versions - /// and target platforms isn't guaranteed. + /// Note that IEEE 754 doesn't define just a single NaN value; a plethora of bit patterns are + /// considered to be NaN. Furthermore, the standard makes a difference between a "signaling" and + /// a "quiet" NaN, and allows inspecting its "payload" (the unspecified bits in the bit pattern) + /// and its sign. See the [specification of NaN bit patterns](f32#nan-bit-patterns) for more + /// info. + /// + /// This constant is guaranteed to be a quiet NaN (on targets that follow the Rust assumptions + /// that the quiet/signaling bit being set to 1 indicates a quiet NaN). Beyond that, nothing is + /// guaranteed about the specific bit pattern chosen here: both payload and sign are arbitrary. + /// The concrete bit pattern may change across Rust versions and target platforms. #[allow(clippy::eq_op)] #[rustc_diagnostic_item = "f16_nan"] #[unstable(feature = "f16", issue = "116909")] @@ -787,7 +798,7 @@ impl f16 { } } - /// Calculates the middle point of `self` and `rhs`. + /// Calculates the midpoint (average) between `self` and `rhs`. /// /// This returns NaN when *either* argument is NaN or if a combination of /// +inf and -inf is provided as arguments. @@ -803,6 +814,7 @@ impl f16 { /// # } /// ``` #[inline] + #[doc(alias = "average")] #[unstable(feature = "f16", issue = "116909")] #[rustc_const_unstable(feature = "f16", issue = "116909")] pub const fn midpoint(self, other: f16) -> f16 { @@ -886,6 +898,7 @@ impl f16 { #[inline] #[unstable(feature = "f16", issue = "116909")] #[must_use = "this returns the result of the operation, without modifying the original"] + #[cfg_attr(not(bootstrap), allow(unnecessary_transmutes))] pub const fn to_bits(self) -> u16 { // SAFETY: `u16` is a plain old datatype so we can always transmute to it. unsafe { mem::transmute(self) } @@ -932,6 +945,7 @@ impl f16 { #[inline] #[must_use] #[unstable(feature = "f16", issue = "116909")] + #[cfg_attr(not(bootstrap), allow(unnecessary_transmutes))] pub const fn from_bits(v: u16) -> Self { // It turns out the safety issues with sNaN were overblown! Hooray! // SAFETY: `u16` is a plain old datatype so we can always transmute from it. @@ -1338,4 +1352,59 @@ impl f16 { // SAFETY: this is actually a safe intrinsic unsafe { intrinsics::copysignf16(self, sign) } } + + /// Float addition that allows optimizations based on algebraic rules. + /// + /// See [algebraic operators](primitive@f32#algebraic-operators) for more info. + #[must_use = "method returns a new number and does not mutate the original value"] + #[unstable(feature = "float_algebraic", issue = "136469")] + #[rustc_const_unstable(feature = "float_algebraic", issue = "136469")] + #[inline] + pub const fn algebraic_add(self, rhs: f16) -> f16 { + intrinsics::fadd_algebraic(self, rhs) + } + + /// Float subtraction that allows optimizations based on algebraic rules. + /// + /// See [algebraic operators](primitive@f32#algebraic-operators) for more info. + #[must_use = "method returns a new number and does not mutate the original value"] + #[unstable(feature = "float_algebraic", issue = "136469")] + #[rustc_const_unstable(feature = "float_algebraic", issue = "136469")] + #[inline] + pub const fn algebraic_sub(self, rhs: f16) -> f16 { + intrinsics::fsub_algebraic(self, rhs) + } + + /// Float multiplication that allows optimizations based on algebraic rules. + /// + /// See [algebraic operators](primitive@f32#algebraic-operators) for more info. + #[must_use = "method returns a new number and does not mutate the original value"] + #[unstable(feature = "float_algebraic", issue = "136469")] + #[rustc_const_unstable(feature = "float_algebraic", issue = "136469")] + #[inline] + pub const fn algebraic_mul(self, rhs: f16) -> f16 { + intrinsics::fmul_algebraic(self, rhs) + } + + /// Float division that allows optimizations based on algebraic rules. + /// + /// See [algebraic operators](primitive@f32#algebraic-operators) for more info. + #[must_use = "method returns a new number and does not mutate the original value"] + #[unstable(feature = "float_algebraic", issue = "136469")] + #[rustc_const_unstable(feature = "float_algebraic", issue = "136469")] + #[inline] + pub const fn algebraic_div(self, rhs: f16) -> f16 { + intrinsics::fdiv_algebraic(self, rhs) + } + + /// Float remainder that allows optimizations based on algebraic rules. + /// + /// See [algebraic operators](primitive@f32#algebraic-operators) for more info. + #[must_use = "method returns a new number and does not mutate the original value"] + #[unstable(feature = "float_algebraic", issue = "136469")] + #[rustc_const_unstable(feature = "float_algebraic", issue = "136469")] + #[inline] + pub const fn algebraic_rem(self, rhs: f16) -> f16 { + intrinsics::frem_algebraic(self, rhs) + } } diff --git a/library/core/src/num/f32.rs b/library/core/src/num/f32.rs index 53373584d5551..e66fd3bb52b86 100644 --- a/library/core/src/num/f32.rs +++ b/library/core/src/num/f32.rs @@ -390,6 +390,9 @@ impl f32 { pub const RADIX: u32 = 2; /// Number of significant digits in base 2. + /// + /// Note that the size of the mantissa in the bitwise representation is one + /// smaller than this since the leading 1 is not stored explicitly. #[stable(feature = "assoc_int_consts", since = "1.43.0")] pub const MANTISSA_DIGITS: u32 = 24; @@ -440,16 +443,22 @@ impl f32 { #[stable(feature = "assoc_int_consts", since = "1.43.0")] pub const MAX: f32 = 3.40282347e+38_f32; - /// One greater than the minimum possible normal power of 2 exponent. + /// One greater than the minimum possible *normal* power of 2 exponent + /// for a significand bounded by 1 ≤ x < 2 (i.e. the IEEE definition). /// - /// If x = `MIN_EXP`, then normal numbers - /// ≥ 0.5 × 2x. + /// This corresponds to the exact minimum possible *normal* power of 2 exponent + /// for a significand bounded by 0.5 ≤ x < 1 (i.e. the C definition). + /// In other words, all normal numbers representable by this type are + /// greater than or equal to 0.5 × 2MIN_EXP. #[stable(feature = "assoc_int_consts", since = "1.43.0")] pub const MIN_EXP: i32 = -125; - /// Maximum possible power of 2 exponent. + /// One greater than the maximum possible power of 2 exponent + /// for a significand bounded by 1 ≤ x < 2 (i.e. the IEEE definition). /// - /// If x = `MAX_EXP`, then normal numbers - /// < 1 × 2x. + /// This corresponds to the exact maximum possible power of 2 exponent + /// for a significand bounded by 0.5 ≤ x < 1 (i.e. the C definition). + /// In other words, all numbers representable by this type are + /// strictly less than 2MAX_EXP. #[stable(feature = "assoc_int_consts", since = "1.43.0")] pub const MAX_EXP: i32 = 128; @@ -470,14 +479,16 @@ impl f32 { /// Not a Number (NaN). /// - /// Note that IEEE 754 doesn't define just a single NaN value; - /// a plethora of bit patterns are considered to be NaN. - /// Furthermore, the standard makes a difference - /// between a "signaling" and a "quiet" NaN, - /// and allows inspecting its "payload" (the unspecified bits in the bit pattern). - /// This constant isn't guaranteed to equal to any specific NaN bitpattern, - /// and the stability of its representation over Rust versions - /// and target platforms isn't guaranteed. + /// Note that IEEE 754 doesn't define just a single NaN value; a plethora of bit patterns are + /// considered to be NaN. Furthermore, the standard makes a difference between a "signaling" and + /// a "quiet" NaN, and allows inspecting its "payload" (the unspecified bits in the bit pattern) + /// and its sign. See the [specification of NaN bit patterns](f32#nan-bit-patterns) for more + /// info. + /// + /// This constant is guaranteed to be a quiet NaN (on targets that follow the Rust assumptions + /// that the quiet/signaling bit being set to 1 indicates a quiet NaN). Beyond that, nothing is + /// guaranteed about the specific bit pattern chosen here: both payload and sign are arbitrary. + /// The concrete bit pattern may change across Rust versions and target platforms. #[stable(feature = "assoc_int_consts", since = "1.43.0")] #[rustc_diagnostic_item = "f32_nan"] #[allow(clippy::eq_op)] @@ -705,8 +716,7 @@ impl f32 { pub const fn is_sign_negative(self) -> bool { // IEEE754 says: isSignMinus(x) is true if and only if x has negative sign. isSignMinus // applies to zeros and NaNs as well. - // SAFETY: This is just transmuting to get the sign bit, it's fine. - unsafe { mem::transmute::(self) & 0x8000_0000 != 0 } + self.to_bits() & 0x8000_0000 != 0 } /// Returns the least number greater than `self`. @@ -981,7 +991,7 @@ impl f32 { } } - /// Calculates the middle point of `self` and `rhs`. + /// Calculates the midpoint (average) between `self` and `rhs`. /// /// This returns NaN when *either* argument is NaN or if a combination of /// +inf and -inf is provided as arguments. @@ -993,6 +1003,7 @@ impl f32 { /// assert_eq!((-5.5f32).midpoint(8.0), 1.25); /// ``` #[inline] + #[doc(alias = "average")] #[stable(feature = "num_midpoint", since = "1.85.0")] #[rustc_const_stable(feature = "num_midpoint", since = "1.85.0")] pub const fn midpoint(self, other: f32) -> f32 { @@ -1091,6 +1102,7 @@ impl f32 { #[stable(feature = "float_bits_conv", since = "1.20.0")] #[rustc_const_stable(feature = "const_float_bits_conv", since = "1.83.0")] #[inline] + #[cfg_attr(not(bootstrap), allow(unnecessary_transmutes))] pub const fn to_bits(self) -> u32 { // SAFETY: `u32` is a plain old datatype so we can always transmute to it. unsafe { mem::transmute(self) } @@ -1136,6 +1148,7 @@ impl f32 { #[rustc_const_stable(feature = "const_float_bits_conv", since = "1.83.0")] #[must_use] #[inline] + #[cfg_attr(not(bootstrap), allow(unnecessary_transmutes))] pub const fn from_bits(v: u32) -> Self { // It turns out the safety issues with sNaN were overblown! Hooray! // SAFETY: `u32` is a plain old datatype so we can always transmute from it. @@ -1504,4 +1517,59 @@ impl f32 { // SAFETY: this is actually a safe intrinsic unsafe { intrinsics::copysignf32(self, sign) } } + + /// Float addition that allows optimizations based on algebraic rules. + /// + /// See [algebraic operators](primitive@f32#algebraic-operators) for more info. + #[must_use = "method returns a new number and does not mutate the original value"] + #[unstable(feature = "float_algebraic", issue = "136469")] + #[rustc_const_unstable(feature = "float_algebraic", issue = "136469")] + #[inline] + pub const fn algebraic_add(self, rhs: f32) -> f32 { + intrinsics::fadd_algebraic(self, rhs) + } + + /// Float subtraction that allows optimizations based on algebraic rules. + /// + /// See [algebraic operators](primitive@f32#algebraic-operators) for more info. + #[must_use = "method returns a new number and does not mutate the original value"] + #[unstable(feature = "float_algebraic", issue = "136469")] + #[rustc_const_unstable(feature = "float_algebraic", issue = "136469")] + #[inline] + pub const fn algebraic_sub(self, rhs: f32) -> f32 { + intrinsics::fsub_algebraic(self, rhs) + } + + /// Float multiplication that allows optimizations based on algebraic rules. + /// + /// See [algebraic operators](primitive@f32#algebraic-operators) for more info. + #[must_use = "method returns a new number and does not mutate the original value"] + #[unstable(feature = "float_algebraic", issue = "136469")] + #[rustc_const_unstable(feature = "float_algebraic", issue = "136469")] + #[inline] + pub const fn algebraic_mul(self, rhs: f32) -> f32 { + intrinsics::fmul_algebraic(self, rhs) + } + + /// Float division that allows optimizations based on algebraic rules. + /// + /// See [algebraic operators](primitive@f32#algebraic-operators) for more info. + #[must_use = "method returns a new number and does not mutate the original value"] + #[unstable(feature = "float_algebraic", issue = "136469")] + #[rustc_const_unstable(feature = "float_algebraic", issue = "136469")] + #[inline] + pub const fn algebraic_div(self, rhs: f32) -> f32 { + intrinsics::fdiv_algebraic(self, rhs) + } + + /// Float remainder that allows optimizations based on algebraic rules. + /// + /// See [algebraic operators](primitive@f32#algebraic-operators) for more info. + #[must_use = "method returns a new number and does not mutate the original value"] + #[unstable(feature = "float_algebraic", issue = "136469")] + #[rustc_const_unstable(feature = "float_algebraic", issue = "136469")] + #[inline] + pub const fn algebraic_rem(self, rhs: f32) -> f32 { + intrinsics::frem_algebraic(self, rhs) + } } diff --git a/library/core/src/num/f64.rs b/library/core/src/num/f64.rs index ca28b40bb3adc..2d791437b2825 100644 --- a/library/core/src/num/f64.rs +++ b/library/core/src/num/f64.rs @@ -390,6 +390,9 @@ impl f64 { pub const RADIX: u32 = 2; /// Number of significant digits in base 2. + /// + /// Note that the size of the mantissa in the bitwise representation is one + /// smaller than this since the leading 1 is not stored explicitly. #[stable(feature = "assoc_int_consts", since = "1.43.0")] pub const MANTISSA_DIGITS: u32 = 53; /// Approximate number of significant digits in base 10. @@ -439,16 +442,22 @@ impl f64 { #[stable(feature = "assoc_int_consts", since = "1.43.0")] pub const MAX: f64 = 1.7976931348623157e+308_f64; - /// One greater than the minimum possible normal power of 2 exponent. + /// One greater than the minimum possible *normal* power of 2 exponent + /// for a significand bounded by 1 ≤ x < 2 (i.e. the IEEE definition). /// - /// If x = `MIN_EXP`, then normal numbers - /// ≥ 0.5 × 2x. + /// This corresponds to the exact minimum possible *normal* power of 2 exponent + /// for a significand bounded by 0.5 ≤ x < 1 (i.e. the C definition). + /// In other words, all normal numbers representable by this type are + /// greater than or equal to 0.5 × 2MIN_EXP. #[stable(feature = "assoc_int_consts", since = "1.43.0")] pub const MIN_EXP: i32 = -1021; - /// Maximum possible power of 2 exponent. + /// One greater than the maximum possible power of 2 exponent + /// for a significand bounded by 1 ≤ x < 2 (i.e. the IEEE definition). /// - /// If x = `MAX_EXP`, then normal numbers - /// < 1 × 2x. + /// This corresponds to the exact maximum possible power of 2 exponent + /// for a significand bounded by 0.5 ≤ x < 1 (i.e. the C definition). + /// In other words, all numbers representable by this type are + /// strictly less than 2MAX_EXP. #[stable(feature = "assoc_int_consts", since = "1.43.0")] pub const MAX_EXP: i32 = 1024; @@ -469,14 +478,16 @@ impl f64 { /// Not a Number (NaN). /// - /// Note that IEEE 754 doesn't define just a single NaN value; - /// a plethora of bit patterns are considered to be NaN. - /// Furthermore, the standard makes a difference - /// between a "signaling" and a "quiet" NaN, - /// and allows inspecting its "payload" (the unspecified bits in the bit pattern). - /// This constant isn't guaranteed to equal to any specific NaN bitpattern, - /// and the stability of its representation over Rust versions - /// and target platforms isn't guaranteed. + /// Note that IEEE 754 doesn't define just a single NaN value; a plethora of bit patterns are + /// considered to be NaN. Furthermore, the standard makes a difference between a "signaling" and + /// a "quiet" NaN, and allows inspecting its "payload" (the unspecified bits in the bit pattern) + /// and its sign. See the [specification of NaN bit patterns](f32#nan-bit-patterns) for more + /// info. + /// + /// This constant is guaranteed to be a quiet NaN (on targets that follow the Rust assumptions + /// that the quiet/signaling bit being set to 1 indicates a quiet NaN). Beyond that, nothing is + /// guaranteed about the specific bit pattern chosen here: both payload and sign are arbitrary. + /// The concrete bit pattern may change across Rust versions and target platforms. #[rustc_diagnostic_item = "f64_nan"] #[stable(feature = "assoc_int_consts", since = "1.43.0")] #[allow(clippy::eq_op)] @@ -713,8 +724,7 @@ impl f64 { pub const fn is_sign_negative(self) -> bool { // IEEE754 says: isSignMinus(x) is true if and only if x has negative sign. isSignMinus // applies to zeros and NaNs as well. - // SAFETY: This is just transmuting to get the sign bit, it's fine. - unsafe { mem::transmute::(self) & Self::SIGN_MASK != 0 } + self.to_bits() & Self::SIGN_MASK != 0 } #[must_use] @@ -999,7 +1009,7 @@ impl f64 { } } - /// Calculates the middle point of `self` and `rhs`. + /// Calculates the midpoint (average) between `self` and `rhs`. /// /// This returns NaN when *either* argument is NaN or if a combination of /// +inf and -inf is provided as arguments. @@ -1011,6 +1021,7 @@ impl f64 { /// assert_eq!((-5.5f64).midpoint(8.0), 1.25); /// ``` #[inline] + #[doc(alias = "average")] #[stable(feature = "num_midpoint", since = "1.85.0")] #[rustc_const_stable(feature = "num_midpoint", since = "1.85.0")] pub const fn midpoint(self, other: f64) -> f64 { @@ -1089,6 +1100,7 @@ impl f64 { without modifying the original"] #[stable(feature = "float_bits_conv", since = "1.20.0")] #[rustc_const_stable(feature = "const_float_bits_conv", since = "1.83.0")] + #[cfg_attr(not(bootstrap), allow(unnecessary_transmutes))] #[inline] pub const fn to_bits(self) -> u64 { // SAFETY: `u64` is a plain old datatype so we can always transmute to it. @@ -1135,6 +1147,7 @@ impl f64 { #[rustc_const_stable(feature = "const_float_bits_conv", since = "1.83.0")] #[must_use] #[inline] + #[cfg_attr(not(bootstrap), allow(unnecessary_transmutes))] pub const fn from_bits(v: u64) -> Self { // It turns out the safety issues with sNaN were overblown! Hooray! // SAFETY: `u64` is a plain old datatype so we can always transmute from it. @@ -1503,4 +1516,59 @@ impl f64 { // SAFETY: this is actually a safe intrinsic unsafe { intrinsics::copysignf64(self, sign) } } + + /// Float addition that allows optimizations based on algebraic rules. + /// + /// See [algebraic operators](primitive@f32#algebraic-operators) for more info. + #[must_use = "method returns a new number and does not mutate the original value"] + #[unstable(feature = "float_algebraic", issue = "136469")] + #[rustc_const_unstable(feature = "float_algebraic", issue = "136469")] + #[inline] + pub const fn algebraic_add(self, rhs: f64) -> f64 { + intrinsics::fadd_algebraic(self, rhs) + } + + /// Float subtraction that allows optimizations based on algebraic rules. + /// + /// See [algebraic operators](primitive@f32#algebraic-operators) for more info. + #[must_use = "method returns a new number and does not mutate the original value"] + #[unstable(feature = "float_algebraic", issue = "136469")] + #[rustc_const_unstable(feature = "float_algebraic", issue = "136469")] + #[inline] + pub const fn algebraic_sub(self, rhs: f64) -> f64 { + intrinsics::fsub_algebraic(self, rhs) + } + + /// Float multiplication that allows optimizations based on algebraic rules. + /// + /// See [algebraic operators](primitive@f32#algebraic-operators) for more info. + #[must_use = "method returns a new number and does not mutate the original value"] + #[unstable(feature = "float_algebraic", issue = "136469")] + #[rustc_const_unstable(feature = "float_algebraic", issue = "136469")] + #[inline] + pub const fn algebraic_mul(self, rhs: f64) -> f64 { + intrinsics::fmul_algebraic(self, rhs) + } + + /// Float division that allows optimizations based on algebraic rules. + /// + /// See [algebraic operators](primitive@f32#algebraic-operators) for more info. + #[must_use = "method returns a new number and does not mutate the original value"] + #[unstable(feature = "float_algebraic", issue = "136469")] + #[rustc_const_unstable(feature = "float_algebraic", issue = "136469")] + #[inline] + pub const fn algebraic_div(self, rhs: f64) -> f64 { + intrinsics::fdiv_algebraic(self, rhs) + } + + /// Float remainder that allows optimizations based on algebraic rules. + /// + /// See [algebraic operators](primitive@f32#algebraic-operators) for more info. + #[must_use = "method returns a new number and does not mutate the original value"] + #[unstable(feature = "float_algebraic", issue = "136469")] + #[rustc_const_unstable(feature = "float_algebraic", issue = "136469")] + #[inline] + pub const fn algebraic_rem(self, rhs: f64) -> f64 { + intrinsics::frem_algebraic(self, rhs) + } } diff --git a/library/core/src/num/flt2dec/strategy/grisu.rs b/library/core/src/num/flt2dec/strategy/grisu.rs index 2816de4c63339..d3bbb0934e0ff 100644 --- a/library/core/src/num/flt2dec/strategy/grisu.rs +++ b/library/core/src/num/flt2dec/strategy/grisu.rs @@ -196,9 +196,9 @@ pub fn format_shortest_opt<'a>( let (minusk, cached) = cached_power(ALPHA - plus.e - 64, GAMMA - plus.e - 64); // scale fps. this gives the maximal error of 1 ulp (proved from Theorem 5.1). - let plus = plus.mul(&cached); - let minus = minus.mul(&cached); - let v = v.mul(&cached); + let plus = plus.mul(cached); + let minus = minus.mul(cached); + let v = v.mul(cached); debug_assert_eq!(plus.e, minus.e); debug_assert_eq!(plus.e, v.e); @@ -480,7 +480,7 @@ pub fn format_exact_opt<'a>( // normalize and scale `v`. let v = Fp { f: d.mant, e: d.exp }.normalize(); let (minusk, cached) = cached_power(ALPHA - v.e - 64, GAMMA - v.e - 64); - let v = v.mul(&cached); + let v = v.mul(cached); // divide `v` into integral and fractional parts. let e = -v.e as usize; diff --git a/library/core/src/num/int_macros.rs b/library/core/src/num/int_macros.rs index a72ca4bcb059b..8d31a7b697a80 100644 --- a/library/core/src/num/int_macros.rs +++ b/library/core/src/num/int_macros.rs @@ -244,8 +244,8 @@ macro_rules! int_impl { /// #[doc = concat!("assert_eq!(n.cast_unsigned(), ", stringify!($UnsignedT), "::MAX);")] /// ``` - #[stable(feature = "integer_sign_cast", since = "CURRENT_RUSTC_VERSION")] - #[rustc_const_stable(feature = "integer_sign_cast", since = "CURRENT_RUSTC_VERSION")] + #[stable(feature = "integer_sign_cast", since = "1.87.0")] + #[rustc_const_stable(feature = "integer_sign_cast", since = "1.87.0")] #[must_use = "this returns the result of the operation, \ without modifying the original"] #[inline(always)] @@ -1355,8 +1355,8 @@ macro_rules! int_impl { #[doc = concat!("assert_eq!(0x1", stringify!($SelfT), ".unbounded_shl(4), 0x10);")] #[doc = concat!("assert_eq!(0x1", stringify!($SelfT), ".unbounded_shl(129), 0);")] /// ``` - #[stable(feature = "unbounded_shifts", since = "CURRENT_RUSTC_VERSION")] - #[rustc_const_stable(feature = "unbounded_shifts", since = "CURRENT_RUSTC_VERSION")] + #[stable(feature = "unbounded_shifts", since = "1.87.0")] + #[rustc_const_stable(feature = "unbounded_shifts", since = "1.87.0")] #[must_use = "this returns the result of the operation, \ without modifying the original"] #[inline] @@ -1478,8 +1478,8 @@ macro_rules! int_impl { #[doc = concat!("assert_eq!(0x10", stringify!($SelfT), ".unbounded_shr(129), 0);")] #[doc = concat!("assert_eq!(", stringify!($SelfT), "::MIN.unbounded_shr(129), -1);")] /// ``` - #[stable(feature = "unbounded_shifts", since = "CURRENT_RUSTC_VERSION")] - #[rustc_const_stable(feature = "unbounded_shifts", since = "CURRENT_RUSTC_VERSION")] + #[stable(feature = "unbounded_shifts", since = "1.87.0")] + #[rustc_const_stable(feature = "unbounded_shifts", since = "1.87.0")] #[must_use = "this returns the result of the operation, \ without modifying the original"] #[inline] @@ -3571,10 +3571,7 @@ macro_rules! int_impl { // so delegate it to `Ord` which is already producing -1/0/+1 // exactly like we need and can be the place to deal with the complexity. - // FIXME(const-hack): replace with cmp - if self < 0 { -1 } - else if self == 0 { 0 } - else { 1 } + crate::intrinsics::three_way_compare(self, 0) as Self } /// Returns `true` if `self` is positive and `false` if the number is zero or @@ -3678,6 +3675,7 @@ macro_rules! int_impl { /// ``` #[stable(feature = "int_to_from_bytes", since = "1.32.0")] #[rustc_const_stable(feature = "const_int_conversion", since = "1.44.0")] + #[cfg_attr(not(bootstrap), allow(unnecessary_transmutes))] // SAFETY: const sound because integers are plain old datatypes so we can always // transmute them to arrays of bytes #[must_use = "this returns the result of the operation, \ @@ -3781,6 +3779,7 @@ macro_rules! int_impl { /// ``` #[stable(feature = "int_to_from_bytes", since = "1.32.0")] #[rustc_const_stable(feature = "const_int_conversion", since = "1.44.0")] + #[cfg_attr(not(bootstrap), allow(unnecessary_transmutes))] #[must_use] // SAFETY: const sound because integers are plain old datatypes so we can always // transmute to them diff --git a/library/core/src/num/mod.rs b/library/core/src/num/mod.rs index 151e128cd78a9..ecc1c7bf9021d 100644 --- a/library/core/src/num/mod.rs +++ b/library/core/src/num/mod.rs @@ -99,8 +99,8 @@ macro_rules! i8_xe_bytes_doc { **Note**: This function is meaningless on `i8`. Byte order does not exist as a concept for byte-sized integers. This function is only provided in symmetry -with larger integer types. You can cast from and to `u8` using `as i8` and `as -u8`. +with larger integer types. You can cast from and to `u8` using +[`cast_signed`](u8::cast_signed) and [`cast_unsigned`](Self::cast_unsigned). " }; @@ -130,7 +130,7 @@ depending on the target pointer size. macro_rules! midpoint_impl { ($SelfT:ty, unsigned) => { - /// Calculates the middle point of `self` and `rhs`. + /// Calculates the midpoint (average) between `self` and `rhs`. /// /// `midpoint(a, b)` is `(a + b) / 2` as if it were performed in a /// sufficiently-large unsigned integral type. This implies that the result is @@ -146,6 +146,8 @@ macro_rules! midpoint_impl { #[rustc_const_stable(feature = "num_midpoint", since = "1.85.0")] #[must_use = "this returns the result of the operation, \ without modifying the original"] + #[doc(alias = "average_floor")] + #[doc(alias = "average")] #[inline] pub const fn midpoint(self, rhs: $SelfT) -> $SelfT { // Use the well known branchless algorithm from Hacker's Delight to compute @@ -154,7 +156,7 @@ macro_rules! midpoint_impl { } }; ($SelfT:ty, signed) => { - /// Calculates the middle point of `self` and `rhs`. + /// Calculates the midpoint (average) between `self` and `rhs`. /// /// `midpoint(a, b)` is `(a + b) / 2` as if it were performed in a /// sufficiently-large signed integral type. This implies that the result is @@ -169,10 +171,13 @@ macro_rules! midpoint_impl { #[doc = concat!("assert_eq!(0", stringify!($SelfT), ".midpoint(-7), -3);")] #[doc = concat!("assert_eq!(0", stringify!($SelfT), ".midpoint(7), 3);")] /// ``` - #[stable(feature = "num_midpoint_signed", since = "CURRENT_RUSTC_VERSION")] - #[rustc_const_stable(feature = "num_midpoint_signed", since = "CURRENT_RUSTC_VERSION")] + #[stable(feature = "num_midpoint_signed", since = "1.87.0")] + #[rustc_const_stable(feature = "num_midpoint_signed", since = "1.87.0")] #[must_use = "this returns the result of the operation, \ without modifying the original"] + #[doc(alias = "average_floor")] + #[doc(alias = "average_ceil")] + #[doc(alias = "average")] #[inline] pub const fn midpoint(self, rhs: Self) -> Self { // Use the well known branchless algorithm from Hacker's Delight to compute @@ -184,7 +189,7 @@ macro_rules! midpoint_impl { } }; ($SelfT:ty, $WideT:ty, unsigned) => { - /// Calculates the middle point of `self` and `rhs`. + /// Calculates the midpoint (average) between `self` and `rhs`. /// /// `midpoint(a, b)` is `(a + b) / 2` as if it were performed in a /// sufficiently-large unsigned integral type. This implies that the result is @@ -200,13 +205,15 @@ macro_rules! midpoint_impl { #[rustc_const_stable(feature = "num_midpoint", since = "1.85.0")] #[must_use = "this returns the result of the operation, \ without modifying the original"] + #[doc(alias = "average_floor")] + #[doc(alias = "average")] #[inline] pub const fn midpoint(self, rhs: $SelfT) -> $SelfT { ((self as $WideT + rhs as $WideT) / 2) as $SelfT } }; ($SelfT:ty, $WideT:ty, signed) => { - /// Calculates the middle point of `self` and `rhs`. + /// Calculates the midpoint (average) between `self` and `rhs`. /// /// `midpoint(a, b)` is `(a + b) / 2` as if it were performed in a /// sufficiently-large signed integral type. This implies that the result is @@ -221,10 +228,13 @@ macro_rules! midpoint_impl { #[doc = concat!("assert_eq!(0", stringify!($SelfT), ".midpoint(-7), -3);")] #[doc = concat!("assert_eq!(0", stringify!($SelfT), ".midpoint(7), 3);")] /// ``` - #[stable(feature = "num_midpoint_signed", since = "CURRENT_RUSTC_VERSION")] - #[rustc_const_stable(feature = "num_midpoint_signed", since = "CURRENT_RUSTC_VERSION")] + #[stable(feature = "num_midpoint_signed", since = "1.87.0")] + #[rustc_const_stable(feature = "num_midpoint_signed", since = "1.87.0")] #[must_use = "this returns the result of the operation, \ without modifying the original"] + #[doc(alias = "average_floor")] + #[doc(alias = "average_ceil")] + #[doc(alias = "average")] #[inline] pub const fn midpoint(self, rhs: $SelfT) -> $SelfT { ((self as $WideT + rhs as $WideT) / 2) as $SelfT diff --git a/library/core/src/num/nonzero.rs b/library/core/src/num/nonzero.rs index 7585ec140e31e..8a8b2733d5e88 100644 --- a/library/core/src/num/nonzero.rs +++ b/library/core/src/num/nonzero.rs @@ -1589,7 +1589,7 @@ macro_rules! nonzero_integer_signedness_dependent_methods { super::int_log10::$Int(self.get()) } - /// Calculates the middle point of `self` and `rhs`. + /// Calculates the midpoint (average) between `self` and `rhs`. /// /// `midpoint(a, b)` is `(a + b) >> 1` as if it were performed in a /// sufficiently-large signed integral type. This implies that the result is @@ -1615,6 +1615,8 @@ macro_rules! nonzero_integer_signedness_dependent_methods { #[rustc_const_stable(feature = "num_midpoint", since = "1.85.0")] #[must_use = "this returns the result of the operation, \ without modifying the original"] + #[doc(alias = "average_floor")] + #[doc(alias = "average")] #[inline] pub const fn midpoint(self, rhs: Self) -> Self { // SAFETY: The only way to get `0` with midpoint is to have two opposite or @@ -1704,8 +1706,8 @@ macro_rules! nonzero_integer_signedness_dependent_methods { /// #[doc = concat!("assert_eq!(n.cast_signed(), NonZero::new(-1", stringify!($Sint), ").unwrap());")] /// ``` - #[stable(feature = "integer_sign_cast", since = "CURRENT_RUSTC_VERSION")] - #[rustc_const_stable(feature = "integer_sign_cast", since = "CURRENT_RUSTC_VERSION")] + #[stable(feature = "integer_sign_cast", since = "1.87.0")] + #[rustc_const_stable(feature = "integer_sign_cast", since = "1.87.0")] #[must_use = "this returns the result of the operation, \ without modifying the original"] #[inline(always)] @@ -2143,8 +2145,8 @@ macro_rules! nonzero_integer_signedness_dependent_methods { /// #[doc = concat!("assert_eq!(n.cast_unsigned(), NonZero::<", stringify!($Uint), ">::MAX);")] /// ``` - #[stable(feature = "integer_sign_cast", since = "CURRENT_RUSTC_VERSION")] - #[rustc_const_stable(feature = "integer_sign_cast", since = "CURRENT_RUSTC_VERSION")] + #[stable(feature = "integer_sign_cast", since = "1.87.0")] + #[rustc_const_stable(feature = "integer_sign_cast", since = "1.87.0")] #[must_use = "this returns the result of the operation, \ without modifying the original"] #[inline(always)] diff --git a/library/core/src/num/uint_macros.rs b/library/core/src/num/uint_macros.rs index 586892758398b..bc6cb9508167d 100644 --- a/library/core/src/num/uint_macros.rs +++ b/library/core/src/num/uint_macros.rs @@ -273,8 +273,8 @@ macro_rules! uint_impl { /// #[doc = concat!("assert_eq!(n.cast_signed(), -1", stringify!($SignedT), ");")] /// ``` - #[stable(feature = "integer_sign_cast", since = "CURRENT_RUSTC_VERSION")] - #[rustc_const_stable(feature = "integer_sign_cast", since = "CURRENT_RUSTC_VERSION")] + #[stable(feature = "integer_sign_cast", since = "1.87.0")] + #[rustc_const_stable(feature = "integer_sign_cast", since = "1.87.0")] #[must_use = "this returns the result of the operation, \ without modifying the original"] #[inline(always)] @@ -1616,8 +1616,8 @@ macro_rules! uint_impl { #[doc = concat!("assert_eq!(0x1", stringify!($SelfT), ".unbounded_shl(4), 0x10);")] #[doc = concat!("assert_eq!(0x1", stringify!($SelfT), ".unbounded_shl(129), 0);")] /// ``` - #[stable(feature = "unbounded_shifts", since = "CURRENT_RUSTC_VERSION")] - #[rustc_const_stable(feature = "unbounded_shifts", since = "CURRENT_RUSTC_VERSION")] + #[stable(feature = "unbounded_shifts", since = "1.87.0")] + #[rustc_const_stable(feature = "unbounded_shifts", since = "1.87.0")] #[must_use = "this returns the result of the operation, \ without modifying the original"] #[inline] @@ -1737,8 +1737,8 @@ macro_rules! uint_impl { #[doc = concat!("assert_eq!(0x10", stringify!($SelfT), ".unbounded_shr(4), 0x1);")] #[doc = concat!("assert_eq!(0x10", stringify!($SelfT), ".unbounded_shr(129), 0);")] /// ``` - #[stable(feature = "unbounded_shifts", since = "CURRENT_RUSTC_VERSION")] - #[rustc_const_stable(feature = "unbounded_shifts", since = "CURRENT_RUSTC_VERSION")] + #[stable(feature = "unbounded_shifts", since = "1.87.0")] + #[rustc_const_stable(feature = "unbounded_shifts", since = "1.87.0")] #[must_use = "this returns the result of the operation, \ without modifying the original"] #[inline] @@ -3331,8 +3331,8 @@ macro_rules! uint_impl { #[doc = concat!("assert!(0_", stringify!($SelfT), ".is_multiple_of(0));")] #[doc = concat!("assert!(!6_", stringify!($SelfT), ".is_multiple_of(0));")] /// ``` - #[stable(feature = "unsigned_is_multiple_of", since = "CURRENT_RUSTC_VERSION")] - #[rustc_const_stable(feature = "unsigned_is_multiple_of", since = "CURRENT_RUSTC_VERSION")] + #[stable(feature = "unsigned_is_multiple_of", since = "1.87.0")] + #[rustc_const_stable(feature = "unsigned_is_multiple_of", since = "1.87.0")] #[must_use] #[inline] #[rustc_inherit_overflow_checks] @@ -3343,7 +3343,7 @@ macro_rules! uint_impl { } } - /// Returns `true` if and only if `self == 2^k` for some `k`. + /// Returns `true` if and only if `self == 2^k` for some unsigned integer `k`. /// /// # Examples /// @@ -3523,6 +3523,7 @@ macro_rules! uint_impl { #[rustc_const_stable(feature = "const_int_conversion", since = "1.44.0")] #[must_use = "this returns the result of the operation, \ without modifying the original"] + #[cfg_attr(not(bootstrap), allow(unnecessary_transmutes))] // SAFETY: const sound because integers are plain old datatypes so we can always // transmute them to arrays of bytes #[inline] @@ -3624,6 +3625,7 @@ macro_rules! uint_impl { /// ``` #[stable(feature = "int_to_from_bytes", since = "1.32.0")] #[rustc_const_stable(feature = "const_int_conversion", since = "1.44.0")] + #[cfg_attr(not(bootstrap), allow(unnecessary_transmutes))] #[must_use] // SAFETY: const sound because integers are plain old datatypes so we can always // transmute to them diff --git a/library/core/src/ops/drop.rs b/library/core/src/ops/drop.rs index e024b7fb4d301..5d040804a8d1c 100644 --- a/library/core/src/ops/drop.rs +++ b/library/core/src/ops/drop.rs @@ -240,10 +240,3 @@ pub trait Drop { #[stable(feature = "rust1", since = "1.0.0")] fn drop(&mut self); } - -/// Fallback function to call surface level `Drop::drop` function -#[allow(drop_bounds)] -#[lang = "fallback_surface_drop"] -pub(crate) fn fallback_surface_drop(x: &mut T) { - ::drop(x) -} diff --git a/library/core/src/ops/index.rs b/library/core/src/ops/index.rs index 37d9a28fb99c0..8106c088f0ba2 100644 --- a/library/core/src/ops/index.rs +++ b/library/core/src/ops/index.rs @@ -67,6 +67,7 @@ pub trait Index { /// /// May panic if the index is out of bounds. #[stable(feature = "rust1", since = "1.0.0")] + #[cfg_attr(not(bootstrap), rustc_no_implicit_autorefs)] #[track_caller] fn index(&self, index: Idx) -> &Self::Output; } @@ -171,6 +172,7 @@ pub trait IndexMut: Index { /// /// May panic if the index is out of bounds. #[stable(feature = "rust1", since = "1.0.0")] + #[cfg_attr(not(bootstrap), rustc_no_implicit_autorefs)] #[track_caller] fn index_mut(&mut self, index: Idx) -> &mut Self::Output; } diff --git a/library/core/src/ops/index_range.rs b/library/core/src/ops/index_range.rs index b82184b15b2f5..c645c996eb707 100644 --- a/library/core/src/ops/index_range.rs +++ b/library/core/src/ops/index_range.rs @@ -1,5 +1,6 @@ use crate::iter::{FusedIterator, TrustedLen}; use crate::num::NonZero; +use crate::ops::{NeverShortCircuit, Try}; use crate::ub_checks; /// Like a `Range`, but with a safety invariant that `start <= end`. @@ -112,6 +113,12 @@ impl IndexRange { self.end = mid; suffix } + + #[inline] + fn assume_range(&self) { + // SAFETY: This is the type invariant + unsafe { crate::hint::assert_unchecked(self.start <= self.end) } + } } impl Iterator for IndexRange { @@ -138,6 +145,30 @@ impl Iterator for IndexRange { let taken = self.take_prefix(n); NonZero::new(n - taken.len()).map_or(Ok(()), Err) } + + #[inline] + fn fold B>(mut self, init: B, f: F) -> B { + self.try_fold(init, NeverShortCircuit::wrap_mut_2(f)).0 + } + + #[inline] + fn try_fold(&mut self, mut accum: B, mut f: F) -> R + where + Self: Sized, + F: FnMut(B, Self::Item) -> R, + R: Try, + { + // `Range` needs to check `start < end`, but thanks to our type invariant + // we can loop on the stricter `start != end`. + + self.assume_range(); + while self.start != self.end { + // SAFETY: We just checked that the range is non-empty + let i = unsafe { self.next_unchecked() }; + accum = f(accum, i)?; + } + try { accum } + } } impl DoubleEndedIterator for IndexRange { @@ -156,6 +187,30 @@ impl DoubleEndedIterator for IndexRange { let taken = self.take_suffix(n); NonZero::new(n - taken.len()).map_or(Ok(()), Err) } + + #[inline] + fn rfold B>(mut self, init: B, f: F) -> B { + self.try_rfold(init, NeverShortCircuit::wrap_mut_2(f)).0 + } + + #[inline] + fn try_rfold(&mut self, mut accum: B, mut f: F) -> R + where + Self: Sized, + F: FnMut(B, Self::Item) -> R, + R: Try, + { + // `Range` needs to check `start < end`, but thanks to our type invariant + // we can loop on the stricter `start != end`. + + self.assume_range(); + while self.start != self.end { + // SAFETY: We just checked that the range is non-empty + let i = unsafe { self.next_back_unchecked() }; + accum = f(accum, i)?; + } + try { accum } + } } impl ExactSizeIterator for IndexRange { diff --git a/library/core/src/ops/mod.rs b/library/core/src/ops/mod.rs index 627a875d9f724..1658f0e5a3692 100644 --- a/library/core/src/ops/mod.rs +++ b/library/core/src/ops/mod.rs @@ -176,7 +176,6 @@ pub use self::deref::Receiver; pub use self::deref::{Deref, DerefMut}; #[stable(feature = "rust1", since = "1.0.0")] pub use self::drop::Drop; -pub(crate) use self::drop::fallback_surface_drop; #[stable(feature = "rust1", since = "1.0.0")] pub use self::function::{Fn, FnMut, FnOnce}; #[stable(feature = "rust1", since = "1.0.0")] diff --git a/library/core/src/option.rs b/library/core/src/option.rs index 7ec0ac7127142..aed5a043c11a3 100644 --- a/library/core/src/option.rs +++ b/library/core/src/option.rs @@ -162,8 +162,14 @@ //! The [`is_some`] and [`is_none`] methods return [`true`] if the [`Option`] //! is [`Some`] or [`None`], respectively. //! +//! The [`is_some_and`] and [`is_none_or`] methods apply the provided function +//! to the contents of the [`Option`] to produce a boolean value. +//! If this is [`None`] then a default result is returned instead without executing the function. +//! //! [`is_none`]: Option::is_none //! [`is_some`]: Option::is_some +//! [`is_some_and`]: Option::is_some_and +//! [`is_none_or`]: Option::is_none_or //! //! ## Adapters for working with references //! @@ -177,6 +183,10 @@ //! [Option]<[Pin]<[&]T>> //! * [`as_pin_mut`] converts from [Pin]<[&mut] [Option]\> to //! [Option]<[Pin]<[&mut] T>> +//! * [`as_slice`] returns a one-element slice of the contained value, if any. +//! If this is [`None`], an empty slice is returned. +//! * [`as_mut_slice`] returns a mutable one-element slice of the contained value, if any. +//! If this is [`None`], an empty slice is returned. //! //! [&]: reference "shared reference" //! [&mut]: reference "mutable reference" @@ -187,6 +197,8 @@ //! [`as_pin_mut`]: Option::as_pin_mut //! [`as_pin_ref`]: Option::as_pin_ref //! [`as_ref`]: Option::as_ref +//! [`as_slice`]: Option::as_slice +//! [`as_mut_slice`]: Option::as_mut_slice //! //! ## Extracting the contained value //! @@ -200,12 +212,15 @@ //! (which must implement the [`Default`] trait) //! * [`unwrap_or_else`] returns the result of evaluating the provided //! function +//! * [`unwrap_unchecked`] produces *[undefined behavior]* //! //! [`expect`]: Option::expect //! [`unwrap`]: Option::unwrap //! [`unwrap_or`]: Option::unwrap_or //! [`unwrap_or_default`]: Option::unwrap_or_default //! [`unwrap_or_else`]: Option::unwrap_or_else +//! [`unwrap_unchecked`]: Option::unwrap_unchecked +//! [undefined behavior]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html //! //! ## Transforming contained values //! @@ -230,8 +245,9 @@ //! * [`filter`] calls the provided predicate function on the contained //! value `t` if the [`Option`] is [`Some(t)`], and returns [`Some(t)`] //! if the function returns `true`; otherwise, returns [`None`] -//! * [`flatten`] removes one level of nesting from an -//! [`Option>`] +//! * [`flatten`] removes one level of nesting from an [`Option>`] +//! * [`inspect`] method takes ownership of the [`Option`] and applies +//! the provided function to the contained value by reference if [`Some`] //! * [`map`] transforms [`Option`] to [`Option`] by applying the //! provided function to the contained value of [`Some`] and leaving //! [`None`] values unchanged @@ -239,6 +255,7 @@ //! [`Some(t)`]: Some //! [`filter`]: Option::filter //! [`flatten`]: Option::flatten +//! [`inspect`]: Option::inspect //! [`map`]: Option::map //! //! These methods transform [`Option`] to a value of a possibly @@ -621,6 +638,10 @@ impl Option { /// /// let x: Option = None; /// assert_eq!(x.is_some_and(|x| x > 1), false); + /// + /// let x: Option = Some("ownership".to_string()); + /// assert_eq!(x.as_ref().is_some_and(|x| x.len() > 1), true); + /// println!("still alive {:?}", x); /// ``` #[must_use] #[inline] @@ -665,6 +686,10 @@ impl Option { /// /// let x: Option = None; /// assert_eq!(x.is_none_or(|x| x > 1), true); + /// + /// let x: Option = Some("ownership".to_string()); + /// assert_eq!(x.as_ref().is_none_or(|x| x.len() > 1), true); + /// println!("still alive {:?}", x); /// ``` #[must_use] #[inline] diff --git a/library/core/src/panic.rs b/library/core/src/panic.rs index 5fa340a6147f6..df41d5d3fadde 100644 --- a/library/core/src/panic.rs +++ b/library/core/src/panic.rs @@ -16,6 +16,14 @@ pub use self::panic_info::PanicMessage; pub use self::unwind_safe::{AssertUnwindSafe, RefUnwindSafe, UnwindSafe}; use crate::any::Any; +/// Core expects some crate to provide a function annotated with `#[panic_handler]` with this +/// signature. This annotated function will be called when a panic occurs. +#[stable(feature = "panic_hooks", since = "1.10.0")] +#[cfg(not(bootstrap))] +#[eii(panic_handler)] +#[lang = "panic_impl"] +pub(crate) fn panic_impl(info: &PanicInfo<'_>) -> !; + #[doc(hidden)] #[unstable(feature = "edition_panic", issue = "none", reason = "use panic!() instead")] #[allow_internal_unstable(panic_internals, const_format_args)] diff --git a/library/core/src/panicking.rs b/library/core/src/panicking.rs index 33ad59916e391..52c6b0d957748 100644 --- a/library/core/src/panicking.rs +++ b/library/core/src/panicking.rs @@ -56,23 +56,36 @@ pub const fn panic_fmt(fmt: fmt::Arguments<'_>) -> ! { if cfg!(feature = "panic_immediate_abort") { super::intrinsics::abort() } + #[cfg(bootstrap)] + { + // NOTE This function never crosses the FFI boundary; it's a Rust-to-Rust call + // that gets resolved to the `#[panic_handler]` function. + unsafe extern "Rust" { + #[lang = "panic_impl"] + fn panic_impl(pi: &PanicInfo<'_>) -> !; + } - // NOTE This function never crosses the FFI boundary; it's a Rust-to-Rust call - // that gets resolved to the `#[panic_handler]` function. - unsafe extern "Rust" { - #[lang = "panic_impl"] - fn panic_impl(pi: &PanicInfo<'_>) -> !; - } - - let pi = PanicInfo::new( - &fmt, - Location::caller(), - /* can_unwind */ true, - /* force_no_backtrace */ false, - ); + let pi = PanicInfo::new( + &fmt, + Location::caller(), + /* can_unwind */ true, + /* force_no_backtrace */ false, + ); - // SAFETY: `panic_impl` is defined in safe Rust code and thus is safe to call. - unsafe { panic_impl(&pi) } + // SAFETY: `panic_impl` is defined in safe Rust code and thus is safe to call. + unsafe { panic_impl(&pi) } + } + #[cfg(not(bootstrap))] + { + let pi = PanicInfo::new( + &fmt, + Location::caller(), + /* can_unwind */ true, + /* force_no_backtrace */ false, + ); + + crate::panic::panic_impl(&pi) + } } /// Like `panic_fmt`, but for non-unwinding panics. @@ -98,23 +111,39 @@ pub const fn panic_nounwind_fmt(fmt: fmt::Arguments<'_>, force_no_backtrace: boo super::intrinsics::abort() } - // NOTE This function never crosses the FFI boundary; it's a Rust-to-Rust call - // that gets resolved to the `#[panic_handler]` function. - unsafe extern "Rust" { - #[lang = "panic_impl"] - fn panic_impl(pi: &PanicInfo<'_>) -> !; - } + #[cfg(bootstrap)] + { + // NOTE This function never crosses the FFI boundary; it's a Rust-to-Rust call + // that gets resolved to the `#[panic_handler]` function. + unsafe extern "Rust" { + #[lang = "panic_impl"] + fn panic_impl(pi: &PanicInfo<'_>) -> !; + } - // PanicInfo with the `can_unwind` flag set to false forces an abort. - let pi = PanicInfo::new( - &fmt, - Location::caller(), - /* can_unwind */ false, - force_no_backtrace, - ); + // PanicInfo with the `can_unwind` flag set to false forces an abort. + let pi = PanicInfo::new( + &fmt, + Location::caller(), + /* can_unwind */ false, + force_no_backtrace, + ); - // SAFETY: `panic_impl` is defined in safe Rust code and thus is safe to call. - unsafe { panic_impl(&pi) } + // SAFETY: `panic_impl` is defined in safe Rust code and thus is safe to call. + unsafe { panic_impl(&pi) } + } + + #[cfg(not(bootstrap))] + { + // PanicInfo with the `can_unwind` flag set to false forces an abort. + let pi = PanicInfo::new( + &fmt, + Location::caller(), + /* can_unwind */ false, + force_no_backtrace, + ); + + crate::panic::panic_impl(&pi) + } } ) } @@ -155,30 +184,26 @@ pub const fn panic(expr: &'static str) -> ! { // reducing binary size impact. macro_rules! panic_const { ($($lang:ident = $message:expr,)+) => { - pub mod panic_const { - use super::*; - - $( - /// This is a panic called with a message that's a result of a MIR-produced Assert. - // - // never inline unless panic_immediate_abort to avoid code - // bloat at the call sites as much as possible - #[cfg_attr(not(feature = "panic_immediate_abort"), inline(never), cold)] - #[cfg_attr(feature = "panic_immediate_abort", inline)] - #[track_caller] - #[rustc_const_stable_indirect] // must follow stable const rules since it is exposed to stable - #[lang = stringify!($lang)] - pub const fn $lang() -> ! { - // Use Arguments::new_const instead of format_args!("{expr}") to potentially - // reduce size overhead. The format_args! macro uses str's Display trait to - // write expr, which calls Formatter::pad, which must accommodate string - // truncation and padding (even though none is used here). Using - // Arguments::new_const may allow the compiler to omit Formatter::pad from the - // output binary, saving up to a few kilobytes. - panic_fmt(fmt::Arguments::new_const(&[$message])); - } - )+ - } + $( + /// This is a panic called with a message that's a result of a MIR-produced Assert. + // + // never inline unless panic_immediate_abort to avoid code + // bloat at the call sites as much as possible + #[cfg_attr(not(feature = "panic_immediate_abort"), inline(never), cold)] + #[cfg_attr(feature = "panic_immediate_abort", inline)] + #[track_caller] + #[rustc_const_stable_indirect] // must follow stable const rules since it is exposed to stable + #[lang = stringify!($lang)] + pub const fn $lang() -> ! { + // Use Arguments::new_const instead of format_args!("{expr}") to potentially + // reduce size overhead. The format_args! macro uses str's Display trait to + // write expr, which calls Formatter::pad, which must accommodate string + // truncation and padding (even though none is used here). Using + // Arguments::new_const may allow the compiler to omit Formatter::pad from the + // output binary, saving up to a few kilobytes. + panic_fmt(fmt::Arguments::new_const(&[$message])); + } + )+ } } @@ -186,25 +211,37 @@ macro_rules! panic_const { // slightly different forms. It's not clear if there's a good way to deduplicate without adding // special cases to the compiler (e.g., a const generic function wouldn't have a single definition // shared across crates, which is exactly what we want here). -panic_const! { - panic_const_add_overflow = "attempt to add with overflow", - panic_const_sub_overflow = "attempt to subtract with overflow", - panic_const_mul_overflow = "attempt to multiply with overflow", - panic_const_div_overflow = "attempt to divide with overflow", - panic_const_rem_overflow = "attempt to calculate the remainder with overflow", - panic_const_neg_overflow = "attempt to negate with overflow", - panic_const_shr_overflow = "attempt to shift right with overflow", - panic_const_shl_overflow = "attempt to shift left with overflow", - panic_const_div_by_zero = "attempt to divide by zero", - panic_const_rem_by_zero = "attempt to calculate the remainder with a divisor of zero", - panic_const_coroutine_resumed = "coroutine resumed after completion", - panic_const_async_fn_resumed = "`async fn` resumed after completion", - panic_const_async_gen_fn_resumed = "`async gen fn` resumed after completion", - panic_const_gen_fn_none = "`gen fn` should just keep returning `None` after completion", - panic_const_coroutine_resumed_panic = "coroutine resumed after panicking", - panic_const_async_fn_resumed_panic = "`async fn` resumed after panicking", - panic_const_async_gen_fn_resumed_panic = "`async gen fn` resumed after panicking", - panic_const_gen_fn_none_panic = "`gen fn` should just keep returning `None` after panicking", +pub mod panic_const { + use super::*; + panic_const! { + panic_const_add_overflow = "attempt to add with overflow", + panic_const_sub_overflow = "attempt to subtract with overflow", + panic_const_mul_overflow = "attempt to multiply with overflow", + panic_const_div_overflow = "attempt to divide with overflow", + panic_const_rem_overflow = "attempt to calculate the remainder with overflow", + panic_const_neg_overflow = "attempt to negate with overflow", + panic_const_shr_overflow = "attempt to shift right with overflow", + panic_const_shl_overflow = "attempt to shift left with overflow", + panic_const_div_by_zero = "attempt to divide by zero", + panic_const_rem_by_zero = "attempt to calculate the remainder with a divisor of zero", + panic_const_coroutine_resumed = "coroutine resumed after completion", + panic_const_async_fn_resumed = "`async fn` resumed after completion", + panic_const_async_gen_fn_resumed = "`async gen fn` resumed after completion", + panic_const_gen_fn_none = "`gen fn` should just keep returning `None` after completion", + panic_const_coroutine_resumed_panic = "coroutine resumed after panicking", + panic_const_async_fn_resumed_panic = "`async fn` resumed after panicking", + panic_const_async_gen_fn_resumed_panic = "`async gen fn` resumed after panicking", + panic_const_gen_fn_none_panic = "`gen fn` should just keep returning `None` after panicking", + } + // Separated panic constants list for async drop feature + // (May be joined when the corresponding lang items will be in the bootstrap) + #[cfg(not(bootstrap))] + panic_const! { + panic_const_coroutine_resumed_drop = "coroutine resumed after async drop", + panic_const_async_fn_resumed_drop = "`async fn` resumed after async drop", + panic_const_async_gen_fn_resumed_drop = "`async gen fn` resumed after async drop", + panic_const_gen_fn_none_drop = "`gen fn` resumed after async drop", + } } /// Like `panic`, but without unwinding and track_caller to reduce the impact on codesize on the caller. diff --git a/library/core/src/pat.rs b/library/core/src/pat.rs index f8826096df3e1..91d015b1bc53f 100644 --- a/library/core/src/pat.rs +++ b/library/core/src/pat.rs @@ -25,15 +25,15 @@ macro_rules! pattern_type { )] pub trait RangePattern { /// Trait version of the inherent `MIN` assoc const. - #[cfg_attr(not(bootstrap), lang = "RangeMin")] + #[lang = "RangeMin"] const MIN: Self; /// Trait version of the inherent `MIN` assoc const. - #[cfg_attr(not(bootstrap), lang = "RangeMax")] + #[lang = "RangeMax"] const MAX: Self; /// A compile-time helper to subtract 1 for exclusive ranges. - #[cfg_attr(not(bootstrap), lang = "RangeSub")] + #[lang = "RangeSub"] #[track_caller] fn sub_one(self) -> Self; } diff --git a/library/core/src/pin.rs b/library/core/src/pin.rs index bc097bf198d03..dd1c2f2c28513 100644 --- a/library/core/src/pin.rs +++ b/library/core/src/pin.rs @@ -676,7 +676,7 @@ //! let data_ptr = unpinned_src.data.as_ptr() as *const u8; //! let slice_ptr = unpinned_src.slice.as_ptr() as *const u8; //! let offset = slice_ptr.offset_from(data_ptr) as usize; -//! let len = (*unpinned_src.slice.as_ptr()).len(); +//! let len = unpinned_src.slice.as_ptr().len(); //! //! unpinned_self.slice = NonNull::from(&mut unpinned_self.data[offset..offset+len]); //! } @@ -931,6 +931,11 @@ use crate::{ }; use crate::{cmp, fmt}; +mod unsafe_pinned; + +#[unstable(feature = "unsafe_pinned", issue = "125735")] +pub use self::unsafe_pinned::UnsafePinned; + /// A pointer which pins its pointee in place. /// /// [`Pin`] is a wrapper around some kind of pointer `Ptr` which makes that pointer "pin" its @@ -1087,24 +1092,15 @@ use crate::{cmp, fmt}; #[rustc_pub_transparent] #[derive(Copy, Clone)] pub struct Pin { - // FIXME(#93176): this field is made `#[unstable] #[doc(hidden)] pub` to: - // - deter downstream users from accessing it (which would be unsound!), - // - let the `pin!` macro access it (such a macro requires using struct - // literal syntax in order to benefit from lifetime extension). - // - // However, if the `Deref` impl exposes a field with the same name as this - // field, then the two will collide, resulting in a confusing error when the - // user attempts to access the field through a `Pin`. Therefore, the - // name `__pointer` is designed to be unlikely to collide with any other - // field. Long-term, macro hygiene is expected to offer a more robust - // alternative, alongside `unsafe` fields. - #[unstable(feature = "unsafe_pin_internals", issue = "none")] - #[doc(hidden)] - pub __pointer: Ptr, + /// Only public for bootstrap. + #[cfg(bootstrap)] + pub pointer: Ptr, + #[cfg(not(bootstrap))] + pointer: Ptr, } // The following implementations aren't derived in order to avoid soundness -// issues. `&self.__pointer` should not be accessible to untrusted trait +// issues. `&self.pointer` should not be accessible to untrusted trait // implementations. // // See for more details. @@ -1218,7 +1214,7 @@ impl> Pin { #[rustc_const_stable(feature = "const_pin", since = "1.84.0")] #[stable(feature = "pin_into_inner", since = "1.39.0")] pub const fn into_inner(pin: Pin) -> Ptr { - pin.__pointer + pin.pointer } } @@ -1355,7 +1351,7 @@ impl Pin { #[rustc_const_stable(feature = "const_pin", since = "1.84.0")] #[stable(feature = "pin", since = "1.33.0")] pub const unsafe fn new_unchecked(pointer: Ptr) -> Pin { - Pin { __pointer: pointer } + Pin { pointer } } /// Gets a shared reference to the pinned value this [`Pin`] points to. @@ -1369,7 +1365,7 @@ impl Pin { #[inline(always)] pub fn as_ref(&self) -> Pin<&Ptr::Target> { // SAFETY: see documentation on this function - unsafe { Pin::new_unchecked(&*self.__pointer) } + unsafe { Pin::new_unchecked(&*self.pointer) } } } @@ -1413,7 +1409,7 @@ impl Pin { #[inline(always)] pub fn as_mut(&mut self) -> Pin<&mut Ptr::Target> { // SAFETY: see documentation on this function - unsafe { Pin::new_unchecked(&mut *self.__pointer) } + unsafe { Pin::new_unchecked(&mut *self.pointer) } } /// Gets `Pin<&mut T>` to the underlying pinned value from this nested `Pin`-pointer. @@ -1480,7 +1476,7 @@ impl Pin { where Ptr::Target: Sized, { - *(self.__pointer) = value; + *(self.pointer) = value; } } @@ -1508,7 +1504,7 @@ impl Pin { #[rustc_const_stable(feature = "const_pin", since = "1.84.0")] #[stable(feature = "pin_into_inner", since = "1.39.0")] pub const unsafe fn into_inner_unchecked(pin: Pin) -> Ptr { - pin.__pointer + pin.pointer } } @@ -1534,7 +1530,7 @@ impl<'a, T: ?Sized> Pin<&'a T> { U: ?Sized, F: FnOnce(&T) -> &U, { - let pointer = &*self.__pointer; + let pointer = &*self.pointer; let new_pointer = func(pointer); // SAFETY: the safety contract for `new_unchecked` must be @@ -1564,7 +1560,7 @@ impl<'a, T: ?Sized> Pin<&'a T> { #[rustc_const_stable(feature = "const_pin", since = "1.84.0")] #[stable(feature = "pin", since = "1.33.0")] pub const fn get_ref(self) -> &'a T { - self.__pointer + self.pointer } } @@ -1575,7 +1571,7 @@ impl<'a, T: ?Sized> Pin<&'a mut T> { #[rustc_const_stable(feature = "const_pin", since = "1.84.0")] #[stable(feature = "pin", since = "1.33.0")] pub const fn into_ref(self) -> Pin<&'a T> { - Pin { __pointer: self.__pointer } + Pin { pointer: self.pointer } } /// Gets a mutable reference to the data inside of this `Pin`. @@ -1595,7 +1591,7 @@ impl<'a, T: ?Sized> Pin<&'a mut T> { where T: Unpin, { - self.__pointer + self.pointer } /// Gets a mutable reference to the data inside of this `Pin`. @@ -1613,7 +1609,7 @@ impl<'a, T: ?Sized> Pin<&'a mut T> { #[stable(feature = "pin", since = "1.33.0")] #[rustc_const_stable(feature = "const_pin", since = "1.84.0")] pub const unsafe fn get_unchecked_mut(self) -> &'a mut T { - self.__pointer + self.pointer } /// Constructs a new pin by mapping the interior value. @@ -1700,21 +1696,21 @@ impl LegacyReceiver for Pin {} #[stable(feature = "pin", since = "1.33.0")] impl fmt::Debug for Pin { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - fmt::Debug::fmt(&self.__pointer, f) + fmt::Debug::fmt(&self.pointer, f) } } #[stable(feature = "pin", since = "1.33.0")] impl fmt::Display for Pin { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - fmt::Display::fmt(&self.__pointer, f) + fmt::Display::fmt(&self.pointer, f) } } #[stable(feature = "pin", since = "1.33.0")] impl fmt::Pointer for Pin { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - fmt::Pointer::fmt(&self.__pointer, f) + fmt::Pointer::fmt(&self.pointer, f) } } @@ -1940,80 +1936,22 @@ unsafe impl PinCoerceUnsized for *mut T {} /// constructor. /// /// [`Box::pin`]: ../../std/boxed/struct.Box.html#method.pin +#[cfg(not(bootstrap))] +#[stable(feature = "pin_macro", since = "1.68.0")] +#[rustc_macro_transparency = "semitransparent"] +#[allow_internal_unstable(super_let)] +pub macro pin($value:expr $(,)?) { + { + super let mut pinned = $value; + // SAFETY: The value is pinned: it is the local above which cannot be named outside this macro. + unsafe { $crate::pin::Pin::new_unchecked(&mut pinned) } + } +} + +/// Only for bootstrap. +#[cfg(bootstrap)] #[stable(feature = "pin_macro", since = "1.68.0")] #[rustc_macro_transparency = "semitransparent"] -#[allow_internal_unstable(unsafe_pin_internals)] -#[cfg_attr(not(bootstrap), rustc_macro_edition_2021)] pub macro pin($value:expr $(,)?) { - // This is `Pin::new_unchecked(&mut { $value })`, so, for starters, let's - // review such a hypothetical macro (that any user-code could define): - // - // ```rust - // macro_rules! pin {( $value:expr ) => ( - // match &mut { $value } { at_value => unsafe { // Do not wrap `$value` in an `unsafe` block. - // $crate::pin::Pin::<&mut _>::new_unchecked(at_value) - // }} - // )} - // ``` - // - // Safety: - // - `type P = &mut _`. There are thus no pathological `Deref{,Mut}` impls - // that would break `Pin`'s invariants. - // - `{ $value }` is braced, making it a _block expression_, thus **moving** - // the given `$value`, and making it _become an **anonymous** temporary_. - // By virtue of being anonymous, it can no longer be accessed, thus - // preventing any attempts to `mem::replace` it or `mem::forget` it, _etc._ - // - // This gives us a `pin!` definition that is sound, and which works, but only - // in certain scenarios: - // - If the `pin!(value)` expression is _directly_ fed to a function call: - // `let poll = pin!(fut).poll(cx);` - // - If the `pin!(value)` expression is part of a scrutinee: - // ```rust - // match pin!(fut) { pinned_fut => { - // pinned_fut.as_mut().poll(...); - // pinned_fut.as_mut().poll(...); - // }} // <- `fut` is dropped here. - // ``` - // Alas, it doesn't work for the more straight-forward use-case: `let` bindings. - // ```rust - // let pinned_fut = pin!(fut); // <- temporary value is freed at the end of this statement - // pinned_fut.poll(...) // error[E0716]: temporary value dropped while borrowed - // // note: consider using a `let` binding to create a longer lived value - // ``` - // - Issues such as this one are the ones motivating https://github.com/rust-lang/rfcs/pull/66 - // - // This makes such a macro incredibly unergonomic in practice, and the reason most macros - // out there had to take the path of being a statement/binding macro (_e.g._, `pin!(future);`) - // instead of featuring the more intuitive ergonomics of an expression macro. - // - // Luckily, there is a way to avoid the problem. Indeed, the problem stems from the fact that a - // temporary is dropped at the end of its enclosing statement when it is part of the parameters - // given to function call, which has precisely been the case with our `Pin::new_unchecked()`! - // For instance, - // ```rust - // let p = Pin::new_unchecked(&mut ); - // ``` - // becomes: - // ```rust - // let p = { let mut anon = ; &mut anon }; - // ``` - // - // However, when using a literal braced struct to construct the value, references to temporaries - // can then be taken. This makes Rust change the lifespan of such temporaries so that they are, - // instead, dropped _at the end of the enscoping block_. - // For instance, - // ```rust - // let p = Pin { __pointer: &mut }; - // ``` - // becomes: - // ```rust - // let mut anon = ; - // let p = Pin { __pointer: &mut anon }; - // ``` - // which is *exactly* what we want. - // - // See https://doc.rust-lang.org/1.58.1/reference/destructors.html#temporary-lifetime-extension - // for more info. - $crate::pin::Pin::<&mut _> { __pointer: &mut { $value } } + $crate::pin::Pin::<&mut _> { pointer: &mut { $value } } } diff --git a/library/core/src/pin/unsafe_pinned.rs b/library/core/src/pin/unsafe_pinned.rs new file mode 100644 index 0000000000000..5fb628c8adbc5 --- /dev/null +++ b/library/core/src/pin/unsafe_pinned.rs @@ -0,0 +1,197 @@ +use crate::marker::{PointerLike, Unpin}; +use crate::ops::{CoerceUnsized, DispatchFromDyn}; +use crate::pin::Pin; +use crate::{fmt, ptr}; + +/// This type provides a way to opt-out of typical aliasing rules; +/// specifically, `&mut UnsafePinned` is not guaranteed to be a unique pointer. +/// +/// However, even if you define your type like `pub struct Wrapper(UnsafePinned<...>)`, it is still +/// very risky to have an `&mut Wrapper` that aliases anything else. Many functions that work +/// generically on `&mut T` assume that the memory that stores `T` is uniquely owned (such as +/// `mem::swap`). In other words, while having aliasing with `&mut Wrapper` is not immediate +/// Undefined Behavior, it is still unsound to expose such a mutable reference to code you do not +/// control! Techniques such as pinning via [`Pin`] are needed to ensure soundness. +/// +/// Similar to [`UnsafeCell`](crate::cell::UnsafeCell), `UnsafePinned` will not usually show up in +/// the public API of a library. It is an internal implementation detail of libraries that need to +/// support aliasing mutable references. +/// +/// Further note that this does *not* lift the requirement that shared references must be read-only! +/// Use `UnsafeCell` for that. +/// +/// This type blocks niches the same way `UnsafeCell` does. +#[cfg_attr(not(bootstrap), lang = "unsafe_pinned")] +#[repr(transparent)] +#[unstable(feature = "unsafe_pinned", issue = "125735")] +pub struct UnsafePinned { + value: T, +} + +/// When this type is used, that almost certainly means safe APIs need to use pinning to avoid the +/// aliases from becoming invalidated. Therefore let's mark this as `!Unpin`. You can always opt +/// back in to `Unpin` with an `impl` block, provided your API is still sound while unpinned. +#[unstable(feature = "unsafe_pinned", issue = "125735")] +impl !Unpin for UnsafePinned {} + +/// The type is `Copy` when `T` is to avoid people assuming that `Copy` implies there is no +/// `UnsafePinned` anywhere. (This is an issue with `UnsafeCell`: people use `Copy` bounds to mean +/// `Freeze`.) Given that there is no `unsafe impl Copy for ...`, this is also the option that +/// leaves the user more choices (as they can always wrap this in a `!Copy` type). +// FIXME(unsafe_pinned): this may be unsound or a footgun? +#[unstable(feature = "unsafe_pinned", issue = "125735")] +impl Copy for UnsafePinned {} + +#[unstable(feature = "unsafe_pinned", issue = "125735")] +impl Clone for UnsafePinned { + fn clone(&self) -> Self { + *self + } +} + +// `Send` and `Sync` are inherited from `T`. This is similar to `SyncUnsafeCell`, since +// we eventually concluded that `UnsafeCell` implicitly making things `!Sync` is sometimes +// unergonomic. A type that needs to be `!Send`/`!Sync` should really have an explicit +// opt-out itself, e.g. via an `PhantomData<*mut T>` or (one day) via `impl !Send`/`impl !Sync`. + +impl UnsafePinned { + /// Constructs a new instance of `UnsafePinned` which will wrap the specified value. + /// + /// All access to the inner value through `&UnsafePinned` or `&mut UnsafePinned` or + /// `Pin<&mut UnsafePinned>` requires `unsafe` code. + #[inline(always)] + #[must_use] + #[unstable(feature = "unsafe_pinned", issue = "125735")] + pub const fn new(value: T) -> Self { + UnsafePinned { value } + } + + /// Unwraps the value, consuming this `UnsafePinned`. + #[inline(always)] + #[must_use] + #[unstable(feature = "unsafe_pinned", issue = "125735")] + #[rustc_allow_const_fn_unstable(const_precise_live_drops)] + pub const fn into_inner(self) -> T { + self.value + } +} + +impl UnsafePinned { + /// Get read-write access to the contents of a pinned `UnsafePinned`. + #[inline(always)] + #[must_use] + #[unstable(feature = "unsafe_pinned", issue = "125735")] + pub const fn get_mut_pinned(self: Pin<&mut Self>) -> *mut T { + // SAFETY: we're not using `get_unchecked_mut` to unpin anything + unsafe { self.get_unchecked_mut() }.get_mut_unchecked() + } + + /// Get read-write access to the contents of an `UnsafePinned`. + /// + /// You should usually be using `get_mut_pinned` instead to explicitly track the fact that this + /// memory is "pinned" due to there being aliases. + #[inline(always)] + #[must_use] + #[unstable(feature = "unsafe_pinned", issue = "125735")] + pub const fn get_mut_unchecked(&mut self) -> *mut T { + ptr::from_mut(self) as *mut T + } + + /// Get read-only access to the contents of a shared `UnsafePinned`. + /// + /// Note that `&UnsafePinned` is read-only if `&T` is read-only. This means that if there is + /// mutation of the `T`, future reads from the `*const T` returned here are UB! Use + /// [`UnsafeCell`] if you also need interior mutability. + /// + /// [`UnsafeCell`]: crate::cell::UnsafeCell + /// + /// ```rust,no_run + /// #![feature(unsafe_pinned)] + /// use std::pin::UnsafePinned; + /// + /// unsafe { + /// let mut x = UnsafePinned::new(0); + /// let ptr = x.get(); // read-only pointer, assumes immutability + /// x.get_mut_unchecked().write(1); + /// ptr.read(); // UB! + /// } + /// ``` + #[inline(always)] + #[must_use] + #[unstable(feature = "unsafe_pinned", issue = "125735")] + pub const fn get(&self) -> *const T { + ptr::from_ref(self) as *const T + } + + /// Gets an immutable pointer to the wrapped value. + /// + /// The difference from [`get`] is that this function accepts a raw pointer, which is useful to + /// avoid the creation of temporary references. + /// + /// [`get`]: UnsafePinned::get + #[inline(always)] + #[must_use] + #[unstable(feature = "unsafe_pinned", issue = "125735")] + pub const fn raw_get(this: *const Self) -> *const T { + this as *const T + } + + /// Gets a mutable pointer to the wrapped value. + /// + /// The difference from [`get_mut_pinned`] and [`get_mut_unchecked`] is that this function + /// accepts a raw pointer, which is useful to avoid the creation of temporary references. + /// + /// [`get_mut_pinned`]: UnsafePinned::get_mut_pinned + /// [`get_mut_unchecked`]: UnsafePinned::get_mut_unchecked + #[inline(always)] + #[must_use] + #[unstable(feature = "unsafe_pinned", issue = "125735")] + pub const fn raw_get_mut(this: *mut Self) -> *mut T { + this as *mut T + } +} + +#[unstable(feature = "unsafe_pinned", issue = "125735")] +impl Default for UnsafePinned { + /// Creates an `UnsafePinned`, with the `Default` value for T. + fn default() -> Self { + UnsafePinned::new(T::default()) + } +} + +#[unstable(feature = "unsafe_pinned", issue = "125735")] +impl From for UnsafePinned { + /// Creates a new `UnsafePinned` containing the given value. + fn from(value: T) -> Self { + UnsafePinned::new(value) + } +} + +#[unstable(feature = "unsafe_pinned", issue = "125735")] +impl fmt::Debug for UnsafePinned { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_struct("UnsafePinned").finish_non_exhaustive() + } +} + +#[unstable(feature = "coerce_unsized", issue = "18598")] +// #[unstable(feature = "unsafe_pinned", issue = "125735")] +impl, U> CoerceUnsized> for UnsafePinned {} + +// Allow types that wrap `UnsafePinned` to also implement `DispatchFromDyn` +// and become dyn-compatible method receivers. +// Note that currently `UnsafePinned` itself cannot be a method receiver +// because it does not implement Deref. +// In other words: +// `self: UnsafePinned<&Self>` won't work +// `self: UnsafePinned` becomes possible +// FIXME(unsafe_pinned) this logic is copied from UnsafeCell, is it still sound? +#[unstable(feature = "dispatch_from_dyn", issue = "none")] +// #[unstable(feature = "unsafe_pinned", issue = "125735")] +impl, U> DispatchFromDyn> for UnsafePinned {} + +#[unstable(feature = "pointer_like_trait", issue = "none")] +// #[unstable(feature = "unsafe_pinned", issue = "125735")] +impl PointerLike for UnsafePinned {} + +// FIXME(unsafe_pinned): impl PinCoerceUnsized for UnsafePinned? diff --git a/library/core/src/prelude/v1.rs b/library/core/src/prelude/v1.rs index c5975c0305031..dad7d16eb562d 100644 --- a/library/core/src/prelude/v1.rs +++ b/library/core/src/prelude/v1.rs @@ -59,6 +59,7 @@ pub use crate::hash::macros::Hash; #[stable(feature = "builtin_macro_prelude", since = "1.38.0")] #[allow(deprecated)] +#[cfg_attr(bootstrap, allow(deprecated_in_future))] #[doc(no_inline)] pub use crate::{ assert, cfg, column, compile_error, concat, concat_idents, env, file, format_args, @@ -117,5 +118,12 @@ pub use crate::macros::builtin::deref; issue = "63063", reason = "`type_alias_impl_trait` has open design concerns" )] -#[cfg(not(bootstrap))] pub use crate::macros::builtin::define_opaque; + +#[unstable(feature = "eii", issue = "125418")] +#[cfg(not(bootstrap))] +pub use crate::macros::builtin::{eii, unsafe_eii}; + +#[unstable(feature = "eii_internals", issue = "none")] +#[cfg(not(bootstrap))] +pub use crate::macros::builtin::eii_macro_for; diff --git a/library/core/src/primitive_docs.rs b/library/core/src/primitive_docs.rs index 89c856fe10746..17c4b48836134 100644 --- a/library/core/src/primitive_docs.rs +++ b/library/core/src/primitive_docs.rs @@ -127,15 +127,13 @@ mod prim_bool {} /// [`Result`] which we can unpack like this: /// /// ``` -/// #![feature(exhaustive_patterns)] /// use std::str::FromStr; /// let Ok(s) = String::from_str("hello"); /// ``` /// -/// Since the [`Err`] variant contains a `!`, it can never occur. If the `exhaustive_patterns` -/// feature is present this means we can exhaustively match on [`Result`] by just taking the -/// [`Ok`] variant. This illustrates another behavior of `!` - it can be used to "delete" certain -/// enum variants from generic types like `Result`. +/// Since the [`Err`] variant contains a `!`, it can never occur. This means we can exhaustively +/// match on [`Result`] by just taking the [`Ok`] variant. This illustrates another behavior +/// of `!` - it can be used to "delete" certain enum variants from generic types like `Result`. /// /// ## Infinite loops /// @@ -1309,12 +1307,59 @@ mod prim_f16 {} // FIXME: Is there a better place to put this? /// /// | `target_arch` | Extra payloads possible on this platform | -/// |---------------|---------| -/// | `x86`, `x86_64`, `arm`, `aarch64`, `riscv32`, `riscv64` | None | +/// |---------------|------------------------------------------| +// Sorted alphabetically +/// | `aarch64`, `arm`, `arm64ec`, `loongarch64`, `powerpc` (except when `target_abi = "spe"`), `powerpc64`, `riscv32`, `riscv64`, `s390x`, `x86`, `x86_64` | None | +/// | `nvptx64` | All payloads | /// | `sparc`, `sparc64` | The all-one payload | -/// | `wasm32`, `wasm64` | If all input NaNs are quiet with all-zero payload: None.
Otherwise: all possible payloads. | +/// | `wasm32`, `wasm64` | If all input NaNs are quiet with all-zero payload: None.
Otherwise: all payloads. | /// /// For targets not in this table, all payloads are possible. +/// +/// # Algebraic operators +/// +/// Algebraic operators of the form `a.algebraic_*(b)` allow the compiler to optimize +/// floating point operations using all the usual algebraic properties of real numbers -- +/// despite the fact that those properties do *not* hold on floating point numbers. +/// This can give a great performance boost since it may unlock vectorization. +/// +/// The exact set of optimizations is unspecified but typically allows combining operations, +/// rearranging series of operations based on mathematical properties, converting between division +/// and reciprocal multiplication, and disregarding the sign of zero. This means that the results of +/// elementary operations may have undefined precision, and "non-mathematical" values +/// such as NaN, +/-Inf, or -0.0 may behave in unexpected ways, but these operations +/// will never cause undefined behavior. +/// +/// Because of the unpredictable nature of compiler optimizations, the same inputs may produce +/// different results even within a single program run. **Unsafe code must not rely on any property +/// of the return value for soundness.** However, implementations will generally do their best to +/// pick a reasonable tradeoff between performance and accuracy of the result. +/// +/// For example: +/// +/// ``` +/// # #![feature(float_algebraic)] +/// # #![allow(unused_assignments)] +/// # let mut x: f32 = 0.0; +/// # let a: f32 = 1.0; +/// # let b: f32 = 2.0; +/// # let c: f32 = 3.0; +/// # let d: f32 = 4.0; +/// x = a.algebraic_add(b).algebraic_add(c).algebraic_add(d); +/// ``` +/// +/// May be rewritten as: +/// +/// ``` +/// # #![allow(unused_assignments)] +/// # let mut x: f32 = 0.0; +/// # let a: f32 = 1.0; +/// # let b: f32 = 2.0; +/// # let c: f32 = 3.0; +/// # let d: f32 = 4.0; +/// x = a + b + c + d; // As written +/// x = (a + c) + (b + d); // Reordered to shorten critical path and enable vectorization +/// ``` #[stable(feature = "rust1", since = "1.0.0")] mod prim_f32 {} diff --git a/library/core/src/ptr/const_ptr.rs b/library/core/src/ptr/const_ptr.rs index 71a84aff24606..5234fb83eb67c 100644 --- a/library/core/src/ptr/const_ptr.rs +++ b/library/core/src/ptr/const_ptr.rs @@ -484,8 +484,9 @@ impl *const T { /// /// This operation itself is always safe, but using the resulting pointer is not. /// - /// The resulting pointer "remembers" the [allocated object] that `self` points to; it must not - /// be used to read or write other allocated objects. + /// The resulting pointer "remembers" the [allocated object] that `self` points to + /// (this is called "[Provenance](ptr/index.html#provenance)"). + /// The pointer must not be used to read or write other allocated objects. /// /// In other words, `let z = x.wrapping_offset((y as isize) - (x as isize))` does *not* make `z` /// the same as `y` even if we assume `T` has size `1` and there is no overflow: `z` is still @@ -764,8 +765,8 @@ impl *const T { /// // This would be incorrect, as the pointers are not correctly ordered: /// // ptr1.offset_from_unsigned(ptr2) /// ``` - #[stable(feature = "ptr_sub_ptr", since = "CURRENT_RUSTC_VERSION")] - #[rustc_const_stable(feature = "const_ptr_sub_ptr", since = "CURRENT_RUSTC_VERSION")] + #[stable(feature = "ptr_sub_ptr", since = "1.87.0")] + #[rustc_const_stable(feature = "const_ptr_sub_ptr", since = "1.87.0")] #[inline] #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces pub const unsafe fn offset_from_unsigned(self, origin: *const T) -> usize @@ -804,17 +805,17 @@ impl *const T { /// units of **bytes**. /// /// This is purely a convenience for casting to a `u8` pointer and - /// using [`sub_ptr`][pointer::offset_from_unsigned] on it. See that method for - /// documentation and safety requirements. + /// using [`offset_from_unsigned`][pointer::offset_from_unsigned] on it. + /// See that method for documentation and safety requirements. /// /// For non-`Sized` pointees this operation considers only the data pointers, /// ignoring the metadata. - #[stable(feature = "ptr_sub_ptr", since = "CURRENT_RUSTC_VERSION")] - #[rustc_const_stable(feature = "const_ptr_sub_ptr", since = "CURRENT_RUSTC_VERSION")] + #[stable(feature = "ptr_sub_ptr", since = "1.87.0")] + #[rustc_const_stable(feature = "const_ptr_sub_ptr", since = "1.87.0")] #[inline] #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces pub const unsafe fn byte_offset_from_unsigned(self, origin: *const U) -> usize { - // SAFETY: the caller must uphold the safety contract for `sub_ptr`. + // SAFETY: the caller must uphold the safety contract for `offset_from_unsigned`. unsafe { self.cast::().offset_from_unsigned(origin.cast::()) } } @@ -1739,3 +1740,11 @@ impl PartialOrd for *const T { *self >= *other } } + +#[stable(feature = "raw_ptr_default", since = "CURRENT_RUSTC_VERSION")] +impl Default for *const T { + /// Returns the default value of [`null()`][crate::ptr::null]. + fn default() -> Self { + crate::ptr::null() + } +} diff --git a/library/core/src/ptr/mod.rs b/library/core/src/ptr/mod.rs index ea53da78d3bd2..aa103af93ffcb 100644 --- a/library/core/src/ptr/mod.rs +++ b/library/core/src/ptr/mod.rs @@ -278,7 +278,7 @@ //! ### Using Strict Provenance //! //! Most code needs no changes to conform to strict provenance, as the only really concerning -//! operation is casts from usize to a pointer. For code which *does* cast a `usize` to a pointer, +//! operation is casts from `usize` to a pointer. For code which *does* cast a `usize` to a pointer, //! the scope of the change depends on exactly what you're doing. //! //! In general, you just need to make sure that if you want to convert a `usize` address to a @@ -398,6 +398,7 @@ use crate::cmp::Ordering; use crate::intrinsics::const_eval_select; use crate::marker::FnPtr; use crate::mem::{self, MaybeUninit, SizedTypeProperties}; +use crate::num::NonZero; use crate::{fmt, hash, intrinsics, ub_checks}; mod alignment; @@ -1064,10 +1065,45 @@ pub const unsafe fn swap(x: *mut T, y: *mut T) { /// assert_eq!(x, [7, 8, 3, 4]); /// assert_eq!(y, [1, 2, 9]); /// ``` +/// +/// # Const evaluation limitations +/// +/// If this function is invoked during const-evaluation, the current implementation has a small (and +/// rarely relevant) limitation: if `count` is at least 2 and the data pointed to by `x` or `y` +/// contains a pointer that crosses the boundary of two `T`-sized chunks of memory, the function may +/// fail to evaluate (similar to a panic during const-evaluation). This behavior may change in the +/// future. +/// +/// The limitation is illustrated by the following example: +/// +/// ``` +/// use std::mem::size_of; +/// use std::ptr; +/// +/// const { unsafe { +/// const PTR_SIZE: usize = size_of::<*const i32>(); +/// let mut data1 = [0u8; PTR_SIZE]; +/// let mut data2 = [0u8; PTR_SIZE]; +/// // Store a pointer in `data1`. +/// data1.as_mut_ptr().cast::<*const i32>().write_unaligned(&42); +/// // Swap the contents of `data1` and `data2` by swapping `PTR_SIZE` many `u8`-sized chunks. +/// // This call will fail, because the pointer in `data1` crosses the boundary +/// // between several of the 1-byte chunks that are being swapped here. +/// //ptr::swap_nonoverlapping(data1.as_mut_ptr(), data2.as_mut_ptr(), PTR_SIZE); +/// // Swap the contents of `data1` and `data2` by swapping a single chunk of size +/// // `[u8; PTR_SIZE]`. That works, as there is no pointer crossing the boundary between +/// // two chunks. +/// ptr::swap_nonoverlapping(&mut data1, &mut data2, 1); +/// // Read the pointer from `data2` and dereference it. +/// let ptr = data2.as_ptr().cast::<*const i32>().read_unaligned(); +/// assert!(*ptr == 42); +/// } } +/// ``` #[inline] #[stable(feature = "swap_nonoverlapping", since = "1.27.0")] -#[rustc_const_unstable(feature = "const_swap_nonoverlapping", issue = "133668")] +#[rustc_const_stable(feature = "const_swap_nonoverlapping", since = "CURRENT_RUSTC_VERSION")] #[rustc_diagnostic_item = "ptr_swap_nonoverlapping"] +#[rustc_allow_const_fn_unstable(const_eval_select)] // both implementations behave the same pub const unsafe fn swap_nonoverlapping(x: *mut T, y: *mut T, count: usize) { ub_checks::assert_unsafe_precondition!( check_library_ub, @@ -1094,51 +1130,25 @@ pub const unsafe fn swap_nonoverlapping(x: *mut T, y: *mut T, count: usize) { // are pointers inside `T` we will copy them in one go rather than trying to copy a part // of a pointer (which would not work). // SAFETY: Same preconditions as this function - unsafe { swap_nonoverlapping_simple_untyped(x, y, count) } + unsafe { swap_nonoverlapping_const(x, y, count) } } else { - macro_rules! attempt_swap_as_chunks { - ($ChunkTy:ty) => { - if align_of::() >= align_of::<$ChunkTy>() - && size_of::() % size_of::<$ChunkTy>() == 0 - { - let x: *mut $ChunkTy = x.cast(); - let y: *mut $ChunkTy = y.cast(); - let count = count * (size_of::() / size_of::<$ChunkTy>()); - // SAFETY: these are the same bytes that the caller promised were - // ok, just typed as `MaybeUninit`s instead of as `T`s. - // The `if` condition above ensures that we're not violating - // alignment requirements, and that the division is exact so - // that we don't lose any bytes off the end. - return unsafe { swap_nonoverlapping_simple_untyped(x, y, count) }; - } - }; + // Going though a slice here helps codegen know the size fits in `isize` + let slice = slice_from_raw_parts_mut(x, count); + // SAFETY: This is all readable from the pointer, meaning it's one + // allocated object, and thus cannot be more than isize::MAX bytes. + let bytes = unsafe { mem::size_of_val_raw::<[T]>(slice) }; + if let Some(bytes) = NonZero::new(bytes) { + // SAFETY: These are the same ranges, just expressed in a different + // type, so they're still non-overlapping. + unsafe { swap_nonoverlapping_bytes(x.cast(), y.cast(), bytes) }; } - - // Split up the slice into small power-of-two-sized chunks that LLVM is able - // to vectorize (unless it's a special type with more-than-pointer alignment, - // because we don't want to pessimize things like slices of SIMD vectors.) - if align_of::() <= size_of::() - && (!size_of::().is_power_of_two() - || size_of::() > size_of::() * 2) - { - attempt_swap_as_chunks!(usize); - attempt_swap_as_chunks!(u8); - } - - // SAFETY: Same preconditions as this function - unsafe { swap_nonoverlapping_simple_untyped(x, y, count) } } ) } /// Same behavior and safety conditions as [`swap_nonoverlapping`] -/// -/// LLVM can vectorize this (at least it can for the power-of-two-sized types -/// `swap_nonoverlapping` tries to use) so no need to manually SIMD it. #[inline] -const unsafe fn swap_nonoverlapping_simple_untyped(x: *mut T, y: *mut T, count: usize) { - let x = x.cast::>(); - let y = y.cast::>(); +const unsafe fn swap_nonoverlapping_const(x: *mut T, y: *mut T, count: usize) { let mut i = 0; while i < count { // SAFETY: By precondition, `i` is in-bounds because it's below `n` @@ -1147,26 +1157,91 @@ const unsafe fn swap_nonoverlapping_simple_untyped(x: *mut T, y: *mut T, coun // and it's distinct from `x` since the ranges are non-overlapping let y = unsafe { y.add(i) }; - // If we end up here, it's because we're using a simple type -- like - // a small power-of-two-sized thing -- or a special type with particularly - // large alignment, particularly SIMD types. - // Thus, we're fine just reading-and-writing it, as either it's small - // and that works well anyway or it's special and the type's author - // presumably wanted things to be done in the larger chunk. - // SAFETY: we're only ever given pointers that are valid to read/write, // including being aligned, and nothing here panics so it's drop-safe. unsafe { - let a: MaybeUninit = read(x); - let b: MaybeUninit = read(y); - write(x, b); - write(y, a); + // Note that it's critical that these use `copy_nonoverlapping`, + // rather than `read`/`write`, to avoid #134713 if T has padding. + let mut temp = MaybeUninit::::uninit(); + copy_nonoverlapping(x, temp.as_mut_ptr(), 1); + copy_nonoverlapping(y, x, 1); + copy_nonoverlapping(temp.as_ptr(), y, 1); } i += 1; } } +// Don't let MIR inline this, because we really want it to keep its noalias metadata +#[rustc_no_mir_inline] +#[inline] +fn swap_chunk(x: &mut MaybeUninit<[u8; N]>, y: &mut MaybeUninit<[u8; N]>) { + let a = *x; + let b = *y; + *x = b; + *y = a; +} + +#[inline] +unsafe fn swap_nonoverlapping_bytes(x: *mut u8, y: *mut u8, bytes: NonZero) { + // Same as `swap_nonoverlapping::<[u8; N]>`. + unsafe fn swap_nonoverlapping_chunks( + x: *mut MaybeUninit<[u8; N]>, + y: *mut MaybeUninit<[u8; N]>, + chunks: NonZero, + ) { + let chunks = chunks.get(); + for i in 0..chunks { + // SAFETY: i is in [0, chunks) so the adds and dereferences are in-bounds. + unsafe { swap_chunk(&mut *x.add(i), &mut *y.add(i)) }; + } + } + + // Same as `swap_nonoverlapping_bytes`, but accepts at most 1+2+4=7 bytes + #[inline] + unsafe fn swap_nonoverlapping_short(x: *mut u8, y: *mut u8, bytes: NonZero) { + // Tail handling for auto-vectorized code sometimes has element-at-a-time behaviour, + // see . + // By swapping as different sizes, rather than as a loop over bytes, + // we make sure not to end up with, say, seven byte-at-a-time copies. + + let bytes = bytes.get(); + let mut i = 0; + macro_rules! swap_prefix { + ($($n:literal)+) => {$( + if (bytes & $n) != 0 { + // SAFETY: `i` can only have the same bits set as those in bytes, + // so these `add`s are in-bounds of `bytes`. But the bit for + // `$n` hasn't been set yet, so the `$n` bytes that `swap_chunk` + // will read and write are within the usable range. + unsafe { swap_chunk::<$n>(&mut*x.add(i).cast(), &mut*y.add(i).cast()) }; + i |= $n; + } + )+}; + } + swap_prefix!(4 2 1); + debug_assert_eq!(i, bytes); + } + + const CHUNK_SIZE: usize = size_of::<*const ()>(); + let bytes = bytes.get(); + + let chunks = bytes / CHUNK_SIZE; + let tail = bytes % CHUNK_SIZE; + if let Some(chunks) = NonZero::new(chunks) { + // SAFETY: this is bytes/CHUNK_SIZE*CHUNK_SIZE bytes, which is <= bytes, + // so it's within the range of our non-overlapping bytes. + unsafe { swap_nonoverlapping_chunks::(x.cast(), y.cast(), chunks) }; + } + if let Some(tail) = NonZero::new(tail) { + const { assert!(CHUNK_SIZE <= 8) }; + let delta = chunks * CHUNK_SIZE; + // SAFETY: the tail length is below CHUNK SIZE because of the remainder, + // and CHUNK_SIZE is at most 8 by the const assert, so tail <= 7 + unsafe { swap_nonoverlapping_short(x.add(delta), y.add(delta), tail) }; + } +} + /// Moves `src` into the pointed `dst`, returning the previous `dst` value. /// /// Neither value is dropped. diff --git a/library/core/src/ptr/mut_ptr.rs b/library/core/src/ptr/mut_ptr.rs index b960a3d86bef0..31b8d3b572c0b 100644 --- a/library/core/src/ptr/mut_ptr.rs +++ b/library/core/src/ptr/mut_ptr.rs @@ -482,8 +482,9 @@ impl *mut T { /// /// This operation itself is always safe, but using the resulting pointer is not. /// - /// The resulting pointer "remembers" the [allocated object] that `self` points to; it must not - /// be used to read or write other allocated objects. + /// The resulting pointer "remembers" the [allocated object] that `self` points to + /// (this is called "[Provenance](ptr/index.html#provenance)"). + /// The pointer must not be used to read or write other allocated objects. /// /// In other words, `let z = x.wrapping_offset((y as isize) - (x as isize))` does *not* make `z` /// the same as `y` even if we assume `T` has size `1` and there is no overflow: `z` is still @@ -937,15 +938,16 @@ impl *mut T { /// /// // This would be incorrect, as the pointers are not correctly ordered: /// // ptr1.offset_from(ptr2) - #[stable(feature = "ptr_sub_ptr", since = "CURRENT_RUSTC_VERSION")] - #[rustc_const_stable(feature = "const_ptr_sub_ptr", since = "CURRENT_RUSTC_VERSION")] + /// ``` + #[stable(feature = "ptr_sub_ptr", since = "1.87.0")] + #[rustc_const_stable(feature = "const_ptr_sub_ptr", since = "1.87.0")] #[inline] #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces pub const unsafe fn offset_from_unsigned(self, origin: *const T) -> usize where T: Sized, { - // SAFETY: the caller must uphold the safety contract for `sub_ptr`. + // SAFETY: the caller must uphold the safety contract for `offset_from_unsigned`. unsafe { (self as *const T).offset_from_unsigned(origin) } } @@ -954,17 +956,17 @@ impl *mut T { /// units of **bytes**. /// /// This is purely a convenience for casting to a `u8` pointer and - /// using [`sub_ptr`][pointer::offset_from_unsigned] on it. See that method for - /// documentation and safety requirements. + /// using [`offset_from_unsigned`][pointer::offset_from_unsigned] on it. + /// See that method for documentation and safety requirements. /// /// For non-`Sized` pointees this operation considers only the data pointers, /// ignoring the metadata. - #[stable(feature = "ptr_sub_ptr", since = "CURRENT_RUSTC_VERSION")] - #[rustc_const_stable(feature = "const_ptr_sub_ptr", since = "CURRENT_RUSTC_VERSION")] + #[stable(feature = "ptr_sub_ptr", since = "1.87.0")] + #[rustc_const_stable(feature = "const_ptr_sub_ptr", since = "1.87.0")] #[inline] #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces pub const unsafe fn byte_offset_from_unsigned(self, origin: *mut U) -> usize { - // SAFETY: the caller must uphold the safety contract for `byte_sub_ptr`. + // SAFETY: the caller must uphold the safety contract for `byte_offset_from_unsigned`. unsafe { (self as *const T).byte_offset_from_unsigned(origin) } } @@ -1574,8 +1576,9 @@ impl *mut T { /// /// [`ptr::replace`]: crate::ptr::replace() #[stable(feature = "pointer_methods", since = "1.26.0")] + #[rustc_const_stable(feature = "const_inherent_ptr_replace", since = "CURRENT_RUSTC_VERSION")] #[inline(always)] - pub unsafe fn replace(self, src: T) -> T + pub const unsafe fn replace(self, src: T) -> T where T: Sized, { @@ -2155,3 +2158,11 @@ impl PartialOrd for *mut T { *self >= *other } } + +#[stable(feature = "raw_ptr_default", since = "CURRENT_RUSTC_VERSION")] +impl Default for *mut T { + /// Returns the default value of [`null_mut()`][crate::ptr::null_mut]. + fn default() -> Self { + crate::ptr::null_mut() + } +} diff --git a/library/core/src/ptr/non_null.rs b/library/core/src/ptr/non_null.rs index c769ba673c61e..0864cc457b658 100644 --- a/library/core/src/ptr/non_null.rs +++ b/library/core/src/ptr/non_null.rs @@ -900,13 +900,13 @@ impl NonNull { /// ``` #[inline] #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces - #[stable(feature = "ptr_sub_ptr", since = "CURRENT_RUSTC_VERSION")] - #[rustc_const_stable(feature = "const_ptr_sub_ptr", since = "CURRENT_RUSTC_VERSION")] + #[stable(feature = "ptr_sub_ptr", since = "1.87.0")] + #[rustc_const_stable(feature = "const_ptr_sub_ptr", since = "1.87.0")] pub const unsafe fn offset_from_unsigned(self, subtracted: NonNull) -> usize where T: Sized, { - // SAFETY: the caller must uphold the safety contract for `sub_ptr`. + // SAFETY: the caller must uphold the safety contract for `offset_from_unsigned`. unsafe { self.as_ptr().offset_from_unsigned(subtracted.as_ptr()) } } @@ -915,17 +915,17 @@ impl NonNull { /// units of **bytes**. /// /// This is purely a convenience for casting to a `u8` pointer and - /// using [`sub_ptr`][NonNull::offset_from_unsigned] on it. See that method for - /// documentation and safety requirements. + /// using [`offset_from_unsigned`][NonNull::offset_from_unsigned] on it. + /// See that method for documentation and safety requirements. /// /// For non-`Sized` pointees this operation considers only the data pointers, /// ignoring the metadata. #[inline(always)] #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces - #[stable(feature = "ptr_sub_ptr", since = "CURRENT_RUSTC_VERSION")] - #[rustc_const_stable(feature = "const_ptr_sub_ptr", since = "CURRENT_RUSTC_VERSION")] + #[stable(feature = "ptr_sub_ptr", since = "1.87.0")] + #[rustc_const_stable(feature = "const_ptr_sub_ptr", since = "1.87.0")] pub const unsafe fn byte_offset_from_unsigned(self, origin: NonNull) -> usize { - // SAFETY: the caller must uphold the safety contract for `byte_sub_ptr`. + // SAFETY: the caller must uphold the safety contract for `byte_offset_from_unsigned`. unsafe { self.as_ptr().byte_offset_from_unsigned(origin.as_ptr()) } } @@ -1166,7 +1166,8 @@ impl NonNull { /// [`ptr::replace`]: crate::ptr::replace() #[inline(always)] #[stable(feature = "non_null_convenience", since = "1.80.0")] - pub unsafe fn replace(self, src: T) -> T + #[rustc_const_stable(feature = "const_inherent_ptr_replace", since = "CURRENT_RUSTC_VERSION")] + pub const unsafe fn replace(self, src: T) -> T where T: Sized, { diff --git a/library/core/src/ptr/unique.rs b/library/core/src/ptr/unique.rs index 4810ebe01f9bb..d688ce2a07a6a 100644 --- a/library/core/src/ptr/unique.rs +++ b/library/core/src/ptr/unique.rs @@ -100,6 +100,12 @@ impl Unique { } } + /// Create a new `Unique` from a `NonNull` in const context. + #[inline] + pub const fn from_non_null(pointer: NonNull) -> Self { + Unique { pointer, _marker: PhantomData } + } + /// Acquires the underlying `*mut` pointer. #[must_use = "`self` will be dropped if the result is not used"] #[inline] @@ -202,6 +208,6 @@ impl From> for Unique { /// This conversion is infallible since `NonNull` cannot be null. #[inline] fn from(pointer: NonNull) -> Self { - Unique { pointer, _marker: PhantomData } + Unique::from_non_null(pointer) } } diff --git a/library/core/src/result.rs b/library/core/src/result.rs index 48ab9267f216c..736ffb7d0caf3 100644 --- a/library/core/src/result.rs +++ b/library/core/src/result.rs @@ -259,8 +259,14 @@ //! The [`is_ok`] and [`is_err`] methods return [`true`] if the [`Result`] //! is [`Ok`] or [`Err`], respectively. //! +//! The [`is_ok_and`] and [`is_err_and`] methods apply the provided function +//! to the contents of the [`Result`] to produce a boolean value. If the [`Result`] does not have the expected variant +//! then [`false`] is returned instead without executing the function. +//! //! [`is_err`]: Result::is_err //! [`is_ok`]: Result::is_ok +//! [`is_ok_and`]: Result::is_ok_and +//! [`is_err_and`]: Result::is_err_and //! //! ## Adapters for working with references //! @@ -287,6 +293,7 @@ //! (which must implement the [`Default`] trait) //! * [`unwrap_or_else`] returns the result of evaluating the provided //! function +//! * [`unwrap_unchecked`] produces *[undefined behavior]* //! //! The panicking methods [`expect`] and [`unwrap`] require `E` to //! implement the [`Debug`] trait. @@ -297,6 +304,8 @@ //! [`unwrap_or`]: Result::unwrap_or //! [`unwrap_or_default`]: Result::unwrap_or_default //! [`unwrap_or_else`]: Result::unwrap_or_else +//! [`unwrap_unchecked`]: Result::unwrap_unchecked +//! [undefined behavior]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html //! //! These methods extract the contained value in a [`Result`] when it //! is the [`Err`] variant. They require `T` to implement the [`Debug`] @@ -304,10 +313,13 @@ //! //! * [`expect_err`] panics with a provided custom message //! * [`unwrap_err`] panics with a generic message +//! * [`unwrap_err_unchecked`] produces *[undefined behavior]* //! //! [`Debug`]: crate::fmt::Debug //! [`expect_err`]: Result::expect_err //! [`unwrap_err`]: Result::unwrap_err +//! [`unwrap_err_unchecked`]: Result::unwrap_err_unchecked +//! [undefined behavior]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html //! //! ## Transforming contained values //! @@ -330,21 +342,29 @@ //! [`Some(v)`]: Option::Some //! [`transpose`]: Result::transpose //! -//! This method transforms the contained value of the [`Ok`] variant: +//! These methods transform the contained value of the [`Ok`] variant: //! //! * [`map`] transforms [`Result`] into [`Result`] by applying //! the provided function to the contained value of [`Ok`] and leaving //! [`Err`] values unchanged +//! * [`inspect`] takes ownership of the [`Result`], applies the +//! provided function to the contained value by reference, +//! and then returns the [`Result`] //! //! [`map`]: Result::map +//! [`inspect`]: Result::inspect //! -//! This method transforms the contained value of the [`Err`] variant: +//! These methods transform the contained value of the [`Err`] variant: //! //! * [`map_err`] transforms [`Result`] into [`Result`] by //! applying the provided function to the contained value of [`Err`] and //! leaving [`Ok`] values unchanged +//! * [`inspect_err`] takes ownership of the [`Result`], applies the +//! provided function to the contained value of [`Err`] by reference, +//! and then returns the [`Result`] //! //! [`map_err`]: Result::map_err +//! [`inspect_err`]: Result::inspect_err //! //! These methods transform a [`Result`] into a value of a possibly //! different type `U`: @@ -578,6 +598,10 @@ impl Result { /// /// let x: Result = Err("hey"); /// assert_eq!(x.is_ok_and(|x| x > 1), false); + /// + /// let x: Result = Ok("ownership".to_string()); + /// assert_eq!(x.as_ref().is_ok_and(|x| x.len() > 1), true); + /// println!("still alive {:?}", x); /// ``` #[must_use] #[inline] @@ -623,6 +647,10 @@ impl Result { /// /// let x: Result = Ok(123); /// assert_eq!(x.is_err_and(|x| x.kind() == ErrorKind::NotFound), false); + /// + /// let x: Result = Err("ownership".to_string()); + /// assert_eq!(x.as_ref().is_err_and(|x| x.len() > 1), true); + /// println!("still alive {:?}", x); /// ``` #[must_use] #[inline] diff --git a/library/core/src/slice/cmp.rs b/library/core/src/slice/cmp.rs index da85f42926e6b..5ce72b46eee36 100644 --- a/library/core/src/slice/cmp.rs +++ b/library/core/src/slice/cmp.rs @@ -5,6 +5,7 @@ use crate::ascii; use crate::cmp::{self, BytewiseEq, Ordering}; use crate::intrinsics::compare_bytes; use crate::num::NonZero; +use crate::ops::ControlFlow; #[stable(feature = "rust1", since = "1.0.0")] impl PartialEq<[U]> for [T] @@ -31,12 +32,64 @@ impl Ord for [T] { } } +#[inline] +fn as_underlying(x: ControlFlow) -> u8 { + // SAFETY: This will only compile if `bool` and `ControlFlow` have the same + // size (which isn't guaranteed but this is libcore). Because they have the same + // size, it's a niched implementation, which in one byte means there can't be + // any uninitialized memory. The callers then only check for `0` or `1` from this, + // which must necessarily match the `Break` variant, and we're fine no matter + // what ends up getting picked as the value representing `Continue(())`. + unsafe { crate::mem::transmute(x) } +} + /// Implements comparison of slices [lexicographically](Ord#lexicographical-comparison). #[stable(feature = "rust1", since = "1.0.0")] impl PartialOrd for [T] { + #[inline] fn partial_cmp(&self, other: &[T]) -> Option { SlicePartialOrd::partial_compare(self, other) } + #[inline] + fn lt(&self, other: &Self) -> bool { + // This is certainly not the obvious way to implement these methods. + // Unfortunately, using anything that looks at the discriminant means that + // LLVM sees a check for `2` (aka `ControlFlow::Continue(())`) and + // gets very distracted by that, ending up generating extraneous code. + // This should be changed to something simpler once either LLVM is smarter, + // see , or we generate + // niche discriminant checks in a way that doesn't trigger it. + + as_underlying(self.__chaining_lt(other)) == 1 + } + #[inline] + fn le(&self, other: &Self) -> bool { + as_underlying(self.__chaining_le(other)) != 0 + } + #[inline] + fn gt(&self, other: &Self) -> bool { + as_underlying(self.__chaining_gt(other)) == 1 + } + #[inline] + fn ge(&self, other: &Self) -> bool { + as_underlying(self.__chaining_ge(other)) != 0 + } + #[inline] + fn __chaining_lt(&self, other: &Self) -> ControlFlow { + SliceChain::chaining_lt(self, other) + } + #[inline] + fn __chaining_le(&self, other: &Self) -> ControlFlow { + SliceChain::chaining_le(self, other) + } + #[inline] + fn __chaining_gt(&self, other: &Self) -> ControlFlow { + SliceChain::chaining_gt(self, other) + } + #[inline] + fn __chaining_ge(&self, other: &Self) -> ControlFlow { + SliceChain::chaining_ge(self, other) + } } #[doc(hidden)] @@ -99,24 +152,63 @@ trait SlicePartialOrd: Sized { fn partial_compare(left: &[Self], right: &[Self]) -> Option; } +#[doc(hidden)] +// intermediate trait for specialization of slice's PartialOrd chaining methods +trait SliceChain: Sized { + fn chaining_lt(left: &[Self], right: &[Self]) -> ControlFlow; + fn chaining_le(left: &[Self], right: &[Self]) -> ControlFlow; + fn chaining_gt(left: &[Self], right: &[Self]) -> ControlFlow; + fn chaining_ge(left: &[Self], right: &[Self]) -> ControlFlow; +} + +type AlwaysBreak = ControlFlow; + impl SlicePartialOrd for A { default fn partial_compare(left: &[A], right: &[A]) -> Option { - let l = cmp::min(left.len(), right.len()); - - // Slice to the loop iteration range to enable bound check - // elimination in the compiler - let lhs = &left[..l]; - let rhs = &right[..l]; + let elem_chain = |a, b| match PartialOrd::partial_cmp(a, b) { + Some(Ordering::Equal) => ControlFlow::Continue(()), + non_eq => ControlFlow::Break(non_eq), + }; + let len_chain = |a: &_, b: &_| ControlFlow::Break(usize::partial_cmp(a, b)); + let AlwaysBreak::Break(b) = chaining_impl(left, right, elem_chain, len_chain); + b + } +} - for i in 0..l { - match lhs[i].partial_cmp(&rhs[i]) { - Some(Ordering::Equal) => (), - non_eq => return non_eq, - } - } +impl SliceChain for A { + default fn chaining_lt(left: &[Self], right: &[Self]) -> ControlFlow { + chaining_impl(left, right, PartialOrd::__chaining_lt, usize::__chaining_lt) + } + default fn chaining_le(left: &[Self], right: &[Self]) -> ControlFlow { + chaining_impl(left, right, PartialOrd::__chaining_le, usize::__chaining_le) + } + default fn chaining_gt(left: &[Self], right: &[Self]) -> ControlFlow { + chaining_impl(left, right, PartialOrd::__chaining_gt, usize::__chaining_gt) + } + default fn chaining_ge(left: &[Self], right: &[Self]) -> ControlFlow { + chaining_impl(left, right, PartialOrd::__chaining_ge, usize::__chaining_ge) + } +} - left.len().partial_cmp(&right.len()) +#[inline] +fn chaining_impl<'l, 'r, A: PartialOrd, B, C>( + left: &'l [A], + right: &'r [A], + elem_chain: impl Fn(&'l A, &'r A) -> ControlFlow, + len_chain: impl for<'a> FnOnce(&'a usize, &'a usize) -> ControlFlow, +) -> ControlFlow { + let l = cmp::min(left.len(), right.len()); + + // Slice to the loop iteration range to enable bound check + // elimination in the compiler + let lhs = &left[..l]; + let rhs = &right[..l]; + + for i in 0..l { + elem_chain(&lhs[i], &rhs[i])?; } + + len_chain(&left.len(), &right.len()) } // This is the impl that we would like to have. Unfortunately it's not sound. @@ -165,21 +257,13 @@ trait SliceOrd: Sized { impl SliceOrd for A { default fn compare(left: &[Self], right: &[Self]) -> Ordering { - let l = cmp::min(left.len(), right.len()); - - // Slice to the loop iteration range to enable bound check - // elimination in the compiler - let lhs = &left[..l]; - let rhs = &right[..l]; - - for i in 0..l { - match lhs[i].cmp(&rhs[i]) { - Ordering::Equal => (), - non_eq => return non_eq, - } - } - - left.len().cmp(&right.len()) + let elem_chain = |a, b| match Ord::cmp(a, b) { + Ordering::Equal => ControlFlow::Continue(()), + non_eq => ControlFlow::Break(non_eq), + }; + let len_chain = |a: &_, b: &_| ControlFlow::Break(usize::cmp(a, b)); + let AlwaysBreak::Break(b) = chaining_impl(left, right, elem_chain, len_chain); + b } } @@ -191,7 +275,7 @@ impl SliceOrd for A { /// * For every `x` and `y` of this type, `Ord(x, y)` must return the same /// value as `Ord::cmp(transmute::<_, u8>(x), transmute::<_, u8>(y))`. #[rustc_specialization_trait] -unsafe trait UnsignedBytewiseOrd {} +unsafe trait UnsignedBytewiseOrd: Ord {} unsafe impl UnsignedBytewiseOrd for bool {} unsafe impl UnsignedBytewiseOrd for u8 {} @@ -225,6 +309,38 @@ impl SliceOrd for A { } } +// Don't generate our own chaining loops for `memcmp`-able things either. +impl SliceChain for A { + #[inline] + fn chaining_lt(left: &[Self], right: &[Self]) -> ControlFlow { + match SliceOrd::compare(left, right) { + Ordering::Equal => ControlFlow::Continue(()), + ne => ControlFlow::Break(ne.is_lt()), + } + } + #[inline] + fn chaining_le(left: &[Self], right: &[Self]) -> ControlFlow { + match SliceOrd::compare(left, right) { + Ordering::Equal => ControlFlow::Continue(()), + ne => ControlFlow::Break(ne.is_le()), + } + } + #[inline] + fn chaining_gt(left: &[Self], right: &[Self]) -> ControlFlow { + match SliceOrd::compare(left, right) { + Ordering::Equal => ControlFlow::Continue(()), + ne => ControlFlow::Break(ne.is_gt()), + } + } + #[inline] + fn chaining_ge(left: &[Self], right: &[Self]) -> ControlFlow { + match SliceOrd::compare(left, right) { + Ordering::Equal => ControlFlow::Continue(()), + ne => ControlFlow::Break(ne.is_ge()), + } + } +} + pub(super) trait SliceContains: Sized { fn slice_contains(&self, x: &[Self]) -> bool; } diff --git a/library/core/src/slice/iter.rs b/library/core/src/slice/iter.rs index a687ed7129dc8..85a5e89a49eb3 100644 --- a/library/core/src/slice/iter.rs +++ b/library/core/src/slice/iter.rs @@ -93,9 +93,9 @@ unsafe impl Send for Iter<'_, T> {} impl<'a, T> Iter<'a, T> { #[inline] - pub(super) fn new(slice: &'a [T]) -> Self { + pub(super) const fn new(slice: &'a [T]) -> Self { let len = slice.len(); - let ptr: NonNull = NonNull::from(slice).cast(); + let ptr: NonNull = NonNull::from_ref(slice).cast(); // SAFETY: Similar to `IterMut::new`. unsafe { let end_or_len = @@ -218,9 +218,9 @@ unsafe impl Send for IterMut<'_, T> {} impl<'a, T> IterMut<'a, T> { #[inline] - pub(super) fn new(slice: &'a mut [T]) -> Self { + pub(super) const fn new(slice: &'a mut [T]) -> Self { let len = slice.len(); - let ptr: NonNull = NonNull::from(slice).cast(); + let ptr: NonNull = NonNull::from_mut(slice).cast(); // SAFETY: There are several things here: // // `ptr` has been obtained by `slice.as_ptr()` where `slice` is a valid @@ -1335,7 +1335,7 @@ pub struct Windows<'a, T: 'a> { impl<'a, T: 'a> Windows<'a, T> { #[inline] - pub(super) fn new(slice: &'a [T], size: NonZero) -> Self { + pub(super) const fn new(slice: &'a [T], size: NonZero) -> Self { Self { v: slice, size } } } @@ -1380,14 +1380,16 @@ impl<'a, T> Iterator for Windows<'a, T> { #[inline] fn nth(&mut self, n: usize) -> Option { - let (end, overflow) = self.size.get().overflowing_add(n); - if end > self.v.len() || overflow { - self.v = &[]; - None - } else { - let nth = &self.v[n..end]; - self.v = &self.v[n + 1..]; + let size = self.size.get(); + if let Some(rest) = self.v.get(n..) + && let Some(nth) = rest.get(..size) + { + self.v = &rest[1..]; Some(nth) + } else { + // setting length to 0 is cheaper than overwriting the pointer when assigning &[] + self.v = &self.v[..0]; // cheaper than &[] + None } } @@ -1427,7 +1429,7 @@ impl<'a, T> DoubleEndedIterator for Windows<'a, T> { fn nth_back(&mut self, n: usize) -> Option { let (end, overflow) = self.v.len().overflowing_sub(n); if end < self.size.get() || overflow { - self.v = &[]; + self.v = &self.v[..0]; // cheaper than &[] None } else { let ret = &self.v[end - self.size.get()..end]; @@ -1487,7 +1489,7 @@ pub struct Chunks<'a, T: 'a> { impl<'a, T: 'a> Chunks<'a, T> { #[inline] - pub(super) fn new(slice: &'a [T], size: usize) -> Self { + pub(super) const fn new(slice: &'a [T], size: usize) -> Self { Self { v: slice, chunk_size: size } } } @@ -1536,17 +1538,15 @@ impl<'a, T> Iterator for Chunks<'a, T> { #[inline] fn nth(&mut self, n: usize) -> Option { let (start, overflow) = n.overflowing_mul(self.chunk_size); - if start >= self.v.len() || overflow { - self.v = &[]; - None - } else { - let end = match start.checked_add(self.chunk_size) { - Some(sum) => cmp::min(self.v.len(), sum), - None => self.v.len(), - }; - let nth = &self.v[start..end]; - self.v = &self.v[end..]; + // min(len) makes a wrong start harmless, but enables optimizing this to brachless code + let chunk_start = &self.v[start.min(self.v.len())..]; + let (nth, remainder) = chunk_start.split_at(self.chunk_size.min(chunk_start.len())); + if !overflow && start < self.v.len() { + self.v = remainder; Some(nth) + } else { + self.v = &self.v[..0]; // cheaper than &[] + None } } @@ -1609,7 +1609,7 @@ impl<'a, T> DoubleEndedIterator for Chunks<'a, T> { fn nth_back(&mut self, n: usize) -> Option { let len = self.len(); if n >= len { - self.v = &[]; + self.v = &self.v[..0]; // cheaper than &[] None } else { let start = (len - 1 - n) * self.chunk_size; @@ -1677,7 +1677,7 @@ pub struct ChunksMut<'a, T: 'a> { impl<'a, T: 'a> ChunksMut<'a, T> { #[inline] - pub(super) fn new(slice: &'a mut [T], size: usize) -> Self { + pub(super) const fn new(slice: &'a mut [T], size: usize) -> Self { Self { v: slice, chunk_size: size, _marker: PhantomData } } } @@ -1863,7 +1863,7 @@ pub struct ChunksExact<'a, T: 'a> { impl<'a, T> ChunksExact<'a, T> { #[inline] - pub(super) fn new(slice: &'a [T], chunk_size: usize) -> Self { + pub(super) const fn new(slice: &'a [T], chunk_size: usize) -> Self { let rem = slice.len() % chunk_size; let fst_len = slice.len() - rem; // SAFETY: 0 <= fst_len <= slice.len() by construction above @@ -1933,7 +1933,7 @@ impl<'a, T> Iterator for ChunksExact<'a, T> { fn nth(&mut self, n: usize) -> Option { let (start, overflow) = n.overflowing_mul(self.chunk_size); if start >= self.v.len() || overflow { - self.v = &[]; + self.v = &self.v[..0]; // cheaper than &[] None } else { let (_, snd) = self.v.split_at(start); @@ -1971,7 +1971,7 @@ impl<'a, T> DoubleEndedIterator for ChunksExact<'a, T> { fn nth_back(&mut self, n: usize) -> Option { let len = self.len(); if n >= len { - self.v = &[]; + self.v = &self.v[..0]; // cheaper than &[] None } else { let start = (len - 1 - n) * self.chunk_size; @@ -2043,7 +2043,7 @@ pub struct ChunksExactMut<'a, T: 'a> { impl<'a, T> ChunksExactMut<'a, T> { #[inline] - pub(super) fn new(slice: &'a mut [T], chunk_size: usize) -> Self { + pub(super) const fn new(slice: &'a mut [T], chunk_size: usize) -> Self { let rem = slice.len() % chunk_size; let fst_len = slice.len() - rem; // SAFETY: 0 <= fst_len <= slice.len() by construction above @@ -2210,7 +2210,7 @@ pub struct ArrayWindows<'a, T: 'a, const N: usize> { impl<'a, T: 'a, const N: usize> ArrayWindows<'a, T, N> { #[inline] - pub(super) fn new(slice: &'a [T]) -> Self { + pub(super) const fn new(slice: &'a [T]) -> Self { let num_windows = slice.len().saturating_sub(N - 1); Self { slice_head: slice.as_ptr(), num: num_windows, marker: PhantomData } } @@ -2334,8 +2334,9 @@ pub struct ArrayChunks<'a, T: 'a, const N: usize> { } impl<'a, T, const N: usize> ArrayChunks<'a, T, N> { + #[rustc_const_unstable(feature = "const_slice_make_iter", issue = "137737")] #[inline] - pub(super) fn new(slice: &'a [T]) -> Self { + pub(super) const fn new(slice: &'a [T]) -> Self { let (array_slice, rem) = slice.as_chunks(); Self { iter: array_slice.iter(), rem } } @@ -2460,8 +2461,9 @@ pub struct ArrayChunksMut<'a, T: 'a, const N: usize> { } impl<'a, T, const N: usize> ArrayChunksMut<'a, T, N> { + #[rustc_const_unstable(feature = "const_slice_make_iter", issue = "137737")] #[inline] - pub(super) fn new(slice: &'a mut [T]) -> Self { + pub(super) const fn new(slice: &'a mut [T]) -> Self { let (array_slice, rem) = slice.as_chunks_mut(); Self { iter: array_slice.iter_mut(), rem } } @@ -2579,7 +2581,7 @@ pub struct RChunks<'a, T: 'a> { impl<'a, T: 'a> RChunks<'a, T> { #[inline] - pub(super) fn new(slice: &'a [T], size: usize) -> Self { + pub(super) const fn new(slice: &'a [T], size: usize) -> Self { Self { v: slice, chunk_size: size } } } @@ -2635,7 +2637,7 @@ impl<'a, T> Iterator for RChunks<'a, T> { fn nth(&mut self, n: usize) -> Option { let (end, overflow) = n.overflowing_mul(self.chunk_size); if end >= self.v.len() || overflow { - self.v = &[]; + self.v = &self.v[..0]; // cheaper than &[] None } else { // Can't underflow because of the check above @@ -2692,7 +2694,7 @@ impl<'a, T> DoubleEndedIterator for RChunks<'a, T> { fn nth_back(&mut self, n: usize) -> Option { let len = self.len(); if n >= len { - self.v = &[]; + self.v = &self.v[..0]; // cheaper than &[] None } else { // can't underflow because `n < len` @@ -2759,7 +2761,7 @@ pub struct RChunksMut<'a, T: 'a> { impl<'a, T: 'a> RChunksMut<'a, T> { #[inline] - pub(super) fn new(slice: &'a mut [T], size: usize) -> Self { + pub(super) const fn new(slice: &'a mut [T], size: usize) -> Self { Self { v: slice, chunk_size: size, _marker: PhantomData } } } @@ -2950,7 +2952,7 @@ pub struct RChunksExact<'a, T: 'a> { impl<'a, T> RChunksExact<'a, T> { #[inline] - pub(super) fn new(slice: &'a [T], chunk_size: usize) -> Self { + pub(super) const fn new(slice: &'a [T], chunk_size: usize) -> Self { let rem = slice.len() % chunk_size; // SAFETY: 0 <= rem <= slice.len() by construction above let (fst, snd) = unsafe { slice.split_at_unchecked(rem) }; @@ -2976,7 +2978,8 @@ impl<'a, T> RChunksExact<'a, T> { /// ``` #[must_use] #[stable(feature = "rchunks", since = "1.31.0")] - pub fn remainder(&self) -> &'a [T] { + #[rustc_const_unstable(feature = "const_slice_make_iter", issue = "137737")] + pub const fn remainder(&self) -> &'a [T] { self.rem } } @@ -3019,7 +3022,7 @@ impl<'a, T> Iterator for RChunksExact<'a, T> { fn nth(&mut self, n: usize) -> Option { let (end, overflow) = n.overflowing_mul(self.chunk_size); if end >= self.v.len() || overflow { - self.v = &[]; + self.v = &self.v[..0]; // cheaper than &[] None } else { let (fst, _) = self.v.split_at(self.v.len() - end); @@ -3058,7 +3061,7 @@ impl<'a, T> DoubleEndedIterator for RChunksExact<'a, T> { fn nth_back(&mut self, n: usize) -> Option { let len = self.len(); if n >= len { - self.v = &[]; + self.v = &self.v[..0]; // cheaper than &[] None } else { // now that we know that `n` corresponds to a chunk, @@ -3132,7 +3135,7 @@ pub struct RChunksExactMut<'a, T: 'a> { impl<'a, T> RChunksExactMut<'a, T> { #[inline] - pub(super) fn new(slice: &'a mut [T], chunk_size: usize) -> Self { + pub(super) const fn new(slice: &'a mut [T], chunk_size: usize) -> Self { let rem = slice.len() % chunk_size; // SAFETY: 0 <= rem <= slice.len() by construction above let (fst, snd) = unsafe { slice.split_at_mut_unchecked(rem) }; @@ -3144,7 +3147,8 @@ impl<'a, T> RChunksExactMut<'a, T> { /// elements. #[must_use = "`self` will be dropped if the result is not used"] #[stable(feature = "rchunks", since = "1.31.0")] - pub fn into_remainder(self) -> &'a mut [T] { + #[rustc_const_unstable(feature = "const_slice_make_iter", issue = "137737")] + pub const fn into_remainder(self) -> &'a mut [T] { self.rem } } @@ -3308,7 +3312,7 @@ pub struct ChunkBy<'a, T: 'a, P> { #[stable(feature = "slice_group_by", since = "1.77.0")] impl<'a, T: 'a, P> ChunkBy<'a, T, P> { - pub(super) fn new(slice: &'a [T], predicate: P) -> Self { + pub(super) const fn new(slice: &'a [T], predicate: P) -> Self { ChunkBy { slice, predicate } } } @@ -3395,7 +3399,7 @@ pub struct ChunkByMut<'a, T: 'a, P> { #[stable(feature = "slice_group_by", since = "1.77.0")] impl<'a, T: 'a, P> ChunkByMut<'a, T, P> { - pub(super) fn new(slice: &'a mut [T], predicate: P) -> Self { + pub(super) const fn new(slice: &'a mut [T], predicate: P) -> Self { ChunkByMut { slice, predicate } } } diff --git a/library/core/src/slice/mod.rs b/library/core/src/slice/mod.rs index 5bb7243c4491b..81fe0166fd77b 100644 --- a/library/core/src/slice/mod.rs +++ b/library/core/src/slice/mod.rs @@ -8,7 +8,7 @@ use crate::cmp::Ordering::{self, Equal, Greater, Less}; use crate::intrinsics::{exact_div, unchecked_sub}; -use crate::mem::{self, SizedTypeProperties}; +use crate::mem::{self, MaybeUninit, SizedTypeProperties}; use crate::num::NonZero; use crate::ops::{OneSidedRange, OneSidedRangeBound, Range, RangeBounds, RangeInclusive}; use crate::panic::const_panic; @@ -109,6 +109,7 @@ impl [T] { #[lang = "slice_len_fn"] #[stable(feature = "rust1", since = "1.0.0")] #[rustc_const_stable(feature = "const_slice_len", since = "1.39.0")] + #[cfg_attr(not(bootstrap), rustc_no_implicit_autorefs)] #[inline] #[must_use] pub const fn len(&self) -> usize { @@ -128,6 +129,7 @@ impl [T] { /// ``` #[stable(feature = "rust1", since = "1.0.0")] #[rustc_const_stable(feature = "const_slice_is_empty", since = "1.39.0")] + #[cfg_attr(not(bootstrap), rustc_no_implicit_autorefs)] #[inline] #[must_use] pub const fn is_empty(&self) -> bool { @@ -382,16 +384,11 @@ impl [T] { #[stable(feature = "slice_first_last_chunk", since = "1.77.0")] #[rustc_const_stable(feature = "slice_first_last_chunk", since = "1.77.0")] pub const fn split_first_chunk(&self) -> Option<(&[T; N], &[T])> { - if self.len() < N { - None - } else { - // SAFETY: We manually verified the bounds of the split. - let (first, tail) = unsafe { self.split_at_unchecked(N) }; + let Some((first, tail)) = self.split_at_checked(N) else { return None }; - // SAFETY: We explicitly check for the correct number of elements, - // and do not let the references outlive the slice. - Some((unsafe { &*(first.as_ptr().cast::<[T; N]>()) }, tail)) - } + // SAFETY: We explicitly check for the correct number of elements, + // and do not let the references outlive the slice. + Some((unsafe { &*(first.as_ptr().cast::<[T; N]>()) }, tail)) } /// Returns a mutable array reference to the first `N` items in the slice and the remaining @@ -419,17 +416,12 @@ impl [T] { pub const fn split_first_chunk_mut( &mut self, ) -> Option<(&mut [T; N], &mut [T])> { - if self.len() < N { - None - } else { - // SAFETY: We manually verified the bounds of the split. - let (first, tail) = unsafe { self.split_at_mut_unchecked(N) }; + let Some((first, tail)) = self.split_at_mut_checked(N) else { return None }; - // SAFETY: We explicitly check for the correct number of elements, - // do not let the reference outlive the slice, - // and enforce exclusive mutability of the chunk by the split. - Some((unsafe { &mut *(first.as_mut_ptr().cast::<[T; N]>()) }, tail)) - } + // SAFETY: We explicitly check for the correct number of elements, + // do not let the reference outlive the slice, + // and enforce exclusive mutability of the chunk by the split. + Some((unsafe { &mut *(first.as_mut_ptr().cast::<[T; N]>()) }, tail)) } /// Returns an array reference to the last `N` items in the slice and the remaining slice. @@ -452,16 +444,12 @@ impl [T] { #[stable(feature = "slice_first_last_chunk", since = "1.77.0")] #[rustc_const_stable(feature = "slice_first_last_chunk", since = "1.77.0")] pub const fn split_last_chunk(&self) -> Option<(&[T], &[T; N])> { - if self.len() < N { - None - } else { - // SAFETY: We manually verified the bounds of the split. - let (init, last) = unsafe { self.split_at_unchecked(self.len() - N) }; + let Some(index) = self.len().checked_sub(N) else { return None }; + let (init, last) = self.split_at(index); - // SAFETY: We explicitly check for the correct number of elements, - // and do not let the references outlive the slice. - Some((init, unsafe { &*(last.as_ptr().cast::<[T; N]>()) })) - } + // SAFETY: We explicitly check for the correct number of elements, + // and do not let the references outlive the slice. + Some((init, unsafe { &*(last.as_ptr().cast::<[T; N]>()) })) } /// Returns a mutable array reference to the last `N` items in the slice and the remaining @@ -489,17 +477,13 @@ impl [T] { pub const fn split_last_chunk_mut( &mut self, ) -> Option<(&mut [T], &mut [T; N])> { - if self.len() < N { - None - } else { - // SAFETY: We manually verified the bounds of the split. - let (init, last) = unsafe { self.split_at_mut_unchecked(self.len() - N) }; + let Some(index) = self.len().checked_sub(N) else { return None }; + let (init, last) = self.split_at_mut(index); - // SAFETY: We explicitly check for the correct number of elements, - // do not let the reference outlive the slice, - // and enforce exclusive mutability of the chunk by the split. - Some((init, unsafe { &mut *(last.as_mut_ptr().cast::<[T; N]>()) })) - } + // SAFETY: We explicitly check for the correct number of elements, + // do not let the reference outlive the slice, + // and enforce exclusive mutability of the chunk by the split. + Some((init, unsafe { &mut *(last.as_mut_ptr().cast::<[T; N]>()) })) } /// Returns an array reference to the last `N` items in the slice. @@ -522,17 +506,13 @@ impl [T] { #[stable(feature = "slice_first_last_chunk", since = "1.77.0")] #[rustc_const_stable(feature = "const_slice_last_chunk", since = "1.80.0")] pub const fn last_chunk(&self) -> Option<&[T; N]> { - if self.len() < N { - None - } else { - // SAFETY: We manually verified the bounds of the slice. - // FIXME(const-hack): Without const traits, we need this instead of `get_unchecked`. - let last = unsafe { self.split_at_unchecked(self.len() - N).1 }; + // FIXME(const-hack): Without const traits, we need this instead of `get`. + let Some(index) = self.len().checked_sub(N) else { return None }; + let (_, last) = self.split_at(index); - // SAFETY: We explicitly check for the correct number of elements, - // and do not let the references outlive the slice. - Some(unsafe { &*(last.as_ptr().cast::<[T; N]>()) }) - } + // SAFETY: We explicitly check for the correct number of elements, + // and do not let the references outlive the slice. + Some(unsafe { &*(last.as_ptr().cast::<[T; N]>()) }) } /// Returns a mutable array reference to the last `N` items in the slice. @@ -556,18 +536,14 @@ impl [T] { #[stable(feature = "slice_first_last_chunk", since = "1.77.0")] #[rustc_const_stable(feature = "const_slice_first_last_chunk", since = "1.83.0")] pub const fn last_chunk_mut(&mut self) -> Option<&mut [T; N]> { - if self.len() < N { - None - } else { - // SAFETY: We manually verified the bounds of the slice. - // FIXME(const-hack): Without const traits, we need this instead of `get_unchecked`. - let last = unsafe { self.split_at_mut_unchecked(self.len() - N).1 }; + // FIXME(const-hack): Without const traits, we need this instead of `get`. + let Some(index) = self.len().checked_sub(N) else { return None }; + let (_, last) = self.split_at_mut(index); - // SAFETY: We explicitly check for the correct number of elements, - // do not let the reference outlive the slice, - // and require exclusive access to the entire slice to mutate the chunk. - Some(unsafe { &mut *(last.as_mut_ptr().cast::<[T; N]>()) }) - } + // SAFETY: We explicitly check for the correct number of elements, + // do not let the reference outlive the slice, + // and require exclusive access to the entire slice to mutate the chunk. + Some(unsafe { &mut *(last.as_mut_ptr().cast::<[T; N]>()) }) } /// Returns a reference to an element or subslice depending on the type of @@ -588,6 +564,7 @@ impl [T] { /// assert_eq!(None, v.get(0..4)); /// ``` #[stable(feature = "rust1", since = "1.0.0")] + #[cfg_attr(not(bootstrap), rustc_no_implicit_autorefs)] #[inline] #[must_use] pub fn get(&self, index: I) -> Option<&I::Output> @@ -613,6 +590,7 @@ impl [T] { /// assert_eq!(x, &[0, 42, 2]); /// ``` #[stable(feature = "rust1", since = "1.0.0")] + #[cfg_attr(not(bootstrap), rustc_no_implicit_autorefs)] #[inline] #[must_use] pub fn get_mut(&mut self, index: I) -> Option<&mut I::Output> @@ -650,6 +628,7 @@ impl [T] { /// } /// ``` #[stable(feature = "rust1", since = "1.0.0")] + #[cfg_attr(not(bootstrap), rustc_no_implicit_autorefs)] #[inline] #[must_use] pub unsafe fn get_unchecked(&self, index: I) -> &I::Output @@ -692,6 +671,7 @@ impl [T] { /// assert_eq!(x, &[1, 13, 4]); /// ``` #[stable(feature = "rust1", since = "1.0.0")] + #[cfg_attr(not(bootstrap), rustc_no_implicit_autorefs)] #[inline] #[must_use] pub unsafe fn get_unchecked_mut(&mut self, index: I) -> &mut I::Output @@ -1043,9 +1023,10 @@ impl [T] { /// assert_eq!(iterator.next(), None); /// ``` #[stable(feature = "rust1", since = "1.0.0")] + #[rustc_const_unstable(feature = "const_slice_make_iter", issue = "137737")] #[inline] #[rustc_diagnostic_item = "slice_iter"] - pub fn iter(&self) -> Iter<'_, T> { + pub const fn iter(&self) -> Iter<'_, T> { Iter::new(self) } @@ -1062,9 +1043,10 @@ impl [T] { /// } /// assert_eq!(x, &[3, 4, 6]); /// ``` + #[rustc_const_unstable(feature = "const_slice_make_iter", issue = "137737")] #[stable(feature = "rust1", since = "1.0.0")] #[inline] - pub fn iter_mut(&mut self) -> IterMut<'_, T> { + pub const fn iter_mut(&mut self) -> IterMut<'_, T> { IterMut::new(self) } @@ -1116,9 +1098,10 @@ impl [T] { /// assert_eq!(array, ['s', 't', ' ', '2', '0', '1', '5', 'u', 'R']); /// ``` #[stable(feature = "rust1", since = "1.0.0")] + #[rustc_const_unstable(feature = "const_slice_make_iter", issue = "137737")] #[inline] #[track_caller] - pub fn windows(&self, size: usize) -> Windows<'_, T> { + pub const fn windows(&self, size: usize) -> Windows<'_, T> { let size = NonZero::new(size).expect("window size must be non-zero"); Windows::new(self, size) } @@ -1151,9 +1134,10 @@ impl [T] { /// [`chunks_exact`]: slice::chunks_exact /// [`rchunks`]: slice::rchunks #[stable(feature = "rust1", since = "1.0.0")] + #[rustc_const_unstable(feature = "const_slice_make_iter", issue = "137737")] #[inline] #[track_caller] - pub fn chunks(&self, chunk_size: usize) -> Chunks<'_, T> { + pub const fn chunks(&self, chunk_size: usize) -> Chunks<'_, T> { assert!(chunk_size != 0, "chunk size must be non-zero"); Chunks::new(self, chunk_size) } @@ -1190,9 +1174,10 @@ impl [T] { /// [`chunks_exact_mut`]: slice::chunks_exact_mut /// [`rchunks_mut`]: slice::rchunks_mut #[stable(feature = "rust1", since = "1.0.0")] + #[rustc_const_unstable(feature = "const_slice_make_iter", issue = "137737")] #[inline] #[track_caller] - pub fn chunks_mut(&mut self, chunk_size: usize) -> ChunksMut<'_, T> { + pub const fn chunks_mut(&mut self, chunk_size: usize) -> ChunksMut<'_, T> { assert!(chunk_size != 0, "chunk size must be non-zero"); ChunksMut::new(self, chunk_size) } @@ -1228,9 +1213,10 @@ impl [T] { /// [`chunks`]: slice::chunks /// [`rchunks_exact`]: slice::rchunks_exact #[stable(feature = "chunks_exact", since = "1.31.0")] + #[rustc_const_unstable(feature = "const_slice_make_iter", issue = "137737")] #[inline] #[track_caller] - pub fn chunks_exact(&self, chunk_size: usize) -> ChunksExact<'_, T> { + pub const fn chunks_exact(&self, chunk_size: usize) -> ChunksExact<'_, T> { assert!(chunk_size != 0, "chunk size must be non-zero"); ChunksExact::new(self, chunk_size) } @@ -1271,9 +1257,10 @@ impl [T] { /// [`chunks_mut`]: slice::chunks_mut /// [`rchunks_exact_mut`]: slice::rchunks_exact_mut #[stable(feature = "chunks_exact", since = "1.31.0")] + #[rustc_const_unstable(feature = "const_slice_make_iter", issue = "137737")] #[inline] #[track_caller] - pub fn chunks_exact_mut(&mut self, chunk_size: usize) -> ChunksExactMut<'_, T> { + pub const fn chunks_exact_mut(&mut self, chunk_size: usize) -> ChunksExactMut<'_, T> { assert!(chunk_size != 0, "chunk size must be non-zero"); ChunksExactMut::new(self, chunk_size) } @@ -1281,6 +1268,18 @@ impl [T] { /// Splits the slice into a slice of `N`-element arrays, /// assuming that there's no remainder. /// + /// This is the inverse operation to [`as_flattened`]. + /// + /// [`as_flattened`]: slice::as_flattened + /// + /// As this is `unsafe`, consider whether you could use [`as_chunks`] or + /// [`as_rchunks`] instead, perhaps via something like + /// `if let (chunks, []) = slice.as_chunks()` or + /// `let (chunks, []) = slice.as_chunks() else { unreachable!() };`. + /// + /// [`as_chunks`]: slice::as_chunks + /// [`as_rchunks`]: slice::as_rchunks + /// /// # Safety /// /// This may only be called when @@ -1290,7 +1289,6 @@ impl [T] { /// # Examples /// /// ``` - /// #![feature(slice_as_chunks)] /// let slice: &[char] = &['l', 'o', 'r', 'e', 'm', '!']; /// let chunks: &[[char; 1]] = /// // SAFETY: 1-element chunks never have remainder @@ -1305,8 +1303,8 @@ impl [T] { /// // let chunks: &[[_; 5]] = slice.as_chunks_unchecked() // The slice length is not a multiple of 5 /// // let chunks: &[[_; 0]] = slice.as_chunks_unchecked() // Zero-length chunks are never allowed /// ``` - #[unstable(feature = "slice_as_chunks", issue = "74985")] - #[rustc_const_unstable(feature = "slice_as_chunks", issue = "74985")] + #[stable(feature = "slice_as_chunks", since = "CURRENT_RUSTC_VERSION")] + #[rustc_const_stable(feature = "slice_as_chunks", since = "CURRENT_RUSTC_VERSION")] #[inline] #[must_use] pub const unsafe fn as_chunks_unchecked(&self) -> &[[T; N]] { @@ -1326,15 +1324,27 @@ impl [T] { /// starting at the beginning of the slice, /// and a remainder slice with length strictly less than `N`. /// + /// The remainder is meaningful in the division sense. Given + /// `let (chunks, remainder) = slice.as_chunks()`, then: + /// - `chunks.len()` equals `slice.len() / N`, + /// - `remainder.len()` equals `slice.len() % N`, and + /// - `slice.len()` equals `chunks.len() * N + remainder.len()`. + /// + /// You can flatten the chunks back into a slice-of-`T` with [`as_flattened`]. + /// + /// [`as_flattened`]: slice::as_flattened + /// /// # Panics /// - /// Panics if `N` is zero. This check will most probably get changed to a compile time - /// error before this method gets stabilized. + /// Panics if `N` is zero. + /// + /// Note that this check is against a const generic parameter, not a runtime + /// value, and thus a particular monomorphization will either always panic + /// or it will never panic. /// /// # Examples /// /// ``` - /// #![feature(slice_as_chunks)] /// let slice = ['l', 'o', 'r', 'e', 'm']; /// let (chunks, remainder) = slice.as_chunks(); /// assert_eq!(chunks, &[['l', 'o'], ['r', 'e']]); @@ -1344,15 +1354,14 @@ impl [T] { /// If you expect the slice to be an exact multiple, you can combine /// `let`-`else` with an empty slice pattern: /// ``` - /// #![feature(slice_as_chunks)] /// let slice = ['R', 'u', 's', 't']; /// let (chunks, []) = slice.as_chunks::<2>() else { /// panic!("slice didn't have even length") /// }; /// assert_eq!(chunks, &[['R', 'u'], ['s', 't']]); /// ``` - #[unstable(feature = "slice_as_chunks", issue = "74985")] - #[rustc_const_unstable(feature = "slice_as_chunks", issue = "74985")] + #[stable(feature = "slice_as_chunks", since = "CURRENT_RUSTC_VERSION")] + #[rustc_const_stable(feature = "slice_as_chunks", since = "CURRENT_RUSTC_VERSION")] #[inline] #[track_caller] #[must_use] @@ -1372,22 +1381,34 @@ impl [T] { /// starting at the end of the slice, /// and a remainder slice with length strictly less than `N`. /// + /// The remainder is meaningful in the division sense. Given + /// `let (remainder, chunks) = slice.as_rchunks()`, then: + /// - `remainder.len()` equals `slice.len() % N`, + /// - `chunks.len()` equals `slice.len() / N`, and + /// - `slice.len()` equals `chunks.len() * N + remainder.len()`. + /// + /// You can flatten the chunks back into a slice-of-`T` with [`as_flattened`]. + /// + /// [`as_flattened`]: slice::as_flattened + /// /// # Panics /// - /// Panics if `N` is zero. This check will most probably get changed to a compile time - /// error before this method gets stabilized. + /// Panics if `N` is zero. + /// + /// Note that this check is against a const generic parameter, not a runtime + /// value, and thus a particular monomorphization will either always panic + /// or it will never panic. /// /// # Examples /// /// ``` - /// #![feature(slice_as_chunks)] /// let slice = ['l', 'o', 'r', 'e', 'm']; /// let (remainder, chunks) = slice.as_rchunks(); /// assert_eq!(remainder, &['l']); /// assert_eq!(chunks, &[['o', 'r'], ['e', 'm']]); /// ``` - #[unstable(feature = "slice_as_chunks", issue = "74985")] - #[rustc_const_unstable(feature = "slice_as_chunks", issue = "74985")] + #[stable(feature = "slice_as_chunks", since = "CURRENT_RUSTC_VERSION")] + #[rustc_const_stable(feature = "slice_as_chunks", since = "CURRENT_RUSTC_VERSION")] #[inline] #[track_caller] #[must_use] @@ -1429,9 +1450,10 @@ impl [T] { /// /// [`chunks_exact`]: slice::chunks_exact #[unstable(feature = "array_chunks", issue = "74985")] + #[rustc_const_unstable(feature = "const_slice_make_iter", issue = "137737")] #[inline] #[track_caller] - pub fn array_chunks(&self) -> ArrayChunks<'_, T, N> { + pub const fn array_chunks(&self) -> ArrayChunks<'_, T, N> { assert!(N != 0, "chunk size must be non-zero"); ArrayChunks::new(self) } @@ -1439,6 +1461,18 @@ impl [T] { /// Splits the slice into a slice of `N`-element arrays, /// assuming that there's no remainder. /// + /// This is the inverse operation to [`as_flattened_mut`]. + /// + /// [`as_flattened_mut`]: slice::as_flattened_mut + /// + /// As this is `unsafe`, consider whether you could use [`as_chunks_mut`] or + /// [`as_rchunks_mut`] instead, perhaps via something like + /// `if let (chunks, []) = slice.as_chunks_mut()` or + /// `let (chunks, []) = slice.as_chunks_mut() else { unreachable!() };`. + /// + /// [`as_chunks_mut`]: slice::as_chunks_mut + /// [`as_rchunks_mut`]: slice::as_rchunks_mut + /// /// # Safety /// /// This may only be called when @@ -1448,7 +1482,6 @@ impl [T] { /// # Examples /// /// ``` - /// #![feature(slice_as_chunks)] /// let slice: &mut [char] = &mut ['l', 'o', 'r', 'e', 'm', '!']; /// let chunks: &mut [[char; 1]] = /// // SAFETY: 1-element chunks never have remainder @@ -1465,8 +1498,8 @@ impl [T] { /// // let chunks: &[[_; 5]] = slice.as_chunks_unchecked_mut() // The slice length is not a multiple of 5 /// // let chunks: &[[_; 0]] = slice.as_chunks_unchecked_mut() // Zero-length chunks are never allowed /// ``` - #[unstable(feature = "slice_as_chunks", issue = "74985")] - #[rustc_const_unstable(feature = "slice_as_chunks", issue = "74985")] + #[stable(feature = "slice_as_chunks", since = "CURRENT_RUSTC_VERSION")] + #[rustc_const_stable(feature = "slice_as_chunks", since = "CURRENT_RUSTC_VERSION")] #[inline] #[must_use] pub const unsafe fn as_chunks_unchecked_mut(&mut self) -> &mut [[T; N]] { @@ -1486,15 +1519,27 @@ impl [T] { /// starting at the beginning of the slice, /// and a remainder slice with length strictly less than `N`. /// + /// The remainder is meaningful in the division sense. Given + /// `let (chunks, remainder) = slice.as_chunks_mut()`, then: + /// - `chunks.len()` equals `slice.len() / N`, + /// - `remainder.len()` equals `slice.len() % N`, and + /// - `slice.len()` equals `chunks.len() * N + remainder.len()`. + /// + /// You can flatten the chunks back into a slice-of-`T` with [`as_flattened_mut`]. + /// + /// [`as_flattened_mut`]: slice::as_flattened_mut + /// /// # Panics /// - /// Panics if `N` is zero. This check will most probably get changed to a compile time - /// error before this method gets stabilized. + /// Panics if `N` is zero. + /// + /// Note that this check is against a const generic parameter, not a runtime + /// value, and thus a particular monomorphization will either always panic + /// or it will never panic. /// /// # Examples /// /// ``` - /// #![feature(slice_as_chunks)] /// let v = &mut [0, 0, 0, 0, 0]; /// let mut count = 1; /// @@ -1506,8 +1551,8 @@ impl [T] { /// } /// assert_eq!(v, &[1, 1, 2, 2, 9]); /// ``` - #[unstable(feature = "slice_as_chunks", issue = "74985")] - #[rustc_const_unstable(feature = "slice_as_chunks", issue = "74985")] + #[stable(feature = "slice_as_chunks", since = "CURRENT_RUSTC_VERSION")] + #[rustc_const_stable(feature = "slice_as_chunks", since = "CURRENT_RUSTC_VERSION")] #[inline] #[track_caller] #[must_use] @@ -1527,15 +1572,27 @@ impl [T] { /// starting at the end of the slice, /// and a remainder slice with length strictly less than `N`. /// + /// The remainder is meaningful in the division sense. Given + /// `let (remainder, chunks) = slice.as_rchunks_mut()`, then: + /// - `remainder.len()` equals `slice.len() % N`, + /// - `chunks.len()` equals `slice.len() / N`, and + /// - `slice.len()` equals `chunks.len() * N + remainder.len()`. + /// + /// You can flatten the chunks back into a slice-of-`T` with [`as_flattened_mut`]. + /// + /// [`as_flattened_mut`]: slice::as_flattened_mut + /// /// # Panics /// - /// Panics if `N` is zero. This check will most probably get changed to a compile time - /// error before this method gets stabilized. + /// Panics if `N` is zero. + /// + /// Note that this check is against a const generic parameter, not a runtime + /// value, and thus a particular monomorphization will either always panic + /// or it will never panic. /// /// # Examples /// /// ``` - /// #![feature(slice_as_chunks)] /// let v = &mut [0, 0, 0, 0, 0]; /// let mut count = 1; /// @@ -1547,8 +1604,8 @@ impl [T] { /// } /// assert_eq!(v, &[9, 1, 1, 2, 2]); /// ``` - #[unstable(feature = "slice_as_chunks", issue = "74985")] - #[rustc_const_unstable(feature = "slice_as_chunks", issue = "74985")] + #[stable(feature = "slice_as_chunks", since = "CURRENT_RUSTC_VERSION")] + #[rustc_const_stable(feature = "slice_as_chunks", since = "CURRENT_RUSTC_VERSION")] #[inline] #[track_caller] #[must_use] @@ -1592,9 +1649,10 @@ impl [T] { /// /// [`chunks_exact_mut`]: slice::chunks_exact_mut #[unstable(feature = "array_chunks", issue = "74985")] + #[rustc_const_unstable(feature = "const_slice_make_iter", issue = "137737")] #[inline] #[track_caller] - pub fn array_chunks_mut(&mut self) -> ArrayChunksMut<'_, T, N> { + pub const fn array_chunks_mut(&mut self) -> ArrayChunksMut<'_, T, N> { assert!(N != 0, "chunk size must be non-zero"); ArrayChunksMut::new(self) } @@ -1625,9 +1683,10 @@ impl [T] { /// /// [`windows`]: slice::windows #[unstable(feature = "array_windows", issue = "75027")] + #[rustc_const_unstable(feature = "const_slice_make_iter", issue = "137737")] #[inline] #[track_caller] - pub fn array_windows(&self) -> ArrayWindows<'_, T, N> { + pub const fn array_windows(&self) -> ArrayWindows<'_, T, N> { assert!(N != 0, "window size must be non-zero"); ArrayWindows::new(self) } @@ -1660,9 +1719,10 @@ impl [T] { /// [`rchunks_exact`]: slice::rchunks_exact /// [`chunks`]: slice::chunks #[stable(feature = "rchunks", since = "1.31.0")] + #[rustc_const_unstable(feature = "const_slice_make_iter", issue = "137737")] #[inline] #[track_caller] - pub fn rchunks(&self, chunk_size: usize) -> RChunks<'_, T> { + pub const fn rchunks(&self, chunk_size: usize) -> RChunks<'_, T> { assert!(chunk_size != 0, "chunk size must be non-zero"); RChunks::new(self, chunk_size) } @@ -1699,9 +1759,10 @@ impl [T] { /// [`rchunks_exact_mut`]: slice::rchunks_exact_mut /// [`chunks_mut`]: slice::chunks_mut #[stable(feature = "rchunks", since = "1.31.0")] + #[rustc_const_unstable(feature = "const_slice_make_iter", issue = "137737")] #[inline] #[track_caller] - pub fn rchunks_mut(&mut self, chunk_size: usize) -> RChunksMut<'_, T> { + pub const fn rchunks_mut(&mut self, chunk_size: usize) -> RChunksMut<'_, T> { assert!(chunk_size != 0, "chunk size must be non-zero"); RChunksMut::new(self, chunk_size) } @@ -1739,9 +1800,10 @@ impl [T] { /// [`rchunks`]: slice::rchunks /// [`chunks_exact`]: slice::chunks_exact #[stable(feature = "rchunks", since = "1.31.0")] + #[rustc_const_unstable(feature = "const_slice_make_iter", issue = "137737")] #[inline] #[track_caller] - pub fn rchunks_exact(&self, chunk_size: usize) -> RChunksExact<'_, T> { + pub const fn rchunks_exact(&self, chunk_size: usize) -> RChunksExact<'_, T> { assert!(chunk_size != 0, "chunk size must be non-zero"); RChunksExact::new(self, chunk_size) } @@ -1783,9 +1845,10 @@ impl [T] { /// [`rchunks_mut`]: slice::rchunks_mut /// [`chunks_exact_mut`]: slice::chunks_exact_mut #[stable(feature = "rchunks", since = "1.31.0")] + #[rustc_const_unstable(feature = "const_slice_make_iter", issue = "137737")] #[inline] #[track_caller] - pub fn rchunks_exact_mut(&mut self, chunk_size: usize) -> RChunksExactMut<'_, T> { + pub const fn rchunks_exact_mut(&mut self, chunk_size: usize) -> RChunksExactMut<'_, T> { assert!(chunk_size != 0, "chunk size must be non-zero"); RChunksExactMut::new(self, chunk_size) } @@ -1823,8 +1886,9 @@ impl [T] { /// assert_eq!(iter.next(), None); /// ``` #[stable(feature = "slice_group_by", since = "1.77.0")] + #[rustc_const_unstable(feature = "const_slice_make_iter", issue = "137737")] #[inline] - pub fn chunk_by(&self, pred: F) -> ChunkBy<'_, T, F> + pub const fn chunk_by(&self, pred: F) -> ChunkBy<'_, T, F> where F: FnMut(&T, &T) -> bool, { @@ -1864,8 +1928,9 @@ impl [T] { /// assert_eq!(iter.next(), None); /// ``` #[stable(feature = "slice_group_by", since = "1.77.0")] + #[rustc_const_unstable(feature = "const_slice_make_iter", issue = "137737")] #[inline] - pub fn chunk_by_mut(&mut self, pred: F) -> ChunkByMut<'_, T, F> + pub const fn chunk_by_mut(&mut self, pred: F) -> ChunkByMut<'_, T, F> where F: FnMut(&T, &T) -> bool, { @@ -2830,7 +2895,7 @@ impl [T] { let half = size / 2; let mid = base + half; - // SAFETY: the call is made safe by the following inconstants: + // SAFETY: the call is made safe by the following invariants: // - `mid >= 0`: by definition // - `mid < size`: `mid = size / 2 + size / 4 + size / 8 ...` let cmp = f(unsafe { self.get_unchecked(mid) }); @@ -2838,7 +2903,7 @@ impl [T] { // Binary search interacts poorly with branch prediction, so force // the compiler to use conditional moves if supported by the target // architecture. - base = (cmp == Greater).select_unpredictable(base, mid); + base = hint::select_unpredictable(cmp == Greater, base, mid); // This is imprecise in the case where `size` is odd and the // comparison returns Greater: the mid element still gets included @@ -3731,7 +3796,7 @@ impl [T] { #[doc(alias = "memcpy")] #[inline] #[stable(feature = "copy_from_slice", since = "1.9.0")] - #[rustc_const_stable(feature = "const_copy_from_slice", since = "CURRENT_RUSTC_VERSION")] + #[rustc_const_stable(feature = "const_copy_from_slice", since = "1.87.0")] #[track_caller] pub const fn copy_from_slice(&mut self, src: &[T]) where @@ -4341,7 +4406,7 @@ impl [T] { /// ``` #[inline] #[must_use = "method does not modify the slice if the range is out of bounds"] - #[stable(feature = "slice_take", since = "CURRENT_RUSTC_VERSION")] + #[stable(feature = "slice_take", since = "1.87.0")] pub fn split_off<'a, R: OneSidedRange>( self: &mut &'a Self, range: R, @@ -4407,7 +4472,7 @@ impl [T] { /// ``` #[inline] #[must_use = "method does not modify the slice if the range is out of bounds"] - #[stable(feature = "slice_take", since = "CURRENT_RUSTC_VERSION")] + #[stable(feature = "slice_take", since = "1.87.0")] pub fn split_off_mut<'a, R: OneSidedRange>( self: &mut &'a mut Self, range: R, @@ -4444,7 +4509,7 @@ impl [T] { /// assert_eq!(first, &'a'); /// ``` #[inline] - #[stable(feature = "slice_take", since = "CURRENT_RUSTC_VERSION")] + #[stable(feature = "slice_take", since = "1.87.0")] #[rustc_const_unstable(feature = "const_split_off_first_last", issue = "138539")] pub const fn split_off_first<'a>(self: &mut &'a Self) -> Option<&'a T> { // FIXME(const-hack): Use `?` when available in const instead of `let-else`. @@ -4469,7 +4534,7 @@ impl [T] { /// assert_eq!(first, &'d'); /// ``` #[inline] - #[stable(feature = "slice_take", since = "CURRENT_RUSTC_VERSION")] + #[stable(feature = "slice_take", since = "1.87.0")] #[rustc_const_unstable(feature = "const_split_off_first_last", issue = "138539")] pub const fn split_off_first_mut<'a>(self: &mut &'a mut Self) -> Option<&'a mut T> { // FIXME(const-hack): Use `mem::take` and `?` when available in const. @@ -4494,7 +4559,7 @@ impl [T] { /// assert_eq!(last, &'c'); /// ``` #[inline] - #[stable(feature = "slice_take", since = "CURRENT_RUSTC_VERSION")] + #[stable(feature = "slice_take", since = "1.87.0")] #[rustc_const_unstable(feature = "const_split_off_first_last", issue = "138539")] pub const fn split_off_last<'a>(self: &mut &'a Self) -> Option<&'a T> { // FIXME(const-hack): Use `?` when available in const instead of `let-else`. @@ -4519,7 +4584,7 @@ impl [T] { /// assert_eq!(last, &'d'); /// ``` #[inline] - #[stable(feature = "slice_take", since = "CURRENT_RUSTC_VERSION")] + #[stable(feature = "slice_take", since = "1.87.0")] #[rustc_const_unstable(feature = "const_split_off_first_last", issue = "138539")] pub const fn split_off_last_mut<'a>(self: &mut &'a mut Self) -> Option<&'a mut T> { // FIXME(const-hack): Use `mem::take` and `?` when available in const. @@ -4589,7 +4654,7 @@ impl [T] { // or generate worse code otherwise. This is also why we need to go // through a raw pointer here. let slice: *mut [T] = self; - let mut arr: mem::MaybeUninit<[&mut I::Output; N]> = mem::MaybeUninit::uninit(); + let mut arr: MaybeUninit<[&mut I::Output; N]> = MaybeUninit::uninit(); let arr_ptr = arr.as_mut_ptr(); // SAFETY: We expect `indices` to contain disjunct values that are @@ -4774,9 +4839,63 @@ impl [T] { } } +impl [MaybeUninit] { + /// Transmutes the mutable uninitialized slice to a mutable uninitialized slice of + /// another type, ensuring alignment of the types is maintained. + /// + /// This is a safe wrapper around [`slice::align_to_mut`], so inherits the same + /// guarantees as that method. + /// + /// # Examples + /// + /// ``` + /// #![feature(align_to_uninit_mut)] + /// use std::mem::MaybeUninit; + /// + /// pub struct BumpAllocator<'scope> { + /// memory: &'scope mut [MaybeUninit], + /// } + /// + /// impl<'scope> BumpAllocator<'scope> { + /// pub fn new(memory: &'scope mut [MaybeUninit]) -> Self { + /// Self { memory } + /// } + /// pub fn try_alloc_uninit(&mut self) -> Option<&'scope mut MaybeUninit> { + /// let first_end = self.memory.as_ptr().align_offset(align_of::()) + size_of::(); + /// let prefix = self.memory.split_off_mut(..first_end)?; + /// Some(&mut prefix.align_to_uninit_mut::().1[0]) + /// } + /// pub fn try_alloc_u32(&mut self, value: u32) -> Option<&'scope mut u32> { + /// let uninit = self.try_alloc_uninit()?; + /// Some(uninit.write(value)) + /// } + /// } + /// + /// let mut memory = [MaybeUninit::::uninit(); 10]; + /// let mut allocator = BumpAllocator::new(&mut memory); + /// let v = allocator.try_alloc_u32(42); + /// assert_eq!(v, Some(&mut 42)); + /// ``` + #[unstable(feature = "align_to_uninit_mut", issue = "139062")] + #[inline] + #[must_use] + pub fn align_to_uninit_mut(&mut self) -> (&mut Self, &mut [MaybeUninit], &mut Self) { + // SAFETY: `MaybeUninit` is transparent. Correct size and alignment are guaranteed by + // `align_to_mut` itself. Therefore the only thing that we have to ensure for a safe + // `transmute` is that the values are valid for the types involved. But for `MaybeUninit` + // any values are valid, so this operation is safe. + unsafe { self.align_to_mut() } + } +} + impl [[T; N]] { /// Takes a `&[[T; N]]`, and flattens it to a `&[T]`. /// + /// For the opposite operation, see [`as_chunks`] and [`as_rchunks`]. + /// + /// [`as_chunks`]: slice::as_chunks + /// [`as_rchunks`]: slice::as_rchunks + /// /// # Panics /// /// This panics if the length of the resulting slice would overflow a `usize`. @@ -4802,7 +4921,7 @@ impl [[T; N]] { /// assert!(empty_slice_of_arrays.as_flattened().is_empty()); /// ``` #[stable(feature = "slice_flatten", since = "1.80.0")] - #[rustc_const_stable(feature = "const_slice_flatten", since = "CURRENT_RUSTC_VERSION")] + #[rustc_const_stable(feature = "const_slice_flatten", since = "1.87.0")] pub const fn as_flattened(&self) -> &[T] { let len = if T::IS_ZST { self.len().checked_mul(N).expect("slice len overflow") @@ -4817,6 +4936,11 @@ impl [[T; N]] { /// Takes a `&mut [[T; N]]`, and flattens it to a `&mut [T]`. /// + /// For the opposite operation, see [`as_chunks_mut`] and [`as_rchunks_mut`]. + /// + /// [`as_chunks_mut`]: slice::as_chunks_mut + /// [`as_rchunks_mut`]: slice::as_rchunks_mut + /// /// # Panics /// /// This panics if the length of the resulting slice would overflow a `usize`. @@ -4839,7 +4963,7 @@ impl [[T; N]] { /// assert_eq!(array, [[6, 7, 8], [9, 10, 11], [12, 13, 14]]); /// ``` #[stable(feature = "slice_flatten", since = "1.80.0")] - #[rustc_const_stable(feature = "const_slice_flatten", since = "CURRENT_RUSTC_VERSION")] + #[rustc_const_stable(feature = "const_slice_flatten", since = "1.87.0")] pub const fn as_flattened_mut(&mut self) -> &mut [T] { let len = if T::IS_ZST { self.len().checked_mul(N).expect("slice len overflow") diff --git a/library/core/src/slice/sort/shared/smallsort.rs b/library/core/src/slice/sort/shared/smallsort.rs index 95f196a40d01c..4280f7570db4c 100644 --- a/library/core/src/slice/sort/shared/smallsort.rs +++ b/library/core/src/slice/sort/shared/smallsort.rs @@ -2,7 +2,7 @@ use crate::mem::{self, ManuallyDrop, MaybeUninit}; use crate::slice::sort::shared::FreezeMarker; -use crate::{intrinsics, ptr, slice}; +use crate::{hint, intrinsics, ptr, slice}; // It's important to differentiate between SMALL_SORT_THRESHOLD performance for // small slices and small-sort performance sorting small sub-slices as part of @@ -408,8 +408,8 @@ where // } // The goal is to generate cmov instructions here. - let v_a_swap = should_swap.select_unpredictable(v_b, v_a); - let v_b_swap = should_swap.select_unpredictable(v_a, v_b); + let v_a_swap = hint::select_unpredictable(should_swap, v_b, v_a); + let v_b_swap = hint::select_unpredictable(should_swap, v_a, v_b); let v_b_swap_tmp = ManuallyDrop::new(ptr::read(v_b_swap)); ptr::copy(v_a_swap, v_a, 1); @@ -640,15 +640,15 @@ pub unsafe fn sort4_stable bool>( // 1, 1 | c b a d let c3 = is_less(&*c, &*a); let c4 = is_less(&*d, &*b); - let min = c3.select_unpredictable(c, a); - let max = c4.select_unpredictable(b, d); - let unknown_left = c3.select_unpredictable(a, c4.select_unpredictable(c, b)); - let unknown_right = c4.select_unpredictable(d, c3.select_unpredictable(b, c)); + let min = hint::select_unpredictable(c3, c, a); + let max = hint::select_unpredictable(c4, b, d); + let unknown_left = hint::select_unpredictable(c3, a, hint::select_unpredictable(c4, c, b)); + let unknown_right = hint::select_unpredictable(c4, d, hint::select_unpredictable(c3, b, c)); // Sort the last two unknown elements. let c5 = is_less(&*unknown_right, &*unknown_left); - let lo = c5.select_unpredictable(unknown_right, unknown_left); - let hi = c5.select_unpredictable(unknown_left, unknown_right); + let lo = hint::select_unpredictable(c5, unknown_right, unknown_left); + let hi = hint::select_unpredictable(c5, unknown_left, unknown_right); ptr::copy_nonoverlapping(min, dst, 1); ptr::copy_nonoverlapping(lo, dst.add(1), 1); diff --git a/library/core/src/str/converts.rs b/library/core/src/str/converts.rs index 1276d9014f0ef..058628797ea85 100644 --- a/library/core/src/str/converts.rs +++ b/library/core/src/str/converts.rs @@ -126,7 +126,7 @@ pub const fn from_utf8(v: &[u8]) -> Result<&str, Utf8Error> { /// See the docs for [`Utf8Error`] for more details on the kinds of /// errors that can be returned. #[stable(feature = "str_mut_extras", since = "1.20.0")] -#[rustc_const_stable(feature = "const_str_from_utf8", since = "CURRENT_RUSTC_VERSION")] +#[rustc_const_stable(feature = "const_str_from_utf8", since = "1.87.0")] #[rustc_diagnostic_item = "str_from_utf8_mut"] pub const fn from_utf8_mut(v: &mut [u8]) -> Result<&mut str, Utf8Error> { // FIXME(const-hack): This should use `?` again, once it's `const` @@ -178,7 +178,7 @@ pub const unsafe fn from_utf8_unchecked(v: &[u8]) -> &str { /// Converts a slice of bytes to a string slice without checking /// that the string contains valid UTF-8; mutable version. /// -/// See the immutable version, [`from_utf8_unchecked()`] for more information. +/// See the immutable version, [`from_utf8_unchecked()`] for documentation and safety requirements. /// /// # Examples /// diff --git a/library/core/src/str/lossy.rs b/library/core/src/str/lossy.rs index ed2cefc59a51c..8d4210c80827d 100644 --- a/library/core/src/str/lossy.rs +++ b/library/core/src/str/lossy.rs @@ -147,12 +147,14 @@ impl fmt::Debug for Debug<'_> { /// An iterator used to decode a slice of mostly UTF-8 bytes to string slices /// ([`&str`]) and byte slices ([`&[u8]`][byteslice]). /// +/// This struct is created by the [`utf8_chunks`] method on bytes slices. /// If you want a simple conversion from UTF-8 byte slices to string slices, /// [`from_utf8`] is easier to use. /// /// See the [`Utf8Chunk`] type for documentation of the items yielded by this iterator. /// /// [byteslice]: slice +/// [`utf8_chunks`]: slice::utf8_chunks /// [`from_utf8`]: super::from_utf8 /// /// # Examples diff --git a/library/core/src/str/mod.rs b/library/core/src/str/mod.rs index 5cc08f8a71afb..dafabba645c61 100644 --- a/library/core/src/str/mod.rs +++ b/library/core/src/str/mod.rs @@ -134,6 +134,7 @@ impl str { #[stable(feature = "rust1", since = "1.0.0")] #[rustc_const_stable(feature = "const_str_len", since = "1.39.0")] #[rustc_diagnostic_item = "str_len"] + #[cfg_attr(not(bootstrap), rustc_no_implicit_autorefs)] #[must_use] #[inline] pub const fn len(&self) -> usize { @@ -153,6 +154,7 @@ impl str { /// ``` #[stable(feature = "rust1", since = "1.0.0")] #[rustc_const_stable(feature = "const_str_is_empty", since = "1.39.0")] + #[cfg_attr(not(bootstrap), rustc_no_implicit_autorefs)] #[must_use] #[inline] pub const fn is_empty(&self) -> bool { @@ -230,8 +232,8 @@ impl str { /// /// assert_eq!("💖", sparkle_heart); /// ``` - #[stable(feature = "inherent_str_constructors", since = "CURRENT_RUSTC_VERSION")] - #[rustc_const_stable(feature = "inherent_str_constructors", since = "CURRENT_RUSTC_VERSION")] + #[stable(feature = "inherent_str_constructors", since = "1.87.0")] + #[rustc_const_stable(feature = "inherent_str_constructors", since = "1.87.0")] #[rustc_diagnostic_item = "str_inherent_from_utf8"] pub const fn from_utf8(v: &[u8]) -> Result<&str, Utf8Error> { converts::from_utf8(v) @@ -263,8 +265,8 @@ impl str { /// ``` /// See the docs for [`Utf8Error`] for more details on the kinds of /// errors that can be returned. - #[stable(feature = "inherent_str_constructors", since = "CURRENT_RUSTC_VERSION")] - #[rustc_const_stable(feature = "const_str_from_utf8", since = "CURRENT_RUSTC_VERSION")] + #[stable(feature = "inherent_str_constructors", since = "1.87.0")] + #[rustc_const_stable(feature = "const_str_from_utf8", since = "1.87.0")] #[rustc_diagnostic_item = "str_inherent_from_utf8_mut"] pub const fn from_utf8_mut(v: &mut [u8]) -> Result<&mut str, Utf8Error> { converts::from_utf8_mut(v) @@ -295,8 +297,8 @@ impl str { /// ``` #[inline] #[must_use] - #[stable(feature = "inherent_str_constructors", since = "CURRENT_RUSTC_VERSION")] - #[rustc_const_stable(feature = "inherent_str_constructors", since = "CURRENT_RUSTC_VERSION")] + #[stable(feature = "inherent_str_constructors", since = "1.87.0")] + #[rustc_const_stable(feature = "inherent_str_constructors", since = "1.87.0")] #[rustc_diagnostic_item = "str_inherent_from_utf8_unchecked"] pub const unsafe fn from_utf8_unchecked(v: &[u8]) -> &str { // SAFETY: converts::from_utf8_unchecked has the same safety requirements as this function. @@ -306,7 +308,7 @@ impl str { /// Converts a slice of bytes to a string slice without checking /// that the string contains valid UTF-8; mutable version. /// - /// See the immutable version, [`from_utf8_unchecked()`] for more information. + /// See the immutable version, [`from_utf8_unchecked()`] for documentation and safety requirements. /// /// # Examples /// @@ -320,8 +322,8 @@ impl str { /// ``` #[inline] #[must_use] - #[stable(feature = "inherent_str_constructors", since = "CURRENT_RUSTC_VERSION")] - #[rustc_const_stable(feature = "inherent_str_constructors", since = "CURRENT_RUSTC_VERSION")] + #[stable(feature = "inherent_str_constructors", since = "1.87.0")] + #[rustc_const_stable(feature = "inherent_str_constructors", since = "1.87.0")] #[rustc_diagnostic_item = "str_inherent_from_utf8_unchecked_mut"] pub const unsafe fn from_utf8_unchecked_mut(v: &mut [u8]) -> &mut str { // SAFETY: converts::from_utf8_unchecked_mut has the same safety requirements as this function. @@ -2115,7 +2117,7 @@ impl str { #[stable(feature = "rust1", since = "1.0.0")] #[rustc_diagnostic_item = "str_trim"] pub fn trim(&self) -> &str { - self.trim_matches(|c: char| c.is_whitespace()) + self.trim_matches(char::is_whitespace) } /// Returns a string slice with leading whitespace removed. @@ -2154,7 +2156,7 @@ impl str { #[stable(feature = "trim_direction", since = "1.30.0")] #[rustc_diagnostic_item = "str_trim_start"] pub fn trim_start(&self) -> &str { - self.trim_start_matches(|c: char| c.is_whitespace()) + self.trim_start_matches(char::is_whitespace) } /// Returns a string slice with trailing whitespace removed. @@ -2193,7 +2195,7 @@ impl str { #[stable(feature = "trim_direction", since = "1.30.0")] #[rustc_diagnostic_item = "str_trim_end"] pub fn trim_end(&self) -> &str { - self.trim_end_matches(|c: char| c.is_whitespace()) + self.trim_end_matches(char::is_whitespace) } /// Returns a string slice with leading whitespace removed. diff --git a/library/core/src/sync/atomic.rs b/library/core/src/sync/atomic.rs index 9b1b13e7129ee..84c7f1aafe1b0 100644 --- a/library/core/src/sync/atomic.rs +++ b/library/core/src/sync/atomic.rs @@ -247,6 +247,100 @@ use crate::cell::UnsafeCell; use crate::hint::spin_loop; use crate::{fmt, intrinsics}; +trait Sealed {} + +/// A marker trait for primitive types which can be modified atomically. +/// +/// This is an implementation detail for [Atomic]\ which may disappear or be replaced at any time. +/// +/// # Safety +/// +/// Types implementing this trait must be primitives that can be modified atomically. +/// +/// The associated `Self::AtomicInner` type must have the same size and bit validity as `Self`, +/// but may have a higher alignment requirement, so the following `transmute`s are sound: +/// +/// - `&mut Self::AtomicInner` as `&mut Self` +/// - `Self` as `Self::AtomicInner` or the reverse +#[unstable( + feature = "atomic_internals", + reason = "implementation detail which may disappear or be replaced at any time", + issue = "none" +)] +#[expect(private_bounds)] +pub unsafe trait AtomicPrimitive: Sized + Copy + Sealed { + /// Temporary implementation detail. + type AtomicInner: Sized; +} + +macro impl_atomic_primitive( + $Atom:ident $(<$T:ident>)? ($Primitive:ty), + size($size:literal), + align($align:literal) $(,)? +) { + impl $(<$T>)? Sealed for $Primitive {} + + #[unstable( + feature = "atomic_internals", + reason = "implementation detail which may disappear or be replaced at any time", + issue = "none" + )] + #[cfg(target_has_atomic_load_store = $size)] + unsafe impl $(<$T>)? AtomicPrimitive for $Primitive { + type AtomicInner = $Atom $(<$T>)?; + } +} + +impl_atomic_primitive!(AtomicBool(bool), size("8"), align(1)); +impl_atomic_primitive!(AtomicI8(i8), size("8"), align(1)); +impl_atomic_primitive!(AtomicU8(u8), size("8"), align(1)); +impl_atomic_primitive!(AtomicI16(i16), size("16"), align(2)); +impl_atomic_primitive!(AtomicU16(u16), size("16"), align(2)); +impl_atomic_primitive!(AtomicI32(i32), size("32"), align(4)); +impl_atomic_primitive!(AtomicU32(u32), size("32"), align(4)); +impl_atomic_primitive!(AtomicI64(i64), size("64"), align(8)); +impl_atomic_primitive!(AtomicU64(u64), size("64"), align(8)); +impl_atomic_primitive!(AtomicI128(i128), size("128"), align(16)); +impl_atomic_primitive!(AtomicU128(u128), size("128"), align(16)); + +#[cfg(target_pointer_width = "16")] +impl_atomic_primitive!(AtomicIsize(isize), size("ptr"), align(2)); +#[cfg(target_pointer_width = "32")] +impl_atomic_primitive!(AtomicIsize(isize), size("ptr"), align(4)); +#[cfg(target_pointer_width = "64")] +impl_atomic_primitive!(AtomicIsize(isize), size("ptr"), align(8)); + +#[cfg(target_pointer_width = "16")] +impl_atomic_primitive!(AtomicUsize(usize), size("ptr"), align(2)); +#[cfg(target_pointer_width = "32")] +impl_atomic_primitive!(AtomicUsize(usize), size("ptr"), align(4)); +#[cfg(target_pointer_width = "64")] +impl_atomic_primitive!(AtomicUsize(usize), size("ptr"), align(8)); + +#[cfg(target_pointer_width = "16")] +impl_atomic_primitive!(AtomicPtr(*mut T), size("ptr"), align(2)); +#[cfg(target_pointer_width = "32")] +impl_atomic_primitive!(AtomicPtr(*mut T), size("ptr"), align(4)); +#[cfg(target_pointer_width = "64")] +impl_atomic_primitive!(AtomicPtr(*mut T), size("ptr"), align(8)); + +/// A memory location which can be safely modified from multiple threads. +/// +/// This has the same size and bit validity as the underlying type `T`. However, +/// the alignment of this type is always equal to its size, even on targets where +/// `T` has alignment less than its size. +/// +/// For more about the differences between atomic types and non-atomic types as +/// well as information about the portability of this type, please see the +/// [module-level documentation]. +/// +/// **Note:** This type is only available on platforms that support atomic loads +/// and stores of `T`. +/// +/// [module-level documentation]: crate::sync::atomic +#[unstable(feature = "generic_atomic", issue = "130539")] +pub type Atomic = ::AtomicInner; + // Some architectures don't have byte-sized atomics, which results in LLVM // emulating them using a LL/SC loop. However for AtomicBool we can take // advantage of the fact that it only ever contains 0 or 1 and use atomic OR/AND diff --git a/library/core/src/tuple.rs b/library/core/src/tuple.rs index d754bb9034300..02eb805ece121 100644 --- a/library/core/src/tuple.rs +++ b/library/core/src/tuple.rs @@ -2,7 +2,7 @@ use crate::cmp::Ordering::{self, *}; use crate::marker::{ConstParamTy_, StructuralPartialEq, UnsizedConstParamTy}; -use crate::ops::ControlFlow::{Break, Continue}; +use crate::ops::ControlFlow::{self, Break, Continue}; // Recursive macro for implementing n-ary tuple functions and operations // @@ -95,6 +95,22 @@ macro_rules! tuple_impls { fn gt(&self, other: &($($T,)+)) -> bool { lexical_ord!(gt, __chaining_gt, $( ${ignore($T)} self.${index()}, other.${index()} ),+) } + #[inline] + fn __chaining_lt(&self, other: &($($T,)+)) -> ControlFlow { + lexical_chain!(__chaining_lt, $( ${ignore($T)} self.${index()}, other.${index()} ),+) + } + #[inline] + fn __chaining_le(&self, other: &($($T,)+)) -> ControlFlow { + lexical_chain!(__chaining_le, $( ${ignore($T)} self.${index()}, other.${index()} ),+) + } + #[inline] + fn __chaining_gt(&self, other: &($($T,)+)) -> ControlFlow { + lexical_chain!(__chaining_gt, $( ${ignore($T)} self.${index()}, other.${index()} ),+) + } + #[inline] + fn __chaining_ge(&self, other: &($($T,)+)) -> ControlFlow { + lexical_chain!(__chaining_ge, $( ${ignore($T)} self.${index()}, other.${index()} ),+) + } } } @@ -187,6 +203,17 @@ macro_rules! lexical_ord { }; } +// Same parameter interleaving as `lexical_ord` above +macro_rules! lexical_chain { + ($chain_rel: ident, $a:expr, $b:expr $(,$rest_a:expr, $rest_b:expr)*) => {{ + PartialOrd::$chain_rel(&$a, &$b)?; + lexical_chain!($chain_rel $(,$rest_a, $rest_b)*) + }}; + ($chain_rel: ident) => { + Continue(()) + }; +} + macro_rules! lexical_partial_cmp { ($a:expr, $b:expr, $($rest_a:expr, $rest_b:expr),+) => { match ($a).partial_cmp(&$b) { diff --git a/library/core/src/unicode/unicode_data.rs b/library/core/src/unicode/unicode_data.rs index 4655d35e9c437..25b9c6e0e0e94 100644 --- a/library/core/src/unicode/unicode_data.rs +++ b/library/core/src/unicode/unicode_data.rs @@ -47,45 +47,78 @@ const fn bitset_search< (word & (1 << (needle % 64) as u64)) != 0 } -fn decode_prefix_sum(short_offset_run_header: u32) -> u32 { - short_offset_run_header & ((1 << 21) - 1) -} +#[repr(transparent)] +struct ShortOffsetRunHeader(u32); + +impl ShortOffsetRunHeader { + const fn new(start_index: usize, prefix_sum: u32) -> Self { + assert!(start_index < (1 << 11)); + assert!(prefix_sum < (1 << 21)); + + Self((start_index as u32) << 21 | prefix_sum) + } -fn decode_length(short_offset_run_header: u32) -> usize { - (short_offset_run_header >> 21) as usize + #[inline] + const fn start_index(&self) -> usize { + (self.0 >> 21) as usize + } + + #[inline] + const fn prefix_sum(&self) -> u32 { + self.0 & ((1 << 21) - 1) + } } +/// # Safety +/// +/// - The last element of `short_offset_runs` must be greater than `std::char::MAX`. +/// - The start indices of all elements in `short_offset_runs` must be less than `OFFSETS`. #[inline(always)] -fn skip_search( - needle: u32, - short_offset_runs: &[u32; SOR], +unsafe fn skip_search( + needle: char, + short_offset_runs: &[ShortOffsetRunHeader; SOR], offsets: &[u8; OFFSETS], ) -> bool { - // Note that this *cannot* be past the end of the array, as the last - // element is greater than std::char::MAX (the largest possible needle). - // - // So, we cannot have found it (i.e. Ok(idx) + 1 != length) and the correct - // location cannot be past it, so Err(idx) != length either. - // - // This means that we can avoid bounds checking for the accesses below, too. + let needle = needle as u32; + let last_idx = - match short_offset_runs.binary_search_by_key(&(needle << 11), |header| header << 11) { + match short_offset_runs.binary_search_by_key(&(needle << 11), |header| (header.0 << 11)) { Ok(idx) => idx + 1, Err(idx) => idx, }; + // SAFETY: `last_idx` *cannot* be past the end of the array, as the last + // element is greater than `std::char::MAX` (the largest possible needle) + // as guaranteed by the caller. + // + // So, we cannot have found it (i.e. `Ok(idx) => idx + 1 != length`) and the + // correct location cannot be past it, so `Err(idx) => idx != length` either. + // + // This means that we can avoid bounds checking for the accesses below, too. + // + // We need to use `intrinsics::assume` since the `panic_nounwind` contained + // in `hint::assert_unchecked` may not be optimized out. + unsafe { crate::intrinsics::assume(last_idx < SOR) }; - let mut offset_idx = decode_length(short_offset_runs[last_idx]); + let mut offset_idx = short_offset_runs[last_idx].start_index(); let length = if let Some(next) = short_offset_runs.get(last_idx + 1) { - decode_length(*next) - offset_idx + (*next).start_index() - offset_idx } else { offsets.len() - offset_idx }; + let prev = - last_idx.checked_sub(1).map(|prev| decode_prefix_sum(short_offset_runs[prev])).unwrap_or(0); + last_idx.checked_sub(1).map(|prev| short_offset_runs[prev].prefix_sum()).unwrap_or(0); let total = needle - prev; let mut prefix_sum = 0; for _ in 0..(length - 1) { + // SAFETY: It is guaranteed that `length <= OFFSETS - offset_idx`, + // so it follows that `length - 1 + offset_idx < OFFSETS`, therefore + // `offset_idx < OFFSETS` is always true in this loop. + // + // We need to use `intrinsics::assume` since the `panic_nounwind` contained + // in `hint::assert_unchecked` may not be optimized out. + unsafe { crate::intrinsics::assume(offset_idx < OFFSETS) }; let offset = offsets[offset_idx]; prefix_sum += offset as u32; if prefix_sum > total { @@ -100,15 +133,36 @@ pub const UNICODE_VERSION: (u8, u8, u8) = (16, 0, 0); #[rustfmt::skip] pub mod alphabetic { - static SHORT_OFFSET_RUNS: [u32; 53] = [ - 706, 33559113, 876615277, 956309270, 1166025910, 1314925568, 1319120901, 1398813696, - 1449151936, 1451271309, 1455465997, 1463867300, 1652619520, 1663105646, 1665203518, - 1711342208, 1797326647, 1895898848, 2560697242, 2583768976, 2594255920, 2600551419, - 2608940615, 2613141760, 2615240704, 2619435577, 2621533504, 2652997624, 2688650454, - 2692853744, 2699145507, 2713826044, 2734799872, 2736903168, 2757875366, 2835472128, - 2883707536, 2934039760, 2942429152, 2955013632, 2988568880, 3126984704, 3139610336, - 3141711674, 3145911970, 3154308065, 3158503006, 3162699776, 3164797470, 3166896128, - 3168998219, 3171099568, 3176407984, + use super::ShortOffsetRunHeader; + + static SHORT_OFFSET_RUNS: [ShortOffsetRunHeader; 53] = [ + ShortOffsetRunHeader::new(0, 706), ShortOffsetRunHeader::new(16, 4681), + ShortOffsetRunHeader::new(418, 5741), ShortOffsetRunHeader::new(456, 7958), + ShortOffsetRunHeader::new(556, 9398), ShortOffsetRunHeader::new(627, 11264), + ShortOffsetRunHeader::new(629, 12293), ShortOffsetRunHeader::new(667, 13312), + ShortOffsetRunHeader::new(691, 19904), ShortOffsetRunHeader::new(692, 42125), + ShortOffsetRunHeader::new(694, 42509), ShortOffsetRunHeader::new(698, 55204), + ShortOffsetRunHeader::new(788, 63744), ShortOffsetRunHeader::new(793, 64110), + ShortOffsetRunHeader::new(794, 64830), ShortOffsetRunHeader::new(816, 66176), + ShortOffsetRunHeader::new(857, 67383), ShortOffsetRunHeader::new(904, 73440), + ShortOffsetRunHeader::new(1221, 74650), ShortOffsetRunHeader::new(1232, 77712), + ShortOffsetRunHeader::new(1237, 78896), ShortOffsetRunHeader::new(1240, 82939), + ShortOffsetRunHeader::new(1244, 83527), ShortOffsetRunHeader::new(1246, 90368), + ShortOffsetRunHeader::new(1247, 92160), ShortOffsetRunHeader::new(1249, 92729), + ShortOffsetRunHeader::new(1250, 93504), ShortOffsetRunHeader::new(1265, 100344), + ShortOffsetRunHeader::new(1282, 101590), ShortOffsetRunHeader::new(1284, 110576), + ShortOffsetRunHeader::new(1287, 110883), ShortOffsetRunHeader::new(1294, 111356), + ShortOffsetRunHeader::new(1304, 113664), ShortOffsetRunHeader::new(1305, 119808), + ShortOffsetRunHeader::new(1315, 120486), ShortOffsetRunHeader::new(1352, 122624), + ShortOffsetRunHeader::new(1375, 123536), ShortOffsetRunHeader::new(1399, 124112), + ShortOffsetRunHeader::new(1403, 124896), ShortOffsetRunHeader::new(1409, 126464), + ShortOffsetRunHeader::new(1425, 127280), ShortOffsetRunHeader::new(1491, 131072), + ShortOffsetRunHeader::new(1497, 173792), ShortOffsetRunHeader::new(1498, 177978), + ShortOffsetRunHeader::new(1500, 183970), ShortOffsetRunHeader::new(1504, 191457), + ShortOffsetRunHeader::new(1506, 192094), ShortOffsetRunHeader::new(1508, 194560), + ShortOffsetRunHeader::new(1509, 195102), ShortOffsetRunHeader::new(1510, 196608), + ShortOffsetRunHeader::new(1511, 201547), ShortOffsetRunHeader::new(1512, 205744), + ShortOffsetRunHeader::new(1514, 1319856), ]; static OFFSETS: [u8; 1515] = [ 65, 26, 6, 26, 47, 1, 10, 1, 4, 1, 5, 23, 1, 31, 1, 0, 4, 12, 14, 5, 7, 1, 1, 1, 86, 1, 29, @@ -169,22 +223,44 @@ pub mod alphabetic { 0, 0, 0, 0, 5, 0, 0, ]; pub fn lookup(c: char) -> bool { - super::skip_search( - c as u32, - &SHORT_OFFSET_RUNS, - &OFFSETS, - ) + const { + assert!(SHORT_OFFSET_RUNS.last().unwrap().0 > char::MAX as u32); + let mut i = 0; + while i < SHORT_OFFSET_RUNS.len() { + assert!(SHORT_OFFSET_RUNS[i].start_index() < OFFSETS.len()); + i += 1; + } + } + // SAFETY: We just ensured the last element of `SHORT_OFFSET_RUNS` is greater than `std::char::MAX` + // and the start indices of all elements in `SHORT_OFFSET_RUNS` are smaller than `OFFSETS.len()`. + unsafe { super::skip_search(c, &SHORT_OFFSET_RUNS, &OFFSETS) } } } #[rustfmt::skip] pub mod case_ignorable { - static SHORT_OFFSET_RUNS: [u32; 37] = [ - 688, 44045149, 572528402, 576724925, 807414908, 878718981, 903913493, 929080568, 933275148, - 937491230, 1138818560, 1147208189, 1210124160, 1222707713, 1235291428, 1260457643, - 1277237295, 1537284411, 1545673776, 1604394739, 1667314736, 1692492062, 1700883184, - 1709272384, 1721855823, 1730260976, 1747041437, 1759629056, 1768018279, 1776409088, - 1797382144, 1822548654, 1856103659, 1864493264, 1872884731, 1882062849, 1887371760, + use super::ShortOffsetRunHeader; + + static SHORT_OFFSET_RUNS: [ShortOffsetRunHeader; 37] = [ + ShortOffsetRunHeader::new(0, 688), ShortOffsetRunHeader::new(21, 4957), + ShortOffsetRunHeader::new(273, 5906), ShortOffsetRunHeader::new(275, 8125), + ShortOffsetRunHeader::new(385, 11388), ShortOffsetRunHeader::new(419, 12293), + ShortOffsetRunHeader::new(431, 40981), ShortOffsetRunHeader::new(443, 42232), + ShortOffsetRunHeader::new(445, 42508), ShortOffsetRunHeader::new(447, 64286), + ShortOffsetRunHeader::new(543, 65024), ShortOffsetRunHeader::new(547, 66045), + ShortOffsetRunHeader::new(577, 67456), ShortOffsetRunHeader::new(583, 68097), + ShortOffsetRunHeader::new(589, 68900), ShortOffsetRunHeader::new(601, 69291), + ShortOffsetRunHeader::new(609, 71727), ShortOffsetRunHeader::new(733, 71995), + ShortOffsetRunHeader::new(737, 72752), ShortOffsetRunHeader::new(765, 73459), + ShortOffsetRunHeader::new(795, 78896), ShortOffsetRunHeader::new(807, 90398), + ShortOffsetRunHeader::new(811, 92912), ShortOffsetRunHeader::new(815, 93504), + ShortOffsetRunHeader::new(821, 94031), ShortOffsetRunHeader::new(825, 110576), + ShortOffsetRunHeader::new(833, 113821), ShortOffsetRunHeader::new(839, 118528), + ShortOffsetRunHeader::new(843, 119143), ShortOffsetRunHeader::new(847, 121344), + ShortOffsetRunHeader::new(857, 122880), ShortOffsetRunHeader::new(869, 123566), + ShortOffsetRunHeader::new(885, 124139), ShortOffsetRunHeader::new(889, 125136), + ShortOffsetRunHeader::new(893, 127995), ShortOffsetRunHeader::new(897, 917505), + ShortOffsetRunHeader::new(899, 2032112), ]; static OFFSETS: [u8; 905] = [ 39, 1, 6, 1, 11, 1, 35, 1, 1, 1, 71, 1, 4, 1, 1, 1, 4, 1, 2, 2, 0, 192, 4, 2, 4, 1, 9, 2, @@ -222,20 +298,36 @@ pub mod case_ignorable { 1, 61, 4, 0, 5, 254, 2, 0, 7, 109, 8, 0, 5, 0, 1, 30, 96, 128, 240, 0, ]; pub fn lookup(c: char) -> bool { - super::skip_search( - c as u32, - &SHORT_OFFSET_RUNS, - &OFFSETS, - ) + const { + assert!(SHORT_OFFSET_RUNS.last().unwrap().0 > char::MAX as u32); + let mut i = 0; + while i < SHORT_OFFSET_RUNS.len() { + assert!(SHORT_OFFSET_RUNS[i].start_index() < OFFSETS.len()); + i += 1; + } + } + // SAFETY: We just ensured the last element of `SHORT_OFFSET_RUNS` is greater than `std::char::MAX` + // and the start indices of all elements in `SHORT_OFFSET_RUNS` are smaller than `OFFSETS.len()`. + unsafe { super::skip_search(c, &SHORT_OFFSET_RUNS, &OFFSETS) } } } #[rustfmt::skip] pub mod cased { - static SHORT_OFFSET_RUNS: [u32; 22] = [ - 4256, 115348384, 136322176, 144711446, 163587254, 320875520, 325101120, 350268208, - 392231680, 404815649, 413205504, 421595008, 467733632, 484513952, 501313088, 505533440, - 509728422, 587325184, 635559984, 648145152, 652341552, 657650058, + use super::ShortOffsetRunHeader; + + static SHORT_OFFSET_RUNS: [ShortOffsetRunHeader; 22] = [ + ShortOffsetRunHeader::new(0, 4256), ShortOffsetRunHeader::new(55, 5024), + ShortOffsetRunHeader::new(65, 7296), ShortOffsetRunHeader::new(69, 7958), + ShortOffsetRunHeader::new(78, 9398), ShortOffsetRunHeader::new(153, 11264), + ShortOffsetRunHeader::new(155, 42560), ShortOffsetRunHeader::new(167, 43824), + ShortOffsetRunHeader::new(187, 64256), ShortOffsetRunHeader::new(193, 65313), + ShortOffsetRunHeader::new(197, 66560), ShortOffsetRunHeader::new(201, 67456), + ShortOffsetRunHeader::new(223, 68736), ShortOffsetRunHeader::new(231, 71840), + ShortOffsetRunHeader::new(239, 93760), ShortOffsetRunHeader::new(241, 119808), + ShortOffsetRunHeader::new(243, 120486), ShortOffsetRunHeader::new(280, 122624), + ShortOffsetRunHeader::new(303, 122928), ShortOffsetRunHeader::new(309, 125184), + ShortOffsetRunHeader::new(311, 127280), ShortOffsetRunHeader::new(313, 1241482), ]; static OFFSETS: [u8; 319] = [ 65, 26, 6, 26, 47, 1, 10, 1, 4, 1, 5, 23, 1, 31, 1, 195, 1, 4, 4, 208, 1, 36, 7, 2, 30, 5, @@ -252,39 +344,67 @@ pub mod cased { 8, 0, 10, 1, 20, 6, 6, 0, 62, 0, 68, 0, 26, 6, 26, 6, 26, 0, ]; pub fn lookup(c: char) -> bool { - super::skip_search( - c as u32, - &SHORT_OFFSET_RUNS, - &OFFSETS, - ) + const { + assert!(SHORT_OFFSET_RUNS.last().unwrap().0 > char::MAX as u32); + let mut i = 0; + while i < SHORT_OFFSET_RUNS.len() { + assert!(SHORT_OFFSET_RUNS[i].start_index() < OFFSETS.len()); + i += 1; + } + } + // SAFETY: We just ensured the last element of `SHORT_OFFSET_RUNS` is greater than `std::char::MAX` + // and the start indices of all elements in `SHORT_OFFSET_RUNS` are smaller than `OFFSETS.len()`. + unsafe { super::skip_search(c, &SHORT_OFFSET_RUNS, &OFFSETS) } } } #[rustfmt::skip] pub mod cc { - static SHORT_OFFSET_RUNS: [u32; 1] = [ - 1114272, + use super::ShortOffsetRunHeader; + + static SHORT_OFFSET_RUNS: [ShortOffsetRunHeader; 1] = [ + ShortOffsetRunHeader::new(0, 1114272), ]; static OFFSETS: [u8; 5] = [ 0, 32, 95, 33, 0, ]; pub fn lookup(c: char) -> bool { - super::skip_search( - c as u32, - &SHORT_OFFSET_RUNS, - &OFFSETS, - ) + const { + assert!(SHORT_OFFSET_RUNS.last().unwrap().0 > char::MAX as u32); + let mut i = 0; + while i < SHORT_OFFSET_RUNS.len() { + assert!(SHORT_OFFSET_RUNS[i].start_index() < OFFSETS.len()); + i += 1; + } + } + // SAFETY: We just ensured the last element of `SHORT_OFFSET_RUNS` is greater than `std::char::MAX` + // and the start indices of all elements in `SHORT_OFFSET_RUNS` are smaller than `OFFSETS.len()`. + unsafe { super::skip_search(c, &SHORT_OFFSET_RUNS, &OFFSETS) } } } #[rustfmt::skip] pub mod grapheme_extend { - static SHORT_OFFSET_RUNS: [u32; 34] = [ - 768, 2098307, 6292881, 10490717, 522196754, 526393356, 723528943, 731918378, 744531567, - 752920578, 769719070, 908131840, 912326558, 920715773, 924912129, 937495844, 962662059, - 971053103, 1256266800, 1323376371, 1386296384, 1407279390, 1415670512, 1424060239, - 1432468637, 1449250560, 1453445477, 1461836288, 1487003648, 1512170158, 1541530860, - 1549920464, 1559101472, 1568604656, + use super::ShortOffsetRunHeader; + + static SHORT_OFFSET_RUNS: [ShortOffsetRunHeader; 34] = [ + ShortOffsetRunHeader::new(0, 768), ShortOffsetRunHeader::new(1, 1155), + ShortOffsetRunHeader::new(3, 1425), ShortOffsetRunHeader::new(5, 4957), + ShortOffsetRunHeader::new(249, 5906), ShortOffsetRunHeader::new(251, 8204), + ShortOffsetRunHeader::new(345, 11503), ShortOffsetRunHeader::new(349, 12330), + ShortOffsetRunHeader::new(355, 42607), ShortOffsetRunHeader::new(359, 43010), + ShortOffsetRunHeader::new(367, 64286), ShortOffsetRunHeader::new(433, 65024), + ShortOffsetRunHeader::new(435, 65438), ShortOffsetRunHeader::new(439, 66045), + ShortOffsetRunHeader::new(441, 68097), ShortOffsetRunHeader::new(447, 68900), + ShortOffsetRunHeader::new(459, 69291), ShortOffsetRunHeader::new(463, 71727), + ShortOffsetRunHeader::new(599, 72752), ShortOffsetRunHeader::new(631, 73459), + ShortOffsetRunHeader::new(661, 78912), ShortOffsetRunHeader::new(671, 90398), + ShortOffsetRunHeader::new(675, 92912), ShortOffsetRunHeader::new(679, 94031), + ShortOffsetRunHeader::new(683, 113821), ShortOffsetRunHeader::new(691, 118528), + ShortOffsetRunHeader::new(693, 119141), ShortOffsetRunHeader::new(697, 121344), + ShortOffsetRunHeader::new(709, 122880), ShortOffsetRunHeader::new(721, 123566), + ShortOffsetRunHeader::new(735, 124140), ShortOffsetRunHeader::new(739, 125136), + ShortOffsetRunHeader::new(743, 917536), ShortOffsetRunHeader::new(747, 2032112), ]; static OFFSETS: [u8; 751] = [ 0, 112, 0, 7, 0, 45, 1, 1, 1, 2, 1, 2, 1, 1, 72, 11, 48, 21, 16, 1, 101, 7, 2, 6, 2, 2, 1, @@ -319,12 +439,20 @@ pub mod grapheme_extend { pub fn lookup(c: char) -> bool { (c as u32) >= 0x300 && lookup_slow(c) } + + #[inline(never)] fn lookup_slow(c: char) -> bool { - super::skip_search( - c as u32, - &SHORT_OFFSET_RUNS, - &OFFSETS, - ) + const { + assert!(SHORT_OFFSET_RUNS.last().unwrap().0 > char::MAX as u32); + let mut i = 0; + while i < SHORT_OFFSET_RUNS.len() { + assert!(SHORT_OFFSET_RUNS[i].start_index() < OFFSETS.len()); + i += 1; + } + } + // SAFETY: We just ensured the last element of `SHORT_OFFSET_RUNS` is greater than `std::char::MAX` + // and the start indices of all elements in `SHORT_OFFSET_RUNS` are smaller than `OFFSETS.len()`. + unsafe { super::skip_search(c, &SHORT_OFFSET_RUNS, &OFFSETS) } } } @@ -436,13 +564,30 @@ pub mod lowercase { #[rustfmt::skip] pub mod n { - static SHORT_OFFSET_RUNS: [u32; 42] = [ - 1632, 18876774, 31461440, 102765417, 111154926, 115349830, 132128880, 165684320, 186656630, - 195046653, 199241735, 203436434, 216049184, 241215536, 249605104, 274792208, 278987015, - 283181793, 295766104, 320933114, 383848032, 396432464, 438376016, 446765280, 463543280, - 471932752, 488711168, 497115440, 501312096, 505507184, 522284672, 526503152, 530698944, - 534894542, 547479872, 551674608, 555869424, 560064711, 568454257, 576844032, 597818352, - 603126778, + use super::ShortOffsetRunHeader; + + static SHORT_OFFSET_RUNS: [ShortOffsetRunHeader; 42] = [ + ShortOffsetRunHeader::new(0, 1632), ShortOffsetRunHeader::new(9, 2406), + ShortOffsetRunHeader::new(15, 4160), ShortOffsetRunHeader::new(49, 4969), + ShortOffsetRunHeader::new(53, 5870), ShortOffsetRunHeader::new(55, 6470), + ShortOffsetRunHeader::new(63, 8304), ShortOffsetRunHeader::new(79, 9312), + ShortOffsetRunHeader::new(89, 10102), ShortOffsetRunHeader::new(93, 11517), + ShortOffsetRunHeader::new(95, 12295), ShortOffsetRunHeader::new(97, 12690), + ShortOffsetRunHeader::new(103, 42528), ShortOffsetRunHeader::new(115, 43056), + ShortOffsetRunHeader::new(119, 44016), ShortOffsetRunHeader::new(131, 65296), + ShortOffsetRunHeader::new(133, 65799), ShortOffsetRunHeader::new(135, 66273), + ShortOffsetRunHeader::new(141, 67672), ShortOffsetRunHeader::new(153, 68858), + ShortOffsetRunHeader::new(183, 69216), ShortOffsetRunHeader::new(189, 70736), + ShortOffsetRunHeader::new(209, 71248), ShortOffsetRunHeader::new(213, 71904), + ShortOffsetRunHeader::new(221, 72688), ShortOffsetRunHeader::new(225, 73552), + ShortOffsetRunHeader::new(233, 74752), ShortOffsetRunHeader::new(237, 90416), + ShortOffsetRunHeader::new(239, 92768), ShortOffsetRunHeader::new(241, 93552), + ShortOffsetRunHeader::new(249, 93824), ShortOffsetRunHeader::new(251, 118000), + ShortOffsetRunHeader::new(253, 119488), ShortOffsetRunHeader::new(255, 120782), + ShortOffsetRunHeader::new(261, 123200), ShortOffsetRunHeader::new(263, 123632), + ShortOffsetRunHeader::new(265, 124144), ShortOffsetRunHeader::new(267, 125127), + ShortOffsetRunHeader::new(271, 126065), ShortOffsetRunHeader::new(275, 127232), + ShortOffsetRunHeader::new(285, 130032), ShortOffsetRunHeader::new(287, 1244154), ]; static OFFSETS: [u8; 289] = [ 48, 10, 120, 2, 5, 1, 2, 3, 0, 10, 134, 10, 198, 10, 0, 10, 118, 10, 4, 6, 108, 10, 118, @@ -459,11 +604,17 @@ pub mod n { 10, 247, 10, 0, 9, 128, 10, 0, 59, 1, 3, 1, 4, 76, 45, 1, 15, 0, 13, 0, 10, 0, ]; pub fn lookup(c: char) -> bool { - super::skip_search( - c as u32, - &SHORT_OFFSET_RUNS, - &OFFSETS, - ) + const { + assert!(SHORT_OFFSET_RUNS.last().unwrap().0 > char::MAX as u32); + let mut i = 0; + while i < SHORT_OFFSET_RUNS.len() { + assert!(SHORT_OFFSET_RUNS[i].start_index() < OFFSETS.len()); + i += 1; + } + } + // SAFETY: We just ensured the last element of `SHORT_OFFSET_RUNS` is greater than `std::char::MAX` + // and the start indices of all elements in `SHORT_OFFSET_RUNS` are smaller than `OFFSETS.len()`. + unsafe { super::skip_search(c, &SHORT_OFFSET_RUNS, &OFFSETS) } } } diff --git a/library/coretests/Cargo.toml b/library/coretests/Cargo.toml index e44f01d347b3d..7656388d24bee 100644 --- a/library/coretests/Cargo.toml +++ b/library/coretests/Cargo.toml @@ -12,6 +12,7 @@ edition = "2024" path = "lib.rs" test = false bench = false +doc = false [[test]] name = "coretests" diff --git a/library/coretests/benches/ascii.rs b/library/coretests/benches/ascii.rs index 3fe45aa360bf0..64bdc7fed118f 100644 --- a/library/coretests/benches/ascii.rs +++ b/library/coretests/benches/ascii.rs @@ -354,7 +354,7 @@ static ASCII_CHARACTER_CLASS: [AsciiCharacterClass; 256] = [ ]; const ASCII_PATH: &[u8] = b"home/kyubey/rust/build/x86_64-unknown-linux-gnu/stage0/lib:/home/kyubey/workspace/rust/build/x86_64-unknown-linux-gnu/stage0-tools/release/deps"; -const RUST_INCANTATION: &[u8] = br#"AR_x86_64_unknown_linux_gnu="ar" CARGO_INCREMENTAL="0" CARGO_PROFILE_RELEASE_DEBUG="1" CARGO_PROFILE_RELEASE_DEBUG_ASSERTIONS="false" CARGO_PROFILE_RELEASE_OVERFLOW_CHECKS="false" CARGO_TARGET_DIR="/home/kyubey/workspace/rust/build/x86_64-unknown-linux-gnu/stage0-std" CC_x86_64_unknown_linux_gnu="cc" CFG_COMPILER_HOST_TRIPLE="x86_64-unknown-linux-gnu" CFG_RELEASE_CHANNEL="dev" CFLAGS_x86_64_unknown_linux_gnu="-ffunction-sections -fdata-sections -fPIC -m64" CXXFLAGS_x86_64_unknown_linux_gnu="-ffunction-sections -fdata-sections -fPIC -m64" CXX_x86_64_unknown_linux_gnu="c++" LD_LIBRARY_PATH="/home/kyubey/workspace/rust/build/x86_64-unknown-linux-gnu/stage0-sysroot/lib/rustlib/x86_64-unknown-linux-gnu/lib" LIBC_CHECK_CFG="1" RANLIB_x86_64_unknown_linux_gnu="ar s" REAL_LIBRARY_PATH_VAR="LD_LIBRARY_PATH" RUSTBUILD_NATIVE_DIR="/home/kyubey/workspace/rust/build/x86_64-unknown-linux-gnu/native" RUSTC="/home/kyubey/workspace/rust/build/bootstrap/debug/rustc" RUSTC_BOOTSTRAP="1" RUSTC_BREAK_ON_ICE="1" RUSTC_ERROR_METADATA_DST="/home/kyubey/workspace/rust/build/tmp/extended-error-metadata" RUSTC_FORCE_UNSTABLE="1" RUSTC_HOST_FUSE_LD_LLD="1" RUSTC_INSTALL_BINDIR="bin" RUSTC_LIBDIR="/home/kyubey/workspace/rust/build/x86_64-unknown-linux-gnu/stage0/lib" RUSTC_LINT_FLAGS="-Wrust_2018_idioms -Wunused_lifetimes -Wsemicolon_in_expressions_from_macros" RUSTC_REAL="/home/kyubey/workspace/rust/build/x86_64-unknown-linux-gnu/stage0/bin/rustc" RUSTC_SNAPSHOT="/home/kyubey/workspace/rust/build/x86_64-unknown-linux-gnu/stage0/bin/rustc" RUSTC_SNAPSHOT_LIBDIR="/home/kyubey/workspace/rust/build/x86_64-unknown-linux-gnu/stage0/lib" RUSTC_STAGE="0" RUSTC_SYSROOT="/home/kyubey/workspace/rust/build/x86_64-unknown-linux-gnu/stage0-sysroot" RUSTC_VERBOSE="0" RUSTDOC="/home/kyubey/workspace/rust/build/bootstrap/debug/rustdoc" RUSTDOCFLAGS="-C target-cpu=native --cfg=bootstrap -Csymbol-mangling-version=legacy -Zunstable-options -Zunstable-options --check-cfg=values(bootstrap) --check-cfg=values(stdarch_intel_sde) --check-cfg=values(no_fp_fmt_parse) --check-cfg=values(no_global_oom_handling) --check-cfg=values(no_rc) --check-cfg=values(no_sync) --check-cfg=values(freebsd12) --check-cfg=values(freebsd13) --check-cfg=values(backtrace_in_libstd) --check-cfg=values(target_env,\"libnx\") --check-cfg=values(target_arch,\"asmjs\",\"spirv\",\"nvptx\",\"xtensa\") -Clink-arg=-fuse-ld=lld -Clink-arg=-Wl,--threads=1 -Wrustdoc::invalid_codeblock_attributes --crate-version 1.72.0-dev -Zcrate-attr=doc(html_root_url=\"https://doc.rust-lang.org/nightly/\") -Zcrate-attr=warn(rust_2018_idioms)" RUSTDOC_FUSE_LD_LLD="1" RUSTDOC_LIBDIR="/home/kyubey/workspace/rust/build/x86_64-unknown-linux-gnu/stage0/lib" RUSTDOC_REAL="/path/to/nowhere/rustdoc/not/required" RUSTFLAGS="-C target-cpu=native --cfg=bootstrap -Csymbol-mangling-version=legacy -Zunstable-options -Zunstable-options --check-cfg=values(bootstrap) --check-cfg=values(stdarch_intel_sde) --check-cfg=values(no_fp_fmt_parse) --check-cfg=values(no_global_oom_handling) --check-cfg=values(no_rc) --check-cfg=values(no_sync) --check-cfg=values(freebsd12) --check-cfg=values(freebsd13) --check-cfg=values(backtrace_in_libstd) --check-cfg=values(target_env,\"libnx\") --check-cfg=values(target_arch,\"asmjs\",\"spirv\",\"nvptx\",\"xtensa\") -Zmacro-backtrace -Clink-args=-Wl,-z,origin -Clink-args=-Wl,-rpath,$ORIGIN/../lib -Clink-args=-fuse-ld=lld -Csplit-debuginfo=off -Cprefer-dynamic -Zinline-mir -Clto=off -Zcrate-attr=doc(html_root_url=\"https://doc.rust-lang.org/nightly/\")" RUST_COMPILER_RT_ROOT="/home/kyubey/workspace/rust/src/llvm-project/compiler-rt" RUST_TEST_THREADS="48" WINAPI_NO_BUNDLED_LIBRARIES="1" __CARGO_DEFAULT_LIB_METADATA="bootstrapstd" "/home/kyubey/workspace/rust/build/x86_64-unknown-linux-gnu/stage0/bin/cargo" "bench" "--target" "x86_64-unknown-linux-gnu" "-Zcheck-cfg=names,values,output" "-Zbinary-dep-depinfo" "-j" "48" "--features" " panic-unwind backtrace compiler-builtins-c" "--manifest-path" "/home/kyubey/workspace/rust/library/sysroot/Cargo.toml" "-p" "core" "--" "bench_ascii_escape_display" "--quiet" "-Z" "unstable-options" "--format" "json""#; +const RUST_INCANTATION: &[u8] = br#"AR_x86_64_unknown_linux_gnu="ar" CARGO_INCREMENTAL="0" CARGO_PROFILE_RELEASE_DEBUG="1" CARGO_PROFILE_RELEASE_DEBUG_ASSERTIONS="false" CARGO_PROFILE_RELEASE_OVERFLOW_CHECKS="false" CARGO_TARGET_DIR="/home/kyubey/workspace/rust/build/x86_64-unknown-linux-gnu/stage0-std" CC_x86_64_unknown_linux_gnu="cc" CFG_COMPILER_HOST_TRIPLE="x86_64-unknown-linux-gnu" CFG_RELEASE_CHANNEL="dev" CFLAGS_x86_64_unknown_linux_gnu="-ffunction-sections -fdata-sections -fPIC -m64" CXXFLAGS_x86_64_unknown_linux_gnu="-ffunction-sections -fdata-sections -fPIC -m64" CXX_x86_64_unknown_linux_gnu="c++" LD_LIBRARY_PATH="/home/kyubey/workspace/rust/build/x86_64-unknown-linux-gnu/stage0-sysroot/lib/rustlib/x86_64-unknown-linux-gnu/lib" LIBC_CHECK_CFG="1" RANLIB_x86_64_unknown_linux_gnu="ar s" REAL_LIBRARY_PATH_VAR="LD_LIBRARY_PATH" RUSTBUILD_NATIVE_DIR="/home/kyubey/workspace/rust/build/x86_64-unknown-linux-gnu/native" RUSTC="/home/kyubey/workspace/rust/build/bootstrap/debug/rustc" RUSTC_BOOTSTRAP="1" RUSTC_BREAK_ON_ICE="1" RUSTC_ERROR_METADATA_DST="/home/kyubey/workspace/rust/build/tmp/extended-error-metadata" RUSTC_FORCE_UNSTABLE="1" RUSTC_HOST_FUSE_LD_LLD="1" RUSTC_INSTALL_BINDIR="bin" RUSTC_LIBDIR="/home/kyubey/workspace/rust/build/x86_64-unknown-linux-gnu/stage0/lib" RUSTC_LINT_FLAGS="-Wrust_2018_idioms -Wunused_lifetimes -Wsemicolon_in_expressions_from_macros" RUSTC_REAL="/home/kyubey/workspace/rust/build/x86_64-unknown-linux-gnu/stage0/bin/rustc" RUSTC_SNAPSHOT="/home/kyubey/workspace/rust/build/x86_64-unknown-linux-gnu/stage0/bin/rustc" RUSTC_SNAPSHOT_LIBDIR="/home/kyubey/workspace/rust/build/x86_64-unknown-linux-gnu/stage0/lib" RUSTC_STAGE="0" RUSTC_SYSROOT="/home/kyubey/workspace/rust/build/x86_64-unknown-linux-gnu/stage0-sysroot" RUSTC_VERBOSE="0" RUSTDOC="/home/kyubey/workspace/rust/build/bootstrap/debug/rustdoc" RUSTDOCFLAGS="-C target-cpu=native --cfg=bootstrap -Csymbol-mangling-version=legacy -Zunstable-options -Zunstable-options --check-cfg=values(bootstrap) --check-cfg=values(no_fp_fmt_parse) --check-cfg=values(no_global_oom_handling) --check-cfg=values(no_rc) --check-cfg=values(no_sync) --check-cfg=values(freebsd12) --check-cfg=values(freebsd13) --check-cfg=values(backtrace_in_libstd) --check-cfg=values(target_env,\"libnx\") --check-cfg=values(target_arch,\"asmjs\",\"spirv\",\"nvptx\",\"xtensa\") -Clink-arg=-fuse-ld=lld -Clink-arg=-Wl,--threads=1 -Wrustdoc::invalid_codeblock_attributes --crate-version 1.72.0-dev -Zcrate-attr=doc(html_root_url=\"https://doc.rust-lang.org/nightly/\") -Zcrate-attr=warn(rust_2018_idioms)" RUSTDOC_FUSE_LD_LLD="1" RUSTDOC_LIBDIR="/home/kyubey/workspace/rust/build/x86_64-unknown-linux-gnu/stage0/lib" RUSTDOC_REAL="/path/to/nowhere/rustdoc/not/required" RUSTFLAGS="-C target-cpu=native --cfg=bootstrap -Csymbol-mangling-version=legacy -Zunstable-options -Zunstable-options --check-cfg=values(bootstrap) --check-cfg=values(no_fp_fmt_parse) --check-cfg=values(no_global_oom_handling) --check-cfg=values(no_rc) --check-cfg=values(no_sync) --check-cfg=values(freebsd12) --check-cfg=values(freebsd13) --check-cfg=values(backtrace_in_libstd) --check-cfg=values(target_env,\"libnx\") --check-cfg=values(target_arch,\"asmjs\",\"spirv\",\"nvptx\",\"xtensa\") -Zmacro-backtrace -Clink-args=-Wl,-z,origin -Clink-args=-Wl,-rpath,$ORIGIN/../lib -Clink-args=-fuse-ld=lld -Csplit-debuginfo=off -Cprefer-dynamic -Zinline-mir -Clto=off -Zcrate-attr=doc(html_root_url=\"https://doc.rust-lang.org/nightly/\")" RUST_COMPILER_RT_ROOT="/home/kyubey/workspace/rust/src/llvm-project/compiler-rt" RUST_TEST_THREADS="48" WINAPI_NO_BUNDLED_LIBRARIES="1" __CARGO_DEFAULT_LIB_METADATA="bootstrapstd" "/home/kyubey/workspace/rust/build/x86_64-unknown-linux-gnu/stage0/bin/cargo" "bench" "--target" "x86_64-unknown-linux-gnu" "-Zcheck-cfg=names,values,output" "-Zbinary-dep-depinfo" "-j" "48" "--features" " panic-unwind backtrace compiler-builtins-c" "--manifest-path" "/home/kyubey/workspace/rust/library/sysroot/Cargo.toml" "-p" "core" "--" "bench_ascii_escape_display" "--quiet" "-Z" "unstable-options" "--format" "json""#; #[bench] fn bench_ascii_escape_display_no_escape(b: &mut Bencher) { diff --git a/library/coretests/tests/cell.rs b/library/coretests/tests/cell.rs index d6a401c2b4d98..781a46c3744f5 100644 --- a/library/coretests/tests/cell.rs +++ b/library/coretests/tests/cell.rs @@ -50,10 +50,10 @@ fn smoketest_cell() { fn cell_update() { let x = Cell::new(10); - assert_eq!(x.update(|x| x + 5), 15); + x.update(|x| x + 5); assert_eq!(x.get(), 15); - assert_eq!(x.update(|x| x / 3), 5); + x.update(|x| x / 3); assert_eq!(x.get(), 5); } diff --git a/library/coretests/tests/ffi/cstr.rs b/library/coretests/tests/ffi/cstr.rs index 9bf4c21a9ab97..0d85b22c585a1 100644 --- a/library/coretests/tests/ffi/cstr.rs +++ b/library/coretests/tests/ffi/cstr.rs @@ -13,3 +13,9 @@ fn compares_as_u8s() { assert_eq!(Ord::cmp(a, b), Ord::cmp(a_bytes, b_bytes)); assert_eq!(PartialOrd::partial_cmp(a, b), PartialOrd::partial_cmp(a_bytes, b_bytes)); } + +#[test] +fn debug() { + let s = c"abc\x01\x02\n\xE2\x80\xA6\xFF"; + assert_eq!(format!("{s:?}"), r#""abc\x01\x02\n\xe2\x80\xa6\xff""#); +} diff --git a/library/coretests/tests/hint.rs b/library/coretests/tests/hint.rs new file mode 100644 index 0000000000000..032bbc1dcc80f --- /dev/null +++ b/library/coretests/tests/hint.rs @@ -0,0 +1,23 @@ +#[test] +fn select_unpredictable_drop() { + use core::cell::Cell; + struct X<'a>(&'a Cell); + impl Drop for X<'_> { + fn drop(&mut self) { + self.0.set(true); + } + } + + let a_dropped = Cell::new(false); + let b_dropped = Cell::new(false); + let a = X(&a_dropped); + let b = X(&b_dropped); + assert!(!a_dropped.get()); + assert!(!b_dropped.get()); + let selected = core::hint::select_unpredictable(core::hint::black_box(true), a, b); + assert!(!a_dropped.get()); + assert!(b_dropped.get()); + drop(selected); + assert!(a_dropped.get()); + assert!(b_dropped.get()); +} diff --git a/library/coretests/tests/iter/adapters/enumerate.rs b/library/coretests/tests/iter/adapters/enumerate.rs index b57d51c077e9b..2294f856b58d6 100644 --- a/library/coretests/tests/iter/adapters/enumerate.rs +++ b/library/coretests/tests/iter/adapters/enumerate.rs @@ -120,3 +120,13 @@ fn test_double_ended_enumerate() { assert_eq!(it.next_back(), Some((2, 3))); assert_eq!(it.next(), None); } + +#[test] +fn test_empty_iterator_enumerate_next_index() { + let mut it = empty::().enumerate(); + assert_eq!(it.next_index(), 0); + assert_eq!(it.next_index(), 0); + assert_eq!(it.next(), None); + assert_eq!(it.next_index(), 0); + assert_eq!(it.next_index(), 0); +} diff --git a/library/coretests/tests/lib.rs b/library/coretests/tests/lib.rs index 79022fec8a20c..a71c4139308aa 100644 --- a/library/coretests/tests/lib.rs +++ b/library/coretests/tests/lib.rs @@ -12,11 +12,9 @@ #![feature(async_iterator)] #![feature(bigint_helper_methods)] #![feature(bstr)] -#![feature(cell_update)] #![feature(char_max_len)] #![feature(clone_to_uninit)] #![feature(const_eval_select)] -#![feature(const_swap_nonoverlapping)] #![feature(const_trait_impl)] #![feature(core_intrinsics)] #![feature(core_intrinsics_fallbacks)] @@ -39,7 +37,6 @@ #![feature(generic_assert_internals)] #![feature(hasher_prefixfree_extras)] #![feature(hashmap_internals)] -#![feature(inline_const_pat)] #![feature(int_roundings)] #![feature(ip)] #![feature(ip_from)] @@ -64,6 +61,7 @@ #![feature(maybe_uninit_write_slice)] #![feature(min_specialization)] #![feature(never_type)] +#![feature(next_index)] #![feature(numfmt)] #![feature(pattern)] #![feature(pointer_is_aligned_to)] @@ -95,16 +93,17 @@ /// Version of `assert_matches` that ignores fancy runtime printing in const context and uses structural equality. macro_rules! assert_eq_const_safe { - ($left:expr, $right:expr) => { - assert_eq_const_safe!($left, $right, concat!(stringify!($left), " == ", stringify!($right))); + ($t:ty: $left:expr, $right:expr) => { + assert_eq_const_safe!($t: $left, $right, concat!(stringify!($left), " == ", stringify!($right))); }; - ($left:expr, $right:expr$(, $($arg:tt)+)?) => { + ($t:ty: $left:expr, $right:expr$(, $($arg:tt)+)?) => { { fn runtime() { assert_eq!($left, $right, $($($arg)*),*); } const fn compiletime() { - assert!(matches!($left, const { $right })); + const PAT: $t = $right; + assert!(matches!($left, PAT), $($($arg)*),*); } core::intrinsics::const_eval_select((), compiletime, runtime) } @@ -147,6 +146,7 @@ mod ffi; mod fmt; mod future; mod hash; +mod hint; mod intrinsics; mod io; mod iter; diff --git a/library/coretests/tests/num/int_macros.rs b/library/coretests/tests/num/int_macros.rs index bbf19d2b444f9..0d9fb9e797e1f 100644 --- a/library/coretests/tests/num/int_macros.rs +++ b/library/coretests/tests/num/int_macros.rs @@ -1,5 +1,6 @@ macro_rules! int_module { ($T:ident, $U:ident) => { + use core::num::ParseIntError; use core::ops::{BitAnd, BitOr, BitXor, Not, Shl, Shr}; use core::$T::*; @@ -32,20 +33,20 @@ macro_rules! int_module { test_runtime_and_compiletime! { fn test_rem_euclid() { - assert_eq_const_safe!((-1 as $T).rem_euclid(MIN), MAX); + assert_eq_const_safe!($T: (-1 as $T).rem_euclid(MIN), MAX); } fn test_abs() { - assert_eq_const_safe!((1 as $T).abs(), 1 as $T); - assert_eq_const_safe!((0 as $T).abs(), 0 as $T); - assert_eq_const_safe!((-1 as $T).abs(), 1 as $T); + assert_eq_const_safe!($T: (1 as $T).abs(), 1 as $T); + assert_eq_const_safe!($T: (0 as $T).abs(), 0 as $T); + assert_eq_const_safe!($T: (-1 as $T).abs(), 1 as $T); } fn test_signum() { - assert_eq_const_safe!((1 as $T).signum(), 1 as $T); - assert_eq_const_safe!((0 as $T).signum(), 0 as $T); - assert_eq_const_safe!((-0 as $T).signum(), 0 as $T); - assert_eq_const_safe!((-1 as $T).signum(), -1 as $T); + assert_eq_const_safe!($T: (1 as $T).signum(), 1 as $T); + assert_eq_const_safe!($T: (0 as $T).signum(), 0 as $T); + assert_eq_const_safe!($T: (-0 as $T).signum(), 0 as $T); + assert_eq_const_safe!($T: (-1 as $T).signum(), -1 as $T); } fn test_is_positive() { @@ -72,123 +73,123 @@ macro_rules! int_module { test_runtime_and_compiletime! { fn test_count_ones() { - assert_eq_const_safe!(A.count_ones(), 3); - assert_eq_const_safe!(B.count_ones(), 2); - assert_eq_const_safe!(C.count_ones(), 5); + assert_eq_const_safe!(u32: A.count_ones(), 3); + assert_eq_const_safe!(u32: B.count_ones(), 2); + assert_eq_const_safe!(u32: C.count_ones(), 5); } fn test_count_zeros() { - assert_eq_const_safe!(A.count_zeros(), $T::BITS - 3); - assert_eq_const_safe!(B.count_zeros(), $T::BITS - 2); - assert_eq_const_safe!(C.count_zeros(), $T::BITS - 5); + assert_eq_const_safe!(u32: A.count_zeros(), $T::BITS - 3); + assert_eq_const_safe!(u32: B.count_zeros(), $T::BITS - 2); + assert_eq_const_safe!(u32: C.count_zeros(), $T::BITS - 5); } fn test_leading_trailing_ones() { const A: $T = 0b0101_1111; - assert_eq_const_safe!(A.trailing_ones(), 5); - assert_eq_const_safe!((!A).leading_ones(), $T::BITS - 7); + assert_eq_const_safe!(u32: A.trailing_ones(), 5); + assert_eq_const_safe!(u32: (!A).leading_ones(), $T::BITS - 7); - assert_eq_const_safe!(A.reverse_bits().leading_ones(), 5); + assert_eq_const_safe!(u32: A.reverse_bits().leading_ones(), 5); - assert_eq_const_safe!(_1.leading_ones(), $T::BITS); - assert_eq_const_safe!(_1.trailing_ones(), $T::BITS); + assert_eq_const_safe!(u32: _1.leading_ones(), $T::BITS); + assert_eq_const_safe!(u32: _1.trailing_ones(), $T::BITS); - assert_eq_const_safe!((_1 << 1).trailing_ones(), 0); - assert_eq_const_safe!(MAX.leading_ones(), 0); + assert_eq_const_safe!(u32: (_1 << 1).trailing_ones(), 0); + assert_eq_const_safe!(u32: MAX.leading_ones(), 0); - assert_eq_const_safe!((_1 << 1).leading_ones(), $T::BITS - 1); - assert_eq_const_safe!(MAX.trailing_ones(), $T::BITS - 1); + assert_eq_const_safe!(u32: (_1 << 1).leading_ones(), $T::BITS - 1); + assert_eq_const_safe!(u32: MAX.trailing_ones(), $T::BITS - 1); - assert_eq_const_safe!(_0.leading_ones(), 0); - assert_eq_const_safe!(_0.trailing_ones(), 0); + assert_eq_const_safe!(u32: _0.leading_ones(), 0); + assert_eq_const_safe!(u32: _0.trailing_ones(), 0); const X: $T = 0b0010_1100; - assert_eq_const_safe!(X.leading_ones(), 0); - assert_eq_const_safe!(X.trailing_ones(), 0); + assert_eq_const_safe!(u32: X.leading_ones(), 0); + assert_eq_const_safe!(u32: X.trailing_ones(), 0); } fn test_rotate() { - assert_eq_const_safe!(A.rotate_left(6).rotate_right(2).rotate_right(4), A); - assert_eq_const_safe!(B.rotate_left(3).rotate_left(2).rotate_right(5), B); - assert_eq_const_safe!(C.rotate_left(6).rotate_right(2).rotate_right(4), C); + assert_eq_const_safe!($T: A.rotate_left(6).rotate_right(2).rotate_right(4), A); + assert_eq_const_safe!($T: B.rotate_left(3).rotate_left(2).rotate_right(5), B); + assert_eq_const_safe!($T: C.rotate_left(6).rotate_right(2).rotate_right(4), C); // Rotating these should make no difference // // We test using 124 bits because to ensure that overlong bit shifts do // not cause undefined behavior. See #10183. - assert_eq_const_safe!(_0.rotate_left(124), _0); - assert_eq_const_safe!(_1.rotate_left(124), _1); - assert_eq_const_safe!(_0.rotate_right(124), _0); - assert_eq_const_safe!(_1.rotate_right(124), _1); + assert_eq_const_safe!($T: _0.rotate_left(124), _0); + assert_eq_const_safe!($T: _1.rotate_left(124), _1); + assert_eq_const_safe!($T: _0.rotate_right(124), _0); + assert_eq_const_safe!($T: _1.rotate_right(124), _1); // Rotating by 0 should have no effect - assert_eq_const_safe!(A.rotate_left(0), A); - assert_eq_const_safe!(B.rotate_left(0), B); - assert_eq_const_safe!(C.rotate_left(0), C); + assert_eq_const_safe!($T: A.rotate_left(0), A); + assert_eq_const_safe!($T: B.rotate_left(0), B); + assert_eq_const_safe!($T: C.rotate_left(0), C); // Rotating by a multiple of word size should also have no effect - assert_eq_const_safe!(A.rotate_left(128), A); - assert_eq_const_safe!(B.rotate_left(128), B); - assert_eq_const_safe!(C.rotate_left(128), C); + assert_eq_const_safe!($T: A.rotate_left(128), A); + assert_eq_const_safe!($T: B.rotate_left(128), B); + assert_eq_const_safe!($T: C.rotate_left(128), C); } fn test_swap_bytes() { - assert_eq_const_safe!(A.swap_bytes().swap_bytes(), A); - assert_eq_const_safe!(B.swap_bytes().swap_bytes(), B); - assert_eq_const_safe!(C.swap_bytes().swap_bytes(), C); + assert_eq_const_safe!($T: A.swap_bytes().swap_bytes(), A); + assert_eq_const_safe!($T: B.swap_bytes().swap_bytes(), B); + assert_eq_const_safe!($T: C.swap_bytes().swap_bytes(), C); // Swapping these should make no difference - assert_eq_const_safe!(_0.swap_bytes(), _0); - assert_eq_const_safe!(_1.swap_bytes(), _1); + assert_eq_const_safe!($T: _0.swap_bytes(), _0); + assert_eq_const_safe!($T: _1.swap_bytes(), _1); } fn test_le() { - assert_eq_const_safe!($T::from_le(A.to_le()), A); - assert_eq_const_safe!($T::from_le(B.to_le()), B); - assert_eq_const_safe!($T::from_le(C.to_le()), C); - assert_eq_const_safe!($T::from_le(_0), _0); - assert_eq_const_safe!($T::from_le(_1), _1); - assert_eq_const_safe!(_0.to_le(), _0); - assert_eq_const_safe!(_1.to_le(), _1); + assert_eq_const_safe!($T: $T::from_le(A.to_le()), A); + assert_eq_const_safe!($T: $T::from_le(B.to_le()), B); + assert_eq_const_safe!($T: $T::from_le(C.to_le()), C); + assert_eq_const_safe!($T: $T::from_le(_0), _0); + assert_eq_const_safe!($T: $T::from_le(_1), _1); + assert_eq_const_safe!($T: _0.to_le(), _0); + assert_eq_const_safe!($T: _1.to_le(), _1); } fn test_be() { - assert_eq_const_safe!($T::from_be(A.to_be()), A); - assert_eq_const_safe!($T::from_be(B.to_be()), B); - assert_eq_const_safe!($T::from_be(C.to_be()), C); - assert_eq_const_safe!($T::from_be(_0), _0); - assert_eq_const_safe!($T::from_be(_1), _1); - assert_eq_const_safe!(_0.to_be(), _0); - assert_eq_const_safe!(_1.to_be(), _1); + assert_eq_const_safe!($T: $T::from_be(A.to_be()), A); + assert_eq_const_safe!($T: $T::from_be(B.to_be()), B); + assert_eq_const_safe!($T: $T::from_be(C.to_be()), C); + assert_eq_const_safe!($T: $T::from_be(_0), _0); + assert_eq_const_safe!($T: $T::from_be(_1), _1); + assert_eq_const_safe!($T: _0.to_be(), _0); + assert_eq_const_safe!($T: _1.to_be(), _1); } fn test_signed_checked_div() { - assert_eq_const_safe!((10 as $T).checked_div(2), Some(5)); - assert_eq_const_safe!((5 as $T).checked_div(0), None); - assert_eq_const_safe!(isize::MIN.checked_div(-1), None); + assert_eq_const_safe!(Option<$T>: (10 as $T).checked_div(2), Some(5)); + assert_eq_const_safe!(Option<$T>: (5 as $T).checked_div(0), None); + assert_eq_const_safe!(Option<$T>: $T::MIN.checked_div(-1), None); } fn test_saturating_abs() { - assert_eq_const_safe!((0 as $T).saturating_abs(), 0); - assert_eq_const_safe!((123 as $T).saturating_abs(), 123); - assert_eq_const_safe!((-123 as $T).saturating_abs(), 123); - assert_eq_const_safe!((MAX - 2).saturating_abs(), MAX - 2); - assert_eq_const_safe!((MAX - 1).saturating_abs(), MAX - 1); - assert_eq_const_safe!(MAX.saturating_abs(), MAX); - assert_eq_const_safe!((MIN + 2).saturating_abs(), MAX - 1); - assert_eq_const_safe!((MIN + 1).saturating_abs(), MAX); - assert_eq_const_safe!(MIN.saturating_abs(), MAX); + assert_eq_const_safe!($T: (0 as $T).saturating_abs(), 0); + assert_eq_const_safe!($T: (123 as $T).saturating_abs(), 123); + assert_eq_const_safe!($T: (-123 as $T).saturating_abs(), 123); + assert_eq_const_safe!($T: (MAX - 2).saturating_abs(), MAX - 2); + assert_eq_const_safe!($T: (MAX - 1).saturating_abs(), MAX - 1); + assert_eq_const_safe!($T: MAX.saturating_abs(), MAX); + assert_eq_const_safe!($T: (MIN + 2).saturating_abs(), MAX - 1); + assert_eq_const_safe!($T: (MIN + 1).saturating_abs(), MAX); + assert_eq_const_safe!($T: MIN.saturating_abs(), MAX); } fn test_saturating_neg() { - assert_eq_const_safe!((0 as $T).saturating_neg(), 0); - assert_eq_const_safe!((123 as $T).saturating_neg(), -123); - assert_eq_const_safe!((-123 as $T).saturating_neg(), 123); - assert_eq_const_safe!((MAX - 2).saturating_neg(), MIN + 3); - assert_eq_const_safe!((MAX - 1).saturating_neg(), MIN + 2); - assert_eq_const_safe!(MAX.saturating_neg(), MIN + 1); - assert_eq_const_safe!((MIN + 2).saturating_neg(), MAX - 1); - assert_eq_const_safe!((MIN + 1).saturating_neg(), MAX); - assert_eq_const_safe!(MIN.saturating_neg(), MAX); + assert_eq_const_safe!($T: (0 as $T).saturating_neg(), 0); + assert_eq_const_safe!($T: (123 as $T).saturating_neg(), -123); + assert_eq_const_safe!($T: (-123 as $T).saturating_neg(), 123); + assert_eq_const_safe!($T: (MAX - 2).saturating_neg(), MIN + 3); + assert_eq_const_safe!($T: (MAX - 1).saturating_neg(), MIN + 2); + assert_eq_const_safe!($T: MAX.saturating_neg(), MIN + 1); + assert_eq_const_safe!($T: (MIN + 2).saturating_neg(), MAX - 1); + assert_eq_const_safe!($T: (MIN + 1).saturating_neg(), MAX); + assert_eq_const_safe!($T: MIN.saturating_neg(), MAX); } } @@ -250,23 +251,23 @@ macro_rules! int_module { test_runtime_and_compiletime! { fn test_from_str_radix() { - assert_eq_const_safe!($T::from_str_radix("123", 10), Ok(123 as $T)); - assert_eq_const_safe!($T::from_str_radix("1001", 2), Ok(9 as $T)); - assert_eq_const_safe!($T::from_str_radix("123", 8), Ok(83 as $T)); - assert_eq_const_safe!(i32::from_str_radix("123", 16), Ok(291 as i32)); - assert_eq_const_safe!(i32::from_str_radix("ffff", 16), Ok(65535 as i32)); - assert_eq_const_safe!(i32::from_str_radix("FFFF", 16), Ok(65535 as i32)); - assert_eq_const_safe!($T::from_str_radix("z", 36), Ok(35 as $T)); - assert_eq_const_safe!($T::from_str_radix("Z", 36), Ok(35 as $T)); - - assert_eq_const_safe!($T::from_str_radix("-123", 10), Ok(-123 as $T)); - assert_eq_const_safe!($T::from_str_radix("-1001", 2), Ok(-9 as $T)); - assert_eq_const_safe!($T::from_str_radix("-123", 8), Ok(-83 as $T)); - assert_eq_const_safe!(i32::from_str_radix("-123", 16), Ok(-291 as i32)); - assert_eq_const_safe!(i32::from_str_radix("-ffff", 16), Ok(-65535 as i32)); - assert_eq_const_safe!(i32::from_str_radix("-FFFF", 16), Ok(-65535 as i32)); - assert_eq_const_safe!($T::from_str_radix("-z", 36), Ok(-35 as $T)); - assert_eq_const_safe!($T::from_str_radix("-Z", 36), Ok(-35 as $T)); + assert_eq_const_safe!(Result<$T, ParseIntError>: $T::from_str_radix("123", 10), Ok(123 as $T)); + assert_eq_const_safe!(Result<$T, ParseIntError>: $T::from_str_radix("1001", 2), Ok(9 as $T)); + assert_eq_const_safe!(Result<$T, ParseIntError>: $T::from_str_radix("123", 8), Ok(83 as $T)); + assert_eq_const_safe!(Result: i32::from_str_radix("123", 16), Ok(291 as i32)); + assert_eq_const_safe!(Result: i32::from_str_radix("ffff", 16), Ok(65535 as i32)); + assert_eq_const_safe!(Result: i32::from_str_radix("FFFF", 16), Ok(65535 as i32)); + assert_eq_const_safe!(Result<$T, ParseIntError>: $T::from_str_radix("z", 36), Ok(35 as $T)); + assert_eq_const_safe!(Result<$T, ParseIntError>: $T::from_str_radix("Z", 36), Ok(35 as $T)); + + assert_eq_const_safe!(Result<$T, ParseIntError>: $T::from_str_radix("-123", 10), Ok(-123 as $T)); + assert_eq_const_safe!(Result<$T, ParseIntError>: $T::from_str_radix("-1001", 2), Ok(-9 as $T)); + assert_eq_const_safe!(Result<$T, ParseIntError>: $T::from_str_radix("-123", 8), Ok(-83 as $T)); + assert_eq_const_safe!(Result: i32::from_str_radix("-123", 16), Ok(-291 as i32)); + assert_eq_const_safe!(Result: i32::from_str_radix("-ffff", 16), Ok(-65535 as i32)); + assert_eq_const_safe!(Result: i32::from_str_radix("-FFFF", 16), Ok(-65535 as i32)); + assert_eq_const_safe!(Result<$T, ParseIntError>: $T::from_str_radix("-z", 36), Ok(-35 as $T)); + assert_eq_const_safe!(Result<$T, ParseIntError>: $T::from_str_radix("-Z", 36), Ok(-35 as $T)); assert!($T::from_str_radix("Z", 35).is_err()); assert!($T::from_str_radix("-9", 2).is_err()); @@ -277,16 +278,16 @@ macro_rules! int_module { fn test_pow() { { const R: $T = 2; - assert_eq_const_safe!(R.pow(2), 4 as $T); - assert_eq_const_safe!(R.pow(0), 1 as $T); - assert_eq_const_safe!(R.wrapping_pow(2), 4 as $T); - assert_eq_const_safe!(R.wrapping_pow(0), 1 as $T); - assert_eq_const_safe!(R.checked_pow(2), Some(4 as $T)); - assert_eq_const_safe!(R.checked_pow(0), Some(1 as $T)); - assert_eq_const_safe!(R.overflowing_pow(2), (4 as $T, false)); - assert_eq_const_safe!(R.overflowing_pow(0), (1 as $T, false)); - assert_eq_const_safe!(R.saturating_pow(2), 4 as $T); - assert_eq_const_safe!(R.saturating_pow(0), 1 as $T); + assert_eq_const_safe!($T: R.pow(2), 4 as $T); + assert_eq_const_safe!($T: R.pow(0), 1 as $T); + assert_eq_const_safe!($T: R.wrapping_pow(2), 4 as $T); + assert_eq_const_safe!($T: R.wrapping_pow(0), 1 as $T); + assert_eq_const_safe!(Option<$T>: R.checked_pow(2), Some(4 as $T)); + assert_eq_const_safe!(Option<$T>: R.checked_pow(0), Some(1 as $T)); + assert_eq_const_safe!(($T, bool): R.overflowing_pow(2), (4 as $T, false)); + assert_eq_const_safe!(($T, bool): R.overflowing_pow(0), (1 as $T, false)); + assert_eq_const_safe!($T: R.saturating_pow(2), 4 as $T); + assert_eq_const_safe!($T: R.saturating_pow(0), 1 as $T); } { @@ -295,221 +296,227 @@ macro_rules! int_module { // if itest::MAX == 2^j-1, then itest is a `j` bit int, // so that `itest::MAX*itest::MAX == 2^(2*j)-2^(j+1)+1`, // thussaturating_pow the overflowing result is exactly 1. - assert_eq_const_safe!(R.wrapping_pow(2), 1 as $T); - assert_eq_const_safe!(R.checked_pow(2), None); - assert_eq_const_safe!(R.overflowing_pow(2), (1 as $T, true)); - assert_eq_const_safe!(R.saturating_pow(2), MAX); + assert_eq_const_safe!($T: R.wrapping_pow(2), 1 as $T); + assert_eq_const_safe!(Option<$T>: R.checked_pow(2), None); + assert_eq_const_safe!(($T, bool): R.overflowing_pow(2), (1 as $T, true)); + assert_eq_const_safe!($T: R.saturating_pow(2), MAX); } { // test for negative exponent. const R: $T = -2; - assert_eq_const_safe!(R.pow(2), 4 as $T); - assert_eq_const_safe!(R.pow(3), -8 as $T); - assert_eq_const_safe!(R.pow(0), 1 as $T); - assert_eq_const_safe!(R.wrapping_pow(2), 4 as $T); - assert_eq_const_safe!(R.wrapping_pow(3), -8 as $T); - assert_eq_const_safe!(R.wrapping_pow(0), 1 as $T); - assert_eq_const_safe!(R.checked_pow(2), Some(4 as $T)); - assert_eq_const_safe!(R.checked_pow(3), Some(-8 as $T)); - assert_eq_const_safe!(R.checked_pow(0), Some(1 as $T)); - assert_eq_const_safe!(R.overflowing_pow(2), (4 as $T, false)); - assert_eq_const_safe!(R.overflowing_pow(3), (-8 as $T, false)); - assert_eq_const_safe!(R.overflowing_pow(0), (1 as $T, false)); - assert_eq_const_safe!(R.saturating_pow(2), 4 as $T); - assert_eq_const_safe!(R.saturating_pow(3), -8 as $T); - assert_eq_const_safe!(R.saturating_pow(0), 1 as $T); + assert_eq_const_safe!($T: R.pow(2), 4 as $T); + assert_eq_const_safe!($T: R.pow(3), -8 as $T); + assert_eq_const_safe!($T: R.pow(0), 1 as $T); + assert_eq_const_safe!($T: R.wrapping_pow(2), 4 as $T); + assert_eq_const_safe!($T: R.wrapping_pow(3), -8 as $T); + assert_eq_const_safe!($T: R.wrapping_pow(0), 1 as $T); + assert_eq_const_safe!(Option<$T>: R.checked_pow(2), Some(4 as $T)); + assert_eq_const_safe!(Option<$T>: R.checked_pow(3), Some(-8 as $T)); + assert_eq_const_safe!(Option<$T>: R.checked_pow(0), Some(1 as $T)); + assert_eq_const_safe!(($T, bool): R.overflowing_pow(2), (4 as $T, false)); + assert_eq_const_safe!(($T, bool): R.overflowing_pow(3), (-8 as $T, false)); + assert_eq_const_safe!(($T, bool): R.overflowing_pow(0), (1 as $T, false)); + assert_eq_const_safe!($T: R.saturating_pow(2), 4 as $T); + assert_eq_const_safe!($T: R.saturating_pow(3), -8 as $T); + assert_eq_const_safe!($T: R.saturating_pow(0), 1 as $T); } } fn test_div_floor() { const A: $T = 8; const B: $T = 3; - assert_eq_const_safe!(A.div_floor(B), 2); - assert_eq_const_safe!(A.div_floor(-B), -3); - assert_eq_const_safe!((-A).div_floor(B), -3); - assert_eq_const_safe!((-A).div_floor(-B), 2); + assert_eq_const_safe!($T: A.div_floor(B), 2); + assert_eq_const_safe!($T: A.div_floor(-B), -3); + assert_eq_const_safe!($T: (-A).div_floor(B), -3); + assert_eq_const_safe!($T: (-A).div_floor(-B), 2); } fn test_div_ceil() { const A: $T = 8; const B: $T = 3; - assert_eq_const_safe!(A.div_ceil(B), 3); - assert_eq_const_safe!(A.div_ceil(-B), -2); - assert_eq_const_safe!((-A).div_ceil(B), -2); - assert_eq_const_safe!((-A).div_ceil(-B), 3); + assert_eq_const_safe!($T: A.div_ceil(B), 3); + assert_eq_const_safe!($T: A.div_ceil(-B), -2); + assert_eq_const_safe!($T: (-A).div_ceil(B), -2); + assert_eq_const_safe!($T: (-A).div_ceil(-B), 3); } fn test_next_multiple_of() { - assert_eq_const_safe!((16 as $T).next_multiple_of(8), 16); - assert_eq_const_safe!((23 as $T).next_multiple_of(8), 24); - assert_eq_const_safe!((16 as $T).next_multiple_of(-8), 16); - assert_eq_const_safe!((23 as $T).next_multiple_of(-8), 16); - assert_eq_const_safe!((-16 as $T).next_multiple_of(8), -16); - assert_eq_const_safe!((-23 as $T).next_multiple_of(8), -16); - assert_eq_const_safe!((-16 as $T).next_multiple_of(-8), -16); - assert_eq_const_safe!((-23 as $T).next_multiple_of(-8), -24); - assert_eq_const_safe!(MIN.next_multiple_of(-1), MIN); + assert_eq_const_safe!($T: (16 as $T).next_multiple_of(8), 16); + assert_eq_const_safe!($T: (23 as $T).next_multiple_of(8), 24); + assert_eq_const_safe!($T: (16 as $T).next_multiple_of(-8), 16); + assert_eq_const_safe!($T: (23 as $T).next_multiple_of(-8), 16); + assert_eq_const_safe!($T: (-16 as $T).next_multiple_of(8), -16); + assert_eq_const_safe!($T: (-23 as $T).next_multiple_of(8), -16); + assert_eq_const_safe!($T: (-16 as $T).next_multiple_of(-8), -16); + assert_eq_const_safe!($T: (-23 as $T).next_multiple_of(-8), -24); + assert_eq_const_safe!($T: MIN.next_multiple_of(-1), MIN); } fn test_checked_next_multiple_of() { - assert_eq_const_safe!((16 as $T).checked_next_multiple_of(8), Some(16)); - assert_eq_const_safe!((23 as $T).checked_next_multiple_of(8), Some(24)); - assert_eq_const_safe!((16 as $T).checked_next_multiple_of(-8), Some(16)); - assert_eq_const_safe!((23 as $T).checked_next_multiple_of(-8), Some(16)); - assert_eq_const_safe!((-16 as $T).checked_next_multiple_of(8), Some(-16)); - assert_eq_const_safe!((-23 as $T).checked_next_multiple_of(8), Some(-16)); - assert_eq_const_safe!((-16 as $T).checked_next_multiple_of(-8), Some(-16)); - assert_eq_const_safe!((-23 as $T).checked_next_multiple_of(-8), Some(-24)); - assert_eq_const_safe!((1 as $T).checked_next_multiple_of(0), None); - assert_eq_const_safe!(MAX.checked_next_multiple_of(2), None); - assert_eq_const_safe!(MIN.checked_next_multiple_of(-3), None); - assert_eq_const_safe!(MIN.checked_next_multiple_of(-1), Some(MIN)); + assert_eq_const_safe!(Option<$T>: (16 as $T).checked_next_multiple_of(8), Some(16)); + assert_eq_const_safe!(Option<$T>: (23 as $T).checked_next_multiple_of(8), Some(24)); + assert_eq_const_safe!(Option<$T>: (16 as $T).checked_next_multiple_of(-8), Some(16)); + assert_eq_const_safe!(Option<$T>: (23 as $T).checked_next_multiple_of(-8), Some(16)); + assert_eq_const_safe!(Option<$T>: (-16 as $T).checked_next_multiple_of(8), Some(-16)); + assert_eq_const_safe!(Option<$T>: (-23 as $T).checked_next_multiple_of(8), Some(-16)); + assert_eq_const_safe!(Option<$T>: (-16 as $T).checked_next_multiple_of(-8), Some(-16)); + assert_eq_const_safe!(Option<$T>: (-23 as $T).checked_next_multiple_of(-8), Some(-24)); + assert_eq_const_safe!(Option<$T>: (1 as $T).checked_next_multiple_of(0), None); + assert_eq_const_safe!(Option<$T>: MAX.checked_next_multiple_of(2), None); + assert_eq_const_safe!(Option<$T>: MIN.checked_next_multiple_of(-3), None); + assert_eq_const_safe!(Option<$T>: MIN.checked_next_multiple_of(-1), Some(MIN)); } fn test_carrying_add() { - assert_eq_const_safe!(MAX.carrying_add(1, false), (MIN, true)); - assert_eq_const_safe!(MAX.carrying_add(0, true), (MIN, true)); - assert_eq_const_safe!(MAX.carrying_add(1, true), (MIN + 1, true)); - assert_eq_const_safe!(MAX.carrying_add(-1, false), (MAX - 1, false)); - assert_eq_const_safe!(MAX.carrying_add(-1, true), (MAX, false)); // no intermediate overflow - assert_eq_const_safe!(MIN.carrying_add(-1, false), (MAX, true)); - assert_eq_const_safe!(MIN.carrying_add(-1, true), (MIN, false)); // no intermediate overflow - assert_eq_const_safe!((0 as $T).carrying_add(MAX, true), (MIN, true)); - assert_eq_const_safe!((0 as $T).carrying_add(MIN, true), (MIN + 1, false)); + assert_eq_const_safe!(($T, bool): MAX.carrying_add(1, false), (MIN, true)); + assert_eq_const_safe!(($T, bool): MAX.carrying_add(0, true), (MIN, true)); + assert_eq_const_safe!(($T, bool): MAX.carrying_add(1, true), (MIN + 1, true)); + assert_eq_const_safe!(($T, bool): MAX.carrying_add(-1, false), (MAX - 1, false)); + assert_eq_const_safe!(($T, bool): MAX.carrying_add(-1, true), (MAX, false)); // no intermediate overflow + assert_eq_const_safe!(($T, bool): MIN.carrying_add(-1, false), (MAX, true)); + assert_eq_const_safe!(($T, bool): MIN.carrying_add(-1, true), (MIN, false)); // no intermediate overflow + assert_eq_const_safe!(($T, bool): (0 as $T).carrying_add(MAX, true), (MIN, true)); + assert_eq_const_safe!(($T, bool): (0 as $T).carrying_add(MIN, true), (MIN + 1, false)); } fn test_borrowing_sub() { - assert_eq_const_safe!(MIN.borrowing_sub(1, false), (MAX, true)); - assert_eq_const_safe!(MIN.borrowing_sub(0, true), (MAX, true)); - assert_eq_const_safe!(MIN.borrowing_sub(1, true), (MAX - 1, true)); - assert_eq_const_safe!(MIN.borrowing_sub(-1, false), (MIN + 1, false)); - assert_eq_const_safe!(MIN.borrowing_sub(-1, true), (MIN, false)); // no intermediate overflow - assert_eq_const_safe!(MAX.borrowing_sub(-1, false), (MIN, true)); - assert_eq_const_safe!(MAX.borrowing_sub(-1, true), (MAX, false)); // no intermediate overflow - assert_eq_const_safe!((0 as $T).borrowing_sub(MIN, false), (MIN, true)); - assert_eq_const_safe!((0 as $T).borrowing_sub(MIN, true), (MAX, false)); + assert_eq_const_safe!(($T, bool): MIN.borrowing_sub(1, false), (MAX, true)); + assert_eq_const_safe!(($T, bool): MIN.borrowing_sub(0, true), (MAX, true)); + assert_eq_const_safe!(($T, bool): MIN.borrowing_sub(1, true), (MAX - 1, true)); + assert_eq_const_safe!(($T, bool): MIN.borrowing_sub(-1, false), (MIN + 1, false)); + assert_eq_const_safe!(($T, bool): MIN.borrowing_sub(-1, true), (MIN, false)); // no intermediate overflow + assert_eq_const_safe!(($T, bool): MAX.borrowing_sub(-1, false), (MIN, true)); + assert_eq_const_safe!(($T, bool): MAX.borrowing_sub(-1, true), (MAX, false)); // no intermediate overflow + assert_eq_const_safe!(($T, bool): (0 as $T).borrowing_sub(MIN, false), (MIN, true)); + assert_eq_const_safe!(($T, bool): (0 as $T).borrowing_sub(MIN, true), (MAX, false)); } fn test_widening_mul() { - assert_eq_const_safe!(MAX.widening_mul(MAX), (1, MAX / 2)); - assert_eq_const_safe!(MIN.widening_mul(MAX), (MIN as $U, MIN / 2)); - assert_eq_const_safe!(MIN.widening_mul(MIN), (0, MAX / 2 + 1)); + assert_eq_const_safe!(($U, $T): MAX.widening_mul(MAX), (1, MAX / 2)); + assert_eq_const_safe!(($U, $T): MIN.widening_mul(MAX), (MIN as $U, MIN / 2)); + assert_eq_const_safe!(($U, $T): MIN.widening_mul(MIN), (0, MAX / 2 + 1)); } fn test_carrying_mul() { - assert_eq_const_safe!(MAX.carrying_mul(MAX, 0), (1, MAX / 2)); - assert_eq_const_safe!( + assert_eq_const_safe!(($U, $T): MAX.carrying_mul(MAX, 0), (1, MAX / 2)); + assert_eq_const_safe!(($U, $T): MAX.carrying_mul(MAX, MAX), (UMAX / 2 + 1, MAX / 2) ); - assert_eq_const_safe!( + assert_eq_const_safe!(($U, $T): MAX.carrying_mul(MAX, MIN), (UMAX / 2 + 2, MAX / 2 - 1) ); - assert_eq_const_safe!(MIN.carrying_mul(MAX, 0), (MIN as $U, MIN / 2)); - assert_eq_const_safe!(MIN.carrying_mul(MAX, MAX), (UMAX, MIN / 2)); - assert_eq_const_safe!(MIN.carrying_mul(MAX, MIN), (0, MIN / 2)); - assert_eq_const_safe!(MIN.carrying_mul(MIN, 0), (0, MAX / 2 + 1)); - assert_eq_const_safe!( + assert_eq_const_safe!(($U, $T): MIN.carrying_mul(MAX, 0), (MIN as $U, MIN / 2)); + assert_eq_const_safe!(($U, $T): MIN.carrying_mul(MAX, MAX), (UMAX, MIN / 2)); + assert_eq_const_safe!(($U, $T): MIN.carrying_mul(MAX, MIN), (0, MIN / 2)); + assert_eq_const_safe!(($U, $T): MIN.carrying_mul(MIN, 0), (0, MAX / 2 + 1)); + assert_eq_const_safe!(($U, $T): MIN.carrying_mul(MIN, MAX), (UMAX / 2, MAX / 2 + 1) ); - assert_eq_const_safe!( + assert_eq_const_safe!(($U, $T): MIN.carrying_mul(MIN, MIN), (UMAX / 2 + 1, MAX / 2) ); } fn test_carrying_mul_add() { - assert_eq_const_safe!(MAX.carrying_mul_add(MAX, 0, 0), (1, MAX / 2)); - assert_eq_const_safe!( + assert_eq_const_safe!(($U, $T): MAX.carrying_mul_add(MAX, 0, 0), (1, MAX / 2)); + assert_eq_const_safe!(($U, $T): MAX.carrying_mul_add(MAX, MAX, 0), (UMAX / 2 + 1, MAX / 2) ); - assert_eq_const_safe!( + assert_eq_const_safe!(($U, $T): MAX.carrying_mul_add(MAX, MIN, 0), (UMAX / 2 + 2, MAX / 2 - 1) ); - assert_eq_const_safe!( + assert_eq_const_safe!(($U, $T): MAX.carrying_mul_add(MAX, MAX, MAX), (UMAX, MAX / 2) ); - assert_eq_const_safe!( + assert_eq_const_safe!(($U, $T): MAX.carrying_mul_add(MAX, MAX, MIN), (0, MAX / 2) ); - assert_eq_const_safe!( + assert_eq_const_safe!(($U, $T): MAX.carrying_mul_add(MAX, MIN, MIN), (1, MAX / 2 - 1) ); - assert_eq_const_safe!( + assert_eq_const_safe!(($U, $T): MIN.carrying_mul_add(MAX, 0, 0), (MIN as $U, MIN / 2) ); - assert_eq_const_safe!( + assert_eq_const_safe!(($U, $T): MIN.carrying_mul_add(MAX, MAX, 0), (UMAX, MIN / 2) ); - assert_eq_const_safe!(MIN.carrying_mul_add(MAX, MIN, 0), (0, MIN / 2)); - assert_eq_const_safe!( + assert_eq_const_safe!(($U, $T): + MIN.carrying_mul_add(MAX, MIN, 0), + (0, MIN / 2) + ); + assert_eq_const_safe!(($U, $T): MIN.carrying_mul_add(MAX, MAX, MAX), (UMAX / 2 - 1, MIN / 2 + 1) ); - assert_eq_const_safe!( + assert_eq_const_safe!(($U, $T): MIN.carrying_mul_add(MAX, MAX, MIN), (UMAX / 2, MIN / 2) ); - assert_eq_const_safe!( + assert_eq_const_safe!(($U, $T): MIN.carrying_mul_add(MAX, MIN, MIN), (UMAX / 2 + 1, MIN / 2 - 1) ); - assert_eq_const_safe!(MIN.carrying_mul_add(MIN, 0, 0), (0, MAX / 2 + 1)); - assert_eq_const_safe!( + assert_eq_const_safe!(($U, $T): + MIN.carrying_mul_add(MIN, 0, 0), + (0, MAX / 2 + 1) + ); + assert_eq_const_safe!(($U, $T): MIN.carrying_mul_add(MIN, MAX, 0), (UMAX / 2, MAX / 2 + 1) ); - assert_eq_const_safe!( + assert_eq_const_safe!(($U, $T): MIN.carrying_mul_add(MIN, MIN, 0), (UMAX / 2 + 1, MAX / 2) ); - assert_eq_const_safe!( + assert_eq_const_safe!(($U, $T): MIN.carrying_mul_add(MIN, MAX, MAX), (UMAX - 1, MAX / 2 + 1) ); - assert_eq_const_safe!( + assert_eq_const_safe!(($U, $T): MIN.carrying_mul_add(MIN, MAX, MIN), (UMAX, MAX / 2) ); - assert_eq_const_safe!( + assert_eq_const_safe!(($U, $T): MIN.carrying_mul_add(MIN, MIN, MIN), (0, MAX / 2) ); } fn test_midpoint() { - assert_eq_const_safe!(<$T>::midpoint(1, 3), 2); - assert_eq_const_safe!(<$T>::midpoint(3, 1), 2); + assert_eq_const_safe!($T: <$T>::midpoint(1, 3), 2); + assert_eq_const_safe!($T: <$T>::midpoint(3, 1), 2); - assert_eq_const_safe!(<$T>::midpoint(0, 0), 0); - assert_eq_const_safe!(<$T>::midpoint(0, 2), 1); - assert_eq_const_safe!(<$T>::midpoint(2, 0), 1); - assert_eq_const_safe!(<$T>::midpoint(2, 2), 2); + assert_eq_const_safe!($T: <$T>::midpoint(0, 0), 0); + assert_eq_const_safe!($T: <$T>::midpoint(0, 2), 1); + assert_eq_const_safe!($T: <$T>::midpoint(2, 0), 1); + assert_eq_const_safe!($T: <$T>::midpoint(2, 2), 2); - assert_eq_const_safe!(<$T>::midpoint(1, 4), 2); - assert_eq_const_safe!(<$T>::midpoint(4, 1), 2); - assert_eq_const_safe!(<$T>::midpoint(3, 4), 3); - assert_eq_const_safe!(<$T>::midpoint(4, 3), 3); + assert_eq_const_safe!($T: <$T>::midpoint(1, 4), 2); + assert_eq_const_safe!($T: <$T>::midpoint(4, 1), 2); + assert_eq_const_safe!($T: <$T>::midpoint(3, 4), 3); + assert_eq_const_safe!($T: <$T>::midpoint(4, 3), 3); - assert_eq_const_safe!(<$T>::midpoint(<$T>::MIN, <$T>::MAX), 0); - assert_eq_const_safe!(<$T>::midpoint(<$T>::MAX, <$T>::MIN), 0); - assert_eq_const_safe!(<$T>::midpoint(<$T>::MIN, <$T>::MIN), <$T>::MIN); - assert_eq_const_safe!(<$T>::midpoint(<$T>::MAX, <$T>::MAX), <$T>::MAX); + assert_eq_const_safe!($T: <$T>::midpoint(<$T>::MIN, <$T>::MAX), 0); + assert_eq_const_safe!($T: <$T>::midpoint(<$T>::MAX, <$T>::MIN), 0); + assert_eq_const_safe!($T: <$T>::midpoint(<$T>::MIN, <$T>::MIN), <$T>::MIN); + assert_eq_const_safe!($T: <$T>::midpoint(<$T>::MAX, <$T>::MAX), <$T>::MAX); - assert_eq_const_safe!(<$T>::midpoint(<$T>::MIN, 6), <$T>::MIN / 2 + 3); - assert_eq_const_safe!(<$T>::midpoint(6, <$T>::MIN), <$T>::MIN / 2 + 3); - assert_eq_const_safe!(<$T>::midpoint(<$T>::MAX, 6), <$T>::MAX / 2 + 3); - assert_eq_const_safe!(<$T>::midpoint(6, <$T>::MAX), <$T>::MAX / 2 + 3); + assert_eq_const_safe!($T: <$T>::midpoint(<$T>::MIN, 6), <$T>::MIN / 2 + 3); + assert_eq_const_safe!($T: <$T>::midpoint(6, <$T>::MIN), <$T>::MIN / 2 + 3); + assert_eq_const_safe!($T: <$T>::midpoint(<$T>::MAX, 6), <$T>::MAX / 2 + 3); + assert_eq_const_safe!($T: <$T>::midpoint(6, <$T>::MAX), <$T>::MAX / 2 + 3); } } @@ -526,154 +533,154 @@ macro_rules! int_module { test_runtime_and_compiletime! { fn test_unbounded_shl() { // <$T>::MIN - assert_eq_const_safe!(<$T>::unbounded_shl(<$T>::MIN, SHIFT_AMOUNT_TEST_ONE), (<$T>::MIN << SHIFT_AMOUNT_TEST_ONE)); - assert_eq_const_safe!(<$T>::unbounded_shl(<$T>::MIN, SHIFT_AMOUNT_TEST_TWO), (<$T>::MIN << SHIFT_AMOUNT_TEST_TWO)); - assert_eq_const_safe!(<$T>::unbounded_shl(<$T>::MIN, SHIFT_AMOUNT_TEST_THREE), (<$T>::MIN << SHIFT_AMOUNT_TEST_THREE)); - assert_eq_const_safe!(<$T>::unbounded_shl(<$T>::MIN, SHIFT_AMOUNT_TEST_FOUR), (<$T>::MIN << SHIFT_AMOUNT_TEST_FOUR)); - assert_eq_const_safe!(<$T>::unbounded_shl(<$T>::MIN, 1), (<$T>::MIN << 1)); - assert_eq_const_safe!(<$T>::unbounded_shl(<$T>::MIN, 3), (<$T>::MIN << 3)); - assert_eq_const_safe!(<$T>::unbounded_shl(<$T>::MIN, 5), (<$T>::MIN << 5)); - assert_eq_const_safe!(<$T>::unbounded_shl(<$T>::MIN, SHIFT_AMOUNT_OVERFLOW), 0); - assert_eq_const_safe!(<$T>::unbounded_shl(<$T>::MIN, SHIFT_AMOUNT_OVERFLOW2), 0); - assert_eq_const_safe!(<$T>::unbounded_shl(<$T>::MIN, SHIFT_AMOUNT_OVERFLOW3), 0); + assert_eq_const_safe!($T: <$T>::unbounded_shl(<$T>::MIN, SHIFT_AMOUNT_TEST_ONE), (<$T>::MIN << SHIFT_AMOUNT_TEST_ONE)); + assert_eq_const_safe!($T: <$T>::unbounded_shl(<$T>::MIN, SHIFT_AMOUNT_TEST_TWO), (<$T>::MIN << SHIFT_AMOUNT_TEST_TWO)); + assert_eq_const_safe!($T: <$T>::unbounded_shl(<$T>::MIN, SHIFT_AMOUNT_TEST_THREE), (<$T>::MIN << SHIFT_AMOUNT_TEST_THREE)); + assert_eq_const_safe!($T: <$T>::unbounded_shl(<$T>::MIN, SHIFT_AMOUNT_TEST_FOUR), (<$T>::MIN << SHIFT_AMOUNT_TEST_FOUR)); + assert_eq_const_safe!($T: <$T>::unbounded_shl(<$T>::MIN, 1), (<$T>::MIN << 1)); + assert_eq_const_safe!($T: <$T>::unbounded_shl(<$T>::MIN, 3), (<$T>::MIN << 3)); + assert_eq_const_safe!($T: <$T>::unbounded_shl(<$T>::MIN, 5), (<$T>::MIN << 5)); + assert_eq_const_safe!($T: <$T>::unbounded_shl(<$T>::MIN, SHIFT_AMOUNT_OVERFLOW), 0); + assert_eq_const_safe!($T: <$T>::unbounded_shl(<$T>::MIN, SHIFT_AMOUNT_OVERFLOW2), 0); + assert_eq_const_safe!($T: <$T>::unbounded_shl(<$T>::MIN, SHIFT_AMOUNT_OVERFLOW3), 0); // <$T>::MAX - assert_eq_const_safe!(<$T>::unbounded_shl(<$T>::MAX, SHIFT_AMOUNT_TEST_ONE), (<$T>::MAX << SHIFT_AMOUNT_TEST_ONE)); - assert_eq_const_safe!(<$T>::unbounded_shl(<$T>::MAX, SHIFT_AMOUNT_TEST_TWO), (<$T>::MAX << SHIFT_AMOUNT_TEST_TWO)); - assert_eq_const_safe!(<$T>::unbounded_shl(<$T>::MAX, SHIFT_AMOUNT_TEST_THREE), (<$T>::MAX << SHIFT_AMOUNT_TEST_THREE)); - assert_eq_const_safe!(<$T>::unbounded_shl(<$T>::MAX, SHIFT_AMOUNT_TEST_FOUR), (<$T>::MAX << SHIFT_AMOUNT_TEST_FOUR)); - assert_eq_const_safe!(<$T>::unbounded_shl(<$T>::MAX, 1), (<$T>::MAX << 1)); - assert_eq_const_safe!(<$T>::unbounded_shl(<$T>::MAX, 3), (<$T>::MAX << 3)); - assert_eq_const_safe!(<$T>::unbounded_shl(<$T>::MAX, 5), (<$T>::MAX << 5)); - assert_eq_const_safe!(<$T>::unbounded_shl(<$T>::MAX, SHIFT_AMOUNT_OVERFLOW), 0); - assert_eq_const_safe!(<$T>::unbounded_shl(<$T>::MAX, SHIFT_AMOUNT_OVERFLOW2), 0); - assert_eq_const_safe!(<$T>::unbounded_shl(<$T>::MAX, SHIFT_AMOUNT_OVERFLOW3), 0); + assert_eq_const_safe!($T: <$T>::unbounded_shl(<$T>::MAX, SHIFT_AMOUNT_TEST_ONE), (<$T>::MAX << SHIFT_AMOUNT_TEST_ONE)); + assert_eq_const_safe!($T: <$T>::unbounded_shl(<$T>::MAX, SHIFT_AMOUNT_TEST_TWO), (<$T>::MAX << SHIFT_AMOUNT_TEST_TWO)); + assert_eq_const_safe!($T: <$T>::unbounded_shl(<$T>::MAX, SHIFT_AMOUNT_TEST_THREE), (<$T>::MAX << SHIFT_AMOUNT_TEST_THREE)); + assert_eq_const_safe!($T: <$T>::unbounded_shl(<$T>::MAX, SHIFT_AMOUNT_TEST_FOUR), (<$T>::MAX << SHIFT_AMOUNT_TEST_FOUR)); + assert_eq_const_safe!($T: <$T>::unbounded_shl(<$T>::MAX, 1), (<$T>::MAX << 1)); + assert_eq_const_safe!($T: <$T>::unbounded_shl(<$T>::MAX, 3), (<$T>::MAX << 3)); + assert_eq_const_safe!($T: <$T>::unbounded_shl(<$T>::MAX, 5), (<$T>::MAX << 5)); + assert_eq_const_safe!($T: <$T>::unbounded_shl(<$T>::MAX, SHIFT_AMOUNT_OVERFLOW), 0); + assert_eq_const_safe!($T: <$T>::unbounded_shl(<$T>::MAX, SHIFT_AMOUNT_OVERFLOW2), 0); + assert_eq_const_safe!($T: <$T>::unbounded_shl(<$T>::MAX, SHIFT_AMOUNT_OVERFLOW3), 0); // 1 - assert_eq_const_safe!(<$T>::unbounded_shl(1, SHIFT_AMOUNT_TEST_ONE), (1 << SHIFT_AMOUNT_TEST_ONE)); - assert_eq_const_safe!(<$T>::unbounded_shl(1, SHIFT_AMOUNT_TEST_TWO), (1 << SHIFT_AMOUNT_TEST_TWO)); - assert_eq_const_safe!(<$T>::unbounded_shl(1, SHIFT_AMOUNT_TEST_THREE), (1 << SHIFT_AMOUNT_TEST_THREE)); - assert_eq_const_safe!(<$T>::unbounded_shl(1, SHIFT_AMOUNT_TEST_FOUR), (1 << SHIFT_AMOUNT_TEST_FOUR)); - assert_eq_const_safe!(<$T>::unbounded_shl(1, 1), (1 << 1)); - assert_eq_const_safe!(<$T>::unbounded_shl(1, 3), (1 << 3)); - assert_eq_const_safe!(<$T>::unbounded_shl(1, 5), (1 << 5)); - assert_eq_const_safe!(<$T>::unbounded_shl(1, SHIFT_AMOUNT_OVERFLOW), 0); - assert_eq_const_safe!(<$T>::unbounded_shl(1, SHIFT_AMOUNT_OVERFLOW), 0); - assert_eq_const_safe!(<$T>::unbounded_shl(1, SHIFT_AMOUNT_OVERFLOW2), 0); - assert_eq_const_safe!(<$T>::unbounded_shl(1, SHIFT_AMOUNT_OVERFLOW3), 0); + assert_eq_const_safe!($T: <$T>::unbounded_shl(1, SHIFT_AMOUNT_TEST_ONE), (1 << SHIFT_AMOUNT_TEST_ONE)); + assert_eq_const_safe!($T: <$T>::unbounded_shl(1, SHIFT_AMOUNT_TEST_TWO), (1 << SHIFT_AMOUNT_TEST_TWO)); + assert_eq_const_safe!($T: <$T>::unbounded_shl(1, SHIFT_AMOUNT_TEST_THREE), (1 << SHIFT_AMOUNT_TEST_THREE)); + assert_eq_const_safe!($T: <$T>::unbounded_shl(1, SHIFT_AMOUNT_TEST_FOUR), (1 << SHIFT_AMOUNT_TEST_FOUR)); + assert_eq_const_safe!($T: <$T>::unbounded_shl(1, 1), (1 << 1)); + assert_eq_const_safe!($T: <$T>::unbounded_shl(1, 3), (1 << 3)); + assert_eq_const_safe!($T: <$T>::unbounded_shl(1, 5), (1 << 5)); + assert_eq_const_safe!($T: <$T>::unbounded_shl(1, SHIFT_AMOUNT_OVERFLOW), 0); + assert_eq_const_safe!($T: <$T>::unbounded_shl(1, SHIFT_AMOUNT_OVERFLOW), 0); + assert_eq_const_safe!($T: <$T>::unbounded_shl(1, SHIFT_AMOUNT_OVERFLOW2), 0); + assert_eq_const_safe!($T: <$T>::unbounded_shl(1, SHIFT_AMOUNT_OVERFLOW3), 0); // -1 - assert_eq_const_safe!(<$T>::unbounded_shl(-1, SHIFT_AMOUNT_TEST_ONE), (-1 << SHIFT_AMOUNT_TEST_ONE)); - assert_eq_const_safe!(<$T>::unbounded_shl(-1, SHIFT_AMOUNT_TEST_TWO), (-1 << SHIFT_AMOUNT_TEST_TWO)); - assert_eq_const_safe!(<$T>::unbounded_shl(-1, SHIFT_AMOUNT_TEST_THREE), (-1 << SHIFT_AMOUNT_TEST_THREE)); - assert_eq_const_safe!(<$T>::unbounded_shl(-1, SHIFT_AMOUNT_TEST_FOUR), (-1 << SHIFT_AMOUNT_TEST_FOUR)); - assert_eq_const_safe!(<$T>::unbounded_shl(-1, 1), (-1 << 1)); - assert_eq_const_safe!(<$T>::unbounded_shl(-1, 3), (-1 << 3)); - assert_eq_const_safe!(<$T>::unbounded_shl(-1, 5), (-1 << 5)); - assert_eq_const_safe!(<$T>::unbounded_shl(-1, SHIFT_AMOUNT_OVERFLOW), 0); - assert_eq_const_safe!(<$T>::unbounded_shl(-1, SHIFT_AMOUNT_OVERFLOW), 0); - assert_eq_const_safe!(<$T>::unbounded_shl(-1, SHIFT_AMOUNT_OVERFLOW2), 0); - assert_eq_const_safe!(<$T>::unbounded_shl(-1, SHIFT_AMOUNT_OVERFLOW3), 0); + assert_eq_const_safe!($T: <$T>::unbounded_shl(-1, SHIFT_AMOUNT_TEST_ONE), (-1 << SHIFT_AMOUNT_TEST_ONE)); + assert_eq_const_safe!($T: <$T>::unbounded_shl(-1, SHIFT_AMOUNT_TEST_TWO), (-1 << SHIFT_AMOUNT_TEST_TWO)); + assert_eq_const_safe!($T: <$T>::unbounded_shl(-1, SHIFT_AMOUNT_TEST_THREE), (-1 << SHIFT_AMOUNT_TEST_THREE)); + assert_eq_const_safe!($T: <$T>::unbounded_shl(-1, SHIFT_AMOUNT_TEST_FOUR), (-1 << SHIFT_AMOUNT_TEST_FOUR)); + assert_eq_const_safe!($T: <$T>::unbounded_shl(-1, 1), (-1 << 1)); + assert_eq_const_safe!($T: <$T>::unbounded_shl(-1, 3), (-1 << 3)); + assert_eq_const_safe!($T: <$T>::unbounded_shl(-1, 5), (-1 << 5)); + assert_eq_const_safe!($T: <$T>::unbounded_shl(-1, SHIFT_AMOUNT_OVERFLOW), 0); + assert_eq_const_safe!($T: <$T>::unbounded_shl(-1, SHIFT_AMOUNT_OVERFLOW), 0); + assert_eq_const_safe!($T: <$T>::unbounded_shl(-1, SHIFT_AMOUNT_OVERFLOW2), 0); + assert_eq_const_safe!($T: <$T>::unbounded_shl(-1, SHIFT_AMOUNT_OVERFLOW3), 0); // 8 - assert_eq_const_safe!(<$T>::unbounded_shl(8, SHIFT_AMOUNT_TEST_ONE), (8 << SHIFT_AMOUNT_TEST_ONE)); - assert_eq_const_safe!(<$T>::unbounded_shl(8, SHIFT_AMOUNT_TEST_TWO), (8 << SHIFT_AMOUNT_TEST_TWO)); - assert_eq_const_safe!(<$T>::unbounded_shl(8, SHIFT_AMOUNT_TEST_THREE), (8 << SHIFT_AMOUNT_TEST_THREE)); - assert_eq_const_safe!(<$T>::unbounded_shl(8, SHIFT_AMOUNT_TEST_FOUR), (8 << SHIFT_AMOUNT_TEST_FOUR)); - assert_eq_const_safe!(<$T>::unbounded_shl(8, 1), (8 << 1)); - assert_eq_const_safe!(<$T>::unbounded_shl(8, 3), (8 << 3)); - assert_eq_const_safe!(<$T>::unbounded_shl(8, 5), (8 << 5)); - assert_eq_const_safe!(<$T>::unbounded_shl(8, SHIFT_AMOUNT_OVERFLOW), 0); - assert_eq_const_safe!(<$T>::unbounded_shl(8, SHIFT_AMOUNT_OVERFLOW2), 0); - assert_eq_const_safe!(<$T>::unbounded_shl(8, SHIFT_AMOUNT_OVERFLOW3), 0); + assert_eq_const_safe!($T: <$T>::unbounded_shl(8, SHIFT_AMOUNT_TEST_ONE), (8 << SHIFT_AMOUNT_TEST_ONE)); + assert_eq_const_safe!($T: <$T>::unbounded_shl(8, SHIFT_AMOUNT_TEST_TWO), (8 << SHIFT_AMOUNT_TEST_TWO)); + assert_eq_const_safe!($T: <$T>::unbounded_shl(8, SHIFT_AMOUNT_TEST_THREE), (8 << SHIFT_AMOUNT_TEST_THREE)); + assert_eq_const_safe!($T: <$T>::unbounded_shl(8, SHIFT_AMOUNT_TEST_FOUR), (8 << SHIFT_AMOUNT_TEST_FOUR)); + assert_eq_const_safe!($T: <$T>::unbounded_shl(8, 1), (8 << 1)); + assert_eq_const_safe!($T: <$T>::unbounded_shl(8, 3), (8 << 3)); + assert_eq_const_safe!($T: <$T>::unbounded_shl(8, 5), (8 << 5)); + assert_eq_const_safe!($T: <$T>::unbounded_shl(8, SHIFT_AMOUNT_OVERFLOW), 0); + assert_eq_const_safe!($T: <$T>::unbounded_shl(8, SHIFT_AMOUNT_OVERFLOW2), 0); + assert_eq_const_safe!($T: <$T>::unbounded_shl(8, SHIFT_AMOUNT_OVERFLOW3), 0); // 17 - assert_eq_const_safe!(<$T>::unbounded_shl(17, SHIFT_AMOUNT_TEST_ONE), (17 << SHIFT_AMOUNT_TEST_ONE)); - assert_eq_const_safe!(<$T>::unbounded_shl(17, SHIFT_AMOUNT_TEST_TWO), (17 << SHIFT_AMOUNT_TEST_TWO)); - assert_eq_const_safe!(<$T>::unbounded_shl(17, SHIFT_AMOUNT_TEST_THREE), (17 << SHIFT_AMOUNT_TEST_THREE)); - assert_eq_const_safe!(<$T>::unbounded_shl(17, SHIFT_AMOUNT_TEST_FOUR), (17 << SHIFT_AMOUNT_TEST_FOUR)); - assert_eq_const_safe!(<$T>::unbounded_shl(17, 1), (17 << 1)); - assert_eq_const_safe!(<$T>::unbounded_shl(17, 3), (17 << 3)); - assert_eq_const_safe!(<$T>::unbounded_shl(17, 5), (17 << 5)); - assert_eq_const_safe!(<$T>::unbounded_shl(17, SHIFT_AMOUNT_OVERFLOW), 0); - assert_eq_const_safe!(<$T>::unbounded_shl(17, SHIFT_AMOUNT_OVERFLOW2), 0); - assert_eq_const_safe!(<$T>::unbounded_shl(17, SHIFT_AMOUNT_OVERFLOW3), 0); + assert_eq_const_safe!($T: <$T>::unbounded_shl(17, SHIFT_AMOUNT_TEST_ONE), (17 << SHIFT_AMOUNT_TEST_ONE)); + assert_eq_const_safe!($T: <$T>::unbounded_shl(17, SHIFT_AMOUNT_TEST_TWO), (17 << SHIFT_AMOUNT_TEST_TWO)); + assert_eq_const_safe!($T: <$T>::unbounded_shl(17, SHIFT_AMOUNT_TEST_THREE), (17 << SHIFT_AMOUNT_TEST_THREE)); + assert_eq_const_safe!($T: <$T>::unbounded_shl(17, SHIFT_AMOUNT_TEST_FOUR), (17 << SHIFT_AMOUNT_TEST_FOUR)); + assert_eq_const_safe!($T: <$T>::unbounded_shl(17, 1), (17 << 1)); + assert_eq_const_safe!($T: <$T>::unbounded_shl(17, 3), (17 << 3)); + assert_eq_const_safe!($T: <$T>::unbounded_shl(17, 5), (17 << 5)); + assert_eq_const_safe!($T: <$T>::unbounded_shl(17, SHIFT_AMOUNT_OVERFLOW), 0); + assert_eq_const_safe!($T: <$T>::unbounded_shl(17, SHIFT_AMOUNT_OVERFLOW2), 0); + assert_eq_const_safe!($T: <$T>::unbounded_shl(17, SHIFT_AMOUNT_OVERFLOW3), 0); } fn test_unbounded_shr() { // <$T>::MIN - assert_eq_const_safe!(<$T>::unbounded_shr(<$T>::MIN, SHIFT_AMOUNT_TEST_ONE), (<$T>::MIN >> SHIFT_AMOUNT_TEST_ONE)); - assert_eq_const_safe!(<$T>::unbounded_shr(<$T>::MIN, SHIFT_AMOUNT_TEST_TWO), (<$T>::MIN >> SHIFT_AMOUNT_TEST_TWO)); - assert_eq_const_safe!(<$T>::unbounded_shr(<$T>::MIN, SHIFT_AMOUNT_TEST_THREE), (<$T>::MIN >> SHIFT_AMOUNT_TEST_THREE)); - assert_eq_const_safe!(<$T>::unbounded_shr(<$T>::MIN, SHIFT_AMOUNT_TEST_FOUR), (<$T>::MIN >> SHIFT_AMOUNT_TEST_FOUR)); - assert_eq_const_safe!(<$T>::unbounded_shr(<$T>::MIN, 1), (<$T>::MIN >> 1)); - assert_eq_const_safe!(<$T>::unbounded_shr(<$T>::MIN, 3), (<$T>::MIN >> 3)); - assert_eq_const_safe!(<$T>::unbounded_shr(<$T>::MIN, 5), (<$T>::MIN >> 5)); - assert_eq_const_safe!(<$T>::unbounded_shr(<$T>::MIN, SHIFT_AMOUNT_OVERFLOW), -1); - assert_eq_const_safe!(<$T>::unbounded_shr(<$T>::MIN, SHIFT_AMOUNT_OVERFLOW2), -1); - assert_eq_const_safe!(<$T>::unbounded_shr(<$T>::MIN, SHIFT_AMOUNT_OVERFLOW3), -1); + assert_eq_const_safe!($T: <$T>::unbounded_shr(<$T>::MIN, SHIFT_AMOUNT_TEST_ONE), (<$T>::MIN >> SHIFT_AMOUNT_TEST_ONE)); + assert_eq_const_safe!($T: <$T>::unbounded_shr(<$T>::MIN, SHIFT_AMOUNT_TEST_TWO), (<$T>::MIN >> SHIFT_AMOUNT_TEST_TWO)); + assert_eq_const_safe!($T: <$T>::unbounded_shr(<$T>::MIN, SHIFT_AMOUNT_TEST_THREE), (<$T>::MIN >> SHIFT_AMOUNT_TEST_THREE)); + assert_eq_const_safe!($T: <$T>::unbounded_shr(<$T>::MIN, SHIFT_AMOUNT_TEST_FOUR), (<$T>::MIN >> SHIFT_AMOUNT_TEST_FOUR)); + assert_eq_const_safe!($T: <$T>::unbounded_shr(<$T>::MIN, 1), (<$T>::MIN >> 1)); + assert_eq_const_safe!($T: <$T>::unbounded_shr(<$T>::MIN, 3), (<$T>::MIN >> 3)); + assert_eq_const_safe!($T: <$T>::unbounded_shr(<$T>::MIN, 5), (<$T>::MIN >> 5)); + assert_eq_const_safe!($T: <$T>::unbounded_shr(<$T>::MIN, SHIFT_AMOUNT_OVERFLOW), -1); + assert_eq_const_safe!($T: <$T>::unbounded_shr(<$T>::MIN, SHIFT_AMOUNT_OVERFLOW2), -1); + assert_eq_const_safe!($T: <$T>::unbounded_shr(<$T>::MIN, SHIFT_AMOUNT_OVERFLOW3), -1); // <$T>::MAX - assert_eq_const_safe!(<$T>::unbounded_shr(<$T>::MAX, SHIFT_AMOUNT_TEST_ONE), (<$T>::MAX >> SHIFT_AMOUNT_TEST_ONE)); - assert_eq_const_safe!(<$T>::unbounded_shr(<$T>::MAX, SHIFT_AMOUNT_TEST_TWO), (<$T>::MAX >> SHIFT_AMOUNT_TEST_TWO)); - assert_eq_const_safe!(<$T>::unbounded_shr(<$T>::MAX, SHIFT_AMOUNT_TEST_THREE), (<$T>::MAX >> SHIFT_AMOUNT_TEST_THREE)); - assert_eq_const_safe!(<$T>::unbounded_shr(<$T>::MAX, SHIFT_AMOUNT_TEST_FOUR), (<$T>::MAX >> SHIFT_AMOUNT_TEST_FOUR)); - assert_eq_const_safe!(<$T>::unbounded_shr(<$T>::MAX, 1), (<$T>::MAX >> 1)); - assert_eq_const_safe!(<$T>::unbounded_shr(<$T>::MAX, 3), (<$T>::MAX >> 3)); - assert_eq_const_safe!(<$T>::unbounded_shr(<$T>::MAX, 5), (<$T>::MAX >> 5)); - assert_eq_const_safe!(<$T>::unbounded_shr(<$T>::MAX, SHIFT_AMOUNT_OVERFLOW), 0); - assert_eq_const_safe!(<$T>::unbounded_shr(<$T>::MAX, SHIFT_AMOUNT_OVERFLOW2), 0); - assert_eq_const_safe!(<$T>::unbounded_shr(<$T>::MAX, SHIFT_AMOUNT_OVERFLOW3), 0); + assert_eq_const_safe!($T: <$T>::unbounded_shr(<$T>::MAX, SHIFT_AMOUNT_TEST_ONE), (<$T>::MAX >> SHIFT_AMOUNT_TEST_ONE)); + assert_eq_const_safe!($T: <$T>::unbounded_shr(<$T>::MAX, SHIFT_AMOUNT_TEST_TWO), (<$T>::MAX >> SHIFT_AMOUNT_TEST_TWO)); + assert_eq_const_safe!($T: <$T>::unbounded_shr(<$T>::MAX, SHIFT_AMOUNT_TEST_THREE), (<$T>::MAX >> SHIFT_AMOUNT_TEST_THREE)); + assert_eq_const_safe!($T: <$T>::unbounded_shr(<$T>::MAX, SHIFT_AMOUNT_TEST_FOUR), (<$T>::MAX >> SHIFT_AMOUNT_TEST_FOUR)); + assert_eq_const_safe!($T: <$T>::unbounded_shr(<$T>::MAX, 1), (<$T>::MAX >> 1)); + assert_eq_const_safe!($T: <$T>::unbounded_shr(<$T>::MAX, 3), (<$T>::MAX >> 3)); + assert_eq_const_safe!($T: <$T>::unbounded_shr(<$T>::MAX, 5), (<$T>::MAX >> 5)); + assert_eq_const_safe!($T: <$T>::unbounded_shr(<$T>::MAX, SHIFT_AMOUNT_OVERFLOW), 0); + assert_eq_const_safe!($T: <$T>::unbounded_shr(<$T>::MAX, SHIFT_AMOUNT_OVERFLOW2), 0); + assert_eq_const_safe!($T: <$T>::unbounded_shr(<$T>::MAX, SHIFT_AMOUNT_OVERFLOW3), 0); // 1 - assert_eq_const_safe!(<$T>::unbounded_shr(1, SHIFT_AMOUNT_TEST_ONE), (1 >> SHIFT_AMOUNT_TEST_ONE)); - assert_eq_const_safe!(<$T>::unbounded_shr(1, SHIFT_AMOUNT_TEST_TWO), (1 >> SHIFT_AMOUNT_TEST_TWO)); - assert_eq_const_safe!(<$T>::unbounded_shr(1, SHIFT_AMOUNT_TEST_THREE), (1 >> SHIFT_AMOUNT_TEST_THREE)); - assert_eq_const_safe!(<$T>::unbounded_shr(1, SHIFT_AMOUNT_TEST_FOUR), (1 >> SHIFT_AMOUNT_TEST_FOUR)); - assert_eq_const_safe!(<$T>::unbounded_shr(1, 1), (1 >> 1)); - assert_eq_const_safe!(<$T>::unbounded_shr(1, 3), (1 >> 3)); - assert_eq_const_safe!(<$T>::unbounded_shr(1, 5), (1 >> 5)); - assert_eq_const_safe!(<$T>::unbounded_shr(1, SHIFT_AMOUNT_OVERFLOW), 0); - assert_eq_const_safe!(<$T>::unbounded_shr(1, SHIFT_AMOUNT_OVERFLOW), 0); - assert_eq_const_safe!(<$T>::unbounded_shr(1, SHIFT_AMOUNT_OVERFLOW2), 0); - assert_eq_const_safe!(<$T>::unbounded_shr(1, SHIFT_AMOUNT_OVERFLOW3), 0); + assert_eq_const_safe!($T: <$T>::unbounded_shr(1, SHIFT_AMOUNT_TEST_ONE), (1 >> SHIFT_AMOUNT_TEST_ONE)); + assert_eq_const_safe!($T: <$T>::unbounded_shr(1, SHIFT_AMOUNT_TEST_TWO), (1 >> SHIFT_AMOUNT_TEST_TWO)); + assert_eq_const_safe!($T: <$T>::unbounded_shr(1, SHIFT_AMOUNT_TEST_THREE), (1 >> SHIFT_AMOUNT_TEST_THREE)); + assert_eq_const_safe!($T: <$T>::unbounded_shr(1, SHIFT_AMOUNT_TEST_FOUR), (1 >> SHIFT_AMOUNT_TEST_FOUR)); + assert_eq_const_safe!($T: <$T>::unbounded_shr(1, 1), (1 >> 1)); + assert_eq_const_safe!($T: <$T>::unbounded_shr(1, 3), (1 >> 3)); + assert_eq_const_safe!($T: <$T>::unbounded_shr(1, 5), (1 >> 5)); + assert_eq_const_safe!($T: <$T>::unbounded_shr(1, SHIFT_AMOUNT_OVERFLOW), 0); + assert_eq_const_safe!($T: <$T>::unbounded_shr(1, SHIFT_AMOUNT_OVERFLOW), 0); + assert_eq_const_safe!($T: <$T>::unbounded_shr(1, SHIFT_AMOUNT_OVERFLOW2), 0); + assert_eq_const_safe!($T: <$T>::unbounded_shr(1, SHIFT_AMOUNT_OVERFLOW3), 0); // -1 - assert_eq_const_safe!(<$T>::unbounded_shr(-1, SHIFT_AMOUNT_TEST_ONE), (-1 >> SHIFT_AMOUNT_TEST_ONE)); - assert_eq_const_safe!(<$T>::unbounded_shr(-1, SHIFT_AMOUNT_TEST_TWO), (-1 >> SHIFT_AMOUNT_TEST_TWO)); - assert_eq_const_safe!(<$T>::unbounded_shr(-1, SHIFT_AMOUNT_TEST_THREE), (-1 >> SHIFT_AMOUNT_TEST_THREE)); - assert_eq_const_safe!(<$T>::unbounded_shr(-1, SHIFT_AMOUNT_TEST_FOUR), (-1 >> SHIFT_AMOUNT_TEST_FOUR)); - assert_eq_const_safe!(<$T>::unbounded_shr(-1, 1), (-1 >> 1)); - assert_eq_const_safe!(<$T>::unbounded_shr(-1, 3), (-1 >> 3)); - assert_eq_const_safe!(<$T>::unbounded_shr(-1, 5), (-1 >> 5)); - assert_eq_const_safe!(<$T>::unbounded_shr(-1, SHIFT_AMOUNT_OVERFLOW), -1); - assert_eq_const_safe!(<$T>::unbounded_shr(-1, SHIFT_AMOUNT_OVERFLOW), -1); - assert_eq_const_safe!(<$T>::unbounded_shr(-1, SHIFT_AMOUNT_OVERFLOW2), -1); - assert_eq_const_safe!(<$T>::unbounded_shr(-1, SHIFT_AMOUNT_OVERFLOW3), -1); + assert_eq_const_safe!($T: <$T>::unbounded_shr(-1, SHIFT_AMOUNT_TEST_ONE), (-1 >> SHIFT_AMOUNT_TEST_ONE)); + assert_eq_const_safe!($T: <$T>::unbounded_shr(-1, SHIFT_AMOUNT_TEST_TWO), (-1 >> SHIFT_AMOUNT_TEST_TWO)); + assert_eq_const_safe!($T: <$T>::unbounded_shr(-1, SHIFT_AMOUNT_TEST_THREE), (-1 >> SHIFT_AMOUNT_TEST_THREE)); + assert_eq_const_safe!($T: <$T>::unbounded_shr(-1, SHIFT_AMOUNT_TEST_FOUR), (-1 >> SHIFT_AMOUNT_TEST_FOUR)); + assert_eq_const_safe!($T: <$T>::unbounded_shr(-1, 1), (-1 >> 1)); + assert_eq_const_safe!($T: <$T>::unbounded_shr(-1, 3), (-1 >> 3)); + assert_eq_const_safe!($T: <$T>::unbounded_shr(-1, 5), (-1 >> 5)); + assert_eq_const_safe!($T: <$T>::unbounded_shr(-1, SHIFT_AMOUNT_OVERFLOW), -1); + assert_eq_const_safe!($T: <$T>::unbounded_shr(-1, SHIFT_AMOUNT_OVERFLOW), -1); + assert_eq_const_safe!($T: <$T>::unbounded_shr(-1, SHIFT_AMOUNT_OVERFLOW2), -1); + assert_eq_const_safe!($T: <$T>::unbounded_shr(-1, SHIFT_AMOUNT_OVERFLOW3), -1); // 8 - assert_eq_const_safe!(<$T>::unbounded_shr(8, SHIFT_AMOUNT_TEST_ONE), (8 >> SHIFT_AMOUNT_TEST_ONE)); - assert_eq_const_safe!(<$T>::unbounded_shr(8, SHIFT_AMOUNT_TEST_TWO), (8 >> SHIFT_AMOUNT_TEST_TWO)); - assert_eq_const_safe!(<$T>::unbounded_shr(8, SHIFT_AMOUNT_TEST_THREE), (8 >> SHIFT_AMOUNT_TEST_THREE)); - assert_eq_const_safe!(<$T>::unbounded_shr(8, SHIFT_AMOUNT_TEST_FOUR), (8 >> SHIFT_AMOUNT_TEST_FOUR)); - assert_eq_const_safe!(<$T>::unbounded_shr(8, 1), (8 >> 1)); - assert_eq_const_safe!(<$T>::unbounded_shr(8, 3), (8 >> 3)); - assert_eq_const_safe!(<$T>::unbounded_shr(8, 5), (8 >> 5)); - assert_eq_const_safe!(<$T>::unbounded_shr(8, SHIFT_AMOUNT_OVERFLOW), 0); - assert_eq_const_safe!(<$T>::unbounded_shr(8, SHIFT_AMOUNT_OVERFLOW2), 0); - assert_eq_const_safe!(<$T>::unbounded_shr(8, SHIFT_AMOUNT_OVERFLOW3), 0); + assert_eq_const_safe!($T: <$T>::unbounded_shr(8, SHIFT_AMOUNT_TEST_ONE), (8 >> SHIFT_AMOUNT_TEST_ONE)); + assert_eq_const_safe!($T: <$T>::unbounded_shr(8, SHIFT_AMOUNT_TEST_TWO), (8 >> SHIFT_AMOUNT_TEST_TWO)); + assert_eq_const_safe!($T: <$T>::unbounded_shr(8, SHIFT_AMOUNT_TEST_THREE), (8 >> SHIFT_AMOUNT_TEST_THREE)); + assert_eq_const_safe!($T: <$T>::unbounded_shr(8, SHIFT_AMOUNT_TEST_FOUR), (8 >> SHIFT_AMOUNT_TEST_FOUR)); + assert_eq_const_safe!($T: <$T>::unbounded_shr(8, 1), (8 >> 1)); + assert_eq_const_safe!($T: <$T>::unbounded_shr(8, 3), (8 >> 3)); + assert_eq_const_safe!($T: <$T>::unbounded_shr(8, 5), (8 >> 5)); + assert_eq_const_safe!($T: <$T>::unbounded_shr(8, SHIFT_AMOUNT_OVERFLOW), 0); + assert_eq_const_safe!($T: <$T>::unbounded_shr(8, SHIFT_AMOUNT_OVERFLOW2), 0); + assert_eq_const_safe!($T: <$T>::unbounded_shr(8, SHIFT_AMOUNT_OVERFLOW3), 0); // 17 - assert_eq_const_safe!(<$T>::unbounded_shr(17, SHIFT_AMOUNT_TEST_ONE), (17 >> SHIFT_AMOUNT_TEST_ONE)); - assert_eq_const_safe!(<$T>::unbounded_shr(17, SHIFT_AMOUNT_TEST_TWO), (17 >> SHIFT_AMOUNT_TEST_TWO)); - assert_eq_const_safe!(<$T>::unbounded_shr(17, SHIFT_AMOUNT_TEST_THREE), (17 >> SHIFT_AMOUNT_TEST_THREE)); - assert_eq_const_safe!(<$T>::unbounded_shr(17, SHIFT_AMOUNT_TEST_FOUR), (17 >> SHIFT_AMOUNT_TEST_FOUR)); - assert_eq_const_safe!(<$T>::unbounded_shr(17, 1), (17 >> 1)); - assert_eq_const_safe!(<$T>::unbounded_shr(17, 3), (17 >> 3)); - assert_eq_const_safe!(<$T>::unbounded_shr(17, 5), (17 >> 5)); - assert_eq_const_safe!(<$T>::unbounded_shr(17, SHIFT_AMOUNT_OVERFLOW), 0); - assert_eq_const_safe!(<$T>::unbounded_shr(17, SHIFT_AMOUNT_OVERFLOW2), 0); - assert_eq_const_safe!(<$T>::unbounded_shr(17, SHIFT_AMOUNT_OVERFLOW3), 0); + assert_eq_const_safe!($T: <$T>::unbounded_shr(17, SHIFT_AMOUNT_TEST_ONE), (17 >> SHIFT_AMOUNT_TEST_ONE)); + assert_eq_const_safe!($T: <$T>::unbounded_shr(17, SHIFT_AMOUNT_TEST_TWO), (17 >> SHIFT_AMOUNT_TEST_TWO)); + assert_eq_const_safe!($T: <$T>::unbounded_shr(17, SHIFT_AMOUNT_TEST_THREE), (17 >> SHIFT_AMOUNT_TEST_THREE)); + assert_eq_const_safe!($T: <$T>::unbounded_shr(17, SHIFT_AMOUNT_TEST_FOUR), (17 >> SHIFT_AMOUNT_TEST_FOUR)); + assert_eq_const_safe!($T: <$T>::unbounded_shr(17, 1), (17 >> 1)); + assert_eq_const_safe!($T: <$T>::unbounded_shr(17, 3), (17 >> 3)); + assert_eq_const_safe!($T: <$T>::unbounded_shr(17, 5), (17 >> 5)); + assert_eq_const_safe!($T: <$T>::unbounded_shr(17, SHIFT_AMOUNT_OVERFLOW), 0); + assert_eq_const_safe!($T: <$T>::unbounded_shr(17, SHIFT_AMOUNT_OVERFLOW2), 0); + assert_eq_const_safe!($T: <$T>::unbounded_shr(17, SHIFT_AMOUNT_OVERFLOW3), 0); } } }; diff --git a/library/coretests/tests/num/uint_macros.rs b/library/coretests/tests/num/uint_macros.rs index d09eb97b17e06..2e35e8bf5342a 100644 --- a/library/coretests/tests/num/uint_macros.rs +++ b/library/coretests/tests/num/uint_macros.rs @@ -1,5 +1,6 @@ macro_rules! uint_module { ($T:ident) => { + use core::num::ParseIntError; use core::ops::{BitAnd, BitOr, BitXor, Not, Shl, Shr}; use core::$T::*; @@ -49,95 +50,95 @@ macro_rules! uint_module { fn test_leading_trailing_ones() { const A: $T = 0b0101_1111; - assert_eq_const_safe!(A.trailing_ones(), 5); - assert_eq_const_safe!((!A).leading_ones(), $T::BITS - 7); + assert_eq_const_safe!(u32: A.trailing_ones(), 5); + assert_eq_const_safe!(u32: (!A).leading_ones(), $T::BITS - 7); - assert_eq_const_safe!(A.reverse_bits().leading_ones(), 5); + assert_eq_const_safe!(u32: A.reverse_bits().leading_ones(), 5); - assert_eq_const_safe!(_1.leading_ones(), $T::BITS); - assert_eq_const_safe!(_1.trailing_ones(), $T::BITS); + assert_eq_const_safe!(u32: _1.leading_ones(), $T::BITS); + assert_eq_const_safe!(u32: _1.trailing_ones(), $T::BITS); - assert_eq_const_safe!((_1 << 1).trailing_ones(), 0); - assert_eq_const_safe!((_1 >> 1).leading_ones(), 0); + assert_eq_const_safe!(u32: (_1 << 1).trailing_ones(), 0); + assert_eq_const_safe!(u32: (_1 >> 1).leading_ones(), 0); - assert_eq_const_safe!((_1 << 1).leading_ones(), $T::BITS - 1); - assert_eq_const_safe!((_1 >> 1).trailing_ones(), $T::BITS - 1); + assert_eq_const_safe!(u32: (_1 << 1).leading_ones(), $T::BITS - 1); + assert_eq_const_safe!(u32: (_1 >> 1).trailing_ones(), $T::BITS - 1); - assert_eq_const_safe!(_0.leading_ones(), 0); - assert_eq_const_safe!(_0.trailing_ones(), 0); + assert_eq_const_safe!(u32: _0.leading_ones(), 0); + assert_eq_const_safe!(u32: _0.trailing_ones(), 0); const X: $T = 0b0010_1100; - assert_eq_const_safe!(X.leading_ones(), 0); - assert_eq_const_safe!(X.trailing_ones(), 0); + assert_eq_const_safe!(u32: X.leading_ones(), 0); + assert_eq_const_safe!(u32: X.trailing_ones(), 0); } fn test_rotate() { - assert_eq_const_safe!(A.rotate_left(6).rotate_right(2).rotate_right(4), A); - assert_eq_const_safe!(B.rotate_left(3).rotate_left(2).rotate_right(5), B); - assert_eq_const_safe!(C.rotate_left(6).rotate_right(2).rotate_right(4), C); + assert_eq_const_safe!($T: A.rotate_left(6).rotate_right(2).rotate_right(4), A); + assert_eq_const_safe!($T: B.rotate_left(3).rotate_left(2).rotate_right(5), B); + assert_eq_const_safe!($T: C.rotate_left(6).rotate_right(2).rotate_right(4), C); // Rotating these should make no difference // // We test using 124 bits because to ensure that overlong bit shifts do // not cause undefined behavior. See #10183. - assert_eq_const_safe!(_0.rotate_left(124), _0); - assert_eq_const_safe!(_1.rotate_left(124), _1); - assert_eq_const_safe!(_0.rotate_right(124), _0); - assert_eq_const_safe!(_1.rotate_right(124), _1); + assert_eq_const_safe!($T: _0.rotate_left(124), _0); + assert_eq_const_safe!($T: _1.rotate_left(124), _1); + assert_eq_const_safe!($T: _0.rotate_right(124), _0); + assert_eq_const_safe!($T: _1.rotate_right(124), _1); // Rotating by 0 should have no effect - assert_eq_const_safe!(A.rotate_left(0), A); - assert_eq_const_safe!(B.rotate_left(0), B); - assert_eq_const_safe!(C.rotate_left(0), C); + assert_eq_const_safe!($T: A.rotate_left(0), A); + assert_eq_const_safe!($T: B.rotate_left(0), B); + assert_eq_const_safe!($T: C.rotate_left(0), C); // Rotating by a multiple of word size should also have no effect - assert_eq_const_safe!(A.rotate_left(128), A); - assert_eq_const_safe!(B.rotate_left(128), B); - assert_eq_const_safe!(C.rotate_left(128), C); + assert_eq_const_safe!($T: A.rotate_left(128), A); + assert_eq_const_safe!($T: B.rotate_left(128), B); + assert_eq_const_safe!($T: C.rotate_left(128), C); } fn test_swap_bytes() { - assert_eq_const_safe!(A.swap_bytes().swap_bytes(), A); - assert_eq_const_safe!(B.swap_bytes().swap_bytes(), B); - assert_eq_const_safe!(C.swap_bytes().swap_bytes(), C); + assert_eq_const_safe!($T: A.swap_bytes().swap_bytes(), A); + assert_eq_const_safe!($T: B.swap_bytes().swap_bytes(), B); + assert_eq_const_safe!($T: C.swap_bytes().swap_bytes(), C); // Swapping these should make no difference - assert_eq_const_safe!(_0.swap_bytes(), _0); - assert_eq_const_safe!(_1.swap_bytes(), _1); + assert_eq_const_safe!($T: _0.swap_bytes(), _0); + assert_eq_const_safe!($T: _1.swap_bytes(), _1); } fn test_reverse_bits() { - assert_eq_const_safe!(A.reverse_bits().reverse_bits(), A); - assert_eq_const_safe!(B.reverse_bits().reverse_bits(), B); - assert_eq_const_safe!(C.reverse_bits().reverse_bits(), C); + assert_eq_const_safe!($T: A.reverse_bits().reverse_bits(), A); + assert_eq_const_safe!($T: B.reverse_bits().reverse_bits(), B); + assert_eq_const_safe!($T: C.reverse_bits().reverse_bits(), C); // Swapping these should make no difference - assert_eq_const_safe!(_0.reverse_bits(), _0); - assert_eq_const_safe!(_1.reverse_bits(), _1); + assert_eq_const_safe!($T: _0.reverse_bits(), _0); + assert_eq_const_safe!($T: _1.reverse_bits(), _1); } fn test_le() { - assert_eq_const_safe!($T::from_le(A.to_le()), A); - assert_eq_const_safe!($T::from_le(B.to_le()), B); - assert_eq_const_safe!($T::from_le(C.to_le()), C); - assert_eq_const_safe!($T::from_le(_0), _0); - assert_eq_const_safe!($T::from_le(_1), _1); - assert_eq_const_safe!(_0.to_le(), _0); - assert_eq_const_safe!(_1.to_le(), _1); + assert_eq_const_safe!($T: $T::from_le(A.to_le()), A); + assert_eq_const_safe!($T: $T::from_le(B.to_le()), B); + assert_eq_const_safe!($T: $T::from_le(C.to_le()), C); + assert_eq_const_safe!($T: $T::from_le(_0), _0); + assert_eq_const_safe!($T: $T::from_le(_1), _1); + assert_eq_const_safe!($T: _0.to_le(), _0); + assert_eq_const_safe!($T: _1.to_le(), _1); } fn test_be() { - assert_eq_const_safe!($T::from_be(A.to_be()), A); - assert_eq_const_safe!($T::from_be(B.to_be()), B); - assert_eq_const_safe!($T::from_be(C.to_be()), C); - assert_eq_const_safe!($T::from_be(_0), _0); - assert_eq_const_safe!($T::from_be(_1), _1); - assert_eq_const_safe!(_0.to_be(), _0); - assert_eq_const_safe!(_1.to_be(), _1); + assert_eq_const_safe!($T: $T::from_be(A.to_be()), A); + assert_eq_const_safe!($T: $T::from_be(B.to_be()), B); + assert_eq_const_safe!($T: $T::from_be(C.to_be()), C); + assert_eq_const_safe!($T: $T::from_be(_0), _0); + assert_eq_const_safe!($T: $T::from_be(_1), _1); + assert_eq_const_safe!($T: _0.to_be(), _0); + assert_eq_const_safe!($T: _1.to_be(), _1); } fn test_unsigned_checked_div() { - assert_eq_const_safe!((10 as $T).checked_div(2), Some(5)); - assert_eq_const_safe!((5 as $T).checked_div(0), None); + assert_eq_const_safe!(Option<$T>: (10 as $T).checked_div(2), Some(5)); + assert_eq_const_safe!(Option<$T>: (5 as $T).checked_div(0), None); } } @@ -194,12 +195,12 @@ macro_rules! uint_module { test_runtime_and_compiletime! { fn test_parse_bytes() { - assert_eq_const_safe!($T::from_str_radix("123", 10), Ok(123 as $T)); - assert_eq_const_safe!($T::from_str_radix("1001", 2), Ok(9 as $T)); - assert_eq_const_safe!($T::from_str_radix("123", 8), Ok(83 as $T)); - assert_eq_const_safe!(u16::from_str_radix("123", 16), Ok(291 as u16)); - assert_eq_const_safe!(u16::from_str_radix("ffff", 16), Ok(65535 as u16)); - assert_eq_const_safe!($T::from_str_radix("z", 36), Ok(35 as $T)); + assert_eq_const_safe!(Result<$T, ParseIntError>: $T::from_str_radix("123", 10), Ok(123 as $T)); + assert_eq_const_safe!(Result<$T, ParseIntError>: $T::from_str_radix("1001", 2), Ok(9 as $T)); + assert_eq_const_safe!(Result<$T, ParseIntError>: $T::from_str_radix("123", 8), Ok(83 as $T)); + assert_eq_const_safe!(Result: u16::from_str_radix("123", 16), Ok(291 as u16)); + assert_eq_const_safe!(Result: u16::from_str_radix("ffff", 16), Ok(65535 as u16)); + assert_eq_const_safe!(Result<$T, ParseIntError>: $T::from_str_radix("z", 36), Ok(35 as $T)); assert!($T::from_str_radix("Z", 10).is_err()); assert!($T::from_str_radix("_", 2).is_err()); @@ -208,16 +209,16 @@ macro_rules! uint_module { fn test_pow() { { const R: $T = 2; - assert_eq_const_safe!(R.pow(2), 4 as $T); - assert_eq_const_safe!(R.pow(0), 1 as $T); - assert_eq_const_safe!(R.wrapping_pow(2), 4 as $T); - assert_eq_const_safe!(R.wrapping_pow(0), 1 as $T); - assert_eq_const_safe!(R.checked_pow(2), Some(4 as $T)); - assert_eq_const_safe!(R.checked_pow(0), Some(1 as $T)); - assert_eq_const_safe!(R.overflowing_pow(2), (4 as $T, false)); - assert_eq_const_safe!(R.overflowing_pow(0), (1 as $T, false)); - assert_eq_const_safe!(R.saturating_pow(2), 4 as $T); - assert_eq_const_safe!(R.saturating_pow(0), 1 as $T); + assert_eq_const_safe!($T: R.pow(2), 4 as $T); + assert_eq_const_safe!($T: R.pow(0), 1 as $T); + assert_eq_const_safe!($T: R.wrapping_pow(2), 4 as $T); + assert_eq_const_safe!($T: R.wrapping_pow(0), 1 as $T); + assert_eq_const_safe!(Option<$T>: R.checked_pow(2), Some(4 as $T)); + assert_eq_const_safe!(Option<$T>: R.checked_pow(0), Some(1 as $T)); + assert_eq_const_safe!(($T, bool): R.overflowing_pow(2), (4 as $T, false)); + assert_eq_const_safe!(($T, bool): R.overflowing_pow(0), (1 as $T, false)); + assert_eq_const_safe!($T: R.saturating_pow(2), 4 as $T); + assert_eq_const_safe!($T: R.saturating_pow(0), 1 as $T); } { @@ -226,20 +227,20 @@ macro_rules! uint_module { // if itest::MAX == 2^j-1, then itest is a `j` bit int, // so that `itest::MAX*itest::MAX == 2^(2*j)-2^(j+1)+1`, // thussaturating_pow the overflowing result is exactly 1. - assert_eq_const_safe!(R.wrapping_pow(2), 1 as $T); - assert_eq_const_safe!(R.checked_pow(2), None); - assert_eq_const_safe!(R.overflowing_pow(2), (1 as $T, true)); - assert_eq_const_safe!(R.saturating_pow(2), MAX); + assert_eq_const_safe!($T: R.wrapping_pow(2), 1 as $T); + assert_eq_const_safe!(Option<$T>: R.checked_pow(2), None); + assert_eq_const_safe!(($T, bool): R.overflowing_pow(2), (1 as $T, true)); + assert_eq_const_safe!($T: R.saturating_pow(2), MAX); } } fn test_isqrt() { - assert_eq_const_safe!((0 as $T).isqrt(), 0 as $T); - assert_eq_const_safe!((1 as $T).isqrt(), 1 as $T); - assert_eq_const_safe!((2 as $T).isqrt(), 1 as $T); - assert_eq_const_safe!((99 as $T).isqrt(), 9 as $T); - assert_eq_const_safe!((100 as $T).isqrt(), 10 as $T); - assert_eq_const_safe!($T::MAX.isqrt(), (1 << ($T::BITS / 2)) - 1); + assert_eq_const_safe!($T: (0 as $T).isqrt(), 0 as $T); + assert_eq_const_safe!($T: (1 as $T).isqrt(), 1 as $T); + assert_eq_const_safe!($T: (2 as $T).isqrt(), 1 as $T); + assert_eq_const_safe!($T: (99 as $T).isqrt(), 9 as $T); + assert_eq_const_safe!($T: (100 as $T).isqrt(), 10 as $T); + assert_eq_const_safe!($T: $T::MAX.isqrt(), (1 << ($T::BITS / 2)) - 1); } } @@ -264,24 +265,24 @@ macro_rules! uint_module { test_runtime_and_compiletime! { fn test_div_floor() { - assert_eq_const_safe!((8 as $T).div_floor(3), 2); + assert_eq_const_safe!($T: (8 as $T).div_floor(3), 2); } fn test_div_ceil() { - assert_eq_const_safe!((8 as $T).div_ceil(3), 3); + assert_eq_const_safe!($T: (8 as $T).div_ceil(3), 3); } fn test_next_multiple_of() { - assert_eq_const_safe!((16 as $T).next_multiple_of(8), 16); - assert_eq_const_safe!((23 as $T).next_multiple_of(8), 24); - assert_eq_const_safe!(MAX.next_multiple_of(1), MAX); + assert_eq_const_safe!($T: (16 as $T).next_multiple_of(8), 16); + assert_eq_const_safe!($T: (23 as $T).next_multiple_of(8), 24); + assert_eq_const_safe!($T: MAX.next_multiple_of(1), MAX); } fn test_checked_next_multiple_of() { - assert_eq_const_safe!((16 as $T).checked_next_multiple_of(8), Some(16)); - assert_eq_const_safe!((23 as $T).checked_next_multiple_of(8), Some(24)); - assert_eq_const_safe!((1 as $T).checked_next_multiple_of(0), None); - assert_eq_const_safe!(MAX.checked_next_multiple_of(2), None); + assert_eq_const_safe!(Option<$T>: (16 as $T).checked_next_multiple_of(8), Some(16)); + assert_eq_const_safe!(Option<$T>: (23 as $T).checked_next_multiple_of(8), Some(24)); + assert_eq_const_safe!(Option<$T>: (1 as $T).checked_next_multiple_of(0), None); + assert_eq_const_safe!(Option<$T>: MAX.checked_next_multiple_of(2), None); } fn test_is_next_multiple_of() { @@ -292,63 +293,63 @@ macro_rules! uint_module { } fn test_carrying_add() { - assert_eq_const_safe!($T::MAX.carrying_add(1, false), (0, true)); - assert_eq_const_safe!($T::MAX.carrying_add(0, true), (0, true)); - assert_eq_const_safe!($T::MAX.carrying_add(1, true), (1, true)); + assert_eq_const_safe!(($T, bool): $T::MAX.carrying_add(1, false), (0, true)); + assert_eq_const_safe!(($T, bool): $T::MAX.carrying_add(0, true), (0, true)); + assert_eq_const_safe!(($T, bool): $T::MAX.carrying_add(1, true), (1, true)); - assert_eq_const_safe!($T::MIN.carrying_add($T::MAX, false), ($T::MAX, false)); - assert_eq_const_safe!($T::MIN.carrying_add(0, true), (1, false)); - assert_eq_const_safe!($T::MIN.carrying_add($T::MAX, true), (0, true)); + assert_eq_const_safe!(($T, bool): $T::MIN.carrying_add($T::MAX, false), ($T::MAX, false)); + assert_eq_const_safe!(($T, bool): $T::MIN.carrying_add(0, true), (1, false)); + assert_eq_const_safe!(($T, bool): $T::MIN.carrying_add($T::MAX, true), (0, true)); } fn test_borrowing_sub() { - assert_eq_const_safe!($T::MIN.borrowing_sub(1, false), ($T::MAX, true)); - assert_eq_const_safe!($T::MIN.borrowing_sub(0, true), ($T::MAX, true)); - assert_eq_const_safe!($T::MIN.borrowing_sub(1, true), ($T::MAX - 1, true)); + assert_eq_const_safe!(($T, bool): $T::MIN.borrowing_sub(1, false), ($T::MAX, true)); + assert_eq_const_safe!(($T, bool): $T::MIN.borrowing_sub(0, true), ($T::MAX, true)); + assert_eq_const_safe!(($T, bool): $T::MIN.borrowing_sub(1, true), ($T::MAX - 1, true)); - assert_eq_const_safe!($T::MAX.borrowing_sub($T::MAX, false), (0, false)); - assert_eq_const_safe!($T::MAX.borrowing_sub(0, true), ($T::MAX - 1, false)); - assert_eq_const_safe!($T::MAX.borrowing_sub($T::MAX, true), ($T::MAX, true)); + assert_eq_const_safe!(($T, bool): $T::MAX.borrowing_sub($T::MAX, false), (0, false)); + assert_eq_const_safe!(($T, bool): $T::MAX.borrowing_sub(0, true), ($T::MAX - 1, false)); + assert_eq_const_safe!(($T, bool): $T::MAX.borrowing_sub($T::MAX, true), ($T::MAX, true)); } fn test_widening_mul() { - assert_eq_const_safe!($T::MAX.widening_mul($T::MAX), (1, $T::MAX - 1)); + assert_eq_const_safe!(($T, $T): $T::MAX.widening_mul($T::MAX), (1, $T::MAX - 1)); } fn test_carrying_mul() { - assert_eq_const_safe!($T::MAX.carrying_mul($T::MAX, 0), (1, $T::MAX - 1)); - assert_eq_const_safe!($T::MAX.carrying_mul($T::MAX, $T::MAX), (0, $T::MAX)); + assert_eq_const_safe!(($T, $T): $T::MAX.carrying_mul($T::MAX, 0), (1, $T::MAX - 1)); + assert_eq_const_safe!(($T, $T): $T::MAX.carrying_mul($T::MAX, $T::MAX), (0, $T::MAX)); } fn test_carrying_mul_add() { - assert_eq_const_safe!($T::MAX.carrying_mul_add($T::MAX, 0, 0), (1, $T::MAX - 1)); - assert_eq_const_safe!($T::MAX.carrying_mul_add($T::MAX, $T::MAX, 0), (0, $T::MAX)); - assert_eq_const_safe!($T::MAX.carrying_mul_add($T::MAX, $T::MAX, $T::MAX), ($T::MAX, $T::MAX)); + assert_eq_const_safe!(($T, $T): $T::MAX.carrying_mul_add($T::MAX, 0, 0), (1, $T::MAX - 1)); + assert_eq_const_safe!(($T, $T): $T::MAX.carrying_mul_add($T::MAX, $T::MAX, 0), (0, $T::MAX)); + assert_eq_const_safe!(($T, $T): $T::MAX.carrying_mul_add($T::MAX, $T::MAX, $T::MAX), ($T::MAX, $T::MAX)); } fn test_midpoint() { - assert_eq_const_safe!(<$T>::midpoint(1, 3), 2); - assert_eq_const_safe!(<$T>::midpoint(3, 1), 2); - - assert_eq_const_safe!(<$T>::midpoint(0, 0), 0); - assert_eq_const_safe!(<$T>::midpoint(0, 2), 1); - assert_eq_const_safe!(<$T>::midpoint(2, 0), 1); - assert_eq_const_safe!(<$T>::midpoint(2, 2), 2); - - assert_eq_const_safe!(<$T>::midpoint(1, 4), 2); - assert_eq_const_safe!(<$T>::midpoint(4, 1), 2); - assert_eq_const_safe!(<$T>::midpoint(3, 4), 3); - assert_eq_const_safe!(<$T>::midpoint(4, 3), 3); - - assert_eq_const_safe!(<$T>::midpoint(<$T>::MIN, <$T>::MAX), (<$T>::MAX - <$T>::MIN) / 2); - assert_eq_const_safe!(<$T>::midpoint(<$T>::MAX, <$T>::MIN), (<$T>::MAX - <$T>::MIN) / 2); - assert_eq_const_safe!(<$T>::midpoint(<$T>::MIN, <$T>::MIN), <$T>::MIN); - assert_eq_const_safe!(<$T>::midpoint(<$T>::MAX, <$T>::MAX), <$T>::MAX); - - assert_eq_const_safe!(<$T>::midpoint(<$T>::MIN, 6), <$T>::MIN / 2 + 3); - assert_eq_const_safe!(<$T>::midpoint(6, <$T>::MIN), <$T>::MIN / 2 + 3); - assert_eq_const_safe!(<$T>::midpoint(<$T>::MAX, 6), (<$T>::MAX - <$T>::MIN) / 2 + 3); - assert_eq_const_safe!(<$T>::midpoint(6, <$T>::MAX), (<$T>::MAX - <$T>::MIN) / 2 + 3); + assert_eq_const_safe!($T: <$T>::midpoint(1, 3), 2); + assert_eq_const_safe!($T: <$T>::midpoint(3, 1), 2); + + assert_eq_const_safe!($T: <$T>::midpoint(0, 0), 0); + assert_eq_const_safe!($T: <$T>::midpoint(0, 2), 1); + assert_eq_const_safe!($T: <$T>::midpoint(2, 0), 1); + assert_eq_const_safe!($T: <$T>::midpoint(2, 2), 2); + + assert_eq_const_safe!($T: <$T>::midpoint(1, 4), 2); + assert_eq_const_safe!($T: <$T>::midpoint(4, 1), 2); + assert_eq_const_safe!($T: <$T>::midpoint(3, 4), 3); + assert_eq_const_safe!($T: <$T>::midpoint(4, 3), 3); + + assert_eq_const_safe!($T: <$T>::midpoint(<$T>::MIN, <$T>::MAX), (<$T>::MAX - <$T>::MIN) / 2); + assert_eq_const_safe!($T: <$T>::midpoint(<$T>::MAX, <$T>::MIN), (<$T>::MAX - <$T>::MIN) / 2); + assert_eq_const_safe!($T: <$T>::midpoint(<$T>::MIN, <$T>::MIN), <$T>::MIN); + assert_eq_const_safe!($T: <$T>::midpoint(<$T>::MAX, <$T>::MAX), <$T>::MAX); + + assert_eq_const_safe!($T: <$T>::midpoint(<$T>::MIN, 6), <$T>::MIN / 2 + 3); + assert_eq_const_safe!($T: <$T>::midpoint(6, <$T>::MIN), <$T>::MIN / 2 + 3); + assert_eq_const_safe!($T: <$T>::midpoint(<$T>::MAX, 6), (<$T>::MAX - <$T>::MIN) / 2 + 3); + assert_eq_const_safe!($T: <$T>::midpoint(6, <$T>::MAX), (<$T>::MAX - <$T>::MIN) / 2 + 3); } } @@ -365,154 +366,154 @@ macro_rules! uint_module { test_runtime_and_compiletime! { fn test_unbounded_shl() { // <$T>::MIN - assert_eq_const_safe!(<$T>::unbounded_shl(<$T>::MIN, SHIFT_AMOUNT_TEST_ONE), (<$T>::MIN << SHIFT_AMOUNT_TEST_ONE)); - assert_eq_const_safe!(<$T>::unbounded_shl(<$T>::MIN, SHIFT_AMOUNT_TEST_TWO), (<$T>::MIN << SHIFT_AMOUNT_TEST_TWO)); - assert_eq_const_safe!(<$T>::unbounded_shl(<$T>::MIN, SHIFT_AMOUNT_TEST_THREE), (<$T>::MIN << SHIFT_AMOUNT_TEST_THREE)); - assert_eq_const_safe!(<$T>::unbounded_shl(<$T>::MIN, SHIFT_AMOUNT_TEST_FOUR), (<$T>::MIN << SHIFT_AMOUNT_TEST_FOUR)); - assert_eq_const_safe!(<$T>::unbounded_shl(<$T>::MIN, 1), (<$T>::MIN << 1)); - assert_eq_const_safe!(<$T>::unbounded_shl(<$T>::MIN, 3), (<$T>::MIN << 3)); - assert_eq_const_safe!(<$T>::unbounded_shl(<$T>::MIN, 5), (<$T>::MIN << 5)); - assert_eq_const_safe!(<$T>::unbounded_shl(<$T>::MIN, SHIFT_AMOUNT_OVERFLOW), 0); - assert_eq_const_safe!(<$T>::unbounded_shl(<$T>::MIN, SHIFT_AMOUNT_OVERFLOW2), 0); - assert_eq_const_safe!(<$T>::unbounded_shl(<$T>::MIN, SHIFT_AMOUNT_OVERFLOW3), 0); + assert_eq_const_safe!($T: <$T>::unbounded_shl(<$T>::MIN, SHIFT_AMOUNT_TEST_ONE), (<$T>::MIN << SHIFT_AMOUNT_TEST_ONE)); + assert_eq_const_safe!($T: <$T>::unbounded_shl(<$T>::MIN, SHIFT_AMOUNT_TEST_TWO), (<$T>::MIN << SHIFT_AMOUNT_TEST_TWO)); + assert_eq_const_safe!($T: <$T>::unbounded_shl(<$T>::MIN, SHIFT_AMOUNT_TEST_THREE), (<$T>::MIN << SHIFT_AMOUNT_TEST_THREE)); + assert_eq_const_safe!($T: <$T>::unbounded_shl(<$T>::MIN, SHIFT_AMOUNT_TEST_FOUR), (<$T>::MIN << SHIFT_AMOUNT_TEST_FOUR)); + assert_eq_const_safe!($T: <$T>::unbounded_shl(<$T>::MIN, 1), (<$T>::MIN << 1)); + assert_eq_const_safe!($T: <$T>::unbounded_shl(<$T>::MIN, 3), (<$T>::MIN << 3)); + assert_eq_const_safe!($T: <$T>::unbounded_shl(<$T>::MIN, 5), (<$T>::MIN << 5)); + assert_eq_const_safe!($T: <$T>::unbounded_shl(<$T>::MIN, SHIFT_AMOUNT_OVERFLOW), 0); + assert_eq_const_safe!($T: <$T>::unbounded_shl(<$T>::MIN, SHIFT_AMOUNT_OVERFLOW2), 0); + assert_eq_const_safe!($T: <$T>::unbounded_shl(<$T>::MIN, SHIFT_AMOUNT_OVERFLOW3), 0); // <$T>::MAX - assert_eq_const_safe!(<$T>::unbounded_shl(<$T>::MAX, SHIFT_AMOUNT_TEST_ONE), (<$T>::MAX << SHIFT_AMOUNT_TEST_ONE)); - assert_eq_const_safe!(<$T>::unbounded_shl(<$T>::MAX, SHIFT_AMOUNT_TEST_TWO), (<$T>::MAX << SHIFT_AMOUNT_TEST_TWO)); - assert_eq_const_safe!(<$T>::unbounded_shl(<$T>::MAX, SHIFT_AMOUNT_TEST_THREE), (<$T>::MAX << SHIFT_AMOUNT_TEST_THREE)); - assert_eq_const_safe!(<$T>::unbounded_shl(<$T>::MAX, SHIFT_AMOUNT_TEST_FOUR), (<$T>::MAX << SHIFT_AMOUNT_TEST_FOUR)); - assert_eq_const_safe!(<$T>::unbounded_shl(<$T>::MAX, 1), (<$T>::MAX << 1)); - assert_eq_const_safe!(<$T>::unbounded_shl(<$T>::MAX, 3), (<$T>::MAX << 3)); - assert_eq_const_safe!(<$T>::unbounded_shl(<$T>::MAX, 5), (<$T>::MAX << 5)); - assert_eq_const_safe!(<$T>::unbounded_shl(<$T>::MAX, SHIFT_AMOUNT_OVERFLOW), 0); - assert_eq_const_safe!(<$T>::unbounded_shl(<$T>::MAX, SHIFT_AMOUNT_OVERFLOW2), 0); - assert_eq_const_safe!(<$T>::unbounded_shl(<$T>::MAX, SHIFT_AMOUNT_OVERFLOW3), 0); + assert_eq_const_safe!($T: <$T>::unbounded_shl(<$T>::MAX, SHIFT_AMOUNT_TEST_ONE), (<$T>::MAX << SHIFT_AMOUNT_TEST_ONE)); + assert_eq_const_safe!($T: <$T>::unbounded_shl(<$T>::MAX, SHIFT_AMOUNT_TEST_TWO), (<$T>::MAX << SHIFT_AMOUNT_TEST_TWO)); + assert_eq_const_safe!($T: <$T>::unbounded_shl(<$T>::MAX, SHIFT_AMOUNT_TEST_THREE), (<$T>::MAX << SHIFT_AMOUNT_TEST_THREE)); + assert_eq_const_safe!($T: <$T>::unbounded_shl(<$T>::MAX, SHIFT_AMOUNT_TEST_FOUR), (<$T>::MAX << SHIFT_AMOUNT_TEST_FOUR)); + assert_eq_const_safe!($T: <$T>::unbounded_shl(<$T>::MAX, 1), (<$T>::MAX << 1)); + assert_eq_const_safe!($T: <$T>::unbounded_shl(<$T>::MAX, 3), (<$T>::MAX << 3)); + assert_eq_const_safe!($T: <$T>::unbounded_shl(<$T>::MAX, 5), (<$T>::MAX << 5)); + assert_eq_const_safe!($T: <$T>::unbounded_shl(<$T>::MAX, SHIFT_AMOUNT_OVERFLOW), 0); + assert_eq_const_safe!($T: <$T>::unbounded_shl(<$T>::MAX, SHIFT_AMOUNT_OVERFLOW2), 0); + assert_eq_const_safe!($T: <$T>::unbounded_shl(<$T>::MAX, SHIFT_AMOUNT_OVERFLOW3), 0); // 1 - assert_eq_const_safe!(<$T>::unbounded_shl(1, SHIFT_AMOUNT_TEST_ONE), (1 << SHIFT_AMOUNT_TEST_ONE)); - assert_eq_const_safe!(<$T>::unbounded_shl(1, SHIFT_AMOUNT_TEST_TWO), (1 << SHIFT_AMOUNT_TEST_TWO)); - assert_eq_const_safe!(<$T>::unbounded_shl(1, SHIFT_AMOUNT_TEST_THREE), (1 << SHIFT_AMOUNT_TEST_THREE)); - assert_eq_const_safe!(<$T>::unbounded_shl(1, SHIFT_AMOUNT_TEST_FOUR), (1 << SHIFT_AMOUNT_TEST_FOUR)); - assert_eq_const_safe!(<$T>::unbounded_shl(1, 1), (1 << 1)); - assert_eq_const_safe!(<$T>::unbounded_shl(1, 3), (1 << 3)); - assert_eq_const_safe!(<$T>::unbounded_shl(1, 5), (1 << 5)); - assert_eq_const_safe!(<$T>::unbounded_shl(1, SHIFT_AMOUNT_OVERFLOW), 0); - assert_eq_const_safe!(<$T>::unbounded_shl(1, SHIFT_AMOUNT_OVERFLOW), 0); - assert_eq_const_safe!(<$T>::unbounded_shl(1, SHIFT_AMOUNT_OVERFLOW2), 0); - assert_eq_const_safe!(<$T>::unbounded_shl(1, SHIFT_AMOUNT_OVERFLOW3), 0); + assert_eq_const_safe!($T: <$T>::unbounded_shl(1, SHIFT_AMOUNT_TEST_ONE), (1 << SHIFT_AMOUNT_TEST_ONE)); + assert_eq_const_safe!($T: <$T>::unbounded_shl(1, SHIFT_AMOUNT_TEST_TWO), (1 << SHIFT_AMOUNT_TEST_TWO)); + assert_eq_const_safe!($T: <$T>::unbounded_shl(1, SHIFT_AMOUNT_TEST_THREE), (1 << SHIFT_AMOUNT_TEST_THREE)); + assert_eq_const_safe!($T: <$T>::unbounded_shl(1, SHIFT_AMOUNT_TEST_FOUR), (1 << SHIFT_AMOUNT_TEST_FOUR)); + assert_eq_const_safe!($T: <$T>::unbounded_shl(1, 1), (1 << 1)); + assert_eq_const_safe!($T: <$T>::unbounded_shl(1, 3), (1 << 3)); + assert_eq_const_safe!($T: <$T>::unbounded_shl(1, 5), (1 << 5)); + assert_eq_const_safe!($T: <$T>::unbounded_shl(1, SHIFT_AMOUNT_OVERFLOW), 0); + assert_eq_const_safe!($T: <$T>::unbounded_shl(1, SHIFT_AMOUNT_OVERFLOW), 0); + assert_eq_const_safe!($T: <$T>::unbounded_shl(1, SHIFT_AMOUNT_OVERFLOW2), 0); + assert_eq_const_safe!($T: <$T>::unbounded_shl(1, SHIFT_AMOUNT_OVERFLOW3), 0); // !0 - assert_eq_const_safe!(<$T>::unbounded_shl(!0, SHIFT_AMOUNT_TEST_ONE), (!0 << SHIFT_AMOUNT_TEST_ONE)); - assert_eq_const_safe!(<$T>::unbounded_shl(!0, SHIFT_AMOUNT_TEST_TWO), (!0 << SHIFT_AMOUNT_TEST_TWO)); - assert_eq_const_safe!(<$T>::unbounded_shl(!0, SHIFT_AMOUNT_TEST_THREE), (!0 << SHIFT_AMOUNT_TEST_THREE)); - assert_eq_const_safe!(<$T>::unbounded_shl(!0, SHIFT_AMOUNT_TEST_FOUR), (!0 << SHIFT_AMOUNT_TEST_FOUR)); - assert_eq_const_safe!(<$T>::unbounded_shl(!0, 1), (!0 << 1)); - assert_eq_const_safe!(<$T>::unbounded_shl(!0, 3), (!0 << 3)); - assert_eq_const_safe!(<$T>::unbounded_shl(!0, 5), (!0 << 5)); - assert_eq_const_safe!(<$T>::unbounded_shl(!0, SHIFT_AMOUNT_OVERFLOW), 0); - assert_eq_const_safe!(<$T>::unbounded_shl(!0, SHIFT_AMOUNT_OVERFLOW), 0); - assert_eq_const_safe!(<$T>::unbounded_shl(!0, SHIFT_AMOUNT_OVERFLOW2), 0); - assert_eq_const_safe!(<$T>::unbounded_shl(!0, SHIFT_AMOUNT_OVERFLOW3), 0); + assert_eq_const_safe!($T: <$T>::unbounded_shl(!0, SHIFT_AMOUNT_TEST_ONE), (!0 << SHIFT_AMOUNT_TEST_ONE)); + assert_eq_const_safe!($T: <$T>::unbounded_shl(!0, SHIFT_AMOUNT_TEST_TWO), (!0 << SHIFT_AMOUNT_TEST_TWO)); + assert_eq_const_safe!($T: <$T>::unbounded_shl(!0, SHIFT_AMOUNT_TEST_THREE), (!0 << SHIFT_AMOUNT_TEST_THREE)); + assert_eq_const_safe!($T: <$T>::unbounded_shl(!0, SHIFT_AMOUNT_TEST_FOUR), (!0 << SHIFT_AMOUNT_TEST_FOUR)); + assert_eq_const_safe!($T: <$T>::unbounded_shl(!0, 1), (!0 << 1)); + assert_eq_const_safe!($T: <$T>::unbounded_shl(!0, 3), (!0 << 3)); + assert_eq_const_safe!($T: <$T>::unbounded_shl(!0, 5), (!0 << 5)); + assert_eq_const_safe!($T: <$T>::unbounded_shl(!0, SHIFT_AMOUNT_OVERFLOW), 0); + assert_eq_const_safe!($T: <$T>::unbounded_shl(!0, SHIFT_AMOUNT_OVERFLOW), 0); + assert_eq_const_safe!($T: <$T>::unbounded_shl(!0, SHIFT_AMOUNT_OVERFLOW2), 0); + assert_eq_const_safe!($T: <$T>::unbounded_shl(!0, SHIFT_AMOUNT_OVERFLOW3), 0); // 8 - assert_eq_const_safe!(<$T>::unbounded_shl(8, SHIFT_AMOUNT_TEST_ONE), (8 << SHIFT_AMOUNT_TEST_ONE)); - assert_eq_const_safe!(<$T>::unbounded_shl(8, SHIFT_AMOUNT_TEST_TWO), (8 << SHIFT_AMOUNT_TEST_TWO)); - assert_eq_const_safe!(<$T>::unbounded_shl(8, SHIFT_AMOUNT_TEST_THREE), (8 << SHIFT_AMOUNT_TEST_THREE)); - assert_eq_const_safe!(<$T>::unbounded_shl(8, SHIFT_AMOUNT_TEST_FOUR), (8 << SHIFT_AMOUNT_TEST_FOUR)); - assert_eq_const_safe!(<$T>::unbounded_shl(8, 1), (8 << 1)); - assert_eq_const_safe!(<$T>::unbounded_shl(8, 3), (8 << 3)); - assert_eq_const_safe!(<$T>::unbounded_shl(8, 5), (8 << 5)); - assert_eq_const_safe!(<$T>::unbounded_shl(8, SHIFT_AMOUNT_OVERFLOW), 0); - assert_eq_const_safe!(<$T>::unbounded_shl(8, SHIFT_AMOUNT_OVERFLOW2), 0); - assert_eq_const_safe!(<$T>::unbounded_shl(8, SHIFT_AMOUNT_OVERFLOW3), 0); + assert_eq_const_safe!($T: <$T>::unbounded_shl(8, SHIFT_AMOUNT_TEST_ONE), (8 << SHIFT_AMOUNT_TEST_ONE)); + assert_eq_const_safe!($T: <$T>::unbounded_shl(8, SHIFT_AMOUNT_TEST_TWO), (8 << SHIFT_AMOUNT_TEST_TWO)); + assert_eq_const_safe!($T: <$T>::unbounded_shl(8, SHIFT_AMOUNT_TEST_THREE), (8 << SHIFT_AMOUNT_TEST_THREE)); + assert_eq_const_safe!($T: <$T>::unbounded_shl(8, SHIFT_AMOUNT_TEST_FOUR), (8 << SHIFT_AMOUNT_TEST_FOUR)); + assert_eq_const_safe!($T: <$T>::unbounded_shl(8, 1), (8 << 1)); + assert_eq_const_safe!($T: <$T>::unbounded_shl(8, 3), (8 << 3)); + assert_eq_const_safe!($T: <$T>::unbounded_shl(8, 5), (8 << 5)); + assert_eq_const_safe!($T: <$T>::unbounded_shl(8, SHIFT_AMOUNT_OVERFLOW), 0); + assert_eq_const_safe!($T: <$T>::unbounded_shl(8, SHIFT_AMOUNT_OVERFLOW2), 0); + assert_eq_const_safe!($T: <$T>::unbounded_shl(8, SHIFT_AMOUNT_OVERFLOW3), 0); // 17 - assert_eq_const_safe!(<$T>::unbounded_shl(17, SHIFT_AMOUNT_TEST_ONE), (17 << SHIFT_AMOUNT_TEST_ONE)); - assert_eq_const_safe!(<$T>::unbounded_shl(17, SHIFT_AMOUNT_TEST_TWO), (17 << SHIFT_AMOUNT_TEST_TWO)); - assert_eq_const_safe!(<$T>::unbounded_shl(17, SHIFT_AMOUNT_TEST_THREE), (17 << SHIFT_AMOUNT_TEST_THREE)); - assert_eq_const_safe!(<$T>::unbounded_shl(17, SHIFT_AMOUNT_TEST_FOUR), (17 << SHIFT_AMOUNT_TEST_FOUR)); - assert_eq_const_safe!(<$T>::unbounded_shl(17, 1), (17 << 1)); - assert_eq_const_safe!(<$T>::unbounded_shl(17, 3), (17 << 3)); - assert_eq_const_safe!(<$T>::unbounded_shl(17, 5), (17 << 5)); - assert_eq_const_safe!(<$T>::unbounded_shl(17, SHIFT_AMOUNT_OVERFLOW), 0); - assert_eq_const_safe!(<$T>::unbounded_shl(17, SHIFT_AMOUNT_OVERFLOW2), 0); - assert_eq_const_safe!(<$T>::unbounded_shl(17, SHIFT_AMOUNT_OVERFLOW3), 0); + assert_eq_const_safe!($T: <$T>::unbounded_shl(17, SHIFT_AMOUNT_TEST_ONE), (17 << SHIFT_AMOUNT_TEST_ONE)); + assert_eq_const_safe!($T: <$T>::unbounded_shl(17, SHIFT_AMOUNT_TEST_TWO), (17 << SHIFT_AMOUNT_TEST_TWO)); + assert_eq_const_safe!($T: <$T>::unbounded_shl(17, SHIFT_AMOUNT_TEST_THREE), (17 << SHIFT_AMOUNT_TEST_THREE)); + assert_eq_const_safe!($T: <$T>::unbounded_shl(17, SHIFT_AMOUNT_TEST_FOUR), (17 << SHIFT_AMOUNT_TEST_FOUR)); + assert_eq_const_safe!($T: <$T>::unbounded_shl(17, 1), (17 << 1)); + assert_eq_const_safe!($T: <$T>::unbounded_shl(17, 3), (17 << 3)); + assert_eq_const_safe!($T: <$T>::unbounded_shl(17, 5), (17 << 5)); + assert_eq_const_safe!($T: <$T>::unbounded_shl(17, SHIFT_AMOUNT_OVERFLOW), 0); + assert_eq_const_safe!($T: <$T>::unbounded_shl(17, SHIFT_AMOUNT_OVERFLOW2), 0); + assert_eq_const_safe!($T: <$T>::unbounded_shl(17, SHIFT_AMOUNT_OVERFLOW3), 0); } fn test_unbounded_shr() { // <$T>::MIN - assert_eq_const_safe!(<$T>::unbounded_shr(<$T>::MIN, SHIFT_AMOUNT_TEST_ONE), (<$T>::MIN >> SHIFT_AMOUNT_TEST_ONE)); - assert_eq_const_safe!(<$T>::unbounded_shr(<$T>::MIN, SHIFT_AMOUNT_TEST_TWO), (<$T>::MIN >> SHIFT_AMOUNT_TEST_TWO)); - assert_eq_const_safe!(<$T>::unbounded_shr(<$T>::MIN, SHIFT_AMOUNT_TEST_THREE), (<$T>::MIN >> SHIFT_AMOUNT_TEST_THREE)); - assert_eq_const_safe!(<$T>::unbounded_shr(<$T>::MIN, SHIFT_AMOUNT_TEST_FOUR), (<$T>::MIN >> SHIFT_AMOUNT_TEST_FOUR)); - assert_eq_const_safe!(<$T>::unbounded_shr(<$T>::MIN, 1), (<$T>::MIN >> 1)); - assert_eq_const_safe!(<$T>::unbounded_shr(<$T>::MIN, 3), (<$T>::MIN >> 3)); - assert_eq_const_safe!(<$T>::unbounded_shr(<$T>::MIN, 5), (<$T>::MIN >> 5)); - assert_eq_const_safe!(<$T>::unbounded_shr(<$T>::MIN, SHIFT_AMOUNT_OVERFLOW), 0); - assert_eq_const_safe!(<$T>::unbounded_shr(<$T>::MIN, SHIFT_AMOUNT_OVERFLOW2), 0); - assert_eq_const_safe!(<$T>::unbounded_shr(<$T>::MIN, SHIFT_AMOUNT_OVERFLOW3), 0); + assert_eq_const_safe!($T: <$T>::unbounded_shr(<$T>::MIN, SHIFT_AMOUNT_TEST_ONE), (<$T>::MIN >> SHIFT_AMOUNT_TEST_ONE)); + assert_eq_const_safe!($T: <$T>::unbounded_shr(<$T>::MIN, SHIFT_AMOUNT_TEST_TWO), (<$T>::MIN >> SHIFT_AMOUNT_TEST_TWO)); + assert_eq_const_safe!($T: <$T>::unbounded_shr(<$T>::MIN, SHIFT_AMOUNT_TEST_THREE), (<$T>::MIN >> SHIFT_AMOUNT_TEST_THREE)); + assert_eq_const_safe!($T: <$T>::unbounded_shr(<$T>::MIN, SHIFT_AMOUNT_TEST_FOUR), (<$T>::MIN >> SHIFT_AMOUNT_TEST_FOUR)); + assert_eq_const_safe!($T: <$T>::unbounded_shr(<$T>::MIN, 1), (<$T>::MIN >> 1)); + assert_eq_const_safe!($T: <$T>::unbounded_shr(<$T>::MIN, 3), (<$T>::MIN >> 3)); + assert_eq_const_safe!($T: <$T>::unbounded_shr(<$T>::MIN, 5), (<$T>::MIN >> 5)); + assert_eq_const_safe!($T: <$T>::unbounded_shr(<$T>::MIN, SHIFT_AMOUNT_OVERFLOW), 0); + assert_eq_const_safe!($T: <$T>::unbounded_shr(<$T>::MIN, SHIFT_AMOUNT_OVERFLOW2), 0); + assert_eq_const_safe!($T: <$T>::unbounded_shr(<$T>::MIN, SHIFT_AMOUNT_OVERFLOW3), 0); // <$T>::MAX - assert_eq_const_safe!(<$T>::unbounded_shr(<$T>::MAX, SHIFT_AMOUNT_TEST_ONE), (<$T>::MAX >> SHIFT_AMOUNT_TEST_ONE)); - assert_eq_const_safe!(<$T>::unbounded_shr(<$T>::MAX, SHIFT_AMOUNT_TEST_TWO), (<$T>::MAX >> SHIFT_AMOUNT_TEST_TWO)); - assert_eq_const_safe!(<$T>::unbounded_shr(<$T>::MAX, SHIFT_AMOUNT_TEST_THREE), (<$T>::MAX >> SHIFT_AMOUNT_TEST_THREE)); - assert_eq_const_safe!(<$T>::unbounded_shr(<$T>::MAX, SHIFT_AMOUNT_TEST_FOUR), (<$T>::MAX >> SHIFT_AMOUNT_TEST_FOUR)); - assert_eq_const_safe!(<$T>::unbounded_shr(<$T>::MAX, 1), (<$T>::MAX >> 1)); - assert_eq_const_safe!(<$T>::unbounded_shr(<$T>::MAX, 3), (<$T>::MAX >> 3)); - assert_eq_const_safe!(<$T>::unbounded_shr(<$T>::MAX, 5), (<$T>::MAX >> 5)); - assert_eq_const_safe!(<$T>::unbounded_shr(<$T>::MAX, SHIFT_AMOUNT_OVERFLOW), 0); - assert_eq_const_safe!(<$T>::unbounded_shr(<$T>::MAX, SHIFT_AMOUNT_OVERFLOW2), 0); - assert_eq_const_safe!(<$T>::unbounded_shr(<$T>::MAX, SHIFT_AMOUNT_OVERFLOW3), 0); + assert_eq_const_safe!($T: <$T>::unbounded_shr(<$T>::MAX, SHIFT_AMOUNT_TEST_ONE), (<$T>::MAX >> SHIFT_AMOUNT_TEST_ONE)); + assert_eq_const_safe!($T: <$T>::unbounded_shr(<$T>::MAX, SHIFT_AMOUNT_TEST_TWO), (<$T>::MAX >> SHIFT_AMOUNT_TEST_TWO)); + assert_eq_const_safe!($T: <$T>::unbounded_shr(<$T>::MAX, SHIFT_AMOUNT_TEST_THREE), (<$T>::MAX >> SHIFT_AMOUNT_TEST_THREE)); + assert_eq_const_safe!($T: <$T>::unbounded_shr(<$T>::MAX, SHIFT_AMOUNT_TEST_FOUR), (<$T>::MAX >> SHIFT_AMOUNT_TEST_FOUR)); + assert_eq_const_safe!($T: <$T>::unbounded_shr(<$T>::MAX, 1), (<$T>::MAX >> 1)); + assert_eq_const_safe!($T: <$T>::unbounded_shr(<$T>::MAX, 3), (<$T>::MAX >> 3)); + assert_eq_const_safe!($T: <$T>::unbounded_shr(<$T>::MAX, 5), (<$T>::MAX >> 5)); + assert_eq_const_safe!($T: <$T>::unbounded_shr(<$T>::MAX, SHIFT_AMOUNT_OVERFLOW), 0); + assert_eq_const_safe!($T: <$T>::unbounded_shr(<$T>::MAX, SHIFT_AMOUNT_OVERFLOW2), 0); + assert_eq_const_safe!($T: <$T>::unbounded_shr(<$T>::MAX, SHIFT_AMOUNT_OVERFLOW3), 0); // 1 - assert_eq_const_safe!(<$T>::unbounded_shr(1, SHIFT_AMOUNT_TEST_ONE), (1 >> SHIFT_AMOUNT_TEST_ONE)); - assert_eq_const_safe!(<$T>::unbounded_shr(1, SHIFT_AMOUNT_TEST_TWO), (1 >> SHIFT_AMOUNT_TEST_TWO)); - assert_eq_const_safe!(<$T>::unbounded_shr(1, SHIFT_AMOUNT_TEST_THREE), (1 >> SHIFT_AMOUNT_TEST_THREE)); - assert_eq_const_safe!(<$T>::unbounded_shr(1, SHIFT_AMOUNT_TEST_FOUR), (1 >> SHIFT_AMOUNT_TEST_FOUR)); - assert_eq_const_safe!(<$T>::unbounded_shr(1, 1), (1 >> 1)); - assert_eq_const_safe!(<$T>::unbounded_shr(1, 3), (1 >> 3)); - assert_eq_const_safe!(<$T>::unbounded_shr(1, 5), (1 >> 5)); - assert_eq_const_safe!(<$T>::unbounded_shr(1, SHIFT_AMOUNT_OVERFLOW), 0); - assert_eq_const_safe!(<$T>::unbounded_shr(1, SHIFT_AMOUNT_OVERFLOW), 0); - assert_eq_const_safe!(<$T>::unbounded_shr(1, SHIFT_AMOUNT_OVERFLOW2), 0); - assert_eq_const_safe!(<$T>::unbounded_shr(1, SHIFT_AMOUNT_OVERFLOW3), 0); + assert_eq_const_safe!($T: <$T>::unbounded_shr(1, SHIFT_AMOUNT_TEST_ONE), (1 >> SHIFT_AMOUNT_TEST_ONE)); + assert_eq_const_safe!($T: <$T>::unbounded_shr(1, SHIFT_AMOUNT_TEST_TWO), (1 >> SHIFT_AMOUNT_TEST_TWO)); + assert_eq_const_safe!($T: <$T>::unbounded_shr(1, SHIFT_AMOUNT_TEST_THREE), (1 >> SHIFT_AMOUNT_TEST_THREE)); + assert_eq_const_safe!($T: <$T>::unbounded_shr(1, SHIFT_AMOUNT_TEST_FOUR), (1 >> SHIFT_AMOUNT_TEST_FOUR)); + assert_eq_const_safe!($T: <$T>::unbounded_shr(1, 1), (1 >> 1)); + assert_eq_const_safe!($T: <$T>::unbounded_shr(1, 3), (1 >> 3)); + assert_eq_const_safe!($T: <$T>::unbounded_shr(1, 5), (1 >> 5)); + assert_eq_const_safe!($T: <$T>::unbounded_shr(1, SHIFT_AMOUNT_OVERFLOW), 0); + assert_eq_const_safe!($T: <$T>::unbounded_shr(1, SHIFT_AMOUNT_OVERFLOW), 0); + assert_eq_const_safe!($T: <$T>::unbounded_shr(1, SHIFT_AMOUNT_OVERFLOW2), 0); + assert_eq_const_safe!($T: <$T>::unbounded_shr(1, SHIFT_AMOUNT_OVERFLOW3), 0); // !0 - assert_eq_const_safe!(<$T>::unbounded_shr(!0, SHIFT_AMOUNT_TEST_ONE), (!0 >> SHIFT_AMOUNT_TEST_ONE)); - assert_eq_const_safe!(<$T>::unbounded_shr(!0, SHIFT_AMOUNT_TEST_TWO), (!0 >> SHIFT_AMOUNT_TEST_TWO)); - assert_eq_const_safe!(<$T>::unbounded_shr(!0, SHIFT_AMOUNT_TEST_THREE), (!0 >> SHIFT_AMOUNT_TEST_THREE)); - assert_eq_const_safe!(<$T>::unbounded_shr(!0, SHIFT_AMOUNT_TEST_FOUR), (!0 >> SHIFT_AMOUNT_TEST_FOUR)); - assert_eq_const_safe!(<$T>::unbounded_shr(!0, 1), (!0 >> 1)); - assert_eq_const_safe!(<$T>::unbounded_shr(!0, 3), (!0 >> 3)); - assert_eq_const_safe!(<$T>::unbounded_shr(!0, 5), (!0 >> 5)); - assert_eq_const_safe!(<$T>::unbounded_shr(!0, SHIFT_AMOUNT_OVERFLOW), 0); - assert_eq_const_safe!(<$T>::unbounded_shr(!0, SHIFT_AMOUNT_OVERFLOW), 0); - assert_eq_const_safe!(<$T>::unbounded_shr(!0, SHIFT_AMOUNT_OVERFLOW2), 0); - assert_eq_const_safe!(<$T>::unbounded_shr(!0, SHIFT_AMOUNT_OVERFLOW3), 0); + assert_eq_const_safe!($T: <$T>::unbounded_shr(!0, SHIFT_AMOUNT_TEST_ONE), (!0 >> SHIFT_AMOUNT_TEST_ONE)); + assert_eq_const_safe!($T: <$T>::unbounded_shr(!0, SHIFT_AMOUNT_TEST_TWO), (!0 >> SHIFT_AMOUNT_TEST_TWO)); + assert_eq_const_safe!($T: <$T>::unbounded_shr(!0, SHIFT_AMOUNT_TEST_THREE), (!0 >> SHIFT_AMOUNT_TEST_THREE)); + assert_eq_const_safe!($T: <$T>::unbounded_shr(!0, SHIFT_AMOUNT_TEST_FOUR), (!0 >> SHIFT_AMOUNT_TEST_FOUR)); + assert_eq_const_safe!($T: <$T>::unbounded_shr(!0, 1), (!0 >> 1)); + assert_eq_const_safe!($T: <$T>::unbounded_shr(!0, 3), (!0 >> 3)); + assert_eq_const_safe!($T: <$T>::unbounded_shr(!0, 5), (!0 >> 5)); + assert_eq_const_safe!($T: <$T>::unbounded_shr(!0, SHIFT_AMOUNT_OVERFLOW), 0); + assert_eq_const_safe!($T: <$T>::unbounded_shr(!0, SHIFT_AMOUNT_OVERFLOW), 0); + assert_eq_const_safe!($T: <$T>::unbounded_shr(!0, SHIFT_AMOUNT_OVERFLOW2), 0); + assert_eq_const_safe!($T: <$T>::unbounded_shr(!0, SHIFT_AMOUNT_OVERFLOW3), 0); // 8 - assert_eq_const_safe!(<$T>::unbounded_shr(8, SHIFT_AMOUNT_TEST_ONE), (8 >> SHIFT_AMOUNT_TEST_ONE)); - assert_eq_const_safe!(<$T>::unbounded_shr(8, SHIFT_AMOUNT_TEST_TWO), (8 >> SHIFT_AMOUNT_TEST_TWO)); - assert_eq_const_safe!(<$T>::unbounded_shr(8, SHIFT_AMOUNT_TEST_THREE), (8 >> SHIFT_AMOUNT_TEST_THREE)); - assert_eq_const_safe!(<$T>::unbounded_shr(8, SHIFT_AMOUNT_TEST_FOUR), (8 >> SHIFT_AMOUNT_TEST_FOUR)); - assert_eq_const_safe!(<$T>::unbounded_shr(8, 1), (8 >> 1)); - assert_eq_const_safe!(<$T>::unbounded_shr(8, 3), (8 >> 3)); - assert_eq_const_safe!(<$T>::unbounded_shr(8, 5), (8 >> 5)); - assert_eq_const_safe!(<$T>::unbounded_shr(8, SHIFT_AMOUNT_OVERFLOW), 0); - assert_eq_const_safe!(<$T>::unbounded_shr(8, SHIFT_AMOUNT_OVERFLOW2), 0); - assert_eq_const_safe!(<$T>::unbounded_shr(8, SHIFT_AMOUNT_OVERFLOW3), 0); + assert_eq_const_safe!($T: <$T>::unbounded_shr(8, SHIFT_AMOUNT_TEST_ONE), (8 >> SHIFT_AMOUNT_TEST_ONE)); + assert_eq_const_safe!($T: <$T>::unbounded_shr(8, SHIFT_AMOUNT_TEST_TWO), (8 >> SHIFT_AMOUNT_TEST_TWO)); + assert_eq_const_safe!($T: <$T>::unbounded_shr(8, SHIFT_AMOUNT_TEST_THREE), (8 >> SHIFT_AMOUNT_TEST_THREE)); + assert_eq_const_safe!($T: <$T>::unbounded_shr(8, SHIFT_AMOUNT_TEST_FOUR), (8 >> SHIFT_AMOUNT_TEST_FOUR)); + assert_eq_const_safe!($T: <$T>::unbounded_shr(8, 1), (8 >> 1)); + assert_eq_const_safe!($T: <$T>::unbounded_shr(8, 3), (8 >> 3)); + assert_eq_const_safe!($T: <$T>::unbounded_shr(8, 5), (8 >> 5)); + assert_eq_const_safe!($T: <$T>::unbounded_shr(8, SHIFT_AMOUNT_OVERFLOW), 0); + assert_eq_const_safe!($T: <$T>::unbounded_shr(8, SHIFT_AMOUNT_OVERFLOW2), 0); + assert_eq_const_safe!($T: <$T>::unbounded_shr(8, SHIFT_AMOUNT_OVERFLOW3), 0); // 17 - assert_eq_const_safe!(<$T>::unbounded_shr(17, SHIFT_AMOUNT_TEST_ONE), (17 >> SHIFT_AMOUNT_TEST_ONE)); - assert_eq_const_safe!(<$T>::unbounded_shr(17, SHIFT_AMOUNT_TEST_TWO), (17 >> SHIFT_AMOUNT_TEST_TWO)); - assert_eq_const_safe!(<$T>::unbounded_shr(17, SHIFT_AMOUNT_TEST_THREE), (17 >> SHIFT_AMOUNT_TEST_THREE)); - assert_eq_const_safe!(<$T>::unbounded_shr(17, SHIFT_AMOUNT_TEST_FOUR), (17 >> SHIFT_AMOUNT_TEST_FOUR)); - assert_eq_const_safe!(<$T>::unbounded_shr(17, 1), (17 >> 1)); - assert_eq_const_safe!(<$T>::unbounded_shr(17, 3), (17 >> 3)); - assert_eq_const_safe!(<$T>::unbounded_shr(17, 5), (17 >> 5)); - assert_eq_const_safe!(<$T>::unbounded_shr(17, SHIFT_AMOUNT_OVERFLOW), 0); - assert_eq_const_safe!(<$T>::unbounded_shr(17, SHIFT_AMOUNT_OVERFLOW2), 0); - assert_eq_const_safe!(<$T>::unbounded_shr(17, SHIFT_AMOUNT_OVERFLOW3), 0); + assert_eq_const_safe!($T: <$T>::unbounded_shr(17, SHIFT_AMOUNT_TEST_ONE), (17 >> SHIFT_AMOUNT_TEST_ONE)); + assert_eq_const_safe!($T: <$T>::unbounded_shr(17, SHIFT_AMOUNT_TEST_TWO), (17 >> SHIFT_AMOUNT_TEST_TWO)); + assert_eq_const_safe!($T: <$T>::unbounded_shr(17, SHIFT_AMOUNT_TEST_THREE), (17 >> SHIFT_AMOUNT_TEST_THREE)); + assert_eq_const_safe!($T: <$T>::unbounded_shr(17, SHIFT_AMOUNT_TEST_FOUR), (17 >> SHIFT_AMOUNT_TEST_FOUR)); + assert_eq_const_safe!($T: <$T>::unbounded_shr(17, 1), (17 >> 1)); + assert_eq_const_safe!($T: <$T>::unbounded_shr(17, 3), (17 >> 3)); + assert_eq_const_safe!($T: <$T>::unbounded_shr(17, 5), (17 >> 5)); + assert_eq_const_safe!($T: <$T>::unbounded_shr(17, SHIFT_AMOUNT_OVERFLOW), 0); + assert_eq_const_safe!($T: <$T>::unbounded_shr(17, SHIFT_AMOUNT_OVERFLOW2), 0); + assert_eq_const_safe!($T: <$T>::unbounded_shr(17, SHIFT_AMOUNT_OVERFLOW3), 0); } } }; diff --git a/library/coretests/tests/ptr.rs b/library/coretests/tests/ptr.rs index 6091926084a35..bb60fb07468f9 100644 --- a/library/coretests/tests/ptr.rs +++ b/library/coretests/tests/ptr.rs @@ -949,6 +949,10 @@ fn test_const_swap_ptr() { // Make sure they still work. assert!(*s1.0.ptr == 1); assert!(*s2.0.ptr == 666); + + // This is where we'd swap again using a `u8` type and a `count` of `size_of::()` if it + // were not for the limitation of `swap_nonoverlapping` around pointers crossing multiple + // elements. }; } @@ -984,3 +988,56 @@ fn test_ptr_metadata_in_const() { assert_eq!(SLICE_META, 3); assert_eq!(DYN_META.size_of(), 42); } + +// See +const fn ptr_swap_nonoverlapping_is_untyped_inner() { + #[repr(C)] + struct HasPadding(usize, u8); + + let buf1: [usize; 2] = [1000, 2000]; + let buf2: [usize; 2] = [3000, 4000]; + + // HasPadding and [usize; 2] have the same size and alignment, + // so swap_nonoverlapping should treat them the same + assert!(size_of::() == size_of::<[usize; 2]>()); + assert!(align_of::() == align_of::<[usize; 2]>()); + + let mut b1 = buf1; + let mut b2 = buf2; + // Safety: b1 and b2 are distinct local variables, + // with the same size and alignment as HasPadding. + unsafe { + std::ptr::swap_nonoverlapping( + b1.as_mut_ptr().cast::(), + b2.as_mut_ptr().cast::(), + 1, + ); + } + assert!(b1[0] == buf2[0]); + assert!(b1[1] == buf2[1]); + assert!(b2[0] == buf1[0]); + assert!(b2[1] == buf1[1]); +} + +#[test] +fn test_ptr_swap_nonoverlapping_is_untyped() { + ptr_swap_nonoverlapping_is_untyped_inner(); + const { ptr_swap_nonoverlapping_is_untyped_inner() }; +} + +#[test] +fn test_ptr_default() { + #[derive(Default)] + struct PtrDefaultTest { + ptr: *const u64, + } + let default = PtrDefaultTest::default(); + assert!(default.ptr.is_null()); + + #[derive(Default)] + struct PtrMutDefaultTest { + ptr: *mut u64, + } + let default = PtrMutDefaultTest::default(); + assert!(default.ptr.is_null()); +} diff --git a/library/coretests/tests/str.rs b/library/coretests/tests/str.rs index f5066343af20a..5e23e910f0aeb 100644 --- a/library/coretests/tests/str.rs +++ b/library/coretests/tests/str.rs @@ -1 +1 @@ -// All `str` tests live in library/alloc/tests/str.rs +// All `str` tests live in library/alloctests/tests/str.rs diff --git a/library/panic_unwind/src/lib.rs b/library/panic_unwind/src/lib.rs index e5c1d6bdb3b06..50bd933aca204 100644 --- a/library/panic_unwind/src/lib.rs +++ b/library/panic_unwind/src/lib.rs @@ -79,11 +79,11 @@ cfg_if::cfg_if! { unsafe extern "C" { /// Handler in std called when a panic object is dropped outside of /// `catch_unwind`. - #[cfg_attr(not(bootstrap), rustc_std_internal_symbol)] + #[rustc_std_internal_symbol] fn __rust_drop_panic() -> !; /// Handler in std called when a foreign exception is caught. - #[cfg_attr(not(bootstrap), rustc_std_internal_symbol)] + #[rustc_std_internal_symbol] fn __rust_foreign_exception() -> !; } diff --git a/library/proc_macro/Cargo.toml b/library/proc_macro/Cargo.toml index 72cb4e4166f8e..b8bc2a3af4cd4 100644 --- a/library/proc_macro/Cargo.toml +++ b/library/proc_macro/Cargo.toml @@ -9,3 +9,4 @@ std = { path = "../std" } # `core` when resolving doc links. Without this line a different `core` will be # loaded from sysroot causing duplicate lang items and other similar errors. core = { path = "../core" } +rustc-literal-escaper = { version = "0.0.2", features = ["rustc-dep-of-std"] } diff --git a/library/proc_macro/src/bridge/client.rs b/library/proc_macro/src/bridge/client.rs index f6d4825c67b24..e7d547966a5d5 100644 --- a/library/proc_macro/src/bridge/client.rs +++ b/library/proc_macro/src/bridge/client.rs @@ -111,12 +111,6 @@ impl Clone for TokenStream { } } -impl Clone for SourceFile { - fn clone(&self) -> Self { - self.clone() - } -} - impl Span { pub(crate) fn def_site() -> Span { Bridge::with(|bridge| bridge.globals.def_site) diff --git a/library/proc_macro/src/bridge/mod.rs b/library/proc_macro/src/bridge/mod.rs index 52cc8fba0438d..75d82d7465404 100644 --- a/library/proc_macro/src/bridge/mod.rs +++ b/library/proc_macro/src/bridge/mod.rs @@ -9,7 +9,6 @@ #![deny(unsafe_code)] // proc_macros anyway don't work on wasm hosts so while both sides of this bridge can // be built with different versions of rustc, the wasm ABI changes don't really matter. -#![cfg_attr(bootstrap, allow(unknown_lints))] #![allow(wasm_c_abi)] use std::hash::Hash; @@ -82,16 +81,8 @@ macro_rules! with_api { $self: $S::TokenStream ) -> Vec>; }, - SourceFile { - fn drop($self: $S::SourceFile); - fn clone($self: &$S::SourceFile) -> $S::SourceFile; - fn eq($self: &$S::SourceFile, other: &$S::SourceFile) -> bool; - fn path($self: &$S::SourceFile) -> String; - fn is_real($self: &$S::SourceFile) -> bool; - }, Span { fn debug($self: $S::Span) -> String; - fn source_file($self: $S::Span) -> $S::SourceFile; fn parent($self: $S::Span) -> Option<$S::Span>; fn source($self: $S::Span) -> $S::Span; fn byte_range($self: $S::Span) -> Range; @@ -99,6 +90,8 @@ macro_rules! with_api { fn end($self: $S::Span) -> $S::Span; fn line($self: $S::Span) -> usize; fn column($self: $S::Span) -> usize; + fn file($self: $S::Span) -> String; + fn local_file($self: $S::Span) -> Option; fn join($self: $S::Span, other: $S::Span) -> Option<$S::Span>; fn subspan($self: $S::Span, start: Bound, end: Bound) -> Option<$S::Span>; fn resolved_at($self: $S::Span, at: $S::Span) -> $S::Span; @@ -121,7 +114,6 @@ macro_rules! with_api_handle_types { 'owned: FreeFunctions, TokenStream, - SourceFile, 'interned: Span, diff --git a/library/proc_macro/src/bridge/server.rs b/library/proc_macro/src/bridge/server.rs index 97e5a603c3ac9..5beda7c3c96e5 100644 --- a/library/proc_macro/src/bridge/server.rs +++ b/library/proc_macro/src/bridge/server.rs @@ -82,7 +82,6 @@ with_api_handle_types!(define_server_handles); pub trait Types { type FreeFunctions: 'static; type TokenStream: 'static + Clone; - type SourceFile: 'static + Clone; type Span: 'static + Copy + Eq + Hash; type Symbol: 'static; } diff --git a/library/proc_macro/src/lib.rs b/library/proc_macro/src/lib.rs index d9141eab5919f..79e9b8430b8d5 100644 --- a/library/proc_macro/src/lib.rs +++ b/library/proc_macro/src/lib.rs @@ -27,6 +27,7 @@ #![feature(panic_can_unwind)] #![feature(restricted_std)] #![feature(rustc_attrs)] +#![feature(stmt_expr_attributes)] #![feature(extend_one)] #![recursion_limit = "256"] #![allow(internal_features)] @@ -51,11 +52,24 @@ use std::{error, fmt}; #[unstable(feature = "proc_macro_diagnostic", issue = "54140")] pub use diagnostic::{Diagnostic, Level, MultiSpan}; +#[unstable(feature = "proc_macro_value", issue = "136652")] +pub use rustc_literal_escaper::EscapeError; +use rustc_literal_escaper::{MixedUnit, Mode, byte_from_char, unescape_mixed, unescape_unicode}; #[unstable(feature = "proc_macro_totokens", issue = "130977")] pub use to_tokens::ToTokens; use crate::escape::{EscapeOptions, escape_bytes}; +/// Errors returned when trying to retrieve a literal unescaped value. +#[unstable(feature = "proc_macro_value", issue = "136652")] +#[derive(Debug, PartialEq, Eq)] +pub enum ConversionErrorKind { + /// The literal failed to be escaped, take a look at [`EscapeError`] for more information. + FailedToUnescape(EscapeError), + /// Trying to convert a literal with the wrong type. + InvalidLiteralKind, +} + /// Determines whether proc_macro has been made accessible to the currently /// running program. /// @@ -477,12 +491,6 @@ impl Span { Span(bridge::client::Span::mixed_site()) } - /// The original source file into which this span points. - #[unstable(feature = "proc_macro_span", issue = "54725")] - pub fn source_file(&self) -> SourceFile { - SourceFile(self.0.source_file()) - } - /// The `Span` for the tokens in the previous macro expansion from which /// `self` was generated from, if any. #[unstable(feature = "proc_macro_span", issue = "54725")] @@ -505,13 +513,13 @@ impl Span { } /// Creates an empty span pointing to directly before this span. - #[unstable(feature = "proc_macro_span", issue = "54725")] + #[stable(feature = "proc_macro_span_location", since = "CURRENT_RUSTC_VERSION")] pub fn start(&self) -> Span { Span(self.0.start()) } /// Creates an empty span pointing to directly after this span. - #[unstable(feature = "proc_macro_span", issue = "54725")] + #[stable(feature = "proc_macro_span_location", since = "CURRENT_RUSTC_VERSION")] pub fn end(&self) -> Span { Span(self.0.end()) } @@ -519,7 +527,7 @@ impl Span { /// The one-indexed line of the source file where the span starts. /// /// To obtain the line of the span's end, use `span.end().line()`. - #[unstable(feature = "proc_macro_span", issue = "54725")] + #[stable(feature = "proc_macro_span_location", since = "CURRENT_RUSTC_VERSION")] pub fn line(&self) -> usize { self.0.line() } @@ -527,11 +535,30 @@ impl Span { /// The one-indexed column of the source file where the span starts. /// /// To obtain the column of the span's end, use `span.end().column()`. - #[unstable(feature = "proc_macro_span", issue = "54725")] + #[stable(feature = "proc_macro_span_location", since = "CURRENT_RUSTC_VERSION")] pub fn column(&self) -> usize { self.0.column() } + /// The path to the source file in which this span occurs, for display purposes. + /// + /// This might not correspond to a valid file system path. + /// It might be remapped (e.g. `"/src/lib.rs"`) or an artificial path (e.g. `""`). + #[stable(feature = "proc_macro_span_file", since = "CURRENT_RUSTC_VERSION")] + pub fn file(&self) -> String { + self.0.file() + } + + /// The path to the source file in which this span occurs on the local file system. + /// + /// This is the actual path on disk. It is unaffected by path remapping. + /// + /// This path should not be embedded in the output of the macro; prefer `file()` instead. + #[stable(feature = "proc_macro_span_file", since = "CURRENT_RUSTC_VERSION")] + pub fn local_file(&self) -> Option { + self.0.local_file().map(|s| PathBuf::from(s)) + } + /// Creates a new span encompassing `self` and `other`. /// /// Returns `None` if `self` and `other` are from different files. @@ -600,58 +627,6 @@ impl fmt::Debug for Span { } } -/// The source file of a given `Span`. -#[unstable(feature = "proc_macro_span", issue = "54725")] -#[derive(Clone)] -pub struct SourceFile(bridge::client::SourceFile); - -impl SourceFile { - /// Gets the path to this source file. - /// - /// ### Note - /// If the code span associated with this `SourceFile` was generated by an external macro, this - /// macro, this might not be an actual path on the filesystem. Use [`is_real`] to check. - /// - /// Also note that even if `is_real` returns `true`, if `--remap-path-prefix` was passed on - /// the command line, the path as given might not actually be valid. - /// - /// [`is_real`]: Self::is_real - #[unstable(feature = "proc_macro_span", issue = "54725")] - pub fn path(&self) -> PathBuf { - PathBuf::from(self.0.path()) - } - - /// Returns `true` if this source file is a real source file, and not generated by an external - /// macro's expansion. - #[unstable(feature = "proc_macro_span", issue = "54725")] - pub fn is_real(&self) -> bool { - // This is a hack until intercrate spans are implemented and we can have real source files - // for spans generated in external macros. - // https://github.com/rust-lang/rust/pull/43604#issuecomment-333334368 - self.0.is_real() - } -} - -#[unstable(feature = "proc_macro_span", issue = "54725")] -impl fmt::Debug for SourceFile { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.debug_struct("SourceFile") - .field("path", &self.path()) - .field("is_real", &self.is_real()) - .finish() - } -} - -#[unstable(feature = "proc_macro_span", issue = "54725")] -impl PartialEq for SourceFile { - fn eq(&self, other: &Self) -> bool { - self.0.eq(&other.0) - } -} - -#[unstable(feature = "proc_macro_span", issue = "54725")] -impl Eq for SourceFile {} - /// A single token or a delimited sequence of token trees (e.g., `[1, (), ..]`). #[stable(feature = "proc_macro_lib2", since = "1.29.0")] #[derive(Clone)] @@ -1451,6 +1426,107 @@ impl Literal { } }) } + + /// Returns the unescaped string value if the current literal is a string or a string literal. + #[unstable(feature = "proc_macro_value", issue = "136652")] + pub fn str_value(&self) -> Result { + self.0.symbol.with(|symbol| match self.0.kind { + bridge::LitKind::Str => { + if symbol.contains('\\') { + let mut buf = String::with_capacity(symbol.len()); + let mut error = None; + // Force-inlining here is aggressive but the closure is + // called on every char in the string, so it can be hot in + // programs with many long strings containing escapes. + unescape_unicode( + symbol, + Mode::Str, + &mut #[inline(always)] + |_, c| match c { + Ok(c) => buf.push(c), + Err(err) => { + if err.is_fatal() { + error = Some(ConversionErrorKind::FailedToUnescape(err)); + } + } + }, + ); + if let Some(error) = error { Err(error) } else { Ok(buf) } + } else { + Ok(symbol.to_string()) + } + } + bridge::LitKind::StrRaw(_) => Ok(symbol.to_string()), + _ => Err(ConversionErrorKind::InvalidLiteralKind), + }) + } + + /// Returns the unescaped string value if the current literal is a c-string or a c-string + /// literal. + #[unstable(feature = "proc_macro_value", issue = "136652")] + pub fn cstr_value(&self) -> Result, ConversionErrorKind> { + self.0.symbol.with(|symbol| match self.0.kind { + bridge::LitKind::CStr => { + let mut error = None; + let mut buf = Vec::with_capacity(symbol.len()); + + unescape_mixed(symbol, Mode::CStr, &mut |_span, c| match c { + Ok(MixedUnit::Char(c)) => { + buf.extend_from_slice(c.encode_utf8(&mut [0; 4]).as_bytes()) + } + Ok(MixedUnit::HighByte(b)) => buf.push(b), + Err(err) => { + if err.is_fatal() { + error = Some(ConversionErrorKind::FailedToUnescape(err)); + } + } + }); + if let Some(error) = error { + Err(error) + } else { + buf.push(0); + Ok(buf) + } + } + bridge::LitKind::CStrRaw(_) => { + // Raw strings have no escapes so we can convert the symbol + // directly to a `Lrc` after appending the terminating NUL + // char. + let mut buf = symbol.to_owned().into_bytes(); + buf.push(0); + Ok(buf) + } + _ => Err(ConversionErrorKind::InvalidLiteralKind), + }) + } + + /// Returns the unescaped string value if the current literal is a byte string or a byte string + /// literal. + #[unstable(feature = "proc_macro_value", issue = "136652")] + pub fn byte_str_value(&self) -> Result, ConversionErrorKind> { + self.0.symbol.with(|symbol| match self.0.kind { + bridge::LitKind::ByteStr => { + let mut buf = Vec::with_capacity(symbol.len()); + let mut error = None; + + unescape_unicode(symbol, Mode::ByteStr, &mut |_, c| match c { + Ok(c) => buf.push(byte_from_char(c)), + Err(err) => { + if err.is_fatal() { + error = Some(ConversionErrorKind::FailedToUnescape(err)); + } + } + }); + if let Some(error) = error { Err(error) } else { Ok(buf) } + } + bridge::LitKind::ByteStrRaw(_) => { + // Raw strings have no escapes so we can convert the symbol + // directly to a `Lrc`. + Ok(symbol.to_owned().into_bytes()) + } + _ => Err(ConversionErrorKind::InvalidLiteralKind), + }) + } } /// Parse a single literal from its stringified representation. diff --git a/library/profiler_builtins/build.rs b/library/profiler_builtins/build.rs index dd85239fa8cfd..fc1a9ecc1ec32 100644 --- a/library/profiler_builtins/build.rs +++ b/library/profiler_builtins/build.rs @@ -9,8 +9,14 @@ use std::path::PathBuf; fn main() { if let Ok(rt) = tracked_env_var("LLVM_PROFILER_RT_LIB") { - println!("cargo::rustc-link-lib=static:+verbatim={rt}"); - return; + let rt = PathBuf::from(rt); + if let Some(lib) = rt.file_name() { + if let Some(dir) = rt.parent() { + println!("cargo::rustc-link-search=native={}", dir.display()); + } + println!("cargo::rustc-link-lib=static:+verbatim={}", lib.to_str().unwrap()); + return; + } } let target_os = env::var("CARGO_CFG_TARGET_OS").expect("CARGO_CFG_TARGET_OS was not set"); diff --git a/library/std/Cargo.toml b/library/std/Cargo.toml index 176da603d58d7..7915196e8e899 100644 --- a/library/std/Cargo.toml +++ b/library/std/Cargo.toml @@ -18,7 +18,7 @@ cfg-if = { version = "1.0", features = ['rustc-dep-of-std'] } panic_unwind = { path = "../panic_unwind", optional = true } panic_abort = { path = "../panic_abort" } core = { path = "../core", public = true } -compiler_builtins = { version = "=0.1.152" } +compiler_builtins = { version = "=0.1.158" } unwind = { path = "../unwind" } hashbrown = { version = "0.15", default-features = false, features = [ 'rustc-dep-of-std', @@ -35,7 +35,7 @@ miniz_oxide = { version = "0.8.0", optional = true, default-features = false } addr2line = { version = "0.24.0", optional = true, default-features = false } [target.'cfg(not(all(windows, target_env = "msvc")))'.dependencies] -libc = { version = "0.2.171", default-features = false, features = [ +libc = { version = "0.2.172", default-features = false, features = [ 'rustc-dep-of-std', ], public = true } @@ -57,7 +57,7 @@ object = { version = "0.36.0", default-features = false, optional = true, featur 'archive', ] } -[target.'cfg(windows)'.dependencies.windows-targets] +[target.'cfg(any(windows, target_os = "cygwin"))'.dependencies.windows-targets] path = "../windows_targets" [dev-dependencies] @@ -83,8 +83,8 @@ wasi = { version = "0.11.0", features = [ ], default-features = false } [target.'cfg(target_os = "uefi")'.dependencies] -r-efi = { version = "4.5.0", features = ['rustc-dep-of-std'] } -r-efi-alloc = { version = "1.0.0", features = ['rustc-dep-of-std'] } +r-efi = { version = "5.2.0", features = ['rustc-dep-of-std'] } +r-efi-alloc = { version = "2.0.0", features = ['rustc-dep-of-std'] } [features] backtrace = [ @@ -121,7 +121,6 @@ debug_typeid = ["core/debug_typeid"] # https://github.com/rust-lang/stdarch/blob/master/crates/std_detect/Cargo.toml std_detect_file_io = ["std_detect/std_detect_file_io"] std_detect_dlsym_getauxval = ["std_detect/std_detect_dlsym_getauxval"] -std_detect_env_override = ["std_detect/std_detect_env_override"] # Enable using raw-dylib for Windows imports. # This will eventually be the default. @@ -163,4 +162,10 @@ check-cfg = [ # and to the `backtrace` crate which messes-up with Cargo list # of declared features, we therefor expect any feature cfg 'cfg(feature, values(any()))', + # Internal features aren't marked known config by default, we use these to + # gate tests. + 'cfg(target_has_reliable_f16)', + 'cfg(target_has_reliable_f16_math)', + 'cfg(target_has_reliable_f128)', + 'cfg(target_has_reliable_f128_math)', ] diff --git a/library/std/build.rs b/library/std/build.rs index d76d07a89f4e8..ef695601a448a 100644 --- a/library/std/build.rs +++ b/library/std/build.rs @@ -7,17 +7,6 @@ fn main() { let target_vendor = env::var("CARGO_CFG_TARGET_VENDOR").expect("CARGO_CFG_TARGET_VENDOR was not set"); let target_env = env::var("CARGO_CFG_TARGET_ENV").expect("CARGO_CFG_TARGET_ENV was not set"); - let target_abi = env::var("CARGO_CFG_TARGET_ABI").expect("CARGO_CFG_TARGET_ABI was not set"); - let target_pointer_width: u32 = env::var("CARGO_CFG_TARGET_POINTER_WIDTH") - .expect("CARGO_CFG_TARGET_POINTER_WIDTH was not set") - .parse() - .unwrap(); - let target_features: Vec<_> = env::var("CARGO_CFG_TARGET_FEATURE") - .unwrap_or_default() - .split(",") - .map(ToOwned::to_owned) - .collect(); - let is_miri = env::var_os("CARGO_CFG_MIRI").is_some(); println!("cargo:rustc-check-cfg=cfg(netbsd10)"); if target_os == "netbsd" && env::var("RUSTC_STD_NETBSD10").is_ok() { @@ -85,110 +74,4 @@ fn main() { println!("cargo:rustc-cfg=backtrace_in_libstd"); println!("cargo:rustc-env=STD_ENV_ARCH={}", env::var("CARGO_CFG_TARGET_ARCH").unwrap()); - - // Emit these on platforms that have no known ABI bugs, LLVM selection bugs, lowering bugs, - // missing symbols, or other problems, to determine when tests get run. - // If more broken platforms are found, please update the tracking issue at - // - // - // Some of these match arms are redundant; the goal is to separate reasons that the type is - // unreliable, even when multiple reasons might fail the same platform. - println!("cargo:rustc-check-cfg=cfg(reliable_f16)"); - println!("cargo:rustc-check-cfg=cfg(reliable_f128)"); - - // This is a step beyond only having the types and basic functions available. Math functions - // aren't consistently available or correct. - println!("cargo:rustc-check-cfg=cfg(reliable_f16_math)"); - println!("cargo:rustc-check-cfg=cfg(reliable_f128_math)"); - - let has_reliable_f16 = match (target_arch.as_str(), target_os.as_str()) { - // We can always enable these in Miri as that is not affected by codegen bugs. - _ if is_miri => true, - // Selection failure - ("s390x", _) => false, - // Unsupported - ("arm64ec", _) => false, - // LLVM crash - ("aarch64", _) if !target_features.iter().any(|f| f == "neon") => false, - // MinGW ABI bugs - ("x86_64", "windows") if target_env == "gnu" && target_abi != "llvm" => false, - // Infinite recursion - ("csky", _) => false, - ("hexagon", _) => false, - ("powerpc" | "powerpc64", _) => false, - ("sparc" | "sparc64", _) => false, - ("wasm32" | "wasm64", _) => false, - // `f16` support only requires that symbols converting to and from `f32` are available. We - // provide these in `compiler-builtins`, so `f16` should be available on all platforms that - // do not have other ABI issues or LLVM crashes. - _ => true, - }; - - let has_reliable_f128 = match (target_arch.as_str(), target_os.as_str()) { - // We can always enable these in Miri as that is not affected by codegen bugs. - _ if is_miri => true, - // Unsupported - ("arm64ec", _) => false, - // Selection bug - ("mips64" | "mips64r6", _) => false, - // Selection bug - ("nvptx64", _) => false, - // ABI bugs et al. (full - // list at ) - ("powerpc" | "powerpc64", _) => false, - // ABI unsupported - ("sparc", _) => false, - // Stack alignment bug . NB: tests may - // not fail if our compiler-builtins is linked. - ("x86", _) => false, - // MinGW ABI bugs - ("x86_64", "windows") if target_env == "gnu" && target_abi != "llvm" => false, - // There are no known problems on other platforms, so the only requirement is that symbols - // are available. `compiler-builtins` provides all symbols required for core `f128` - // support, so this should work for everything else. - _ => true, - }; - - // Configure platforms that have reliable basics but may have unreliable math. - - // LLVM is currently adding missing routines, - let has_reliable_f16_math = has_reliable_f16 - && match (target_arch.as_str(), target_os.as_str()) { - // FIXME: Disabled on Miri as the intrinsics are not implemented yet. - _ if is_miri => false, - // x86 has a crash for `powi`: - ("x86" | "x86_64", _) => false, - // Assume that working `f16` means working `f16` math for most platforms, since - // operations just go through `f32`. - _ => true, - }; - - let has_reliable_f128_math = has_reliable_f128 - && match (target_arch.as_str(), target_os.as_str()) { - // FIXME: Disabled on Miri as the intrinsics are not implemented yet. - _ if is_miri => false, - // LLVM lowers `fp128` math to `long double` symbols even on platforms where - // `long double` is not IEEE binary128. See - // . - // - // This rules out anything that doesn't have `long double` = `binary128`; <= 32 bits - // (ld is `f64`), anything other than Linux (Windows and MacOS use `f64`), and `x86` - // (ld is 80-bit extended precision). - ("x86_64", _) => false, - (_, "linux") if target_pointer_width == 64 => true, - _ => false, - }; - - if has_reliable_f16 { - println!("cargo:rustc-cfg=reliable_f16"); - } - if has_reliable_f128 { - println!("cargo:rustc-cfg=reliable_f128"); - } - if has_reliable_f16_math { - println!("cargo:rustc-cfg=reliable_f16_math"); - } - if has_reliable_f128_math { - println!("cargo:rustc-cfg=reliable_f128_math"); - } } diff --git a/library/std/src/alloc.rs b/library/std/src/alloc.rs index 5d2a304b41c5a..b574e9f3a25e3 100644 --- a/library/std/src/alloc.rs +++ b/library/std/src/alloc.rs @@ -57,7 +57,7 @@ #![stable(feature = "alloc_module", since = "1.28.0")] use core::ptr::NonNull; -use core::sync::atomic::{AtomicPtr, Ordering}; +use core::sync::atomic::{Atomic, AtomicPtr, Ordering}; use core::{hint, mem, ptr}; #[stable(feature = "alloc_module", since = "1.28.0")] @@ -287,7 +287,7 @@ unsafe impl Allocator for System { } } -static HOOK: AtomicPtr<()> = AtomicPtr::new(ptr::null_mut()); +static HOOK: Atomic<*mut ()> = AtomicPtr::new(ptr::null_mut()); /// Registers a custom allocation error hook, replacing any that was previously registered. /// @@ -348,7 +348,7 @@ fn default_alloc_error_hook(layout: Layout) { unsafe extern "Rust" { // This symbol is emitted by rustc next to __rust_alloc_error_handler. // Its value depends on the -Zoom={panic,abort} compiler option. - #[cfg_attr(not(bootstrap), rustc_std_internal_symbol)] + #[rustc_std_internal_symbol] static __rust_alloc_error_handler_should_panic: u8; } diff --git a/library/std/src/backtrace.rs b/library/std/src/backtrace.rs index 3e641ac5d9041..c3fcb0e2e42b0 100644 --- a/library/std/src/backtrace.rs +++ b/library/std/src/backtrace.rs @@ -92,8 +92,8 @@ use crate::backtrace_rs::{self, BytesOrWideString}; use crate::ffi::c_void; use crate::panic::UnwindSafe; use crate::sync::LazyLock; -use crate::sync::atomic::AtomicU8; use crate::sync::atomic::Ordering::Relaxed; +use crate::sync::atomic::{Atomic, AtomicU8}; use crate::sys::backtrace::{lock, output_filename, set_image_base}; use crate::{env, fmt}; @@ -254,7 +254,7 @@ impl Backtrace { // Cache the result of reading the environment variables to make // backtrace captures speedy, because otherwise reading environment // variables every time can be somewhat slow. - static ENABLED: AtomicU8 = AtomicU8::new(0); + static ENABLED: Atomic = AtomicU8::new(0); match ENABLED.load(Relaxed) { 0 => {} 1 => return false, @@ -432,7 +432,7 @@ mod helper { use super::*; pub(super) type LazyResolve = impl (FnOnce() -> Capture) + Send + Sync + UnwindSafe; - #[cfg_attr(not(bootstrap), define_opaque(LazyResolve))] + #[define_opaque(LazyResolve)] pub(super) fn lazy_resolve(mut capture: Capture) -> LazyResolve { move || { // Use the global backtrace lock to synchronize this as it's a diff --git a/library/std/src/collections/hash/map.rs b/library/std/src/collections/hash/map.rs index 2487f5a2a503f..961d6ee0665c1 100644 --- a/library/std/src/collections/hash/map.rs +++ b/library/std/src/collections/hash/map.rs @@ -973,6 +973,9 @@ where /// Returns an array of length `N` with the results of each query. For soundness, at most one /// mutable reference will be returned to any value. `None` will be used if the key is missing. /// + /// This method performs a check to ensure there are no duplicate keys, which currently has a time-complexity of O(n^2), + /// so be careful when passing many keys. + /// /// # Panics /// /// Panics if any keys are overlapping. @@ -1679,10 +1682,7 @@ impl<'a, K, V> Drain<'a, K, V> { /// ``` #[stable(feature = "hash_extract_if", since = "CURRENT_RUSTC_VERSION")] #[must_use = "iterators are lazy and do nothing unless consumed"] -pub struct ExtractIf<'a, K, V, F> -where - F: FnMut(&K, &mut V) -> bool, -{ +pub struct ExtractIf<'a, K, V, F> { base: base::ExtractIf<'a, K, V, F>, } @@ -2315,9 +2315,10 @@ where impl FusedIterator for ExtractIf<'_, K, V, F> where F: FnMut(&K, &mut V) -> bool {} #[stable(feature = "hash_extract_if", since = "CURRENT_RUSTC_VERSION")] -impl<'a, K, V, F> fmt::Debug for ExtractIf<'a, K, V, F> +impl fmt::Debug for ExtractIf<'_, K, V, F> where - F: FnMut(&K, &mut V) -> bool, + K: fmt::Debug, + V: fmt::Debug, { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_struct("ExtractIf").finish_non_exhaustive() diff --git a/library/std/src/collections/hash/set.rs b/library/std/src/collections/hash/set.rs index a547a9943c1a0..fa2f4f0a58fec 100644 --- a/library/std/src/collections/hash/set.rs +++ b/library/std/src/collections/hash/set.rs @@ -1391,10 +1391,7 @@ pub struct Drain<'a, K: 'a> { /// let mut extract_ifed = a.extract_if(|v| v % 2 == 0); /// ``` #[stable(feature = "hash_extract_if", since = "CURRENT_RUSTC_VERSION")] -pub struct ExtractIf<'a, K, F> -where - F: FnMut(&K) -> bool, -{ +pub struct ExtractIf<'a, K, F> { base: base::ExtractIf<'a, K, F>, } @@ -1694,9 +1691,9 @@ where impl FusedIterator for ExtractIf<'_, K, F> where F: FnMut(&K) -> bool {} #[stable(feature = "hash_extract_if", since = "CURRENT_RUSTC_VERSION")] -impl<'a, K, F> fmt::Debug for ExtractIf<'a, K, F> +impl fmt::Debug for ExtractIf<'_, K, F> where - F: FnMut(&K) -> bool, + K: fmt::Debug, { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_struct("ExtractIf").finish_non_exhaustive() diff --git a/library/std/src/env.rs b/library/std/src/env.rs index 05bd4345ea8dd..ce2dc79522076 100644 --- a/library/std/src/env.rs +++ b/library/std/src/env.rs @@ -12,9 +12,11 @@ use crate::error::Error; use crate::ffi::{OsStr, OsString}; +use crate::num::NonZero; +use crate::ops::Try; use crate::path::{Path, PathBuf}; -use crate::sys::os as os_imp; -use crate::{fmt, io, sys}; +use crate::sys::{env as env_imp, os as os_imp}; +use crate::{array, fmt, io, sys}; /// Returns the current working directory as a [`PathBuf`]. /// @@ -96,7 +98,7 @@ pub struct Vars { /// [`env::vars_os()`]: vars_os #[stable(feature = "env", since = "1.0.0")] pub struct VarsOs { - inner: os_imp::Env, + inner: env_imp::Env, } /// Returns an iterator of (variable, value) pairs of strings, for all the @@ -150,7 +152,7 @@ pub fn vars() -> Vars { #[must_use] #[stable(feature = "env", since = "1.0.0")] pub fn vars_os() -> VarsOs { - VarsOs { inner: os_imp::env() } + VarsOs { inner: env_imp::env() } } #[stable(feature = "env", since = "1.0.0")] @@ -259,7 +261,7 @@ pub fn var_os>(key: K) -> Option { } fn _var_os(key: &OsStr) -> Option { - os_imp::getenv(key) + env_imp::getenv(key) } /// The error type for operations interacting with environment variables. @@ -363,7 +365,7 @@ impl Error for VarError { #[stable(feature = "env", since = "1.0.0")] pub unsafe fn set_var, V: AsRef>(key: K, value: V) { let (key, value) = (key.as_ref(), value.as_ref()); - unsafe { os_imp::setenv(key, value) }.unwrap_or_else(|e| { + unsafe { env_imp::setenv(key, value) }.unwrap_or_else(|e| { panic!("failed to set environment variable `{key:?}` to `{value:?}`: {e}") }) } @@ -434,7 +436,7 @@ pub unsafe fn set_var, V: AsRef>(key: K, value: V) { #[stable(feature = "env", since = "1.0.0")] pub unsafe fn remove_var>(key: K) { let key = key.as_ref(); - unsafe { os_imp::unsetenv(key) } + unsafe { env_imp::unsetenv(key) } .unwrap_or_else(|e| panic!("failed to remove environment variable `{key:?}`: {e}")) } @@ -872,19 +874,36 @@ impl !Sync for Args {} #[stable(feature = "env", since = "1.0.0")] impl Iterator for Args { type Item = String; + fn next(&mut self) -> Option { self.inner.next().map(|s| s.into_string().unwrap()) } + + #[inline] fn size_hint(&self) -> (usize, Option) { self.inner.size_hint() } + + // Methods which skip args cannot simply delegate to the inner iterator, + // because `env::args` states that we will "panic during iteration if any + // argument to the process is not valid Unicode". + // + // This offers two possible interpretations: + // - a skipped argument is never encountered "during iteration" + // - even a skipped argument is encountered "during iteration" + // + // As a panic can be observed, we err towards validating even skipped + // arguments for now, though this is not explicitly promised by the API. } #[stable(feature = "env", since = "1.0.0")] impl ExactSizeIterator for Args { + #[inline] fn len(&self) -> usize { self.inner.len() } + + #[inline] fn is_empty(&self) -> bool { self.inner.is_empty() } @@ -914,19 +933,65 @@ impl !Sync for ArgsOs {} #[stable(feature = "env", since = "1.0.0")] impl Iterator for ArgsOs { type Item = OsString; + + #[inline] fn next(&mut self) -> Option { self.inner.next() } + + #[inline] + fn next_chunk( + &mut self, + ) -> Result<[OsString; N], array::IntoIter> { + self.inner.next_chunk() + } + + #[inline] fn size_hint(&self) -> (usize, Option) { self.inner.size_hint() } + + #[inline] + fn count(self) -> usize { + self.inner.len() + } + + #[inline] + fn last(self) -> Option { + self.inner.last() + } + + #[inline] + fn advance_by(&mut self, n: usize) -> Result<(), NonZero> { + self.inner.advance_by(n) + } + + #[inline] + fn try_fold(&mut self, init: B, f: F) -> R + where + F: FnMut(B, Self::Item) -> R, + R: Try, + { + self.inner.try_fold(init, f) + } + + #[inline] + fn fold(self, init: B, f: F) -> B + where + F: FnMut(B, Self::Item) -> B, + { + self.inner.fold(init, f) + } } #[stable(feature = "env", since = "1.0.0")] impl ExactSizeIterator for ArgsOs { + #[inline] fn len(&self) -> usize { self.inner.len() } + + #[inline] fn is_empty(&self) -> bool { self.inner.is_empty() } @@ -934,9 +999,15 @@ impl ExactSizeIterator for ArgsOs { #[stable(feature = "env_iterators", since = "1.12.0")] impl DoubleEndedIterator for ArgsOs { + #[inline] fn next_back(&mut self) -> Option { self.inner.next_back() } + + #[inline] + fn advance_back_by(&mut self, n: usize) -> Result<(), NonZero> { + self.inner.advance_back_by(n) + } } #[stable(feature = "std_debug", since = "1.16.0")] @@ -950,7 +1021,7 @@ impl fmt::Debug for ArgsOs { /// Constants associated with the current target #[stable(feature = "env", since = "1.0.0")] pub mod consts { - use crate::sys::env::os; + use crate::sys::env_consts::os; /// A string describing the architecture of the CPU that is currently in use. /// An example value may be: `"x86"`, `"arm"` or `"riscv64"`. diff --git a/library/std/src/f128.rs b/library/std/src/f128.rs index ede2196905118..2b416b13fa59c 100644 --- a/library/std/src/f128.rs +++ b/library/std/src/f128.rs @@ -22,7 +22,11 @@ impl f128 { /// /// ``` /// #![feature(f128)] - /// # #[cfg(reliable_f128_math)] { + /// # #![cfg_attr(not(bootstrap), feature(cfg_target_has_reliable_f16_f128))] + /// # #![cfg_attr(not(bootstrap), expect(internal_features))] + /// # #[cfg(not(miri))] + /// # #[cfg(not(bootstrap))] + /// # #[cfg(target_has_reliable_f128_math)] { /// /// let f = 3.7_f128; /// let g = 3.0_f128; @@ -49,7 +53,11 @@ impl f128 { /// /// ``` /// #![feature(f128)] - /// # #[cfg(reliable_f128_math)] { + /// # #![cfg_attr(not(bootstrap), feature(cfg_target_has_reliable_f16_f128))] + /// # #![cfg_attr(not(bootstrap), expect(internal_features))] + /// # #[cfg(not(miri))] + /// # #[cfg(not(bootstrap))] + /// # #[cfg(target_has_reliable_f128_math)] { /// /// let f = 3.01_f128; /// let g = 4.0_f128; @@ -76,7 +84,11 @@ impl f128 { /// /// ``` /// #![feature(f128)] - /// # #[cfg(reliable_f128_math)] { + /// # #![cfg_attr(not(bootstrap), feature(cfg_target_has_reliable_f16_f128))] + /// # #![cfg_attr(not(bootstrap), expect(internal_features))] + /// # #[cfg(not(miri))] + /// # #[cfg(not(bootstrap))] + /// # #[cfg(target_has_reliable_f128_math)] { /// /// let f = 3.3_f128; /// let g = -3.3_f128; @@ -108,7 +120,11 @@ impl f128 { /// /// ``` /// #![feature(f128)] - /// # #[cfg(reliable_f128_math)] { + /// # #![cfg_attr(not(bootstrap), feature(cfg_target_has_reliable_f16_f128))] + /// # #![cfg_attr(not(bootstrap), expect(internal_features))] + /// # #[cfg(not(miri))] + /// # #[cfg(not(bootstrap))] + /// # #[cfg(target_has_reliable_f128_math)] { /// /// let f = 3.3_f128; /// let g = -3.3_f128; @@ -138,7 +154,11 @@ impl f128 { /// /// ``` /// #![feature(f128)] - /// # #[cfg(reliable_f128_math)] { + /// # #![cfg_attr(not(bootstrap), feature(cfg_target_has_reliable_f16_f128))] + /// # #![cfg_attr(not(bootstrap), expect(internal_features))] + /// # #[cfg(not(miri))] + /// # #[cfg(not(bootstrap))] + /// # #[cfg(target_has_reliable_f128_math)] { /// /// let f = 3.7_f128; /// let g = 3.0_f128; @@ -166,7 +186,11 @@ impl f128 { /// /// ``` /// #![feature(f128)] - /// # #[cfg(reliable_f128_math)] { + /// # #![cfg_attr(not(bootstrap), feature(cfg_target_has_reliable_f16_f128))] + /// # #![cfg_attr(not(bootstrap), expect(internal_features))] + /// # #[cfg(not(miri))] + /// # #[cfg(not(bootstrap))] + /// # #[cfg(target_has_reliable_f128_math)] { /// /// let x = 3.6_f128; /// let y = -3.6_f128; @@ -203,7 +227,11 @@ impl f128 { /// /// ``` /// #![feature(f128)] - /// # #[cfg(reliable_f128_math)] { + /// # #![cfg_attr(not(bootstrap), feature(cfg_target_has_reliable_f16_f128))] + /// # #![cfg_attr(not(bootstrap), expect(internal_features))] + /// # #[cfg(not(miri))] + /// # #[cfg(not(bootstrap))] + /// # #[cfg(target_has_reliable_f128_math)] { /// /// let m = 10.0_f128; /// let x = 4.0_f128; @@ -247,7 +275,11 @@ impl f128 { /// /// ``` /// #![feature(f128)] - /// # #[cfg(reliable_f128_math)] { + /// # #![cfg_attr(not(bootstrap), feature(cfg_target_has_reliable_f16_f128))] + /// # #![cfg_attr(not(bootstrap), expect(internal_features))] + /// # #[cfg(not(miri))] + /// # #[cfg(not(bootstrap))] + /// # #[cfg(target_has_reliable_f128_math)] { /// /// let a: f128 = 7.0; /// let b = 4.0; @@ -289,7 +321,11 @@ impl f128 { /// /// ``` /// #![feature(f128)] - /// # #[cfg(reliable_f128_math)] { + /// # #![cfg_attr(not(bootstrap), feature(cfg_target_has_reliable_f16_f128))] + /// # #![cfg_attr(not(bootstrap), expect(internal_features))] + /// # #[cfg(not(miri))] + /// # #[cfg(not(bootstrap))] + /// # #[cfg(target_has_reliable_f128_math)] { /// /// let a: f128 = 7.0; /// let b = 4.0; @@ -326,7 +362,11 @@ impl f128 { /// /// ``` /// #![feature(f128)] - /// # #[cfg(reliable_f128_math)] { + /// # #![cfg_attr(not(bootstrap), feature(cfg_target_has_reliable_f16_f128))] + /// # #![cfg_attr(not(bootstrap), expect(internal_features))] + /// # #[cfg(not(miri))] + /// # #[cfg(not(bootstrap))] + /// # #[cfg(target_has_reliable_f128_math)] { /// /// let x = 2.0_f128; /// let abs_difference = (x.powi(2) - (x * x)).abs(); @@ -354,7 +394,11 @@ impl f128 { /// /// ``` /// #![feature(f128)] - /// # #[cfg(reliable_f128_math)] { + /// # #![cfg_attr(not(bootstrap), feature(cfg_target_has_reliable_f16_f128))] + /// # #![cfg_attr(not(bootstrap), expect(internal_features))] + /// # #[cfg(not(miri))] + /// # #[cfg(not(bootstrap))] + /// # #[cfg(target_has_reliable_f128_math)] { /// /// let x = 2.0_f128; /// let abs_difference = (x.powf(2.0) - (x * x)).abs(); @@ -386,7 +430,11 @@ impl f128 { /// /// ``` /// #![feature(f128)] - /// # #[cfg(reliable_f128_math)] { + /// # #![cfg_attr(not(bootstrap), feature(cfg_target_has_reliable_f16_f128))] + /// # #![cfg_attr(not(bootstrap), expect(internal_features))] + /// # #[cfg(not(miri))] + /// # #[cfg(not(bootstrap))] + /// # #[cfg(target_has_reliable_f128_math)] { /// /// let positive = 4.0_f128; /// let negative = -4.0_f128; @@ -417,7 +465,11 @@ impl f128 { /// /// ``` /// #![feature(f128)] - /// # #[cfg(reliable_f128_math)] { + /// # #![cfg_attr(not(bootstrap), feature(cfg_target_has_reliable_f16_f128))] + /// # #![cfg_attr(not(bootstrap), expect(internal_features))] + /// # #[cfg(not(miri))] + /// # #[cfg(not(bootstrap))] + /// # #[cfg(target_has_reliable_f128_math)] { /// /// let one = 1.0f128; /// // e^1 @@ -448,7 +500,11 @@ impl f128 { /// /// ``` /// #![feature(f128)] - /// # #[cfg(reliable_f128_math)] { + /// # #![cfg_attr(not(bootstrap), feature(cfg_target_has_reliable_f16_f128))] + /// # #![cfg_attr(not(bootstrap), expect(internal_features))] + /// # #[cfg(not(miri))] + /// # #[cfg(not(bootstrap))] + /// # #[cfg(target_has_reliable_f128_math)] { /// /// let f = 2.0f128; /// @@ -479,7 +535,11 @@ impl f128 { /// /// ``` /// #![feature(f128)] - /// # #[cfg(reliable_f128_math)] { + /// # #![cfg_attr(not(bootstrap), feature(cfg_target_has_reliable_f16_f128))] + /// # #![cfg_attr(not(bootstrap), expect(internal_features))] + /// # #[cfg(not(miri))] + /// # #[cfg(not(bootstrap))] + /// # #[cfg(target_has_reliable_f128_math)] { /// /// let one = 1.0f128; /// // e^1 @@ -495,7 +555,11 @@ impl f128 { /// Non-positive values: /// ``` /// #![feature(f128)] - /// # #[cfg(reliable_f128_math)] { + /// # #![cfg_attr(not(bootstrap), feature(cfg_target_has_reliable_f16_f128))] + /// # #![cfg_attr(not(bootstrap), expect(internal_features))] + /// # #[cfg(not(miri))] + /// # #[cfg(not(bootstrap))] + /// # #[cfg(target_has_reliable_f128_math)] { /// /// assert_eq!(0_f128.ln(), f128::NEG_INFINITY); /// assert!((-42_f128).ln().is_nan()); @@ -526,7 +590,11 @@ impl f128 { /// /// ``` /// #![feature(f128)] - /// # #[cfg(reliable_f128_math)] { + /// # #![cfg_attr(not(bootstrap), feature(cfg_target_has_reliable_f16_f128))] + /// # #![cfg_attr(not(bootstrap), expect(internal_features))] + /// # #[cfg(not(miri))] + /// # #[cfg(not(bootstrap))] + /// # #[cfg(target_has_reliable_f128_math)] { /// /// let five = 5.0f128; /// @@ -540,7 +608,11 @@ impl f128 { /// Non-positive values: /// ``` /// #![feature(f128)] - /// # #[cfg(reliable_f128_math)] { + /// # #![cfg_attr(not(bootstrap), feature(cfg_target_has_reliable_f16_f128))] + /// # #![cfg_attr(not(bootstrap), expect(internal_features))] + /// # #[cfg(not(miri))] + /// # #[cfg(not(bootstrap))] + /// # #[cfg(target_has_reliable_f128_math)] { /// /// assert_eq!(0_f128.log(10.0), f128::NEG_INFINITY); /// assert!((-42_f128).log(10.0).is_nan()); @@ -567,7 +639,11 @@ impl f128 { /// /// ``` /// #![feature(f128)] - /// # #[cfg(reliable_f128_math)] { + /// # #![cfg_attr(not(bootstrap), feature(cfg_target_has_reliable_f16_f128))] + /// # #![cfg_attr(not(bootstrap), expect(internal_features))] + /// # #[cfg(not(miri))] + /// # #[cfg(not(bootstrap))] + /// # #[cfg(target_has_reliable_f128_math)] { /// /// let two = 2.0f128; /// @@ -581,7 +657,11 @@ impl f128 { /// Non-positive values: /// ``` /// #![feature(f128)] - /// # #[cfg(reliable_f128_math)] { + /// # #![cfg_attr(not(bootstrap), feature(cfg_target_has_reliable_f16_f128))] + /// # #![cfg_attr(not(bootstrap), expect(internal_features))] + /// # #[cfg(not(miri))] + /// # #[cfg(not(bootstrap))] + /// # #[cfg(target_has_reliable_f128_math)] { /// /// assert_eq!(0_f128.log2(), f128::NEG_INFINITY); /// assert!((-42_f128).log2().is_nan()); @@ -608,7 +688,11 @@ impl f128 { /// /// ``` /// #![feature(f128)] - /// # #[cfg(reliable_f128_math)] { + /// # #![cfg_attr(not(bootstrap), feature(cfg_target_has_reliable_f16_f128))] + /// # #![cfg_attr(not(bootstrap), expect(internal_features))] + /// # #[cfg(not(miri))] + /// # #[cfg(not(bootstrap))] + /// # #[cfg(target_has_reliable_f128_math)] { /// /// let ten = 10.0f128; /// @@ -622,7 +706,11 @@ impl f128 { /// Non-positive values: /// ``` /// #![feature(f128)] - /// # #[cfg(reliable_f128_math)] { + /// # #![cfg_attr(not(bootstrap), feature(cfg_target_has_reliable_f16_f128))] + /// # #![cfg_attr(not(bootstrap), expect(internal_features))] + /// # #[cfg(not(miri))] + /// # #[cfg(not(bootstrap))] + /// # #[cfg(target_has_reliable_f128_math)] { /// /// assert_eq!(0_f128.log10(), f128::NEG_INFINITY); /// assert!((-42_f128).log10().is_nan()); @@ -651,7 +739,11 @@ impl f128 { /// /// ``` /// #![feature(f128)] - /// # #[cfg(reliable_f128_math)] { + /// # #![cfg_attr(not(bootstrap), feature(cfg_target_has_reliable_f16_f128))] + /// # #![cfg_attr(not(bootstrap), expect(internal_features))] + /// # #[cfg(not(miri))] + /// # #[cfg(not(bootstrap))] + /// # #[cfg(target_has_reliable_f128_math)] { /// /// let x = 8.0f128; /// @@ -666,7 +758,7 @@ impl f128 { #[unstable(feature = "f128", issue = "116909")] #[must_use = "method returns a new number and does not mutate the original value"] pub fn cbrt(self) -> f128 { - unsafe { cmath::cbrtf128(self) } + cmath::cbrtf128(self) } /// Compute the distance between the origin and a point (`x`, `y`) on the @@ -687,7 +779,11 @@ impl f128 { /// /// ``` /// #![feature(f128)] - /// # #[cfg(reliable_f128_math)] { + /// # #![cfg_attr(not(bootstrap), feature(cfg_target_has_reliable_f16_f128))] + /// # #![cfg_attr(not(bootstrap), expect(internal_features))] + /// # #[cfg(not(miri))] + /// # #[cfg(not(bootstrap))] + /// # #[cfg(target_has_reliable_f128_math)] { /// /// let x = 2.0f128; /// let y = 3.0f128; @@ -703,7 +799,7 @@ impl f128 { #[unstable(feature = "f128", issue = "116909")] #[must_use = "method returns a new number and does not mutate the original value"] pub fn hypot(self, other: f128) -> f128 { - unsafe { cmath::hypotf128(self, other) } + cmath::hypotf128(self, other) } /// Computes the sine of a number (in radians). @@ -717,7 +813,11 @@ impl f128 { /// /// ``` /// #![feature(f128)] - /// # #[cfg(reliable_f128_math)] { + /// # #![cfg_attr(not(bootstrap), feature(cfg_target_has_reliable_f16_f128))] + /// # #![cfg_attr(not(bootstrap), expect(internal_features))] + /// # #[cfg(not(miri))] + /// # #[cfg(not(bootstrap))] + /// # #[cfg(target_has_reliable_f128_math)] { /// /// let x = std::f128::consts::FRAC_PI_2; /// @@ -745,7 +845,11 @@ impl f128 { /// /// ``` /// #![feature(f128)] - /// # #[cfg(reliable_f128_math)] { + /// # #![cfg_attr(not(bootstrap), feature(cfg_target_has_reliable_f16_f128))] + /// # #![cfg_attr(not(bootstrap), expect(internal_features))] + /// # #[cfg(not(miri))] + /// # #[cfg(not(bootstrap))] + /// # #[cfg(target_has_reliable_f128_math)] { /// /// let x = 2.0 * std::f128::consts::PI; /// @@ -776,7 +880,11 @@ impl f128 { /// /// ``` /// #![feature(f128)] - /// # #[cfg(reliable_f128_math)] { + /// # #![cfg_attr(not(bootstrap), feature(cfg_target_has_reliable_f16_f128))] + /// # #![cfg_attr(not(bootstrap), expect(internal_features))] + /// # #[cfg(not(miri))] + /// # #[cfg(not(bootstrap))] + /// # #[cfg(target_has_reliable_f128_math)] { /// /// let x = std::f128::consts::FRAC_PI_4; /// let abs_difference = (x.tan() - 1.0).abs(); @@ -789,7 +897,7 @@ impl f128 { #[unstable(feature = "f128", issue = "116909")] #[must_use = "method returns a new number and does not mutate the original value"] pub fn tan(self) -> f128 { - unsafe { cmath::tanf128(self) } + cmath::tanf128(self) } /// Computes the arcsine of a number. Return value is in radians in @@ -808,7 +916,11 @@ impl f128 { /// /// ``` /// #![feature(f128)] - /// # #[cfg(reliable_f128_math)] { + /// # #![cfg_attr(not(bootstrap), feature(cfg_target_has_reliable_f16_f128))] + /// # #![cfg_attr(not(bootstrap), expect(internal_features))] + /// # #[cfg(not(miri))] + /// # #[cfg(not(bootstrap))] + /// # #[cfg(target_has_reliable_f128_math)] { /// /// let f = std::f128::consts::FRAC_PI_2; /// @@ -824,7 +936,7 @@ impl f128 { #[unstable(feature = "f128", issue = "116909")] #[must_use = "method returns a new number and does not mutate the original value"] pub fn asin(self) -> f128 { - unsafe { cmath::asinf128(self) } + cmath::asinf128(self) } /// Computes the arccosine of a number. Return value is in radians in @@ -843,7 +955,11 @@ impl f128 { /// /// ``` /// #![feature(f128)] - /// # #[cfg(reliable_f128_math)] { + /// # #![cfg_attr(not(bootstrap), feature(cfg_target_has_reliable_f16_f128))] + /// # #![cfg_attr(not(bootstrap), expect(internal_features))] + /// # #[cfg(not(miri))] + /// # #[cfg(not(bootstrap))] + /// # #[cfg(target_has_reliable_f128_math)] { /// /// let f = std::f128::consts::FRAC_PI_4; /// @@ -859,7 +975,7 @@ impl f128 { #[unstable(feature = "f128", issue = "116909")] #[must_use = "method returns a new number and does not mutate the original value"] pub fn acos(self) -> f128 { - unsafe { cmath::acosf128(self) } + cmath::acosf128(self) } /// Computes the arctangent of a number. Return value is in radians in the @@ -877,7 +993,11 @@ impl f128 { /// /// ``` /// #![feature(f128)] - /// # #[cfg(reliable_f128_math)] { + /// # #![cfg_attr(not(bootstrap), feature(cfg_target_has_reliable_f16_f128))] + /// # #![cfg_attr(not(bootstrap), expect(internal_features))] + /// # #[cfg(not(miri))] + /// # #[cfg(not(bootstrap))] + /// # #[cfg(target_has_reliable_f128_math)] { /// /// let f = 1.0f128; /// @@ -893,7 +1013,7 @@ impl f128 { #[unstable(feature = "f128", issue = "116909")] #[must_use = "method returns a new number and does not mutate the original value"] pub fn atan(self) -> f128 { - unsafe { cmath::atanf128(self) } + cmath::atanf128(self) } /// Computes the four quadrant arctangent of `self` (`y`) and `other` (`x`) in radians. @@ -915,7 +1035,11 @@ impl f128 { /// /// ``` /// #![feature(f128)] - /// # #[cfg(reliable_f128_math)] { + /// # #![cfg_attr(not(bootstrap), feature(cfg_target_has_reliable_f16_f128))] + /// # #![cfg_attr(not(bootstrap), expect(internal_features))] + /// # #[cfg(not(miri))] + /// # #[cfg(not(bootstrap))] + /// # #[cfg(target_has_reliable_f128_math)] { /// /// // Positive angles measured counter-clockwise /// // from positive x axis @@ -939,7 +1063,7 @@ impl f128 { #[unstable(feature = "f128", issue = "116909")] #[must_use = "method returns a new number and does not mutate the original value"] pub fn atan2(self, other: f128) -> f128 { - unsafe { cmath::atan2f128(self, other) } + cmath::atan2f128(self, other) } /// Simultaneously computes the sine and cosine of the number, `x`. Returns @@ -957,7 +1081,11 @@ impl f128 { /// /// ``` /// #![feature(f128)] - /// # #[cfg(reliable_f128_math)] { + /// # #![cfg_attr(not(bootstrap), feature(cfg_target_has_reliable_f16_f128))] + /// # #![cfg_attr(not(bootstrap), expect(internal_features))] + /// # #[cfg(not(miri))] + /// # #[cfg(not(bootstrap))] + /// # #[cfg(target_has_reliable_f128_math)] { /// /// let x = std::f128::consts::FRAC_PI_4; /// let f = x.sin_cos(); @@ -992,7 +1120,11 @@ impl f128 { /// /// ``` /// #![feature(f128)] - /// # #[cfg(reliable_f128_math)] { + /// # #![cfg_attr(not(bootstrap), feature(cfg_target_has_reliable_f16_f128))] + /// # #![cfg_attr(not(bootstrap), expect(internal_features))] + /// # #[cfg(not(miri))] + /// # #[cfg(not(bootstrap))] + /// # #[cfg(target_has_reliable_f128_math)] { /// /// let x = 1e-8_f128; /// @@ -1008,7 +1140,7 @@ impl f128 { #[unstable(feature = "f128", issue = "116909")] #[must_use = "method returns a new number and does not mutate the original value"] pub fn exp_m1(self) -> f128 { - unsafe { cmath::expm1f128(self) } + cmath::expm1f128(self) } /// Returns `ln(1+n)` (natural logarithm) more accurately than if @@ -1028,7 +1160,11 @@ impl f128 { /// /// ``` /// #![feature(f128)] - /// # #[cfg(reliable_f128_math)] { + /// # #![cfg_attr(not(bootstrap), feature(cfg_target_has_reliable_f16_f128))] + /// # #![cfg_attr(not(bootstrap), expect(internal_features))] + /// # #[cfg(not(miri))] + /// # #[cfg(not(bootstrap))] + /// # #[cfg(target_has_reliable_f128_math)] { /// /// let x = 1e-8_f128; /// @@ -1043,7 +1179,11 @@ impl f128 { /// Out-of-range values: /// ``` /// #![feature(f128)] - /// # #[cfg(reliable_f128_math)] { + /// # #![cfg_attr(not(bootstrap), feature(cfg_target_has_reliable_f16_f128))] + /// # #![cfg_attr(not(bootstrap), expect(internal_features))] + /// # #[cfg(not(miri))] + /// # #[cfg(not(bootstrap))] + /// # #[cfg(target_has_reliable_f128_math)] { /// /// assert_eq!((-1.0_f128).ln_1p(), f128::NEG_INFINITY); /// assert!((-2.0_f128).ln_1p().is_nan()); @@ -1055,7 +1195,7 @@ impl f128 { #[rustc_allow_incoherent_impl] #[unstable(feature = "f128", issue = "116909")] pub fn ln_1p(self) -> f128 { - unsafe { cmath::log1pf128(self) } + cmath::log1pf128(self) } /// Hyperbolic sine function. @@ -1072,7 +1212,11 @@ impl f128 { /// /// ``` /// #![feature(f128)] - /// # #[cfg(reliable_f128_math)] { + /// # #![cfg_attr(not(bootstrap), feature(cfg_target_has_reliable_f16_f128))] + /// # #![cfg_attr(not(bootstrap), expect(internal_features))] + /// # #[cfg(not(miri))] + /// # #[cfg(not(bootstrap))] + /// # #[cfg(target_has_reliable_f128_math)] { /// /// let e = std::f128::consts::E; /// let x = 1.0f128; @@ -1090,7 +1234,7 @@ impl f128 { #[unstable(feature = "f128", issue = "116909")] #[must_use = "method returns a new number and does not mutate the original value"] pub fn sinh(self) -> f128 { - unsafe { cmath::sinhf128(self) } + cmath::sinhf128(self) } /// Hyperbolic cosine function. @@ -1107,7 +1251,11 @@ impl f128 { /// /// ``` /// #![feature(f128)] - /// # #[cfg(reliable_f128_math)] { + /// # #![cfg_attr(not(bootstrap), feature(cfg_target_has_reliable_f16_f128))] + /// # #![cfg_attr(not(bootstrap), expect(internal_features))] + /// # #[cfg(not(miri))] + /// # #[cfg(not(bootstrap))] + /// # #[cfg(target_has_reliable_f128_math)] { /// /// let e = std::f128::consts::E; /// let x = 1.0f128; @@ -1125,7 +1273,7 @@ impl f128 { #[unstable(feature = "f128", issue = "116909")] #[must_use = "method returns a new number and does not mutate the original value"] pub fn cosh(self) -> f128 { - unsafe { cmath::coshf128(self) } + cmath::coshf128(self) } /// Hyperbolic tangent function. @@ -1142,7 +1290,11 @@ impl f128 { /// /// ``` /// #![feature(f128)] - /// # #[cfg(reliable_f128_math)] { + /// # #![cfg_attr(not(bootstrap), feature(cfg_target_has_reliable_f16_f128))] + /// # #![cfg_attr(not(bootstrap), expect(internal_features))] + /// # #[cfg(not(miri))] + /// # #[cfg(not(bootstrap))] + /// # #[cfg(target_has_reliable_f128_math)] { /// /// let e = std::f128::consts::E; /// let x = 1.0f128; @@ -1160,7 +1312,7 @@ impl f128 { #[unstable(feature = "f128", issue = "116909")] #[must_use = "method returns a new number and does not mutate the original value"] pub fn tanh(self) -> f128 { - unsafe { cmath::tanhf128(self) } + cmath::tanhf128(self) } /// Inverse hyperbolic sine function. @@ -1174,7 +1326,11 @@ impl f128 { /// /// ``` /// #![feature(f128)] - /// # #[cfg(reliable_f128_math)] { + /// # #![cfg_attr(not(bootstrap), feature(cfg_target_has_reliable_f16_f128))] + /// # #![cfg_attr(not(bootstrap), expect(internal_features))] + /// # #[cfg(not(miri))] + /// # #[cfg(not(bootstrap))] + /// # #[cfg(target_has_reliable_f128_math)] { /// /// let x = 1.0f128; /// let f = x.sinh().asinh(); @@ -1206,7 +1362,11 @@ impl f128 { /// /// ``` /// #![feature(f128)] - /// # #[cfg(reliable_f128_math)] { + /// # #![cfg_attr(not(bootstrap), feature(cfg_target_has_reliable_f16_f128))] + /// # #![cfg_attr(not(bootstrap), expect(internal_features))] + /// # #[cfg(not(miri))] + /// # #[cfg(not(bootstrap))] + /// # #[cfg(target_has_reliable_f128_math)] { /// /// let x = 1.0f128; /// let f = x.cosh().acosh(); @@ -1240,7 +1400,11 @@ impl f128 { /// /// ``` /// #![feature(f128)] - /// # #[cfg(reliable_f128_math)] { + /// # #![cfg_attr(not(bootstrap), feature(cfg_target_has_reliable_f16_f128))] + /// # #![cfg_attr(not(bootstrap), expect(internal_features))] + /// # #[cfg(not(miri))] + /// # #[cfg(not(bootstrap))] + /// # #[cfg(target_has_reliable_f128_math)] { /// /// let e = std::f128::consts::E; /// let f = e.tanh().atanh(); @@ -1274,7 +1438,11 @@ impl f128 { /// ``` /// #![feature(f128)] /// #![feature(float_gamma)] - /// # #[cfg(reliable_f128_math)] { + /// # #![cfg_attr(not(bootstrap), feature(cfg_target_has_reliable_f16_f128))] + /// # #![cfg_attr(not(bootstrap), expect(internal_features))] + /// # #[cfg(not(miri))] + /// # #[cfg(not(bootstrap))] + /// # #[cfg(target_has_reliable_f128_math)] { /// /// let x = 5.0f128; /// @@ -1289,7 +1457,7 @@ impl f128 { // #[unstable(feature = "float_gamma", issue = "99842")] #[must_use = "method returns a new number and does not mutate the original value"] pub fn gamma(self) -> f128 { - unsafe { cmath::tgammaf128(self) } + cmath::tgammaf128(self) } /// Natural logarithm of the absolute value of the gamma function @@ -1309,7 +1477,11 @@ impl f128 { /// ``` /// #![feature(f128)] /// #![feature(float_gamma)] - /// # #[cfg(reliable_f128_math)] { + /// # #![cfg_attr(not(bootstrap), feature(cfg_target_has_reliable_f16_f128))] + /// # #![cfg_attr(not(bootstrap), expect(internal_features))] + /// # #[cfg(not(miri))] + /// # #[cfg(not(bootstrap))] + /// # #[cfg(target_has_reliable_f128_math)] { /// /// let x = 2.0f128; /// @@ -1325,7 +1497,7 @@ impl f128 { #[must_use = "method returns a new number and does not mutate the original value"] pub fn ln_gamma(self) -> (f128, i32) { let mut signgamp: i32 = 0; - let x = unsafe { cmath::lgammaf128_r(self, &mut signgamp) }; + let x = cmath::lgammaf128_r(self, &mut signgamp); (x, signgamp) } @@ -1344,7 +1516,11 @@ impl f128 { /// ``` /// #![feature(f128)] /// #![feature(float_erf)] - /// # #[cfg(reliable_f128_math)] { + /// # #![cfg_attr(not(bootstrap), feature(cfg_target_has_reliable_f16_f128))] + /// # #![cfg_attr(not(bootstrap), expect(internal_features))] + /// # #[cfg(not(miri))] + /// # #[cfg(not(bootstrap))] + /// # #[cfg(target_has_reliable_f128_math)] { /// /// The error function relates what percent of a normal distribution lies /// /// within `x` standard deviations (scaled by `1/sqrt(2)`). /// fn within_standard_deviations(x: f128) -> f128 { @@ -1365,7 +1541,7 @@ impl f128 { // #[unstable(feature = "float_erf", issue = "136321")] #[inline] pub fn erf(self) -> f128 { - unsafe { cmath::erff128(self) } + cmath::erff128(self) } /// Complementary error function. @@ -1383,7 +1559,11 @@ impl f128 { /// ``` /// #![feature(f128)] /// #![feature(float_erf)] - /// # #[cfg(reliable_f128_math)] { + /// # #![cfg_attr(not(bootstrap), feature(cfg_target_has_reliable_f16_f128))] + /// # #![cfg_attr(not(bootstrap), expect(internal_features))] + /// # #[cfg(not(miri))] + /// # #[cfg(not(bootstrap))] + /// # #[cfg(target_has_reliable_f128_math)] { /// let x: f128 = 0.123; /// /// let one = x.erf() + x.erfc(); @@ -1398,6 +1578,6 @@ impl f128 { // #[unstable(feature = "float_erf", issue = "136321")] #[inline] pub fn erfc(self) -> f128 { - unsafe { cmath::erfcf128(self) } + cmath::erfcf128(self) } } diff --git a/library/std/src/f16.rs b/library/std/src/f16.rs index 286993d736b9c..3f88ab2d400e9 100644 --- a/library/std/src/f16.rs +++ b/library/std/src/f16.rs @@ -22,7 +22,11 @@ impl f16 { /// /// ``` /// #![feature(f16)] - /// # #[cfg(reliable_f16_math)] { + /// # #![cfg_attr(not(bootstrap), feature(cfg_target_has_reliable_f16_f128))] + /// # #![cfg_attr(not(bootstrap), expect(internal_features))] + /// # #[cfg(not(miri))] + /// # #[cfg(not(bootstrap))] + /// # #[cfg(target_has_reliable_f16_math)] { /// /// let f = 3.7_f16; /// let g = 3.0_f16; @@ -49,7 +53,11 @@ impl f16 { /// /// ``` /// #![feature(f16)] - /// # #[cfg(reliable_f16_math)] { + /// # #![cfg_attr(not(bootstrap), feature(cfg_target_has_reliable_f16_f128))] + /// # #![cfg_attr(not(bootstrap), expect(internal_features))] + /// # #[cfg(not(miri))] + /// # #[cfg(not(bootstrap))] + /// # #[cfg(target_has_reliable_f16_math)] { /// /// let f = 3.01_f16; /// let g = 4.0_f16; @@ -76,7 +84,11 @@ impl f16 { /// /// ``` /// #![feature(f16)] - /// # #[cfg(reliable_f16_math)] { + /// # #![cfg_attr(not(bootstrap), feature(cfg_target_has_reliable_f16_f128))] + /// # #![cfg_attr(not(bootstrap), expect(internal_features))] + /// # #[cfg(not(miri))] + /// # #[cfg(not(bootstrap))] + /// # #[cfg(target_has_reliable_f16_math)] { /// /// let f = 3.3_f16; /// let g = -3.3_f16; @@ -108,7 +120,11 @@ impl f16 { /// /// ``` /// #![feature(f16)] - /// # #[cfg(reliable_f16_math)] { + /// # #![cfg_attr(not(bootstrap), feature(cfg_target_has_reliable_f16_f128))] + /// # #![cfg_attr(not(bootstrap), expect(internal_features))] + /// # #[cfg(not(miri))] + /// # #[cfg(not(bootstrap))] + /// # #[cfg(target_has_reliable_f16_math)] { /// /// let f = 3.3_f16; /// let g = -3.3_f16; @@ -138,7 +154,11 @@ impl f16 { /// /// ``` /// #![feature(f16)] - /// # #[cfg(reliable_f16_math)] { + /// # #![cfg_attr(not(bootstrap), feature(cfg_target_has_reliable_f16_f128))] + /// # #![cfg_attr(not(bootstrap), expect(internal_features))] + /// # #[cfg(not(miri))] + /// # #[cfg(not(bootstrap))] + /// # #[cfg(target_has_reliable_f16_math)] { /// /// let f = 3.7_f16; /// let g = 3.0_f16; @@ -166,7 +186,11 @@ impl f16 { /// /// ``` /// #![feature(f16)] - /// # #[cfg(reliable_f16_math)] { + /// # #![cfg_attr(not(bootstrap), feature(cfg_target_has_reliable_f16_f128))] + /// # #![cfg_attr(not(bootstrap), expect(internal_features))] + /// # #[cfg(not(miri))] + /// # #[cfg(not(bootstrap))] + /// # #[cfg(target_has_reliable_f16_math)] { /// /// let x = 3.6_f16; /// let y = -3.6_f16; @@ -203,7 +227,11 @@ impl f16 { /// /// ``` /// #![feature(f16)] - /// # #[cfg(reliable_f16_math)] { + /// # #![cfg_attr(not(bootstrap), feature(cfg_target_has_reliable_f16_f128))] + /// # #![cfg_attr(not(bootstrap), expect(internal_features))] + /// # #[cfg(not(miri))] + /// # #[cfg(not(bootstrap))] + /// # #[cfg(target_has_reliable_f16_math)] { /// /// let m = 10.0_f16; /// let x = 4.0_f16; @@ -247,7 +275,11 @@ impl f16 { /// /// ``` /// #![feature(f16)] - /// # #[cfg(reliable_f16_math)] { + /// # #![cfg_attr(not(bootstrap), feature(cfg_target_has_reliable_f16_f128))] + /// # #![cfg_attr(not(bootstrap), expect(internal_features))] + /// # #[cfg(not(miri))] + /// # #[cfg(not(bootstrap))] + /// # #[cfg(target_has_reliable_f16_math)] { /// /// let a: f16 = 7.0; /// let b = 4.0; @@ -289,7 +321,11 @@ impl f16 { /// /// ``` /// #![feature(f16)] - /// # #[cfg(reliable_f16_math)] { + /// # #![cfg_attr(not(bootstrap), feature(cfg_target_has_reliable_f16_f128))] + /// # #![cfg_attr(not(bootstrap), expect(internal_features))] + /// # #[cfg(not(miri))] + /// # #[cfg(not(bootstrap))] + /// # #[cfg(target_has_reliable_f16_math)] { /// /// let a: f16 = 7.0; /// let b = 4.0; @@ -326,7 +362,11 @@ impl f16 { /// /// ``` /// #![feature(f16)] - /// # #[cfg(reliable_f16_math)] { + /// # #![cfg_attr(not(bootstrap), feature(cfg_target_has_reliable_f16_f128))] + /// # #![cfg_attr(not(bootstrap), expect(internal_features))] + /// # #[cfg(not(miri))] + /// # #[cfg(not(bootstrap))] + /// # #[cfg(target_has_reliable_f16_math)] { /// /// let x = 2.0_f16; /// let abs_difference = (x.powi(2) - (x * x)).abs(); @@ -354,7 +394,11 @@ impl f16 { /// /// ``` /// #![feature(f16)] - /// # #[cfg(reliable_f16_math)] { + /// # #![cfg_attr(not(bootstrap), feature(cfg_target_has_reliable_f16_f128))] + /// # #![cfg_attr(not(bootstrap), expect(internal_features))] + /// # #[cfg(not(miri))] + /// # #[cfg(not(bootstrap))] + /// # #[cfg(target_has_reliable_f16_math)] { /// /// let x = 2.0_f16; /// let abs_difference = (x.powf(2.0) - (x * x)).abs(); @@ -386,7 +430,11 @@ impl f16 { /// /// ``` /// #![feature(f16)] - /// # #[cfg(reliable_f16_math)] { + /// # #![cfg_attr(not(bootstrap), feature(cfg_target_has_reliable_f16_f128))] + /// # #![cfg_attr(not(bootstrap), expect(internal_features))] + /// # #[cfg(not(miri))] + /// # #[cfg(not(bootstrap))] + /// # #[cfg(target_has_reliable_f16_math)] { /// /// let positive = 4.0_f16; /// let negative = -4.0_f16; @@ -417,7 +465,11 @@ impl f16 { /// /// ``` /// #![feature(f16)] - /// # #[cfg(reliable_f16_math)] { + /// # #![cfg_attr(not(bootstrap), feature(cfg_target_has_reliable_f16_f128))] + /// # #![cfg_attr(not(bootstrap), expect(internal_features))] + /// # #[cfg(not(miri))] + /// # #[cfg(not(bootstrap))] + /// # #[cfg(target_has_reliable_f16_math)] { /// /// let one = 1.0f16; /// // e^1 @@ -448,7 +500,11 @@ impl f16 { /// /// ``` /// #![feature(f16)] - /// # #[cfg(reliable_f16_math)] { + /// # #![cfg_attr(not(bootstrap), feature(cfg_target_has_reliable_f16_f128))] + /// # #![cfg_attr(not(bootstrap), expect(internal_features))] + /// # #[cfg(not(miri))] + /// # #[cfg(not(bootstrap))] + /// # #[cfg(target_has_reliable_f16_math)] { /// /// let f = 2.0f16; /// @@ -479,7 +535,11 @@ impl f16 { /// /// ``` /// #![feature(f16)] - /// # #[cfg(reliable_f16_math)] { + /// # #![cfg_attr(not(bootstrap), feature(cfg_target_has_reliable_f16_f128))] + /// # #![cfg_attr(not(bootstrap), expect(internal_features))] + /// # #[cfg(not(miri))] + /// # #[cfg(not(bootstrap))] + /// # #[cfg(target_has_reliable_f16_math)] { /// /// let one = 1.0f16; /// // e^1 @@ -495,7 +555,11 @@ impl f16 { /// Non-positive values: /// ``` /// #![feature(f16)] - /// # #[cfg(reliable_f16_math)] { + /// # #![cfg_attr(not(bootstrap), feature(cfg_target_has_reliable_f16_f128))] + /// # #![cfg_attr(not(bootstrap), expect(internal_features))] + /// # #[cfg(not(miri))] + /// # #[cfg(not(bootstrap))] + /// # #[cfg(target_has_reliable_f16_math)] { /// /// assert_eq!(0_f16.ln(), f16::NEG_INFINITY); /// assert!((-42_f16).ln().is_nan()); @@ -526,7 +590,11 @@ impl f16 { /// /// ``` /// #![feature(f16)] - /// # #[cfg(reliable_f16_math)] { + /// # #![cfg_attr(not(bootstrap), feature(cfg_target_has_reliable_f16_f128))] + /// # #![cfg_attr(not(bootstrap), expect(internal_features))] + /// # #[cfg(not(miri))] + /// # #[cfg(not(bootstrap))] + /// # #[cfg(target_has_reliable_f16_math)] { /// /// let five = 5.0f16; /// @@ -540,7 +608,11 @@ impl f16 { /// Non-positive values: /// ``` /// #![feature(f16)] - /// # #[cfg(reliable_f16_math)] { + /// # #![cfg_attr(not(bootstrap), feature(cfg_target_has_reliable_f16_f128))] + /// # #![cfg_attr(not(bootstrap), expect(internal_features))] + /// # #[cfg(not(miri))] + /// # #[cfg(not(bootstrap))] + /// # #[cfg(target_has_reliable_f16_math)] { /// /// assert_eq!(0_f16.log(10.0), f16::NEG_INFINITY); /// assert!((-42_f16).log(10.0).is_nan()); @@ -567,7 +639,11 @@ impl f16 { /// /// ``` /// #![feature(f16)] - /// # #[cfg(reliable_f16_math)] { + /// # #![cfg_attr(not(bootstrap), feature(cfg_target_has_reliable_f16_f128))] + /// # #![cfg_attr(not(bootstrap), expect(internal_features))] + /// # #[cfg(not(miri))] + /// # #[cfg(not(bootstrap))] + /// # #[cfg(target_has_reliable_f16_math)] { /// /// let two = 2.0f16; /// @@ -581,7 +657,11 @@ impl f16 { /// Non-positive values: /// ``` /// #![feature(f16)] - /// # #[cfg(reliable_f16_math)] { + /// # #![cfg_attr(not(bootstrap), feature(cfg_target_has_reliable_f16_f128))] + /// # #![cfg_attr(not(bootstrap), expect(internal_features))] + /// # #[cfg(not(miri))] + /// # #[cfg(not(bootstrap))] + /// # #[cfg(target_has_reliable_f16_math)] { /// /// assert_eq!(0_f16.log2(), f16::NEG_INFINITY); /// assert!((-42_f16).log2().is_nan()); @@ -608,7 +688,11 @@ impl f16 { /// /// ``` /// #![feature(f16)] - /// # #[cfg(reliable_f16_math)] { + /// # #![cfg_attr(not(bootstrap), feature(cfg_target_has_reliable_f16_f128))] + /// # #![cfg_attr(not(bootstrap), expect(internal_features))] + /// # #[cfg(not(miri))] + /// # #[cfg(not(bootstrap))] + /// # #[cfg(target_has_reliable_f16_math)] { /// /// let ten = 10.0f16; /// @@ -622,7 +706,11 @@ impl f16 { /// Non-positive values: /// ``` /// #![feature(f16)] - /// # #[cfg(reliable_f16_math)] { + /// # #![cfg_attr(not(bootstrap), feature(cfg_target_has_reliable_f16_f128))] + /// # #![cfg_attr(not(bootstrap), expect(internal_features))] + /// # #[cfg(not(miri))] + /// # #[cfg(not(bootstrap))] + /// # #[cfg(target_has_reliable_f16_math)] { /// /// assert_eq!(0_f16.log10(), f16::NEG_INFINITY); /// assert!((-42_f16).log10().is_nan()); @@ -650,7 +738,11 @@ impl f16 { /// /// ``` /// #![feature(f16)] - /// # #[cfg(reliable_f16_math)] { + /// # #![cfg_attr(not(bootstrap), feature(cfg_target_has_reliable_f16_f128))] + /// # #![cfg_attr(not(bootstrap), expect(internal_features))] + /// # #[cfg(not(miri))] + /// # #[cfg(not(bootstrap))] + /// # #[cfg(target_has_reliable_f16_math)] { /// /// let x = 8.0f16; /// @@ -665,7 +757,7 @@ impl f16 { #[unstable(feature = "f16", issue = "116909")] #[must_use = "method returns a new number and does not mutate the original value"] pub fn cbrt(self) -> f16 { - (unsafe { cmath::cbrtf(self as f32) }) as f16 + cmath::cbrtf(self as f32) as f16 } /// Compute the distance between the origin and a point (`x`, `y`) on the @@ -685,7 +777,11 @@ impl f16 { /// /// ``` /// #![feature(f16)] - /// # #[cfg(reliable_f16_math)] { + /// # #![cfg_attr(not(bootstrap), feature(cfg_target_has_reliable_f16_f128))] + /// # #![cfg_attr(not(bootstrap), expect(internal_features))] + /// # #[cfg(not(miri))] + /// # #[cfg(not(bootstrap))] + /// # #[cfg(target_has_reliable_f16_math)] { /// /// let x = 2.0f16; /// let y = 3.0f16; @@ -701,7 +797,7 @@ impl f16 { #[unstable(feature = "f16", issue = "116909")] #[must_use = "method returns a new number and does not mutate the original value"] pub fn hypot(self, other: f16) -> f16 { - (unsafe { cmath::hypotf(self as f32, other as f32) }) as f16 + cmath::hypotf(self as f32, other as f32) as f16 } /// Computes the sine of a number (in radians). @@ -715,7 +811,11 @@ impl f16 { /// /// ``` /// #![feature(f16)] - /// # #[cfg(reliable_f16_math)] { + /// # #![cfg_attr(not(bootstrap), feature(cfg_target_has_reliable_f16_f128))] + /// # #![cfg_attr(not(bootstrap), expect(internal_features))] + /// # #[cfg(not(miri))] + /// # #[cfg(not(bootstrap))] + /// # #[cfg(target_has_reliable_f16_math)] { /// /// let x = std::f16::consts::FRAC_PI_2; /// @@ -743,7 +843,11 @@ impl f16 { /// /// ``` /// #![feature(f16)] - /// # #[cfg(reliable_f16_math)] { + /// # #![cfg_attr(not(bootstrap), feature(cfg_target_has_reliable_f16_f128))] + /// # #![cfg_attr(not(bootstrap), expect(internal_features))] + /// # #[cfg(not(miri))] + /// # #[cfg(not(bootstrap))] + /// # #[cfg(target_has_reliable_f16_math)] { /// /// let x = 2.0 * std::f16::consts::PI; /// @@ -774,7 +878,11 @@ impl f16 { /// /// ``` /// #![feature(f16)] - /// # #[cfg(reliable_f16_math)] { + /// # #![cfg_attr(not(bootstrap), feature(cfg_target_has_reliable_f16_f128))] + /// # #![cfg_attr(not(bootstrap), expect(internal_features))] + /// # #[cfg(not(miri))] + /// # #[cfg(not(bootstrap))] + /// # #[cfg(target_has_reliable_f16_math)] { /// /// let x = std::f16::consts::FRAC_PI_4; /// let abs_difference = (x.tan() - 1.0).abs(); @@ -787,7 +895,7 @@ impl f16 { #[unstable(feature = "f16", issue = "116909")] #[must_use = "method returns a new number and does not mutate the original value"] pub fn tan(self) -> f16 { - (unsafe { cmath::tanf(self as f32) }) as f16 + cmath::tanf(self as f32) as f16 } /// Computes the arcsine of a number. Return value is in radians in @@ -806,7 +914,11 @@ impl f16 { /// /// ``` /// #![feature(f16)] - /// # #[cfg(reliable_f16_math)] { + /// # #![cfg_attr(not(bootstrap), feature(cfg_target_has_reliable_f16_f128))] + /// # #![cfg_attr(not(bootstrap), expect(internal_features))] + /// # #[cfg(not(miri))] + /// # #[cfg(not(bootstrap))] + /// # #[cfg(target_has_reliable_f16_math)] { /// /// let f = std::f16::consts::FRAC_PI_2; /// @@ -822,7 +934,7 @@ impl f16 { #[unstable(feature = "f16", issue = "116909")] #[must_use = "method returns a new number and does not mutate the original value"] pub fn asin(self) -> f16 { - (unsafe { cmath::asinf(self as f32) }) as f16 + cmath::asinf(self as f32) as f16 } /// Computes the arccosine of a number. Return value is in radians in @@ -841,7 +953,11 @@ impl f16 { /// /// ``` /// #![feature(f16)] - /// # #[cfg(reliable_f16_math)] { + /// # #![cfg_attr(not(bootstrap), feature(cfg_target_has_reliable_f16_f128))] + /// # #![cfg_attr(not(bootstrap), expect(internal_features))] + /// # #[cfg(not(miri))] + /// # #[cfg(not(bootstrap))] + /// # #[cfg(target_has_reliable_f16_math)] { /// /// let f = std::f16::consts::FRAC_PI_4; /// @@ -857,7 +973,7 @@ impl f16 { #[unstable(feature = "f16", issue = "116909")] #[must_use = "method returns a new number and does not mutate the original value"] pub fn acos(self) -> f16 { - (unsafe { cmath::acosf(self as f32) }) as f16 + cmath::acosf(self as f32) as f16 } /// Computes the arctangent of a number. Return value is in radians in the @@ -875,7 +991,11 @@ impl f16 { /// /// ``` /// #![feature(f16)] - /// # #[cfg(reliable_f16_math)] { + /// # #![cfg_attr(not(bootstrap), feature(cfg_target_has_reliable_f16_f128))] + /// # #![cfg_attr(not(bootstrap), expect(internal_features))] + /// # #[cfg(not(miri))] + /// # #[cfg(not(bootstrap))] + /// # #[cfg(target_has_reliable_f16_math)] { /// /// let f = 1.0f16; /// @@ -891,7 +1011,7 @@ impl f16 { #[unstable(feature = "f16", issue = "116909")] #[must_use = "method returns a new number and does not mutate the original value"] pub fn atan(self) -> f16 { - (unsafe { cmath::atanf(self as f32) }) as f16 + cmath::atanf(self as f32) as f16 } /// Computes the four quadrant arctangent of `self` (`y`) and `other` (`x`) in radians. @@ -913,7 +1033,11 @@ impl f16 { /// /// ``` /// #![feature(f16)] - /// # #[cfg(reliable_f16_math)] { + /// # #![cfg_attr(not(bootstrap), feature(cfg_target_has_reliable_f16_f128))] + /// # #![cfg_attr(not(bootstrap), expect(internal_features))] + /// # #[cfg(not(miri))] + /// # #[cfg(not(bootstrap))] + /// # #[cfg(target_has_reliable_f16_math)] { /// /// // Positive angles measured counter-clockwise /// // from positive x axis @@ -937,7 +1061,7 @@ impl f16 { #[unstable(feature = "f16", issue = "116909")] #[must_use = "method returns a new number and does not mutate the original value"] pub fn atan2(self, other: f16) -> f16 { - (unsafe { cmath::atan2f(self as f32, other as f32) }) as f16 + cmath::atan2f(self as f32, other as f32) as f16 } /// Simultaneously computes the sine and cosine of the number, `x`. Returns @@ -955,7 +1079,11 @@ impl f16 { /// /// ``` /// #![feature(f16)] - /// # #[cfg(reliable_f16_math)] { + /// # #![cfg_attr(not(bootstrap), feature(cfg_target_has_reliable_f16_f128))] + /// # #![cfg_attr(not(bootstrap), expect(internal_features))] + /// # #[cfg(not(miri))] + /// # #[cfg(not(bootstrap))] + /// # #[cfg(target_has_reliable_f16_math)] { /// /// let x = std::f16::consts::FRAC_PI_4; /// let f = x.sin_cos(); @@ -990,7 +1118,11 @@ impl f16 { /// /// ``` /// #![feature(f16)] - /// # #[cfg(reliable_f16_math)] { + /// # #![cfg_attr(not(bootstrap), feature(cfg_target_has_reliable_f16_f128))] + /// # #![cfg_attr(not(bootstrap), expect(internal_features))] + /// # #[cfg(not(miri))] + /// # #[cfg(not(bootstrap))] + /// # #[cfg(target_has_reliable_f16_math)] { /// /// let x = 1e-4_f16; /// @@ -1006,7 +1138,7 @@ impl f16 { #[unstable(feature = "f16", issue = "116909")] #[must_use = "method returns a new number and does not mutate the original value"] pub fn exp_m1(self) -> f16 { - (unsafe { cmath::expm1f(self as f32) }) as f16 + cmath::expm1f(self as f32) as f16 } /// Returns `ln(1+n)` (natural logarithm) more accurately than if @@ -1026,7 +1158,11 @@ impl f16 { /// /// ``` /// #![feature(f16)] - /// # #[cfg(reliable_f16_math)] { + /// # #![cfg_attr(not(bootstrap), feature(cfg_target_has_reliable_f16_f128))] + /// # #![cfg_attr(not(bootstrap), expect(internal_features))] + /// # #[cfg(not(miri))] + /// # #[cfg(not(bootstrap))] + /// # #[cfg(target_has_reliable_f16_math)] { /// /// let x = 1e-4_f16; /// @@ -1041,7 +1177,11 @@ impl f16 { /// Out-of-range values: /// ``` /// #![feature(f16)] - /// # #[cfg(reliable_f16_math)] { + /// # #![cfg_attr(not(bootstrap), feature(cfg_target_has_reliable_f16_f128))] + /// # #![cfg_attr(not(bootstrap), expect(internal_features))] + /// # #[cfg(not(miri))] + /// # #[cfg(not(bootstrap))] + /// # #[cfg(target_has_reliable_f16_math)] { /// /// assert_eq!((-1.0_f16).ln_1p(), f16::NEG_INFINITY); /// assert!((-2.0_f16).ln_1p().is_nan()); @@ -1053,7 +1193,7 @@ impl f16 { #[unstable(feature = "f16", issue = "116909")] #[must_use = "method returns a new number and does not mutate the original value"] pub fn ln_1p(self) -> f16 { - (unsafe { cmath::log1pf(self as f32) }) as f16 + cmath::log1pf(self as f32) as f16 } /// Hyperbolic sine function. @@ -1070,7 +1210,11 @@ impl f16 { /// /// ``` /// #![feature(f16)] - /// # #[cfg(reliable_f16_math)] { + /// # #![cfg_attr(not(bootstrap), feature(cfg_target_has_reliable_f16_f128))] + /// # #![cfg_attr(not(bootstrap), expect(internal_features))] + /// # #[cfg(not(miri))] + /// # #[cfg(not(bootstrap))] + /// # #[cfg(target_has_reliable_f16_math)] { /// /// let e = std::f16::consts::E; /// let x = 1.0f16; @@ -1088,7 +1232,7 @@ impl f16 { #[unstable(feature = "f16", issue = "116909")] #[must_use = "method returns a new number and does not mutate the original value"] pub fn sinh(self) -> f16 { - (unsafe { cmath::sinhf(self as f32) }) as f16 + cmath::sinhf(self as f32) as f16 } /// Hyperbolic cosine function. @@ -1105,7 +1249,11 @@ impl f16 { /// /// ``` /// #![feature(f16)] - /// # #[cfg(reliable_f16_math)] { + /// # #![cfg_attr(not(bootstrap), feature(cfg_target_has_reliable_f16_f128))] + /// # #![cfg_attr(not(bootstrap), expect(internal_features))] + /// # #[cfg(not(miri))] + /// # #[cfg(not(bootstrap))] + /// # #[cfg(target_has_reliable_f16_math)] { /// /// let e = std::f16::consts::E; /// let x = 1.0f16; @@ -1123,7 +1271,7 @@ impl f16 { #[unstable(feature = "f16", issue = "116909")] #[must_use = "method returns a new number and does not mutate the original value"] pub fn cosh(self) -> f16 { - (unsafe { cmath::coshf(self as f32) }) as f16 + cmath::coshf(self as f32) as f16 } /// Hyperbolic tangent function. @@ -1140,7 +1288,11 @@ impl f16 { /// /// ``` /// #![feature(f16)] - /// # #[cfg(reliable_f16_math)] { + /// # #![cfg_attr(not(bootstrap), feature(cfg_target_has_reliable_f16_f128))] + /// # #![cfg_attr(not(bootstrap), expect(internal_features))] + /// # #[cfg(not(miri))] + /// # #[cfg(not(bootstrap))] + /// # #[cfg(target_has_reliable_f16_math)] { /// /// let e = std::f16::consts::E; /// let x = 1.0f16; @@ -1158,7 +1310,7 @@ impl f16 { #[unstable(feature = "f16", issue = "116909")] #[must_use = "method returns a new number and does not mutate the original value"] pub fn tanh(self) -> f16 { - (unsafe { cmath::tanhf(self as f32) }) as f16 + cmath::tanhf(self as f32) as f16 } /// Inverse hyperbolic sine function. @@ -1172,7 +1324,11 @@ impl f16 { /// /// ``` /// #![feature(f16)] - /// # #[cfg(reliable_f16_math)] { + /// # #![cfg_attr(not(bootstrap), feature(cfg_target_has_reliable_f16_f128))] + /// # #![cfg_attr(not(bootstrap), expect(internal_features))] + /// # #[cfg(not(miri))] + /// # #[cfg(not(bootstrap))] + /// # #[cfg(target_has_reliable_f16_math)] { /// /// let x = 1.0f16; /// let f = x.sinh().asinh(); @@ -1204,7 +1360,11 @@ impl f16 { /// /// ``` /// #![feature(f16)] - /// # #[cfg(reliable_f16_math)] { + /// # #![cfg_attr(not(bootstrap), feature(cfg_target_has_reliable_f16_f128))] + /// # #![cfg_attr(not(bootstrap), expect(internal_features))] + /// # #[cfg(not(miri))] + /// # #[cfg(not(bootstrap))] + /// # #[cfg(target_has_reliable_f16_math)] { /// /// let x = 1.0f16; /// let f = x.cosh().acosh(); @@ -1238,7 +1398,11 @@ impl f16 { /// /// ``` /// #![feature(f16)] - /// # #[cfg(reliable_f16_math)] { + /// # #![cfg_attr(not(bootstrap), feature(cfg_target_has_reliable_f16_f128))] + /// # #![cfg_attr(not(bootstrap), expect(internal_features))] + /// # #[cfg(not(miri))] + /// # #[cfg(not(bootstrap))] + /// # #[cfg(target_has_reliable_f16_math)] { /// /// let e = std::f16::consts::E; /// let f = e.tanh().atanh(); @@ -1272,7 +1436,11 @@ impl f16 { /// ``` /// #![feature(f16)] /// #![feature(float_gamma)] - /// # #[cfg(reliable_f16_math)] { + /// # #![cfg_attr(not(bootstrap), feature(cfg_target_has_reliable_f16_f128))] + /// # #![cfg_attr(not(bootstrap), expect(internal_features))] + /// # #[cfg(not(miri))] + /// # #[cfg(not(bootstrap))] + /// # #[cfg(target_has_reliable_f16_math)] { /// /// let x = 5.0f16; /// @@ -1287,7 +1455,7 @@ impl f16 { // #[unstable(feature = "float_gamma", issue = "99842")] #[must_use = "method returns a new number and does not mutate the original value"] pub fn gamma(self) -> f16 { - (unsafe { cmath::tgammaf(self as f32) }) as f16 + cmath::tgammaf(self as f32) as f16 } /// Natural logarithm of the absolute value of the gamma function @@ -1307,7 +1475,11 @@ impl f16 { /// ``` /// #![feature(f16)] /// #![feature(float_gamma)] - /// # #[cfg(reliable_f16_math)] { + /// # #![cfg_attr(not(bootstrap), feature(cfg_target_has_reliable_f16_f128))] + /// # #![cfg_attr(not(bootstrap), expect(internal_features))] + /// # #[cfg(not(miri))] + /// # #[cfg(not(bootstrap))] + /// # #[cfg(target_has_reliable_f16_math)] { /// /// let x = 2.0f16; /// @@ -1323,7 +1495,7 @@ impl f16 { #[must_use = "method returns a new number and does not mutate the original value"] pub fn ln_gamma(self) -> (f16, i32) { let mut signgamp: i32 = 0; - let x = (unsafe { cmath::lgammaf_r(self as f32, &mut signgamp) }) as f16; + let x = cmath::lgammaf_r(self as f32, &mut signgamp) as f16; (x, signgamp) } @@ -1342,7 +1514,11 @@ impl f16 { /// ``` /// #![feature(f16)] /// #![feature(float_erf)] - /// # #[cfg(reliable_f16_math)] { + /// # #![cfg_attr(not(bootstrap), feature(cfg_target_has_reliable_f16_f128))] + /// # #![cfg_attr(not(bootstrap), expect(internal_features))] + /// # #[cfg(not(miri))] + /// # #[cfg(not(bootstrap))] + /// # #[cfg(target_has_reliable_f16_math)] { /// /// The error function relates what percent of a normal distribution lies /// /// within `x` standard deviations (scaled by `1/sqrt(2)`). /// fn within_standard_deviations(x: f16) -> f16 { @@ -1363,7 +1539,7 @@ impl f16 { // #[unstable(feature = "float_erf", issue = "136321")] #[inline] pub fn erf(self) -> f16 { - (unsafe { cmath::erff(self as f32) }) as f16 + cmath::erff(self as f32) as f16 } /// Complementary error function. @@ -1381,7 +1557,11 @@ impl f16 { /// ``` /// #![feature(f16)] /// #![feature(float_erf)] - /// # #[cfg(reliable_f16_math)] { + /// # #![cfg_attr(not(bootstrap), feature(cfg_target_has_reliable_f16_f128))] + /// # #![cfg_attr(not(bootstrap), expect(internal_features))] + /// # #[cfg(not(miri))] + /// # #[cfg(not(bootstrap))] + /// # #[cfg(target_has_reliable_f16_math)] { /// let x: f16 = 0.123; /// /// let one = x.erf() + x.erfc(); @@ -1396,6 +1576,6 @@ impl f16 { // #[unstable(feature = "float_erf", issue = "136321")] #[inline] pub fn erfc(self) -> f16 { - (unsafe { cmath::erfcf(self as f32) }) as f16 + cmath::erfcf(self as f32) as f16 } } diff --git a/library/std/src/f32.rs b/library/std/src/f32.rs index 980e7f7793af2..baf7002f3803c 100644 --- a/library/std/src/f32.rs +++ b/library/std/src/f32.rs @@ -599,7 +599,7 @@ impl f32 { filing an issue describing your use-case too)." )] pub fn abs_sub(self, other: f32) -> f32 { - unsafe { cmath::fdimf(self, other) } + cmath::fdimf(self, other) } /// Returns the cube root of a number. @@ -626,7 +626,7 @@ impl f32 { #[stable(feature = "rust1", since = "1.0.0")] #[inline] pub fn cbrt(self) -> f32 { - unsafe { cmath::cbrtf(self) } + cmath::cbrtf(self) } /// Compute the distance between the origin and a point (`x`, `y`) on the @@ -657,7 +657,7 @@ impl f32 { #[stable(feature = "rust1", since = "1.0.0")] #[inline] pub fn hypot(self, other: f32) -> f32 { - unsafe { cmath::hypotf(self, other) } + cmath::hypotf(self, other) } /// Computes the sine of a number (in radians). @@ -730,7 +730,7 @@ impl f32 { #[stable(feature = "rust1", since = "1.0.0")] #[inline] pub fn tan(self) -> f32 { - unsafe { cmath::tanf(self) } + cmath::tanf(self) } /// Computes the arcsine of a number. Return value is in radians in @@ -760,7 +760,7 @@ impl f32 { #[stable(feature = "rust1", since = "1.0.0")] #[inline] pub fn asin(self) -> f32 { - unsafe { cmath::asinf(self) } + cmath::asinf(self) } /// Computes the arccosine of a number. Return value is in radians in @@ -790,7 +790,7 @@ impl f32 { #[stable(feature = "rust1", since = "1.0.0")] #[inline] pub fn acos(self) -> f32 { - unsafe { cmath::acosf(self) } + cmath::acosf(self) } /// Computes the arctangent of a number. Return value is in radians in the @@ -819,7 +819,7 @@ impl f32 { #[stable(feature = "rust1", since = "1.0.0")] #[inline] pub fn atan(self) -> f32 { - unsafe { cmath::atanf(self) } + cmath::atanf(self) } /// Computes the four quadrant arctangent of `self` (`y`) and `other` (`x`) in radians. @@ -860,7 +860,7 @@ impl f32 { #[stable(feature = "rust1", since = "1.0.0")] #[inline] pub fn atan2(self, other: f32) -> f32 { - unsafe { cmath::atan2f(self, other) } + cmath::atan2f(self, other) } /// Simultaneously computes the sine and cosine of the number, `x`. Returns @@ -919,7 +919,7 @@ impl f32 { #[stable(feature = "rust1", since = "1.0.0")] #[inline] pub fn exp_m1(self) -> f32 { - unsafe { cmath::expm1f(self) } + cmath::expm1f(self) } /// Returns `ln(1+n)` (natural logarithm) more accurately than if @@ -957,7 +957,7 @@ impl f32 { #[stable(feature = "rust1", since = "1.0.0")] #[inline] pub fn ln_1p(self) -> f32 { - unsafe { cmath::log1pf(self) } + cmath::log1pf(self) } /// Hyperbolic sine function. @@ -987,7 +987,7 @@ impl f32 { #[stable(feature = "rust1", since = "1.0.0")] #[inline] pub fn sinh(self) -> f32 { - unsafe { cmath::sinhf(self) } + cmath::sinhf(self) } /// Hyperbolic cosine function. @@ -1017,7 +1017,7 @@ impl f32 { #[stable(feature = "rust1", since = "1.0.0")] #[inline] pub fn cosh(self) -> f32 { - unsafe { cmath::coshf(self) } + cmath::coshf(self) } /// Hyperbolic tangent function. @@ -1047,7 +1047,7 @@ impl f32 { #[stable(feature = "rust1", since = "1.0.0")] #[inline] pub fn tanh(self) -> f32 { - unsafe { cmath::tanhf(self) } + cmath::tanhf(self) } /// Inverse hyperbolic sine function. @@ -1158,7 +1158,7 @@ impl f32 { #[unstable(feature = "float_gamma", issue = "99842")] #[inline] pub fn gamma(self) -> f32 { - unsafe { cmath::tgammaf(self) } + cmath::tgammaf(self) } /// Natural logarithm of the absolute value of the gamma function @@ -1188,7 +1188,7 @@ impl f32 { #[inline] pub fn ln_gamma(self) -> (f32, i32) { let mut signgamp: i32 = 0; - let x = unsafe { cmath::lgammaf_r(self, &mut signgamp) }; + let x = cmath::lgammaf_r(self, &mut signgamp); (x, signgamp) } @@ -1224,7 +1224,7 @@ impl f32 { #[unstable(feature = "float_erf", issue = "136321")] #[inline] pub fn erf(self) -> f32 { - unsafe { cmath::erff(self) } + cmath::erff(self) } /// Complementary error function. @@ -1253,6 +1253,6 @@ impl f32 { #[unstable(feature = "float_erf", issue = "136321")] #[inline] pub fn erfc(self) -> f32 { - unsafe { cmath::erfcf(self) } + cmath::erfcf(self) } } diff --git a/library/std/src/f64.rs b/library/std/src/f64.rs index 2aaab3ffc8352..84fd9bfb7b680 100644 --- a/library/std/src/f64.rs +++ b/library/std/src/f64.rs @@ -599,7 +599,7 @@ impl f64 { filing an issue describing your use-case too)." )] pub fn abs_sub(self, other: f64) -> f64 { - unsafe { cmath::fdim(self, other) } + cmath::fdim(self, other) } /// Returns the cube root of a number. @@ -626,7 +626,7 @@ impl f64 { #[stable(feature = "rust1", since = "1.0.0")] #[inline] pub fn cbrt(self) -> f64 { - unsafe { cmath::cbrt(self) } + cmath::cbrt(self) } /// Compute the distance between the origin and a point (`x`, `y`) on the @@ -657,7 +657,7 @@ impl f64 { #[stable(feature = "rust1", since = "1.0.0")] #[inline] pub fn hypot(self, other: f64) -> f64 { - unsafe { cmath::hypot(self, other) } + cmath::hypot(self, other) } /// Computes the sine of a number (in radians). @@ -730,7 +730,7 @@ impl f64 { #[stable(feature = "rust1", since = "1.0.0")] #[inline] pub fn tan(self) -> f64 { - unsafe { cmath::tan(self) } + cmath::tan(self) } /// Computes the arcsine of a number. Return value is in radians in @@ -760,7 +760,7 @@ impl f64 { #[stable(feature = "rust1", since = "1.0.0")] #[inline] pub fn asin(self) -> f64 { - unsafe { cmath::asin(self) } + cmath::asin(self) } /// Computes the arccosine of a number. Return value is in radians in @@ -790,7 +790,7 @@ impl f64 { #[stable(feature = "rust1", since = "1.0.0")] #[inline] pub fn acos(self) -> f64 { - unsafe { cmath::acos(self) } + cmath::acos(self) } /// Computes the arctangent of a number. Return value is in radians in the @@ -819,7 +819,7 @@ impl f64 { #[stable(feature = "rust1", since = "1.0.0")] #[inline] pub fn atan(self) -> f64 { - unsafe { cmath::atan(self) } + cmath::atan(self) } /// Computes the four quadrant arctangent of `self` (`y`) and `other` (`x`) in radians. @@ -860,7 +860,7 @@ impl f64 { #[stable(feature = "rust1", since = "1.0.0")] #[inline] pub fn atan2(self, other: f64) -> f64 { - unsafe { cmath::atan2(self, other) } + cmath::atan2(self, other) } /// Simultaneously computes the sine and cosine of the number, `x`. Returns @@ -919,7 +919,7 @@ impl f64 { #[stable(feature = "rust1", since = "1.0.0")] #[inline] pub fn exp_m1(self) -> f64 { - unsafe { cmath::expm1(self) } + cmath::expm1(self) } /// Returns `ln(1+n)` (natural logarithm) more accurately than if @@ -957,7 +957,7 @@ impl f64 { #[stable(feature = "rust1", since = "1.0.0")] #[inline] pub fn ln_1p(self) -> f64 { - unsafe { cmath::log1p(self) } + cmath::log1p(self) } /// Hyperbolic sine function. @@ -987,7 +987,7 @@ impl f64 { #[stable(feature = "rust1", since = "1.0.0")] #[inline] pub fn sinh(self) -> f64 { - unsafe { cmath::sinh(self) } + cmath::sinh(self) } /// Hyperbolic cosine function. @@ -1017,7 +1017,7 @@ impl f64 { #[stable(feature = "rust1", since = "1.0.0")] #[inline] pub fn cosh(self) -> f64 { - unsafe { cmath::cosh(self) } + cmath::cosh(self) } /// Hyperbolic tangent function. @@ -1047,7 +1047,7 @@ impl f64 { #[stable(feature = "rust1", since = "1.0.0")] #[inline] pub fn tanh(self) -> f64 { - unsafe { cmath::tanh(self) } + cmath::tanh(self) } /// Inverse hyperbolic sine function. @@ -1158,7 +1158,7 @@ impl f64 { #[unstable(feature = "float_gamma", issue = "99842")] #[inline] pub fn gamma(self) -> f64 { - unsafe { cmath::tgamma(self) } + cmath::tgamma(self) } /// Natural logarithm of the absolute value of the gamma function @@ -1188,7 +1188,7 @@ impl f64 { #[inline] pub fn ln_gamma(self) -> (f64, i32) { let mut signgamp: i32 = 0; - let x = unsafe { cmath::lgamma_r(self, &mut signgamp) }; + let x = cmath::lgamma_r(self, &mut signgamp); (x, signgamp) } @@ -1224,7 +1224,7 @@ impl f64 { #[unstable(feature = "float_erf", issue = "136321")] #[inline] pub fn erf(self) -> f64 { - unsafe { cmath::erf(self) } + cmath::erf(self) } /// Complementary error function. @@ -1253,6 +1253,6 @@ impl f64 { #[unstable(feature = "float_erf", issue = "136321")] #[inline] pub fn erfc(self) -> f64 { - unsafe { cmath::erfc(self) } + cmath::erfc(self) } } diff --git a/library/std/src/ffi/mod.rs b/library/std/src/ffi/mod.rs index 860ec3a6be16e..bd9446f5aba51 100644 --- a/library/std/src/ffi/mod.rs +++ b/library/std/src/ffi/mod.rs @@ -161,7 +161,7 @@ #![stable(feature = "rust1", since = "1.0.0")] -#[unstable(feature = "c_str_module", issue = "112134")] +#[stable(feature = "c_str_module", since = "CURRENT_RUSTC_VERSION")] pub mod c_str; #[stable(feature = "core_c_void", since = "1.30.0")] @@ -201,5 +201,5 @@ pub use self::c_str::{CStr, CString}; #[doc(inline)] pub use self::os_str::{OsStr, OsString}; -#[stable(feature = "os_str_display", since = "CURRENT_RUSTC_VERSION")] +#[stable(feature = "os_str_display", since = "1.87.0")] pub mod os_str; diff --git a/library/std/src/ffi/os_str.rs b/library/std/src/ffi/os_str.rs index aa25ff5293c71..72bdf03ee61a4 100644 --- a/library/std/src/ffi/os_str.rs +++ b/library/std/src/ffi/os_str.rs @@ -582,15 +582,25 @@ impl OsString { #[unstable(feature = "os_string_truncate", issue = "133262")] pub fn truncate(&mut self, len: usize) { self.as_os_str().inner.check_public_boundary(len); - self.inner.truncate(len); + // SAFETY: The length was just checked to be at a valid boundary. + unsafe { self.inner.truncate_unchecked(len) }; } - /// Provides plumbing to core `Vec::extend_from_slice`. - /// More well behaving alternative to allowing outer types - /// full mutable access to the core `Vec`. + /// Provides plumbing to `Vec::extend_from_slice` without giving full + /// mutable access to the `Vec`. + /// + /// # Safety + /// + /// The slice must be valid for the platform encoding (as described in + /// [`OsStr::from_encoded_bytes_unchecked`]). + /// + /// This bypasses the encoding-dependent surrogate joining, so `self` must + /// not end with a leading surrogate half and `other` must not start with + /// with a trailing surrogate half. #[inline] - pub(crate) fn extend_from_slice(&mut self, other: &[u8]) { - self.inner.extend_from_slice(other); + pub(crate) unsafe fn extend_from_slice_unchecked(&mut self, other: &[u8]) { + // SAFETY: Guaranteed by caller. + unsafe { self.inner.extend_from_slice_unchecked(other) }; } } @@ -1255,7 +1265,7 @@ impl OsStr { /// let s = OsStr::new("Hello, world!"); /// println!("{}", s.display()); /// ``` - #[stable(feature = "os_str_display", since = "CURRENT_RUSTC_VERSION")] + #[stable(feature = "os_str_display", since = "1.87.0")] #[must_use = "this does not display the `OsStr`; \ it returns an object that can be displayed"] #[inline] @@ -1612,19 +1622,19 @@ impl fmt::Debug for OsStr { /// /// [`Display`]: fmt::Display /// [`format!`]: crate::format -#[stable(feature = "os_str_display", since = "CURRENT_RUSTC_VERSION")] +#[stable(feature = "os_str_display", since = "1.87.0")] pub struct Display<'a> { os_str: &'a OsStr, } -#[stable(feature = "os_str_display", since = "CURRENT_RUSTC_VERSION")] +#[stable(feature = "os_str_display", since = "1.87.0")] impl fmt::Debug for Display<'_> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fmt::Debug::fmt(&self.os_str, f) } } -#[stable(feature = "os_str_display", since = "CURRENT_RUSTC_VERSION")] +#[stable(feature = "os_str_display", since = "1.87.0")] impl fmt::Display for Display<'_> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fmt::Display::fmt(&self.os_str.inner, f) diff --git a/library/std/src/fs.rs b/library/std/src/fs.rs index 801baf3d99072..11f439b9996de 100644 --- a/library/std/src/fs.rs +++ b/library/std/src/fs.rs @@ -21,7 +21,6 @@ mod tests; use crate::ffi::OsString; -use crate::fmt; use crate::io::{self, BorrowedCursor, IoSlice, IoSliceMut, Read, Seek, SeekFrom, Write}; use crate::path::{Path, PathBuf}; use crate::sealed::Sealed; @@ -29,6 +28,7 @@ use crate::sync::Arc; use crate::sys::fs as fs_imp; use crate::sys_common::{AsInner, AsInnerMut, FromInner, IntoInner}; use crate::time::SystemTime; +use crate::{error, fmt}; /// An object providing access to an open file on the filesystem. /// @@ -116,6 +116,22 @@ pub struct File { inner: fs_imp::File, } +/// An enumeration of possible errors which can occur while trying to acquire a lock +/// from the [`try_lock`] method and [`try_lock_shared`] method on a [`File`]. +/// +/// [`try_lock`]: File::try_lock +/// [`try_lock_shared`]: File::try_lock_shared +#[unstable(feature = "file_lock", issue = "130994")] +pub enum TryLockError { + /// The lock could not be acquired due to an I/O error on the file. The standard library will + /// not return an [`ErrorKind::WouldBlock`] error inside [`TryLockError::Error`] + /// + /// [`ErrorKind::WouldBlock`]: io::ErrorKind::WouldBlock + Error(io::Error), + /// The lock could not be acquired at this time because it is held by another handle/process. + WouldBlock, +} + /// Metadata information about a file. /// /// This structure is returned from the [`metadata`] or @@ -352,6 +368,30 @@ pub fn write, C: AsRef<[u8]>>(path: P, contents: C) -> io::Result inner(path.as_ref(), contents.as_ref()) } +#[unstable(feature = "file_lock", issue = "130994")] +impl error::Error for TryLockError {} + +#[unstable(feature = "file_lock", issue = "130994")] +impl fmt::Debug for TryLockError { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + TryLockError::Error(err) => err.fmt(f), + TryLockError::WouldBlock => "WouldBlock".fmt(f), + } + } +} + +#[unstable(feature = "file_lock", issue = "130994")] +impl fmt::Display for TryLockError { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + TryLockError::Error(_) => "lock acquisition failed due to I/O error", + TryLockError::WouldBlock => "lock acquisition failed because the operation would block", + } + .fmt(f) + } +} + impl File { /// Attempts to open a file in read-only mode. /// @@ -734,8 +774,8 @@ impl File { /// Try to acquire an exclusive lock on the file. /// - /// Returns `Ok(false)` if a different lock is already held on this file (via another - /// handle/descriptor). + /// Returns `Err(TryLockError::WouldBlock)` if a different lock is already held on this file + /// (via another handle/descriptor). /// /// This acquires an exclusive lock; no other file handle to this file may acquire another lock. /// @@ -777,23 +817,27 @@ impl File { /// /// ```no_run /// #![feature(file_lock)] - /// use std::fs::File; + /// use std::fs::{File, TryLockError}; /// /// fn main() -> std::io::Result<()> { /// let f = File::create("foo.txt")?; - /// f.try_lock()?; + /// match f.try_lock() { + /// Ok(_) => (), + /// Err(TryLockError::WouldBlock) => (), // Lock not acquired + /// Err(TryLockError::Error(err)) => return Err(err), + /// } /// Ok(()) /// } /// ``` #[unstable(feature = "file_lock", issue = "130994")] - pub fn try_lock(&self) -> io::Result { + pub fn try_lock(&self) -> Result<(), TryLockError> { self.inner.try_lock() } /// Try to acquire a shared (non-exclusive) lock on the file. /// - /// Returns `Ok(false)` if an exclusive lock is already held on this file (via another - /// handle/descriptor). + /// Returns `Err(TryLockError::WouldBlock)` if a different lock is already held on this file + /// (via another handle/descriptor). /// /// This acquires a shared lock; more than one file handle may hold a shared lock, but none may /// hold an exclusive lock at the same time. @@ -834,16 +878,21 @@ impl File { /// /// ```no_run /// #![feature(file_lock)] - /// use std::fs::File; + /// use std::fs::{File, TryLockError}; /// /// fn main() -> std::io::Result<()> { /// let f = File::open("foo.txt")?; - /// f.try_lock_shared()?; + /// match f.try_lock_shared() { + /// Ok(_) => (), + /// Err(TryLockError::WouldBlock) => (), // Lock not acquired + /// Err(TryLockError::Error(err)) => return Err(err), + /// } + /// /// Ok(()) /// } /// ``` #[unstable(feature = "file_lock", issue = "130994")] - pub fn try_lock_shared(&self) -> io::Result { + pub fn try_lock_shared(&self) -> Result<(), TryLockError> { self.inner.try_lock_shared() } @@ -2874,6 +2923,8 @@ pub fn remove_dir>(path: P) -> io::Result<()> { /// /// Consider ignoring the error if validating the removal is not required for your use case. /// +/// This function may return [`io::ErrorKind::DirectoryNotEmpty`] if the directory is concurrently +/// written into, which typically indicates some contents were removed but not all. /// [`io::ErrorKind::NotFound`] is only returned if no removal occurs. /// /// [`fs::remove_file`]: remove_file @@ -2980,6 +3031,21 @@ pub fn read_dir>(path: P) -> io::Result { /// /// [changes]: io#platform-specific-behavior /// +/// ## Symlinks +/// On UNIX-like systems, this function will update the permission bits +/// of the file pointed to by the symlink. +/// +/// Note that this behavior can lead to privalage escalation vulnerabilites, +/// where the ability to create a symlink in one directory allows you to +/// cause the permissions of another file or directory to be modified. +/// +/// For this reason, using this function with symlinks should be avoided. +/// When possible, permissions should be set at creation time instead. +/// +/// # Rationale +/// POSIX does not specify an `lchmod` function, +/// and symlinks can be followed regardless of what permission bits are set. +/// /// # Errors /// /// This function will return an error in the following situations, but is not diff --git a/library/std/src/fs/tests.rs b/library/std/src/fs/tests.rs index 4712e58980cc6..c81a5ff4d96e6 100644 --- a/library/std/src/fs/tests.rs +++ b/library/std/src/fs/tests.rs @@ -1,6 +1,22 @@ use rand::RngCore; +#[cfg(any( + windows, + target_os = "freebsd", + target_os = "linux", + target_os = "netbsd", + target_vendor = "apple", +))] +use crate::assert_matches::assert_matches; use crate::char::MAX_LEN_UTF8; +#[cfg(any( + windows, + target_os = "freebsd", + target_os = "linux", + target_os = "netbsd", + target_vendor = "apple", +))] +use crate::fs::TryLockError; use crate::fs::{self, File, FileTimes, OpenOptions}; use crate::io::prelude::*; use crate::io::{BorrowedBuf, ErrorKind, SeekFrom}; @@ -223,8 +239,8 @@ fn file_lock_multiple_shared() { check!(f2.lock_shared()); check!(f1.unlock()); check!(f2.unlock()); - assert!(check!(f1.try_lock_shared())); - assert!(check!(f2.try_lock_shared())); + check!(f1.try_lock_shared()); + check!(f2.try_lock_shared()); } #[test] @@ -243,12 +259,12 @@ fn file_lock_blocking() { // Check that shared locks block exclusive locks check!(f1.lock_shared()); - assert!(!check!(f2.try_lock())); + assert_matches!(f2.try_lock(), Err(TryLockError::WouldBlock)); check!(f1.unlock()); // Check that exclusive locks block shared locks check!(f1.lock()); - assert!(!check!(f2.try_lock_shared())); + assert_matches!(f2.try_lock_shared(), Err(TryLockError::WouldBlock)); } #[test] @@ -267,9 +283,9 @@ fn file_lock_drop() { // Check that locks are released when the File is dropped check!(f1.lock_shared()); - assert!(!check!(f2.try_lock())); + assert_matches!(f2.try_lock(), Err(TryLockError::WouldBlock)); drop(f1); - assert!(check!(f2.try_lock())); + check!(f2.try_lock()); } #[test] @@ -288,10 +304,10 @@ fn file_lock_dup() { // Check that locks are not dropped if the File has been cloned check!(f1.lock_shared()); - assert!(!check!(f2.try_lock())); + assert_matches!(f2.try_lock(), Err(TryLockError::WouldBlock)); let cloned = check!(f1.try_clone()); drop(f1); - assert!(!check!(f2.try_lock())); + assert_matches!(f2.try_lock(), Err(TryLockError::WouldBlock)); drop(cloned) } @@ -307,9 +323,9 @@ fn file_lock_double_unlock() { // Check that both are released by unlock() check!(f1.lock()); check!(f1.lock_shared()); - assert!(!check!(f2.try_lock())); + assert_matches!(f2.try_lock(), Err(TryLockError::WouldBlock)); check!(f1.unlock()); - assert!(check!(f2.try_lock())); + check!(f2.try_lock()); } #[test] @@ -714,6 +730,10 @@ fn recursive_mkdir_empty() { } #[test] +#[cfg_attr( + all(windows, target_arch = "aarch64"), + ignore = "SymLinks not enabled on Arm64 Windows runners https://github.com/actions/partner-runner-images/issues/94" +)] fn recursive_rmdir() { let tmpdir = tmpdir(); let d1 = tmpdir.join("d1"); @@ -733,6 +753,10 @@ fn recursive_rmdir() { } #[test] +#[cfg_attr( + all(windows, target_arch = "aarch64"), + ignore = "SymLinks not enabled on Arm64 Windows runners https://github.com/actions/partner-runner-images/issues/94" +)] fn recursive_rmdir_of_symlink() { // test we do not recursively delete a symlink but only dirs. let tmpdir = tmpdir(); @@ -1517,6 +1541,10 @@ fn file_open_not_found() { } #[test] +#[cfg_attr( + all(windows, target_arch = "aarch64"), + ignore = "SymLinks not enabled on Arm64 Windows runners https://github.com/actions/partner-runner-images/issues/94" +)] fn create_dir_all_with_junctions() { let tmpdir = tmpdir(); let target = tmpdir.join("target"); @@ -1995,6 +2023,10 @@ fn test_rename_symlink() { #[test] #[cfg(windows)] +#[cfg_attr( + all(windows, target_arch = "aarch64"), + ignore = "SymLinks not enabled on Arm64 Windows runners https://github.com/actions/partner-runner-images/issues/94" +)] fn test_rename_junction() { let tmpdir = tmpdir(); let original = tmpdir.join("original"); diff --git a/library/std/src/io/buffered/bufreader/buffer.rs b/library/std/src/io/buffered/bufreader/buffer.rs index 9fd2472ebdfdb..574288e579e0b 100644 --- a/library/std/src/io/buffered/bufreader/buffer.rs +++ b/library/std/src/io/buffered/bufreader/buffer.rs @@ -123,7 +123,6 @@ impl Buffer { /// Remove bytes that have already been read from the buffer. pub fn backshift(&mut self) { self.buf.copy_within(self.pos.., 0); - self.initialized -= self.pos; self.filled -= self.pos; self.pos = 0; } diff --git a/library/std/src/io/copy.rs b/library/std/src/io/copy.rs index 8d733325b3be7..15e962924ac71 100644 --- a/library/std/src/io/copy.rs +++ b/library/std/src/io/copy.rs @@ -248,8 +248,11 @@ impl BufferedWriterSpec for BufWriter { Err(e) => return Err(e), } } else { + // All the bytes that were already in the buffer are initialized, + // treat them as such when the buffer is flushed. + init += buf.len(); + self.flush_buf()?; - init = 0; } } } diff --git a/library/std/src/io/error.rs b/library/std/src/io/error.rs index 8472f90305007..cf3778bd29071 100644 --- a/library/std/src/io/error.rs +++ b/library/std/src/io/error.rs @@ -48,6 +48,7 @@ use crate::{error, fmt, result, sys}; /// } /// ``` #[stable(feature = "rust1", since = "1.0.0")] +#[cfg_attr(not(bootstrap), doc(search_unbox))] pub type Result = result::Result; /// The error type for I/O operations of the [`Read`], [`Write`], [`Seek`], and @@ -374,7 +375,7 @@ pub enum ErrorKind { /// A filename was invalid. /// /// This error can also occur if a length limit for a name was exceeded. - #[stable(feature = "io_error_invalid_filename", since = "CURRENT_RUSTC_VERSION")] + #[stable(feature = "io_error_invalid_filename", since = "1.87.0")] InvalidFilename, /// Program argument list too long. /// diff --git a/library/std/src/io/mod.rs b/library/std/src/io/mod.rs index 314cbb45d49e2..96fac4f6bde68 100644 --- a/library/std/src/io/mod.rs +++ b/library/std/src/io/mod.rs @@ -310,7 +310,7 @@ pub use self::error::RawOsError; pub use self::error::SimpleMessage; #[unstable(feature = "io_const_error", issue = "133448")] pub use self::error::const_error; -#[stable(feature = "anonymous_pipe", since = "CURRENT_RUSTC_VERSION")] +#[stable(feature = "anonymous_pipe", since = "1.87.0")] pub use self::pipe::{PipeReader, PipeWriter, pipe}; #[stable(feature = "is_terminal", since = "1.70.0")] pub use self::stdio::IsTerminal; @@ -2319,9 +2319,9 @@ pub trait BufRead: Read { /// Checks if there is any data left to be `read`. /// /// This function may fill the buffer to check for data, - /// so this functions returns `Result`, not `bool`. + /// so this function returns `Result`, not `bool`. /// - /// Default implementation calls `fill_buf` and checks that + /// The default implementation calls `fill_buf` and checks that the /// returned slice is empty (which means that there is no data left, /// since EOF is reached). /// @@ -2658,6 +2658,10 @@ impl Chain { /// Gets references to the underlying readers in this `Chain`. /// + /// Care should be taken to avoid modifying the internal I/O state of the + /// underlying readers as doing so may corrupt the internal state of this + /// `Chain`. + /// /// # Examples /// /// ```no_run @@ -2915,6 +2919,10 @@ impl Take { /// Gets a reference to the underlying reader. /// + /// Care should be taken to avoid modifying the internal I/O state of the + /// underlying reader as doing so may corrupt the internal limit of this + /// `Take`. + /// /// # Examples /// /// ```no_run diff --git a/library/std/src/io/pipe.rs b/library/std/src/io/pipe.rs index cfed9b05cc0c6..47243806cd2d9 100644 --- a/library/std/src/io/pipe.rs +++ b/library/std/src/io/pipe.rs @@ -2,7 +2,7 @@ use crate::io; use crate::sys::anonymous_pipe::{AnonPipe, pipe as pipe_inner}; use crate::sys_common::{FromInner, IntoInner}; -/// Create an anonymous pipe. +/// Creates an anonymous pipe. /// /// # Behavior /// @@ -67,19 +67,19 @@ use crate::sys_common::{FromInner, IntoInner}; /// ``` /// [changes]: io#platform-specific-behavior /// [man page]: https://man7.org/linux/man-pages/man7/pipe.7.html -#[stable(feature = "anonymous_pipe", since = "CURRENT_RUSTC_VERSION")] +#[stable(feature = "anonymous_pipe", since = "1.87.0")] #[inline] pub fn pipe() -> io::Result<(PipeReader, PipeWriter)> { pipe_inner().map(|(reader, writer)| (PipeReader(reader), PipeWriter(writer))) } /// Read end of an anonymous pipe. -#[stable(feature = "anonymous_pipe", since = "CURRENT_RUSTC_VERSION")] +#[stable(feature = "anonymous_pipe", since = "1.87.0")] #[derive(Debug)] pub struct PipeReader(pub(crate) AnonPipe); /// Write end of an anonymous pipe. -#[stable(feature = "anonymous_pipe", since = "CURRENT_RUSTC_VERSION")] +#[stable(feature = "anonymous_pipe", since = "1.87.0")] #[derive(Debug)] pub struct PipeWriter(pub(crate) AnonPipe); @@ -108,7 +108,7 @@ impl IntoInner for PipeWriter { } impl PipeReader { - /// Create a new [`PipeReader`] instance that shares the same underlying file description. + /// Creates a new [`PipeReader`] instance that shares the same underlying file description. /// /// # Examples /// @@ -160,14 +160,14 @@ impl PipeReader { /// # Ok(()) /// # } /// ``` - #[stable(feature = "anonymous_pipe", since = "CURRENT_RUSTC_VERSION")] + #[stable(feature = "anonymous_pipe", since = "1.87.0")] pub fn try_clone(&self) -> io::Result { self.0.try_clone().map(Self) } } impl PipeWriter { - /// Create a new [`PipeWriter`] instance that shares the same underlying file description. + /// Creates a new [`PipeWriter`] instance that shares the same underlying file description. /// /// # Examples /// @@ -199,13 +199,13 @@ impl PipeWriter { /// # Ok(()) /// # } /// ``` - #[stable(feature = "anonymous_pipe", since = "CURRENT_RUSTC_VERSION")] + #[stable(feature = "anonymous_pipe", since = "1.87.0")] pub fn try_clone(&self) -> io::Result { self.0.try_clone().map(Self) } } -#[stable(feature = "anonymous_pipe", since = "CURRENT_RUSTC_VERSION")] +#[stable(feature = "anonymous_pipe", since = "1.87.0")] impl io::Read for &PipeReader { fn read(&mut self, buf: &mut [u8]) -> io::Result { self.0.read(buf) @@ -225,7 +225,7 @@ impl io::Read for &PipeReader { } } -#[stable(feature = "anonymous_pipe", since = "CURRENT_RUSTC_VERSION")] +#[stable(feature = "anonymous_pipe", since = "1.87.0")] impl io::Read for PipeReader { fn read(&mut self, buf: &mut [u8]) -> io::Result { self.0.read(buf) @@ -245,7 +245,7 @@ impl io::Read for PipeReader { } } -#[stable(feature = "anonymous_pipe", since = "CURRENT_RUSTC_VERSION")] +#[stable(feature = "anonymous_pipe", since = "1.87.0")] impl io::Write for &PipeWriter { fn write(&mut self, buf: &[u8]) -> io::Result { self.0.write(buf) @@ -263,7 +263,7 @@ impl io::Write for &PipeWriter { } } -#[stable(feature = "anonymous_pipe", since = "CURRENT_RUSTC_VERSION")] +#[stable(feature = "anonymous_pipe", since = "1.87.0")] impl io::Write for PipeWriter { fn write(&mut self, buf: &[u8]) -> io::Result { self.0.write(buf) diff --git a/library/std/src/io/stdio.rs b/library/std/src/io/stdio.rs index 8fc1633133974..2d80fe49e80a7 100644 --- a/library/std/src/io/stdio.rs +++ b/library/std/src/io/stdio.rs @@ -11,7 +11,7 @@ use crate::io::{ self, BorrowedCursor, BufReader, IoSlice, IoSliceMut, LineWriter, Lines, SpecReadByte, }; use crate::panic::{RefUnwindSafe, UnwindSafe}; -use crate::sync::atomic::{AtomicBool, Ordering}; +use crate::sync::atomic::{Atomic, AtomicBool, Ordering}; use crate::sync::{Arc, Mutex, MutexGuard, OnceLock, ReentrantLock, ReentrantLockGuard}; use crate::sys::stdio; use crate::thread::AccessError; @@ -37,7 +37,7 @@ thread_local! { /// have a consistent order between set_output_capture and print_to *within /// the same thread*. Within the same thread, things always have a perfectly /// consistent order. So Ordering::Relaxed is fine. -static OUTPUT_CAPTURE_USED: AtomicBool = AtomicBool::new(false); +static OUTPUT_CAPTURE_USED: Atomic = AtomicBool::new(false); /// A handle to a raw instance of the standard input stream of this process. /// diff --git a/library/std/src/keyword_docs.rs b/library/std/src/keyword_docs.rs index c07c391892d80..79b25040ef607 100644 --- a/library/std/src/keyword_docs.rs +++ b/library/std/src/keyword_docs.rs @@ -91,7 +91,7 @@ mod as_keyword {} /// /// When associated with `loop`, a break expression may be used to return a value from that loop. /// This is only valid with `loop` and not with any other type of loop. -/// If no value is specified, `break;` returns `()`. +/// If no value is specified for `break;` it returns `()`. /// Every `break` within a loop must return the same type. /// /// ```rust @@ -109,6 +109,33 @@ mod as_keyword {} /// println!("{result}"); /// ``` /// +/// It is also possible to exit from any *labelled* block returning the value early. +/// If no value is specified for `break;` it returns `()`. +/// +/// ```rust +/// let inputs = vec!["Cow", "Cat", "Dog", "Snake", "Cod"]; +/// +/// let mut results = vec![]; +/// for input in inputs { +/// let result = 'filter: { +/// if input.len() > 3 { +/// break 'filter Err("Too long"); +/// }; +/// +/// if !input.contains("C") { +/// break 'filter Err("No Cs"); +/// }; +/// +/// Ok(input.to_uppercase()) +/// }; +/// +/// results.push(result); +/// } +/// +/// // [Ok("COW"), Ok("CAT"), Err("No Cs"), Err("Too long"), Ok("COD")] +/// println!("{:?}", results) +/// ``` +/// /// For more details consult the [Reference on "break expression"] and the [Reference on "break and /// loop values"]. /// @@ -119,7 +146,7 @@ mod break_keyword {} #[doc(keyword = "const")] // -/// Compile-time constants, compile-time evaluable functions, and raw pointers. +/// Compile-time constants, compile-time blocks, compile-time evaluable functions, and raw pointers. /// /// ## Compile-time constants /// @@ -166,6 +193,12 @@ mod break_keyword {} /// /// For more detail on `const`, see the [Rust Book] or the [Reference]. /// +/// ## Compile-time blocks +/// +/// The `const` keyword can also be used to define a block of code that is evaluated at compile time. +/// This is useful for ensuring certain computations are completed before optimizations happen, as well as +/// before runtime. For more details, see the [Reference][const-blocks]. +/// /// ## Compile-time evaluable functions /// /// The other main use of the `const` keyword is in `const fn`. This marks a function as being @@ -184,6 +217,7 @@ mod break_keyword {} /// [pointer primitive]: pointer /// [Rust Book]: ../book/ch03-01-variables-and-mutability.html#constants /// [Reference]: ../reference/items/constant-items.html +/// [const-blocks]: ../reference/expressions/block-expr.html#const-blocks /// [const-eval]: ../reference/const_eval.html mod const_keyword {} @@ -381,11 +415,15 @@ mod enum_keyword {} /// lazy_static;`. The other use is in foreign function interfaces (FFI). /// /// `extern` is used in two different contexts within FFI. The first is in the form of external -/// blocks, for declaring function interfaces that Rust code can call foreign code by. +/// blocks, for declaring function interfaces that Rust code can call foreign code by. This use +/// of `extern` is unsafe, since we are asserting to the compiler that all function declarations +/// are correct. If they are not, using these items may lead to undefined behavior. /// /// ```rust ignore +/// // SAFETY: The function declarations given below are in +/// // line with the header files of `my_c_library`. /// #[link(name = "my_c_library")] -/// extern "C" { +/// unsafe extern "C" { /// fn my_c_function(x: i32) -> bool; /// } /// ``` @@ -1195,6 +1233,28 @@ mod ref_keyword {} /// Ok(()) /// } /// ``` +/// +/// Within [closures] and [`async`] blocks, `return` returns a value from within the closure or +/// `async` block, not from the parent function: +/// +/// ```rust +/// fn foo() -> i32 { +/// let closure = || { +/// return 5; +/// }; +/// +/// let future = async { +/// return 10; +/// }; +/// +/// return 15; +/// } +/// +/// assert_eq!(foo(), 15); +/// ``` +/// +/// [closures]: ../book/ch13-01-closures.html +/// [`async`]: ../std/keyword.async.html mod return_keyword {} #[doc(keyword = "self")] @@ -2388,6 +2448,39 @@ mod while_keyword {} /// /// We have written an [async book] detailing `async`/`await` and trade-offs compared to using threads. /// +/// ## Control Flow +/// [`return`] statements and [`?`][try operator] operators within `async` blocks do not cause +/// a return from the parent function; rather, they cause the `Future` returned by the block to +/// return with that value. +/// +/// For example, the following Rust function will return `5`, causing `x` to take the [`!` type][never type]: +/// ```rust +/// #[expect(unused_variables)] +/// fn example() -> i32 { +/// let x = { +/// return 5; +/// }; +/// } +/// ``` +/// In contrast, the following asynchronous function assigns a `Future` to `x`, and +/// only returns `5` when `x` is `.await`ed: +/// ```rust +/// async fn example() -> i32 { +/// let x = async { +/// return 5; +/// }; +/// +/// x.await +/// } +/// ``` +/// Code using `?` behaves similarly - it causes the `async` block to return a [`Result`] without +/// affecting the parent function. +/// +/// Note that you cannot use `break` or `continue` from within an `async` block to affect the +/// control flow of a loop in the parent function. +/// +/// Control flow in `async` blocks is documented further in the [async book][async book blocks]. +/// /// ## Editions /// /// `async` is a keyword from the 2018 edition onwards. @@ -2397,6 +2490,11 @@ mod while_keyword {} /// [`Future`]: future::Future /// [`.await`]: ../std/keyword.await.html /// [async book]: https://rust-lang.github.io/async-book/ +/// [`return`]: ../std/keyword.return.html +/// [try operator]: ../reference/expressions/operator-expr.html#r-expr.try +/// [never type]: ../reference/types/never.html +/// [`Result`]: result::Result +/// [async book blocks]: https://rust-lang.github.io/async-book/part-guide/more-async-await.html#async-blocks mod async_keyword {} #[doc(keyword = "await")] diff --git a/library/std/src/lib.rs b/library/std/src/lib.rs index 9dcedaa13f661..c011f9661ae7a 100644 --- a/library/std/src/lib.rs +++ b/library/std/src/lib.rs @@ -301,6 +301,8 @@ #![feature(formatting_options)] #![feature(if_let_guard)] #![feature(intra_doc_pointers)] +#![feature(iter_advance_by)] +#![feature(iter_next_chunk)] #![feature(lang_items)] #![feature(let_chains)] #![feature(link_cfg)] @@ -312,7 +314,6 @@ #![feature(needs_panic_runtime)] #![feature(negative_impls)] #![feature(never_type)] -#![feature(no_sanitize)] #![feature(optimize_attribute)] #![feature(prelude_import)] #![feature(rustc_attrs)] @@ -322,6 +323,7 @@ #![feature(strict_provenance_lints)] #![feature(thread_local)] #![feature(try_blocks)] +#![feature(try_trait_v2)] #![feature(type_alias_impl_trait)] // tidy-alphabetical-end // @@ -330,7 +332,6 @@ #![feature(array_chunks)] #![feature(bstr)] #![feature(bstr_internals)] -#![feature(c_str_module)] #![feature(char_internals)] #![feature(clone_to_uninit)] #![feature(core_intrinsics)] @@ -341,9 +342,11 @@ #![feature(exact_size_is_empty)] #![feature(exclusive_wrapper)] #![feature(extend_one)] +#![feature(float_algebraic)] #![feature(float_gamma)] #![feature(float_minimum_maximum)] #![feature(fmt_internals)] +#![feature(generic_atomic)] #![feature(hasher_prefixfree_extras)] #![feature(hashmap_internals)] #![feature(hint_must_use)] @@ -704,11 +707,18 @@ pub use core::cfg_match; reason = "`concat_bytes` is not stable enough for use and is subject to change" )] pub use core::concat_bytes; +#[stable(feature = "matches_macro", since = "1.42.0")] +#[allow(deprecated, deprecated_in_future)] +pub use core::matches; #[stable(feature = "core_primitive", since = "1.43.0")] pub use core::primitive; +#[stable(feature = "todo_macro", since = "1.40.0")] +#[allow(deprecated, deprecated_in_future)] +pub use core::todo; // Re-export built-in macros defined through core. #[stable(feature = "builtin_macro_prelude", since = "1.38.0")] #[allow(deprecated)] +#[cfg_attr(bootstrap, allow(deprecated_in_future))] pub use core::{ assert, assert_matches, cfg, column, compile_error, concat, concat_idents, const_format_args, env, file, format_args, format_args_nl, include, include_bytes, include_str, line, log_syntax, @@ -718,8 +728,8 @@ pub use core::{ #[stable(feature = "rust1", since = "1.0.0")] #[allow(deprecated, deprecated_in_future)] pub use core::{ - assert_eq, assert_ne, debug_assert, debug_assert_eq, debug_assert_ne, matches, todo, r#try, - unimplemented, unreachable, write, writeln, + assert_eq, assert_ne, debug_assert, debug_assert_eq, debug_assert_ne, r#try, unimplemented, + unreachable, write, writeln, }; // Include a number of private modules that exist solely to provide diff --git a/library/std/src/net/socket_addr.rs b/library/std/src/net/socket_addr.rs index 4c8905c0d4609..41e623e79ce27 100644 --- a/library/std/src/net/socket_addr.rs +++ b/library/std/src/net/socket_addr.rs @@ -101,7 +101,7 @@ use crate::{io, iter, option, slice, vec}; /// assert_eq!(err.kind(), io::ErrorKind::InvalidInput); /// ``` /// -/// [`TcpStream::connect`] is an example of an function that utilizes +/// [`TcpStream::connect`] is an example of a function that utilizes /// `ToSocketAddrs` as a trait bound on its parameter in order to accept /// different types: /// diff --git a/library/std/src/os/fd/owned.rs b/library/std/src/os/fd/owned.rs index be73e7dee9c7b..10e1e73a115bd 100644 --- a/library/std/src/os/fd/owned.rs +++ b/library/std/src/os/fd/owned.rs @@ -505,7 +505,7 @@ impl<'a> AsFd for io::StderrLock<'a> { } } -#[stable(feature = "anonymous_pipe", since = "CURRENT_RUSTC_VERSION")] +#[stable(feature = "anonymous_pipe", since = "1.87.0")] #[cfg(not(target_os = "trusty"))] impl AsFd for io::PipeReader { fn as_fd(&self) -> BorrowedFd<'_> { @@ -513,7 +513,7 @@ impl AsFd for io::PipeReader { } } -#[stable(feature = "anonymous_pipe", since = "CURRENT_RUSTC_VERSION")] +#[stable(feature = "anonymous_pipe", since = "1.87.0")] #[cfg(not(target_os = "trusty"))] impl From for OwnedFd { fn from(pipe: io::PipeReader) -> Self { @@ -521,7 +521,7 @@ impl From for OwnedFd { } } -#[stable(feature = "anonymous_pipe", since = "CURRENT_RUSTC_VERSION")] +#[stable(feature = "anonymous_pipe", since = "1.87.0")] #[cfg(not(target_os = "trusty"))] impl AsFd for io::PipeWriter { fn as_fd(&self) -> BorrowedFd<'_> { @@ -529,7 +529,7 @@ impl AsFd for io::PipeWriter { } } -#[stable(feature = "anonymous_pipe", since = "CURRENT_RUSTC_VERSION")] +#[stable(feature = "anonymous_pipe", since = "1.87.0")] #[cfg(not(target_os = "trusty"))] impl From for OwnedFd { fn from(pipe: io::PipeWriter) -> Self { @@ -537,7 +537,7 @@ impl From for OwnedFd { } } -#[stable(feature = "anonymous_pipe", since = "CURRENT_RUSTC_VERSION")] +#[stable(feature = "anonymous_pipe", since = "1.87.0")] #[cfg(not(target_os = "trusty"))] impl From for io::PipeReader { fn from(owned_fd: OwnedFd) -> Self { @@ -545,7 +545,7 @@ impl From for io::PipeReader { } } -#[stable(feature = "anonymous_pipe", since = "CURRENT_RUSTC_VERSION")] +#[stable(feature = "anonymous_pipe", since = "1.87.0")] #[cfg(not(target_os = "trusty"))] impl From for io::PipeWriter { fn from(owned_fd: OwnedFd) -> Self { diff --git a/library/std/src/os/fd/raw.rs b/library/std/src/os/fd/raw.rs index c800c1489ad27..34a6cf1a8b84d 100644 --- a/library/std/src/os/fd/raw.rs +++ b/library/std/src/os/fd/raw.rs @@ -285,7 +285,7 @@ impl AsRawFd for Box { } } -#[stable(feature = "anonymous_pipe", since = "CURRENT_RUSTC_VERSION")] +#[stable(feature = "anonymous_pipe", since = "1.87.0")] #[cfg(not(target_os = "trusty"))] impl AsRawFd for io::PipeReader { fn as_raw_fd(&self) -> RawFd { @@ -293,7 +293,7 @@ impl AsRawFd for io::PipeReader { } } -#[stable(feature = "anonymous_pipe", since = "CURRENT_RUSTC_VERSION")] +#[stable(feature = "anonymous_pipe", since = "1.87.0")] #[cfg(not(target_os = "trusty"))] impl FromRawFd for io::PipeReader { unsafe fn from_raw_fd(raw_fd: RawFd) -> Self { @@ -301,7 +301,7 @@ impl FromRawFd for io::PipeReader { } } -#[stable(feature = "anonymous_pipe", since = "CURRENT_RUSTC_VERSION")] +#[stable(feature = "anonymous_pipe", since = "1.87.0")] #[cfg(not(target_os = "trusty"))] impl IntoRawFd for io::PipeReader { fn into_raw_fd(self) -> RawFd { @@ -309,7 +309,7 @@ impl IntoRawFd for io::PipeReader { } } -#[stable(feature = "anonymous_pipe", since = "CURRENT_RUSTC_VERSION")] +#[stable(feature = "anonymous_pipe", since = "1.87.0")] #[cfg(not(target_os = "trusty"))] impl AsRawFd for io::PipeWriter { fn as_raw_fd(&self) -> RawFd { @@ -317,7 +317,7 @@ impl AsRawFd for io::PipeWriter { } } -#[stable(feature = "anonymous_pipe", since = "CURRENT_RUSTC_VERSION")] +#[stable(feature = "anonymous_pipe", since = "1.87.0")] #[cfg(not(target_os = "trusty"))] impl FromRawFd for io::PipeWriter { unsafe fn from_raw_fd(raw_fd: RawFd) -> Self { @@ -325,7 +325,7 @@ impl FromRawFd for io::PipeWriter { } } -#[stable(feature = "anonymous_pipe", since = "CURRENT_RUSTC_VERSION")] +#[stable(feature = "anonymous_pipe", since = "1.87.0")] #[cfg(not(target_os = "trusty"))] impl IntoRawFd for io::PipeWriter { fn into_raw_fd(self) -> RawFd { diff --git a/library/std/src/os/uefi/env.rs b/library/std/src/os/uefi/env.rs index cf8ae697e389d..ab5406e605c6b 100644 --- a/library/std/src/os/uefi/env.rs +++ b/library/std/src/os/uefi/env.rs @@ -4,13 +4,13 @@ use crate::ffi::c_void; use crate::ptr::NonNull; -use crate::sync::atomic::{AtomicBool, AtomicPtr, Ordering}; +use crate::sync::atomic::{Atomic, AtomicBool, AtomicPtr, Ordering}; -static SYSTEM_TABLE: AtomicPtr = AtomicPtr::new(crate::ptr::null_mut()); -static IMAGE_HANDLE: AtomicPtr = AtomicPtr::new(crate::ptr::null_mut()); +static SYSTEM_TABLE: Atomic<*mut c_void> = AtomicPtr::new(crate::ptr::null_mut()); +static IMAGE_HANDLE: Atomic<*mut c_void> = AtomicPtr::new(crate::ptr::null_mut()); // Flag to check if BootServices are still valid. // Start with assuming that they are not available -static BOOT_SERVICES_FLAG: AtomicBool = AtomicBool::new(false); +static BOOT_SERVICES_FLAG: Atomic = AtomicBool::new(false); /// Initializes the global System Table and Image Handle pointers. /// diff --git a/library/std/src/os/unix/fs.rs b/library/std/src/os/unix/fs.rs index 0427feb29550f..4f9259f39c1ab 100644 --- a/library/std/src/os/unix/fs.rs +++ b/library/std/src/os/unix/fs.rs @@ -1100,3 +1100,39 @@ pub fn lchown>(dir: P, uid: Option, gid: Option) -> io: pub fn chroot>(dir: P) -> io::Result<()> { sys::fs::chroot(dir.as_ref()) } + +/// Create a FIFO special file at the specified path with the specified mode. +/// +/// # Examples +/// +/// ```no_run +/// # #![feature(unix_mkfifo)] +/// # #[cfg(not(unix))] +/// # fn main() {} +/// # #[cfg(unix)] +/// # fn main() -> std::io::Result<()> { +/// # use std::{ +/// # os::unix::fs::{mkfifo, PermissionsExt}, +/// # fs::{File, Permissions, remove_file}, +/// # io::{Write, Read}, +/// # }; +/// # let _ = remove_file("/tmp/fifo"); +/// mkfifo("/tmp/fifo", Permissions::from_mode(0o774))?; +/// +/// let mut wx = File::options().read(true).write(true).open("/tmp/fifo")?; +/// let mut rx = File::open("/tmp/fifo")?; +/// +/// wx.write_all(b"hello, world!")?; +/// drop(wx); +/// +/// let mut s = String::new(); +/// rx.read_to_string(&mut s)?; +/// +/// assert_eq!(s, "hello, world!"); +/// # Ok(()) +/// # } +/// ``` +#[unstable(feature = "unix_mkfifo", issue = "139324")] +pub fn mkfifo>(path: P, permissions: Permissions) -> io::Result<()> { + sys::fs::mkfifo(path.as_ref(), permissions.mode()) +} diff --git a/library/std/src/os/unix/fs/tests.rs b/library/std/src/os/unix/fs/tests.rs index db9621c8c205c..1840bb38c17c8 100644 --- a/library/std/src/os/unix/fs/tests.rs +++ b/library/std/src/os/unix/fs/tests.rs @@ -55,3 +55,23 @@ fn write_vectored_at() { let content = fs::read(&filename).unwrap(); assert_eq!(&content, expected); } + +#[test] +fn test_mkfifo() { + let tmp_dir = crate::test_helpers::tmpdir(); + + let fifo = tmp_dir.path().join("fifo"); + + mkfifo(&fifo, Permissions::from_mode(0o774)).unwrap(); + + let mut wx = fs::File::options().read(true).write(true).open(&fifo).unwrap(); + let mut rx = fs::File::open(fifo).unwrap(); + + wx.write_all(b"hello, world!").unwrap(); + drop(wx); + + let mut s = String::new(); + rx.read_to_string(&mut s).unwrap(); + + assert_eq!(s, "hello, world!"); +} diff --git a/library/std/src/os/windows/io/handle.rs b/library/std/src/os/windows/io/handle.rs index 7f21929b85f99..4fc04b79315f9 100644 --- a/library/std/src/os/windows/io/handle.rs +++ b/library/std/src/os/windows/io/handle.rs @@ -661,42 +661,42 @@ impl From> for OwnedHandle { } } -#[stable(feature = "anonymous_pipe", since = "CURRENT_RUSTC_VERSION")] +#[stable(feature = "anonymous_pipe", since = "1.87.0")] impl AsHandle for io::PipeReader { fn as_handle(&self) -> BorrowedHandle<'_> { self.0.as_handle() } } -#[stable(feature = "anonymous_pipe", since = "CURRENT_RUSTC_VERSION")] +#[stable(feature = "anonymous_pipe", since = "1.87.0")] impl From for OwnedHandle { fn from(pipe: io::PipeReader) -> Self { pipe.into_inner().into_inner() } } -#[stable(feature = "anonymous_pipe", since = "CURRENT_RUSTC_VERSION")] +#[stable(feature = "anonymous_pipe", since = "1.87.0")] impl AsHandle for io::PipeWriter { fn as_handle(&self) -> BorrowedHandle<'_> { self.0.as_handle() } } -#[stable(feature = "anonymous_pipe", since = "CURRENT_RUSTC_VERSION")] +#[stable(feature = "anonymous_pipe", since = "1.87.0")] impl From for OwnedHandle { fn from(pipe: io::PipeWriter) -> Self { pipe.into_inner().into_inner() } } -#[stable(feature = "anonymous_pipe", since = "CURRENT_RUSTC_VERSION")] +#[stable(feature = "anonymous_pipe", since = "1.87.0")] impl From for io::PipeReader { fn from(owned_handle: OwnedHandle) -> Self { Self::from_inner(FromInner::from_inner(owned_handle)) } } -#[stable(feature = "anonymous_pipe", since = "CURRENT_RUSTC_VERSION")] +#[stable(feature = "anonymous_pipe", since = "1.87.0")] impl From for io::PipeWriter { fn from(owned_handle: OwnedHandle) -> Self { Self::from_inner(FromInner::from_inner(owned_handle)) diff --git a/library/std/src/os/windows/io/raw.rs b/library/std/src/os/windows/io/raw.rs index bc3e55c862962..a3ec7440338d2 100644 --- a/library/std/src/os/windows/io/raw.rs +++ b/library/std/src/os/windows/io/raw.rs @@ -311,42 +311,42 @@ impl IntoRawSocket for net::UdpSocket { } } -#[stable(feature = "anonymous_pipe", since = "CURRENT_RUSTC_VERSION")] +#[stable(feature = "anonymous_pipe", since = "1.87.0")] impl AsRawHandle for io::PipeReader { fn as_raw_handle(&self) -> RawHandle { self.0.as_raw_handle() } } -#[stable(feature = "anonymous_pipe", since = "CURRENT_RUSTC_VERSION")] +#[stable(feature = "anonymous_pipe", since = "1.87.0")] impl FromRawHandle for io::PipeReader { unsafe fn from_raw_handle(raw_handle: RawHandle) -> Self { unsafe { Self::from_inner(FromRawHandle::from_raw_handle(raw_handle)) } } } -#[stable(feature = "anonymous_pipe", since = "CURRENT_RUSTC_VERSION")] +#[stable(feature = "anonymous_pipe", since = "1.87.0")] impl IntoRawHandle for io::PipeReader { fn into_raw_handle(self) -> RawHandle { self.0.into_raw_handle() } } -#[stable(feature = "anonymous_pipe", since = "CURRENT_RUSTC_VERSION")] +#[stable(feature = "anonymous_pipe", since = "1.87.0")] impl AsRawHandle for io::PipeWriter { fn as_raw_handle(&self) -> RawHandle { self.0.as_raw_handle() } } -#[stable(feature = "anonymous_pipe", since = "CURRENT_RUSTC_VERSION")] +#[stable(feature = "anonymous_pipe", since = "1.87.0")] impl FromRawHandle for io::PipeWriter { unsafe fn from_raw_handle(raw_handle: RawHandle) -> Self { unsafe { Self::from_inner(FromRawHandle::from_raw_handle(raw_handle)) } } } -#[stable(feature = "anonymous_pipe", since = "CURRENT_RUSTC_VERSION")] +#[stable(feature = "anonymous_pipe", since = "1.87.0")] impl IntoRawHandle for io::PipeWriter { fn into_raw_handle(self) -> RawHandle { self.0.into_raw_handle() diff --git a/library/std/src/os/xous/services.rs b/library/std/src/os/xous/services.rs index 93916750c0547..0681485ea0610 100644 --- a/library/std/src/os/xous/services.rs +++ b/library/std/src/os/xous/services.rs @@ -1,4 +1,4 @@ -use core::sync::atomic::{AtomicU32, Ordering}; +use core::sync::atomic::{Atomic, AtomicU32, Ordering}; use crate::os::xous::ffi::Connection; @@ -106,7 +106,7 @@ pub fn try_connect(name: &str) -> Option { ns::try_connect_with_name(name) } -static NAME_SERVER_CONNECTION: AtomicU32 = AtomicU32::new(0); +static NAME_SERVER_CONNECTION: Atomic = AtomicU32::new(0); /// Returns a `Connection` to the name server. If the name server has not been started, /// then this call will block until the name server has been started. The `Connection` diff --git a/library/std/src/os/xous/services/dns.rs b/library/std/src/os/xous/services/dns.rs index 0288164839360..7641d1f15e444 100644 --- a/library/std/src/os/xous/services/dns.rs +++ b/library/std/src/os/xous/services/dns.rs @@ -1,4 +1,4 @@ -use core::sync::atomic::{AtomicU32, Ordering}; +use core::sync::atomic::{Atomic, AtomicU32, Ordering}; use crate::os::xous::ffi::Connection; use crate::os::xous::services::connect; @@ -17,7 +17,7 @@ impl Into for DnsLendMut { /// Returns a `Connection` to the DNS lookup server. This server is used for /// querying domain name values. pub(crate) fn dns_server() -> Connection { - static DNS_CONNECTION: AtomicU32 = AtomicU32::new(0); + static DNS_CONNECTION: Atomic = AtomicU32::new(0); let cid = DNS_CONNECTION.load(Ordering::Relaxed); if cid != 0 { return cid.into(); diff --git a/library/std/src/os/xous/services/log.rs b/library/std/src/os/xous/services/log.rs index 095d4f4a3e7a8..e7717c8515d06 100644 --- a/library/std/src/os/xous/services/log.rs +++ b/library/std/src/os/xous/services/log.rs @@ -1,4 +1,4 @@ -use core::sync::atomic::{AtomicU32, Ordering}; +use core::sync::atomic::{Atomic, AtomicU32, Ordering}; use crate::os::xous::ffi::Connection; @@ -64,7 +64,7 @@ impl Into for LogLend { /// running. It is safe to call this multiple times, because the address is /// shared among all threads in a process. pub(crate) fn log_server() -> Connection { - static LOG_SERVER_CONNECTION: AtomicU32 = AtomicU32::new(0); + static LOG_SERVER_CONNECTION: Atomic = AtomicU32::new(0); let cid = LOG_SERVER_CONNECTION.load(Ordering::Relaxed); if cid != 0 { diff --git a/library/std/src/os/xous/services/net.rs b/library/std/src/os/xous/services/net.rs index 83acc7961b377..c20bf1a7ad596 100644 --- a/library/std/src/os/xous/services/net.rs +++ b/library/std/src/os/xous/services/net.rs @@ -1,4 +1,4 @@ -use core::sync::atomic::{AtomicU32, Ordering}; +use core::sync::atomic::{Atomic, AtomicU32, Ordering}; use crate::os::xous::ffi::Connection; use crate::os::xous::services::connect; @@ -84,7 +84,7 @@ impl<'a> Into<[usize; 5]> for NetBlockingScalar { /// Returns a `Connection` to the Network server. This server provides all /// OS-level networking functions. pub(crate) fn net_server() -> Connection { - static NET_CONNECTION: AtomicU32 = AtomicU32::new(0); + static NET_CONNECTION: Atomic = AtomicU32::new(0); let cid = NET_CONNECTION.load(Ordering::Relaxed); if cid != 0 { return cid.into(); diff --git a/library/std/src/os/xous/services/systime.rs b/library/std/src/os/xous/services/systime.rs index de87694b4cdca..e54cffdc4c018 100644 --- a/library/std/src/os/xous/services/systime.rs +++ b/library/std/src/os/xous/services/systime.rs @@ -1,4 +1,4 @@ -use core::sync::atomic::{AtomicU32, Ordering}; +use core::sync::atomic::{Atomic, AtomicU32, Ordering}; use crate::os::xous::ffi::{Connection, connect}; @@ -17,7 +17,7 @@ impl Into<[usize; 5]> for SystimeScalar { /// Returns a `Connection` to the systime server. This server is used for reporting the /// realtime clock. pub(crate) fn systime_server() -> Connection { - static SYSTIME_SERVER_CONNECTION: AtomicU32 = AtomicU32::new(0); + static SYSTIME_SERVER_CONNECTION: Atomic = AtomicU32::new(0); let cid = SYSTIME_SERVER_CONNECTION.load(Ordering::Relaxed); if cid != 0 { return cid.into(); diff --git a/library/std/src/os/xous/services/ticktimer.rs b/library/std/src/os/xous/services/ticktimer.rs index 66ade6da65cd3..bf51ecde8e5bc 100644 --- a/library/std/src/os/xous/services/ticktimer.rs +++ b/library/std/src/os/xous/services/ticktimer.rs @@ -1,4 +1,4 @@ -use core::sync::atomic::{AtomicU32, Ordering}; +use core::sync::atomic::{Atomic, AtomicU32, Ordering}; use crate::os::xous::ffi::Connection; @@ -31,7 +31,7 @@ impl Into<[usize; 5]> for TicktimerScalar { /// Returns a `Connection` to the ticktimer server. This server is used for synchronization /// primitives such as sleep, Mutex, and Condvar. pub(crate) fn ticktimer_server() -> Connection { - static TICKTIMER_SERVER_CONNECTION: AtomicU32 = AtomicU32::new(0); + static TICKTIMER_SERVER_CONNECTION: Atomic = AtomicU32::new(0); let cid = TICKTIMER_SERVER_CONNECTION.load(Ordering::Relaxed); if cid != 0 { return cid.into(); diff --git a/library/std/src/panic.rs b/library/std/src/panic.rs index 22776ae2bc4a7..f3b26ac64dfa3 100644 --- a/library/std/src/panic.rs +++ b/library/std/src/panic.rs @@ -3,7 +3,7 @@ #![stable(feature = "std_panic", since = "1.9.0")] use crate::any::Any; -use crate::sync::atomic::{AtomicU8, Ordering}; +use crate::sync::atomic::{Atomic, AtomicU8, Ordering}; use crate::sync::{Condvar, Mutex, RwLock}; use crate::thread::Result; use crate::{collections, fmt, panicking}; @@ -469,7 +469,7 @@ impl BacktraceStyle { // that backtrace. // // Internally stores equivalent of an Option. -static SHOULD_CAPTURE: AtomicU8 = AtomicU8::new(0); +static SHOULD_CAPTURE: Atomic = AtomicU8::new(0); /// Configures whether the default panic hook will capture and display a /// backtrace. diff --git a/library/std/src/panicking.rs b/library/std/src/panicking.rs index b35549c92ada7..4bfedf78366e7 100644 --- a/library/std/src/panicking.rs +++ b/library/std/src/panicking.rs @@ -21,7 +21,7 @@ use crate::any::Any; use crate::io::try_set_output_capture; use crate::mem::{self, ManuallyDrop}; use crate::panic::{BacktraceStyle, PanicHookInfo}; -use crate::sync::atomic::{AtomicBool, Ordering}; +use crate::sync::atomic::{Atomic, AtomicBool, Ordering}; use crate::sync::{PoisonError, RwLock}; use crate::sys::backtrace; use crate::sys::stdio::panic_output; @@ -55,14 +55,14 @@ pub static EMPTY_PANIC: fn(&'static str) -> ! = // hook up these functions, but it is not this day! #[allow(improper_ctypes)] unsafe extern "C" { - #[cfg_attr(not(bootstrap), rustc_std_internal_symbol)] + #[rustc_std_internal_symbol] fn __rust_panic_cleanup(payload: *mut u8) -> *mut (dyn Any + Send + 'static); } unsafe extern "Rust" { /// `PanicPayload` lazily performs allocation only when needed (this avoids /// allocations when using the "abort" panic runtime). - #[cfg_attr(not(bootstrap), rustc_std_internal_symbol)] + #[rustc_std_internal_symbol] fn __rust_start_panic(payload: &mut dyn PanicPayload) -> u32; } @@ -289,7 +289,7 @@ fn default_hook(info: &PanicHookInfo<'_>) { }; }); - static FIRST_PANIC: AtomicBool = AtomicBool::new(true); + static FIRST_PANIC: Atomic = AtomicBool::new(true); match backtrace { // SAFETY: we took out a lock just a second ago. @@ -374,7 +374,7 @@ pub mod panic_count { #[unstable(feature = "update_panic_count", issue = "none")] pub mod panic_count { use crate::cell::Cell; - use crate::sync::atomic::{AtomicUsize, Ordering}; + use crate::sync::atomic::{Atomic, AtomicUsize, Ordering}; const ALWAYS_ABORT_FLAG: usize = 1 << (usize::BITS - 1); @@ -416,7 +416,7 @@ pub mod panic_count { // // Stealing a bit is fine because it just amounts to assuming that each // panicking thread consumes at least 2 bytes of address space. - static GLOBAL_PANIC_COUNT: AtomicUsize = AtomicUsize::new(0); + static GLOBAL_PANIC_COUNT: Atomic = AtomicUsize::new(0); // Increases the global and local panic count, and returns whether an // immediate abort is required. diff --git a/library/std/src/path.rs b/library/std/src/path.rs index 980213be7ea90..1a4a7aa7448cb 100644 --- a/library/std/src/path.rs +++ b/library/std/src/path.rs @@ -353,6 +353,15 @@ fn split_file_at_dot(file: &OsStr) -> (&OsStr, Option<&OsStr>) { } } +/// Checks whether the string is valid as a file extension, or panics otherwise. +fn validate_extension(extension: &OsStr) { + for &b in extension.as_encoded_bytes() { + if is_sep_byte(b) { + panic!("extension cannot contain path separators: {extension:?}"); + } + } +} + //////////////////////////////////////////////////////////////////////////////// // The core iterators //////////////////////////////////////////////////////////////////////////////// @@ -1507,13 +1516,7 @@ impl PathBuf { } fn _set_extension(&mut self, extension: &OsStr) -> bool { - for &b in extension.as_encoded_bytes() { - if b < 128 { - if is_separator(b as char) { - panic!("extension cannot contain path separators: {:?}", extension); - } - } - } + validate_extension(extension); let file_stem = match self.file_stem() { None => return false, @@ -1526,11 +1529,13 @@ impl PathBuf { self.inner.truncate(end_file_stem.wrapping_sub(start)); // add the new extension, if any - let new = extension; + let new = extension.as_encoded_bytes(); if !new.is_empty() { self.inner.reserve_exact(new.len() + 1); - self.inner.push(OsStr::new(".")); - self.inner.push(new); + self.inner.push("."); + // SAFETY: Since a UTF-8 string was just pushed, it is not possible + // for the buffer to end with a surrogate half. + unsafe { self.inner.extend_from_slice_unchecked(new) }; } true @@ -1541,6 +1546,11 @@ impl PathBuf { /// Returns `false` and does nothing if [`self.file_name`] is [`None`], /// returns `true` and updates the extension otherwise. /// + /// # Panics + /// + /// Panics if the passed extension contains a path separator (see + /// [`is_separator`]). + /// /// # Caveats /// /// The appended `extension` may contain dots and will be used in its entirety, @@ -1582,12 +1592,14 @@ impl PathBuf { } fn _add_extension(&mut self, extension: &OsStr) -> bool { + validate_extension(extension); + let file_name = match self.file_name() { None => return false, Some(f) => f.as_encoded_bytes(), }; - let new = extension; + let new = extension.as_encoded_bytes(); if !new.is_empty() { // truncate until right after the file name // this is necessary for trimming the trailing slash @@ -1597,8 +1609,10 @@ impl PathBuf { // append the new extension self.inner.reserve_exact(new.len() + 1); - self.inner.push(OsStr::new(".")); - self.inner.push(new); + self.inner.push("."); + // SAFETY: Since a UTF-8 string was just pushed, it is not possible + // for the buffer to end with a surrogate half. + unsafe { self.inner.extend_from_slice_unchecked(new) }; } true @@ -2759,7 +2773,8 @@ impl Path { }; let mut new_path = PathBuf::with_capacity(new_capacity); - new_path.inner.extend_from_slice(slice_to_copy); + // SAFETY: The path is empty, so cannot have surrogate halves. + unsafe { new_path.inner.extend_from_slice_unchecked(slice_to_copy) }; new_path.set_extension(extension); new_path } @@ -3265,7 +3280,7 @@ impl Hash for Path { if !verbatim { component_start += match tail { [b'.'] => 1, - [b'.', sep @ _, ..] if is_sep_byte(*sep) => 1, + [b'.', sep, ..] if is_sep_byte(*sep) => 1, _ => 0, }; } diff --git a/library/std/src/prelude/v1.rs b/library/std/src/prelude/v1.rs index 4217f65864072..ca45a70919327 100644 --- a/library/std/src/prelude/v1.rs +++ b/library/std/src/prelude/v1.rs @@ -46,6 +46,7 @@ pub use crate::result::Result::{self, Err, Ok}; // Re-exported built-in macros #[stable(feature = "builtin_macro_prelude", since = "1.38.0")] #[allow(deprecated)] +#[cfg_attr(bootstrap, allow(deprecated_in_future))] #[doc(no_inline)] pub use core::prelude::v1::{ assert, cfg, column, compile_error, concat, concat_idents, env, file, format_args, @@ -109,9 +110,16 @@ pub use core::prelude::v1::deref; issue = "63063", reason = "`type_alias_impl_trait` has open design concerns" )] -#[cfg(not(bootstrap))] pub use core::prelude::v1::define_opaque; +#[unstable(feature = "eii", issue = "125418")] +#[cfg(not(bootstrap))] +pub use core::prelude::v1::{eii, unsafe_eii}; + +#[unstable(feature = "eii_internals", issue = "none")] +#[cfg(not(bootstrap))] +pub use core::prelude::v1::eii_macro_for; + // The file so far is equivalent to core/src/prelude/v1.rs. It is duplicated // rather than glob imported because we want docs to show these re-exports as // pointing to within `std`. diff --git a/library/std/src/process.rs b/library/std/src/process.rs index 3b765a9537bc9..df6b9a6e563ce 100644 --- a/library/std/src/process.rs +++ b/library/std/src/process.rs @@ -168,8 +168,6 @@ use crate::num::NonZero; use crate::path::Path; use crate::sys::pipe::{AnonPipe, read2}; use crate::sys::process as imp; -#[stable(feature = "command_access", since = "1.57.0")] -pub use crate::sys_common::process::CommandEnvs; use crate::sys_common::{AsInner, AsInnerMut, FromInner, IntoInner}; use crate::{fmt, fs, str}; @@ -1073,7 +1071,7 @@ impl Command { /// ``` #[stable(feature = "process", since = "1.0.0")] pub fn output(&mut self) -> io::Result { - let (status, stdout, stderr) = self.inner.output()?; + let (status, stdout, stderr) = imp::output(&mut self.inner)?; Ok(Output { status: ExitStatus(status), stdout, stderr }) } @@ -1174,7 +1172,7 @@ impl Command { /// ``` #[stable(feature = "command_access", since = "1.57.0")] pub fn get_envs(&self) -> CommandEnvs<'_> { - self.inner.get_envs() + CommandEnvs { iter: self.inner.get_envs() } } /// Returns the working directory for the child process. @@ -1264,6 +1262,48 @@ impl<'a> ExactSizeIterator for CommandArgs<'a> { } } +/// An iterator over the command environment variables. +/// +/// This struct is created by +/// [`Command::get_envs`][crate::process::Command::get_envs]. See its +/// documentation for more. +#[must_use = "iterators are lazy and do nothing unless consumed"] +#[stable(feature = "command_access", since = "1.57.0")] +pub struct CommandEnvs<'a> { + iter: imp::CommandEnvs<'a>, +} + +#[stable(feature = "command_access", since = "1.57.0")] +impl<'a> Iterator for CommandEnvs<'a> { + type Item = (&'a OsStr, Option<&'a OsStr>); + + fn next(&mut self) -> Option { + self.iter.next() + } + + fn size_hint(&self) -> (usize, Option) { + self.iter.size_hint() + } +} + +#[stable(feature = "command_access", since = "1.57.0")] +impl<'a> ExactSizeIterator for CommandEnvs<'a> { + fn len(&self) -> usize { + self.iter.len() + } + + fn is_empty(&self) -> bool { + self.iter.is_empty() + } +} + +#[stable(feature = "command_access", since = "1.57.0")] +impl<'a> fmt::Debug for CommandEnvs<'a> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + self.iter.fmt(f) + } +} + /// The output of a finished process. /// /// This is returned in a Result by either the [`output`] method of a @@ -1286,6 +1326,40 @@ pub struct Output { pub stderr: Vec, } +impl Output { + /// Returns an error if a nonzero exit status was received. + /// + /// If the [`Command`] exited successfully, + /// `self` is returned. + /// + /// This is equivalent to calling [`exit_ok`](ExitStatus::exit_ok) + /// on [`Output.status`](Output::status). + /// + /// Note that this will throw away the [`Output::stderr`] field in the error case. + /// If the child process outputs useful informantion to stderr, you can: + /// * Use `cmd.stderr(Stdio::inherit())` to forward the + /// stderr child process to the parent's stderr, + /// usually printing it to console where the user can see it. + /// This is usually correct for command-line applications. + /// * Capture `stderr` using a custom error type. + /// This is usually correct for libraries. + /// + /// # Examples + /// + /// ``` + /// #![feature(exit_status_error)] + /// # #[cfg(unix)] { + /// use std::process::Command; + /// assert!(Command::new("false").output().unwrap().exit_ok().is_err()); + /// # } + /// ``` + #[unstable(feature = "exit_status_error", issue = "84908")] + pub fn exit_ok(self) -> Result { + self.status.exit_ok()?; + Ok(self) + } +} + // If either stderr or stdout are valid utf8 strings it prints the valid // strings, otherwise it prints the byte sequence instead #[stable(feature = "process_output_debug", since = "1.7.0")] @@ -1659,14 +1733,14 @@ impl From for Stdio { } } -#[stable(feature = "anonymous_pipe", since = "CURRENT_RUSTC_VERSION")] +#[stable(feature = "anonymous_pipe", since = "1.87.0")] impl From for Stdio { fn from(pipe: io::PipeWriter) -> Self { Stdio::from_inner(pipe.into_inner().into()) } } -#[stable(feature = "anonymous_pipe", since = "CURRENT_RUSTC_VERSION")] +#[stable(feature = "anonymous_pipe", since = "1.87.0")] impl From for Stdio { fn from(pipe: io::PipeReader) -> Self { Stdio::from_inner(pipe.into_inner().into()) diff --git a/library/std/src/rt.rs b/library/std/src/rt.rs index 3a22a16cb165e..9737b2f5bfe60 100644 --- a/library/std/src/rt.rs +++ b/library/std/src/rt.rs @@ -46,7 +46,7 @@ macro_rules! rtprintpanic { macro_rules! rtabort { ($($t:tt)*) => { { - rtprintpanic!("fatal runtime error: {}\n", format_args!($($t)*)); + rtprintpanic!("fatal runtime error: {}, aborting\n", format_args!($($t)*)); crate::sys::abort_internal(); } } diff --git a/library/std/src/sync/mpmc/array.rs b/library/std/src/sync/mpmc/array.rs index a467237fef152..880d8b5f57cf4 100644 --- a/library/std/src/sync/mpmc/array.rs +++ b/library/std/src/sync/mpmc/array.rs @@ -16,13 +16,13 @@ use super::waker::SyncWaker; use crate::cell::UnsafeCell; use crate::mem::MaybeUninit; use crate::ptr; -use crate::sync::atomic::{self, AtomicUsize, Ordering}; +use crate::sync::atomic::{self, Atomic, AtomicUsize, Ordering}; use crate::time::Instant; /// A slot in a channel. struct Slot { /// The current stamp. - stamp: AtomicUsize, + stamp: Atomic, /// The message in this slot. Either read out in `read` or dropped through /// `discard_all_messages`. @@ -55,7 +55,7 @@ pub(crate) struct Channel { /// represent the lap. The mark bit in the head is always zero. /// /// Messages are popped from the head of the channel. - head: CachePadded, + head: CachePadded>, /// The tail of the channel. /// @@ -64,7 +64,7 @@ pub(crate) struct Channel { /// represent the lap. The mark bit indicates that the channel is disconnected. /// /// Messages are pushed into the tail of the channel. - tail: CachePadded, + tail: CachePadded>, /// The buffer holding slots. buffer: Box<[Slot]>, diff --git a/library/std/src/sync/mpmc/context.rs b/library/std/src/sync/mpmc/context.rs index 51aa7e82e7890..6b2f4cb6ffd29 100644 --- a/library/std/src/sync/mpmc/context.rs +++ b/library/std/src/sync/mpmc/context.rs @@ -5,7 +5,7 @@ use super::waker::current_thread_id; use crate::cell::Cell; use crate::ptr; use crate::sync::Arc; -use crate::sync::atomic::{AtomicPtr, AtomicUsize, Ordering}; +use crate::sync::atomic::{Atomic, AtomicPtr, AtomicUsize, Ordering}; use crate::thread::{self, Thread}; use crate::time::Instant; @@ -19,10 +19,10 @@ pub struct Context { #[derive(Debug)] struct Inner { /// Selected operation. - select: AtomicUsize, + select: Atomic, /// A slot into which another thread may store a pointer to its `Packet`. - packet: AtomicPtr<()>, + packet: Atomic<*mut ()>, /// Thread handle. thread: Thread, diff --git a/library/std/src/sync/mpmc/counter.rs b/library/std/src/sync/mpmc/counter.rs index d1bfe612f536f..efa6af1148334 100644 --- a/library/std/src/sync/mpmc/counter.rs +++ b/library/std/src/sync/mpmc/counter.rs @@ -1,16 +1,16 @@ -use crate::sync::atomic::{AtomicBool, AtomicUsize, Ordering}; +use crate::sync::atomic::{Atomic, AtomicBool, AtomicUsize, Ordering}; use crate::{ops, process}; /// Reference counter internals. struct Counter { /// The number of senders associated with the channel. - senders: AtomicUsize, + senders: Atomic, /// The number of receivers associated with the channel. - receivers: AtomicUsize, + receivers: Atomic, /// Set to `true` if the last sender or the last receiver reference deallocates the channel. - destroy: AtomicBool, + destroy: Atomic, /// The internal channel. chan: C, diff --git a/library/std/src/sync/mpmc/list.rs b/library/std/src/sync/mpmc/list.rs index d88914f529142..3fcfb85cf2aab 100644 --- a/library/std/src/sync/mpmc/list.rs +++ b/library/std/src/sync/mpmc/list.rs @@ -9,7 +9,7 @@ use crate::cell::UnsafeCell; use crate::marker::PhantomData; use crate::mem::MaybeUninit; use crate::ptr; -use crate::sync::atomic::{self, AtomicPtr, AtomicUsize, Ordering}; +use crate::sync::atomic::{self, Atomic, AtomicPtr, AtomicUsize, Ordering}; use crate::time::Instant; // Bits indicating the state of a slot: @@ -37,7 +37,7 @@ struct Slot { msg: UnsafeCell>, /// The state of the slot. - state: AtomicUsize, + state: Atomic, } impl Slot { @@ -55,7 +55,7 @@ impl Slot { /// Each block in the list can hold up to `BLOCK_CAP` messages. struct Block { /// The next block in the linked list. - next: AtomicPtr>, + next: Atomic<*mut Block>, /// Slots for messages. slots: [Slot; BLOCK_CAP], @@ -65,11 +65,11 @@ impl Block { /// Creates an empty block. fn new() -> Box> { // SAFETY: This is safe because: - // [1] `Block::next` (AtomicPtr) may be safely zero initialized. + // [1] `Block::next` (Atomic<*mut _>) may be safely zero initialized. // [2] `Block::slots` (Array) may be safely zero initialized because of [3, 4]. // [3] `Slot::msg` (UnsafeCell) may be safely zero initialized because it // holds a MaybeUninit. - // [4] `Slot::state` (AtomicUsize) may be safely zero initialized. + // [4] `Slot::state` (Atomic) may be safely zero initialized. unsafe { Box::new_zeroed().assume_init() } } @@ -110,10 +110,10 @@ impl Block { #[derive(Debug)] struct Position { /// The index in the channel. - index: AtomicUsize, + index: Atomic, /// The block in the linked list. - block: AtomicPtr>, + block: Atomic<*mut Block>, } /// The token type for the list flavor. @@ -213,6 +213,11 @@ impl Channel { .compare_exchange(block, new, Ordering::Release, Ordering::Relaxed) .is_ok() { + // This yield point leaves the channel in a half-initialized state where the + // tail.block pointer is set but the head.block is not. This is used to + // facilitate the test in src/tools/miri/tests/pass/issues/issue-139553.rs + #[cfg(miri)] + crate::thread::yield_now(); self.head.block.store(new, Ordering::Release); block = new; } else { @@ -564,9 +569,15 @@ impl Channel { // In that case, just wait until it gets initialized. while block.is_null() { backoff.spin_heavy(); - block = self.head.block.load(Ordering::Acquire); + block = self.head.block.swap(ptr::null_mut(), Ordering::AcqRel); } } + // After this point `head.block` is not modified again and it will be deallocated if it's + // non-null. The `Drop` code of the channel, which runs after this function, also attempts + // to deallocate `head.block` if it's non-null. Therefore this function must maintain the + // invariant that if a deallocation of head.block is attemped then it must also be set to + // NULL. Failing to do so will lead to the Drop code attempting a double free. For this + // reason both reads above do an atomic swap instead of a simple atomic load. unsafe { // Drop all messages between head and tail and deallocate the heap-allocated blocks. diff --git a/library/std/src/sync/mpmc/waker.rs b/library/std/src/sync/mpmc/waker.rs index f5e764e69bd6e..4216fb7ac5902 100644 --- a/library/std/src/sync/mpmc/waker.rs +++ b/library/std/src/sync/mpmc/waker.rs @@ -4,7 +4,7 @@ use super::context::Context; use super::select::{Operation, Selected}; use crate::ptr; use crate::sync::Mutex; -use crate::sync::atomic::{AtomicBool, Ordering}; +use crate::sync::atomic::{Atomic, AtomicBool, Ordering}; /// Represents a thread blocked on a specific channel operation. pub(crate) struct Entry { @@ -137,7 +137,7 @@ pub(crate) struct SyncWaker { inner: Mutex, /// `true` if the waker is empty. - is_empty: AtomicBool, + is_empty: Atomic, } impl SyncWaker { diff --git a/library/std/src/sync/mpmc/zero.rs b/library/std/src/sync/mpmc/zero.rs index 577997c07a636..f1ecf80fcb9f6 100644 --- a/library/std/src/sync/mpmc/zero.rs +++ b/library/std/src/sync/mpmc/zero.rs @@ -10,7 +10,7 @@ use super::waker::Waker; use crate::cell::UnsafeCell; use crate::marker::PhantomData; use crate::sync::Mutex; -use crate::sync::atomic::{AtomicBool, Ordering}; +use crate::sync::atomic::{Atomic, AtomicBool, Ordering}; use crate::time::Instant; use crate::{fmt, ptr}; @@ -35,7 +35,7 @@ struct Packet { on_stack: bool, /// Equals `true` once the packet is ready for reading or writing. - ready: AtomicBool, + ready: Atomic, /// The message. msg: UnsafeCell>, diff --git a/library/std/src/sync/poison.rs b/library/std/src/sync/poison.rs index 1b8809734b8a8..cc1d0b30152a1 100644 --- a/library/std/src/sync/poison.rs +++ b/library/std/src/sync/poison.rs @@ -76,7 +76,7 @@ pub use self::rwlock::{RwLock, RwLockReadGuard, RwLockWriteGuard}; use crate::error::Error; use crate::fmt; #[cfg(panic = "unwind")] -use crate::sync::atomic::{AtomicBool, Ordering}; +use crate::sync::atomic::{Atomic, AtomicBool, Ordering}; #[cfg(panic = "unwind")] use crate::thread; @@ -88,7 +88,7 @@ mod rwlock; pub(crate) struct Flag { #[cfg(panic = "unwind")] - failed: AtomicBool, + failed: Atomic, } // Note that the Ordering uses to access the `failed` field of `Flag` below is diff --git a/library/std/src/sync/poison/mutex.rs b/library/std/src/sync/poison/mutex.rs index 9362c764173a8..1c29c619edc3a 100644 --- a/library/std/src/sync/poison/mutex.rs +++ b/library/std/src/sync/poison/mutex.rs @@ -253,11 +253,11 @@ unsafe impl Sync for MutexGuard<'_, T> {} /// The data protected by the mutex can be accessed through this guard via its /// [`Deref`] and [`DerefMut`] implementations. /// -/// This structure is created by the [`map`] and [`try_map`] methods on +/// This structure is created by the [`map`] and [`filter_map`] methods on /// [`MutexGuard`]. /// /// [`map`]: MutexGuard::map -/// [`try_map`]: MutexGuard::try_map +/// [`filter_map`]: MutexGuard::filter_map /// [`Condvar`]: crate::sync::Condvar #[must_use = "if unused the Mutex will immediately unlock"] #[must_not_suspend = "holding a MappedMutexGuard across suspend \ @@ -582,7 +582,9 @@ impl Mutex { /// Returns a mutable reference to the underlying data. /// /// Since this call borrows the `Mutex` mutably, no actual locking needs to - /// take place -- the mutable borrow statically guarantees no locks exist. + /// take place -- the mutable borrow statically guarantees no new locks can be acquired + /// while this reference exists. Note that this method does not clear any previous abandoned locks + /// (e.g., via [`forget()`] on a [`MutexGuard`]). /// /// # Errors /// @@ -599,6 +601,8 @@ impl Mutex { /// *mutex.get_mut().unwrap() = 10; /// assert_eq!(*mutex.lock().unwrap(), 10); /// ``` + /// + /// [`forget()`]: mem::forget #[stable(feature = "mutex_get_mut", since = "1.6.0")] pub fn get_mut(&mut self) -> LockResult<&mut T> { let data = self.data.get_mut(); @@ -714,7 +718,7 @@ impl<'a, T: ?Sized> MutexGuard<'a, T> { U: ?Sized, { // SAFETY: the conditions of `MutexGuard::new` were satisfied when the original guard - // was created, and have been upheld throughout `map` and/or `try_map`. + // was created, and have been upheld throughout `map` and/or `filter_map`. // The signature of the closure guarantees that it will not "leak" the lifetime of the reference // passed to it. If the closure panics, the guard will be dropped. let data = NonNull::from(f(unsafe { &mut *orig.lock.data.get() })); @@ -735,17 +739,16 @@ impl<'a, T: ?Sized> MutexGuard<'a, T> { /// The `Mutex` is already locked, so this cannot fail. /// /// This is an associated function that needs to be used as - /// `MutexGuard::try_map(...)`. A method would interfere with methods of the + /// `MutexGuard::filter_map(...)`. A method would interfere with methods of the /// same name on the contents of the `MutexGuard` used through `Deref`. - #[doc(alias = "filter_map")] #[unstable(feature = "mapped_lock_guards", issue = "117108")] - pub fn try_map(orig: Self, f: F) -> Result, Self> + pub fn filter_map(orig: Self, f: F) -> Result, Self> where F: FnOnce(&mut T) -> Option<&mut U>, U: ?Sized, { // SAFETY: the conditions of `MutexGuard::new` were satisfied when the original guard - // was created, and have been upheld throughout `map` and/or `try_map`. + // was created, and have been upheld throughout `map` and/or `filter_map`. // The signature of the closure guarantees that it will not "leak" the lifetime of the reference // passed to it. If the closure panics, the guard will be dropped. match f(unsafe { &mut *orig.lock.data.get() }) { @@ -822,7 +825,7 @@ impl<'a, T: ?Sized> MappedMutexGuard<'a, T> { U: ?Sized, { // SAFETY: the conditions of `MutexGuard::new` were satisfied when the original guard - // was created, and have been upheld throughout `map` and/or `try_map`. + // was created, and have been upheld throughout `map` and/or `filter_map`. // The signature of the closure guarantees that it will not "leak" the lifetime of the reference // passed to it. If the closure panics, the guard will be dropped. let data = NonNull::from(f(unsafe { orig.data.as_mut() })); @@ -843,17 +846,16 @@ impl<'a, T: ?Sized> MappedMutexGuard<'a, T> { /// The `Mutex` is already locked, so this cannot fail. /// /// This is an associated function that needs to be used as - /// `MappedMutexGuard::try_map(...)`. A method would interfere with methods of the + /// `MappedMutexGuard::filter_map(...)`. A method would interfere with methods of the /// same name on the contents of the `MutexGuard` used through `Deref`. - #[doc(alias = "filter_map")] #[unstable(feature = "mapped_lock_guards", issue = "117108")] - pub fn try_map(mut orig: Self, f: F) -> Result, Self> + pub fn filter_map(mut orig: Self, f: F) -> Result, Self> where F: FnOnce(&mut T) -> Option<&mut U>, U: ?Sized, { // SAFETY: the conditions of `MutexGuard::new` were satisfied when the original guard - // was created, and have been upheld throughout `map` and/or `try_map`. + // was created, and have been upheld throughout `map` and/or `filter_map`. // The signature of the closure guarantees that it will not "leak" the lifetime of the reference // passed to it. If the closure panics, the guard will be dropped. match f(unsafe { orig.data.as_mut() }) { diff --git a/library/std/src/sync/poison/rwlock.rs b/library/std/src/sync/poison/rwlock.rs index f9d9321f5f2d8..6976c0a64e23f 100644 --- a/library/std/src/sync/poison/rwlock.rs +++ b/library/std/src/sync/poison/rwlock.rs @@ -147,11 +147,11 @@ unsafe impl Sync for RwLockWriteGuard<'_, T> {} /// RAII structure used to release the shared read access of a lock when /// dropped, which can point to a subfield of the protected data. /// -/// This structure is created by the [`map`] and [`try_map`] methods +/// This structure is created by the [`map`] and [`filter_map`] methods /// on [`RwLockReadGuard`]. /// /// [`map`]: RwLockReadGuard::map -/// [`try_map`]: RwLockReadGuard::try_map +/// [`filter_map`]: RwLockReadGuard::filter_map #[must_use = "if unused the RwLock will immediately unlock"] #[must_not_suspend = "holding a MappedRwLockReadGuard across suspend \ points can cause deadlocks, delays, \ @@ -176,11 +176,11 @@ unsafe impl Sync for MappedRwLockReadGuard<'_, T> {} /// RAII structure used to release the exclusive write access of a lock when /// dropped, which can point to a subfield of the protected data. /// -/// This structure is created by the [`map`] and [`try_map`] methods +/// This structure is created by the [`map`] and [`filter_map`] methods /// on [`RwLockWriteGuard`]. /// /// [`map`]: RwLockWriteGuard::map -/// [`try_map`]: RwLockWriteGuard::try_map +/// [`filter_map`]: RwLockWriteGuard::filter_map #[must_use = "if unused the RwLock will immediately unlock"] #[must_not_suspend = "holding a MappedRwLockWriteGuard across suspend \ points can cause deadlocks, delays, \ @@ -608,7 +608,9 @@ impl RwLock { /// Returns a mutable reference to the underlying data. /// /// Since this call borrows the `RwLock` mutably, no actual locking needs to - /// take place -- the mutable borrow statically guarantees no locks exist. + /// take place -- the mutable borrow statically guarantees no new locks can be acquired + /// while this reference exists. Note that this method does not clear any previously abandoned locks + /// (e.g., via [`forget()`] on a [`RwLockReadGuard`] or [`RwLockWriteGuard`]). /// /// # Errors /// @@ -786,7 +788,7 @@ impl Deref for MappedRwLockReadGuard<'_, T> { fn deref(&self) -> &T { // SAFETY: the conditions of `RwLockReadGuard::new` were satisfied when the original guard - // was created, and have been upheld throughout `map` and/or `try_map`. + // was created, and have been upheld throughout `map` and/or `filter_map`. unsafe { self.data.as_ref() } } } @@ -797,7 +799,7 @@ impl Deref for MappedRwLockWriteGuard<'_, T> { fn deref(&self) -> &T { // SAFETY: the conditions of `RwLockWriteGuard::new` were satisfied when the original guard - // was created, and have been upheld throughout `map` and/or `try_map`. + // was created, and have been upheld throughout `map` and/or `filter_map`. unsafe { self.data.as_ref() } } } @@ -806,7 +808,7 @@ impl Deref for MappedRwLockWriteGuard<'_, T> { impl DerefMut for MappedRwLockWriteGuard<'_, T> { fn deref_mut(&mut self) -> &mut T { // SAFETY: the conditions of `RwLockWriteGuard::new` were satisfied when the original guard - // was created, and have been upheld throughout `map` and/or `try_map`. + // was created, and have been upheld throughout `map` and/or `filter_map`. unsafe { self.data.as_mut() } } } @@ -836,7 +838,7 @@ impl Drop for RwLockWriteGuard<'_, T> { impl Drop for MappedRwLockReadGuard<'_, T> { fn drop(&mut self) { // SAFETY: the conditions of `RwLockReadGuard::new` were satisfied when the original guard - // was created, and have been upheld throughout `map` and/or `try_map`. + // was created, and have been upheld throughout `map` and/or `filter_map`. unsafe { self.inner_lock.read_unlock(); } @@ -848,7 +850,7 @@ impl Drop for MappedRwLockWriteGuard<'_, T> { fn drop(&mut self) { self.poison_flag.done(&self.poison); // SAFETY: the conditions of `RwLockWriteGuard::new` were satisfied when the original guard - // was created, and have been upheld throughout `map` and/or `try_map`. + // was created, and have been upheld throughout `map` and/or `filter_map`. unsafe { self.inner_lock.write_unlock(); } @@ -876,7 +878,7 @@ impl<'a, T: ?Sized> RwLockReadGuard<'a, T> { U: ?Sized, { // SAFETY: the conditions of `RwLockReadGuard::new` were satisfied when the original guard - // was created, and have been upheld throughout `map` and/or `try_map`. + // was created, and have been upheld throughout `map` and/or `filter_map`. // The signature of the closure guarantees that it will not "leak" the lifetime of the reference // passed to it. If the closure panics, the guard will be dropped. let data = NonNull::from(f(unsafe { orig.data.as_ref() })); @@ -891,22 +893,21 @@ impl<'a, T: ?Sized> RwLockReadGuard<'a, T> { /// The `RwLock` is already locked for reading, so this cannot fail. /// /// This is an associated function that needs to be used as - /// `RwLockReadGuard::try_map(...)`. A method would interfere with methods + /// `RwLockReadGuard::filter_map(...)`. A method would interfere with methods /// of the same name on the contents of the `RwLockReadGuard` used through /// `Deref`. /// /// # Panics /// /// If the closure panics, the guard will be dropped (unlocked) and the RwLock will not be poisoned. - #[doc(alias = "filter_map")] #[unstable(feature = "mapped_lock_guards", issue = "117108")] - pub fn try_map(orig: Self, f: F) -> Result, Self> + pub fn filter_map(orig: Self, f: F) -> Result, Self> where F: FnOnce(&T) -> Option<&U>, U: ?Sized, { // SAFETY: the conditions of `RwLockReadGuard::new` were satisfied when the original guard - // was created, and have been upheld throughout `map` and/or `try_map`. + // was created, and have been upheld throughout `map` and/or `filter_map`. // The signature of the closure guarantees that it will not "leak" the lifetime of the reference // passed to it. If the closure panics, the guard will be dropped. match f(unsafe { orig.data.as_ref() }) { @@ -941,7 +942,7 @@ impl<'a, T: ?Sized> MappedRwLockReadGuard<'a, T> { U: ?Sized, { // SAFETY: the conditions of `RwLockReadGuard::new` were satisfied when the original guard - // was created, and have been upheld throughout `map` and/or `try_map`. + // was created, and have been upheld throughout `map` and/or `filter_map`. // The signature of the closure guarantees that it will not "leak" the lifetime of the reference // passed to it. If the closure panics, the guard will be dropped. let data = NonNull::from(f(unsafe { orig.data.as_ref() })); @@ -956,22 +957,21 @@ impl<'a, T: ?Sized> MappedRwLockReadGuard<'a, T> { /// The `RwLock` is already locked for reading, so this cannot fail. /// /// This is an associated function that needs to be used as - /// `MappedRwLockReadGuard::try_map(...)`. A method would interfere with + /// `MappedRwLockReadGuard::filter_map(...)`. A method would interfere with /// methods of the same name on the contents of the `MappedRwLockReadGuard` /// used through `Deref`. /// /// # Panics /// /// If the closure panics, the guard will be dropped (unlocked) and the RwLock will not be poisoned. - #[doc(alias = "filter_map")] #[unstable(feature = "mapped_lock_guards", issue = "117108")] - pub fn try_map(orig: Self, f: F) -> Result, Self> + pub fn filter_map(orig: Self, f: F) -> Result, Self> where F: FnOnce(&T) -> Option<&U>, U: ?Sized, { // SAFETY: the conditions of `RwLockReadGuard::new` were satisfied when the original guard - // was created, and have been upheld throughout `map` and/or `try_map`. + // was created, and have been upheld throughout `map` and/or `filter_map`. // The signature of the closure guarantees that it will not "leak" the lifetime of the reference // passed to it. If the closure panics, the guard will be dropped. match f(unsafe { orig.data.as_ref() }) { @@ -1006,7 +1006,7 @@ impl<'a, T: ?Sized> RwLockWriteGuard<'a, T> { U: ?Sized, { // SAFETY: the conditions of `RwLockWriteGuard::new` were satisfied when the original guard - // was created, and have been upheld throughout `map` and/or `try_map`. + // was created, and have been upheld throughout `map` and/or `filter_map`. // The signature of the closure guarantees that it will not "leak" the lifetime of the reference // passed to it. If the closure panics, the guard will be dropped. let data = NonNull::from(f(unsafe { &mut *orig.lock.data.get() })); @@ -1027,22 +1027,21 @@ impl<'a, T: ?Sized> RwLockWriteGuard<'a, T> { /// The `RwLock` is already locked for writing, so this cannot fail. /// /// This is an associated function that needs to be used as - /// `RwLockWriteGuard::try_map(...)`. A method would interfere with methods + /// `RwLockWriteGuard::filter_map(...)`. A method would interfere with methods /// of the same name on the contents of the `RwLockWriteGuard` used through /// `Deref`. /// /// # Panics /// /// If the closure panics, the guard will be dropped (unlocked) and the RwLock will be poisoned. - #[doc(alias = "filter_map")] #[unstable(feature = "mapped_lock_guards", issue = "117108")] - pub fn try_map(orig: Self, f: F) -> Result, Self> + pub fn filter_map(orig: Self, f: F) -> Result, Self> where F: FnOnce(&mut T) -> Option<&mut U>, U: ?Sized, { // SAFETY: the conditions of `RwLockWriteGuard::new` were satisfied when the original guard - // was created, and have been upheld throughout `map` and/or `try_map`. + // was created, and have been upheld throughout `map` and/or `filter_map`. // The signature of the closure guarantees that it will not "leak" the lifetime of the reference // passed to it. If the closure panics, the guard will be dropped. match f(unsafe { &mut *orig.lock.data.get() }) { @@ -1145,7 +1144,7 @@ impl<'a, T: ?Sized> MappedRwLockWriteGuard<'a, T> { U: ?Sized, { // SAFETY: the conditions of `RwLockWriteGuard::new` were satisfied when the original guard - // was created, and have been upheld throughout `map` and/or `try_map`. + // was created, and have been upheld throughout `map` and/or `filter_map`. // The signature of the closure guarantees that it will not "leak" the lifetime of the reference // passed to it. If the closure panics, the guard will be dropped. let data = NonNull::from(f(unsafe { orig.data.as_mut() })); @@ -1166,22 +1165,21 @@ impl<'a, T: ?Sized> MappedRwLockWriteGuard<'a, T> { /// The `RwLock` is already locked for writing, so this cannot fail. /// /// This is an associated function that needs to be used as - /// `MappedRwLockWriteGuard::try_map(...)`. A method would interfere with + /// `MappedRwLockWriteGuard::filter_map(...)`. A method would interfere with /// methods of the same name on the contents of the `MappedRwLockWriteGuard` /// used through `Deref`. /// /// # Panics /// /// If the closure panics, the guard will be dropped (unlocked) and the RwLock will be poisoned. - #[doc(alias = "filter_map")] #[unstable(feature = "mapped_lock_guards", issue = "117108")] - pub fn try_map(mut orig: Self, f: F) -> Result, Self> + pub fn filter_map(mut orig: Self, f: F) -> Result, Self> where F: FnOnce(&mut T) -> Option<&mut U>, U: ?Sized, { // SAFETY: the conditions of `RwLockWriteGuard::new` were satisfied when the original guard - // was created, and have been upheld throughout `map` and/or `try_map`. + // was created, and have been upheld throughout `map` and/or `filter_map`. // The signature of the closure guarantees that it will not "leak" the lifetime of the reference // passed to it. If the closure panics, the guard will be dropped. match f(unsafe { orig.data.as_mut() }) { diff --git a/library/std/src/sync/reentrant_lock.rs b/library/std/src/sync/reentrant_lock.rs index e009eb410efc0..24539d4e8303d 100644 --- a/library/std/src/sync/reentrant_lock.rs +++ b/library/std/src/sync/reentrant_lock.rs @@ -89,9 +89,9 @@ pub struct ReentrantLock { cfg_if!( if #[cfg(target_has_atomic = "64")] { - use crate::sync::atomic::{AtomicU64, Ordering::Relaxed}; + use crate::sync::atomic::{Atomic, AtomicU64, Ordering::Relaxed}; - struct Tid(AtomicU64); + struct Tid(Atomic); impl Tid { const fn new() -> Self { @@ -120,6 +120,7 @@ cfg_if!( } use crate::sync::atomic::{ + Atomic, AtomicUsize, Ordering, }; @@ -137,7 +138,7 @@ cfg_if!( // the current thread, or by a thread that has terminated before // the current thread was created. In either case, no further // synchronization is needed (as per ) - tls_addr: AtomicUsize, + tls_addr: Atomic, tid: UnsafeCell, } diff --git a/library/std/src/sys/alloc/sgx.rs b/library/std/src/sys/alloc/sgx.rs index f5c27688fbc8f..afdef7a5cb647 100644 --- a/library/std/src/sys/alloc/sgx.rs +++ b/library/std/src/sys/alloc/sgx.rs @@ -1,6 +1,6 @@ use crate::alloc::{GlobalAlloc, Layout, System}; use crate::ptr; -use crate::sync::atomic::{AtomicBool, Ordering}; +use crate::sync::atomic::{Atomic, AtomicBool, Ordering}; use crate::sys::pal::abi::mem as sgx_mem; use crate::sys::pal::waitqueue::SpinMutex; @@ -10,8 +10,10 @@ use crate::sys::pal::waitqueue::SpinMutex; // The current allocator here is the `dlmalloc` crate which we've got included // in the rust-lang/rust repository as a submodule. The crate is a port of // dlmalloc.c from C to Rust. +// +// Specifying linkage/symbol name is solely to ensure a single instance between this crate and its unit tests #[cfg_attr(test, linkage = "available_externally")] -#[unsafe(export_name = "_ZN16__rust_internals3std3sys3sgx5alloc8DLMALLOCE")] +#[unsafe(export_name = "_ZN16__rust_internals3std3sys5alloc3sgx8DLMALLOCE")] static DLMALLOC: SpinMutex> = SpinMutex::new(dlmalloc::Dlmalloc::new_with_allocator(Sgx {})); @@ -20,7 +22,7 @@ struct Sgx; unsafe impl dlmalloc::Allocator for Sgx { /// Allocs system resources fn alloc(&self, _size: usize) -> (*mut u8, usize, u32) { - static INIT: AtomicBool = AtomicBool::new(false); + static INIT: Atomic = AtomicBool::new(false); // No ordering requirement since this function is protected by the global lock. if !INIT.swap(true, Ordering::Relaxed) { diff --git a/library/std/src/sys/alloc/wasm.rs b/library/std/src/sys/alloc/wasm.rs index 53fbc9529e590..c8fab992a88a7 100644 --- a/library/std/src/sys/alloc/wasm.rs +++ b/library/std/src/sys/alloc/wasm.rs @@ -60,10 +60,10 @@ unsafe impl GlobalAlloc for System { #[cfg(target_feature = "atomics")] mod lock { - use crate::sync::atomic::AtomicI32; use crate::sync::atomic::Ordering::{Acquire, Release}; + use crate::sync::atomic::{Atomic, AtomicI32}; - static LOCKED: AtomicI32 = AtomicI32::new(0); + static LOCKED: Atomic = AtomicI32::new(0); pub struct DropLock; diff --git a/library/std/src/sys/alloc/xous.rs b/library/std/src/sys/alloc/xous.rs index ccaa972c22de3..c7f973b802791 100644 --- a/library/std/src/sys/alloc/xous.rs +++ b/library/std/src/sys/alloc/xous.rs @@ -49,10 +49,10 @@ unsafe impl GlobalAlloc for System { } mod lock { - use crate::sync::atomic::AtomicI32; use crate::sync::atomic::Ordering::{Acquire, Release}; + use crate::sync::atomic::{Atomic, AtomicI32}; - static LOCKED: AtomicI32 = AtomicI32::new(0); + static LOCKED: Atomic = AtomicI32::new(0); pub struct DropLock; diff --git a/library/std/src/sys/args/common.rs b/library/std/src/sys/args/common.rs new file mode 100644 index 0000000000000..e787105a05a73 --- /dev/null +++ b/library/std/src/sys/args/common.rs @@ -0,0 +1,101 @@ +use crate::ffi::OsString; +use crate::num::NonZero; +use crate::ops::Try; +use crate::{array, fmt, vec}; + +pub struct Args { + iter: vec::IntoIter, +} + +impl !Send for Args {} +impl !Sync for Args {} + +impl Args { + #[inline] + pub(super) fn new(args: Vec) -> Self { + Args { iter: args.into_iter() } + } +} + +impl fmt::Debug for Args { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + self.iter.as_slice().fmt(f) + } +} + +impl Iterator for Args { + type Item = OsString; + + #[inline] + fn next(&mut self) -> Option { + self.iter.next() + } + + #[inline] + fn next_chunk( + &mut self, + ) -> Result<[OsString; N], array::IntoIter> { + self.iter.next_chunk() + } + + #[inline] + fn size_hint(&self) -> (usize, Option) { + self.iter.size_hint() + } + + #[inline] + fn count(self) -> usize { + self.iter.len() + } + + #[inline] + fn last(self) -> Option { + self.iter.last() + } + + #[inline] + fn advance_by(&mut self, n: usize) -> Result<(), NonZero> { + self.iter.advance_by(n) + } + + #[inline] + fn try_fold(&mut self, init: B, f: F) -> R + where + F: FnMut(B, Self::Item) -> R, + R: Try, + { + self.iter.try_fold(init, f) + } + + #[inline] + fn fold(self, init: B, f: F) -> B + where + F: FnMut(B, Self::Item) -> B, + { + self.iter.fold(init, f) + } +} + +impl DoubleEndedIterator for Args { + #[inline] + fn next_back(&mut self) -> Option { + self.iter.next_back() + } + + #[inline] + fn advance_back_by(&mut self, n: usize) -> Result<(), NonZero> { + self.iter.advance_back_by(n) + } +} + +impl ExactSizeIterator for Args { + #[inline] + fn len(&self) -> usize { + self.iter.len() + } + + #[inline] + fn is_empty(&self) -> bool { + self.iter.is_empty() + } +} diff --git a/library/std/src/sys/args/mod.rs b/library/std/src/sys/args/mod.rs new file mode 100644 index 0000000000000..0011f55dc14ee --- /dev/null +++ b/library/std/src/sys/args/mod.rs @@ -0,0 +1,44 @@ +//! Platform-dependent command line arguments abstraction. + +#![forbid(unsafe_op_in_unsafe_fn)] + +#[cfg(any( + all(target_family = "unix", not(any(target_os = "espidf", target_os = "vita"))), + target_family = "windows", + target_os = "hermit", + target_os = "uefi", + target_os = "wasi", + target_os = "xous", +))] +mod common; + +cfg_if::cfg_if! { + if #[cfg(any( + all(target_family = "unix", not(any(target_os = "espidf", target_os = "vita"))), + target_os = "hermit", + ))] { + mod unix; + pub use unix::*; + } else if #[cfg(target_family = "windows")] { + mod windows; + pub use windows::*; + } else if #[cfg(all(target_vendor = "fortanix", target_env = "sgx"))] { + mod sgx; + pub use sgx::*; + } else if #[cfg(target_os = "uefi")] { + mod uefi; + pub use uefi::*; + } else if #[cfg(target_os = "wasi")] { + mod wasi; + pub use wasi::*; + } else if #[cfg(target_os = "xous")] { + mod xous; + pub use xous::*; + } else if #[cfg(target_os = "zkvm")] { + mod zkvm; + pub use zkvm::*; + } else { + mod unsupported; + pub use unsupported::*; + } +} diff --git a/library/std/src/sys/args/sgx.rs b/library/std/src/sys/args/sgx.rs new file mode 100644 index 0000000000000..f800500c22a8a --- /dev/null +++ b/library/std/src/sys/args/sgx.rs @@ -0,0 +1,110 @@ +#![allow(fuzzy_provenance_casts)] // FIXME: this module systematically confuses pointers and integers + +use crate::ffi::OsString; +use crate::num::NonZero; +use crate::ops::Try; +use crate::sync::atomic::{Atomic, AtomicUsize, Ordering}; +use crate::sys::os_str::Buf; +use crate::sys::pal::abi::usercalls::alloc; +use crate::sys::pal::abi::usercalls::raw::ByteBuffer; +use crate::sys_common::FromInner; +use crate::{fmt, slice}; + +// Specifying linkage/symbol name is solely to ensure a single instance between this crate and its unit tests +#[cfg_attr(test, linkage = "available_externally")] +#[unsafe(export_name = "_ZN16__rust_internals3std3sys3sgx4args4ARGSE")] +static ARGS: Atomic = AtomicUsize::new(0); +type ArgsStore = Vec; + +#[cfg_attr(test, allow(dead_code))] +pub unsafe fn init(argc: isize, argv: *const *const u8) { + if argc != 0 { + let args = unsafe { alloc::User::<[ByteBuffer]>::from_raw_parts(argv as _, argc as _) }; + let args = args + .iter() + .map(|a| OsString::from_inner(Buf { inner: a.copy_user_buffer() })) + .collect::(); + ARGS.store(Box::into_raw(Box::new(args)) as _, Ordering::Relaxed); + } +} + +pub fn args() -> Args { + let args = unsafe { (ARGS.load(Ordering::Relaxed) as *const ArgsStore).as_ref() }; + let slice = args.map(|args| args.as_slice()).unwrap_or(&[]); + Args { iter: slice.iter() } +} + +pub struct Args { + iter: slice::Iter<'static, OsString>, +} + +impl fmt::Debug for Args { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + self.iter.as_slice().fmt(f) + } +} + +impl Iterator for Args { + type Item = OsString; + + fn next(&mut self) -> Option { + self.iter.next().cloned() + } + + #[inline] + fn size_hint(&self) -> (usize, Option) { + self.iter.size_hint() + } + + #[inline] + fn count(self) -> usize { + self.iter.len() + } + + fn last(self) -> Option { + self.iter.last().cloned() + } + + #[inline] + fn advance_by(&mut self, n: usize) -> Result<(), NonZero> { + self.iter.advance_by(n) + } + + fn try_fold(&mut self, init: B, f: F) -> R + where + F: FnMut(B, Self::Item) -> R, + R: Try, + { + self.iter.by_ref().cloned().try_fold(init, f) + } + + fn fold(self, init: B, f: F) -> B + where + F: FnMut(B, Self::Item) -> B, + { + self.iter.cloned().fold(init, f) + } +} + +impl DoubleEndedIterator for Args { + fn next_back(&mut self) -> Option { + self.iter.next_back().cloned() + } + + #[inline] + fn advance_back_by(&mut self, n: usize) -> Result<(), NonZero> { + self.iter.advance_back_by(n) + } +} + +impl ExactSizeIterator for Args { + #[inline] + fn len(&self) -> usize { + self.iter.len() + } + + #[inline] + fn is_empty(&self) -> bool { + self.iter.is_empty() + } +} diff --git a/library/std/src/sys/args/uefi.rs b/library/std/src/sys/args/uefi.rs new file mode 100644 index 0000000000000..02dada382eff0 --- /dev/null +++ b/library/std/src/sys/args/uefi.rs @@ -0,0 +1,118 @@ +use r_efi::protocols::loaded_image; + +pub use super::common::Args; +use crate::env::current_exe; +use crate::ffi::OsString; +use crate::iter::Iterator; +use crate::sys::pal::helpers; + +pub fn args() -> Args { + let lazy_current_exe = || Vec::from([current_exe().map(Into::into).unwrap_or_default()]); + + // Each loaded image has an image handle that supports `EFI_LOADED_IMAGE_PROTOCOL`. Thus, this + // will never fail. + let protocol = + helpers::image_handle_protocol::(loaded_image::PROTOCOL_GUID) + .unwrap(); + + let lp_size = unsafe { (*protocol.as_ptr()).load_options_size } as usize; + // Break if we are sure that it cannot be UTF-16 + if lp_size < size_of::() || lp_size % size_of::() != 0 { + return Args::new(lazy_current_exe()); + } + let lp_size = lp_size / size_of::(); + + let lp_cmd_line = unsafe { (*protocol.as_ptr()).load_options as *const u16 }; + if !lp_cmd_line.is_aligned() { + return Args::new(lazy_current_exe()); + } + let lp_cmd_line = unsafe { crate::slice::from_raw_parts(lp_cmd_line, lp_size) }; + + Args::new(parse_lp_cmd_line(lp_cmd_line).unwrap_or_else(lazy_current_exe)) +} + +/// Implements the UEFI command-line argument parsing algorithm. +/// +/// This implementation is based on what is defined in Section 3.4 of +/// [UEFI Shell Specification](https://uefi.org/sites/default/files/resources/UEFI_Shell_Spec_2_0.pdf) +/// +/// Returns None in the following cases: +/// - Invalid UTF-16 (unpaired surrogate) +/// - Empty/improper arguments +fn parse_lp_cmd_line(code_units: &[u16]) -> Option> { + const QUOTE: char = '"'; + const SPACE: char = ' '; + const CARET: char = '^'; + const NULL: char = '\0'; + + let mut ret_val = Vec::new(); + let mut code_units_iter = char::decode_utf16(code_units.iter().cloned()).peekable(); + + // The executable name at the beginning is special. + let mut in_quotes = false; + let mut cur = String::new(); + while let Some(w) = code_units_iter.next() { + let w = w.ok()?; + match w { + // break on NULL + NULL => break, + // A quote mark always toggles `in_quotes` no matter what because + // there are no escape characters when parsing the executable name. + QUOTE => in_quotes = !in_quotes, + // If not `in_quotes` then whitespace ends argv[0]. + SPACE if !in_quotes => break, + // In all other cases the code unit is taken literally. + _ => cur.push(w), + } + } + + // If exe name is missing, the cli args are invalid + if cur.is_empty() { + return None; + } + + ret_val.push(OsString::from(cur)); + // Skip whitespace. + while code_units_iter.next_if_eq(&Ok(SPACE)).is_some() {} + + // Parse the arguments according to these rules: + // * All code units are taken literally except space, quote and caret. + // * When not `in_quotes`, space separate arguments. Consecutive spaces are + // treated as a single separator. + // * A space `in_quotes` is taken literally. + // * A quote toggles `in_quotes` mode unless it's escaped. An escaped quote is taken literally. + // * A quote can be escaped if preceded by caret. + // * A caret can be escaped if preceded by caret. + let mut cur = String::new(); + let mut in_quotes = false; + while let Some(w) = code_units_iter.next() { + let w = w.ok()?; + match w { + // break on NULL + NULL => break, + // If not `in_quotes`, a space or tab ends the argument. + SPACE if !in_quotes => { + ret_val.push(OsString::from(&cur[..])); + cur.truncate(0); + + // Skip whitespace. + while code_units_iter.next_if_eq(&Ok(SPACE)).is_some() {} + } + // Caret can escape quotes or carets + CARET if in_quotes => { + if let Some(x) = code_units_iter.next() { + cur.push(x.ok()?); + } + } + // If quote then flip `in_quotes` + QUOTE => in_quotes = !in_quotes, + // Everything else is always taken literally. + _ => cur.push(w), + } + } + // Push the final argument, if any. + if !cur.is_empty() || in_quotes { + ret_val.push(OsString::from(cur)); + } + Some(ret_val) +} diff --git a/library/std/src/sys/args/unix.rs b/library/std/src/sys/args/unix.rs new file mode 100644 index 0000000000000..0dfbd5f03eba9 --- /dev/null +++ b/library/std/src/sys/args/unix.rs @@ -0,0 +1,197 @@ +//! Global initialization and retrieval of command line arguments. +//! +//! On some platforms these are stored during runtime startup, +//! and on some they are retrieved from the system on demand. + +#![allow(dead_code)] // runtime init functions not used during testing + +pub use super::common::Args; +use crate::ffi::CStr; +#[cfg(target_os = "hermit")] +use crate::os::hermit::ffi::OsStringExt; +#[cfg(not(target_os = "hermit"))] +use crate::os::unix::ffi::OsStringExt; + +/// One-time global initialization. +pub unsafe fn init(argc: isize, argv: *const *const u8) { + unsafe { imp::init(argc, argv) } +} + +/// Returns the command line arguments +pub fn args() -> Args { + let (argc, argv) = imp::argc_argv(); + + let mut vec = Vec::with_capacity(argc as usize); + + for i in 0..argc { + // SAFETY: `argv` is non-null if `argc` is positive, and it is + // guaranteed to be at least as long as `argc`, so reading from it + // should be safe. + let ptr = unsafe { argv.offset(i).read() }; + + // Some C commandline parsers (e.g. GLib and Qt) are replacing already + // handled arguments in `argv` with `NULL` and move them to the end. + // + // Since they can't directly ensure updates to `argc` as well, this + // means that `argc` might be bigger than the actual number of + // non-`NULL` pointers in `argv` at this point. + // + // To handle this we simply stop iterating at the first `NULL` + // argument. `argv` is also guaranteed to be `NULL`-terminated so any + // non-`NULL` arguments after the first `NULL` can safely be ignored. + if ptr.is_null() { + // NOTE: On Apple platforms, `-[NSProcessInfo arguments]` does not + // stop iterating here, but instead `continue`, always iterating + // up until it reached `argc`. + // + // This difference will only matter in very specific circumstances + // where `argc`/`argv` have been modified, but in unexpected ways, + // so it likely doesn't really matter which option we choose. + // See the following PR for further discussion: + // + break; + } + + // SAFETY: Just checked that the pointer is not NULL, and arguments + // are otherwise guaranteed to be valid C strings. + let cstr = unsafe { CStr::from_ptr(ptr) }; + vec.push(OsStringExt::from_vec(cstr.to_bytes().to_vec())); + } + + Args::new(vec) +} + +#[cfg(any( + target_os = "linux", + target_os = "android", + target_os = "freebsd", + target_os = "dragonfly", + target_os = "netbsd", + target_os = "openbsd", + target_os = "cygwin", + target_os = "solaris", + target_os = "illumos", + target_os = "emscripten", + target_os = "haiku", + target_os = "hermit", + target_os = "l4re", + target_os = "fuchsia", + target_os = "redox", + target_os = "vxworks", + target_os = "horizon", + target_os = "aix", + target_os = "nto", + target_os = "hurd", + target_os = "rtems", + target_os = "nuttx", +))] +mod imp { + use crate::ffi::c_char; + use crate::ptr; + use crate::sync::atomic::{Atomic, AtomicIsize, AtomicPtr, Ordering}; + + // The system-provided argc and argv, which we store in static memory + // here so that we can defer the work of parsing them until its actually + // needed. + // + // Note that we never mutate argv/argc, the argv array, or the argv + // strings, which allows the code in this file to be very simple. + static ARGC: Atomic = AtomicIsize::new(0); + static ARGV: Atomic<*mut *const u8> = AtomicPtr::new(ptr::null_mut()); + + unsafe fn really_init(argc: isize, argv: *const *const u8) { + // These don't need to be ordered with each other or other stores, + // because they only hold the unmodified system-provided argv/argc. + ARGC.store(argc, Ordering::Relaxed); + ARGV.store(argv as *mut _, Ordering::Relaxed); + } + + #[inline(always)] + pub unsafe fn init(argc: isize, argv: *const *const u8) { + // on GNU/Linux if we are main then we will init argv and argc twice, it "duplicates work" + // BUT edge-cases are real: only using .init_array can break most emulators, dlopen, etc. + unsafe { really_init(argc, argv) }; + } + + /// glibc passes argc, argv, and envp to functions in .init_array, as a non-standard extension. + /// This allows `std::env::args` to work even in a `cdylib`, as it does on macOS and Windows. + #[cfg(all(target_os = "linux", target_env = "gnu"))] + #[used] + #[unsafe(link_section = ".init_array.00099")] + static ARGV_INIT_ARRAY: extern "C" fn( + crate::os::raw::c_int, + *const *const u8, + *const *const u8, + ) = { + extern "C" fn init_wrapper( + argc: crate::os::raw::c_int, + argv: *const *const u8, + _envp: *const *const u8, + ) { + unsafe { really_init(argc as isize, argv) }; + } + init_wrapper + }; + + pub fn argc_argv() -> (isize, *const *const c_char) { + // Load ARGC and ARGV, which hold the unmodified system-provided + // argc/argv, so we can read the pointed-to memory without atomics or + // synchronization. + // + // If either ARGC or ARGV is still zero or null, then either there + // really are no arguments, or someone is asking for `args()` before + // initialization has completed, and we return an empty list. + let argv = ARGV.load(Ordering::Relaxed); + let argc = if argv.is_null() { 0 } else { ARGC.load(Ordering::Relaxed) }; + + // Cast from `*mut *const u8` to `*const *const c_char` + (argc, argv.cast()) + } +} + +// Use `_NSGetArgc` and `_NSGetArgv` on Apple platforms. +// +// Even though these have underscores in their names, they've been available +// since the first versions of both macOS and iOS, and are declared in +// the header `crt_externs.h`. +// +// NOTE: This header was added to the iOS 13.0 SDK, which has been the source +// of a great deal of confusion in the past about the availability of these +// APIs. +// +// NOTE(madsmtm): This has not strictly been verified to not cause App Store +// rejections; if this is found to be the case, the previous implementation +// of this used `[[NSProcessInfo processInfo] arguments]`. +#[cfg(target_vendor = "apple")] +mod imp { + use crate::ffi::{c_char, c_int}; + + pub unsafe fn init(_argc: isize, _argv: *const *const u8) { + // No need to initialize anything in here, `libdyld.dylib` has already + // done the work for us. + } + + pub fn argc_argv() -> (isize, *const *const c_char) { + unsafe extern "C" { + // These functions are in crt_externs.h. + fn _NSGetArgc() -> *mut c_int; + fn _NSGetArgv() -> *mut *mut *mut c_char; + } + + // SAFETY: The returned pointer points to a static initialized early + // in the program lifetime by `libdyld.dylib`, and as such is always + // valid. + // + // NOTE: Similar to `_NSGetEnviron`, there technically isn't anything + // protecting us against concurrent modifications to this, and there + // doesn't exist a lock that we can take. Instead, it is generally + // expected that it's only modified in `main` / before other code + // runs, so reading this here should be fine. + let argc = unsafe { _NSGetArgc().read() }; + // SAFETY: Same as above. + let argv = unsafe { _NSGetArgv().read() }; + + // Cast from `*mut *mut c_char` to `*const *const c_char` + (argc as isize, argv.cast()) + } +} diff --git a/library/std/src/sys/args/unsupported.rs b/library/std/src/sys/args/unsupported.rs new file mode 100644 index 0000000000000..ecffc6d26414b --- /dev/null +++ b/library/std/src/sys/args/unsupported.rs @@ -0,0 +1,42 @@ +use crate::ffi::OsString; +use crate::fmt; + +pub struct Args {} + +pub fn args() -> Args { + Args {} +} + +impl fmt::Debug for Args { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_list().finish() + } +} + +impl Iterator for Args { + type Item = OsString; + + #[inline] + fn next(&mut self) -> Option { + None + } + + #[inline] + fn size_hint(&self) -> (usize, Option) { + (0, Some(0)) + } +} + +impl DoubleEndedIterator for Args { + #[inline] + fn next_back(&mut self) -> Option { + None + } +} + +impl ExactSizeIterator for Args { + #[inline] + fn len(&self) -> usize { + 0 + } +} diff --git a/library/std/src/sys/args/wasi.rs b/library/std/src/sys/args/wasi.rs new file mode 100644 index 0000000000000..72063a87dc9f5 --- /dev/null +++ b/library/std/src/sys/args/wasi.rs @@ -0,0 +1,26 @@ +#![forbid(unsafe_op_in_unsafe_fn)] + +pub use super::common::Args; +use crate::ffi::{CStr, OsStr, OsString}; +use crate::os::wasi::ffi::OsStrExt; + +/// Returns the command line arguments +pub fn args() -> Args { + Args::new(maybe_args().unwrap_or(Vec::new())) +} + +fn maybe_args() -> Option> { + unsafe { + let (argc, buf_size) = wasi::args_sizes_get().ok()?; + let mut argv = Vec::with_capacity(argc); + let mut buf = Vec::with_capacity(buf_size); + wasi::args_get(argv.as_mut_ptr(), buf.as_mut_ptr()).ok()?; + argv.set_len(argc); + let mut ret = Vec::with_capacity(argc); + for ptr in argv { + let s = CStr::from_ptr(ptr.cast()); + ret.push(OsStr::from_bytes(s.to_bytes()).to_owned()); + } + Some(ret) + } +} diff --git a/library/std/src/sys/args/windows.rs b/library/std/src/sys/args/windows.rs new file mode 100644 index 0000000000000..81c44fabdcc67 --- /dev/null +++ b/library/std/src/sys/args/windows.rs @@ -0,0 +1,411 @@ +//! The Windows command line is just a string +//! +//! +//! This module implements the parsing necessary to turn that string into a list of arguments. + +#[cfg(test)] +mod tests; + +pub use super::common::Args; +use crate::ffi::{OsStr, OsString}; +use crate::num::NonZero; +use crate::os::windows::prelude::*; +use crate::path::{Path, PathBuf}; +use crate::sys::pal::os::current_exe; +use crate::sys::pal::{ensure_no_nuls, fill_utf16_buf}; +use crate::sys::path::get_long_path; +use crate::sys::{c, to_u16s}; +use crate::sys_common::AsInner; +use crate::sys_common::wstr::WStrUnits; +use crate::{io, iter, ptr}; + +pub fn args() -> Args { + // SAFETY: `GetCommandLineW` returns a pointer to a null terminated UTF-16 + // string so it's safe for `WStrUnits` to use. + unsafe { + let lp_cmd_line = c::GetCommandLineW(); + let parsed_args_list = parse_lp_cmd_line(WStrUnits::new(lp_cmd_line), || { + current_exe().map(PathBuf::into_os_string).unwrap_or_else(|_| OsString::new()) + }); + + Args::new(parsed_args_list) + } +} + +/// Implements the Windows command-line argument parsing algorithm. +/// +/// Microsoft's documentation for the Windows CLI argument format can be found at +/// +/// +/// A more in-depth explanation is here: +/// +/// +/// Windows includes a function to do command line parsing in shell32.dll. +/// However, this is not used for two reasons: +/// +/// 1. Linking with that DLL causes the process to be registered as a GUI application. +/// GUI applications add a bunch of overhead, even if no windows are drawn. See +/// . +/// +/// 2. It does not follow the modern C/C++ argv rules outlined in the first two links above. +/// +/// This function was tested for equivalence to the C/C++ parsing rules using an +/// extensive test suite available at +/// . +fn parse_lp_cmd_line<'a, F: Fn() -> OsString>( + lp_cmd_line: Option>, + exe_name: F, +) -> Vec { + const BACKSLASH: NonZero = NonZero::new(b'\\' as u16).unwrap(); + const QUOTE: NonZero = NonZero::new(b'"' as u16).unwrap(); + const TAB: NonZero = NonZero::new(b'\t' as u16).unwrap(); + const SPACE: NonZero = NonZero::new(b' ' as u16).unwrap(); + + let mut ret_val = Vec::new(); + // If the cmd line pointer is null or it points to an empty string then + // return the name of the executable as argv[0]. + if lp_cmd_line.as_ref().and_then(|cmd| cmd.peek()).is_none() { + ret_val.push(exe_name()); + return ret_val; + } + let mut code_units = lp_cmd_line.unwrap(); + + // The executable name at the beginning is special. + let mut in_quotes = false; + let mut cur = Vec::new(); + for w in &mut code_units { + match w { + // A quote mark always toggles `in_quotes` no matter what because + // there are no escape characters when parsing the executable name. + QUOTE => in_quotes = !in_quotes, + // If not `in_quotes` then whitespace ends argv[0]. + SPACE | TAB if !in_quotes => break, + // In all other cases the code unit is taken literally. + _ => cur.push(w.get()), + } + } + // Skip whitespace. + code_units.advance_while(|w| w == SPACE || w == TAB); + ret_val.push(OsString::from_wide(&cur)); + + // Parse the arguments according to these rules: + // * All code units are taken literally except space, tab, quote and backslash. + // * When not `in_quotes`, space and tab separate arguments. Consecutive spaces and tabs are + // treated as a single separator. + // * A space or tab `in_quotes` is taken literally. + // * A quote toggles `in_quotes` mode unless it's escaped. An escaped quote is taken literally. + // * A quote can be escaped if preceded by an odd number of backslashes. + // * If any number of backslashes is immediately followed by a quote then the number of + // backslashes is halved (rounding down). + // * Backslashes not followed by a quote are all taken literally. + // * If `in_quotes` then a quote can also be escaped using another quote + // (i.e. two consecutive quotes become one literal quote). + let mut cur = Vec::new(); + let mut in_quotes = false; + while let Some(w) = code_units.next() { + match w { + // If not `in_quotes`, a space or tab ends the argument. + SPACE | TAB if !in_quotes => { + ret_val.push(OsString::from_wide(&cur[..])); + cur.truncate(0); + + // Skip whitespace. + code_units.advance_while(|w| w == SPACE || w == TAB); + } + // Backslashes can escape quotes or backslashes but only if consecutive backslashes are followed by a quote. + BACKSLASH => { + let backslash_count = code_units.advance_while(|w| w == BACKSLASH) + 1; + if code_units.peek() == Some(QUOTE) { + cur.extend(iter::repeat(BACKSLASH.get()).take(backslash_count / 2)); + // The quote is escaped if there are an odd number of backslashes. + if backslash_count % 2 == 1 { + code_units.next(); + cur.push(QUOTE.get()); + } + } else { + // If there is no quote on the end then there is no escaping. + cur.extend(iter::repeat(BACKSLASH.get()).take(backslash_count)); + } + } + // If `in_quotes` and not backslash escaped (see above) then a quote either + // unsets `in_quote` or is escaped by another quote. + QUOTE if in_quotes => match code_units.peek() { + // Two consecutive quotes when `in_quotes` produces one literal quote. + Some(QUOTE) => { + cur.push(QUOTE.get()); + code_units.next(); + } + // Otherwise set `in_quotes`. + Some(_) => in_quotes = false, + // The end of the command line. + // Push `cur` even if empty, which we do by breaking while `in_quotes` is still set. + None => break, + }, + // If not `in_quotes` and not BACKSLASH escaped (see above) then a quote sets `in_quote`. + QUOTE => in_quotes = true, + // Everything else is always taken literally. + _ => cur.push(w.get()), + } + } + // Push the final argument, if any. + if !cur.is_empty() || in_quotes { + ret_val.push(OsString::from_wide(&cur[..])); + } + ret_val +} + +#[derive(Debug)] +pub(crate) enum Arg { + /// Add quotes (if needed) + Regular(OsString), + /// Append raw string without quoting + Raw(OsString), +} + +enum Quote { + // Every arg is quoted + Always, + // Whitespace and empty args are quoted + Auto, + // Arg appended without any changes (#29494) + Never, +} + +pub(crate) fn append_arg(cmd: &mut Vec, arg: &Arg, force_quotes: bool) -> io::Result<()> { + let (arg, quote) = match arg { + Arg::Regular(arg) => (arg, if force_quotes { Quote::Always } else { Quote::Auto }), + Arg::Raw(arg) => (arg, Quote::Never), + }; + + // If an argument has 0 characters then we need to quote it to ensure + // that it actually gets passed through on the command line or otherwise + // it will be dropped entirely when parsed on the other end. + ensure_no_nuls(arg)?; + let arg_bytes = arg.as_encoded_bytes(); + let (quote, escape) = match quote { + Quote::Always => (true, true), + Quote::Auto => { + (arg_bytes.iter().any(|c| *c == b' ' || *c == b'\t') || arg_bytes.is_empty(), true) + } + Quote::Never => (false, false), + }; + if quote { + cmd.push('"' as u16); + } + + let mut backslashes: usize = 0; + for x in arg.encode_wide() { + if escape { + if x == '\\' as u16 { + backslashes += 1; + } else { + if x == '"' as u16 { + // Add n+1 backslashes to total 2n+1 before internal '"'. + cmd.extend((0..=backslashes).map(|_| '\\' as u16)); + } + backslashes = 0; + } + } + cmd.push(x); + } + + if quote { + // Add n backslashes to total 2n before ending '"'. + cmd.extend((0..backslashes).map(|_| '\\' as u16)); + cmd.push('"' as u16); + } + Ok(()) +} + +fn append_bat_arg(cmd: &mut Vec, arg: &OsStr, mut quote: bool) -> io::Result<()> { + ensure_no_nuls(arg)?; + // If an argument has 0 characters then we need to quote it to ensure + // that it actually gets passed through on the command line or otherwise + // it will be dropped entirely when parsed on the other end. + // + // We also need to quote the argument if it ends with `\` to guard against + // bat usage such as `"%~2"` (i.e. force quote arguments) otherwise a + // trailing slash will escape the closing quote. + if arg.is_empty() || arg.as_encoded_bytes().last() == Some(&b'\\') { + quote = true; + } + for cp in arg.as_inner().inner.code_points() { + if let Some(cp) = cp.to_char() { + // Rather than trying to find every ascii symbol that must be quoted, + // we assume that all ascii symbols must be quoted unless they're known to be good. + // We also quote Unicode control blocks for good measure. + // Note an unquoted `\` is fine so long as the argument isn't otherwise quoted. + static UNQUOTED: &str = r"#$*+-./:?@\_"; + let ascii_needs_quotes = + cp.is_ascii() && !(cp.is_ascii_alphanumeric() || UNQUOTED.contains(cp)); + if ascii_needs_quotes || cp.is_control() { + quote = true; + } + } + } + + if quote { + cmd.push('"' as u16); + } + // Loop through the string, escaping `\` only if followed by `"`. + // And escaping `"` by doubling them. + let mut backslashes: usize = 0; + for x in arg.encode_wide() { + if x == '\\' as u16 { + backslashes += 1; + } else { + if x == '"' as u16 { + // Add n backslashes to total 2n before internal `"`. + cmd.extend((0..backslashes).map(|_| '\\' as u16)); + // Appending an additional double-quote acts as an escape. + cmd.push(b'"' as u16) + } else if x == '%' as u16 || x == '\r' as u16 { + // yt-dlp hack: replaces `%` with `%%cd:~,%` to stop %VAR% being expanded as an environment variable. + // + // # Explanation + // + // cmd supports extracting a substring from a variable using the following syntax: + // %variable:~start_index,end_index% + // + // In the above command `cd` is used as the variable and the start_index and end_index are left blank. + // `cd` is a built-in variable that dynamically expands to the current directory so it's always available. + // Explicitly omitting both the start and end index creates a zero-length substring. + // + // Therefore it all resolves to nothing. However, by doing this no-op we distract cmd.exe + // from potentially expanding %variables% in the argument. + cmd.extend_from_slice(&[ + '%' as u16, '%' as u16, 'c' as u16, 'd' as u16, ':' as u16, '~' as u16, + ',' as u16, + ]); + } + backslashes = 0; + } + cmd.push(x); + } + if quote { + // Add n backslashes to total 2n before ending `"`. + cmd.extend((0..backslashes).map(|_| '\\' as u16)); + cmd.push('"' as u16); + } + Ok(()) +} + +pub(crate) fn make_bat_command_line( + script: &[u16], + args: &[Arg], + force_quotes: bool, +) -> io::Result> { + const INVALID_ARGUMENT_ERROR: io::Error = + io::const_error!(io::ErrorKind::InvalidInput, r#"batch file arguments are invalid"#); + // Set the start of the command line to `cmd.exe /c "` + // It is necessary to surround the command in an extra pair of quotes, + // hence the trailing quote here. It will be closed after all arguments + // have been added. + // Using /e:ON enables "command extensions" which is essential for the `%` hack to work. + let mut cmd: Vec = "cmd.exe /e:ON /v:OFF /d /c \"".encode_utf16().collect(); + + // Push the script name surrounded by its quote pair. + cmd.push(b'"' as u16); + // Windows file names cannot contain a `"` character or end with `\\`. + // If the script name does then return an error. + if script.contains(&(b'"' as u16)) || script.last() == Some(&(b'\\' as u16)) { + return Err(io::const_error!( + io::ErrorKind::InvalidInput, + "Windows file names may not contain `\"` or end with `\\`" + )); + } + cmd.extend_from_slice(script.strip_suffix(&[0]).unwrap_or(script)); + cmd.push(b'"' as u16); + + // Append the arguments. + // FIXME: This needs tests to ensure that the arguments are properly + // reconstructed by the batch script by default. + for arg in args { + cmd.push(' ' as u16); + match arg { + Arg::Regular(arg_os) => { + let arg_bytes = arg_os.as_encoded_bytes(); + // Disallow \r and \n as they may truncate the arguments. + const DISALLOWED: &[u8] = b"\r\n"; + if arg_bytes.iter().any(|c| DISALLOWED.contains(c)) { + return Err(INVALID_ARGUMENT_ERROR); + } + append_bat_arg(&mut cmd, arg_os, force_quotes)?; + } + _ => { + // Raw arguments are passed on as-is. + // It's the user's responsibility to properly handle arguments in this case. + append_arg(&mut cmd, arg, force_quotes)?; + } + }; + } + + // Close the quote we left opened earlier. + cmd.push(b'"' as u16); + + Ok(cmd) +} + +/// Takes a path and tries to return a non-verbatim path. +/// +/// This is necessary because cmd.exe does not support verbatim paths. +pub(crate) fn to_user_path(path: &Path) -> io::Result> { + from_wide_to_user_path(to_u16s(path)?) +} +pub(crate) fn from_wide_to_user_path(mut path: Vec) -> io::Result> { + // UTF-16 encoded code points, used in parsing and building UTF-16 paths. + // All of these are in the ASCII range so they can be cast directly to `u16`. + const SEP: u16 = b'\\' as _; + const QUERY: u16 = b'?' as _; + const COLON: u16 = b':' as _; + const U: u16 = b'U' as _; + const N: u16 = b'N' as _; + const C: u16 = b'C' as _; + + // Early return if the path is too long to remove the verbatim prefix. + const LEGACY_MAX_PATH: usize = 260; + if path.len() > LEGACY_MAX_PATH { + return Ok(path); + } + + match &path[..] { + // `\\?\C:\...` => `C:\...` + [SEP, SEP, QUERY, SEP, _, COLON, SEP, ..] => unsafe { + let lpfilename = path[4..].as_ptr(); + fill_utf16_buf( + |buffer, size| c::GetFullPathNameW(lpfilename, size, buffer, ptr::null_mut()), + |full_path: &[u16]| { + if full_path == &path[4..path.len() - 1] { + let mut path: Vec = full_path.into(); + path.push(0); + path + } else { + path + } + }, + ) + }, + // `\\?\UNC\...` => `\\...` + [SEP, SEP, QUERY, SEP, U, N, C, SEP, ..] => unsafe { + // Change the `C` in `UNC\` to `\` so we can get a slice that starts with `\\`. + path[6] = b'\\' as u16; + let lpfilename = path[6..].as_ptr(); + fill_utf16_buf( + |buffer, size| c::GetFullPathNameW(lpfilename, size, buffer, ptr::null_mut()), + |full_path: &[u16]| { + if full_path == &path[6..path.len() - 1] { + let mut path: Vec = full_path.into(); + path.push(0); + path + } else { + // Restore the 'C' in "UNC". + path[6] = b'C' as u16; + path + } + }, + ) + }, + // For everything else, leave the path unchanged. + _ => get_long_path(path, false), + } +} diff --git a/library/std/src/sys/pal/windows/args/tests.rs b/library/std/src/sys/args/windows/tests.rs similarity index 100% rename from library/std/src/sys/pal/windows/args/tests.rs rename to library/std/src/sys/args/windows/tests.rs diff --git a/library/std/src/sys/args/xous.rs b/library/std/src/sys/args/xous.rs new file mode 100644 index 0000000000000..2010bad14d1fb --- /dev/null +++ b/library/std/src/sys/args/xous.rs @@ -0,0 +1,20 @@ +pub use super::common::Args; +use crate::sys::pal::os::get_application_parameters; +use crate::sys::pal::os::params::ArgumentList; + +pub fn args() -> Args { + let Some(params) = get_application_parameters() else { + return Args::new(vec![]); + }; + + for param in params { + if let Ok(args) = ArgumentList::try_from(¶m) { + let mut parsed_args = vec![]; + for arg in args { + parsed_args.push(arg.into()); + } + return Args::new(parsed_args); + } + } + Args::new(vec![]) +} diff --git a/library/std/src/sys/args/zkvm.rs b/library/std/src/sys/args/zkvm.rs new file mode 100644 index 0000000000000..194ba7159d459 --- /dev/null +++ b/library/std/src/sys/args/zkvm.rs @@ -0,0 +1,81 @@ +use crate::ffi::OsString; +use crate::fmt; +use crate::sys::os_str; +use crate::sys::pal::{WORD_SIZE, abi}; +use crate::sys_common::FromInner; + +pub struct Args { + i_forward: usize, + i_back: usize, + count: usize, +} + +pub fn args() -> Args { + let count = unsafe { abi::sys_argc() }; + Args { i_forward: 0, i_back: 0, count } +} + +impl Args { + /// Use sys_argv to get the arg at the requested index. Does not check that i is less than argc + /// and will not return if the index is out of bounds. + fn argv(i: usize) -> OsString { + let arg_len = unsafe { abi::sys_argv(crate::ptr::null_mut(), 0, i) }; + + let arg_len_words = (arg_len + WORD_SIZE - 1) / WORD_SIZE; + let words = unsafe { abi::sys_alloc_words(arg_len_words) }; + + let arg_len2 = unsafe { abi::sys_argv(words, arg_len_words, i) }; + debug_assert_eq!(arg_len, arg_len2); + + // Convert to OsString. + // + // FIXME: We can probably get rid of the extra copy here if we + // reimplement "os_str" instead of just using the generic unix + // "os_str". + let arg_bytes: &[u8] = + unsafe { crate::slice::from_raw_parts(words.cast() as *const u8, arg_len) }; + OsString::from_inner(os_str::Buf { inner: arg_bytes.to_vec() }) + } +} + +impl fmt::Debug for Args { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_list().finish() + } +} + +impl Iterator for Args { + type Item = OsString; + + fn next(&mut self) -> Option { + if self.i_forward >= self.count - self.i_back { + None + } else { + let arg = Self::argv(self.i_forward); + self.i_forward += 1; + Some(arg) + } + } + + fn size_hint(&self) -> (usize, Option) { + (self.count, Some(self.count)) + } +} + +impl ExactSizeIterator for Args { + fn len(&self) -> usize { + self.count + } +} + +impl DoubleEndedIterator for Args { + fn next_back(&mut self) -> Option { + if self.i_back >= self.count - self.i_forward { + None + } else { + let arg = Self::argv(self.count - 1 - self.i_back); + self.i_back += 1; + Some(arg) + } + } +} diff --git a/library/std/src/sys/cmath.rs b/library/std/src/sys/cmath.rs index c9969b4e376ea..668fd92853400 100644 --- a/library/std/src/sys/cmath.rs +++ b/library/std/src/sys/cmath.rs @@ -3,70 +3,70 @@ // These symbols are all defined by `libm`, // or by `compiler-builtins` on unsupported platforms. unsafe extern "C" { - pub fn acos(n: f64) -> f64; - pub fn asin(n: f64) -> f64; - pub fn atan(n: f64) -> f64; - pub fn atan2(a: f64, b: f64) -> f64; - pub fn cbrt(n: f64) -> f64; - pub fn cbrtf(n: f32) -> f32; - pub fn cosh(n: f64) -> f64; - pub fn expm1(n: f64) -> f64; - pub fn expm1f(n: f32) -> f32; - pub fn fdim(a: f64, b: f64) -> f64; - pub fn fdimf(a: f32, b: f32) -> f32; + pub safe fn acos(n: f64) -> f64; + pub safe fn asin(n: f64) -> f64; + pub safe fn atan(n: f64) -> f64; + pub safe fn atan2(a: f64, b: f64) -> f64; + pub safe fn cbrt(n: f64) -> f64; + pub safe fn cbrtf(n: f32) -> f32; + pub safe fn cosh(n: f64) -> f64; + pub safe fn expm1(n: f64) -> f64; + pub safe fn expm1f(n: f32) -> f32; + pub safe fn fdim(a: f64, b: f64) -> f64; + pub safe fn fdimf(a: f32, b: f32) -> f32; #[cfg_attr(target_env = "msvc", link_name = "_hypot")] - pub fn hypot(x: f64, y: f64) -> f64; + pub safe fn hypot(x: f64, y: f64) -> f64; #[cfg_attr(target_env = "msvc", link_name = "_hypotf")] - pub fn hypotf(x: f32, y: f32) -> f32; - pub fn log1p(n: f64) -> f64; - pub fn log1pf(n: f32) -> f32; - pub fn sinh(n: f64) -> f64; - pub fn tan(n: f64) -> f64; - pub fn tanh(n: f64) -> f64; - pub fn tgamma(n: f64) -> f64; - pub fn tgammaf(n: f32) -> f32; - pub fn lgamma_r(n: f64, s: &mut i32) -> f64; + pub safe fn hypotf(x: f32, y: f32) -> f32; + pub safe fn log1p(n: f64) -> f64; + pub safe fn log1pf(n: f32) -> f32; + pub safe fn sinh(n: f64) -> f64; + pub safe fn tan(n: f64) -> f64; + pub safe fn tanh(n: f64) -> f64; + pub safe fn tgamma(n: f64) -> f64; + pub safe fn tgammaf(n: f32) -> f32; + pub safe fn lgamma_r(n: f64, s: &mut i32) -> f64; #[cfg(not(target_os = "aix"))] - pub fn lgammaf_r(n: f32, s: &mut i32) -> f32; - pub fn erf(n: f64) -> f64; - pub fn erff(n: f32) -> f32; - pub fn erfc(n: f64) -> f64; - pub fn erfcf(n: f32) -> f32; + pub safe fn lgammaf_r(n: f32, s: &mut i32) -> f32; + pub safe fn erf(n: f64) -> f64; + pub safe fn erff(n: f32) -> f32; + pub safe fn erfc(n: f64) -> f64; + pub safe fn erfcf(n: f32) -> f32; - pub fn acosf128(n: f128) -> f128; - pub fn asinf128(n: f128) -> f128; - pub fn atanf128(n: f128) -> f128; - pub fn atan2f128(a: f128, b: f128) -> f128; - pub fn cbrtf128(n: f128) -> f128; - pub fn coshf128(n: f128) -> f128; - pub fn expm1f128(n: f128) -> f128; - pub fn hypotf128(x: f128, y: f128) -> f128; - pub fn log1pf128(n: f128) -> f128; - pub fn sinhf128(n: f128) -> f128; - pub fn tanf128(n: f128) -> f128; - pub fn tanhf128(n: f128) -> f128; - pub fn tgammaf128(n: f128) -> f128; - pub fn lgammaf128_r(n: f128, s: &mut i32) -> f128; - pub fn erff128(n: f128) -> f128; - pub fn erfcf128(n: f128) -> f128; + pub safe fn acosf128(n: f128) -> f128; + pub safe fn asinf128(n: f128) -> f128; + pub safe fn atanf128(n: f128) -> f128; + pub safe fn atan2f128(a: f128, b: f128) -> f128; + pub safe fn cbrtf128(n: f128) -> f128; + pub safe fn coshf128(n: f128) -> f128; + pub safe fn expm1f128(n: f128) -> f128; + pub safe fn hypotf128(x: f128, y: f128) -> f128; + pub safe fn log1pf128(n: f128) -> f128; + pub safe fn sinhf128(n: f128) -> f128; + pub safe fn tanf128(n: f128) -> f128; + pub safe fn tanhf128(n: f128) -> f128; + pub safe fn tgammaf128(n: f128) -> f128; + pub safe fn lgammaf128_r(n: f128, s: &mut i32) -> f128; + pub safe fn erff128(n: f128) -> f128; + pub safe fn erfcf128(n: f128) -> f128; cfg_if::cfg_if! { if #[cfg(not(all(target_os = "windows", target_env = "msvc", target_arch = "x86")))] { - pub fn acosf(n: f32) -> f32; - pub fn asinf(n: f32) -> f32; - pub fn atan2f(a: f32, b: f32) -> f32; - pub fn atanf(n: f32) -> f32; - pub fn coshf(n: f32) -> f32; - pub fn sinhf(n: f32) -> f32; - pub fn tanf(n: f32) -> f32; - pub fn tanhf(n: f32) -> f32; + pub safe fn acosf(n: f32) -> f32; + pub safe fn asinf(n: f32) -> f32; + pub safe fn atan2f(a: f32, b: f32) -> f32; + pub safe fn atanf(n: f32) -> f32; + pub safe fn coshf(n: f32) -> f32; + pub safe fn sinhf(n: f32) -> f32; + pub safe fn tanf(n: f32) -> f32; + pub safe fn tanhf(n: f32) -> f32; }} } // On AIX, we don't have lgammaf_r only the f64 version, so we can // use the f64 version lgamma_r #[cfg(target_os = "aix")] -pub unsafe fn lgammaf_r(n: f32, s: &mut i32) -> f32 { +pub fn lgammaf_r(n: f32, s: &mut i32) -> f32 { lgamma_r(n.into(), s) as f32 } @@ -76,42 +76,42 @@ pub unsafe fn lgammaf_r(n: f32, s: &mut i32) -> f32 { cfg_if::cfg_if! { if #[cfg(all(target_os = "windows", target_env = "msvc", target_arch = "x86"))] { #[inline] - pub unsafe fn acosf(n: f32) -> f32 { + pub fn acosf(n: f32) -> f32 { f64::acos(n as f64) as f32 } #[inline] - pub unsafe fn asinf(n: f32) -> f32 { + pub fn asinf(n: f32) -> f32 { f64::asin(n as f64) as f32 } #[inline] - pub unsafe fn atan2f(n: f32, b: f32) -> f32 { + pub fn atan2f(n: f32, b: f32) -> f32 { f64::atan2(n as f64, b as f64) as f32 } #[inline] - pub unsafe fn atanf(n: f32) -> f32 { + pub fn atanf(n: f32) -> f32 { f64::atan(n as f64) as f32 } #[inline] - pub unsafe fn coshf(n: f32) -> f32 { + pub fn coshf(n: f32) -> f32 { f64::cosh(n as f64) as f32 } #[inline] - pub unsafe fn sinhf(n: f32) -> f32 { + pub fn sinhf(n: f32) -> f32 { f64::sinh(n as f64) as f32 } #[inline] - pub unsafe fn tanf(n: f32) -> f32 { + pub fn tanf(n: f32) -> f32 { f64::tan(n as f64) as f32 } #[inline] - pub unsafe fn tanhf(n: f32) -> f32 { + pub fn tanhf(n: f32) -> f32 { f64::tanh(n as f64) as f32 } }} diff --git a/library/std/src/sys/env/common.rs b/library/std/src/sys/env/common.rs new file mode 100644 index 0000000000000..f161ff073f3d5 --- /dev/null +++ b/library/std/src/sys/env/common.rs @@ -0,0 +1,48 @@ +use crate::ffi::OsString; +use crate::{fmt, vec}; + +pub struct Env { + iter: vec::IntoIter<(OsString, OsString)>, +} + +// FIXME(https://github.com/rust-lang/rust/issues/114583): Remove this when ::fmt matches ::fmt. +pub struct EnvStrDebug<'a> { + slice: &'a [(OsString, OsString)], +} + +impl fmt::Debug for EnvStrDebug<'_> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_list() + .entries(self.slice.iter().map(|(a, b)| (a.to_str().unwrap(), b.to_str().unwrap()))) + .finish() + } +} + +impl Env { + pub(super) fn new(env: Vec<(OsString, OsString)>) -> Self { + Env { iter: env.into_iter() } + } + + pub fn str_debug(&self) -> impl fmt::Debug + '_ { + EnvStrDebug { slice: self.iter.as_slice() } + } +} + +impl fmt::Debug for Env { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_list().entries(self.iter.as_slice()).finish() + } +} + +impl !Send for Env {} +impl !Sync for Env {} + +impl Iterator for Env { + type Item = (OsString, OsString); + fn next(&mut self) -> Option<(OsString, OsString)> { + self.iter.next() + } + fn size_hint(&self) -> (usize, Option) { + self.iter.size_hint() + } +} diff --git a/library/std/src/sys/env/hermit.rs b/library/std/src/sys/env/hermit.rs new file mode 100644 index 0000000000000..445ecdeb6a39f --- /dev/null +++ b/library/std/src/sys/env/hermit.rs @@ -0,0 +1,72 @@ +use core::slice::memchr; + +pub use super::common::Env; +use crate::collections::HashMap; +use crate::ffi::{CStr, OsStr, OsString, c_char}; +use crate::io; +use crate::os::hermit::ffi::OsStringExt; +use crate::sync::Mutex; + +static ENV: Mutex>> = Mutex::new(None); + +pub fn init(env: *const *const c_char) { + let mut guard = ENV.lock().unwrap(); + let map = guard.insert(HashMap::new()); + + if env.is_null() { + return; + } + + unsafe { + let mut environ = env; + while !(*environ).is_null() { + if let Some((key, value)) = parse(CStr::from_ptr(*environ).to_bytes()) { + map.insert(key, value); + } + environ = environ.add(1); + } + } + + fn parse(input: &[u8]) -> Option<(OsString, OsString)> { + // Strategy (copied from glibc): Variable name and value are separated + // by an ASCII equals sign '='. Since a variable name must not be + // empty, allow variable names starting with an equals sign. Skip all + // malformed lines. + if input.is_empty() { + return None; + } + let pos = memchr::memchr(b'=', &input[1..]).map(|p| p + 1); + pos.map(|p| { + ( + OsStringExt::from_vec(input[..p].to_vec()), + OsStringExt::from_vec(input[p + 1..].to_vec()), + ) + }) + } +} + +/// Returns a vector of (variable, value) byte-vector pairs for all the +/// environment variables of the current process. +pub fn env() -> Env { + let guard = ENV.lock().unwrap(); + let env = guard.as_ref().unwrap(); + + let result = env.iter().map(|(key, value)| (key.clone(), value.clone())).collect(); + + Env::new(result) +} + +pub fn getenv(k: &OsStr) -> Option { + ENV.lock().unwrap().as_ref().unwrap().get(k).cloned() +} + +pub unsafe fn setenv(k: &OsStr, v: &OsStr) -> io::Result<()> { + let (k, v) = (k.to_owned(), v.to_owned()); + ENV.lock().unwrap().as_mut().unwrap().insert(k, v); + Ok(()) +} + +pub unsafe fn unsetenv(k: &OsStr) -> io::Result<()> { + ENV.lock().unwrap().as_mut().unwrap().remove(k); + Ok(()) +} diff --git a/library/std/src/sys/env/mod.rs b/library/std/src/sys/env/mod.rs new file mode 100644 index 0000000000000..d81ff875c830f --- /dev/null +++ b/library/std/src/sys/env/mod.rs @@ -0,0 +1,48 @@ +//! Platform-dependent environment variables abstraction. + +#![forbid(unsafe_op_in_unsafe_fn)] + +#[cfg(any( + target_family = "unix", + target_os = "hermit", + all(target_vendor = "fortanix", target_env = "sgx"), + target_os = "solid_asp3", + target_os = "uefi", + target_os = "wasi", + target_os = "xous", +))] +mod common; + +cfg_if::cfg_if! { + if #[cfg(target_family = "unix")] { + mod unix; + pub use unix::*; + } else if #[cfg(target_family = "windows")] { + mod windows; + pub use windows::*; + } else if #[cfg(target_os = "hermit")] { + mod hermit; + pub use hermit::*; + } else if #[cfg(all(target_vendor = "fortanix", target_env = "sgx"))] { + mod sgx; + pub use sgx::*; + } else if #[cfg(target_os = "solid_asp3")] { + mod solid; + pub use solid::*; + } else if #[cfg(target_os = "uefi")] { + mod uefi; + pub use uefi::*; + } else if #[cfg(target_os = "wasi")] { + mod wasi; + pub use wasi::*; + } else if #[cfg(target_os = "xous")] { + mod xous; + pub use xous::*; + } else if #[cfg(target_os = "zkvm")] { + mod zkvm; + pub use zkvm::*; + } else { + mod unsupported; + pub use unsupported::*; + } +} diff --git a/library/std/src/sys/env/sgx.rs b/library/std/src/sys/env/sgx.rs new file mode 100644 index 0000000000000..09090ec7cf0dd --- /dev/null +++ b/library/std/src/sys/env/sgx.rs @@ -0,0 +1,55 @@ +#![allow(fuzzy_provenance_casts)] // FIXME: this module systematically confuses pointers and integers + +pub use super::common::Env; +use crate::collections::HashMap; +use crate::ffi::{OsStr, OsString}; +use crate::io; +use crate::sync::atomic::{Atomic, AtomicUsize, Ordering}; +use crate::sync::{Mutex, Once}; + +// Specifying linkage/symbol name is solely to ensure a single instance between this crate and its unit tests +#[cfg_attr(test, linkage = "available_externally")] +#[unsafe(export_name = "_ZN16__rust_internals3std3sys3pal3sgx2os3ENVE")] +static ENV: Atomic = AtomicUsize::new(0); +// Specifying linkage/symbol name is solely to ensure a single instance between this crate and its unit tests +#[cfg_attr(test, linkage = "available_externally")] +#[unsafe(export_name = "_ZN16__rust_internals3std3sys3pal3sgx2os8ENV_INITE")] +static ENV_INIT: Once = Once::new(); +type EnvStore = Mutex>; + +fn get_env_store() -> Option<&'static EnvStore> { + unsafe { (ENV.load(Ordering::Relaxed) as *const EnvStore).as_ref() } +} + +fn create_env_store() -> &'static EnvStore { + ENV_INIT.call_once(|| { + ENV.store(Box::into_raw(Box::new(EnvStore::default())) as _, Ordering::Relaxed) + }); + unsafe { &*(ENV.load(Ordering::Relaxed) as *const EnvStore) } +} + +pub fn env() -> Env { + let clone_to_vec = |map: &HashMap| -> Vec<_> { + map.iter().map(|(k, v)| (k.clone(), v.clone())).collect() + }; + + let env = get_env_store().map(|env| clone_to_vec(&env.lock().unwrap())).unwrap_or_default(); + Env::new(env) +} + +pub fn getenv(k: &OsStr) -> Option { + get_env_store().and_then(|s| s.lock().unwrap().get(k).cloned()) +} + +pub unsafe fn setenv(k: &OsStr, v: &OsStr) -> io::Result<()> { + let (k, v) = (k.to_owned(), v.to_owned()); + create_env_store().lock().unwrap().insert(k, v); + Ok(()) +} + +pub unsafe fn unsetenv(k: &OsStr) -> io::Result<()> { + if let Some(env) = get_env_store() { + env.lock().unwrap().remove(k); + } + Ok(()) +} diff --git a/library/std/src/sys/env/solid.rs b/library/std/src/sys/env/solid.rs new file mode 100644 index 0000000000000..ea77fc3c11930 --- /dev/null +++ b/library/std/src/sys/env/solid.rs @@ -0,0 +1,96 @@ +use core::slice::memchr; + +pub use super::common::Env; +use crate::ffi::{CStr, OsStr, OsString}; +use crate::io; +use crate::os::raw::{c_char, c_int}; +use crate::os::solid::ffi::{OsStrExt, OsStringExt}; +use crate::sync::{PoisonError, RwLock}; +use crate::sys::common::small_c_string::run_with_cstr; + +static ENV_LOCK: RwLock<()> = RwLock::new(()); + +pub fn env_read_lock() -> impl Drop { + ENV_LOCK.read().unwrap_or_else(PoisonError::into_inner) +} + +/// Returns a vector of (variable, value) byte-vector pairs for all the +/// environment variables of the current process. +pub fn env() -> Env { + unsafe extern "C" { + static mut environ: *const *const c_char; + } + + unsafe { + let _guard = env_read_lock(); + let mut result = Vec::new(); + if !environ.is_null() { + while !(*environ).is_null() { + if let Some(key_value) = parse(CStr::from_ptr(*environ).to_bytes()) { + result.push(key_value); + } + environ = environ.add(1); + } + } + return Env::new(result); + } + + fn parse(input: &[u8]) -> Option<(OsString, OsString)> { + // Strategy (copied from glibc): Variable name and value are separated + // by an ASCII equals sign '='. Since a variable name must not be + // empty, allow variable names starting with an equals sign. Skip all + // malformed lines. + if input.is_empty() { + return None; + } + let pos = memchr::memchr(b'=', &input[1..]).map(|p| p + 1); + pos.map(|p| { + ( + OsStringExt::from_vec(input[..p].to_vec()), + OsStringExt::from_vec(input[p + 1..].to_vec()), + ) + }) + } +} + +pub fn getenv(k: &OsStr) -> Option { + // environment variables with a nul byte can't be set, so their value is + // always None as well + run_with_cstr(k.as_bytes(), &|k| { + let _guard = env_read_lock(); + let v = unsafe { libc::getenv(k.as_ptr()) } as *const libc::c_char; + + if v.is_null() { + Ok(None) + } else { + // SAFETY: `v` cannot be mutated while executing this line since we've a read lock + let bytes = unsafe { CStr::from_ptr(v) }.to_bytes().to_vec(); + + Ok(Some(OsStringExt::from_vec(bytes))) + } + }) + .ok() + .flatten() +} + +pub unsafe fn setenv(k: &OsStr, v: &OsStr) -> io::Result<()> { + run_with_cstr(k.as_bytes(), &|k| { + run_with_cstr(v.as_bytes(), &|v| { + let _guard = ENV_LOCK.write(); + cvt_env(unsafe { libc::setenv(k.as_ptr(), v.as_ptr(), 1) }).map(drop) + }) + }) +} + +pub unsafe fn unsetenv(n: &OsStr) -> io::Result<()> { + run_with_cstr(n.as_bytes(), &|nbuf| { + let _guard = ENV_LOCK.write(); + cvt_env(unsafe { libc::unsetenv(nbuf.as_ptr()) }).map(drop) + }) +} + +/// In kmclib, `setenv` and `unsetenv` don't always set `errno`, so this +/// function just returns a generic error. +fn cvt_env(t: c_int) -> io::Result { + if t == -1 { Err(io::const_error!(io::ErrorKind::Uncategorized, "failure")) } else { Ok(t) } +} diff --git a/library/std/src/sys/env/uefi.rs b/library/std/src/sys/env/uefi.rs new file mode 100644 index 0000000000000..1561df41cac3f --- /dev/null +++ b/library/std/src/sys/env/uefi.rs @@ -0,0 +1,102 @@ +pub use super::common::Env; +use crate::ffi::{OsStr, OsString}; +use crate::io; + +pub fn env() -> Env { + let env = uefi_env::get_all().expect("not supported on this platform"); + Env::new(env) +} + +pub fn getenv(key: &OsStr) -> Option { + uefi_env::get(key) +} + +pub unsafe fn setenv(key: &OsStr, val: &OsStr) -> io::Result<()> { + uefi_env::set(key, val) +} + +pub unsafe fn unsetenv(key: &OsStr) -> io::Result<()> { + uefi_env::unset(key) +} + +mod uefi_env { + use crate::ffi::{OsStr, OsString}; + use crate::io; + use crate::os::uefi::ffi::OsStringExt; + use crate::ptr::NonNull; + use crate::sys::{helpers, unsupported_err}; + + pub(crate) fn get(key: &OsStr) -> Option { + let shell = helpers::open_shell()?; + let mut key_ptr = helpers::os_string_to_raw(key)?; + unsafe { get_raw(shell, key_ptr.as_mut_ptr()) } + } + + pub(crate) fn set(key: &OsStr, val: &OsStr) -> io::Result<()> { + let mut key_ptr = helpers::os_string_to_raw(key) + .ok_or(io::const_error!(io::ErrorKind::InvalidInput, "invalid key"))?; + let mut val_ptr = helpers::os_string_to_raw(val) + .ok_or(io::const_error!(io::ErrorKind::InvalidInput, "invalid value"))?; + unsafe { set_raw(key_ptr.as_mut_ptr(), val_ptr.as_mut_ptr()) } + } + + pub(crate) fn unset(key: &OsStr) -> io::Result<()> { + let mut key_ptr = helpers::os_string_to_raw(key) + .ok_or(io::const_error!(io::ErrorKind::InvalidInput, "invalid key"))?; + unsafe { set_raw(key_ptr.as_mut_ptr(), crate::ptr::null_mut()) } + } + + pub(crate) fn get_all() -> io::Result> { + let shell = helpers::open_shell().ok_or(unsupported_err())?; + + let mut vars = Vec::new(); + let val = unsafe { ((*shell.as_ptr()).get_env)(crate::ptr::null_mut()) }; + + if val.is_null() { + return Ok(vars); + } + + let mut start = 0; + + // UEFI Shell returns all keys separated by NULL. + // End of string is denoted by two NULLs + for i in 0.. { + if unsafe { *val.add(i) } == 0 { + // Two NULL signal end of string + if i == start { + break; + } + + let key = OsString::from_wide(unsafe { + crate::slice::from_raw_parts(val.add(start), i - start) + }); + // SAFETY: val.add(start) is always NULL terminated + let val = unsafe { get_raw(shell, val.add(start)) } + .ok_or(io::const_error!(io::ErrorKind::InvalidInput, "invalid value"))?; + + vars.push((key, val)); + start = i + 1; + } + } + + Ok(vars) + } + + unsafe fn get_raw( + shell: NonNull, + key_ptr: *mut r_efi::efi::Char16, + ) -> Option { + let val = unsafe { ((*shell.as_ptr()).get_env)(key_ptr) }; + helpers::os_string_from_raw(val) + } + + unsafe fn set_raw( + key_ptr: *mut r_efi::efi::Char16, + val_ptr: *mut r_efi::efi::Char16, + ) -> io::Result<()> { + let shell = helpers::open_shell().ok_or(unsupported_err())?; + let r = + unsafe { ((*shell.as_ptr()).set_env)(key_ptr, val_ptr, r_efi::efi::Boolean::FALSE) }; + if r.is_error() { Err(io::Error::from_raw_os_error(r.as_usize())) } else { Ok(()) } + } +} diff --git a/library/std/src/sys/env/unix.rs b/library/std/src/sys/env/unix.rs new file mode 100644 index 0000000000000..78c7af65f9e38 --- /dev/null +++ b/library/std/src/sys/env/unix.rs @@ -0,0 +1,126 @@ +use core::slice::memchr; + +use libc::c_char; + +pub use super::common::Env; +use crate::ffi::{CStr, OsStr, OsString}; +use crate::io; +use crate::os::unix::prelude::*; +use crate::sync::{PoisonError, RwLock}; +use crate::sys::common::small_c_string::run_with_cstr; +use crate::sys::cvt; + +// Use `_NSGetEnviron` on Apple platforms. +// +// `_NSGetEnviron` is the documented alternative (see `man environ`), and has +// been available since the first versions of both macOS and iOS. +// +// Nowadays, specifically since macOS 10.8, `environ` has been exposed through +// `libdyld.dylib`, which is linked via. `libSystem.dylib`: +// +// +// So in the end, it likely doesn't really matter which option we use, but the +// performance cost of using `_NSGetEnviron` is extremely miniscule, and it +// might be ever so slightly more supported, so let's just use that. +// +// NOTE: The header where this is defined (`crt_externs.h`) was added to the +// iOS 13.0 SDK, which has been the source of a great deal of confusion in the +// past about the availability of this API. +// +// NOTE(madsmtm): Neither this nor using `environ` has been verified to not +// cause App Store rejections; if this is found to be the case, an alternative +// implementation of this is possible using `[NSProcessInfo environment]` +// - which internally uses `_NSGetEnviron` and a system-wide lock on the +// environment variables to protect against `setenv`, so using that might be +// desirable anyhow? Though it also means that we have to link to Foundation. +#[cfg(target_vendor = "apple")] +pub unsafe fn environ() -> *mut *const *const c_char { + unsafe { libc::_NSGetEnviron() as *mut *const *const c_char } +} + +// Use the `environ` static which is part of POSIX. +#[cfg(not(target_vendor = "apple"))] +pub unsafe fn environ() -> *mut *const *const c_char { + unsafe extern "C" { + static mut environ: *const *const c_char; + } + &raw mut environ +} + +static ENV_LOCK: RwLock<()> = RwLock::new(()); + +pub fn env_read_lock() -> impl Drop { + ENV_LOCK.read().unwrap_or_else(PoisonError::into_inner) +} + +/// Returns a vector of (variable, value) byte-vector pairs for all the +/// environment variables of the current process. +pub fn env() -> Env { + unsafe { + let _guard = env_read_lock(); + let mut environ = *environ(); + let mut result = Vec::new(); + if !environ.is_null() { + while !(*environ).is_null() { + if let Some(key_value) = parse(CStr::from_ptr(*environ).to_bytes()) { + result.push(key_value); + } + environ = environ.add(1); + } + } + return Env::new(result); + } + + fn parse(input: &[u8]) -> Option<(OsString, OsString)> { + // Strategy (copied from glibc): Variable name and value are separated + // by an ASCII equals sign '='. Since a variable name must not be + // empty, allow variable names starting with an equals sign. Skip all + // malformed lines. + if input.is_empty() { + return None; + } + let pos = memchr::memchr(b'=', &input[1..]).map(|p| p + 1); + pos.map(|p| { + ( + OsStringExt::from_vec(input[..p].to_vec()), + OsStringExt::from_vec(input[p + 1..].to_vec()), + ) + }) + } +} + +pub fn getenv(k: &OsStr) -> Option { + // environment variables with a nul byte can't be set, so their value is + // always None as well + run_with_cstr(k.as_bytes(), &|k| { + let _guard = env_read_lock(); + let v = unsafe { libc::getenv(k.as_ptr()) } as *const libc::c_char; + + if v.is_null() { + Ok(None) + } else { + // SAFETY: `v` cannot be mutated while executing this line since we've a read lock + let bytes = unsafe { CStr::from_ptr(v) }.to_bytes().to_vec(); + + Ok(Some(OsStringExt::from_vec(bytes))) + } + }) + .ok() + .flatten() +} + +pub unsafe fn setenv(k: &OsStr, v: &OsStr) -> io::Result<()> { + run_with_cstr(k.as_bytes(), &|k| { + run_with_cstr(v.as_bytes(), &|v| { + let _guard = ENV_LOCK.write(); + cvt(unsafe { libc::setenv(k.as_ptr(), v.as_ptr(), 1) }).map(drop) + }) + }) +} + +pub unsafe fn unsetenv(n: &OsStr) -> io::Result<()> { + run_with_cstr(n.as_bytes(), &|nbuf| { + let _guard = ENV_LOCK.write(); + cvt(unsafe { libc::unsetenv(nbuf.as_ptr()) }).map(drop) + }) +} diff --git a/library/std/src/sys/env/unsupported.rs b/library/std/src/sys/env/unsupported.rs new file mode 100644 index 0000000000000..98905e6482747 --- /dev/null +++ b/library/std/src/sys/env/unsupported.rs @@ -0,0 +1,40 @@ +use crate::ffi::{OsStr, OsString}; +use crate::{fmt, io}; + +pub struct Env(!); + +impl Env { + // FIXME(https://github.com/rust-lang/rust/issues/114583): Remove this when ::fmt matches ::fmt. + pub fn str_debug(&self) -> impl fmt::Debug + '_ { + self.0 + } +} + +impl fmt::Debug for Env { + fn fmt(&self, _: &mut fmt::Formatter<'_>) -> fmt::Result { + self.0 + } +} + +impl Iterator for Env { + type Item = (OsString, OsString); + fn next(&mut self) -> Option<(OsString, OsString)> { + self.0 + } +} + +pub fn env() -> Env { + panic!("not supported on this platform") +} + +pub fn getenv(_: &OsStr) -> Option { + None +} + +pub unsafe fn setenv(_: &OsStr, _: &OsStr) -> io::Result<()> { + Err(io::const_error!(io::ErrorKind::Unsupported, "cannot set env vars on this platform")) +} + +pub unsafe fn unsetenv(_: &OsStr) -> io::Result<()> { + Err(io::const_error!(io::ErrorKind::Unsupported, "cannot unset env vars on this platform")) +} diff --git a/library/std/src/sys/env/wasi.rs b/library/std/src/sys/env/wasi.rs new file mode 100644 index 0000000000000..3719f9db51eb3 --- /dev/null +++ b/library/std/src/sys/env/wasi.rs @@ -0,0 +1,102 @@ +use core::slice::memchr; + +pub use super::common::Env; +use crate::ffi::{CStr, OsStr, OsString}; +use crate::io; +use crate::os::wasi::prelude::*; +use crate::sys::common::small_c_string::run_with_cstr; +use crate::sys::pal::os::{cvt, libc}; + +cfg_if::cfg_if! { + if #[cfg(target_feature = "atomics")] { + // Access to the environment must be protected by a lock in multi-threaded scenarios. + use crate::sync::{PoisonError, RwLock}; + static ENV_LOCK: RwLock<()> = RwLock::new(()); + pub fn env_read_lock() -> impl Drop { + ENV_LOCK.read().unwrap_or_else(PoisonError::into_inner) + } + pub fn env_write_lock() -> impl Drop { + ENV_LOCK.write().unwrap_or_else(PoisonError::into_inner) + } + } else { + // No need for a lock if we are single-threaded. + pub fn env_read_lock() -> impl Drop { + Box::new(()) + } + pub fn env_write_lock() -> impl Drop { + Box::new(()) + } + } +} + +pub fn env() -> Env { + unsafe { + let _guard = env_read_lock(); + + // Use `__wasilibc_get_environ` instead of `environ` here so that we + // don't require wasi-libc to eagerly initialize the environment + // variables. + let mut environ = libc::__wasilibc_get_environ(); + + let mut result = Vec::new(); + if !environ.is_null() { + while !(*environ).is_null() { + if let Some(key_value) = parse(CStr::from_ptr(*environ).to_bytes()) { + result.push(key_value); + } + environ = environ.add(1); + } + } + return Env::new(result); + } + + // See src/libstd/sys/pal/unix/os.rs, same as that + fn parse(input: &[u8]) -> Option<(OsString, OsString)> { + if input.is_empty() { + return None; + } + let pos = memchr::memchr(b'=', &input[1..]).map(|p| p + 1); + pos.map(|p| { + ( + OsStringExt::from_vec(input[..p].to_vec()), + OsStringExt::from_vec(input[p + 1..].to_vec()), + ) + }) + } +} + +pub fn getenv(k: &OsStr) -> Option { + // environment variables with a nul byte can't be set, so their value is + // always None as well + run_with_cstr(k.as_bytes(), &|k| { + let _guard = env_read_lock(); + let v = unsafe { libc::getenv(k.as_ptr()) } as *const libc::c_char; + + if v.is_null() { + Ok(None) + } else { + // SAFETY: `v` cannot be mutated while executing this line since we've a read lock + let bytes = unsafe { CStr::from_ptr(v) }.to_bytes().to_vec(); + + Ok(Some(OsStringExt::from_vec(bytes))) + } + }) + .ok() + .flatten() +} + +pub unsafe fn setenv(k: &OsStr, v: &OsStr) -> io::Result<()> { + run_with_cstr(k.as_bytes(), &|k| { + run_with_cstr(v.as_bytes(), &|v| unsafe { + let _guard = env_write_lock(); + cvt(libc::setenv(k.as_ptr(), v.as_ptr(), 1)).map(drop) + }) + }) +} + +pub unsafe fn unsetenv(n: &OsStr) -> io::Result<()> { + run_with_cstr(n.as_bytes(), &|nbuf| unsafe { + let _guard = env_write_lock(); + cvt(libc::unsetenv(nbuf.as_ptr())).map(drop) + }) +} diff --git a/library/std/src/sys/env/windows.rs b/library/std/src/sys/env/windows.rs new file mode 100644 index 0000000000000..3c4d4a84cfd6b --- /dev/null +++ b/library/std/src/sys/env/windows.rs @@ -0,0 +1,133 @@ +use crate::ffi::{OsStr, OsString}; +use crate::os::windows::prelude::*; +use crate::sys::pal::{c, cvt, fill_utf16_buf, to_u16s}; +use crate::{fmt, io, ptr, slice}; + +pub struct Env { + base: *mut c::WCHAR, + iter: EnvIterator, +} + +// FIXME(https://github.com/rust-lang/rust/issues/114583): Remove this when ::fmt matches ::fmt. +pub struct EnvStrDebug<'a> { + iter: &'a EnvIterator, +} + +impl fmt::Debug for EnvStrDebug<'_> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let Self { iter } = self; + let iter: EnvIterator = (*iter).clone(); + let mut list = f.debug_list(); + for (a, b) in iter { + list.entry(&(a.to_str().unwrap(), b.to_str().unwrap())); + } + list.finish() + } +} + +impl Env { + pub fn str_debug(&self) -> impl fmt::Debug + '_ { + let Self { base: _, iter } = self; + EnvStrDebug { iter } + } +} + +impl fmt::Debug for Env { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let Self { base: _, iter } = self; + f.debug_list().entries(iter.clone()).finish() + } +} + +impl Iterator for Env { + type Item = (OsString, OsString); + + fn next(&mut self) -> Option<(OsString, OsString)> { + let Self { base: _, iter } = self; + iter.next() + } +} + +#[derive(Clone)] +struct EnvIterator(*mut c::WCHAR); + +impl Iterator for EnvIterator { + type Item = (OsString, OsString); + + fn next(&mut self) -> Option<(OsString, OsString)> { + let Self(cur) = self; + loop { + unsafe { + if **cur == 0 { + return None; + } + let p = *cur as *const u16; + let mut len = 0; + while *p.add(len) != 0 { + len += 1; + } + let s = slice::from_raw_parts(p, len); + *cur = cur.add(len + 1); + + // Windows allows environment variables to start with an equals + // symbol (in any other position, this is the separator between + // variable name and value). Since`s` has at least length 1 at + // this point (because the empty string terminates the array of + // environment variables), we can safely slice. + let pos = match s[1..].iter().position(|&u| u == b'=' as u16).map(|p| p + 1) { + Some(p) => p, + None => continue, + }; + return Some(( + OsStringExt::from_wide(&s[..pos]), + OsStringExt::from_wide(&s[pos + 1..]), + )); + } + } + } +} + +impl Drop for Env { + fn drop(&mut self) { + unsafe { + c::FreeEnvironmentStringsW(self.base); + } + } +} + +pub fn env() -> Env { + unsafe { + let ch = c::GetEnvironmentStringsW(); + if ch.is_null() { + panic!("failure getting env string from OS: {}", io::Error::last_os_error()); + } + Env { base: ch, iter: EnvIterator(ch) } + } +} + +pub fn getenv(k: &OsStr) -> Option { + let k = to_u16s(k).ok()?; + fill_utf16_buf( + |buf, sz| unsafe { c::GetEnvironmentVariableW(k.as_ptr(), buf, sz) }, + OsStringExt::from_wide, + ) + .ok() +} + +pub unsafe fn setenv(k: &OsStr, v: &OsStr) -> io::Result<()> { + // SAFETY: We ensure that k and v are null-terminated wide strings. + unsafe { + let k = to_u16s(k)?; + let v = to_u16s(v)?; + + cvt(c::SetEnvironmentVariableW(k.as_ptr(), v.as_ptr())).map(drop) + } +} + +pub unsafe fn unsetenv(n: &OsStr) -> io::Result<()> { + // SAFETY: We ensure that v is a null-terminated wide strings. + unsafe { + let v = to_u16s(n)?; + cvt(c::SetEnvironmentVariableW(v.as_ptr(), ptr::null())).map(drop) + } +} diff --git a/library/std/src/sys/env/xous.rs b/library/std/src/sys/env/xous.rs new file mode 100644 index 0000000000000..8f65f30d35fcc --- /dev/null +++ b/library/std/src/sys/env/xous.rs @@ -0,0 +1,54 @@ +pub use super::common::Env; +use crate::collections::HashMap; +use crate::ffi::{OsStr, OsString}; +use crate::io; +use crate::sync::atomic::{Atomic, AtomicUsize, Ordering}; +use crate::sync::{Mutex, Once}; +use crate::sys::pal::os::{get_application_parameters, params}; + +static ENV: Atomic = AtomicUsize::new(0); +static ENV_INIT: Once = Once::new(); +type EnvStore = Mutex>; + +fn get_env_store() -> &'static EnvStore { + ENV_INIT.call_once(|| { + let env_store = EnvStore::default(); + if let Some(params) = get_application_parameters() { + for param in params { + if let Ok(envs) = params::EnvironmentBlock::try_from(¶m) { + let mut env_store = env_store.lock().unwrap(); + for env in envs { + env_store.insert(env.key.into(), env.value.into()); + } + break; + } + } + } + ENV.store(Box::into_raw(Box::new(env_store)) as _, Ordering::Relaxed) + }); + unsafe { &*core::ptr::with_exposed_provenance::(ENV.load(Ordering::Relaxed)) } +} + +pub fn env() -> Env { + let clone_to_vec = |map: &HashMap| -> Vec<_> { + map.iter().map(|(k, v)| (k.clone(), v.clone())).collect() + }; + + let env = clone_to_vec(&*get_env_store().lock().unwrap()); + Env::new(env) +} + +pub fn getenv(k: &OsStr) -> Option { + get_env_store().lock().unwrap().get(k).cloned() +} + +pub unsafe fn setenv(k: &OsStr, v: &OsStr) -> io::Result<()> { + let (k, v) = (k.to_owned(), v.to_owned()); + get_env_store().lock().unwrap().insert(k, v); + Ok(()) +} + +pub unsafe fn unsetenv(k: &OsStr) -> io::Result<()> { + get_env_store().lock().unwrap().remove(k); + Ok(()) +} diff --git a/library/std/src/sys/env/zkvm.rs b/library/std/src/sys/env/zkvm.rs new file mode 100644 index 0000000000000..2eb7005ba1289 --- /dev/null +++ b/library/std/src/sys/env/zkvm.rs @@ -0,0 +1,32 @@ +#[expect(dead_code)] +#[path = "unsupported.rs"] +mod unsupported_env; +pub use unsupported_env::{Env, env, setenv, unsetenv}; + +use crate::ffi::{OsStr, OsString}; +use crate::sys::os_str; +use crate::sys::pal::{WORD_SIZE, abi}; +use crate::sys_common::FromInner; + +pub fn getenv(varname: &OsStr) -> Option { + let varname = varname.as_encoded_bytes(); + let nbytes = + unsafe { abi::sys_getenv(crate::ptr::null_mut(), 0, varname.as_ptr(), varname.len()) }; + if nbytes == usize::MAX { + return None; + } + + let nwords = (nbytes + WORD_SIZE - 1) / WORD_SIZE; + let words = unsafe { abi::sys_alloc_words(nwords) }; + + let nbytes2 = unsafe { abi::sys_getenv(words, nwords, varname.as_ptr(), varname.len()) }; + debug_assert_eq!(nbytes, nbytes2); + + // Convert to OsString. + // + // FIXME: We can probably get rid of the extra copy here if we + // reimplement "os_str" instead of just using the generic unix + // "os_str". + let u8s: &[u8] = unsafe { crate::slice::from_raw_parts(words.cast() as *const u8, nbytes) }; + Some(OsString::from_inner(os_str::Buf { inner: u8s.to_vec() })) +} diff --git a/library/std/src/sys/env_consts.rs b/library/std/src/sys/env_consts.rs new file mode 100644 index 0000000000000..9683fd47cf96b --- /dev/null +++ b/library/std/src/sys/env_consts.rs @@ -0,0 +1,415 @@ +//! Constants associated with each target. + +// Replaces the #[else] gate with #[cfg(not(any(…)))] of all the other gates. +// This ensures that they must be mutually exclusive and do not have precedence +// like cfg_if!. +macro cfg_unordered( + $(#[cfg($cfg:meta)] $os:item)* + #[else] $fallback:item +) { + $(#[cfg($cfg)] $os)* + #[cfg(not(any($($cfg),*)))] $fallback +} + +// Keep entries sorted alphabetically and mutually exclusive. + +cfg_unordered! { + +#[cfg(target_os = "aix")] +pub mod os { + pub const FAMILY: &str = "unix"; + pub const OS: &str = "aix"; + pub const DLL_PREFIX: &str = "lib"; + pub const DLL_SUFFIX: &str = ".a"; + pub const DLL_EXTENSION: &str = "a"; + pub const EXE_SUFFIX: &str = ""; + pub const EXE_EXTENSION: &str = ""; +} + +#[cfg(target_os = "android")] +pub mod os { + pub const FAMILY: &str = "unix"; + pub const OS: &str = "android"; + pub const DLL_PREFIX: &str = "lib"; + pub const DLL_SUFFIX: &str = ".so"; + pub const DLL_EXTENSION: &str = "so"; + pub const EXE_SUFFIX: &str = ""; + pub const EXE_EXTENSION: &str = ""; +} + +#[cfg(target_os = "cygwin")] +pub mod os { + pub const FAMILY: &str = "unix"; + pub const OS: &str = "cygwin"; + pub const DLL_PREFIX: &str = ""; + pub const DLL_SUFFIX: &str = ".dll"; + pub const DLL_EXTENSION: &str = "dll"; + pub const EXE_SUFFIX: &str = ".exe"; + pub const EXE_EXTENSION: &str = "exe"; +} + +#[cfg(target_os = "dragonfly")] +pub mod os { + pub const FAMILY: &str = "unix"; + pub const OS: &str = "dragonfly"; + pub const DLL_PREFIX: &str = "lib"; + pub const DLL_SUFFIX: &str = ".so"; + pub const DLL_EXTENSION: &str = "so"; + pub const EXE_SUFFIX: &str = ""; + pub const EXE_EXTENSION: &str = ""; +} + +#[cfg(target_os = "emscripten")] +pub mod os { + pub const FAMILY: &str = "unix"; + pub const OS: &str = "emscripten"; + pub const DLL_PREFIX: &str = "lib"; + pub const DLL_SUFFIX: &str = ".so"; + pub const DLL_EXTENSION: &str = "so"; + pub const EXE_SUFFIX: &str = ".js"; + pub const EXE_EXTENSION: &str = "js"; +} + +#[cfg(target_os = "espidf")] +pub mod os { + pub const FAMILY: &str = "unix"; + pub const OS: &str = "espidf"; + pub const DLL_PREFIX: &str = "lib"; + pub const DLL_SUFFIX: &str = ".so"; + pub const DLL_EXTENSION: &str = "so"; + pub const EXE_SUFFIX: &str = ""; + pub const EXE_EXTENSION: &str = ""; +} + +#[cfg(target_os = "freebsd")] +pub mod os { + pub const FAMILY: &str = "unix"; + pub const OS: &str = "freebsd"; + pub const DLL_PREFIX: &str = "lib"; + pub const DLL_SUFFIX: &str = ".so"; + pub const DLL_EXTENSION: &str = "so"; + pub const EXE_SUFFIX: &str = ""; + pub const EXE_EXTENSION: &str = ""; +} + +#[cfg(target_os = "fuchsia")] +pub mod os { + pub const FAMILY: &str = "unix"; + pub const OS: &str = "fuchsia"; + pub const DLL_PREFIX: &str = "lib"; + pub const DLL_SUFFIX: &str = ".so"; + pub const DLL_EXTENSION: &str = "so"; + pub const EXE_SUFFIX: &str = ""; + pub const EXE_EXTENSION: &str = ""; +} + +#[cfg(target_os = "haiku")] +pub mod os { + pub const FAMILY: &str = "unix"; + pub const OS: &str = "haiku"; + pub const DLL_PREFIX: &str = "lib"; + pub const DLL_SUFFIX: &str = ".so"; + pub const DLL_EXTENSION: &str = "so"; + pub const EXE_SUFFIX: &str = ""; + pub const EXE_EXTENSION: &str = ""; +} + +#[cfg(target_os = "hermit")] +pub mod os { + pub const FAMILY: &str = ""; + pub const OS: &str = "hermit"; + pub const DLL_PREFIX: &str = ""; + pub const DLL_SUFFIX: &str = ""; + pub const DLL_EXTENSION: &str = ""; + pub const EXE_SUFFIX: &str = ""; + pub const EXE_EXTENSION: &str = ""; +} + +#[cfg(target_os = "horizon")] +pub mod os { + pub const FAMILY: &str = "unix"; + pub const OS: &str = "horizon"; + pub const DLL_PREFIX: &str = "lib"; + pub const DLL_SUFFIX: &str = ".so"; + pub const DLL_EXTENSION: &str = "so"; + pub const EXE_SUFFIX: &str = ".elf"; + pub const EXE_EXTENSION: &str = "elf"; +} + +#[cfg(target_os = "hurd")] +pub mod os { + pub const FAMILY: &str = "unix"; + pub const OS: &str = "hurd"; + pub const DLL_PREFIX: &str = "lib"; + pub const DLL_SUFFIX: &str = ".so"; + pub const DLL_EXTENSION: &str = "so"; + pub const EXE_SUFFIX: &str = ""; + pub const EXE_EXTENSION: &str = ""; +} + +#[cfg(target_os = "illumos")] +pub mod os { + pub const FAMILY: &str = "unix"; + pub const OS: &str = "illumos"; + pub const DLL_PREFIX: &str = "lib"; + pub const DLL_SUFFIX: &str = ".so"; + pub const DLL_EXTENSION: &str = "so"; + pub const EXE_SUFFIX: &str = ""; + pub const EXE_EXTENSION: &str = ""; +} + +#[cfg(target_os = "ios")] +pub mod os { + pub const FAMILY: &str = "unix"; + pub const OS: &str = "ios"; + pub const DLL_PREFIX: &str = "lib"; + pub const DLL_SUFFIX: &str = ".dylib"; + pub const DLL_EXTENSION: &str = "dylib"; + pub const EXE_SUFFIX: &str = ""; + pub const EXE_EXTENSION: &str = ""; +} + +#[cfg(target_os = "l4re")] +pub mod os { + pub const FAMILY: &str = "unix"; + pub const OS: &str = "l4re"; + pub const DLL_PREFIX: &str = "lib"; + pub const DLL_SUFFIX: &str = ".so"; + pub const DLL_EXTENSION: &str = "so"; + pub const EXE_SUFFIX: &str = ""; + pub const EXE_EXTENSION: &str = ""; +} + +#[cfg(target_os = "linux")] +pub mod os { + pub const FAMILY: &str = "unix"; + pub const OS: &str = "linux"; + pub const DLL_PREFIX: &str = "lib"; + pub const DLL_SUFFIX: &str = ".so"; + pub const DLL_EXTENSION: &str = "so"; + pub const EXE_SUFFIX: &str = ""; + pub const EXE_EXTENSION: &str = ""; +} + +#[cfg(target_os = "macos")] +pub mod os { + pub const FAMILY: &str = "unix"; + pub const OS: &str = "macos"; + pub const DLL_PREFIX: &str = "lib"; + pub const DLL_SUFFIX: &str = ".dylib"; + pub const DLL_EXTENSION: &str = "dylib"; + pub const EXE_SUFFIX: &str = ""; + pub const EXE_EXTENSION: &str = ""; +} + +#[cfg(target_os = "netbsd")] +pub mod os { + pub const FAMILY: &str = "unix"; + pub const OS: &str = "netbsd"; + pub const DLL_PREFIX: &str = "lib"; + pub const DLL_SUFFIX: &str = ".so"; + pub const DLL_EXTENSION: &str = "so"; + pub const EXE_SUFFIX: &str = ""; + pub const EXE_EXTENSION: &str = ""; +} + +#[cfg(target_os = "nto")] +pub mod os { + pub const FAMILY: &str = "unix"; + pub const OS: &str = "nto"; + pub const DLL_PREFIX: &str = "lib"; + pub const DLL_SUFFIX: &str = ".so"; + pub const DLL_EXTENSION: &str = "so"; + pub const EXE_SUFFIX: &str = ""; + pub const EXE_EXTENSION: &str = ""; +} + +#[cfg(target_os = "nuttx")] +pub mod os { + pub const FAMILY: &str = "unix"; + pub const OS: &str = "nuttx"; + pub const DLL_PREFIX: &str = "lib"; + pub const DLL_SUFFIX: &str = ".so"; + pub const DLL_EXTENSION: &str = "so"; + pub const EXE_SUFFIX: &str = ""; + pub const EXE_EXTENSION: &str = ""; +} + +#[cfg(target_os = "openbsd")] +pub mod os { + pub const FAMILY: &str = "unix"; + pub const OS: &str = "openbsd"; + pub const DLL_PREFIX: &str = "lib"; + pub const DLL_SUFFIX: &str = ".so"; + pub const DLL_EXTENSION: &str = "so"; + pub const EXE_SUFFIX: &str = ""; + pub const EXE_EXTENSION: &str = ""; +} + +#[cfg(target_os = "redox")] +pub mod os { + pub const FAMILY: &str = "unix"; + pub const OS: &str = "redox"; + pub const DLL_PREFIX: &str = "lib"; + pub const DLL_SUFFIX: &str = ".so"; + pub const DLL_EXTENSION: &str = "so"; + pub const EXE_SUFFIX: &str = ""; + pub const EXE_EXTENSION: &str = ""; +} + +#[cfg(target_os = "rtems")] +pub mod os { + pub const FAMILY: &str = "unix"; + pub const OS: &str = "rtems"; + pub const DLL_PREFIX: &str = "lib"; + pub const DLL_SUFFIX: &str = ".so"; + pub const DLL_EXTENSION: &str = "so"; + pub const EXE_SUFFIX: &str = ""; + pub const EXE_EXTENSION: &str = ""; +} + +#[cfg(all(target_vendor = "fortanix", target_env = "sgx"))] +pub mod os { + pub const FAMILY: &str = ""; + pub const OS: &str = ""; + pub const DLL_PREFIX: &str = ""; + pub const DLL_SUFFIX: &str = ".sgxs"; + pub const DLL_EXTENSION: &str = "sgxs"; + pub const EXE_SUFFIX: &str = ".sgxs"; + pub const EXE_EXTENSION: &str = "sgxs"; +} + +#[cfg(target_os = "solaris")] +pub mod os { + pub const FAMILY: &str = "unix"; + pub const OS: &str = "solaris"; + pub const DLL_PREFIX: &str = "lib"; + pub const DLL_SUFFIX: &str = ".so"; + pub const DLL_EXTENSION: &str = "so"; + pub const EXE_SUFFIX: &str = ""; + pub const EXE_EXTENSION: &str = ""; +} + +#[cfg(target_os = "solid_asp3")] +pub mod os { + pub const FAMILY: &str = "itron"; + pub const OS: &str = "solid"; + pub const DLL_PREFIX: &str = ""; + pub const DLL_SUFFIX: &str = ".so"; + pub const DLL_EXTENSION: &str = "so"; + pub const EXE_SUFFIX: &str = ""; + pub const EXE_EXTENSION: &str = ""; +} + +#[cfg(target_os = "tvos")] +pub mod os { + pub const FAMILY: &str = "unix"; + pub const OS: &str = "tvos"; + pub const DLL_PREFIX: &str = "lib"; + pub const DLL_SUFFIX: &str = ".dylib"; + pub const DLL_EXTENSION: &str = "dylib"; + pub const EXE_SUFFIX: &str = ""; + pub const EXE_EXTENSION: &str = ""; +} + +#[cfg(target_os = "uefi")] +pub mod os { + pub const FAMILY: &str = ""; + pub const OS: &str = "uefi"; + pub const DLL_PREFIX: &str = ""; + pub const DLL_SUFFIX: &str = ""; + pub const DLL_EXTENSION: &str = ""; + pub const EXE_SUFFIX: &str = ".efi"; + pub const EXE_EXTENSION: &str = "efi"; +} + +#[cfg(target_os = "visionos")] +pub mod os { + pub const FAMILY: &str = "unix"; + pub const OS: &str = "visionos"; + pub const DLL_PREFIX: &str = "lib"; + pub const DLL_SUFFIX: &str = ".dylib"; + pub const DLL_EXTENSION: &str = "dylib"; + pub const EXE_SUFFIX: &str = ""; + pub const EXE_EXTENSION: &str = ""; +} + +#[cfg(target_os = "vita")] +pub mod os { + pub const FAMILY: &str = "unix"; + pub const OS: &str = "vita"; + pub const DLL_PREFIX: &str = "lib"; + pub const DLL_SUFFIX: &str = ".so"; + pub const DLL_EXTENSION: &str = "so"; + pub const EXE_SUFFIX: &str = ".elf"; + pub const EXE_EXTENSION: &str = "elf"; +} + +#[cfg(target_os = "vxworks")] +pub mod os { + pub const FAMILY: &str = "unix"; + pub const OS: &str = "vxworks"; + pub const DLL_PREFIX: &str = "lib"; + pub const DLL_SUFFIX: &str = ".so"; + pub const DLL_EXTENSION: &str = "so"; + pub const EXE_SUFFIX: &str = ""; + pub const EXE_EXTENSION: &str = ""; +} + +#[cfg(all(target_family = "wasm", not(any(target_os = "emscripten", target_os = "linux"))))] +pub mod os { + pub const FAMILY: &str = ""; + pub const OS: &str = ""; + pub const DLL_PREFIX: &str = ""; + pub const DLL_SUFFIX: &str = ".wasm"; + pub const DLL_EXTENSION: &str = "wasm"; + pub const EXE_SUFFIX: &str = ".wasm"; + pub const EXE_EXTENSION: &str = "wasm"; +} + +#[cfg(target_os = "watchos")] +pub mod os { + pub const FAMILY: &str = "unix"; + pub const OS: &str = "watchos"; + pub const DLL_PREFIX: &str = "lib"; + pub const DLL_SUFFIX: &str = ".dylib"; + pub const DLL_EXTENSION: &str = "dylib"; + pub const EXE_SUFFIX: &str = ""; + pub const EXE_EXTENSION: &str = ""; +} + +#[cfg(target_os = "windows")] +pub mod os { + pub const FAMILY: &str = "windows"; + pub const OS: &str = "windows"; + pub const DLL_PREFIX: &str = ""; + pub const DLL_SUFFIX: &str = ".dll"; + pub const DLL_EXTENSION: &str = "dll"; + pub const EXE_SUFFIX: &str = ".exe"; + pub const EXE_EXTENSION: &str = "exe"; +} + +#[cfg(target_os = "zkvm")] +pub mod os { + pub const FAMILY: &str = ""; + pub const OS: &str = ""; + pub const DLL_PREFIX: &str = ""; + pub const DLL_SUFFIX: &str = ".elf"; + pub const DLL_EXTENSION: &str = "elf"; + pub const EXE_SUFFIX: &str = ".elf"; + pub const EXE_EXTENSION: &str = "elf"; +} + +// The fallback when none of the other gates match. +#[else] +pub mod os { + pub const FAMILY: &str = ""; + pub const OS: &str = ""; + pub const DLL_PREFIX: &str = ""; + pub const DLL_SUFFIX: &str = ""; + pub const DLL_EXTENSION: &str = ""; + pub const EXE_SUFFIX: &str = ""; + pub const EXE_EXTENSION: &str = ""; +} + +} diff --git a/library/std/src/sys/exit_guard.rs b/library/std/src/sys/exit_guard.rs index 5a090f506661d..bd70d1782440f 100644 --- a/library/std/src/sys/exit_guard.rs +++ b/library/std/src/sys/exit_guard.rs @@ -1,14 +1,5 @@ cfg_if::cfg_if! { if #[cfg(target_os = "linux")] { - /// pthread_t is a pointer on some platforms, - /// so we wrap it in this to impl Send + Sync. - #[derive(Clone, Copy)] - #[repr(transparent)] - struct PThread(libc::pthread_t); - // Safety: pthread_t is safe to send between threads - unsafe impl Send for PThread {} - // Safety: pthread_t is safe to share between threads - unsafe impl Sync for PThread {} /// Mitigation for /// /// On glibc, `libc::exit` has been observed to not always be thread-safe. @@ -30,28 +21,34 @@ cfg_if::cfg_if! { /// (waiting for the process to exit). #[cfg_attr(any(test, doctest), allow(dead_code))] pub(crate) fn unique_thread_exit() { - let this_thread_id = unsafe { libc::pthread_self() }; - use crate::sync::{Mutex, PoisonError}; - static EXITING_THREAD_ID: Mutex> = Mutex::new(None); - let mut exiting_thread_id = - EXITING_THREAD_ID.lock().unwrap_or_else(PoisonError::into_inner); - match *exiting_thread_id { - None => { + use crate::ffi::c_int; + use crate::ptr; + use crate::sync::atomic::AtomicPtr; + use crate::sync::atomic::Ordering::{Acquire, Relaxed}; + + static EXITING_THREAD_ID: AtomicPtr = AtomicPtr::new(ptr::null_mut()); + + // We use the address of `errno` as a cheap and safe way to identify + // threads. As the C standard mandates that `errno` must have thread + // storage duration, we can rely on its address not changing over the + // lifetime of the thread. Additionally, accesses to `errno` are + // async-signal-safe, so this function is available in all imaginable + // circumstances. + let this_thread_id = crate::sys::os::errno_location(); + match EXITING_THREAD_ID.compare_exchange(ptr::null_mut(), this_thread_id, Acquire, Relaxed) { + Ok(_) => { // This is the first thread to call `unique_thread_exit`, - // and this is the first time it is called. - // Set EXITING_THREAD_ID to this thread's ID and return. - *exiting_thread_id = Some(PThread(this_thread_id)); - }, - Some(exiting_thread_id) if exiting_thread_id.0 == this_thread_id => { + // and this is the first time it is called. Continue exiting. + } + Err(exiting_thread_id) if exiting_thread_id == this_thread_id => { // This is the first thread to call `unique_thread_exit`, // but this is the second time it is called. // Abort the process. core::panicking::panic_nounwind("std::process::exit called re-entrantly") } - Some(_) => { + Err(_) => { // This is not the first thread to call `unique_thread_exit`. // Pause until the process exits. - drop(exiting_thread_id); loop { // Safety: libc::pause is safe to call. unsafe { libc::pause(); } diff --git a/library/std/src/sys/fd/hermit.rs b/library/std/src/sys/fd/hermit.rs new file mode 100644 index 0000000000000..7e8ba065f1b96 --- /dev/null +++ b/library/std/src/sys/fd/hermit.rs @@ -0,0 +1,175 @@ +#![unstable(reason = "not public", issue = "none", feature = "fd")] + +use crate::cmp; +use crate::io::{self, BorrowedCursor, IoSlice, IoSliceMut, Read, SeekFrom}; +use crate::os::hermit::hermit_abi; +use crate::os::hermit::io::{AsFd, AsRawFd, BorrowedFd, FromRawFd, IntoRawFd, OwnedFd, RawFd}; +use crate::sys::{cvt, unsupported}; +use crate::sys_common::{AsInner, FromInner, IntoInner}; + +const fn max_iov() -> usize { + hermit_abi::IOV_MAX +} + +#[derive(Debug)] +pub struct FileDesc { + fd: OwnedFd, +} + +impl FileDesc { + pub fn read(&self, buf: &mut [u8]) -> io::Result { + let result = + cvt(unsafe { hermit_abi::read(self.fd.as_raw_fd(), buf.as_mut_ptr(), buf.len()) })?; + Ok(result as usize) + } + + pub fn read_buf(&self, mut buf: BorrowedCursor<'_>) -> io::Result<()> { + // SAFETY: The `read` syscall does not read from the buffer, so it is + // safe to use `&mut [MaybeUninit]`. + let result = cvt(unsafe { + hermit_abi::read( + self.fd.as_raw_fd(), + buf.as_mut().as_mut_ptr() as *mut u8, + buf.capacity(), + ) + })?; + // SAFETY: Exactly `result` bytes have been filled. + unsafe { buf.advance_unchecked(result as usize) }; + Ok(()) + } + + pub fn read_vectored(&self, bufs: &mut [IoSliceMut<'_>]) -> io::Result { + let ret = cvt(unsafe { + hermit_abi::readv( + self.as_raw_fd(), + bufs.as_mut_ptr() as *mut hermit_abi::iovec as *const hermit_abi::iovec, + cmp::min(bufs.len(), max_iov()), + ) + })?; + Ok(ret as usize) + } + + #[inline] + pub fn is_read_vectored(&self) -> bool { + true + } + + pub fn read_to_end(&self, buf: &mut Vec) -> io::Result { + let mut me = self; + (&mut me).read_to_end(buf) + } + + pub fn write(&self, buf: &[u8]) -> io::Result { + let result = + cvt(unsafe { hermit_abi::write(self.fd.as_raw_fd(), buf.as_ptr(), buf.len()) })?; + Ok(result as usize) + } + + pub fn write_vectored(&self, bufs: &[IoSlice<'_>]) -> io::Result { + let ret = cvt(unsafe { + hermit_abi::writev( + self.as_raw_fd(), + bufs.as_ptr() as *const hermit_abi::iovec, + cmp::min(bufs.len(), max_iov()), + ) + })?; + Ok(ret as usize) + } + + #[inline] + pub fn is_write_vectored(&self) -> bool { + true + } + + pub fn seek(&self, pos: SeekFrom) -> io::Result { + let (whence, pos) = match pos { + // Casting to `i64` is fine, too large values will end up as + // negative which will cause an error in `lseek`. + SeekFrom::Start(off) => (hermit_abi::SEEK_SET, off as i64), + SeekFrom::End(off) => (hermit_abi::SEEK_END, off), + SeekFrom::Current(off) => (hermit_abi::SEEK_CUR, off), + }; + let n = cvt(unsafe { hermit_abi::lseek(self.as_raw_fd(), pos as isize, whence) })?; + Ok(n as u64) + } + + pub fn tell(&self) -> io::Result { + self.seek(SeekFrom::Current(0)) + } + + pub fn duplicate(&self) -> io::Result { + self.duplicate_path(&[]) + } + + pub fn duplicate_path(&self, _path: &[u8]) -> io::Result { + unsupported() + } + + pub fn nonblocking(&self) -> io::Result { + Ok(false) + } + + pub fn set_cloexec(&self) -> io::Result<()> { + unsupported() + } + + pub fn set_nonblocking(&self, _nonblocking: bool) -> io::Result<()> { + unsupported() + } + + pub fn fstat(&self, stat: *mut hermit_abi::stat) -> io::Result<()> { + cvt(unsafe { hermit_abi::fstat(self.fd.as_raw_fd(), stat) })?; + Ok(()) + } +} + +impl<'a> Read for &'a FileDesc { + fn read(&mut self, buf: &mut [u8]) -> io::Result { + (**self).read(buf) + } +} + +impl IntoInner for FileDesc { + fn into_inner(self) -> OwnedFd { + self.fd + } +} + +impl FromInner for FileDesc { + fn from_inner(owned_fd: OwnedFd) -> Self { + Self { fd: owned_fd } + } +} + +impl FromRawFd for FileDesc { + unsafe fn from_raw_fd(raw_fd: RawFd) -> Self { + let fd = unsafe { OwnedFd::from_raw_fd(raw_fd) }; + Self { fd } + } +} + +impl AsInner for FileDesc { + #[inline] + fn as_inner(&self) -> &OwnedFd { + &self.fd + } +} + +impl AsFd for FileDesc { + fn as_fd(&self) -> BorrowedFd<'_> { + self.fd.as_fd() + } +} + +impl AsRawFd for FileDesc { + #[inline] + fn as_raw_fd(&self) -> RawFd { + self.fd.as_raw_fd() + } +} + +impl IntoRawFd for FileDesc { + fn into_raw_fd(self) -> RawFd { + self.fd.into_raw_fd() + } +} diff --git a/library/std/src/sys/fd/mod.rs b/library/std/src/sys/fd/mod.rs new file mode 100644 index 0000000000000..e0f5eab69514e --- /dev/null +++ b/library/std/src/sys/fd/mod.rs @@ -0,0 +1,19 @@ +//! Platform-dependent file descriptor abstraction. + +#![forbid(unsafe_op_in_unsafe_fn)] + +cfg_if::cfg_if! { + if #[cfg(target_family = "unix")] { + mod unix; + pub use unix::*; + } else if #[cfg(target_os = "hermit")] { + mod hermit; + pub use hermit::*; + } else if #[cfg(all(target_vendor = "fortanix", target_env = "sgx"))] { + mod sgx; + pub use sgx::*; + } else if #[cfg(target_os = "wasi")] { + mod wasi; + pub use wasi::*; + } +} diff --git a/library/std/src/sys/fd/sgx.rs b/library/std/src/sys/fd/sgx.rs new file mode 100644 index 0000000000000..1ef768db64c7f --- /dev/null +++ b/library/std/src/sys/fd/sgx.rs @@ -0,0 +1,85 @@ +use fortanix_sgx_abi::Fd; + +use crate::io::{self, BorrowedCursor, IoSlice, IoSliceMut}; +use crate::mem::ManuallyDrop; +use crate::sys::pal::abi::usercalls; +use crate::sys::{AsInner, FromInner, IntoInner}; + +#[derive(Debug)] +pub struct FileDesc { + fd: Fd, +} + +impl FileDesc { + pub fn new(fd: Fd) -> FileDesc { + FileDesc { fd } + } + + pub fn raw(&self) -> Fd { + self.fd + } + + /// Extracts the actual file descriptor without closing it. + pub fn into_raw(self) -> Fd { + ManuallyDrop::new(self).fd + } + + pub fn read(&self, buf: &mut [u8]) -> io::Result { + usercalls::read(self.fd, &mut [IoSliceMut::new(buf)]) + } + + pub fn read_buf(&self, buf: BorrowedCursor<'_>) -> io::Result<()> { + usercalls::read_buf(self.fd, buf) + } + + pub fn read_vectored(&self, bufs: &mut [IoSliceMut<'_>]) -> io::Result { + usercalls::read(self.fd, bufs) + } + + #[inline] + pub fn is_read_vectored(&self) -> bool { + true + } + + pub fn write(&self, buf: &[u8]) -> io::Result { + usercalls::write(self.fd, &[IoSlice::new(buf)]) + } + + pub fn write_vectored(&self, bufs: &[IoSlice<'_>]) -> io::Result { + usercalls::write(self.fd, bufs) + } + + #[inline] + pub fn is_write_vectored(&self) -> bool { + true + } + + pub fn flush(&self) -> io::Result<()> { + usercalls::flush(self.fd) + } +} + +impl AsInner for FileDesc { + #[inline] + fn as_inner(&self) -> &Fd { + &self.fd + } +} + +impl IntoInner for FileDesc { + fn into_inner(self) -> Fd { + ManuallyDrop::new(self).fd + } +} + +impl FromInner for FileDesc { + fn from_inner(fd: Fd) -> FileDesc { + FileDesc { fd } + } +} + +impl Drop for FileDesc { + fn drop(&mut self) { + usercalls::close(self.fd) + } +} diff --git a/library/std/src/sys/fd/unix.rs b/library/std/src/sys/fd/unix.rs new file mode 100644 index 0000000000000..cdca73cdca11e --- /dev/null +++ b/library/std/src/sys/fd/unix.rs @@ -0,0 +1,677 @@ +#![unstable(reason = "not public", issue = "none", feature = "fd")] + +#[cfg(test)] +mod tests; + +#[cfg(not(any( + target_os = "linux", + target_os = "l4re", + target_os = "android", + target_os = "hurd", +)))] +use libc::off_t as off64_t; +#[cfg(any( + target_os = "android", + target_os = "linux", + target_os = "l4re", + target_os = "hurd", +))] +use libc::off64_t; + +use crate::cmp; +use crate::io::{self, BorrowedCursor, IoSlice, IoSliceMut, Read}; +use crate::os::unix::io::{AsFd, AsRawFd, BorrowedFd, FromRawFd, IntoRawFd, OwnedFd, RawFd}; +use crate::sys::cvt; +#[cfg(all(target_os = "android", target_pointer_width = "64"))] +use crate::sys::pal::weak::syscall; +#[cfg(any(all(target_os = "android", target_pointer_width = "32"), target_vendor = "apple"))] +use crate::sys::pal::weak::weak; +use crate::sys_common::{AsInner, FromInner, IntoInner}; + +#[derive(Debug)] +pub struct FileDesc(OwnedFd); + +// The maximum read limit on most POSIX-like systems is `SSIZE_MAX`, +// with the man page quoting that if the count of bytes to read is +// greater than `SSIZE_MAX` the result is "unspecified". +// +// On Apple targets however, apparently the 64-bit libc is either buggy or +// intentionally showing odd behavior by rejecting any read with a size +// larger than or equal to INT_MAX. To handle both of these the read +// size is capped on both platforms. +const READ_LIMIT: usize = if cfg!(target_vendor = "apple") { + libc::c_int::MAX as usize - 1 +} else { + libc::ssize_t::MAX as usize +}; + +#[cfg(any( + target_os = "dragonfly", + target_os = "freebsd", + target_os = "netbsd", + target_os = "openbsd", + target_vendor = "apple", + target_os = "cygwin", +))] +const fn max_iov() -> usize { + libc::IOV_MAX as usize +} + +#[cfg(any( + target_os = "android", + target_os = "emscripten", + target_os = "linux", + target_os = "nto", +))] +const fn max_iov() -> usize { + libc::UIO_MAXIOV as usize +} + +#[cfg(not(any( + target_os = "android", + target_os = "dragonfly", + target_os = "emscripten", + target_os = "espidf", + target_os = "freebsd", + target_os = "linux", + target_os = "netbsd", + target_os = "nuttx", + target_os = "nto", + target_os = "openbsd", + target_os = "horizon", + target_os = "vita", + target_vendor = "apple", + target_os = "cygwin", +)))] +const fn max_iov() -> usize { + 16 // The minimum value required by POSIX. +} + +impl FileDesc { + #[inline] + pub fn try_clone(&self) -> io::Result { + self.duplicate() + } + + pub fn read(&self, buf: &mut [u8]) -> io::Result { + let ret = cvt(unsafe { + libc::read( + self.as_raw_fd(), + buf.as_mut_ptr() as *mut libc::c_void, + cmp::min(buf.len(), READ_LIMIT), + ) + })?; + Ok(ret as usize) + } + + #[cfg(not(any( + target_os = "espidf", + target_os = "horizon", + target_os = "vita", + target_os = "nuttx" + )))] + pub fn read_vectored(&self, bufs: &mut [IoSliceMut<'_>]) -> io::Result { + let ret = cvt(unsafe { + libc::readv( + self.as_raw_fd(), + bufs.as_mut_ptr() as *mut libc::iovec as *const libc::iovec, + cmp::min(bufs.len(), max_iov()) as libc::c_int, + ) + })?; + Ok(ret as usize) + } + + #[cfg(any( + target_os = "espidf", + target_os = "horizon", + target_os = "vita", + target_os = "nuttx" + ))] + pub fn read_vectored(&self, bufs: &mut [IoSliceMut<'_>]) -> io::Result { + io::default_read_vectored(|b| self.read(b), bufs) + } + + #[inline] + pub fn is_read_vectored(&self) -> bool { + cfg!(not(any( + target_os = "espidf", + target_os = "horizon", + target_os = "vita", + target_os = "nuttx" + ))) + } + + pub fn read_to_end(&self, buf: &mut Vec) -> io::Result { + let mut me = self; + (&mut me).read_to_end(buf) + } + + #[cfg_attr(target_os = "vxworks", allow(unused_unsafe))] + pub fn read_at(&self, buf: &mut [u8], offset: u64) -> io::Result { + #[cfg(not(any( + all(target_os = "linux", not(target_env = "musl")), + target_os = "android", + target_os = "hurd" + )))] + use libc::pread as pread64; + #[cfg(any( + all(target_os = "linux", not(target_env = "musl")), + target_os = "android", + target_os = "hurd" + ))] + use libc::pread64; + + unsafe { + cvt(pread64( + self.as_raw_fd(), + buf.as_mut_ptr() as *mut libc::c_void, + cmp::min(buf.len(), READ_LIMIT), + offset as off64_t, + )) + .map(|n| n as usize) + } + } + + pub fn read_buf(&self, mut cursor: BorrowedCursor<'_>) -> io::Result<()> { + let ret = cvt(unsafe { + libc::read( + self.as_raw_fd(), + cursor.as_mut().as_mut_ptr() as *mut libc::c_void, + cmp::min(cursor.capacity(), READ_LIMIT), + ) + })?; + + // Safety: `ret` bytes were written to the initialized portion of the buffer + unsafe { + cursor.advance_unchecked(ret as usize); + } + Ok(()) + } + + #[cfg(any( + target_os = "aix", + target_os = "dragonfly", // DragonFly 1.5 + target_os = "emscripten", + target_os = "freebsd", + target_os = "fuchsia", + target_os = "hurd", + target_os = "illumos", + target_os = "linux", + target_os = "netbsd", + target_os = "openbsd", // OpenBSD 2.7 + ))] + pub fn read_vectored_at(&self, bufs: &mut [IoSliceMut<'_>], offset: u64) -> io::Result { + let ret = cvt(unsafe { + libc::preadv( + self.as_raw_fd(), + bufs.as_mut_ptr() as *mut libc::iovec as *const libc::iovec, + cmp::min(bufs.len(), max_iov()) as libc::c_int, + offset as _, + ) + })?; + Ok(ret as usize) + } + + #[cfg(not(any( + target_os = "aix", + target_os = "android", + target_os = "dragonfly", + target_os = "emscripten", + target_os = "freebsd", + target_os = "fuchsia", + target_os = "hurd", + target_os = "illumos", + target_os = "linux", + target_os = "netbsd", + target_os = "openbsd", + target_vendor = "apple", + )))] + pub fn read_vectored_at(&self, bufs: &mut [IoSliceMut<'_>], offset: u64) -> io::Result { + io::default_read_vectored(|b| self.read_at(b, offset), bufs) + } + + // We support some old Android versions that do not have `preadv` in libc, + // so we use weak linkage and fallback to a direct syscall if not available. + // + // On 32-bit targets, we don't want to deal with weird ABI issues around + // passing 64-bits parameters to syscalls, so we fallback to the default + // implementation if `preadv` is not available. + #[cfg(all(target_os = "android", target_pointer_width = "64"))] + pub fn read_vectored_at(&self, bufs: &mut [IoSliceMut<'_>], offset: u64) -> io::Result { + syscall!( + fn preadv( + fd: libc::c_int, + iovec: *const libc::iovec, + n_iovec: libc::c_int, + offset: off64_t, + ) -> isize; + ); + + let ret = cvt(unsafe { + preadv( + self.as_raw_fd(), + bufs.as_mut_ptr() as *mut libc::iovec as *const libc::iovec, + cmp::min(bufs.len(), max_iov()) as libc::c_int, + offset as _, + ) + })?; + Ok(ret as usize) + } + + #[cfg(all(target_os = "android", target_pointer_width = "32"))] + pub fn read_vectored_at(&self, bufs: &mut [IoSliceMut<'_>], offset: u64) -> io::Result { + weak!( + fn preadv64( + fd: libc::c_int, + iovec: *const libc::iovec, + n_iovec: libc::c_int, + offset: off64_t, + ) -> isize; + ); + + match preadv64.get() { + Some(preadv) => { + let ret = cvt(unsafe { + preadv( + self.as_raw_fd(), + bufs.as_mut_ptr() as *mut libc::iovec as *const libc::iovec, + cmp::min(bufs.len(), max_iov()) as libc::c_int, + offset as _, + ) + })?; + Ok(ret as usize) + } + None => io::default_read_vectored(|b| self.read_at(b, offset), bufs), + } + } + + // We support old MacOS, iOS, watchOS, tvOS and visionOS. `preadv` was added in the following + // Apple OS versions: + // ios 14.0 + // tvos 14.0 + // macos 11.0 + // watchos 7.0 + // + // These versions may be newer than the minimum supported versions of OS's we support so we must + // use "weak" linking. + #[cfg(target_vendor = "apple")] + pub fn read_vectored_at(&self, bufs: &mut [IoSliceMut<'_>], offset: u64) -> io::Result { + weak!( + fn preadv( + fd: libc::c_int, + iovec: *const libc::iovec, + n_iovec: libc::c_int, + offset: off64_t, + ) -> isize; + ); + + match preadv.get() { + Some(preadv) => { + let ret = cvt(unsafe { + preadv( + self.as_raw_fd(), + bufs.as_mut_ptr() as *mut libc::iovec as *const libc::iovec, + cmp::min(bufs.len(), max_iov()) as libc::c_int, + offset as _, + ) + })?; + Ok(ret as usize) + } + None => io::default_read_vectored(|b| self.read_at(b, offset), bufs), + } + } + + pub fn write(&self, buf: &[u8]) -> io::Result { + let ret = cvt(unsafe { + libc::write( + self.as_raw_fd(), + buf.as_ptr() as *const libc::c_void, + cmp::min(buf.len(), READ_LIMIT), + ) + })?; + Ok(ret as usize) + } + + #[cfg(not(any( + target_os = "espidf", + target_os = "horizon", + target_os = "vita", + target_os = "nuttx" + )))] + pub fn write_vectored(&self, bufs: &[IoSlice<'_>]) -> io::Result { + let ret = cvt(unsafe { + libc::writev( + self.as_raw_fd(), + bufs.as_ptr() as *const libc::iovec, + cmp::min(bufs.len(), max_iov()) as libc::c_int, + ) + })?; + Ok(ret as usize) + } + + #[cfg(any( + target_os = "espidf", + target_os = "horizon", + target_os = "vita", + target_os = "nuttx" + ))] + pub fn write_vectored(&self, bufs: &[IoSlice<'_>]) -> io::Result { + io::default_write_vectored(|b| self.write(b), bufs) + } + + #[inline] + pub fn is_write_vectored(&self) -> bool { + cfg!(not(any( + target_os = "espidf", + target_os = "horizon", + target_os = "vita", + target_os = "nuttx" + ))) + } + + #[cfg_attr(target_os = "vxworks", allow(unused_unsafe))] + pub fn write_at(&self, buf: &[u8], offset: u64) -> io::Result { + #[cfg(not(any( + all(target_os = "linux", not(target_env = "musl")), + target_os = "android", + target_os = "hurd" + )))] + use libc::pwrite as pwrite64; + #[cfg(any( + all(target_os = "linux", not(target_env = "musl")), + target_os = "android", + target_os = "hurd" + ))] + use libc::pwrite64; + + unsafe { + cvt(pwrite64( + self.as_raw_fd(), + buf.as_ptr() as *const libc::c_void, + cmp::min(buf.len(), READ_LIMIT), + offset as off64_t, + )) + .map(|n| n as usize) + } + } + + #[cfg(any( + target_os = "aix", + target_os = "dragonfly", // DragonFly 1.5 + target_os = "emscripten", + target_os = "freebsd", + target_os = "fuchsia", + target_os = "hurd", + target_os = "illumos", + target_os = "linux", + target_os = "netbsd", + target_os = "openbsd", // OpenBSD 2.7 + ))] + pub fn write_vectored_at(&self, bufs: &[IoSlice<'_>], offset: u64) -> io::Result { + let ret = cvt(unsafe { + libc::pwritev( + self.as_raw_fd(), + bufs.as_ptr() as *const libc::iovec, + cmp::min(bufs.len(), max_iov()) as libc::c_int, + offset as _, + ) + })?; + Ok(ret as usize) + } + + #[cfg(not(any( + target_os = "aix", + target_os = "android", + target_os = "dragonfly", + target_os = "emscripten", + target_os = "freebsd", + target_os = "fuchsia", + target_os = "hurd", + target_os = "illumos", + target_os = "linux", + target_os = "netbsd", + target_os = "openbsd", + target_vendor = "apple", + )))] + pub fn write_vectored_at(&self, bufs: &[IoSlice<'_>], offset: u64) -> io::Result { + io::default_write_vectored(|b| self.write_at(b, offset), bufs) + } + + // We support some old Android versions that do not have `pwritev` in libc, + // so we use weak linkage and fallback to a direct syscall if not available. + // + // On 32-bit targets, we don't want to deal with weird ABI issues around + // passing 64-bits parameters to syscalls, so we fallback to the default + // implementation if `pwritev` is not available. + #[cfg(all(target_os = "android", target_pointer_width = "64"))] + pub fn write_vectored_at(&self, bufs: &[IoSlice<'_>], offset: u64) -> io::Result { + syscall!( + fn pwritev( + fd: libc::c_int, + iovec: *const libc::iovec, + n_iovec: libc::c_int, + offset: off64_t, + ) -> isize; + ); + + let ret = cvt(unsafe { + pwritev( + self.as_raw_fd(), + bufs.as_ptr() as *const libc::iovec, + cmp::min(bufs.len(), max_iov()) as libc::c_int, + offset as _, + ) + })?; + Ok(ret as usize) + } + + #[cfg(all(target_os = "android", target_pointer_width = "32"))] + pub fn write_vectored_at(&self, bufs: &[IoSlice<'_>], offset: u64) -> io::Result { + weak!( + fn pwritev64( + fd: libc::c_int, + iovec: *const libc::iovec, + n_iovec: libc::c_int, + offset: off64_t, + ) -> isize; + ); + + match pwritev64.get() { + Some(pwritev) => { + let ret = cvt(unsafe { + pwritev( + self.as_raw_fd(), + bufs.as_ptr() as *const libc::iovec, + cmp::min(bufs.len(), max_iov()) as libc::c_int, + offset as _, + ) + })?; + Ok(ret as usize) + } + None => io::default_write_vectored(|b| self.write_at(b, offset), bufs), + } + } + + // We support old MacOS, iOS, watchOS, tvOS and visionOS. `pwritev` was added in the following + // Apple OS versions: + // ios 14.0 + // tvos 14.0 + // macos 11.0 + // watchos 7.0 + // + // These versions may be newer than the minimum supported versions of OS's we support so we must + // use "weak" linking. + #[cfg(target_vendor = "apple")] + pub fn write_vectored_at(&self, bufs: &[IoSlice<'_>], offset: u64) -> io::Result { + weak!( + fn pwritev( + fd: libc::c_int, + iovec: *const libc::iovec, + n_iovec: libc::c_int, + offset: off64_t, + ) -> isize; + ); + + match pwritev.get() { + Some(pwritev) => { + let ret = cvt(unsafe { + pwritev( + self.as_raw_fd(), + bufs.as_ptr() as *const libc::iovec, + cmp::min(bufs.len(), max_iov()) as libc::c_int, + offset as _, + ) + })?; + Ok(ret as usize) + } + None => io::default_write_vectored(|b| self.write_at(b, offset), bufs), + } + } + + #[cfg(not(any( + target_env = "newlib", + target_os = "solaris", + target_os = "illumos", + target_os = "emscripten", + target_os = "fuchsia", + target_os = "l4re", + target_os = "linux", + target_os = "cygwin", + target_os = "haiku", + target_os = "redox", + target_os = "vxworks", + target_os = "nto", + )))] + pub fn set_cloexec(&self) -> io::Result<()> { + unsafe { + cvt(libc::ioctl(self.as_raw_fd(), libc::FIOCLEX))?; + Ok(()) + } + } + #[cfg(any( + all( + target_env = "newlib", + not(any(target_os = "espidf", target_os = "horizon", target_os = "vita")) + ), + target_os = "solaris", + target_os = "illumos", + target_os = "emscripten", + target_os = "fuchsia", + target_os = "l4re", + target_os = "linux", + target_os = "cygwin", + target_os = "haiku", + target_os = "redox", + target_os = "vxworks", + target_os = "nto", + ))] + pub fn set_cloexec(&self) -> io::Result<()> { + unsafe { + let previous = cvt(libc::fcntl(self.as_raw_fd(), libc::F_GETFD))?; + let new = previous | libc::FD_CLOEXEC; + if new != previous { + cvt(libc::fcntl(self.as_raw_fd(), libc::F_SETFD, new))?; + } + Ok(()) + } + } + #[cfg(any(target_os = "espidf", target_os = "horizon", target_os = "vita"))] + pub fn set_cloexec(&self) -> io::Result<()> { + // FD_CLOEXEC is not supported in ESP-IDF, Horizon OS and Vita but there's no need to, + // because none of them supports spawning processes. + Ok(()) + } + + #[cfg(target_os = "linux")] + pub fn set_nonblocking(&self, nonblocking: bool) -> io::Result<()> { + unsafe { + let v = nonblocking as libc::c_int; + cvt(libc::ioctl(self.as_raw_fd(), libc::FIONBIO, &v))?; + Ok(()) + } + } + + #[cfg(not(target_os = "linux"))] + pub fn set_nonblocking(&self, nonblocking: bool) -> io::Result<()> { + unsafe { + let previous = cvt(libc::fcntl(self.as_raw_fd(), libc::F_GETFL))?; + let new = if nonblocking { + previous | libc::O_NONBLOCK + } else { + previous & !libc::O_NONBLOCK + }; + if new != previous { + cvt(libc::fcntl(self.as_raw_fd(), libc::F_SETFL, new))?; + } + Ok(()) + } + } + + #[inline] + pub fn duplicate(&self) -> io::Result { + Ok(Self(self.0.try_clone()?)) + } +} + +impl<'a> Read for &'a FileDesc { + fn read(&mut self, buf: &mut [u8]) -> io::Result { + (**self).read(buf) + } + + fn read_buf(&mut self, cursor: BorrowedCursor<'_>) -> io::Result<()> { + (**self).read_buf(cursor) + } + + fn read_vectored(&mut self, bufs: &mut [IoSliceMut<'_>]) -> io::Result { + (**self).read_vectored(bufs) + } + + #[inline] + fn is_read_vectored(&self) -> bool { + (**self).is_read_vectored() + } +} + +impl AsInner for FileDesc { + #[inline] + fn as_inner(&self) -> &OwnedFd { + &self.0 + } +} + +impl IntoInner for FileDesc { + fn into_inner(self) -> OwnedFd { + self.0 + } +} + +impl FromInner for FileDesc { + fn from_inner(owned_fd: OwnedFd) -> Self { + Self(owned_fd) + } +} + +impl AsFd for FileDesc { + fn as_fd(&self) -> BorrowedFd<'_> { + self.0.as_fd() + } +} + +impl AsRawFd for FileDesc { + #[inline] + fn as_raw_fd(&self) -> RawFd { + self.0.as_raw_fd() + } +} + +impl IntoRawFd for FileDesc { + fn into_raw_fd(self) -> RawFd { + self.0.into_raw_fd() + } +} + +impl FromRawFd for FileDesc { + unsafe fn from_raw_fd(raw_fd: RawFd) -> Self { + Self(unsafe { FromRawFd::from_raw_fd(raw_fd) }) + } +} diff --git a/library/std/src/sys/fd/unix/tests.rs b/library/std/src/sys/fd/unix/tests.rs new file mode 100644 index 0000000000000..fcd66c71707d9 --- /dev/null +++ b/library/std/src/sys/fd/unix/tests.rs @@ -0,0 +1,12 @@ +use core::mem::ManuallyDrop; + +use super::FileDesc; +use crate::io::IoSlice; +use crate::os::unix::io::FromRawFd; + +#[test] +fn limit_vector_count() { + let stdout = ManuallyDrop::new(unsafe { FileDesc::from_raw_fd(1) }); + let bufs = (0..1500).map(|_| IoSlice::new(&[])).collect::>(); + assert!(stdout.write_vectored(&bufs).is_ok()); +} diff --git a/library/std/src/sys/fd/wasi.rs b/library/std/src/sys/fd/wasi.rs new file mode 100644 index 0000000000000..80a5143ff0b00 --- /dev/null +++ b/library/std/src/sys/fd/wasi.rs @@ -0,0 +1,331 @@ +#![expect(dead_code)] + +use crate::io::{self, BorrowedCursor, IoSlice, IoSliceMut, SeekFrom}; +use crate::mem; +use crate::net::Shutdown; +use crate::os::wasi::io::{AsFd, AsRawFd, BorrowedFd, FromRawFd, IntoRawFd, OwnedFd, RawFd}; +use crate::sys::pal::err2io; +use crate::sys_common::{AsInner, AsInnerMut, FromInner, IntoInner}; + +#[derive(Debug)] +pub struct WasiFd { + fd: OwnedFd, +} + +fn iovec<'a>(a: &'a mut [IoSliceMut<'_>]) -> &'a [wasi::Iovec] { + assert_eq!(size_of::>(), size_of::()); + assert_eq!(align_of::>(), align_of::()); + // SAFETY: `IoSliceMut` and `IoVec` have exactly the same memory layout. + // We decorate our `IoSliceMut` with `repr(transparent)` (see `io.rs`), and + // `crate::io::IoSliceMut` is a `repr(transparent)` wrapper around our type, so this is + // guaranteed. + unsafe { mem::transmute(a) } +} + +fn ciovec<'a>(a: &'a [IoSlice<'_>]) -> &'a [wasi::Ciovec] { + assert_eq!(size_of::>(), size_of::()); + assert_eq!(align_of::>(), align_of::()); + // SAFETY: `IoSlice` and `CIoVec` have exactly the same memory layout. + // We decorate our `IoSlice` with `repr(transparent)` (see `io.rs`), and + // `crate::io::IoSlice` is a `repr(transparent)` wrapper around our type, so this is + // guaranteed. + unsafe { mem::transmute(a) } +} + +impl WasiFd { + pub fn datasync(&self) -> io::Result<()> { + unsafe { wasi::fd_datasync(self.as_raw_fd() as wasi::Fd).map_err(err2io) } + } + + pub fn pread(&self, bufs: &mut [IoSliceMut<'_>], offset: u64) -> io::Result { + unsafe { wasi::fd_pread(self.as_raw_fd() as wasi::Fd, iovec(bufs), offset).map_err(err2io) } + } + + pub fn pwrite(&self, bufs: &[IoSlice<'_>], offset: u64) -> io::Result { + unsafe { + wasi::fd_pwrite(self.as_raw_fd() as wasi::Fd, ciovec(bufs), offset).map_err(err2io) + } + } + + pub fn read(&self, bufs: &mut [IoSliceMut<'_>]) -> io::Result { + unsafe { wasi::fd_read(self.as_raw_fd() as wasi::Fd, iovec(bufs)).map_err(err2io) } + } + + pub fn read_buf(&self, mut buf: BorrowedCursor<'_>) -> io::Result<()> { + unsafe { + let bufs = [wasi::Iovec { + buf: buf.as_mut().as_mut_ptr() as *mut u8, + buf_len: buf.capacity(), + }]; + match wasi::fd_read(self.as_raw_fd() as wasi::Fd, &bufs) { + Ok(n) => { + buf.advance_unchecked(n); + Ok(()) + } + Err(e) => Err(err2io(e)), + } + } + } + + pub fn write(&self, bufs: &[IoSlice<'_>]) -> io::Result { + unsafe { wasi::fd_write(self.as_raw_fd() as wasi::Fd, ciovec(bufs)).map_err(err2io) } + } + + pub fn seek(&self, pos: SeekFrom) -> io::Result { + let (whence, offset) = match pos { + SeekFrom::Start(pos) => (wasi::WHENCE_SET, pos as i64), + SeekFrom::End(pos) => (wasi::WHENCE_END, pos), + SeekFrom::Current(pos) => (wasi::WHENCE_CUR, pos), + }; + unsafe { wasi::fd_seek(self.as_raw_fd() as wasi::Fd, offset, whence).map_err(err2io) } + } + + pub fn tell(&self) -> io::Result { + unsafe { wasi::fd_tell(self.as_raw_fd() as wasi::Fd).map_err(err2io) } + } + + // FIXME: __wasi_fd_fdstat_get + + pub fn set_flags(&self, flags: wasi::Fdflags) -> io::Result<()> { + unsafe { wasi::fd_fdstat_set_flags(self.as_raw_fd() as wasi::Fd, flags).map_err(err2io) } + } + + pub fn set_rights(&self, base: wasi::Rights, inheriting: wasi::Rights) -> io::Result<()> { + unsafe { + wasi::fd_fdstat_set_rights(self.as_raw_fd() as wasi::Fd, base, inheriting) + .map_err(err2io) + } + } + + pub fn sync(&self) -> io::Result<()> { + unsafe { wasi::fd_sync(self.as_raw_fd() as wasi::Fd).map_err(err2io) } + } + + pub(crate) fn advise(&self, offset: u64, len: u64, advice: wasi::Advice) -> io::Result<()> { + unsafe { + wasi::fd_advise(self.as_raw_fd() as wasi::Fd, offset, len, advice).map_err(err2io) + } + } + + pub fn allocate(&self, offset: u64, len: u64) -> io::Result<()> { + unsafe { wasi::fd_allocate(self.as_raw_fd() as wasi::Fd, offset, len).map_err(err2io) } + } + + pub fn create_directory(&self, path: &str) -> io::Result<()> { + unsafe { wasi::path_create_directory(self.as_raw_fd() as wasi::Fd, path).map_err(err2io) } + } + + pub fn link( + &self, + old_flags: wasi::Lookupflags, + old_path: &str, + new_fd: &WasiFd, + new_path: &str, + ) -> io::Result<()> { + unsafe { + wasi::path_link( + self.as_raw_fd() as wasi::Fd, + old_flags, + old_path, + new_fd.as_raw_fd() as wasi::Fd, + new_path, + ) + .map_err(err2io) + } + } + + pub fn open( + &self, + dirflags: wasi::Lookupflags, + path: &str, + oflags: wasi::Oflags, + fs_rights_base: wasi::Rights, + fs_rights_inheriting: wasi::Rights, + fs_flags: wasi::Fdflags, + ) -> io::Result { + unsafe { + wasi::path_open( + self.as_raw_fd() as wasi::Fd, + dirflags, + path, + oflags, + fs_rights_base, + fs_rights_inheriting, + fs_flags, + ) + .map(|fd| WasiFd::from_raw_fd(fd as RawFd)) + .map_err(err2io) + } + } + + pub fn readdir(&self, buf: &mut [u8], cookie: wasi::Dircookie) -> io::Result { + unsafe { + wasi::fd_readdir(self.as_raw_fd() as wasi::Fd, buf.as_mut_ptr(), buf.len(), cookie) + .map_err(err2io) + } + } + + pub fn readlink(&self, path: &str, buf: &mut [u8]) -> io::Result { + unsafe { + wasi::path_readlink(self.as_raw_fd() as wasi::Fd, path, buf.as_mut_ptr(), buf.len()) + .map_err(err2io) + } + } + + pub fn rename(&self, old_path: &str, new_fd: &WasiFd, new_path: &str) -> io::Result<()> { + unsafe { + wasi::path_rename( + self.as_raw_fd() as wasi::Fd, + old_path, + new_fd.as_raw_fd() as wasi::Fd, + new_path, + ) + .map_err(err2io) + } + } + + pub(crate) fn filestat_get(&self) -> io::Result { + unsafe { wasi::fd_filestat_get(self.as_raw_fd() as wasi::Fd).map_err(err2io) } + } + + pub fn filestat_set_times( + &self, + atim: wasi::Timestamp, + mtim: wasi::Timestamp, + fstflags: wasi::Fstflags, + ) -> io::Result<()> { + unsafe { + wasi::fd_filestat_set_times(self.as_raw_fd() as wasi::Fd, atim, mtim, fstflags) + .map_err(err2io) + } + } + + pub fn filestat_set_size(&self, size: u64) -> io::Result<()> { + unsafe { wasi::fd_filestat_set_size(self.as_raw_fd() as wasi::Fd, size).map_err(err2io) } + } + + pub(crate) fn path_filestat_get( + &self, + flags: wasi::Lookupflags, + path: &str, + ) -> io::Result { + unsafe { + wasi::path_filestat_get(self.as_raw_fd() as wasi::Fd, flags, path).map_err(err2io) + } + } + + pub fn path_filestat_set_times( + &self, + flags: wasi::Lookupflags, + path: &str, + atim: wasi::Timestamp, + mtim: wasi::Timestamp, + fstflags: wasi::Fstflags, + ) -> io::Result<()> { + unsafe { + wasi::path_filestat_set_times( + self.as_raw_fd() as wasi::Fd, + flags, + path, + atim, + mtim, + fstflags, + ) + .map_err(err2io) + } + } + + pub fn symlink(&self, old_path: &str, new_path: &str) -> io::Result<()> { + unsafe { + wasi::path_symlink(old_path, self.as_raw_fd() as wasi::Fd, new_path).map_err(err2io) + } + } + + pub fn unlink_file(&self, path: &str) -> io::Result<()> { + unsafe { wasi::path_unlink_file(self.as_raw_fd() as wasi::Fd, path).map_err(err2io) } + } + + pub fn remove_directory(&self, path: &str) -> io::Result<()> { + unsafe { wasi::path_remove_directory(self.as_raw_fd() as wasi::Fd, path).map_err(err2io) } + } + + pub fn sock_accept(&self, flags: wasi::Fdflags) -> io::Result { + unsafe { wasi::sock_accept(self.as_raw_fd() as wasi::Fd, flags).map_err(err2io) } + } + + pub fn sock_recv( + &self, + ri_data: &mut [IoSliceMut<'_>], + ri_flags: wasi::Riflags, + ) -> io::Result<(usize, wasi::Roflags)> { + unsafe { + wasi::sock_recv(self.as_raw_fd() as wasi::Fd, iovec(ri_data), ri_flags).map_err(err2io) + } + } + + pub fn sock_send(&self, si_data: &[IoSlice<'_>], si_flags: wasi::Siflags) -> io::Result { + unsafe { + wasi::sock_send(self.as_raw_fd() as wasi::Fd, ciovec(si_data), si_flags).map_err(err2io) + } + } + + pub fn sock_shutdown(&self, how: Shutdown) -> io::Result<()> { + let how = match how { + Shutdown::Read => wasi::SDFLAGS_RD, + Shutdown::Write => wasi::SDFLAGS_WR, + Shutdown::Both => wasi::SDFLAGS_WR | wasi::SDFLAGS_RD, + }; + unsafe { wasi::sock_shutdown(self.as_raw_fd() as wasi::Fd, how).map_err(err2io) } + } +} + +impl AsInner for WasiFd { + #[inline] + fn as_inner(&self) -> &OwnedFd { + &self.fd + } +} + +impl AsInnerMut for WasiFd { + #[inline] + fn as_inner_mut(&mut self) -> &mut OwnedFd { + &mut self.fd + } +} + +impl IntoInner for WasiFd { + fn into_inner(self) -> OwnedFd { + self.fd + } +} + +impl FromInner for WasiFd { + fn from_inner(owned_fd: OwnedFd) -> Self { + Self { fd: owned_fd } + } +} + +impl AsFd for WasiFd { + fn as_fd(&self) -> BorrowedFd<'_> { + self.fd.as_fd() + } +} + +impl AsRawFd for WasiFd { + #[inline] + fn as_raw_fd(&self) -> RawFd { + self.fd.as_raw_fd() + } +} + +impl IntoRawFd for WasiFd { + fn into_raw_fd(self) -> RawFd { + self.fd.into_raw_fd() + } +} + +impl FromRawFd for WasiFd { + unsafe fn from_raw_fd(raw_fd: RawFd) -> Self { + unsafe { Self { fd: FromRawFd::from_raw_fd(raw_fd) } } + } +} diff --git a/library/std/src/sys/fs/hermit.rs b/library/std/src/sys/fs/hermit.rs index f83a2f90ed22a..a9774bef9e338 100644 --- a/library/std/src/sys/fs/hermit.rs +++ b/library/std/src/sys/fs/hermit.rs @@ -1,4 +1,5 @@ use crate::ffi::{CStr, OsStr, OsString, c_char}; +use crate::fs::TryLockError; use crate::io::{self, BorrowedCursor, Error, ErrorKind, IoSlice, IoSliceMut, SeekFrom}; use crate::os::hermit::ffi::OsStringExt; use crate::os::hermit::hermit_abi::{ @@ -9,10 +10,10 @@ use crate::os::hermit::io::{AsFd, AsRawFd, BorrowedFd, FromRawFd, IntoRawFd, Raw use crate::path::{Path, PathBuf}; use crate::sync::Arc; use crate::sys::common::small_c_string::run_path_with_cstr; +use crate::sys::fd::FileDesc; pub use crate::sys::fs::common::{copy, exists}; -use crate::sys::pal::fd::FileDesc; use crate::sys::time::SystemTime; -use crate::sys::{cvt, unsupported}; +use crate::sys::{cvt, unsupported, unsupported_err}; use crate::sys_common::{AsInner, AsInnerMut, FromInner, IntoInner}; use crate::{fmt, mem}; @@ -366,12 +367,12 @@ impl File { unsupported() } - pub fn try_lock(&self) -> io::Result { - unsupported() + pub fn try_lock(&self) -> Result<(), TryLockError> { + Err(TryLockError::Error(unsupported_err())) } - pub fn try_lock_shared(&self) -> io::Result { - unsupported() + pub fn try_lock_shared(&self) -> Result<(), TryLockError> { + Err(TryLockError::Error(unsupported_err())) } pub fn unlock(&self) -> io::Result<()> { diff --git a/library/std/src/sys/fs/mod.rs b/library/std/src/sys/fs/mod.rs index 3b176d0d16c44..d55e28074fe8c 100644 --- a/library/std/src/sys/fs/mod.rs +++ b/library/std/src/sys/fs/mod.rs @@ -9,7 +9,7 @@ cfg_if::cfg_if! { if #[cfg(target_family = "unix")] { mod unix; use unix as imp; - pub use unix::{chown, fchown, lchown}; + pub use unix::{chown, fchown, lchown, mkfifo}; #[cfg(not(target_os = "fuchsia"))] pub use unix::chroot; pub(crate) use unix::debug_assert_fd_is_open; @@ -20,6 +20,7 @@ cfg_if::cfg_if! { mod windows; use windows as imp; pub use windows::{symlink_inner, junction_point}; + use crate::sys::path::with_native_path; } else if #[cfg(target_os = "hermit")] { mod hermit; use hermit as imp; @@ -39,7 +40,7 @@ cfg_if::cfg_if! { } // FIXME: Replace this with platform-specific path conversion functions. -#[cfg(not(target_family = "unix"))] +#[cfg(not(any(target_family = "unix", target_os = "windows")))] #[inline] pub fn with_native_path(path: &Path, f: &dyn Fn(&Path) -> io::Result) -> io::Result { f(path) @@ -51,7 +52,7 @@ pub use imp::{ }; pub fn read_dir(path: &Path) -> io::Result { - // FIXME: use with_native_path + // FIXME: use with_native_path on all platforms imp::readdir(path) } @@ -68,8 +69,11 @@ pub fn remove_dir(path: &Path) -> io::Result<()> { } pub fn remove_dir_all(path: &Path) -> io::Result<()> { - // FIXME: use with_native_path - imp::remove_dir_all(path) + // FIXME: use with_native_path on all platforms + #[cfg(not(windows))] + return imp::remove_dir_all(path); + #[cfg(windows)] + with_native_path(path, &imp::remove_dir_all) } pub fn read_link(path: &Path) -> io::Result { @@ -77,6 +81,10 @@ pub fn read_link(path: &Path) -> io::Result { } pub fn symlink(original: &Path, link: &Path) -> io::Result<()> { + // FIXME: use with_native_path on all platforms + #[cfg(windows)] + return imp::symlink(original, link); + #[cfg(not(windows))] with_native_path(original, &|original| { with_native_path(link, &|link| imp::symlink(original, link)) }) @@ -105,11 +113,17 @@ pub fn canonicalize(path: &Path) -> io::Result { } pub fn copy(from: &Path, to: &Path) -> io::Result { - // FIXME: use with_native_path - imp::copy(from, to) + // FIXME: use with_native_path on all platforms + #[cfg(not(windows))] + return imp::copy(from, to); + #[cfg(windows)] + with_native_path(from, &|from| with_native_path(to, &|to| imp::copy(from, to))) } pub fn exists(path: &Path) -> io::Result { - // FIXME: use with_native_path - imp::exists(path) + // FIXME: use with_native_path on all platforms + #[cfg(not(windows))] + return imp::exists(path); + #[cfg(windows)] + with_native_path(path, &imp::exists) } diff --git a/library/std/src/sys/fs/solid.rs b/library/std/src/sys/fs/solid.rs index 39de933b7248b..3bfb39bac95bc 100644 --- a/library/std/src/sys/fs/solid.rs +++ b/library/std/src/sys/fs/solid.rs @@ -2,6 +2,7 @@ use crate::ffi::{CStr, CString, OsStr, OsString}; use crate::fmt; +use crate::fs::TryLockError; use crate::io::{self, BorrowedCursor, IoSlice, IoSliceMut, SeekFrom}; use crate::mem::MaybeUninit; use crate::os::raw::{c_int, c_short}; @@ -11,7 +12,7 @@ use crate::sync::Arc; pub use crate::sys::fs::common::exists; use crate::sys::pal::{abi, error}; use crate::sys::time::SystemTime; -use crate::sys::unsupported; +use crate::sys::{unsupported, unsupported_err}; use crate::sys_common::ignore_notfound; type CIntNotMinusOne = core::num::niche_types::NotAllOnes; @@ -352,12 +353,12 @@ impl File { unsupported() } - pub fn try_lock(&self) -> io::Result { - unsupported() + pub fn try_lock(&self) -> Result<(), TryLockError> { + Err(TryLockError::Error(unsupported_err())) } - pub fn try_lock_shared(&self) -> io::Result { - unsupported() + pub fn try_lock_shared(&self) -> Result<(), TryLockError> { + Err(TryLockError::Error(unsupported_err())) } pub fn unlock(&self) -> io::Result<()> { diff --git a/library/std/src/sys/fs/uefi.rs b/library/std/src/sys/fs/uefi.rs index d6ae86bd3d26e..416c90b98b6d3 100644 --- a/library/std/src/sys/fs/uefi.rs +++ b/library/std/src/sys/fs/uefi.rs @@ -2,6 +2,7 @@ use r_efi::protocols::file; use crate::ffi::OsString; use crate::fmt; +use crate::fs::TryLockError; use crate::hash::Hash; use crate::io::{self, BorrowedCursor, IoSlice, IoSliceMut, SeekFrom}; use crate::path::{Path, PathBuf}; @@ -227,11 +228,11 @@ impl File { self.0 } - pub fn try_lock(&self) -> io::Result { + pub fn try_lock(&self) -> Result<(), TryLockError> { self.0 } - pub fn try_lock_shared(&self) -> io::Result { + pub fn try_lock_shared(&self) -> Result<(), TryLockError> { self.0 } diff --git a/library/std/src/sys/fs/unix.rs b/library/std/src/sys/fs/unix.rs index 87865be0387d5..863358596c199 100644 --- a/library/std/src/sys/fs/unix.rs +++ b/library/std/src/sys/fs/unix.rs @@ -12,10 +12,11 @@ use libc::c_char; all(target_os = "linux", not(target_env = "musl")), target_os = "android", target_os = "fuchsia", - target_os = "hurd" + target_os = "hurd", + target_os = "illumos", ))] use libc::dirfd; -#[cfg(target_os = "fuchsia")] +#[cfg(any(target_os = "fuchsia", target_os = "illumos"))] use libc::fstatat as fstatat64; #[cfg(any(all(target_os = "linux", not(target_env = "musl")), target_os = "hurd"))] use libc::fstatat64; @@ -74,6 +75,7 @@ use libc::{dirent64, fstat64, ftruncate64, lseek64, lstat64, off64_t, open64, st use crate::ffi::{CStr, OsStr, OsString}; use crate::fmt::{self, Write as _}; +use crate::fs::TryLockError; use crate::io::{self, BorrowedCursor, Error, IoSlice, IoSliceMut, SeekFrom}; use crate::os::unix::io::{AsFd, AsRawFd, BorrowedFd, FromRawFd, IntoRawFd}; use crate::os::unix::prelude::*; @@ -146,14 +148,14 @@ cfg_has_statx! {{ flags: i32, mask: u32, ) -> Option> { - use crate::sync::atomic::{AtomicU8, Ordering}; + use crate::sync::atomic::{Atomic, AtomicU8, Ordering}; // Linux kernel prior to 4.11 or glibc prior to glibc 2.28 don't support `statx`. // We check for it on first failure and remember availability to avoid having to // do it again. #[repr(u8)] enum STATX_STATE{ Unknown = 0, Present, Unavailable } - static STATX_SAVED_STATE: AtomicU8 = AtomicU8::new(STATX_STATE::Unknown as u8); + static STATX_SAVED_STATE: Atomic = AtomicU8::new(STATX_STATE::Unknown as u8); syscall!( fn statx( @@ -892,7 +894,8 @@ impl DirEntry { all(target_os = "linux", not(target_env = "musl")), target_os = "android", target_os = "fuchsia", - target_os = "hurd" + target_os = "hurd", + target_os = "illumos", ), not(miri) // no dirfd on Miri ))] @@ -922,6 +925,7 @@ impl DirEntry { target_os = "android", target_os = "fuchsia", target_os = "hurd", + target_os = "illumos", )), miri ))] @@ -1307,15 +1311,17 @@ impl File { target_os = "netbsd", target_vendor = "apple", ))] - pub fn try_lock(&self) -> io::Result { + pub fn try_lock(&self) -> Result<(), TryLockError> { let result = cvt(unsafe { libc::flock(self.as_raw_fd(), libc::LOCK_EX | libc::LOCK_NB) }); - if let Err(ref err) = result { + if let Err(err) = result { if err.kind() == io::ErrorKind::WouldBlock { - return Ok(false); + Err(TryLockError::WouldBlock) + } else { + Err(TryLockError::Error(err)) } + } else { + Ok(()) } - result?; - return Ok(true); } #[cfg(not(any( @@ -1325,8 +1331,11 @@ impl File { target_os = "netbsd", target_vendor = "apple", )))] - pub fn try_lock(&self) -> io::Result { - Err(io::const_error!(io::ErrorKind::Unsupported, "try_lock() not supported")) + pub fn try_lock(&self) -> Result<(), TryLockError> { + Err(TryLockError::Error(io::const_error!( + io::ErrorKind::Unsupported, + "try_lock() not supported" + ))) } #[cfg(any( @@ -1336,15 +1345,17 @@ impl File { target_os = "netbsd", target_vendor = "apple", ))] - pub fn try_lock_shared(&self) -> io::Result { + pub fn try_lock_shared(&self) -> Result<(), TryLockError> { let result = cvt(unsafe { libc::flock(self.as_raw_fd(), libc::LOCK_SH | libc::LOCK_NB) }); - if let Err(ref err) = result { + if let Err(err) = result { if err.kind() == io::ErrorKind::WouldBlock { - return Ok(false); + Err(TryLockError::WouldBlock) + } else { + Err(TryLockError::Error(err)) } + } else { + Ok(()) } - result?; - return Ok(true); } #[cfg(not(any( @@ -1354,8 +1365,11 @@ impl File { target_os = "netbsd", target_vendor = "apple", )))] - pub fn try_lock_shared(&self) -> io::Result { - Err(io::const_error!(io::ErrorKind::Unsupported, "try_lock_shared() not supported")) + pub fn try_lock_shared(&self) -> Result<(), TryLockError> { + Err(TryLockError::Error(io::const_error!( + io::ErrorKind::Unsupported, + "try_lock_shared() not supported" + ))) } #[cfg(any( @@ -1463,20 +1477,6 @@ impl File { Ok(()) } - // FIXME(#115199): Rust currently omits weak function definitions - // and its metadata from LLVM IR. - #[cfg_attr( - any( - target_os = "android", - all( - target_os = "linux", - target_env = "gnu", - target_pointer_width = "32", - not(target_arch = "riscv32") - ) - ), - no_sanitize(cfi) - )] pub fn set_times(&self, times: FileTimes) -> io::Result<()> { #[cfg(not(any( target_os = "redox", @@ -2148,6 +2148,12 @@ pub fn chroot(dir: &Path) -> io::Result<()> { Err(io::const_error!(io::ErrorKind::Unsupported, "chroot not supported by vxworks")) } +pub fn mkfifo(path: &Path, mode: u32) -> io::Result<()> { + run_path_with_cstr(path, &|path| { + cvt(unsafe { libc::mkfifo(path.as_ptr(), mode.try_into().unwrap()) }).map(|_| ()) + }) +} + pub use remove_dir_impl::remove_dir_all; // Fallback for REDOX, ESP-ID, Horizon, Vita, Vxworks and Miri diff --git a/library/std/src/sys/fs/unsupported.rs b/library/std/src/sys/fs/unsupported.rs index 45e93deffa3a4..0ff9533c04734 100644 --- a/library/std/src/sys/fs/unsupported.rs +++ b/library/std/src/sys/fs/unsupported.rs @@ -1,5 +1,6 @@ use crate::ffi::OsString; use crate::fmt; +use crate::fs::TryLockError; use crate::hash::{Hash, Hasher}; use crate::io::{self, BorrowedCursor, IoSlice, IoSliceMut, SeekFrom}; use crate::path::{Path, PathBuf}; @@ -206,11 +207,11 @@ impl File { self.0 } - pub fn try_lock(&self) -> io::Result { + pub fn try_lock(&self) -> Result<(), TryLockError> { self.0 } - pub fn try_lock_shared(&self) -> io::Result { + pub fn try_lock_shared(&self) -> Result<(), TryLockError> { self.0 } diff --git a/library/std/src/sys/fs/wasi.rs b/library/std/src/sys/fs/wasi.rs index 773040571bc97..ebfc7377a2ead 100644 --- a/library/std/src/sys/fs/wasi.rs +++ b/library/std/src/sys/fs/wasi.rs @@ -1,4 +1,5 @@ use crate::ffi::{CStr, OsStr, OsString}; +use crate::fs::TryLockError; use crate::io::{self, BorrowedCursor, IoSlice, IoSliceMut, SeekFrom}; use crate::mem::{self, ManuallyDrop}; use crate::os::raw::c_int; @@ -10,7 +11,7 @@ use crate::sys::common::small_c_string::run_path_with_cstr; use crate::sys::fd::WasiFd; pub use crate::sys::fs::common::exists; use crate::sys::time::SystemTime; -use crate::sys::unsupported; +use crate::sys::{unsupported, unsupported_err}; use crate::sys_common::{AsInner, FromInner, IntoInner, ignore_notfound}; use crate::{fmt, iter, ptr}; @@ -461,12 +462,12 @@ impl File { unsupported() } - pub fn try_lock(&self) -> io::Result { - unsupported() + pub fn try_lock(&self) -> Result<(), TryLockError> { + Err(TryLockError::Error(unsupported_err())) } - pub fn try_lock_shared(&self) -> io::Result { - unsupported() + pub fn try_lock_shared(&self) -> Result<(), TryLockError> { + Err(TryLockError::Error(unsupported_err())) } pub fn unlock(&self) -> io::Result<()> { diff --git a/library/std/src/sys/fs/windows.rs b/library/std/src/sys/fs/windows.rs index 06bba019393a5..9039fd00f5d62 100644 --- a/library/std/src/sys/fs/windows.rs +++ b/library/std/src/sys/fs/windows.rs @@ -3,6 +3,7 @@ use crate::alloc::{Layout, alloc, dealloc}; use crate::borrow::Cow; use crate::ffi::{OsStr, OsString, c_void}; +use crate::fs::TryLockError; use crate::io::{self, BorrowedCursor, Error, IoSlice, IoSliceMut, SeekFrom}; use crate::mem::{self, MaybeUninit, offset_of}; use crate::os::windows::io::{AsHandle, BorrowedHandle}; @@ -12,7 +13,7 @@ use crate::sync::Arc; use crate::sys::handle::Handle; use crate::sys::pal::api::{self, WinError, set_file_information_by_handle}; use crate::sys::pal::{IoResult, fill_utf16_buf, to_u16s, truncate_utf16_at_nul}; -use crate::sys::path::maybe_verbatim; +use crate::sys::path::{WCStr, maybe_verbatim}; use crate::sys::time::SystemTime; use crate::sys::{Align8, c, cvt}; use crate::sys_common::{AsInner, FromInner, IntoInner}; @@ -298,10 +299,12 @@ impl OpenOptions { impl File { pub fn open(path: &Path, opts: &OpenOptions) -> io::Result { let path = maybe_verbatim(path)?; + // SAFETY: maybe_verbatim returns null-terminated strings + let path = unsafe { WCStr::from_wchars_with_null_unchecked(&path) }; Self::open_native(&path, opts) } - fn open_native(path: &[u16], opts: &OpenOptions) -> io::Result { + fn open_native(path: &WCStr, opts: &OpenOptions) -> io::Result { let creation = opts.get_creation_mode()?; let handle = unsafe { c::CreateFileW( @@ -397,7 +400,7 @@ impl File { self.acquire_lock(0) } - pub fn try_lock(&self) -> io::Result { + pub fn try_lock(&self) -> Result<(), TryLockError> { let result = cvt(unsafe { let mut overlapped = mem::zeroed(); c::LockFileEx( @@ -411,18 +414,18 @@ impl File { }); match result { - Ok(_) => Ok(true), + Ok(_) => Ok(()), Err(err) if err.raw_os_error() == Some(c::ERROR_IO_PENDING as i32) || err.raw_os_error() == Some(c::ERROR_LOCK_VIOLATION as i32) => { - Ok(false) + Err(TryLockError::WouldBlock) } - Err(err) => Err(err), + Err(err) => Err(TryLockError::Error(err)), } } - pub fn try_lock_shared(&self) -> io::Result { + pub fn try_lock_shared(&self) -> Result<(), TryLockError> { let result = cvt(unsafe { let mut overlapped = mem::zeroed(); c::LockFileEx( @@ -436,14 +439,14 @@ impl File { }); match result { - Ok(_) => Ok(true), + Ok(_) => Ok(()), Err(err) if err.raw_os_error() == Some(c::ERROR_IO_PENDING as i32) || err.raw_os_error() == Some(c::ERROR_LOCK_VIOLATION as i32) => { - Ok(false) + Err(TryLockError::WouldBlock) } - Err(err) => Err(err), + Err(err) => Err(TryLockError::Error(err)), } } @@ -547,7 +550,7 @@ impl File { ))?; attr.file_size = info.AllocationSize as u64; attr.number_of_links = Some(info.NumberOfLinks); - if attr.file_type().is_reparse_point() { + if attr.attributes & c::FILE_ATTRIBUTE_REPARSE_POINT != 0 { let mut attr_tag: c::FILE_ATTRIBUTE_TAG_INFO = mem::zeroed(); cvt(c::GetFileInformationByHandleEx( self.handle.as_raw_handle(), @@ -1212,9 +1215,8 @@ pub fn readdir(p: &Path) -> io::Result { } } -pub fn unlink(p: &Path) -> io::Result<()> { - let p_u16s = maybe_verbatim(p)?; - if unsafe { c::DeleteFileW(p_u16s.as_ptr()) } == 0 { +pub fn unlink(path: &WCStr) -> io::Result<()> { + if unsafe { c::DeleteFileW(path.as_ptr()) } == 0 { let err = api::get_last_error(); // if `DeleteFileW` fails with ERROR_ACCESS_DENIED then try to remove // the file while ignoring the readonly attribute. @@ -1223,7 +1225,7 @@ pub fn unlink(p: &Path) -> io::Result<()> { let mut opts = OpenOptions::new(); opts.access_mode(c::DELETE); opts.custom_flags(c::FILE_FLAG_OPEN_REPARSE_POINT); - if let Ok(f) = File::open_native(&p_u16s, &opts) { + if let Ok(f) = File::open_native(&path, &opts) { if f.posix_delete().is_ok() { return Ok(()); } @@ -1236,10 +1238,7 @@ pub fn unlink(p: &Path) -> io::Result<()> { } } -pub fn rename(old: &Path, new: &Path) -> io::Result<()> { - let old = maybe_verbatim(old)?; - let new = maybe_verbatim(new)?; - +pub fn rename(old: &WCStr, new: &WCStr) -> io::Result<()> { if unsafe { c::MoveFileExW(old.as_ptr(), new.as_ptr(), c::MOVEFILE_REPLACE_EXISTING) } == 0 { let err = api::get_last_error(); // if `MoveFileExW` fails with ERROR_ACCESS_DENIED then try to move @@ -1253,7 +1252,8 @@ pub fn rename(old: &Path, new: &Path) -> io::Result<()> { // Calculate the layout of the `FILE_RENAME_INFO` we pass to `SetFileInformation` // This is a dynamically sized struct so we need to get the position of the last field to calculate the actual size. - let Ok(new_len_without_nul_in_bytes): Result = ((new.len() - 1) * 2).try_into() + let Ok(new_len_without_nul_in_bytes): Result = + ((new.count_bytes() - 1) * 2).try_into() else { return Err(err).io_result(); }; @@ -1282,7 +1282,7 @@ pub fn rename(old: &Path, new: &Path) -> io::Result<()> { new.as_ptr().copy_to_nonoverlapping( (&raw mut (*file_rename_info).FileName).cast::(), - new.len(), + new.count_bytes(), ); } @@ -1309,20 +1309,19 @@ pub fn rename(old: &Path, new: &Path) -> io::Result<()> { Ok(()) } -pub fn rmdir(p: &Path) -> io::Result<()> { - let p = maybe_verbatim(p)?; +pub fn rmdir(p: &WCStr) -> io::Result<()> { cvt(unsafe { c::RemoveDirectoryW(p.as_ptr()) })?; Ok(()) } -pub fn remove_dir_all(path: &Path) -> io::Result<()> { +pub fn remove_dir_all(path: &WCStr) -> io::Result<()> { // Open a file or directory without following symlinks. let mut opts = OpenOptions::new(); opts.access_mode(c::FILE_LIST_DIRECTORY); // `FILE_FLAG_BACKUP_SEMANTICS` allows opening directories. // `FILE_FLAG_OPEN_REPARSE_POINT` opens a link instead of its target. opts.custom_flags(c::FILE_FLAG_BACKUP_SEMANTICS | c::FILE_FLAG_OPEN_REPARSE_POINT); - let file = File::open(path, &opts)?; + let file = File::open_native(path, &opts)?; // Test if the file is not a directory or a symlink to a directory. if (file.basic_info()?.FileAttributes & c::FILE_ATTRIBUTE_DIRECTORY) == 0 { @@ -1333,14 +1332,14 @@ pub fn remove_dir_all(path: &Path) -> io::Result<()> { remove_dir_all_iterative(file).io_result() } -pub fn readlink(path: &Path) -> io::Result { +pub fn readlink(path: &WCStr) -> io::Result { // Open the link with no access mode, instead of generic read. // By default FILE_LIST_DIRECTORY is denied for the junction "C:\Documents and Settings", so // this is needed for a common case. let mut opts = OpenOptions::new(); opts.access_mode(0); opts.custom_flags(c::FILE_FLAG_OPEN_REPARSE_POINT | c::FILE_FLAG_BACKUP_SEMANTICS); - let file = File::open(path, &opts)?; + let file = File::open_native(&path, &opts)?; file.readlink() } @@ -1378,19 +1377,17 @@ pub fn symlink_inner(original: &Path, link: &Path, dir: bool) -> io::Result<()> } #[cfg(not(target_vendor = "uwp"))] -pub fn link(original: &Path, link: &Path) -> io::Result<()> { - let original = maybe_verbatim(original)?; - let link = maybe_verbatim(link)?; +pub fn link(original: &WCStr, link: &WCStr) -> io::Result<()> { cvt(unsafe { c::CreateHardLinkW(link.as_ptr(), original.as_ptr(), ptr::null_mut()) })?; Ok(()) } #[cfg(target_vendor = "uwp")] -pub fn link(_original: &Path, _link: &Path) -> io::Result<()> { +pub fn link(_original: &WCStr, _link: &WCStr) -> io::Result<()> { return Err(io::const_error!(io::ErrorKind::Unsupported, "hard link are not supported on UWP")); } -pub fn stat(path: &Path) -> io::Result { +pub fn stat(path: &WCStr) -> io::Result { match metadata(path, ReparsePoint::Follow) { Err(err) if err.raw_os_error() == Some(c::ERROR_CANT_ACCESS_FILE as i32) => { if let Ok(attrs) = lstat(path) { @@ -1404,7 +1401,7 @@ pub fn stat(path: &Path) -> io::Result { } } -pub fn lstat(path: &Path) -> io::Result { +pub fn lstat(path: &WCStr) -> io::Result { metadata(path, ReparsePoint::Open) } @@ -1420,7 +1417,7 @@ impl ReparsePoint { } } -fn metadata(path: &Path, reparse: ReparsePoint) -> io::Result { +fn metadata(path: &WCStr, reparse: ReparsePoint) -> io::Result { let mut opts = OpenOptions::new(); // No read or write permissions are necessary opts.access_mode(0); @@ -1429,7 +1426,7 @@ fn metadata(path: &Path, reparse: ReparsePoint) -> io::Result { // Attempt to open the file normally. // If that fails with `ERROR_SHARING_VIOLATION` then retry using `FindFirstFileExW`. // If the fallback fails for any reason we return the original error. - match File::open(path, &opts) { + match File::open_native(&path, &opts) { Ok(file) => file.file_attr(), Err(e) if [Some(c::ERROR_SHARING_VIOLATION as _), Some(c::ERROR_ACCESS_DENIED as _)] @@ -1442,8 +1439,6 @@ fn metadata(path: &Path, reparse: ReparsePoint) -> io::Result { // However, there are special system files, such as // `C:\hiberfil.sys`, that are locked in a way that denies even that. unsafe { - let path = maybe_verbatim(path)?; - // `FindFirstFileExW` accepts wildcard file names. // Fortunately wildcards are not valid file names and // `ERROR_SHARING_VIOLATION` means the file exists (but is locked) @@ -1482,8 +1477,7 @@ fn metadata(path: &Path, reparse: ReparsePoint) -> io::Result { } } -pub fn set_perm(p: &Path, perm: FilePermissions) -> io::Result<()> { - let p = maybe_verbatim(p)?; +pub fn set_perm(p: &WCStr, perm: FilePermissions) -> io::Result<()> { unsafe { cvt(c::SetFileAttributesW(p.as_ptr(), perm.attrs))?; Ok(()) @@ -1499,17 +1493,17 @@ fn get_path(f: &File) -> io::Result { ) } -pub fn canonicalize(p: &Path) -> io::Result { +pub fn canonicalize(p: &WCStr) -> io::Result { let mut opts = OpenOptions::new(); // No read or write permissions are necessary opts.access_mode(0); // This flag is so we can open directories too opts.custom_flags(c::FILE_FLAG_BACKUP_SEMANTICS); - let f = File::open(p, &opts)?; + let f = File::open_native(p, &opts)?; get_path(&f) } -pub fn copy(from: &Path, to: &Path) -> io::Result { +pub fn copy(from: &WCStr, to: &WCStr) -> io::Result { unsafe extern "system" fn callback( _TotalFileSize: i64, _TotalBytesTransferred: i64, @@ -1528,13 +1522,11 @@ pub fn copy(from: &Path, to: &Path) -> io::Result { c::PROGRESS_CONTINUE } } - let pfrom = maybe_verbatim(from)?; - let pto = maybe_verbatim(to)?; let mut size = 0i64; cvt(unsafe { c::CopyFileExW( - pfrom.as_ptr(), - pto.as_ptr(), + from.as_ptr(), + to.as_ptr(), Some(callback), (&raw mut size) as *mut _, ptr::null_mut(), @@ -1624,14 +1616,14 @@ pub fn junction_point(original: &Path, link: &Path) -> io::Result<()> { } // Try to see if a file exists but, unlike `exists`, report I/O errors. -pub fn exists(path: &Path) -> io::Result { +pub fn exists(path: &WCStr) -> io::Result { // Open the file to ensure any symlinks are followed to their target. let mut opts = OpenOptions::new(); // No read, write, etc access rights are needed. opts.access_mode(0); // Backup semantics enables opening directories as well as files. opts.custom_flags(c::FILE_FLAG_BACKUP_SEMANTICS); - match File::open(path, &opts) { + match File::open_native(path, &opts) { Err(e) => match e.kind() { // The file definitely does not exist io::ErrorKind::NotFound => Ok(false), diff --git a/library/std/src/sys/fs/windows/remove_dir_all.rs b/library/std/src/sys/fs/windows/remove_dir_all.rs index f51eced84164f..06734f9e3097b 100644 --- a/library/std/src/sys/fs/windows/remove_dir_all.rs +++ b/library/std/src/sys/fs/windows/remove_dir_all.rs @@ -29,7 +29,7 @@ //! race but we do make a best effort such that it *should* do so. use core::ptr; -use core::sync::atomic::{AtomicU32, Ordering}; +use core::sync::atomic::{Atomic, AtomicU32, Ordering}; use super::{AsRawHandle, DirBuff, File, FromRawHandle}; use crate::sys::c; @@ -87,7 +87,7 @@ fn open_link_no_reparse( // The `OBJ_DONT_REPARSE` attribute ensures that we haven't been // tricked into following a symlink. However, it may not be available in // earlier versions of Windows. - static ATTRIBUTES: AtomicU32 = AtomicU32::new(c::OBJ_DONT_REPARSE); + static ATTRIBUTES: Atomic = AtomicU32::new(c::OBJ_DONT_REPARSE); let result = unsafe { let mut path_str = c::UNICODE_STRING::from_ref(path); @@ -95,7 +95,7 @@ fn open_link_no_reparse( ObjectName: &mut path_str, RootDirectory: parent.as_raw_handle(), Attributes: ATTRIBUTES.load(Ordering::Relaxed), - ..c::OBJECT_ATTRIBUTES::default() + ..c::OBJECT_ATTRIBUTES::with_length() }; let share = c::FILE_SHARE_DELETE | c::FILE_SHARE_READ | c::FILE_SHARE_WRITE; let options = c::FILE_OPEN_REPARSE_POINT | options; diff --git a/library/std/src/sys/io/io_slice/iovec.rs b/library/std/src/sys/io/io_slice/iovec.rs index 072191315f7c5..df56358969a39 100644 --- a/library/std/src/sys/io/io_slice/iovec.rs +++ b/library/std/src/sys/io/io_slice/iovec.rs @@ -1,6 +1,6 @@ #[cfg(target_os = "hermit")] use hermit_abi::iovec; -#[cfg(target_family = "unix")] +#[cfg(any(target_family = "unix", target_os = "trusty"))] use libc::iovec; use crate::ffi::c_void; diff --git a/library/std/src/sys/io/mod.rs b/library/std/src/sys/io/mod.rs index e00b479109f39..4d0365d42fd9b 100644 --- a/library/std/src/sys/io/mod.rs +++ b/library/std/src/sys/io/mod.rs @@ -2,7 +2,7 @@ mod io_slice { cfg_if::cfg_if! { - if #[cfg(any(target_family = "unix", target_os = "hermit", target_os = "solid_asp3"))] { + if #[cfg(any(target_family = "unix", target_os = "hermit", target_os = "solid_asp3", target_os = "trusty"))] { mod iovec; pub use iovec::*; } else if #[cfg(target_os = "windows")] { diff --git a/library/std/src/sys/mod.rs b/library/std/src/sys/mod.rs index f0cfb9b277366..f9a02b522e5e1 100644 --- a/library/std/src/sys/mod.rs +++ b/library/std/src/sys/mod.rs @@ -9,9 +9,13 @@ mod alloc; mod personality; pub mod anonymous_pipe; +pub mod args; pub mod backtrace; pub mod cmath; +pub mod env; +pub mod env_consts; pub mod exit_guard; +pub mod fd; pub mod fs; pub mod io; pub mod net; diff --git a/library/std/src/sys/net/connection/socket/unix.rs b/library/std/src/sys/net/connection/socket/unix.rs index bbe1e038dccf5..b35d5d2aa8418 100644 --- a/library/std/src/sys/net/connection/socket/unix.rs +++ b/library/std/src/sys/net/connection/socket/unix.rs @@ -1,5 +1,6 @@ use libc::{MSG_PEEK, c_int, c_void, size_t, sockaddr, socklen_t}; +#[cfg(not(any(target_os = "espidf", target_os = "nuttx")))] use crate::ffi::CStr; use crate::io::{self, BorrowedBuf, BorrowedCursor, IoSlice, IoSliceMut}; use crate::net::{Shutdown, SocketAddr}; diff --git a/library/std/src/sys/net/connection/xous/tcplistener.rs b/library/std/src/sys/net/connection/xous/tcplistener.rs index 7f13ca5592040..bdf1fcd9302b7 100644 --- a/library/std/src/sys/net/connection/xous/tcplistener.rs +++ b/library/std/src/sys/net/connection/xous/tcplistener.rs @@ -1,5 +1,5 @@ use core::convert::TryInto; -use core::sync::atomic::{AtomicBool, AtomicU16, AtomicUsize, Ordering}; +use core::sync::atomic::{Atomic, AtomicBool, AtomicU16, AtomicUsize, Ordering}; use super::*; use crate::net::{IpAddr, Ipv4Addr, Ipv6Addr, SocketAddr}; @@ -18,10 +18,10 @@ macro_rules! unimpl { #[derive(Clone)] pub struct TcpListener { - fd: Arc, + fd: Arc>, local: SocketAddr, - handle_count: Arc, - nonblocking: Arc, + handle_count: Arc>, + nonblocking: Arc>, } impl TcpListener { diff --git a/library/std/src/sys/net/connection/xous/tcpstream.rs b/library/std/src/sys/net/connection/xous/tcpstream.rs index e8aea8b706a58..5452476745265 100644 --- a/library/std/src/sys/net/connection/xous/tcpstream.rs +++ b/library/std/src/sys/net/connection/xous/tcpstream.rs @@ -1,4 +1,4 @@ -use core::sync::atomic::{AtomicBool, AtomicU32, AtomicUsize, Ordering}; +use core::sync::atomic::{Atomic, AtomicBool, AtomicU32, AtomicUsize, Ordering}; use super::*; use crate::fmt; @@ -29,11 +29,11 @@ pub struct TcpStream { remote_port: u16, peer_addr: SocketAddr, // milliseconds - read_timeout: Arc, + read_timeout: Arc>, // milliseconds - write_timeout: Arc, - handle_count: Arc, - nonblocking: Arc, + write_timeout: Arc>, + handle_count: Arc>, + nonblocking: Arc>, } fn sockaddr_to_buf(duration: Duration, addr: &SocketAddr, buf: &mut [u8]) { diff --git a/library/std/src/sys/net/connection/xous/udp.rs b/library/std/src/sys/net/connection/xous/udp.rs index c112c04ce94bc..2127d3267ed47 100644 --- a/library/std/src/sys/net/connection/xous/udp.rs +++ b/library/std/src/sys/net/connection/xous/udp.rs @@ -1,5 +1,5 @@ use core::convert::TryInto; -use core::sync::atomic::{AtomicUsize, Ordering}; +use core::sync::atomic::{Atomic, AtomicUsize, Ordering}; use super::*; use crate::cell::Cell; @@ -27,7 +27,7 @@ pub struct UdpSocket { read_timeout: Cell, // in milliseconds. The setting applies only to `send` calls after the timeout is set. write_timeout: Cell, - handle_count: Arc, + handle_count: Arc>, nonblocking: Cell, } diff --git a/library/std/src/sys/os_str/bytes.rs b/library/std/src/sys/os_str/bytes.rs index dfff2d3e5d31d..4a8808c923045 100644 --- a/library/std/src/sys/os_str/bytes.rs +++ b/library/std/src/sys/os_str/bytes.rs @@ -216,19 +216,26 @@ impl Buf { self.as_slice().into_rc() } - /// Provides plumbing to core `Vec::truncate`. - /// More well behaving alternative to allowing outer types - /// full mutable access to the core `Vec`. - #[inline] - pub(crate) fn truncate(&mut self, len: usize) { + /// Provides plumbing to `Vec::truncate` without giving full mutable access + /// to the `Vec`. + /// + /// # Safety + /// + /// The length must be at an `OsStr` boundary, according to + /// `Slice::check_public_boundary`. + #[inline] + pub unsafe fn truncate_unchecked(&mut self, len: usize) { self.inner.truncate(len); } - /// Provides plumbing to core `Vec::extend_from_slice`. - /// More well behaving alternative to allowing outer types - /// full mutable access to the core `Vec`. + /// Provides plumbing to `Vec::extend_from_slice` without giving full + /// mutable access to the `Vec`. + /// + /// # Safety + /// + /// This encoding has no safety requirements. #[inline] - pub(crate) fn extend_from_slice(&mut self, other: &[u8]) { + pub unsafe fn extend_from_slice_unchecked(&mut self, other: &[u8]) { self.inner.extend_from_slice(other); } } diff --git a/library/std/src/sys/os_str/wtf8.rs b/library/std/src/sys/os_str/wtf8.rs index a32f5d40f6a9c..5174ea65d0cd9 100644 --- a/library/std/src/sys/os_str/wtf8.rs +++ b/library/std/src/sys/os_str/wtf8.rs @@ -195,19 +195,31 @@ impl Buf { self.as_slice().into_rc() } - /// Provides plumbing to core `Vec::truncate`. - /// More well behaving alternative to allowing outer types - /// full mutable access to the core `Vec`. - #[inline] - pub(crate) fn truncate(&mut self, len: usize) { + /// Provides plumbing to `Vec::truncate` without giving full mutable access + /// to the `Vec`. + /// + /// # Safety + /// + /// The length must be at an `OsStr` boundary, according to + /// `Slice::check_public_boundary`. + #[inline] + pub unsafe fn truncate_unchecked(&mut self, len: usize) { self.inner.truncate(len); } - /// Provides plumbing to core `Vec::extend_from_slice`. - /// More well behaving alternative to allowing outer types - /// full mutable access to the core `Vec`. + /// Provides plumbing to `Vec::extend_from_slice` without giving full + /// mutable access to the `Vec`. + /// + /// # Safety + /// + /// The slice must be valid for the platform encoding (as described in + /// [`Slice::from_encoded_bytes_unchecked`]). + /// + /// This bypasses the WTF-8 surrogate joining, so `self` must not end with a + /// leading surrogate half and `other` must not start with with a trailing + /// surrogate half. #[inline] - pub(crate) fn extend_from_slice(&mut self, other: &[u8]) { + pub unsafe fn extend_from_slice_unchecked(&mut self, other: &[u8]) { self.inner.extend_from_slice(other); } } diff --git a/library/std/src/sys/pal/hermit/args.rs b/library/std/src/sys/pal/hermit/args.rs deleted file mode 100644 index 4402426027730..0000000000000 --- a/library/std/src/sys/pal/hermit/args.rs +++ /dev/null @@ -1,66 +0,0 @@ -use crate::ffi::{CStr, OsString, c_char}; -use crate::os::hermit::ffi::OsStringExt; -use crate::sync::atomic::Ordering::{Acquire, Relaxed, Release}; -use crate::sync::atomic::{AtomicIsize, AtomicPtr}; -use crate::{fmt, ptr, vec}; - -static ARGC: AtomicIsize = AtomicIsize::new(0); -static ARGV: AtomicPtr<*const u8> = AtomicPtr::new(ptr::null_mut()); - -/// One-time global initialization. -pub unsafe fn init(argc: isize, argv: *const *const u8) { - ARGC.store(argc, Relaxed); - // Use release ordering here to broadcast writes by the OS. - ARGV.store(argv as *mut *const u8, Release); -} - -/// Returns the command line arguments -pub fn args() -> Args { - // Synchronize with the store above. - let argv = ARGV.load(Acquire); - // If argv has not been initialized yet, do not return any arguments. - let argc = if argv.is_null() { 0 } else { ARGC.load(Relaxed) }; - let args: Vec = (0..argc) - .map(|i| unsafe { - let cstr = CStr::from_ptr(*argv.offset(i) as *const c_char); - OsStringExt::from_vec(cstr.to_bytes().to_vec()) - }) - .collect(); - - Args { iter: args.into_iter() } -} - -pub struct Args { - iter: vec::IntoIter, -} - -impl fmt::Debug for Args { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - self.iter.as_slice().fmt(f) - } -} - -impl !Send for Args {} -impl !Sync for Args {} - -impl Iterator for Args { - type Item = OsString; - fn next(&mut self) -> Option { - self.iter.next() - } - fn size_hint(&self) -> (usize, Option) { - self.iter.size_hint() - } -} - -impl ExactSizeIterator for Args { - fn len(&self) -> usize { - self.iter.len() - } -} - -impl DoubleEndedIterator for Args { - fn next_back(&mut self) -> Option { - self.iter.next_back() - } -} diff --git a/library/std/src/sys/pal/hermit/env.rs b/library/std/src/sys/pal/hermit/env.rs deleted file mode 100644 index 7a0fcb31ef2e8..0000000000000 --- a/library/std/src/sys/pal/hermit/env.rs +++ /dev/null @@ -1,9 +0,0 @@ -pub mod os { - pub const FAMILY: &str = ""; - pub const OS: &str = "hermit"; - pub const DLL_PREFIX: &str = ""; - pub const DLL_SUFFIX: &str = ""; - pub const DLL_EXTENSION: &str = ""; - pub const EXE_SUFFIX: &str = ""; - pub const EXE_EXTENSION: &str = ""; -} diff --git a/library/std/src/sys/pal/hermit/fd.rs b/library/std/src/sys/pal/hermit/fd.rs deleted file mode 100644 index edd984d920a1b..0000000000000 --- a/library/std/src/sys/pal/hermit/fd.rs +++ /dev/null @@ -1,175 +0,0 @@ -#![unstable(reason = "not public", issue = "none", feature = "fd")] - -use super::hermit_abi; -use crate::cmp; -use crate::io::{self, BorrowedCursor, IoSlice, IoSliceMut, Read, SeekFrom}; -use crate::os::hermit::io::{AsFd, AsRawFd, BorrowedFd, FromRawFd, IntoRawFd, OwnedFd, RawFd}; -use crate::sys::{cvt, unsupported}; -use crate::sys_common::{AsInner, FromInner, IntoInner}; - -const fn max_iov() -> usize { - hermit_abi::IOV_MAX -} - -#[derive(Debug)] -pub struct FileDesc { - fd: OwnedFd, -} - -impl FileDesc { - pub fn read(&self, buf: &mut [u8]) -> io::Result { - let result = - cvt(unsafe { hermit_abi::read(self.fd.as_raw_fd(), buf.as_mut_ptr(), buf.len()) })?; - Ok(result as usize) - } - - pub fn read_buf(&self, mut buf: BorrowedCursor<'_>) -> io::Result<()> { - // SAFETY: The `read` syscall does not read from the buffer, so it is - // safe to use `&mut [MaybeUninit]`. - let result = cvt(unsafe { - hermit_abi::read( - self.fd.as_raw_fd(), - buf.as_mut().as_mut_ptr() as *mut u8, - buf.capacity(), - ) - })?; - // SAFETY: Exactly `result` bytes have been filled. - unsafe { buf.advance_unchecked(result as usize) }; - Ok(()) - } - - pub fn read_vectored(&self, bufs: &mut [IoSliceMut<'_>]) -> io::Result { - let ret = cvt(unsafe { - hermit_abi::readv( - self.as_raw_fd(), - bufs.as_mut_ptr() as *mut hermit_abi::iovec as *const hermit_abi::iovec, - cmp::min(bufs.len(), max_iov()), - ) - })?; - Ok(ret as usize) - } - - #[inline] - pub fn is_read_vectored(&self) -> bool { - true - } - - pub fn read_to_end(&self, buf: &mut Vec) -> io::Result { - let mut me = self; - (&mut me).read_to_end(buf) - } - - pub fn write(&self, buf: &[u8]) -> io::Result { - let result = - cvt(unsafe { hermit_abi::write(self.fd.as_raw_fd(), buf.as_ptr(), buf.len()) })?; - Ok(result as usize) - } - - pub fn write_vectored(&self, bufs: &[IoSlice<'_>]) -> io::Result { - let ret = cvt(unsafe { - hermit_abi::writev( - self.as_raw_fd(), - bufs.as_ptr() as *const hermit_abi::iovec, - cmp::min(bufs.len(), max_iov()), - ) - })?; - Ok(ret as usize) - } - - #[inline] - pub fn is_write_vectored(&self) -> bool { - true - } - - pub fn seek(&self, pos: SeekFrom) -> io::Result { - let (whence, pos) = match pos { - // Casting to `i64` is fine, too large values will end up as - // negative which will cause an error in `lseek`. - SeekFrom::Start(off) => (hermit_abi::SEEK_SET, off as i64), - SeekFrom::End(off) => (hermit_abi::SEEK_END, off), - SeekFrom::Current(off) => (hermit_abi::SEEK_CUR, off), - }; - let n = cvt(unsafe { hermit_abi::lseek(self.as_raw_fd(), pos as isize, whence) })?; - Ok(n as u64) - } - - pub fn tell(&self) -> io::Result { - self.seek(SeekFrom::Current(0)) - } - - pub fn duplicate(&self) -> io::Result { - self.duplicate_path(&[]) - } - - pub fn duplicate_path(&self, _path: &[u8]) -> io::Result { - unsupported() - } - - pub fn nonblocking(&self) -> io::Result { - Ok(false) - } - - pub fn set_cloexec(&self) -> io::Result<()> { - unsupported() - } - - pub fn set_nonblocking(&self, _nonblocking: bool) -> io::Result<()> { - unsupported() - } - - pub fn fstat(&self, stat: *mut hermit_abi::stat) -> io::Result<()> { - cvt(unsafe { hermit_abi::fstat(self.fd.as_raw_fd(), stat) })?; - Ok(()) - } -} - -impl<'a> Read for &'a FileDesc { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - (**self).read(buf) - } -} - -impl IntoInner for FileDesc { - fn into_inner(self) -> OwnedFd { - self.fd - } -} - -impl FromInner for FileDesc { - fn from_inner(owned_fd: OwnedFd) -> Self { - Self { fd: owned_fd } - } -} - -impl FromRawFd for FileDesc { - unsafe fn from_raw_fd(raw_fd: RawFd) -> Self { - let fd = unsafe { OwnedFd::from_raw_fd(raw_fd) }; - Self { fd } - } -} - -impl AsInner for FileDesc { - #[inline] - fn as_inner(&self) -> &OwnedFd { - &self.fd - } -} - -impl AsFd for FileDesc { - fn as_fd(&self) -> BorrowedFd<'_> { - self.fd.as_fd() - } -} - -impl AsRawFd for FileDesc { - #[inline] - fn as_raw_fd(&self) -> RawFd { - self.fd.as_raw_fd() - } -} - -impl IntoRawFd for FileDesc { - fn into_raw_fd(self) -> RawFd { - self.fd.into_raw_fd() - } -} diff --git a/library/std/src/sys/pal/hermit/futex.rs b/library/std/src/sys/pal/hermit/futex.rs index 670383b45aca9..78c86071fdd53 100644 --- a/library/std/src/sys/pal/hermit/futex.rs +++ b/library/std/src/sys/pal/hermit/futex.rs @@ -1,19 +1,19 @@ use super::hermit_abi; use crate::ptr::null; -use crate::sync::atomic::AtomicU32; +use crate::sync::atomic::Atomic; use crate::time::Duration; /// An atomic for use as a futex that is at least 32-bits but may be larger -pub type Futex = AtomicU32; +pub type Futex = Atomic; /// Must be the underlying type of Futex pub type Primitive = u32; /// An atomic for use as a futex that is at least 8-bits but may be larger. -pub type SmallFutex = AtomicU32; +pub type SmallFutex = Atomic; /// Must be the underlying type of SmallFutex pub type SmallPrimitive = u32; -pub fn futex_wait(futex: &AtomicU32, expected: u32, timeout: Option) -> bool { +pub fn futex_wait(futex: &Atomic, expected: u32, timeout: Option) -> bool { // Calculate the timeout as a relative timespec. // // Overflows are rounded up to an infinite timeout (None). @@ -37,12 +37,12 @@ pub fn futex_wait(futex: &AtomicU32, expected: u32, timeout: Option) - } #[inline] -pub fn futex_wake(futex: &AtomicU32) -> bool { +pub fn futex_wake(futex: &Atomic) -> bool { unsafe { hermit_abi::futex_wake(futex.as_ptr(), 1) > 0 } } #[inline] -pub fn futex_wake_all(futex: &AtomicU32) { +pub fn futex_wake_all(futex: &Atomic) { unsafe { hermit_abi::futex_wake(futex.as_ptr(), i32::MAX); } diff --git a/library/std/src/sys/pal/hermit/mod.rs b/library/std/src/sys/pal/hermit/mod.rs index 67eab96fa4034..ea636938d703f 100644 --- a/library/std/src/sys/pal/hermit/mod.rs +++ b/library/std/src/sys/pal/hermit/mod.rs @@ -16,11 +16,11 @@ #![deny(unsafe_op_in_unsafe_fn)] #![allow(missing_docs, nonstandard_style)] +use crate::io::ErrorKind; +use crate::os::hermit::hermit_abi; use crate::os::raw::c_char; +use crate::sys::env; -pub mod args; -pub mod env; -pub mod fd; pub mod futex; pub mod os; #[path = "../unsupported/pipe.rs"] @@ -28,9 +28,6 @@ pub mod pipe; pub mod thread; pub mod time; -use crate::io::ErrorKind; -use crate::os::hermit::hermit_abi; - pub fn unsupported() -> crate::io::Result { Err(unsupported_err()) } @@ -59,7 +56,7 @@ pub extern "C" fn __rust_abort() { // NOTE: this is not guaranteed to run, for example when Rust code is called externally. pub unsafe fn init(argc: isize, argv: *const *const u8, _sigpipe: u8) { unsafe { - args::init(argc, argv); + crate::sys::args::init(argc, argv); } } @@ -79,7 +76,7 @@ pub unsafe extern "C" fn runtime_entry( } // initialize environment - os::init_environment(env); + env::init(env); let result = unsafe { main(argc as isize, argv) }; diff --git a/library/std/src/sys/pal/hermit/os.rs b/library/std/src/sys/pal/hermit/os.rs index 791cdb1e57e7d..a998c3165e52f 100644 --- a/library/std/src/sys/pal/hermit/os.rs +++ b/library/std/src/sys/pal/hermit/os.rs @@ -1,15 +1,10 @@ -use core::slice::memchr; - use super::hermit_abi; -use crate::collections::HashMap; use crate::error::Error as StdError; -use crate::ffi::{CStr, OsStr, OsString, c_char}; +use crate::ffi::{OsStr, OsString}; use crate::marker::PhantomData; -use crate::os::hermit::ffi::OsStringExt; use crate::path::{self, PathBuf}; -use crate::sync::Mutex; use crate::sys::unsupported; -use crate::{fmt, io, str, vec}; +use crate::{fmt, io, str}; pub fn errno() -> i32 { unsafe { hermit_abi::get_errno() } @@ -68,115 +63,6 @@ pub fn current_exe() -> io::Result { unsupported() } -static ENV: Mutex>> = Mutex::new(None); - -pub fn init_environment(env: *const *const c_char) { - let mut guard = ENV.lock().unwrap(); - let map = guard.insert(HashMap::new()); - - if env.is_null() { - return; - } - - unsafe { - let mut environ = env; - while !(*environ).is_null() { - if let Some((key, value)) = parse(CStr::from_ptr(*environ).to_bytes()) { - map.insert(key, value); - } - environ = environ.add(1); - } - } - - fn parse(input: &[u8]) -> Option<(OsString, OsString)> { - // Strategy (copied from glibc): Variable name and value are separated - // by an ASCII equals sign '='. Since a variable name must not be - // empty, allow variable names starting with an equals sign. Skip all - // malformed lines. - if input.is_empty() { - return None; - } - let pos = memchr::memchr(b'=', &input[1..]).map(|p| p + 1); - pos.map(|p| { - ( - OsStringExt::from_vec(input[..p].to_vec()), - OsStringExt::from_vec(input[p + 1..].to_vec()), - ) - }) - } -} - -pub struct Env { - iter: vec::IntoIter<(OsString, OsString)>, -} - -// FIXME(https://github.com/rust-lang/rust/issues/114583): Remove this when ::fmt matches ::fmt. -pub struct EnvStrDebug<'a> { - slice: &'a [(OsString, OsString)], -} - -impl fmt::Debug for EnvStrDebug<'_> { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - let Self { slice } = self; - f.debug_list() - .entries(slice.iter().map(|(a, b)| (a.to_str().unwrap(), b.to_str().unwrap()))) - .finish() - } -} - -impl Env { - pub fn str_debug(&self) -> impl fmt::Debug + '_ { - let Self { iter } = self; - EnvStrDebug { slice: iter.as_slice() } - } -} - -impl fmt::Debug for Env { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - let Self { iter } = self; - f.debug_list().entries(iter.as_slice()).finish() - } -} - -impl !Send for Env {} -impl !Sync for Env {} - -impl Iterator for Env { - type Item = (OsString, OsString); - fn next(&mut self) -> Option<(OsString, OsString)> { - self.iter.next() - } - fn size_hint(&self) -> (usize, Option) { - self.iter.size_hint() - } -} - -/// Returns a vector of (variable, value) byte-vector pairs for all the -/// environment variables of the current process. -pub fn env() -> Env { - let guard = ENV.lock().unwrap(); - let env = guard.as_ref().unwrap(); - - let result = env.iter().map(|(key, value)| (key.clone(), value.clone())).collect::>(); - - Env { iter: result.into_iter() } -} - -pub fn getenv(k: &OsStr) -> Option { - ENV.lock().unwrap().as_ref().unwrap().get(k).cloned() -} - -pub unsafe fn setenv(k: &OsStr, v: &OsStr) -> io::Result<()> { - let (k, v) = (k.to_owned(), v.to_owned()); - ENV.lock().unwrap().as_mut().unwrap().insert(k, v); - Ok(()) -} - -pub unsafe fn unsetenv(k: &OsStr) -> io::Result<()> { - ENV.lock().unwrap().as_mut().unwrap().remove(k); - Ok(()) -} - pub fn temp_dir() -> PathBuf { PathBuf::from("/tmp") } diff --git a/library/std/src/sys/pal/itron/spin.rs b/library/std/src/sys/pal/itron/spin.rs index 6a9a7c72deb7d..bc4f83260bbd0 100644 --- a/library/std/src/sys/pal/itron/spin.rs +++ b/library/std/src/sys/pal/itron/spin.rs @@ -1,12 +1,12 @@ use super::abi; use crate::cell::UnsafeCell; use crate::mem::MaybeUninit; -use crate::sync::atomic::{AtomicBool, AtomicUsize, Ordering}; +use crate::sync::atomic::{Atomic, AtomicBool, AtomicUsize, Ordering}; /// A mutex implemented by `dis_dsp` (for intra-core synchronization) and a /// spinlock (for inter-core synchronization). pub struct SpinMutex { - locked: AtomicBool, + locked: Atomic, data: UnsafeCell, } @@ -19,7 +19,7 @@ impl SpinMutex { /// Acquire a lock. #[inline] pub fn with_locked(&self, f: impl FnOnce(&mut T) -> R) -> R { - struct SpinMutexGuard<'a>(&'a AtomicBool); + struct SpinMutexGuard<'a>(&'a Atomic); impl Drop for SpinMutexGuard<'_> { #[inline] @@ -50,7 +50,7 @@ impl SpinMutex { /// It's assumed that `0` is not a valid ID, and all kernel /// object IDs fall into range `1..=usize::MAX`. pub struct SpinIdOnceCell { - id: AtomicUsize, + id: Atomic, spin: SpinMutex<()>, extra: UnsafeCell>, } diff --git a/library/std/src/sys/pal/itron/thread.rs b/library/std/src/sys/pal/itron/thread.rs index d1481f827e1e1..a974f4f17ae67 100644 --- a/library/std/src/sys/pal/itron/thread.rs +++ b/library/std/src/sys/pal/itron/thread.rs @@ -9,7 +9,7 @@ use crate::ffi::CStr; use crate::mem::ManuallyDrop; use crate::num::NonZero; use crate::ptr::NonNull; -use crate::sync::atomic::{AtomicUsize, Ordering}; +use crate::sync::atomic::{Atomic, AtomicUsize, Ordering}; use crate::time::Duration; use crate::{hint, io}; @@ -64,7 +64,7 @@ struct ThreadInner { /// '--> JOIN_FINALIZE ---' /// (-1) /// - lifecycle: AtomicUsize, + lifecycle: Atomic, } // Safety: The only `!Sync` field, `ThreadInner::start`, is only touched by diff --git a/library/std/src/sys/pal/sgx/abi/mod.rs b/library/std/src/sys/pal/sgx/abi/mod.rs index 2c805a4d0af01..57247cffad3f2 100644 --- a/library/std/src/sys/pal/sgx/abi/mod.rs +++ b/library/std/src/sys/pal/sgx/abi/mod.rs @@ -1,7 +1,7 @@ #![cfg_attr(test, allow(unused))] // RT initialization logic is not compiled for test use core::arch::global_asm; -use core::sync::atomic::{AtomicUsize, Ordering}; +use core::sync::atomic::{Atomic, AtomicUsize, Ordering}; use crate::io::Write; @@ -31,7 +31,7 @@ unsafe extern "C" fn tcs_init(secondary: bool) { const BUSY: usize = 1; const DONE: usize = 2; // Three-state spin-lock - static RELOC_STATE: AtomicUsize = AtomicUsize::new(UNINIT); + static RELOC_STATE: Atomic = AtomicUsize::new(UNINIT); if secondary && RELOC_STATE.load(Ordering::Relaxed) != DONE { rtabort!("Entered secondary TCS before main TCS!") diff --git a/library/std/src/sys/pal/sgx/abi/tls/mod.rs b/library/std/src/sys/pal/sgx/abi/tls/mod.rs index 8e2b271f1c970..41e38b6961680 100644 --- a/library/std/src/sys/pal/sgx/abi/tls/mod.rs +++ b/library/std/src/sys/pal/sgx/abi/tls/mod.rs @@ -3,7 +3,7 @@ mod sync_bitset; use self::sync_bitset::*; use crate::cell::Cell; use crate::num::NonZero; -use crate::sync::atomic::{AtomicUsize, Ordering}; +use crate::sync::atomic::{Atomic, AtomicUsize, Ordering}; use crate::{mem, ptr}; #[cfg(target_pointer_width = "64")] @@ -11,16 +11,14 @@ const USIZE_BITS: usize = 64; const TLS_KEYS: usize = 128; // Same as POSIX minimum const TLS_KEYS_BITSET_SIZE: usize = (TLS_KEYS + (USIZE_BITS - 1)) / USIZE_BITS; +// Specifying linkage/symbol name is solely to ensure a single instance between this crate and its unit tests #[cfg_attr(test, linkage = "available_externally")] -#[unsafe(export_name = "_ZN16__rust_internals3std3sys3sgx3abi3tls14TLS_KEY_IN_USEE")] +#[unsafe(export_name = "_ZN16__rust_internals3std3sys3pal3sgx3abi3tls14TLS_KEY_IN_USEE")] static TLS_KEY_IN_USE: SyncBitset = SYNC_BITSET_INIT; -macro_rules! dup { - ((* $($exp:tt)*) $($val:tt)*) => (dup!( ($($exp)*) $($val)* $($val)* )); - (() $($val:tt)*) => ([$($val),*]) -} +// Specifying linkage/symbol name is solely to ensure a single instance between this crate and its unit tests #[cfg_attr(test, linkage = "available_externally")] -#[unsafe(export_name = "_ZN16__rust_internals3std3sys3sgx3abi3tls14TLS_DESTRUCTORE")] -static TLS_DESTRUCTOR: [AtomicUsize; TLS_KEYS] = dup!((* * * * * * *) (AtomicUsize::new(0))); +#[unsafe(export_name = "_ZN16__rust_internals3std3sys3pal3sgx3abi3tls14TLS_DESTRUCTORE")] +static TLS_DESTRUCTOR: [Atomic; TLS_KEYS] = [const { AtomicUsize::new(0) }; TLS_KEYS]; unsafe extern "C" { fn get_tls_ptr() -> *const u8; @@ -82,7 +80,7 @@ impl<'a> Drop for ActiveTls<'a> { impl Tls { pub fn new() -> Tls { - Tls { data: dup!((* * * * * * *) (Cell::new(ptr::null_mut()))) } + Tls { data: [const { Cell::new(ptr::null_mut()) }; TLS_KEYS] } } pub unsafe fn activate(&self) -> ActiveTls<'_> { diff --git a/library/std/src/sys/pal/sgx/abi/tls/sync_bitset.rs b/library/std/src/sys/pal/sgx/abi/tls/sync_bitset.rs index 4eeff8f6ef773..9087168589aa5 100644 --- a/library/std/src/sys/pal/sgx/abi/tls/sync_bitset.rs +++ b/library/std/src/sys/pal/sgx/abi/tls/sync_bitset.rs @@ -4,10 +4,10 @@ mod tests; use super::{TLS_KEYS_BITSET_SIZE, USIZE_BITS}; use crate::iter::{Enumerate, Peekable}; use crate::slice::Iter; -use crate::sync::atomic::{AtomicUsize, Ordering}; +use crate::sync::atomic::{Atomic, AtomicUsize, Ordering}; /// A bitset that can be used synchronously. -pub(super) struct SyncBitset([AtomicUsize; TLS_KEYS_BITSET_SIZE]); +pub(super) struct SyncBitset([Atomic; TLS_KEYS_BITSET_SIZE]); pub(super) const SYNC_BITSET_INIT: SyncBitset = SyncBitset([AtomicUsize::new(0), AtomicUsize::new(0)]); @@ -58,7 +58,7 @@ impl SyncBitset { } pub(super) struct SyncBitsetIter<'a> { - iter: Peekable>>, + iter: Peekable>>>, elem_idx: usize, } diff --git a/library/std/src/sys/pal/sgx/abi/usercalls/alloc.rs b/library/std/src/sys/pal/sgx/abi/usercalls/alloc.rs index 3fe6dee3d6f4f..a60b83213fd96 100644 --- a/library/std/src/sys/pal/sgx/abi/usercalls/alloc.rs +++ b/library/std/src/sys/pal/sgx/abi/usercalls/alloc.rs @@ -675,7 +675,7 @@ where /// Obtain the number of elements in this user slice. pub fn len(&self) -> usize { - unsafe { (*self.0.get()).len() } + unsafe { self.0.get().len() } } /// Copies the value from user memory and appends it to `dest`. diff --git a/library/std/src/sys/pal/sgx/args.rs b/library/std/src/sys/pal/sgx/args.rs deleted file mode 100644 index e62bf383954eb..0000000000000 --- a/library/std/src/sys/pal/sgx/args.rs +++ /dev/null @@ -1,59 +0,0 @@ -use super::abi::usercalls::alloc; -use super::abi::usercalls::raw::ByteBuffer; -use crate::ffi::OsString; -use crate::sync::atomic::{AtomicUsize, Ordering}; -use crate::sys::os_str::Buf; -use crate::sys_common::FromInner; -use crate::{fmt, slice}; - -#[cfg_attr(test, linkage = "available_externally")] -#[unsafe(export_name = "_ZN16__rust_internals3std3sys3sgx4args4ARGSE")] -static ARGS: AtomicUsize = AtomicUsize::new(0); -type ArgsStore = Vec; - -#[cfg_attr(test, allow(dead_code))] -pub unsafe fn init(argc: isize, argv: *const *const u8) { - if argc != 0 { - let args = unsafe { alloc::User::<[ByteBuffer]>::from_raw_parts(argv as _, argc as _) }; - let args = args - .iter() - .map(|a| OsString::from_inner(Buf { inner: a.copy_user_buffer() })) - .collect::(); - ARGS.store(Box::into_raw(Box::new(args)) as _, Ordering::Relaxed); - } -} - -pub fn args() -> Args { - let args = unsafe { (ARGS.load(Ordering::Relaxed) as *const ArgsStore).as_ref() }; - if let Some(args) = args { Args(args.iter()) } else { Args([].iter()) } -} - -pub struct Args(slice::Iter<'static, OsString>); - -impl fmt::Debug for Args { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - self.0.as_slice().fmt(f) - } -} - -impl Iterator for Args { - type Item = OsString; - fn next(&mut self) -> Option { - self.0.next().cloned() - } - fn size_hint(&self) -> (usize, Option) { - self.0.size_hint() - } -} - -impl ExactSizeIterator for Args { - fn len(&self) -> usize { - self.0.len() - } -} - -impl DoubleEndedIterator for Args { - fn next_back(&mut self) -> Option { - self.0.next_back().cloned() - } -} diff --git a/library/std/src/sys/pal/sgx/env.rs b/library/std/src/sys/pal/sgx/env.rs deleted file mode 100644 index 8043b7c5213a1..0000000000000 --- a/library/std/src/sys/pal/sgx/env.rs +++ /dev/null @@ -1,9 +0,0 @@ -pub mod os { - pub const FAMILY: &str = ""; - pub const OS: &str = ""; - pub const DLL_PREFIX: &str = ""; - pub const DLL_SUFFIX: &str = ".sgxs"; - pub const DLL_EXTENSION: &str = "sgxs"; - pub const EXE_SUFFIX: &str = ".sgxs"; - pub const EXE_EXTENSION: &str = "sgxs"; -} diff --git a/library/std/src/sys/pal/sgx/fd.rs b/library/std/src/sys/pal/sgx/fd.rs deleted file mode 100644 index 399f6a1648984..0000000000000 --- a/library/std/src/sys/pal/sgx/fd.rs +++ /dev/null @@ -1,85 +0,0 @@ -use fortanix_sgx_abi::Fd; - -use super::abi::usercalls; -use crate::io::{self, BorrowedCursor, IoSlice, IoSliceMut}; -use crate::mem::ManuallyDrop; -use crate::sys::{AsInner, FromInner, IntoInner}; - -#[derive(Debug)] -pub struct FileDesc { - fd: Fd, -} - -impl FileDesc { - pub fn new(fd: Fd) -> FileDesc { - FileDesc { fd } - } - - pub fn raw(&self) -> Fd { - self.fd - } - - /// Extracts the actual file descriptor without closing it. - pub fn into_raw(self) -> Fd { - ManuallyDrop::new(self).fd - } - - pub fn read(&self, buf: &mut [u8]) -> io::Result { - usercalls::read(self.fd, &mut [IoSliceMut::new(buf)]) - } - - pub fn read_buf(&self, buf: BorrowedCursor<'_>) -> io::Result<()> { - usercalls::read_buf(self.fd, buf) - } - - pub fn read_vectored(&self, bufs: &mut [IoSliceMut<'_>]) -> io::Result { - usercalls::read(self.fd, bufs) - } - - #[inline] - pub fn is_read_vectored(&self) -> bool { - true - } - - pub fn write(&self, buf: &[u8]) -> io::Result { - usercalls::write(self.fd, &[IoSlice::new(buf)]) - } - - pub fn write_vectored(&self, bufs: &[IoSlice<'_>]) -> io::Result { - usercalls::write(self.fd, bufs) - } - - #[inline] - pub fn is_write_vectored(&self) -> bool { - true - } - - pub fn flush(&self) -> io::Result<()> { - usercalls::flush(self.fd) - } -} - -impl AsInner for FileDesc { - #[inline] - fn as_inner(&self) -> &Fd { - &self.fd - } -} - -impl IntoInner for FileDesc { - fn into_inner(self) -> Fd { - ManuallyDrop::new(self).fd - } -} - -impl FromInner for FileDesc { - fn from_inner(fd: Fd) -> FileDesc { - FileDesc { fd } - } -} - -impl Drop for FileDesc { - fn drop(&mut self) { - usercalls::close(self.fd) - } -} diff --git a/library/std/src/sys/pal/sgx/mod.rs b/library/std/src/sys/pal/sgx/mod.rs index fe43cfd2caf7b..3932f64c0ef44 100644 --- a/library/std/src/sys/pal/sgx/mod.rs +++ b/library/std/src/sys/pal/sgx/mod.rs @@ -6,12 +6,9 @@ #![allow(fuzzy_provenance_casts)] // FIXME: this entire module systematically confuses pointers and integers use crate::io::ErrorKind; -use crate::sync::atomic::{AtomicBool, Ordering}; +use crate::sync::atomic::{Atomic, AtomicBool, Ordering}; pub mod abi; -pub mod args; -pub mod env; -pub mod fd; mod libunwind_integration; pub mod os; #[path = "../unsupported/pipe.rs"] @@ -25,7 +22,7 @@ pub mod waitqueue; // NOTE: this is not guaranteed to run, for example when Rust code is called externally. pub unsafe fn init(argc: isize, argv: *const *const u8, _sigpipe: u8) { unsafe { - args::init(argc, argv); + crate::sys::args::init(argc, argv); } } @@ -49,7 +46,7 @@ pub fn unsupported_err() -> crate::io::Error { /// what happens when `SGX_INEFFECTIVE_ERROR` is set to `true`. If it is /// `false`, the behavior is the same as `unsupported`. pub fn sgx_ineffective(v: T) -> crate::io::Result { - static SGX_INEFFECTIVE_ERROR: AtomicBool = AtomicBool::new(false); + static SGX_INEFFECTIVE_ERROR: Atomic = AtomicBool::new(false); if SGX_INEFFECTIVE_ERROR.load(Ordering::Relaxed) { Err(crate::io::const_error!( ErrorKind::Uncategorized, diff --git a/library/std/src/sys/pal/sgx/os.rs b/library/std/src/sys/pal/sgx/os.rs index b1ec2afd764e6..70f838679c9ca 100644 --- a/library/std/src/sys/pal/sgx/os.rs +++ b/library/std/src/sys/pal/sgx/os.rs @@ -1,14 +1,11 @@ use fortanix_sgx_abi::{Error, RESULT_SUCCESS}; -use crate::collections::HashMap; use crate::error::Error as StdError; use crate::ffi::{OsStr, OsString}; use crate::marker::PhantomData; use crate::path::{self, PathBuf}; -use crate::sync::atomic::{AtomicUsize, Ordering}; -use crate::sync::{Mutex, Once}; use crate::sys::{decode_error_kind, sgx_ineffective, unsupported}; -use crate::{fmt, io, str, vec}; +use crate::{fmt, io, str}; pub fn errno() -> i32 { RESULT_SUCCESS @@ -73,99 +70,6 @@ pub fn current_exe() -> io::Result { unsupported() } -#[cfg_attr(test, linkage = "available_externally")] -#[unsafe(export_name = "_ZN16__rust_internals3std3sys3sgx2os3ENVE")] -static ENV: AtomicUsize = AtomicUsize::new(0); -#[cfg_attr(test, linkage = "available_externally")] -#[unsafe(export_name = "_ZN16__rust_internals3std3sys3sgx2os8ENV_INITE")] -static ENV_INIT: Once = Once::new(); -type EnvStore = Mutex>; - -fn get_env_store() -> Option<&'static EnvStore> { - unsafe { (ENV.load(Ordering::Relaxed) as *const EnvStore).as_ref() } -} - -fn create_env_store() -> &'static EnvStore { - ENV_INIT.call_once(|| { - ENV.store(Box::into_raw(Box::new(EnvStore::default())) as _, Ordering::Relaxed) - }); - unsafe { &*(ENV.load(Ordering::Relaxed) as *const EnvStore) } -} - -pub struct Env { - iter: vec::IntoIter<(OsString, OsString)>, -} - -// FIXME(https://github.com/rust-lang/rust/issues/114583): Remove this when ::fmt matches ::fmt. -pub struct EnvStrDebug<'a> { - slice: &'a [(OsString, OsString)], -} - -impl fmt::Debug for EnvStrDebug<'_> { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - let Self { slice } = self; - f.debug_list() - .entries(slice.iter().map(|(a, b)| (a.to_str().unwrap(), b.to_str().unwrap()))) - .finish() - } -} - -impl Env { - pub fn str_debug(&self) -> impl fmt::Debug + '_ { - let Self { iter } = self; - EnvStrDebug { slice: iter.as_slice() } - } -} - -impl fmt::Debug for Env { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - let Self { iter } = self; - f.debug_list().entries(iter.as_slice()).finish() - } -} - -impl !Send for Env {} -impl !Sync for Env {} - -impl Iterator for Env { - type Item = (OsString, OsString); - fn next(&mut self) -> Option<(OsString, OsString)> { - self.iter.next() - } - fn size_hint(&self) -> (usize, Option) { - self.iter.size_hint() - } -} - -pub fn env() -> Env { - let clone_to_vec = |map: &HashMap| -> Vec<_> { - map.iter().map(|(k, v)| (k.clone(), v.clone())).collect() - }; - - let iter = get_env_store() - .map(|env| clone_to_vec(&env.lock().unwrap())) - .unwrap_or_default() - .into_iter(); - Env { iter } -} - -pub fn getenv(k: &OsStr) -> Option { - get_env_store().and_then(|s| s.lock().unwrap().get(k).cloned()) -} - -pub unsafe fn setenv(k: &OsStr, v: &OsStr) -> io::Result<()> { - let (k, v) = (k.to_owned(), v.to_owned()); - create_env_store().lock().unwrap().insert(k, v); - Ok(()) -} - -pub unsafe fn unsetenv(k: &OsStr) -> io::Result<()> { - if let Some(env) = get_env_store() { - env.lock().unwrap().remove(k); - } - Ok(()) -} - pub fn temp_dir() -> PathBuf { panic!("no filesystem in SGX") } diff --git a/library/std/src/sys/pal/sgx/thread.rs b/library/std/src/sys/pal/sgx/thread.rs index b6932df431f42..219ef1b7a9897 100644 --- a/library/std/src/sys/pal/sgx/thread.rs +++ b/library/std/src/sys/pal/sgx/thread.rs @@ -45,8 +45,9 @@ mod task_queue { } } + // Specifying linkage/symbol name is solely to ensure a single instance between this crate and its unit tests #[cfg_attr(test, linkage = "available_externally")] - #[unsafe(export_name = "_ZN16__rust_internals3std3sys3sgx6thread10TASK_QUEUEE")] + #[unsafe(export_name = "_ZN16__rust_internals3std3sys3pal3sgx6thread10TASK_QUEUEE")] static TASK_QUEUE: Mutex> = Mutex::new(Vec::new()); pub(super) fn lock() -> MutexGuard<'static, Vec> { diff --git a/library/std/src/sys/pal/sgx/waitqueue/spin_mutex.rs b/library/std/src/sys/pal/sgx/waitqueue/spin_mutex.rs index f6e851ccaddfa..73c7a101d601d 100644 --- a/library/std/src/sys/pal/sgx/waitqueue/spin_mutex.rs +++ b/library/std/src/sys/pal/sgx/waitqueue/spin_mutex.rs @@ -7,12 +7,12 @@ mod tests; use crate::cell::UnsafeCell; use crate::hint; use crate::ops::{Deref, DerefMut}; -use crate::sync::atomic::{AtomicBool, Ordering}; +use crate::sync::atomic::{Atomic, AtomicBool, Ordering}; #[derive(Default)] pub struct SpinMutex { value: UnsafeCell, - lock: AtomicBool, + lock: Atomic, } unsafe impl Send for SpinMutex {} diff --git a/library/std/src/sys/pal/solid/env.rs b/library/std/src/sys/pal/solid/env.rs deleted file mode 100644 index 6855c113b2893..0000000000000 --- a/library/std/src/sys/pal/solid/env.rs +++ /dev/null @@ -1,9 +0,0 @@ -pub mod os { - pub const FAMILY: &str = "itron"; - pub const OS: &str = "solid"; - pub const DLL_PREFIX: &str = ""; - pub const DLL_SUFFIX: &str = ".so"; - pub const DLL_EXTENSION: &str = "so"; - pub const EXE_SUFFIX: &str = ""; - pub const EXE_EXTENSION: &str = ""; -} diff --git a/library/std/src/sys/pal/solid/mod.rs b/library/std/src/sys/pal/solid/mod.rs index 22052a168fd15..0011cf256df74 100644 --- a/library/std/src/sys/pal/solid/mod.rs +++ b/library/std/src/sys/pal/solid/mod.rs @@ -16,9 +16,6 @@ pub mod itron { use super::unsupported; } -#[path = "../unsupported/args.rs"] -pub mod args; -pub mod env; // `error` is `pub(crate)` so that it can be accessed by `itron/error.rs` as // `crate::sys::error` pub(crate) mod error; diff --git a/library/std/src/sys/pal/solid/os.rs b/library/std/src/sys/pal/solid/os.rs index e3b2e0aa50f4a..8f5976b0592ec 100644 --- a/library/std/src/sys/pal/solid/os.rs +++ b/library/std/src/sys/pal/solid/os.rs @@ -1,14 +1,8 @@ -use core::slice::memchr; - use super::{error, itron, unsupported}; use crate::error::Error as StdError; -use crate::ffi::{CStr, OsStr, OsString}; -use crate::os::raw::{c_char, c_int}; -use crate::os::solid::ffi::{OsStrExt, OsStringExt}; +use crate::ffi::{OsStr, OsString}; use crate::path::{self, PathBuf}; -use crate::sync::{PoisonError, RwLock}; -use crate::sys::common::small_c_string::run_with_cstr; -use crate::{fmt, io, vec}; +use crate::{fmt, io}; // `solid` directly maps `errno`s to μITRON error codes. impl itron::error::ItronError { @@ -75,138 +69,6 @@ pub fn current_exe() -> io::Result { unsupported() } -static ENV_LOCK: RwLock<()> = RwLock::new(()); - -pub fn env_read_lock() -> impl Drop { - ENV_LOCK.read().unwrap_or_else(PoisonError::into_inner) -} - -pub struct Env { - iter: vec::IntoIter<(OsString, OsString)>, -} - -// FIXME(https://github.com/rust-lang/rust/issues/114583): Remove this when ::fmt matches ::fmt. -pub struct EnvStrDebug<'a> { - slice: &'a [(OsString, OsString)], -} - -impl fmt::Debug for EnvStrDebug<'_> { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - let Self { slice } = self; - f.debug_list() - .entries(slice.iter().map(|(a, b)| (a.to_str().unwrap(), b.to_str().unwrap()))) - .finish() - } -} - -impl Env { - pub fn str_debug(&self) -> impl fmt::Debug + '_ { - let Self { iter } = self; - EnvStrDebug { slice: iter.as_slice() } - } -} - -impl fmt::Debug for Env { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - let Self { iter } = self; - f.debug_list().entries(iter.as_slice()).finish() - } -} - -impl !Send for Env {} -impl !Sync for Env {} - -impl Iterator for Env { - type Item = (OsString, OsString); - fn next(&mut self) -> Option<(OsString, OsString)> { - self.iter.next() - } - fn size_hint(&self) -> (usize, Option) { - self.iter.size_hint() - } -} - -/// Returns a vector of (variable, value) byte-vector pairs for all the -/// environment variables of the current process. -pub fn env() -> Env { - unsafe extern "C" { - static mut environ: *const *const c_char; - } - - unsafe { - let _guard = env_read_lock(); - let mut result = Vec::new(); - if !environ.is_null() { - while !(*environ).is_null() { - if let Some(key_value) = parse(CStr::from_ptr(*environ).to_bytes()) { - result.push(key_value); - } - environ = environ.add(1); - } - } - return Env { iter: result.into_iter() }; - } - - fn parse(input: &[u8]) -> Option<(OsString, OsString)> { - // Strategy (copied from glibc): Variable name and value are separated - // by an ASCII equals sign '='. Since a variable name must not be - // empty, allow variable names starting with an equals sign. Skip all - // malformed lines. - if input.is_empty() { - return None; - } - let pos = memchr::memchr(b'=', &input[1..]).map(|p| p + 1); - pos.map(|p| { - ( - OsStringExt::from_vec(input[..p].to_vec()), - OsStringExt::from_vec(input[p + 1..].to_vec()), - ) - }) - } -} - -pub fn getenv(k: &OsStr) -> Option { - // environment variables with a nul byte can't be set, so their value is - // always None as well - run_with_cstr(k.as_bytes(), &|k| { - let _guard = env_read_lock(); - let v = unsafe { libc::getenv(k.as_ptr()) } as *const libc::c_char; - - if v.is_null() { - Ok(None) - } else { - // SAFETY: `v` cannot be mutated while executing this line since we've a read lock - let bytes = unsafe { CStr::from_ptr(v) }.to_bytes().to_vec(); - - Ok(Some(OsStringExt::from_vec(bytes))) - } - }) - .ok() - .flatten() -} - -pub unsafe fn setenv(k: &OsStr, v: &OsStr) -> io::Result<()> { - run_with_cstr(k.as_bytes(), &|k| { - run_with_cstr(v.as_bytes(), &|v| { - let _guard = ENV_LOCK.write(); - cvt_env(unsafe { libc::setenv(k.as_ptr(), v.as_ptr(), 1) }).map(drop) - }) - }) -} - -pub unsafe fn unsetenv(n: &OsStr) -> io::Result<()> { - run_with_cstr(n.as_bytes(), &|nbuf| { - let _guard = ENV_LOCK.write(); - cvt_env(unsafe { libc::unsetenv(nbuf.as_ptr()) }).map(drop) - }) -} - -/// In kmclib, `setenv` and `unsetenv` don't always set `errno`, so this -/// function just returns a generic error. -fn cvt_env(t: c_int) -> io::Result { - if t == -1 { Err(io::const_error!(io::ErrorKind::Uncategorized, "failure")) } else { Ok(t) } -} - pub fn temp_dir() -> PathBuf { panic!("no standard temporary directory on this platform") } diff --git a/library/std/src/sys/pal/teeos/mod.rs b/library/std/src/sys/pal/teeos/mod.rs index c1921a2f40df5..c7b1777725858 100644 --- a/library/std/src/sys/pal/teeos/mod.rs +++ b/library/std/src/sys/pal/teeos/mod.rs @@ -6,11 +6,6 @@ #![allow(unused_variables)] #![allow(dead_code)] -#[path = "../unsupported/args.rs"] -pub mod args; -#[path = "../unsupported/env.rs"] -pub mod env; -//pub mod fd; pub mod os; #[path = "../unsupported/pipe.rs"] pub mod pipe; diff --git a/library/std/src/sys/pal/teeos/os.rs b/library/std/src/sys/pal/teeos/os.rs index bf6945811ab0e..03f3c72b0229a 100644 --- a/library/std/src/sys/pal/teeos/os.rs +++ b/library/std/src/sys/pal/teeos/os.rs @@ -73,47 +73,6 @@ pub fn current_exe() -> io::Result { unsupported() } -pub struct Env(!); - -impl Env { - // FIXME(https://github.com/rust-lang/rust/issues/114583): Remove this when ::fmt matches ::fmt. - pub fn str_debug(&self) -> impl fmt::Debug + '_ { - let Self(inner) = self; - match *inner {} - } -} - -impl fmt::Debug for Env { - fn fmt(&self, _: &mut fmt::Formatter<'_>) -> fmt::Result { - let Self(inner) = self; - match *inner {} - } -} - -impl Iterator for Env { - type Item = (OsString, OsString); - fn next(&mut self) -> Option<(OsString, OsString)> { - let Self(inner) = self; - match *inner {} - } -} - -pub fn env() -> Env { - panic!("not supported on this platform") -} - -pub fn getenv(_: &OsStr) -> Option { - None -} - -pub unsafe fn setenv(_: &OsStr, _: &OsStr) -> io::Result<()> { - Err(io::const_error!(io::ErrorKind::Unsupported, "cannot set env vars on this platform")) -} - -pub unsafe fn unsetenv(_: &OsStr) -> io::Result<()> { - Err(io::const_error!(io::ErrorKind::Unsupported, "cannot unset env vars on this platform")) -} - pub fn temp_dir() -> PathBuf { panic!("no filesystem on this platform") } diff --git a/library/std/src/sys/pal/trusty/mod.rs b/library/std/src/sys/pal/trusty/mod.rs index 5295d3fdc9145..275f606246336 100644 --- a/library/std/src/sys/pal/trusty/mod.rs +++ b/library/std/src/sys/pal/trusty/mod.rs @@ -1,12 +1,8 @@ //! System bindings for the Trusty OS. -#[path = "../unsupported/args.rs"] -pub mod args; #[path = "../unsupported/common.rs"] #[deny(unsafe_op_in_unsafe_fn)] mod common; -#[path = "../unsupported/env.rs"] -pub mod env; #[path = "../unsupported/os.rs"] pub mod os; #[path = "../unsupported/pipe.rs"] diff --git a/library/std/src/sys/pal/uefi/args.rs b/library/std/src/sys/pal/uefi/args.rs deleted file mode 100644 index 0c29caf2db676..0000000000000 --- a/library/std/src/sys/pal/uefi/args.rs +++ /dev/null @@ -1,156 +0,0 @@ -use r_efi::protocols::loaded_image; - -use super::helpers; -use crate::env::current_exe; -use crate::ffi::OsString; -use crate::iter::Iterator; -use crate::{fmt, vec}; - -pub struct Args { - parsed_args_list: vec::IntoIter, -} - -pub fn args() -> Args { - let lazy_current_exe = || Vec::from([current_exe().map(Into::into).unwrap_or_default()]); - - // Each loaded image has an image handle that supports `EFI_LOADED_IMAGE_PROTOCOL`. Thus, this - // will never fail. - let protocol = - helpers::image_handle_protocol::(loaded_image::PROTOCOL_GUID) - .unwrap(); - - let lp_size = unsafe { (*protocol.as_ptr()).load_options_size } as usize; - // Break if we are sure that it cannot be UTF-16 - if lp_size < size_of::() || lp_size % size_of::() != 0 { - return Args { parsed_args_list: lazy_current_exe().into_iter() }; - } - let lp_size = lp_size / size_of::(); - - let lp_cmd_line = unsafe { (*protocol.as_ptr()).load_options as *const u16 }; - if !lp_cmd_line.is_aligned() { - return Args { parsed_args_list: lazy_current_exe().into_iter() }; - } - let lp_cmd_line = unsafe { crate::slice::from_raw_parts(lp_cmd_line, lp_size) }; - - Args { - parsed_args_list: parse_lp_cmd_line(lp_cmd_line) - .unwrap_or_else(lazy_current_exe) - .into_iter(), - } -} - -impl fmt::Debug for Args { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - self.parsed_args_list.as_slice().fmt(f) - } -} - -impl Iterator for Args { - type Item = OsString; - - fn next(&mut self) -> Option { - self.parsed_args_list.next() - } - - fn size_hint(&self) -> (usize, Option) { - self.parsed_args_list.size_hint() - } -} - -impl ExactSizeIterator for Args { - fn len(&self) -> usize { - self.parsed_args_list.len() - } -} - -impl DoubleEndedIterator for Args { - fn next_back(&mut self) -> Option { - self.parsed_args_list.next_back() - } -} - -/// Implements the UEFI command-line argument parsing algorithm. -/// -/// This implementation is based on what is defined in Section 3.4 of -/// [UEFI Shell Specification](https://uefi.org/sites/default/files/resources/UEFI_Shell_Spec_2_0.pdf) -/// -/// Returns None in the following cases: -/// - Invalid UTF-16 (unpaired surrogate) -/// - Empty/improper arguments -fn parse_lp_cmd_line(code_units: &[u16]) -> Option> { - const QUOTE: char = '"'; - const SPACE: char = ' '; - const CARET: char = '^'; - const NULL: char = '\0'; - - let mut ret_val = Vec::new(); - let mut code_units_iter = char::decode_utf16(code_units.iter().cloned()).peekable(); - - // The executable name at the beginning is special. - let mut in_quotes = false; - let mut cur = String::new(); - while let Some(w) = code_units_iter.next() { - let w = w.ok()?; - match w { - // break on NULL - NULL => break, - // A quote mark always toggles `in_quotes` no matter what because - // there are no escape characters when parsing the executable name. - QUOTE => in_quotes = !in_quotes, - // If not `in_quotes` then whitespace ends argv[0]. - SPACE if !in_quotes => break, - // In all other cases the code unit is taken literally. - _ => cur.push(w), - } - } - - // If exe name is missing, the cli args are invalid - if cur.is_empty() { - return None; - } - - ret_val.push(OsString::from(cur)); - // Skip whitespace. - while code_units_iter.next_if_eq(&Ok(SPACE)).is_some() {} - - // Parse the arguments according to these rules: - // * All code units are taken literally except space, quote and caret. - // * When not `in_quotes`, space separate arguments. Consecutive spaces are - // treated as a single separator. - // * A space `in_quotes` is taken literally. - // * A quote toggles `in_quotes` mode unless it's escaped. An escaped quote is taken literally. - // * A quote can be escaped if preceded by caret. - // * A caret can be escaped if preceded by caret. - let mut cur = String::new(); - let mut in_quotes = false; - while let Some(w) = code_units_iter.next() { - let w = w.ok()?; - match w { - // break on NULL - NULL => break, - // If not `in_quotes`, a space or tab ends the argument. - SPACE if !in_quotes => { - ret_val.push(OsString::from(&cur[..])); - cur.truncate(0); - - // Skip whitespace. - while code_units_iter.next_if_eq(&Ok(SPACE)).is_some() {} - } - // Caret can escape quotes or carets - CARET if in_quotes => { - if let Some(x) = code_units_iter.next() { - cur.push(x.ok()?); - } - } - // If quote then flip `in_quotes` - QUOTE => in_quotes = !in_quotes, - // Everything else is always taken literally. - _ => cur.push(w), - } - } - // Push the final argument, if any. - if !cur.is_empty() || in_quotes { - ret_val.push(OsString::from(cur)); - } - Some(ret_val) -} diff --git a/library/std/src/sys/pal/uefi/env.rs b/library/std/src/sys/pal/uefi/env.rs deleted file mode 100644 index c106d5fed3e1d..0000000000000 --- a/library/std/src/sys/pal/uefi/env.rs +++ /dev/null @@ -1,9 +0,0 @@ -pub mod os { - pub const FAMILY: &str = ""; - pub const OS: &str = "uefi"; - pub const DLL_PREFIX: &str = ""; - pub const DLL_SUFFIX: &str = ""; - pub const DLL_EXTENSION: &str = ""; - pub const EXE_SUFFIX: &str = ".efi"; - pub const EXE_EXTENSION: &str = "efi"; -} diff --git a/library/std/src/sys/pal/uefi/helpers.rs b/library/std/src/sys/pal/uefi/helpers.rs index 309022bcccf27..6ee3e0a8b6625 100644 --- a/library/std/src/sys/pal/uefi/helpers.rs +++ b/library/std/src/sys/pal/uefi/helpers.rs @@ -22,7 +22,7 @@ use crate::os::uefi::{self}; use crate::path::Path; use crate::ptr::NonNull; use crate::slice; -use crate::sync::atomic::{AtomicPtr, Ordering}; +use crate::sync::atomic::{Atomic, AtomicPtr, Ordering}; use crate::sys_common::wstr::WStrUnits; type BootInstallMultipleProtocolInterfaces = @@ -157,7 +157,7 @@ pub(crate) fn device_path_to_text(path: NonNull) -> io::R Ok(path) } - static LAST_VALID_HANDLE: AtomicPtr = + static LAST_VALID_HANDLE: Atomic<*mut crate::ffi::c_void> = AtomicPtr::new(crate::ptr::null_mut()); if let Some(handle) = NonNull::new(LAST_VALID_HANDLE.load(Ordering::Acquire)) { @@ -269,7 +269,7 @@ impl OwnedDevicePath { .ok_or_else(|| const_error!(io::ErrorKind::InvalidFilename, "invalid Device Path")) } - static LAST_VALID_HANDLE: AtomicPtr = + static LAST_VALID_HANDLE: Atomic<*mut crate::ffi::c_void> = AtomicPtr::new(crate::ptr::null_mut()); if let Some(handle) = NonNull::new(LAST_VALID_HANDLE.load(Ordering::Acquire)) { @@ -606,7 +606,7 @@ pub(crate) fn os_string_to_raw(s: &OsStr) -> Option> { } pub(crate) fn open_shell() -> Option> { - static LAST_VALID_HANDLE: AtomicPtr = + static LAST_VALID_HANDLE: Atomic<*mut crate::ffi::c_void> = AtomicPtr::new(crate::ptr::null_mut()); if let Some(handle) = NonNull::new(LAST_VALID_HANDLE.load(Ordering::Acquire)) { diff --git a/library/std/src/sys/pal/uefi/mod.rs b/library/std/src/sys/pal/uefi/mod.rs index 9760a23084aad..78fcfcb3b77d5 100644 --- a/library/std/src/sys/pal/uefi/mod.rs +++ b/library/std/src/sys/pal/uefi/mod.rs @@ -13,8 +13,6 @@ //! [`OsString`]: crate::ffi::OsString #![forbid(unsafe_op_in_unsafe_fn)] -pub mod args; -pub mod env; pub mod helpers; pub mod os; #[path = "../unsupported/pipe.rs"] @@ -30,9 +28,9 @@ pub type RawOsError = usize; use crate::io as std_io; use crate::os::uefi; use crate::ptr::NonNull; -use crate::sync::atomic::{AtomicPtr, Ordering}; +use crate::sync::atomic::{Atomic, AtomicPtr, Ordering}; -static EXIT_BOOT_SERVICE_EVENT: AtomicPtr = +static EXIT_BOOT_SERVICE_EVENT: Atomic<*mut crate::ffi::c_void> = AtomicPtr::new(crate::ptr::null_mut()); /// # SAFETY diff --git a/library/std/src/sys/pal/uefi/os.rs b/library/std/src/sys/pal/uefi/os.rs index d26d61890c19e..bfd4dc81cb44f 100644 --- a/library/std/src/sys/pal/uefi/os.rs +++ b/library/std/src/sys/pal/uefi/os.rs @@ -131,60 +131,6 @@ pub fn current_exe() -> io::Result { helpers::device_path_to_text(protocol).map(PathBuf::from) } -pub struct EnvStrDebug<'a> { - iter: &'a [(OsString, OsString)], -} - -impl fmt::Debug for EnvStrDebug<'_> { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - let mut list = f.debug_list(); - for (a, b) in self.iter { - list.entry(&(a.to_str().unwrap(), b.to_str().unwrap())); - } - list.finish() - } -} - -pub struct Env(crate::vec::IntoIter<(OsString, OsString)>); - -impl Env { - // FIXME(https://github.com/rust-lang/rust/issues/114583): Remove this when ::fmt matches ::fmt. - pub fn str_debug(&self) -> impl fmt::Debug + '_ { - EnvStrDebug { iter: self.0.as_slice() } - } -} - -impl Iterator for Env { - type Item = (OsString, OsString); - - fn next(&mut self) -> Option<(OsString, OsString)> { - self.0.next() - } -} - -impl fmt::Debug for Env { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - self.0.fmt(f) - } -} - -pub fn env() -> Env { - let env = uefi_env::get_all().expect("not supported on this platform"); - Env(env.into_iter()) -} - -pub fn getenv(key: &OsStr) -> Option { - uefi_env::get(key) -} - -pub unsafe fn setenv(key: &OsStr, val: &OsStr) -> io::Result<()> { - uefi_env::set(key, val) -} - -pub unsafe fn unsetenv(key: &OsStr) -> io::Result<()> { - uefi_env::unset(key) -} - pub fn temp_dir() -> PathBuf { panic!("no filesystem on this platform") } @@ -213,85 +159,3 @@ pub fn exit(code: i32) -> ! { pub fn getpid() -> u32 { panic!("no pids on this platform") } - -mod uefi_env { - use crate::ffi::{OsStr, OsString}; - use crate::io; - use crate::os::uefi::ffi::OsStringExt; - use crate::ptr::NonNull; - use crate::sys::{helpers, unsupported_err}; - - pub(crate) fn get(key: &OsStr) -> Option { - let shell = helpers::open_shell()?; - let mut key_ptr = helpers::os_string_to_raw(key)?; - unsafe { get_raw(shell, key_ptr.as_mut_ptr()) } - } - - pub(crate) fn set(key: &OsStr, val: &OsStr) -> io::Result<()> { - let mut key_ptr = helpers::os_string_to_raw(key) - .ok_or(io::const_error!(io::ErrorKind::InvalidInput, "invalid key"))?; - let mut val_ptr = helpers::os_string_to_raw(val) - .ok_or(io::const_error!(io::ErrorKind::InvalidInput, "invalid value"))?; - unsafe { set_raw(key_ptr.as_mut_ptr(), val_ptr.as_mut_ptr()) } - } - - pub(crate) fn unset(key: &OsStr) -> io::Result<()> { - let mut key_ptr = helpers::os_string_to_raw(key) - .ok_or(io::const_error!(io::ErrorKind::InvalidInput, "invalid key"))?; - unsafe { set_raw(key_ptr.as_mut_ptr(), crate::ptr::null_mut()) } - } - - pub(crate) fn get_all() -> io::Result> { - let shell = helpers::open_shell().ok_or(unsupported_err())?; - - let mut vars = Vec::new(); - let val = unsafe { ((*shell.as_ptr()).get_env)(crate::ptr::null_mut()) }; - - if val.is_null() { - return Ok(vars); - } - - let mut start = 0; - - // UEFI Shell returns all keys separated by NULL. - // End of string is denoted by two NULLs - for i in 0.. { - if unsafe { *val.add(i) } == 0 { - // Two NULL signal end of string - if i == start { - break; - } - - let key = OsString::from_wide(unsafe { - crate::slice::from_raw_parts(val.add(start), i - start) - }); - // SAFETY: val.add(start) is always NULL terminated - let val = unsafe { get_raw(shell, val.add(start)) } - .ok_or(io::const_error!(io::ErrorKind::InvalidInput, "invalid value"))?; - - vars.push((key, val)); - start = i + 1; - } - } - - Ok(vars) - } - - unsafe fn get_raw( - shell: NonNull, - key_ptr: *mut r_efi::efi::Char16, - ) -> Option { - let val = unsafe { ((*shell.as_ptr()).get_env)(key_ptr) }; - helpers::os_string_from_raw(val) - } - - unsafe fn set_raw( - key_ptr: *mut r_efi::efi::Char16, - val_ptr: *mut r_efi::efi::Char16, - ) -> io::Result<()> { - let shell = helpers::open_shell().ok_or(unsupported_err())?; - let r = - unsafe { ((*shell.as_ptr()).set_env)(key_ptr, val_ptr, r_efi::efi::Boolean::FALSE) }; - if r.is_error() { Err(io::Error::from_raw_os_error(r.as_usize())) } else { Ok(()) } - } -} diff --git a/library/std/src/sys/pal/uefi/time.rs b/library/std/src/sys/pal/uefi/time.rs index c4ff3015ac60d..eeb2c35ffbbc9 100644 --- a/library/std/src/sys/pal/uefi/time.rs +++ b/library/std/src/sys/pal/uefi/time.rs @@ -121,7 +121,7 @@ pub(crate) mod instant_internal { use super::*; use crate::mem::MaybeUninit; use crate::ptr::NonNull; - use crate::sync::atomic::{AtomicPtr, Ordering}; + use crate::sync::atomic::{Atomic, AtomicPtr, Ordering}; use crate::sys_common::mul_div_u64; const NS_PER_SEC: u64 = 1_000_000_000; @@ -142,7 +142,7 @@ pub(crate) mod instant_internal { Some(mul_div_u64(ts, NS_PER_SEC, freq)) } - static LAST_VALID_HANDLE: AtomicPtr = + static LAST_VALID_HANDLE: Atomic<*mut crate::ffi::c_void> = AtomicPtr::new(crate::ptr::null_mut()); if let Some(handle) = NonNull::new(LAST_VALID_HANDLE.load(Ordering::Acquire)) { diff --git a/library/std/src/sys/pal/unix/args.rs b/library/std/src/sys/pal/unix/args.rs deleted file mode 100644 index 0bb7b64007aba..0000000000000 --- a/library/std/src/sys/pal/unix/args.rs +++ /dev/null @@ -1,243 +0,0 @@ -//! Global initialization and retrieval of command line arguments. -//! -//! On some platforms these are stored during runtime startup, -//! and on some they are retrieved from the system on demand. - -#![allow(dead_code)] // runtime init functions not used during testing - -use crate::ffi::{CStr, OsString}; -use crate::os::unix::ffi::OsStringExt; -use crate::{fmt, vec}; - -/// One-time global initialization. -pub unsafe fn init(argc: isize, argv: *const *const u8) { - imp::init(argc, argv) -} - -/// Returns the command line arguments -pub fn args() -> Args { - let (argc, argv) = imp::argc_argv(); - - let mut vec = Vec::with_capacity(argc as usize); - - for i in 0..argc { - // SAFETY: `argv` is non-null if `argc` is positive, and it is - // guaranteed to be at least as long as `argc`, so reading from it - // should be safe. - let ptr = unsafe { argv.offset(i).read() }; - - // Some C commandline parsers (e.g. GLib and Qt) are replacing already - // handled arguments in `argv` with `NULL` and move them to the end. - // - // Since they can't directly ensure updates to `argc` as well, this - // means that `argc` might be bigger than the actual number of - // non-`NULL` pointers in `argv` at this point. - // - // To handle this we simply stop iterating at the first `NULL` - // argument. `argv` is also guaranteed to be `NULL`-terminated so any - // non-`NULL` arguments after the first `NULL` can safely be ignored. - if ptr.is_null() { - // NOTE: On Apple platforms, `-[NSProcessInfo arguments]` does not - // stop iterating here, but instead `continue`, always iterating - // up until it reached `argc`. - // - // This difference will only matter in very specific circumstances - // where `argc`/`argv` have been modified, but in unexpected ways, - // so it likely doesn't really matter which option we choose. - // See the following PR for further discussion: - // - break; - } - - // SAFETY: Just checked that the pointer is not NULL, and arguments - // are otherwise guaranteed to be valid C strings. - let cstr = unsafe { CStr::from_ptr(ptr) }; - vec.push(OsStringExt::from_vec(cstr.to_bytes().to_vec())); - } - - Args { iter: vec.into_iter() } -} - -pub struct Args { - iter: vec::IntoIter, -} - -impl !Send for Args {} -impl !Sync for Args {} - -impl fmt::Debug for Args { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - self.iter.as_slice().fmt(f) - } -} - -impl Iterator for Args { - type Item = OsString; - fn next(&mut self) -> Option { - self.iter.next() - } - fn size_hint(&self) -> (usize, Option) { - self.iter.size_hint() - } -} - -impl ExactSizeIterator for Args { - fn len(&self) -> usize { - self.iter.len() - } -} - -impl DoubleEndedIterator for Args { - fn next_back(&mut self) -> Option { - self.iter.next_back() - } -} - -#[cfg(any( - target_os = "linux", - target_os = "android", - target_os = "freebsd", - target_os = "dragonfly", - target_os = "netbsd", - target_os = "openbsd", - target_os = "cygwin", - target_os = "solaris", - target_os = "illumos", - target_os = "emscripten", - target_os = "haiku", - target_os = "l4re", - target_os = "fuchsia", - target_os = "redox", - target_os = "vxworks", - target_os = "horizon", - target_os = "aix", - target_os = "nto", - target_os = "hurd", - target_os = "rtems", - target_os = "nuttx", -))] -mod imp { - use crate::ffi::c_char; - use crate::ptr; - use crate::sync::atomic::{AtomicIsize, AtomicPtr, Ordering}; - - // The system-provided argc and argv, which we store in static memory - // here so that we can defer the work of parsing them until its actually - // needed. - // - // Note that we never mutate argv/argc, the argv array, or the argv - // strings, which allows the code in this file to be very simple. - static ARGC: AtomicIsize = AtomicIsize::new(0); - static ARGV: AtomicPtr<*const u8> = AtomicPtr::new(ptr::null_mut()); - - unsafe fn really_init(argc: isize, argv: *const *const u8) { - // These don't need to be ordered with each other or other stores, - // because they only hold the unmodified system-provide argv/argc. - ARGC.store(argc, Ordering::Relaxed); - ARGV.store(argv as *mut _, Ordering::Relaxed); - } - - #[inline(always)] - pub unsafe fn init(argc: isize, argv: *const *const u8) { - // on GNU/Linux if we are main then we will init argv and argc twice, it "duplicates work" - // BUT edge-cases are real: only using .init_array can break most emulators, dlopen, etc. - really_init(argc, argv); - } - - /// glibc passes argc, argv, and envp to functions in .init_array, as a non-standard extension. - /// This allows `std::env::args` to work even in a `cdylib`, as it does on macOS and Windows. - #[cfg(all(target_os = "linux", target_env = "gnu"))] - #[used] - #[unsafe(link_section = ".init_array.00099")] - static ARGV_INIT_ARRAY: extern "C" fn( - crate::os::raw::c_int, - *const *const u8, - *const *const u8, - ) = { - extern "C" fn init_wrapper( - argc: crate::os::raw::c_int, - argv: *const *const u8, - _envp: *const *const u8, - ) { - unsafe { - really_init(argc as isize, argv); - } - } - init_wrapper - }; - - pub fn argc_argv() -> (isize, *const *const c_char) { - // Load ARGC and ARGV, which hold the unmodified system-provided - // argc/argv, so we can read the pointed-to memory without atomics or - // synchronization. - // - // If either ARGC or ARGV is still zero or null, then either there - // really are no arguments, or someone is asking for `args()` before - // initialization has completed, and we return an empty list. - let argv = ARGV.load(Ordering::Relaxed); - let argc = if argv.is_null() { 0 } else { ARGC.load(Ordering::Relaxed) }; - - // Cast from `*mut *const u8` to `*const *const c_char` - (argc, argv.cast()) - } -} - -// Use `_NSGetArgc` and `_NSGetArgv` on Apple platforms. -// -// Even though these have underscores in their names, they've been available -// since the first versions of both macOS and iOS, and are declared in -// the header `crt_externs.h`. -// -// NOTE: This header was added to the iOS 13.0 SDK, which has been the source -// of a great deal of confusion in the past about the availability of these -// APIs. -// -// NOTE(madsmtm): This has not strictly been verified to not cause App Store -// rejections; if this is found to be the case, the previous implementation -// of this used `[[NSProcessInfo processInfo] arguments]`. -#[cfg(target_vendor = "apple")] -mod imp { - use crate::ffi::{c_char, c_int}; - - pub unsafe fn init(_argc: isize, _argv: *const *const u8) { - // No need to initialize anything in here, `libdyld.dylib` has already - // done the work for us. - } - - pub fn argc_argv() -> (isize, *const *const c_char) { - unsafe extern "C" { - // These functions are in crt_externs.h. - fn _NSGetArgc() -> *mut c_int; - fn _NSGetArgv() -> *mut *mut *mut c_char; - } - - // SAFETY: The returned pointer points to a static initialized early - // in the program lifetime by `libdyld.dylib`, and as such is always - // valid. - // - // NOTE: Similar to `_NSGetEnviron`, there technically isn't anything - // protecting us against concurrent modifications to this, and there - // doesn't exist a lock that we can take. Instead, it is generally - // expected that it's only modified in `main` / before other code - // runs, so reading this here should be fine. - let argc = unsafe { _NSGetArgc().read() }; - // SAFETY: Same as above. - let argv = unsafe { _NSGetArgv().read() }; - - // Cast from `*mut *mut c_char` to `*const *const c_char` - (argc as isize, argv.cast()) - } -} - -#[cfg(any(target_os = "espidf", target_os = "vita"))] -mod imp { - use crate::ffi::c_char; - use crate::ptr; - - #[inline(always)] - pub unsafe fn init(_argc: isize, _argv: *const *const u8) {} - - pub fn argc_argv() -> (isize, *const *const c_char) { - (0, ptr::null()) - } -} diff --git a/library/std/src/sys/pal/unix/env.rs b/library/std/src/sys/pal/unix/env.rs deleted file mode 100644 index c6609298f4b23..0000000000000 --- a/library/std/src/sys/pal/unix/env.rs +++ /dev/null @@ -1,307 +0,0 @@ -#[cfg(target_os = "linux")] -pub mod os { - pub const FAMILY: &str = "unix"; - pub const OS: &str = "linux"; - pub const DLL_PREFIX: &str = "lib"; - pub const DLL_SUFFIX: &str = ".so"; - pub const DLL_EXTENSION: &str = "so"; - pub const EXE_SUFFIX: &str = ""; - pub const EXE_EXTENSION: &str = ""; -} - -#[cfg(target_os = "macos")] -pub mod os { - pub const FAMILY: &str = "unix"; - pub const OS: &str = "macos"; - pub const DLL_PREFIX: &str = "lib"; - pub const DLL_SUFFIX: &str = ".dylib"; - pub const DLL_EXTENSION: &str = "dylib"; - pub const EXE_SUFFIX: &str = ""; - pub const EXE_EXTENSION: &str = ""; -} - -#[cfg(target_os = "ios")] -pub mod os { - pub const FAMILY: &str = "unix"; - pub const OS: &str = "ios"; - pub const DLL_PREFIX: &str = "lib"; - pub const DLL_SUFFIX: &str = ".dylib"; - pub const DLL_EXTENSION: &str = "dylib"; - pub const EXE_SUFFIX: &str = ""; - pub const EXE_EXTENSION: &str = ""; -} - -#[cfg(target_os = "tvos")] -pub mod os { - pub const FAMILY: &str = "unix"; - pub const OS: &str = "tvos"; - pub const DLL_PREFIX: &str = "lib"; - pub const DLL_SUFFIX: &str = ".dylib"; - pub const DLL_EXTENSION: &str = "dylib"; - pub const EXE_SUFFIX: &str = ""; - pub const EXE_EXTENSION: &str = ""; -} - -#[cfg(target_os = "watchos")] -pub mod os { - pub const FAMILY: &str = "unix"; - pub const OS: &str = "watchos"; - pub const DLL_PREFIX: &str = "lib"; - pub const DLL_SUFFIX: &str = ".dylib"; - pub const DLL_EXTENSION: &str = "dylib"; - pub const EXE_SUFFIX: &str = ""; - pub const EXE_EXTENSION: &str = ""; -} - -#[cfg(target_os = "visionos")] -pub mod os { - pub const FAMILY: &str = "unix"; - pub const OS: &str = "visionos"; - pub const DLL_PREFIX: &str = "lib"; - pub const DLL_SUFFIX: &str = ".dylib"; - pub const DLL_EXTENSION: &str = "dylib"; - pub const EXE_SUFFIX: &str = ""; - pub const EXE_EXTENSION: &str = ""; -} - -#[cfg(target_os = "freebsd")] -pub mod os { - pub const FAMILY: &str = "unix"; - pub const OS: &str = "freebsd"; - pub const DLL_PREFIX: &str = "lib"; - pub const DLL_SUFFIX: &str = ".so"; - pub const DLL_EXTENSION: &str = "so"; - pub const EXE_SUFFIX: &str = ""; - pub const EXE_EXTENSION: &str = ""; -} - -#[cfg(target_os = "dragonfly")] -pub mod os { - pub const FAMILY: &str = "unix"; - pub const OS: &str = "dragonfly"; - pub const DLL_PREFIX: &str = "lib"; - pub const DLL_SUFFIX: &str = ".so"; - pub const DLL_EXTENSION: &str = "so"; - pub const EXE_SUFFIX: &str = ""; - pub const EXE_EXTENSION: &str = ""; -} - -#[cfg(target_os = "netbsd")] -pub mod os { - pub const FAMILY: &str = "unix"; - pub const OS: &str = "netbsd"; - pub const DLL_PREFIX: &str = "lib"; - pub const DLL_SUFFIX: &str = ".so"; - pub const DLL_EXTENSION: &str = "so"; - pub const EXE_SUFFIX: &str = ""; - pub const EXE_EXTENSION: &str = ""; -} - -#[cfg(target_os = "openbsd")] -pub mod os { - pub const FAMILY: &str = "unix"; - pub const OS: &str = "openbsd"; - pub const DLL_PREFIX: &str = "lib"; - pub const DLL_SUFFIX: &str = ".so"; - pub const DLL_EXTENSION: &str = "so"; - pub const EXE_SUFFIX: &str = ""; - pub const EXE_EXTENSION: &str = ""; -} - -#[cfg(target_os = "cygwin")] -pub mod os { - pub const FAMILY: &str = "unix"; - pub const OS: &str = "cygwin"; - pub const DLL_PREFIX: &str = ""; - pub const DLL_SUFFIX: &str = ".dll"; - pub const DLL_EXTENSION: &str = "dll"; - pub const EXE_SUFFIX: &str = ".exe"; - pub const EXE_EXTENSION: &str = "exe"; -} - -#[cfg(target_os = "android")] -pub mod os { - pub const FAMILY: &str = "unix"; - pub const OS: &str = "android"; - pub const DLL_PREFIX: &str = "lib"; - pub const DLL_SUFFIX: &str = ".so"; - pub const DLL_EXTENSION: &str = "so"; - pub const EXE_SUFFIX: &str = ""; - pub const EXE_EXTENSION: &str = ""; -} - -#[cfg(target_os = "solaris")] -pub mod os { - pub const FAMILY: &str = "unix"; - pub const OS: &str = "solaris"; - pub const DLL_PREFIX: &str = "lib"; - pub const DLL_SUFFIX: &str = ".so"; - pub const DLL_EXTENSION: &str = "so"; - pub const EXE_SUFFIX: &str = ""; - pub const EXE_EXTENSION: &str = ""; -} - -#[cfg(target_os = "illumos")] -pub mod os { - pub const FAMILY: &str = "unix"; - pub const OS: &str = "illumos"; - pub const DLL_PREFIX: &str = "lib"; - pub const DLL_SUFFIX: &str = ".so"; - pub const DLL_EXTENSION: &str = "so"; - pub const EXE_SUFFIX: &str = ""; - pub const EXE_EXTENSION: &str = ""; -} - -#[cfg(target_os = "haiku")] -pub mod os { - pub const FAMILY: &str = "unix"; - pub const OS: &str = "haiku"; - pub const DLL_PREFIX: &str = "lib"; - pub const DLL_SUFFIX: &str = ".so"; - pub const DLL_EXTENSION: &str = "so"; - pub const EXE_SUFFIX: &str = ""; - pub const EXE_EXTENSION: &str = ""; -} - -#[cfg(target_os = "horizon")] -pub mod os { - pub const FAMILY: &str = "unix"; - pub const OS: &str = "horizon"; - pub const DLL_PREFIX: &str = "lib"; - pub const DLL_SUFFIX: &str = ".so"; - pub const DLL_EXTENSION: &str = "so"; - pub const EXE_SUFFIX: &str = ".elf"; - pub const EXE_EXTENSION: &str = "elf"; -} - -#[cfg(target_os = "hurd")] -pub mod os { - pub const FAMILY: &str = "unix"; - pub const OS: &str = "hurd"; - pub const DLL_PREFIX: &str = "lib"; - pub const DLL_SUFFIX: &str = ".so"; - pub const DLL_EXTENSION: &str = "so"; - pub const EXE_SUFFIX: &str = ""; - pub const EXE_EXTENSION: &str = ""; -} - -#[cfg(target_os = "vita")] -pub mod os { - pub const FAMILY: &str = "unix"; - pub const OS: &str = "vita"; - pub const DLL_PREFIX: &str = "lib"; - pub const DLL_SUFFIX: &str = ".so"; - pub const DLL_EXTENSION: &str = "so"; - pub const EXE_SUFFIX: &str = ".elf"; - pub const EXE_EXTENSION: &str = "elf"; -} - -#[cfg(all(target_os = "emscripten", target_arch = "wasm32"))] -pub mod os { - pub const FAMILY: &str = "unix"; - pub const OS: &str = "emscripten"; - pub const DLL_PREFIX: &str = "lib"; - pub const DLL_SUFFIX: &str = ".so"; - pub const DLL_EXTENSION: &str = "so"; - pub const EXE_SUFFIX: &str = ".js"; - pub const EXE_EXTENSION: &str = "js"; -} - -#[cfg(target_os = "fuchsia")] -pub mod os { - pub const FAMILY: &str = "unix"; - pub const OS: &str = "fuchsia"; - pub const DLL_PREFIX: &str = "lib"; - pub const DLL_SUFFIX: &str = ".so"; - pub const DLL_EXTENSION: &str = "so"; - pub const EXE_SUFFIX: &str = ""; - pub const EXE_EXTENSION: &str = ""; -} - -#[cfg(target_os = "l4re")] -pub mod os { - pub const FAMILY: &str = "unix"; - pub const OS: &str = "l4re"; - pub const DLL_PREFIX: &str = "lib"; - pub const DLL_SUFFIX: &str = ".so"; - pub const DLL_EXTENSION: &str = "so"; - pub const EXE_SUFFIX: &str = ""; - pub const EXE_EXTENSION: &str = ""; -} - -#[cfg(target_os = "nto")] -pub mod os { - pub const FAMILY: &str = "unix"; - pub const OS: &str = "nto"; - pub const DLL_PREFIX: &str = "lib"; - pub const DLL_SUFFIX: &str = ".so"; - pub const DLL_EXTENSION: &str = "so"; - pub const EXE_SUFFIX: &str = ""; - pub const EXE_EXTENSION: &str = ""; -} - -#[cfg(target_os = "redox")] -pub mod os { - pub const FAMILY: &str = "unix"; - pub const OS: &str = "redox"; - pub const DLL_PREFIX: &str = "lib"; - pub const DLL_SUFFIX: &str = ".so"; - pub const DLL_EXTENSION: &str = "so"; - pub const EXE_SUFFIX: &str = ""; - pub const EXE_EXTENSION: &str = ""; -} - -#[cfg(target_os = "rtems")] -pub mod os { - pub const FAMILY: &str = "unix"; - pub const OS: &str = "rtems"; - pub const DLL_PREFIX: &str = "lib"; - pub const DLL_SUFFIX: &str = ".so"; - pub const DLL_EXTENSION: &str = "so"; - pub const EXE_SUFFIX: &str = ""; - pub const EXE_EXTENSION: &str = ""; -} - -#[cfg(target_os = "vxworks")] -pub mod os { - pub const FAMILY: &str = "unix"; - pub const OS: &str = "vxworks"; - pub const DLL_PREFIX: &str = "lib"; - pub const DLL_SUFFIX: &str = ".so"; - pub const DLL_EXTENSION: &str = "so"; - pub const EXE_SUFFIX: &str = ""; - pub const EXE_EXTENSION: &str = ""; -} - -#[cfg(target_os = "espidf")] -pub mod os { - pub const FAMILY: &str = "unix"; - pub const OS: &str = "espidf"; - pub const DLL_PREFIX: &str = "lib"; - pub const DLL_SUFFIX: &str = ".so"; - pub const DLL_EXTENSION: &str = "so"; - pub const EXE_SUFFIX: &str = ""; - pub const EXE_EXTENSION: &str = ""; -} - -#[cfg(target_os = "aix")] -pub mod os { - pub const FAMILY: &str = "unix"; - pub const OS: &str = "aix"; - pub const DLL_PREFIX: &str = "lib"; - pub const DLL_SUFFIX: &str = ".a"; - pub const DLL_EXTENSION: &str = "a"; - pub const EXE_SUFFIX: &str = ""; - pub const EXE_EXTENSION: &str = ""; -} - -#[cfg(target_os = "nuttx")] -pub mod os { - pub const FAMILY: &str = "unix"; - pub const OS: &str = "nuttx"; - pub const DLL_PREFIX: &str = "lib"; - pub const DLL_SUFFIX: &str = ".so"; - pub const DLL_EXTENSION: &str = "so"; - pub const EXE_SUFFIX: &str = ""; - pub const EXE_EXTENSION: &str = ""; -} diff --git a/library/std/src/sys/pal/unix/fd.rs b/library/std/src/sys/pal/unix/fd.rs deleted file mode 100644 index 2ec8d01c13f46..0000000000000 --- a/library/std/src/sys/pal/unix/fd.rs +++ /dev/null @@ -1,674 +0,0 @@ -#![unstable(reason = "not public", issue = "none", feature = "fd")] - -#[cfg(test)] -mod tests; - -#[cfg(not(any( - target_os = "linux", - target_os = "l4re", - target_os = "android", - target_os = "hurd", -)))] -use libc::off_t as off64_t; -#[cfg(any( - target_os = "android", - target_os = "linux", - target_os = "l4re", - target_os = "hurd", -))] -use libc::off64_t; - -use crate::cmp; -use crate::io::{self, BorrowedCursor, IoSlice, IoSliceMut, Read}; -use crate::os::unix::io::{AsFd, AsRawFd, BorrowedFd, FromRawFd, IntoRawFd, OwnedFd, RawFd}; -use crate::sys::cvt; -use crate::sys_common::{AsInner, FromInner, IntoInner}; - -#[derive(Debug)] -pub struct FileDesc(OwnedFd); - -// The maximum read limit on most POSIX-like systems is `SSIZE_MAX`, -// with the man page quoting that if the count of bytes to read is -// greater than `SSIZE_MAX` the result is "unspecified". -// -// On Apple targets however, apparently the 64-bit libc is either buggy or -// intentionally showing odd behavior by rejecting any read with a size -// larger than or equal to INT_MAX. To handle both of these the read -// size is capped on both platforms. -const READ_LIMIT: usize = if cfg!(target_vendor = "apple") { - libc::c_int::MAX as usize - 1 -} else { - libc::ssize_t::MAX as usize -}; - -#[cfg(any( - target_os = "dragonfly", - target_os = "freebsd", - target_os = "netbsd", - target_os = "openbsd", - target_vendor = "apple", - target_os = "cygwin", -))] -const fn max_iov() -> usize { - libc::IOV_MAX as usize -} - -#[cfg(any( - target_os = "android", - target_os = "emscripten", - target_os = "linux", - target_os = "nto", -))] -const fn max_iov() -> usize { - libc::UIO_MAXIOV as usize -} - -#[cfg(not(any( - target_os = "android", - target_os = "dragonfly", - target_os = "emscripten", - target_os = "freebsd", - target_os = "linux", - target_os = "netbsd", - target_os = "nto", - target_os = "openbsd", - target_os = "horizon", - target_os = "vita", - target_vendor = "apple", - target_os = "cygwin", -)))] -const fn max_iov() -> usize { - 16 // The minimum value required by POSIX. -} - -impl FileDesc { - #[inline] - pub fn try_clone(&self) -> io::Result { - self.duplicate() - } - - pub fn read(&self, buf: &mut [u8]) -> io::Result { - let ret = cvt(unsafe { - libc::read( - self.as_raw_fd(), - buf.as_mut_ptr() as *mut libc::c_void, - cmp::min(buf.len(), READ_LIMIT), - ) - })?; - Ok(ret as usize) - } - - #[cfg(not(any( - target_os = "espidf", - target_os = "horizon", - target_os = "vita", - target_os = "nuttx" - )))] - pub fn read_vectored(&self, bufs: &mut [IoSliceMut<'_>]) -> io::Result { - let ret = cvt(unsafe { - libc::readv( - self.as_raw_fd(), - bufs.as_mut_ptr() as *mut libc::iovec as *const libc::iovec, - cmp::min(bufs.len(), max_iov()) as libc::c_int, - ) - })?; - Ok(ret as usize) - } - - #[cfg(any( - target_os = "espidf", - target_os = "horizon", - target_os = "vita", - target_os = "nuttx" - ))] - pub fn read_vectored(&self, bufs: &mut [IoSliceMut<'_>]) -> io::Result { - io::default_read_vectored(|b| self.read(b), bufs) - } - - #[inline] - pub fn is_read_vectored(&self) -> bool { - cfg!(not(any( - target_os = "espidf", - target_os = "horizon", - target_os = "vita", - target_os = "nuttx" - ))) - } - - pub fn read_to_end(&self, buf: &mut Vec) -> io::Result { - let mut me = self; - (&mut me).read_to_end(buf) - } - - #[cfg_attr(target_os = "vxworks", allow(unused_unsafe))] - pub fn read_at(&self, buf: &mut [u8], offset: u64) -> io::Result { - #[cfg(not(any( - all(target_os = "linux", not(target_env = "musl")), - target_os = "android", - target_os = "hurd" - )))] - use libc::pread as pread64; - #[cfg(any( - all(target_os = "linux", not(target_env = "musl")), - target_os = "android", - target_os = "hurd" - ))] - use libc::pread64; - - unsafe { - cvt(pread64( - self.as_raw_fd(), - buf.as_mut_ptr() as *mut libc::c_void, - cmp::min(buf.len(), READ_LIMIT), - offset as off64_t, - )) - .map(|n| n as usize) - } - } - - pub fn read_buf(&self, mut cursor: BorrowedCursor<'_>) -> io::Result<()> { - let ret = cvt(unsafe { - libc::read( - self.as_raw_fd(), - cursor.as_mut().as_mut_ptr() as *mut libc::c_void, - cmp::min(cursor.capacity(), READ_LIMIT), - ) - })?; - - // Safety: `ret` bytes were written to the initialized portion of the buffer - unsafe { - cursor.advance_unchecked(ret as usize); - } - Ok(()) - } - - #[cfg(any( - target_os = "aix", - target_os = "dragonfly", // DragonFly 1.5 - target_os = "emscripten", - target_os = "freebsd", - target_os = "fuchsia", - target_os = "hurd", - target_os = "illumos", - target_os = "linux", - target_os = "netbsd", - target_os = "openbsd", // OpenBSD 2.7 - ))] - pub fn read_vectored_at(&self, bufs: &mut [IoSliceMut<'_>], offset: u64) -> io::Result { - let ret = cvt(unsafe { - libc::preadv( - self.as_raw_fd(), - bufs.as_mut_ptr() as *mut libc::iovec as *const libc::iovec, - cmp::min(bufs.len(), max_iov()) as libc::c_int, - offset as _, - ) - })?; - Ok(ret as usize) - } - - #[cfg(not(any( - target_os = "aix", - target_os = "android", - target_os = "dragonfly", - target_os = "emscripten", - target_os = "freebsd", - target_os = "fuchsia", - target_os = "hurd", - target_os = "illumos", - target_os = "linux", - target_os = "netbsd", - target_os = "openbsd", - target_vendor = "apple", - )))] - pub fn read_vectored_at(&self, bufs: &mut [IoSliceMut<'_>], offset: u64) -> io::Result { - io::default_read_vectored(|b| self.read_at(b, offset), bufs) - } - - // We support some old Android versions that do not have `preadv` in libc, - // so we use weak linkage and fallback to a direct syscall if not available. - // - // On 32-bit targets, we don't want to deal with weird ABI issues around - // passing 64-bits parameters to syscalls, so we fallback to the default - // implementation if `preadv` is not available. - #[cfg(all(target_os = "android", target_pointer_width = "64"))] - pub fn read_vectored_at(&self, bufs: &mut [IoSliceMut<'_>], offset: u64) -> io::Result { - super::weak::syscall!( - fn preadv( - fd: libc::c_int, - iovec: *const libc::iovec, - n_iovec: libc::c_int, - offset: off64_t, - ) -> isize; - ); - - let ret = cvt(unsafe { - preadv( - self.as_raw_fd(), - bufs.as_mut_ptr() as *mut libc::iovec as *const libc::iovec, - cmp::min(bufs.len(), max_iov()) as libc::c_int, - offset as _, - ) - })?; - Ok(ret as usize) - } - - #[cfg(all(target_os = "android", target_pointer_width = "32"))] - // FIXME(#115199): Rust currently omits weak function definitions - // and its metadata from LLVM IR. - #[no_sanitize(cfi)] - pub fn read_vectored_at(&self, bufs: &mut [IoSliceMut<'_>], offset: u64) -> io::Result { - super::weak::weak!( - fn preadv64( - fd: libc::c_int, - iovec: *const libc::iovec, - n_iovec: libc::c_int, - offset: off64_t, - ) -> isize; - ); - - match preadv64.get() { - Some(preadv) => { - let ret = cvt(unsafe { - preadv( - self.as_raw_fd(), - bufs.as_mut_ptr() as *mut libc::iovec as *const libc::iovec, - cmp::min(bufs.len(), max_iov()) as libc::c_int, - offset as _, - ) - })?; - Ok(ret as usize) - } - None => io::default_read_vectored(|b| self.read_at(b, offset), bufs), - } - } - - // We support old MacOS, iOS, watchOS, tvOS and visionOS. `preadv` was added in the following - // Apple OS versions: - // ios 14.0 - // tvos 14.0 - // macos 11.0 - // watchos 7.0 - // - // These versions may be newer than the minimum supported versions of OS's we support so we must - // use "weak" linking. - #[cfg(target_vendor = "apple")] - pub fn read_vectored_at(&self, bufs: &mut [IoSliceMut<'_>], offset: u64) -> io::Result { - super::weak::weak!( - fn preadv( - fd: libc::c_int, - iovec: *const libc::iovec, - n_iovec: libc::c_int, - offset: off64_t, - ) -> isize; - ); - - match preadv.get() { - Some(preadv) => { - let ret = cvt(unsafe { - preadv( - self.as_raw_fd(), - bufs.as_mut_ptr() as *mut libc::iovec as *const libc::iovec, - cmp::min(bufs.len(), max_iov()) as libc::c_int, - offset as _, - ) - })?; - Ok(ret as usize) - } - None => io::default_read_vectored(|b| self.read_at(b, offset), bufs), - } - } - - pub fn write(&self, buf: &[u8]) -> io::Result { - let ret = cvt(unsafe { - libc::write( - self.as_raw_fd(), - buf.as_ptr() as *const libc::c_void, - cmp::min(buf.len(), READ_LIMIT), - ) - })?; - Ok(ret as usize) - } - - #[cfg(not(any( - target_os = "espidf", - target_os = "horizon", - target_os = "vita", - target_os = "nuttx" - )))] - pub fn write_vectored(&self, bufs: &[IoSlice<'_>]) -> io::Result { - let ret = cvt(unsafe { - libc::writev( - self.as_raw_fd(), - bufs.as_ptr() as *const libc::iovec, - cmp::min(bufs.len(), max_iov()) as libc::c_int, - ) - })?; - Ok(ret as usize) - } - - #[cfg(any( - target_os = "espidf", - target_os = "horizon", - target_os = "vita", - target_os = "nuttx" - ))] - pub fn write_vectored(&self, bufs: &[IoSlice<'_>]) -> io::Result { - io::default_write_vectored(|b| self.write(b), bufs) - } - - #[inline] - pub fn is_write_vectored(&self) -> bool { - cfg!(not(any( - target_os = "espidf", - target_os = "horizon", - target_os = "vita", - target_os = "nuttx" - ))) - } - - #[cfg_attr(target_os = "vxworks", allow(unused_unsafe))] - pub fn write_at(&self, buf: &[u8], offset: u64) -> io::Result { - #[cfg(not(any( - all(target_os = "linux", not(target_env = "musl")), - target_os = "android", - target_os = "hurd" - )))] - use libc::pwrite as pwrite64; - #[cfg(any( - all(target_os = "linux", not(target_env = "musl")), - target_os = "android", - target_os = "hurd" - ))] - use libc::pwrite64; - - unsafe { - cvt(pwrite64( - self.as_raw_fd(), - buf.as_ptr() as *const libc::c_void, - cmp::min(buf.len(), READ_LIMIT), - offset as off64_t, - )) - .map(|n| n as usize) - } - } - - #[cfg(any( - target_os = "aix", - target_os = "dragonfly", // DragonFly 1.5 - target_os = "emscripten", - target_os = "freebsd", - target_os = "fuchsia", - target_os = "hurd", - target_os = "illumos", - target_os = "linux", - target_os = "netbsd", - target_os = "openbsd", // OpenBSD 2.7 - ))] - pub fn write_vectored_at(&self, bufs: &[IoSlice<'_>], offset: u64) -> io::Result { - let ret = cvt(unsafe { - libc::pwritev( - self.as_raw_fd(), - bufs.as_ptr() as *const libc::iovec, - cmp::min(bufs.len(), max_iov()) as libc::c_int, - offset as _, - ) - })?; - Ok(ret as usize) - } - - #[cfg(not(any( - target_os = "aix", - target_os = "android", - target_os = "dragonfly", - target_os = "emscripten", - target_os = "freebsd", - target_os = "fuchsia", - target_os = "hurd", - target_os = "illumos", - target_os = "linux", - target_os = "netbsd", - target_os = "openbsd", - target_vendor = "apple", - )))] - pub fn write_vectored_at(&self, bufs: &[IoSlice<'_>], offset: u64) -> io::Result { - io::default_write_vectored(|b| self.write_at(b, offset), bufs) - } - - // We support some old Android versions that do not have `pwritev` in libc, - // so we use weak linkage and fallback to a direct syscall if not available. - // - // On 32-bit targets, we don't want to deal with weird ABI issues around - // passing 64-bits parameters to syscalls, so we fallback to the default - // implementation if `pwritev` is not available. - #[cfg(all(target_os = "android", target_pointer_width = "64"))] - pub fn write_vectored_at(&self, bufs: &[IoSlice<'_>], offset: u64) -> io::Result { - super::weak::syscall!( - fn pwritev( - fd: libc::c_int, - iovec: *const libc::iovec, - n_iovec: libc::c_int, - offset: off64_t, - ) -> isize; - ); - - let ret = cvt(unsafe { - pwritev( - self.as_raw_fd(), - bufs.as_ptr() as *const libc::iovec, - cmp::min(bufs.len(), max_iov()) as libc::c_int, - offset as _, - ) - })?; - Ok(ret as usize) - } - - #[cfg(all(target_os = "android", target_pointer_width = "32"))] - pub fn write_vectored_at(&self, bufs: &[IoSlice<'_>], offset: u64) -> io::Result { - super::weak::weak!( - fn pwritev64( - fd: libc::c_int, - iovec: *const libc::iovec, - n_iovec: libc::c_int, - offset: off64_t, - ) -> isize; - ); - - match pwritev64.get() { - Some(pwritev) => { - let ret = cvt(unsafe { - pwritev( - self.as_raw_fd(), - bufs.as_ptr() as *const libc::iovec, - cmp::min(bufs.len(), max_iov()) as libc::c_int, - offset as _, - ) - })?; - Ok(ret as usize) - } - None => io::default_write_vectored(|b| self.write_at(b, offset), bufs), - } - } - - // We support old MacOS, iOS, watchOS, tvOS and visionOS. `pwritev` was added in the following - // Apple OS versions: - // ios 14.0 - // tvos 14.0 - // macos 11.0 - // watchos 7.0 - // - // These versions may be newer than the minimum supported versions of OS's we support so we must - // use "weak" linking. - #[cfg(target_vendor = "apple")] - pub fn write_vectored_at(&self, bufs: &[IoSlice<'_>], offset: u64) -> io::Result { - super::weak::weak!( - fn pwritev( - fd: libc::c_int, - iovec: *const libc::iovec, - n_iovec: libc::c_int, - offset: off64_t, - ) -> isize; - ); - - match pwritev.get() { - Some(pwritev) => { - let ret = cvt(unsafe { - pwritev( - self.as_raw_fd(), - bufs.as_ptr() as *const libc::iovec, - cmp::min(bufs.len(), max_iov()) as libc::c_int, - offset as _, - ) - })?; - Ok(ret as usize) - } - None => io::default_write_vectored(|b| self.write_at(b, offset), bufs), - } - } - - #[cfg(not(any( - target_env = "newlib", - target_os = "solaris", - target_os = "illumos", - target_os = "emscripten", - target_os = "fuchsia", - target_os = "l4re", - target_os = "linux", - target_os = "cygwin", - target_os = "haiku", - target_os = "redox", - target_os = "vxworks", - target_os = "nto", - )))] - pub fn set_cloexec(&self) -> io::Result<()> { - unsafe { - cvt(libc::ioctl(self.as_raw_fd(), libc::FIOCLEX))?; - Ok(()) - } - } - #[cfg(any( - all( - target_env = "newlib", - not(any(target_os = "espidf", target_os = "horizon", target_os = "vita")) - ), - target_os = "solaris", - target_os = "illumos", - target_os = "emscripten", - target_os = "fuchsia", - target_os = "l4re", - target_os = "linux", - target_os = "cygwin", - target_os = "haiku", - target_os = "redox", - target_os = "vxworks", - target_os = "nto", - ))] - pub fn set_cloexec(&self) -> io::Result<()> { - unsafe { - let previous = cvt(libc::fcntl(self.as_raw_fd(), libc::F_GETFD))?; - let new = previous | libc::FD_CLOEXEC; - if new != previous { - cvt(libc::fcntl(self.as_raw_fd(), libc::F_SETFD, new))?; - } - Ok(()) - } - } - #[cfg(any(target_os = "espidf", target_os = "horizon", target_os = "vita"))] - pub fn set_cloexec(&self) -> io::Result<()> { - // FD_CLOEXEC is not supported in ESP-IDF, Horizon OS and Vita but there's no need to, - // because none of them supports spawning processes. - Ok(()) - } - - #[cfg(target_os = "linux")] - pub fn set_nonblocking(&self, nonblocking: bool) -> io::Result<()> { - unsafe { - let v = nonblocking as libc::c_int; - cvt(libc::ioctl(self.as_raw_fd(), libc::FIONBIO, &v))?; - Ok(()) - } - } - - #[cfg(not(target_os = "linux"))] - pub fn set_nonblocking(&self, nonblocking: bool) -> io::Result<()> { - unsafe { - let previous = cvt(libc::fcntl(self.as_raw_fd(), libc::F_GETFL))?; - let new = if nonblocking { - previous | libc::O_NONBLOCK - } else { - previous & !libc::O_NONBLOCK - }; - if new != previous { - cvt(libc::fcntl(self.as_raw_fd(), libc::F_SETFL, new))?; - } - Ok(()) - } - } - - #[inline] - pub fn duplicate(&self) -> io::Result { - Ok(Self(self.0.try_clone()?)) - } -} - -impl<'a> Read for &'a FileDesc { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - (**self).read(buf) - } - - fn read_buf(&mut self, cursor: BorrowedCursor<'_>) -> io::Result<()> { - (**self).read_buf(cursor) - } - - fn read_vectored(&mut self, bufs: &mut [IoSliceMut<'_>]) -> io::Result { - (**self).read_vectored(bufs) - } - - #[inline] - fn is_read_vectored(&self) -> bool { - (**self).is_read_vectored() - } -} - -impl AsInner for FileDesc { - #[inline] - fn as_inner(&self) -> &OwnedFd { - &self.0 - } -} - -impl IntoInner for FileDesc { - fn into_inner(self) -> OwnedFd { - self.0 - } -} - -impl FromInner for FileDesc { - fn from_inner(owned_fd: OwnedFd) -> Self { - Self(owned_fd) - } -} - -impl AsFd for FileDesc { - fn as_fd(&self) -> BorrowedFd<'_> { - self.0.as_fd() - } -} - -impl AsRawFd for FileDesc { - #[inline] - fn as_raw_fd(&self) -> RawFd { - self.0.as_raw_fd() - } -} - -impl IntoRawFd for FileDesc { - fn into_raw_fd(self) -> RawFd { - self.0.into_raw_fd() - } -} - -impl FromRawFd for FileDesc { - unsafe fn from_raw_fd(raw_fd: RawFd) -> Self { - Self(FromRawFd::from_raw_fd(raw_fd)) - } -} diff --git a/library/std/src/sys/pal/unix/fd/tests.rs b/library/std/src/sys/pal/unix/fd/tests.rs deleted file mode 100644 index c5301ce655787..0000000000000 --- a/library/std/src/sys/pal/unix/fd/tests.rs +++ /dev/null @@ -1,11 +0,0 @@ -use core::mem::ManuallyDrop; - -use super::{FileDesc, IoSlice}; -use crate::os::unix::io::FromRawFd; - -#[test] -fn limit_vector_count() { - let stdout = ManuallyDrop::new(unsafe { FileDesc::from_raw_fd(1) }); - let bufs = (0..1500).map(|_| IoSlice::new(&[])).collect::>(); - assert!(stdout.write_vectored(&bufs).is_ok()); -} diff --git a/library/std/src/sys/pal/unix/fuchsia.rs b/library/std/src/sys/pal/unix/fuchsia.rs index 7932bd26d76c3..c118dee624764 100644 --- a/library/std/src/sys/pal/unix/fuchsia.rs +++ b/library/std/src/sys/pal/unix/fuchsia.rs @@ -1,48 +1,35 @@ -#![allow(non_camel_case_types, unused)] +#![expect(non_camel_case_types)] -use libc::{c_int, c_void, size_t}; +use libc::size_t; +use crate::ffi::{c_char, c_int, c_void}; use crate::io; -use crate::mem::MaybeUninit; -use crate::os::raw::c_char; -pub type zx_handle_t = u32; -pub type zx_vaddr_t = usize; -pub type zx_rights_t = u32; -pub type zx_status_t = i32; - -pub const ZX_HANDLE_INVALID: zx_handle_t = 0; +////////// +// Time // +////////// pub type zx_time_t = i64; -pub const ZX_TIME_INFINITE: zx_time_t = i64::MAX; - -pub type zx_signals_t = u32; - -pub const ZX_OBJECT_SIGNAL_3: zx_signals_t = 1 << 3; -pub const ZX_TASK_TERMINATED: zx_signals_t = ZX_OBJECT_SIGNAL_3; +pub const ZX_TIME_INFINITE: zx_time_t = i64::MAX; -pub const ZX_RIGHT_SAME_RIGHTS: zx_rights_t = 1 << 31; +unsafe extern "C" { + pub safe fn zx_clock_get_monotonic() -> zx_time_t; +} -// The upper four bits gives the minor version. -pub type zx_object_info_topic_t = u32; +///////////// +// Handles // +///////////// -pub const ZX_INFO_PROCESS: zx_object_info_topic_t = 3 | (1 << 28); +pub type zx_handle_t = u32; -pub type zx_info_process_flags_t = u32; +pub const ZX_HANDLE_INVALID: zx_handle_t = 0; -pub fn zx_cvt(t: T) -> io::Result -where - T: TryInto + Copy, -{ - if let Ok(status) = TryInto::try_into(t) { - if status < 0 { Err(io::Error::from_raw_os_error(status)) } else { Ok(t) } - } else { - Err(io::Error::last_os_error()) - } +unsafe extern "C" { + pub fn zx_handle_close(handle: zx_handle_t) -> zx_status_t; } -// Safe wrapper around zx_handle_t +/// A safe wrapper around `zx_handle_t`. pub struct Handle { raw: zx_handle_t, } @@ -65,6 +52,66 @@ impl Drop for Handle { } } +/////////// +// Futex // +/////////// + +pub type zx_futex_t = crate::sync::atomic::Atomic; + +unsafe extern "C" { + pub fn zx_object_wait_one( + handle: zx_handle_t, + signals: zx_signals_t, + timeout: zx_time_t, + pending: *mut zx_signals_t, + ) -> zx_status_t; + + pub fn zx_futex_wait( + value_ptr: *const zx_futex_t, + current_value: zx_futex_t, + new_futex_owner: zx_handle_t, + deadline: zx_time_t, + ) -> zx_status_t; + pub fn zx_futex_wake(value_ptr: *const zx_futex_t, wake_count: u32) -> zx_status_t; + pub fn zx_futex_wake_single_owner(value_ptr: *const zx_futex_t) -> zx_status_t; + pub safe fn zx_thread_self() -> zx_handle_t; +} + +//////////////// +// Properties // +//////////////// + +pub const ZX_PROP_NAME: u32 = 3; + +unsafe extern "C" { + pub fn zx_object_set_property( + handle: zx_handle_t, + property: u32, + value: *const libc::c_void, + value_size: libc::size_t, + ) -> zx_status_t; +} + +///////////// +// Signals // +///////////// + +pub type zx_signals_t = u32; + +pub const ZX_OBJECT_SIGNAL_3: zx_signals_t = 1 << 3; +pub const ZX_TASK_TERMINATED: zx_signals_t = ZX_OBJECT_SIGNAL_3; + +///////////////// +// Object info // +///////////////// + +// The upper four bits gives the minor version. +pub type zx_object_info_topic_t = u32; + +pub const ZX_INFO_PROCESS: zx_object_info_topic_t = 3 | (1 << 28); + +pub type zx_info_process_flags_t = u32; + // Returned for topic ZX_INFO_PROCESS #[derive(Default)] #[repr(C)] @@ -76,25 +123,6 @@ pub struct zx_info_process_t { } unsafe extern "C" { - pub fn zx_job_default() -> zx_handle_t; - - pub fn zx_task_kill(handle: zx_handle_t) -> zx_status_t; - - pub fn zx_handle_close(handle: zx_handle_t) -> zx_status_t; - - pub fn zx_handle_duplicate( - handle: zx_handle_t, - rights: zx_rights_t, - out: *const zx_handle_t, - ) -> zx_handle_t; - - pub fn zx_object_wait_one( - handle: zx_handle_t, - signals: zx_signals_t, - timeout: zx_time_t, - pending: *mut zx_signals_t, - ) -> zx_status_t; - pub fn zx_object_get_info( handle: zx_handle_t, topic: u32, @@ -105,6 +133,10 @@ unsafe extern "C" { ) -> zx_status_t; } +/////////////// +// Processes // +/////////////// + #[derive(Default)] #[repr(C)] pub struct fdio_spawn_action_t { @@ -130,6 +162,8 @@ unsafe extern "C" { pub fn fdio_fd_clone(fd: c_int, out_handle: *mut zx_handle_t) -> zx_status_t; pub fn fdio_fd_create(handle: zx_handle_t, fd: *mut c_int) -> zx_status_t; + + pub fn zx_task_kill(handle: zx_handle_t) -> zx_status_t; } // fdio_spawn_etc flags @@ -137,173 +171,34 @@ unsafe extern "C" { pub const FDIO_SPAWN_CLONE_JOB: u32 = 0x0001; pub const FDIO_SPAWN_CLONE_LDSVC: u32 = 0x0002; pub const FDIO_SPAWN_CLONE_NAMESPACE: u32 = 0x0004; -pub const FDIO_SPAWN_CLONE_STDIO: u32 = 0x0008; pub const FDIO_SPAWN_CLONE_ENVIRON: u32 = 0x0010; pub const FDIO_SPAWN_CLONE_UTC_CLOCK: u32 = 0x0020; -pub const FDIO_SPAWN_CLONE_ALL: u32 = 0xFFFF; // fdio_spawn_etc actions -pub const FDIO_SPAWN_ACTION_CLONE_FD: u32 = 0x0001; pub const FDIO_SPAWN_ACTION_TRANSFER_FD: u32 = 0x0002; -// Errors - -#[allow(unused)] -pub const ERR_INTERNAL: zx_status_t = -1; - -// ERR_NOT_SUPPORTED: The operation is not implemented, supported, -// or enabled. -#[allow(unused)] -pub const ERR_NOT_SUPPORTED: zx_status_t = -2; - -// ERR_NO_RESOURCES: The system was not able to allocate some resource -// needed for the operation. -#[allow(unused)] -pub const ERR_NO_RESOURCES: zx_status_t = -3; - -// ERR_NO_MEMORY: The system was not able to allocate memory needed -// for the operation. -#[allow(unused)] -pub const ERR_NO_MEMORY: zx_status_t = -4; - -// ERR_CALL_FAILED: The second phase of zx_channel_call(; did not complete -// successfully. -#[allow(unused)] -pub const ERR_CALL_FAILED: zx_status_t = -5; - -// ERR_INTERRUPTED_RETRY: The system call was interrupted, but should be -// retried. This should not be seen outside of the VDSO. -#[allow(unused)] -pub const ERR_INTERRUPTED_RETRY: zx_status_t = -6; - -// ======= Parameter errors ======= -// ERR_INVALID_ARGS: an argument is invalid, ex. null pointer -#[allow(unused)] -pub const ERR_INVALID_ARGS: zx_status_t = -10; - -// ERR_BAD_HANDLE: A specified handle value does not refer to a handle. -#[allow(unused)] -pub const ERR_BAD_HANDLE: zx_status_t = -11; - -// ERR_WRONG_TYPE: The subject of the operation is the wrong type to -// perform the operation. -// Example: Attempting a message_read on a thread handle. -#[allow(unused)] -pub const ERR_WRONG_TYPE: zx_status_t = -12; - -// ERR_BAD_SYSCALL: The specified syscall number is invalid. -#[allow(unused)] -pub const ERR_BAD_SYSCALL: zx_status_t = -13; - -// ERR_OUT_OF_RANGE: An argument is outside the valid range for this -// operation. -#[allow(unused)] -pub const ERR_OUT_OF_RANGE: zx_status_t = -14; - -// ERR_BUFFER_TOO_SMALL: A caller provided buffer is too small for -// this operation. -#[allow(unused)] -pub const ERR_BUFFER_TOO_SMALL: zx_status_t = -15; - -// ======= Precondition or state errors ======= -// ERR_BAD_STATE: operation failed because the current state of the -// object does not allow it, or a precondition of the operation is -// not satisfied -#[allow(unused)] -pub const ERR_BAD_STATE: zx_status_t = -20; - -// ERR_TIMED_OUT: The time limit for the operation elapsed before -// the operation completed. -#[allow(unused)] -pub const ERR_TIMED_OUT: zx_status_t = -21; - -// ERR_SHOULD_WAIT: The operation cannot be performed currently but -// potentially could succeed if the caller waits for a prerequisite -// to be satisfied, for example waiting for a handle to be readable -// or writable. -// Example: Attempting to read from a message pipe that has no -// messages waiting but has an open remote will return ERR_SHOULD_WAIT. -// Attempting to read from a message pipe that has no messages waiting -// and has a closed remote end will return ERR_REMOTE_CLOSED. -#[allow(unused)] -pub const ERR_SHOULD_WAIT: zx_status_t = -22; - -// ERR_CANCELED: The in-progress operation (e.g., a wait) has been -// // canceled. -#[allow(unused)] -pub const ERR_CANCELED: zx_status_t = -23; - -// ERR_PEER_CLOSED: The operation failed because the remote end -// of the subject of the operation was closed. -#[allow(unused)] -pub const ERR_PEER_CLOSED: zx_status_t = -24; - -// ERR_NOT_FOUND: The requested entity is not found. -#[allow(unused)] -pub const ERR_NOT_FOUND: zx_status_t = -25; - -// ERR_ALREADY_EXISTS: An object with the specified identifier -// already exists. -// Example: Attempting to create a file when a file already exists -// with that name. -#[allow(unused)] -pub const ERR_ALREADY_EXISTS: zx_status_t = -26; - -// ERR_ALREADY_BOUND: The operation failed because the named entity -// is already owned or controlled by another entity. The operation -// could succeed later if the current owner releases the entity. -#[allow(unused)] -pub const ERR_ALREADY_BOUND: zx_status_t = -27; - -// ERR_UNAVAILABLE: The subject of the operation is currently unable -// to perform the operation. -// Note: This is used when there's no direct way for the caller to -// observe when the subject will be able to perform the operation -// and should thus retry. -#[allow(unused)] -pub const ERR_UNAVAILABLE: zx_status_t = -28; - -// ======= Permission check errors ======= -// ERR_ACCESS_DENIED: The caller did not have permission to perform -// the specified operation. -#[allow(unused)] -pub const ERR_ACCESS_DENIED: zx_status_t = -30; - -// ======= Input-output errors ======= -// ERR_IO: Otherwise unspecified error occurred during I/O. -#[allow(unused)] -pub const ERR_IO: zx_status_t = -40; - -// ERR_REFUSED: The entity the I/O operation is being performed on -// rejected the operation. -// Example: an I2C device NAK'ing a transaction or a disk controller -// rejecting an invalid command. -#[allow(unused)] -pub const ERR_IO_REFUSED: zx_status_t = -41; - -// ERR_IO_DATA_INTEGRITY: The data in the operation failed an integrity -// check and is possibly corrupted. -// Example: CRC or Parity error. -#[allow(unused)] -pub const ERR_IO_DATA_INTEGRITY: zx_status_t = -42; - -// ERR_IO_DATA_LOSS: The data in the operation is currently unavailable -// and may be permanently lost. -// Example: A disk block is irrecoverably damaged. -#[allow(unused)] -pub const ERR_IO_DATA_LOSS: zx_status_t = -43; - -// Filesystem specific errors -#[allow(unused)] -pub const ERR_BAD_PATH: zx_status_t = -50; -#[allow(unused)] -pub const ERR_NOT_DIR: zx_status_t = -51; -#[allow(unused)] -pub const ERR_NOT_FILE: zx_status_t = -52; -// ERR_FILE_BIG: A file exceeds a filesystem-specific size limit. -#[allow(unused)] -pub const ERR_FILE_BIG: zx_status_t = -53; -// ERR_NO_SPACE: Filesystem or device space is exhausted. -#[allow(unused)] -pub const ERR_NO_SPACE: zx_status_t = -54; +//////////// +// Errors // +//////////// + +pub type zx_status_t = i32; + +pub const ZX_OK: zx_status_t = 0; +pub const ZX_ERR_NOT_SUPPORTED: zx_status_t = -2; +pub const ZX_ERR_INVALID_ARGS: zx_status_t = -10; +pub const ZX_ERR_BAD_HANDLE: zx_status_t = -11; +pub const ZX_ERR_WRONG_TYPE: zx_status_t = -12; +pub const ZX_ERR_BAD_STATE: zx_status_t = -20; +pub const ZX_ERR_TIMED_OUT: zx_status_t = -21; + +pub fn zx_cvt(t: T) -> io::Result +where + T: TryInto + Copy, +{ + if let Ok(status) = TryInto::try_into(t) { + if status < 0 { Err(io::Error::from_raw_os_error(status)) } else { Ok(t) } + } else { + Err(io::Error::last_os_error()) + } +} diff --git a/library/std/src/sys/pal/unix/futex.rs b/library/std/src/sys/pal/unix/futex.rs index 87ba13ca9321d..c23278bdf5e5d 100644 --- a/library/std/src/sys/pal/unix/futex.rs +++ b/library/std/src/sys/pal/unix/futex.rs @@ -8,16 +8,16 @@ target_os = "fuchsia", ))] -use crate::sync::atomic::AtomicU32; +use crate::sync::atomic::Atomic; use crate::time::Duration; /// An atomic for use as a futex that is at least 32-bits but may be larger -pub type Futex = AtomicU32; +pub type Futex = Atomic; /// Must be the underlying type of Futex pub type Primitive = u32; /// An atomic for use as a futex that is at least 8-bits but may be larger. -pub type SmallFutex = AtomicU32; +pub type SmallFutex = Atomic; /// Must be the underlying type of SmallFutex pub type SmallPrimitive = u32; @@ -27,7 +27,7 @@ pub type SmallPrimitive = u32; /// /// Returns false on timeout, and true in all other cases. #[cfg(any(target_os = "linux", target_os = "android", target_os = "freebsd"))] -pub fn futex_wait(futex: &AtomicU32, expected: u32, timeout: Option) -> bool { +pub fn futex_wait(futex: &Atomic, expected: u32, timeout: Option) -> bool { use super::time::Timespec; use crate::ptr::null; use crate::sync::atomic::Ordering::Relaxed; @@ -60,7 +60,7 @@ pub fn futex_wait(futex: &AtomicU32, expected: u32, timeout: Option) - let umtx_timeout_ptr = umtx_timeout.as_ref().map_or(null(), |t| t as *const _); let umtx_timeout_size = umtx_timeout.as_ref().map_or(0, |t| size_of_val(t)); libc::_umtx_op( - futex as *const AtomicU32 as *mut _, + futex as *const Atomic as *mut _, libc::UMTX_OP_WAIT_UINT_PRIVATE, expected as libc::c_ulong, crate::ptr::without_provenance_mut(umtx_timeout_size), @@ -71,7 +71,7 @@ pub fn futex_wait(futex: &AtomicU32, expected: u32, timeout: Option) - // absolute time rather than a relative time. libc::syscall( libc::SYS_futex, - futex as *const AtomicU32, + futex as *const Atomic, libc::FUTEX_WAIT_BITSET | libc::FUTEX_PRIVATE_FLAG, expected, timespec.as_ref().map_or(null(), |t| t as *const libc::timespec), @@ -99,16 +99,16 @@ pub fn futex_wait(futex: &AtomicU32, expected: u32, timeout: Option) - /// /// On some platforms, this always returns false. #[cfg(any(target_os = "linux", target_os = "android"))] -pub fn futex_wake(futex: &AtomicU32) -> bool { - let ptr = futex as *const AtomicU32; +pub fn futex_wake(futex: &Atomic) -> bool { + let ptr = futex as *const Atomic; let op = libc::FUTEX_WAKE | libc::FUTEX_PRIVATE_FLAG; unsafe { libc::syscall(libc::SYS_futex, ptr, op, 1) > 0 } } /// Wakes up all threads that are waiting on `futex_wait` on this futex. #[cfg(any(target_os = "linux", target_os = "android"))] -pub fn futex_wake_all(futex: &AtomicU32) { - let ptr = futex as *const AtomicU32; +pub fn futex_wake_all(futex: &Atomic) { + let ptr = futex as *const Atomic; let op = libc::FUTEX_WAKE | libc::FUTEX_PRIVATE_FLAG; unsafe { libc::syscall(libc::SYS_futex, ptr, op, i32::MAX); @@ -117,11 +117,11 @@ pub fn futex_wake_all(futex: &AtomicU32) { // FreeBSD doesn't tell us how many threads are woken up, so this always returns false. #[cfg(target_os = "freebsd")] -pub fn futex_wake(futex: &AtomicU32) -> bool { +pub fn futex_wake(futex: &Atomic) -> bool { use crate::ptr::null_mut; unsafe { libc::_umtx_op( - futex as *const AtomicU32 as *mut _, + futex as *const Atomic as *mut _, libc::UMTX_OP_WAKE_PRIVATE, 1, null_mut(), @@ -132,11 +132,11 @@ pub fn futex_wake(futex: &AtomicU32) -> bool { } #[cfg(target_os = "freebsd")] -pub fn futex_wake_all(futex: &AtomicU32) { +pub fn futex_wake_all(futex: &Atomic) { use crate::ptr::null_mut; unsafe { libc::_umtx_op( - futex as *const AtomicU32 as *mut _, + futex as *const Atomic as *mut _, libc::UMTX_OP_WAKE_PRIVATE, i32::MAX as libc::c_ulong, null_mut(), @@ -146,7 +146,7 @@ pub fn futex_wake_all(futex: &AtomicU32) { } #[cfg(target_os = "openbsd")] -pub fn futex_wait(futex: &AtomicU32, expected: u32, timeout: Option) -> bool { +pub fn futex_wait(futex: &Atomic, expected: u32, timeout: Option) -> bool { use super::time::Timespec; use crate::ptr::{null, null_mut}; @@ -157,7 +157,7 @@ pub fn futex_wait(futex: &AtomicU32, expected: u32, timeout: Option) - let r = unsafe { libc::futex( - futex as *const AtomicU32 as *mut u32, + futex as *const Atomic as *mut u32, libc::FUTEX_WAIT, expected as i32, timespec.as_ref().map_or(null(), |t| t as *const libc::timespec), @@ -169,20 +169,25 @@ pub fn futex_wait(futex: &AtomicU32, expected: u32, timeout: Option) - } #[cfg(target_os = "openbsd")] -pub fn futex_wake(futex: &AtomicU32) -> bool { +pub fn futex_wake(futex: &Atomic) -> bool { use crate::ptr::{null, null_mut}; unsafe { - libc::futex(futex as *const AtomicU32 as *mut u32, libc::FUTEX_WAKE, 1, null(), null_mut()) - > 0 + libc::futex( + futex as *const Atomic as *mut u32, + libc::FUTEX_WAKE, + 1, + null(), + null_mut(), + ) > 0 } } #[cfg(target_os = "openbsd")] -pub fn futex_wake_all(futex: &AtomicU32) { +pub fn futex_wake_all(futex: &Atomic) { use crate::ptr::{null, null_mut}; unsafe { libc::futex( - futex as *const AtomicU32 as *mut u32, + futex as *const Atomic as *mut u32, libc::FUTEX_WAKE, i32::MAX, null(), @@ -192,7 +197,7 @@ pub fn futex_wake_all(futex: &AtomicU32) { } #[cfg(target_os = "dragonfly")] -pub fn futex_wait(futex: &AtomicU32, expected: u32, timeout: Option) -> bool { +pub fn futex_wait(futex: &Atomic, expected: u32, timeout: Option) -> bool { // A timeout of 0 means infinite. // We round smaller timeouts up to 1 millisecond. // Overflows are rounded up to an infinite timeout. @@ -200,7 +205,7 @@ pub fn futex_wait(futex: &AtomicU32, expected: u32, timeout: Option) - timeout.and_then(|d| Some(i32::try_from(d.as_millis()).ok()?.max(1))).unwrap_or(0); let r = unsafe { - libc::umtx_sleep(futex as *const AtomicU32 as *const i32, expected as i32, timeout_ms) + libc::umtx_sleep(futex as *const Atomic as *const i32, expected as i32, timeout_ms) }; r == 0 || super::os::errno() != libc::ETIMEDOUT @@ -208,28 +213,28 @@ pub fn futex_wait(futex: &AtomicU32, expected: u32, timeout: Option) - // DragonflyBSD doesn't tell us how many threads are woken up, so this always returns false. #[cfg(target_os = "dragonfly")] -pub fn futex_wake(futex: &AtomicU32) -> bool { - unsafe { libc::umtx_wakeup(futex as *const AtomicU32 as *const i32, 1) }; +pub fn futex_wake(futex: &Atomic) -> bool { + unsafe { libc::umtx_wakeup(futex as *const Atomic as *const i32, 1) }; false } #[cfg(target_os = "dragonfly")] -pub fn futex_wake_all(futex: &AtomicU32) { - unsafe { libc::umtx_wakeup(futex as *const AtomicU32 as *const i32, i32::MAX) }; +pub fn futex_wake_all(futex: &Atomic) { + unsafe { libc::umtx_wakeup(futex as *const Atomic as *const i32, i32::MAX) }; } #[cfg(target_os = "emscripten")] unsafe extern "C" { - fn emscripten_futex_wake(addr: *const AtomicU32, count: libc::c_int) -> libc::c_int; + fn emscripten_futex_wake(addr: *const Atomic, count: libc::c_int) -> libc::c_int; fn emscripten_futex_wait( - addr: *const AtomicU32, + addr: *const Atomic, val: libc::c_uint, max_wait_ms: libc::c_double, ) -> libc::c_int; } #[cfg(target_os = "emscripten")] -pub fn futex_wait(futex: &AtomicU32, expected: u32, timeout: Option) -> bool { +pub fn futex_wait(futex: &Atomic, expected: u32, timeout: Option) -> bool { unsafe { emscripten_futex_wait( futex, @@ -240,72 +245,38 @@ pub fn futex_wait(futex: &AtomicU32, expected: u32, timeout: Option) - } #[cfg(target_os = "emscripten")] -pub fn futex_wake(futex: &AtomicU32) -> bool { +pub fn futex_wake(futex: &Atomic) -> bool { unsafe { emscripten_futex_wake(futex, 1) > 0 } } #[cfg(target_os = "emscripten")] -pub fn futex_wake_all(futex: &AtomicU32) { +pub fn futex_wake_all(futex: &Atomic) { unsafe { emscripten_futex_wake(futex, i32::MAX) }; } #[cfg(target_os = "fuchsia")] -pub mod zircon { - pub type zx_futex_t = crate::sync::atomic::AtomicU32; - pub type zx_handle_t = u32; - pub type zx_status_t = i32; - pub type zx_time_t = i64; - - pub const ZX_HANDLE_INVALID: zx_handle_t = 0; - - pub const ZX_TIME_INFINITE: zx_time_t = zx_time_t::MAX; - - pub const ZX_OK: zx_status_t = 0; - pub const ZX_ERR_INVALID_ARGS: zx_status_t = -10; - pub const ZX_ERR_BAD_HANDLE: zx_status_t = -11; - pub const ZX_ERR_WRONG_TYPE: zx_status_t = -12; - pub const ZX_ERR_BAD_STATE: zx_status_t = -20; - pub const ZX_ERR_TIMED_OUT: zx_status_t = -21; - - unsafe extern "C" { - pub fn zx_clock_get_monotonic() -> zx_time_t; - pub fn zx_futex_wait( - value_ptr: *const zx_futex_t, - current_value: zx_futex_t, - new_futex_owner: zx_handle_t, - deadline: zx_time_t, - ) -> zx_status_t; - pub fn zx_futex_wake(value_ptr: *const zx_futex_t, wake_count: u32) -> zx_status_t; - pub fn zx_futex_wake_single_owner(value_ptr: *const zx_futex_t) -> zx_status_t; - pub fn zx_thread_self() -> zx_handle_t; - } -} +pub fn futex_wait(futex: &Atomic, expected: u32, timeout: Option) -> bool { + use super::fuchsia::*; -#[cfg(target_os = "fuchsia")] -pub fn futex_wait(futex: &AtomicU32, expected: u32, timeout: Option) -> bool { // Sleep forever if the timeout is longer than fits in a i64. let deadline = timeout - .and_then(|d| { - i64::try_from(d.as_nanos()) - .ok()? - .checked_add(unsafe { zircon::zx_clock_get_monotonic() }) - }) - .unwrap_or(zircon::ZX_TIME_INFINITE); + .and_then(|d| i64::try_from(d.as_nanos()).ok()?.checked_add(zx_clock_get_monotonic())) + .unwrap_or(ZX_TIME_INFINITE); unsafe { - zircon::zx_futex_wait(futex, AtomicU32::new(expected), zircon::ZX_HANDLE_INVALID, deadline) - != zircon::ZX_ERR_TIMED_OUT + zx_futex_wait(futex, zx_futex_t::new(expected), ZX_HANDLE_INVALID, deadline) + != ZX_ERR_TIMED_OUT } } // Fuchsia doesn't tell us how many threads are woken up, so this always returns false. #[cfg(target_os = "fuchsia")] -pub fn futex_wake(futex: &AtomicU32) -> bool { - unsafe { zircon::zx_futex_wake(futex, 1) }; +pub fn futex_wake(futex: &Atomic) -> bool { + unsafe { super::fuchsia::zx_futex_wake(futex, 1) }; false } #[cfg(target_os = "fuchsia")] -pub fn futex_wake_all(futex: &AtomicU32) { - unsafe { zircon::zx_futex_wake(futex, u32::MAX) }; +pub fn futex_wake_all(futex: &Atomic) { + unsafe { super::fuchsia::zx_futex_wake(futex, u32::MAX) }; } diff --git a/library/std/src/sys/pal/unix/kernel_copy.rs b/library/std/src/sys/pal/unix/kernel_copy.rs index d42a7e2a7fc51..b984afa149d06 100644 --- a/library/std/src/sys/pal/unix/kernel_copy.rs +++ b/library/std/src/sys/pal/unix/kernel_copy.rs @@ -62,7 +62,7 @@ use crate::os::unix::io::{AsRawFd, FromRawFd, RawFd}; use crate::os::unix::net::UnixStream; use crate::process::{ChildStderr, ChildStdin, ChildStdout}; use crate::ptr; -use crate::sync::atomic::{AtomicBool, AtomicU8, Ordering}; +use crate::sync::atomic::{Atomic, AtomicBool, AtomicU8, Ordering}; use crate::sys::cvt; use crate::sys::fs::CachedFileMetadata; use crate::sys::weak::syscall; @@ -596,7 +596,7 @@ pub(super) fn copy_regular_files(reader: RawFd, writer: RawFd, max_len: u64) -> // Kernel prior to 4.5 don't have copy_file_range // We store the availability in a global to avoid unnecessary syscalls - static HAS_COPY_FILE_RANGE: AtomicU8 = AtomicU8::new(NOT_PROBED); + static HAS_COPY_FILE_RANGE: Atomic = AtomicU8::new(NOT_PROBED); let mut have_probed = match HAS_COPY_FILE_RANGE.load(Ordering::Relaxed) { NOT_PROBED => false, @@ -721,8 +721,8 @@ enum SpliceMode { /// performs splice or sendfile between file descriptors /// Does _not_ fall back to a generic copy loop. fn sendfile_splice(mode: SpliceMode, reader: RawFd, writer: RawFd, len: u64) -> CopyResult { - static HAS_SENDFILE: AtomicBool = AtomicBool::new(true); - static HAS_SPLICE: AtomicBool = AtomicBool::new(true); + static HAS_SENDFILE: Atomic = AtomicBool::new(true); + static HAS_SPLICE: Atomic = AtomicBool::new(true); // Android builds use feature level 14, but the libc wrapper for splice is // gated on feature level 21+, so we have to invoke the syscall directly. diff --git a/library/std/src/sys/pal/unix/linux/pidfd.rs b/library/std/src/sys/pal/unix/linux/pidfd.rs index 78744430f3b51..2d949ec9e91f7 100644 --- a/library/std/src/sys/pal/unix/linux/pidfd.rs +++ b/library/std/src/sys/pal/unix/linux/pidfd.rs @@ -1,7 +1,7 @@ use crate::io; use crate::os::fd::{AsRawFd, FromRawFd, RawFd}; use crate::sys::cvt; -use crate::sys::pal::unix::fd::FileDesc; +use crate::sys::fd::FileDesc; use crate::sys::process::ExitStatus; use crate::sys_common::{AsInner, FromInner, IntoInner}; diff --git a/library/std/src/sys/pal/unix/mod.rs b/library/std/src/sys/pal/unix/mod.rs index 413fda1d8d8e2..ba9e14b8009cd 100644 --- a/library/std/src/sys/pal/unix/mod.rs +++ b/library/std/src/sys/pal/unix/mod.rs @@ -6,9 +6,6 @@ use crate::io::ErrorKind; #[macro_use] pub mod weak; -pub mod args; -pub mod env; -pub mod fd; #[cfg(target_os = "fuchsia")] pub mod fuchsia; pub mod futex; @@ -28,6 +25,7 @@ pub mod time; pub fn init(_argc: isize, _argv: *const *const u8, _sigpipe: u8) {} #[cfg(not(target_os = "espidf"))] +#[cfg_attr(target_os = "vita", allow(unused_variables))] // SAFETY: must be called only once during runtime initialization. // NOTE: this is not guaranteed to run, for example when Rust code is called externally. // See `fn init()` in `library/std/src/rt.rs` for docs on `sigpipe`. @@ -48,7 +46,8 @@ pub unsafe fn init(argc: isize, argv: *const *const u8, sigpipe: u8) { reset_sigpipe(sigpipe); stack_overflow::init(); - args::init(argc, argv); + #[cfg(not(target_os = "vita"))] + crate::sys::args::init(argc, argv); // Normally, `thread::spawn` will call `Thread::set_name` but since this thread // already exists, we have to call it ourselves. We only do this on Apple targets @@ -205,7 +204,7 @@ pub unsafe fn init(argc: isize, argv: *const *const u8, sigpipe: u8) { target_os = "vxworks", target_os = "vita", )))] -static ON_BROKEN_PIPE_FLAG_USED: crate::sync::atomic::AtomicBool = +static ON_BROKEN_PIPE_FLAG_USED: crate::sync::atomic::Atomic = crate::sync::atomic::AtomicBool::new(false); #[cfg(not(any( @@ -274,6 +273,7 @@ pub fn decode_error_kind(errno: i32) -> ErrorKind { libc::ETXTBSY => ExecutableFileBusy, libc::EXDEV => CrossesDevices, libc::EINPROGRESS => InProgress, + libc::EOPNOTSUPP => Unsupported, libc::EACCES | libc::EPERM => PermissionDenied, diff --git a/library/std/src/sys/pal/unix/os.rs b/library/std/src/sys/pal/unix/os.rs index f47421c67051b..48609030aed1a 100644 --- a/library/std/src/sys/pal/unix/os.rs +++ b/library/std/src/sys/pal/unix/os.rs @@ -5,20 +5,15 @@ #[cfg(test)] mod tests; -use core::slice::memchr; - use libc::{c_char, c_int, c_void}; use crate::error::Error as StdError; -use crate::ffi::{CStr, CString, OsStr, OsString}; +use crate::ffi::{CStr, OsStr, OsString}; use crate::os::unix::prelude::*; use crate::path::{self, PathBuf}; -use crate::sync::{PoisonError, RwLock}; -use crate::sys::common::small_c_string::{run_path_with_cstr, run_with_cstr}; -#[cfg(all(target_env = "gnu", not(target_os = "vxworks")))] -use crate::sys::weak::weak; -use crate::sys::{cvt, fd}; -use crate::{fmt, io, iter, mem, ptr, slice, str, vec}; +use crate::sys::common::small_c_string::run_path_with_cstr; +use crate::sys::cvt; +use crate::{fmt, io, iter, mem, ptr, slice, str}; const TMPBUF_SZ: usize = 128; @@ -61,7 +56,7 @@ unsafe extern "C" { #[cfg_attr(target_os = "aix", link_name = "_Errno")] // SAFETY: this will always return the same pointer on a given thread. #[unsafe(ffi_const)] - fn errno_location() -> *mut c_int; + pub safe fn errno_location() -> *mut c_int; } /// Returns the platform-specific value of errno @@ -552,166 +547,6 @@ pub fn current_exe() -> io::Result { if !path.is_absolute() { getcwd().map(|cwd| cwd.join(path)) } else { Ok(path) } } -pub struct Env { - iter: vec::IntoIter<(OsString, OsString)>, -} - -// FIXME(https://github.com/rust-lang/rust/issues/114583): Remove this when ::fmt matches ::fmt. -pub struct EnvStrDebug<'a> { - slice: &'a [(OsString, OsString)], -} - -impl fmt::Debug for EnvStrDebug<'_> { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - let Self { slice } = self; - f.debug_list() - .entries(slice.iter().map(|(a, b)| (a.to_str().unwrap(), b.to_str().unwrap()))) - .finish() - } -} - -impl Env { - pub fn str_debug(&self) -> impl fmt::Debug + '_ { - let Self { iter } = self; - EnvStrDebug { slice: iter.as_slice() } - } -} - -impl fmt::Debug for Env { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - let Self { iter } = self; - f.debug_list().entries(iter.as_slice()).finish() - } -} - -impl !Send for Env {} -impl !Sync for Env {} - -impl Iterator for Env { - type Item = (OsString, OsString); - fn next(&mut self) -> Option<(OsString, OsString)> { - self.iter.next() - } - fn size_hint(&self) -> (usize, Option) { - self.iter.size_hint() - } -} - -// Use `_NSGetEnviron` on Apple platforms. -// -// `_NSGetEnviron` is the documented alternative (see `man environ`), and has -// been available since the first versions of both macOS and iOS. -// -// Nowadays, specifically since macOS 10.8, `environ` has been exposed through -// `libdyld.dylib`, which is linked via. `libSystem.dylib`: -// -// -// So in the end, it likely doesn't really matter which option we use, but the -// performance cost of using `_NSGetEnviron` is extremely miniscule, and it -// might be ever so slightly more supported, so let's just use that. -// -// NOTE: The header where this is defined (`crt_externs.h`) was added to the -// iOS 13.0 SDK, which has been the source of a great deal of confusion in the -// past about the availability of this API. -// -// NOTE(madsmtm): Neither this nor using `environ` has been verified to not -// cause App Store rejections; if this is found to be the case, an alternative -// implementation of this is possible using `[NSProcessInfo environment]` -// - which internally uses `_NSGetEnviron` and a system-wide lock on the -// environment variables to protect against `setenv`, so using that might be -// desirable anyhow? Though it also means that we have to link to Foundation. -#[cfg(target_vendor = "apple")] -pub unsafe fn environ() -> *mut *const *const c_char { - libc::_NSGetEnviron() as *mut *const *const c_char -} - -// Use the `environ` static which is part of POSIX. -#[cfg(not(target_vendor = "apple"))] -pub unsafe fn environ() -> *mut *const *const c_char { - unsafe extern "C" { - static mut environ: *const *const c_char; - } - &raw mut environ -} - -static ENV_LOCK: RwLock<()> = RwLock::new(()); - -pub fn env_read_lock() -> impl Drop { - ENV_LOCK.read().unwrap_or_else(PoisonError::into_inner) -} - -/// Returns a vector of (variable, value) byte-vector pairs for all the -/// environment variables of the current process. -pub fn env() -> Env { - unsafe { - let _guard = env_read_lock(); - let mut environ = *environ(); - let mut result = Vec::new(); - if !environ.is_null() { - while !(*environ).is_null() { - if let Some(key_value) = parse(CStr::from_ptr(*environ).to_bytes()) { - result.push(key_value); - } - environ = environ.add(1); - } - } - return Env { iter: result.into_iter() }; - } - - fn parse(input: &[u8]) -> Option<(OsString, OsString)> { - // Strategy (copied from glibc): Variable name and value are separated - // by an ASCII equals sign '='. Since a variable name must not be - // empty, allow variable names starting with an equals sign. Skip all - // malformed lines. - if input.is_empty() { - return None; - } - let pos = memchr::memchr(b'=', &input[1..]).map(|p| p + 1); - pos.map(|p| { - ( - OsStringExt::from_vec(input[..p].to_vec()), - OsStringExt::from_vec(input[p + 1..].to_vec()), - ) - }) - } -} - -pub fn getenv(k: &OsStr) -> Option { - // environment variables with a nul byte can't be set, so their value is - // always None as well - run_with_cstr(k.as_bytes(), &|k| { - let _guard = env_read_lock(); - let v = unsafe { libc::getenv(k.as_ptr()) } as *const libc::c_char; - - if v.is_null() { - Ok(None) - } else { - // SAFETY: `v` cannot be mutated while executing this line since we've a read lock - let bytes = unsafe { CStr::from_ptr(v) }.to_bytes().to_vec(); - - Ok(Some(OsStringExt::from_vec(bytes))) - } - }) - .ok() - .flatten() -} - -pub unsafe fn setenv(k: &OsStr, v: &OsStr) -> io::Result<()> { - run_with_cstr(k.as_bytes(), &|k| { - run_with_cstr(v.as_bytes(), &|v| { - let _guard = ENV_LOCK.write(); - cvt(libc::setenv(k.as_ptr(), v.as_ptr(), 1)).map(drop) - }) - }) -} - -pub unsafe fn unsetenv(n: &OsStr) -> io::Result<()> { - run_with_cstr(n.as_bytes(), &|nbuf| { - let _guard = ENV_LOCK.write(); - cvt(libc::unsetenv(nbuf.as_ptr())).map(drop) - }) -} - #[cfg(not(target_os = "espidf"))] pub fn page_size() -> usize { unsafe { libc::sysconf(libc::_SC_PAGESIZE) as usize } diff --git a/library/std/src/sys/pal/unix/stack_overflow.rs b/library/std/src/sys/pal/unix/stack_overflow.rs index 34b3948e3f671..8bf6d8335159b 100644 --- a/library/std/src/sys/pal/unix/stack_overflow.rs +++ b/library/std/src/sys/pal/unix/stack_overflow.rs @@ -49,7 +49,7 @@ mod imp { use crate::cell::Cell; use crate::ops::Range; use crate::sync::OnceLock; - use crate::sync::atomic::{AtomicBool, AtomicPtr, AtomicUsize, Ordering}; + use crate::sync::atomic::{Atomic, AtomicBool, AtomicPtr, AtomicUsize, Ordering}; use crate::sys::pal::unix::os; use crate::{io, mem, ptr, thread}; @@ -118,9 +118,9 @@ mod imp { } } - static PAGE_SIZE: AtomicUsize = AtomicUsize::new(0); - static MAIN_ALTSTACK: AtomicPtr = AtomicPtr::new(ptr::null_mut()); - static NEED_ALTSTACK: AtomicBool = AtomicBool::new(false); + static PAGE_SIZE: Atomic = AtomicUsize::new(0); + static MAIN_ALTSTACK: Atomic<*mut libc::c_void> = AtomicPtr::new(ptr::null_mut()); + static NEED_ALTSTACK: Atomic = AtomicBool::new(false); /// # Safety /// Must be called only once diff --git a/library/std/src/sys/pal/unix/sync/condvar.rs b/library/std/src/sys/pal/unix/sync/condvar.rs index 73631053e9f47..efa6f8d776559 100644 --- a/library/std/src/sys/pal/unix/sync/condvar.rs +++ b/library/std/src/sys/pal/unix/sync/condvar.rs @@ -64,7 +64,10 @@ impl Condvar { // https://gist.github.com/stepancheg/198db4623a20aad2ad7cddb8fda4a63c // // To work around this issue, the timeout is clamped to 1000 years. - #[cfg(target_vendor = "apple")] + // + // Cygwin implementation is based on NT API and a super large timeout + // makes the syscall block forever. + #[cfg(any(target_vendor = "apple", target_os = "cygwin"))] let dur = Duration::min(dur, Duration::from_secs(1000 * 365 * 86400)); let timeout = Timespec::now(Self::CLOCK).checked_add_duration(&dur); diff --git a/library/std/src/sys/pal/unix/thread.rs b/library/std/src/sys/pal/unix/thread.rs index 9078dd1c23166..afda7c65e1084 100644 --- a/library/std/src/sys/pal/unix/thread.rs +++ b/library/std/src/sys/pal/unix/thread.rs @@ -8,31 +8,19 @@ use crate::sys::weak::weak; use crate::sys::{os, stack_overflow}; use crate::time::Duration; use crate::{cmp, io, ptr}; -#[cfg(not(any(target_os = "l4re", target_os = "vxworks", target_os = "espidf")))] +#[cfg(not(any( + target_os = "l4re", + target_os = "vxworks", + target_os = "espidf", + target_os = "nuttx" +)))] pub const DEFAULT_MIN_STACK_SIZE: usize = 2 * 1024 * 1024; #[cfg(target_os = "l4re")] pub const DEFAULT_MIN_STACK_SIZE: usize = 1024 * 1024; #[cfg(target_os = "vxworks")] pub const DEFAULT_MIN_STACK_SIZE: usize = 256 * 1024; -#[cfg(target_os = "espidf")] -pub const DEFAULT_MIN_STACK_SIZE: usize = 0; // 0 indicates that the stack size configured in the ESP-IDF menuconfig system should be used - -#[cfg(target_os = "fuchsia")] -mod zircon { - type zx_handle_t = u32; - type zx_status_t = i32; - pub const ZX_PROP_NAME: u32 = 3; - - unsafe extern "C" { - pub fn zx_object_set_property( - handle: zx_handle_t, - property: u32, - value: *const libc::c_void, - value_size: libc::size_t, - ) -> zx_status_t; - pub fn zx_thread_self() -> zx_handle_t; - } -} +#[cfg(any(target_os = "espidf", target_os = "nuttx"))] +pub const DEFAULT_MIN_STACK_SIZE: usize = 0; // 0 indicates that the stack size configured in the ESP-IDF/NuttX menuconfig system should be used pub struct Thread { id: libc::pthread_t, @@ -52,10 +40,10 @@ impl Thread { let mut attr: mem::MaybeUninit = mem::MaybeUninit::uninit(); assert_eq!(libc::pthread_attr_init(attr.as_mut_ptr()), 0); - #[cfg(target_os = "espidf")] + #[cfg(any(target_os = "espidf", target_os = "nuttx"))] if stack > 0 { // Only set the stack if a non-zero value is passed - // 0 is used as an indication that the default stack size configured in the ESP-IDF menuconfig system should be used + // 0 is used as an indication that the default stack size configured in the ESP-IDF/NuttX menuconfig system should be used assert_eq!( libc::pthread_attr_setstacksize( attr.as_mut_ptr(), @@ -65,7 +53,7 @@ impl Thread { ); } - #[cfg(not(target_os = "espidf"))] + #[cfg(not(any(target_os = "espidf", target_os = "nuttx")))] { let stack_size = cmp::max(stack, min_stack_size(attr.as_ptr())); @@ -189,9 +177,6 @@ impl Thread { } #[cfg(any(target_os = "solaris", target_os = "illumos", target_os = "nto"))] - // FIXME(#115199): Rust currently omits weak function definitions - // and its metadata from LLVM IR. - #[no_sanitize(cfi)] pub fn set_name(name: &CStr) { weak!( fn pthread_setname_np( @@ -214,7 +199,7 @@ impl Thread { #[cfg(target_os = "fuchsia")] pub fn set_name(name: &CStr) { - use self::zircon::*; + use super::fuchsia::*; unsafe { zx_object_set_property( zx_thread_self(), diff --git a/library/std/src/sys/pal/unix/time.rs b/library/std/src/sys/pal/unix/time.rs index b8469b1681f03..0074d7674741b 100644 --- a/library/std/src/sys/pal/unix/time.rs +++ b/library/std/src/sys/pal/unix/time.rs @@ -96,17 +96,6 @@ impl Timespec { } } - // FIXME(#115199): Rust currently omits weak function definitions - // and its metadata from LLVM IR. - #[cfg_attr( - all( - target_os = "linux", - target_env = "gnu", - target_pointer_width = "32", - not(target_arch = "riscv32") - ), - no_sanitize(cfi) - )] pub fn now(clock: libc::clockid_t) -> Timespec { use crate::mem::MaybeUninit; use crate::sys::cvt; diff --git a/library/std/src/sys/pal/unix/weak.rs b/library/std/src/sys/pal/unix/weak.rs index e7f4e005cc48c..c8cf75b876c26 100644 --- a/library/std/src/sys/pal/unix/weak.rs +++ b/library/std/src/sys/pal/unix/weak.rs @@ -20,10 +20,11 @@ // each instance of `weak!` and `syscall!`. Rather than trying to unify all of // that, we'll just allow that some unix targets don't use this module at all. #![allow(dead_code, unused_macros)] +#![forbid(unsafe_op_in_unsafe_fn)] use crate::ffi::CStr; use crate::marker::PhantomData; -use crate::sync::atomic::{self, AtomicPtr, Ordering}; +use crate::sync::atomic::{self, Atomic, AtomicPtr, Ordering}; use crate::{mem, ptr}; // We can use true weak linkage on ELF targets. @@ -79,7 +80,7 @@ pub(crate) macro dlsym { } pub(crate) struct DlsymWeak { name: &'static str, - func: AtomicPtr, + func: Atomic<*mut libc::c_void>, _marker: PhantomData, } @@ -131,11 +132,15 @@ impl DlsymWeak { unsafe fn initialize(&self) -> Option { assert_eq!(size_of::(), size_of::<*mut libc::c_void>()); - let val = fetch(self.name); + let val = unsafe { fetch(self.name) }; // This synchronizes with the acquire fence in `get`. self.func.store(val, Ordering::Release); - if val.is_null() { None } else { Some(mem::transmute_copy::<*mut libc::c_void, F>(&val)) } + if val.is_null() { + None + } else { + Some(unsafe { mem::transmute_copy::<*mut libc::c_void, F>(&val) }) + } } } @@ -144,20 +149,17 @@ unsafe fn fetch(name: &str) -> *mut libc::c_void { Ok(cstr) => cstr, Err(..) => return ptr::null_mut(), }; - libc::dlsym(libc::RTLD_DEFAULT, name.as_ptr()) + unsafe { libc::dlsym(libc::RTLD_DEFAULT, name.as_ptr()) } } #[cfg(not(any(target_os = "linux", target_os = "android")))] pub(crate) macro syscall { (fn $name:ident($($param:ident : $t:ty),* $(,)?) -> $ret:ty;) => ( - // FIXME(#115199): Rust currently omits weak function definitions - // and its metadata from LLVM IR. - #[no_sanitize(cfi)] unsafe fn $name($($param: $t),*) -> $ret { weak!(fn $name($($param: $t),*) -> $ret;); if let Some(fun) = $name.get() { - fun($($param),*) + unsafe { fun($($param),*) } } else { super::os::set_errno(libc::ENOSYS); -1 @@ -177,9 +179,9 @@ pub(crate) macro syscall { // Use a weak symbol from libc when possible, allowing `LD_PRELOAD` // interposition, but if it's not found just use a raw syscall. if let Some(fun) = $name.get() { - fun($($param),*) + unsafe { fun($($param),*) } } else { - libc::syscall(libc::${concat(SYS_, $name)}, $($param),*) as $ret + unsafe { libc::syscall(libc::${concat(SYS_, $name)}, $($param),*) as $ret } } } ) @@ -189,7 +191,7 @@ pub(crate) macro syscall { pub(crate) macro raw_syscall { (fn $name:ident($($param:ident : $t:ty),* $(,)?) -> $ret:ty;) => ( unsafe fn $name($($param: $t),*) -> $ret { - libc::syscall(libc::${concat(SYS_, $name)}, $($param),*) as $ret + unsafe { libc::syscall(libc::${concat(SYS_, $name)}, $($param),*) as $ret } } ) } diff --git a/library/std/src/sys/pal/unsupported/args.rs b/library/std/src/sys/pal/unsupported/args.rs deleted file mode 100644 index a2d75a6197633..0000000000000 --- a/library/std/src/sys/pal/unsupported/args.rs +++ /dev/null @@ -1,36 +0,0 @@ -use crate::ffi::OsString; -use crate::fmt; - -pub struct Args {} - -pub fn args() -> Args { - Args {} -} - -impl fmt::Debug for Args { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.debug_list().finish() - } -} - -impl Iterator for Args { - type Item = OsString; - fn next(&mut self) -> Option { - None - } - fn size_hint(&self) -> (usize, Option) { - (0, Some(0)) - } -} - -impl ExactSizeIterator for Args { - fn len(&self) -> usize { - 0 - } -} - -impl DoubleEndedIterator for Args { - fn next_back(&mut self) -> Option { - None - } -} diff --git a/library/std/src/sys/pal/unsupported/env.rs b/library/std/src/sys/pal/unsupported/env.rs deleted file mode 100644 index d2efec506c56b..0000000000000 --- a/library/std/src/sys/pal/unsupported/env.rs +++ /dev/null @@ -1,9 +0,0 @@ -pub mod os { - pub const FAMILY: &str = ""; - pub const OS: &str = ""; - pub const DLL_PREFIX: &str = ""; - pub const DLL_SUFFIX: &str = ""; - pub const DLL_EXTENSION: &str = ""; - pub const EXE_SUFFIX: &str = ""; - pub const EXE_EXTENSION: &str = ""; -} diff --git a/library/std/src/sys/pal/unsupported/mod.rs b/library/std/src/sys/pal/unsupported/mod.rs index 38838b915b5c1..5e3295b1331a3 100644 --- a/library/std/src/sys/pal/unsupported/mod.rs +++ b/library/std/src/sys/pal/unsupported/mod.rs @@ -1,7 +1,5 @@ #![deny(unsafe_op_in_unsafe_fn)] -pub mod args; -pub mod env; pub mod os; pub mod pipe; pub mod thread; diff --git a/library/std/src/sys/pal/unsupported/os.rs b/library/std/src/sys/pal/unsupported/os.rs index 48de4312885fe..a8ef97ecf67ac 100644 --- a/library/std/src/sys/pal/unsupported/os.rs +++ b/library/std/src/sys/pal/unsupported/os.rs @@ -62,47 +62,6 @@ pub fn current_exe() -> io::Result { unsupported() } -pub struct Env(!); - -impl Env { - // FIXME(https://github.com/rust-lang/rust/issues/114583): Remove this when ::fmt matches ::fmt. - pub fn str_debug(&self) -> impl fmt::Debug + '_ { - let Self(inner) = self; - match *inner {} - } -} - -impl fmt::Debug for Env { - fn fmt(&self, _: &mut fmt::Formatter<'_>) -> fmt::Result { - let Self(inner) = self; - match *inner {} - } -} - -impl Iterator for Env { - type Item = (OsString, OsString); - fn next(&mut self) -> Option<(OsString, OsString)> { - let Self(inner) = self; - match *inner {} - } -} - -pub fn env() -> Env { - panic!("not supported on this platform") -} - -pub fn getenv(_: &OsStr) -> Option { - None -} - -pub unsafe fn setenv(_: &OsStr, _: &OsStr) -> io::Result<()> { - Err(io::const_error!(io::ErrorKind::Unsupported, "cannot set env vars on this platform")) -} - -pub unsafe fn unsetenv(_: &OsStr) -> io::Result<()> { - Err(io::const_error!(io::ErrorKind::Unsupported, "cannot unset env vars on this platform")) -} - pub fn temp_dir() -> PathBuf { panic!("no filesystem on this platform") } diff --git a/library/std/src/sys/pal/wasi/args.rs b/library/std/src/sys/pal/wasi/args.rs deleted file mode 100644 index 52cfa202af825..0000000000000 --- a/library/std/src/sys/pal/wasi/args.rs +++ /dev/null @@ -1,61 +0,0 @@ -#![forbid(unsafe_op_in_unsafe_fn)] - -use crate::ffi::{CStr, OsStr, OsString}; -use crate::os::wasi::ffi::OsStrExt; -use crate::{fmt, vec}; - -pub struct Args { - iter: vec::IntoIter, -} - -impl !Send for Args {} -impl !Sync for Args {} - -/// Returns the command line arguments -pub fn args() -> Args { - Args { iter: maybe_args().unwrap_or(Vec::new()).into_iter() } -} - -fn maybe_args() -> Option> { - unsafe { - let (argc, buf_size) = wasi::args_sizes_get().ok()?; - let mut argv = Vec::with_capacity(argc); - let mut buf = Vec::with_capacity(buf_size); - wasi::args_get(argv.as_mut_ptr(), buf.as_mut_ptr()).ok()?; - argv.set_len(argc); - let mut ret = Vec::with_capacity(argc); - for ptr in argv { - let s = CStr::from_ptr(ptr.cast()); - ret.push(OsStr::from_bytes(s.to_bytes()).to_owned()); - } - Some(ret) - } -} - -impl fmt::Debug for Args { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - self.iter.as_slice().fmt(f) - } -} - -impl Iterator for Args { - type Item = OsString; - fn next(&mut self) -> Option { - self.iter.next() - } - fn size_hint(&self) -> (usize, Option) { - self.iter.size_hint() - } -} - -impl ExactSizeIterator for Args { - fn len(&self) -> usize { - self.iter.len() - } -} - -impl DoubleEndedIterator for Args { - fn next_back(&mut self) -> Option { - self.iter.next_back() - } -} diff --git a/library/std/src/sys/pal/wasi/env.rs b/library/std/src/sys/pal/wasi/env.rs deleted file mode 100644 index 8d44498267360..0000000000000 --- a/library/std/src/sys/pal/wasi/env.rs +++ /dev/null @@ -1,11 +0,0 @@ -#![forbid(unsafe_op_in_unsafe_fn)] - -pub mod os { - pub const FAMILY: &str = ""; - pub const OS: &str = ""; - pub const DLL_PREFIX: &str = ""; - pub const DLL_SUFFIX: &str = ".wasm"; - pub const DLL_EXTENSION: &str = "wasm"; - pub const EXE_SUFFIX: &str = ".wasm"; - pub const EXE_EXTENSION: &str = "wasm"; -} diff --git a/library/std/src/sys/pal/wasi/fd.rs b/library/std/src/sys/pal/wasi/fd.rs deleted file mode 100644 index 4b3dd1ce49ef6..0000000000000 --- a/library/std/src/sys/pal/wasi/fd.rs +++ /dev/null @@ -1,332 +0,0 @@ -#![forbid(unsafe_op_in_unsafe_fn)] -#![allow(dead_code)] - -use super::err2io; -use crate::io::{self, BorrowedCursor, IoSlice, IoSliceMut, SeekFrom}; -use crate::mem; -use crate::net::Shutdown; -use crate::os::wasi::io::{AsFd, AsRawFd, BorrowedFd, FromRawFd, IntoRawFd, OwnedFd, RawFd}; -use crate::sys_common::{AsInner, AsInnerMut, FromInner, IntoInner}; - -#[derive(Debug)] -pub struct WasiFd { - fd: OwnedFd, -} - -fn iovec<'a>(a: &'a mut [IoSliceMut<'_>]) -> &'a [wasi::Iovec] { - assert_eq!(size_of::>(), size_of::()); - assert_eq!(align_of::>(), align_of::()); - // SAFETY: `IoSliceMut` and `IoVec` have exactly the same memory layout. - // We decorate our `IoSliceMut` with `repr(transparent)` (see `io.rs`), and - // `crate::io::IoSliceMut` is a `repr(transparent)` wrapper around our type, so this is - // guaranteed. - unsafe { mem::transmute(a) } -} - -fn ciovec<'a>(a: &'a [IoSlice<'_>]) -> &'a [wasi::Ciovec] { - assert_eq!(size_of::>(), size_of::()); - assert_eq!(align_of::>(), align_of::()); - // SAFETY: `IoSlice` and `CIoVec` have exactly the same memory layout. - // We decorate our `IoSlice` with `repr(transparent)` (see `io.rs`), and - // `crate::io::IoSlice` is a `repr(transparent)` wrapper around our type, so this is - // guaranteed. - unsafe { mem::transmute(a) } -} - -impl WasiFd { - pub fn datasync(&self) -> io::Result<()> { - unsafe { wasi::fd_datasync(self.as_raw_fd() as wasi::Fd).map_err(err2io) } - } - - pub fn pread(&self, bufs: &mut [IoSliceMut<'_>], offset: u64) -> io::Result { - unsafe { wasi::fd_pread(self.as_raw_fd() as wasi::Fd, iovec(bufs), offset).map_err(err2io) } - } - - pub fn pwrite(&self, bufs: &[IoSlice<'_>], offset: u64) -> io::Result { - unsafe { - wasi::fd_pwrite(self.as_raw_fd() as wasi::Fd, ciovec(bufs), offset).map_err(err2io) - } - } - - pub fn read(&self, bufs: &mut [IoSliceMut<'_>]) -> io::Result { - unsafe { wasi::fd_read(self.as_raw_fd() as wasi::Fd, iovec(bufs)).map_err(err2io) } - } - - pub fn read_buf(&self, mut buf: BorrowedCursor<'_>) -> io::Result<()> { - unsafe { - let bufs = [wasi::Iovec { - buf: buf.as_mut().as_mut_ptr() as *mut u8, - buf_len: buf.capacity(), - }]; - match wasi::fd_read(self.as_raw_fd() as wasi::Fd, &bufs) { - Ok(n) => { - buf.advance_unchecked(n); - Ok(()) - } - Err(e) => Err(err2io(e)), - } - } - } - - pub fn write(&self, bufs: &[IoSlice<'_>]) -> io::Result { - unsafe { wasi::fd_write(self.as_raw_fd() as wasi::Fd, ciovec(bufs)).map_err(err2io) } - } - - pub fn seek(&self, pos: SeekFrom) -> io::Result { - let (whence, offset) = match pos { - SeekFrom::Start(pos) => (wasi::WHENCE_SET, pos as i64), - SeekFrom::End(pos) => (wasi::WHENCE_END, pos), - SeekFrom::Current(pos) => (wasi::WHENCE_CUR, pos), - }; - unsafe { wasi::fd_seek(self.as_raw_fd() as wasi::Fd, offset, whence).map_err(err2io) } - } - - pub fn tell(&self) -> io::Result { - unsafe { wasi::fd_tell(self.as_raw_fd() as wasi::Fd).map_err(err2io) } - } - - // FIXME: __wasi_fd_fdstat_get - - pub fn set_flags(&self, flags: wasi::Fdflags) -> io::Result<()> { - unsafe { wasi::fd_fdstat_set_flags(self.as_raw_fd() as wasi::Fd, flags).map_err(err2io) } - } - - pub fn set_rights(&self, base: wasi::Rights, inheriting: wasi::Rights) -> io::Result<()> { - unsafe { - wasi::fd_fdstat_set_rights(self.as_raw_fd() as wasi::Fd, base, inheriting) - .map_err(err2io) - } - } - - pub fn sync(&self) -> io::Result<()> { - unsafe { wasi::fd_sync(self.as_raw_fd() as wasi::Fd).map_err(err2io) } - } - - pub(crate) fn advise(&self, offset: u64, len: u64, advice: wasi::Advice) -> io::Result<()> { - unsafe { - wasi::fd_advise(self.as_raw_fd() as wasi::Fd, offset, len, advice).map_err(err2io) - } - } - - pub fn allocate(&self, offset: u64, len: u64) -> io::Result<()> { - unsafe { wasi::fd_allocate(self.as_raw_fd() as wasi::Fd, offset, len).map_err(err2io) } - } - - pub fn create_directory(&self, path: &str) -> io::Result<()> { - unsafe { wasi::path_create_directory(self.as_raw_fd() as wasi::Fd, path).map_err(err2io) } - } - - pub fn link( - &self, - old_flags: wasi::Lookupflags, - old_path: &str, - new_fd: &WasiFd, - new_path: &str, - ) -> io::Result<()> { - unsafe { - wasi::path_link( - self.as_raw_fd() as wasi::Fd, - old_flags, - old_path, - new_fd.as_raw_fd() as wasi::Fd, - new_path, - ) - .map_err(err2io) - } - } - - pub fn open( - &self, - dirflags: wasi::Lookupflags, - path: &str, - oflags: wasi::Oflags, - fs_rights_base: wasi::Rights, - fs_rights_inheriting: wasi::Rights, - fs_flags: wasi::Fdflags, - ) -> io::Result { - unsafe { - wasi::path_open( - self.as_raw_fd() as wasi::Fd, - dirflags, - path, - oflags, - fs_rights_base, - fs_rights_inheriting, - fs_flags, - ) - .map(|fd| WasiFd::from_raw_fd(fd as RawFd)) - .map_err(err2io) - } - } - - pub fn readdir(&self, buf: &mut [u8], cookie: wasi::Dircookie) -> io::Result { - unsafe { - wasi::fd_readdir(self.as_raw_fd() as wasi::Fd, buf.as_mut_ptr(), buf.len(), cookie) - .map_err(err2io) - } - } - - pub fn readlink(&self, path: &str, buf: &mut [u8]) -> io::Result { - unsafe { - wasi::path_readlink(self.as_raw_fd() as wasi::Fd, path, buf.as_mut_ptr(), buf.len()) - .map_err(err2io) - } - } - - pub fn rename(&self, old_path: &str, new_fd: &WasiFd, new_path: &str) -> io::Result<()> { - unsafe { - wasi::path_rename( - self.as_raw_fd() as wasi::Fd, - old_path, - new_fd.as_raw_fd() as wasi::Fd, - new_path, - ) - .map_err(err2io) - } - } - - pub(crate) fn filestat_get(&self) -> io::Result { - unsafe { wasi::fd_filestat_get(self.as_raw_fd() as wasi::Fd).map_err(err2io) } - } - - pub fn filestat_set_times( - &self, - atim: wasi::Timestamp, - mtim: wasi::Timestamp, - fstflags: wasi::Fstflags, - ) -> io::Result<()> { - unsafe { - wasi::fd_filestat_set_times(self.as_raw_fd() as wasi::Fd, atim, mtim, fstflags) - .map_err(err2io) - } - } - - pub fn filestat_set_size(&self, size: u64) -> io::Result<()> { - unsafe { wasi::fd_filestat_set_size(self.as_raw_fd() as wasi::Fd, size).map_err(err2io) } - } - - pub(crate) fn path_filestat_get( - &self, - flags: wasi::Lookupflags, - path: &str, - ) -> io::Result { - unsafe { - wasi::path_filestat_get(self.as_raw_fd() as wasi::Fd, flags, path).map_err(err2io) - } - } - - pub fn path_filestat_set_times( - &self, - flags: wasi::Lookupflags, - path: &str, - atim: wasi::Timestamp, - mtim: wasi::Timestamp, - fstflags: wasi::Fstflags, - ) -> io::Result<()> { - unsafe { - wasi::path_filestat_set_times( - self.as_raw_fd() as wasi::Fd, - flags, - path, - atim, - mtim, - fstflags, - ) - .map_err(err2io) - } - } - - pub fn symlink(&self, old_path: &str, new_path: &str) -> io::Result<()> { - unsafe { - wasi::path_symlink(old_path, self.as_raw_fd() as wasi::Fd, new_path).map_err(err2io) - } - } - - pub fn unlink_file(&self, path: &str) -> io::Result<()> { - unsafe { wasi::path_unlink_file(self.as_raw_fd() as wasi::Fd, path).map_err(err2io) } - } - - pub fn remove_directory(&self, path: &str) -> io::Result<()> { - unsafe { wasi::path_remove_directory(self.as_raw_fd() as wasi::Fd, path).map_err(err2io) } - } - - pub fn sock_accept(&self, flags: wasi::Fdflags) -> io::Result { - unsafe { wasi::sock_accept(self.as_raw_fd() as wasi::Fd, flags).map_err(err2io) } - } - - pub fn sock_recv( - &self, - ri_data: &mut [IoSliceMut<'_>], - ri_flags: wasi::Riflags, - ) -> io::Result<(usize, wasi::Roflags)> { - unsafe { - wasi::sock_recv(self.as_raw_fd() as wasi::Fd, iovec(ri_data), ri_flags).map_err(err2io) - } - } - - pub fn sock_send(&self, si_data: &[IoSlice<'_>], si_flags: wasi::Siflags) -> io::Result { - unsafe { - wasi::sock_send(self.as_raw_fd() as wasi::Fd, ciovec(si_data), si_flags).map_err(err2io) - } - } - - pub fn sock_shutdown(&self, how: Shutdown) -> io::Result<()> { - let how = match how { - Shutdown::Read => wasi::SDFLAGS_RD, - Shutdown::Write => wasi::SDFLAGS_WR, - Shutdown::Both => wasi::SDFLAGS_WR | wasi::SDFLAGS_RD, - }; - unsafe { wasi::sock_shutdown(self.as_raw_fd() as wasi::Fd, how).map_err(err2io) } - } -} - -impl AsInner for WasiFd { - #[inline] - fn as_inner(&self) -> &OwnedFd { - &self.fd - } -} - -impl AsInnerMut for WasiFd { - #[inline] - fn as_inner_mut(&mut self) -> &mut OwnedFd { - &mut self.fd - } -} - -impl IntoInner for WasiFd { - fn into_inner(self) -> OwnedFd { - self.fd - } -} - -impl FromInner for WasiFd { - fn from_inner(owned_fd: OwnedFd) -> Self { - Self { fd: owned_fd } - } -} - -impl AsFd for WasiFd { - fn as_fd(&self) -> BorrowedFd<'_> { - self.fd.as_fd() - } -} - -impl AsRawFd for WasiFd { - #[inline] - fn as_raw_fd(&self) -> RawFd { - self.fd.as_raw_fd() - } -} - -impl IntoRawFd for WasiFd { - fn into_raw_fd(self) -> RawFd { - self.fd.into_raw_fd() - } -} - -impl FromRawFd for WasiFd { - unsafe fn from_raw_fd(raw_fd: RawFd) -> Self { - unsafe { Self { fd: FromRawFd::from_raw_fd(raw_fd) } } - } -} diff --git a/library/std/src/sys/pal/wasi/mod.rs b/library/std/src/sys/pal/wasi/mod.rs index cdd613f76b638..61dd1c3f98b10 100644 --- a/library/std/src/sys/pal/wasi/mod.rs +++ b/library/std/src/sys/pal/wasi/mod.rs @@ -13,9 +13,6 @@ //! compiling for wasm. That way it's a compile time error for something that's //! guaranteed to be a runtime error! -pub mod args; -pub mod env; -pub mod fd; #[allow(unused)] #[path = "../wasm/atomics/futex.rs"] pub mod futex; diff --git a/library/std/src/sys/pal/wasi/os.rs b/library/std/src/sys/pal/wasi/os.rs index ba2b65a1f40dc..672cf70d1a5b2 100644 --- a/library/std/src/sys/pal/wasi/os.rs +++ b/library/std/src/sys/pal/wasi/os.rs @@ -1,19 +1,16 @@ #![forbid(unsafe_op_in_unsafe_fn)] -use core::slice::memchr; - use crate::error::Error as StdError; use crate::ffi::{CStr, OsStr, OsString}; use crate::marker::PhantomData; -use crate::ops::Drop; use crate::os::wasi::prelude::*; use crate::path::{self, PathBuf}; -use crate::sys::common::small_c_string::{run_path_with_cstr, run_with_cstr}; +use crate::sys::common::small_c_string::run_path_with_cstr; use crate::sys::unsupported; -use crate::{fmt, io, str, vec}; +use crate::{fmt, io, str}; // Add a few symbols not in upstream `libc` just yet. -mod libc { +pub mod libc { pub use libc::*; unsafe extern "C" { @@ -23,28 +20,6 @@ mod libc { } } -cfg_if::cfg_if! { - if #[cfg(target_feature = "atomics")] { - // Access to the environment must be protected by a lock in multi-threaded scenarios. - use crate::sync::{PoisonError, RwLock}; - static ENV_LOCK: RwLock<()> = RwLock::new(()); - pub fn env_read_lock() -> impl Drop { - ENV_LOCK.read().unwrap_or_else(PoisonError::into_inner) - } - pub fn env_write_lock() -> impl Drop { - ENV_LOCK.write().unwrap_or_else(PoisonError::into_inner) - } - } else { - // No need for a lock if we are single-threaded. - pub fn env_read_lock() -> impl Drop { - Box::new(()) - } - pub fn env_write_lock() -> impl Drop { - Box::new(()) - } - } -} - pub fn errno() -> i32 { unsafe extern "C" { #[thread_local] @@ -141,123 +116,6 @@ pub fn current_exe() -> io::Result { unsupported() } -pub struct Env { - iter: vec::IntoIter<(OsString, OsString)>, -} - -// FIXME(https://github.com/rust-lang/rust/issues/114583): Remove this when ::fmt matches ::fmt. -pub struct EnvStrDebug<'a> { - slice: &'a [(OsString, OsString)], -} - -impl fmt::Debug for EnvStrDebug<'_> { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - let Self { slice } = self; - f.debug_list() - .entries(slice.iter().map(|(a, b)| (a.to_str().unwrap(), b.to_str().unwrap()))) - .finish() - } -} - -impl Env { - pub fn str_debug(&self) -> impl fmt::Debug + '_ { - let Self { iter } = self; - EnvStrDebug { slice: iter.as_slice() } - } -} - -impl fmt::Debug for Env { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - let Self { iter } = self; - f.debug_list().entries(iter.as_slice()).finish() - } -} - -impl !Send for Env {} -impl !Sync for Env {} - -impl Iterator for Env { - type Item = (OsString, OsString); - fn next(&mut self) -> Option<(OsString, OsString)> { - self.iter.next() - } - fn size_hint(&self) -> (usize, Option) { - self.iter.size_hint() - } -} - -pub fn env() -> Env { - unsafe { - let _guard = env_read_lock(); - - // Use `__wasilibc_get_environ` instead of `environ` here so that we - // don't require wasi-libc to eagerly initialize the environment - // variables. - let mut environ = libc::__wasilibc_get_environ(); - - let mut result = Vec::new(); - if !environ.is_null() { - while !(*environ).is_null() { - if let Some(key_value) = parse(CStr::from_ptr(*environ).to_bytes()) { - result.push(key_value); - } - environ = environ.add(1); - } - } - return Env { iter: result.into_iter() }; - } - - // See src/libstd/sys/pal/unix/os.rs, same as that - fn parse(input: &[u8]) -> Option<(OsString, OsString)> { - if input.is_empty() { - return None; - } - let pos = memchr::memchr(b'=', &input[1..]).map(|p| p + 1); - pos.map(|p| { - ( - OsStringExt::from_vec(input[..p].to_vec()), - OsStringExt::from_vec(input[p + 1..].to_vec()), - ) - }) - } -} - -pub fn getenv(k: &OsStr) -> Option { - // environment variables with a nul byte can't be set, so their value is - // always None as well - run_with_cstr(k.as_bytes(), &|k| { - let _guard = env_read_lock(); - let v = unsafe { libc::getenv(k.as_ptr()) } as *const libc::c_char; - - if v.is_null() { - Ok(None) - } else { - // SAFETY: `v` cannot be mutated while executing this line since we've a read lock - let bytes = unsafe { CStr::from_ptr(v) }.to_bytes().to_vec(); - - Ok(Some(OsStringExt::from_vec(bytes))) - } - }) - .ok() - .flatten() -} - -pub unsafe fn setenv(k: &OsStr, v: &OsStr) -> io::Result<()> { - run_with_cstr(k.as_bytes(), &|k| { - run_with_cstr(v.as_bytes(), &|v| unsafe { - let _guard = env_write_lock(); - cvt(libc::setenv(k.as_ptr(), v.as_ptr(), 1)).map(drop) - }) - }) -} - -pub unsafe fn unsetenv(n: &OsStr) -> io::Result<()> { - run_with_cstr(n.as_bytes(), &|nbuf| unsafe { - let _guard = env_write_lock(); - cvt(libc::unsetenv(nbuf.as_ptr())).map(drop) - }) -} - #[allow(dead_code)] pub fn page_size() -> usize { unsafe { libc::sysconf(libc::_SC_PAGESIZE) as usize } @@ -294,6 +152,6 @@ macro_rules! impl_is_minus_one { impl_is_minus_one! { i8 i16 i32 i64 isize } -fn cvt(t: T) -> io::Result { +pub fn cvt(t: T) -> io::Result { if t.is_minus_one() { Err(io::Error::last_os_error()) } else { Ok(t) } } diff --git a/library/std/src/sys/pal/wasip2/mod.rs b/library/std/src/sys/pal/wasip2/mod.rs index 6ac28f1bf4fc5..47fe3221c9093 100644 --- a/library/std/src/sys/pal/wasip2/mod.rs +++ b/library/std/src/sys/pal/wasip2/mod.rs @@ -6,12 +6,6 @@ //! To begin with, this target mirrors the wasi target 1 to 1, but over //! time this will change significantly. -#[path = "../wasi/args.rs"] -pub mod args; -#[path = "../wasi/env.rs"] -pub mod env; -#[path = "../wasi/fd.rs"] -pub mod fd; #[allow(unused)] #[path = "../wasm/atomics/futex.rs"] pub mod futex; @@ -39,7 +33,6 @@ mod helpers; // import conflict rules. If we glob export `helpers` and `common` together, // then the compiler complains about conflicts. -use helpers::err2io; -pub use helpers::{abort_internal, decode_error_kind, is_interrupted}; +pub(crate) use helpers::{abort_internal, decode_error_kind, err2io, is_interrupted}; mod cabi_realloc; diff --git a/library/std/src/sys/pal/wasm/atomics/futex.rs b/library/std/src/sys/pal/wasm/atomics/futex.rs index bdad0da73f0a5..6676aa7e8e3a5 100644 --- a/library/std/src/sys/pal/wasm/atomics/futex.rs +++ b/library/std/src/sys/pal/wasm/atomics/futex.rs @@ -3,16 +3,16 @@ use core::arch::wasm32 as wasm; #[cfg(target_arch = "wasm64")] use core::arch::wasm64 as wasm; -use crate::sync::atomic::AtomicU32; +use crate::sync::atomic::Atomic; use crate::time::Duration; /// An atomic for use as a futex that is at least 32-bits but may be larger -pub type Futex = AtomicU32; +pub type Futex = Atomic; /// Must be the underlying type of Futex pub type Primitive = u32; /// An atomic for use as a futex that is at least 8-bits but may be larger. -pub type SmallFutex = AtomicU32; +pub type SmallFutex = Atomic; /// Must be the underlying type of SmallFutex pub type SmallPrimitive = u32; @@ -21,11 +21,14 @@ pub type SmallPrimitive = u32; /// Returns directly if the futex doesn't hold the expected value. /// /// Returns false on timeout, and true in all other cases. -pub fn futex_wait(futex: &AtomicU32, expected: u32, timeout: Option) -> bool { +pub fn futex_wait(futex: &Atomic, expected: u32, timeout: Option) -> bool { let timeout = timeout.and_then(|t| t.as_nanos().try_into().ok()).unwrap_or(-1); unsafe { - wasm::memory_atomic_wait32(futex as *const AtomicU32 as *mut i32, expected as i32, timeout) - < 2 + wasm::memory_atomic_wait32( + futex as *const Atomic as *mut i32, + expected as i32, + timeout, + ) < 2 } } @@ -33,13 +36,13 @@ pub fn futex_wait(futex: &AtomicU32, expected: u32, timeout: Option) - /// /// Returns true if this actually woke up such a thread, /// or false if no thread was waiting on this futex. -pub fn futex_wake(futex: &AtomicU32) -> bool { - unsafe { wasm::memory_atomic_notify(futex as *const AtomicU32 as *mut i32, 1) > 0 } +pub fn futex_wake(futex: &Atomic) -> bool { + unsafe { wasm::memory_atomic_notify(futex as *const Atomic as *mut i32, 1) > 0 } } /// Wakes up all threads that are waiting on `futex_wait` on this futex. -pub fn futex_wake_all(futex: &AtomicU32) { +pub fn futex_wake_all(futex: &Atomic) { unsafe { - wasm::memory_atomic_notify(futex as *const AtomicU32 as *mut i32, i32::MAX as u32); + wasm::memory_atomic_notify(futex as *const Atomic as *mut i32, i32::MAX as u32); } } diff --git a/library/std/src/sys/pal/wasm/env.rs b/library/std/src/sys/pal/wasm/env.rs deleted file mode 100644 index 730e356d7fe95..0000000000000 --- a/library/std/src/sys/pal/wasm/env.rs +++ /dev/null @@ -1,9 +0,0 @@ -pub mod os { - pub const FAMILY: &str = ""; - pub const OS: &str = ""; - pub const DLL_PREFIX: &str = ""; - pub const DLL_SUFFIX: &str = ".wasm"; - pub const DLL_EXTENSION: &str = "wasm"; - pub const EXE_SUFFIX: &str = ".wasm"; - pub const EXE_EXTENSION: &str = "wasm"; -} diff --git a/library/std/src/sys/pal/wasm/mod.rs b/library/std/src/sys/pal/wasm/mod.rs index 8d39b70d0397a..37cb46a8f6b3f 100644 --- a/library/std/src/sys/pal/wasm/mod.rs +++ b/library/std/src/sys/pal/wasm/mod.rs @@ -16,9 +16,6 @@ #![deny(unsafe_op_in_unsafe_fn)] -#[path = "../unsupported/args.rs"] -pub mod args; -pub mod env; #[path = "../unsupported/os.rs"] pub mod os; #[path = "../unsupported/pipe.rs"] diff --git a/library/std/src/sys/pal/windows/args.rs b/library/std/src/sys/pal/windows/args.rs deleted file mode 100644 index d973743639ab0..0000000000000 --- a/library/std/src/sys/pal/windows/args.rs +++ /dev/null @@ -1,445 +0,0 @@ -//! The Windows command line is just a string -//! -//! -//! This module implements the parsing necessary to turn that string into a list of arguments. - -#[cfg(test)] -mod tests; - -use super::ensure_no_nuls; -use super::os::current_exe; -use crate::ffi::{OsStr, OsString}; -use crate::num::NonZero; -use crate::os::windows::prelude::*; -use crate::path::{Path, PathBuf}; -use crate::sys::path::get_long_path; -use crate::sys::{c, to_u16s}; -use crate::sys_common::AsInner; -use crate::sys_common::wstr::WStrUnits; -use crate::{fmt, io, iter, vec}; - -pub fn args() -> Args { - // SAFETY: `GetCommandLineW` returns a pointer to a null terminated UTF-16 - // string so it's safe for `WStrUnits` to use. - unsafe { - let lp_cmd_line = c::GetCommandLineW(); - let parsed_args_list = parse_lp_cmd_line(WStrUnits::new(lp_cmd_line), || { - current_exe().map(PathBuf::into_os_string).unwrap_or_else(|_| OsString::new()) - }); - - Args { parsed_args_list: parsed_args_list.into_iter() } - } -} - -/// Implements the Windows command-line argument parsing algorithm. -/// -/// Microsoft's documentation for the Windows CLI argument format can be found at -/// -/// -/// A more in-depth explanation is here: -/// -/// -/// Windows includes a function to do command line parsing in shell32.dll. -/// However, this is not used for two reasons: -/// -/// 1. Linking with that DLL causes the process to be registered as a GUI application. -/// GUI applications add a bunch of overhead, even if no windows are drawn. See -/// . -/// -/// 2. It does not follow the modern C/C++ argv rules outlined in the first two links above. -/// -/// This function was tested for equivalence to the C/C++ parsing rules using an -/// extensive test suite available at -/// . -fn parse_lp_cmd_line<'a, F: Fn() -> OsString>( - lp_cmd_line: Option>, - exe_name: F, -) -> Vec { - const BACKSLASH: NonZero = NonZero::new(b'\\' as u16).unwrap(); - const QUOTE: NonZero = NonZero::new(b'"' as u16).unwrap(); - const TAB: NonZero = NonZero::new(b'\t' as u16).unwrap(); - const SPACE: NonZero = NonZero::new(b' ' as u16).unwrap(); - - let mut ret_val = Vec::new(); - // If the cmd line pointer is null or it points to an empty string then - // return the name of the executable as argv[0]. - if lp_cmd_line.as_ref().and_then(|cmd| cmd.peek()).is_none() { - ret_val.push(exe_name()); - return ret_val; - } - let mut code_units = lp_cmd_line.unwrap(); - - // The executable name at the beginning is special. - let mut in_quotes = false; - let mut cur = Vec::new(); - for w in &mut code_units { - match w { - // A quote mark always toggles `in_quotes` no matter what because - // there are no escape characters when parsing the executable name. - QUOTE => in_quotes = !in_quotes, - // If not `in_quotes` then whitespace ends argv[0]. - SPACE | TAB if !in_quotes => break, - // In all other cases the code unit is taken literally. - _ => cur.push(w.get()), - } - } - // Skip whitespace. - code_units.advance_while(|w| w == SPACE || w == TAB); - ret_val.push(OsString::from_wide(&cur)); - - // Parse the arguments according to these rules: - // * All code units are taken literally except space, tab, quote and backslash. - // * When not `in_quotes`, space and tab separate arguments. Consecutive spaces and tabs are - // treated as a single separator. - // * A space or tab `in_quotes` is taken literally. - // * A quote toggles `in_quotes` mode unless it's escaped. An escaped quote is taken literally. - // * A quote can be escaped if preceded by an odd number of backslashes. - // * If any number of backslashes is immediately followed by a quote then the number of - // backslashes is halved (rounding down). - // * Backslashes not followed by a quote are all taken literally. - // * If `in_quotes` then a quote can also be escaped using another quote - // (i.e. two consecutive quotes become one literal quote). - let mut cur = Vec::new(); - let mut in_quotes = false; - while let Some(w) = code_units.next() { - match w { - // If not `in_quotes`, a space or tab ends the argument. - SPACE | TAB if !in_quotes => { - ret_val.push(OsString::from_wide(&cur[..])); - cur.truncate(0); - - // Skip whitespace. - code_units.advance_while(|w| w == SPACE || w == TAB); - } - // Backslashes can escape quotes or backslashes but only if consecutive backslashes are followed by a quote. - BACKSLASH => { - let backslash_count = code_units.advance_while(|w| w == BACKSLASH) + 1; - if code_units.peek() == Some(QUOTE) { - cur.extend(iter::repeat(BACKSLASH.get()).take(backslash_count / 2)); - // The quote is escaped if there are an odd number of backslashes. - if backslash_count % 2 == 1 { - code_units.next(); - cur.push(QUOTE.get()); - } - } else { - // If there is no quote on the end then there is no escaping. - cur.extend(iter::repeat(BACKSLASH.get()).take(backslash_count)); - } - } - // If `in_quotes` and not backslash escaped (see above) then a quote either - // unsets `in_quote` or is escaped by another quote. - QUOTE if in_quotes => match code_units.peek() { - // Two consecutive quotes when `in_quotes` produces one literal quote. - Some(QUOTE) => { - cur.push(QUOTE.get()); - code_units.next(); - } - // Otherwise set `in_quotes`. - Some(_) => in_quotes = false, - // The end of the command line. - // Push `cur` even if empty, which we do by breaking while `in_quotes` is still set. - None => break, - }, - // If not `in_quotes` and not BACKSLASH escaped (see above) then a quote sets `in_quote`. - QUOTE => in_quotes = true, - // Everything else is always taken literally. - _ => cur.push(w.get()), - } - } - // Push the final argument, if any. - if !cur.is_empty() || in_quotes { - ret_val.push(OsString::from_wide(&cur[..])); - } - ret_val -} - -pub struct Args { - parsed_args_list: vec::IntoIter, -} - -impl fmt::Debug for Args { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - self.parsed_args_list.as_slice().fmt(f) - } -} - -impl Iterator for Args { - type Item = OsString; - fn next(&mut self) -> Option { - self.parsed_args_list.next() - } - fn size_hint(&self) -> (usize, Option) { - self.parsed_args_list.size_hint() - } -} - -impl DoubleEndedIterator for Args { - fn next_back(&mut self) -> Option { - self.parsed_args_list.next_back() - } -} - -impl ExactSizeIterator for Args { - fn len(&self) -> usize { - self.parsed_args_list.len() - } -} - -#[derive(Debug)] -pub(crate) enum Arg { - /// Add quotes (if needed) - Regular(OsString), - /// Append raw string without quoting - Raw(OsString), -} - -enum Quote { - // Every arg is quoted - Always, - // Whitespace and empty args are quoted - Auto, - // Arg appended without any changes (#29494) - Never, -} - -pub(crate) fn append_arg(cmd: &mut Vec, arg: &Arg, force_quotes: bool) -> io::Result<()> { - let (arg, quote) = match arg { - Arg::Regular(arg) => (arg, if force_quotes { Quote::Always } else { Quote::Auto }), - Arg::Raw(arg) => (arg, Quote::Never), - }; - - // If an argument has 0 characters then we need to quote it to ensure - // that it actually gets passed through on the command line or otherwise - // it will be dropped entirely when parsed on the other end. - ensure_no_nuls(arg)?; - let arg_bytes = arg.as_encoded_bytes(); - let (quote, escape) = match quote { - Quote::Always => (true, true), - Quote::Auto => { - (arg_bytes.iter().any(|c| *c == b' ' || *c == b'\t') || arg_bytes.is_empty(), true) - } - Quote::Never => (false, false), - }; - if quote { - cmd.push('"' as u16); - } - - let mut backslashes: usize = 0; - for x in arg.encode_wide() { - if escape { - if x == '\\' as u16 { - backslashes += 1; - } else { - if x == '"' as u16 { - // Add n+1 backslashes to total 2n+1 before internal '"'. - cmd.extend((0..=backslashes).map(|_| '\\' as u16)); - } - backslashes = 0; - } - } - cmd.push(x); - } - - if quote { - // Add n backslashes to total 2n before ending '"'. - cmd.extend((0..backslashes).map(|_| '\\' as u16)); - cmd.push('"' as u16); - } - Ok(()) -} - -fn append_bat_arg(cmd: &mut Vec, arg: &OsStr, mut quote: bool) -> io::Result<()> { - ensure_no_nuls(arg)?; - // If an argument has 0 characters then we need to quote it to ensure - // that it actually gets passed through on the command line or otherwise - // it will be dropped entirely when parsed on the other end. - // - // We also need to quote the argument if it ends with `\` to guard against - // bat usage such as `"%~2"` (i.e. force quote arguments) otherwise a - // trailing slash will escape the closing quote. - if arg.is_empty() || arg.as_encoded_bytes().last() == Some(&b'\\') { - quote = true; - } - for cp in arg.as_inner().inner.code_points() { - if let Some(cp) = cp.to_char() { - // Rather than trying to find every ascii symbol that must be quoted, - // we assume that all ascii symbols must be quoted unless they're known to be good. - // We also quote Unicode control blocks for good measure. - // Note an unquoted `\` is fine so long as the argument isn't otherwise quoted. - static UNQUOTED: &str = r"#$*+-./:?@\_"; - let ascii_needs_quotes = - cp.is_ascii() && !(cp.is_ascii_alphanumeric() || UNQUOTED.contains(cp)); - if ascii_needs_quotes || cp.is_control() { - quote = true; - } - } - } - - if quote { - cmd.push('"' as u16); - } - // Loop through the string, escaping `\` only if followed by `"`. - // And escaping `"` by doubling them. - let mut backslashes: usize = 0; - for x in arg.encode_wide() { - if x == '\\' as u16 { - backslashes += 1; - } else { - if x == '"' as u16 { - // Add n backslashes to total 2n before internal `"`. - cmd.extend((0..backslashes).map(|_| '\\' as u16)); - // Appending an additional double-quote acts as an escape. - cmd.push(b'"' as u16) - } else if x == '%' as u16 || x == '\r' as u16 { - // yt-dlp hack: replaces `%` with `%%cd:~,%` to stop %VAR% being expanded as an environment variable. - // - // # Explanation - // - // cmd supports extracting a substring from a variable using the following syntax: - // %variable:~start_index,end_index% - // - // In the above command `cd` is used as the variable and the start_index and end_index are left blank. - // `cd` is a built-in variable that dynamically expands to the current directory so it's always available. - // Explicitly omitting both the start and end index creates a zero-length substring. - // - // Therefore it all resolves to nothing. However, by doing this no-op we distract cmd.exe - // from potentially expanding %variables% in the argument. - cmd.extend_from_slice(&[ - '%' as u16, '%' as u16, 'c' as u16, 'd' as u16, ':' as u16, '~' as u16, - ',' as u16, - ]); - } - backslashes = 0; - } - cmd.push(x); - } - if quote { - // Add n backslashes to total 2n before ending `"`. - cmd.extend((0..backslashes).map(|_| '\\' as u16)); - cmd.push('"' as u16); - } - Ok(()) -} - -pub(crate) fn make_bat_command_line( - script: &[u16], - args: &[Arg], - force_quotes: bool, -) -> io::Result> { - const INVALID_ARGUMENT_ERROR: io::Error = - io::const_error!(io::ErrorKind::InvalidInput, r#"batch file arguments are invalid"#); - // Set the start of the command line to `cmd.exe /c "` - // It is necessary to surround the command in an extra pair of quotes, - // hence the trailing quote here. It will be closed after all arguments - // have been added. - // Using /e:ON enables "command extensions" which is essential for the `%` hack to work. - let mut cmd: Vec = "cmd.exe /e:ON /v:OFF /d /c \"".encode_utf16().collect(); - - // Push the script name surrounded by its quote pair. - cmd.push(b'"' as u16); - // Windows file names cannot contain a `"` character or end with `\\`. - // If the script name does then return an error. - if script.contains(&(b'"' as u16)) || script.last() == Some(&(b'\\' as u16)) { - return Err(io::const_error!( - io::ErrorKind::InvalidInput, - "Windows file names may not contain `\"` or end with `\\`" - )); - } - cmd.extend_from_slice(script.strip_suffix(&[0]).unwrap_or(script)); - cmd.push(b'"' as u16); - - // Append the arguments. - // FIXME: This needs tests to ensure that the arguments are properly - // reconstructed by the batch script by default. - for arg in args { - cmd.push(' ' as u16); - match arg { - Arg::Regular(arg_os) => { - let arg_bytes = arg_os.as_encoded_bytes(); - // Disallow \r and \n as they may truncate the arguments. - const DISALLOWED: &[u8] = b"\r\n"; - if arg_bytes.iter().any(|c| DISALLOWED.contains(c)) { - return Err(INVALID_ARGUMENT_ERROR); - } - append_bat_arg(&mut cmd, arg_os, force_quotes)?; - } - _ => { - // Raw arguments are passed on as-is. - // It's the user's responsibility to properly handle arguments in this case. - append_arg(&mut cmd, arg, force_quotes)?; - } - }; - } - - // Close the quote we left opened earlier. - cmd.push(b'"' as u16); - - Ok(cmd) -} - -/// Takes a path and tries to return a non-verbatim path. -/// -/// This is necessary because cmd.exe does not support verbatim paths. -pub(crate) fn to_user_path(path: &Path) -> io::Result> { - from_wide_to_user_path(to_u16s(path)?) -} -pub(crate) fn from_wide_to_user_path(mut path: Vec) -> io::Result> { - use super::fill_utf16_buf; - use crate::ptr; - - // UTF-16 encoded code points, used in parsing and building UTF-16 paths. - // All of these are in the ASCII range so they can be cast directly to `u16`. - const SEP: u16 = b'\\' as _; - const QUERY: u16 = b'?' as _; - const COLON: u16 = b':' as _; - const U: u16 = b'U' as _; - const N: u16 = b'N' as _; - const C: u16 = b'C' as _; - - // Early return if the path is too long to remove the verbatim prefix. - const LEGACY_MAX_PATH: usize = 260; - if path.len() > LEGACY_MAX_PATH { - return Ok(path); - } - - match &path[..] { - // `\\?\C:\...` => `C:\...` - [SEP, SEP, QUERY, SEP, _, COLON, SEP, ..] => unsafe { - let lpfilename = path[4..].as_ptr(); - fill_utf16_buf( - |buffer, size| c::GetFullPathNameW(lpfilename, size, buffer, ptr::null_mut()), - |full_path: &[u16]| { - if full_path == &path[4..path.len() - 1] { - let mut path: Vec = full_path.into(); - path.push(0); - path - } else { - path - } - }, - ) - }, - // `\\?\UNC\...` => `\\...` - [SEP, SEP, QUERY, SEP, U, N, C, SEP, ..] => unsafe { - // Change the `C` in `UNC\` to `\` so we can get a slice that starts with `\\`. - path[6] = b'\\' as u16; - let lpfilename = path[6..].as_ptr(); - fill_utf16_buf( - |buffer, size| c::GetFullPathNameW(lpfilename, size, buffer, ptr::null_mut()), - |full_path: &[u16]| { - if full_path == &path[6..path.len() - 1] { - let mut path: Vec = full_path.into(); - path.push(0); - path - } else { - // Restore the 'C' in "UNC". - path[6] = b'C' as u16; - path - } - }, - ) - }, - // For everything else, leave the path unchanged. - _ => get_long_path(path, false), - } -} diff --git a/library/std/src/sys/pal/windows/c.rs b/library/std/src/sys/pal/windows/c.rs index 004cbee52f62a..ac1c5e9932e1c 100644 --- a/library/std/src/sys/pal/windows/c.rs +++ b/library/std/src/sys/pal/windows/c.rs @@ -44,8 +44,8 @@ impl UNICODE_STRING { } } -impl Default for OBJECT_ATTRIBUTES { - fn default() -> Self { +impl OBJECT_ATTRIBUTES { + pub fn with_length() -> Self { Self { Length: size_of::() as _, RootDirectory: ptr::null_mut(), diff --git a/library/std/src/sys/pal/windows/c/bindings.txt b/library/std/src/sys/pal/windows/c/bindings.txt index e2c2163327968..d5fbb453c6f96 100644 --- a/library/std/src/sys/pal/windows/c/bindings.txt +++ b/library/std/src/sys/pal/windows/c/bindings.txt @@ -1,7 +1,8 @@ --out windows_sys.rs --flat --sys ---no-core +--no-deps +--link windows_targets --filter !INVALID_HANDLE_VALUE ABOVE_NORMAL_PRIORITY_CLASS @@ -19,7 +20,6 @@ ALL_PROCESSOR_GROUPS ARM64_NT_NEON128 BELOW_NORMAL_PRIORITY_CLASS bind -BOOL BY_HANDLE_FILE_INFORMATION CALLBACK_CHUNK_FINISHED CALLBACK_STREAM_SWITCH diff --git a/library/std/src/sys/pal/windows/c/windows_sys.rs b/library/std/src/sys/pal/windows/c/windows_sys.rs index 1d0e89f5d0f0e..eb2914b864473 100644 --- a/library/std/src/sys/pal/windows/c/windows_sys.rs +++ b/library/std/src/sys/pal/windows/c/windows_sys.rs @@ -1,4 +1,4 @@ -// Bindings generated by `windows-bindgen` 0.59.0 +// Bindings generated by `windows-bindgen` 0.61.0 #![allow(non_snake_case, non_upper_case_globals, non_camel_case_types, dead_code, clippy::all)] @@ -141,7 +141,7 @@ windows_targets::link!("ws2_32.dll" "system" fn setsockopt(s : SOCKET, level : i windows_targets::link!("ws2_32.dll" "system" fn shutdown(s : SOCKET, how : WINSOCK_SHUTDOWN_HOW) -> i32); pub const ABOVE_NORMAL_PRIORITY_CLASS: PROCESS_CREATION_FLAGS = 32768u32; #[repr(C)] -#[derive(Clone, Copy)] +#[derive(Clone, Copy, Default)] pub struct ACL { pub AclRevision: u8, pub Sbz1: u8, @@ -162,6 +162,11 @@ pub struct ADDRINFOA { pub ai_addr: *mut SOCKADDR, pub ai_next: *mut ADDRINFOA, } +impl Default for ADDRINFOA { + fn default() -> Self { + unsafe { core::mem::zeroed() } + } +} pub const AF_INET: ADDRESS_FAMILY = 2u16; pub const AF_INET6: ADDRESS_FAMILY = 23u16; pub const AF_UNIX: u16 = 1u16; @@ -176,8 +181,13 @@ pub union ARM64_NT_NEON128 { pub H: [u16; 8], pub B: [u8; 16], } +impl Default for ARM64_NT_NEON128 { + fn default() -> Self { + unsafe { core::mem::zeroed() } + } +} #[repr(C)] -#[derive(Clone, Copy)] +#[derive(Clone, Copy, Default)] pub struct ARM64_NT_NEON128_0 { pub Low: u64, pub High: i64, @@ -185,7 +195,7 @@ pub struct ARM64_NT_NEON128_0 { pub const BELOW_NORMAL_PRIORITY_CLASS: PROCESS_CREATION_FLAGS = 16384u32; pub type BOOL = i32; #[repr(C)] -#[derive(Clone, Copy)] +#[derive(Clone, Copy, Default)] pub struct BY_HANDLE_FILE_INFORMATION { pub dwFileAttributes: u32, pub ftCreationTime: FILETIME, @@ -206,9 +216,14 @@ pub type COMPARESTRING_RESULT = i32; pub struct CONDITION_VARIABLE { pub Ptr: *mut core::ffi::c_void, } +impl Default for CONDITION_VARIABLE { + fn default() -> Self { + unsafe { core::mem::zeroed() } + } +} pub type CONSOLE_MODE = u32; #[repr(C)] -#[derive(Clone, Copy)] +#[derive(Clone, Copy, Default)] pub struct CONSOLE_READCONSOLE_CONTROL { pub nLength: u32, pub nInitialChars: u32, @@ -245,6 +260,12 @@ pub struct CONTEXT { pub SegSs: u32, pub ExtendedRegisters: [u8; 512], } +#[cfg(target_arch = "x86")] +impl Default for CONTEXT { + fn default() -> Self { + unsafe { core::mem::zeroed() } + } +} #[repr(C)] #[cfg(any(target_arch = "arm64ec", target_arch = "x86_64"))] #[derive(Clone, Copy)] @@ -296,6 +317,12 @@ pub struct CONTEXT { pub LastExceptionToRip: u64, pub LastExceptionFromRip: u64, } +#[cfg(any(target_arch = "arm64ec", target_arch = "x86_64"))] +impl Default for CONTEXT { + fn default() -> Self { + unsafe { core::mem::zeroed() } + } +} #[repr(C)] #[cfg(any(target_arch = "arm64ec", target_arch = "x86_64"))] #[derive(Clone, Copy)] @@ -303,6 +330,12 @@ pub union CONTEXT_0 { pub FltSave: XSAVE_FORMAT, pub Anonymous: CONTEXT_0_0, } +#[cfg(any(target_arch = "arm64ec", target_arch = "x86_64"))] +impl Default for CONTEXT_0 { + fn default() -> Self { + unsafe { core::mem::zeroed() } + } +} #[repr(C)] #[cfg(any(target_arch = "arm64ec", target_arch = "x86_64"))] #[derive(Clone, Copy)] @@ -326,6 +359,12 @@ pub struct CONTEXT_0_0 { pub Xmm14: M128A, pub Xmm15: M128A, } +#[cfg(any(target_arch = "arm64ec", target_arch = "x86_64"))] +impl Default for CONTEXT_0_0 { + fn default() -> Self { + unsafe { core::mem::zeroed() } + } +} #[repr(C)] #[cfg(target_arch = "aarch64")] #[derive(Clone, Copy)] @@ -343,6 +382,12 @@ pub struct CONTEXT { pub Wcr: [u32; 2], pub Wvr: [u64; 2], } +#[cfg(target_arch = "aarch64")] +impl Default for CONTEXT { + fn default() -> Self { + unsafe { core::mem::zeroed() } + } +} #[repr(C)] #[cfg(target_arch = "aarch64")] #[derive(Clone, Copy)] @@ -350,9 +395,15 @@ pub union CONTEXT_0 { pub Anonymous: CONTEXT_0_0, pub X: [u64; 31], } +#[cfg(target_arch = "aarch64")] +impl Default for CONTEXT_0 { + fn default() -> Self { + unsafe { core::mem::zeroed() } + } +} #[repr(C)] #[cfg(target_arch = "aarch64")] -#[derive(Clone, Copy)] +#[derive(Clone, Copy, Default)] pub struct CONTEXT_0_0 { pub X0: u64, pub X1: u64, @@ -2305,6 +2356,11 @@ pub struct EXCEPTION_POINTERS { pub ExceptionRecord: *mut EXCEPTION_RECORD, pub ContextRecord: *mut CONTEXT, } +impl Default for EXCEPTION_POINTERS { + fn default() -> Self { + unsafe { core::mem::zeroed() } + } +} #[repr(C)] #[derive(Clone, Copy)] pub struct EXCEPTION_RECORD { @@ -2315,6 +2371,11 @@ pub struct EXCEPTION_RECORD { pub NumberParameters: u32, pub ExceptionInformation: [usize; 15], } +impl Default for EXCEPTION_RECORD { + fn default() -> Self { + unsafe { core::mem::zeroed() } + } +} pub const EXCEPTION_STACK_OVERFLOW: NTSTATUS = 0xC00000FD_u32 as _; pub const EXTENDED_STARTUPINFO_PRESENT: PROCESS_CREATION_FLAGS = 524288u32; pub const E_NOTIMPL: HRESULT = 0x80004001_u32 as _; @@ -2333,8 +2394,13 @@ pub struct FD_SET { pub fd_count: u32, pub fd_array: [SOCKET; 64], } +impl Default for FD_SET { + fn default() -> Self { + unsafe { core::mem::zeroed() } + } +} #[repr(C)] -#[derive(Clone, Copy)] +#[derive(Clone, Copy, Default)] pub struct FILETIME { pub dwLowDateTime: u32, pub dwHighDateTime: u32, @@ -2343,7 +2409,7 @@ pub type FILE_ACCESS_RIGHTS = u32; pub const FILE_ADD_FILE: FILE_ACCESS_RIGHTS = 2u32; pub const FILE_ADD_SUBDIRECTORY: FILE_ACCESS_RIGHTS = 4u32; #[repr(C)] -#[derive(Clone, Copy)] +#[derive(Clone, Copy, Default)] pub struct FILE_ALLOCATION_INFO { pub AllocationSize: i64, } @@ -2369,7 +2435,7 @@ pub const FILE_ATTRIBUTE_REPARSE_POINT: FILE_FLAGS_AND_ATTRIBUTES = 1024u32; pub const FILE_ATTRIBUTE_SPARSE_FILE: FILE_FLAGS_AND_ATTRIBUTES = 512u32; pub const FILE_ATTRIBUTE_SYSTEM: FILE_FLAGS_AND_ATTRIBUTES = 4u32; #[repr(C)] -#[derive(Clone, Copy)] +#[derive(Clone, Copy, Default)] pub struct FILE_ATTRIBUTE_TAG_INFO { pub FileAttributes: u32, pub ReparseTag: u32, @@ -2378,7 +2444,7 @@ pub const FILE_ATTRIBUTE_TEMPORARY: FILE_FLAGS_AND_ATTRIBUTES = 256u32; pub const FILE_ATTRIBUTE_UNPINNED: FILE_FLAGS_AND_ATTRIBUTES = 1048576u32; pub const FILE_ATTRIBUTE_VIRTUAL: FILE_FLAGS_AND_ATTRIBUTES = 65536u32; #[repr(C)] -#[derive(Clone, Copy)] +#[derive(Clone, Copy, Default)] pub struct FILE_BASIC_INFO { pub CreationTime: i64, pub LastAccessTime: i64, @@ -2405,19 +2471,19 @@ pub const FILE_DISPOSITION_FLAG_IGNORE_READONLY_ATTRIBUTE: FILE_DISPOSITION_INFO pub const FILE_DISPOSITION_FLAG_ON_CLOSE: FILE_DISPOSITION_INFO_EX_FLAGS = 8u32; pub const FILE_DISPOSITION_FLAG_POSIX_SEMANTICS: FILE_DISPOSITION_INFO_EX_FLAGS = 2u32; #[repr(C)] -#[derive(Clone, Copy)] +#[derive(Clone, Copy, Default)] pub struct FILE_DISPOSITION_INFO { pub DeleteFile: bool, } #[repr(C)] -#[derive(Clone, Copy)] +#[derive(Clone, Copy, Default)] pub struct FILE_DISPOSITION_INFO_EX { pub Flags: FILE_DISPOSITION_INFO_EX_FLAGS, } pub type FILE_DISPOSITION_INFO_EX_FLAGS = u32; pub const FILE_END: SET_FILE_POINTER_MOVE_METHOD = 2u32; #[repr(C)] -#[derive(Clone, Copy)] +#[derive(Clone, Copy, Default)] pub struct FILE_END_OF_FILE_INFO { pub EndOfFile: i64, } @@ -2457,9 +2523,14 @@ pub struct FILE_ID_BOTH_DIR_INFO { pub FileId: i64, pub FileName: [u16; 1], } +impl Default for FILE_ID_BOTH_DIR_INFO { + fn default() -> Self { + unsafe { core::mem::zeroed() } + } +} pub type FILE_INFO_BY_HANDLE_CLASS = i32; #[repr(C)] -#[derive(Clone, Copy)] +#[derive(Clone, Copy, Default)] pub struct FILE_IO_PRIORITY_HINT_INFO { pub PriorityHint: PRIORITY_HINT, } @@ -2494,12 +2565,22 @@ pub struct FILE_RENAME_INFO { pub FileNameLength: u32, pub FileName: [u16; 1], } +impl Default for FILE_RENAME_INFO { + fn default() -> Self { + unsafe { core::mem::zeroed() } + } +} #[repr(C)] #[derive(Clone, Copy)] pub union FILE_RENAME_INFO_0 { pub ReplaceIfExists: bool, pub Flags: u32, } +impl Default for FILE_RENAME_INFO_0 { + fn default() -> Self { + unsafe { core::mem::zeroed() } + } +} pub const FILE_RESERVE_OPFILTER: NTCREATEFILE_CREATE_OPTIONS = 1048576u32; pub const FILE_SEQUENTIAL_ONLY: NTCREATEFILE_CREATE_OPTIONS = 4u32; pub const FILE_SESSION_AWARE: NTCREATEFILE_CREATE_OPTIONS = 262144u32; @@ -2509,7 +2590,7 @@ pub const FILE_SHARE_NONE: FILE_SHARE_MODE = 0u32; pub const FILE_SHARE_READ: FILE_SHARE_MODE = 1u32; pub const FILE_SHARE_WRITE: FILE_SHARE_MODE = 2u32; #[repr(C)] -#[derive(Clone, Copy)] +#[derive(Clone, Copy, Default)] pub struct FILE_STANDARD_INFO { pub AllocationSize: i64, pub EndOfFile: i64, @@ -2549,6 +2630,12 @@ pub struct FLOATING_SAVE_AREA { pub RegisterArea: [u8; 80], pub Spare0: u32, } +#[cfg(target_arch = "x86")] +impl Default for FLOATING_SAVE_AREA { + fn default() -> Self { + unsafe { core::mem::zeroed() } + } +} #[repr(C)] #[cfg(any(target_arch = "aarch64", target_arch = "arm64ec", target_arch = "x86_64"))] #[derive(Clone, Copy)] @@ -2563,6 +2650,12 @@ pub struct FLOATING_SAVE_AREA { pub RegisterArea: [u8; 80], pub Cr0NpxState: u32, } +#[cfg(any(target_arch = "aarch64", target_arch = "arm64ec", target_arch = "x86_64"))] +impl Default for FLOATING_SAVE_AREA { + fn default() -> Self { + unsafe { core::mem::zeroed() } + } +} pub const FORMAT_MESSAGE_ALLOCATE_BUFFER: FORMAT_MESSAGE_OPTIONS = 256u32; pub const FORMAT_MESSAGE_ARGUMENT_ARRAY: FORMAT_MESSAGE_OPTIONS = 8192u32; pub const FORMAT_MESSAGE_FROM_HMODULE: FORMAT_MESSAGE_OPTIONS = 2048u32; @@ -2639,12 +2732,22 @@ pub const IDLE_PRIORITY_CLASS: PROCESS_CREATION_FLAGS = 64u32; pub struct IN6_ADDR { pub u: IN6_ADDR_0, } +impl Default for IN6_ADDR { + fn default() -> Self { + unsafe { core::mem::zeroed() } + } +} #[repr(C)] #[derive(Clone, Copy)] pub union IN6_ADDR_0 { pub Byte: [u8; 16], pub Word: [u16; 8], } +impl Default for IN6_ADDR_0 { + fn default() -> Self { + unsafe { core::mem::zeroed() } + } +} pub const INFINITE: u32 = 4294967295u32; pub const INHERIT_CALLER_PRIORITY: PROCESS_CREATION_FLAGS = 131072u32; pub const INHERIT_PARENT_AFFINITY: PROCESS_CREATION_FLAGS = 65536u32; @@ -2653,6 +2756,11 @@ pub const INHERIT_PARENT_AFFINITY: PROCESS_CREATION_FLAGS = 65536u32; pub union INIT_ONCE { pub Ptr: *mut core::ffi::c_void, } +impl Default for INIT_ONCE { + fn default() -> Self { + unsafe { core::mem::zeroed() } + } +} pub const INIT_ONCE_INIT_FAILED: u32 = 4u32; pub const INVALID_FILE_ATTRIBUTES: u32 = 4294967295u32; pub const INVALID_SOCKET: SOCKET = -1i32 as _; @@ -2661,6 +2769,11 @@ pub const INVALID_SOCKET: SOCKET = -1i32 as _; pub struct IN_ADDR { pub S_un: IN_ADDR_0, } +impl Default for IN_ADDR { + fn default() -> Self { + unsafe { core::mem::zeroed() } + } +} #[repr(C)] #[derive(Clone, Copy)] pub union IN_ADDR_0 { @@ -2668,8 +2781,13 @@ pub union IN_ADDR_0 { pub S_un_w: IN_ADDR_0_1, pub S_addr: u32, } +impl Default for IN_ADDR_0 { + fn default() -> Self { + unsafe { core::mem::zeroed() } + } +} #[repr(C)] -#[derive(Clone, Copy)] +#[derive(Clone, Copy, Default)] pub struct IN_ADDR_0_0 { pub s_b1: u8, pub s_b2: u8, @@ -2677,7 +2795,7 @@ pub struct IN_ADDR_0_0 { pub s_b4: u8, } #[repr(C)] -#[derive(Clone, Copy)] +#[derive(Clone, Copy, Default)] pub struct IN_ADDR_0_1 { pub s_w1: u16, pub s_w2: u16, @@ -2690,12 +2808,22 @@ pub struct IO_STATUS_BLOCK { pub Anonymous: IO_STATUS_BLOCK_0, pub Information: usize, } +impl Default for IO_STATUS_BLOCK { + fn default() -> Self { + unsafe { core::mem::zeroed() } + } +} #[repr(C)] #[derive(Clone, Copy)] pub union IO_STATUS_BLOCK_0 { pub Status: NTSTATUS, pub Pointer: *mut core::ffi::c_void, } +impl Default for IO_STATUS_BLOCK_0 { + fn default() -> Self { + unsafe { core::mem::zeroed() } + } +} pub type IPPROTO = i32; pub const IPPROTO_AH: IPPROTO = 51i32; pub const IPPROTO_CBT: IPPROTO = 7i32; @@ -2742,6 +2870,11 @@ pub struct IPV6_MREQ { pub ipv6mr_multiaddr: IN6_ADDR, pub ipv6mr_interface: u32, } +impl Default for IPV6_MREQ { + fn default() -> Self { + unsafe { core::mem::zeroed() } + } +} pub const IPV6_MULTICAST_LOOP: i32 = 11i32; pub const IPV6_V6ONLY: i32 = 27i32; pub const IP_ADD_MEMBERSHIP: i32 = 12i32; @@ -2752,11 +2885,16 @@ pub struct IP_MREQ { pub imr_multiaddr: IN_ADDR, pub imr_interface: IN_ADDR, } +impl Default for IP_MREQ { + fn default() -> Self { + unsafe { core::mem::zeroed() } + } +} pub const IP_MULTICAST_LOOP: i32 = 11i32; pub const IP_MULTICAST_TTL: i32 = 10i32; pub const IP_TTL: i32 = 4i32; #[repr(C)] -#[derive(Clone, Copy)] +#[derive(Clone, Copy, Default)] pub struct LINGER { pub l_onoff: u16, pub l_linger: u16, @@ -2797,7 +2935,7 @@ pub type LPWSAOVERLAPPED_COMPLETION_ROUTINE = Option< ), >; #[repr(C)] -#[derive(Clone, Copy)] +#[derive(Clone, Copy, Default)] pub struct M128A { pub Low: u64, pub High: i64, @@ -2838,6 +2976,11 @@ pub struct OBJECT_ATTRIBUTES { pub SecurityDescriptor: *const SECURITY_DESCRIPTOR, pub SecurityQualityOfService: *const SECURITY_QUALITY_OF_SERVICE, } +impl Default for OBJECT_ATTRIBUTES { + fn default() -> Self { + unsafe { core::mem::zeroed() } + } +} pub type OBJECT_ATTRIBUTE_FLAGS = u32; pub const OBJ_DONT_REPARSE: OBJECT_ATTRIBUTE_FLAGS = 4096u32; pub const OPEN_ALWAYS: FILE_CREATION_DISPOSITION = 4u32; @@ -2850,14 +2993,24 @@ pub struct OVERLAPPED { pub Anonymous: OVERLAPPED_0, pub hEvent: HANDLE, } +impl Default for OVERLAPPED { + fn default() -> Self { + unsafe { core::mem::zeroed() } + } +} #[repr(C)] #[derive(Clone, Copy)] pub union OVERLAPPED_0 { pub Anonymous: OVERLAPPED_0_0, pub Pointer: *mut core::ffi::c_void, } +impl Default for OVERLAPPED_0 { + fn default() -> Self { + unsafe { core::mem::zeroed() } + } +} #[repr(C)] -#[derive(Clone, Copy)] +#[derive(Clone, Copy, Default)] pub struct OVERLAPPED_0_0 { pub Offset: u32, pub OffsetHigh: u32, @@ -2895,6 +3048,11 @@ pub struct PROCESS_INFORMATION { pub dwProcessId: u32, pub dwThreadId: u32, } +impl Default for PROCESS_INFORMATION { + fn default() -> Self { + unsafe { core::mem::zeroed() } + } +} pub const PROCESS_MODE_BACKGROUND_BEGIN: PROCESS_CREATION_FLAGS = 1048576u32; pub const PROCESS_MODE_BACKGROUND_END: PROCESS_CREATION_FLAGS = 2097152u32; pub const PROFILE_KERNEL: PROCESS_CREATION_FLAGS = 536870912u32; @@ -2926,6 +3084,11 @@ pub struct SECURITY_ATTRIBUTES { pub lpSecurityDescriptor: *mut core::ffi::c_void, pub bInheritHandle: BOOL, } +impl Default for SECURITY_ATTRIBUTES { + fn default() -> Self { + unsafe { core::mem::zeroed() } + } +} pub const SECURITY_CONTEXT_TRACKING: FILE_FLAGS_AND_ATTRIBUTES = 262144u32; pub const SECURITY_DELEGATION: FILE_FLAGS_AND_ATTRIBUTES = 196608u32; #[repr(C)] @@ -2939,13 +3102,18 @@ pub struct SECURITY_DESCRIPTOR { pub Sacl: *mut ACL, pub Dacl: *mut ACL, } +impl Default for SECURITY_DESCRIPTOR { + fn default() -> Self { + unsafe { core::mem::zeroed() } + } +} pub type SECURITY_DESCRIPTOR_CONTROL = u16; pub const SECURITY_EFFECTIVE_ONLY: FILE_FLAGS_AND_ATTRIBUTES = 524288u32; pub const SECURITY_IDENTIFICATION: FILE_FLAGS_AND_ATTRIBUTES = 65536u32; pub const SECURITY_IMPERSONATION: FILE_FLAGS_AND_ATTRIBUTES = 131072u32; pub type SECURITY_IMPERSONATION_LEVEL = i32; #[repr(C)] -#[derive(Clone, Copy)] +#[derive(Clone, Copy, Default)] pub struct SECURITY_QUALITY_OF_SERVICE { pub Length: u32, pub ImpersonationLevel: SECURITY_IMPERSONATION_LEVEL, @@ -2962,6 +3130,11 @@ pub struct SOCKADDR { pub sa_family: ADDRESS_FAMILY, pub sa_data: [i8; 14], } +impl Default for SOCKADDR { + fn default() -> Self { + unsafe { core::mem::zeroed() } + } +} #[repr(C)] #[derive(Clone, Copy)] pub struct SOCKADDR_STORAGE { @@ -2970,12 +3143,22 @@ pub struct SOCKADDR_STORAGE { pub __ss_align: i64, pub __ss_pad2: [i8; 112], } +impl Default for SOCKADDR_STORAGE { + fn default() -> Self { + unsafe { core::mem::zeroed() } + } +} #[repr(C)] #[derive(Clone, Copy)] pub struct SOCKADDR_UN { pub sun_family: ADDRESS_FAMILY, pub sun_path: [i8; 108], } +impl Default for SOCKADDR_UN { + fn default() -> Self { + unsafe { core::mem::zeroed() } + } +} pub type SOCKET = usize; pub const SOCKET_ERROR: i32 = -1i32; pub const SOCK_DGRAM: WINSOCK_SOCKET_TYPE = 2i32; @@ -2995,6 +3178,11 @@ pub const SPECIFIC_RIGHTS_ALL: FILE_ACCESS_RIGHTS = 65535u32; pub struct SRWLOCK { pub Ptr: *mut core::ffi::c_void, } +impl Default for SRWLOCK { + fn default() -> Self { + unsafe { core::mem::zeroed() } + } +} pub const STACK_SIZE_PARAM_IS_A_RESERVATION: THREAD_CREATION_FLAGS = 65536u32; pub const STANDARD_RIGHTS_ALL: FILE_ACCESS_RIGHTS = 2031616u32; pub const STANDARD_RIGHTS_EXECUTE: FILE_ACCESS_RIGHTS = 131072u32; @@ -3021,6 +3209,11 @@ pub struct STARTUPINFOEXW { pub StartupInfo: STARTUPINFOW, pub lpAttributeList: LPPROC_THREAD_ATTRIBUTE_LIST, } +impl Default for STARTUPINFOEXW { + fn default() -> Self { + unsafe { core::mem::zeroed() } + } +} #[repr(C)] #[derive(Clone, Copy)] pub struct STARTUPINFOW { @@ -3043,6 +3236,11 @@ pub struct STARTUPINFOW { pub hStdOutput: HANDLE, pub hStdError: HANDLE, } +impl Default for STARTUPINFOW { + fn default() -> Self { + unsafe { core::mem::zeroed() } + } +} pub type STARTUPINFOW_FLAGS = u32; pub const STATUS_DELETE_PENDING: NTSTATUS = 0xC0000056_u32 as _; pub const STATUS_DIRECTORY_NOT_EMPTY: NTSTATUS = 0xC0000101_u32 as _; @@ -3078,14 +3276,24 @@ pub struct SYSTEM_INFO { pub wProcessorLevel: u16, pub wProcessorRevision: u16, } +impl Default for SYSTEM_INFO { + fn default() -> Self { + unsafe { core::mem::zeroed() } + } +} #[repr(C)] #[derive(Clone, Copy)] pub union SYSTEM_INFO_0 { pub dwOemId: u32, pub Anonymous: SYSTEM_INFO_0_0, } +impl Default for SYSTEM_INFO_0 { + fn default() -> Self { + unsafe { core::mem::zeroed() } + } +} #[repr(C)] -#[derive(Clone, Copy)] +#[derive(Clone, Copy, Default)] pub struct SYSTEM_INFO_0_0 { pub wProcessorArchitecture: PROCESSOR_ARCHITECTURE, pub wReserved: u16, @@ -3097,7 +3305,7 @@ pub type THREAD_CREATION_FLAGS = u32; pub const TIMER_ALL_ACCESS: SYNCHRONIZATION_ACCESS_RIGHTS = 2031619u32; pub const TIMER_MODIFY_STATE: SYNCHRONIZATION_ACCESS_RIGHTS = 2u32; #[repr(C)] -#[derive(Clone, Copy)] +#[derive(Clone, Copy, Default)] pub struct TIMEVAL { pub tv_sec: i32, pub tv_usec: i32, @@ -3134,6 +3342,11 @@ pub struct UNICODE_STRING { pub MaximumLength: u16, pub Buffer: PWSTR, } +impl Default for UNICODE_STRING { + fn default() -> Self { + unsafe { core::mem::zeroed() } + } +} pub const VOLUME_NAME_DOS: GETFINALPATHNAMEBYHANDLE_FLAGS = 0u32; pub const VOLUME_NAME_GUID: GETFINALPATHNAMEBYHANDLE_FLAGS = 1u32; pub const VOLUME_NAME_NONE: GETFINALPATHNAMEBYHANDLE_FLAGS = 4u32; @@ -3160,6 +3373,11 @@ pub struct WIN32_FIND_DATAW { pub cFileName: [u16; 260], pub cAlternateFileName: [u16; 14], } +impl Default for WIN32_FIND_DATAW { + fn default() -> Self { + unsafe { core::mem::zeroed() } + } +} pub type WINSOCK_SHUTDOWN_HOW = i32; pub type WINSOCK_SOCKET_TYPE = i32; pub const WRITE_DAC: FILE_ACCESS_RIGHTS = 262144u32; @@ -3171,6 +3389,11 @@ pub struct WSABUF { pub len: u32, pub buf: PSTR, } +impl Default for WSABUF { + fn default() -> Self { + unsafe { core::mem::zeroed() } + } +} #[repr(C)] #[cfg(target_arch = "x86")] #[derive(Clone, Copy)] @@ -3183,6 +3406,12 @@ pub struct WSADATA { pub iMaxUdpDg: u16, pub lpVendorInfo: PSTR, } +#[cfg(target_arch = "x86")] +impl Default for WSADATA { + fn default() -> Self { + unsafe { core::mem::zeroed() } + } +} #[repr(C)] #[cfg(any(target_arch = "aarch64", target_arch = "arm64ec", target_arch = "x86_64"))] #[derive(Clone, Copy)] @@ -3195,6 +3424,12 @@ pub struct WSADATA { pub szDescription: [i8; 257], pub szSystemStatus: [i8; 129], } +#[cfg(any(target_arch = "aarch64", target_arch = "arm64ec", target_arch = "x86_64"))] +impl Default for WSADATA { + fn default() -> Self { + unsafe { core::mem::zeroed() } + } +} pub const WSAEACCES: WSA_ERROR = 10013i32; pub const WSAEADDRINUSE: WSA_ERROR = 10048i32; pub const WSAEADDRNOTAVAIL: WSA_ERROR = 10049i32; @@ -3255,6 +3490,11 @@ pub struct WSAPROTOCOLCHAIN { pub ChainLen: i32, pub ChainEntries: [u32; 7], } +impl Default for WSAPROTOCOLCHAIN { + fn default() -> Self { + unsafe { core::mem::zeroed() } + } +} #[repr(C)] #[derive(Clone, Copy)] pub struct WSAPROTOCOL_INFOW { @@ -3279,6 +3519,11 @@ pub struct WSAPROTOCOL_INFOW { pub dwProviderReserved: u32, pub szProtocol: [u16; 256], } +impl Default for WSAPROTOCOL_INFOW { + fn default() -> Self { + unsafe { core::mem::zeroed() } + } +} pub const WSASERVICE_NOT_FOUND: WSA_ERROR = 10108i32; pub const WSASYSCALLFAILURE: WSA_ERROR = 10107i32; pub const WSASYSNOTREADY: WSA_ERROR = 10091i32; @@ -3348,6 +3593,12 @@ pub struct XSAVE_FORMAT { pub XmmRegisters: [M128A; 8], pub Reserved4: [u8; 224], } +#[cfg(target_arch = "x86")] +impl Default for XSAVE_FORMAT { + fn default() -> Self { + unsafe { core::mem::zeroed() } + } +} #[repr(C)] #[cfg(any(target_arch = "aarch64", target_arch = "arm64ec", target_arch = "x86_64"))] #[derive(Clone, Copy)] @@ -3369,6 +3620,12 @@ pub struct XSAVE_FORMAT { pub XmmRegisters: [M128A; 16], pub Reserved4: [u8; 96], } +#[cfg(any(target_arch = "aarch64", target_arch = "arm64ec", target_arch = "x86_64"))] +impl Default for XSAVE_FORMAT { + fn default() -> Self { + unsafe { core::mem::zeroed() } + } +} #[cfg(target_arch = "arm")] #[repr(C)] diff --git a/library/std/src/sys/pal/windows/compat.rs b/library/std/src/sys/pal/windows/compat.rs index 2b9838437e9c1..14f2c8d881cf1 100644 --- a/library/std/src/sys/pal/windows/compat.rs +++ b/library/std/src/sys/pal/windows/compat.rs @@ -145,7 +145,7 @@ macro_rules! compat_fn_with_fallback { use super::*; use crate::mem; use crate::ffi::CStr; - use crate::sync::atomic::{AtomicPtr, Ordering}; + use crate::sync::atomic::{Atomic, AtomicPtr, Ordering}; use crate::sys::compat::Module; type F = unsafe extern "system" fn($($argtype),*) -> $rettype; @@ -155,7 +155,7 @@ macro_rules! compat_fn_with_fallback { /// When that is called it attempts to load the requested symbol. /// If it succeeds, `PTR` is set to the address of that symbol. /// If it fails, then `PTR` is set to `fallback`. - static PTR: AtomicPtr = AtomicPtr::new(load as *mut _); + static PTR: Atomic<*mut c_void> = AtomicPtr::new(load as *mut _); unsafe extern "system" fn load($($argname: $argtype),*) -> $rettype { unsafe { @@ -212,9 +212,9 @@ macro_rules! compat_fn_optional { use crate::ffi::c_void; use crate::mem; use crate::ptr::{self, NonNull}; - use crate::sync::atomic::{AtomicPtr, Ordering}; + use crate::sync::atomic::{Atomic, AtomicPtr, Ordering}; - pub(in crate::sys) static PTR: AtomicPtr = AtomicPtr::new(ptr::null_mut()); + pub(in crate::sys) static PTR: Atomic<*mut c_void> = AtomicPtr::new(ptr::null_mut()); type F = unsafe extern "system" fn($($argtype),*) $(-> $rettype)?; diff --git a/library/std/src/sys/pal/windows/env.rs b/library/std/src/sys/pal/windows/env.rs deleted file mode 100644 index f0a99d6200cac..0000000000000 --- a/library/std/src/sys/pal/windows/env.rs +++ /dev/null @@ -1,9 +0,0 @@ -pub mod os { - pub const FAMILY: &str = "windows"; - pub const OS: &str = "windows"; - pub const DLL_PREFIX: &str = ""; - pub const DLL_SUFFIX: &str = ".dll"; - pub const DLL_EXTENSION: &str = "dll"; - pub const EXE_SUFFIX: &str = ".exe"; - pub const EXE_EXTENSION: &str = "exe"; -} diff --git a/library/std/src/sys/pal/windows/futex.rs b/library/std/src/sys/pal/windows/futex.rs index aebf638239ca9..cfa0a6b3815bd 100644 --- a/library/std/src/sys/pal/windows/futex.rs +++ b/library/std/src/sys/pal/windows/futex.rs @@ -1,8 +1,8 @@ use core::ffi::c_void; use core::ptr; use core::sync::atomic::{ - AtomicBool, AtomicI8, AtomicI16, AtomicI32, AtomicI64, AtomicIsize, AtomicPtr, AtomicU8, - AtomicU16, AtomicU32, AtomicU64, AtomicUsize, + Atomic, AtomicBool, AtomicI8, AtomicI16, AtomicI32, AtomicI64, AtomicIsize, AtomicPtr, + AtomicU8, AtomicU16, AtomicU32, AtomicU64, AtomicUsize, }; use core::time::Duration; @@ -10,12 +10,12 @@ use super::api::{self, WinError}; use crate::sys::{c, dur2timeout}; /// An atomic for use as a futex that is at least 32-bits but may be larger -pub type Futex = AtomicU32; +pub type Futex = Atomic; /// Must be the underlying type of Futex pub type Primitive = u32; /// An atomic for use as a futex that is at least 8-bits but may be larger. -pub type SmallFutex = AtomicU8; +pub type SmallFutex = Atomic; /// Must be the underlying type of SmallFutex pub type SmallPrimitive = u8; @@ -47,10 +47,10 @@ unsafe_waitable_int! { (usize, AtomicUsize), } unsafe impl Waitable for *const T { - type Futex = AtomicPtr; + type Futex = Atomic<*mut T>; } unsafe impl Waitable for *mut T { - type Futex = AtomicPtr; + type Futex = Atomic<*mut T>; } unsafe impl Futexable for AtomicPtr {} diff --git a/library/std/src/sys/pal/windows/mod.rs b/library/std/src/sys/pal/windows/mod.rs index bdf0cc2c59cf1..4f18c4009ab6c 100644 --- a/library/std/src/sys/pal/windows/mod.rs +++ b/library/std/src/sys/pal/windows/mod.rs @@ -14,9 +14,7 @@ pub mod compat; pub mod api; -pub mod args; pub mod c; -pub mod env; #[cfg(not(target_vendor = "win7"))] pub mod futex; pub mod handle; diff --git a/library/std/src/sys/pal/windows/os.rs b/library/std/src/sys/pal/windows/os.rs index 044dc2e8cd8fa..f331282d2d72a 100644 --- a/library/std/src/sys/pal/windows/os.rs +++ b/library/std/src/sys/pal/windows/os.rs @@ -5,16 +5,16 @@ #[cfg(test)] mod tests; +use super::api; #[cfg(not(target_vendor = "uwp"))] use super::api::WinError; -use super::{api, to_u16s}; use crate::error::Error as StdError; use crate::ffi::{OsStr, OsString}; use crate::os::windows::ffi::EncodeWide; use crate::os::windows::prelude::*; use crate::path::{self, PathBuf}; -use crate::sys::{c, cvt}; -use crate::{fmt, io, ptr, slice}; +use crate::sys::pal::{c, cvt}; +use crate::{fmt, io, ptr}; pub fn errno() -> i32 { api::get_last_error().code as i32 @@ -76,108 +76,6 @@ pub fn error_string(mut errnum: i32) -> String { } } -pub struct Env { - base: *mut c::WCHAR, - iter: EnvIterator, -} - -// FIXME(https://github.com/rust-lang/rust/issues/114583): Remove this when ::fmt matches ::fmt. -pub struct EnvStrDebug<'a> { - iter: &'a EnvIterator, -} - -impl fmt::Debug for EnvStrDebug<'_> { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - let Self { iter } = self; - let iter: EnvIterator = (*iter).clone(); - let mut list = f.debug_list(); - for (a, b) in iter { - list.entry(&(a.to_str().unwrap(), b.to_str().unwrap())); - } - list.finish() - } -} - -impl Env { - pub fn str_debug(&self) -> impl fmt::Debug + '_ { - let Self { base: _, iter } = self; - EnvStrDebug { iter } - } -} - -impl fmt::Debug for Env { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - let Self { base: _, iter } = self; - f.debug_list().entries(iter.clone()).finish() - } -} - -impl Iterator for Env { - type Item = (OsString, OsString); - - fn next(&mut self) -> Option<(OsString, OsString)> { - let Self { base: _, iter } = self; - iter.next() - } -} - -#[derive(Clone)] -struct EnvIterator(*mut c::WCHAR); - -impl Iterator for EnvIterator { - type Item = (OsString, OsString); - - fn next(&mut self) -> Option<(OsString, OsString)> { - let Self(cur) = self; - loop { - unsafe { - if **cur == 0 { - return None; - } - let p = *cur as *const u16; - let mut len = 0; - while *p.add(len) != 0 { - len += 1; - } - let s = slice::from_raw_parts(p, len); - *cur = cur.add(len + 1); - - // Windows allows environment variables to start with an equals - // symbol (in any other position, this is the separator between - // variable name and value). Since`s` has at least length 1 at - // this point (because the empty string terminates the array of - // environment variables), we can safely slice. - let pos = match s[1..].iter().position(|&u| u == b'=' as u16).map(|p| p + 1) { - Some(p) => p, - None => continue, - }; - return Some(( - OsStringExt::from_wide(&s[..pos]), - OsStringExt::from_wide(&s[pos + 1..]), - )); - } - } - } -} - -impl Drop for Env { - fn drop(&mut self) { - unsafe { - c::FreeEnvironmentStringsW(self.base); - } - } -} - -pub fn env() -> Env { - unsafe { - let ch = c::GetEnvironmentStringsW(); - if ch.is_null() { - panic!("failure getting env string from OS: {}", io::Error::last_os_error()); - } - Env { base: ch, iter: EnvIterator(ch) } - } -} - pub struct SplitPaths<'a> { data: EncodeWide<'a>, must_yield: bool, @@ -290,33 +188,6 @@ pub fn chdir(p: &path::Path) -> io::Result<()> { cvt(unsafe { c::SetCurrentDirectoryW(p.as_ptr()) }).map(drop) } -pub fn getenv(k: &OsStr) -> Option { - let k = to_u16s(k).ok()?; - super::fill_utf16_buf( - |buf, sz| unsafe { c::GetEnvironmentVariableW(k.as_ptr(), buf, sz) }, - OsStringExt::from_wide, - ) - .ok() -} - -pub unsafe fn setenv(k: &OsStr, v: &OsStr) -> io::Result<()> { - // SAFETY: We ensure that k and v are null-terminated wide strings. - unsafe { - let k = to_u16s(k)?; - let v = to_u16s(v)?; - - cvt(c::SetEnvironmentVariableW(k.as_ptr(), v.as_ptr())).map(drop) - } -} - -pub unsafe fn unsetenv(n: &OsStr) -> io::Result<()> { - // SAFETY: We ensure that v is a null-terminated wide strings. - unsafe { - let v = to_u16s(n)?; - cvt(c::SetEnvironmentVariableW(v.as_ptr(), ptr::null())).map(drop) - } -} - pub fn temp_dir() -> PathBuf { super::fill_utf16_buf(|buf, sz| unsafe { c::GetTempPath2W(sz, buf) }, super::os2path).unwrap() } diff --git a/library/std/src/sys/pal/windows/pipe.rs b/library/std/src/sys/pal/windows/pipe.rs index c785246492268..00d469fbaf8c7 100644 --- a/library/std/src/sys/pal/windows/pipe.rs +++ b/library/std/src/sys/pal/windows/pipe.rs @@ -3,8 +3,8 @@ use crate::io::{self, BorrowedCursor, IoSlice, IoSliceMut}; use crate::os::windows::prelude::*; use crate::path::Path; use crate::random::{DefaultRandomSource, Random}; -use crate::sync::atomic::AtomicUsize; use crate::sync::atomic::Ordering::Relaxed; +use crate::sync::atomic::{Atomic, AtomicUsize}; use crate::sys::c; use crate::sys::fs::{File, OpenOptions}; use crate::sys::handle::Handle; @@ -143,7 +143,6 @@ pub fn anon_pipe(ours_readable: bool, their_handle_inheritable: bool) -> io::Res }; opts.security_attributes(&mut sa); let theirs = File::open(Path::new(&name), &opts)?; - let theirs = AnonPipe { inner: theirs.into_inner() }; Ok(Pipes { ours: AnonPipe { inner: ours }, @@ -193,7 +192,7 @@ pub fn spawn_pipe_relay( } fn random_number() -> usize { - static N: AtomicUsize = AtomicUsize::new(0); + static N: Atomic = AtomicUsize::new(0); loop { if N.load(Relaxed) != 0 { return N.fetch_add(1, Relaxed); diff --git a/library/std/src/sys/pal/windows/time.rs b/library/std/src/sys/pal/windows/time.rs index d9010e3996109..68126bd8d2fa0 100644 --- a/library/std/src/sys/pal/windows/time.rs +++ b/library/std/src/sys/pal/windows/time.rs @@ -164,7 +164,7 @@ fn intervals2dur(intervals: u64) -> Duration { mod perf_counter { use super::NANOS_PER_SEC; - use crate::sync::atomic::{AtomicU64, Ordering}; + use crate::sync::atomic::{Atomic, AtomicU64, Ordering}; use crate::sys::{c, cvt}; use crate::sys_common::mul_div_u64; use crate::time::Duration; @@ -199,7 +199,7 @@ mod perf_counter { // uninitialized. Storing this as a single `AtomicU64` allows us to use // `Relaxed` operations, as we are only interested in the effects on a // single memory location. - static FREQUENCY: AtomicU64 = AtomicU64::new(0); + static FREQUENCY: Atomic = AtomicU64::new(0); let cached = FREQUENCY.load(Ordering::Relaxed); // If a previous thread has filled in this global state, use that. diff --git a/library/std/src/sys/pal/xous/args.rs b/library/std/src/sys/pal/xous/args.rs deleted file mode 100644 index 00c44ca220a9e..0000000000000 --- a/library/std/src/sys/pal/xous/args.rs +++ /dev/null @@ -1,53 +0,0 @@ -use crate::ffi::OsString; -use crate::sys::pal::xous::os::get_application_parameters; -use crate::sys::pal::xous::os::params::ArgumentList; -use crate::{fmt, vec}; - -pub struct Args { - parsed_args_list: vec::IntoIter, -} - -pub fn args() -> Args { - let Some(params) = get_application_parameters() else { - return Args { parsed_args_list: vec![].into_iter() }; - }; - - for param in params { - if let Ok(args) = ArgumentList::try_from(¶m) { - let mut parsed_args = vec![]; - for arg in args { - parsed_args.push(arg.into()); - } - return Args { parsed_args_list: parsed_args.into_iter() }; - } - } - Args { parsed_args_list: vec![].into_iter() } -} - -impl fmt::Debug for Args { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - self.parsed_args_list.as_slice().fmt(f) - } -} - -impl Iterator for Args { - type Item = OsString; - fn next(&mut self) -> Option { - self.parsed_args_list.next() - } - fn size_hint(&self) -> (usize, Option) { - self.parsed_args_list.size_hint() - } -} - -impl DoubleEndedIterator for Args { - fn next_back(&mut self) -> Option { - self.parsed_args_list.next_back() - } -} - -impl ExactSizeIterator for Args { - fn len(&self) -> usize { - self.parsed_args_list.len() - } -} diff --git a/library/std/src/sys/pal/xous/mod.rs b/library/std/src/sys/pal/xous/mod.rs index 58926e2beb1d0..383d031ed4353 100644 --- a/library/std/src/sys/pal/xous/mod.rs +++ b/library/std/src/sys/pal/xous/mod.rs @@ -1,8 +1,5 @@ #![forbid(unsafe_op_in_unsafe_fn)] -pub mod args; -#[path = "../unsupported/env.rs"] -pub mod env; pub mod os; #[path = "../unsupported/pipe.rs"] pub mod pipe; diff --git a/library/std/src/sys/pal/xous/os.rs b/library/std/src/sys/pal/xous/os.rs index 2c87e7d91f27d..2230dabe096fd 100644 --- a/library/std/src/sys/pal/xous/os.rs +++ b/library/std/src/sys/pal/xous/os.rs @@ -1,17 +1,15 @@ use super::unsupported; -use crate::collections::HashMap; use crate::error::Error as StdError; use crate::ffi::{OsStr, OsString}; use crate::marker::PhantomData; use crate::os::xous::ffi::Error as XousError; use crate::path::{self, PathBuf}; -use crate::sync::atomic::{AtomicPtr, AtomicUsize, Ordering}; -use crate::sync::{Mutex, Once}; -use crate::{fmt, io, vec}; +use crate::sync::atomic::{Atomic, AtomicPtr, Ordering}; +use crate::{fmt, io}; pub(crate) mod params; -static PARAMS_ADDRESS: AtomicPtr = AtomicPtr::new(core::ptr::null_mut()); +static PARAMS_ADDRESS: Atomic<*mut u8> = AtomicPtr::new(core::ptr::null_mut()); #[cfg(not(test))] #[cfg(feature = "panic_unwind")] @@ -136,100 +134,6 @@ pub(crate) fn get_application_parameters() -> Option>; - -fn get_env_store() -> &'static EnvStore { - ENV_INIT.call_once(|| { - let env_store = EnvStore::default(); - if let Some(params) = get_application_parameters() { - for param in params { - if let Ok(envs) = params::EnvironmentBlock::try_from(¶m) { - let mut env_store = env_store.lock().unwrap(); - for env in envs { - env_store.insert(env.key.into(), env.value.into()); - } - break; - } - } - } - ENV.store(Box::into_raw(Box::new(env_store)) as _, Ordering::Relaxed) - }); - unsafe { &*core::ptr::with_exposed_provenance::(ENV.load(Ordering::Relaxed)) } -} - -pub struct Env { - iter: vec::IntoIter<(OsString, OsString)>, -} - -// FIXME(https://github.com/rust-lang/rust/issues/114583): Remove this when ::fmt matches ::fmt. -pub struct EnvStrDebug<'a> { - slice: &'a [(OsString, OsString)], -} - -impl fmt::Debug for EnvStrDebug<'_> { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - let Self { slice } = self; - f.debug_list() - .entries(slice.iter().map(|(a, b)| (a.to_str().unwrap(), b.to_str().unwrap()))) - .finish() - } -} - -impl Env { - // FIXME(https://github.com/rust-lang/rust/issues/114583): Remove this when ::fmt matches ::fmt. - pub fn str_debug(&self) -> impl fmt::Debug + '_ { - let Self { iter } = self; - EnvStrDebug { slice: iter.as_slice() } - } -} - -impl fmt::Debug for Env { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - let Self { iter } = self; - f.debug_list().entries(iter.as_slice()).finish() - } -} - -impl !Send for Env {} -impl !Sync for Env {} - -impl Iterator for Env { - type Item = (OsString, OsString); - fn next(&mut self) -> Option<(OsString, OsString)> { - self.iter.next() - } - fn size_hint(&self) -> (usize, Option) { - self.iter.size_hint() - } -} - -pub fn env() -> Env { - let clone_to_vec = |map: &HashMap| -> Vec<_> { - map.iter().map(|(k, v)| (k.clone(), v.clone())).collect() - }; - - let iter = clone_to_vec(&*get_env_store().lock().unwrap()).into_iter(); - Env { iter } -} - -pub fn getenv(k: &OsStr) -> Option { - get_env_store().lock().unwrap().get(k).cloned() -} - -pub unsafe fn setenv(k: &OsStr, v: &OsStr) -> io::Result<()> { - let (k, v) = (k.to_owned(), v.to_owned()); - get_env_store().lock().unwrap().insert(k, v); - Ok(()) -} - -pub unsafe fn unsetenv(k: &OsStr) -> io::Result<()> { - get_env_store().lock().unwrap().remove(k); - Ok(()) -} - pub fn temp_dir() -> PathBuf { panic!("no filesystem on this platform") } diff --git a/library/std/src/sys/pal/zkvm/args.rs b/library/std/src/sys/pal/zkvm/args.rs deleted file mode 100644 index 47857f6c448bc..0000000000000 --- a/library/std/src/sys/pal/zkvm/args.rs +++ /dev/null @@ -1,81 +0,0 @@ -use super::{WORD_SIZE, abi}; -use crate::ffi::OsString; -use crate::fmt; -use crate::sys::os_str; -use crate::sys_common::FromInner; - -pub struct Args { - i_forward: usize, - i_back: usize, - count: usize, -} - -pub fn args() -> Args { - let count = unsafe { abi::sys_argc() }; - Args { i_forward: 0, i_back: 0, count } -} - -impl Args { - /// Use sys_argv to get the arg at the requested index. Does not check that i is less than argc - /// and will not return if the index is out of bounds. - fn argv(i: usize) -> OsString { - let arg_len = unsafe { abi::sys_argv(crate::ptr::null_mut(), 0, i) }; - - let arg_len_words = (arg_len + WORD_SIZE - 1) / WORD_SIZE; - let words = unsafe { abi::sys_alloc_words(arg_len_words) }; - - let arg_len2 = unsafe { abi::sys_argv(words, arg_len_words, i) }; - debug_assert_eq!(arg_len, arg_len2); - - // Convert to OsString. - // - // FIXME: We can probably get rid of the extra copy here if we - // reimplement "os_str" instead of just using the generic unix - // "os_str". - let arg_bytes: &[u8] = - unsafe { crate::slice::from_raw_parts(words.cast() as *const u8, arg_len) }; - OsString::from_inner(os_str::Buf { inner: arg_bytes.to_vec() }) - } -} - -impl fmt::Debug for Args { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.debug_list().finish() - } -} - -impl Iterator for Args { - type Item = OsString; - - fn next(&mut self) -> Option { - if self.i_forward >= self.count - self.i_back { - None - } else { - let arg = Self::argv(self.i_forward); - self.i_forward += 1; - Some(arg) - } - } - - fn size_hint(&self) -> (usize, Option) { - (self.count, Some(self.count)) - } -} - -impl ExactSizeIterator for Args { - fn len(&self) -> usize { - self.count - } -} - -impl DoubleEndedIterator for Args { - fn next_back(&mut self) -> Option { - if self.i_back >= self.count - self.i_forward { - None - } else { - let arg = Self::argv(self.count - 1 - self.i_back); - self.i_back += 1; - Some(arg) - } - } -} diff --git a/library/std/src/sys/pal/zkvm/env.rs b/library/std/src/sys/pal/zkvm/env.rs deleted file mode 100644 index b85153642b1c9..0000000000000 --- a/library/std/src/sys/pal/zkvm/env.rs +++ /dev/null @@ -1,9 +0,0 @@ -pub mod os { - pub const FAMILY: &str = ""; - pub const OS: &str = ""; - pub const DLL_PREFIX: &str = ""; - pub const DLL_SUFFIX: &str = ".elf"; - pub const DLL_EXTENSION: &str = "elf"; - pub const EXE_SUFFIX: &str = ".elf"; - pub const EXE_EXTENSION: &str = "elf"; -} diff --git a/library/std/src/sys/pal/zkvm/mod.rs b/library/std/src/sys/pal/zkvm/mod.rs index 4659dad16e85a..e1efa2406858f 100644 --- a/library/std/src/sys/pal/zkvm/mod.rs +++ b/library/std/src/sys/pal/zkvm/mod.rs @@ -8,12 +8,9 @@ //! will likely change over time. #![forbid(unsafe_op_in_unsafe_fn)] -const WORD_SIZE: usize = size_of::(); +pub const WORD_SIZE: usize = size_of::(); pub mod abi; -#[path = "../zkvm/args.rs"] -pub mod args; -pub mod env; pub mod os; #[path = "../unsupported/pipe.rs"] pub mod pipe; diff --git a/library/std/src/sys/pal/zkvm/os.rs b/library/std/src/sys/pal/zkvm/os.rs index 868b19e33b672..a8ef97ecf67ac 100644 --- a/library/std/src/sys/pal/zkvm/os.rs +++ b/library/std/src/sys/pal/zkvm/os.rs @@ -1,10 +1,8 @@ -use super::{WORD_SIZE, abi, unsupported}; +use super::unsupported; use crate::error::Error as StdError; use crate::ffi::{OsStr, OsString}; use crate::marker::PhantomData; use crate::path::{self, PathBuf}; -use crate::sys::os_str; -use crate::sys_common::FromInner; use crate::{fmt, io}; pub fn errno() -> i32 { @@ -64,64 +62,6 @@ pub fn current_exe() -> io::Result { unsupported() } -pub struct Env(!); - -impl Iterator for Env { - type Item = (OsString, OsString); - fn next(&mut self) -> Option<(OsString, OsString)> { - self.0 - } -} - -pub fn env() -> Env { - panic!("not supported on this platform") -} - -impl Env { - pub fn str_debug(&self) -> impl fmt::Debug + '_ { - let Self(inner) = self; - match *inner {} - } -} - -impl fmt::Debug for Env { - fn fmt(&self, _: &mut fmt::Formatter<'_>) -> fmt::Result { - let Self(inner) = self; - match *inner {} - } -} - -pub fn getenv(varname: &OsStr) -> Option { - let varname = varname.as_encoded_bytes(); - let nbytes = - unsafe { abi::sys_getenv(crate::ptr::null_mut(), 0, varname.as_ptr(), varname.len()) }; - if nbytes == usize::MAX { - return None; - } - - let nwords = (nbytes + WORD_SIZE - 1) / WORD_SIZE; - let words = unsafe { abi::sys_alloc_words(nwords) }; - - let nbytes2 = unsafe { abi::sys_getenv(words, nwords, varname.as_ptr(), varname.len()) }; - debug_assert_eq!(nbytes, nbytes2); - - // Convert to OsString. - // - // FIXME: We can probably get rid of the extra copy here if we - // reimplement "os_str" instead of just using the generic unix - // "os_str". - let u8s: &[u8] = unsafe { crate::slice::from_raw_parts(words.cast() as *const u8, nbytes) }; - Some(OsString::from_inner(os_str::Buf { inner: u8s.to_vec() })) -} - -pub unsafe fn setenv(_: &OsStr, _: &OsStr) -> io::Result<()> { - Err(io::const_error!(io::ErrorKind::Unsupported, "cannot set env vars on this platform")) -} - -pub unsafe fn unsetenv(_: &OsStr) -> io::Result<()> { - Err(io::const_error!(io::ErrorKind::Unsupported, "cannot unset env vars on this platform")) -} - pub fn temp_dir() -> PathBuf { panic!("no filesystem on this platform") } diff --git a/library/std/src/sys/path/windows.rs b/library/std/src/sys/path/windows.rs index 1c53472191699..e0e003f6a8192 100644 --- a/library/std/src/sys/path/windows.rs +++ b/library/std/src/sys/path/windows.rs @@ -10,6 +10,40 @@ mod tests; pub const MAIN_SEP_STR: &str = "\\"; pub const MAIN_SEP: char = '\\'; +/// A null terminated wide string. +#[repr(transparent)] +pub struct WCStr([u16]); + +impl WCStr { + /// Convert a slice to a WCStr without checks. + /// + /// Though it is memory safe, the slice should also not contain interior nulls + /// as this may lead to unwanted truncation. + /// + /// # Safety + /// + /// The slice must end in a null. + pub unsafe fn from_wchars_with_null_unchecked(s: &[u16]) -> &Self { + unsafe { &*(s as *const [u16] as *const Self) } + } + + pub fn as_ptr(&self) -> *const u16 { + self.0.as_ptr() + } + + pub fn count_bytes(&self) -> usize { + self.0.len() + } +} + +#[inline] +pub fn with_native_path(path: &Path, f: &dyn Fn(&WCStr) -> io::Result) -> io::Result { + let path = maybe_verbatim(path)?; + // SAFETY: maybe_verbatim returns null-terminated strings + let path = unsafe { WCStr::from_wchars_with_null_unchecked(&path) }; + f(path) +} + #[inline] pub fn is_sep_byte(b: u8) -> bool { b == b'/' || b == b'\\' @@ -350,3 +384,46 @@ pub(crate) fn absolute(path: &Path) -> io::Result { pub(crate) fn is_absolute(path: &Path) -> bool { path.has_root() && path.prefix().is_some() } + +/// Test that the path is absolute, fully qualified and unchanged when processed by the Windows API. +/// +/// For example: +/// +/// - `C:\path\to\file` will return true. +/// - `C:\path\to\nul` returns false because the Windows API will convert it to \\.\NUL +/// - `C:\path\to\..\file` returns false because it will be resolved to `C:\path\file`. +/// +/// This is a useful property because it means the path can be converted from and to and verbatim +/// path just by changing the prefix. +pub(crate) fn is_absolute_exact(path: &[u16]) -> bool { + // This is implemented by checking that passing the path through + // GetFullPathNameW does not change the path in any way. + + // Windows paths are limited to i16::MAX length + // though the API here accepts a u32 for the length. + if path.is_empty() || path.len() > u32::MAX as usize || path.last() != Some(&0) { + return false; + } + // The path returned by `GetFullPathNameW` must be the same length as the + // given path, otherwise they're not equal. + let buffer_len = path.len(); + let mut new_path = Vec::with_capacity(buffer_len); + let result = unsafe { + c::GetFullPathNameW( + path.as_ptr(), + new_path.capacity() as u32, + new_path.as_mut_ptr(), + crate::ptr::null_mut(), + ) + }; + // Note: if non-zero, the returned result is the length of the buffer without the null termination + if result == 0 || result as usize != buffer_len - 1 { + false + } else { + // SAFETY: `GetFullPathNameW` initialized `result` bytes and does not exceed `nBufferLength - 1` (capacity). + unsafe { + new_path.set_len((result as usize) + 1); + } + path == &new_path + } +} diff --git a/library/std/src/sys/path/windows/tests.rs b/library/std/src/sys/path/windows/tests.rs index f2a60e30bc610..9eb79203dcac7 100644 --- a/library/std/src/sys/path/windows/tests.rs +++ b/library/std/src/sys/path/windows/tests.rs @@ -135,3 +135,15 @@ fn broken_unc_path() { assert_eq!(components.next(), Some(Component::Normal("foo".as_ref()))); assert_eq!(components.next(), Some(Component::Normal("bar".as_ref()))); } + +#[test] +fn test_is_absolute_exact() { + use crate::sys::pal::api::wide_str; + // These paths can be made verbatim by only changing their prefix. + assert!(is_absolute_exact(wide_str!(r"C:\path\to\file"))); + assert!(is_absolute_exact(wide_str!(r"\\server\share\path\to\file"))); + // These paths change more substantially + assert!(!is_absolute_exact(wide_str!(r"C:\path\to\..\file"))); + assert!(!is_absolute_exact(wide_str!(r"\\server\share\path\to\..\file"))); + assert!(!is_absolute_exact(wide_str!(r"C:\path\to\NUL"))); // Converts to \\.\NUL +} diff --git a/library/std/src/sys/process/env.rs b/library/std/src/sys/process/env.rs new file mode 100644 index 0000000000000..e08b476540ef9 --- /dev/null +++ b/library/std/src/sys/process/env.rs @@ -0,0 +1,115 @@ +use crate::collections::BTreeMap; +use crate::ffi::{OsStr, OsString}; +use crate::sys::process::EnvKey; +use crate::{env, fmt}; + +/// Stores a set of changes to an environment +#[derive(Clone, Default)] +pub struct CommandEnv { + clear: bool, + saw_path: bool, + vars: BTreeMap>, +} + +impl fmt::Debug for CommandEnv { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let mut debug_command_env = f.debug_struct("CommandEnv"); + debug_command_env.field("clear", &self.clear).field("vars", &self.vars); + debug_command_env.finish() + } +} + +impl CommandEnv { + // Capture the current environment with these changes applied + pub fn capture(&self) -> BTreeMap { + let mut result = BTreeMap::::new(); + if !self.clear { + for (k, v) in env::vars_os() { + result.insert(k.into(), v); + } + } + for (k, maybe_v) in &self.vars { + if let &Some(ref v) = maybe_v { + result.insert(k.clone(), v.clone()); + } else { + result.remove(k); + } + } + result + } + + pub fn is_unchanged(&self) -> bool { + !self.clear && self.vars.is_empty() + } + + pub fn capture_if_changed(&self) -> Option> { + if self.is_unchanged() { None } else { Some(self.capture()) } + } + + // The following functions build up changes + pub fn set(&mut self, key: &OsStr, value: &OsStr) { + let key = EnvKey::from(key); + self.maybe_saw_path(&key); + self.vars.insert(key, Some(value.to_owned())); + } + + pub fn remove(&mut self, key: &OsStr) { + let key = EnvKey::from(key); + self.maybe_saw_path(&key); + if self.clear { + self.vars.remove(&key); + } else { + self.vars.insert(key, None); + } + } + + pub fn clear(&mut self) { + self.clear = true; + self.vars.clear(); + } + + pub fn does_clear(&self) -> bool { + self.clear + } + + pub fn have_changed_path(&self) -> bool { + self.saw_path || self.clear + } + + fn maybe_saw_path(&mut self, key: &EnvKey) { + if !self.saw_path && key == "PATH" { + self.saw_path = true; + } + } + + pub fn iter(&self) -> CommandEnvs<'_> { + let iter = self.vars.iter(); + CommandEnvs { iter } + } +} + +#[derive(Debug)] +pub struct CommandEnvs<'a> { + iter: crate::collections::btree_map::Iter<'a, EnvKey, Option>, +} + +impl<'a> Iterator for CommandEnvs<'a> { + type Item = (&'a OsStr, Option<&'a OsStr>); + + fn next(&mut self) -> Option { + self.iter.next().map(|(key, value)| (key.as_ref(), value.as_deref())) + } + + fn size_hint(&self) -> (usize, Option) { + self.iter.size_hint() + } +} + +impl<'a> ExactSizeIterator for CommandEnvs<'a> { + fn len(&self) -> usize { + self.iter.len() + } + fn is_empty(&self) -> bool { + self.iter.is_empty() + } +} diff --git a/library/std/src/sys/process/mod.rs b/library/std/src/sys/process/mod.rs index 92cfac7f47cf6..91c7005a32855 100644 --- a/library/std/src/sys/process/mod.rs +++ b/library/std/src/sys/process/mod.rs @@ -14,6 +14,65 @@ cfg_if::cfg_if! { } } +// This module is shared by all platforms, but nearly all platforms except for +// the "normal" UNIX ones leave some of this code unused. +#[cfg_attr(not(target_os = "linux"), allow(dead_code))] +mod env; + +pub use env::CommandEnvs; pub use imp::{ Command, CommandArgs, EnvKey, ExitCode, ExitStatus, ExitStatusError, Process, Stdio, StdioPipes, }; + +#[cfg(any( + all( + target_family = "unix", + not(any( + target_os = "espidf", + target_os = "horizon", + target_os = "vita", + target_os = "nuttx" + )) + ), + target_os = "windows", +))] +pub fn output(cmd: &mut Command) -> crate::io::Result<(ExitStatus, Vec, Vec)> { + use crate::sys::pipe::read2; + + let (mut process, mut pipes) = cmd.spawn(Stdio::MakePipe, false)?; + + drop(pipes.stdin.take()); + let (mut stdout, mut stderr) = (Vec::new(), Vec::new()); + match (pipes.stdout.take(), pipes.stderr.take()) { + (None, None) => {} + (Some(out), None) => { + let res = out.read_to_end(&mut stdout); + res.unwrap(); + } + (None, Some(err)) => { + let res = err.read_to_end(&mut stderr); + res.unwrap(); + } + (Some(out), Some(err)) => { + let res = read2(out, &mut stdout, err, &mut stderr); + res.unwrap(); + } + } + + let status = process.wait()?; + Ok((status, stdout, stderr)) +} + +#[cfg(not(any( + all( + target_family = "unix", + not(any( + target_os = "espidf", + target_os = "horizon", + target_os = "vita", + target_os = "nuttx" + )) + ), + target_os = "windows", +)))] +pub use imp::output; diff --git a/library/std/src/sys/process/uefi.rs b/library/std/src/sys/process/uefi.rs index b46418ae9bb67..4864c58698817 100644 --- a/library/std/src/sys/process/uefi.rs +++ b/library/std/src/sys/process/uefi.rs @@ -1,5 +1,6 @@ -use r_efi::protocols::simple_text_output; +use r_efi::protocols::{simple_text_input, simple_text_output}; +use super::env::{CommandEnv, CommandEnvs}; use crate::collections::BTreeMap; pub use crate::ffi::OsString as EnvKey; use crate::ffi::{OsStr, OsString}; @@ -10,7 +11,6 @@ use crate::sys::pal::helpers; use crate::sys::pal::os::error_string; use crate::sys::pipe::AnonPipe; use crate::sys::unsupported; -use crate::sys_common::process::{CommandEnv, CommandEnvs}; use crate::{fmt, io}; //////////////////////////////////////////////////////////////////////////////// @@ -23,6 +23,7 @@ pub struct Command { args: Vec, stdout: Option, stderr: Option, + stdin: Option, env: CommandEnv, } @@ -48,6 +49,7 @@ impl Command { args: Vec::new(), stdout: None, stderr: None, + stdin: None, env: Default::default(), } } @@ -64,8 +66,8 @@ impl Command { panic!("unsupported") } - pub fn stdin(&mut self, _stdin: Stdio) { - panic!("unsupported") + pub fn stdin(&mut self, stdin: Stdio) { + self.stdin = Some(stdin); } pub fn stdout(&mut self, stdout: Stdio) { @@ -122,62 +124,87 @@ impl Command { } } - pub fn output(&mut self) -> io::Result<(ExitStatus, Vec, Vec)> { - let mut cmd = uefi_command_internal::Image::load_image(&self.prog)?; - - // UEFI adds the bin name by default - if !self.args.is_empty() { - let args = uefi_command_internal::create_args(&self.prog, &self.args); - cmd.set_args(args); + fn create_stdin( + s: Stdio, + ) -> io::Result>> { + match s { + Stdio::Null => unsafe { + helpers::OwnedProtocol::create( + uefi_command_internal::InputProtocol::null(), + simple_text_input::PROTOCOL_GUID, + ) + } + .map(Some), + Stdio::Inherit => Ok(None), + Stdio::MakePipe => unsupported(), } + } +} - // Setup Stdout - let stdout = self.stdout.unwrap_or(Stdio::MakePipe); - let stdout = Self::create_pipe(stdout)?; - if let Some(con) = stdout { - cmd.stdout_init(con) - } else { - cmd.stdout_inherit() - }; - - // Setup Stderr - let stderr = self.stderr.unwrap_or(Stdio::MakePipe); - let stderr = Self::create_pipe(stderr)?; - if let Some(con) = stderr { - cmd.stderr_init(con) - } else { - cmd.stderr_inherit() - }; - - let env = env_changes(&self.env); - - // Set any new vars - if let Some(e) = &env { - for (k, (_, v)) in e { - match v { - Some(v) => unsafe { crate::env::set_var(k, v) }, - None => unsafe { crate::env::remove_var(k) }, - } +pub fn output(command: &mut Command) -> io::Result<(ExitStatus, Vec, Vec)> { + let mut cmd = uefi_command_internal::Image::load_image(&command.prog)?; + + // UEFI adds the bin name by default + if !command.args.is_empty() { + let args = uefi_command_internal::create_args(&command.prog, &command.args); + cmd.set_args(args); + } + + // Setup Stdout + let stdout = command.stdout.unwrap_or(Stdio::MakePipe); + let stdout = Command::create_pipe(stdout)?; + if let Some(con) = stdout { + cmd.stdout_init(con) + } else { + cmd.stdout_inherit() + }; + + // Setup Stderr + let stderr = command.stderr.unwrap_or(Stdio::MakePipe); + let stderr = Command::create_pipe(stderr)?; + if let Some(con) = stderr { + cmd.stderr_init(con) + } else { + cmd.stderr_inherit() + }; + + // Setup Stdin + let stdin = command.stdin.unwrap_or(Stdio::Null); + let stdin = Command::create_stdin(stdin)?; + if let Some(con) = stdin { + cmd.stdin_init(con) + } else { + cmd.stdin_inherit() + }; + + let env = env_changes(&command.env); + + // Set any new vars + if let Some(e) = &env { + for (k, (_, v)) in e { + match v { + Some(v) => unsafe { crate::env::set_var(k, v) }, + None => unsafe { crate::env::remove_var(k) }, } } + } - let stat = cmd.start_image()?; + let stat = cmd.start_image()?; - // Rollback any env changes - if let Some(e) = env { - for (k, (v, _)) in e { - match v { - Some(v) => unsafe { crate::env::set_var(k, v) }, - None => unsafe { crate::env::remove_var(k) }, - } + // Rollback any env changes + if let Some(e) = env { + for (k, (v, _)) in e { + match v { + Some(v) => unsafe { crate::env::set_var(k, v) }, + None => unsafe { crate::env::remove_var(k) }, } } + } - let stdout = cmd.stdout()?; - let stderr = cmd.stderr()?; + let stdout = cmd.stdout()?; + let stderr = cmd.stderr()?; - Ok((ExitStatus(stat), stdout, stderr)) - } + Ok((ExitStatus(stat), stdout, stderr)) } impl From for Stdio { @@ -334,7 +361,7 @@ impl<'a> fmt::Debug for CommandArgs<'a> { #[allow(dead_code)] mod uefi_command_internal { - use r_efi::protocols::{loaded_image, simple_text_output}; + use r_efi::protocols::{loaded_image, simple_text_input, simple_text_output}; use crate::ffi::{OsStr, OsString}; use crate::io::{self, const_error}; @@ -350,6 +377,7 @@ mod uefi_command_internal { handle: NonNull, stdout: Option>, stderr: Option>, + stdin: Option>, st: OwnedTable, args: Option<(*mut u16, usize)>, } @@ -384,7 +412,14 @@ mod uefi_command_internal { helpers::open_protocol(child_handle, loaded_image::PROTOCOL_GUID).unwrap(); let st = OwnedTable::from_table(unsafe { (*loaded_image.as_ptr()).system_table }); - Ok(Self { handle: child_handle, stdout: None, stderr: None, st, args: None }) + Ok(Self { + handle: child_handle, + stdout: None, + stderr: None, + stdin: None, + st, + args: None, + }) } } @@ -445,6 +480,17 @@ mod uefi_command_internal { } } + fn set_stdin( + &mut self, + handle: r_efi::efi::Handle, + protocol: *mut simple_text_input::Protocol, + ) { + unsafe { + (*self.st.as_mut_ptr()).console_in_handle = handle; + (*self.st.as_mut_ptr()).con_in = protocol; + } + } + pub fn stdout_init(&mut self, protocol: helpers::OwnedProtocol) { self.set_stdout( protocol.handle().as_ptr(), @@ -471,6 +517,19 @@ mod uefi_command_internal { unsafe { self.set_stderr((*st.as_ptr()).standard_error_handle, (*st.as_ptr()).std_err) } } + pub(crate) fn stdin_init(&mut self, protocol: helpers::OwnedProtocol) { + self.set_stdin( + protocol.handle().as_ptr(), + protocol.as_ref() as *const InputProtocol as *mut simple_text_input::Protocol, + ); + self.stdin = Some(protocol); + } + + pub(crate) fn stdin_inherit(&mut self) { + let st: NonNull = system_table().cast(); + unsafe { self.set_stdin((*st.as_ptr()).console_in_handle, (*st.as_ptr()).con_in) } + } + pub fn stderr(&self) -> io::Result> { match &self.stderr { Some(stderr) => stderr.as_ref().utf8(), @@ -722,6 +781,56 @@ mod uefi_command_internal { } } + #[repr(C)] + pub(crate) struct InputProtocol { + reset: simple_text_input::ProtocolReset, + read_key_stroke: simple_text_input::ProtocolReadKeyStroke, + wait_for_key: r_efi::efi::Event, + } + + impl InputProtocol { + pub(crate) fn null() -> Self { + let evt = helpers::OwnedEvent::new( + r_efi::efi::EVT_NOTIFY_WAIT, + r_efi::efi::TPL_CALLBACK, + Some(Self::empty_notify), + None, + ) + .unwrap(); + + Self { + reset: Self::null_reset, + read_key_stroke: Self::null_read_key, + wait_for_key: evt.into_raw(), + } + } + + extern "efiapi" fn null_reset( + _: *mut simple_text_input::Protocol, + _: r_efi::efi::Boolean, + ) -> r_efi::efi::Status { + r_efi::efi::Status::SUCCESS + } + + extern "efiapi" fn null_read_key( + _: *mut simple_text_input::Protocol, + _: *mut simple_text_input::InputKey, + ) -> r_efi::efi::Status { + r_efi::efi::Status::UNSUPPORTED + } + + extern "efiapi" fn empty_notify(_: r_efi::efi::Event, _: *mut crate::ffi::c_void) {} + } + + impl Drop for InputProtocol { + fn drop(&mut self) { + // Close wait_for_key + unsafe { + let _ = helpers::OwnedEvent::from_raw(self.wait_for_key); + } + } + } + pub fn create_args(prog: &OsStr, args: &[OsString]) -> Box<[u16]> { const QUOTE: u16 = 0x0022; const SPACE: u16 = 0x0020; diff --git a/library/std/src/sys/process/unix/common.rs b/library/std/src/sys/process/unix/common.rs index 8bc17f314911d..a9c2510e6d454 100644 --- a/library/std/src/sys/process/unix/common.rs +++ b/library/std/src/sys/process/unix/common.rs @@ -12,7 +12,7 @@ use crate::sys::fs::File; #[cfg(not(target_os = "fuchsia"))] use crate::sys::fs::OpenOptions; use crate::sys::pipe::{self, AnonPipe}; -use crate::sys_common::process::{CommandEnv, CommandEnvs}; +use crate::sys::process::env::{CommandEnv, CommandEnvs}; use crate::sys_common::{FromInner, IntoInner}; use crate::{fmt, io, ptr}; diff --git a/library/std/src/sys/process/unix/fuchsia.rs b/library/std/src/sys/process/unix/fuchsia.rs index 0de32ecffd4b0..017ab91797ce6 100644 --- a/library/std/src/sys/process/unix/fuchsia.rs +++ b/library/std/src/sys/process/unix/fuchsia.rs @@ -31,11 +31,6 @@ impl Command { Ok((Process { handle: Handle::new(process_handle) }, ours)) } - pub fn output(&mut self) -> io::Result<(ExitStatus, Vec, Vec)> { - let (proc, pipes) = self.spawn(Stdio::MakePipe, false)?; - crate::sys_common::process::wait_with_output(proc, pipes) - } - pub fn exec(&mut self, default: Stdio) -> io::Error { if self.saw_nul() { return io::const_error!( @@ -81,7 +76,7 @@ impl Command { let mut handle = ZX_HANDLE_INVALID; let status = fdio_fd_clone(target_fd, &mut handle); - if status == ERR_INVALID_ARGS || status == ERR_NOT_SUPPORTED { + if status == ZX_ERR_INVALID_ARGS || status == ZX_ERR_NOT_SUPPORTED { // This descriptor is closed; skip it rather than generating an // error. return Ok(Default::default()); @@ -197,7 +192,7 @@ impl Process { zx_object_wait_one(self.handle.raw(), ZX_TASK_TERMINATED, 0, ptr::null_mut()); match status { 0 => {} // Success - x if x == ERR_TIMED_OUT => { + x if x == ZX_ERR_TIMED_OUT => { return Ok(None); } _ => { diff --git a/library/std/src/sys/process/unix/mod.rs b/library/std/src/sys/process/unix/mod.rs index 2e8b38f7de1b6..ee8fd8b2ca3c6 100644 --- a/library/std/src/sys/process/unix/mod.rs +++ b/library/std/src/sys/process/unix/mod.rs @@ -11,6 +11,7 @@ cfg_if::cfg_if! { } else if #[cfg(any(target_os = "espidf", target_os = "horizon", target_os = "vita", target_os = "nuttx"))] { mod unsupported; use unsupported as imp; + pub use unsupported::output; } else { mod unix; use unix as imp; diff --git a/library/std/src/sys/process/unix/unix.rs b/library/std/src/sys/process/unix/unix.rs index 191a09c8da913..1b3bd2de265da 100644 --- a/library/std/src/sys/process/unix/unix.rs +++ b/library/std/src/sys/process/unix/unix.rs @@ -88,7 +88,7 @@ impl Command { // in its own process. Thus the parent drops the lock guard immediately. // The child calls `mem::forget` to leak the lock, which is crucial because // releasing a lock is not async-signal-safe. - let env_lock = sys::os::env_read_lock(); + let env_lock = sys::env::env_read_lock(); let pid = unsafe { self.do_fork()? }; if pid == 0 { @@ -162,11 +162,6 @@ impl Command { } } - pub fn output(&mut self) -> io::Result<(ExitStatus, Vec, Vec)> { - let (proc, pipes) = self.spawn(Stdio::MakePipe, false)?; - crate::sys_common::process::wait_with_output(proc, pipes) - } - // WatchOS and TVOS headers mark the `fork`/`exec*` functions with // `__WATCHOS_PROHIBITED __TVOS_PROHIBITED`, and indicate that the // `posix_spawn*` functions should be used instead. It isn't entirely clear @@ -237,7 +232,7 @@ impl Command { // Similar to when forking, we want to ensure that access to // the environment is synchronized, so make sure to grab the // environment lock before we try to exec. - let _lock = sys::os::env_read_lock(); + let _lock = sys::env::env_read_lock(); let Err(e) = self.do_exec(theirs, envp.as_ref()); e @@ -386,13 +381,13 @@ impl Command { impl Drop for Reset { fn drop(&mut self) { unsafe { - *sys::os::environ() = self.0; + *sys::env::environ() = self.0; } } } - _reset = Some(Reset(*sys::os::environ())); - *sys::os::environ() = envp.as_ptr(); + _reset = Some(Reset(*sys::env::environ())); + *sys::env::environ() = envp.as_ptr(); } libc::execvp(self.get_program_cstr().as_ptr(), self.get_argv().as_ptr()); @@ -415,6 +410,7 @@ impl Command { all(target_os = "linux", target_env = "musl"), target_os = "nto", target_vendor = "apple", + target_os = "cygwin", )))] fn posix_spawn( &mut self, @@ -433,17 +429,15 @@ impl Command { all(target_os = "linux", target_env = "musl"), target_os = "nto", target_vendor = "apple", + target_os = "cygwin", ))] - // FIXME(#115199): Rust currently omits weak function definitions - // and its metadata from LLVM IR. - #[cfg_attr(target_os = "linux", no_sanitize(cfi))] fn posix_spawn( &mut self, stdio: &ChildPipes, envp: Option<&CStringArray>, ) -> io::Result> { #[cfg(target_os = "linux")] - use core::sync::atomic::{AtomicU8, Ordering}; + use core::sync::atomic::{Atomic, AtomicU8, Ordering}; use crate::mem::MaybeUninit; use crate::sys::{self, cvt_nz, on_broken_pipe_flag_used}; @@ -476,7 +470,7 @@ impl Command { fn pidfd_getpid(pidfd: libc::c_int) -> libc::c_int; ); - static PIDFD_SUPPORTED: AtomicU8 = AtomicU8::new(0); + static PIDFD_SUPPORTED: Atomic = AtomicU8::new(0); const UNKNOWN: u8 = 0; const SPAWN: u8 = 1; // Obtaining a pidfd via the fork+exec path might work @@ -587,7 +581,7 @@ impl Command { /// Some platforms can set a new working directory for a spawned process in the /// `posix_spawn` path. This function looks up the function pointer for adding /// such an action to a `posix_spawn_file_actions_t` struct. - #[cfg(not(all(target_os = "linux", target_env = "musl")))] + #[cfg(not(any(all(target_os = "linux", target_env = "musl"), target_os = "cygwin")))] fn get_posix_spawn_addchdir() -> Option { use crate::sys::weak::weak; @@ -621,7 +615,9 @@ impl Command { /// Weak symbol lookup doesn't work with statically linked libcs, so in cases /// where static linking is possible we need to either check for the presence /// of the symbol at compile time or know about it upfront. - #[cfg(all(target_os = "linux", target_env = "musl"))] + /// + /// Cygwin doesn't support weak symbol, so just link it. + #[cfg(any(all(target_os = "linux", target_env = "musl"), target_os = "cygwin"))] fn get_posix_spawn_addchdir() -> Option { // Our minimum required musl supports this function, so we can just use it. Some(libc::posix_spawn_file_actions_addchdir_np) @@ -738,8 +734,8 @@ impl Command { cvt_nz(libc::posix_spawnattr_setflags(attrs.0.as_mut_ptr(), flags as _))?; // Make sure we synchronize access to the global `environ` resource - let _env_lock = sys::os::env_read_lock(); - let envp = envp.map(|c| c.as_ptr()).unwrap_or_else(|| *sys::os::environ() as *const _); + let _env_lock = sys::env::env_read_lock(); + let envp = envp.map(|c| c.as_ptr()).unwrap_or_else(|| *sys::env::environ() as *const _); #[cfg(not(target_os = "nto"))] let spawn_fn = libc::posix_spawnp; diff --git a/library/std/src/sys/process/unix/unsupported.rs b/library/std/src/sys/process/unix/unsupported.rs index 78d270923cfa2..e86561a5c5c4f 100644 --- a/library/std/src/sys/process/unix/unsupported.rs +++ b/library/std/src/sys/process/unix/unsupported.rs @@ -18,15 +18,15 @@ impl Command { unsupported() } - pub fn output(&mut self) -> io::Result<(ExitStatus, Vec, Vec)> { - unsupported() - } - pub fn exec(&mut self, _default: Stdio) -> io::Error { unsupported_err() } } +pub fn output(_: &mut Command) -> io::Result<(ExitStatus, Vec, Vec)> { + unsupported() +} + //////////////////////////////////////////////////////////////////////////////// // Processes //////////////////////////////////////////////////////////////////////////////// diff --git a/library/std/src/sys/process/unix/vxworks.rs b/library/std/src/sys/process/unix/vxworks.rs index 5f1727789a1bc..fab3b36ebf3fa 100644 --- a/library/std/src/sys/process/unix/vxworks.rs +++ b/library/std/src/sys/process/unix/vxworks.rs @@ -67,7 +67,7 @@ impl Command { let c_envp = envp .as_ref() .map(|c| c.as_ptr()) - .unwrap_or_else(|| *sys::os::environ() as *const _); + .unwrap_or_else(|| *sys::env::environ() as *const _); let stack_size = crate::cmp::max( crate::env::var_os("RUST_MIN_STACK") .and_then(|s| s.to_str().and_then(|s| s.parse().ok())) @@ -76,7 +76,7 @@ impl Command { ); // ensure that access to the environment is synchronized - let _lock = sys::os::env_read_lock(); + let _lock = sys::env::env_read_lock(); let ret = libc::rtpSpawn( self.get_program_cstr().as_ptr(), @@ -112,11 +112,6 @@ impl Command { } } - pub fn output(&mut self) -> io::Result<(ExitStatus, Vec, Vec)> { - let (proc, pipes) = self.spawn(Stdio::MakePipe, false)?; - crate::sys_common::process::wait_with_output(proc, pipes) - } - pub fn exec(&mut self, default: Stdio) -> io::Error { let ret = Command::spawn(self, default, false); match ret { diff --git a/library/std/src/sys/process/unsupported.rs b/library/std/src/sys/process/unsupported.rs index fee81744f09ec..469922c78aca2 100644 --- a/library/std/src/sys/process/unsupported.rs +++ b/library/std/src/sys/process/unsupported.rs @@ -1,3 +1,4 @@ +use super::env::{CommandEnv, CommandEnvs}; pub use crate::ffi::OsString as EnvKey; use crate::ffi::{OsStr, OsString}; use crate::num::NonZero; @@ -5,7 +6,6 @@ use crate::path::Path; use crate::sys::fs::File; use crate::sys::pipe::AnonPipe; use crate::sys::unsupported; -use crate::sys_common::process::{CommandEnv, CommandEnvs}; use crate::{fmt, io}; //////////////////////////////////////////////////////////////////////////////// @@ -104,10 +104,10 @@ impl Command { ) -> io::Result<(Process, StdioPipes)> { unsupported() } +} - pub fn output(&mut self) -> io::Result<(ExitStatus, Vec, Vec)> { - unsupported() - } +pub fn output(_cmd: &mut Command) -> io::Result<(ExitStatus, Vec, Vec)> { + unsupported() } impl From for Stdio { diff --git a/library/std/src/sys/process/windows.rs b/library/std/src/sys/process/windows.rs index 06c15e08f3fb1..4acd753eec915 100644 --- a/library/std/src/sys/process/windows.rs +++ b/library/std/src/sys/process/windows.rs @@ -5,6 +5,7 @@ mod tests; use core::ffi::c_void; +use super::env::{CommandEnv, CommandEnvs}; use crate::collections::BTreeMap; use crate::env::consts::{EXE_EXTENSION, EXE_SUFFIX}; use crate::ffi::{OsStr, OsString}; @@ -19,12 +20,11 @@ use crate::sys::args::{self, Arg}; use crate::sys::c::{self, EXIT_FAILURE, EXIT_SUCCESS}; use crate::sys::fs::{File, OpenOptions}; use crate::sys::handle::Handle; -use crate::sys::pal::api::{self, WinError}; +use crate::sys::pal::api::{self, WinError, utf16}; use crate::sys::pal::{ensure_no_nuls, fill_utf16_buf}; use crate::sys::pipe::{self, AnonPipe}; use crate::sys::{cvt, path, stdio}; use crate::sys_common::IntoInner; -use crate::sys_common::process::{CommandEnv, CommandEnvs}; use crate::{cmp, env, fmt, ptr}; //////////////////////////////////////////////////////////////////////////////// @@ -389,11 +389,6 @@ impl Command { )) } } - - pub fn output(&mut self) -> io::Result<(ExitStatus, Vec, Vec)> { - let (proc, pipes) = self.spawn(Stdio::MakePipe, false)?; - crate::sys_common::process::wait_with_output(proc, pipes) - } } impl fmt::Debug for Command { @@ -880,9 +875,33 @@ fn make_envp(maybe_env: Option>) -> io::Result<(*mut fn make_dirp(d: Option<&OsString>) -> io::Result<(*const u16, Vec)> { match d { Some(dir) => { - let mut dir_str: Vec = ensure_no_nuls(dir)?.encode_wide().collect(); - dir_str.push(0); - Ok((dir_str.as_ptr(), dir_str)) + let mut dir_str: Vec = ensure_no_nuls(dir)?.encode_wide().chain([0]).collect(); + // Try to remove the `\\?\` prefix, if any. + // This is necessary because the current directory does not support verbatim paths. + // However. this can only be done if it doesn't change how the path will be resolved. + let ptr = if dir_str.starts_with(utf16!(r"\\?\UNC")) { + // Turn the `C` in `UNC` into a `\` so we can then use `\\rest\of\path`. + let start = r"\\?\UN".len(); + dir_str[start] = b'\\' as u16; + if path::is_absolute_exact(&dir_str[start..]) { + dir_str[start..].as_ptr() + } else { + // Revert the above change. + dir_str[start] = b'C' as u16; + dir_str.as_ptr() + } + } else if dir_str.starts_with(utf16!(r"\\?\")) { + // Strip the leading `\\?\` + let start = r"\\?\".len(); + if path::is_absolute_exact(&dir_str[start..]) { + dir_str[start..].as_ptr() + } else { + dir_str.as_ptr() + } + } else { + dir_str.as_ptr() + }; + Ok((ptr, dir_str)) } None => Ok((ptr::null(), Vec::new())), } diff --git a/library/std/src/sys/random/linux.rs b/library/std/src/sys/random/linux.rs index c0591ec0c1527..18196fae28bee 100644 --- a/library/std/src/sys/random/linux.rs +++ b/library/std/src/sys/random/linux.rs @@ -64,8 +64,8 @@ use crate::fs::File; use crate::io::Read; use crate::os::fd::AsRawFd; use crate::sync::OnceLock; -use crate::sync::atomic::AtomicBool; use crate::sync::atomic::Ordering::{Acquire, Relaxed, Release}; +use crate::sync::atomic::{Atomic, AtomicBool}; use crate::sys::pal::os::errno; use crate::sys::pal::weak::syscall; @@ -81,9 +81,9 @@ fn getrandom(mut bytes: &mut [u8], insecure: bool) { ) -> libc::ssize_t; ); - static GETRANDOM_AVAILABLE: AtomicBool = AtomicBool::new(true); - static GRND_INSECURE_AVAILABLE: AtomicBool = AtomicBool::new(true); - static URANDOM_READY: AtomicBool = AtomicBool::new(false); + static GETRANDOM_AVAILABLE: Atomic = AtomicBool::new(true); + static GRND_INSECURE_AVAILABLE: Atomic = AtomicBool::new(true); + static URANDOM_READY: Atomic = AtomicBool::new(false); static DEVICE: OnceLock = OnceLock::new(); if GETRANDOM_AVAILABLE.load(Relaxed) { diff --git a/library/std/src/sys/random/vxworks.rs b/library/std/src/sys/random/vxworks.rs index d549ccebdb2cd..14f02e8ecd220 100644 --- a/library/std/src/sys/random/vxworks.rs +++ b/library/std/src/sys/random/vxworks.rs @@ -1,7 +1,7 @@ -use crate::sync::atomic::AtomicBool; use crate::sync::atomic::Ordering::Relaxed; +use crate::sync::atomic::{Atomic, AtomicBool}; -static RNG_INIT: AtomicBool = AtomicBool::new(false); +static RNG_INIT: Atomic = AtomicBool::new(false); pub fn fill_bytes(mut bytes: &mut [u8]) { while !RNG_INIT.load(Relaxed) { diff --git a/library/std/src/sys/stdio/trusty.rs b/library/std/src/sys/stdio/trusty.rs index d393e95394d1a..e05461aa44a73 100644 --- a/library/std/src/sys/stdio/trusty.rs +++ b/library/std/src/sys/stdio/trusty.rs @@ -1,21 +1,14 @@ -use crate::io; +#[expect(dead_code)] +#[path = "unsupported.rs"] +mod unsupported_stdio; -pub struct Stdin; +use crate::cmp; +use crate::io::{self, IoSlice}; + +pub type Stdin = unsupported_stdio::Stdin; pub struct Stdout; pub struct Stderr; -impl Stdin { - pub const fn new() -> Stdin { - Stdin - } -} - -impl io::Read for Stdin { - fn read(&mut self, _buf: &mut [u8]) -> io::Result { - Ok(0) - } -} - impl Stdout { pub const fn new() -> Stdout { Stdout @@ -24,7 +17,16 @@ impl Stdout { impl io::Write for Stdout { fn write(&mut self, buf: &[u8]) -> io::Result { - _write(libc::STDOUT_FILENO, buf) + write(libc::STDOUT_FILENO, buf) + } + + fn write_vectored(&mut self, bufs: &[IoSlice<'_>]) -> io::Result { + write_vectored(libc::STDOUT_FILENO, bufs) + } + + #[inline] + fn is_write_vectored(&self) -> bool { + true } fn flush(&mut self) -> io::Result<()> { @@ -40,7 +42,16 @@ impl Stderr { impl io::Write for Stderr { fn write(&mut self, buf: &[u8]) -> io::Result { - _write(libc::STDERR_FILENO, buf) + write(libc::STDERR_FILENO, buf) + } + + fn write_vectored(&mut self, bufs: &[IoSlice<'_>]) -> io::Result { + write_vectored(libc::STDERR_FILENO, bufs) + } + + #[inline] + fn is_write_vectored(&self) -> bool { + true } fn flush(&mut self) -> io::Result<()> { @@ -48,7 +59,7 @@ impl io::Write for Stderr { } } -pub const STDIN_BUF_SIZE: usize = 0; +pub const STDIN_BUF_SIZE: usize = unsupported_stdio::STDIN_BUF_SIZE; pub fn is_ebadf(_err: &io::Error) -> bool { true @@ -58,24 +69,24 @@ pub fn panic_output() -> Option { Some(Stderr) } -fn _write(fd: i32, message: &[u8]) -> io::Result { - let mut iov = libc::iovec { iov_base: message.as_ptr() as *mut _, iov_len: message.len() }; - loop { - // SAFETY: syscall, safe arguments. - let ret = unsafe { libc::writev(fd, &iov, 1) }; - if ret < 0 { - return Err(io::Error::last_os_error()); - } - let ret = ret as usize; - if ret > iov.iov_len { - return Err(io::Error::last_os_error()); - } - if ret == iov.iov_len { - return Ok(message.len()); - } - // SAFETY: ret has been checked to be less than the length of - // the buffer - iov.iov_base = unsafe { iov.iov_base.add(ret) }; - iov.iov_len -= ret; +fn write(fd: i32, buf: &[u8]) -> io::Result { + let iov = libc::iovec { iov_base: buf.as_ptr() as *mut _, iov_len: buf.len() }; + // SAFETY: syscall, safe arguments. + let ret = unsafe { libc::writev(fd, &iov, 1) }; + // This check includes ret < 0, since the length is at most isize::MAX. + if ret as usize > iov.iov_len { + return Err(io::Error::last_os_error()); + } + Ok(ret as usize) +} + +fn write_vectored(fd: i32, bufs: &[IoSlice<'_>]) -> io::Result { + let iov = bufs.as_ptr() as *const libc::iovec; + let len = cmp::min(bufs.len(), libc::c_int::MAX as usize) as libc::c_int; + // SAFETY: syscall, safe arguments. + let ret = unsafe { libc::writev(fd, iov, len) }; + if ret < 0 { + return Err(io::Error::last_os_error()); } + Ok(ret as usize) } diff --git a/library/std/src/sys/stdio/uefi.rs b/library/std/src/sys/stdio/uefi.rs index 257e321dd03d7..ccd6bf658b0ff 100644 --- a/library/std/src/sys/stdio/uefi.rs +++ b/library/std/src/sys/stdio/uefi.rs @@ -142,8 +142,12 @@ impl io::Write for Stderr { // UTF-16 character should occupy 4 bytes at most in UTF-8 pub const STDIN_BUF_SIZE: usize = 4; -pub fn is_ebadf(_err: &io::Error) -> bool { - false +pub fn is_ebadf(err: &io::Error) -> bool { + if let Some(x) = err.raw_os_error() { + r_efi::efi::Status::UNSUPPORTED.as_usize() == x + } else { + false + } } pub fn panic_output() -> Option { diff --git a/library/std/src/sys/stdio/wasi.rs b/library/std/src/sys/stdio/wasi.rs index 8105b0cfa2f15..b70efd026f945 100644 --- a/library/std/src/sys/stdio/wasi.rs +++ b/library/std/src/sys/stdio/wasi.rs @@ -4,7 +4,7 @@ use crate::io::{self, BorrowedCursor, IoSlice, IoSliceMut}; use crate::mem::ManuallyDrop; use crate::os::raw; use crate::os::wasi::io::{AsRawFd, FromRawFd}; -use crate::sys::pal::fd::WasiFd; +use crate::sys::fd::WasiFd; pub struct Stdin; pub struct Stdout; diff --git a/library/std/src/sys/sync/condvar/pthread.rs b/library/std/src/sys/sync/condvar/pthread.rs index 5bb7431eecf0c..938b7071b88a7 100644 --- a/library/std/src/sys/sync/condvar/pthread.rs +++ b/library/std/src/sys/sync/condvar/pthread.rs @@ -2,15 +2,15 @@ use crate::pin::Pin; use crate::ptr; -use crate::sync::atomic::AtomicUsize; use crate::sync::atomic::Ordering::Relaxed; +use crate::sync::atomic::{Atomic, AtomicUsize}; use crate::sys::pal::sync as pal; use crate::sys::sync::{Mutex, OnceBox}; use crate::time::{Duration, Instant}; pub struct Condvar { cvar: OnceBox, - mutex: AtomicUsize, + mutex: Atomic, } impl Condvar { diff --git a/library/std/src/sys/sync/condvar/xous.rs b/library/std/src/sys/sync/condvar/xous.rs index b9e5f47abfcc2..21a1587214a11 100644 --- a/library/std/src/sys/sync/condvar/xous.rs +++ b/library/std/src/sys/sync/condvar/xous.rs @@ -1,4 +1,4 @@ -use core::sync::atomic::{AtomicUsize, Ordering}; +use core::sync::atomic::{Atomic, AtomicUsize, Ordering}; use crate::os::xous::ffi::{blocking_scalar, scalar}; use crate::os::xous::services::{TicktimerScalar, ticktimer_server}; @@ -11,8 +11,8 @@ use crate::time::Duration; const NOTIFY_TRIES: usize = 3; pub struct Condvar { - counter: AtomicUsize, - timed_out: AtomicUsize, + counter: Atomic, + timed_out: Atomic, } unsafe impl Send for Condvar {} diff --git a/library/std/src/sys/sync/mutex/fuchsia.rs b/library/std/src/sys/sync/mutex/fuchsia.rs index 3e871285bea01..cbb1926530f5f 100644 --- a/library/std/src/sys/sync/mutex/fuchsia.rs +++ b/library/std/src/sys/sync/mutex/fuchsia.rs @@ -37,9 +37,9 @@ //! //! [mutex in Fuchsia's libsync]: https://cs.opensource.google/fuchsia/fuchsia/+/main:zircon/system/ulib/sync/mutex.c -use crate::sync::atomic::AtomicU32; use crate::sync::atomic::Ordering::{Acquire, Relaxed, Release}; -use crate::sys::futex::zircon::{ +use crate::sync::atomic::{Atomic, AtomicU32}; +use crate::sys::fuchsia::{ ZX_ERR_BAD_HANDLE, ZX_ERR_BAD_STATE, ZX_ERR_INVALID_ARGS, ZX_ERR_TIMED_OUT, ZX_ERR_WRONG_TYPE, ZX_OK, ZX_TIME_INFINITE, zx_futex_wait, zx_futex_wake_single_owner, zx_handle_t, zx_thread_self, @@ -52,7 +52,7 @@ const CONTESTED_BIT: u32 = 1; const UNLOCKED: u32 = 0; pub struct Mutex { - futex: AtomicU32, + futex: Atomic, } #[inline] @@ -83,13 +83,13 @@ impl Mutex { #[inline] pub fn try_lock(&self) -> bool { - let thread_self = unsafe { zx_thread_self() }; + let thread_self = zx_thread_self(); self.futex.compare_exchange(UNLOCKED, to_state(thread_self), Acquire, Relaxed).is_ok() } #[inline] pub fn lock(&self) { - let thread_self = unsafe { zx_thread_self() }; + let thread_self = zx_thread_self(); if let Err(state) = self.futex.compare_exchange(UNLOCKED, to_state(thread_self), Acquire, Relaxed) { diff --git a/library/std/src/sys/sync/mutex/xous.rs b/library/std/src/sys/sync/mutex/xous.rs index c6b954c1711e6..d16faa5aea319 100644 --- a/library/std/src/sys/sync/mutex/xous.rs +++ b/library/std/src/sys/sync/mutex/xous.rs @@ -1,7 +1,7 @@ use crate::os::xous::ffi::{blocking_scalar, do_yield}; use crate::os::xous::services::{TicktimerScalar, ticktimer_server}; use crate::sync::atomic::Ordering::{Acquire, Relaxed, Release}; -use crate::sync::atomic::{AtomicBool, AtomicUsize}; +use crate::sync::atomic::{Atomic, AtomicBool, AtomicUsize}; pub struct Mutex { /// The "locked" value indicates how many threads are waiting on this @@ -14,12 +14,12 @@ pub struct Mutex { /// for a lock, or it is locked for long periods of time. Rather than /// spinning, these locks send a Message to the ticktimer server /// requesting that they be woken up when a lock is unlocked. - locked: AtomicUsize, + locked: Atomic, /// Whether this Mutex ever was contended, and therefore made a trip /// to the ticktimer server. If this was never set, then we were never /// on the slow path and can skip deregistering the mutex. - contended: AtomicBool, + contended: Atomic, } impl Mutex { diff --git a/library/std/src/sys/sync/once/queue.rs b/library/std/src/sys/sync/once/queue.rs index fde1e0ca51029..6a2ab0dcf1b33 100644 --- a/library/std/src/sys/sync/once/queue.rs +++ b/library/std/src/sys/sync/once/queue.rs @@ -57,7 +57,7 @@ use crate::cell::Cell; use crate::sync::atomic::Ordering::{AcqRel, Acquire, Release}; -use crate::sync::atomic::{AtomicBool, AtomicPtr}; +use crate::sync::atomic::{Atomic, AtomicBool, AtomicPtr}; use crate::sync::poison::once::ExclusiveState; use crate::thread::{self, Thread}; use crate::{fmt, ptr, sync as public}; @@ -65,7 +65,7 @@ use crate::{fmt, ptr, sync as public}; type StateAndQueue = *mut (); pub struct Once { - state_and_queue: AtomicPtr<()>, + state_and_queue: Atomic<*mut ()>, } pub struct OnceState { @@ -94,7 +94,7 @@ const QUEUE_MASK: usize = !STATE_MASK; #[repr(align(4))] // Ensure the two lower bits are free to use as state bits. struct Waiter { thread: Thread, - signaled: AtomicBool, + signaled: Atomic, next: Cell<*const Waiter>, } @@ -102,7 +102,7 @@ struct Waiter { // Every node is a struct on the stack of a waiting thread. // Will wake up the waiters when it gets dropped, i.e. also on panic. struct WaiterQueue<'a> { - state_and_queue: &'a AtomicPtr<()>, + state_and_queue: &'a Atomic<*mut ()>, set_state_on_drop_to: StateAndQueue, } @@ -232,7 +232,7 @@ impl Once { } fn wait( - state_and_queue: &AtomicPtr<()>, + state_and_queue: &Atomic<*mut ()>, mut current: StateAndQueue, return_on_poisoned: bool, ) -> StateAndQueue { diff --git a/library/std/src/sys/sync/once_box.rs b/library/std/src/sys/sync/once_box.rs index 6953b91999ad1..088f51aae78e6 100644 --- a/library/std/src/sys/sync/once_box.rs +++ b/library/std/src/sys/sync/once_box.rs @@ -8,11 +8,11 @@ use crate::mem::replace; use crate::pin::Pin; use crate::ptr::null_mut; -use crate::sync::atomic::AtomicPtr; use crate::sync::atomic::Ordering::{Acquire, Relaxed, Release}; +use crate::sync::atomic::{Atomic, AtomicPtr}; pub(crate) struct OnceBox { - ptr: AtomicPtr, + ptr: Atomic<*mut T>, } impl OnceBox { diff --git a/library/std/src/sys/sync/rwlock/queue.rs b/library/std/src/sys/sync/rwlock/queue.rs index bd15f8ee952c9..62f084acfd259 100644 --- a/library/std/src/sys/sync/rwlock/queue.rs +++ b/library/std/src/sys/sync/rwlock/queue.rs @@ -117,11 +117,11 @@ use crate::hint::spin_loop; use crate::mem; use crate::ptr::{self, NonNull, null_mut, without_provenance_mut}; use crate::sync::atomic::Ordering::{AcqRel, Acquire, Relaxed, Release}; -use crate::sync::atomic::{AtomicBool, AtomicPtr}; +use crate::sync::atomic::{Atomic, AtomicBool, AtomicPtr}; use crate::thread::{self, Thread}; /// The atomic lock state. -type AtomicState = AtomicPtr<()>; +type AtomicState = Atomic; /// The inner lock state. type State = *mut (); @@ -181,11 +181,11 @@ struct Node { tail: AtomicLink, write: bool, thread: OnceCell, - completed: AtomicBool, + completed: Atomic, } /// An atomic node pointer with relaxed operations. -struct AtomicLink(AtomicPtr); +struct AtomicLink(Atomic<*mut Node>); impl AtomicLink { fn new(v: Option>) -> AtomicLink { diff --git a/library/std/src/sys/sync/thread_parking/darwin.rs b/library/std/src/sys/sync/thread_parking/darwin.rs index a0d24a91e7c69..b9bcc538c65ab 100644 --- a/library/std/src/sys/sync/thread_parking/darwin.rs +++ b/library/std/src/sys/sync/thread_parking/darwin.rs @@ -13,8 +13,8 @@ #![allow(non_camel_case_types)] use crate::pin::Pin; -use crate::sync::atomic::AtomicI8; use crate::sync::atomic::Ordering::{Acquire, Release}; +use crate::sync::atomic::{Atomic, AtomicI8}; use crate::time::Duration; type dispatch_semaphore_t = *mut crate::ffi::c_void; @@ -38,7 +38,7 @@ const PARKED: i8 = -1; pub struct Parker { semaphore: dispatch_semaphore_t, - state: AtomicI8, + state: Atomic, } unsafe impl Sync for Parker {} diff --git a/library/std/src/sys/sync/thread_parking/id.rs b/library/std/src/sys/sync/thread_parking/id.rs index 6496435183770..fcc6ecca62867 100644 --- a/library/std/src/sys/sync/thread_parking/id.rs +++ b/library/std/src/sys/sync/thread_parking/id.rs @@ -10,12 +10,12 @@ use crate::cell::UnsafeCell; use crate::pin::Pin; use crate::sync::atomic::Ordering::{Acquire, Relaxed, Release}; -use crate::sync::atomic::{AtomicI8, fence}; +use crate::sync::atomic::{Atomic, AtomicI8, fence}; use crate::sys::thread_parking::{ThreadId, current, park, park_timeout, unpark}; use crate::time::Duration; pub struct Parker { - state: AtomicI8, + state: Atomic, tid: UnsafeCell>, } diff --git a/library/std/src/sys/sync/thread_parking/pthread.rs b/library/std/src/sys/sync/thread_parking/pthread.rs index 19cabd7dd75c8..14bc793c15de2 100644 --- a/library/std/src/sys/sync/thread_parking/pthread.rs +++ b/library/std/src/sys/sync/thread_parking/pthread.rs @@ -1,8 +1,8 @@ //! Thread parking without `futex` using the `pthread` synchronization primitives. use crate::pin::Pin; -use crate::sync::atomic::AtomicUsize; use crate::sync::atomic::Ordering::{Acquire, Relaxed, Release}; +use crate::sync::atomic::{Atomic, AtomicUsize}; use crate::sys::pal::sync::{Condvar, Mutex}; use crate::time::Duration; @@ -11,7 +11,7 @@ const PARKED: usize = 1; const NOTIFIED: usize = 2; pub struct Parker { - state: AtomicUsize, + state: Atomic, lock: Mutex, cvar: Condvar, } diff --git a/library/std/src/sys/sync/thread_parking/windows7.rs b/library/std/src/sys/sync/thread_parking/windows7.rs index a1a0f8427cd83..96e94a8053c4c 100644 --- a/library/std/src/sys/sync/thread_parking/windows7.rs +++ b/library/std/src/sys/sync/thread_parking/windows7.rs @@ -60,13 +60,13 @@ use core::ffi::c_void; use crate::pin::Pin; -use crate::sync::atomic::AtomicI8; use crate::sync::atomic::Ordering::{Acquire, Release}; +use crate::sync::atomic::{Atomic, AtomicI8}; use crate::sys::{c, dur2timeout}; use crate::time::Duration; pub struct Parker { - state: AtomicI8, + state: Atomic, } const PARKED: i8 = -1; @@ -186,8 +186,8 @@ impl Parker { mod keyed_events { use core::pin::Pin; use core::ptr; - use core::sync::atomic::AtomicPtr; use core::sync::atomic::Ordering::{Acquire, Relaxed}; + use core::sync::atomic::{Atomic, AtomicPtr}; use core::time::Duration; use super::{EMPTY, NOTIFIED, Parker}; @@ -244,7 +244,7 @@ mod keyed_events { fn keyed_event_handle() -> c::HANDLE { const INVALID: c::HANDLE = ptr::without_provenance_mut(!0); - static HANDLE: AtomicPtr = AtomicPtr::new(INVALID); + static HANDLE: Atomic<*mut crate::ffi::c_void> = AtomicPtr::new(INVALID); match HANDLE.load(Relaxed) { INVALID => { let mut handle = c::INVALID_HANDLE_VALUE; diff --git a/library/std/src/sys/sync/thread_parking/xous.rs b/library/std/src/sys/sync/thread_parking/xous.rs index 28c90249dc2c2..0f451c0ac29f9 100644 --- a/library/std/src/sys/sync/thread_parking/xous.rs +++ b/library/std/src/sys/sync/thread_parking/xous.rs @@ -2,8 +2,8 @@ use crate::os::xous::ffi::{blocking_scalar, scalar}; use crate::os::xous::services::{TicktimerScalar, ticktimer_server}; use crate::pin::Pin; use crate::ptr; -use crate::sync::atomic::AtomicI8; use crate::sync::atomic::Ordering::{Acquire, Release}; +use crate::sync::atomic::{Atomic, AtomicI8}; use crate::time::Duration; const NOTIFIED: i8 = 1; @@ -11,7 +11,7 @@ const EMPTY: i8 = 0; const PARKED: i8 = -1; pub struct Parker { - state: AtomicI8, + state: Atomic, } impl Parker { diff --git a/library/std/src/sys/thread_local/destructors/linux_like.rs b/library/std/src/sys/thread_local/destructors/linux_like.rs index 817941229eefe..d7cbaeb89f42e 100644 --- a/library/std/src/sys/thread_local/destructors/linux_like.rs +++ b/library/std/src/sys/thread_local/destructors/linux_like.rs @@ -12,9 +12,6 @@ use crate::mem::transmute; -// FIXME: The Rust compiler currently omits weakly function definitions (i.e., -// __cxa_thread_atexit_impl) and its metadata from LLVM IR. -#[no_sanitize(cfi, kcfi)] pub unsafe fn register(t: *mut u8, dtor: unsafe extern "C" fn(*mut u8)) { /// This is necessary because the __cxa_thread_atexit_impl implementation /// std links to by default may be a C or C++ implementation that was not diff --git a/library/std/src/sys/thread_local/key/racy.rs b/library/std/src/sys/thread_local/key/racy.rs index e1bc08eabb358..a12ff7ac36ba5 100644 --- a/library/std/src/sys/thread_local/key/racy.rs +++ b/library/std/src/sys/thread_local/key/racy.rs @@ -6,7 +6,7 @@ //! should be more lightweight and avoids circular dependencies with the rest of //! `std`. -use crate::sync::atomic::{self, AtomicUsize, Ordering}; +use crate::sync::atomic::{Atomic, AtomicUsize, Ordering}; /// A type for TLS keys that are statically allocated. /// @@ -14,7 +14,7 @@ use crate::sync::atomic::{self, AtomicUsize, Ordering}; /// dependencies with the rest of `std`. pub struct LazyKey { /// Inner static TLS key (internals). - key: AtomicUsize, + key: Atomic, /// Destructor for the TLS value. dtor: Option, } @@ -31,7 +31,7 @@ const KEY_SENTVAL: usize = libc::PTHREAD_KEYS_MAX + 1; impl LazyKey { pub const fn new(dtor: Option) -> LazyKey { - LazyKey { key: atomic::AtomicUsize::new(KEY_SENTVAL), dtor } + LazyKey { key: AtomicUsize::new(KEY_SENTVAL), dtor } } #[inline] diff --git a/library/std/src/sys/thread_local/key/windows.rs b/library/std/src/sys/thread_local/key/windows.rs index f4e0f25a476ee..c34c7bc204fd4 100644 --- a/library/std/src/sys/thread_local/key/windows.rs +++ b/library/std/src/sys/thread_local/key/windows.rs @@ -27,7 +27,7 @@ use crate::cell::UnsafeCell; use crate::ptr; use crate::sync::atomic::Ordering::{AcqRel, Acquire, Relaxed, Release}; -use crate::sync::atomic::{AtomicPtr, AtomicU32}; +use crate::sync::atomic::{Atomic, AtomicPtr, AtomicU32}; use crate::sys::c; use crate::sys::thread_local::guard; @@ -38,9 +38,9 @@ pub struct LazyKey { /// The key value shifted up by one. Since TLS_OUT_OF_INDEXES == u32::MAX /// is not a valid key value, this allows us to use zero as sentinel value /// without risking overflow. - key: AtomicU32, + key: Atomic, dtor: Option, - next: AtomicPtr, + next: Atomic<*mut LazyKey>, /// Currently, destructors cannot be unregistered, so we cannot use racy /// initialization for keys. Instead, we need synchronize initialization. /// Use the Windows-provided `Once` since it does not require TLS. @@ -142,7 +142,7 @@ pub unsafe fn get(key: Key) -> *mut u8 { unsafe { c::TlsGetValue(key).cast() } } -static DTORS: AtomicPtr = AtomicPtr::new(ptr::null_mut()); +static DTORS: Atomic<*mut LazyKey> = AtomicPtr::new(ptr::null_mut()); /// Should only be called once per key, otherwise loops or breaks may occur in /// the linked list. diff --git a/library/std/src/sys/thread_local/key/xous.rs b/library/std/src/sys/thread_local/key/xous.rs index 48dfe17ab3261..a27cec5ca1a60 100644 --- a/library/std/src/sys/thread_local/key/xous.rs +++ b/library/std/src/sys/thread_local/key/xous.rs @@ -42,7 +42,7 @@ use crate::mem::ManuallyDrop; use crate::os::xous::ffi::{MemoryFlags, map_memory, unmap_memory}; use crate::ptr; use crate::sync::atomic::Ordering::{Acquire, Relaxed, Release}; -use crate::sync::atomic::{AtomicPtr, AtomicUsize}; +use crate::sync::atomic::{Atomic, AtomicPtr, AtomicUsize}; pub type Key = usize; pub type Dtor = unsafe extern "C" fn(*mut u8); @@ -52,19 +52,19 @@ const TLS_MEMORY_SIZE: usize = 4096; /// TLS keys start at `1`. Index `0` is unused #[cfg(not(test))] #[unsafe(export_name = "_ZN16__rust_internals3std3sys4xous16thread_local_key13TLS_KEY_INDEXE")] -static TLS_KEY_INDEX: AtomicUsize = AtomicUsize::new(1); +static TLS_KEY_INDEX: Atomic = AtomicUsize::new(1); #[cfg(not(test))] #[unsafe(export_name = "_ZN16__rust_internals3std3sys4xous16thread_local_key9DTORSE")] -static DTORS: AtomicPtr = AtomicPtr::new(ptr::null_mut()); +static DTORS: Atomic<*mut Node> = AtomicPtr::new(ptr::null_mut()); #[cfg(test)] unsafe extern "Rust" { #[link_name = "_ZN16__rust_internals3std3sys4xous16thread_local_key13TLS_KEY_INDEXE"] - static TLS_KEY_INDEX: AtomicUsize; + static TLS_KEY_INDEX: Atomic; #[link_name = "_ZN16__rust_internals3std3sys4xous16thread_local_key9DTORSE"] - static DTORS: AtomicPtr; + static DTORS: Atomic<*mut Node>; } fn tls_ptr_addr() -> *mut *mut u8 { diff --git a/library/std/src/sys/thread_local/mod.rs b/library/std/src/sys/thread_local/mod.rs index 1ff13154b7b3c..9fafac3aa5b41 100644 --- a/library/std/src/sys/thread_local/mod.rs +++ b/library/std/src/sys/thread_local/mod.rs @@ -30,9 +30,9 @@ cfg_if::cfg_if! { target_os = "zkvm", target_os = "trusty", ))] { - mod statik; - pub use statik::{EagerStorage, LazyStorage, thread_local_inner}; - pub(crate) use statik::{LocalPointer, local_pointer}; + mod no_threads; + pub use no_threads::{EagerStorage, LazyStorage, thread_local_inner}; + pub(crate) use no_threads::{LocalPointer, local_pointer}; } else if #[cfg(target_thread_local)] { mod native; pub use native::{EagerStorage, LazyStorage, thread_local_inner}; @@ -138,6 +138,7 @@ pub(crate) mod key { not(target_family = "wasm"), target_family = "unix", ), + all(not(target_thread_local), target_vendor = "apple"), target_os = "teeos", all(target_os = "wasi", target_env = "p1", target_feature = "atomics"), ))] { diff --git a/library/std/src/sys/thread_local/statik.rs b/library/std/src/sys/thread_local/no_threads.rs similarity index 100% rename from library/std/src/sys/thread_local/statik.rs rename to library/std/src/sys/thread_local/no_threads.rs diff --git a/library/std/src/sys_common/mod.rs b/library/std/src/sys_common/mod.rs index 2a5de7f66661c..b7f4656fa3701 100644 --- a/library/std/src/sys_common/mod.rs +++ b/library/std/src/sys_common/mod.rs @@ -20,7 +20,6 @@ #[cfg(test)] mod tests; -pub mod process; pub mod wstr; pub mod wtf8; diff --git a/library/std/src/sys_common/process.rs b/library/std/src/sys_common/process.rs deleted file mode 100644 index 9f61d69d85875..0000000000000 --- a/library/std/src/sys_common/process.rs +++ /dev/null @@ -1,153 +0,0 @@ -#![allow(dead_code)] -#![unstable(feature = "process_internals", issue = "none")] - -use crate::collections::BTreeMap; -use crate::ffi::{OsStr, OsString}; -use crate::sys::pipe::read2; -use crate::sys::process::{EnvKey, ExitStatus, Process, StdioPipes}; -use crate::{env, fmt, io}; - -// Stores a set of changes to an environment -#[derive(Clone, Default)] -pub struct CommandEnv { - clear: bool, - saw_path: bool, - vars: BTreeMap>, -} - -impl fmt::Debug for CommandEnv { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - let mut debug_command_env = f.debug_struct("CommandEnv"); - debug_command_env.field("clear", &self.clear).field("vars", &self.vars); - debug_command_env.finish() - } -} - -impl CommandEnv { - // Capture the current environment with these changes applied - pub fn capture(&self) -> BTreeMap { - let mut result = BTreeMap::::new(); - if !self.clear { - for (k, v) in env::vars_os() { - result.insert(k.into(), v); - } - } - for (k, maybe_v) in &self.vars { - if let &Some(ref v) = maybe_v { - result.insert(k.clone(), v.clone()); - } else { - result.remove(k); - } - } - result - } - - pub fn is_unchanged(&self) -> bool { - !self.clear && self.vars.is_empty() - } - - pub fn capture_if_changed(&self) -> Option> { - if self.is_unchanged() { None } else { Some(self.capture()) } - } - - // The following functions build up changes - pub fn set(&mut self, key: &OsStr, value: &OsStr) { - let key = EnvKey::from(key); - self.maybe_saw_path(&key); - self.vars.insert(key, Some(value.to_owned())); - } - - pub fn remove(&mut self, key: &OsStr) { - let key = EnvKey::from(key); - self.maybe_saw_path(&key); - if self.clear { - self.vars.remove(&key); - } else { - self.vars.insert(key, None); - } - } - - pub fn clear(&mut self) { - self.clear = true; - self.vars.clear(); - } - - pub fn does_clear(&self) -> bool { - self.clear - } - - pub fn have_changed_path(&self) -> bool { - self.saw_path || self.clear - } - - fn maybe_saw_path(&mut self, key: &EnvKey) { - if !self.saw_path && key == "PATH" { - self.saw_path = true; - } - } - - pub fn iter(&self) -> CommandEnvs<'_> { - let iter = self.vars.iter(); - CommandEnvs { iter } - } -} - -/// An iterator over the command environment variables. -/// -/// This struct is created by -/// [`Command::get_envs`][crate::process::Command::get_envs]. See its -/// documentation for more. -#[must_use = "iterators are lazy and do nothing unless consumed"] -#[stable(feature = "command_access", since = "1.57.0")] -#[derive(Debug)] -pub struct CommandEnvs<'a> { - iter: crate::collections::btree_map::Iter<'a, EnvKey, Option>, -} - -#[stable(feature = "command_access", since = "1.57.0")] -impl<'a> Iterator for CommandEnvs<'a> { - type Item = (&'a OsStr, Option<&'a OsStr>); - fn next(&mut self) -> Option { - self.iter.next().map(|(key, value)| (key.as_ref(), value.as_deref())) - } - fn size_hint(&self) -> (usize, Option) { - self.iter.size_hint() - } -} - -#[stable(feature = "command_access", since = "1.57.0")] -impl<'a> ExactSizeIterator for CommandEnvs<'a> { - fn len(&self) -> usize { - self.iter.len() - } - fn is_empty(&self) -> bool { - self.iter.is_empty() - } -} - -pub fn wait_with_output( - mut process: Process, - mut pipes: StdioPipes, -) -> io::Result<(ExitStatus, Vec, Vec)> { - drop(pipes.stdin.take()); - - let (mut stdout, mut stderr) = (Vec::new(), Vec::new()); - match (pipes.stdout.take(), pipes.stderr.take()) { - (None, None) => {} - (Some(out), None) => { - let res = out.read_to_end(&mut stdout); - res.unwrap(); - } - (None, Some(err)) => { - let res = err.read_to_end(&mut stderr); - res.unwrap(); - } - (Some(out), Some(err)) => { - let res = read2(out, &mut stdout, err, &mut stderr); - res.unwrap(); - } - } - - let status = process.wait()?; - Ok((status, stdout, stderr)) -} diff --git a/library/std/src/thread/local.rs b/library/std/src/thread/local.rs index d5a5d10205dd8..7cd448733130d 100644 --- a/library/std/src/thread/local.rs +++ b/library/std/src/thread/local.rs @@ -22,12 +22,16 @@ use crate::fmt; /// /// Initialization is dynamically performed on the first call to a setter (e.g. /// [`with`]) within a thread, and values that implement [`Drop`] get -/// destructed when a thread exits. Some caveats apply, which are explained below. +/// destructed when a thread exits. Some platform-specific caveats apply, which +/// are explained below. +/// Note that, should the destructor panics, the whole process will be [aborted]. /// /// A `LocalKey`'s initializer cannot recursively depend on itself. Using a /// `LocalKey` in this way may cause panics, aborts or infinite recursion on /// the first call to `with`. /// +/// [aborted]: crate::process::abort +/// /// # Single-thread Synchronization /// /// Though there is no potential race with other threads, it is still possible to diff --git a/library/std/src/thread/mod.rs b/library/std/src/thread/mod.rs index 3f3ba02361cc8..6838f15e1748d 100644 --- a/library/std/src/thread/mod.rs +++ b/library/std/src/thread/mod.rs @@ -166,7 +166,7 @@ use crate::mem::{self, ManuallyDrop, forget}; use crate::num::NonZero; use crate::pin::Pin; use crate::sync::Arc; -use crate::sync::atomic::{AtomicUsize, Ordering}; +use crate::sync::atomic::{Atomic, AtomicUsize, Ordering}; use crate::sys::sync::Parker; use crate::sys::thread as imp; use crate::sys_common::{AsInner, IntoInner}; @@ -481,7 +481,7 @@ impl Builder { let Builder { name, stack_size, no_hooks } = self; let stack_size = stack_size.unwrap_or_else(|| { - static MIN: AtomicUsize = AtomicUsize::new(0); + static MIN: Atomic = AtomicUsize::new(0); match MIN.load(Ordering::Relaxed) { 0 => {} @@ -1195,9 +1195,9 @@ impl ThreadId { cfg_if::cfg_if! { if #[cfg(target_has_atomic = "64")] { - use crate::sync::atomic::AtomicU64; + use crate::sync::atomic::{Atomic, AtomicU64}; - static COUNTER: AtomicU64 = AtomicU64::new(0); + static COUNTER: Atomic = AtomicU64::new(0); let mut last = COUNTER.load(Ordering::Relaxed); loop { @@ -1302,10 +1302,10 @@ pub(crate) mod main_thread { cfg_if::cfg_if! { if #[cfg(target_has_atomic = "64")] { use super::ThreadId; - use crate::sync::atomic::AtomicU64; + use crate::sync::atomic::{Atomic, AtomicU64}; use crate::sync::atomic::Ordering::Relaxed; - static MAIN: AtomicU64 = AtomicU64::new(0); + static MAIN: Atomic = AtomicU64::new(0); pub(super) fn get() -> Option { ThreadId::from_u64(MAIN.load(Relaxed)) @@ -1319,10 +1319,10 @@ pub(crate) mod main_thread { } else { use super::ThreadId; use crate::mem::MaybeUninit; - use crate::sync::atomic::AtomicBool; + use crate::sync::atomic::{Atomic, AtomicBool}; use crate::sync::atomic::Ordering::{Acquire, Release}; - static INIT: AtomicBool = AtomicBool::new(false); + static INIT: Atomic = AtomicBool::new(false); static mut MAIN: MaybeUninit = MaybeUninit::uninit(); pub(super) fn get() -> Option { @@ -1676,6 +1676,7 @@ impl fmt::Debug for Thread { /// [`Result`]: crate::result::Result /// [`std::panic::resume_unwind`]: crate::panic::resume_unwind #[stable(feature = "rust1", since = "1.0.0")] +#[cfg_attr(not(bootstrap), doc(search_unbox))] pub type Result = crate::result::Result>; // This packet is used to communicate the return value between the spawned diff --git a/library/std/src/thread/scoped.rs b/library/std/src/thread/scoped.rs index 0033fc3a73283..a4c0ca5417d00 100644 --- a/library/std/src/thread/scoped.rs +++ b/library/std/src/thread/scoped.rs @@ -2,7 +2,7 @@ use super::{Builder, JoinInner, Result, Thread, current_or_unnamed}; use crate::marker::PhantomData; use crate::panic::{AssertUnwindSafe, catch_unwind, resume_unwind}; use crate::sync::Arc; -use crate::sync::atomic::{AtomicBool, AtomicUsize, Ordering}; +use crate::sync::atomic::{Atomic, AtomicBool, AtomicUsize, Ordering}; use crate::{fmt, io}; /// A scope to spawn scoped threads in. @@ -35,8 +35,8 @@ pub struct Scope<'scope, 'env: 'scope> { pub struct ScopedJoinHandle<'scope, T>(JoinInner<'scope, T>); pub(super) struct ScopeData { - num_running_threads: AtomicUsize, - a_thread_panicked: AtomicBool, + num_running_threads: Atomic, + a_thread_panicked: Atomic, main_thread: Thread, } diff --git a/library/std/src/time.rs b/library/std/src/time.rs index 5ab71413586dc..03af35e809c91 100644 --- a/library/std/src/time.rs +++ b/library/std/src/time.rs @@ -205,8 +205,8 @@ pub struct Instant(time::Instant); /// println!("{}", elapsed.as_secs()); /// } /// Err(e) => { -/// // an error occurred! -/// println!("Error: {e:?}"); +/// // the system clock went backwards! +/// println!("Great Scott! {e:?}"); /// } /// } /// } @@ -245,6 +245,7 @@ pub struct Instant(time::Instant); /// > structure cannot represent the new point in time. /// /// [`add`]: SystemTime::add +/// [`UNIX_EPOCH`]: SystemTime::UNIX_EPOCH #[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] #[stable(feature = "time2", since = "1.8.0")] pub struct SystemTime(time::SystemTime); diff --git a/library/std/tests/floats/f128.rs b/library/std/tests/floats/f128.rs index b4a6c672bf05f..8b13d6e65587a 100644 --- a/library/std/tests/floats/f128.rs +++ b/library/std/tests/floats/f128.rs @@ -1,9 +1,12 @@ // FIXME(f16_f128): only tested on platforms that have symbols and aren't buggy -#![cfg(reliable_f128)] +#![cfg(not(bootstrap))] +#![cfg(target_has_reliable_f128)] use std::f128::consts; use std::num::FpCategory as Fp; -#[cfg(reliable_f128_math)] +#[cfg(not(miri))] +#[cfg(not(bootstrap))] +#[cfg(target_has_reliable_f128_math)] use std::ops::Rem; use std::ops::{Add, Div, Mul, Sub}; @@ -19,7 +22,9 @@ const TOL: f128 = 1e-12; /// Tolerances for math that is allowed to be imprecise, usually due to multiple chained /// operations. -#[cfg(reliable_f128_math)] +#[cfg(not(miri))] +#[cfg(not(bootstrap))] +#[cfg(target_has_reliable_f128_math)] const TOL_IMPR: f128 = 1e-10; /// Smallest number @@ -66,8 +71,13 @@ fn test_num_f128() { assert_eq!(ten.div(two), ten / two); } +// FIXME(f16_f128,miri): many of these have to be disabled since miri does not yet support +// the intrinsics. + #[test] -#[cfg(reliable_f128_math)] +#[cfg(not(miri))] +#[cfg(not(bootstrap))] +#[cfg(target_has_reliable_f128_math)] fn test_num_f128_rem() { let ten = 10f128; let two = 2f128; @@ -75,28 +85,36 @@ fn test_num_f128_rem() { } #[test] -#[cfg(reliable_f128_math)] +#[cfg(not(miri))] +#[cfg(not(bootstrap))] +#[cfg(target_has_reliable_f128_math)] fn test_min_nan() { assert_eq!(f128::NAN.min(2.0), 2.0); assert_eq!(2.0f128.min(f128::NAN), 2.0); } #[test] -#[cfg(reliable_f128_math)] +#[cfg(not(miri))] +#[cfg(not(bootstrap))] +#[cfg(target_has_reliable_f128_math)] fn test_max_nan() { assert_eq!(f128::NAN.max(2.0), 2.0); assert_eq!(2.0f128.max(f128::NAN), 2.0); } #[test] -#[cfg(reliable_f128_math)] +#[cfg(not(miri))] +#[cfg(not(bootstrap))] +#[cfg(target_has_reliable_f128_math)] fn test_minimum() { assert!(f128::NAN.minimum(2.0).is_nan()); assert!(2.0f128.minimum(f128::NAN).is_nan()); } #[test] -#[cfg(reliable_f128_math)] +#[cfg(not(miri))] +#[cfg(not(bootstrap))] +#[cfg(target_has_reliable_f128_math)] fn test_maximum() { assert!(f128::NAN.maximum(2.0).is_nan()); assert!(2.0f128.maximum(f128::NAN).is_nan()); @@ -112,6 +130,8 @@ fn test_nan() { assert!(!nan.is_sign_negative()); assert!(!nan.is_normal()); assert_eq!(Fp::Nan, nan.classify()); + // Ensure the quiet bit is set. + assert!(nan.to_bits() & (1 << (f128::MANTISSA_DIGITS - 2)) != 0); } #[test] @@ -251,7 +271,9 @@ fn test_classify() { } #[test] -#[cfg(reliable_f128_math)] +#[cfg(not(miri))] +#[cfg(not(bootstrap))] +#[cfg(target_has_reliable_f128_math)] fn test_floor() { assert_approx_eq!(1.0f128.floor(), 1.0f128, TOL_PRECISE); assert_approx_eq!(1.3f128.floor(), 1.0f128, TOL_PRECISE); @@ -266,7 +288,9 @@ fn test_floor() { } #[test] -#[cfg(reliable_f128_math)] +#[cfg(not(miri))] +#[cfg(not(bootstrap))] +#[cfg(target_has_reliable_f128_math)] fn test_ceil() { assert_approx_eq!(1.0f128.ceil(), 1.0f128, TOL_PRECISE); assert_approx_eq!(1.3f128.ceil(), 2.0f128, TOL_PRECISE); @@ -281,7 +305,9 @@ fn test_ceil() { } #[test] -#[cfg(reliable_f128_math)] +#[cfg(not(miri))] +#[cfg(not(bootstrap))] +#[cfg(target_has_reliable_f128_math)] fn test_round() { assert_approx_eq!(2.5f128.round(), 3.0f128, TOL_PRECISE); assert_approx_eq!(1.0f128.round(), 1.0f128, TOL_PRECISE); @@ -297,7 +323,9 @@ fn test_round() { } #[test] -#[cfg(reliable_f128_math)] +#[cfg(not(miri))] +#[cfg(not(bootstrap))] +#[cfg(target_has_reliable_f128_math)] fn test_round_ties_even() { assert_approx_eq!(2.5f128.round_ties_even(), 2.0f128, TOL_PRECISE); assert_approx_eq!(1.0f128.round_ties_even(), 1.0f128, TOL_PRECISE); @@ -313,7 +341,9 @@ fn test_round_ties_even() { } #[test] -#[cfg(reliable_f128_math)] +#[cfg(not(miri))] +#[cfg(not(bootstrap))] +#[cfg(target_has_reliable_f128_math)] fn test_trunc() { assert_approx_eq!(1.0f128.trunc(), 1.0f128, TOL_PRECISE); assert_approx_eq!(1.3f128.trunc(), 1.0f128, TOL_PRECISE); @@ -328,7 +358,9 @@ fn test_trunc() { } #[test] -#[cfg(reliable_f128_math)] +#[cfg(not(miri))] +#[cfg(not(bootstrap))] +#[cfg(target_has_reliable_f128_math)] fn test_fract() { assert_approx_eq!(1.0f128.fract(), 0.0f128, TOL_PRECISE); assert_approx_eq!(1.3f128.fract(), 0.3f128, TOL_PRECISE); @@ -343,7 +375,9 @@ fn test_fract() { } #[test] -#[cfg(reliable_f128_math)] +#[cfg(not(miri))] +#[cfg(not(bootstrap))] +#[cfg(target_has_reliable_f128_math)] fn test_abs() { assert_eq!(f128::INFINITY.abs(), f128::INFINITY); assert_eq!(1f128.abs(), 1f128); @@ -443,7 +477,9 @@ fn test_next_down() { } #[test] -#[cfg(reliable_f128_math)] +#[cfg(not(miri))] +#[cfg(not(bootstrap))] +#[cfg(target_has_reliable_f128_math)] fn test_mul_add() { let nan: f128 = f128::NAN; let inf: f128 = f128::INFINITY; @@ -460,7 +496,9 @@ fn test_mul_add() { } #[test] -#[cfg(reliable_f16_math)] +#[cfg(not(miri))] +#[cfg(not(bootstrap))] +#[cfg(target_has_reliable_f128_math)] fn test_recip() { let nan: f128 = f128::NAN; let inf: f128 = f128::INFINITY; @@ -482,7 +520,9 @@ fn test_recip() { // Many math functions allow for less accurate results, so the next tolerance up is used #[test] -#[cfg(reliable_f128_math)] +#[cfg(not(miri))] +#[cfg(not(bootstrap))] +#[cfg(target_has_reliable_f128_math)] fn test_powi() { let nan: f128 = f128::NAN; let inf: f128 = f128::INFINITY; @@ -497,7 +537,9 @@ fn test_powi() { } #[test] -#[cfg(reliable_f128_math)] +#[cfg(not(miri))] +#[cfg(not(bootstrap))] +#[cfg(target_has_reliable_f128_math)] fn test_powf() { let nan: f128 = f128::NAN; let inf: f128 = f128::INFINITY; @@ -514,7 +556,9 @@ fn test_powf() { } #[test] -#[cfg(reliable_f128_math)] +#[cfg(not(miri))] +#[cfg(not(bootstrap))] +#[cfg(target_has_reliable_f128_math)] fn test_sqrt_domain() { assert!(f128::NAN.sqrt().is_nan()); assert!(f128::NEG_INFINITY.sqrt().is_nan()); @@ -526,7 +570,9 @@ fn test_sqrt_domain() { } #[test] -#[cfg(reliable_f128_math)] +#[cfg(not(miri))] +#[cfg(not(bootstrap))] +#[cfg(target_has_reliable_f128_math)] fn test_exp() { assert_eq!(1.0, 0.0f128.exp()); assert_approx_eq!(consts::E, 1.0f128.exp(), TOL); @@ -541,7 +587,9 @@ fn test_exp() { } #[test] -#[cfg(reliable_f128_math)] +#[cfg(not(miri))] +#[cfg(not(bootstrap))] +#[cfg(target_has_reliable_f128_math)] fn test_exp2() { assert_eq!(32.0, 5.0f128.exp2()); assert_eq!(1.0, 0.0f128.exp2()); @@ -555,7 +603,9 @@ fn test_exp2() { } #[test] -#[cfg(reliable_f128_math)] +#[cfg(not(miri))] +#[cfg(not(bootstrap))] +#[cfg(target_has_reliable_f128_math)] fn test_ln() { let nan: f128 = f128::NAN; let inf: f128 = f128::INFINITY; @@ -571,7 +621,9 @@ fn test_ln() { } #[test] -#[cfg(reliable_f128_math)] +#[cfg(not(miri))] +#[cfg(not(bootstrap))] +#[cfg(target_has_reliable_f128_math)] fn test_log() { let nan: f128 = f128::NAN; let inf: f128 = f128::INFINITY; @@ -590,7 +642,9 @@ fn test_log() { } #[test] -#[cfg(reliable_f128_math)] +#[cfg(not(miri))] +#[cfg(not(bootstrap))] +#[cfg(target_has_reliable_f128_math)] fn test_log2() { let nan: f128 = f128::NAN; let inf: f128 = f128::INFINITY; @@ -607,7 +661,9 @@ fn test_log2() { } #[test] -#[cfg(reliable_f128_math)] +#[cfg(not(miri))] +#[cfg(not(bootstrap))] +#[cfg(target_has_reliable_f128_math)] fn test_log10() { let nan: f128 = f128::NAN; let inf: f128 = f128::INFINITY; @@ -657,7 +713,9 @@ fn test_to_radians() { } #[test] -#[cfg(reliable_f128_math)] +#[cfg(not(miri))] +#[cfg(not(bootstrap))] +#[cfg(target_has_reliable_f128_math)] fn test_asinh() { // Lower accuracy results are allowed, use increased tolerances assert_eq!(0.0f128.asinh(), 0.0f128); @@ -688,7 +746,9 @@ fn test_asinh() { } #[test] -#[cfg(reliable_f128_math)] +#[cfg(not(miri))] +#[cfg(not(bootstrap))] +#[cfg(target_has_reliable_f128_math)] fn test_acosh() { assert_eq!(1.0f128.acosh(), 0.0f128); assert!(0.999f128.acosh().is_nan()); @@ -707,7 +767,9 @@ fn test_acosh() { } #[test] -#[cfg(reliable_f128_math)] +#[cfg(not(miri))] +#[cfg(not(bootstrap))] +#[cfg(target_has_reliable_f128_math)] fn test_atanh() { assert_eq!(0.0f128.atanh(), 0.0f128); assert_eq!((-0.0f128).atanh(), -0.0f128); @@ -727,7 +789,9 @@ fn test_atanh() { } #[test] -#[cfg(reliable_f128_math)] +#[cfg(not(miri))] +#[cfg(not(bootstrap))] +#[cfg(target_has_reliable_f128_math)] fn test_gamma() { // precision can differ among platforms assert_approx_eq!(1.0f128.gamma(), 1.0f128, TOL_IMPR); @@ -748,7 +812,9 @@ fn test_gamma() { } #[test] -#[cfg(reliable_f128_math)] +#[cfg(not(miri))] +#[cfg(not(bootstrap))] +#[cfg(target_has_reliable_f128_math)] fn test_ln_gamma() { assert_approx_eq!(1.0f128.ln_gamma().0, 0.0f128, TOL_IMPR); assert_eq!(1.0f128.ln_gamma().1, 1); @@ -779,7 +845,9 @@ fn test_real_consts() { assert_approx_eq!(frac_1_pi, 1f128 / pi, TOL_PRECISE); assert_approx_eq!(frac_2_pi, 2f128 / pi, TOL_PRECISE); - #[cfg(reliable_f128_math)] + #[cfg(not(miri))] + #[cfg(not(bootstrap))] + #[cfg(target_has_reliable_f128_math)] { let frac_2_sqrtpi: f128 = consts::FRAC_2_SQRT_PI; let sqrt2: f128 = consts::SQRT_2; @@ -984,6 +1052,25 @@ fn test_total_cmp() { assert_eq!(Ordering::Less, (-s_nan()).total_cmp(&s_nan())); } +#[test] +fn test_algebraic() { + let a: f128 = 123.0; + let b: f128 = 456.0; + + // Check that individual operations match their primitive counterparts. + // + // This is a check of current implementations and does NOT imply any form of + // guarantee about future behavior. The compiler reserves the right to make + // these operations inexact matches in the future. + let eps = if cfg!(miri) { 1e-6 } else { 0.0 }; + + assert_approx_eq!(a.algebraic_add(b), a + b, eps); + assert_approx_eq!(a.algebraic_sub(b), a - b, eps); + assert_approx_eq!(a.algebraic_mul(b), a * b, eps); + assert_approx_eq!(a.algebraic_div(b), a / b, eps); + assert_approx_eq!(a.algebraic_rem(b), a % b, eps); +} + #[test] fn test_from() { assert_eq!(f128::from(false), 0.0); diff --git a/library/std/tests/floats/f16.rs b/library/std/tests/floats/f16.rs index ca0b8efbe83ba..8b3b344dd467b 100644 --- a/library/std/tests/floats/f16.rs +++ b/library/std/tests/floats/f16.rs @@ -1,5 +1,6 @@ // FIXME(f16_f128): only tested on platforms that have symbols and aren't buggy -#![cfg(reliable_f16)] +#![cfg(not(bootstrap))] +#![cfg(target_has_reliable_f16)] use std::f16::consts; use std::num::FpCategory as Fp; @@ -57,29 +58,40 @@ fn test_num_f16() { crate::test_num(10f16, 2f16); } +// FIXME(f16_f128,miri): many of these have to be disabled since miri does not yet support +// the intrinsics. + #[test] -#[cfg(reliable_f16_math)] +#[cfg(not(miri))] +#[cfg(not(bootstrap))] +#[cfg(target_has_reliable_f16_math)] fn test_min_nan() { assert_eq!(f16::NAN.min(2.0), 2.0); assert_eq!(2.0f16.min(f16::NAN), 2.0); } #[test] -#[cfg(reliable_f16_math)] +#[cfg(not(miri))] +#[cfg(not(bootstrap))] +#[cfg(target_has_reliable_f16_math)] fn test_max_nan() { assert_eq!(f16::NAN.max(2.0), 2.0); assert_eq!(2.0f16.max(f16::NAN), 2.0); } #[test] -#[cfg(reliable_f16_math)] +#[cfg(not(miri))] +#[cfg(not(bootstrap))] +#[cfg(target_has_reliable_f16_math)] fn test_minimum() { assert!(f16::NAN.minimum(2.0).is_nan()); assert!(2.0f16.minimum(f16::NAN).is_nan()); } #[test] -#[cfg(reliable_f16_math)] +#[cfg(not(miri))] +#[cfg(not(bootstrap))] +#[cfg(target_has_reliable_f16_math)] fn test_maximum() { assert!(f16::NAN.maximum(2.0).is_nan()); assert!(2.0f16.maximum(f16::NAN).is_nan()); @@ -95,6 +107,8 @@ fn test_nan() { assert!(!nan.is_sign_negative()); assert!(!nan.is_normal()); assert_eq!(Fp::Nan, nan.classify()); + // Ensure the quiet bit is set. + assert!(nan.to_bits() & (1 << (f16::MANTISSA_DIGITS - 2)) != 0); } #[test] @@ -234,7 +248,9 @@ fn test_classify() { } #[test] -#[cfg(reliable_f16_math)] +#[cfg(not(miri))] +#[cfg(not(bootstrap))] +#[cfg(target_has_reliable_f16_math)] fn test_floor() { assert_approx_eq!(1.0f16.floor(), 1.0f16, TOL_0); assert_approx_eq!(1.3f16.floor(), 1.0f16, TOL_0); @@ -249,7 +265,9 @@ fn test_floor() { } #[test] -#[cfg(reliable_f16_math)] +#[cfg(not(miri))] +#[cfg(not(bootstrap))] +#[cfg(target_has_reliable_f16_math)] fn test_ceil() { assert_approx_eq!(1.0f16.ceil(), 1.0f16, TOL_0); assert_approx_eq!(1.3f16.ceil(), 2.0f16, TOL_0); @@ -264,7 +282,9 @@ fn test_ceil() { } #[test] -#[cfg(reliable_f16_math)] +#[cfg(not(miri))] +#[cfg(not(bootstrap))] +#[cfg(target_has_reliable_f16_math)] fn test_round() { assert_approx_eq!(2.5f16.round(), 3.0f16, TOL_0); assert_approx_eq!(1.0f16.round(), 1.0f16, TOL_0); @@ -280,7 +300,9 @@ fn test_round() { } #[test] -#[cfg(reliable_f16_math)] +#[cfg(not(miri))] +#[cfg(not(bootstrap))] +#[cfg(target_has_reliable_f16_math)] fn test_round_ties_even() { assert_approx_eq!(2.5f16.round_ties_even(), 2.0f16, TOL_0); assert_approx_eq!(1.0f16.round_ties_even(), 1.0f16, TOL_0); @@ -296,7 +318,9 @@ fn test_round_ties_even() { } #[test] -#[cfg(reliable_f16_math)] +#[cfg(not(miri))] +#[cfg(not(bootstrap))] +#[cfg(target_has_reliable_f16_math)] fn test_trunc() { assert_approx_eq!(1.0f16.trunc(), 1.0f16, TOL_0); assert_approx_eq!(1.3f16.trunc(), 1.0f16, TOL_0); @@ -311,7 +335,9 @@ fn test_trunc() { } #[test] -#[cfg(reliable_f16_math)] +#[cfg(not(miri))] +#[cfg(not(bootstrap))] +#[cfg(target_has_reliable_f16_math)] fn test_fract() { assert_approx_eq!(1.0f16.fract(), 0.0f16, TOL_0); assert_approx_eq!(1.3f16.fract(), 0.3f16, TOL_0); @@ -326,7 +352,9 @@ fn test_fract() { } #[test] -#[cfg(reliable_f16_math)] +#[cfg(not(miri))] +#[cfg(not(bootstrap))] +#[cfg(target_has_reliable_f16_math)] fn test_abs() { assert_eq!(f16::INFINITY.abs(), f16::INFINITY); assert_eq!(1f16.abs(), 1f16); @@ -426,7 +454,9 @@ fn test_next_down() { } #[test] -#[cfg(reliable_f16_math)] +#[cfg(not(miri))] +#[cfg(not(bootstrap))] +#[cfg(target_has_reliable_f16_math)] fn test_mul_add() { let nan: f16 = f16::NAN; let inf: f16 = f16::INFINITY; @@ -443,7 +473,9 @@ fn test_mul_add() { } #[test] -#[cfg(reliable_f16_math)] +#[cfg(not(miri))] +#[cfg(not(bootstrap))] +#[cfg(target_has_reliable_f16_math)] fn test_recip() { let nan: f16 = f16::NAN; let inf: f16 = f16::INFINITY; @@ -459,7 +491,9 @@ fn test_recip() { } #[test] -#[cfg(reliable_f16_math)] +#[cfg(not(miri))] +#[cfg(not(bootstrap))] +#[cfg(target_has_reliable_f16_math)] fn test_powi() { let nan: f16 = f16::NAN; let inf: f16 = f16::INFINITY; @@ -474,7 +508,9 @@ fn test_powi() { } #[test] -#[cfg(reliable_f16_math)] +#[cfg(not(miri))] +#[cfg(not(bootstrap))] +#[cfg(target_has_reliable_f16_math)] fn test_powf() { let nan: f16 = f16::NAN; let inf: f16 = f16::INFINITY; @@ -491,7 +527,9 @@ fn test_powf() { } #[test] -#[cfg(reliable_f16_math)] +#[cfg(not(miri))] +#[cfg(not(bootstrap))] +#[cfg(target_has_reliable_f16_math)] fn test_sqrt_domain() { assert!(f16::NAN.sqrt().is_nan()); assert!(f16::NEG_INFINITY.sqrt().is_nan()); @@ -503,7 +541,9 @@ fn test_sqrt_domain() { } #[test] -#[cfg(reliable_f16_math)] +#[cfg(not(miri))] +#[cfg(not(bootstrap))] +#[cfg(target_has_reliable_f16_math)] fn test_exp() { assert_eq!(1.0, 0.0f16.exp()); assert_approx_eq!(2.718282, 1.0f16.exp(), TOL_0); @@ -518,7 +558,9 @@ fn test_exp() { } #[test] -#[cfg(reliable_f16_math)] +#[cfg(not(miri))] +#[cfg(not(bootstrap))] +#[cfg(target_has_reliable_f16_math)] fn test_exp2() { assert_eq!(32.0, 5.0f16.exp2()); assert_eq!(1.0, 0.0f16.exp2()); @@ -532,7 +574,9 @@ fn test_exp2() { } #[test] -#[cfg(reliable_f16_math)] +#[cfg(not(miri))] +#[cfg(not(bootstrap))] +#[cfg(target_has_reliable_f16_math)] fn test_ln() { let nan: f16 = f16::NAN; let inf: f16 = f16::INFINITY; @@ -548,7 +592,9 @@ fn test_ln() { } #[test] -#[cfg(reliable_f16_math)] +#[cfg(not(miri))] +#[cfg(not(bootstrap))] +#[cfg(target_has_reliable_f16_math)] fn test_log() { let nan: f16 = f16::NAN; let inf: f16 = f16::INFINITY; @@ -567,7 +613,9 @@ fn test_log() { } #[test] -#[cfg(reliable_f16_math)] +#[cfg(not(miri))] +#[cfg(not(bootstrap))] +#[cfg(target_has_reliable_f16_math)] fn test_log2() { let nan: f16 = f16::NAN; let inf: f16 = f16::INFINITY; @@ -584,7 +632,9 @@ fn test_log2() { } #[test] -#[cfg(reliable_f16_math)] +#[cfg(not(miri))] +#[cfg(not(bootstrap))] +#[cfg(target_has_reliable_f16_math)] fn test_log10() { let nan: f16 = f16::NAN; let inf: f16 = f16::INFINITY; @@ -632,7 +682,9 @@ fn test_to_radians() { } #[test] -#[cfg(reliable_f16_math)] +#[cfg(not(miri))] +#[cfg(not(bootstrap))] +#[cfg(target_has_reliable_f16_math)] fn test_asinh() { assert_eq!(0.0f16.asinh(), 0.0f16); assert_eq!((-0.0f16).asinh(), -0.0f16); @@ -657,7 +709,9 @@ fn test_asinh() { } #[test] -#[cfg(reliable_f16_math)] +#[cfg(not(miri))] +#[cfg(not(bootstrap))] +#[cfg(target_has_reliable_f16_math)] fn test_acosh() { assert_eq!(1.0f16.acosh(), 0.0f16); assert!(0.999f16.acosh().is_nan()); @@ -676,7 +730,9 @@ fn test_acosh() { } #[test] -#[cfg(reliable_f16_math)] +#[cfg(not(miri))] +#[cfg(not(bootstrap))] +#[cfg(target_has_reliable_f16_math)] fn test_atanh() { assert_eq!(0.0f16.atanh(), 0.0f16); assert_eq!((-0.0f16).atanh(), -0.0f16); @@ -696,7 +752,9 @@ fn test_atanh() { } #[test] -#[cfg(reliable_f16_math)] +#[cfg(not(miri))] +#[cfg(not(bootstrap))] +#[cfg(target_has_reliable_f16_math)] fn test_gamma() { // precision can differ among platforms assert_approx_eq!(1.0f16.gamma(), 1.0f16, TOL_0); @@ -717,7 +775,9 @@ fn test_gamma() { } #[test] -#[cfg(reliable_f16_math)] +#[cfg(not(miri))] +#[cfg(not(bootstrap))] +#[cfg(target_has_reliable_f16_math)] fn test_ln_gamma() { assert_approx_eq!(1.0f16.ln_gamma().0, 0.0f16, TOL_0); assert_eq!(1.0f16.ln_gamma().1, 1); @@ -750,7 +810,9 @@ fn test_real_consts() { assert_approx_eq!(frac_1_pi, 1f16 / pi, TOL_0); assert_approx_eq!(frac_2_pi, 2f16 / pi, TOL_0); - #[cfg(reliable_f16_math)] + #[cfg(not(miri))] + #[cfg(not(bootstrap))] + #[cfg(target_has_reliable_f16_math)] { let frac_2_sqrtpi: f16 = consts::FRAC_2_SQRT_PI; let sqrt2: f16 = consts::SQRT_2; @@ -811,7 +873,9 @@ fn test_clamp_max_is_nan() { } #[test] -#[cfg(reliable_f16_math)] +#[cfg(not(miri))] +#[cfg(not(bootstrap))] +#[cfg(target_has_reliable_f16_math)] fn test_total_cmp() { use core::cmp::Ordering; @@ -954,6 +1018,27 @@ fn test_total_cmp() { assert_eq!(Ordering::Less, (-s_nan()).total_cmp(&s_nan())); } +#[test] +fn test_algebraic() { + let a: f16 = 123.0; + let b: f16 = 456.0; + + // Check that individual operations match their primitive counterparts. + // + // This is a check of current implementations and does NOT imply any form of + // guarantee about future behavior. The compiler reserves the right to make + // these operations inexact matches in the future. + let eps_add = if cfg!(miri) { 1e1 } else { 0.0 }; + let eps_mul = if cfg!(miri) { 1e3 } else { 0.0 }; + let eps_div = if cfg!(miri) { 1e0 } else { 0.0 }; + + assert_approx_eq!(a.algebraic_add(b), a + b, eps_add); + assert_approx_eq!(a.algebraic_sub(b), a - b, eps_add); + assert_approx_eq!(a.algebraic_mul(b), a * b, eps_mul); + assert_approx_eq!(a.algebraic_div(b), a / b, eps_div); + assert_approx_eq!(a.algebraic_rem(b), a % b, eps_div); +} + #[test] fn test_from() { assert_eq!(f16::from(false), 0.0); diff --git a/library/std/tests/floats/f32.rs b/library/std/tests/floats/f32.rs index bf7641986ada8..9af23afc5bbfc 100644 --- a/library/std/tests/floats/f32.rs +++ b/library/std/tests/floats/f32.rs @@ -72,6 +72,8 @@ fn test_nan() { assert!(nan.is_sign_positive()); assert!(!nan.is_sign_negative()); assert_eq!(Fp::Nan, nan.classify()); + // Ensure the quiet bit is set. + assert!(nan.to_bits() & (1 << (f32::MANTISSA_DIGITS - 2)) != 0); } #[test] @@ -915,3 +917,24 @@ fn test_total_cmp() { assert_eq!(Ordering::Less, (-s_nan()).total_cmp(&f32::INFINITY)); assert_eq!(Ordering::Less, (-s_nan()).total_cmp(&s_nan())); } + +#[test] +fn test_algebraic() { + let a: f32 = 123.0; + let b: f32 = 456.0; + + // Check that individual operations match their primitive counterparts. + // + // This is a check of current implementations and does NOT imply any form of + // guarantee about future behavior. The compiler reserves the right to make + // these operations inexact matches in the future. + let eps_add = if cfg!(miri) { 1e-3 } else { 0.0 }; + let eps_mul = if cfg!(miri) { 1e-1 } else { 0.0 }; + let eps_div = if cfg!(miri) { 1e-4 } else { 0.0 }; + + assert_approx_eq!(a.algebraic_add(b), a + b, eps_add); + assert_approx_eq!(a.algebraic_sub(b), a - b, eps_add); + assert_approx_eq!(a.algebraic_mul(b), a * b, eps_mul); + assert_approx_eq!(a.algebraic_div(b), a / b, eps_div); + assert_approx_eq!(a.algebraic_rem(b), a % b, eps_div); +} diff --git a/library/std/tests/floats/f64.rs b/library/std/tests/floats/f64.rs index cbbfcd15efd26..de9c27eb33d39 100644 --- a/library/std/tests/floats/f64.rs +++ b/library/std/tests/floats/f64.rs @@ -60,6 +60,8 @@ fn test_nan() { assert!(nan.is_sign_positive()); assert!(!nan.is_sign_negative()); assert_eq!(Fp::Nan, nan.classify()); + // Ensure the quiet bit is set. + assert!(nan.to_bits() & (1 << (f64::MANTISSA_DIGITS - 2)) != 0); } #[test] @@ -894,3 +896,22 @@ fn test_total_cmp() { assert_eq!(Ordering::Less, (-s_nan()).total_cmp(&f64::INFINITY)); assert_eq!(Ordering::Less, (-s_nan()).total_cmp(&s_nan())); } + +#[test] +fn test_algebraic() { + let a: f64 = 123.0; + let b: f64 = 456.0; + + // Check that individual operations match their primitive counterparts. + // + // This is a check of current implementations and does NOT imply any form of + // guarantee about future behavior. The compiler reserves the right to make + // these operations inexact matches in the future. + let eps = if cfg!(miri) { 1e-6 } else { 0.0 }; + + assert_approx_eq!(a.algebraic_add(b), a + b, eps); + assert_approx_eq!(a.algebraic_sub(b), a - b, eps); + assert_approx_eq!(a.algebraic_mul(b), a * b, eps); + assert_approx_eq!(a.algebraic_div(b), a / b, eps); + assert_approx_eq!(a.algebraic_rem(b), a % b, eps); +} diff --git a/library/std/tests/floats/lib.rs b/library/std/tests/floats/lib.rs index ad82f1a44e711..7884fc9239e20 100644 --- a/library/std/tests/floats/lib.rs +++ b/library/std/tests/floats/lib.rs @@ -1,4 +1,6 @@ -#![feature(f16, f128, float_gamma, float_minimum_maximum)] +#![feature(f16, f128, float_algebraic, float_gamma, float_minimum_maximum)] +#![cfg_attr(not(bootstrap), feature(cfg_target_has_reliable_f16_f128))] +#![cfg_attr(not(bootstrap), expect(internal_features))] // for reliable_f16_f128 use std::fmt; use std::ops::{Add, Div, Mul, Rem, Sub}; @@ -10,7 +12,7 @@ macro_rules! assert_approx_eq { let (a, b) = (&$a, &$b); let diff = (*a - *b).abs(); assert!( - diff < $lim, + diff <= $lim, "{a:?} is not approximately equal to {b:?} (threshold {lim:?}, difference {diff:?})", lim = $lim ); diff --git a/library/std/tests/sync/mpmc.rs b/library/std/tests/sync/mpmc.rs index 81b92297f76a3..78abcb3bcbe1d 100644 --- a/library/std/tests/sync/mpmc.rs +++ b/library/std/tests/sync/mpmc.rs @@ -63,6 +63,24 @@ fn smoke_port_gone() { assert!(tx.send(1).is_err()); } +#[test] +fn smoke_receiver_clone() { + let (tx, rx) = channel::(); + let rx2 = rx.clone(); + drop(rx); + tx.send(1).unwrap(); + assert_eq!(rx2.recv().unwrap(), 1); +} + +#[test] +fn smoke_receiver_clone_port_gone() { + let (tx, rx) = channel::(); + let rx2 = rx.clone(); + drop(rx); + drop(rx2); + assert!(tx.send(1).is_err()); +} + #[test] fn smoke_shared_port_gone() { let (tx, rx) = channel::(); @@ -124,6 +142,18 @@ fn chan_gone_concurrent() { while rx.recv().is_ok() {} } +#[test] +fn receiver_cloning() { + let (tx, rx) = channel::(); + let rx2 = rx.clone(); + + tx.send(1).unwrap(); + tx.send(2).unwrap(); + + assert_eq!(rx2.recv(), Ok(1)); + assert_eq!(rx.recv(), Ok(2)); +} + #[test] fn stress() { let count = if cfg!(miri) { 100 } else { 10000 }; diff --git a/library/std/tests/sync/mutex.rs b/library/std/tests/sync/mutex.rs index 88fb448d1ebf6..ac82914d6de46 100644 --- a/library/std/tests/sync/mutex.rs +++ b/library/std/tests/sync/mutex.rs @@ -409,13 +409,13 @@ fn panic_while_mapping_unlocked_poison() { let _ = panic::catch_unwind(|| { let guard = lock.lock().unwrap(); - let _guard = MutexGuard::try_map::<(), _>(guard, |_| panic!()); + let _guard = MutexGuard::filter_map::<(), _>(guard, |_| panic!()); }); match lock.try_lock() { - Ok(_) => panic!("panicking in a MutexGuard::try_map closure should poison the Mutex"), + Ok(_) => panic!("panicking in a MutexGuard::filter_map closure should poison the Mutex"), Err(TryLockError::WouldBlock) => { - panic!("panicking in a MutexGuard::try_map closure should unlock the mutex") + panic!("panicking in a MutexGuard::filter_map closure should unlock the mutex") } Err(TryLockError::Poisoned(_)) => {} } @@ -437,13 +437,15 @@ fn panic_while_mapping_unlocked_poison() { let _ = panic::catch_unwind(|| { let guard = lock.lock().unwrap(); let guard = MutexGuard::map::<(), _>(guard, |val| val); - let _guard = MappedMutexGuard::try_map::<(), _>(guard, |_| panic!()); + let _guard = MappedMutexGuard::filter_map::<(), _>(guard, |_| panic!()); }); match lock.try_lock() { - Ok(_) => panic!("panicking in a MappedMutexGuard::try_map closure should poison the Mutex"), + Ok(_) => { + panic!("panicking in a MappedMutexGuard::filter_map closure should poison the Mutex") + } Err(TryLockError::WouldBlock) => { - panic!("panicking in a MappedMutexGuard::try_map closure should unlock the mutex") + panic!("panicking in a MappedMutexGuard::filter_map closure should unlock the mutex") } Err(TryLockError::Poisoned(_)) => {} } diff --git a/library/std/tests/sync/rwlock.rs b/library/std/tests/sync/rwlock.rs index d2c784aefcf61..1d55a1769483a 100644 --- a/library/std/tests/sync/rwlock.rs +++ b/library/std/tests/sync/rwlock.rs @@ -517,16 +517,20 @@ fn panic_while_mapping_read_unlocked_no_poison() { let _ = panic::catch_unwind(|| { let guard = lock.read().unwrap(); - let _guard = RwLockReadGuard::try_map::<(), _>(guard, |_| panic!()); + let _guard = RwLockReadGuard::filter_map::<(), _>(guard, |_| panic!()); }); match lock.try_write() { Ok(_) => {} Err(TryLockError::WouldBlock) => { - panic!("panicking in a RwLockReadGuard::try_map closure should release the read lock") + panic!( + "panicking in a RwLockReadGuard::filter_map closure should release the read lock" + ) } Err(TryLockError::Poisoned(_)) => { - panic!("panicking in a RwLockReadGuard::try_map closure should not poison the RwLock") + panic!( + "panicking in a RwLockReadGuard::filter_map closure should not poison the RwLock" + ) } } @@ -549,16 +553,16 @@ fn panic_while_mapping_read_unlocked_no_poison() { let _ = panic::catch_unwind(|| { let guard = lock.read().unwrap(); let guard = RwLockReadGuard::map::<(), _>(guard, |val| val); - let _guard = MappedRwLockReadGuard::try_map::<(), _>(guard, |_| panic!()); + let _guard = MappedRwLockReadGuard::filter_map::<(), _>(guard, |_| panic!()); }); match lock.try_write() { Ok(_) => {} Err(TryLockError::WouldBlock) => panic!( - "panicking in a MappedRwLockReadGuard::try_map closure should release the read lock" + "panicking in a MappedRwLockReadGuard::filter_map closure should release the read lock" ), Err(TryLockError::Poisoned(_)) => panic!( - "panicking in a MappedRwLockReadGuard::try_map closure should not poison the RwLock" + "panicking in a MappedRwLockReadGuard::filter_map closure should not poison the RwLock" ), } @@ -585,15 +589,17 @@ fn panic_while_mapping_write_unlocked_poison() { let _ = panic::catch_unwind(|| { let guard = lock.write().unwrap(); - let _guard = RwLockWriteGuard::try_map::<(), _>(guard, |_| panic!()); + let _guard = RwLockWriteGuard::filter_map::<(), _>(guard, |_| panic!()); }); match lock.try_write() { Ok(_) => { - panic!("panicking in a RwLockWriteGuard::try_map closure should poison the RwLock") + panic!("panicking in a RwLockWriteGuard::filter_map closure should poison the RwLock") } Err(TryLockError::WouldBlock) => { - panic!("panicking in a RwLockWriteGuard::try_map closure should release the write lock") + panic!( + "panicking in a RwLockWriteGuard::filter_map closure should release the write lock" + ) } Err(TryLockError::Poisoned(_)) => {} } @@ -617,15 +623,15 @@ fn panic_while_mapping_write_unlocked_poison() { let _ = panic::catch_unwind(|| { let guard = lock.write().unwrap(); let guard = RwLockWriteGuard::map::<(), _>(guard, |val| val); - let _guard = MappedRwLockWriteGuard::try_map::<(), _>(guard, |_| panic!()); + let _guard = MappedRwLockWriteGuard::filter_map::<(), _>(guard, |_| panic!()); }); match lock.try_write() { Ok(_) => panic!( - "panicking in a MappedRwLockWriteGuard::try_map closure should poison the RwLock" + "panicking in a MappedRwLockWriteGuard::filter_map closure should poison the RwLock" ), Err(TryLockError::WouldBlock) => panic!( - "panicking in a MappedRwLockWriteGuard::try_map closure should release the write lock" + "panicking in a MappedRwLockWriteGuard::filter_map closure should release the write lock" ), Err(TryLockError::Poisoned(_)) => {} } diff --git a/library/std/tests/thread_local/lib.rs b/library/std/tests/thread_local/lib.rs index c52914354253c..26af5f1eb0a9d 100644 --- a/library/std/tests/thread_local/lib.rs +++ b/library/std/tests/thread_local/lib.rs @@ -1,3 +1,5 @@ +#![feature(cfg_target_thread_local)] + #[cfg(not(any(target_os = "emscripten", target_os = "wasi")))] mod tests; diff --git a/library/std/tests/thread_local/tests.rs b/library/std/tests/thread_local/tests.rs index aa020c2559cc5..e8278361d9337 100644 --- a/library/std/tests/thread_local/tests.rs +++ b/library/std/tests/thread_local/tests.rs @@ -1,7 +1,7 @@ use std::cell::{Cell, UnsafeCell}; use std::sync::atomic::{AtomicU8, Ordering}; use std::sync::{Arc, Condvar, Mutex}; -use std::thread::{self, Builder, LocalKey}; +use std::thread::{self, LocalKey}; use std::thread_local; #[derive(Clone, Default)] @@ -345,8 +345,27 @@ fn join_orders_after_tls_destructors() { } // Test that thread::current is still available in TLS destructors. +// +// The test won't currently work without target_thread_local, aka with slow tls. +// The runtime tries very hard to drop last the TLS variable that keeps the information about the +// current thread, by using several tricks like deffering the drop to a later round of TLS destruction. +// However, this only seems to work with fast tls. +// +// With slow TLS, it seems that multiple libc implementations will just set the value to null the first +// time they encounter it, regardless of it having a destructor or not. This means that trying to +// retrieve it later in a drop impl of another TLS variable will not work. +// +// ** Apple libc: https://github.com/apple-oss-distributions/libpthread/blob/c032e0b076700a0a47db75528a282b8d3a06531a/src/pthread_tsd.c#L293 +// Sets the variable to null if it has a destructor and the value is not null. However, all variables +// created with pthread_key_create are marked as having a destructor, even if the fn ptr called with +// it is null. +// ** glibc: https://github.com/bminor/glibc/blob/e5893e6349541d871e8a25120bca014551d13ff5/nptl/nptl_deallocate_tsd.c#L59 +// ** musl: https://github.com/kraj/musl/blob/1880359b54ff7dd9f5016002bfdae4b136007dde/src/thread/pthread_key_create.c#L87 +#[cfg(target_thread_local)] #[test] fn thread_current_in_dtor() { + use std::thread::Builder; + // Go through one round of TLS destruction first. struct Defer; impl Drop for Defer { diff --git a/library/stdarch b/library/stdarch index 9426bb56586c6..4666c7376f25a 160000 --- a/library/stdarch +++ b/library/stdarch @@ -1 +1 @@ -Subproject commit 9426bb56586c6ae4095a2dcbd66c570253e6fb32 +Subproject commit 4666c7376f25a265c74535585d622da3da6dfeb1 diff --git a/library/sysroot/Cargo.toml b/library/sysroot/Cargo.toml index ec6ae31507e05..c149d513c32b4 100644 --- a/library/sysroot/Cargo.toml +++ b/library/sysroot/Cargo.toml @@ -31,5 +31,4 @@ panic_immediate_abort = ["std/panic_immediate_abort"] profiler = ["dep:profiler_builtins"] std_detect_file_io = ["std/std_detect_file_io"] std_detect_dlsym_getauxval = ["std/std_detect_dlsym_getauxval"] -std_detect_env_override = ["std/std_detect_env_override"] windows_raw_dylib = ["std/windows_raw_dylib"] diff --git a/library/test/src/cli.rs b/library/test/src/cli.rs index ef6786f431670..8840714a66238 100644 --- a/library/test/src/cli.rs +++ b/library/test/src/cli.rs @@ -61,7 +61,7 @@ fn optgroups() -> getopts::Options { .optopt("", "logfile", "Write logs to the specified file (deprecated)", "PATH") .optflag( "", - "nocapture", + "no-capture", "don't capture stdout/stderr of each \ task, allow printing directly", ) @@ -172,7 +172,7 @@ tests in the same order again. Note that --shuffle and --shuffle-seed do not affect whether the tests are run in parallel. All tests have their standard output and standard error captured by default. -This can be overridden with the --nocapture flag or setting RUST_TEST_NOCAPTURE +This can be overridden with the --no-capture flag or setting RUST_TEST_NOCAPTURE environment variable to a value other than "0". Logging is not captured by default. Test Attributes: @@ -199,7 +199,10 @@ Test Attributes: /// otherwise creates a `TestOpts` object and returns it. pub fn parse_opts(args: &[String]) -> Option { // Parse matches. - let opts = optgroups(); + let mut opts = optgroups(); + // Flags hidden from `usage` + opts.optflag("", "nocapture", "Deprecated, use `--no-capture`"); + let binary = args.first().map(|c| &**c).unwrap_or("..."); let args = args.get(1..).unwrap_or(args); let matches = match opts.parse(args) { @@ -210,7 +213,7 @@ pub fn parse_opts(args: &[String]) -> Option { // Check if help was requested. if matches.opt_present("h") { // Show help and do nothing more. - usage(binary, &opts); + usage(binary, &optgroups()); return None; } @@ -447,7 +450,7 @@ fn get_color_config(matches: &getopts::Matches) -> OptPartRes { } fn get_nocapture(matches: &getopts::Matches) -> OptPartRes { - let mut nocapture = matches.opt_present("nocapture"); + let mut nocapture = matches.opt_present("nocapture") || matches.opt_present("no-capture"); if !nocapture { nocapture = match env::var("RUST_TEST_NOCAPTURE") { Ok(val) => &val != "0", diff --git a/library/test/src/lib.rs b/library/test/src/lib.rs index 7ada3f269a002..7f56d1e362698 100644 --- a/library/test/src/lib.rs +++ b/library/test/src/lib.rs @@ -98,6 +98,15 @@ const SECONDARY_TEST_BENCH_BENCHMARKS_VAR: &str = "__RUST_TEST_BENCH_BENCHMARKS" // The default console test runner. It accepts the command line // arguments and a vector of test_descs. pub fn test_main(args: &[String], tests: Vec, options: Option) { + test_main_with_exit_callback(args, tests, options, || {}) +} + +pub fn test_main_with_exit_callback( + args: &[String], + tests: Vec, + options: Option, + exit_callback: F, +) { let mut opts = match cli::parse_opts(args) { Some(Ok(o)) => o, Some(Err(msg)) => { @@ -151,6 +160,7 @@ pub fn test_main(args: &[String], tests: Vec, options: Option {} Ok(false) => process::exit(ERROR_EXIT_CODE), @@ -666,10 +676,11 @@ fn run_test_in_process( io::set_output_capture(None); - let test_result = match result { - Ok(()) => calc_result(&desc, Ok(()), time_opts.as_ref(), exec_time.as_ref()), - Err(e) => calc_result(&desc, Err(e.as_ref()), time_opts.as_ref(), exec_time.as_ref()), - }; + // Determine whether the test passed or failed, by comparing its panic + // payload (if any) with its `ShouldPanic` value, and by checking for + // fatal timeout. + let test_result = + calc_result(&desc, result.err().as_deref(), time_opts.as_ref(), exec_time.as_ref()); let stdout = data.lock().unwrap_or_else(|e| e.into_inner()).to_vec(); let message = CompletedTest::new(id, desc, test_result, exec_time, stdout); monitor_ch.send(message).unwrap(); @@ -741,10 +752,7 @@ fn spawn_test_subprocess( fn run_test_in_spawned_subprocess(desc: TestDesc, runnable_test: RunnableTest) -> ! { let builtin_panic_hook = panic::take_hook(); let record_result = Arc::new(move |panic_info: Option<&'_ PanicHookInfo<'_>>| { - let test_result = match panic_info { - Some(info) => calc_result(&desc, Err(info.payload()), None, None), - None => calc_result(&desc, Ok(()), None, None), - }; + let test_result = calc_result(&desc, panic_info.map(|info| info.payload()), None, None); // We don't support serializing TrFailedMsg, so just // print the message out to stderr. diff --git a/library/test/src/test_result.rs b/library/test/src/test_result.rs index 73dcc2e2a0cca..4cb43fc45fd6c 100644 --- a/library/test/src/test_result.rs +++ b/library/test/src/test_result.rs @@ -39,15 +39,18 @@ pub enum TestResult { /// Creates a `TestResult` depending on the raw result of test execution /// and associated data. -pub(crate) fn calc_result<'a>( +pub(crate) fn calc_result( desc: &TestDesc, - task_result: Result<(), &'a (dyn Any + 'static + Send)>, + panic_payload: Option<&(dyn Any + Send)>, time_opts: Option<&time::TestTimeOptions>, exec_time: Option<&time::TestExecTime>, ) -> TestResult { - let result = match (&desc.should_panic, task_result) { - (&ShouldPanic::No, Ok(())) | (&ShouldPanic::Yes, Err(_)) => TestResult::TrOk, - (&ShouldPanic::YesWithMessage(msg), Err(err)) => { + let result = match (desc.should_panic, panic_payload) { + // The test did or didn't panic, as expected. + (ShouldPanic::No, None) | (ShouldPanic::Yes, Some(_)) => TestResult::TrOk, + + // Check the actual panic message against the expected message. + (ShouldPanic::YesWithMessage(msg), Some(err)) => { let maybe_panic_str = err .downcast_ref::() .map(|e| &**e) @@ -58,23 +61,31 @@ pub(crate) fn calc_result<'a>( } else if let Some(panic_str) = maybe_panic_str { TestResult::TrFailedMsg(format!( r#"panic did not contain expected string - panic message: `{panic_str:?}`, - expected substring: `{msg:?}`"# + panic message: {panic_str:?} + expected substring: {msg:?}"# )) } else { TestResult::TrFailedMsg(format!( r#"expected panic with string value, found non-string value: `{:?}` - expected substring: `{:?}`"#, - (*err).type_id(), - msg + expected substring: {msg:?}"#, + (*err).type_id() )) } } - (&ShouldPanic::Yes, Ok(())) | (&ShouldPanic::YesWithMessage(_), Ok(())) => { - TestResult::TrFailedMsg("test did not panic as expected".to_string()) + + // The test should have panicked, but didn't panic. + (ShouldPanic::Yes, None) | (ShouldPanic::YesWithMessage(_), None) => { + let fn_location = if !desc.source_file.is_empty() { + &format!(" at {}:{}:{}", desc.source_file, desc.start_line, desc.start_col) + } else { + "" + }; + TestResult::TrFailedMsg(format!("test did not panic as expected{}", fn_location)) } - _ => TestResult::TrFailed, + + // The test should not have panicked, but did panic. + (ShouldPanic::No, Some(_)) => TestResult::TrFailed, }; // If test is already failed (or allowed to fail), do not change the result. diff --git a/library/test/src/tests.rs b/library/test/src/tests.rs index 47f581fefae1f..d986bd74f772b 100644 --- a/library/test/src/tests.rs +++ b/library/test/src/tests.rs @@ -200,8 +200,8 @@ fn test_should_panic_bad_message() { } let expected = "foobar"; let failed_msg = r#"panic did not contain expected string - panic message: `"an error message"`, - expected substring: `"foobar"`"#; + panic message: "an error message" + expected substring: "foobar""#; let desc = TestDescAndFn { desc: TestDesc { name: StaticTestName("whatever"), @@ -238,7 +238,7 @@ fn test_should_panic_non_string_message_type() { let failed_msg = format!( r#"expected panic with string value, found non-string value: `{:?}` - expected substring: `"foobar"`"#, + expected substring: "foobar""#, TypeId::of::() ); let desc = TestDescAndFn { diff --git a/library/unwind/Cargo.toml b/library/unwind/Cargo.toml index da60924c2b419..df43e6ae80fb0 100644 --- a/library/unwind/Cargo.toml +++ b/library/unwind/Cargo.toml @@ -22,7 +22,7 @@ cfg-if = "1.0" libc = { version = "0.2.140", features = ['rustc-dep-of-std'], default-features = false } [target.'cfg(target_os = "xous")'.dependencies] -unwinding = { version = "0.2.5", features = ['rustc-dep-of-std', 'unwinder', 'fde-custom'], default-features = false } +unwinding = { version = "0.2.6", features = ['rustc-dep-of-std', 'unwinder', 'fde-custom'], default-features = false } [features] diff --git a/library/windows_targets/src/lib.rs b/library/windows_targets/src/lib.rs index c7d158584ebd8..bce54c5ffcef0 100644 --- a/library/windows_targets/src/lib.rs +++ b/library/windows_targets/src/lib.rs @@ -34,6 +34,7 @@ pub macro link { } #[cfg(not(feature = "windows_raw_dylib"))] +#[cfg(not(target_os = "cygwin"))] // Cygwin doesn't need these libs #[cfg_attr(target_vendor = "win7", link(name = "advapi32"))] #[link(name = "ntdll")] #[link(name = "userenv")] diff --git a/rustfmt.toml b/rustfmt.toml index 8feeb60ca12c2..d9857a7e3e788 100644 --- a/rustfmt.toml +++ b/rustfmt.toml @@ -19,6 +19,7 @@ ignore = [ "/tests/debuginfo/", # These tests are somewhat sensitive to source code layout. "/tests/incremental/", # These tests are somewhat sensitive to source code layout. "/tests/pretty/", # These tests are very sensitive to source code layout. + "/tests/run-make/export", # These tests contain syntax errors. "/tests/run-make/translation/test.rs", # This test contains syntax errors. "/tests/rustdoc/", # Some have syntax errors, some are whitespace-sensitive. "/tests/rustdoc-gui/", # Some tests are sensitive to source code layout. @@ -49,9 +50,12 @@ ignore = [ # These are ignored by a standard cargo fmt run. "compiler/rustc_codegen_cranelift/scripts", - "compiler/rustc_codegen_cranelift/example/gen_block_iterate.rs", # uses edition 2024 "compiler/rustc_codegen_gcc/tests", # Code automatically generated and included. "compiler/rustc_codegen_gcc/src/intrinsic/archs.rs", "compiler/rustc_codegen_gcc/example", + + # Rustfmt doesn't support use closures yet + "tests/mir-opt/ergonomic-clones/closure.rs", + "tests/codegen/ergonomic-clones/closure.rs", ] diff --git a/src/bootstrap/Cargo.lock b/src/bootstrap/Cargo.lock index 17ee4d610f958..cdad3bd46fab6 100644 --- a/src/bootstrap/Cargo.lock +++ b/src/bootstrap/Cargo.lock @@ -56,6 +56,7 @@ dependencies = [ "sha2", "sysinfo", "tar", + "tempfile", "termcolor", "toml", "tracing", @@ -63,7 +64,7 @@ dependencies = [ "tracing-subscriber", "tracing-tree", "walkdir", - "windows", + "windows 0.57.0", "xz2", ] @@ -157,12 +158,6 @@ dependencies = [ "cc", ] -[[package]] -name = "core-foundation-sys" -version = "0.8.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "773648b94d0e5d620f64f280777445740e61fe701025087ec8b57f45c791888b" - [[package]] name = "cpufeatures" version = "0.2.15" @@ -225,14 +220,20 @@ dependencies = [ [[package]] name = "errno" -version = "0.3.9" +version = "0.3.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "534c5cf6194dfab3db3242765c03bbe257cf92f22b38f6bc0c58d59108a820ba" +checksum = "33d852cb9b869c2a9b3df2f71a3074817f01e1844f839a144f5fcef059a4eb5d" dependencies = [ "libc", - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] +[[package]] +name = "fastrand" +version = "2.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be" + [[package]] name = "fd-lock" version = "4.0.2" @@ -240,7 +241,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7e5768da2206272c81ef0b5e951a41862938a6070da63bcea197899942d3b947" dependencies = [ "cfg-if", - "rustix", + "rustix 0.38.40", "windows-sys 0.52.0", ] @@ -266,6 +267,18 @@ dependencies = [ "version_check", ] +[[package]] +name = "getrandom" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "73fea8450eea4bac3940448fb7ae50d91f034f941199fcd9d909a5a07aa455f0" +dependencies = [ + "cfg-if", + "libc", + "r-efi", + "wasi", +] + [[package]] name = "globset" version = "0.4.15" @@ -334,9 +347,9 @@ checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" [[package]] name = "libc" -version = "0.2.167" +version = "0.2.171" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09d6582e104315a817dff97f75133544b2e094ee22447d2acf4a74e189ba06fc" +checksum = "c19937216e9d3aa9956d9bb8dfc0b0c8beb6058fc4f7a4dc4d850edf86a237d6" [[package]] name = "libredox" @@ -355,6 +368,12 @@ version = "0.4.14" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "78b3ae25bc7c8c38cec158d1f2757ee79e9b3740fbc7ccf0e59e4b08d793fa89" +[[package]] +name = "linux-raw-sys" +version = "0.9.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fe7db12097d22ec582439daf8618b8fdd1a7bef6270e9af3b1ebcd30893cf413" + [[package]] name = "log" version = "0.4.22" @@ -415,6 +434,25 @@ dependencies = [ "windows-sys 0.52.0", ] +[[package]] +name = "objc2-core-foundation" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1c10c2894a6fed806ade6027bcd50662746363a9589d3ec9d9bef30a4e4bc166" +dependencies = [ + "bitflags", +] + +[[package]] +name = "objc2-io-kit" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "71c1c64d6120e51cd86033f67176b1cb66780c2efe34dec55176f77befd93c0a" +dependencies = [ + "libc", + "objc2-core-foundation", +] + [[package]] name = "object" version = "0.36.5" @@ -486,6 +524,12 @@ dependencies = [ "proc-macro2", ] +[[package]] +name = "r-efi" +version = "5.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "74765f6d916ee2faa39bc8e68e4f3ed8949b48cccdac59983d287a7cb71ce9c5" + [[package]] name = "redox_syscall" version = "0.5.7" @@ -548,10 +592,23 @@ dependencies = [ "bitflags", "errno", "libc", - "linux-raw-sys", + "linux-raw-sys 0.4.14", "windows-sys 0.52.0", ] +[[package]] +name = "rustix" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f7178faa4b75a30e269c71e61c353ce2748cf3d76f0c44c393f4e60abf49b825" +dependencies = [ + "bitflags", + "errno", + "libc", + "linux-raw-sys 0.9.3", + "windows-sys 0.59.0", +] + [[package]] name = "ryu" version = "1.0.18" @@ -656,15 +713,16 @@ dependencies = [ [[package]] name = "sysinfo" -version = "0.33.0" +version = "0.35.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "948512566b1895f93b1592c7574baeb2de842f224f2aab158799ecadb8ebbb46" +checksum = "b897c8ea620e181c7955369a31be5f48d9a9121cb59fd33ecef9ff2a34323422" dependencies = [ - "core-foundation-sys", "libc", "memchr", "ntapi", - "windows", + "objc2-core-foundation", + "objc2-io-kit", + "windows 0.61.1", ] [[package]] @@ -678,6 +736,19 @@ dependencies = [ "xattr", ] +[[package]] +name = "tempfile" +version = "3.19.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "488960f40a3fd53d72c2a29a58722561dee8afdd175bd88e3db4677d7b2ba600" +dependencies = [ + "fastrand", + "getrandom", + "once_cell", + "rustix 1.0.2", + "windows-sys 0.59.0", +] + [[package]] name = "termcolor" version = "1.4.1" @@ -824,6 +895,15 @@ dependencies = [ "winapi-util", ] +[[package]] +name = "wasi" +version = "0.14.2+wasi-0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9683f9a5a998d873c0d21fcbe3c083009670149a8fab228644b8bd36b2c48cb3" +dependencies = [ + "wit-bindgen-rt", +] + [[package]] name = "winapi" version = "0.3.9" @@ -861,22 +941,67 @@ version = "0.57.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "12342cb4d8e3b046f3d80effd474a7a02447231330ef77d71daa6fbc40681143" dependencies = [ - "windows-core", + "windows-core 0.57.0", "windows-targets", ] +[[package]] +name = "windows" +version = "0.61.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c5ee8f3d025738cb02bad7868bbb5f8a6327501e870bf51f1b455b0a2454a419" +dependencies = [ + "windows-collections", + "windows-core 0.61.0", + "windows-future", + "windows-link", + "windows-numerics", +] + +[[package]] +name = "windows-collections" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3beeceb5e5cfd9eb1d76b381630e82c4241ccd0d27f1a39ed41b2760b255c5e8" +dependencies = [ + "windows-core 0.61.0", +] + [[package]] name = "windows-core" version = "0.57.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d2ed2439a290666cd67ecce2b0ffaad89c2a56b976b736e6ece670297897832d" dependencies = [ - "windows-implement", - "windows-interface", - "windows-result", + "windows-implement 0.57.0", + "windows-interface 0.57.0", + "windows-result 0.1.2", "windows-targets", ] +[[package]] +name = "windows-core" +version = "0.61.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4763c1de310c86d75a878046489e2e5ba02c649d185f21c67d4cf8a56d098980" +dependencies = [ + "windows-implement 0.60.0", + "windows-interface 0.59.1", + "windows-link", + "windows-result 0.3.2", + "windows-strings", +] + +[[package]] +name = "windows-future" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a1d6bbefcb7b60acd19828e1bc965da6fcf18a7e39490c5f8be71e54a19ba32" +dependencies = [ + "windows-core 0.61.0", + "windows-link", +] + [[package]] name = "windows-implement" version = "0.57.0" @@ -888,6 +1013,17 @@ dependencies = [ "syn", ] +[[package]] +name = "windows-implement" +version = "0.60.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a47fddd13af08290e67f4acabf4b459f647552718f683a7b415d290ac744a836" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + [[package]] name = "windows-interface" version = "0.57.0" @@ -899,6 +1035,33 @@ dependencies = [ "syn", ] +[[package]] +name = "windows-interface" +version = "0.59.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bd9211b69f8dcdfa817bfd14bf1c97c9188afa36f4750130fcdf3f400eca9fa8" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "windows-link" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "76840935b766e1b0a05c0066835fb9ec80071d4c09a16f6bd5f7e655e3c14c38" + +[[package]] +name = "windows-numerics" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9150af68066c4c5c07ddc0ce30421554771e528bde427614c61038bc2c92c2b1" +dependencies = [ + "windows-core 0.61.0", + "windows-link", +] + [[package]] name = "windows-result" version = "0.1.2" @@ -908,6 +1071,24 @@ dependencies = [ "windows-targets", ] +[[package]] +name = "windows-result" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c64fd11a4fd95df68efcfee5f44a294fe71b8bc6a91993e2791938abcc712252" +dependencies = [ + "windows-link", +] + +[[package]] +name = "windows-strings" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a2ba9642430ee452d5a7aa78d72907ebe8cfda358e8cb7918a2050581322f97" +dependencies = [ + "windows-link", +] + [[package]] name = "windows-sys" version = "0.52.0" @@ -990,6 +1171,15 @@ version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" +[[package]] +name = "wit-bindgen-rt" +version = "0.39.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6f42320e61fe2cfd34354ecb597f86f413484a798ba44a8ca1165c58d42da6c1" +dependencies = [ + "bitflags", +] + [[package]] name = "xattr" version = "1.3.1" @@ -997,8 +1187,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8da84f1a25939b27f6820d92aed108f83ff920fdf11a7b19366c27c4cda81d4f" dependencies = [ "libc", - "linux-raw-sys", - "rustix", + "linux-raw-sys 0.4.14", + "rustix 0.38.40", ] [[package]] diff --git a/src/bootstrap/Cargo.toml b/src/bootstrap/Cargo.toml index 23aa87a74075b..e34de924cc18a 100644 --- a/src/bootstrap/Cargo.toml +++ b/src/bootstrap/Cargo.toml @@ -58,7 +58,7 @@ walkdir = "2.4" xz2 = "0.1" # Dependencies needed by the build-metrics feature -sysinfo = { version = "0.33.0", default-features = false, optional = true, features = ["system"] } +sysinfo = { version = "0.35.0", default-features = false, optional = true, features = ["system"] } # Dependencies needed by the `tracing` feature tracing = { version = "0.1", optional = true, features = ["attributes"] } @@ -83,6 +83,7 @@ features = [ [dev-dependencies] pretty_assertions = "1.4" +tempfile = "3.15.0" # We care a lot about bootstrap's compile times, so don't include debuginfo for # dependencies, only bootstrap itself. diff --git a/src/bootstrap/bootstrap.py b/src/bootstrap/bootstrap.py index 68400ba0ea029..42ad14a81d029 100644 --- a/src/bootstrap/bootstrap.py +++ b/src/bootstrap/bootstrap.py @@ -1162,6 +1162,30 @@ def build_triple(self): config = self.get_toml("build") return config or default_build_triple(self.verbose) + def is_git_repository(self, repo_path): + return os.path.isdir(os.path.join(repo_path, ".git")) + + def get_latest_commit(self): + repo_path = self.rust_root + author_email = self.stage0_data.get("git_merge_commit_email") + if not self.is_git_repository(repo_path): + return "" + cmd = [ + "git", + "-C", + repo_path, + "rev-list", + "--author", + author_email, + "-n1", + "HEAD", + ] + try: + commit = subprocess.check_output(cmd, universal_newlines=True).strip() + return commit or "" + except subprocess.CalledProcessError: + return "" + def check_vendored_status(self): """Check that vendoring is configured properly""" # keep this consistent with the equivalent check in bootstrap: @@ -1174,7 +1198,8 @@ def check_vendored_status(self): eprint(" use vendored sources by default.") cargo_dir = os.path.join(self.rust_root, ".cargo") - url = "https://ci-artifacts.rust-lang.org/rustc-builds//rustc-nightly-src.tar.xz" + commit = self.get_latest_commit() + url = f"https://ci-artifacts.rust-lang.org/rustc-builds/{commit}/rustc-nightly-src.tar.xz" if self.use_vendored_sources: vendor_dir = os.path.join(self.rust_root, "vendor") if not os.path.exists(vendor_dir): @@ -1306,7 +1331,7 @@ def bootstrap(args): build.check_vendored_status() if not os.path.exists(build.build_dir): - os.makedirs(build.build_dir) + os.makedirs(os.path.realpath(build.build_dir)) # Fetch/build the bootstrap build.download_toolchain() diff --git a/src/bootstrap/defaults/bootstrap.dist.toml b/src/bootstrap/defaults/bootstrap.dist.toml index 7b381b416ca8a..f0cb34eb45856 100644 --- a/src/bootstrap/defaults/bootstrap.dist.toml +++ b/src/bootstrap/defaults/bootstrap.dist.toml @@ -7,6 +7,8 @@ test-stage = 2 doc-stage = 2 # When compiling from source, you usually want all tools. extended = true +# Use libtest built from the source tree instead of the precompiled one from stage 0. +compiletest-use-stage0-libtest = false # Most users installing from source want to build all parts of the project from source. [llvm] @@ -15,7 +17,7 @@ download-ci-llvm = false [rust] # We have several defaults in bootstrap that depend on whether the channel is `dev` (e.g. `omit-git-hash` and `download-ci-llvm`). # Make sure they don't get set when installing from source. -channel = "nightly" +channel = "auto-detect" # Never download a rustc, distributions must build a fresh compiler. download-rustc = false lld = true diff --git a/src/bootstrap/download-ci-llvm-stamp b/src/bootstrap/download-ci-llvm-stamp index e157ff233bbf7..b70d452b427c8 100644 --- a/src/bootstrap/download-ci-llvm-stamp +++ b/src/bootstrap/download-ci-llvm-stamp @@ -1,4 +1,4 @@ Change this file to make users of the `download-ci-llvm` configuration download a new version of LLVM from CI, even if the LLVM submodule hasn’t changed. -Last change is for: https://github.com/rust-lang/rust/pull/138784 +Last change is for: https://github.com/rust-lang/rust/pull/139931 diff --git a/src/bootstrap/mk/Makefile.in b/src/bootstrap/mk/Makefile.in index 6cb0b19d7632a..08a288170fa30 100644 --- a/src/bootstrap/mk/Makefile.in +++ b/src/bootstrap/mk/Makefile.in @@ -73,12 +73,12 @@ check-aux: $(BOOTSTRAP) miri --stage 2 library/std \ $(BOOTSTRAP_ARGS) \ --no-doc -- \ - --skip fs:: --skip net:: --skip process:: --skip sys::pal:: + --skip fs:: --skip net:: --skip process:: --skip sys::fd:: --skip sys::pal:: $(Q)MIRIFLAGS="-Zmiri-disable-isolation" \ $(BOOTSTRAP) miri --stage 2 library/std \ $(BOOTSTRAP_ARGS) \ --doc -- \ - --skip fs:: --skip net:: --skip process:: --skip sys::pal:: + --skip fs:: --skip net:: --skip process:: --skip sys::fd:: --skip sys::pal:: # Also test some very target-specific modules on other targets # (making sure to cover an i686 target as well). $(Q)MIRIFLAGS="-Zmiri-disable-isolation" BOOTSTRAP_SKIP_TARGET_SANITY=1 \ @@ -120,10 +120,6 @@ ci-msvc: ci-msvc-py ci-msvc-ps1 # Set of tests that should represent half of the time of the test suite. # Used to split tests across multiple CI runners. # Test both x and bootstrap entrypoints. -ci-mingw-x-1: - $(Q)$(CFG_SRC_DIR)/x test --stage 2 $(SKIP_COMPILER) $(TEST_SET2) -ci-mingw-x-2: - $(Q)$(CFG_SRC_DIR)/x test --stage 2 $(SKIP_SRC) $(TEST_SET2) ci-mingw-x: $(Q)$(CFG_SRC_DIR)/x test --stage 2 $(TEST_SET1) ci-mingw-bootstrap: diff --git a/src/bootstrap/src/bin/main.rs b/src/bootstrap/src/bin/main.rs index cbfe00a757ce4..833f80279517a 100644 --- a/src/bootstrap/src/bin/main.rs +++ b/src/bootstrap/src/bin/main.rs @@ -163,7 +163,7 @@ fn check_version(config: &Config) -> Option { msg.push_str("WARNING: The `change-id` is missing in the `bootstrap.toml`. This means that you will not be able to track the major changes made to the bootstrap configurations.\n"); msg.push_str("NOTE: to silence this warning, "); msg.push_str(&format!( - "add `change-id = {latest_change_id}` or change-id = \"ignore\" at the top of `bootstrap.toml`" + "add `change-id = {latest_change_id}` or `change-id = \"ignore\"` at the top of `bootstrap.toml`" )); return Some(msg); } @@ -195,7 +195,7 @@ fn check_version(config: &Config) -> Option { msg.push_str("NOTE: to silence this warning, "); msg.push_str(&format!( - "update `bootstrap.toml` to use `change-id = {latest_change_id}` or change-id = \"ignore\" instead" + "update `bootstrap.toml` to use `change-id = {latest_change_id}` or `change-id = \"ignore\"` instead" )); if io::stdout().is_terminal() { diff --git a/src/bootstrap/src/core/build_steps/check.rs b/src/bootstrap/src/core/build_steps/check.rs index e67bc62a60352..fa848c492b4d2 100644 --- a/src/bootstrap/src/core/build_steps/check.rs +++ b/src/bootstrap/src/core/build_steps/check.rs @@ -3,7 +3,7 @@ use crate::core::build_steps::compile::{ add_to_sysroot, run_cargo, rustc_cargo, rustc_cargo_env, std_cargo, std_crates_for_run_make, }; -use crate::core::build_steps::tool::{SourceType, prepare_tool_cargo}; +use crate::core::build_steps::tool::{COMPILETEST_ALLOW_FEATURES, SourceType, prepare_tool_cargo}; use crate::core::builder::{ self, Alias, Builder, Kind, RunConfig, ShouldRun, Step, crate_description, }; @@ -369,6 +369,69 @@ impl Step for RustAnalyzer { } } +/// Compiletest is implicitly "checked" when it gets built in order to run tests, +/// so this is mainly for people working on compiletest to run locally. +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct Compiletest { + pub target: TargetSelection, +} + +impl Step for Compiletest { + type Output = (); + const ONLY_HOSTS: bool = true; + const DEFAULT: bool = false; + + fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { + run.path("src/tools/compiletest") + } + + fn make_run(run: RunConfig<'_>) { + run.builder.ensure(Compiletest { target: run.target }); + } + + fn run(self, builder: &Builder<'_>) { + let mode = if builder.config.compiletest_use_stage0_libtest { + Mode::ToolBootstrap + } else { + Mode::ToolStd + }; + + let compiler = builder.compiler( + if mode == Mode::ToolBootstrap { 0 } else { builder.top_stage }, + builder.config.build, + ); + + if mode != Mode::ToolBootstrap { + builder.ensure(Rustc::new(self.target, builder)); + } + + let mut cargo = prepare_tool_cargo( + builder, + compiler, + mode, + self.target, + builder.kind, + "src/tools/compiletest", + SourceType::InTree, + &[], + ); + + cargo.allow_features(COMPILETEST_ALLOW_FEATURES); + + // For ./x.py clippy, don't run with --all-targets because + // linting tests and benchmarks can produce very noisy results + if builder.kind != Kind::Clippy { + cargo.arg("--all-targets"); + } + + let stamp = BuildStamp::new(&builder.cargo_out(compiler, mode, self.target)) + .with_prefix("compiletest-check"); + + let _guard = builder.msg_check("compiletest artifacts", self.target); + run_cargo(builder, cargo, builder.config.free_args.clone(), &stamp, vec![], true, false); + } +} + macro_rules! tool_check_step { ( $name:ident { @@ -465,6 +528,69 @@ tool_check_step!(Bootstrap { path: "src/bootstrap", default: false }); // check to make it easier to work on. tool_check_step!(RunMakeSupport { path: "src/tools/run-make-support", default: false }); -// Compiletest is implicitly "checked" when it gets built in order to run tests, -// so this is mainly for people working on compiletest to run locally. -tool_check_step!(Compiletest { path: "src/tools/compiletest", default: false }); +/// Check step for the `coverage-dump` bootstrap tool. The coverage-dump tool +/// is used internally by coverage tests. +/// +/// FIXME(Zalathar): This is temporarily separate from the other tool check +/// steps so that it can use the stage 0 compiler instead of `top_stage`, +/// without introducing conflicts with the stage 0 redesign (#119899). +/// +/// After the stage 0 redesign lands, we can look into using the stage 0 +/// compiler to check all bootstrap tools (#139170). +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub(crate) struct CoverageDump; + +impl CoverageDump { + const PATH: &str = "src/tools/coverage-dump"; +} + +impl Step for CoverageDump { + type Output = (); + + /// Most contributors won't care about coverage-dump, so don't make their + /// check builds slower unless they opt in and check it explicitly. + const DEFAULT: bool = false; + const ONLY_HOSTS: bool = true; + + fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { + run.path(Self::PATH) + } + + fn make_run(run: RunConfig<'_>) { + run.builder.ensure(Self {}); + } + + fn run(self, builder: &Builder<'_>) -> Self::Output { + // Make sure we haven't forgotten any fields, if there are any. + let Self {} = self; + let display_name = "coverage-dump"; + let host = builder.config.build; + let target = host; + let mode = Mode::ToolBootstrap; + + let compiler = builder.compiler(0, host); + let cargo = prepare_tool_cargo( + builder, + compiler, + mode, + target, + builder.kind, + Self::PATH, + SourceType::InTree, + &[], + ); + + let stamp = BuildStamp::new(&builder.cargo_out(compiler, mode, target)) + .with_prefix(&format!("{display_name}-check")); + + let _guard = builder.msg_tool( + builder.kind, + mode, + display_name, + compiler.stage, + &compiler.host, + &target, + ); + run_cargo(builder, cargo, builder.config.free_args.clone(), &stamp, vec![], true, false); + } +} diff --git a/src/bootstrap/src/core/build_steps/compile.rs b/src/bootstrap/src/core/build_steps/compile.rs index 18b5d4426b1ee..2e5865e509695 100644 --- a/src/bootstrap/src/core/build_steps/compile.rs +++ b/src/bootstrap/src/core/build_steps/compile.rs @@ -111,14 +111,10 @@ impl Step for Std { // the `rust.download-rustc=true` option. let force_recompile = builder.rust_info().is_managed_git_subrepository() && builder.download_rustc() - && builder.config.last_modified_commit(&["library"], "download-rustc", true).is_none(); + && builder.config.has_changes_from_upstream(&["library"]); trace!("is managed git repo: {}", builder.rust_info().is_managed_git_subrepository()); trace!("download_rustc: {}", builder.download_rustc()); - trace!( - "last modified commit: {:?}", - builder.config.last_modified_commit(&["library"], "download-rustc", true) - ); trace!(force_recompile); run.builder.ensure(Std { @@ -155,7 +151,7 @@ impl Step for Std { // When using `download-rustc`, we already have artifacts for the host available. Don't // recompile them. - if builder.download_rustc() && builder.is_builder_target(target) + if builder.download_rustc() && builder.config.is_host_target(target) // NOTE: the beta compiler may generate different artifacts than the downloaded compiler, so // its artifacts can't be reused. && compiler.stage != 0 @@ -229,7 +225,7 @@ impl Step for Std { // The LLD wrappers and `rust-lld` are self-contained linking components that can be // necessary to link the stdlib on some targets. We'll also need to copy these binaries to // the `stage0-sysroot` to ensure the linker is found when bootstrapping on such a target. - if compiler.stage == 0 && builder.is_builder_target(compiler.host) { + if compiler.stage == 0 && builder.config.is_host_target(compiler.host) { trace!( "(build == host) copying linking components to `stage0-sysroot` for bootstrapping" ); @@ -1194,8 +1190,7 @@ pub fn rustc_cargo( let enzyme_dir = builder.build.out.join(arch).join("enzyme").join("lib"); cargo.rustflag("-L").rustflag(enzyme_dir.to_str().expect("Invalid path")); - if !builder.config.dry_run() { - let llvm_config = builder.llvm_config(builder.config.build).unwrap(); + if let Some(llvm_config) = builder.llvm_config(builder.config.build) { let llvm_version_major = llvm::get_llvm_version_major(builder, &llvm_config); cargo.rustflag("-l").rustflag(&format!("Enzyme-{llvm_version_major}")); } @@ -1374,7 +1369,7 @@ pub fn rustc_cargo_env( /// Pass down configuration from the LLVM build into the build of /// rustc_llvm and rustc_codegen_llvm. fn rustc_llvm_env(builder: &Builder<'_>, cargo: &mut Cargo, target: TargetSelection) { - if builder.is_rust_llvm(target) { + if builder.config.is_rust_llvm(target) { cargo.env("LLVM_RUSTLLVM", "1"); } if builder.config.llvm_enzyme { @@ -2182,7 +2177,7 @@ impl Step for Assemble { debug!("copying codegen backends to sysroot"); copy_codegen_backends_to_sysroot(builder, build_compiler, target_compiler); - if builder.config.lld_enabled { + if builder.config.lld_enabled && !builder.config.is_system_llvm(target_compiler.host) { builder.ensure(crate::core::build_steps::tool::LldWrapper { build_compiler, target_compiler, @@ -2398,7 +2393,9 @@ pub fn run_cargo( // Ok now we need to actually find all the files listed in `toplevel`. We've // got a list of prefix/extensions and we basically just need to find the // most recent file in the `deps` folder corresponding to each one. - let contents = t!(target_deps_dir.read_dir()) + let contents = target_deps_dir + .read_dir() + .unwrap_or_else(|e| panic!("Couldn't read {}: {}", target_deps_dir.display(), e)) .map(|e| t!(e)) .map(|e| (e.path(), e.file_name().into_string().unwrap(), t!(e.metadata()))) .collect::>(); @@ -2530,7 +2527,9 @@ pub fn strip_debug(builder: &Builder<'_>, target: TargetSelection, path: &Path) // FIXME: to make things simpler for now, limit this to the host and target where we know // `strip -g` is both available and will fix the issue, i.e. on a x64 linux host that is not // cross-compiling. Expand this to other appropriate targets in the future. - if target != "x86_64-unknown-linux-gnu" || !builder.is_builder_target(target) || !path.exists() + if target != "x86_64-unknown-linux-gnu" + || !builder.config.is_host_target(target) + || !path.exists() { return; } diff --git a/src/bootstrap/src/core/build_steps/dist.rs b/src/bootstrap/src/core/build_steps/dist.rs index 83f71aeed7204..3c412683b9492 100644 --- a/src/bootstrap/src/core/build_steps/dist.rs +++ b/src/bootstrap/src/core/build_steps/dist.rs @@ -421,13 +421,13 @@ impl Step for Rustc { builder.install(&rustdoc, &image.join("bin"), FileType::Executable); } + let ra_proc_macro_srv_compiler = + builder.compiler_for(compiler.stage, builder.config.build, compiler.host); + builder.ensure(compile::Rustc::new(ra_proc_macro_srv_compiler, compiler.host)); + if let Some(ra_proc_macro_srv) = builder.ensure_if_default( tool::RustAnalyzerProcMacroSrv { - compiler: builder.compiler_for( - compiler.stage, - builder.config.build, - compiler.host, - ), + compiler: ra_proc_macro_srv_compiler, target: compiler.host, }, builder.kind, @@ -612,7 +612,7 @@ impl Step for DebuggerScripts { fn skip_host_target_lib(builder: &Builder<'_>, compiler: Compiler) -> bool { // The only true set of target libraries came from the build triple, so // let's reduce redundant work by only producing archives from that host. - if !builder.is_builder_target(compiler.host) { + if !builder.config.is_host_target(compiler.host) { builder.info("\tskipping, not a build host"); true } else { @@ -671,7 +671,8 @@ fn copy_target_libs( &self_contained_dst.join(path.file_name().unwrap()), FileType::NativeLibrary, ); - } else if dependency_type == DependencyType::Target || builder.is_builder_target(target) { + } else if dependency_type == DependencyType::Target || builder.config.is_host_target(target) + { builder.copy_link(&path, &dst.join(path.file_name().unwrap()), FileType::NativeLibrary); } } @@ -824,7 +825,7 @@ impl Step for Analysis { fn run(self, builder: &Builder<'_>) -> Option { let compiler = self.compiler; let target = self.target; - if !builder.is_builder_target(compiler.host) { + if !builder.config.is_host_target(compiler.host) { return None; } @@ -1177,6 +1178,8 @@ impl Step for Cargo { let compiler = self.compiler; let target = self.target; + builder.ensure(compile::Rustc::new(compiler, target)); + let cargo = builder.ensure(tool::Cargo { compiler, target }); let src = builder.src.join("src/tools/cargo"); let etc = src.join("src/etc"); @@ -1231,6 +1234,8 @@ impl Step for RustAnalyzer { let compiler = self.compiler; let target = self.target; + builder.ensure(compile::Rustc::new(compiler, target)); + let rust_analyzer = builder.ensure(tool::RustAnalyzer { compiler, target }); let mut tarball = Tarball::new(builder, "rust-analyzer", &target.triple); @@ -1273,6 +1278,8 @@ impl Step for Clippy { let compiler = self.compiler; let target = self.target; + builder.ensure(compile::Rustc::new(compiler, target)); + // Prepare the image directory // We expect clippy to build, because we've exited this step above if tool // state for clippy isn't testing. @@ -1323,9 +1330,12 @@ impl Step for Miri { if !builder.build.unstable_features() { return None; } + let compiler = self.compiler; let target = self.target; + builder.ensure(compile::Rustc::new(compiler, target)); + let miri = builder.ensure(tool::Miri { compiler, target }); let cargomiri = builder.ensure(tool::CargoMiri { compiler, target }); @@ -1462,6 +1472,8 @@ impl Step for Rustfmt { let compiler = self.compiler; let target = self.target; + builder.ensure(compile::Rustc::new(compiler, target)); + let rustfmt = builder.ensure(tool::Rustfmt { compiler, target }); let cargofmt = builder.ensure(tool::Cargofmt { compiler, target }); let mut tarball = Tarball::new(builder, "rustfmt", &target.triple); @@ -2118,7 +2130,7 @@ fn maybe_install_llvm( // // If the LLVM is coming from ourselves (just from CI) though, we // still want to install it, as it otherwise won't be available. - if builder.is_system_llvm(target) { + if builder.config.is_system_llvm(target) { trace!("system LLVM requested, no install"); return false; } @@ -2327,6 +2339,8 @@ impl Step for LlvmBitcodeLinker { let compiler = self.compiler; let target = self.target; + builder.ensure(compile::Rustc::new(compiler, target)); + let llbc_linker = builder.ensure(tool::LlvmBitcodeLinker { compiler, target, extra_features: vec![] }); diff --git a/src/bootstrap/src/core/build_steps/format.rs b/src/bootstrap/src/core/build_steps/format.rs index 7aa5cb2b6e5e1..93900a9043e7e 100644 --- a/src/bootstrap/src/core/build_steps/format.rs +++ b/src/bootstrap/src/core/build_steps/format.rs @@ -9,7 +9,7 @@ use std::sync::mpsc::SyncSender; use build_helper::git::get_git_modified_files; use ignore::WalkBuilder; -use crate::core::builder::Builder; +use crate::core::builder::{Builder, Kind}; use crate::utils::build_stamp::BuildStamp; use crate::utils::exec::command; use crate::utils::helpers::{self, t}; @@ -31,7 +31,7 @@ fn rustfmt( // Avoid the submodule config paths from coming into play. We only allow a single global config // for the workspace for now. cmd.arg("--config-path").arg(src.canonicalize().unwrap()); - cmd.arg("--edition").arg("2021"); + cmd.arg("--edition").arg("2024"); cmd.arg("--unstable-features"); cmd.arg("--skip-children"); if check { @@ -81,14 +81,19 @@ fn update_rustfmt_version(build: &Builder<'_>) { let Some((version, stamp_file)) = get_rustfmt_version(build) else { return; }; - t!(std::fs::write(stamp_file.path(), version)) + + t!(stamp_file.add_stamp(version).write()); } -/// Returns the Rust files modified between the `merge-base` of HEAD and -/// rust-lang/master and what is now on the disk. Does not include removed files. +/// Returns the Rust files modified between the last merge commit and what is now on the disk. +/// Does not include removed files. /// /// Returns `None` if all files should be formatted. fn get_modified_rs_files(build: &Builder<'_>) -> Result>, String> { + // In CI `get_git_modified_files` returns something different to normal environment. + // This shouldn't be called in CI anyway. + assert!(!build.config.is_running_on_ci); + if !verify_rustfmt_version(build) { return Ok(None); } @@ -103,7 +108,7 @@ struct RustfmtConfig { // Prints output describing a collection of paths, with lines such as "formatted modified file // foo/bar/baz" or "skipped 20 untracked files". -fn print_paths(build: &Builder<'_>, verb: &str, adjective: Option<&str>, paths: &[String]) { +fn print_paths(verb: &str, adjective: Option<&str>, paths: &[String]) { let len = paths.len(); let adjective = if let Some(adjective) = adjective { format!("{adjective} ") } else { String::new() }; @@ -114,12 +119,15 @@ fn print_paths(build: &Builder<'_>, verb: &str, adjective: Option<&str>, paths: } else { println!("fmt: {verb} {len} {adjective}files"); } - if len > 1000 && !build.config.is_running_on_ci { - println!("hint: if this number seems too high, try running `git fetch origin master`"); - } } pub fn format(build: &Builder<'_>, check: bool, all: bool, paths: &[PathBuf]) { + if build.kind == Kind::Format && build.top_stage != 0 { + eprintln!("ERROR: `x fmt` only supports stage 0."); + eprintln!("HELP: Use `x run rustfmt` to run in-tree rustfmt."); + crate::exit!(1); + } + if !paths.is_empty() { eprintln!( "fmt error: path arguments are no longer accepted; use `--all` to format everything" @@ -189,7 +197,7 @@ pub fn format(build: &Builder<'_>, check: bool, all: bool, paths: &[PathBuf]) { ) .map(|x| x.to_string()) .collect(); - print_paths(build, "skipped", Some("untracked"), &untracked_paths); + print_paths("skipped", Some("untracked"), &untracked_paths); for untracked_path in untracked_paths { // The leading `/` makes it an exact match against the @@ -212,7 +220,13 @@ pub fn format(build: &Builder<'_>, check: bool, all: bool, paths: &[PathBuf]) { override_builder.add(&format!("/{file}")).expect(&file); } } - Ok(None) => {} + Ok(None) => { + // NOTE: `Ok(None)` signifies that we need to format all files. + // The tricky part here is that if `override_builder` isn't given any white + // list files (i.e. files to be formatted, added without leading `!`), it + // will instead look for *all* files. So, by doing nothing here, we are + // actually making it so we format all files. + } Err(err) => { eprintln!("fmt warning: Something went wrong running git commands:"); eprintln!("fmt warning: {err}"); @@ -318,7 +332,7 @@ pub fn format(build: &Builder<'_>, check: bool, all: bool, paths: &[PathBuf]) { }); let mut paths = formatted_paths.into_inner().unwrap(); paths.sort(); - print_paths(build, if check { "checked" } else { "formatted" }, adjective, &paths); + print_paths(if check { "checked" } else { "formatted" }, adjective, &paths); drop(tx); @@ -328,7 +342,10 @@ pub fn format(build: &Builder<'_>, check: bool, all: bool, paths: &[PathBuf]) { crate::exit!(1); } - if !check { - update_rustfmt_version(build); - } + // Update `build/.rustfmt-stamp`, allowing this code to ignore files which have not been changed + // since last merge. + // + // NOTE: Because of the exit above, this is only reachable if formatting / format checking + // succeeded. So we are not commiting the version if formatting was not good. + update_rustfmt_version(build); } diff --git a/src/bootstrap/src/core/build_steps/gcc.rs b/src/bootstrap/src/core/build_steps/gcc.rs index 48bb5cb8e8761..ee8bd8286daec 100644 --- a/src/bootstrap/src/core/build_steps/gcc.rs +++ b/src/bootstrap/src/core/build_steps/gcc.rs @@ -96,6 +96,8 @@ pub enum GccBuildStatus { /// Returns a path to the libgccjit.so file. #[cfg(not(test))] fn try_download_gcc(builder: &Builder<'_>, target: TargetSelection) -> Option { + use build_helper::git::PathFreshness; + // Try to download GCC from CI if configured and available if !matches!(builder.config.gcc_ci_mode, crate::core::config::GccCiMode::DownloadFromCi) { return None; @@ -104,18 +106,40 @@ fn try_download_gcc(builder: &Builder<'_>, target: TargetSelection) -> Option { + // Download from upstream CI + let root = ci_gcc_root(&builder.config); + let gcc_stamp = BuildStamp::new(&root).with_prefix("gcc").add_stamp(&upstream); + if !gcc_stamp.is_up_to_date() && !builder.config.dry_run() { + builder.config.download_ci_gcc(&upstream, &root); + t!(gcc_stamp.write()); + } + + let libgccjit = root.join("lib").join("libgccjit.so"); + create_lib_alias(builder, &libgccjit); + Some(libgccjit) + } + PathFreshness::HasLocalModifications { .. } => { + // We have local modifications, rebuild GCC. + eprintln!("Found local GCC modifications, GCC will *not* be downloaded"); + None + } + PathFreshness::MissingUpstream => { + eprintln!("error: could not find commit hash for downloading GCC"); + eprintln!("HELP: maybe your repository history is too shallow?"); + eprintln!("HELP: consider disabling `download-ci-gcc`"); + eprintln!("HELP: or fetch enough history to include one upstream commit"); + None + } } - - let libgccjit = root.join("lib").join("libgccjit.so"); - create_lib_alias(builder, &libgccjit); - Some(libgccjit) } #[cfg(test)] @@ -264,31 +288,16 @@ fn ci_gcc_root(config: &crate::Config) -> PathBuf { config.out.join(config.build).join("ci-gcc") } -/// This retrieves the GCC sha we *want* to use, according to git history. +/// Detect whether GCC sources have been modified locally or not. #[cfg(not(test))] -fn detect_gcc_sha(config: &crate::Config, is_git: bool) -> String { - use build_helper::git::get_closest_merge_commit; - - let gcc_sha = if is_git { - get_closest_merge_commit( - Some(&config.src), - &config.git_config(), - &["src/gcc", "src/bootstrap/download-ci-gcc-stamp"], - ) - .unwrap() +fn detect_gcc_freshness(config: &crate::Config, is_git: bool) -> build_helper::git::PathFreshness { + use build_helper::git::PathFreshness; + + if is_git { + config.check_path_modifications(&["src/gcc", "src/bootstrap/download-ci-gcc-stamp"]) } else if let Some(info) = crate::utils::channel::read_commit_info_file(&config.src) { - info.sha.trim().to_owned() + PathFreshness::LastModifiedUpstream { upstream: info.sha.trim().to_owned() } } else { - "".to_owned() - }; - - if gcc_sha.is_empty() { - eprintln!("error: could not find commit hash for downloading GCC"); - eprintln!("HELP: maybe your repository history is too shallow?"); - eprintln!("HELP: consider disabling `download-ci-gcc`"); - eprintln!("HELP: or fetch enough history to include one upstream commit"); - panic!(); + PathFreshness::MissingUpstream } - - gcc_sha } diff --git a/src/bootstrap/src/core/build_steps/llvm.rs b/src/bootstrap/src/core/build_steps/llvm.rs index e21804fa3c07f..86af956535e5e 100644 --- a/src/bootstrap/src/core/build_steps/llvm.rs +++ b/src/bootstrap/src/core/build_steps/llvm.rs @@ -14,7 +14,7 @@ use std::path::{Path, PathBuf}; use std::sync::OnceLock; use std::{env, fs}; -use build_helper::git::get_closest_merge_commit; +use build_helper::git::PathFreshness; #[cfg(feature = "tracing")] use tracing::instrument; @@ -181,26 +181,15 @@ pub const LLVM_INVALIDATION_PATHS: &[&str] = &[ "src/version", ]; -/// This retrieves the LLVM sha we *want* to use, according to git history. -pub(crate) fn detect_llvm_sha(config: &Config, is_git: bool) -> String { - let llvm_sha = if is_git { - get_closest_merge_commit(Some(&config.src), &config.git_config(), LLVM_INVALIDATION_PATHS) - .unwrap() +/// Detect whether LLVM sources have been modified locally or not. +pub(crate) fn detect_llvm_freshness(config: &Config, is_git: bool) -> PathFreshness { + if is_git { + config.check_path_modifications(LLVM_INVALIDATION_PATHS) } else if let Some(info) = crate::utils::channel::read_commit_info_file(&config.src) { - info.sha.trim().to_owned() + PathFreshness::LastModifiedUpstream { upstream: info.sha.trim().to_owned() } } else { - "".to_owned() - }; - - if llvm_sha.is_empty() { - eprintln!("error: could not find commit hash for downloading LLVM"); - eprintln!("HELP: maybe your repository history is too shallow?"); - eprintln!("HELP: consider disabling `download-ci-llvm`"); - eprintln!("HELP: or fetch enough history to include one upstream commit"); - panic!(); + PathFreshness::MissingUpstream } - - llvm_sha } /// Returns whether the CI-found LLVM is currently usable. @@ -346,8 +335,6 @@ impl Step for Llvm { .define("LLVM_INCLUDE_DOCS", "OFF") .define("LLVM_INCLUDE_BENCHMARKS", "OFF") .define("LLVM_INCLUDE_TESTS", enable_tests) - // FIXME: remove this when minimal llvm is 19 - .define("LLVM_ENABLE_TERMINFO", "OFF") .define("LLVM_ENABLE_LIBEDIT", "OFF") .define("LLVM_ENABLE_BINDINGS", "OFF") .define("LLVM_ENABLE_Z3_SOLVER", "OFF") @@ -372,8 +359,8 @@ impl Step for Llvm { cfg.define("LLVM_PROFDATA_FILE", path); } - // Libraries for ELF section compression. - if !target.is_windows() { + // Libraries for ELF section compression and profraw files merging. + if !target.is_msvc() { cfg.define("LLVM_ENABLE_ZLIB", "ON"); } else { cfg.define("LLVM_ENABLE_ZLIB", "OFF"); @@ -487,7 +474,7 @@ impl Step for Llvm { } // https://llvm.org/docs/HowToCrossCompileLLVM.html - if !builder.is_builder_target(target) { + if !builder.config.is_host_target(target) { let LlvmResult { llvm_config, .. } = builder.ensure(Llvm { target: builder.config.build }); if !builder.config.dry_run() { @@ -610,11 +597,11 @@ fn check_llvm_version(builder: &Builder<'_>, llvm_config: &Path) { let version = get_llvm_version(builder, llvm_config); let mut parts = version.split('.').take(2).filter_map(|s| s.parse::().ok()); if let (Some(major), Some(_minor)) = (parts.next(), parts.next()) { - if major >= 18 { + if major >= 19 { return; } } - panic!("\n\nbad LLVM version: {version}, need >=18\n\n") + panic!("\n\nbad LLVM version: {version}, need >=19\n\n") } fn configure_cmake( @@ -639,7 +626,7 @@ fn configure_cmake( } cfg.target(&target.triple).host(&builder.config.build.triple); - if !builder.is_builder_target(target) { + if !builder.config.is_host_target(target) { cfg.define("CMAKE_CROSSCOMPILING", "True"); // NOTE: Ideally, we wouldn't have to do this, and `cmake-rs` would just handle it for us. @@ -1100,7 +1087,7 @@ impl Step for Lld { .define("LLVM_CMAKE_DIR", llvm_cmake_dir) .define("LLVM_INCLUDE_TESTS", "OFF"); - if !builder.is_builder_target(target) { + if !builder.config.is_host_target(target) { // Use the host llvm-tblgen binary. cfg.define( "LLVM_TABLEGEN_EXE", diff --git a/src/bootstrap/src/core/build_steps/run.rs b/src/bootstrap/src/core/build_steps/run.rs index 5cacd5b991472..0bba441c3fa26 100644 --- a/src/bootstrap/src/core/build_steps/run.rs +++ b/src/bootstrap/src/core/build_steps/run.rs @@ -392,3 +392,84 @@ impl Step for CyclicStep { builder.ensure(CyclicStep { n: self.n.saturating_sub(1) }) } } + +/// Step to manually run the coverage-dump tool (`./x run coverage-dump`). +/// +/// The coverage-dump tool is an internal detail of coverage tests, so this run +/// step is only needed when testing coverage-dump manually. +#[derive(Debug, PartialOrd, Ord, Clone, Hash, PartialEq, Eq)] +pub struct CoverageDump; + +impl Step for CoverageDump { + type Output = (); + + const DEFAULT: bool = false; + const ONLY_HOSTS: bool = true; + + fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { + run.path("src/tools/coverage-dump") + } + + fn make_run(run: RunConfig<'_>) { + run.builder.ensure(Self {}); + } + + fn run(self, builder: &Builder<'_>) { + let mut cmd = builder.tool_cmd(Tool::CoverageDump); + cmd.args(&builder.config.free_args); + cmd.run(builder); + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct Rustfmt; + +impl Step for Rustfmt { + type Output = (); + const ONLY_HOSTS: bool = true; + + fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { + run.path("src/tools/rustfmt") + } + + fn make_run(run: RunConfig<'_>) { + run.builder.ensure(Rustfmt); + } + + fn run(self, builder: &Builder<'_>) { + let host = builder.build.build; + + // `x run` uses stage 0 by default but rustfmt does not work well with stage 0. + // Change the stage to 1 if it's not set explicitly. + let stage = if builder.config.is_explicit_stage() || builder.top_stage >= 1 { + builder.top_stage + } else { + 1 + }; + + if stage == 0 { + eprintln!("rustfmt cannot be run at stage 0"); + eprintln!("HELP: Use `x fmt` to use stage 0 rustfmt."); + std::process::exit(1); + } + + let compiler = builder.compiler(stage, host); + let rustfmt_build = builder.ensure(tool::Rustfmt { compiler, target: host }); + + let mut rustfmt = tool::prepare_tool_cargo( + builder, + rustfmt_build.build_compiler, + Mode::ToolRustc, + host, + Kind::Run, + "src/tools/rustfmt", + SourceType::InTree, + &[], + ); + + rustfmt.args(["--bin", "rustfmt", "--"]); + rustfmt.args(builder.config.args()); + + rustfmt.into_cmd().run(builder); + } +} diff --git a/src/bootstrap/src/core/build_steps/setup.rs b/src/bootstrap/src/core/build_steps/setup.rs index 80d92135dd378..9d07babe5196b 100644 --- a/src/bootstrap/src/core/build_steps/setup.rs +++ b/src/bootstrap/src/core/build_steps/setup.rs @@ -584,6 +584,7 @@ Select which editor you would like to set up [default: None]: "; "51068d4747a13732440d1a8b8f432603badb1864fa431d83d0fd4f8fa57039e0", "d29af4d949bbe2371eac928a3c31cf9496b1701aa1c45f11cd6c759865ad5c45", "b5dd299b93dca3ceeb9b335f929293cb3d4bf4977866fbe7ceeac2a8a9f99088", + "631c837b0e98ae35fd48b0e5f743b1ca60adadf2d0a2b23566ba25df372cf1a9", ], EditorKind::Helix => &[ "2d3069b8cf1b977e5d4023965eb6199597755e6c96c185ed5f2854f98b83d233", @@ -602,10 +603,12 @@ Select which editor you would like to set up [default: None]: "; "4eecb58a2168b252077369da446c30ed0e658301efe69691979d1ef0443928f4", "c394386e6133bbf29ffd32c8af0bb3d4aac354cba9ee051f29612aa9350f8f8d", "e53e9129ca5ee5dcbd6ec8b68c2d87376474eb154992deba3c6d9ab1703e0717", + "f954316090936c7e590c253ca9d524008375882fa13c5b41d7e2547a896ff893", ], EditorKind::Zed => &[ "bbce727c269d1bd0c98afef4d612eb4ce27aea3c3a8968c5f10b31affbc40b6c", "a5380cf5dd9328731aecc5dfb240d16dac46ed272126b9728006151ef42f5909", + "2e96bf0d443852b12f016c8fc9840ab3d0a2b4fe0b0fb3a157e8d74d5e7e0e26", ], } } @@ -680,7 +683,7 @@ impl Step for Editor { match EditorKind::prompt_user() { Ok(editor_kind) => { if let Some(editor_kind) = editor_kind { - while !t!(create_editor_settings_maybe(config, editor_kind.clone())) {} + while !t!(create_editor_settings_maybe(config, &editor_kind)) {} } else { println!("Ok, skipping editor setup!"); } @@ -692,7 +695,7 @@ impl Step for Editor { /// Create the recommended editor LSP config file for rustc development, or just print it /// If this method should be re-called, it returns `false`. -fn create_editor_settings_maybe(config: &Config, editor: EditorKind) -> io::Result { +fn create_editor_settings_maybe(config: &Config, editor: &EditorKind) -> io::Result { let hashes = editor.hashes(); let (current_hash, historical_hashes) = hashes.split_last().unwrap(); let settings_path = editor.settings_path(config); @@ -749,7 +752,7 @@ fn create_editor_settings_maybe(config: &Config, editor: EditorKind) -> io::Resu // exists but user modified, back it up Some(false) => { // exists and is not current version or outdated, so back it up - let backup = settings_path.clone().with_extension(editor.backup_extension()); + let backup = settings_path.with_extension(editor.backup_extension()); eprintln!( "WARNING: copying `{}` to `{}`", settings_path.file_name().unwrap().to_str().unwrap(), diff --git a/src/bootstrap/src/core/build_steps/suggest.rs b/src/bootstrap/src/core/build_steps/suggest.rs index 6a6731cafc54a..fd4918961adba 100644 --- a/src/bootstrap/src/core/build_steps/suggest.rs +++ b/src/bootstrap/src/core/build_steps/suggest.rs @@ -13,7 +13,6 @@ pub fn suggest(builder: &Builder<'_>, run: bool) { let git_config = builder.config.git_config(); let suggestions = builder .tool_cmd(Tool::SuggestTests) - .env("SUGGEST_TESTS_GIT_REPOSITORY", git_config.git_repository) .env("SUGGEST_TESTS_NIGHTLY_BRANCH", git_config.nightly_branch) .env("SUGGEST_TESTS_MERGE_COMMIT_EMAIL", git_config.git_merge_commit_email) .run_capture_stdout(builder) diff --git a/src/bootstrap/src/core/build_steps/test.rs b/src/bootstrap/src/core/build_steps/test.rs index 81f6b473c4593..29fb576f5740a 100644 --- a/src/bootstrap/src/core/build_steps/test.rs +++ b/src/bootstrap/src/core/build_steps/test.rs @@ -15,7 +15,7 @@ use crate::core::build_steps::doc::DocumentationFormat; use crate::core::build_steps::gcc::{Gcc, add_cg_gcc_cargo_flags}; use crate::core::build_steps::llvm::get_llvm_version; use crate::core::build_steps::synthetic_targets::MirOptPanicAbortSyntheticTarget; -use crate::core::build_steps::tool::{self, SourceType, Tool}; +use crate::core::build_steps::tool::{self, COMPILETEST_ALLOW_FEATURES, SourceType, Tool}; use crate::core::build_steps::toolstate::ToolState; use crate::core::build_steps::{compile, dist, llvm}; use crate::core::builder::{ @@ -54,6 +54,7 @@ impl Step for CrateBootstrap { run.path("src/tools/jsondoclint") .path("src/tools/suggest-tests") .path("src/tools/replace-version-placeholder") + .path("src/tools/coverage-dump") // We want `./x test tidy` to _run_ the tidy tool, not its tests. // So we need a separate alias to test the tidy tool itself. .alias("tidyselftest") @@ -261,13 +262,7 @@ impl Step for Cargotest { .args(builder.config.test_args()) .env("RUSTC", builder.rustc(compiler)) .env("RUSTDOC", builder.rustdoc(compiler)); - add_rustdoc_cargo_linker_args( - &mut cmd, - builder, - compiler.host, - LldThreads::No, - compiler.stage, - ); + add_rustdoc_cargo_linker_args(&mut cmd, builder, compiler.host, LldThreads::No); cmd.delay_failure().run(builder); } } @@ -727,7 +722,7 @@ impl Step for CompiletestTest { SourceType::InTree, &[], ); - cargo.allow_features("test"); + cargo.allow_features(COMPILETEST_ALLOW_FEATURES); run_cargo_test(cargo, &[], &[], "compiletest self test", host, builder); } } @@ -845,7 +840,7 @@ impl Step for RustdocTheme { .env("CFG_RELEASE_CHANNEL", &builder.config.channel) .env("RUSTDOC_REAL", builder.rustdoc(self.compiler)) .env("RUSTC_BOOTSTRAP", "1"); - cmd.args(linker_args(builder, self.compiler.host, LldThreads::No, self.compiler.stage)); + cmd.args(linker_args(builder, self.compiler.host, LldThreads::No)); cmd.delay_failure().run(builder); } @@ -1021,13 +1016,7 @@ impl Step for RustdocGUI { cmd.env("RUSTDOC", builder.rustdoc(self.compiler)) .env("RUSTC", builder.rustc(self.compiler)); - add_rustdoc_cargo_linker_args( - &mut cmd, - builder, - self.compiler.host, - LldThreads::No, - self.compiler.stage, - ); + add_rustdoc_cargo_linker_args(&mut cmd, builder, self.compiler.host, LldThreads::No); for path in &builder.paths { if let Some(p) = helpers::is_valid_test_suite_arg(path, "tests/rustdoc-gui", builder) { @@ -1624,9 +1613,6 @@ NOTE: if you're sure you want to do this, please open an issue as to why. In the builder.tool_exe(Tool::RunMakeSupport); } - // Also provide `rust_test_helpers` for the host. - builder.ensure(TestHelpers { target: compiler.host }); - // ensure that `libproc_macro` is available on the host. if suite == "mir-opt" { builder.ensure(compile::Std::new(compiler, compiler.host).is_for_mir_opt_tests(true)); @@ -1634,11 +1620,6 @@ NOTE: if you're sure you want to do this, please open an issue as to why. In the builder.ensure(compile::Std::new(compiler, compiler.host)); } - // As well as the target - if suite != "mir-opt" { - builder.ensure(TestHelpers { target }); - } - let mut cmd = builder.tool_cmd(Tool::Compiletest); if suite == "mir-opt" { @@ -1804,11 +1785,18 @@ NOTE: if you're sure you want to do this, please open an issue as to why. In the } let mut hostflags = flags.clone(); - hostflags.push(format!("-Lnative={}", builder.test_helpers_out(compiler.host).display())); - hostflags.extend(linker_flags(builder, compiler.host, LldThreads::No, compiler.stage)); + hostflags.extend(linker_flags(builder, compiler.host, LldThreads::No)); let mut targetflags = flags; - targetflags.push(format!("-Lnative={}", builder.test_helpers_out(target).display())); + + // Provide `rust_test_helpers` for both host and target. + if suite == "ui" || suite == "incremental" { + builder.ensure(TestHelpers { target: compiler.host }); + builder.ensure(TestHelpers { target }); + hostflags + .push(format!("-Lnative={}", builder.test_helpers_out(compiler.host).display())); + targetflags.push(format!("-Lnative={}", builder.test_helpers_out(target).display())); + } for flag in hostflags { cmd.arg("--host-rustcflags").arg(flag); @@ -1907,7 +1895,7 @@ NOTE: if you're sure you want to do this, please open an issue as to why. In the .arg(llvm_components.trim()); llvm_components_passed = true; } - if !builder.is_rust_llvm(target) { + if !builder.config.is_rust_llvm(target) { cmd.arg("--system-llvm"); } @@ -2077,7 +2065,6 @@ NOTE: if you're sure you want to do this, please open an issue as to why. In the } let git_config = builder.config.git_config(); - cmd.arg("--git-repository").arg(git_config.git_repository); cmd.arg("--nightly-branch").arg(git_config.nightly_branch); cmd.arg("--git-merge-commit-email").arg(git_config.git_merge_commit_email); cmd.force_coloring_in_ci(); @@ -2570,9 +2557,9 @@ fn prepare_cargo_test( // We skip everything on Miri as then this overwrites the libdir set up // by `Cargo::new` and that actually makes things go wrong. if builder.kind != Kind::Miri { - let mut dylib_path = dylib_path(); - dylib_path.insert(0, PathBuf::from(&*builder.sysroot_target_libdir(compiler, target))); - cargo.env(dylib_path_var(), env::join_paths(&dylib_path).unwrap()); + let mut dylib_paths = builder.rustc_lib_paths(compiler); + dylib_paths.push(PathBuf::from(&builder.sysroot_target_libdir(compiler, target))); + helpers::add_dylib_path(dylib_paths, &mut cargo); } if builder.remote_tested(target) { @@ -2681,7 +2668,7 @@ impl Step for Crate { cargo } else { // Also prepare a sysroot for the target. - if !builder.is_builder_target(target) { + if !builder.config.is_host_target(target) { builder.ensure(compile::Std::new(compiler, target).force_recompile(true)); builder.ensure(RemoteCopyLibs { compiler, target }); } diff --git a/src/bootstrap/src/core/build_steps/tool.rs b/src/bootstrap/src/core/build_steps/tool.rs index cd57e06ae04a3..ac568eab2e8a5 100644 --- a/src/bootstrap/src/core/build_steps/tool.rs +++ b/src/bootstrap/src/core/build_steps/tool.rs @@ -148,10 +148,9 @@ impl Step for ToolBuild { &self.extra_features, ); - if path.ends_with("/rustdoc") && - // rustdoc is performance sensitive, so apply LTO to it. - is_lto_stage(&self.compiler) - { + // Rustc tools (miri, clippy, cargo, rustfmt, rust-analyzer) + // could use the additional optimizations. + if self.mode == Mode::ToolRustc && is_lto_stage(&self.compiler) { let lto = match builder.config.rust_lto { RustcLto::Off => Some("off"), RustcLto::Thin => Some("thin"), @@ -425,11 +424,14 @@ macro_rules! bootstrap_tool { } )* + let is_unstable = false $(|| $unstable)*; + let compiletest_wants_stage0 = $tool_name == "compiletest" && builder.config.compiletest_use_stage0_libtest; + builder.ensure(ToolBuild { compiler: self.compiler, target: self.target, tool: $tool_name, - mode: if false $(|| $unstable)* { + mode: if is_unstable && !compiletest_wants_stage0 { // use in-tree libraries for unstable features Mode::ToolStd } else { @@ -442,7 +444,11 @@ macro_rules! bootstrap_tool { SourceType::InTree }, extra_features: vec![], - allow_features: concat!($($allow_features)*), + allow_features: { + let mut _value = ""; + $( _value = $allow_features; )? + _value + }, cargo_args: vec![], artifact_kind: if false $(|| $artifact_kind == ToolArtifactKind::Library)* { ToolArtifactKind::Library @@ -456,6 +462,8 @@ macro_rules! bootstrap_tool { } } +pub(crate) const COMPILETEST_ALLOW_FEATURES: &str = "internal_output_capture"; + bootstrap_tool!( // This is marked as an external tool because it includes dependencies // from submodules. Trying to keep the lints in sync between all the repos @@ -466,7 +474,7 @@ bootstrap_tool!( Tidy, "src/tools/tidy", "tidy"; Linkchecker, "src/tools/linkchecker", "linkchecker"; CargoTest, "src/tools/cargotest", "cargotest"; - Compiletest, "src/tools/compiletest", "compiletest", is_unstable_tool = true, allow_features = "test"; + Compiletest, "src/tools/compiletest", "compiletest", is_unstable_tool = true, allow_features = COMPILETEST_ALLOW_FEATURES; BuildManifest, "src/tools/build-manifest", "build-manifest"; RemoteTestClient, "src/tools/remote-test-client", "remote-test-client"; RustInstaller, "src/tools/rust-installer", "rust-installer"; @@ -481,7 +489,8 @@ bootstrap_tool!( GenerateCopyright, "src/tools/generate-copyright", "generate-copyright"; SuggestTests, "src/tools/suggest-tests", "suggest-tests"; GenerateWindowsSys, "src/tools/generate-windows-sys", "generate-windows-sys"; - RustdocGUITest, "src/tools/rustdoc-gui-test", "rustdoc-gui-test", is_unstable_tool = true, allow_features = "test"; + // rustdoc-gui-test has a crate dependency on compiletest, so it needs the same unstable features. + RustdocGUITest, "src/tools/rustdoc-gui-test", "rustdoc-gui-test", is_unstable_tool = true, allow_features = COMPILETEST_ALLOW_FEATURES; CoverageDump, "src/tools/coverage-dump", "coverage-dump"; WasmComponentLd, "src/tools/wasm-component-ld", "wasm-component-ld", is_unstable_tool = true, allow_features = "min_specialization"; UnicodeTableGenerator, "src/tools/unicode-table-generator", "unicode-table-generator"; @@ -687,8 +696,7 @@ impl Step for Rustdoc { let files_to_track = &["src/librustdoc", "src/tools/rustdoc"]; // Check if unchanged - if builder.config.last_modified_commit(files_to_track, "download-rustc", true).is_some() - { + if !builder.config.has_changes_from_upstream(files_to_track) { let precompiled_rustdoc = builder .config .ci_rustc_dir() @@ -814,7 +822,6 @@ impl Step for LldWrapper { fields(build_compiler = ?self.build_compiler, target_compiler = ?self.target_compiler), ), )] - fn run(self, builder: &Builder<'_>) -> ToolBuildResult { if builder.config.dry_run() { return ToolBuildResult { diff --git a/src/bootstrap/src/core/builder/cargo.rs b/src/bootstrap/src/core/builder/cargo.rs index a96ccdd12c2c7..d625c2ef5847f 100644 --- a/src/bootstrap/src/core/builder/cargo.rs +++ b/src/bootstrap/src/core/builder/cargo.rs @@ -112,9 +112,8 @@ impl Cargo { let mut cargo = builder.cargo(compiler, mode, source_type, target, cmd_kind); match cmd_kind { - // No need to configure the target linker for these command types, - // as they don't invoke rustc at all. - Kind::Clean | Kind::Suggest | Kind::Format | Kind::Setup => {} + // No need to configure the target linker for these command types. + Kind::Clean | Kind::Check | Kind::Suggest | Kind::Format | Kind::Setup => {} _ => { cargo.configure_linker(builder); } @@ -205,6 +204,8 @@ impl Cargo { self } + // FIXME(onur-ozkan): Add coverage to make sure modifications to this function + // doesn't cause cache invalidations (e.g., #130108). fn configure_linker(&mut self, builder: &Builder<'_>) -> &mut Cargo { let target = self.target; let compiler = self.compiler; @@ -260,7 +261,7 @@ impl Cargo { } } - for arg in linker_args(builder, compiler.host, LldThreads::Yes, 0) { + for arg in linker_args(builder, compiler.host, LldThreads::Yes) { self.hostflags.arg(&arg); } @@ -270,10 +271,10 @@ impl Cargo { } // We want to set -Clinker using Cargo, therefore we only call `linker_flags` and not // `linker_args` here. - for flag in linker_flags(builder, target, LldThreads::Yes, compiler.stage) { + for flag in linker_flags(builder, target, LldThreads::Yes) { self.rustflags.arg(&flag); } - for arg in linker_args(builder, target, LldThreads::Yes, compiler.stage) { + for arg in linker_args(builder, target, LldThreads::Yes) { self.rustdocflags.arg(&arg); } @@ -872,11 +873,15 @@ impl Builder<'_> { } cargo.env( profile_var("DEBUG_ASSERTIONS"), - if mode == Mode::Std { - self.config.std_debug_assertions.to_string() - } else { - self.config.rustc_debug_assertions.to_string() - }, + match mode { + Mode::Std => self.config.std_debug_assertions, + Mode::Rustc => self.config.rustc_debug_assertions, + Mode::Codegen => self.config.rustc_debug_assertions, + Mode::ToolBootstrap => self.config.tools_debug_assertions, + Mode::ToolStd => self.config.tools_debug_assertions, + Mode::ToolRustc => self.config.tools_debug_assertions, + } + .to_string(), ); cargo.env( profile_var("OVERFLOW_CHECKS"), diff --git a/src/bootstrap/src/core/builder/mod.rs b/src/bootstrap/src/core/builder/mod.rs index a9058f888d38d..75cc5d3986b88 100644 --- a/src/bootstrap/src/core/builder/mod.rs +++ b/src/bootstrap/src/core/builder/mod.rs @@ -101,13 +101,13 @@ pub trait Step: 'static + Clone + Debug + PartialEq + Eq + Hash { /// Primary function to implement `Step` logic. /// /// This function can be triggered in two ways: - /// 1. Directly from [`Builder::execute_cli`]. - /// 2. Indirectly by being called from other `Step`s using [`Builder::ensure`]. + /// 1. Directly from [`Builder::execute_cli`]. + /// 2. Indirectly by being called from other `Step`s using [`Builder::ensure`]. /// - /// When called with [`Builder::execute_cli`] (as done by `Build::build`), this function executed twice: - /// - First in "dry-run" mode to validate certain things (like cyclic Step invocations, - /// directory creation, etc) super quickly. - /// - Then it's called again to run the actual, very expensive process. + /// When called with [`Builder::execute_cli`] (as done by `Build::build`), this function is executed twice: + /// - First in "dry-run" mode to validate certain things (like cyclic Step invocations, + /// directory creation, etc) super quickly. + /// - Then it's called again to run the actual, very expensive process. /// /// When triggered indirectly from other `Step`s, it may still run twice (as dry-run and real mode) /// depending on the `Step::run` implementation of the caller. @@ -961,6 +961,7 @@ impl<'a> Builder<'a> { check::RunMakeSupport, check::Compiletest, check::FeaturesStatusDump, + check::CoverageDump, ), Kind::Test => describe!( crate::core::build_steps::toolstate::ToolStateCheck, @@ -1114,6 +1115,8 @@ impl<'a> Builder<'a> { run::UnicodeTableGenerator, run::FeaturesStatusDump, run::CyclicStep, + run::CoverageDump, + run::Rustfmt, ), Kind::Setup => { describe!(setup::Profile, setup::Hook, setup::Link, setup::Editor) @@ -1276,7 +1279,6 @@ impl<'a> Builder<'a> { ), ), )] - /// FIXME: This function is unnecessary (and dangerous, see ). /// We already have uplifting logic for the compiler, so remove this. pub fn compiler_for( @@ -1480,7 +1482,7 @@ impl<'a> Builder<'a> { cmd.arg("-Dwarnings"); } cmd.arg("-Znormalize-docs"); - cmd.args(linker_args(self, compiler.host, LldThreads::Yes, compiler.stage)); + cmd.args(linker_args(self, compiler.host, LldThreads::Yes)); cmd } @@ -1535,7 +1537,7 @@ impl<'a> Builder<'a> { let out = step.clone().run(self); let dur = start.elapsed(); let deps = self.time_spent_on_dependencies.replace(parent + dur); - (out, dur - deps) + (out, dur.saturating_sub(deps)) }; if self.config.print_step_timings && !self.config.dry_run() { diff --git a/src/bootstrap/src/core/builder/tests.rs b/src/bootstrap/src/core/builder/tests.rs index fd3b28e4e6ab2..51852099dc398 100644 --- a/src/bootstrap/src/core/builder/tests.rs +++ b/src/bootstrap/src/core/builder/tests.rs @@ -1,11 +1,14 @@ +use std::env::VarError; use std::{panic, thread}; +use build_helper::stage0_parser::parse_stage0_file; use llvm::prebuilt_llvm_config; use super::*; use crate::Flags; use crate::core::build_steps::doc::DocumentationFormat; use crate::core::config::Config; +use crate::utils::tests::git::{GitCtx, git_test}; static TEST_TRIPLE_1: &str = "i686-unknown-haiku"; static TEST_TRIPLE_2: &str = "i686-unknown-hurd-gnu"; @@ -239,42 +242,80 @@ fn alias_and_path_for_library() { } #[test] -fn ci_rustc_if_unchanged_logic() { - let config = Config::parse_inner( - Flags::parse(&[ - "build".to_owned(), - "--dry-run".to_owned(), - "--set=rust.download-rustc='if-unchanged'".to_owned(), - ]), - |&_| Ok(Default::default()), - ); - - let build = Build::new(config.clone()); - let builder = Builder::new(&build); - - if config.out.exists() { - fs::remove_dir_all(&config.out).unwrap(); - } +fn ci_rustc_if_unchanged_invalidate_on_compiler_changes() { + git_test(|ctx| { + prepare_rustc_checkout(ctx); + ctx.create_upstream_merge(&["compiler/bar"]); + // This change should invalidate download-ci-rustc + ctx.create_nonupstream_merge(&["compiler/foo"]); + + let config = parse_config_download_rustc_at(ctx.get_path(), "if-unchanged", true); + assert_eq!(config.download_rustc_commit, None); + }); +} - builder.run_step_descriptions(&Builder::get_step_descriptions(config.cmd.kind()), &[]); +#[test] +fn ci_rustc_if_unchanged_invalidate_on_library_changes_in_ci() { + git_test(|ctx| { + prepare_rustc_checkout(ctx); + ctx.create_upstream_merge(&["compiler/bar"]); + // This change should invalidate download-ci-rustc + ctx.create_nonupstream_merge(&["library/foo"]); + + let config = parse_config_download_rustc_at(ctx.get_path(), "if-unchanged", true); + assert_eq!(config.download_rustc_commit, None); + }); +} - // Make sure "if-unchanged" logic doesn't try to use CI rustc while there are changes - // in compiler and/or library. - if config.download_rustc_commit.is_some() { - let mut paths = vec!["compiler"]; +#[test] +fn ci_rustc_if_unchanged_do_not_invalidate_on_library_changes_outside_ci() { + git_test(|ctx| { + prepare_rustc_checkout(ctx); + let sha = ctx.create_upstream_merge(&["compiler/bar"]); + // This change should not invalidate download-ci-rustc + ctx.create_nonupstream_merge(&["library/foo"]); + + let config = parse_config_download_rustc_at(ctx.get_path(), "if-unchanged", false); + assert_eq!(config.download_rustc_commit, Some(sha)); + }); +} - // Handle library tree the same way as in `Config::download_ci_rustc_commit`. - if builder.config.is_running_on_ci { - paths.push("library"); - } +#[test] +fn ci_rustc_if_unchanged_do_not_invalidate_on_tool_changes() { + git_test(|ctx| { + prepare_rustc_checkout(ctx); + let sha = ctx.create_upstream_merge(&["compiler/bar"]); + // This change should not invalidate download-ci-rustc + ctx.create_nonupstream_merge(&["src/tools/foo"]); + + let config = parse_config_download_rustc_at(ctx.get_path(), "if-unchanged", true); + assert_eq!(config.download_rustc_commit, Some(sha)); + }); +} - let has_changes = config.last_modified_commit(&paths, "download-rustc", true).is_none(); +/// Prepares the given directory so that it looks like a rustc checkout. +/// Also configures `GitCtx` to use the correct merge bot e-mail for upstream merge commits. +fn prepare_rustc_checkout(ctx: &mut GitCtx) { + ctx.merge_bot_email = + format!("Merge bot <{}>", parse_stage0_file().config.git_merge_commit_email); + ctx.write("src/ci/channel", "nightly"); + ctx.commit(); +} - assert!( - !has_changes, - "CI-rustc can't be used with 'if-unchanged' while there are changes in compiler and/or library." - ); - } +/// Parses a Config directory from `path`, with the given value of `download_rustc`. +fn parse_config_download_rustc_at(path: &Path, download_rustc: &str, ci: bool) -> Config { + Config::parse_inner( + Flags::parse(&[ + "build".to_owned(), + "--dry-run".to_owned(), + "--ci".to_owned(), + if ci { "true" } else { "false" }.to_owned(), + format!("--set=rust.download-rustc='{download_rustc}'"), + "--src".to_owned(), + path.to_str().unwrap().to_owned(), + ]), + |&_| Ok(Default::default()), + ) } mod defaults { @@ -408,6 +449,7 @@ mod dist { use pretty_assertions::assert_eq; use super::{Config, TEST_TRIPLE_1, TEST_TRIPLE_2, TEST_TRIPLE_3, first, run_build}; + use crate::Flags; use crate::core::builder::*; fn configure(host: &[&str], target: &[&str]) -> Config { @@ -646,6 +688,37 @@ mod dist { ); } + /// This also serves as an important regression test for + /// and . + #[test] + fn dist_all_cross() { + let cmd_args = + &["dist", "--stage", "2", "--dry-run", "--config=/does/not/exist"].map(str::to_owned); + let config_str = r#" + [rust] + channel = "nightly" + + [build] + extended = true + + build = "i686-unknown-haiku" + host = ["i686-unknown-netbsd"] + target = ["i686-unknown-netbsd"] + "#; + let config = Config::parse_inner(Flags::parse(cmd_args), |&_| toml::from_str(config_str)); + let mut cache = run_build(&[], config); + + // Stage 2 `compile::Rustc` should **NEVER** be cached here. + assert_eq!( + first(cache.all::()), + &[ + rustc!(TEST_TRIPLE_1 => TEST_TRIPLE_1, stage = 0), + rustc!(TEST_TRIPLE_1 => TEST_TRIPLE_1, stage = 1), + rustc!(TEST_TRIPLE_1 => TEST_TRIPLE_3, stage = 1), + ] + ); + } + #[test] fn build_all() { let build = Build::new(configure( @@ -1107,8 +1180,8 @@ fn test_is_builder_target() { let build = Build::new(config); let builder = Builder::new(&build); - assert!(builder.is_builder_target(target1)); - assert!(!builder.is_builder_target(target2)); + assert!(builder.config.is_host_target(target1)); + assert!(!builder.config.is_host_target(target2)); } } diff --git a/src/bootstrap/src/core/config/config.rs b/src/bootstrap/src/core/config/config.rs index 1712be7f947fa..65a3e7667e7f0 100644 --- a/src/bootstrap/src/core/config/config.rs +++ b/src/bootstrap/src/core/config/config.rs @@ -6,16 +6,17 @@ use std::cell::{Cell, RefCell}; use std::collections::{BTreeSet, HashMap, HashSet}; use std::fmt::{self, Display}; +use std::hash::Hash; use std::io::IsTerminal; use std::path::{Path, PathBuf, absolute}; use std::process::Command; use std::str::FromStr; -use std::sync::OnceLock; +use std::sync::{Arc, Mutex, OnceLock}; use std::{cmp, env, fs}; use build_helper::ci::CiEnv; use build_helper::exit; -use build_helper::git::{GitConfig, get_closest_merge_commit, output_result}; +use build_helper::git::{GitConfig, PathFreshness, check_path_modifications, output_result}; use serde::{Deserialize, Deserializer}; use serde_derive::Deserialize; #[cfg(feature = "tracing")] @@ -189,7 +190,7 @@ pub enum GccCiMode { /// /// Note that this structure is not decoded directly into, but rather it is /// filled out from the decoded forms of the structs below. For documentation -/// each field, see the corresponding fields in +/// on each field, see the corresponding fields in /// `bootstrap.example.toml`. #[derive(Default, Clone)] pub struct Config { @@ -305,6 +306,7 @@ pub struct Config { pub rustc_debug_assertions: bool, pub std_debug_assertions: bool, + pub tools_debug_assertions: bool, pub rust_overflow_checks: bool, pub rust_overflow_checks_std: bool, @@ -417,7 +419,13 @@ pub struct Config { /// Command for visual diff display, e.g. `diff-tool --color=always`. pub compiletest_diff_tool: Option, + /// Whether to use the precompiled stage0 libtest with compiletest. + pub compiletest_use_stage0_libtest: bool, + pub is_running_on_ci: bool, + + /// Cache for determining path modifications + pub path_modification_cache: Arc, PathFreshness>>>, } #[derive(Clone, Debug, Default)] @@ -698,6 +706,7 @@ pub(crate) struct TomlConfig { target: Option>, dist: Option, profile: Option, + include: Option>, } /// This enum is used for deserializing change IDs from TOML, allowing both numeric values and the string `"ignore"`. @@ -744,27 +753,35 @@ enum ReplaceOpt { } trait Merge { - fn merge(&mut self, other: Self, replace: ReplaceOpt); + fn merge( + &mut self, + parent_config_path: Option, + included_extensions: &mut HashSet, + other: Self, + replace: ReplaceOpt, + ); } impl Merge for TomlConfig { fn merge( &mut self, - TomlConfig { build, install, llvm, gcc, rust, dist, target, profile, change_id }: Self, + parent_config_path: Option, + included_extensions: &mut HashSet, + TomlConfig { build, install, llvm, gcc, rust, dist, target, profile, change_id, include }: Self, replace: ReplaceOpt, ) { fn do_merge(x: &mut Option, y: Option, replace: ReplaceOpt) { if let Some(new) = y { if let Some(original) = x { - original.merge(new, replace); + original.merge(None, &mut Default::default(), new, replace); } else { *x = Some(new); } } } - self.change_id.inner.merge(change_id.inner, replace); - self.profile.merge(profile, replace); + self.change_id.inner.merge(None, &mut Default::default(), change_id.inner, replace); + self.profile.merge(None, &mut Default::default(), profile, replace); do_merge(&mut self.build, build, replace); do_merge(&mut self.install, install, replace); @@ -779,13 +796,50 @@ impl Merge for TomlConfig { (Some(original_target), Some(new_target)) => { for (triple, new) in new_target { if let Some(original) = original_target.get_mut(&triple) { - original.merge(new, replace); + original.merge(None, &mut Default::default(), new, replace); } else { original_target.insert(triple, new); } } } } + + let parent_dir = parent_config_path + .as_ref() + .and_then(|p| p.parent().map(ToOwned::to_owned)) + .unwrap_or_default(); + + // `include` handled later since we ignore duplicates using `ReplaceOpt::IgnoreDuplicate` to + // keep the upper-level configuration to take precedence. + for include_path in include.clone().unwrap_or_default().iter().rev() { + let include_path = parent_dir.join(include_path); + let include_path = include_path.canonicalize().unwrap_or_else(|e| { + eprintln!("ERROR: Failed to canonicalize '{}' path: {e}", include_path.display()); + exit!(2); + }); + + let included_toml = Config::get_toml_inner(&include_path).unwrap_or_else(|e| { + eprintln!("ERROR: Failed to parse '{}': {e}", include_path.display()); + exit!(2); + }); + + assert!( + included_extensions.insert(include_path.clone()), + "Cyclic inclusion detected: '{}' is being included again before its previous inclusion was fully processed.", + include_path.display() + ); + + self.merge( + Some(include_path.clone()), + included_extensions, + included_toml, + // Ensures that parent configuration always takes precedence + // over child configurations. + ReplaceOpt::IgnoreDuplicate, + ); + + included_extensions.remove(&include_path); + } } } @@ -800,7 +854,13 @@ macro_rules! define_config { } impl Merge for $name { - fn merge(&mut self, other: Self, replace: ReplaceOpt) { + fn merge( + &mut self, + _parent_config_path: Option, + _included_extensions: &mut HashSet, + other: Self, + replace: ReplaceOpt + ) { $( match replace { ReplaceOpt::IgnoreDuplicate => { @@ -900,7 +960,13 @@ macro_rules! define_config { } impl Merge for Option { - fn merge(&mut self, other: Self, replace: ReplaceOpt) { + fn merge( + &mut self, + _parent_config_path: Option, + _included_extensions: &mut HashSet, + other: Self, + replace: ReplaceOpt, + ) { match replace { ReplaceOpt::IgnoreDuplicate => { if self.is_none() { @@ -983,6 +1049,7 @@ define_config! { optimized_compiler_builtins: Option = "optimized-compiler-builtins", jobs: Option = "jobs", compiletest_diff_tool: Option = "compiletest-diff-tool", + compiletest_use_stage0_libtest: Option = "compiletest-use-stage0-libtest", ccache: Option = "ccache", exclude: Option> = "exclude", } @@ -1214,6 +1281,7 @@ define_config! { rustc_debug_assertions: Option = "debug-assertions", randomize_layout: Option = "randomize-layout", std_debug_assertions: Option = "debug-assertions-std", + tools_debug_assertions: Option = "debug-assertions-tools", overflow_checks: Option = "overflow-checks", overflow_checks_std: Option = "overflow-checks-std", debug_logging: Option = "debug-logging", @@ -1359,13 +1427,15 @@ impl Config { Self::get_toml(&builder_config_path) } - #[cfg(test)] - pub(crate) fn get_toml(_: &Path) -> Result { - Ok(TomlConfig::default()) + pub(crate) fn get_toml(file: &Path) -> Result { + #[cfg(test)] + return Ok(TomlConfig::default()); + + #[cfg(not(test))] + Self::get_toml_inner(file) } - #[cfg(not(test))] - pub(crate) fn get_toml(file: &Path) -> Result { + fn get_toml_inner(file: &Path) -> Result { let contents = t!(fs::read_to_string(file), format!("config file {} not found", file.display())); // Deserialize to Value and then TomlConfig to prevent the Deserialize impl of @@ -1540,14 +1610,12 @@ impl Config { } } - let file_content = t!(fs::read_to_string(config.src.join("src/ci/channel"))); - let ci_channel = file_content.trim_end(); - // Give a hard error if `--config` or `RUST_BOOTSTRAP_CONFIG` are set to a missing path, // but not if `bootstrap.toml` hasn't been created. let mut toml = if !using_default_path || toml_path.exists() { config.config = Some(if cfg!(not(test)) { - toml_path.canonicalize().unwrap() + toml_path = toml_path.canonicalize().unwrap(); + toml_path.clone() } else { toml_path.clone() }); @@ -1575,6 +1643,26 @@ impl Config { toml.profile = Some("dist".into()); } + // Reverse the list to ensure the last added config extension remains the most dominant. + // For example, given ["a.toml", "b.toml"], "b.toml" should take precedence over "a.toml". + // + // This must be handled before applying the `profile` since `include`s should always take + // precedence over `profile`s. + for include_path in toml.include.clone().unwrap_or_default().iter().rev() { + let include_path = toml_path.parent().unwrap().join(include_path); + + let included_toml = get_toml(&include_path).unwrap_or_else(|e| { + eprintln!("ERROR: Failed to parse '{}': {e}", include_path.display()); + exit!(2); + }); + toml.merge( + Some(include_path), + &mut Default::default(), + included_toml, + ReplaceOpt::IgnoreDuplicate, + ); + } + if let Some(include) = &toml.profile { // Allows creating alias for profile names, allowing // profiles to be renamed while maintaining back compatibility @@ -1596,7 +1684,12 @@ impl Config { ); exit!(2); }); - toml.merge(included_toml, ReplaceOpt::IgnoreDuplicate); + toml.merge( + Some(include_path), + &mut Default::default(), + included_toml, + ReplaceOpt::IgnoreDuplicate, + ); } let mut override_toml = TomlConfig::default(); @@ -1607,7 +1700,12 @@ impl Config { let mut err = match get_table(option) { Ok(v) => { - override_toml.merge(v, ReplaceOpt::ErrorOnDuplicate); + override_toml.merge( + None, + &mut Default::default(), + v, + ReplaceOpt::ErrorOnDuplicate, + ); continue; } Err(e) => e, @@ -1618,7 +1716,12 @@ impl Config { if !value.contains('"') { match get_table(&format!(r#"{key}="{value}""#)) { Ok(v) => { - override_toml.merge(v, ReplaceOpt::ErrorOnDuplicate); + override_toml.merge( + None, + &mut Default::default(), + v, + ReplaceOpt::ErrorOnDuplicate, + ); continue; } Err(e) => err = e, @@ -1628,7 +1731,7 @@ impl Config { eprintln!("failed to parse override `{option}`: `{err}"); exit!(2) } - toml.merge(override_toml, ReplaceOpt::Override); + toml.merge(None, &mut Default::default(), override_toml, ReplaceOpt::Override); config.change_id = toml.change_id.inner; @@ -1682,6 +1785,7 @@ impl Config { optimized_compiler_builtins, jobs, compiletest_diff_tool, + compiletest_use_stage0_libtest, mut ccache, exclude, } = toml.build.unwrap_or_default(); @@ -1835,6 +1939,7 @@ impl Config { let mut debug = None; let mut rustc_debug_assertions = None; let mut std_debug_assertions = None; + let mut tools_debug_assertions = None; let mut overflow_checks = None; let mut overflow_checks_std = None; let mut debug_logging = None; @@ -1847,17 +1952,21 @@ impl Config { let mut lld_enabled = None; let mut std_features = None; - let is_user_configured_rust_channel = - if let Some(channel) = toml.rust.as_ref().and_then(|r| r.channel.clone()) { - if channel == "auto-detect" { - config.channel = ci_channel.into(); - } else { - config.channel = channel; - } + let file_content = t!(fs::read_to_string(config.src.join("src/ci/channel"))); + let ci_channel = file_content.trim_end(); + + let toml_channel = toml.rust.as_ref().and_then(|r| r.channel.clone()); + let is_user_configured_rust_channel = match toml_channel { + Some(channel) if channel == "auto-detect" => { + config.channel = ci_channel.into(); true - } else { - false - }; + } + Some(channel) => { + config.channel = channel; + true + } + None => false, + }; let default = config.channel == "dev"; config.omit_git_hash = toml.rust.as_ref().and_then(|r| r.omit_git_hash).unwrap_or(default); @@ -1882,6 +1991,10 @@ impl Config { && config.src.join(".cargo/config.toml").exists(), ); + if !is_user_configured_rust_channel && config.rust_info.is_from_tarball() { + config.channel = ci_channel.into(); + } + if let Some(rust) = toml.rust { let Rust { optimize: optimize_toml, @@ -1890,6 +2003,7 @@ impl Config { codegen_units_std, rustc_debug_assertions: rustc_debug_assertions_toml, std_debug_assertions: std_debug_assertions_toml, + tools_debug_assertions: tools_debug_assertions_toml, overflow_checks: overflow_checks_toml, overflow_checks_std: overflow_checks_std_toml, debug_logging: debug_logging_toml, @@ -1974,6 +2088,7 @@ impl Config { debug = debug_toml; rustc_debug_assertions = rustc_debug_assertions_toml; std_debug_assertions = std_debug_assertions_toml; + tools_debug_assertions = tools_debug_assertions_toml; overflow_checks = overflow_checks_toml; overflow_checks_std = overflow_checks_std_toml; debug_logging = debug_logging_toml; @@ -2085,8 +2200,6 @@ impl Config { config.channel = channel; } - } else if config.rust_info.is_from_tarball() && !is_user_configured_rust_channel { - ci_channel.clone_into(&mut config.channel); } if let Some(llvm) = toml.llvm { @@ -2389,12 +2502,20 @@ impl Config { ); } + if config.lld_enabled && config.is_system_llvm(config.build) { + eprintln!( + "Warning: LLD is enabled when using external llvm-config. LLD will not be built and copied to the sysroot." + ); + } + let default_std_features = BTreeSet::from([String::from("panic-unwind")]); config.rust_std_features = std_features.unwrap_or(default_std_features); let default = debug == Some(true); config.rustc_debug_assertions = rustc_debug_assertions.unwrap_or(default); config.std_debug_assertions = std_debug_assertions.unwrap_or(config.rustc_debug_assertions); + config.tools_debug_assertions = + tools_debug_assertions.unwrap_or(config.rustc_debug_assertions); config.rust_overflow_checks = overflow_checks.unwrap_or(default); config.rust_overflow_checks_std = overflow_checks_std.unwrap_or(config.rust_overflow_checks); @@ -2415,6 +2536,7 @@ impl Config { config.optimized_compiler_builtins = optimized_compiler_builtins.unwrap_or(config.channel != "dev"); config.compiletest_diff_tool = compiletest_diff_tool; + config.compiletest_use_stage0_libtest = compiletest_use_stage0_libtest.unwrap_or(true); let download_rustc = config.download_rustc_commit.is_some(); config.explicit_stage_from_cli = flags.stage.is_some(); @@ -2848,7 +2970,6 @@ impl Config { pub fn git_config(&self) -> GitConfig<'_> { GitConfig { - git_repository: &self.stage0_metadata.config.git_repository, nightly_branch: &self.stage0_metadata.config.nightly_branch, git_merge_commit_email: &self.stage0_metadata.config.git_merge_commit_email, } @@ -2879,6 +3000,13 @@ impl Config { let absolute_path = self.src.join(relative_path); + // NOTE: This check is required because `jj git clone` doesn't create directories for + // submodules, they are completely ignored. The code below assumes this directory exists, + // so create it here. + if !absolute_path.exists() { + t!(fs::create_dir_all(&absolute_path)); + } + // NOTE: The check for the empty directory is here because when running x.py the first time, // the submodule won't be checked out. Check it out now so we can build it. if !GitInfo::new(false, &absolute_path).is_managed_git_subrepository() @@ -3074,19 +3202,30 @@ impl Config { let commit = if self.rust_info.is_managed_git_subrepository() { // Look for a version to compare to based on the current commit. // Only commits merged by bors will have CI artifacts. - match self.last_modified_commit(&allowed_paths, "download-rustc", if_unchanged) { - Some(commit) => commit, - None => { + let freshness = self.check_path_modifications(&allowed_paths); + self.verbose(|| { + eprintln!("rustc freshness: {freshness:?}"); + }); + match freshness { + PathFreshness::LastModifiedUpstream { upstream } => upstream, + PathFreshness::HasLocalModifications { upstream } => { if if_unchanged { return None; } - println!("ERROR: could not find commit hash for downloading rustc"); - println!("HELP: maybe your repository history is too shallow?"); - println!( - "HELP: consider setting `rust.download-rustc=false` in bootstrap.toml" - ); - println!("HELP: or fetch enough history to include one upstream commit"); - crate::exit!(1); + + if self.is_running_on_ci { + eprintln!("CI rustc commit matches with HEAD and we are in CI."); + eprintln!( + "`rustc.download-ci` functionality will be skipped as artifacts are not available." + ); + return None; + } + + upstream + } + PathFreshness::MissingUpstream => { + eprintln!("No upstream commit found"); + return None; } } } else { @@ -3095,19 +3234,6 @@ impl Config { .expect("git-commit-info is missing in the project root") }; - if self.is_running_on_ci && { - let head_sha = - output(helpers::git(Some(&self.src)).arg("rev-parse").arg("HEAD").as_command_mut()); - let head_sha = head_sha.trim(); - commit == head_sha - } { - eprintln!("CI rustc commit matches with HEAD and we are in CI."); - eprintln!( - "`rustc.download-ci` functionality will be skipped as artifacts are not available." - ); - return None; - } - if debug_assertions_requested { eprintln!( "WARN: `rust.debug-assertions = true` will prevent downloading CI rustc as alt CI \ @@ -3145,9 +3271,7 @@ impl Config { self.update_submodule("src/llvm-project"); // Check for untracked changes in `src/llvm-project` and other important places. - let has_changes = self - .last_modified_commit(LLVM_INVALIDATION_PATHS, "download-ci-llvm", true) - .is_none(); + let has_changes = self.has_changes_from_upstream(LLVM_INVALIDATION_PATHS); // Return false if there are untracked changes, otherwise check if CI LLVM is available. if has_changes { false } else { llvm::is_ci_llvm_available_for_target(self, asserts) } @@ -3178,51 +3302,70 @@ impl Config { } } - /// Returns the last commit in which any of `modified_paths` were changed, - /// or `None` if there are untracked changes in the working directory and `if_unchanged` is true. - pub fn last_modified_commit( - &self, - modified_paths: &[&str], - option_name: &str, - if_unchanged: bool, - ) -> Option { - assert!( - self.rust_info.is_managed_git_subrepository(), - "Can't run `Config::last_modified_commit` on a non-git source." - ); - - // Look for a version to compare to based on the current commit. - // Only commits merged by bors will have CI artifacts. - let commit = get_closest_merge_commit(Some(&self.src), &self.git_config(), &[]).unwrap(); - if commit.is_empty() { - println!("error: could not find commit hash for downloading components from CI"); - println!("help: maybe your repository history is too shallow?"); - println!("help: consider disabling `{option_name}`"); - println!("help: or fetch enough history to include one upstream commit"); - crate::exit!(1); + /// Returns true if any of the `paths` have been modified locally. + pub fn has_changes_from_upstream(&self, paths: &[&'static str]) -> bool { + match self.check_path_modifications(paths) { + PathFreshness::LastModifiedUpstream { .. } => false, + PathFreshness::HasLocalModifications { .. } | PathFreshness::MissingUpstream => true, } + } - // Warn if there were changes to the compiler or standard library since the ancestor commit. - let mut git = helpers::git(Some(&self.src)); - git.args(["diff-index", "--quiet", &commit, "--"]).args(modified_paths); + /// Checks whether any of the given paths have been modified w.r.t. upstream. + pub fn check_path_modifications(&self, paths: &[&'static str]) -> PathFreshness { + // Checking path modifications through git can be relatively expensive (>100ms). + // We do not assume that the sources would change during bootstrap's execution, + // so we can cache the results here. + // Note that we do not use a static variable for the cache, because it would cause problems + // in tests that create separate `Config` instsances. + self.path_modification_cache + .lock() + .unwrap() + .entry(paths.to_vec()) + .or_insert_with(|| { + check_path_modifications(&self.src, &self.git_config(), paths, CiEnv::current()) + .unwrap() + }) + .clone() + } - let has_changes = !t!(git.as_command_mut().status()).success(); - if has_changes { - if if_unchanged { - if self.is_verbose() { - println!( - "warning: saw changes to one of {modified_paths:?} since {commit}; \ - ignoring `{option_name}`" - ); - } - return None; + /// Checks if the given target is the same as the host target. + pub fn is_host_target(&self, target: TargetSelection) -> bool { + self.build == target + } + + /// Returns `true` if this is an external version of LLVM not managed by bootstrap. + /// In particular, we expect llvm sources to be available when this is false. + /// + /// NOTE: this is not the same as `!is_rust_llvm` when `llvm_has_patches` is set. + pub fn is_system_llvm(&self, target: TargetSelection) -> bool { + match self.target_config.get(&target) { + Some(Target { llvm_config: Some(_), .. }) => { + let ci_llvm = self.llvm_from_ci && self.is_host_target(target); + !ci_llvm } - println!( - "warning: `{option_name}` is enabled, but there are changes to one of {modified_paths:?}" - ); + // We're building from the in-tree src/llvm-project sources. + Some(Target { llvm_config: None, .. }) => false, + None => false, + } + } + + /// Returns `true` if this is our custom, patched, version of LLVM. + /// + /// This does not necessarily imply that we're managing the `llvm-project` submodule. + pub fn is_rust_llvm(&self, target: TargetSelection) -> bool { + match self.target_config.get(&target) { + // We're using a user-controlled version of LLVM. The user has explicitly told us whether the version has our patches. + // (They might be wrong, but that's not a supported use-case.) + // In particular, this tries to support `submodules = false` and `patches = false`, for using a newer version of LLVM that's not through `rust-lang/llvm-project`. + Some(Target { llvm_has_rust_patches: Some(patched), .. }) => *patched, + // The user hasn't promised the patches match. + // This only has our patches if it's downloaded from CI or built from source. + _ => !self.is_system_llvm(target), } + } - Some(commit.to_string()) + pub fn ci_env(&self) -> CiEnv { + if self.is_running_on_ci { CiEnv::GitHubActions } else { CiEnv::None } } } @@ -3432,6 +3575,7 @@ fn check_incompatible_options_for_ci_rustc( codegen_units_std: _, rustc_debug_assertions: _, std_debug_assertions: _, + tools_debug_assertions: _, overflow_checks: _, overflow_checks_std: _, debuginfo_level: _, diff --git a/src/bootstrap/src/core/config/tests.rs b/src/bootstrap/src/core/config/tests.rs index d8002ba8467bd..96ac8a6d52fab 100644 --- a/src/bootstrap/src/core/config/tests.rs +++ b/src/bootstrap/src/core/config/tests.rs @@ -1,10 +1,11 @@ use std::collections::BTreeSet; -use std::env; use std::fs::{File, remove_file}; use std::io::Write; -use std::path::Path; +use std::path::{Path, PathBuf}; +use std::{env, fs}; use build_helper::ci::CiEnv; +use build_helper::git::PathFreshness; use clap::CommandFactory; use serde::Deserialize; @@ -15,6 +16,7 @@ use crate::core::build_steps::clippy::{LintConfig, get_clippy_rules_in_order}; use crate::core::build_steps::llvm; use crate::core::build_steps::llvm::LLVM_INVALIDATION_PATHS; use crate::core::config::{LldMode, Target, TargetSelection, TomlConfig}; +use crate::utils::tests::git::git_test; pub(crate) fn parse(config: &str) -> Config { Config::parse_inner( @@ -23,6 +25,27 @@ pub(crate) fn parse(config: &str) -> Config { ) } +fn get_toml(file: &Path) -> Result { + let contents = std::fs::read_to_string(file).unwrap(); + toml::from_str(&contents).and_then(|table: toml::Value| TomlConfig::deserialize(table)) +} + +/// Helps with debugging by using consistent test-specific directories instead of +/// random temporary directories. +fn prepare_test_specific_dir() -> PathBuf { + let current = std::thread::current(); + // Replace "::" with "_" to make it safe for directory names on Windows systems + let test_path = current.name().unwrap().replace("::", "_"); + + let testdir = parse("").tempdir().join(test_path); + + // clean up any old test files + let _ = fs::remove_dir_all(&testdir); + let _ = fs::create_dir_all(&testdir); + + testdir +} + #[test] fn download_ci_llvm() { let config = parse("llvm.download-ci-llvm = false"); @@ -30,9 +53,7 @@ fn download_ci_llvm() { let if_unchanged_config = parse("llvm.download-ci-llvm = \"if-unchanged\""); if if_unchanged_config.llvm_from_ci && if_unchanged_config.is_running_on_ci { - let has_changes = if_unchanged_config - .last_modified_commit(LLVM_INVALIDATION_PATHS, "download-ci-llvm", true) - .is_none(); + let has_changes = if_unchanged_config.has_changes_from_upstream(LLVM_INVALIDATION_PATHS); assert!( !has_changes, @@ -539,3 +560,428 @@ fn test_ci_flag() { let config = Config::parse_inner(Flags::parse(&["check".into()]), |&_| toml::from_str("")); assert_eq!(config.is_running_on_ci, CiEnv::is_ci()); } + +#[test] +fn test_precedence_of_includes() { + let testdir = prepare_test_specific_dir(); + + let root_config = testdir.join("config.toml"); + let root_config_content = br#" + include = ["./extension.toml"] + + [llvm] + link-jobs = 2 + "#; + File::create(&root_config).unwrap().write_all(root_config_content).unwrap(); + + let extension = testdir.join("extension.toml"); + let extension_content = br#" + change-id=543 + include = ["./extension2.toml"] + "#; + File::create(extension).unwrap().write_all(extension_content).unwrap(); + + let extension = testdir.join("extension2.toml"); + let extension_content = br#" + change-id=742 + + [llvm] + link-jobs = 10 + + [build] + description = "Some creative description" + "#; + File::create(extension).unwrap().write_all(extension_content).unwrap(); + + let config = Config::parse_inner( + Flags::parse(&["check".to_owned(), format!("--config={}", root_config.to_str().unwrap())]), + get_toml, + ); + + assert_eq!(config.change_id.unwrap(), ChangeId::Id(543)); + assert_eq!(config.llvm_link_jobs.unwrap(), 2); + assert_eq!(config.description.unwrap(), "Some creative description"); +} + +#[test] +#[should_panic(expected = "Cyclic inclusion detected")] +fn test_cyclic_include_direct() { + let testdir = prepare_test_specific_dir(); + + let root_config = testdir.join("config.toml"); + let root_config_content = br#" + include = ["./extension.toml"] + "#; + File::create(&root_config).unwrap().write_all(root_config_content).unwrap(); + + let extension = testdir.join("extension.toml"); + let extension_content = br#" + include = ["./config.toml"] + "#; + File::create(extension).unwrap().write_all(extension_content).unwrap(); + + let config = Config::parse_inner( + Flags::parse(&["check".to_owned(), format!("--config={}", root_config.to_str().unwrap())]), + get_toml, + ); +} + +#[test] +#[should_panic(expected = "Cyclic inclusion detected")] +fn test_cyclic_include_indirect() { + let testdir = prepare_test_specific_dir(); + + let root_config = testdir.join("config.toml"); + let root_config_content = br#" + include = ["./extension.toml"] + "#; + File::create(&root_config).unwrap().write_all(root_config_content).unwrap(); + + let extension = testdir.join("extension.toml"); + let extension_content = br#" + include = ["./extension2.toml"] + "#; + File::create(extension).unwrap().write_all(extension_content).unwrap(); + + let extension = testdir.join("extension2.toml"); + let extension_content = br#" + include = ["./extension3.toml"] + "#; + File::create(extension).unwrap().write_all(extension_content).unwrap(); + + let extension = testdir.join("extension3.toml"); + let extension_content = br#" + include = ["./extension.toml"] + "#; + File::create(extension).unwrap().write_all(extension_content).unwrap(); + + let config = Config::parse_inner( + Flags::parse(&["check".to_owned(), format!("--config={}", root_config.to_str().unwrap())]), + get_toml, + ); +} + +#[test] +fn test_include_absolute_paths() { + let testdir = prepare_test_specific_dir(); + + let extension = testdir.join("extension.toml"); + File::create(&extension).unwrap().write_all(&[]).unwrap(); + + let root_config = testdir.join("config.toml"); + let extension_absolute_path = + extension.canonicalize().unwrap().to_str().unwrap().replace('\\', r"\\"); + let root_config_content = format!(r#"include = ["{}"]"#, extension_absolute_path); + File::create(&root_config).unwrap().write_all(root_config_content.as_bytes()).unwrap(); + + let config = Config::parse_inner( + Flags::parse(&["check".to_owned(), format!("--config={}", root_config.to_str().unwrap())]), + get_toml, + ); +} + +#[test] +fn test_include_relative_paths() { + let testdir = prepare_test_specific_dir(); + + let _ = fs::create_dir_all(&testdir.join("subdir/another_subdir")); + + let root_config = testdir.join("config.toml"); + let root_config_content = br#" + include = ["./subdir/extension.toml"] + "#; + File::create(&root_config).unwrap().write_all(root_config_content).unwrap(); + + let extension = testdir.join("subdir/extension.toml"); + let extension_content = br#" + include = ["../extension2.toml"] + "#; + File::create(extension).unwrap().write_all(extension_content).unwrap(); + + let extension = testdir.join("extension2.toml"); + let extension_content = br#" + include = ["./subdir/another_subdir/extension3.toml"] + "#; + File::create(extension).unwrap().write_all(extension_content).unwrap(); + + let extension = testdir.join("subdir/another_subdir/extension3.toml"); + let extension_content = br#" + include = ["../../extension4.toml"] + "#; + File::create(extension).unwrap().write_all(extension_content).unwrap(); + + let extension = testdir.join("extension4.toml"); + File::create(extension).unwrap().write_all(&[]).unwrap(); + + let config = Config::parse_inner( + Flags::parse(&["check".to_owned(), format!("--config={}", root_config.to_str().unwrap())]), + get_toml, + ); +} + +#[test] +fn test_include_precedence_over_profile() { + let testdir = prepare_test_specific_dir(); + + let root_config = testdir.join("config.toml"); + let root_config_content = br#" + profile = "dist" + include = ["./extension.toml"] + "#; + File::create(&root_config).unwrap().write_all(root_config_content).unwrap(); + + let extension = testdir.join("extension.toml"); + let extension_content = br#" + [rust] + channel = "dev" + "#; + File::create(extension).unwrap().write_all(extension_content).unwrap(); + + let config = Config::parse_inner( + Flags::parse(&["check".to_owned(), format!("--config={}", root_config.to_str().unwrap())]), + get_toml, + ); + + // "dist" profile would normally set the channel to "auto-detect", but includes should + // override profile settings, so we expect this to be "dev" here. + assert_eq!(config.channel, "dev"); +} + +#[test] +fn test_pr_ci_unchanged_anywhere() { + git_test(|ctx| { + let sha = ctx.create_upstream_merge(&["a"]); + ctx.create_nonupstream_merge(&["b"]); + let src = ctx.check_modifications(&["c"], CiEnv::GitHubActions); + assert_eq!(src, PathFreshness::LastModifiedUpstream { upstream: sha }); + }); +} + +#[test] +fn test_pr_ci_changed_in_pr() { + git_test(|ctx| { + let sha = ctx.create_upstream_merge(&["a"]); + ctx.create_nonupstream_merge(&["b"]); + let src = ctx.check_modifications(&["b"], CiEnv::GitHubActions); + assert_eq!(src, PathFreshness::HasLocalModifications { upstream: sha }); + }); +} + +#[test] +fn test_auto_ci_unchanged_anywhere_select_parent() { + git_test(|ctx| { + let sha = ctx.create_upstream_merge(&["a"]); + ctx.create_upstream_merge(&["b"]); + let src = ctx.check_modifications(&["c"], CiEnv::GitHubActions); + assert_eq!(src, PathFreshness::LastModifiedUpstream { upstream: sha }); + }); +} + +#[test] +fn test_auto_ci_changed_in_pr() { + git_test(|ctx| { + let sha = ctx.create_upstream_merge(&["a"]); + ctx.create_upstream_merge(&["b", "c"]); + let src = ctx.check_modifications(&["c", "d"], CiEnv::GitHubActions); + assert_eq!(src, PathFreshness::HasLocalModifications { upstream: sha }); + }); +} + +#[test] +fn test_local_uncommitted_modifications() { + git_test(|ctx| { + let sha = ctx.create_upstream_merge(&["a"]); + ctx.create_branch("feature"); + ctx.modify("a"); + + assert_eq!( + ctx.check_modifications(&["a", "d"], CiEnv::None), + PathFreshness::HasLocalModifications { upstream: sha } + ); + }); +} + +#[test] +fn test_local_committed_modifications() { + git_test(|ctx| { + let sha = ctx.create_upstream_merge(&["a"]); + ctx.create_upstream_merge(&["b", "c"]); + ctx.create_branch("feature"); + ctx.modify("x"); + ctx.commit(); + ctx.modify("a"); + ctx.commit(); + + assert_eq!( + ctx.check_modifications(&["a", "d"], CiEnv::None), + PathFreshness::HasLocalModifications { upstream: sha } + ); + }); +} + +#[test] +fn test_local_committed_modifications_subdirectory() { + git_test(|ctx| { + let sha = ctx.create_upstream_merge(&["a/b/c"]); + ctx.create_upstream_merge(&["b", "c"]); + ctx.create_branch("feature"); + ctx.modify("a/b/d"); + ctx.commit(); + + assert_eq!( + ctx.check_modifications(&["a/b"], CiEnv::None), + PathFreshness::HasLocalModifications { upstream: sha } + ); + }); +} + +#[test] +fn test_local_changes_in_head_upstream() { + git_test(|ctx| { + // We want to resolve to the upstream commit that made modifications to a, + // even if it is currently HEAD + let sha = ctx.create_upstream_merge(&["a"]); + assert_eq!( + ctx.check_modifications(&["a", "d"], CiEnv::None), + PathFreshness::LastModifiedUpstream { upstream: sha } + ); + }); +} + +#[test] +fn test_local_changes_in_previous_upstream() { + git_test(|ctx| { + // We want to resolve to this commit, which modified a + let sha = ctx.create_upstream_merge(&["a", "e"]); + // Not to this commit, which is the latest upstream commit + ctx.create_upstream_merge(&["b", "c"]); + ctx.create_branch("feature"); + ctx.modify("d"); + ctx.commit(); + + assert_eq!( + ctx.check_modifications(&["a"], CiEnv::None), + PathFreshness::LastModifiedUpstream { upstream: sha } + ); + }); +} + +#[test] +fn test_local_no_upstream_commit_with_changes() { + git_test(|ctx| { + ctx.create_upstream_merge(&["a", "e"]); + ctx.create_upstream_merge(&["a", "e"]); + // We want to fall back to this commit, because there are no commits + // that modified `x`. + let sha = ctx.create_upstream_merge(&["a", "e"]); + ctx.create_branch("feature"); + ctx.modify("d"); + ctx.commit(); + assert_eq!( + ctx.check_modifications(&["x"], CiEnv::None), + PathFreshness::LastModifiedUpstream { upstream: sha } + ); + }); +} + +#[test] +fn test_local_no_upstream_commit() { + git_test(|ctx| { + let src = ctx.check_modifications(&["c", "d"], CiEnv::None); + assert_eq!(src, PathFreshness::MissingUpstream); + }); +} + +#[test] +fn test_local_changes_negative_path() { + git_test(|ctx| { + let upstream = ctx.create_upstream_merge(&["a"]); + ctx.create_branch("feature"); + ctx.modify("b"); + ctx.modify("d"); + ctx.commit(); + + assert_eq!( + ctx.check_modifications(&[":!b", ":!d"], CiEnv::None), + PathFreshness::LastModifiedUpstream { upstream: upstream.clone() } + ); + assert_eq!( + ctx.check_modifications(&[":!c"], CiEnv::None), + PathFreshness::HasLocalModifications { upstream: upstream.clone() } + ); + assert_eq!( + ctx.check_modifications(&[":!d", ":!x"], CiEnv::None), + PathFreshness::HasLocalModifications { upstream } + ); + }); +} + +#[test] +fn test_local_changes_subtree_that_used_bors() { + // Here we simulate a very specific situation related to subtrees. + // When you have merge commits locally, we should ignore them w.r.t. the artifact download + // logic. + // The upstream search code currently uses a simple heuristic: + // - Find commits by bors (or in general an author with the merge commit e-mail) + // - Find the newest such commit + // This should make it work even for subtrees that: + // - Used bors in the past (so they have bors merge commits in their history). + // - Use Josh to merge rustc into the subtree, in a way that the rustc history is the second + // parent, not the first one. + // + // In addition, when searching for modified files, we cannot simply start from HEAD, because + // in this situation git wouldn't find the right commit. + // + // This test checks that this specific scenario will resolve to the right rustc commit, both + // when finding a modified file and when finding a non-existent file (which essentially means + // that we just lookup the most recent upstream commit). + // + // See https://github.com/rust-lang/rust/issues/101907#issuecomment-2697671282 for more details. + git_test(|ctx| { + ctx.create_upstream_merge(&["a"]); + + // Start unrelated subtree history + ctx.run_git(&["switch", "--orphan", "subtree"]); + ctx.modify("bar"); + ctx.commit(); + // Now we need to emulate old bors commits in the subtree. + // Git only has a resolution of one second, which is a problem, since our git logic orders + // merge commits by their date. + // To avoid sleeping in the test, we modify the commit date to be forcefully in the past. + ctx.create_upstream_merge(&["subtree/a"]); + ctx.run_git(&["commit", "--amend", "--date", "Wed Feb 16 14:00 2011 +0100", "--no-edit"]); + + // Merge the subtree history into rustc + ctx.switch_to_branch("main"); + ctx.run_git(&["merge", "subtree", "--allow-unrelated"]); + + // Create a rustc commit that modifies a path that we're interested in (`x`) + let upstream_1 = ctx.create_upstream_merge(&["x"]); + // Create another bors commit + let upstream_2 = ctx.create_upstream_merge(&["a"]); + + ctx.switch_to_branch("subtree"); + + // Create a subtree branch + ctx.create_branch("subtree-pr"); + ctx.modify("baz"); + ctx.commit(); + // We merge rustc into this branch (simulating a "subtree pull") + ctx.merge("main", "committer "); + + // And then merge that branch into the subtree (simulating a situation right before a + // "subtree push") + ctx.switch_to_branch("subtree"); + ctx.merge("subtree-pr", "committer "); + + // And we want to check that we resolve to the right commits. + assert_eq!( + ctx.check_modifications(&["x"], CiEnv::None), + PathFreshness::LastModifiedUpstream { upstream: upstream_1 } + ); + assert_eq!( + ctx.check_modifications(&["nonexistent"], CiEnv::None), + PathFreshness::LastModifiedUpstream { upstream: upstream_2 } + ); + }); +} diff --git a/src/bootstrap/src/core/download.rs b/src/bootstrap/src/core/download.rs index 5bd947f6e6360..b95d07356c1b9 100644 --- a/src/bootstrap/src/core/download.rs +++ b/src/bootstrap/src/core/download.rs @@ -417,7 +417,7 @@ enum DownloadSource { Dist, } -/// Functions that are only ever called once, but named for clarify and to avoid thousand-line functions. +/// Functions that are only ever called once, but named for clarity and to avoid thousand-line functions. impl Config { pub(crate) fn download_clippy(&self) -> PathBuf { self.verbose(|| println!("downloading stage0 clippy artifacts")); @@ -720,8 +720,9 @@ download-rustc = false #[cfg(not(test))] pub(crate) fn maybe_download_ci_llvm(&self) { use build_helper::exit; + use build_helper::git::PathFreshness; - use crate::core::build_steps::llvm::detect_llvm_sha; + use crate::core::build_steps::llvm::detect_llvm_freshness; use crate::core::config::check_incompatible_options_for_ci_llvm; if !self.llvm_from_ci { @@ -729,7 +730,22 @@ download-rustc = false } let llvm_root = self.ci_llvm_root(); - let llvm_sha = detect_llvm_sha(self, self.rust_info.is_managed_git_subrepository()); + let llvm_freshness = + detect_llvm_freshness(self, self.rust_info.is_managed_git_subrepository()); + self.verbose(|| { + eprintln!("LLVM freshness: {llvm_freshness:?}"); + }); + let llvm_sha = match llvm_freshness { + PathFreshness::LastModifiedUpstream { upstream } => upstream, + PathFreshness::HasLocalModifications { upstream } => upstream, + PathFreshness::MissingUpstream => { + eprintln!("error: could not find commit hash for downloading LLVM"); + eprintln!("HELP: maybe your repository history is too shallow?"); + eprintln!("HELP: consider disabling `download-ci-llvm`"); + eprintln!("HELP: or fetch enough history to include one upstream commit"); + crate::exit!(1); + } + }; let stamp_key = format!("{}{}", llvm_sha, self.llvm_assertions); let llvm_stamp = BuildStamp::new(&llvm_root).with_prefix("llvm").add_stamp(stamp_key); if !llvm_stamp.is_up_to_date() && !self.dry_run() { diff --git a/src/bootstrap/src/core/sanity.rs b/src/bootstrap/src/core/sanity.rs index dbfebd11f8283..eb0bf1d166a16 100644 --- a/src/bootstrap/src/core/sanity.rs +++ b/src/bootstrap/src/core/sanity.rs @@ -34,7 +34,7 @@ pub struct Finder { // Targets can be removed from this list once they are present in the stage0 compiler (usually by updating the beta compiler of the bootstrap). const STAGE0_MISSING_TARGETS: &[&str] = &[ // just a dummy comment so the list doesn't get onelined - "wasm32-wali-linux-musl", + "x86_64-lynx-lynxos178", ]; /// Minimum version threshold for libstdc++ required when using prebuilt LLVM @@ -326,7 +326,7 @@ than building it. if target.contains("musl") && !target.contains("unikraft") { // If this is a native target (host is also musl) and no musl-root is given, // fall back to the system toolchain in /usr before giving up - if build.musl_root(*target).is_none() && build.is_builder_target(*target) { + if build.musl_root(*target).is_none() && build.config.is_host_target(*target) { let target = build.config.target_config.entry(*target).or_default(); target.musl_root = Some("/usr".into()); } diff --git a/src/bootstrap/src/lib.rs b/src/bootstrap/src/lib.rs index 843d474f92de8..1e6acad5c0fc9 100644 --- a/src/bootstrap/src/lib.rs +++ b/src/bootstrap/src/lib.rs @@ -35,7 +35,7 @@ use utils::channel::GitInfo; use crate::core::builder; use crate::core::builder::Kind; -use crate::core::config::{DryRun, LldMode, LlvmLibunwind, Target, TargetSelection, flags}; +use crate::core::config::{DryRun, LldMode, LlvmLibunwind, TargetSelection, flags}; use crate::utils::exec::{BehaviorOnFailure, BootstrapCommand, CommandOutput, OutputMode, command}; use crate::utils::helpers::{ self, dir_is_empty, exe, libdir, output, set_file_times, split_debuginfo, symlink_dir, @@ -53,6 +53,7 @@ use tracing::{instrument, span}; pub use utils::change_tracker::{ CONFIG_CHANGE_HISTORY, find_recent_config_change_ids, human_readable_changes, }; +pub use utils::helpers::PanicTracker; use crate::core::build_steps::vendor::VENDOR_DIR; @@ -748,7 +749,7 @@ impl Build { features.push("llvm"); } // keep in sync with `bootstrap/compile.rs:rustc_cargo_env` - if self.config.rust_randomize_layout { + if self.config.rust_randomize_layout && check("rustc_randomized_layouts") { features.push("rustc_randomized_layouts"); } @@ -803,7 +804,7 @@ impl Build { /// Note that if LLVM is configured externally then the directory returned /// will likely be empty. fn llvm_out(&self, target: TargetSelection) -> PathBuf { - if self.config.llvm_from_ci && self.is_builder_target(target) { + if self.config.llvm_from_ci && self.config.is_host_target(target) { self.config.ci_llvm_root() } else { self.out.join(target).join("llvm") @@ -851,37 +852,6 @@ impl Build { if self.config.vendor { Some(self.src.join(VENDOR_DIR)) } else { None } } - /// Returns `true` if this is an external version of LLVM not managed by bootstrap. - /// In particular, we expect llvm sources to be available when this is false. - /// - /// NOTE: this is not the same as `!is_rust_llvm` when `llvm_has_patches` is set. - fn is_system_llvm(&self, target: TargetSelection) -> bool { - match self.config.target_config.get(&target) { - Some(Target { llvm_config: Some(_), .. }) => { - let ci_llvm = self.config.llvm_from_ci && self.is_builder_target(target); - !ci_llvm - } - // We're building from the in-tree src/llvm-project sources. - Some(Target { llvm_config: None, .. }) => false, - None => false, - } - } - - /// Returns `true` if this is our custom, patched, version of LLVM. - /// - /// This does not necessarily imply that we're managing the `llvm-project` submodule. - fn is_rust_llvm(&self, target: TargetSelection) -> bool { - match self.config.target_config.get(&target) { - // We're using a user-controlled version of LLVM. The user has explicitly told us whether the version has our patches. - // (They might be wrong, but that's not a supported use-case.) - // In particular, this tries to support `submodules = false` and `patches = false`, for using a newer version of LLVM that's not through `rust-lang/llvm-project`. - Some(Target { llvm_has_rust_patches: Some(patched), .. }) => *patched, - // The user hasn't promised the patches match. - // This only has our patches if it's downloaded from CI or built from source. - _ => !self.is_system_llvm(target), - } - } - /// Returns the path to `FileCheck` binary for the specified target fn llvm_filecheck(&self, target: TargetSelection) -> PathBuf { let target_config = self.config.target_config.get(&target); @@ -1356,7 +1326,7 @@ Executed at: {executed_at}"#, // need to use CXX compiler as linker to resolve the exception functions // that are only existed in CXX libraries Some(self.cxx.borrow()[&target].path().into()) - } else if !self.is_builder_target(target) + } else if !self.config.is_host_target(target) && helpers::use_host_linker(target) && !target.is_msvc() { @@ -1557,7 +1527,7 @@ Executed at: {executed_at}"#, !self.config.full_bootstrap && !self.config.download_rustc() && stage >= 2 - && (self.hosts.iter().any(|h| *h == target) || target == self.build) + && (self.hosts.contains(&target) || target == self.build) } /// Checks whether the `compiler` compiling for `target` should be forced to @@ -2025,11 +1995,6 @@ to download LLVM rather than building it. stream.reset().unwrap(); result } - - /// Checks if the given target is the same as the builder target. - fn is_builder_target(&self, target: TargetSelection) -> bool { - self.config.build == target - } } #[cfg(unix)] diff --git a/src/bootstrap/src/utils/cc_detect.rs b/src/bootstrap/src/utils/cc_detect.rs index 147b009d3f477..ceac24d4315c7 100644 --- a/src/bootstrap/src/utils/cc_detect.rs +++ b/src/bootstrap/src/utils/cc_detect.rs @@ -96,6 +96,7 @@ pub fn find(build: &Build) { let targets: HashSet<_> = match build.config.cmd { // We don't need to check cross targets for these commands. crate::Subcommand::Clean { .. } + | crate::Subcommand::Check { .. } | crate::Subcommand::Suggest { .. } | crate::Subcommand::Format { .. } | crate::Subcommand::Setup { .. } => { diff --git a/src/bootstrap/src/utils/cc_detect/tests.rs b/src/bootstrap/src/utils/cc_detect/tests.rs index b4a1b52dd2300..43d61ce02c5af 100644 --- a/src/bootstrap/src/utils/cc_detect/tests.rs +++ b/src/bootstrap/src/utils/cc_detect/tests.rs @@ -181,7 +181,7 @@ fn test_language_clang() { #[test] fn test_new_cc_build() { - let build = Build::new(Config { ..Config::parse(Flags::parse(&["check".to_owned()])) }); + let build = Build::new(Config { ..Config::parse(Flags::parse(&["build".to_owned()])) }); let target = TargetSelection::from_user("x86_64-unknown-linux-gnu"); let cfg = new_cc_build(&build, target.clone()); let compiler = cfg.get_compiler(); @@ -190,7 +190,7 @@ fn test_new_cc_build() { #[test] fn test_default_compiler_wasi() { - let build = Build::new(Config { ..Config::parse(Flags::parse(&["check".to_owned()])) }); + let build = Build::new(Config { ..Config::parse(Flags::parse(&["build".to_owned()])) }); let target = TargetSelection::from_user("wasm32-wasi"); let wasi_sdk = PathBuf::from("/wasi-sdk"); // SAFETY: bootstrap tests run on a single thread @@ -215,7 +215,7 @@ fn test_default_compiler_wasi() { #[test] fn test_default_compiler_fallback() { - let build = Build::new(Config { ..Config::parse(Flags::parse(&["check".to_owned()])) }); + let build = Build::new(Config { ..Config::parse(Flags::parse(&["build".to_owned()])) }); let target = TargetSelection::from_user("x86_64-unknown-linux-gnu"); let mut cfg = cc::Build::new(); let result = default_compiler(&mut cfg, Language::C, target, &build); @@ -224,7 +224,7 @@ fn test_default_compiler_fallback() { #[test] fn test_find_target_with_config() { - let mut build = Build::new(Config { ..Config::parse(Flags::parse(&["check".to_owned()])) }); + let mut build = Build::new(Config { ..Config::parse(Flags::parse(&["build".to_owned()])) }); let target = TargetSelection::from_user("x86_64-unknown-linux-gnu"); let mut target_config = Target::default(); target_config.cc = Some(PathBuf::from("dummy-cc")); @@ -249,7 +249,7 @@ fn test_find_target_with_config() { #[test] fn test_find_target_without_config() { - let mut build = Build::new(Config { ..Config::parse(Flags::parse(&["check".to_owned()])) }); + let mut build = Build::new(Config { ..Config::parse(Flags::parse(&["build".to_owned()])) }); let target = TargetSelection::from_user("x86_64-unknown-linux-gnu"); build.config.target_config.clear(); find_target(&build, target.clone()); @@ -262,7 +262,7 @@ fn test_find_target_without_config() { #[test] fn test_find() { - let mut build = Build::new(Config { ..Config::parse(Flags::parse(&["check".to_owned()])) }); + let mut build = Build::new(Config { ..Config::parse(Flags::parse(&["build".to_owned()])) }); let target1 = TargetSelection::from_user("x86_64-unknown-linux-gnu"); let target2 = TargetSelection::from_user("x86_64-unknown-openbsd"); build.targets.push(target1.clone()); diff --git a/src/bootstrap/src/utils/change_tracker.rs b/src/bootstrap/src/utils/change_tracker.rs index 244391739f38a..1d0ea3ebf6105 100644 --- a/src/bootstrap/src/utils/change_tracker.rs +++ b/src/bootstrap/src/utils/change_tracker.rs @@ -391,4 +391,24 @@ pub const CONFIG_CHANGE_HISTORY: &[ChangeInfo] = &[ severity: ChangeSeverity::Info, summary: "You can now use `change-id = \"ignore\"` to suppress `change-id ` warnings in the console.", }, + ChangeInfo { + change_id: 139386, + severity: ChangeSeverity::Info, + summary: "Added a new option `build.compiletest-use-stage0-libtest` to force `compiletest` to use the stage 0 libtest.", + }, + ChangeInfo { + change_id: 138934, + severity: ChangeSeverity::Info, + summary: "Added new option `include` to create config extensions.", + }, + ChangeInfo { + change_id: 140438, + severity: ChangeSeverity::Info, + summary: "Added a new option `rust.debug-assertions-tools` to control debug asssertions for tools.", + }, + ChangeInfo { + change_id: 140732, + severity: ChangeSeverity::Info, + summary: "`./x run` now supports running in-tree `rustfmt`, e.g., `./x run rustfmt -- --check /path/to/file.rs`.", + }, ]; diff --git a/src/bootstrap/src/utils/helpers.rs b/src/bootstrap/src/utils/helpers.rs index f8e4d4e04717d..b31b2757767c3 100644 --- a/src/bootstrap/src/utils/helpers.rs +++ b/src/bootstrap/src/utils/helpers.rs @@ -7,8 +7,9 @@ use std::ffi::OsStr; use std::path::{Path, PathBuf}; use std::process::{Command, Stdio}; use std::sync::OnceLock; +use std::thread::panicking; use std::time::{Instant, SystemTime, UNIX_EPOCH}; -use std::{env, fs, io, str}; +use std::{env, fs, io, panic, str}; use build_helper::util::fail; use object::read::archive::ArchiveFile; @@ -22,6 +23,23 @@ pub use crate::utils::shared_helpers::{dylib_path, dylib_path_var}; #[cfg(test)] mod tests; +/// A wrapper around `std::panic::Location` used to track the location of panics +/// triggered by `t` macro usage. +pub struct PanicTracker<'a>(pub &'a panic::Location<'a>); + +impl Drop for PanicTracker<'_> { + fn drop(&mut self) { + if panicking() { + eprintln!( + "Panic was initiated from {}:{}:{}", + self.0.file(), + self.0.line(), + self.0.column() + ); + } + } +} + /// A helper macro to `unwrap` a result except also print out details like: /// /// * The file/line of the panic @@ -32,19 +50,21 @@ mod tests; /// using a `Result` with `try!`, but this may change one day... #[macro_export] macro_rules! t { - ($e:expr) => { + ($e:expr) => {{ + let _panic_guard = $crate::PanicTracker(std::panic::Location::caller()); match $e { Ok(e) => e, Err(e) => panic!("{} failed with {}", stringify!($e), e), } - }; + }}; // it can show extra info in the second parameter - ($e:expr, $extra:expr) => { + ($e:expr, $extra:expr) => {{ + let _panic_guard = $crate::PanicTracker(std::panic::Location::caller()); match $e { Ok(e) => e, Err(e) => panic!("{} failed with {} ({:?})", stringify!($e), e, $extra), } - }; + }}; } pub use t; @@ -432,9 +452,7 @@ pub fn dir_is_empty(dir: &Path) -> bool { /// the "y" part from the string. pub fn extract_beta_rev(version: &str) -> Option { let parts = version.splitn(2, "-beta.").collect::>(); - let count = parts.get(1).and_then(|s| s.find(' ').map(|p| s[..p].to_string())); - - count + parts.get(1).and_then(|s| s.find(' ').map(|p| s[..p].to_string())) } pub enum LldThreads { @@ -447,9 +465,8 @@ pub fn linker_args( builder: &Builder<'_>, target: TargetSelection, lld_threads: LldThreads, - stage: u32, ) -> Vec { - let mut args = linker_flags(builder, target, lld_threads, stage); + let mut args = linker_flags(builder, target, lld_threads); if let Some(linker) = builder.linker(target) { args.push(format!("-Clinker={}", linker.display())); @@ -464,23 +481,17 @@ pub fn linker_flags( builder: &Builder<'_>, target: TargetSelection, lld_threads: LldThreads, - stage: u32, ) -> Vec { let mut args = vec![]; if !builder.is_lld_direct_linker(target) && builder.config.lld_mode.is_used() { match builder.config.lld_mode { LldMode::External => { - // cfg(bootstrap) - remove after updating bootstrap compiler (#137498) - if stage == 0 && target.is_windows() { - args.push("-Clink-arg=-fuse-ld=lld".to_string()); - } else { - args.push("-Clinker-flavor=gnu-lld-cc".to_string()); - } + args.push("-Zlinker-features=+lld".to_string()); // FIXME(kobzol): remove this flag once MCP510 gets stabilized args.push("-Zunstable-options".to_string()); } LldMode::SelfContained => { - args.push("-Clinker-flavor=gnu-lld-cc".to_string()); + args.push("-Zlinker-features=+lld".to_string()); args.push("-Clink-self-contained=+linker".to_string()); // FIXME(kobzol): remove this flag once MCP510 gets stabilized args.push("-Zunstable-options".to_string()); @@ -503,9 +514,8 @@ pub fn add_rustdoc_cargo_linker_args( builder: &Builder<'_>, target: TargetSelection, lld_threads: LldThreads, - stage: u32, ) { - let args = linker_args(builder, target, lld_threads, stage); + let args = linker_args(builder, target, lld_threads); let mut flags = cmd .get_envs() .find_map(|(k, v)| if k == OsStr::new("RUSTDOCFLAGS") { v } else { None }) diff --git a/src/bootstrap/src/utils/mod.rs b/src/bootstrap/src/utils/mod.rs index caef8ce3088a7..169fcec303e90 100644 --- a/src/bootstrap/src/utils/mod.rs +++ b/src/bootstrap/src/utils/mod.rs @@ -20,4 +20,4 @@ pub(crate) mod tracing; pub(crate) mod metrics; #[cfg(test)] -mod tests; +pub(crate) mod tests; diff --git a/src/bootstrap/src/utils/proc_macro_deps.rs b/src/bootstrap/src/utils/proc_macro_deps.rs index dbfd6f47dc67f..b61fa3bb8d690 100644 --- a/src/bootstrap/src/utils/proc_macro_deps.rs +++ b/src/bootstrap/src/utils/proc_macro_deps.rs @@ -5,6 +5,7 @@ pub static CRATES: &[&str] = &[ // tidy-alphabetical-start "annotate-snippets", "anstyle", + "askama_parser", "basic-toml", "block-buffer", "bumpalo", @@ -27,18 +28,14 @@ pub static CRATES: &[&str] = &[ "libc", "log", "memchr", - "mime", - "mime_guess", "minimal-lexical", "nom", - "num-conv", "once_cell", "pest", "pest_generator", "pest_meta", "proc-macro2", "quote", - "rinja_parser", "rustc-hash", "self_cell", "serde", @@ -49,7 +46,6 @@ pub static CRATES: &[&str] = &[ "syn", "synstructure", "thiserror", - "time-core", "tinystr", "type-map", "typenum", @@ -57,13 +53,13 @@ pub static CRATES: &[&str] = &[ "unic-langid", "unic-langid-impl", "unic-langid-macros", - "unicase", "unicode-ident", "unicode-width", "version_check", "wasm-bindgen-backend", "wasm-bindgen-macro-support", "wasm-bindgen-shared", + "winnow", "yoke", "zerofrom", "zerovec", diff --git a/src/bootstrap/src/utils/tests/git.rs b/src/bootstrap/src/utils/tests/git.rs new file mode 100644 index 0000000000000..d9dd9ab980088 --- /dev/null +++ b/src/bootstrap/src/utils/tests/git.rs @@ -0,0 +1,154 @@ +use std::ffi::OsStr; +use std::fs::OpenOptions; +use std::path::Path; +use std::process::Command; + +use build_helper::ci::CiEnv; +use build_helper::git::{GitConfig, PathFreshness, check_path_modifications}; + +pub struct GitCtx { + dir: tempfile::TempDir, + pub git_repo: String, + pub nightly_branch: String, + pub merge_bot_email: String, +} + +impl GitCtx { + fn new() -> Self { + let dir = tempfile::TempDir::new().unwrap(); + let ctx = Self { + dir, + git_repo: "rust-lang/rust".to_string(), + nightly_branch: "nightly".to_string(), + merge_bot_email: "Merge bot ".to_string(), + }; + ctx.run_git(&["init"]); + ctx.run_git(&["config", "user.name", "Tester"]); + ctx.run_git(&["config", "user.email", "tester@rust-lang.org"]); + ctx.modify("README.md"); + ctx.commit(); + ctx.run_git(&["branch", "-m", "main"]); + ctx + } + + pub fn get_path(&self) -> &Path { + self.dir.path() + } + + pub fn check_modifications(&self, target_paths: &[&str], ci_env: CiEnv) -> PathFreshness { + check_path_modifications(self.dir.path(), &self.git_config(), target_paths, ci_env).unwrap() + } + + pub fn create_upstream_merge(&self, modified_files: &[&str]) -> String { + self.create_branch_and_merge("previous-pr", modified_files, &self.merge_bot_email) + } + + pub fn create_nonupstream_merge(&self, modified_files: &[&str]) -> String { + self.create_branch_and_merge("pr", modified_files, "Tester ") + } + + pub fn create_branch_and_merge( + &self, + branch: &str, + modified_files: &[&str], + author: &str, + ) -> String { + let current_branch = self.get_current_branch(); + + self.create_branch(branch); + for file in modified_files { + self.modify(file); + } + self.commit(); + self.switch_to_branch(¤t_branch); + self.merge(branch, author); + self.run_git(&["branch", "-d", branch]); + self.get_current_commit() + } + + pub fn get_current_commit(&self) -> String { + self.run_git(&["rev-parse", "HEAD"]) + } + + pub fn get_current_branch(&self) -> String { + self.run_git(&["rev-parse", "--abbrev-ref", "HEAD"]) + } + + pub fn merge(&self, branch: &str, author: &str) { + self.run_git(&["merge", "--no-commit", "--no-ff", branch]); + self.run_git(&[ + "commit".to_string(), + "-m".to_string(), + format!("Merge of {branch} into {}", self.get_current_branch()), + "--author".to_string(), + author.to_string(), + ]); + } + + pub fn modify(&self, path: &str) { + self.write(path, "line"); + } + + pub fn write(&self, path: &str, data: &str) { + use std::io::Write; + + let path = self.dir.path().join(path); + std::fs::create_dir_all(&path.parent().unwrap()).unwrap(); + + let mut file = OpenOptions::new().create(true).append(true).open(path).unwrap(); + writeln!(file, "{data}").unwrap(); + } + + pub fn commit(&self) -> String { + self.run_git(&["add", "."]); + self.run_git(&["commit", "-m", "commit message"]); + self.get_current_commit() + } + + pub fn switch_to_branch(&self, name: &str) { + self.run_git(&["switch", name]); + } + + /// Creates a branch and switches to it. + pub fn create_branch(&self, name: &str) { + self.run_git(&["checkout", "-b", name]); + } + + pub fn run_git>(&self, args: &[S]) -> String { + let mut cmd = self.git_cmd(); + cmd.args(args); + eprintln!("Running {cmd:?}"); + let output = cmd.output().unwrap(); + let stdout = String::from_utf8(output.stdout).unwrap().trim().to_string(); + let stderr = String::from_utf8(output.stderr).unwrap().trim().to_string(); + if !output.status.success() { + panic!("Git command `{cmd:?}` failed\nStdout\n{stdout}\nStderr\n{stderr}"); + } + stdout + } + + fn git_cmd(&self) -> Command { + let mut cmd = Command::new("git"); + cmd.env("GIT_CONFIG_NOSYSTEM", "1"); + cmd.env("GIT_CONFIG_SYSTEM", "/tmp/nonexistent"); + cmd.env("GIT_CONFIG_GLOBAL", "/tmp/nonexistent"); + cmd.current_dir(&self.dir); + cmd + } + + fn git_config(&self) -> GitConfig<'_> { + GitConfig { + nightly_branch: &self.nightly_branch, + git_merge_commit_email: &self.merge_bot_email, + } + } +} + +/// Run an end-to-end test that allows testing git logic. +pub fn git_test(test_fn: F) +where + F: FnOnce(&mut GitCtx), +{ + let mut ctx = GitCtx::new(); + test_fn(&mut ctx); +} diff --git a/src/bootstrap/src/utils/tests/mod.rs b/src/bootstrap/src/utils/tests/mod.rs index 0791f7a6e2074..73d55db994cc0 100644 --- a/src/bootstrap/src/utils/tests/mod.rs +++ b/src/bootstrap/src/utils/tests/mod.rs @@ -1 +1,2 @@ +pub mod git; mod shared_helpers_tests; diff --git a/src/build_helper/src/fs/mod.rs b/src/build_helper/src/fs/mod.rs index 02029846fd147..123df76e6a2e9 100644 --- a/src/build_helper/src/fs/mod.rs +++ b/src/build_helper/src/fs/mod.rs @@ -22,21 +22,27 @@ where /// A wrapper around [`std::fs::remove_dir_all`] that can also be used on *non-directory entries*, /// including files and symbolic links. /// -/// - This will produce an error if the target path is not found. +/// - This will not produce an error if the target path is not found. /// - Like [`std::fs::remove_dir_all`], this helper does not traverse symbolic links, will remove /// symbolic link itself. /// - This helper is **not** robust against races on the underlying filesystem, behavior is /// unspecified if this helper is called concurrently. /// - This helper is not robust against TOCTOU problems. /// -/// FIXME: this implementation is insufficiently robust to replace bootstrap's clean `rm_rf` -/// implementation: -/// -/// - This implementation currently does not perform retries. +/// FIXME: Audit whether this implementation is robust enough to replace bootstrap's clean `rm_rf`. #[track_caller] pub fn recursive_remove>(path: P) -> io::Result<()> { let path = path.as_ref(); - let metadata = fs::symlink_metadata(path)?; + + // If the path doesn't exist, we treat it as a successful no-op. + // From the caller's perspective, the goal is simply "ensure this file/dir is gone" — + // if it's already not there, that's a success, not an error. + let metadata = match fs::symlink_metadata(path) { + Ok(m) => m, + Err(e) if e.kind() == io::ErrorKind::NotFound => return Ok(()), + Err(e) => return Err(e), + }; + #[cfg(windows)] let is_dir_like = |meta: &fs::Metadata| { use std::os::windows::fs::FileTypeExt; @@ -45,11 +51,35 @@ pub fn recursive_remove>(path: P) -> io::Result<()> { #[cfg(not(windows))] let is_dir_like = fs::Metadata::is_dir; - if is_dir_like(&metadata) { - fs::remove_dir_all(path) - } else { - try_remove_op_set_perms(fs::remove_file, path, metadata) + const MAX_RETRIES: usize = 5; + const RETRY_DELAY_MS: u64 = 100; + + let try_remove = || { + if is_dir_like(&metadata) { + fs::remove_dir_all(path) + } else { + try_remove_op_set_perms(fs::remove_file, path, metadata.clone()) + } + }; + + // Retry deletion a few times to handle transient filesystem errors. + // This is unusual for local file operations, but it's a mitigation + // against unlikely events where malware scanners may be holding a + // file beyond our control, to give the malware scanners some opportunity + // to release their hold. + for attempt in 0..MAX_RETRIES { + match try_remove() { + Ok(()) => return Ok(()), + Err(e) if e.kind() == io::ErrorKind::NotFound => return Ok(()), + Err(_) if attempt < MAX_RETRIES - 1 => { + std::thread::sleep(std::time::Duration::from_millis(RETRY_DELAY_MS)); + continue; + } + Err(e) => return Err(e), + } } + + Ok(()) } fn try_remove_op_set_perms<'p, Op>(mut op: Op, path: &'p Path, metadata: Metadata) -> io::Result<()> @@ -67,3 +97,9 @@ where Err(e) => Err(e), } } + +pub fn remove_and_create_dir_all>(path: P) -> io::Result<()> { + let path = path.as_ref(); + recursive_remove(path)?; + fs::create_dir_all(path) +} diff --git a/src/build_helper/src/fs/tests.rs b/src/build_helper/src/fs/tests.rs index 1e694393127cb..7ce1d8928d1cb 100644 --- a/src/build_helper/src/fs/tests.rs +++ b/src/build_helper/src/fs/tests.rs @@ -14,7 +14,7 @@ mod recursive_remove_tests { let tmpdir = env::temp_dir(); let path = tmpdir.join("__INTERNAL_BOOTSTRAP_nonexistent_path"); assert!(fs::symlink_metadata(&path).is_err_and(|e| e.kind() == io::ErrorKind::NotFound)); - assert!(recursive_remove(&path).is_err_and(|e| e.kind() == io::ErrorKind::NotFound)); + assert!(recursive_remove(&path).is_ok()); } #[test] diff --git a/src/build_helper/src/git.rs b/src/build_helper/src/git.rs index 693e0fc8f46d8..438cd14389c1c 100644 --- a/src/build_helper/src/git.rs +++ b/src/build_helper/src/git.rs @@ -3,8 +3,8 @@ use std::process::{Command, Stdio}; use crate::ci::CiEnv; +#[derive(Debug)] pub struct GitConfig<'a> { - pub git_repository: &'a str, pub nightly_branch: &'a str, pub git_merge_commit_email: &'a str, } @@ -27,145 +27,234 @@ pub fn output_result(cmd: &mut Command) -> Result { String::from_utf8(output.stdout).map_err(|err| format!("{err:?}")) } -/// Finds the remote for rust-lang/rust. -/// For example for these remotes it will return `upstream`. -/// ```text -/// origin https://github.com/pietroalbani/rust.git (fetch) -/// origin https://github.com/pietroalbani/rust.git (push) -/// upstream https://github.com/rust-lang/rust (fetch) -/// upstream https://github.com/rust-lang/rust (push) -/// ``` -pub fn get_rust_lang_rust_remote( +/// Represents the result of checking whether a set of paths +/// have been modified locally or not. +#[derive(PartialEq, Debug, Clone)] +pub enum PathFreshness { + /// Artifacts should be downloaded from this upstream commit, + /// there are no local modifications. + LastModifiedUpstream { upstream: String }, + /// There are local modifications to a certain set of paths. + /// "Local" essentially means "not-upstream" here. + /// `upstream` is the latest upstream merge commit that made modifications to the + /// set of paths. + HasLocalModifications { upstream: String }, + /// No upstream commit was found. + /// This should not happen in most reasonable circumstances, but one never knows. + MissingUpstream, +} + +/// This function figures out if a set of paths was last modified upstream or +/// if there are some local modifications made to them. +/// It can be used to figure out if we should download artifacts from CI or rather +/// build them locally. +/// +/// The function assumes that at least a single upstream bors merge commit is in the +/// local git history. +/// +/// `target_paths` should be a non-empty slice of paths (git `pathspec`s) relative to `git_dir` +/// whose modifications would invalidate the artifact. +/// Each pathspec can also be a negative match, i.e. `:!foo`. This matches changes outside +/// the `foo` directory. +/// See +/// for how git `pathspec` works. +/// +/// The function behaves differently in CI and outside CI. +/// +/// - Outside CI, we want to find out if `target_paths` were modified in some local commit on +/// top of the latest upstream commit that is available in local git history. +/// If not, we try to find the most recent upstream commit (which we assume are commits +/// made by bors) that modified `target_paths`. +/// We don't want to simply take the latest master commit to avoid changing the output of +/// this function frequently after rebasing on the latest master branch even if `target_paths` +/// were not modified upstream in the meantime. In that case we would be redownloading CI +/// artifacts unnecessarily. +/// +/// - In CI, we use a shallow clone of depth 2, i.e., we fetch only a single parent commit +/// (which will be the most recent bors merge commit) and do not have access +/// to the full git history. Luckily, we only need to distinguish between two situations: +/// 1) The current PR made modifications to `target_paths`. +/// In that case, a build is typically necessary. +/// 2) The current PR did not make modifications to `target_paths`. +/// In that case we simply take the latest upstream commit, because on CI there is no need to avoid +/// redownloading. +pub fn check_path_modifications( + git_dir: &Path, config: &GitConfig<'_>, - git_dir: Option<&Path>, -) -> Result { - let mut git = Command::new("git"); - if let Some(git_dir) = git_dir { - git.current_dir(git_dir); + target_paths: &[&str], + ci_env: CiEnv, +) -> Result { + assert!(!target_paths.is_empty()); + for path in target_paths { + assert!(Path::new(path.trim_start_matches(":!")).is_relative()); } - git.args(["config", "--local", "--get-regex", "remote\\..*\\.url"]); - let stdout = output_result(&mut git)?; - let rust_lang_remote = stdout - .lines() - .find(|remote| remote.contains(config.git_repository)) - .ok_or_else(|| format!("{} remote not found", config.git_repository))?; + let upstream_sha = if matches!(ci_env, CiEnv::GitHubActions) { + // Here the situation is different for PR CI and try/auto CI. + // For PR CI, we have the following history: + // + // 1-N PR commits + // upstream merge commit made by bors + // + // For try/auto CI, we have the following history: + // <**non-upstream** merge commit made by bors> + // 1-N PR commits + // upstream merge commit made by bors + // + // But on both cases, HEAD should be a merge commit. + // So if HEAD contains modifications of `target_paths`, our PR has modified + // them. If not, we can use the only available upstream commit for downloading + // artifacts. - let remote_name = - rust_lang_remote.split('.').nth(1).ok_or_else(|| "remote name not found".to_owned())?; - Ok(remote_name.into()) -} + // Do not include HEAD, as it is never an upstream commit + // If we do not find an upstream commit in CI, something is seriously wrong. + Some( + get_closest_upstream_commit(Some(git_dir), config, ci_env)? + .expect("No upstream commit was found on CI"), + ) + } else { + // Outside CI, we want to find the most recent upstream commit that + // modified the set of paths, to have an upstream reference that does not change + // unnecessarily often. + // However, if such commit is not found, we can fall back to the latest upstream commit + let upstream_with_modifications = + get_latest_upstream_commit_that_modified_files(git_dir, config, target_paths)?; + match upstream_with_modifications { + Some(sha) => Some(sha), + None => get_closest_upstream_commit(Some(git_dir), config, ci_env)?, + } + }; -pub fn rev_exists(rev: &str, git_dir: Option<&Path>) -> Result { - let mut git = Command::new("git"); - if let Some(git_dir) = git_dir { - git.current_dir(git_dir); - } - git.args(["rev-parse", rev]); - let output = git.output().map_err(|err| format!("{err:?}"))?; - - match output.status.code() { - Some(0) => Ok(true), - Some(128) => Ok(false), - None => Err(format!( - "git didn't exit properly: {}", - String::from_utf8(output.stderr).map_err(|err| format!("{err:?}"))? - )), - Some(code) => Err(format!( - "git command exited with status code: {code}: {}", - String::from_utf8(output.stderr).map_err(|err| format!("{err:?}"))? - )), + let Some(upstream_sha) = upstream_sha else { + return Ok(PathFreshness::MissingUpstream); + }; + + // For local environments, we want to find out if something has changed + // from the latest upstream commit. + // However, that should be equivalent to checking if something has changed + // from the latest upstream commit *that modified `target_paths`*, and + // with this approach we do not need to invoke git an additional time. + if has_changed_since(git_dir, &upstream_sha, target_paths) { + Ok(PathFreshness::HasLocalModifications { upstream: upstream_sha }) + } else { + Ok(PathFreshness::LastModifiedUpstream { upstream: upstream_sha }) } } -/// Returns the master branch from which we can take diffs to see changes. -/// This will usually be rust-lang/rust master, but sometimes this might not exist. -/// This could be because the user is updating their forked master branch using the GitHub UI -/// and therefore doesn't need an upstream master branch checked out. -/// We will then fall back to origin/master in the hope that at least this exists. -pub fn updated_master_branch( - config: &GitConfig<'_>, - git_dir: Option<&Path>, -) -> Result { - let upstream_remote = get_rust_lang_rust_remote(config, git_dir)?; - let branch = config.nightly_branch; - for upstream_master in [format!("{upstream_remote}/{branch}"), format!("origin/{branch}")] { - if rev_exists(&upstream_master, git_dir)? { - return Ok(upstream_master); - } - } +/// Returns true if any of the passed `paths` have changed since the `base` commit. +pub fn has_changed_since(git_dir: &Path, base: &str, paths: &[&str]) -> bool { + let mut git = Command::new("git"); + git.current_dir(git_dir); + + git.args(["diff-index", "--quiet", base, "--"]).args(paths); - Err("Cannot find any suitable upstream master branch".to_owned()) + // Exit code 0 => no changes + // Exit code 1 => some changes were detected + !git.status().expect("cannot run git diff-index").success() } -/// Finds the nearest merge commit by comparing the local `HEAD` with the upstream branch's state. -/// To work correctly, the upstream remote must be properly configured using `git remote add `. -/// In most cases `get_closest_merge_commit` is the function you are looking for as it doesn't require remote -/// to be configured. -fn git_upstream_merge_base( - config: &GitConfig<'_>, - git_dir: Option<&Path>, -) -> Result { - let updated_master = updated_master_branch(config, git_dir)?; +/// Returns the latest upstream commit that modified `target_paths`, or `None` if no such commit +/// was found. +fn get_latest_upstream_commit_that_modified_files( + git_dir: &Path, + git_config: &GitConfig<'_>, + target_paths: &[&str], +) -> Result, String> { let mut git = Command::new("git"); - if let Some(git_dir) = git_dir { - git.current_dir(git_dir); + git.current_dir(git_dir); + + // In theory, we could just use + // `git rev-list --first-parent HEAD --author= -- ` + // to find the latest upstream commit that modified ``. + // However, this does not work if you are in a subtree sync branch that contains merge commits + // which have the subtree history as their first parent, and the rustc history as second parent: + // `--first-parent` will just walk up the subtree history and never see a single rustc commit. + // We thus have to take a two-pronged approach. First lookup the most recent upstream commit + // by *date* (this should work even in a subtree sync branch), and then start the lookup for + // modified paths starting from that commit. + // + // See https://github.com/rust-lang/rust/pull/138591#discussion_r2037081858 for more details. + let upstream = get_closest_upstream_commit(Some(git_dir), git_config, CiEnv::None)? + .unwrap_or_else(|| "HEAD".to_string()); + + git.args([ + "rev-list", + "--first-parent", + "-n1", + &upstream, + "--author", + git_config.git_merge_commit_email, + ]); + + if !target_paths.is_empty() { + git.arg("--").args(target_paths); } - Ok(output_result(git.arg("merge-base").arg(&updated_master).arg("HEAD"))?.trim().to_owned()) + let output = output_result(&mut git)?.trim().to_owned(); + if output.is_empty() { Ok(None) } else { Ok(Some(output)) } } -/// Searches for the nearest merge commit in the repository that also exists upstream. +/// Returns the most recent (ordered chronologically) commit found in the local history that +/// should exist upstream. We identify upstream commits by the e-mail of the commit +/// author. /// -/// It looks for the most recent commit made by the merge bot by matching the author's email -/// address with the merge bot's email. -pub fn get_closest_merge_commit( +/// If we are in CI, we simply return our first parent. +fn get_closest_upstream_commit( git_dir: Option<&Path>, config: &GitConfig<'_>, - target_paths: &[&str], -) -> Result { + env: CiEnv, +) -> Result, String> { + let base = match env { + CiEnv::None => "HEAD", + CiEnv::GitHubActions => { + // On CI, we should always have a non-upstream merge commit at the tip, + // and our first parent should be the most recently merged upstream commit. + // We thus simply return our first parent. + return resolve_commit_sha(git_dir, "HEAD^1").map(Some); + } + }; + let mut git = Command::new("git"); if let Some(git_dir) = git_dir { git.current_dir(git_dir); } - let channel = include_str!("../../ci/channel").trim(); - - let merge_base = { - if CiEnv::is_ci() && - // FIXME: When running on rust-lang managed CI and it's not a nightly build, - // `git_upstream_merge_base` fails with an error message similar to this: - // ``` - // called `Result::unwrap()` on an `Err` value: "command did not execute successfully: - // cd \"/checkout\" && \"git\" \"merge-base\" \"origin/master\" \"HEAD\"\nexpected success, got: exit status: 1\n" - // ``` - // Investigate and resolve this issue instead of skipping it like this. - (channel == "nightly" || !CiEnv::is_rust_lang_managed_ci_job()) - { - git_upstream_merge_base(config, git_dir).unwrap() - } else { - // For non-CI environments, ignore rust-lang/rust upstream as it usually gets - // outdated very quickly. - "HEAD".to_string() - } - }; - + // We do not use `--first-parent`, because we can be in a situation (outside CI) where we have + // a subtree merge that actually has the main rustc history as its second parent. + // Using `--first-parent` would recurse into the history of the subtree, which could have some + // old bors commits that are not relevant to us. + // With `--author-date-order`, git recurses into all parent subtrees, and returns the most + // chronologically recent bors commit. + // Here we assume that none of our subtrees use bors anymore, and that all their old bors + // commits are way older than recent rustc bors commits! git.args([ "rev-list", + "--author-date-order", &format!("--author={}", config.git_merge_commit_email), "-n1", - "--first-parent", - &merge_base, + &base, ]); - if !target_paths.is_empty() { - git.arg("--").args(target_paths); + let output = output_result(&mut git)?.trim().to_owned(); + if output.is_empty() { Ok(None) } else { Ok(Some(output)) } +} + +/// Resolve the commit SHA of `commit_ref`. +fn resolve_commit_sha(git_dir: Option<&Path>, commit_ref: &str) -> Result { + let mut git = Command::new("git"); + + if let Some(git_dir) = git_dir { + git.current_dir(git_dir); } + git.args(["rev-parse", commit_ref]); + Ok(output_result(&mut git)?.trim().to_owned()) } /// Returns the files that have been modified in the current branch compared to the master branch. +/// This includes committed changes, uncommitted changes, and changes that are not even staged. +/// /// The `extensions` parameter can be used to filter the files by their extension. /// Does not include removed files. /// If `extensions` is empty, all files will be returned. @@ -174,7 +263,9 @@ pub fn get_git_modified_files( git_dir: Option<&Path>, extensions: &[&str], ) -> Result, String> { - let merge_base = get_closest_merge_commit(git_dir, config, &[])?; + let Some(merge_base) = get_closest_upstream_commit(git_dir, config, CiEnv::None)? else { + return Err("No upstream commit was found".to_string()); + }; let mut git = Command::new("git"); if let Some(git_dir) = git_dir { @@ -202,13 +293,7 @@ pub fn get_git_modified_files( } /// Returns the files that haven't been added to git yet. -pub fn get_git_untracked_files( - config: &GitConfig<'_>, - git_dir: Option<&Path>, -) -> Result>, String> { - let Ok(_updated_master) = updated_master_branch(config, git_dir) else { - return Ok(None); - }; +pub fn get_git_untracked_files(git_dir: Option<&Path>) -> Result>, String> { let mut git = Command::new("git"); if let Some(git_dir) = git_dir { git.current_dir(git_dir); diff --git a/src/build_helper/src/stage0_parser.rs b/src/build_helper/src/stage0_parser.rs index 2a0c12a1c91c7..2723f4aa7b914 100644 --- a/src/build_helper/src/stage0_parser.rs +++ b/src/build_helper/src/stage0_parser.rs @@ -20,7 +20,6 @@ pub struct Stage0Config { pub artifacts_server: String, pub artifacts_with_llvm_assertions_server: String, pub git_merge_commit_email: String, - pub git_repository: String, pub nightly_branch: String, } @@ -49,7 +48,6 @@ pub fn parse_stage0_file() -> Stage0 { stage0.config.artifacts_with_llvm_assertions_server = value.to_owned() } "git_merge_commit_email" => stage0.config.git_merge_commit_email = value.to_owned(), - "git_repository" => stage0.config.git_repository = value.to_owned(), "nightly_branch" => stage0.config.nightly_branch = value.to_owned(), "compiler_date" => stage0.compiler.date = value.to_owned(), diff --git a/src/ci/citool/Cargo.lock b/src/ci/citool/Cargo.lock index 800eaae076650..43321d12cafcd 100644 --- a/src/ci/citool/Cargo.lock +++ b/src/ci/citool/Cargo.lock @@ -64,12 +64,63 @@ version = "1.0.95" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "34ac096ce696dc2fcabef30516bb13c0a68a11d30131d3df6f04711467681b04" +[[package]] +name = "askama" +version = "0.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5d4744ed2eef2645831b441d8f5459689ade2ab27c854488fbab1fbe94fce1a7" +dependencies = [ + "askama_derive", + "itoa", + "percent-encoding", + "serde", + "serde_json", +] + +[[package]] +name = "askama_derive" +version = "0.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d661e0f57be36a5c14c48f78d09011e67e0cb618f269cca9f2fd8d15b68c46ac" +dependencies = [ + "askama_parser", + "basic-toml", + "memchr", + "proc-macro2", + "quote", + "rustc-hash", + "serde", + "serde_derive", + "syn", +] + +[[package]] +name = "askama_parser" +version = "0.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cf315ce6524c857bb129ff794935cf6d42c82a6cff60526fe2a63593de4d0d4f" +dependencies = [ + "memchr", + "serde", + "serde_derive", + "winnow", +] + [[package]] name = "base64" version = "0.22.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6" +[[package]] +name = "basic-toml" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba62675e8242a4c4e806d12f11d136e626e6c8361d6b829310732241652a178a" +dependencies = [ + "serde", +] + [[package]] name = "build_helper" version = "0.1.0" @@ -104,6 +155,7 @@ name = "citool" version = "0.1.0" dependencies = [ "anyhow", + "askama", "build_helper", "clap", "csv", @@ -563,9 +615,9 @@ checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3" [[package]] name = "miniz_oxide" -version = "0.8.5" +version = "0.8.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e3e04debbb59698c15bacbb6d93584a8c0ca9cc3213cb423d31f760d8843ce5" +checksum = "3be647b768db090acb35d5ec5db2b0e1f1de11133ca123b9eacf5137868f892a" dependencies = [ "adler2", ] @@ -646,6 +698,12 @@ dependencies = [ "windows-sys 0.52.0", ] +[[package]] +name = "rustc-hash" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "357703d41365b4b27c590e3ed91eabb1b663f07c4c084095e60cbed4362dff0d" + [[package]] name = "rustls" version = "0.23.23" @@ -1026,6 +1084,15 @@ version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" +[[package]] +name = "winnow" +version = "0.7.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "63d3fcd9bba44b03821e7d699eeee959f3126dcc4aa8e4ae18ec617c2a5cea10" +dependencies = [ + "memchr", +] + [[package]] name = "write16" version = "1.0.0" diff --git a/src/ci/citool/Cargo.toml b/src/ci/citool/Cargo.toml index f18436a126359..0e2aba3b9e3fc 100644 --- a/src/ci/citool/Cargo.toml +++ b/src/ci/citool/Cargo.toml @@ -5,6 +5,7 @@ edition = "2021" [dependencies] anyhow = "1" +askama = "0.13" clap = { version = "4.5", features = ["derive"] } csv = "1" diff = "0.1" diff --git a/src/ci/citool/src/analysis.rs b/src/ci/citool/src/analysis.rs index 7fbfad467c641..62974be2dbe8c 100644 --- a/src/ci/citool/src/analysis.rs +++ b/src/ci/citool/src/analysis.rs @@ -7,9 +7,10 @@ use build_helper::metrics::{ format_build_steps, }; -use crate::metrics; +use crate::github::JobInfoResolver; use crate::metrics::{JobMetrics, JobName, get_test_suites}; use crate::utils::{output_details, pluralize}; +use crate::{metrics, utils}; /// Outputs durations of individual bootstrap steps from the gathered bootstrap invocations, /// and also a table with summarized information about executed tests. @@ -185,13 +186,19 @@ fn render_table(suites: BTreeMap) -> String { } /// Outputs a report of test differences between the `parent` and `current` commits. -pub fn output_test_diffs(job_metrics: &HashMap) { +pub fn output_test_diffs( + job_metrics: &HashMap, + job_info_resolver: &mut JobInfoResolver, +) { let aggregated_test_diffs = aggregate_test_diffs(&job_metrics); - report_test_diffs(aggregated_test_diffs); + report_test_diffs(aggregated_test_diffs, job_metrics, job_info_resolver); } /// Prints the ten largest differences in bootstrap durations. -pub fn output_largest_duration_changes(job_metrics: &HashMap) { +pub fn output_largest_duration_changes( + job_metrics: &HashMap, + job_info_resolver: &mut JobInfoResolver, +) { struct Entry<'a> { job: &'a JobName, before: Duration, @@ -225,14 +232,14 @@ pub fn output_largest_duration_changes(job_metrics: &HashMap {:.1}s ({:.1}%)", + "{}. {}: {:.1}s -> {:.1}s ({:.1}%)", index + 1, - entry.job, + format_job_link(job_info_resolver, job_metrics, entry.job), entry.before.as_secs_f64(), entry.after.as_secs_f64(), entry.change @@ -387,20 +394,23 @@ fn aggregate_tests(metrics: &JsonRoot) -> TestSuiteData { // Poor man's detection of doctests based on the "(line XYZ)" suffix let is_doctest = matches!(suite.metadata, TestSuiteMetadata::CargoPackage { .. }) && test.name.contains("(line"); - let test_entry = Test { name: generate_test_name(&test.name), stage, is_doctest }; + let test_entry = Test { + name: utils::normalize_path_delimiters(&test.name).to_string(), + stage, + is_doctest, + }; tests.insert(test_entry, test.outcome.clone()); } } TestSuiteData { tests } } -/// Normalizes Windows-style path delimiters to Unix-style paths. -fn generate_test_name(name: &str) -> String { - name.replace('\\', "/") -} - /// Prints test changes in Markdown format to stdout. -fn report_test_diffs(diff: AggregatedTestDiffs) { +fn report_test_diffs( + diff: AggregatedTestDiffs, + job_metrics: &HashMap, + job_info_resolver: &mut JobInfoResolver, +) { println!("# Test differences"); if diff.diffs.is_empty() { println!("No test diffs found"); @@ -509,21 +519,42 @@ fn report_test_diffs(diff: AggregatedTestDiffs) { } if doctest_count > 0 { + let prefix = + if doctest_count < original_diff_count { "Additionally, " } else { "" }; println!( - "\nAdditionally, {doctest_count} doctest {} were found. These are ignored, as they are noisy.", + "\n{prefix}{doctest_count} doctest {} were found. These are ignored, as they are noisy.", pluralize("diff", doctest_count) ); } // Now print the job group index - println!("\n**Job group index**\n"); - for (group, jobs) in job_index.into_iter().enumerate() { - println!( - "- {}: {}", - format_job_group(group as u64), - jobs.iter().map(|j| format!("`{j}`")).collect::>().join(", ") - ); + if !job_index.is_empty() { + println!("\n**Job group index**\n"); + for (group, jobs) in job_index.into_iter().enumerate() { + println!( + "- {}: {}", + format_job_group(group as u64), + jobs.iter() + .map(|j| format_job_link(job_info_resolver, job_metrics, j)) + .collect::>() + .join(", ") + ); + } } }, ); } + +/// Tries to get a GitHub Actions job summary URL from the resolver. +/// If it is not available, just wraps the job name in backticks. +fn format_job_link( + job_info_resolver: &mut JobInfoResolver, + job_metrics: &HashMap, + job_name: &str, +) -> String { + job_metrics + .get(job_name) + .and_then(|metrics| job_info_resolver.get_job_summary_link(job_name, &metrics.current)) + .map(|summary_url| format!("[{job_name}]({summary_url})")) + .unwrap_or_else(|| format!("`{job_name}`")) +} diff --git a/src/ci/citool/src/github.rs b/src/ci/citool/src/github.rs new file mode 100644 index 0000000000000..35e4c3f9599d6 --- /dev/null +++ b/src/ci/citool/src/github.rs @@ -0,0 +1,109 @@ +use std::collections::HashMap; + +use anyhow::Context; +use build_helper::metrics::{CiMetadata, JsonRoot}; + +pub struct GitHubClient; + +impl GitHubClient { + fn get_workflow_run_jobs( + &self, + repo: &str, + workflow_run_id: u64, + ) -> anyhow::Result> { + let req = ureq::get(format!( + "https://api.github.com/repos/{repo}/actions/runs/{workflow_run_id}/jobs?per_page=100" + )) + .header("User-Agent", "rust-lang/rust/citool") + .header("Accept", "application/vnd.github+json") + .header("X-GitHub-Api-Version", "2022-11-28") + .call() + .context("cannot get workflow job list")?; + + let status = req.status(); + let mut body = req.into_body(); + if status.is_success() { + // This API response is actually paged, but we assume for now that there are at + // most 100 jobs per workflow. + let response = body + .read_json::() + .context("cannot deserialize workflow run jobs response")?; + // The CI job names have a prefix, e.g. `auto - foo`. We remove the prefix here to + // normalize the job name. + Ok(response + .jobs + .into_iter() + .map(|mut job| { + job.name = job + .name + .split_once(" - ") + .map(|res| res.1.to_string()) + .unwrap_or_else(|| job.name); + job + }) + .collect()) + } else { + Err(anyhow::anyhow!( + "Cannot get jobs of workflow run {workflow_run_id}: {status}\n{}", + body.read_to_string()? + )) + } + } +} + +#[derive(serde::Deserialize)] +struct WorkflowRunJobsResponse { + jobs: Vec, +} + +#[derive(serde::Deserialize)] +struct GitHubJob { + name: String, + id: u64, +} + +/// Can be used to resolve information about GitHub Actions jobs. +/// Caches results internally to avoid too unnecessary GitHub API calls. +pub struct JobInfoResolver { + client: GitHubClient, + // Workflow run ID -> jobs + workflow_job_cache: HashMap>, +} + +impl JobInfoResolver { + pub fn new() -> Self { + Self { client: GitHubClient, workflow_job_cache: Default::default() } + } + + /// Get a link to a job summary for the given job name and bootstrap execution. + pub fn get_job_summary_link(&mut self, job_name: &str, metrics: &JsonRoot) -> Option { + metrics.ci_metadata.as_ref().and_then(|metadata| { + self.get_job_id(metadata, job_name).map(|job_id| { + format!( + "https://github.com/{}/actions/runs/{}#summary-{job_id}", + metadata.repository, metadata.workflow_run_id + ) + }) + }) + } + + fn get_job_id(&mut self, ci_metadata: &CiMetadata, job_name: &str) -> Option { + if let Some(job) = self + .workflow_job_cache + .get(&ci_metadata.workflow_run_id) + .and_then(|jobs| jobs.iter().find(|j| j.name == job_name)) + { + return Some(job.id); + } + + let jobs = self + .client + .get_workflow_run_jobs(&ci_metadata.repository, ci_metadata.workflow_run_id) + .inspect_err(|e| eprintln!("Cannot download workflow jobs: {e:?}")) + .ok()?; + let job_id = jobs.iter().find(|j| j.name == job_name).map(|j| j.id); + // Save the cache even if the job name was not found, it could be useful for further lookups + self.workflow_job_cache.insert(ci_metadata.workflow_run_id, jobs); + job_id + } +} diff --git a/src/ci/citool/src/jobs.rs b/src/ci/citool/src/jobs.rs index 13880ad466a6b..5600d7b4db59b 100644 --- a/src/ci/citool/src/jobs.rs +++ b/src/ci/citool/src/jobs.rs @@ -3,12 +3,15 @@ mod tests; use std::collections::BTreeMap; +use anyhow::Context as _; use serde_yaml::Value; use crate::GitHubContext; +use crate::utils::load_env_var; /// Representation of a job loaded from the `src/ci/github-actions/jobs.yml` file. #[derive(serde::Deserialize, Debug, Clone)] +#[serde(deny_unknown_fields)] pub struct Job { /// Name of the job, e.g. mingw-check pub name: String, @@ -26,6 +29,8 @@ pub struct Job { pub free_disk: Option, /// Documentation link to a resource that could help people debug this CI job. pub doc_url: Option, + /// Whether the job is executed on AWS CodeBuild. + pub codebuild: Option, } impl Job { @@ -80,7 +85,7 @@ impl JobDatabase { } pub fn load_job_db(db: &str) -> anyhow::Result { - let mut db: Value = serde_yaml::from_str(&db)?; + let mut db: Value = serde_yaml::from_str(db)?; // We need to expand merge keys (<<), because serde_yaml can't deal with them // `apply_merge` only applies the merge once, so do it a few times to unwrap nested merges. @@ -107,6 +112,29 @@ struct GithubActionsJob { free_disk: Option, #[serde(skip_serializing_if = "Option::is_none")] doc_url: Option, + #[serde(skip_serializing_if = "Option::is_none")] + codebuild: Option, +} + +/// Replace GitHub context variables with environment variables in job configs. +/// Used for codebuild jobs like +/// `codebuild-ubuntu-22-8c-$github.run_id-$github.run_attempt` +fn substitute_github_vars(jobs: Vec) -> anyhow::Result> { + let run_id = load_env_var("GITHUB_RUN_ID")?; + let run_attempt = load_env_var("GITHUB_RUN_ATTEMPT")?; + + let jobs = jobs + .into_iter() + .map(|mut job| { + job.os = job + .os + .replace("$github.run_id", &run_id) + .replace("$github.run_attempt", &run_attempt); + job + }) + .collect(); + + Ok(jobs) } /// Skip CI jobs that are not supposed to be executed on the given `channel`. @@ -177,6 +205,8 @@ fn calculate_jobs( } RunType::AutoJob => (db.auto_jobs.clone(), "auto", &db.envs.auto_env), }; + let jobs = substitute_github_vars(jobs.clone()) + .context("Failed to substitute GitHub context variables in jobs")?; let jobs = skip_jobs(jobs, channel); let jobs = jobs .into_iter() @@ -207,6 +237,7 @@ fn calculate_jobs( continue_on_error: job.continue_on_error, free_disk: job.free_disk, doc_url: job.doc_url, + codebuild: job.codebuild, } }) .collect(); diff --git a/src/ci/citool/src/main.rs b/src/ci/citool/src/main.rs index 6db5eab458cca..87ce09cfb233d 100644 --- a/src/ci/citool/src/main.rs +++ b/src/ci/citool/src/main.rs @@ -1,8 +1,10 @@ mod analysis; mod cpu_usage; mod datadog; +mod github; mod jobs; mod metrics; +mod test_dashboard; mod utils; use std::collections::{BTreeMap, HashMap}; @@ -18,9 +20,11 @@ use serde_yaml::Value; use crate::analysis::{output_largest_duration_changes, output_test_diffs}; use crate::cpu_usage::load_cpu_usage; use crate::datadog::upload_datadog_metric; +use crate::github::JobInfoResolver; use crate::jobs::RunType; use crate::metrics::{JobMetrics, download_auto_job_metrics, download_job_metrics, load_metrics}; -use crate::utils::load_env_var; +use crate::test_dashboard::generate_test_dashboard; +use crate::utils::{load_env_var, output_details}; const CI_DIRECTORY: &str = concat!(env!("CARGO_MANIFEST_DIR"), "/.."); const DOCKER_DIRECTORY: &str = concat!(env!("CARGO_MANIFEST_DIR"), "/../docker"); @@ -145,6 +149,7 @@ fn postprocess_metrics( ) -> anyhow::Result<()> { let metrics = load_metrics(&metrics_path)?; + let mut job_info_resolver = JobInfoResolver::new(); if let (Some(parent), Some(job_name)) = (parent, job_name) { // This command is executed also on PR builds, which might not have parent metrics // available, because some PR jobs don't run on auto builds, and PR jobs do not upload metrics @@ -160,7 +165,7 @@ fn postprocess_metrics( job_name, JobMetrics { parent: Some(parent_metrics), current: metrics }, )]); - output_test_diffs(&job_metrics); + output_test_diffs(&job_metrics, &mut job_info_resolver); return Ok(()); } Err(error) => { @@ -177,11 +182,27 @@ fn postprocess_metrics( } fn post_merge_report(db: JobDatabase, current: String, parent: String) -> anyhow::Result<()> { - let metrics = download_auto_job_metrics(&db, &parent, ¤t)?; + let metrics = download_auto_job_metrics(&db, Some(&parent), ¤t)?; println!("\nComparing {parent} (parent) -> {current} (this PR)\n"); - output_test_diffs(&metrics); - output_largest_duration_changes(&metrics); + + let mut job_info_resolver = JobInfoResolver::new(); + output_test_diffs(&metrics, &mut job_info_resolver); + + output_details("Test dashboard", || { + println!( + r#"Run + +```bash +cargo run --manifest-path src/ci/citool/Cargo.toml -- \ + test-dashboard {current} --output-dir test-dashboard +``` +And then open `test-dashboard/index.html` in your browser to see an overview of all executed tests. +"# + ); + }); + + output_largest_duration_changes(&metrics, &mut job_info_resolver); Ok(()) } @@ -229,6 +250,14 @@ enum Args { /// Current commit that will be compared to `parent`. current: String, }, + /// Generate a directory containing a HTML dashboard of test results from a CI run. + TestDashboard { + /// Commit SHA that was tested on CI to analyze. + current: String, + /// Output path for the HTML directory. + #[clap(long)] + output_dir: PathBuf, + }, } #[derive(clap::ValueEnum, Clone)] @@ -270,7 +299,11 @@ fn main() -> anyhow::Result<()> { postprocess_metrics(metrics_path, parent, job_name)?; } Args::PostMergeReport { current, parent } => { - post_merge_report(load_db(default_jobs_file)?, current, parent)?; + post_merge_report(load_db(&default_jobs_file)?, current, parent)?; + } + Args::TestDashboard { current, output_dir } => { + let db = load_db(&default_jobs_file)?; + generate_test_dashboard(db, ¤t, &output_dir)?; } } diff --git a/src/ci/citool/src/metrics.rs b/src/ci/citool/src/metrics.rs index a816fb3c4f165..3d8b1ad84cf72 100644 --- a/src/ci/citool/src/metrics.rs +++ b/src/ci/citool/src/metrics.rs @@ -46,24 +46,25 @@ pub struct JobMetrics { /// `parent` and `current` should be commit SHAs. pub fn download_auto_job_metrics( job_db: &JobDatabase, - parent: &str, + parent: Option<&str>, current: &str, ) -> anyhow::Result> { let mut jobs = HashMap::default(); for job in &job_db.auto_jobs { eprintln!("Downloading metrics of job {}", job.name); - let metrics_parent = match download_job_metrics(&job.name, parent) { - Ok(metrics) => Some(metrics), - Err(error) => { - eprintln!( - r#"Did not find metrics for job `{}` at `{parent}`: {error:?}. + let metrics_parent = + parent.and_then(|parent| match download_job_metrics(&job.name, parent) { + Ok(metrics) => Some(metrics), + Err(error) => { + eprintln!( + r#"Did not find metrics for job `{}` at `{parent}`: {error:?}. Maybe it was newly added?"#, - job.name - ); - None - } - }; + job.name + ); + None + } + }); let metrics_current = download_job_metrics(&job.name, current)?; jobs.insert( job.name.clone(), diff --git a/src/ci/citool/src/test_dashboard.rs b/src/ci/citool/src/test_dashboard.rs new file mode 100644 index 0000000000000..8fbd0d3f200d4 --- /dev/null +++ b/src/ci/citool/src/test_dashboard.rs @@ -0,0 +1,216 @@ +use std::collections::{BTreeMap, HashMap}; +use std::fs::File; +use std::io::BufWriter; +use std::path::{Path, PathBuf}; + +use askama::Template; +use build_helper::metrics::{TestOutcome, TestSuiteMetadata}; + +use crate::jobs::JobDatabase; +use crate::metrics::{JobMetrics, JobName, download_auto_job_metrics, get_test_suites}; +use crate::utils::normalize_path_delimiters; + +/// Generate a set of HTML files into a directory that contain a dashboard of test results. +pub fn generate_test_dashboard( + db: JobDatabase, + current: &str, + output_dir: &Path, +) -> anyhow::Result<()> { + let metrics = download_auto_job_metrics(&db, None, current)?; + let suites = gather_test_suites(&metrics); + + std::fs::create_dir_all(output_dir)?; + + let test_count = suites.test_count(); + write_page(output_dir, "index.html", &TestSuitesPage { suites, test_count })?; + + Ok(()) +} + +fn write_page(dir: &Path, name: &str, template: &T) -> anyhow::Result<()> { + let mut file = BufWriter::new(File::create(dir.join(name))?); + Template::write_into(template, &mut file)?; + Ok(()) +} + +fn gather_test_suites(job_metrics: &HashMap) -> TestSuites { + struct CoarseTestSuite<'a> { + tests: BTreeMap>, + } + + let mut suites: HashMap = HashMap::new(); + + // First, gather tests from all jobs, stages and targets, and aggregate them per suite + // Only work with compiletest suites. + for (job, metrics) in job_metrics { + let test_suites = get_test_suites(&metrics.current); + for suite in test_suites { + let (suite_name, stage, target) = match &suite.metadata { + TestSuiteMetadata::CargoPackage { .. } => { + continue; + } + TestSuiteMetadata::Compiletest { suite, stage, target, .. } => { + (suite.clone(), *stage, target) + } + }; + let suite_entry = suites + .entry(suite_name.clone()) + .or_insert_with(|| CoarseTestSuite { tests: Default::default() }); + let test_metadata = TestMetadata { job, stage, target }; + + for test in &suite.tests { + let test_name = normalize_test_name(&test.name, &suite_name); + let (test_name, variant_name) = match test_name.rsplit_once('#') { + Some((name, variant)) => (name.to_string(), variant.to_string()), + None => (test_name, "".to_string()), + }; + let test_entry = suite_entry + .tests + .entry(test_name.clone()) + .or_insert_with(|| Test { revisions: Default::default() }); + let variant_entry = test_entry + .revisions + .entry(variant_name) + .or_insert_with(|| TestResults { passed: vec![], ignored: vec![] }); + + match test.outcome { + TestOutcome::Passed => { + variant_entry.passed.push(test_metadata); + } + TestOutcome::Ignored { ignore_reason: _ } => { + variant_entry.ignored.push(test_metadata); + } + TestOutcome::Failed => { + eprintln!("Warning: failed test {test_name}"); + } + } + } + } + } + + // Then, split the suites per directory + let mut suites = suites.into_iter().collect::>(); + suites.sort_by(|a, b| a.0.cmp(&b.0)); + + let suites = suites + .into_iter() + .map(|(suite_name, suite)| TestSuite { group: build_test_group(&suite_name, suite.tests) }) + .collect(); + + TestSuites { suites } +} + +/// Recursively expand a test group based on filesystem hierarchy. +fn build_test_group<'a>(name: &str, tests: BTreeMap>) -> TestGroup<'a> { + let mut root_tests = vec![]; + let mut subdirs: BTreeMap>> = Default::default(); + + // Split tests into root tests and tests located in subdirectories + for (name, test) in tests { + let mut components = Path::new(&name).components().peekable(); + let subdir = components.next().unwrap(); + + if components.peek().is_none() { + // This is a root test + root_tests.push((name, test)); + } else { + // This is a test in a nested directory + let subdir_tests = + subdirs.entry(subdir.as_os_str().to_str().unwrap().to_string()).or_default(); + let test_name = + components.into_iter().collect::().to_str().unwrap().to_string(); + subdir_tests.insert(test_name, test); + } + } + let dirs = subdirs + .into_iter() + .map(|(name, tests)| { + let group = build_test_group(&name, tests); + (name, group) + }) + .collect(); + + TestGroup { name: name.to_string(), root_tests, groups: dirs } +} + +/// Compiletest tests start with `[suite] tests/[suite]/a/b/c...`. +/// Remove the `[suite] tests/[suite]/` prefix so that we can find the filesystem path. +/// Also normalizes path delimiters. +fn normalize_test_name(name: &str, suite_name: &str) -> String { + let name = normalize_path_delimiters(name); + let name = name.as_ref(); + let name = name.strip_prefix(&format!("[{suite_name}]")).unwrap_or(name).trim(); + let name = name.strip_prefix("tests/").unwrap_or(name); + let name = name.strip_prefix(suite_name).unwrap_or(name); + name.trim_start_matches("/").to_string() +} + +struct TestSuites<'a> { + suites: Vec>, +} + +impl<'a> TestSuites<'a> { + fn test_count(&self) -> u64 { + self.suites.iter().map(|suite| suite.group.test_count()).sum::() + } +} + +struct TestSuite<'a> { + group: TestGroup<'a>, +} + +struct TestResults<'a> { + passed: Vec>, + ignored: Vec>, +} + +struct Test<'a> { + revisions: BTreeMap>, +} + +impl<'a> Test<'a> { + /// If this is a test without revisions, it will have a single entry in `revisions` with + /// an empty string as the revision name. + fn single_test(&self) -> Option<&TestResults<'a>> { + if self.revisions.len() == 1 { + self.revisions.iter().next().take_if(|e| e.0.is_empty()).map(|e| e.1) + } else { + None + } + } +} + +#[derive(Clone, Copy)] +#[allow(dead_code)] +struct TestMetadata<'a> { + job: &'a str, + stage: u32, + target: &'a str, +} + +// We have to use a template for the TestGroup instead of a macro, because +// macros cannot be recursive in askama at the moment. +#[derive(Template)] +#[template(path = "test_group.askama")] +/// Represents a group of tests +struct TestGroup<'a> { + name: String, + /// Tests located directly in this directory + root_tests: Vec<(String, Test<'a>)>, + /// Nested directories with additional tests + groups: Vec<(String, TestGroup<'a>)>, +} + +impl<'a> TestGroup<'a> { + fn test_count(&self) -> u64 { + let root = self.root_tests.len() as u64; + self.groups.iter().map(|(_, group)| group.test_count()).sum::() + root + } +} + +#[derive(Template)] +#[template(path = "test_suites.askama")] +struct TestSuitesPage<'a> { + suites: TestSuites<'a>, + test_count: u64, +} diff --git a/src/ci/citool/src/utils.rs b/src/ci/citool/src/utils.rs index a4c6ff85ef73c..0367d349a1ef4 100644 --- a/src/ci/citool/src/utils.rs +++ b/src/ci/citool/src/utils.rs @@ -1,3 +1,4 @@ +use std::borrow::Cow; use std::path::Path; use anyhow::Context; @@ -28,3 +29,8 @@ where func(); println!("\n"); } + +/// Normalizes Windows-style path delimiters to Unix-style paths. +pub fn normalize_path_delimiters(name: &str) -> Cow { + if name.contains("\\") { name.replace('\\', "/").into() } else { name.into() } +} diff --git a/src/ci/citool/templates/layout.askama b/src/ci/citool/templates/layout.askama new file mode 100644 index 0000000000000..3b3b6f23741d4 --- /dev/null +++ b/src/ci/citool/templates/layout.askama @@ -0,0 +1,22 @@ + + + + Rust CI Test Dashboard + + + + +{% block content %}{% endblock %} +{% block scripts %}{% endblock %} + + diff --git a/src/ci/citool/templates/test_group.askama b/src/ci/citool/templates/test_group.askama new file mode 100644 index 0000000000000..95731103f3b9d --- /dev/null +++ b/src/ci/citool/templates/test_group.askama @@ -0,0 +1,42 @@ +{% macro test_result(r) -%} +passed: {{ r.passed.len() }}, ignored: {{ r.ignored.len() }} +{%- endmacro %} + +
  • +
    +{{ name }} ({{ test_count() }} test{{ test_count() | pluralize }}{% if !root_tests.is_empty() && root_tests.len() as u64 != test_count() -%} + , {{ root_tests.len() }} root test{{ root_tests.len() | pluralize }} +{%- endif %}{% if !groups.is_empty() -%} + , {{ groups.len() }} subdir{{ groups.len() | pluralize }} +{%- endif %}) + + +{% if !groups.is_empty() %} +
      + {% for (dir_name, subgroup) in groups %} + {{ subgroup|safe }} + {% endfor %} +
    +{% endif %} + +{% if !root_tests.is_empty() %} +
      + {% for (name, test) in root_tests %} +
    • + {% if let Some(result) = test.single_test() %} + {{ name }} ({% call test_result(result) %}) + {% else %} + {{ name }} ({{ test.revisions.len() }} revision{{ test.revisions.len() | pluralize }}) +
        + {% for (revision, result) in test.revisions %} +
      • #{{ revision }} ({% call test_result(result) %})
      • + {% endfor %} +
      + {% endif %} +
    • + {% endfor %} +
    +{% endif %} + +
    +
  • diff --git a/src/ci/citool/templates/test_suites.askama b/src/ci/citool/templates/test_suites.askama new file mode 100644 index 0000000000000..4997f6a3f1c9a --- /dev/null +++ b/src/ci/citool/templates/test_suites.askama @@ -0,0 +1,108 @@ +{% extends "layout.askama" %} + +{% block content %} +

    Rust CI test dashboard

    +
    +Here's how to interpret the "passed" and "ignored" counts: +the count includes all combinations of "stage" x "target" x "CI job where the test was executed or ignored". +
    +
    +
    +
    +
    Total tests: {{ test_count }}
    +
    + To find tests that haven't been executed anywhere, click on "Open all" and search for "passed: 0". +
    +
    +
    + + +
    +
    + +
      + {% for suite in suites.suites %} + {{ suite.group|safe }} + {% endfor %} +
    +
    +{% endblock %} + +{% block styles %} +h1 { + text-align: center; + color: #333333; + margin-bottom: 30px; +} + +.summary { + display: flex; + justify-content: space-between; +} + +.test-count { + font-size: 1.2em; +} + +.test-suites { + background: white; + border-radius: 8px; + box-shadow: 0 2px 4px rgba(0, 0, 0, 0.1); + padding: 20px; +} + +ul { + padding-left: 0; +} + +li { + list-style: none; + padding-left: 20px; +} +summary { + margin-bottom: 5px; + padding: 6px; + background-color: #F4F4F4; + border: 1px solid #ddd; + border-radius: 4px; + cursor: pointer; +} +summary:hover { + background-color: #CFCFCF; +} + +/* Style the disclosure triangles */ +details > summary { + list-style: none; + position: relative; +} + +details > summary::before { + content: "▶"; + position: absolute; + left: -15px; + transform: rotate(0); + transition: transform 0.2s; +} + +details[open] > summary::before { + transform: rotate(90deg); +} +{% endblock %} + +{% block scripts %} + +{% endblock %} diff --git a/src/ci/citool/tests/test-jobs.yml b/src/ci/citool/tests/test-jobs.yml index 3593b3f7df633..d81be88b70872 100644 --- a/src/ci/citool/tests/test-jobs.yml +++ b/src/ci/citool/tests/test-jobs.yml @@ -27,7 +27,7 @@ runners: <<: *base-job envs: env-x86_64-apple-tests: &env-x86_64-apple-tests - SCRIPT: ./x.py --stage 2 test --skip tests/ui --skip tests/rustdoc -- --exact + SCRIPT: ./x.py check compiletest --set build.compiletest-use-stage0-libtest=true && ./x.py --stage 2 test --skip tests/ui --skip tests/rustdoc -- --exact RUST_CONFIGURE_ARGS: --build=x86_64-apple-darwin --enable-sanitizers --enable-profiler --set rust.jemalloc RUSTC_RETRY_LINKER_ON_SEGFAULT: 1 # Ensure that host tooling is tested on our minimum supported macOS version. diff --git a/src/ci/docker/README.md b/src/ci/docker/README.md index 20b6f7d10ef56..488a6a2bce122 100644 --- a/src/ci/docker/README.md +++ b/src/ci/docker/README.md @@ -14,9 +14,9 @@ To run a specific CI job locally, you can use the `citool` Rust crate: cargo run --manifest-path src/ci/citool/Cargo.toml run-local ``` -For example, to run the `x86_64-gnu-llvm-18-1` job: +For example, to run the `x86_64-gnu-llvm-19-1` job: ``` -cargo run --manifest-path src/ci/citool/Cargo.toml run-local x86_64-gnu-llvm-18-1 +cargo run --manifest-path src/ci/citool/Cargo.toml run-local x86_64-gnu-llvm-19-1 ``` The job will output artifacts in an `obj/` dir at the root of a repository. Note @@ -27,10 +27,10 @@ Docker image executed in the given CI job. while locally, to the `obj/` directory. This is primarily to prevent strange linker errors when using multiple Docker images. -For some Linux workflows (for example `x86_64-gnu-llvm-18-N`), the process is more involved. You will need to see which script is executed for the given workflow inside the [`jobs.yml`](../github-actions/jobs.yml) file and pass it through the `DOCKER_SCRIPT` environment variable. For example, to reproduce the `x86_64-gnu-llvm-18-3` workflow, you can run the following script: +For some Linux workflows (for example `x86_64-gnu-llvm-19-N`), the process is more involved. You will need to see which script is executed for the given workflow inside the [`jobs.yml`](../github-actions/jobs.yml) file and pass it through the `DOCKER_SCRIPT` environment variable. For example, to reproduce the `x86_64-gnu-llvm-19-3` workflow, you can run the following script: ``` -DOCKER_SCRIPT=x86_64-gnu-llvm3.sh ./src/ci/docker/run.sh x86_64-gnu-llvm-18 +DOCKER_SCRIPT=x86_64-gnu-llvm3.sh ./src/ci/docker/run.sh x86_64-gnu-llvm-19 ``` ## Local Development diff --git a/src/ci/docker/host-x86_64/dist-arm-linux/Dockerfile b/src/ci/docker/host-x86_64/dist-arm-linux/Dockerfile index 420c42bc9d807..3795859f308e6 100644 --- a/src/ci/docker/host-x86_64/dist-arm-linux/Dockerfile +++ b/src/ci/docker/host-x86_64/dist-arm-linux/Dockerfile @@ -1,4 +1,4 @@ -FROM ubuntu:22.04 +FROM ghcr.io/rust-lang/ubuntu:22.04 COPY scripts/cross-apt-packages.sh /scripts/ RUN sh /scripts/cross-apt-packages.sh diff --git a/src/ci/docker/host-x86_64/dist-x86_64-linux/Dockerfile b/src/ci/docker/host-x86_64/dist-x86_64-linux/Dockerfile index ae5bf8946dd94..bedf45c8630cf 100644 --- a/src/ci/docker/host-x86_64/dist-x86_64-linux/Dockerfile +++ b/src/ci/docker/host-x86_64/dist-x86_64-linux/Dockerfile @@ -2,7 +2,7 @@ # CentOS 7 has headers for kernel 3.10, but that's fine as long as we don't # actually use newer APIs in rustc or std without a fallback. It's more # important that we match glibc for ELF symbol versioning. -FROM centos:7 +FROM ghcr.io/rust-lang/centos:7 WORKDIR /build diff --git a/src/ci/docker/host-x86_64/mingw-check/Dockerfile b/src/ci/docker/host-x86_64/mingw-check/Dockerfile index b32fa6c8e4eef..418408e9242ae 100644 --- a/src/ci/docker/host-x86_64/mingw-check/Dockerfile +++ b/src/ci/docker/host-x86_64/mingw-check/Dockerfile @@ -47,7 +47,8 @@ COPY host-x86_64/mingw-check/validate-error-codes.sh /scripts/ ENV SCRIPT \ python3 ../x.py check --stage 0 --set build.optimized-compiler-builtins=false core alloc std --target=aarch64-unknown-linux-gnu,i686-pc-windows-msvc,i686-unknown-linux-gnu,x86_64-apple-darwin,x86_64-pc-windows-gnu,x86_64-pc-windows-msvc && \ /scripts/check-default-config-profiles.sh && \ - python3 ../x.py check --target=i686-pc-windows-gnu --host=i686-pc-windows-gnu && \ + python3 ../x.py check compiletest --set build.compiletest-use-stage0-libtest=true && \ + python3 ../x.py check --target=x86_64-pc-windows-gnu --host=x86_64-pc-windows-gnu && \ python3 ../x.py clippy ci && \ python3 ../x.py build --stage 0 src/tools/build-manifest && \ python3 ../x.py test --stage 0 src/tools/compiletest && \ diff --git a/src/ci/docker/host-x86_64/test-various/uefi_qemu_test/Cargo.lock b/src/ci/docker/host-x86_64/test-various/uefi_qemu_test/Cargo.lock index dacf531e4048d..8b6a664ad9397 100644 --- a/src/ci/docker/host-x86_64/test-various/uefi_qemu_test/Cargo.lock +++ b/src/ci/docker/host-x86_64/test-various/uefi_qemu_test/Cargo.lock @@ -1,12 +1,12 @@ # This file is automatically @generated by Cargo. # It is not intended for manual editing. -version = 3 +version = 4 [[package]] name = "r-efi" -version = "4.5.0" +version = "5.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e9e935efc5854715dfc0a4c9ef18dc69dee0ec3bf9cc3ab740db831c0fdd86a3" +checksum = "74765f6d916ee2faa39bc8e68e4f3ed8949b48cccdac59983d287a7cb71ce9c5" [[package]] name = "uefi_qemu_test" diff --git a/src/ci/docker/host-x86_64/test-various/uefi_qemu_test/Cargo.toml b/src/ci/docker/host-x86_64/test-various/uefi_qemu_test/Cargo.toml index 976245f5bdd06..1a8d0d94368f6 100644 --- a/src/ci/docker/host-x86_64/test-various/uefi_qemu_test/Cargo.toml +++ b/src/ci/docker/host-x86_64/test-various/uefi_qemu_test/Cargo.toml @@ -7,4 +7,4 @@ edition = "2021" resolver = "2" [dependencies] -r-efi = "4.1.0" +r-efi = "5.2.0" diff --git a/src/ci/docker/host-x86_64/x86_64-gnu-llvm-19/Dockerfile b/src/ci/docker/host-x86_64/x86_64-gnu-llvm-19/Dockerfile index e0ed2e227f810..c09be047c6a80 100644 --- a/src/ci/docker/host-x86_64/x86_64-gnu-llvm-19/Dockerfile +++ b/src/ci/docker/host-x86_64/x86_64-gnu-llvm-19/Dockerfile @@ -3,6 +3,7 @@ FROM ubuntu:24.10 ARG DEBIAN_FRONTEND=noninteractive RUN apt-get update && apt-get install -y --no-install-recommends \ + bzip2 \ g++ \ gcc-multilib \ make \ @@ -58,11 +59,10 @@ COPY scripts/shared.sh /scripts/ ARG SCRIPT_ARG -COPY scripts/add_dummy_commit.sh /tmp/ COPY scripts/x86_64-gnu-llvm.sh /tmp/ COPY scripts/x86_64-gnu-llvm2.sh /tmp/ COPY scripts/x86_64-gnu-llvm3.sh /tmp/ COPY scripts/stage_2_test_set1.sh /tmp/ COPY scripts/stage_2_test_set2.sh /tmp/ -ENV SCRIPT "/tmp/add_dummy_commit.sh && /tmp/${SCRIPT_ARG}" +ENV SCRIPT "/tmp/${SCRIPT_ARG}" diff --git a/src/ci/docker/host-x86_64/x86_64-gnu-llvm-18/Dockerfile b/src/ci/docker/host-x86_64/x86_64-gnu-llvm-20/Dockerfile similarity index 89% rename from src/ci/docker/host-x86_64/x86_64-gnu-llvm-18/Dockerfile rename to src/ci/docker/host-x86_64/x86_64-gnu-llvm-20/Dockerfile index aefc0f376f689..83a3bfb37a54b 100644 --- a/src/ci/docker/host-x86_64/x86_64-gnu-llvm-18/Dockerfile +++ b/src/ci/docker/host-x86_64/x86_64-gnu-llvm-20/Dockerfile @@ -1,4 +1,4 @@ -FROM ubuntu:24.04 +FROM ubuntu:25.04 ARG DEBIAN_FRONTEND=noninteractive @@ -16,8 +16,8 @@ RUN apt-get update && apt-get install -y --no-install-recommends \ cmake \ sudo \ gdb \ - llvm-18-tools \ - llvm-18-dev \ + llvm-20-tools \ + llvm-20-dev \ libedit-dev \ libssl-dev \ pkg-config \ @@ -50,7 +50,7 @@ ENV EXTERNAL_LLVM 1 # Using llvm-link-shared due to libffi issues -- see #34486 ENV RUST_CONFIGURE_ARGS \ --build=x86_64-unknown-linux-gnu \ - --llvm-root=/usr/lib/llvm-18 \ + --llvm-root=/usr/lib/llvm-20 \ --enable-llvm-link-shared \ --set rust.randomize-layout=true \ --set rust.thin-lto-import-instr-limit=10 @@ -59,11 +59,10 @@ COPY scripts/shared.sh /scripts/ ARG SCRIPT_ARG -COPY scripts/add_dummy_commit.sh /tmp/ COPY scripts/x86_64-gnu-llvm.sh /tmp/ COPY scripts/x86_64-gnu-llvm2.sh /tmp/ COPY scripts/x86_64-gnu-llvm3.sh /tmp/ COPY scripts/stage_2_test_set1.sh /tmp/ COPY scripts/stage_2_test_set2.sh /tmp/ -ENV SCRIPT "/tmp/add_dummy_commit.sh && /tmp/${SCRIPT_ARG}" +ENV SCRIPT "/tmp/${SCRIPT_ARG}" diff --git a/src/ci/docker/host-x86_64/x86_64-gnu-tools/Dockerfile b/src/ci/docker/host-x86_64/x86_64-gnu-tools/Dockerfile index 89806634c6c26..05c90af780732 100644 --- a/src/ci/docker/host-x86_64/x86_64-gnu-tools/Dockerfile +++ b/src/ci/docker/host-x86_64/x86_64-gnu-tools/Dockerfile @@ -101,4 +101,5 @@ COPY scripts/shared.sh /scripts/ # the local version of the package is different than the one used by the CI. ENV SCRIPT /tmp/checktools.sh ../x.py && \ npm install browser-ui-test@$(head -n 1 /tmp/browser-ui-test.version) --unsafe-perm=true && \ + python3 ../x.py check compiletest --set build.compiletest-use-stage0-libtest=true && \ python3 ../x.py test tests/rustdoc-gui --stage 2 --test-args "'--jobs 1'" diff --git a/src/ci/docker/host-x86_64/x86_64-gnu-tools/browser-ui-test.version b/src/ci/docker/host-x86_64/x86_64-gnu-tools/browser-ui-test.version index 3428dd4826a73..e15121e0f3162 100644 --- a/src/ci/docker/host-x86_64/x86_64-gnu-tools/browser-ui-test.version +++ b/src/ci/docker/host-x86_64/x86_64-gnu-tools/browser-ui-test.version @@ -1 +1 @@ -0.20.3 \ No newline at end of file +0.20.6 \ No newline at end of file diff --git a/src/ci/docker/host-x86_64/x86_64-gnu-tools/checktools.sh b/src/ci/docker/host-x86_64/x86_64-gnu-tools/checktools.sh index 8324d1ec58624..28c035daa5d50 100755 --- a/src/ci/docker/host-x86_64/x86_64-gnu-tools/checktools.sh +++ b/src/ci/docker/host-x86_64/x86_64-gnu-tools/checktools.sh @@ -62,7 +62,6 @@ case $HOST_TARGET in # See # For now, these tests are moved to `x86_64-msvc-ext2` in `src/ci/github-actions/jobs.yml`. #python3 "$X_PY" test --stage 2 src/tools/miri --target aarch64-apple-darwin --test-args pass - #python3 "$X_PY" test --stage 2 src/tools/miri --target i686-pc-windows-gnu --test-args pass ;; *) echo "FATAL: unexpected host $HOST_TARGET" diff --git a/src/ci/docker/run.sh b/src/ci/docker/run.sh index 00d791eeb6b38..36f7df2b06907 100755 --- a/src/ci/docker/run.sh +++ b/src/ci/docker/run.sh @@ -288,7 +288,7 @@ args="$args --privileged" # `LOCAL_USER_ID` (recognized in `src/ci/run.sh`) to ensure that files are all # read/written as the same user as the bare-metal user. if [ -f /.dockerenv ]; then - docker create -v /checkout --name checkout alpine:3.4 /bin/true + docker create -v /checkout --name checkout ghcr.io/rust-lang/alpine:3.4 /bin/true docker cp . checkout:/checkout args="$args --volumes-from checkout" else diff --git a/src/ci/docker/scripts/add_dummy_commit.sh b/src/ci/docker/scripts/add_dummy_commit.sh deleted file mode 100755 index 029e4ae141f8f..0000000000000 --- a/src/ci/docker/scripts/add_dummy_commit.sh +++ /dev/null @@ -1,19 +0,0 @@ -#!/bin/bash - -set -ex - -if [ "$READ_ONLY_SRC" = "0" ]; then - # `core::builder::tests::ci_rustc_if_unchanged_logic` bootstrap test ensures that - # "download-rustc=if-unchanged" logic don't use CI rustc while there are changes on - # compiler and/or library. Here we are adding a dummy commit on compiler and running - # that test to make sure we never download CI rustc with a change on the compiler tree. - echo "" >> ../compiler/rustc/src/main.rs - git config --global user.email "dummy@dummy.com" - git config --global user.name "dummy" - git add ../compiler/rustc/src/main.rs - git commit -m "test commit for rust.download-rustc=if-unchanged logic" - DISABLE_CI_RUSTC_IF_INCOMPATIBLE=0 ../x.py test bootstrap \ - -- core::builder::tests::ci_rustc_if_unchanged_logic - # Revert the dummy commit - git reset --hard HEAD~1 -fi diff --git a/src/ci/docker/scripts/rfl-build.sh b/src/ci/docker/scripts/rfl-build.sh index ea8066d95e028..1d280948ebe68 100755 --- a/src/ci/docker/scripts/rfl-build.sh +++ b/src/ci/docker/scripts/rfl-build.sh @@ -2,7 +2,7 @@ set -euo pipefail -LINUX_VERSION=v6.14-rc3 +LINUX_VERSION=v6.15-rc4 # Build rustc, rustdoc, cargo, clippy-driver and rustfmt ../x.py build --stage 2 library rustdoc clippy rustfmt diff --git a/src/ci/docker/scripts/x86_64-gnu-llvm3.sh b/src/ci/docker/scripts/x86_64-gnu-llvm3.sh index d1bf2dab1e2d7..17eb2cea59ac1 100755 --- a/src/ci/docker/scripts/x86_64-gnu-llvm3.sh +++ b/src/ci/docker/scripts/x86_64-gnu-llvm3.sh @@ -2,8 +2,6 @@ set -ex -/tmp/add_dummy_commit.sh - ##### Test stage 1 ##### ../x.py --stage 1 test --skip src/tools/tidy diff --git a/src/ci/github-actions/jobs.yml b/src/ci/github-actions/jobs.yml index f62ed23d038c7..afcc092e78e8d 100644 --- a/src/ci/github-actions/jobs.yml +++ b/src/ci/github-actions/jobs.yml @@ -23,8 +23,8 @@ runners: os: ubuntu-24.04-16core-64gb <<: *base-job - - &job-macos-xl - os: macos-13 # We use the standard runner for now + - &job-macos + os: macos-13 <<: *base-job - &job-macos-m1 @@ -56,9 +56,24 @@ runners: - &job-aarch64-linux-8c os: ubuntu-24.04-arm64-8core-32gb <<: *base-job + + # Codebuild runners are provisioned in + # https://github.com/rust-lang/simpleinfra/blob/b7ddd5e6bec8a93ec30510cdddec02c5666fefe9/terragrunt/accounts/ci-prod/ci-runners/terragrunt.hcl#L2 + - &job-linux-36c-codebuild + free_disk: true + codebuild: true + os: codebuild-ubuntu-22-36c-$github.run_id-$github.run_attempt + <<: *base-job + + - &job-linux-8c-codebuild + free_disk: true + codebuild: true + os: codebuild-ubuntu-22-8c-$github.run_id-$github.run_attempt + <<: *base-job + envs: env-x86_64-apple-tests: &env-x86_64-apple-tests - SCRIPT: ./x.py --stage 2 test --skip tests/ui --skip tests/rustdoc -- --exact + SCRIPT: ./x.py check compiletest --set build.compiletest-use-stage0-libtest=true && ./x.py --stage 2 test --skip tests/ui --skip tests/rustdoc -- --exact RUST_CONFIGURE_ARGS: --build=x86_64-apple-darwin --enable-sanitizers --enable-profiler --set rust.jemalloc RUSTC_RETRY_LINKER_ON_SEGFAULT: 1 # Ensure that host tooling is tested on our minimum supported macOS version. @@ -105,11 +120,9 @@ pr: - name: mingw-check-tidy continue_on_error: true <<: *job-linux-4c - - name: x86_64-gnu-llvm-18 + - name: x86_64-gnu-llvm-19 env: ENABLE_GCC_CODEGEN: "1" - # We are adding (temporarily) a dummy commit on the compiler - READ_ONLY_SRC: "0" DOCKER_SCRIPT: x86_64-gnu-llvm.sh <<: *job-linux-16c - name: x86_64-gnu-tools @@ -153,7 +166,7 @@ auto: <<: *job-linux-4c - name: dist-arm-linux - <<: *job-linux-8c + <<: *job-linux-8c-codebuild - name: dist-armhf-linux <<: *job-linux-4c @@ -206,7 +219,7 @@ auto: - name: dist-x86_64-linux env: CODEGEN_BACKENDS: llvm,cranelift - <<: *job-linux-16c + <<: *job-linux-36c-codebuild - name: dist-x86_64-linux-alt env: @@ -304,56 +317,53 @@ auto: - name: x86_64-gnu-distcheck <<: *job-linux-8c - # The x86_64-gnu-llvm-19 job is split into multiple jobs to run tests in parallel. - # x86_64-gnu-llvm-19-1 skips tests that run in x86_64-gnu-llvm-19-{2,3}. - - name: x86_64-gnu-llvm-19-1 + # The x86_64-gnu-llvm-20 job is split into multiple jobs to run tests in parallel. + # x86_64-gnu-llvm-20-1 skips tests that run in x86_64-gnu-llvm-20-{2,3}. + - name: x86_64-gnu-llvm-20-1 env: RUST_BACKTRACE: 1 - IMAGE: x86_64-gnu-llvm-19 + IMAGE: x86_64-gnu-llvm-20 DOCKER_SCRIPT: stage_2_test_set1.sh <<: *job-linux-4c - # Skip tests that run in x86_64-gnu-llvm-19-{1,3} - - name: x86_64-gnu-llvm-19-2 + # Skip tests that run in x86_64-gnu-llvm-20-{1,3} + - name: x86_64-gnu-llvm-20-2 env: RUST_BACKTRACE: 1 - IMAGE: x86_64-gnu-llvm-19 + IMAGE: x86_64-gnu-llvm-20 DOCKER_SCRIPT: x86_64-gnu-llvm2.sh <<: *job-linux-4c - # Skip tests that run in x86_64-gnu-llvm-19-{1,2} - - name: x86_64-gnu-llvm-19-3 + # Skip tests that run in x86_64-gnu-llvm-20-{1,2} + - name: x86_64-gnu-llvm-20-3 env: RUST_BACKTRACE: 1 - IMAGE: x86_64-gnu-llvm-19 + IMAGE: x86_64-gnu-llvm-20 DOCKER_SCRIPT: x86_64-gnu-llvm3.sh <<: *job-linux-4c - # The x86_64-gnu-llvm-18 job is split into multiple jobs to run tests in parallel. - # x86_64-gnu-llvm-18-1 skips tests that run in x86_64-gnu-llvm-18-{2,3}. - - name: x86_64-gnu-llvm-18-1 + # The x86_64-gnu-llvm-19 job is split into multiple jobs to run tests in parallel. + # x86_64-gnu-llvm-19-1 skips tests that run in x86_64-gnu-llvm-19-{2,3}. + - name: x86_64-gnu-llvm-19-1 env: RUST_BACKTRACE: 1 - READ_ONLY_SRC: "0" - IMAGE: x86_64-gnu-llvm-18 + IMAGE: x86_64-gnu-llvm-19 DOCKER_SCRIPT: stage_2_test_set1.sh <<: *job-linux-4c - # Skip tests that run in x86_64-gnu-llvm-18-{1,3} - - name: x86_64-gnu-llvm-18-2 + # Skip tests that run in x86_64-gnu-llvm-19-{1,3} + - name: x86_64-gnu-llvm-19-2 env: RUST_BACKTRACE: 1 - READ_ONLY_SRC: "0" - IMAGE: x86_64-gnu-llvm-18 + IMAGE: x86_64-gnu-llvm-19 DOCKER_SCRIPT: x86_64-gnu-llvm2.sh <<: *job-linux-4c - # Skip tests that run in x86_64-gnu-llvm-18-{1,2} - - name: x86_64-gnu-llvm-18-3 + # Skip tests that run in x86_64-gnu-llvm-19-{1,2} + - name: x86_64-gnu-llvm-19-3 env: RUST_BACKTRACE: 1 - READ_ONLY_SRC: "0" - IMAGE: x86_64-gnu-llvm-18 + IMAGE: x86_64-gnu-llvm-19 DOCKER_SCRIPT: x86_64-gnu-llvm3.sh <<: *job-linux-4c @@ -383,7 +393,7 @@ auto: NO_OVERFLOW_CHECKS: 1 DIST_REQUIRE_ALL_TOOLS: 1 CODEGEN_BACKENDS: llvm,cranelift - <<: *job-macos-xl + <<: *job-macos - name: dist-apple-various env: @@ -400,18 +410,18 @@ auto: NO_LLVM_ASSERTIONS: 1 NO_DEBUG_ASSERTIONS: 1 NO_OVERFLOW_CHECKS: 1 - <<: *job-macos-xl + <<: *job-macos - name: x86_64-apple-1 env: <<: *env-x86_64-apple-tests - <<: *job-macos-xl + <<: *job-macos - name: x86_64-apple-2 env: SCRIPT: ./x.py --stage 2 test tests/ui tests/rustdoc <<: *env-x86_64-apple-tests - <<: *job-macos-xl + <<: *job-macos - name: dist-aarch64-apple env: @@ -501,7 +511,7 @@ auto: env: SCRIPT: > python x.py test --stage 2 src/tools/miri --target aarch64-apple-darwin --test-args pass && - python x.py test --stage 2 src/tools/miri --target i686-pc-windows-gnu --test-args pass && + python x.py test --stage 2 src/tools/miri --target x86_64-pc-windows-gnu --test-args pass && python x.py miri --stage 2 library/core --test-args notest && python x.py miri --stage 2 library/alloc --test-args notest && python x.py miri --stage 2 library/std --test-args notest @@ -533,31 +543,6 @@ auto: # came from the mingw-w64 SourceForge download site. Unfortunately # SourceForge is notoriously flaky, so we mirror it on our own infrastructure. - # i686-mingw is split into three jobs to run tests in parallel. - - name: i686-mingw-1 - env: - RUST_CONFIGURE_ARGS: --build=i686-pc-windows-gnu - SCRIPT: make ci-mingw-x-1 - # There is no dist-i686-mingw-alt, so there is no prebuilt LLVM with assertions - NO_DOWNLOAD_CI_LLVM: 1 - <<: *job-windows-25 - - - name: i686-mingw-2 - env: - RUST_CONFIGURE_ARGS: --build=i686-pc-windows-gnu - SCRIPT: make ci-mingw-x-2 - # There is no dist-i686-mingw-alt, so there is no prebuilt LLVM with assertions - NO_DOWNLOAD_CI_LLVM: 1 - <<: *job-windows-25 - - - name: i686-mingw-3 - env: - RUST_CONFIGURE_ARGS: --build=i686-pc-windows-gnu - SCRIPT: make ci-mingw-bootstrap - # There is no dist-i686-mingw-alt, so there is no prebuilt LLVM with assertions - NO_DOWNLOAD_CI_LLVM: 1 - <<: *job-windows-25 - # x86_64-mingw is split into two jobs to run tests in parallel. - name: x86_64-mingw-1 env: diff --git a/src/ci/run.sh b/src/ci/run.sh index 6980d8220e574..b6143af632ddc 100755 --- a/src/ci/run.sh +++ b/src/ci/run.sh @@ -183,6 +183,9 @@ else RUST_CONFIGURE_ARGS="$RUST_CONFIGURE_ARGS --set llvm.static-libstdcpp" fi + # Download GCC from CI on test builders + RUST_CONFIGURE_ARGS="$RUST_CONFIGURE_ARGS --set gcc.download-ci-gcc=true" + if [ "$NO_DOWNLOAD_CI_RUSTC" = "" ]; then RUST_CONFIGURE_ARGS="$RUST_CONFIGURE_ARGS --set rust.download-rustc=if-unchanged" fi diff --git a/src/ci/scripts/free-disk-space.sh b/src/ci/scripts/free-disk-space.sh index 055a6ac2211e3..173f64858b371 100755 --- a/src/ci/scripts/free-disk-space.sh +++ b/src/ci/scripts/free-disk-space.sh @@ -14,6 +14,17 @@ isX86() { fi } +# Check if we're on a GitHub hosted runner. +# In aws codebuild, the variable RUNNER_ENVIRONMENT is "self-hosted". +isGitHubRunner() { + # `:-` means "use the value of RUNNER_ENVIRONMENT if it exists, otherwise use an empty string". + if [[ "${RUNNER_ENVIRONMENT:-}" == "github-hosted" ]]; then + return 0 + else + return 1 + fi +} + # print a line of the specified character printSeparationLine() { for ((i = 0; i < 80; i++)); do @@ -32,7 +43,7 @@ getAvailableSpace() { # make Kb human readable (assume the input is Kb) # REF: https://unix.stackexchange.com/a/44087/60849 formatByteCount() { - numfmt --to=iec-i --suffix=B --padding=7 "$1"'000' + numfmt --to=iec-i --suffix=B --padding=7 "${1}000" } # macro to output saved space @@ -45,6 +56,11 @@ printSavedSpace() { after=$(getAvailableSpace) local saved=$((after - before)) + if [ "$saved" -lt 0 ]; then + echo "::warning::Saved space is negative: $saved. Using '0' as saved space." + saved=0 + fi + echo "" printSeparationLine "*" if [ -n "${title}" ]; then @@ -71,58 +87,83 @@ printDF() { removeUnusedFilesAndDirs() { local to_remove=( - "/usr/local/aws-sam-cli" - "/usr/local/doc/cmake" - "/usr/local/julia"* - "/usr/local/lib/android" - "/usr/local/share/chromedriver-"* - "/usr/local/share/chromium" - "/usr/local/share/cmake-"* - "/usr/local/share/edge_driver" - "/usr/local/share/gecko_driver" - "/usr/local/share/icons" - "/usr/local/share/vim" - "/usr/local/share/emacs" - "/usr/local/share/powershell" - "/usr/local/share/vcpkg" - "/usr/share/apache-maven-"* - "/usr/share/gradle-"* "/usr/share/java" - "/usr/share/kotlinc" - "/usr/share/miniconda" - "/usr/share/php" - "/usr/share/ri" - "/usr/share/swift" - - # binaries - "/usr/local/bin/azcopy" - "/usr/local/bin/bicep" - "/usr/local/bin/ccmake" - "/usr/local/bin/cmake-"* - "/usr/local/bin/cmake" - "/usr/local/bin/cpack" - "/usr/local/bin/ctest" - "/usr/local/bin/helm" - "/usr/local/bin/kind" - "/usr/local/bin/kustomize" - "/usr/local/bin/minikube" - "/usr/local/bin/packer" - "/usr/local/bin/phpunit" - "/usr/local/bin/pulumi-"* - "/usr/local/bin/pulumi" - "/usr/local/bin/stack" - - # Haskell runtime - "/usr/local/.ghcup" - - # Azure - "/opt/az" - "/usr/share/az_"* - - # Environment variable set by GitHub Actions - "$AGENT_TOOLSDIRECTORY" ) + if isGitHubRunner; then + to_remove+=( + "/usr/local/aws-sam-cli" + "/usr/local/doc/cmake" + "/usr/local/julia"* + "/usr/local/lib/android" + "/usr/local/share/chromedriver-"* + "/usr/local/share/chromium" + "/usr/local/share/cmake-"* + "/usr/local/share/edge_driver" + "/usr/local/share/emacs" + "/usr/local/share/gecko_driver" + "/usr/local/share/icons" + "/usr/local/share/powershell" + "/usr/local/share/vcpkg" + "/usr/local/share/vim" + "/usr/share/apache-maven-"* + "/usr/share/gradle-"* + "/usr/share/kotlinc" + "/usr/share/miniconda" + "/usr/share/php" + "/usr/share/ri" + "/usr/share/swift" + + # binaries + "/usr/local/bin/azcopy" + "/usr/local/bin/bicep" + "/usr/local/bin/ccmake" + "/usr/local/bin/cmake-"* + "/usr/local/bin/cmake" + "/usr/local/bin/cpack" + "/usr/local/bin/ctest" + "/usr/local/bin/helm" + "/usr/local/bin/kind" + "/usr/local/bin/kustomize" + "/usr/local/bin/minikube" + "/usr/local/bin/packer" + "/usr/local/bin/phpunit" + "/usr/local/bin/pulumi-"* + "/usr/local/bin/pulumi" + "/usr/local/bin/stack" + + # Haskell runtime + "/usr/local/.ghcup" + + # Azure + "/opt/az" + "/usr/share/az_"* + ) + + if [ -n "${AGENT_TOOLSDIRECTORY:-}" ]; then + # Environment variable set by GitHub Actions + to_remove+=( + "${AGENT_TOOLSDIRECTORY}" + ) + else + echo "::warning::AGENT_TOOLSDIRECTORY is not set. Skipping removal." + fi + else + # Remove folders and files present in AWS CodeBuild + to_remove+=( + # binaries + "/usr/local/bin/ecs-cli" + "/usr/local/bin/eksctl" + "/usr/local/bin/kubectl" + + "${HOME}/.gradle" + "${HOME}/.dotnet" + "${HOME}/.goenv" + "${HOME}/.phpenv" + + ) + fi + for element in "${to_remove[@]}"; do if [ ! -e "$element" ]; then # The file or directory doesn't exist. @@ -155,19 +196,28 @@ cleanPackages() { '^dotnet-.*' '^llvm-.*' '^mongodb-.*' - 'azure-cli' 'firefox' 'libgl1-mesa-dri' 'mono-devel' 'php.*' ) - if isX86; then + if isGitHubRunner; then + packages+=( + azure-cli + ) + + if isX86; then + packages+=( + 'google-chrome-stable' + 'google-cloud-cli' + 'google-cloud-sdk' + 'powershell' + ) + fi + else packages+=( 'google-chrome-stable' - 'google-cloud-cli' - 'google-cloud-sdk' - 'powershell' ) fi diff --git a/src/ci/scripts/setup-upstream-remote.sh b/src/ci/scripts/setup-upstream-remote.sh deleted file mode 100755 index 52b4c98a89016..0000000000000 --- a/src/ci/scripts/setup-upstream-remote.sh +++ /dev/null @@ -1,24 +0,0 @@ -#!/bin/bash -# In CI environments, bootstrap is forced to use the remote upstream based -# on "git_repository" and "nightly_branch" values from src/stage0 file. -# This script configures the remote as it may not exist by default. - -set -euo pipefail -IFS=$'\n\t' - -ci_dir=$(cd $(dirname $0) && pwd)/.. -source "$ci_dir/shared.sh" - -git_repository=$(parse_stage0_file_by_key "git_repository") -nightly_branch=$(parse_stage0_file_by_key "nightly_branch") - -# Configure "rust-lang/rust" upstream remote only when it's not origin. -if [ -z "$(git config remote.origin.url | grep $git_repository)" ]; then - echo "Configuring https://github.com/$git_repository remote as upstream." - git remote add upstream "https://github.com/$git_repository" - REMOTE_NAME="upstream" -else - REMOTE_NAME="origin" -fi - -git fetch $REMOTE_NAME $nightly_branch diff --git a/src/doc/book b/src/doc/book index 45f05367360f0..d33916341d480 160000 --- a/src/doc/book +++ b/src/doc/book @@ -1 +1 @@ -Subproject commit 45f05367360f033f89235eacbbb54e8d73ce6b70 +Subproject commit d33916341d480caede1d0ae57cbeae23aab23e88 diff --git a/src/doc/edition-guide b/src/doc/edition-guide index 1e27e5e6d5133..467f45637b73e 160000 --- a/src/doc/edition-guide +++ b/src/doc/edition-guide @@ -1 +1 @@ -Subproject commit 1e27e5e6d5133ae4612f5cc195c15fc8d51b1c9c +Subproject commit 467f45637b73ec6aa70fb36bc3054bb50b8967ea diff --git a/src/doc/nomicon b/src/doc/nomicon index b4448fa406a6d..0c10c30cc5473 160000 --- a/src/doc/nomicon +++ b/src/doc/nomicon @@ -1 +1 @@ -Subproject commit b4448fa406a6dccde62d1e2f34f70fc51814cdcc +Subproject commit 0c10c30cc54736c5c194ce98c50e2de84eeb6e79 diff --git a/src/doc/reference b/src/doc/reference index e95ebdfee0251..3340922df189b 160000 --- a/src/doc/reference +++ b/src/doc/reference @@ -1 +1 @@ -Subproject commit e95ebdfee02514d93f79ec92ae310a804e87f01f +Subproject commit 3340922df189bddcbaad17dc3927d51a76bcd5ed diff --git a/src/doc/rust-by-example b/src/doc/rust-by-example index 6f69823c28ae8..0d7964d5b22cf 160000 --- a/src/doc/rust-by-example +++ b/src/doc/rust-by-example @@ -1 +1 @@ -Subproject commit 6f69823c28ae8d929d6c815181c73d3e99ef16d3 +Subproject commit 0d7964d5b22cf920237ef1282d869564b4883b88 diff --git a/src/doc/rustc-dev-guide/.github/workflows/ci.yml b/src/doc/rustc-dev-guide/.github/workflows/ci.yml index 22a4fb1901ab8..daf5223cbd4ac 100644 --- a/src/doc/rustc-dev-guide/.github/workflows/ci.yml +++ b/src/doc/rustc-dev-guide/.github/workflows/ci.yml @@ -14,10 +14,11 @@ jobs: if: github.repository == 'rust-lang/rustc-dev-guide' runs-on: ubuntu-latest env: - MDBOOK_VERSION: 0.4.21 + MDBOOK_VERSION: 0.4.48 MDBOOK_LINKCHECK2_VERSION: 0.9.1 MDBOOK_MERMAID_VERSION: 0.12.6 MDBOOK_TOC_VERSION: 0.11.2 + MDBOOK_OUTPUT__LINKCHECK__FOLLOW_WEB_LINKS: ${{ github.event_name != 'pull_request' }} DEPLOY_DIR: book/html BASE_SHA: ${{ github.event.pull_request.base.sha }} GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/src/doc/rustc-dev-guide/.github/workflows/rustc-pull.yml b/src/doc/rustc-dev-guide/.github/workflows/rustc-pull.yml index b19eccf9eb8c1..1e430d8b4e65a 100644 --- a/src/doc/rustc-dev-guide/.github/workflows/rustc-pull.yml +++ b/src/doc/rustc-dev-guide/.github/workflows/rustc-pull.yml @@ -28,7 +28,7 @@ jobs: # Cache the josh directory with checked out rustc cache-directories: "/home/runner/.cache/rustc-dev-guide-josh" - name: Install josh - run: RUSTFLAGS="--cap-lints warn" cargo +stable install josh-proxy --git https://github.com/josh-project/josh --tag r24.10.04 + run: RUSTFLAGS="--cap-lints warn" cargo install josh-proxy --git https://github.com/josh-project/josh --tag r24.10.04 - name: Setup bot git name and email run: | git config --global user.name 'The rustc-dev-guide Cronjob Bot' diff --git a/src/doc/rustc-dev-guide/README.md b/src/doc/rustc-dev-guide/README.md index 6a25a91f56a5d..08158801788c9 100644 --- a/src/doc/rustc-dev-guide/README.md +++ b/src/doc/rustc-dev-guide/README.md @@ -43,13 +43,13 @@ rustdocs][rustdocs]. To build a local static HTML site, install [`mdbook`](https://github.com/rust-lang/mdBook) with: ``` -> cargo install mdbook mdbook-linkcheck2 mdbook-toc mdbook-mermaid +cargo install mdbook mdbook-linkcheck2 mdbook-toc mdbook-mermaid ``` and execute the following command in the root of the repository: ``` -> mdbook build --open +mdbook build --open ``` The build files are found in the `book/html` directory. @@ -61,8 +61,8 @@ checking is **not** run by default locally, though it is in CI. To enable it locally, set the environment variable `ENABLE_LINKCHECK=1` like in the following example. -```console -$ ENABLE_LINKCHECK=1 mdbook serve +``` +ENABLE_LINKCHECK=1 mdbook serve ``` ### Table of Contents @@ -77,7 +77,7 @@ This repository is linked to `rust-lang/rust` as a [josh](https://josh-project.g You'll need to install `josh-proxy` locally via ``` -cargo +stable install josh-proxy --git https://github.com/josh-project/josh --tag r24.10.04 +cargo install josh-proxy --git https://github.com/josh-project/josh --tag r24.10.04 ``` Older versions of `josh-proxy` may not round trip commits losslessly so it is important to install this exact version. @@ -86,14 +86,14 @@ Older versions of `josh-proxy` may not round trip commits losslessly so it is im 1) Checkout a new branch that will be used to create a PR into `rust-lang/rustc-dev-guide` 2) Run the pull command ``` - $ cargo run --manifest-path josh-sync/Cargo.toml rustc-pull + cargo run --manifest-path josh-sync/Cargo.toml rustc-pull ``` 3) Push the branch to your fork and create a PR into `rustc-dev-guide` ### Push changes from this repository into `rust-lang/rust` 1) Run the push command to create a branch named `` in a `rustc` fork under the `` account ``` - $ cargo run --manifest-path josh-sync/Cargo.toml rustc-push + cargo run --manifest-path josh-sync/Cargo.toml rustc-push ``` 2) Create a PR from `` into `rust-lang/rust` @@ -106,5 +106,5 @@ You may observe "Nothing to pull" even if you *know* rustc-pull has something to To minimize the likelihood of this happening, you may wish to keep a separate *minimal* git config that *only* has `[user]` entries from global git config, then repoint system git to use the minimal git config instead. E.g. ``` -$ GIT_CONFIG_GLOBAL=/path/to/minimal/gitconfig GIT_CONFIG_SYSTEM='' cargo +stable run --manifest-path josh-sync/Cargo.toml -- rustc-pull +GIT_CONFIG_GLOBAL=/path/to/minimal/gitconfig GIT_CONFIG_SYSTEM='' cargo run --manifest-path josh-sync/Cargo.toml -- rustc-pull ``` diff --git a/src/doc/rustc-dev-guide/book.toml b/src/doc/rustc-dev-guide/book.toml index 67069d9930f57..b84b1e7548a86 100644 --- a/src/doc/rustc-dev-guide/book.toml +++ b/src/doc/rustc-dev-guide/book.toml @@ -1,6 +1,6 @@ [book] title = "Rust Compiler Development Guide" -author = "The Rust Project Developers" +authors = ["The Rust Project Developers"] description = "A guide to developing the Rust compiler (rustc)" [build] @@ -62,5 +62,7 @@ warning-policy = "error" "/diagnostics/sessiondiagnostic.html" = "diagnostic-structs.html" "/diagnostics/diagnostic-codes.html" = "error-codes.html" "/miri.html" = "const-eval/interpret.html" -"/tests/integration.html" = "ecosystem.html" +"/tests/fuchsia.html" = "ecosystem-test-jobs/fuchsia.html" "/tests/headers.html" = "directives.html" +"/tests/integration.html" = "ecosystem.html" +"/tests/rust-for-linux.html" = "ecosystem-test-jobs/rust-for-linux.html" diff --git a/src/doc/rustc-dev-guide/ci/date-check/src/main.rs b/src/doc/rustc-dev-guide/ci/date-check/src/main.rs index 5ab3e6c8b65a8..9af69dbbf3f5c 100644 --- a/src/doc/rustc-dev-guide/ci/date-check/src/main.rs +++ b/src/doc/rustc-dev-guide/ci/date-check/src/main.rs @@ -1,11 +1,8 @@ -use std::{ - collections::BTreeMap, - convert::TryInto as _, - env, fmt, fs, - path::{Path, PathBuf}, - process, - str::FromStr, -}; +use std::collections::BTreeMap; +use std::convert::TryInto as _; +use std::path::{Path, PathBuf}; +use std::str::FromStr; +use std::{env, fmt, fs, process}; use chrono::{Datelike as _, Month, TimeZone as _, Utc}; use glob::glob; @@ -19,19 +16,13 @@ struct Date { impl Date { fn months_since(self, other: Date) -> Option { - let self_chrono = Utc - .with_ymd_and_hms(self.year.try_into().unwrap(), self.month, 1, 0, 0, 0) - .unwrap(); - let other_chrono = Utc - .with_ymd_and_hms(other.year.try_into().unwrap(), other.month, 1, 0, 0, 0) - .unwrap(); + let self_chrono = + Utc.with_ymd_and_hms(self.year.try_into().unwrap(), self.month, 1, 0, 0, 0).unwrap(); + let other_chrono = + Utc.with_ymd_and_hms(other.year.try_into().unwrap(), other.month, 1, 0, 0, 0).unwrap(); let duration_since = self_chrono.signed_duration_since(other_chrono); let months_since = duration_since.num_days() / 30; - if months_since < 0 { - None - } else { - Some(months_since.try_into().unwrap()) - } + if months_since < 0 { None } else { Some(months_since.try_into().unwrap()) } } } @@ -66,26 +57,18 @@ fn collect_dates_from_file(date_regex: &Regex, text: &str) -> Vec<(usize, Date)> date_regex .captures_iter(text) .filter_map(|cap| { - if let (Some(month), Some(year), None, None) | (None, None, Some(month), Some(year)) = ( - cap.name("m1"), - cap.name("y1"), - cap.name("m2"), - cap.name("y2"), - ) { + if let (Some(month), Some(year), None, None) | (None, None, Some(month), Some(year)) = + (cap.name("m1"), cap.name("y1"), cap.name("m2"), cap.name("y2")) + { let year = year.as_str().parse().expect("year"); - let month = Month::from_str(month.as_str()) - .expect("month") - .number_from_month(); + let month = Month::from_str(month.as_str()).expect("month").number_from_month(); Some((cap.get(0).expect("all").range(), Date { year, month })) } else { None } }) .map(|(byte_range, date)| { - line += text[end_of_last_cap..byte_range.end] - .chars() - .filter(|c| *c == '\n') - .count(); + line += text[end_of_last_cap..byte_range.end].chars().filter(|c| *c == '\n').count(); end_of_last_cap = byte_range.end; (line, date) }) @@ -138,10 +121,7 @@ fn main() { let root_dir_path = Path::new(&root_dir); let glob_pat = format!("{}/**/*.md", root_dir); let today_chrono = Utc::now().date_naive(); - let current_month = Date { - year: today_chrono.year_ce().1, - month: today_chrono.month(), - }; + let current_month = Date { year: today_chrono.year_ce().1, month: today_chrono.month() }; let dates_by_file = collect_dates(glob(&glob_pat).unwrap().map(Result::unwrap)); let dates_by_file: BTreeMap<_, _> = @@ -173,10 +153,7 @@ fn main() { println!(); for (path, dates) in dates_by_file { - println!( - "- {}", - path.strip_prefix(&root_dir_path).unwrap_or(&path).display(), - ); + println!("- {}", path.strip_prefix(&root_dir_path).unwrap_or(&path).display(),); for (line, date) in dates { println!(" - [ ] line {}: {}", line, date); } @@ -191,14 +168,8 @@ mod tests { #[test] fn test_months_since() { - let date1 = Date { - year: 2020, - month: 3, - }; - let date2 = Date { - year: 2021, - month: 1, - }; + let date1 = Date { year: 2020, month: 3 }; + let date2 = Date { year: 2021, month: 1 }; assert_eq!(date2.months_since(date1), Some(10)); } @@ -273,83 +244,17 @@ Test8 assert_eq!( collect_dates_from_file(&make_date_regex(), text), vec![ - ( - 3, - Date { - year: 2021, - month: 1, - } - ), - ( - 6, - Date { - year: 2021, - month: 2, - } - ), - ( - 9, - Date { - year: 2021, - month: 3, - } - ), - ( - 11, - Date { - year: 2021, - month: 4, - } - ), - ( - 17, - Date { - year: 2021, - month: 5, - } - ), - ( - 20, - Date { - year: 2021, - month: 1, - } - ), - ( - 23, - Date { - year: 2021, - month: 2, - } - ), - ( - 26, - Date { - year: 2021, - month: 3, - } - ), - ( - 28, - Date { - year: 2021, - month: 4, - } - ), - ( - 34, - Date { - year: 2021, - month: 5, - } - ), - ( - 38, - Date { - year: 2021, - month: 6, - } - ), + (3, Date { year: 2021, month: 1 }), + (6, Date { year: 2021, month: 2 }), + (9, Date { year: 2021, month: 3 }), + (11, Date { year: 2021, month: 4 }), + (17, Date { year: 2021, month: 5 }), + (20, Date { year: 2021, month: 1 }), + (23, Date { year: 2021, month: 2 }), + (26, Date { year: 2021, month: 3 }), + (28, Date { year: 2021, month: 4 }), + (34, Date { year: 2021, month: 5 }), + (38, Date { year: 2021, month: 6 }), ], ); } diff --git a/src/doc/rustc-dev-guide/examples/rustc-driver-example.rs b/src/doc/rustc-dev-guide/examples/rustc-driver-example.rs index 984bd3e37ae30..db6ac18578542 100644 --- a/src/doc/rustc-dev-guide/examples/rustc-driver-example.rs +++ b/src/doc/rustc-dev-guide/examples/rustc-driver-example.rs @@ -1,4 +1,4 @@ -// Tested with nightly-2025-02-13 +// Tested with nightly-2025-03-28 #![feature(rustc_private)] @@ -34,9 +34,9 @@ impl rustc_span::source_map::FileLoader for MyFileLoader { fn read_file(&self, path: &Path) -> io::Result { if path == Path::new("main.rs") { Ok(r#" +static MESSAGE: &str = "Hello, World!"; fn main() { - let message = "Hello, World!"; - println!("{message}"); + println!("{MESSAGE}"); } "# .to_string()) @@ -71,14 +71,12 @@ impl rustc_driver::Callbacks for MyCallbacks { fn after_analysis(&mut self, _compiler: &Compiler, tcx: TyCtxt<'_>) -> Compilation { // Analyze the program and inspect the types of definitions. - for id in tcx.hir().items() { - let hir = tcx.hir(); - let item = hir.item(id); + for id in tcx.hir_free_items() { + let item = &tcx.hir_item(id); match item.kind { - rustc_hir::ItemKind::Static(_, _, _) | rustc_hir::ItemKind::Fn { .. } => { - let name = item.ident; + rustc_hir::ItemKind::Static(ident, ..) | rustc_hir::ItemKind::Fn { ident, .. } => { let ty = tcx.type_of(item.hir_id().owner.def_id); - println!("{name:?}:\t{ty:?}") + println!("{ident:?}:\t{ty:?}") } _ => (), } diff --git a/src/doc/rustc-dev-guide/examples/rustc-driver-interacting-with-the-ast.rs b/src/doc/rustc-dev-guide/examples/rustc-driver-interacting-with-the-ast.rs index 3270c722e0772..c0d7f977d3500 100644 --- a/src/doc/rustc-dev-guide/examples/rustc-driver-interacting-with-the-ast.rs +++ b/src/doc/rustc-dev-guide/examples/rustc-driver-interacting-with-the-ast.rs @@ -1,4 +1,4 @@ -// Tested with nightly-2025-02-13 +// Tested with nightly-2025-03-28 #![feature(rustc_private)] @@ -20,7 +20,7 @@ use std::path::Path; use std::sync::Arc; use rustc_ast_pretty::pprust::item_to_string; -use rustc_driver::{run_compiler, Compilation}; +use rustc_driver::{Compilation, run_compiler}; use rustc_interface::interface::{Compiler, Config}; use rustc_middle::ty::TyCtxt; @@ -70,11 +70,9 @@ impl rustc_driver::Callbacks for MyCallbacks { } fn after_analysis(&mut self, _compiler: &Compiler, tcx: TyCtxt<'_>) -> Compilation { - // Every compilation contains a single crate. - let hir_krate = tcx.hir(); // Iterate over the top-level items in the crate, looking for the main function. - for id in hir_krate.items() { - let item = hir_krate.item(id); + for id in tcx.hir_free_items() { + let item = &tcx.hir_item(id); // Use pattern-matching to find a specific node inside the main function. if let rustc_hir::ItemKind::Fn { body, .. } = item.kind { let expr = &tcx.hir_body(body).value; diff --git a/src/doc/rustc-dev-guide/examples/rustc-interface-example.rs b/src/doc/rustc-dev-guide/examples/rustc-interface-example.rs index 70f27c2a82a9c..360f70c8e865a 100644 --- a/src/doc/rustc-dev-guide/examples/rustc-interface-example.rs +++ b/src/doc/rustc-dev-guide/examples/rustc-interface-example.rs @@ -1,4 +1,4 @@ -// Tested with nightly-2025-02-13 +// Tested with nightly-2025-03-28 #![feature(rustc_private)] @@ -64,14 +64,13 @@ fn main() { println!("{krate:?}"); // Analyze the program and inspect the types of definitions. rustc_interface::create_and_enter_global_ctxt(&compiler, krate, |tcx| { - for id in tcx.hir().items() { - let hir = tcx.hir(); - let item = hir.item(id); + for id in tcx.hir_free_items() { + let item = tcx.hir_item(id); match item.kind { - rustc_hir::ItemKind::Static(_, _, _) | rustc_hir::ItemKind::Fn { .. } => { - let name = item.ident; + rustc_hir::ItemKind::Static(ident, ..) + | rustc_hir::ItemKind::Fn { ident, .. } => { let ty = tcx.type_of(item.hir_id().owner.def_id); - println!("{name:?}:\t{ty:?}") + println!("{ident:?}:\t{ty:?}") } _ => (), } diff --git a/src/doc/rustc-dev-guide/examples/rustc-interface-getting-diagnostics.rs b/src/doc/rustc-dev-guide/examples/rustc-interface-getting-diagnostics.rs index 39b236e1783a6..2512ba3c3f924 100644 --- a/src/doc/rustc-dev-guide/examples/rustc-interface-getting-diagnostics.rs +++ b/src/doc/rustc-dev-guide/examples/rustc-interface-getting-diagnostics.rs @@ -1,4 +1,4 @@ -// Tested with nightly-2025-02-13 +// Tested with nightly-2025-03-28 #![feature(rustc_private)] @@ -86,8 +86,10 @@ fn main() { rustc_interface::run_compiler(config, |compiler| { let krate = rustc_interface::passes::parse(&compiler.sess); rustc_interface::create_and_enter_global_ctxt(&compiler, krate, |tcx| { - // Run the analysis phase on the local crate to trigger the type error. - let _ = tcx.analysis(()); + // Iterate all the items defined and perform type checking. + tcx.par_hir_body_owners(|item_def_id| { + tcx.ensure_ok().typeck(item_def_id); + }); }); // If the compiler has encountered errors when this closure returns, it will abort (!) the program. // We avoid this by resetting the error count before returning diff --git a/src/doc/rustc-dev-guide/josh-sync/src/sync.rs b/src/doc/rustc-dev-guide/josh-sync/src/sync.rs index cd64be6367032..41d96397faaba 100644 --- a/src/doc/rustc-dev-guide/josh-sync/src/sync.rs +++ b/src/doc/rustc-dev-guide/josh-sync/src/sync.rs @@ -194,7 +194,7 @@ impl GitSync { ); println!( // Open PR with `subtree update` title to silence the `no-merges` triagebot check - " https://github.com/{UPSTREAM_REPO}/compare/{github_user}:{branch}?quick_pull=1&title=Rustc+dev+guide+subtree+update&body=r?+@ghost" + " https://github.com/{UPSTREAM_REPO}/compare/{github_user}:{branch}?quick_pull=1&title=rustc-dev-guide+subtree+update&body=r?+@ghost" ); drop(josh); diff --git a/src/doc/rustc-dev-guide/rust-version b/src/doc/rustc-dev-guide/rust-version index 6baf43397e881..66b4fe2bf3bf0 100644 --- a/src/doc/rustc-dev-guide/rust-version +++ b/src/doc/rustc-dev-guide/rust-version @@ -1 +1 @@ -493c38ba371929579fe136df26eccd9516347c7a +0c33fe2c3d3eecadd17a84b110bb067288a64f1c diff --git a/src/doc/rustc-dev-guide/rustfmt.toml b/src/doc/rustc-dev-guide/rustfmt.toml new file mode 100644 index 0000000000000..b285329c78e61 --- /dev/null +++ b/src/doc/rustc-dev-guide/rustfmt.toml @@ -0,0 +1,7 @@ +# matches that of rust-lang/rust +style_edition = "2024" +use_small_heuristics = "Max" +merge_derives = false +group_imports = "StdExternalCrate" +imports_granularity = "Module" +use_field_init_shorthand = true diff --git a/src/doc/rustc-dev-guide/src/SUMMARY.md b/src/doc/rustc-dev-guide/src/SUMMARY.md index ce74c741b3936..31119496e754a 100644 --- a/src/doc/rustc-dev-guide/src/SUMMARY.md +++ b/src/doc/rustc-dev-guide/src/SUMMARY.md @@ -1,6 +1,7 @@ # Summary [Getting Started](./getting-started.md) + [About this guide](./about-this-guide.md) --- @@ -10,9 +11,9 @@ - [How to build and run the compiler](./building/how-to-build-and-run.md) - [Quickstart](./building/quickstart.md) - [Prerequisites](./building/prerequisites.md) - - [Suggested Workflows](./building/suggested.md) + - [Suggested workflows](./building/suggested.md) - [Distribution artifacts](./building/build-install-distribution-artifacts.md) - - [Building Documentation](./building/compiler-documenting.md) + - [Building documentation](./building/compiler-documenting.md) - [Rustdoc overview](./rustdoc.md) - [Adding a new target](./building/new-target.md) - [Optimized build](./building/optimized-build.md) @@ -28,8 +29,11 @@ - [Minicore](./tests/minicore.md) - [Ecosystem testing](./tests/ecosystem.md) - [Crater](./tests/crater.md) - - [Fuchsia](./tests/fuchsia.md) - - [Rust for Linux](./tests/rust-for-linux.md) + - [Fuchsia](./tests/ecosystem-test-jobs/fuchsia.md) + - [Rust for Linux](./tests/ecosystem-test-jobs/rust-for-linux.md) + - [Codegen backend testing](./tests/codegen-backend-tests/intro.md) + - [Cranelift codegen backend](./tests/codegen-backend-tests/cg_clif.md) + - [GCC codegen backend](./tests/codegen-backend-tests/cg_gcc.md) - [Performance testing](./tests/perf.md) - [Suggest tests tool](./tests/suggest-tests.md) - [Misc info](./tests/misc.md) @@ -39,11 +43,11 @@ - [with the linux perf tool](./profiling/with_perf.md) - [with Windows Performance Analyzer](./profiling/wpa_profiling.md) - [with the Rust benchmark suite](./profiling/with_rustc_perf.md) -- [crates.io Dependencies](./crates-io.md) +- [crates.io dependencies](./crates-io.md) # Contributing to Rust -- [Contribution Procedures](./contributing.md) +- [Contribution procedures](./contributing.md) - [About the compiler team](./compiler-team.md) - [Using Git](./git.md) - [Mastering @rustbot](./rustbot.md) @@ -53,7 +57,7 @@ - [Stabilizing Features](./stabilization_guide.md) - [Feature Gates](./feature-gates.md) - [Coding conventions](./conventions.md) -- [Procedures for Breaking Changes](./bug-fix-procedure.md) +- [Procedures for breaking changes](./bug-fix-procedure.md) - [Using external repositories](./external-repos.md) - [Fuzzing](./fuzzing.md) - [Notification groups](notification-groups/about.md) @@ -61,12 +65,13 @@ - [ARM](notification-groups/arm.md) - [Cleanup Crew](notification-groups/cleanup-crew.md) - [Emscripten](notification-groups/emscripten.md) + - [Fuchsia](notification-groups/fuchsia.md) - [LLVM](notification-groups/llvm.md) - [RISC-V](notification-groups/risc-v.md) + - [Rust for Linux](notification-groups/rust-for-linux.md) - [WASI](notification-groups/wasi.md) - [WebAssembly](notification-groups/wasm.md) - [Windows](notification-groups/windows.md) - - [Rust for Linux](notification-groups/rust-for-linux.md) - [Licenses](./licenses.md) - [Editions](guides/editions.md) @@ -77,6 +82,7 @@ - [How Bootstrap does it](./building/bootstrapping/how-bootstrap-does-it.md) - [Writing tools in Bootstrap](./building/bootstrapping/writing-tools-in-bootstrap.md) - [Debugging bootstrap](./building/bootstrapping/debugging-bootstrap.md) +- [cfg(bootstrap) in dependencies](./building/bootstrapping/bootstrap-in-dependencies.md) # High-level Compiler Architecture @@ -84,29 +90,35 @@ - [Overview of the compiler](./overview.md) - [The compiler source code](./compiler-src.md) - [Queries: demand-driven compilation](./query.md) - - [The Query Evaluation Model in Detail](./queries/query-evaluation-model-in-detail.md) + - [The Query Evaluation Model in detail](./queries/query-evaluation-model-in-detail.md) - [Incremental compilation](./queries/incremental-compilation.md) - - [Incremental compilation In Detail](./queries/incremental-compilation-in-detail.md) - - [Debugging and Testing](./incrcomp-debugging.md) + - [Incremental compilation in detail](./queries/incremental-compilation-in-detail.md) + - [Debugging and testing](./incrcomp-debugging.md) - [Salsa](./queries/salsa.md) -- [Memory Management in Rustc](./memory.md) -- [Serialization in Rustc](./serialization.md) -- [Parallel Compilation](./parallel-rustc.md) +- [Memory management in rustc](./memory.md) +- [Serialization in rustc](./serialization.md) +- [Parallel compilation](./parallel-rustc.md) - [Rustdoc internals](./rustdoc-internals.md) - [Search](./rustdoc-internals/search.md) + - [The `rustdoc` test suite](./rustdoc-internals/rustdoc-test-suite.md) +- [Autodiff internals](./autodiff/internals.md) + - [Installation](./autodiff/installation.md) + - [How to debug](./autodiff/debugging.md) + - [Autodiff flags](./autodiff/flags.md) + - [Current limitations](./autodiff/limitations.md) # Source Code Representation - [Prologue](./part-3-intro.md) - [Syntax and the AST](./syntax-intro.md) - - [Lexing and Parsing](./the-parser.md) + - [Lexing and parsing](./the-parser.md) - [Macro expansion](./macro-expansion.md) - [Name resolution](./name-resolution.md) - [Attributes](./attributes.md) - - [`#[test]` Implementation](./test-implementation.md) - - [Panic Implementation](./panic-implementation.md) - - [AST Validation](./ast-validation.md) - - [Feature Gate Checking](./feature-gate-ck.md) + - [`#[test]` implementation](./test-implementation.md) + - [Panic implementation](./panic-implementation.md) + - [AST validation](./ast-validation.md) + - [Feature gate checking](./feature-gate-ck.md) - [Lang Items](./lang-items.md) - [The HIR (High-level IR)](./hir.md) - [Lowering AST to HIR](./ast-lowering.md) @@ -124,7 +136,8 @@ - [rustc_driver and rustc_interface](./rustc-driver/intro.md) - [Example: Type checking](./rustc-driver/interacting-with-the-ast.md) - [Example: Getting diagnostics](./rustc-driver/getting-diagnostics.md) -- [Errors and Lints](diagnostics.md) + - [Remarks on perma-unstable features](./rustc-driver/remarks-on-perma-unstable-features.md) +- [Errors and lints](diagnostics.md) - [Diagnostic and subdiagnostic structs](./diagnostics/diagnostic-structs.md) - [Translation](./diagnostics/translation.md) - [`LintStore`](./diagnostics/lintstore.md) @@ -144,10 +157,8 @@ - [ADTs and Generic Arguments](./ty_module/generic_arguments.md) - [Parameter types/consts/regions](./ty_module/param_ty_const_regions.md) - [`TypeFolder` and `TypeFoldable`](./ty-fold.md) -- [Parameter Environments](./param_env/param_env_summary.md) - - [What is it?](./param_env/param_env_what_is_it.md) - - [How are `ParamEnv`'s constructed internally](./param_env/param_env_construction_internals.md) - - [Which `ParamEnv` do I use?](./param_env/param_env_acquisition.md) +- [Aliases and Normalization](./normalization.md) +- [Typing/Param Envs](./typing_parameter_envs.md) - [Type inference](./type-inference.md) - [Trait solving](./traits/resolution.md) - [Higher-ranked trait bounds](./traits/hrtb.md) @@ -166,21 +177,20 @@ - [Coinduction](./solve/coinduction.md) - [Caching](./solve/caching.md) - [Proof trees](./solve/proof-trees.md) - - [Normalization](./solve/normalization.md) - [Opaque types](./solve/opaque-types.md) - [Significant changes and quirks](./solve/significant-changes.md) - [`Unsize` and `CoerceUnsized` traits](./traits/unsize.md) - [Type checking](./type-checking.md) - [Method Lookup](./method-lookup.md) - [Variance](./variance.md) - - [Coherence Checking](./coherence.md) - - [Opaque Types](./opaque-types-type-alias-impl-trait.md) + - [Coherence checking](./coherence.md) + - [Opaque types](./opaque-types-type-alias-impl-trait.md) - [Inference details](./opaque-types-impl-trait-inference.md) - [Return Position Impl Trait In Trait](./return-position-impl-trait-in-trait.md) - [Region inference restrictions][opaque-infer] - [Const condition checking](./effects.md) - [Pattern and Exhaustiveness Checking](./pat-exhaustive-checking.md) -- [Unsafety Checking](./unsafety-checking.md) +- [Unsafety checking](./unsafety-checking.md) - [MIR dataflow](./mir/dataflow.md) - [Drop elaboration](./mir/drop-elaboration.md) - [The borrow checker](./borrow_check.md) @@ -222,9 +232,13 @@ --- [Appendix A: Background topics](./appendix/background.md) + [Appendix B: Glossary](./appendix/glossary.md) + [Appendix C: Code Index](./appendix/code-index.md) + [Appendix D: Compiler Lecture Series](./appendix/compiler-lecture.md) + [Appendix E: Bibliography](./appendix/bibliography.md) [Appendix Z: HumorRust](./appendix/humorust.md) diff --git a/src/doc/rustc-dev-guide/src/about-this-guide.md b/src/doc/rustc-dev-guide/src/about-this-guide.md index 781a5c51bf7a8..057e4a4cceed3 100644 --- a/src/doc/rustc-dev-guide/src/about-this-guide.md +++ b/src/doc/rustc-dev-guide/src/about-this-guide.md @@ -3,33 +3,41 @@ This guide is meant to help document how rustc – the Rust compiler – works, as well as to help new contributors get involved in rustc development. -There are seven parts to this guide: +There are several parts to this guide: -1. [Building `rustc`][p1]: +1. [Building and debugging `rustc`][p1]: Contains information that should be useful no matter how you are contributing, about building, debugging, profiling, etc. -2. [Contributing to `rustc`][p2]: +1. [Contributing to Rust][p2]: Contains information that should be useful no matter how you are contributing, about procedures for contribution, using git and Github, stabilizing features, etc. -3. [High-Level Compiler Architecture][p3]: +1. [Bootstrapping][p3]: + Describes how the Rust compiler builds itself using previous versions, including + an introduction to the bootstrap process and debugging methods. +1. [High-level Compiler Architecture][p4]: Discusses the high-level architecture of the compiler and stages of the compile process. -4. [Source Code Representation][p4]: +1. [Source Code Representation][p5]: Describes the process of taking raw source code from the user and transforming it into various forms that the compiler can work with easily. -5. [Analysis][p5]: - discusses the analyses that the compiler uses to check various properties of the code +1. [Supporting Infrastructure][p6]: + Covers command-line argument conventions, compiler entry points like rustc_driver and + rustc_interface, and the design and implementation of errors and lints. +1. [Analysis][p7]: + Discusses the analyses that the compiler uses to check various properties of the code and inform later stages of the compile process (e.g., type checking). -6. [From MIR to Binaries][p6]: How linked executable machine code is generated. -7. [Appendices][p7] at the end with useful reference information. +1. [MIR to Binaries][p8]: How linked executable machine code is generated. +1. [Appendices][p9] at the end with useful reference information. There are a few of these with different information, including a glossary. [p1]: ./building/how-to-build-and-run.html [p2]: ./contributing.md -[p3]: ./part-2-intro.md -[p4]: ./part-3-intro.md -[p5]: ./part-4-intro.md -[p6]: ./part-5-intro.md -[p7]: ./appendix/background.md +[p3]: ./building/bootstrapping/intro.md +[p4]: ./part-2-intro.md +[p5]: ./part-3-intro.md +[p6]: ./cli.md +[p7]: ./part-4-intro.md +[p8]: ./part-5-intro.md +[p9]: ./appendix/background.md ### Constant change diff --git a/src/doc/rustc-dev-guide/src/appendix/code-index.md b/src/doc/rustc-dev-guide/src/appendix/code-index.md index b96ede68eab51..65fbf752d7921 100644 --- a/src/doc/rustc-dev-guide/src/appendix/code-index.md +++ b/src/doc/rustc-dev-guide/src/appendix/code-index.md @@ -40,5 +40,5 @@ Item | Kind | Short description | Chapter | [Emitting Diagnostics]: ../diagnostics.html [Macro expansion]: ../macro-expansion.html [Name resolution]: ../name-resolution.html -[Parameter Environment]: ../param_env/param_env_summary.html +[Parameter Environment]: ../typing_parameter_envs.html [Trait Solving: Goals and Clauses]: ../traits/goals-and-clauses.html#domain-goals diff --git a/src/doc/rustc-dev-guide/src/appendix/glossary.md b/src/doc/rustc-dev-guide/src/appendix/glossary.md index a7c3236d356ba..1837b59e850ae 100644 --- a/src/doc/rustc-dev-guide/src/appendix/glossary.md +++ b/src/doc/rustc-dev-guide/src/appendix/glossary.md @@ -31,7 +31,6 @@ Term | Meaning generics | The list of generic parameters defined on an item. There are three kinds of generic parameters: Type, lifetime and const parameters. HIR | The _high-level [IR](#ir)_, created by lowering and desugaring the AST. ([see more](../hir.md)) `HirId` | Identifies a particular node in the HIR by combining a def-id with an "intra-definition offset". See [the HIR chapter for more](../hir.md#identifiers-in-the-hir). -HIR map | The HIR map, accessible via `tcx.hir()`, allows you to quickly navigate the HIR and convert between various forms of identifiers. ICE | Short for _internal compiler error_, this is when the compiler crashes. ICH | Short for _incremental compilation hash_, these are used as fingerprints for things such as HIR and crate metadata, to check if changes have been made. This is useful in incremental compilation to see if part of a crate has changed and should be recompiled. `infcx` | The type inference context (`InferCtxt`). (see `rustc_middle::infer`) diff --git a/src/doc/rustc-dev-guide/src/ast-validation.md b/src/doc/rustc-dev-guide/src/ast-validation.md index fa0f1d954f86d..8f10bbecf21d0 100644 --- a/src/doc/rustc-dev-guide/src/ast-validation.md +++ b/src/doc/rustc-dev-guide/src/ast-validation.md @@ -1,4 +1,4 @@ -# AST Validation +# AST validation _AST validation_ is a separate AST pass that visits each item in the tree and performs simple checks. This pass diff --git a/src/doc/rustc-dev-guide/src/autodiff/debugging.md b/src/doc/rustc-dev-guide/src/autodiff/debugging.md new file mode 100644 index 0000000000000..bd46a66fade47 --- /dev/null +++ b/src/doc/rustc-dev-guide/src/autodiff/debugging.md @@ -0,0 +1,113 @@ +# Reporting backend crashes + +If after a compilation failure you are greeted by a large amount of llvm-ir code, then our enzyme backend likely failed to compile your code. These cases are harder to debug, so your help is highly appreciated. Please also keep in mind that release builds are usually much more likely to work at the moment. + +The final goal here is to reproduce your bug in the enzyme [compiler explorer](https://enzyme.mit.edu/explorer/), in order to create a bug report in the [Enzyme](https://github.com/enzymead/enzyme/issues) repository. + +We have an `autodiff` flag which you can pass to `rustflags` to help with this. it will print the whole llvm-ir module, along with some `__enzyme_fwddiff` or `__enzyme_autodiff` calls. A potential workflow on linux could look like: + +## Controlling llvm-ir generation + +Before generating the llvm-ir, keep in mind two techniques that can help ensure the relevant rust code is visible for debugging: + +- **`std::hint::black_box`**: wrap rust variables or expressions in `std::hint::black_box()` to prevent rust and llvm from optimizing them away. This is useful when you need to inspect or manually manipulate specific values in the llvm-ir. +- **`extern "rust"` or `extern "c"`**: if you want to see how a specific function declaration is lowered to llvm-ir, you can declare it as `extern "rust"` or `extern "c"`. You can also look for existing `__enzyme_autodiff` or similar declarations within the generated module for examples. + +## 1) Generate an llvm-ir reproducer + +```sh +rustflags="-z autodiff=enable,printmodbefore" cargo +enzyme build --release &> out.ll +``` + +This also captures a few warnings and info messages above and below your module. open out.ll and remove every line above `; moduleid = `. Now look at the end of the file and remove everything that's not part of llvm-ir, i.e. remove errors and warnings. The last line of your llvm-ir should now start with `! = `, i.e. `!40831 = !{i32 0, i32 1037508, i32 1037538, i32 1037559}` or `!43760 = !dilocation(line: 297, column: 5, scope: !43746)`. + +The actual numbers will depend on your code. + +## 2) Check your llvm-ir reproducer + +To confirm that your previous step worked, we will use llvm's `opt` tool. find your path to the opt binary, with a path similar to `/rust/build//build/bin/opt`. also find `llvmenzyme-19.` path, similar to `/rust/build/target-tripple/enzyme/build/enzyme/llvmenzyme-19`. Please keep in mind that llvm frequently updates it's llvm backend, so the version number might be higher (20, 21, ...). Once you have both, run the following command: + +```sh + out.ll -load-pass-plugin=/path/to/llvmenzyme-19.so -passes="enzyme" -s +``` + +If the previous step succeeded, you are going to see the same error that you saw when compiling your rust code with cargo. + +If you fail to get the same error, please open an issue in the rust repository. If you succeed, congrats! the file is still huge, so let's automatically minimize it. + +## 3) Minimize your llvm-ir reproducer + +First find your `llvm-extract` binary, it's in the same folder as your opt binary. then run: + +```sh + -s --func= --recursive --rfunc="enzyme_autodiff*" --rfunc="enzyme_fwddiff*" --rfunc= out.ll -o mwe.ll +``` + +This command creates `mwe.ll`, a minimal working example. + +Please adjust the name passed with the last `--func` flag. You can either apply the `#[no_mangle]` attribute to the function you differentiate, then you can replace it with the rust name. otherwise you will need to look up the mangled function name. To do that, open `out.ll` and search for `__enzyme_fwddiff` or `__enzyme_autodiff`. the first string in that function call is the name of your function. example: + +```llvm-ir +define double @enzyme_opt_helper_0(ptr %0, i64 %1, double %2) { + %4 = call double (...) @__enzyme_fwddiff(ptr @_zn2ad3_f217h3b3b1800bd39fde3e, metadata !"enzyme_const", ptr %0, metadata !"enzyme_const", i64 %1, metadata !"enzyme_dup", double %2, double %2) + ret double %4 +} +``` + +Here, `_zn2ad3_f217h3b3b1800bd39fde3e` is the correct name. make sure to not copy the leading `@`. redo step 2) by running the `opt` command again, but this time passing `mwe.ll` as the input file instead of `out.ll`. Check if this minimized example still reproduces the crash. + +## 4) (Optional) Minimize your llvm-ir reproducer further. + +After the previous step you should have an `mwe.ll` file with ~5k loc. let's try to get it down to 50. find your `llvm-reduce` binary next to `opt` and `llvm-extract`. Copy the first line of your error message, an example could be: + +```sh +opt: /home/manuel/prog/rust/src/llvm-project/llvm/lib/ir/instructions.cpp:686: void llvm::callinst::init(llvm::functiontype*, llvm::value*, llvm::arrayref, llvm::arrayref >, const llvm::twine&): assertion `(args.size() == fty->getnumparams() || (fty->isvararg() && args.size() > fty->getnumparams())) && "calling a function with bad signature!"' failed. +``` + +If you just get a `segfault` there is no sensible error message and not much to do automatically, so continue to 5). +otherwise, create a `script.sh` file containing + +```sh +#!/bin/bash + $1 -load-pass-plugin=/path/to/llvmenzyme-19.so -passes="enzyme" \ + |& grep "/some/path.cpp:686: void llvm::callinst::init" +``` + +Experiment a bit with which error message you pass to grep. it should be long enough to make sure that the error is unique. However, for longer errors including `(` or `)` you will need to escape them correctly which can become annoying. Run + +```sh + --test=script.sh mwe.ll +``` + +If you see `input isn't interesting! verify interesting-ness test`, you got the error message in script.sh wrong, you need to make sure that grep matches your actual error. If all works out, you will see a lot of iterations, ending with a new `reduced.ll` file. Verify with `opt` that you still get the same error. + +### Advanced debugging: manual llvm-ir investigation + +Once you have a minimized reproducer (`mwe.ll` or `reduced.ll`), you can delve deeper: + +- **manual editing:** try manually rewriting the llvm-ir. for certain issues, like those involving indirect calls, you might investigate enzyme-specific intrinsics like `__enzyme_virtualreverse`. Understanding how to use these might require consulting enzyme's documentation or source code. +- **enzyme test cases:** look for relevant test cases within the [enzyme repository](https://github.com/enzymead/enzyme/tree/main/enzyme/test) that might demonstrate the correct usage of features or intrinsics related to your problem. + +## 5) Report your bug. + +Afterwards, you should be able to copy and paste your `mwe.ll` (or `reduced.ll`) example into our [compiler explorer](https://enzyme.mit.edu/explorer/). + +- Select `llvm ir` as language and `opt 20` as compiler. +- Replace the field to the right of your compiler with `-passes="enzyme"`, if it is not already set. +- Hopefully, you will see once again your now familiar error. +- Please use the share button to copy links to them. +- Please create an issue on [https://github.com/enzymead/enzyme/issues](https://github.com/enzymead/enzyme/issues) and share `mwe.ll` and (if you have it) `reduced.ll`, as well as links to the compiler explorer. Please feel free to also add your rust code or a link to it. + +#### Documenting findings + +some enzyme errors, like `"attempting to call an indirect active function whose runtime value is inactive"`, have historically caused confusion. If you investigate such an issue, even if you don't find a complete solution, please consider documenting your findings. If the insights are general to enzyme and not specific to its rust usage, contributing them to the main [enzyme documentation](https://github.com/enzymead/www) is often the best first step. You can also mention your findings in the relevant enzyme github issue or propose updates to these docs if appropriate. This helps prevent others from starting from scratch. + +With a clear reproducer and documentation, hopefully an enzyme developer will be able to fix your bug. Once that happens, the enzyme submodule inside the rust compiler will be updated, which should allow you to differentiate your rust code. Thanks for helping us to improve rust-ad. + +# Minimize rust code + +Beyond having a minimal llvm-ir reproducer, it is also helpful to have a minimal rust reproducer without dependencies. This allows us to add it as a test case to ci once we fix it, which avoids regressions for the future. + +There are a few solutions to help you with minimizing the rust reproducer. This is probably the most simple automated approach: [cargo-minimize](https://github.com/nilstrieb/cargo-minimize). + +Otherwise we have various alternatives, including [`treereduce`](https://github.com/langston-barrett/treereduce), [`halfempty`](https://github.com/googleprojectzero/halfempty), or [`picireny`](https://github.com/renatahodovan/picireny), potentially also [`creduce`](https://github.com/csmith-project/creduce). diff --git a/src/doc/rustc-dev-guide/src/autodiff/flags.md b/src/doc/rustc-dev-guide/src/autodiff/flags.md new file mode 100644 index 0000000000000..946ae1d03ae6a --- /dev/null +++ b/src/doc/rustc-dev-guide/src/autodiff/flags.md @@ -0,0 +1,42 @@ +# Supported `RUSTFLAGS` + +To support you while debugging or profiling, we have added support for an experimental `-Z autodiff` rustc flag (which can be passed to cargo via `RUSTFLAGS`), which allow changing the behaviour of Enzyme, without recompiling rustc. We currently support the following values for `autodiff`. + +### Debug Flags + +```text +PrintTA // Print TypeAnalysis information +PrintAA // Print ActivityAnalysis information +Print // Print differentiated functions while they are being generated and optimized +PrintPerf // Print AD related Performance warnings +PrintModBefore // Print the whole LLVM-IR module directly before running AD +PrintModAfter // Print the whole LLVM-IR module after running AD, before optimizations +PrintModFinal // Print the whole LLVM-IR module after running optimizations and AD +LooseTypes // Risk incorrect derivatives instead of aborting when missing Type Info +``` + +
    +`LooseTypes` is often helpful to get rid of Enzyme errors stating `Can not deduce type of ` and to be able to run some code. But please keep in mind that this flag absolutely has the chance to cause incorrect gradients. Even worse, the gradients might be correct for certain input values, but not for others. So please create issues about such bugs and only use this flag temporarily while you wait for your bug to be fixed. +
    + +### Benchmark flags + +For performance experiments and benchmarking we also support + +```text +NoPostopt // We won't optimize the LLVM-IR Module after AD +RuntimeActivity // Enables the runtime activity feature from Enzyme +Inline // Instructs Enzyme to maximize inlining as far as possible, beyond LLVM's default +``` + +You can combine multiple `autodiff` values using a comma as separator: + +```bash +RUSTFLAGS="-Z autodiff=Enable,LooseTypes,PrintPerf" cargo +enzyme build +``` + +Using `-Zautodiff=Enable` will allow using autodiff and update your normal rustc compilation pipeline: + +1. Run your selected compilation pipeline. If you selected a release build, we will disable vectorization and loop unrolling. +2. Differentiate your functions. +3. Run your selected compilation pipeline again on the whole module. This time we do not disable vectorization or loop unrolling. diff --git a/src/doc/rustc-dev-guide/src/autodiff/installation.md b/src/doc/rustc-dev-guide/src/autodiff/installation.md new file mode 100644 index 0000000000000..f3c11395523bf --- /dev/null +++ b/src/doc/rustc-dev-guide/src/autodiff/installation.md @@ -0,0 +1,88 @@ +# Installation + +In the near future, `std::autodiff` should become available in nightly builds for users. As a contribute however, you will still need to build rustc from source. Please be aware that the msvc target is not supported at the moment, all other tier 1 targets should work. Please open an issue if you encounter any problems on a supported tier 1 target, or if you succesfully build this project on a tier2/tier3 target. + +## Build instructions + +First you need to clone and configure the Rust repository: +```bash +git clone --depth=1 git@github.com:rust-lang/rust.git +cd rust +./configure --enable-llvm-link-shared --enable-llvm-plugins --enable-llvm-enzyme --release-channel=nightly --enable-llvm-assertions --enable-clang --enable-lld --enable-option-checking --enable-ninja --disable-docs +``` + +Afterwards you can build rustc using: +```bash +./x.py build --stage 1 library +``` + +Afterwards rustc toolchain link will allow you to use it through cargo: +``` +rustup toolchain link enzyme build/host/stage1 +rustup toolchain install nightly # enables -Z unstable-options +``` + +You can then run our test cases: + +```bash +./x.py test --stage 1 tests/codegen/autodiff +./x.py test --stage 1 tests/pretty/autodiff +./x.py test --stage 1 tests/ui/autodiff +./x.py test --stage 1 tests/ui/feature-gates/feature-gate-autodiff.rs +``` + +Autodiff is still experimental, so if you want to use it in your own projects, you will need to add `lto="fat"` to your Cargo.toml +and use `RUSTFLAGS="-Zautodiff=Enable" cargo +enzyme` instead of `cargo` or `cargo +nightly`. + +## Compiler Explorer and dist builds + +Our compiler explorer instance can be updated to a newer rustc in a similar way. First, prepare a docker instance. +```bash +docker run -it ubuntu:22.04 +export CC=clang CXX=clang++ +apt update +apt install wget vim python3 git curl libssl-dev pkg-config lld ninja-build cmake clang build-essential +``` +Then build rustc in a slightly altered way: +```bash +git clone --depth=1 https://github.com/rust-lang/rust.git +cd rust +./configure --enable-llvm-link-shared --enable-llvm-plugins --enable-llvm-enzyme --release-channel=nightly --enable-llvm-assertions --enable-clang --enable-lld --enable-option-checking --enable-ninja --disable-docs +./x dist +``` +We then copy the tarball to our host. The dockerid is the newest entry under `docker ps -a`. +```bash +docker cp :/rust/build/dist/rust-nightly-x86_64-unknown-linux-gnu.tar.gz rust-nightly-x86_64-unknown-linux-gnu.tar.gz +``` +Afterwards we can create a new (pre-release) tag on the EnzymeAD/rust repository and make a PR against the EnzymeAD/enzyme-explorer repository to update the tag. +Remember to ping `tgymnich` on the PR to run his update script. Note: We should archive EnzymeAD/rust and update the instructions here. The explorer should soon +be able to get the rustc toolchain from the official rust servers. + + +## Build instruction for Enzyme itself + +Following the Rust build instruction above will build LLVMEnzyme, LLDEnzyme, and ClangEnzyme along with the Rust compiler. +We recommend that approach, if you just want to use any of them and have no experience with cmake. +However, if you prefer to just build Enzyme without Rust, then these instructions might help. + +```bash +git clone --depth=1 git@github.com:llvm/llvm-project.git +cd llvm-project +mkdir build +cd build +cmake -G Ninja ../llvm -DLLVM_TARGETS_TO_BUILD="host" -DLLVM_ENABLE_ASSERTIONS=ON -DLLVM_ENABLE_PROJECTS="clang;lld" -DLLVM_ENABLE_RUNTIMES="openmp" -DLLVM_ENABLE_PLUGINS=ON -DCMAKE_BUILD_TYPE=Release -DCMAKE_INSTALL_PREFIX=. +ninja +ninja install +``` +This gives you a working LLVM build, now we can continue with building Enzyme. +Leave the `llvm-project` folder, and execute the following commands: +```bash +git clone git@github.com:EnzymeAD/Enzyme.git +cd Enzyme/enzyme +mkdir build +cd build +cmake .. -G Ninja -DLLVM_DIR=/llvm-project/build/lib/cmake/llvm/ -DLLVM_EXTERNAL_LIT=/llvm-project/llvm/utils/lit/lit.py -DCMAKE_BUILD_TYPE=Release -DCMAKE_EXPORT_COMPILE_COMMANDS=YES -DBUILD_SHARED_LIBS=ON +ninja +``` +This will build Enzyme, and you can find it in `Enzyme/enzyme/build/lib/Enzyme.so`. (Endings might differ based on your OS). + diff --git a/src/doc/rustc-dev-guide/src/autodiff/internals.md b/src/doc/rustc-dev-guide/src/autodiff/internals.md new file mode 100644 index 0000000000000..0093ef044c80b --- /dev/null +++ b/src/doc/rustc-dev-guide/src/autodiff/internals.md @@ -0,0 +1,27 @@ +The `std::autodiff` module in Rust allows differentiable programming: + +```rust +#![feature(autodiff)] +use std::autodiff::autodiff; + +// f(x) = x * x, f'(x) = 2.0 * x +// bar therefore returns (x * x, 2.0 * x) +#[autodiff(bar, Reverse, Active, Active)] +fn foo(x: f32) -> f32 { x * x } + +fn main() { + assert_eq!(bar(3.0, 1.0), (9.0, 6.0)); + assert_eq!(bar(4.0, 1.0), (16.0, 8.0)); +} +``` + +The detailed documentation for the `std::autodiff` module is available at [std::autodiff](https://doc.rust-lang.org/std/autodiff/index.html). + +Differentiable programing is used in various fields like numerical computing, [solid mechanics][ratel], [computational chemistry][molpipx], [fluid dynamics][waterlily] or for Neural Network training via Backpropagation, [ODE solver][diffsol], [differentiable rendering][libigl], [quantum computing][catalyst], and climate simulations. + +[ratel]: https://gitlab.com/micromorph/ratel +[molpipx]: https://arxiv.org/abs/2411.17011v +[waterlily]: https://github.com/WaterLily-jl/WaterLily.jl +[diffsol]: https://github.com/martinjrobins/diffsol +[libigl]: https://github.com/alecjacobson/libigl-enzyme-example?tab=readme-ov-file#run +[catalyst]: https://github.com/PennyLaneAI/catalyst diff --git a/src/doc/rustc-dev-guide/src/autodiff/limitations.md b/src/doc/rustc-dev-guide/src/autodiff/limitations.md new file mode 100644 index 0000000000000..90afbd51f3fd9 --- /dev/null +++ b/src/doc/rustc-dev-guide/src/autodiff/limitations.md @@ -0,0 +1,27 @@ +# Current limitations + +## Safety and Soundness + +Enzyme currently assumes that the user passes shadow arguments (`dx`, `dy`, ...) of appropriate size. Under Reverse Mode, we additionally assume that shadow arguments are mutable. In Reverse Mode we adjust the outermost pointer or reference to be mutable. Therefore `&f32` will receive the shadow type `&mut f32`. However, we do not check length for other types than slices (e.g. enums, Vec). We also do not enforce mutability of inner references, but will warn if we recognize them. We do intend to add additional checks over time. + +## ABI adjustments + +In some cases, a function parameter might get lowered in a way that we currently don't handle correctly, leading to a compile time type mismatch in the `rustc_codegen_llvm` backend. Here are some [examples](https://github.com/EnzymeAD/rust/issues/105). + +## Compile Times + +Enzyme will often achieve excellent runtime performance, but might increase your compile time by a large factor. For Rust, we already have made significant improvements and have a list of further improvements planed - please reach out if you have time to help here. + +### Type Analysis + +Most of the times, Type Analysis (TA) is the reason of large (>5x) compile time increases when using Enzyme. This poster explains why we need to run Type Analysis in the bottom left part: [Poster Link](https://c.wsmoses.com/posters/Enzyme-llvmdev.pdf). + +We intend to increase the number of locations where we pass down Type information based on Rust types, which in turn will reduce the number of locations where Enzyme has to run Type Analysis, which will help compile times. + +### Duplicated Optimizations + +The key reason for Enzyme offering often excellent performance is that Enzyme differentiates already optimized LLVM-IR. However, we also (have to) run LLVM's optimization pipeline after differentiating, to make sure that the code which Enzyme generates is optimized properly. As a result you should have excellent runtime performance (please fill an issue if not), but at a compile time cost for running optimizations twice. + +### Fat-LTO + +The usage of `#[autodiff(...)]` currently requires compiling your project with Fat-LTO. We technically only need LTO if the function being differentiated calls functions in other compilation units. Therefore, other solutions are possible, but this is the most simple one to get started. diff --git a/src/doc/rustc-dev-guide/src/backend/libs-and-metadata.md b/src/doc/rustc-dev-guide/src/backend/libs-and-metadata.md index 556b3fdf8f84f..513df1650c387 100644 --- a/src/doc/rustc-dev-guide/src/backend/libs-and-metadata.md +++ b/src/doc/rustc-dev-guide/src/backend/libs-and-metadata.md @@ -110,7 +110,7 @@ See [`compute_hir_hash`] for where the hash is actually computed. [SVH]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_data_structures/svh/struct.Svh.html [incremental compilation]: ../queries/incremental-compilation.md -[`compute_hir_hash`]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_ast_lowering/struct.LoweringContext.html#method.compute_hir_hash +[`compute_hir_hash`]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_ast_lowering/fn.compute_hir_hash.html ### Stable Crate Id diff --git a/src/doc/rustc-dev-guide/src/bug-fix-procedure.md b/src/doc/rustc-dev-guide/src/bug-fix-procedure.md index e6a16df6d2a9c..8e6725c54efac 100644 --- a/src/doc/rustc-dev-guide/src/bug-fix-procedure.md +++ b/src/doc/rustc-dev-guide/src/bug-fix-procedure.md @@ -1,4 +1,4 @@ -# Procedures for Breaking Changes +# Procedures for breaking changes diff --git a/src/doc/rustc-dev-guide/src/building/bootstrapping/bootstrap-in-dependencies.md b/src/doc/rustc-dev-guide/src/building/bootstrapping/bootstrap-in-dependencies.md new file mode 100644 index 0000000000000..68c5c2386bd5e --- /dev/null +++ b/src/doc/rustc-dev-guide/src/building/bootstrapping/bootstrap-in-dependencies.md @@ -0,0 +1,53 @@ +# `cfg(bootstrap)` in compiler dependencies + +The rust compiler uses some external crates that can run into cyclic dependencies with the compiler itself: the compiler needs an updated crate to build, but the crate needs an updated compiler. This page describes how `#[cfg(bootstrap)]` can be used to break this cycle. + +## Enabling `#[cfg(bootstrap)]` + +Usually the use of `#[cfg(bootstrap)]` in an external crate causes a warning: + +``` +warning: unexpected `cfg` condition name: `bootstrap` + --> src/main.rs:1:7 + | +1 | #[cfg(bootstrap)] + | ^^^^^^^^^ + | + = help: expected names are: `docsrs`, `feature`, and `test` and 31 more + = help: consider using a Cargo feature instead + = help: or consider adding in `Cargo.toml` the `check-cfg` lint config for the lint: + [lints.rust] + unexpected_cfgs = { level = "warn", check-cfg = ['cfg(bootstrap)'] } + = help: or consider adding `println!("cargo::rustc-check-cfg=cfg(bootstrap)");` to the top of the `build.rs` + = note: see for more information about checking conditional configuration + = note: `#[warn(unexpected_cfgs)]` on by default +``` + +This warning can be silenced by adding these lines to the project's `Cargo.toml`: + +```toml +[lints.rust] +unexpected_cfgs = { level = "warn", check-cfg = ['cfg(bootstrap)'] } +``` + +Now `#[cfg(bootstrap)]` can be used in the crate just like it can be in the compiler: when the bootstrap compiler is used, code annotated with `#[cfg(bootstrap)]` is compiled, otherwise code annotated with `#[cfg(not(bootstrap))]` is compiled. + +## The update dance + +As a concrete example we'll use a change where the `#[naked]` attribute was made into an unsafe attribute, which caused a cyclic dependency with the `compiler-builtins` crate. + +### Step 1: accept the new behavior in the compiler ([#139797](https://github.com/rust-lang/rust/pull/139797)) + +In this example it is possible to accept both the old and new behavior at the same time by disabling an error. + +### Step 2: update the crate ([#821](https://github.com/rust-lang/compiler-builtins/pull/821)) + +Now in the crate, use `#[cfg(bootstrap)]` to use the old behavior, or `#[cfg(not(bootstrap))]` to use the new behavior. + +### Step 3: update the crate version used by the compiler ([#139934](https://github.com/rust-lang/rust/pull/139934)) + +For `compiler-builtins` this meant a version bump, in other cases it may be a git submodule update. + +### Step 4: remove the old behavior from the compiler ([#139753](https://github.com/rust-lang/rust/pull/139753)) + +The updated crate can now be used. In this example that meant that the old behavior could be removed. diff --git a/src/doc/rustc-dev-guide/src/building/bootstrapping/intro.md b/src/doc/rustc-dev-guide/src/building/bootstrapping/intro.md index f72918c8377fc..7f53097824cc9 100644 --- a/src/doc/rustc-dev-guide/src/building/bootstrapping/intro.md +++ b/src/doc/rustc-dev-guide/src/building/bootstrapping/intro.md @@ -6,8 +6,8 @@ of the same compiler. This raises a chicken-and-egg paradox: where did the first compiler come from? It must have been written in a different language. In Rust's case it was -[written in OCaml][ocaml-compiler]. However it was abandoned long ago and the -only way to build a modern version of rustc is a slightly less modern +[written in OCaml][ocaml-compiler]. However, it was abandoned long ago, and the +only way to build a modern version of rustc is with a slightly less modern version. This is exactly how `x.py` works: it downloads the current beta release of diff --git a/src/doc/rustc-dev-guide/src/building/bootstrapping/what-bootstrapping-does.md b/src/doc/rustc-dev-guide/src/building/bootstrapping/what-bootstrapping-does.md index 1dd5402f4cde7..a2930b3e42723 100644 --- a/src/doc/rustc-dev-guide/src/building/bootstrapping/what-bootstrapping-does.md +++ b/src/doc/rustc-dev-guide/src/building/bootstrapping/what-bootstrapping-does.md @@ -8,8 +8,8 @@ the same compiler. This raises a chicken-and-egg paradox: where did the first compiler come from? It must have been written in a different language. In Rust's case it was -[written in OCaml][ocaml-compiler]. However it was abandoned long ago and the -only way to build a modern version of `rustc` is a slightly less modern version. +[written in OCaml][ocaml-compiler]. However, it was abandoned long ago, and the +only way to build a modern version of `rustc` is with a slightly less modern version. This is exactly how [`./x.py`] works: it downloads the current beta release of `rustc`, then uses it to compile the new compiler. @@ -394,8 +394,8 @@ will be rare to want to use it. Finally, `MAGIC_EXTRA_RUSTFLAGS` bypasses the this is `compiletest`. For unit tests and doc tests this is the `libtest` runner. -Most test runner accept `--help`, which you can use to find out the options -accepted by the runner. +Most test runners accept `--help`, +which you can use to find out the options accepted by the runner. ## Environment Variables diff --git a/src/doc/rustc-dev-guide/src/building/how-to-build-and-run.md b/src/doc/rustc-dev-guide/src/building/how-to-build-and-run.md index 067e287111843..c3c1c41e3f697 100644 --- a/src/doc/rustc-dev-guide/src/building/how-to-build-and-run.md +++ b/src/doc/rustc-dev-guide/src/building/how-to-build-and-run.md @@ -63,7 +63,7 @@ cd rust > **NOTE**: A shallow clone limits which `git` commands can be run. > If you intend to work on and contribute to the compiler, it is > generally recommended to fully clone the repository [as shown above](#get-the-source-code), -> or to perform a [partial clone](#shallow-clone-the-repository) instead. +> or to perform a [partial clone](#partial-clone-the-repository) instead. > > For example, `git bisect` and `git blame` require access to the commit history, > so they don't work if the repository was cloned with `--depth 1`. diff --git a/src/doc/rustc-dev-guide/src/building/optimized-build.md b/src/doc/rustc-dev-guide/src/building/optimized-build.md index 0849464eab368..62dfaca89d24e 100644 --- a/src/doc/rustc-dev-guide/src/building/optimized-build.md +++ b/src/doc/rustc-dev-guide/src/building/optimized-build.md @@ -109,11 +109,16 @@ like Python or LLVM. Here is an example of how can `opt-dist` be used locally (outside of CI): -1. Build the tool with the following command: +1. Enable metrics in your `bootstrap.toml` file, because `opt-dist` expects it to be enabled: + ```toml + [build] + metrics = true + ``` +2. Build the tool with the following command: ```bash ./x build tools/opt-dist ``` -2. Run the tool with the `local` mode and provide necessary parameters: +3. Run the tool with the `local` mode and provide necessary parameters: ```bash ./build/host/stage0-tools-bin/opt-dist local \ --target-triple \ # select target, e.g. "x86_64-unknown-linux-gnu" diff --git a/src/doc/rustc-dev-guide/src/building/prerequisites.md b/src/doc/rustc-dev-guide/src/building/prerequisites.md index f49f6bb052789..6761cabac1f14 100644 --- a/src/doc/rustc-dev-guide/src/building/prerequisites.md +++ b/src/doc/rustc-dev-guide/src/building/prerequisites.md @@ -38,4 +38,4 @@ incremental compilation ([see here][config]). This will make compilation take longer (especially after a rebase), but will save a ton of space from the incremental caches. -[config]: ./how-to-build-and-run.md#create-a-configtoml +[config]: ./how-to-build-and-run.md#create-a-bootstraptoml diff --git a/src/doc/rustc-dev-guide/src/building/suggested.md b/src/doc/rustc-dev-guide/src/building/suggested.md index 43ff2ba726f91..f8a28b7f2e9a9 100644 --- a/src/doc/rustc-dev-guide/src/building/suggested.md +++ b/src/doc/rustc-dev-guide/src/building/suggested.md @@ -1,4 +1,4 @@ -# Suggested Workflows +# Suggested workflows The full bootstrapping process takes quite a while. Here are some suggestions to make your life easier. @@ -20,6 +20,43 @@ your `.git/hooks` folder as `pre-push` (without the `.sh` extension!). You can also install the hook as a step of running `./x setup`! +## Config extensions + +When working on different tasks, you might need to switch between different bootstrap configurations. +Sometimes you may want to keep an old configuration for future use. But saving raw config values in +random files and manually copying and pasting them can quickly become messy, especially if you have a +long history of different configurations. + +To simplify managing multiple configurations, you can create config extensions. + +For example, you can create a simple config file named `cross.toml`: + +```toml +[build] +build = "x86_64-unknown-linux-gnu" +host = ["i686-unknown-linux-gnu"] +target = ["i686-unknown-linux-gnu"] + + +[llvm] +download-ci-llvm = false + +[target.x86_64-unknown-linux-gnu] +llvm-config = "/path/to/llvm-19/bin/llvm-config" +``` + +Then, include this in your `bootstrap.toml`: + +```toml +include = ["cross.toml"] +``` + +You can also include extensions within extensions recursively. + +**Note:** In the `include` field, the overriding logic follows a right-to-left order. For example, +in `include = ["a.toml", "b.toml"]`, extension `b.toml` overrides `a.toml`. Also, parent extensions +always overrides the inner ones. + ## Configuring `rust-analyzer` for `rustc` ### Project-local rust-analyzer setup diff --git a/src/doc/rustc-dev-guide/src/coherence.md b/src/doc/rustc-dev-guide/src/coherence.md index b3af101fb876d..73f9213bf4056 100644 --- a/src/doc/rustc-dev-guide/src/coherence.md +++ b/src/doc/rustc-dev-guide/src/coherence.md @@ -1,4 +1,3 @@ - # Coherence > NOTE: this is based on [notes by @lcnr](https://github.com/rust-lang/rust/pull/121848) diff --git a/src/doc/rustc-dev-guide/src/compiler-debugging.md b/src/doc/rustc-dev-guide/src/compiler-debugging.md index 47f3976202228..102e20207792e 100644 --- a/src/doc/rustc-dev-guide/src/compiler-debugging.md +++ b/src/doc/rustc-dev-guide/src/compiler-debugging.md @@ -301,7 +301,8 @@ Right below you can find elaborate explainers on a selected few. Some compiler options for debugging specific features yield graphviz graphs - e.g. the `#[rustc_mir(borrowck_graphviz_postflow="suffix.dot")]` attribute -dumps various borrow-checker dataflow graphs. +on a function dumps various borrow-checker dataflow graphs in conjunction with +`-Zdump-mir-dataflow`. These all produce `.dot` files. To view these files, install graphviz (e.g. `apt-get install graphviz`) and then run the following commands: diff --git a/src/doc/rustc-dev-guide/src/compiler-src.md b/src/doc/rustc-dev-guide/src/compiler-src.md index c538fc8b788d7..00aa96226849d 100644 --- a/src/doc/rustc-dev-guide/src/compiler-src.md +++ b/src/doc/rustc-dev-guide/src/compiler-src.md @@ -62,21 +62,20 @@ huge. There is also the `rustc` crate which is the actual binary (i.e. the [`rustc_driver`] crate, which drives the various parts of compilation in other crates. -The dependency structure of these crates is complex, but roughly it is +The dependency order of these crates is complex, but roughly it is something like this: -- `rustc` (the binary) calls [`rustc_driver::main`][main]. - - [`rustc_driver`] depends on a lot of other crates, but the main one is - [`rustc_interface`]. - - [`rustc_interface`] depends on most of the other compiler crates. It - is a fairly generic interface for driving the whole compilation. - - Most of the other `rustc_*` crates depend on [`rustc_middle`], - which defines a lot of central data structures in the compiler. - - [`rustc_middle`] and most of the other crates depend on a - handful of crates representing the early parts of the - compiler (e.g. the parser), fundamental data structures (e.g. - [`Span`]), or error reporting: [`rustc_data_structures`], - [`rustc_span`], [`rustc_errors`], etc. +1. `rustc` (the binary) calls [`rustc_driver::main`][main]. +1. [`rustc_driver`] depends on a lot of other crates, but the main one is + [`rustc_interface`]. +1. [`rustc_interface`] depends on most of the other compiler crates. It is a + fairly generic interface for driving the whole compilation. +1. Most of the other `rustc_*` crates depend on [`rustc_middle`], which defines + a lot of central data structures in the compiler. +1. [`rustc_middle`] and most of the other crates depend on a handful of crates + representing the early parts of the compiler (e.g. the parser), fundamental + data structures (e.g. [`Span`]), or error reporting: + [`rustc_data_structures`], [`rustc_span`], [`rustc_errors`], etc. [`rustc_data_structures`]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_data_structures/index.html [`rustc_driver`]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_driver/index.html @@ -87,8 +86,12 @@ something like this: [`Span`]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_span/struct.Span.html [main]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_driver/fn.main.html -You can see the exact dependencies by reading the [`Cargo.toml`] for the various -crates, just like a normal Rust crate. +You can see the exact dependencies by running `cargo tree`, +just like you would for any other Rust package: + +```console +cargo tree --package rustc_driver +``` One final thing: [`src/llvm-project`] is a submodule for our fork of LLVM. During bootstrapping, LLVM is built and the [`compiler/rustc_llvm`] crate diff --git a/src/doc/rustc-dev-guide/src/const-eval.md b/src/doc/rustc-dev-guide/src/const-eval.md index 69329a3e08511..ca6a35a5e97eb 100644 --- a/src/doc/rustc-dev-guide/src/const-eval.md +++ b/src/doc/rustc-dev-guide/src/const-eval.md @@ -35,7 +35,7 @@ They're the wrappers of the `const_eval` query. Statics are special; all other functions do not represent statics correctly and have thus assertions preventing their use on statics. -The `const_eval_*` functions use a [`ParamEnv`](./param_env/param_env_summary.html) of environment +The `const_eval_*` functions use a [`ParamEnv`](./typing_parameter_envs.html) of environment in which the constant is evaluated (e.g. the function within which the constant is used) and a [`GlobalId`]. The `GlobalId` is made up of an `Instance` referring to a constant or static or of an `Instance` of a function and an index into the function's `Promoted` table. diff --git a/src/doc/rustc-dev-guide/src/contributing.md b/src/doc/rustc-dev-guide/src/contributing.md index 09a7f912b9886..0575de642eeb3 100644 --- a/src/doc/rustc-dev-guide/src/contributing.md +++ b/src/doc/rustc-dev-guide/src/contributing.md @@ -1,4 +1,4 @@ -# Contribution Procedures +# Contribution procedures @@ -150,6 +150,20 @@ when contributing to Rust under [the git section](./git.md). [t-compiler]: https://rust-lang.zulipchat.com/#narrow/stream/131828-t-compiler [triagebot]: https://github.com/rust-lang/rust/blob/master/triagebot.toml +### Keeping your branch up-to-date + +The CI in rust-lang/rust applies your patches directly against the current master, +not against the commit your branch is based on. This can lead to unexpected failures +if your branch is outdated, even when there are no explicit merge conflicts. + +Before submitting or updating a PR, make sure to update your branch +as mentioned [here](git.md#keeping-things-up-to-date) if it's significantly +behind the master branch (e.g., more than 100 commits behind). +This fetches the latest master branch and rebases your changes on top of it, +ensuring your PR is tested against the latest code. + +After rebasing, it's recommended to [run the relevant tests locally](tests/intro.md) to catch any issues before CI runs. + ### r? All pull requests are reviewed by another person. We have a bot, @@ -346,7 +360,7 @@ function in the same way as other pull requests. [`src/doc`]: https://github.com/rust-lang/rust/tree/master/src/doc [std-root]: https://github.com/rust-lang/rust/blob/master/library/std/src/lib.rs#L1 -To find documentation-related issues, sort by the [A-docs label]. +To find documentation-related issues, use the [A-docs label]. You can find documentation style guidelines in [RFC 1574]. @@ -373,7 +387,7 @@ Just a few things to keep in mind: There is no strict limit on line lengths; let the sentence or part of the sentence flow to its proper end on the same line. - When contributing text to the guide, please contextualize the information with some time period - and/or a reason so that the reader knows how much to trust or mistrust the information. + and/or a reason so that the reader knows how much to trust the information. Aim to provide a reasonable amount of context, possibly including but not limited to: - A reason for why the data may be out of date other than "change", @@ -387,28 +401,28 @@ Just a few things to keep in mind: - jan 2021 - january 2021 - There is a CI action (in `~/.github/workflows/date-check.yml`) - that generates a monthly showing those that are over 6 months old + There is a CI action (in `.github/workflows/date-check.yml`) + that generates a monthly report showing those that are over 6 months old ([example](https://github.com/rust-lang/rustc-dev-guide/issues/2052)). For the action to pick the date, add a special annotation before specifying the date: ```md - Sep 2024 + Apr 2025 ``` Example: ```md - As of Sep 2024, the foo did the bar. + As of Apr 2025, the foo did the bar. ``` For cases where the date should not be part of the visible rendered output, use the following instead: ```md - + ``` - A link to a relevant WG, tracking issue, `rustc` rustdoc page, or similar, that may provide diff --git a/src/doc/rustc-dev-guide/src/conventions.md b/src/doc/rustc-dev-guide/src/conventions.md index 0e624a4566d2a..4356cf246f89e 100644 --- a/src/doc/rustc-dev-guide/src/conventions.md +++ b/src/doc/rustc-dev-guide/src/conventions.md @@ -1,3 +1,5 @@ +# Coding conventions + This file offers some tips on the coding conventions for rustc. This chapter covers [formatting](#formatting), [coding for correctness](#cc), [using crates from crates.io](#cio), and some tips on @@ -5,7 +7,7 @@ chapter covers [formatting](#formatting), [coding for correctness](#cc),
    -# Formatting and the tidy script +## Formatting and the tidy script rustc is moving towards the [Rust standard coding style][fmt]. @@ -20,44 +22,42 @@ Formatting is checked by the `tidy` script. It runs automatically when you do `./x test` and can be run in isolation with `./x fmt --check`. If you want to use format-on-save in your editor, the pinned version of -`rustfmt` is built under `build//stage0/bin/rustfmt`. You'll have to -pass the `--edition=2021` argument yourself when calling -`rustfmt` directly. +`rustfmt` is built under `build//stage0/bin/rustfmt`. [fmt]: https://github.com/rust-dev-tools/fmt-rfcs - [`rustfmt`]:https://github.com/rust-lang/rustfmt -## Formatting C++ code +### Formatting C++ code The compiler contains some C++ code for interfacing with parts of LLVM that don't have a stable C API. When modifying that code, use this command to format it: -```sh -./x test tidy --extra-checks=cpp:fmt --bless +```console +./x test tidy --extra-checks cpp:fmt --bless ``` This uses a pinned version of `clang-format`, to avoid relying on the local environment. -## Formatting and linting Python code +### Formatting and linting Python code The Rust repository contains quite a lot of Python code. We try to keep -it both linted and formatted by the [ruff][ruff] tool. +it both linted and formatted by the [ruff] tool. When modifying Python code, use this command to format it: -```sh -./x test tidy --extra-checks=py:fmt --bless + +```console +./x test tidy --extra-checks py:fmt --bless ``` -and the following command to run lints: -```sh -./x test tidy --extra-checks=py:lint +And, the following command to run lints: + +```console +./x test tidy --extra-checks py:lint ``` -This uses a pinned version of `ruff`, to avoid relying on the local -environment. +These use a pinned version of `ruff`, to avoid relying on the local environment. [ruff]: https://github.com/astral-sh/ruff @@ -65,7 +65,7 @@ environment. -## Copyright notice +### Copyright notice In the past, files began with a copyright and license notice. Please **omit** @@ -75,41 +75,42 @@ MIT/Apache-2.0). All of the copyright notices should be gone by now, but if you come across one in the rust-lang/rust repo, feel free to open a PR to remove it. -## Line length +### Line length Lines should be at most 100 characters. It's even better if you can keep things to 80. -**Ignoring the line length limit.** Sometimes – in particular for -tests – it can be necessary to exempt yourself from this limit. In -that case, you can add a comment towards the top of the file like so: +Sometimes, and particularly for tests, it can be necessary to exempt yourself from this limit. +In that case, you can add a comment towards the top of the file like so: ```rust // ignore-tidy-linelength ``` -## Tabs vs spaces +### Tabs vs spaces -Prefer 4-space indent. +Prefer 4-space indents. -# Coding for correctness +## Coding for correctness Beyond formatting, there are a few other tips that are worth following. -## Prefer exhaustive matches +### Prefer exhaustive matches Using `_` in a match is convenient, but it means that when new variants are added to the enum, they may not get handled correctly. Ask yourself: if a new variant were added to this enum, what's the chance that it would want to use the `_` code, versus having some other treatment? Unless the answer is "low", then prefer an -exhaustive match. (The same advice applies to `if let` and `while -let`, which are effectively tests for a single variant.) +exhaustive match. + +The same advice applies to `if let` and `while let`, +which are effectively tests for a single variant. -## Use "TODO" comments for things you don't want to forget +### Use "TODO" comments for things you don't want to forget As a useful tool to yourself, you can insert a `// TODO` comment for something that you want to get back to before you land your PR: @@ -136,13 +137,13 @@ if foo { -# Using crates from crates.io +## Using crates from crates.io See the [crates.io dependencies][crates] section. -# How to structure your PR +## How to structure your PR How you prepare the commits in your PR can make a big difference for the reviewer. Here are some tips. @@ -172,7 +173,7 @@ require that every intermediate commit successfully builds – we only expect to be able to bisect at a PR level. However, if you *can* make individual commits build, that is always helpful. -# Naming conventions +## Naming conventions Apart from normal Rust style/naming conventions, there are also some specific to the compiler. diff --git a/src/doc/rustc-dev-guide/src/crates-io.md b/src/doc/rustc-dev-guide/src/crates-io.md index 403d61a81dada..4431585a2f02b 100644 --- a/src/doc/rustc-dev-guide/src/crates-io.md +++ b/src/doc/rustc-dev-guide/src/crates-io.md @@ -1,4 +1,4 @@ -# crates.io Dependencies +# crates.io dependencies The Rust compiler supports building with some dependencies from `crates.io`. Examples are `log` and `env_logger`. diff --git a/src/doc/rustc-dev-guide/src/diagnostics.md b/src/doc/rustc-dev-guide/src/diagnostics.md index 972309b5cd343..2f8f4b0ab8a0c 100644 --- a/src/doc/rustc-dev-guide/src/diagnostics.md +++ b/src/doc/rustc-dev-guide/src/diagnostics.md @@ -1,4 +1,4 @@ -# Errors and Lints +# Errors and lints @@ -772,7 +772,7 @@ store.register_renamed("single_use_lifetime", "single_use_lifetimes"); [`store.register_removed`]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_lint/struct.LintStore.html#method.register_removed [`rustc_lint::register_builtins`]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_lint/fn.register_builtins.html -### Lint Groups +### Lint groups Lints can be turned on in groups. These groups are declared in the [`register_builtins`][rbuiltins] function in [`rustc_lint::lib`][builtin]. The @@ -954,9 +954,6 @@ application of these fields based on a variety of attributes when using `Self="std::iter::Iterator"`. This is needed because `Self` is a keyword which cannot appear in attributes. - `direct`: user-specified rather than derived obligation. - - `from_method`: usable both as boolean (whether the flag is present, like - `crate_local`) or matching against a particular method. Currently used - for `try`. - `from_desugaring`: usable both as boolean (whether the flag is present) or matching against a particular desugaring. The desugaring is identified with its variant name in the `DesugaringKind` enum. diff --git a/src/doc/rustc-dev-guide/src/feature-gates.md b/src/doc/rustc-dev-guide/src/feature-gates.md index 24ce9bb71bfdf..9806f73c483c2 100644 --- a/src/doc/rustc-dev-guide/src/feature-gates.md +++ b/src/doc/rustc-dev-guide/src/feature-gates.md @@ -1,4 +1,4 @@ -# Feature Gates +# Feature gates This chapter is intended to provide basic help for adding, removing, and modifying feature gates. diff --git a/src/doc/rustc-dev-guide/src/git.md b/src/doc/rustc-dev-guide/src/git.md index 177495b536212..8118ddff10c92 100644 --- a/src/doc/rustc-dev-guide/src/git.md +++ b/src/doc/rustc-dev-guide/src/git.md @@ -38,13 +38,13 @@ If you've cloned your fork, then you will be able to reference it with `origin` in your local repo. It may be helpful to also set up a remote for the official rust-lang/rust repo via -```sh +```console git remote add upstream https://github.com/rust-lang/rust.git ``` if you're using HTTPS, or -```sh +```console git remote add upstream git@github.com:rust-lang/rust.git ``` @@ -112,7 +112,7 @@ See [Rebasing](#rebasing) for more about rebasing. This is not a problem from git's perspective. If you run `git remote -v`, it will say something like this: -``` +```console $ git remote -v origin git@github.com:jyn514/rust.git (fetch) origin git@github.com:jyn514/rust.git (push) @@ -158,11 +158,11 @@ To fix it, do the following things: ### I see "error: cannot rebase" when I try to rebase These are two common errors to see when rebasing: -``` +```console error: cannot rebase: Your index contains uncommitted changes. error: Please commit or stash them. ``` -``` +```console error: cannot rebase: You have unstaged changes. error: Please commit or stash them. ``` @@ -174,7 +174,7 @@ commit your changes, or make a temporary commit called a "stash" to have them st when you finish rebasing. You may want to configure git to make this "stash" automatically, which will prevent the "cannot rebase" error in nearly all cases: -``` +```console git config --global rebase.autostash true ``` @@ -205,7 +205,7 @@ git reset --hard master `git push` will not work properly and say something like this: -``` +```console ! [rejected] issue-xxxxx -> issue-xxxxx (non-fast-forward) error: failed to push some refs to 'https://github.com/username/rust.git' hint: Updates were rejected because the tip of your current branch is behind @@ -226,7 +226,7 @@ didn't write, it likely means you're trying to rebase over the wrong branch. For have a `rust-lang/rust` remote `upstream`, but ran `git rebase origin/master` instead of `git rebase upstream/master`. The fix is to abort the rebase and use the correct branch instead: -``` +```console git rebase --abort git rebase -i upstream/master ``` @@ -243,7 +243,7 @@ When updating your local repository with `git pull`, you may notice that sometim Git says you have modified some files that you have never edited. For example, running `git status` gives you something like (note the `new commits` mention): -``` +```console On branch master Your branch is up to date with 'origin/master'. @@ -256,9 +256,12 @@ Changes not staged for commit: no changes added to commit (use "git add" and/or "git commit -a") ``` -These changes are not changes to files: they are changes to submodules (more on this -[later](#git-submodules)). To get rid of those, run `./x --help`, which will automatically update -the submodules. +These changes are not changes to files: they are changes to submodules (more on this [later](#git-submodules)). +To get rid of those: + +```console +git submodule update +``` Some submodules are not actually needed; for example, `src/llvm-project` doesn't need to be checked out if you're using `download-ci-llvm`. To avoid having to keep fetching its history, you can use @@ -278,12 +281,12 @@ merged. To do that, you need to rebase your work on top of rust-lang/rust. To rebase your feature branch on top of the newest version of the master branch of rust-lang/rust, checkout your branch, and then run this command: -``` +```console git pull --rebase https://github.com/rust-lang/rust.git master ``` > If you are met with the following error: -> ``` +> ```console > error: cannot pull with rebase: Your index contains uncommitted changes. > error: please commit or stash them. > ``` @@ -300,13 +303,13 @@ reapply the changes fails because your changes conflicted with other changes that have been made. You can tell that this happened because you'll see lines in the output that look like -``` +```console CONFLICT (content): Merge conflict in file.rs ``` When you open these files, you'll see sections of the form -``` +```console <<<<<<< HEAD Original code ======= @@ -346,7 +349,7 @@ will keep it up-to-date. You will also want to rebase your feature branches up-to-date as well. After pulling, you can checkout the feature branches and rebase them: -``` +```console git checkout master git pull upstream master --ff-only # to make certain there are no merge commits git rebase master feature_branch @@ -384,7 +387,7 @@ change the order in which they are applied, or "squash" them into each other. Alternatively, you can sacrifice the commit history like this: -``` +```console # squash all the changes into one commit so you only have to worry about conflicts once git rebase -i --keep-base master # and squash all changes along the way git rebase master @@ -422,7 +425,7 @@ it shows you the differences between your old diff and your new diff. Here's an example of `git range-diff` output (taken from [Git's docs][range-diff-example-docs]): -``` +```console -: ------- > 1: 0ddba11 Prepare for the inevitable! 1: c0debee = 2: cab005e Add a helpful message at the start 2: f00dbal ! 3: decafe1 Describe a bug @@ -499,7 +502,7 @@ Git and Github's default diff view for large moves *within* a file is quite poor line as deleted and each line as added, forcing you to compare each line yourself. Git has an option to show moved lines in a different color: -``` +```console git log -p --color-moved=dimmed-zebra --color-moved-ws=allow-indentation-change ``` @@ -515,7 +518,7 @@ that was force-pushed to make sure there are no unexpected changes. Many large files in the repo are autogenerated. To view a diff that ignores changes to those files, you can use the following syntax (e.g. Cargo.lock): -``` +```console git log -p ':!Cargo.lock' ``` @@ -545,7 +548,7 @@ The contents of submodules are ignored by Git: submodules are in some sense isol from the rest of the repository. However, if you try to `cd src/llvm-project` and then run `git status`: -``` +```console HEAD detached at 9567f08afc943 nothing to commit, working tree clean ``` @@ -576,7 +579,7 @@ that Git can nicely and fairly conveniently handle for us. Sometimes you might run into (when you run `git status`) -``` +```console Changes not staged for commit: (use "git add ..." to update what will be committed) (use "git restore ..." to discard changes in working directory) @@ -586,7 +589,7 @@ Changes not staged for commit: and when you try to run `git submodule update` it breaks horribly with errors like -``` +```console error: RPC failed; curl 92 HTTP/2 stream 7 was not closed cleanly: CANCEL (err 8) error: 2782 bytes of body are still expected fetch-pack: unexpected disconnect while reading sideband packet @@ -597,8 +600,8 @@ fatal: Fetched in submodule path 'src/llvm-project', but it did not contain 5a51 If you see `(new commits, modified content)` you can run -```bash -$ git submodule foreach git reset --hard +```console +git submodule foreach git reset --hard ``` and then try `git submodule update` again. @@ -607,7 +610,7 @@ and then try `git submodule update` again. If that doesn't work, you can try to deinit all git submodules... -``` +```console git submodule deinit -f --all ``` @@ -618,7 +621,7 @@ completely messed up for some reason. Sometimes, for some forsaken reason, you might run into -```text +```console fatal: not a git repository: src/gcc/../../.git/modules/src/gcc ``` diff --git a/src/doc/rustc-dev-guide/src/guides/editions.md b/src/doc/rustc-dev-guide/src/guides/editions.md index ea207167791b7..9a92d4ebcb510 100644 --- a/src/doc/rustc-dev-guide/src/guides/editions.md +++ b/src/doc/rustc-dev-guide/src/guides/editions.md @@ -193,6 +193,23 @@ When a user runs `cargo fix --edition`, cargo will pass the `--force-warn rust-2 flag to force all of these lints to appear during the edition migration. Cargo also passes `--cap-lints=allow` so that no other lints interfere with the edition migration. +Make sure that the example code sets the correct edition. The example should illustrate the previous edition, and show what the migration warning would look like. For example, this lint for a 2024 migration shows an example in 2021: + +```rust,ignore +declare_lint! { + /// The `keyword_idents_2024` lint detects ... + /// + /// ### Example + /// + /// ```rust,edition2021 + /// #![warn(keyword_idents_2024)] + /// fn gen() {} + /// ``` + /// + /// {{produces}} +} +``` + Migration lints can be either `Allow` or `Warn` by default. If it is `Allow`, users usually won't see this warning unless they are doing an edition migration manually or there is a problem during the migration. @@ -334,3 +351,40 @@ In general it is recommended to avoid these special cases except for very high v [into-iter]: https://doc.rust-lang.org/nightly/edition-guide/rust-2021/IntoIterator-for-arrays.html [panic-macro]: https://doc.rust-lang.org/nightly/edition-guide/rust-2021/panic-macro-consistency.html [`non_fmt_panics`]: https://doc.rust-lang.org/nightly/rustc/lints/listing/warn-by-default.html#non-fmt-panics + +### Migrating the standard library edition + +Updating the edition of the standard library itself roughly involves the following process: + +- Wait until the newly stabilized edition has reached beta and the bootstrap compiler has been updated. +- Apply migration lints. This can be an involved process since some code is in external submodules[^std-submodules], and the standard library makes heavy use of conditional compilation. Also, running `cargo fix --edition` can be impractical on the standard library itself. One approach is to individually add `#![warn(...)]` at the top of each crate for each lint, run `./x check library`, apply the migrations, remove the `#![warn(...)]` and commit each migration separately. You'll likely need to run `./x check` with `--target` for many different targets to get full coverage (otherwise you'll likely spend days or weeks getting CI to pass)[^ed-docker]. See also the [advanced migration guide] for more tips. + - Apply migrations to [`backtrace-rs`]. [Example for 2024](https://github.com/rust-lang/backtrace-rs/pull/700). Note that this doesn't update the edition of the crate itself because that is published independently on crates.io, and that would otherwise restrict the minimum Rust version. Consider adding some `#![deny()]` attributes to avoid regressions until its edition gets updated. + - Apply migrations to [`stdarch`], and update its edition, and formatting. [Example for 2024](https://github.com/rust-lang/stdarch/pull/1710). + - Post PRs to update the backtrace and stdarch submodules, and wait for those to land. + - Apply migration lints to the standard library crates, and update their edition. I recommend working one crate at a time starting with `core`. [Example for 2024](https://github.com/rust-lang/rust/pull/138162). + +[^std-submodules]: This will hopefully change in the future to pull these submodules into `rust-lang/rust`. +[^ed-docker]: You'll also likely need to do a lot of testing for different targets, and this is where [docker testing](../tests/docker.md) comes in handy. + +[advanced migration guide]: https://doc.rust-lang.org/nightly/edition-guide/editions/advanced-migrations.html +[`backtrace-rs`]: https://github.com/rust-lang/backtrace-rs/ +[`stdarch`]: https://github.com/rust-lang/stdarch/ + +## Stabilizing an edition + +After the edition team has given the go-ahead, the process for stabilizing an edition is roughly: + +- Update [`LATEST_STABLE_EDITION`]. +- Update [`Edition::is_stable`]. +- Hunt and find any document that refers to edition by number, and update it: + - [`--edition` flag](https://github.com/rust-lang/rust/blob/master/src/doc/rustc/src/command-line-arguments.md#--edition-specify-the-edition-to-use) + - [Rustdoc attributes](https://github.com/rust-lang/rust/blob/master/src/doc/rustdoc/src/write-documentation/documentation-tests.md#attributes) +- Clean up any tests that use the `//@ edition` header to remove the `-Zunstable-options` flag to ensure they are indeed stable. Note: Ideally this should be automated, see [#133582]. +- Bless any tests that change. +- Update `lint-docs` to default to the new edition. + +See [example for 2024](https://github.com/rust-lang/rust/pull/133349). + +[`LATEST_STABLE_EDITION`]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_span/edition/constant.LATEST_STABLE_EDITION.html +[`Edition::is_stable`]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_span/edition/enum.Edition.html#method.is_stable +[#133582]: https://github.com/rust-lang/rust/issues/133582 diff --git a/src/doc/rustc-dev-guide/src/hir.md b/src/doc/rustc-dev-guide/src/hir.md index 75f5a9e204528..0c1c9941572dd 100644 --- a/src/doc/rustc-dev-guide/src/hir.md +++ b/src/doc/rustc-dev-guide/src/hir.md @@ -100,7 +100,7 @@ The HIR uses a bunch of different identifiers that coexist and serve different p a wrapper around a [`HirId`]. For more info about HIR bodies, please refer to the [HIR chapter][hir-bodies]. -These identifiers can be converted into one another through the [HIR map][map]. +These identifiers can be converted into one another through the `TyCtxt`. [`DefId`]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_hir/def_id/struct.DefId.html [`LocalDefId`]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_hir/def_id/struct.LocalDefId.html @@ -110,30 +110,24 @@ These identifiers can be converted into one another through the [HIR map][map]. [`CrateNum`]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_hir/def_id/struct.CrateNum.html [`DefIndex`]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_hir/def_id/struct.DefIndex.html [`Body`]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_hir/hir/struct.Body.html -[hir-map]: ./hir.md#the-hir-map [hir-bodies]: ./hir.md#hir-bodies -[map]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_middle/hir/map/struct.Map.html -## The HIR Map +## HIR Operations Most of the time when you are working with the HIR, you will do so via -the **HIR Map**, accessible in the tcx via [`tcx.hir()`] (and defined in -the [`hir::map`] module). The [HIR map] contains a [number of methods] to -convert between IDs of various kinds and to lookup data associated -with a HIR node. +`TyCtxt`. It contains a number of methods, defined in the `hir::map` module and +mostly prefixed with `hir_`, to convert between IDs of various kinds and to +lookup data associated with a HIR node. -[`tcx.hir()`]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_middle/ty/struct.TyCtxt.html#method.hir -[`hir::map`]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_middle/hir/map/index.html -[HIR map]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_middle/hir/map/struct.Map.html -[number of methods]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_middle/hir/map/struct.Map.html#methods +[`TyCtxt`]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_middle/ty/struct.TyCtxt.html For example, if you have a [`LocalDefId`], and you would like to convert it -to a [`HirId`], you can use [`tcx.hir().local_def_id_to_hir_id(def_id)`][local_def_id_to_hir_id]. +to a [`HirId`], you can use [`tcx.local_def_id_to_hir_id(def_id)`][local_def_id_to_hir_id]. You need a `LocalDefId`, rather than a `DefId`, since only local items have HIR nodes. -[local_def_id_to_hir_id]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_middle/hir/map/struct.Map.html#method.local_def_id_to_hir_id +[local_def_id_to_hir_id]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_middle/ty/struct.TyCtxt.html#method.local_def_id_to_hir_id -Similarly, you can use [`tcx.hir().find(n)`][find] to lookup the node for a +Similarly, you can use [`tcx.hir_node(n)`][hir_node] to lookup the node for a [`HirId`]. This returns a `Option>`, where [`Node`] is an enum defined in the map. By matching on this, you can find out what sort of node the `HirId` referred to and also get a pointer to the data @@ -142,15 +136,16 @@ that `n` must be some HIR expression, you can do [`tcx.hir_expect_expr(n)`][expect_expr], which will extract and return the [`&hir::Expr`][Expr], panicking if `n` is not in fact an expression. -[find]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_middle/hir/map/struct.Map.html#method.find +[hir_node]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_middle/ty/struct.TyCtxt.html#method.hir_node [`Node`]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_hir/hir/enum.Node.html [expect_expr]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_middle/ty/struct.TyCtxt.html#method.expect_expr [Expr]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_hir/hir/struct.Expr.html -Finally, you can use the HIR map to find the parents of nodes, via -calls like [`tcx.hir().get_parent(n)`][get_parent]. +Finally, you can find the parents of nodes, via +calls like [`tcx.parent_hir_node(n)`][parent_hir_node]. + +[parent_hir_node]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_middle/ty/struct.TyCtxt.html#method.parent_hir_node -[get_parent]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_middle/hir/map/struct.Map.html#method.get_parent ## HIR Bodies @@ -158,10 +153,10 @@ A [`rustc_hir::Body`] represents some kind of executable code, such as the body of a function/closure or the definition of a constant. Bodies are associated with an **owner**, which is typically some kind of item (e.g. an `fn()` or `const`), but could also be a closure expression -(e.g. `|x, y| x + y`). You can use the HIR map to find the body -associated with a given def-id ([`maybe_body_owned_by`]) or to find -the owner of a body ([`body_owner_def_id`]). +(e.g. `|x, y| x + y`). You can use the `TyCtxt` to find the body +associated with a given def-id ([`hir_maybe_body_owned_by`]) or to find +the owner of a body ([`hir_body_owner_def_id`]). [`rustc_hir::Body`]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_hir/hir/struct.Body.html -[`maybe_body_owned_by`]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_middle/hir/map/struct.Map.html#method.maybe_body_owned_by -[`body_owner_def_id`]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_middle/hir/map/struct.Map.html#method.body_owner_def_id +[`hir_maybe_body_owned_by`]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_middle/ty/struct.TyCtxt.html#method.hir_maybe_body_owned_by +[`hir_body_owner_def_id`]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_middle/ty/struct.TyCtxt.html#method.hir_body_owner_def_id diff --git a/src/doc/rustc-dev-guide/src/incrcomp-debugging.md b/src/doc/rustc-dev-guide/src/incrcomp-debugging.md index 7045d3fa39d32..a548215cf0cee 100644 --- a/src/doc/rustc-dev-guide/src/incrcomp-debugging.md +++ b/src/doc/rustc-dev-guide/src/incrcomp-debugging.md @@ -1,4 +1,4 @@ -# Debugging and Testing Dependencies +# Debugging and testing dependencies ## Testing the dependency graph diff --git a/src/doc/rustc-dev-guide/src/memory.md b/src/doc/rustc-dev-guide/src/memory.md index 1e030ff45a789..eeb4a813980a6 100644 --- a/src/doc/rustc-dev-guide/src/memory.md +++ b/src/doc/rustc-dev-guide/src/memory.md @@ -1,4 +1,4 @@ -# Memory Management in Rustc +# Memory management in rustc Generally rustc tries to be pretty careful how it manages memory. The compiler allocates _a lot_ of data structures throughout compilation, diff --git a/src/doc/rustc-dev-guide/src/mir/dataflow.md b/src/doc/rustc-dev-guide/src/mir/dataflow.md index f31da5ca22ee3..85e57dd839b81 100644 --- a/src/doc/rustc-dev-guide/src/mir/dataflow.md +++ b/src/doc/rustc-dev-guide/src/mir/dataflow.md @@ -148,8 +148,7 @@ whereas this code uses [`ResultsCursor`]: ```rust,ignore let mut results = MyAnalysis::new() - .into_engine(tcx, body, def_id) - .iterate_to_fixpoint() + .iterate_to_fixpoint(tcx, body, None); .into_results_cursor(body); // Inspect the fixpoint state immediately before each `Drop` terminator. diff --git a/src/doc/rustc-dev-guide/src/name-resolution.md b/src/doc/rustc-dev-guide/src/name-resolution.md index 2727b8142f2a8..719ebce855366 100644 --- a/src/doc/rustc-dev-guide/src/name-resolution.md +++ b/src/doc/rustc-dev-guide/src/name-resolution.md @@ -120,9 +120,9 @@ even though they should be visible by ordinary scoping rules. An example: fn do_something(val: T) { // <- New rib in both types and values (1) // `val` is accessible, as is the helper function // `T` is accessible - let helper = || { // New rib on `helper` (2) and another on the block (3) + let helper = || { // New rib on the block (2) // `val` is accessible here - }; // End of (3) + }; // End of (2), new rib on `helper` (3) // `val` is accessible, `helper` variable shadows `helper` function fn helper() { // <- New rib in both types and values (4) // `val` is not accessible here, (4) is not transparent for locals @@ -130,7 +130,7 @@ fn do_something(val: T) { // <- New rib in both types and values (1) } // End of (4) let val = T::default(); // New rib (5) // `val` is the variable, not the parameter here -} // End of (5), (2) and (1) +} // End of (5), (3) and (1) ``` Because the rules for different namespaces are a bit different, each namespace diff --git a/src/doc/rustc-dev-guide/src/normalization.md b/src/doc/rustc-dev-guide/src/normalization.md new file mode 100644 index 0000000000000..ef530ccc5ed95 --- /dev/null +++ b/src/doc/rustc-dev-guide/src/normalization.md @@ -0,0 +1,309 @@ +# Aliases and Normalization + + + +## Aliases + +In Rust there are a number of types that are considered equal to some "underlying" type, for example inherent associated types, trait associated types, free type aliases (`type Foo = u32`), and opaque types (`-> impl RPIT`). We consider such types to be "aliases", alias types are represented by the [`TyKind::Alias`][tykind_alias] variant, with the kind of alias tracked by the [`AliasTyKind`][aliaskind] enum. + +Normalization is the process of taking these alias types and replacing them with the underlying type that they are equal to. For example given some type alias `type Foo = u32`, normalizing `Foo` would give `u32`. + +The concept of an alias is not unique to *types* and the concept also applies to constants/const generics. However, right now in the compiler we don't really treat const aliases as a "first class concept" so this chapter mostly discusses things in the context of types (even though the concepts transfer just fine). + +[tykind_alias]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_type_ir/enum.TyKind.html#variant.Alias +[aliaskind]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_type_ir/enum.AliasTyKind.html + +### Rigid, Ambiguous and Unnormalized Aliases + +Aliases can either be "rigid", "ambiguous", or simply unnormalized. + +We consider types to be rigid if their "shape" isn't going to change, for example `Box` is rigid as no amount of normalization can turn a `Box` into a `u32`, whereas ` as Iterator>::Item` is not rigid as it can be normalized to `u32`. + +Aliases are rigid when we will never be able to normalize them further. A concrete example of a *rigid* alias would be `::Item` in an environment where there is no `T: Iterator` bound, only a `T: Iterator` bound: +```rust +fn foo() { + // This alias is *rigid* + let _: ::Item; +} + +fn bar>() { + // This alias is *not* rigid as it can be normalized to `u32` + let _: ::Item; +} +``` + +When an alias can't yet be normalized but may wind up normalizable in the [current environment](./typing_parameter_envs.md), we consider it to be an "ambiguous" alias. This can occur when an alias contains inference variables which prevent being able to determine how the trait is implemented: +```rust +fn foo() { + // This alias is considered to be "ambiguous" + let _: <_ as Iterator>::Item; +} +``` + +The reason we call them "ambiguous" aliases is because its *ambiguous* whether this is a rigid alias or not. + +The source of the `_: Iterator` trait impl is *ambiguous* (i.e. unknown), it could be some `impl Iterator for u32` or it could be some `T: Iterator` trait bound, we don't know yet. Depending on why `_: Iterator` holds the alias could be an unnormalized alias or it could be a rigid alias; it's *ambiguous* what kind of alias this is. + +Finally, an alias can just be unnormalized, ` as IntoIterator>::Iter` is an unnormalized alias as it can already be normalized to `std::vec::IntoIter`, it just hasn't been done yet. + +--- + +It is worth noting that Free and Inherent aliases cannot be rigid or ambiguous as naming them also implies having resolved the definition of the alias, which specifies the underlying type of the alias. + +### Diverging Aliases + +An alias is considered to "diverge" if its definition does not specify an underlying non-alias type to normalize to. A concrete example of diverging aliases: +```rust +type Diverges = Diverges; + +trait Trait { + type DivergingAssoc; +} +impl Trait for () { + type DivergingAssoc = <() as Trait>::DivergingAssoc; +} +``` +In this example both `Diverges` and `DivergingAssoc` are "trivial" cases of diverging type aliases where they have been defined as being equal to themselves. There is no underlying type that `Diverges` can ever be normalized to. + +We generally try to error when diverging aliases are defined, but this is entirely a "best effort" check. In the previous example the definitions are "simple enough" to be detected and so errors are emitted. However, in more complex cases, or cases where only some instantiations of generic parameters would result in a diverging alias, we don't emit an error: +```rust +trait Trait { + type DivergingAssoc; +} +impl Trait for T { + // This alias always diverges but we don't emit an error because + // the compiler can't "see" that. + type DivergingAssoc = ::DivergingAssoc; +} +``` + +Ultimately this means that we have no guarantee that aliases in the type system are non-diverging. As aliases may only diverge for some specific generic arguments, it also means that we only know whether an alias diverges once it is fully concrete. This means that codegen/const-evaluation also has to handle diverging aliases: +```rust +trait Trait { + type Diverges; +} +impl Trait for T { + type Diverges = ::Diverges; +} + +fn foo() { + let a: T::Diverges; +} + +fn main() { + foo::<()>(); +} +``` +In this example we only encounter an error from the diverging alias during codegen of `foo::<()>`, if the call to `foo` is removed then no compilation error will be emitted. + +### Opaque Types + +Opaque types are a relatively special kind of alias, and are covered in their own chapter: [Opaque types](./opaque-types-type-alias-impl-trait.md). + +### Const Aliases + +Unlike type aliases, const aliases are not represented directly in the type system, instead const aliases are always an anonymous body containing a path expression to a const item. This means that the only "const alias" in the type system is an anonymous unevaluated const body. + +As such there is no `ConstKind::Alias(AliasCtKind::Projection/Inherent/Free, _)`, instead we only have `ConstKind::Unevaluated` which is used for representing anonymous constants. + +```rust +fn foo() {} + +const FREE_CONST: usize = 1 + 1; + +fn bar() { + foo::<{ FREE_CONST }>(); + // The const arg is represented with some anonymous constant: + // ```pseudo-rust + // const ANON: usize = FREE_CONST; + // foo::(); + // ``` +} +``` + +This is likely to change as const generics functionality is improved, for example `feature(associated_const_equality)` and `feature(min_generic_const_args)` both require handling const aliases similarly to types (without an anonymous constant wrapping all const args). + +## What is Normalization + +### Structural vs Deep normalization + +There are two forms of normalization, structural (sometimes called *shallow*) and deep. Structural normalization should be thought of as only normalizing the "outermost" part of a type. On the other hand deep normalization will normalize *all* aliases in a type. + +In practice structural normalization can result in more than just the outer layer of the type being normalized, but this behaviour should not be relied upon. Unnormalizable non-rigid aliases making use of bound variables (`for<'a>`) cannot be normalized by either kind of normalization. + +As an example: conceptually, structurally normalizing the type `Vec<::Assoc>` would be a no-op, whereas deeply normalizing would give `Vec`. In practice even structural normalization would give `Vec`, though, again, this should not be relied upon. + +Changing the alias to use bound variables will result in different behaviour; `Vec fn(<&'a u8 as Identity>::Assoc)>` would result in no change when structurally normalized, but would result in `Vec fn(&'a u8)>` when deeply normalized. + +### Core normalization logic + +Structurally normalizing aliases is a little bit more nuanced than replacing the alias with whatever it is defined as being equal to in its definition; the result of normalizing an alias should either be a rigid type or an inference variable (which will later be inferred to a rigid type). To accomplish this we do two things: + +First, when normalizing an ambiguous alias it is normalized to an inference variable instead of leaving it as-is, this has two main effects: +- Even though an inference variable is not a rigid type, it will always wind up inferred *to* a rigid type so we ensure that the result of normalization will not need to be normalized again +- Inference variables are used in all cases where a type is non-rigid, allowing the rest of the compiler to not have to deal with *both* ambiguous aliases *and* inference variables + +Secondly, instead of having normalization directly return the type specified in the definition of the alias, we normalize the type first before returning it[^1]. We do this so that normalization is idempotent/callers do not need to run it in a loop. + +```rust +#![feature(lazy_type_alias)] + +type Foo = Bar; +type Bar = ::Item; + +fn foo() { + let a_: Foo<_>; +} +``` + +In this example: +- Normalizing `Foo` would result in `Bar`, except we want to normalize aliases in the type `Foo` is defined as equal to +- Normalizing `Bar` would result in `::Item`, except, again, we want to normalize aliases in the type `Bar` is defined as equal to +- Normalizing `::Item` results in some new inference variable `?y`, as `::Item` is an ambiguous alias +- The final result is that normalizing `Foo` results in `?y` + +## How to normalize + +When interfacing with the type system it will often be the case that it's necessary to request a type be normalized. There are a number of different entry points to the underlying normalization logic and each entry point should only be used in specific parts of the compiler. + +An additional complication is that the compiler is currently undergoing a transition from the old trait solver to the new trait solver. As part of this transition our approach to normalization in the compiler has changed somewhat significantly, resulting in some normalization entry points being "old solver only" slated for removal in the long-term once the new solver has stabilized. + +Here is a rough overview of the different entry points to normalization in the compiler: +- `infcx.at.structurally_normalize` +- `infcx.at.(deeply_)?normalize` +- `infcx.query_normalize` +- `tcx.normalize_erasing_regions` +- `traits::normalize_with_depth(_to)` +- `EvalCtxt::structurally_normalize` + +### Outside of the trait solver + +The [`InferCtxt`][infcx] type exposes the "main" ways to normalize during analysis: [`normalize`][normalize], [`deeply_normalize`][deeply_normalize] and [`structurally_normalize`][structurally_normalize]. These functions are often wrapped and re-exposed on various `InferCtxt` wrapper types, such as [`FnCtxt`][fcx] or [`ObligationCtxt`][ocx] with minor API tweaks to handle some arguments or parts of the return type automatically. + +#### Structural `InferCtxt` normalization + +[`infcx.at.structurally_normalize`][structurally_normalize] exposes structural normalization that is able to handle inference variables and regions. It should generally be used whenever inspecting the kind of a type. + +Inside of HIR Typeck there is a related method of normalization- [`fcx.structurally_resolve`][structurally_resolve], which will error if the type being resolved is an unresolved inference variable. When the new solver is enabled it will also attempt to structurally normalize the type. + +Due to this there is a pattern in HIR typeck where a type is first normalized via `normalize` (only normalizing in the old solver), and then `structurally_resolve`'d (only normalizing in the new solver). This pattern should be preferred over calling `structurally_normalize` during HIR typeck as `structurally_resolve` will attempt to make inference progress by evaluating goals whereas `structurally_normalize` does not. + +#### Deep `InferCtxt` normalization + +##### `infcx.at.(deeply_)?normalize` + +There are two ways to deeply normalize with an `InferCtxt`, `normalize` and `deeply_normalize`. The reason for this is that `normalize` is a "legacy" normalization entry point used only by the old solver, whereas `deeply_normalize` is intended to be the long term way to deeply normalize. Both of these methods can handle regions. + +When the new solver is stabilized the `infcx.at.normalize` function will be removed and everything will have been migrated to the new deep or structural normalization methods. For this reason the `normalize` function is a no-op under the new solver, making it suitable only when the old solver needs normalization but the new solver does not. + +Using `deeply_normalize` will result in errors being emitted when encountering ambiguous aliases[^2] as it is not possible to support normalizing *all* ambiguous aliases to inference variables[^3]. `deeply_normalize` should generally only be used in cases where we do not expect to encounter ambiguous aliases, for example when working with types from item signatures. + +##### `infcx.query_normalize` + +[`infcx.query_normalize`][query_norm] is very rarely used, it has almost all the same restrictions as `normalize_erasing_regions` (cannot handle inference variables, no diagnostics support) with the main difference being that it retains lifetime information. For this reason `normalize_erasing_regions` is the better choice in almost all circumstances as it is more efficient due to caching lifetime-erased queries. + +In practice `query_normalize` is used for normalization in the borrow checker, and elsewhere as a performance optimization over `infcx.normalize`. Once the new solver is stabilized it is expected that `query_normalize` can be removed from the compiler as the new solvers normalization implementation should be performant enough for it to not be a performance regression. + +##### `tcx.normalize_erasing_regions` + +[`normalize_erasing_regions`][norm_erasing_regions] is generally used by parts of the compiler that are not doing type system analysis. This normalization entry point does not handle inference variables, lifetimes, or any diagnostics. Lints and codegen make heavy use of this entry point as they typically are working with fully inferred aliases that can be assumed to be well formed (or at least, are not responsible for erroring on). + +[query_norm]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_trait_selection/infer/at/struct.At.html#method.query_normalize +[norm_erasing_regions]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_middle/ty/struct.TyCtxt.html#method.normalize_erasing_regions +[normalize]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_trait_selection/infer/at/struct.At.html#method.normalize +[deeply_normalize]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_trait_selection/traits/normalize/trait.NormalizeExt.html#tymethod.deeply_normalize +[structurally_normalize]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_trait_selection/traits/trait.StructurallyNormalizeExt.html#tymethod.structurally_normalize_ty +[infcx]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_trait_selection/infer/struct.InferCtxt.html +[fcx]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_hir_typeck/fn_ctxt/struct.FnCtxt.html +[ocx]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_trait_selection/traits/struct.ObligationCtxt.html +[structurally_resolve]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_hir_typeck/fn_ctxt/struct.FnCtxt.html#method.structurally_resolve_type + +### Inside of the trait solver + +[`traits::normalize_with_depth(_to)`][norm_with_depth] and [`EvalCtxt::structurally_normalize`][eval_ctxt_structural_norm] are only used by the internals of the trait solvers (old and new respectively). It is effectively a raw entry point to the internals of how normalization is implemented by each trait solver. Other normalization entry points cannot be used from within the internals of trait solving as it wouldn't handle goal cycles and recursion depth correctly. + +[norm_with_depth]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_trait_selection/traits/normalize/fn.normalize_with_depth.html +[eval_ctxt_structural_norm]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_next_trait_solver/solve/struct.EvalCtxt.html#method.structurally_normalize_term + +## When/Where to normalize (Old vs New solver) + +One of the big changes between the old and new solver is our approach to when we expect aliases to be normalized. + +### Old solver + +All types are expected to be normalized as soon as possible, so that all types encountered in the type system are either rigid or an inference variable (which will later be inferred to a rigid term). + +As a concrete example: equality of aliases is implemented by assuming they're rigid and recursively equating the generic arguments of the alias. + +### New solver + +It's expected that all types potentially contain ambiguous or unnormalized aliases. Whenever an operation is performed that requires aliases to be normalized, it's the responsibility of that logic to normalize the alias (this means that matching on `ty.kind()` pretty much always has to structurally normalize first). + +As a concrete example: equality of aliases is implemented by a custom goal kind ([`PredicateKind::AliasRelate`][aliasrelate]) so that it can handle normalization of the aliases itself instead of assuming all alias types being equated are rigid. + +Despite this approach we still deeply normalize during [writeback][writeback] for performance/simplicity, so that types in the MIR can still be assumed to have been deeply normalized. + +[aliasrelate]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_middle/ty/type.PredicateKind.html#variant.AliasRelate +[writeback]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_hir_typeck/writeback/index.html + +--- + +There were a few main issues with the old solver's approach to normalization that motivated changing things in the new solver: + +### Missing normalization calls + +It was a frequent occurrence that normalization calls would be missing, resulting in passing unnormalized types to APIs expecting everything to already be normalized. Treating ambiguous or unnormalized aliases as rigid would result in all sorts of weird errors from aliases not being considered equal to one another, or surprising inference guidance from equating unnormalized aliases' generic arguments. + +### Normalizing parameter environments + +Another problem was that it was not possible to normalize `ParamEnv`s correctly in the old solver as normalization itself would expect a normalized `ParamEnv` in order to give correct results. See the chapter on `ParamEnv`s for more information: [`Typing/ParamEnv`s: Normalizing all bounds](./typing_parameter_envs.md#normalizing-all-bounds) + +### Unnormalizable non-rigid aliases in higher ranked types + +Given a type such as `for<'a> fn(::Assoc>)`, it is not possible to correctly handle this with the old solver's approach to normalization. + +If we were to normalize it to `for<'a> fn(?y)` and register a goal to normalize `for<'a> >::Assoc -> ?y`, this would result in errors in cases where `>::Assoc` normalized to `&'a u32`. The inference variable `?y` would be in a lower [universe][universes] than the placeholders made when instantiating the `for<'a>` binder. + +Leaving the alias unnormalized would also be wrong as the old solver expects all aliases to be rigid. This was a soundness bug before the new solver was stabilized in coherence: [relating projection substs is unsound during coherence](https://github.com/rust-lang/rust/issues/102048). + +Ultimately this means that it is not always possible to ensure all aliases inside of a value are rigid. + +[universes]: https://rustc-dev-guide.rust-lang.org/borrow_check/region_inference/placeholders_and_universes.html#what-is-a-universe +[deeply_normalize]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_trait_selection/traits/normalize/trait.NormalizeExt.html#tymethod.deeply_normalize + +## Handling uses of diverging aliases + +Diverging aliases, like ambiguous aliases, are normalized to inference variables. As normalizing diverging aliases results in trait solver cycles, it always results in an error in the old solver. In the new solver it only results in an error if we wind up requiring all goals to hold in the current context. E.g. normalizing diverging aliases during HIR typeck will result in an error in both solvers. + +Alias well formedness doesn't require that the alias doesn't diverge[^4], this means that checking an alias is well formed isn't sufficient to cause an error to be emitted for diverging aliases; actually attempting to normalize the alias is required. + +Erroring on diverging aliases being a side effect of normalization means that it is very *arbitrary* whether we actually emit an error, it also differs between the old and new solver as we now normalize in less places. + +An example of the ad-hoc nature of erroring on diverging aliases causing "problems": +```rust +trait Trait { + type Diverges; +} + +impl Trait for T { + type Diverges = D::Diverges; +} + +struct Bar::Diverges>(Box); +``` + +In this example a diverging alias is used but we happen to not emit an error as we never explicitly normalize the defaults of generic parameters. If the `?Sized` opt out is removed then an error is emitted because we wind up happening to normalize a `::Diverges: Sized` goal which as a side effect results in erroring about the diverging alias. + +Const aliases differ from type aliases a bit here; well formedness of const aliases requires that they can be successfully evaluated (via [`ConstEvaluatable`][const_evaluatable] goals). This means that simply checking well formedness of const arguments is sufficient to error if they would fail to evaluate. It is somewhat unclear whether it would make sense to adopt this for type aliases too or if const aliases should stop requiring this for well formedness[^5]. + +[^1]: In the new solver this is done implicitly + +[^2]: There is a subtle difference in how ambiguous aliases in binders are handled between old and new solver. In the old solver we fail to error on some ambiguous aliases inside of higher ranked types whereas the new solver correctly errors. + +[^3]: Ambiguous aliases inside of binders cannot be normalized to inference variables, this will be covered more later. + +[^4]: As checking aliases are non-diverging cannot be done until they are fully concrete, this would either imply that we cant check aliases are well formed before codegen/const-evaluation or that aliases would go from being well-formed to not well-formed after monomorphization. + +[^5]: Const aliases certainly wouldn't be *less* sound than type aliases if we stopped doing this + +[const_evaluatable]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_middle/ty/type.ClauseKind.html#variant.ConstEvaluatable \ No newline at end of file diff --git a/src/doc/rustc-dev-guide/src/notification-groups/fuchsia.md b/src/doc/rustc-dev-guide/src/notification-groups/fuchsia.md new file mode 100644 index 0000000000000..e3c1a7148d3c7 --- /dev/null +++ b/src/doc/rustc-dev-guide/src/notification-groups/fuchsia.md @@ -0,0 +1,12 @@ +# Fuchsia notification group + +**Github Label:** [O-fuchsia]
    +**Ping command:** `@rustbot ping fuchsia` + +[O-fuchsia]: https://github.com/rust-lang/rust/labels/O-fuchsia + +This list will be used to notify [Fuchsia][fuchsia] maintainers +when the compiler or the standard library changes in a way that would +break the Fuchsia integration. + +[fuchsia]: ../tests/ecosystem-test-jobs/fuchsia.md diff --git a/src/doc/rustc-dev-guide/src/panic-implementation.md b/src/doc/rustc-dev-guide/src/panic-implementation.md index f358742866719..468190ffccd50 100644 --- a/src/doc/rustc-dev-guide/src/panic-implementation.md +++ b/src/doc/rustc-dev-guide/src/panic-implementation.md @@ -1,4 +1,4 @@ -# Panicking in rust +# Panicking in Rust diff --git a/src/doc/rustc-dev-guide/src/parallel-rustc.md b/src/doc/rustc-dev-guide/src/parallel-rustc.md index 690fb19c9f524..ce69b66c2daf5 100644 --- a/src/doc/rustc-dev-guide/src/parallel-rustc.md +++ b/src/doc/rustc-dev-guide/src/parallel-rustc.md @@ -1,4 +1,4 @@ -# Parallel Compilation +# Parallel compilation
    As of November 2024, @@ -28,7 +28,7 @@ The following sections are kept for now but are quite outdated. [codegen]: backend/codegen.md -## Code Generation +## Code generation During monomorphization the compiler splits up all the code to be generated into smaller chunks called _codegen units_. These are then generated by @@ -38,7 +38,7 @@ occurs in the [`rustc_codegen_ssa::base`] module. [`rustc_codegen_ssa::base`]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_codegen_ssa/base/index.html -## Data Structures +## Data structures The underlying thread-safe data-structures used in the parallel compiler can be found in the [`rustc_data_structures::sync`] module. These data structures @@ -83,7 +83,7 @@ can be accessed directly through `Deref::deref`. [`rustc_data_structures::sync::worker_local`]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_data_structures/sync/worker_local/index.html [`WorkerLocal`]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_data_structures/sync/worker_local/struct.WorkerLocal.html -## Parallel Iterator +## Parallel iterator The parallel iterators provided by the [`rayon`] crate are easy ways to implement parallelism. In the current implementation of the parallel compiler @@ -124,7 +124,7 @@ the parallel iterator function has been used are as follows: There are still many loops that have the potential to use parallel iterators. -## Query System +## Query system The query model has some properties that make it actually feasible to evaluate multiple queries in parallel without too much effort: diff --git a/src/doc/rustc-dev-guide/src/param_env/param_env_acquisition.md b/src/doc/rustc-dev-guide/src/param_env/param_env_acquisition.md deleted file mode 100644 index f6cff2d6c63cc..0000000000000 --- a/src/doc/rustc-dev-guide/src/param_env/param_env_acquisition.md +++ /dev/null @@ -1,43 +0,0 @@ - -# Which `ParamEnv` do I use? - -When needing a [`ParamEnv`][pe] in the compiler there are a few options for obtaining one: -- The correct env is already in scope simply use it (or pass it down the call stack to where you are). -- The [`tcx.param_env(def_id)` query][param_env_query] -- Use [`ParamEnv::new`][param_env_new] to construct an env with an arbitrary set of where clauses. Then call [`traits::normalize_param_env_or_error`][normalize_env_or_error] which will handle normalizing and elaborating all the where clauses in the env for you. -- Creating an empty environment via [`ParamEnv::reveal_all`][env_reveal_all] or [`ParamEnv::empty`][env_empty] - -In the large majority of cases a `ParamEnv` when required already exists somewhere in scope or above in the call stack and should be passed down. A non exhaustive list of places where you might find an existing `ParamEnv`: -- During typeck `FnCtxt` has a [`param_env` field][fnctxt_param_env] -- When writing late lints the `LateContext` has a [`param_env` field][latectxt_param_env] -- During well formedness checking the `WfCheckingCtxt` has a [`param_env` field][wfckctxt_param_env] -- The `TypeChecker` used by Mir Typeck has a [`param_env` field][mirtypeck_param_env] -- In the next-gen trait solver all `Goal`s have a [`param_env` field][goal_param_env] specifying what environment to prove the goal in -- When editing an existing [`TypeRelation`][typerelation] if it implements `PredicateEmittingRelation` then a [`param_env` method][typerelation_param_env] will be available. - -Using the `param_env` query to obtain an env is generally done at the start of some kind of analysis and then passed everywhere that a `ParamEnv` is required. For example the type checker will create a `ParamEnv` for the item it is type checking and then pass it around everywhere. - -Creating an env from an arbitrary set of where clauses is usually unnecessary and should only be done if the environment you need does not correspond to an actual item in the source code (i.e. [`compare_method_predicate_entailment`][method_pred_entailment] as mentioned earlier). - -Creating an empty environment via `ParamEnv::empty` is almost always wrong. There are very few places where we actually know that the environment should be empty. One of the only places where we do actually know this is after monomorphization, however the `ParamEnv` there should be constructed via `ParamEnv::reveal_all` instead as at this point we should be able to determine the hidden type of opaque types. Codegen/Post-mono is one of the only places that should be using `ParamEnv::reveal_all`. - -An additional piece of complexity here is specifying the `Reveal` (see linked docs for explanation of what reveal does) used for the `ParamEnv`. When constructing a param env using the `param_env` query it will have `Reveal::UserFacing`, if `Reveal::All` is desired then the [`tcx.param_env_reveal_all_normalized`][env_reveal_all_normalized] query can be used instead. - -The `ParamEnv` type has a method [`ParamEnv::with_reveal_all_normalized`][with_reveal_all] which converts an existing `ParamEnv` into one with `Reveal::All` specified. Where possible the previously mentioned query should be preferred as it is more efficient. - -[param_env_new]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_middle/ty/struct.ParamEnv.html#method.new -[normalize_env_or_error]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_trait_selection/traits/fn.normalize_param_env_or_error.html -[fnctxt_param_env]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_hir_typeck/fn_ctxt/struct.FnCtxt.html#structfield.param_env -[latectxt_param_env]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_lint/context/struct.LateContext.html#structfield.param_env -[wfckctxt_param_env]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_hir_analysis/check/wfcheck/struct.WfCheckingCtxt.html#structfield.param_env -[goal_param_env]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_infer/infer/canonical/ir/solve/struct.Goal.html#structfield.param_env -[typerelation_param_env]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_infer/infer/trait.PredicateEmittingRelation.html#tymethod.param_env -[typerelation]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_middle/ty/relate/trait.TypeRelation.html -[mirtypeck_param_env]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_borrowck/type_check/struct.TypeChecker.html#structfield.param_env -[env_reveal_all_normalized]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_middle/ty/context/struct.TyCtxt.html#method.param_env_reveal_all_normalized -[with_reveal_all]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_middle/ty/struct.ParamEnv.html#method.with_reveal_all_normalized -[env_reveal_all]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_middle/ty/struct.ParamEnv.html#method.reveal_all -[env_empty]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_middle/ty/struct.ParamEnv.html#method.empty -[pe]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_middle/ty/struct.ParamEnv.html -[param_env_query]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_hir_typeck/fn_ctxt/struct.FnCtxt.html#structfield.param_env -[method_pred_entailment]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_hir_analysis/check/compare_impl_item/fn.compare_method_predicate_entailment.html diff --git a/src/doc/rustc-dev-guide/src/param_env/param_env_construction_internals.md b/src/doc/rustc-dev-guide/src/param_env/param_env_construction_internals.md deleted file mode 100644 index 69a262a176e09..0000000000000 --- a/src/doc/rustc-dev-guide/src/param_env/param_env_construction_internals.md +++ /dev/null @@ -1,83 +0,0 @@ - -# How are `ParamEnv`'s constructed internally? - -Creating a [`ParamEnv`][pe] is more complicated than simply using the list of where clauses defined on an item as written by the user. We need to both elaborate supertraits into the env and fully normalize all aliases. This logic is handled by [`traits::normalize_param_env_or_error`][normalize_env_or_error] (even though it does not mention anything about elaboration). - -## Elaborating supertraits - -When we have a function such as `fn foo()` we would like to be able to prove `T: Clone` inside of the function as the `Copy` trait has a `Clone` supertrait. Constructing a `ParamEnv` looks at all of the trait bounds in the env and explicitly adds new where clauses to the `ParamEnv` for any supertraits found on the traits. - -A concrete example would be the following function: -```rust -trait Trait: SuperTrait {} -trait SuperTrait: SuperSuperTrait {} - -// `bar`'s unelaborated `ParamEnv` would be: -// `[T: Sized, T: Copy, T: Trait]` -fn bar(a: T) { - requires_impl(a); -} - -fn requires_impl(a: T) {} -``` - -If we did not elaborate the env then the `requires_impl` call would fail to typecheck as we would not be able to prove `T: Clone` or `T: SuperSuperTrait`. In practice we elaborate the env which means that `bar`'s `ParamEnv` is actually: -`[T: Sized, T: Copy, T: Clone, T: Trait, T: SuperTrait, T: SuperSuperTrait]` -This allows us to prove `T: Clone` and `T: SuperSuperTrait` when type checking `bar`. - -The `Clone` trait has a `Sized` supertrait however we do not end up with two `T: Sized` bounds in the env (one for the supertrait and one for the implicitly added `T: Sized` bound). This is because the elaboration process (implemented via [`util::elaborate`][elaborate]) deduplicates the where clauses to avoid this. - -As a side effect this also means that even if no actual elaboration of supertraits takes place, the existing where clauses in the env are _also_ deduplicated. See the following example: -```rust -trait Trait {} -// The unelaborated `ParamEnv` would be: -// `[T: Sized, T: Trait, T: Trait]` -// but after elaboration it would be: -// `[T: Sized, T: Trait]` -fn foo() {} -``` - -The [next-gen trait solver][next-gen-solver] also requires this elaboration to take place. - -[elaborate]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_infer/traits/util/fn.elaborate.html -[next-gen-solver]: ../solve/trait-solving.md - -## Normalizing all bounds - -In the old trait solver the where clauses stored in `ParamEnv` are required to be fully normalized or else the trait solver will not function correctly. A concrete example of needing to normalize the `ParamEnv` is the following: -```rust -trait Trait { - type Assoc; -} - -trait Other { - type Bar; -} - -impl Other for T { - type Bar = u32; -} - -// `foo`'s unnormalized `ParamEnv` would be: -// `[T: Sized, U: Sized, U: Trait]` -fn foo(a: U) -where - U: Trait<::Bar>, -{ - requires_impl(a); -} - -fn requires_impl>(_: U) {} -``` - -As humans we can tell that `::Bar` is equal to `u32` so the trait bound on `U` is equivalent to `U: Trait`. In practice trying to prove `U: Trait` in the old solver in this environment would fail as it is unable to determine that `::Bar` is equal to `u32`. - -To work around this we normalize `ParamEnv`'s after constructing them so that `foo`'s `ParamEnv` is actually: `[T: Sized, U: Sized, U: Trait]` which means the trait solver is now able to use the `U: Trait` in the `ParamEnv` to determine that the trait bound `U: Trait` holds. - -This workaround does not work in all cases as normalizing associated types requires a `ParamEnv` which introduces a bootstrapping problem. We need a normalized `ParamEnv` in order for normalization to give correct results, but we need to normalize to get that `ParamEnv`. Currently we normalize the `ParamEnv` once using the unnormalized param env and it tends to give okay results in practice even though there are some examples where this breaks ([example]). - -In the next-gen trait solver the requirement for all where clauses in the `ParamEnv` to be fully normalized is not present and so we do not normalize when constructing `ParamEnv`s. - -[example]: https://play.rust-lang.org/?version=stable&mode=debug&edition=2021&gist=e6933265ea3e84eaa47019465739992c -[pe]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_middle/ty/struct.ParamEnv.html -[normalize_env_or_error]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_trait_selection/traits/fn.normalize_param_env_or_error.html diff --git a/src/doc/rustc-dev-guide/src/param_env/param_env_summary.md b/src/doc/rustc-dev-guide/src/param_env/param_env_summary.md deleted file mode 100644 index 0ff6d8fc394e2..0000000000000 --- a/src/doc/rustc-dev-guide/src/param_env/param_env_summary.md +++ /dev/null @@ -1,18 +0,0 @@ -# The `ParamEnv` type - -## Summary - -The [`ParamEnv`][pe] is used to store information about the environment that we are interacting with the type system from. For example the set of in-scope where-clauses is stored in `ParamEnv` as it differs between each item whereas the list of user written impls is not stored in the `ParamEnv` as this does not change for each item. - -This chapter of the dev guide covers: -- A high level summary of what a `ParamEnv` is and what it is used for -- Technical details about what the process of constructing a `ParamEnv` involves -- Guidance about how to acquire a `ParamEnv` when one is required - -## Bundling - -A useful API on `ParamEnv` is the [`and`][and] method which allows bundling a value with the `ParamEnv`. The `and` method produces a [`ParamEnvAnd`][pea] making it clearer that using the inner value is intended to be done in that specific environment. - -[and]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_middle/ty/struct.ParamEnv.html#method.and -[pe]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_middle/ty/struct.ParamEnv.html -[pea]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_middle/ty/struct.ParamEnvAnd.html \ No newline at end of file diff --git a/src/doc/rustc-dev-guide/src/param_env/param_env_what_is_it.md b/src/doc/rustc-dev-guide/src/param_env/param_env_what_is_it.md deleted file mode 100644 index 5c2f4d594052e..0000000000000 --- a/src/doc/rustc-dev-guide/src/param_env/param_env_what_is_it.md +++ /dev/null @@ -1,59 +0,0 @@ - -# What is a `ParamEnv`? - -The type system relies on information in the environment in order for it to function correctly. This information is stored in the [`ParamEnv`][pe] type and it is important to use the correct `ParamEnv` when interacting with the type system. - -The information represented by `ParamEnv` is a list of in-scope where-clauses, and a `Reveal` (see linked docs for more information). A `ParamEnv` typically corresponds to a specific item's where clauses, some clauses are not explicitly written bounds and instead are implicitly added in [`predicates_of`][predicates_of] such as `ConstArgHasType` or some implied bounds. - -A `ParamEnv` can also be created with arbitrary data that is not derived from a specific item such as in [`compare_method_predicate_entailment`][method_pred_entailment] which creates a hybrid `ParamEnv` consisting of the impl's where clauses and the trait definition's function's where clauses. In most cases `ParamEnv`s are initially created via the [`param_env` query][query] which returns a `ParamEnv` derived from the provided item's where clauses. - -If we have a function such as: -```rust -// `foo` would have a `ParamEnv` of: -// `[T: Sized, T: Trait, ::Assoc: Clone]` -fn foo() -where - ::Assoc: Clone, -{} -``` -If we were conceptually inside of `foo` (for example, type-checking or linting it) we would use this `ParamEnv` everywhere that we interact with the type system. This would allow things such as normalization (TODO: write a chapter about normalization and link it), evaluating generic constants, and proving where clauses/goals, to rely on `T` being sized, implementing `Trait`, etc. - -A more concrete example: -```rust -// `foo` would have a `ParamEnv` of: -// `[T: Sized, T: Clone]` -fn foo(a: T) { - // when typechecking `foo` we require all the where clauses on `bar` - // to hold in order for it to be legal to call. This means we have to - // prove `T: Clone`. As we are type checking `foo` we use `foo`'s - // environment when trying to check that `T: Clone` holds. - // - // Trying to prove `T: Clone` with a `ParamEnv` of `[T: Sized, T: Clone]` - // will trivially succeed as bound we want to prove is in our environment. - requires_clone(a); -} -``` - -Or alternatively an example that would not compile: -```rust -// `foo2` would have a `ParamEnv` of: -// `[T: Sized]` -fn foo2(a: T) { - // When typechecking `foo2` we attempt to prove `T: Clone`. - // As we are type checking `foo2` we use `foo2`'s environment - // when trying to prove `T: Clone`. - // - // Trying to prove `T: Clone` with a `ParamEnv` of `[T: Sized]` will - // fail as there is nothing in the environment telling the trait solver - // that `T` implements `Clone` and there exists no user written impl - // that could apply. - requires_clone(a); -} -``` - -It's very important to use the correct `ParamEnv` when interacting with the type system as otherwise it can lead to ICEs or things compiling when they shouldn't (or vice versa). See [#82159](https://github.com/rust-lang/rust/pull/82159) and [#82067](https://github.com/rust-lang/rust/pull/82067) as examples of PRs that changed rustc to use the correct param env to avoid ICE. Determining how to acquire the correct `ParamEnv` is explained later in this chapter. - -[predicates_of]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_hir_analysis/collect/predicates_of/fn.predicates_of.html -[method_pred_entailment]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_hir_analysis/check/compare_impl_item/fn.compare_method_predicate_entailment.html -[pe]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_middle/ty/struct.ParamEnv.html -[query]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_middle/ty/context/struct.TyCtxt.html#method.param_env diff --git a/src/doc/rustc-dev-guide/src/queries/incremental-compilation-in-detail.md b/src/doc/rustc-dev-guide/src/queries/incremental-compilation-in-detail.md index 4133b196c0af7..03c822d4feed6 100644 --- a/src/doc/rustc-dev-guide/src/queries/incremental-compilation-in-detail.md +++ b/src/doc/rustc-dev-guide/src/queries/incremental-compilation-in-detail.md @@ -1,4 +1,4 @@ -# Incremental Compilation In Detail +# Incremental Compilation in detail diff --git a/src/doc/rustc-dev-guide/src/queries/query-evaluation-model-in-detail.md b/src/doc/rustc-dev-guide/src/queries/query-evaluation-model-in-detail.md index f7f204bf79d3b..444e20bc580e3 100644 --- a/src/doc/rustc-dev-guide/src/queries/query-evaluation-model-in-detail.md +++ b/src/doc/rustc-dev-guide/src/queries/query-evaluation-model-in-detail.md @@ -1,4 +1,4 @@ -# The Query Evaluation Model in Detail +# The Query Evaluation Model in detail diff --git a/src/doc/rustc-dev-guide/src/rustc-driver/getting-diagnostics.md b/src/doc/rustc-dev-guide/src/rustc-driver/getting-diagnostics.md index 1043df6ecb65c..518cf4e821a72 100644 --- a/src/doc/rustc-dev-guide/src/rustc-driver/getting-diagnostics.md +++ b/src/doc/rustc-dev-guide/src/rustc-driver/getting-diagnostics.md @@ -7,7 +7,7 @@ otherwise be printed to stderr. To get diagnostics from the compiler, configure [`rustc_interface::Config`] to output diagnostic to a buffer, -and run [`TyCtxt.analysis`]. +and run [`rustc_hir_typeck::typeck`] for each item. ```rust {{#include ../../examples/rustc-interface-getting-diagnostics.rs}} @@ -16,3 +16,4 @@ and run [`TyCtxt.analysis`]. [`rustc_interface`]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_interface/index.html [`rustc_interface::Config`]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_interface/interface/struct.Config.html [`TyCtxt.analysis`]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_interface/passes/fn.analysis.html +[`rustc_hir_typeck::typeck`]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_hir_typeck/fn.typeck.html diff --git a/src/doc/rustc-dev-guide/src/rustc-driver/remarks-on-perma-unstable-features.md b/src/doc/rustc-dev-guide/src/rustc-driver/remarks-on-perma-unstable-features.md new file mode 100644 index 0000000000000..b434cfc9cf146 --- /dev/null +++ b/src/doc/rustc-dev-guide/src/rustc-driver/remarks-on-perma-unstable-features.md @@ -0,0 +1,54 @@ +# Remarks on perma unstable features + +## `rustc_private` + +### Overview + +The `rustc_private` feature allows external crates to use compiler internals. + +### Using `rustc-private` with Official Toolchains + +When using the `rustc_private` feature with official Rust toolchains distributed via rustup, you need to install two additional components: + +1. **`rustc-dev`**: Provides compiler libraries +2. **`llvm-tools`**: Provides LLVM libraries required for linking + +#### Installation Steps + +Install both components using rustup: + +```text +rustup component add rustc-dev llvm-tools +``` + +#### Common Error + +Without the `llvm-tools` component, you'll encounter linking errors like: + +```text +error: linking with `cc` failed: exit status: 1 + | + = note: rust-lld: error: unable to find library -lLLVM-{version} +``` + +### Using `rustc-private` with Custom Toolchains + +For custom-built toolchains or environments not using rustup, additional configuration is typically required: + +#### Requirements + +- LLVM libraries must be available in your system's library search paths +- The LLVM version must match the one used to build your Rust toolchain + +#### Troubleshooting Steps + +1. **Check LLVM installation**: Verify LLVM is installed and accessible +2. **Configure library paths**: You may need to set environment variables: + ```text + export LD_LIBRARY_PATH=/path/to/llvm/lib:$LD_LIBRARY_PATH + ``` +3. **Check version compatibility**: Ensure your LLVM version is compatible with your Rust toolchain + +### Additional Resources + +- [GitHub Issue #137421](https://github.com/rust-lang/rust/issues/137421): Explains that `rustc_private` linker failures often occur because `llvm-tools` is not installed diff --git a/src/doc/rustc-dev-guide/src/rustdoc-internals/rustdoc-test-suite.md b/src/doc/rustc-dev-guide/src/rustdoc-internals/rustdoc-test-suite.md new file mode 100644 index 0000000000000..169b95a7e1adc --- /dev/null +++ b/src/doc/rustc-dev-guide/src/rustdoc-internals/rustdoc-test-suite.md @@ -0,0 +1,112 @@ +# The `rustdoc` test suite + +This page is specifically about the test suite named `rustdoc`. +For other test suites used for testing rustdoc, see [Rustdoc tests](../rustdoc.md#tests). + +The `rustdoc` test suite is specifically used to test the HTML output of rustdoc. + +This is achieved by means of `htmldocck.py`, a custom checker script that leverages [XPath]. + +[XPath]: https://en.wikipedia.org/wiki/XPath + +## Directives +Directives to htmldocck are similar to those given to `compiletest` in that they take the form of `//@` comments. + +In addition to the directives listed here, +`rustdoc` tests also support most +[compiletest directives](../tests/directives.html). + +All `PATH`s in directives are relative to the the rustdoc output directory (`build/TARGET/test/rustdoc/TESTNAME`), +so it is conventional to use a `#![crate_name = "foo"]` attribute to avoid +having to write a long crate name multiple times. +To avoid repetion, `-` can be used in any `PATH` argument to re-use the previous `PATH` argument. + +All arguments take the form of quoted strings +(both single and double quotes are supported), +with the exception of `COUNT` and the special `-` form of `PATH`. + +Directives are assertions that place constraints on the generated HTML. + +All directives (except `files`) can be negated by putting a `!` in front of their name. + +Similar to shell commands, +directives can extend across multiple lines if their last char is `\`. +In this case, the start of the next line should be `//`, with no `@`. + +For example, `//@ !has 'foo/struct.Bar.html'` checks that crate `foo` does not have a page for a struct named `Bar` in the crate root. + +### `has` + +Usage 1: `//@ has PATH` +Usage 2: `//@ has PATH XPATH PATTERN` + +In the first form, `has` checks that a given file exists. + +In the second form, `has` is an alias for `matches`, +except `PATTERN` is a whitespace-normalized[^1] string instead of a regex. + +### `matches` + +Usage: `//@ matches PATH XPATH PATTERN` + +Checks that the text of each element selected by `XPATH` in `PATH` matches the python-flavored regex `PATTERN`. + +### `matchesraw` + +Usage: `//@ matchesraw PATH PATTERN` + +Checks that the contents of the file `PATH` matches the regex `PATTERN`. + +### `hasraw` + +Usage: `//@ hasraw PATH PATTERN` + +Same as `matchesraw`, except `PATTERN` is a whitespace-normalized[^1] string instead of a regex. + +### `count` + +Usage: `//@ count PATH XPATH COUNT` + +Checks that there are exactly `COUNT` matches for `XPATH` within the file `PATH`. + +### `snapshot` + +Usage: `//@ snapshot NAME PATH XPATH` + +Creates a snapshot test named NAME. +A snapshot test captures a subtree of the DOM, at the location +determined by the XPath, and compares it to a pre-recorded value +in a file. The file's name is the test's name with the `.rs` extension +replaced with `.NAME.html`, where NAME is the snapshot's name. + +htmldocck supports the `--bless` option to accept the current subtree +as expected, saving it to the file determined by the snapshot's name. +compiletest's `--bless` flag is forwarded to htmldocck. + +### `has-dir` + +Usage: `//@ has-dir PATH` + +Checks for the existance of directory `PATH`. + +### `files` + +Usage: `//@ files PATH ENTRIES` + +Checks that the directory `PATH` contains exactly `ENTRIES`. +`ENTRIES` is a python list of strings inside a quoted string, +as if it were to be parsed by `eval`. +(note that the list is actually parsed by `shlex.split`, +so it cannot contain arbitrary python expressions). + +Example: `//@ files "foo/bar" '["index.html", "sidebar-items.js"]'` + +[^1]: Whitespace normalization means that all spans of consecutive whitespace are replaced with a single space. The files themselves are also whitespace-normalized. + +## Limitations +`htmldocck.py` uses the xpath implementation from the standard library. +This leads to several limitations: +* All `XPATH` arguments must start with `//` due to a flaw in the implemention. +* Many XPath features (functions, axies, etc.) are not supported. +* Only well-formed HTML can be parsed (hopefully rustdoc doesn't output mismatched tags). + diff --git a/src/doc/rustc-dev-guide/src/rustdoc.md b/src/doc/rustc-dev-guide/src/rustdoc.md index 356698148e405..e36d6a388a981 100644 --- a/src/doc/rustc-dev-guide/src/rustdoc.md +++ b/src/doc/rustc-dev-guide/src/rustdoc.md @@ -77,29 +77,33 @@ does is call the `main()` that's in this crate's `lib.rs`, though.) `doctest.rs`. * The Markdown renderer is loaded up in `html/markdown.rs`, including functions for extracting doctests from a given block of Markdown. -* The tests on the structure of rustdoc HTML output are located in `tests/rustdoc`, where - they're handled by the test runner of bootstrap and the supplementary script - `src/etc/htmldocck.py`. * Frontend CSS and JavaScript are stored in `html/static/`. ## Tests -* All paths in this section are relative to `tests` in the rust-lang/rust repository. -* Tests on search engine and index are located in `rustdoc-js` and `rustdoc-js-std`. +* Tests on search engine and index are located in `tests/rustdoc-js` and `tests/rustdoc-js-std`. The format is specified [in the search guide](rustdoc-internals/search.md#testing-the-search-engine). * Tests on the "UI" of rustdoc (the terminal output it produces when run) are in - `rustdoc-ui` + `tests/rustdoc-ui` * Tests on the "GUI" of rustdoc (the HTML, JS, and CSS as rendered in a browser) - are in `rustdoc-gui`. These use a [NodeJS tool called + are in `tests/rustdoc-gui`. These use a [NodeJS tool called browser-UI-test](https://github.com/GuillaumeGomez/browser-UI-test/) that uses puppeteer to run tests in a headless browser and check rendering and - interactivity. + interactivity. For information on how to write this form of test, + see [`tests/rustdoc-gui/README.md`][rustdoc-gui-readme] + as well as [the description of the `.goml` format][goml-script] * Additionally, JavaScript type annotations are written using [TypeScript-flavored JSDoc] comments and an external d.ts file. The code itself is plain, valid JavaScript; we only use tsc as a linter. +* The tests on the structure of rustdoc HTML output are located in `tests/rustdoc`, + where they're handled by the test runner of bootstrap and + the supplementary script `src/etc/htmldocck.py`. + [These tests have several extra directives available to them](./rustdoc-internals/rustdoc-test-suite.md). [TypeScript-flavored JSDoc]: https://www.typescriptlang.org/docs/handbook/jsdoc-supported-types.html +[rustdoc-gui-readme]: https://github.com/rust-lang/rust/blob/master/tests/rustdoc-gui/README.md +[goml-script]: https://github.com/GuillaumeGomez/browser-UI-test/blob/master/goml-script.md ## Constraints diff --git a/src/doc/rustc-dev-guide/src/serialization.md b/src/doc/rustc-dev-guide/src/serialization.md index 0ff0499012863..670a37ffb0a97 100644 --- a/src/doc/rustc-dev-guide/src/serialization.md +++ b/src/doc/rustc-dev-guide/src/serialization.md @@ -1,4 +1,4 @@ -# Serialization in Rustc +# Serialization in rustc rustc has to [serialize] and deserialize various data during compilation. Specifically: diff --git a/src/doc/rustc-dev-guide/src/solve/normalization.md b/src/doc/rustc-dev-guide/src/solve/normalization.md deleted file mode 100644 index 99dc20c46b5d4..0000000000000 --- a/src/doc/rustc-dev-guide/src/solve/normalization.md +++ /dev/null @@ -1,127 +0,0 @@ -# Normalization in the new solver - -> FIXME: Normalization has been changed significantly since this chapter was written. - -With the new solver we've made some fairly significant changes to normalization when compared -to the existing implementation. - -We now differentiate between "one-step normalization", "structural normalization" and -"deep normalization". - -## One-step normalization - -One-step normalization is implemented via `NormalizesTo` goals. Unlike other goals -in the trait solver, `NormalizesTo` always expects the term to be an unconstrained -inference variable[^opaques]. Think of it as a function, taking an alias as input -and returning its underlying value. If the alias is rigid, `NormalizesTo` fails and -returns `NoSolution`. This is the case for `::Assoc` if there's a `T: Trait` -where-bound and for opaque types with `Reveal::UserFacing` unless they are in the -defining scope. We must not treat any aliases as rigid in coherence. - -The underlying value may itself be an unnormalized alias, e.g. -`NormalizesTo(<<() as Id>::This as Id>::This)` only returns `<() as Id>::This`, -even though that alias can be further normalized to `()`. As the term is -always an unconstrained inference variable, the expected term cannot influence -normalization, see [trait-system-refactor-initiative#22] for more. - -Only ever computing `NormalizesTo` goals with an unconstrained inference variable -requires special solver support. It is only used by `AliasRelate` goals and pending -`NormalizesTo` goals are tracked separately from other goals: [source][try-eval-norm]. -As the expected term is always erased in `NormalizesTo`, we have to return its -ambiguous nested goals to its caller as not doing so weakens inference. See -[#122687] for more details. - -[trait-system-refactor-initiative#22]: https://github.com/rust-lang/trait-system-refactor-initiative/issues/22 -[try-eval-norm]: https://github.com/rust-lang/rust/blob/2627e9f3012a97d3136b3e11bf6bd0853c38a534/compiler/rustc_trait_selection/src/solve/eval_ctxt/mod.rs#L523-L537 -[#122687]: https://github.com/rust-lang/rust/pull/122687 - -## `AliasRelate` and structural normalization - -We structurally normalize an alias by applying one-step normalization until -we end up with a rigid alias, ambiguity, or overflow. This is done by repeatedly -evaluating `NormalizesTo` goals inside of a snapshot: [source][structural_norm]. - -`AliasRelate(lhs, rhs)` is implemented by first structurally normalizing both the -`lhs` and the `rhs` and then relating the resulting rigid types (or inference -variables). Importantly, if `lhs` or `rhs` ends up as an alias, this alias can -now be treated as rigid and gets unified without emitting a nested `AliasRelate` -goal: [source][structural-relate]. - -This means that `AliasRelate` with an unconstrained `rhs` ends up functioning -similar to `NormalizesTo`, acting as a function which fully normalizes `lhs` -before assigning the resulting rigid type to an inference variable. This is used by -`fn structurally_normalize_ty` both [inside] and [outside] of the trait solver. -This has to be used whenever we match on the value of some type, both inside -and outside of the trait solver. - - - -[structural_norm]: https://github.com/rust-lang/rust/blob/2627e9f3012a97d3136b3e11bf6bd0853c38a534/compiler/rustc_trait_selection/src/solve/alias_relate.rs#L140-L175 -[structural-relate]: https://github.com/rust-lang/rust/blob/a0569fa8f91b5271e92d2f73fd252de7d3d05b9c/compiler/rustc_trait_selection/src/solve/alias_relate.rs#L88-L107 -[inside]: https://github.com/rust-lang/rust/blob/a0569fa8f91b5271e92d2f73fd252de7d3d05b9c/compiler/rustc_trait_selection/src/solve/mod.rs#L278-L299 -[outside]: https://github.com/rust-lang/rust/blob/a0569fa8f91b5271e92d2f73fd252de7d3d05b9c/compiler/rustc_trait_selection/src/traits/structural_normalize.rs#L17-L48 - -## Deep normalization - -By walking over a type, and using `fn structurally_normalize_ty` for each encountered -alias, it is possible to deeply normalize a type, normalizing all aliases as much as -possible. However, this only works for aliases referencing bound variables if they are -not ambiguous as we're unable to replace the alias with a corresponding inference -variable without leaking universes. - - - -[generalize-no-alias]: https://github.com/rust-lang/rust/blob/a0569fa8f91b5271e92d2f73fd252de7d3d05b9c/compiler/rustc_infer/src/infer/relate/generalize.rs#L353-L358 - -## Outside of the trait solver - -The core type system - relating types and trait solving - will not need deep -normalization with the new solver. There are still some areas which depend on it. -For these areas there is the function `At::deeply_normalize`. Without additional -trait solver support deep normalization does not always work in case of ambiguity. -Luckily deep normalization is currently only necessary in places where there is no ambiguity. -`At::deeply_normalize` immediately fails if there's ambiguity. - -If we only care about the outermost layer of types, we instead use -`At::structurally_normalize` or `FnCtxt::(try_)structurally_resolve_type`. -Unlike `At::deeply_normalize`, structural normalization is also used in cases where we -have to handle ambiguity. - -Because this may result in behavior changes depending on how the trait solver handles -ambiguity, it is safer to also require full normalization there. This happens in -`FnCtxt::structurally_resolve_type` which always emits a hard error if the self type ends -up as an inference variable. There are some existing places which have a fallback for -inference variables instead. These places use `try_structurally_resolve_type` instead. - -## Why deep normalization with ambiguity is hard - -Fully correct deep normalization is very challenging, especially with the new solver -given that we do not want to deeply normalize inside of the solver. Mostly deeply normalizing -but sometimes failing to do so is bound to cause very hard to minimize and understand bugs. -If possible, avoiding any reliance on deep normalization entirely therefore feels preferable. - -If the solver itself does not deeply normalize, any inference constraints returned by the -solver would require normalization. Handling this correctly is ugly. This also means that -we change goals we provide to the trait solver by "normalizing away" some projections. - -The way we (mostly) guarantee deep normalization with the old solver is by eagerly replacing -the projection with an inference variable and emitting a nested `Projection` goal. This works -as `Projection` goals in the old solver deeply normalize. Unless we add another `PredicateKind` -for deep normalization to the new solver we cannot emulate this behavior. This does not work -for projections with bound variables, sometimes leaving them unnormalized. An approach which -also supports projections with bound variables will be even more involved. - -[^opaques]: opaque types are currently handled a bit differently. this may change in the future diff --git a/src/doc/rustc-dev-guide/src/solve/opaque-types.md b/src/doc/rustc-dev-guide/src/solve/opaque-types.md index 672aab7708018..509c34a4d3a75 100644 --- a/src/doc/rustc-dev-guide/src/solve/opaque-types.md +++ b/src/doc/rustc-dev-guide/src/solve/opaque-types.md @@ -33,7 +33,7 @@ For opaque types in the defining scope and in the implicit-negative coherence mo always done in two steps. Outside of the defining scope `normalizes-to` for opaques always returns `Err(NoSolution)`. -We start by trying to to assign the expected type as a hidden type. +We start by trying to assign the expected type as a hidden type. In the implicit-negative coherence mode, this currently always results in ambiguity without interacting with the opaque types storage. We could instead add allow 'defining' all opaque types, diff --git a/src/doc/rustc-dev-guide/src/solve/significant-changes.md b/src/doc/rustc-dev-guide/src/solve/significant-changes.md index c82b5d468961a..eac8f0318fb19 100644 --- a/src/doc/rustc-dev-guide/src/solve/significant-changes.md +++ b/src/doc/rustc-dev-guide/src/solve/significant-changes.md @@ -106,4 +106,4 @@ their ambiguous nested goals are returned to the caller which then evaluates the See [#122687] for more details. [#122687]: https://github.com/rust-lang/rust/pull/122687 -[normalization]: ./normalization.md +[normalization]: ../normalization.md diff --git a/src/doc/rustc-dev-guide/src/test-implementation.md b/src/doc/rustc-dev-guide/src/test-implementation.md index bee783c0fa66c..e906dd29f25f5 100644 --- a/src/doc/rustc-dev-guide/src/test-implementation.md +++ b/src/doc/rustc-dev-guide/src/test-implementation.md @@ -83,7 +83,7 @@ with your hand-written one, it will not share a [Symbol][Symbol]. This technique prevents name collision during code generation and is the foundation of Rust's [`macro`] hygiene. -## Step 2: Harness Generation +## Step 2: Harness generation Now that our tests are accessible from the root of our crate, we need to do something with them using [`rustc_ast`][ast] generates a module like so: @@ -106,7 +106,7 @@ called [`test`][test] that is part of Rust core, that implements all of the runtime for testing. [`test`][test]'s interface is unstable, so the only stable way to interact with it is through the `#[test]` macro. -## Step 3: Test Object Generation +## Step 3: Test object generation If you've written tests in Rust before, you may be familiar with some of the optional attributes available on test functions. For example, a test can be diff --git a/src/doc/rustc-dev-guide/src/tests/best-practices.md b/src/doc/rustc-dev-guide/src/tests/best-practices.md index 6905ee13283a6..2bdc7f3a2431b 100644 --- a/src/doc/rustc-dev-guide/src/tests/best-practices.md +++ b/src/doc/rustc-dev-guide/src/tests/best-practices.md @@ -175,6 +175,8 @@ See [compiletest directives] for a listing of directives. - For `ignore-*`/`needs-*`/`only-*` directives, unless extremely obvious, provide a brief remark on why the directive is needed. E.g. `"//@ ignore-wasi (wasi codegens the main symbol differently)"`. +- When using `//@ ignore-auxiliary`, specify the corresponding main test files, + e.g. ``//@ ignore-auxiliary (used by `./foo.rs`)``. ## FileCheck best practices diff --git a/src/doc/rustc-dev-guide/src/tests/ci.md b/src/doc/rustc-dev-guide/src/tests/ci.md index c04f296ba0b14..825be11c82a97 100644 --- a/src/doc/rustc-dev-guide/src/tests/ci.md +++ b/src/doc/rustc-dev-guide/src/tests/ci.md @@ -135,12 +135,16 @@ There are several use-cases for try builds: - Run a specific CI job (e.g. Windows tests) on a PR, to quickly test if it passes the test suite executed by that job. -You can select which CI jobs will -be executed in the try build by adding lines containing `try-job: -` to the PR description. All such specified jobs will be executed -in the try build once the `@bors try` command is used on the PR. If no try -jobs are specified in this way, the jobs defined in the `try` section of -[`jobs.yml`] will be executed by default. +By default, if you send a comment with `@bors try`, the jobs defined in the `try` section of +[`jobs.yml`] will be executed. We call this mode a "fast try build". Such a try build +will not execute any tests, and it will allow compilation warnings. It is useful when you want to +get an optimized toolchain as fast as possible, for a crater run or performance benchmarks, +even if it might not be working fully correctly. + +If you want to run a custom CI job in a try build and make sure that it passes all tests and does +not produce any compilation warnings, you can select CI jobs to be executed by adding lines +containing `try-job: ` to the PR description. All such specified jobs will be executed +in the try build once the `@bors try` command is used on the PR. Each pattern can either be an exact name of a job or a glob pattern that matches multiple jobs, for example `*msvc*` or `*-alt`. You can start at most 20 jobs in a single try build. When using diff --git a/src/doc/rustc-dev-guide/src/tests/codegen-backend-tests/cg_clif.md b/src/doc/rustc-dev-guide/src/tests/codegen-backend-tests/cg_clif.md new file mode 100644 index 0000000000000..030ddd7dff571 --- /dev/null +++ b/src/doc/rustc-dev-guide/src/tests/codegen-backend-tests/cg_clif.md @@ -0,0 +1,3 @@ +# Cranelift codegen backend tests + +TODO: please add some more information to this page. diff --git a/src/doc/rustc-dev-guide/src/tests/codegen-backend-tests/cg_gcc.md b/src/doc/rustc-dev-guide/src/tests/codegen-backend-tests/cg_gcc.md new file mode 100644 index 0000000000000..4caf4c0e0eefa --- /dev/null +++ b/src/doc/rustc-dev-guide/src/tests/codegen-backend-tests/cg_gcc.md @@ -0,0 +1,3 @@ +# GCC codegen backend tests + +TODO: please add some more information to this page. diff --git a/src/doc/rustc-dev-guide/src/tests/codegen-backend-tests/intro.md b/src/doc/rustc-dev-guide/src/tests/codegen-backend-tests/intro.md new file mode 100644 index 0000000000000..6bf46ddcd21a6 --- /dev/null +++ b/src/doc/rustc-dev-guide/src/tests/codegen-backend-tests/intro.md @@ -0,0 +1,13 @@ +# Codegen backend testing + +See also the [Code generation](../../backend/codegen.md) chapter. + +In addition to the primary LLVM codegen backend, the rust-lang/rust CI also runs tests of the [cranelift][cg_clif] and [GCC][cg_gcc] codegen backends in certain test jobs. + +For more details on the tests involved, see: + +- [Cranelift codegen backend tests](./cg_clif.md) +- [GCC codegen backend tests](./cg_gcc.md) + +[cg_clif]: https://github.com/rust-lang/rustc_codegen_cranelift +[cg_gcc]: https://github.com/rust-lang/rustc_codegen_gcc diff --git a/src/doc/rustc-dev-guide/src/tests/directives.md b/src/doc/rustc-dev-guide/src/tests/directives.md index 81aa35f1a4653..dae659e6317b4 100644 --- a/src/doc/rustc-dev-guide/src/tests/directives.md +++ b/src/doc/rustc-dev-guide/src/tests/directives.md @@ -6,7 +6,8 @@ FIXME(jieyouxu) completely revise this chapter. --> -Directives are special comments that tell compiletest how to build and interpret a test. They must appear before the Rust source in the test. They may also appear in `rmake.rs` [run-make tests](compiletest.md#run-make-tests). +Directives are special comments that tell compiletest how to build and interpret a test. +They may also appear in `rmake.rs` [run-make tests](compiletest.md#run-make-tests). They are normally put after the short comment that explains the point of this test. Compiletest test suites use `//@` to signal that a comment is a directive. @@ -100,6 +101,7 @@ for more details. | `normalize-stdout` | Normalize actual stdout with a rule `"" -> ""` before comparing against snapshot | `ui`, `incremental` | `"" -> ""`, ``/`` is regex capture and replace syntax | | `dont-check-compiler-stderr` | Don't check actual compiler stderr vs stderr snapshot | `ui` | N/A | | `dont-check-compiler-stdout` | Don't check actual compiler stdout vs stdout snapshot | `ui` | N/A | +| `dont-require-annotations` | Don't require line annotations for the given diagnostic kind (`//~ KIND`) to be exhaustive | `ui`, `incremental` | `ERROR`, `WARN`, `NOTE`, `HELP`, `SUGGESTION` | | `run-rustfix` | Apply all suggestions via `rustfix`, snapshot fixed output, and check fixed output builds | `ui` | N/A | | `rustfix-only-machine-applicable` | `run-rustfix` but only machine-applicable suggestions | `ui` | N/A | | `exec-env` | Env var to set when executing a test | `ui`, `crashes` | `=` | @@ -122,6 +124,9 @@ means the test won't be compiled or run. * `ignore-X` where `X` is a target detail or other criteria on which to ignore the test (see below) * `only-X` is like `ignore-X`, but will *only* run the test on that target or stage +* `ignore-auxiliary` is intended for files that *participate* in one or more other + main test files but that `compiletest` should not try to build the file itself. + Please backlink to which main test is actually using the auxiliary file. * `ignore-test` always ignores the test. This can be used to temporarily disable a test if it is currently not working, but you want to keep it in tree to re-enable it later. @@ -190,8 +195,13 @@ settings: specified atomic widths, e.g. the test with `//@ needs-target-has-atomic: 8, 16, ptr` will only run if it supports the comma-separated list of atomic widths. -- `needs-dynamic-linking` - ignores if target does not support dynamic linking +- `needs-dynamic-linking` — ignores if target does not support dynamic linking (which is orthogonal to it being unable to create `dylib` and `cdylib` crate types) +- `needs-crate-type` — ignores if target platform does not support one or more + of the comma-delimited list of specified crate types. For example, + `//@ needs-crate-type: cdylib, proc-macro` will cause the test to be ignored + on `wasm32-unknown-unknown` target because the target does not support the + `proc-macro` crate type. The following directives will check LLVM support: @@ -228,14 +238,14 @@ ignoring debuggers. ### Affecting how tests are built -| Directive | Explanation | Supported test suites | Possible values | -|---------------------|----------------------------------------------------------------------------------------------|---------------------------|------------------------------------------------------------------------------| -| `compile-flags` | Flags passed to `rustc` when building the test or aux file | All except for `run-make` | Any valid `rustc` flags, e.g. `-Awarnings -Dfoo`. Cannot be `-Cincremental`. | -| `edition` | Alias for `compile-flags: --edition=xxx` | All except for `run-make` | Any valid `--edition` value | -| `rustc-env` | Env var to set when running `rustc` | All except for `run-make` | `=` | -| `unset-rustc-env` | Env var to unset when running `rustc` | All except for `run-make` | Any env var name | -| `incremental` | Proper incremental support for tests outside of incremental test suite | `ui`, `crashes` | N/A | -| `no-prefer-dynamic` | Don't use `-C prefer-dynamic`, don't build as a dylib via a `--crate-type=dylib` preset flag | `ui`, `crashes` | N/A | +| Directive | Explanation | Supported test suites | Possible values | +|---------------------|----------------------------------------------------------------------------------------------|---------------------------|--------------------------------------------------------------------------------------------| +| `compile-flags` | Flags passed to `rustc` when building the test or aux file | All except for `run-make` | Any valid `rustc` flags, e.g. `-Awarnings -Dfoo`. Cannot be `-Cincremental` or `--edition` | +| `edition` | The edition used to build the test | All except for `run-make` | Any valid `--edition` value | +| `rustc-env` | Env var to set when running `rustc` | All except for `run-make` | `=` | +| `unset-rustc-env` | Env var to unset when running `rustc` | All except for `run-make` | Any env var name | +| `incremental` | Proper incremental support for tests outside of incremental test suite | `ui`, `crashes` | N/A | +| `no-prefer-dynamic` | Don't use `-C prefer-dynamic`, don't build as a dylib via a `--crate-type=dylib` preset flag | `ui`, `crashes` | N/A |
    Tests (outside of `run-make`) that want to use incremental tests not in the diff --git a/src/doc/rustc-dev-guide/src/tests/ecosystem-test-jobs/fuchsia.md b/src/doc/rustc-dev-guide/src/tests/ecosystem-test-jobs/fuchsia.md new file mode 100644 index 0000000000000..b19d94d6ff734 --- /dev/null +++ b/src/doc/rustc-dev-guide/src/tests/ecosystem-test-jobs/fuchsia.md @@ -0,0 +1,177 @@ +# Fuchsia integration tests + +[Fuchsia](https://fuchsia.dev) is an open-source operating system with about 2 +million lines of Rust code.[^loc] It has caught a large number of [regressions] +in the past and was subsequently included in CI. + +## What to do if the Fuchsia job breaks? + +Please contact the [fuchsia][fuchsia-ping] ping group and ask them for help. + +```text +@rustbot ping fuchsia +``` + +## Building Fuchsia in CI + +Fuchsia builds as part of the suite of bors tests that run before a pull request +is merged. + +If you are worried that a pull request might break the Fuchsia builder and want +to test it out before submitting it to the bors queue, simply add this line to +your PR description: + +> try-job: x86_64-fuchsia + +Then when you `@bors try` it will pick the job that builds Fuchsia. + +## Building Fuchsia locally + +Because Fuchsia uses languages other than Rust, it does not use Cargo as a build +system. It also requires the toolchain build to be configured in a [certain +way][build-toolchain]. + +The recommended way to build Fuchsia is to use the Docker scripts that check out +and run a Fuchsia build for you. If you've run Docker tests before, you can +simply run this command from your Rust checkout to download and build Fuchsia +using your local Rust toolchain. + +``` +src/ci/docker/run.sh x86_64-fuchsia +``` + +See the [Testing with Docker](../docker.md) chapter for more details on how to run +and debug jobs with Docker. + +Note that a Fuchsia checkout is *large* – as of this writing, a checkout and +build takes 46G of space – and as you might imagine, it takes a while to +complete. + +### Modifying the Fuchsia checkout + +The main reason you would want to build Fuchsia locally is because you need to +investigate a regression. After running a Docker build, you'll find the Fuchsia +checkout inside the `obj/fuchsia` directory of your Rust checkout. If you +modify the `KEEP_CHECKOUT` line in the [build-fuchsia.sh] script to +`KEEP_CHECKOUT=1`, you can change the checkout as needed and rerun the build +command above. This will reuse all the build results from before. + +You can find more options to customize the Fuchsia checkout in the +[build-fuchsia.sh] script. + +### Customizing the Fuchsia build + +You can find more info about the options used to build Fuchsia in Rust CI in the +[build_fuchsia_from_rust_ci.sh] script invoked by [build-fuchsia.sh]. + +The Fuchsia build system uses [GN], a metabuild system that generates [Ninja] +files and then hands off the work of running the build to Ninja. + +Fuchsia developers use `fx` to run builds and perform other development tasks. +This tool is located in `.jiri_root/bin` of the Fuchsia checkout; you may need +to add this to your `$PATH` for some workflows. + +There are a few `fx` subcommands that are relevant, including: + +- `fx set` accepts build arguments, writes them to `out/default/args.gn`, and + runs GN. +- `fx build` builds the Fuchsia project using Ninja. It will automatically pick + up changes to build arguments and rerun GN. By default it builds everything, + but it also accepts target paths to build specific targets (see below). +- `fx clippy` runs Clippy on specific Rust targets (or all of them). We use this + in the Rust CI build to avoid running codegen on most Rust targets. Underneath + it invokes Ninja, just like `fx build`. The clippy results are saved in json + files inside the build output directory before being printed. + +#### Target paths + +GN uses paths like the following to identify build targets: + +``` +//src/starnix/kernel:starnix_core +``` + +The initial `//` means the root of the checkout, and the remaining slashes are +directory names. The string after `:` is the _target name_ of a target defined +in the `BUILD.gn` file of that directory. + +The target name can be omitted if it is the same as the directory name. In other +words, `//src/starnix/kernel` is the same as `//src/starnix/kernel:kernel`. + +These target paths are used inside `BUILD.gn` files to reference dependencies, +and can also be used in `fx build`. + +#### Modifying compiler flags + +You can put custom compiler flags inside a GN `config` that is added to a +target. As a simple example: + +``` +config("everybody_loops") { + rustflags = [ "-Zeverybody-loops" ] +} + +rustc_binary("example") { + crate_root = "src/bin.rs" + # ...existing keys here... + configs += [ ":everybody_loops" ] +} +``` + +This will add the flag `-Zeverybody-loops` to rustc when building the `example` +target. Note that you can also use [`public_configs`] for a config to be added +to every target that depends on that target. + +If you want to add a flag to every Rust target in the build, you can add +rustflags to the [`//build/config:compiler`] config or to the OS-specific +configs referenced in that file. Note that `cflags` and `ldflags` are ignored on +Rust targets. + +#### Running ninja and rustc commands directly + +Going down one layer, `fx build` invokes `ninja`, which in turn eventually +invokes `rustc`. All build actions are run inside the out directory, which is +usually `out/default` inside the Fuchsia checkout. + +You can get ninja to print the actual command it invokes by forcing that command +to fail, e.g. by adding a syntax error to one of the source files of the target. +Once you have the command, you can run it from inside the output directory. + +After changing the toolchain itself, the build setting `rustc_version_string` in +`out/default/args.gn` needs to be changed so that `fx build` or `ninja` will +rebuild all the Rust targets. This can be done in a text editor and the contents +of the string do not matter, as long as it changes from one build to the next. +[build_fuchsia_from_rust_ci.sh] does this for you by hashing the toolchain +directory. + +The Fuchsia website has more detailed documentation of the [build system]. + +#### Other tips and tricks + +When using `build_fuchsia_from_rust_ci.sh` you can comment out the `fx set` +command after the initial run so it won't rerun GN each time. If you do this you +can also comment out the version_string line to save a couple seconds. + +`export NINJA_PERSISTENT_MODE=1` to get faster ninja startup times after the +initial build. + +## Fuchsia target support + +To learn more about Fuchsia target support, see the Fuchsia chapter in [the +rustc book][platform-support]. + +[regressions]: https://gist.github.com/tmandry/7103eba4bd6a6fb0c439b5a90ae355fa +[build-toolchain]: https://fuchsia.dev/fuchsia-src/development/build/rust_toolchain +[build-fuchsia.sh]: https://github.com/rust-lang/rust/blob/221e2741c39515a5de6da42d8c76ee1e132c2c74/src/ci/docker/host-x86_64/x86_64-fuchsia/build-fuchsia.sh +[build_fuchsia_from_rust_ci.sh]: https://cs.opensource.google/fuchsia/fuchsia/+/main:scripts/rust/build_fuchsia_from_rust_ci.sh?q=build_fuchsia_from_rust_ci&ss=fuchsia +[platform-support]: https://doc.rust-lang.org/nightly/rustc/platform-support/fuchsia.html +[GN]: https://gn.googlesource.com/gn/+/main#gn +[Ninja]: https://ninja-build.org/ +[`public_configs`]: https://gn.googlesource.com/gn/+/main/docs/reference.md#var_public_configs +[`//build/config:compiler`]: https://cs.opensource.google/fuchsia/fuchsia/+/main:build/config/BUILD.gn;l=121;drc=c26c473bef93b33117ae417893118907a026fec7 +[build system]: https://fuchsia.dev/fuchsia-src/development/build/build_system +[fuchsia-ping]: ../../notification-groups/fuchsia.md + +[^loc]: As of June 2024, Fuchsia had about 2 million lines of first-party Rust +code and a roughly equal amount of third-party code, as counted by tokei +(excluding comments and blanks). diff --git a/src/doc/rustc-dev-guide/src/tests/rust-for-linux.md b/src/doc/rustc-dev-guide/src/tests/ecosystem-test-jobs/rust-for-linux.md similarity index 95% rename from src/doc/rustc-dev-guide/src/tests/rust-for-linux.md rename to src/doc/rustc-dev-guide/src/tests/ecosystem-test-jobs/rust-for-linux.md index c674d1575b781..d549ec6fca529 100644 --- a/src/doc/rustc-dev-guide/src/tests/rust-for-linux.md +++ b/src/doc/rustc-dev-guide/src/tests/ecosystem-test-jobs/rust-for-linux.md @@ -3,26 +3,7 @@ [Rust for Linux](https://rust-for-linux.com/) (RfL) is an effort for adding support for the Rust programming language into the Linux kernel. -## Building Rust for Linux in CI - -Rust for Linux builds as part of the suite of bors tests that run before a pull -request is merged. - -The workflow builds a stage1 sysroot of the Rust compiler, downloads the Linux -kernel, and tries to compile several Rust for Linux drivers and examples using -this sysroot. RfL uses several unstable compiler/language features, therefore -this workflow notifies us if a given compiler change would break it. - -If you are worried that a pull request might break the Rust for Linux builder -and want to test it out before submitting it to the bors queue, simply add this -line to your PR description: - -> try-job: x86_64-rust-for-linux - -Then when you `@bors try` it will pick the job that builds the Rust for Linux -integration. - -## What to do in case of failure +## What to do if the Rust for Linux job breaks? If a PR breaks the Rust for Linux CI job, then: @@ -48,4 +29,23 @@ ping group to ask for help: @rustbot ping rfl ``` -[rfl-ping]: ../notification-groups/rust-for-linux.md +## Building Rust for Linux in CI + +Rust for Linux builds as part of the suite of bors tests that run before a pull +request is merged. + +The workflow builds a stage1 sysroot of the Rust compiler, downloads the Linux +kernel, and tries to compile several Rust for Linux drivers and examples using +this sysroot. RfL uses several unstable compiler/language features, therefore +this workflow notifies us if a given compiler change would break it. + +If you are worried that a pull request might break the Rust for Linux builder +and want to test it out before submitting it to the bors queue, simply add this +line to your PR description: + +> try-job: x86_64-rust-for-linux + +Then when you `@bors try` it will pick the job that builds the Rust for Linux +integration. + +[rfl-ping]: ../../notification-groups/rust-for-linux.md diff --git a/src/doc/rustc-dev-guide/src/tests/ecosystem.md b/src/doc/rustc-dev-guide/src/tests/ecosystem.md index 083601404255b..eee07dd079bbf 100644 --- a/src/doc/rustc-dev-guide/src/tests/ecosystem.md +++ b/src/doc/rustc-dev-guide/src/tests/ecosystem.md @@ -15,14 +15,16 @@ CI. See the [Crater chapter](crater.md) for more details. `cargotest` is a small tool which runs `cargo test` on a few sample projects (such as `servo`, `ripgrep`, `tokei`, etc.). This runs as part of CI and ensures -there aren't any significant regressions. +there aren't any significant regressions: -> Example: `./x test src/tools/cargotest` +```console +./x test src/tools/cargotest +``` ### Large OSS Project builders We have CI jobs that build large open-source Rust projects that are used as regression tests in CI. Our integration jobs build the following projects: -- [Fuchsia](fuchsia.md) -- [Rust for Linux](rust-for-linux.md) +- [Fuchsia](./ecosystem-test-jobs/fuchsia.md) +- [Rust for Linux](./ecosystem-test-jobs/rust-for-linux.md) diff --git a/src/doc/rustc-dev-guide/src/tests/fuchsia.md b/src/doc/rustc-dev-guide/src/tests/fuchsia.md deleted file mode 100644 index e96290b921529..0000000000000 --- a/src/doc/rustc-dev-guide/src/tests/fuchsia.md +++ /dev/null @@ -1,168 +0,0 @@ -# Fuchsia integration tests - -[Fuchsia](https://fuchsia.dev) is an open-source operating system with about 2 -million lines of Rust code.[^loc] It has caught a large number of [regressions] -in the past and was subsequently included in CI. - -## Building Fuchsia in CI - -Fuchsia builds as part of the suite of bors tests that run before a pull request -is merged. - -If you are worried that a pull request might break the Fuchsia builder and want -to test it out before submitting it to the bors queue, simply add this line to -your PR description: - -> try-job: x86_64-fuchsia - -Then when you `@bors try` it will pick the job that builds Fuchsia. - -## Building Fuchsia locally - -Because Fuchsia uses languages other than Rust, it does not use Cargo as a build -system. It also requires the toolchain build to be configured in a [certain -way][build-toolchain]. - -The recommended way to build Fuchsia is to use the Docker scripts that check out -and run a Fuchsia build for you. If you've run Docker tests before, you can -simply run this command from your Rust checkout to download and build Fuchsia -using your local Rust toolchain. - -``` -src/ci/docker/run.sh x86_64-fuchsia -``` - -See the [Testing with Docker](docker.md) chapter for more details on how to run -and debug jobs with Docker. - -Note that a Fuchsia checkout is *large* – as of this writing, a checkout and -build takes 46G of space – and as you might imagine, it takes a while to -complete. - -### Modifying the Fuchsia checkout - -The main reason you would want to build Fuchsia locally is because you need to -investigate a regression. After running a Docker build, you'll find the Fuchsia -checkout inside the `obj/fuchsia` directory of your Rust checkout. If you -modify the `KEEP_CHECKOUT` line in the [build-fuchsia.sh] script to -`KEEP_CHECKOUT=1`, you can change the checkout as needed and rerun the build -command above. This will reuse all the build results from before. - -You can find more options to customize the Fuchsia checkout in the -[build-fuchsia.sh] script. - -### Customizing the Fuchsia build - -You can find more info about the options used to build Fuchsia in Rust CI in the -[build_fuchsia_from_rust_ci.sh] script invoked by [build-fuchsia.sh]. - -The Fuchsia build system uses [GN], a metabuild system that generates [Ninja] -files and then hands off the work of running the build to Ninja. - -Fuchsia developers use `fx` to run builds and perform other development tasks. -This tool is located in `.jiri_root/bin` of the Fuchsia checkout; you may need -to add this to your `$PATH` for some workflows. - -There are a few `fx` subcommands that are relevant, including: - -- `fx set` accepts build arguments, writes them to `out/default/args.gn`, and - runs GN. -- `fx build` builds the Fuchsia project using Ninja. It will automatically pick - up changes to build arguments and rerun GN. By default it builds everything, - but it also accepts target paths to build specific targets (see below). -- `fx clippy` runs Clippy on specific Rust targets (or all of them). We use this - in the Rust CI build to avoid running codegen on most Rust targets. Underneath - it invokes Ninja, just like `fx build`. The clippy results are saved in json - files inside the build output directory before being printed. - -#### Target paths - -GN uses paths like the following to identify build targets: - -``` -//src/starnix/kernel:starnix_core -``` - -The initial `//` means the root of the checkout, and the remaining slashes are -directory names. The string after `:` is the _target name_ of a target defined -in the `BUILD.gn` file of that directory. - -The target name can be omitted if it is the same as the directory name. In other -words, `//src/starnix/kernel` is the same as `//src/starnix/kernel:kernel`. - -These target paths are used inside `BUILD.gn` files to reference dependencies, -and can also be used in `fx build`. - -#### Modifying compiler flags - -You can put custom compiler flags inside a GN `config` that is added to a -target. As a simple example: - -``` -config("everybody_loops") { - rustflags = [ "-Zeverybody-loops" ] -} - -rustc_binary("example") { - crate_root = "src/bin.rs" - # ...existing keys here... - configs += [ ":everybody_loops" ] -} -``` - -This will add the flag `-Zeverybody-loops` to rustc when building the `example` -target. Note that you can also use [`public_configs`] for a config to be added -to every target that depends on that target. - -If you want to add a flag to every Rust target in the build, you can add -rustflags to the [`//build/config:compiler`] config or to the OS-specific -configs referenced in that file. Note that `cflags` and `ldflags` are ignored on -Rust targets. - -#### Running ninja and rustc commands directly - -Going down one layer, `fx build` invokes `ninja`, which in turn eventually -invokes `rustc`. All build actions are run inside the out directory, which is -usually `out/default` inside the Fuchsia checkout. - -You can get ninja to print the actual command it invokes by forcing that command -to fail, e.g. by adding a syntax error to one of the source files of the target. -Once you have the command, you can run it from inside the output directory. - -After changing the toolchain itself, the build setting `rustc_version_string` in -`out/default/args.gn` needs to be changed so that `fx build` or `ninja` will -rebuild all the Rust targets. This can be done in a text editor and the contents -of the string do not matter, as long as it changes from one build to the next. -[build_fuchsia_from_rust_ci.sh] does this for you by hashing the toolchain -directory. - -The Fuchsia website has more detailed documentation of the [build system]. - -#### Other tips and tricks - -When using `build_fuchsia_from_rust_ci.sh` you can comment out the `fx set` -command after the initial run so it won't rerun GN each time. If you do this you -can also comment out the version_string line to save a couple seconds. - -`export NINJA_PERSISTENT_MODE=1` to get faster ninja startup times after the -initial build. - -## Fuchsia target support - -To learn more about Fuchsia target support, see the Fuchsia chapter in [the -rustc book][platform-support]. - -[regressions]: https://gist.github.com/tmandry/7103eba4bd6a6fb0c439b5a90ae355fa -[build-toolchain]: https://fuchsia.dev/fuchsia-src/development/build/rust_toolchain -[build-fuchsia.sh]: https://github.com/rust-lang/rust/blob/221e2741c39515a5de6da42d8c76ee1e132c2c74/src/ci/docker/host-x86_64/x86_64-fuchsia/build-fuchsia.sh -[build_fuchsia_from_rust_ci.sh]: https://cs.opensource.google/fuchsia/fuchsia/+/main:scripts/rust/build_fuchsia_from_rust_ci.sh?q=build_fuchsia_from_rust_ci&ss=fuchsia -[platform-support]: https://doc.rust-lang.org/nightly/rustc/platform-support/fuchsia.html -[GN]: https://gn.googlesource.com/gn/+/main#gn -[Ninja]: https://ninja-build.org/ -[`public_configs`]: https://gn.googlesource.com/gn/+/main/docs/reference.md#var_public_configs -[`//build/config:compiler`]: https://cs.opensource.google/fuchsia/fuchsia/+/main:build/config/BUILD.gn;l=121;drc=c26c473bef93b33117ae417893118907a026fec7 -[build system]: https://fuchsia.dev/fuchsia-src/development/build/build_system - -[^loc]: As of June 2024, Fuchsia had about 2 million lines of first-party Rust -code and a roughly equal amount of third-party code, as counted by tokei -(excluding comments and blanks). diff --git a/src/doc/rustc-dev-guide/src/tests/intro.md b/src/doc/rustc-dev-guide/src/tests/intro.md index ba44a969bf950..7bf30b106b43b 100644 --- a/src/doc/rustc-dev-guide/src/tests/intro.md +++ b/src/doc/rustc-dev-guide/src/tests/intro.md @@ -38,7 +38,7 @@ directory, and `x` will essentially run `cargo test` on that package. Examples: | Command | Description | -| ----------------------------------------- | ------------------------------------- | +|-------------------------------------------|---------------------------------------| | `./x test library/std` | Runs tests on `std` only | | `./x test library/core` | Runs tests on `core` only | | `./x test compiler/rustc_data_structures` | Runs tests on `rustc_data_structures` | @@ -86,7 +86,7 @@ above. Examples: | Command | Description | -| ----------------------- | ------------------------------------------------------------------ | +|-------------------------|--------------------------------------------------------------------| | `./x fmt --check` | Checks formatting and exits with an error if formatting is needed. | | `./x fmt` | Runs rustfmt across the entire codebase. | | `./x test tidy --bless` | First runs rustfmt to format the codebase, then runs tidy checks. | @@ -155,6 +155,10 @@ chapter](ecosystem.md) for more details. A separate infrastructure is used for testing and tracking performance of the compiler. See the [Performance testing chapter](perf.md) for more details. +### Codegen backend testing + +See [Codegen backend testing](./codegen-backend-tests/intro.md). + ## Miscellaneous information There are some other useful testing-related info at [Misc info](misc.md). diff --git a/src/doc/rustc-dev-guide/src/tests/minicore.md b/src/doc/rustc-dev-guide/src/tests/minicore.md index e4853b6d40e35..507b259e0275d 100644 --- a/src/doc/rustc-dev-guide/src/tests/minicore.md +++ b/src/doc/rustc-dev-guide/src/tests/minicore.md @@ -6,25 +6,37 @@ ui/codegen/assembly test suites. It provides `core` stubs for tests that need to build for cross-compiled targets but do not need/want to run. +
    +Please note that [`minicore`] is only intended for `core` items, and explicitly +**not** `std` or `alloc` items because `core` items are applicable to a wider +range of tests. +
    + A test can use [`minicore`] by specifying the `//@ add-core-stubs` directive. Then, mark the test with `#![feature(no_core)]` + `#![no_std]` + `#![no_core]`. Due to Edition 2015 extern prelude rules, you will probably need to declare `minicore` as an extern crate. +## Implied compiler flags + Due to the `no_std` + `no_core` nature of these tests, `//@ add-core-stubs` implies and requires that the test will be built with `-C panic=abort`. -Unwinding panics are not supported. +**Unwinding panics are not supported.** + +Tests will also be built with `-C force-unwind-tables=yes` to preserve CFI +directives in assembly tests. + +TL;DR: `//@ add-core-stubs` implies two compiler flags: + +1. `-C panic=abort` +2. `-C force-unwind-tables=yes` + +## Adding more `core` stubs If you find a `core` item to be missing from the [`minicore`] stub, consider adding it to the test auxiliary if it's likely to be used or is already needed by more than one test. -
    -Please note that [`minicore`] is only intended for `core` items, and explicitly -**not** `std` or `alloc` items because `core` items are applicable to a wider -range of tests. -
    - ## Example codegen test that uses `minicore` ```rust,no_run diff --git a/src/doc/rustc-dev-guide/src/tests/ui.md b/src/doc/rustc-dev-guide/src/tests/ui.md index 1190c2646af10..721d20b65c5a7 100644 --- a/src/doc/rustc-dev-guide/src/tests/ui.md +++ b/src/doc/rustc-dev-guide/src/tests/ui.md @@ -303,8 +303,7 @@ It should be preferred to using `error-pattern`, which is imprecise and non-exha ### `error-pattern` The `error-pattern` [directive](directives.md) can be used for runtime messages, which don't -have a specific span, or for compile time messages if imprecise matching is required due to -multi-line platform specific diagnostics. +have a specific span, or in exceptional cases, for compile time messages. Let's think about this test: @@ -317,8 +316,8 @@ fn main() { } ``` -We want to ensure this shows "index out of bounds" but we cannot use the `ERROR` -annotation since the error doesn't have any span. Then it's time to use the +We want to ensure this shows "index out of bounds", but we cannot use the `ERROR` +annotation since the runtime error doesn't have any span. Then it's time to use the `error-pattern` directive: ```rust,ignore @@ -331,24 +330,53 @@ fn main() { } ``` -But for strict testing, try to use the `ERROR` annotation as much as possible, -including `//~?` annotations for diagnostics without span. -For compile time diagnostics `error-pattern` should very rarely be necessary. +Use of `error-pattern` is not recommended in general. -### Error levels +For strict testing of compile time output, try to use the line annotations `//~` as much as +possible, including `//~?` annotations for diagnostics without spans. -The error levels that you can have are: +If the compile time output is target dependent or too verbose, use directive +`//@ dont-require-annotations: ` to make the line annotation checking +non-exhaustive. +Some of the compiler messages can stay uncovered by annotations in this mode. + +For checking runtime output, `//@ check-run-results` may be preferable. + +Only use `error-pattern` if none of the above works. + +Line annotations `//~` and `error-pattern` are compatible and can be used in the same test. + +### Diagnostic kinds (error levels) + +The diagnostic kinds that you can have are: - `ERROR` -- `WARN` or `WARNING` +- `WARN` (or `WARNING`) - `NOTE` -- `HELP` and `SUGGESTION` +- `HELP` +- `SUGGESTION` +- `RAW` -You are allowed to not include a level, but you should include it at least for -the primary message. - -The `SUGGESTION` level is used for specifying what the expected replacement text +The `SUGGESTION` kind is used for specifying what the expected replacement text should be for a diagnostic suggestion. +The `RAW` kind can be used for matching on lines from non-structured output sometimes emitted +by the compiler instead of or in addition to structured json. + +`ERROR` and `WARN` kinds are required to be exhaustively covered by line annotations +`//~` by default. + +Other kinds only need to be line-annotated if at least one annotation of that kind appears +in the test file. For example, one `//~ NOTE` will also require all other `//~ NOTE`s in the file +to be written out explicitly. + +Use directive `//@ dont-require-annotations` to opt out of exhaustive annotations. +E.g. use `//@ dont-require-annotations: NOTE` to annotate notes selectively. +Avoid using this directive for `ERROR`s and `WARN`ings, unless there's a serious reason, like +target-dependent compiler output. + +Some diagnostics are never required to be line-annotated, regardless of their kind or directives, +for example secondary lines of multiline diagnostics, +or ubiquitous diagnostics like `aborting due to N previous errors`. UI tests use the `-A unused` flag by default to ignore all unused warnings, as unused warnings are usually not the focus of a test. However, simple code @@ -448,6 +476,14 @@ reasons, including: can alert the developer so they know that the associated issue has been fixed and can possibly be closed. +This directive takes comma-separated issue numbers as arguments, or `"unknown"`: + +- `//@ known-bug: #123, #456` (when the issues are on rust-lang/rust) +- `//@ known-bug: rust-lang/chalk#123456` + (allows arbitrary text before the `#`, which is useful when the issue is on another repo) +- `//@ known-bug: unknown` + (when there is no known issue yet; preferrably open one if it does not already exist) + Do not include [error annotations](#error-annotations) in a test with `known-bug`. The test should still include other normal directives and stdout/stderr files. @@ -563,4 +599,27 @@ with "user-facing" Rust alone. Indeed, one could say that this slightly abuses the term "UI" (*user* interface) and turns such UI tests from black-box tests into white-box ones. Use them carefully and sparingly. -[compiler debugging]: ../compiler-debugging.md#rustc_test-attributes +[compiler debugging]: ../compiler-debugging.md#rustc_-test-attributes + +## UI test mode preset lint levels + +By default, test suites under UI test mode (`tests/ui`, `tests/ui-fulldeps`, +but not `tests/rustdoc-ui`) will specify + +- `-A unused` +- `-A internal_features` + +If: + +- The ui test's pass mode is below `run` (i.e. check or build). +- No compare modes are specified. + +Since they can be very noisy in ui tests. + +You can override them with `compile-flags` lint level flags or +in-source lint level attributes as required. + +Note that the `rustfix` version will *not* have `-A unused` passed, +meaning that you may have to `#[allow(unused)]` to suppress `unused` +lints on the rustfix'd file (because we might be testing rustfix +on `unused` lints themselves). diff --git a/src/doc/rustc-dev-guide/src/the-parser.md b/src/doc/rustc-dev-guide/src/the-parser.md index 60a71ae3873ff..601a81e2e485b 100644 --- a/src/doc/rustc-dev-guide/src/the-parser.md +++ b/src/doc/rustc-dev-guide/src/the-parser.md @@ -1,4 +1,4 @@ -# Lexing and Parsing +# Lexing and parsing The very first thing the compiler does is take the program (in UTF-8 Unicode text) and turn it into a data format the compiler can work with more conveniently than strings. @@ -59,7 +59,7 @@ Note that while parsing, we may encounter macro definitions or invocations. We set these aside to be expanded (see [Macro Expansion](./macro-expansion.md)). Expansion itself may require parsing the output of a macro, which may reveal more macros to be expanded, and so on. -## More on Lexical Analysis +## More on lexical analysis Code for lexical analysis is split between two crates: diff --git a/src/doc/rustc-dev-guide/src/traits/caching.md b/src/doc/rustc-dev-guide/src/traits/caching.md index a9f20969b5746..c44722a1d9a33 100644 --- a/src/doc/rustc-dev-guide/src/traits/caching.md +++ b/src/doc/rustc-dev-guide/src/traits/caching.md @@ -61,7 +61,7 @@ to be pretty clearly safe and also still retains a very high hit rate **TODO**: it looks like `pick_candidate_cache` no longer exists. In general, is this section still accurate at all? -[`ParamEnv`]: ../param_env/param_env_summary.html +[`ParamEnv`]: ../typing_parameter_envs.html [`tcx`]: ../ty.html [#18290]: https://github.com/rust-lang/rust/issues/18290 [#22019]: https://github.com/rust-lang/rust/issues/22019 diff --git a/src/doc/rustc-dev-guide/src/traits/resolution.md b/src/doc/rustc-dev-guide/src/traits/resolution.md index 26eb724588600..c62b0593694f1 100644 --- a/src/doc/rustc-dev-guide/src/traits/resolution.md +++ b/src/doc/rustc-dev-guide/src/traits/resolution.md @@ -183,7 +183,7 @@ in that list. If so, it is considered satisfied. More precisely, we want to check whether there is a where-clause obligation that is for the same trait (or some subtrait) and which can match against the obligation. -[parameter environment]: ../param_env/param_env_summary.html +[parameter environment]: ../typing_parameter_envs.html Consider this simple example: diff --git a/src/doc/rustc-dev-guide/src/ty.md b/src/doc/rustc-dev-guide/src/ty.md index b33d540358642..ce6cffec1adb7 100644 --- a/src/doc/rustc-dev-guide/src/ty.md +++ b/src/doc/rustc-dev-guide/src/ty.md @@ -61,11 +61,11 @@ Here is a summary: | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | | Describe the *syntax* of a type: what the user wrote (with some desugaring). | Describe the *semantics* of a type: the meaning of what the user wrote. | | Each `rustc_hir::Ty` has its own spans corresponding to the appropriate place in the program. | Doesn’t correspond to a single place in the user’s program. | -| `rustc_hir::Ty` has generics and lifetimes; however, some of those lifetimes are special markers like [`LifetimeName::Implicit`][implicit]. | `ty::Ty` has the full type, including generics and lifetimes, even if the user left them out | +| `rustc_hir::Ty` has generics and lifetimes; however, some of those lifetimes are special markers like [`LifetimeKind::Implicit`][implicit]. | `ty::Ty` has the full type, including generics and lifetimes, even if the user left them out | | `fn foo(x: u32) → u32 { }` - Two `rustc_hir::Ty` representing each usage of `u32`, each has its own `Span`s, and `rustc_hir::Ty` doesn’t tell us that both are the same type | `fn foo(x: u32) → u32 { }` - One `ty::Ty` for all instances of `u32` throughout the program, and `ty::Ty` tells us that both usages of `u32` mean the same type. | -| `fn foo(x: &u32) -> &u32)` - Two `rustc_hir::Ty` again. Lifetimes for the references show up in the `rustc_hir::Ty`s using a special marker, [`LifetimeName::Implicit`][implicit]. | `fn foo(x: &u32) -> &u32)`- A single `ty::Ty`. The `ty::Ty` has the hidden lifetime param. | +| `fn foo(x: &u32) -> &u32)` - Two `rustc_hir::Ty` again. Lifetimes for the references show up in the `rustc_hir::Ty`s using a special marker, [`LifetimeKind::Implicit`][implicit]. | `fn foo(x: &u32) -> &u32)`- A single `ty::Ty`. The `ty::Ty` has the hidden lifetime param. | -[implicit]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_hir/hir/enum.LifetimeName.html#variant.Implicit +[implicit]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_hir/hir/enum.LifetimeKind.html#variant.Implicit **Order** @@ -323,4 +323,4 @@ When looking at the debug output of `Ty` or simply talking about different types - Generic parameters: `{name}/#{index}` e.g. `T/#0`, where `index` corresponds to its position in the list of generic parameters - Inference variables: `?{id}` e.g. `?x`/`?0`, where `id` identifies the inference variable - Variables from binders: `^{binder}_{index}` e.g. `^0_x`/`^0_2`, where `binder` and `index` identify which variable from which binder is being referred to -- Placeholders: `!{id}` or `!{id}_{universe}` e.g. `!x`/`!0`/`!x_2`/`!0_2`, representing some unique type in the specified universe. The universe is often elided when it is `0` \ No newline at end of file +- Placeholders: `!{id}` or `!{id}_{universe}` e.g. `!x`/`!0`/`!x_2`/`!0_2`, representing some unique type in the specified universe. The universe is often elided when it is `0` diff --git a/src/doc/rustc-dev-guide/src/ty_module/binders.md b/src/doc/rustc-dev-guide/src/ty_module/binders.md index defb7cde514a0..71157eca9b11e 100644 --- a/src/doc/rustc-dev-guide/src/ty_module/binders.md +++ b/src/doc/rustc-dev-guide/src/ty_module/binders.md @@ -40,7 +40,7 @@ We did not always explicitly track the set of bound vars introduced by each `Bin ``` Binder( fn(&'^1_0 &'^1 T/#0), - &[BoundVariarbleKind::Region(...)], + &[BoundVariableKind::Region(...)], ) ``` This would cause all kinds of issues as the region `'^1_0` refers to a binder at a higher level than the outermost binder i.e. it is an escaping bound var. The `'^1` region (also writeable as `'^0_1`) is also ill formed as the binder it refers to does not introduce a second parameter. Modern day rustc will ICE when constructing this binder due to both of those regions, in the past we would have simply allowed this to work and then ran into issues in other parts of the codebase. diff --git a/src/doc/rustc-dev-guide/src/typing_parameter_envs.md b/src/doc/rustc-dev-guide/src/typing_parameter_envs.md new file mode 100644 index 0000000000000..757296d1f6529 --- /dev/null +++ b/src/doc/rustc-dev-guide/src/typing_parameter_envs.md @@ -0,0 +1,206 @@ +# Typing/Parameter Environments + + + +## Typing Environments + +When interacting with the type system there are a few variables to consider that can affect the results of trait solving. The the set of in-scope where clauses, and what phase of the compiler type system operations are being performed in (the [`ParamEnv`][penv] and [`TypingMode`][tmode] structs respectively). + +When an environment to perform type system operations in has not yet been created, the [`TypingEnv`][tenv] can be used to bundle all of the external context required into a single type. + +Once a context to perform type system operations in has been created (e.g. an [`ObligationCtxt`][ocx] or [`FnCtxt`][fnctxt]) a `TypingEnv` is typically not stored anywhere as only the `TypingMode` is a property of the whole environment, whereas different `ParamEnv`s can be used on a per-goal basis. + +[ocx]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_trait_selection/traits/struct.ObligationCtxt.html +[fnctxt]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_hir_typeck/fn_ctxt/struct.FnCtxt.html + +## Parameter Environemnts + +### What is a `ParamEnv` + +The [`ParamEnv`][penv] is a list of in-scope where-clauses, it typically corresponds to a specific item's where clauses. Some clauses are not explicitly written but are instead are implicitly added in the [`predicates_of`][predicates_of] query, such as `ConstArgHasType` or (some) implied bounds. + +In most cases `ParamEnv`s are initially created via the [`param_env` query][query] which returns a `ParamEnv` derived from the provided item's where clauses. A `ParamEnv` can also be created with arbitrary sets of clauses that are not derived from a specific item, such as in [`compare_method_predicate_entailment`][method_pred_entailment] where we create a hybrid `ParamEnv` consisting of the impl's where clauses and the trait definition's function's where clauses. + +--- + +If we have a function such as: +```rust +// `foo` would have a `ParamEnv` of: +// `[T: Sized, T: Trait, ::Assoc: Clone]` +fn foo() +where + ::Assoc: Clone, +{} +``` +If we were conceptually inside of `foo` (for example, type-checking or linting it) we would use this `ParamEnv` everywhere that we interact with the type system. This would allow things such as normalization (TODO: write a chapter about normalization and link it), evaluating generic constants, and proving where clauses/goals, to rely on `T` being sized, implementing `Trait`, etc. + +A more concrete example: +```rust +// `foo` would have a `ParamEnv` of: +// `[T: Sized, T: Clone]` +fn foo(a: T) { + // when typechecking `foo` we require all the where clauses on `requires_clone` + // to hold in order for it to be legal to call. This means we have to + // prove `T: Clone`. As we are type checking `foo` we use `foo`'s + // environment when trying to check that `T: Clone` holds. + // + // Trying to prove `T: Clone` with a `ParamEnv` of `[T: Sized, T: Clone]` + // will trivially succeed as bound we want to prove is in our environment. + requires_clone(a); +} +``` + +Or alternatively an example that would not compile: +```rust +// `foo2` would have a `ParamEnv` of: +// `[T: Sized]` +fn foo2(a: T) { + // When typechecking `foo2` we attempt to prove `T: Clone`. + // As we are type checking `foo2` we use `foo2`'s environment + // when trying to prove `T: Clone`. + // + // Trying to prove `T: Clone` with a `ParamEnv` of `[T: Sized]` will + // fail as there is nothing in the environment telling the trait solver + // that `T` implements `Clone` and there exists no user written impl + // that could apply. + requires_clone(a); +} +``` + +[predicates_of]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_hir_analysis/collect/predicates_of/fn.predicates_of.html +[method_pred_entailment]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_hir_analysis/check/compare_impl_item/fn.compare_method_predicate_entailment.html +[query]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_middle/ty/context/struct.TyCtxt.html#method.param_env + +### Acquiring a `ParamEnv` + +Using the wrong [`ParamEnv`][penv] when interacting with the type system can lead to ICEs, illformed programs compiling, or erroing when we shouldn't. See [#82159](https://github.com/rust-lang/rust/pull/82159) and [#82067](https://github.com/rust-lang/rust/pull/82067) as examples of PRs that modified the compiler to use the correct param env and in the process fixed ICEs. + +In the large majority of cases, when a `ParamEnv` is required it either already exists somewhere in scope, or above in the call stack and should be passed down. A non exhaustive list of places where you might find an existing `ParamEnv`: +- During typeck `FnCtxt` has a [`param_env` field][fnctxt_param_env] +- When writing late lints the `LateContext` has a [`param_env` field][latectxt_param_env] +- During well formedness checking the `WfCheckingCtxt` has a [`param_env` field][wfckctxt_param_env] +- The `TypeChecker` used for MIR Typeck has a [`param_env` field][mirtypeck_param_env] +- In the next-gen trait solver all `Goal`s have a [`param_env` field][goal_param_env] specifying what environment to prove the goal in +- When editing an existing [`TypeRelation`][typerelation] if it implements [`PredicateEmittingRelation`][predicate_emitting_relation] then a [`param_env` method][typerelation_param_env] will be available. + +If you aren't sure if there's a `ParamEnv` in scope somewhere that can be used it can be worth opening a thread in the [`#t-compiler/help`][compiler_help] zulip stream where someone may be able to point out where a `ParamEnv` can be acquired from. + +Manually constructing a `ParamEnv` is typically only needed at the start of some kind of top level analysis (e.g. hir typeck or borrow checking). In such cases there are three ways it can be done: +- Calling the [`tcx.param_env(def_id)` query][param_env_query] which returns the environment associated with a given definition. +- Creating an empty environment with [`ParamEnv::empty`][env_empty]. +- Using [`ParamEnv::new`][param_env_new] to construct an env with an arbitrary set of where clauses. Then calling [`traits::normalize_param_env_or_error`][normalize_env_or_error] to handle normalizing and elaborating all the where clauses in the env. + +Using the `param_env` query is by far the most common way to construct a `ParamEnv` as most of the time the compiler is performing an analysis as part of some specific definition. + +Creating an empty environment with `ParamEnv::empty` is typically only done either in codegen (indirectly via [`TypingEnv::fully_monomorphized`][tenv_mono]), or as part of some analysis that do not expect to ever encounter generic parameters (e.g. various parts of coherence/orphan check). + +Creating an env from an arbitrary set of where clauses is usually unnecessary and should only be done if the environment you need does not correspond to an actual item in the source code (e.g. [`compare_method_predicate_entailment`][method_pred_entailment]). + +[param_env_new]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_middle/ty/struct.ParamEnv.html#method.new +[normalize_env_or_error]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_trait_selection/traits/fn.normalize_param_env_or_error.html +[fnctxt_param_env]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_hir_typeck/fn_ctxt/struct.FnCtxt.html#structfield.param_env +[latectxt_param_env]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_lint/context/struct.LateContext.html#structfield.param_env +[wfckctxt_param_env]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_hir_analysis/check/wfcheck/struct.WfCheckingCtxt.html#structfield.param_env +[goal_param_env]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_infer/infer/canonical/ir/solve/struct.Goal.html#structfield.param_env +[typerelation_param_env]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_infer/infer/trait.PredicateEmittingRelation.html#tymethod.param_env +[typerelation]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_middle/ty/relate/trait.TypeRelation.html +[mirtypeck_param_env]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_borrowck/type_check/struct.TypeChecker.html#structfield.param_env +[env_empty]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_middle/ty/struct.ParamEnv.html#method.empty +[param_env_query]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_hir_typeck/fn_ctxt/struct.FnCtxt.html#structfield.param_env +[method_pred_entailment]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_hir_analysis/check/compare_impl_item/fn.compare_method_predicate_entailment.html +[predicate_emitting_relation]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_middle/ty/relate/combine/trait.PredicateEmittingRelation.html +[tenv_mono]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_middle/ty/struct.TypingEnv.html#method.fully_monomorphized +[compiler_help]: https://rust-lang.zulipchat.com/#narrow/channel/182449-t-compiler.2Fhelp + +### How are `ParamEnv`s constructed + +Creating a [`ParamEnv`][pe] is more complicated than simply using the list of where clauses defined on an item as written by the user. We need to both elaborate supertraits into the env and fully normalize all aliases. This logic is handled by [`traits::normalize_param_env_or_error`][normalize_env_or_error] (even though it does not mention anything about elaboration). + +#### Elaborating supertraits + +When we have a function such as `fn foo()` we would like to be able to prove `T: Clone` inside of the function as the `Copy` trait has a `Clone` supertrait. Constructing a `ParamEnv` looks at all of the trait bounds in the env and explicitly adds new where clauses to the `ParamEnv` for any supertraits found on the traits. + +A concrete example would be the following function: +```rust +trait Trait: SuperTrait {} +trait SuperTrait: SuperSuperTrait {} + +// `bar`'s unelaborated `ParamEnv` would be: +// `[T: Sized, T: Copy, T: Trait]` +fn bar(a: T) { + requires_impl(a); +} + +fn requires_impl(a: T) {} +``` + +If we did not elaborate the env then the `requires_impl` call would fail to typecheck as we would not be able to prove `T: Clone` or `T: SuperSuperTrait`. In practice we elaborate the env which means that `bar`'s `ParamEnv` is actually: +`[T: Sized, T: Copy, T: Clone, T: Trait, T: SuperTrait, T: SuperSuperTrait]` +This allows us to prove `T: Clone` and `T: SuperSuperTrait` when type checking `bar`. + +The `Clone` trait has a `Sized` supertrait however we do not end up with two `T: Sized` bounds in the env (one for the supertrait and one for the implicitly added `T: Sized` bound) as the elaboration process (implemented via [`util::elaborate`][elaborate]) deduplicates where clauses. + +A side effect of this is that even if no actual elaboration of supertraits takes place, the existing where clauses in the env are _also_ deduplicated. See the following example: +```rust +trait Trait {} +// The unelaborated `ParamEnv` would be: +// `[T: Sized, T: Trait, T: Trait]` +// but after elaboration it would be: +// `[T: Sized, T: Trait]` +fn foo() {} +``` + +The [next-gen trait solver][next-gen-solver] also requires this elaboration to take place. + +[elaborate]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_infer/traits/util/fn.elaborate.html +[next-gen-solver]: ./solve/trait-solving.md + +#### Normalizing all bounds + +In the old trait solver the where clauses stored in `ParamEnv` are required to be fully normalized as otherwise the trait solver will not function correctly. A concrete example of needing to normalize the `ParamEnv` is the following: +```rust +trait Trait { + type Assoc; +} + +trait Other { + type Bar; +} + +impl Other for T { + type Bar = u32; +} + +// `foo`'s unnormalized `ParamEnv` would be: +// `[T: Sized, U: Sized, U: Trait]` +fn foo(a: U) +where + U: Trait<::Bar>, +{ + requires_impl(a); +} + +fn requires_impl>(_: U) {} +``` + +As humans we can tell that `::Bar` is equal to `u32` so the trait bound on `U` is equivalent to `U: Trait`. In practice trying to prove `U: Trait` in the old solver in this environment would fail as it is unable to determine that `::Bar` is equal to `u32`. + +To work around this we normalize `ParamEnv`'s after constructing them so that `foo`'s `ParamEnv` is actually: `[T: Sized, U: Sized, U: Trait]` which means the trait solver is now able to use the `U: Trait` in the `ParamEnv` to determine that the trait bound `U: Trait` holds. + +This workaround does not work in all cases as normalizing associated types requires a `ParamEnv` which introduces a bootstrapping problem. We need a normalized `ParamEnv` in order for normalization to give correct results, but we need to normalize to get that `ParamEnv`. Currently we normalize the `ParamEnv` once using the unnormalized param env and it tends to give okay results in practice even though there are some examples where this breaks ([example]). + +In the next-gen trait solver the requirement for all where clauses in the `ParamEnv` to be fully normalized is not present and so we do not normalize when constructing `ParamEnv`s. + +[example]: https://play.rust-lang.org/?version=stable&mode=debug&edition=2021&gist=e6933265ea3e84eaa47019465739992c +[pe]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_middle/ty/struct.ParamEnv.html +[normalize_env_or_error]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_trait_selection/traits/fn.normalize_param_env_or_error.html + +## Typing Modes + +Depending on what context we are performing type system operations in, different behaviour may be required. For example during coherence there are stronger requirements about when we can consider goals to not hold or when we can consider types to be unequal. + +Tracking which "phase" of the compiler type system operations are being performed in is done by the [`TypingMode`][tenv] enum. The documentation on the `TypingMode` enum is quite good so instead of repeating it here verbatim we would recommend reading the API documentation directly. + +[penv]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_middle/ty/struct.ParamEnv.html +[tenv]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_type_ir/infer_ctxt/enum.TypingMode.html +[tmode]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_middle/ty/type.TypingMode.html diff --git a/src/doc/rustc-dev-guide/src/unsafety-checking.md b/src/doc/rustc-dev-guide/src/unsafety-checking.md index 1130878944d2a..fbc19d8961c59 100644 --- a/src/doc/rustc-dev-guide/src/unsafety-checking.md +++ b/src/doc/rustc-dev-guide/src/unsafety-checking.md @@ -1,4 +1,4 @@ -# Unsafety Checking +# Unsafety checking Certain expressions in Rust can violate memory safety and as such need to be inside an `unsafe` block or function. The compiler will also warn if an unsafe diff --git a/src/doc/rustc-dev-guide/src/walkthrough.md b/src/doc/rustc-dev-guide/src/walkthrough.md index 6e07ceb7d7379..48b3f8bb15d3b 100644 --- a/src/doc/rustc-dev-guide/src/walkthrough.md +++ b/src/doc/rustc-dev-guide/src/walkthrough.md @@ -221,7 +221,7 @@ There are a couple of things that may happen for some PRs during the review proc some merge conflicts with other PRs that happen to get merged first. You should fix these merge conflicts using the normal git procedures. -[crater]: ./tests/intro.html#crater +[crater]: ./tests/crater.html If you are not doing a new feature or something like that (e.g. if you are fixing a bug), then that's it! Thanks for your contribution :) diff --git a/src/doc/rustc-dev-guide/triagebot.toml b/src/doc/rustc-dev-guide/triagebot.toml index 12aa0b7b8ff15..53fa72469fd2d 100644 --- a/src/doc/rustc-dev-guide/triagebot.toml +++ b/src/doc/rustc-dev-guide/triagebot.toml @@ -7,5 +7,12 @@ allow-unauthenticated = [ "blocked", ] +[no-mentions] + +[canonicalize-issue-links] + # Automatically close and reopen PRs made by bots to run CI on them [bot-pull-requests] + +[behind-upstream] +days-threshold = 7 \ No newline at end of file diff --git a/src/doc/rustc/src/SUMMARY.md b/src/doc/rustc/src/SUMMARY.md index d08e0bd1edf2a..a3939e5a5c451 100644 --- a/src/doc/rustc/src/SUMMARY.md +++ b/src/doc/rustc/src/SUMMARY.md @@ -58,6 +58,7 @@ - [thumbv7m-none-eabi](./platform-support/thumbv7m-none-eabi.md) - [thumbv8m.base-none-eabi](./platform-support/thumbv8m.base-none-eabi.md) - [thumbv8m.main-none-eabi\*](./platform-support/thumbv8m.main-none-eabi.md) + - [armv5te-unknown-linux-gnueabi](platform-support/armv5te-unknown-linux-gnueabi.md) - [armv6k-nintendo-3ds](platform-support/armv6k-nintendo-3ds.md) - [armv7-rtems-eabihf](platform-support/armv7-rtems-eabihf.md) - [armv7-sony-vita-newlibeabihf](platform-support/armv7-sony-vita-newlibeabihf.md) @@ -78,6 +79,7 @@ - [illumos](platform-support/illumos.md) - [loongarch\*-unknown-linux-\*](platform-support/loongarch-linux.md) - [loongarch\*-unknown-none\*](platform-support/loongarch-none.md) + - [\*-lynxos178-\*](platform-support/lynxos178.md) - [m68k-unknown-linux-gnu](platform-support/m68k-unknown-linux-gnu.md) - [m68k-unknown-none-elf](platform-support/m68k-unknown-none-elf.md) - [mips64-openwrt-linux-musl](platform-support/mips64-openwrt-linux-musl.md) @@ -102,8 +104,7 @@ - [s390x-unknown-linux-gnu](platform-support/s390x-unknown-linux-gnu.md) - [s390x-unknown-linux-musl](platform-support/s390x-unknown-linux-musl.md) - [sparc-unknown-none-elf](./platform-support/sparc-unknown-none-elf.md) - - [sparcv9-sun-solaris](platform-support/solaris.md) - - [\*-pc-windows-gnullvm](platform-support/pc-windows-gnullvm.md) + - [solaris](platform-support/solaris.md) - [\*-nto-qnx-\*](platform-support/nto-qnx.md) - [\*-unikraft-linux-musl](platform-support/unikraft-linux-musl.md) - [\*-unknown-hermit](platform-support/hermit.md) @@ -122,11 +123,12 @@ - [wasm32-unknown-unknown](platform-support/wasm32-unknown-unknown.md) - [wasm32v1-none](platform-support/wasm32v1-none.md) - [wasm64-unknown-unknown](platform-support/wasm64-unknown-unknown.md) + - [windows-gnu](platform-support/windows-gnu.md) + - [windows-gnullvm](platform-support/windows-gnullvm.md) - [\*-win7-windows-gnu](platform-support/win7-windows-gnu.md) - [\*-win7-windows-msvc](platform-support/win7-windows-msvc.md) - [x86_64-fortanix-unknown-sgx](platform-support/x86_64-fortanix-unknown-sgx.md) - [x86_64-pc-cygwin](platform-support/x86_64-pc-cygwin.md) - - [x86_64-pc-solaris](platform-support/solaris.md) - [x86_64-unknown-linux-none.md](platform-support/x86_64-unknown-linux-none.md) - [x86_64-unknown-none](platform-support/x86_64-unknown-none.md) - [xtensa-\*-none-elf](platform-support/xtensa.md) diff --git a/src/doc/rustc/src/codegen-options/index.md b/src/doc/rustc/src/codegen-options/index.md index 8c1769a8c7722..a3b70e7f97711 100644 --- a/src/doc/rustc/src/codegen-options/index.md +++ b/src/doc/rustc/src/codegen-options/index.md @@ -110,6 +110,19 @@ It takes a path to [the dlltool executable](https://sourceware.org/binutils/docs If this flag is not specified, a dlltool executable will be inferred based on the host environment and target. +## dwarf-version + +This option controls the version of DWARF that the compiler emits, on platforms +that use DWARF to encode debug information. It takes one of the following +values: + +* `2`: DWARF version 2 (the default on certain platforms, like Android). +* `3`: DWARF version 3 (the default on certain platforms, like AIX). +* `4`: DWARF version 4 (the default on most platforms, like Linux & macOS). +* `5`: DWARF version 5. + +DWARF version 1 is not supported. + ## embed-bitcode This flag controls whether or not the compiler embeds LLVM bitcode into object diff --git a/src/doc/rustc/src/exploit-mitigations.md b/src/doc/rustc/src/exploit-mitigations.md index d4e2fc52e973f..f8bafe032140c 100644 --- a/src/doc/rustc/src/exploit-mitigations.md +++ b/src/doc/rustc/src/exploit-mitigations.md @@ -42,8 +42,7 @@ understood within a given context. This section documents the exploit mitigations applicable to the Rust compiler when building programs for the Linux operating system on the AMD64 architecture -and equivalent.1 All examples in this section were built using +and equivalent.[^all-targets] All examples in this section were built using nightly builds of the Rust compiler on Debian testing. The Rust Programming Language currently has no specification. The Rust compiler @@ -67,11 +66,8 @@ equivalent. | Forward-edge control flow protection | Yes | Nightly | | Backward-edge control flow protection (e.g., shadow and safe stack) | Yes | Nightly | -1\. See - -for a list of targets and their default options. - +[^all-targets]: See + for a list of targets and their default options. ### Position-independent executable @@ -141,18 +137,15 @@ Integer overflow checks are enabled when debug assertions are enabled (see Fig. 3), and disabled when debug assertions are disabled (see Fig. 4). To enable integer overflow checks independently, use the option to control integer overflow checks, scoped attributes, or explicit checking methods such as -`checked_add`2. +`checked_add`[^checked-methods]. It is recommended that explicit wrapping methods such as `wrapping_add` be used when wrapping semantics are intended, and that explicit checking and wrapping methods always be used when using Unsafe Rust. -2\. See [the `u32` docs](../std/primitive.u32.html) for more -information on the checked, overflowing, saturating, and wrapping methods -(using u32 as an example). - +[^checked-methods]: See [the `u32` docs](../std/primitive.u32.html) for more + information on the checked, overflowing, saturating, and wrapping methods + (using u32 as an example). ### Non-executable memory regions @@ -180,17 +173,14 @@ binary. The presence of an element of type `PT_GNU_STACK` in the program header table with the `PF_X` (i.e., executable) flag unset indicates non-executable memory -regions3 are enabled for a given binary (see Fig. 5). +regions[^other-regions] are enabled for a given binary (see Fig. 5). Conversely, the presence of an element of type `PT_GNU_STACK` in the program header table with the `PF_X` flag set or the absence of an element of type `PT_GNU_STACK` in the program header table indicates non-executable memory regions are not enabled for a given binary. -3\. See the Appendix section for more information on why it -affects other memory regions besides the stack. - +[^other-regions]: See the [Appendix section](#appendix) for more information + on why it affects other memory regions besides the stack. ### Stack clashing protection @@ -270,8 +260,7 @@ $ readelf -d target/release/hello-rust | grep BIND_NOW Fig. 10. Checking if immediate binding is enabled for a given binary. The presence of an element with the `DT_BIND_NOW` tag and the `DF_BIND_NOW` -flag4 in the dynamic section indicates immediate binding +flag[^bind-now] in the dynamic section indicates immediate binding is enabled for a given binary (see Fig. 10). Conversely, the absence of an element with the `DT_BIND_NOW` tag and the `DF_BIND_NOW` flag in the dynamic section indicates immediate binding is not enabled for a given binary. @@ -281,9 +270,7 @@ table and of an element with the `DT_BIND_NOW` tag and the `DF_BIND_NOW` flag in the dynamic section indicates full RELRO is enabled for a given binary (see Figs. 9–10). -4\. And the `DF_1_NOW` flag for some link editors. - +[^bind-now]: And the `DF_1_NOW` flag for some link editors. ### Heap corruption protection @@ -303,8 +290,7 @@ Rust’s default allocator has historically been [jemalloc](http://jemalloc.net/), and it has long been the cause of issues and the subject of much discussion[32]–[38]. Consequently, it has been removed as the default allocator in favor of the operating system’s standard C library -default allocator5 since version 1.32.0 (2019-01-17)[39]. +default allocator[^linx-allocator] since version 1.32.0 (2019-01-17)[39]. ```rust,no_run fn main() { @@ -343,11 +329,9 @@ Fig. 13. Build and execution of hello-rust-heap with debug assertions disabled Heap corruption checks are performed when using the default allocator (i.e., the GNU Allocator) (see Figs. 12–13). -5\. Linux's standard C library default allocator is the GNU -Allocator, which is derived from ptmalloc (pthreads malloc) by Wolfram Gloger, -which in turn is derived from dlmalloc (Doug Lea malloc) by Doug Lea. - +[^linx-allocator]: Linux's standard C library default allocator is the GNU + Allocator, which is derived from ptmalloc (pthreads malloc) by Wolfram Gloger, + which in turn is derived from dlmalloc (Doug Lea malloc) by Doug Lea. ### Stack smashing protection @@ -385,8 +369,7 @@ commercially available [grsecurity/PaX Reuse Attack Protector (RAP)](https://grsecurity.net/rap_faq). The Rust compiler supports forward-edge control flow protection on nightly -builds[41]-[42] 6. +builds[41]-[42] [^win-cfg]. ```text $ readelf -s -W target/release/hello-rust | grep "\.cfi" @@ -401,10 +384,8 @@ of symbols suffixed with ".cfi" or the `__cfi_init` symbol (and references to `__cfi_check`) indicates that LLVM CFI is not enabled for a given binary (see Fig. 15). -6\. It also supports Control Flow Guard (CFG) on Windows (see -). - +[^win-cfg]: It also supports Control Flow Guard (CFG) on Windows (see + ). ### Backward-edge control flow protection @@ -431,8 +412,7 @@ Newer processors provide hardware assistance for backward-edge control flow protection, such as ARM Pointer Authentication, and Intel Shadow Stack as part of Intel CET. -The Rust compiler supports shadow stack for the AArch64 architecture7on +The Rust compiler supports shadow stack for the AArch64 architecture[^amd64-shadow] on nightly builds[43]-[44], and also supports safe stack on nightly builds[45]-[46]. @@ -447,9 +427,8 @@ enabled for a given binary. Conversely, the absence of the `__safestack_init` symbol indicates that LLVM SafeStack is not enabled for a given binary (see Fig. 16). -7\. The shadow stack implementation for the AMD64 architecture -and equivalent in LLVM was removed due to performance and security issues. +[^amd64-shadow]: The shadow stack implementation for the AMD64 architecture + and equivalent in LLVM was removed due to performance and security issues. ## Appendix diff --git a/src/doc/rustc/src/platform-support.md b/src/doc/rustc/src/platform-support.md index bc97568f85c97..60002a5f9e5da 100644 --- a/src/doc/rustc/src/platform-support.md +++ b/src/doc/rustc/src/platform-support.md @@ -34,16 +34,17 @@ target | notes -------|------- [`aarch64-apple-darwin`](platform-support/apple-darwin.md) | ARM64 macOS (11.0+, Big Sur+) `aarch64-unknown-linux-gnu` | ARM64 Linux (kernel 4.1, glibc 2.17+) -`i686-pc-windows-gnu` | 32-bit MinGW (Windows 10+, Windows Server 2016+, Pentium 4) [^x86_32-floats-return-ABI] -`i686-pc-windows-msvc` | 32-bit MSVC (Windows 10+, Windows Server 2016+, Pentium 4) [^x86_32-floats-return-ABI] +`i686-pc-windows-msvc` | 32-bit MSVC (Windows 10+, Windows Server 2016+, Pentium 4) [^x86_32-floats-return-ABI] [^win32-msvc-alignment] `i686-unknown-linux-gnu` | 32-bit Linux (kernel 3.2+, glibc 2.17+, Pentium 4) [^x86_32-floats-return-ABI] [`x86_64-apple-darwin`](platform-support/apple-darwin.md) | 64-bit macOS (10.12+, Sierra+) -`x86_64-pc-windows-gnu` | 64-bit MinGW (Windows 10+, Windows Server 2016+) +[`x86_64-pc-windows-gnu`](platform-support/windows-gnu.md) | 64-bit MinGW (Windows 10+, Windows Server 2016+) `x86_64-pc-windows-msvc` | 64-bit MSVC (Windows 10+, Windows Server 2016+) `x86_64-unknown-linux-gnu` | 64-bit Linux (kernel 3.2+, glibc 2.17+) [^x86_32-floats-return-ABI]: Due to limitations of the C ABI, floating-point support on `i686` targets is non-compliant: floating-point return values are passed via an x87 register, so NaN payload bits can be lost. Functions with the default Rust ABI are not affected. See [issue #115567][x86-32-float-return-issue]. +[^win32-msvc-alignment]: Due to non-standard behavior of MSVC, native C code on this target can cause types with an alignment of more than 4 bytes to be incorrectly aligned to only 4 bytes (this affects, e.g., `u64` and `i64`). Rust applies some mitigations to reduce the impact of this issue, but this can still cause unsoundness due to unsafe code that (correctly) assumes that references are always properly aligned. See [issue #112480](https://github.com/rust-lang/rust/issues/112480). + [77071]: https://github.com/rust-lang/rust/issues/77071 [x86-32-float-return-issue]: https://github.com/rust-lang/rust/issues/115567 @@ -96,6 +97,7 @@ target | notes [`armv7-unknown-linux-ohos`](platform-support/openharmony.md) | Armv7-A OpenHarmony [`loongarch64-unknown-linux-gnu`](platform-support/loongarch-linux.md) | LoongArch64 Linux, LP64D ABI (kernel 5.19, glibc 2.36) [`loongarch64-unknown-linux-musl`](platform-support/loongarch-linux.md) | LoongArch64 Linux, LP64D ABI (kernel 5.19, musl 1.2.5) +[`i686-pc-windows-gnu`](platform-support/windows-gnu.md) | 32-bit MinGW (Windows 10+, Windows Server 2016+, Pentium 4) [^x86_32-floats-return-ABI] [^win32-msvc-alignment] `powerpc-unknown-linux-gnu` | PowerPC Linux (kernel 3.2, glibc 2.17) `powerpc64-unknown-linux-gnu` | PPC64 Linux (kernel 3.2, glibc 2.17) [`powerpc64le-unknown-linux-gnu`](platform-support/powerpc64le-unknown-linux-gnu.md) | PPC64LE Linux (kernel 3.10, glibc 2.17) @@ -143,7 +145,7 @@ target | std | notes [`aarch64-apple-ios-macabi`](platform-support/apple-ios-macabi.md) | ✓ | Mac Catalyst on ARM64 [`aarch64-apple-ios-sim`](platform-support/apple-ios.md) | ✓ | Apple iOS Simulator on ARM64 [`aarch64-linux-android`](platform-support/android.md) | ✓ | ARM64 Android -[`aarch64-pc-windows-gnullvm`](platform-support/pc-windows-gnullvm.md) | ✓ | ARM64 MinGW (Windows 10+), LLVM ABI +[`aarch64-pc-windows-gnullvm`](platform-support/windows-gnullvm.md) | ✓ | ARM64 MinGW (Windows 10+), LLVM ABI [`aarch64-unknown-fuchsia`](platform-support/fuchsia.md) | ✓ | ARM64 Fuchsia `aarch64-unknown-none` | * | Bare ARM64, hardfloat `aarch64-unknown-none-softfloat` | * | Bare ARM64, softfloat @@ -154,7 +156,7 @@ target | std | notes [`arm64ec-pc-windows-msvc`](platform-support/arm64ec-pc-windows-msvc.md) | ✓ | Arm64EC Windows MSVC [`armebv7r-none-eabi`](platform-support/armv7r-none-eabi.md) | * | Bare Armv7-R, Big Endian [`armebv7r-none-eabihf`](platform-support/armv7r-none-eabi.md) | * | Bare Armv7-R, Big Endian, hardfloat -`armv5te-unknown-linux-gnueabi` | ✓ | Armv5TE Linux (kernel 4.4, glibc 2.23) +[`armv5te-unknown-linux-gnueabi`](platform-support/armv5te-unknown-linux-gnueabi.md) | ✓ | Armv5TE Linux (kernel 4.4, glibc 2.23) `armv5te-unknown-linux-musleabi` | ✓ | Armv5TE Linux with musl 1.2.3 [`armv7-linux-androideabi`](platform-support/android.md) | ✓ | Armv7-A Android `armv7-unknown-linux-gnueabi` | ✓ | Armv7-A Linux (kernel 4.15, glibc 2.27) @@ -166,10 +168,10 @@ target | std | notes `i586-unknown-linux-gnu` | ✓ | 32-bit Linux (kernel 3.2, glibc 2.17, original Pentium) [^x86_32-floats-x87] `i586-unknown-linux-musl` | ✓ | 32-bit Linux (musl 1.2.3, original Pentium) [^x86_32-floats-x87] [`i686-linux-android`](platform-support/android.md) | ✓ | 32-bit x86 Android ([Pentium 4 plus various extensions](https://developer.android.com/ndk/guides/abis.html#x86)) [^x86_32-floats-return-ABI] -[`i686-pc-windows-gnullvm`](platform-support/pc-windows-gnullvm.md) | ✓ | 32-bit x86 MinGW (Windows 10+, Pentium 4), LLVM ABI [^x86_32-floats-return-ABI] +[`i686-pc-windows-gnullvm`](platform-support/windows-gnullvm.md) | ✓ | 32-bit x86 MinGW (Windows 10+, Pentium 4), LLVM ABI [^x86_32-floats-return-ABI] [`i686-unknown-freebsd`](platform-support/freebsd.md) | ✓ | 32-bit x86 FreeBSD (Pentium 4) [^x86_32-floats-return-ABI] `i686-unknown-linux-musl` | ✓ | 32-bit Linux with musl 1.2.3 (Pentium 4) [^x86_32-floats-return-ABI] -[`i686-unknown-uefi`](platform-support/unknown-uefi.md) | ? | 32-bit UEFI (Pentium 4, softfloat) +[`i686-unknown-uefi`](platform-support/unknown-uefi.md) | ? | 32-bit UEFI (Pentium 4, softfloat) [^win32-msvc-alignment] [`loongarch64-unknown-none`](platform-support/loongarch-none.md) | * | LoongArch64 Bare-metal (LP64D ABI) [`loongarch64-unknown-none-softfloat`](platform-support/loongarch-none.md) | * | LoongArch64 Bare-metal (LP64S ABI) [`nvptx64-nvidia-cuda`](platform-support/nvptx64-nvidia-cuda.md) | * | --emit=asm generates PTX code that [runs on NVIDIA GPUs] @@ -202,7 +204,7 @@ target | std | notes [`x86_64-fortanix-unknown-sgx`](platform-support/x86_64-fortanix-unknown-sgx.md) | ✓ | [Fortanix ABI] for 64-bit Intel SGX [`x86_64-linux-android`](platform-support/android.md) | ✓ | 64-bit x86 Android [`x86_64-pc-solaris`](platform-support/solaris.md) | ✓ | 64-bit x86 Solaris 11.4 -[`x86_64-pc-windows-gnullvm`](platform-support/pc-windows-gnullvm.md) | ✓ | 64-bit x86 MinGW (Windows 10+), LLVM ABI +[`x86_64-pc-windows-gnullvm`](platform-support/windows-gnullvm.md) | ✓ | 64-bit x86 MinGW (Windows 10+), LLVM ABI [`x86_64-unknown-fuchsia`](platform-support/fuchsia.md) | ✓ | 64-bit x86 Fuchsia `x86_64-unknown-linux-gnux32` | ✓ | 64-bit Linux (x32 ABI) (kernel 4.15, glibc 2.27) [`x86_64-unknown-none`](platform-support/x86_64-unknown-none.md) | * | Freestanding/bare-metal x86_64, softfloat @@ -317,9 +319,9 @@ target | std | host | notes [`i686-unknown-netbsd`](platform-support/netbsd.md) | ✓ | ✓ | NetBSD/i386 (Pentium 4) [^x86_32-floats-return-ABI] [`i686-unknown-openbsd`](platform-support/openbsd.md) | ✓ | ✓ | 32-bit OpenBSD (Pentium 4) [^x86_32-floats-return-ABI] `i686-uwp-windows-gnu` | ✓ | | [^x86_32-floats-return-ABI] -[`i686-uwp-windows-msvc`](platform-support/uwp-windows-msvc.md) | ✓ | | [^x86_32-floats-return-ABI] +[`i686-uwp-windows-msvc`](platform-support/uwp-windows-msvc.md) | ✓ | | [^x86_32-floats-return-ABI] [^win32-msvc-alignment] [`i686-win7-windows-gnu`](platform-support/win7-windows-gnu.md) | ✓ | | 32-bit Windows 7 support [^x86_32-floats-return-ABI] -[`i686-win7-windows-msvc`](platform-support/win7-windows-msvc.md) | ✓ | | 32-bit Windows 7 support [^x86_32-floats-return-ABI] +[`i686-win7-windows-msvc`](platform-support/win7-windows-msvc.md) | ✓ | | 32-bit Windows 7 support [^x86_32-floats-return-ABI] [^win32-msvc-alignment] [`i686-wrs-vxworks`](platform-support/vxworks.md) | ✓ | | [^x86_32-floats-return-ABI] [`loongarch64-unknown-linux-ohos`](platform-support/openharmony.md) | ✓ | | LoongArch64 OpenHarmony [`m68k-unknown-linux-gnu`](platform-support/m68k-unknown-linux-gnu.md) | ? | | Motorola 680x0 Linux @@ -407,6 +409,7 @@ target | std | host | notes [`wasm32-wali-linux-musl`](platform-support/wasm32-wali-linux.md) | ? | | WebAssembly with [WALI](https://github.com/arjunr2/WALI) [`x86_64-apple-tvos`](platform-support/apple-tvos.md) | ✓ | | x86 64-bit tvOS [`x86_64-apple-watchos-sim`](platform-support/apple-watchos.md) | ✓ | | x86 64-bit Apple WatchOS simulator +[`x86_64-lynx-lynxos178`](platform-support/lynxos178.md) | | | x86_64 LynxOS-178 [`x86_64-pc-cygwin`](platform-support/x86_64-pc-cygwin.md) | ✓ | | 64-bit x86 Cygwin | [`x86_64-pc-nto-qnx710`](platform-support/nto-qnx.md) | ✓ | | x86 64-bit QNX Neutrino 7.1 RTOS with default network stack (io-pkt) | [`x86_64-pc-nto-qnx710_iosock`](platform-support/nto-qnx.md) | ✓ | | x86 64-bit QNX Neutrino 7.1 RTOS with new network stack (io-sock) | diff --git a/src/doc/rustc/src/platform-support/TEMPLATE.md b/src/doc/rustc/src/platform-support/TEMPLATE.md index 96c79973a1634..f523237ab06ff 100644 --- a/src/doc/rustc/src/platform-support/TEMPLATE.md +++ b/src/doc/rustc/src/platform-support/TEMPLATE.md @@ -6,7 +6,8 @@ One-sentence description of the target (e.g. CPU, OS) ## Target maintainers -- Some Person, https://github.com/... +[@Ghost](https://github.com/Ghost) +[@octocat](https://github.com/octocat) ## Requirements diff --git a/src/doc/rustc/src/platform-support/aarch64-nintendo-switch-freestanding.md b/src/doc/rustc/src/platform-support/aarch64-nintendo-switch-freestanding.md index 308e1fe2f92a0..6951d7f23f8d8 100644 --- a/src/doc/rustc/src/platform-support/aarch64-nintendo-switch-freestanding.md +++ b/src/doc/rustc/src/platform-support/aarch64-nintendo-switch-freestanding.md @@ -4,10 +4,10 @@ Nintendo Switch with pure-Rust toolchain. -## Designated Developers +## Target Maintainers -* [@leo60228](https://github.com/leo60228) -* [@jam1garner](https://github.com/jam1garner) +[@leo60228](https://github.com/leo60228) +[@jam1garner](https://github.com/jam1garner) ## Requirements diff --git a/src/doc/rustc/src/platform-support/aarch64-unknown-teeos.md b/src/doc/rustc/src/platform-support/aarch64-unknown-teeos.md index e2f2379ec440d..be11d0cdd1037 100644 --- a/src/doc/rustc/src/platform-support/aarch64-unknown-teeos.md +++ b/src/doc/rustc/src/platform-support/aarch64-unknown-teeos.md @@ -20,8 +20,8 @@ TEEOS is open source in progress. [MORE about](https://gitee.com/opentrustee-gro ## Target maintainers -- Petrochenkov Vadim -- Sword-Destiny +[@petrochenkov](https://github.com/petrochenkov) +[@Sword-Destiny](https://github.com/Sword-Destiny) ## Setup We use OpenHarmony SDK for TEEOS. diff --git a/src/doc/rustc/src/platform-support/aix.md b/src/doc/rustc/src/platform-support/aix.md index 5a198062b9529..3002a5c4b2cd9 100644 --- a/src/doc/rustc/src/platform-support/aix.md +++ b/src/doc/rustc/src/platform-support/aix.md @@ -6,8 +6,8 @@ Rust for AIX operating system, currently only 64-bit PowerPC is supported. ## Target maintainers -- David Tenty `daltenty@ibm.com`, https://github.com/daltenty -- Chris Cambly, `ccambly@ca.ibm.com`, https://github.com/gilamn5tr +[@daltenty](https://github.com/daltenty) +[@gilamn5tr](https://github.com/gilamn5tr) ## Requirements diff --git a/src/doc/rustc/src/platform-support/amdgcn-amd-amdhsa.md b/src/doc/rustc/src/platform-support/amdgcn-amd-amdhsa.md index 0b2f798e66de1..16152dd2dad51 100644 --- a/src/doc/rustc/src/platform-support/amdgcn-amd-amdhsa.md +++ b/src/doc/rustc/src/platform-support/amdgcn-amd-amdhsa.md @@ -6,7 +6,7 @@ AMD GPU target for compute/HSA (Heterogeneous System Architecture). ## Target maintainers -- [@Flakebi](https://github.com/Flakebi) +[@Flakebi](https://github.com/Flakebi) ## Requirements diff --git a/src/doc/rustc/src/platform-support/android.md b/src/doc/rustc/src/platform-support/android.md index 54e7ddca32aab..a54288f8f050e 100644 --- a/src/doc/rustc/src/platform-support/android.md +++ b/src/doc/rustc/src/platform-support/android.md @@ -8,9 +8,9 @@ ## Target maintainers -- Chris Wailes ([@chriswailes](https://github.com/chriswailes)) -- Matthew Maurer ([@maurer](https://github.com/maurer)) -- Martin Geisler ([@mgeisler](https://github.com/mgeisler)) +[@chriswailes](https://github.com/chriswailes) +[@maurer](https://github.com/maurer) +[@mgeisler](https://github.com/mgeisler) ## Requirements diff --git a/src/doc/rustc/src/platform-support/apple-darwin.md b/src/doc/rustc/src/platform-support/apple-darwin.md index 22c54d04b1eb8..e41aee9bdb248 100644 --- a/src/doc/rustc/src/platform-support/apple-darwin.md +++ b/src/doc/rustc/src/platform-support/apple-darwin.md @@ -9,8 +9,8 @@ Apple macOS targets. ## Target maintainers -- [@thomcc](https://github.com/thomcc) -- [@madsmtm](https://github.com/madsmtm) +[@thomcc](https://github.com/thomcc) +[@madsmtm](https://github.com/madsmtm) ## Requirements diff --git a/src/doc/rustc/src/platform-support/apple-ios-macabi.md b/src/doc/rustc/src/platform-support/apple-ios-macabi.md index 79966d908d898..d4b71dbd4f492 100644 --- a/src/doc/rustc/src/platform-support/apple-ios-macabi.md +++ b/src/doc/rustc/src/platform-support/apple-ios-macabi.md @@ -9,9 +9,9 @@ Apple Mac Catalyst targets. ## Target maintainers -- [@badboy](https://github.com/badboy) -- [@BlackHoleFox](https://github.com/BlackHoleFox) -- [@madsmtm](https://github.com/madsmtm) +[@badboy](https://github.com/badboy) +[@BlackHoleFox](https://github.com/BlackHoleFox) +[@madsmtm](https://github.com/madsmtm) ## Requirements diff --git a/src/doc/rustc/src/platform-support/apple-ios.md b/src/doc/rustc/src/platform-support/apple-ios.md index 7f5dc361c49d7..64325554ab60b 100644 --- a/src/doc/rustc/src/platform-support/apple-ios.md +++ b/src/doc/rustc/src/platform-support/apple-ios.md @@ -15,9 +15,9 @@ Apple iOS / iPadOS targets. ## Target maintainers -- [@badboy](https://github.com/badboy) -- [@deg4uss3r](https://github.com/deg4uss3r) -- [@madsmtm](https://github.com/madsmtm) +[@badboy](https://github.com/badboy) +[@deg4uss3r](https://github.com/deg4uss3r) +[@madsmtm](https://github.com/madsmtm) ## Requirements diff --git a/src/doc/rustc/src/platform-support/apple-tvos.md b/src/doc/rustc/src/platform-support/apple-tvos.md index fc46db20074f4..193d64666121e 100644 --- a/src/doc/rustc/src/platform-support/apple-tvos.md +++ b/src/doc/rustc/src/platform-support/apple-tvos.md @@ -10,8 +10,8 @@ Apple tvOS targets. ## Target maintainers -- [@thomcc](https://github.com/thomcc) -- [@madsmtm](https://github.com/madsmtm) +[@thomcc](https://github.com/thomcc) +[@madsmtm](https://github.com/madsmtm) ## Requirements diff --git a/src/doc/rustc/src/platform-support/apple-visionos.md b/src/doc/rustc/src/platform-support/apple-visionos.md index 7cf9549227d9a..ed96912da7a48 100644 --- a/src/doc/rustc/src/platform-support/apple-visionos.md +++ b/src/doc/rustc/src/platform-support/apple-visionos.md @@ -9,8 +9,8 @@ Apple visionOS / xrOS targets. ## Target maintainers -- [@agg23](https://github.com/agg23) -- [@madsmtm](https://github.com/madsmtm) +[@agg23](https://github.com/agg23) +[@madsmtm](https://github.com/madsmtm) ## Requirements diff --git a/src/doc/rustc/src/platform-support/apple-watchos.md b/src/doc/rustc/src/platform-support/apple-watchos.md index 7b12d9ebfd4b7..6ac09d0d1e538 100644 --- a/src/doc/rustc/src/platform-support/apple-watchos.md +++ b/src/doc/rustc/src/platform-support/apple-watchos.md @@ -12,10 +12,10 @@ Apple watchOS targets. ## Target maintainers -- [@deg4uss3r](https://github.com/deg4uss3r) -- [@vladimir-ea](https://github.com/vladimir-ea) -- [@leohowell](https://github.com/leohowell) -- [@madsmtm](https://github.com/madsmtm) +[@deg4uss3r](https://github.com/deg4uss3r) +[@vladimir-ea](https://github.com/vladimir-ea) +[@leohowell](https://github.com/leohowell) +[@madsmtm](https://github.com/madsmtm) ## Requirements diff --git a/src/doc/rustc/src/platform-support/arm64e-apple-darwin.md b/src/doc/rustc/src/platform-support/arm64e-apple-darwin.md index 3200b7ae1b6ef..2043b34210531 100644 --- a/src/doc/rustc/src/platform-support/arm64e-apple-darwin.md +++ b/src/doc/rustc/src/platform-support/arm64e-apple-darwin.md @@ -6,7 +6,7 @@ ARM64e macOS (11.0+, Big Sur+) ## Target maintainers -- Artyom Tetyukhin ([@arttet](https://github.com/arttet)) +[@arttet](https://github.com/arttet) ## Requirements diff --git a/src/doc/rustc/src/platform-support/arm64e-apple-ios.md b/src/doc/rustc/src/platform-support/arm64e-apple-ios.md index aa99276a68fd1..a2b09e0772822 100644 --- a/src/doc/rustc/src/platform-support/arm64e-apple-ios.md +++ b/src/doc/rustc/src/platform-support/arm64e-apple-ios.md @@ -6,7 +6,7 @@ ARM64e iOS (14.0+) ## Target maintainers -- Artyom Tetyukhin ([@arttet](https://github.com/arttet)) +[@arttet](https://github.com/arttet) ## Requirements diff --git a/src/doc/rustc/src/platform-support/arm64e-apple-tvos.md b/src/doc/rustc/src/platform-support/arm64e-apple-tvos.md index 332ea750f208f..36588c5a96430 100644 --- a/src/doc/rustc/src/platform-support/arm64e-apple-tvos.md +++ b/src/doc/rustc/src/platform-support/arm64e-apple-tvos.md @@ -6,7 +6,7 @@ ARM64e tvOS (10.0+) ## Target maintainers -- Artyom Tetyukhin ([@arttet](https://github.com/arttet)) +[@arttet](https://github.com/arttet) ## Requirements diff --git a/src/doc/rustc/src/platform-support/arm64ec-pc-windows-msvc.md b/src/doc/rustc/src/platform-support/arm64ec-pc-windows-msvc.md index 67903ae640146..d02043b2ae9e8 100644 --- a/src/doc/rustc/src/platform-support/arm64ec-pc-windows-msvc.md +++ b/src/doc/rustc/src/platform-support/arm64ec-pc-windows-msvc.md @@ -7,7 +7,7 @@ applications on AArch64 Windows 11. See +[@Patryk27](https://github.com/Patryk27) ## Requirements diff --git a/src/doc/rustc/src/platform-support/csky-unknown-linux-gnuabiv2.md b/src/doc/rustc/src/platform-support/csky-unknown-linux-gnuabiv2.md index f749b37aa7a8d..e69d606ccd2fe 100644 --- a/src/doc/rustc/src/platform-support/csky-unknown-linux-gnuabiv2.md +++ b/src/doc/rustc/src/platform-support/csky-unknown-linux-gnuabiv2.md @@ -22,7 +22,7 @@ other links: ## Target maintainers -* [@Dirreke](https://github.com/Dirreke) +[@Dirreke](https://github.com/Dirreke) ## Requirements diff --git a/src/doc/rustc/src/platform-support/esp-idf.md b/src/doc/rustc/src/platform-support/esp-idf.md index 91d7d66627d09..baf42ab29a66a 100644 --- a/src/doc/rustc/src/platform-support/esp-idf.md +++ b/src/doc/rustc/src/platform-support/esp-idf.md @@ -6,9 +6,9 @@ Targets for the [ESP-IDF](https://github.com/espressif/esp-idf) development fram ## Target maintainers -- Ivan Markov [@ivmarkov](https://github.com/ivmarkov) -- Scott Mabin [@MabezDev](https://github.com/MabezDev) -- Sergio Gasquez [@SergioGasquez](https://github.com/SergioGasquez) +[@ivmarkov](https://github.com/ivmarkov) +[@MabezDev](https://github.com/MabezDev) +[@SergioGasquez](https://github.com/SergioGasquez) ## Requirements diff --git a/src/doc/rustc/src/platform-support/freebsd.md b/src/doc/rustc/src/platform-support/freebsd.md index 9d34d3649208a..9d7218b258ec4 100644 --- a/src/doc/rustc/src/platform-support/freebsd.md +++ b/src/doc/rustc/src/platform-support/freebsd.md @@ -6,8 +6,8 @@ ## Target maintainers -- Alan Somers `asomers@FreeBSD.org`, https://github.com/asomers -- Mikael Urankar `mikael@FreeBSD.org`, https://github.com/MikaelUrankar +[@asomers](https://github.com/asomers) +[@MikaelUrankar](https://github.com/MikaelUrankar) ## Requirements diff --git a/src/doc/rustc/src/platform-support/fuchsia.md b/src/doc/rustc/src/platform-support/fuchsia.md index bed5b81adc5bd..e2befc5d9955b 100644 --- a/src/doc/rustc/src/platform-support/fuchsia.md +++ b/src/doc/rustc/src/platform-support/fuchsia.md @@ -7,9 +7,11 @@ updatable, and performant. ## Target maintainers -See [`fuchsia.toml`] in the `team` repository for current target maintainers. +[@erickt](https://github.com/erickt) +[@Nashenas88](https://github.com/Nashenas88) -[`fuchsia.toml`]: https://github.com/rust-lang/team/blob/master/teams/fuchsia.toml +The up-to-date list can be also found via the +[fuchsia marker team](https://github.com/rust-lang/team/blob/master/teams/fuchsia.toml). ## Table of contents diff --git a/src/doc/rustc/src/platform-support/hermit.md b/src/doc/rustc/src/platform-support/hermit.md index df7bc495fce84..069c253bd38a3 100644 --- a/src/doc/rustc/src/platform-support/hermit.md +++ b/src/doc/rustc/src/platform-support/hermit.md @@ -14,8 +14,8 @@ Target triplets available so far: ## Target maintainers -- Stefan Lankes ([@stlankes](https://github.com/stlankes)) -- Martin Kröning ([@mkroening](https://github.com/mkroening)) +[@stlankes](https://github.com/stlankes) +[@mkroening](https://github.com/mkroening) ## Requirements diff --git a/src/doc/rustc/src/platform-support/hexagon-unknown-linux-musl.md b/src/doc/rustc/src/platform-support/hexagon-unknown-linux-musl.md index cfd2b2bac9cc6..be6e17883f4eb 100644 --- a/src/doc/rustc/src/platform-support/hexagon-unknown-linux-musl.md +++ b/src/doc/rustc/src/platform-support/hexagon-unknown-linux-musl.md @@ -11,7 +11,7 @@ DSP architecture. ## Target maintainers -- [Brian Cain](https://github.com/androm3da), `bcain@quicinc.com` +[@androm3da](https://github.com/androm3da) ## Requirements The target is cross-compiled. This target supports `std`. By default, code diff --git a/src/doc/rustc/src/platform-support/hexagon-unknown-none-elf.md b/src/doc/rustc/src/platform-support/hexagon-unknown-none-elf.md index c7726eacaf4e3..b07b0bb08d60a 100644 --- a/src/doc/rustc/src/platform-support/hexagon-unknown-none-elf.md +++ b/src/doc/rustc/src/platform-support/hexagon-unknown-none-elf.md @@ -10,7 +10,7 @@ Rust for baremetal Hexagon DSPs. ## Target maintainers -- [Brian Cain](https://github.com/androm3da), `bcain@quicinc.com` +[@androm3da](https://github.com/androm3da) ## Requirements diff --git a/src/doc/rustc/src/platform-support/hurd.md b/src/doc/rustc/src/platform-support/hurd.md index 2521f79dc5e65..6ecde1db5111e 100644 --- a/src/doc/rustc/src/platform-support/hurd.md +++ b/src/doc/rustc/src/platform-support/hurd.md @@ -6,7 +6,7 @@ ## Target maintainers -- Samuel Thibault, `samuel.thibault@ens-lyon.org`, https://github.com/sthibaul/ +[@sthibaul](https://github.com/sthibaul) ## Requirements diff --git a/src/doc/rustc/src/platform-support/i686-apple-darwin.md b/src/doc/rustc/src/platform-support/i686-apple-darwin.md index abb64dcc986de..5f18a5e271acb 100644 --- a/src/doc/rustc/src/platform-support/i686-apple-darwin.md +++ b/src/doc/rustc/src/platform-support/i686-apple-darwin.md @@ -4,8 +4,8 @@ Apple macOS on 32-bit x86. ## Target maintainers -- [@thomcc](https://github.com/thomcc) -- [@madsmtm](https://github.com/madsmtm) +[@thomcc](https://github.com/thomcc) +[@madsmtm](https://github.com/madsmtm) ## Requirements diff --git a/src/doc/rustc/src/platform-support/illumos.md b/src/doc/rustc/src/platform-support/illumos.md index dd2ae90f6741f..c03238269d37a 100644 --- a/src/doc/rustc/src/platform-support/illumos.md +++ b/src/doc/rustc/src/platform-support/illumos.md @@ -7,8 +7,8 @@ including advanced system debugging, next generation filesystem, networking, and ## Target maintainers -- Joshua M. Clulow ([@jclulow](https://github.com/jclulow)) -- Patrick Mooney ([@pfmooney](https://github.com/pfmooney)) +[@jclulow](https://github.com/jclulow) +[@pfmooney](https://github.com/pfmooney) ## Requirements diff --git a/src/doc/rustc/src/platform-support/kmc-solid.md b/src/doc/rustc/src/platform-support/kmc-solid.md index 44f47927286d7..838662a3741ae 100644 --- a/src/doc/rustc/src/platform-support/kmc-solid.md +++ b/src/doc/rustc/src/platform-support/kmc-solid.md @@ -14,9 +14,9 @@ The target names follow this format: `$ARCH-kmc-solid_$KERNEL-$ABI`, where `$ARC | `armv7a-kmc-solid_asp3-eabi` | `arm` | `kmc` | `solid_asp3` | | `armv7a-kmc-solid_asp3-eabihf` | `arm` | `kmc` | `solid_asp3` | -## Designated Developers +## Target Maintainers -- [@kawadakk](https://github.com/kawadakk) +[@kawadakk](https://github.com/kawadakk) ## Requirements diff --git a/src/doc/rustc/src/platform-support/loongarch-linux.md b/src/doc/rustc/src/platform-support/loongarch-linux.md index 2c9f712ce829d..817d3a892303a 100644 --- a/src/doc/rustc/src/platform-support/loongarch-linux.md +++ b/src/doc/rustc/src/platform-support/loongarch-linux.md @@ -22,10 +22,10 @@ Reference material: ## Target maintainers -- [WANG Rui](https://github.com/heiher) `wangrui@loongson.cn` -- [ZHAI Xiang](https://github.com/xiangzhai) `zhaixiang@loongson.cn` -- [ZHAI Xiaojuan](https://github.com/zhaixiaojuan) `zhaixiaojuan@loongson.cn` -- [WANG Xuerui](https://github.com/xen0n) `git@xen0n.name` +[@heiher](https://github.com/heiher) +[@xiangzhai](https://github.com/xiangzhai) +[@zhaixiaojuan](https://github.com/zhaixiaojuan) +[@xen0n](https://github.com/xen0n) ## Requirements diff --git a/src/doc/rustc/src/platform-support/loongarch-none.md b/src/doc/rustc/src/platform-support/loongarch-none.md index 6c5d866983011..a2bd6e5734cd4 100644 --- a/src/doc/rustc/src/platform-support/loongarch-none.md +++ b/src/doc/rustc/src/platform-support/loongarch-none.md @@ -11,8 +11,8 @@ Freestanding/bare-metal LoongArch64 binaries in ELF format: firmware, kernels, e ## Target maintainers -- [WANG Rui](https://github.com/heiher) `wangrui@loongson.cn` -- [WANG Xuerui](https://github.com/xen0n) `git@xen0n.name` +[@heiher](https://github.com/heiher) +[@xen0n](https://github.com/xen0n) ## Requirements diff --git a/src/doc/rustc/src/platform-support/lynxos178.md b/src/doc/rustc/src/platform-support/lynxos178.md new file mode 100644 index 0000000000000..6463f95a0b8e0 --- /dev/null +++ b/src/doc/rustc/src/platform-support/lynxos178.md @@ -0,0 +1,77 @@ +# `*-lynxos178-*` + +**Tier: 3** + +Targets for the LynxOS-178 operating system. + +[LynxOS-178](https://www.lynx.com/products/lynxos-178-do-178c-certified-posix-rtos) +is a commercial RTOS designed for safety-critical real-time systems. It is +developed by Lynx Software Technologies as part of the +[MOSA.ic](https://www.lynx.com/solutions/safe-and-secure-operating-environment) +product suite. + +Target triples available: +- `x86_64-lynx-lynxos178` + +## Target maintainers + +- Renat Fatykhov, https://github.com/rfatykhov-lynx + +## Requirements + +To build Rust programs for LynxOS-178, you must first have LYNX MOSA.ic +installed on the build machine. + +This target supports only cross-compilation, from the same hosts supported by +the Lynx CDK. + +Currently only `no_std` programs are supported. Work to support `std` is in +progress. + +## Building the target + +You can build Rust with support for x86_64-lynx-lynxos178 by adding that +to the `target` list in `config.toml`, and then running `./x build --target +x86_64-lynx-lynxos178 compiler`. + +## Building Rust programs + +Rust does not yet ship pre-compiled artifacts for this target. To compile for +this target, you will need to build Rust with the target enabled (see "Building +the target" above). + +Before executing `cargo`, you must configure the environment to build LynxOS-178 +binaries by running `source setup.sh` from the los178 directory. + +If your program/crates contain procedural macros, Rust must be able to build +binaries for the host as well. The host gcc is hidden by sourcing setup.sh. To +deal with this, add the following to your project's `.cargo/config.toml`: +```toml +[target.x86_64-unknown-linux-gnu] +linker = "lynx-host-gcc" +``` +(If necessary substitute your host target triple for x86_64-unknown-linux-gnu.) + +To point `cargo` at the correct rustc binary, set the RUSTC environment +variable. + +The core library should be usable. You can try by building it as part of your +project: +```bash +cargo +nightly build -Z build-std=core --target x86_64-lynx-lynxos178 +``` + +## Testing + +Binaries built with rust can be provided to a LynxOS-178 instance on its file +system, where they can be executed. Rust binaries tend to be large, so it may +be necessary to strip them first. + +It is possible to run the Rust testsuite by providing a test runner that takes +the test binary and executes it under LynxOS-178. Most (all?) tests won't run +without std support though, which is not yet supported. + +## Cross-compilation toolchains and C code + +LYNX MOSA.ic comes with all the tools required to cross-compile C code for +LynxOS-178. diff --git a/src/doc/rustc/src/platform-support/m68k-unknown-linux-gnu.md b/src/doc/rustc/src/platform-support/m68k-unknown-linux-gnu.md index b18a125f3b095..1efea86df92bc 100644 --- a/src/doc/rustc/src/platform-support/m68k-unknown-linux-gnu.md +++ b/src/doc/rustc/src/platform-support/m68k-unknown-linux-gnu.md @@ -4,10 +4,10 @@ Motorola 680x0 Linux -## Designated Developers +## Target Maintainers -* [@glaubitz](https://github.com/glaubitz) -* [@ricky26](https://github.com/ricky26) +[@glaubitz](https://github.com/glaubitz) +[@ricky26](https://github.com/ricky26) ## Requirements diff --git a/src/doc/rustc/src/platform-support/m68k-unknown-none-elf.md b/src/doc/rustc/src/platform-support/m68k-unknown-none-elf.md index 92780cb5a5ca5..e390ba0aee96b 100644 --- a/src/doc/rustc/src/platform-support/m68k-unknown-none-elf.md +++ b/src/doc/rustc/src/platform-support/m68k-unknown-none-elf.md @@ -4,9 +4,9 @@ Bare metal Motorola 680x0 -## Designated Developers +## Target Maintainers -* [@knickish](https://github.com/knickish) +[@knickish](https://github.com/knickish) ## Requirements diff --git a/src/doc/rustc/src/platform-support/mips-mti-none-elf.md b/src/doc/rustc/src/platform-support/mips-mti-none-elf.md index 731f0a8c42f18..c060ebf7c7e00 100644 --- a/src/doc/rustc/src/platform-support/mips-mti-none-elf.md +++ b/src/doc/rustc/src/platform-support/mips-mti-none-elf.md @@ -9,7 +9,7 @@ MIPS32r2 baremetal softfloat, Big Endian or Little Endian. ## Target maintainers -- YunQiang Su, `syq@debian.org`, https://github.com/wzssyqa +[@wzssyqa](https://github.com/wzssyqa) ## Background diff --git a/src/doc/rustc/src/platform-support/mips-release-6.md b/src/doc/rustc/src/platform-support/mips-release-6.md index b779477996d54..77f495751c15d 100644 --- a/src/doc/rustc/src/platform-support/mips-release-6.md +++ b/src/doc/rustc/src/platform-support/mips-release-6.md @@ -16,10 +16,10 @@ The target name follow this format: `--`, where ## Target Maintainers -- [Xuan Chen](https://github.com/chenx97) -- [Walter Ji](https://github.com/709924470) -- [Xinhui Yang](https://github.com/Cyanoxygen) -- [Lain Yang](https://github.com/Fearyncess) +[@chenx97](https://github.com/chenx97) +[@709924470](https://github.com/709924470) +[@Cyanoxygen](https://github.com/Cyanoxygen) +[@Fearyncess](https://github.com/Fearyncess) ## Requirements diff --git a/src/doc/rustc/src/platform-support/mips64-openwrt-linux-musl.md b/src/doc/rustc/src/platform-support/mips64-openwrt-linux-musl.md index 07470eef051d8..2ad33c9e20de6 100644 --- a/src/doc/rustc/src/platform-support/mips64-openwrt-linux-musl.md +++ b/src/doc/rustc/src/platform-support/mips64-openwrt-linux-musl.md @@ -2,7 +2,8 @@ **Tier: 3** ## Target maintainers -- Donald Hoskins `grommish@gmail.com`, https://github.com/Itus-Shield + +[@Itus-Shield](https://github.com/Itus-Shield) ## Requirements This target is cross-compiled. There is no support for `std`. There is no diff --git a/src/doc/rustc/src/platform-support/mipsel-sony-psx.md b/src/doc/rustc/src/platform-support/mipsel-sony-psx.md index 589100e8888b3..2343df227f5db 100644 --- a/src/doc/rustc/src/platform-support/mipsel-sony-psx.md +++ b/src/doc/rustc/src/platform-support/mipsel-sony-psx.md @@ -6,7 +6,7 @@ Sony PlayStation 1 (psx) ## Designated Developer -* [@ayrtonm](https://github.com/ayrtonm) +[@ayrtonm](https://github.com/ayrtonm) ## Requirements diff --git a/src/doc/rustc/src/platform-support/mipsel-unknown-linux-gnu.md b/src/doc/rustc/src/platform-support/mipsel-unknown-linux-gnu.md index b1ee8728c0207..eed0ce4437ac5 100644 --- a/src/doc/rustc/src/platform-support/mipsel-unknown-linux-gnu.md +++ b/src/doc/rustc/src/platform-support/mipsel-unknown-linux-gnu.md @@ -6,7 +6,7 @@ Little-endian 32 bit MIPS for Linux with `glibc. ## Target maintainers -- [@LukasWoodtli](https://github.com/LukasWoodtli) +[@LukasWoodtli](https://github.com/LukasWoodtli) ## Requirements diff --git a/src/doc/rustc/src/platform-support/netbsd.md b/src/doc/rustc/src/platform-support/netbsd.md index ef9337befa643..9040ef637be3d 100644 --- a/src/doc/rustc/src/platform-support/netbsd.md +++ b/src/doc/rustc/src/platform-support/netbsd.md @@ -31,10 +31,13 @@ are built for NetBSD 9.x, although some exceptions exist (some are built for NetBSD 8.x but also work on newer OS versions). -## Designated Developers +## Target Maintainers -- [@he32](https://github.com/he32), `he@NetBSD.org` -- [NetBSD/pkgsrc-wip's rust](https://github.com/NetBSD/pkgsrc-wip/blob/master/rust/Makefile) maintainer (see MAINTAINER variable). This package is part of "pkgsrc work-in-progress" and is used for deployment and testing of new versions of rust +[@he32](https://github.com/he32) + +Further contacts: + +- [NetBSD/pkgsrc-wip's rust](https://github.com/NetBSD/pkgsrc-wip/blob/master/rust185/Makefile) maintainer (see MAINTAINER variable). This package is part of "pkgsrc work-in-progress" and is used for deployment and testing of new versions of rust - [NetBSD's pkgsrc lang/rust](https://github.com/NetBSD/pkgsrc/tree/trunk/lang/rust) for the "proper" package in pkgsrc. - [NetBSD's pkgsrc lang/rust-bin](https://github.com/NetBSD/pkgsrc/tree/trunk/lang/rust-bin) which re-uses the bootstrap kit as a binary distribution and therefore avoids the rather protracted native build time of rust itself @@ -46,7 +49,7 @@ bug reporting system. The `x86_64-unknown-netbsd` artifacts is being distributed by the rust project. -The other targets are built by the designated developers (see above), +The other targets are built by the target maintainers (see above), and the targets are initially cross-compiled, but many if not most of them are also built natively as part of testing. diff --git a/src/doc/rustc/src/platform-support/nto-qnx.md b/src/doc/rustc/src/platform-support/nto-qnx.md index e097d32277d32..9f8960899c169 100644 --- a/src/doc/rustc/src/platform-support/nto-qnx.md +++ b/src/doc/rustc/src/platform-support/nto-qnx.md @@ -13,10 +13,10 @@ and [QNX][qnx.com]. ## Target maintainers -- Florian Bartels, `Florian.Bartels@elektrobit.com`, https://github.com/flba-eb -- Tristan Roach, `TRoach@blackberry.com`, https://github.com/gh-tr -- Jonathan Pallant `Jonathan.Pallant@ferrous-systems.com`, https://github.com/jonathanpallant -- Jorge Aparicio `Jorge.Aparicio@ferrous-systems.com`, https://github.com/japaric +[@flba-eb](https://github.com/flba-eb) +[@gh-tr](https://github.com/gh-tr) +[@jonathanpallant](https://github.com/jonathanpallant) +[@japaric](https://github.com/japaric) ## Requirements diff --git a/src/doc/rustc/src/platform-support/nuttx.md b/src/doc/rustc/src/platform-support/nuttx.md index f76fe0887b5dd..df3f4e7b394ed 100644 --- a/src/doc/rustc/src/platform-support/nuttx.md +++ b/src/doc/rustc/src/platform-support/nuttx.md @@ -12,7 +12,7 @@ For brevity, many parts of the documentation will refer to Apache NuttX as simpl ## Target maintainers -- Qi Huang [@no1wudi](https://github.com/no1wudi) +[@no1wudi](https://github.com/no1wudi) ## Requirements diff --git a/src/doc/rustc/src/platform-support/nvptx64-nvidia-cuda.md b/src/doc/rustc/src/platform-support/nvptx64-nvidia-cuda.md index ab8641ff69ae2..106ec562bfc79 100644 --- a/src/doc/rustc/src/platform-support/nvptx64-nvidia-cuda.md +++ b/src/doc/rustc/src/platform-support/nvptx64-nvidia-cuda.md @@ -7,8 +7,8 @@ platform. ## Target maintainers -- Riccardo D'Ambrosio, https://github.com/RDambrosio016 -- Kjetil Kjeka, https://github.com/kjetilkjeka +[@RDambrosio016](https://github.com/RDambrosio016) +[@kjetilkjeka](https://github.com/kjetilkjeka) tests/ui/foo_functions.rs:9:10 + | +9 | //~^ foo_functions + | ^^^^^^^^^^^^^ expected because of this pattern + | + +error: diagnostic code `clippy::foo_functions` not found on line 16 + --> tests/ui/foo_functions.rs:17:10 + | +17 | //~^ foo_functions + | ^^^^^^^^^^^^^ expected because of this pattern + | + +error: diagnostic code `clippy::foo_functions` not found on line 23 + --> tests/ui/foo_functions.rs:24:6 + | +24 | //~^ foo_functions + | ^^^^^^^^^^^^^ expected because of this pattern + | + ``` This is normal. After all, we wrote a bunch of Rust code but we haven't really diff --git a/src/tools/clippy/book/src/lint_configuration.md b/src/tools/clippy/book/src/lint_configuration.md index 3726d6e8a8691..2314d1beac7e0 100644 --- a/src/tools/clippy/book/src/lint_configuration.md +++ b/src/tools/clippy/book/src/lint_configuration.md @@ -425,6 +425,33 @@ Whether to check MSRV compatibility in `#[test]` and `#[cfg(test)]` code. * [`incompatible_msrv`](https://rust-lang.github.io/rust-clippy/master/index.html#incompatible_msrv) +## `check-inconsistent-struct-field-initializers` +Whether to suggest reordering constructor fields when initializers are present. + +Warnings produced by this configuration aren't necessarily fixed by just reordering the fields. Even if the +suggested code would compile, it can change semantics if the initializer expressions have side effects. The +following example [from rust-clippy#11846] shows how the suggestion can run into borrow check errors: + +```rust +struct MyStruct { + vector: Vec, + length: usize +} +fn main() { + let vector = vec![1,2,3]; + MyStruct { length: vector.len(), vector}; +} +``` + +[from rust-clippy#11846]: https://github.com/rust-lang/rust-clippy/issues/11846#issuecomment-1820747924 + +**Default Value:** `false` + +--- +**Affected lints:** +* [`inconsistent_struct_constructor`](https://rust-lang.github.io/rust-clippy/master/index.html#inconsistent_struct_constructor) + + ## `check-private-items` Whether to also run the listed lints on private items. @@ -613,31 +640,15 @@ The maximum size of the `Err`-variant in a `Result` returned from a function * [`result_large_err`](https://rust-lang.github.io/rust-clippy/master/index.html#result_large_err) -## `lint-inconsistent-struct-field-initializers` -Whether to suggest reordering constructor fields when initializers are present. - -Warnings produced by this configuration aren't necessarily fixed by just reordering the fields. Even if the -suggested code would compile, it can change semantics if the initializer expressions have side effects. The -following example [from rust-clippy#11846] shows how the suggestion can run into borrow check errors: - -```rust -struct MyStruct { - vector: Vec, - length: usize -} -fn main() { - let vector = vec![1,2,3]; - MyStruct { length: vector.len(), vector}; -} -``` - -[from rust-clippy#11846]: https://github.com/rust-lang/rust-clippy/issues/11846#issuecomment-1820747924 +## `lint-commented-code` +Whether collapsible `if` chains are linted if they contain comments inside the parts +that would be collapsed. **Default Value:** `false` --- **Affected lints:** -* [`inconsistent_struct_constructor`](https://rust-lang.github.io/rust-clippy/master/index.html#inconsistent_struct_constructor) +* [`collapsible_if`](https://rust-lang.github.io/rust-clippy/master/index.html#collapsible_if) ## `literal-representation-threshold` @@ -786,6 +797,7 @@ The minimum rust version that the project supports. Defaults to the `rust-versio * [`iter_kv_map`](https://rust-lang.github.io/rust-clippy/master/index.html#iter_kv_map) * [`legacy_numeric_constants`](https://rust-lang.github.io/rust-clippy/master/index.html#legacy_numeric_constants) * [`lines_filter_map_ok`](https://rust-lang.github.io/rust-clippy/master/index.html#lines_filter_map_ok) +* [`manual_abs_diff`](https://rust-lang.github.io/rust-clippy/master/index.html#manual_abs_diff) * [`manual_bits`](https://rust-lang.github.io/rust-clippy/master/index.html#manual_bits) * [`manual_c_str_literals`](https://rust-lang.github.io/rust-clippy/master/index.html#manual_c_str_literals) * [`manual_clamp`](https://rust-lang.github.io/rust-clippy/master/index.html#manual_clamp) @@ -793,6 +805,7 @@ The minimum rust version that the project supports. Defaults to the `rust-versio * [`manual_flatten`](https://rust-lang.github.io/rust-clippy/master/index.html#manual_flatten) * [`manual_hash_one`](https://rust-lang.github.io/rust-clippy/master/index.html#manual_hash_one) * [`manual_is_ascii_check`](https://rust-lang.github.io/rust-clippy/master/index.html#manual_is_ascii_check) +* [`manual_is_power_of_two`](https://rust-lang.github.io/rust-clippy/master/index.html#manual_is_power_of_two) * [`manual_let_else`](https://rust-lang.github.io/rust-clippy/master/index.html#manual_let_else) * [`manual_midpoint`](https://rust-lang.github.io/rust-clippy/master/index.html#manual_midpoint) * [`manual_non_exhaustive`](https://rust-lang.github.io/rust-clippy/master/index.html#manual_non_exhaustive) @@ -1059,7 +1072,8 @@ The maximum allowed size of a bit mask before suggesting to use 'trailing_zeros' ## `warn-on-all-wildcard-imports` -Whether to allow certain wildcard imports (prelude, super in tests). +Whether to emit warnings on all wildcard imports, including those from `prelude`, from `super` in tests, +or for `pub use` reexports. **Default Value:** `false` diff --git a/src/tools/clippy/clippy.toml b/src/tools/clippy/clippy.toml index f4789c9d03035..0a7724bbe4e61 100644 --- a/src/tools/clippy/clippy.toml +++ b/src/tools/clippy/clippy.toml @@ -1,15 +1,20 @@ avoid-breaking-exported-api = false -lint-inconsistent-struct-field-initializers = true +check-inconsistent-struct-field-initializers = true + +lint-commented-code = true [[disallowed-methods]] path = "rustc_lint::context::LintContext::lint" reason = "this function does not add a link to our documentation, please use the `clippy_utils::diagnostics::span_lint*` functions instead" +allow-invalid = true [[disallowed-methods]] path = "rustc_lint::context::LintContext::span_lint" reason = "this function does not add a link to our documentation, please use the `clippy_utils::diagnostics::span_lint*` functions instead" +allow-invalid = true [[disallowed-methods]] path = "rustc_middle::ty::context::TyCtxt::node_span_lint" reason = "this function does not add a link to our documentation, please use the `clippy_utils::diagnostics::span_lint_hir*` functions instead" +allow-invalid = true diff --git a/src/tools/clippy/clippy_config/Cargo.toml b/src/tools/clippy/clippy_config/Cargo.toml index 934725fccb8ec..93fd2e35d1ba5 100644 --- a/src/tools/clippy/clippy_config/Cargo.toml +++ b/src/tools/clippy/clippy_config/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "clippy_config" # begin autogenerated version -version = "0.1.87" +version = "0.1.88" # end autogenerated version edition = "2024" publish = false diff --git a/src/tools/clippy/clippy_config/src/conf.rs b/src/tools/clippy/clippy_config/src/conf.rs index 798f8b3aa5a90..511cb84527d80 100644 --- a/src/tools/clippy/clippy_config/src/conf.rs +++ b/src/tools/clippy/clippy_config/src/conf.rs @@ -120,12 +120,7 @@ impl ConfError { Self { message: message.into(), suggestion, - span: Span::new( - file.start_pos + BytePos::from_usize(span.start), - file.start_pos + BytePos::from_usize(span.end), - SyntaxContext::root(), - None, - ), + span: span_from_toml_range(file, span), } } } @@ -176,11 +171,61 @@ macro_rules! default_text { }; } +macro_rules! deserialize { + ($map:expr, $ty:ty, $errors:expr, $file:expr) => {{ + let raw_value = $map.next_value::>()?; + let value_span = raw_value.span(); + let value = match <$ty>::deserialize(raw_value.into_inner()) { + Err(e) => { + $errors.push(ConfError::spanned( + $file, + e.to_string().replace('\n', " ").trim(), + None, + value_span, + )); + continue; + }, + Ok(value) => value, + }; + (value, value_span) + }}; + + ($map:expr, $ty:ty, $errors:expr, $file:expr, $replacements_allowed:expr) => {{ + let array = $map.next_value::>>()?; + let mut disallowed_paths_span = Range { + start: usize::MAX, + end: usize::MIN, + }; + let mut disallowed_paths = Vec::new(); + for raw_value in array { + let value_span = raw_value.span(); + let mut disallowed_path = match DisallowedPath::<$replacements_allowed>::deserialize(raw_value.into_inner()) + { + Err(e) => { + $errors.push(ConfError::spanned( + $file, + e.to_string().replace('\n', " ").trim(), + None, + value_span, + )); + continue; + }, + Ok(disallowed_path) => disallowed_path, + }; + disallowed_paths_span = union(&disallowed_paths_span, &value_span); + disallowed_path.set_span(span_from_toml_range($file, value_span)); + disallowed_paths.push(disallowed_path); + } + (disallowed_paths, disallowed_paths_span) + }}; +} + macro_rules! define_Conf { ($( $(#[doc = $doc:literal])+ $(#[conf_deprecated($dep:literal, $new_conf:ident)])? $(#[default_text = $default_text:expr])? + $(#[disallowed_paths_allow_replacements = $replacements_allowed:expr])? $(#[lints($($for_lints:ident),* $(,)?)])? $name:ident: $ty:ty = $default:expr, )*) => { @@ -218,42 +263,46 @@ macro_rules! define_Conf { let mut value_spans = HashMap::new(); let mut errors = Vec::new(); let mut warnings = Vec::new(); + + // Declare a local variable for each field available to a configuration file. $(let mut $name = None;)* + // could get `Field` here directly, but get `String` first for diagnostics while let Some(name) = map.next_key::>()? { - match Field::deserialize(name.get_ref().as_str().into_deserializer()) { + let field = match Field::deserialize(name.get_ref().as_str().into_deserializer()) { Err(e) => { let e: FieldError = e; errors.push(ConfError::spanned(self.0, e.error, e.suggestion, name.span())); + continue; } - $(Ok(Field::$name) => { + Ok(field) => field + }; + + match field { + $(Field::$name => { + // Is this a deprecated field, i.e., is `$dep` set? If so, push a warning. $(warnings.push(ConfError::spanned(self.0, format!("deprecated field `{}`. {}", name.get_ref(), $dep), None, name.span()));)? - let raw_value = map.next_value::>()?; - let value_span = raw_value.span(); - match <$ty>::deserialize(raw_value.into_inner()) { - Err(e) => errors.push(ConfError::spanned(self.0, e.to_string().replace('\n', " ").trim(), None, value_span)), - Ok(value) => match $name { - Some(_) => { - errors.push(ConfError::spanned(self.0, format!("duplicate field `{}`", name.get_ref()), None, name.span())); - } - None => { - $name = Some(value); - value_spans.insert(name.get_ref().as_str().to_string(), value_span); - // $new_conf is the same as one of the defined `$name`s, so - // this variable is defined in line 2 of this function. - $(match $new_conf { - Some(_) => errors.push(ConfError::spanned(self.0, concat!( - "duplicate field `", stringify!($new_conf), - "` (provided as `", stringify!($name), "`)" - ), None, name.span())), - None => $new_conf = $name.clone(), - })? - }, - } + let (value, value_span) = + deserialize!(map, $ty, errors, self.0 $(, $replacements_allowed)?); + // Was this field set previously? + if $name.is_some() { + errors.push(ConfError::spanned(self.0, format!("duplicate field `{}`", name.get_ref()), None, name.span())); + continue; } + $name = Some(value); + value_spans.insert(name.get_ref().as_str().to_string(), value_span); + // If this is a deprecated field, was the new field (`$new_conf`) set previously? + // Note that `$new_conf` is one of the defined `$name`s. + $(match $new_conf { + Some(_) => errors.push(ConfError::spanned(self.0, concat!( + "duplicate field `", stringify!($new_conf), + "` (provided as `", stringify!($name), "`)" + ), None, name.span())), + None => $new_conf = $name.clone(), + })? })* // ignore contents of the third_party key - Ok(Field::third_party) => drop(map.next_value::()) + Field::third_party => drop(map.next_value::()) } } let conf = Conf { $($name: $name.unwrap_or_else(defaults::$name),)* }; @@ -275,6 +324,22 @@ macro_rules! define_Conf { }; } +fn union(x: &Range, y: &Range) -> Range { + Range { + start: cmp::min(x.start, y.start), + end: cmp::max(x.end, y.end), + } +} + +fn span_from_toml_range(file: &SourceFile, span: Range) -> Span { + Span::new( + file.start_pos + BytePos::from_usize(span.start), + file.start_pos + BytePos::from_usize(span.end), + SyntaxContext::root(), + None, + ) +} + define_Conf! { /// Which crates to allow absolute paths from #[lints(absolute_paths)] @@ -461,6 +526,7 @@ define_Conf! { )] avoid_breaking_exported_api: bool = true, /// The list of types which may not be held across an await point. + #[disallowed_paths_allow_replacements = false] #[lints(await_holding_invalid_type)] await_holding_invalid_types: Vec = Vec::new(), /// DEPRECATED LINT: BLACKLISTED_NAME. @@ -474,6 +540,26 @@ define_Conf! { /// Whether to check MSRV compatibility in `#[test]` and `#[cfg(test)]` code. #[lints(incompatible_msrv)] check_incompatible_msrv_in_tests: bool = false, + /// Whether to suggest reordering constructor fields when initializers are present. + /// + /// Warnings produced by this configuration aren't necessarily fixed by just reordering the fields. Even if the + /// suggested code would compile, it can change semantics if the initializer expressions have side effects. The + /// following example [from rust-clippy#11846] shows how the suggestion can run into borrow check errors: + /// + /// ```rust + /// struct MyStruct { + /// vector: Vec, + /// length: usize + /// } + /// fn main() { + /// let vector = vec![1,2,3]; + /// MyStruct { length: vector.len(), vector}; + /// } + /// ``` + /// + /// [from rust-clippy#11846]: https://github.com/rust-lang/rust-clippy/issues/11846#issuecomment-1820747924 + #[lints(inconsistent_struct_constructor)] + check_inconsistent_struct_field_initializers: bool = false, /// Whether to also run the listed lints on private items. #[lints(missing_errors_doc, missing_panics_doc, missing_safety_doc, unnecessary_safety_doc)] check_private_items: bool = false, @@ -486,9 +572,11 @@ define_Conf! { #[conf_deprecated("Please use `cognitive-complexity-threshold` instead", cognitive_complexity_threshold)] cyclomatic_complexity_threshold: u64 = 25, /// The list of disallowed macros, written as fully qualified paths. + #[disallowed_paths_allow_replacements = true] #[lints(disallowed_macros)] disallowed_macros: Vec = Vec::new(), /// The list of disallowed methods, written as fully qualified paths. + #[disallowed_paths_allow_replacements = true] #[lints(disallowed_methods)] disallowed_methods: Vec = Vec::new(), /// The list of disallowed names to lint about. NB: `bar` is not here since it has legitimate uses. The value @@ -497,6 +585,7 @@ define_Conf! { #[lints(disallowed_names)] disallowed_names: Vec = DEFAULT_DISALLOWED_NAMES.iter().map(ToString::to_string).collect(), /// The list of disallowed types, written as fully qualified paths. + #[disallowed_paths_allow_replacements = true] #[lints(disallowed_types)] disallowed_types: Vec = Vec::new(), /// The list of words this lint should not consider as identifiers needing ticks. The value @@ -549,25 +638,15 @@ define_Conf! { /// The maximum size of the `Err`-variant in a `Result` returned from a function #[lints(result_large_err)] large_error_threshold: u64 = 128, + /// Whether collapsible `if` chains are linted if they contain comments inside the parts + /// that would be collapsed. + #[lints(collapsible_if)] + lint_commented_code: bool = false, /// Whether to suggest reordering constructor fields when initializers are present. + /// DEPRECATED CONFIGURATION: lint-inconsistent-struct-field-initializers /// - /// Warnings produced by this configuration aren't necessarily fixed by just reordering the fields. Even if the - /// suggested code would compile, it can change semantics if the initializer expressions have side effects. The - /// following example [from rust-clippy#11846] shows how the suggestion can run into borrow check errors: - /// - /// ```rust - /// struct MyStruct { - /// vector: Vec, - /// length: usize - /// } - /// fn main() { - /// let vector = vec![1,2,3]; - /// MyStruct { length: vector.len(), vector}; - /// } - /// ``` - /// - /// [from rust-clippy#11846]: https://github.com/rust-lang/rust-clippy/issues/11846#issuecomment-1820747924 - #[lints(inconsistent_struct_constructor)] + /// Use the `check-inconsistent-struct-field-initializers` configuration instead. + #[conf_deprecated("Please use `check-inconsistent-struct-field-initializers` instead", check_inconsistent_struct_field_initializers)] lint_inconsistent_struct_field_initializers: bool = false, /// The lower bound for linting decimal literals #[lints(decimal_literal_representation)] @@ -635,6 +714,7 @@ define_Conf! { iter_kv_map, legacy_numeric_constants, lines_filter_map_ok, + manual_abs_diff, manual_bits, manual_c_str_literals, manual_clamp, @@ -642,6 +722,7 @@ define_Conf! { manual_flatten, manual_hash_one, manual_is_ascii_check, + manual_is_power_of_two, manual_let_else, manual_midpoint, manual_non_exhaustive, @@ -760,7 +841,8 @@ define_Conf! { /// The maximum allowed size of a bit mask before suggesting to use 'trailing_zeros' #[lints(verbose_bit_mask)] verbose_bit_mask_threshold: u64 = 1, - /// Whether to allow certain wildcard imports (prelude, super in tests). + /// Whether to emit warnings on all wildcard imports, including those from `prelude`, from `super` in tests, + /// or for `pub use` reexports. #[lints(wildcard_imports)] warn_on_all_wildcard_imports: bool = false, /// Whether to also emit warnings for unsafe blocks with metavariable expansions in **private** macros. @@ -981,7 +1063,23 @@ impl serde::de::Error for FieldError { // set and allows it. use fmt::Write; - let mut expected = expected.to_vec(); + let metadata = get_configuration_metadata(); + let deprecated = metadata + .iter() + .filter_map(|conf| { + if conf.deprecation_reason.is_some() { + Some(conf.name.as_str()) + } else { + None + } + }) + .collect::>(); + + let mut expected = expected + .iter() + .copied() + .filter(|name| !deprecated.contains(name)) + .collect::>(); expected.sort_unstable(); let (rows, column_widths) = calculate_dimensions(&expected); @@ -1064,7 +1162,13 @@ mod tests { fn configs_are_tested() { let mut names: HashSet = crate::get_configuration_metadata() .into_iter() - .map(|meta| meta.name.replace('_', "-")) + .filter_map(|meta| { + if meta.deprecation_reason.is_none() { + Some(meta.name.replace('_', "-")) + } else { + None + } + }) .collect(); let toml_files = WalkDir::new("../tests") diff --git a/src/tools/clippy/clippy_config/src/lib.rs b/src/tools/clippy/clippy_config/src/lib.rs index 5d6e8b875166c..c227b8900b74a 100644 --- a/src/tools/clippy/clippy_config/src/lib.rs +++ b/src/tools/clippy/clippy_config/src/lib.rs @@ -13,6 +13,7 @@ rustc::untranslatable_diagnostic )] +extern crate rustc_data_structures; extern crate rustc_errors; extern crate rustc_hir; extern crate rustc_middle; diff --git a/src/tools/clippy/clippy_config/src/types.rs b/src/tools/clippy/clippy_config/src/types.rs index 8faac9ecffea4..5949eaca7bc1d 100644 --- a/src/tools/clippy/clippy_config/src/types.rs +++ b/src/tools/clippy/clippy_config/src/types.rs @@ -1,5 +1,7 @@ -use clippy_utils::def_path_def_ids; +use rustc_data_structures::fx::FxHashMap; use rustc_errors::{Applicability, Diag}; +use rustc_hir::PrimTy; +use rustc_hir::def::{DefKind, Res}; use rustc_hir::def_id::DefIdMap; use rustc_middle::ty::TyCtxt; use rustc_span::Span; @@ -21,6 +23,17 @@ pub struct DisallowedPath { path: String, reason: Option, replacement: Option, + /// Setting `allow_invalid` to true suppresses a warning if `path` does not refer to an existing + /// definition. + /// + /// This could be useful when conditional compilation is used, or when a clippy.toml file is + /// shared among multiple projects. + allow_invalid: bool, + /// The span of the `DisallowedPath`. + /// + /// Used for diagnostics. + #[serde(skip_serializing)] + span: Span, } impl<'de, const REPLACEMENT_ALLOWED: bool> Deserialize<'de> for DisallowedPath { @@ -36,6 +49,8 @@ impl<'de, const REPLACEMENT_ALLOWED: bool> Deserialize<'de> for DisallowedPath, replacement: Option, + #[serde(rename = "allow-invalid")] + allow_invalid: Option, }, } @@ -58,7 +75,7 @@ impl DisallowedPath { &self.path } - pub fn diag_amendment(&self, span: Span) -> impl FnOnce(&mut Diag<'_, ()>) + use<'_, REPLACEMENT_ALLOWED> { + pub fn diag_amendment(&self, span: Span) -> impl FnOnce(&mut Diag<'_, ()>) { move |diag| { if let Some(replacement) = &self.replacement { diag.span_suggestion( @@ -72,6 +89,14 @@ impl DisallowedPath { } } } + + pub fn span(&self) -> Span { + self.span + } + + pub fn set_span(&mut self, span: Span) { + self.span = span; + } } impl DisallowedPathEnum { @@ -94,20 +119,87 @@ impl DisallowedPathEnum { Self::Simple(_) => None, } } + + fn allow_invalid(&self) -> bool { + match &self { + Self::WithReason { allow_invalid, .. } => allow_invalid.unwrap_or_default(), + Self::Simple(_) => false, + } + } } /// Creates a map of disallowed items to the reason they were disallowed. +#[allow(clippy::type_complexity)] pub fn create_disallowed_map( tcx: TyCtxt<'_>, - disallowed: &'static [DisallowedPath], -) -> DefIdMap<(&'static str, &'static DisallowedPath)> { - disallowed - .iter() - .map(|x| (x.path(), x.path().split("::").collect::>(), x)) - .flat_map(|(name, path, disallowed_path)| { - def_path_def_ids(tcx, &path).map(move |id| (id, (name, disallowed_path))) - }) - .collect() + disallowed_paths: &'static [DisallowedPath], + def_kind_predicate: impl Fn(DefKind) -> bool, + predicate_description: &str, + allow_prim_tys: bool, +) -> ( + DefIdMap<(&'static str, &'static DisallowedPath)>, + FxHashMap)>, +) { + let mut def_ids: DefIdMap<(&'static str, &'static DisallowedPath)> = DefIdMap::default(); + let mut prim_tys: FxHashMap)> = + FxHashMap::default(); + for disallowed_path in disallowed_paths { + let path = disallowed_path.path(); + let mut resolutions = clippy_utils::def_path_res(tcx, &path.split("::").collect::>()); + + let mut found_def_id = None; + let mut found_prim_ty = false; + resolutions.retain(|res| match res { + Res::Def(def_kind, def_id) => { + found_def_id = Some(*def_id); + def_kind_predicate(*def_kind) + }, + Res::PrimTy(_) => { + found_prim_ty = true; + allow_prim_tys + }, + _ => false, + }); + + if resolutions.is_empty() { + let span = disallowed_path.span(); + + if let Some(def_id) = found_def_id { + tcx.sess.dcx().span_warn( + span, + format!( + "expected a {predicate_description}, found {} {}", + tcx.def_descr_article(def_id), + tcx.def_descr(def_id) + ), + ); + } else if found_prim_ty { + tcx.sess.dcx().span_warn( + span, + format!("expected a {predicate_description}, found a primitive type",), + ); + } else if !disallowed_path.allow_invalid { + tcx.sess.dcx().span_warn( + span, + format!("`{path}` does not refer to an existing {predicate_description}"), + ); + } + } + + for res in resolutions { + match res { + Res::Def(_, def_id) => { + def_ids.insert(def_id, (path, disallowed_path)); + }, + Res::PrimTy(ty) => { + prim_tys.insert(ty, (path, disallowed_path)); + }, + _ => unreachable!(), + } + } + } + + (def_ids, prim_tys) } #[derive(Clone, Copy, Debug, PartialEq, Eq, Deserialize, Serialize)] diff --git a/src/tools/clippy/clippy_dev/src/lib.rs b/src/tools/clippy/clippy_dev/src/lib.rs index 9280369c23b8f..c1ffaf269c6fe 100644 --- a/src/tools/clippy/clippy_dev/src/lib.rs +++ b/src/tools/clippy/clippy_dev/src/lib.rs @@ -13,6 +13,7 @@ #[allow(unused_extern_crates)] extern crate rustc_driver; extern crate rustc_lexer; +extern crate rustc_literal_escaper; pub mod dogfood; pub mod fmt; diff --git a/src/tools/clippy/clippy_dev/src/main.rs b/src/tools/clippy/clippy_dev/src/main.rs index 074dea4ab77b6..83f8e66b33476 100644 --- a/src/tools/clippy/clippy_dev/src/main.rs +++ b/src/tools/clippy/clippy_dev/src/main.rs @@ -170,7 +170,6 @@ enum DevCommand { "restriction", "cargo", "nursery", - "internal", ], default_value = "nursery", )] @@ -334,7 +333,7 @@ struct SyncCommand { #[derive(Subcommand)] enum SyncSubcommand { #[command(name = "update_nightly")] - /// Update nightly version in rust-toolchain and `clippy_utils` + /// Update nightly version in `rust-toolchain.toml` and `clippy_utils` UpdateNightly, } diff --git a/src/tools/clippy/clippy_dev/src/setup/toolchain.rs b/src/tools/clippy/clippy_dev/src/setup/toolchain.rs index 2966629cf70a3..ecd80215f7e8f 100644 --- a/src/tools/clippy/clippy_dev/src/setup/toolchain.rs +++ b/src/tools/clippy/clippy_dev/src/setup/toolchain.rs @@ -62,7 +62,7 @@ pub fn create(standalone: bool, force: bool, release: bool, name: &str) { println!("Created toolchain {name}, use it in other projects with e.g. `cargo +{name} clippy`"); if !standalone { - println!("Note: This will need to be re-run whenever the Clippy `rust-toolchain` changes"); + println!("Note: This will need to be re-run whenever the Clippy `rust-toolchain.toml` changes"); } } diff --git a/src/tools/clippy/clippy_dev/src/sync.rs b/src/tools/clippy/clippy_dev/src/sync.rs index 3522d182e90ac..a6b65e561c223 100644 --- a/src/tools/clippy/clippy_dev/src/sync.rs +++ b/src/tools/clippy/clippy_dev/src/sync.rs @@ -10,7 +10,7 @@ pub fn update_nightly() { let date = Utc::now().format("%Y-%m-%d").to_string(); replace_region_in_file( UpdateMode::Change, - Path::new("rust-toolchain"), + Path::new("rust-toolchain.toml"), "# begin autogenerated nightly\n", "# end autogenerated nightly", |res| { diff --git a/src/tools/clippy/clippy_dev/src/update_lints.rs b/src/tools/clippy/clippy_dev/src/update_lints.rs index b80ee5aac7e76..d848a97f86d2f 100644 --- a/src/tools/clippy/clippy_dev/src/update_lints.rs +++ b/src/tools/clippy/clippy_dev/src/update_lints.rs @@ -1,7 +1,8 @@ use crate::utils::{UpdateMode, clippy_project_root, exit_with_failure, replace_region_in_file}; use aho_corasick::AhoCorasickBuilder; use itertools::Itertools; -use rustc_lexer::{LiteralKind, TokenKind, tokenize, unescape}; +use rustc_lexer::{LiteralKind, TokenKind, tokenize}; +use rustc_literal_escaper::{Mode, unescape_unicode}; use std::collections::{HashMap, HashSet}; use std::ffi::OsStr; use std::fmt::{self, Write}; @@ -37,9 +38,8 @@ fn generate_lint_files( deprecated_lints: &[DeprecatedLint], renamed_lints: &[RenamedLint], ) { - let internal_lints = Lint::internal_lints(lints); - let mut usable_lints = Lint::usable_lints(lints); - usable_lints.sort_by_key(|lint| lint.name.clone()); + let mut lints = lints.to_owned(); + lints.sort_by_key(|lint| lint.name.clone()); replace_region_in_file( update_mode, @@ -47,7 +47,7 @@ fn generate_lint_files( "[There are over ", " lints included in this crate!]", |res| { - write!(res, "{}", round_to_fifty(usable_lints.len())).unwrap(); + write!(res, "{}", round_to_fifty(lints.len())).unwrap(); }, ); @@ -57,7 +57,7 @@ fn generate_lint_files( "[There are over ", " lints included in this crate!]", |res| { - write!(res, "{}", round_to_fifty(usable_lints.len())).unwrap(); + write!(res, "{}", round_to_fifty(lints.len())).unwrap(); }, ); @@ -67,7 +67,7 @@ fn generate_lint_files( "\n", "", |res| { - for lint in usable_lints + for lint in lints .iter() .map(|l| &*l.name) .chain(deprecated_lints.iter().filter_map(|l| l.name.strip_prefix("clippy::"))) @@ -86,7 +86,7 @@ fn generate_lint_files( "// begin lints modules, do not remove this comment, it’s used in `update_lints`\n", "// end lints modules, do not remove this comment, it’s used in `update_lints`", |res| { - for lint_mod in usable_lints.iter().map(|l| &l.module).unique().sorted() { + for lint_mod in lints.iter().map(|l| &l.module).unique().sorted() { writeln!(res, "mod {lint_mod};").unwrap(); } }, @@ -95,7 +95,7 @@ fn generate_lint_files( process_file( "clippy_lints/src/declared_lints.rs", update_mode, - &gen_declared_lints(internal_lints.iter(), usable_lints.iter()), + &gen_declared_lints(lints.iter()), ); let content = gen_deprecated_lints_test(deprecated_lints); @@ -106,10 +106,9 @@ fn generate_lint_files( } pub fn print_lints() { - let (lint_list, _, _) = gather_all(); - let usable_lints = Lint::usable_lints(&lint_list); - let usable_lint_count = usable_lints.len(); - let grouped_by_lint_group = Lint::by_lint_group(usable_lints.into_iter()); + let (lints, _, _) = gather_all(); + let lint_count = lints.len(); + let grouped_by_lint_group = Lint::by_lint_group(lints.into_iter()); for (lint_group, mut lints) in grouped_by_lint_group { println!("\n## {lint_group}"); @@ -121,7 +120,7 @@ pub fn print_lints() { } } - println!("there are {usable_lint_count} lints"); + println!("there are {lint_count} lints"); } /// Runs the `rename_lint` command. @@ -402,53 +401,53 @@ fn remove_lint_declaration(name: &str, path: &Path, lints: &mut Vec) -> io } } - if path.exists() { - if let Some(lint) = lints.iter().find(|l| l.name == name) { - if lint.module == name { - // The lint name is the same as the file, we can just delete the entire file - fs::remove_file(path)?; - } else { - // We can't delete the entire file, just remove the declaration - - if let Some(Some("mod.rs")) = path.file_name().map(OsStr::to_str) { - // Remove clippy_lints/src/some_mod/some_lint.rs - let mut lint_mod_path = path.to_path_buf(); - lint_mod_path.set_file_name(name); - lint_mod_path.set_extension("rs"); + if path.exists() + && let Some(lint) = lints.iter().find(|l| l.name == name) + { + if lint.module == name { + // The lint name is the same as the file, we can just delete the entire file + fs::remove_file(path)?; + } else { + // We can't delete the entire file, just remove the declaration - let _ = fs::remove_file(lint_mod_path); - } + if let Some(Some("mod.rs")) = path.file_name().map(OsStr::to_str) { + // Remove clippy_lints/src/some_mod/some_lint.rs + let mut lint_mod_path = path.to_path_buf(); + lint_mod_path.set_file_name(name); + lint_mod_path.set_extension("rs"); - let mut content = - fs::read_to_string(path).unwrap_or_else(|_| panic!("failed to read `{}`", path.to_string_lossy())); + let _ = fs::remove_file(lint_mod_path); + } - eprintln!( - "warn: you will have to manually remove any code related to `{name}` from `{}`", - path.display() - ); + let mut content = + fs::read_to_string(path).unwrap_or_else(|_| panic!("failed to read `{}`", path.to_string_lossy())); - assert!( - content[lint.declaration_range.clone()].contains(&name.to_uppercase()), - "error: `{}` does not contain lint `{}`'s declaration", - path.display(), - lint.name - ); + eprintln!( + "warn: you will have to manually remove any code related to `{name}` from `{}`", + path.display() + ); - // Remove lint declaration (declare_clippy_lint!) - content.replace_range(lint.declaration_range.clone(), ""); + assert!( + content[lint.declaration_range.clone()].contains(&name.to_uppercase()), + "error: `{}` does not contain lint `{}`'s declaration", + path.display(), + lint.name + ); - // Remove the module declaration (mod xyz;) - let mod_decl = format!("\nmod {name};"); - content = content.replacen(&mod_decl, "", 1); + // Remove lint declaration (declare_clippy_lint!) + content.replace_range(lint.declaration_range.clone(), ""); - remove_impl_lint_pass(&lint.name.to_uppercase(), &mut content); - fs::write(path, content).unwrap_or_else(|_| panic!("failed to write to `{}`", path.to_string_lossy())); - } + // Remove the module declaration (mod xyz;) + let mod_decl = format!("\nmod {name};"); + content = content.replacen(&mod_decl, "", 1); - remove_test_assets(name); - remove_lint(name, lints); - return Ok(true); + remove_impl_lint_pass(&lint.name.to_uppercase(), &mut content); + fs::write(path, content).unwrap_or_else(|_| panic!("failed to write to `{}`", path.to_string_lossy())); } + + remove_test_assets(name); + remove_lint(name, lints); + return Ok(true); } Ok(false) @@ -527,22 +526,6 @@ impl Lint { } } - /// Returns all non-deprecated lints and non-internal lints - #[must_use] - fn usable_lints(lints: &[Self]) -> Vec { - lints - .iter() - .filter(|l| !l.group.starts_with("internal")) - .cloned() - .collect() - } - - /// Returns all internal lints - #[must_use] - fn internal_lints(lints: &[Self]) -> Vec { - lints.iter().filter(|l| l.group == "internal").cloned().collect() - } - /// Returns the lints in a `HashMap`, grouped by the different lint groups #[must_use] fn by_lint_group(lints: impl Iterator) -> HashMap> { @@ -579,23 +562,14 @@ impl RenamedLint { /// Generates the code for registering lints #[must_use] -fn gen_declared_lints<'a>( - internal_lints: impl Iterator, - usable_lints: impl Iterator, -) -> String { - let mut details: Vec<_> = internal_lints - .map(|l| (false, &l.module, l.name.to_uppercase())) - .chain(usable_lints.map(|l| (true, &l.module, l.name.to_uppercase()))) - .collect(); +fn gen_declared_lints<'a>(lints: impl Iterator) -> String { + let mut details: Vec<_> = lints.map(|l| (&l.module, l.name.to_uppercase())).collect(); details.sort_unstable(); let mut output = GENERATED_FILE_COMMENT.to_string(); output.push_str("pub static LINTS: &[&crate::LintInfo] = &[\n"); - for (is_public, module_name, lint_name) in details { - if !is_public { - output.push_str(" #[cfg(feature = \"internal\")]\n"); - } + for (module_name, lint_name) in details { let _: fmt::Result = writeln!(output, " crate::{module_name}::{lint_name}_INFO,"); } output.push_str("];\n"); @@ -830,7 +804,7 @@ fn remove_line_splices(s: &str) -> String { .and_then(|s| s.strip_suffix('"')) .unwrap_or_else(|| panic!("expected quoted string, found `{s}`")); let mut res = String::with_capacity(s.len()); - unescape::unescape_unicode(s, unescape::Mode::Str, &mut |range, ch| { + unescape_unicode(s, Mode::Str, &mut |range, ch| { if ch.is_ok() { res.push_str(&s[range]); } @@ -936,41 +910,6 @@ mod tests { assert_eq!(expected, result); } - #[test] - fn test_usable_lints() { - let lints = vec![ - Lint::new( - "should_assert_eq2", - "Not Deprecated", - "\"abc\"", - "module_name", - Range::default(), - ), - Lint::new( - "should_assert_eq2", - "internal", - "\"abc\"", - "module_name", - Range::default(), - ), - Lint::new( - "should_assert_eq2", - "internal_style", - "\"abc\"", - "module_name", - Range::default(), - ), - ]; - let expected = vec![Lint::new( - "should_assert_eq2", - "Not Deprecated", - "\"abc\"", - "module_name", - Range::default(), - )]; - assert_eq!(expected, Lint::usable_lints(&lints)); - } - #[test] fn test_by_lint_group() { let lints = vec![ diff --git a/src/tools/clippy/clippy_dev/src/utils.rs b/src/tools/clippy/clippy_dev/src/utils.rs index b87fcca13b1ce..206816398f50f 100644 --- a/src/tools/clippy/clippy_dev/src/utils.rs +++ b/src/tools/clippy/clippy_dev/src/utils.rs @@ -30,10 +30,10 @@ pub fn clippy_project_root() -> PathBuf { let current_dir = std::env::current_dir().unwrap(); for path in current_dir.ancestors() { let result = fs::read_to_string(path.join("Cargo.toml")); - if let Err(err) = &result { - if err.kind() == io::ErrorKind::NotFound { - continue; - } + if let Err(err) = &result + && err.kind() == io::ErrorKind::NotFound + { + continue; } let content = result.unwrap(); diff --git a/src/tools/clippy/clippy_lints/Cargo.toml b/src/tools/clippy/clippy_lints/Cargo.toml index 54347043a13d4..20951afccbb7e 100644 --- a/src/tools/clippy/clippy_lints/Cargo.toml +++ b/src/tools/clippy/clippy_lints/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "clippy_lints" # begin autogenerated version -version = "0.1.87" +version = "0.1.88" # end autogenerated version description = "A bunch of helpful lints to avoid common pitfalls in Rust" repository = "https://github.com/rust-lang/rust-clippy" @@ -19,10 +19,7 @@ itertools = "0.12" quine-mc_cluskey = "0.2" regex-syntax = "0.8" serde = { version = "1.0", features = ["derive"] } -serde_json = { version = "1.0", optional = true } -tempfile = { version = "3.3.0", optional = true } toml = "0.7.3" -regex = { version = "1.5", optional = true } unicode-normalization = "0.1" unicode-script = { version = "0.5", default-features = false } semver = "1.0" @@ -31,10 +28,6 @@ url = "2.2" [dev-dependencies] walkdir = "2.3" -[features] -# build clippy with internal lints enabled, off by default -internal = ["serde_json", "tempfile", "regex"] - [package.metadata.rust-analyzer] # This crate uses #[feature(rustc_private)] rustc_private = true diff --git a/src/tools/clippy/clippy_lints/src/arbitrary_source_item_ordering.rs b/src/tools/clippy/clippy_lints/src/arbitrary_source_item_ordering.rs index 57cabe437034a..272444475c0c4 100644 --- a/src/tools/clippy/clippy_lints/src/arbitrary_source_item_ordering.rs +++ b/src/tools/clippy/clippy_lints/src/arbitrary_source_item_ordering.rs @@ -5,6 +5,7 @@ use clippy_config::types::{ SourceItemOrderingWithinModuleItemGroupings, }; use clippy_utils::diagnostics::span_lint_and_note; +use clippy_utils::is_cfg_test; use rustc_hir::{ AssocItemKind, FieldDef, HirId, ImplItemRef, IsAuto, Item, ItemKind, Mod, QPath, TraitItemRef, TyKind, Variant, VariantData, @@ -263,10 +264,11 @@ impl<'tcx> LateLintPass<'tcx> for ArbitrarySourceItemOrdering { continue; } - if let Some(cur_v) = cur_v { - if cur_v.ident.name.as_str() > variant.ident.name.as_str() && cur_v.span != variant.span { - Self::lint_member_name(cx, &variant.ident, &cur_v.ident); - } + if let Some(cur_v) = cur_v + && cur_v.ident.name.as_str() > variant.ident.name.as_str() + && cur_v.span != variant.span + { + Self::lint_member_name(cx, &variant.ident, &cur_v.ident); } cur_v = Some(variant); } @@ -278,10 +280,11 @@ impl<'tcx> LateLintPass<'tcx> for ArbitrarySourceItemOrdering { continue; } - if let Some(cur_f) = cur_f { - if cur_f.ident.name.as_str() > field.ident.name.as_str() && cur_f.span != field.span { - Self::lint_member_name(cx, &field.ident, &cur_f.ident); - } + if let Some(cur_f) = cur_f + && cur_f.ident.name.as_str() > field.ident.name.as_str() + && cur_f.span != field.span + { + Self::lint_member_name(cx, &field.ident, &cur_f.ident); } cur_f = Some(field); } @@ -342,7 +345,7 @@ impl<'tcx> LateLintPass<'tcx> for ArbitrarySourceItemOrdering { struct CurItem<'a> { item: &'a Item<'a>, order: usize, - name: String, + name: Option, } let mut cur_t: Option> = None; @@ -359,32 +362,36 @@ impl<'tcx> LateLintPass<'tcx> for ArbitrarySourceItemOrdering { // as no sorting by source map/line of code has to be applied. // for item in items { - if item.span.in_external_macro(cx.sess().source_map()) { + if is_cfg_test(cx.tcx, item.hir_id()) { continue; } - let ident = if let Some(ident) = item.kind.ident() { - ident - } else if let ItemKind::Impl(_) = item.kind - && !get_item_name(item).is_empty() - { - rustc_span::Ident::empty() // FIXME: a bit strange, is there a better way to do it? - } else { - continue; - }; - - if ident.name.as_str().starts_with('_') { - // Filters out unnamed macro-like impls for various derives, - // e.g. serde::Serialize or num_derive::FromPrimitive. + if item.span.in_external_macro(cx.sess().source_map()) { continue; } - if ident.name == rustc_span::sym::std && item.span.is_dummy() { - if let ItemKind::ExternCrate(None, _) = item.kind { - // Filters the auto-included Rust standard library. + if let Some(ident) = item.kind.ident() { + if ident.name.as_str().starts_with('_') { + // Filters out unnamed macro-like impls for various derives, + // e.g. serde::Serialize or num_derive::FromPrimitive. continue; } - println!("Unknown item: {item:?}"); + + if ident.name == rustc_span::sym::std && item.span.is_dummy() { + if let ItemKind::ExternCrate(None, _) = item.kind { + // Filters the auto-included Rust standard library. + continue; + } + if cfg!(debug_assertions) { + rustc_middle::bug!("unknown item: {item:?}"); + } + } + } else if let ItemKind::Impl(_) = item.kind + && get_item_name(item).is_some() + { + // keep going below + } else { + continue; } let item_kind = convert_module_item_kind(&item.kind); @@ -493,7 +500,7 @@ fn convert_module_item_kind(value: &ItemKind<'_>) -> SourceItemOrderingModuleIte /// further in the [Rust Reference, Paths Chapter][rust_ref]. /// /// [rust_ref]: https://doc.rust-lang.org/reference/paths.html#crate-1 -fn get_item_name(item: &Item<'_>) -> String { +fn get_item_name(item: &Item<'_>) -> Option { match item.kind { ItemKind::Impl(im) => { if let TyKind::Path(path) = im.self_ty.kind { @@ -513,27 +520,19 @@ fn get_item_name(item: &Item<'_>) -> String { } segs.push(String::new()); - segs.join("!!") + Some(segs.join("!!")) }, QPath::TypeRelative(_, _path_seg) => { // This case doesn't exist in the clippy tests codebase. - String::new() + None }, - QPath::LangItem(_, _) => String::new(), + QPath::LangItem(_, _) => None, } } else { // Impls for anything that isn't a named type can be skipped. - String::new() + None } }, - // FIXME: `Ident::empty` for anonymous items is a bit strange, is there - // a better way to do it? - _ => item - .kind - .ident() - .unwrap_or(rustc_span::Ident::empty()) - .name - .as_str() - .to_owned(), + _ => item.kind.ident().map(|name| name.as_str().to_owned()), } } diff --git a/src/tools/clippy/clippy_lints/src/as_conversions.rs b/src/tools/clippy/clippy_lints/src/as_conversions.rs index 78102772927c0..27e304a848e33 100644 --- a/src/tools/clippy/clippy_lints/src/as_conversions.rs +++ b/src/tools/clippy/clippy_lints/src/as_conversions.rs @@ -12,17 +12,17 @@ declare_clippy_lint! { /// regardless of whether good alternatives exist or not. If you want more /// precise lints for `as`, please consider using these separate lints: /// - /// - clippy::cast_lossless - /// - clippy::cast_possible_truncation - /// - clippy::cast_possible_wrap - /// - clippy::cast_precision_loss - /// - clippy::cast_sign_loss - /// - clippy::char_lit_as_u8 - /// - clippy::fn_to_numeric_cast - /// - clippy::fn_to_numeric_cast_with_truncation - /// - clippy::ptr_as_ptr - /// - clippy::unnecessary_cast - /// - invalid_reference_casting + /// - `clippy::cast_lossless` + /// - `clippy::cast_possible_truncation` + /// - `clippy::cast_possible_wrap` + /// - `clippy::cast_precision_loss` + /// - `clippy::cast_sign_loss` + /// - `clippy::char_lit_as_u8` + /// - `clippy::fn_to_numeric_cast` + /// - `clippy::fn_to_numeric_cast_with_truncation` + /// - `clippy::ptr_as_ptr` + /// - `clippy::unnecessary_cast` + /// - `invalid_reference_casting` /// /// There is a good explanation the reason why this lint should work in this /// way and how it is useful [in this diff --git a/src/tools/clippy/clippy_lints/src/assigning_clones.rs b/src/tools/clippy/clippy_lints/src/assigning_clones.rs index ab34af7c31745..8b8b42bbf7228 100644 --- a/src/tools/clippy/clippy_lints/src/assigning_clones.rs +++ b/src/tools/clippy/clippy_lints/src/assigning_clones.rs @@ -3,14 +3,13 @@ use clippy_utils::diagnostics::span_lint_and_then; use clippy_utils::mir::{PossibleBorrowerMap, enclosing_mir}; use clippy_utils::msrvs::{self, Msrv}; use clippy_utils::sugg::Sugg; -use clippy_utils::{is_diag_trait_item, is_in_test, last_path_segment, local_is_initialized, path_to_local}; +use clippy_utils::{is_diag_trait_item, is_in_test, last_path_segment, local_is_initialized, path_to_local, sym}; use rustc_errors::Applicability; use rustc_hir::{self as hir, Expr, ExprKind}; use rustc_lint::{LateContext, LateLintPass}; use rustc_middle::mir; use rustc_middle::ty::{self, Instance, Mutability}; use rustc_session::impl_lint_pass; -use rustc_span::symbol::sym; use rustc_span::{Span, SyntaxContext}; declare_clippy_lint! { @@ -86,9 +85,9 @@ impl<'tcx> LateLintPass<'tcx> for AssigningClones { && ctxt.is_root() && let which_trait = match fn_name { sym::clone if is_diag_trait_item(cx, fn_id, sym::Clone) => CloneTrait::Clone, - _ if fn_name.as_str() == "to_owned" - && is_diag_trait_item(cx, fn_id, sym::ToOwned) - && self.msrv.meets(cx, msrvs::CLONE_INTO) => + sym::to_owned + if is_diag_trait_item(cx, fn_id, sym::ToOwned) + && self.msrv.meets(cx, msrvs::CLONE_INTO) => { CloneTrait::ToOwned }, @@ -111,8 +110,8 @@ impl<'tcx> LateLintPass<'tcx> for AssigningClones { // Only suggest if `clone_from`/`clone_into` is explicitly implemented && resolved_assoc_items.in_definition_order().any(|assoc| match which_trait { - CloneTrait::Clone => assoc.name == sym::clone_from, - CloneTrait::ToOwned => assoc.name.as_str() == "clone_into", + CloneTrait::Clone => assoc.name() == sym::clone_from, + CloneTrait::ToOwned => assoc.name() == sym::clone_into, } ) && !clone_source_borrows_from_dest(cx, lhs, rhs.span) @@ -243,7 +242,7 @@ fn build_sugg<'tcx>( // `lhs = self_expr.clone();` -> `lhs.clone_from(self_expr)` Sugg::hir_with_applicability(cx, lhs, "_", app) } - .maybe_par(); + .maybe_paren(); // Determine whether we need to reference the argument to clone_from(). let clone_receiver_type = cx.typeck_results().expr_ty(fn_arg); @@ -284,7 +283,7 @@ fn build_sugg<'tcx>( let rhs_sugg = if let ExprKind::Unary(hir::UnOp::Deref, ref_expr) = lhs.kind { // `*lhs = rhs.to_owned()` -> `rhs.clone_into(lhs)` // `*lhs = ToOwned::to_owned(rhs)` -> `ToOwned::clone_into(rhs, lhs)` - let sugg = Sugg::hir_with_applicability(cx, ref_expr, "_", app).maybe_par(); + let sugg = Sugg::hir_with_applicability(cx, ref_expr, "_", app).maybe_paren(); let inner_type = cx.typeck_results().expr_ty(ref_expr); // If after unwrapping the dereference, the type is not a mutable reference, we add &mut to make it // deref to a mutable reference. @@ -296,7 +295,7 @@ fn build_sugg<'tcx>( } else { // `lhs = rhs.to_owned()` -> `rhs.clone_into(&mut lhs)` // `lhs = ToOwned::to_owned(rhs)` -> `ToOwned::clone_into(rhs, &mut lhs)` - Sugg::hir_with_applicability(cx, lhs, "_", app).maybe_par().mut_addr() + Sugg::hir_with_applicability(cx, lhs, "_", app).maybe_paren().mut_addr() }; match call_kind { diff --git a/src/tools/clippy/clippy_lints/src/attrs/blanket_clippy_restriction_lints.rs b/src/tools/clippy/clippy_lints/src/attrs/blanket_clippy_restriction_lints.rs index fecf316640636..4d64eec25d273 100644 --- a/src/tools/clippy/clippy_lints/src/attrs/blanket_clippy_restriction_lints.rs +++ b/src/tools/clippy/clippy_lints/src/attrs/blanket_clippy_restriction_lints.rs @@ -1,24 +1,23 @@ use super::BLANKET_CLIPPY_RESTRICTION_LINTS; use super::utils::extract_clippy_lint; use clippy_utils::diagnostics::{span_lint_and_help, span_lint_and_then}; +use clippy_utils::sym; use rustc_ast::MetaItemInner; use rustc_lint::{EarlyContext, Level, LintContext}; +use rustc_span::DUMMY_SP; use rustc_span::symbol::Symbol; -use rustc_span::{DUMMY_SP, sym}; pub(super) fn check(cx: &EarlyContext<'_>, name: Symbol, items: &[MetaItemInner]) { for lint in items { - if let Some(lint_name) = extract_clippy_lint(lint) { - if lint_name.as_str() == "restriction" && name != sym::allow { - span_lint_and_help( - cx, - BLANKET_CLIPPY_RESTRICTION_LINTS, - lint.span(), - "`clippy::restriction` is not meant to be enabled as a group", - None, - "enable the restriction lints you need individually", - ); - } + if name != sym::allow && extract_clippy_lint(lint) == Some(sym::restriction) { + span_lint_and_help( + cx, + BLANKET_CLIPPY_RESTRICTION_LINTS, + lint.span(), + "`clippy::restriction` is not meant to be enabled as a group", + None, + "enable the restriction lints you need individually", + ); } } } diff --git a/src/tools/clippy/clippy_lints/src/attrs/deprecated_cfg_attr.rs b/src/tools/clippy/clippy_lints/src/attrs/deprecated_cfg_attr.rs index cd38aed26a3e0..0edb50be8c778 100644 --- a/src/tools/clippy/clippy_lints/src/attrs/deprecated_cfg_attr.rs +++ b/src/tools/clippy/clippy_lints/src/attrs/deprecated_cfg_attr.rs @@ -1,10 +1,10 @@ use super::{Attribute, DEPRECATED_CFG_ATTR, DEPRECATED_CLIPPY_CFG_ATTR, unnecessary_clippy_cfg}; use clippy_utils::diagnostics::span_lint_and_sugg; use clippy_utils::msrvs::{self, MsrvStack}; +use clippy_utils::sym; use rustc_ast::AttrStyle; use rustc_errors::Applicability; use rustc_lint::EarlyContext; -use rustc_span::sym; pub(super) fn check(cx: &EarlyContext<'_>, attr: &Attribute, msrv: &MsrvStack) { // check cfg_attr @@ -18,7 +18,7 @@ pub(super) fn check(cx: &EarlyContext<'_>, attr: &Attribute, msrv: &MsrvStack) { && msrv.meets(msrvs::TOOL_ATTRIBUTES) // check for `rustfmt_skip` and `rustfmt::skip` && let Some(skip_item) = &items[1].meta_item() - && (skip_item.has_name(sym!(rustfmt_skip)) + && (skip_item.has_name(sym::rustfmt_skip) || skip_item .path .segments @@ -73,7 +73,7 @@ fn check_deprecated_cfg_recursively(cx: &EarlyContext<'_>, attr: &rustc_ast::Met } fn check_cargo_clippy_attr(cx: &EarlyContext<'_>, item: &rustc_ast::MetaItem) { - if item.has_name(sym::feature) && item.value_str().is_some_and(|v| v.as_str() == "cargo-clippy") { + if item.has_name(sym::feature) && item.value_str() == Some(sym::cargo_clippy) { span_lint_and_sugg( cx, DEPRECATED_CLIPPY_CFG_ATTR, diff --git a/src/tools/clippy/clippy_lints/src/attrs/deprecated_semver.rs b/src/tools/clippy/clippy_lints/src/attrs/deprecated_semver.rs index d3153ec6613b5..bd6459d6f9dbc 100644 --- a/src/tools/clippy/clippy_lints/src/attrs/deprecated_semver.rs +++ b/src/tools/clippy/clippy_lints/src/attrs/deprecated_semver.rs @@ -1,15 +1,16 @@ use super::DEPRECATED_SEMVER; use clippy_utils::diagnostics::span_lint; +use clippy_utils::sym; use rustc_ast::{LitKind, MetaItemLit}; use rustc_lint::EarlyContext; use rustc_span::Span; use semver::Version; pub(super) fn check(cx: &EarlyContext<'_>, span: Span, lit: &MetaItemLit) { - if let LitKind::Str(is, _) = lit.kind { - if is.as_str() == "TBD" || Version::parse(is.as_str()).is_ok() { - return; - } + if let LitKind::Str(is, _) = lit.kind + && (is == sym::TBD || Version::parse(is.as_str()).is_ok()) + { + return; } span_lint( cx, diff --git a/src/tools/clippy/clippy_lints/src/attrs/duplicated_attributes.rs b/src/tools/clippy/clippy_lints/src/attrs/duplicated_attributes.rs index 4c84e61b1f26c..a851daaede71b 100644 --- a/src/tools/clippy/clippy_lints/src/attrs/duplicated_attributes.rs +++ b/src/tools/clippy/clippy_lints/src/attrs/duplicated_attributes.rs @@ -36,10 +36,7 @@ fn check_duplicated_attr( } let Some(ident) = attr.ident() else { return }; let name = ident.name; - if name == sym::doc - || name == sym::cfg_attr_trace - || name == sym::rustc_on_unimplemented - || name == sym::reason { + if name == sym::doc || name == sym::cfg_attr_trace || name == sym::rustc_on_unimplemented || name == sym::reason { // FIXME: Would be nice to handle `cfg_attr` as well. Only problem is to check that cfg // conditions are the same. // `#[rustc_on_unimplemented]` contains duplicated subattributes, that's expected. diff --git a/src/tools/clippy/clippy_lints/src/attrs/mod.rs b/src/tools/clippy/clippy_lints/src/attrs/mod.rs index e04d2ad5d13b7..f7f168cb26792 100644 --- a/src/tools/clippy/clippy_lints/src/attrs/mod.rs +++ b/src/tools/clippy/clippy_lints/src/attrs/mod.rs @@ -14,8 +14,9 @@ mod useless_attribute; mod utils; use clippy_config::Conf; +use clippy_utils::diagnostics::span_lint_and_help; use clippy_utils::msrvs::{self, Msrv, MsrvStack}; -use rustc_ast::{self as ast, Attribute, MetaItemInner, MetaItemKind}; +use rustc_ast::{self as ast, AttrArgs, AttrKind, Attribute, MetaItemInner, MetaItemKind}; use rustc_hir::{ImplItem, Item, ItemKind, TraitItem}; use rustc_lint::{EarlyContext, EarlyLintPass, LateContext, LateLintPass}; use rustc_session::impl_lint_pass; @@ -448,6 +449,31 @@ declare_clippy_lint! { "duplicated attribute" } +declare_clippy_lint! { + /// ### What it does + /// Checks for ignored tests without messages. + /// + /// ### Why is this bad? + /// The reason for ignoring the test may not be obvious. + /// + /// ### Example + /// ```no_run + /// #[test] + /// #[ignore] + /// fn test() {} + /// ``` + /// Use instead: + /// ```no_run + /// #[test] + /// #[ignore = "Some good reason"] + /// fn test() {} + /// ``` + #[clippy::version = "1.85.0"] + pub IGNORE_WITHOUT_REASON, + pedantic, + "ignored tests without messages" +} + pub struct Attributes { msrv: Msrv, } @@ -532,6 +558,7 @@ impl_lint_pass!(PostExpansionEarlyAttributes => [ ALLOW_ATTRIBUTES, ALLOW_ATTRIBUTES_WITHOUT_REASON, DEPRECATED_SEMVER, + IGNORE_WITHOUT_REASON, USELESS_ATTRIBUTE, BLANKET_CLIPPY_RESTRICTION_LINTS, SHOULD_PANIC_WITHOUT_EXPECT, @@ -546,28 +573,27 @@ impl EarlyLintPass for PostExpansionEarlyAttributes { } fn check_attribute(&mut self, cx: &EarlyContext<'_>, attr: &Attribute) { - if let Some(items) = &attr.meta_item_list() { - if let Some(ident) = attr.ident() { - if matches!(ident.name, sym::allow) && self.msrv.meets(msrvs::LINT_REASONS_STABILIZATION) { - allow_attributes::check(cx, attr); - } - if matches!(ident.name, sym::allow | sym::expect) && self.msrv.meets(msrvs::LINT_REASONS_STABILIZATION) + if let Some(items) = &attr.meta_item_list() + && let Some(ident) = attr.ident() + { + if matches!(ident.name, sym::allow) && self.msrv.meets(msrvs::LINT_REASONS_STABILIZATION) { + allow_attributes::check(cx, attr); + } + if matches!(ident.name, sym::allow | sym::expect) && self.msrv.meets(msrvs::LINT_REASONS_STABILIZATION) { + allow_attributes_without_reason::check(cx, ident.name, items, attr); + } + if is_lint_level(ident.name, attr.id) { + blanket_clippy_restriction_lints::check(cx, ident.name, items); + } + if items.is_empty() || !attr.has_name(sym::deprecated) { + return; + } + for item in items { + if let MetaItemInner::MetaItem(mi) = &item + && let MetaItemKind::NameValue(lit) = &mi.kind + && mi.has_name(sym::since) { - allow_attributes_without_reason::check(cx, ident.name, items, attr); - } - if is_lint_level(ident.name, attr.id) { - blanket_clippy_restriction_lints::check(cx, ident.name, items); - } - if items.is_empty() || !attr.has_name(sym::deprecated) { - return; - } - for item in items { - if let MetaItemInner::MetaItem(mi) = &item - && let MetaItemKind::NameValue(lit) = &mi.kind - && mi.has_name(sym::since) - { - deprecated_semver::check(cx, item.span(), lit); - } + deprecated_semver::check(cx, item.span(), lit); } } } @@ -575,6 +601,22 @@ impl EarlyLintPass for PostExpansionEarlyAttributes { if attr.has_name(sym::should_panic) { should_panic_without_expect::check(cx, attr); } + + if attr.has_name(sym::ignore) + && match &attr.kind { + AttrKind::Normal(normal_attr) => !matches!(normal_attr.item.args, AttrArgs::Eq { .. }), + AttrKind::DocComment(..) => true, + } + { + span_lint_and_help( + cx, + IGNORE_WITHOUT_REASON, + attr.span, + "`#[ignore]` without reason", + None, + "add a reason with `= \"..\"`", + ); + } } fn check_item(&mut self, cx: &EarlyContext<'_>, item: &'_ ast::Item) { diff --git a/src/tools/clippy/clippy_lints/src/attrs/repr_attributes.rs b/src/tools/clippy/clippy_lints/src/attrs/repr_attributes.rs index e5cfbaf952a70..df01c7fde1819 100644 --- a/src/tools/clippy/clippy_lints/src/attrs/repr_attributes.rs +++ b/src/tools/clippy/clippy_lints/src/attrs/repr_attributes.rs @@ -30,7 +30,7 @@ pub(super) fn check(cx: &LateContext<'_>, item_span: Span, attrs: &[Attribute], diag.warn( "unqualified `#[repr(packed)]` defaults to `#[repr(Rust, packed)]`, which has no stable ABI", ) - .help("qualify the desired ABI explicity via `#[repr(C, packed)]` or `#[repr(Rust, packed)]`") + .help("qualify the desired ABI explicitly via `#[repr(C, packed)]` or `#[repr(Rust, packed)]`") .span_label(packed_span, "`packed` representation set here"); }, ); diff --git a/src/tools/clippy/clippy_lints/src/attrs/useless_attribute.rs b/src/tools/clippy/clippy_lints/src/attrs/useless_attribute.rs index e3e081ce08e9f..d75b73280e632 100644 --- a/src/tools/clippy/clippy_lints/src/attrs/useless_attribute.rs +++ b/src/tools/clippy/clippy_lints/src/attrs/useless_attribute.rs @@ -2,10 +2,10 @@ use super::USELESS_ATTRIBUTE; use super::utils::{is_lint_level, is_word, namespace_and_lint}; use clippy_utils::diagnostics::span_lint_and_then; use clippy_utils::source::{SpanRangeExt, first_line_of_span}; +use clippy_utils::sym; use rustc_ast::{Attribute, Item, ItemKind}; use rustc_errors::Applicability; use rustc_lint::{EarlyContext, LintContext}; -use rustc_span::sym; pub(super) fn check(cx: &EarlyContext<'_>, item: &Item, attrs: &[Attribute]) { let skip_unused_imports = attrs.iter().any(|attr| attr.has_name(sym::macro_use)); @@ -14,75 +14,75 @@ pub(super) fn check(cx: &EarlyContext<'_>, item: &Item, attrs: &[Attribute]) { if attr.span.in_external_macro(cx.sess().source_map()) { return; } - if let Some(lint_list) = &attr.meta_item_list() { - if attr.ident().is_some_and(|ident| is_lint_level(ident.name, attr.id)) { - for lint in lint_list { - match item.kind { - ItemKind::Use(..) => { - let (namespace @ (Some(sym::clippy) | None), Some(name)) = namespace_and_lint(lint) else { - return; - }; + if let Some(lint_list) = &attr.meta_item_list() + && attr.ident().is_some_and(|ident| is_lint_level(ident.name, attr.id)) + { + for lint in lint_list { + match item.kind { + ItemKind::Use(..) => { + let (namespace @ (Some(sym::clippy) | None), Some(name)) = namespace_and_lint(lint) else { + return; + }; - if namespace.is_none() - && matches!( - name.as_str(), - "ambiguous_glob_reexports" - | "dead_code" - | "deprecated" - | "hidden_glob_reexports" - | "unreachable_pub" - | "unused" - | "unused_braces" - | "unused_import_braces" - | "unused_imports" - ) - { - return; - } + if namespace.is_none() + && matches!( + name.as_str(), + "ambiguous_glob_reexports" + | "dead_code" + | "deprecated" + | "hidden_glob_reexports" + | "unreachable_pub" + | "unused" + | "unused_braces" + | "unused_import_braces" + | "unused_imports" + ) + { + return; + } - if namespace == Some(sym::clippy) - && matches!( - name.as_str(), - "wildcard_imports" - | "enum_glob_use" - | "redundant_pub_crate" - | "macro_use_imports" - | "unsafe_removed_from_name" - | "module_name_repetitions" - | "single_component_path_imports" - | "disallowed_types" - | "unused_trait_names" - ) - { - return; - } - }, - ItemKind::ExternCrate(..) => { - if is_word(lint, sym::unused_imports) && skip_unused_imports { - return; - } - if is_word(lint, sym!(unused_extern_crates)) { - return; - } - }, - _ => {}, - } + if namespace == Some(sym::clippy) + && matches!( + name.as_str(), + "wildcard_imports" + | "enum_glob_use" + | "redundant_pub_crate" + | "macro_use_imports" + | "unsafe_removed_from_name" + | "module_name_repetitions" + | "single_component_path_imports" + | "disallowed_types" + | "unused_trait_names" + ) + { + return; + } + }, + ItemKind::ExternCrate(..) => { + if is_word(lint, sym::unused_imports) && skip_unused_imports { + return; + } + if is_word(lint, sym::unused_extern_crates) { + return; + } + }, + _ => {}, } - let line_span = first_line_of_span(cx, attr.span); + } + let line_span = first_line_of_span(cx, attr.span); - if let Some(src) = line_span.get_source_text(cx) { - if src.contains("#[") { - #[expect(clippy::collapsible_span_lint_calls)] - span_lint_and_then(cx, USELESS_ATTRIBUTE, line_span, "useless lint attribute", |diag| { - diag.span_suggestion( - line_span, - "if you just forgot a `!`, use", - src.replacen("#[", "#![", 1), - Applicability::MaybeIncorrect, - ); - }); - } - } + if let Some(src) = line_span.get_source_text(cx) + && src.contains("#[") + { + #[expect(clippy::collapsible_span_lint_calls)] + span_lint_and_then(cx, USELESS_ATTRIBUTE, line_span, "useless lint attribute", |diag| { + diag.span_suggestion( + line_span, + "if you just forgot a `!`, use", + src.replacen("#[", "#![", 1), + Applicability::MaybeIncorrect, + ); + }); } } } diff --git a/src/tools/clippy/clippy_lints/src/await_holding_invalid.rs b/src/tools/clippy/clippy_lints/src/await_holding_invalid.rs index 92a0c7f9acbcd..52d1d5b4c67a1 100644 --- a/src/tools/clippy/clippy_lints/src/await_holding_invalid.rs +++ b/src/tools/clippy/clippy_lints/src/await_holding_invalid.rs @@ -179,9 +179,14 @@ pub struct AwaitHolding { impl AwaitHolding { pub(crate) fn new(tcx: TyCtxt<'_>, conf: &'static Conf) -> Self { - Self { - def_ids: create_disallowed_map(tcx, &conf.await_holding_invalid_types), - } + let (def_ids, _) = create_disallowed_map( + tcx, + &conf.await_holding_invalid_types, + crate::disallowed_types::def_kind_predicate, + "type", + false, + ); + Self { def_ids } } } @@ -192,10 +197,9 @@ impl<'tcx> LateLintPass<'tcx> for AwaitHolding { def_id, .. }) = expr.kind + && let Some(coroutine_layout) = cx.tcx.mir_coroutine_witnesses(*def_id) { - if let Some(coroutine_layout) = cx.tcx.mir_coroutine_witnesses(*def_id) { - self.check_interior_types(cx, coroutine_layout); - } + self.check_interior_types(cx, coroutine_layout); } } } diff --git a/src/tools/clippy/clippy_lints/src/bool_assert_comparison.rs b/src/tools/clippy/clippy_lints/src/bool_assert_comparison.rs index adac2f27ea8cf..4a876b854165e 100644 --- a/src/tools/clippy/clippy_lints/src/bool_assert_comparison.rs +++ b/src/tools/clippy/clippy_lints/src/bool_assert_comparison.rs @@ -53,10 +53,10 @@ fn is_impl_not_trait_with_bool_out<'tcx>(cx: &LateContext<'tcx>, ty: Ty<'tcx>) - .not_trait() .filter(|trait_id| implements_trait(cx, ty, *trait_id, &[])) .and_then(|trait_id| { - cx.tcx.associated_items(trait_id).find_by_name_and_kind( + cx.tcx.associated_items(trait_id).find_by_ident_and_kind( cx.tcx, Ident::from_str("Output"), - ty::AssocKind::Type, + ty::AssocTag::Type, trait_id, ) }) diff --git a/src/tools/clippy/clippy_lints/src/bool_to_int_with_if.rs b/src/tools/clippy/clippy_lints/src/bool_to_int_with_if.rs index 612712d16843c..129e774784061 100644 --- a/src/tools/clippy/clippy_lints/src/bool_to_int_with_if.rs +++ b/src/tools/clippy/clippy_lints/src/bool_to_int_with_if.rs @@ -1,6 +1,7 @@ use clippy_utils::diagnostics::span_lint_and_then; +use clippy_utils::source::HasSession; use clippy_utils::sugg::Sugg; -use clippy_utils::{is_else_clause, is_in_const_context}; +use clippy_utils::{higher, is_else_clause, is_in_const_context, span_contains_comment}; use rustc_ast::LitKind; use rustc_errors::Applicability; use rustc_hir::{Expr, ExprKind}; @@ -46,18 +47,25 @@ declare_lint_pass!(BoolToIntWithIf => [BOOL_TO_INT_WITH_IF]); impl<'tcx> LateLintPass<'tcx> for BoolToIntWithIf { fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'tcx>) { - if let ExprKind::If(cond, then, Some(else_)) = expr.kind - && matches!(cond.kind, ExprKind::DropTemps(_)) + if !expr.span.from_expansion() + && let Some(higher::If { + cond, + then, + r#else: Some(r#else), + }) = higher::If::hir(expr) && let Some(then_lit) = as_int_bool_lit(then) - && let Some(else_lit) = as_int_bool_lit(else_) + && let Some(else_lit) = as_int_bool_lit(r#else) && then_lit != else_lit - && !expr.span.from_expansion() && !is_in_const_context(cx) { let ty = cx.typeck_results().expr_ty(then); - let mut applicability = Applicability::MachineApplicable; + let mut applicability = if span_contains_comment(cx.sess().source_map(), expr.span) { + Applicability::MaybeIncorrect + } else { + Applicability::MachineApplicable + }; let snippet = { - let mut sugg = Sugg::hir_with_applicability(cx, cond, "..", &mut applicability); + let mut sugg = Sugg::hir_with_context(cx, cond, expr.span.ctxt(), "..", &mut applicability); if !then_lit { sugg = !sugg; } @@ -72,7 +80,7 @@ impl<'tcx> LateLintPass<'tcx> for BoolToIntWithIf { s }; - let into_snippet = snippet.clone().maybe_par(); + let into_snippet = snippet.clone().maybe_paren(); let as_snippet = snippet.as_ty(ty); span_lint_and_then( @@ -91,10 +99,11 @@ impl<'tcx> LateLintPass<'tcx> for BoolToIntWithIf { } } -fn as_int_bool_lit(e: &Expr<'_>) -> Option { - if let ExprKind::Block(b, _) = e.kind +fn as_int_bool_lit(expr: &Expr<'_>) -> Option { + if let ExprKind::Block(b, _) = expr.kind && b.stmts.is_empty() && let Some(e) = b.expr + && !e.span.from_expansion() && let ExprKind::Lit(lit) = e.kind && let LitKind::Int(x, _) = lit.node { diff --git a/src/tools/clippy/clippy_lints/src/booleans.rs b/src/tools/clippy/clippy_lints/src/booleans.rs index 48b5d4da88860..bc6ba84772b3d 100644 --- a/src/tools/clippy/clippy_lints/src/booleans.rs +++ b/src/tools/clippy/clippy_lints/src/booleans.rs @@ -13,7 +13,7 @@ use rustc_hir::{BinOpKind, Body, Expr, ExprKind, FnDecl, UnOp}; use rustc_lint::{LateContext, LateLintPass, Level}; use rustc_session::impl_lint_pass; use rustc_span::def_id::LocalDefId; -use rustc_span::{Span, sym}; +use rustc_span::{Span, SyntaxContext, sym}; declare_clippy_lint! { /// ### What it does @@ -199,7 +199,7 @@ fn check_simplify_not(cx: &LateContext<'_>, msrv: Msrv, expr: &Expr<'_>) { && !expr.span.from_expansion() && !inner.span.from_expansion() && let Some(suggestion) = simplify_not(cx, msrv, inner) - && cx.tcx.lint_level_at_node(NONMINIMAL_BOOL, expr.hir_id).0 != Level::Allow + && cx.tcx.lint_level_at_node(NONMINIMAL_BOOL, expr.hir_id).level != Level::Allow { use clippy_utils::sugg::{Sugg, has_enclosing_paren}; let maybe_par = if let Some(sug) = Sugg::hir_opt(cx, inner) { @@ -242,11 +242,11 @@ struct Hir2Qmm<'a, 'tcx, 'v> { impl<'v> Hir2Qmm<'_, '_, 'v> { fn extract(&mut self, op: BinOpKind, a: &[&'v Expr<'_>], mut v: Vec) -> Result, String> { for a in a { - if let ExprKind::Binary(binop, lhs, rhs) = &a.kind { - if binop.node == op { - v = self.extract(op, &[lhs, rhs], v)?; - continue; - } + if let ExprKind::Binary(binop, lhs, rhs) = &a.kind + && binop.node == op + { + v = self.extract(op, &[lhs, rhs], v)?; + continue; } v.push(self.run(a)?); } @@ -349,9 +349,13 @@ impl SuggestContext<'_, '_, '_> { if let Some(str) = simplify_not(self.cx, self.msrv, terminal) { self.output.push_str(&str); } else { - self.output.push('!'); - self.output - .push_str(&Sugg::hir_opt(self.cx, terminal)?.maybe_par().to_string()); + let mut app = Applicability::MachineApplicable; + let snip = Sugg::hir_with_context(self.cx, terminal, SyntaxContext::root(), "", &mut app); + // Ignore the case If the expression is inside a macro expansion, or the default snippet is used + if app != Applicability::MachineApplicable { + return None; + } + self.output.push_str(&(!snip).to_string()); } }, True | False | Not(_) => { @@ -414,12 +418,12 @@ fn simplify_not(cx: &LateContext<'_>, curr_msrv: Msrv, expr: &Expr<'_>) -> Optio let lhs_snippet = lhs.span.get_source_text(cx)?; let rhs_snippet = rhs.span.get_source_text(cx)?; - if !(lhs_snippet.starts_with('(') && lhs_snippet.ends_with(')')) { - if let (ExprKind::Cast(..), BinOpKind::Ge) = (&lhs.kind, binop.node) { - // e.g. `(a as u64) < b`. Without the parens the `<` is - // interpreted as a start of generic arguments for `u64` - return Some(format!("({lhs_snippet}){op}{rhs_snippet}")); - } + if !(lhs_snippet.starts_with('(') && lhs_snippet.ends_with(')')) + && let (ExprKind::Cast(..), BinOpKind::Ge) = (&lhs.kind, binop.node) + { + // e.g. `(a as u64) < b`. Without the parens the `<` is + // interpreted as a start of generic arguments for `u64` + return Some(format!("({lhs_snippet}){op}{rhs_snippet}")); } Some(format!("{lhs_snippet}{op}{rhs_snippet}")) @@ -605,7 +609,7 @@ impl<'tcx> NonminimalBoolVisitor<'_, 'tcx> { } } let nonminimal_bool_lint = |mut suggestions: Vec<_>| { - if self.cx.tcx.lint_level_at_node(NONMINIMAL_BOOL, e.hir_id).0 != Level::Allow { + if self.cx.tcx.lint_level_at_node(NONMINIMAL_BOOL, e.hir_id).level != Level::Allow { suggestions.sort(); span_lint_hir_and_then( self.cx, diff --git a/src/tools/clippy/clippy_lints/src/borrow_deref_ref.rs b/src/tools/clippy/clippy_lints/src/borrow_deref_ref.rs index 8892a9e6b6b08..7cde007a9b66d 100644 --- a/src/tools/clippy/clippy_lints/src/borrow_deref_ref.rs +++ b/src/tools/clippy/clippy_lints/src/borrow_deref_ref.rs @@ -2,7 +2,7 @@ use crate::reference::DEREF_ADDROF; use clippy_utils::diagnostics::span_lint_and_then; use clippy_utils::source::SpanRangeExt; use clippy_utils::ty::implements_trait; -use clippy_utils::{get_parent_expr, is_from_proc_macro, is_lint_allowed}; +use clippy_utils::{get_parent_expr, is_from_proc_macro, is_lint_allowed, is_mutable}; use rustc_errors::Applicability; use rustc_hir::{BorrowKind, ExprKind, UnOp}; use rustc_lint::{LateContext, LateLintPass}; @@ -73,6 +73,9 @@ impl<'tcx> LateLintPass<'tcx> for BorrowDerefRef { } }) && !is_from_proc_macro(cx, e) + && let e_ty = cx.typeck_results().expr_ty_adjusted(e) + // check if the reference is coercing to a mutable reference + && (!matches!(e_ty.kind(), ty::Ref(_, _, Mutability::Mut)) || is_mutable(cx, deref_target)) && let Some(deref_text) = deref_target.span.get_source_text(cx) { span_lint_and_then( @@ -90,10 +93,10 @@ impl<'tcx> LateLintPass<'tcx> for BorrowDerefRef { // has deref trait -> give 2 help // doesn't have deref trait -> give 1 help - if let Some(deref_trait_id) = cx.tcx.lang_items().deref_trait() { - if !implements_trait(cx, *inner_ty, deref_trait_id, &[]) { - return; - } + if let Some(deref_trait_id) = cx.tcx.lang_items().deref_trait() + && !implements_trait(cx, *inner_ty, deref_trait_id, &[]) + { + return; } diag.span_suggestion( diff --git a/src/tools/clippy/clippy_lints/src/casts/borrow_as_ptr.rs b/src/tools/clippy/clippy_lints/src/casts/borrow_as_ptr.rs index 64345c81a2482..ad0a4f8cdf35a 100644 --- a/src/tools/clippy/clippy_lints/src/casts/borrow_as_ptr.rs +++ b/src/tools/clippy/clippy_lints/src/casts/borrow_as_ptr.rs @@ -1,11 +1,12 @@ -use clippy_utils::diagnostics::span_lint_and_sugg; +use clippy_utils::diagnostics::{span_lint_and_sugg, span_lint_and_then}; use clippy_utils::msrvs::Msrv; use clippy_utils::source::{snippet_with_applicability, snippet_with_context}; use clippy_utils::sugg::has_enclosing_paren; -use clippy_utils::{is_expr_temporary_value, is_lint_allowed, msrvs, std_or_core}; +use clippy_utils::{get_parent_expr, is_expr_temporary_value, is_lint_allowed, msrvs, std_or_core}; use rustc_errors::Applicability; use rustc_hir::{BorrowKind, Expr, ExprKind, Mutability, Ty, TyKind}; use rustc_lint::LateContext; +use rustc_middle::ty::adjustment::{Adjust, AutoBorrow}; use rustc_span::BytePos; use super::BORROW_AS_PTR; @@ -29,10 +30,6 @@ pub(super) fn check<'tcx>( } let (suggestion, span) = if msrv.meets(cx, msrvs::RAW_REF_OP) { - let operator_kind = match mutability { - Mutability::Not => "const", - Mutability::Mut => "mut", - }; // Make sure that the span to be replaced doesn't include parentheses, that could break the // suggestion. let span = if has_enclosing_paren(snippet_with_applicability(cx, expr.span, "", &mut app)) { @@ -42,7 +39,7 @@ pub(super) fn check<'tcx>( } else { expr.span }; - (format!("&raw {operator_kind} {snip}"), span) + (format!("&raw {} {snip}", mutability.ptr_str()), span) } else { let Some(std_or_core) = std_or_core(cx) else { return false; @@ -59,3 +56,25 @@ pub(super) fn check<'tcx>( } false } + +/// Check for an implicit cast from reference to raw pointer outside an explicit `as`. +pub(super) fn check_implicit_cast(cx: &LateContext<'_>, expr: &Expr<'_>) { + if !expr.span.from_expansion() + && let ExprKind::AddrOf(BorrowKind::Ref, _, pointee) = expr.kind + && !matches!(get_parent_expr(cx, expr).map(|e| e.kind), Some(ExprKind::Cast(..))) + && let [deref, borrow] = cx.typeck_results().expr_adjustments(expr) + && matches!(deref.kind, Adjust::Deref(..)) + && let Adjust::Borrow(AutoBorrow::RawPtr(mutability)) = borrow.kind + // Do not suggest taking a raw pointer to a temporary value + && !is_expr_temporary_value(cx, pointee) + { + span_lint_and_then(cx, BORROW_AS_PTR, expr.span, "implicit borrow as raw pointer", |diag| { + diag.span_suggestion_verbose( + expr.span.until(pointee.span), + "use a raw pointer instead", + format!("&raw {} ", mutability.ptr_str()), + Applicability::MachineApplicable, + ); + }); + } +} diff --git a/src/tools/clippy/clippy_lints/src/casts/cast_abs_to_unsigned.rs b/src/tools/clippy/clippy_lints/src/casts/cast_abs_to_unsigned.rs index 8b3529e84fc6e..ba31a51f738a6 100644 --- a/src/tools/clippy/clippy_lints/src/casts/cast_abs_to_unsigned.rs +++ b/src/tools/clippy/clippy_lints/src/casts/cast_abs_to_unsigned.rs @@ -1,6 +1,7 @@ use clippy_utils::diagnostics::span_lint_and_sugg; use clippy_utils::msrvs::{self, Msrv}; use clippy_utils::sugg::Sugg; +use clippy_utils::sym; use rustc_errors::Applicability; use rustc_hir::{Expr, ExprKind}; use rustc_lint::LateContext; @@ -19,7 +20,7 @@ pub(super) fn check( if let ty::Int(from) = cast_from.kind() && let ty::Uint(to) = cast_to.kind() && let ExprKind::MethodCall(method_path, receiver, [], _) = cast_expr.kind - && method_path.ident.name.as_str() == "abs" + && method_path.ident.name == sym::abs && msrv.meets(cx, msrvs::UNSIGNED_ABS) { let span = if from.bit_width() == to.bit_width() { @@ -36,7 +37,7 @@ pub(super) fn check( span, format!("casting the result of `{cast_from}::abs()` to {cast_to}"), "replace with", - format!("{}.unsigned_abs()", Sugg::hir(cx, receiver, "..").maybe_par()), + format!("{}.unsigned_abs()", Sugg::hir(cx, receiver, "..").maybe_paren()), Applicability::MachineApplicable, ); } diff --git a/src/tools/clippy/clippy_lints/src/casts/cast_lossless.rs b/src/tools/clippy/clippy_lints/src/casts/cast_lossless.rs index 3ae43732dc03f..0f066fae11844 100644 --- a/src/tools/clippy/clippy_lints/src/casts/cast_lossless.rs +++ b/src/tools/clippy/clippy_lints/src/casts/cast_lossless.rs @@ -42,7 +42,7 @@ pub(super) fn check( diag.span_suggestion_verbose( expr.span, "use `Into::into` instead", - format!("{}.into()", from_sugg.maybe_par()), + format!("{}.into()", from_sugg.maybe_paren()), applicability, ); }, diff --git a/src/tools/clippy/clippy_lints/src/casts/cast_possible_truncation.rs b/src/tools/clippy/clippy_lints/src/casts/cast_possible_truncation.rs index ca973f4bb1aae..e92879b853d7b 100644 --- a/src/tools/clippy/clippy_lints/src/casts/cast_possible_truncation.rs +++ b/src/tools/clippy/clippy_lints/src/casts/cast_possible_truncation.rs @@ -1,9 +1,9 @@ use clippy_utils::consts::{ConstEvalCtxt, Constant}; use clippy_utils::diagnostics::{span_lint, span_lint_and_then}; -use clippy_utils::expr_or_init; use clippy_utils::source::snippet; use clippy_utils::sugg::Sugg; use clippy_utils::ty::{get_discriminant_value, is_isize_or_usize}; +use clippy_utils::{expr_or_init, sym}; use rustc_abi::IntegerType; use rustc_errors::{Applicability, Diag}; use rustc_hir::def::{DefKind, Res}; @@ -64,16 +64,16 @@ fn apply_reductions(cx: &LateContext<'_>, nbits: u64, expr: &Expr<'_>, signed: b apply_reductions(cx, nbits, left, signed).min(max_bits.unwrap_or(u64::MAX)) }, ExprKind::MethodCall(method, _, [lo, hi], _) => { - if method.ident.as_str() == "clamp" { + if method.ident.as_str() == "clamp" //FIXME: make this a diagnostic item - if let (Some(lo_bits), Some(hi_bits)) = (get_constant_bits(cx, lo), get_constant_bits(cx, hi)) { - return lo_bits.max(hi_bits); - } + && let (Some(lo_bits), Some(hi_bits)) = (get_constant_bits(cx, lo), get_constant_bits(cx, hi)) + { + return lo_bits.max(hi_bits); } nbits }, ExprKind::MethodCall(method, _value, [], _) => { - if method.ident.name.as_str() == "signum" { + if method.ident.name == sym::signum { 0 // do not lint if cast comes from a `signum` function } else { nbits @@ -185,7 +185,7 @@ fn offer_suggestion( ) { let cast_to_snip = snippet(cx, cast_to_span, ".."); let suggestion = if cast_to_snip == "_" { - format!("{}.try_into()", Sugg::hir(cx, cast_expr, "..").maybe_par()) + format!("{}.try_into()", Sugg::hir(cx, cast_expr, "..").maybe_paren()) } else { format!("{cast_to_snip}::try_from({})", Sugg::hir(cx, cast_expr, "..")) }; diff --git a/src/tools/clippy/clippy_lints/src/casts/cast_ptr_alignment.rs b/src/tools/clippy/clippy_lints/src/casts/cast_ptr_alignment.rs index 57a135abc2e2b..01020f3eee21e 100644 --- a/src/tools/clippy/clippy_lints/src/casts/cast_ptr_alignment.rs +++ b/src/tools/clippy/clippy_lints/src/casts/cast_ptr_alignment.rs @@ -1,11 +1,10 @@ use clippy_utils::diagnostics::span_lint; use clippy_utils::ty::is_c_void; -use clippy_utils::{get_parent_expr, is_hir_ty_cfg_dependant}; +use clippy_utils::{get_parent_expr, is_hir_ty_cfg_dependant, sym}; use rustc_hir::{Expr, ExprKind, GenericArg}; use rustc_lint::LateContext; use rustc_middle::ty::layout::LayoutOf; use rustc_middle::ty::{self, Ty}; -use rustc_span::sym; use super::CAST_PTR_ALIGNMENT; @@ -19,16 +18,15 @@ pub(super) fn check(cx: &LateContext<'_>, expr: &Expr<'_>) { cx.typeck_results().expr_ty(expr), ); lint_cast_ptr_alignment(cx, expr, cast_from, cast_to); - } else if let ExprKind::MethodCall(method_path, self_arg, [], _) = &expr.kind { - if method_path.ident.name.as_str() == "cast" - && let Some(generic_args) = method_path.args - && let [GenericArg::Type(cast_to)] = generic_args.args - // There probably is no obvious reason to do this, just to be consistent with `as` cases. - && !is_hir_ty_cfg_dependant(cx, cast_to.as_unambig_ty()) - { - let (cast_from, cast_to) = (cx.typeck_results().expr_ty(self_arg), cx.typeck_results().expr_ty(expr)); - lint_cast_ptr_alignment(cx, expr, cast_from, cast_to); - } + } else if let ExprKind::MethodCall(method_path, self_arg, [], _) = &expr.kind + && method_path.ident.name == sym::cast + && let Some(generic_args) = method_path.args + && let [GenericArg::Type(cast_to)] = generic_args.args + // There probably is no obvious reason to do this, just to be consistent with `as` cases. + && !is_hir_ty_cfg_dependant(cx, cast_to.as_unambig_ty()) + { + let (cast_from, cast_to) = (cx.typeck_results().expr_ty(self_arg), cx.typeck_results().expr_ty(expr)); + lint_cast_ptr_alignment(cx, expr, cast_from, cast_to); } } diff --git a/src/tools/clippy/clippy_lints/src/casts/cast_slice_different_sizes.rs b/src/tools/clippy/clippy_lints/src/casts/cast_slice_different_sizes.rs index c48f253606dcc..a5b295c88b1c7 100644 --- a/src/tools/clippy/clippy_lints/src/casts/cast_slice_different_sizes.rs +++ b/src/tools/clippy/clippy_lints/src/casts/cast_slice_different_sizes.rs @@ -21,42 +21,41 @@ pub(super) fn check<'tcx>(cx: &LateContext<'tcx>, expr: &Expr<'tcx>, msrv: Msrv) start_ty, end_ty, }) = expr_cast_chain_tys(cx, expr) + && let (Ok(from_layout), Ok(to_layout)) = (cx.layout_of(start_ty.ty), cx.layout_of(end_ty.ty)) { - if let (Ok(from_layout), Ok(to_layout)) = (cx.layout_of(start_ty.ty), cx.layout_of(end_ty.ty)) { - let from_size = from_layout.size.bytes(); - let to_size = to_layout.size.bytes(); - if from_size != to_size && from_size != 0 && to_size != 0 && msrv.meets(cx, msrvs::PTR_SLICE_RAW_PARTS) { - span_lint_and_then( - cx, - CAST_SLICE_DIFFERENT_SIZES, - expr.span, - format!( - "casting between raw pointers to `[{}]` (element size {from_size}) and `[{}]` (element size {to_size}) does not adjust the count", - start_ty.ty, end_ty.ty, - ), - |diag| { - let ptr_snippet = source::snippet(cx, left_cast.span, ".."); + let from_size = from_layout.size.bytes(); + let to_size = to_layout.size.bytes(); + if from_size != to_size && from_size != 0 && to_size != 0 && msrv.meets(cx, msrvs::PTR_SLICE_RAW_PARTS) { + span_lint_and_then( + cx, + CAST_SLICE_DIFFERENT_SIZES, + expr.span, + format!( + "casting between raw pointers to `[{}]` (element size {from_size}) and `[{}]` (element size {to_size}) does not adjust the count", + start_ty.ty, end_ty.ty, + ), + |diag| { + let ptr_snippet = source::snippet(cx, left_cast.span, ".."); - let (mutbl_fn_str, mutbl_ptr_str) = match end_ty.mutbl { - Mutability::Mut => ("_mut", "mut"), - Mutability::Not => ("", "const"), - }; - let sugg = format!( - "core::ptr::slice_from_raw_parts{mutbl_fn_str}({ptr_snippet} as *{mutbl_ptr_str} {}, ..)", - // get just the ty from the TypeAndMut so that the printed type isn't something like `mut - // T`, extract just the `T` - end_ty.ty - ); + let (mutbl_fn_str, mutbl_ptr_str) = match end_ty.mutbl { + Mutability::Mut => ("_mut", "mut"), + Mutability::Not => ("", "const"), + }; + let sugg = format!( + "core::ptr::slice_from_raw_parts{mutbl_fn_str}({ptr_snippet} as *{mutbl_ptr_str} {}, ..)", + // get just the ty from the TypeAndMut so that the printed type isn't something like `mut + // T`, extract just the `T` + end_ty.ty + ); - diag.span_suggestion( - expr.span, - format!("replace with `ptr::slice_from_raw_parts{mutbl_fn_str}`"), - sugg, - rustc_errors::Applicability::HasPlaceholders, - ); - }, - ); - } + diag.span_suggestion( + expr.span, + format!("replace with `ptr::slice_from_raw_parts{mutbl_fn_str}`"), + sugg, + rustc_errors::Applicability::HasPlaceholders, + ); + }, + ); } } } diff --git a/src/tools/clippy/clippy_lints/src/casts/manual_dangling_ptr.rs b/src/tools/clippy/clippy_lints/src/casts/manual_dangling_ptr.rs new file mode 100644 index 0000000000000..8ace27eca895e --- /dev/null +++ b/src/tools/clippy/clippy_lints/src/casts/manual_dangling_ptr.rs @@ -0,0 +1,82 @@ +use clippy_utils::diagnostics::span_lint_and_sugg; +use clippy_utils::source::SpanRangeExt; +use clippy_utils::ty::is_normalizable; +use clippy_utils::{expr_or_init, match_def_path, path_def_id, paths, std_or_core}; +use rustc_ast::LitKind; +use rustc_errors::Applicability; +use rustc_hir::{Expr, ExprKind, GenericArg, Mutability, QPath, Ty, TyKind}; +use rustc_lint::LateContext; +use rustc_span::source_map::Spanned; + +use super::MANUAL_DANGLING_PTR; + +pub(super) fn check(cx: &LateContext<'_>, expr: &Expr<'_>, from: &Expr<'_>, to: &Ty<'_>) { + if let TyKind::Ptr(ref ptr_ty) = to.kind { + let init_expr = expr_or_init(cx, from); + if is_expr_const_aligned(cx, init_expr, ptr_ty.ty) + && let Some(std_or_core) = std_or_core(cx) + { + let sugg_fn = match ptr_ty.mutbl { + Mutability::Not => "ptr::dangling", + Mutability::Mut => "ptr::dangling_mut", + }; + + let sugg = if let TyKind::Infer(()) = ptr_ty.ty.kind { + format!("{std_or_core}::{sugg_fn}()") + } else if let Some(mut_ty_snip) = ptr_ty.ty.span.get_source_text(cx) { + format!("{std_or_core}::{sugg_fn}::<{mut_ty_snip}>()") + } else { + return; + }; + + span_lint_and_sugg( + cx, + MANUAL_DANGLING_PTR, + expr.span, + "manual creation of a dangling pointer", + "use", + sugg, + Applicability::MachineApplicable, + ); + } + } +} + +// Checks if the given expression is a call to `align_of` whose generic argument matches the target +// type, or a positive constant literal that matches the target type's alignment. +fn is_expr_const_aligned(cx: &LateContext<'_>, expr: &Expr<'_>, to: &Ty<'_>) -> bool { + match expr.kind { + ExprKind::Call(fun, _) => is_align_of_call(cx, fun, to), + ExprKind::Lit(lit) => is_literal_aligned(cx, lit, to), + _ => false, + } +} + +fn is_align_of_call(cx: &LateContext<'_>, fun: &Expr<'_>, to: &Ty<'_>) -> bool { + if let ExprKind::Path(QPath::Resolved(_, path)) = fun.kind + && let Some(fun_id) = path_def_id(cx, fun) + && match_def_path(cx, fun_id, &paths::ALIGN_OF) + && let Some(args) = path.segments.last().and_then(|seg| seg.args) + && let [GenericArg::Type(generic_ty)] = args.args + { + let typeck = cx.typeck_results(); + return typeck.node_type(generic_ty.hir_id) == typeck.node_type(to.hir_id); + } + false +} + +fn is_literal_aligned(cx: &LateContext<'_>, lit: &Spanned, to: &Ty<'_>) -> bool { + let LitKind::Int(val, _) = lit.node else { return false }; + if val == 0 { + return false; + } + let to_mid_ty = cx.typeck_results().node_type(to.hir_id); + is_normalizable(cx, cx.param_env, to_mid_ty) + && cx + .tcx + .layout_of(cx.typing_env().as_query_input(to_mid_ty)) + .is_ok_and(|layout| { + let align = u128::from(layout.align.abi.bytes()); + u128::from(val) <= align + }) +} diff --git a/src/tools/clippy/clippy_lints/src/casts/mod.rs b/src/tools/clippy/clippy_lints/src/casts/mod.rs index dc2a1fa85bf5c..76931fce209e5 100644 --- a/src/tools/clippy/clippy_lints/src/casts/mod.rs +++ b/src/tools/clippy/clippy_lints/src/casts/mod.rs @@ -17,6 +17,7 @@ mod char_lit_as_u8; mod fn_to_numeric_cast; mod fn_to_numeric_cast_any; mod fn_to_numeric_cast_with_truncation; +mod manual_dangling_ptr; mod ptr_as_ptr; mod ptr_cast_constness; mod ref_as_ptr; @@ -71,7 +72,7 @@ declare_clippy_lint! { /// ### Example /// ```no_run /// let y: i8 = -1; - /// y as u128; // will return 18446744073709551615 + /// y as u64; // will return 18446744073709551615 /// ``` #[clippy::version = "pre 1.29.0"] pub CAST_SIGN_LOSS, @@ -759,6 +760,32 @@ declare_clippy_lint! { "detects `as *mut _` and `as *const _` conversion" } +declare_clippy_lint! { + /// ### What it does + /// Checks for casts of small constant literals or `mem::align_of` results to raw pointers. + /// + /// ### Why is this bad? + /// This creates a dangling pointer and is better expressed as + /// {`std`, `core`}`::ptr::`{`dangling`, `dangling_mut`}. + /// + /// ### Example + /// ```no_run + /// let ptr = 4 as *const u32; + /// let aligned = std::mem::align_of::() as *const u32; + /// let mut_ptr: *mut i64 = 8 as *mut _; + /// ``` + /// Use instead: + /// ```no_run + /// let ptr = std::ptr::dangling::(); + /// let aligned = std::ptr::dangling::(); + /// let mut_ptr: *mut i64 = std::ptr::dangling_mut(); + /// ``` + #[clippy::version = "1.87.0"] + pub MANUAL_DANGLING_PTR, + style, + "casting small constant literals to pointers to create dangling pointers" +} + pub struct Casts { msrv: Msrv, } @@ -795,6 +822,7 @@ impl_lint_pass!(Casts => [ ZERO_PTR, REF_AS_PTR, AS_POINTER_UNDERSCORE, + MANUAL_DANGLING_PTR, ]); impl<'tcx> LateLintPass<'tcx> for Casts { @@ -823,6 +851,10 @@ impl<'tcx> LateLintPass<'tcx> for Casts { fn_to_numeric_cast_with_truncation::check(cx, expr, cast_from_expr, cast_from, cast_to); zero_ptr::check(cx, expr, cast_from_expr, cast_to_hir); + if self.msrv.meets(cx, msrvs::MANUAL_DANGLING_PTR) { + manual_dangling_ptr::check(cx, expr, cast_from_expr, cast_to_hir); + } + if cast_to.is_numeric() { cast_possible_truncation::check(cx, expr, cast_from_expr, cast_from, cast_to, cast_to_hir.span); if cast_from.is_numeric() { @@ -846,6 +878,9 @@ impl<'tcx> LateLintPass<'tcx> for Casts { } } + if self.msrv.meets(cx, msrvs::RAW_REF_OP) { + borrow_as_ptr::check_implicit_cast(cx, expr); + } cast_ptr_alignment::check(cx, expr); char_lit_as_u8::check(cx, expr); ptr_as_ptr::check(cx, expr, self.msrv); diff --git a/src/tools/clippy/clippy_lints/src/casts/ptr_as_ptr.rs b/src/tools/clippy/clippy_lints/src/casts/ptr_as_ptr.rs index d57e391b55d54..6f944914b8fd6 100644 --- a/src/tools/clippy/clippy_lints/src/casts/ptr_as_ptr.rs +++ b/src/tools/clippy/clippy_lints/src/casts/ptr_as_ptr.rs @@ -81,7 +81,7 @@ pub(super) fn check(cx: &LateContext<'_>, expr: &Expr<'_>, msrv: Msrv) { ( "try `pointer::cast`, a safer alternative", - format!("{}.cast{turbofish}()", cast_expr_sugg.maybe_par()), + format!("{}.cast{turbofish}()", cast_expr_sugg.maybe_paren()), ) }; diff --git a/src/tools/clippy/clippy_lints/src/casts/ptr_cast_constness.rs b/src/tools/clippy/clippy_lints/src/casts/ptr_cast_constness.rs index cad9c1df273f0..2471c7355518b 100644 --- a/src/tools/clippy/clippy_lints/src/casts/ptr_cast_constness.rs +++ b/src/tools/clippy/clippy_lints/src/casts/ptr_cast_constness.rs @@ -53,7 +53,8 @@ pub(super) fn check<'tcx>( } if msrv.meets(cx, msrvs::POINTER_CAST_CONSTNESS) { - let sugg = Sugg::hir(cx, cast_expr, "_"); + let mut app = Applicability::MachineApplicable; + let sugg = Sugg::hir_with_context(cx, cast_expr, expr.span.ctxt(), "_", &mut app); let constness = match *to_mutbl { Mutability::Not => "const", Mutability::Mut => "mut", @@ -65,8 +66,8 @@ pub(super) fn check<'tcx>( expr.span, "`as` casting between raw pointers while changing only its constness", format!("try `pointer::cast_{constness}`, a safer alternative"), - format!("{}.cast_{constness}()", sugg.maybe_par()), - Applicability::MachineApplicable, + format!("{}.cast_{constness}()", sugg.maybe_paren()), + app, ); } } diff --git a/src/tools/clippy/clippy_lints/src/casts/unnecessary_cast.rs b/src/tools/clippy/clippy_lints/src/casts/unnecessary_cast.rs index 7885f171461d7..8e8c55cf38329 100644 --- a/src/tools/clippy/clippy_lints/src/casts/unnecessary_cast.rs +++ b/src/tools/clippy/clippy_lints/src/casts/unnecessary_cast.rs @@ -8,7 +8,9 @@ use rustc_errors::Applicability; use rustc_hir::def::{DefKind, Res}; use rustc_hir::{Expr, ExprKind, Lit, Node, Path, QPath, TyKind, UnOp}; use rustc_lint::{LateContext, LintContext}; +use rustc_middle::ty::adjustment::Adjust; use rustc_middle::ty::{self, FloatTy, InferTy, Ty}; +use rustc_span::{Symbol, sym}; use std::ops::ControlFlow; use super::UNNECESSARY_CAST; @@ -130,11 +132,11 @@ pub(super) fn check<'tcx>( | LitKind::Float(_, LitFloatType::Suffixed(_)) if cast_from.kind() == cast_to.kind() => { - if let Some(src) = cast_expr.span.get_source_text(cx) { - if let Some(num_lit) = NumericLiteral::from_lit_kind(&src, &lit.node) { - lint_unnecessary_cast(cx, expr, num_lit.integer, cast_from, cast_to); - return true; - } + if let Some(src) = cast_expr.span.get_source_text(cx) + && let Some(num_lit) = NumericLiteral::from_lit_kind(&src, &lit.node) + { + lint_unnecessary_cast(cx, expr, num_lit.integer, cast_from, cast_to); + return true; } }, _ => {}, @@ -142,23 +144,50 @@ pub(super) fn check<'tcx>( } if cast_from.kind() == cast_to.kind() && !expr.span.in_external_macro(cx.sess().source_map()) { + enum MaybeParenOrBlock { + Paren, + Block, + Nothing, + } + + fn is_borrow_expr(cx: &LateContext<'_>, expr: &Expr<'_>) -> bool { + matches!(expr.kind, ExprKind::AddrOf(..)) + || cx + .typeck_results() + .expr_adjustments(expr) + .first() + .is_some_and(|adj| matches!(adj.kind, Adjust::Borrow(_))) + } + + fn is_in_allowed_macro(cx: &LateContext<'_>, expr: &Expr<'_>) -> bool { + const ALLOWED_MACROS: &[Symbol] = &[ + sym::format_args_macro, + sym::assert_eq_macro, + sym::debug_assert_eq_macro, + sym::assert_ne_macro, + sym::debug_assert_ne_macro, + ]; + matches!(expr.span.ctxt().outer_expn_data().macro_def_id, Some(def_id) if + cx.tcx.get_diagnostic_name(def_id).is_some_and(|sym| ALLOWED_MACROS.contains(&sym))) + } + if let Some(id) = path_to_local(cast_expr) - && !cx.tcx.hir().span(id).eq_ctxt(cast_expr.span) + && !cx.tcx.hir_span(id).eq_ctxt(cast_expr.span) { // Binding context is different than the identifiers context. // Weird macro wizardry could be involved here. return false; } - // If the whole cast expression is a unary expression (`(*x as T)`) or an addressof - // expression (`(&x as T)`), then not surrounding the suggestion into a block risks us - // changing the precedence of operators if the cast expression is followed by an operation - // with higher precedence than the unary operator (`(*x as T).foo()` would become - // `*x.foo()`, which changes what the `*` applies on). - // The same is true if the expression encompassing the cast expression is a unary - // expression or an addressof expression. - let needs_block = matches!(cast_expr.kind, ExprKind::Unary(..) | ExprKind::AddrOf(..)) - || get_parent_expr(cx, expr).is_some_and(|e| matches!(e.kind, ExprKind::Unary(..) | ExprKind::AddrOf(..))); + // Changing `&(x as i32)` to `&x` would change the meaning of the code because the previous creates + // a reference to the temporary while the latter creates a reference to the original value. + let surrounding = match cx.tcx.parent_hir_node(expr.hir_id) { + Node::Expr(parent) if is_borrow_expr(cx, parent) && !is_in_allowed_macro(cx, parent) => { + MaybeParenOrBlock::Block + }, + Node::Expr(parent) if cast_expr.precedence() < parent.precedence() => MaybeParenOrBlock::Paren, + _ => MaybeParenOrBlock::Nothing, + }; span_lint_and_sugg( cx, @@ -166,10 +195,10 @@ pub(super) fn check<'tcx>( expr.span, format!("casting to the same type is unnecessary (`{cast_from}` -> `{cast_to}`)"), "try", - if needs_block { - format!("{{ {cast_str} }}") - } else { - cast_str + match surrounding { + MaybeParenOrBlock::Paren => format!("({cast_str})"), + MaybeParenOrBlock::Block => format!("{{ {cast_str} }}"), + MaybeParenOrBlock::Nothing => cast_str, }, Applicability::MachineApplicable, ); diff --git a/src/tools/clippy/clippy_lints/src/checked_conversions.rs b/src/tools/clippy/clippy_lints/src/checked_conversions.rs index b36c8662289ca..8ada608049c7b 100644 --- a/src/tools/clippy/clippy_lints/src/checked_conversions.rs +++ b/src/tools/clippy/clippy_lints/src/checked_conversions.rs @@ -253,11 +253,11 @@ fn get_types_from_cast<'a>( match limit.kind { // `from_type::from(_)` ExprKind::Call(path, _) => { - if let ExprKind::Path(ref path) = path.kind { + if let ExprKind::Path(ref path) = path.kind // `to_type` - if let Some(to_type) = get_implementing_type(path, types, func) { - return Some((from_type, to_type)); - } + && let Some(to_type) = get_implementing_type(path, types, func) + { + return Some((from_type, to_type)); } }, // `to_type::MAX` diff --git a/src/tools/clippy/clippy_lints/src/cognitive_complexity.rs b/src/tools/clippy/clippy_lints/src/cognitive_complexity.rs index a1ff20dee721f..1d44c7e9c88b0 100644 --- a/src/tools/clippy/clippy_lints/src/cognitive_complexity.rs +++ b/src/tools/clippy/clippy_lints/src/cognitive_complexity.rs @@ -62,6 +62,7 @@ impl CognitiveComplexity { let mut cc = 1u64; let mut returns = 0u64; + let mut prev_expr: Option<&ExprKind<'tcx>> = None; let _: Option = for_each_expr_without_closures(expr, |e| { match e.kind { ExprKind::If(_, _, _) => { @@ -73,9 +74,14 @@ impl CognitiveComplexity { } cc += arms.iter().filter(|arm| arm.guard.is_some()).count() as u64; }, - ExprKind::Ret(_) => returns += 1, + ExprKind::Ret(_) => { + if !matches!(prev_expr, Some(ExprKind::Ret(_))) { + returns += 1; + } + }, _ => {}, } + prev_expr = Some(&e.kind); ControlFlow::Continue(()) }); diff --git a/src/tools/clippy/clippy_lints/src/collapsible_if.rs b/src/tools/clippy/clippy_lints/src/collapsible_if.rs index e73bfc6ebf7a1..20fae8a6775b9 100644 --- a/src/tools/clippy/clippy_lints/src/collapsible_if.rs +++ b/src/tools/clippy/clippy_lints/src/collapsible_if.rs @@ -1,10 +1,12 @@ +use clippy_config::Conf; use clippy_utils::diagnostics::{span_lint_and_sugg, span_lint_and_then}; -use clippy_utils::source::{snippet, snippet_block, snippet_block_with_applicability}; -use clippy_utils::sugg::Sugg; -use rustc_ast::ast; +use clippy_utils::source::{IntoSpan as _, SpanRangeExt, snippet, snippet_block, snippet_block_with_applicability}; +use rustc_ast::BinOpKind; use rustc_errors::Applicability; -use rustc_lint::{EarlyContext, EarlyLintPass}; -use rustc_session::declare_lint_pass; +use rustc_hir::{Block, Expr, ExprKind, StmtKind}; +use rustc_lint::{LateContext, LateLintPass}; +use rustc_middle::ty::TyCtxt; +use rustc_session::impl_lint_pass; use rustc_span::Span; declare_clippy_lint! { @@ -75,105 +77,152 @@ declare_clippy_lint! { "nested `else`-`if` expressions that can be collapsed (e.g., `else { if x { ... } }`)" } -declare_lint_pass!(CollapsibleIf => [COLLAPSIBLE_IF, COLLAPSIBLE_ELSE_IF]); +pub struct CollapsibleIf { + let_chains_enabled: bool, + lint_commented_code: bool, +} + +impl CollapsibleIf { + pub fn new(tcx: TyCtxt<'_>, conf: &'static Conf) -> Self { + Self { + let_chains_enabled: tcx.features().let_chains(), + lint_commented_code: conf.lint_commented_code, + } + } -impl EarlyLintPass for CollapsibleIf { - fn check_expr(&mut self, cx: &EarlyContext<'_>, expr: &ast::Expr) { - if let ast::ExprKind::If(cond, then, else_) = &expr.kind + fn check_collapsible_else_if(cx: &LateContext<'_>, then_span: Span, else_block: &Block<'_>) { + if !block_starts_with_comment(cx, else_block) + && let Some(else_) = expr_block(else_block) + && cx.tcx.hir_attrs(else_.hir_id).is_empty() + && !else_.span.from_expansion() + && let ExprKind::If(..) = else_.kind + { + // Prevent "elseif" + // Check that the "else" is followed by whitespace + let up_to_else = then_span.between(else_block.span); + let requires_space = if let Some(c) = snippet(cx, up_to_else, "..").chars().last() { + !c.is_whitespace() + } else { + false + }; + + let mut applicability = Applicability::MachineApplicable; + span_lint_and_sugg( + cx, + COLLAPSIBLE_ELSE_IF, + else_block.span, + "this `else { if .. }` block can be collapsed", + "collapse nested if block", + format!( + "{}{}", + if requires_space { " " } else { "" }, + snippet_block_with_applicability(cx, else_.span, "..", Some(else_block.span), &mut applicability) + ), + applicability, + ); + } + } + + fn check_collapsible_if_if(&self, cx: &LateContext<'_>, expr: &Expr<'_>, check: &Expr<'_>, then: &Block<'_>) { + if let Some(inner) = expr_block(then) + && cx.tcx.hir_attrs(inner.hir_id).is_empty() + && let ExprKind::If(check_inner, _, None) = &inner.kind + && self.eligible_condition(check_inner) + && let ctxt = expr.span.ctxt() + && inner.span.ctxt() == ctxt + && (self.lint_commented_code || !block_starts_with_comment(cx, then)) + { + span_lint_and_then( + cx, + COLLAPSIBLE_IF, + expr.span, + "this `if` statement can be collapsed", + |diag| { + let then_open_bracket = then.span.split_at(1).0.with_leading_whitespace(cx).into_span(); + let then_closing_bracket = { + let end = then.span.shrink_to_hi(); + end.with_lo(end.lo() - rustc_span::BytePos(1)) + .with_leading_whitespace(cx) + .into_span() + }; + let inner_if = inner.span.split_at(2).0; + let mut sugg = vec![ + // Remove the outer then block `{` + (then_open_bracket, String::new()), + // Remove the outer then block '}' + (then_closing_bracket, String::new()), + // Replace inner `if` by `&&` + (inner_if, String::from("&&")), + ]; + sugg.extend(parens_around(check)); + sugg.extend(parens_around(check_inner)); + + diag.multipart_suggestion("collapse nested if block", sugg, Applicability::MachineApplicable); + }, + ); + } + } + + pub fn eligible_condition(&self, cond: &Expr<'_>) -> bool { + self.let_chains_enabled || !matches!(cond.kind, ExprKind::Let(..)) + } +} + +impl_lint_pass!(CollapsibleIf => [COLLAPSIBLE_IF, COLLAPSIBLE_ELSE_IF]); + +impl LateLintPass<'_> for CollapsibleIf { + fn check_expr(&mut self, cx: &LateContext<'_>, expr: &Expr<'_>) { + if let ExprKind::If(cond, then, else_) = &expr.kind && !expr.span.from_expansion() { - if let Some(else_) = else_ { - check_collapsible_maybe_if_let(cx, then.span, else_); - } else if !matches!(cond.kind, ast::ExprKind::Let(..)) { - check_collapsible_no_if_let(cx, expr, cond, then); + if let Some(else_) = else_ + && let ExprKind::Block(else_, None) = else_.kind + { + Self::check_collapsible_else_if(cx, then.span, else_); + } else if else_.is_none() + && self.eligible_condition(cond) + && let ExprKind::Block(then, None) = then.kind + { + self.check_collapsible_if_if(cx, expr, cond, then); } } } } -fn block_starts_with_comment(cx: &EarlyContext<'_>, expr: &ast::Block) -> bool { +fn block_starts_with_comment(cx: &LateContext<'_>, block: &Block<'_>) -> bool { // We trim all opening braces and whitespaces and then check if the next string is a comment. - let trimmed_block_text = snippet_block(cx, expr.span, "..", None) + let trimmed_block_text = snippet_block(cx, block.span, "..", None) .trim_start_matches(|c: char| c.is_whitespace() || c == '{') .to_owned(); trimmed_block_text.starts_with("//") || trimmed_block_text.starts_with("/*") } -fn check_collapsible_maybe_if_let(cx: &EarlyContext<'_>, then_span: Span, else_: &ast::Expr) { - if let ast::ExprKind::Block(ref block, _) = else_.kind - && !block_starts_with_comment(cx, block) - && let Some(else_) = expr_block(block) - && else_.attrs.is_empty() - && !else_.span.from_expansion() - && let ast::ExprKind::If(..) = else_.kind - { - // Prevent "elseif" - // Check that the "else" is followed by whitespace - let up_to_else = then_span.between(block.span); - let requires_space = if let Some(c) = snippet(cx, up_to_else, "..").chars().last() { - !c.is_whitespace() - } else { - false - }; - - let mut applicability = Applicability::MachineApplicable; - span_lint_and_sugg( - cx, - COLLAPSIBLE_ELSE_IF, - block.span, - "this `else { if .. }` block can be collapsed", - "collapse nested if block", - format!( - "{}{}", - if requires_space { " " } else { "" }, - snippet_block_with_applicability(cx, else_.span, "..", Some(block.span), &mut applicability) - ), - applicability, - ); - } -} - -fn check_collapsible_no_if_let(cx: &EarlyContext<'_>, expr: &ast::Expr, check: &ast::Expr, then: &ast::Block) { - if !block_starts_with_comment(cx, then) - && let Some(inner) = expr_block(then) - && inner.attrs.is_empty() - && let ast::ExprKind::If(ref check_inner, ref content, None) = inner.kind - // Prevent triggering on `if c { if let a = b { .. } }`. - && !matches!(check_inner.kind, ast::ExprKind::Let(..)) - && let ctxt = expr.span.ctxt() - && inner.span.ctxt() == ctxt - { - span_lint_and_then( - cx, - COLLAPSIBLE_IF, - expr.span, - "this `if` statement can be collapsed", - |diag| { - let mut app = Applicability::MachineApplicable; - let lhs = Sugg::ast(cx, check, "..", ctxt, &mut app); - let rhs = Sugg::ast(cx, check_inner, "..", ctxt, &mut app); - diag.span_suggestion( - expr.span, - "collapse nested if block", - format!( - "if {} {}", - lhs.and(&rhs), - snippet_block(cx, content.span, "..", Some(expr.span)), - ), - app, // snippet - ); - }, - ); +/// If `block` is a block with either one expression or a statement containing an expression, +/// return the expression. We don't peel blocks recursively, as extra blocks might be intentional. +fn expr_block<'tcx>(block: &Block<'tcx>) -> Option<&'tcx Expr<'tcx>> { + match block.stmts { + [] => block.expr, + [stmt] => { + if let StmtKind::Semi(expr) = stmt.kind { + Some(expr) + } else { + None + } + }, + _ => None, } } -/// If the block contains only one expression, return it. -fn expr_block(block: &ast::Block) -> Option<&ast::Expr> { - if let [stmt] = &*block.stmts - && let ast::StmtKind::Expr(expr) | ast::StmtKind::Semi(expr) = &stmt.kind +/// If the expression is a `||`, suggest parentheses around it. +fn parens_around(expr: &Expr<'_>) -> Vec<(Span, String)> { + if let ExprKind::Binary(op, _, _) = expr.peel_drop_temps().kind + && op.node == BinOpKind::Or { - Some(expr) + vec![ + (expr.span.shrink_to_lo(), String::from("(")), + (expr.span.shrink_to_hi(), String::from(")")), + ] } else { - None + vec![] } } diff --git a/src/tools/clippy/clippy_lints/src/comparison_chain.rs b/src/tools/clippy/clippy_lints/src/comparison_chain.rs index 0e7f01e44b049..9c3009a86cdc0 100644 --- a/src/tools/clippy/clippy_lints/src/comparison_chain.rs +++ b/src/tools/clippy/clippy_lints/src/comparison_chain.rs @@ -125,7 +125,7 @@ impl<'tcx> LateLintPass<'tcx> for ComparisonChain { let ExprKind::Binary(_, lhs, rhs) = conds[0].kind else { unreachable!(); }; - let lhs = Sugg::hir(cx, lhs, "..").maybe_par(); + let lhs = Sugg::hir(cx, lhs, "..").maybe_paren(); let rhs = Sugg::hir(cx, rhs, "..").addr(); span_lint_and_sugg( cx, diff --git a/src/tools/clippy/clippy_lints/src/copies.rs b/src/tools/clippy/clippy_lints/src/copies.rs index 03ed9c657b30a..42fbe6438d4ea 100644 --- a/src/tools/clippy/clippy_lints/src/copies.rs +++ b/src/tools/clippy/clippy_lints/src/copies.rs @@ -256,7 +256,7 @@ fn lint_branches_sharing_code<'tcx>( let suggestion = reindent_multiline(&suggestion, true, indent); let span = span.with_hi(last_block.span.hi()); - // Improve formatting if the inner block has indention (i.e. normal Rust formatting) + // Improve formatting if the inner block has indentation (i.e. normal Rust formatting) let span = span .map_range(cx, |src, range| { (range.start > 4 && src.get(range.start - 4..range.start)? == " ") @@ -539,10 +539,10 @@ fn check_for_warn_of_moved_symbol(cx: &LateContext<'_>, symbols: &[(HirId, Symbo .filter(|stmt| !ignore_span.overlaps(stmt.span)) .try_for_each(|stmt| intravisit::walk_stmt(&mut walker, stmt)); - if let Some(expr) = block.expr { - if res.is_continue() { - res = intravisit::walk_expr(&mut walker, expr); - } + if let Some(expr) = block.expr + && res.is_continue() + { + res = intravisit::walk_expr(&mut walker, expr); } res.is_break() diff --git a/src/tools/clippy/clippy_lints/src/crate_in_macro_def.rs b/src/tools/clippy/clippy_lints/src/crate_in_macro_def.rs index 7d86bd3e540a1..19f62e8bf79c6 100644 --- a/src/tools/clippy/clippy_lints/src/crate_in_macro_def.rs +++ b/src/tools/clippy/clippy_lints/src/crate_in_macro_def.rs @@ -5,8 +5,8 @@ use rustc_ast::tokenstream::{TokenStream, TokenTree}; use rustc_errors::Applicability; use rustc_lint::{EarlyContext, EarlyLintPass}; use rustc_session::declare_lint_pass; -use rustc_span::Span; use rustc_span::symbol::sym; +use rustc_span::{Span, kw}; declare_clippy_lint! { /// ### What it does @@ -53,7 +53,7 @@ declare_lint_pass!(CrateInMacroDef => [CRATE_IN_MACRO_DEF]); impl EarlyLintPass for CrateInMacroDef { fn check_item(&mut self, cx: &EarlyContext<'_>, item: &Item) { - if let ItemKind::MacroDef(macro_def) = &item.kind + if let ItemKind::MacroDef(_, macro_def) = &item.kind && item.attrs.iter().any(is_macro_export) && let Some(span) = contains_unhygienic_crate_reference(¯o_def.body.tokens) { @@ -105,12 +105,11 @@ fn contains_unhygienic_crate_reference(tts: &TokenStream) -> Option { fn is_crate_keyword(tt: &TokenTree) -> Option { if let TokenTree::Token( Token { - kind: TokenKind::Ident(symbol, _), + kind: TokenKind::Ident(kw::Crate, _), span, }, _, ) = tt - && symbol.as_str() == "crate" { Some(*span) } else { diff --git a/src/tools/clippy/clippy_lints/src/declare_clippy_lint.rs b/src/tools/clippy/clippy_lints/src/declare_clippy_lint.rs index 4d908af4084f3..9f82f87672794 100644 --- a/src/tools/clippy/clippy_lints/src/declare_clippy_lint.rs +++ b/src/tools/clippy/clippy_lints/src/declare_clippy_lint.rs @@ -165,17 +165,4 @@ macro_rules! declare_clippy_lint { $(, $eval_always)? } }; - - ( - $(#[doc = $lit:literal])* - pub $lint_name:ident, - internal, - $desc:literal - ) => { - declare_clippy_lint! {@ - $(#[doc = $lit])* - pub $lint_name, Allow, crate::LintCategory::Internal, $desc, - None, "0.0.0" - } - }; } diff --git a/src/tools/clippy/clippy_lints/src/declared_lints.rs b/src/tools/clippy/clippy_lints/src/declared_lints.rs index 39e4516370709..2cccd6ba27027 100644 --- a/src/tools/clippy/clippy_lints/src/declared_lints.rs +++ b/src/tools/clippy/clippy_lints/src/declared_lints.rs @@ -3,36 +3,6 @@ // Manual edits will be overwritten. pub static LINTS: &[&crate::LintInfo] = &[ - #[cfg(feature = "internal")] - crate::utils::internal_lints::almost_standard_lint_formulation::ALMOST_STANDARD_LINT_FORMULATION_INFO, - #[cfg(feature = "internal")] - crate::utils::internal_lints::collapsible_calls::COLLAPSIBLE_SPAN_LINT_CALLS_INFO, - #[cfg(feature = "internal")] - crate::utils::internal_lints::interning_defined_symbol::INTERNING_DEFINED_SYMBOL_INFO, - #[cfg(feature = "internal")] - crate::utils::internal_lints::interning_defined_symbol::UNNECESSARY_SYMBOL_STR_INFO, - #[cfg(feature = "internal")] - crate::utils::internal_lints::invalid_paths::INVALID_PATHS_INFO, - #[cfg(feature = "internal")] - crate::utils::internal_lints::lint_without_lint_pass::DEFAULT_LINT_INFO, - #[cfg(feature = "internal")] - crate::utils::internal_lints::lint_without_lint_pass::INVALID_CLIPPY_VERSION_ATTRIBUTE_INFO, - #[cfg(feature = "internal")] - crate::utils::internal_lints::lint_without_lint_pass::LINT_WITHOUT_LINT_PASS_INFO, - #[cfg(feature = "internal")] - crate::utils::internal_lints::lint_without_lint_pass::MISSING_CLIPPY_VERSION_ATTRIBUTE_INFO, - #[cfg(feature = "internal")] - crate::utils::internal_lints::msrv_attr_impl::MISSING_MSRV_ATTR_IMPL_INFO, - #[cfg(feature = "internal")] - crate::utils::internal_lints::outer_expn_data_pass::OUTER_EXPN_EXPN_DATA_INFO, - #[cfg(feature = "internal")] - crate::utils::internal_lints::produce_ice::PRODUCE_ICE_INFO, - #[cfg(feature = "internal")] - crate::utils::internal_lints::slow_symbol_comparisons::SLOW_SYMBOL_COMPARISONS_INFO, - #[cfg(feature = "internal")] - crate::utils::internal_lints::unnecessary_def_path::UNNECESSARY_DEF_PATH_INFO, - #[cfg(feature = "internal")] - crate::utils::internal_lints::unsorted_clippy_utils_paths::UNSORTED_CLIPPY_UTILS_PATHS_INFO, crate::absolute_paths::ABSOLUTE_PATHS_INFO, crate::almost_complete_range::ALMOST_COMPLETE_RANGE_INFO, crate::approx_const::APPROX_CONSTANT_INFO, @@ -52,6 +22,7 @@ pub static LINTS: &[&crate::LintInfo] = &[ crate::attrs::DEPRECATED_CLIPPY_CFG_ATTR_INFO, crate::attrs::DEPRECATED_SEMVER_INFO, crate::attrs::DUPLICATED_ATTRIBUTES_INFO, + crate::attrs::IGNORE_WITHOUT_REASON_INFO, crate::attrs::INLINE_ALWAYS_INFO, crate::attrs::MIXED_ATTRIBUTES_STYLE_INFO, crate::attrs::NON_MINIMAL_CFG_INFO, @@ -96,6 +67,7 @@ pub static LINTS: &[&crate::LintInfo] = &[ crate::casts::FN_TO_NUMERIC_CAST_INFO, crate::casts::FN_TO_NUMERIC_CAST_ANY_INFO, crate::casts::FN_TO_NUMERIC_CAST_WITH_TRUNCATION_INFO, + crate::casts::MANUAL_DANGLING_PTR_INFO, crate::casts::PTR_AS_PTR_INFO, crate::casts::PTR_CAST_CONSTNESS_INFO, crate::casts::REF_AS_PTR_INFO, @@ -286,6 +258,7 @@ pub static LINTS: &[&crate::LintInfo] = &[ crate::literal_representation::UNREADABLE_LITERAL_INFO, crate::literal_representation::UNUSUAL_BYTE_GROUPINGS_INFO, crate::literal_string_with_formatting_args::LITERAL_STRING_WITH_FORMATTING_ARGS_INFO, + crate::loops::CHAR_INDICES_AS_BYTE_INDICES_INFO, crate::loops::EMPTY_LOOP_INFO, crate::loops::EXPLICIT_COUNTER_LOOP_INFO, crate::loops::EXPLICIT_INTO_ITER_LOOP_INFO, @@ -312,6 +285,7 @@ pub static LINTS: &[&crate::LintInfo] = &[ crate::macro_metavars_in_unsafe::MACRO_METAVARS_IN_UNSAFE_INFO, crate::macro_use::MACRO_USE_IMPORTS_INFO, crate::main_recursion::MAIN_RECURSION_INFO, + crate::manual_abs_diff::MANUAL_ABS_DIFF_INFO, crate::manual_assert::MANUAL_ASSERT_INFO, crate::manual_async_fn::MANUAL_ASYNC_FN_INFO, crate::manual_bits::MANUAL_BITS_INFO, @@ -334,7 +308,6 @@ pub static LINTS: &[&crate::LintInfo] = &[ crate::manual_slice_size_calculation::MANUAL_SLICE_SIZE_CALCULATION_INFO, crate::manual_string_new::MANUAL_STRING_NEW_INFO, crate::manual_strip::MANUAL_STRIP_INFO, - crate::manual_unwrap_or_default::MANUAL_UNWRAP_OR_DEFAULT_INFO, crate::map_unit_fn::OPTION_MAP_UNIT_FN_INFO, crate::map_unit_fn::RESULT_MAP_UNIT_FN_INFO, crate::match_result_ok::MATCH_RESULT_OK_INFO, @@ -344,10 +317,10 @@ pub static LINTS: &[&crate::LintInfo] = &[ crate::matches::MANUAL_MAP_INFO, crate::matches::MANUAL_OK_ERR_INFO, crate::matches::MANUAL_UNWRAP_OR_INFO, + crate::matches::MANUAL_UNWRAP_OR_DEFAULT_INFO, crate::matches::MATCH_AS_REF_INFO, crate::matches::MATCH_BOOL_INFO, crate::matches::MATCH_LIKE_MATCHES_MACRO_INFO, - crate::matches::MATCH_ON_VEC_ITEMS_INFO, crate::matches::MATCH_OVERLAPPING_ARM_INFO, crate::matches::MATCH_REF_PATS_INFO, crate::matches::MATCH_SAME_ARMS_INFO, @@ -488,6 +461,7 @@ pub static LINTS: &[&crate::LintInfo] = &[ crate::methods::SUSPICIOUS_OPEN_OPTIONS_INFO, crate::methods::SUSPICIOUS_SPLITN_INFO, crate::methods::SUSPICIOUS_TO_OWNED_INFO, + crate::methods::SWAP_WITH_TEMPORARY_INFO, crate::methods::TYPE_ID_ON_BOX_INFO, crate::methods::UNBUFFERED_BYTES_INFO, crate::methods::UNINIT_ASSUMED_INIT_INFO, @@ -664,6 +638,7 @@ pub static LINTS: &[&crate::LintInfo] = &[ crate::redundant_slicing::DEREF_BY_SLICING_INFO, crate::redundant_slicing::REDUNDANT_SLICING_INFO, crate::redundant_static_lifetimes::REDUNDANT_STATIC_LIFETIMES_INFO, + crate::redundant_test_prefix::REDUNDANT_TEST_PREFIX_INFO, crate::redundant_type_annotations::REDUNDANT_TYPE_ANNOTATIONS_INFO, crate::ref_option_ref::REF_OPTION_REF_INFO, crate::ref_patterns::REF_PATTERNS_INFO, diff --git a/src/tools/clippy/clippy_lints/src/default_constructed_unit_structs.rs b/src/tools/clippy/clippy_lints/src/default_constructed_unit_structs.rs index bbd5dc15542d1..f8a9037fc8047 100644 --- a/src/tools/clippy/clippy_lints/src/default_constructed_unit_structs.rs +++ b/src/tools/clippy/clippy_lints/src/default_constructed_unit_structs.rs @@ -1,5 +1,6 @@ -use clippy_utils::diagnostics::span_lint_and_sugg; +use clippy_utils::diagnostics::span_lint_and_then; use clippy_utils::is_ty_alias; +use clippy_utils::source::SpanRangeExt as _; use hir::ExprKind; use hir::def::Res; use rustc_errors::Applicability; @@ -70,15 +71,26 @@ impl LateLintPass<'_> for DefaultConstructedUnitStructs { && let var @ ty::VariantDef { ctor: Some((hir::def::CtorKind::Const, _)), .. } = def.non_enum_variant() && !var.is_field_list_non_exhaustive() && !expr.span.from_expansion() && !qpath.span().from_expansion() + // do not suggest replacing an expression by a type name with placeholders + && !base.is_suggestable_infer_ty() { - span_lint_and_sugg( + let mut removals = vec![(expr.span.with_lo(qpath.qself_span().hi()), String::new())]; + if expr.span.with_source_text(cx, |s| s.starts_with('<')) == Some(true) { + // Remove `<`, '>` has already been removed by the existing removal expression. + removals.push((expr.span.with_hi(qpath.qself_span().lo()), String::new())); + } + span_lint_and_then( cx, DEFAULT_CONSTRUCTED_UNIT_STRUCTS, - expr.span.with_lo(qpath.qself_span().hi()), + expr.span, "use of `default` to create a unit struct", - "remove this call to `default`", - String::new(), - Applicability::MachineApplicable, + |diag| { + diag.multipart_suggestion( + "remove this call to `default`", + removals, + Applicability::MachineApplicable, + ); + }, ); } } diff --git a/src/tools/clippy/clippy_lints/src/deprecated_lints.rs b/src/tools/clippy/clippy_lints/src/deprecated_lints.rs index de66ead4f4204..b60c11d79d48f 100644 --- a/src/tools/clippy/clippy_lints/src/deprecated_lints.rs +++ b/src/tools/clippy/clippy_lints/src/deprecated_lints.rs @@ -42,6 +42,8 @@ declare_with_version! { DEPRECATED(DEPRECATED_VERSION): &[(&str, &str)] = &[ ("clippy::wrong_pub_self_convention", "`clippy::wrong_self_convention` now covers this case via the `avoid-breaking-exported-api` config"), #[clippy::version = "1.86.0"] ("clippy::option_map_or_err_ok", "`clippy::manual_ok_or` covers this case"), + #[clippy::version = "1.86.0"] + ("clippy::match_on_vec_items", "`clippy::indexing_slicing` covers indexing and slicing on `Vec<_>`"), // end deprecated lints. used by `cargo dev deprecate_lint` ]} diff --git a/src/tools/clippy/clippy_lints/src/dereference.rs b/src/tools/clippy/clippy_lints/src/dereference.rs index 849c60b89b97e..5edb5c235703e 100644 --- a/src/tools/clippy/clippy_lints/src/dereference.rs +++ b/src/tools/clippy/clippy_lints/src/dereference.rs @@ -853,7 +853,7 @@ impl TyCoercionStability { continue; }, ty::Param(_) if for_return => Self::Deref, - ty::Alias(ty::Weak | ty::Inherent, _) => unreachable!("should have been normalized away above"), + ty::Alias(ty::Free | ty::Inherent, _) => unreachable!("should have been normalized away above"), ty::Alias(ty::Projection, _) if !for_return && ty.has_non_region_param() => Self::Reborrow, ty::Infer(_) | ty::Error(_) @@ -1133,61 +1133,60 @@ fn report<'tcx>( impl<'tcx> Dereferencing<'tcx> { fn check_local_usage(&mut self, cx: &LateContext<'tcx>, e: &Expr<'tcx>, local: HirId) { - if let Some(outer_pat) = self.ref_locals.get_mut(&local) { - if let Some(pat) = outer_pat { - // Check for auto-deref - if !matches!( - cx.typeck_results().expr_adjustments(e), - [ - Adjustment { - kind: Adjust::Deref(_), - .. - }, - Adjustment { - kind: Adjust::Deref(_), - .. - }, + if let Some(outer_pat) = self.ref_locals.get_mut(&local) + && let Some(pat) = outer_pat + // Check for auto-deref + && !matches!( + cx.typeck_results().expr_adjustments(e), + [ + Adjustment { + kind: Adjust::Deref(_), .. - ] - ) { - match get_parent_expr(cx, e) { - // Field accesses are the same no matter the number of references. - Some(Expr { - kind: ExprKind::Field(..), - .. - }) => (), - Some(&Expr { - span, - kind: ExprKind::Unary(UnOp::Deref, _), - .. - }) if !span.from_expansion() => { - // Remove explicit deref. - let snip = snippet_with_context(cx, e.span, span.ctxt(), "..", &mut pat.app).0; - pat.replacements.push((span, snip.into())); - }, - Some(parent) if !parent.span.from_expansion() => { - // Double reference might be needed at this point. - if parent.precedence() == ExprPrecedence::Unambiguous { - // Parentheses would be needed here, don't lint. - *outer_pat = None; - } else { - pat.always_deref = false; - let snip = snippet_with_context(cx, e.span, parent.span.ctxt(), "..", &mut pat.app).0; - pat.replacements.push((e.span, format!("&{snip}"))); - } - }, - _ if !e.span.from_expansion() => { - // Double reference might be needed at this point. - pat.always_deref = false; - let snip = snippet_with_applicability(cx, e.span, "..", &mut pat.app); - pat.replacements.push((e.span, format!("&{snip}"))); - }, - // Edge case for macros. The span of the identifier will usually match the context of the - // binding, but not if the identifier was created in a macro. e.g. `concat_idents` and proc - // macros - _ => *outer_pat = None, + }, + Adjustment { + kind: Adjust::Deref(_), + .. + }, + .. + ] + ) + { + match get_parent_expr(cx, e) { + // Field accesses are the same no matter the number of references. + Some(Expr { + kind: ExprKind::Field(..), + .. + }) => (), + Some(&Expr { + span, + kind: ExprKind::Unary(UnOp::Deref, _), + .. + }) if !span.from_expansion() => { + // Remove explicit deref. + let snip = snippet_with_context(cx, e.span, span.ctxt(), "..", &mut pat.app).0; + pat.replacements.push((span, snip.into())); + }, + Some(parent) if !parent.span.from_expansion() => { + // Double reference might be needed at this point. + if parent.precedence() == ExprPrecedence::Unambiguous { + // Parentheses would be needed here, don't lint. + *outer_pat = None; + } else { + pat.always_deref = false; + let snip = snippet_with_context(cx, e.span, parent.span.ctxt(), "..", &mut pat.app).0; + pat.replacements.push((e.span, format!("&{snip}"))); } - } + }, + _ if !e.span.from_expansion() => { + // Double reference might be needed at this point. + pat.always_deref = false; + let snip = snippet_with_applicability(cx, e.span, "..", &mut pat.app); + pat.replacements.push((e.span, format!("&{snip}"))); + }, + // Edge case for macros. The span of the identifier will usually match the context of the + // binding, but not if the identifier was created in a macro. e.g. `concat_idents` and proc + // macros + _ => *outer_pat = None, } } } diff --git a/src/tools/clippy/clippy_lints/src/derivable_impls.rs b/src/tools/clippy/clippy_lints/src/derivable_impls.rs index 8d9222e4bf61e..10331b3855b84 100644 --- a/src/tools/clippy/clippy_lints/src/derivable_impls.rs +++ b/src/tools/clippy/clippy_lints/src/derivable_impls.rs @@ -94,18 +94,18 @@ fn check_struct<'tcx>( ty_args: GenericArgsRef<'_>, typeck_results: &'tcx TypeckResults<'tcx>, ) { - if let TyKind::Path(QPath::Resolved(_, p)) = self_ty.kind { - if let Some(PathSegment { args, .. }) = p.segments.last() { - let args = args.map(|a| a.args).unwrap_or(&[]); - - // ty_args contains the generic parameters of the type declaration, while args contains the - // arguments used at instantiation time. If both len are not equal, it means that some - // parameters were not provided (which means that the default values were used); in this - // case we will not risk suggesting too broad a rewrite. We won't either if any argument - // is a type or a const. - if ty_args.len() != args.len() || args.iter().any(|arg| !matches!(arg, GenericArg::Lifetime(_))) { - return; - } + if let TyKind::Path(QPath::Resolved(_, p)) = self_ty.kind + && let Some(PathSegment { args, .. }) = p.segments.last() + { + let args = args.map(|a| a.args).unwrap_or(&[]); + + // ty_args contains the generic parameters of the type declaration, while args contains the + // arguments used at instantiation time. If both len are not equal, it means that some + // parameters were not provided (which means that the default values were used); in this + // case we will not risk suggesting too broad a rewrite. We won't either if any argument + // is a type or a const. + if ty_args.len() != args.len() || args.iter().any(|arg| !matches!(arg, GenericArg::Lifetime(_))) { + return; } } @@ -188,7 +188,7 @@ impl<'tcx> LateLintPass<'tcx> for DerivableImpls { self_ty, .. }) = item.kind - && !cx.tcx.has_attr(item.owner_id, sym::automatically_derived) + && !cx.tcx.is_automatically_derived(item.owner_id.to_def_id()) && !item.span.from_expansion() && let Some(def_id) = trait_ref.trait_def_id() && cx.tcx.is_diagnostic_item(sym::Default, def_id) diff --git a/src/tools/clippy/clippy_lints/src/derive.rs b/src/tools/clippy/clippy_lints/src/derive.rs index 2ae35b4005579..06528f875a29b 100644 --- a/src/tools/clippy/clippy_lints/src/derive.rs +++ b/src/tools/clippy/clippy_lints/src/derive.rs @@ -206,7 +206,7 @@ impl<'tcx> LateLintPass<'tcx> for Derive { }) = item.kind { let ty = cx.tcx.type_of(item.owner_id).instantiate_identity(); - let is_automatically_derived = cx.tcx.has_attr(item.owner_id, sym::automatically_derived); + let is_automatically_derived = cx.tcx.is_automatically_derived(item.owner_id.to_def_id()); check_hash_peq(cx, item.span, trait_ref, ty, is_automatically_derived); check_ord_partial_ord(cx, item.span, trait_ref, ty, is_automatically_derived); @@ -235,7 +235,7 @@ fn check_hash_peq<'tcx>( { // Look for the PartialEq implementations for `ty` cx.tcx.for_each_relevant_impl(peq_trait_def_id, ty, |impl_id| { - let peq_is_automatically_derived = cx.tcx.has_attr(impl_id, sym::automatically_derived); + let peq_is_automatically_derived = cx.tcx.is_automatically_derived(impl_id); if !hash_is_automatically_derived || peq_is_automatically_derived { return; @@ -254,7 +254,7 @@ fn check_hash_peq<'tcx>( |diag| { if let Some(local_def_id) = impl_id.as_local() { let hir_id = cx.tcx.local_def_id_to_hir_id(local_def_id); - diag.span_note(cx.tcx.hir().span(hir_id), "`PartialEq` implemented here"); + diag.span_note(cx.tcx.hir_span(hir_id), "`PartialEq` implemented here"); } }, ); @@ -278,7 +278,7 @@ fn check_ord_partial_ord<'tcx>( { // Look for the PartialOrd implementations for `ty` cx.tcx.for_each_relevant_impl(partial_ord_trait_def_id, ty, |impl_id| { - let partial_ord_is_automatically_derived = cx.tcx.has_attr(impl_id, sym::automatically_derived); + let partial_ord_is_automatically_derived = cx.tcx.is_automatically_derived(impl_id); if partial_ord_is_automatically_derived == ord_is_automatically_derived { return; @@ -298,7 +298,7 @@ fn check_ord_partial_ord<'tcx>( span_lint_and_then(cx, DERIVE_ORD_XOR_PARTIAL_ORD, span, mess, |diag| { if let Some(local_def_id) = impl_id.as_local() { let hir_id = cx.tcx.local_def_id_to_hir_id(local_def_id); - diag.span_note(cx.tcx.hir().span(hir_id), "`PartialOrd` implemented here"); + diag.span_note(cx.tcx.hir_span(hir_id), "`PartialOrd` implemented here"); } }); } @@ -324,11 +324,9 @@ fn check_copy_clone<'tcx>(cx: &LateContext<'tcx>, item: &Item<'_>, trait_ref: &h // there's a Copy impl for any instance of the adt. if !is_copy(cx, ty) { if ty_subs.non_erasable_generics().next().is_some() { - let has_copy_impl = cx.tcx.all_local_trait_impls(()).get(©_id).is_some_and(|impls| { - impls.iter().any(|&id| { - matches!(cx.tcx.type_of(id).instantiate_identity().kind(), ty::Adt(adt, _) + let has_copy_impl = cx.tcx.local_trait_impls(copy_id).iter().any(|&id| { + matches!(cx.tcx.type_of(id).instantiate_identity().kind(), ty::Adt(adt, _) if ty_adt.did() == adt.did()) - }) }); if !has_copy_impl { return; @@ -351,6 +349,10 @@ fn check_copy_clone<'tcx>(cx: &LateContext<'tcx>, item: &Item<'_>, trait_ref: &h { return; } + // The presence of `unsafe` fields prevents deriving `Clone` automatically + if ty_adt.all_fields().any(|f| f.safety.is_unsafe()) { + return; + } span_lint_and_note( cx, @@ -428,10 +430,10 @@ impl<'tcx> Visitor<'tcx> for UnsafeVisitor<'_, 'tcx> { } fn visit_expr(&mut self, expr: &'tcx Expr<'_>) -> Self::Result { - if let ExprKind::Block(block, _) = expr.kind { - if block.rules == BlockCheckMode::UnsafeBlock(UnsafeSource::UserProvided) { - return ControlFlow::Break(()); - } + if let ExprKind::Block(block, _) = expr.kind + && block.rules == BlockCheckMode::UnsafeBlock(UnsafeSource::UserProvided) + { + return ControlFlow::Break(()); } walk_expr(self, expr) @@ -481,7 +483,7 @@ fn ty_implements_eq_trait<'tcx>(tcx: TyCtxt<'tcx>, ty: Ty<'tcx>, eq_trait_id: De tcx.non_blanket_impls_for_ty(eq_trait_id, ty).next().is_some() } -/// Creates the `ParamEnv` used for the give type's derived `Eq` impl. +/// Creates the `ParamEnv` used for the given type's derived `Eq` impl. fn typing_env_for_derived_eq(tcx: TyCtxt<'_>, did: DefId, eq_trait_id: DefId) -> ty::TypingEnv<'_> { // Initial map from generic index to param def. // Vec<(param_def, needs_eq)> diff --git a/src/tools/clippy/clippy_lints/src/disallowed_macros.rs b/src/tools/clippy/clippy_lints/src/disallowed_macros.rs index 4b8a689e99478..fc6af204a74a0 100644 --- a/src/tools/clippy/clippy_lints/src/disallowed_macros.rs +++ b/src/tools/clippy/clippy_lints/src/disallowed_macros.rs @@ -4,6 +4,7 @@ use clippy_config::types::{DisallowedPath, create_disallowed_map}; use clippy_utils::diagnostics::{span_lint_and_then, span_lint_hir_and_then}; use clippy_utils::macros::macro_backtrace; use rustc_data_structures::fx::FxHashSet; +use rustc_hir::def::DefKind; use rustc_hir::def_id::DefIdMap; use rustc_hir::{ AmbigArg, Expr, ExprKind, ForeignItem, HirId, ImplItem, Item, ItemKind, OwnerId, Pat, Path, Stmt, TraitItem, Ty, @@ -72,8 +73,15 @@ pub struct DisallowedMacros { impl DisallowedMacros { pub fn new(tcx: TyCtxt<'_>, conf: &'static Conf, earlies: AttrStorage) -> Self { + let (disallowed, _) = create_disallowed_map( + tcx, + &conf.disallowed_macros, + |def_kind| matches!(def_kind, DefKind::Macro(_)), + "macro", + false, + ); Self { - disallowed: create_disallowed_map(tcx, &conf.disallowed_macros), + disallowed, seen: FxHashSet::default(), derive_src: None, earlies, diff --git a/src/tools/clippy/clippy_lints/src/disallowed_methods.rs b/src/tools/clippy/clippy_lints/src/disallowed_methods.rs index 149cf1cf2def1..1382dafa931e4 100644 --- a/src/tools/clippy/clippy_lints/src/disallowed_methods.rs +++ b/src/tools/clippy/clippy_lints/src/disallowed_methods.rs @@ -63,9 +63,19 @@ pub struct DisallowedMethods { impl DisallowedMethods { pub fn new(tcx: TyCtxt<'_>, conf: &'static Conf) -> Self { - Self { - disallowed: create_disallowed_map(tcx, &conf.disallowed_methods), - } + let (disallowed, _) = create_disallowed_map( + tcx, + &conf.disallowed_methods, + |def_kind| { + matches!( + def_kind, + DefKind::Fn | DefKind::Ctor(_, CtorKind::Fn) | DefKind::AssocFn + ) + }, + "function", + false, + ); + Self { disallowed } } } @@ -74,12 +84,7 @@ impl_lint_pass!(DisallowedMethods => [DISALLOWED_METHODS]); impl<'tcx> LateLintPass<'tcx> for DisallowedMethods { fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) { let (id, span) = match &expr.kind { - ExprKind::Path(path) - if let Res::Def(DefKind::Fn | DefKind::Ctor(_, CtorKind::Fn) | DefKind::AssocFn, id) = - cx.qpath_res(path, expr.hir_id) => - { - (id, expr.span) - }, + ExprKind::Path(path) if let Res::Def(_, id) = cx.qpath_res(path, expr.hir_id) => (id, expr.span), ExprKind::MethodCall(name, ..) if let Some(id) = cx.typeck_results().type_dependent_def_id(expr.hir_id) => { (id, name.ident.span) }, diff --git a/src/tools/clippy/clippy_lints/src/disallowed_script_idents.rs b/src/tools/clippy/clippy_lints/src/disallowed_script_idents.rs index 53c24a3faf1d8..d1a8590c59b4d 100644 --- a/src/tools/clippy/clippy_lints/src/disallowed_script_idents.rs +++ b/src/tools/clippy/clippy_lints/src/disallowed_script_idents.rs @@ -69,7 +69,7 @@ impl EarlyLintPass for DisallowedScriptIdents { // Implementation is heavily inspired by the implementation of [`non_ascii_idents`] lint: // https://github.com/rust-lang/rust/blob/master/compiler/rustc_lint/src/non_ascii_idents.rs - let check_disallowed_script_idents = cx.builder.lint_level(DISALLOWED_SCRIPT_IDENTS).0 != Level::Allow; + let check_disallowed_script_idents = cx.builder.lint_level(DISALLOWED_SCRIPT_IDENTS).level != Level::Allow; if !check_disallowed_script_idents { return; } diff --git a/src/tools/clippy/clippy_lints/src/disallowed_types.rs b/src/tools/clippy/clippy_lints/src/disallowed_types.rs index 38903596414cf..2bae82648ac76 100644 --- a/src/tools/clippy/clippy_lints/src/disallowed_types.rs +++ b/src/tools/clippy/clippy_lints/src/disallowed_types.rs @@ -1,8 +1,8 @@ use clippy_config::Conf; -use clippy_config::types::DisallowedPath; +use clippy_config::types::{DisallowedPath, create_disallowed_map}; use clippy_utils::diagnostics::span_lint_and_then; use rustc_data_structures::fx::FxHashMap; -use rustc_hir::def::Res; +use rustc_hir::def::{DefKind, Res}; use rustc_hir::def_id::DefIdMap; use rustc_hir::{AmbigArg, Item, ItemKind, PolyTraitRef, PrimTy, Ty, TyKind, UseKind}; use rustc_lint::{LateContext, LateLintPass}; @@ -60,22 +60,7 @@ pub struct DisallowedTypes { impl DisallowedTypes { pub fn new(tcx: TyCtxt<'_>, conf: &'static Conf) -> Self { - let mut def_ids = DefIdMap::default(); - let mut prim_tys = FxHashMap::default(); - for disallowed_path in &conf.disallowed_types { - let path: Vec<_> = disallowed_path.path().split("::").collect::>(); - for res in clippy_utils::def_path_res(tcx, &path) { - match res { - Res::Def(_, id) => { - def_ids.insert(id, (disallowed_path.path(), disallowed_path)); - }, - Res::PrimTy(ty) => { - prim_tys.insert(ty, (disallowed_path.path(), disallowed_path)); - }, - _ => {}, - } - } - } + let (def_ids, prim_tys) = create_disallowed_map(tcx, &conf.disallowed_types, def_kind_predicate, "type", true); Self { def_ids, prim_tys } } @@ -95,6 +80,19 @@ impl DisallowedTypes { } } +pub fn def_kind_predicate(def_kind: DefKind) -> bool { + matches!( + def_kind, + DefKind::Struct + | DefKind::Union + | DefKind::Enum + | DefKind::Trait + | DefKind::TyAlias + | DefKind::ForeignTy + | DefKind::AssocTy + ) +} + impl_lint_pass!(DisallowedTypes => [DISALLOWED_TYPES]); impl<'tcx> LateLintPass<'tcx> for DisallowedTypes { diff --git a/src/tools/clippy/clippy_lints/src/doc/markdown.rs b/src/tools/clippy/clippy_lints/src/doc/markdown.rs index 8cdaba88e5095..7a1c7c675d2ec 100644 --- a/src/tools/clippy/clippy_lints/src/doc/markdown.rs +++ b/src/tools/clippy/clippy_lints/src/doc/markdown.rs @@ -113,20 +113,20 @@ fn check_word(cx: &LateContext<'_>, word: &str, span: Span, code_level: isize, b s != "-" && s.contains('-') } - if let Ok(url) = Url::parse(word) { + if let Ok(url) = Url::parse(word) // try to get around the fact that `foo::bar` parses as a valid URL - if !url.cannot_be_a_base() { - span_lint_and_sugg( - cx, - DOC_MARKDOWN, - span, - "you should put bare URLs between `<`/`>` or make a proper Markdown link", - "try", - format!("<{word}>"), - Applicability::MachineApplicable, - ); - return; - } + && !url.cannot_be_a_base() + { + span_lint_and_sugg( + cx, + DOC_MARKDOWN, + span, + "you should put bare URLs between `<`/`>` or make a proper Markdown link", + "try", + format!("<{word}>"), + Applicability::MachineApplicable, + ); + return; } // We assume that mixed-case words are not meant to be put inside backticks. (Issue #2343) diff --git a/src/tools/clippy/clippy_lints/src/doc/missing_headers.rs b/src/tools/clippy/clippy_lints/src/doc/missing_headers.rs index e75abf28bace8..039937e0207b0 100644 --- a/src/tools/clippy/clippy_lints/src/doc/missing_headers.rs +++ b/src/tools/clippy/clippy_lints/src/doc/missing_headers.rs @@ -1,11 +1,14 @@ use super::{DocHeaders, MISSING_ERRORS_DOC, MISSING_PANICS_DOC, MISSING_SAFETY_DOC, UNNECESSARY_SAFETY_DOC}; use clippy_utils::diagnostics::{span_lint, span_lint_and_note}; -use clippy_utils::ty::{implements_trait_with_env, is_type_diagnostic_item}; -use clippy_utils::{is_doc_hidden, return_ty}; +use clippy_utils::macros::{is_panic, root_macro_call_first_node}; +use clippy_utils::ty::{get_type_diagnostic_name, implements_trait_with_env, is_type_diagnostic_item}; +use clippy_utils::visitors::for_each_expr; +use clippy_utils::{fulfill_or_allowed, is_doc_hidden, method_chain_args, return_ty}; use rustc_hir::{BodyId, FnSig, OwnerId, Safety}; use rustc_lint::LateContext; use rustc_middle::ty; use rustc_span::{Span, sym}; +use std::ops::ControlFlow; pub fn check( cx: &LateContext<'_>, @@ -13,7 +16,6 @@ pub fn check( sig: FnSig<'_>, headers: DocHeaders, body_id: Option, - panic_info: Option<(Span, bool)>, check_private_items: bool, ) { if !check_private_items && !cx.effective_visibilities.is_exported(owner_id.def_id) { @@ -46,13 +48,16 @@ pub fn check( ), _ => (), } - if !headers.panics && panic_info.is_some_and(|el| !el.1) { + if !headers.panics + && let Some(body_id) = body_id + && let Some(panic_span) = find_panic(cx, body_id) + { span_lint_and_note( cx, MISSING_PANICS_DOC, span, "docs for function which may panic missing `# Panics` section", - panic_info.map(|el| el.0), + Some(panic_span), "first possible panic found here", ); } @@ -89,3 +94,39 @@ pub fn check( } } } + +fn find_panic(cx: &LateContext<'_>, body_id: BodyId) -> Option { + let mut panic_span = None; + let typeck = cx.tcx.typeck_body(body_id); + for_each_expr(cx, cx.tcx.hir_body(body_id), |expr| { + if let Some(macro_call) = root_macro_call_first_node(cx, expr) + && (is_panic(cx, macro_call.def_id) + || matches!( + cx.tcx.get_diagnostic_name(macro_call.def_id), + Some(sym::assert_macro | sym::assert_eq_macro | sym::assert_ne_macro) + )) + && !cx.tcx.hir_is_inside_const_context(expr.hir_id) + && !fulfill_or_allowed(cx, MISSING_PANICS_DOC, [expr.hir_id]) + && panic_span.is_none() + { + panic_span = Some(macro_call.span); + } + + // check for `unwrap` and `expect` for both `Option` and `Result` + if let Some(arglists) = method_chain_args(expr, &["unwrap"]).or_else(|| method_chain_args(expr, &["expect"])) + && let receiver_ty = typeck.expr_ty(arglists[0].0).peel_refs() + && matches!( + get_type_diagnostic_name(cx, receiver_ty), + Some(sym::Option | sym::Result) + ) + && !fulfill_or_allowed(cx, MISSING_PANICS_DOC, [expr.hir_id]) + && panic_span.is_none() + { + panic_span = Some(expr.span); + } + + // Visit all nodes to fulfill any `#[expect]`s after the first linted panic + ControlFlow::::Continue(()) + }); + panic_span +} diff --git a/src/tools/clippy/clippy_lints/src/doc/mod.rs b/src/tools/clippy/clippy_lints/src/doc/mod.rs index 36fd396cc1df8..ab77edf1147cb 100644 --- a/src/tools/clippy/clippy_lints/src/doc/mod.rs +++ b/src/tools/clippy/clippy_lints/src/doc/mod.rs @@ -3,11 +3,8 @@ use clippy_config::Conf; use clippy_utils::attrs::is_doc_hidden; use clippy_utils::diagnostics::{span_lint, span_lint_and_help, span_lint_and_then}; -use clippy_utils::macros::{is_panic, root_macro_call_first_node}; use clippy_utils::source::snippet_opt; -use clippy_utils::ty::is_type_diagnostic_item; -use clippy_utils::visitors::Visitable; -use clippy_utils::{is_entrypoint_fn, is_trait_impl_item, method_chain_args}; +use clippy_utils::{is_entrypoint_fn, is_trait_impl_item}; use pulldown_cmark::Event::{ Code, DisplayMath, End, FootnoteReference, HardBreak, Html, InlineHtml, InlineMath, Rule, SoftBreak, Start, TaskListMarker, Text, @@ -16,18 +13,15 @@ use pulldown_cmark::Tag::{BlockQuote, CodeBlock, FootnoteDefinition, Heading, It use pulldown_cmark::{BrokenLink, CodeBlockKind, CowStr, Options, TagEnd}; use rustc_data_structures::fx::FxHashSet; use rustc_errors::Applicability; -use rustc_hir::intravisit::{self, Visitor}; -use rustc_hir::{AnonConst, Attribute, Expr, ImplItemKind, ItemKind, Node, Safety, TraitItemKind}; +use rustc_hir::{Attribute, ImplItemKind, ItemKind, Node, Safety, TraitItemKind}; use rustc_lint::{EarlyContext, EarlyLintPass, LateContext, LateLintPass, LintContext}; -use rustc_middle::hir::nested_filter; -use rustc_middle::ty; use rustc_resolve::rustdoc::{ DocFragment, add_doc_fragment, attrs_to_doc_fragments, main_body_opts, source_span_for_markdown_range, span_of_fragments, }; use rustc_session::impl_lint_pass; +use rustc_span::Span; use rustc_span::edition::Edition; -use rustc_span::{Span, sym}; use std::ops::Range; use url::Url; @@ -194,6 +188,19 @@ declare_clippy_lint! { /// } /// } /// ``` + /// + /// Individual panics within a function can be ignored with `#[expect]` or + /// `#[allow]`: + /// + /// ```no_run + /// # use std::num::NonZeroUsize; + /// pub fn will_not_panic(x: usize) { + /// #[expect(clippy::missing_panics_doc, reason = "infallible")] + /// let y = NonZeroUsize::new(1).unwrap(); + /// + /// // If any panics are added in the future the lint will still catch them + /// } + /// ``` #[clippy::version = "1.51.0"] pub MISSING_PANICS_DOC, pedantic, @@ -657,20 +664,16 @@ impl<'tcx> LateLintPass<'tcx> for Documentation { self.check_private_items, ); match item.kind { - ItemKind::Fn { sig, body: body_id, .. } => { + ItemKind::Fn { sig, body, .. } => { if !(is_entrypoint_fn(cx, item.owner_id.to_def_id()) || item.span.in_external_macro(cx.tcx.sess.source_map())) { - let body = cx.tcx.hir_body(body_id); - - let panic_info = FindPanicUnwrap::find_span(cx, cx.tcx.typeck(item.owner_id), body.value); missing_headers::check( cx, item.owner_id, sig, headers, - Some(body_id), - panic_info, + Some(body), self.check_private_items, ); } @@ -697,15 +700,7 @@ impl<'tcx> LateLintPass<'tcx> for Documentation { if let TraitItemKind::Fn(sig, ..) = trait_item.kind && !trait_item.span.in_external_macro(cx.tcx.sess.source_map()) { - missing_headers::check( - cx, - trait_item.owner_id, - sig, - headers, - None, - None, - self.check_private_items, - ); + missing_headers::check(cx, trait_item.owner_id, sig, headers, None, self.check_private_items); } }, Node::ImplItem(impl_item) => { @@ -713,16 +708,12 @@ impl<'tcx> LateLintPass<'tcx> for Documentation { && !impl_item.span.in_external_macro(cx.tcx.sess.source_map()) && !is_trait_impl_item(cx, impl_item.hir_id()) { - let body = cx.tcx.hir_body(body_id); - - let panic_span = FindPanicUnwrap::find_span(cx, cx.tcx.typeck(impl_item.owner_id), body.value); missing_headers::check( cx, impl_item.owner_id, sig, headers, Some(body_id), - panic_span, self.check_private_items, ); } @@ -880,19 +871,18 @@ fn check_for_code_clusters<'a, Events: Iterator{}", doc[start..end].replace('`', "")); - diag.span_suggestion_verbose( - span, - "wrap the entire group in `` tags", - sugg, - Applicability::MaybeIncorrect, - ); - diag.help("separate code snippets will be shown with a gap"); - }); - } + span_lint_and_then(cx, DOC_LINK_CODE, span, "code link adjacent to code text", |diag| { + let sugg = format!("{}", doc[start..end].replace('`', "")); + diag.span_suggestion_verbose( + span, + "wrap the entire group in `` tags", + sugg, + Applicability::MaybeIncorrect, + ); + diag.help("separate code snippets will be shown with a gap"); + }); } code_includes_link = false; code_starts_at = None; @@ -1169,72 +1159,6 @@ fn check_doc<'a, Events: Iterator, Range { - cx: &'a LateContext<'tcx>, - is_const: bool, - panic_span: Option, - typeck_results: &'tcx ty::TypeckResults<'tcx>, -} - -impl<'a, 'tcx> FindPanicUnwrap<'a, 'tcx> { - pub fn find_span( - cx: &'a LateContext<'tcx>, - typeck_results: &'tcx ty::TypeckResults<'tcx>, - body: impl Visitable<'tcx>, - ) -> Option<(Span, bool)> { - let mut vis = Self { - cx, - is_const: false, - panic_span: None, - typeck_results, - }; - body.visit(&mut vis); - vis.panic_span.map(|el| (el, vis.is_const)) - } -} - -impl<'tcx> Visitor<'tcx> for FindPanicUnwrap<'_, 'tcx> { - type NestedFilter = nested_filter::OnlyBodies; - - fn visit_expr(&mut self, expr: &'tcx Expr<'_>) { - if self.panic_span.is_some() { - return; - } - - if let Some(macro_call) = root_macro_call_first_node(self.cx, expr) { - if is_panic(self.cx, macro_call.def_id) - || matches!( - self.cx.tcx.item_name(macro_call.def_id).as_str(), - "assert" | "assert_eq" | "assert_ne" - ) - { - self.is_const = self.cx.tcx.hir_is_inside_const_context(expr.hir_id); - self.panic_span = Some(macro_call.span); - } - } - - // check for `unwrap` and `expect` for both `Option` and `Result` - if let Some(arglists) = method_chain_args(expr, &["unwrap"]).or(method_chain_args(expr, &["expect"])) { - let receiver_ty = self.typeck_results.expr_ty(arglists[0].0).peel_refs(); - if is_type_diagnostic_item(self.cx, receiver_ty, sym::Option) - || is_type_diagnostic_item(self.cx, receiver_ty, sym::Result) - { - self.panic_span = Some(expr.span); - } - } - - // and check sub-expressions - intravisit::walk_expr(self, expr); - } - - // Panics in const blocks will cause compilation to fail. - fn visit_anon_const(&mut self, _: &'tcx AnonConst) {} - - fn maybe_tcx(&mut self) -> Self::MaybeTyCtxt { - self.cx.tcx - } -} - #[expect(clippy::range_plus_one)] // inclusive ranges aren't the same type fn looks_like_refdef(doc: &str, range: Range) -> Option> { if range.end < range.start { diff --git a/src/tools/clippy/clippy_lints/src/doc/needless_doctest_main.rs b/src/tools/clippy/clippy_lints/src/doc/needless_doctest_main.rs index 3008082c2329d..ec4538039a918 100644 --- a/src/tools/clippy/clippy_lints/src/doc/needless_doctest_main.rs +++ b/src/tools/clippy/clippy_lints/src/doc/needless_doctest_main.rs @@ -13,16 +13,16 @@ use rustc_parse::parser::ForceCollect; use rustc_session::parse::ParseSess; use rustc_span::edition::Edition; use rustc_span::source_map::{FilePathMapping, SourceMap}; -use rustc_span::{FileName, Pos, sym}; +use rustc_span::{FileName, Ident, Pos, sym}; use super::Fragments; -fn get_test_spans(item: &Item, test_attr_spans: &mut Vec>) { +fn get_test_spans(item: &Item, ident: Ident, test_attr_spans: &mut Vec>) { test_attr_spans.extend( item.attrs .iter() .find(|attr| attr.has_name(sym::test)) - .map(|attr| attr.span.lo().to_usize()..item.ident.span.hi().to_usize()), + .map(|attr| attr.span.lo().to_usize()..ident.span.hi().to_usize()), ); } @@ -38,7 +38,7 @@ pub fn check( // of all `#[test]` attributes in not ignored code examples fn check_code_sample(code: String, edition: Edition, ignore: bool) -> (bool, Vec>) { rustc_driver::catch_fatal_errors(|| { - rustc_span::create_session_globals_then(edition, None, || { + rustc_span::create_session_globals_then(edition, &[], None, || { let mut test_attr_spans = vec![]; let filename = FileName::anon_source_code(&code); @@ -64,10 +64,13 @@ pub fn check( match parser.parse_item(ForceCollect::No) { Ok(Some(item)) => match &item.kind { ItemKind::Fn(box Fn { - sig, body: Some(block), .. - }) if item.ident.name == sym::main => { + ident, + sig, + body: Some(block), + .. + }) if ident.name == sym::main => { if !ignore { - get_test_spans(&item, &mut test_attr_spans); + get_test_spans(&item, *ident, &mut test_attr_spans); } let is_async = matches!(sig.header.coroutine_kind, Some(CoroutineKind::Async { .. })); let returns_nothing = match &sig.decl.output { @@ -85,10 +88,10 @@ pub fn check( } }, // Another function was found; this case is ignored for needless_doctest_main - ItemKind::Fn(box Fn { .. }) => { + ItemKind::Fn(fn_) => { eligible = false; if !ignore { - get_test_spans(&item, &mut test_attr_spans); + get_test_spans(&item, fn_.ident, &mut test_attr_spans); } }, // Tests with one of these items are ignored diff --git a/src/tools/clippy/clippy_lints/src/drop_forget_ref.rs b/src/tools/clippy/clippy_lints/src/drop_forget_ref.rs index 617982f4da30f..5c360ce6a5f7e 100644 --- a/src/tools/clippy/clippy_lints/src/drop_forget_ref.rs +++ b/src/tools/clippy/clippy_lints/src/drop_forget_ref.rs @@ -144,10 +144,10 @@ impl<'tcx> LateLintPass<'tcx> for DropForgetRef { // .. // } fn is_single_call_in_arm<'tcx>(cx: &LateContext<'tcx>, arg: &'tcx Expr<'_>, drop_expr: &'tcx Expr<'_>) -> bool { - if matches!(arg.kind, ExprKind::Call(..) | ExprKind::MethodCall(..)) { - if let Node::Arm(Arm { body, .. }) = cx.tcx.parent_hir_node(drop_expr.hir_id) { - return body.hir_id == drop_expr.hir_id; - } + if matches!(arg.kind, ExprKind::Call(..) | ExprKind::MethodCall(..)) + && let Node::Arm(Arm { body, .. }) = cx.tcx.parent_hir_node(drop_expr.hir_id) + { + return body.hir_id == drop_expr.hir_id; } false } diff --git a/src/tools/clippy/clippy_lints/src/duplicate_mod.rs b/src/tools/clippy/clippy_lints/src/duplicate_mod.rs index 1dac7b971f957..ce551a64d9984 100644 --- a/src/tools/clippy/clippy_lints/src/duplicate_mod.rs +++ b/src/tools/clippy/clippy_lints/src/duplicate_mod.rs @@ -2,6 +2,7 @@ use clippy_utils::diagnostics::span_lint_and_help; use rustc_ast::ast::{Crate, Inline, Item, ItemKind, ModKind}; use rustc_errors::MultiSpan; use rustc_lint::{EarlyContext, EarlyLintPass, Level, LintContext}; +use rustc_middle::lint::LevelAndSource; use rustc_session::impl_lint_pass; use rustc_span::{FileName, Span}; use std::collections::BTreeMap; @@ -45,11 +46,10 @@ declare_clippy_lint! { "file loaded as module multiple times" } -#[derive(PartialOrd, Ord, PartialEq, Eq)] struct Modules { local_path: PathBuf, spans: Vec, - lint_levels: Vec, + lint_levels: Vec, } #[derive(Default)] @@ -63,7 +63,7 @@ impl_lint_pass!(DuplicateMod => [DUPLICATE_MOD]); impl EarlyLintPass for DuplicateMod { fn check_item(&mut self, cx: &EarlyContext<'_>, item: &Item) { - if let ItemKind::Mod(_, ModKind::Loaded(_, Inline::No, mod_spans, _)) = &item.kind + if let ItemKind::Mod(_, _, ModKind::Loaded(_, Inline::No, mod_spans, _)) = &item.kind && let FileName::Real(real) = cx.sess().source_map().span_to_filename(mod_spans.inner_span) && let Some(local_path) = real.into_local_path() && let Ok(absolute_path) = local_path.canonicalize() @@ -95,11 +95,11 @@ impl EarlyLintPass for DuplicateMod { .iter() .zip(lint_levels) .filter_map(|(span, lvl)| { - if let Some(id) = lvl.get_expectation_id() { + if let Some(id) = lvl.lint_id { cx.fulfill_expectation(id); } - (!matches!(lvl, Level::Allow | Level::Expect(_))).then_some(*span) + (!matches!(lvl.level, Level::Allow | Level::Expect)).then_some(*span) }) .collect(); diff --git a/src/tools/clippy/clippy_lints/src/empty_line_after.rs b/src/tools/clippy/clippy_lints/src/empty_line_after.rs index 80c2b03c41cf4..0c5f8bbf4ca53 100644 --- a/src/tools/clippy/clippy_lints/src/empty_line_after.rs +++ b/src/tools/clippy/clippy_lints/src/empty_line_after.rs @@ -1,3 +1,5 @@ +use std::borrow::Cow; + use clippy_utils::diagnostics::span_lint_and_then; use clippy_utils::source::{SpanRangeExt, snippet_indent}; use clippy_utils::tokenize_with_text; @@ -8,8 +10,7 @@ use rustc_errors::{Applicability, Diag, SuggestionStyle}; use rustc_lexer::TokenKind; use rustc_lint::{EarlyContext, EarlyLintPass, LintContext}; use rustc_session::impl_lint_pass; -use rustc_span::symbol::kw; -use rustc_span::{BytePos, ExpnKind, Ident, InnerSpan, Span, SpanData, Symbol}; +use rustc_span::{BytePos, ExpnKind, Ident, InnerSpan, Span, SpanData, Symbol, kw}; declare_clippy_lint! { /// ### What it does @@ -90,7 +91,7 @@ declare_clippy_lint! { #[derive(Debug)] struct ItemInfo { kind: &'static str, - name: Symbol, + name: Option, span: Span, mod_items: Option, } @@ -316,8 +317,12 @@ impl EmptyLineAfter { for stop in gaps.iter().flat_map(|gap| gap.prev_chunk) { stop.comment_out(cx, &mut suggestions); } + let name = match info.name { + Some(name) => format!("{} `{name}`", info.kind).into(), + None => Cow::from("the following item"), + }; diag.multipart_suggestion_verbose( - format!("if the doc comment should not document `{}` comment it out", info.name), + format!("if the doc comment should not document {name} then comment it out"), suggestions, Applicability::MaybeIncorrect, ); @@ -375,21 +380,20 @@ impl EmptyLineAfter { &mut self, cx: &EarlyContext<'_>, kind: &ItemKind, - ident: &Ident, + ident: Option, span: Span, attrs: &[Attribute], id: NodeId, ) { self.items.push(ItemInfo { kind: kind.descr(), - name: ident.name, - span: if span.contains(ident.span) { - span.with_hi(ident.span.hi()) - } else { - span.with_hi(span.lo()) + name: ident.map(|ident| ident.name), + span: match ident { + Some(ident) => span.with_hi(ident.span.hi()), + None => span.shrink_to_lo(), }, mod_items: match kind { - ItemKind::Mod(_, ModKind::Loaded(items, _, _, _)) => items + ItemKind::Mod(_, _, ModKind::Loaded(items, _, _, _)) => items .iter() .filter(|i| !matches!(i.span.ctxt().outer_expn_data().kind, ExpnKind::AstPass(_))) .map(|i| i.id) @@ -446,7 +450,7 @@ impl EarlyLintPass for EmptyLineAfter { fn check_crate(&mut self, _: &EarlyContext<'_>, krate: &Crate) { self.items.push(ItemInfo { kind: "crate", - name: kw::Crate, + name: Some(kw::Crate), span: krate.spans.inner_span.with_hi(krate.spans.inner_span.lo()), mod_items: krate .items @@ -471,7 +475,7 @@ impl EarlyLintPass for EmptyLineAfter { self.check_item_kind( cx, &item.kind.clone().into(), - &item.ident, + item.kind.ident(), item.span, &item.attrs, item.id, @@ -482,7 +486,7 @@ impl EarlyLintPass for EmptyLineAfter { self.check_item_kind( cx, &item.kind.clone().into(), - &item.ident, + item.kind.ident(), item.span, &item.attrs, item.id, @@ -490,6 +494,6 @@ impl EarlyLintPass for EmptyLineAfter { } fn check_item(&mut self, cx: &EarlyContext<'_>, item: &Item) { - self.check_item_kind(cx, &item.kind, &item.ident, item.span, &item.attrs, item.id); + self.check_item_kind(cx, &item.kind, item.kind.ident(), item.span, &item.attrs, item.id); } } diff --git a/src/tools/clippy/clippy_lints/src/empty_with_brackets.rs b/src/tools/clippy/clippy_lints/src/empty_with_brackets.rs index 743ec5b9ea7fb..a38d6df89f2b9 100644 --- a/src/tools/clippy/clippy_lints/src/empty_with_brackets.rs +++ b/src/tools/clippy/clippy_lints/src/empty_with_brackets.rs @@ -1,10 +1,15 @@ -use clippy_utils::diagnostics::span_lint_and_then; -use clippy_utils::source::snippet_opt; -use rustc_ast::ast::{Item, ItemKind, Variant, VariantData}; +use clippy_utils::attrs::span_contains_cfg; +use clippy_utils::diagnostics::{span_lint_and_then, span_lint_hir_and_then}; +use rustc_data_structures::fx::FxIndexMap; use rustc_errors::Applicability; -use rustc_lexer::TokenKind; -use rustc_lint::{EarlyContext, EarlyLintPass}; -use rustc_session::declare_lint_pass; +use rustc_hir::def::CtorOf; +use rustc_hir::def::DefKind::Ctor; +use rustc_hir::def::Res::Def; +use rustc_hir::def_id::LocalDefId; +use rustc_hir::{Expr, ExprKind, Item, ItemKind, Node, Path, QPath, Variant, VariantData}; +use rustc_lint::{LateContext, LateLintPass}; +use rustc_middle::ty::TyCtxt; +use rustc_session::impl_lint_pass; use rustc_span::Span; declare_clippy_lint! { @@ -70,14 +75,26 @@ declare_clippy_lint! { "finds enum variants with empty brackets" } -declare_lint_pass!(EmptyWithBrackets => [EMPTY_STRUCTS_WITH_BRACKETS, EMPTY_ENUM_VARIANTS_WITH_BRACKETS]); +#[derive(Debug)] +enum Usage { + Unused { redundant_use_sites: Vec }, + Used, + NoDefinition { redundant_use_sites: Vec }, +} + +#[derive(Default)] +pub struct EmptyWithBrackets { + // Value holds `Usage::Used` if the empty tuple variant was used as a function + empty_tuple_enum_variants: FxIndexMap, +} -impl EarlyLintPass for EmptyWithBrackets { - fn check_item(&mut self, cx: &EarlyContext<'_>, item: &Item) { - let span_after_ident = item.span.with_lo(item.ident.span.hi()); +impl_lint_pass!(EmptyWithBrackets => [EMPTY_STRUCTS_WITH_BRACKETS, EMPTY_ENUM_VARIANTS_WITH_BRACKETS]); - if let ItemKind::Struct(var_data, _) = &item.kind +impl LateLintPass<'_> for EmptyWithBrackets { + fn check_item(&mut self, cx: &LateContext<'_>, item: &Item<'_>) { + if let ItemKind::Struct(ident, var_data, _) = &item.kind && has_brackets(var_data) + && let span_after_ident = item.span.with_lo(ident.span.hi()) && has_no_fields(cx, var_data, span_after_ident) { span_lint_and_then( @@ -97,70 +114,175 @@ impl EarlyLintPass for EmptyWithBrackets { } } - fn check_variant(&mut self, cx: &EarlyContext<'_>, variant: &Variant) { + fn check_variant(&mut self, cx: &LateContext<'_>, variant: &Variant<'_>) { + // the span of the parentheses/braces let span_after_ident = variant.span.with_lo(variant.ident.span.hi()); - if has_brackets(&variant.data) && has_no_fields(cx, &variant.data, span_after_ident) { - span_lint_and_then( + if has_no_fields(cx, &variant.data, span_after_ident) { + match variant.data { + VariantData::Struct { .. } => { + // Empty struct variants can be linted immediately + span_lint_and_then( + cx, + EMPTY_ENUM_VARIANTS_WITH_BRACKETS, + span_after_ident, + "enum variant has empty brackets", + |diagnostic| { + diagnostic.span_suggestion_hidden( + span_after_ident, + "remove the brackets", + "", + Applicability::MaybeIncorrect, + ); + }, + ); + }, + VariantData::Tuple(.., local_def_id) => { + // Don't lint reachable tuple enums + if cx.effective_visibilities.is_reachable(variant.def_id) { + return; + } + if let Some(entry) = self.empty_tuple_enum_variants.get_mut(&local_def_id) { + // empty_tuple_enum_variants contains Usage::NoDefinition if the variant was called before the + // definition was encountered. Now that there's a definition, convert it + // to Usage::Unused. + if let Usage::NoDefinition { redundant_use_sites } = entry { + *entry = Usage::Unused { + redundant_use_sites: redundant_use_sites.clone(), + }; + } + } else { + self.empty_tuple_enum_variants.insert( + local_def_id, + Usage::Unused { + redundant_use_sites: vec![], + }, + ); + } + }, + VariantData::Unit(..) => {}, + } + } + } + + fn check_expr(&mut self, cx: &LateContext<'_>, expr: &Expr<'_>) { + if let Some(def_id) = check_expr_for_enum_as_function(expr) { + if let Some(parentheses_span) = call_parentheses_span(cx.tcx, expr) { + // Do not count expressions from macro expansion as a redundant use site. + if expr.span.from_expansion() { + return; + } + match self.empty_tuple_enum_variants.get_mut(&def_id) { + Some( + &mut (Usage::Unused { + ref mut redundant_use_sites, + } + | Usage::NoDefinition { + ref mut redundant_use_sites, + }), + ) => { + redundant_use_sites.push(parentheses_span); + }, + None => { + // The variant isn't in the IndexMap which means its definition wasn't encountered yet. + self.empty_tuple_enum_variants.insert( + def_id, + Usage::NoDefinition { + redundant_use_sites: vec![parentheses_span], + }, + ); + }, + _ => {}, + } + } else { + // The parentheses are not redundant. + self.empty_tuple_enum_variants.insert(def_id, Usage::Used); + } + } + } + + fn check_crate_post(&mut self, cx: &LateContext<'_>) { + for (local_def_id, usage) in &self.empty_tuple_enum_variants { + // Ignore all variants with Usage::Used or Usage::NoDefinition + let Usage::Unused { redundant_use_sites } = usage else { + continue; + }; + // Attempt to fetch the Variant from LocalDefId. + let Node::Variant(variant) = cx.tcx.hir_node( + cx.tcx + .local_def_id_to_hir_id(cx.tcx.parent(local_def_id.to_def_id()).expect_local()), + ) else { + continue; + }; + // Span of the parentheses in variant definition + let span = variant.span.with_lo(variant.ident.span.hi()); + span_lint_hir_and_then( cx, EMPTY_ENUM_VARIANTS_WITH_BRACKETS, - span_after_ident, + variant.hir_id, + span, "enum variant has empty brackets", |diagnostic| { - diagnostic.span_suggestion_hidden( - span_after_ident, - "remove the brackets", - "", - Applicability::MaybeIncorrect, - ); + if redundant_use_sites.is_empty() { + // If there's no redundant use sites, the definition is the only place to modify. + diagnostic.span_suggestion_hidden( + span, + "remove the brackets", + "", + Applicability::MaybeIncorrect, + ); + } else { + let mut parentheses_spans: Vec<_> = + redundant_use_sites.iter().map(|span| (*span, String::new())).collect(); + parentheses_spans.push((span, String::new())); + diagnostic.multipart_suggestion( + "remove the brackets", + parentheses_spans, + Applicability::MaybeIncorrect, + ); + } }, ); } } } -fn has_no_ident_token(braces_span_str: &str) -> bool { - !rustc_lexer::tokenize(braces_span_str).any(|t| t.kind == TokenKind::Ident) -} - -fn has_brackets(var_data: &VariantData) -> bool { - !matches!(var_data, VariantData::Unit(_)) +fn has_brackets(var_data: &VariantData<'_>) -> bool { + !matches!(var_data, VariantData::Unit(..)) } -fn has_no_fields(cx: &EarlyContext<'_>, var_data: &VariantData, braces_span: Span) -> bool { - if !var_data.fields().is_empty() { - return false; - } - +fn has_no_fields(cx: &LateContext<'_>, var_data: &VariantData<'_>, braces_span: Span) -> bool { + var_data.fields().is_empty() && // there might still be field declarations hidden from the AST // (conditionally compiled code using #[cfg(..)]) - - let Some(braces_span_str) = snippet_opt(cx, braces_span) else { - return false; - }; - - has_no_ident_token(braces_span_str.as_ref()) + !span_contains_cfg(cx, braces_span) } -#[cfg(test)] -mod unit_test { - use super::*; - - #[test] - fn test_has_no_ident_token() { - let input = "{ field: u8 }"; - assert!(!has_no_ident_token(input)); - - let input = "(u8, String);"; - assert!(!has_no_ident_token(input)); - - let input = " { - // test = 5 - } - "; - assert!(has_no_ident_token(input)); +// If expression HIR ID and callee HIR ID are same, returns the span of the parentheses, else, +// returns None. +fn call_parentheses_span(tcx: TyCtxt<'_>, expr: &Expr<'_>) -> Option { + if let Node::Expr(parent) = tcx.parent_hir_node(expr.hir_id) + && let ExprKind::Call(callee, ..) = parent.kind + && callee.hir_id == expr.hir_id + { + Some(parent.span.with_lo(expr.span.hi())) + } else { + None + } +} - let input = " ();"; - assert!(has_no_ident_token(input)); +// Returns the LocalDefId of the variant being called as a function if it exists. +fn check_expr_for_enum_as_function(expr: &Expr<'_>) -> Option { + if let ExprKind::Path(QPath::Resolved( + _, + Path { + res: Def(Ctor(CtorOf::Variant, _), def_id), + .. + }, + )) = expr.kind + { + def_id.as_local() + } else { + None } } diff --git a/src/tools/clippy/clippy_lints/src/entry.rs b/src/tools/clippy/clippy_lints/src/entry.rs index dcfee0b6d3c62..182cb4e46d2bd 100644 --- a/src/tools/clippy/clippy_lints/src/entry.rs +++ b/src/tools/clippy/clippy_lints/src/entry.rs @@ -95,14 +95,13 @@ impl<'tcx> LateLintPass<'tcx> for HashMapPass { return; }; - if then_search.is_key_used_and_no_copy || else_search.is_key_used_and_no_copy { - span_lint(cx, MAP_ENTRY, expr.span, lint_msg); - return; - } - if then_search.edits.is_empty() && else_search.edits.is_empty() { // No insertions return; + } else if then_search.is_key_used_and_no_copy || else_search.is_key_used_and_no_copy { + // If there are other uses of the key, and the key is not copy, + // we cannot perform a fix automatically, but continue to emit a lint. + None } else if then_search.edits.is_empty() || else_search.edits.is_empty() { // if .. { insert } else { .. } or if .. { .. } else { insert } let ((then_str, entry_kind), else_str) = match (else_search.edits.is_empty(), contains_expr.negated) { @@ -123,10 +122,10 @@ impl<'tcx> LateLintPass<'tcx> for HashMapPass { snippet_with_applicability(cx, then_expr.span, "{ .. }", &mut app), ), }; - format!( + Some(format!( "if let {}::{entry_kind} = {map_str}.entry({key_str}) {then_str} else {else_str}", map_ty.entry_path(), - ) + )) } else { // if .. { insert } else { insert } let ((then_str, then_entry), (else_str, else_entry)) = if contains_expr.negated { @@ -142,13 +141,13 @@ impl<'tcx> LateLintPass<'tcx> for HashMapPass { }; let indent_str = snippet_indent(cx, expr.span); let indent_str = indent_str.as_deref().unwrap_or(""); - format!( + Some(format!( "match {map_str}.entry({key_str}) {{\n{indent_str} {entry}::{then_entry} => {}\n\ {indent_str} {entry}::{else_entry} => {}\n{indent_str}}}", reindent_multiline(&then_str, true, Some(4 + indent_str.len())), reindent_multiline(&else_str, true, Some(4 + indent_str.len())), entry = map_ty.entry_path(), - ) + )) } } else { if then_search.edits.is_empty() { @@ -163,17 +162,17 @@ impl<'tcx> LateLintPass<'tcx> for HashMapPass { } else { then_search.snippet_occupied(cx, then_expr.span, &mut app) }; - format!( + Some(format!( "if let {}::{entry_kind} = {map_str}.entry({key_str}) {body_str}", map_ty.entry_path(), - ) + )) } else if let Some(insertion) = then_search.as_single_insertion() { let value_str = snippet_with_context(cx, insertion.value.span, then_expr.span.ctxt(), "..", &mut app).0; if contains_expr.negated { if insertion.value.can_have_side_effects() { - format!("{map_str}.entry({key_str}).or_insert_with(|| {value_str});") + Some(format!("{map_str}.entry({key_str}).or_insert_with(|| {value_str});")) } else { - format!("{map_str}.entry({key_str}).or_insert({value_str});") + Some(format!("{map_str}.entry({key_str}).or_insert({value_str});")) } } else { // TODO: suggest using `if let Some(v) = map.get_mut(k) { .. }` here. @@ -183,7 +182,7 @@ impl<'tcx> LateLintPass<'tcx> for HashMapPass { } else { let block_str = then_search.snippet_closure(cx, then_expr.span, &mut app); if contains_expr.negated { - format!("{map_str}.entry({key_str}).or_insert_with(|| {block_str});") + Some(format!("{map_str}.entry({key_str}).or_insert_with(|| {block_str});")) } else { // TODO: suggest using `if let Some(v) = map.get_mut(k) { .. }` here. // This would need to be a different lint. @@ -192,7 +191,11 @@ impl<'tcx> LateLintPass<'tcx> for HashMapPass { } }; - span_lint_and_sugg(cx, MAP_ENTRY, expr.span, lint_msg, "try", sugg, app); + if let Some(sugg) = sugg { + span_lint_and_sugg(cx, MAP_ENTRY, expr.span, lint_msg, "try", sugg, app); + } else { + span_lint(cx, MAP_ENTRY, expr.span, lint_msg); + } } } diff --git a/src/tools/clippy/clippy_lints/src/enum_clike.rs b/src/tools/clippy/clippy_lints/src/enum_clike.rs index f01b5c840d290..ec81294624efa 100644 --- a/src/tools/clippy/clippy_lints/src/enum_clike.rs +++ b/src/tools/clippy/clippy_lints/src/enum_clike.rs @@ -49,10 +49,10 @@ impl<'tcx> LateLintPass<'tcx> for UnportableVariant { .ok() .map(|val| rustc_middle::mir::Const::from_value(val, ty)); if let Some(Constant::Int(val)) = constant.and_then(|c| mir_to_const(cx.tcx, c)) { - if let ty::Adt(adt, _) = ty.kind() { - if adt.is_enum() { - ty = adt.repr().discr_type().to_ty(cx.tcx); - } + if let ty::Adt(adt, _) = ty.kind() + && adt.is_enum() + { + ty = adt.repr().discr_type().to_ty(cx.tcx); } match ty.kind() { ty::Int(IntTy::Isize) => { diff --git a/src/tools/clippy/clippy_lints/src/equatable_if_let.rs b/src/tools/clippy/clippy_lints/src/equatable_if_let.rs index cd9ab2764ac48..72f5eaf8a4bcc 100644 --- a/src/tools/clippy/clippy_lints/src/equatable_if_let.rs +++ b/src/tools/clippy/clippy_lints/src/equatable_if_let.rs @@ -45,6 +45,7 @@ fn unary_pattern(pat: &Pat<'_>) -> bool { pats.iter().all(unary_pattern) } match &pat.kind { + PatKind::Missing => unreachable!(), PatKind::Slice(_, _, _) | PatKind::Range(_, _, _) | PatKind::Binding(..) @@ -67,6 +68,38 @@ fn is_structural_partial_eq<'tcx>(cx: &LateContext<'tcx>, ty: Ty<'tcx>, other: T } } +/// Check if the pattern has any type mismatch that would prevent it from being used in an equality +/// check. This can happen if the expr has a reference type and the corresponding pattern is a +/// literal. +fn contains_type_mismatch(cx: &LateContext<'_>, pat: &Pat<'_>) -> bool { + let mut result = false; + pat.walk(|p| { + if result { + return false; + } + + if p.span.in_external_macro(cx.sess().source_map()) { + return true; + } + + let adjust_pat = match p.kind { + PatKind::Or([p, ..]) => p, + _ => p, + }; + + if let Some(adjustments) = cx.typeck_results().pat_adjustments().get(adjust_pat.hir_id) + && adjustments.first().is_some_and(|first| first.source.is_ref()) + { + result = true; + return false; + } + + true + }); + + result +} + impl<'tcx> LateLintPass<'tcx> for PatternEquality { fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'tcx>) { if let ExprKind::Let(let_expr) = expr.kind @@ -77,7 +110,7 @@ impl<'tcx> LateLintPass<'tcx> for PatternEquality { let pat_ty = cx.typeck_results().pat_ty(let_expr.pat); let mut applicability = Applicability::MachineApplicable; - if is_structural_partial_eq(cx, exp_ty, pat_ty) { + if is_structural_partial_eq(cx, exp_ty, pat_ty) && !contains_type_mismatch(cx, let_expr.pat) { let pat_str = match let_expr.pat.kind { PatKind::Struct(..) => format!( "({})", diff --git a/src/tools/clippy/clippy_lints/src/escape.rs b/src/tools/clippy/clippy_lints/src/escape.rs index de0fc2b1bf4bb..2cb3b32babe89 100644 --- a/src/tools/clippy/clippy_lints/src/escape.rs +++ b/src/tools/clippy/clippy_lints/src/escape.rs @@ -72,10 +72,10 @@ impl<'tcx> LateLintPass<'tcx> for BoxedLocal { _: Span, fn_def_id: LocalDefId, ) { - if let Some(header) = fn_kind.header() { - if header.abi != ExternAbi::Rust { - return; - } + if let Some(header) = fn_kind.header() + && header.abi != ExternAbi::Rust + { + return; } let parent_id = cx @@ -93,12 +93,11 @@ impl<'tcx> LateLintPass<'tcx> for BoxedLocal { // find `self` ty for this trait if relevant if let ItemKind::Trait(_, _, _, _, _, items) = item.kind { for trait_item in items { - if trait_item.id.owner_id.def_id == fn_def_id { + if trait_item.id.owner_id.def_id == fn_def_id // be sure we have `self` parameter in this function - if trait_item.kind == (AssocItemKind::Fn { has_self: true }) { - trait_self_ty = - Some(TraitRef::identity(cx.tcx, trait_item.id.owner_id.to_def_id()).self_ty()); - } + && trait_item.kind == (AssocItemKind::Fn { has_self: true }) + { + trait_self_ty = Some(TraitRef::identity(cx.tcx, trait_item.id.owner_id.to_def_id()).self_ty()); } } } @@ -120,7 +119,7 @@ impl<'tcx> LateLintPass<'tcx> for BoxedLocal { cx, BOXED_LOCAL, node, - cx.tcx.hir().span(node), + cx.tcx.hir_span(node), "local variable doesn't need to be boxed here", ); } @@ -142,22 +141,22 @@ fn is_argument(tcx: TyCtxt<'_>, id: HirId) -> bool { impl<'tcx> Delegate<'tcx> for EscapeDelegate<'_, 'tcx> { fn consume(&mut self, cmt: &PlaceWithHirId<'tcx>, _: HirId) { - if cmt.place.projections.is_empty() { - if let PlaceBase::Local(lid) = cmt.place.base { - // FIXME(rust/#120456) - is `swap_remove` correct? - self.set.swap_remove(&lid); - } + if cmt.place.projections.is_empty() + && let PlaceBase::Local(lid) = cmt.place.base + { + // FIXME(rust/#120456) - is `swap_remove` correct? + self.set.swap_remove(&lid); } } fn use_cloned(&mut self, _: &PlaceWithHirId<'tcx>, _: HirId) {} fn borrow(&mut self, cmt: &PlaceWithHirId<'tcx>, _: HirId, _: ty::BorrowKind) { - if cmt.place.projections.is_empty() { - if let PlaceBase::Local(lid) = cmt.place.base { - // FIXME(rust/#120456) - is `swap_remove` correct? - self.set.swap_remove(&lid); - } + if cmt.place.projections.is_empty() + && let PlaceBase::Local(lid) = cmt.place.base + { + // FIXME(rust/#120456) - is `swap_remove` correct? + self.set.swap_remove(&lid); } } @@ -171,10 +170,11 @@ impl<'tcx> Delegate<'tcx> for EscapeDelegate<'_, 'tcx> { // skip if there is a `self` parameter binding to a type // that contains `Self` (i.e.: `self: Box`), see #4804 - if let Some(trait_self_ty) = self.trait_self_ty { - if self.cx.tcx.hir_name(cmt.hir_id) == kw::SelfLower && cmt.place.ty().contains(trait_self_ty) { - return; - } + if let Some(trait_self_ty) = self.trait_self_ty + && self.cx.tcx.hir_name(cmt.hir_id) == kw::SelfLower + && cmt.place.ty().contains(trait_self_ty) + { + return; } if is_non_trait_box(cmt.place.ty()) && !self.is_large_box(cmt.place.ty()) { diff --git a/src/tools/clippy/clippy_lints/src/fallible_impl_from.rs b/src/tools/clippy/clippy_lints/src/fallible_impl_from.rs index f67d38d932b9a..c868b782f43c3 100644 --- a/src/tools/clippy/clippy_lints/src/fallible_impl_from.rs +++ b/src/tools/clippy/clippy_lints/src/fallible_impl_from.rs @@ -75,10 +75,10 @@ fn lint_impl_body(cx: &LateContext<'_>, impl_span: Span, impl_items: &[hir::Impl impl<'tcx> Visitor<'tcx> for FindPanicUnwrap<'_, 'tcx> { fn visit_expr(&mut self, expr: &'tcx Expr<'_>) { - if let Some(macro_call) = root_macro_call_first_node(self.lcx, expr) { - if is_panic(self.lcx, macro_call.def_id) { - self.result.push(expr.span); - } + if let Some(macro_call) = root_macro_call_first_node(self.lcx, expr) + && is_panic(self.lcx, macro_call.def_id) + { + self.result.push(expr.span); } // check for `unwrap` diff --git a/src/tools/clippy/clippy_lints/src/field_scoped_visibility_modifiers.rs b/src/tools/clippy/clippy_lints/src/field_scoped_visibility_modifiers.rs index ba2b37fbf11a3..aae8291905d37 100644 --- a/src/tools/clippy/clippy_lints/src/field_scoped_visibility_modifiers.rs +++ b/src/tools/clippy/clippy_lints/src/field_scoped_visibility_modifiers.rs @@ -51,7 +51,7 @@ declare_lint_pass!(FieldScopedVisibilityModifiers => [FIELD_SCOPED_VISIBILITY_MO impl EarlyLintPass for FieldScopedVisibilityModifiers { fn check_item(&mut self, cx: &EarlyContext<'_>, item: &Item) { - let ItemKind::Struct(ref st, _) = item.kind else { + let ItemKind::Struct(_, ref st, _) = item.kind else { return; }; for field in st.fields() { diff --git a/src/tools/clippy/clippy_lints/src/floating_point_arithmetic.rs b/src/tools/clippy/clippy_lints/src/floating_point_arithmetic.rs index daa199779e3cb..553a00ed868d5 100644 --- a/src/tools/clippy/clippy_lints/src/floating_point_arithmetic.rs +++ b/src/tools/clippy/clippy_lints/src/floating_point_arithmetic.rs @@ -3,7 +3,7 @@ use clippy_utils::consts::{ConstEvalCtxt, Constant}; use clippy_utils::diagnostics::span_lint_and_sugg; use clippy_utils::{ eq_expr_value, get_parent_expr, higher, is_in_const_context, is_inherent_method_call, is_no_std_crate, - numeric_literal, peel_blocks, sugg, + numeric_literal, peel_blocks, sugg, sym, }; use rustc_errors::Applicability; use rustc_hir::{BinOpKind, Expr, ExprKind, PathSegment, UnOp}; @@ -154,7 +154,7 @@ fn prepare_receiver_sugg<'a>(cx: &LateContext<'_>, mut expr: &'a Expr<'a>) -> Su }; } - suggestion.maybe_par() + suggestion.maybe_paren() } fn check_log_base(cx: &LateContext<'_>, expr: &Expr<'_>, receiver: &Expr<'_>, args: &[Expr<'_>]) { @@ -165,7 +165,7 @@ fn check_log_base(cx: &LateContext<'_>, expr: &Expr<'_>, receiver: &Expr<'_>, ar expr.span, "logarithm for bases 2, 10 and e can be computed more accurately", "consider using", - format!("{}.{method}()", Sugg::hir(cx, receiver, "..").maybe_par()), + format!("{}.{method}()", Sugg::hir(cx, receiver, "..").maybe_paren()), Applicability::MachineApplicable, ); } @@ -228,24 +228,24 @@ fn get_integer_from_float_constant(value: &Constant<'_>) -> Option { fn check_powf(cx: &LateContext<'_>, expr: &Expr<'_>, receiver: &Expr<'_>, args: &[Expr<'_>]) { // Check receiver - if let Some(value) = ConstEvalCtxt::new(cx).eval(receiver) { - if let Some(method) = if F32(f32_consts::E) == value || F64(f64_consts::E) == value { + if let Some(value) = ConstEvalCtxt::new(cx).eval(receiver) + && let Some(method) = if F32(f32_consts::E) == value || F64(f64_consts::E) == value { Some("exp") } else if F32(2.0) == value || F64(2.0) == value { Some("exp2") } else { None - } { - span_lint_and_sugg( - cx, - SUBOPTIMAL_FLOPS, - expr.span, - "exponent for bases 2 and e can be computed more accurately", - "consider using", - format!("{}.{method}()", prepare_receiver_sugg(cx, &args[0])), - Applicability::MachineApplicable, - ); } + { + span_lint_and_sugg( + cx, + SUBOPTIMAL_FLOPS, + expr.span, + "exponent for bases 2 and e can be computed more accurately", + "consider using", + format!("{}.{method}()", prepare_receiver_sugg(cx, &args[0])), + Applicability::MachineApplicable, + ); } // Check argument @@ -254,13 +254,13 @@ fn check_powf(cx: &LateContext<'_>, expr: &Expr<'_>, receiver: &Expr<'_>, args: ( SUBOPTIMAL_FLOPS, "square-root of a number can be computed more efficiently and accurately", - format!("{}.sqrt()", Sugg::hir(cx, receiver, "..").maybe_par()), + format!("{}.sqrt()", Sugg::hir(cx, receiver, "..").maybe_paren()), ) } else if F32(1.0 / 3.0) == value || F64(1.0 / 3.0) == value { ( IMPRECISE_FLOPS, "cube-root of a number can be computed more accurately", - format!("{}.cbrt()", Sugg::hir(cx, receiver, "..").maybe_par()), + format!("{}.cbrt()", Sugg::hir(cx, receiver, "..").maybe_paren()), ) } else if let Some(exponent) = get_integer_from_float_constant(&value) { ( @@ -268,7 +268,7 @@ fn check_powf(cx: &LateContext<'_>, expr: &Expr<'_>, receiver: &Expr<'_>, args: "exponentiation with integer powers can be computed more efficiently", format!( "{}.powi({})", - Sugg::hir(cx, receiver, "..").maybe_par(), + Sugg::hir(cx, receiver, "..").maybe_paren(), numeric_literal::format(&exponent.to_string(), None, false) ), ) @@ -289,55 +289,53 @@ fn check_powf(cx: &LateContext<'_>, expr: &Expr<'_>, receiver: &Expr<'_>, args: } fn check_powi(cx: &LateContext<'_>, expr: &Expr<'_>, receiver: &Expr<'_>, args: &[Expr<'_>]) { - if let Some(value) = ConstEvalCtxt::new(cx).eval(&args[0]) { - if value == Int(2) { - if let Some(parent) = get_parent_expr(cx, expr) { - if let Some(grandparent) = get_parent_expr(cx, parent) { - if let ExprKind::MethodCall(PathSegment { ident: method_name, .. }, receiver, ..) = grandparent.kind - { - if method_name.as_str() == "sqrt" && detect_hypot(cx, receiver).is_some() { - return; - } - } - } + if let Some(value) = ConstEvalCtxt::new(cx).eval(&args[0]) + && value == Int(2) + && let Some(parent) = get_parent_expr(cx, expr) + { + if let Some(grandparent) = get_parent_expr(cx, parent) + && let ExprKind::MethodCall(PathSegment { ident: method_name, .. }, receiver, ..) = grandparent.kind + && method_name.as_str() == "sqrt" + && detect_hypot(cx, receiver).is_some() + { + return; + } - if let ExprKind::Binary( - Spanned { - node: op @ (BinOpKind::Add | BinOpKind::Sub), - .. - }, - lhs, - rhs, - ) = parent.kind - { - let other_addend = if lhs.hir_id == expr.hir_id { rhs } else { lhs }; - - // Negate expr if original code has subtraction and expr is on the right side - let maybe_neg_sugg = |expr, hir_id| { - let sugg = Sugg::hir(cx, expr, ".."); - if matches!(op, BinOpKind::Sub) && hir_id == rhs.hir_id { - -sugg - } else { - sugg - } - }; - - span_lint_and_sugg( - cx, - SUBOPTIMAL_FLOPS, - parent.span, - "multiply and add expressions can be calculated more efficiently and accurately", - "consider using", - format!( - "{}.mul_add({}, {})", - Sugg::hir(cx, receiver, "..").maybe_par(), - maybe_neg_sugg(receiver, expr.hir_id), - maybe_neg_sugg(other_addend, other_addend.hir_id), - ), - Applicability::MachineApplicable, - ); + if let ExprKind::Binary( + Spanned { + node: op @ (BinOpKind::Add | BinOpKind::Sub), + .. + }, + lhs, + rhs, + ) = parent.kind + { + let other_addend = if lhs.hir_id == expr.hir_id { rhs } else { lhs }; + + // Negate expr if original code has subtraction and expr is on the right side + let maybe_neg_sugg = |expr, hir_id| { + let sugg = Sugg::hir(cx, expr, ".."); + if matches!(op, BinOpKind::Sub) && hir_id == rhs.hir_id { + -sugg + } else { + sugg } - } + }; + + span_lint_and_sugg( + cx, + SUBOPTIMAL_FLOPS, + parent.span, + "multiply and add expressions can be calculated more efficiently and accurately", + "consider using", + format!( + "{}.mul_add({}, {})", + Sugg::hir(cx, receiver, "..").maybe_paren(), + maybe_neg_sugg(receiver, expr.hir_id), + maybe_neg_sugg(other_addend, other_addend.hir_id), + ), + Applicability::MachineApplicable, + ); } } } @@ -371,7 +369,7 @@ fn detect_hypot(cx: &LateContext<'_>, receiver: &Expr<'_>) -> Option { { return Some(format!( "{}.hypot({})", - Sugg::hir(cx, lmul_lhs, "..").maybe_par(), + Sugg::hir(cx, lmul_lhs, "..").maybe_paren(), Sugg::hir(cx, rmul_lhs, "..") )); } @@ -403,7 +401,7 @@ fn detect_hypot(cx: &LateContext<'_>, receiver: &Expr<'_>) -> Option { { return Some(format!( "{}.hypot({})", - Sugg::hir(cx, largs_0, "..").maybe_par(), + Sugg::hir(cx, largs_0, "..").maybe_paren(), Sugg::hir(cx, rargs_0, "..") )); } @@ -437,7 +435,7 @@ fn check_expm1(cx: &LateContext<'_>, expr: &Expr<'_>) { rhs, ) = expr.kind && let ExprKind::MethodCall(path, self_arg, [], _) = &lhs.kind - && path.ident.name.as_str() == "exp" + && path.ident.name == sym::exp && cx.typeck_results().expr_ty(lhs).is_floating_point() && let Some(value) = ConstEvalCtxt::new(cx).eval(rhs) && (F32(1.0) == value || F64(1.0) == value) @@ -449,7 +447,7 @@ fn check_expm1(cx: &LateContext<'_>, expr: &Expr<'_>) { expr.span, "(e.pow(x) - 1) can be computed more accurately", "consider using", - format!("{}.exp_m1()", Sugg::hir(cx, self_arg, "..").maybe_par()), + format!("{}.exp_m1()", Sugg::hir(cx, self_arg, "..").maybe_paren()), Applicability::MachineApplicable, ); } @@ -483,12 +481,12 @@ fn check_mul_add(cx: &LateContext<'_>, expr: &Expr<'_>) { rhs, ) = &expr.kind { - if let Some(parent) = get_parent_expr(cx, expr) { - if let ExprKind::MethodCall(PathSegment { ident: method_name, .. }, receiver, ..) = parent.kind { - if method_name.as_str() == "sqrt" && detect_hypot(cx, receiver).is_some() { - return; - } - } + if let Some(parent) = get_parent_expr(cx, expr) + && let ExprKind::MethodCall(PathSegment { ident: method_name, .. }, receiver, ..) = parent.kind + && method_name.as_str() == "sqrt" + && detect_hypot(cx, receiver).is_some() + { + return; } let maybe_neg_sugg = |expr| { @@ -566,15 +564,15 @@ fn is_zero(cx: &LateContext<'_>, expr: &Expr<'_>) -> bool { /// If the two expressions are not negations of each other, then it /// returns None. fn are_negated<'a>(cx: &LateContext<'_>, expr1: &'a Expr<'a>, expr2: &'a Expr<'a>) -> Option<(bool, &'a Expr<'a>)> { - if let ExprKind::Unary(UnOp::Neg, expr1_negated) = &expr1.kind { - if eq_expr_value(cx, expr1_negated, expr2) { - return Some((false, expr2)); - } + if let ExprKind::Unary(UnOp::Neg, expr1_negated) = &expr1.kind + && eq_expr_value(cx, expr1_negated, expr2) + { + return Some((false, expr2)); } - if let ExprKind::Unary(UnOp::Neg, expr2_negated) = &expr2.kind { - if eq_expr_value(cx, expr1, expr2_negated) { - return Some((true, expr1)); - } + if let ExprKind::Unary(UnOp::Neg, expr2_negated) = &expr2.kind + && eq_expr_value(cx, expr1, expr2_negated) + { + return Some((true, expr1)); } None } @@ -591,11 +589,11 @@ fn check_custom_abs(cx: &LateContext<'_>, expr: &Expr<'_>) { { let positive_abs_sugg = ( "manual implementation of `abs` method", - format!("{}.abs()", Sugg::hir(cx, body, "..").maybe_par()), + format!("{}.abs()", Sugg::hir(cx, body, "..").maybe_paren()), ); let negative_abs_sugg = ( "manual implementation of negation of `abs` method", - format!("-{}.abs()", Sugg::hir(cx, body, "..").maybe_par()), + format!("-{}.abs()", Sugg::hir(cx, body, "..").maybe_paren()), ); let sugg = if is_testing_positive(cx, cond, body) { if if_expr_positive { @@ -672,7 +670,7 @@ fn check_log_division(cx: &LateContext<'_>, expr: &Expr<'_>) { "consider using", format!( "{}.log({})", - Sugg::hir(cx, largs_self, "..").maybe_par(), + Sugg::hir(cx, largs_self, "..").maybe_paren(), Sugg::hir(cx, rargs_self, ".."), ), Applicability::MachineApplicable, @@ -703,7 +701,7 @@ fn check_radians(cx: &LateContext<'_>, expr: &Expr<'_>) { if (F32(f32_consts::PI) == rvalue || F64(f64_consts::PI) == rvalue) && (F32(180_f32) == lvalue || F64(180_f64) == lvalue) { - let mut proposal = format!("{}.to_degrees()", Sugg::hir(cx, mul_lhs, "..").maybe_par()); + let mut proposal = format!("{}.to_degrees()", Sugg::hir(cx, mul_lhs, "..").maybe_paren()); if let ExprKind::Lit(literal) = mul_lhs.kind && let ast::LitKind::Float(ref value, float_type) = literal.node && float_type == ast::LitFloatType::Unsuffixed @@ -726,7 +724,7 @@ fn check_radians(cx: &LateContext<'_>, expr: &Expr<'_>) { } else if (F32(180_f32) == rvalue || F64(180_f64) == rvalue) && (F32(f32_consts::PI) == lvalue || F64(f64_consts::PI) == lvalue) { - let mut proposal = format!("{}.to_radians()", Sugg::hir(cx, mul_lhs, "..").maybe_par()); + let mut proposal = format!("{}.to_radians()", Sugg::hir(cx, mul_lhs, "..").maybe_paren()); if let ExprKind::Lit(literal) = mul_lhs.kind && let ast::LitKind::Float(ref value, float_type) = literal.node && float_type == ast::LitFloatType::Unsuffixed diff --git a/src/tools/clippy/clippy_lints/src/format.rs b/src/tools/clippy/clippy_lints/src/format.rs index 5e3f6b6a13707..94e66769eb265 100644 --- a/src/tools/clippy/clippy_lints/src/format.rs +++ b/src/tools/clippy/clippy_lints/src/format.rs @@ -94,7 +94,7 @@ impl<'tcx> LateLintPass<'tcx> for UselessFormat { .into_owned() } else { let sugg = Sugg::hir_with_context(cx, value, call_site.ctxt(), "", &mut applicability); - format!("{}.to_string()", sugg.maybe_par()) + format!("{}.to_string()", sugg.maybe_paren()) }; span_useless_format(cx, call_site, sugg, applicability); } diff --git a/src/tools/clippy/clippy_lints/src/format_args.rs b/src/tools/clippy/clippy_lints/src/format_args.rs index 3862ff7921db8..8a3f8e1c5874d 100644 --- a/src/tools/clippy/clippy_lints/src/format_args.rs +++ b/src/tools/clippy/clippy_lints/src/format_args.rs @@ -141,7 +141,7 @@ declare_clippy_lint! { /// format!("{var:.prec$}"); /// ``` /// - /// If allow-mixed-uninlined-format-args is set to false in clippy.toml, + /// If `allow-mixed-uninlined-format-args` is set to `false` in clippy.toml, /// the following code will also trigger the lint: /// ```no_run /// # let var = 42; @@ -159,7 +159,7 @@ declare_clippy_lint! { /// nothing will be suggested, e.g. `println!("{0}={1}", var, 1+2)`. #[clippy::version = "1.66.0"] pub UNINLINED_FORMAT_ARGS, - pedantic, + style, "using non-inlined variables in `format!` calls" } @@ -550,7 +550,7 @@ impl<'tcx> FormatArgsExpr<'_, 'tcx> { // a `Target` that is in `self.ty_msrv_map`. if let Some(deref_trait_id) = self.cx.tcx.lang_items().deref_trait() && implements_trait(self.cx, ty, deref_trait_id, &[]) - && let Some(target_ty) = self.cx.get_associated_type(ty, deref_trait_id, "Target") + && let Some(target_ty) = self.cx.get_associated_type(ty, deref_trait_id, sym::Target) && let Some(msrv) = self.ty_msrv_map.get(&target_ty) && msrv.is_none_or(|msrv| self.msrv.meets(self.cx, msrv)) { diff --git a/src/tools/clippy/clippy_lints/src/format_impl.rs b/src/tools/clippy/clippy_lints/src/format_impl.rs index 5b42a40d850bb..0535ecf5240f9 100644 --- a/src/tools/clippy/clippy_lints/src/format_impl.rs +++ b/src/tools/clippy/clippy_lints/src/format_impl.rs @@ -1,13 +1,13 @@ use clippy_utils::diagnostics::{span_lint, span_lint_and_sugg}; use clippy_utils::macros::{FormatArgsStorage, find_format_arg_expr, is_format_macro, root_macro_call_first_node}; -use clippy_utils::{get_parent_as_impl, is_diag_trait_item, path_to_local, peel_ref_operators}; +use clippy_utils::{get_parent_as_impl, is_diag_trait_item, path_to_local, peel_ref_operators, sym}; use rustc_ast::{FormatArgsPiece, FormatTrait}; use rustc_errors::Applicability; use rustc_hir::{Expr, ExprKind, Impl, ImplItem, ImplItemKind, QPath}; use rustc_lint::{LateContext, LateLintPass}; use rustc_session::impl_lint_pass; +use rustc_span::Symbol; use rustc_span::symbol::kw; -use rustc_span::{Symbol, sym}; declare_clippy_lint! { /// ### What it does @@ -185,13 +185,13 @@ impl FormatImplExpr<'_, '_> { && let trait_name = match placeholder.format_trait { FormatTrait::Display => sym::Display, FormatTrait::Debug => sym::Debug, - FormatTrait::LowerExp => sym!(LowerExp), - FormatTrait::UpperExp => sym!(UpperExp), - FormatTrait::Octal => sym!(Octal), + FormatTrait::LowerExp => sym::LowerExp, + FormatTrait::UpperExp => sym::UpperExp, + FormatTrait::Octal => sym::Octal, FormatTrait::Pointer => sym::Pointer, - FormatTrait::Binary => sym!(Binary), - FormatTrait::LowerHex => sym!(LowerHex), - FormatTrait::UpperHex => sym!(UpperHex), + FormatTrait::Binary => sym::Binary, + FormatTrait::LowerHex => sym::LowerHex, + FormatTrait::UpperHex => sym::UpperHex, } && trait_name == self.format_trait_impl.name && let Ok(index) = placeholder.argument.index diff --git a/src/tools/clippy/clippy_lints/src/format_push_string.rs b/src/tools/clippy/clippy_lints/src/format_push_string.rs index 68cc50f39391f..b64d608c0c709 100644 --- a/src/tools/clippy/clippy_lints/src/format_push_string.rs +++ b/src/tools/clippy/clippy_lints/src/format_push_string.rs @@ -1,7 +1,7 @@ use clippy_utils::diagnostics::span_lint_and_then; use clippy_utils::higher; use clippy_utils::ty::is_type_lang_item; -use rustc_hir::{BinOpKind, Expr, ExprKind, LangItem, MatchSource}; +use rustc_hir::{AssignOpKind, Expr, ExprKind, LangItem, MatchSource}; use rustc_lint::{LateContext, LateLintPass}; use rustc_session::declare_lint_pass; use rustc_span::sym; @@ -77,7 +77,7 @@ impl<'tcx> LateLintPass<'tcx> for FormatPushString { return; } }, - ExprKind::AssignOp(op, left, arg) if op.node == BinOpKind::Add && is_string(cx, left) => arg, + ExprKind::AssignOp(op, left, arg) if op.node == AssignOpKind::AddAssign && is_string(cx, left) => arg, _ => return, }; if is_format(cx, arg) { diff --git a/src/tools/clippy/clippy_lints/src/formatting.rs b/src/tools/clippy/clippy_lints/src/formatting.rs index c8fe7ac73cb34..4b482f7b233b8 100644 --- a/src/tools/clippy/clippy_lints/src/formatting.rs +++ b/src/tools/clippy/clippy_lints/src/formatting.rs @@ -138,27 +138,28 @@ impl EarlyLintPass for Formatting { /// Implementation of the `SUSPICIOUS_ASSIGNMENT_FORMATTING` lint. fn check_assign(cx: &EarlyContext<'_>, expr: &Expr) { - if let ExprKind::Assign(ref lhs, ref rhs, _) = expr.kind { - if !lhs.span.from_expansion() && !rhs.span.from_expansion() { - let eq_span = lhs.span.between(rhs.span); - if let ExprKind::Unary(op, ref sub_rhs) = rhs.kind { - if let Some(eq_snippet) = snippet_opt(cx, eq_span) { - let op = op.as_str(); - let eqop_span = lhs.span.between(sub_rhs.span); - if eq_snippet.ends_with('=') { - span_lint_and_note( - cx, - SUSPICIOUS_ASSIGNMENT_FORMATTING, - eqop_span, - format!( - "this looks like you are trying to use `.. {op}= ..`, but you \ + if let ExprKind::Assign(ref lhs, ref rhs, _) = expr.kind + && !lhs.span.from_expansion() + && !rhs.span.from_expansion() + { + let eq_span = lhs.span.between(rhs.span); + if let ExprKind::Unary(op, ref sub_rhs) = rhs.kind + && let Some(eq_snippet) = snippet_opt(cx, eq_span) + { + let op = op.as_str(); + let eqop_span = lhs.span.between(sub_rhs.span); + if eq_snippet.ends_with('=') { + span_lint_and_note( + cx, + SUSPICIOUS_ASSIGNMENT_FORMATTING, + eqop_span, + format!( + "this looks like you are trying to use `.. {op}= ..`, but you \ really are doing `.. = ({op} ..)`" - ), - None, - format!("to remove this lint, use either `{op}=` or `= {op}`"), - ); - } - } + ), + None, + format!("to remove this lint, use either `{op}=` or `= {op}`"), + ); } } } diff --git a/src/tools/clippy/clippy_lints/src/from_raw_with_void_ptr.rs b/src/tools/clippy/clippy_lints/src/from_raw_with_void_ptr.rs index c8828c9361576..5e2e2c9dbf725 100644 --- a/src/tools/clippy/clippy_lints/src/from_raw_with_void_ptr.rs +++ b/src/tools/clippy/clippy_lints/src/from_raw_with_void_ptr.rs @@ -1,12 +1,11 @@ use clippy_utils::diagnostics::span_lint_and_help; -use clippy_utils::path_def_id; use clippy_utils::ty::is_c_void; +use clippy_utils::{path_def_id, sym}; use rustc_hir::def_id::DefId; use rustc_hir::{Expr, ExprKind, QPath}; use rustc_lint::{LateContext, LateLintPass}; use rustc_middle::ty; use rustc_session::declare_lint_pass; -use rustc_span::sym; declare_clippy_lint! { /// ### What it does @@ -41,7 +40,7 @@ impl LateLintPass<'_> for FromRawWithVoidPtr { fn check_expr(&mut self, cx: &LateContext<'_>, expr: &Expr<'_>) { if let ExprKind::Call(box_from_raw, [arg]) = expr.kind && let ExprKind::Path(QPath::TypeRelative(ty, seg)) = box_from_raw.kind - && seg.ident.name.as_str() == "from_raw" + && seg.ident.name == sym::from_raw && let Some(type_str) = path_def_id(cx, ty).and_then(|id| def_id_matches_type(cx, id)) && let arg_kind = cx.typeck_results().expr_ty(arg).kind() && let ty::RawPtr(ty, _) = arg_kind diff --git a/src/tools/clippy/clippy_lints/src/from_str_radix_10.rs b/src/tools/clippy/clippy_lints/src/from_str_radix_10.rs index 7361546153c27..b816963cc825b 100644 --- a/src/tools/clippy/clippy_lints/src/from_str_radix_10.rs +++ b/src/tools/clippy/clippy_lints/src/from_str_radix_10.rs @@ -1,13 +1,12 @@ use clippy_utils::diagnostics::span_lint_and_sugg; use clippy_utils::sugg::Sugg; use clippy_utils::ty::{is_type_diagnostic_item, is_type_lang_item}; -use clippy_utils::{is_in_const_context, is_integer_literal}; +use clippy_utils::{is_in_const_context, is_integer_literal, sym}; use rustc_errors::Applicability; use rustc_hir::{Expr, ExprKind, LangItem, PrimTy, QPath, TyKind, def}; use rustc_lint::{LateContext, LateLintPass}; use rustc_middle::ty::Ty; use rustc_session::declare_lint_pass; -use rustc_span::symbol::sym; declare_clippy_lint! { /// ### What it does @@ -53,7 +52,7 @@ impl<'tcx> LateLintPass<'tcx> for FromStrRadix10 { // check if the second part of the path indeed calls the associated // function `from_str_radix` - && pathseg.ident.name.as_str() == "from_str_radix" + && pathseg.ident.name == sym::from_str_radix // check if the first part of the path is some integer primitive && let TyKind::Path(ty_qpath) = &ty.kind @@ -73,7 +72,7 @@ impl<'tcx> LateLintPass<'tcx> for FromStrRadix10 { }; let sugg = - Sugg::hir_with_applicability(cx, expr, "", &mut Applicability::MachineApplicable).maybe_par(); + Sugg::hir_with_applicability(cx, expr, "", &mut Applicability::MachineApplicable).maybe_paren(); span_lint_and_sugg( cx, diff --git a/src/tools/clippy/clippy_lints/src/functions/misnamed_getters.rs b/src/tools/clippy/clippy_lints/src/functions/misnamed_getters.rs index 854fe144c2919..fa63876410f01 100644 --- a/src/tools/clippy/clippy_lints/src/functions/misnamed_getters.rs +++ b/src/tools/clippy/clippy_lints/src/functions/misnamed_getters.rs @@ -2,7 +2,7 @@ use clippy_utils::diagnostics::span_lint_and_then; use clippy_utils::source::snippet; use rustc_errors::Applicability; use rustc_hir::intravisit::FnKind; -use rustc_hir::{Body, ExprKind, FnDecl, ImplicitSelfKind}; +use rustc_hir::{BlockCheckMode, Body, ExprKind, FnDecl, ImplicitSelfKind, UnsafeSource}; use rustc_lint::LateContext; use rustc_middle::ty; use rustc_span::Span; @@ -40,14 +40,25 @@ pub fn check_fn(cx: &LateContext<'_>, kind: FnKind<'_>, decl: &FnDecl<'_>, body: name }; - // Body must be &(mut) .name + // Body must be `&(mut) .name`, potentially in an `unsafe` block // self_data is not necessarily self, to also lint sub-getters, etc… let block_expr = if let ExprKind::Block(block, _) = body.value.kind && block.stmts.is_empty() && let Some(block_expr) = block.expr { - block_expr + if let ExprKind::Block(unsafe_block, _) = block_expr.kind + && unsafe_block.stmts.is_empty() + && matches!( + unsafe_block.rules, + BlockCheckMode::UnsafeBlock(UnsafeSource::UserProvided) + ) + && let Some(unsafe_block_expr) = unsafe_block.expr + { + unsafe_block_expr + } else { + block_expr + } } else { return; }; diff --git a/src/tools/clippy/clippy_lints/src/functions/renamed_function_params.rs b/src/tools/clippy/clippy_lints/src/functions/renamed_function_params.rs index 041f6228fba2d..2d22bb157a93c 100644 --- a/src/tools/clippy/clippy_lints/src/functions/renamed_function_params.rs +++ b/src/tools/clippy/clippy_lints/src/functions/renamed_function_params.rs @@ -22,8 +22,8 @@ pub(super) fn check_impl_item(cx: &LateContext<'_>, item: &ImplItem<'_>, ignored && let Some(did) = trait_item_def_id_of_impl(items, item.owner_id) && !is_from_ignored_trait(trait_ref, ignored_traits) { - let mut param_idents_iter = cx.tcx.hir_body_param_names(body_id); - let mut default_param_idents_iter = cx.tcx.fn_arg_names(did).iter().copied(); + let mut param_idents_iter = cx.tcx.hir_body_param_idents(body_id); + let mut default_param_idents_iter = cx.tcx.fn_arg_idents(did).iter().copied(); let renames = RenamedFnArgs::new(&mut default_param_idents_iter, &mut param_idents_iter); if !renames.0.is_empty() { @@ -59,9 +59,7 @@ impl RenamedFnArgs { let mut renamed: Vec<(Span, String)> = vec![]; debug_assert!(default_idents.size_hint() == current_idents.size_hint()); - while let (Some(default_ident), Some(current_ident)) = - (default_idents.next(), current_idents.next()) - { + while let (Some(default_ident), Some(current_ident)) = (default_idents.next(), current_idents.next()) { let has_name_to_check = |ident: Option| { if let Some(ident) = ident && ident.name != kw::Underscore diff --git a/src/tools/clippy/clippy_lints/src/functions/too_many_arguments.rs b/src/tools/clippy/clippy_lints/src/functions/too_many_arguments.rs index 05dc47f6fe580..48d050aa36aa0 100644 --- a/src/tools/clippy/clippy_lints/src/functions/too_many_arguments.rs +++ b/src/tools/clippy/clippy_lints/src/functions/too_many_arguments.rs @@ -47,16 +47,16 @@ pub(super) fn check_fn( } pub(super) fn check_trait_item(cx: &LateContext<'_>, item: &hir::TraitItem<'_>, too_many_arguments_threshold: u64) { - if let hir::TraitItemKind::Fn(ref sig, _) = item.kind { + if let hir::TraitItemKind::Fn(ref sig, _) = item.kind // don't lint extern functions decls, it's not their fault - if sig.header.abi == ExternAbi::Rust { - check_arg_number( - cx, - sig.decl, - item.span.with_hi(sig.decl.output.span().hi()), - too_many_arguments_threshold, - ); - } + && sig.header.abi == ExternAbi::Rust + { + check_arg_number( + cx, + sig.decl, + item.span.with_hi(sig.decl.output.span().hi()), + too_many_arguments_threshold, + ); } } diff --git a/src/tools/clippy/clippy_lints/src/if_then_some_else_none.rs b/src/tools/clippy/clippy_lints/src/if_then_some_else_none.rs index fbbd33efd02d6..9e94280fc0746 100644 --- a/src/tools/clippy/clippy_lints/src/if_then_some_else_none.rs +++ b/src/tools/clippy/clippy_lints/src/if_then_some_else_none.rs @@ -94,7 +94,7 @@ impl<'tcx> LateLintPass<'tcx> for IfThenSomeElseNone { |diag| { let mut app = Applicability::MachineApplicable; let cond_snip = Sugg::hir_with_context(cx, cond, expr.span.ctxt(), "[condition]", &mut app) - .maybe_par() + .maybe_paren() .to_string(); let arg_snip = snippet_with_context(cx, then_arg.span, ctxt, "[body]", &mut app).0; let method_body = if let Some(first_stmt) = then_block.stmts.first() { diff --git a/src/tools/clippy/clippy_lints/src/implicit_hasher.rs b/src/tools/clippy/clippy_lints/src/implicit_hasher.rs index d2545e57652a8..4c17834c3adf9 100644 --- a/src/tools/clippy/clippy_lints/src/implicit_hasher.rs +++ b/src/tools/clippy/clippy_lints/src/implicit_hasher.rs @@ -10,10 +10,10 @@ use rustc_middle::hir::nested_filter; use rustc_middle::ty::{Ty, TypeckResults}; use rustc_session::declare_lint_pass; use rustc_span::Span; -use rustc_span::symbol::sym; use clippy_utils::diagnostics::span_lint_and_then; use clippy_utils::source::{IntoSpan, SpanRangeExt, snippet}; +use clippy_utils::sym; use clippy_utils::ty::is_type_diagnostic_item; declare_clippy_lint! { @@ -326,6 +326,7 @@ impl<'tcx> Visitor<'tcx> for ImplicitHasherConstructorVisitor<'_, '_, 'tcx> { fn visit_expr(&mut self, e: &'tcx Expr<'_>) { if let ExprKind::Call(fun, args) = e.kind && let ExprKind::Path(QPath::TypeRelative(ty, method)) = fun.kind + && matches!(method.ident.name, sym::new | sym::with_capacity) && let TyKind::Path(QPath::Resolved(None, ty_path)) = ty.kind && let Some(ty_did) = ty_path.res.opt_def_id() { @@ -333,10 +334,11 @@ impl<'tcx> Visitor<'tcx> for ImplicitHasherConstructorVisitor<'_, '_, 'tcx> { return; } - if self.cx.tcx.is_diagnostic_item(sym::HashMap, ty_did) { - if method.ident.name == sym::new { + match (self.cx.tcx.get_diagnostic_name(ty_did), method.ident.name) { + (Some(sym::HashMap), sym::new) => { self.suggestions.insert(e.span, "HashMap::default()".to_string()); - } else if method.ident.name.as_str() == "with_capacity" { + }, + (Some(sym::HashMap), sym::with_capacity) => { self.suggestions.insert( e.span, format!( @@ -344,11 +346,11 @@ impl<'tcx> Visitor<'tcx> for ImplicitHasherConstructorVisitor<'_, '_, 'tcx> { snippet(self.cx, args[0].span, "capacity"), ), ); - } - } else if self.cx.tcx.is_diagnostic_item(sym::HashSet, ty_did) { - if method.ident.name == sym::new { + }, + (Some(sym::HashSet), sym::new) => { self.suggestions.insert(e.span, "HashSet::default()".to_string()); - } else if method.ident.name.as_str() == "with_capacity" { + }, + (Some(sym::HashSet), sym::with_capacity) => { self.suggestions.insert( e.span, format!( @@ -356,7 +358,8 @@ impl<'tcx> Visitor<'tcx> for ImplicitHasherConstructorVisitor<'_, '_, 'tcx> { snippet(self.cx, args[0].span, "capacity"), ), ); - } + }, + _ => {}, } } diff --git a/src/tools/clippy/clippy_lints/src/implicit_return.rs b/src/tools/clippy/clippy_lints/src/implicit_return.rs index 5f95464e4d494..076017a247b4b 100644 --- a/src/tools/clippy/clippy_lints/src/implicit_return.rs +++ b/src/tools/clippy/clippy_lints/src/implicit_return.rs @@ -1,7 +1,7 @@ use clippy_utils::diagnostics::span_lint_hir_and_then; use clippy_utils::source::{snippet_with_applicability, snippet_with_context, walk_span_to_context}; use clippy_utils::visitors::for_each_expr_without_closures; -use clippy_utils::{get_async_fn_body, is_async_fn, is_from_proc_macro}; +use clippy_utils::{desugar_await, get_async_closure_expr, get_async_fn_body, is_async_fn, is_from_proc_macro}; use core::ops::ControlFlow; use rustc_errors::Applicability; use rustc_hir::intravisit::FnKind; @@ -134,6 +134,10 @@ fn lint_implicit_returns( }, ExprKind::Match(_, arms, _) => { + if let Some(await_expr) = desugar_await(expr) { + lint_return(cx, await_expr.hir_id, await_expr.span); + return LintLocation::Inner; + } for arm in arms { let res = lint_implicit_returns( cx, @@ -153,18 +157,18 @@ fn lint_implicit_returns( ExprKind::Loop(block, ..) => { let mut add_return = false; let _: Option = for_each_expr_without_closures(block, |e| { - if let ExprKind::Break(dest, sub_expr) = e.kind { - if dest.target_id.ok() == Some(expr.hir_id) { - if call_site_span.is_none() && e.span.ctxt() == ctxt { - // At this point sub_expr can be `None` in async functions which either diverge, or return - // the unit type. - if let Some(sub_expr) = sub_expr { - lint_break(cx, e.hir_id, e.span, sub_expr.span); - } - } else { - // the break expression is from a macro call, add a return to the loop - add_return = true; + if let ExprKind::Break(dest, sub_expr) = e.kind + && dest.target_id.ok() == Some(expr.hir_id) + { + if call_site_span.is_none() && e.span.ctxt() == ctxt { + // At this point sub_expr can be `None` in async functions which either diverge, or return + // the unit type. + if let Some(sub_expr) = sub_expr { + lint_break(cx, e.hir_id, e.span, sub_expr.span); } + } else { + // the break expression is from a macro call, add a return to the loop + add_return = true; } } ControlFlow::Continue(()) @@ -241,6 +245,8 @@ impl<'tcx> LateLintPass<'tcx> for ImplicitReturn { Some(e) => e, None => return, } + } else if let Some(expr) = get_async_closure_expr(cx.tcx, body.value) { + expr } else { body.value }; diff --git a/src/tools/clippy/clippy_lints/src/implicit_saturating_add.rs b/src/tools/clippy/clippy_lints/src/implicit_saturating_add.rs index 41d2b18803d95..185fc2aa2d4ac 100644 --- a/src/tools/clippy/clippy_lints/src/implicit_saturating_add.rs +++ b/src/tools/clippy/clippy_lints/src/implicit_saturating_add.rs @@ -5,7 +5,7 @@ use clippy_utils::source::snippet_with_context; use rustc_ast::ast::{LitIntType, LitKind}; use rustc_data_structures::packed::Pu128; use rustc_errors::Applicability; -use rustc_hir::{BinOpKind, Block, Expr, ExprKind, Stmt, StmtKind}; +use rustc_hir::{AssignOpKind, BinOpKind, Block, Expr, ExprKind, Stmt, StmtKind}; use rustc_lint::{LateContext, LateLintPass}; use rustc_middle::ty::{IntTy, Ty, UintTy}; use rustc_session::declare_lint_pass; @@ -68,7 +68,7 @@ impl<'tcx> LateLintPass<'tcx> for ImplicitSaturatingAdd { && ex.span.ctxt() == ctxt && expr1.span.ctxt() == ctxt && clippy_utils::SpanlessEq::new(cx).eq_expr(l, target) - && BinOpKind::Add == op1.node + && AssignOpKind::AddAssign == op1.node && let ExprKind::Lit(lit) = value.kind && let LitKind::Int(Pu128(1), LitIntType::Unsuffixed) = lit.node && block.expr.is_none() diff --git a/src/tools/clippy/clippy_lints/src/implicit_saturating_sub.rs b/src/tools/clippy/clippy_lints/src/implicit_saturating_sub.rs index cbc3e2ccd5b87..514e72a48682d 100644 --- a/src/tools/clippy/clippy_lints/src/implicit_saturating_sub.rs +++ b/src/tools/clippy/clippy_lints/src/implicit_saturating_sub.rs @@ -8,7 +8,7 @@ use clippy_utils::{ use rustc_ast::ast::LitKind; use rustc_data_structures::packed::Pu128; use rustc_errors::Applicability; -use rustc_hir::{BinOp, BinOpKind, Expr, ExprKind, QPath}; +use rustc_hir::{AssignOpKind, BinOp, BinOpKind, Expr, ExprKind, QPath}; use rustc_lint::{LateContext, LateLintPass}; use rustc_session::impl_lint_pass; use rustc_span::Span; @@ -239,7 +239,7 @@ fn check_subtraction( // This part of the condition is voluntarily split from the one before to ensure that // if `snippet_opt` fails, it won't try the next conditions. if (!is_in_const_context(cx) || msrv.meets(cx, msrvs::SATURATING_SUB_CONST)) - && let Some(big_expr_sugg) = Sugg::hir_opt(cx, big_expr).map(Sugg::maybe_par) + && let Some(big_expr_sugg) = Sugg::hir_opt(cx, big_expr).map(Sugg::maybe_paren) && let Some(little_expr_sugg) = Sugg::hir_opt(cx, little_expr) { let sugg = format!( @@ -366,7 +366,7 @@ fn subtracts_one<'a>(cx: &LateContext<'_>, expr: &'a Expr<'a>) -> Option<&'a Exp match peel_blocks_with_stmt(expr).kind { ExprKind::AssignOp(ref op1, target, value) => { // Check if literal being subtracted is one - (BinOpKind::Sub == op1.node && is_integer_literal(value, 1)).then_some(target) + (AssignOpKind::SubAssign == op1.node && is_integer_literal(value, 1)).then_some(target) }, ExprKind::Assign(target, value, _) => { if let ExprKind::Binary(ref op1, left1, right1) = value.kind diff --git a/src/tools/clippy/clippy_lints/src/implied_bounds_in_impls.rs b/src/tools/clippy/clippy_lints/src/implied_bounds_in_impls.rs index d02d9b2102bda..6b89abdb0367f 100644 --- a/src/tools/clippy/clippy_lints/src/implied_bounds_in_impls.rs +++ b/src/tools/clippy/clippy_lints/src/implied_bounds_in_impls.rs @@ -8,7 +8,7 @@ use rustc_hir::{ }; use rustc_hir_analysis::lower_ty; use rustc_lint::{LateContext, LateLintPass}; -use rustc_middle::ty::{self, ClauseKind, Generics, Ty, TyCtxt}; +use rustc_middle::ty::{self, AssocItem, ClauseKind, Generics, Ty, TyCtxt}; use rustc_session::declare_lint_pass; use rustc_span::Span; @@ -315,7 +315,7 @@ fn check<'tcx>(cx: &LateContext<'tcx>, bounds: GenericBounds<'tcx>) { assocs .filter_by_name_unhygienic(constraint.ident.name) .next() - .is_some_and(|assoc| assoc.kind == ty::AssocKind::Type) + .is_some_and(AssocItem::is_type) }) { emit_lint(cx, poly_trait, bounds, index, implied_constraints, bound); diff --git a/src/tools/clippy/clippy_lints/src/inconsistent_struct_constructor.rs b/src/tools/clippy/clippy_lints/src/inconsistent_struct_constructor.rs index e1dd7872b9d48..e6129757e560f 100644 --- a/src/tools/clippy/clippy_lints/src/inconsistent_struct_constructor.rs +++ b/src/tools/clippy/clippy_lints/src/inconsistent_struct_constructor.rs @@ -65,13 +65,13 @@ declare_clippy_lint! { } pub struct InconsistentStructConstructor { - lint_inconsistent_struct_field_initializers: bool, + check_inconsistent_struct_field_initializers: bool, } impl InconsistentStructConstructor { pub fn new(conf: &'static Conf) -> Self { Self { - lint_inconsistent_struct_field_initializers: conf.lint_inconsistent_struct_field_initializers, + check_inconsistent_struct_field_initializers: conf.check_inconsistent_struct_field_initializers, } } } @@ -86,7 +86,7 @@ impl<'tcx> LateLintPass<'tcx> for InconsistentStructConstructor { let all_fields_are_shorthand = fields.iter().all(|f| f.is_shorthand); let applicability = if all_fields_are_shorthand { Applicability::MachineApplicable - } else if self.lint_inconsistent_struct_field_initializers { + } else if self.check_inconsistent_struct_field_initializers { Applicability::MaybeIncorrect } else { return; diff --git a/src/tools/clippy/clippy_lints/src/index_refutable_slice.rs b/src/tools/clippy/clippy_lints/src/index_refutable_slice.rs index d53e139de014b..989997d69f7c7 100644 --- a/src/tools/clippy/clippy_lints/src/index_refutable_slice.rs +++ b/src/tools/clippy/clippy_lints/src/index_refutable_slice.rs @@ -248,7 +248,7 @@ impl<'tcx> Visitor<'tcx> for SliceIndexLintingVisitor<'_, 'tcx> { { use_info .index_use - .push((index_value, cx.tcx.hir().span(parent_expr.hir_id))); + .push((index_value, cx.tcx.hir_span(parent_expr.hir_id))); return; } diff --git a/src/tools/clippy/clippy_lints/src/indexing_slicing.rs b/src/tools/clippy/clippy_lints/src/indexing_slicing.rs index 33431385c7de3..99a393b4d53af 100644 --- a/src/tools/clippy/clippy_lints/src/indexing_slicing.rs +++ b/src/tools/clippy/clippy_lints/src/indexing_slicing.rs @@ -2,13 +2,12 @@ use clippy_config::Conf; use clippy_utils::consts::{ConstEvalCtxt, Constant}; use clippy_utils::diagnostics::{span_lint, span_lint_and_then}; use clippy_utils::ty::{deref_chain, get_adt_inherent_method}; -use clippy_utils::{higher, is_from_proc_macro, is_in_test}; +use clippy_utils::{higher, is_from_proc_macro, is_in_test, sym}; use rustc_ast::ast::RangeLimits; use rustc_hir::{Expr, ExprKind}; use rustc_lint::{LateContext, LateLintPass}; use rustc_middle::ty::{self, Ty}; use rustc_session::impl_lint_pass; -use rustc_span::sym; declare_clippy_lint! { /// ### What it does @@ -136,28 +135,28 @@ impl<'tcx> LateLintPass<'tcx> for IndexingSlicing { let const_range = to_const_range(cx, range, size); - if let (Some(start), _) = const_range { - if start > size { - span_lint( - cx, - OUT_OF_BOUNDS_INDEXING, - range.start.map_or(expr.span, |start| start.span), - "range is out of bounds", - ); - return; - } + if let (Some(start), _) = const_range + && start > size + { + span_lint( + cx, + OUT_OF_BOUNDS_INDEXING, + range.start.map_or(expr.span, |start| start.span), + "range is out of bounds", + ); + return; } - if let (_, Some(end)) = const_range { - if end > size { - span_lint( - cx, - OUT_OF_BOUNDS_INDEXING, - range.end.map_or(expr.span, |end| end.span), - "range is out of bounds", - ); - return; - } + if let (_, Some(end)) = const_range + && end > size + { + span_lint( + cx, + OUT_OF_BOUNDS_INDEXING, + range.end.map_or(expr.span, |end| end.span), + "range is out of bounds", + ); + return; } if let (Some(_), Some(_)) = const_range { @@ -268,7 +267,7 @@ fn ty_has_applicable_get_function<'tcx>( index_expr: &Expr<'_>, ) -> bool { if let ty::Adt(_, _) = array_ty.kind() - && let Some(get_output_ty) = get_adt_inherent_method(cx, ty, sym!(get)).map(|m| { + && let Some(get_output_ty) = get_adt_inherent_method(cx, ty, sym::get).map(|m| { cx.tcx .fn_sig(m.def_id) .skip_binder() diff --git a/src/tools/clippy/clippy_lints/src/infinite_iter.rs b/src/tools/clippy/clippy_lints/src/infinite_iter.rs index 960b9aa032bea..c4e10837bf192 100644 --- a/src/tools/clippy/clippy_lints/src/infinite_iter.rs +++ b/src/tools/clippy/clippy_lints/src/infinite_iter.rs @@ -1,10 +1,9 @@ use clippy_utils::diagnostics::span_lint; -use clippy_utils::higher; use clippy_utils::ty::{get_type_diagnostic_name, implements_trait}; +use clippy_utils::{higher, sym}; use rustc_hir::{BorrowKind, Closure, Expr, ExprKind}; use rustc_lint::{LateContext, LateLintPass}; use rustc_session::declare_lint_pass; -use rustc_span::symbol::sym; declare_clippy_lint! { /// ### What it does @@ -156,11 +155,12 @@ fn is_infinite(cx: &LateContext<'_>, expr: &Expr<'_>) -> Finiteness { .and(cap); } } - if method.ident.name.as_str() == "flat_map" && args.len() == 1 { - if let ExprKind::Closure(&Closure { body, .. }) = args[0].kind { - let body = cx.tcx.hir_body(body); - return is_infinite(cx, body.value); - } + if method.ident.name == sym::flat_map + && args.len() == 1 + && let ExprKind::Closure(&Closure { body, .. }) = args[0].kind + { + let body = cx.tcx.hir_body(body); + return is_infinite(cx, body.value); } Finite }, @@ -223,7 +223,7 @@ fn complete_infinite_iter(cx: &LateContext<'_>, expr: &Expr<'_>) -> Finiteness { return MaybeInfinite.and(is_infinite(cx, receiver)); } } - if method.ident.name.as_str() == "last" && args.is_empty() { + if method.ident.name == sym::last && args.is_empty() { let not_double_ended = cx .tcx .get_diagnostic_item(sym::DoubleEndedIterator) @@ -231,7 +231,7 @@ fn complete_infinite_iter(cx: &LateContext<'_>, expr: &Expr<'_>) -> Finiteness { if not_double_ended { return is_infinite(cx, receiver); } - } else if method.ident.name.as_str() == "collect" { + } else if method.ident.name == sym::collect { let ty = cx.typeck_results().expr_ty(expr); if matches!( get_type_diagnostic_name(cx, ty), diff --git a/src/tools/clippy/clippy_lints/src/instant_subtraction.rs b/src/tools/clippy/clippy_lints/src/instant_subtraction.rs index 4ae1119ab3a27..91f65d0b79ca0 100644 --- a/src/tools/clippy/clippy_lints/src/instant_subtraction.rs +++ b/src/tools/clippy/clippy_lints/src/instant_subtraction.rs @@ -123,7 +123,7 @@ fn print_manual_instant_elapsed_sugg(cx: &LateContext<'_>, expr: &Expr<'_>, sugg expr.span, "manual implementation of `Instant::elapsed`", "try", - format!("{}.elapsed()", sugg.maybe_par()), + format!("{}.elapsed()", sugg.maybe_paren()), Applicability::MachineApplicable, ); } diff --git a/src/tools/clippy/clippy_lints/src/int_plus_one.rs b/src/tools/clippy/clippy_lints/src/int_plus_one.rs index fc575bff7e63f..67ce57de254d3 100644 --- a/src/tools/clippy/clippy_lints/src/int_plus_one.rs +++ b/src/tools/clippy/clippy_lints/src/int_plus_one.rs @@ -130,14 +130,14 @@ impl IntPlusOne { BinOpKind::Le => "<", _ => return None, }; - if let Some(snippet) = node.span.get_source_text(cx) { - if let Some(other_side_snippet) = other_side.span.get_source_text(cx) { - let rec = match side { - Side::Lhs => Some(format!("{snippet} {binop_string} {other_side_snippet}")), - Side::Rhs => Some(format!("{other_side_snippet} {binop_string} {snippet}")), - }; - return rec; - } + if let Some(snippet) = node.span.get_source_text(cx) + && let Some(other_side_snippet) = other_side.span.get_source_text(cx) + { + let rec = match side { + Side::Lhs => Some(format!("{snippet} {binop_string} {other_side_snippet}")), + Side::Rhs => Some(format!("{other_side_snippet} {binop_string} {snippet}")), + }; + return rec; } None } @@ -157,10 +157,10 @@ impl IntPlusOne { impl EarlyLintPass for IntPlusOne { fn check_expr(&mut self, cx: &EarlyContext<'_>, item: &Expr) { - if let ExprKind::Binary(ref kind, ref lhs, ref rhs) = item.kind { - if let Some(rec) = Self::check_binop(cx, kind.node, lhs, rhs) { - Self::emit_warning(cx, item, rec); - } + if let ExprKind::Binary(ref kind, ref lhs, ref rhs) = item.kind + && let Some(rec) = Self::check_binop(cx, kind.node, lhs, rhs) + { + Self::emit_warning(cx, item, rec); } } } diff --git a/src/tools/clippy/clippy_lints/src/invalid_upcast_comparisons.rs b/src/tools/clippy/clippy_lints/src/invalid_upcast_comparisons.rs index b42664340d1c8..b0ecc5d52ddb8 100644 --- a/src/tools/clippy/clippy_lints/src/invalid_upcast_comparisons.rs +++ b/src/tools/clippy/clippy_lints/src/invalid_upcast_comparisons.rs @@ -91,49 +91,49 @@ fn upcast_comparison_bounds_err<'tcx>( rhs: &'tcx Expr<'_>, invert: bool, ) { - if let Some((lb, ub)) = lhs_bounds { - if let Some(norm_rhs_val) = ConstEvalCtxt::new(cx).eval_full_int(rhs) { - if rel == Rel::Eq || rel == Rel::Ne { - if norm_rhs_val < lb || norm_rhs_val > ub { - err_upcast_comparison(cx, span, lhs, rel == Rel::Ne); - } - } else if match rel { - Rel::Lt => { - if invert { - norm_rhs_val < lb - } else { - ub < norm_rhs_val - } - }, - Rel::Le => { - if invert { - norm_rhs_val <= lb - } else { - ub <= norm_rhs_val - } - }, - Rel::Eq | Rel::Ne => unreachable!(), - } { - err_upcast_comparison(cx, span, lhs, true); - } else if match rel { - Rel::Lt => { - if invert { - norm_rhs_val >= ub - } else { - lb >= norm_rhs_val - } - }, - Rel::Le => { - if invert { - norm_rhs_val > ub - } else { - lb > norm_rhs_val - } - }, - Rel::Eq | Rel::Ne => unreachable!(), - } { - err_upcast_comparison(cx, span, lhs, false); + if let Some((lb, ub)) = lhs_bounds + && let Some(norm_rhs_val) = ConstEvalCtxt::new(cx).eval_full_int(rhs) + { + if rel == Rel::Eq || rel == Rel::Ne { + if norm_rhs_val < lb || norm_rhs_val > ub { + err_upcast_comparison(cx, span, lhs, rel == Rel::Ne); } + } else if match rel { + Rel::Lt => { + if invert { + norm_rhs_val < lb + } else { + ub < norm_rhs_val + } + }, + Rel::Le => { + if invert { + norm_rhs_val <= lb + } else { + ub <= norm_rhs_val + } + }, + Rel::Eq | Rel::Ne => unreachable!(), + } { + err_upcast_comparison(cx, span, lhs, true); + } else if match rel { + Rel::Lt => { + if invert { + norm_rhs_val >= ub + } else { + lb >= norm_rhs_val + } + }, + Rel::Le => { + if invert { + norm_rhs_val > ub + } else { + lb > norm_rhs_val + } + }, + Rel::Eq | Rel::Ne => unreachable!(), + } { + err_upcast_comparison(cx, span, lhs, false); } } } diff --git a/src/tools/clippy/clippy_lints/src/item_name_repetitions.rs b/src/tools/clippy/clippy_lints/src/item_name_repetitions.rs index 977fd5fce15be..b1271a264b548 100644 --- a/src/tools/clippy/clippy_lints/src/item_name_repetitions.rs +++ b/src/tools/clippy/clippy_lints/src/item_name_repetitions.rs @@ -377,22 +377,21 @@ impl ItemNameRepetitions { "field name starts with the struct's name", ); } - if field_words.len() > item_name_words.len() { + if field_words.len() > item_name_words.len() // lint only if the end is not covered by the start - if field_words + && field_words .iter() .rev() .zip(item_name_words.iter().rev()) .all(|(a, b)| a == b) - { - span_lint_hir( - cx, - STRUCT_FIELD_NAMES, - field.hir_id, - field.span, - "field name ends with the struct's name", - ); - } + { + span_lint_hir( + cx, + STRUCT_FIELD_NAMES, + field.hir_id, + field.span, + "field name ends with the struct's name", + ); } } } @@ -445,57 +444,56 @@ impl LateLintPass<'_> for ItemNameRepetitions { let item_name = ident.name.as_str(); let item_camel = to_camel_case(item_name); - if !item.span.from_expansion() && is_present_in_source(cx, item.span) { - if let [.., (mod_name, mod_camel, mod_owner_id)] = &*self.modules { - // constants don't have surrounding modules - if !mod_camel.is_empty() { - if mod_name == &ident.name - && let ItemKind::Mod(..) = item.kind - && (!self.allow_private_module_inception || cx.tcx.visibility(mod_owner_id.def_id).is_public()) - { - span_lint( - cx, - MODULE_INCEPTION, - item.span, - "module has the same name as its containing module", - ); - } + if !item.span.from_expansion() && is_present_in_source(cx, item.span) + && let [.., (mod_name, mod_camel, mod_owner_id)] = &*self.modules + // constants don't have surrounding modules + && !mod_camel.is_empty() + { + if mod_name == &ident.name + && let ItemKind::Mod(..) = item.kind + && (!self.allow_private_module_inception || cx.tcx.visibility(mod_owner_id.def_id).is_public()) + { + span_lint( + cx, + MODULE_INCEPTION, + item.span, + "module has the same name as its containing module", + ); + } - // The `module_name_repetitions` lint should only trigger if the item has the module in its - // name. Having the same name is accepted. - if cx.tcx.visibility(item.owner_id).is_public() - && cx.tcx.visibility(mod_owner_id.def_id).is_public() - && item_camel.len() > mod_camel.len() - { - let matching = count_match_start(mod_camel, &item_camel); - let rmatching = count_match_end(mod_camel, &item_camel); - let nchars = mod_camel.chars().count(); - - let is_word_beginning = |c: char| c == '_' || c.is_uppercase() || c.is_numeric(); - - if matching.char_count == nchars { - match item_camel.chars().nth(nchars) { - Some(c) if is_word_beginning(c) => span_lint( - cx, - MODULE_NAME_REPETITIONS, - ident.span, - "item name starts with its containing module's name", - ), - _ => (), - } - } - if rmatching.char_count == nchars - && !self.is_allowed_prefix(&item_camel[..item_camel.len() - rmatching.byte_count]) - { - span_lint( - cx, - MODULE_NAME_REPETITIONS, - ident.span, - "item name ends with its containing module's name", - ); - } + // The `module_name_repetitions` lint should only trigger if the item has the module in its + // name. Having the same name is accepted. + if cx.tcx.visibility(item.owner_id).is_public() + && cx.tcx.visibility(mod_owner_id.def_id).is_public() + && item_camel.len() > mod_camel.len() + { + let matching = count_match_start(mod_camel, &item_camel); + let rmatching = count_match_end(mod_camel, &item_camel); + let nchars = mod_camel.chars().count(); + + let is_word_beginning = |c: char| c == '_' || c.is_uppercase() || c.is_numeric(); + + if matching.char_count == nchars { + match item_camel.chars().nth(nchars) { + Some(c) if is_word_beginning(c) => span_lint( + cx, + MODULE_NAME_REPETITIONS, + ident.span, + "item name starts with its containing module's name", + ), + _ => (), } } + if rmatching.char_count == nchars + && !self.is_allowed_prefix(&item_camel[..item_camel.len() - rmatching.byte_count]) + { + span_lint( + cx, + MODULE_NAME_REPETITIONS, + ident.span, + "item name ends with its containing module's name", + ); + } } } diff --git a/src/tools/clippy/clippy_lints/src/iter_without_into_iter.rs b/src/tools/clippy/clippy_lints/src/iter_without_into_iter.rs index 173232c511a57..900b20aa9cfb7 100644 --- a/src/tools/clippy/clippy_lints/src/iter_without_into_iter.rs +++ b/src/tools/clippy/clippy_lints/src/iter_without_into_iter.rs @@ -1,14 +1,13 @@ use clippy_utils::diagnostics::span_lint_and_then; -use clippy_utils::get_parent_as_impl; use clippy_utils::source::snippet; use clippy_utils::ty::{deref_chain, get_adt_inherent_method, implements_trait, make_normalized_projection}; +use clippy_utils::{get_parent_as_impl, sym}; use rustc_ast::Mutability; use rustc_errors::Applicability; use rustc_hir::{FnRetTy, ImplItemKind, ImplicitSelfKind, ItemKind, TyKind}; use rustc_lint::{LateContext, LateLintPass, LintContext}; use rustc_middle::ty::{self, Ty}; use rustc_session::declare_lint_pass; -use rustc_span::sym; declare_clippy_lint! { /// ### What it does @@ -141,7 +140,7 @@ impl LateLintPass<'_> for IterWithoutIntoIter { ty.peel_refs().is_slice() || get_adt_inherent_method(cx, ty, expected_method_name).is_some() }) && let Some(iter_assoc_span) = imp.items.iter().find_map(|item| { - if item.ident.name.as_str() == "IntoIter" { + if item.ident.name == sym::IntoIter { Some(cx.tcx.hir_impl_item(item.id).expect_type().span) } else { None diff --git a/src/tools/clippy/clippy_lints/src/len_zero.rs b/src/tools/clippy/clippy_lints/src/len_zero.rs index 72e22ae59d8f4..aded31971cec0 100644 --- a/src/tools/clippy/clippy_lints/src/len_zero.rs +++ b/src/tools/clippy/clippy_lints/src/len_zero.rs @@ -2,21 +2,22 @@ use clippy_utils::diagnostics::{span_lint, span_lint_and_sugg, span_lint_and_the use clippy_utils::source::{SpanRangeExt, snippet_with_context}; use clippy_utils::sugg::{Sugg, has_enclosing_paren}; use clippy_utils::ty::implements_trait; -use clippy_utils::{get_item_name, get_parent_as_impl, is_lint_allowed, is_trait_method, peel_ref_operators}; +use clippy_utils::{ + fulfill_or_allowed, get_parent_as_impl, is_trait_method, parent_item_name, peel_ref_operators, sym, +}; use rustc_ast::ast::LitKind; use rustc_errors::Applicability; use rustc_hir::def::Res; use rustc_hir::def_id::{DefId, DefIdSet}; use rustc_hir::{ - AssocItemKind, BinOpKind, Expr, ExprKind, FnRetTy, GenericArg, GenericBound, ImplItem, ImplItemKind, + AssocItemKind, BinOpKind, Expr, ExprKind, FnRetTy, GenericArg, GenericBound, HirId, ImplItem, ImplItemKind, ImplicitSelfKind, Item, ItemKind, Mutability, Node, OpaqueTyOrigin, PatExprKind, PatKind, PathSegment, PrimTy, QPath, TraitItemRef, TyKind, }; use rustc_lint::{LateContext, LateLintPass}; -use rustc_middle::ty::{self, AssocKind, FnSig, Ty}; +use rustc_middle::ty::{self, FnSig, Ty}; use rustc_session::declare_lint_pass; use rustc_span::source_map::Spanned; -use rustc_span::symbol::sym; use rustc_span::{Ident, Span, Symbol}; use rustc_trait_selection::traits::supertrait_def_ids; @@ -143,7 +144,6 @@ impl<'tcx> LateLintPass<'tcx> for LenZero { && let Some(ty_id) = cx.qpath_res(ty_path, imp.self_ty.hir_id).opt_def_id() && let Some(local_id) = ty_id.as_local() && let ty_hir_id = cx.tcx.local_def_id_to_hir_id(local_id) - && !is_lint_allowed(cx, LEN_WITHOUT_IS_EMPTY, ty_hir_id) && let Some(output) = parse_len_output(cx, cx.tcx.fn_sig(item.owner_id).instantiate_identity().skip_binder()) { @@ -157,7 +157,17 @@ impl<'tcx> LateLintPass<'tcx> for LenZero { }, _ => return, }; - check_for_is_empty(cx, sig.span, sig.decl.implicit_self, output, ty_id, name, kind); + check_for_is_empty( + cx, + sig.span, + sig.decl.implicit_self, + output, + ty_id, + name, + kind, + item.hir_id(), + ty_hir_id, + ); } } @@ -180,7 +190,7 @@ impl<'tcx> LateLintPass<'tcx> for LenZero { let mut applicability = Applicability::MachineApplicable; let lit1 = peel_ref_operators(cx, lt.init); - let lit_str = Sugg::hir_with_context(cx, lit1, lt.span.ctxt(), "_", &mut applicability).maybe_par(); + let lit_str = Sugg::hir_with_context(cx, lit1, lt.span.ctxt(), "_", &mut applicability).maybe_paren(); span_lint_and_sugg( cx, @@ -202,7 +212,11 @@ impl<'tcx> LateLintPass<'tcx> for LenZero { expr.span, lhs_expr, peel_ref_operators(cx, rhs_expr), - (method.ident.name == sym::ne).then_some("!").unwrap_or_default(), + if method.ident.name == sym::ne { + "!" + } else { + Default::default() + }, ); } @@ -282,16 +296,10 @@ fn check_trait_items(cx: &LateContext<'_>, visited_trait: &Item<'_>, ident: Iden { let mut current_and_super_traits = DefIdSet::default(); fill_trait_set(visited_trait.owner_id.to_def_id(), &mut current_and_super_traits, cx); - let is_empty = sym!(is_empty); - let is_empty_method_found = current_and_super_traits .items() - .flat_map(|&i| cx.tcx.associated_items(i).filter_by_name_unhygienic(is_empty)) - .any(|i| { - i.kind == AssocKind::Fn - && i.fn_has_self_parameter - && cx.tcx.fn_sig(i.def_id).skip_binder().inputs().skip_binder().len() == 1 - }); + .flat_map(|&i| cx.tcx.associated_items(i).filter_by_name_unhygienic(sym::is_empty)) + .any(|i| i.is_method() && cx.tcx.fn_sig(i.def_id).skip_binder().inputs().skip_binder().len() == 1); if !is_empty_method_found { span_lint( @@ -443,6 +451,7 @@ fn check_is_empty_sig<'tcx>( } /// Checks if the given type has an `is_empty` method with the appropriate signature. +#[expect(clippy::too_many_arguments)] fn check_for_is_empty( cx: &LateContext<'_>, span: Span, @@ -451,6 +460,8 @@ fn check_for_is_empty( impl_ty: DefId, item_name: Symbol, item_kind: &str, + len_method_hir_id: HirId, + ty_decl_hir_id: HirId, ) { // Implementor may be a type alias, in which case we need to get the `DefId` of the aliased type to // find the correct inherent impls. @@ -460,13 +471,12 @@ fn check_for_is_empty( return; }; - let is_empty = Symbol::intern("is_empty"); let is_empty = cx .tcx .inherent_impls(impl_ty) .iter() - .flat_map(|&id| cx.tcx.associated_items(id).filter_by_name_unhygienic(is_empty)) - .find(|item| item.kind == AssocKind::Fn); + .flat_map(|&id| cx.tcx.associated_items(id).filter_by_name_unhygienic(sym::is_empty)) + .find(|item| item.is_fn()); let (msg, is_empty_span, self_kind) = match is_empty { None => ( @@ -486,7 +496,7 @@ fn check_for_is_empty( None, ), Some(is_empty) - if !(is_empty.fn_has_self_parameter + if !(is_empty.is_method() && check_is_empty_sig( cx, cx.tcx.fn_sig(is_empty.def_id).instantiate_identity().skip_binder(), @@ -506,14 +516,16 @@ fn check_for_is_empty( Some(_) => return, }; - span_lint_and_then(cx, LEN_WITHOUT_IS_EMPTY, span, msg, |db| { - if let Some(span) = is_empty_span { - db.span_note(span, "`is_empty` defined here"); - } - if let Some(self_kind) = self_kind { - db.note(output.expected_sig(self_kind)); - } - }); + if !fulfill_or_allowed(cx, LEN_WITHOUT_IS_EMPTY, [len_method_hir_id, ty_decl_hir_id]) { + span_lint_and_then(cx, LEN_WITHOUT_IS_EMPTY, span, msg, |db| { + if let Some(span) = is_empty_span { + db.span_note(span, "`is_empty` defined here"); + } + if let Some(self_kind) = self_kind { + db.note(output.expected_sig(self_kind)); + } + }); + } } fn check_cmp(cx: &LateContext<'_>, span: Span, method: &Expr<'_>, lit: &Expr<'_>, op: &str, compare_to: u32) { @@ -523,10 +535,8 @@ fn check_cmp(cx: &LateContext<'_>, span: Span, method: &Expr<'_>, lit: &Expr<'_> if let (&ExprKind::MethodCall(method_path, receiver, [], _), ExprKind::Lit(lit)) = (&method.kind, &lit.kind) { // check if we are in an is_empty() method - if let Some(name) = get_item_name(cx, method) { - if name.as_str() == "is_empty" { - return; - } + if parent_item_name(cx, method) == Some(sym::is_empty) { + return; } check_len(cx, span, method_path.ident.name, receiver, &lit.node, op, compare_to); @@ -573,7 +583,7 @@ fn check_empty_expr(cx: &LateContext<'_>, span: Span, lit1: &Expr<'_>, lit2: &Ex let mut applicability = Applicability::MachineApplicable; let lit1 = peel_ref_operators(cx, lit1); - let lit_str = Sugg::hir_with_context(cx, lit1, span.ctxt(), "_", &mut applicability).maybe_par(); + let lit_str = Sugg::hir_with_context(cx, lit1, span.ctxt(), "_", &mut applicability).maybe_paren(); span_lint_and_sugg( cx, @@ -588,11 +598,11 @@ fn check_empty_expr(cx: &LateContext<'_>, span: Span, lit1: &Expr<'_>, lit2: &Ex } fn is_empty_string(expr: &Expr<'_>) -> bool { - if let ExprKind::Lit(lit) = expr.kind { - if let LitKind::Str(lit, _) = lit.node { - let lit = lit.as_str(); - return lit.is_empty(); - } + if let ExprKind::Lit(lit) = expr.kind + && let LitKind::Str(lit, _) = lit.node + { + let lit = lit.as_str(); + return lit.is_empty(); } false } @@ -608,7 +618,7 @@ fn is_empty_array(expr: &Expr<'_>) -> bool { fn has_is_empty(cx: &LateContext<'_>, expr: &Expr<'_>) -> bool { /// Gets an `AssocItem` and return true if it matches `is_empty(self)`. fn is_is_empty(cx: &LateContext<'_>, item: &ty::AssocItem) -> bool { - if item.kind == AssocKind::Fn { + if item.is_fn() { let sig = cx.tcx.fn_sig(item.def_id).skip_binder(); let ty = sig.skip_binder(); ty.inputs().len() == 1 @@ -619,11 +629,10 @@ fn has_is_empty(cx: &LateContext<'_>, expr: &Expr<'_>) -> bool { /// Checks the inherent impl's items for an `is_empty(self)` method. fn has_is_empty_impl(cx: &LateContext<'_>, id: DefId) -> bool { - let is_empty = sym!(is_empty); cx.tcx.inherent_impls(id).iter().any(|imp| { cx.tcx .associated_items(*imp) - .filter_by_name_unhygienic(is_empty) + .filter_by_name_unhygienic(sym::is_empty) .any(|item| is_is_empty(cx, item)) }) } @@ -631,10 +640,9 @@ fn has_is_empty(cx: &LateContext<'_>, expr: &Expr<'_>) -> bool { fn ty_has_is_empty<'tcx>(cx: &LateContext<'tcx>, ty: Ty<'tcx>, depth: usize) -> bool { match ty.kind() { ty::Dynamic(tt, ..) => tt.principal().is_some_and(|principal| { - let is_empty = sym!(is_empty); cx.tcx .associated_items(principal.def_id()) - .filter_by_name_unhygienic(is_empty) + .filter_by_name_unhygienic(sym::is_empty) .any(|item| is_is_empty(cx, item)) }), ty::Alias(ty::Projection, proj) => has_is_empty_impl(cx, proj.def_id), @@ -644,7 +652,7 @@ fn has_is_empty(cx: &LateContext<'_>, expr: &Expr<'_>) -> bool { && cx.tcx.get_diagnostic_item(sym::Deref).is_some_and(|deref_id| { implements_trait(cx, ty, deref_id, &[]) && cx - .get_associated_type(ty, deref_id, "Target") + .get_associated_type(ty, deref_id, sym::Target) .is_some_and(|deref_ty| ty_has_is_empty(cx, deref_ty, depth + 1)) })) }, diff --git a/src/tools/clippy/clippy_lints/src/lib.rs b/src/tools/clippy/clippy_lints/src/lib.rs index cc3d972f017b8..bc7fc60827a0b 100644 --- a/src/tools/clippy/clippy_lints/src/lib.rs +++ b/src/tools/clippy/clippy_lints/src/lib.rs @@ -65,7 +65,6 @@ mod declare_clippy_lint; #[macro_use] extern crate clippy_utils; -#[cfg_attr(feature = "internal", allow(clippy::missing_clippy_version_attribute))] mod utils; pub mod ctfe; // Very important lint, do not remove (rust#125116) @@ -205,6 +204,7 @@ mod loops; mod macro_metavars_in_unsafe; mod macro_use; mod main_recursion; +mod manual_abs_diff; mod manual_assert; mod manual_async_fn; mod manual_bits; @@ -226,7 +226,6 @@ mod manual_rotate; mod manual_slice_size_calculation; mod manual_string_new; mod manual_strip; -mod manual_unwrap_or_default; mod map_unit_fn; mod match_result_ok; mod matches; @@ -320,6 +319,7 @@ mod redundant_locals; mod redundant_pub_crate; mod redundant_slicing; mod redundant_static_lifetimes; +mod redundant_test_prefix; mod redundant_type_annotations; mod ref_option_ref; mod ref_patterns; @@ -408,24 +408,9 @@ mod zombie_processes; use clippy_config::{Conf, get_configuration_metadata, sanitize_explanation}; use clippy_utils::macros::FormatArgsStorage; -use utils::attr_collector::{AttrCollector, AttrStorage}; use rustc_data_structures::fx::FxHashSet; use rustc_lint::{Lint, LintId}; - -/// Register all pre expansion lints -/// -/// Pre-expansion lints run before any macro expansion has happened. -/// -/// Note that due to the architecture of the compiler, currently `cfg_attr` attributes on crate -/// level (i.e `#![cfg_attr(...)]`) will still be expanded even when using a pre-expansion pass. -/// -/// Used in `./src/driver.rs`. -pub fn register_pre_expansion_lints(store: &mut rustc_lint::LintStore, conf: &'static Conf) { - // NOTE: Do not add any more pre-expansion passes. These should be removed eventually. - store.register_pre_expansion_pass(move || Box::new(attrs::EarlyAttributes::new(conf))); - - store.register_early_pass(move || Box::new(attrs::PostExpansionEarlyAttributes::new(conf))); -} +use utils::attr_collector::{AttrCollector, AttrStorage}; #[derive(Default)] struct RegistrationGroups { @@ -439,8 +424,6 @@ struct RegistrationGroups { restriction: Vec, style: Vec, suspicious: Vec, - #[cfg(feature = "internal")] - internal: Vec, } impl RegistrationGroups { @@ -456,8 +439,6 @@ impl RegistrationGroups { store.register_group(true, "clippy::restriction", Some("clippy_restriction"), self.restriction); store.register_group(true, "clippy::style", Some("clippy_style"), self.style); store.register_group(true, "clippy::suspicious", Some("clippy_suspicious"), self.suspicious); - #[cfg(feature = "internal")] - store.register_group(true, "clippy::internal", Some("clippy_internal"), self.internal); } } @@ -472,8 +453,6 @@ pub(crate) enum LintCategory { Restriction, Style, Suspicious, - #[cfg(feature = "internal")] - Internal, } #[allow(clippy::enum_glob_use)] @@ -495,8 +474,6 @@ impl LintCategory { Restriction => &mut groups.restriction, Style => &mut groups.style, Suspicious => &mut groups.suspicious, - #[cfg(feature = "internal")] - Internal => &mut groups.internal, } } } @@ -530,8 +507,6 @@ impl LintInfo { Restriction => "restriction", Style => "style", Suspicious => "suspicious", - #[cfg(feature = "internal")] - Internal => "internal", } } } @@ -589,6 +564,13 @@ pub fn register_lints(store: &mut rustc_lint::LintStore, conf: &'static Conf) { store.register_removed(name, reason); } + // NOTE: Do not add any more pre-expansion passes. These should be removed eventually. + // Due to the architecture of the compiler, currently `cfg_attr` attributes on crate + // level (i.e `#![cfg_attr(...)]`) will still be expanded even when using a pre-expansion pass. + store.register_pre_expansion_pass(move || Box::new(attrs::EarlyAttributes::new(conf))); + + store.register_early_pass(move || Box::new(attrs::PostExpansionEarlyAttributes::new(conf))); + let format_args_storage = FormatArgsStorage::default(); let format_args = format_args_storage.clone(); store.register_early_pass(move || { @@ -601,30 +583,6 @@ pub fn register_lints(store: &mut rustc_lint::LintStore, conf: &'static Conf) { let attrs = attr_storage.clone(); store.register_early_pass(move || Box::new(AttrCollector::new(attrs.clone()))); - // all the internal lints - #[cfg(feature = "internal")] - { - store.register_early_pass(|| { - Box::new(utils::internal_lints::unsorted_clippy_utils_paths::UnsortedClippyUtilsPaths) - }); - store.register_early_pass(|| Box::new(utils::internal_lints::produce_ice::ProduceIce)); - store.register_late_pass(|_| Box::new(utils::internal_lints::collapsible_calls::CollapsibleCalls)); - store.register_late_pass(|_| Box::new(utils::internal_lints::invalid_paths::InvalidPaths)); - store.register_late_pass(|_| { - Box::::default() - }); - store.register_late_pass(|_| { - Box::::default() - }); - store.register_late_pass(|_| Box::::default()); - store.register_late_pass(|_| Box::new(utils::internal_lints::outer_expn_data_pass::OuterExpnDataPass)); - store.register_late_pass(|_| Box::new(utils::internal_lints::msrv_attr_impl::MsrvAttrImpl)); - store.register_late_pass(|_| { - Box::new(utils::internal_lints::almost_standard_lint_formulation::AlmostStandardFormulation::new()) - }); - store.register_late_pass(|_| Box::new(utils::internal_lints::slow_symbol_comparisons::SlowSymbolComparisons)); - } - store.register_late_pass(|_| Box::new(ctfe::ClippyCtfe)); store.register_late_pass(move |_| Box::new(operators::arithmetic_side_effects::ArithmeticSideEffects::new(conf))); @@ -771,8 +729,9 @@ pub fn register_lints(store: &mut rustc_lint::LintStore, conf: &'static Conf) { store.register_early_pass(|| Box::new(misc_early::MiscEarlyLints)); store.register_late_pass(|_| Box::new(redundant_closure_call::RedundantClosureCall)); store.register_early_pass(|| Box::new(unused_unit::UnusedUnit)); + store.register_late_pass(|_| Box::new(unused_unit::UnusedUnit)); store.register_late_pass(|_| Box::new(returns::Return)); - store.register_early_pass(|| Box::new(collapsible_if::CollapsibleIf)); + store.register_late_pass(move |tcx| Box::new(collapsible_if::CollapsibleIf::new(tcx, conf))); store.register_late_pass(|_| Box::new(items_after_statements::ItemsAfterStatements)); store.register_early_pass(|| Box::new(precedence::Precedence)); store.register_late_pass(|_| Box::new(needless_parens_on_range_literals::NeedlessParensOnRangeLiterals)); @@ -857,7 +816,7 @@ pub fn register_lints(store: &mut rustc_lint::LintStore, conf: &'static Conf) { store.register_late_pass(move |_| Box::new(write::Write::new(conf, format_args.clone()))); store.register_late_pass(move |_| Box::new(cargo::Cargo::new(conf))); store.register_early_pass(|| Box::new(crate_in_macro_def::CrateInMacroDef)); - store.register_early_pass(|| Box::new(empty_with_brackets::EmptyWithBrackets)); + store.register_late_pass(|_| Box::new(empty_with_brackets::EmptyWithBrackets::default())); store.register_late_pass(|_| Box::new(unnecessary_owned_empty_strings::UnnecessaryOwnedEmptyStrings)); store.register_early_pass(|| Box::new(pub_use::PubUse)); store.register_late_pass(|_| Box::new(format_push_string::FormatPushString)); @@ -879,6 +838,7 @@ pub fn register_lints(store: &mut rustc_lint::LintStore, conf: &'static Conf) { store.register_late_pass(move |_| Box::new(std_instead_of_core::StdReexports::new(conf))); store.register_late_pass(move |_| Box::new(instant_subtraction::InstantSubtraction::new(conf))); store.register_late_pass(|_| Box::new(partialeq_to_none::PartialeqToNone)); + store.register_late_pass(move |_| Box::new(manual_abs_diff::ManualAbsDiff::new(conf))); store.register_late_pass(move |_| Box::new(manual_clamp::ManualClamp::new(conf))); store.register_late_pass(|_| Box::new(manual_string_new::ManualStringNew)); store.register_late_pass(|_| Box::new(unused_peekable::UnusedPeekable)); @@ -960,7 +920,6 @@ pub fn register_lints(store: &mut rustc_lint::LintStore, conf: &'static Conf) { store.register_early_pass(|| Box::new(multiple_bound_locations::MultipleBoundLocations)); store.register_late_pass(move |_| Box::new(assigning_clones::AssigningClones::new(conf))); store.register_late_pass(|_| Box::new(zero_repeat_side_effects::ZeroRepeatSideEffects)); - store.register_late_pass(|_| Box::new(manual_unwrap_or_default::ManualUnwrapOrDefault)); store.register_late_pass(|_| Box::new(integer_division_remainder_used::IntegerDivisionRemainderUsed)); store.register_late_pass(move |_| Box::new(macro_metavars_in_unsafe::ExprMetavarsInUnsafe::new(conf))); store.register_late_pass(move |_| Box::new(string_patterns::StringPatterns::new(conf))); @@ -971,7 +930,7 @@ pub fn register_lints(store: &mut rustc_lint::LintStore, conf: &'static Conf) { store.register_late_pass(|_| Box::new(zombie_processes::ZombieProcesses)); store.register_late_pass(|_| Box::new(pointers_in_nomem_asm_block::PointersInNomemAsmBlock)); store.register_late_pass(move |_| Box::new(manual_div_ceil::ManualDivCeil::new(conf))); - store.register_late_pass(|_| Box::new(manual_is_power_of_two::ManualIsPowerOfTwo)); + store.register_late_pass(move |_| Box::new(manual_is_power_of_two::ManualIsPowerOfTwo::new(conf))); store.register_late_pass(|_| Box::new(non_zero_suggestions::NonZeroSuggestions)); store.register_late_pass(|_| Box::new(literal_string_with_formatting_args::LiteralStringWithFormattingArg)); store.register_late_pass(move |_| Box::new(unused_trait_names::UnusedTraitNames::new(conf))); @@ -984,5 +943,6 @@ pub fn register_lints(store: &mut rustc_lint::LintStore, conf: &'static Conf) { store.register_late_pass(move |_| Box::new(non_std_lazy_statics::NonStdLazyStatic::new(conf))); store.register_late_pass(|_| Box::new(manual_option_as_slice::ManualOptionAsSlice::new(conf))); store.register_late_pass(|_| Box::new(single_option_map::SingleOptionMap)); + store.register_late_pass(move |_| Box::new(redundant_test_prefix::RedundantTestPrefix)); // add lints here, do not remove this comment, it's used in `new_lint` } diff --git a/src/tools/clippy/clippy_lints/src/lifetimes.rs b/src/tools/clippy/clippy_lints/src/lifetimes.rs index 8d47c756fc53c..5ef5e3a44f85f 100644 --- a/src/tools/clippy/clippy_lints/src/lifetimes.rs +++ b/src/tools/clippy/clippy_lints/src/lifetimes.rs @@ -14,7 +14,7 @@ use rustc_hir::intravisit::{ }; use rustc_hir::{ AmbigArg, BareFnTy, BodyId, FnDecl, FnSig, GenericArg, GenericArgs, GenericBound, GenericParam, GenericParamKind, - Generics, HirId, Impl, ImplItem, ImplItemKind, Item, ItemKind, Lifetime, LifetimeName, LifetimeParamKind, Node, + Generics, HirId, Impl, ImplItem, ImplItemKind, Item, ItemKind, Lifetime, LifetimeKind, LifetimeParamKind, Node, PolyTraitRef, PredicateOrigin, TraitFn, TraitItem, TraitItemKind, Ty, TyKind, WhereBoundPredicate, WherePredicate, WherePredicateKind, lang_items, }; @@ -150,10 +150,10 @@ impl<'tcx> LateLintPass<'tcx> for Lifetimes { } = item.kind { check_fn_inner(cx, sig, Some(id), None, generics, item.span, true, self.msrv); - } else if let ItemKind::Impl(impl_) = item.kind { - if !item.span.from_expansion() { - report_extra_impl_lifetimes(cx, impl_); - } + } else if let ItemKind::Impl(impl_) = item.kind + && !item.span.from_expansion() + { + report_extra_impl_lifetimes(cx, impl_); } } @@ -218,7 +218,7 @@ fn check_fn_inner<'tcx>( for bound in pred.bounds { let mut visitor = RefVisitor::new(cx); walk_param_bound(&mut visitor, bound); - if visitor.lts.iter().any(|lt| matches!(lt.res, LifetimeName::Param(_))) { + if visitor.lts.iter().any(|lt| matches!(lt.kind, LifetimeKind::Param(_))) { return; } if let GenericBound::Trait(ref trait_ref) = *bound { @@ -235,7 +235,7 @@ fn check_fn_inner<'tcx>( _ => None, }); for bound in lifetimes { - if bound.res != LifetimeName::Static && !bound.is_elided() { + if bound.kind != LifetimeKind::Static && !bound.is_elided() { return; } } @@ -300,8 +300,8 @@ fn could_use_elision<'tcx>( let input_lts = input_visitor.lts; let output_lts = output_visitor.lts; - if let Some(trait_sig) = trait_sig - && non_elidable_self_type(cx, func, trait_sig.first().copied(), msrv) + if let Some(&[trait_sig]) = trait_sig + && non_elidable_self_type(cx, func, trait_sig, msrv) { return None; } @@ -310,11 +310,11 @@ fn could_use_elision<'tcx>( let body = cx.tcx.hir_body(body_id); let first_ident = body.params.first().and_then(|param| param.pat.simple_ident()); - if non_elidable_self_type(cx, func, Some(first_ident), msrv) { + if non_elidable_self_type(cx, func, first_ident, msrv) { return None; } - let mut checker = BodyLifetimeChecker; + let mut checker = BodyLifetimeChecker::new(cx); if checker.visit_expr(body.value).is_break() { return None; } @@ -384,8 +384,8 @@ fn allowed_lts_from(named_generics: &[GenericParam<'_>]) -> FxIndexSet(cx: &LateContext<'tcx>, func: &FnDecl<'tcx>, ident: Option>, msrv: Msrv) -> bool { - if let Some(Some(ident)) = ident +fn non_elidable_self_type<'tcx>(cx: &LateContext<'tcx>, func: &FnDecl<'tcx>, ident: Option, msrv: Msrv) -> bool { + if let Some(ident) = ident && ident.name == kw::SelfLower && !func.implicit_self.has_implicit_self() && let Some(self_ty) = func.inputs.first() @@ -421,8 +421,8 @@ fn named_lifetime_occurrences(lts: &[Lifetime]) -> Vec<(LocalDefId, usize)> { } fn named_lifetime(lt: &Lifetime) -> Option { - match lt.res { - LifetimeName::Param(id) if !lt.is_anonymous() => Some(id), + match lt.kind { + LifetimeKind::Param(id) if !lt.is_anonymous() => Some(id), _ => None, } } @@ -614,7 +614,7 @@ where // for lifetimes as parameters of generics fn visit_lifetime(&mut self, lifetime: &'tcx Lifetime) { - if let LifetimeName::Param(def_id) = lifetime.res + if let LifetimeKind::Param(def_id) = lifetime.kind && let Some(usages) = self.map.get_mut(&def_id) { usages.push(Usage { @@ -826,7 +826,7 @@ fn report_elidable_lifetimes( .iter() .map(|<| cx.tcx.def_span(lt)) .chain(usages.iter().filter_map(|usage| { - if let LifetimeName::Param(def_id) = usage.res + if let LifetimeKind::Param(def_id) = usage.kind && elidable_lts.contains(&def_id) { return Some(usage.ident.span); @@ -911,10 +911,23 @@ fn elision_suggestions( Some(suggestions) } -struct BodyLifetimeChecker; +struct BodyLifetimeChecker<'tcx> { + tcx: TyCtxt<'tcx>, +} -impl<'tcx> Visitor<'tcx> for BodyLifetimeChecker { +impl<'tcx> BodyLifetimeChecker<'tcx> { + fn new(cx: &LateContext<'tcx>) -> Self { + Self { tcx: cx.tcx } + } +} + +impl<'tcx> Visitor<'tcx> for BodyLifetimeChecker<'tcx> { type Result = ControlFlow<()>; + type NestedFilter = middle_nested_filter::OnlyBodies; + + fn maybe_tcx(&mut self) -> Self::MaybeTyCtxt { + self.tcx + } // for lifetimes as parameters of generics fn visit_lifetime(&mut self, lifetime: &'tcx Lifetime) -> ControlFlow<()> { if !lifetime.is_anonymous() && lifetime.ident.name != kw::StaticLifetime { diff --git a/src/tools/clippy/clippy_lints/src/literal_representation.rs b/src/tools/clippy/clippy_lints/src/literal_representation.rs index 805de23408bf5..7cbfa2d097ae5 100644 --- a/src/tools/clippy/clippy_lints/src/literal_representation.rs +++ b/src/tools/clippy/clippy_lints/src/literal_representation.rs @@ -387,12 +387,11 @@ impl LiteralDigitGrouping { let first = groups.next().expect("At least one group"); - if radix == Radix::Binary || radix == Radix::Octal || radix == Radix::Hexadecimal { - if let Some(second_size) = groups.next() { - if !groups.all(|i| i == second_size) || first > second_size { - return Err(WarningType::UnusualByteGroupings); - } - } + if (radix == Radix::Binary || radix == Radix::Octal || radix == Radix::Hexadecimal) + && let Some(second_size) = groups.next() + && (!groups.all(|i| i == second_size) || first > second_size) + { + return Err(WarningType::UnusualByteGroupings); } if let Some(second) = groups.next() { diff --git a/src/tools/clippy/clippy_lints/src/literal_string_with_formatting_args.rs b/src/tools/clippy/clippy_lints/src/literal_string_with_formatting_args.rs index 975e6833a35f8..244e7c95122e0 100644 --- a/src/tools/clippy/clippy_lints/src/literal_string_with_formatting_args.rs +++ b/src/tools/clippy/clippy_lints/src/literal_string_with_formatting_args.rs @@ -45,15 +45,14 @@ fn emit_lint(cx: &LateContext<'_>, expr: &Expr<'_>, spans: &[(Span, Option = ControlFlow::Continue(()); + +pub(super) fn check<'tcx>(cx: &LateContext<'tcx>, pat: &Pat<'_>, iterable: &Expr<'_>, body: &'tcx Expr<'tcx>) { + if let ExprKind::MethodCall(_, enumerate_recv, _, enumerate_span) = iterable.kind + && let Some(method_id) = cx.typeck_results().type_dependent_def_id(iterable.hir_id) + && cx.tcx.is_diagnostic_item(sym::enumerate_method, method_id) + && let ExprKind::MethodCall(_, chars_recv, _, chars_span) = enumerate_recv.kind + && let Some(method_id) = cx.typeck_results().type_dependent_def_id(enumerate_recv.hir_id) + && cx.tcx.is_diagnostic_item(sym::str_chars, method_id) + { + if let PatKind::Tuple([pat, _], _) = pat.kind + && let PatKind::Binding(_, binding_id, ..) = pat.kind + { + // Destructured iterator element `(idx, _)`, look for uses of the binding + for_each_expr(cx, body, |expr| { + if path_to_local_id(expr, binding_id) { + check_index_usage(cx, expr, pat, enumerate_span, chars_span, chars_recv); + } + CONTINUE + }); + } else if let PatKind::Binding(_, binding_id, ..) = pat.kind { + // Bound as a tuple, look for `tup.0` + for_each_expr(cx, body, |expr| { + if let ExprKind::Field(e, field) = expr.kind + && path_to_local_id(e, binding_id) + && field.name == sym::integer(0) + { + check_index_usage(cx, expr, pat, enumerate_span, chars_span, chars_recv); + } + CONTINUE + }); + } + } +} + +fn check_index_usage<'tcx>( + cx: &LateContext<'tcx>, + expr: &'tcx Expr<'tcx>, + pat: &Pat<'_>, + enumerate_span: Span, + chars_span: Span, + chars_recv: &Expr<'_>, +) { + let Some(parent_expr) = index_consumed_at(cx, expr) else { + return; + }; + + let is_string_like = |ty: Ty<'_>| ty.is_str() || is_type_lang_item(cx, ty, LangItem::String); + let message = match parent_expr.kind { + ExprKind::MethodCall(segment, recv, ..) + // We currently only lint `str` methods (which `String` can deref to), so a `.is_str()` check is sufficient here + // (contrary to the `ExprKind::Index` case which needs to handle both with `is_string_like` because `String` implements + // `Index` directly and no deref to `str` would happen in that case). + if cx.typeck_results().expr_ty_adjusted(recv).peel_refs().is_str() + && BYTE_INDEX_METHODS.contains(&segment.ident.name.as_str()) + && eq_expr_value(cx, chars_recv, recv) => + { + "passing a character position to a method that expects a byte index" + }, + ExprKind::Index(target, ..) + if is_string_like(cx.typeck_results().expr_ty_adjusted(target).peel_refs()) + && eq_expr_value(cx, chars_recv, target) => + { + "indexing into a string with a character position where a byte index is expected" + }, + _ => return, + }; + + span_lint_hir_and_then( + cx, + CHAR_INDICES_AS_BYTE_INDICES, + expr.hir_id, + expr.span, + message, + |diag| { + diag.note("a character can take up more than one byte, so they are not interchangeable") + .span_note( + MultiSpan::from_spans(vec![pat.span, enumerate_span]), + "position comes from the enumerate iterator", + ) + .span_suggestion_verbose( + chars_span.to(enumerate_span), + "consider using `.char_indices()` instead", + "char_indices()", + Applicability::MaybeIncorrect, + ); + }, + ); +} + +/// Returns the expression which ultimately consumes the index. +/// This is usually the parent expression, i.e. `.split_at(idx)` for `idx`, +/// but for `.get(..idx)` we want to consider the method call the consuming expression, +/// which requires skipping past the range expression. +fn index_consumed_at<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'tcx>) -> Option<&'tcx Expr<'tcx>> { + for (_, node) in cx.tcx.hir_parent_iter(expr.hir_id) { + match node { + Node::Expr(expr) if higher::Range::hir(expr).is_some() => {}, + Node::ExprField(_) => {}, + Node::Expr(expr) => return Some(expr), + _ => break, + } + } + None +} diff --git a/src/tools/clippy/clippy_lints/src/loops/explicit_iter_loop.rs b/src/tools/clippy/clippy_lints/src/loops/explicit_iter_loop.rs index 412c78cc80411..d0b26c91ffafb 100644 --- a/src/tools/clippy/clippy_lints/src/loops/explicit_iter_loop.rs +++ b/src/tools/clippy/clippy_lints/src/loops/explicit_iter_loop.rs @@ -2,6 +2,7 @@ use super::EXPLICIT_ITER_LOOP; use clippy_utils::diagnostics::span_lint_and_sugg; use clippy_utils::msrvs::{self, Msrv}; use clippy_utils::source::snippet_with_applicability; +use clippy_utils::sym; use clippy_utils::ty::{ implements_trait, implements_trait_with_env, is_copy, is_type_lang_item, make_normalized_projection, make_normalized_projection_with_regions, normalize_with_regions, @@ -11,7 +12,6 @@ use rustc_hir::{Expr, Mutability}; use rustc_lint::LateContext; use rustc_middle::ty::adjustment::{Adjust, Adjustment, AutoBorrow, AutoBorrowMutability}; use rustc_middle::ty::{self, EarlyBinder, Ty}; -use rustc_span::sym; pub(super) fn check( cx: &LateContext<'_>, @@ -119,7 +119,7 @@ fn is_ref_iterable<'tcx>( && let typing_env = ty::TypingEnv::non_body_analysis(cx.tcx, fn_id) && implements_trait_with_env(cx.tcx, typing_env, req_self_ty, trait_id, Some(fn_id), &[]) && let Some(into_iter_ty) = - make_normalized_projection_with_regions(cx.tcx, typing_env, trait_id, sym!(IntoIter), [req_self_ty]) + make_normalized_projection_with_regions(cx.tcx, typing_env, trait_id, sym::IntoIter, [req_self_ty]) && let req_res_ty = normalize_with_regions(cx.tcx, typing_env, req_res_ty) && into_iter_ty == req_res_ty { @@ -152,7 +152,7 @@ fn is_ref_iterable<'tcx>( // Using by value won't consume anything if implements_trait(cx, self_ty, trait_id, &[]) && let Some(ty) = - make_normalized_projection(cx.tcx, cx.typing_env(), trait_id, sym!(IntoIter), [self_ty]) + make_normalized_projection(cx.tcx, cx.typing_env(), trait_id, sym::IntoIter, [self_ty]) && ty == res_ty { return Some((AdjustKind::None, self_ty)); @@ -169,7 +169,7 @@ fn is_ref_iterable<'tcx>( }; if implements_trait(cx, self_ty, trait_id, &[]) && let Some(ty) = - make_normalized_projection(cx.tcx, cx.typing_env(), trait_id, sym!(IntoIter), [self_ty]) + make_normalized_projection(cx.tcx, cx.typing_env(), trait_id, sym::IntoIter, [self_ty]) && ty == res_ty { return Some((AdjustKind::reborrow(mutbl), self_ty)); @@ -183,7 +183,7 @@ fn is_ref_iterable<'tcx>( let self_ty = Ty::new_ref(cx.tcx, cx.tcx.lifetimes.re_erased, self_ty, mutbl); if implements_trait(cx, self_ty, trait_id, &[]) && let Some(ty) = - make_normalized_projection(cx.tcx, cx.typing_env(), trait_id, sym!(IntoIter), [self_ty]) + make_normalized_projection(cx.tcx, cx.typing_env(), trait_id, sym::IntoIter, [self_ty]) && ty == res_ty { return Some((AdjustKind::borrow(mutbl), self_ty)); @@ -206,7 +206,7 @@ fn is_ref_iterable<'tcx>( && target != self_ty && implements_trait(cx, target, trait_id, &[]) && let Some(ty) = - make_normalized_projection(cx.tcx, cx.typing_env(), trait_id, sym!(IntoIter), [target]) + make_normalized_projection(cx.tcx, cx.typing_env(), trait_id, sym::IntoIter, [target]) && ty == res_ty { Some((AdjustKind::auto_reborrow(mutbl), target)) @@ -224,7 +224,7 @@ fn is_ref_iterable<'tcx>( if is_copy(cx, target) && implements_trait(cx, target, trait_id, &[]) && let Some(ty) = - make_normalized_projection(cx.tcx, cx.typing_env(), trait_id, sym!(IntoIter), [target]) + make_normalized_projection(cx.tcx, cx.typing_env(), trait_id, sym::IntoIter, [target]) && ty == res_ty { Some((AdjustKind::Deref, target)) @@ -242,7 +242,7 @@ fn is_ref_iterable<'tcx>( if self_ty.is_ref() && implements_trait(cx, target, trait_id, &[]) && let Some(ty) = - make_normalized_projection(cx.tcx, cx.typing_env(), trait_id, sym!(IntoIter), [target]) + make_normalized_projection(cx.tcx, cx.typing_env(), trait_id, sym::IntoIter, [target]) && ty == res_ty { Some((AdjustKind::auto_borrow(mutbl), target)) diff --git a/src/tools/clippy/clippy_lints/src/loops/for_kv_map.rs b/src/tools/clippy/clippy_lints/src/loops/for_kv_map.rs index 185d834becafc..e314bc2068b30 100644 --- a/src/tools/clippy/clippy_lints/src/loops/for_kv_map.rs +++ b/src/tools/clippy/clippy_lints/src/loops/for_kv_map.rs @@ -13,45 +13,45 @@ use rustc_span::sym; pub(super) fn check<'tcx>(cx: &LateContext<'tcx>, pat: &'tcx Pat<'_>, arg: &'tcx Expr<'_>, body: &'tcx Expr<'_>) { let pat_span = pat.span; - if let PatKind::Tuple(pat, _) = pat.kind { - if pat.len() == 2 { - let arg_span = arg.span; - let (new_pat_span, kind, ty, mutbl) = match *cx.typeck_results().expr_ty(arg).kind() { - ty::Ref(_, ty, mutbl) => match (&pat[0].kind, &pat[1].kind) { - (key, _) if pat_is_wild(cx, key, body) => (pat[1].span, "value", ty, mutbl), - (_, value) if pat_is_wild(cx, value, body) => (pat[0].span, "key", ty, Mutability::Not), - _ => return, - }, + if let PatKind::Tuple(pat, _) = pat.kind + && pat.len() == 2 + { + let arg_span = arg.span; + let (new_pat_span, kind, ty, mutbl) = match *cx.typeck_results().expr_ty(arg).kind() { + ty::Ref(_, ty, mutbl) => match (&pat[0].kind, &pat[1].kind) { + (key, _) if pat_is_wild(cx, key, body) => (pat[1].span, "value", ty, mutbl), + (_, value) if pat_is_wild(cx, value, body) => (pat[0].span, "key", ty, Mutability::Not), _ => return, - }; - let mutbl = match mutbl { - Mutability::Not => "", - Mutability::Mut => "_mut", - }; - let arg = match arg.kind { - ExprKind::AddrOf(BorrowKind::Ref, _, expr) => expr, - _ => arg, - }; + }, + _ => return, + }; + let mutbl = match mutbl { + Mutability::Not => "", + Mutability::Mut => "_mut", + }; + let arg = match arg.kind { + ExprKind::AddrOf(BorrowKind::Ref, _, expr) => expr, + _ => arg, + }; - if is_type_diagnostic_item(cx, ty, sym::HashMap) || is_type_diagnostic_item(cx, ty, sym::BTreeMap) { - span_lint_and_then( - cx, - FOR_KV_MAP, - arg_span, - format!("you seem to want to iterate on a map's {kind}s"), - |diag| { - let map = sugg::Sugg::hir(cx, arg, "map"); - diag.multipart_suggestion( - "use the corresponding method", - vec![ - (pat_span, snippet(cx, new_pat_span, kind).into_owned()), - (arg_span, format!("{}.{kind}s{mutbl}()", map.maybe_par())), - ], - Applicability::MachineApplicable, - ); - }, - ); - } + if is_type_diagnostic_item(cx, ty, sym::HashMap) || is_type_diagnostic_item(cx, ty, sym::BTreeMap) { + span_lint_and_then( + cx, + FOR_KV_MAP, + arg_span, + format!("you seem to want to iterate on a map's {kind}s"), + |diag| { + let map = sugg::Sugg::hir(cx, arg, "map"); + diag.multipart_suggestion( + "use the corresponding method", + vec![ + (pat_span, snippet(cx, new_pat_span, kind).into_owned()), + (arg_span, format!("{}.{kind}s{mutbl}()", map.maybe_paren())), + ], + Applicability::MachineApplicable, + ); + }, + ); } } } diff --git a/src/tools/clippy/clippy_lints/src/loops/manual_find.rs b/src/tools/clippy/clippy_lints/src/loops/manual_find.rs index aa8a2934f89bd..35737f3eafe23 100644 --- a/src/tools/clippy/clippy_lints/src/loops/manual_find.rs +++ b/src/tools/clippy/clippy_lints/src/loops/manual_find.rs @@ -3,6 +3,7 @@ use super::utils::make_iterator_snippet; use clippy_utils::diagnostics::span_lint_and_then; use clippy_utils::source::snippet_with_applicability; use clippy_utils::ty::implements_trait; +use clippy_utils::usage::contains_return_break_continue_macro; use clippy_utils::{higher, is_res_lang_ctor, path_res, peel_blocks_with_stmt}; use rustc_errors::Applicability; use rustc_hir::def::Res; @@ -35,6 +36,7 @@ pub(super) fn check<'tcx>( && let ExprKind::Call(ctor, [inner_ret]) = ret_value.kind && is_res_lang_ctor(cx, path_res(cx, ctor), LangItem::OptionSome) && path_res(cx, inner_ret) == Res::Local(binding_id) + && !contains_return_break_continue_macro(cond) && let Some((last_stmt, last_ret)) = last_stmt_and_ret(cx, expr) { let mut applicability = Applicability::MachineApplicable; diff --git a/src/tools/clippy/clippy_lints/src/loops/manual_memcpy.rs b/src/tools/clippy/clippy_lints/src/loops/manual_memcpy.rs index 701567a7d84e7..d9c4b526da99e 100644 --- a/src/tools/clippy/clippy_lints/src/loops/manual_memcpy.rs +++ b/src/tools/clippy/clippy_lints/src/loops/manual_memcpy.rs @@ -28,37 +28,37 @@ pub(super) fn check<'tcx>( end: Some(end), limits, }) = higher::Range::hir(arg) - { // the var must be a single name - if let PatKind::Binding(_, canonical_id, _, _) = pat.kind { - let mut starts = vec![Start { - id: canonical_id, - kind: StartKind::Range, - }]; - - // This is one of few ways to return different iterators - // derived from: https://stackoverflow.com/questions/29760668/conditionally-iterate-over-one-of-several-possible-iterators/52064434#52064434 - let mut iter_a = None; - let mut iter_b = None; - - if let ExprKind::Block(block, _) = body.kind { - if let Some(loop_counters) = get_loop_counters(cx, block, expr) { - starts.extend(loop_counters); - } - iter_a = Some(get_assignments(block, &starts)); - } else { - iter_b = Some(get_assignment(body)); + && let PatKind::Binding(_, canonical_id, _, _) = pat.kind + { + let mut starts = vec![Start { + id: canonical_id, + kind: StartKind::Range, + }]; + + // This is one of few ways to return different iterators + // derived from: https://stackoverflow.com/questions/29760668/conditionally-iterate-over-one-of-several-possible-iterators/52064434#52064434 + let mut iter_a = None; + let mut iter_b = None; + + if let ExprKind::Block(block, _) = body.kind { + if let Some(loop_counters) = get_loop_counters(cx, block, expr) { + starts.extend(loop_counters); } + iter_a = Some(get_assignments(block, &starts)); + } else { + iter_b = Some(get_assignment(body)); + } - let assignments = iter_a.into_iter().flatten().chain(iter_b); + let assignments = iter_a.into_iter().flatten().chain(iter_b); - let big_sugg = assignments - // The only statements in the for loops can be indexed assignments from - // indexed retrievals (except increments of loop counters). - .map(|o| { - o.and_then(|(lhs, rhs)| { - let rhs = fetch_cloned_expr(rhs); - if let ExprKind::Index(base_left, idx_left, _) = lhs.kind + let big_sugg = assignments + // The only statements in the for loops can be indexed assignments from + // indexed retrievals (except increments of loop counters). + .map(|o| { + o.and_then(|(lhs, rhs)| { + let rhs = fetch_cloned_expr(rhs); + if let ExprKind::Index(base_left, idx_left, _) = lhs.kind && let ExprKind::Index(base_right, idx_right, _) = rhs.kind && let Some(ty) = get_slice_like_element_ty(cx, cx.typeck_results().expr_ty(base_left)) && get_slice_like_element_ty(cx, cx.typeck_results().expr_ty(base_right)).is_some() @@ -68,42 +68,41 @@ pub(super) fn check<'tcx>( && !local_used_in(cx, canonical_id, base_right) // Source and destination must be different && path_to_local(base_left) != path_to_local(base_right) - { - Some(( - ty, - IndexExpr { - base: base_left, - idx: start_left, - idx_offset: offset_left, - }, - IndexExpr { - base: base_right, - idx: start_right, - idx_offset: offset_right, - }, - )) - } else { - None - } - }) + { + Some(( + ty, + IndexExpr { + base: base_left, + idx: start_left, + idx_offset: offset_left, + }, + IndexExpr { + base: base_right, + idx: start_right, + idx_offset: offset_right, + }, + )) + } else { + None + } }) - .map(|o| o.map(|(ty, dst, src)| build_manual_memcpy_suggestion(cx, start, end, limits, ty, &dst, &src))) - .collect::>>() - .filter(|v| !v.is_empty()) - .map(|v| v.join("\n ")); - - if let Some(big_sugg) = big_sugg { - span_lint_and_sugg( - cx, - MANUAL_MEMCPY, - expr.span, - "it looks like you're manually copying between slices", - "try replacing the loop by", - big_sugg, - Applicability::Unspecified, - ); - return true; - } + }) + .map(|o| o.map(|(ty, dst, src)| build_manual_memcpy_suggestion(cx, start, end, limits, ty, &dst, &src))) + .collect::>>() + .filter(|v| !v.is_empty()) + .map(|v| v.join("\n ")); + + if let Some(big_sugg) = big_sugg { + span_lint_and_sugg( + cx, + MANUAL_MEMCPY, + expr.span, + "it looks like you're manually copying between slices", + "try replacing the loop by", + big_sugg, + Applicability::Unspecified, + ); + return true; } } false @@ -184,7 +183,12 @@ fn build_manual_memcpy_suggestion<'tcx>( { dst_base_str } else { - format!("{dst_base_str}[{}..{}]", dst_offset.maybe_par(), dst_limit.maybe_par()).into() + format!( + "{dst_base_str}[{}..{}]", + dst_offset.maybe_paren(), + dst_limit.maybe_paren() + ) + .into() }; let method_str = if is_copy(cx, elem_ty) { @@ -196,7 +200,12 @@ fn build_manual_memcpy_suggestion<'tcx>( let src = if is_array_length_equal_to_range(cx, start, end, src.base) { src_base_str } else { - format!("{src_base_str}[{}..{}]", src_offset.maybe_par(), src_limit.maybe_par()).into() + format!( + "{src_base_str}[{}..{}]", + src_offset.maybe_paren(), + src_limit.maybe_paren() + ) + .into() }; format!("{dst}.{method_str}(&{src});") diff --git a/src/tools/clippy/clippy_lints/src/loops/manual_while_let_some.rs b/src/tools/clippy/clippy_lints/src/loops/manual_while_let_some.rs index 4473a3343c7c6..9527e258db8ab 100644 --- a/src/tools/clippy/clippy_lints/src/loops/manual_while_let_some.rs +++ b/src/tools/clippy/clippy_lints/src/loops/manual_while_let_some.rs @@ -81,15 +81,15 @@ fn check_local(cx: &LateContext<'_>, stmt: &Stmt<'_>, is_empty_recv: &Expr<'_>, } fn check_call_arguments(cx: &LateContext<'_>, stmt: &Stmt<'_>, is_empty_recv: &Expr<'_>, loop_span: Span) { - if let StmtKind::Semi(expr) | StmtKind::Expr(expr) = stmt.kind { - if let ExprKind::MethodCall(.., args, _) | ExprKind::Call(_, args) = expr.kind { - let offending_arg = args - .iter() - .find_map(|arg| is_vec_pop_unwrap(cx, arg, is_empty_recv).then_some(arg.span)); + if let StmtKind::Semi(expr) | StmtKind::Expr(expr) = stmt.kind + && let ExprKind::MethodCall(.., args, _) | ExprKind::Call(_, args) = expr.kind + { + let offending_arg = args + .iter() + .find_map(|arg| is_vec_pop_unwrap(cx, arg, is_empty_recv).then_some(arg.span)); - if let Some(offending_arg) = offending_arg { - report_lint(cx, offending_arg, PopStmt::Anonymous, loop_span, is_empty_recv.span); - } + if let Some(offending_arg) = offending_arg { + report_lint(cx, offending_arg, PopStmt::Anonymous, loop_span, is_empty_recv.span); } } } diff --git a/src/tools/clippy/clippy_lints/src/loops/mod.rs b/src/tools/clippy/clippy_lints/src/loops/mod.rs index 4b0bf5a4b3c94..2b66827e82eeb 100644 --- a/src/tools/clippy/clippy_lints/src/loops/mod.rs +++ b/src/tools/clippy/clippy_lints/src/loops/mod.rs @@ -1,3 +1,4 @@ +mod char_indices_as_byte_indices; mod empty_loop; mod explicit_counter_loop; mod explicit_into_iter_loop; @@ -740,6 +741,49 @@ declare_clippy_lint! { "manually filling a slice with a value" } +declare_clippy_lint! { + /// ### What it does + /// Checks for usage of a character position yielded by `.chars().enumerate()` in a context where a **byte index** is expected, + /// such as an argument to a specific `str` method or indexing into a `str` or `String`. + /// + /// ### Why is this bad? + /// A character (more specifically, a Unicode scalar value) that is yielded by `str::chars` can take up multiple bytes, + /// so a character position does not necessarily have the same byte index at which the character is stored. + /// Thus, using the character position where a byte index is expected can unexpectedly return wrong values + /// or panic when the string consists of multibyte characters. + /// + /// For example, the character `a` in `äa` is stored at byte index 2 but has the character position 1. + /// Using the character position 1 to index into the string will lead to a panic as it is in the middle of the first character. + /// + /// Instead of `.chars().enumerate()`, the correct iterator to use is `.char_indices()`, which yields byte indices. + /// + /// This pattern is technically fine if the strings are known to only use the ASCII subset, + /// though in those cases it would be better to use `bytes()` directly to make the intent clearer, + /// but there is also no downside to just using `.char_indices()` directly and supporting non-ASCII strings. + /// + /// You may also want to read the [chapter on strings in the Rust Book](https://doc.rust-lang.org/book/ch08-02-strings.html) + /// which goes into this in more detail. + /// + /// ### Example + /// ```no_run + /// # let s = "..."; + /// for (idx, c) in s.chars().enumerate() { + /// let _ = s[idx..]; // ⚠️ Panics for strings consisting of multibyte characters + /// } + /// ``` + /// Use instead: + /// ```no_run + /// # let s = "..."; + /// for (idx, c) in s.char_indices() { + /// let _ = s[idx..]; + /// } + /// ``` + #[clippy::version = "1.83.0"] + pub CHAR_INDICES_AS_BYTE_INDICES, + correctness, + "using the character position yielded by `.chars().enumerate()` in a context where a byte index is expected" +} + pub struct Loops { msrv: Msrv, enforce_iter_loop_reborrow: bool, @@ -777,6 +821,7 @@ impl_lint_pass!(Loops => [ UNUSED_ENUMERATE_INDEX, INFINITE_LOOP, MANUAL_SLICE_FILL, + CHAR_INDICES_AS_BYTE_INDICES, ]); impl<'tcx> LateLintPass<'tcx> for Loops { @@ -860,6 +905,7 @@ impl Loops { manual_flatten::check(cx, pat, arg, body, span, self.msrv); manual_find::check(cx, pat, arg, body, span, expr); unused_enumerate_index::check(cx, pat, arg, body); + char_indices_as_byte_indices::check(cx, pat, arg, body); } fn check_for_loop_arg(&self, cx: &LateContext<'_>, _: &Pat<'_>, arg: &Expr<'_>) { diff --git a/src/tools/clippy/clippy_lints/src/loops/mut_range_bound.rs b/src/tools/clippy/clippy_lints/src/loops/mut_range_bound.rs index 5afcf51167d47..70ca452013f91 100644 --- a/src/tools/clippy/clippy_lints/src/loops/mut_range_bound.rs +++ b/src/tools/clippy/clippy_lints/src/loops/mut_range_bound.rs @@ -82,14 +82,14 @@ impl<'tcx> Delegate<'tcx> for MutatePairDelegate<'_, 'tcx> { fn use_cloned(&mut self, _: &PlaceWithHirId<'tcx>, _: HirId) {} fn borrow(&mut self, cmt: &PlaceWithHirId<'tcx>, diag_expr_id: HirId, bk: ty::BorrowKind) { - if bk == ty::BorrowKind::Mutable { - if let PlaceBase::Local(id) = cmt.place.base { - if Some(id) == self.hir_id_low && !BreakAfterExprVisitor::is_found(self.cx, diag_expr_id) { - self.span_low = Some(self.cx.tcx.hir().span(diag_expr_id)); - } - if Some(id) == self.hir_id_high && !BreakAfterExprVisitor::is_found(self.cx, diag_expr_id) { - self.span_high = Some(self.cx.tcx.hir().span(diag_expr_id)); - } + if bk == ty::BorrowKind::Mutable + && let PlaceBase::Local(id) = cmt.place.base + { + if Some(id) == self.hir_id_low && !BreakAfterExprVisitor::is_found(self.cx, diag_expr_id) { + self.span_low = Some(self.cx.tcx.hir_span(diag_expr_id)); + } + if Some(id) == self.hir_id_high && !BreakAfterExprVisitor::is_found(self.cx, diag_expr_id) { + self.span_high = Some(self.cx.tcx.hir_span(diag_expr_id)); } } } @@ -97,10 +97,10 @@ impl<'tcx> Delegate<'tcx> for MutatePairDelegate<'_, 'tcx> { fn mutate(&mut self, cmt: &PlaceWithHirId<'tcx>, diag_expr_id: HirId) { if let PlaceBase::Local(id) = cmt.place.base { if Some(id) == self.hir_id_low && !BreakAfterExprVisitor::is_found(self.cx, diag_expr_id) { - self.span_low = Some(self.cx.tcx.hir().span(diag_expr_id)); + self.span_low = Some(self.cx.tcx.hir_span(diag_expr_id)); } if Some(id) == self.hir_id_high && !BreakAfterExprVisitor::is_found(self.cx, diag_expr_id) { - self.span_high = Some(self.cx.tcx.hir().span(diag_expr_id)); + self.span_high = Some(self.cx.tcx.hir_span(diag_expr_id)); } } } diff --git a/src/tools/clippy/clippy_lints/src/loops/needless_range_loop.rs b/src/tools/clippy/clippy_lints/src/loops/needless_range_loop.rs index 0f62183eb33d6..7837b18bcd36c 100644 --- a/src/tools/clippy/clippy_lints/src/loops/needless_range_loop.rs +++ b/src/tools/clippy/clippy_lints/src/loops/needless_range_loop.rs @@ -31,155 +31,154 @@ pub(super) fn check<'tcx>( ref end, limits, }) = higher::Range::hir(arg) - { // the var must be a single name - if let PatKind::Binding(_, canonical_id, ident, _) = pat.kind { - let mut visitor = VarVisitor { - cx, - var: canonical_id, - indexed_mut: FxHashSet::default(), - indexed_indirectly: FxHashMap::default(), - indexed_directly: FxIndexMap::default(), - referenced: FxHashSet::default(), - nonindex: false, - prefer_mutable: false, - }; - walk_expr(&mut visitor, body); - - // linting condition: we only indexed one variable, and indexed it directly - if visitor.indexed_indirectly.is_empty() && visitor.indexed_directly.len() == 1 { - let (indexed, (indexed_extent, indexed_ty)) = visitor - .indexed_directly - .into_iter() - .next() - .expect("already checked that we have exactly 1 element"); + && let PatKind::Binding(_, canonical_id, ident, _) = pat.kind + { + let mut visitor = VarVisitor { + cx, + var: canonical_id, + indexed_mut: FxHashSet::default(), + indexed_indirectly: FxHashMap::default(), + indexed_directly: FxIndexMap::default(), + referenced: FxHashSet::default(), + nonindex: false, + prefer_mutable: false, + }; + walk_expr(&mut visitor, body); - // ensure that the indexed variable was declared before the loop, see #601 - if let Some(indexed_extent) = indexed_extent { - let parent_def_id = cx.tcx.hir_get_parent_item(expr.hir_id); - let region_scope_tree = cx.tcx.region_scope_tree(parent_def_id); - let pat_extent = region_scope_tree.var_scope(pat.hir_id.local_id).unwrap(); - if region_scope_tree.is_subscope_of(indexed_extent, pat_extent) { - return; - } - } + // linting condition: we only indexed one variable, and indexed it directly + if visitor.indexed_indirectly.is_empty() && visitor.indexed_directly.len() == 1 { + let (indexed, (indexed_extent, indexed_ty)) = visitor + .indexed_directly + .into_iter() + .next() + .expect("already checked that we have exactly 1 element"); - // don't lint if the container that is indexed does not have .iter() method - let has_iter = has_iter_method(cx, indexed_ty); - if has_iter.is_none() { + // ensure that the indexed variable was declared before the loop, see #601 + if let Some(indexed_extent) = indexed_extent { + let parent_def_id = cx.tcx.hir_get_parent_item(expr.hir_id); + let region_scope_tree = cx.tcx.region_scope_tree(parent_def_id); + let pat_extent = region_scope_tree.var_scope(pat.hir_id.local_id).unwrap(); + if region_scope_tree.is_subscope_of(indexed_extent, pat_extent) { return; } + } - // don't lint if the container that is indexed into is also used without - // indexing - if visitor.referenced.contains(&indexed) { - return; - } + // don't lint if the container that is indexed does not have .iter() method + let has_iter = has_iter_method(cx, indexed_ty); + if has_iter.is_none() { + return; + } - let starts_at_zero = is_integer_const(cx, start, 0); + // don't lint if the container that is indexed into is also used without + // indexing + if visitor.referenced.contains(&indexed) { + return; + } - let skip = if starts_at_zero { - String::new() - } else if visitor.indexed_mut.contains(&indexed) && contains_name(indexed, start, cx) { - return; - } else { - format!(".skip({})", snippet(cx, start.span, "..")) - }; + let starts_at_zero = is_integer_const(cx, start, 0); - let mut end_is_start_plus_val = false; + let skip = if starts_at_zero { + String::new() + } else if visitor.indexed_mut.contains(&indexed) && contains_name(indexed, start, cx) { + return; + } else { + format!(".skip({})", snippet(cx, start.span, "..")) + }; - let take = if let Some(end) = *end { - let mut take_expr = end; + let mut end_is_start_plus_val = false; - if let ExprKind::Binary(ref op, left, right) = end.kind { - if op.node == BinOpKind::Add { - let start_equal_left = SpanlessEq::new(cx).eq_expr(start, left); - let start_equal_right = SpanlessEq::new(cx).eq_expr(start, right); + let take = if let Some(end) = *end { + let mut take_expr = end; - if start_equal_left { - take_expr = right; - } else if start_equal_right { - take_expr = left; - } + if let ExprKind::Binary(ref op, left, right) = end.kind + && op.node == BinOpKind::Add + { + let start_equal_left = SpanlessEq::new(cx).eq_expr(start, left); + let start_equal_right = SpanlessEq::new(cx).eq_expr(start, right); - end_is_start_plus_val = start_equal_left | start_equal_right; - } + if start_equal_left { + take_expr = right; + } else if start_equal_right { + take_expr = left; } - if is_len_call(end, indexed) || is_end_eq_array_len(cx, end, limits, indexed_ty) { - String::new() - } else if visitor.indexed_mut.contains(&indexed) && contains_name(indexed, take_expr, cx) { - return; - } else { - match limits { - ast::RangeLimits::Closed => { - let take_expr = sugg::Sugg::hir(cx, take_expr, ""); - format!(".take({})", take_expr + sugg::ONE) - }, - ast::RangeLimits::HalfOpen => { - format!(".take({})", snippet(cx, take_expr.span, "..")) - }, - } - } - } else { - String::new() - }; + end_is_start_plus_val = start_equal_left | start_equal_right; + } - let (ref_mut, method) = if visitor.indexed_mut.contains(&indexed) { - ("mut ", "iter_mut") + if is_len_call(end, indexed) || is_end_eq_array_len(cx, end, limits, indexed_ty) { + String::new() + } else if visitor.indexed_mut.contains(&indexed) && contains_name(indexed, take_expr, cx) { + return; } else { - ("", "iter") - }; + match limits { + ast::RangeLimits::Closed => { + let take_expr = sugg::Sugg::hir(cx, take_expr, ""); + format!(".take({})", take_expr + sugg::ONE) + }, + ast::RangeLimits::HalfOpen => { + format!(".take({})", snippet(cx, take_expr.span, "..")) + }, + } + } + } else { + String::new() + }; - let take_is_empty = take.is_empty(); - let mut method_1 = take; - let mut method_2 = skip; + let (ref_mut, method) = if visitor.indexed_mut.contains(&indexed) { + ("mut ", "iter_mut") + } else { + ("", "iter") + }; - if end_is_start_plus_val { - mem::swap(&mut method_1, &mut method_2); - } + let take_is_empty = take.is_empty(); + let mut method_1 = take; + let mut method_2 = skip; - if visitor.nonindex { - span_lint_and_then( - cx, - NEEDLESS_RANGE_LOOP, - arg.span, - format!("the loop variable `{}` is used to index `{indexed}`", ident.name), - |diag| { - diag.multipart_suggestion( - "consider using an iterator and enumerate()", - vec![ - (pat.span, format!("({}, )", ident.name)), - ( - arg.span, - format!("{indexed}.{method}().enumerate(){method_1}{method_2}"), - ), - ], - Applicability::HasPlaceholders, - ); - }, - ); + if end_is_start_plus_val { + mem::swap(&mut method_1, &mut method_2); + } + + if visitor.nonindex { + span_lint_and_then( + cx, + NEEDLESS_RANGE_LOOP, + arg.span, + format!("the loop variable `{}` is used to index `{indexed}`", ident.name), + |diag| { + diag.multipart_suggestion( + "consider using an iterator and enumerate()", + vec![ + (pat.span, format!("({}, )", ident.name)), + ( + arg.span, + format!("{indexed}.{method}().enumerate(){method_1}{method_2}"), + ), + ], + Applicability::HasPlaceholders, + ); + }, + ); + } else { + let repl = if starts_at_zero && take_is_empty { + format!("&{ref_mut}{indexed}") } else { - let repl = if starts_at_zero && take_is_empty { - format!("&{ref_mut}{indexed}") - } else { - format!("{indexed}.{method}(){method_1}{method_2}") - }; + format!("{indexed}.{method}(){method_1}{method_2}") + }; - span_lint_and_then( - cx, - NEEDLESS_RANGE_LOOP, - arg.span, - format!("the loop variable `{}` is only used to index `{indexed}`", ident.name), - |diag| { - diag.multipart_suggestion( - "consider using an iterator", - vec![(pat.span, "".to_string()), (arg.span, repl)], - Applicability::HasPlaceholders, - ); - }, - ); - } + span_lint_and_then( + cx, + NEEDLESS_RANGE_LOOP, + arg.span, + format!("the loop variable `{}` is only used to index `{indexed}`", ident.name), + |diag| { + diag.multipart_suggestion( + "consider using an iterator", + vec![(pat.span, "".to_string()), (arg.span, repl)], + Applicability::HasPlaceholders, + ); + }, + ); } } } @@ -346,10 +345,10 @@ impl<'tcx> Visitor<'tcx> for VarVisitor<'_, 'tcx> { for expr in args { let ty = self.cx.typeck_results().expr_ty_adjusted(expr); self.prefer_mutable = false; - if let ty::Ref(_, _, mutbl) = *ty.kind() { - if mutbl == Mutability::Mut { - self.prefer_mutable = true; - } + if let ty::Ref(_, _, mutbl) = *ty.kind() + && mutbl == Mutability::Mut + { + self.prefer_mutable = true; } self.visit_expr(expr); } @@ -361,10 +360,10 @@ impl<'tcx> Visitor<'tcx> for VarVisitor<'_, 'tcx> { iter::once(receiver).chain(args.iter()), ) { self.prefer_mutable = false; - if let ty::Ref(_, _, mutbl) = *ty.kind() { - if mutbl == Mutability::Mut { - self.prefer_mutable = true; - } + if let ty::Ref(_, _, mutbl) = *ty.kind() + && mutbl == Mutability::Mut + { + self.prefer_mutable = true; } self.visit_expr(expr); } diff --git a/src/tools/clippy/clippy_lints/src/loops/never_loop.rs b/src/tools/clippy/clippy_lints/src/loops/never_loop.rs index c3a2a38b5ec25..69c84bc7038ef 100644 --- a/src/tools/clippy/clippy_lints/src/loops/never_loop.rs +++ b/src/tools/clippy/clippy_lints/src/loops/never_loop.rs @@ -244,10 +244,10 @@ fn never_loop_expr<'tcx>( }); combine_seq(first, || { // checks if break targets a block instead of a loop - if let ExprKind::Break(Destination { target_id: Ok(t), .. }, _) = expr.kind { - if let Some((_, reachable)) = local_labels.iter_mut().find(|(label, _)| *label == t) { - *reachable = true; - } + if let ExprKind::Break(Destination { target_id: Ok(t), .. }, _) = expr.kind + && let Some((_, reachable)) = local_labels.iter_mut().find(|(label, _)| *label == t) + { + *reachable = true; } NeverLoopResult::Diverging }) diff --git a/src/tools/clippy/clippy_lints/src/loops/same_item_push.rs b/src/tools/clippy/clippy_lints/src/loops/same_item_push.rs index 661b4b590d8fb..388034c39f522 100644 --- a/src/tools/clippy/clippy_lints/src/loops/same_item_push.rs +++ b/src/tools/clippy/clippy_lints/src/loops/same_item_push.rs @@ -3,7 +3,7 @@ use clippy_utils::diagnostics::span_lint_and_then; use clippy_utils::msrvs::Msrv; use clippy_utils::source::snippet_with_context; use clippy_utils::ty::{implements_trait, is_type_diagnostic_item}; -use clippy_utils::{msrvs, path_to_local, std_or_core}; +use clippy_utils::{msrvs, path_to_local, std_or_core, sym}; use rustc_data_structures::fx::FxHashSet; use rustc_errors::Applicability; use rustc_hir::def::{DefKind, Res}; @@ -11,7 +11,6 @@ use rustc_hir::intravisit::{Visitor, walk_expr}; use rustc_hir::{BindingMode, Block, Expr, ExprKind, HirId, Mutability, Node, Pat, PatKind, Stmt, StmtKind}; use rustc_lint::LateContext; use rustc_span::SyntaxContext; -use rustc_span::symbol::sym; /// Detects for loop pushing the same item into a Vec pub(super) fn check<'tcx>( @@ -187,8 +186,8 @@ fn get_vec_push<'tcx>( // Extract method being called and figure out the parameters for the method call && let ExprKind::MethodCall(path, self_expr, [pushed_item], _) = &semi_stmt.kind // Check that the method being called is push() on a Vec + && path.ident.name == sym::push && is_type_diagnostic_item(cx, cx.typeck_results().expr_ty(self_expr), sym::Vec) - && path.ident.name.as_str() == "push" { return Some((self_expr, pushed_item, semi_stmt.span.ctxt())); } diff --git a/src/tools/clippy/clippy_lints/src/loops/utils.rs b/src/tools/clippy/clippy_lints/src/loops/utils.rs index a5185d38e7c33..2f6950b4380c3 100644 --- a/src/tools/clippy/clippy_lints/src/loops/utils.rs +++ b/src/tools/clippy/clippy_lints/src/loops/utils.rs @@ -3,7 +3,7 @@ use clippy_utils::{get_parent_expr, is_integer_const, path_to_local, path_to_loc use rustc_ast::ast::{LitIntType, LitKind}; use rustc_errors::Applicability; use rustc_hir::intravisit::{Visitor, walk_expr, walk_local}; -use rustc_hir::{BinOpKind, BorrowKind, Expr, ExprKind, HirId, HirIdMap, LetStmt, Mutability, PatKind}; +use rustc_hir::{AssignOpKind, BorrowKind, Expr, ExprKind, HirId, HirIdMap, LetStmt, Mutability, PatKind}; use rustc_lint::LateContext; use rustc_middle::hir::nested_filter; use rustc_middle::ty::{self, Ty}; @@ -58,7 +58,7 @@ impl<'tcx> Visitor<'tcx> for IncrementVisitor<'_, 'tcx> { match parent.kind { ExprKind::AssignOp(op, lhs, rhs) => { if lhs.hir_id == expr.hir_id { - *state = if op.node == BinOpKind::Add + *state = if op.node == AssignOpKind::AddAssign && is_integer_const(self.cx, rhs, 1) && *state == IncrementVisitorVarState::Initial && self.depth == 0 @@ -263,7 +263,7 @@ pub(super) fn make_iterator_snippet(cx: &LateContext<'_>, arg: &Expr<'_>, applic if impls_iterator { format!( "{}", - sugg::Sugg::hir_with_applicability(cx, arg, "_", applic_ref).maybe_par() + sugg::Sugg::hir_with_applicability(cx, arg, "_", applic_ref).maybe_paren() ) } else { // (&x).into_iter() ==> x.iter() @@ -281,12 +281,12 @@ pub(super) fn make_iterator_snippet(cx: &LateContext<'_>, arg: &Expr<'_>, applic }; format!( "{}.{method_name}()", - sugg::Sugg::hir_with_applicability(cx, caller, "_", applic_ref).maybe_par(), + sugg::Sugg::hir_with_applicability(cx, caller, "_", applic_ref).maybe_paren(), ) }, _ => format!( "{}.into_iter()", - sugg::Sugg::hir_with_applicability(cx, arg, "_", applic_ref).maybe_par() + sugg::Sugg::hir_with_applicability(cx, arg, "_", applic_ref).maybe_paren() ), } } diff --git a/src/tools/clippy/clippy_lints/src/macro_metavars_in_unsafe.rs b/src/tools/clippy/clippy_lints/src/macro_metavars_in_unsafe.rs index 006addb987f5b..9071c9c95f9d7 100644 --- a/src/tools/clippy/clippy_lints/src/macro_metavars_in_unsafe.rs +++ b/src/tools/clippy/clippy_lints/src/macro_metavars_in_unsafe.rs @@ -5,7 +5,8 @@ use itertools::Itertools; use rustc_hir::def_id::LocalDefId; use rustc_hir::intravisit::{Visitor, walk_block, walk_expr, walk_stmt}; use rustc_hir::{BlockCheckMode, Expr, ExprKind, HirId, Stmt, UnsafeSource}; -use rustc_lint::{LateContext, LateLintPass}; +use rustc_lint::{LateContext, LateLintPass, Level, LintContext}; +use rustc_middle::lint::LevelAndSource; use rustc_session::impl_lint_pass; use rustc_span::{Span, SyntaxContext, sym}; use std::collections::BTreeMap; @@ -249,11 +250,25 @@ impl<'tcx> LateLintPass<'tcx> for ExprMetavarsInUnsafe { }) .flatten() .copied() + .inspect(|&unsafe_block| { + if let LevelAndSource { + level: Level::Expect, + lint_id: Some(id), + .. + } = cx.tcx.lint_level_at_node(MACRO_METAVARS_IN_UNSAFE, unsafe_block) + { + // Since we're going to deduplicate expanded unsafe blocks by its enclosing macro definition soon, + // which would lead to unfulfilled `#[expect()]`s in all other unsafe blocks that are filtered out + // except for the one we emit the warning at, we must manually fulfill the lint + // for all unsafe blocks here. + cx.fulfill_expectation(id); + } + }) .map(|id| { // Remove the syntax context to hide "in this macro invocation" in the diagnostic. // The invocation doesn't matter. Also we want to dedupe by the unsafe block and not by anything // related to the callsite. - let span = cx.tcx.hir().span(id); + let span = cx.tcx.hir_span(id); (id, Span::new(span.lo(), span.hi(), SyntaxContext::root(), None)) }) diff --git a/src/tools/clippy/clippy_lints/src/macro_use.rs b/src/tools/clippy/clippy_lints/src/macro_use.rs index b712b351d063c..98ad1f6a160d8 100644 --- a/src/tools/clippy/clippy_lints/src/macro_use.rs +++ b/src/tools/clippy/clippy_lints/src/macro_use.rs @@ -153,9 +153,15 @@ impl LateLintPass<'_> for MacroUseImports { [] | [_] => return, [root, item] => { if !check_dup.contains(&(*item).to_string()) { - used.entry(((*root).to_string(), span, hir_id)) - .or_insert_with(Vec::new) - .push((*item).to_string()); + used.entry(( + (*root).to_string(), + span, + hir_id.local_id, + cx.tcx.def_path_hash(hir_id.owner.def_id.into()), + )) + .or_insert_with(|| (vec![], hir_id)) + .0 + .push((*item).to_string()); check_dup.push((*item).to_string()); } }, @@ -171,15 +177,27 @@ impl LateLintPass<'_> for MacroUseImports { } }) .collect::>(); - used.entry(((*root).to_string(), span, hir_id)) - .or_insert_with(Vec::new) - .push(filtered.join("::")); + used.entry(( + (*root).to_string(), + span, + hir_id.local_id, + cx.tcx.def_path_hash(hir_id.owner.def_id.into()), + )) + .or_insert_with(|| (vec![], hir_id)) + .0 + .push(filtered.join("::")); check_dup.extend(filtered); } else { let rest = rest.to_vec(); - used.entry(((*root).to_string(), span, hir_id)) - .or_insert_with(Vec::new) - .push(rest.join("::")); + used.entry(( + (*root).to_string(), + span, + hir_id.local_id, + cx.tcx.def_path_hash(hir_id.owner.def_id.into()), + )) + .or_insert_with(|| (vec![], hir_id)) + .0 + .push(rest.join("::")); check_dup.extend(rest.iter().map(ToString::to_string)); } }, @@ -190,7 +208,7 @@ impl LateLintPass<'_> for MacroUseImports { // If mac_refs is not empty we have encountered an import we could not handle // such as `std::prelude::v1::foo` or some other macro that expands to an import. if self.mac_refs.is_empty() { - for ((root, span, hir_id), path) in used { + for ((root, span, ..), (path, hir_id)) in used { let import = if let [single] = &path[..] { format!("{root}::{single}") } else { diff --git a/src/tools/clippy/clippy_lints/src/manual_abs_diff.rs b/src/tools/clippy/clippy_lints/src/manual_abs_diff.rs new file mode 100644 index 0000000000000..c515e41f242f5 --- /dev/null +++ b/src/tools/clippy/clippy_lints/src/manual_abs_diff.rs @@ -0,0 +1,152 @@ +use clippy_config::Conf; +use clippy_utils::diagnostics::span_lint_and_then; +use clippy_utils::higher::If; +use clippy_utils::msrvs::{self, Msrv}; +use clippy_utils::source::HasSession as _; +use clippy_utils::sugg::Sugg; +use clippy_utils::ty::is_type_diagnostic_item; +use clippy_utils::{eq_expr_value, peel_blocks, span_contains_comment}; +use rustc_errors::Applicability; +use rustc_hir::{BinOpKind, Expr, ExprKind}; +use rustc_lint::{LateContext, LateLintPass}; +use rustc_middle::ty::{self, Ty}; +use rustc_session::impl_lint_pass; +use rustc_span::sym; + +declare_clippy_lint! { + /// ### What it does + /// Detects patterns like `if a > b { a - b } else { b - a }` and suggests using `a.abs_diff(b)`. + /// + /// ### Why is this bad? + /// Using `abs_diff` is shorter, more readable, and avoids control flow. + /// + /// ### Examples + /// ```no_run + /// # let (a, b) = (5_usize, 3_usize); + /// if a > b { + /// a - b + /// } else { + /// b - a + /// } + /// # ; + /// ``` + /// Use instead: + /// ```no_run + /// # let (a, b) = (5_usize, 3_usize); + /// a.abs_diff(b) + /// # ; + /// ``` + #[clippy::version = "1.86.0"] + pub MANUAL_ABS_DIFF, + complexity, + "using an if-else pattern instead of `abs_diff`" +} + +impl_lint_pass!(ManualAbsDiff => [MANUAL_ABS_DIFF]); + +pub struct ManualAbsDiff { + msrv: Msrv, +} + +impl ManualAbsDiff { + pub fn new(conf: &'static Conf) -> Self { + Self { msrv: conf.msrv } + } +} + +impl<'tcx> LateLintPass<'tcx> for ManualAbsDiff { + fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'tcx>) { + if !expr.span.from_expansion() + && let Some(if_expr) = If::hir(expr) + && let Some(r#else) = if_expr.r#else + && let ExprKind::Binary(op, rhs, lhs) = if_expr.cond.kind + && let (BinOpKind::Gt | BinOpKind::Ge, mut a, mut b) | (BinOpKind::Lt | BinOpKind::Le, mut b, mut a) = + (op.node, rhs, lhs) + && let Some(ty) = self.are_ty_eligible(cx, a, b) + && is_sub_expr(cx, if_expr.then, a, b, ty) + && is_sub_expr(cx, r#else, b, a, ty) + { + span_lint_and_then( + cx, + MANUAL_ABS_DIFF, + expr.span, + "manual absolute difference pattern without using `abs_diff`", + |diag| { + if is_unsuffixed_numeral_lit(a) && !is_unsuffixed_numeral_lit(b) { + (a, b) = (b, a); + } + let applicability = { + let source_map = cx.sess().source_map(); + if span_contains_comment(source_map, if_expr.then.span) + || span_contains_comment(source_map, r#else.span) + { + Applicability::MaybeIncorrect + } else { + Applicability::MachineApplicable + } + }; + let sugg = format!( + "{}.abs_diff({})", + Sugg::hir(cx, a, "..").maybe_paren(), + Sugg::hir(cx, b, "..") + ); + diag.span_suggestion(expr.span, "replace with `abs_diff`", sugg, applicability); + }, + ); + } + } +} + +impl ManualAbsDiff { + /// Returns a type if `a` and `b` are both of it, and this lint can be applied to that + /// type (currently, any primitive int, or a `Duration`) + fn are_ty_eligible<'tcx>(&self, cx: &LateContext<'tcx>, a: &Expr<'_>, b: &Expr<'_>) -> Option> { + let is_int = |ty: Ty<'_>| matches!(ty.kind(), ty::Uint(_) | ty::Int(_)) && self.msrv.meets(cx, msrvs::ABS_DIFF); + let is_duration = + |ty| is_type_diagnostic_item(cx, ty, sym::Duration) && self.msrv.meets(cx, msrvs::DURATION_ABS_DIFF); + + let a_ty = cx.typeck_results().expr_ty(a).peel_refs(); + (a_ty == cx.typeck_results().expr_ty(b).peel_refs() && (is_int(a_ty) || is_duration(a_ty))).then_some(a_ty) + } +} + +/// Checks if the given expression is a subtraction operation between two expected expressions, +/// i.e. if `expr` is `{expected_a} - {expected_b}`. +/// +/// If `expected_ty` is a signed primitive integer, this function will only return `Some` if the +/// subtraction expr is wrapped in a cast to the equivalent unsigned int. +fn is_sub_expr( + cx: &LateContext<'_>, + expr: &Expr<'_>, + expected_a: &Expr<'_>, + expected_b: &Expr<'_>, + expected_ty: Ty<'_>, +) -> bool { + let expr = peel_blocks(expr).kind; + + if let ty::Int(ty) = expected_ty.kind() { + let unsigned = Ty::new_uint(cx.tcx, ty.to_unsigned()); + + return if let ExprKind::Cast(expr, cast_ty) = expr + && cx.typeck_results().node_type(cast_ty.hir_id) == unsigned + { + is_sub_expr(cx, expr, expected_a, expected_b, unsigned) + } else { + false + }; + } + + if let ExprKind::Binary(op, a, b) = expr + && let BinOpKind::Sub = op.node + && eq_expr_value(cx, a, expected_a) + && eq_expr_value(cx, b, expected_b) + { + true + } else { + false + } +} + +fn is_unsuffixed_numeral_lit(expr: &Expr<'_>) -> bool { + matches!(expr.kind, ExprKind::Lit(lit) if lit.node.is_numeric() && lit.node.is_unsuffixed()) +} diff --git a/src/tools/clippy/clippy_lints/src/manual_assert.rs b/src/tools/clippy/clippy_lints/src/manual_assert.rs index 83c16d4466d06..8378e15c581c6 100644 --- a/src/tools/clippy/clippy_lints/src/manual_assert.rs +++ b/src/tools/clippy/clippy_lints/src/manual_assert.rs @@ -60,7 +60,7 @@ impl<'tcx> LateLintPass<'tcx> for ManualAssert { ExprKind::Unary(UnOp::Not, e) => (e, ""), _ => (cond, "!"), }; - let cond_sugg = sugg::Sugg::hir_with_applicability(cx, cond, "..", &mut applicability).maybe_par(); + let cond_sugg = sugg::Sugg::hir_with_applicability(cx, cond, "..", &mut applicability).maybe_paren(); let semicolon = if is_parent_stmt(cx, expr.hir_id) { ";" } else { "" }; let sugg = format!("assert!({not}{cond_sugg}, {format_args_snip}){semicolon}"); // we show to the user the suggestion without the comments, but when applying the fix, include the diff --git a/src/tools/clippy/clippy_lints/src/manual_clamp.rs b/src/tools/clippy/clippy_lints/src/manual_clamp.rs index 50c8331eebab4..02afe9f0997de 100644 --- a/src/tools/clippy/clippy_lints/src/manual_clamp.rs +++ b/src/tools/clippy/clippy_lints/src/manual_clamp.rs @@ -181,7 +181,7 @@ fn maybe_emit_suggestion<'tcx>(cx: &LateContext<'tcx>, suggestion: &ClampSuggest make_assignment, hir_with_ignore_attr, } = suggestion; - let input = Sugg::hir(cx, input, "..").maybe_par(); + let input = Sugg::hir(cx, input, "..").maybe_paren(); let min = Sugg::hir(cx, min, ".."); let max = Sugg::hir(cx, max, ".."); let semicolon = if make_assignment.is_some() { ";" } else { "" }; diff --git a/src/tools/clippy/clippy_lints/src/manual_div_ceil.rs b/src/tools/clippy/clippy_lints/src/manual_div_ceil.rs index 9944c4f880481..ed0cce754b954 100644 --- a/src/tools/clippy/clippy_lints/src/manual_div_ceil.rs +++ b/src/tools/clippy/clippy_lints/src/manual_div_ceil.rs @@ -1,8 +1,9 @@ -use clippy_utils::SpanlessEq; +use clippy_config::Conf; use clippy_utils::diagnostics::span_lint_and_sugg; use clippy_utils::msrvs::{self, Msrv}; -use clippy_utils::source::snippet_with_applicability; +use clippy_utils::source::snippet_with_context; use clippy_utils::sugg::{Sugg, has_enclosing_paren}; +use clippy_utils::{SpanlessEq, sym}; use rustc_ast::{BinOpKind, LitIntType, LitKind, UnOp}; use rustc_data_structures::packed::Pu128; use rustc_errors::Applicability; @@ -11,9 +12,6 @@ use rustc_lint::{LateContext, LateLintPass}; use rustc_middle::ty::{self}; use rustc_session::impl_lint_pass; use rustc_span::source_map::Spanned; -use rustc_span::symbol::Symbol; - -use clippy_config::Conf; declare_clippy_lint! { /// ### What it does @@ -141,8 +139,7 @@ fn check_int_ty_and_feature(cx: &LateContext<'_>, expr: &Expr<'_>) -> bool { let expr_ty = cx.typeck_results().expr_ty(expr); match expr_ty.peel_refs().kind() { ty::Uint(_) => true, - ty::Int(_) => cx.tcx.features().enabled(Symbol::intern("int_roundings")), - + ty::Int(_) => cx.tcx.features().enabled(sym::int_roundings), _ => false, } } @@ -167,7 +164,7 @@ fn build_suggestion( rhs: &Expr<'_>, applicability: &mut Applicability, ) { - let dividend_sugg = Sugg::hir_with_applicability(cx, lhs, "..", applicability).maybe_par(); + let dividend_sugg = Sugg::hir_with_applicability(cx, lhs, "..", applicability).maybe_paren(); let type_suffix = if cx.typeck_results().expr_ty(lhs).is_numeric() && matches!( lhs.kind, @@ -202,9 +199,9 @@ fn build_suggestion( } else { format!("{dividend_sugg_str}{type_suffix}") }; - let divisor_snippet = snippet_with_applicability(cx, rhs.span.source_callsite(), "..", applicability); + let divisor_snippet = snippet_with_context(cx, rhs.span, expr.span.ctxt(), "..", applicability); - let sugg = format!("{suggestion_before_div_ceil}.div_ceil({divisor_snippet})"); + let sugg = format!("{suggestion_before_div_ceil}.div_ceil({})", divisor_snippet.0); span_lint_and_sugg( cx, diff --git a/src/tools/clippy/clippy_lints/src/manual_hash_one.rs b/src/tools/clippy/clippy_lints/src/manual_hash_one.rs index f71264a93ca84..b3ee45cc02098 100644 --- a/src/tools/clippy/clippy_lints/src/manual_hash_one.rs +++ b/src/tools/clippy/clippy_lints/src/manual_hash_one.rs @@ -3,12 +3,11 @@ use clippy_utils::diagnostics::span_lint_hir_and_then; use clippy_utils::msrvs::{self, Msrv}; use clippy_utils::source::SpanRangeExt; use clippy_utils::visitors::{is_local_used, local_used_once}; -use clippy_utils::{is_trait_method, path_to_local_id}; +use clippy_utils::{is_trait_method, path_to_local_id, sym}; use rustc_errors::Applicability; use rustc_hir::{BindingMode, ExprKind, LetStmt, Node, PatKind, StmtKind}; use rustc_lint::{LateContext, LateLintPass}; use rustc_session::impl_lint_pass; -use rustc_span::sym; declare_clippy_lint! { /// ### What it does @@ -66,7 +65,7 @@ impl LateLintPass<'_> for ManualHashOne { && let Some(init) = local.init && !init.span.from_expansion() && let ExprKind::MethodCall(seg, build_hasher, [], _) = init.kind - && seg.ident.name.as_str() == "build_hasher" + && seg.ident.name == sym::build_hasher && let Node::Stmt(local_stmt) = cx.tcx.parent_hir_node(local.hir_id) && let Node::Block(block) = cx.tcx.parent_hir_node(local_stmt.hir_id) @@ -94,7 +93,7 @@ impl LateLintPass<'_> for ManualHashOne { && let Node::Expr(finish_expr) = cx.tcx.parent_hir_node(path_expr.hir_id) && !finish_expr.span.from_expansion() && let ExprKind::MethodCall(seg, _, [], _) = finish_expr.kind - && seg.ident.name.as_str() == "finish" + && seg.ident.name == sym::finish && self.msrv.meets(cx, msrvs::BUILD_HASHER_HASH_ONE) { diff --git a/src/tools/clippy/clippy_lints/src/manual_is_ascii_check.rs b/src/tools/clippy/clippy_lints/src/manual_is_ascii_check.rs index faf01a276a131..ac8c88f02057b 100644 --- a/src/tools/clippy/clippy_lints/src/manual_is_ascii_check.rs +++ b/src/tools/clippy/clippy_lints/src/manual_is_ascii_check.rs @@ -3,7 +3,7 @@ use clippy_utils::diagnostics::span_lint_and_then; use clippy_utils::macros::matching_root_macro_call; use clippy_utils::msrvs::{self, Msrv}; use clippy_utils::sugg::Sugg; -use clippy_utils::{higher, is_in_const_context, path_to_local, peel_ref_operators}; +use clippy_utils::{higher, is_in_const_context, path_to_local, peel_ref_operators, sym}; use rustc_ast::LitKind::{Byte, Char}; use rustc_ast::ast::RangeLimits; use rustc_errors::Applicability; @@ -11,7 +11,7 @@ use rustc_hir::{Expr, ExprKind, Lit, Node, Param, PatExpr, PatExprKind, PatKind, use rustc_lint::{LateContext, LateLintPass}; use rustc_middle::ty::{self, Ty}; use rustc_session::impl_lint_pass; -use rustc_span::{Span, sym}; +use rustc_span::Span; declare_clippy_lint! { /// ### What it does @@ -103,7 +103,7 @@ impl<'tcx> LateLintPass<'tcx> for ManualIsAsciiCheck { check_is_ascii(cx, macro_call.span, recv, &range, None); } } else if let ExprKind::MethodCall(path, receiver, [arg], ..) = expr.kind - && path.ident.name.as_str() == "contains" + && path.ident.name == sym::contains && let Some(higher::Range { start: Some(start), end: Some(end), @@ -148,7 +148,7 @@ fn check_is_ascii( }; let default_snip = ".."; let mut app = Applicability::MachineApplicable; - let recv = Sugg::hir_with_context(cx, recv, span.ctxt(), default_snip, &mut app).maybe_par(); + let recv = Sugg::hir_with_context(cx, recv, span.ctxt(), default_snip, &mut app).maybe_paren(); let mut suggestion = vec![(span, format!("{recv}.{sugg}()"))]; if let Some((ty_span, ty)) = ty_sugg { suggestion.push((ty_span, format!("{recv}: {ty}"))); diff --git a/src/tools/clippy/clippy_lints/src/manual_is_power_of_two.rs b/src/tools/clippy/clippy_lints/src/manual_is_power_of_two.rs index 841adfec4624b..b4cd988329d32 100644 --- a/src/tools/clippy/clippy_lints/src/manual_is_power_of_two.rs +++ b/src/tools/clippy/clippy_lints/src/manual_is_power_of_two.rs @@ -1,13 +1,14 @@ -use clippy_utils::SpanlessEq; +use clippy_config::Conf; use clippy_utils::diagnostics::span_lint_and_sugg; -use clippy_utils::source::snippet_with_applicability; -use rustc_ast::LitKind; -use rustc_data_structures::packed::Pu128; +use clippy_utils::msrvs::{self, Msrv}; +use clippy_utils::sugg::Sugg; +use clippy_utils::ty::ty_from_hir_ty; +use clippy_utils::{SpanlessEq, is_in_const_context, is_integer_literal}; use rustc_errors::Applicability; -use rustc_hir::{BinOpKind, Expr, ExprKind}; +use rustc_hir::{BinOpKind, Expr, ExprKind, QPath}; use rustc_lint::{LateContext, LateLintPass}; -use rustc_middle::ty::Uint; -use rustc_session::declare_lint_pass; +use rustc_middle::ty; +use rustc_session::impl_lint_pass; declare_clippy_lint! { /// ### What it does @@ -33,112 +34,111 @@ declare_clippy_lint! { "manually reimplementing `is_power_of_two`" } -declare_lint_pass!(ManualIsPowerOfTwo => [MANUAL_IS_POWER_OF_TWO]); +pub struct ManualIsPowerOfTwo { + msrv: Msrv, +} -impl LateLintPass<'_> for ManualIsPowerOfTwo { - fn check_expr(&mut self, cx: &LateContext<'_>, expr: &Expr<'_>) { - let mut applicability = Applicability::MachineApplicable; +impl_lint_pass!(ManualIsPowerOfTwo => [MANUAL_IS_POWER_OF_TWO]); - if let ExprKind::Binary(bin_op, left, right) = expr.kind - && bin_op.node == BinOpKind::Eq - { - // a.count_ones() == 1 - if let ExprKind::MethodCall(method_name, receiver, [], _) = left.kind - && method_name.ident.as_str() == "count_ones" - && let &Uint(_) = cx.typeck_results().expr_ty(receiver).kind() - && check_lit(right, 1) - { - build_sugg(cx, expr, receiver, &mut applicability); - } +impl ManualIsPowerOfTwo { + pub fn new(conf: &'static Conf) -> Self { + Self { msrv: conf.msrv } + } - // 1 == a.count_ones() - if let ExprKind::MethodCall(method_name, receiver, [], _) = right.kind - && method_name.ident.as_str() == "count_ones" - && let &Uint(_) = cx.typeck_results().expr_ty(receiver).kind() - && check_lit(left, 1) - { - build_sugg(cx, expr, receiver, &mut applicability); - } + fn build_sugg(&self, cx: &LateContext<'_>, expr: &Expr<'_>, receiver: &Expr<'_>) { + if is_in_const_context(cx) && !self.msrv.meets(cx, msrvs::CONST_IS_POWER_OF_TWO) { + return; + } - // a & (a - 1) == 0 - if let ExprKind::Binary(op1, left1, right1) = left.kind - && op1.node == BinOpKind::BitAnd - && let ExprKind::Binary(op2, left2, right2) = right1.kind - && op2.node == BinOpKind::Sub - && check_eq_expr(cx, left1, left2) - && let &Uint(_) = cx.typeck_results().expr_ty(left1).kind() - && check_lit(right2, 1) - && check_lit(right, 0) - { - build_sugg(cx, expr, left1, &mut applicability); - } + let mut applicability = Applicability::MachineApplicable; + let snippet = Sugg::hir_with_applicability(cx, receiver, "_", &mut applicability); - // (a - 1) & a == 0; - if let ExprKind::Binary(op1, left1, right1) = left.kind - && op1.node == BinOpKind::BitAnd - && let ExprKind::Binary(op2, left2, right2) = left1.kind - && op2.node == BinOpKind::Sub - && check_eq_expr(cx, right1, left2) - && let &Uint(_) = cx.typeck_results().expr_ty(right1).kind() - && check_lit(right2, 1) - && check_lit(right, 0) - { - build_sugg(cx, expr, right1, &mut applicability); - } + span_lint_and_sugg( + cx, + MANUAL_IS_POWER_OF_TWO, + expr.span, + "manually reimplementing `is_power_of_two`", + "consider using `.is_power_of_two()`", + format!("{}.is_power_of_two()", snippet.maybe_paren()), + applicability, + ); + } +} - // 0 == a & (a - 1); - if let ExprKind::Binary(op1, left1, right1) = right.kind - && op1.node == BinOpKind::BitAnd - && let ExprKind::Binary(op2, left2, right2) = right1.kind - && op2.node == BinOpKind::Sub - && check_eq_expr(cx, left1, left2) - && let &Uint(_) = cx.typeck_results().expr_ty(left1).kind() - && check_lit(right2, 1) - && check_lit(left, 0) +impl<'tcx> LateLintPass<'tcx> for ManualIsPowerOfTwo { + fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &Expr<'tcx>) { + if !expr.span.from_expansion() + && let Some((lhs, rhs)) = unexpanded_binop_operands(expr, BinOpKind::Eq) + { + if let Some(a) = count_ones_receiver(cx, lhs) + && is_integer_literal(rhs, 1) { - build_sugg(cx, expr, left1, &mut applicability); - } - - // 0 == (a - 1) & a - if let ExprKind::Binary(op1, left1, right1) = right.kind - && op1.node == BinOpKind::BitAnd - && let ExprKind::Binary(op2, left2, right2) = left1.kind - && op2.node == BinOpKind::Sub - && check_eq_expr(cx, right1, left2) - && let &Uint(_) = cx.typeck_results().expr_ty(right1).kind() - && check_lit(right2, 1) - && check_lit(left, 0) + self.build_sugg(cx, expr, a); + } else if let Some(a) = count_ones_receiver(cx, rhs) + && is_integer_literal(lhs, 1) + { + self.build_sugg(cx, expr, a); + } else if is_integer_literal(rhs, 0) + && let Some(a) = is_and_minus_one(cx, lhs) + { + self.build_sugg(cx, expr, a); + } else if is_integer_literal(lhs, 0) + && let Some(a) = is_and_minus_one(cx, rhs) { - build_sugg(cx, expr, right1, &mut applicability); + self.build_sugg(cx, expr, a); } } } } -fn build_sugg(cx: &LateContext<'_>, expr: &Expr<'_>, receiver: &Expr<'_>, applicability: &mut Applicability) { - let snippet = snippet_with_applicability(cx, receiver.span, "..", applicability); - - span_lint_and_sugg( - cx, - MANUAL_IS_POWER_OF_TWO, - expr.span, - "manually reimplementing `is_power_of_two`", - "consider using `.is_power_of_two()`", - format!("{snippet}.is_power_of_two()"), - *applicability, - ); +/// Return the unsigned integer receiver of `.count_ones()` or the argument of +/// `::count_ones(…)`. +fn count_ones_receiver<'tcx>(cx: &LateContext<'tcx>, expr: &Expr<'tcx>) -> Option<&'tcx Expr<'tcx>> { + let (method, ty, receiver) = if let ExprKind::MethodCall(method_name, receiver, [], _) = expr.kind { + (method_name, cx.typeck_results().expr_ty_adjusted(receiver), receiver) + } else if let ExprKind::Call(func, [arg]) = expr.kind + && let ExprKind::Path(QPath::TypeRelative(ty, func_name)) = func.kind + { + (func_name, ty_from_hir_ty(cx, ty), arg) + } else { + return None; + }; + (method.ident.as_str() == "count_ones" && matches!(ty.kind(), ty::Uint(_))).then_some(receiver) } -fn check_lit(expr: &Expr<'_>, expected_num: u128) -> bool { - if let ExprKind::Lit(lit) = expr.kind - && let LitKind::Int(Pu128(num), _) = lit.node - && num == expected_num +/// Return `greater` if `smaller == greater - 1` +fn is_one_less<'tcx>( + cx: &LateContext<'tcx>, + greater: &'tcx Expr<'tcx>, + smaller: &Expr<'tcx>, +) -> Option<&'tcx Expr<'tcx>> { + if let Some((lhs, rhs)) = unexpanded_binop_operands(smaller, BinOpKind::Sub) + && SpanlessEq::new(cx).eq_expr(greater, lhs) + && is_integer_literal(rhs, 1) + && matches!(cx.typeck_results().expr_ty_adjusted(greater).kind(), ty::Uint(_)) { - return true; + Some(greater) + } else { + None } - false } -fn check_eq_expr(cx: &LateContext<'_>, lhs: &Expr<'_>, rhs: &Expr<'_>) -> bool { - SpanlessEq::new(cx).eq_expr(lhs, rhs) +/// Return `v` if `expr` is `v & (v - 1)` or `(v - 1) & v` +fn is_and_minus_one<'tcx>(cx: &LateContext<'tcx>, expr: &Expr<'tcx>) -> Option<&'tcx Expr<'tcx>> { + let (lhs, rhs) = unexpanded_binop_operands(expr, BinOpKind::BitAnd)?; + is_one_less(cx, lhs, rhs).or_else(|| is_one_less(cx, rhs, lhs)) +} + +/// Return the operands of the `expr` binary operation if the operator is `op` and none of the +/// operands come from expansion. +fn unexpanded_binop_operands<'hir>(expr: &Expr<'hir>, op: BinOpKind) -> Option<(&'hir Expr<'hir>, &'hir Expr<'hir>)> { + if let ExprKind::Binary(binop, lhs, rhs) = expr.kind + && binop.node == op + && !lhs.span.from_expansion() + && !rhs.span.from_expansion() + { + Some((lhs, rhs)) + } else { + None + } } diff --git a/src/tools/clippy/clippy_lints/src/manual_option_as_slice.rs b/src/tools/clippy/clippy_lints/src/manual_option_as_slice.rs index 8dee29b2a0b5d..b365dbf088f58 100644 --- a/src/tools/clippy/clippy_lints/src/manual_option_as_slice.rs +++ b/src/tools/clippy/clippy_lints/src/manual_option_as_slice.rs @@ -1,14 +1,14 @@ use clippy_config::Conf; use clippy_utils::diagnostics::{span_lint, span_lint_and_sugg}; use clippy_utils::msrvs::Msrv; -use clippy_utils::{is_none_arm, msrvs, peel_hir_expr_refs}; +use clippy_utils::{is_none_arm, msrvs, peel_hir_expr_refs, sym}; use rustc_errors::Applicability; use rustc_hir::def::{DefKind, Res}; use rustc_hir::{Arm, Expr, ExprKind, LangItem, Pat, PatKind, QPath, is_range_literal}; use rustc_lint::{LateContext, LateLintPass}; use rustc_middle::ty; use rustc_session::impl_lint_pass; -use rustc_span::{Span, Symbol, sym}; +use rustc_span::{Span, Symbol}; declare_clippy_lint! { /// ### What it does @@ -34,7 +34,7 @@ declare_clippy_lint! { /// _ = opt.as_slice(); /// _ = opt.as_slice(); /// ``` - #[clippy::version = "1.85.0"] + #[clippy::version = "1.86.0"] pub MANUAL_OPTION_AS_SLICE, complexity, "manual `Option::as_slice`" @@ -76,7 +76,7 @@ impl LateLintPass<'_> for ManualOptionAsSlice { } }, ExprKind::MethodCall(seg, callee, [], _) => { - if seg.ident.name.as_str() == "unwrap_or_default" { + if seg.ident.name == sym::unwrap_or_default { check_map(cx, callee, span, self.msrv); } }, diff --git a/src/tools/clippy/clippy_lints/src/manual_retain.rs b/src/tools/clippy/clippy_lints/src/manual_retain.rs index 16dd1ad4e4784..98e8b1f5cf92c 100644 --- a/src/tools/clippy/clippy_lints/src/manual_retain.rs +++ b/src/tools/clippy/clippy_lints/src/manual_retain.rs @@ -92,10 +92,10 @@ fn check_into_iter( && let [filter_params] = filter_body.params { if match_map_type(cx, left_expr) { - if let hir::PatKind::Tuple([key_pat, value_pat], _) = filter_params.pat.kind { - if let Some(sugg) = make_sugg(cx, key_pat, value_pat, left_expr, filter_body) { - make_span_lint_and_sugg(cx, parent_expr_span, sugg); - } + if let hir::PatKind::Tuple([key_pat, value_pat], _) = filter_params.pat.kind + && let Some(sugg) = make_sugg(cx, key_pat, value_pat, left_expr, filter_body) + { + make_span_lint_and_sugg(cx, parent_expr_span, sugg); } // Cannot lint other cases because `retain` requires two parameters } else { @@ -196,22 +196,21 @@ fn check_to_owned( && let filter_body = cx.tcx.hir_body(closure.body) && let [filter_params] = filter_body.params && msrv.meets(cx, msrvs::STRING_RETAIN) + && let hir::PatKind::Ref(pat, _) = filter_params.pat.kind { - if let hir::PatKind::Ref(pat, _) = filter_params.pat.kind { - make_span_lint_and_sugg( - cx, - parent_expr_span, - format!( - "{}.retain(|{}| {})", - snippet(cx, left_expr.span, ".."), - snippet(cx, pat.span, ".."), - snippet(cx, filter_body.value.span, "..") - ), - ); - } - // Be conservative now. Do nothing for the `Binding` case. - // TODO: Ideally, we can rewrite the lambda by stripping one level of reference + make_span_lint_and_sugg( + cx, + parent_expr_span, + format!( + "{}.retain(|{}| {})", + snippet(cx, left_expr.span, ".."), + snippet(cx, pat.span, ".."), + snippet(cx, filter_body.value.span, "..") + ), + ); } + // Be conservative now. Do nothing for the `Binding` case. + // TODO: Ideally, we can rewrite the lambda by stripping one level of reference } fn make_sugg( diff --git a/src/tools/clippy/clippy_lints/src/manual_rotate.rs b/src/tools/clippy/clippy_lints/src/manual_rotate.rs index 07537fc65c08c..06ee00c2cef3c 100644 --- a/src/tools/clippy/clippy_lints/src/manual_rotate.rs +++ b/src/tools/clippy/clippy_lints/src/manual_rotate.rs @@ -101,7 +101,7 @@ impl LateLintPass<'_> for ManualRotate { (r_shift_dir, r_amount) }; let mut applicability = Applicability::MachineApplicable; - let expr_sugg = sugg::Sugg::hir_with_applicability(cx, l_expr, "_", &mut applicability).maybe_par(); + let expr_sugg = sugg::Sugg::hir_with_applicability(cx, l_expr, "_", &mut applicability).maybe_paren(); span_lint_and_sugg( cx, MANUAL_ROTATE, diff --git a/src/tools/clippy/clippy_lints/src/manual_string_new.rs b/src/tools/clippy/clippy_lints/src/manual_string_new.rs index 5c2a711b5cb23..7ca3b71206671 100644 --- a/src/tools/clippy/clippy_lints/src/manual_string_new.rs +++ b/src/tools/clippy/clippy_lints/src/manual_string_new.rs @@ -113,15 +113,14 @@ fn parse_call(cx: &LateContext<'_>, span: Span, func: &Expr<'_>, arg: &Expr<'_>) && is_expr_kind_empty_str(&arg.kind) { warn_then_suggest(cx, span); - } else if let QPath::Resolved(_, path) = qpath { + } else if let QPath::Resolved(_, path) = qpath // From::from(...) or TryFrom::try_from(...) - if let [path_seg1, path_seg2] = path.segments - && is_expr_kind_empty_str(&arg.kind) - && ((path_seg1.ident.name == sym::From && path_seg2.ident.name == sym::from) - || (path_seg1.ident.name == sym::TryFrom && path_seg2.ident.name == sym::try_from)) - { - warn_then_suggest(cx, span); - } + && let [path_seg1, path_seg2] = path.segments + && is_expr_kind_empty_str(&arg.kind) + && ((path_seg1.ident.name == sym::From && path_seg2.ident.name == sym::from) + || (path_seg1.ident.name == sym::TryFrom && path_seg2.ident.name == sym::try_from)) + { + warn_then_suggest(cx, span); } } } diff --git a/src/tools/clippy/clippy_lints/src/manual_unwrap_or_default.rs b/src/tools/clippy/clippy_lints/src/manual_unwrap_or_default.rs deleted file mode 100644 index 87d2faa225c52..0000000000000 --- a/src/tools/clippy/clippy_lints/src/manual_unwrap_or_default.rs +++ /dev/null @@ -1,212 +0,0 @@ -use rustc_errors::Applicability; -use rustc_hir::def::Res; -use rustc_hir::{Arm, Expr, ExprKind, HirId, LangItem, MatchSource, Pat, PatExpr, PatExprKind, PatKind, QPath}; -use rustc_lint::{LateContext, LateLintPass, LintContext}; -use rustc_middle::ty::GenericArgKind; -use rustc_session::declare_lint_pass; -use rustc_span::sym; - -use clippy_utils::diagnostics::span_lint_and_sugg; -use clippy_utils::higher::IfLetOrMatch; -use clippy_utils::sugg::Sugg; -use clippy_utils::ty::{expr_type_is_certain, implements_trait}; -use clippy_utils::{is_default_equivalent, is_in_const_context, path_res, peel_blocks, span_contains_comment}; - -declare_clippy_lint! { - /// ### What it does - /// Checks if a `match` or `if let` expression can be simplified using - /// `.unwrap_or_default()`. - /// - /// ### Why is this bad? - /// It can be done in one call with `.unwrap_or_default()`. - /// - /// ### Example - /// ```no_run - /// let x: Option = Some(String::new()); - /// let y: String = match x { - /// Some(v) => v, - /// None => String::new(), - /// }; - /// - /// let x: Option> = Some(Vec::new()); - /// let y: Vec = if let Some(v) = x { - /// v - /// } else { - /// Vec::new() - /// }; - /// ``` - /// Use instead: - /// ```no_run - /// let x: Option = Some(String::new()); - /// let y: String = x.unwrap_or_default(); - /// - /// let x: Option> = Some(Vec::new()); - /// let y: Vec = x.unwrap_or_default(); - /// ``` - #[clippy::version = "1.79.0"] - pub MANUAL_UNWRAP_OR_DEFAULT, - suspicious, - "check if a `match` or `if let` can be simplified with `unwrap_or_default`" -} - -declare_lint_pass!(ManualUnwrapOrDefault => [MANUAL_UNWRAP_OR_DEFAULT]); - -fn get_some<'tcx>(cx: &LateContext<'tcx>, pat: &Pat<'tcx>) -> Option { - if let PatKind::TupleStruct(QPath::Resolved(_, path), &[pat], _) = pat.kind - && let PatKind::Binding(_, pat_id, _, _) = pat.kind - && let Some(def_id) = path.res.opt_def_id() - // Since it comes from a pattern binding, we need to get the parent to actually match - // against it. - && let Some(def_id) = cx.tcx.opt_parent(def_id) - && (cx.tcx.lang_items().get(LangItem::OptionSome) == Some(def_id) - || cx.tcx.lang_items().get(LangItem::ResultOk) == Some(def_id)) - { - Some(pat_id) - } else { - None - } -} - -fn get_none<'tcx>(cx: &LateContext<'tcx>, arm: &Arm<'tcx>) -> Option<&'tcx Expr<'tcx>> { - if let PatKind::Expr(PatExpr { kind: PatExprKind::Path(QPath::Resolved(_, path)), .. }) = arm.pat.kind - && let Some(def_id) = path.res.opt_def_id() - // Since it comes from a pattern binding, we need to get the parent to actually match - // against it. - && let Some(def_id) = cx.tcx.opt_parent(def_id) - && cx.tcx.lang_items().get(LangItem::OptionNone) == Some(def_id) - { - Some(arm.body) - } else if let PatKind::TupleStruct(QPath::Resolved(_, path), _, _)= arm.pat.kind - && let Some(def_id) = path.res.opt_def_id() - // Since it comes from a pattern binding, we need to get the parent to actually match - // against it. - && let Some(def_id) = cx.tcx.opt_parent(def_id) - && cx.tcx.lang_items().get(LangItem::ResultErr) == Some(def_id) - { - Some(arm.body) - } else if let PatKind::Wild = arm.pat.kind { - // We consider that the `Some` check will filter it out if it's not right. - Some(arm.body) - } else { - None - } -} - -fn get_some_and_none_bodies<'tcx>( - cx: &LateContext<'tcx>, - arm1: &'tcx Arm<'tcx>, - arm2: &'tcx Arm<'tcx>, -) -> Option<((&'tcx Expr<'tcx>, HirId), &'tcx Expr<'tcx>)> { - if let Some(binding_id) = get_some(cx, arm1.pat) - && let Some(body_none) = get_none(cx, arm2) - { - Some(((arm1.body, binding_id), body_none)) - } else if let Some(binding_id) = get_some(cx, arm2.pat) - && let Some(body_none) = get_none(cx, arm1) - { - Some(((arm2.body, binding_id), body_none)) - } else { - None - } -} - -#[allow(clippy::needless_pass_by_value)] -fn handle<'tcx>(cx: &LateContext<'tcx>, if_let_or_match: IfLetOrMatch<'tcx>, expr: &'tcx Expr<'tcx>) { - // Get expr_name ("if let" or "match" depending on kind of expression), the condition, the body for - // the some arm, the body for the none arm and the binding id of the some arm - let (expr_name, condition, body_some, body_none, binding_id) = match if_let_or_match { - IfLetOrMatch::Match(condition, [arm1, arm2], MatchSource::Normal | MatchSource::ForLoopDesugar) - // Make sure there are no guards to keep things simple - if arm1.guard.is_none() - && arm2.guard.is_none() - // Get the some and none bodies and the binding id of the some arm - && let Some(((body_some, binding_id), body_none)) = get_some_and_none_bodies(cx, arm1, arm2) => - { - ("match", condition, body_some, body_none, binding_id) - }, - IfLetOrMatch::IfLet(condition, pat, if_expr, Some(else_expr), _) - if let Some(binding_id) = get_some(cx, pat) => - { - ("if let", condition, if_expr, else_expr, binding_id) - }, - _ => { - // All other cases (match with number of arms != 2, if let without else, etc.) - return; - }, - }; - - // We check if the return type of the expression implements Default. - let expr_type = cx.typeck_results().expr_ty(expr); - if let Some(default_trait_id) = cx.tcx.get_diagnostic_item(sym::Default) - && implements_trait(cx, expr_type, default_trait_id, &[]) - // We check if the initial condition implements Default. - && let Some(condition_ty) = cx.typeck_results().expr_ty(condition).walk().nth(1) - && let GenericArgKind::Type(condition_ty) = condition_ty.unpack() - && implements_trait(cx, condition_ty, default_trait_id, &[]) - // We check that the `Some(x) => x` doesn't do anything apart "returning" the value in `Some`. - && let ExprKind::Path(QPath::Resolved(_, path)) = peel_blocks(body_some).kind - && let Res::Local(local_id) = path.res - && local_id == binding_id - // We now check the `None` arm is calling a method equivalent to `Default::default`. - && let body_none = peel_blocks(body_none) - && is_default_equivalent(cx, body_none) - && let Some(receiver) = Sugg::hir_opt(cx, condition).map(Sugg::maybe_par) - { - // Machine applicable only if there are no comments present - let applicability = if span_contains_comment(cx.sess().source_map(), expr.span) { - Applicability::MaybeIncorrect - } else { - Applicability::MachineApplicable - }; - - // We now check if the condition is a None variant, in which case we need to specify the type - if path_res(cx, condition) - .opt_def_id() - .is_some_and(|id| Some(cx.tcx.parent(id)) == cx.tcx.lang_items().option_none_variant()) - { - return span_lint_and_sugg( - cx, - MANUAL_UNWRAP_OR_DEFAULT, - expr.span, - format!("{expr_name} can be simplified with `.unwrap_or_default()`"), - "replace it with", - format!("{receiver}::<{expr_type}>.unwrap_or_default()"), - applicability, - ); - } - - // We check if the expression type is still uncertain, in which case we ask the user to specify it - if !expr_type_is_certain(cx, condition) { - return span_lint_and_sugg( - cx, - MANUAL_UNWRAP_OR_DEFAULT, - expr.span, - format!("{expr_name} can be simplified with `.unwrap_or_default()`"), - format!("ascribe the type {expr_type} and replace your expression with"), - format!("{receiver}.unwrap_or_default()"), - Applicability::Unspecified, - ); - } - - span_lint_and_sugg( - cx, - MANUAL_UNWRAP_OR_DEFAULT, - expr.span, - format!("{expr_name} can be simplified with `.unwrap_or_default()`"), - "replace it with", - format!("{receiver}.unwrap_or_default()"), - applicability, - ); - } -} - -impl<'tcx> LateLintPass<'tcx> for ManualUnwrapOrDefault { - fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'tcx>) { - if let Some(if_let_or_match) = IfLetOrMatch::parse(cx, expr) - && !expr.span.from_expansion() - && !is_in_const_context(cx) - { - handle(cx, if_let_or_match, expr); - } - } -} diff --git a/src/tools/clippy/clippy_lints/src/map_unit_fn.rs b/src/tools/clippy/clippy_lints/src/map_unit_fn.rs index 56aead85e7c41..b607f8117eb89 100644 --- a/src/tools/clippy/clippy_lints/src/map_unit_fn.rs +++ b/src/tools/clippy/clippy_lints/src/map_unit_fn.rs @@ -101,10 +101,10 @@ fn is_unit_type(ty: Ty<'_>) -> bool { fn is_unit_function(cx: &LateContext<'_>, expr: &hir::Expr<'_>) -> bool { let ty = cx.typeck_results().expr_ty(expr); - if let ty::FnDef(id, _) = *ty.kind() { - if let Some(fn_type) = cx.tcx.fn_sig(id).instantiate_identity().no_bound_vars() { - return is_unit_type(fn_type.output()); - } + if let ty::FnDef(id, _) = *ty.kind() + && let Some(fn_type) = cx.tcx.fn_sig(id).instantiate_identity().no_bound_vars() + { + return is_unit_type(fn_type.output()); } false } diff --git a/src/tools/clippy/clippy_lints/src/matches/collapsible_match.rs b/src/tools/clippy/clippy_lints/src/matches/collapsible_match.rs index 6f446bf956587..5b50efad3e44e 100644 --- a/src/tools/clippy/clippy_lints/src/matches/collapsible_match.rs +++ b/src/tools/clippy/clippy_lints/src/matches/collapsible_match.rs @@ -1,4 +1,4 @@ -use clippy_utils::diagnostics::span_lint_and_then; +use clippy_utils::diagnostics::span_lint_hir_and_then; use clippy_utils::higher::IfLetOrMatch; use clippy_utils::msrvs::Msrv; use clippy_utils::source::snippet; @@ -99,7 +99,7 @@ fn check_arm<'tcx>( } else { String::new() }; - span_lint_and_then(cx, COLLAPSIBLE_MATCH, inner_expr.span, msg, |diag| { + span_lint_hir_and_then(cx, COLLAPSIBLE_MATCH, inner_expr.hir_id, inner_expr.span, msg, |diag| { let mut help_span = MultiSpan::from_spans(vec![binding_span, inner_then_pat.span]); help_span.push_span_label(binding_span, "replace this binding"); help_span.push_span_label(inner_then_pat.span, format!("with this pattern{replace_msg}")); diff --git a/src/tools/clippy/clippy_lints/src/matches/manual_filter.rs b/src/tools/clippy/clippy_lints/src/matches/manual_filter.rs index 4cc43e427ec61..abf723fa6f4ca 100644 --- a/src/tools/clippy/clippy_lints/src/matches/manual_filter.rs +++ b/src/tools/clippy/clippy_lints/src/matches/manual_filter.rs @@ -41,10 +41,10 @@ fn get_cond_expr<'tcx>( fn peels_blocks_incl_unsafe_opt<'a>(expr: &'a Expr<'a>) -> Option<&'a Expr<'a>> { // we don't want to use `peel_blocks` here because we don't care if the block is unsafe, it's // checked by `contains_unsafe_block` - if let ExprKind::Block(block, None) = expr.kind { - if block.stmts.is_empty() { - return block.expr; - } + if let ExprKind::Block(block, None) = expr.kind + && block.stmts.is_empty() + { + return block.expr; } None } @@ -61,13 +61,13 @@ fn peels_blocks_incl_unsafe<'a>(expr: &'a Expr<'a>) -> &'a Expr<'a> { // } // Returns true if resolves to `Some(x)`, `false` otherwise fn is_some_expr(cx: &LateContext<'_>, target: HirId, ctxt: SyntaxContext, expr: &Expr<'_>) -> bool { - if let Some(inner_expr) = peels_blocks_incl_unsafe_opt(expr) { + if let Some(inner_expr) = peels_blocks_incl_unsafe_opt(expr) // there can be not statements in the block as they would be removed when switching to `.filter` - if let ExprKind::Call(callee, [arg]) = inner_expr.kind { - return ctxt == expr.span.ctxt() - && is_res_lang_ctor(cx, path_res(cx, callee), OptionSome) - && path_to_local_id(arg, target); - } + && let ExprKind::Call(callee, [arg]) = inner_expr.kind + { + return ctxt == expr.span.ctxt() + && is_res_lang_ctor(cx, path_res(cx, callee), OptionSome) + && path_to_local_id(arg, target); } false } diff --git a/src/tools/clippy/clippy_lints/src/matches/manual_ok_err.rs b/src/tools/clippy/clippy_lints/src/matches/manual_ok_err.rs index 576e42a564c2b..4959908dad635 100644 --- a/src/tools/clippy/clippy_lints/src/matches/manual_ok_err.rs +++ b/src/tools/clippy/clippy_lints/src/matches/manual_ok_err.rs @@ -85,7 +85,7 @@ fn is_variant_or_wildcard(cx: &LateContext<'_>, pat: &Pat<'_>, can_be_wild: bool /// contains `Err(IDENT)`, `None` otherwise. fn is_ok_or_err<'hir>(cx: &LateContext<'_>, pat: &Pat<'hir>) -> Option<(bool, &'hir Ident)> { if let PatKind::TupleStruct(qpath, [arg], _) = &pat.kind - && let PatKind::Binding(BindingMode::NONE, _, ident, _) = &arg.kind + && let PatKind::Binding(BindingMode::NONE, _, ident, None) = &arg.kind && let res = cx.qpath_res(qpath, pat.hir_id) && let Res::Def(DefKind::Ctor(..), id) = res && let id @ Some(_) = cx.tcx.opt_parent(id) @@ -132,7 +132,7 @@ fn apply_lint(cx: &LateContext<'_>, expr: &Expr<'_>, scrutinee: &Expr<'_>, is_ok } else { Applicability::MachineApplicable }; - let scrut = Sugg::hir_with_applicability(cx, scrutinee, "..", &mut app).maybe_par(); + let scrut = Sugg::hir_with_applicability(cx, scrutinee, "..", &mut app).maybe_paren(); let sugg = format!("{scrut}.{method}()"); // If the expression being expanded is the `if …` part of an `else if …`, it must be blockified. let sugg = if let Some(parent_expr) = get_parent_expr(cx, expr) diff --git a/src/tools/clippy/clippy_lints/src/matches/manual_unwrap_or.rs b/src/tools/clippy/clippy_lints/src/matches/manual_unwrap_or.rs index 2bf7ec8ab7dde..b64ae0b24d818 100644 --- a/src/tools/clippy/clippy_lints/src/matches/manual_unwrap_or.rs +++ b/src/tools/clippy/clippy_lints/src/matches/manual_unwrap_or.rs @@ -1,133 +1,219 @@ use clippy_utils::consts::ConstEvalCtxt; -use clippy_utils::diagnostics::span_lint_and_sugg; -use clippy_utils::source::{SpanRangeExt, indent_of, reindent_multiline}; -use clippy_utils::ty::is_type_diagnostic_item; -use clippy_utils::usage::contains_return_break_continue_macro; -use clippy_utils::{is_res_lang_ctor, path_to_local_id, peel_blocks, sugg}; +use clippy_utils::source::{SpanRangeExt as _, indent_of, reindent_multiline}; use rustc_errors::Applicability; -use rustc_hir::LangItem::{OptionNone, ResultErr}; -use rustc_hir::def::{DefKind, Res}; -use rustc_hir::{Arm, Expr, Pat, PatExpr, PatExprKind, PatKind}; -use rustc_lint::LateContext; -use rustc_middle::ty::Ty; +use rustc_hir::def::Res; +use rustc_hir::{Arm, Expr, ExprKind, HirId, LangItem, Pat, PatExpr, PatExprKind, PatKind, QPath}; +use rustc_lint::{LateContext, LintContext}; +use rustc_middle::ty::{GenericArgKind, Ty}; use rustc_span::sym; -use super::MANUAL_UNWRAP_OR; +use clippy_utils::diagnostics::span_lint_and_sugg; +use clippy_utils::sugg::Sugg; +use clippy_utils::ty::{expr_type_is_certain, get_type_diagnostic_name, implements_trait}; +use clippy_utils::{is_default_equivalent, is_lint_allowed, path_res, peel_blocks, span_contains_comment}; -pub(super) fn check_match<'tcx>( - cx: &LateContext<'tcx>, - expr: &'tcx Expr<'tcx>, - scrutinee: &'tcx Expr<'_>, - arms: &'tcx [Arm<'_>], -) { - let ty = cx.typeck_results().expr_ty(scrutinee); - if let Some((or_arm, unwrap_arm)) = applicable_or_arm(cx, arms) { - check_and_lint(cx, expr, unwrap_arm.pat, scrutinee, unwrap_arm.body, or_arm.body, ty); +use super::{MANUAL_UNWRAP_OR, MANUAL_UNWRAP_OR_DEFAULT}; + +fn get_some(cx: &LateContext<'_>, pat: &Pat<'_>) -> Option { + if let PatKind::TupleStruct(QPath::Resolved(_, path), &[pat], _) = pat.kind + && let PatKind::Binding(_, pat_id, _, _) = pat.kind + && let Some(def_id) = path.res.opt_def_id() + // Since it comes from a pattern binding, we need to get the parent to actually match + // against it. + && let Some(def_id) = cx.tcx.opt_parent(def_id) + && let Some(lang_item) = cx.tcx.lang_items().from_def_id(def_id) + && matches!(lang_item, LangItem::OptionSome | LangItem::ResultOk) + { + Some(pat_id) + } else { + None } } -pub(super) fn check_if_let<'tcx>( - cx: &LateContext<'tcx>, - expr: &'tcx Expr<'_>, - let_pat: &'tcx Pat<'_>, - let_expr: &'tcx Expr<'_>, - then_expr: &'tcx Expr<'_>, - else_expr: &'tcx Expr<'_>, -) { - let ty = cx.typeck_results().expr_ty(let_expr); - let then_ty = cx.typeck_results().expr_ty(then_expr); - // The signature is `fn unwrap_or(self: Option, default: T) -> T`. - // When `expr_adjustments(then_expr).is_empty()`, `T` should equate to `default`'s type. - // Otherwise, type error will occur. - if cx.typeck_results().expr_adjustments(then_expr).is_empty() - && let rustc_middle::ty::Adt(_did, args) = ty.kind() - && let Some(some_ty) = args.first().and_then(|arg| arg.as_type()) - && some_ty != then_ty +fn get_none<'tcx>(cx: &LateContext<'_>, arm: &Arm<'tcx>) -> Option<&'tcx Expr<'tcx>> { + if let PatKind::Expr(PatExpr { kind: PatExprKind::Path(QPath::Resolved(_, path)), .. }) = arm.pat.kind + && let Some(def_id) = path.res.opt_def_id() + // Since it comes from a pattern binding, we need to get the parent to actually match + // against it. + && let Some(def_id) = cx.tcx.opt_parent(def_id) + && cx.tcx.lang_items().get(LangItem::OptionNone) == Some(def_id) { - return; + Some(arm.body) + } else if let PatKind::TupleStruct(QPath::Resolved(_, path), _, _)= arm.pat.kind + && let Some(def_id) = path.res.opt_def_id() + // Since it comes from a pattern binding, we need to get the parent to actually match + // against it. + && let Some(def_id) = cx.tcx.opt_parent(def_id) + && cx.tcx.lang_items().get(LangItem::ResultErr) == Some(def_id) + { + Some(arm.body) + } else if let PatKind::Wild = arm.pat.kind { + // We consider that the `Some` check will filter it out if it's not right. + Some(arm.body) + } else { + None } - check_and_lint(cx, expr, let_pat, let_expr, then_expr, peel_blocks(else_expr), ty); } -fn check_and_lint<'tcx>( +fn get_some_and_none_bodies<'tcx>( cx: &LateContext<'tcx>, - expr: &'tcx Expr<'_>, - let_pat: &'tcx Pat<'_>, - let_expr: &'tcx Expr<'_>, - then_expr: &'tcx Expr<'_>, - else_expr: &'tcx Expr<'_>, - ty: Ty<'tcx>, + arm1: &'tcx Arm<'tcx>, + arm2: &'tcx Arm<'tcx>, +) -> Option<((&'tcx Expr<'tcx>, HirId), &'tcx Expr<'tcx>)> { + if let Some(binding_id) = get_some(cx, arm1.pat) + && let Some(body_none) = get_none(cx, arm2) + { + Some(((arm1.body, binding_id), body_none)) + } else if let Some(binding_id) = get_some(cx, arm2.pat) + && let Some(body_none) = get_none(cx, arm1) + { + Some(((arm2.body, binding_id), body_none)) + } else { + None + } +} + +fn handle( + cx: &LateContext<'_>, + expr: &Expr<'_>, + expr_name: &'static str, + condition: &Expr<'_>, + body_some: &Expr<'_>, + body_none: &Expr<'_>, + binding_id: HirId, ) { - if let PatKind::TupleStruct(ref qpath, [unwrap_pat], _) = let_pat.kind - && let Res::Def(DefKind::Ctor(..), ctor_id) = cx.qpath_res(qpath, let_pat.hir_id) - && let Some(variant_id) = cx.tcx.opt_parent(ctor_id) - && (cx.tcx.lang_items().option_some_variant() == Some(variant_id) - || cx.tcx.lang_items().result_ok_variant() == Some(variant_id)) - && let PatKind::Binding(_, binding_hir_id, ..) = unwrap_pat.kind - && path_to_local_id(peel_blocks(then_expr), binding_hir_id) - && cx.typeck_results().expr_adjustments(then_expr).is_empty() - && let Some(ty_name) = find_type_name(cx, ty) - && let Some(or_body_snippet) = else_expr.span.get_source_text(cx) - && let Some(indent) = indent_of(cx, expr.span) - && ConstEvalCtxt::new(cx).eval_simple(else_expr).is_some() + // Only deal with situations where both alternatives return the same non-adjusted type. + if cx.typeck_results().expr_ty(body_some) != cx.typeck_results().expr_ty(body_none) { + return; + } + + let expr_type = cx.typeck_results().expr_ty(expr); + // We check that the `Some(x) => x` doesn't do anything apart "returning" the value in `Some`. + if let ExprKind::Path(QPath::Resolved(_, path)) = peel_blocks(body_some).kind + && let Res::Local(local_id) = path.res + && local_id == binding_id { - lint(cx, expr, let_expr, ty_name, &or_body_snippet, indent); + // Machine applicable only if there are no comments present + let mut applicability = if span_contains_comment(cx.sess().source_map(), expr.span) { + Applicability::MaybeIncorrect + } else { + Applicability::MachineApplicable + }; + let receiver = Sugg::hir_with_applicability(cx, condition, "_", &mut applicability).maybe_paren(); + + // We now check the `None` arm is calling a method equivalent to `Default::default`. + if !is_lint_allowed(cx, MANUAL_UNWRAP_OR_DEFAULT, expr.hir_id) + // We check if the return type of the expression implements Default. + && let Some(default_trait_id) = cx.tcx.get_diagnostic_item(sym::Default) + && implements_trait(cx, expr_type, default_trait_id, &[]) + // We check if the initial condition implements Default. + && let Some(condition_ty) = cx.typeck_results().expr_ty(condition).walk().nth(1) + && let GenericArgKind::Type(condition_ty) = condition_ty.unpack() + && implements_trait(cx, condition_ty, default_trait_id, &[]) + && is_default_equivalent(cx, peel_blocks(body_none)) + { + // We now check if the condition is a None variant, in which case we need to specify the type + if path_res(cx, condition) + .opt_def_id() + .is_some_and(|id| Some(cx.tcx.parent(id)) == cx.tcx.lang_items().option_none_variant()) + { + return span_lint_and_sugg( + cx, + MANUAL_UNWRAP_OR_DEFAULT, + expr.span, + format!("{expr_name} can be simplified with `.unwrap_or_default()`"), + "replace it with", + format!("{receiver}::<{expr_type}>.unwrap_or_default()"), + applicability, + ); + } + + // We check if the expression type is still uncertain, in which case we ask the user to specify it + if !expr_type_is_certain(cx, condition) { + return span_lint_and_sugg( + cx, + MANUAL_UNWRAP_OR_DEFAULT, + expr.span, + format!("{expr_name} can be simplified with `.unwrap_or_default()`"), + format!("ascribe the type {expr_type} and replace your expression with"), + format!("{receiver}.unwrap_or_default()"), + Applicability::Unspecified, + ); + } + + span_lint_and_sugg( + cx, + MANUAL_UNWRAP_OR_DEFAULT, + expr.span, + format!("{expr_name} can be simplified with `.unwrap_or_default()`"), + "replace it with", + format!("{receiver}.unwrap_or_default()"), + applicability, + ); + } else if let Some(ty_name) = find_type_name(cx, cx.typeck_results().expr_ty(condition)) + && cx.typeck_results().expr_adjustments(body_some).is_empty() + && let Some(or_body_snippet) = peel_blocks(body_none).span.get_source_text(cx) + && let Some(indent) = indent_of(cx, expr.span) + && ConstEvalCtxt::new(cx).eval_simple(body_none).is_some() + { + let reindented_or_body = reindent_multiline(&or_body_snippet, true, Some(indent)); + let mut app = Applicability::MachineApplicable; + let suggestion = Sugg::hir_with_context(cx, condition, expr.span.ctxt(), "..", &mut app).maybe_paren(); + span_lint_and_sugg( + cx, + MANUAL_UNWRAP_OR, + expr.span, + format!("this pattern reimplements `{ty_name}::unwrap_or`"), + "replace with", + format!("{suggestion}.unwrap_or({reindented_or_body})",), + app, + ); + } } } fn find_type_name<'tcx>(cx: &LateContext<'tcx>, ty: Ty<'tcx>) -> Option<&'static str> { - if is_type_diagnostic_item(cx, ty, sym::Option) { - Some("Option") - } else if is_type_diagnostic_item(cx, ty, sym::Result) { - Some("Result") - } else { - None + match get_type_diagnostic_name(cx, ty)? { + sym::Option => Some("Option"), + sym::Result => Some("Result"), + _ => None, } } -fn applicable_or_arm<'a>(cx: &LateContext<'_>, arms: &'a [Arm<'a>]) -> Option<(&'a Arm<'a>, &'a Arm<'a>)> { - if arms.len() == 2 - && arms.iter().all(|arm| arm.guard.is_none()) - && let Some((idx, or_arm)) = arms.iter().enumerate().find(|(_, arm)| match arm.pat.kind { - PatKind::Expr(PatExpr { - hir_id, - kind: PatExprKind::Path(qpath), - .. - }) => is_res_lang_ctor(cx, cx.qpath_res(qpath, *hir_id), OptionNone), - PatKind::TupleStruct(ref qpath, [pat], _) => { - matches!(pat.kind, PatKind::Wild) - && is_res_lang_ctor(cx, cx.qpath_res(qpath, arm.pat.hir_id), ResultErr) - }, - _ => false, - }) - && let unwrap_arm = &arms[1 - idx] - && !contains_return_break_continue_macro(or_arm.body) +pub fn check_match<'tcx>( + cx: &LateContext<'tcx>, + expr: &'tcx Expr<'tcx>, + scrutinee: &'tcx Expr<'tcx>, + arms: &'tcx [Arm<'tcx>], +) { + if let [arm1, arm2] = arms + // Make sure there are no guards to keep things simple + && arm1.guard.is_none() + && arm2.guard.is_none() + // Get the some and none bodies and the binding id of the some arm + && let Some(((body_some, binding_id), body_none)) = get_some_and_none_bodies(cx, arm1, arm2) { - Some((or_arm, unwrap_arm)) - } else { - None + handle(cx, expr, "match", scrutinee, body_some, body_none, binding_id); } } -fn lint<'tcx>( +pub fn check_if_let<'tcx>( cx: &LateContext<'tcx>, - expr: &Expr<'tcx>, - scrutinee: &'tcx Expr<'_>, - ty_name: &str, - or_body_snippet: &str, - indent: usize, + expr: &'tcx Expr<'tcx>, + pat: &'tcx Pat<'tcx>, + scrutinee: &'tcx Expr<'tcx>, + then_expr: &'tcx Expr<'tcx>, + else_expr: &'tcx Expr<'tcx>, ) { - let reindented_or_body = reindent_multiline(or_body_snippet, true, Some(indent)); - - let mut app = Applicability::MachineApplicable; - let suggestion = sugg::Sugg::hir_with_context(cx, scrutinee, expr.span.ctxt(), "..", &mut app).maybe_par(); - span_lint_and_sugg( - cx, - MANUAL_UNWRAP_OR, - expr.span, - format!("this pattern reimplements `{ty_name}::unwrap_or`"), - "replace with", - format!("{suggestion}.unwrap_or({reindented_or_body})",), - app, - ); + if let Some(binding_id) = get_some(cx, pat) { + handle( + cx, + expr, + "if let", + scrutinee, + peel_blocks(then_expr), + peel_blocks(else_expr), + binding_id, + ); + } } diff --git a/src/tools/clippy/clippy_lints/src/matches/match_like_matches.rs b/src/tools/clippy/clippy_lints/src/matches/match_like_matches.rs index d29d1ea3e96d9..f14b69d91ce4b 100644 --- a/src/tools/clippy/clippy_lints/src/matches/match_like_matches.rs +++ b/src/tools/clippy/clippy_lints/src/matches/match_like_matches.rs @@ -76,17 +76,18 @@ where && first_attrs.is_empty() && iter.all(|arm| find_bool_lit(&arm.2.kind).is_some_and(|b| b == b0) && arm.3.is_none() && arm.0.is_empty()) { - if let Some(last_pat) = last_pat_opt { - if !is_wild(last_pat) { - return false; - } + if let Some(last_pat) = last_pat_opt + && !is_wild(last_pat) + { + return false; } for arm in iter_without_last.clone() { - if let Some(pat) = arm.1 { - if !is_lint_allowed(cx, REDUNDANT_PATTERN_MATCHING, pat.hir_id) && is_some(pat.kind) { - return false; - } + if let Some(pat) = arm.1 + && !is_lint_allowed(cx, REDUNDANT_PATTERN_MATCHING, pat.hir_id) + && is_some(pat.kind) + { + return false; } } @@ -113,10 +114,10 @@ where // strip potential borrows (#6503), but only if the type is a reference let mut ex_new = ex; - if let ExprKind::AddrOf(BorrowKind::Ref, .., ex_inner) = ex.kind { - if let ty::Ref(..) = cx.typeck_results().expr_ty(ex_inner).kind() { - ex_new = ex_inner; - } + if let ExprKind::AddrOf(BorrowKind::Ref, .., ex_inner) = ex.kind + && let ty::Ref(..) = cx.typeck_results().expr_ty(ex_inner).kind() + { + ex_new = ex_inner; } span_lint_and_sugg( cx, diff --git a/src/tools/clippy/clippy_lints/src/matches/match_on_vec_items.rs b/src/tools/clippy/clippy_lints/src/matches/match_on_vec_items.rs deleted file mode 100644 index dd71560e169ea..0000000000000 --- a/src/tools/clippy/clippy_lints/src/matches/match_on_vec_items.rs +++ /dev/null @@ -1,50 +0,0 @@ -use clippy_utils::diagnostics::span_lint_and_sugg; -use clippy_utils::source::snippet; -use clippy_utils::ty::{is_type_diagnostic_item, is_type_lang_item}; -use rustc_errors::Applicability; -use rustc_hir::{Expr, ExprKind, LangItem}; -use rustc_lint::LateContext; -use rustc_span::sym; - -use super::MATCH_ON_VEC_ITEMS; - -pub(super) fn check<'tcx>(cx: &LateContext<'tcx>, scrutinee: &'tcx Expr<'_>) { - if let Some(idx_expr) = is_vec_indexing(cx, scrutinee) - && let ExprKind::Index(vec, idx, _) = idx_expr.kind - { - // FIXME: could be improved to suggest surrounding every pattern with Some(_), - // but only when `or_patterns` are stabilized. - span_lint_and_sugg( - cx, - MATCH_ON_VEC_ITEMS, - scrutinee.span, - "indexing into a vector may panic", - "try", - format!("{}.get({})", snippet(cx, vec.span, ".."), snippet(cx, idx.span, "..")), - Applicability::MaybeIncorrect, - ); - } -} - -fn is_vec_indexing<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'tcx>) -> Option<&'tcx Expr<'tcx>> { - if let ExprKind::Index(array, index, _) = expr.kind - && is_vector(cx, array) - && !is_full_range(cx, index) - { - return Some(expr); - } - - None -} - -fn is_vector(cx: &LateContext<'_>, expr: &Expr<'_>) -> bool { - let ty = cx.typeck_results().expr_ty(expr); - let ty = ty.peel_refs(); - is_type_diagnostic_item(cx, ty, sym::Vec) -} - -fn is_full_range(cx: &LateContext<'_>, expr: &Expr<'_>) -> bool { - let ty = cx.typeck_results().expr_ty(expr); - let ty = ty.peel_refs(); - is_type_lang_item(cx, ty, LangItem::RangeFull) -} diff --git a/src/tools/clippy/clippy_lints/src/matches/match_same_arms.rs b/src/tools/clippy/clippy_lints/src/matches/match_same_arms.rs index 250f17fa9025a..a21597ffb93d7 100644 --- a/src/tools/clippy/clippy_lints/src/matches/match_same_arms.rs +++ b/src/tools/clippy/clippy_lints/src/matches/match_same_arms.rs @@ -253,6 +253,7 @@ fn iter_matching_struct_fields<'a>( impl<'a> NormalizedPat<'a> { fn from_pat(cx: &LateContext<'_>, arena: &'a DroplessArena, pat: &'a Pat<'_>) -> Self { match pat.kind { + PatKind::Missing => unreachable!(), PatKind::Wild | PatKind::Binding(.., None) => Self::Wild, PatKind::Binding(.., Some(pat)) | PatKind::Box(pat) diff --git a/src/tools/clippy/clippy_lints/src/matches/match_single_binding.rs b/src/tools/clippy/clippy_lints/src/matches/match_single_binding.rs index 864923b27739d..adda35869900d 100644 --- a/src/tools/clippy/clippy_lints/src/matches/match_single_binding.rs +++ b/src/tools/clippy/clippy_lints/src/matches/match_single_binding.rs @@ -1,6 +1,6 @@ use clippy_utils::diagnostics::span_lint_and_sugg; use clippy_utils::macros::HirNode; -use clippy_utils::source::{indent_of, snippet, snippet_block_with_context, snippet_with_applicability}; +use clippy_utils::source::{indent_of, snippet, snippet_block_with_context, snippet_with_context}; use clippy_utils::{get_parent_expr, is_refutable, peel_blocks}; use rustc_errors::Applicability; use rustc_hir::{Arm, Expr, ExprKind, Node, PatKind, StmtKind}; @@ -24,16 +24,10 @@ pub(crate) fn check<'a>(cx: &LateContext<'a>, ex: &Expr<'a>, arms: &[Arm<'_>], e let bind_names = arms[0].pat.span; let match_body = peel_blocks(arms[0].body); let mut app = Applicability::MaybeIncorrect; - let mut snippet_body = snippet_block_with_context( - cx, - match_body.span, - arms[0].span.ctxt(), - "..", - Some(expr.span), - &mut app, - ) - .0 - .to_string(); + let ctxt = expr.span.ctxt(); + let mut snippet_body = snippet_block_with_context(cx, match_body.span, ctxt, "..", Some(expr.span), &mut app) + .0 + .to_string(); // Do we need to add ';' to suggestion ? if let Node::Stmt(stmt) = cx.tcx.parent_hir_node(expr.hir_id) @@ -77,10 +71,10 @@ pub(crate) fn check<'a>(cx: &LateContext<'a>, ex: &Expr<'a>, arms: &[Arm<'_>], e span, format!( "let {} = {};\n{}let {} = {snippet_body};", - snippet_with_applicability(cx, bind_names, "..", &mut app), - snippet_with_applicability(cx, matched_vars, "..", &mut app), + snippet_with_context(cx, bind_names, ctxt, "..", &mut app).0, + snippet_with_context(cx, matched_vars, ctxt, "..", &mut app).0, " ".repeat(indent_of(cx, expr.span).unwrap_or(0)), - snippet_with_applicability(cx, pat_span, "..", &mut app) + snippet_with_context(cx, pat_span, ctxt, "..", &mut app).0 ), ), None => { @@ -178,24 +172,24 @@ fn sugg_with_curlies<'a>( let mut indent = " ".repeat(indent_of(cx, ex.span).unwrap_or(0)); let (mut cbrace_start, mut cbrace_end) = (String::new(), String::new()); - if let Some(parent_expr) = get_parent_expr(cx, match_expr) { - if let ExprKind::Closure { .. } = parent_expr.kind { - cbrace_end = format!("\n{indent}}}"); - // Fix body indent due to the closure - indent = " ".repeat(indent_of(cx, bind_names).unwrap_or(0)); - cbrace_start = format!("{{\n{indent}"); - } + if let Some(parent_expr) = get_parent_expr(cx, match_expr) + && let ExprKind::Closure { .. } = parent_expr.kind + { + cbrace_end = format!("\n{indent}}}"); + // Fix body indent due to the closure + indent = " ".repeat(indent_of(cx, bind_names).unwrap_or(0)); + cbrace_start = format!("{{\n{indent}"); } // If the parent is already an arm, and the body is another match statement, // we need curly braces around suggestion - if let Node::Arm(arm) = &cx.tcx.parent_hir_node(match_expr.hir_id) { - if let ExprKind::Match(..) = arm.body.kind { - cbrace_end = format!("\n{indent}}}"); - // Fix body indent due to the match - indent = " ".repeat(indent_of(cx, bind_names).unwrap_or(0)); - cbrace_start = format!("{{\n{indent}"); - } + if let Node::Arm(arm) = &cx.tcx.parent_hir_node(match_expr.hir_id) + && let ExprKind::Match(..) = arm.body.kind + { + cbrace_end = format!("\n{indent}}}"); + // Fix body indent due to the match + indent = " ".repeat(indent_of(cx, bind_names).unwrap_or(0)); + cbrace_start = format!("{{\n{indent}"); } let assignment_str = assignment.map_or_else(String::new, |span| { @@ -204,14 +198,17 @@ fn sugg_with_curlies<'a>( s }); + let ctxt = match_expr.span.ctxt(); let scrutinee = if needs_var_binding { format!( "let {} = {}", - snippet_with_applicability(cx, bind_names, "..", applicability), - snippet_with_applicability(cx, matched_vars, "..", applicability) + snippet_with_context(cx, bind_names, ctxt, "..", applicability).0, + snippet_with_context(cx, matched_vars, ctxt, "..", applicability).0 ) } else { - snippet_with_applicability(cx, matched_vars, "..", applicability).to_string() + snippet_with_context(cx, matched_vars, ctxt, "..", applicability) + .0 + .to_string() }; format!("{cbrace_start}{scrutinee};\n{indent}{assignment_str}{snippet_body}{cbrace_end}") diff --git a/src/tools/clippy/clippy_lints/src/matches/match_str_case_mismatch.rs b/src/tools/clippy/clippy_lints/src/matches/match_str_case_mismatch.rs index df1b83cbb516a..65b93a095b926 100644 --- a/src/tools/clippy/clippy_lints/src/matches/match_str_case_mismatch.rs +++ b/src/tools/clippy/clippy_lints/src/matches/match_str_case_mismatch.rs @@ -26,10 +26,10 @@ pub(super) fn check<'tcx>(cx: &LateContext<'tcx>, scrutinee: &'tcx Expr<'_>, arm && let ty::Str = ty.kind() { let mut visitor = MatchExprVisitor { cx }; - if let ControlFlow::Break(case_method) = visitor.visit_expr(scrutinee) { - if let Some((bad_case_span, bad_case_sym)) = verify_case(&case_method, arms) { - lint(cx, &case_method, bad_case_span, bad_case_sym.as_str()); - } + if let ControlFlow::Break(case_method) = visitor.visit_expr(scrutinee) + && let Some((bad_case_span, bad_case_sym)) = verify_case(&case_method, arms) + { + lint(cx, &case_method, bad_case_span, bad_case_sym.as_str()); } } } diff --git a/src/tools/clippy/clippy_lints/src/matches/match_wild_enum.rs b/src/tools/clippy/clippy_lints/src/matches/match_wild_enum.rs index 11b588b33554d..24b4a6758004f 100644 --- a/src/tools/clippy/clippy_lints/src/matches/match_wild_enum.rs +++ b/src/tools/clippy/clippy_lints/src/matches/match_wild_enum.rs @@ -80,18 +80,20 @@ pub(crate) fn check(cx: &LateContext<'_>, ex: &Expr<'_>, arms: &[Arm<'_>]) { path }, PatKind::TupleStruct(path, patterns, ..) => { - if let Some(id) = cx.qpath_res(path, pat.hir_id).opt_def_id() { - if arm.guard.is_none() && patterns.iter().all(|p| !is_refutable(cx, p)) { - missing_variants.retain(|e| e.ctor_def_id() != Some(id)); - } + if let Some(id) = cx.qpath_res(path, pat.hir_id).opt_def_id() + && arm.guard.is_none() + && patterns.iter().all(|p| !is_refutable(cx, p)) + { + missing_variants.retain(|e| e.ctor_def_id() != Some(id)); } path }, PatKind::Struct(path, patterns, ..) => { - if let Some(id) = cx.qpath_res(path, pat.hir_id).opt_def_id() { - if arm.guard.is_none() && patterns.iter().all(|p| !is_refutable(cx, p.pat)) { - missing_variants.retain(|e| e.def_id != id); - } + if let Some(id) = cx.qpath_res(path, pat.hir_id).opt_def_id() + && arm.guard.is_none() + && patterns.iter().all(|p| !is_refutable(cx, p.pat)) + { + missing_variants.retain(|e| e.def_id != id); } path }, diff --git a/src/tools/clippy/clippy_lints/src/matches/match_wild_err_arm.rs b/src/tools/clippy/clippy_lints/src/matches/match_wild_err_arm.rs index d0d2025878e48..8ce8453360f78 100644 --- a/src/tools/clippy/clippy_lints/src/matches/match_wild_err_arm.rs +++ b/src/tools/clippy/clippy_lints/src/matches/match_wild_err_arm.rs @@ -26,11 +26,12 @@ pub(crate) fn check<'tcx>(cx: &LateContext<'tcx>, ex: &Expr<'tcx>, arms: &[Arm<' if !matching_wild { // Looking for unused bindings (i.e.: `_e`) for pat in inner { - if let PatKind::Binding(_, id, ident, None) = pat.kind { - if ident.as_str().starts_with('_') && !is_local_used(cx, arm.body, id) { - ident_bind_name = ident.name; - matching_wild = true; - } + if let PatKind::Binding(_, id, ident, None) = pat.kind + && ident.as_str().starts_with('_') + && !is_local_used(cx, arm.body, id) + { + ident_bind_name = ident.name; + matching_wild = true; } } } diff --git a/src/tools/clippy/clippy_lints/src/matches/mod.rs b/src/tools/clippy/clippy_lints/src/matches/mod.rs index 2b9173e6f4122..c6ebd6144c76f 100644 --- a/src/tools/clippy/clippy_lints/src/matches/mod.rs +++ b/src/tools/clippy/clippy_lints/src/matches/mod.rs @@ -8,7 +8,6 @@ mod manual_utils; mod match_as_ref; mod match_bool; mod match_like_matches; -mod match_on_vec_items; mod match_ref_pats; mod match_same_arms; mod match_single_binding; @@ -724,38 +723,39 @@ declare_clippy_lint! { declare_clippy_lint! { /// ### What it does - /// Checks for `match vec[idx]` or `match vec[n..m]`. + /// Checks if a `match` or `if let` expression can be simplified using + /// `.unwrap_or_default()`. /// /// ### Why is this bad? - /// This can panic at runtime. + /// It can be done in one call with `.unwrap_or_default()`. /// /// ### Example - /// ```rust, no_run - /// let arr = vec![0, 1, 2, 3]; - /// let idx = 1; + /// ```no_run + /// let x: Option = Some(String::new()); + /// let y: String = match x { + /// Some(v) => v, + /// None => String::new(), + /// }; /// - /// match arr[idx] { - /// 0 => println!("{}", 0), - /// 1 => println!("{}", 3), - /// _ => {}, - /// } + /// let x: Option> = Some(Vec::new()); + /// let y: Vec = if let Some(v) = x { + /// v + /// } else { + /// Vec::new() + /// }; /// ``` - /// /// Use instead: - /// ```rust, no_run - /// let arr = vec![0, 1, 2, 3]; - /// let idx = 1; + /// ```no_run + /// let x: Option = Some(String::new()); + /// let y: String = x.unwrap_or_default(); /// - /// match arr.get(idx) { - /// Some(0) => println!("{}", 0), - /// Some(1) => println!("{}", 3), - /// _ => {}, - /// } + /// let x: Option> = Some(Vec::new()); + /// let y: Vec = x.unwrap_or_default(); /// ``` - #[clippy::version = "1.45.0"] - pub MATCH_ON_VEC_ITEMS, - pedantic, - "matching on vector elements can panic" + #[clippy::version = "1.79.0"] + pub MANUAL_UNWRAP_OR_DEFAULT, + suspicious, + "check if a `match` or `if let` can be simplified with `unwrap_or_default`" } declare_clippy_lint! { @@ -1040,7 +1040,7 @@ impl_lint_pass!(Matches => [ NEEDLESS_MATCH, COLLAPSIBLE_MATCH, MANUAL_UNWRAP_OR, - MATCH_ON_VEC_ITEMS, + MANUAL_UNWRAP_OR_DEFAULT, MATCH_STR_CASE_MISMATCH, SIGNIFICANT_DROP_IN_SCRUTINEE, TRY_ERR, @@ -1118,7 +1118,6 @@ impl<'tcx> LateLintPass<'tcx> for Matches { match_wild_enum::check(cx, ex, arms); match_as_ref::check(cx, ex, arms, expr); needless_match::check_match(cx, ex, arms, expr); - match_on_vec_items::check(cx, ex); match_str_case_mismatch::check(cx, ex, arms); redundant_guards::check(cx, arms, self.msrv); diff --git a/src/tools/clippy/clippy_lints/src/matches/needless_match.rs b/src/tools/clippy/clippy_lints/src/matches/needless_match.rs index 7e65d586110e5..6c5d7cab2036e 100644 --- a/src/tools/clippy/clippy_lints/src/matches/needless_match.rs +++ b/src/tools/clippy/clippy_lints/src/matches/needless_match.rs @@ -67,10 +67,10 @@ fn check_all_arms(cx: &LateContext<'_>, match_expr: &Expr<'_>, arms: &[Arm<'_>]) for arm in arms { let arm_expr = peel_blocks_with_stmt(arm.body); - if let Some(guard_expr) = &arm.guard { - if guard_expr.can_have_side_effects() { - return false; - } + if let Some(guard_expr) = &arm.guard + && guard_expr.can_have_side_effects() + { + return false; } if let PatKind::Wild = arm.pat.kind { diff --git a/src/tools/clippy/clippy_lints/src/matches/overlapping_arms.rs b/src/tools/clippy/clippy_lints/src/matches/overlapping_arms.rs index 4184f8b9e6e8a..d3136c89178e6 100644 --- a/src/tools/clippy/clippy_lints/src/matches/overlapping_arms.rs +++ b/src/tools/clippy/clippy_lints/src/matches/overlapping_arms.rs @@ -11,17 +11,17 @@ use super::MATCH_OVERLAPPING_ARM; pub(crate) fn check<'tcx>(cx: &LateContext<'tcx>, ex: &'tcx Expr<'_>, arms: &'tcx [Arm<'_>]) { if arms.len() >= 2 && cx.typeck_results().expr_ty(ex).is_integral() { let ranges = all_ranges(cx, arms, cx.typeck_results().expr_ty(ex)); - if !ranges.is_empty() { - if let Some((start, end)) = overlapping(&ranges) { - span_lint_and_note( - cx, - MATCH_OVERLAPPING_ARM, - start.span, - "some ranges overlap", - Some(end.span), - "overlaps with this", - ); - } + if !ranges.is_empty() + && let Some((start, end)) = overlapping(&ranges) + { + span_lint_and_note( + cx, + MATCH_OVERLAPPING_ARM, + start.span, + "some ranges overlap", + Some(end.span), + "overlaps with this", + ); } } } diff --git a/src/tools/clippy/clippy_lints/src/matches/redundant_guards.rs b/src/tools/clippy/clippy_lints/src/matches/redundant_guards.rs index ab53ad98572e4..9bbef8da0a466 100644 --- a/src/tools/clippy/clippy_lints/src/matches/redundant_guards.rs +++ b/src/tools/clippy/clippy_lints/src/matches/redundant_guards.rs @@ -246,7 +246,6 @@ fn emit_redundant_guards<'tcx>( fn expr_can_be_pat(cx: &LateContext<'_>, expr: &Expr<'_>) -> bool { for_each_expr_without_closures(expr, |expr| { if match expr.kind { - ExprKind::ConstBlock(..) => cx.tcx.features().inline_const_pat(), ExprKind::Call(c, ..) if let ExprKind::Path(qpath) = c.kind => { // Allow ctors matches!(cx.qpath_res(&qpath, c.hir_id), Res::Def(DefKind::Ctor(..), ..)) diff --git a/src/tools/clippy/clippy_lints/src/matches/redundant_pattern_match.rs b/src/tools/clippy/clippy_lints/src/matches/redundant_pattern_match.rs index 722ea7042dd7f..db20be40f27ea 100644 --- a/src/tools/clippy/clippy_lints/src/matches/redundant_pattern_match.rs +++ b/src/tools/clippy/clippy_lints/src/matches/redundant_pattern_match.rs @@ -4,7 +4,7 @@ use clippy_utils::source::walk_span_to_context; use clippy_utils::sugg::{Sugg, make_unop}; use clippy_utils::ty::{is_type_diagnostic_item, needs_ordered_drop}; use clippy_utils::visitors::{any_temporaries_need_ordered_drop, for_each_expr_without_closures}; -use clippy_utils::{higher, is_expn_of, is_trait_method}; +use clippy_utils::{higher, is_expn_of, is_trait_method, sym}; use rustc_ast::ast::LitKind; use rustc_errors::Applicability; use rustc_hir::LangItem::{self, OptionNone, OptionSome, PollPending, PollReady, ResultErr, ResultOk}; @@ -12,7 +12,7 @@ use rustc_hir::def::{DefKind, Res}; use rustc_hir::{Arm, Expr, ExprKind, Node, Pat, PatExpr, PatExprKind, PatKind, QPath, UnOp}; use rustc_lint::LateContext; use rustc_middle::ty::{self, GenericArgKind, Ty}; -use rustc_span::{Span, Symbol, sym}; +use rustc_span::{Span, Symbol}; use std::fmt::Write; use std::ops::ControlFlow; @@ -138,9 +138,9 @@ fn find_method_and_type<'tcx>( Some(("is_some()", op_ty)) } else if Some(id) == lang_items.poll_ready_variant() { Some(("is_ready()", op_ty)) - } else if is_pat_variant(cx, check_pat, qpath, Item::Diag(sym::IpAddr, sym!(V4))) { + } else if is_pat_variant(cx, check_pat, qpath, Item::Diag(sym::IpAddr, sym::V4)) { Some(("is_ipv4()", op_ty)) - } else if is_pat_variant(cx, check_pat, qpath, Item::Diag(sym::IpAddr, sym!(V6))) { + } else if is_pat_variant(cx, check_pat, qpath, Item::Diag(sym::IpAddr, sym::V6)) { Some(("is_ipv6()", op_ty)) } else { None @@ -255,7 +255,7 @@ fn find_method_sugg_for_if_let<'tcx>( }; let sugg = Sugg::hir_with_context(cx, result_expr, ctxt, "_", &mut app) - .maybe_par() + .maybe_paren() .to_string(); diag.span_suggestion(span, "try", format!("{keyword} {sugg}.{good_method}"), app); @@ -279,7 +279,7 @@ pub(super) fn check_match<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>, op _ => op, }; let mut app = Applicability::MachineApplicable; - let receiver_sugg = Sugg::hir_with_applicability(cx, result_expr, "_", &mut app).maybe_par(); + let receiver_sugg = Sugg::hir_with_applicability(cx, result_expr, "_", &mut app).maybe_paren(); let mut sugg = format!("{receiver_sugg}.{good_method}"); if let Some(guard) = maybe_guard { @@ -303,7 +303,7 @@ pub(super) fn check_match<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>, op } let guard = Sugg::hir(cx, guard, ".."); - let _ = write!(sugg, " && {}", guard.maybe_par()); + let _ = write!(sugg, " && {}", guard.maybe_paren()); } span_lint_and_sugg( @@ -345,8 +345,8 @@ fn found_good_method<'tcx>( arms, path_left, path_right, - Item::Diag(sym::IpAddr, sym!(V4)), - Item::Diag(sym::IpAddr, sym!(V6)), + Item::Diag(sym::IpAddr, sym::V4), + Item::Diag(sym::IpAddr, sym::V6), "is_ipv4()", "is_ipv6()", ) @@ -437,8 +437,8 @@ fn get_good_method<'tcx>( "None" => (Item::Lang(OptionNone), "is_none()", "is_some()"), "Ready" => (Item::Lang(PollReady), "is_ready()", "is_pending()"), "Pending" => (Item::Lang(PollPending), "is_pending()", "is_ready()"), - "V4" => (Item::Diag(sym::IpAddr, sym!(V4)), "is_ipv4()", "is_ipv6()"), - "V6" => (Item::Diag(sym::IpAddr, sym!(V6)), "is_ipv6()", "is_ipv4()"), + "V4" => (Item::Diag(sym::IpAddr, sym::V4), "is_ipv4()", "is_ipv6()"), + "V6" => (Item::Diag(sym::IpAddr, sym::V6), "is_ipv6()", "is_ipv4()"), _ => return None, }; return find_good_method_for_matches_macro( diff --git a/src/tools/clippy/clippy_lints/src/matches/significant_drop_in_scrutinee.rs b/src/tools/clippy/clippy_lints/src/matches/significant_drop_in_scrutinee.rs index 37bac561a6e06..d7dc7604088f7 100644 --- a/src/tools/clippy/clippy_lints/src/matches/significant_drop_in_scrutinee.rs +++ b/src/tools/clippy/clippy_lints/src/matches/significant_drop_in_scrutinee.rs @@ -182,17 +182,16 @@ impl<'a, 'tcx> SigDropChecker<'a, 'tcx> { } fn has_sig_drop_attr_impl(&mut self, ty: Ty<'tcx>) -> bool { - if let Some(adt) = ty.ty_adt_def() { - if get_attr( + if let Some(adt) = ty.ty_adt_def() + && get_attr( self.cx.sess(), self.cx.tcx.get_attrs_unchecked(adt.did()), "has_significant_drop", ) .count() > 0 - { - return true; - } + { + return true; } if !self.seen_types.insert(ty) { diff --git a/src/tools/clippy/clippy_lints/src/matches/single_match.rs b/src/tools/clippy/clippy_lints/src/matches/single_match.rs index 56fbd626eefc4..08c0caa4266cc 100644 --- a/src/tools/clippy/clippy_lints/src/matches/single_match.rs +++ b/src/tools/clippy/clippy_lints/src/matches/single_match.rs @@ -1,5 +1,7 @@ use clippy_utils::diagnostics::span_lint_and_then; -use clippy_utils::source::{SpanRangeExt, expr_block, snippet, snippet_block_with_context}; +use clippy_utils::source::{ + SpanRangeExt, expr_block, snippet, snippet_block_with_context, snippet_with_applicability, snippet_with_context, +}; use clippy_utils::ty::implements_trait; use clippy_utils::{ is_lint_allowed, is_unit_expr, peel_blocks, peel_hir_pat_refs, peel_middle_ty_refs, peel_n_hir_expr_refs, @@ -34,8 +36,7 @@ fn empty_arm_has_comment(cx: &LateContext<'_>, span: Span) -> bool { #[rustfmt::skip] pub(crate) fn check<'tcx>(cx: &LateContext<'tcx>, ex: &'tcx Expr<'_>, arms: &'tcx [Arm<'_>], expr: &'tcx Expr<'_>, contains_comments: bool) { if let [arm1, arm2] = arms - && arm1.guard.is_none() - && arm2.guard.is_none() + && !arms.iter().any(|arm| arm.guard.is_some() || arm.pat.span.from_expansion()) && !expr.span.from_expansion() // don't lint for or patterns for now, this makes // the lint noisy in unnecessary situations @@ -106,7 +107,7 @@ fn report_single_pattern( format!(" else {}", expr_block(cx, els, ctxt, "..", Some(expr.span), &mut app)) }); - if snippet(cx, ex.span, "..") == snippet(cx, arm.pat.span, "..") { + if ex.span.eq_ctxt(expr.span) && snippet(cx, ex.span, "..") == snippet(cx, arm.pat.span, "..") { let msg = "this pattern is irrefutable, `match` is useless"; let (sugg, help) = if is_unit_expr(arm.body) { (String::new(), "`match` expression can be removed") @@ -163,10 +164,10 @@ fn report_single_pattern( let msg = "you seem to be trying to use `match` for an equality check. Consider using `if`"; let sugg = format!( "if {} == {}{} {}{els_str}", - snippet(cx, ex.span, ".."), + snippet_with_context(cx, ex.span, ctxt, "..", &mut app).0, // PartialEq for different reference counts may not exist. "&".repeat(ref_count_diff), - snippet(cx, arm.pat.span, ".."), + snippet_with_applicability(cx, arm.pat.span, "..", &mut app), expr_block(cx, arm.body, ctxt, "..", Some(expr.span), &mut app), ); (msg, sugg) @@ -174,8 +175,8 @@ fn report_single_pattern( let msg = "you seem to be trying to use `match` for destructuring a single pattern. Consider using `if let`"; let sugg = format!( "if let {} = {} {}{els_str}", - snippet(cx, arm.pat.span, ".."), - snippet(cx, ex.span, ".."), + snippet_with_applicability(cx, arm.pat.span, "..", &mut app), + snippet_with_context(cx, ex.span, ctxt, "..", &mut app).0, expr_block(cx, arm.body, ctxt, "..", Some(expr.span), &mut app), ); (msg, sugg) @@ -406,6 +407,7 @@ impl<'a> PatState<'a> { pats.iter().map(|p| p.pat), ), + PatKind::Missing => unreachable!(), PatKind::Wild | PatKind::Binding(_, _, _, None) | PatKind::Expr(_) diff --git a/src/tools/clippy/clippy_lints/src/matches/wild_in_or_pats.rs b/src/tools/clippy/clippy_lints/src/matches/wild_in_or_pats.rs index b75d1ab9a7aa3..43102d78bfebd 100644 --- a/src/tools/clippy/clippy_lints/src/matches/wild_in_or_pats.rs +++ b/src/tools/clippy/clippy_lints/src/matches/wild_in_or_pats.rs @@ -15,18 +15,18 @@ pub(crate) fn check(cx: &LateContext<'_>, expr: &Expr<'_>, arms: &[Arm<'_>]) { return; } for arm in arms { - if let PatKind::Or(fields) = arm.pat.kind { + if let PatKind::Or(fields) = arm.pat.kind // look for multiple fields in this arm that contains at least one Wild pattern - if fields.len() > 1 && fields.iter().any(is_wild) { - span_lint_and_help( - cx, - WILDCARD_IN_OR_PATTERNS, - arm.pat.span, - "wildcard pattern covers any other pattern as it will match anyway", - None, - "consider handling `_` separately", - ); - } + && fields.len() > 1 && fields.iter().any(is_wild) + { + span_lint_and_help( + cx, + WILDCARD_IN_OR_PATTERNS, + arm.pat.span, + "wildcard pattern covers any other pattern as it will match anyway", + None, + "consider handling `_` separately", + ); } } } diff --git a/src/tools/clippy/clippy_lints/src/mem_replace.rs b/src/tools/clippy/clippy_lints/src/mem_replace.rs index a0919947b3fc7..a54d835b538c1 100644 --- a/src/tools/clippy/clippy_lints/src/mem_replace.rs +++ b/src/tools/clippy/clippy_lints/src/mem_replace.rs @@ -145,7 +145,7 @@ fn check_replace_option_with_none(cx: &LateContext<'_>, src: &Expr<'_>, dest: &E "consider `Option::take()` instead", format!( "{}.take()", - Sugg::hir_with_context(cx, sugg_expr, expr_span.ctxt(), "", &mut applicability).maybe_par() + Sugg::hir_with_context(cx, sugg_expr, expr_span.ctxt(), "", &mut applicability).maybe_paren() ), applicability, ); @@ -178,7 +178,7 @@ fn check_replace_option_with_some( "consider `Option::replace()` instead", format!( "{}.replace({})", - Sugg::hir_with_context(cx, sugg_expr, expr_span.ctxt(), "_", &mut applicability).maybe_par(), + Sugg::hir_with_context(cx, sugg_expr, expr_span.ctxt(), "_", &mut applicability).maybe_paren(), snippet_with_applicability(cx, src_arg.span, "_", &mut applicability) ), applicability, @@ -304,14 +304,12 @@ impl<'tcx> LateLintPass<'tcx> for MemReplace { && let ExprKind::Path(ref func_qpath) = func.kind && let Some(def_id) = cx.qpath_res(func_qpath, func.hir_id).opt_def_id() && cx.tcx.is_diagnostic_item(sym::mem_replace, def_id) - { // Check that second argument is `Option::None` - if !check_replace_option_with_none(cx, src, dest, expr.span) - && !check_replace_option_with_some(cx, src, dest, expr.span, self.msrv) - && !check_replace_with_default(cx, src, dest, expr, self.msrv) - { - check_replace_with_uninit(cx, src, dest, expr.span); - } + && !check_replace_option_with_none(cx, src, dest, expr.span) + && !check_replace_option_with_some(cx, src, dest, expr.span, self.msrv) + && !check_replace_with_default(cx, src, dest, expr, self.msrv) + { + check_replace_with_uninit(cx, src, dest, expr.span); } } } diff --git a/src/tools/clippy/clippy_lints/src/methods/bind_instead_of_map.rs b/src/tools/clippy/clippy_lints/src/methods/bind_instead_of_map.rs index 1e9b29f567f41..f8520c23ea503 100644 --- a/src/tools/clippy/clippy_lints/src/methods/bind_instead_of_map.rs +++ b/src/tools/clippy/clippy_lints/src/methods/bind_instead_of_map.rs @@ -192,10 +192,10 @@ impl BindInsteadOfMap { } fn is_variant(&self, cx: &LateContext<'_>, res: Res) -> bool { - if let Res::Def(DefKind::Ctor(CtorOf::Variant, CtorKind::Fn), id) = res { - if let Some(variant_id) = cx.tcx.lang_items().get(self.variant_lang_item) { - return cx.tcx.parent(id) == variant_id; - } + if let Res::Def(DefKind::Ctor(CtorOf::Variant, CtorKind::Fn), id) = res + && let Some(variant_id) = cx.tcx.lang_items().get(self.variant_lang_item) + { + return cx.tcx.parent(id) == variant_id; } false } diff --git a/src/tools/clippy/clippy_lints/src/methods/case_sensitive_file_extension_comparisons.rs b/src/tools/clippy/clippy_lints/src/methods/case_sensitive_file_extension_comparisons.rs index 18568e3661fe5..d07870d4951e0 100644 --- a/src/tools/clippy/clippy_lints/src/methods/case_sensitive_file_extension_comparisons.rs +++ b/src/tools/clippy/clippy_lints/src/methods/case_sensitive_file_extension_comparisons.rs @@ -1,4 +1,5 @@ use clippy_utils::diagnostics::span_lint_and_then; +use clippy_utils::msrvs::{self, Msrv}; use clippy_utils::source::{SpanRangeExt, indent_of, reindent_multiline}; use clippy_utils::ty::is_type_lang_item; use rustc_ast::ast::LitKind; @@ -16,14 +17,15 @@ pub(super) fn check<'tcx>( call_span: Span, recv: &'tcx Expr<'_>, arg: &'tcx Expr<'_>, + msrv: Msrv, ) { - if let ExprKind::MethodCall(path_segment, ..) = recv.kind { - if matches!( + if let ExprKind::MethodCall(path_segment, ..) = recv.kind + && matches!( path_segment.ident.name.as_str(), "to_lowercase" | "to_uppercase" | "to_ascii_lowercase" | "to_ascii_uppercase" - ) { - return; - } + ) + { + return; } if let Some(method_id) = cx.typeck_results().type_dependent_def_id(expr.hir_id) @@ -58,11 +60,15 @@ pub(super) fn check<'tcx>( let suggestion_source = reindent_multiline( &format!( - "std::path::Path::new({}) + "std::path::Path::new({recv_source}) .extension() - .map_or(false, |ext| ext.eq_ignore_ascii_case(\"{}\"))", - recv_source, - ext_str.strip_prefix('.').unwrap() + .{}|ext| ext.eq_ignore_ascii_case(\"{}\"))", + if msrv.meets(cx, msrvs::OPTION_RESULT_IS_VARIANT_AND) { + "is_some_and(" + } else { + "map_or(false, " + }, + ext_str.strip_prefix('.').unwrap(), ), true, Some(indent_of(cx, call_span).unwrap_or(0) + 4), diff --git a/src/tools/clippy/clippy_lints/src/methods/clone_on_copy.rs b/src/tools/clippy/clippy_lints/src/methods/clone_on_copy.rs index 1ee27d90d0545..2ecf3eb897988 100644 --- a/src/tools/clippy/clippy_lints/src/methods/clone_on_copy.rs +++ b/src/tools/clippy/clippy_lints/src/methods/clone_on_copy.rs @@ -40,10 +40,10 @@ pub(super) fn check( .map_or_else(|| cx.typeck_results().expr_ty(arg), |a| a.target); let ty = cx.typeck_results().expr_ty(expr); - if let ty::Ref(_, inner, _) = arg_ty.kind() { - if let ty::Ref(..) = inner.kind() { - return; // don't report clone_on_copy - } + if let ty::Ref(_, inner, _) = arg_ty.kind() + && let ty::Ref(..) = inner.kind() + { + return; // don't report clone_on_copy } if is_copy(cx, ty) { diff --git a/src/tools/clippy/clippy_lints/src/methods/double_ended_iterator_last.rs b/src/tools/clippy/clippy_lints/src/methods/double_ended_iterator_last.rs index e82211bbf3ef7..6d841853fbe5f 100644 --- a/src/tools/clippy/clippy_lints/src/methods/double_ended_iterator_last.rs +++ b/src/tools/clippy/clippy_lints/src/methods/double_ended_iterator_last.rs @@ -1,11 +1,11 @@ use clippy_utils::diagnostics::span_lint_and_then; -use clippy_utils::ty::implements_trait; -use clippy_utils::{is_mutable, is_trait_method, path_to_local}; +use clippy_utils::ty::{has_non_owning_mutable_access, implements_trait}; +use clippy_utils::{is_mutable, is_trait_method, path_to_local, sym}; use rustc_errors::Applicability; use rustc_hir::{Expr, Node, PatKind}; use rustc_lint::LateContext; use rustc_middle::ty::Instance; -use rustc_span::{Span, sym}; +use rustc_span::Span; use super::DOUBLE_ENDED_ITERATOR_LAST; @@ -24,13 +24,18 @@ pub(super) fn check(cx: &LateContext<'_>, expr: &'_ Expr<'_>, self_expr: &'_ Exp && let Ok(Some(fn_def)) = Instance::try_resolve(cx.tcx, cx.typing_env(), id, args) // find the provided definition of Iterator::last && let Some(item) = cx.tcx.get_diagnostic_item(sym::Iterator) - && let Some(last_def) = cx.tcx.provided_trait_methods(item).find(|m| m.name.as_str() == "last") + && let Some(last_def) = cx.tcx.provided_trait_methods(item).find(|m| m.name() == sym::last) // if the resolved method is the same as the provided definition && fn_def.def_id() == last_def.def_id + && let self_ty = cx.typeck_results().expr_ty(self_expr) + && !has_non_owning_mutable_access(cx, self_ty) { let mut sugg = vec![(call_span, String::from("next_back()"))]; let mut dont_apply = false; + // if `self_expr` is a reference, it is mutable because it is used for `.last()` + // TODO: Change this to lint only when the referred iterator is not used later. If it is used later, + // changing to `next_back()` may change its behavior. if !(is_mutable(cx, self_expr) || self_type.is_ref()) { if let Some(hir_id) = path_to_local(self_expr) && let Node::Pat(pat) = cx.tcx.hir_node(hir_id) diff --git a/src/tools/clippy/clippy_lints/src/methods/expect_fun_call.rs b/src/tools/clippy/clippy_lints/src/methods/expect_fun_call.rs index daa6e0e7f940c..f5688e370a478 100644 --- a/src/tools/clippy/clippy_lints/src/methods/expect_fun_call.rs +++ b/src/tools/clippy/clippy_lints/src/methods/expect_fun_call.rs @@ -54,10 +54,11 @@ pub(super) fn check<'tcx>( if is_type_lang_item(cx, arg_ty, hir::LangItem::String) { return false; } - if let ty::Ref(_, ty, ..) = arg_ty.kind() { - if ty.is_str() && can_be_static_str(cx, arg) { - return false; - } + if let ty::Ref(_, ty, ..) = arg_ty.kind() + && ty.is_str() + && can_be_static_str(cx, arg) + { + return false; } true } diff --git a/src/tools/clippy/clippy_lints/src/methods/filter_map.rs b/src/tools/clippy/clippy_lints/src/methods/filter_map.rs index ae300cd5fe56d..4dd54cf197450 100644 --- a/src/tools/clippy/clippy_lints/src/methods/filter_map.rs +++ b/src/tools/clippy/clippy_lints/src/methods/filter_map.rs @@ -2,7 +2,7 @@ use clippy_utils::diagnostics::{span_lint_and_sugg, span_lint_and_then}; use clippy_utils::macros::{is_panic, matching_root_macro_call, root_macro_call}; use clippy_utils::source::{indent_of, reindent_multiline, snippet}; use clippy_utils::ty::is_type_diagnostic_item; -use clippy_utils::{SpanlessEq, higher, is_trait_method, path_to_local_id, peel_blocks}; +use clippy_utils::{SpanlessEq, higher, is_trait_method, path_to_local_id, peel_blocks, sym}; use hir::{Body, HirId, MatchSource, Pat}; use rustc_errors::Applicability; use rustc_hir as hir; @@ -11,7 +11,7 @@ use rustc_hir::{Closure, Expr, ExprKind, PatKind, PathSegment, QPath, UnOp}; use rustc_lint::LateContext; use rustc_middle::ty::adjustment::Adjust; use rustc_span::Span; -use rustc_span::symbol::{Ident, Symbol, sym}; +use rustc_span::symbol::{Ident, Symbol}; use super::{MANUAL_FILTER_MAP, MANUAL_FIND_MAP, OPTION_FILTER_MAP, RESULT_FILTER_MAP}; @@ -43,10 +43,10 @@ fn is_method(cx: &LateContext<'_>, expr: &Expr<'_>, method_name: Symbol) -> bool } fn is_option_filter_map(cx: &LateContext<'_>, filter_arg: &Expr<'_>, map_arg: &Expr<'_>) -> bool { - is_method(cx, map_arg, sym::unwrap) && is_method(cx, filter_arg, sym!(is_some)) + is_method(cx, map_arg, sym::unwrap) && is_method(cx, filter_arg, sym::is_some) } fn is_ok_filter_map(cx: &LateContext<'_>, filter_arg: &Expr<'_>, map_arg: &Expr<'_>) -> bool { - is_method(cx, map_arg, sym::unwrap) && is_method(cx, filter_arg, sym!(is_ok)) + is_method(cx, map_arg, sym::unwrap) && is_method(cx, filter_arg, sym::is_ok) } #[derive(Debug, Copy, Clone)] @@ -233,12 +233,12 @@ impl<'tcx> OffendingFilterExpr<'tcx> { // the latter only calls `effect` once let side_effect_expr_span = receiver.can_have_side_effects().then_some(receiver.span); - if cx.tcx.is_diagnostic_item(sym::Option, recv_ty.did()) && path.ident.name.as_str() == "is_some" { + if cx.tcx.is_diagnostic_item(sym::Option, recv_ty.did()) && path.ident.name == sym::is_some { Some(Self::IsSome { receiver, side_effect_expr_span, }) - } else if cx.tcx.is_diagnostic_item(sym::Result, recv_ty.did()) && path.ident.name.as_str() == "is_ok" { + } else if cx.tcx.is_diagnostic_item(sym::Result, recv_ty.did()) && path.ident.name == sym::is_ok { Some(Self::IsOk { receiver, side_effect_expr_span, @@ -429,16 +429,15 @@ fn is_find_or_filter<'a>( } fn acceptable_methods(method: &PathSegment<'_>) -> bool { - let methods: [Symbol; 8] = [ - sym::clone, - sym::as_ref, - sym!(copied), - sym!(cloned), - sym!(as_deref), - sym!(as_mut), - sym!(as_deref_mut), - sym!(to_owned), - ]; - - methods.contains(&method.ident.name) + matches!( + method.ident.name, + sym::clone + | sym::as_ref + | sym::copied + | sym::cloned + | sym::as_deref + | sym::as_mut + | sym::as_deref_mut + | sym::to_owned + ) } diff --git a/src/tools/clippy/clippy_lints/src/methods/filter_map_bool_then.rs b/src/tools/clippy/clippy_lints/src/methods/filter_map_bool_then.rs index f7e116c5310ed..965993808f6b5 100644 --- a/src/tools/clippy/clippy_lints/src/methods/filter_map_bool_then.rs +++ b/src/tools/clippy/clippy_lints/src/methods/filter_map_bool_then.rs @@ -1,10 +1,14 @@ use super::FILTER_MAP_BOOL_THEN; -use clippy_utils::diagnostics::span_lint_and_sugg; +use clippy_utils::diagnostics::span_lint_and_then; use clippy_utils::source::SpanRangeExt; use clippy_utils::ty::is_copy; -use clippy_utils::{is_from_proc_macro, is_trait_method, peel_blocks}; +use clippy_utils::{ + CaptureKind, can_move_expr_to_closure, contains_return, is_from_proc_macro, is_trait_method, peel_blocks, +}; +use rustc_ast::Mutability; +use rustc_data_structures::fx::FxHashSet; use rustc_errors::Applicability; -use rustc_hir::{Expr, ExprKind}; +use rustc_hir::{Expr, ExprKind, HirId, Param, Pat}; use rustc_lint::{LateContext, LintContext}; use rustc_middle::ty::Binder; use rustc_middle::ty::adjustment::Adjust; @@ -44,17 +48,69 @@ pub(super) fn check<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'tcx>, arg: & && let Some(filter) = recv.span.get_source_text(cx) && let Some(map) = then_body.span.get_source_text(cx) { - span_lint_and_sugg( + span_lint_and_then( cx, FILTER_MAP_BOOL_THEN, call_span, "usage of `bool::then` in `filter_map`", - "use `filter` then `map` instead", - format!( - "filter(|&{param_snippet}| {derefs}{filter}).map(|{param_snippet}| {map})", - derefs = "*".repeat(needed_derefs) - ), - Applicability::MachineApplicable, + |diag| { + if can_filter_and_then_move_to_closure(cx, ¶m, recv, then_body) { + diag.span_suggestion( + call_span, + "use `filter` then `map` instead", + format!( + "filter(|&{param_snippet}| {derefs}{filter}).map(|{param_snippet}| {map})", + derefs = "*".repeat(needed_derefs) + ), + Applicability::MachineApplicable, + ); + } else { + diag.help("consider using `filter` then `map` instead"); + } + }, ); } } + +/// Returns a set of all bindings found in the given pattern. +fn find_bindings_from_pat(pat: &Pat<'_>) -> FxHashSet { + let mut bindings = FxHashSet::default(); + pat.walk(|p| { + if let rustc_hir::PatKind::Binding(_, hir_id, _, _) = p.kind { + bindings.insert(hir_id); + } + true + }); + bindings +} + +/// Returns true if we can take a closure parameter and have it in both the `filter` function and +/// the`map` function. This is not the case if: +/// +/// - The `filter` would contain an early return, +/// - `filter` and `then` contain captures, and any of those are &mut +fn can_filter_and_then_move_to_closure<'tcx>( + cx: &LateContext<'tcx>, + param: &Param<'tcx>, + filter: &'tcx Expr<'tcx>, + then: &'tcx Expr<'tcx>, +) -> bool { + if contains_return(filter) { + return false; + } + + let Some(filter_captures) = can_move_expr_to_closure(cx, filter) else { + return true; + }; + let Some(then_captures) = can_move_expr_to_closure(cx, then) else { + return true; + }; + + let param_bindings = find_bindings_from_pat(param.pat); + filter_captures.iter().all(|(hir_id, filter_cap)| { + param_bindings.contains(hir_id) + || !then_captures + .get(hir_id) + .is_some_and(|then_cap| matches!(*filter_cap | *then_cap, CaptureKind::Ref(Mutability::Mut))) + }) +} diff --git a/src/tools/clippy/clippy_lints/src/methods/from_iter_instead_of_collect.rs b/src/tools/clippy/clippy_lints/src/methods/from_iter_instead_of_collect.rs index f4840785584ef..045363058d198 100644 --- a/src/tools/clippy/clippy_lints/src/methods/from_iter_instead_of_collect.rs +++ b/src/tools/clippy/clippy_lints/src/methods/from_iter_instead_of_collect.rs @@ -1,25 +1,31 @@ +use std::fmt::Write as _; + use clippy_utils::diagnostics::span_lint_and_sugg; -use clippy_utils::source::SpanRangeExt; +use clippy_utils::source::snippet_with_applicability; use clippy_utils::ty::implements_trait; use clippy_utils::{is_path_diagnostic_item, sugg}; use rustc_errors::Applicability; -use rustc_hir as hir; +use rustc_hir::def::Res; +use rustc_hir::{self as hir, Expr, ExprKind, GenericArg, QPath, TyKind}; use rustc_lint::LateContext; -use rustc_middle::ty::Ty; +use rustc_middle::ty::GenericParamDefKind; use rustc_span::sym; use super::FROM_ITER_INSTEAD_OF_COLLECT; -pub(super) fn check(cx: &LateContext<'_>, expr: &hir::Expr<'_>, args: &[hir::Expr<'_>], func: &hir::Expr<'_>) { +pub(super) fn check(cx: &LateContext<'_>, expr: &Expr<'_>, args: &[Expr<'_>], func: &Expr<'_>) { if is_path_diagnostic_item(cx, func, sym::from_iter_fn) - && let ty = cx.typeck_results().expr_ty(expr) && let arg_ty = cx.typeck_results().expr_ty(&args[0]) && let Some(iter_id) = cx.tcx.get_diagnostic_item(sym::Iterator) && implements_trait(cx, arg_ty, iter_id, &[]) { - // `expr` implements `FromIterator` trait - let iter_expr = sugg::Sugg::hir(cx, &args[0], "..").maybe_par(); - let turbofish = extract_turbofish(cx, expr, ty); + let mut app = Applicability::MaybeIncorrect; + let turbofish = match func.kind { + ExprKind::Path(QPath::TypeRelative(hir_ty, _)) => build_full_type(cx, hir_ty, &mut app), + ExprKind::Path(QPath::Resolved(Some(self_ty), _)) => build_full_type(cx, self_ty, &mut app), + _ => return, + }; + let iter_expr = sugg::Sugg::hir(cx, &args[0], "..").maybe_paren(); let sugg = format!("{iter_expr}.collect::<{turbofish}>()"); span_lint_and_sugg( cx, @@ -28,54 +34,47 @@ pub(super) fn check(cx: &LateContext<'_>, expr: &hir::Expr<'_>, args: &[hir::Exp "usage of `FromIterator::from_iter`", "use `.collect()` instead of `::from_iter()`", sugg, - Applicability::MaybeIncorrect, + app, ); } } -fn extract_turbofish(cx: &LateContext<'_>, expr: &hir::Expr<'_>, ty: Ty<'_>) -> String { - fn strip_angle_brackets(s: &str) -> Option<&str> { - s.strip_prefix('<')?.strip_suffix('>') - } - - let call_site = expr.span.source_callsite(); - if let Some(snippet) = call_site.get_source_text(cx) - && let snippet_split = snippet.split("::").collect::>() - && let Some((_, elements)) = snippet_split.split_last() +/// Build a type which can be used in a turbofish syntax from `hir_ty`, either by copying the +/// existing generic arguments with the exception of elided lifetimes, or by inserting placeholders +/// for types and consts without default values. +fn build_full_type(cx: &LateContext<'_>, hir_ty: &hir::Ty<'_>, app: &mut Applicability) -> String { + if let TyKind::Path(ty_qpath) = hir_ty.kind + && let QPath::Resolved(None, ty_path) = &ty_qpath + && let Res::Def(_, ty_did) = ty_path.res { - if let [type_specifier, _] = snippet_split.as_slice() - && let Some(type_specifier) = strip_angle_brackets(type_specifier) - && let Some((type_specifier, ..)) = type_specifier.split_once(" as ") - { - type_specifier.to_string() + let mut ty_str = itertools::join(ty_path.segments.iter().map(|s| s.ident), "::"); + let mut first = true; + let mut append = |arg: &str| { + write!(&mut ty_str, "{}{arg}", [", ", "<"][usize::from(first)]).unwrap(); + first = false; + }; + if let Some(args) = ty_path.segments.last().and_then(|segment| segment.args) { + args.args + .iter() + .filter(|arg| !matches!(arg, GenericArg::Lifetime(lt) if lt.is_elided())) + .for_each(|arg| append(&snippet_with_applicability(cx, arg.span().source_callsite(), "_", app))); } else { - // is there a type specifier? (i.e.: like `` in `collections::BTreeSet::::`) - if let Some(type_specifier) = snippet_split.iter().find(|e| strip_angle_brackets(e).is_some()) { - // remove the type specifier from the path elements - let without_ts = elements - .iter() - .filter_map(|e| { - if e == type_specifier { - None - } else { - Some((*e).to_string()) - } - }) - .collect::>(); - // join and add the type specifier at the end (i.e.: `collections::BTreeSet`) - format!("{}{type_specifier}", without_ts.join("::")) - } else { - // type is not explicitly specified so wildcards are needed - // i.e.: 2 wildcards in `std::collections::BTreeMap<&i32, &char>` - let ty_str = ty.to_string(); - let start = ty_str.find('<').unwrap_or(0); - let end = ty_str.find('>').unwrap_or(ty_str.len()); - let nb_wildcard = ty_str[start..end].split(',').count(); - let wildcards = format!("_{}", ", _".repeat(nb_wildcard - 1)); - format!("{}<{wildcards}>", elements.join("::")) - } + cx.tcx + .generics_of(ty_did) + .own_params + .iter() + .filter(|param| { + matches!( + param.kind, + GenericParamDefKind::Type { has_default: false, .. } + | GenericParamDefKind::Const { has_default: false, .. } + ) + }) + .for_each(|_| append("_")); } + ty_str.push_str([">", ""][usize::from(first)]); + ty_str } else { - ty.to_string() + snippet_with_applicability(cx, hir_ty.span.source_callsite(), "_", app).into() } } diff --git a/src/tools/clippy/clippy_lints/src/methods/is_empty.rs b/src/tools/clippy/clippy_lints/src/methods/is_empty.rs index 4c81b22861b4c..545bef1a4c5bc 100644 --- a/src/tools/clippy/clippy_lints/src/methods/is_empty.rs +++ b/src/tools/clippy/clippy_lints/src/methods/is_empty.rs @@ -14,15 +14,13 @@ pub(super) fn check(cx: &LateContext<'_>, expr: &'_ Expr<'_>, receiver: &Expr<'_ if expr.span.in_external_macro(cx.sess().source_map()) || !receiver.span.eq_ctxt(expr.span) { return; } - if let Some(parent) = get_parent_expr(cx, expr) { - if let Some(parent) = get_parent_expr(cx, parent) { - if is_inside_always_const_context(cx.tcx, expr.hir_id) - && let Some(macro_call) = root_macro_call(parent.span) - && is_assert_macro(cx, macro_call.def_id) - { - return; - } - } + if let Some(parent) = get_parent_expr(cx, expr) + && let Some(parent) = get_parent_expr(cx, parent) + && is_inside_always_const_context(cx.tcx, expr.hir_id) + && let Some(macro_call) = root_macro_call(parent.span) + && is_assert_macro(cx, macro_call.def_id) + { + return; } let init_expr = expr_or_init(cx, receiver); if !receiver.span.eq_ctxt(init_expr.span) { diff --git a/src/tools/clippy/clippy_lints/src/methods/iter_cloned_collect.rs b/src/tools/clippy/clippy_lints/src/methods/iter_cloned_collect.rs index 49de83885a1ca..17cc07b91c5da 100644 --- a/src/tools/clippy/clippy_lints/src/methods/iter_cloned_collect.rs +++ b/src/tools/clippy/clippy_lints/src/methods/iter_cloned_collect.rs @@ -1,16 +1,22 @@ use crate::methods::utils::derefs_to_slice; use clippy_utils::diagnostics::span_lint_and_sugg; -use clippy_utils::ty::is_type_diagnostic_item; +use clippy_utils::ty::{get_iterator_item_ty, is_type_diagnostic_item}; use rustc_errors::Applicability; use rustc_hir as hir; use rustc_lint::LateContext; +use rustc_middle::ty; use rustc_span::sym; use super::ITER_CLONED_COLLECT; pub(super) fn check<'tcx>(cx: &LateContext<'tcx>, method_name: &str, expr: &hir::Expr<'_>, recv: &'tcx hir::Expr<'_>) { - if is_type_diagnostic_item(cx, cx.typeck_results().expr_ty(expr), sym::Vec) + let expr_ty = cx.typeck_results().expr_ty(expr); + if is_type_diagnostic_item(cx, expr_ty, sym::Vec) && let Some(slice) = derefs_to_slice(cx, recv, cx.typeck_results().expr_ty(recv)) + && let ty::Adt(_, args) = expr_ty.kind() + && let Some(iter_item_ty) = get_iterator_item_ty(cx, cx.typeck_results().expr_ty(recv)) + && let ty::Ref(_, iter_item_ty, _) = iter_item_ty.kind() + && *iter_item_ty == args.type_at(0) && let Some(to_replace) = expr.span.trim_start(slice.span.source_callsite()) { span_lint_and_sugg( diff --git a/src/tools/clippy/clippy_lints/src/methods/iter_filter.rs b/src/tools/clippy/clippy_lints/src/methods/iter_filter.rs index bafabec7e0695..adeff375c8aad 100644 --- a/src/tools/clippy/clippy_lints/src/methods/iter_filter.rs +++ b/src/tools/clippy/clippy_lints/src/methods/iter_filter.rs @@ -6,12 +6,12 @@ use super::{ITER_FILTER_IS_OK, ITER_FILTER_IS_SOME}; use clippy_utils::diagnostics::span_lint_and_sugg; use clippy_utils::source::{indent_of, reindent_multiline}; -use clippy_utils::{get_parent_expr, is_trait_method, peel_blocks, span_contains_comment}; +use clippy_utils::{get_parent_expr, is_trait_method, peel_blocks, span_contains_comment, sym}; use rustc_errors::Applicability; use rustc_hir as hir; use rustc_hir::QPath; use rustc_span::Span; -use rustc_span::symbol::{Ident, Symbol, sym}; +use rustc_span::symbol::{Ident, Symbol}; /// /// Returns true if the expression is a method call to `method_name` @@ -154,7 +154,7 @@ fn expression_type( if let Some(opt_defid) = cx.tcx.get_diagnostic_item(sym::Option) && let opt_ty = cx.tcx.type_of(opt_defid).skip_binder() && iter_item_ty.ty_adt_def() == opt_ty.ty_adt_def() - && is_method(cx, filter_arg, sym::Option, sym!(is_some), &[]) + && is_method(cx, filter_arg, sym::Option, sym::is_some, &[]) { return Some(FilterType::IsSome); } @@ -162,7 +162,7 @@ fn expression_type( if let Some(opt_defid) = cx.tcx.get_diagnostic_item(sym::Result) && let opt_ty = cx.tcx.type_of(opt_defid).skip_binder() && iter_item_ty.ty_adt_def() == opt_ty.ty_adt_def() - && is_method(cx, filter_arg, sym::Result, sym!(is_ok), &[]) + && is_method(cx, filter_arg, sym::Result, sym::is_ok, &[]) { return Some(FilterType::IsOk); } diff --git a/src/tools/clippy/clippy_lints/src/methods/iter_kv_map.rs b/src/tools/clippy/clippy_lints/src/methods/iter_kv_map.rs index 94415fc91061e..3ac9299ba9157 100644 --- a/src/tools/clippy/clippy_lints/src/methods/iter_kv_map.rs +++ b/src/tools/clippy/clippy_lints/src/methods/iter_kv_map.rs @@ -37,7 +37,7 @@ pub(super) fn check<'tcx>( (PatKind::Binding(ann, _, key, _), value) if pat_is_wild(cx, value, m_arg) => ("key", ann, key), _ => return, } - && let ty = cx.typeck_results().expr_ty(recv) + && let ty = cx.typeck_results().expr_ty_adjusted(recv).peel_refs() && (is_type_diagnostic_item(cx, ty, sym::HashMap) || is_type_diagnostic_item(cx, ty, sym::BTreeMap)) { let mut applicability = rustc_errors::Applicability::MachineApplicable; diff --git a/src/tools/clippy/clippy_lints/src/methods/iter_overeager_cloned.rs b/src/tools/clippy/clippy_lints/src/methods/iter_overeager_cloned.rs index f51bdc78f8a51..7bb625222ec0f 100644 --- a/src/tools/clippy/clippy_lints/src/methods/iter_overeager_cloned.rs +++ b/src/tools/clippy/clippy_lints/src/methods/iter_overeager_cloned.rs @@ -48,7 +48,7 @@ pub(super) fn check<'tcx>( && let Some(method_id) = typeck.type_dependent_def_id(cloned_call.hir_id) && cx.tcx.trait_of_item(method_id) == Some(iter_id) && let cloned_recv_ty = typeck.expr_ty_adjusted(cloned_recv) - && let Some(iter_assoc_ty) = cx.get_associated_type(cloned_recv_ty, iter_id, "Item") + && let Some(iter_assoc_ty) = cx.get_associated_type(cloned_recv_ty, iter_id, sym::Item) && matches!(*iter_assoc_ty.kind(), ty::Ref(_, ty, _) if !is_copy(cx, ty)) { if needs_into_iter diff --git a/src/tools/clippy/clippy_lints/src/methods/iterator_step_by_zero.rs b/src/tools/clippy/clippy_lints/src/methods/iterator_step_by_zero.rs index 9b358235a40df..90d5d9df55eed 100644 --- a/src/tools/clippy/clippy_lints/src/methods/iterator_step_by_zero.rs +++ b/src/tools/clippy/clippy_lints/src/methods/iterator_step_by_zero.rs @@ -8,14 +8,14 @@ use rustc_span::sym; use super::ITERATOR_STEP_BY_ZERO; pub(super) fn check<'tcx>(cx: &LateContext<'tcx>, expr: &hir::Expr<'_>, arg: &'tcx hir::Expr<'_>) { - if is_trait_method(cx, expr, sym::Iterator) { - if let Some(Constant::Int(0)) = ConstEvalCtxt::new(cx).eval(arg) { - span_lint( - cx, - ITERATOR_STEP_BY_ZERO, - expr.span, - "`Iterator::step_by(0)` will panic at runtime", - ); - } + if is_trait_method(cx, expr, sym::Iterator) + && let Some(Constant::Int(0)) = ConstEvalCtxt::new(cx).eval(arg) + { + span_lint( + cx, + ITERATOR_STEP_BY_ZERO, + expr.span, + "`Iterator::step_by(0)` will panic at runtime", + ); } } diff --git a/src/tools/clippy/clippy_lints/src/methods/manual_saturating_arithmetic.rs b/src/tools/clippy/clippy_lints/src/methods/manual_saturating_arithmetic.rs index 13918ed11b87d..18978a1d2bc86 100644 --- a/src/tools/clippy/clippy_lints/src/methods/manual_saturating_arithmetic.rs +++ b/src/tools/clippy/clippy_lints/src/methods/manual_saturating_arithmetic.rs @@ -106,15 +106,15 @@ fn is_min_or_max(cx: &LateContext<'_>, expr: &hir::Expr<'_>) -> Option { }; let check_lit = |expr: &hir::Expr<'_>, check_min: bool| { - if let hir::ExprKind::Lit(lit) = &expr.kind { - if let ast::LitKind::Int(value, _) = lit.node { - if value == maxval { - return Some(MinMax::Max); - } - - if check_min && value == minval { - return Some(MinMax::Min); - } + if let hir::ExprKind::Lit(lit) = &expr.kind + && let ast::LitKind::Int(value, _) = lit.node + { + if value == maxval { + return Some(MinMax::Max); + } + + if check_min && value == minval { + return Some(MinMax::Min); } } @@ -125,10 +125,10 @@ fn is_min_or_max(cx: &LateContext<'_>, expr: &hir::Expr<'_>) -> Option { return r; } - if ty.is_signed() { - if let hir::ExprKind::Unary(hir::UnOp::Neg, val) = &expr.kind { - return check_lit(val, true); - } + if ty.is_signed() + && let hir::ExprKind::Unary(hir::UnOp::Neg, val) = &expr.kind + { + return check_lit(val, true); } None diff --git a/src/tools/clippy/clippy_lints/src/methods/manual_str_repeat.rs b/src/tools/clippy/clippy_lints/src/methods/manual_str_repeat.rs index 098721dc046f8..8167e4f960534 100644 --- a/src/tools/clippy/clippy_lints/src/methods/manual_str_repeat.rs +++ b/src/tools/clippy/clippy_lints/src/methods/manual_str_repeat.rs @@ -77,7 +77,7 @@ pub(super) fn check( s @ Cow::Borrowed(_) => s, }, RepeatKind::String => Sugg::hir_with_context(cx, repeat_arg, ctxt, "..", &mut app) - .maybe_par() + .maybe_paren() .to_string() .into(), }; diff --git a/src/tools/clippy/clippy_lints/src/methods/map_clone.rs b/src/tools/clippy/clippy_lints/src/methods/map_clone.rs index 128b3695f48b7..333a33f7527d6 100644 --- a/src/tools/clippy/clippy_lints/src/methods/map_clone.rs +++ b/src/tools/clippy/clippy_lints/src/methods/map_clone.rs @@ -51,19 +51,19 @@ pub(super) fn check(cx: &LateContext<'_>, e: &hir::Expr<'_>, recv: &hir::Expr<'_ let closure_expr = peel_blocks(closure_body.value); match closure_body.params[0].pat.kind { hir::PatKind::Ref(inner, Mutability::Not) => { - if let hir::PatKind::Binding(hir::BindingMode::NONE, .., name, None) = inner.kind { - if ident_eq(name, closure_expr) { - lint_explicit_closure(cx, e.span, recv.span, true, msrv); - } + if let hir::PatKind::Binding(hir::BindingMode::NONE, .., name, None) = inner.kind + && ident_eq(name, closure_expr) + { + lint_explicit_closure(cx, e.span, recv.span, true, msrv); } }, hir::PatKind::Binding(hir::BindingMode::NONE, .., name, None) => { match closure_expr.kind { hir::ExprKind::Unary(hir::UnOp::Deref, inner) => { - if ident_eq(name, inner) { - if let ty::Ref(.., Mutability::Not) = cx.typeck_results().expr_ty(inner).kind() { - lint_explicit_closure(cx, e.span, recv.span, true, msrv); - } + if ident_eq(name, inner) + && let ty::Ref(.., Mutability::Not) = cx.typeck_results().expr_ty(inner).kind() + { + lint_explicit_closure(cx, e.span, recv.span, true, msrv); } }, hir::ExprKind::MethodCall(method, obj, [], _) => { @@ -114,19 +114,17 @@ fn handle_path( ) { if let Some(path_def_id) = cx.qpath_res(qpath, arg.hir_id).opt_def_id() && cx.tcx.lang_items().get(LangItem::CloneFn) == Some(path_def_id) - { // The `copied` and `cloned` methods are only available on `&T` and `&mut T` in `Option` // and `Result`. - if let ty::Adt(_, args) = cx.typeck_results().expr_ty(recv).kind() - && let args = args.as_slice() - && let Some(ty) = args.iter().find_map(|generic_arg| generic_arg.as_type()) - && let ty::Ref(_, ty, Mutability::Not) = ty.kind() - && let ty::FnDef(_, lst) = cx.typeck_results().expr_ty(arg).kind() - && lst.iter().all(|l| l.as_type() == Some(*ty)) - && !should_call_clone_as_function(cx, *ty) - { - lint_path(cx, e.span, recv.span, is_copy(cx, ty.peel_refs())); - } + && let ty::Adt(_, args) = cx.typeck_results().expr_ty(recv).kind() + && let args = args.as_slice() + && let Some(ty) = args.iter().find_map(|generic_arg| generic_arg.as_type()) + && let ty::Ref(_, ty, Mutability::Not) = ty.kind() + && let ty::FnDef(_, lst) = cx.typeck_results().expr_ty(arg).kind() + && lst.iter().all(|l| l.as_type() == Some(*ty)) + && !should_call_clone_as_function(cx, *ty) + { + lint_path(cx, e.span, recv.span, is_copy(cx, ty.peel_refs())); } } diff --git a/src/tools/clippy/clippy_lints/src/methods/map_with_unused_argument_over_ranges.rs b/src/tools/clippy/clippy_lints/src/methods/map_with_unused_argument_over_ranges.rs index 6cf0936c598fa..a2a522a60687d 100644 --- a/src/tools/clippy/clippy_lints/src/methods/map_with_unused_argument_over_ranges.rs +++ b/src/tools/clippy/clippy_lints/src/methods/map_with_unused_argument_over_ranges.rs @@ -41,7 +41,7 @@ fn extract_count_with_applicability( return Some(format!("{count}")); } let end_snippet = Sugg::hir_with_applicability(cx, end, "...", applicability) - .maybe_par() + .maybe_paren() .into_string(); if lower_bound == 0 { if range.limits == RangeLimits::Closed { diff --git a/src/tools/clippy/clippy_lints/src/methods/mod.rs b/src/tools/clippy/clippy_lints/src/methods/mod.rs index 1d9296016e25f..10f4637d08f66 100644 --- a/src/tools/clippy/clippy_lints/src/methods/mod.rs +++ b/src/tools/clippy/clippy_lints/src/methods/mod.rs @@ -114,6 +114,7 @@ mod suspicious_command_arg_space; mod suspicious_map; mod suspicious_splitn; mod suspicious_to_owned; +mod swap_with_temporary; mod type_id_on_box; mod unbuffered_bytes; mod uninit_assumed_init; @@ -478,9 +479,6 @@ declare_clippy_lint! { /// Because you usually call `expect()` on the `Result` /// directly to get a better error message. /// - /// ### Known problems - /// The error type needs to implement `Debug` - /// /// ### Example /// ```no_run /// # let x = Ok::<_, ()>(()); @@ -2429,7 +2427,7 @@ declare_clippy_lint! { /// /// ### Limitations /// This lint currently only looks for usages of - /// `.then_some(..).unwrap_or(..)` and `.then(..).unwrap_or(..)`, but will be expanded + /// `.{then, then_some}(..).{unwrap_or, unwrap_or_else, unwrap_or_default}(..)`, but will be expanded /// to account for similar patterns. /// /// ### Example @@ -4286,7 +4284,7 @@ declare_clippy_lint! { /// ```no_run /// let last_arg = "echo hello world".split(' ').next_back(); /// ``` - #[clippy::version = "1.85.0"] + #[clippy::version = "1.86.0"] pub DOUBLE_ENDED_ITERATOR_LAST, perf, "using `Iterator::last` on a `DoubleEndedIterator`" @@ -4372,11 +4370,10 @@ declare_clippy_lint! { declare_clippy_lint! { /// ### What it does - /// - /// Detect functions that end with `Option::and_then` or `Result::and_then`, and suggest using a question mark (`?`) instead. + /// Detect functions that end with `Option::and_then` or `Result::and_then`, and suggest using + /// the `?` operator instead. /// /// ### Why is this bad? - /// /// The `and_then` method is used to chain a computation that returns an `Option` or a `Result`. /// This can be replaced with the `?` operator, which is more concise and idiomatic. /// @@ -4478,12 +4475,59 @@ declare_clippy_lint! { /// ```no_run /// let _ = std::io::Error::other("bad".to_string()); /// ``` - #[clippy::version = "1.86.0"] + #[clippy::version = "1.87.0"] pub IO_OTHER_ERROR, style, "calling `std::io::Error::new(std::io::ErrorKind::Other, _)`" } +declare_clippy_lint! { + /// ### What it does + /// Checks for usage of `std::mem::swap` with temporary values. + /// + /// ### Why is this bad? + /// Storing a new value in place of a temporary value which will + /// be dropped right after the `swap` is an inefficient way of performing + /// an assignment. The same result can be achieved by using a regular + /// assignment. + /// + /// ### Examples + /// ```no_run + /// fn replace_string(s: &mut String) { + /// std::mem::swap(s, &mut String::from("replaced")); + /// } + /// ``` + /// Use instead: + /// ```no_run + /// fn replace_string(s: &mut String) { + /// *s = String::from("replaced"); + /// } + /// ``` + /// + /// Also, swapping two temporary values has no effect, as they will + /// both be dropped right after swapping them. This is likely an indication + /// of a bug. For example, the following code swaps the references to + /// the last element of the vectors, instead of swapping the elements + /// themselves: + /// + /// ```no_run + /// fn bug(v1: &mut [i32], v2: &mut [i32]) { + /// // Incorrect: swapping temporary references (`&mut &mut` passed to swap) + /// std::mem::swap(&mut v1.last_mut().unwrap(), &mut v2.last_mut().unwrap()); + /// } + /// ``` + /// Use instead: + /// ```no_run + /// fn correct(v1: &mut [i32], v2: &mut [i32]) { + /// std::mem::swap(v1.last_mut().unwrap(), v2.last_mut().unwrap()); + /// } + /// ``` + #[clippy::version = "1.88.0"] + pub SWAP_WITH_TEMPORARY, + complexity, + "detect swap with a temporary value" +} + #[expect(clippy::struct_excessive_bools)] pub struct Methods { avoid_breaking_exported_api: bool, @@ -4661,17 +4705,19 @@ impl_lint_pass!(Methods => [ UNBUFFERED_BYTES, MANUAL_CONTAINS, IO_OTHER_ERROR, + SWAP_WITH_TEMPORARY, ]); /// Extracts a method call name, args, and `Span` of the method name. pub fn method_call<'tcx>( recv: &'tcx Expr<'tcx>, ) -> Option<(&'tcx str, &'tcx Expr<'tcx>, &'tcx [Expr<'tcx>], Span, Span)> { - if let ExprKind::MethodCall(path, receiver, args, call_span) = recv.kind { - if !args.iter().any(|e| e.span.from_expansion()) && !receiver.span.from_expansion() { - let name = path.ident.name.as_str(); - return Some((name, receiver, args, path.ident.span, call_span)); - } + if let ExprKind::MethodCall(path, receiver, args, call_span) = recv.kind + && !args.iter().any(|e| e.span.from_expansion()) + && !receiver.span.from_expansion() + { + let name = path.ident.name.as_str(); + return Some((name, receiver, args, path.ident.span, call_span)); } None } @@ -4691,6 +4737,7 @@ impl<'tcx> LateLintPass<'tcx> for Methods { manual_c_str_literals::check(cx, expr, func, args, self.msrv); useless_nonzero_new_unchecked::check(cx, expr, func, args, self.msrv); io_other_error::check(cx, expr, func, args, self.msrv); + swap_with_temporary::check(cx, expr, func, args); }, ExprKind::MethodCall(method_call, receiver, args, _) => { let method_span = method_call.ident.span; @@ -4992,7 +5039,7 @@ impl Methods { }, ("ends_with", [arg]) => { if let ExprKind::MethodCall(.., span) = expr.kind { - case_sensitive_file_extension_comparisons::check(cx, expr, span, recv, arg); + case_sensitive_file_extension_comparisons::check(cx, expr, span, recv, arg, self.msrv); } path_ends_with_ext::check(cx, recv, arg, expr, self.msrv, &self.allowed_dotfiles); }, @@ -5421,15 +5468,21 @@ impl Methods { option_map_unwrap_or::check(cx, expr, m_recv, m_arg, recv, u_arg, span, self.msrv); }, Some((then_method @ ("then" | "then_some"), t_recv, [t_arg], _, _)) => { - obfuscated_if_else::check(cx, expr, t_recv, t_arg, u_arg, then_method, "unwrap_or"); + obfuscated_if_else::check(cx, expr, t_recv, t_arg, Some(u_arg), then_method, "unwrap_or"); }, _ => {}, } unnecessary_literal_unwrap::check(cx, expr, recv, name, args); }, ("unwrap_or_default", []) => { - if let Some(("map", m_recv, [arg], span, _)) = method_call(recv) { - manual_is_variant_and::check(cx, expr, m_recv, arg, span, self.msrv); + match method_call(recv) { + Some(("map", m_recv, [arg], span, _)) => { + manual_is_variant_and::check(cx, expr, m_recv, arg, span, self.msrv); + }, + Some((then_method @ ("then" | "then_some"), t_recv, [t_arg], _, _)) => { + obfuscated_if_else::check(cx, expr, t_recv, t_arg, None, then_method, "unwrap_or_default"); + }, + _ => {}, } unnecessary_literal_unwrap::check(cx, expr, recv, name, args); }, @@ -5441,7 +5494,15 @@ impl Methods { Some(("map", recv, [map_arg], _, _)) if map_unwrap_or::check(cx, expr, recv, map_arg, u_arg, self.msrv) => {}, Some((then_method @ ("then" | "then_some"), t_recv, [t_arg], _, _)) => { - obfuscated_if_else::check(cx, expr, t_recv, t_arg, u_arg, then_method, "unwrap_or_else"); + obfuscated_if_else::check( + cx, + expr, + t_recv, + t_arg, + Some(u_arg), + then_method, + "unwrap_or_else", + ); }, _ => { unnecessary_lazy_eval::check(cx, expr, recv, u_arg, "unwrap_or"); diff --git a/src/tools/clippy/clippy_lints/src/methods/needless_character_iteration.rs b/src/tools/clippy/clippy_lints/src/methods/needless_character_iteration.rs index 743aacf058856..f528f7f065c6e 100644 --- a/src/tools/clippy/clippy_lints/src/methods/needless_character_iteration.rs +++ b/src/tools/clippy/clippy_lints/src/methods/needless_character_iteration.rs @@ -9,7 +9,7 @@ use super::utils::get_last_chain_binding_hir_id; use clippy_utils::diagnostics::span_lint_and_sugg; use clippy_utils::paths::CHAR_IS_ASCII; use clippy_utils::source::SpanRangeExt; -use clippy_utils::{match_def_path, path_to_local_id, peel_blocks}; +use clippy_utils::{match_def_path, path_to_local_id, peel_blocks, sym}; fn peels_expr_ref<'a, 'tcx>(mut expr: &'a Expr<'tcx>) -> &'a Expr<'tcx> { while let ExprKind::AddrOf(_, _, e) = expr.kind { @@ -32,7 +32,7 @@ fn handle_expr( // If we have `!is_ascii`, then only `.any()` should warn. And if the condition is // `is_ascii`, then only `.all()` should warn. if revert != is_all - && method.ident.name.as_str() == "is_ascii" + && method.ident.name == sym::is_ascii && path_to_local_id(receiver, first_param) && let char_arg_ty = cx.typeck_results().expr_ty_adjusted(receiver).peel_refs() && *char_arg_ty.kind() == ty::Char @@ -102,7 +102,7 @@ pub(super) fn check(cx: &LateContext<'_>, call_expr: &Expr<'_>, recv: &Expr<'_>, && let body = cx.tcx.hir_body(body) && let Some(first_param) = body.params.first() && let ExprKind::MethodCall(method, mut recv, [], _) = recv.kind - && method.ident.name.as_str() == "chars" + && method.ident.name == sym::chars && let str_ty = cx.typeck_results().expr_ty_adjusted(recv).peel_refs() && *str_ty.kind() == ty::Str { diff --git a/src/tools/clippy/clippy_lints/src/methods/needless_collect.rs b/src/tools/clippy/clippy_lints/src/methods/needless_collect.rs index 56ff7e2c61b22..cd22583b8a253 100644 --- a/src/tools/clippy/clippy_lints/src/methods/needless_collect.rs +++ b/src/tools/clippy/clippy_lints/src/methods/needless_collect.rs @@ -4,10 +4,12 @@ use super::NEEDLESS_COLLECT; use clippy_utils::diagnostics::{span_lint_and_sugg, span_lint_hir_and_then}; use clippy_utils::source::{snippet, snippet_with_applicability}; use clippy_utils::sugg::Sugg; -use clippy_utils::ty::{get_type_diagnostic_name, make_normalized_projection, make_projection}; +use clippy_utils::ty::{ + get_type_diagnostic_name, has_non_owning_mutable_access, make_normalized_projection, make_projection, +}; use clippy_utils::{ CaptureKind, can_move_expr_to_closure, fn_def_id, get_enclosing_block, higher, is_trait_method, path_to_local, - path_to_local_id, + path_to_local_id, sym, }; use rustc_data_structures::fx::FxHashMap; use rustc_errors::{Applicability, MultiSpan}; @@ -17,12 +19,13 @@ use rustc_hir::{ }; use rustc_lint::LateContext; use rustc_middle::hir::nested_filter; -use rustc_middle::ty::{self, AssocKind, ClauseKind, EarlyBinder, GenericArg, GenericArgKind, Ty}; +use rustc_middle::ty::{self, AssocTag, ClauseKind, EarlyBinder, GenericArg, GenericArgKind, Ty}; +use rustc_span::Span; use rustc_span::symbol::Ident; -use rustc_span::{Span, sym}; const NEEDLESS_COLLECT_MSG: &str = "avoid using `collect()` when not needed"; +#[expect(clippy::too_many_lines)] pub(super) fn check<'tcx>( cx: &LateContext<'tcx>, name_span: Span, @@ -30,6 +33,11 @@ pub(super) fn check<'tcx>( iter_expr: &'tcx Expr<'tcx>, call_span: Span, ) { + let iter_ty = cx.typeck_results().expr_ty(iter_expr); + if has_non_owning_mutable_access(cx, iter_ty) { + return; // don't lint if the iterator has side effects + } + match cx.tcx.parent_hir_node(collect_expr.hir_id) { Node::Expr(parent) => { check_collect_into_intoiterator(cx, parent, collect_expr, call_span, iter_expr); @@ -238,10 +246,10 @@ fn is_contains_sig(cx: &LateContext<'_>, call_id: HirId, iter_expr: &Expr<'_>) - .instantiate_bound_regions_with_erased(sig.rebind(search_ty)) .kind() && let Some(iter_trait) = cx.tcx.get_diagnostic_item(sym::Iterator) - && let Some(iter_item) = cx.tcx.associated_items(iter_trait).find_by_name_and_kind( + && let Some(iter_item) = cx.tcx.associated_items(iter_trait).find_by_ident_and_kind( cx.tcx, Ident::with_dummy_span(sym::Item), - AssocKind::Type, + AssocTag::Type, iter_trait, ) && let args = cx.tcx.mk_args(&[GenericArg::from(typeck.expr_ty_adjusted(iter_expr))]) @@ -331,7 +339,7 @@ impl<'tcx> Visitor<'tcx> for IterFunctionVisitor<'_, 'tcx> { // Check function calls on our collection if let ExprKind::MethodCall(method_name, recv, args, _) = &expr.kind { if args.is_empty() - && method_name.ident.name.as_str() == "collect" + && method_name.ident.name == sym::collect && is_trait_method(self.cx, expr, sym::Iterator) { self.current_mutably_captured_ids = get_captured_ids(self.cx, self.cx.typeck_results().expr_ty(recv)); @@ -377,20 +385,20 @@ impl<'tcx> Visitor<'tcx> for IterFunctionVisitor<'_, 'tcx> { return; } - if let Some(hir_id) = path_to_local(recv) { - if let Some(index) = self.hir_id_uses_map.remove(&hir_id) { - if self - .illegal_mutable_capture_ids - .intersection(&self.current_mutably_captured_ids) - .next() - .is_none() - { - if let Some(hir_id) = self.current_statement_hir_id { - self.hir_id_uses_map.insert(hir_id, index); - } - } else { - self.uses[index] = None; + if let Some(hir_id) = path_to_local(recv) + && let Some(index) = self.hir_id_uses_map.remove(&hir_id) + { + if self + .illegal_mutable_capture_ids + .intersection(&self.current_mutably_captured_ids) + .next() + .is_none() + { + if let Some(hir_id) = self.current_statement_hir_id { + self.hir_id_uses_map.insert(hir_id, index); } + } else { + self.uses[index] = None; } } } diff --git a/src/tools/clippy/clippy_lints/src/methods/needless_option_take.rs b/src/tools/clippy/clippy_lints/src/methods/needless_option_take.rs index 88b9c69f6f949..cd1b97f3c51b5 100644 --- a/src/tools/clippy/clippy_lints/src/methods/needless_option_take.rs +++ b/src/tools/clippy/clippy_lints/src/methods/needless_option_take.rs @@ -9,26 +9,27 @@ use super::NEEDLESS_OPTION_TAKE; pub(super) fn check<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>, recv: &'tcx Expr<'_>) { // Checks if expression type is equal to sym::Option and if the expr is not a syntactic place - if !recv.is_syntactic_place_expr() && is_expr_option(cx, recv) { - if let Some(function_name) = source_of_temporary_value(recv) { - span_lint_and_then( - cx, - NEEDLESS_OPTION_TAKE, - expr.span, - "called `Option::take()` on a temporary value", - |diag| { - diag.note(format!( - "`{function_name}` creates a temporary value, so calling take() has no effect" - )); - diag.span_suggestion( - expr.span.with_lo(recv.span.hi()), - "remove", - "", - Applicability::MachineApplicable, - ); - }, - ); - } + if !recv.is_syntactic_place_expr() + && is_expr_option(cx, recv) + && let Some(function_name) = source_of_temporary_value(recv) + { + span_lint_and_then( + cx, + NEEDLESS_OPTION_TAKE, + expr.span, + "called `Option::take()` on a temporary value", + |diag| { + diag.note(format!( + "`{function_name}` creates a temporary value, so calling take() has no effect" + )); + diag.span_suggestion( + expr.span.with_lo(recv.span.hi()), + "remove", + "", + Applicability::MachineApplicable, + ); + }, + ); } } @@ -44,10 +45,10 @@ fn is_expr_option(cx: &LateContext<'_>, expr: &Expr<'_>) -> bool { fn source_of_temporary_value<'a>(expr: &'a Expr<'_>) -> Option<&'a str> { match expr.peel_borrows().kind { ExprKind::Call(function, _) => { - if let ExprKind::Path(QPath::Resolved(_, func_path)) = function.kind { - if !func_path.segments.is_empty() { - return Some(func_path.segments[0].ident.name.as_str()); - } + if let ExprKind::Path(QPath::Resolved(_, func_path)) = function.kind + && !func_path.segments.is_empty() + { + return Some(func_path.segments[0].ident.name.as_str()); } if let ExprKind::Path(QPath::TypeRelative(_, func_path_segment)) = function.kind { return Some(func_path_segment.ident.name.as_str()); diff --git a/src/tools/clippy/clippy_lints/src/methods/obfuscated_if_else.rs b/src/tools/clippy/clippy_lints/src/methods/obfuscated_if_else.rs index 9a5ffdeaf4e8e..1cc56de48763e 100644 --- a/src/tools/clippy/clippy_lints/src/methods/obfuscated_if_else.rs +++ b/src/tools/clippy/clippy_lints/src/methods/obfuscated_if_else.rs @@ -14,14 +14,17 @@ pub(super) fn check<'tcx>( expr: &'tcx hir::Expr<'_>, then_recv: &'tcx hir::Expr<'_>, then_arg: &'tcx hir::Expr<'_>, - unwrap_arg: &'tcx hir::Expr<'_>, + unwrap_arg: Option<&'tcx hir::Expr<'_>>, then_method_name: &str, unwrap_method_name: &str, ) { let recv_ty = cx.typeck_results().expr_ty(then_recv); if recv_ty.is_bool() { - let mut applicability = if switch_to_eager_eval(cx, then_arg) && switch_to_eager_eval(cx, unwrap_arg) { + let then_eager = switch_to_eager_eval(cx, then_arg); + let unwrap_eager = unwrap_arg.is_none_or(|arg| switch_to_eager_eval(cx, arg)); + + let mut applicability = if then_eager && unwrap_eager { Applicability::MachineApplicable } else { Applicability::MaybeIncorrect @@ -36,16 +39,17 @@ pub(super) fn check<'tcx>( _ => return, }; - // FIXME: Add `unwrap_or_else` symbol + // FIXME: Add `unwrap_or_else` and `unwrap_or_default` symbol let els = match unwrap_method_name { - "unwrap_or" => snippet_with_applicability(cx, unwrap_arg.span, "..", &mut applicability), - "unwrap_or_else" if let ExprKind::Closure(closure) = unwrap_arg.kind => { + "unwrap_or" => snippet_with_applicability(cx, unwrap_arg.unwrap().span, "..", &mut applicability), + "unwrap_or_else" if let ExprKind::Closure(closure) = unwrap_arg.unwrap().kind => { let body = cx.tcx.hir_body(closure.body); snippet_with_applicability(cx, body.value.span, "..", &mut applicability) }, - "unwrap_or_else" if let ExprKind::Path(_) = unwrap_arg.kind => { - snippet_with_applicability(cx, unwrap_arg.span, "_", &mut applicability) + "()" + "unwrap_or_else" if let ExprKind::Path(_) = unwrap_arg.unwrap().kind => { + snippet_with_applicability(cx, unwrap_arg.unwrap().span, "_", &mut applicability) + "()" }, + "unwrap_or_default" => "Default::default()".into(), _ => return, }; diff --git a/src/tools/clippy/clippy_lints/src/methods/or_fun_call.rs b/src/tools/clippy/clippy_lints/src/methods/or_fun_call.rs index c03420a5143e6..b78b082e460ef 100644 --- a/src/tools/clippy/clippy_lints/src/methods/or_fun_call.rs +++ b/src/tools/clippy/clippy_lints/src/methods/or_fun_call.rs @@ -6,13 +6,13 @@ use clippy_utils::source::snippet_with_context; use clippy_utils::ty::{expr_type_is_certain, implements_trait, is_type_diagnostic_item}; use clippy_utils::visitors::for_each_expr; use clippy_utils::{ - contains_return, is_default_equivalent, is_default_equivalent_call, last_path_segment, peel_blocks, + contains_return, is_default_equivalent, is_default_equivalent_call, last_path_segment, peel_blocks, sym, }; use rustc_errors::Applicability; use rustc_lint::LateContext; use rustc_middle::ty; use rustc_span::Span; -use rustc_span::symbol::{self, Symbol, sym}; +use rustc_span::symbol::{self, Symbol}; use {rustc_ast as ast, rustc_hir as hir}; use super::{OR_FUN_CALL, UNWRAP_OR_DEFAULT}; @@ -66,8 +66,8 @@ pub(super) fn check<'tcx>( }; let sugg = match (name, call_expr.is_some()) { - ("unwrap_or", true) | ("unwrap_or_else", false) => sym!(unwrap_or_default), - ("or_insert", true) | ("or_insert_with", false) => sym!(or_default), + ("unwrap_or", true) | ("unwrap_or_else", false) => sym::unwrap_or_default, + ("or_insert", true) | ("or_insert_with", false) => sym::or_default, _ => return false, }; @@ -78,8 +78,7 @@ pub(super) fn check<'tcx>( .iter() .flat_map(|impl_id| cx.tcx.associated_items(impl_id).filter_by_name_unhygienic(sugg)) .find_map(|assoc| { - if assoc.fn_has_self_parameter - && cx.tcx.fn_sig(assoc.def_id).skip_binder().inputs().skip_binder().len() == 1 + if assoc.is_method() && cx.tcx.fn_sig(assoc.def_id).skip_binder().inputs().skip_binder().len() == 1 { Some(assoc.def_id) } else { diff --git a/src/tools/clippy/clippy_lints/src/methods/read_line_without_trim.rs b/src/tools/clippy/clippy_lints/src/methods/read_line_without_trim.rs index fe999a3b5f8f2..407f2e80aff25 100644 --- a/src/tools/clippy/clippy_lints/src/methods/read_line_without_trim.rs +++ b/src/tools/clippy/clippy_lints/src/methods/read_line_without_trim.rs @@ -1,17 +1,16 @@ use std::ops::ControlFlow; use clippy_utils::diagnostics::span_lint_and_then; -use clippy_utils::get_parent_expr; use clippy_utils::source::snippet; use clippy_utils::ty::is_type_diagnostic_item; use clippy_utils::visitors::for_each_local_use_after_expr; +use clippy_utils::{get_parent_expr, sym}; use rustc_ast::LitKind; use rustc_errors::Applicability; use rustc_hir::def::Res; use rustc_hir::{BinOpKind, Expr, ExprKind, QPath}; use rustc_lint::LateContext; use rustc_middle::ty::{self, Ty}; -use rustc_span::sym; use super::READ_LINE_WITHOUT_TRIM; @@ -44,7 +43,7 @@ pub fn check(cx: &LateContext<'_>, call: &Expr<'_>, recv: &Expr<'_>, arg: &Expr< if let Some(parent) = get_parent_expr(cx, expr) { let data = if let ExprKind::MethodCall(segment, recv, args, span) = parent.kind { if args.is_empty() - && segment.ident.name.as_str() == "parse" + && segment.ident.name == sym::parse && let parse_result_ty = cx.typeck_results().expr_ty(parent) && is_type_diagnostic_item(cx, parse_result_ty, sym::Result) && let ty::Adt(_, substs) = parse_result_ty.kind() @@ -58,7 +57,7 @@ pub fn check(cx: &LateContext<'_>, call: &Expr<'_>, recv: &Expr<'_>, arg: &Expr< "calling `.parse()` on a string without trimming the trailing newline character", "checking", )) - } else if segment.ident.name.as_str() == "ends_with" + } else if segment.ident.name == sym::ends_with && recv.span == expr.span && let [arg] = args && expr_is_string_literal_without_trailing_newline(arg) diff --git a/src/tools/clippy/clippy_lints/src/methods/return_and_then.rs b/src/tools/clippy/clippy_lints/src/methods/return_and_then.rs index e8861935d4216..91643b0dfefde 100644 --- a/src/tools/clippy/clippy_lints/src/methods/return_and_then.rs +++ b/src/tools/clippy/clippy_lints/src/methods/return_and_then.rs @@ -55,7 +55,6 @@ pub(super) fn check<'tcx>( None => &body_snip, }; - let msg = "use the question mark operator instead of an `and_then` call"; let sugg = format!( "let {} = {}?;\n{}", arg_snip, @@ -63,5 +62,13 @@ pub(super) fn check<'tcx>( reindent_multiline(inner, false, indent_of(cx, expr.span)) ); - span_lint_and_sugg(cx, RETURN_AND_THEN, expr.span, msg, "try", sugg, applicability); + span_lint_and_sugg( + cx, + RETURN_AND_THEN, + expr.span, + "use the `?` operator instead of an `and_then` call", + "try", + sugg, + applicability, + ); } diff --git a/src/tools/clippy/clippy_lints/src/methods/seek_from_current.rs b/src/tools/clippy/clippy_lints/src/methods/seek_from_current.rs index d318462e58415..8b51268da465e 100644 --- a/src/tools/clippy/clippy_lints/src/methods/seek_from_current.rs +++ b/src/tools/clippy/clippy_lints/src/methods/seek_from_current.rs @@ -3,33 +3,33 @@ use rustc_data_structures::packed::Pu128; use rustc_errors::Applicability; use rustc_hir::{Expr, ExprKind}; use rustc_lint::LateContext; -use rustc_span::sym; use clippy_utils::diagnostics::span_lint_and_sugg; -use clippy_utils::is_enum_variant_ctor; use clippy_utils::source::snippet_with_applicability; use clippy_utils::ty::implements_trait; +use clippy_utils::{is_enum_variant_ctor, sym}; use super::SEEK_FROM_CURRENT; pub(super) fn check<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>, recv: &'tcx Expr<'_>, arg: &'tcx Expr<'_>) { let ty = cx.typeck_results().expr_ty(recv); - if let Some(def_id) = cx.tcx.get_diagnostic_item(sym::IoSeek) { - if implements_trait(cx, ty, def_id, &[]) && arg_is_seek_from_current(cx, arg) { - let mut applicability = Applicability::MachineApplicable; - let snip = snippet_with_applicability(cx, recv.span, "..", &mut applicability); + if let Some(def_id) = cx.tcx.get_diagnostic_item(sym::IoSeek) + && implements_trait(cx, ty, def_id, &[]) + && arg_is_seek_from_current(cx, arg) + { + let mut applicability = Applicability::MachineApplicable; + let snip = snippet_with_applicability(cx, recv.span, "..", &mut applicability); - span_lint_and_sugg( - cx, - SEEK_FROM_CURRENT, - expr.span, - "using `SeekFrom::Current` to start from current position", - "replace with", - format!("{snip}.stream_position()"), - applicability, - ); - } + span_lint_and_sugg( + cx, + SEEK_FROM_CURRENT, + expr.span, + "using `SeekFrom::Current` to start from current position", + "replace with", + format!("{snip}.stream_position()"), + applicability, + ); } } @@ -37,14 +37,12 @@ fn arg_is_seek_from_current<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) if let ExprKind::Call(f, [arg]) = expr.kind && let ExprKind::Path(ref path) = f.kind && let Some(ctor_call_id) = cx.qpath_res(path, f.hir_id).opt_def_id() - && is_enum_variant_ctor(cx, sym::SeekFrom, sym!(Current), ctor_call_id) - { + && is_enum_variant_ctor(cx, sym::SeekFrom, sym::Current, ctor_call_id) // check if argument of `SeekFrom::Current` is `0` - if let ExprKind::Lit(lit) = arg.kind - && let LitKind::Int(Pu128(0), LitIntType::Unsuffixed) = lit.node - { - return true; - } + && let ExprKind::Lit(lit) = arg.kind + && let LitKind::Int(Pu128(0), LitIntType::Unsuffixed) = lit.node + { + return true; } false diff --git a/src/tools/clippy/clippy_lints/src/methods/seek_to_start_instead_of_rewind.rs b/src/tools/clippy/clippy_lints/src/methods/seek_to_start_instead_of_rewind.rs index 7b1dd9e58c50e..b8405a78f23a9 100644 --- a/src/tools/clippy/clippy_lints/src/methods/seek_to_start_instead_of_rewind.rs +++ b/src/tools/clippy/clippy_lints/src/methods/seek_to_start_instead_of_rewind.rs @@ -1,12 +1,12 @@ use clippy_utils::diagnostics::span_lint_and_then; use clippy_utils::ty::implements_trait; -use clippy_utils::{is_enum_variant_ctor, is_expr_used_or_unified}; +use clippy_utils::{is_enum_variant_ctor, is_expr_used_or_unified, sym}; use rustc_ast::ast::{LitIntType, LitKind}; use rustc_data_structures::packed::Pu128; use rustc_errors::Applicability; use rustc_hir::{Expr, ExprKind}; use rustc_lint::LateContext; -use rustc_span::{Span, sym}; +use rustc_span::Span; use super::SEEK_TO_START_INSTEAD_OF_REWIND; @@ -29,7 +29,7 @@ pub(super) fn check<'tcx>( && let ExprKind::Call(func, [arg]) = arg.kind && let ExprKind::Path(ref path) = func.kind && let Some(ctor_call_id) = cx.qpath_res(path, func.hir_id).opt_def_id() - && is_enum_variant_ctor(cx, sym::SeekFrom, sym!(Start), ctor_call_id) + && is_enum_variant_ctor(cx, sym::SeekFrom, sym::Start, ctor_call_id) && let ExprKind::Lit(lit) = arg.kind && let LitKind::Int(Pu128(0), LitIntType::Unsuffixed) = lit.node { diff --git a/src/tools/clippy/clippy_lints/src/methods/str_split.rs b/src/tools/clippy/clippy_lints/src/methods/str_split.rs index 3586e11f56ab2..fb4ac7b3613dd 100644 --- a/src/tools/clippy/clippy_lints/src/methods/str_split.rs +++ b/src/tools/clippy/clippy_lints/src/methods/str_split.rs @@ -1,5 +1,6 @@ use clippy_utils::diagnostics::span_lint_and_sugg; use clippy_utils::source::snippet_with_context; +use clippy_utils::sym; use clippy_utils::visitors::is_const_evaluatable; use rustc_ast::ast::LitKind; use rustc_errors::Applicability; @@ -19,7 +20,7 @@ pub(super) fn check<'a>(cx: &LateContext<'a>, expr: &'_ Expr<'_>, split_recv: &' && !is_const_evaluatable(cx, trim_recv) && let ExprKind::Lit(split_lit) = split_arg.kind && (matches!(split_lit.node, LitKind::Char('\n')) - || matches!(split_lit.node, LitKind::Str(sym, _) if (sym.as_str() == "\n" || sym.as_str() == "\r\n"))) + || matches!(split_lit.node, LitKind::Str(sym::LF | sym::CRLF, _))) { let mut app = Applicability::MaybeIncorrect; span_lint_and_sugg( diff --git a/src/tools/clippy/clippy_lints/src/methods/str_splitn.rs b/src/tools/clippy/clippy_lints/src/methods/str_splitn.rs index 4ccefb7ec9d77..d183457da25a2 100644 --- a/src/tools/clippy/clippy_lints/src/methods/str_splitn.rs +++ b/src/tools/clippy/clippy_lints/src/methods/str_splitn.rs @@ -238,15 +238,14 @@ fn indirect_usage<'tcx>( unwrap_kind: Some(unwrap_kind), .. } = iter_usage + && parent_id == local_hir_id { - if parent_id == local_hir_id { - return Some(IndirectUsage { - name: ident.name, - span: stmt.span, - init_expr, - unwrap_kind, - }); - } + return Some(IndirectUsage { + name: ident.name, + span: stmt.span, + init_expr, + unwrap_kind, + }); } } diff --git a/src/tools/clippy/clippy_lints/src/methods/suspicious_map.rs b/src/tools/clippy/clippy_lints/src/methods/suspicious_map.rs index 1bd48525f12d8..788014d9bb632 100644 --- a/src/tools/clippy/clippy_lints/src/methods/suspicious_map.rs +++ b/src/tools/clippy/clippy_lints/src/methods/suspicious_map.rs @@ -13,11 +13,11 @@ pub fn check(cx: &LateContext<'_>, expr: &hir::Expr<'_>, count_recv: &hir::Expr< && let closure_body = cx.tcx.hir_body(closure.body) && !cx.typeck_results().expr_ty(closure_body.value).is_unit() { - if let Some(map_mutated_vars) = mutated_variables(closure_body.value, cx) { + if let Some(map_mutated_vars) = mutated_variables(closure_body.value, cx) // A variable is used mutably inside of the closure. Suppress the lint. - if !map_mutated_vars.is_empty() { - return; - } + && !map_mutated_vars.is_empty() + { + return; } span_lint_and_help( cx, diff --git a/src/tools/clippy/clippy_lints/src/methods/swap_with_temporary.rs b/src/tools/clippy/clippy_lints/src/methods/swap_with_temporary.rs new file mode 100644 index 0000000000000..de729fb343a34 --- /dev/null +++ b/src/tools/clippy/clippy_lints/src/methods/swap_with_temporary.rs @@ -0,0 +1,125 @@ +use clippy_utils::diagnostics::span_lint_and_then; +use clippy_utils::sugg::Sugg; +use rustc_ast::BorrowKind; +use rustc_errors::{Applicability, Diag}; +use rustc_hir::{Expr, ExprKind, Node, QPath}; +use rustc_lint::LateContext; +use rustc_span::sym; + +use super::SWAP_WITH_TEMPORARY; + +const MSG_TEMPORARY: &str = "this expression returns a temporary value"; +const MSG_TEMPORARY_REFMUT: &str = "this is a mutable reference to a temporary value"; + +pub(super) fn check(cx: &LateContext<'_>, expr: &Expr<'_>, func: &Expr<'_>, args: &[Expr<'_>]) { + if let ExprKind::Path(QPath::Resolved(_, func_path)) = func.kind + && let Some(func_def_id) = func_path.res.opt_def_id() + && cx.tcx.is_diagnostic_item(sym::mem_swap, func_def_id) + { + match (ArgKind::new(&args[0]), ArgKind::new(&args[1])) { + (ArgKind::RefMutToTemp(left_temp), ArgKind::RefMutToTemp(right_temp)) => { + emit_lint_useless(cx, expr, &args[0], &args[1], left_temp, right_temp); + }, + (ArgKind::RefMutToTemp(left_temp), right) => emit_lint_assign(cx, expr, &right, &args[0], left_temp), + (left, ArgKind::RefMutToTemp(right_temp)) => emit_lint_assign(cx, expr, &left, &args[1], right_temp), + _ => {}, + } + } +} + +enum ArgKind<'tcx> { + // Mutable reference to a place, coming from a macro + RefMutToPlaceAsMacro(&'tcx Expr<'tcx>), + // Place behind a mutable reference + RefMutToPlace(&'tcx Expr<'tcx>), + // Temporary value behind a mutable reference + RefMutToTemp(&'tcx Expr<'tcx>), + // Any other case + Expr(&'tcx Expr<'tcx>), +} + +impl<'tcx> ArgKind<'tcx> { + fn new(arg: &'tcx Expr<'tcx>) -> Self { + if let ExprKind::AddrOf(BorrowKind::Ref, _, target) = arg.kind { + if target.is_syntactic_place_expr() { + if arg.span.from_expansion() { + ArgKind::RefMutToPlaceAsMacro(arg) + } else { + ArgKind::RefMutToPlace(target) + } + } else { + ArgKind::RefMutToTemp(target) + } + } else { + ArgKind::Expr(arg) + } + } +} + +// Emits a note either on the temporary expression if it can be found in the same context as the +// base and returns `true`, or on the mutable reference to the temporary expression otherwise and +// returns `false`. +fn emit_note(diag: &mut Diag<'_, ()>, base: &Expr<'_>, expr: &Expr<'_>, expr_temp: &Expr<'_>) -> bool { + if base.span.eq_ctxt(expr.span) { + diag.span_note(expr_temp.span.source_callsite(), MSG_TEMPORARY); + true + } else { + diag.span_note(expr.span.source_callsite(), MSG_TEMPORARY_REFMUT); + false + } +} + +fn emit_lint_useless( + cx: &LateContext<'_>, + expr: &Expr<'_>, + left: &Expr<'_>, + right: &Expr<'_>, + left_temp: &Expr<'_>, + right_temp: &Expr<'_>, +) { + span_lint_and_then( + cx, + SWAP_WITH_TEMPORARY, + expr.span, + "swapping temporary values has no effect", + |diag| { + emit_note(diag, expr, left, left_temp); + emit_note(diag, expr, right, right_temp); + }, + ); +} + +fn emit_lint_assign(cx: &LateContext<'_>, expr: &Expr<'_>, target: &ArgKind<'_>, reftemp: &Expr<'_>, temp: &Expr<'_>) { + span_lint_and_then( + cx, + SWAP_WITH_TEMPORARY, + expr.span, + "swapping with a temporary value is inefficient", + |diag| { + if !emit_note(diag, expr, reftemp, temp) { + return; + } + + // Make the suggestion only when the original `swap()` call is a statement + // or the last expression in a block. + if matches!(cx.tcx.parent_hir_node(expr.hir_id), Node::Stmt(..) | Node::Block(..)) { + let mut applicability = Applicability::MachineApplicable; + let ctxt = expr.span.ctxt(); + let assign_target = match target { + ArgKind::Expr(target) | ArgKind::RefMutToPlaceAsMacro(target) => { + Sugg::hir_with_context(cx, target, ctxt, "_", &mut applicability).deref() + }, + ArgKind::RefMutToPlace(target) => Sugg::hir_with_context(cx, target, ctxt, "_", &mut applicability), + ArgKind::RefMutToTemp(_) => unreachable!(), + }; + let assign_source = Sugg::hir_with_context(cx, temp, ctxt, "_", &mut applicability); + diag.span_suggestion( + expr.span, + "use assignment instead", + format!("{assign_target} = {assign_source}"), + applicability, + ); + } + }, + ); +} diff --git a/src/tools/clippy/clippy_lints/src/methods/unnecessary_filter_map.rs b/src/tools/clippy/clippy_lints/src/methods/unnecessary_filter_map.rs index ca42a9ac04e0b..79ed352193fd7 100644 --- a/src/tools/clippy/clippy_lints/src/methods/unnecessary_filter_map.rs +++ b/src/tools/clippy/clippy_lints/src/methods/unnecessary_filter_map.rs @@ -1,16 +1,14 @@ use super::utils::clone_or_copy_needed; -use clippy_utils::diagnostics::span_lint_and_sugg; +use clippy_utils::diagnostics::span_lint; use clippy_utils::ty::is_copy; use clippy_utils::usage::mutated_variables; use clippy_utils::visitors::{Descend, for_each_expr_without_closures}; -use clippy_utils::{is_res_lang_ctor, is_trait_method, path_res, path_to_local_id}; +use clippy_utils::{is_res_lang_ctor, is_trait_method, path_res, path_to_local_id, sym}; use core::ops::ControlFlow; -use rustc_errors::Applicability; use rustc_hir as hir; use rustc_hir::LangItem::{OptionNone, OptionSome}; use rustc_lint::LateContext; use rustc_middle::ty; -use rustc_span::sym; use super::{UNNECESSARY_FILTER_MAP, UNNECESSARY_FIND_MAP}; @@ -45,30 +43,32 @@ pub(super) fn check<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx hir::Expr<'tcx>, a && is_res_lang_ctor(cx, path_res(cx, expr), OptionSome) && let hir::ExprKind::Path(_) = args[0].kind { - span_lint_and_sugg( + span_lint( cx, UNNECESSARY_FILTER_MAP, expr.span, - format!("{name} is unnecessary"), - "try removing the filter_map", - String::new(), - Applicability::MaybeIncorrect, + String::from("this call to `.filter_map(..)` is unnecessary"), ); + return; + } + if name == "filter_map" { + "map(..)" + } else { + "map(..).next()" } - if name == "filter_map" { "map" } else { "map(..).next()" } } else if !found_mapping && !mutates_arg && (!clone_or_copy_needed || is_copy(cx, in_ty)) { match cx.typeck_results().expr_ty(body.value).kind() { ty::Adt(adt, subst) if cx.tcx.is_diagnostic_item(sym::Option, adt.did()) && in_ty == subst.type_at(0) => { - if name == "filter_map" { "filter" } else { "find" } + if name == "filter_map" { "filter(..)" } else { "find(..)" } }, _ => return, } } else { return; }; - span_lint_and_sugg( + span_lint( cx, if name == "filter_map" { UNNECESSARY_FILTER_MAP @@ -76,10 +76,7 @@ pub(super) fn check<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx hir::Expr<'tcx>, a UNNECESSARY_FIND_MAP }, expr.span, - format!("this `.{name}` can be written more simply"), - "try instead", - sugg.to_string(), - Applicability::MaybeIncorrect, + format!("this `.{name}(..)` can be written more simply using `.{sugg}`"), ); } } @@ -97,7 +94,7 @@ fn check_expression<'tcx>(cx: &LateContext<'tcx>, arg_id: hir::HirId, expr: &'tc (true, true) }, hir::ExprKind::MethodCall(segment, recv, [arg], _) => { - if segment.ident.name.as_str() == "then_some" + if segment.ident.name == sym::then_some && cx.typeck_results().expr_ty(recv).is_bool() && path_to_local_id(arg, arg_id) { diff --git a/src/tools/clippy/clippy_lints/src/methods/unnecessary_iter_cloned.rs b/src/tools/clippy/clippy_lints/src/methods/unnecessary_iter_cloned.rs index c0e0156858811..20cf35363d13f 100644 --- a/src/tools/clippy/clippy_lints/src/methods/unnecessary_iter_cloned.rs +++ b/src/tools/clippy/clippy_lints/src/methods/unnecessary_iter_cloned.rs @@ -99,7 +99,7 @@ pub fn check_for_loop_iter( && let Some(into_iterator_trait_id) = cx.tcx.get_diagnostic_item(sym::IntoIterator) && let collection_ty = cx.typeck_results().expr_ty(collection) && implements_trait(cx, collection_ty, into_iterator_trait_id, &[]) - && let Some(into_iter_item_ty) = cx.get_associated_type(collection_ty, into_iterator_trait_id, "Item") + && let Some(into_iter_item_ty) = cx.get_associated_type(collection_ty, into_iterator_trait_id, sym::Item) && iter_item_ty == into_iter_item_ty && let Some(collection_snippet) = collection.span.get_source_text(cx) { diff --git a/src/tools/clippy/clippy_lints/src/methods/unnecessary_lazy_eval.rs b/src/tools/clippy/clippy_lints/src/methods/unnecessary_lazy_eval.rs index 9f4080100da20..71e606add526e 100644 --- a/src/tools/clippy/clippy_lints/src/methods/unnecessary_lazy_eval.rs +++ b/src/tools/clippy/clippy_lints/src/methods/unnecessary_lazy_eval.rs @@ -23,56 +23,61 @@ pub(super) fn check<'tcx>( let is_result = is_type_diagnostic_item(cx, cx.typeck_results().expr_ty(recv), sym::Result); let is_bool = cx.typeck_results().expr_ty(recv).is_bool(); - if is_option || is_result || is_bool { - if let hir::ExprKind::Closure(&hir::Closure { body, fn_decl, .. }) = arg.kind { - let body = cx.tcx.hir_body(body); - let body_expr = &body.value; + if (is_option || is_result || is_bool) + && let hir::ExprKind::Closure(&hir::Closure { + body, + fn_decl, + kind: hir::ClosureKind::Closure, + .. + }) = arg.kind + { + let body = cx.tcx.hir_body(body); + let body_expr = &body.value; - if usage::BindingUsageFinder::are_params_used(cx, body) || is_from_proc_macro(cx, expr) { - return false; - } + if usage::BindingUsageFinder::are_params_used(cx, body) || is_from_proc_macro(cx, expr) { + return false; + } - if eager_or_lazy::switch_to_eager_eval(cx, body_expr) { - let msg = if is_option { - "unnecessary closure used to substitute value for `Option::None`" - } else if is_result { - "unnecessary closure used to substitute value for `Result::Err`" - } else { - "unnecessary closure used with `bool::then`" - }; - let applicability = if body - .params - .iter() - // bindings are checked to be unused above - .all(|param| matches!(param.pat.kind, hir::PatKind::Binding(..) | hir::PatKind::Wild)) - && matches!( - fn_decl.output, - FnRetTy::DefaultReturn(_) - | FnRetTy::Return(hir::Ty { - kind: hir::TyKind::Infer(()), - .. - }) - ) { - Applicability::MachineApplicable - } else { - // replacing the lambda may break type inference - Applicability::MaybeIncorrect - }; + if eager_or_lazy::switch_to_eager_eval(cx, body_expr) { + let msg = if is_option { + "unnecessary closure used to substitute value for `Option::None`" + } else if is_result { + "unnecessary closure used to substitute value for `Result::Err`" + } else { + "unnecessary closure used with `bool::then`" + }; + let applicability = if body + .params + .iter() + // bindings are checked to be unused above + .all(|param| matches!(param.pat.kind, hir::PatKind::Binding(..) | hir::PatKind::Wild)) + && matches!( + fn_decl.output, + FnRetTy::DefaultReturn(_) + | FnRetTy::Return(hir::Ty { + kind: hir::TyKind::Infer(()), + .. + }) + ) { + Applicability::MachineApplicable + } else { + // replacing the lambda may break type inference + Applicability::MaybeIncorrect + }; - // This is a duplicate of what's happening in clippy_lints::methods::method_call, - // which isn't ideal, We want to get the method call span, - // but prefer to avoid changing the signature of the function itself. - if let hir::ExprKind::MethodCall(.., span) = expr.kind { - span_lint_and_then(cx, UNNECESSARY_LAZY_EVALUATIONS, expr.span, msg, |diag| { - diag.span_suggestion_verbose( - span, - format!("use `{simplify_using}` instead"), - format!("{simplify_using}({})", snippet(cx, body_expr.span, "..")), - applicability, - ); - }); - return true; - } + // This is a duplicate of what's happening in clippy_lints::methods::method_call, + // which isn't ideal, We want to get the method call span, + // but prefer to avoid changing the signature of the function itself. + if let hir::ExprKind::MethodCall(.., span) = expr.kind { + span_lint_and_then(cx, UNNECESSARY_LAZY_EVALUATIONS, expr.span, msg, |diag| { + diag.span_suggestion_verbose( + span, + format!("use `{simplify_using}` instead"), + format!("{simplify_using}({})", snippet(cx, body_expr.span, "..")), + applicability, + ); + }); + return true; } } } diff --git a/src/tools/clippy/clippy_lints/src/methods/unnecessary_map_or.rs b/src/tools/clippy/clippy_lints/src/methods/unnecessary_map_or.rs index d7bd522ddab94..b90748dd1585f 100644 --- a/src/tools/clippy/clippy_lints/src/methods/unnecessary_map_or.rs +++ b/src/tools/clippy/clippy_lints/src/methods/unnecessary_map_or.rs @@ -76,7 +76,7 @@ pub(super) fn check<'a>( && ((BinOpKind::Eq == op.node && !def_bool) || (BinOpKind::Ne == op.node && def_bool)) && let non_binding_location = if path_to_local_id(l, hir_id) { r } else { l } && switch_to_eager_eval(cx, non_binding_location) - // xor, because if its both then thats a strange edge case and + // xor, because if its both then that's a strange edge case and // we can just ignore it, since by default clippy will error on this && (path_to_local_id(l, hir_id) ^ path_to_local_id(r, hir_id)) && !is_local_used(cx, non_binding_location, hir_id) @@ -92,7 +92,7 @@ pub(super) fn check<'a>( // we may need to add parens around the suggestion // in case the parent expression has additional method calls, // since for example `Some(5).map_or(false, |x| x == 5).then(|| 1)` - // being converted to `Some(5) == Some(5).then(|| 1)` isnt + // being converted to `Some(5) == Some(5).then(|| 1)` isn't // the same thing let inner_non_binding = Sugg::NonParen(Cow::Owned(format!( @@ -109,8 +109,8 @@ pub(super) fn check<'a>( let sugg = if let Some(parent_expr) = get_parent_expr(cx, expr) { match parent_expr.kind { - ExprKind::Binary(..) | ExprKind::Unary(..) | ExprKind::Cast(..) => binop.maybe_par(), - ExprKind::MethodCall(_, receiver, _, _) if receiver.hir_id == expr.hir_id => binop.maybe_par(), + ExprKind::Binary(..) | ExprKind::Unary(..) | ExprKind::Cast(..) => binop.maybe_paren(), + ExprKind::MethodCall(_, receiver, _, _) if receiver.hir_id == expr.hir_id => binop.maybe_paren(), _ => binop, } } else { diff --git a/src/tools/clippy/clippy_lints/src/methods/unnecessary_to_owned.rs b/src/tools/clippy/clippy_lints/src/methods/unnecessary_to_owned.rs index 62ba3012643ce..87bb8d46a1d6a 100644 --- a/src/tools/clippy/clippy_lints/src/methods/unnecessary_to_owned.rs +++ b/src/tools/clippy/clippy_lints/src/methods/unnecessary_to_owned.rs @@ -7,7 +7,7 @@ use clippy_utils::ty::{get_iterator_item_ty, implements_trait, is_copy, is_type_ use clippy_utils::visitors::find_all_ret_expressions; use clippy_utils::{ fn_def_id, get_parent_expr, is_diag_item_method, is_diag_trait_item, is_expr_temporary_value, peel_middle_ty_refs, - return_ty, + return_ty, sym, }; use rustc_errors::Applicability; use rustc_hir::def::{DefKind, Res}; @@ -20,7 +20,7 @@ use rustc_middle::ty::adjustment::{Adjust, Adjustment, OverloadedDeref}; use rustc_middle::ty::{ self, ClauseKind, GenericArg, GenericArgKind, GenericArgsRef, ParamTy, ProjectionPredicate, TraitPredicate, Ty, }; -use rustc_span::{Symbol, sym}; +use rustc_span::Symbol; use rustc_trait_selection::traits::query::evaluate_obligation::InferCtxtExt as _; use rustc_trait_selection::traits::{Obligation, ObligationCause}; @@ -153,7 +153,7 @@ fn check_addr_of_expr( } if let Some(deref_trait_id) = cx.tcx.get_diagnostic_item(sym::Deref) && implements_trait(cx, receiver_ty, deref_trait_id, &[]) - && cx.get_associated_type(receiver_ty, deref_trait_id, "Target") == Some(target_ty) + && cx.get_associated_type(receiver_ty, deref_trait_id, sym::Target) == Some(target_ty) // Make sure that it's actually calling the right `.to_string()`, (#10033) // *or* this is a `Cow::into_owned()` call (which would be the wrong into_owned receiver (str != Cow) // but that's ok for Cow::into_owned specifically) @@ -312,8 +312,7 @@ fn check_string_from_utf8<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'tcx>, /// call of a `to_owned`-like function is unnecessary. fn check_split_call_arg(cx: &LateContext<'_>, expr: &Expr<'_>, method_name: Symbol, receiver: &Expr<'_>) -> bool { if let Some(parent) = get_parent_expr(cx, expr) - && let Some((fn_name, argument_expr)) = get_fn_name_and_arg(cx, parent) - && fn_name.as_str() == "split" + && let Some((sym::split, argument_expr)) = get_fn_name_and_arg(cx, parent) && let Some(receiver_snippet) = receiver.span.get_source_text(cx) && let Some(arg_snippet) = argument_expr.span.get_source_text(cx) { @@ -322,7 +321,7 @@ fn check_split_call_arg(cx: &LateContext<'_>, expr: &Expr<'_>, method_name: Symb // add `.as_ref()` to the suggestion. let as_ref = if is_type_lang_item(cx, cx.typeck_results().expr_ty(expr), LangItem::String) && let Some(deref_trait_id) = cx.tcx.get_diagnostic_item(sym::Deref) - && cx.get_associated_type(cx.typeck_results().expr_ty(receiver), deref_trait_id, "Target") + && cx.get_associated_type(cx.typeck_results().expr_ty(receiver), deref_trait_id, sym::Target) != Some(cx.tcx.types.str_) { ".as_ref()" @@ -614,8 +613,7 @@ fn has_lifetime(ty: Ty<'_>) -> bool { /// Returns true if the named method is `Iterator::cloned` or `Iterator::copied`. fn is_cloned_or_copied(cx: &LateContext<'_>, method_name: Symbol, method_def_id: DefId) -> bool { - (method_name.as_str() == "cloned" || method_name.as_str() == "copied") - && is_diag_trait_item(cx, method_def_id, sym::Iterator) + matches!(method_name, sym::cloned | sym::copied) && is_diag_trait_item(cx, method_def_id, sym::Iterator) } /// Returns true if the named method can be used to convert the receiver to its "owned" @@ -628,7 +626,7 @@ fn is_to_owned_like<'a>(cx: &LateContext<'a>, call_expr: &Expr<'a>, method_name: /// Returns true if the named method is `Cow::into_owned`. fn is_cow_into_owned(cx: &LateContext<'_>, method_name: Symbol, method_def_id: DefId) -> bool { - method_name.as_str() == "into_owned" && is_diag_item_method(cx, method_def_id, sym::Cow) + method_name == sym::into_owned && is_diag_item_method(cx, method_def_id, sym::Cow) } /// Returns true if the named method is `ToString::to_string` and it's called on a type that @@ -648,7 +646,7 @@ fn is_to_string_on_string_like<'a>( && let GenericArgKind::Type(ty) = generic_arg.unpack() && let Some(deref_trait_id) = cx.tcx.get_diagnostic_item(sym::Deref) && let Some(as_ref_trait_id) = cx.tcx.get_diagnostic_item(sym::AsRef) - && (cx.get_associated_type(ty, deref_trait_id, "Target") == Some(cx.tcx.types.str_) + && (cx.get_associated_type(ty, deref_trait_id, sym::Target) == Some(cx.tcx.types.str_) || implements_trait(cx, ty, as_ref_trait_id, &[cx.tcx.types.str_.into()])) { true diff --git a/src/tools/clippy/clippy_lints/src/methods/utils.rs b/src/tools/clippy/clippy_lints/src/methods/utils.rs index 3611b341897a6..b0cc7a785bc31 100644 --- a/src/tools/clippy/clippy_lints/src/methods/utils.rs +++ b/src/tools/clippy/clippy_lints/src/methods/utils.rs @@ -8,6 +8,9 @@ use rustc_middle::ty::{self, Ty}; use rustc_span::Span; use rustc_span::symbol::sym; +/// Checks if `expr`, of type `ty`, corresponds to a slice or can be dereferenced to a slice, or if +/// `expr` is a method call to `.iter()` on such a type. In these cases, return the slice-like +/// expression. pub(super) fn derefs_to_slice<'tcx>( cx: &LateContext<'tcx>, expr: &'tcx Expr<'tcx>, diff --git a/src/tools/clippy/clippy_lints/src/minmax.rs b/src/tools/clippy/clippy_lints/src/minmax.rs index ed89b3b34386f..64eafc0ebccdc 100644 --- a/src/tools/clippy/clippy_lints/src/minmax.rs +++ b/src/tools/clippy/clippy_lints/src/minmax.rs @@ -1,10 +1,9 @@ use clippy_utils::consts::{ConstEvalCtxt, Constant}; use clippy_utils::diagnostics::span_lint; -use clippy_utils::is_trait_method; +use clippy_utils::{is_trait_method, sym}; use rustc_hir::{Expr, ExprKind}; use rustc_lint::{LateContext, LateLintPass}; use rustc_session::declare_lint_pass; -use rustc_span::sym; use std::cmp::Ordering::{Equal, Greater, Less}; declare_clippy_lint! { @@ -79,12 +78,10 @@ fn min_max<'a, 'tcx>(cx: &LateContext<'tcx>, expr: &'a Expr<'a>) -> Option<(MinM }, ExprKind::MethodCall(path, receiver, args @ [_], _) => { if cx.typeck_results().expr_ty(receiver).is_floating_point() || is_trait_method(cx, expr, sym::Ord) { - if path.ident.name.as_str() == "max" { - fetch_const(cx, Some(receiver), args, MinMax::Max) - } else if path.ident.name.as_str() == "min" { - fetch_const(cx, Some(receiver), args, MinMax::Min) - } else { - None + match path.ident.name { + sym::max => fetch_const(cx, Some(receiver), args, MinMax::Max), + sym::min => fetch_const(cx, Some(receiver), args, MinMax::Min), + _ => None, } } else { None diff --git a/src/tools/clippy/clippy_lints/src/misc.rs b/src/tools/clippy/clippy_lints/src/misc.rs index 693d1a8dd7643..09ee6f7037c64 100644 --- a/src/tools/clippy/clippy_lints/src/misc.rs +++ b/src/tools/clippy/clippy_lints/src/misc.rs @@ -329,7 +329,7 @@ fn used_underscore_binding<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) { let name = ident.name.as_str(); if name.starts_with('_') && !name.starts_with("__") - && let definition_span = cx.tcx.hir().span(definition_hir_id) + && let definition_span = cx.tcx.hir_span(definition_hir_id) && !definition_span.from_expansion() && !fulfill_or_allowed(cx, USED_UNDERSCORE_BINDING, [expr.hir_id, definition_hir_id]) { diff --git a/src/tools/clippy/clippy_lints/src/misc_early/builtin_type_shadow.rs b/src/tools/clippy/clippy_lints/src/misc_early/builtin_type_shadow.rs index 662f7cd8500cf..9ee1e2f3fd17a 100644 --- a/src/tools/clippy/clippy_lints/src/misc_early/builtin_type_shadow.rs +++ b/src/tools/clippy/clippy_lints/src/misc_early/builtin_type_shadow.rs @@ -6,14 +6,14 @@ use rustc_lint::EarlyContext; use super::BUILTIN_TYPE_SHADOW; pub(super) fn check(cx: &EarlyContext<'_>, param: &GenericParam) { - if let GenericParamKind::Type { .. } = param.kind { - if let Some(prim_ty) = PrimTy::from_name(param.ident.name) { - span_lint( - cx, - BUILTIN_TYPE_SHADOW, - param.ident.span, - format!("this generic shadows the built-in type `{}`", prim_ty.name()), - ); - } + if let GenericParamKind::Type { .. } = param.kind + && let Some(prim_ty) = PrimTy::from_name(param.ident.name) + { + span_lint( + cx, + BUILTIN_TYPE_SHADOW, + param.ident.span, + format!("this generic shadows the built-in type `{}`", prim_ty.name()), + ); } } diff --git a/src/tools/clippy/clippy_lints/src/misc_early/redundant_pattern.rs b/src/tools/clippy/clippy_lints/src/misc_early/redundant_pattern.rs index d5b5b2bf2dd1b..3cb51671aaf18 100644 --- a/src/tools/clippy/clippy_lints/src/misc_early/redundant_pattern.rs +++ b/src/tools/clippy/clippy_lints/src/misc_early/redundant_pattern.rs @@ -6,20 +6,20 @@ use rustc_lint::EarlyContext; use super::REDUNDANT_PATTERN; pub(super) fn check(cx: &EarlyContext<'_>, pat: &Pat) { - if let PatKind::Ident(ann, ident, Some(ref right)) = pat.kind { - if let PatKind::Wild = right.kind { - span_lint_and_sugg( - cx, - REDUNDANT_PATTERN, - pat.span, - format!( - "the `{} @ _` pattern can be written as just `{}`", - ident.name, ident.name, - ), - "try", - format!("{}{}", ann.prefix_str(), ident.name), - Applicability::MachineApplicable, - ); - } + if let PatKind::Ident(ann, ident, Some(ref right)) = pat.kind + && let PatKind::Wild = right.kind + { + span_lint_and_sugg( + cx, + REDUNDANT_PATTERN, + pat.span, + format!( + "the `{} @ _` pattern can be written as just `{}`", + ident.name, ident.name, + ), + "try", + format!("{}{}", ann.prefix_str(), ident.name), + Applicability::MachineApplicable, + ); } } diff --git a/src/tools/clippy/clippy_lints/src/misc_early/unneeded_wildcard_pattern.rs b/src/tools/clippy/clippy_lints/src/misc_early/unneeded_wildcard_pattern.rs index 00f46629f102c..fffaf40c9d141 100644 --- a/src/tools/clippy/clippy_lints/src/misc_early/unneeded_wildcard_pattern.rs +++ b/src/tools/clippy/clippy_lints/src/misc_early/unneeded_wildcard_pattern.rs @@ -7,30 +7,30 @@ use rustc_span::Span; use super::UNNEEDED_WILDCARD_PATTERN; pub(super) fn check(cx: &EarlyContext<'_>, pat: &Pat) { - if let PatKind::TupleStruct(_, _, ref patterns) | PatKind::Tuple(ref patterns) = pat.kind { - if let Some(rest_index) = patterns.iter().position(|pat| pat.is_rest()) { - if let Some((left_index, left_pat)) = patterns[..rest_index] - .iter() - .rev() - .take_while(|pat| matches!(pat.kind, PatKind::Wild)) - .enumerate() - .last() - { - span_lint(cx, left_pat.span.until(patterns[rest_index].span), left_index == 0); - } + if let PatKind::TupleStruct(_, _, ref patterns) | PatKind::Tuple(ref patterns) = pat.kind + && let Some(rest_index) = patterns.iter().position(|pat| pat.is_rest()) + { + if let Some((left_index, left_pat)) = patterns[..rest_index] + .iter() + .rev() + .take_while(|pat| matches!(pat.kind, PatKind::Wild)) + .enumerate() + .last() + { + span_lint(cx, left_pat.span.until(patterns[rest_index].span), left_index == 0); + } - if let Some((right_index, right_pat)) = patterns[rest_index + 1..] - .iter() - .take_while(|pat| matches!(pat.kind, PatKind::Wild)) - .enumerate() - .last() - { - span_lint( - cx, - patterns[rest_index].span.shrink_to_hi().to(right_pat.span), - right_index == 0, - ); - } + if let Some((right_index, right_pat)) = patterns[rest_index + 1..] + .iter() + .take_while(|pat| matches!(pat.kind, PatKind::Wild)) + .enumerate() + .last() + { + span_lint( + cx, + patterns[rest_index].span.shrink_to_hi().to(right_pat.span), + right_index == 0, + ); } } } diff --git a/src/tools/clippy/clippy_lints/src/mismatching_type_param_order.rs b/src/tools/clippy/clippy_lints/src/mismatching_type_param_order.rs index d52fe7e7d5b9c..394bc4aef1cc7 100644 --- a/src/tools/clippy/clippy_lints/src/mismatching_type_param_order.rs +++ b/src/tools/clippy/clippy_lints/src/mismatching_type_param_order.rs @@ -111,10 +111,10 @@ impl<'tcx> LateLintPass<'tcx> for TypeParamMismatch { // Checks if impl_param_name is the same as one of type_param_names, // and is in a different position fn mismatch_param_name(i: usize, impl_param_name: &String, type_param_names: &FxHashMap<&String, usize>) -> bool { - if let Some(j) = type_param_names.get(impl_param_name) { - if i != *j { - return true; - } + if let Some(j) = type_param_names.get(impl_param_name) + && i != *j + { + return true; } false } diff --git a/src/tools/clippy/clippy_lints/src/missing_asserts_for_indexing.rs b/src/tools/clippy/clippy_lints/src/missing_asserts_for_indexing.rs index d78299fe08be8..c8e3462b24ef4 100644 --- a/src/tools/clippy/clippy_lints/src/missing_asserts_for_indexing.rs +++ b/src/tools/clippy/clippy_lints/src/missing_asserts_for_indexing.rs @@ -3,14 +3,15 @@ use std::ops::ControlFlow; use clippy_utils::comparisons::{Rel, normalize_comparison}; use clippy_utils::diagnostics::span_lint_and_then; +use clippy_utils::macros::{find_assert_eq_args, first_node_macro_backtrace}; use clippy_utils::source::snippet; use clippy_utils::visitors::for_each_expr_without_closures; use clippy_utils::{eq_expr_value, hash_expr, higher}; -use rustc_ast::{LitKind, RangeLimits}; +use rustc_ast::{BinOpKind, LitKind, RangeLimits}; use rustc_data_structures::packed::Pu128; use rustc_data_structures::unhash::UnindexMap; use rustc_errors::{Applicability, Diag}; -use rustc_hir::{BinOp, Block, Body, Expr, ExprKind, UnOp}; +use rustc_hir::{Block, Body, Expr, ExprKind, UnOp}; use rustc_lint::{LateContext, LateLintPass}; use rustc_session::declare_lint_pass; use rustc_span::source_map::Spanned; @@ -97,7 +98,7 @@ enum LengthComparison { /// /// E.g. for `v.len() > 5` this returns `Some((LengthComparison::IntLessThanLength, 5, v.len()))` fn len_comparison<'hir>( - bin_op: BinOp, + bin_op: BinOpKind, left: &'hir Expr<'hir>, right: &'hir Expr<'hir>, ) -> Option<(LengthComparison, usize, &'hir Expr<'hir>)> { @@ -112,7 +113,7 @@ fn len_comparison<'hir>( // normalize comparison, `v.len() > 4` becomes `4 < v.len()` // this simplifies the logic a bit - let (op, left, right) = normalize_comparison(bin_op.node, left, right)?; + let (op, left, right) = normalize_comparison(bin_op, left, right)?; match (op, left.kind, right.kind) { (Rel::Lt, int_lit_pat!(left), _) => Some((LengthComparison::IntLessThanLength, left as usize, right)), (Rel::Lt, _, int_lit_pat!(right)) => Some((LengthComparison::LengthLessThanInt, right as usize, left)), @@ -134,18 +135,30 @@ fn assert_len_expr<'hir>( cx: &LateContext<'_>, expr: &'hir Expr<'hir>, ) -> Option<(LengthComparison, usize, &'hir Expr<'hir>)> { - if let Some(higher::If { cond, then, .. }) = higher::If::hir(expr) + let (cmp, asserted_len, slice_len) = if let Some(higher::If { cond, then, .. }) = higher::If::hir(expr) && let ExprKind::Unary(UnOp::Not, condition) = &cond.kind && let ExprKind::Binary(bin_op, left, right) = &condition.kind - - && let Some((cmp, asserted_len, slice_len)) = len_comparison(*bin_op, left, right) - && let ExprKind::MethodCall(method, recv, [], _) = &slice_len.kind - && cx.typeck_results().expr_ty_adjusted(recv).peel_refs().is_slice() - && method.ident.name == sym::len - // check if `then` block has a never type expression && let ExprKind::Block(Block { expr: Some(then_expr), .. }, _) = then.kind && cx.typeck_results().expr_ty(then_expr).is_never() + { + len_comparison(bin_op.node, left, right)? + } else if let Some((macro_call, bin_op)) = first_node_macro_backtrace(cx, expr).find_map(|macro_call| { + match cx.tcx.get_diagnostic_name(macro_call.def_id) { + Some(sym::assert_eq_macro) => Some((macro_call, BinOpKind::Eq)), + Some(sym::assert_ne_macro) => Some((macro_call, BinOpKind::Ne)), + _ => None, + } + }) && let Some((left, right, _)) = find_assert_eq_args(cx, expr, macro_call.expn) + { + len_comparison(bin_op, left, right)? + } else { + return None; + }; + + if let ExprKind::MethodCall(method, recv, [], _) = &slice_len.kind + && cx.typeck_results().expr_ty_adjusted(recv).peel_refs().is_slice() + && method.ident.name == sym::len { Some((cmp, asserted_len, recv)) } else { @@ -168,6 +181,7 @@ enum IndexEntry<'hir> { /// if the `assert!` asserts the right length. AssertWithIndex { highest_index: usize, + is_first_highest: bool, asserted_len: usize, assert_span: Span, slice: &'hir Expr<'hir>, @@ -177,6 +191,7 @@ enum IndexEntry<'hir> { /// Indexing without an `assert!` IndexWithoutAssert { highest_index: usize, + is_first_highest: bool, indexes: Vec, slice: &'hir Expr<'hir>, }, @@ -244,28 +259,41 @@ fn check_index<'hir>(cx: &LateContext<'_>, expr: &'hir Expr<'hir>, map: &mut Uni assert_span, slice, } => { - *entry = IndexEntry::AssertWithIndex { - highest_index: index, - asserted_len: *asserted_len, - assert_span: *assert_span, - slice, - indexes: vec![expr.span], - comparison: *comparison, - }; + if slice.span.lo() > assert_span.lo() { + *entry = IndexEntry::AssertWithIndex { + highest_index: index, + is_first_highest: true, + asserted_len: *asserted_len, + assert_span: *assert_span, + slice, + indexes: vec![expr.span], + comparison: *comparison, + }; + } }, IndexEntry::IndexWithoutAssert { - highest_index, indexes, .. + highest_index, + indexes, + is_first_highest, + .. } | IndexEntry::AssertWithIndex { - highest_index, indexes, .. + highest_index, + indexes, + is_first_highest, + .. } => { indexes.push(expr.span); + if *is_first_highest { + (*is_first_highest) = *highest_index >= index; + } *highest_index = (*highest_index).max(index); }, } } else { indexes.push(IndexEntry::IndexWithoutAssert { highest_index: index, + is_first_highest: true, indexes: vec![expr.span], slice, }); @@ -284,15 +312,18 @@ fn check_assert<'hir>(cx: &LateContext<'_>, expr: &'hir Expr<'hir>, map: &mut Un if let Some(entry) = entry { if let IndexEntry::IndexWithoutAssert { highest_index, + is_first_highest, indexes, slice, } = entry + && expr.span.lo() <= slice.span.lo() { *entry = IndexEntry::AssertWithIndex { highest_index: *highest_index, indexes: mem::take(indexes), + is_first_highest: *is_first_highest, slice, - assert_span: expr.span, + assert_span: expr.span.source_callsite(), comparison, asserted_len, }; @@ -301,7 +332,7 @@ fn check_assert<'hir>(cx: &LateContext<'_>, expr: &'hir Expr<'hir>, map: &mut Un indexes.push(IndexEntry::StrayAssert { asserted_len, comparison, - assert_span: expr.span, + assert_span: expr.span.source_callsite(), slice, }); } @@ -325,12 +356,13 @@ fn report_indexes(cx: &LateContext<'_>, map: &UnindexMap match *entry { IndexEntry::AssertWithIndex { highest_index, + is_first_highest, asserted_len, ref indexes, comparison, assert_span, slice, - } if indexes.len() > 1 => { + } if indexes.len() > 1 && !is_first_highest => { // if we have found an `assert!`, let's also check that it's actually right // and if it covers the highest index and if not, suggest the correct length let sugg = match comparison { @@ -378,8 +410,9 @@ fn report_indexes(cx: &LateContext<'_>, map: &UnindexMap IndexEntry::IndexWithoutAssert { ref indexes, highest_index, + is_first_highest, slice, - } if indexes.len() > 1 => { + } if indexes.len() > 1 && !is_first_highest => { // if there was no `assert!` but more than one index, suggest // adding an `assert!` that covers the highest index report_lint( diff --git a/src/tools/clippy/clippy_lints/src/missing_const_for_fn.rs b/src/tools/clippy/clippy_lints/src/missing_const_for_fn.rs index 38a19dd2999bb..1f142bc3ba63c 100644 --- a/src/tools/clippy/clippy_lints/src/missing_const_for_fn.rs +++ b/src/tools/clippy/clippy_lints/src/missing_const_for_fn.rs @@ -139,12 +139,11 @@ impl<'tcx> LateLintPass<'tcx> for MissingConstForFn { // Const fns are not allowed as methods in a trait. { let parent = cx.tcx.hir_get_parent_item(hir_id).def_id; - if parent != CRATE_DEF_ID { - if let hir::Node::Item(item) = cx.tcx.hir_node_by_def_id(parent) { - if let hir::ItemKind::Trait(..) = &item.kind { - return; - } - } + if parent != CRATE_DEF_ID + && let hir::Node::Item(item) = cx.tcx.hir_node_by_def_id(parent) + && let hir::ItemKind::Trait(..) = &item.kind + { + return; } } @@ -156,9 +155,9 @@ impl<'tcx> LateLintPass<'tcx> for MissingConstForFn { return; } - let mir = cx.tcx.optimized_mir(def_id); + let mir = cx.tcx.mir_drops_elaborated_and_const_checked(def_id); - if let Ok(()) = is_min_const_fn(cx, mir, self.msrv) + if let Ok(()) = is_min_const_fn(cx, &mir.borrow(), self.msrv) && let hir::Node::Item(hir::Item { vis_span, .. }) | hir::Node::ImplItem(hir::ImplItem { vis_span, .. }) = cx.tcx.hir_node_by_def_id(def_id) { @@ -198,7 +197,7 @@ fn fn_inputs_has_impl_trait_ty(cx: &LateContext<'_>, def_id: LocalDefId) -> bool inputs.iter().any(|input| { matches!( input.kind(), - ty::Alias(ty::AliasTyKind::Weak, alias_ty) if cx.tcx.type_of(alias_ty.def_id).skip_binder().is_impl_trait() + ty::Alias(ty::AliasTyKind::Free, alias_ty) if cx.tcx.type_of(alias_ty.def_id).skip_binder().is_impl_trait() ) }) } diff --git a/src/tools/clippy/clippy_lints/src/missing_fields_in_debug.rs b/src/tools/clippy/clippy_lints/src/missing_fields_in_debug.rs index 28dc242742842..be7dd74fd62b9 100644 --- a/src/tools/clippy/clippy_lints/src/missing_fields_in_debug.rs +++ b/src/tools/clippy/clippy_lints/src/missing_fields_in_debug.rs @@ -1,9 +1,9 @@ use std::ops::ControlFlow; use clippy_utils::diagnostics::span_lint_and_then; -use clippy_utils::is_path_lang_item; use clippy_utils::ty::is_type_diagnostic_item; use clippy_utils::visitors::{Visitable, for_each_expr}; +use clippy_utils::{is_path_lang_item, sym}; use rustc_ast::LitKind; use rustc_data_structures::fx::FxHashSet; use rustc_hir::def::{DefKind, Res}; @@ -13,7 +13,7 @@ use rustc_hir::{ use rustc_lint::{LateContext, LateLintPass}; use rustc_middle::ty::{Ty, TypeckResults}; use rustc_session::declare_lint_pass; -use rustc_span::{Span, Symbol, sym}; +use rustc_span::{Span, Symbol}; declare_clippy_lint! { /// ### What it does @@ -116,7 +116,7 @@ fn should_lint<'tcx>( if path.ident.name == sym::debug_struct && is_type_diagnostic_item(cx, recv_ty, sym::Formatter) { has_debug_struct = true; - } else if path.ident.name.as_str() == "finish_non_exhaustive" + } else if path.ident.name == sym::finish_non_exhaustive && is_type_diagnostic_item(cx, recv_ty, sym::DebugStruct) { has_finish_non_exhaustive = true; @@ -209,7 +209,7 @@ impl<'tcx> LateLintPass<'tcx> for MissingFieldsInDebug { && let Res::Def(DefKind::Struct | DefKind::Enum | DefKind::Union, self_path_did) = self_path.res && cx.tcx.is_diagnostic_item(sym::Debug, trait_def_id) // don't trigger if this impl was derived - && !cx.tcx.has_attr(item.owner_id, sym::automatically_derived) + && !cx.tcx.is_automatically_derived(item.owner_id.to_def_id()) && !item.span.from_expansion() // find `Debug::fmt` function && let Some(fmt_item) = items.iter().find(|i| i.ident.name == sym::fmt) @@ -224,11 +224,10 @@ impl<'tcx> LateLintPass<'tcx> for MissingFieldsInDebug { // NB: can't call cx.typeck_results() as we are not in a body && let typeck_results = cx.tcx.typeck_body(*body_id) && should_lint(cx, typeck_results, block) - { // we intentionally only lint structs, see lint description - if let ItemKind::Struct(_, data, _) = &self_item.kind { - check_struct(cx, typeck_results, block, self_ty, item, data); - } + && let ItemKind::Struct(_, data, _) = &self_item.kind + { + check_struct(cx, typeck_results, block, self_ty, item, data); } } } diff --git a/src/tools/clippy/clippy_lints/src/missing_inline.rs b/src/tools/clippy/clippy_lints/src/missing_inline.rs index f49e03ea76528..1f613171b46e8 100644 --- a/src/tools/clippy/clippy_lints/src/missing_inline.rs +++ b/src/tools/clippy/clippy_lints/src/missing_inline.rs @@ -160,12 +160,13 @@ impl<'tcx> LateLintPass<'tcx> for MissingInline { AssocItemContainer::Impl => cx.tcx.impl_trait_ref(container_id).map(|t| t.skip_binder().def_id), }; - if let Some(trait_def_id) = trait_def_id { - if trait_def_id.is_local() && !cx.effective_visibilities.is_exported(impl_item.owner_id.def_id) { - // If a trait is being implemented for an item, and the - // trait is not exported, we don't need #[inline] - return; - } + if let Some(trait_def_id) = trait_def_id + && trait_def_id.is_local() + && !cx.effective_visibilities.is_exported(impl_item.owner_id.def_id) + { + // If a trait is being implemented for an item, and the + // trait is not exported, we don't need #[inline] + return; } let attrs = cx.tcx.hir_attrs(impl_item.hir_id()); diff --git a/src/tools/clippy/clippy_lints/src/missing_trait_methods.rs b/src/tools/clippy/clippy_lints/src/missing_trait_methods.rs index 7ee746365d102..e266c36b6e734 100644 --- a/src/tools/clippy/clippy_lints/src/missing_trait_methods.rs +++ b/src/tools/clippy/clippy_lints/src/missing_trait_methods.rs @@ -81,7 +81,7 @@ impl<'tcx> LateLintPass<'tcx> for MissingTraitMethods { cx, MISSING_TRAIT_METHODS, cx.tcx.def_span(item.owner_id), - format!("missing trait method provided by default: `{}`", assoc.name), + format!("missing trait method provided by default: `{}`", assoc.name()), |diag| { diag.span_help(cx.tcx.def_span(assoc.def_id), "implement the method"); }, diff --git a/src/tools/clippy/clippy_lints/src/mixed_read_write_in_expression.rs b/src/tools/clippy/clippy_lints/src/mixed_read_write_in_expression.rs index be728e6c8b74b..d9f4fb271fb4b 100644 --- a/src/tools/clippy/clippy_lints/src/mixed_read_write_in_expression.rs +++ b/src/tools/clippy/clippy_lints/src/mixed_read_write_in_expression.rs @@ -1,6 +1,6 @@ use clippy_utils::diagnostics::{span_lint, span_lint_and_then}; use clippy_utils::macros::root_macro_call_first_node; -use clippy_utils::{get_parent_expr, path_to_local, path_to_local_id}; +use clippy_utils::{get_parent_expr, path_to_local, path_to_local_id, sym}; use rustc_hir::intravisit::{Visitor, walk_expr}; use rustc_hir::{BinOpKind, Block, Expr, ExprKind, HirId, LetStmt, Node, Stmt, StmtKind}; use rustc_lint::{LateContext, LateLintPass}; @@ -135,10 +135,10 @@ impl<'tcx> DivergenceVisitor<'_, 'tcx> { } fn report_diverging_sub_expr(&mut self, e: &Expr<'_>) { - if let Some(macro_call) = root_macro_call_first_node(self.cx, e) { - if self.cx.tcx.item_name(macro_call.def_id).as_str() == "todo" { - return; - } + if let Some(macro_call) = root_macro_call_first_node(self.cx, e) + && self.cx.tcx.is_diagnostic_item(sym::todo_macro, macro_call.def_id) + { + return; } span_lint(self.cx, DIVERGING_SUB_EXPRESSION, e.span, "sub-expression diverges"); } @@ -261,10 +261,11 @@ fn check_expr<'tcx>(vis: &mut ReadVisitor<'_, 'tcx>, expr: &'tcx Expr<'_>) -> St | ExprKind::Assign(..) | ExprKind::Index(..) | ExprKind::Repeat(_, _) - | ExprKind::Struct(_, _, _) => { + | ExprKind::Struct(_, _, _) + | ExprKind::AssignOp(_, _, _) => { walk_expr(vis, expr); }, - ExprKind::Binary(op, _, _) | ExprKind::AssignOp(op, _, _) => { + ExprKind::Binary(op, _, _) => { if op.node == BinOpKind::And || op.node == BinOpKind::Or { // x && y and x || y always evaluate x first, so these are // strictly sequenced. @@ -327,22 +328,22 @@ impl<'tcx> Visitor<'tcx> for ReadVisitor<'_, 'tcx> { return; } - if path_to_local_id(expr, self.var) { + if path_to_local_id(expr, self.var) // Check that this is a read, not a write. - if !is_in_assignment_position(self.cx, expr) { - span_lint_and_then( - self.cx, - MIXED_READ_WRITE_IN_EXPRESSION, - expr.span, - format!("unsequenced read of `{}`", self.cx.tcx.hir_name(self.var)), - |diag| { - diag.span_note( - self.write_expr.span, - "whether read occurs before this write depends on evaluation order", - ); - }, - ); - } + && !is_in_assignment_position(self.cx, expr) + { + span_lint_and_then( + self.cx, + MIXED_READ_WRITE_IN_EXPRESSION, + expr.span, + format!("unsequenced read of `{}`", self.cx.tcx.hir_name(self.var)), + |diag| { + diag.span_note( + self.write_expr.span, + "whether read occurs before this write depends on evaluation order", + ); + }, + ); } match expr.kind { // We're about to descend a closure. Since we don't know when (or @@ -372,10 +373,10 @@ impl<'tcx> Visitor<'tcx> for ReadVisitor<'_, 'tcx> { /// Returns `true` if `expr` is the LHS of an assignment, like `expr = ...`. fn is_in_assignment_position(cx: &LateContext<'_>, expr: &Expr<'_>) -> bool { - if let Some(parent) = get_parent_expr(cx, expr) { - if let ExprKind::Assign(lhs, ..) = parent.kind { - return lhs.hir_id == expr.hir_id; - } + if let Some(parent) = get_parent_expr(cx, expr) + && let ExprKind::Assign(lhs, ..) = parent.kind + { + return lhs.hir_id == expr.hir_id; } false } diff --git a/src/tools/clippy/clippy_lints/src/module_style.rs b/src/tools/clippy/clippy_lints/src/module_style.rs index 676d608eb318c..98614baffcea6 100644 --- a/src/tools/clippy/clippy_lints/src/module_style.rs +++ b/src/tools/clippy/clippy_lints/src/module_style.rs @@ -73,8 +73,8 @@ impl_lint_pass!(ModStyle => [MOD_MODULE_FILES, SELF_NAMED_MODULE_FILES]); impl EarlyLintPass for ModStyle { fn check_crate(&mut self, cx: &EarlyContext<'_>, _: &ast::Crate) { - if cx.builder.lint_level(MOD_MODULE_FILES).0 == Level::Allow - && cx.builder.lint_level(SELF_NAMED_MODULE_FILES).0 == Level::Allow + if cx.builder.lint_level(MOD_MODULE_FILES).level == Level::Allow + && cx.builder.lint_level(SELF_NAMED_MODULE_FILES).level == Level::Allow { return; } @@ -119,22 +119,22 @@ impl EarlyLintPass for ModStyle { } for folder in &folder_segments { - if !mod_folders.contains(folder) { - if let Some((file, path)) = file_map.get(folder) { - span_lint_and_then( - cx, - SELF_NAMED_MODULE_FILES, - Span::new(file.start_pos, file.start_pos, SyntaxContext::root(), None), - format!("`mod.rs` files are required, found `{}`", path.display()), - |diag| { - let mut correct = path.to_path_buf(); - correct.pop(); - correct.push(folder); - correct.push("mod.rs"); - diag.help(format!("move `{}` to `{}`", path.display(), correct.display(),)); - }, - ); - } + if !mod_folders.contains(folder) + && let Some((file, path)) = file_map.get(folder) + { + span_lint_and_then( + cx, + SELF_NAMED_MODULE_FILES, + Span::new(file.start_pos, file.start_pos, SyntaxContext::root(), None), + format!("`mod.rs` files are required, found `{}`", path.display()), + |diag| { + let mut correct = path.to_path_buf(); + correct.pop(); + correct.push(folder); + correct.push("mod.rs"); + diag.help(format!("move `{}` to `{}`", path.display(), correct.display(),)); + }, + ); } } } diff --git a/src/tools/clippy/clippy_lints/src/multiple_bound_locations.rs b/src/tools/clippy/clippy_lints/src/multiple_bound_locations.rs index 0e1980a6acb61..4b32ba83b325e 100644 --- a/src/tools/clippy/clippy_lints/src/multiple_bound_locations.rs +++ b/src/tools/clippy/clippy_lints/src/multiple_bound_locations.rs @@ -39,7 +39,7 @@ declare_lint_pass!(MultipleBoundLocations => [MULTIPLE_BOUND_LOCATIONS]); impl EarlyLintPass for MultipleBoundLocations { fn check_fn(&mut self, cx: &EarlyContext<'_>, kind: FnKind<'_>, _: Span, _: NodeId) { - if let FnKind::Fn(_, _, _, Fn { generics, .. }) = kind + if let FnKind::Fn(_, _, Fn { generics, .. }) = kind && !generics.params.is_empty() && !generics.where_clause.predicates.is_empty() { diff --git a/src/tools/clippy/clippy_lints/src/multiple_unsafe_ops_per_block.rs b/src/tools/clippy/clippy_lints/src/multiple_unsafe_ops_per_block.rs index 2adc27c0b709a..c6c27e22b90e5 100644 --- a/src/tools/clippy/clippy_lints/src/multiple_unsafe_ops_per_block.rs +++ b/src/tools/clippy/clippy_lints/src/multiple_unsafe_ops_per_block.rs @@ -142,10 +142,9 @@ fn collect_unsafe_exprs<'tcx>( .typeck_results() .type_dependent_def_id(expr.hir_id) .map(|def_id| cx.tcx.fn_sig(def_id)) + && sig.skip_binder().safety().is_unsafe() { - if sig.skip_binder().safety().is_unsafe() { - unsafe_ops.push(("unsafe method call occurs here", expr.span)); - } + unsafe_ops.push(("unsafe method call occurs here", expr.span)); } }, diff --git a/src/tools/clippy/clippy_lints/src/mut_key.rs b/src/tools/clippy/clippy_lints/src/mut_key.rs index 7abc5870d00e0..a45031ce22b91 100644 --- a/src/tools/clippy/clippy_lints/src/mut_key.rs +++ b/src/tools/clippy/clippy_lints/src/mut_key.rs @@ -82,10 +82,10 @@ impl<'tcx> LateLintPass<'tcx> for MutableKeyType<'tcx> { } fn check_impl_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx hir::ImplItem<'tcx>) { - if let hir::ImplItemKind::Fn(ref sig, ..) = item.kind { - if trait_ref_of_method(cx, item.owner_id.def_id).is_none() { - self.check_sig(cx, item.owner_id.def_id, sig.decl); - } + if let hir::ImplItemKind::Fn(ref sig, ..) = item.kind + && trait_ref_of_method(cx, item.owner_id.def_id).is_none() + { + self.check_sig(cx, item.owner_id.def_id, sig.decl); } } diff --git a/src/tools/clippy/clippy_lints/src/mut_mut.rs b/src/tools/clippy/clippy_lints/src/mut_mut.rs index 3c4ba5141dd9b..d98c70e7f5a85 100644 --- a/src/tools/clippy/clippy_lints/src/mut_mut.rs +++ b/src/tools/clippy/clippy_lints/src/mut_mut.rs @@ -77,16 +77,16 @@ impl<'tcx> intravisit::Visitor<'tcx> for MutVisitor<'_, 'tcx> { expr.span, "generally you want to avoid `&mut &mut _` if possible", ); - } else if let ty::Ref(_, ty, hir::Mutability::Mut) = self.cx.typeck_results().expr_ty(e).kind() { - if ty.peel_refs().is_sized(self.cx.tcx, self.cx.typing_env()) { - span_lint_hir( - self.cx, - MUT_MUT, - expr.hir_id, - expr.span, - "this expression mutably borrows a mutable reference. Consider reborrowing", - ); - } + } else if let ty::Ref(_, ty, hir::Mutability::Mut) = self.cx.typeck_results().expr_ty(e).kind() + && ty.peel_refs().is_sized(self.cx.tcx, self.cx.typing_env()) + { + span_lint_hir( + self.cx, + MUT_MUT, + expr.hir_id, + expr.span, + "this expression mutably borrows a mutable reference. Consider reborrowing", + ); } } } diff --git a/src/tools/clippy/clippy_lints/src/mutable_debug_assertion.rs b/src/tools/clippy/clippy_lints/src/mutable_debug_assertion.rs index 13a23a13b9c24..270eebe075804 100644 --- a/src/tools/clippy/clippy_lints/src/mutable_debug_assertion.rs +++ b/src/tools/clippy/clippy_lints/src/mutable_debug_assertion.rs @@ -101,14 +101,13 @@ impl<'tcx> Visitor<'tcx> for MutArgVisitor<'_, 'tcx> { return; }, ExprKind::Path(_) => { - if let Some(adj) = self.cx.typeck_results().adjustments().get(expr.hir_id) { - if adj + if let Some(adj) = self.cx.typeck_results().adjustments().get(expr.hir_id) + && adj .iter() .any(|a| matches!(a.target.kind(), ty::Ref(_, _, Mutability::Mut))) - { - self.found = true; - return; - } + { + self.found = true; + return; } }, // Don't check await desugars diff --git a/src/tools/clippy/clippy_lints/src/mutex_atomic.rs b/src/tools/clippy/clippy_lints/src/mutex_atomic.rs index 49fd29d1dd6dc..fe2157ca533a6 100644 --- a/src/tools/clippy/clippy_lints/src/mutex_atomic.rs +++ b/src/tools/clippy/clippy_lints/src/mutex_atomic.rs @@ -91,19 +91,19 @@ declare_lint_pass!(Mutex => [MUTEX_ATOMIC, MUTEX_INTEGER]); impl<'tcx> LateLintPass<'tcx> for Mutex { fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) { let ty = cx.typeck_results().expr_ty(expr); - if let ty::Adt(_, subst) = ty.kind() { - if is_type_diagnostic_item(cx, ty, sym::Mutex) { - let mutex_param = subst.type_at(0); - if let Some(atomic_name) = get_atomic_name(mutex_param) { - let msg = format!( - "consider using an `{atomic_name}` instead of a `Mutex` here; if you just want the locking \ + if let ty::Adt(_, subst) = ty.kind() + && is_type_diagnostic_item(cx, ty, sym::Mutex) + { + let mutex_param = subst.type_at(0); + if let Some(atomic_name) = get_atomic_name(mutex_param) { + let msg = format!( + "consider using an `{atomic_name}` instead of a `Mutex` here; if you just want the locking \ behavior and not the internal type, consider using `Mutex<()>`" - ); - match *mutex_param.kind() { - ty::Uint(t) if t != UintTy::Usize => span_lint(cx, MUTEX_INTEGER, expr.span, msg), - ty::Int(t) if t != IntTy::Isize => span_lint(cx, MUTEX_INTEGER, expr.span, msg), - _ => span_lint(cx, MUTEX_ATOMIC, expr.span, msg), - } + ); + match *mutex_param.kind() { + ty::Uint(t) if t != UintTy::Usize => span_lint(cx, MUTEX_INTEGER, expr.span, msg), + ty::Int(t) if t != IntTy::Isize => span_lint(cx, MUTEX_INTEGER, expr.span, msg), + _ => span_lint(cx, MUTEX_ATOMIC, expr.span, msg), } } } diff --git a/src/tools/clippy/clippy_lints/src/needless_bool.rs b/src/tools/clippy/clippy_lints/src/needless_bool.rs index 2eacd6875d6b4..f768e11a4a2bb 100644 --- a/src/tools/clippy/clippy_lints/src/needless_bool.rs +++ b/src/tools/clippy/clippy_lints/src/needless_bool.rs @@ -154,7 +154,7 @@ impl<'tcx> LateLintPass<'tcx> for NeedlessBool { || is_receiver_of_method_call(cx, e) || is_as_argument(cx, e) { - snip = snip.maybe_par(); + snip = snip.maybe_paren(); } span_lint_and_sugg( @@ -426,10 +426,10 @@ fn fetch_bool_block(expr: &Expr<'_>) -> Option { } fn fetch_bool_expr(expr: &Expr<'_>) -> Option { - if let ExprKind::Lit(lit_ptr) = peel_blocks(expr).kind { - if let LitKind::Bool(value) = lit_ptr.node { - return Some(value); - } + if let ExprKind::Lit(lit_ptr) = peel_blocks(expr).kind + && let LitKind::Bool(value) = lit_ptr.node + { + return Some(value); } None } diff --git a/src/tools/clippy/clippy_lints/src/needless_borrows_for_generic_args.rs b/src/tools/clippy/clippy_lints/src/needless_borrows_for_generic_args.rs index f686cc912ddb0..e579dd5947d74 100644 --- a/src/tools/clippy/clippy_lints/src/needless_borrows_for_generic_args.rs +++ b/src/tools/clippy/clippy_lints/src/needless_borrows_for_generic_args.rs @@ -299,7 +299,7 @@ fn has_ref_mut_self_method(cx: &LateContext<'_>, trait_def_id: DefId) -> bool { .associated_items(trait_def_id) .in_definition_order() .any(|assoc_item| { - if assoc_item.fn_has_self_parameter { + if assoc_item.is_method() { let self_ty = cx .tcx .fn_sig(assoc_item.def_id) diff --git a/src/tools/clippy/clippy_lints/src/needless_for_each.rs b/src/tools/clippy/clippy_lints/src/needless_for_each.rs index 90b27f5dbac82..7dd96f1f037fd 100644 --- a/src/tools/clippy/clippy_lints/src/needless_for_each.rs +++ b/src/tools/clippy/clippy_lints/src/needless_for_each.rs @@ -3,12 +3,12 @@ use rustc_hir::intravisit::{Visitor, walk_expr}; use rustc_hir::{Block, BlockCheckMode, Closure, Expr, ExprKind, Stmt, StmtKind}; use rustc_lint::{LateContext, LateLintPass}; use rustc_session::declare_lint_pass; -use rustc_span::{Span, sym}; +use rustc_span::Span; use clippy_utils::diagnostics::span_lint_and_then; -use clippy_utils::is_trait_method; use clippy_utils::source::snippet_with_applicability; use clippy_utils::ty::has_iter_method; +use clippy_utils::{is_trait_method, sym}; declare_clippy_lint! { /// ### What it does @@ -64,7 +64,7 @@ impl<'tcx> LateLintPass<'tcx> for NeedlessForEach { iter_recv.kind, ExprKind::Array(..) | ExprKind::Call(..) | ExprKind::Path(..) ) - && method_name.ident.name.as_str() == "for_each" + && method_name.ident.name == sym::for_each && is_trait_method(cx, expr, sym::Iterator) // Checks the type of the `iter` method receiver is NOT a user defined type. && has_iter_method(cx, cx.typeck_results().expr_ty(iter_recv)).is_some() diff --git a/src/tools/clippy/clippy_lints/src/needless_pass_by_ref_mut.rs b/src/tools/clippy/clippy_lints/src/needless_pass_by_ref_mut.rs index 576bb27b254c9..7052e1d0fbe5d 100644 --- a/src/tools/clippy/clippy_lints/src/needless_pass_by_ref_mut.rs +++ b/src/tools/clippy/clippy_lints/src/needless_pass_by_ref_mut.rs @@ -86,11 +86,11 @@ fn should_skip<'tcx>( return false; } - if let PatKind::Binding(.., name, _) = arg.pat.kind { + if let PatKind::Binding(.., name, _) = arg.pat.kind // If it's a potentially unused variable, we don't check it. - if name.name == kw::Underscore || name.as_str().starts_with('_') { - return true; - } + && (name.name == kw::Underscore || name.as_str().starts_with('_')) + { + return true; } // All spans generated from a proc-macro invocation are the same... @@ -164,13 +164,13 @@ impl<'tcx> LateLintPass<'tcx> for NeedlessPassByRefMut<'tcx> { }; // Exclude non-inherent impls - if let Node::Item(item) = cx.tcx.parent_hir_node(hir_id) { - if matches!( + if let Node::Item(item) = cx.tcx.parent_hir_node(hir_id) + && matches!( item.kind, ItemKind::Impl(Impl { of_trait: Some(_), .. }) | ItemKind::Trait(..) - ) { - return; - } + ) + { + return; } let fn_sig = cx.tcx.fn_sig(fn_def_id).instantiate_identity(); @@ -280,7 +280,7 @@ impl<'tcx> LateLintPass<'tcx> for NeedlessPassByRefMut<'tcx> { diag.span_suggestion( sp, "consider changing to".to_string(), - format!("&{}", snippet(cx, cx.tcx.hir().span(inner_ty.ty.hir_id), "_"),), + format!("&{}", snippet(cx, cx.tcx.hir_span(inner_ty.ty.hir_id), "_"),), Applicability::Unspecified, ); if cx.effective_visibilities.is_exported(*fn_def_id) { @@ -353,10 +353,10 @@ impl MutablyUsedVariablesCtxt<'_> { for (parent, node) in self.tcx.hir_parent_iter(item) { if let Some(fn_sig) = self.tcx.hir_fn_sig_by_hir_id(parent) { return fn_sig.header.is_unsafe(); - } else if let Node::Block(block) = node { - if matches!(block.rules, BlockCheckMode::UnsafeBlock(_)) { - return true; - } + } else if let Node::Block(block) = node + && matches!(block.rules, BlockCheckMode::UnsafeBlock(_)) + { + return true; } } false @@ -426,10 +426,10 @@ impl<'tcx> euv::Delegate<'tcx> for MutablyUsedVariablesCtxt<'tcx> { // upon! self.add_mutably_used_var(*vid); } - } else if borrow == ty::BorrowKind::Immutable { + } else if borrow == ty::BorrowKind::Immutable // If there is an `async block`, it'll contain a call to a closure which we need to // go into to ensure all "mutate" checks are found. - if let Node::Expr(Expr { + && let Node::Expr(Expr { kind: ExprKind::Call( _, @@ -442,9 +442,8 @@ impl<'tcx> euv::Delegate<'tcx> for MutablyUsedVariablesCtxt<'tcx> { ), .. }) = self.tcx.hir_node(cmt.hir_id) - { - self.async_closures.insert(*def_id); - } + { + self.async_closures.insert(*def_id); } } @@ -460,10 +459,9 @@ impl<'tcx> euv::Delegate<'tcx> for MutablyUsedVariablesCtxt<'tcx> { }), .. } = &cmt.place + && !projections.is_empty() { - if !projections.is_empty() { - self.add_mutably_used_var(*vid); - } + self.add_mutably_used_var(*vid); } } @@ -477,10 +475,9 @@ impl<'tcx> euv::Delegate<'tcx> for MutablyUsedVariablesCtxt<'tcx> { }), .. } = &cmt.place + && self.is_in_unsafe_block(id) { - if self.is_in_unsafe_block(id) { - self.add_mutably_used_var(*vid); - } + self.add_mutably_used_var(*vid); } self.prev_bind = None; } @@ -499,15 +496,14 @@ impl<'tcx> euv::Delegate<'tcx> for MutablyUsedVariablesCtxt<'tcx> { }), .. } = &cmt.place + && let FakeReadCause::ForLet(Some(inner)) = cause { - if let FakeReadCause::ForLet(Some(inner)) = cause { - // Seems like we are inside an async function. We need to store the closure `DefId` - // to go through it afterwards. - self.async_closures.insert(inner); - self.add_alias(cmt.hir_id, *vid); - self.prev_move_to_closure.insert(*vid); - self.prev_bind = None; - } + // Seems like we are inside an async function. We need to store the closure `DefId` + // to go through it afterwards. + self.async_closures.insert(inner); + self.add_alias(cmt.hir_id, *vid); + self.prev_move_to_closure.insert(*vid); + self.prev_bind = None; } } @@ -522,10 +518,9 @@ impl<'tcx> euv::Delegate<'tcx> for MutablyUsedVariablesCtxt<'tcx> { }), .. } = &cmt.place + && self.is_in_unsafe_block(id) { - if self.is_in_unsafe_block(id) { - self.add_mutably_used_var(*vid); - } + self.add_mutably_used_var(*vid); } } } diff --git a/src/tools/clippy/clippy_lints/src/needless_pass_by_value.rs b/src/tools/clippy/clippy_lints/src/needless_pass_by_value.rs index 55ca875edcee6..275d710c76a9b 100644 --- a/src/tools/clippy/clippy_lints/src/needless_pass_by_value.rs +++ b/src/tools/clippy/clippy_lints/src/needless_pass_by_value.rs @@ -98,13 +98,13 @@ impl<'tcx> LateLintPass<'tcx> for NeedlessPassByValue { } // Exclude non-inherent impls - if let Node::Item(item) = cx.tcx.parent_hir_node(hir_id) { - if matches!( + if let Node::Item(item) = cx.tcx.parent_hir_node(hir_id) + && matches!( item.kind, ItemKind::Impl(Impl { of_trait: Some(_), .. }) | ItemKind::Trait(..) - ) { - return; - } + ) + { + return; } // Allow `Borrow` or functions to be taken by value @@ -197,20 +197,18 @@ impl<'tcx> LateLintPass<'tcx> for NeedlessPassByValue { { // Dereference suggestion let sugg = |diag: &mut Diag<'_, ()>| { - if let ty::Adt(def, ..) = ty.kind() { - if let Some(span) = cx.tcx.hir().span_if_local(def.did()) { - if type_allowed_to_implement_copy( - cx.tcx, - cx.param_env, - ty, - traits::ObligationCause::dummy_with_span(span), - rustc_hir::Safety::Safe, - ) - .is_ok() - { - diag.span_help(span, "or consider marking this type as `Copy`"); - } - } + if let ty::Adt(def, ..) = ty.kind() + && let Some(span) = cx.tcx.hir_span_if_local(def.did()) + && type_allowed_to_implement_copy( + cx.tcx, + cx.param_env, + ty, + traits::ObligationCause::dummy_with_span(span), + rustc_hir::Safety::Safe, + ) + .is_ok() + { + diag.span_help(span, "or consider marking this type as `Copy`"); } if is_type_diagnostic_item(cx, ty, sym::Vec) @@ -254,29 +252,28 @@ impl<'tcx> LateLintPass<'tcx> for NeedlessPassByValue { return; } - if is_type_lang_item(cx, ty, LangItem::String) { - if let Some(clone_spans) = + if is_type_lang_item(cx, ty, LangItem::String) + && let Some(clone_spans) = get_spans(cx, Some(body.id()), idx, &[("clone", ".to_string()"), ("as_str", "")]) - { + { + diag.span_suggestion( + input.span, + "consider changing the type to", + "&str", + Applicability::Unspecified, + ); + + for (span, suggestion) in clone_spans { diag.span_suggestion( - input.span, - "consider changing the type to", - "&str", + span, + span.get_source_text(cx) + .map_or("change the call to".to_owned(), |src| format!("change `{src}` to")), + suggestion, Applicability::Unspecified, ); - - for (span, suggestion) in clone_spans { - diag.span_suggestion( - span, - span.get_source_text(cx) - .map_or("change the call to".to_owned(), |src| format!("change `{src}` to")), - suggestion, - Applicability::Unspecified, - ); - } - - return; } + + return; } diag.span_suggestion_verbose( diff --git a/src/tools/clippy/clippy_lints/src/needless_question_mark.rs b/src/tools/clippy/clippy_lints/src/needless_question_mark.rs index 72b0a80260e9f..2a2160c3be2d1 100644 --- a/src/tools/clippy/clippy_lints/src/needless_question_mark.rs +++ b/src/tools/clippy/clippy_lints/src/needless_question_mark.rs @@ -1,6 +1,5 @@ -use clippy_utils::diagnostics::span_lint_and_sugg; +use clippy_utils::diagnostics::span_lint_hir_and_then; use clippy_utils::path_res; -use clippy_utils::source::snippet; use rustc_errors::Applicability; use rustc_hir::def::{DefKind, Res}; use rustc_hir::{Block, Body, Expr, ExprKind, LangItem, MatchSource, QPath}; @@ -9,52 +8,38 @@ use rustc_session::declare_lint_pass; declare_clippy_lint! { /// ### What it does - /// Suggests alternatives for useless applications of `?` in terminating expressions + /// Suggests replacing `Ok(x?)` or `Some(x?)` with `x` in return positions where the `?` operator + /// is not needed to convert the type of `x`. /// /// ### Why is this bad? /// There's no reason to use `?` to short-circuit when execution of the body will end there anyway. /// /// ### Example /// ```no_run - /// struct TO { - /// magic: Option, + /// # use std::num::ParseIntError; + /// fn f(s: &str) -> Option { + /// Some(s.find('x')?) /// } /// - /// fn f(to: TO) -> Option { - /// Some(to.magic?) + /// fn g(s: &str) -> Result { + /// Ok(s.parse()?) /// } - /// - /// struct TR { - /// magic: Result, - /// } - /// - /// fn g(tr: Result) -> Result { - /// tr.and_then(|t| Ok(t.magic?)) - /// } - /// /// ``` /// Use instead: /// ```no_run - /// struct TO { - /// magic: Option, + /// # use std::num::ParseIntError; + /// fn f(s: &str) -> Option { + /// s.find('x') /// } /// - /// fn f(to: TO) -> Option { - /// to.magic - /// } - /// - /// struct TR { - /// magic: Result, - /// } - /// - /// fn g(tr: Result) -> Result { - /// tr.and_then(|t| t.magic) + /// fn g(s: &str) -> Result { + /// s.parse() /// } /// ``` #[clippy::version = "1.51.0"] pub NEEDLESS_QUESTION_MARK, complexity, - "Suggest `value.inner_option` instead of `Some(value.inner_option?)`. The same goes for `Result`." + "using `Ok(x?)` or `Some(x?)` where `x` would be equivalent" } declare_lint_pass!(NeedlessQuestionMark => [NEEDLESS_QUESTION_MARK]); @@ -111,10 +96,10 @@ fn check(cx: &LateContext<'_>, expr: &Expr<'_>) { if let ExprKind::Call(path, [arg]) = expr.kind && let Res::Def(DefKind::Ctor(..), ctor_id) = path_res(cx, path) && let Some(variant_id) = cx.tcx.opt_parent(ctor_id) - && let sugg_remove = if cx.tcx.lang_items().option_some_variant() == Some(variant_id) { - "Some()" + && let variant = if cx.tcx.lang_items().option_some_variant() == Some(variant_id) { + "Some" } else if cx.tcx.lang_items().result_ok_variant() == Some(variant_id) { - "Ok()" + "Ok" } else { return; } @@ -126,14 +111,25 @@ fn check(cx: &LateContext<'_>, expr: &Expr<'_>) { && let inner_ty = cx.typeck_results().expr_ty(inner_expr) && expr_ty == inner_ty { - span_lint_and_sugg( + span_lint_hir_and_then( cx, NEEDLESS_QUESTION_MARK, + expr.hir_id, expr.span, - "question mark operator is useless here", - format!("try removing question mark and `{sugg_remove}`"), - format!("{}", snippet(cx, inner_expr.span, r#""...""#)), - Applicability::MachineApplicable, + format!("enclosing `{variant}` and `?` operator are unneeded"), + |diag| { + diag.multipart_suggestion( + format!("remove the enclosing `{variant}` and `?` operator"), + vec![ + (expr.span.until(inner_expr.span), String::new()), + ( + inner_expr.span.shrink_to_hi().to(expr.span.shrink_to_hi()), + String::new(), + ), + ], + Applicability::MachineApplicable, + ); + }, ); } } diff --git a/src/tools/clippy/clippy_lints/src/needless_update.rs b/src/tools/clippy/clippy_lints/src/needless_update.rs index cce0617ba3925..4a86c3720ca24 100644 --- a/src/tools/clippy/clippy_lints/src/needless_update.rs +++ b/src/tools/clippy/clippy_lints/src/needless_update.rs @@ -53,18 +53,16 @@ impl<'tcx> LateLintPass<'tcx> for NeedlessUpdate { fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) { if let ExprKind::Struct(_, fields, StructTailExpr::Base(base)) = expr.kind { let ty = cx.typeck_results().expr_ty(expr); - if let ty::Adt(def, _) = ty.kind() { - let variant = def.non_enum_variant(); - if fields.len() == variant.fields.len() - && !variant.is_field_list_non_exhaustive() - { - span_lint( - cx, - NEEDLESS_UPDATE, - base.span, - "struct update has no effect, all the fields in the struct have already been specified", - ); - } + if let ty::Adt(def, _) = ty.kind() + && fields.len() == def.non_enum_variant().fields.len() + && !def.variant(0_usize.into()).is_field_list_non_exhaustive() + { + span_lint( + cx, + NEEDLESS_UPDATE, + base.span, + "struct update has no effect, all the fields in the struct have already been specified", + ); } } } diff --git a/src/tools/clippy/clippy_lints/src/neg_multiply.rs b/src/tools/clippy/clippy_lints/src/neg_multiply.rs index 429afff9b6642..74c8142787ebc 100644 --- a/src/tools/clippy/clippy_lints/src/neg_multiply.rs +++ b/src/tools/clippy/clippy_lints/src/neg_multiply.rs @@ -16,9 +16,6 @@ declare_clippy_lint! { /// ### Why is this bad? /// It's more readable to just negate. /// - /// ### Known problems - /// This only catches integers (for now). - /// /// ### Example /// ```rust,ignore /// let a = x * -1; @@ -38,23 +35,32 @@ declare_lint_pass!(NegMultiply => [NEG_MULTIPLY]); impl<'tcx> LateLintPass<'tcx> for NegMultiply { fn check_expr(&mut self, cx: &LateContext<'tcx>, e: &'tcx Expr<'_>) { - if let ExprKind::Binary(ref op, left, right) = e.kind { - if BinOpKind::Mul == op.node { - match (&left.kind, &right.kind) { - (&ExprKind::Unary(..), &ExprKind::Unary(..)) => {}, - (&ExprKind::Unary(UnOp::Neg, lit), _) => check_mul(cx, e.span, lit, right), - (_, &ExprKind::Unary(UnOp::Neg, lit)) => check_mul(cx, e.span, lit, left), - _ => {}, - } + if let ExprKind::Binary(ref op, left, right) = e.kind + && BinOpKind::Mul == op.node + { + match (&left.kind, &right.kind) { + (&ExprKind::Unary(..), &ExprKind::Unary(..)) => {}, + (&ExprKind::Unary(UnOp::Neg, lit), _) => check_mul(cx, e.span, lit, right), + (_, &ExprKind::Unary(UnOp::Neg, lit)) => check_mul(cx, e.span, lit, left), + _ => {}, } } } } fn check_mul(cx: &LateContext<'_>, span: Span, lit: &Expr<'_>, exp: &Expr<'_>) { + const F16_ONE: u16 = 1.0_f16.to_bits(); + const F128_ONE: u128 = 1.0_f128.to_bits(); if let ExprKind::Lit(l) = lit.kind - && consts::lit_to_mir_constant(&l.node, cx.typeck_results().expr_ty_opt(lit)) == Constant::Int(1) - && cx.typeck_results().expr_ty(exp).is_integral() + && matches!( + consts::lit_to_mir_constant(&l.node, cx.typeck_results().expr_ty_opt(lit)), + Constant::Int(1) + | Constant::F16(F16_ONE) + | Constant::F32(1.0) + | Constant::F64(1.0) + | Constant::F128(F128_ONE) + ) + && cx.typeck_results().expr_ty(exp).is_numeric() { let mut applicability = Applicability::MachineApplicable; let (snip, from_macro) = snippet_with_context(cx, exp.span, span.ctxt(), "..", &mut applicability); diff --git a/src/tools/clippy/clippy_lints/src/new_without_default.rs b/src/tools/clippy/clippy_lints/src/new_without_default.rs index f0ee613791fb9..4b73a4455f55b 100644 --- a/src/tools/clippy/clippy_lints/src/new_without_default.rs +++ b/src/tools/clippy/clippy_lints/src/new_without_default.rs @@ -97,14 +97,14 @@ impl<'tcx> LateLintPass<'tcx> for NewWithoutDefault { { if self.impling_types.is_none() { let mut impls = HirIdSet::default(); - cx.tcx.for_each_impl(default_trait_id, |d| { + for &d in cx.tcx.local_trait_impls(default_trait_id) { let ty = cx.tcx.type_of(d).instantiate_identity(); - if let Some(ty_def) = ty.ty_adt_def() { - if let Some(local_def_id) = ty_def.did().as_local() { - impls.insert(cx.tcx.local_def_id_to_hir_id(local_def_id)); - } + if let Some(ty_def) = ty.ty_adt_def() + && let Some(local_def_id) = ty_def.did().as_local() + { + impls.insert(cx.tcx.local_def_id_to_hir_id(local_def_id)); } - }); + } self.impling_types = Some(impls); } diff --git a/src/tools/clippy/clippy_lints/src/no_effect.rs b/src/tools/clippy/clippy_lints/src/no_effect.rs index 7187a8f2c11a1..7ab7976d5697a 100644 --- a/src/tools/clippy/clippy_lints/src/no_effect.rs +++ b/src/tools/clippy/clippy_lints/src/no_effect.rs @@ -182,23 +182,22 @@ impl NoEffect { ); return true; } - } else if let StmtKind::Let(local) = stmt.kind { - if !is_lint_allowed(cx, NO_EFFECT_UNDERSCORE_BINDING, local.hir_id) - && !matches!(local.source, LocalSource::AsyncFn) - && let Some(init) = local.init - && local.els.is_none() - && !local.pat.span.from_expansion() - && has_no_effect(cx, init) - && let PatKind::Binding(_, hir_id, ident, _) = local.pat.kind - && ident.name.to_ident_string().starts_with('_') - && !in_automatically_derived(cx.tcx, local.hir_id) - { - if let Some(l) = self.local_bindings.last_mut() { - l.push(hir_id); - self.underscore_bindings.insert(hir_id, ident.span); - } - return true; + } else if let StmtKind::Let(local) = stmt.kind + && !is_lint_allowed(cx, NO_EFFECT_UNDERSCORE_BINDING, local.hir_id) + && !matches!(local.source, LocalSource::AsyncFn) + && let Some(init) = local.init + && local.els.is_none() + && !local.pat.span.from_expansion() + && has_no_effect(cx, init) + && let PatKind::Binding(_, hir_id, ident, _) = local.pat.kind + && ident.name.to_ident_string().starts_with('_') + && !in_automatically_derived(cx.tcx, local.hir_id) + { + if let Some(l) = self.local_bindings.last_mut() { + l.push(hir_id); + self.underscore_bindings.insert(hir_id, ident.span); } + return true; } false } diff --git a/src/tools/clippy/clippy_lints/src/non_canonical_impls.rs b/src/tools/clippy/clippy_lints/src/non_canonical_impls.rs index 448bb603cf2c9..93865197ec965 100644 --- a/src/tools/clippy/clippy_lints/src/non_canonical_impls.rs +++ b/src/tools/clippy/clippy_lints/src/non_canonical_impls.rs @@ -1,6 +1,8 @@ use clippy_utils::diagnostics::{span_lint_and_sugg, span_lint_and_then}; use clippy_utils::ty::implements_trait; -use clippy_utils::{is_from_proc_macro, is_res_lang_ctor, last_path_segment, path_res, std_or_core}; +use clippy_utils::{ + is_diag_trait_item, is_from_proc_macro, is_res_lang_ctor, last_path_segment, path_res, std_or_core, +}; use rustc_errors::Applicability; use rustc_hir::def_id::LocalDefId; use rustc_hir::{Expr, ExprKind, ImplItem, ImplItemKind, LangItem, Node, UnOp}; @@ -98,7 +100,7 @@ declare_clippy_lint! { /// /// impl PartialOrd for A { /// fn partial_cmp(&self, other: &Self) -> Option { - /// Some(self.cmp(other)) + /// Some(self.cmp(other)) // or self.cmp(other).into() /// } /// } /// ``` @@ -185,65 +187,66 @@ impl LateLintPass<'_> for NonCanonicalImpls { if block.stmts.is_empty() && let Some(expr) = block.expr - && expr_is_cmp(cx, &expr.kind, impl_item, &mut needs_fully_qualified) + && expr_is_cmp(cx, expr, impl_item, &mut needs_fully_qualified) { + return; } // Fix #12683, allow [`needless_return`] here else if block.expr.is_none() && let Some(stmt) = block.stmts.first() && let rustc_hir::StmtKind::Semi(Expr { - kind: ExprKind::Ret(Some(Expr { kind: ret_kind, .. })), + kind: ExprKind::Ret(Some(ret)), .. }) = stmt.kind - && expr_is_cmp(cx, ret_kind, impl_item, &mut needs_fully_qualified) + && expr_is_cmp(cx, ret, impl_item, &mut needs_fully_qualified) { - } else { - // If `Self` and `Rhs` are not the same type, bail. This makes creating a valid - // suggestion tons more complex. - if let [lhs, rhs, ..] = trait_impl.args.as_slice() - && lhs != rhs - { - return; - } + return; + } + // If `Self` and `Rhs` are not the same type, bail. This makes creating a valid + // suggestion tons more complex. + else if let [lhs, rhs, ..] = trait_impl.args.as_slice() + && lhs != rhs + { + return; + } - span_lint_and_then( - cx, - NON_CANONICAL_PARTIAL_ORD_IMPL, - item.span, - "non-canonical implementation of `partial_cmp` on an `Ord` type", - |diag| { - let [_, other] = body.params else { - return; - }; - let Some(std_or_core) = std_or_core(cx) else { - return; - }; + span_lint_and_then( + cx, + NON_CANONICAL_PARTIAL_ORD_IMPL, + item.span, + "non-canonical implementation of `partial_cmp` on an `Ord` type", + |diag| { + let [_, other] = body.params else { + return; + }; + let Some(std_or_core) = std_or_core(cx) else { + return; + }; - let suggs = match (other.pat.simple_ident(), needs_fully_qualified) { - (Some(other_ident), true) => vec![( + let suggs = match (other.pat.simple_ident(), needs_fully_qualified) { + (Some(other_ident), true) => vec![( + block.span, + format!("{{ Some({std_or_core}::cmp::Ord::cmp(self, {})) }}", other_ident.name), + )], + (Some(other_ident), false) => { + vec![(block.span, format!("{{ Some(self.cmp({})) }}", other_ident.name))] + }, + (None, true) => vec![ + ( block.span, - format!("{{ Some({std_or_core}::cmp::Ord::cmp(self, {})) }}", other_ident.name), - )], - (Some(other_ident), false) => { - vec![(block.span, format!("{{ Some(self.cmp({})) }}", other_ident.name))] - }, - (None, true) => vec![ - ( - block.span, - format!("{{ Some({std_or_core}::cmp::Ord::cmp(self, other)) }}"), - ), - (other.pat.span, "other".to_owned()), - ], - (None, false) => vec![ - (block.span, "{ Some(self.cmp(other)) }".to_owned()), - (other.pat.span, "other".to_owned()), - ], - }; + format!("{{ Some({std_or_core}::cmp::Ord::cmp(self, other)) }}"), + ), + (other.pat.span, "other".to_owned()), + ], + (None, false) => vec![ + (block.span, "{ Some(self.cmp(other)) }".to_owned()), + (other.pat.span, "other".to_owned()), + ], + }; - diag.multipart_suggestion("change this to", suggs, Applicability::Unspecified); - }, - ); - } + diag.multipart_suggestion("change this to", suggs, Applicability::Unspecified); + }, + ); } } } @@ -251,10 +254,11 @@ impl LateLintPass<'_> for NonCanonicalImpls { /// Return true if `expr_kind` is a `cmp` call. fn expr_is_cmp<'tcx>( cx: &LateContext<'tcx>, - expr_kind: &'tcx ExprKind<'tcx>, + expr: &'tcx Expr<'tcx>, impl_item: &ImplItem<'_>, needs_fully_qualified: &mut bool, ) -> bool { + let impl_item_did = impl_item.owner_id.def_id; if let ExprKind::Call( Expr { kind: ExprKind::Path(some_path), @@ -262,11 +266,17 @@ fn expr_is_cmp<'tcx>( .. }, [cmp_expr], - ) = expr_kind + ) = expr.kind { is_res_lang_ctor(cx, cx.qpath_res(some_path, *some_hir_id), LangItem::OptionSome) // Fix #11178, allow `Self::cmp(self, ..)` too - && self_cmp_call(cx, cmp_expr, impl_item.owner_id.def_id, needs_fully_qualified) + && self_cmp_call(cx, cmp_expr, impl_item_did, needs_fully_qualified) + } else if let ExprKind::MethodCall(_, recv, [], _) = expr.kind { + cx.tcx + .typeck(impl_item_did) + .type_dependent_def_id(expr.hir_id) + .is_some_and(|def_id| is_diag_trait_item(cx, def_id, sym::Into)) + && self_cmp_call(cx, recv, impl_item_did, needs_fully_qualified) } else { false } diff --git a/src/tools/clippy/clippy_lints/src/non_copy_const.rs b/src/tools/clippy/clippy_lints/src/non_copy_const.rs index 9b53608ae7f3c..6d3e77b6b6e97 100644 --- a/src/tools/clippy/clippy_lints/src/non_copy_const.rs +++ b/src/tools/clippy/clippy_lints/src/non_copy_const.rs @@ -263,7 +263,7 @@ impl<'tcx> NonCopyConst<'tcx> { fn is_value_unfrozen_poly(cx: &LateContext<'tcx>, body_id: BodyId, ty: Ty<'tcx>) -> bool { let def_id = body_id.hir_id.owner.to_def_id(); let args = ty::GenericArgs::identity_for_item(cx.tcx, def_id); - let instance = ty::Instance::new(def_id, args); + let instance = ty::Instance::new_raw(def_id, args); let cid = GlobalId { instance, promoted: None, @@ -449,7 +449,7 @@ impl<'tcx> LateLintPass<'tcx> for NonCopyConst<'tcx> { dereferenced_expr = parent_expr; }, - ExprKind::Index(e, _, _) if ptr::eq(&**e, cur_expr) => { + ExprKind::Index(e, _, _) if ptr::eq(&raw const **e, cur_expr) => { // `e[i]` => desugared to `*Index::index(&e, i)`, // meaning `e` must be referenced. // no need to go further up since a method call is involved now. diff --git a/src/tools/clippy/clippy_lints/src/non_octal_unix_permissions.rs b/src/tools/clippy/clippy_lints/src/non_octal_unix_permissions.rs index 852c3885f5689..23a1622f30fff 100644 --- a/src/tools/clippy/clippy_lints/src/non_octal_unix_permissions.rs +++ b/src/tools/clippy/clippy_lints/src/non_octal_unix_permissions.rs @@ -1,10 +1,10 @@ use clippy_utils::diagnostics::span_lint_and_sugg; use clippy_utils::source::{SpanRangeExt, snippet_with_applicability}; +use clippy_utils::sym; use rustc_errors::Applicability; use rustc_hir::{Expr, ExprKind}; use rustc_lint::{LateContext, LateLintPass}; use rustc_session::declare_lint_pass; -use rustc_span::sym; declare_clippy_lint! { /// ### What it does @@ -43,12 +43,12 @@ impl<'tcx> LateLintPass<'tcx> for NonOctalUnixPermissions { match &expr.kind { ExprKind::MethodCall(path, func, [param], _) => { if let Some(adt) = cx.typeck_results().expr_ty(func).peel_refs().ty_adt_def() - && ((path.ident.name.as_str() == "mode" + && ((path.ident.name == sym::mode && matches!( cx.tcx.get_diagnostic_name(adt.did()), Some(sym::FsOpenOptions | sym::DirBuilder) )) - || (path.ident.name.as_str() == "set_mode" + || (path.ident.name == sym::set_mode && cx.tcx.is_diagnostic_item(sym::FsPermissions, adt.did()))) && let ExprKind::Lit(_) = param.kind && param.span.eq_ctxt(expr.span) diff --git a/src/tools/clippy/clippy_lints/src/non_std_lazy_statics.rs b/src/tools/clippy/clippy_lints/src/non_std_lazy_statics.rs index 8305bf345ef19..f6bc9428d65f2 100644 --- a/src/tools/clippy/clippy_lints/src/non_std_lazy_statics.rs +++ b/src/tools/clippy/clippy_lints/src/non_std_lazy_statics.rs @@ -37,7 +37,7 @@ declare_clippy_lint! { /// static FOO: std::sync::LazyLock = std::sync::LazyLock::new(|| "FOO".to_lowercase()); /// static BAR: std::sync::LazyLock = std::sync::LazyLock::new(|| "BAR".to_lowercase()); /// ``` - #[clippy::version = "1.81.0"] + #[clippy::version = "1.86.0"] pub NON_STD_LAZY_STATICS, pedantic, "lazy static that could be replaced by `std::sync::LazyLock`" @@ -121,7 +121,7 @@ impl<'hir> LateLintPass<'hir> for NonStdLazyStatic { cx, NON_STD_LAZY_STATICS, macro_call.span, - "this macro has been superceded by `std::sync::LazyLock`", + "this macro has been superseded by `std::sync::LazyLock`", ); return; } @@ -240,7 +240,7 @@ impl LazyInfo { cx, NON_STD_LAZY_STATICS, self.ty_span_no_args, - "this type has been superceded by `LazyLock` in the standard library", + "this type has been superseded by `LazyLock` in the standard library", |diag| { diag.multipart_suggestion("use `std::sync::LazyLock` instead", suggs, appl); }, diff --git a/src/tools/clippy/clippy_lints/src/non_zero_suggestions.rs b/src/tools/clippy/clippy_lints/src/non_zero_suggestions.rs index 16c4391c0fbea..1b8ab1bdedf8a 100644 --- a/src/tools/clippy/clippy_lints/src/non_zero_suggestions.rs +++ b/src/tools/clippy/clippy_lints/src/non_zero_suggestions.rs @@ -1,12 +1,12 @@ use clippy_utils::diagnostics::span_lint_and_sugg; use clippy_utils::source::snippet; +use clippy_utils::sym; use rustc_ast::ast::BinOpKind; use rustc_errors::Applicability; use rustc_hir::{Expr, ExprKind}; use rustc_lint::{LateContext, LateLintPass}; use rustc_middle::ty::{self, Ty}; use rustc_session::declare_lint_pass; -use rustc_span::symbol::sym; declare_clippy_lint! { /// ### What it does @@ -72,7 +72,7 @@ fn check_non_zero_conversion(cx: &LateContext<'_>, expr: &Expr<'_>, applicabilit && let ExprKind::Path(qpath) = &func.kind && let Some(def_id) = cx.qpath_res(qpath, func.hir_id).opt_def_id() && let ExprKind::MethodCall(rcv_path, receiver, [], _) = &arg.kind - && rcv_path.ident.name.as_str() == "get" + && rcv_path.ident.name == sym::get { let fn_name = cx.tcx.item_name(def_id); let target_ty = cx.typeck_results().expr_ty(expr); @@ -82,11 +82,10 @@ fn check_non_zero_conversion(cx: &LateContext<'_>, expr: &Expr<'_>, applicabilit if let ty::Adt(adt_def, _) = receiver_ty.kind() && adt_def.is_struct() && cx.tcx.get_diagnostic_name(adt_def.did()) == Some(sym::NonZero) + && let Some(target_non_zero_type) = get_target_non_zero_type(target_ty) { - if let Some(target_non_zero_type) = get_target_non_zero_type(target_ty) { - let arg_snippet = get_arg_snippet(cx, arg, rcv_path); - suggest_non_zero_conversion(cx, expr, fn_name, target_non_zero_type, &arg_snippet, applicability); - } + let arg_snippet = get_arg_snippet(cx, arg, rcv_path); + suggest_non_zero_conversion(cx, expr, fn_name, target_non_zero_type, &arg_snippet, applicability); } } } diff --git a/src/tools/clippy/clippy_lints/src/operators/arithmetic_side_effects.rs b/src/tools/clippy/clippy_lints/src/operators/arithmetic_side_effects.rs index 594101427f5a9..a78a342d4fe39 100644 --- a/src/tools/clippy/clippy_lints/src/operators/arithmetic_side_effects.rs +++ b/src/tools/clippy/clippy_lints/src/operators/arithmetic_side_effects.rs @@ -335,9 +335,12 @@ impl<'tcx> LateLintPass<'tcx> for ArithmeticSideEffects { return; } match &expr.kind { - hir::ExprKind::AssignOp(op, lhs, rhs) | hir::ExprKind::Binary(op, lhs, rhs) => { + hir::ExprKind::Binary(op, lhs, rhs) => { self.manage_bin_ops(cx, expr, op.node, lhs, rhs); }, + hir::ExprKind::AssignOp(op, lhs, rhs) => { + self.manage_bin_ops(cx, expr, op.node.into(), lhs, rhs); + }, hir::ExprKind::MethodCall(ps, receiver, args, _) => { self.manage_method_call(args, cx, expr, ps, receiver); }, @@ -354,7 +357,7 @@ impl<'tcx> LateLintPass<'tcx> for ArithmeticSideEffects { let body_owner_kind = cx.tcx.hir_body_owner_kind(body_owner_def_id); if let hir::BodyOwnerKind::Const { .. } | hir::BodyOwnerKind::Static(_) = body_owner_kind { - let body_span = cx.tcx.hir().span_with_body(body_owner); + let body_span = cx.tcx.hir_span_with_body(body_owner); if let Some(span) = self.const_span && span.contains(body_span) { @@ -366,7 +369,7 @@ impl<'tcx> LateLintPass<'tcx> for ArithmeticSideEffects { fn check_body_post(&mut self, cx: &LateContext<'_>, body: &hir::Body<'_>) { let body_owner = cx.tcx.hir_body_owner(body.id()); - let body_span = cx.tcx.hir().span(body_owner); + let body_span = cx.tcx.hir_span(body_owner); if let Some(span) = self.const_span && span.contains(body_span) { diff --git a/src/tools/clippy/clippy_lints/src/operators/cmp_owned.rs b/src/tools/clippy/clippy_lints/src/operators/cmp_owned.rs index cf6b8992973a7..9b2cfd91b8535 100644 --- a/src/tools/clippy/clippy_lints/src/operators/cmp_owned.rs +++ b/src/tools/clippy/clippy_lints/src/operators/cmp_owned.rs @@ -98,7 +98,7 @@ fn check_op(cx: &LateContext<'_>, expr: &Expr<'_>, other: &Expr<'_>, left: bool) let arg_snip = snippet(cx, arg_span, ".."); let expr_snip; let eq_impl; - if with_deref.is_implemented() { + if with_deref.is_implemented() && !arg_ty.peel_refs().is_str() { expr_snip = format!("*{arg_snip}"); eq_impl = with_deref; } else { diff --git a/src/tools/clippy/clippy_lints/src/operators/float_cmp.rs b/src/tools/clippy/clippy_lints/src/operators/float_cmp.rs index 01dc6a27c33e3..ded161c8576a1 100644 --- a/src/tools/clippy/clippy_lints/src/operators/float_cmp.rs +++ b/src/tools/clippy/clippy_lints/src/operators/float_cmp.rs @@ -1,7 +1,7 @@ use clippy_utils::consts::{ConstEvalCtxt, Constant}; use clippy_utils::diagnostics::span_lint_and_then; -use clippy_utils::get_item_name; use clippy_utils::sugg::Sugg; +use clippy_utils::{parent_item_name, sym}; use rustc_errors::Applicability; use rustc_hir::{BinOpKind, Expr, ExprKind, UnOp}; use rustc_lint::LateContext; @@ -34,7 +34,7 @@ pub(crate) fn check<'tcx>( return; } - if let Some(name) = get_item_name(cx, expr) { + if let Some(name) = parent_item_name(cx, expr) { let name = name.as_str(); if name == "eq" || name == "ne" || name == "is_nan" || name.starts_with("eq_") || name.ends_with("_eq") { return; @@ -106,7 +106,7 @@ fn is_signum(cx: &LateContext<'_>, expr: &Expr<'_>) -> bool { } if let ExprKind::MethodCall(method_name, self_arg, [], _) = expr.kind - && method_name.ident.name.as_str() == "signum" + && method_name.ident.name == sym::signum // Check that the receiver of the signum() is a float (expressions[0] is the receiver of // the method call) { diff --git a/src/tools/clippy/clippy_lints/src/operators/float_equality_without_abs.rs b/src/tools/clippy/clippy_lints/src/operators/float_equality_without_abs.rs index 74e0a6333db0f..047a5a0159cb0 100644 --- a/src/tools/clippy/clippy_lints/src/operators/float_equality_without_abs.rs +++ b/src/tools/clippy/clippy_lints/src/operators/float_equality_without_abs.rs @@ -50,7 +50,7 @@ pub(crate) fn check<'tcx>( // format the suggestion let suggestion = format!( "{}.abs()", - sugg::make_assoc(AssocOp::Binary(BinOpKind::Sub), &sug_l, &sug_r).maybe_par() + sugg::make_assoc(AssocOp::Binary(BinOpKind::Sub), &sug_l, &sug_r).maybe_paren() ); // spans the lint span_lint_and_then( diff --git a/src/tools/clippy/clippy_lints/src/operators/identity_op.rs b/src/tools/clippy/clippy_lints/src/operators/identity_op.rs index 0358232282786..e1fd09549a4b8 100644 --- a/src/tools/clippy/clippy_lints/src/operators/identity_op.rs +++ b/src/tools/clippy/clippy_lints/src/operators/identity_op.rs @@ -103,7 +103,7 @@ enum Parens { /// /// e.g. `-(x + y + 0)` cannot be reduced to `-x + y`, as the behavior changes silently. /// e.g. `1u64 + ((x + y + 0i32) as u64)` cannot be reduced to `1u64 + x + y as u64`, since -/// the the cast expression will not apply to the same expression. +/// the cast expression will not apply to the same expression. /// e.g. `0 + if b { 1 } else { 2 } + if b { 3 } else { 4 }` cannot be reduced /// to `if b { 1 } else { 2 } + if b { 3 } else { 4 }` where the `if` could be /// interpreted as a statement. The same behavior happens for `match`, `loop`, diff --git a/src/tools/clippy/clippy_lints/src/operators/mod.rs b/src/tools/clippy/clippy_lints/src/operators/mod.rs index f758d08d36633..d32c062cf56a7 100644 --- a/src/tools/clippy/clippy_lints/src/operators/mod.rs +++ b/src/tools/clippy/clippy_lints/src/operators/mod.rs @@ -913,9 +913,10 @@ impl<'tcx> LateLintPass<'tcx> for Operators { ); }, ExprKind::AssignOp(op, lhs, rhs) => { - self.arithmetic_context.check_binary(cx, e, op.node, lhs, rhs); - misrefactored_assign_op::check(cx, e, op.node, lhs, rhs); - modulo_arithmetic::check(cx, e, op.node, lhs, rhs, false); + let bin_op = op.node.into(); + self.arithmetic_context.check_binary(cx, e, bin_op, lhs, rhs); + misrefactored_assign_op::check(cx, e, bin_op, lhs, rhs); + modulo_arithmetic::check(cx, e, bin_op, lhs, rhs, false); }, ExprKind::Assign(lhs, rhs, _) => { assign_op_pattern::check(cx, e, lhs, rhs); diff --git a/src/tools/clippy/clippy_lints/src/operators/modulo_one.rs b/src/tools/clippy/clippy_lints/src/operators/modulo_one.rs index fc5565e821edd..2e6a071eb1848 100644 --- a/src/tools/clippy/clippy_lints/src/operators/modulo_one.rs +++ b/src/tools/clippy/clippy_lints/src/operators/modulo_one.rs @@ -12,15 +12,15 @@ pub(crate) fn check(cx: &LateContext<'_>, expr: &Expr<'_>, op: BinOpKind, right: span_lint(cx, MODULO_ONE, expr.span, "any number modulo 1 will be 0"); } - if let ty::Int(ity) = cx.typeck_results().expr_ty(right).kind() { - if is_integer_const(cx, right, unsext(cx.tcx, -1, *ity)) { - span_lint( - cx, - MODULO_ONE, - expr.span, - "any number modulo -1 will panic/overflow or result in 0", - ); - } + if let ty::Int(ity) = cx.typeck_results().expr_ty(right).kind() + && is_integer_const(cx, right, unsext(cx.tcx, -1, *ity)) + { + span_lint( + cx, + MODULO_ONE, + expr.span, + "any number modulo -1 will panic/overflow or result in 0", + ); } } } diff --git a/src/tools/clippy/clippy_lints/src/operators/numeric_arithmetic.rs b/src/tools/clippy/clippy_lints/src/operators/numeric_arithmetic.rs index c261fd9bd9cb0..e6be536ca0f4e 100644 --- a/src/tools/clippy/clippy_lints/src/operators/numeric_arithmetic.rs +++ b/src/tools/clippy/clippy_lints/src/operators/numeric_arithmetic.rs @@ -73,12 +73,12 @@ impl Context { match cx.tcx.hir_body_owner_kind(body_owner_def_id) { hir::BodyOwnerKind::Static(_) | hir::BodyOwnerKind::Const { .. } => { - let body_span = cx.tcx.hir().span_with_body(body_owner); + let body_span = cx.tcx.hir_span_with_body(body_owner); - if let Some(span) = self.const_span { - if span.contains(body_span) { - return; - } + if let Some(span) = self.const_span + && span.contains(body_span) + { + return; } self.const_span = Some(body_span); }, @@ -88,12 +88,12 @@ impl Context { pub fn body_post(&mut self, cx: &LateContext<'_>, body: &hir::Body<'_>) { let body_owner = cx.tcx.hir_body_owner(body.id()); - let body_span = cx.tcx.hir().span_with_body(body_owner); + let body_span = cx.tcx.hir_span_with_body(body_owner); - if let Some(span) = self.const_span { - if span.contains(body_span) { - return; - } + if let Some(span) = self.const_span + && span.contains(body_span) + { + return; } self.const_span = None; } diff --git a/src/tools/clippy/clippy_lints/src/operators/op_ref.rs b/src/tools/clippy/clippy_lints/src/operators/op_ref.rs index 378fed481f4fe..0faa7b9e64665 100644 --- a/src/tools/clippy/clippy_lints/src/operators/op_ref.rs +++ b/src/tools/clippy/clippy_lints/src/operators/op_ref.rs @@ -47,12 +47,11 @@ pub(crate) fn check<'tcx>( let rty = cx.typeck_results().expr_ty(r); let lcpy = is_copy(cx, lty); let rcpy = is_copy(cx, rty); - if let Some((self_ty, other_ty)) = in_impl(cx, e, trait_id) { - if (are_equal(cx, rty, self_ty) && are_equal(cx, lty, other_ty)) - || (are_equal(cx, rty, other_ty) && are_equal(cx, lty, self_ty)) - { - return; // Don't lint - } + if let Some((self_ty, other_ty)) = in_impl(cx, e, trait_id) + && ((are_equal(cx, rty, self_ty) && are_equal(cx, lty, other_ty)) + || (are_equal(cx, rty, other_ty) && are_equal(cx, lty, self_ty))) + { + return; // Don't lint } // either operator autorefs or both args are copyable if (requires_ref || (lcpy && rcpy)) && implements_trait(cx, lty, trait_id, &[rty.into()]) { @@ -86,7 +85,7 @@ pub(crate) fn check<'tcx>( left.span, "use the left value directly", lsnip, - Applicability::MaybeIncorrect, // FIXME #2597 + Applicability::MachineApplicable, ); }, ); @@ -105,7 +104,7 @@ pub(crate) fn check<'tcx>( right.span, "use the right value directly", rsnip, - Applicability::MaybeIncorrect, // FIXME #2597 + Applicability::MachineApplicable, ); }, ); @@ -137,7 +136,7 @@ pub(crate) fn check<'tcx>( left.span, "use the left value directly", lsnip, - Applicability::MaybeIncorrect, // FIXME #2597 + Applicability::MachineApplicable, ); }, ); @@ -164,7 +163,7 @@ pub(crate) fn check<'tcx>( right.span, "use the right value directly", rsnip, - Applicability::MaybeIncorrect, // FIXME #2597 + Applicability::MachineApplicable, ); }); } diff --git a/src/tools/clippy/clippy_lints/src/operators/verbose_bit_mask.rs b/src/tools/clippy/clippy_lints/src/operators/verbose_bit_mask.rs index a6aba33e431a4..1477378914120 100644 --- a/src/tools/clippy/clippy_lints/src/operators/verbose_bit_mask.rs +++ b/src/tools/clippy/clippy_lints/src/operators/verbose_bit_mask.rs @@ -32,7 +32,7 @@ pub(super) fn check<'tcx>( e.span, "bit mask could be simplified with a call to `trailing_zeros`", |diag| { - let sugg = Sugg::hir(cx, left1, "...").maybe_par(); + let sugg = Sugg::hir(cx, left1, "...").maybe_paren(); diag.span_suggestion( e.span, "try", diff --git a/src/tools/clippy/clippy_lints/src/option_if_let_else.rs b/src/tools/clippy/clippy_lints/src/option_if_let_else.rs index 6f302ea196217..9487cec87efb8 100644 --- a/src/tools/clippy/clippy_lints/src/option_if_let_else.rs +++ b/src/tools/clippy/clippy_lints/src/option_if_let_else.rs @@ -4,8 +4,8 @@ use clippy_utils::diagnostics::span_lint_and_sugg; use clippy_utils::sugg::Sugg; use clippy_utils::ty::is_copy; use clippy_utils::{ - CaptureKind, can_move_expr_to_closure, eager_or_lazy, higher, is_else_clause, is_in_const_context, - is_res_lang_ctor, peel_blocks, peel_hir_expr_while, + CaptureKind, can_move_expr_to_closure, eager_or_lazy, expr_requires_coercion, higher, is_else_clause, + is_in_const_context, is_res_lang_ctor, peel_blocks, peel_hir_expr_while, }; use rustc_data_structures::fx::FxHashSet; use rustc_errors::Applicability; @@ -106,7 +106,7 @@ struct OptionOccurrence { fn format_option_in_sugg(cond_sugg: Sugg<'_>, as_ref: bool, as_mut: bool) -> String { format!( "{}{}", - cond_sugg.maybe_par(), + cond_sugg.maybe_paren(), if as_mut { ".as_mut()" } else if as_ref { @@ -212,6 +212,15 @@ fn try_get_option_occurrence<'tcx>( } } + let some_body_ty = cx.typeck_results().expr_ty(some_body); + let none_body_ty = cx.typeck_results().expr_ty(none_body); + // Check if coercion is needed for the `None` arm. If so, we cannot suggest because it will + // introduce a type mismatch. A special case is when both arms have the same type, then + // coercion is fine. + if some_body_ty != none_body_ty && expr_requires_coercion(cx, none_body) { + return None; + } + let mut app = Applicability::Unspecified; let (none_body, is_argless_call) = match none_body.kind { diff --git a/src/tools/clippy/clippy_lints/src/partial_pub_fields.rs b/src/tools/clippy/clippy_lints/src/partial_pub_fields.rs index 267e2067e101a..cda752d003fa0 100644 --- a/src/tools/clippy/clippy_lints/src/partial_pub_fields.rs +++ b/src/tools/clippy/clippy_lints/src/partial_pub_fields.rs @@ -41,7 +41,7 @@ declare_lint_pass!(PartialPubFields => [PARTIAL_PUB_FIELDS]); impl EarlyLintPass for PartialPubFields { fn check_item(&mut self, cx: &EarlyContext<'_>, item: &Item) { - let ItemKind::Struct(ref st, _) = item.kind else { + let ItemKind::Struct(_, ref st, _) = item.kind else { return; }; diff --git a/src/tools/clippy/clippy_lints/src/partialeq_ne_impl.rs b/src/tools/clippy/clippy_lints/src/partialeq_ne_impl.rs index 65671b478ba74..8eaf65e63065e 100644 --- a/src/tools/clippy/clippy_lints/src/partialeq_ne_impl.rs +++ b/src/tools/clippy/clippy_lints/src/partialeq_ne_impl.rs @@ -38,7 +38,7 @@ impl<'tcx> LateLintPass<'tcx> for PartialEqNeImpl { items: impl_items, .. }) = item.kind - && !cx.tcx.has_attr(item.owner_id, sym::automatically_derived) + && !cx.tcx.is_automatically_derived(item.owner_id.to_def_id()) && let Some(eq_trait) = cx.tcx.lang_items().eq_trait() && trait_ref.path.res.def_id() == eq_trait { diff --git a/src/tools/clippy/clippy_lints/src/partialeq_to_none.rs b/src/tools/clippy/clippy_lints/src/partialeq_to_none.rs index 6d4216970cc4d..9b9024c810575 100644 --- a/src/tools/clippy/clippy_lints/src/partialeq_to_none.rs +++ b/src/tools/clippy/clippy_lints/src/partialeq_to_none.rs @@ -81,7 +81,7 @@ impl<'tcx> LateLintPass<'tcx> for PartialeqToNone { let sugg = format!( "{}.{}", sugg::Sugg::hir_with_applicability(cx, peel_ref_operators(cx, scrutinee), "..", &mut applicability) - .maybe_par(), + .maybe_paren(), if is_eq { "is_none()" } else { "is_some()" } ); diff --git a/src/tools/clippy/clippy_lints/src/pass_by_ref_or_value.rs b/src/tools/clippy/clippy_lints/src/pass_by_ref_or_value.rs index 0a8e288564875..5d30b66def2c8 100644 --- a/src/tools/clippy/clippy_lints/src/pass_by_ref_or_value.rs +++ b/src/tools/clippy/clippy_lints/src/pass_by_ref_or_value.rs @@ -178,19 +178,18 @@ impl PassByRefOrValue { && size <= self.ref_min_size && let hir::TyKind::Ref(_, MutTy { ty: decl_ty, .. }) = input.kind { - if let Some(typeck) = cx.maybe_typeck_results() { + if let Some(typeck) = cx.maybe_typeck_results() // Don't lint if a raw pointer is created. // TODO: Limit the check only to raw pointers to the argument (or part of the argument) // which escape the current function. - if typeck.node_types().items().any(|(_, &ty)| ty.is_raw_ptr()) + && (typeck.node_types().items().any(|(_, &ty)| ty.is_raw_ptr()) || typeck .adjustments() .items() .flat_map(|(_, a)| a) - .any(|a| matches!(a.kind, Adjust::Pointer(PointerCoercion::UnsafeFnPointer))) - { - continue; - } + .any(|a| matches!(a.kind, Adjust::Pointer(PointerCoercion::UnsafeFnPointer)))) + { + continue; } let value_type = if fn_body.and_then(|body| body.params.get(index)).is_some_and(is_self) { "self".into() @@ -282,12 +281,11 @@ impl<'tcx> LateLintPass<'tcx> for PassByRefOrValue { } let attrs = cx.tcx.hir_attrs(hir_id); for a in attrs { - if let Some(meta_items) = a.meta_item_list() { - if a.has_name(sym::proc_macro_derive) - || (a.has_name(sym::inline) && attr::list_contains_name(&meta_items, sym::always)) - { - return; - } + if let Some(meta_items) = a.meta_item_list() + && (a.has_name(sym::proc_macro_derive) + || (a.has_name(sym::inline) && attr::list_contains_name(&meta_items, sym::always))) + { + return; } } }, @@ -296,13 +294,13 @@ impl<'tcx> LateLintPass<'tcx> for PassByRefOrValue { } // Exclude non-inherent impls - if let Node::Item(item) = cx.tcx.parent_hir_node(hir_id) { - if matches!( + if let Node::Item(item) = cx.tcx.parent_hir_node(hir_id) + && matches!( item.kind, ItemKind::Impl(Impl { of_trait: Some(_), .. }) | ItemKind::Trait(..) - ) { - return; - } + ) + { + return; } self.check_poly_fn(cx, def_id, decl, Some(span)); diff --git a/src/tools/clippy/clippy_lints/src/pathbuf_init_then_push.rs b/src/tools/clippy/clippy_lints/src/pathbuf_init_then_push.rs index b653b459b04c8..35caac855cf61 100644 --- a/src/tools/clippy/clippy_lints/src/pathbuf_init_then_push.rs +++ b/src/tools/clippy/clippy_lints/src/pathbuf_init_then_push.rs @@ -173,16 +173,15 @@ impl<'tcx> LateLintPass<'tcx> for PathbufThenPush<'tcx> { } fn check_stmt(&mut self, cx: &LateContext<'tcx>, stmt: &'tcx Stmt<'_>) { - if let Some(mut searcher) = self.searcher.take() { - if let StmtKind::Expr(expr) | StmtKind::Semi(expr) = stmt.kind - && let ExprKind::MethodCall(name, self_arg, [arg_expr], _) = expr.kind - && path_to_local_id(self_arg, searcher.local_id) - && name.ident.as_str() == "push" - { - searcher.err_span = searcher.err_span.to(stmt.span); - searcher.arg = Some(*arg_expr); - searcher.display_err(cx); - } + if let Some(mut searcher) = self.searcher.take() + && let StmtKind::Expr(expr) | StmtKind::Semi(expr) = stmt.kind + && let ExprKind::MethodCall(name, self_arg, [arg_expr], _) = expr.kind + && path_to_local_id(self_arg, searcher.local_id) + && name.ident.as_str() == "push" + { + searcher.err_span = searcher.err_span.to(stmt.span); + searcher.arg = Some(*arg_expr); + searcher.display_err(cx); } } diff --git a/src/tools/clippy/clippy_lints/src/pattern_type_mismatch.rs b/src/tools/clippy/clippy_lints/src/pattern_type_mismatch.rs index 8f1a1ee76c6a6..19d9acfc9305a 100644 --- a/src/tools/clippy/clippy_lints/src/pattern_type_mismatch.rs +++ b/src/tools/clippy/clippy_lints/src/pattern_type_mismatch.rs @@ -177,17 +177,16 @@ fn find_first_mismatch(cx: &LateContext<'_>, pat: &Pat<'_>) -> Option<(Span, Mut PatKind::Or([p, ..]) => p, _ => p, }; - if let Some(adjustments) = cx.typeck_results().pat_adjustments().get(adjust_pat.hir_id) { - if let [first, ..] = **adjustments { - if let ty::Ref(.., mutability) = *first.kind() { - let level = if p.hir_id == pat.hir_id { - Level::Top - } else { - Level::Lower - }; - result = Some((p.span, mutability, level)); - } - } + if let Some(adjustments) = cx.typeck_results().pat_adjustments().get(adjust_pat.hir_id) + && let [first, ..] = **adjustments + && let ty::Ref(.., mutability) = *first.source.kind() + { + let level = if p.hir_id == pat.hir_id { + Level::Top + } else { + Level::Lower + }; + result = Some((p.span, mutability, level)); } result.is_none() }); diff --git a/src/tools/clippy/clippy_lints/src/permissions_set_readonly_false.rs b/src/tools/clippy/clippy_lints/src/permissions_set_readonly_false.rs index dc142b6e15771..da56a785007c4 100644 --- a/src/tools/clippy/clippy_lints/src/permissions_set_readonly_false.rs +++ b/src/tools/clippy/clippy_lints/src/permissions_set_readonly_false.rs @@ -1,10 +1,10 @@ use clippy_utils::diagnostics::span_lint_and_then; +use clippy_utils::sym; use clippy_utils::ty::is_type_diagnostic_item; use rustc_ast::ast::LitKind; use rustc_hir::{Expr, ExprKind}; use rustc_lint::{LateContext, LateLintPass}; use rustc_session::declare_lint_pass; -use rustc_span::sym; declare_clippy_lint! { /// ### What it does @@ -33,7 +33,7 @@ impl<'tcx> LateLintPass<'tcx> for PermissionsSetReadonlyFalse { if let ExprKind::MethodCall(path, receiver, [arg], _) = &expr.kind && let ExprKind::Lit(lit) = &arg.kind && LitKind::Bool(false) == lit.node - && path.ident.name.as_str() == "set_readonly" + && path.ident.name == sym::set_readonly && is_type_diagnostic_item(cx, cx.typeck_results().expr_ty(receiver), sym::FsPermissions) { span_lint_and_then( diff --git a/src/tools/clippy/clippy_lints/src/ptr.rs b/src/tools/clippy/clippy_lints/src/ptr.rs index 50ef56db167c1..9149406642d67 100644 --- a/src/tools/clippy/clippy_lints/src/ptr.rs +++ b/src/tools/clippy/clippy_lints/src/ptr.rs @@ -3,7 +3,7 @@ use clippy_utils::source::SpanRangeExt; use clippy_utils::sugg::Sugg; use clippy_utils::visitors::contains_unsafe_block; use clippy_utils::{get_expr_use_or_unification_node, is_lint_allowed, path_def_id, path_to_local, std_or_core}; -use hir::LifetimeName; +use hir::LifetimeKind; use rustc_abi::ExternAbi; use rustc_errors::{Applicability, MultiSpan}; use rustc_hir::hir_id::{HirId, HirIdMap}; @@ -264,8 +264,8 @@ impl<'tcx> LateLintPass<'tcx> for Ptr { is_null_path(cx, l), is_null_path(cx, r), ) { - (false, true, false) if let Some(sugg) = Sugg::hir_opt(cx, r) => sugg.maybe_par(), - (false, false, true) if let Some(sugg) = Sugg::hir_opt(cx, l) => sugg.maybe_par(), + (false, true, false) if let Some(sugg) = Sugg::hir_opt(cx, r) => sugg.maybe_paren(), + (false, false, true) if let Some(sugg) = Sugg::hir_opt(cx, l) => sugg.maybe_paren(), _ => return check_ptr_eq(cx, expr, op.node, l, r), }; @@ -432,7 +432,7 @@ fn check_fn_args<'cx, 'tcx: 'cx>( } None }) { - if let LifetimeName::Param(param_def_id) = lifetime.res + if let LifetimeKind::Param(param_def_id) = lifetime.kind && !lifetime.is_anonymous() && fn_sig .output() @@ -498,29 +498,33 @@ fn check_fn_args<'cx, 'tcx: 'cx>( } fn check_mut_from_ref<'tcx>(cx: &LateContext<'tcx>, sig: &FnSig<'_>, body: Option<&Body<'tcx>>) { - if let FnRetTy::Return(ty) = sig.decl.output - && let Some((out, Mutability::Mut, _)) = get_ref_lm(ty) - { + let FnRetTy::Return(ty) = sig.decl.output else { return }; + for (out, mutability, out_span) in get_lifetimes(ty) { + if mutability != Some(Mutability::Mut) { + continue; + } let out_region = cx.tcx.named_bound_var(out.hir_id); - let args: Option> = sig + // `None` if one of the types contains `&'a mut T` or `T<'a>`. + // Else, contains all the locations of `&'a T` types. + let args_immut_refs: Option> = sig .decl .inputs .iter() - .filter_map(get_ref_lm) + .flat_map(get_lifetimes) .filter(|&(lt, _, _)| cx.tcx.named_bound_var(lt.hir_id) == out_region) - .map(|(_, mutability, span)| (mutability == Mutability::Not).then_some(span)) + .map(|(_, mutability, span)| (mutability == Some(Mutability::Not)).then_some(span)) .collect(); - if let Some(args) = args - && !args.is_empty() + if let Some(args_immut_refs) = args_immut_refs + && !args_immut_refs.is_empty() && body.is_none_or(|body| sig.header.is_unsafe() || contains_unsafe_block(cx, body.value)) { span_lint_and_then( cx, MUT_FROM_REF, - ty.span, + out_span, "mutable borrow from immutable input(s)", |diag| { - let ms = MultiSpan::from_spans(args); + let ms = MultiSpan::from_spans(args_immut_refs); diag.span_note(ms, "immutable borrow here"); }, ); @@ -686,14 +690,38 @@ fn matches_preds<'tcx>( }) } -fn get_ref_lm<'tcx>(ty: &'tcx hir::Ty<'tcx>) -> Option<(&'tcx Lifetime, Mutability, Span)> { - if let TyKind::Ref(lt, ref m) = ty.kind { - Some((lt, m.mutbl, ty.span)) - } else { - None +struct LifetimeVisitor<'tcx> { + result: Vec<(&'tcx Lifetime, Option, Span)>, +} + +impl<'tcx> Visitor<'tcx> for LifetimeVisitor<'tcx> { + fn visit_ty(&mut self, ty: &'tcx hir::Ty<'tcx, hir::AmbigArg>) { + if let TyKind::Ref(lt, ref m) = ty.kind { + self.result.push((lt, Some(m.mutbl), ty.span)); + } + hir::intravisit::walk_ty(self, ty); + } + + fn visit_generic_arg(&mut self, generic_arg: &'tcx GenericArg<'tcx>) { + if let GenericArg::Lifetime(lt) = generic_arg { + self.result.push((lt, None, generic_arg.span())); + } + hir::intravisit::walk_generic_arg(self, generic_arg); } } +/// Visit `ty` and collect the all the lifetimes appearing in it, implicit or not. +/// +/// The second field of the vector's elements indicate if the lifetime is attached to a +/// shared reference, a mutable reference, or neither. +fn get_lifetimes<'tcx>(ty: &'tcx hir::Ty<'tcx>) -> Vec<(&'tcx Lifetime, Option, Span)> { + use hir::intravisit::VisitorExt as _; + + let mut visitor = LifetimeVisitor { result: Vec::new() }; + visitor.visit_ty_unambig(ty); + visitor.result +} + fn is_null_path(cx: &LateContext<'_>, expr: &Expr<'_>) -> bool { if let ExprKind::Call(pathexp, []) = expr.kind { path_def_id(cx, pathexp) @@ -715,9 +743,9 @@ fn check_ptr_eq<'tcx>( } // Remove one level of usize conversion if any - let (left, right) = match (expr_as_cast_to_usize(cx, left), expr_as_cast_to_usize(cx, right)) { - (Some(lhs), Some(rhs)) => (lhs, rhs), - _ => (left, right), + let (left, right, usize_peeled) = match (expr_as_cast_to_usize(cx, left), expr_as_cast_to_usize(cx, right)) { + (Some(lhs), Some(rhs)) => (lhs, rhs, true), + _ => (left, right, false), }; // This lint concerns raw pointers @@ -726,10 +754,16 @@ fn check_ptr_eq<'tcx>( return; } - let (left_var, right_var) = (peel_raw_casts(cx, left, left_ty), peel_raw_casts(cx, right, right_ty)); + let ((left_var, left_casts_peeled), (right_var, right_casts_peeled)) = + (peel_raw_casts(cx, left, left_ty), peel_raw_casts(cx, right, right_ty)); + + if !(usize_peeled || left_casts_peeled || right_casts_peeled) { + return; + } - if let Some(left_snip) = left_var.span.get_source_text(cx) - && let Some(right_snip) = right_var.span.get_source_text(cx) + let mut app = Applicability::MachineApplicable; + let left_snip = Sugg::hir_with_context(cx, left_var, expr.span.ctxt(), "_", &mut app); + let right_snip = Sugg::hir_with_context(cx, right_var, expr.span.ctxt(), "_", &mut app); { let Some(top_crate) = std_or_core(cx) else { return }; let invert = if op == BinOpKind::Eq { "" } else { "!" }; @@ -740,7 +774,7 @@ fn check_ptr_eq<'tcx>( format!("use `{top_crate}::ptr::eq` when comparing raw pointers"), "try", format!("{invert}{top_crate}::ptr::eq({left_snip}, {right_snip})"), - Applicability::MachineApplicable, + app, ); } } @@ -748,7 +782,8 @@ fn check_ptr_eq<'tcx>( // If the given expression is a cast to a usize, return the lhs of the cast // E.g., `foo as *const _ as usize` returns `foo as *const _`. fn expr_as_cast_to_usize<'tcx>(cx: &LateContext<'tcx>, cast_expr: &'tcx Expr<'_>) -> Option<&'tcx Expr<'tcx>> { - if cx.typeck_results().expr_ty(cast_expr) == cx.tcx.types.usize + if !cast_expr.span.from_expansion() + && cx.typeck_results().expr_ty(cast_expr) == cx.tcx.types.usize && let ExprKind::Cast(expr, _) = cast_expr.kind { Some(expr) @@ -757,16 +792,18 @@ fn expr_as_cast_to_usize<'tcx>(cx: &LateContext<'tcx>, cast_expr: &'tcx Expr<'_> } } -// Peel raw casts if the remaining expression can be coerced to it -fn peel_raw_casts<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'tcx>, expr_ty: Ty<'tcx>) -> &'tcx Expr<'tcx> { - if let ExprKind::Cast(inner, _) = expr.kind +// Peel raw casts if the remaining expression can be coerced to it, and whether casts have been +// peeled or not. +fn peel_raw_casts<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'tcx>, expr_ty: Ty<'tcx>) -> (&'tcx Expr<'tcx>, bool) { + if !expr.span.from_expansion() + && let ExprKind::Cast(inner, _) = expr.kind && let ty::RawPtr(target_ty, _) = expr_ty.kind() && let inner_ty = cx.typeck_results().expr_ty(inner) && let ty::RawPtr(inner_target_ty, _) | ty::Ref(_, inner_target_ty, _) = inner_ty.kind() && target_ty == inner_target_ty { - peel_raw_casts(cx, inner, inner_ty) + (peel_raw_casts(cx, inner, inner_ty).0, true) } else { - expr + (expr, false) } } diff --git a/src/tools/clippy/clippy_lints/src/ptr_offset_with_cast.rs b/src/tools/clippy/clippy_lints/src/ptr_offset_with_cast.rs index 68ae575c9063f..d8d813f9846d5 100644 --- a/src/tools/clippy/clippy_lints/src/ptr_offset_with_cast.rs +++ b/src/tools/clippy/clippy_lints/src/ptr_offset_with_cast.rs @@ -1,10 +1,10 @@ use clippy_utils::diagnostics::{span_lint, span_lint_and_sugg}; use clippy_utils::source::SpanRangeExt; +use clippy_utils::sym; use rustc_errors::Applicability; use rustc_hir::{Expr, ExprKind}; use rustc_lint::{LateContext, LateLintPass}; use rustc_session::declare_lint_pass; -use rustc_span::sym; use std::fmt; declare_clippy_lint! { @@ -77,10 +77,10 @@ impl<'tcx> LateLintPass<'tcx> for PtrOffsetWithCast { // If the given expression is a cast from a usize, return the lhs of the cast fn expr_as_cast_from_usize<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'tcx>) -> Option<&'tcx Expr<'tcx>> { - if let ExprKind::Cast(cast_lhs_expr, _) = expr.kind { - if is_expr_ty_usize(cx, cast_lhs_expr) { - return Some(cast_lhs_expr); - } + if let ExprKind::Cast(cast_lhs_expr, _) = expr.kind + && is_expr_ty_usize(cx, cast_lhs_expr) + { + return Some(cast_lhs_expr); } None } @@ -91,14 +91,14 @@ fn expr_as_ptr_offset_call<'tcx>( cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>, ) -> Option<(&'tcx Expr<'tcx>, &'tcx Expr<'tcx>, Method)> { - if let ExprKind::MethodCall(path_segment, arg_0, [arg_1], _) = &expr.kind { - if is_expr_ty_raw_ptr(cx, arg_0) { - if path_segment.ident.name == sym::offset { - return Some((arg_0, arg_1, Method::Offset)); - } - if path_segment.ident.name.as_str() == "wrapping_offset" { - return Some((arg_0, arg_1, Method::WrappingOffset)); - } + if let ExprKind::MethodCall(path_segment, arg_0, [arg_1], _) = &expr.kind + && is_expr_ty_raw_ptr(cx, arg_0) + { + if path_segment.ident.name == sym::offset { + return Some((arg_0, arg_1, Method::Offset)); + } + if path_segment.ident.name == sym::wrapping_offset { + return Some((arg_0, arg_1, Method::WrappingOffset)); } } None diff --git a/src/tools/clippy/clippy_lints/src/question_mark.rs b/src/tools/clippy/clippy_lints/src/question_mark.rs index a80e1f79bbc77..c02e5e0621c9f 100644 --- a/src/tools/clippy/clippy_lints/src/question_mark.rs +++ b/src/tools/clippy/clippy_lints/src/question_mark.rs @@ -5,11 +5,12 @@ use clippy_config::types::MatchLintBehaviour; use clippy_utils::diagnostics::span_lint_and_sugg; use clippy_utils::msrvs::{self, Msrv}; use clippy_utils::source::snippet_with_applicability; +use clippy_utils::sugg::Sugg; use clippy_utils::ty::{implements_trait, is_type_diagnostic_item}; use clippy_utils::{ eq_expr_value, higher, is_else_clause, is_in_const_context, is_lint_allowed, is_path_lang_item, is_res_lang_ctor, pat_and_expr_can_be_question_mark, path_res, path_to_local, path_to_local_id, peel_blocks, peel_blocks_with_stmt, - span_contains_cfg, span_contains_comment, + span_contains_cfg, span_contains_comment, sym, }; use rustc_errors::Applicability; use rustc_hir::LangItem::{self, OptionNone, OptionSome, ResultErr, ResultOk}; @@ -21,15 +22,14 @@ use rustc_hir::{ use rustc_lint::{LateContext, LateLintPass}; use rustc_middle::ty::{self, Ty}; use rustc_session::impl_lint_pass; -use rustc_span::sym; use rustc_span::symbol::Symbol; declare_clippy_lint! { /// ### What it does - /// Checks for expressions that could be replaced by the question mark operator. + /// Checks for expressions that could be replaced by the `?` operator. /// /// ### Why is this bad? - /// Question mark usage is more idiomatic. + /// Using the `?` operator is shorter and more idiomatic. /// /// ### Example /// ```ignore @@ -46,7 +46,7 @@ declare_clippy_lint! { #[clippy::version = "pre 1.29.0"] pub QUESTION_MARK, style, - "checks for expressions that could be replaced by the question mark operator" + "checks for expressions that could be replaced by the `?` operator" } pub struct QuestionMark { @@ -144,7 +144,7 @@ fn check_let_some_else_return_none(cx: &LateContext<'_>, stmt: &Stmt<'_>) { && !span_contains_comment(cx.tcx.sess.source_map(), els.span) { let mut applicability = Applicability::MaybeIncorrect; - let init_expr_str = snippet_with_applicability(cx, init_expr.span, "..", &mut applicability); + let init_expr_str = Sugg::hir_with_applicability(cx, init_expr, "..", &mut applicability).maybe_paren(); // Take care when binding is `ref` let sugg = if let PatKind::Binding( BindingMode(ByRef::Yes(ref_mutability), binding_mutability), @@ -206,8 +206,8 @@ fn is_early_return(smbl: Symbol, cx: &LateContext<'_>, if_block: &IfBlockType<'_ is_type_diagnostic_item(cx, caller_ty, smbl) && expr_return_none_or_err(smbl, cx, if_then, caller, None) && match smbl { - sym::Option => call_sym.as_str() == "is_none", - sym::Result => call_sym.as_str() == "is_err", + sym::Option => call_sym == sym::is_none, + sym::Result => call_sym == sym::is_err, _ => false, } }, @@ -279,7 +279,7 @@ fn expr_return_none_or_err( /// } /// ``` /// -/// If it matches, it will suggest to use the question mark operator instead +/// If it matches, it will suggest to use the `?` operator instead fn check_is_none_or_err_and_early_return<'tcx>(cx: &LateContext<'tcx>, expr: &Expr<'tcx>) { if let Some(higher::If { cond, then, r#else }) = higher::If::hir(expr) && !is_else_clause(cx.tcx, expr) diff --git a/src/tools/clippy/clippy_lints/src/question_mark_used.rs b/src/tools/clippy/clippy_lints/src/question_mark_used.rs index 0a974bf9d2f71..96ea485d76936 100644 --- a/src/tools/clippy/clippy_lints/src/question_mark_used.rs +++ b/src/tools/clippy/clippy_lints/src/question_mark_used.rs @@ -7,10 +7,10 @@ use rustc_session::declare_lint_pass; declare_clippy_lint! { /// ### What it does - /// Checks for expressions that use the question mark operator and rejects them. + /// Checks for expressions that use the `?` operator and rejects them. /// /// ### Why restrict this? - /// Sometimes code wants to avoid the question mark operator because for instance a local + /// Sometimes code wants to avoid the `?` operator because for instance a local /// block requires a macro to re-throw errors to attach additional information to the /// error. /// @@ -27,7 +27,7 @@ declare_clippy_lint! { #[clippy::version = "1.69.0"] pub QUESTION_MARK_USED, restriction, - "complains if the question mark operator is used" + "checks if the `?` operator is used" } declare_lint_pass!(QuestionMarkUsed => [QUESTION_MARK_USED]); @@ -40,15 +40,9 @@ impl<'tcx> LateLintPass<'tcx> for QuestionMarkUsed { } #[expect(clippy::collapsible_span_lint_calls, reason = "rust-clippy#7797")] - span_lint_and_then( - cx, - QUESTION_MARK_USED, - expr.span, - "question mark operator was used", - |diag| { - diag.help("consider using a custom macro or match expression"); - }, - ); + span_lint_and_then(cx, QUESTION_MARK_USED, expr.span, "the `?` operator was used", |diag| { + diag.help("consider using a custom macro or match expression"); + }); } } } diff --git a/src/tools/clippy/clippy_lints/src/ranges.rs b/src/tools/clippy/clippy_lints/src/ranges.rs index cc423eca74fbe..d292ed86ea4c6 100644 --- a/src/tools/clippy/clippy_lints/src/ranges.rs +++ b/src/tools/clippy/clippy_lints/src/ranges.rs @@ -179,10 +179,10 @@ impl_lint_pass!(Ranges => [ impl<'tcx> LateLintPass<'tcx> for Ranges { fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) { - if let ExprKind::Binary(ref op, l, r) = expr.kind { - if self.msrv.meets(cx, msrvs::RANGE_CONTAINS) { - check_possible_range_contains(cx, op.node, l, r, expr, expr.span); - } + if let ExprKind::Binary(ref op, l, r) = expr.kind + && self.msrv.meets(cx, msrvs::RANGE_CONTAINS) + { + check_possible_range_contains(cx, op.node, l, r, expr, expr.span); } check_exclusive_range_plus_one(cx, expr); @@ -327,18 +327,18 @@ fn check_range_bounds<'a, 'tcx>(cx: &'a LateContext<'tcx>, ex: &'a Expr<'_>) -> inc: inclusive, }); } - } else if let Some(id) = path_to_local(r) { - if let Some(c) = ConstEvalCtxt::new(cx).eval(l) { - return Some(RangeBounds { - val: c, - expr: l, - id, - name_span: r.span, - val_span: l.span, - ord: ordering.reverse(), - inc: inclusive, - }); - } + } else if let Some(id) = path_to_local(r) + && let Some(c) = ConstEvalCtxt::new(cx).eval(l) + { + return Some(RangeBounds { + val: c, + expr: l, + id, + name_span: r.span, + val_span: l.span, + ord: ordering.reverse(), + inc: inclusive, + }); } } None @@ -361,8 +361,8 @@ fn check_exclusive_range_plus_one(cx: &LateContext<'_>, expr: &Expr<'_>) { span, "an inclusive range would be more readable", |diag| { - let start = start.map_or(String::new(), |x| Sugg::hir(cx, x, "x").maybe_par().to_string()); - let end = Sugg::hir(cx, y, "y").maybe_par(); + let start = start.map_or(String::new(), |x| Sugg::hir(cx, x, "x").maybe_paren().to_string()); + let end = Sugg::hir(cx, y, "y").maybe_paren(); match span.with_source_text(cx, |src| src.starts_with('(') && src.ends_with(')')) { Some(true) => { diag.span_suggestion(span, "use", format!("({start}..={end})"), Applicability::MaybeIncorrect); @@ -398,8 +398,8 @@ fn check_inclusive_range_minus_one(cx: &LateContext<'_>, expr: &Expr<'_>) { expr.span, "an exclusive range would be more readable", |diag| { - let start = start.map_or(String::new(), |x| Sugg::hir(cx, x, "x").maybe_par().to_string()); - let end = Sugg::hir(cx, y, "y").maybe_par(); + let start = start.map_or(String::new(), |x| Sugg::hir(cx, x, "x").maybe_paren().to_string()); + let end = Sugg::hir(cx, y, "y").maybe_paren(); diag.span_suggestion( expr.span, "use", diff --git a/src/tools/clippy/clippy_lints/src/raw_strings.rs b/src/tools/clippy/clippy_lints/src/raw_strings.rs index c6e6e782f9d45..6a79cae32a596 100644 --- a/src/tools/clippy/clippy_lints/src/raw_strings.rs +++ b/src/tools/clippy/clippy_lints/src/raw_strings.rs @@ -138,7 +138,7 @@ impl RawStrings { ); }, ); - if !matches!(cx.get_lint_level(NEEDLESS_RAW_STRINGS), rustc_lint::Allow) { + if !matches!(cx.get_lint_level(NEEDLESS_RAW_STRINGS).level, rustc_lint::Allow) { return; } } diff --git a/src/tools/clippy/clippy_lints/src/rc_clone_in_vec_init.rs b/src/tools/clippy/clippy_lints/src/rc_clone_in_vec_init.rs index 6bb7650a7e1cf..689a2ac4c6aeb 100644 --- a/src/tools/clippy/clippy_lints/src/rc_clone_in_vec_init.rs +++ b/src/tools/clippy/clippy_lints/src/rc_clone_in_vec_init.rs @@ -1,14 +1,14 @@ use clippy_utils::diagnostics::span_lint_and_then; use clippy_utils::higher::VecArgs; -use clippy_utils::last_path_segment; use clippy_utils::macros::root_macro_call_first_node; use clippy_utils::source::{indent_of, snippet}; +use clippy_utils::{last_path_segment, sym}; use rustc_errors::Applicability; use rustc_hir::{Expr, ExprKind, QPath, TyKind}; use rustc_lint::{LateContext, LateLintPass}; use rustc_middle::ty; use rustc_session::declare_lint_pass; -use rustc_span::{Span, Symbol, sym}; +use rustc_span::{Span, Symbol}; declare_clippy_lint! { /// ### What it does @@ -135,7 +135,7 @@ fn ref_init(cx: &LateContext<'_>, expr: &Expr<'_>) -> Option<(Symbol, Span)> { if let ty::Adt(adt, _) = *cx.typeck_results().expr_ty(expr).kind() && matches!(cx.tcx.get_diagnostic_name(adt.did()), Some(sym::RcWeak | sym::ArcWeak)) { - return Some((Symbol::intern("Weak"), func.span)); + return Some((sym::Weak, func.span)); } } diff --git a/src/tools/clippy/clippy_lints/src/redundant_async_block.rs b/src/tools/clippy/clippy_lints/src/redundant_async_block.rs index 8289ec47bc7e1..d2442ad0f373a 100644 --- a/src/tools/clippy/clippy_lints/src/redundant_async_block.rs +++ b/src/tools/clippy/clippy_lints/src/redundant_async_block.rs @@ -1,14 +1,9 @@ -use std::ops::ControlFlow; - use clippy_utils::diagnostics::span_lint_and_sugg; -use clippy_utils::peel_blocks; use clippy_utils::source::{snippet, walk_span_to_context}; use clippy_utils::ty::implements_trait; -use clippy_utils::visitors::for_each_expr_without_closures; +use clippy_utils::{desugar_await, peel_blocks}; use rustc_errors::Applicability; -use rustc_hir::{ - Closure, ClosureKind, CoroutineDesugaring, CoroutineKind, CoroutineSource, Expr, ExprKind, MatchSource, -}; +use rustc_hir::{Closure, ClosureKind, CoroutineDesugaring, CoroutineKind, CoroutineSource, Expr, ExprKind}; use rustc_lint::{LateContext, LateLintPass}; use rustc_middle::ty::UpvarCapture; use rustc_session::declare_lint_pass; @@ -99,20 +94,3 @@ fn desugar_async_block<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) -> Op None } } - -/// If `expr` is a desugared `.await`, return the original expression if it does not come from a -/// macro expansion. -fn desugar_await<'tcx>(expr: &'tcx Expr<'_>) -> Option<&'tcx Expr<'tcx>> { - if let ExprKind::Match(match_value, _, MatchSource::AwaitDesugar) = expr.kind - && let ExprKind::Call(_, [into_future_arg]) = match_value.kind - && let ctxt = expr.span.ctxt() - && for_each_expr_without_closures(into_future_arg, |e| { - walk_span_to_context(e.span, ctxt).map_or(ControlFlow::Break(()), |_| ControlFlow::Continue(())) - }) - .is_none() - { - Some(into_future_arg) - } else { - None - } -} diff --git a/src/tools/clippy/clippy_lints/src/redundant_clone.rs b/src/tools/clippy/clippy_lints/src/redundant_clone.rs index cfa622aea582f..e57b8cc2d84e3 100644 --- a/src/tools/clippy/clippy_lints/src/redundant_clone.rs +++ b/src/tools/clippy/clippy_lints/src/redundant_clone.rs @@ -109,10 +109,10 @@ impl<'tcx> LateLintPass<'tcx> for RedundantClone { continue; } - if let ty::Adt(def, _) = arg_ty.kind() { - if def.is_manually_drop() { - continue; - } + if let ty::Adt(def, _) = arg_ty.kind() + && def.is_manually_drop() + { + continue; } // `{ arg = &cloned; clone(move arg); }` or `{ arg = &cloned; to_path_buf(arg); }` @@ -182,20 +182,25 @@ impl<'tcx> LateLintPass<'tcx> for RedundantClone { let clone_usage = if local == ret_local { CloneUsage { - cloned_used: false, + cloned_use_loc: None.into(), cloned_consume_or_mutate_loc: None, clone_consumed_or_mutated: true, } } else { let clone_usage = visit_clone_usage(local, ret_local, mir, bb); - if clone_usage.cloned_used && clone_usage.clone_consumed_or_mutated { + if clone_usage.cloned_use_loc.maybe_used() && clone_usage.clone_consumed_or_mutated { // cloned value is used, and the clone is modified or moved continue; - } else if let Some(loc) = clone_usage.cloned_consume_or_mutate_loc { + } else if let MirLocalUsage::Used(loc) = clone_usage.cloned_use_loc + && possible_borrower.local_is_alive_at(ret_local, loc) + { + // cloned value is used, and the clone is alive. + continue; + } else if let Some(loc) = clone_usage.cloned_consume_or_mutate_loc // cloned value is mutated, and the clone is alive. - if possible_borrower.local_is_alive_at(ret_local, loc) { - continue; - } + && possible_borrower.local_is_alive_at(ret_local, loc) + { + continue; } clone_usage }; @@ -216,19 +221,18 @@ impl<'tcx> LateLintPass<'tcx> for RedundantClone { let call_snip = &snip[dot + 1..]; // Machine applicable when `call_snip` looks like `foobar()` - if let Some(call_snip) = call_snip.strip_suffix("()").map(str::trim) { - if call_snip + if let Some(call_snip) = call_snip.strip_suffix("()").map(str::trim) + && call_snip .as_bytes() .iter() .all(|b| b.is_ascii_alphabetic() || *b == b'_') - { - app = Applicability::MachineApplicable; - } + { + app = Applicability::MachineApplicable; } span_lint_hir_and_then(cx, REDUNDANT_CLONE, node, sugg_span, "redundant clone", |diag| { diag.span_suggestion(sugg_span, "remove this", "", app); - if clone_usage.cloned_used { + if clone_usage.cloned_use_loc.maybe_used() { diag.span_note(span, "cloned value is neither consumed nor mutated"); } else { diag.span_note( @@ -329,10 +333,33 @@ fn base_local_and_movability<'tcx>( (place.local, deref || field || slice) } -#[derive(Default)] +#[derive(Debug, Default)] +enum MirLocalUsage { + /// The local maybe used, but we are not sure how. + Unknown, + /// The local is not used. + #[default] + Unused, + /// The local is used at a specific location. + Used(mir::Location), +} + +impl MirLocalUsage { + fn maybe_used(&self) -> bool { + matches!(self, MirLocalUsage::Unknown | MirLocalUsage::Used(_)) + } +} + +impl From> for MirLocalUsage { + fn from(loc: Option) -> Self { + loc.map_or(MirLocalUsage::Unused, MirLocalUsage::Used) + } +} + +#[derive(Debug, Default)] struct CloneUsage { - /// Whether the cloned value is used after the clone. - cloned_used: bool, + /// The first location where the cloned value is used, if any. + cloned_use_loc: MirLocalUsage, /// The first location where the cloned value is consumed or mutated, if any. cloned_consume_or_mutate_loc: Option, /// Whether the clone value is mutated. @@ -360,7 +387,7 @@ fn visit_clone_usage(cloned: mir::Local, clone: mir::Local, mir: &mir::Body<'_>, .map(|mut vec| (vec.remove(0), vec.remove(0))) { CloneUsage { - cloned_used: !cloned_use_locs.is_empty(), + cloned_use_loc: cloned_use_locs.first().copied().into(), cloned_consume_or_mutate_loc: cloned_consume_or_mutate_locs.first().copied(), // Consider non-temporary clones consumed. // TODO: Actually check for mutation of non-temporaries. @@ -369,7 +396,7 @@ fn visit_clone_usage(cloned: mir::Local, clone: mir::Local, mir: &mir::Body<'_>, } } else { CloneUsage { - cloned_used: true, + cloned_use_loc: MirLocalUsage::Unknown, cloned_consume_or_mutate_loc: None, clone_consumed_or_mutated: true, } diff --git a/src/tools/clippy/clippy_lints/src/redundant_closure_call.rs b/src/tools/clippy/clippy_lints/src/redundant_closure_call.rs index 1498a49a7a4a9..84597269a58fa 100644 --- a/src/tools/clippy/clippy_lints/src/redundant_closure_call.rs +++ b/src/tools/clippy/clippy_lints/src/redundant_closure_call.rs @@ -206,7 +206,7 @@ impl<'tcx> LateLintPass<'tcx> for RedundantClosureCall { // avoid clippy::double_parens if !is_in_fn_call_arg { - hint = hint.maybe_par(); + hint = hint.maybe_paren(); } diag.span_suggestion(full_expr.span, "try doing something like", hint, applicability); diff --git a/src/tools/clippy/clippy_lints/src/redundant_pub_crate.rs b/src/tools/clippy/clippy_lints/src/redundant_pub_crate.rs index f2fdac5a8afaf..7b381fac5f118 100644 --- a/src/tools/clippy/clippy_lints/src/redundant_pub_crate.rs +++ b/src/tools/clippy/clippy_lints/src/redundant_pub_crate.rs @@ -52,13 +52,10 @@ impl<'tcx> LateLintPass<'tcx> for RedundantPubCrate { && is_not_macro_export(item) && !item.span.in_external_macro(cx.sess().source_map()) { - // FIXME: `DUMMY_SP` isn't right here, because it causes the - // resulting span to begin at the start of the file. - let span = item.span.with_hi( - item.kind - .ident() - .map_or(rustc_span::DUMMY_SP.hi(), |ident| ident.span.hi()), - ); + let span = item + .kind + .ident() + .map_or(item.span, |ident| item.span.with_hi(ident.span.hi())); let descr = cx.tcx.def_kind(item.owner_id).descr(item.owner_id.to_def_id()); span_lint_and_then( cx, diff --git a/src/tools/clippy/clippy_lints/src/redundant_slicing.rs b/src/tools/clippy/clippy_lints/src/redundant_slicing.rs index 7038b19d27596..1117dea703c2a 100644 --- a/src/tools/clippy/clippy_lints/src/redundant_slicing.rs +++ b/src/tools/clippy/clippy_lints/src/redundant_slicing.rs @@ -135,25 +135,24 @@ impl<'tcx> LateLintPass<'tcx> for RedundantSlicing { }; diag.span_suggestion(expr.span, help_msg, sugg, app); }); - } else if let Some(target_id) = cx.tcx.lang_items().deref_target() { - if let Ok(deref_ty) = cx.tcx.try_normalize_erasing_regions( + } else if let Some(target_id) = cx.tcx.lang_items().deref_target() + && let Ok(deref_ty) = cx.tcx.try_normalize_erasing_regions( cx.typing_env(), Ty::new_projection_from_args(cx.tcx, target_id, cx.tcx.mk_args(&[GenericArg::from(indexed_ty)])), - ) { - if deref_ty == expr_ty { - let (lint, msg) = DEREF_BY_SLICING_LINT; - span_lint_and_then(cx, lint, expr.span, msg, |diag| { - let mut app = Applicability::MachineApplicable; - let snip = snippet_with_context(cx, indexed.span, ctxt, "..", &mut app).0; - let sugg = if needs_parens_for_prefix { - format!("(&{}{}*{snip})", mutability.prefix_str(), "*".repeat(indexed_ref_count)) - } else { - format!("&{}{}*{snip}", mutability.prefix_str(), "*".repeat(indexed_ref_count)) - }; - diag.span_suggestion(expr.span, "dereference the original value instead", sugg, app); - }); - } - } + ) + && deref_ty == expr_ty + { + let (lint, msg) = DEREF_BY_SLICING_LINT; + span_lint_and_then(cx, lint, expr.span, msg, |diag| { + let mut app = Applicability::MachineApplicable; + let snip = snippet_with_context(cx, indexed.span, ctxt, "..", &mut app).0; + let sugg = if needs_parens_for_prefix { + format!("(&{}{}*{snip})", mutability.prefix_str(), "*".repeat(indexed_ref_count)) + } else { + format!("&{}{}*{snip}", mutability.prefix_str(), "*".repeat(indexed_ref_count)) + }; + diag.span_suggestion(expr.span, "dereference the original value instead", sugg, app); + }); } } } diff --git a/src/tools/clippy/clippy_lints/src/redundant_test_prefix.rs b/src/tools/clippy/clippy_lints/src/redundant_test_prefix.rs new file mode 100644 index 0000000000000..84276e3216573 --- /dev/null +++ b/src/tools/clippy/clippy_lints/src/redundant_test_prefix.rs @@ -0,0 +1,161 @@ +use clippy_utils::diagnostics::span_lint_and_then; +use clippy_utils::is_test_function; +use clippy_utils::visitors::for_each_expr; +use rustc_errors::Applicability; +use rustc_hir::intravisit::FnKind; +use rustc_hir::{self as hir, Body, ExprKind, FnDecl}; +use rustc_lexer::is_ident; +use rustc_lint::{LateContext, LateLintPass}; +use rustc_session::declare_lint_pass; +use rustc_span::def_id::LocalDefId; +use rustc_span::{Span, Symbol, edition}; +use std::borrow::Cow; +use std::ops::ControlFlow; + +declare_clippy_lint! { + /// ### What it does + /// Checks for test functions (functions annotated with `#[test]`) that are prefixed + /// with `test_` which is redundant. + /// + /// ### Why is this bad? + /// This is redundant because test functions are already annotated with `#[test]`. + /// Moreover, it clutters the output of `cargo test` since test functions are expanded as + /// `module::tests::test_use_case` in the output. Without the redundant prefix, the output + /// becomes `module::tests::use_case`, which is more readable. + /// + /// ### Example + /// ```no_run + /// #[cfg(test)] + /// mod tests { + /// use super::*; + /// + /// #[test] + /// fn test_use_case() { + /// // test code + /// } + /// } + /// ``` + /// Use instead: + /// ```no_run + /// #[cfg(test)] + /// mod tests { + /// use super::*; + /// + /// #[test] + /// fn use_case() { + /// // test code + /// } + /// } + /// ``` + #[clippy::version = "1.88.0"] + pub REDUNDANT_TEST_PREFIX, + restriction, + "redundant `test_` prefix in test function name" +} + +declare_lint_pass!(RedundantTestPrefix => [REDUNDANT_TEST_PREFIX]); + +impl<'tcx> LateLintPass<'tcx> for RedundantTestPrefix { + fn check_fn( + &mut self, + cx: &LateContext<'tcx>, + kind: FnKind<'_>, + _decl: &FnDecl<'_>, + body: &'tcx Body<'_>, + _span: Span, + fn_def_id: LocalDefId, + ) { + // Ignore methods and closures. + let FnKind::ItemFn(ref ident, ..) = kind else { + return; + }; + + // Skip the lint if the function is within a macro expansion. + if ident.span.from_expansion() { + return; + } + + // Skip if the function name does not start with `test_`. + if !ident.as_str().starts_with("test_") { + return; + } + + // If the function is not a test function, skip the lint. + if !is_test_function(cx.tcx, fn_def_id) { + return; + } + + span_lint_and_then( + cx, + REDUNDANT_TEST_PREFIX, + ident.span, + "redundant `test_` prefix in test function name", + |diag| { + let non_prefixed = Symbol::intern(ident.as_str().trim_start_matches("test_")); + if is_invalid_ident(non_prefixed) { + // If the prefix-trimmed name is not a valid function name, do not provide an + // automatic fix, just suggest renaming the function. + diag.help( + "consider function renaming (just removing `test_` prefix will produce invalid function name)", + ); + } else { + let (sugg, msg): (Cow<'_, str>, _) = if name_conflicts(cx, body, non_prefixed) { + // If `non_prefixed` conflicts with another function in the same module/scope, + // do not provide an automatic fix, but still emit a fix suggestion. + ( + format!("{non_prefixed}_works").into(), + "consider function renaming (just removing `test_` prefix will cause a name conflict)", + ) + } else { + // If `non_prefixed` is a valid identifier and does not conflict with another function, + // so we can suggest an auto-fix. + (non_prefixed.as_str().into(), "consider removing the `test_` prefix") + }; + diag.span_suggestion(ident.span, msg, sugg, Applicability::MaybeIncorrect); + } + }, + ); + } +} + +/// Checks whether removal of the `_test` prefix from the function name will cause a name conflict. +/// +/// There should be no other function with the same name in the same module/scope. Also, there +/// should not be any function call with the same name within the body of the function, to avoid +/// recursion. +fn name_conflicts<'tcx>(cx: &LateContext<'tcx>, body: &'tcx Body<'_>, fn_name: Symbol) -> bool { + let tcx = cx.tcx; + let id = body.id().hir_id; + + // Iterate over items in the same module/scope + let (module, _module_span, _module_hir) = tcx.hir_get_module(tcx.parent_module(id)); + if module + .item_ids + .iter() + .any(|item| matches!(tcx.hir_item(*item).kind, hir::ItemKind::Fn { ident, .. } if ident.name == fn_name)) + { + // Name conflict found + return true; + } + + // Also check that within the body of the function there is also no function call + // with the same name (since it will result in recursion) + for_each_expr(cx, body, |expr| { + if let ExprKind::Path(qpath) = &expr.kind + && let Some(def_id) = cx.qpath_res(qpath, expr.hir_id).opt_def_id() + && let Some(name) = tcx.opt_item_name(def_id) + && name == fn_name + { + // Function call with the same name found + ControlFlow::Break(()) + } else { + ControlFlow::Continue(()) + } + }) + .is_some() +} + +fn is_invalid_ident(ident: Symbol) -> bool { + // The identifier is either a reserved keyword, or starts with an invalid sequence. + ident.is_reserved(|| edition::LATEST_STABLE_EDITION) || !is_ident(ident.as_str()) +} diff --git a/src/tools/clippy/clippy_lints/src/regex.rs b/src/tools/clippy/clippy_lints/src/regex.rs index 9443dca154e33..834ff2af0e883 100644 --- a/src/tools/clippy/clippy_lints/src/regex.rs +++ b/src/tools/clippy/clippy_lints/src/regex.rs @@ -3,7 +3,7 @@ use std::fmt::Display; use clippy_utils::consts::{ConstEvalCtxt, Constant}; use clippy_utils::diagnostics::{span_lint, span_lint_and_help}; use clippy_utils::source::SpanRangeExt; -use clippy_utils::{def_path_res_with_base, find_crates, path_def_id, paths}; +use clippy_utils::{def_path_res_with_base, find_crates, path_def_id, paths, sym}; use rustc_ast::ast::{LitKind, StrStyle}; use rustc_hir::def_id::DefIdMap; use rustc_hir::{BorrowKind, Expr, ExprKind, OwnerId}; @@ -76,7 +76,7 @@ declare_clippy_lint! { /// This is documented as an antipattern [on the regex documentation](https://docs.rs/regex/latest/regex/#avoid-re-compiling-regexes-especially-in-a-loop) /// /// ### Example - /// ```no_run + /// ```rust,ignore /// # let haystacks = [""]; /// # const MY_REGEX: &str = "a.b"; /// for haystack in haystacks { @@ -87,7 +87,7 @@ declare_clippy_lint! { /// } /// ``` /// can be replaced with - /// ```no_run + /// ```rust,ignore /// # let haystacks = [""]; /// # const MY_REGEX: &str = "a.b"; /// let regex = regex::Regex::new(MY_REGEX).unwrap(); @@ -126,7 +126,7 @@ impl<'tcx> LateLintPass<'tcx> for Regex { // // `def_path_res_with_base` will resolve through re-exports but is relatively heavy, so we only // perform the operation once and store the results - let regex_crates = find_crates(cx.tcx, sym!(regex)); + let regex_crates = find_crates(cx.tcx, sym::regex); let mut resolve = |path: &[&str], kind: RegexKind| { for res in def_path_res_with_base(cx.tcx, regex_crates.clone(), &path[1..]) { if let Some(id) = res.opt_def_id() { diff --git a/src/tools/clippy/clippy_lints/src/returns.rs b/src/tools/clippy/clippy_lints/src/returns.rs index 4cb73df8b488f..d8e8ead291288 100644 --- a/src/tools/clippy/clippy_lints/src/returns.rs +++ b/src/tools/clippy/clippy_lints/src/returns.rs @@ -404,7 +404,7 @@ fn check_final_expr<'tcx>( match cx.tcx.hir_attrs(expr.hir_id) { [] => {}, [attr] => { - if matches!(Level::from_attr(attr), Some(Level::Expect(_))) + if matches!(Level::from_attr(attr), Some((Level::Expect, _))) && let metas = attr.meta_item_list() && let Some(lst) = metas && let [MetaItemInner::MetaItem(meta_item), ..] = lst.as_slice() diff --git a/src/tools/clippy/clippy_lints/src/same_name_method.rs b/src/tools/clippy/clippy_lints/src/same_name_method.rs index 552135b15fd8f..226e8ff6adbf5 100644 --- a/src/tools/clippy/clippy_lints/src/same_name_method.rs +++ b/src/tools/clippy/clippy_lints/src/same_name_method.rs @@ -3,7 +3,7 @@ use rustc_data_structures::fx::FxHashMap; use rustc_hir::def::{DefKind, Res}; use rustc_hir::{HirId, Impl, ItemKind, Node, Path, QPath, TraitRef, TyKind}; use rustc_lint::{LateContext, LateLintPass}; -use rustc_middle::ty::AssocKind; +use rustc_middle::ty::AssocItem; use rustc_session::declare_lint_pass; use rustc_span::Span; use rustc_span::symbol::Symbol; @@ -85,8 +85,8 @@ impl<'tcx> LateLintPass<'tcx> for SameNameMethod { cx.tcx .associated_items(did) .in_definition_order() - .filter(|assoc_item| matches!(assoc_item.kind, AssocKind::Fn)) - .map(|assoc_item| assoc_item.name) + .filter(|assoc_item| assoc_item.is_fn()) + .map(AssocItem::name) .collect() } else { BTreeSet::new() diff --git a/src/tools/clippy/clippy_lints/src/serde_api.rs b/src/tools/clippy/clippy_lints/src/serde_api.rs index 6a0dfde2d9c9c..a8c6518b592ba 100644 --- a/src/tools/clippy/clippy_lints/src/serde_api.rs +++ b/src/tools/clippy/clippy_lints/src/serde_api.rs @@ -32,28 +32,28 @@ impl<'tcx> LateLintPass<'tcx> for SerdeApi { }) = item.kind { let did = trait_ref.path.res.def_id(); - if let Some(visit_did) = get_trait_def_id(cx.tcx, &paths::SERDE_DE_VISITOR) { - if did == visit_did { - let mut seen_str = None; - let mut seen_string = None; - for item in *items { - match item.ident.as_str() { - "visit_str" => seen_str = Some(item.span), - "visit_string" => seen_string = Some(item.span), - _ => {}, - } - } - if let Some(span) = seen_string { - if seen_str.is_none() { - span_lint( - cx, - SERDE_API_MISUSE, - span, - "you should not implement `visit_string` without also implementing `visit_str`", - ); - } + if let Some(visit_did) = get_trait_def_id(cx.tcx, &paths::SERDE_DE_VISITOR) + && did == visit_did + { + let mut seen_str = None; + let mut seen_string = None; + for item in *items { + match item.ident.as_str() { + "visit_str" => seen_str = Some(item.span), + "visit_string" => seen_string = Some(item.span), + _ => {}, } } + if let Some(span) = seen_string + && seen_str.is_none() + { + span_lint( + cx, + SERDE_API_MISUSE, + span, + "you should not implement `visit_string` without also implementing `visit_str`", + ); + } } } } diff --git a/src/tools/clippy/clippy_lints/src/set_contains_or_insert.rs b/src/tools/clippy/clippy_lints/src/set_contains_or_insert.rs index 1185d67b1258b..ff6e6ef214b5b 100644 --- a/src/tools/clippy/clippy_lints/src/set_contains_or_insert.rs +++ b/src/tools/clippy/clippy_lints/src/set_contains_or_insert.rs @@ -3,12 +3,12 @@ use std::ops::ControlFlow; use clippy_utils::diagnostics::span_lint; use clippy_utils::ty::is_type_diagnostic_item; use clippy_utils::visitors::for_each_expr; -use clippy_utils::{SpanlessEq, higher, peel_hir_expr_while}; +use clippy_utils::{SpanlessEq, higher, peel_hir_expr_while, sym}; use rustc_hir::{Expr, ExprKind, UnOp}; use rustc_lint::{LateContext, LateLintPass}; use rustc_session::declare_lint_pass; +use rustc_span::Span; use rustc_span::symbol::Symbol; -use rustc_span::{Span, sym}; declare_clippy_lint! { /// ### What it does @@ -58,7 +58,7 @@ impl<'tcx> LateLintPass<'tcx> for SetContainsOrInsert { then: then_expr, .. }) = higher::If::hir(expr) - && let Some((contains_expr, sym)) = try_parse_op_call(cx, cond_expr, sym!(contains))//try_parse_contains(cx, cond_expr) + && let Some((contains_expr, sym)) = try_parse_op_call(cx, cond_expr, sym::contains)//try_parse_contains(cx, cond_expr) && let Some(insert_expr) = find_insert_calls(cx, &contains_expr, then_expr) { span_lint( @@ -118,7 +118,7 @@ fn find_insert_calls<'tcx>( expr: &'tcx Expr<'_>, ) -> Option> { for_each_expr(cx, expr, |e| { - if let Some((insert_expr, _)) = try_parse_op_call(cx, e, sym!(insert)) + if let Some((insert_expr, _)) = try_parse_op_call(cx, e, sym::insert) && SpanlessEq::new(cx).eq_expr(contains_expr.receiver, insert_expr.receiver) && SpanlessEq::new(cx).eq_expr(contains_expr.value, insert_expr.value) { diff --git a/src/tools/clippy/clippy_lints/src/shadow.rs b/src/tools/clippy/clippy_lints/src/shadow.rs index ee282ee1dfb71..14399867f3181 100644 --- a/src/tools/clippy/clippy_lints/src/shadow.rs +++ b/src/tools/clippy/clippy_lints/src/shadow.rs @@ -8,7 +8,9 @@ use rustc_data_structures::fx::FxHashMap; use rustc_hir::def::Res; use rustc_hir::def_id::LocalDefId; use rustc_hir::hir_id::ItemLocalId; -use rustc_hir::{Block, Body, BodyOwnerKind, Expr, ExprKind, HirId, LetExpr, Node, Pat, PatKind, QPath, UnOp}; +use rustc_hir::{ + Block, Body, BodyOwnerKind, Expr, ExprKind, HirId, LetExpr, LocalSource, Node, Pat, PatKind, QPath, UnOp, +}; use rustc_lint::{LateContext, LateLintPass}; use rustc_session::impl_lint_pass; use rustc_span::{Span, Symbol}; @@ -65,7 +67,7 @@ declare_clippy_lint! { #[clippy::version = "pre 1.29.0"] pub SHADOW_REUSE, restriction, - "rebinding a name to an expression that re-uses the original value, e.g., `let x = x + 1`" + "rebinding a name to an expression that reuses the original value, e.g., `let x = x + 1`" } declare_clippy_lint! { @@ -125,6 +127,17 @@ impl<'tcx> LateLintPass<'tcx> for Shadow { return; } + // Desugaring of a destructuring assignment may reuse the same identifier internally. + // Peel `Pat` and `PatField` nodes and check if we reach a desugared `Let` assignment. + if let Some((_, Node::LetStmt(let_stmt))) = cx + .tcx + .hir_parent_iter(pat.hir_id) + .find(|(_, node)| !matches!(node, Node::Pat(_) | Node::PatField(_))) + && let LocalSource::AssignDesugar(_) = let_stmt.source + { + return; + } + let HirId { owner, local_id } = id; // get (or insert) the list of items for this owner and symbol let (ref mut data, scope_owner) = *self.bindings.last_mut().unwrap(); @@ -167,10 +180,10 @@ impl<'tcx> LateLintPass<'tcx> for Shadow { fn is_shadow(cx: &LateContext<'_>, owner: LocalDefId, first: ItemLocalId, second: ItemLocalId) -> bool { let scope_tree = cx.tcx.region_scope_tree(owner.to_def_id()); - if let Some(first_scope) = scope_tree.var_scope(first) { - if let Some(second_scope) = scope_tree.var_scope(second) { - return scope_tree.is_subscope_of(second_scope, first_scope); - } + if let Some(first_scope) = scope_tree.var_scope(first) + && let Some(second_scope) = scope_tree.var_scope(second) + { + return scope_tree.is_subscope_of(second_scope, first_scope); } false @@ -218,7 +231,7 @@ fn lint_shadow(cx: &LateContext<'_>, pat: &Pat<'_>, shadowed: HirId, span: Span) }, }; span_lint_and_then(cx, lint, span, msg, |diag| { - diag.span_note(cx.tcx.hir().span(shadowed), "previous binding is here"); + diag.span_note(cx.tcx.hir_span(shadowed), "previous binding is here"); }); } diff --git a/src/tools/clippy/clippy_lints/src/significant_drop_tightening.rs b/src/tools/clippy/clippy_lints/src/significant_drop_tightening.rs index e9db7c9d031a9..ccb1209c6fcbe 100644 --- a/src/tools/clippy/clippy_lints/src/significant_drop_tightening.rs +++ b/src/tools/clippy/clippy_lints/src/significant_drop_tightening.rs @@ -79,10 +79,11 @@ impl<'tcx> LateLintPass<'tcx> for SignificantDropTightening<'tcx> { if apa.counter <= 1 || !apa.has_expensive_expr_after_last_attr { continue; } + let first_bind_ident = apa.first_bind_ident.unwrap(); span_lint_and_then( cx, SIGNIFICANT_DROP_TIGHTENING, - apa.first_bind_ident.span, + first_bind_ident.span, "temporary with significant `Drop` can be early dropped", |diag| { match apa.counter { @@ -91,13 +92,13 @@ impl<'tcx> LateLintPass<'tcx> for SignificantDropTightening<'tcx> { let indent = " ".repeat(indent_of(cx, apa.last_stmt_span).unwrap_or(0)); let init_method = snippet(cx, apa.first_method_span, ".."); let usage_method = snippet(cx, apa.last_method_span, ".."); - let stmt = if apa.last_bind_ident == Ident::empty() { - format!("\n{indent}{init_method}.{usage_method};") - } else { + let stmt = if let Some(last_bind_ident) = apa.last_bind_ident { format!( "\n{indent}let {} = {init_method}.{usage_method};", - snippet(cx, apa.last_bind_ident.span, ".."), + snippet(cx, last_bind_ident.span, ".."), ) + } else { + format!("\n{indent}{init_method}.{usage_method};") }; diag.multipart_suggestion_verbose( @@ -113,7 +114,7 @@ impl<'tcx> LateLintPass<'tcx> for SignificantDropTightening<'tcx> { format!( "\n{}drop({});", " ".repeat(indent_of(cx, apa.last_stmt_span).unwrap_or(0)), - apa.first_bind_ident + first_bind_ident ), Applicability::MaybeIncorrect, ); @@ -123,8 +124,7 @@ impl<'tcx> LateLintPass<'tcx> for SignificantDropTightening<'tcx> { diag.span_label( apa.first_block_span, format!( - "temporary `{}` is currently being dropped at the end of its contained scope", - apa.first_bind_ident + "temporary `{first_bind_ident}` is currently being dropped at the end of its contained scope" ), ); }, @@ -144,7 +144,10 @@ impl<'cx, 'others, 'tcx> AttrChecker<'cx, 'others, 'tcx> { Self { cx, type_cache } } - fn has_sig_drop_attr(&mut self, ty: Ty<'tcx>) -> bool { + fn has_sig_drop_attr(&mut self, ty: Ty<'tcx>, depth: usize) -> bool { + if !self.cx.tcx.recursion_limit().value_within_limit(depth) { + return false; + } let ty = self .cx .tcx @@ -156,12 +159,12 @@ impl<'cx, 'others, 'tcx> AttrChecker<'cx, 'others, 'tcx> { e.insert(false); }, } - let value = self.has_sig_drop_attr_uncached(ty); + let value = self.has_sig_drop_attr_uncached(ty, depth + 1); self.type_cache.insert(ty, value); value } - fn has_sig_drop_attr_uncached(&mut self, ty: Ty<'tcx>) -> bool { + fn has_sig_drop_attr_uncached(&mut self, ty: Ty<'tcx>, depth: usize) -> bool { if let Some(adt) = ty.ty_adt_def() { let mut iter = get_attr( self.cx.sess(), @@ -176,15 +179,15 @@ impl<'cx, 'others, 'tcx> AttrChecker<'cx, 'others, 'tcx> { rustc_middle::ty::Adt(a, b) => { for f in a.all_fields() { let ty = f.ty(self.cx.tcx, b); - if self.has_sig_drop_attr(ty) { + if self.has_sig_drop_attr(ty, depth) { return true; } } for generic_arg in *b { - if let GenericArgKind::Type(ty) = generic_arg.unpack() { - if self.has_sig_drop_attr(ty) { - return true; - } + if let GenericArgKind::Type(ty) = generic_arg.unpack() + && self.has_sig_drop_attr(ty, depth) + { + return true; } } false @@ -192,7 +195,7 @@ impl<'cx, 'others, 'tcx> AttrChecker<'cx, 'others, 'tcx> { rustc_middle::ty::Array(ty, _) | rustc_middle::ty::RawPtr(ty, _) | rustc_middle::ty::Ref(_, ty, _) - | rustc_middle::ty::Slice(ty) => self.has_sig_drop_attr(*ty), + | rustc_middle::ty::Slice(ty) => self.has_sig_drop_attr(*ty, depth), _ => false, } } @@ -268,7 +271,7 @@ impl<'tcx> Visitor<'tcx> for StmtsChecker<'_, '_, '_, '_, 'tcx> { apa.has_expensive_expr_after_last_attr = false; }; let mut ac = AttrChecker::new(self.cx, self.type_cache); - if ac.has_sig_drop_attr(self.cx.typeck_results().expr_ty(expr)) { + if ac.has_sig_drop_attr(self.cx.typeck_results().expr_ty(expr), 0) { if let hir::StmtKind::Let(local) = self.ap.curr_stmt.kind && let hir::PatKind::Binding(_, hir_id, ident, _) = local.pat.kind && !self.ap.apas.contains_key(&hir_id) @@ -283,7 +286,7 @@ impl<'tcx> Visitor<'tcx> for StmtsChecker<'_, '_, '_, '_, 'tcx> { let mut apa = AuxParamsAttr { first_block_hir_id: self.ap.curr_block_hir_id, first_block_span: self.ap.curr_block_span, - first_bind_ident: ident, + first_bind_ident: Some(ident), first_method_span: { let expr_or_init = expr_or_init(self.cx, expr); if let hir::ExprKind::MethodCall(_, local_expr, _, span) = expr_or_init.kind { @@ -307,7 +310,7 @@ impl<'tcx> Visitor<'tcx> for StmtsChecker<'_, '_, '_, '_, 'tcx> { match self.ap.curr_stmt.kind { hir::StmtKind::Let(local) => { if let hir::PatKind::Binding(_, _, ident, _) = local.pat.kind { - apa.last_bind_ident = ident; + apa.last_bind_ident = Some(ident); } if let Some(local_init) = local.init && let hir::ExprKind::MethodCall(_, _, _, span) = local_init.kind @@ -316,7 +319,7 @@ impl<'tcx> Visitor<'tcx> for StmtsChecker<'_, '_, '_, '_, 'tcx> { } }, hir::StmtKind::Semi(semi_expr) => { - if has_drop(semi_expr, &apa.first_bind_ident, self.cx) { + if has_drop(semi_expr, apa.first_bind_ident, self.cx) { apa.has_expensive_expr_after_last_attr = false; apa.last_stmt_span = DUMMY_SP; return; @@ -373,7 +376,7 @@ struct AuxParamsAttr { first_block_span: Span, /// The binding or variable that references the initial construction of the type marked with /// `#[has_significant_drop]`. - first_bind_ident: Ident, + first_bind_ident: Option, /// Similar to `init_bind_ident` but encompasses the right-hand method call. first_method_span: Span, /// Similar to `init_bind_ident` but encompasses the whole contained statement. @@ -381,7 +384,7 @@ struct AuxParamsAttr { /// The last visited binding or variable span within a block that had any referenced inner type /// marked with `#[has_significant_drop]`. - last_bind_ident: Ident, + last_bind_ident: Option, /// Similar to `last_bind_span` but encompasses the right-hand method call. last_method_span: Span, /// Similar to `last_bind_span` but encompasses the whole contained statement. @@ -395,10 +398,10 @@ impl Default for AuxParamsAttr { has_expensive_expr_after_last_attr: false, first_block_hir_id: HirId::INVALID, first_block_span: DUMMY_SP, - first_bind_ident: Ident::empty(), + first_bind_ident: None, first_method_span: DUMMY_SP, first_stmt_span: DUMMY_SP, - last_bind_ident: Ident::empty(), + last_bind_ident: None, last_method_span: DUMMY_SP, last_stmt_span: DUMMY_SP, } @@ -413,7 +416,7 @@ fn dummy_stmt_expr<'any>(expr: &'any hir::Expr<'any>) -> hir::Stmt<'any> { } } -fn has_drop(expr: &hir::Expr<'_>, first_bind_ident: &Ident, lcx: &LateContext<'_>) -> bool { +fn has_drop(expr: &hir::Expr<'_>, first_bind_ident: Option, lcx: &LateContext<'_>) -> bool { if let hir::ExprKind::Call(fun, [first_arg]) = expr.kind && let hir::ExprKind::Path(hir::QPath::Resolved(_, fun_path)) = &fun.kind && let Res::Def(DefKind::Fn, did) = fun_path.res @@ -422,7 +425,8 @@ fn has_drop(expr: &hir::Expr<'_>, first_bind_ident: &Ident, lcx: &LateContext<'_ let has_ident = |local_expr: &hir::Expr<'_>| { if let hir::ExprKind::Path(hir::QPath::Resolved(_, arg_path)) = &local_expr.kind && let [first_arg_ps, ..] = arg_path.segments - && &first_arg_ps.ident == first_bind_ident + && let Some(first_bind_ident) = first_bind_ident + && first_arg_ps.ident == first_bind_ident { true } else { diff --git a/src/tools/clippy/clippy_lints/src/single_call_fn.rs b/src/tools/clippy/clippy_lints/src/single_call_fn.rs index 1a2fb77acc15a..64891743dc636 100644 --- a/src/tools/clippy/clippy_lints/src/single_call_fn.rs +++ b/src/tools/clippy/clippy_lints/src/single_call_fn.rs @@ -137,7 +137,7 @@ impl<'tcx> LateLintPass<'tcx> for SingleCallFn { for (&def_id, usage) in &self.def_id_to_usage { if let CallState::Once { call_site } = *usage && let fn_hir_id = cx.tcx.local_def_id_to_hir_id(def_id) - && let fn_span = cx.tcx.hir().span_with_body(fn_hir_id) + && let fn_span = cx.tcx.hir_span_with_body(fn_hir_id) && !self.is_function_allowed(cx, def_id, fn_hir_id, fn_span) { span_lint_hir_and_then( diff --git a/src/tools/clippy/clippy_lints/src/single_char_lifetime_names.rs b/src/tools/clippy/clippy_lints/src/single_char_lifetime_names.rs index 50a6ee316c8a6..8c34da0d14a4d 100644 --- a/src/tools/clippy/clippy_lints/src/single_char_lifetime_names.rs +++ b/src/tools/clippy/clippy_lints/src/single_char_lifetime_names.rs @@ -45,19 +45,20 @@ impl EarlyLintPass for SingleCharLifetimeNames { return; } - if let GenericParamKind::Lifetime = param.kind { - if !param.is_placeholder && param.ident.as_str().len() <= 2 { - #[expect(clippy::collapsible_span_lint_calls, reason = "rust-clippy#7797")] - span_lint_and_then( - ctx, - SINGLE_CHAR_LIFETIME_NAMES, - param.ident.span, - "single-character lifetime names are likely uninformative", - |diag| { - diag.help("use a more informative name"); - }, - ); - } + if let GenericParamKind::Lifetime = param.kind + && !param.is_placeholder + && param.ident.as_str().len() <= 2 + { + #[expect(clippy::collapsible_span_lint_calls, reason = "rust-clippy#7797")] + span_lint_and_then( + ctx, + SINGLE_CHAR_LIFETIME_NAMES, + param.ident.span, + "single-character lifetime names are likely uninformative", + |diag| { + diag.help("use a more informative name"); + }, + ); } } } diff --git a/src/tools/clippy/clippy_lints/src/single_component_path_imports.rs b/src/tools/clippy/clippy_lints/src/single_component_path_imports.rs index fa08245350429..62939912304ba 100644 --- a/src/tools/clippy/clippy_lints/src/single_component_path_imports.rs +++ b/src/tools/clippy/clippy_lints/src/single_component_path_imports.rs @@ -174,11 +174,11 @@ impl SingleComponentPathImports { } match &item.kind { - ItemKind::Mod(_, ModKind::Loaded(items, ..)) => { + ItemKind::Mod(_, _, ModKind::Loaded(items, ..)) => { self.check_mod(items); }, - ItemKind::MacroDef(MacroDef { macro_rules: true, .. }) => { - macros.push(item.ident.name); + ItemKind::MacroDef(ident, MacroDef { macro_rules: true, .. }) => { + macros.push(ident.name); }, ItemKind::Use(use_tree) => { let segments = &use_tree.prefix.segments; @@ -204,17 +204,17 @@ impl SingleComponentPathImports { if let UseTreeKind::Nested { items, .. } = &use_tree.kind { for tree in items { let segments = &tree.0.prefix.segments; - if segments.len() == 1 { - if let UseTreeKind::Simple(None) = tree.0.kind { - let name = segments[0].ident.name; - if !macros.contains(&name) { - single_use_usages.push(SingleUse { - name, - span: tree.0.span, - item_id: item.id, - can_suggest: false, - }); - } + if segments.len() == 1 + && let UseTreeKind::Simple(None) = tree.0.kind + { + let name = segments[0].ident.name; + if !macros.contains(&name) { + single_use_usages.push(SingleUse { + name, + span: tree.0.span, + item_id: item.id, + can_suggest: false, + }); } } } diff --git a/src/tools/clippy/clippy_lints/src/slow_vector_initialization.rs b/src/tools/clippy/clippy_lints/src/slow_vector_initialization.rs index d26288adb3919..30a5fe4db27e1 100644 --- a/src/tools/clippy/clippy_lints/src/slow_vector_initialization.rs +++ b/src/tools/clippy/clippy_lints/src/slow_vector_initialization.rs @@ -3,14 +3,13 @@ use clippy_utils::macros::matching_root_macro_call; use clippy_utils::sugg::Sugg; use clippy_utils::{ SpanlessEq, get_enclosing_block, is_integer_literal, is_path_diagnostic_item, path_to_local, path_to_local_id, - span_contains_comment, + span_contains_comment, sym, }; use rustc_errors::Applicability; use rustc_hir::intravisit::{Visitor, walk_block, walk_expr, walk_stmt}; use rustc_hir::{BindingMode, Block, Expr, ExprKind, HirId, PatKind, Stmt, StmtKind}; use rustc_lint::{LateContext, LateLintPass}; use rustc_session::declare_lint_pass; -use rustc_span::symbol::sym; declare_clippy_lint! { /// ### What it does @@ -248,7 +247,7 @@ impl<'tcx> VectorInitializationVisitor<'_, 'tcx> { if self.initialization_found && let ExprKind::MethodCall(path, self_arg, [extend_arg], _) = expr.kind && path_to_local_id(self_arg, self.vec_alloc.local_id) - && path.ident.name.as_str() == "extend" + && path.ident.name == sym::extend && self.is_repeat_take(extend_arg) { self.slow_expression = Some(InitializationType::Extend(expr)); @@ -260,7 +259,7 @@ impl<'tcx> VectorInitializationVisitor<'_, 'tcx> { if self.initialization_found && let ExprKind::MethodCall(path, self_arg, [len_arg, fill_arg], _) = expr.kind && path_to_local_id(self_arg, self.vec_alloc.local_id) - && path.ident.name.as_str() == "resize" + && path.ident.name == sym::resize // Check that is filled with 0 && is_integer_literal(fill_arg, 0) { @@ -282,7 +281,7 @@ impl<'tcx> VectorInitializationVisitor<'_, 'tcx> { /// Returns `true` if give expression is `repeat(0).take(...)` fn is_repeat_take(&mut self, expr: &'tcx Expr<'tcx>) -> bool { if let ExprKind::MethodCall(take_path, recv, [len_arg], _) = expr.kind - && take_path.ident.name.as_str() == "take" + && take_path.ident.name == sym::take // Check that take is applied to `repeat(0)` && self.is_repeat_zero(recv) { diff --git a/src/tools/clippy/clippy_lints/src/strings.rs b/src/tools/clippy/clippy_lints/src/strings.rs index 27c548bed9f64..af4d0d541f176 100644 --- a/src/tools/clippy/clippy_lints/src/strings.rs +++ b/src/tools/clippy/clippy_lints/src/strings.rs @@ -3,7 +3,7 @@ use clippy_utils::source::{snippet, snippet_with_applicability}; use clippy_utils::ty::is_type_lang_item; use clippy_utils::{ SpanlessEq, get_expr_use_or_unification_node, get_parent_expr, is_lint_allowed, method_calls, path_def_id, - peel_blocks, + peel_blocks, sym, }; use rustc_errors::Applicability; use rustc_hir::def_id::DefId; @@ -12,7 +12,6 @@ use rustc_lint::{LateContext, LateLintPass, LintContext}; use rustc_middle::ty; use rustc_session::declare_lint_pass; use rustc_span::source_map::Spanned; -use rustc_span::sym; use std::ops::ControlFlow; @@ -162,13 +161,12 @@ impl<'tcx> LateLintPass<'tcx> for StringAdd { if is_string(cx, left) { if !is_lint_allowed(cx, STRING_ADD_ASSIGN, e.hir_id) { let parent = get_parent_expr(cx, e); - if let Some(p) = parent { - if let ExprKind::Assign(target, _, _) = p.kind { + if let Some(p) = parent + && let ExprKind::Assign(target, _, _) = p.kind // avoid duplicate matches - if SpanlessEq::new(cx).eq_expr(target, left) { - return; - } - } + && SpanlessEq::new(cx).eq_expr(target, left) + { + return; } } span_lint( @@ -263,7 +261,7 @@ impl<'tcx> LateLintPass<'tcx> for StringLitAsBytes { && let ExprKind::AddrOf(BorrowKind::Ref, _, args) = bytes_arg.kind && let ExprKind::Index(left, right, _) = args.kind && let (method_names, expressions, _) = method_calls(left, 1) - && method_names == [sym!(as_bytes)] + && method_names == [sym::as_bytes] && expressions.len() == 1 && expressions[0].1.is_empty() @@ -288,7 +286,7 @@ impl<'tcx> LateLintPass<'tcx> for StringLitAsBytes { if !e.span.in_external_macro(cx.sess().source_map()) && let ExprKind::MethodCall(path, receiver, ..) = &e.kind - && path.ident.name.as_str() == "as_bytes" + && path.ident.name == sym::as_bytes && let ExprKind::Lit(lit) = &receiver.kind && let LitKind::Str(lit_content, _) = &lit.node { @@ -334,7 +332,7 @@ impl<'tcx> LateLintPass<'tcx> for StringLitAsBytes { } if let ExprKind::MethodCall(path, recv, [], _) = &e.kind - && path.ident.name.as_str() == "into_bytes" + && path.ident.name == sym::into_bytes && let ExprKind::MethodCall(path, recv, [], _) = &recv.kind && matches!(path.ident.name.as_str(), "to_owned" | "to_string") && let ExprKind::Lit(lit) = &recv.kind @@ -558,7 +556,7 @@ impl<'tcx> LateLintPass<'tcx> for TrimSplitWhitespace { fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &Expr<'_>) { let tyckres = cx.typeck_results(); if let ExprKind::MethodCall(path, split_recv, [], split_ws_span) = expr.kind - && path.ident.name.as_str() == "split_whitespace" + && path.ident.name == sym::split_whitespace && let Some(split_ws_def_id) = tyckres.type_dependent_def_id(expr.hir_id) && cx.tcx.is_diagnostic_item(sym::str_split_whitespace, split_ws_def_id) && let ExprKind::MethodCall(path, _trim_recv, [], trim_span) = split_recv.kind diff --git a/src/tools/clippy/clippy_lints/src/suspicious_trait_impl.rs b/src/tools/clippy/clippy_lints/src/suspicious_trait_impl.rs index fb426e91bf01d..83241f97a99ac 100644 --- a/src/tools/clippy/clippy_lints/src/suspicious_trait_impl.rs +++ b/src/tools/clippy/clippy_lints/src/suspicious_trait_impl.rs @@ -5,6 +5,7 @@ use core::ops::ControlFlow; use rustc_hir as hir; use rustc_lint::{LateContext, LateLintPass}; use rustc_session::declare_lint_pass; +use rustc_span::Span; declare_clippy_lint! { /// ### What it does @@ -56,8 +57,20 @@ declare_lint_pass!(SuspiciousImpl => [SUSPICIOUS_ARITHMETIC_IMPL, SUSPICIOUS_OP_ impl<'tcx> LateLintPass<'tcx> for SuspiciousImpl { fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx hir::Expr<'_>) { - if let hir::ExprKind::Binary(binop, _, _) | hir::ExprKind::AssignOp(binop, ..) = expr.kind - && let Some((binop_trait_lang, op_assign_trait_lang)) = binop_traits(binop.node) + match expr.kind { + hir::ExprKind::Binary(op, _, _) => { + check_expr_inner(cx, expr, op.node, op.span); + }, + hir::ExprKind::AssignOp(op, _, _) => { + check_expr_inner(cx, expr, op.node.into(), op.span); + }, + _ => {}, + } + } +} + +fn check_expr_inner<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx hir::Expr<'_>, binop: hir::BinOpKind, span: Span) { + if let Some((binop_trait_lang, op_assign_trait_lang)) = binop_traits(binop) && let Some(binop_trait_id) = cx.tcx.lang_items().get(binop_trait_lang) && let Some(op_assign_trait_id) = cx.tcx.lang_items().get(op_assign_trait_lang) @@ -78,18 +91,17 @@ impl<'tcx> LateLintPass<'tcx> for SuspiciousImpl { .iter() .find(|&(ts, _)| ts.iter().any(|&t| Some(trait_id) == cx.tcx.lang_items().get(t))) && count_binops(body.value) == 1 - { - span_lint( - cx, - lint, - binop.span, - format!( - "suspicious use of `{}` in `{}` impl", - binop.node.as_str(), - cx.tcx.item_name(trait_id) - ), - ); - } + { + span_lint( + cx, + lint, + span, + format!( + "suspicious use of `{}` in `{}` impl", + binop.as_str(), + cx.tcx.item_name(trait_id) + ), + ); } } diff --git a/src/tools/clippy/clippy_lints/src/swap.rs b/src/tools/clippy/clippy_lints/src/swap.rs index 7176d533b6164..e3ecd6508bf9a 100644 --- a/src/tools/clippy/clippy_lints/src/swap.rs +++ b/src/tools/clippy/clippy_lints/src/swap.rs @@ -10,7 +10,7 @@ use rustc_data_structures::fx::FxIndexSet; use rustc_hir::intravisit::{Visitor, walk_expr}; use rustc_errors::Applicability; -use rustc_hir::{BinOpKind, Block, Expr, ExprKind, LetStmt, PatKind, QPath, Stmt, StmtKind}; +use rustc_hir::{AssignOpKind, Block, Expr, ExprKind, LetStmt, PatKind, QPath, Stmt, StmtKind}; use rustc_lint::{LateContext, LateLintPass, LintContext}; use rustc_middle::ty; use rustc_session::declare_lint_pass; @@ -133,7 +133,7 @@ fn generate_swap_warning<'tcx>( applicability: &mut applicability, } .snippet_index_bindings(&[idx1, idx2, rhs1, rhs2]), - slice.maybe_par(), + slice.maybe_paren(), snippet_with_context(cx, idx1.span, ctxt, "..", &mut applicability).0, snippet_with_context(cx, idx2.span, ctxt, "..", &mut applicability).0, ), @@ -269,12 +269,11 @@ fn parse<'a, 'hir>(stmt: &'a Stmt<'hir>) -> Option<(ExprOrIdent<'hir>, &'a Expr< if let ExprKind::Assign(lhs, rhs, _) = expr.kind { return Some((ExprOrIdent::Expr(lhs), rhs)); } - } else if let StmtKind::Let(expr) = stmt.kind { - if let Some(rhs) = expr.init { - if let PatKind::Binding(_, _, ident_l, _) = expr.pat.kind { - return Some((ExprOrIdent::Ident(ident_l), rhs)); - } - } + } else if let StmtKind::Let(expr) = stmt.kind + && let Some(rhs) = expr.init + && let PatKind::Binding(_, _, ident_l, _) = expr.pat.kind + { + return Some((ExprOrIdent::Ident(ident_l), rhs)); } None } @@ -307,7 +306,7 @@ fn extract_sides_of_xor_assign<'a, 'hir>( if let StmtKind::Semi(expr) = stmt.kind && let ExprKind::AssignOp( Spanned { - node: BinOpKind::BitXor, + node: AssignOpKind::BitXorAssign, .. }, lhs, diff --git a/src/tools/clippy/clippy_lints/src/to_digit_is_some.rs b/src/tools/clippy/clippy_lints/src/to_digit_is_some.rs index 9993e6ae18b9d..bb969bc802fe5 100644 --- a/src/tools/clippy/clippy_lints/src/to_digit_is_some.rs +++ b/src/tools/clippy/clippy_lints/src/to_digit_is_some.rs @@ -1,6 +1,6 @@ use clippy_utils::diagnostics::span_lint_and_sugg; -use clippy_utils::match_def_path; use clippy_utils::source::snippet_with_applicability; +use clippy_utils::{match_def_path, sym}; use rustc_errors::Applicability; use rustc_hir as hir; use rustc_lint::{LateContext, LateLintPass}; @@ -38,11 +38,11 @@ declare_lint_pass!(ToDigitIsSome => [TO_DIGIT_IS_SOME]); impl<'tcx> LateLintPass<'tcx> for ToDigitIsSome { fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx hir::Expr<'_>) { if let hir::ExprKind::MethodCall(is_some_path, to_digit_expr, [], _) = &expr.kind - && is_some_path.ident.name.as_str() == "is_some" + && is_some_path.ident.name == sym::is_some { let match_result = match &to_digit_expr.kind { hir::ExprKind::MethodCall(to_digits_path, char_arg, [radix_arg], _) => { - if to_digits_path.ident.name.as_str() == "to_digit" + if to_digits_path.ident.name == sym::to_digit && let char_arg_ty = cx.typeck_results().expr_ty_adjusted(char_arg) && *char_arg_ty.kind() == ty::Char { diff --git a/src/tools/clippy/clippy_lints/src/trait_bounds.rs b/src/tools/clippy/clippy_lints/src/trait_bounds.rs index fa36c9a21f65e..8aac3a5910294 100644 --- a/src/tools/clippy/clippy_lints/src/trait_bounds.rs +++ b/src/tools/clippy/clippy_lints/src/trait_bounds.rs @@ -151,20 +151,19 @@ impl<'tcx> LateLintPass<'tcx> for TraitBounds { .iter() .filter_map(get_trait_info_from_bound) .for_each(|(trait_item_res, trait_item_segments, span)| { - if let Some(self_segments) = self_bounds_map.get(&trait_item_res) { - if SpanlessEq::new(cx) + if let Some(self_segments) = self_bounds_map.get(&trait_item_res) + && SpanlessEq::new(cx) .paths_by_resolution() .eq_path_segments(self_segments, trait_item_segments) - { - span_lint_and_help( - cx, - TRAIT_DUPLICATION_IN_BOUNDS, - span, - "this trait bound is already specified in trait declaration", - None, - "consider removing this trait bound", - ); - } + { + span_lint_and_help( + cx, + TRAIT_DUPLICATION_IN_BOUNDS, + span, + "this trait bound is already specified in trait declaration", + None, + "consider removing this trait bound", + ); } }); } diff --git a/src/tools/clippy/clippy_lints/src/transmute/eager_transmute.rs b/src/tools/clippy/clippy_lints/src/transmute/eager_transmute.rs index 81c0a57083e80..1ccab62708b18 100644 --- a/src/tools/clippy/clippy_lints/src/transmute/eager_transmute.rs +++ b/src/tools/clippy/clippy_lints/src/transmute/eager_transmute.rs @@ -1,6 +1,6 @@ use clippy_utils::diagnostics::span_lint_and_then; use clippy_utils::ty::is_normalizable; -use clippy_utils::{eq_expr_value, path_to_local}; +use clippy_utils::{eq_expr_value, path_to_local, sym}; use rustc_abi::WrappingRange; use rustc_errors::Applicability; use rustc_hir::{Expr, ExprKind, Node}; @@ -43,7 +43,7 @@ fn binops_with_local(cx: &LateContext<'_>, local_expr: &Expr<'_>, expr: &Expr<'_ binops_with_local(cx, local_expr, lhs) || binops_with_local(cx, local_expr, rhs) }, ExprKind::MethodCall(path, receiver, [arg], _) - if path.ident.name.as_str() == "contains" + if path.ident.name == sym::contains // ... `contains` called on some kind of range && let Some(receiver_adt) = cx.typeck_results().expr_ty(receiver).peel_refs().ty_adt_def() && let lang_items = cx.tcx.lang_items() @@ -81,7 +81,7 @@ pub(super) fn check<'tcx>( if let Some(then_some_call) = peel_parent_unsafe_blocks(cx, expr) && let ExprKind::MethodCall(path, receiver, [arg], _) = then_some_call.kind && cx.typeck_results().expr_ty(receiver).is_bool() - && path.ident.name.as_str() == "then_some" + && path.ident.name == sym::then_some && is_local_with_projections(transmutable) && binops_with_local(cx, transmutable, receiver) && is_normalizable(cx, cx.param_env, from_ty) diff --git a/src/tools/clippy/clippy_lints/src/transmute/transmute_float_to_int.rs b/src/tools/clippy/clippy_lints/src/transmute/transmute_float_to_int.rs index f2c757952af38..df2f681a16291 100644 --- a/src/tools/clippy/clippy_lints/src/transmute/transmute_float_to_int.rs +++ b/src/tools/clippy/clippy_lints/src/transmute/transmute_float_to_int.rs @@ -47,7 +47,7 @@ pub(super) fn check<'tcx>( } } - sugg = sugg::Sugg::NonParen(format!("{}.to_bits()", sugg.maybe_par()).into()); + sugg = sugg::Sugg::NonParen(format!("{}.to_bits()", sugg.maybe_paren()).into()); // cast the result of `to_bits` if `to_ty` is signed sugg = if let ty::Int(int_ty) = to_ty.kind() { diff --git a/src/tools/clippy/clippy_lints/src/transmute/transmute_ptr_to_ptr.rs b/src/tools/clippy/clippy_lints/src/transmute/transmute_ptr_to_ptr.rs index fcc763763bd2f..933e25fe98c65 100644 --- a/src/tools/clippy/clippy_lints/src/transmute/transmute_ptr_to_ptr.rs +++ b/src/tools/clippy/clippy_lints/src/transmute/transmute_ptr_to_ptr.rs @@ -33,7 +33,7 @@ pub(super) fn check<'tcx>( diag.span_suggestion_verbose( e.span, "use `pointer::cast` instead", - format!("{}.cast::<{to_pointee_ty}>()", arg.maybe_par()), + format!("{}.cast::<{to_pointee_ty}>()", arg.maybe_paren()), Applicability::MaybeIncorrect, ); } else if from_pointee_ty == to_pointee_ty @@ -48,7 +48,7 @@ pub(super) fn check<'tcx>( diag.span_suggestion_verbose( e.span, format!("use `pointer::{method}` instead"), - format!("{}.{method}()", arg.maybe_par()), + format!("{}.{method}()", arg.maybe_paren()), Applicability::MaybeIncorrect, ); } else { diff --git a/src/tools/clippy/clippy_lints/src/transmute/transmute_ptr_to_ref.rs b/src/tools/clippy/clippy_lints/src/transmute/transmute_ptr_to_ref.rs index 45ee83c78ab67..e58212fae15cf 100644 --- a/src/tools/clippy/clippy_lints/src/transmute/transmute_ptr_to_ref.rs +++ b/src/tools/clippy/clippy_lints/src/transmute/transmute_ptr_to_ref.rs @@ -38,7 +38,7 @@ pub(super) fn check<'tcx>( let sugg = if let Some(ty) = get_explicit_type(path) { let ty_snip = snippet_with_applicability(cx, ty.span, "..", &mut app); if msrv.meets(cx, msrvs::POINTER_CAST) { - format!("{deref}{}.cast::<{ty_snip}>()", arg.maybe_par()) + format!("{deref}{}.cast::<{ty_snip}>()", arg.maybe_paren()) } else if from_ptr_ty.has_erased_regions() { sugg::make_unop(deref, arg.as_ty(format!("{cast} () as {cast} {ty_snip}"))).to_string() } else { @@ -47,7 +47,7 @@ pub(super) fn check<'tcx>( } else if *from_ptr_ty == *to_ref_ty { if from_ptr_ty.has_erased_regions() { if msrv.meets(cx, msrvs::POINTER_CAST) { - format!("{deref}{}.cast::<{to_ref_ty}>()", arg.maybe_par()) + format!("{deref}{}.cast::<{to_ref_ty}>()", arg.maybe_paren()) } else { sugg::make_unop(deref, arg.as_ty(format!("{cast} () as {cast} {to_ref_ty}"))) .to_string() diff --git a/src/tools/clippy/clippy_lints/src/types/mod.rs b/src/tools/clippy/clippy_lints/src/types/mod.rs index b6f4c4d7f0a41..3147058b4cda0 100644 --- a/src/tools/clippy/clippy_lints/src/types/mod.rs +++ b/src/tools/clippy/clippy_lints/src/types/mod.rs @@ -591,26 +591,26 @@ impl Types { TyKind::Path(ref qpath) if !context.in_body => { let hir_id = hir_ty.hir_id; let res = cx.qpath_res(qpath, hir_id); - if let Some(def_id) = res.opt_def_id() { - if self.is_type_change_allowed(context) { - // All lints that are being checked in this block are guarded by - // the `avoid_breaking_exported_api` configuration. When adding a - // new lint, please also add the name to the configuration documentation - // in `clippy_config::conf` - - let mut triggered = false; - triggered |= box_collection::check(cx, hir_ty, qpath, def_id); - triggered |= redundant_allocation::check(cx, hir_ty, qpath, def_id); - triggered |= rc_buffer::check(cx, hir_ty, qpath, def_id); - triggered |= vec_box::check(cx, hir_ty, qpath, def_id, self.vec_box_size_threshold); - triggered |= option_option::check(cx, hir_ty, qpath, def_id); - triggered |= linked_list::check(cx, hir_ty, def_id); - triggered |= rc_mutex::check(cx, hir_ty, qpath, def_id); - triggered |= owned_cow::check(cx, qpath, def_id); - - if triggered { - return; - } + if let Some(def_id) = res.opt_def_id() + && self.is_type_change_allowed(context) + { + // All lints that are being checked in this block are guarded by + // the `avoid_breaking_exported_api` configuration. When adding a + // new lint, please also add the name to the configuration documentation + // in `clippy_config::conf` + + let mut triggered = false; + triggered |= box_collection::check(cx, hir_ty, qpath, def_id); + triggered |= redundant_allocation::check(cx, hir_ty, qpath, def_id); + triggered |= rc_buffer::check(cx, hir_ty, qpath, def_id); + triggered |= vec_box::check(cx, hir_ty, qpath, def_id, self.vec_box_size_threshold); + triggered |= option_option::check(cx, hir_ty, qpath, def_id); + triggered |= linked_list::check(cx, hir_ty, def_id); + triggered |= rc_mutex::check(cx, hir_ty, qpath, def_id); + triggered |= owned_cow::check(cx, qpath, def_id); + + if triggered { + return; } } match *qpath { diff --git a/src/tools/clippy/clippy_lints/src/types/vec_box.rs b/src/tools/clippy/clippy_lints/src/types/vec_box.rs index 769244c675e1b..f13042a6fa6bf 100644 --- a/src/tools/clippy/clippy_lints/src/types/vec_box.rs +++ b/src/tools/clippy/clippy_lints/src/types/vec_box.rs @@ -19,61 +19,58 @@ pub(super) fn check<'tcx>( def_id: DefId, box_size_threshold: u64, ) -> bool { - if cx.tcx.is_diagnostic_item(sym::Vec, def_id) { - if let Some(last) = last_path_segment(qpath).args - // Get the _ part of Vec<_> - && let Some(GenericArg::Type(ty)) = last.args.first() - // extract allocator from the Vec for later - && let vec_alloc_ty = last.args.get(1) - // ty is now _ at this point - && let TyKind::Path(ref ty_qpath) = ty.kind - && let res = cx.qpath_res(ty_qpath, ty.hir_id) - && let Some(def_id) = res.opt_def_id() - && Some(def_id) == cx.tcx.lang_items().owned_box() - // At this point, we know ty is Box, now get T - && let Some(last) = last_path_segment(ty_qpath).args - && let Some(GenericArg::Type(boxed_ty)) = last.args.first() - // extract allocator from the Box for later - && let boxed_alloc_ty = last.args.get(1) - // we don't expect to encounter `_` here so ignore `GenericArg::Infer` is okay - && let ty_ty = lower_ty(cx.tcx, boxed_ty.as_unambig_ty()) - && !ty_ty.has_escaping_bound_vars() - && ty_ty.is_sized(cx.tcx, cx.typing_env()) - && let Ok(ty_ty_size) = cx.layout_of(ty_ty).map(|l| l.size.bytes()) - && ty_ty_size < box_size_threshold - // https://github.com/rust-lang/rust-clippy/issues/7114 - && match (vec_alloc_ty, boxed_alloc_ty) { - (None, None) => true, - // this is in the event that we have something like - // Vec<_, Global>, in which case is equivalent to - // Vec<_> - (None, Some(GenericArg::Type(inner))) | (Some(GenericArg::Type(inner)), None) => { - if let TyKind::Path(path) = inner.kind - && let Some(did) = cx.qpath_res(&path, inner.hir_id).opt_def_id() { - cx.tcx.lang_items().get(LangItem::GlobalAlloc) == Some(did) - } else { - false - } - }, - (Some(GenericArg::Type(l)), Some(GenericArg::Type(r))) => - // we don't expect to encounter `_` here so ignore `GenericArg::Infer` is okay - lower_ty(cx.tcx, l.as_unambig_ty()) == lower_ty(cx.tcx, r.as_unambig_ty()), - _ => false - } - { - span_lint_and_sugg( - cx, - VEC_BOX, - hir_ty.span, - "`Vec` is already on the heap, the boxing is unnecessary", - "try", - format!("Vec<{}>", snippet(cx, boxed_ty.span, "..")), - Applicability::Unspecified, - ); - true - } else { - false + if cx.tcx.is_diagnostic_item(sym::Vec, def_id) + && let Some(last) = last_path_segment(qpath).args + // Get the _ part of Vec<_> + && let Some(GenericArg::Type(ty)) = last.args.first() + // extract allocator from the Vec for later + && let vec_alloc_ty = last.args.get(1) + // ty is now _ at this point + && let TyKind::Path(ref ty_qpath) = ty.kind + && let res = cx.qpath_res(ty_qpath, ty.hir_id) + && let Some(def_id) = res.opt_def_id() + && Some(def_id) == cx.tcx.lang_items().owned_box() + // At this point, we know ty is Box, now get T + && let Some(last) = last_path_segment(ty_qpath).args + && let Some(GenericArg::Type(boxed_ty)) = last.args.first() + // extract allocator from the Box for later + && let boxed_alloc_ty = last.args.get(1) + // we don't expect to encounter `_` here so ignore `GenericArg::Infer` is okay + && let ty_ty = lower_ty(cx.tcx, boxed_ty.as_unambig_ty()) + && !ty_ty.has_escaping_bound_vars() + && ty_ty.is_sized(cx.tcx, cx.typing_env()) + && let Ok(ty_ty_size) = cx.layout_of(ty_ty).map(|l| l.size.bytes()) + && ty_ty_size < box_size_threshold + // https://github.com/rust-lang/rust-clippy/issues/7114 + && match (vec_alloc_ty, boxed_alloc_ty) { + (None, None) => true, + // this is in the event that we have something like + // Vec<_, Global>, in which case is equivalent to + // Vec<_> + (None, Some(GenericArg::Type(inner))) | (Some(GenericArg::Type(inner)), None) => { + if let TyKind::Path(path) = inner.kind + && let Some(did) = cx.qpath_res(&path, inner.hir_id).opt_def_id() { + cx.tcx.lang_items().get(LangItem::GlobalAlloc) == Some(did) + } else { + false + } + }, + (Some(GenericArg::Type(l)), Some(GenericArg::Type(r))) => + // we don't expect to encounter `_` here so ignore `GenericArg::Infer` is okay + lower_ty(cx.tcx, l.as_unambig_ty()) == lower_ty(cx.tcx, r.as_unambig_ty()), + _ => false } + { + span_lint_and_sugg( + cx, + VEC_BOX, + hir_ty.span, + "`Vec` is already on the heap, the boxing is unnecessary", + "try", + format!("Vec<{}>", snippet(cx, boxed_ty.span, "..")), + Applicability::Unspecified, + ); + true } else { false } diff --git a/src/tools/clippy/clippy_lints/src/unconditional_recursion.rs b/src/tools/clippy/clippy_lints/src/unconditional_recursion.rs index 51c7d6fce3128..d321c48f6aff8 100644 --- a/src/tools/clippy/clippy_lints/src/unconditional_recursion.rs +++ b/src/tools/clippy/clippy_lints/src/unconditional_recursion.rs @@ -10,7 +10,7 @@ use rustc_hir::{Body, Expr, ExprKind, FnDecl, HirId, Item, ItemKind, Node, QPath use rustc_hir_analysis::lower_ty; use rustc_lint::{LateContext, LateLintPass}; use rustc_middle::hir::nested_filter; -use rustc_middle::ty::{self, AssocKind, Ty, TyCtxt}; +use rustc_middle::ty::{self, Ty, TyCtxt}; use rustc_session::impl_lint_pass; use rustc_span::symbol::{Ident, kw}; use rustc_span::{Span, sym}; @@ -23,8 +23,8 @@ declare_clippy_lint! { /// implementations. /// /// ### Why is this bad? - /// This is a hard to find infinite recursion that will crash any code - /// using it. + /// Infinite recursion in trait implementation will either cause crashes + /// or result in an infinite loop, and it is hard to detect. /// /// ### Example /// ```no_run @@ -39,9 +39,31 @@ declare_clippy_lint! { /// } /// } /// ``` + /// /// Use instead: /// - /// In such cases, either use `#[derive(PartialEq)]` or don't implement it. + /// ```no_run + /// #[derive(PartialEq)] + /// enum Foo { + /// A, + /// B, + /// } + /// ``` + /// + /// As an alternative, rewrite the logic without recursion: + /// + /// ```no_run + /// enum Foo { + /// A, + /// B, + /// } + /// + /// impl PartialEq for Foo { + /// fn eq(&self, other: &Self) -> bool { + /// matches!((self, other), (Foo::A, Foo::A) | (Foo::B, Foo::B)) + /// } + /// } + /// ``` #[clippy::version = "1.77.0"] pub UNCONDITIONAL_RECURSION, suspicious, @@ -113,7 +135,7 @@ fn get_impl_trait_def_id(cx: &LateContext<'_>, method_def_id: LocalDefId) -> Opt }), )) = cx.tcx.hir_parent_iter(hir_id).next() // We exclude `impl` blocks generated from rustc's proc macros. - && !cx.tcx.has_attr(*owner_id, sym::automatically_derived) + && !cx.tcx.is_automatically_derived(owner_id.to_def_id()) // It is a implementation of a trait. && let Some(trait_) = impl_.of_trait { @@ -218,7 +240,7 @@ fn check_to_string(cx: &LateContext<'_>, method_span: Span, method_def_id: Local }), )) = cx.tcx.hir_parent_iter(hir_id).next() // We exclude `impl` blocks generated from rustc's proc macros. - && !cx.tcx.has_attr(*owner_id, sym::automatically_derived) + && !cx.tcx.is_automatically_derived(owner_id.to_def_id()) // It is a implementation of a trait. && let Some(trait_) = impl_.of_trait && let Some(trait_def_id) = trait_.trait_def_id() @@ -315,14 +337,14 @@ impl UnconditionalRecursion { for (ty, impl_def_ids) in impls.non_blanket_impls() { let Some(self_def_id) = ty.def() else { continue }; for impl_def_id in impl_def_ids { - if !cx.tcx.has_attr(*impl_def_id, sym::automatically_derived) && + if !cx.tcx.is_automatically_derived(*impl_def_id) && let Some(assoc_item) = cx .tcx .associated_items(impl_def_id) .in_definition_order() // We're not interested in foreign implementations of the `Default` trait. .find(|item| { - item.kind == AssocKind::Fn && item.def_id.is_local() && item.name == kw::Default + item.is_fn() && item.def_id.is_local() && item.name() == kw::Default }) && let Some(body_node) = cx.tcx.hir_get_if_local(assoc_item.def_id) && let Some(body_id) = body_node.body_id() diff --git a/src/tools/clippy/clippy_lints/src/unicode.rs b/src/tools/clippy/clippy_lints/src/unicode.rs index e1fc644e4ceeb..79571b0409d21 100644 --- a/src/tools/clippy/clippy_lints/src/unicode.rs +++ b/src/tools/clippy/clippy_lints/src/unicode.rs @@ -76,10 +76,10 @@ declare_lint_pass!(Unicode => [INVISIBLE_CHARACTERS, NON_ASCII_LITERAL, UNICODE_ impl LateLintPass<'_> for Unicode { fn check_expr(&mut self, cx: &LateContext<'_>, expr: &'_ Expr<'_>) { - if let ExprKind::Lit(lit) = expr.kind { - if let LitKind::Str(_, _) | LitKind::Char(_) = lit.node { - check_str(cx, lit.span, expr.hir_id); - } + if let ExprKind::Lit(lit) = expr.kind + && let LitKind::Str(_, _) | LitKind::Char(_) = lit.node + { + check_str(cx, lit.span, expr.hir_id); } } } diff --git a/src/tools/clippy/clippy_lints/src/uninit_vec.rs b/src/tools/clippy/clippy_lints/src/uninit_vec.rs index 7803d5115c971..cee4a53f03cbe 100644 --- a/src/tools/clippy/clippy_lints/src/uninit_vec.rs +++ b/src/tools/clippy/clippy_lints/src/uninit_vec.rs @@ -1,12 +1,12 @@ use clippy_utils::diagnostics::{span_lint, span_lint_and_then}; use clippy_utils::higher::{VecInitKind, get_vec_init_kind}; use clippy_utils::ty::{is_type_diagnostic_item, is_uninit_value_valid_for_ty}; -use clippy_utils::{SpanlessEq, is_integer_literal, is_lint_allowed, path_to_local_id, peel_hir_expr_while}; +use clippy_utils::{SpanlessEq, is_integer_literal, is_lint_allowed, path_to_local_id, peel_hir_expr_while, sym}; use rustc_hir::{Block, Expr, ExprKind, HirId, PatKind, PathSegment, Stmt, StmtKind}; use rustc_lint::{LateContext, LateLintPass}; use rustc_middle::ty; use rustc_session::declare_lint_pass; -use rustc_span::{Span, sym}; +use rustc_span::Span; // TODO: add `ReadBuf` (RFC 2930) in "How to fix" once it is available in std declare_clippy_lint! { @@ -187,7 +187,7 @@ fn extract_init_or_reserve_target<'tcx>(cx: &LateContext<'tcx>, stmt: &'tcx Stmt fn is_reserve(cx: &LateContext<'_>, path: &PathSegment<'_>, self_expr: &Expr<'_>) -> bool { is_type_diagnostic_item(cx, cx.typeck_results().expr_ty(self_expr).peel_refs(), sym::Vec) - && path.ident.name.as_str() == "reserve" + && path.ident.name == sym::reserve } /// Returns self if the expression is `Vec::set_len()` @@ -209,7 +209,7 @@ fn extract_set_len_self<'tcx>(cx: &LateContext<'_>, expr: &'tcx Expr<'_>) -> Opt ExprKind::MethodCall(path, self_expr, [arg], _) => { let self_type = cx.typeck_results().expr_ty(self_expr).peel_refs(); if is_type_diagnostic_item(cx, self_type, sym::Vec) - && path.ident.name.as_str() == "set_len" + && path.ident.name == sym::set_len && !is_integer_literal(arg, 0) { Some((self_expr, expr.span)) diff --git a/src/tools/clippy/clippy_lints/src/unnecessary_wraps.rs b/src/tools/clippy/clippy_lints/src/unnecessary_wraps.rs index 937e35dea96d9..bcd05cceca9c3 100644 --- a/src/tools/clippy/clippy_lints/src/unnecessary_wraps.rs +++ b/src/tools/clippy/clippy_lints/src/unnecessary_wraps.rs @@ -93,13 +93,13 @@ impl<'tcx> LateLintPass<'tcx> for UnnecessaryWraps { // Abort if the method is implementing a trait or of it a trait method. let hir_id = cx.tcx.local_def_id_to_hir_id(def_id); - if let Node::Item(item) = cx.tcx.parent_hir_node(hir_id) { - if matches!( + if let Node::Item(item) = cx.tcx.parent_hir_node(hir_id) + && matches!( item.kind, ItemKind::Impl(Impl { of_trait: Some(_), .. }) | ItemKind::Trait(..) - ) { - return; - } + ) + { + return; } // Get the wrapper and inner types, if can't, abort. diff --git a/src/tools/clippy/clippy_lints/src/unnested_or_patterns.rs b/src/tools/clippy/clippy_lints/src/unnested_or_patterns.rs index f43715d6752e3..9ad184450de43 100644 --- a/src/tools/clippy/clippy_lints/src/unnested_or_patterns.rs +++ b/src/tools/clippy/clippy_lints/src/unnested_or_patterns.rs @@ -69,10 +69,10 @@ impl EarlyLintPass for UnnestedOrPatterns { } fn check_expr(&mut self, cx: &EarlyContext<'_>, e: &ast::Expr) { - if self.msrv.meets(msrvs::OR_PATTERNS) { - if let ast::ExprKind::Let(pat, _, _, _) = &e.kind { - lint_unnested_or_patterns(cx, pat); - } + if self.msrv.meets(msrvs::OR_PATTERNS) + && let ast::ExprKind::Let(pat, _, _, _) = &e.kind + { + lint_unnested_or_patterns(cx, pat); } } @@ -120,18 +120,25 @@ fn lint_unnested_or_patterns(cx: &EarlyContext<'_>, pat: &Pat) { /// Remove all `(p)` patterns in `pat`. fn remove_all_parens(pat: &mut P) { - struct Visitor; + #[derive(Default)] + struct Visitor { + /// If is not in the outer most pattern. This is needed to avoid removing the outermost + /// parens because top-level or-patterns are not allowed in let statements. + is_inner: bool, + } + impl MutVisitor for Visitor { fn visit_pat(&mut self, pat: &mut P) { + let is_inner = mem::replace(&mut self.is_inner, true); walk_pat(self, pat); let inner = match &mut pat.kind { - Paren(i) => mem::replace(&mut i.kind, Wild), + Paren(i) if is_inner => mem::replace(&mut i.kind, Wild), _ => return, }; pat.kind = inner; } } - Visitor.visit_pat(pat); + Visitor::default().visit_pat(pat); } /// Insert parens where necessary according to Rust's precedence rules for patterns. @@ -224,6 +231,7 @@ fn transform_with_focus_on_idx(alternatives: &mut ThinVec>, focus_idx: us // We're trying to find whatever kind (~"constructor") we found in `alternatives[start..]`. let changed = match &mut focus_kind { + Missing => unreachable!(), // These pattern forms are "leafs" and do not have sub-patterns. // Therefore they are not some form of constructor `C`, // with which a pattern `C(p_0)` may be formed, diff --git a/src/tools/clippy/clippy_lints/src/unused_io_amount.rs b/src/tools/clippy/clippy_lints/src/unused_io_amount.rs index 0687fc319af68..2d88c490b1abe 100644 --- a/src/tools/clippy/clippy_lints/src/unused_io_amount.rs +++ b/src/tools/clippy/clippy_lints/src/unused_io_amount.rs @@ -265,15 +265,14 @@ fn unpack_match<'a>(mut expr: &'a hir::Expr<'a>) -> &'a hir::Expr<'a> { /// If `expr` is an (e).await, return the inner expression "e" that's being /// waited on. Otherwise return None. fn unpack_await<'a>(expr: &'a hir::Expr<'a>) -> &'a hir::Expr<'a> { - if let ExprKind::Match(expr, _, hir::MatchSource::AwaitDesugar) = expr.kind { - if let ExprKind::Call(func, [arg_0]) = expr.kind { - if matches!( - func.kind, - ExprKind::Path(hir::QPath::LangItem(hir::LangItem::IntoFutureIntoFuture, ..)) - ) { - return arg_0; - } - } + if let ExprKind::Match(expr, _, hir::MatchSource::AwaitDesugar) = expr.kind + && let ExprKind::Call(func, [arg_0]) = expr.kind + && matches!( + func.kind, + ExprKind::Path(hir::QPath::LangItem(hir::LangItem::IntoFutureIntoFuture, ..)) + ) + { + return arg_0; } expr } diff --git a/src/tools/clippy/clippy_lints/src/unused_self.rs b/src/tools/clippy/clippy_lints/src/unused_self.rs index 582aa6e6001e8..12da891a71b11 100644 --- a/src/tools/clippy/clippy_lints/src/unused_self.rs +++ b/src/tools/clippy/clippy_lints/src/unused_self.rs @@ -1,6 +1,7 @@ use clippy_config::Conf; use clippy_utils::diagnostics::span_lint_and_help; use clippy_utils::macros::root_macro_call_first_node; +use clippy_utils::sym; use clippy_utils::visitors::is_local_used; use rustc_hir::{Body, Impl, ImplItem, ImplItemKind, ItemKind}; use rustc_lint::{LateContext, LateLintPass}; @@ -61,12 +62,10 @@ impl<'tcx> LateLintPass<'tcx> for UnusedSelf { let assoc_item = cx.tcx.associated_item(impl_item.owner_id); let contains_todo = |cx, body: &'_ Body<'_>| -> bool { clippy_utils::visitors::for_each_expr_without_closures(body.value, |e| { - if let Some(macro_call) = root_macro_call_first_node(cx, e) { - if cx.tcx.item_name(macro_call.def_id).as_str() == "todo" { - ControlFlow::Break(()) - } else { - ControlFlow::Continue(()) - } + if let Some(macro_call) = root_macro_call_first_node(cx, e) + && cx.tcx.is_diagnostic_item(sym::todo_macro, macro_call.def_id) + { + ControlFlow::Break(()) } else { ControlFlow::Continue(()) } @@ -74,7 +73,7 @@ impl<'tcx> LateLintPass<'tcx> for UnusedSelf { .is_some() }; if let ItemKind::Impl(Impl { of_trait: None, .. }) = parent_item.kind - && assoc_item.fn_has_self_parameter + && assoc_item.is_method() && let ImplItemKind::Fn(.., body_id) = &impl_item.kind && (!cx.effective_visibilities.is_exported(impl_item.owner_id.def_id) || !self.avoid_breaking_exported_api) && let body = cx.tcx.hir_body(*body_id) diff --git a/src/tools/clippy/clippy_lints/src/unused_unit.rs b/src/tools/clippy/clippy_lints/src/unused_unit.rs index d5309aade7aac..9859ddfdf7bde 100644 --- a/src/tools/clippy/clippy_lints/src/unused_unit.rs +++ b/src/tools/clippy/clippy_lints/src/unused_unit.rs @@ -1,11 +1,18 @@ use clippy_utils::diagnostics::span_lint_and_sugg; use clippy_utils::source::{SpanRangeExt, position_before_rarrow}; -use rustc_ast::visit::FnKind; -use rustc_ast::{ClosureBinder, ast}; +use clippy_utils::{is_never_expr, is_unit_expr}; +use rustc_ast::{Block, StmtKind}; use rustc_errors::Applicability; -use rustc_lint::{EarlyContext, EarlyLintPass}; +use rustc_hir::def_id::LocalDefId; +use rustc_hir::intravisit::FnKind; +use rustc_hir::{ + AssocItemConstraintKind, Body, Expr, ExprKind, FnDecl, FnRetTy, GenericArgsParentheses, Node, PolyTraitRef, Term, + Ty, TyKind, +}; +use rustc_lint::{EarlyContext, EarlyLintPass, LateContext, LateLintPass}; use rustc_session::declare_lint_pass; -use rustc_span::{BytePos, Span}; +use rustc_span::edition::Edition; +use rustc_span::{BytePos, Span, sym}; declare_clippy_lint! { /// ### What it does @@ -34,27 +41,89 @@ declare_clippy_lint! { declare_lint_pass!(UnusedUnit => [UNUSED_UNIT]); -impl EarlyLintPass for UnusedUnit { - fn check_fn(&mut self, cx: &EarlyContext<'_>, kind: FnKind<'_>, span: Span, _: ast::NodeId) { - if let ast::FnRetTy::Ty(ref ty) = kind.decl().output - && let ast::TyKind::Tup(ref vals) = ty.kind - && vals.is_empty() - && !ty.span.from_expansion() - && get_def(span) == get_def(ty.span) +impl<'tcx> LateLintPass<'tcx> for UnusedUnit { + fn check_fn( + &mut self, + cx: &LateContext<'tcx>, + kind: FnKind<'tcx>, + decl: &'tcx FnDecl<'tcx>, + body: &'tcx Body<'tcx>, + span: Span, + def_id: LocalDefId, + ) { + if let FnRetTy::Return(hir_ty) = decl.output + && is_unit_ty(hir_ty) + && !hir_ty.span.from_expansion() + && get_def(span) == get_def(hir_ty.span) { // implicit types in closure signatures are forbidden when `for<...>` is present - if let FnKind::Closure(&ClosureBinder::For { .. }, ..) = kind { + if let FnKind::Closure = kind + && let Node::Expr(expr) = cx.tcx.hir_node_by_def_id(def_id) + && let ExprKind::Closure(closure) = expr.kind + && !closure.bound_generic_params.is_empty() + { + return; + } + + // unit never type fallback is no longer supported since Rust 2024. For more information, + // see + if cx.tcx.sess.edition() >= Edition::Edition2024 + && let ExprKind::Block(block, _) = body.value.kind + && let Some(expr) = block.expr + && is_never_expr(cx, expr).is_some() + { return; } - lint_unneeded_unit_return(cx, ty, span); + lint_unneeded_unit_return(cx, hir_ty.span, span); } } - fn check_block(&mut self, cx: &EarlyContext<'_>, block: &ast::Block) { - if let Some(stmt) = block.stmts.last() - && let ast::StmtKind::Expr(ref expr) = stmt.kind + fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'tcx>) { + if let ExprKind::Ret(Some(expr)) | ExprKind::Break(_, Some(expr)) = expr.kind && is_unit_expr(expr) + && !expr.span.from_expansion() + { + span_lint_and_sugg( + cx, + UNUSED_UNIT, + expr.span, + "unneeded `()`", + "remove the `()`", + String::new(), + Applicability::MachineApplicable, + ); + } + } + + fn check_poly_trait_ref(&mut self, cx: &LateContext<'tcx>, poly: &'tcx PolyTraitRef<'tcx>) { + let segments = &poly.trait_ref.path.segments; + + if segments.len() == 1 + && ["Fn", "FnMut", "FnOnce"].contains(&segments[0].ident.name.as_str()) + && let Some(args) = segments[0].args + && args.parenthesized == GenericArgsParentheses::ParenSugar + && let constraints = &args.constraints + && constraints.len() == 1 + && constraints[0].ident.name == sym::Output + && let AssocItemConstraintKind::Equality { term: Term::Ty(hir_ty) } = constraints[0].kind + && args.span_ext.hi() != poly.span.hi() + && !hir_ty.span.from_expansion() + && is_unit_ty(hir_ty) + { + lint_unneeded_unit_return(cx, hir_ty.span, poly.span); + } + } +} + +impl EarlyLintPass for UnusedUnit { + /// Check for unit expressions in blocks. This is left in the early pass because some macros + /// expand its inputs as-is, making it invisible to the late pass. See #4076. + fn check_block(&mut self, cx: &EarlyContext<'_>, block: &Block) { + if let Some(stmt) = block.stmts.last() + && let StmtKind::Expr(expr) = &stmt.kind + && let rustc_ast::ExprKind::Tup(inner) = &expr.kind + && inner.is_empty() && let ctxt = block.span.ctxt() && stmt.span.ctxt() == ctxt && expr.span.ctxt() == ctxt @@ -72,39 +141,10 @@ impl EarlyLintPass for UnusedUnit { ); } } +} - fn check_expr(&mut self, cx: &EarlyContext<'_>, e: &ast::Expr) { - match e.kind { - ast::ExprKind::Ret(Some(ref expr)) | ast::ExprKind::Break(_, Some(ref expr)) => { - if is_unit_expr(expr) && !expr.span.from_expansion() { - span_lint_and_sugg( - cx, - UNUSED_UNIT, - expr.span, - "unneeded `()`", - "remove the `()`", - String::new(), - Applicability::MachineApplicable, - ); - } - }, - _ => (), - } - } - - fn check_poly_trait_ref(&mut self, cx: &EarlyContext<'_>, poly: &ast::PolyTraitRef) { - let segments = &poly.trait_ref.path.segments; - - if segments.len() == 1 - && ["Fn", "FnMut", "FnOnce"].contains(&segments[0].ident.name.as_str()) - && let Some(args) = &segments[0].args - && let ast::GenericArgs::Parenthesized(generic_args) = &**args - && let ast::FnRetTy::Ty(ty) = &generic_args.output - && ty.kind.is_unit() - { - lint_unneeded_unit_return(cx, ty, generic_args.span); - } - } +fn is_unit_ty(ty: &Ty<'_>) -> bool { + matches!(ty.kind, TyKind::Tup([])) } // get the def site @@ -117,24 +157,15 @@ fn get_def(span: Span) -> Option { } } -// is this expr a `()` unit? -fn is_unit_expr(expr: &ast::Expr) -> bool { - if let ast::ExprKind::Tup(ref vals) = expr.kind { - vals.is_empty() - } else { - false - } -} - -fn lint_unneeded_unit_return(cx: &EarlyContext<'_>, ty: &ast::Ty, span: Span) { +fn lint_unneeded_unit_return(cx: &LateContext<'_>, ty_span: Span, span: Span) { let (ret_span, appl) = - span.with_hi(ty.span.hi()) + span.with_hi(ty_span.hi()) .get_source_text(cx) - .map_or((ty.span, Applicability::MaybeIncorrect), |src| { - position_before_rarrow(&src).map_or((ty.span, Applicability::MaybeIncorrect), |rpos| { + .map_or((ty_span, Applicability::MaybeIncorrect), |src| { + position_before_rarrow(&src).map_or((ty_span, Applicability::MaybeIncorrect), |rpos| { ( #[expect(clippy::cast_possible_truncation)] - ty.span.with_lo(BytePos(span.lo().0 + rpos as u32)), + ty_span.with_lo(BytePos(span.lo().0 + rpos as u32)), Applicability::MachineApplicable, ) }) diff --git a/src/tools/clippy/clippy_lints/src/unwrap.rs b/src/tools/clippy/clippy_lints/src/unwrap.rs index b466a8e127a94..ba140788bb54e 100644 --- a/src/tools/clippy/clippy_lints/src/unwrap.rs +++ b/src/tools/clippy/clippy_lints/src/unwrap.rs @@ -1,7 +1,7 @@ use clippy_utils::diagnostics::span_lint_hir_and_then; use clippy_utils::ty::is_type_diagnostic_item; use clippy_utils::usage::is_potentially_local_place; -use clippy_utils::{higher, path_to_local}; +use clippy_utils::{higher, path_to_local, sym}; use rustc_errors::Applicability; use rustc_hir::intravisit::{FnKind, Visitor, walk_expr, walk_fn}; use rustc_hir::{BinOpKind, Body, Expr, ExprKind, FnDecl, HirId, Node, PathSegment, UnOp}; @@ -11,8 +11,8 @@ use rustc_middle::hir::nested_filter; use rustc_middle::mir::FakeReadCause; use rustc_middle::ty::{self, Ty, TyCtxt}; use rustc_session::declare_lint_pass; +use rustc_span::Span; use rustc_span::def_id::LocalDefId; -use rustc_span::{Span, sym}; declare_clippy_lint! { /// ### What it does @@ -208,7 +208,7 @@ fn is_option_as_mut_use(tcx: TyCtxt<'_>, expr_id: HirId) -> bool { if let Node::Expr(mutating_expr) = tcx.parent_hir_node(expr_id) && let ExprKind::MethodCall(path, _, [], _) = mutating_expr.kind { - path.ident.name.as_str() == "as_mut" + path.ident.name == sym::as_mut } else { false } @@ -278,7 +278,7 @@ fn consume_option_as_ref<'tcx>(expr: &'tcx Expr<'tcx>) -> (&'tcx Expr<'tcx>, Opt if let ExprKind::MethodCall(path, recv, [], _) = expr.kind { if path.ident.name == sym::as_ref { (recv, Some(AsRefKind::AsRef)) - } else if path.ident.name.as_str() == "as_mut" { + } else if path.ident.name == sym::as_mut { (recv, Some(AsRefKind::AsMut)) } else { (expr, None) @@ -307,8 +307,8 @@ impl<'tcx> Visitor<'tcx> for UnwrappableVariablesVisitor<'_, 'tcx> { if let ExprKind::MethodCall(method_name, self_arg, ..) = expr.kind && let (self_arg, as_ref_kind) = consume_option_as_ref(self_arg) && let Some(id) = path_to_local(self_arg) - && [sym::unwrap, sym::expect, sym!(unwrap_err)].contains(&method_name.ident.name) - && let call_to_unwrap = [sym::unwrap, sym::expect].contains(&method_name.ident.name) + && matches!(method_name.ident.name, sym::unwrap | sym::expect | sym::unwrap_err) + && let call_to_unwrap = matches!(method_name.ident.name, sym::unwrap | sym::expect) && let Some(unwrappable) = self.unwrappables.iter() .find(|u| u.local_id == id) // Span contexts should not differ with the conditional branch diff --git a/src/tools/clippy/clippy_lints/src/useless_conversion.rs b/src/tools/clippy/clippy_lints/src/useless_conversion.rs index 57bb2fc27f145..3a9c997a579d1 100644 --- a/src/tools/clippy/clippy_lints/src/useless_conversion.rs +++ b/src/tools/clippy/clippy_lints/src/useless_conversion.rs @@ -92,36 +92,36 @@ fn into_iter_bound<'tcx>( let mut into_iter_span = None; for (pred, span) in cx.tcx.explicit_predicates_of(fn_did).predicates { - if let ty::ClauseKind::Trait(tr) = pred.kind().skip_binder() { - if tr.self_ty().is_param(param_index) { - if tr.def_id() == into_iter_did { - into_iter_span = Some(*span); - } else { - let tr = cx.tcx.erase_regions(tr); - if tr.has_escaping_bound_vars() { - return None; - } - - // Substitute generics in the predicate and replace the IntoIterator type parameter with the - // `.into_iter()` receiver to see if the bound also holds for that type. - let args = cx.tcx.mk_args_from_iter(node_args.iter().enumerate().map(|(i, arg)| { - if i == param_index as usize { - GenericArg::from(into_iter_receiver) - } else { - arg - } - })); + if let ty::ClauseKind::Trait(tr) = pred.kind().skip_binder() + && tr.self_ty().is_param(param_index) + { + if tr.def_id() == into_iter_did { + into_iter_span = Some(*span); + } else { + let tr = cx.tcx.erase_regions(tr); + if tr.has_escaping_bound_vars() { + return None; + } - let predicate = EarlyBinder::bind(tr).instantiate(cx.tcx, args); - let obligation = Obligation::new(cx.tcx, ObligationCause::dummy(), cx.param_env, predicate); - if !cx - .tcx - .infer_ctxt() - .build(cx.typing_mode()) - .predicate_must_hold_modulo_regions(&obligation) - { - return None; + // Substitute generics in the predicate and replace the IntoIterator type parameter with the + // `.into_iter()` receiver to see if the bound also holds for that type. + let args = cx.tcx.mk_args_from_iter(node_args.iter().enumerate().map(|(i, arg)| { + if i == param_index as usize { + GenericArg::from(into_iter_receiver) + } else { + arg } + })); + + let predicate = EarlyBinder::bind(tr).instantiate(cx.tcx, args); + let obligation = Obligation::new(cx.tcx, ObligationCause::dummy(), cx.param_env, predicate); + if !cx + .tcx + .infer_ctxt() + .build(cx.typing_mode()) + .predicate_must_hold_modulo_regions(&obligation) + { + return None; } } } @@ -356,7 +356,7 @@ impl<'tcx> LateLintPass<'tcx> for UselessConversion { if cx.tcx.is_diagnostic_item(sym::from_fn, def_id) && same_type_and_consts(a, b) { let mut app = Applicability::MachineApplicable; - let sugg = Sugg::hir_with_context(cx, arg, e.span.ctxt(), "", &mut app).maybe_par(); + let sugg = Sugg::hir_with_context(cx, arg, e.span.ctxt(), "", &mut app).maybe_paren(); let sugg_msg = format!("consider removing `{}()`", snippet(cx, path.span, "From::from")); span_lint_and_sugg( cx, diff --git a/src/tools/clippy/clippy_lints/src/utils/author.rs b/src/tools/clippy/clippy_lints/src/utils/author.rs index 4309cd2c9abdf..b7dcd2ffb0eea 100644 --- a/src/tools/clippy/clippy_lints/src/utils/author.rs +++ b/src/tools/clippy/clippy_lints/src/utils/author.rs @@ -676,6 +676,7 @@ impl<'a, 'tcx> PrintVisitor<'a, 'tcx> { } match pat.value.kind { + PatKind::Missing => unreachable!(), PatKind::Wild => kind!("Wild"), PatKind::Never => kind!("Never"), PatKind::Binding(ann, _, name, sub) => { diff --git a/src/tools/clippy/clippy_lints/src/utils/internal_lints.rs b/src/tools/clippy/clippy_lints/src/utils/internal_lints.rs deleted file mode 100644 index deb983b6971dc..0000000000000 --- a/src/tools/clippy/clippy_lints/src/utils/internal_lints.rs +++ /dev/null @@ -1,11 +0,0 @@ -pub mod almost_standard_lint_formulation; -pub mod collapsible_calls; -pub mod interning_defined_symbol; -pub mod invalid_paths; -pub mod lint_without_lint_pass; -pub mod msrv_attr_impl; -pub mod outer_expn_data_pass; -pub mod produce_ice; -pub mod slow_symbol_comparisons; -pub mod unnecessary_def_path; -pub mod unsorted_clippy_utils_paths; diff --git a/src/tools/clippy/clippy_lints/src/utils/internal_lints/interning_defined_symbol.rs b/src/tools/clippy/clippy_lints/src/utils/internal_lints/interning_defined_symbol.rs deleted file mode 100644 index e454427adde1b..0000000000000 --- a/src/tools/clippy/clippy_lints/src/utils/internal_lints/interning_defined_symbol.rs +++ /dev/null @@ -1,241 +0,0 @@ -use clippy_utils::consts::{ConstEvalCtxt, Constant}; -use clippy_utils::diagnostics::span_lint_and_sugg; -use clippy_utils::source::snippet; -use clippy_utils::ty::match_type; -use clippy_utils::{def_path_def_ids, is_expn_of, match_def_path, paths}; -use rustc_data_structures::fx::FxHashMap; -use rustc_errors::Applicability; -use rustc_hir::def::{DefKind, Res}; -use rustc_hir::def_id::DefId; -use rustc_hir::{BinOpKind, Expr, ExprKind, UnOp}; -use rustc_lint::{LateContext, LateLintPass}; -use rustc_middle::mir::ConstValue; -use rustc_middle::ty; -use rustc_session::impl_lint_pass; -use rustc_span::sym; -use rustc_span::symbol::Symbol; - -use std::borrow::Cow; - -declare_clippy_lint! { - /// ### What it does - /// Checks for interning symbols that have already been pre-interned and defined as constants. - /// - /// ### Why is this bad? - /// It's faster and easier to use the symbol constant. - /// - /// ### Example - /// ```rust,ignore - /// let _ = sym!(f32); - /// ``` - /// - /// Use instead: - /// ```rust,ignore - /// let _ = sym::f32; - /// ``` - pub INTERNING_DEFINED_SYMBOL, - internal, - "interning a symbol that is pre-interned and defined as a constant" -} - -declare_clippy_lint! { - /// ### What it does - /// Checks for unnecessary conversion from Symbol to a string. - /// - /// ### Why is this bad? - /// It's faster use symbols directly instead of strings. - /// - /// ### Example - /// ```rust,ignore - /// symbol.as_str() == "clippy"; - /// ``` - /// - /// Use instead: - /// ```rust,ignore - /// symbol == sym::clippy; - /// ``` - pub UNNECESSARY_SYMBOL_STR, - internal, - "unnecessary conversion between Symbol and string" -} - -#[derive(Default)] -pub struct InterningDefinedSymbol { - // Maps the symbol value to the constant DefId. - symbol_map: FxHashMap, -} - -impl_lint_pass!(InterningDefinedSymbol => [INTERNING_DEFINED_SYMBOL, UNNECESSARY_SYMBOL_STR]); - -impl<'tcx> LateLintPass<'tcx> for InterningDefinedSymbol { - fn check_crate(&mut self, cx: &LateContext<'_>) { - if !self.symbol_map.is_empty() { - return; - } - - for &module in &[&paths::KW_MODULE, &paths::SYM_MODULE] { - for def_id in def_path_def_ids(cx.tcx, module) { - for item in cx.tcx.module_children(def_id) { - if let Res::Def(DefKind::Const, item_def_id) = item.res - && let ty = cx.tcx.type_of(item_def_id).instantiate_identity() - && match_type(cx, ty, &paths::SYMBOL) - && let Ok(ConstValue::Scalar(value)) = cx.tcx.const_eval_poly(item_def_id) - && let Some(value) = value.to_u32().discard_err() - { - self.symbol_map.insert(value, item_def_id); - } - } - } - } - } - - fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) { - if let ExprKind::Call(func, [arg]) = &expr.kind - && let ty::FnDef(def_id, _) = cx.typeck_results().expr_ty(func).kind() - && cx.tcx.is_diagnostic_item(sym::SymbolIntern, *def_id) - && let Some(Constant::Str(arg)) = ConstEvalCtxt::new(cx).eval_simple(arg) - && let value = Symbol::intern(&arg).as_u32() - && let Some(&def_id) = self.symbol_map.get(&value) - { - span_lint_and_sugg( - cx, - INTERNING_DEFINED_SYMBOL, - is_expn_of(expr.span, "sym").unwrap_or(expr.span), - "interning a defined symbol", - "try", - cx.tcx.def_path_str(def_id), - Applicability::MachineApplicable, - ); - } - if let ExprKind::Binary(op, left, right) = expr.kind { - if matches!(op.node, BinOpKind::Eq | BinOpKind::Ne) { - let data = [ - (left, self.symbol_str_expr(left, cx)), - (right, self.symbol_str_expr(right, cx)), - ]; - match data { - // both operands are a symbol string - [(_, Some(left)), (_, Some(right))] => { - span_lint_and_sugg( - cx, - UNNECESSARY_SYMBOL_STR, - expr.span, - "unnecessary `Symbol` to string conversion", - "try", - format!( - "{} {} {}", - left.as_symbol_snippet(cx), - op.node.as_str(), - right.as_symbol_snippet(cx), - ), - Applicability::MachineApplicable, - ); - }, - // one of the operands is a symbol string - [(expr, Some(symbol)), _] | [_, (expr, Some(symbol))] => { - // creating an owned string for comparison - if matches!(symbol, SymbolStrExpr::Expr { is_to_owned: true, .. }) { - span_lint_and_sugg( - cx, - UNNECESSARY_SYMBOL_STR, - expr.span, - "unnecessary string allocation", - "try", - format!("{}.as_str()", symbol.as_symbol_snippet(cx)), - Applicability::MachineApplicable, - ); - } - }, - // nothing found - [(_, None), (_, None)] => {}, - } - } - } - } -} - -impl InterningDefinedSymbol { - fn symbol_str_expr<'tcx>(&self, expr: &'tcx Expr<'tcx>, cx: &LateContext<'tcx>) -> Option> { - static IDENT_STR_PATHS: &[&[&str]] = &[&paths::IDENT_AS_STR]; - static SYMBOL_STR_PATHS: &[&[&str]] = &[&paths::SYMBOL_AS_STR, &paths::SYMBOL_TO_IDENT_STRING]; - let call = if let ExprKind::AddrOf(_, _, e) = expr.kind - && let ExprKind::Unary(UnOp::Deref, e) = e.kind - { - e - } else { - expr - }; - if let ExprKind::MethodCall(_, item, [], _) = call.kind - // is a method call - && let Some(did) = cx.typeck_results().type_dependent_def_id(call.hir_id) - && let ty = cx.typeck_results().expr_ty(item) - // ...on either an Ident or a Symbol - && let Some(is_ident) = if match_type(cx, ty, &paths::SYMBOL) { - Some(false) - } else if match_type(cx, ty, &paths::IDENT) { - Some(true) - } else { - None - } - // ...which converts it to a string - && let paths = if is_ident { IDENT_STR_PATHS } else { SYMBOL_STR_PATHS } - && let Some(is_to_owned) = paths - .iter() - .find_map(|path| if match_def_path(cx, did, path) { - Some(path == &paths::SYMBOL_TO_IDENT_STRING) - } else { - None - }) - .or_else(|| if cx.tcx.is_diagnostic_item(sym::to_string_method, did) { - Some(true) - } else { - None - }) - { - return Some(SymbolStrExpr::Expr { - item, - is_ident, - is_to_owned, - }); - } - // is a string constant - if let Some(Constant::Str(s)) = ConstEvalCtxt::new(cx).eval_simple(expr) { - let value = Symbol::intern(&s).as_u32(); - // ...which matches a symbol constant - if let Some(&def_id) = self.symbol_map.get(&value) { - return Some(SymbolStrExpr::Const(def_id)); - } - } - None - } -} - -enum SymbolStrExpr<'tcx> { - /// a string constant with a corresponding symbol constant - Const(DefId), - /// a "symbol to string" expression like `symbol.as_str()` - Expr { - /// part that evaluates to `Symbol` or `Ident` - item: &'tcx Expr<'tcx>, - is_ident: bool, - /// whether an owned `String` is created like `to_ident_string()` - is_to_owned: bool, - }, -} - -impl<'tcx> SymbolStrExpr<'tcx> { - /// Returns a snippet that evaluates to a `Symbol` and is const if possible - fn as_symbol_snippet(&self, cx: &LateContext<'_>) -> Cow<'tcx, str> { - match *self { - Self::Const(def_id) => cx.tcx.def_path_str(def_id).into(), - Self::Expr { item, is_ident, .. } => { - let mut snip = snippet(cx, item.span.source_callsite(), ".."); - if is_ident { - // get `Ident.name` - snip.to_mut().push_str(".name"); - } - snip - }, - } - } -} diff --git a/src/tools/clippy/clippy_lints/src/utils/internal_lints/slow_symbol_comparisons.rs b/src/tools/clippy/clippy_lints/src/utils/internal_lints/slow_symbol_comparisons.rs deleted file mode 100644 index b8bcb9b375601..0000000000000 --- a/src/tools/clippy/clippy_lints/src/utils/internal_lints/slow_symbol_comparisons.rs +++ /dev/null @@ -1,74 +0,0 @@ -use clippy_utils::consts::{ConstEvalCtxt, Constant}; -use clippy_utils::diagnostics::span_lint_and_sugg; -use clippy_utils::paths; -use clippy_utils::source::snippet_with_applicability; -use clippy_utils::ty::match_type; -use rustc_errors::Applicability; -use rustc_hir::{BinOpKind, Expr, ExprKind}; -use rustc_lint::{LateContext, LateLintPass}; -use rustc_session::declare_lint_pass; -use rustc_span::{Span, sym}; - -declare_clippy_lint! { - /// ### What it does - /// - /// Detects symbol comparison using `Symbol::intern`. - /// - /// ### Why is this bad? - /// - /// Comparison via `Symbol::as_str()` is faster if the interned symbols are not reused. - /// - /// ### Example - /// - /// None, see suggestion. - pub SLOW_SYMBOL_COMPARISONS, - internal, - "detects slow comparisons of symbol" -} - -declare_lint_pass!(SlowSymbolComparisons => [SLOW_SYMBOL_COMPARISONS]); - -fn check_slow_comparison<'tcx>( - cx: &LateContext<'tcx>, - op1: &'tcx Expr<'tcx>, - op2: &'tcx Expr<'tcx>, -) -> Option<(Span, String)> { - if match_type(cx, cx.typeck_results().expr_ty(op1), &paths::SYMBOL) - && let ExprKind::Call(fun, args) = op2.kind - && let ExprKind::Path(ref qpath) = fun.kind - && cx - .tcx - .is_diagnostic_item(sym::SymbolIntern, cx.qpath_res(qpath, fun.hir_id).opt_def_id()?) - && let [symbol_name_expr] = args - && let Some(Constant::Str(symbol_name)) = ConstEvalCtxt::new(cx).eval_simple(symbol_name_expr) - { - Some((op1.span, symbol_name)) - } else { - None - } -} - -impl<'tcx> LateLintPass<'tcx> for SlowSymbolComparisons { - fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &Expr<'tcx>) { - if let ExprKind::Binary(op, left, right) = expr.kind - && (op.node == BinOpKind::Eq || op.node == BinOpKind::Ne) - && let Some((symbol_span, symbol_name)) = - check_slow_comparison(cx, left, right).or_else(|| check_slow_comparison(cx, right, left)) - { - let mut applicability = Applicability::MachineApplicable; - span_lint_and_sugg( - cx, - SLOW_SYMBOL_COMPARISONS, - expr.span, - "comparing `Symbol` via `Symbol::intern`", - "use `Symbol::as_str` and check the string instead", - format!( - "{}.as_str() {} \"{symbol_name}\"", - snippet_with_applicability(cx, symbol_span, "symbol", &mut applicability), - op.node.as_str() - ), - applicability, - ); - } - } -} diff --git a/src/tools/clippy/clippy_lints/src/utils/internal_lints/unsorted_clippy_utils_paths.rs b/src/tools/clippy/clippy_lints/src/utils/internal_lints/unsorted_clippy_utils_paths.rs deleted file mode 100644 index a5c4bf474f7aa..0000000000000 --- a/src/tools/clippy/clippy_lints/src/utils/internal_lints/unsorted_clippy_utils_paths.rs +++ /dev/null @@ -1,49 +0,0 @@ -use clippy_utils::diagnostics::span_lint; -use rustc_ast::ast::{Crate, ItemKind, ModKind}; -use rustc_lint::{EarlyContext, EarlyLintPass}; -use rustc_session::declare_lint_pass; - -declare_clippy_lint! { - /// ### What it does - /// Checks that [`clippy_utils::paths`] is sorted lexically - /// - /// ### Why is this bad? - /// We like to pretend we're an example of tidy code. - /// - /// ### Example - /// Wrong ordering of the util::paths constants. - pub UNSORTED_CLIPPY_UTILS_PATHS, - internal, - "various things that will negatively affect your clippy experience" -} - -declare_lint_pass!(UnsortedClippyUtilsPaths => [UNSORTED_CLIPPY_UTILS_PATHS]); - -impl EarlyLintPass for UnsortedClippyUtilsPaths { - fn check_crate(&mut self, cx: &EarlyContext<'_>, krate: &Crate) { - if let Some(utils) = krate.items.iter().find(|item| item.ident.name.as_str() == "utils") { - if let ItemKind::Mod(_, ModKind::Loaded(ref items, ..)) = utils.kind { - if let Some(paths) = items.iter().find(|item| item.ident.name.as_str() == "paths") { - if let ItemKind::Mod(_, ModKind::Loaded(ref items, ..)) = paths.kind { - let mut last_name: Option<&str> = None; - for item in items { - let name = item.ident.as_str(); - if let Some(last_name) = last_name { - if *last_name > *name { - span_lint( - cx, - UNSORTED_CLIPPY_UTILS_PATHS, - item.span, - "this constant should be before the previous constant due to lexical \ - ordering", - ); - } - } - last_name = Some(name); - } - } - } - } - } - } -} diff --git a/src/tools/clippy/clippy_lints/src/utils/mod.rs b/src/tools/clippy/clippy_lints/src/utils/mod.rs index 4476cd1005e7e..16066dd96c0ab 100644 --- a/src/tools/clippy/clippy_lints/src/utils/mod.rs +++ b/src/tools/clippy/clippy_lints/src/utils/mod.rs @@ -2,6 +2,3 @@ pub mod attr_collector; pub mod author; pub mod dump_hir; pub mod format_args_collector; - -#[cfg(feature = "internal")] -pub mod internal_lints; diff --git a/src/tools/clippy/clippy_lints/src/wildcard_imports.rs b/src/tools/clippy/clippy_lints/src/wildcard_imports.rs index 405310512dff3..45a5dbabeb4ef 100644 --- a/src/tools/clippy/clippy_lints/src/wildcard_imports.rs +++ b/src/tools/clippy/clippy_lints/src/wildcard_imports.rs @@ -68,6 +68,8 @@ declare_clippy_lint! { /// (including the standard library) provide modules named "prelude" specifically designed /// for wildcard import. /// + /// Wildcard imports reexported through `pub use` are also allowed. + /// /// `use super::*` is allowed in test modules. This is defined as any module with "test" in the name. /// /// These exceptions can be disabled using the `warn-on-all-wildcard-imports` configuration flag. @@ -121,7 +123,9 @@ impl LateLintPass<'_> for WildcardImports { } let module = cx.tcx.parent_module_from_def_id(item.owner_id.def_id); - if cx.tcx.visibility(item.owner_id.def_id) != ty::Visibility::Restricted(module.to_def_id()) { + if cx.tcx.visibility(item.owner_id.def_id) != ty::Visibility::Restricted(module.to_def_id()) + && !self.warn_on_all + { return; } if let ItemKind::Use(use_path, UseKind::Glob) = &item.kind @@ -150,7 +154,7 @@ impl LateLintPass<'_> for WildcardImports { (span, false) }; - let mut imports = used_imports.items().map(ToString::to_string).into_sorted_stable_ord(); + let mut imports: Vec<_> = used_imports.iter().map(ToString::to_string).collect(); let imports_string = if imports.len() == 1 { imports.pop().unwrap() } else if braced_glob { diff --git a/src/tools/clippy/clippy_lints/src/write.rs b/src/tools/clippy/clippy_lints/src/write.rs index 11c14c1477764..f24c127c4521d 100644 --- a/src/tools/clippy/clippy_lints/src/write.rs +++ b/src/tools/clippy/clippy_lints/src/write.rs @@ -1,8 +1,8 @@ use clippy_config::Conf; use clippy_utils::diagnostics::{span_lint, span_lint_and_then}; -use clippy_utils::is_in_test; use clippy_utils::macros::{FormatArgsStorage, MacroCall, format_arg_removal_span, root_macro_call_first_node}; use clippy_utils::source::{SpanRangeExt, expand_past_previous_comma}; +use clippy_utils::{is_in_test, sym}; use rustc_ast::token::LitKind; use rustc_ast::{ FormatArgPosition, FormatArgPositionKind, FormatArgs, FormatArgsPiece, FormatOptions, FormatPlaceholder, @@ -12,7 +12,7 @@ use rustc_errors::Applicability; use rustc_hir::{Expr, Impl, Item, ItemKind}; use rustc_lint::{LateContext, LateLintPass, LintContext}; use rustc_session::impl_lint_pass; -use rustc_span::{BytePos, Span, sym}; +use rustc_span::{BytePos, Span}; declare_clippy_lint! { /// ### What it does @@ -359,7 +359,7 @@ fn is_debug_impl(cx: &LateContext<'_>, item: &Item<'_>) -> bool { } fn check_newline(cx: &LateContext<'_>, format_args: &FormatArgs, macro_call: &MacroCall, name: &str) { - let Some(FormatArgsPiece::Literal(last)) = format_args.template.last() else { + let Some(&FormatArgsPiece::Literal(last)) = format_args.template.last() else { return; }; @@ -401,7 +401,7 @@ fn check_newline(cx: &LateContext<'_>, format_args: &FormatArgs, macro_call: &Ma return; }; - if format_args.template.len() == 1 && last.as_str() == "\n" { + if format_args.template.len() == 1 && last == sym::LF { // print!("\n"), write!(f, "\n") diag.multipart_suggestion( @@ -427,9 +427,7 @@ fn check_newline(cx: &LateContext<'_>, format_args: &FormatArgs, macro_call: &Ma } fn check_empty_string(cx: &LateContext<'_>, format_args: &FormatArgs, macro_call: &MacroCall, name: &str) { - if let [FormatArgsPiece::Literal(literal)] = &format_args.template[..] - && literal.as_str() == "\n" - { + if let [FormatArgsPiece::Literal(sym::LF)] = &format_args.template[..] { let mut span = format_args.span; let lint = if name == "writeln" { diff --git a/src/tools/clippy/clippy_lints/src/zero_sized_map_values.rs b/src/tools/clippy/clippy_lints/src/zero_sized_map_values.rs index f6948be7f67aa..a97643e0eaca5 100644 --- a/src/tools/clippy/clippy_lints/src/zero_sized_map_values.rs +++ b/src/tools/clippy/clippy_lints/src/zero_sized_map_values.rs @@ -74,10 +74,10 @@ impl LateLintPass<'_> for ZeroSizedMapValues { fn in_trait_impl(cx: &LateContext<'_>, hir_id: HirId) -> bool { let parent_id = cx.tcx.hir_get_parent_item(hir_id); let second_parent_id = cx.tcx.hir_get_parent_item(parent_id.into()).def_id; - if let Node::Item(item) = cx.tcx.hir_node_by_def_id(second_parent_id) { - if let ItemKind::Impl(hir::Impl { of_trait: Some(_), .. }) = item.kind { - return true; - } + if let Node::Item(item) = cx.tcx.hir_node_by_def_id(second_parent_id) + && let ItemKind::Impl(hir::Impl { of_trait: Some(_), .. }) = item.kind + { + return true; } false } diff --git a/src/tools/clippy/clippy_lints/src/zombie_processes.rs b/src/tools/clippy/clippy_lints/src/zombie_processes.rs index 7667db469689e..09f1084fe7004 100644 --- a/src/tools/clippy/clippy_lints/src/zombie_processes.rs +++ b/src/tools/clippy/clippy_lints/src/zombie_processes.rs @@ -4,6 +4,7 @@ use clippy_utils::{fn_def_id, get_enclosing_block, path_to_local_id}; use rustc_ast::Mutability; use rustc_ast::visit::visit_opt; use rustc_errors::Applicability; +use rustc_hir::def_id::LocalDefId; use rustc_hir::intravisit::{Visitor, walk_block, walk_expr, walk_local}; use rustc_hir::{Expr, ExprKind, HirId, LetStmt, Node, PatKind, Stmt, StmtKind}; use rustc_lint::{LateContext, LateLintPass}; @@ -68,6 +69,7 @@ impl<'tcx> LateLintPass<'tcx> for ZombieProcesses { let mut vis = WaitFinder { cx, local_id, + body_id: cx.tcx.hir_enclosing_body_owner(expr.hir_id), state: VisitorState::WalkUpToLocal, early_return: None, missing_wait_branch: None, @@ -129,9 +131,10 @@ struct MaybeWait(Span); struct WaitFinder<'a, 'tcx> { cx: &'a LateContext<'tcx>, local_id: HirId, + body_id: LocalDefId, state: VisitorState, early_return: Option, - // When joining two if branches where one of them doesn't call `wait()`, stores its span for more targetted help + // When joining two if branches where one of them doesn't call `wait()`, stores its span for more targeted help // messages missing_wait_branch: Option, } @@ -186,7 +189,7 @@ impl<'tcx> Visitor<'tcx> for WaitFinder<'_, 'tcx> { } } else { match ex.kind { - ExprKind::Ret(e) => { + ExprKind::Ret(e) if self.cx.tcx.hir_enclosing_body_owner(ex.hir_id) == self.body_id => { visit_opt!(self, visit_expr, e); if self.early_return.is_none() { self.early_return = Some(ex.span); diff --git a/src/tools/clippy/clippy_lints_internal/Cargo.toml b/src/tools/clippy/clippy_lints_internal/Cargo.toml new file mode 100644 index 0000000000000..2a0ceac27a324 --- /dev/null +++ b/src/tools/clippy/clippy_lints_internal/Cargo.toml @@ -0,0 +1,14 @@ +[package] +name = "clippy_lints_internal" +version = "0.0.1" +edition = "2021" + +[dependencies] +clippy_config = { path = "../clippy_config" } +clippy_utils = { path = "../clippy_utils" } +regex = { version = "1.5" } +rustc-semver = "1.1" + +[package.metadata.rust-analyzer] +# This crate uses #[feature(rustc_private)] +rustc_private = true diff --git a/src/tools/clippy/clippy_lints/src/utils/internal_lints/almost_standard_lint_formulation.rs b/src/tools/clippy/clippy_lints_internal/src/almost_standard_lint_formulation.rs similarity index 92% rename from src/tools/clippy/clippy_lints/src/utils/internal_lints/almost_standard_lint_formulation.rs rename to src/tools/clippy/clippy_lints_internal/src/almost_standard_lint_formulation.rs index 0a01a364a75b9..4fd5ea459a554 100644 --- a/src/tools/clippy/clippy_lints/src/utils/internal_lints/almost_standard_lint_formulation.rs +++ b/src/tools/clippy/clippy_lints_internal/src/almost_standard_lint_formulation.rs @@ -1,11 +1,12 @@ -use crate::utils::internal_lints::lint_without_lint_pass::is_lint_ref_type; +use crate::lint_without_lint_pass::is_lint_ref_type; use clippy_utils::diagnostics::span_lint_and_help; use regex::Regex; use rustc_hir::{Attribute, Item, ItemKind, Mutability}; use rustc_lint::{LateContext, LateLintPass}; +use rustc_lint_defs::declare_tool_lint; use rustc_session::impl_lint_pass; -declare_clippy_lint! { +declare_tool_lint! { /// ### What it does /// Checks if lint formulations have a standardized format. /// @@ -14,9 +15,10 @@ declare_clippy_lint! { /// /// ### Example /// `Checks for use...` can be written as `Checks for usage...` . - pub ALMOST_STANDARD_LINT_FORMULATION, - internal, - "lint formulations must have a standardized format." + pub clippy::ALMOST_STANDARD_LINT_FORMULATION, + Warn, + "lint formulations must have a standardized format.", + report_in_external_macro: true } impl_lint_pass!(AlmostStandardFormulation => [ALMOST_STANDARD_LINT_FORMULATION]); diff --git a/src/tools/clippy/clippy_lints/src/utils/internal_lints/collapsible_calls.rs b/src/tools/clippy/clippy_lints_internal/src/collapsible_calls.rs similarity index 97% rename from src/tools/clippy/clippy_lints/src/utils/internal_lints/collapsible_calls.rs rename to src/tools/clippy/clippy_lints_internal/src/collapsible_calls.rs index 2e6fb7c4ce4d5..d7967a0cc022f 100644 --- a/src/tools/clippy/clippy_lints/src/utils/internal_lints/collapsible_calls.rs +++ b/src/tools/clippy/clippy_lints_internal/src/collapsible_calls.rs @@ -4,12 +4,13 @@ use clippy_utils::{SpanlessEq, is_expr_path_def_path, is_lint_allowed, peel_bloc use rustc_errors::Applicability; use rustc_hir::{Closure, Expr, ExprKind}; use rustc_lint::{LateContext, LateLintPass}; +use rustc_lint_defs::declare_tool_lint; use rustc_session::declare_lint_pass; use rustc_span::Span; use std::borrow::{Borrow, Cow}; -declare_clippy_lint! { +declare_tool_lint! { /// ### What it does /// Lints `span_lint_and_then` function calls, where the /// closure argument has only one statement and that statement is a method @@ -64,9 +65,10 @@ declare_clippy_lint! { /// span_lint_and_note(cx, TEST_LINT, expr.span, lint_msg, Some(expr.span), note_msg); /// span_lint_and_note(cx, TEST_LINT, expr.span, lint_msg, None, note_msg); /// ``` - pub COLLAPSIBLE_SPAN_LINT_CALLS, - internal, - "found collapsible `span_lint_and_then` calls" + pub clippy::COLLAPSIBLE_SPAN_LINT_CALLS, + Warn, + "found collapsible `span_lint_and_then` calls", + report_in_external_macro: true } declare_lint_pass!(CollapsibleCalls => [COLLAPSIBLE_SPAN_LINT_CALLS]); diff --git a/src/tools/clippy/clippy_lints/src/utils/internal_lints/invalid_paths.rs b/src/tools/clippy/clippy_lints_internal/src/invalid_paths.rs similarity index 79% rename from src/tools/clippy/clippy_lints/src/utils/internal_lints/invalid_paths.rs rename to src/tools/clippy/clippy_lints_internal/src/invalid_paths.rs index 252ac5e676822..bee87efa3fcd4 100644 --- a/src/tools/clippy/clippy_lints/src/utils/internal_lints/invalid_paths.rs +++ b/src/tools/clippy/clippy_lints_internal/src/invalid_paths.rs @@ -5,12 +5,13 @@ use rustc_hir as hir; use rustc_hir::Item; use rustc_hir::def::DefKind; use rustc_lint::{LateContext, LateLintPass}; +use rustc_lint_defs::declare_tool_lint; use rustc_middle::ty::fast_reject::SimplifiedType; use rustc_middle::ty::{self, FloatTy}; use rustc_session::declare_lint_pass; use rustc_span::symbol::Symbol; -declare_clippy_lint! { +declare_tool_lint! { /// ### What it does /// Checks the paths module for invalid paths. /// @@ -19,9 +20,10 @@ declare_clippy_lint! { /// /// ### Example /// None. - pub INVALID_PATHS, - internal, - "invalid path" + pub clippy::INVALID_PATHS, + Warn, + "invalid path", + report_in_external_macro: true } declare_lint_pass!(InvalidPaths => [INVALID_PATHS]); @@ -80,22 +82,22 @@ pub fn check_path(cx: &LateContext<'_>, path: &[&str]) -> bool { .copied(); for item_def_id in lang_items.iter().map(|(_, def_id)| def_id).chain(incoherent_impls) { let lang_item_path = cx.get_def_path(item_def_id); - if path_syms.starts_with(&lang_item_path) { - if let [item] = &path_syms[lang_item_path.len()..] { - if matches!( - cx.tcx.def_kind(item_def_id), - DefKind::Mod | DefKind::Enum | DefKind::Trait - ) { - for child in cx.tcx.module_children(item_def_id) { - if child.ident.name == *item { - return true; - } + if path_syms.starts_with(&lang_item_path) + && let [item] = &path_syms[lang_item_path.len()..] + { + if matches!( + cx.tcx.def_kind(item_def_id), + DefKind::Mod | DefKind::Enum | DefKind::Trait + ) { + for child in cx.tcx.module_children(item_def_id) { + if child.ident.name == *item { + return true; } - } else { - for child in cx.tcx.associated_item_def_ids(item_def_id) { - if cx.tcx.item_name(*child) == *item { - return true; - } + } + } else { + for child in cx.tcx.associated_item_def_ids(item_def_id) { + if cx.tcx.item_name(*child) == *item { + return true; } } } diff --git a/src/tools/clippy/clippy_lints_internal/src/lib.rs b/src/tools/clippy/clippy_lints_internal/src/lib.rs new file mode 100644 index 0000000000000..b02d378619cab --- /dev/null +++ b/src/tools/clippy/clippy_lints_internal/src/lib.rs @@ -0,0 +1,76 @@ +#![feature(let_chains, rustc_private)] +#![allow( + clippy::missing_docs_in_private_items, + clippy::must_use_candidate, + clippy::symbol_as_str, + rustc::diagnostic_outside_of_impl, + rustc::untranslatable_diagnostic +)] +#![warn( + trivial_casts, + trivial_numeric_casts, + rust_2018_idioms, + unused_lifetimes, + unused_qualifications, + rustc::internal +)] +// Disable this rustc lint for now, as it was also done in rustc +#![allow(rustc::potential_query_instability)] +// None of these lints need a version. +#![allow(clippy::missing_clippy_version_attribute)] + +extern crate rustc_ast; +extern crate rustc_attr_parsing; +extern crate rustc_data_structures; +extern crate rustc_errors; +extern crate rustc_hir; +extern crate rustc_lint; +extern crate rustc_lint_defs; +extern crate rustc_middle; +extern crate rustc_session; +extern crate rustc_span; + +mod almost_standard_lint_formulation; +mod collapsible_calls; +mod invalid_paths; +mod lint_without_lint_pass; +mod msrv_attr_impl; +mod outer_expn_data_pass; +mod produce_ice; +mod symbols; +mod unnecessary_def_path; +mod unsorted_clippy_utils_paths; + +use rustc_lint::{Lint, LintStore}; + +static LINTS: &[&Lint] = &[ + almost_standard_lint_formulation::ALMOST_STANDARD_LINT_FORMULATION, + collapsible_calls::COLLAPSIBLE_SPAN_LINT_CALLS, + invalid_paths::INVALID_PATHS, + lint_without_lint_pass::DEFAULT_LINT, + lint_without_lint_pass::INVALID_CLIPPY_VERSION_ATTRIBUTE, + lint_without_lint_pass::LINT_WITHOUT_LINT_PASS, + lint_without_lint_pass::MISSING_CLIPPY_VERSION_ATTRIBUTE, + msrv_attr_impl::MISSING_MSRV_ATTR_IMPL, + outer_expn_data_pass::OUTER_EXPN_EXPN_DATA, + produce_ice::PRODUCE_ICE, + symbols::INTERNING_LITERALS, + symbols::SYMBOL_AS_STR, + unnecessary_def_path::UNNECESSARY_DEF_PATH, + unsorted_clippy_utils_paths::UNSORTED_CLIPPY_UTILS_PATHS, +]; + +pub fn register_lints(store: &mut LintStore) { + store.register_lints(LINTS); + + store.register_early_pass(|| Box::new(unsorted_clippy_utils_paths::UnsortedClippyUtilsPaths)); + store.register_early_pass(|| Box::new(produce_ice::ProduceIce)); + store.register_late_pass(|_| Box::new(collapsible_calls::CollapsibleCalls)); + store.register_late_pass(|_| Box::new(invalid_paths::InvalidPaths)); + store.register_late_pass(|_| Box::::default()); + store.register_late_pass(|_| Box::::default()); + store.register_late_pass(|_| Box::::default()); + store.register_late_pass(|_| Box::new(outer_expn_data_pass::OuterExpnDataPass)); + store.register_late_pass(|_| Box::new(msrv_attr_impl::MsrvAttrImpl)); + store.register_late_pass(|_| Box::new(almost_standard_lint_formulation::AlmostStandardFormulation::new())); +} diff --git a/src/tools/clippy/clippy_lints/src/utils/internal_lints/lint_without_lint_pass.rs b/src/tools/clippy/clippy_lints_internal/src/lint_without_lint_pass.rs similarity index 90% rename from src/tools/clippy/clippy_lints/src/utils/internal_lints/lint_without_lint_pass.rs rename to src/tools/clippy/clippy_lints_internal/src/lint_without_lint_pass.rs index 94a2e598522b2..6a75defcce341 100644 --- a/src/tools/clippy/clippy_lints/src/utils/internal_lints/lint_without_lint_pass.rs +++ b/src/tools/clippy/clippy_lints_internal/src/lint_without_lint_pass.rs @@ -9,13 +9,14 @@ use rustc_hir::hir_id::CRATE_HIR_ID; use rustc_hir::intravisit::Visitor; use rustc_hir::{ExprKind, HirId, Item, MutTy, Mutability, Path, TyKind}; use rustc_lint::{LateContext, LateLintPass}; +use rustc_lint_defs::declare_tool_lint; use rustc_middle::hir::nested_filter; use rustc_session::impl_lint_pass; use rustc_span::source_map::Spanned; use rustc_span::symbol::Symbol; use rustc_span::{Span, sym}; -declare_clippy_lint! { +declare_tool_lint! { /// ### What it does /// Ensures every lint is associated to a `LintPass`. /// @@ -37,12 +38,14 @@ declare_clippy_lint! { /// declare_lint_pass!(Pass => [LINT_1, LINT_2]); /// // missing FORGOTTEN_LINT /// ``` - pub LINT_WITHOUT_LINT_PASS, - internal, - "declaring a lint without associating it in a LintPass" + pub clippy::LINT_WITHOUT_LINT_PASS, + Warn, + "declaring a lint without associating it in a LintPass", + report_in_external_macro: true + } -declare_clippy_lint! { +declare_tool_lint! { /// ### What it does /// Checks for cases of an auto-generated lint without an updated description, /// i.e. `default lint description`. @@ -59,30 +62,32 @@ declare_clippy_lint! { /// ```rust,ignore /// declare_lint! { pub COOL_LINT, nursery, "a great new lint" } /// ``` - pub DEFAULT_LINT, - internal, - "found 'default lint description' in a lint declaration" + pub clippy::DEFAULT_LINT, + Warn, + "found 'default lint description' in a lint declaration", + report_in_external_macro: true } -declare_clippy_lint! { +declare_tool_lint! { /// ### What it does /// Checks for invalid `clippy::version` attributes. /// /// Valid values are: /// * "pre 1.29.0" /// * any valid semantic version - pub INVALID_CLIPPY_VERSION_ATTRIBUTE, - internal, - "found an invalid `clippy::version` attribute" + pub clippy::INVALID_CLIPPY_VERSION_ATTRIBUTE, + Warn, + "found an invalid `clippy::version` attribute", + report_in_external_macro: true } -declare_clippy_lint! { +declare_tool_lint! { /// ### What it does /// Checks for declared clippy lints without the `clippy::version` attribute. - /// - pub MISSING_CLIPPY_VERSION_ATTRIBUTE, - internal, - "found clippy lint without `clippy::version` attribute" + pub clippy::MISSING_CLIPPY_VERSION_ATTRIBUTE, + Warn, + "found clippy lint without `clippy::version` attribute", + report_in_external_macro: true } #[derive(Clone, Debug, Default)] @@ -100,10 +105,6 @@ impl_lint_pass!(LintWithoutLintPass => [ impl<'tcx> LateLintPass<'tcx> for LintWithoutLintPass { fn check_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx Item<'_>) { - if is_lint_allowed(cx, DEFAULT_LINT, item.hir_id()) { - return; - } - if let hir::ItemKind::Static(ident, ty, Mutability::Not, body_id) = item.kind { if is_lint_ref_type(cx, ty) { check_invalid_clippy_version_attribute(cx, item); @@ -205,12 +206,10 @@ pub(super) fn is_lint_ref_type(cx: &LateContext<'_>, ty: &hir::Ty<'_>) -> bool { mutbl: Mutability::Not, }, ) = ty.kind + && let TyKind::Path(ref path) = inner.kind + && let Res::Def(DefKind::Struct, def_id) = cx.qpath_res(path, inner.hir_id) { - if let TyKind::Path(ref path) = inner.kind { - if let Res::Def(DefKind::Struct, def_id) = cx.qpath_res(path, inner.hir_id) { - return match_def_path(cx, def_id, &paths::LINT); - } - } + return match_def_path(cx, def_id, &paths::LINT); } false diff --git a/src/tools/clippy/clippy_lints/src/utils/internal_lints/msrv_attr_impl.rs b/src/tools/clippy/clippy_lints_internal/src/msrv_attr_impl.rs similarity index 93% rename from src/tools/clippy/clippy_lints/src/utils/internal_lints/msrv_attr_impl.rs rename to src/tools/clippy/clippy_lints_internal/src/msrv_attr_impl.rs index 558acacb97245..dda054546e262 100644 --- a/src/tools/clippy/clippy_lints/src/utils/internal_lints/msrv_attr_impl.rs +++ b/src/tools/clippy/clippy_lints_internal/src/msrv_attr_impl.rs @@ -5,16 +5,17 @@ use clippy_utils::{match_def_path, paths}; use rustc_errors::Applicability; use rustc_hir as hir; use rustc_lint::{LateContext, LateLintPass, LintContext}; +use rustc_lint_defs::declare_tool_lint; use rustc_middle::ty::{self, EarlyBinder, GenericArgKind}; use rustc_session::declare_lint_pass; -declare_clippy_lint! { +declare_tool_lint! { /// ### What it does /// Check that the `extract_msrv_attr!` macro is used, when a lint has a MSRV. - /// - pub MISSING_MSRV_ATTR_IMPL, - internal, - "checking if all necessary steps were taken when adding a MSRV to a lint" + pub clippy::MISSING_MSRV_ATTR_IMPL, + Warn, + "checking if all necessary steps were taken when adding a MSRV to a lint", + report_in_external_macro: true } declare_lint_pass!(MsrvAttrImpl => [MISSING_MSRV_ATTR_IMPL]); diff --git a/src/tools/clippy/clippy_lints/src/utils/internal_lints/outer_expn_data_pass.rs b/src/tools/clippy/clippy_lints_internal/src/outer_expn_data_pass.rs similarity index 92% rename from src/tools/clippy/clippy_lints/src/utils/internal_lints/outer_expn_data_pass.rs rename to src/tools/clippy/clippy_lints_internal/src/outer_expn_data_pass.rs index 326e172146130..e94419647978c 100644 --- a/src/tools/clippy/clippy_lints/src/utils/internal_lints/outer_expn_data_pass.rs +++ b/src/tools/clippy/clippy_lints_internal/src/outer_expn_data_pass.rs @@ -4,10 +4,11 @@ use clippy_utils::{is_lint_allowed, method_calls, paths}; use rustc_errors::Applicability; use rustc_hir as hir; use rustc_lint::{LateContext, LateLintPass}; +use rustc_lint_defs::declare_tool_lint; use rustc_session::declare_lint_pass; use rustc_span::symbol::Symbol; -declare_clippy_lint! { +declare_tool_lint! { /// ### What it does /// Checks for calls to `cx.outer().expn_data()` and suggests to use /// the `cx.outer_expn_data()` @@ -24,9 +25,10 @@ declare_clippy_lint! { /// ```rust,ignore /// expr.span.ctxt().outer_expn_data() /// ``` - pub OUTER_EXPN_EXPN_DATA, - internal, - "using `cx.outer_expn().expn_data()` instead of `cx.outer_expn_data()`" + pub clippy::OUTER_EXPN_EXPN_DATA, + Warn, + "using `cx.outer_expn().expn_data()` instead of `cx.outer_expn_data()`", + report_in_external_macro: true } declare_lint_pass!(OuterExpnDataPass => [OUTER_EXPN_EXPN_DATA]); diff --git a/src/tools/clippy/clippy_lints/src/utils/internal_lints/produce_ice.rs b/src/tools/clippy/clippy_lints_internal/src/produce_ice.rs similarity index 79% rename from src/tools/clippy/clippy_lints/src/utils/internal_lints/produce_ice.rs rename to src/tools/clippy/clippy_lints_internal/src/produce_ice.rs index 0a07919d659fe..14e93dc6d5f13 100644 --- a/src/tools/clippy/clippy_lints/src/utils/internal_lints/produce_ice.rs +++ b/src/tools/clippy/clippy_lints_internal/src/produce_ice.rs @@ -1,10 +1,11 @@ use rustc_ast::ast::NodeId; use rustc_ast::visit::FnKind; use rustc_lint::{EarlyContext, EarlyLintPass, LintContext}; +use rustc_lint_defs::declare_tool_lint; use rustc_session::declare_lint_pass; use rustc_span::Span; -declare_clippy_lint! { +declare_tool_lint! { /// ### What it does /// Not an actual lint. This lint is only meant for testing our customized internal compiler /// error message by calling `panic`. @@ -16,9 +17,10 @@ declare_clippy_lint! { /// ```rust,ignore /// 🍦🍦🍦🍦🍦 /// ``` - pub PRODUCE_ICE, - internal, - "this message should not appear anywhere as we ICE before and don't emit the lint" + pub clippy::PRODUCE_ICE, + Warn, + "this message should not appear anywhere as we ICE before and don't emit the lint", + report_in_external_macro: true } declare_lint_pass!(ProduceIce => [PRODUCE_ICE]); @@ -35,7 +37,7 @@ impl EarlyLintPass for ProduceIce { fn is_trigger_fn(fn_kind: FnKind<'_>) -> bool { match fn_kind { - FnKind::Fn(_, ident, ..) => ident.name.as_str() == "it_looks_like_you_are_trying_to_kill_clippy", + FnKind::Fn(_, _, func) => func.ident.name.as_str() == "it_looks_like_you_are_trying_to_kill_clippy", FnKind::Closure(..) => false, } } diff --git a/src/tools/clippy/clippy_lints_internal/src/symbols.rs b/src/tools/clippy/clippy_lints_internal/src/symbols.rs new file mode 100644 index 0000000000000..c64e5821916bf --- /dev/null +++ b/src/tools/clippy/clippy_lints_internal/src/symbols.rs @@ -0,0 +1,169 @@ +use clippy_utils::diagnostics::span_lint_and_then; +use clippy_utils::ty::match_type; +use clippy_utils::{def_path_def_ids, match_def_path, paths}; +use rustc_ast::LitKind; +use rustc_data_structures::fx::FxHashMap; +use rustc_errors::Applicability; +use rustc_hir::def::{DefKind, Res}; +use rustc_hir::{Expr, ExprKind}; +use rustc_lint::{LateContext, LateLintPass}; +use rustc_lint_defs::declare_tool_lint; +use rustc_middle::mir::ConstValue; +use rustc_middle::ty; +use rustc_session::impl_lint_pass; +use rustc_span::symbol::Symbol; +use rustc_span::{Span, sym}; + +declare_tool_lint! { + /// ### What it does + /// Checks for interning string literals as symbols + /// + /// ### Why is this bad? + /// It's faster and easier to use the symbol constant. If one doesn't exist it can be added to `clippy_utils/src/sym.rs` + /// + /// ### Example + /// ```rust,ignore + /// let _ = Symbol::intern("f32"); + /// ``` + /// + /// Use instead: + /// ```rust,ignore + /// let _ = sym::f32; + /// ``` + pub clippy::INTERNING_LITERALS, + Warn, + "interning a symbol that is a literal", + report_in_external_macro: true +} + +declare_tool_lint! { + /// ### What it does + /// Checks for calls to `Symbol::as_str` + /// + /// ### Why is this bad? + /// It's faster and easier to use the symbol constant. If one doesn't exist it can be added to `clippy_utils/src/sym.rs` + /// + /// ### Example + /// ```rust,ignore + /// symbol.as_str() == "foo" + /// ``` + /// + /// Use instead: + /// ```rust,ignore + /// symbol == sym::foo + /// ``` + pub clippy::SYMBOL_AS_STR, + Warn, + "calls to `Symbol::as_str`", + report_in_external_macro: true +} + +#[derive(Default)] +pub struct Symbols { + // Maps the symbol to the import path + symbol_map: FxHashMap, +} + +impl_lint_pass!(Symbols => [INTERNING_LITERALS, SYMBOL_AS_STR]); + +impl<'tcx> LateLintPass<'tcx> for Symbols { + fn check_crate(&mut self, cx: &LateContext<'_>) { + let modules = [ + ("kw", &paths::KW_MODULE[..]), + ("sym", &paths::SYM_MODULE), + ("sym", &paths::CLIPPY_SYM_MODULE), + ]; + for (prefix, module) in modules { + for def_id in def_path_def_ids(cx.tcx, module) { + // When linting `clippy_utils` itself we can't use `module_children` as it's a local def id. It will + // still lint but the suggestion will say to add it to `sym.rs` even if it's already there + if def_id.is_local() { + continue; + } + + for item in cx.tcx.module_children(def_id) { + if let Res::Def(DefKind::Const, item_def_id) = item.res + && let ty = cx.tcx.type_of(item_def_id).instantiate_identity() + && match_type(cx, ty, &paths::SYMBOL) + && let Ok(ConstValue::Scalar(value)) = cx.tcx.const_eval_poly(item_def_id) + && let Some(value) = value.to_u32().discard_err() + { + self.symbol_map.insert(value, (prefix, item.ident.name)); + } + } + } + } + } + + fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) { + if let ExprKind::Call(func, [arg]) = &expr.kind + && let ty::FnDef(def_id, _) = cx.typeck_results().expr_ty(func).kind() + && cx.tcx.is_diagnostic_item(sym::SymbolIntern, *def_id) + && let ExprKind::Lit(lit) = arg.kind + && let LitKind::Str(name, _) = lit.node + { + span_lint_and_then( + cx, + INTERNING_LITERALS, + expr.span, + "interning a string literal", + |diag| { + let (message, path) = suggestion(&mut self.symbol_map, name); + diag.span_suggestion_verbose(expr.span, message, path, Applicability::MaybeIncorrect); + }, + ); + } + + if let ExprKind::Binary(_, lhs, rhs) = expr.kind { + check_binary(cx, lhs, rhs, &mut self.symbol_map); + check_binary(cx, rhs, lhs, &mut self.symbol_map); + } + } +} + +fn check_binary( + cx: &LateContext<'_>, + lhs: &Expr<'_>, + rhs: &Expr<'_>, + symbols: &mut FxHashMap, +) { + if let Some(removal_span) = as_str_span(cx, lhs) + && let ExprKind::Lit(lit) = rhs.kind + && let LitKind::Str(name, _) = lit.node + { + span_lint_and_then(cx, SYMBOL_AS_STR, lhs.span, "converting a Symbol to a string", |diag| { + let (message, path) = suggestion(symbols, name); + diag.multipart_suggestion_verbose( + message, + vec![(removal_span, String::new()), (rhs.span, path)], + Applicability::MachineApplicable, + ); + }); + } +} + +fn suggestion(symbols: &mut FxHashMap, name: Symbol) -> (&'static str, String) { + if let Some((prefix, name)) = symbols.get(&name.as_u32()) { + ("use the preinterned symbol", format!("{prefix}::{name}")) + } else { + ( + "add the symbol to `clippy_utils/src/sym.rs` and use it", + format!("sym::{}", name.as_str().replace(|ch: char| !ch.is_alphanumeric(), "_")), + ) + } +} + +/// ```ignore +/// symbol.as_str() +/// // ^^^^^^^^ +/// ``` +fn as_str_span(cx: &LateContext<'_>, expr: &Expr<'_>) -> Option { + if let ExprKind::MethodCall(_, recv, [], _) = expr.kind + && let Some(method_def_id) = cx.typeck_results().type_dependent_def_id(expr.hir_id) + && match_def_path(cx, method_def_id, &paths::SYMBOL_AS_STR) + { + Some(recv.span.shrink_to_hi().to(expr.span.shrink_to_hi())) + } else { + None + } +} diff --git a/src/tools/clippy/clippy_lints/src/utils/internal_lints/unnecessary_def_path.rs b/src/tools/clippy/clippy_lints_internal/src/unnecessary_def_path.rs similarity index 97% rename from src/tools/clippy/clippy_lints/src/utils/internal_lints/unnecessary_def_path.rs rename to src/tools/clippy/clippy_lints_internal/src/unnecessary_def_path.rs index 76b0a52621be4..6bdfbed55b062 100644 --- a/src/tools/clippy/clippy_lints/src/utils/internal_lints/unnecessary_def_path.rs +++ b/src/tools/clippy/clippy_lints_internal/src/unnecessary_def_path.rs @@ -8,6 +8,7 @@ use rustc_hir::def::{DefKind, Res}; use rustc_hir::def_id::DefId; use rustc_hir::{Expr, ExprKind, LetStmt, Mutability, Node}; use rustc_lint::{LateContext, LateLintPass}; +use rustc_lint_defs::declare_tool_lint; use rustc_middle::mir::ConstValue; use rustc_middle::mir::interpret::{Allocation, GlobalAlloc}; use rustc_middle::ty::{self, Ty}; @@ -17,7 +18,7 @@ use rustc_span::symbol::Symbol; use std::str; -declare_clippy_lint! { +declare_tool_lint! { /// ### What it does /// Checks for usage of def paths when a diagnostic item or a `LangItem` could be used. /// @@ -34,9 +35,10 @@ declare_clippy_lint! { /// ```rust,ignore /// utils::is_type_diagnostic_item(cx, ty, sym::Vec) /// ``` - pub UNNECESSARY_DEF_PATH, - internal, - "using a def path when a diagnostic item or a `LangItem` is available" + pub clippy::UNNECESSARY_DEF_PATH, + Warn, + "using a def path when a diagnostic item or a `LangItem` is available", + report_in_external_macro: true } impl_lint_pass!(UnnecessaryDefPath => [UNNECESSARY_DEF_PATH]); @@ -281,10 +283,10 @@ fn path_from_array(exprs: &[Expr<'_>]) -> Option> { exprs .iter() .map(|expr| { - if let ExprKind::Lit(lit) = &expr.kind { - if let LitKind::Str(sym, _) = lit.node { - return Some((*sym.as_str()).to_owned()); - } + if let ExprKind::Lit(lit) = &expr.kind + && let LitKind::Str(sym, _) = lit.node + { + return Some((*sym.as_str()).to_owned()); } None diff --git a/src/tools/clippy/clippy_lints_internal/src/unsorted_clippy_utils_paths.rs b/src/tools/clippy/clippy_lints_internal/src/unsorted_clippy_utils_paths.rs new file mode 100644 index 0000000000000..8e281ecb2ee44 --- /dev/null +++ b/src/tools/clippy/clippy_lints_internal/src/unsorted_clippy_utils_paths.rs @@ -0,0 +1,54 @@ +use clippy_utils::diagnostics::span_lint; +use rustc_ast::ast::{Crate, ItemKind, ModKind}; +use rustc_lint::{EarlyContext, EarlyLintPass}; +use rustc_lint_defs::declare_tool_lint; +use rustc_session::declare_lint_pass; + +declare_tool_lint! { + /// ### What it does + /// Checks that [`clippy_utils::paths`] is sorted lexically + /// + /// ### Why is this bad? + /// We like to pretend we're an example of tidy code. + /// + /// ### Example + /// Wrong ordering of the util::paths constants. + pub clippy::UNSORTED_CLIPPY_UTILS_PATHS, + Warn, + "various things that will negatively affect your clippy experience", + report_in_external_macro: true +} + +declare_lint_pass!(UnsortedClippyUtilsPaths => [UNSORTED_CLIPPY_UTILS_PATHS]); + +impl EarlyLintPass for UnsortedClippyUtilsPaths { + fn check_crate(&mut self, cx: &EarlyContext<'_>, krate: &Crate) { + if let Some(utils) = krate + .items + .iter() + .find(|item| item.kind.ident().is_some_and(|i| i.name.as_str() == "utils")) + && let ItemKind::Mod(_, _, ModKind::Loaded(ref items, ..)) = utils.kind + && let Some(paths) = items + .iter() + .find(|item| item.kind.ident().is_some_and(|i| i.name.as_str() == "paths")) + && let ItemKind::Mod(_, _, ModKind::Loaded(ref items, ..)) = paths.kind + { + let mut last_name: Option = None; + for item in items { + let name = item.kind.ident().expect("const items have idents").to_string(); + if let Some(last_name) = last_name + && *last_name > *name + { + span_lint( + cx, + UNSORTED_CLIPPY_UTILS_PATHS, + item.span, + "this constant should be before the previous constant due to lexical \ + ordering", + ); + } + last_name = Some(name); + } + } + } +} diff --git a/src/tools/clippy/clippy_utils/Cargo.toml b/src/tools/clippy/clippy_utils/Cargo.toml index ba4bb1d177c57..b98e990175033 100644 --- a/src/tools/clippy/clippy_utils/Cargo.toml +++ b/src/tools/clippy/clippy_utils/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "clippy_utils" # begin autogenerated version -version = "0.1.87" +version = "0.1.88" # end autogenerated version edition = "2024" description = "Helpful tools for writing lints, provided as they are used in Clippy" diff --git a/src/tools/clippy/clippy_utils/README.md b/src/tools/clippy/clippy_utils/README.md index 7c665b4249776..66192f866fa0e 100644 --- a/src/tools/clippy/clippy_utils/README.md +++ b/src/tools/clippy/clippy_utils/README.md @@ -8,7 +8,7 @@ This crate is only guaranteed to build with this `nightly` toolchain: ``` -nightly-2025-03-20 +nightly-2025-05-01 ``` diff --git a/src/tools/clippy/clippy_utils/src/ast_utils/mod.rs b/src/tools/clippy/clippy_utils/src/ast_utils/mod.rs index 6023ae9cc7b16..899aa99d25c7e 100644 --- a/src/tools/clippy/clippy_utils/src/ast_utils/mod.rs +++ b/src/tools/clippy/clippy_utils/src/ast_utils/mod.rs @@ -33,6 +33,7 @@ pub fn eq_id(l: Ident, r: Ident) -> bool { pub fn eq_pat(l: &Pat, r: &Pat) -> bool { use PatKind::*; match (&l.kind, &r.kind) { + (Missing, _) | (_, Missing) => unreachable!(), (Paren(l), _) => eq_pat(l, r), (_, Paren(r)) => eq_pat(l, r), (Wild, Wild) | (Rest, Rest) => true, @@ -321,17 +322,18 @@ pub fn eq_local_kind(l: &LocalKind, r: &LocalKind) -> bool { } pub fn eq_item(l: &Item, r: &Item, mut eq_kind: impl FnMut(&K, &K) -> bool) -> bool { - eq_id(l.ident, r.ident) && over(&l.attrs, &r.attrs, eq_attr) && eq_vis(&l.vis, &r.vis) && eq_kind(&l.kind, &r.kind) + over(&l.attrs, &r.attrs, eq_attr) && eq_vis(&l.vis, &r.vis) && eq_kind(&l.kind, &r.kind) } #[expect(clippy::similar_names, clippy::too_many_lines)] // Just a big match statement pub fn eq_item_kind(l: &ItemKind, r: &ItemKind) -> bool { use ItemKind::*; match (l, r) { - (ExternCrate(l), ExternCrate(r)) => l == r, + (ExternCrate(ls, li), ExternCrate(rs, ri)) => ls == rs && eq_id(*li, *ri), (Use(l), Use(r)) => eq_use_tree(l, r), ( Static(box StaticItem { + ident: li, ty: lt, mutability: lm, expr: le, @@ -339,16 +341,18 @@ pub fn eq_item_kind(l: &ItemKind, r: &ItemKind) -> bool { define_opaque: _, }), Static(box StaticItem { + ident: ri, ty: rt, mutability: rm, expr: re, safety: rs, define_opaque: _, }), - ) => lm == rm && ls == rs && eq_ty(lt, rt) && eq_expr_opt(le.as_ref(), re.as_ref()), + ) => eq_id(*li, *ri) && lm == rm && ls == rs && eq_ty(lt, rt) && eq_expr_opt(le.as_ref(), re.as_ref()), ( Const(box ConstItem { defaultness: ld, + ident: li, generics: lg, ty: lt, expr: le, @@ -356,38 +360,51 @@ pub fn eq_item_kind(l: &ItemKind, r: &ItemKind) -> bool { }), Const(box ConstItem { defaultness: rd, + ident: ri, generics: rg, ty: rt, expr: re, define_opaque: _, }), - ) => eq_defaultness(*ld, *rd) && eq_generics(lg, rg) && eq_ty(lt, rt) && eq_expr_opt(le.as_ref(), re.as_ref()), + ) => { + eq_defaultness(*ld, *rd) + && eq_id(*li, *ri) + && eq_generics(lg, rg) + && eq_ty(lt, rt) + && eq_expr_opt(le.as_ref(), re.as_ref()) + }, ( Fn(box ast::Fn { defaultness: ld, sig: lf, + ident: li, generics: lg, contract: lc, body: lb, define_opaque: _, + eii_impl: _, }), Fn(box ast::Fn { defaultness: rd, sig: rf, + ident: ri, generics: rg, contract: rc, body: rb, define_opaque: _, + eii_impl: _, }), ) => { eq_defaultness(*ld, *rd) && eq_fn_sig(lf, rf) + && eq_id(*li, *ri) && eq_generics(lg, rg) && eq_opt_fn_contract(lc, rc) && both(lb.as_ref(), rb.as_ref(), |l, r| eq_block(l, r)) }, - (Mod(lu, lmk), Mod(ru, rmk)) => { - lu == ru + (Mod(ls, li, lmk), Mod(rs, ri, rmk)) => { + ls == rs + && eq_id(*li, *ri) && match (lmk, rmk) { (ModKind::Loaded(litems, linline, _, _), ModKind::Loaded(ritems, rinline, _, _)) => { linline == rinline && over(litems, ritems, |l, r| eq_item(l, r, eq_item_kind)) @@ -421,33 +438,40 @@ pub fn eq_item_kind(l: &ItemKind, r: &ItemKind) -> bool { && over(lb, rb, eq_generic_bound) && both(lt.as_ref(), rt.as_ref(), |l, r| eq_ty(l, r)) }, - (Enum(le, lg), Enum(re, rg)) => over(&le.variants, &re.variants, eq_variant) && eq_generics(lg, rg), - (Struct(lv, lg), Struct(rv, rg)) | (Union(lv, lg), Union(rv, rg)) => { - eq_variant_data(lv, rv) && eq_generics(lg, rg) + (Enum(li, le, lg), Enum(ri, re, rg)) => { + eq_id(*li, *ri) && over(&le.variants, &re.variants, eq_variant) && eq_generics(lg, rg) + }, + (Struct(li, lv, lg), Struct(ri, rv, rg)) | (Union(li, lv, lg), Union(ri, rv, rg)) => { + eq_id(*li, *ri) && eq_variant_data(lv, rv) && eq_generics(lg, rg) }, ( Trait(box ast::Trait { is_auto: la, safety: lu, + ident: li, generics: lg, bounds: lb, - items: li, + items: lis, }), Trait(box ast::Trait { is_auto: ra, safety: ru, + ident: ri, generics: rg, bounds: rb, - items: ri, + items: ris, }), ) => { la == ra && matches!(lu, Safety::Default) == matches!(ru, Safety::Default) + && eq_id(*li, *ri) && eq_generics(lg, rg) && over(lb, rb, eq_generic_bound) - && over(li, ri, |l, r| eq_item(l, r, eq_assoc_item_kind)) + && over(lis, ris, |l, r| eq_item(l, r, eq_assoc_item_kind)) + }, + (TraitAlias(li, lg, lb), TraitAlias(ri, rg, rb)) => { + eq_id(*li, *ri) && eq_generics(lg, rg) && over(lb, rb, eq_generic_bound) }, - (TraitAlias(lg, lb), TraitAlias(rg, rb)) => eq_generics(lg, rg) && over(lb, rb, eq_generic_bound), ( Impl(box ast::Impl { safety: lu, @@ -480,7 +504,9 @@ pub fn eq_item_kind(l: &ItemKind, r: &ItemKind) -> bool { && over(li, ri, |l, r| eq_item(l, r, eq_assoc_item_kind)) }, (MacCall(l), MacCall(r)) => eq_mac_call(l, r), - (MacroDef(l), MacroDef(r)) => l.macro_rules == r.macro_rules && eq_delim_args(&l.body, &r.body), + (MacroDef(li, ld), MacroDef(ri, rd)) => { + eq_id(*li, *ri) && ld.macro_rules == rd.macro_rules && eq_delim_args(&ld.body, &rd.body) + }, _ => false, } } @@ -490,6 +516,7 @@ pub fn eq_foreign_item_kind(l: &ForeignItemKind, r: &ForeignItemKind) -> bool { match (l, r) { ( Static(box StaticItem { + ident: li, ty: lt, mutability: lm, expr: le, @@ -497,33 +524,39 @@ pub fn eq_foreign_item_kind(l: &ForeignItemKind, r: &ForeignItemKind) -> bool { define_opaque: _, }), Static(box StaticItem { + ident: ri, ty: rt, mutability: rm, expr: re, safety: rs, define_opaque: _, }), - ) => lm == rm && eq_ty(lt, rt) && eq_expr_opt(le.as_ref(), re.as_ref()) && ls == rs, + ) => eq_id(*li, *ri) && eq_ty(lt, rt) && lm == rm && eq_expr_opt(le.as_ref(), re.as_ref()) && ls == rs, ( Fn(box ast::Fn { defaultness: ld, sig: lf, + ident: li, generics: lg, contract: lc, body: lb, define_opaque: _, + eii_impl: _, }), Fn(box ast::Fn { defaultness: rd, sig: rf, + ident: ri, generics: rg, contract: rc, body: rb, define_opaque: _, + eii_impl: _, }), ) => { eq_defaultness(*ld, *rd) && eq_fn_sig(lf, rf) + && eq_id(*li, *ri) && eq_generics(lg, rg) && eq_opt_fn_contract(lc, rc) && both(lb.as_ref(), rb.as_ref(), |l, r| eq_block(l, r)) @@ -531,20 +564,23 @@ pub fn eq_foreign_item_kind(l: &ForeignItemKind, r: &ForeignItemKind) -> bool { ( TyAlias(box ast::TyAlias { defaultness: ld, + ident: li, generics: lg, + where_clauses: _, bounds: lb, ty: lt, - .. }), TyAlias(box ast::TyAlias { defaultness: rd, + ident: ri, generics: rg, + where_clauses: _, bounds: rb, ty: rt, - .. }), ) => { eq_defaultness(*ld, *rd) + && eq_id(*li, *ri) && eq_generics(lg, rg) && over(lb, rb, eq_generic_bound) && both(lt.as_ref(), rt.as_ref(), |l, r| eq_ty(l, r)) @@ -560,6 +596,7 @@ pub fn eq_assoc_item_kind(l: &AssocItemKind, r: &AssocItemKind) -> bool { ( Const(box ConstItem { defaultness: ld, + ident: li, generics: lg, ty: lt, expr: le, @@ -567,32 +604,44 @@ pub fn eq_assoc_item_kind(l: &AssocItemKind, r: &AssocItemKind) -> bool { }), Const(box ConstItem { defaultness: rd, + ident: ri, generics: rg, ty: rt, expr: re, define_opaque: _, }), - ) => eq_defaultness(*ld, *rd) && eq_generics(lg, rg) && eq_ty(lt, rt) && eq_expr_opt(le.as_ref(), re.as_ref()), + ) => { + eq_defaultness(*ld, *rd) + && eq_id(*li, *ri) + && eq_generics(lg, rg) + && eq_ty(lt, rt) + && eq_expr_opt(le.as_ref(), re.as_ref()) + }, ( Fn(box ast::Fn { defaultness: ld, sig: lf, + ident: li, generics: lg, contract: lc, body: lb, define_opaque: _, + eii_impl: _, }), Fn(box ast::Fn { defaultness: rd, sig: rf, + ident: ri, generics: rg, contract: rc, body: rb, define_opaque: _, + eii_impl: _, }), ) => { eq_defaultness(*ld, *rd) && eq_fn_sig(lf, rf) + && eq_id(*li, *ri) && eq_generics(lg, rg) && eq_opt_fn_contract(lc, rc) && both(lb.as_ref(), rb.as_ref(), |l, r| eq_block(l, r)) @@ -600,20 +649,23 @@ pub fn eq_assoc_item_kind(l: &AssocItemKind, r: &AssocItemKind) -> bool { ( Type(box TyAlias { defaultness: ld, + ident: li, generics: lg, + where_clauses: _, bounds: lb, ty: lt, - .. }), Type(box TyAlias { defaultness: rd, + ident: ri, generics: rg, + where_clauses: _, bounds: rb, ty: rt, - .. }), ) => { eq_defaultness(*ld, *rd) + && eq_id(*li, *ri) && eq_generics(lg, rg) && over(lb, rb, eq_generic_bound) && both(lt.as_ref(), rt.as_ref(), |l, r| eq_ty(l, r)) diff --git a/src/tools/clippy/clippy_utils/src/consts.rs b/src/tools/clippy/clippy_utils/src/consts.rs index dd149c4a29b9f..b9928b8eed497 100644 --- a/src/tools/clippy/clippy_utils/src/consts.rs +++ b/src/tools/clippy/clippy_utils/src/consts.rs @@ -15,7 +15,7 @@ use rustc_apfloat::ieee::{Half, Quad}; use rustc_ast::ast::{self, LitFloatType, LitKind}; use rustc_hir::def::{DefKind, Res}; use rustc_hir::{ - BinOp, BinOpKind, Block, ConstBlock, Expr, ExprKind, HirId, Item, ItemKind, Node, PatExpr, PatExprKind, QPath, UnOp, + BinOpKind, Block, ConstBlock, Expr, ExprKind, HirId, Item, ItemKind, Node, PatExpr, PatExprKind, QPath, UnOp, }; use rustc_lexer::tokenize; use rustc_lint::LateContext; @@ -43,14 +43,16 @@ pub enum Constant<'tcx> { Char(char), /// An integer's bit representation. Int(u128), - /// An `f16`. - F16(f16), + /// An `f16` bitcast to a `u16`. + // FIXME(f16_f128): use `f16` once builtins are available on all host tools platforms. + F16(u16), /// An `f32`. F32(f32), /// An `f64`. F64(f64), - /// An `f128`. - F128(f128), + /// An `f128` bitcast to a `u128`. + // FIXME(f16_f128): use `f128` once builtins are available on all host tools platforms. + F128(u128), /// `true` or `false`. Bool(bool), /// An array of constants. @@ -177,7 +179,7 @@ impl Hash for Constant<'_> { }, Self::F16(f) => { // FIXME(f16_f128): once conversions to/from `f128` are available on all platforms, - f.to_bits().hash(state); + f.hash(state); }, Self::F32(f) => { f64::from(f).to_bits().hash(state); @@ -186,7 +188,7 @@ impl Hash for Constant<'_> { f.to_bits().hash(state); }, Self::F128(f) => { - f.to_bits().hash(state); + f.hash(state); }, Self::Bool(b) => { b.hash(state); @@ -292,12 +294,12 @@ impl Constant<'_> { fn parse_f16(s: &str) -> Self { let f: Half = s.parse().unwrap(); - Self::F16(f16::from_bits(f.to_bits().try_into().unwrap())) + Self::F16(f.to_bits().try_into().unwrap()) } fn parse_f128(s: &str) -> Self { let f: Quad = s.parse().unwrap(); - Self::F128(f128::from_bits(f.to_bits())) + Self::F128(f.to_bits()) } } @@ -506,7 +508,7 @@ impl<'tcx> ConstEvalCtxt<'tcx> { UnOp::Deref => Some(if let Constant::Ref(r) = o { *r } else { o }), }), ExprKind::If(cond, then, ref otherwise) => self.ifthenelse(cond, then, *otherwise), - ExprKind::Binary(op, left, right) => self.binop(op, left, right), + ExprKind::Binary(op, left, right) => self.binop(op.node, left, right), ExprKind::Call(callee, []) => { // We only handle a few const functions for now. if let ExprKind::Path(qpath) = &callee.kind @@ -744,7 +746,7 @@ impl<'tcx> ConstEvalCtxt<'tcx> { } } - fn binop(&self, op: BinOp, left: &Expr<'_>, right: &Expr<'_>) -> Option> { + fn binop(&self, op: BinOpKind, left: &Expr<'_>, right: &Expr<'_>) -> Option> { let l = self.expr(left)?; let r = self.expr(right); match (l, r) { @@ -757,7 +759,7 @@ impl<'tcx> ConstEvalCtxt<'tcx> { // Using / or %, where the left-hand argument is the smallest integer of a signed integer type and // the right-hand argument is -1 always panics, even with overflow-checks disabled - if let BinOpKind::Div | BinOpKind::Rem = op.node + if let BinOpKind::Div | BinOpKind::Rem = op && l == ty_min_value && r == -1 { @@ -765,7 +767,7 @@ impl<'tcx> ConstEvalCtxt<'tcx> { } let zext = |n: i128| Constant::Int(unsext(self.tcx, n, ity)); - match op.node { + match op { // When +, * or binary - create a value greater than the maximum value, or less than // the minimum value that can be stored, it panics. BinOpKind::Add => l.checked_add(r).and_then(|n| ity.ensure_fits(n)).map(zext), @@ -792,7 +794,7 @@ impl<'tcx> ConstEvalCtxt<'tcx> { ty::Uint(ity) => { let bits = ity.bits(); - match op.node { + match op { BinOpKind::Add => l.checked_add(r).and_then(|n| ity.ensure_fits(n)).map(Constant::Int), BinOpKind::Sub => l.checked_sub(r).and_then(|n| ity.ensure_fits(n)).map(Constant::Int), BinOpKind::Mul => l.checked_mul(r).and_then(|n| ity.ensure_fits(n)).map(Constant::Int), @@ -815,7 +817,7 @@ impl<'tcx> ConstEvalCtxt<'tcx> { _ => None, }, // FIXME(f16_f128): add these types when binary operations are available on all platforms - (Constant::F32(l), Some(Constant::F32(r))) => match op.node { + (Constant::F32(l), Some(Constant::F32(r))) => match op { BinOpKind::Add => Some(Constant::F32(l + r)), BinOpKind::Sub => Some(Constant::F32(l - r)), BinOpKind::Mul => Some(Constant::F32(l * r)), @@ -829,7 +831,7 @@ impl<'tcx> ConstEvalCtxt<'tcx> { BinOpKind::Gt => Some(Constant::Bool(l > r)), _ => None, }, - (Constant::F64(l), Some(Constant::F64(r))) => match op.node { + (Constant::F64(l), Some(Constant::F64(r))) => match op { BinOpKind::Add => Some(Constant::F64(l + r)), BinOpKind::Sub => Some(Constant::F64(l - r)), BinOpKind::Mul => Some(Constant::F64(l * r)), @@ -843,7 +845,7 @@ impl<'tcx> ConstEvalCtxt<'tcx> { BinOpKind::Gt => Some(Constant::Bool(l > r)), _ => None, }, - (l, r) => match (op.node, l, r) { + (l, r) => match (op, l, r) { (BinOpKind::And, Constant::Bool(false), _) => Some(Constant::Bool(false)), (BinOpKind::Or, Constant::Bool(true), _) => Some(Constant::Bool(true)), (BinOpKind::And, Constant::Bool(true), Some(r)) | (BinOpKind::Or, Constant::Bool(false), Some(r)) => { @@ -868,10 +870,10 @@ pub fn mir_to_const<'tcx>(tcx: TyCtxt<'tcx>, result: mir::Const<'tcx>) -> Option ty::Adt(adt_def, _) if adt_def.is_struct() => Some(Constant::Adt(result)), ty::Bool => Some(Constant::Bool(int == ScalarInt::TRUE)), ty::Uint(_) | ty::Int(_) => Some(Constant::Int(int.to_bits(int.size()))), - ty::Float(FloatTy::F16) => Some(Constant::F16(f16::from_bits(int.into()))), + ty::Float(FloatTy::F16) => Some(Constant::F16(int.into())), ty::Float(FloatTy::F32) => Some(Constant::F32(f32::from_bits(int.into()))), ty::Float(FloatTy::F64) => Some(Constant::F64(f64::from_bits(int.into()))), - ty::Float(FloatTy::F128) => Some(Constant::F128(f128::from_bits(int.into()))), + ty::Float(FloatTy::F128) => Some(Constant::F128(int.into())), ty::RawPtr(_, _) => Some(Constant::RawPtr(int.to_bits(int.size()))), _ => None, }, @@ -892,10 +894,10 @@ pub fn mir_to_const<'tcx>(tcx: TyCtxt<'tcx>, result: mir::Const<'tcx>) -> Option let range = alloc_range(offset + size * idx, size); let val = alloc.read_scalar(&tcx, range, /* read_provenance */ false).ok()?; res.push(match flt { - FloatTy::F16 => Constant::F16(f16::from_bits(val.to_u16().discard_err()?)), + FloatTy::F16 => Constant::F16(val.to_u16().discard_err()?), FloatTy::F32 => Constant::F32(f32::from_bits(val.to_u32().discard_err()?)), FloatTy::F64 => Constant::F64(f64::from_bits(val.to_u64().discard_err()?)), - FloatTy::F128 => Constant::F128(f128::from_bits(val.to_u128().discard_err()?)), + FloatTy::F128 => Constant::F128(val.to_u128().discard_err()?), }); } Some(Constant::Vec(res)) diff --git a/src/tools/clippy/clippy_utils/src/diagnostics.rs b/src/tools/clippy/clippy_utils/src/diagnostics.rs index 292792408c642..cd2098a89891d 100644 --- a/src/tools/clippy/clippy_utils/src/diagnostics.rs +++ b/src/tools/clippy/clippy_utils/src/diagnostics.rs @@ -17,16 +17,16 @@ use rustc_span::Span; use std::env; fn docs_link(diag: &mut Diag<'_, ()>, lint: &'static Lint) { - if env::var("CLIPPY_DISABLE_DOCS_LINKS").is_err() { - if let Some(lint) = lint.name_lower().strip_prefix("clippy::") { - diag.help(format!( - "for further information visit https://rust-lang.github.io/rust-clippy/{}/index.html#{lint}", - &option_env!("RUST_RELEASE_NUM").map_or("master".to_string(), |n| { - // extract just major + minor version and ignore patch versions - format!("rust-{}", n.rsplit_once('.').unwrap().1) - }) - )); - } + if env::var("CLIPPY_DISABLE_DOCS_LINKS").is_err() + && let Some(lint) = lint.name_lower().strip_prefix("clippy::") + { + diag.help(format!( + "for further information visit https://rust-lang.github.io/rust-clippy/{}/index.html#{lint}", + &option_env!("RUST_RELEASE_NUM").map_or("master".to_string(), |n| { + // extract just major + minor version and ignore patch versions + format!("rust-{}", n.rsplit_once('.').unwrap().1) + }) + )); } } diff --git a/src/tools/clippy/clippy_utils/src/higher.rs b/src/tools/clippy/clippy_utils/src/higher.rs index c4d00002292c9..dbb993482902f 100644 --- a/src/tools/clippy/clippy_utils/src/higher.rs +++ b/src/tools/clippy/clippy_utils/src/higher.rs @@ -3,14 +3,14 @@ #![deny(clippy::missing_docs_in_private_items)] use crate::consts::{ConstEvalCtxt, Constant}; -use crate::is_expn_of; use crate::ty::is_type_diagnostic_item; +use crate::{is_expn_of, sym}; use rustc_ast::ast; use rustc_hir as hir; use rustc_hir::{Arm, Block, Expr, ExprKind, HirId, LoopSource, MatchSource, Node, Pat, QPath, StructTailExpr}; use rustc_lint::LateContext; -use rustc_span::{Span, sym, symbol}; +use rustc_span::{Span, symbol}; /// The essential nodes of a desugared for loop as well as the entire span: /// `for pat in arg { body }` becomes `(pat, arg, body)`. Returns `(pat, arg, body, span)`. @@ -118,18 +118,17 @@ impl<'hir> IfLet<'hir> { ) = expr.kind { let mut iter = cx.tcx.hir_parent_iter(expr.hir_id); - if let Some((_, Node::Block(Block { stmts: [], .. }))) = iter.next() { - if let Some(( + if let Some((_, Node::Block(Block { stmts: [], .. }))) = iter.next() + && let Some(( _, Node::Expr(Expr { kind: ExprKind::Loop(_, _, LoopSource::While, _), .. }), )) = iter.next() - { - // while loop desugar - return None; - } + { + // while loop desugar + return None; } return Some(Self { let_pat, @@ -176,6 +175,12 @@ impl<'hir> IfLetOrMatch<'hir> { ), } } + + pub fn scrutinee(&self) -> &'hir Expr<'hir> { + match self { + Self::Match(scrutinee, _, _) | Self::IfLet(scrutinee, _, _, _, _) => scrutinee, + } + } } /// An `if` or `if let` expression @@ -469,7 +474,7 @@ pub fn get_vec_init_kind<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'tcx>) - return Some(VecInitKind::New); } else if name.ident.name == symbol::kw::Default { return Some(VecInitKind::Default); - } else if name.ident.name.as_str() == "with_capacity" { + } else if name.ident.name == sym::with_capacity { let arg = args.first()?; return match ConstEvalCtxt::new(cx).eval_simple(arg) { Some(Constant::Int(num)) => Some(VecInitKind::WithConstCapacity(num)), diff --git a/src/tools/clippy/clippy_utils/src/hir_utils.rs b/src/tools/clippy/clippy_utils/src/hir_utils.rs index 9938e64d24264..c37231d093129 100644 --- a/src/tools/clippy/clippy_utils/src/hir_utils.rs +++ b/src/tools/clippy/clippy_utils/src/hir_utils.rs @@ -8,7 +8,7 @@ use rustc_hir::MatchSource::TryDesugar; use rustc_hir::def::{DefKind, Res}; use rustc_hir::{ AssocItemConstraint, BinOpKind, BindingMode, Block, BodyId, Closure, ConstArg, ConstArgKind, Expr, ExprField, - ExprKind, FnRetTy, GenericArg, GenericArgs, HirId, HirIdMap, InlineAsmOperand, LetExpr, Lifetime, LifetimeName, + ExprKind, FnRetTy, GenericArg, GenericArgs, HirId, HirIdMap, InlineAsmOperand, LetExpr, Lifetime, LifetimeKind, Pat, PatExpr, PatExprKind, PatField, PatKind, Path, PathSegment, PrimTy, QPath, Stmt, StmtKind, StructTailExpr, TraitBoundModifiers, Ty, TyKind, TyPat, TyPatKind, }; @@ -148,7 +148,7 @@ pub struct HirEqInterExpr<'a, 'b, 'tcx> { impl HirEqInterExpr<'_, '_, '_> { pub fn eq_stmt(&mut self, left: &Stmt<'_>, right: &Stmt<'_>) -> bool { match (&left.kind, &right.kind) { - (&StmtKind::Let(l), &StmtKind::Let(r)) => { + (StmtKind::Let(l), StmtKind::Let(r)) => { // This additional check ensures that the type of the locals are equivalent even if the init // expression or type have some inferred parts. if let Some((typeck_lhs, typeck_rhs)) = self.inner.maybe_typeck_results { @@ -166,7 +166,7 @@ impl HirEqInterExpr<'_, '_, '_> { && both(l.els.as_ref(), r.els.as_ref(), |l, r| self.eq_block(l, r)) && self.eq_pat(l.pat, r.pat) }, - (&StmtKind::Expr(l), &StmtKind::Expr(r)) | (&StmtKind::Semi(l), &StmtKind::Semi(r)) => self.eq_expr(l, r), + (StmtKind::Expr(l), StmtKind::Expr(r)) | (StmtKind::Semi(l), StmtKind::Semi(r)) => self.eq_expr(l, r), _ => false, } } @@ -260,7 +260,7 @@ impl HirEqInterExpr<'_, '_, '_> { fn should_ignore(&mut self, expr: &Expr<'_>) -> bool { macro_backtrace(expr.span).last().is_some_and(|macro_call| { matches!( - &self.inner.cx.tcx.get_diagnostic_name(macro_call.def_id), + self.inner.cx.tcx.get_diagnostic_name(macro_call.def_id), Some(sym::todo_macro | sym::unimplemented_macro) ) }) @@ -301,58 +301,58 @@ impl HirEqInterExpr<'_, '_, '_> { reduce_exprkind(self.inner.cx, &left.kind), reduce_exprkind(self.inner.cx, &right.kind), ) { - (&ExprKind::AddrOf(lb, l_mut, le), &ExprKind::AddrOf(rb, r_mut, re)) => { + (ExprKind::AddrOf(lb, l_mut, le), ExprKind::AddrOf(rb, r_mut, re)) => { lb == rb && l_mut == r_mut && self.eq_expr(le, re) }, - (&ExprKind::Array(l), &ExprKind::Array(r)) => self.eq_exprs(l, r), - (&ExprKind::Assign(ll, lr, _), &ExprKind::Assign(rl, rr, _)) => { + (ExprKind::Array(l), ExprKind::Array(r)) => self.eq_exprs(l, r), + (ExprKind::Assign(ll, lr, _), ExprKind::Assign(rl, rr, _)) => { self.inner.allow_side_effects && self.eq_expr(ll, rl) && self.eq_expr(lr, rr) }, - (&ExprKind::AssignOp(ref lo, ll, lr), &ExprKind::AssignOp(ref ro, rl, rr)) => { + (ExprKind::AssignOp(lo, ll, lr), ExprKind::AssignOp(ro, rl, rr)) => { self.inner.allow_side_effects && lo.node == ro.node && self.eq_expr(ll, rl) && self.eq_expr(lr, rr) }, - (&ExprKind::Block(l, _), &ExprKind::Block(r, _)) => self.eq_block(l, r), - (&ExprKind::Binary(l_op, ll, lr), &ExprKind::Binary(r_op, rl, rr)) => { + (ExprKind::Block(l, _), ExprKind::Block(r, _)) => self.eq_block(l, r), + (ExprKind::Binary(l_op, ll, lr), ExprKind::Binary(r_op, rl, rr)) => { l_op.node == r_op.node && self.eq_expr(ll, rl) && self.eq_expr(lr, rr) || swap_binop(l_op.node, ll, lr).is_some_and(|(l_op, ll, lr)| { l_op == r_op.node && self.eq_expr(ll, rl) && self.eq_expr(lr, rr) }) }, - (&ExprKind::Break(li, ref le), &ExprKind::Break(ri, ref re)) => { + (ExprKind::Break(li, le), ExprKind::Break(ri, re)) => { both(li.label.as_ref(), ri.label.as_ref(), |l, r| l.ident.name == r.ident.name) && both(le.as_ref(), re.as_ref(), |l, r| self.eq_expr(l, r)) }, - (&ExprKind::Call(l_fun, l_args), &ExprKind::Call(r_fun, r_args)) => { + (ExprKind::Call(l_fun, l_args), ExprKind::Call(r_fun, r_args)) => { self.inner.allow_side_effects && self.eq_expr(l_fun, r_fun) && self.eq_exprs(l_args, r_args) }, - (&ExprKind::Cast(lx, lt), &ExprKind::Cast(rx, rt)) => { + (ExprKind::Cast(lx, lt), ExprKind::Cast(rx, rt)) => { self.eq_expr(lx, rx) && self.eq_ty(lt, rt) }, - (&ExprKind::Closure(_l), &ExprKind::Closure(_r)) => false, - (&ExprKind::ConstBlock(lb), &ExprKind::ConstBlock(rb)) => self.eq_body(lb.body, rb.body), - (&ExprKind::Continue(li), &ExprKind::Continue(ri)) => { + (ExprKind::Closure(_l), ExprKind::Closure(_r)) => false, + (ExprKind::ConstBlock(lb), ExprKind::ConstBlock(rb)) => self.eq_body(lb.body, rb.body), + (ExprKind::Continue(li), ExprKind::Continue(ri)) => { both(li.label.as_ref(), ri.label.as_ref(), |l, r| l.ident.name == r.ident.name) }, - (&ExprKind::DropTemps(le), &ExprKind::DropTemps(re)) => self.eq_expr(le, re), - (&ExprKind::Field(l_f_exp, ref l_f_ident), &ExprKind::Field(r_f_exp, ref r_f_ident)) => { + (ExprKind::DropTemps(le), ExprKind::DropTemps(re)) => self.eq_expr(le, re), + (ExprKind::Field(l_f_exp, l_f_ident), ExprKind::Field(r_f_exp, r_f_ident)) => { l_f_ident.name == r_f_ident.name && self.eq_expr(l_f_exp, r_f_exp) }, - (&ExprKind::Index(la, li, _), &ExprKind::Index(ra, ri, _)) => self.eq_expr(la, ra) && self.eq_expr(li, ri), - (&ExprKind::If(lc, lt, ref le), &ExprKind::If(rc, rt, ref re)) => { + (ExprKind::Index(la, li, _), ExprKind::Index(ra, ri, _)) => self.eq_expr(la, ra) && self.eq_expr(li, ri), + (ExprKind::If(lc, lt, le), ExprKind::If(rc, rt, re)) => { self.eq_expr(lc, rc) && self.eq_expr(lt, rt) && both(le.as_ref(), re.as_ref(), |l, r| self.eq_expr(l, r)) }, - (&ExprKind::Let(l), &ExprKind::Let(r)) => { + (ExprKind::Let(l), ExprKind::Let(r)) => { self.eq_pat(l.pat, r.pat) && both(l.ty.as_ref(), r.ty.as_ref(), |l, r| self.eq_ty(l, r)) && self.eq_expr(l.init, r.init) }, (ExprKind::Lit(l), ExprKind::Lit(r)) => l.node == r.node, - (&ExprKind::Loop(lb, ref ll, ref lls, _), &ExprKind::Loop(rb, ref rl, ref rls, _)) => { + (ExprKind::Loop(lb, ll, lls, _), ExprKind::Loop(rb, rl, rls, _)) => { lls == rls && self.eq_block(lb, rb) && both(ll.as_ref(), rl.as_ref(), |l, r| l.ident.name == r.ident.name) }, - (&ExprKind::Match(le, la, ref ls), &ExprKind::Match(re, ra, ref rs)) => { + (ExprKind::Match(le, la, ls), ExprKind::Match(re, ra, rs)) => { (ls == rs || (matches!((ls, rs), (TryDesugar(_), TryDesugar(_))))) && self.eq_expr(le, re) && over(la, ra, |l, r| { @@ -362,27 +362,27 @@ impl HirEqInterExpr<'_, '_, '_> { }) }, ( - &ExprKind::MethodCall(l_path, l_receiver, l_args, _), - &ExprKind::MethodCall(r_path, r_receiver, r_args, _), + ExprKind::MethodCall(l_path, l_receiver, l_args, _), + ExprKind::MethodCall(r_path, r_receiver, r_args, _), ) => { self.inner.allow_side_effects && self.eq_path_segment(l_path, r_path) && self.eq_expr(l_receiver, r_receiver) && self.eq_exprs(l_args, r_args) }, - (&ExprKind::UnsafeBinderCast(lkind, le, None), &ExprKind::UnsafeBinderCast(rkind, re, None)) => + (ExprKind::UnsafeBinderCast(lkind, le, None), ExprKind::UnsafeBinderCast(rkind, re, None)) => lkind == rkind && self.eq_expr(le, re), - (&ExprKind::UnsafeBinderCast(lkind, le, Some(lt)), &ExprKind::UnsafeBinderCast(rkind, re, Some(rt))) => + (ExprKind::UnsafeBinderCast(lkind, le, Some(lt)), ExprKind::UnsafeBinderCast(rkind, re, Some(rt))) => lkind == rkind && self.eq_expr(le, re) && self.eq_ty(lt, rt), - (&ExprKind::OffsetOf(l_container, l_fields), &ExprKind::OffsetOf(r_container, r_fields)) => { + (ExprKind::OffsetOf(l_container, l_fields), ExprKind::OffsetOf(r_container, r_fields)) => { self.eq_ty(l_container, r_container) && over(l_fields, r_fields, |l, r| l.name == r.name) }, (ExprKind::Path(l), ExprKind::Path(r)) => self.eq_qpath(l, r), - (&ExprKind::Repeat(le, ll), &ExprKind::Repeat(re, rl)) => { + (ExprKind::Repeat(le, ll), ExprKind::Repeat(re, rl)) => { self.eq_expr(le, re) && self.eq_const_arg(ll, rl) }, (ExprKind::Ret(l), ExprKind::Ret(r)) => both(l.as_ref(), r.as_ref(), |l, r| self.eq_expr(l, r)), - (&ExprKind::Struct(l_path, lf, ref lo), &ExprKind::Struct(r_path, rf, ref ro)) => { + (ExprKind::Struct(l_path, lf, lo), ExprKind::Struct(r_path, rf, ro)) => { self.eq_qpath(l_path, r_path) && match (lo, ro) { (StructTailExpr::Base(l),StructTailExpr::Base(r)) => self.eq_expr(l, r), @@ -392,58 +392,58 @@ impl HirEqInterExpr<'_, '_, '_> { } && over(lf, rf, |l, r| self.eq_expr_field(l, r)) }, - (&ExprKind::Tup(l_tup), &ExprKind::Tup(r_tup)) => self.eq_exprs(l_tup, r_tup), - (&ExprKind::Use(l_expr, _), &ExprKind::Use(r_expr, _)) => self.eq_expr(l_expr, r_expr), - (&ExprKind::Type(le, lt), &ExprKind::Type(re, rt)) => self.eq_expr(le, re) && self.eq_ty(lt, rt), - (&ExprKind::Unary(l_op, le), &ExprKind::Unary(r_op, re)) => l_op == r_op && self.eq_expr(le, re), - (&ExprKind::Yield(le, _), &ExprKind::Yield(re, _)) => return self.eq_expr(le, re), + (ExprKind::Tup(l_tup), ExprKind::Tup(r_tup)) => self.eq_exprs(l_tup, r_tup), + (ExprKind::Use(l_expr, _), ExprKind::Use(r_expr, _)) => self.eq_expr(l_expr, r_expr), + (ExprKind::Type(le, lt), ExprKind::Type(re, rt)) => self.eq_expr(le, re) && self.eq_ty(lt, rt), + (ExprKind::Unary(l_op, le), ExprKind::Unary(r_op, re)) => l_op == r_op && self.eq_expr(le, re), + (ExprKind::Yield(le, _), ExprKind::Yield(re, _)) => return self.eq_expr(le, re), ( // Else branches for branches above, grouped as per `match_same_arms`. - | &ExprKind::AddrOf(..) - | &ExprKind::Array(..) - | &ExprKind::Assign(..) - | &ExprKind::AssignOp(..) - | &ExprKind::Binary(..) - | &ExprKind::Become(..) - | &ExprKind::Block(..) - | &ExprKind::Break(..) - | &ExprKind::Call(..) - | &ExprKind::Cast(..) - | &ExprKind::ConstBlock(..) - | &ExprKind::Continue(..) - | &ExprKind::DropTemps(..) - | &ExprKind::Field(..) - | &ExprKind::Index(..) - | &ExprKind::If(..) - | &ExprKind::Let(..) - | &ExprKind::Lit(..) - | &ExprKind::Loop(..) - | &ExprKind::Match(..) - | &ExprKind::MethodCall(..) - | &ExprKind::OffsetOf(..) - | &ExprKind::Path(..) - | &ExprKind::Repeat(..) - | &ExprKind::Ret(..) - | &ExprKind::Struct(..) - | &ExprKind::Tup(..) - | &ExprKind::Use(..) - | &ExprKind::Type(..) - | &ExprKind::Unary(..) - | &ExprKind::Yield(..) - | &ExprKind::UnsafeBinderCast(..) + | ExprKind::AddrOf(..) + | ExprKind::Array(..) + | ExprKind::Assign(..) + | ExprKind::AssignOp(..) + | ExprKind::Binary(..) + | ExprKind::Become(..) + | ExprKind::Block(..) + | ExprKind::Break(..) + | ExprKind::Call(..) + | ExprKind::Cast(..) + | ExprKind::ConstBlock(..) + | ExprKind::Continue(..) + | ExprKind::DropTemps(..) + | ExprKind::Field(..) + | ExprKind::Index(..) + | ExprKind::If(..) + | ExprKind::Let(..) + | ExprKind::Lit(..) + | ExprKind::Loop(..) + | ExprKind::Match(..) + | ExprKind::MethodCall(..) + | ExprKind::OffsetOf(..) + | ExprKind::Path(..) + | ExprKind::Repeat(..) + | ExprKind::Ret(..) + | ExprKind::Struct(..) + | ExprKind::Tup(..) + | ExprKind::Use(..) + | ExprKind::Type(..) + | ExprKind::Unary(..) + | ExprKind::Yield(..) + | ExprKind::UnsafeBinderCast(..) // --- Special cases that do not have a positive branch. // `Err` represents an invalid expression, so let's never assume that // an invalid expressions is equal to anything. - | &ExprKind::Err(..) + | ExprKind::Err(..) // For the time being, we always consider that two closures are unequal. // This behavior may change in the future. - | &ExprKind::Closure(..) + | ExprKind::Closure(..) // For the time being, we always consider that two instances of InlineAsm are different. // This behavior may change in the future. - | &ExprKind::InlineAsm(_) + | ExprKind::InlineAsm(_) , _ ) => false, }; @@ -483,7 +483,7 @@ impl HirEqInterExpr<'_, '_, '_> { } fn eq_lifetime(left: &Lifetime, right: &Lifetime) -> bool { - left.res == right.res + left.kind == right.kind } fn eq_pat_field(&mut self, left: &PatField<'_>, right: &PatField<'_>) -> bool { @@ -494,11 +494,11 @@ impl HirEqInterExpr<'_, '_, '_> { fn eq_pat_expr(&mut self, left: &PatExpr<'_>, right: &PatExpr<'_>) -> bool { match (&left.kind, &right.kind) { ( - &PatExprKind::Lit { + PatExprKind::Lit { lit: left, negated: left_neg, }, - &PatExprKind::Lit { + PatExprKind::Lit { lit: right, negated: right_neg, }, @@ -512,47 +512,47 @@ impl HirEqInterExpr<'_, '_, '_> { /// Checks whether two patterns are the same. fn eq_pat(&mut self, left: &Pat<'_>, right: &Pat<'_>) -> bool { match (&left.kind, &right.kind) { - (&PatKind::Box(l), &PatKind::Box(r)) => self.eq_pat(l, r), - (&PatKind::Struct(ref lp, la, ..), &PatKind::Struct(ref rp, ra, ..)) => { + (PatKind::Box(l), PatKind::Box(r)) => self.eq_pat(l, r), + (PatKind::Struct(lp, la, ..), PatKind::Struct(rp, ra, ..)) => { self.eq_qpath(lp, rp) && over(la, ra, |l, r| self.eq_pat_field(l, r)) }, - (&PatKind::TupleStruct(ref lp, la, ls), &PatKind::TupleStruct(ref rp, ra, rs)) => { + (PatKind::TupleStruct(lp, la, ls), PatKind::TupleStruct(rp, ra, rs)) => { self.eq_qpath(lp, rp) && over(la, ra, |l, r| self.eq_pat(l, r)) && ls == rs }, - (&PatKind::Binding(lb, li, _, ref lp), &PatKind::Binding(rb, ri, _, ref rp)) => { + (PatKind::Binding(lb, li, _, lp), PatKind::Binding(rb, ri, _, rp)) => { let eq = lb == rb && both(lp.as_ref(), rp.as_ref(), |l, r| self.eq_pat(l, r)); if eq { - self.locals.insert(li, ri); + self.locals.insert(*li, *ri); } eq }, - (&PatKind::Expr(l), &PatKind::Expr(r)) => self.eq_pat_expr(l, r), - (&PatKind::Tuple(l, ls), &PatKind::Tuple(r, rs)) => ls == rs && over(l, r, |l, r| self.eq_pat(l, r)), - (&PatKind::Range(ref ls, ref le, li), &PatKind::Range(ref rs, ref re, ri)) => { + (PatKind::Expr(l), PatKind::Expr(r)) => self.eq_pat_expr(l, r), + (PatKind::Tuple(l, ls), PatKind::Tuple(r, rs)) => ls == rs && over(l, r, |l, r| self.eq_pat(l, r)), + (PatKind::Range(ls, le, li), PatKind::Range(rs, re, ri)) => { both(ls.as_ref(), rs.as_ref(), |a, b| self.eq_pat_expr(a, b)) && both(le.as_ref(), re.as_ref(), |a, b| self.eq_pat_expr(a, b)) && (li == ri) }, - (&PatKind::Ref(le, ref lm), &PatKind::Ref(re, ref rm)) => lm == rm && self.eq_pat(le, re), - (&PatKind::Slice(ls, ref li, le), &PatKind::Slice(rs, ref ri, re)) => { + (PatKind::Ref(le, lm), PatKind::Ref(re, rm)) => lm == rm && self.eq_pat(le, re), + (PatKind::Slice(ls, li, le), PatKind::Slice(rs, ri, re)) => { over(ls, rs, |l, r| self.eq_pat(l, r)) && over(le, re, |l, r| self.eq_pat(l, r)) && both(li.as_ref(), ri.as_ref(), |l, r| self.eq_pat(l, r)) }, - (&PatKind::Wild, &PatKind::Wild) => true, + (PatKind::Wild, PatKind::Wild) => true, _ => false, } } fn eq_qpath(&mut self, left: &QPath<'_>, right: &QPath<'_>) -> bool { match (left, right) { - (&QPath::Resolved(ref lty, lpath), &QPath::Resolved(ref rty, rpath)) => { + (QPath::Resolved(lty, lpath), QPath::Resolved(rty, rpath)) => { both(lty.as_ref(), rty.as_ref(), |l, r| self.eq_ty(l, r)) && self.eq_path(lpath, rpath) }, - (&QPath::TypeRelative(lty, lseg), &QPath::TypeRelative(rty, rseg)) => { + (QPath::TypeRelative(lty, lseg), QPath::TypeRelative(rty, rseg)) => { self.eq_ty(lty, rty) && self.eq_path_segment(lseg, rseg) }, - (&QPath::LangItem(llang_item, ..), &QPath::LangItem(rlang_item, ..)) => llang_item == rlang_item, + (QPath::LangItem(llang_item, ..), QPath::LangItem(rlang_item, ..)) => llang_item == rlang_item, _ => false, } } @@ -611,15 +611,15 @@ impl HirEqInterExpr<'_, '_, '_> { pub fn eq_ty(&mut self, left: &Ty<'_>, right: &Ty<'_>) -> bool { match (&left.kind, &right.kind) { - (&TyKind::Slice(l_vec), &TyKind::Slice(r_vec)) => self.eq_ty(l_vec, r_vec), - (&TyKind::Array(lt, ll), &TyKind::Array(rt, rl)) => self.eq_ty(lt, rt) && self.eq_const_arg(ll, rl), + (TyKind::Slice(l_vec), TyKind::Slice(r_vec)) => self.eq_ty(l_vec, r_vec), + (TyKind::Array(lt, ll), TyKind::Array(rt, rl)) => self.eq_ty(lt, rt) && self.eq_const_arg(ll, rl), (TyKind::Ptr(l_mut), TyKind::Ptr(r_mut)) => l_mut.mutbl == r_mut.mutbl && self.eq_ty(l_mut.ty, r_mut.ty), (TyKind::Ref(_, l_rmut), TyKind::Ref(_, r_rmut)) => { l_rmut.mutbl == r_rmut.mutbl && self.eq_ty(l_rmut.ty, r_rmut.ty) }, (TyKind::Path(l), TyKind::Path(r)) => self.eq_qpath(l, r), - (&TyKind::Tup(l), &TyKind::Tup(r)) => over(l, r, |l, r| self.eq_ty(l, r)), - (&TyKind::Infer(()), &TyKind::Infer(())) => true, + (TyKind::Tup(l), TyKind::Tup(r)) => over(l, r, |l, r| self.eq_ty(l, r)), + (TyKind::Infer(()), TyKind::Infer(())) => true, _ => false, } } @@ -853,9 +853,9 @@ impl<'a, 'tcx> SpanlessHash<'a, 'tcx> { std::mem::discriminant(&e.kind).hash(&mut self.s); - match e.kind { + match &e.kind { ExprKind::AddrOf(kind, m, e) => { - std::mem::discriminant(&kind).hash(&mut self.s); + std::mem::discriminant(kind).hash(&mut self.s); m.hash(&mut self.s); self.hash_expr(e); }, @@ -871,7 +871,7 @@ impl<'a, 'tcx> SpanlessHash<'a, 'tcx> { self.hash_expr(l); self.hash_expr(r); }, - ExprKind::AssignOp(ref o, l, r) => { + ExprKind::AssignOp(o, l, r) => { std::mem::discriminant(&o.node).hash(&mut self.s); self.hash_expr(l); self.hash_expr(r); @@ -887,11 +887,11 @@ impl<'a, 'tcx> SpanlessHash<'a, 'tcx> { self.hash_expr(l); self.hash_expr(r); }, - ExprKind::Break(i, ref j) => { + ExprKind::Break(i, j) => { if let Some(i) = i.label { self.hash_name(i.ident.name); } - if let Some(j) = *j { + if let Some(j) = j { self.hash_expr(j); } }, @@ -903,20 +903,20 @@ impl<'a, 'tcx> SpanlessHash<'a, 'tcx> { self.hash_expr(e); self.hash_ty(ty); }, - ExprKind::Closure(&Closure { + ExprKind::Closure(Closure { capture_clause, body, .. }) => { - std::mem::discriminant(&capture_clause).hash(&mut self.s); + std::mem::discriminant(capture_clause).hash(&mut self.s); // closures inherit TypeckResults - self.hash_expr(self.cx.tcx.hir_body(body).value); + self.hash_expr(self.cx.tcx.hir_body(*body).value); }, - ExprKind::ConstBlock(ref l_id) => { + ExprKind::ConstBlock(l_id) => { self.hash_body(l_id.body); }, ExprKind::DropTemps(e) | ExprKind::Yield(e, _) => { self.hash_expr(e); }, - ExprKind::Field(e, ref f) => { + ExprKind::Field(e, f) => { self.hash_expr(e); self.hash_name(f.name); }, @@ -991,23 +991,23 @@ impl<'a, 'tcx> SpanlessHash<'a, 'tcx> { ExprKind::Lit(l) => { l.node.hash(&mut self.s); }, - ExprKind::Loop(b, ref i, ..) => { + ExprKind::Loop(b, i, ..) => { self.hash_block(b); - if let Some(i) = *i { + if let Some(i) = i { self.hash_name(i.ident.name); } }, - ExprKind::If(cond, then, ref else_opt) => { + ExprKind::If(cond, then, else_opt) => { self.hash_expr(cond); self.hash_expr(then); - if let Some(e) = *else_opt { + if let Some(e) = else_opt { self.hash_expr(e); } }, - ExprKind::Match(e, arms, ref s) => { + ExprKind::Match(e, arms, s) => { self.hash_expr(e); - for arm in arms { + for arm in *arms { self.hash_pat(arm.pat); if let Some(e) = arm.guard { self.hash_expr(e); @@ -1017,38 +1017,38 @@ impl<'a, 'tcx> SpanlessHash<'a, 'tcx> { s.hash(&mut self.s); }, - ExprKind::MethodCall(path, receiver, args, ref _fn_span) => { + ExprKind::MethodCall(path, receiver, args, _fn_span) => { self.hash_name(path.ident.name); self.hash_expr(receiver); self.hash_exprs(args); }, ExprKind::OffsetOf(container, fields) => { self.hash_ty(container); - for field in fields { + for field in *fields { self.hash_name(field.name); } }, - ExprKind::Path(ref qpath) => { + ExprKind::Path(qpath) => { self.hash_qpath(qpath); }, ExprKind::Repeat(e, len) => { self.hash_expr(e); self.hash_const_arg(len); }, - ExprKind::Ret(ref e) => { - if let Some(e) = *e { + ExprKind::Ret(e) => { + if let Some(e) = e { self.hash_expr(e); } }, - ExprKind::Struct(path, fields, ref expr) => { + ExprKind::Struct(path, fields, expr) => { self.hash_qpath(path); - for f in fields { + for f in *fields { self.hash_name(f.ident.name); self.hash_expr(f.expr); } - if let StructTailExpr::Base(e) = *expr { + if let StructTailExpr::Base(e) = expr { self.hash_expr(e); } }, @@ -1059,11 +1059,11 @@ impl<'a, 'tcx> SpanlessHash<'a, 'tcx> { self.hash_expr(expr); }, ExprKind::Unary(lop, le) => { - std::mem::discriminant(&lop).hash(&mut self.s); + std::mem::discriminant(lop).hash(&mut self.s); self.hash_expr(le); }, ExprKind::UnsafeBinderCast(kind, expr, ty) => { - std::mem::discriminant(&kind).hash(&mut self.s); + std::mem::discriminant(kind).hash(&mut self.s); self.hash_expr(expr); if let Some(ty) = ty { self.hash_ty(ty); @@ -1084,7 +1084,7 @@ impl<'a, 'tcx> SpanlessHash<'a, 'tcx> { } pub fn hash_qpath(&mut self, p: &QPath<'_>) { - match *p { + match p { QPath::Resolved(_, path) => { self.hash_path(path); }, @@ -1092,7 +1092,7 @@ impl<'a, 'tcx> SpanlessHash<'a, 'tcx> { self.hash_name(path.ident.name); }, QPath::LangItem(lang_item, ..) => { - std::mem::discriminant(&lang_item).hash(&mut self.s); + std::mem::discriminant(lang_item).hash(&mut self.s); }, } // self.maybe_typeck_results.unwrap().qpath_res(p, id).hash(&mut self.s); @@ -1117,16 +1117,22 @@ impl<'a, 'tcx> SpanlessHash<'a, 'tcx> { self.hash_const_arg(s); self.hash_const_arg(e); }, + TyPatKind::Or(variants) => { + for variant in variants { + self.hash_ty_pat(variant); + } + }, TyPatKind::Err(_) => {}, } } pub fn hash_pat(&mut self, pat: &Pat<'_>) { std::mem::discriminant(&pat.kind).hash(&mut self.s); - match pat.kind { + match &pat.kind { + PatKind::Missing => unreachable!(), PatKind::Binding(BindingMode(by_ref, mutability), _, _, pat) => { - std::mem::discriminant(&by_ref).hash(&mut self.s); - std::mem::discriminant(&mutability).hash(&mut self.s); + std::mem::discriminant(by_ref).hash(&mut self.s); + std::mem::discriminant(mutability).hash(&mut self.s); if let Some(pat) = pat { self.hash_pat(pat); } @@ -1134,7 +1140,7 @@ impl<'a, 'tcx> SpanlessHash<'a, 'tcx> { PatKind::Box(pat) | PatKind::Deref(pat) => self.hash_pat(pat), PatKind::Expr(expr) => self.hash_pat_expr(expr), PatKind::Or(pats) => { - for pat in pats { + for pat in *pats { self.hash_pat(pat); } }, @@ -1145,44 +1151,44 @@ impl<'a, 'tcx> SpanlessHash<'a, 'tcx> { if let Some(e) = e { self.hash_pat_expr(e); } - std::mem::discriminant(&i).hash(&mut self.s); + std::mem::discriminant(i).hash(&mut self.s); }, PatKind::Ref(pat, mu) => { self.hash_pat(pat); - std::mem::discriminant(&mu).hash(&mut self.s); + std::mem::discriminant(mu).hash(&mut self.s); }, PatKind::Guard(pat, guard) => { self.hash_pat(pat); self.hash_expr(guard); }, PatKind::Slice(l, m, r) => { - for pat in l { + for pat in *l { self.hash_pat(pat); } if let Some(pat) = m { self.hash_pat(pat); } - for pat in r { + for pat in *r { self.hash_pat(pat); } }, - PatKind::Struct(ref qpath, fields, e) => { + PatKind::Struct(qpath, fields, e) => { self.hash_qpath(qpath); - for f in fields { + for f in *fields { self.hash_name(f.ident.name); self.hash_pat(f.pat); } e.hash(&mut self.s); }, PatKind::Tuple(pats, e) => { - for pat in pats { + for pat in *pats { self.hash_pat(pat); } e.hash(&mut self.s); }, - PatKind::TupleStruct(ref qpath, pats, e) => { + PatKind::TupleStruct(qpath, pats, e) => { self.hash_qpath(qpath); - for pat in pats { + for pat in *pats { self.hash_pat(pat); } e.hash(&mut self.s); @@ -1244,8 +1250,8 @@ impl<'a, 'tcx> SpanlessHash<'a, 'tcx> { pub fn hash_lifetime(&mut self, lifetime: &Lifetime) { lifetime.ident.name.hash(&mut self.s); - std::mem::discriminant(&lifetime.res).hash(&mut self.s); - if let LifetimeName::Param(param_id) = lifetime.res { + std::mem::discriminant(&lifetime.kind).hash(&mut self.s); + if let LifetimeKind::Param(param_id) = lifetime.kind { param_id.hash(&mut self.s); } } @@ -1260,7 +1266,7 @@ impl<'a, 'tcx> SpanlessHash<'a, 'tcx> { TyKind::Slice(ty) => { self.hash_ty(ty); }, - &TyKind::Array(ty, len) => { + TyKind::Array(ty, len) => { self.hash_ty(ty); self.hash_const_arg(len); }, @@ -1333,11 +1339,11 @@ impl<'a, 'tcx> SpanlessHash<'a, 'tcx> { fn hash_generic_args(&mut self, arg_list: &[GenericArg<'_>]) { for arg in arg_list { - match *arg { + match arg { GenericArg::Lifetime(l) => self.hash_lifetime(l), GenericArg::Type(ty) => self.hash_ty(ty.as_unambig_ty()), GenericArg::Const(ca) => self.hash_const_arg(ca.as_unambig_ct()), - GenericArg::Infer(ref inf) => self.hash_ty(&inf.to_ty()), + GenericArg::Infer(inf) => self.hash_ty(&inf.to_ty()), } } } diff --git a/src/tools/clippy/clippy_utils/src/lib.rs b/src/tools/clippy/clippy_utils/src/lib.rs index 668b0cb69e204..187dfa4dda845 100644 --- a/src/tools/clippy/clippy_utils/src/lib.rs +++ b/src/tools/clippy/clippy_utils/src/lib.rs @@ -1,9 +1,8 @@ #![feature(array_chunks)] #![feature(box_patterns)] -#![feature(f128)] -#![feature(f16)] #![feature(if_let_guard)] #![feature(macro_metavar_expr_concat)] +#![feature(macro_metavar_expr)] #![feature(let_chains)] #![feature(never_type)] #![feature(rustc_private)] @@ -52,9 +51,6 @@ extern crate rustc_span; extern crate rustc_trait_selection; extern crate smallvec; -#[macro_use] -pub mod sym_helper; - pub mod ast_utils; pub mod attrs; mod check_proc_macro; @@ -74,6 +70,7 @@ pub mod qualify_min_const_fn; pub mod source; pub mod str_utils; pub mod sugg; +pub mod sym; pub mod ty; pub mod usage; pub mod visitors; @@ -106,14 +103,15 @@ use rustc_hir::hir_id::{HirIdMap, HirIdSet}; use rustc_hir::intravisit::{FnKind, Visitor, walk_expr}; use rustc_hir::{ self as hir, Arm, BindingMode, Block, BlockCheckMode, Body, ByRef, Closure, ConstArgKind, ConstContext, - Destination, Expr, ExprField, ExprKind, FnDecl, FnRetTy, GenericArg, GenericArgs, HirId, Impl, ImplItem, - ImplItemKind, ImplItemRef, Item, ItemKind, LangItem, LetStmt, MatchSource, Mutability, Node, OwnerId, OwnerNode, - Param, Pat, PatExpr, PatExprKind, PatKind, Path, PathSegment, PrimTy, QPath, Stmt, StmtKind, TraitFn, TraitItem, - TraitItemKind, TraitItemRef, TraitRef, TyKind, UnOp, def, + CoroutineDesugaring, CoroutineKind, Destination, Expr, ExprField, ExprKind, FnDecl, FnRetTy, GenericArg, + GenericArgs, HirId, Impl, ImplItem, ImplItemKind, ImplItemRef, Item, ItemKind, LangItem, LetStmt, MatchSource, + Mutability, Node, OwnerId, OwnerNode, Param, Pat, PatExpr, PatExprKind, PatKind, Path, PathSegment, PrimTy, QPath, + Stmt, StmtKind, TraitFn, TraitItem, TraitItemKind, TraitItemRef, TraitRef, TyKind, UnOp, def, }; use rustc_lexer::{TokenKind, tokenize}; use rustc_lint::{LateContext, Level, Lint, LintContext}; use rustc_middle::hir::place::PlaceBase; +use rustc_middle::lint::LevelAndSource; use rustc_middle::mir::{AggregateKind, Operand, RETURN_PLACE, Rvalue, StatementKind, TerminatorKind}; use rustc_middle::ty::adjustment::{Adjust, Adjustment, AutoBorrow}; use rustc_middle::ty::fast_reject::SimplifiedType; @@ -125,7 +123,8 @@ use rustc_middle::ty::{ use rustc_span::hygiene::{ExpnKind, MacroKind}; use rustc_span::source_map::SourceMap; use rustc_span::symbol::{Ident, Symbol, kw}; -use rustc_span::{InnerSpan, Span, sym}; +use rustc_span::{InnerSpan, Span}; +use source::walk_span_to_context; use visitors::{Visitable, for_each_unconsumed_temporary}; use crate::consts::{ConstEvalCtxt, Constant, mir_to_const}; @@ -367,10 +366,10 @@ pub fn is_inherent_method_call(cx: &LateContext<'_>, expr: &Expr<'_>) -> bool { /// Checks if a method is defined in an impl of a diagnostic item pub fn is_diag_item_method(cx: &LateContext<'_>, def_id: DefId, diag_item: Symbol) -> bool { - if let Some(impl_did) = cx.tcx.impl_of_method(def_id) { - if let Some(adt) = cx.tcx.type_of(impl_did).instantiate_identity().ty_adt_def() { - return cx.tcx.is_diagnostic_item(diag_item, adt.did()); - } + if let Some(impl_did) = cx.tcx.impl_of_method(def_id) + && let Some(adt) = cx.tcx.type_of(impl_did).instantiate_identity().ty_adt_def() + { + return cx.tcx.is_diagnostic_item(diag_item, adt.did()); } false } @@ -457,10 +456,10 @@ pub fn match_qpath(path: &QPath<'_>, segments: &[&str]) -> bool { QPath::Resolved(_, path) => match_path(path, segments), QPath::TypeRelative(ty, segment) => match ty.kind { TyKind::Path(ref inner_path) => { - if let [prefix @ .., end] = segments { - if match_qpath(inner_path, prefix) { - return segment.ident.name.as_str() == *end; - } + if let [prefix @ .., end] = segments + && match_qpath(inner_path, prefix) + { + return segment.ident.name.as_str() == *end; } false }, @@ -523,10 +522,10 @@ pub fn match_path(path: &Path<'_>, segments: &[&str]) -> bool { /// If the expression is a path to a local, returns the canonical `HirId` of the local. pub fn path_to_local(expr: &Expr<'_>) -> Option { - if let ExprKind::Path(QPath::Resolved(None, path)) = expr.kind { - if let Res::Local(id) = path.res { - return Some(id); - } + if let ExprKind::Path(QPath::Resolved(None, path)) = expr.kind + && let Res::Local(id) = path.res + { + return Some(id); } None } @@ -893,16 +892,14 @@ fn is_default_equivalent_ctor(cx: &LateContext<'_>, def_id: DefId, path: &QPath< sym::BinaryHeap, ]; - if let QPath::TypeRelative(_, method) = path { - if method.ident.name == sym::new { - if let Some(impl_did) = cx.tcx.impl_of_method(def_id) { - if let Some(adt) = cx.tcx.type_of(impl_did).instantiate_identity().ty_adt_def() { - return std_types_symbols.iter().any(|&symbol| { - cx.tcx.is_diagnostic_item(symbol, adt.did()) || Some(adt.did()) == cx.tcx.lang_items().string() - }); - } - } - } + if let QPath::TypeRelative(_, method) = path + && method.ident.name == sym::new + && let Some(impl_did) = cx.tcx.impl_of_method(def_id) + && let Some(adt) = cx.tcx.type_of(impl_did).instantiate_identity().ty_adt_def() + { + return std_types_symbols.iter().any(|&symbol| { + cx.tcx.is_diagnostic_item(symbol, adt.did()) || Some(adt.did()) == cx.tcx.lang_items().string() + }); } false } @@ -1027,6 +1024,7 @@ pub fn is_default_equivalent(cx: &LateContext<'_>, e: &Expr<'_>) -> bool { ExprKind::Call(from_func, [arg]) => is_default_equivalent_from(cx, from_func, arg), ExprKind::Path(qpath) => is_res_lang_ctor(cx, cx.qpath_res(qpath, e.hir_id), OptionNone), ExprKind::AddrOf(rustc_hir::BorrowKind::Ref, _, expr) => matches!(expr.kind, ExprKind::Array([])), + ExprKind::Block(Block { stmts: [], expr, .. }, _) => expr.is_some_and(|e| is_default_equivalent(cx, e)), _ => false, } } @@ -1203,12 +1201,10 @@ pub fn capture_local_usage(cx: &LateContext<'_>, e: &Expr<'_>) -> CaptureKind { .adjustments() .get(child_id) .map_or(&[][..], |x| &**x) - { - if let rustc_ty::RawPtr(_, mutability) | rustc_ty::Ref(_, _, mutability) = + && let rustc_ty::RawPtr(_, mutability) | rustc_ty::Ref(_, _, mutability) = *adjust.last().map_or(target, |a| a.target).kind() - { - return CaptureKind::Ref(mutability); - } + { + return CaptureKind::Ref(mutability); } match parent { @@ -1416,7 +1412,7 @@ pub fn is_in_panic_handler(cx: &LateContext<'_>, e: &Expr<'_>) -> bool { } /// Gets the name of the item the expression is in, if available. -pub fn get_item_name(cx: &LateContext<'_>, expr: &Expr<'_>) -> Option { +pub fn parent_item_name(cx: &LateContext<'_>, expr: &Expr<'_>) -> Option { let parent_id = cx.tcx.hir_get_parent_item(expr.hir_id).def_id; match cx.tcx.hir_node_by_def_id(parent_id) { Node::Item(item) => item.kind.ident().map(|ident| ident.name), @@ -1736,10 +1732,10 @@ pub fn is_integer_const(cx: &LateContext<'_>, e: &Expr<'_>, value: u128) -> bool /// Checks whether the given expression is a constant literal of the given value. pub fn is_integer_literal(expr: &Expr<'_>, value: u128) -> bool { // FIXME: use constant folding - if let ExprKind::Lit(spanned) = expr.kind { - if let LitKind::Int(v, _) = spanned.node { - return v == value; - } + if let ExprKind::Lit(spanned) = expr.kind + && let LitKind::Int(v, _) = spanned.node + { + return v == value; } false } @@ -1776,10 +1772,10 @@ pub fn is_expn_of(mut span: Span, name: &str) -> Option { let data = span.ctxt().outer_expn_data(); let new_span = data.call_site; - if let ExpnKind::Macro(MacroKind::Bang, mac_name) = data.kind { - if mac_name.as_str() == name { - return Some(new_span); - } + if let ExpnKind::Macro(MacroKind::Bang, mac_name) = data.kind + && mac_name.as_str() == name + { + return Some(new_span); } span = new_span; @@ -1805,10 +1801,10 @@ pub fn is_direct_expn_of(span: Span, name: &str) -> Option { let data = span.ctxt().outer_expn_data(); let new_span = data.call_site; - if let ExpnKind::Macro(MacroKind::Bang, mac_name) = data.kind { - if mac_name.as_str() == name { - return Some(new_span); - } + if let ExpnKind::Macro(MacroKind::Bang, mac_name) = data.kind + && mac_name.as_str() == name + { + return Some(new_span); } } @@ -1829,15 +1825,15 @@ pub fn nth_arg<'tcx>(cx: &LateContext<'tcx>, fn_def_id: OwnerId, nth: usize) -> /// Checks if an expression is constructing a tuple-like enum variant or struct pub fn is_ctor_or_promotable_const_function(cx: &LateContext<'_>, expr: &Expr<'_>) -> bool { - if let ExprKind::Call(fun, _) = expr.kind { - if let ExprKind::Path(ref qp) = fun.kind { - let res = cx.qpath_res(qp, fun.hir_id); - return match res { - Res::Def(DefKind::Variant | DefKind::Ctor(..), ..) => true, - Res::Def(_, def_id) => cx.tcx.is_promotable_const_fn(def_id), - _ => false, - }; - } + if let ExprKind::Call(fun, _) = expr.kind + && let ExprKind::Path(ref qp) = fun.kind + { + let res = cx.qpath_res(qp, fun.hir_id); + return match res { + Res::Def(DefKind::Variant | DefKind::Ctor(..), ..) => true, + Res::Def(_, def_id) => cx.tcx.is_promotable_const_fn(def_id), + _ => false, + }; } false } @@ -1857,6 +1853,7 @@ pub fn is_refutable(cx: &LateContext<'_>, pat: &Pat<'_>) -> bool { } match pat.kind { + PatKind::Missing => unreachable!(), PatKind::Wild | PatKind::Never => false, // If `!` typechecked then the type is empty, so not refutable. PatKind::Binding(_, _, _, pat) => pat.is_some_and(|pat| is_refutable(cx, pat)), PatKind::Box(pat) | PatKind::Ref(pat, _) => is_refutable(cx, pat), @@ -1910,10 +1907,10 @@ pub fn is_self(slf: &Param<'_>) -> bool { } pub fn is_self_ty(slf: &hir::Ty<'_>) -> bool { - if let TyKind::Path(QPath::Resolved(None, path)) = slf.kind { - if let Res::SelfTyParam { .. } | Res::SelfTyAlias { .. } = path.res { - return true; - } + if let TyKind::Path(QPath::Resolved(None, path)) = slf.kind + && let Res::SelfTyParam { .. } | Res::SelfTyAlias { .. } = path.res + { + return true; } false } @@ -1976,14 +1973,14 @@ pub fn fulfill_or_allowed(cx: &LateContext<'_>, lint: &'static Lint, ids: impl I let mut suppress_lint = false; for id in ids { - let (level, _) = cx.tcx.lint_level_at_node(lint, id); - if let Some(expectation) = level.get_expectation_id() { + let LevelAndSource { level, lint_id, .. } = cx.tcx.lint_level_at_node(lint, id); + if let Some(expectation) = lint_id { cx.fulfill_expectation(expectation); } match level { - Level::Allow | Level::Expect(_) => suppress_lint = true, - Level::Warn | Level::ForceWarn(_) | Level::Deny | Level::Forbid => {}, + Level::Allow | Level::Expect => suppress_lint = true, + Level::Warn | Level::ForceWarn | Level::Deny | Level::Forbid => {}, } } @@ -1998,7 +1995,7 @@ pub fn fulfill_or_allowed(cx: &LateContext<'_>, lint: &'static Lint, ids: impl I /// make sure to use `span_lint_hir` functions to emit the lint. This ensures that /// expectations at the checked nodes will be fulfilled. pub fn is_lint_allowed(cx: &LateContext<'_>, lint: &'static Lint, id: HirId) -> bool { - cx.tcx.lint_level_at_node(lint, id).0 == Level::Allow + cx.tcx.lint_level_at_node(lint, id).level == Level::Allow } pub fn strip_pat_refs<'hir>(mut pat: &'hir Pat<'hir>) -> &'hir Pat<'hir> { @@ -2091,7 +2088,7 @@ pub fn match_libc_symbol(cx: &LateContext<'_>, did: DefId, name: &str) -> bool { let path = cx.get_def_path(did); // libc is meant to be used as a flat list of names, but they're all actually defined in different // modules based on the target platform. Ignore everything but crate name and the item name. - path.first().is_some_and(|s| s.as_str() == "libc") && path.last().is_some_and(|s| s.as_str() == name) + path.first().is_some_and(|s| *s == sym::libc) && path.last().is_some_and(|s| s.as_str() == name) } /// Returns the list of condition expressions and the list of blocks in a @@ -2118,10 +2115,10 @@ pub fn if_sequence<'tcx>(mut expr: &'tcx Expr<'tcx>) -> (Vec<&'tcx Expr<'tcx>>, } // final `else {..}` - if !blocks.is_empty() { - if let ExprKind::Block(block, _) = expr.kind { - blocks.push(block); - } + if !blocks.is_empty() + && let ExprKind::Block(block, _) = expr.kind + { + blocks.push(block); } (conds, blocks) @@ -2136,26 +2133,34 @@ pub fn is_async_fn(kind: FnKind<'_>) -> bool { } } -/// Peels away all the compiler generated code surrounding the body of an async function, -pub fn get_async_fn_body<'tcx>(tcx: TyCtxt<'tcx>, body: &Body<'_>) -> Option<&'tcx Expr<'tcx>> { - if let ExprKind::Closure(&Closure { body, .. }) = body.value.kind { - if let ExprKind::Block( +/// Peels away all the compiler generated code surrounding the body of an async closure. +pub fn get_async_closure_expr<'tcx>(tcx: TyCtxt<'tcx>, expr: &Expr<'_>) -> Option<&'tcx Expr<'tcx>> { + if let ExprKind::Closure(&Closure { + body, + kind: hir::ClosureKind::Coroutine(CoroutineKind::Desugared(CoroutineDesugaring::Async, _)), + .. + }) = expr.kind + && let ExprKind::Block( Block { - stmts: [], expr: Some(Expr { - kind: ExprKind::DropTemps(expr), + kind: ExprKind::DropTemps(inner_expr), .. }), .. }, _, ) = tcx.hir_body(body).value.kind - { - return Some(expr); - } + { + Some(inner_expr) + } else { + None } - None +} + +/// Peels away all the compiler generated code surrounding the body of an async function, +pub fn get_async_fn_body<'tcx>(tcx: TyCtxt<'tcx>, body: &Body<'_>) -> Option<&'tcx Expr<'tcx>> { + get_async_closure_expr(tcx, body.value) } // check if expr is calling method or function with #[must_use] attribute @@ -2359,14 +2364,14 @@ pub fn is_no_std_crate(cx: &LateContext<'_>) -> bool { cx.tcx .hir_attrs(hir::CRATE_HIR_ID) .iter() - .any(|attr| attr.name_or_empty() == sym::no_std) + .any(|attr| attr.has_name(sym::no_std)) } pub fn is_no_core_crate(cx: &LateContext<'_>) -> bool { cx.tcx .hir_attrs(hir::CRATE_HIR_ID) .iter() - .any(|attr| attr.name_or_empty() == sym::no_core) + .any(|attr| attr.has_name(sym::no_core)) } /// Check if parent of a hir node is a trait implementation block. @@ -2627,17 +2632,19 @@ pub fn peel_ref_operators<'hir>(cx: &LateContext<'_>, mut expr: &'hir Expr<'hir> } pub fn is_hir_ty_cfg_dependant(cx: &LateContext<'_>, ty: &hir::Ty<'_>) -> bool { - if let TyKind::Path(QPath::Resolved(_, path)) = ty.kind { - if let Res::Def(_, def_id) = path.res { - return cx.tcx.has_attr(def_id, sym::cfg_trace) || cx.tcx.has_attr(def_id, sym::cfg_attr); - } + if let TyKind::Path(QPath::Resolved(_, path)) = ty.kind + && let Res::Def(_, def_id) = path.res + { + return cx.tcx.has_attr(def_id, sym::cfg) || cx.tcx.has_attr(def_id, sym::cfg_attr); } false } static TEST_ITEM_NAMES_CACHE: OnceLock>>> = OnceLock::new(); -fn with_test_item_names(tcx: TyCtxt<'_>, module: LocalModDefId, f: impl Fn(&[Symbol]) -> bool) -> bool { +/// Apply `f()` to the set of test item names. +/// The names are sorted using the default `Symbol` ordering. +fn with_test_item_names(tcx: TyCtxt<'_>, module: LocalModDefId, f: impl FnOnce(&[Symbol]) -> bool) -> bool { let cache = TEST_ITEM_NAMES_CACHE.get_or_init(|| Mutex::new(FxHashMap::default())); let mut map: MutexGuard<'_, FxHashMap>> = cache.lock().unwrap(); let value = map.entry(module); @@ -2649,18 +2656,16 @@ fn with_test_item_names(tcx: TyCtxt<'_>, module: LocalModDefId, f: impl Fn(&[Sym if matches!(tcx.def_kind(id.owner_id), DefKind::Const) && let item = tcx.hir_item(id) && let ItemKind::Const(ident, ty, _generics, _body) = item.kind - { - if let TyKind::Path(QPath::Resolved(_, path)) = ty.kind { + && let TyKind::Path(QPath::Resolved(_, path)) = ty.kind // We could also check for the type name `test::TestDescAndFn` - if let Res::Def(DefKind::Struct, _) = path.res { - let has_test_marker = tcx - .hir_attrs(item.hir_id()) - .iter() - .any(|a| a.has_name(sym::rustc_test_marker)); - if has_test_marker { - names.push(ident.name); - } - } + && let Res::Def(DefKind::Struct, _) = path.res + { + let has_test_marker = tcx + .hir_attrs(item.hir_id()) + .iter() + .any(|a| a.has_name(sym::rustc_test_marker)); + if has_test_marker { + names.push(ident.name); } } } @@ -2681,18 +2686,37 @@ pub fn is_in_test_function(tcx: TyCtxt<'_>, id: HirId) -> bool { // Since you can nest functions we need to collect all until we leave // function scope .any(|(_id, node)| { - if let Node::Item(item) = node { - if let ItemKind::Fn { ident, .. } = item.kind { - // Note that we have sorted the item names in the visitor, - // so the binary_search gets the same as `contains`, but faster. - return names.binary_search(&ident.name).is_ok(); - } + if let Node::Item(item) = node + && let ItemKind::Fn { ident, .. } = item.kind + { + // Note that we have sorted the item names in the visitor, + // so the binary_search gets the same as `contains`, but faster. + return names.binary_search(&ident.name).is_ok(); } false }) }) } +/// Checks if `fn_def_id` has a `#[test]` attribute applied +/// +/// This only checks directly applied attributes. To see if a node has a parent function marked with +/// `#[test]` use [`is_in_test_function`]. +/// +/// Note: Add `//@compile-flags: --test` to UI tests with a `#[test]` function +pub fn is_test_function(tcx: TyCtxt<'_>, fn_def_id: LocalDefId) -> bool { + let id = tcx.local_def_id_to_hir_id(fn_def_id); + if let Node::Item(item) = tcx.hir_node(id) + && let ItemKind::Fn { ident, .. } = item.kind + { + with_test_item_names(tcx, tcx.parent_module(id), |names| { + names.binary_search(&ident.name).is_ok() + }) + } else { + false + } +} + /// Checks if `id` has a `#[cfg(test)]` attribute applied /// /// This only checks directly applied attributes, to see if a node is inside a `#[cfg(test)]` parent @@ -2990,7 +3014,7 @@ pub fn expr_use_ctxt<'tcx>(cx: &LateContext<'tcx>, e: &'tcx Expr<'tcx>) -> ExprU { adjustments = cx.typeck_results().expr_adjustments(e); } - same_ctxt &= cx.tcx.hir().span(parent_id).ctxt() == ctxt; + same_ctxt &= cx.tcx.hir_span(parent_id).ctxt() == ctxt; if let Node::Expr(e) = parent { match e.kind { ExprKind::If(e, _, _) | ExprKind::Match(e, _, _) if e.hir_id != child_id => { @@ -3077,7 +3101,7 @@ pub fn span_find_starting_semi(sm: &SourceMap, span: Span) -> Span { sm.span_take_while(span, |&ch| ch == ' ' || ch == ';') } -/// Returns whether the given let pattern and else body can be turned into a question mark +/// Returns whether the given let pattern and else body can be turned into the `?` operator /// /// For this example: /// ```ignore @@ -3100,8 +3124,7 @@ pub fn span_find_starting_semi(sm: &SourceMap, span: Span) -> Span { /// ``` /// /// We output `Some(a)` in the first instance, and `Some(FooBar { a, b })` in the second, because -/// the question mark operator is applicable here. Callers have to check whether we are in a -/// constant or not. +/// the `?` operator is applicable here. Callers have to check whether we are in a constant or not. pub fn pat_and_expr_can_be_question_mark<'a, 'hir>( cx: &LateContext<'_>, pat: &'a Pat<'hir>, @@ -3500,7 +3523,7 @@ fn maybe_get_relative_path(from: &DefPath, to: &DefPath, max_super: usize) -> St // a::b::c ::d::sym refers to // e::f::sym:: :: // result should be super::super::super::super::e::f - if let DefPathData::TypeNs(Some(s)) = l { + if let DefPathData::TypeNs(s) = l { path.push(s.to_string()); } if let DefPathData::TypeNs(_) = r { @@ -3511,7 +3534,7 @@ fn maybe_get_relative_path(from: &DefPath, to: &DefPath, max_super: usize) -> St // a::b::sym:: :: refers to // c::d::e ::f::sym // when looking at `f` - Left(DefPathData::TypeNs(Some(sym))) => path.push(sym.to_string()), + Left(DefPathData::TypeNs(sym)) => path.push(sym.to_string()), // consider: // a::b::c ::d::sym refers to // e::f::sym:: :: @@ -3525,7 +3548,7 @@ fn maybe_get_relative_path(from: &DefPath, to: &DefPath, max_super: usize) -> St // `super` chain would be too long, just use the absolute path instead once(String::from("crate")) .chain(to.data.iter().filter_map(|el| { - if let DefPathData::TypeNs(Some(sym)) = el.data { + if let DefPathData::TypeNs(sym) = el.data { Some(sym.to_string()) } else { None @@ -3724,3 +3747,20 @@ pub fn peel_hir_ty_options<'tcx>(cx: &LateContext<'tcx>, mut hir_ty: &'tcx hir:: } hir_ty } + +/// If `expr` is a desugared `.await`, return the original expression if it does not come from a +/// macro expansion. +pub fn desugar_await<'tcx>(expr: &'tcx Expr<'_>) -> Option<&'tcx Expr<'tcx>> { + if let ExprKind::Match(match_value, _, MatchSource::AwaitDesugar) = expr.kind + && let ExprKind::Call(_, [into_future_arg]) = match_value.kind + && let ctxt = expr.span.ctxt() + && for_each_expr_without_closures(into_future_arg, |e| { + walk_span_to_context(e.span, ctxt).map_or(ControlFlow::Break(()), |_| ControlFlow::Continue(())) + }) + .is_none() + { + Some(into_future_arg) + } else { + None + } +} diff --git a/src/tools/clippy/clippy_utils/src/macros.rs b/src/tools/clippy/clippy_utils/src/macros.rs index 1a457bc7f2141..dfb30b9c21864 100644 --- a/src/tools/clippy/clippy_utils/src/macros.rs +++ b/src/tools/clippy/clippy_utils/src/macros.rs @@ -178,7 +178,6 @@ pub fn first_node_in_macro(cx: &LateContext<'_>, node: &impl HirNode) -> Option< // get the parent node, possibly skipping over a statement // if the parent is not found, it is sensible to return `Some(root)` - let hir = cx.tcx.hir(); let mut parent_iter = cx.tcx.hir_parent_iter(node.hir_id()); let (parent_id, _) = match parent_iter.next() { None => return Some(ExpnId::root()), @@ -190,7 +189,7 @@ pub fn first_node_in_macro(cx: &LateContext<'_>, node: &impl HirNode) -> Option< }; // get the macro expansion of the parent node - let parent_span = hir.span(parent_id); + let parent_span = cx.tcx.hir_span(parent_id); let Some(parent_macro_call) = macro_backtrace(parent_span).next() else { // the parent node is not in a macro return Some(ExpnId::root()); diff --git a/src/tools/clippy/clippy_utils/src/mir/mod.rs b/src/tools/clippy/clippy_utils/src/mir/mod.rs index ffcfcd240ea5a..9ba644fdd20ec 100644 --- a/src/tools/clippy/clippy_utils/src/mir/mod.rs +++ b/src/tools/clippy/clippy_utils/src/mir/mod.rs @@ -76,7 +76,7 @@ impl<'tcx> Visitor<'tcx> for V<'_> { } if matches!( ctx, - PlaceContext::NonMutatingUse(NonMutatingUseContext::Move) + PlaceContext::NonMutatingUse(NonMutatingUseContext::Move | NonMutatingUseContext::Inspect) | PlaceContext::MutatingUse(MutatingUseContext::Borrow) ) { self.results[i].local_consume_or_mutate_locs.push(loc); diff --git a/src/tools/clippy/clippy_utils/src/msrvs.rs b/src/tools/clippy/clippy_utils/src/msrvs.rs index 86f4f190b950a..19061b574ff88 100644 --- a/src/tools/clippy/clippy_utils/src/msrvs.rs +++ b/src/tools/clippy/clippy_utils/src/msrvs.rs @@ -1,10 +1,10 @@ +use crate::sym; use rustc_ast::Attribute; use rustc_ast::attr::AttributeExt; - use rustc_attr_parsing::{RustcVersion, parse_version}; use rustc_lint::LateContext; use rustc_session::Session; -use rustc_span::{Symbol, sym}; +use rustc_span::Symbol; use serde::Deserialize; use smallvec::SmallVec; use std::iter::once; @@ -24,10 +24,10 @@ macro_rules! msrv_aliases { msrv_aliases! { 1,87,0 { OS_STR_DISPLAY, INT_MIDPOINT } 1,85,0 { UINT_FLOAT_MIDPOINT } - 1,84,0 { CONST_OPTION_AS_SLICE } + 1,84,0 { CONST_OPTION_AS_SLICE, MANUAL_DANGLING_PTR } 1,83,0 { CONST_EXTERN_FN, CONST_FLOAT_BITS_CONV, CONST_FLOAT_CLASSIFY, CONST_MUT_REFS, CONST_UNWRAP } 1,82,0 { IS_NONE_OR, REPEAT_N, RAW_REF_OP } - 1,81,0 { LINT_REASONS_STABILIZATION, ERROR_IN_CORE, EXPLICIT_SELF_TYPE_ELISION } + 1,81,0 { LINT_REASONS_STABILIZATION, ERROR_IN_CORE, EXPLICIT_SELF_TYPE_ELISION, DURATION_ABS_DIFF } 1,80,0 { BOX_INTO_ITER, LAZY_CELL } 1,77,0 { C_STR_LITERALS } 1,76,0 { PTR_FROM_REF, OPTION_RESULT_INSPECT } @@ -40,6 +40,7 @@ msrv_aliases! { 1,65,0 { LET_ELSE, POINTER_CAST_CONSTNESS } 1,63,0 { CLONE_INTO } 1,62,0 { BOOL_THEN_SOME, DEFAULT_ENUM_ATTRIBUTE, CONST_EXTERN_C_FN } + 1,60,0 { ABS_DIFF } 1,59,0 { THREAD_LOCAL_CONST_INIT } 1,58,0 { FORMAT_ARGS_CAPTURE, PATTERN_TRAIT_CHAR_ARRAY, CONST_RAW_PTR_DEREF } 1,57,0 { MAP_WHILE } @@ -63,6 +64,7 @@ msrv_aliases! { 1,35,0 { OPTION_COPIED, RANGE_CONTAINS } 1,34,0 { TRY_FROM } 1,33,0 { UNDERSCORE_IMPORTS } + 1,32,0 { CONST_IS_POWER_OF_TWO } 1,31,0 { OPTION_REPLACE } 1,30,0 { ITERATOR_FIND_MAP, TOOL_ATTRIBUTES } 1,29,0 { ITER_FLATTEN } @@ -182,8 +184,7 @@ impl MsrvStack { } fn parse_attrs(sess: &Session, attrs: &[impl AttributeExt]) -> Option { - let sym_msrv = Symbol::intern("msrv"); - let mut msrv_attrs = attrs.iter().filter(|attr| attr.path_matches(&[sym::clippy, sym_msrv])); + let mut msrv_attrs = attrs.iter().filter(|attr| attr.path_matches(&[sym::clippy, sym::msrv])); if let Some(msrv_attr) = msrv_attrs.next() { if let Some(duplicate) = msrv_attrs.next_back() { diff --git a/src/tools/clippy/clippy_utils/src/paths.rs b/src/tools/clippy/clippy_utils/src/paths.rs index 51d06ad9b1aa5..7f64ebd3b6437 100644 --- a/src/tools/clippy/clippy_utils/src/paths.rs +++ b/src/tools/clippy/clippy_utils/src/paths.rs @@ -30,9 +30,11 @@ pub const SYNTAX_CONTEXT: [&str; 3] = ["rustc_span", "hygiene", "SyntaxContext"] pub const CHAR_IS_ASCII: [&str; 5] = ["core", "char", "methods", "", "is_ascii"]; pub const IO_ERROR_NEW: [&str; 5] = ["std", "io", "error", "Error", "new"]; pub const IO_ERRORKIND_OTHER: [&str; 5] = ["std", "io", "error", "ErrorKind", "Other"]; +pub const ALIGN_OF: [&str; 3] = ["core", "mem", "align_of"]; // Paths in clippy itself pub const MSRV_STACK: [&str; 3] = ["clippy_utils", "msrvs", "MsrvStack"]; +pub const CLIPPY_SYM_MODULE: [&str; 2] = ["clippy_utils", "sym"]; // Paths in external crates #[expect(clippy::invalid_paths)] // internal lints do not know about all external crates diff --git a/src/tools/clippy/clippy_utils/src/source.rs b/src/tools/clippy/clippy_utils/src/source.rs index 80066e9702d34..8645d5730fedb 100644 --- a/src/tools/clippy/clippy_utils/src/source.rs +++ b/src/tools/clippy/clippy_utils/src/source.rs @@ -142,7 +142,20 @@ pub trait SpanRangeExt: SpanRange { map_range(cx.sess().source_map(), self.into_range(), f) } - /// Extends the range to include all preceding whitespace characters. + #[allow(rustdoc::invalid_rust_codeblocks, reason = "The codeblock is intentionally broken")] + /// Extends the range to include all preceding whitespace characters, unless there + /// are non-whitespace characters left on the same line after `self`. + /// + /// This extra condition prevents a problem when removing the '}' in: + /// ```ignore + /// ( // There was an opening bracket after the parenthesis, which has been removed + /// // This is a comment + /// }) + /// ``` + /// Removing the whitespaces, including the linefeed, before the '}', would put the + /// closing parenthesis at the end of the `// This is a comment` line, which would + /// make it part of the comment as well. In this case, it is best to keep the span + /// on the '}' alone. fn with_leading_whitespace(self, cx: &impl HasSession) -> Range { with_leading_whitespace(cx.sess().source_map(), self.into_range()) } @@ -263,10 +276,15 @@ fn map_range( } fn with_leading_whitespace(sm: &SourceMap, sp: Range) -> Range { - map_range(sm, sp.clone(), |src, range| { - Some(src.get(..range.start)?.trim_end().len()..range.end) + map_range(sm, sp, |src, range| { + let non_blank_after = src.len() - src.get(range.end..)?.trim_start().len(); + if src.get(range.end..non_blank_after)?.contains(['\r', '\n']) { + Some(src.get(..range.start)?.trim_end().len()..range.end) + } else { + Some(range) + } }) - .unwrap_or(sp) + .unwrap() } fn trim_start(sm: &SourceMap, sp: Range) -> Range { @@ -384,10 +402,10 @@ pub fn snippet_indent(sess: &impl HasSession, span: Span) -> Option { // For some reason these attributes don't have any expansion info on them, so // we have to check it this way until there is a better way. pub fn is_present_in_source(sess: &impl HasSession, span: Span) -> bool { - if let Some(snippet) = snippet_opt(sess, span) { - if snippet.is_empty() { - return false; - } + if let Some(snippet) = snippet_opt(sess, span) + && snippet.is_empty() + { + return false; } true } @@ -408,11 +426,11 @@ pub fn position_before_rarrow(s: &str) -> Option { let mut rpos = rpos; let chars: Vec = s.chars().collect(); while rpos > 1 { - if let Some(c) = chars.get(rpos - 1) { - if c.is_whitespace() { - rpos -= 1; - continue; - } + if let Some(c) = chars.get(rpos - 1) + && c.is_whitespace() + { + rpos -= 1; + continue; } break; } diff --git a/src/tools/clippy/clippy_utils/src/str_utils.rs b/src/tools/clippy/clippy_utils/src/str_utils.rs index 421b25a77fe8b..f0f82c8dddcf5 100644 --- a/src/tools/clippy/clippy_utils/src/str_utils.rs +++ b/src/tools/clippy/clippy_utils/src/str_utils.rs @@ -1,4 +1,4 @@ -/// Dealing with sting indices can be hard, this struct ensures that both the +/// Dealing with string indices can be hard, this struct ensures that both the /// character and byte index are provided for correct indexing. #[derive(Debug, Default, PartialEq, Eq)] pub struct StrIndex { @@ -165,7 +165,7 @@ pub fn camel_case_split(s: &str) -> Vec<&str> { offsets.windows(2).map(|w| &s[w[0]..w[1]]).collect() } -/// Dealing with sting comparison can be complicated, this struct ensures that both the +/// Dealing with string comparison can be complicated, this struct ensures that both the /// character and byte count are provided for correct indexing. #[derive(Debug, Default, PartialEq, Eq)] pub struct StrCount { diff --git a/src/tools/clippy/clippy_utils/src/sugg.rs b/src/tools/clippy/clippy_utils/src/sugg.rs index 68a1de96a3515..93dec113d31a5 100644 --- a/src/tools/clippy/clippy_utils/src/sugg.rs +++ b/src/tools/clippy/clippy_utils/src/sugg.rs @@ -326,7 +326,7 @@ impl<'a> Sugg<'a> { /// `self` argument of a method call /// (e.g., to build `bar.foo()` or `(1 + 2).foo()`). #[must_use] - pub fn maybe_par(self) -> Self { + pub fn maybe_paren(self) -> Self { match self { Sugg::NonParen(..) => self, // `(x)` and `(x).y()` both don't need additional parens. @@ -357,7 +357,7 @@ fn binop_to_string(op: AssocOp, lhs: &str, rhs: &str) -> String { match op { AssocOp::Binary(op) => format!("{lhs} {} {rhs}", op.as_str()), AssocOp::Assign => format!("{lhs} = {rhs}"), - AssocOp::AssignOp(op) => format!("{lhs} {}= {rhs}", op.as_str()), + AssocOp::AssignOp(op) => format!("{lhs} {} {rhs}", op.as_str()), AssocOp::Cast => format!("{lhs} as {rhs}"), AssocOp::Range(limits) => format!("{lhs}{}{rhs}", limits.as_str()), } @@ -494,7 +494,7 @@ impl Display for ParenHelper { /// operators have the same /// precedence. pub fn make_unop(op: &str, expr: Sugg<'_>) -> Sugg<'static> { - Sugg::MaybeParen(format!("{op}{}", expr.maybe_par()).into()) + Sugg::MaybeParen(format!("{op}{}", expr.maybe_paren()).into()) } /// Builds the string for ` ` adding parenthesis when necessary. @@ -839,8 +839,7 @@ impl<'tcx> Delegate<'tcx> for DerefDelegate<'_, 'tcx> { fn borrow(&mut self, cmt: &PlaceWithHirId<'tcx>, _: HirId, _: ty::BorrowKind) { if let PlaceBase::Local(id) = cmt.place.base { - let map = self.cx.tcx.hir(); - let span = map.span(cmt.hir_id); + let span = self.cx.tcx.hir_span(cmt.hir_id); let start_span = Span::new(self.next_pos, span.lo(), span.ctxt(), None); let mut start_snip = snippet_with_applicability(self.cx, start_span, "..", &mut self.applicability); @@ -947,10 +946,9 @@ impl<'tcx> Delegate<'tcx> for DerefDelegate<'_, 'tcx> { // some items do not need explicit deref, such as array accesses, // so we mark them as already processed // i.e.: don't suggest `*sub[1..4].len()` for `|sub| sub[1..4].len() == 3` - if let ty::Ref(_, inner, _) = cmt.place.ty_before_projection(i).kind() { - if matches!(inner.kind(), ty::Ref(_, innermost, _) if innermost.is_array()) { - projections_handled = true; - } + if let ty::Ref(_, inner, _) = cmt.place.ty_before_projection(i).kind() + && matches!(inner.kind(), ty::Ref(_, innermost, _) if innermost.is_array()) { + projections_handled = true; } }, } @@ -1009,12 +1007,12 @@ mod test { } #[test] - fn binop_maybe_par() { + fn binop_maybe_paren() { let sugg = Sugg::BinOp(AssocOp::Binary(ast::BinOpKind::Add), "1".into(), "1".into()); - assert_eq!("(1 + 1)", sugg.maybe_par().to_string()); + assert_eq!("(1 + 1)", sugg.maybe_paren().to_string()); let sugg = Sugg::BinOp(AssocOp::Binary(ast::BinOpKind::Add), "(1 + 1)".into(), "(1 + 1)".into()); - assert_eq!("((1 + 1) + (1 + 1))", sugg.maybe_par().to_string()); + assert_eq!("((1 + 1) + (1 + 1))", sugg.maybe_paren().to_string()); } #[test] fn not_op() { diff --git a/src/tools/clippy/clippy_utils/src/sym.rs b/src/tools/clippy/clippy_utils/src/sym.rs new file mode 100644 index 0000000000000..38f077134c033 --- /dev/null +++ b/src/tools/clippy/clippy_utils/src/sym.rs @@ -0,0 +1,113 @@ +#![allow(non_upper_case_globals)] + +use rustc_span::symbol::{PREDEFINED_SYMBOLS_COUNT, Symbol}; + +#[doc(no_inline)] +pub use rustc_span::sym::*; + +macro_rules! val { + ($name:ident) => { + stringify!($name) + }; + ($name:ident $value:literal) => { + $value + }; +} + +macro_rules! generate { + ($($name:ident $(: $value:literal)? ,)*) => { + /// To be supplied to `rustc_interface::Config` + pub const EXTRA_SYMBOLS: &[&str] = &[ + $( + val!($name $($value)?), + )* + ]; + + $( + pub const $name: Symbol = Symbol::new(PREDEFINED_SYMBOLS_COUNT + ${index()}); + )* + }; +} + +generate! { + abs, + as_bytes, + as_deref_mut, + as_deref, + as_mut, + Binary, + build_hasher, + cargo_clippy: "cargo-clippy", + Cargo_toml: "Cargo.toml", + cast, + chars, + CLIPPY_ARGS, + CLIPPY_CONF_DIR, + clone_into, + cloned, + collect, + contains, + copied, + CRLF: "\r\n", + Current, + ends_with, + exp, + extend, + finish_non_exhaustive, + finish, + flat_map, + for_each, + from_raw, + from_str_radix, + get, + insert, + int_roundings, + into_bytes, + into_owned, + IntoIter, + is_ascii, + is_empty, + is_err, + is_none, + is_ok, + is_some, + last, + LF: "\n", + LowerExp, + LowerHex, + max, + min, + mode, + msrv, + Octal, + or_default, + parse, + push, + regex, + reserve, + resize, + restriction, + rustfmt_skip, + set_len, + set_mode, + set_readonly, + signum, + split_whitespace, + split, + Start, + take, + TBD, + then_some, + to_digit, + to_owned, + unused_extern_crates, + unwrap_err, + unwrap_or_default, + UpperExp, + UpperHex, + V4, + V6, + Weak, + with_capacity, + wrapping_offset, +} diff --git a/src/tools/clippy/clippy_utils/src/sym_helper.rs b/src/tools/clippy/clippy_utils/src/sym_helper.rs deleted file mode 100644 index f47dc80ebade8..0000000000000 --- a/src/tools/clippy/clippy_utils/src/sym_helper.rs +++ /dev/null @@ -1,7 +0,0 @@ -#[macro_export] -/// Convenience wrapper around rustc's `Symbol::intern` -macro_rules! sym { - ($tt:tt) => { - rustc_span::symbol::Symbol::intern(stringify!($tt)) - }; -} diff --git a/src/tools/clippy/clippy_utils/src/ty/mod.rs b/src/tools/clippy/clippy_utils/src/ty/mod.rs index 6fdf4c244f8d8..da09edd7f7c03 100644 --- a/src/tools/clippy/clippy_utils/src/ty/mod.rs +++ b/src/tools/clippy/clippy_utils/src/ty/mod.rs @@ -19,8 +19,8 @@ use rustc_middle::mir::interpret::Scalar; use rustc_middle::traits::EvaluationResult; use rustc_middle::ty::layout::ValidityRequirement; use rustc_middle::ty::{ - self, AdtDef, AliasTy, AssocItem, AssocKind, Binder, BoundRegion, FnSig, GenericArg, GenericArgKind, - GenericArgsRef, GenericParamDefKind, IntTy, ParamEnv, Region, RegionKind, TraitRef, Ty, TyCtxt, TypeSuperVisitable, + self, AdtDef, AliasTy, AssocItem, AssocTag, Binder, BoundRegion, FnSig, GenericArg, GenericArgKind, GenericArgsRef, + GenericParamDefKind, IntTy, ParamEnv, Region, RegionKind, TraitRef, Ty, TyCtxt, TypeFoldable, TypeSuperVisitable, TypeVisitable, TypeVisitableExt, TypeVisitor, UintTy, Upcast, VariantDef, VariantDiscr, }; use rustc_span::symbol::Ident; @@ -128,10 +128,10 @@ pub fn contains_ty_adt_constructor_opaque<'tcx>(cx: &LateContext<'tcx>, ty: Ty<' // For `impl Trait`, it will register a predicate of `::Assoc = U`, // so we check the term for `U`. ty::ClauseKind::Projection(projection_predicate) => { - if let ty::TermKind::Ty(ty) = projection_predicate.term.unpack() { - if contains_ty_adt_constructor_opaque_inner(cx, ty, needle, seen) { - return true; - } + if let ty::TermKind::Ty(ty) = projection_predicate.term.unpack() + && contains_ty_adt_constructor_opaque_inner(cx, ty, needle, seen) + { + return true; } }, _ => (), @@ -156,7 +156,7 @@ pub fn contains_ty_adt_constructor_opaque<'tcx>(cx: &LateContext<'tcx>, ty: Ty<' pub fn get_iterator_item_ty<'tcx>(cx: &LateContext<'tcx>, ty: Ty<'tcx>) -> Option> { cx.tcx .get_diagnostic_item(sym::Iterator) - .and_then(|iter_did| cx.get_associated_type(ty, iter_did, "Item")) + .and_then(|iter_did| cx.get_associated_type(ty, iter_did, sym::Item)) } /// Get the diagnostic name of a type, e.g. `sym::HashMap`. To check if a type @@ -337,20 +337,20 @@ pub fn is_must_use_ty<'tcx>(cx: &LateContext<'tcx>, ty: Ty<'tcx>) -> bool { ty::Tuple(args) => args.iter().any(|ty| is_must_use_ty(cx, ty)), ty::Alias(ty::Opaque, AliasTy { def_id, .. }) => { for (predicate, _) in cx.tcx.explicit_item_self_bounds(def_id).skip_binder() { - if let ty::ClauseKind::Trait(trait_predicate) = predicate.kind().skip_binder() { - if cx.tcx.has_attr(trait_predicate.trait_ref.def_id, sym::must_use) { - return true; - } + if let ty::ClauseKind::Trait(trait_predicate) = predicate.kind().skip_binder() + && cx.tcx.has_attr(trait_predicate.trait_ref.def_id, sym::must_use) + { + return true; } } false }, ty::Dynamic(binder, _, _) => { for predicate in *binder { - if let ty::ExistentialPredicate::Trait(ref trait_ref) = predicate.skip_binder() { - if cx.tcx.has_attr(trait_ref.def_id, sym::must_use) { - return true; - } + if let ty::ExistentialPredicate::Trait(ref trait_ref) = predicate.skip_binder() + && cx.tcx.has_attr(trait_ref.def_id, sym::must_use) + { + return true; } } false @@ -915,7 +915,7 @@ pub fn for_each_top_level_late_bound_region( ControlFlow::Continue(()) } } - fn visit_binder>>(&mut self, t: &Binder<'tcx, T>) -> Self::Result { + fn visit_binder>>(&mut self, t: &Binder<'tcx, T>) -> Self::Result { self.index += 1; let res = t.super_visit_with(self); self.index -= 1; @@ -1109,10 +1109,10 @@ pub fn make_projection<'tcx>( assoc_ty: Symbol, args: GenericArgsRef<'tcx>, ) -> Option> { - let Some(assoc_item) = tcx.associated_items(container_id).find_by_name_and_kind( + let Some(assoc_item) = tcx.associated_items(container_id).find_by_ident_and_kind( tcx, Ident::with_dummy_span(assoc_ty), - AssocKind::Type, + AssocTag::Type, container_id, ) else { debug_assert!(false, "type `{assoc_ty}` not found in `{container_id:?}`"); @@ -1345,14 +1345,14 @@ pub fn get_adt_inherent_method<'a>(cx: &'a LateContext<'_>, ty: Ty<'_>, method_n .associated_items(did) .filter_by_name_unhygienic(method_name) .next() - .filter(|item| item.kind == AssocKind::Fn) + .filter(|item| item.as_tag() == AssocTag::Fn) }) } else { None } } -/// Get's the type of a field by name. +/// Gets the type of a field by name. pub fn get_field_by_name<'tcx>(tcx: TyCtxt<'tcx>, ty: Ty<'tcx>, name: Symbol) -> Option> { match *ty.kind() { ty::Adt(def, args) if def.is_union() || def.is_struct() => def @@ -1376,3 +1376,49 @@ pub fn option_arg_ty<'tcx>(cx: &LateContext<'tcx>, ty: Ty<'tcx>) -> Option None, } } + +/// Check if a Ty<'_> of `Iterator` contains any mutable access to non-owning types by checking if +/// it contains fields of mutable references or pointers, or references/pointers to non-`Freeze` +/// types, or `PhantomData` types containing any of the previous. This can be used to check whether +/// skipping iterating over an iterator will change its behavior. +pub fn has_non_owning_mutable_access<'tcx>(cx: &LateContext<'tcx>, iter_ty: Ty<'tcx>) -> bool { + fn normalize_ty<'tcx>(cx: &LateContext<'tcx>, ty: Ty<'tcx>) -> Ty<'tcx> { + cx.tcx.try_normalize_erasing_regions(cx.typing_env(), ty).unwrap_or(ty) + } + + /// Check if `ty` contains mutable references or equivalent, which includes: + /// - A mutable reference/pointer. + /// - A reference/pointer to a non-`Freeze` type. + /// - A `PhantomData` type containing any of the previous. + fn has_non_owning_mutable_access_inner<'tcx>( + cx: &LateContext<'tcx>, + phantoms: &mut FxHashSet>, + ty: Ty<'tcx>, + ) -> bool { + match ty.kind() { + ty::Adt(adt_def, args) if adt_def.is_phantom_data() => { + phantoms.insert(ty) + && args + .types() + .any(|arg_ty| has_non_owning_mutable_access_inner(cx, phantoms, arg_ty)) + }, + ty::Adt(adt_def, args) => adt_def.all_fields().any(|field| { + has_non_owning_mutable_access_inner(cx, phantoms, normalize_ty(cx, field.ty(cx.tcx, args))) + }), + ty::Array(elem_ty, _) | ty::Slice(elem_ty) => has_non_owning_mutable_access_inner(cx, phantoms, *elem_ty), + ty::RawPtr(pointee_ty, mutability) | ty::Ref(_, pointee_ty, mutability) => { + mutability.is_mut() || !pointee_ty.is_freeze(cx.tcx, cx.typing_env()) + }, + ty::Closure(_, closure_args) => { + matches!(closure_args.types().next_back(), Some(captures) if has_non_owning_mutable_access_inner(cx, phantoms, captures)) + }, + ty::Tuple(tuple_args) => tuple_args + .iter() + .any(|arg_ty| has_non_owning_mutable_access_inner(cx, phantoms, arg_ty)), + _ => false, + } + } + + let mut phantoms = FxHashSet::default(); + has_non_owning_mutable_access_inner(cx, &mut phantoms, iter_ty) +} diff --git a/src/tools/clippy/clippy_utils/src/usage.rs b/src/tools/clippy/clippy_utils/src/usage.rs index a079fd940c009..1b049b6d12c4c 100644 --- a/src/tools/clippy/clippy_utils/src/usage.rs +++ b/src/tools/clippy/clippy_utils/src/usage.rs @@ -126,10 +126,10 @@ impl<'tcx> Visitor<'tcx> for BindingUsageFinder<'_, 'tcx> { type NestedFilter = nested_filter::OnlyBodies; fn visit_path(&mut self, path: &hir::Path<'tcx>, _: HirId) -> Self::Result { - if let Res::Local(id) = path.res { - if self.binding_ids.contains(&id) { - return ControlFlow::Break(()); - } + if let Res::Local(id) = path.res + && self.binding_ids.contains(&id) + { + return ControlFlow::Break(()); } ControlFlow::Continue(()) diff --git a/src/tools/clippy/clippy_utils/src/visitors.rs b/src/tools/clippy/clippy_utils/src/visitors.rs index 63dd00f2de0fb..fc6e30a980476 100644 --- a/src/tools/clippy/clippy_utils/src/visitors.rs +++ b/src/tools/clippy/clippy_utils/src/visitors.rs @@ -297,10 +297,10 @@ where /// Checks if the given resolved path is used in the given body. pub fn is_res_used(cx: &LateContext<'_>, res: Res, body: BodyId) -> bool { for_each_expr(cx, cx.tcx.hir_body(body).value, |e| { - if let ExprKind::Path(p) = &e.kind { - if cx.qpath_res(p, e.hir_id) == res { - return ControlFlow::Break(()); - } + if let ExprKind::Path(p) = &e.kind + && cx.qpath_res(p, e.hir_id) == res + { + return ControlFlow::Break(()); } ControlFlow::Continue(()) }) diff --git a/src/tools/clippy/lintcheck/ci-config/clippy.toml b/src/tools/clippy/lintcheck/ci-config/clippy.toml new file mode 100644 index 0000000000000..9853465c83f00 --- /dev/null +++ b/src/tools/clippy/lintcheck/ci-config/clippy.toml @@ -0,0 +1,7 @@ +# Configuration applied when running lintcheck from the CI +# +# The CI will set the `CLIPPY_CONF_DIR` environment variable +# to `$PWD/lintcheck/ci-config`. + +avoid-breaking-exported-api = false +lint-commented-code = false diff --git a/src/tools/clippy/lintcheck/src/main.rs b/src/tools/clippy/lintcheck/src/main.rs index 8d0d41ab9450f..fe488ef89da1f 100644 --- a/src/tools/clippy/lintcheck/src/main.rs +++ b/src/tools/clippy/lintcheck/src/main.rs @@ -120,14 +120,17 @@ impl Crate { if config.perf { cmd = Command::new("perf"); + let perf_data_filename = get_perf_data_filename(&self.path); cmd.args(&[ "record", "-e", "instructions", // Only count instructions "-g", // Enable call-graph, useful for flamegraphs and produces richer reports "--quiet", // Do not tamper with lintcheck's normal output + "--compression-level=22", + "--freq=3000", // Slow down program to capture all events "-o", - "perf.data", + &perf_data_filename, "--", "cargo", ]); @@ -165,7 +168,7 @@ impl Crate { return Vec::new(); } - if !config.fix { + if !config.fix && !config.perf { cmd.arg("--message-format=json"); } @@ -203,6 +206,11 @@ impl Crate { return Vec::new(); } + // We don't want to keep target directories if benchmarking + if config.perf { + let _ = fs::remove_dir_all(&shared_target_dir); + } + // get all clippy warnings and ICEs let mut entries: Vec = Message::parse_stream(stdout.as_bytes()) .filter_map(|msg| match msg { @@ -441,6 +449,35 @@ fn lintcheck(config: LintcheckConfig) { fs::write(&config.lintcheck_results_path, text).unwrap(); } +/// Traverse a directory looking for `perf.data.` files, and adds one +/// to the most recent of those files, returning the new most recent `perf.data` +/// file name. +fn get_perf_data_filename(source_path: &Path) -> String { + if source_path.join("perf.data").exists() { + let mut max_number = 0; + fs::read_dir(source_path) + .unwrap() + .filter_map(Result::ok) + .filter(|path| { + path.file_name() + .as_os_str() + .to_string_lossy() // We don't care about data loss, as we're checking for equality + .starts_with("perf.data") + }) + .for_each(|path| { + let file_name = path.file_name(); + let file_name = file_name.as_os_str().to_str().unwrap().split('.').next_back().unwrap(); + if let Ok(parsed_file_name) = file_name.parse::() + && parsed_file_name >= max_number + { + max_number = parsed_file_name + 1; + } + }); + return format!("perf.data.{max_number}"); + } + String::from("perf.data") +} + /// Returns the path to the Clippy project directory #[must_use] fn clippy_project_root() -> &'static Path { diff --git a/src/tools/clippy/lintcheck/src/recursive.rs b/src/tools/clippy/lintcheck/src/recursive.rs index 57073f523648e..6406b2dcb643b 100644 --- a/src/tools/clippy/lintcheck/src/recursive.rs +++ b/src/tools/clippy/lintcheck/src/recursive.rs @@ -64,7 +64,7 @@ fn process_stream( // It's 99% likely that dependencies compiled with recursive mode are on crates.io // and therefore on docs.rs. This links to the sources directly, do avoid invalid - // links due to remaped paths. See rust-lang/docs.rs#2551 for more details. + // links due to remapped paths. See rust-lang/docs.rs#2551 for more details. let base_url = format!( "https://docs.rs/crate/{}/{}/source/src/{{file}}#{{line}}", driver_info.package_name, driver_info.version diff --git a/src/tools/clippy/rust-toolchain b/src/tools/clippy/rust-toolchain deleted file mode 100644 index fcaeedc9a66b5..0000000000000 --- a/src/tools/clippy/rust-toolchain +++ /dev/null @@ -1,6 +0,0 @@ -[toolchain] -# begin autogenerated nightly -channel = "nightly-2025-03-20" -# end autogenerated nightly -components = ["cargo", "llvm-tools", "rust-src", "rust-std", "rustc", "rustc-dev", "rustfmt"] -profile = "minimal" diff --git a/src/tools/clippy/rust-toolchain.toml b/src/tools/clippy/rust-toolchain.toml new file mode 100644 index 0000000000000..39c7f0e4ad5a5 --- /dev/null +++ b/src/tools/clippy/rust-toolchain.toml @@ -0,0 +1,6 @@ +[toolchain] +# begin autogenerated nightly +channel = "nightly-2025-05-01" +# end autogenerated nightly +components = ["cargo", "llvm-tools", "rust-src", "rust-std", "rustc", "rustc-dev", "rustfmt"] +profile = "minimal" diff --git a/src/tools/clippy/rustc_tools_util/src/lib.rs b/src/tools/clippy/rustc_tools_util/src/lib.rs index 423154a69fa0b..b45edf2345585 100644 --- a/src/tools/clippy/rustc_tools_util/src/lib.rs +++ b/src/tools/clippy/rustc_tools_util/src/lib.rs @@ -121,7 +121,7 @@ fn get_output(cmd: &str, args: &[&str]) -> Option { pub fn rerun_if_git_changes() -> Option<()> { // Make sure we get rerun when the git commit changes. // We want to watch two files: HEAD, which tracks which branch we are on, - // and the file for that branch that tracks which commit is is on. + // and the file for that branch that tracks which commit is checked out. // First, find the `HEAD` file. This should work even with worktrees. let git_head_file = PathBuf::from(get_output("git", &["rev-parse", "--git-path", "HEAD"])?); diff --git a/src/tools/clippy/src/driver.rs b/src/tools/clippy/src/driver.rs index e4092bcd10564..87ca9c5beddfb 100644 --- a/src/tools/clippy/src/driver.rs +++ b/src/tools/clippy/src/driver.rs @@ -14,6 +14,7 @@ extern crate rustc_interface; extern crate rustc_session; extern crate rustc_span; +use clippy_utils::sym; use rustc_interface::interface; use rustc_session::EarlyDiagCtxt; use rustc_session::config::ErrorOutputType; @@ -78,7 +79,7 @@ fn track_clippy_args(psess: &mut ParseSess, args_env_var: Option<&str>) { psess .env_depinfo .get_mut() - .insert((Symbol::intern("CLIPPY_ARGS"), args_env_var.map(Symbol::intern))); + .insert((sym::CLIPPY_ARGS, args_env_var.map(Symbol::intern))); } /// Track files that may be accessed at runtime in `file_depinfo` so that cargo will re-run clippy @@ -89,7 +90,7 @@ fn track_files(psess: &mut ParseSess) { // Used by `clippy::cargo` lints and to determine the MSRV. `cargo clippy` executes `clippy-driver` // with the current directory set to `CARGO_MANIFEST_DIR` so a relative path is fine if Path::new("Cargo.toml").exists() { - file_depinfo.insert(Symbol::intern("Cargo.toml")); + file_depinfo.insert(sym::Cargo_toml); } // `clippy.toml` will be automatically tracked as it's loaded with `sess.source_map().load_file()` @@ -145,7 +146,7 @@ impl rustc_driver::Callbacks for ClippyCallbacks { // Trigger a rebuild if CLIPPY_CONF_DIR changes. The value must be a valid string so // changes between dirs that are invalid UTF-8 will not trigger rebuilds psess.env_depinfo.get_mut().insert(( - Symbol::intern("CLIPPY_CONF_DIR"), + sym::CLIPPY_CONF_DIR, env::var("CLIPPY_CONF_DIR").ok().map(|dir| Symbol::intern(&dir)), )); })); @@ -158,8 +159,10 @@ impl rustc_driver::Callbacks for ClippyCallbacks { let conf = clippy_config::Conf::read(sess, &conf_path); clippy_lints::register_lints(lint_store, conf); - clippy_lints::register_pre_expansion_lints(lint_store, conf); + #[cfg(feature = "internal")] + clippy_lints_internal::register_lints(lint_store); })); + config.extra_symbols = sym::EXTRA_SYMBOLS.into(); // FIXME: #4825; This is required, because Clippy lints that are based on MIR have to be // run on the unoptimized MIR. On the other hand this results in some false negatives. If @@ -207,12 +210,12 @@ pub fn main() { // Beside checking for existence of `--sysroot` on the command line, we need to // check for the arg files that are prefixed with @ as well to be consistent with rustc for arg in args.iter() { - if let Some(arg_file_path) = arg.strip_prefix('@') { - if let Ok(arg_file) = read_to_string(arg_file_path) { - let split_arg_file: Vec = arg_file.lines().map(ToString::to_string).collect(); - if has_arg(&split_arg_file, "--sysroot") { - return true; - } + if let Some(arg_file_path) = arg.strip_prefix('@') + && let Ok(arg_file) = read_to_string(arg_file_path) + { + let split_arg_file: Vec = arg_file.lines().map(ToString::to_string).collect(); + if has_arg(&split_arg_file, "--sysroot") { + return true; } } } @@ -221,10 +224,10 @@ pub fn main() { let sys_root_env = std::env::var("SYSROOT").ok(); let pass_sysroot_env_if_given = |args: &mut Vec, sys_root_env| { - if let Some(sys_root) = sys_root_env { - if !has_sysroot_arg(args) { - args.extend(vec!["--sysroot".into(), sys_root]); - } + if let Some(sys_root) = sys_root_env + && !has_sysroot_arg(args) + { + args.extend(vec!["--sysroot".into(), sys_root]); } }; diff --git a/src/tools/clippy/tests/clippy.toml b/src/tools/clippy/tests/clippy.toml index 5eb7ac0354198..91a2e55180b97 100644 --- a/src/tools/clippy/tests/clippy.toml +++ b/src/tools/clippy/tests/clippy.toml @@ -1 +1,2 @@ # default config for tests, overrides clippy.toml at the project root +lint-commented-code = false diff --git a/src/tools/clippy/tests/compile-test.rs b/src/tools/clippy/tests/compile-test.rs index 956a05288f358..6d391bd622a8d 100644 --- a/src/tools/clippy/tests/compile-test.rs +++ b/src/tools/clippy/tests/compile-test.rs @@ -2,6 +2,8 @@ #![warn(rust_2018_idioms, unused_lifetimes)] #![allow(unused_extern_crates)] +use askama::Template; +use askama::filters::Safe; use cargo_metadata::Message; use cargo_metadata::diagnostic::{Applicability, Diagnostic}; use clippy_config::ClippyConfiguration; @@ -9,11 +11,10 @@ use clippy_lints::LintInfo; use clippy_lints::declared_lints::LINTS; use clippy_lints::deprecated_lints::{DEPRECATED, DEPRECATED_VERSION, RENAMED}; use pulldown_cmark::{Options, Parser, html}; -use rinja::Template; -use rinja::filters::Safe; use serde::Deserialize; use test_utils::IS_RUSTC_TEST_SUITE; use ui_test::custom_flags::Flag; +use ui_test::custom_flags::edition::Edition; use ui_test::custom_flags::rustfix::RustfixMode; use ui_test::spanned::Spanned; use ui_test::{Args, CommandBuilder, Config, Match, error_on_output_conflict, status_emitter}; @@ -86,13 +87,13 @@ fn extern_flags() -> Vec { let name = name.strip_prefix("lib").unwrap_or(name); Some((name, path_str)) }; - if let Some((name, path)) = parse_name_path() { - if TEST_DEPENDENCIES.contains(&name) { - // A dependency may be listed twice if it is available in sysroot, - // and the sysroot dependencies are listed first. As of the writing, - // this only seems to apply to if_chain. - crates.insert(name, path); - } + if let Some((name, path)) = parse_name_path() + && TEST_DEPENDENCIES.contains(&name) + { + // A dependency may be listed twice if it is available in sysroot, + // and the sysroot dependencies are listed first. As of the writing, + // this only seems to apply to if_chain. + crates.insert(name, path); } } let not_found: Vec<&str> = TEST_DEPENDENCIES @@ -147,11 +148,16 @@ impl TestContext { .map(|filters| filters.split(',').map(str::to_string).collect()) .unwrap_or_default(), target: None, - bless_command: Some("cargo uibless".into()), + bless_command: Some(if IS_RUSTC_TEST_SUITE { + "./x test src/tools/clippy --bless".into() + } else { + "cargo uibless".into() + }), out_dir: target_dir.join("ui_test"), ..Config::rustc(Path::new("tests").join(test_dir)) }; let defaults = config.comment_defaults.base(); + defaults.set_custom("edition", Edition("2024".into())); defaults.exit_status = None.into(); if mandatory_annotations { defaults.require_annotations = Some(Spanned::dummy(true)).into(); diff --git a/src/tools/clippy/tests/dogfood.rs b/src/tools/clippy/tests/dogfood.rs index 858be389a9e6e..16a1a415102c4 100644 --- a/src/tools/clippy/tests/dogfood.rs +++ b/src/tools/clippy/tests/dogfood.rs @@ -36,6 +36,7 @@ fn dogfood() { for package in [ "./", "clippy_dev", + "clippy_lints_internal", "clippy_lints", "clippy_utils", "clippy_config", @@ -80,11 +81,9 @@ fn run_clippy_for_package(project: &str, args: &[&str]) -> bool { command.arg("--").args(args); command.arg("-Cdebuginfo=0"); // disable debuginfo to generate less data in the target dir + command.args(["-D", "clippy::dbg_macro"]); - if cfg!(feature = "internal") { - // internal lints only exist if we build with the internal feature - command.args(["-D", "clippy::internal"]); - } else { + if !cfg!(feature = "internal") { // running a clippy built without internal lints on the clippy source // that contains e.g. `allow(clippy::invalid_paths)` command.args(["-A", "unknown_lints"]); diff --git a/src/tools/clippy/tests/ui-internal/auxiliary/paths.rs b/src/tools/clippy/tests/ui-internal/auxiliary/paths.rs index 52fcaec4df32e..f730f564a09cf 100644 --- a/src/tools/clippy/tests/ui-internal/auxiliary/paths.rs +++ b/src/tools/clippy/tests/ui-internal/auxiliary/paths.rs @@ -1,2 +1,4 @@ +#![allow(clippy::unnecessary_def_path)] + pub static OPTION: [&str; 3] = ["core", "option", "Option"]; pub const RESULT: &[&str] = &["core", "result", "Result"]; diff --git a/src/tools/clippy/tests/ui-internal/check_clippy_version_attribute.rs b/src/tools/clippy/tests/ui-internal/check_clippy_version_attribute.rs index e5f6001b74d09..897002949e67e 100644 --- a/src/tools/clippy/tests/ui-internal/check_clippy_version_attribute.rs +++ b/src/tools/clippy/tests/ui-internal/check_clippy_version_attribute.rs @@ -1,5 +1,5 @@ -#![deny(clippy::internal)] #![feature(rustc_private)] +#![deny(clippy::invalid_clippy_version_attribute, clippy::missing_clippy_version_attribute)] #[macro_use] extern crate rustc_middle; @@ -86,6 +86,15 @@ mod internal_clippy_lints { } use crate::internal_clippy_lints::ALLOW_MISSING_ATTRIBUTE_ONE; -declare_lint_pass!(Pass2 => [VALID_ONE, VALID_TWO, VALID_THREE, INVALID_ONE, INVALID_TWO, MISSING_ATTRIBUTE_ONE, MISSING_ATTRIBUTE_TWO, ALLOW_MISSING_ATTRIBUTE_ONE]); +declare_lint_pass!(Pass2 => [ + VALID_ONE, + VALID_TWO, + VALID_THREE, + INVALID_ONE, + INVALID_TWO, + MISSING_ATTRIBUTE_ONE, + MISSING_ATTRIBUTE_TWO, + ALLOW_MISSING_ATTRIBUTE_ONE, +]); fn main() {} diff --git a/src/tools/clippy/tests/ui-internal/check_clippy_version_attribute.stderr b/src/tools/clippy/tests/ui-internal/check_clippy_version_attribute.stderr index 1129c35d1d01b..952bc94403033 100644 --- a/src/tools/clippy/tests/ui-internal/check_clippy_version_attribute.stderr +++ b/src/tools/clippy/tests/ui-internal/check_clippy_version_attribute.stderr @@ -12,11 +12,10 @@ LL | | } | = help: please use a valid semantic version, see `doc/adding_lints.md` note: the lint level is defined here - --> tests/ui-internal/check_clippy_version_attribute.rs:1:9 + --> tests/ui-internal/check_clippy_version_attribute.rs:2:9 | -LL | #![deny(clippy::internal)] - | ^^^^^^^^^^^^^^^^ - = note: `#[deny(clippy::invalid_clippy_version_attribute)]` implied by `#[deny(clippy::internal)]` +LL | #![deny(clippy::invalid_clippy_version_attribute, clippy::missing_clippy_version_attribute)] + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ = note: this error originates in the macro `$crate::declare_tool_lint` which comes from the expansion of the macro `declare_tool_lint` (in Nightly builds, run with -Z macro-backtrace for more info) error: this item has an invalid `clippy::version` attribute @@ -47,7 +46,11 @@ LL | | } | |_^ | = help: please use a `clippy::version` attribute, see `doc/adding_lints.md` - = note: `#[deny(clippy::missing_clippy_version_attribute)]` implied by `#[deny(clippy::internal)]` +note: the lint level is defined here + --> tests/ui-internal/check_clippy_version_attribute.rs:2:51 + | +LL | #![deny(clippy::invalid_clippy_version_attribute, clippy::missing_clippy_version_attribute)] + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ = note: this error originates in the macro `$crate::declare_tool_lint` which comes from the expansion of the macro `declare_tool_lint` (in Nightly builds, run with -Z macro-backtrace for more info) error: this lint is missing the `clippy::version` attribute or version value diff --git a/src/tools/clippy/tests/ui-internal/check_formulation.rs b/src/tools/clippy/tests/ui-internal/check_formulation.rs index 8265a78769d16..bcbb0d783198e 100644 --- a/src/tools/clippy/tests/ui-internal/check_formulation.rs +++ b/src/tools/clippy/tests/ui-internal/check_formulation.rs @@ -1,4 +1,5 @@ -#![warn(clippy::almost_standard_lint_formulation)] +#![deny(clippy::almost_standard_lint_formulation)] +#![allow(clippy::lint_without_lint_pass)] #![feature(rustc_private)] #[macro_use] diff --git a/src/tools/clippy/tests/ui-internal/check_formulation.stderr b/src/tools/clippy/tests/ui-internal/check_formulation.stderr index b16e1bf868737..9aeb9e1f2d49c 100644 --- a/src/tools/clippy/tests/ui-internal/check_formulation.stderr +++ b/src/tools/clippy/tests/ui-internal/check_formulation.stderr @@ -1,15 +1,18 @@ error: non-standard lint formulation - --> tests/ui-internal/check_formulation.rs:23:5 + --> tests/ui-internal/check_formulation.rs:24:5 | LL | /// Check for lint formulations that are correct | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | = help: consider using `Checks for` - = note: `-D clippy::almost-standard-lint-formulation` implied by `-D warnings` - = help: to override `-D warnings` add `#[allow(clippy::almost_standard_lint_formulation)]` +note: the lint level is defined here + --> tests/ui-internal/check_formulation.rs:1:9 + | +LL | #![deny(clippy::almost_standard_lint_formulation)] + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: non-standard lint formulation - --> tests/ui-internal/check_formulation.rs:34:5 + --> tests/ui-internal/check_formulation.rs:35:5 | LL | /// Detects uses of incorrect formulations | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ diff --git a/src/tools/clippy/tests/ui-internal/collapsible_span_lint_calls.fixed b/src/tools/clippy/tests/ui-internal/collapsible_span_lint_calls.fixed index 918e33345a779..76f68686ee2a8 100644 --- a/src/tools/clippy/tests/ui-internal/collapsible_span_lint_calls.fixed +++ b/src/tools/clippy/tests/ui-internal/collapsible_span_lint_calls.fixed @@ -1,4 +1,4 @@ -#![deny(clippy::internal)] +#![deny(clippy::collapsible_span_lint_calls)] #![allow(clippy::missing_clippy_version_attribute)] #![feature(rustc_private)] diff --git a/src/tools/clippy/tests/ui-internal/collapsible_span_lint_calls.rs b/src/tools/clippy/tests/ui-internal/collapsible_span_lint_calls.rs index 2f289ae2b4819..214c8783a6690 100644 --- a/src/tools/clippy/tests/ui-internal/collapsible_span_lint_calls.rs +++ b/src/tools/clippy/tests/ui-internal/collapsible_span_lint_calls.rs @@ -1,4 +1,4 @@ -#![deny(clippy::internal)] +#![deny(clippy::collapsible_span_lint_calls)] #![allow(clippy::missing_clippy_version_attribute)] #![feature(rustc_private)] diff --git a/src/tools/clippy/tests/ui-internal/collapsible_span_lint_calls.stderr b/src/tools/clippy/tests/ui-internal/collapsible_span_lint_calls.stderr index a2be1f1cd367d..9c83538947cab 100644 --- a/src/tools/clippy/tests/ui-internal/collapsible_span_lint_calls.stderr +++ b/src/tools/clippy/tests/ui-internal/collapsible_span_lint_calls.stderr @@ -10,9 +10,8 @@ LL | | }); note: the lint level is defined here --> tests/ui-internal/collapsible_span_lint_calls.rs:1:9 | -LL | #![deny(clippy::internal)] - | ^^^^^^^^^^^^^^^^ - = note: `#[deny(clippy::collapsible_span_lint_calls)]` implied by `#[deny(clippy::internal)]` +LL | #![deny(clippy::collapsible_span_lint_calls)] + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: this call is collapsible --> tests/ui-internal/collapsible_span_lint_calls.rs:39:9 diff --git a/src/tools/clippy/tests/ui-internal/custom_ice_message.rs b/src/tools/clippy/tests/ui-internal/custom_ice_message.rs index 71819fe370701..c7e92b1bf164f 100644 --- a/src/tools/clippy/tests/ui-internal/custom_ice_message.rs +++ b/src/tools/clippy/tests/ui-internal/custom_ice_message.rs @@ -6,7 +6,7 @@ //@normalize-stderr-test: "rustc 1\.\d+.* running on .*" -> "rustc running on " //@normalize-stderr-test: "(?ms)query stack during panic:\n.*end of query stack\n" -> "" -#![deny(clippy::internal)] +#![deny(clippy::produce_ice)] #![allow(clippy::missing_clippy_version_attribute)] fn it_looks_like_you_are_trying_to_kill_clippy() {} diff --git a/src/tools/clippy/tests/ui-internal/custom_ice_message.stderr b/src/tools/clippy/tests/ui-internal/custom_ice_message.stderr index 589e1190a907e..884d3d035a29d 100644 --- a/src/tools/clippy/tests/ui-internal/custom_ice_message.stderr +++ b/src/tools/clippy/tests/ui-internal/custom_ice_message.stderr @@ -8,7 +8,7 @@ error: internal compiler error: Would you like some help with that? LL | fn it_looks_like_you_are_trying_to_kill_clippy() {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | -note: delayed at clippy_lints/src/utils/internal_lints/produce_ice.rs - disabled backtrace +note: delayed at clippy_lints_internal/src/produce_ice.rs - disabled backtrace --> tests/ui-internal/custom_ice_message.rs:12:1 | LL | fn it_looks_like_you_are_trying_to_kill_clippy() {} diff --git a/src/tools/clippy/tests/ui-internal/default_lint.rs b/src/tools/clippy/tests/ui-internal/default_lint.rs index 959bfd27e3899..809f2c4d080dc 100644 --- a/src/tools/clippy/tests/ui-internal/default_lint.rs +++ b/src/tools/clippy/tests/ui-internal/default_lint.rs @@ -1,4 +1,4 @@ -#![deny(clippy::internal)] +#![deny(clippy::default_lint)] #![allow(clippy::missing_clippy_version_attribute)] #![feature(rustc_private)] diff --git a/src/tools/clippy/tests/ui-internal/default_lint.stderr b/src/tools/clippy/tests/ui-internal/default_lint.stderr index 9d4c2e15349f6..2c700ec82dcd4 100644 --- a/src/tools/clippy/tests/ui-internal/default_lint.stderr +++ b/src/tools/clippy/tests/ui-internal/default_lint.stderr @@ -13,9 +13,8 @@ LL | | } note: the lint level is defined here --> tests/ui-internal/default_lint.rs:1:9 | -LL | #![deny(clippy::internal)] - | ^^^^^^^^^^^^^^^^ - = note: `#[deny(clippy::default_lint)]` implied by `#[deny(clippy::internal)]` +LL | #![deny(clippy::default_lint)] + | ^^^^^^^^^^^^^^^^^^^^ = note: this error originates in the macro `$crate::declare_tool_lint` which comes from the expansion of the macro `declare_tool_lint` (in Nightly builds, run with -Z macro-backtrace for more info) error: aborting due to 1 previous error diff --git a/src/tools/clippy/tests/ui-internal/disallow_span_lint.rs b/src/tools/clippy/tests/ui-internal/disallow_span_lint.rs index 3fed38cab64d4..36e4158f6e688 100644 --- a/src/tools/clippy/tests/ui-internal/disallow_span_lint.rs +++ b/src/tools/clippy/tests/ui-internal/disallow_span_lint.rs @@ -1,4 +1,5 @@ #![feature(rustc_private)] +#![deny(clippy::disallowed_methods)] extern crate rustc_errors; extern crate rustc_hir; diff --git a/src/tools/clippy/tests/ui-internal/disallow_span_lint.stderr b/src/tools/clippy/tests/ui-internal/disallow_span_lint.stderr index 9a7a7ecbbff92..f03a745963e00 100644 --- a/src/tools/clippy/tests/ui-internal/disallow_span_lint.stderr +++ b/src/tools/clippy/tests/ui-internal/disallow_span_lint.stderr @@ -1,15 +1,18 @@ error: use of a disallowed method `rustc_lint::context::LintContext::span_lint` - --> tests/ui-internal/disallow_span_lint.rs:14:8 + --> tests/ui-internal/disallow_span_lint.rs:15:8 | LL | cx.span_lint(lint, span, |lint| { | ^^^^^^^^^ | = note: this function does not add a link to our documentation, please use the `clippy_utils::diagnostics::span_lint*` functions instead - = note: `-D clippy::disallowed-methods` implied by `-D warnings` - = help: to override `-D warnings` add `#[allow(clippy::disallowed_methods)]` +note: the lint level is defined here + --> tests/ui-internal/disallow_span_lint.rs:2:9 + | +LL | #![deny(clippy::disallowed_methods)] + | ^^^^^^^^^^^^^^^^^^^^^^^^^^ error: use of a disallowed method `rustc_middle::ty::context::TyCtxt::node_span_lint` - --> tests/ui-internal/disallow_span_lint.rs:21:9 + --> tests/ui-internal/disallow_span_lint.rs:22:9 | LL | tcx.node_span_lint(lint, hir_id, span, |lint| { | ^^^^^^^^^^^^^^ diff --git a/src/tools/clippy/tests/ui-internal/interning_defined_symbol.fixed b/src/tools/clippy/tests/ui-internal/interning_defined_symbol.fixed deleted file mode 100644 index 92d3b1537e0c8..0000000000000 --- a/src/tools/clippy/tests/ui-internal/interning_defined_symbol.fixed +++ /dev/null @@ -1,40 +0,0 @@ -#![deny(clippy::internal)] -#![allow(clippy::missing_clippy_version_attribute, clippy::let_unit_value)] -#![feature(rustc_private)] - -extern crate rustc_span; - -use rustc_span::symbol::Symbol; - -macro_rules! sym { - ($tt:tt) => { - rustc_span::symbol::Symbol::intern(stringify!($tt)) - }; -} - -fn main() { - // Direct use of Symbol::intern - let _ = rustc_span::sym::f32; - //~^ interning_defined_symbol - - // Using a sym macro - let _ = rustc_span::sym::f32; - //~^ interning_defined_symbol - - // Correct suggestion when symbol isn't stringified constant name - let _ = rustc_span::sym::proc_dash_macro; - //~^ interning_defined_symbol - - // interning a keyword - let _ = rustc_span::kw::SelfLower; - //~^ interning_defined_symbol - - // Interning a symbol that is not defined - let _ = Symbol::intern("xyz123"); - let _ = sym!(xyz123); - - // Using a different `intern` function - let _ = intern("f32"); -} - -fn intern(_: &str) {} diff --git a/src/tools/clippy/tests/ui-internal/interning_defined_symbol.rs b/src/tools/clippy/tests/ui-internal/interning_defined_symbol.rs deleted file mode 100644 index d1e6f9cb1c416..0000000000000 --- a/src/tools/clippy/tests/ui-internal/interning_defined_symbol.rs +++ /dev/null @@ -1,40 +0,0 @@ -#![deny(clippy::internal)] -#![allow(clippy::missing_clippy_version_attribute, clippy::let_unit_value)] -#![feature(rustc_private)] - -extern crate rustc_span; - -use rustc_span::symbol::Symbol; - -macro_rules! sym { - ($tt:tt) => { - rustc_span::symbol::Symbol::intern(stringify!($tt)) - }; -} - -fn main() { - // Direct use of Symbol::intern - let _ = Symbol::intern("f32"); - //~^ interning_defined_symbol - - // Using a sym macro - let _ = sym!(f32); - //~^ interning_defined_symbol - - // Correct suggestion when symbol isn't stringified constant name - let _ = Symbol::intern("proc-macro"); - //~^ interning_defined_symbol - - // interning a keyword - let _ = Symbol::intern("self"); - //~^ interning_defined_symbol - - // Interning a symbol that is not defined - let _ = Symbol::intern("xyz123"); - let _ = sym!(xyz123); - - // Using a different `intern` function - let _ = intern("f32"); -} - -fn intern(_: &str) {} diff --git a/src/tools/clippy/tests/ui-internal/interning_defined_symbol.stderr b/src/tools/clippy/tests/ui-internal/interning_defined_symbol.stderr deleted file mode 100644 index c84a566436a8e..0000000000000 --- a/src/tools/clippy/tests/ui-internal/interning_defined_symbol.stderr +++ /dev/null @@ -1,33 +0,0 @@ -error: interning a defined symbol - --> tests/ui-internal/interning_defined_symbol.rs:17:13 - | -LL | let _ = Symbol::intern("f32"); - | ^^^^^^^^^^^^^^^^^^^^^ help: try: `rustc_span::sym::f32` - | -note: the lint level is defined here - --> tests/ui-internal/interning_defined_symbol.rs:1:9 - | -LL | #![deny(clippy::internal)] - | ^^^^^^^^^^^^^^^^ - = note: `#[deny(clippy::interning_defined_symbol)]` implied by `#[deny(clippy::internal)]` - -error: interning a defined symbol - --> tests/ui-internal/interning_defined_symbol.rs:21:13 - | -LL | let _ = sym!(f32); - | ^^^^^^^^^ help: try: `rustc_span::sym::f32` - -error: interning a defined symbol - --> tests/ui-internal/interning_defined_symbol.rs:25:13 - | -LL | let _ = Symbol::intern("proc-macro"); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `rustc_span::sym::proc_dash_macro` - -error: interning a defined symbol - --> tests/ui-internal/interning_defined_symbol.rs:29:13 - | -LL | let _ = Symbol::intern("self"); - | ^^^^^^^^^^^^^^^^^^^^^^ help: try: `rustc_span::kw::SelfLower` - -error: aborting due to 4 previous errors - diff --git a/src/tools/clippy/tests/ui-internal/interning_literals.fixed b/src/tools/clippy/tests/ui-internal/interning_literals.fixed new file mode 100644 index 0000000000000..03e97768b996f --- /dev/null +++ b/src/tools/clippy/tests/ui-internal/interning_literals.fixed @@ -0,0 +1,31 @@ +#![allow(clippy::let_unit_value)] +#![feature(rustc_private)] + +extern crate rustc_span; + +use clippy_utils::sym; +use rustc_span::{Symbol, kw}; + +fn main() { + let _ = sym::f32; + //~^ interning_literals + + // Correct suggestion when symbol isn't stringified constant name + let _ = sym::proc_dash_macro; + //~^ interning_literals + + // Interning a keyword + let _ = kw::SelfLower; + //~^ interning_literals + + // Defined in clippy_utils + let _ = sym::msrv; + //~^ interning_literals + let _ = sym::Cargo_toml; + //~^ interning_literals + + // Using a different `intern` function + let _ = intern("f32"); +} + +fn intern(_: &str) {} diff --git a/src/tools/clippy/tests/ui-internal/interning_literals.rs b/src/tools/clippy/tests/ui-internal/interning_literals.rs new file mode 100644 index 0000000000000..561fd5702a59f --- /dev/null +++ b/src/tools/clippy/tests/ui-internal/interning_literals.rs @@ -0,0 +1,31 @@ +#![allow(clippy::let_unit_value)] +#![feature(rustc_private)] + +extern crate rustc_span; + +use clippy_utils::sym; +use rustc_span::{Symbol, kw}; + +fn main() { + let _ = Symbol::intern("f32"); + //~^ interning_literals + + // Correct suggestion when symbol isn't stringified constant name + let _ = Symbol::intern("proc-macro"); + //~^ interning_literals + + // Interning a keyword + let _ = Symbol::intern("self"); + //~^ interning_literals + + // Defined in clippy_utils + let _ = Symbol::intern("msrv"); + //~^ interning_literals + let _ = Symbol::intern("Cargo.toml"); + //~^ interning_literals + + // Using a different `intern` function + let _ = intern("f32"); +} + +fn intern(_: &str) {} diff --git a/src/tools/clippy/tests/ui-internal/interning_literals.stderr b/src/tools/clippy/tests/ui-internal/interning_literals.stderr new file mode 100644 index 0000000000000..628b97eff84da --- /dev/null +++ b/src/tools/clippy/tests/ui-internal/interning_literals.stderr @@ -0,0 +1,64 @@ +error: interning a string literal + --> tests/ui-internal/interning_literals.rs:10:13 + | +LL | let _ = Symbol::intern("f32"); + | ^^^^^^^^^^^^^^^^^^^^^ + | + = note: `-D clippy::interning-literals` implied by `-D warnings` + = help: to override `-D warnings` add `#[allow(clippy::interning_literals)]` +help: use the preinterned symbol + | +LL - let _ = Symbol::intern("f32"); +LL + let _ = sym::f32; + | + +error: interning a string literal + --> tests/ui-internal/interning_literals.rs:14:13 + | +LL | let _ = Symbol::intern("proc-macro"); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | +help: use the preinterned symbol + | +LL - let _ = Symbol::intern("proc-macro"); +LL + let _ = sym::proc_dash_macro; + | + +error: interning a string literal + --> tests/ui-internal/interning_literals.rs:18:13 + | +LL | let _ = Symbol::intern("self"); + | ^^^^^^^^^^^^^^^^^^^^^^ + | +help: use the preinterned symbol + | +LL - let _ = Symbol::intern("self"); +LL + let _ = kw::SelfLower; + | + +error: interning a string literal + --> tests/ui-internal/interning_literals.rs:22:13 + | +LL | let _ = Symbol::intern("msrv"); + | ^^^^^^^^^^^^^^^^^^^^^^ + | +help: use the preinterned symbol + | +LL - let _ = Symbol::intern("msrv"); +LL + let _ = sym::msrv; + | + +error: interning a string literal + --> tests/ui-internal/interning_literals.rs:24:13 + | +LL | let _ = Symbol::intern("Cargo.toml"); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | +help: use the preinterned symbol + | +LL - let _ = Symbol::intern("Cargo.toml"); +LL + let _ = sym::Cargo_toml; + | + +error: aborting due to 5 previous errors + diff --git a/src/tools/clippy/tests/ui-internal/interning_literals_unfixable.rs b/src/tools/clippy/tests/ui-internal/interning_literals_unfixable.rs new file mode 100644 index 0000000000000..43872e95a5854 --- /dev/null +++ b/src/tools/clippy/tests/ui-internal/interning_literals_unfixable.rs @@ -0,0 +1,16 @@ +//@no-rustfix: paths that don't exist yet +#![feature(rustc_private)] + +extern crate rustc_span; + +use rustc_span::Symbol; + +fn main() { + // Not yet defined + let _ = Symbol::intern("xyz123"); + //~^ interning_literals + let _ = Symbol::intern("with-dash"); + //~^ interning_literals + let _ = Symbol::intern("with.dot"); + //~^ interning_literals +} diff --git a/src/tools/clippy/tests/ui-internal/interning_literals_unfixable.stderr b/src/tools/clippy/tests/ui-internal/interning_literals_unfixable.stderr new file mode 100644 index 0000000000000..8294453a8f945 --- /dev/null +++ b/src/tools/clippy/tests/ui-internal/interning_literals_unfixable.stderr @@ -0,0 +1,40 @@ +error: interning a string literal + --> tests/ui-internal/interning_literals_unfixable.rs:10:13 + | +LL | let _ = Symbol::intern("xyz123"); + | ^^^^^^^^^^^^^^^^^^^^^^^^ + | + = note: `-D clippy::interning-literals` implied by `-D warnings` + = help: to override `-D warnings` add `#[allow(clippy::interning_literals)]` +help: add the symbol to `clippy_utils/src/sym.rs` and use it + | +LL - let _ = Symbol::intern("xyz123"); +LL + let _ = sym::xyz123; + | + +error: interning a string literal + --> tests/ui-internal/interning_literals_unfixable.rs:12:13 + | +LL | let _ = Symbol::intern("with-dash"); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | +help: add the symbol to `clippy_utils/src/sym.rs` and use it + | +LL - let _ = Symbol::intern("with-dash"); +LL + let _ = sym::with_dash; + | + +error: interning a string literal + --> tests/ui-internal/interning_literals_unfixable.rs:14:13 + | +LL | let _ = Symbol::intern("with.dot"); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^ + | +help: add the symbol to `clippy_utils/src/sym.rs` and use it + | +LL - let _ = Symbol::intern("with.dot"); +LL + let _ = sym::with_dot; + | + +error: aborting due to 3 previous errors + diff --git a/src/tools/clippy/tests/ui-internal/invalid_msrv_attr_impl.fixed b/src/tools/clippy/tests/ui-internal/invalid_msrv_attr_impl.fixed index 6804e2bbae83c..238ef9ae6d0ac 100644 --- a/src/tools/clippy/tests/ui-internal/invalid_msrv_attr_impl.fixed +++ b/src/tools/clippy/tests/ui-internal/invalid_msrv_attr_impl.fixed @@ -1,4 +1,4 @@ -#![deny(clippy::internal)] +#![deny(clippy::missing_msrv_attr_impl)] #![allow(clippy::missing_clippy_version_attribute)] #![feature(rustc_private)] diff --git a/src/tools/clippy/tests/ui-internal/invalid_msrv_attr_impl.rs b/src/tools/clippy/tests/ui-internal/invalid_msrv_attr_impl.rs index c625a5d9a4590..7753dcaad7139 100644 --- a/src/tools/clippy/tests/ui-internal/invalid_msrv_attr_impl.rs +++ b/src/tools/clippy/tests/ui-internal/invalid_msrv_attr_impl.rs @@ -1,4 +1,4 @@ -#![deny(clippy::internal)] +#![deny(clippy::missing_msrv_attr_impl)] #![allow(clippy::missing_clippy_version_attribute)] #![feature(rustc_private)] diff --git a/src/tools/clippy/tests/ui-internal/invalid_msrv_attr_impl.stderr b/src/tools/clippy/tests/ui-internal/invalid_msrv_attr_impl.stderr index 0a7636313eff2..d5928d8c0c2de 100644 --- a/src/tools/clippy/tests/ui-internal/invalid_msrv_attr_impl.stderr +++ b/src/tools/clippy/tests/ui-internal/invalid_msrv_attr_impl.stderr @@ -7,9 +7,8 @@ LL | impl EarlyLintPass for Pass { note: the lint level is defined here --> tests/ui-internal/invalid_msrv_attr_impl.rs:1:9 | -LL | #![deny(clippy::internal)] - | ^^^^^^^^^^^^^^^^ - = note: `#[deny(clippy::missing_msrv_attr_impl)]` implied by `#[deny(clippy::internal)]` +LL | #![deny(clippy::missing_msrv_attr_impl)] + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: add `extract_msrv_attr!()` to the `EarlyLintPass` implementation | LL ~ impl EarlyLintPass for Pass { diff --git a/src/tools/clippy/tests/ui-internal/invalid_paths.rs b/src/tools/clippy/tests/ui-internal/invalid_paths.rs index abfb111f938e4..7317abc2185a3 100644 --- a/src/tools/clippy/tests/ui-internal/invalid_paths.rs +++ b/src/tools/clippy/tests/ui-internal/invalid_paths.rs @@ -1,4 +1,4 @@ -#![warn(clippy::internal)] +#![deny(clippy::invalid_paths)] #![allow(clippy::missing_clippy_version_attribute, clippy::unnecessary_def_path)] mod paths { diff --git a/src/tools/clippy/tests/ui-internal/invalid_paths.stderr b/src/tools/clippy/tests/ui-internal/invalid_paths.stderr index 7bde37667be42..7b7b25ce8d8db 100644 --- a/src/tools/clippy/tests/ui-internal/invalid_paths.stderr +++ b/src/tools/clippy/tests/ui-internal/invalid_paths.stderr @@ -4,8 +4,11 @@ error: invalid path LL | pub const TRANSMUTE: [&str; 4] = ["core", "intrinsics", "", "transmute"]; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | - = note: `-D clippy::invalid-paths` implied by `-D warnings` - = help: to override `-D warnings` add `#[allow(clippy::invalid_paths)]` +note: the lint level is defined here + --> tests/ui-internal/invalid_paths.rs:1:9 + | +LL | #![deny(clippy::invalid_paths)] + | ^^^^^^^^^^^^^^^^^^^^^ error: invalid path --> tests/ui-internal/invalid_paths.rs:19:5 diff --git a/src/tools/clippy/tests/ui-internal/lint_without_lint_pass.rs b/src/tools/clippy/tests/ui-internal/lint_without_lint_pass.rs index 69591523432c8..6b649132aca31 100644 --- a/src/tools/clippy/tests/ui-internal/lint_without_lint_pass.rs +++ b/src/tools/clippy/tests/ui-internal/lint_without_lint_pass.rs @@ -1,4 +1,4 @@ -#![deny(clippy::internal)] +#![deny(clippy::lint_without_lint_pass)] #![allow(clippy::missing_clippy_version_attribute)] #![feature(rustc_private)] diff --git a/src/tools/clippy/tests/ui-internal/lint_without_lint_pass.stderr b/src/tools/clippy/tests/ui-internal/lint_without_lint_pass.stderr index 9cca96ca16020..3798293f4c111 100644 --- a/src/tools/clippy/tests/ui-internal/lint_without_lint_pass.stderr +++ b/src/tools/clippy/tests/ui-internal/lint_without_lint_pass.stderr @@ -13,9 +13,8 @@ LL | | } note: the lint level is defined here --> tests/ui-internal/lint_without_lint_pass.rs:1:9 | -LL | #![deny(clippy::internal)] - | ^^^^^^^^^^^^^^^^ - = note: `#[deny(clippy::lint_without_lint_pass)]` implied by `#[deny(clippy::internal)]` +LL | #![deny(clippy::lint_without_lint_pass)] + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ = note: this error originates in the macro `declare_tool_lint` (in Nightly builds, run with -Z macro-backtrace for more info) error: aborting due to 1 previous error diff --git a/src/tools/clippy/tests/ui-internal/outer_expn_data.fixed b/src/tools/clippy/tests/ui-internal/outer_expn_data.fixed index cb7680b8bb142..900ca5b2ab9d8 100644 --- a/src/tools/clippy/tests/ui-internal/outer_expn_data.fixed +++ b/src/tools/clippy/tests/ui-internal/outer_expn_data.fixed @@ -1,4 +1,4 @@ -#![deny(clippy::internal)] +#![deny(clippy::outer_expn_expn_data)] #![allow(clippy::missing_clippy_version_attribute)] #![feature(rustc_private)] diff --git a/src/tools/clippy/tests/ui-internal/outer_expn_data.rs b/src/tools/clippy/tests/ui-internal/outer_expn_data.rs index 41d735110b5a0..bcfc42aa2ac75 100644 --- a/src/tools/clippy/tests/ui-internal/outer_expn_data.rs +++ b/src/tools/clippy/tests/ui-internal/outer_expn_data.rs @@ -1,4 +1,4 @@ -#![deny(clippy::internal)] +#![deny(clippy::outer_expn_expn_data)] #![allow(clippy::missing_clippy_version_attribute)] #![feature(rustc_private)] diff --git a/src/tools/clippy/tests/ui-internal/outer_expn_data.stderr b/src/tools/clippy/tests/ui-internal/outer_expn_data.stderr index 33ac91e4fb0de..b86138a5d45d2 100644 --- a/src/tools/clippy/tests/ui-internal/outer_expn_data.stderr +++ b/src/tools/clippy/tests/ui-internal/outer_expn_data.stderr @@ -7,9 +7,8 @@ LL | let _ = expr.span.ctxt().outer_expn().expn_data(); note: the lint level is defined here --> tests/ui-internal/outer_expn_data.rs:1:9 | -LL | #![deny(clippy::internal)] - | ^^^^^^^^^^^^^^^^ - = note: `#[deny(clippy::outer_expn_expn_data)]` implied by `#[deny(clippy::internal)]` +LL | #![deny(clippy::outer_expn_expn_data)] + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: aborting due to 1 previous error diff --git a/src/tools/clippy/tests/ui-internal/slow_symbol_comparisons.fixed b/src/tools/clippy/tests/ui-internal/slow_symbol_comparisons.fixed deleted file mode 100644 index 2cbd646a0fd5d..0000000000000 --- a/src/tools/clippy/tests/ui-internal/slow_symbol_comparisons.fixed +++ /dev/null @@ -1,24 +0,0 @@ -#![feature(rustc_private)] -#![warn(clippy::slow_symbol_comparisons)] - -extern crate rustc_span; - -use clippy_utils::sym; -use rustc_span::Symbol; - -fn main() { - let symbol = sym!(example); - let other_symbol = sym!(other_example); - - // Should lint - let slow_comparison = symbol.as_str() == "example"; - //~^ error: comparing `Symbol` via `Symbol::intern` - let slow_comparison_macro = symbol.as_str() == "example"; - //~^ error: comparing `Symbol` via `Symbol::intern` - let slow_comparison_backwards = symbol.as_str() == "example"; - //~^ error: comparing `Symbol` via `Symbol::intern` - - // Should not lint - let faster_comparison = symbol.as_str() == "other_example"; - let preinterned_comparison = symbol == other_symbol; -} diff --git a/src/tools/clippy/tests/ui-internal/slow_symbol_comparisons.rs b/src/tools/clippy/tests/ui-internal/slow_symbol_comparisons.rs deleted file mode 100644 index 0cea3c3fcff9f..0000000000000 --- a/src/tools/clippy/tests/ui-internal/slow_symbol_comparisons.rs +++ /dev/null @@ -1,24 +0,0 @@ -#![feature(rustc_private)] -#![warn(clippy::slow_symbol_comparisons)] - -extern crate rustc_span; - -use clippy_utils::sym; -use rustc_span::Symbol; - -fn main() { - let symbol = sym!(example); - let other_symbol = sym!(other_example); - - // Should lint - let slow_comparison = symbol == Symbol::intern("example"); - //~^ error: comparing `Symbol` via `Symbol::intern` - let slow_comparison_macro = symbol == sym!(example); - //~^ error: comparing `Symbol` via `Symbol::intern` - let slow_comparison_backwards = sym!(example) == symbol; - //~^ error: comparing `Symbol` via `Symbol::intern` - - // Should not lint - let faster_comparison = symbol.as_str() == "other_example"; - let preinterned_comparison = symbol == other_symbol; -} diff --git a/src/tools/clippy/tests/ui-internal/slow_symbol_comparisons.stderr b/src/tools/clippy/tests/ui-internal/slow_symbol_comparisons.stderr deleted file mode 100644 index 72cb20a7fed90..0000000000000 --- a/src/tools/clippy/tests/ui-internal/slow_symbol_comparisons.stderr +++ /dev/null @@ -1,23 +0,0 @@ -error: comparing `Symbol` via `Symbol::intern` - --> tests/ui-internal/slow_symbol_comparisons.rs:14:27 - | -LL | let slow_comparison = symbol == Symbol::intern("example"); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use `Symbol::as_str` and check the string instead: `symbol.as_str() == "example"` - | - = note: `-D clippy::slow-symbol-comparisons` implied by `-D warnings` - = help: to override `-D warnings` add `#[allow(clippy::slow_symbol_comparisons)]` - -error: comparing `Symbol` via `Symbol::intern` - --> tests/ui-internal/slow_symbol_comparisons.rs:16:33 - | -LL | let slow_comparison_macro = symbol == sym!(example); - | ^^^^^^^^^^^^^^^^^^^^^^^ help: use `Symbol::as_str` and check the string instead: `symbol.as_str() == "example"` - -error: comparing `Symbol` via `Symbol::intern` - --> tests/ui-internal/slow_symbol_comparisons.rs:18:37 - | -LL | let slow_comparison_backwards = sym!(example) == symbol; - | ^^^^^^^^^^^^^^^^^^^^^^^ help: use `Symbol::as_str` and check the string instead: `symbol.as_str() == "example"` - -error: aborting due to 3 previous errors - diff --git a/src/tools/clippy/tests/ui-internal/symbol_as_str.fixed b/src/tools/clippy/tests/ui-internal/symbol_as_str.fixed new file mode 100644 index 0000000000000..3e26732836ca8 --- /dev/null +++ b/src/tools/clippy/tests/ui-internal/symbol_as_str.fixed @@ -0,0 +1,21 @@ +#![feature(rustc_private)] + +extern crate rustc_span; + +use clippy_utils::sym; +use rustc_span::{Symbol, kw}; + +fn f(s: Symbol) { + s == sym::f32; + //~^ symbol_as_str + s == sym::proc_dash_macro; + //~^ symbol_as_str + s == kw::SelfLower; + //~^ symbol_as_str + s == sym::msrv; + //~^ symbol_as_str + s == sym::Cargo_toml; + //~^ symbol_as_str + sym::get == s; + //~^ symbol_as_str +} diff --git a/src/tools/clippy/tests/ui-internal/symbol_as_str.rs b/src/tools/clippy/tests/ui-internal/symbol_as_str.rs new file mode 100644 index 0000000000000..334c32d189837 --- /dev/null +++ b/src/tools/clippy/tests/ui-internal/symbol_as_str.rs @@ -0,0 +1,21 @@ +#![feature(rustc_private)] + +extern crate rustc_span; + +use clippy_utils::sym; +use rustc_span::{Symbol, kw}; + +fn f(s: Symbol) { + s.as_str() == "f32"; + //~^ symbol_as_str + s.as_str() == "proc-macro"; + //~^ symbol_as_str + s.as_str() == "self"; + //~^ symbol_as_str + s.as_str() == "msrv"; + //~^ symbol_as_str + s.as_str() == "Cargo.toml"; + //~^ symbol_as_str + "get" == s.as_str(); + //~^ symbol_as_str +} diff --git a/src/tools/clippy/tests/ui-internal/symbol_as_str.stderr b/src/tools/clippy/tests/ui-internal/symbol_as_str.stderr new file mode 100644 index 0000000000000..39f81f3833c49 --- /dev/null +++ b/src/tools/clippy/tests/ui-internal/symbol_as_str.stderr @@ -0,0 +1,76 @@ +error: converting a Symbol to a string + --> tests/ui-internal/symbol_as_str.rs:9:5 + | +LL | s.as_str() == "f32"; + | ^^^^^^^^^^ + | + = note: `-D clippy::symbol-as-str` implied by `-D warnings` + = help: to override `-D warnings` add `#[allow(clippy::symbol_as_str)]` +help: use the preinterned symbol + | +LL - s.as_str() == "f32"; +LL + s == sym::f32; + | + +error: converting a Symbol to a string + --> tests/ui-internal/symbol_as_str.rs:11:5 + | +LL | s.as_str() == "proc-macro"; + | ^^^^^^^^^^ + | +help: use the preinterned symbol + | +LL - s.as_str() == "proc-macro"; +LL + s == sym::proc_dash_macro; + | + +error: converting a Symbol to a string + --> tests/ui-internal/symbol_as_str.rs:13:5 + | +LL | s.as_str() == "self"; + | ^^^^^^^^^^ + | +help: use the preinterned symbol + | +LL - s.as_str() == "self"; +LL + s == kw::SelfLower; + | + +error: converting a Symbol to a string + --> tests/ui-internal/symbol_as_str.rs:15:5 + | +LL | s.as_str() == "msrv"; + | ^^^^^^^^^^ + | +help: use the preinterned symbol + | +LL - s.as_str() == "msrv"; +LL + s == sym::msrv; + | + +error: converting a Symbol to a string + --> tests/ui-internal/symbol_as_str.rs:17:5 + | +LL | s.as_str() == "Cargo.toml"; + | ^^^^^^^^^^ + | +help: use the preinterned symbol + | +LL - s.as_str() == "Cargo.toml"; +LL + s == sym::Cargo_toml; + | + +error: converting a Symbol to a string + --> tests/ui-internal/symbol_as_str.rs:19:14 + | +LL | "get" == s.as_str(); + | ^^^^^^^^^^ + | +help: use the preinterned symbol + | +LL - "get" == s.as_str(); +LL + sym::get == s; + | + +error: aborting due to 6 previous errors + diff --git a/src/tools/clippy/tests/ui-internal/symbol_as_str_unfixable.rs b/src/tools/clippy/tests/ui-internal/symbol_as_str_unfixable.rs new file mode 100644 index 0000000000000..635f28007e9af --- /dev/null +++ b/src/tools/clippy/tests/ui-internal/symbol_as_str_unfixable.rs @@ -0,0 +1,15 @@ +//@no-rustfix: paths that don't exist yet +#![feature(rustc_private)] + +extern crate rustc_span; + +use rustc_span::Symbol; + +fn f(s: Symbol) { + s.as_str() == "xyz123"; + //~^ symbol_as_str + s.as_str() == "with-dash"; + //~^ symbol_as_str + s.as_str() == "with.dot"; + //~^ symbol_as_str +} diff --git a/src/tools/clippy/tests/ui-internal/symbol_as_str_unfixable.stderr b/src/tools/clippy/tests/ui-internal/symbol_as_str_unfixable.stderr new file mode 100644 index 0000000000000..5349983ca5196 --- /dev/null +++ b/src/tools/clippy/tests/ui-internal/symbol_as_str_unfixable.stderr @@ -0,0 +1,40 @@ +error: converting a Symbol to a string + --> tests/ui-internal/symbol_as_str_unfixable.rs:9:5 + | +LL | s.as_str() == "xyz123"; + | ^^^^^^^^^^ + | + = note: `-D clippy::symbol-as-str` implied by `-D warnings` + = help: to override `-D warnings` add `#[allow(clippy::symbol_as_str)]` +help: add the symbol to `clippy_utils/src/sym.rs` and use it + | +LL - s.as_str() == "xyz123"; +LL + s == sym::xyz123; + | + +error: converting a Symbol to a string + --> tests/ui-internal/symbol_as_str_unfixable.rs:11:5 + | +LL | s.as_str() == "with-dash"; + | ^^^^^^^^^^ + | +help: add the symbol to `clippy_utils/src/sym.rs` and use it + | +LL - s.as_str() == "with-dash"; +LL + s == sym::with_dash; + | + +error: converting a Symbol to a string + --> tests/ui-internal/symbol_as_str_unfixable.rs:13:5 + | +LL | s.as_str() == "with.dot"; + | ^^^^^^^^^^ + | +help: add the symbol to `clippy_utils/src/sym.rs` and use it + | +LL - s.as_str() == "with.dot"; +LL + s == sym::with_dot; + | + +error: aborting due to 3 previous errors + diff --git a/src/tools/clippy/tests/ui-internal/unnecessary_def_path.fixed b/src/tools/clippy/tests/ui-internal/unnecessary_def_path.fixed index 577fad9341b64..89902ebe4e54e 100644 --- a/src/tools/clippy/tests/ui-internal/unnecessary_def_path.fixed +++ b/src/tools/clippy/tests/ui-internal/unnecessary_def_path.fixed @@ -1,5 +1,5 @@ //@aux-build:paths.rs -#![deny(clippy::internal)] +#![deny(clippy::unnecessary_def_path)] #![feature(rustc_private)] #![allow(clippy::unnecessary_map_or)] diff --git a/src/tools/clippy/tests/ui-internal/unnecessary_def_path.rs b/src/tools/clippy/tests/ui-internal/unnecessary_def_path.rs index d4deb3626d0b6..cfca15267c195 100644 --- a/src/tools/clippy/tests/ui-internal/unnecessary_def_path.rs +++ b/src/tools/clippy/tests/ui-internal/unnecessary_def_path.rs @@ -1,5 +1,5 @@ //@aux-build:paths.rs -#![deny(clippy::internal)] +#![deny(clippy::unnecessary_def_path)] #![feature(rustc_private)] #![allow(clippy::unnecessary_map_or)] diff --git a/src/tools/clippy/tests/ui-internal/unnecessary_def_path.stderr b/src/tools/clippy/tests/ui-internal/unnecessary_def_path.stderr index 0053ba321bbe7..d7fb4ea551e1d 100644 --- a/src/tools/clippy/tests/ui-internal/unnecessary_def_path.stderr +++ b/src/tools/clippy/tests/ui-internal/unnecessary_def_path.stderr @@ -7,9 +7,8 @@ LL | let _ = match_type(cx, ty, &OPTION); note: the lint level is defined here --> tests/ui-internal/unnecessary_def_path.rs:2:9 | -LL | #![deny(clippy::internal)] - | ^^^^^^^^^^^^^^^^ - = note: `#[deny(clippy::unnecessary_def_path)]` implied by `#[deny(clippy::internal)]` +LL | #![deny(clippy::unnecessary_def_path)] + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: use of a def path to a diagnostic item --> tests/ui-internal/unnecessary_def_path.rs:39:13 diff --git a/src/tools/clippy/tests/ui-internal/unnecessary_def_path_hardcoded_path.rs b/src/tools/clippy/tests/ui-internal/unnecessary_def_path_hardcoded_path.rs index 4801d76bd2685..bd7a55114acbc 100644 --- a/src/tools/clippy/tests/ui-internal/unnecessary_def_path_hardcoded_path.rs +++ b/src/tools/clippy/tests/ui-internal/unnecessary_def_path_hardcoded_path.rs @@ -1,6 +1,6 @@ #![feature(rustc_private)] #![allow(unused)] -#![warn(clippy::unnecessary_def_path)] +#![deny(clippy::unnecessary_def_path)] extern crate rustc_hir; diff --git a/src/tools/clippy/tests/ui-internal/unnecessary_def_path_hardcoded_path.stderr b/src/tools/clippy/tests/ui-internal/unnecessary_def_path_hardcoded_path.stderr index b938395193234..88fdf6f1c1888 100644 --- a/src/tools/clippy/tests/ui-internal/unnecessary_def_path_hardcoded_path.stderr +++ b/src/tools/clippy/tests/ui-internal/unnecessary_def_path_hardcoded_path.stderr @@ -5,8 +5,11 @@ LL | const DEREF_TRAIT: [&str; 4] = ["core", "ops", "deref", "Deref"]; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | = help: convert all references to use `sym::Deref` - = note: `-D clippy::unnecessary-def-path` implied by `-D warnings` - = help: to override `-D warnings` add `#[allow(clippy::unnecessary_def_path)]` +note: the lint level is defined here + --> tests/ui-internal/unnecessary_def_path_hardcoded_path.rs:3:9 + | +LL | #![deny(clippy::unnecessary_def_path)] + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: hardcoded path to a language item --> tests/ui-internal/unnecessary_def_path_hardcoded_path.rs:12:40 diff --git a/src/tools/clippy/tests/ui-internal/unnecessary_symbol_str.fixed b/src/tools/clippy/tests/ui-internal/unnecessary_symbol_str.fixed deleted file mode 100644 index dc564daef8293..0000000000000 --- a/src/tools/clippy/tests/ui-internal/unnecessary_symbol_str.fixed +++ /dev/null @@ -1,26 +0,0 @@ -#![feature(rustc_private)] -#![deny(clippy::internal)] -#![allow( - clippy::slow_symbol_comparisons, - clippy::borrow_deref_ref, - clippy::unnecessary_operation, - unused_must_use, - clippy::missing_clippy_version_attribute -)] - -extern crate rustc_span; - -use rustc_span::symbol::{Ident, Symbol}; - -fn main() { - Symbol::intern("foo") == rustc_span::sym::clippy; - //~^ unnecessary_symbol_str - Symbol::intern("foo") == rustc_span::kw::SelfLower; - //~^ unnecessary_symbol_str - Symbol::intern("foo") != rustc_span::kw::SelfUpper; - //~^ unnecessary_symbol_str - Ident::empty().name == rustc_span::sym::clippy; - //~^ unnecessary_symbol_str - rustc_span::sym::clippy == Ident::empty().name; - //~^ unnecessary_symbol_str -} diff --git a/src/tools/clippy/tests/ui-internal/unnecessary_symbol_str.rs b/src/tools/clippy/tests/ui-internal/unnecessary_symbol_str.rs deleted file mode 100644 index d74262d1294b7..0000000000000 --- a/src/tools/clippy/tests/ui-internal/unnecessary_symbol_str.rs +++ /dev/null @@ -1,26 +0,0 @@ -#![feature(rustc_private)] -#![deny(clippy::internal)] -#![allow( - clippy::slow_symbol_comparisons, - clippy::borrow_deref_ref, - clippy::unnecessary_operation, - unused_must_use, - clippy::missing_clippy_version_attribute -)] - -extern crate rustc_span; - -use rustc_span::symbol::{Ident, Symbol}; - -fn main() { - Symbol::intern("foo").as_str() == "clippy"; - //~^ unnecessary_symbol_str - Symbol::intern("foo").to_string() == "self"; - //~^ unnecessary_symbol_str - Symbol::intern("foo").to_ident_string() != "Self"; - //~^ unnecessary_symbol_str - &*Ident::empty().as_str() == "clippy"; - //~^ unnecessary_symbol_str - "clippy" == Ident::empty().to_string(); - //~^ unnecessary_symbol_str -} diff --git a/src/tools/clippy/tests/ui-internal/unnecessary_symbol_str.stderr b/src/tools/clippy/tests/ui-internal/unnecessary_symbol_str.stderr deleted file mode 100644 index 517a395e93f2c..0000000000000 --- a/src/tools/clippy/tests/ui-internal/unnecessary_symbol_str.stderr +++ /dev/null @@ -1,39 +0,0 @@ -error: unnecessary `Symbol` to string conversion - --> tests/ui-internal/unnecessary_symbol_str.rs:16:5 - | -LL | Symbol::intern("foo").as_str() == "clippy"; - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `Symbol::intern("foo") == rustc_span::sym::clippy` - | -note: the lint level is defined here - --> tests/ui-internal/unnecessary_symbol_str.rs:2:9 - | -LL | #![deny(clippy::internal)] - | ^^^^^^^^^^^^^^^^ - = note: `#[deny(clippy::unnecessary_symbol_str)]` implied by `#[deny(clippy::internal)]` - -error: unnecessary `Symbol` to string conversion - --> tests/ui-internal/unnecessary_symbol_str.rs:18:5 - | -LL | Symbol::intern("foo").to_string() == "self"; - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `Symbol::intern("foo") == rustc_span::kw::SelfLower` - -error: unnecessary `Symbol` to string conversion - --> tests/ui-internal/unnecessary_symbol_str.rs:20:5 - | -LL | Symbol::intern("foo").to_ident_string() != "Self"; - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `Symbol::intern("foo") != rustc_span::kw::SelfUpper` - -error: unnecessary `Symbol` to string conversion - --> tests/ui-internal/unnecessary_symbol_str.rs:22:5 - | -LL | &*Ident::empty().as_str() == "clippy"; - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `Ident::empty().name == rustc_span::sym::clippy` - -error: unnecessary `Symbol` to string conversion - --> tests/ui-internal/unnecessary_symbol_str.rs:24:5 - | -LL | "clippy" == Ident::empty().to_string(); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `rustc_span::sym::clippy == Ident::empty().name` - -error: aborting due to 5 previous errors - diff --git a/src/tools/clippy/tests/ui-toml/arbitrary_source_item_ordering/ordering_good.rs b/src/tools/clippy/tests/ui-toml/arbitrary_source_item_ordering/ordering_good.rs index b43791521cb5a..694ef45c75b08 100644 --- a/src/tools/clippy/tests/ui-toml/arbitrary_source_item_ordering/ordering_good.rs +++ b/src/tools/clippy/tests/ui-toml/arbitrary_source_item_ordering/ordering_good.rs @@ -16,6 +16,7 @@ //@[bad_conf_4] error-in-other-file: //@[bad_conf_5] error-in-other-file: //@[bad_conf_6] error-in-other-file: +//@compile-flags: --test #![allow(dead_code)] #![warn(clippy::arbitrary_source_item_ordering)] diff --git a/src/tools/clippy/tests/ui-toml/arbitrary_source_item_ordering/selective_ordering.default.stderr b/src/tools/clippy/tests/ui-toml/arbitrary_source_item_ordering/selective_ordering.default.stderr index 7fc216b30d508..fcd7864c6677d 100644 --- a/src/tools/clippy/tests/ui-toml/arbitrary_source_item_ordering/selective_ordering.default.stderr +++ b/src/tools/clippy/tests/ui-toml/arbitrary_source_item_ordering/selective_ordering.default.stderr @@ -1,16 +1,16 @@ error: incorrect ordering of items (must be alphabetically ordered) - --> tests/ui-toml/arbitrary_source_item_ordering/selective_ordering.rs:35:5 + --> tests/ui-toml/arbitrary_source_item_ordering/selective_ordering.rs:36:5 | LL | a: bool, | ^ | note: should be placed before `b` - --> tests/ui-toml/arbitrary_source_item_ordering/selective_ordering.rs:34:5 + --> tests/ui-toml/arbitrary_source_item_ordering/selective_ordering.rs:35:5 | LL | b: bool, | ^ note: the lint level is defined here - --> tests/ui-toml/arbitrary_source_item_ordering/selective_ordering.rs:32:8 + --> tests/ui-toml/arbitrary_source_item_ordering/selective_ordering.rs:33:8 | LL | #[deny(clippy::arbitrary_source_item_ordering)] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ diff --git a/src/tools/clippy/tests/ui-toml/arbitrary_source_item_ordering/selective_ordering.ord_in_2.stderr b/src/tools/clippy/tests/ui-toml/arbitrary_source_item_ordering/selective_ordering.ord_in_2.stderr index 1f75f5099ecc1..81c35ff778b7f 100644 --- a/src/tools/clippy/tests/ui-toml/arbitrary_source_item_ordering/selective_ordering.ord_in_2.stderr +++ b/src/tools/clippy/tests/ui-toml/arbitrary_source_item_ordering/selective_ordering.ord_in_2.stderr @@ -1,33 +1,33 @@ error: incorrect ordering of items (must be alphabetically ordered) - --> tests/ui-toml/arbitrary_source_item_ordering/selective_ordering.rs:24:8 + --> tests/ui-toml/arbitrary_source_item_ordering/selective_ordering.rs:25:8 | LL | struct OrderedChecked { | ^^^^^^^^^^^^^^ | note: should be placed before `Unordered` - --> tests/ui-toml/arbitrary_source_item_ordering/selective_ordering.rs:18:8 + --> tests/ui-toml/arbitrary_source_item_ordering/selective_ordering.rs:19:8 | LL | struct Unordered { | ^^^^^^^^^ note: the lint level is defined here - --> tests/ui-toml/arbitrary_source_item_ordering/selective_ordering.rs:9:9 + --> tests/ui-toml/arbitrary_source_item_ordering/selective_ordering.rs:10:9 | LL | #![deny(clippy::arbitrary_source_item_ordering)] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: incorrect ordering of items (must be alphabetically ordered) - --> tests/ui-toml/arbitrary_source_item_ordering/selective_ordering.rs:35:5 + --> tests/ui-toml/arbitrary_source_item_ordering/selective_ordering.rs:36:5 | LL | a: bool, | ^ | note: should be placed before `b` - --> tests/ui-toml/arbitrary_source_item_ordering/selective_ordering.rs:34:5 + --> tests/ui-toml/arbitrary_source_item_ordering/selective_ordering.rs:35:5 | LL | b: bool, | ^ note: the lint level is defined here - --> tests/ui-toml/arbitrary_source_item_ordering/selective_ordering.rs:32:8 + --> tests/ui-toml/arbitrary_source_item_ordering/selective_ordering.rs:33:8 | LL | #[deny(clippy::arbitrary_source_item_ordering)] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ diff --git a/src/tools/clippy/tests/ui-toml/arbitrary_source_item_ordering/selective_ordering.ord_in_3.stderr b/src/tools/clippy/tests/ui-toml/arbitrary_source_item_ordering/selective_ordering.ord_in_3.stderr index 8027f55add673..09ede57f295e8 100644 --- a/src/tools/clippy/tests/ui-toml/arbitrary_source_item_ordering/selective_ordering.ord_in_3.stderr +++ b/src/tools/clippy/tests/ui-toml/arbitrary_source_item_ordering/selective_ordering.ord_in_3.stderr @@ -1,16 +1,16 @@ error: incorrect ordering of items (must be alphabetically ordered) - --> tests/ui-toml/arbitrary_source_item_ordering/selective_ordering.rs:24:8 + --> tests/ui-toml/arbitrary_source_item_ordering/selective_ordering.rs:25:8 | LL | struct OrderedChecked { | ^^^^^^^^^^^^^^ | note: should be placed before `Unordered` - --> tests/ui-toml/arbitrary_source_item_ordering/selective_ordering.rs:18:8 + --> tests/ui-toml/arbitrary_source_item_ordering/selective_ordering.rs:19:8 | LL | struct Unordered { | ^^^^^^^^^ note: the lint level is defined here - --> tests/ui-toml/arbitrary_source_item_ordering/selective_ordering.rs:9:9 + --> tests/ui-toml/arbitrary_source_item_ordering/selective_ordering.rs:10:9 | LL | #![deny(clippy::arbitrary_source_item_ordering)] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ diff --git a/src/tools/clippy/tests/ui-toml/arbitrary_source_item_ordering/selective_ordering.ord_within.stderr b/src/tools/clippy/tests/ui-toml/arbitrary_source_item_ordering/selective_ordering.ord_within.stderr index 333a601f6a952..7c515f050c127 100644 --- a/src/tools/clippy/tests/ui-toml/arbitrary_source_item_ordering/selective_ordering.ord_within.stderr +++ b/src/tools/clippy/tests/ui-toml/arbitrary_source_item_ordering/selective_ordering.ord_within.stderr @@ -1,48 +1,60 @@ error: incorrect ordering of items (must be alphabetically ordered) - --> tests/ui-toml/arbitrary_source_item_ordering/selective_ordering.rs:24:8 + --> tests/ui-toml/arbitrary_source_item_ordering/selective_ordering.rs:25:8 | LL | struct OrderedChecked { | ^^^^^^^^^^^^^^ | note: should be placed before `Unordered` - --> tests/ui-toml/arbitrary_source_item_ordering/selective_ordering.rs:18:8 + --> tests/ui-toml/arbitrary_source_item_ordering/selective_ordering.rs:19:8 | LL | struct Unordered { | ^^^^^^^^^ note: the lint level is defined here - --> tests/ui-toml/arbitrary_source_item_ordering/selective_ordering.rs:9:9 + --> tests/ui-toml/arbitrary_source_item_ordering/selective_ordering.rs:10:9 | LL | #![deny(clippy::arbitrary_source_item_ordering)] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: incorrect ordering of items (must be alphabetically ordered) - --> tests/ui-toml/arbitrary_source_item_ordering/selective_ordering.rs:45:4 + --> tests/ui-toml/arbitrary_source_item_ordering/selective_ordering.rs:46:4 | LL | fn before_main() {} | ^^^^^^^^^^^ | note: should be placed before `main` - --> tests/ui-toml/arbitrary_source_item_ordering/selective_ordering.rs:41:4 + --> tests/ui-toml/arbitrary_source_item_ordering/selective_ordering.rs:42:4 | LL | fn main() { | ^^^^ error: incorrect ordering of items (must be alphabetically ordered) - --> tests/ui-toml/arbitrary_source_item_ordering/selective_ordering.rs:35:5 + --> tests/ui-toml/arbitrary_source_item_ordering/selective_ordering.rs:36:5 | LL | a: bool, | ^ | note: should be placed before `b` - --> tests/ui-toml/arbitrary_source_item_ordering/selective_ordering.rs:34:5 + --> tests/ui-toml/arbitrary_source_item_ordering/selective_ordering.rs:35:5 | LL | b: bool, | ^ note: the lint level is defined here - --> tests/ui-toml/arbitrary_source_item_ordering/selective_ordering.rs:32:8 + --> tests/ui-toml/arbitrary_source_item_ordering/selective_ordering.rs:33:8 | LL | #[deny(clippy::arbitrary_source_item_ordering)] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -error: aborting due to 3 previous errors +error: incorrect ordering of items (must be alphabetically ordered) + --> tests/ui-toml/arbitrary_source_item_ordering/selective_ordering.rs:52:11 + | +LL | const A: i8 = 0; + | ^ + | +note: should be placed before `B` + --> tests/ui-toml/arbitrary_source_item_ordering/selective_ordering.rs:51:11 + | +LL | const B: i8 = 1; + | ^ + +error: aborting due to 4 previous errors diff --git a/src/tools/clippy/tests/ui-toml/arbitrary_source_item_ordering/selective_ordering.rs b/src/tools/clippy/tests/ui-toml/arbitrary_source_item_ordering/selective_ordering.rs index e32b921dd9659..cb6d0170b8f97 100644 --- a/src/tools/clippy/tests/ui-toml/arbitrary_source_item_ordering/selective_ordering.rs +++ b/src/tools/clippy/tests/ui-toml/arbitrary_source_item_ordering/selective_ordering.rs @@ -4,6 +4,7 @@ //@[ord_within] rustc-env:CLIPPY_CONF_DIR=tests/ui-toml/arbitrary_source_item_ordering/ord_within //@[ord_in_2] rustc-env:CLIPPY_CONF_DIR=tests/ui-toml/arbitrary_source_item_ordering/ord_in_2 //@[ord_in_3] rustc-env:CLIPPY_CONF_DIR=tests/ui-toml/arbitrary_source_item_ordering/ord_in_3 +//@compile-flags: --test #![allow(dead_code)] #![deny(clippy::arbitrary_source_item_ordering)] @@ -44,3 +45,10 @@ fn main() { fn before_main() {} //~[ord_within]^ arbitrary_source_item_ordering + +#[cfg(test)] +mod test { + const B: i8 = 1; + const A: i8 = 0; + //~[ord_within]^ arbitrary_source_item_ordering +} diff --git a/src/tools/clippy/tests/ui-toml/await_holding_invalid_type_with_replacement/await_holding_invalid_type.stderr b/src/tools/clippy/tests/ui-toml/await_holding_invalid_type_with_replacement/await_holding_invalid_type.stderr index 86e30409af068..d0fce3614a145 100644 --- a/src/tools/clippy/tests/ui-toml/await_holding_invalid_type_with_replacement/await_holding_invalid_type.stderr +++ b/src/tools/clippy/tests/ui-toml/await_holding_invalid_type_with_replacement/await_holding_invalid_type.stderr @@ -1,11 +1,8 @@ error: error reading Clippy's configuration file: replacement not allowed for this configuration - --> $DIR/tests/ui-toml/await_holding_invalid_type_with_replacement/clippy.toml:1:31 + --> $DIR/tests/ui-toml/await_holding_invalid_type_with_replacement/clippy.toml:2:5 | -LL | await-holding-invalid-types = [ - | _______________________________^ -LL | | { path = "std::string::String", replacement = "std::net::Ipv4Addr" }, -LL | | ] - | |_^ +LL | { path = "std::string::String", replacement = "std::net::Ipv4Addr" }, + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: aborting due to 1 previous error diff --git a/src/tools/clippy/tests/ui-toml/collapsible_if/clippy.toml b/src/tools/clippy/tests/ui-toml/collapsible_if/clippy.toml new file mode 100644 index 0000000000000..592cea90cff5c --- /dev/null +++ b/src/tools/clippy/tests/ui-toml/collapsible_if/clippy.toml @@ -0,0 +1 @@ +lint-commented-code = true diff --git a/src/tools/clippy/tests/ui-toml/collapsible_if/collapsible_if.fixed b/src/tools/clippy/tests/ui-toml/collapsible_if/collapsible_if.fixed new file mode 100644 index 0000000000000..6f5cc47ba6c75 --- /dev/null +++ b/src/tools/clippy/tests/ui-toml/collapsible_if/collapsible_if.fixed @@ -0,0 +1,34 @@ +#![allow(clippy::eq_op, clippy::nonminimal_bool)] + +#[rustfmt::skip] +#[warn(clippy::collapsible_if)] +fn main() { + let (x, y) = ("hello", "world"); + + if x == "hello" + // Comment must be kept + && y == "world" { + println!("Hello world!"); + } + //~^^^^^^ collapsible_if + + // The following tests check for the fix of https://github.com/rust-lang/rust-clippy/issues/798 + if x == "hello" // Inner comment + && y == "world" { + println!("Hello world!"); + } + //~^^^^^ collapsible_if + + if x == "hello" + /* Inner comment */ + && y == "world" { + println!("Hello world!"); + } + //~^^^^^^ collapsible_if + + if x == "hello" /* Inner comment */ + && y == "world" { + println!("Hello world!"); + } + //~^^^^^ collapsible_if +} diff --git a/src/tools/clippy/tests/ui-toml/collapsible_if/collapsible_if.rs b/src/tools/clippy/tests/ui-toml/collapsible_if/collapsible_if.rs new file mode 100644 index 0000000000000..868b4adcde502 --- /dev/null +++ b/src/tools/clippy/tests/ui-toml/collapsible_if/collapsible_if.rs @@ -0,0 +1,38 @@ +#![allow(clippy::eq_op, clippy::nonminimal_bool)] + +#[rustfmt::skip] +#[warn(clippy::collapsible_if)] +fn main() { + let (x, y) = ("hello", "world"); + + if x == "hello" { + // Comment must be kept + if y == "world" { + println!("Hello world!"); + } + } + //~^^^^^^ collapsible_if + + // The following tests check for the fix of https://github.com/rust-lang/rust-clippy/issues/798 + if x == "hello" { // Inner comment + if y == "world" { + println!("Hello world!"); + } + } + //~^^^^^ collapsible_if + + if x == "hello" { + /* Inner comment */ + if y == "world" { + println!("Hello world!"); + } + } + //~^^^^^^ collapsible_if + + if x == "hello" { /* Inner comment */ + if y == "world" { + println!("Hello world!"); + } + } + //~^^^^^ collapsible_if +} diff --git a/src/tools/clippy/tests/ui-toml/collapsible_if/collapsible_if.stderr b/src/tools/clippy/tests/ui-toml/collapsible_if/collapsible_if.stderr new file mode 100644 index 0000000000000..357ce4ad32deb --- /dev/null +++ b/src/tools/clippy/tests/ui-toml/collapsible_if/collapsible_if.stderr @@ -0,0 +1,80 @@ +error: this `if` statement can be collapsed + --> tests/ui-toml/collapsible_if/collapsible_if.rs:8:5 + | +LL | / if x == "hello" { +LL | | // Comment must be kept +LL | | if y == "world" { +LL | | println!("Hello world!"); +LL | | } +LL | | } + | |_____^ + | + = note: `-D clippy::collapsible-if` implied by `-D warnings` + = help: to override `-D warnings` add `#[allow(clippy::collapsible_if)]` +help: collapse nested if block + | +LL ~ if x == "hello" +LL | // Comment must be kept +LL ~ && y == "world" { +LL | println!("Hello world!"); +LL ~ } + | + +error: this `if` statement can be collapsed + --> tests/ui-toml/collapsible_if/collapsible_if.rs:17:5 + | +LL | / if x == "hello" { // Inner comment +LL | | if y == "world" { +LL | | println!("Hello world!"); +LL | | } +LL | | } + | |_____^ + | +help: collapse nested if block + | +LL ~ if x == "hello" // Inner comment +LL ~ && y == "world" { +LL | println!("Hello world!"); +LL ~ } + | + +error: this `if` statement can be collapsed + --> tests/ui-toml/collapsible_if/collapsible_if.rs:24:5 + | +LL | / if x == "hello" { +LL | | /* Inner comment */ +LL | | if y == "world" { +LL | | println!("Hello world!"); +LL | | } +LL | | } + | |_____^ + | +help: collapse nested if block + | +LL ~ if x == "hello" +LL | /* Inner comment */ +LL ~ && y == "world" { +LL | println!("Hello world!"); +LL ~ } + | + +error: this `if` statement can be collapsed + --> tests/ui-toml/collapsible_if/collapsible_if.rs:32:5 + | +LL | / if x == "hello" { /* Inner comment */ +LL | | if y == "world" { +LL | | println!("Hello world!"); +LL | | } +LL | | } + | |_____^ + | +help: collapse nested if block + | +LL ~ if x == "hello" /* Inner comment */ +LL ~ && y == "world" { +LL | println!("Hello world!"); +LL ~ } + | + +error: aborting due to 4 previous errors + diff --git a/src/tools/clippy/tests/ui-toml/collapsible_if/collapsible_if_let_chains.fixed b/src/tools/clippy/tests/ui-toml/collapsible_if/collapsible_if_let_chains.fixed new file mode 100644 index 0000000000000..f12273954c6dd --- /dev/null +++ b/src/tools/clippy/tests/ui-toml/collapsible_if/collapsible_if_let_chains.fixed @@ -0,0 +1,25 @@ +#![feature(let_chains)] +#![warn(clippy::collapsible_if)] + +fn main() { + if let Some(a) = Some(3) + // with comment + && let Some(b) = Some(4) { + let _ = a + b; + } + //~^^^^^^ collapsible_if + + if let Some(a) = Some(3) + // with comment + && a + 1 == 4 { + let _ = a; + } + //~^^^^^^ collapsible_if + + if Some(3) == Some(4).map(|x| x - 1) + // with comment + && let Some(b) = Some(4) { + let _ = b; + } + //~^^^^^^ collapsible_if +} diff --git a/src/tools/clippy/tests/ui-toml/collapsible_if/collapsible_if_let_chains.rs b/src/tools/clippy/tests/ui-toml/collapsible_if/collapsible_if_let_chains.rs new file mode 100644 index 0000000000000..5a984d7a3cbee --- /dev/null +++ b/src/tools/clippy/tests/ui-toml/collapsible_if/collapsible_if_let_chains.rs @@ -0,0 +1,28 @@ +#![feature(let_chains)] +#![warn(clippy::collapsible_if)] + +fn main() { + if let Some(a) = Some(3) { + // with comment + if let Some(b) = Some(4) { + let _ = a + b; + } + } + //~^^^^^^ collapsible_if + + if let Some(a) = Some(3) { + // with comment + if a + 1 == 4 { + let _ = a; + } + } + //~^^^^^^ collapsible_if + + if Some(3) == Some(4).map(|x| x - 1) { + // with comment + if let Some(b) = Some(4) { + let _ = b; + } + } + //~^^^^^^ collapsible_if +} diff --git a/src/tools/clippy/tests/ui-toml/collapsible_if/collapsible_if_let_chains.stderr b/src/tools/clippy/tests/ui-toml/collapsible_if/collapsible_if_let_chains.stderr new file mode 100644 index 0000000000000..c22a65a447301 --- /dev/null +++ b/src/tools/clippy/tests/ui-toml/collapsible_if/collapsible_if_let_chains.stderr @@ -0,0 +1,64 @@ +error: this `if` statement can be collapsed + --> tests/ui-toml/collapsible_if/collapsible_if_let_chains.rs:5:5 + | +LL | / if let Some(a) = Some(3) { +LL | | // with comment +LL | | if let Some(b) = Some(4) { +LL | | let _ = a + b; +LL | | } +LL | | } + | |_____^ + | + = note: `-D clippy::collapsible-if` implied by `-D warnings` + = help: to override `-D warnings` add `#[allow(clippy::collapsible_if)]` +help: collapse nested if block + | +LL ~ if let Some(a) = Some(3) +LL | // with comment +LL ~ && let Some(b) = Some(4) { +LL | let _ = a + b; +LL ~ } + | + +error: this `if` statement can be collapsed + --> tests/ui-toml/collapsible_if/collapsible_if_let_chains.rs:13:5 + | +LL | / if let Some(a) = Some(3) { +LL | | // with comment +LL | | if a + 1 == 4 { +LL | | let _ = a; +LL | | } +LL | | } + | |_____^ + | +help: collapse nested if block + | +LL ~ if let Some(a) = Some(3) +LL | // with comment +LL ~ && a + 1 == 4 { +LL | let _ = a; +LL ~ } + | + +error: this `if` statement can be collapsed + --> tests/ui-toml/collapsible_if/collapsible_if_let_chains.rs:21:5 + | +LL | / if Some(3) == Some(4).map(|x| x - 1) { +LL | | // with comment +LL | | if let Some(b) = Some(4) { +LL | | let _ = b; +LL | | } +LL | | } + | |_____^ + | +help: collapse nested if block + | +LL ~ if Some(3) == Some(4).map(|x| x - 1) +LL | // with comment +LL ~ && let Some(b) = Some(4) { +LL | let _ = b; +LL ~ } + | + +error: aborting due to 3 previous errors + diff --git a/src/tools/clippy/tests/ui-toml/macro_metavars_in_unsafe/default/test.rs b/src/tools/clippy/tests/ui-toml/macro_metavars_in_unsafe/default/test.rs index 2465fe45645f1..d3d5b0c103e7f 100644 --- a/src/tools/clippy/tests/ui-toml/macro_metavars_in_unsafe/default/test.rs +++ b/src/tools/clippy/tests/ui-toml/macro_metavars_in_unsafe/default/test.rs @@ -251,6 +251,16 @@ pub mod issue13219 { } } +#[macro_export] +macro_rules! issue14488 { + ($e:expr) => { + #[expect(clippy::macro_metavars_in_unsafe)] + unsafe { + $e + } + }; +} + fn main() { allow_works!(1); simple!(1); @@ -271,4 +281,10 @@ fn main() { multiple_unsafe_blocks!(1, 1, 1); unsafe_from_root_ctxt!(unsafe { 1 }); nested_macros!(1, 1); + + // These two invocations lead to two expanded unsafe blocks, each with an `#[expect]` on it. + // Only of them gets a warning, which used to result in an unfulfilled expectation for the other + // expanded unsafe block. + issue14488!(1); + issue14488!(2); } diff --git a/src/tools/clippy/tests/ui-toml/max_suggested_slice_pattern_length/index_refutable_slice.fixed b/src/tools/clippy/tests/ui-toml/max_suggested_slice_pattern_length/index_refutable_slice.fixed index 36540bf1dcf73..2877871d0bf4c 100644 --- a/src/tools/clippy/tests/ui-toml/max_suggested_slice_pattern_length/index_refutable_slice.fixed +++ b/src/tools/clippy/tests/ui-toml/max_suggested_slice_pattern_length/index_refutable_slice.fixed @@ -1,3 +1,4 @@ +#![allow(clippy::uninlined_format_args)] #![deny(clippy::index_refutable_slice)] fn below_limit() { diff --git a/src/tools/clippy/tests/ui-toml/max_suggested_slice_pattern_length/index_refutable_slice.rs b/src/tools/clippy/tests/ui-toml/max_suggested_slice_pattern_length/index_refutable_slice.rs index da76bb20fd961..f958b92a102a3 100644 --- a/src/tools/clippy/tests/ui-toml/max_suggested_slice_pattern_length/index_refutable_slice.rs +++ b/src/tools/clippy/tests/ui-toml/max_suggested_slice_pattern_length/index_refutable_slice.rs @@ -1,3 +1,4 @@ +#![allow(clippy::uninlined_format_args)] #![deny(clippy::index_refutable_slice)] fn below_limit() { diff --git a/src/tools/clippy/tests/ui-toml/max_suggested_slice_pattern_length/index_refutable_slice.stderr b/src/tools/clippy/tests/ui-toml/max_suggested_slice_pattern_length/index_refutable_slice.stderr index 022deb330e6e3..e1a8941e102f5 100644 --- a/src/tools/clippy/tests/ui-toml/max_suggested_slice_pattern_length/index_refutable_slice.stderr +++ b/src/tools/clippy/tests/ui-toml/max_suggested_slice_pattern_length/index_refutable_slice.stderr @@ -1,11 +1,11 @@ error: this binding can be a slice pattern to avoid indexing - --> tests/ui-toml/max_suggested_slice_pattern_length/index_refutable_slice.rs:5:17 + --> tests/ui-toml/max_suggested_slice_pattern_length/index_refutable_slice.rs:6:17 | LL | if let Some(slice) = slice { | ^^^^^ | note: the lint level is defined here - --> tests/ui-toml/max_suggested_slice_pattern_length/index_refutable_slice.rs:1:9 + --> tests/ui-toml/max_suggested_slice_pattern_length/index_refutable_slice.rs:2:9 | LL | #![deny(clippy::index_refutable_slice)] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ diff --git a/src/tools/clippy/tests/ui-toml/strict_non_send_fields_in_send_ty/test.rs b/src/tools/clippy/tests/ui-toml/strict_non_send_fields_in_send_ty/test.rs index 08a8e1186d5cb..13e19e9fe14bf 100644 --- a/src/tools/clippy/tests/ui-toml/strict_non_send_fields_in_send_ty/test.rs +++ b/src/tools/clippy/tests/ui-toml/strict_non_send_fields_in_send_ty/test.rs @@ -29,7 +29,7 @@ unsafe impl Send for MyOption {} //~^ non_send_fields_in_send_ty // All fields are disallowed when raw pointer heuristic is off -extern "C" { +unsafe extern "C" { type NonSend; } diff --git a/src/tools/clippy/tests/ui-toml/toml_inconsistent_struct_constructor/clippy.toml b/src/tools/clippy/tests/ui-toml/toml_inconsistent_struct_constructor/clippy.toml index f43c9d97e825d..3cb8523562a8f 100644 --- a/src/tools/clippy/tests/ui-toml/toml_inconsistent_struct_constructor/clippy.toml +++ b/src/tools/clippy/tests/ui-toml/toml_inconsistent_struct_constructor/clippy.toml @@ -1 +1 @@ -lint-inconsistent-struct-field-initializers = true +check-inconsistent-struct-field-initializers = true diff --git a/src/tools/clippy/tests/ui-toml/toml_invalid_path/clippy.toml b/src/tools/clippy/tests/ui-toml/toml_invalid_path/clippy.toml new file mode 100644 index 0000000000000..6d0d732a92237 --- /dev/null +++ b/src/tools/clippy/tests/ui-toml/toml_invalid_path/clippy.toml @@ -0,0 +1,14 @@ +[[disallowed-types]] +path = "std::result::Result::Err" + +[[disallowed-macros]] +path = "bool" + +[[disallowed-methods]] +path = "std::process::current_exe" + +# negative test + +[[disallowed-methods]] +path = "std::current_exe" +allow-invalid = true diff --git a/src/tools/clippy/tests/ui-toml/toml_invalid_path/conf_invalid_path.rs b/src/tools/clippy/tests/ui-toml/toml_invalid_path/conf_invalid_path.rs new file mode 100644 index 0000000000000..c152038270348 --- /dev/null +++ b/src/tools/clippy/tests/ui-toml/toml_invalid_path/conf_invalid_path.rs @@ -0,0 +1,5 @@ +//@error-in-other-file: expected a macro, found a primitive type +//@error-in-other-file: `std::process::current_exe` does not refer to an existing function +//@error-in-other-file: expected a type, found a tuple variant + +fn main() {} diff --git a/src/tools/clippy/tests/ui-toml/toml_invalid_path/conf_invalid_path.stderr b/src/tools/clippy/tests/ui-toml/toml_invalid_path/conf_invalid_path.stderr new file mode 100644 index 0000000000000..82550108eba53 --- /dev/null +++ b/src/tools/clippy/tests/ui-toml/toml_invalid_path/conf_invalid_path.stderr @@ -0,0 +1,23 @@ +warning: expected a macro, found a primitive type + --> $DIR/tests/ui-toml/toml_invalid_path/clippy.toml:4:1 + | +LL | / [[disallowed-macros]] +LL | | path = "bool" + | |_____________^ + +warning: `std::process::current_exe` does not refer to an existing function + --> $DIR/tests/ui-toml/toml_invalid_path/clippy.toml:7:1 + | +LL | / [[disallowed-methods]] +LL | | path = "std::process::current_exe" + | |__________________________________^ + +warning: expected a type, found a tuple variant + --> $DIR/tests/ui-toml/toml_invalid_path/clippy.toml:1:1 + | +LL | / [[disallowed-types]] +LL | | path = "std::result::Result::Err" + | |_________________________________^ + +warning: 3 warnings emitted + diff --git a/src/tools/clippy/tests/ui-toml/toml_unknown_key/conf_unknown_key.stderr b/src/tools/clippy/tests/ui-toml/toml_unknown_key/conf_unknown_key.stderr index fee5b01b68982..f2eaa66a4ae41 100644 --- a/src/tools/clippy/tests/ui-toml/toml_unknown_key/conf_unknown_key.stderr +++ b/src/tools/clippy/tests/ui-toml/toml_unknown_key/conf_unknown_key.stderr @@ -29,12 +29,11 @@ error: error reading Clippy's configuration file: unknown field `foobar`, expect array-size-threshold avoid-breaking-exported-api await-holding-invalid-types - blacklisted-names cargo-ignore-publish check-incompatible-msrv-in-tests + check-inconsistent-struct-field-initializers check-private-items cognitive-complexity-threshold - cyclomatic-complexity-threshold disallowed-macros disallowed-methods disallowed-names @@ -49,7 +48,7 @@ error: error reading Clippy's configuration file: unknown field `foobar`, expect future-size-threshold ignore-interior-mutability large-error-threshold - lint-inconsistent-struct-field-initializers + lint-commented-code literal-representation-threshold matches-for-let-else max-fn-params-bools @@ -122,12 +121,11 @@ error: error reading Clippy's configuration file: unknown field `barfoo`, expect array-size-threshold avoid-breaking-exported-api await-holding-invalid-types - blacklisted-names cargo-ignore-publish check-incompatible-msrv-in-tests + check-inconsistent-struct-field-initializers check-private-items cognitive-complexity-threshold - cyclomatic-complexity-threshold disallowed-macros disallowed-methods disallowed-names @@ -142,7 +140,7 @@ error: error reading Clippy's configuration file: unknown field `barfoo`, expect future-size-threshold ignore-interior-mutability large-error-threshold - lint-inconsistent-struct-field-initializers + lint-commented-code literal-representation-threshold matches-for-let-else max-fn-params-bools @@ -215,12 +213,11 @@ error: error reading Clippy's configuration file: unknown field `allow_mixed_uni array-size-threshold avoid-breaking-exported-api await-holding-invalid-types - blacklisted-names cargo-ignore-publish check-incompatible-msrv-in-tests + check-inconsistent-struct-field-initializers check-private-items cognitive-complexity-threshold - cyclomatic-complexity-threshold disallowed-macros disallowed-methods disallowed-names @@ -235,7 +232,7 @@ error: error reading Clippy's configuration file: unknown field `allow_mixed_uni future-size-threshold ignore-interior-mutability large-error-threshold - lint-inconsistent-struct-field-initializers + lint-commented-code literal-representation-threshold matches-for-let-else max-fn-params-bools diff --git a/src/tools/clippy/tests/ui-toml/wildcard_imports/wildcard_imports.fixed b/src/tools/clippy/tests/ui-toml/wildcard_imports/wildcard_imports.fixed index af72d6be0e096..20511cbed165e 100644 --- a/src/tools/clippy/tests/ui-toml/wildcard_imports/wildcard_imports.fixed +++ b/src/tools/clippy/tests/ui-toml/wildcard_imports/wildcard_imports.fixed @@ -15,7 +15,7 @@ mod my_crate { } } -use utils::{BAR, print}; +pub use utils::{BAR, print}; //~^ ERROR: usage of wildcard import use my_crate::utils::my_util_fn; //~^ ERROR: usage of wildcard import diff --git a/src/tools/clippy/tests/ui-toml/wildcard_imports/wildcard_imports.rs b/src/tools/clippy/tests/ui-toml/wildcard_imports/wildcard_imports.rs index 91009dd8835f8..8d05910f471ba 100644 --- a/src/tools/clippy/tests/ui-toml/wildcard_imports/wildcard_imports.rs +++ b/src/tools/clippy/tests/ui-toml/wildcard_imports/wildcard_imports.rs @@ -15,7 +15,7 @@ mod my_crate { } } -use utils::*; +pub use utils::*; //~^ ERROR: usage of wildcard import use my_crate::utils::*; //~^ ERROR: usage of wildcard import diff --git a/src/tools/clippy/tests/ui-toml/wildcard_imports/wildcard_imports.stderr b/src/tools/clippy/tests/ui-toml/wildcard_imports/wildcard_imports.stderr index 3d3be965aa411..5e624dd6c3cdc 100644 --- a/src/tools/clippy/tests/ui-toml/wildcard_imports/wildcard_imports.stderr +++ b/src/tools/clippy/tests/ui-toml/wildcard_imports/wildcard_imports.stderr @@ -1,8 +1,8 @@ error: usage of wildcard import - --> tests/ui-toml/wildcard_imports/wildcard_imports.rs:18:5 + --> tests/ui-toml/wildcard_imports/wildcard_imports.rs:18:9 | -LL | use utils::*; - | ^^^^^^^^ help: try: `utils::{BAR, print}` +LL | pub use utils::*; + | ^^^^^^^^ help: try: `utils::{BAR, print}` | = note: `-D clippy::wildcard-imports` implied by `-D warnings` = help: to override `-D warnings` add `#[allow(clippy::wildcard_imports)]` diff --git a/src/tools/clippy/tests/ui/asm_syntax_not_x86.rs b/src/tools/clippy/tests/ui/asm_syntax_not_x86.rs index edcd5247f18ce..361bc2033934a 100644 --- a/src/tools/clippy/tests/ui/asm_syntax_not_x86.rs +++ b/src/tools/clippy/tests/ui/asm_syntax_not_x86.rs @@ -8,9 +8,11 @@ mod dont_warn { use std::arch::{asm, global_asm}; pub(super) unsafe fn use_asm() { - asm!(""); - asm!("", options()); - asm!("", options(nostack)); + unsafe { + asm!(""); + asm!("", options()); + asm!("", options(nostack)); + } } global_asm!(""); diff --git a/src/tools/clippy/tests/ui/asm_syntax_x86.rs b/src/tools/clippy/tests/ui/asm_syntax_x86.rs index 4e91f27cd3189..30401c9a0448a 100644 --- a/src/tools/clippy/tests/ui/asm_syntax_x86.rs +++ b/src/tools/clippy/tests/ui/asm_syntax_x86.rs @@ -5,17 +5,19 @@ mod warn_intel { use std::arch::{asm, global_asm}; pub(super) unsafe fn use_asm() { - asm!(""); - //~^ inline_asm_x86_intel_syntax + unsafe { + asm!(""); + //~^ inline_asm_x86_intel_syntax - asm!("", options()); - //~^ inline_asm_x86_intel_syntax + asm!("", options()); + //~^ inline_asm_x86_intel_syntax - asm!("", options(nostack)); - //~^ inline_asm_x86_intel_syntax + asm!("", options(nostack)); + //~^ inline_asm_x86_intel_syntax - asm!("", options(att_syntax)); - asm!("", options(nostack, att_syntax)); + asm!("", options(att_syntax)); + asm!("", options(nostack, att_syntax)); + } } global_asm!(""); @@ -32,14 +34,16 @@ mod warn_att { use std::arch::{asm, global_asm}; pub(super) unsafe fn use_asm() { - asm!(""); - asm!("", options()); - asm!("", options(nostack)); - asm!("", options(att_syntax)); - //~^ inline_asm_x86_att_syntax - - asm!("", options(nostack, att_syntax)); - //~^ inline_asm_x86_att_syntax + unsafe { + asm!(""); + asm!("", options()); + asm!("", options(nostack)); + asm!("", options(att_syntax)); + //~^ inline_asm_x86_att_syntax + + asm!("", options(nostack, att_syntax)); + //~^ inline_asm_x86_att_syntax + } } global_asm!(""); diff --git a/src/tools/clippy/tests/ui/asm_syntax_x86.stderr b/src/tools/clippy/tests/ui/asm_syntax_x86.stderr index 2dcd955f03479..8e068cf2349cd 100644 --- a/src/tools/clippy/tests/ui/asm_syntax_x86.stderr +++ b/src/tools/clippy/tests/ui/asm_syntax_x86.stderr @@ -1,31 +1,31 @@ error: Intel x86 assembly syntax used - --> tests/ui/asm_syntax_x86.rs:8:9 + --> tests/ui/asm_syntax_x86.rs:9:13 | -LL | asm!(""); - | ^^^^^^^^ +LL | asm!(""); + | ^^^^^^^^ | = help: use AT&T x86 assembly syntax = note: `-D clippy::inline-asm-x86-intel-syntax` implied by `-D warnings` = help: to override `-D warnings` add `#[allow(clippy::inline_asm_x86_intel_syntax)]` error: Intel x86 assembly syntax used - --> tests/ui/asm_syntax_x86.rs:11:9 + --> tests/ui/asm_syntax_x86.rs:12:13 | -LL | asm!("", options()); - | ^^^^^^^^^^^^^^^^^^^ +LL | asm!("", options()); + | ^^^^^^^^^^^^^^^^^^^ | = help: use AT&T x86 assembly syntax error: Intel x86 assembly syntax used - --> tests/ui/asm_syntax_x86.rs:14:9 + --> tests/ui/asm_syntax_x86.rs:15:13 | -LL | asm!("", options(nostack)); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^ +LL | asm!("", options(nostack)); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^ | = help: use AT&T x86 assembly syntax error: Intel x86 assembly syntax used - --> tests/ui/asm_syntax_x86.rs:21:5 + --> tests/ui/asm_syntax_x86.rs:23:5 | LL | global_asm!(""); | ^^^^^^^^^^^^^^^ @@ -33,7 +33,7 @@ LL | global_asm!(""); = help: use AT&T x86 assembly syntax error: Intel x86 assembly syntax used - --> tests/ui/asm_syntax_x86.rs:24:5 + --> tests/ui/asm_syntax_x86.rs:26:5 | LL | global_asm!("", options()); | ^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -41,25 +41,25 @@ LL | global_asm!("", options()); = help: use AT&T x86 assembly syntax error: AT&T x86 assembly syntax used - --> tests/ui/asm_syntax_x86.rs:38:9 + --> tests/ui/asm_syntax_x86.rs:41:13 | -LL | asm!("", options(att_syntax)); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +LL | asm!("", options(att_syntax)); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | = help: use Intel x86 assembly syntax = note: `-D clippy::inline-asm-x86-att-syntax` implied by `-D warnings` = help: to override `-D warnings` add `#[allow(clippy::inline_asm_x86_att_syntax)]` error: AT&T x86 assembly syntax used - --> tests/ui/asm_syntax_x86.rs:41:9 + --> tests/ui/asm_syntax_x86.rs:44:13 | -LL | asm!("", options(nostack, att_syntax)); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +LL | asm!("", options(nostack, att_syntax)); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | = help: use Intel x86 assembly syntax error: AT&T x86 assembly syntax used - --> tests/ui/asm_syntax_x86.rs:47:5 + --> tests/ui/asm_syntax_x86.rs:51:5 | LL | global_asm!("", options(att_syntax)); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ diff --git a/src/tools/clippy/tests/ui/author/if.rs b/src/tools/clippy/tests/ui/author/if.rs index 59bc9f5bfa5c2..abefc34cf6b3e 100644 --- a/src/tools/clippy/tests/ui/author/if.rs +++ b/src/tools/clippy/tests/ui/author/if.rs @@ -1,6 +1,6 @@ //@ check-pass -#[allow(clippy::all)] +#![allow(clippy::all)] fn main() { #[clippy::author] diff --git a/src/tools/clippy/tests/ui/author/macro_in_closure.rs b/src/tools/clippy/tests/ui/author/macro_in_closure.rs index 8a02f38fad87b..373f0148d475a 100644 --- a/src/tools/clippy/tests/ui/author/macro_in_closure.rs +++ b/src/tools/clippy/tests/ui/author/macro_in_closure.rs @@ -1,5 +1,7 @@ //@ check-pass +#![allow(clippy::uninlined_format_args)] + fn main() { #[clippy::author] let print_text = |x| println!("{}", x); diff --git a/src/tools/clippy/tests/ui/author/macro_in_loop.rs b/src/tools/clippy/tests/ui/author/macro_in_loop.rs index 84ffe416e839b..f68275fefaaa3 100644 --- a/src/tools/clippy/tests/ui/author/macro_in_loop.rs +++ b/src/tools/clippy/tests/ui/author/macro_in_loop.rs @@ -1,6 +1,7 @@ //@ check-pass #![feature(stmt_expr_attributes)] +#![allow(clippy::uninlined_format_args)] fn main() { #[clippy::author] diff --git a/src/tools/clippy/tests/ui/auxiliary/proc_macros.rs b/src/tools/clippy/tests/ui/auxiliary/proc_macros.rs index 1a2a4ec231143..7a4cc4fa9ee8e 100644 --- a/src/tools/clippy/tests/ui/auxiliary/proc_macros.rs +++ b/src/tools/clippy/tests/ui/auxiliary/proc_macros.rs @@ -131,12 +131,12 @@ fn write_with_span(s: Span, mut input: IntoIter, out: &mut TokenStream) -> Resul pub fn make_it_big(input: TokenStream) -> TokenStream { let mut expr_repeat = syn::parse_macro_input!(input as syn::ExprRepeat); let len_span = expr_repeat.len.span(); - if let syn::Expr::Lit(expr_lit) = &mut *expr_repeat.len { - if let syn::Lit::Int(lit_int) = &expr_lit.lit { - let orig_val = lit_int.base10_parse::().expect("not a valid length parameter"); - let new_val = orig_val.saturating_mul(10); - expr_lit.lit = syn::parse_quote_spanned!( len_span => #new_val); - } + if let syn::Expr::Lit(expr_lit) = &mut *expr_repeat.len + && let syn::Lit::Int(lit_int) = &expr_lit.lit + { + let orig_val = lit_int.base10_parse::().expect("not a valid length parameter"); + let new_val = orig_val.saturating_mul(10); + expr_lit.lit = syn::parse_quote_spanned!( len_span => #new_val); } quote::quote!(#expr_repeat).into() } diff --git a/src/tools/clippy/tests/ui/blocks_in_conditions.fixed b/src/tools/clippy/tests/ui/blocks_in_conditions.fixed index cd307e803d0c9..6ae5b0cb2f041 100644 --- a/src/tools/clippy/tests/ui/blocks_in_conditions.fixed +++ b/src/tools/clippy/tests/ui/blocks_in_conditions.fixed @@ -3,7 +3,7 @@ #![warn(clippy::blocks_in_conditions)] #![allow( unused, - clippy::let_and_return, + unnecessary_transmutes, clippy::needless_if, clippy::missing_transmute_annotations )] @@ -71,28 +71,6 @@ fn block_in_assert() { ); } -// issue #11814 -fn block_in_match_expr(num: i32) -> i32 { - let res = { - //~^ ERROR: in a `match` scrutinee, avoid complex blocks or closures with blocks; instead, move the block or closure higher and bind it with a `let` - let opt = Some(2); - opt - }; match res { - Some(0) => 1, - Some(n) => num * 2, - None => 0, - }; - - match unsafe { - let hearty_hearty_hearty = vec![240, 159, 146, 150]; - String::from_utf8_unchecked(hearty_hearty_hearty).as_str() - } { - "💖" => 1, - "what" => 2, - _ => 3, - } -} - // issue #12162 macro_rules! timed { ($name:expr, $body:expr $(,)?) => {{ diff --git a/src/tools/clippy/tests/ui/blocks_in_conditions.rs b/src/tools/clippy/tests/ui/blocks_in_conditions.rs index 6a211c8edfd4f..3fd060620728f 100644 --- a/src/tools/clippy/tests/ui/blocks_in_conditions.rs +++ b/src/tools/clippy/tests/ui/blocks_in_conditions.rs @@ -3,7 +3,7 @@ #![warn(clippy::blocks_in_conditions)] #![allow( unused, - clippy::let_and_return, + unnecessary_transmutes, clippy::needless_if, clippy::missing_transmute_annotations )] @@ -71,28 +71,6 @@ fn block_in_assert() { ); } -// issue #11814 -fn block_in_match_expr(num: i32) -> i32 { - match { - //~^ ERROR: in a `match` scrutinee, avoid complex blocks or closures with blocks; instead, move the block or closure higher and bind it with a `let` - let opt = Some(2); - opt - } { - Some(0) => 1, - Some(n) => num * 2, - None => 0, - }; - - match unsafe { - let hearty_hearty_hearty = vec![240, 159, 146, 150]; - String::from_utf8_unchecked(hearty_hearty_hearty).as_str() - } { - "💖" => 1, - "what" => 2, - _ => 3, - } -} - // issue #12162 macro_rules! timed { ($name:expr, $body:expr $(,)?) => {{ diff --git a/src/tools/clippy/tests/ui/blocks_in_conditions.stderr b/src/tools/clippy/tests/ui/blocks_in_conditions.stderr index da21344a84289..282c42a98bfc2 100644 --- a/src/tools/clippy/tests/ui/blocks_in_conditions.stderr +++ b/src/tools/clippy/tests/ui/blocks_in_conditions.stderr @@ -34,24 +34,5 @@ LL | if true && x == 3 { 6 } else { 10 } = note: `-D clippy::nonminimal-bool` implied by `-D warnings` = help: to override `-D warnings` add `#[allow(clippy::nonminimal_bool)]` -error: in a `match` scrutinee, avoid complex blocks or closures with blocks; instead, move the block or closure higher and bind it with a `let` - --> tests/ui/blocks_in_conditions.rs:76:5 - | -LL | / match { -LL | | -LL | | let opt = Some(2); -LL | | opt -LL | | } { - | |_____^ - | -help: try - | -LL ~ let res = { -LL + -LL + let opt = Some(2); -LL + opt -LL ~ }; match res { - | - -error: aborting due to 4 previous errors +error: aborting due to 3 previous errors diff --git a/src/tools/clippy/tests/ui/blocks_in_conditions_2021.fixed b/src/tools/clippy/tests/ui/blocks_in_conditions_2021.fixed new file mode 100644 index 0000000000000..c7cc643dba679 --- /dev/null +++ b/src/tools/clippy/tests/ui/blocks_in_conditions_2021.fixed @@ -0,0 +1,25 @@ +//@edition: 2021 + +#![allow(clippy::let_and_return)] + +// issue #11814 +fn block_in_match_expr(num: i32) -> i32 { + let res = { + //~^ ERROR: in a `match` scrutinee, avoid complex blocks or closures with blocks; instead, move the block or closure higher and bind it with a `let` + let opt = Some(2); + opt + }; match res { + Some(0) => 1, + Some(n) => num * 2, + None => 0, + }; + + match unsafe { + let hearty_hearty_hearty = vec![240, 159, 146, 150]; + String::from_utf8_unchecked(hearty_hearty_hearty).as_str() + } { + "💖" => 1, + "what" => 2, + _ => 3, + } +} diff --git a/src/tools/clippy/tests/ui/blocks_in_conditions_2021.rs b/src/tools/clippy/tests/ui/blocks_in_conditions_2021.rs new file mode 100644 index 0000000000000..a911237f5f795 --- /dev/null +++ b/src/tools/clippy/tests/ui/blocks_in_conditions_2021.rs @@ -0,0 +1,25 @@ +//@edition: 2021 + +#![allow(clippy::let_and_return)] + +// issue #11814 +fn block_in_match_expr(num: i32) -> i32 { + match { + //~^ ERROR: in a `match` scrutinee, avoid complex blocks or closures with blocks; instead, move the block or closure higher and bind it with a `let` + let opt = Some(2); + opt + } { + Some(0) => 1, + Some(n) => num * 2, + None => 0, + }; + + match unsafe { + let hearty_hearty_hearty = vec![240, 159, 146, 150]; + String::from_utf8_unchecked(hearty_hearty_hearty).as_str() + } { + "💖" => 1, + "what" => 2, + _ => 3, + } +} diff --git a/src/tools/clippy/tests/ui/blocks_in_conditions_2021.stderr b/src/tools/clippy/tests/ui/blocks_in_conditions_2021.stderr new file mode 100644 index 0000000000000..497ee9d679dde --- /dev/null +++ b/src/tools/clippy/tests/ui/blocks_in_conditions_2021.stderr @@ -0,0 +1,23 @@ +error: in a `match` scrutinee, avoid complex blocks or closures with blocks; instead, move the block or closure higher and bind it with a `let` + --> tests/ui/blocks_in_conditions_2021.rs:7:5 + | +LL | / match { +LL | | +LL | | let opt = Some(2); +LL | | opt +LL | | } { + | |_____^ + | + = note: `-D clippy::blocks-in-conditions` implied by `-D warnings` + = help: to override `-D warnings` add `#[allow(clippy::blocks_in_conditions)]` +help: try + | +LL ~ let res = { +LL + +LL + let opt = Some(2); +LL + opt +LL ~ }; match res { + | + +error: aborting due to 1 previous error + diff --git a/src/tools/clippy/tests/ui/bool_to_int_with_if.fixed b/src/tools/clippy/tests/ui/bool_to_int_with_if.fixed index 0080801d46b78..ed6141244b409 100644 --- a/src/tools/clippy/tests/ui/bool_to_int_with_if.fixed +++ b/src/tools/clippy/tests/ui/bool_to_int_with_if.fixed @@ -117,3 +117,27 @@ fn if_let(a: Enum, b: Enum) { 0 }; } + +fn issue14628() { + macro_rules! mac { + (if $cond:expr, $then:expr, $else:expr) => { + if $cond { $then } else { $else } + }; + (zero) => { + 0 + }; + (one) => { + 1 + }; + } + + let _ = i32::from(dbg!(4 > 0)); + //~^ bool_to_int_with_if + + let _ = dbg!(i32::from(4 > 0)); + //~^ bool_to_int_with_if + + let _ = mac!(if 4 > 0, 1, 0); + let _ = if 4 > 0 { mac!(one) } else { 0 }; + let _ = if 4 > 0 { 1 } else { mac!(zero) }; +} diff --git a/src/tools/clippy/tests/ui/bool_to_int_with_if.rs b/src/tools/clippy/tests/ui/bool_to_int_with_if.rs index 72c7e2c71c560..3f1f1c766e460 100644 --- a/src/tools/clippy/tests/ui/bool_to_int_with_if.rs +++ b/src/tools/clippy/tests/ui/bool_to_int_with_if.rs @@ -157,3 +157,27 @@ fn if_let(a: Enum, b: Enum) { 0 }; } + +fn issue14628() { + macro_rules! mac { + (if $cond:expr, $then:expr, $else:expr) => { + if $cond { $then } else { $else } + }; + (zero) => { + 0 + }; + (one) => { + 1 + }; + } + + let _ = if dbg!(4 > 0) { 1 } else { 0 }; + //~^ bool_to_int_with_if + + let _ = dbg!(if 4 > 0 { 1 } else { 0 }); + //~^ bool_to_int_with_if + + let _ = mac!(if 4 > 0, 1, 0); + let _ = if 4 > 0 { mac!(one) } else { 0 }; + let _ = if 4 > 0 { 1 } else { mac!(zero) }; +} diff --git a/src/tools/clippy/tests/ui/bool_to_int_with_if.stderr b/src/tools/clippy/tests/ui/bool_to_int_with_if.stderr index 415e80f8d73d1..94089bc6dc8ef 100644 --- a/src/tools/clippy/tests/ui/bool_to_int_with_if.stderr +++ b/src/tools/clippy/tests/ui/bool_to_int_with_if.stderr @@ -114,5 +114,21 @@ LL | if a { 1 } else { 0 } | = note: `a as u8` or `a.into()` can also be valid options -error: aborting due to 9 previous errors +error: boolean to int conversion using if + --> tests/ui/bool_to_int_with_if.rs:174:13 + | +LL | let _ = if dbg!(4 > 0) { 1 } else { 0 }; + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: replace with from: `i32::from(dbg!(4 > 0))` + | + = note: `dbg!(4 > 0) as i32` or `dbg!(4 > 0).into()` can also be valid options + +error: boolean to int conversion using if + --> tests/ui/bool_to_int_with_if.rs:177:18 + | +LL | let _ = dbg!(if 4 > 0 { 1 } else { 0 }); + | ^^^^^^^^^^^^^^^^^^^^^^^^^ help: replace with from: `i32::from(4 > 0)` + | + = note: `(4 > 0) as i32` or `(4 > 0).into()` can also be valid options + +error: aborting due to 11 previous errors diff --git a/src/tools/clippy/tests/ui/borrow_as_ptr.fixed b/src/tools/clippy/tests/ui/borrow_as_ptr.fixed index 3dca06fce4b8d..3ba2eea59f0b0 100644 --- a/src/tools/clippy/tests/ui/borrow_as_ptr.fixed +++ b/src/tools/clippy/tests/ui/borrow_as_ptr.fixed @@ -29,3 +29,21 @@ fn issue_13882() { let _raw = (&raw mut x[1]).wrapping_offset(-1); //~^ borrow_as_ptr } + +fn implicit_cast() { + let val = 1; + let p: *const i32 = &raw const val; + //~^ borrow_as_ptr + + let mut val = 1; + let p: *mut i32 = &raw mut val; + //~^ borrow_as_ptr + + let mut val = 1; + // Only lint the leftmost argument, the rightmost is ref to a temporary + core::ptr::eq(&raw const val, &1); + //~^ borrow_as_ptr + + // Do not lint references to temporaries + core::ptr::eq(&0i32, &1i32); +} diff --git a/src/tools/clippy/tests/ui/borrow_as_ptr.rs b/src/tools/clippy/tests/ui/borrow_as_ptr.rs index 3559dc23d0185..8cdd0512da5f7 100644 --- a/src/tools/clippy/tests/ui/borrow_as_ptr.rs +++ b/src/tools/clippy/tests/ui/borrow_as_ptr.rs @@ -29,3 +29,21 @@ fn issue_13882() { let _raw = (&mut x[1] as *mut i32).wrapping_offset(-1); //~^ borrow_as_ptr } + +fn implicit_cast() { + let val = 1; + let p: *const i32 = &val; + //~^ borrow_as_ptr + + let mut val = 1; + let p: *mut i32 = &mut val; + //~^ borrow_as_ptr + + let mut val = 1; + // Only lint the leftmost argument, the rightmost is ref to a temporary + core::ptr::eq(&val, &1); + //~^ borrow_as_ptr + + // Do not lint references to temporaries + core::ptr::eq(&0i32, &1i32); +} diff --git a/src/tools/clippy/tests/ui/borrow_as_ptr.stderr b/src/tools/clippy/tests/ui/borrow_as_ptr.stderr index 4a9f2ed4aa003..b1fcce49403c8 100644 --- a/src/tools/clippy/tests/ui/borrow_as_ptr.stderr +++ b/src/tools/clippy/tests/ui/borrow_as_ptr.stderr @@ -25,5 +25,38 @@ error: borrow as raw pointer LL | let _raw = (&mut x[1] as *mut i32).wrapping_offset(-1); | ^^^^^^^^^^^^^^^^^^^^^ help: try: `&raw mut x[1]` -error: aborting due to 4 previous errors +error: implicit borrow as raw pointer + --> tests/ui/borrow_as_ptr.rs:35:25 + | +LL | let p: *const i32 = &val; + | ^^^^ + | +help: use a raw pointer instead + | +LL | let p: *const i32 = &raw const val; + | +++++++++ + +error: implicit borrow as raw pointer + --> tests/ui/borrow_as_ptr.rs:39:23 + | +LL | let p: *mut i32 = &mut val; + | ^^^^^^^^ + | +help: use a raw pointer instead + | +LL | let p: *mut i32 = &raw mut val; + | +++ + +error: implicit borrow as raw pointer + --> tests/ui/borrow_as_ptr.rs:44:19 + | +LL | core::ptr::eq(&val, &1); + | ^^^^ + | +help: use a raw pointer instead + | +LL | core::ptr::eq(&raw const val, &1); + | +++++++++ + +error: aborting due to 7 previous errors diff --git a/src/tools/clippy/tests/ui/borrow_deref_ref.fixed b/src/tools/clippy/tests/ui/borrow_deref_ref.fixed index 17c224f10bfea..765dd75fceb92 100644 --- a/src/tools/clippy/tests/ui/borrow_deref_ref.fixed +++ b/src/tools/clippy/tests/ui/borrow_deref_ref.fixed @@ -81,3 +81,46 @@ fn issue_13584() { let p = &raw const *s; let _ = p as *const i8; } + +mod issue_9905 { + use std::{fs, io}; + + pub enum File { + Stdio, + File(fs::File), + } + + impl io::Read for &'_ File { + fn read(&mut self, buf: &mut [u8]) -> io::Result { + match self { + File::Stdio => io::stdin().read(buf), + File::File(file) => (&*file).read(buf), + } + } + } +} + +mod issue_11346 { + struct Struct; + + impl Struct { + fn foo(self: &mut &Self) {} + } + + trait Trait { + fn bar(&mut self) {} + } + + impl Trait for &Struct {} + + fn bar() { + let s = &Struct; + (&*s).foo(); + (&*s).bar(); + + let mut s = &Struct; + s.foo(); // To avoid a warning about `s` not needing to be mutable + s.foo(); + //~^ borrow_deref_ref + } +} diff --git a/src/tools/clippy/tests/ui/borrow_deref_ref.rs b/src/tools/clippy/tests/ui/borrow_deref_ref.rs index 130ed2903dc61..8ee66bfa881ab 100644 --- a/src/tools/clippy/tests/ui/borrow_deref_ref.rs +++ b/src/tools/clippy/tests/ui/borrow_deref_ref.rs @@ -81,3 +81,46 @@ fn issue_13584() { let p = &raw const *s; let _ = p as *const i8; } + +mod issue_9905 { + use std::{fs, io}; + + pub enum File { + Stdio, + File(fs::File), + } + + impl io::Read for &'_ File { + fn read(&mut self, buf: &mut [u8]) -> io::Result { + match self { + File::Stdio => io::stdin().read(buf), + File::File(file) => (&*file).read(buf), + } + } + } +} + +mod issue_11346 { + struct Struct; + + impl Struct { + fn foo(self: &mut &Self) {} + } + + trait Trait { + fn bar(&mut self) {} + } + + impl Trait for &Struct {} + + fn bar() { + let s = &Struct; + (&*s).foo(); + (&*s).bar(); + + let mut s = &Struct; + s.foo(); // To avoid a warning about `s` not needing to be mutable + (&*s).foo(); + //~^ borrow_deref_ref + } +} diff --git a/src/tools/clippy/tests/ui/borrow_deref_ref.stderr b/src/tools/clippy/tests/ui/borrow_deref_ref.stderr index f5868aa874900..3d55da25b9b20 100644 --- a/src/tools/clippy/tests/ui/borrow_deref_ref.stderr +++ b/src/tools/clippy/tests/ui/borrow_deref_ref.stderr @@ -19,5 +19,11 @@ error: deref on an immutable reference LL | let addr_y = &&*x as *const _ as usize; // assert ok | ^^^ help: if you would like to reborrow, try removing `&*`: `x` -error: aborting due to 3 previous errors +error: deref on an immutable reference + --> tests/ui/borrow_deref_ref.rs:123:9 + | +LL | (&*s).foo(); + | ^^^^^ help: if you would like to reborrow, try removing `&*`: `s` + +error: aborting due to 4 previous errors diff --git a/src/tools/clippy/tests/ui/box_collection.rs b/src/tools/clippy/tests/ui/box_collection.rs index 0f7d3c74ddd07..7ae5446924fa0 100644 --- a/src/tools/clippy/tests/ui/box_collection.rs +++ b/src/tools/clippy/tests/ui/box_collection.rs @@ -1,4 +1,3 @@ -#![warn(clippy::all)] #![allow( clippy::boxed_local, clippy::needless_pass_by_value, diff --git a/src/tools/clippy/tests/ui/box_collection.stderr b/src/tools/clippy/tests/ui/box_collection.stderr index ebbc3d92b57f7..d730e2dcc1145 100644 --- a/src/tools/clippy/tests/ui/box_collection.stderr +++ b/src/tools/clippy/tests/ui/box_collection.stderr @@ -1,5 +1,5 @@ error: you seem to be trying to use `Box>`. Consider using just `Vec<..>` - --> tests/ui/box_collection.rs:21:15 + --> tests/ui/box_collection.rs:20:15 | LL | fn test1(foo: Box>) {} | ^^^^^^^^^^^^^^ @@ -9,7 +9,7 @@ LL | fn test1(foo: Box>) {} = help: to override `-D warnings` add `#[allow(clippy::box_collection)]` error: you seem to be trying to use `Box`. Consider using just `String` - --> tests/ui/box_collection.rs:29:15 + --> tests/ui/box_collection.rs:28:15 | LL | fn test3(foo: Box) {} | ^^^^^^^^^^^ @@ -17,7 +17,7 @@ LL | fn test3(foo: Box) {} = help: `String` is already on the heap, `Box` makes an extra allocation error: you seem to be trying to use `Box>`. Consider using just `HashMap<..>` - --> tests/ui/box_collection.rs:32:15 + --> tests/ui/box_collection.rs:31:15 | LL | fn test4(foo: Box>) {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -25,7 +25,7 @@ LL | fn test4(foo: Box>) {} = help: `HashMap<..>` is already on the heap, `Box>` makes an extra allocation error: you seem to be trying to use `Box>`. Consider using just `HashSet<..>` - --> tests/ui/box_collection.rs:35:15 + --> tests/ui/box_collection.rs:34:15 | LL | fn test5(foo: Box>) {} | ^^^^^^^^^^^^^^^^^ @@ -33,7 +33,7 @@ LL | fn test5(foo: Box>) {} = help: `HashSet<..>` is already on the heap, `Box>` makes an extra allocation error: you seem to be trying to use `Box>`. Consider using just `VecDeque<..>` - --> tests/ui/box_collection.rs:38:15 + --> tests/ui/box_collection.rs:37:15 | LL | fn test6(foo: Box>) {} | ^^^^^^^^^^^^^^^^^^ @@ -41,7 +41,7 @@ LL | fn test6(foo: Box>) {} = help: `VecDeque<..>` is already on the heap, `Box>` makes an extra allocation error: you seem to be trying to use `Box>`. Consider using just `LinkedList<..>` - --> tests/ui/box_collection.rs:41:15 + --> tests/ui/box_collection.rs:40:15 | LL | fn test7(foo: Box>) {} | ^^^^^^^^^^^^^^^^^^^^ @@ -49,7 +49,7 @@ LL | fn test7(foo: Box>) {} = help: `LinkedList<..>` is already on the heap, `Box>` makes an extra allocation error: you seem to be trying to use `Box>`. Consider using just `BTreeMap<..>` - --> tests/ui/box_collection.rs:44:15 + --> tests/ui/box_collection.rs:43:15 | LL | fn test8(foo: Box>) {} | ^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -57,7 +57,7 @@ LL | fn test8(foo: Box>) {} = help: `BTreeMap<..>` is already on the heap, `Box>` makes an extra allocation error: you seem to be trying to use `Box>`. Consider using just `BTreeSet<..>` - --> tests/ui/box_collection.rs:47:15 + --> tests/ui/box_collection.rs:46:15 | LL | fn test9(foo: Box>) {} | ^^^^^^^^^^^^^^^^^^ @@ -65,7 +65,7 @@ LL | fn test9(foo: Box>) {} = help: `BTreeSet<..>` is already on the heap, `Box>` makes an extra allocation error: you seem to be trying to use `Box>`. Consider using just `BinaryHeap<..>` - --> tests/ui/box_collection.rs:50:16 + --> tests/ui/box_collection.rs:49:16 | LL | fn test10(foo: Box>) {} | ^^^^^^^^^^^^^^^^^^^^ diff --git a/src/tools/clippy/tests/ui/case_sensitive_file_extension_comparisons.fixed b/src/tools/clippy/tests/ui/case_sensitive_file_extension_comparisons.fixed index bf7635fdf09bf..0c9d21243546d 100644 --- a/src/tools/clippy/tests/ui/case_sensitive_file_extension_comparisons.fixed +++ b/src/tools/clippy/tests/ui/case_sensitive_file_extension_comparisons.fixed @@ -1,5 +1,4 @@ #![warn(clippy::case_sensitive_file_extension_comparisons)] -#![allow(clippy::unnecessary_map_or)] use std::string::String; @@ -13,7 +12,7 @@ impl TestStruct { fn is_rust_file(filename: &str) -> bool { std::path::Path::new(filename) .extension() - .map_or(false, |ext| ext.eq_ignore_ascii_case("rs")) + .is_some_and(|ext| ext.eq_ignore_ascii_case("rs")) //~^ case_sensitive_file_extension_comparisons } @@ -21,18 +20,18 @@ fn main() { // std::string::String and &str should trigger the lint failure with .ext12 let _ = std::path::Path::new(&String::new()) .extension() - .map_or(false, |ext| ext.eq_ignore_ascii_case("ext12")); + .is_some_and(|ext| ext.eq_ignore_ascii_case("ext12")); //~^ case_sensitive_file_extension_comparisons let _ = std::path::Path::new("str") .extension() - .map_or(false, |ext| ext.eq_ignore_ascii_case("ext12")); + .is_some_and(|ext| ext.eq_ignore_ascii_case("ext12")); //~^ case_sensitive_file_extension_comparisons // The fixup should preserve the indentation level { let _ = std::path::Path::new("str") .extension() - .map_or(false, |ext| ext.eq_ignore_ascii_case("ext12")); + .is_some_and(|ext| ext.eq_ignore_ascii_case("ext12")); //~^ case_sensitive_file_extension_comparisons } @@ -42,11 +41,11 @@ fn main() { // std::string::String and &str should trigger the lint failure with .EXT12 let _ = std::path::Path::new(&String::new()) .extension() - .map_or(false, |ext| ext.eq_ignore_ascii_case("EXT12")); + .is_some_and(|ext| ext.eq_ignore_ascii_case("EXT12")); //~^ case_sensitive_file_extension_comparisons let _ = std::path::Path::new("str") .extension() - .map_or(false, |ext| ext.eq_ignore_ascii_case("EXT12")); + .is_some_and(|ext| ext.eq_ignore_ascii_case("EXT12")); //~^ case_sensitive_file_extension_comparisons // Should not trigger the lint failure because of the calls to to_lowercase and to_uppercase @@ -76,3 +75,11 @@ fn main() { let _ = "str".ends_with(".123"); TestStruct {}.ends_with(".123"); } + +#[clippy::msrv = "1.69"] +fn msrv_check() { + let _ = std::path::Path::new(&String::new()) + .extension() + .map_or(false, |ext| ext.eq_ignore_ascii_case("ext12")); + //~^ case_sensitive_file_extension_comparisons +} diff --git a/src/tools/clippy/tests/ui/case_sensitive_file_extension_comparisons.rs b/src/tools/clippy/tests/ui/case_sensitive_file_extension_comparisons.rs index 0c4070a42d4b0..f8a947aa827b9 100644 --- a/src/tools/clippy/tests/ui/case_sensitive_file_extension_comparisons.rs +++ b/src/tools/clippy/tests/ui/case_sensitive_file_extension_comparisons.rs @@ -1,5 +1,4 @@ #![warn(clippy::case_sensitive_file_extension_comparisons)] -#![allow(clippy::unnecessary_map_or)] use std::string::String; @@ -64,3 +63,9 @@ fn main() { let _ = "str".ends_with(".123"); TestStruct {}.ends_with(".123"); } + +#[clippy::msrv = "1.69"] +fn msrv_check() { + let _ = String::new().ends_with(".ext12"); + //~^ case_sensitive_file_extension_comparisons +} diff --git a/src/tools/clippy/tests/ui/case_sensitive_file_extension_comparisons.stderr b/src/tools/clippy/tests/ui/case_sensitive_file_extension_comparisons.stderr index e035534d26996..93bee8e766719 100644 --- a/src/tools/clippy/tests/ui/case_sensitive_file_extension_comparisons.stderr +++ b/src/tools/clippy/tests/ui/case_sensitive_file_extension_comparisons.stderr @@ -1,5 +1,5 @@ error: case-sensitive file extension comparison - --> tests/ui/case_sensitive_file_extension_comparisons.rs:14:5 + --> tests/ui/case_sensitive_file_extension_comparisons.rs:13:5 | LL | filename.ends_with(".rs") | ^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -11,11 +11,11 @@ help: use std::path::Path | LL ~ std::path::Path::new(filename) LL + .extension() -LL + .map_or(false, |ext| ext.eq_ignore_ascii_case("rs")) +LL + .is_some_and(|ext| ext.eq_ignore_ascii_case("rs")) | error: case-sensitive file extension comparison - --> tests/ui/case_sensitive_file_extension_comparisons.rs:20:13 + --> tests/ui/case_sensitive_file_extension_comparisons.rs:19:13 | LL | let _ = String::new().ends_with(".ext12"); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -25,11 +25,11 @@ help: use std::path::Path | LL ~ let _ = std::path::Path::new(&String::new()) LL + .extension() -LL ~ .map_or(false, |ext| ext.eq_ignore_ascii_case("ext12")); +LL ~ .is_some_and(|ext| ext.eq_ignore_ascii_case("ext12")); | error: case-sensitive file extension comparison - --> tests/ui/case_sensitive_file_extension_comparisons.rs:22:13 + --> tests/ui/case_sensitive_file_extension_comparisons.rs:21:13 | LL | let _ = "str".ends_with(".ext12"); | ^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -39,11 +39,11 @@ help: use std::path::Path | LL ~ let _ = std::path::Path::new("str") LL + .extension() -LL ~ .map_or(false, |ext| ext.eq_ignore_ascii_case("ext12")); +LL ~ .is_some_and(|ext| ext.eq_ignore_ascii_case("ext12")); | error: case-sensitive file extension comparison - --> tests/ui/case_sensitive_file_extension_comparisons.rs:27:17 + --> tests/ui/case_sensitive_file_extension_comparisons.rs:26:17 | LL | let _ = "str".ends_with(".ext12"); | ^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -53,11 +53,11 @@ help: use std::path::Path | LL ~ let _ = std::path::Path::new("str") LL + .extension() -LL ~ .map_or(false, |ext| ext.eq_ignore_ascii_case("ext12")); +LL ~ .is_some_and(|ext| ext.eq_ignore_ascii_case("ext12")); | error: case-sensitive file extension comparison - --> tests/ui/case_sensitive_file_extension_comparisons.rs:35:13 + --> tests/ui/case_sensitive_file_extension_comparisons.rs:34:13 | LL | let _ = String::new().ends_with(".EXT12"); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -67,11 +67,11 @@ help: use std::path::Path | LL ~ let _ = std::path::Path::new(&String::new()) LL + .extension() -LL ~ .map_or(false, |ext| ext.eq_ignore_ascii_case("EXT12")); +LL ~ .is_some_and(|ext| ext.eq_ignore_ascii_case("EXT12")); | error: case-sensitive file extension comparison - --> tests/ui/case_sensitive_file_extension_comparisons.rs:37:13 + --> tests/ui/case_sensitive_file_extension_comparisons.rs:36:13 | LL | let _ = "str".ends_with(".EXT12"); | ^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -81,8 +81,22 @@ help: use std::path::Path | LL ~ let _ = std::path::Path::new("str") LL + .extension() -LL ~ .map_or(false, |ext| ext.eq_ignore_ascii_case("EXT12")); +LL ~ .is_some_and(|ext| ext.eq_ignore_ascii_case("EXT12")); + | + +error: case-sensitive file extension comparison + --> tests/ui/case_sensitive_file_extension_comparisons.rs:69:13 + | +LL | let _ = String::new().ends_with(".ext12"); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = help: consider using a case-insensitive comparison instead +help: use std::path::Path + | +LL ~ let _ = std::path::Path::new(&String::new()) +LL + .extension() +LL ~ .map_or(false, |ext| ext.eq_ignore_ascii_case("ext12")); | -error: aborting due to 6 previous errors +error: aborting due to 7 previous errors diff --git a/src/tools/clippy/tests/ui/char_indices_as_byte_indices.fixed b/src/tools/clippy/tests/ui/char_indices_as_byte_indices.fixed new file mode 100644 index 0000000000000..04c8f6782c51e --- /dev/null +++ b/src/tools/clippy/tests/ui/char_indices_as_byte_indices.fixed @@ -0,0 +1,65 @@ +#![feature(round_char_boundary)] +#![warn(clippy::char_indices_as_byte_indices)] + +trait StrExt { + fn use_index(&self, _: usize); +} +impl StrExt for str { + fn use_index(&self, _: usize) {} +} + +fn bad(prim: &str, string: String) { + for (idx, _) in prim.char_indices() { + let _ = prim[..idx]; + //~^ char_indices_as_byte_indices + prim.split_at(idx); + //~^ char_indices_as_byte_indices + + // This won't panic, but it can still return a wrong substring + let _ = prim[..prim.floor_char_boundary(idx)]; + //~^ char_indices_as_byte_indices + + // can't use #[expect] here because the .fixed file will still have the attribute and create an + // unfulfilled expectation, but make sure lint level attributes work on the use expression: + #[allow(clippy::char_indices_as_byte_indices)] + let _ = prim[..idx]; + } + + for c in prim.char_indices() { + let _ = prim[..c.0]; + //~^ char_indices_as_byte_indices + prim.split_at(c.0); + //~^ char_indices_as_byte_indices + } + + for (idx, _) in string.char_indices() { + let _ = string[..idx]; + //~^ char_indices_as_byte_indices + string.split_at(idx); + //~^ char_indices_as_byte_indices + } +} + +fn good(prim: &str, prim2: &str) { + for (idx, _) in prim.chars().enumerate() { + // Indexing into a different string + let _ = prim2[..idx]; + + // Unknown use + std::hint::black_box(idx); + + // Method call to user defined extension trait + prim.use_index(idx); + + // str method taking a usize that doesn't represent a byte index + prim.splitn(idx, prim2); + } + + let mut string = "äa".to_owned(); + for (idx, _) in string.clone().chars().enumerate() { + // Even though the receiver is the same expression, it should not be treated as the same value. + string.clone().remove(idx); + } +} + +fn main() {} diff --git a/src/tools/clippy/tests/ui/char_indices_as_byte_indices.rs b/src/tools/clippy/tests/ui/char_indices_as_byte_indices.rs new file mode 100644 index 0000000000000..773a4fc65f12f --- /dev/null +++ b/src/tools/clippy/tests/ui/char_indices_as_byte_indices.rs @@ -0,0 +1,65 @@ +#![feature(round_char_boundary)] +#![warn(clippy::char_indices_as_byte_indices)] + +trait StrExt { + fn use_index(&self, _: usize); +} +impl StrExt for str { + fn use_index(&self, _: usize) {} +} + +fn bad(prim: &str, string: String) { + for (idx, _) in prim.chars().enumerate() { + let _ = prim[..idx]; + //~^ char_indices_as_byte_indices + prim.split_at(idx); + //~^ char_indices_as_byte_indices + + // This won't panic, but it can still return a wrong substring + let _ = prim[..prim.floor_char_boundary(idx)]; + //~^ char_indices_as_byte_indices + + // can't use #[expect] here because the .fixed file will still have the attribute and create an + // unfulfilled expectation, but make sure lint level attributes work on the use expression: + #[allow(clippy::char_indices_as_byte_indices)] + let _ = prim[..idx]; + } + + for c in prim.chars().enumerate() { + let _ = prim[..c.0]; + //~^ char_indices_as_byte_indices + prim.split_at(c.0); + //~^ char_indices_as_byte_indices + } + + for (idx, _) in string.chars().enumerate() { + let _ = string[..idx]; + //~^ char_indices_as_byte_indices + string.split_at(idx); + //~^ char_indices_as_byte_indices + } +} + +fn good(prim: &str, prim2: &str) { + for (idx, _) in prim.chars().enumerate() { + // Indexing into a different string + let _ = prim2[..idx]; + + // Unknown use + std::hint::black_box(idx); + + // Method call to user defined extension trait + prim.use_index(idx); + + // str method taking a usize that doesn't represent a byte index + prim.splitn(idx, prim2); + } + + let mut string = "äa".to_owned(); + for (idx, _) in string.clone().chars().enumerate() { + // Even though the receiver is the same expression, it should not be treated as the same value. + string.clone().remove(idx); + } +} + +fn main() {} diff --git a/src/tools/clippy/tests/ui/char_indices_as_byte_indices.stderr b/src/tools/clippy/tests/ui/char_indices_as_byte_indices.stderr new file mode 100644 index 0000000000000..e2b4c1db78cf4 --- /dev/null +++ b/src/tools/clippy/tests/ui/char_indices_as_byte_indices.stderr @@ -0,0 +1,130 @@ +error: indexing into a string with a character position where a byte index is expected + --> tests/ui/char_indices_as_byte_indices.rs:13:24 + | +LL | let _ = prim[..idx]; + | ^^^ + | + = note: a character can take up more than one byte, so they are not interchangeable +note: position comes from the enumerate iterator + --> tests/ui/char_indices_as_byte_indices.rs:12:10 + | +LL | for (idx, _) in prim.chars().enumerate() { + | ^^^ ^^^^^^^^^^^ + = note: `-D clippy::char-indices-as-byte-indices` implied by `-D warnings` + = help: to override `-D warnings` add `#[allow(clippy::char_indices_as_byte_indices)]` +help: consider using `.char_indices()` instead + | +LL - for (idx, _) in prim.chars().enumerate() { +LL + for (idx, _) in prim.char_indices() { + | + +error: passing a character position to a method that expects a byte index + --> tests/ui/char_indices_as_byte_indices.rs:15:23 + | +LL | prim.split_at(idx); + | ^^^ + | + = note: a character can take up more than one byte, so they are not interchangeable +note: position comes from the enumerate iterator + --> tests/ui/char_indices_as_byte_indices.rs:12:10 + | +LL | for (idx, _) in prim.chars().enumerate() { + | ^^^ ^^^^^^^^^^^ +help: consider using `.char_indices()` instead + | +LL - for (idx, _) in prim.chars().enumerate() { +LL + for (idx, _) in prim.char_indices() { + | + +error: passing a character position to a method that expects a byte index + --> tests/ui/char_indices_as_byte_indices.rs:19:49 + | +LL | let _ = prim[..prim.floor_char_boundary(idx)]; + | ^^^ + | + = note: a character can take up more than one byte, so they are not interchangeable +note: position comes from the enumerate iterator + --> tests/ui/char_indices_as_byte_indices.rs:12:10 + | +LL | for (idx, _) in prim.chars().enumerate() { + | ^^^ ^^^^^^^^^^^ +help: consider using `.char_indices()` instead + | +LL - for (idx, _) in prim.chars().enumerate() { +LL + for (idx, _) in prim.char_indices() { + | + +error: indexing into a string with a character position where a byte index is expected + --> tests/ui/char_indices_as_byte_indices.rs:29:24 + | +LL | let _ = prim[..c.0]; + | ^^^ + | + = note: a character can take up more than one byte, so they are not interchangeable +note: position comes from the enumerate iterator + --> tests/ui/char_indices_as_byte_indices.rs:28:9 + | +LL | for c in prim.chars().enumerate() { + | ^ ^^^^^^^^^^^ +help: consider using `.char_indices()` instead + | +LL - for c in prim.chars().enumerate() { +LL + for c in prim.char_indices() { + | + +error: passing a character position to a method that expects a byte index + --> tests/ui/char_indices_as_byte_indices.rs:31:23 + | +LL | prim.split_at(c.0); + | ^^^ + | + = note: a character can take up more than one byte, so they are not interchangeable +note: position comes from the enumerate iterator + --> tests/ui/char_indices_as_byte_indices.rs:28:9 + | +LL | for c in prim.chars().enumerate() { + | ^ ^^^^^^^^^^^ +help: consider using `.char_indices()` instead + | +LL - for c in prim.chars().enumerate() { +LL + for c in prim.char_indices() { + | + +error: indexing into a string with a character position where a byte index is expected + --> tests/ui/char_indices_as_byte_indices.rs:36:26 + | +LL | let _ = string[..idx]; + | ^^^ + | + = note: a character can take up more than one byte, so they are not interchangeable +note: position comes from the enumerate iterator + --> tests/ui/char_indices_as_byte_indices.rs:35:10 + | +LL | for (idx, _) in string.chars().enumerate() { + | ^^^ ^^^^^^^^^^^ +help: consider using `.char_indices()` instead + | +LL - for (idx, _) in string.chars().enumerate() { +LL + for (idx, _) in string.char_indices() { + | + +error: passing a character position to a method that expects a byte index + --> tests/ui/char_indices_as_byte_indices.rs:38:25 + | +LL | string.split_at(idx); + | ^^^ + | + = note: a character can take up more than one byte, so they are not interchangeable +note: position comes from the enumerate iterator + --> tests/ui/char_indices_as_byte_indices.rs:35:10 + | +LL | for (idx, _) in string.chars().enumerate() { + | ^^^ ^^^^^^^^^^^ +help: consider using `.char_indices()` instead + | +LL - for (idx, _) in string.chars().enumerate() { +LL + for (idx, _) in string.char_indices() { + | + +error: aborting due to 7 previous errors + diff --git a/src/tools/clippy/tests/ui/checked_unwrap/simple_conditionals.stderr b/src/tools/clippy/tests/ui/checked_unwrap/simple_conditionals.stderr index c17eaef2326b3..ad3c420270c14 100644 --- a/src/tools/clippy/tests/ui/checked_unwrap/simple_conditionals.stderr +++ b/src/tools/clippy/tests/ui/checked_unwrap/simple_conditionals.stderr @@ -236,7 +236,7 @@ LL | if result.is_ok() { LL | result.as_mut().unwrap(); | ^^^^^^^^^^^^^^^^^^^^^^^^ -error: creating a shared reference to mutable static is discouraged +error: creating a shared reference to mutable static --> tests/ui/checked_unwrap/simple_conditionals.rs:183:12 | LL | if X.is_some() { @@ -244,8 +244,7 @@ LL | if X.is_some() { | = note: for more information, see = note: shared references to mutable statics are dangerous; it's undefined behavior if the static is mutated or if a mutable reference is created for it while the shared reference lives - = note: `-D static-mut-refs` implied by `-D warnings` - = help: to override `-D warnings` add `#[allow(static_mut_refs)]` + = note: `#[deny(static_mut_refs)]` on by default error: aborting due to 26 previous errors diff --git a/src/tools/clippy/tests/ui/cmp_owned/with_suggestion.fixed b/src/tools/clippy/tests/ui/cmp_owned/with_suggestion.fixed index eb01633a25fd5..85d0991bef05d 100644 --- a/src/tools/clippy/tests/ui/cmp_owned/with_suggestion.fixed +++ b/src/tools/clippy/tests/ui/cmp_owned/with_suggestion.fixed @@ -74,3 +74,12 @@ impl ToOwned for Baz { Baz } } + +fn issue_8103() { + let foo1 = String::from("foo"); + let _ = foo1 == "foo"; + //~^ cmp_owned + let foo2 = "foo"; + let _ = foo1 == foo2; + //~^ cmp_owned +} diff --git a/src/tools/clippy/tests/ui/cmp_owned/with_suggestion.rs b/src/tools/clippy/tests/ui/cmp_owned/with_suggestion.rs index 82409f27b129c..2393757d76f2b 100644 --- a/src/tools/clippy/tests/ui/cmp_owned/with_suggestion.rs +++ b/src/tools/clippy/tests/ui/cmp_owned/with_suggestion.rs @@ -74,3 +74,12 @@ impl ToOwned for Baz { Baz } } + +fn issue_8103() { + let foo1 = String::from("foo"); + let _ = foo1 == "foo".to_owned(); + //~^ cmp_owned + let foo2 = "foo"; + let _ = foo1 == foo2.to_owned(); + //~^ cmp_owned +} diff --git a/src/tools/clippy/tests/ui/cmp_owned/with_suggestion.stderr b/src/tools/clippy/tests/ui/cmp_owned/with_suggestion.stderr index ca2ab44847274..dd9ffa70897ab 100644 --- a/src/tools/clippy/tests/ui/cmp_owned/with_suggestion.stderr +++ b/src/tools/clippy/tests/ui/cmp_owned/with_suggestion.stderr @@ -37,5 +37,17 @@ error: this creates an owned instance just for comparison LL | "abc".chars().filter(|c| c.to_owned() != 'X'); | ^^^^^^^^^^^^ help: try: `*c` -error: aborting due to 6 previous errors +error: this creates an owned instance just for comparison + --> tests/ui/cmp_owned/with_suggestion.rs:80:21 + | +LL | let _ = foo1 == "foo".to_owned(); + | ^^^^^^^^^^^^^^^^ help: try: `"foo"` + +error: this creates an owned instance just for comparison + --> tests/ui/cmp_owned/with_suggestion.rs:83:21 + | +LL | let _ = foo1 == foo2.to_owned(); + | ^^^^^^^^^^^^^^^ help: try: `foo2` + +error: aborting due to 8 previous errors diff --git a/src/tools/clippy/tests/ui/cognitive_complexity.rs b/src/tools/clippy/tests/ui/cognitive_complexity.rs index 2dbec955f63fe..8080c6775e0be 100644 --- a/src/tools/clippy/tests/ui/cognitive_complexity.rs +++ b/src/tools/clippy/tests/ui/cognitive_complexity.rs @@ -1,6 +1,11 @@ -#![allow(clippy::all)] #![warn(clippy::cognitive_complexity)] -#![allow(unused, unused_crate_dependencies)] +#![allow( + clippy::eq_op, + clippy::needless_borrows_for_generic_args, + clippy::needless_return, + clippy::nonminimal_bool, + clippy::uninlined_format_args +)] #[rustfmt::skip] fn main() { @@ -448,3 +453,22 @@ mod issue9300 { } } } + +#[clippy::cognitive_complexity = "1"] +mod issue14422 { + fn foo() { + //~^ cognitive_complexity + for _ in 0..10 { + println!("hello there"); + } + } + + fn bar() { + //~^ cognitive_complexity + for _ in 0..10 { + println!("hello there"); + } + return; + return; + } +} diff --git a/src/tools/clippy/tests/ui/cognitive_complexity.stderr b/src/tools/clippy/tests/ui/cognitive_complexity.stderr index 52607b87c60ef..67ef4e5655bd6 100644 --- a/src/tools/clippy/tests/ui/cognitive_complexity.stderr +++ b/src/tools/clippy/tests/ui/cognitive_complexity.stderr @@ -1,5 +1,5 @@ error: the function has a cognitive complexity of (28/25) - --> tests/ui/cognitive_complexity.rs:6:4 + --> tests/ui/cognitive_complexity.rs:11:4 | LL | fn main() { | ^^^^ @@ -9,7 +9,7 @@ LL | fn main() { = help: to override `-D warnings` add `#[allow(clippy::cognitive_complexity)]` error: the function has a cognitive complexity of (7/1) - --> tests/ui/cognitive_complexity.rs:93:4 + --> tests/ui/cognitive_complexity.rs:98:4 | LL | fn kaboom() { | ^^^^^^ @@ -17,7 +17,7 @@ LL | fn kaboom() { = help: you could split it up into multiple smaller functions error: the function has a cognitive complexity of (2/1) - --> tests/ui/cognitive_complexity.rs:153:4 + --> tests/ui/cognitive_complexity.rs:158:4 | LL | fn baa() { | ^^^ @@ -25,7 +25,7 @@ LL | fn baa() { = help: you could split it up into multiple smaller functions error: the function has a cognitive complexity of (2/1) - --> tests/ui/cognitive_complexity.rs:156:13 + --> tests/ui/cognitive_complexity.rs:161:13 | LL | let x = || match 99 { | ^^ @@ -33,7 +33,7 @@ LL | let x = || match 99 { = help: you could split it up into multiple smaller functions error: the function has a cognitive complexity of (2/1) - --> tests/ui/cognitive_complexity.rs:174:4 + --> tests/ui/cognitive_complexity.rs:179:4 | LL | fn bar() { | ^^^ @@ -41,7 +41,7 @@ LL | fn bar() { = help: you could split it up into multiple smaller functions error: the function has a cognitive complexity of (2/1) - --> tests/ui/cognitive_complexity.rs:187:4 + --> tests/ui/cognitive_complexity.rs:192:4 | LL | fn dont_warn_on_tests() { | ^^^^^^^^^^^^^^^^^^ @@ -49,7 +49,7 @@ LL | fn dont_warn_on_tests() { = help: you could split it up into multiple smaller functions error: the function has a cognitive complexity of (2/1) - --> tests/ui/cognitive_complexity.rs:197:4 + --> tests/ui/cognitive_complexity.rs:202:4 | LL | fn barr() { | ^^^^ @@ -57,7 +57,7 @@ LL | fn barr() { = help: you could split it up into multiple smaller functions error: the function has a cognitive complexity of (3/1) - --> tests/ui/cognitive_complexity.rs:209:4 + --> tests/ui/cognitive_complexity.rs:214:4 | LL | fn barr2() { | ^^^^^ @@ -65,7 +65,7 @@ LL | fn barr2() { = help: you could split it up into multiple smaller functions error: the function has a cognitive complexity of (2/1) - --> tests/ui/cognitive_complexity.rs:227:4 + --> tests/ui/cognitive_complexity.rs:232:4 | LL | fn barrr() { | ^^^^^ @@ -73,7 +73,7 @@ LL | fn barrr() { = help: you could split it up into multiple smaller functions error: the function has a cognitive complexity of (3/1) - --> tests/ui/cognitive_complexity.rs:239:4 + --> tests/ui/cognitive_complexity.rs:244:4 | LL | fn barrr2() { | ^^^^^^ @@ -81,7 +81,7 @@ LL | fn barrr2() { = help: you could split it up into multiple smaller functions error: the function has a cognitive complexity of (2/1) - --> tests/ui/cognitive_complexity.rs:257:4 + --> tests/ui/cognitive_complexity.rs:262:4 | LL | fn barrrr() { | ^^^^^^ @@ -89,7 +89,7 @@ LL | fn barrrr() { = help: you could split it up into multiple smaller functions error: the function has a cognitive complexity of (3/1) - --> tests/ui/cognitive_complexity.rs:269:4 + --> tests/ui/cognitive_complexity.rs:274:4 | LL | fn barrrr2() { | ^^^^^^^ @@ -97,7 +97,7 @@ LL | fn barrrr2() { = help: you could split it up into multiple smaller functions error: the function has a cognitive complexity of (2/1) - --> tests/ui/cognitive_complexity.rs:287:4 + --> tests/ui/cognitive_complexity.rs:292:4 | LL | fn cake() { | ^^^^ @@ -105,7 +105,7 @@ LL | fn cake() { = help: you could split it up into multiple smaller functions error: the function has a cognitive complexity of (4/1) - --> tests/ui/cognitive_complexity.rs:299:8 + --> tests/ui/cognitive_complexity.rs:304:8 | LL | pub fn read_file(input_path: &str) -> String { | ^^^^^^^^^ @@ -113,7 +113,7 @@ LL | pub fn read_file(input_path: &str) -> String { = help: you could split it up into multiple smaller functions error: the function has a cognitive complexity of (2/1) - --> tests/ui/cognitive_complexity.rs:332:4 + --> tests/ui/cognitive_complexity.rs:337:4 | LL | fn void(void: Void) { | ^^^^ @@ -121,7 +121,7 @@ LL | fn void(void: Void) { = help: you could split it up into multiple smaller functions error: the function has a cognitive complexity of (8/1) - --> tests/ui/cognitive_complexity.rs:385:4 + --> tests/ui/cognitive_complexity.rs:390:4 | LL | fn early_ret() -> i32 { | ^^^^^^^^^ @@ -129,7 +129,7 @@ LL | fn early_ret() -> i32 { = help: you could split it up into multiple smaller functions error: the function has a cognitive complexity of (2/1) - --> tests/ui/cognitive_complexity.rs:408:13 + --> tests/ui/cognitive_complexity.rs:413:13 | LL | let x = |a: i32, b: i32| -> i32 { | ^^^^^^^^^^^^^^^^ @@ -137,7 +137,7 @@ LL | let x = |a: i32, b: i32| -> i32 { = help: you could split it up into multiple smaller functions error: the function has a cognitive complexity of (2/1) - --> tests/ui/cognitive_complexity.rs:423:8 + --> tests/ui/cognitive_complexity.rs:428:8 | LL | fn moo(&self) { | ^^^ @@ -145,7 +145,7 @@ LL | fn moo(&self) { = help: you could split it up into multiple smaller functions error: the function has a cognitive complexity of (2/1) - --> tests/ui/cognitive_complexity.rs:434:14 + --> tests/ui/cognitive_complexity.rs:439:14 | LL | async fn a() { | ^ @@ -153,12 +153,28 @@ LL | async fn a() { = help: you could split it up into multiple smaller functions error: the function has a cognitive complexity of (2/1) - --> tests/ui/cognitive_complexity.rs:443:22 + --> tests/ui/cognitive_complexity.rs:448:22 | LL | pub async fn async_method() { | ^^^^^^^^^^^^ | = help: you could split it up into multiple smaller functions -error: aborting due to 20 previous errors +error: the function has a cognitive complexity of (2/1) + --> tests/ui/cognitive_complexity.rs:459:8 + | +LL | fn foo() { + | ^^^ + | + = help: you could split it up into multiple smaller functions + +error: the function has a cognitive complexity of (2/1) + --> tests/ui/cognitive_complexity.rs:466:8 + | +LL | fn bar() { + | ^^^ + | + = help: you could split it up into multiple smaller functions + +error: aborting due to 22 previous errors diff --git a/src/tools/clippy/tests/ui/collapsible_if.fixed b/src/tools/clippy/tests/ui/collapsible_if.fixed index 6e994018aef01..e1ceb04f9cb89 100644 --- a/src/tools/clippy/tests/ui/collapsible_if.fixed +++ b/src/tools/clippy/tests/ui/collapsible_if.fixed @@ -12,34 +12,40 @@ fn main() { let x = "hello"; let y = "world"; - if x == "hello" && y == "world" { - println!("Hello world!"); - } + if x == "hello" + && y == "world" { + println!("Hello world!"); + } //~^^^^^ collapsible_if - if (x == "hello" || x == "world") && (y == "world" || y == "hello") { - println!("Hello world!"); - } + if (x == "hello" || x == "world") + && (y == "world" || y == "hello") { + println!("Hello world!"); + } //~^^^^^ collapsible_if - if x == "hello" && x == "world" && (y == "world" || y == "hello") { - println!("Hello world!"); - } + if x == "hello" && x == "world" + && (y == "world" || y == "hello") { + println!("Hello world!"); + } //~^^^^^ collapsible_if - if (x == "hello" || x == "world") && y == "world" && y == "hello" { - println!("Hello world!"); - } + if (x == "hello" || x == "world") + && y == "world" && y == "hello" { + println!("Hello world!"); + } //~^^^^^ collapsible_if - if x == "hello" && x == "world" && y == "world" && y == "hello" { - println!("Hello world!"); - } + if x == "hello" && x == "world" + && y == "world" && y == "hello" { + println!("Hello world!"); + } //~^^^^^ collapsible_if - if 42 == 1337 && 'a' != 'A' { - println!("world!") - } + if 42 == 1337 + && 'a' != 'A' { + println!("world!") + } //~^^^^^ collapsible_if // Works because any if with an else statement cannot be collapsed. @@ -71,37 +77,17 @@ fn main() { assert!(true); // assert! is just an `if` } - - // The following tests check for the fix of https://github.com/rust-lang/rust-clippy/issues/798 - if x == "hello" {// Not collapsible - if y == "world" { - println!("Hello world!"); - } - } - - if x == "hello" { // Not collapsible - if y == "world" { - println!("Hello world!"); - } - } - - if x == "hello" { - // Not collapsible - if y == "world" { + if x == "hello" + && y == "world" { // Collapsible println!("Hello world!"); } - } - - if x == "hello" && y == "world" { // Collapsible - println!("Hello world!"); - } //~^^^^^ collapsible_if if x == "hello" { print!("Hello "); } else { // Not collapsible - if y == "world" { + if let Some(42) = Some(42) { println!("world!") } } @@ -110,21 +96,8 @@ fn main() { print!("Hello "); } else { // Not collapsible - if let Some(42) = Some(42) { - println!("world!") - } - } - - if x == "hello" { - /* Not collapsible */ - if y == "world" { - println!("Hello world!"); - } - } - - if x == "hello" { /* Not collapsible */ if y == "world" { - println!("Hello world!"); + println!("world!") } } @@ -150,11 +123,13 @@ fn main() { } // Fix #5962 - if matches!(true, true) && matches!(true, true) {} + if matches!(true, true) + && matches!(true, true) {} //~^^^ collapsible_if // Issue #9375 - if matches!(true, true) && truth() && matches!(true, true) {} + if matches!(true, true) && truth() + && matches!(true, true) {} //~^^^ collapsible_if if true { @@ -163,4 +138,27 @@ fn main() { println!("Hello world!"); } } + + if true + && true { + println!("No comment, linted"); + } + //~^^^^^ collapsible_if + + if true { + // Do not collapse because of this comment + if true { + println!("Hello world!"); + } + } +} + +#[rustfmt::skip] +fn layout_check() -> u32 { + if true + && true { + } + // This is a comment, do not collapse code to it + ; 3 + //~^^^^^ collapsible_if } diff --git a/src/tools/clippy/tests/ui/collapsible_if.rs b/src/tools/clippy/tests/ui/collapsible_if.rs index 5cf591a658c7a..0b996dca22e85 100644 --- a/src/tools/clippy/tests/ui/collapsible_if.rs +++ b/src/tools/clippy/tests/ui/collapsible_if.rs @@ -83,27 +83,6 @@ fn main() { assert!(true); // assert! is just an `if` } - - // The following tests check for the fix of https://github.com/rust-lang/rust-clippy/issues/798 - if x == "hello" {// Not collapsible - if y == "world" { - println!("Hello world!"); - } - } - - if x == "hello" { // Not collapsible - if y == "world" { - println!("Hello world!"); - } - } - - if x == "hello" { - // Not collapsible - if y == "world" { - println!("Hello world!"); - } - } - if x == "hello" { if y == "world" { // Collapsible println!("Hello world!"); @@ -115,7 +94,7 @@ fn main() { print!("Hello "); } else { // Not collapsible - if y == "world" { + if let Some(42) = Some(42) { println!("world!") } } @@ -124,21 +103,8 @@ fn main() { print!("Hello "); } else { // Not collapsible - if let Some(42) = Some(42) { - println!("world!") - } - } - - if x == "hello" { - /* Not collapsible */ if y == "world" { - println!("Hello world!"); - } - } - - if x == "hello" { /* Not collapsible */ - if y == "world" { - println!("Hello world!"); + println!("world!") } } @@ -181,4 +147,28 @@ fn main() { println!("Hello world!"); } } + + if true { + if true { + println!("No comment, linted"); + } + } + //~^^^^^ collapsible_if + + if true { + // Do not collapse because of this comment + if true { + println!("Hello world!"); + } + } +} + +#[rustfmt::skip] +fn layout_check() -> u32 { + if true { + if true { + } + // This is a comment, do not collapse code to it + }; 3 + //~^^^^^ collapsible_if } diff --git a/src/tools/clippy/tests/ui/collapsible_if.stderr b/src/tools/clippy/tests/ui/collapsible_if.stderr index 3cc3fe5534f25..532811462393b 100644 --- a/src/tools/clippy/tests/ui/collapsible_if.stderr +++ b/src/tools/clippy/tests/ui/collapsible_if.stderr @@ -12,9 +12,10 @@ LL | | } = help: to override `-D warnings` add `#[allow(clippy::collapsible_if)]` help: collapse nested if block | -LL ~ if x == "hello" && y == "world" { -LL + println!("Hello world!"); -LL + } +LL ~ if x == "hello" +LL ~ && y == "world" { +LL | println!("Hello world!"); +LL ~ } | error: this `if` statement can be collapsed @@ -29,9 +30,10 @@ LL | | } | help: collapse nested if block | -LL ~ if (x == "hello" || x == "world") && (y == "world" || y == "hello") { -LL + println!("Hello world!"); -LL + } +LL ~ if (x == "hello" || x == "world") { +LL ~ && (y == "world" || y == "hello") { +LL | println!("Hello world!"); +LL ~ } | error: this `if` statement can be collapsed @@ -46,9 +48,10 @@ LL | | } | help: collapse nested if block | -LL ~ if x == "hello" && x == "world" && (y == "world" || y == "hello") { -LL + println!("Hello world!"); -LL + } +LL ~ if x == "hello" && x == "world" +LL ~ && (y == "world" || y == "hello") { +LL | println!("Hello world!"); +LL ~ } | error: this `if` statement can be collapsed @@ -63,9 +66,10 @@ LL | | } | help: collapse nested if block | -LL ~ if (x == "hello" || x == "world") && y == "world" && y == "hello" { -LL + println!("Hello world!"); -LL + } +LL ~ if (x == "hello" || x == "world") { +LL ~ && y == "world" && y == "hello" { +LL | println!("Hello world!"); +LL ~ } | error: this `if` statement can be collapsed @@ -80,9 +84,10 @@ LL | | } | help: collapse nested if block | -LL ~ if x == "hello" && x == "world" && y == "world" && y == "hello" { -LL + println!("Hello world!"); -LL + } +LL ~ if x == "hello" && x == "world" +LL ~ && y == "world" && y == "hello" { +LL | println!("Hello world!"); +LL ~ } | error: this `if` statement can be collapsed @@ -97,13 +102,14 @@ LL | | } | help: collapse nested if block | -LL ~ if 42 == 1337 && 'a' != 'A' { -LL + println!("world!") -LL + } +LL ~ if 42 == 1337 +LL ~ && 'a' != 'A' { +LL | println!("world!") +LL ~ } | error: this `if` statement can be collapsed - --> tests/ui/collapsible_if.rs:107:5 + --> tests/ui/collapsible_if.rs:86:5 | LL | / if x == "hello" { LL | | if y == "world" { // Collapsible @@ -114,26 +120,75 @@ LL | | } | help: collapse nested if block | -LL ~ if x == "hello" && y == "world" { // Collapsible -LL + println!("Hello world!"); -LL + } +LL ~ if x == "hello" +LL ~ && y == "world" { // Collapsible +LL | println!("Hello world!"); +LL ~ } | error: this `if` statement can be collapsed - --> tests/ui/collapsible_if.rs:167:5 + --> tests/ui/collapsible_if.rs:133:5 | LL | / if matches!(true, true) { LL | | if matches!(true, true) {} LL | | } - | |_____^ help: collapse nested if block: `if matches!(true, true) && matches!(true, true) {}` + | |_____^ + | +help: collapse nested if block + | +LL ~ if matches!(true, true) +LL ~ && matches!(true, true) {} + | error: this `if` statement can be collapsed - --> tests/ui/collapsible_if.rs:173:5 + --> tests/ui/collapsible_if.rs:139:5 | LL | / if matches!(true, true) && truth() { LL | | if matches!(true, true) {} LL | | } - | |_____^ help: collapse nested if block: `if matches!(true, true) && truth() && matches!(true, true) {}` + | |_____^ + | +help: collapse nested if block + | +LL ~ if matches!(true, true) && truth() +LL ~ && matches!(true, true) {} + | + +error: this `if` statement can be collapsed + --> tests/ui/collapsible_if.rs:151:5 + | +LL | / if true { +LL | | if true { +LL | | println!("No comment, linted"); +LL | | } +LL | | } + | |_____^ + | +help: collapse nested if block + | +LL ~ if true +LL ~ && true { +LL | println!("No comment, linted"); +LL ~ } + | + +error: this `if` statement can be collapsed + --> tests/ui/collapsible_if.rs:168:5 + | +LL | / if true { +LL | | if true { +... | +LL | | }; 3 + | |_____^ + | +help: collapse nested if block + | +LL ~ if true +LL ~ && true { +LL | } +LL | // This is a comment, do not collapse code to it +LL ~ ; 3 + | -error: aborting due to 9 previous errors +error: aborting due to 11 previous errors diff --git a/src/tools/clippy/tests/ui/collapsible_if_let_chains.fixed b/src/tools/clippy/tests/ui/collapsible_if_let_chains.fixed new file mode 100644 index 0000000000000..3dd9498a4c9f9 --- /dev/null +++ b/src/tools/clippy/tests/ui/collapsible_if_let_chains.fixed @@ -0,0 +1,29 @@ +#![feature(let_chains)] +#![warn(clippy::collapsible_if)] + +fn main() { + if let Some(a) = Some(3) { + // with comment, so do not lint + if let Some(b) = Some(4) { + let _ = a + b; + } + } + + if let Some(a) = Some(3) + && let Some(b) = Some(4) { + let _ = a + b; + } + //~^^^^^ collapsible_if + + if let Some(a) = Some(3) + && a + 1 == 4 { + let _ = a; + } + //~^^^^^ collapsible_if + + if Some(3) == Some(4).map(|x| x - 1) + && let Some(b) = Some(4) { + let _ = b; + } + //~^^^^^ collapsible_if +} diff --git a/src/tools/clippy/tests/ui/collapsible_if_let_chains.rs b/src/tools/clippy/tests/ui/collapsible_if_let_chains.rs new file mode 100644 index 0000000000000..064b9a0be4847 --- /dev/null +++ b/src/tools/clippy/tests/ui/collapsible_if_let_chains.rs @@ -0,0 +1,32 @@ +#![feature(let_chains)] +#![warn(clippy::collapsible_if)] + +fn main() { + if let Some(a) = Some(3) { + // with comment, so do not lint + if let Some(b) = Some(4) { + let _ = a + b; + } + } + + if let Some(a) = Some(3) { + if let Some(b) = Some(4) { + let _ = a + b; + } + } + //~^^^^^ collapsible_if + + if let Some(a) = Some(3) { + if a + 1 == 4 { + let _ = a; + } + } + //~^^^^^ collapsible_if + + if Some(3) == Some(4).map(|x| x - 1) { + if let Some(b) = Some(4) { + let _ = b; + } + } + //~^^^^^ collapsible_if +} diff --git a/src/tools/clippy/tests/ui/collapsible_if_let_chains.stderr b/src/tools/clippy/tests/ui/collapsible_if_let_chains.stderr new file mode 100644 index 0000000000000..64a88114c47a3 --- /dev/null +++ b/src/tools/clippy/tests/ui/collapsible_if_let_chains.stderr @@ -0,0 +1,58 @@ +error: this `if` statement can be collapsed + --> tests/ui/collapsible_if_let_chains.rs:12:5 + | +LL | / if let Some(a) = Some(3) { +LL | | if let Some(b) = Some(4) { +LL | | let _ = a + b; +LL | | } +LL | | } + | |_____^ + | + = note: `-D clippy::collapsible-if` implied by `-D warnings` + = help: to override `-D warnings` add `#[allow(clippy::collapsible_if)]` +help: collapse nested if block + | +LL ~ if let Some(a) = Some(3) +LL ~ && let Some(b) = Some(4) { +LL | let _ = a + b; +LL ~ } + | + +error: this `if` statement can be collapsed + --> tests/ui/collapsible_if_let_chains.rs:19:5 + | +LL | / if let Some(a) = Some(3) { +LL | | if a + 1 == 4 { +LL | | let _ = a; +LL | | } +LL | | } + | |_____^ + | +help: collapse nested if block + | +LL ~ if let Some(a) = Some(3) +LL ~ && a + 1 == 4 { +LL | let _ = a; +LL ~ } + | + +error: this `if` statement can be collapsed + --> tests/ui/collapsible_if_let_chains.rs:26:5 + | +LL | / if Some(3) == Some(4).map(|x| x - 1) { +LL | | if let Some(b) = Some(4) { +LL | | let _ = b; +LL | | } +LL | | } + | |_____^ + | +help: collapse nested if block + | +LL ~ if Some(3) == Some(4).map(|x| x - 1) +LL ~ && let Some(b) = Some(4) { +LL | let _ = b; +LL ~ } + | + +error: aborting due to 3 previous errors + diff --git a/src/tools/clippy/tests/ui/collapsible_match.rs b/src/tools/clippy/tests/ui/collapsible_match.rs index 796cabd4b669a..55ef55844957a 100644 --- a/src/tools/clippy/tests/ui/collapsible_match.rs +++ b/src/tools/clippy/tests/ui/collapsible_match.rs @@ -303,6 +303,18 @@ pub fn test_2(x: Issue9647) { } } +// https://github.com/rust-lang/rust-clippy/issues/14281 +fn lint_emitted_at_right_node(opt: Option>) { + let n = match opt { + #[expect(clippy::collapsible_match)] + Some(n) => match n { + Ok(n) => n, + _ => return, + }, + None => return, + }; +} + fn make() -> T { unimplemented!() } diff --git a/src/tools/clippy/tests/ui/crashes/enum-glob-import-crate.rs b/src/tools/clippy/tests/ui/crashes/enum-glob-import-crate.rs index bbcd599f6d0b8..3352e822ef848 100644 --- a/src/tools/clippy/tests/ui/crashes/enum-glob-import-crate.rs +++ b/src/tools/clippy/tests/ui/crashes/enum-glob-import-crate.rs @@ -1,8 +1,5 @@ //@ check-pass -#![deny(clippy::all)] -#![allow(unused_imports)] - use std::*; fn main() {} diff --git a/src/tools/clippy/tests/ui/crashes/ice-11230.fixed b/src/tools/clippy/tests/ui/crashes/ice-11230.fixed index 181e1ebbe5a3a..c49a419f0d4ba 100644 --- a/src/tools/clippy/tests/ui/crashes/ice-11230.fixed +++ b/src/tools/clippy/tests/ui/crashes/ice-11230.fixed @@ -12,7 +12,7 @@ fn main() { // needless_collect trait Helper<'a>: Iterator {} +// Should not be linted because we have no idea whether the iterator has side effects fn x(w: &mut dyn for<'a> Helper<'a>) { - w.next().is_none(); - //~^ needless_collect + w.collect::>().is_empty(); } diff --git a/src/tools/clippy/tests/ui/crashes/ice-11230.rs b/src/tools/clippy/tests/ui/crashes/ice-11230.rs index fb05dc781bc0d..f66b7e961c889 100644 --- a/src/tools/clippy/tests/ui/crashes/ice-11230.rs +++ b/src/tools/clippy/tests/ui/crashes/ice-11230.rs @@ -12,7 +12,7 @@ fn main() { // needless_collect trait Helper<'a>: Iterator {} +// Should not be linted because we have no idea whether the iterator has side effects fn x(w: &mut dyn for<'a> Helper<'a>) { w.collect::>().is_empty(); - //~^ needless_collect } diff --git a/src/tools/clippy/tests/ui/crashes/ice-11230.stderr b/src/tools/clippy/tests/ui/crashes/ice-11230.stderr index b4a3f67081aec..91d59121ac4ed 100644 --- a/src/tools/clippy/tests/ui/crashes/ice-11230.stderr +++ b/src/tools/clippy/tests/ui/crashes/ice-11230.stderr @@ -7,14 +7,5 @@ LL | for v in A.iter() {} = note: `-D clippy::explicit-iter-loop` implied by `-D warnings` = help: to override `-D warnings` add `#[allow(clippy::explicit_iter_loop)]` -error: avoid using `collect()` when not needed - --> tests/ui/crashes/ice-11230.rs:16:7 - | -LL | w.collect::>().is_empty(); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: replace with: `next().is_none()` - | - = note: `-D clippy::needless-collect` implied by `-D warnings` - = help: to override `-D warnings` add `#[allow(clippy::needless_collect)]` - -error: aborting due to 2 previous errors +error: aborting due to 1 previous error diff --git a/src/tools/clippy/tests/ui/crashes/ice-13544-original.rs b/src/tools/clippy/tests/ui/crashes/ice-13544-original.rs new file mode 100644 index 0000000000000..1709eaeb365e8 --- /dev/null +++ b/src/tools/clippy/tests/ui/crashes/ice-13544-original.rs @@ -0,0 +1,45 @@ +//@ check-pass +#![warn(clippy::significant_drop_tightening)] + +use std::mem::ManuallyDrop; +use std::ops::{Deref, DerefMut}; + +trait Scopable: Sized { + type SubType: Scopable; +} + +struct Subtree(ManuallyDrop>>); + +impl Drop for Subtree { + fn drop(&mut self) { + // SAFETY: The field cannot be used after we drop + unsafe { ManuallyDrop::drop(&mut self.0) } + } +} + +impl Deref for Subtree { + type Target = Tree; + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +impl DerefMut for Subtree { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} + +enum Tree { + Group(Vec>), + Subtree(Subtree), + Leaf(T), +} + +impl Tree { + fn foo(self) -> Self { + self + } +} + +fn main() {} diff --git a/src/tools/clippy/tests/ui/crashes/ice-13544-reduced.rs b/src/tools/clippy/tests/ui/crashes/ice-13544-reduced.rs new file mode 100644 index 0000000000000..9266e71f5d0e0 --- /dev/null +++ b/src/tools/clippy/tests/ui/crashes/ice-13544-reduced.rs @@ -0,0 +1,16 @@ +//@ check-pass +#![warn(clippy::significant_drop_tightening)] +#![allow(unused, clippy::no_effect)] + +use std::marker::PhantomData; + +trait Trait { + type Assoc: Trait; +} +struct S(*const S, PhantomData); + +fn f(x: &mut S) { + &mut x.0; +} + +fn main() {} diff --git a/src/tools/clippy/tests/ui/crashes/ice-1588.rs b/src/tools/clippy/tests/ui/crashes/ice-1588.rs index 3ccd33052cd6c..422c29b66cf68 100644 --- a/src/tools/clippy/tests/ui/crashes/ice-1588.rs +++ b/src/tools/clippy/tests/ui/crashes/ice-1588.rs @@ -1,6 +1,6 @@ //@ check-pass -#![allow(clippy::all)] +#![expect(clippy::no_effect)] // Test for https://github.com/rust-lang/rust-clippy/issues/1588 diff --git a/src/tools/clippy/tests/ui/crashes/ice-1782.rs b/src/tools/clippy/tests/ui/crashes/ice-1782.rs index 4a1886c08af6a..776b0a93bf7c7 100644 --- a/src/tools/clippy/tests/ui/crashes/ice-1782.rs +++ b/src/tools/clippy/tests/ui/crashes/ice-1782.rs @@ -1,6 +1,6 @@ //@ check-pass -#![allow(dead_code, unused_variables, invalid_null_arguments)] +#![allow(dead_code, unused_variables, invalid_null_arguments, unnecessary_transmutes)] #![allow(clippy::unnecessary_cast, clippy::missing_transmute_annotations)] /// Should not trigger an ICE in `SpanlessEq` / `consts::constant` diff --git a/src/tools/clippy/tests/ui/crashes/ice-1969.rs b/src/tools/clippy/tests/ui/crashes/ice-1969.rs index 34ff725d71176..813972a046f75 100644 --- a/src/tools/clippy/tests/ui/crashes/ice-1969.rs +++ b/src/tools/clippy/tests/ui/crashes/ice-1969.rs @@ -1,7 +1,5 @@ //@ check-pass -#![allow(clippy::all)] - // Test for https://github.com/rust-lang/rust-clippy/issues/1969 fn main() {} diff --git a/src/tools/clippy/tests/ui/crashes/ice-3462.rs b/src/tools/clippy/tests/ui/crashes/ice-3462.rs index 4ce484917ae2f..e06eccdf142ca 100644 --- a/src/tools/clippy/tests/ui/crashes/ice-3462.rs +++ b/src/tools/clippy/tests/ui/crashes/ice-3462.rs @@ -1,8 +1,6 @@ //@ check-pass -#![warn(clippy::all)] -#![allow(clippy::disallowed_names, clippy::equatable_if_let, clippy::needless_if)] -#![allow(unused)] +#![expect(clippy::disallowed_names)] // Test for https://github.com/rust-lang/rust-clippy/issues/3462 diff --git a/src/tools/clippy/tests/ui/crashes/ice-700.rs b/src/tools/clippy/tests/ui/crashes/ice-700.rs index aa3bf493c201c..ca82f638b0bba 100644 --- a/src/tools/clippy/tests/ui/crashes/ice-700.rs +++ b/src/tools/clippy/tests/ui/crashes/ice-700.rs @@ -1,7 +1,5 @@ //@ check-pass -#![deny(clippy::all)] - // Test for https://github.com/rust-lang/rust-clippy/issues/700 fn core() {} diff --git a/src/tools/clippy/tests/ui/crashes/ice-7012.rs b/src/tools/clippy/tests/ui/crashes/ice-7012.rs index d76995adadf1a..48c1c5a98d40a 100644 --- a/src/tools/clippy/tests/ui/crashes/ice-7012.rs +++ b/src/tools/clippy/tests/ui/crashes/ice-7012.rs @@ -1,6 +1,6 @@ //@ check-pass -#![allow(clippy::all)] +#![expect(clippy::single_match)] enum _MyOption { None, diff --git a/src/tools/clippy/tests/ui/crashes/ice-7423.rs b/src/tools/clippy/tests/ui/crashes/ice-7423.rs index a03981842fcc4..fbf5d6520ed64 100644 --- a/src/tools/clippy/tests/ui/crashes/ice-7423.rs +++ b/src/tools/clippy/tests/ui/crashes/ice-7423.rs @@ -6,7 +6,7 @@ pub trait Trait { impl Trait for usize { fn f() { - extern "C" { + unsafe extern "C" { fn g() -> usize; } } diff --git a/src/tools/clippy/tests/ui/crashes/ice_exact_size.rs b/src/tools/clippy/tests/ui/crashes/ice_exact_size.rs index cb4685e78e22f..0aa55cd0fac61 100644 --- a/src/tools/clippy/tests/ui/crashes/ice_exact_size.rs +++ b/src/tools/clippy/tests/ui/crashes/ice_exact_size.rs @@ -1,10 +1,7 @@ //@ check-pass -#![deny(clippy::all)] - // Test for https://github.com/rust-lang/rust-clippy/issues/1336 -#[allow(dead_code)] struct Foo; impl Iterator for Foo { diff --git a/src/tools/clippy/tests/ui/crashes/needless_borrow_fp.rs b/src/tools/clippy/tests/ui/crashes/needless_borrow_fp.rs index 68e39531682a0..1b3b0290d909e 100644 --- a/src/tools/clippy/tests/ui/crashes/needless_borrow_fp.rs +++ b/src/tools/clippy/tests/ui/crashes/needless_borrow_fp.rs @@ -1,6 +1,5 @@ //@ check-pass -#[deny(clippy::all)] #[derive(Debug)] pub enum Error { Type(&'static str), diff --git a/src/tools/clippy/tests/ui/crate_level_checks/no_std_swap.fixed b/src/tools/clippy/tests/ui/crate_level_checks/no_std_swap.fixed index e09a913ef06cd..9d977e9eddc84 100644 --- a/src/tools/clippy/tests/ui/crate_level_checks/no_std_swap.fixed +++ b/src/tools/clippy/tests/ui/crate_level_checks/no_std_swap.fixed @@ -3,7 +3,6 @@ use core::panic::PanicInfo; -#[warn(clippy::all)] pub fn main() { let mut a = 42; let mut b = 1337; diff --git a/src/tools/clippy/tests/ui/crate_level_checks/no_std_swap.rs b/src/tools/clippy/tests/ui/crate_level_checks/no_std_swap.rs index 66ca97690c175..0967efe2ed8dc 100644 --- a/src/tools/clippy/tests/ui/crate_level_checks/no_std_swap.rs +++ b/src/tools/clippy/tests/ui/crate_level_checks/no_std_swap.rs @@ -3,7 +3,6 @@ use core::panic::PanicInfo; -#[warn(clippy::all)] pub fn main() { let mut a = 42; let mut b = 1337; diff --git a/src/tools/clippy/tests/ui/crate_level_checks/no_std_swap.stderr b/src/tools/clippy/tests/ui/crate_level_checks/no_std_swap.stderr index 3e37bd95ef340..e16b53b51a810 100644 --- a/src/tools/clippy/tests/ui/crate_level_checks/no_std_swap.stderr +++ b/src/tools/clippy/tests/ui/crate_level_checks/no_std_swap.stderr @@ -1,5 +1,5 @@ error: this looks like you are trying to swap `a` and `b` - --> tests/ui/crate_level_checks/no_std_swap.rs:11:5 + --> tests/ui/crate_level_checks/no_std_swap.rs:10:5 | LL | / a = b; ... | @@ -7,8 +7,7 @@ LL | | b = a; | |_________^ help: try: `core::mem::swap(&mut a, &mut b)` | = note: or maybe you should use `core::mem::replace`? - = note: `-D clippy::almost-swapped` implied by `-D warnings` - = help: to override `-D warnings` add `#[allow(clippy::almost_swapped)]` + = note: `#[deny(clippy::almost_swapped)]` on by default error: aborting due to 1 previous error diff --git a/src/tools/clippy/tests/ui/dbg_macro/dbg_macro.fixed b/src/tools/clippy/tests/ui/dbg_macro/dbg_macro.fixed index fd1a0d8934b3c..3b9dee81898ab 100644 --- a/src/tools/clippy/tests/ui/dbg_macro/dbg_macro.fixed +++ b/src/tools/clippy/tests/ui/dbg_macro/dbg_macro.fixed @@ -1,5 +1,10 @@ +#![allow( + clippy::no_effect, + clippy::uninlined_format_args, + clippy::unit_arg, + clippy::unnecessary_operation +)] #![warn(clippy::dbg_macro)] -#![allow(clippy::unnecessary_operation, clippy::no_effect, clippy::unit_arg)] fn foo(n: u32) -> u32 { if let Some(n) = n.checked_sub(4) { n } else { n } diff --git a/src/tools/clippy/tests/ui/dbg_macro/dbg_macro.rs b/src/tools/clippy/tests/ui/dbg_macro/dbg_macro.rs index c96e2c7251c29..1dbbc6fe98456 100644 --- a/src/tools/clippy/tests/ui/dbg_macro/dbg_macro.rs +++ b/src/tools/clippy/tests/ui/dbg_macro/dbg_macro.rs @@ -1,5 +1,10 @@ +#![allow( + clippy::no_effect, + clippy::uninlined_format_args, + clippy::unit_arg, + clippy::unnecessary_operation +)] #![warn(clippy::dbg_macro)] -#![allow(clippy::unnecessary_operation, clippy::no_effect, clippy::unit_arg)] fn foo(n: u32) -> u32 { if let Some(n) = dbg!(n.checked_sub(4)) { n } else { n } diff --git a/src/tools/clippy/tests/ui/dbg_macro/dbg_macro.stderr b/src/tools/clippy/tests/ui/dbg_macro/dbg_macro.stderr index cd6dce584a2fa..f1412023cc897 100644 --- a/src/tools/clippy/tests/ui/dbg_macro/dbg_macro.stderr +++ b/src/tools/clippy/tests/ui/dbg_macro/dbg_macro.stderr @@ -1,5 +1,5 @@ error: the `dbg!` macro is intended as a debugging tool - --> tests/ui/dbg_macro/dbg_macro.rs:5:22 + --> tests/ui/dbg_macro/dbg_macro.rs:10:22 | LL | if let Some(n) = dbg!(n.checked_sub(4)) { n } else { n } | ^^^^^^^^^^^^^^^^^^^^^^ @@ -13,7 +13,7 @@ LL + if let Some(n) = n.checked_sub(4) { n } else { n } | error: the `dbg!` macro is intended as a debugging tool - --> tests/ui/dbg_macro/dbg_macro.rs:11:8 + --> tests/ui/dbg_macro/dbg_macro.rs:16:8 | LL | if dbg!(n <= 1) { | ^^^^^^^^^^^^ @@ -25,7 +25,7 @@ LL + if n <= 1 { | error: the `dbg!` macro is intended as a debugging tool - --> tests/ui/dbg_macro/dbg_macro.rs:14:9 + --> tests/ui/dbg_macro/dbg_macro.rs:19:9 | LL | dbg!(1) | ^^^^^^^ @@ -37,7 +37,7 @@ LL + 1 | error: the `dbg!` macro is intended as a debugging tool - --> tests/ui/dbg_macro/dbg_macro.rs:17:9 + --> tests/ui/dbg_macro/dbg_macro.rs:22:9 | LL | dbg!(n * factorial(n - 1)) | ^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -49,7 +49,7 @@ LL + n * factorial(n - 1) | error: the `dbg!` macro is intended as a debugging tool - --> tests/ui/dbg_macro/dbg_macro.rs:23:5 + --> tests/ui/dbg_macro/dbg_macro.rs:28:5 | LL | dbg!(42); | ^^^^^^^^ @@ -61,7 +61,7 @@ LL + 42; | error: the `dbg!` macro is intended as a debugging tool - --> tests/ui/dbg_macro/dbg_macro.rs:26:14 + --> tests/ui/dbg_macro/dbg_macro.rs:31:14 | LL | foo(3) + dbg!(factorial(4)); | ^^^^^^^^^^^^^^^^^^ @@ -73,7 +73,7 @@ LL + foo(3) + factorial(4); | error: the `dbg!` macro is intended as a debugging tool - --> tests/ui/dbg_macro/dbg_macro.rs:29:5 + --> tests/ui/dbg_macro/dbg_macro.rs:34:5 | LL | dbg!(1, 2, 3, 4, 5); | ^^^^^^^^^^^^^^^^^^^ @@ -85,7 +85,7 @@ LL + (1, 2, 3, 4, 5); | error: the `dbg!` macro is intended as a debugging tool - --> tests/ui/dbg_macro/dbg_macro.rs:51:5 + --> tests/ui/dbg_macro/dbg_macro.rs:56:5 | LL | dbg!(); | ^^^^^^ @@ -96,7 +96,7 @@ LL - dbg!(); | error: the `dbg!` macro is intended as a debugging tool - --> tests/ui/dbg_macro/dbg_macro.rs:55:13 + --> tests/ui/dbg_macro/dbg_macro.rs:60:13 | LL | let _ = dbg!(); | ^^^^^^ @@ -108,7 +108,7 @@ LL + let _ = (); | error: the `dbg!` macro is intended as a debugging tool - --> tests/ui/dbg_macro/dbg_macro.rs:58:9 + --> tests/ui/dbg_macro/dbg_macro.rs:63:9 | LL | bar(dbg!()); | ^^^^^^ @@ -120,7 +120,7 @@ LL + bar(()); | error: the `dbg!` macro is intended as a debugging tool - --> tests/ui/dbg_macro/dbg_macro.rs:61:10 + --> tests/ui/dbg_macro/dbg_macro.rs:66:10 | LL | foo!(dbg!()); | ^^^^^^ @@ -132,7 +132,7 @@ LL + foo!(()); | error: the `dbg!` macro is intended as a debugging tool - --> tests/ui/dbg_macro/dbg_macro.rs:64:16 + --> tests/ui/dbg_macro/dbg_macro.rs:69:16 | LL | foo2!(foo!(dbg!())); | ^^^^^^ @@ -144,7 +144,7 @@ LL + foo2!(foo!(())); | error: the `dbg!` macro is intended as a debugging tool - --> tests/ui/dbg_macro/dbg_macro.rs:46:13 + --> tests/ui/dbg_macro/dbg_macro.rs:51:13 | LL | dbg!(); | ^^^^^^ @@ -159,7 +159,7 @@ LL - dbg!(); | error: the `dbg!` macro is intended as a debugging tool - --> tests/ui/dbg_macro/dbg_macro.rs:87:9 + --> tests/ui/dbg_macro/dbg_macro.rs:92:9 | LL | dbg!(2); | ^^^^^^^ @@ -171,7 +171,7 @@ LL + 2; | error: the `dbg!` macro is intended as a debugging tool - --> tests/ui/dbg_macro/dbg_macro.rs:94:5 + --> tests/ui/dbg_macro/dbg_macro.rs:99:5 | LL | dbg!(1); | ^^^^^^^ @@ -183,7 +183,7 @@ LL + 1; | error: the `dbg!` macro is intended as a debugging tool - --> tests/ui/dbg_macro/dbg_macro.rs:100:5 + --> tests/ui/dbg_macro/dbg_macro.rs:105:5 | LL | dbg!(1); | ^^^^^^^ @@ -195,7 +195,7 @@ LL + 1; | error: the `dbg!` macro is intended as a debugging tool - --> tests/ui/dbg_macro/dbg_macro.rs:107:9 + --> tests/ui/dbg_macro/dbg_macro.rs:112:9 | LL | dbg!(1); | ^^^^^^^ @@ -207,7 +207,7 @@ LL + 1; | error: the `dbg!` macro is intended as a debugging tool - --> tests/ui/dbg_macro/dbg_macro.rs:114:31 + --> tests/ui/dbg_macro/dbg_macro.rs:119:31 | LL | println!("dbg: {:?}", dbg!(s)); | ^^^^^^^ @@ -219,7 +219,7 @@ LL + println!("dbg: {:?}", s); | error: the `dbg!` macro is intended as a debugging tool - --> tests/ui/dbg_macro/dbg_macro.rs:117:22 + --> tests/ui/dbg_macro/dbg_macro.rs:122:22 | LL | print!("{}", dbg!(s)); | ^^^^^^^ diff --git a/src/tools/clippy/tests/ui/def_id_nocore.rs b/src/tools/clippy/tests/ui/def_id_nocore.rs index 03f5ca31f5f07..40f40f7ea0960 100644 --- a/src/tools/clippy/tests/ui/def_id_nocore.rs +++ b/src/tools/clippy/tests/ui/def_id_nocore.rs @@ -5,7 +5,7 @@ #![allow(clippy::missing_safety_doc)] #[link(name = "c")] -extern "C" {} +unsafe extern "C" {} #[lang = "sized"] pub trait Sized {} diff --git a/src/tools/clippy/tests/ui/default_constructed_unit_structs.fixed b/src/tools/clippy/tests/ui/default_constructed_unit_structs.fixed index fa4d55177823e..1ca9be0ceddc7 100644 --- a/src/tools/clippy/tests/ui/default_constructed_unit_structs.fixed +++ b/src/tools/clippy/tests/ui/default_constructed_unit_structs.fixed @@ -161,3 +161,17 @@ fn main() { let _ = ::default(); } + +fn issue12654() { + #[derive(Default)] + struct G; + + fn f(_g: G) {} + + f(<_>::default()); + f(G); + //~^ default_constructed_unit_structs + + // No lint because `as Default` hides the singleton + f(::default()); +} diff --git a/src/tools/clippy/tests/ui/default_constructed_unit_structs.rs b/src/tools/clippy/tests/ui/default_constructed_unit_structs.rs index 291cd89da0b79..99eb8913fc3ce 100644 --- a/src/tools/clippy/tests/ui/default_constructed_unit_structs.rs +++ b/src/tools/clippy/tests/ui/default_constructed_unit_structs.rs @@ -161,3 +161,17 @@ fn main() { let _ = ::default(); } + +fn issue12654() { + #[derive(Default)] + struct G; + + fn f(_g: G) {} + + f(<_>::default()); + f(::default()); + //~^ default_constructed_unit_structs + + // No lint because `as Default` hides the singleton + f(::default()); +} diff --git a/src/tools/clippy/tests/ui/default_constructed_unit_structs.stderr b/src/tools/clippy/tests/ui/default_constructed_unit_structs.stderr index 6d4e1bdc2cc81..97fad792e4f7a 100644 --- a/src/tools/clippy/tests/ui/default_constructed_unit_structs.stderr +++ b/src/tools/clippy/tests/ui/default_constructed_unit_structs.stderr @@ -1,41 +1,65 @@ error: use of `default` to create a unit struct - --> tests/ui/default_constructed_unit_structs.rs:11:13 + --> tests/ui/default_constructed_unit_structs.rs:11:9 | LL | Self::default() - | ^^^^^^^^^^^ help: remove this call to `default` + | ^^^^----------- + | | + | help: remove this call to `default` | = note: `-D clippy::default-constructed-unit-structs` implied by `-D warnings` = help: to override `-D warnings` add `#[allow(clippy::default_constructed_unit_structs)]` error: use of `default` to create a unit struct - --> tests/ui/default_constructed_unit_structs.rs:54:31 + --> tests/ui/default_constructed_unit_structs.rs:54:20 | LL | inner: PhantomData::default(), - | ^^^^^^^^^^^ help: remove this call to `default` + | ^^^^^^^^^^^----------- + | | + | help: remove this call to `default` error: use of `default` to create a unit struct - --> tests/ui/default_constructed_unit_structs.rs:128:33 + --> tests/ui/default_constructed_unit_structs.rs:128:13 | LL | let _ = PhantomData::::default(); - | ^^^^^^^^^^^ help: remove this call to `default` + | ^^^^^^^^^^^^^^^^^^^^----------- + | | + | help: remove this call to `default` error: use of `default` to create a unit struct - --> tests/ui/default_constructed_unit_structs.rs:130:42 + --> tests/ui/default_constructed_unit_structs.rs:130:31 | LL | let _: PhantomData = PhantomData::default(); - | ^^^^^^^^^^^ help: remove this call to `default` + | ^^^^^^^^^^^----------- + | | + | help: remove this call to `default` error: use of `default` to create a unit struct - --> tests/ui/default_constructed_unit_structs.rs:132:55 + --> tests/ui/default_constructed_unit_structs.rs:132:31 | LL | let _: PhantomData = std::marker::PhantomData::default(); - | ^^^^^^^^^^^ help: remove this call to `default` + | ^^^^^^^^^^^^^^^^^^^^^^^^----------- + | | + | help: remove this call to `default` error: use of `default` to create a unit struct - --> tests/ui/default_constructed_unit_structs.rs:134:23 + --> tests/ui/default_constructed_unit_structs.rs:134:13 | LL | let _ = UnitStruct::default(); - | ^^^^^^^^^^^ help: remove this call to `default` + | ^^^^^^^^^^----------- + | | + | help: remove this call to `default` -error: aborting due to 6 previous errors +error: use of `default` to create a unit struct + --> tests/ui/default_constructed_unit_structs.rs:172:7 + | +LL | f(::default()); + | ^^^^^^^^^^^^^^ + | +help: remove this call to `default` + | +LL - f(::default()); +LL + f(G); + | + +error: aborting due to 7 previous errors diff --git a/src/tools/clippy/tests/ui/deprecated.rs b/src/tools/clippy/tests/ui/deprecated.rs index 35646e1c23919..2787f6406fe39 100644 --- a/src/tools/clippy/tests/ui/deprecated.rs +++ b/src/tools/clippy/tests/ui/deprecated.rs @@ -16,5 +16,6 @@ #![warn(clippy::pub_enum_variant_names)] //~ ERROR: lint `clippy::pub_enum_variant_names` #![warn(clippy::wrong_pub_self_convention)] //~ ERROR: lint `clippy::wrong_pub_self_convention` #![warn(clippy::option_map_or_err_ok)] //~ ERROR: lint `clippy::option_map_or_err_ok` +#![warn(clippy::match_on_vec_items)] //~ ERROR: lint `clippy::match_on_vec_items` fn main() {} diff --git a/src/tools/clippy/tests/ui/deprecated.stderr b/src/tools/clippy/tests/ui/deprecated.stderr index d7be1e583b08b..604732405c370 100644 --- a/src/tools/clippy/tests/ui/deprecated.stderr +++ b/src/tools/clippy/tests/ui/deprecated.stderr @@ -85,5 +85,11 @@ error: lint `clippy::option_map_or_err_ok` has been removed: `clippy::manual_ok_ LL | #![warn(clippy::option_map_or_err_ok)] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -error: aborting due to 14 previous errors +error: lint `clippy::match_on_vec_items` has been removed: `clippy::indexing_slicing` covers indexing and slicing on `Vec<_>` + --> tests/ui/deprecated.rs:19:9 + | +LL | #![warn(clippy::match_on_vec_items)] + | ^^^^^^^^^^^^^^^^^^^^^^^^^^ + +error: aborting due to 15 previous errors diff --git a/src/tools/clippy/tests/ui/derive.rs b/src/tools/clippy/tests/ui/derive.rs index 707a9ff058576..e334203c7b2a7 100644 --- a/src/tools/clippy/tests/ui/derive.rs +++ b/src/tools/clippy/tests/ui/derive.rs @@ -6,6 +6,8 @@ dead_code )] #![warn(clippy::expl_impl_clone_on_copy)] +#![expect(incomplete_features)] // `unsafe_fields` is incomplete for the time being +#![feature(unsafe_fields)] // `clone()` cannot be derived automatically on unsafe fields #[derive(Copy)] @@ -113,4 +115,19 @@ impl Clone for Packed { } } +fn issue14558() { + pub struct Valid { + pub unsafe actual: (), + } + + unsafe impl Copy for Valid {} + + impl Clone for Valid { + #[inline] + fn clone(&self) -> Self { + *self + } + } +} + fn main() {} diff --git a/src/tools/clippy/tests/ui/derive.stderr b/src/tools/clippy/tests/ui/derive.stderr index 20278d4f4e4a4..9004ced6849e5 100644 --- a/src/tools/clippy/tests/ui/derive.stderr +++ b/src/tools/clippy/tests/ui/derive.stderr @@ -1,5 +1,5 @@ error: you are implementing `Clone` explicitly on a `Copy` type - --> tests/ui/derive.rs:14:1 + --> tests/ui/derive.rs:16:1 | LL | / impl Clone for Qux { LL | | @@ -10,7 +10,7 @@ LL | | } | |_^ | note: consider deriving `Clone` or removing `Copy` - --> tests/ui/derive.rs:14:1 + --> tests/ui/derive.rs:16:1 | LL | / impl Clone for Qux { LL | | @@ -23,7 +23,7 @@ LL | | } = help: to override `-D warnings` add `#[allow(clippy::expl_impl_clone_on_copy)]` error: you are implementing `Clone` explicitly on a `Copy` type - --> tests/ui/derive.rs:40:1 + --> tests/ui/derive.rs:42:1 | LL | / impl<'a> Clone for Lt<'a> { LL | | @@ -34,7 +34,7 @@ LL | | } | |_^ | note: consider deriving `Clone` or removing `Copy` - --> tests/ui/derive.rs:40:1 + --> tests/ui/derive.rs:42:1 | LL | / impl<'a> Clone for Lt<'a> { LL | | @@ -45,7 +45,7 @@ LL | | } | |_^ error: you are implementing `Clone` explicitly on a `Copy` type - --> tests/ui/derive.rs:53:1 + --> tests/ui/derive.rs:55:1 | LL | / impl Clone for BigArray { LL | | @@ -56,7 +56,7 @@ LL | | } | |_^ | note: consider deriving `Clone` or removing `Copy` - --> tests/ui/derive.rs:53:1 + --> tests/ui/derive.rs:55:1 | LL | / impl Clone for BigArray { LL | | @@ -67,7 +67,7 @@ LL | | } | |_^ error: you are implementing `Clone` explicitly on a `Copy` type - --> tests/ui/derive.rs:66:1 + --> tests/ui/derive.rs:68:1 | LL | / impl Clone for FnPtr { LL | | @@ -78,7 +78,7 @@ LL | | } | |_^ | note: consider deriving `Clone` or removing `Copy` - --> tests/ui/derive.rs:66:1 + --> tests/ui/derive.rs:68:1 | LL | / impl Clone for FnPtr { LL | | @@ -89,7 +89,7 @@ LL | | } | |_^ error: you are implementing `Clone` explicitly on a `Copy` type - --> tests/ui/derive.rs:88:1 + --> tests/ui/derive.rs:90:1 | LL | / impl Clone for Generic2 { LL | | @@ -100,7 +100,7 @@ LL | | } | |_^ | note: consider deriving `Clone` or removing `Copy` - --> tests/ui/derive.rs:88:1 + --> tests/ui/derive.rs:90:1 | LL | / impl Clone for Generic2 { LL | | diff --git a/src/tools/clippy/tests/ui/doc/doc-fixable.fixed b/src/tools/clippy/tests/ui/doc/doc-fixable.fixed index 5f2b697f88b02..8cf20d8b1a11c 100644 --- a/src/tools/clippy/tests/ui/doc/doc-fixable.fixed +++ b/src/tools/clippy/tests/ui/doc/doc-fixable.fixed @@ -1,4 +1,3 @@ - //! This file tests for the `DOC_MARKDOWN` lint. #![allow(dead_code, incomplete_features)] @@ -272,7 +271,7 @@ fn parenthesized_word() {} /// UXes fn plural_acronym_test() {} -extern "C" { +unsafe extern "C" { /// `foo()` //~^ doc_markdown fn in_extern(); diff --git a/src/tools/clippy/tests/ui/doc/doc-fixable.rs b/src/tools/clippy/tests/ui/doc/doc-fixable.rs index ed3925694c67e..5b6f2bd8330c5 100644 --- a/src/tools/clippy/tests/ui/doc/doc-fixable.rs +++ b/src/tools/clippy/tests/ui/doc/doc-fixable.rs @@ -1,4 +1,3 @@ - //! This file tests for the `DOC_MARKDOWN` lint. #![allow(dead_code, incomplete_features)] @@ -272,7 +271,7 @@ fn parenthesized_word() {} /// UXes fn plural_acronym_test() {} -extern "C" { +unsafe extern "C" { /// foo() //~^ doc_markdown fn in_extern(); diff --git a/src/tools/clippy/tests/ui/doc/doc-fixable.stderr b/src/tools/clippy/tests/ui/doc/doc-fixable.stderr index d67da75a230ca..98c26e6bec2eb 100644 --- a/src/tools/clippy/tests/ui/doc/doc-fixable.stderr +++ b/src/tools/clippy/tests/ui/doc/doc-fixable.stderr @@ -1,5 +1,5 @@ error: item in documentation is missing backticks - --> tests/ui/doc/doc-fixable.rs:9:9 + --> tests/ui/doc/doc-fixable.rs:8:9 | LL | /// The foo_bar function does _nothing_. See also foo::bar. (note the dot there) | ^^^^^^^ @@ -13,7 +13,7 @@ LL + /// The `foo_bar` function does _nothing_. See also foo::bar. (note the dot | error: item in documentation is missing backticks - --> tests/ui/doc/doc-fixable.rs:9:51 + --> tests/ui/doc/doc-fixable.rs:8:51 | LL | /// The foo_bar function does _nothing_. See also foo::bar. (note the dot there) | ^^^^^^^^ @@ -25,7 +25,7 @@ LL + /// The foo_bar function does _nothing_. See also `foo::bar`. (note the dot | error: item in documentation is missing backticks - --> tests/ui/doc/doc-fixable.rs:12:83 + --> tests/ui/doc/doc-fixable.rs:11:83 | LL | /// Markdown is _weird_. I mean _really weird_. This \_ is ok. So is `_`. But not Foo::some_fun | ^^^^^^^^^^^^^ @@ -37,7 +37,7 @@ LL + /// Markdown is _weird_. I mean _really weird_. This \_ is ok. So is `_`. B | error: item in documentation is missing backticks - --> tests/ui/doc/doc-fixable.rs:15:13 + --> tests/ui/doc/doc-fixable.rs:14:13 | LL | /// Here be ::a::global:path, and _::another::global::path_. :: is not a path though. | ^^^^^^^^^^^^^^^^ @@ -49,7 +49,7 @@ LL + /// Here be `::a::global:path`, and _::another::global::path_. :: is not a | error: item in documentation is missing backticks - --> tests/ui/doc/doc-fixable.rs:15:36 + --> tests/ui/doc/doc-fixable.rs:14:36 | LL | /// Here be ::a::global:path, and _::another::global::path_. :: is not a path though. | ^^^^^^^^^^^^^^^^^^^^^^^ @@ -61,7 +61,7 @@ LL + /// Here be ::a::global:path, and _`::another::global::path`_. :: is not a | error: item in documentation is missing backticks - --> tests/ui/doc/doc-fixable.rs:18:25 + --> tests/ui/doc/doc-fixable.rs:17:25 | LL | /// Import an item from ::awesome::global::blob:: (Intended postfix) | ^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -73,7 +73,7 @@ LL + /// Import an item from `::awesome::global::blob::` (Intended postfix) | error: item in documentation is missing backticks - --> tests/ui/doc/doc-fixable.rs:20:31 + --> tests/ui/doc/doc-fixable.rs:19:31 | LL | /// These are the options for ::Cat: (Intended trailing single colon, shouldn't be linted) | ^^^^^ @@ -85,7 +85,7 @@ LL + /// These are the options for `::Cat`: (Intended trailing single colon, sho | error: item in documentation is missing backticks - --> tests/ui/doc/doc-fixable.rs:22:22 + --> tests/ui/doc/doc-fixable.rs:21:22 | LL | /// That's not code ~NotInCodeBlock~. | ^^^^^^^^^^^^^^ @@ -97,7 +97,7 @@ LL + /// That's not code ~`NotInCodeBlock`~. | error: item in documentation is missing backticks - --> tests/ui/doc/doc-fixable.rs:24:5 + --> tests/ui/doc/doc-fixable.rs:23:5 | LL | /// be_sure_we_got_to_the_end_of_it | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -109,7 +109,7 @@ LL + /// `be_sure_we_got_to_the_end_of_it` | error: item in documentation is missing backticks - --> tests/ui/doc/doc-fixable.rs:39:5 + --> tests/ui/doc/doc-fixable.rs:38:5 | LL | /// be_sure_we_got_to_the_end_of_it | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -121,7 +121,7 @@ LL + /// `be_sure_we_got_to_the_end_of_it` | error: item in documentation is missing backticks - --> tests/ui/doc/doc-fixable.rs:47:5 + --> tests/ui/doc/doc-fixable.rs:46:5 | LL | /// be_sure_we_got_to_the_end_of_it | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -133,7 +133,7 @@ LL + /// `be_sure_we_got_to_the_end_of_it` | error: item in documentation is missing backticks - --> tests/ui/doc/doc-fixable.rs:62:5 + --> tests/ui/doc/doc-fixable.rs:61:5 | LL | /// be_sure_we_got_to_the_end_of_it | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -145,7 +145,7 @@ LL + /// `be_sure_we_got_to_the_end_of_it` | error: item in documentation is missing backticks - --> tests/ui/doc/doc-fixable.rs:91:5 + --> tests/ui/doc/doc-fixable.rs:90:5 | LL | /// be_sure_we_got_to_the_end_of_it | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -157,7 +157,7 @@ LL + /// `be_sure_we_got_to_the_end_of_it` | error: item in documentation is missing backticks - --> tests/ui/doc/doc-fixable.rs:109:5 + --> tests/ui/doc/doc-fixable.rs:108:5 | LL | /// be_sure_we_got_to_the_end_of_it | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -169,7 +169,7 @@ LL + /// `be_sure_we_got_to_the_end_of_it` | error: item in documentation is missing backticks - --> tests/ui/doc/doc-fixable.rs:118:8 + --> tests/ui/doc/doc-fixable.rs:117:8 | LL | /// ## CamelCaseThing | ^^^^^^^^^^^^^^ @@ -181,7 +181,7 @@ LL + /// ## `CamelCaseThing` | error: item in documentation is missing backticks - --> tests/ui/doc/doc-fixable.rs:122:7 + --> tests/ui/doc/doc-fixable.rs:121:7 | LL | /// # CamelCaseThing | ^^^^^^^^^^^^^^ @@ -193,7 +193,7 @@ LL + /// # `CamelCaseThing` | error: item in documentation is missing backticks - --> tests/ui/doc/doc-fixable.rs:125:22 + --> tests/ui/doc/doc-fixable.rs:124:22 | LL | /// Not a title #897 CamelCaseThing | ^^^^^^^^^^^^^^ @@ -205,7 +205,7 @@ LL + /// Not a title #897 `CamelCaseThing` | error: item in documentation is missing backticks - --> tests/ui/doc/doc-fixable.rs:127:5 + --> tests/ui/doc/doc-fixable.rs:126:5 | LL | /// be_sure_we_got_to_the_end_of_it | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -217,7 +217,7 @@ LL + /// `be_sure_we_got_to_the_end_of_it` | error: item in documentation is missing backticks - --> tests/ui/doc/doc-fixable.rs:135:5 + --> tests/ui/doc/doc-fixable.rs:134:5 | LL | /// be_sure_we_got_to_the_end_of_it | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -229,7 +229,7 @@ LL + /// `be_sure_we_got_to_the_end_of_it` | error: item in documentation is missing backticks - --> tests/ui/doc/doc-fixable.rs:149:5 + --> tests/ui/doc/doc-fixable.rs:148:5 | LL | /// be_sure_we_got_to_the_end_of_it | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -241,7 +241,7 @@ LL + /// `be_sure_we_got_to_the_end_of_it` | error: item in documentation is missing backticks - --> tests/ui/doc/doc-fixable.rs:161:43 + --> tests/ui/doc/doc-fixable.rs:160:43 | LL | /** E.g., serialization of an empty list: FooBar | ^^^^^^ @@ -253,7 +253,7 @@ LL + /** E.g., serialization of an empty list: `FooBar` | error: item in documentation is missing backticks - --> tests/ui/doc/doc-fixable.rs:166:5 + --> tests/ui/doc/doc-fixable.rs:165:5 | LL | And BarQuz too. | ^^^^^^ @@ -265,7 +265,7 @@ LL + And `BarQuz` too. | error: item in documentation is missing backticks - --> tests/ui/doc/doc-fixable.rs:167:1 + --> tests/ui/doc/doc-fixable.rs:166:1 | LL | be_sure_we_got_to_the_end_of_it | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -277,7 +277,7 @@ LL + `be_sure_we_got_to_the_end_of_it` | error: item in documentation is missing backticks - --> tests/ui/doc/doc-fixable.rs:175:43 + --> tests/ui/doc/doc-fixable.rs:174:43 | LL | /** E.g., serialization of an empty list: FooBar | ^^^^^^ @@ -289,7 +289,7 @@ LL + /** E.g., serialization of an empty list: `FooBar` | error: item in documentation is missing backticks - --> tests/ui/doc/doc-fixable.rs:180:5 + --> tests/ui/doc/doc-fixable.rs:179:5 | LL | And BarQuz too. | ^^^^^^ @@ -301,7 +301,7 @@ LL + And `BarQuz` too. | error: item in documentation is missing backticks - --> tests/ui/doc/doc-fixable.rs:181:1 + --> tests/ui/doc/doc-fixable.rs:180:1 | LL | be_sure_we_got_to_the_end_of_it | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -313,7 +313,7 @@ LL + `be_sure_we_got_to_the_end_of_it` | error: item in documentation is missing backticks - --> tests/ui/doc/doc-fixable.rs:195:5 + --> tests/ui/doc/doc-fixable.rs:194:5 | LL | /// be_sure_we_got_to_the_end_of_it | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -325,7 +325,7 @@ LL + /// `be_sure_we_got_to_the_end_of_it` | error: item in documentation is missing backticks - --> tests/ui/doc/doc-fixable.rs:215:22 + --> tests/ui/doc/doc-fixable.rs:214:22 | LL | /// An iterator over mycrate::Collection's values. | ^^^^^^^^^^^^^^^^^^^ @@ -337,7 +337,7 @@ LL + /// An iterator over `mycrate::Collection`'s values. | error: item in documentation is missing backticks - --> tests/ui/doc/doc-fixable.rs:240:34 + --> tests/ui/doc/doc-fixable.rs:239:34 | LL | /// Foo \[bar\] \[baz\] \[qux\]. DocMarkdownLint | ^^^^^^^^^^^^^^^ @@ -349,7 +349,7 @@ LL + /// Foo \[bar\] \[baz\] \[qux\]. `DocMarkdownLint` | error: item in documentation is missing backticks - --> tests/ui/doc/doc-fixable.rs:264:22 + --> tests/ui/doc/doc-fixable.rs:263:22 | LL | /// There is no try (do() or do_not()). | ^^^^ @@ -361,7 +361,7 @@ LL + /// There is no try (`do()` or do_not()). | error: item in documentation is missing backticks - --> tests/ui/doc/doc-fixable.rs:264:30 + --> tests/ui/doc/doc-fixable.rs:263:30 | LL | /// There is no try (do() or do_not()). | ^^^^^^^^ @@ -373,7 +373,7 @@ LL + /// There is no try (do() or `do_not()`). | error: item in documentation is missing backticks - --> tests/ui/doc/doc-fixable.rs:269:5 + --> tests/ui/doc/doc-fixable.rs:268:5 | LL | /// ABes | ^^^^ @@ -385,7 +385,7 @@ LL + /// `ABes` | error: item in documentation is missing backticks - --> tests/ui/doc/doc-fixable.rs:276:9 + --> tests/ui/doc/doc-fixable.rs:275:9 | LL | /// foo() | ^^^^^ @@ -397,7 +397,7 @@ LL + /// `foo()` | error: you should put bare URLs between `<`/`>` or make a proper Markdown link - --> tests/ui/doc/doc-fixable.rs:281:5 + --> tests/ui/doc/doc-fixable.rs:280:5 | LL | /// https://github.com/rust-lang/rust-clippy/pull/12836 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `` diff --git a/src/tools/clippy/tests/ui/doc_unsafe.rs b/src/tools/clippy/tests/ui/doc_unsafe.rs index 1bdf01e4e22e9..7146fd7941ab0 100644 --- a/src/tools/clippy/tests/ui/doc_unsafe.rs +++ b/src/tools/clippy/tests/ui/doc_unsafe.rs @@ -103,7 +103,7 @@ macro_rules! very_unsafe { /// /// Please keep the seat belt fastened pub unsafe fn drive() { - whee() + unsafe { whee() } } }; } diff --git a/src/tools/clippy/tests/ui/double_ended_iterator_last.fixed b/src/tools/clippy/tests/ui/double_ended_iterator_last.fixed index 17d0d71a88545..be31ee5fb4862 100644 --- a/src/tools/clippy/tests/ui/double_ended_iterator_last.fixed +++ b/src/tools/clippy/tests/ui/double_ended_iterator_last.fixed @@ -52,31 +52,51 @@ fn main() { let _ = CustomLast.last(); } +// Should not be linted because applying the lint would move the original iterator. This can only be +// linted if the iterator is used thereafter. fn issue_14139() { let mut index = [true, true, false, false, false, true].iter(); - let mut subindex = index.by_ref().take(3); - let _ = subindex.next_back(); //~ ERROR: called `Iterator::last` on a `DoubleEndedIterator` + let subindex = index.by_ref().take(3); + let _ = subindex.last(); + let _ = index.next(); let mut index = [true, true, false, false, false, true].iter(); let mut subindex = index.by_ref().take(3); - let _ = subindex.next_back(); //~ ERROR: called `Iterator::last` on a `DoubleEndedIterator` + let _ = subindex.last(); + let _ = index.next(); let mut index = [true, true, false, false, false, true].iter(); let mut subindex = index.by_ref().take(3); let subindex = &mut subindex; - let _ = subindex.next_back(); //~ ERROR: called `Iterator::last` on a `DoubleEndedIterator` + let _ = subindex.last(); + let _ = index.next(); let mut index = [true, true, false, false, false, true].iter(); let mut subindex = index.by_ref().take(3); let subindex = &mut subindex; - let _ = subindex.next_back(); //~ ERROR: called `Iterator::last` on a `DoubleEndedIterator` + let _ = subindex.last(); + let _ = index.next(); let mut index = [true, true, false, false, false, true].iter(); - let (mut subindex, _) = (index.by_ref().take(3), 42); - let _ = subindex.next_back(); //~ ERROR: called `Iterator::last` on a `DoubleEndedIterator` + let (subindex, _) = (index.by_ref().take(3), 42); + let _ = subindex.last(); + let _ = index.next(); } fn drop_order() { + struct DropDeIterator(std::vec::IntoIter); + impl Iterator for DropDeIterator { + type Item = S; + fn next(&mut self) -> Option { + self.0.next() + } + } + impl DoubleEndedIterator for DropDeIterator { + fn next_back(&mut self) -> Option { + self.0.next_back() + } + } + struct S(&'static str); impl std::ops::Drop for S { fn drop(&mut self) { @@ -85,8 +105,19 @@ fn drop_order() { } let v = vec![S("one"), S("two"), S("three")]; - let mut v = v.into_iter(); + let mut v = DropDeIterator(v.into_iter()); println!("Last element is {}", v.next_back().unwrap().0); //~^ ERROR: called `Iterator::last` on a `DoubleEndedIterator` println!("Done"); } + +fn issue_14444() { + let mut squares = vec![]; + let last_square = [1, 2, 3] + .into_iter() + .map(|x| { + squares.push(x * x); + Some(x * x) + }) + .last(); +} diff --git a/src/tools/clippy/tests/ui/double_ended_iterator_last.rs b/src/tools/clippy/tests/ui/double_ended_iterator_last.rs index 41bc669b1719f..30864e15bce7e 100644 --- a/src/tools/clippy/tests/ui/double_ended_iterator_last.rs +++ b/src/tools/clippy/tests/ui/double_ended_iterator_last.rs @@ -52,31 +52,51 @@ fn main() { let _ = CustomLast.last(); } +// Should not be linted because applying the lint would move the original iterator. This can only be +// linted if the iterator is used thereafter. fn issue_14139() { let mut index = [true, true, false, false, false, true].iter(); let subindex = index.by_ref().take(3); - let _ = subindex.last(); //~ ERROR: called `Iterator::last` on a `DoubleEndedIterator` + let _ = subindex.last(); + let _ = index.next(); let mut index = [true, true, false, false, false, true].iter(); let mut subindex = index.by_ref().take(3); - let _ = subindex.last(); //~ ERROR: called `Iterator::last` on a `DoubleEndedIterator` + let _ = subindex.last(); + let _ = index.next(); let mut index = [true, true, false, false, false, true].iter(); let mut subindex = index.by_ref().take(3); let subindex = &mut subindex; - let _ = subindex.last(); //~ ERROR: called `Iterator::last` on a `DoubleEndedIterator` + let _ = subindex.last(); + let _ = index.next(); let mut index = [true, true, false, false, false, true].iter(); let mut subindex = index.by_ref().take(3); let subindex = &mut subindex; - let _ = subindex.last(); //~ ERROR: called `Iterator::last` on a `DoubleEndedIterator` + let _ = subindex.last(); + let _ = index.next(); let mut index = [true, true, false, false, false, true].iter(); let (subindex, _) = (index.by_ref().take(3), 42); - let _ = subindex.last(); //~ ERROR: called `Iterator::last` on a `DoubleEndedIterator` + let _ = subindex.last(); + let _ = index.next(); } fn drop_order() { + struct DropDeIterator(std::vec::IntoIter); + impl Iterator for DropDeIterator { + type Item = S; + fn next(&mut self) -> Option { + self.0.next() + } + } + impl DoubleEndedIterator for DropDeIterator { + fn next_back(&mut self) -> Option { + self.0.next_back() + } + } + struct S(&'static str); impl std::ops::Drop for S { fn drop(&mut self) { @@ -85,8 +105,19 @@ fn drop_order() { } let v = vec![S("one"), S("two"), S("three")]; - let v = v.into_iter(); + let v = DropDeIterator(v.into_iter()); println!("Last element is {}", v.last().unwrap().0); //~^ ERROR: called `Iterator::last` on a `DoubleEndedIterator` println!("Done"); } + +fn issue_14444() { + let mut squares = vec![]; + let last_square = [1, 2, 3] + .into_iter() + .map(|x| { + squares.push(x * x); + Some(x * x) + }) + .last(); +} diff --git a/src/tools/clippy/tests/ui/double_ended_iterator_last.stderr b/src/tools/clippy/tests/ui/double_ended_iterator_last.stderr index 1702a24d7a055..72a6ead47a931 100644 --- a/src/tools/clippy/tests/ui/double_ended_iterator_last.stderr +++ b/src/tools/clippy/tests/ui/double_ended_iterator_last.stderr @@ -18,55 +18,7 @@ LL | let _ = DeIterator.last(); | help: try: `next_back()` error: called `Iterator::last` on a `DoubleEndedIterator`; this will needlessly iterate the entire iterator - --> tests/ui/double_ended_iterator_last.rs:58:13 - | -LL | let _ = subindex.last(); - | ^^^^^^^^^^^^^^^ - | -help: try - | -LL ~ let mut subindex = index.by_ref().take(3); -LL ~ let _ = subindex.next_back(); - | - -error: called `Iterator::last` on a `DoubleEndedIterator`; this will needlessly iterate the entire iterator - --> tests/ui/double_ended_iterator_last.rs:62:13 - | -LL | let _ = subindex.last(); - | ^^^^^^^^^------ - | | - | help: try: `next_back()` - -error: called `Iterator::last` on a `DoubleEndedIterator`; this will needlessly iterate the entire iterator - --> tests/ui/double_ended_iterator_last.rs:67:13 - | -LL | let _ = subindex.last(); - | ^^^^^^^^^------ - | | - | help: try: `next_back()` - -error: called `Iterator::last` on a `DoubleEndedIterator`; this will needlessly iterate the entire iterator - --> tests/ui/double_ended_iterator_last.rs:72:13 - | -LL | let _ = subindex.last(); - | ^^^^^^^^^------ - | | - | help: try: `next_back()` - -error: called `Iterator::last` on a `DoubleEndedIterator`; this will needlessly iterate the entire iterator - --> tests/ui/double_ended_iterator_last.rs:76:13 - | -LL | let _ = subindex.last(); - | ^^^^^^^^^^^^^^^ - | -help: try - | -LL ~ let (mut subindex, _) = (index.by_ref().take(3), 42); -LL ~ let _ = subindex.next_back(); - | - -error: called `Iterator::last` on a `DoubleEndedIterator`; this will needlessly iterate the entire iterator - --> tests/ui/double_ended_iterator_last.rs:89:36 + --> tests/ui/double_ended_iterator_last.rs:109:36 | LL | println!("Last element is {}", v.last().unwrap().0); | ^^^^^^^^ @@ -74,9 +26,9 @@ LL | println!("Last element is {}", v.last().unwrap().0); = note: this change will alter drop order which may be undesirable help: try | -LL ~ let mut v = v.into_iter(); +LL ~ let mut v = DropDeIterator(v.into_iter()); LL ~ println!("Last element is {}", v.next_back().unwrap().0); | -error: aborting due to 8 previous errors +error: aborting due to 3 previous errors diff --git a/src/tools/clippy/tests/ui/double_ended_iterator_last_unfixable.rs b/src/tools/clippy/tests/ui/double_ended_iterator_last_unfixable.rs index 3f125c7f20c1e..e9218bbb40940 100644 --- a/src/tools/clippy/tests/ui/double_ended_iterator_last_unfixable.rs +++ b/src/tools/clippy/tests/ui/double_ended_iterator_last_unfixable.rs @@ -1,13 +1,29 @@ //@no-rustfix #![warn(clippy::double_ended_iterator_last)] +// Should not be linted because applying the lint would move the original iterator. This can only be +// linted if the iterator is used thereafter. fn main() { let mut index = [true, true, false, false, false, true].iter(); let subindex = (index.by_ref().take(3), 42); - let _ = subindex.0.last(); //~ ERROR: called `Iterator::last` on a `DoubleEndedIterator` + let _ = subindex.0.last(); + let _ = index.next(); } fn drop_order() { + struct DropDeIterator(std::vec::IntoIter); + impl Iterator for DropDeIterator { + type Item = S; + fn next(&mut self) -> Option { + self.0.next() + } + } + impl DoubleEndedIterator for DropDeIterator { + fn next_back(&mut self) -> Option { + self.0.next_back() + } + } + struct S(&'static str); impl std::ops::Drop for S { fn drop(&mut self) { @@ -16,7 +32,7 @@ fn drop_order() { } let v = vec![S("one"), S("two"), S("three")]; - let v = (v.into_iter(), 42); + let v = (DropDeIterator(v.into_iter()), 42); println!("Last element is {}", v.0.last().unwrap().0); //~^ ERROR: called `Iterator::last` on a `DoubleEndedIterator` println!("Done"); diff --git a/src/tools/clippy/tests/ui/double_ended_iterator_last_unfixable.stderr b/src/tools/clippy/tests/ui/double_ended_iterator_last_unfixable.stderr index f4be757d00d29..e330a22a35489 100644 --- a/src/tools/clippy/tests/ui/double_ended_iterator_last_unfixable.stderr +++ b/src/tools/clippy/tests/ui/double_ended_iterator_last_unfixable.stderr @@ -1,21 +1,5 @@ error: called `Iterator::last` on a `DoubleEndedIterator`; this will needlessly iterate the entire iterator - --> tests/ui/double_ended_iterator_last_unfixable.rs:7:13 - | -LL | let _ = subindex.0.last(); - | ^^^^^^^^^^^------ - | | - | help: try: `next_back()` - | -note: this must be made mutable to use `.next_back()` - --> tests/ui/double_ended_iterator_last_unfixable.rs:7:13 - | -LL | let _ = subindex.0.last(); - | ^^^^^^^^^^ - = note: `-D clippy::double-ended-iterator-last` implied by `-D warnings` - = help: to override `-D warnings` add `#[allow(clippy::double_ended_iterator_last)]` - -error: called `Iterator::last` on a `DoubleEndedIterator`; this will needlessly iterate the entire iterator - --> tests/ui/double_ended_iterator_last_unfixable.rs:20:36 + --> tests/ui/double_ended_iterator_last_unfixable.rs:36:36 | LL | println!("Last element is {}", v.0.last().unwrap().0); | ^^^^------ @@ -24,10 +8,12 @@ LL | println!("Last element is {}", v.0.last().unwrap().0); | = note: this change will alter drop order which may be undesirable note: this must be made mutable to use `.next_back()` - --> tests/ui/double_ended_iterator_last_unfixable.rs:20:36 + --> tests/ui/double_ended_iterator_last_unfixable.rs:36:36 | LL | println!("Last element is {}", v.0.last().unwrap().0); | ^^^ + = note: `-D clippy::double-ended-iterator-last` implied by `-D warnings` + = help: to override `-D warnings` add `#[allow(clippy::double_ended_iterator_last)]` -error: aborting due to 2 previous errors +error: aborting due to 1 previous error diff --git a/src/tools/clippy/tests/ui/eager_transmute.fixed b/src/tools/clippy/tests/ui/eager_transmute.fixed index 14cbb6113e62f..47a32ec836cc9 100644 --- a/src/tools/clippy/tests/ui/eager_transmute.fixed +++ b/src/tools/clippy/tests/ui/eager_transmute.fixed @@ -71,8 +71,10 @@ fn f(op: u8, op2: Data, unrelated: u8) { } unsafe fn f2(op: u8) { - (op < 4).then(|| std::mem::transmute::<_, Opcode>(op)); - //~^ eager_transmute + unsafe { + (op < 4).then(|| std::mem::transmute::<_, Opcode>(op)); + //~^ eager_transmute + } } #[rustc_layout_scalar_valid_range_end(254)] diff --git a/src/tools/clippy/tests/ui/eager_transmute.rs b/src/tools/clippy/tests/ui/eager_transmute.rs index 48d7d50cdaef8..906cd7bccc86f 100644 --- a/src/tools/clippy/tests/ui/eager_transmute.rs +++ b/src/tools/clippy/tests/ui/eager_transmute.rs @@ -71,8 +71,10 @@ fn f(op: u8, op2: Data, unrelated: u8) { } unsafe fn f2(op: u8) { - (op < 4).then_some(std::mem::transmute::<_, Opcode>(op)); - //~^ eager_transmute + unsafe { + (op < 4).then_some(std::mem::transmute::<_, Opcode>(op)); + //~^ eager_transmute + } } #[rustc_layout_scalar_valid_range_end(254)] diff --git a/src/tools/clippy/tests/ui/eager_transmute.stderr b/src/tools/clippy/tests/ui/eager_transmute.stderr index 54850d110eb26..c719ca8adc12e 100644 --- a/src/tools/clippy/tests/ui/eager_transmute.stderr +++ b/src/tools/clippy/tests/ui/eager_transmute.stderr @@ -157,19 +157,19 @@ LL + let _: Option = (..=3).contains(&op).then(|| unsafe { std::mem: | error: this transmute is always evaluated eagerly, even if the condition is false - --> tests/ui/eager_transmute.rs:74:24 + --> tests/ui/eager_transmute.rs:75:28 | -LL | (op < 4).then_some(std::mem::transmute::<_, Opcode>(op)); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +LL | (op < 4).then_some(std::mem::transmute::<_, Opcode>(op)); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | help: consider using `bool::then` to only transmute if the condition holds | -LL - (op < 4).then_some(std::mem::transmute::<_, Opcode>(op)); -LL + (op < 4).then(|| std::mem::transmute::<_, Opcode>(op)); +LL - (op < 4).then_some(std::mem::transmute::<_, Opcode>(op)); +LL + (op < 4).then(|| std::mem::transmute::<_, Opcode>(op)); | error: this transmute is always evaluated eagerly, even if the condition is false - --> tests/ui/eager_transmute.rs:104:62 + --> tests/ui/eager_transmute.rs:106:62 | LL | let _: Option> = (v1 > 0).then_some(unsafe { std::mem::transmute(v1) }); | ^^^^^^^^^^^^^^^^^^^^^^^ @@ -181,7 +181,7 @@ LL + let _: Option> = (v1 > 0).then(|| unsafe { std::mem::transm | error: this transmute is always evaluated eagerly, even if the condition is false - --> tests/ui/eager_transmute.rs:111:86 + --> tests/ui/eager_transmute.rs:113:86 | LL | let _: Option = (v2 < NonZero::new(255u8).unwrap()).then_some(unsafe { std::mem::transmute(v2) }); | ^^^^^^^^^^^^^^^^^^^^^^^ @@ -193,7 +193,7 @@ LL + let _: Option = (v2 < NonZero::new(255u8).unwrap()).then(|| u | error: this transmute is always evaluated eagerly, even if the condition is false - --> tests/ui/eager_transmute.rs:118:93 + --> tests/ui/eager_transmute.rs:120:93 | LL | let _: Option = (v2 < NonZero::new(255u8).unwrap()).then_some(unsafe { std::mem::transmute(v2) }); | ^^^^^^^^^^^^^^^^^^^^^^^ diff --git a/src/tools/clippy/tests/ui/empty_docs.rs b/src/tools/clippy/tests/ui/empty_docs.rs index d7768e07901ae..57f8976cd6a79 100644 --- a/src/tools/clippy/tests/ui/empty_docs.rs +++ b/src/tools/clippy/tests/ui/empty_docs.rs @@ -84,7 +84,7 @@ mod issue_12377 { use proc_macro_attr::with_empty_docs; #[with_empty_docs] - extern "C" { + unsafe extern "C" { type Test; } diff --git a/src/tools/clippy/tests/ui/empty_enum_variants_with_brackets.fixed b/src/tools/clippy/tests/ui/empty_enum_variants_with_brackets.fixed index 885f6a50025e9..abdf6ca5cb61e 100644 --- a/src/tools/clippy/tests/ui/empty_enum_variants_with_brackets.fixed +++ b/src/tools/clippy/tests/ui/empty_enum_variants_with_brackets.fixed @@ -6,8 +6,7 @@ pub enum PublicTestEnum { NonEmptyParentheses(i32, i32), // No error EmptyBraces, //~^ empty_enum_variants_with_brackets - EmptyParentheses, - //~^ empty_enum_variants_with_brackets + EmptyParentheses(), // No error as enum is pub } enum TestEnum { @@ -20,6 +19,67 @@ enum TestEnum { AnotherEnum, // No error } +mod issue12551 { + enum EvenOdd { + // Used as functions -> no error + Even(), + Odd(), + // Not used as a function + Unknown, + //~^ empty_enum_variants_with_brackets + } + + fn even_odd(x: i32) -> EvenOdd { + (x % 2 == 0).then(EvenOdd::Even).unwrap_or_else(EvenOdd::Odd) + } + + fn natural_number(x: i32) -> NaturalOrNot { + (x > 0) + .then(NaturalOrNot::Natural) + .unwrap_or_else(NaturalOrNot::NotNatural) + } + + enum NaturalOrNot { + // Used as functions -> no error + Natural(), + NotNatural(), + // Not used as a function + Unknown, + //~^ empty_enum_variants_with_brackets + } + + enum RedundantParenthesesFunctionCall { + // Used as a function call but with redundant parentheses + Parentheses, + //~^ empty_enum_variants_with_brackets + // Not used as a function + NoParentheses, + } + + #[allow(clippy::no_effect)] + fn redundant_parentheses_function_call() { + // The parentheses in the below line are redundant. + RedundantParenthesesFunctionCall::Parentheses; + RedundantParenthesesFunctionCall::NoParentheses; + } + + // Same test as above but with usage of the enum occurring before the definition. + #[allow(clippy::no_effect)] + fn redundant_parentheses_function_call_2() { + // The parentheses in the below line are redundant. + RedundantParenthesesFunctionCall2::Parentheses; + RedundantParenthesesFunctionCall2::NoParentheses; + } + + enum RedundantParenthesesFunctionCall2 { + // Used as a function call but with redundant parentheses + Parentheses, + //~^ empty_enum_variants_with_brackets + // Not used as a function + NoParentheses, + } +} + enum TestEnumWithFeatures { NonEmptyBraces { #[cfg(feature = "thisisneverenabled")] @@ -28,4 +88,18 @@ enum TestEnumWithFeatures { NonEmptyParentheses(#[cfg(feature = "thisisneverenabled")] i32), // No error } +#[derive(Clone)] +enum Foo { + Variant1(i32), + Variant2, + Variant3, //~ ERROR: enum variant has empty brackets +} + +#[derive(Clone)] +pub enum PubFoo { + Variant1(i32), + Variant2, + Variant3(), +} + fn main() {} diff --git a/src/tools/clippy/tests/ui/empty_enum_variants_with_brackets.rs b/src/tools/clippy/tests/ui/empty_enum_variants_with_brackets.rs index 092712ee2ead4..63a5a8e9143e6 100644 --- a/src/tools/clippy/tests/ui/empty_enum_variants_with_brackets.rs +++ b/src/tools/clippy/tests/ui/empty_enum_variants_with_brackets.rs @@ -6,8 +6,7 @@ pub enum PublicTestEnum { NonEmptyParentheses(i32, i32), // No error EmptyBraces {}, //~^ empty_enum_variants_with_brackets - EmptyParentheses(), - //~^ empty_enum_variants_with_brackets + EmptyParentheses(), // No error as enum is pub } enum TestEnum { @@ -20,6 +19,67 @@ enum TestEnum { AnotherEnum, // No error } +mod issue12551 { + enum EvenOdd { + // Used as functions -> no error + Even(), + Odd(), + // Not used as a function + Unknown(), + //~^ empty_enum_variants_with_brackets + } + + fn even_odd(x: i32) -> EvenOdd { + (x % 2 == 0).then(EvenOdd::Even).unwrap_or_else(EvenOdd::Odd) + } + + fn natural_number(x: i32) -> NaturalOrNot { + (x > 0) + .then(NaturalOrNot::Natural) + .unwrap_or_else(NaturalOrNot::NotNatural) + } + + enum NaturalOrNot { + // Used as functions -> no error + Natural(), + NotNatural(), + // Not used as a function + Unknown(), + //~^ empty_enum_variants_with_brackets + } + + enum RedundantParenthesesFunctionCall { + // Used as a function call but with redundant parentheses + Parentheses(), + //~^ empty_enum_variants_with_brackets + // Not used as a function + NoParentheses, + } + + #[allow(clippy::no_effect)] + fn redundant_parentheses_function_call() { + // The parentheses in the below line are redundant. + RedundantParenthesesFunctionCall::Parentheses(); + RedundantParenthesesFunctionCall::NoParentheses; + } + + // Same test as above but with usage of the enum occurring before the definition. + #[allow(clippy::no_effect)] + fn redundant_parentheses_function_call_2() { + // The parentheses in the below line are redundant. + RedundantParenthesesFunctionCall2::Parentheses(); + RedundantParenthesesFunctionCall2::NoParentheses; + } + + enum RedundantParenthesesFunctionCall2 { + // Used as a function call but with redundant parentheses + Parentheses(), + //~^ empty_enum_variants_with_brackets + // Not used as a function + NoParentheses, + } +} + enum TestEnumWithFeatures { NonEmptyBraces { #[cfg(feature = "thisisneverenabled")] @@ -28,4 +88,18 @@ enum TestEnumWithFeatures { NonEmptyParentheses(#[cfg(feature = "thisisneverenabled")] i32), // No error } +#[derive(Clone)] +enum Foo { + Variant1(i32), + Variant2, + Variant3(), //~ ERROR: enum variant has empty brackets +} + +#[derive(Clone)] +pub enum PubFoo { + Variant1(i32), + Variant2, + Variant3(), +} + fn main() {} diff --git a/src/tools/clippy/tests/ui/empty_enum_variants_with_brackets.stderr b/src/tools/clippy/tests/ui/empty_enum_variants_with_brackets.stderr index a9ae3b476dd68..7fe85e829a351 100644 --- a/src/tools/clippy/tests/ui/empty_enum_variants_with_brackets.stderr +++ b/src/tools/clippy/tests/ui/empty_enum_variants_with_brackets.stderr @@ -9,7 +9,15 @@ LL | EmptyBraces {}, = help: remove the brackets error: enum variant has empty brackets - --> tests/ui/empty_enum_variants_with_brackets.rs:9:21 + --> tests/ui/empty_enum_variants_with_brackets.rs:15:16 + | +LL | EmptyBraces {}, + | ^^^ + | + = help: remove the brackets + +error: enum variant has empty brackets + --> tests/ui/empty_enum_variants_with_brackets.rs:17:21 | LL | EmptyParentheses(), | ^^ @@ -17,20 +25,58 @@ LL | EmptyParentheses(), = help: remove the brackets error: enum variant has empty brackets - --> tests/ui/empty_enum_variants_with_brackets.rs:16:16 + --> tests/ui/empty_enum_variants_with_brackets.rs:28:16 | -LL | EmptyBraces {}, - | ^^^ +LL | Unknown(), + | ^^ | = help: remove the brackets error: enum variant has empty brackets - --> tests/ui/empty_enum_variants_with_brackets.rs:18:21 + --> tests/ui/empty_enum_variants_with_brackets.rs:47:16 | -LL | EmptyParentheses(), - | ^^ +LL | Unknown(), + | ^^ + | + = help: remove the brackets + +error: enum variant has empty brackets + --> tests/ui/empty_enum_variants_with_brackets.rs:53:20 + | +LL | Parentheses(), + | ^^ + | +help: remove the brackets + | +LL ~ Parentheses, +LL | +... +LL | // The parentheses in the below line are redundant. +LL ~ RedundantParenthesesFunctionCall::Parentheses; + | + +error: enum variant has empty brackets + --> tests/ui/empty_enum_variants_with_brackets.rs:76:20 + | +LL | Parentheses(), + | ^^ + | +help: remove the brackets + | +LL ~ RedundantParenthesesFunctionCall2::Parentheses; +LL | RedundantParenthesesFunctionCall2::NoParentheses; +... +LL | // Used as a function call but with redundant parentheses +LL ~ Parentheses, + | + +error: enum variant has empty brackets + --> tests/ui/empty_enum_variants_with_brackets.rs:95:13 + | +LL | Variant3(), + | ^^ | = help: remove the brackets -error: aborting due to 4 previous errors +error: aborting due to 8 previous errors diff --git a/src/tools/clippy/tests/ui/empty_line_after/doc_comments.1.fixed b/src/tools/clippy/tests/ui/empty_line_after/doc_comments.1.fixed index e4ba09ea1d478..70ab235b694f5 100644 --- a/src/tools/clippy/tests/ui/empty_line_after/doc_comments.1.fixed +++ b/src/tools/clippy/tests/ui/empty_line_after/doc_comments.1.fixed @@ -142,4 +142,9 @@ impl Foo for LineComment { fn bar() {} } +//~v empty_line_after_doc_comments +/// Docs for this item. +// fn some_item() {} +impl LineComment {} // or any other nameless item kind + fn main() {} diff --git a/src/tools/clippy/tests/ui/empty_line_after/doc_comments.2.fixed b/src/tools/clippy/tests/ui/empty_line_after/doc_comments.2.fixed index a20f9bc20eb56..87c636c6ad2c7 100644 --- a/src/tools/clippy/tests/ui/empty_line_after/doc_comments.2.fixed +++ b/src/tools/clippy/tests/ui/empty_line_after/doc_comments.2.fixed @@ -152,4 +152,10 @@ impl Foo for LineComment { fn bar() {} } +//~v empty_line_after_doc_comments +// /// Docs for this item. +// fn some_item() {} + +impl LineComment {} // or any other nameless item kind + fn main() {} diff --git a/src/tools/clippy/tests/ui/empty_line_after/doc_comments.rs b/src/tools/clippy/tests/ui/empty_line_after/doc_comments.rs index 9e3ddfd5abe11..91e9c1ac0b6d3 100644 --- a/src/tools/clippy/tests/ui/empty_line_after/doc_comments.rs +++ b/src/tools/clippy/tests/ui/empty_line_after/doc_comments.rs @@ -155,4 +155,10 @@ impl Foo for LineComment { fn bar() {} } +//~v empty_line_after_doc_comments +/// Docs for this item. +// fn some_item() {} + +impl LineComment {} // or any other nameless item kind + fn main() {} diff --git a/src/tools/clippy/tests/ui/empty_line_after/doc_comments.stderr b/src/tools/clippy/tests/ui/empty_line_after/doc_comments.stderr index fe25ba9afcb90..ae8cb91ba12f7 100644 --- a/src/tools/clippy/tests/ui/empty_line_after/doc_comments.stderr +++ b/src/tools/clippy/tests/ui/empty_line_after/doc_comments.stderr @@ -87,7 +87,7 @@ LL | fn new_code() {} | ----------- the comment documents this function | = help: if the empty line is unintentional, remove it -help: if the doc comment should not document `new_code` comment it out +help: if the doc comment should not document function `new_code` then comment it out | LL | // /// docs for `old_code` | ++ @@ -107,7 +107,7 @@ LL | struct Multiple; | --------------- the comment documents this struct | = help: if the empty lines are unintentional, remove them -help: if the doc comment should not document `Multiple` comment it out +help: if the doc comment should not document struct `Multiple` then comment it out | LL ~ // /// Docs LL ~ // /// for OldA @@ -149,7 +149,7 @@ LL | fn new_code() {} | ----------- the comment documents this function | = help: if the empty line is unintentional, remove it -help: if the doc comment should not document `new_code` comment it out +help: if the doc comment should not document function `new_code` then comment it out | LL - /** LL + /* @@ -167,7 +167,7 @@ LL | fn new_code2() {} | ------------ the comment documents this function | = help: if the empty line is unintentional, remove it -help: if the doc comment should not document `new_code2` comment it out +help: if the doc comment should not document function `new_code2` then comment it out | LL | // /// Docs for `old_code2` | ++ @@ -183,10 +183,26 @@ LL | fn bar() {} | ------ the comment documents this function | = help: if the empty line is unintentional, remove it -help: if the doc comment should not document `bar` comment it out +help: if the doc comment should not document function `bar` then comment it out | LL | // /// comment on assoc item | ++ -error: aborting due to 11 previous errors +error: empty line after doc comment + --> tests/ui/empty_line_after/doc_comments.rs:159:1 + | +LL | / /// Docs for this item. +LL | | // fn some_item() {} +LL | | + | |_^ +LL | impl LineComment {} // or any other nameless item kind + | - the comment documents this implementation + | + = help: if the empty line is unintentional, remove it +help: if the doc comment should not document the following item then comment it out + | +LL | // /// Docs for this item. + | ++ + +error: aborting due to 12 previous errors diff --git a/src/tools/clippy/tests/ui/entry.fixed b/src/tools/clippy/tests/ui/entry.fixed index 69452a8d9a671..f2df9f0204ea6 100644 --- a/src/tools/clippy/tests/ui/entry.fixed +++ b/src/tools/clippy/tests/ui/entry.fixed @@ -226,4 +226,26 @@ fn issue11976() { } } +mod issue14449 { + use std::collections::BTreeMap; + + pub struct Meow { + map: BTreeMap, + } + + impl Meow { + fn pet(&self, _key: &str, _v: u32) -> u32 { + 42 + } + } + + pub fn f(meow: &Meow, x: String) { + if meow.map.contains_key(&x) { + let _ = meow.pet(&x, 1); + } else { + let _ = meow.pet(&x, 0); + } + } +} + fn main() {} diff --git a/src/tools/clippy/tests/ui/entry.rs b/src/tools/clippy/tests/ui/entry.rs index 3578324f01c58..166eea417ac23 100644 --- a/src/tools/clippy/tests/ui/entry.rs +++ b/src/tools/clippy/tests/ui/entry.rs @@ -232,4 +232,26 @@ fn issue11976() { } } +mod issue14449 { + use std::collections::BTreeMap; + + pub struct Meow { + map: BTreeMap, + } + + impl Meow { + fn pet(&self, _key: &str, _v: u32) -> u32 { + 42 + } + } + + pub fn f(meow: &Meow, x: String) { + if meow.map.contains_key(&x) { + let _ = meow.pet(&x, 1); + } else { + let _ = meow.pet(&x, 0); + } + } +} + fn main() {} diff --git a/src/tools/clippy/tests/ui/equatable_if_let.fixed b/src/tools/clippy/tests/ui/equatable_if_let.fixed index 166b1387ba265..ce8b67f9ca7b0 100644 --- a/src/tools/clippy/tests/ui/equatable_if_let.fixed +++ b/src/tools/clippy/tests/ui/equatable_if_let.fixed @@ -103,3 +103,39 @@ fn main() { external!({ if let 2 = $a {} }); } + +mod issue8710 { + fn str_ref(cs: &[char]) { + if matches!(cs.iter().next(), Some('i')) { + //~^ equatable_if_let + } else { + todo!(); + } + } + + fn i32_ref(cs: &[i32]) { + if matches!(cs.iter().next(), Some(1)) { + //~^ equatable_if_let + } else { + todo!(); + } + } + + fn enum_ref() { + #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)] + enum MyEnum { + A(i32), + B, + } + + fn get_enum() -> Option<&'static MyEnum> { + todo!() + } + + if matches!(get_enum(), Some(MyEnum::B)) { + //~^ equatable_if_let + } else { + todo!(); + } + } +} diff --git a/src/tools/clippy/tests/ui/equatable_if_let.rs b/src/tools/clippy/tests/ui/equatable_if_let.rs index 09c2483ae6d43..ff09533f26519 100644 --- a/src/tools/clippy/tests/ui/equatable_if_let.rs +++ b/src/tools/clippy/tests/ui/equatable_if_let.rs @@ -103,3 +103,39 @@ fn main() { external!({ if let 2 = $a {} }); } + +mod issue8710 { + fn str_ref(cs: &[char]) { + if let Some('i') = cs.iter().next() { + //~^ equatable_if_let + } else { + todo!(); + } + } + + fn i32_ref(cs: &[i32]) { + if let Some(1) = cs.iter().next() { + //~^ equatable_if_let + } else { + todo!(); + } + } + + fn enum_ref() { + #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)] + enum MyEnum { + A(i32), + B, + } + + fn get_enum() -> Option<&'static MyEnum> { + todo!() + } + + if let Some(MyEnum::B) = get_enum() { + //~^ equatable_if_let + } else { + todo!(); + } + } +} diff --git a/src/tools/clippy/tests/ui/equatable_if_let.stderr b/src/tools/clippy/tests/ui/equatable_if_let.stderr index 81e0e15a5c747..dd1832ad68b28 100644 --- a/src/tools/clippy/tests/ui/equatable_if_let.stderr +++ b/src/tools/clippy/tests/ui/equatable_if_let.stderr @@ -85,5 +85,23 @@ error: this pattern matching can be expressed using equality LL | if let inline!("abc") = "abc" { | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `"abc" == inline!("abc")` -error: aborting due to 14 previous errors +error: this pattern matching can be expressed using `matches!` + --> tests/ui/equatable_if_let.rs:109:12 + | +LL | if let Some('i') = cs.iter().next() { + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `matches!(cs.iter().next(), Some('i'))` + +error: this pattern matching can be expressed using `matches!` + --> tests/ui/equatable_if_let.rs:117:12 + | +LL | if let Some(1) = cs.iter().next() { + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `matches!(cs.iter().next(), Some(1))` + +error: this pattern matching can be expressed using `matches!` + --> tests/ui/equatable_if_let.rs:135:12 + | +LL | if let Some(MyEnum::B) = get_enum() { + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `matches!(get_enum(), Some(MyEnum::B))` + +error: aborting due to 17 previous errors diff --git a/src/tools/clippy/tests/ui/explicit_auto_deref.fixed b/src/tools/clippy/tests/ui/explicit_auto_deref.fixed index 7235f7d5b82af..ec6bed152e797 100644 --- a/src/tools/clippy/tests/ui/explicit_auto_deref.fixed +++ b/src/tools/clippy/tests/ui/explicit_auto_deref.fixed @@ -59,7 +59,7 @@ fn f_str_t(_: &str, _: T) {} fn f_box_t(_: &Box) {} -extern "C" { +unsafe extern "C" { fn var(_: u32, ...); } diff --git a/src/tools/clippy/tests/ui/explicit_auto_deref.rs b/src/tools/clippy/tests/ui/explicit_auto_deref.rs index c4d2b28ff4b5f..ca58c650d9ce9 100644 --- a/src/tools/clippy/tests/ui/explicit_auto_deref.rs +++ b/src/tools/clippy/tests/ui/explicit_auto_deref.rs @@ -59,7 +59,7 @@ fn f_str_t(_: &str, _: T) {} fn f_box_t(_: &Box) {} -extern "C" { +unsafe extern "C" { fn var(_: u32, ...); } diff --git a/src/tools/clippy/tests/ui/filter_map_bool_then_unfixable.rs b/src/tools/clippy/tests/ui/filter_map_bool_then_unfixable.rs new file mode 100644 index 0000000000000..68294292502ac --- /dev/null +++ b/src/tools/clippy/tests/ui/filter_map_bool_then_unfixable.rs @@ -0,0 +1,63 @@ +#![allow(clippy::question_mark, unused)] +#![warn(clippy::filter_map_bool_then)] +//@no-rustfix + +fn issue11617() { + let mut x: Vec = vec![0; 10]; + let _ = (0..x.len()).zip(x.clone().iter()).filter_map(|(i, v)| { + //~^ filter_map_bool_then + (x[i] != *v).then(|| { + x[i] = i; + i + }) + }); +} + +mod issue14368 { + + fn do_something(_: ()) -> bool { + true + } + + fn option_with_early_return(x: &[Option]) { + let _ = x.iter().filter_map(|&x| x?.then(|| do_something(()))); + //~^ filter_map_bool_then + let _ = x + .iter() + .filter_map(|&x| if let Some(x) = x { x } else { return None }.then(|| do_something(()))); + //~^ filter_map_bool_then + let _ = x.iter().filter_map(|&x| { + //~^ filter_map_bool_then + match x { + Some(x) => x, + None => return None, + } + .then(|| do_something(())) + }); + } + + #[derive(Copy, Clone)] + enum Foo { + One(bool), + Two, + Three(Option), + } + + fn nested_type_with_early_return(x: &[Foo]) { + let _ = x.iter().filter_map(|&x| { + //~^ filter_map_bool_then + match x { + Foo::One(x) => x, + Foo::Two => return None, + Foo::Three(inner) => { + if inner? == 0 { + return Some(false); + } else { + true + } + }, + } + .then(|| do_something(())) + }); + } +} diff --git a/src/tools/clippy/tests/ui/filter_map_bool_then_unfixable.stderr b/src/tools/clippy/tests/ui/filter_map_bool_then_unfixable.stderr new file mode 100644 index 0000000000000..2025958136ba6 --- /dev/null +++ b/src/tools/clippy/tests/ui/filter_map_bool_then_unfixable.stderr @@ -0,0 +1,65 @@ +error: usage of `bool::then` in `filter_map` + --> tests/ui/filter_map_bool_then_unfixable.rs:7:48 + | +LL | let _ = (0..x.len()).zip(x.clone().iter()).filter_map(|(i, v)| { + | ________________________________________________^ +LL | | +LL | | (x[i] != *v).then(|| { +LL | | x[i] = i; +LL | | i +LL | | }) +LL | | }); + | |______^ + | + = help: consider using `filter` then `map` instead + = note: `-D clippy::filter-map-bool-then` implied by `-D warnings` + = help: to override `-D warnings` add `#[allow(clippy::filter_map_bool_then)]` + +error: usage of `bool::then` in `filter_map` + --> tests/ui/filter_map_bool_then_unfixable.rs:23:26 + | +LL | let _ = x.iter().filter_map(|&x| x?.then(|| do_something(()))); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = help: consider using `filter` then `map` instead + +error: usage of `bool::then` in `filter_map` + --> tests/ui/filter_map_bool_then_unfixable.rs:27:14 + | +LL | .filter_map(|&x| if let Some(x) = x { x } else { return None }.then(|| do_something(()))); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = help: consider using `filter` then `map` instead + +error: usage of `bool::then` in `filter_map` + --> tests/ui/filter_map_bool_then_unfixable.rs:29:26 + | +LL | let _ = x.iter().filter_map(|&x| { + | __________________________^ +LL | | +LL | | match x { +LL | | Some(x) => x, +... | +LL | | .then(|| do_something(())) +LL | | }); + | |__________^ + | + = help: consider using `filter` then `map` instead + +error: usage of `bool::then` in `filter_map` + --> tests/ui/filter_map_bool_then_unfixable.rs:47:26 + | +LL | let _ = x.iter().filter_map(|&x| { + | __________________________^ +LL | | +LL | | match x { +LL | | Foo::One(x) => x, +... | +LL | | .then(|| do_something(())) +LL | | }); + | |__________^ + | + = help: consider using `filter` then `map` instead + +error: aborting due to 5 previous errors + diff --git a/src/tools/clippy/tests/ui/filter_map_next.rs b/src/tools/clippy/tests/ui/filter_map_next.rs index 2a2237ed16cf2..5414e01c87006 100644 --- a/src/tools/clippy/tests/ui/filter_map_next.rs +++ b/src/tools/clippy/tests/ui/filter_map_next.rs @@ -1,4 +1,4 @@ -#![warn(clippy::all, clippy::pedantic)] +#![warn(clippy::filter_map_next)] fn main() { let a = ["1", "lol", "3", "NaN", "5"]; diff --git a/src/tools/clippy/tests/ui/filter_map_next_fixable.fixed b/src/tools/clippy/tests/ui/filter_map_next_fixable.fixed index 285863ef340db..09c416041a4e9 100644 --- a/src/tools/clippy/tests/ui/filter_map_next_fixable.fixed +++ b/src/tools/clippy/tests/ui/filter_map_next_fixable.fixed @@ -1,5 +1,4 @@ -#![warn(clippy::all, clippy::pedantic)] -#![allow(unused)] +#![warn(clippy::filter_map_next)] fn main() { let a = ["1", "lol", "3", "NaN", "5"]; diff --git a/src/tools/clippy/tests/ui/filter_map_next_fixable.rs b/src/tools/clippy/tests/ui/filter_map_next_fixable.rs index af911689b7c72..3d686ef41d917 100644 --- a/src/tools/clippy/tests/ui/filter_map_next_fixable.rs +++ b/src/tools/clippy/tests/ui/filter_map_next_fixable.rs @@ -1,5 +1,4 @@ -#![warn(clippy::all, clippy::pedantic)] -#![allow(unused)] +#![warn(clippy::filter_map_next)] fn main() { let a = ["1", "lol", "3", "NaN", "5"]; diff --git a/src/tools/clippy/tests/ui/filter_map_next_fixable.stderr b/src/tools/clippy/tests/ui/filter_map_next_fixable.stderr index 707dec8687b1f..1002837732b86 100644 --- a/src/tools/clippy/tests/ui/filter_map_next_fixable.stderr +++ b/src/tools/clippy/tests/ui/filter_map_next_fixable.stderr @@ -1,5 +1,5 @@ error: called `filter_map(..).next()` on an `Iterator`. This is more succinctly expressed by calling `.find_map(..)` instead - --> tests/ui/filter_map_next_fixable.rs:7:32 + --> tests/ui/filter_map_next_fixable.rs:6:32 | LL | let element: Option = a.iter().filter_map(|s| s.parse().ok()).next(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `a.iter().find_map(|s| s.parse().ok())` @@ -8,7 +8,7 @@ LL | let element: Option = a.iter().filter_map(|s| s.parse().ok()).next = help: to override `-D warnings` add `#[allow(clippy::filter_map_next)]` error: called `filter_map(..).next()` on an `Iterator`. This is more succinctly expressed by calling `.find_map(..)` instead - --> tests/ui/filter_map_next_fixable.rs:21:26 + --> tests/ui/filter_map_next_fixable.rs:20:26 | LL | let _: Option = a.iter().filter_map(|s| s.parse().ok()).next(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `a.iter().find_map(|s| s.parse().ok())` diff --git a/src/tools/clippy/tests/ui/find_map.rs b/src/tools/clippy/tests/ui/find_map.rs index aba1f2cbe581e..a9a8508d5b7db 100644 --- a/src/tools/clippy/tests/ui/find_map.rs +++ b/src/tools/clippy/tests/ui/find_map.rs @@ -1,6 +1,5 @@ //@ check-pass -#![warn(clippy::all, clippy::pedantic)] #![allow(clippy::useless_vec)] #[derive(Debug, Copy, Clone)] diff --git a/src/tools/clippy/tests/ui/fn_params_excessive_bools.rs b/src/tools/clippy/tests/ui/fn_params_excessive_bools.rs index cc18708d25faf..25d25663d1e4d 100644 --- a/src/tools/clippy/tests/ui/fn_params_excessive_bools.rs +++ b/src/tools/clippy/tests/ui/fn_params_excessive_bools.rs @@ -1,7 +1,7 @@ #![warn(clippy::fn_params_excessive_bools)] #![allow(clippy::too_many_arguments)] -extern "C" { +unsafe extern "C" { // Should not lint, most of the time users have no control over extern function signatures fn f(_: bool, _: bool, _: bool, _: bool); } @@ -14,8 +14,8 @@ macro_rules! foo { foo!(); -#[no_mangle] -extern "C" fn k(_: bool, _: bool, _: bool, _: bool) {} +#[unsafe(no_mangle)] +unsafe extern "C" fn k(_: bool, _: bool, _: bool, _: bool) {} fn g(_: bool, _: bool, _: bool, _: bool) {} //~^ ERROR: more than 3 bools in function parameters fn h(_: bool, _: bool, _: bool) {} @@ -39,8 +39,8 @@ impl S { fn f(&self, _: bool, _: bool, _: bool, _: bool) {} //~^ ERROR: more than 3 bools in function parameters fn g(&self, _: bool, _: bool, _: bool) {} - #[no_mangle] - extern "C" fn h(_: bool, _: bool, _: bool, _: bool) {} + #[unsafe(no_mangle)] + unsafe extern "C" fn h(_: bool, _: bool, _: bool, _: bool) {} } impl Trait for S { diff --git a/src/tools/clippy/tests/ui/formatting.rs b/src/tools/clippy/tests/ui/formatting.rs index 4e84dcf7d5b70..009815633d759 100644 --- a/src/tools/clippy/tests/ui/formatting.rs +++ b/src/tools/clippy/tests/ui/formatting.rs @@ -1,6 +1,3 @@ -#![warn(clippy::all)] -#![allow(unused_variables)] -#![allow(unused_assignments)] #![allow(clippy::if_same_then_else)] #![allow(clippy::deref_addrof)] #![allow(clippy::nonminimal_bool)] diff --git a/src/tools/clippy/tests/ui/formatting.stderr b/src/tools/clippy/tests/ui/formatting.stderr index 972bd3a6a2e65..d9dc2a55f5b62 100644 --- a/src/tools/clippy/tests/ui/formatting.stderr +++ b/src/tools/clippy/tests/ui/formatting.stderr @@ -1,5 +1,5 @@ error: this looks like you are trying to use `.. -= ..`, but you really are doing `.. = (- ..)` - --> tests/ui/formatting.rs:16:6 + --> tests/ui/formatting.rs:13:6 | LL | a =- 35; | ^^^^ @@ -9,7 +9,7 @@ LL | a =- 35; = help: to override `-D warnings` add `#[allow(clippy::suspicious_assignment_formatting)]` error: this looks like you are trying to use `.. *= ..`, but you really are doing `.. = (* ..)` - --> tests/ui/formatting.rs:20:6 + --> tests/ui/formatting.rs:17:6 | LL | a =* &191; | ^^^^ @@ -17,7 +17,7 @@ LL | a =* &191; = note: to remove this lint, use either `*=` or `= *` error: this looks like you are trying to use `.. != ..`, but you really are doing `.. = (! ..)` - --> tests/ui/formatting.rs:26:6 + --> tests/ui/formatting.rs:23:6 | LL | b =! false; | ^^^^ @@ -25,17 +25,16 @@ LL | b =! false; = note: to remove this lint, use either `!=` or `= !` error: possibly missing a comma here - --> tests/ui/formatting.rs:38:19 + --> tests/ui/formatting.rs:35:19 | LL | -1, -2, -3 // <= no comma here | ^ | = note: to remove this lint, add a comma or write the expr in a single line - = note: `-D clippy::possible-missing-comma` implied by `-D warnings` - = help: to override `-D warnings` add `#[allow(clippy::possible_missing_comma)]` + = note: `#[deny(clippy::possible_missing_comma)]` on by default error: possibly missing a comma here - --> tests/ui/formatting.rs:45:19 + --> tests/ui/formatting.rs:42:19 | LL | -1, -2, -3 // <= no comma here | ^ @@ -43,7 +42,7 @@ LL | -1, -2, -3 // <= no comma here = note: to remove this lint, add a comma or write the expr in a single line error: possibly missing a comma here - --> tests/ui/formatting.rs:85:11 + --> tests/ui/formatting.rs:82:11 | LL | -1 | ^ diff --git a/src/tools/clippy/tests/ui/from_iter_instead_of_collect.fixed b/src/tools/clippy/tests/ui/from_iter_instead_of_collect.fixed index 8618004efb89f..be98b22779584 100644 --- a/src/tools/clippy/tests/ui/from_iter_instead_of_collect.fixed +++ b/src/tools/clippy/tests/ui/from_iter_instead_of_collect.fixed @@ -73,3 +73,46 @@ fn main() { for _i in [1, 2, 3].iter().collect::>() {} //~^ from_iter_instead_of_collect } + +fn issue14581() { + let nums = [0, 1, 2]; + let _ = &nums.iter().map(|&num| char::from_u32(num).unwrap()).collect::(); + //~^ from_iter_instead_of_collect +} + +fn test_implicit_generic_args(iter: impl Iterator + Copy) { + struct S<'l, T = i32, const A: usize = 3, const B: usize = 3> { + a: [&'l T; A], + b: [&'l T; B], + } + + impl<'l, T, const A: usize, const B: usize> FromIterator<&'l T> for S<'l, T, A, B> { + fn from_iter>(_: I) -> Self { + todo!() + } + } + + let _ = iter.collect::>(); + //~^ from_iter_instead_of_collect + + let _ = iter.collect::>(); + //~^ from_iter_instead_of_collect + + let _ = iter.collect::>(); + //~^ from_iter_instead_of_collect + + let _ = iter.collect::>(); + //~^ from_iter_instead_of_collect + + let _ = iter.collect::>(); + //~^ from_iter_instead_of_collect + + let _ = iter.collect::>(); + //~^ from_iter_instead_of_collect + + let _ = iter.collect::>(); + //~^ from_iter_instead_of_collect + + let _ = iter.collect::(); + //~^ from_iter_instead_of_collect +} diff --git a/src/tools/clippy/tests/ui/from_iter_instead_of_collect.rs b/src/tools/clippy/tests/ui/from_iter_instead_of_collect.rs index c46397e8ff560..ce20fef2ac337 100644 --- a/src/tools/clippy/tests/ui/from_iter_instead_of_collect.rs +++ b/src/tools/clippy/tests/ui/from_iter_instead_of_collect.rs @@ -73,3 +73,46 @@ fn main() { for _i in Vec::<&i32>::from_iter([1, 2, 3].iter()) {} //~^ from_iter_instead_of_collect } + +fn issue14581() { + let nums = [0, 1, 2]; + let _ = &String::from_iter(nums.iter().map(|&num| char::from_u32(num).unwrap())); + //~^ from_iter_instead_of_collect +} + +fn test_implicit_generic_args(iter: impl Iterator + Copy) { + struct S<'l, T = i32, const A: usize = 3, const B: usize = 3> { + a: [&'l T; A], + b: [&'l T; B], + } + + impl<'l, T, const A: usize, const B: usize> FromIterator<&'l T> for S<'l, T, A, B> { + fn from_iter>(_: I) -> Self { + todo!() + } + } + + let _ = >::from_iter(iter); + //~^ from_iter_instead_of_collect + + let _ = >::from_iter(iter); + //~^ from_iter_instead_of_collect + + let _ = >::from_iter(iter); + //~^ from_iter_instead_of_collect + + let _ = >::from_iter(iter); + //~^ from_iter_instead_of_collect + + let _ = >::from_iter(iter); + //~^ from_iter_instead_of_collect + + let _ = >::from_iter(iter); + //~^ from_iter_instead_of_collect + + let _ = >::from_iter(iter); + //~^ from_iter_instead_of_collect + + let _ = ::from_iter(iter); + //~^ from_iter_instead_of_collect +} diff --git a/src/tools/clippy/tests/ui/from_iter_instead_of_collect.stderr b/src/tools/clippy/tests/ui/from_iter_instead_of_collect.stderr index b46d97af152f6..ec11a375c0d87 100644 --- a/src/tools/clippy/tests/ui/from_iter_instead_of_collect.stderr +++ b/src/tools/clippy/tests/ui/from_iter_instead_of_collect.stderr @@ -91,5 +91,59 @@ error: usage of `FromIterator::from_iter` LL | for _i in Vec::<&i32>::from_iter([1, 2, 3].iter()) {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use `.collect()` instead of `::from_iter()`: `[1, 2, 3].iter().collect::>()` -error: aborting due to 15 previous errors +error: usage of `FromIterator::from_iter` + --> tests/ui/from_iter_instead_of_collect.rs:79:14 + | +LL | let _ = &String::from_iter(nums.iter().map(|&num| char::from_u32(num).unwrap())); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use `.collect()` instead of `::from_iter()`: `nums.iter().map(|&num| char::from_u32(num).unwrap()).collect::()` + +error: usage of `FromIterator::from_iter` + --> tests/ui/from_iter_instead_of_collect.rs:95:13 + | +LL | let _ = >::from_iter(iter); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use `.collect()` instead of `::from_iter()`: `iter.collect::>()` + +error: usage of `FromIterator::from_iter` + --> tests/ui/from_iter_instead_of_collect.rs:98:13 + | +LL | let _ = >::from_iter(iter); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use `.collect()` instead of `::from_iter()`: `iter.collect::>()` + +error: usage of `FromIterator::from_iter` + --> tests/ui/from_iter_instead_of_collect.rs:101:13 + | +LL | let _ = >::from_iter(iter); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use `.collect()` instead of `::from_iter()`: `iter.collect::>()` + +error: usage of `FromIterator::from_iter` + --> tests/ui/from_iter_instead_of_collect.rs:104:13 + | +LL | let _ = >::from_iter(iter); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use `.collect()` instead of `::from_iter()`: `iter.collect::>()` + +error: usage of `FromIterator::from_iter` + --> tests/ui/from_iter_instead_of_collect.rs:107:13 + | +LL | let _ = >::from_iter(iter); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use `.collect()` instead of `::from_iter()`: `iter.collect::>()` + +error: usage of `FromIterator::from_iter` + --> tests/ui/from_iter_instead_of_collect.rs:110:13 + | +LL | let _ = >::from_iter(iter); + | ^^^^^^^^^^^^^^^^^^^^^^^^^ help: use `.collect()` instead of `::from_iter()`: `iter.collect::>()` + +error: usage of `FromIterator::from_iter` + --> tests/ui/from_iter_instead_of_collect.rs:113:13 + | +LL | let _ = >::from_iter(iter); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use `.collect()` instead of `::from_iter()`: `iter.collect::>()` + +error: usage of `FromIterator::from_iter` + --> tests/ui/from_iter_instead_of_collect.rs:116:13 + | +LL | let _ = ::from_iter(iter); + | ^^^^^^^^^^^^^^^^^^^^ help: use `.collect()` instead of `::from_iter()`: `iter.collect::()` + +error: aborting due to 24 previous errors diff --git a/src/tools/clippy/tests/ui/functions.rs b/src/tools/clippy/tests/ui/functions.rs index 9c1ca8bf93009..ceaba392dc200 100644 --- a/src/tools/clippy/tests/ui/functions.rs +++ b/src/tools/clippy/tests/ui/functions.rs @@ -1,5 +1,3 @@ -#![warn(clippy::all)] -#![allow(dead_code, unused_unsafe)] #![allow(clippy::missing_safety_doc, clippy::uninlined_format_args)] // TOO_MANY_ARGUMENTS diff --git a/src/tools/clippy/tests/ui/functions.stderr b/src/tools/clippy/tests/ui/functions.stderr index c8770023f77a0..65cc627cc44c1 100644 --- a/src/tools/clippy/tests/ui/functions.stderr +++ b/src/tools/clippy/tests/ui/functions.stderr @@ -1,5 +1,5 @@ error: this function has too many arguments (8/7) - --> tests/ui/functions.rs:8:1 + --> tests/ui/functions.rs:6:1 | LL | fn bad(_one: u32, _two: u32, _three: &str, _four: bool, _five: f32, _six: f32, _seven: bool, _eight: ()) {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -8,7 +8,7 @@ LL | fn bad(_one: u32, _two: u32, _three: &str, _four: bool, _five: f32, _six: f = help: to override `-D warnings` add `#[allow(clippy::too_many_arguments)]` error: this function has too many arguments (8/7) - --> tests/ui/functions.rs:12:1 + --> tests/ui/functions.rs:10:1 | LL | / fn bad_multiline( LL | | @@ -20,88 +20,87 @@ LL | | ) { | |_^ error: this function has too many arguments (8/7) - --> tests/ui/functions.rs:48:5 + --> tests/ui/functions.rs:46:5 | LL | fn bad(_one: u32, _two: u32, _three: &str, _four: bool, _five: f32, _six: f32, _seven: bool, _eight: ()); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: this function has too many arguments (8/7) - --> tests/ui/functions.rs:58:5 + --> tests/ui/functions.rs:56:5 | LL | fn bad_method(_one: u32, _two: u32, _three: &str, _four: bool, _five: f32, _six: f32, _seven: bool, _eight: ()) {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: this public function might dereference a raw pointer but is not marked `unsafe` - --> tests/ui/functions.rs:68:34 + --> tests/ui/functions.rs:66:34 | LL | println!("{}", unsafe { *p }); | ^ | - = note: `-D clippy::not-unsafe-ptr-arg-deref` implied by `-D warnings` - = help: to override `-D warnings` add `#[allow(clippy::not_unsafe_ptr_arg_deref)]` + = note: `#[deny(clippy::not_unsafe_ptr_arg_deref)]` on by default error: this public function might dereference a raw pointer but is not marked `unsafe` - --> tests/ui/functions.rs:71:35 + --> tests/ui/functions.rs:69:35 | LL | println!("{:?}", unsafe { p.as_ref() }); | ^ error: this public function might dereference a raw pointer but is not marked `unsafe` - --> tests/ui/functions.rs:74:33 + --> tests/ui/functions.rs:72:33 | LL | unsafe { std::ptr::read(p) }; | ^ error: this public function might dereference a raw pointer but is not marked `unsafe` - --> tests/ui/functions.rs:86:30 + --> tests/ui/functions.rs:84:30 | LL | println!("{}", unsafe { *p }); | ^ error: this public function might dereference a raw pointer but is not marked `unsafe` - --> tests/ui/functions.rs:89:31 + --> tests/ui/functions.rs:87:31 | LL | println!("{:?}", unsafe { p.as_ref() }); | ^ error: this public function might dereference a raw pointer but is not marked `unsafe` - --> tests/ui/functions.rs:92:29 + --> tests/ui/functions.rs:90:29 | LL | unsafe { std::ptr::read(p) }; | ^ error: this public function might dereference a raw pointer but is not marked `unsafe` - --> tests/ui/functions.rs:99:30 + --> tests/ui/functions.rs:97:30 | LL | println!("{}", unsafe { *p }); | ^ error: this public function might dereference a raw pointer but is not marked `unsafe` - --> tests/ui/functions.rs:102:31 + --> tests/ui/functions.rs:100:31 | LL | println!("{:?}", unsafe { p.as_ref() }); | ^ error: this public function might dereference a raw pointer but is not marked `unsafe` - --> tests/ui/functions.rs:105:29 + --> tests/ui/functions.rs:103:29 | LL | unsafe { std::ptr::read(p) }; | ^ error: this public function might dereference a raw pointer but is not marked `unsafe` - --> tests/ui/functions.rs:115:34 + --> tests/ui/functions.rs:113:34 | LL | println!("{}", unsafe { *p }); | ^ error: this public function might dereference a raw pointer but is not marked `unsafe` - --> tests/ui/functions.rs:118:35 + --> tests/ui/functions.rs:116:35 | LL | println!("{:?}", unsafe { p.as_ref() }); | ^ error: this public function might dereference a raw pointer but is not marked `unsafe` - --> tests/ui/functions.rs:121:33 + --> tests/ui/functions.rs:119:33 | LL | unsafe { std::ptr::read(p) }; | ^ diff --git a/src/tools/clippy/tests/ui/if_not_else.fixed b/src/tools/clippy/tests/ui/if_not_else.fixed index d26a15156cd89..4e6f43e5671e8 100644 --- a/src/tools/clippy/tests/ui/if_not_else.fixed +++ b/src/tools/clippy/tests/ui/if_not_else.fixed @@ -1,4 +1,3 @@ -#![warn(clippy::all)] #![warn(clippy::if_not_else)] fn foo() -> bool { diff --git a/src/tools/clippy/tests/ui/if_not_else.rs b/src/tools/clippy/tests/ui/if_not_else.rs index 6171cf1164955..6cd2e3bd63fe9 100644 --- a/src/tools/clippy/tests/ui/if_not_else.rs +++ b/src/tools/clippy/tests/ui/if_not_else.rs @@ -1,4 +1,3 @@ -#![warn(clippy::all)] #![warn(clippy::if_not_else)] fn foo() -> bool { diff --git a/src/tools/clippy/tests/ui/if_not_else.stderr b/src/tools/clippy/tests/ui/if_not_else.stderr index f44dd0aabc863..824837bd52bb1 100644 --- a/src/tools/clippy/tests/ui/if_not_else.stderr +++ b/src/tools/clippy/tests/ui/if_not_else.stderr @@ -1,5 +1,5 @@ error: unnecessary boolean `not` operation - --> tests/ui/if_not_else.rs:12:5 + --> tests/ui/if_not_else.rs:11:5 | LL | / if !bla() { LL | | @@ -24,7 +24,7 @@ LL + } | error: unnecessary `!=` operation - --> tests/ui/if_not_else.rs:19:5 + --> tests/ui/if_not_else.rs:18:5 | LL | / if 4 != 5 { LL | | @@ -47,7 +47,7 @@ LL + } | error: unnecessary boolean `not` operation - --> tests/ui/if_not_else.rs:34:5 + --> tests/ui/if_not_else.rs:33:5 | LL | / if !(foo() && bla()) { LL | | @@ -79,7 +79,7 @@ LL + } | error: unnecessary boolean `not` operation - --> tests/ui/if_not_else.rs:53:5 + --> tests/ui/if_not_else.rs:52:5 | LL | / if !foo() { LL | | @@ -102,7 +102,7 @@ LL + } | error: unnecessary boolean `not` operation - --> tests/ui/if_not_else.rs:61:5 + --> tests/ui/if_not_else.rs:60:5 | LL | / if !bla() { LL | | @@ -125,7 +125,7 @@ LL + } | error: unnecessary boolean `not` operation - --> tests/ui/if_not_else.rs:72:5 + --> tests/ui/if_not_else.rs:71:5 | LL | / if !foo() { LL | | diff --git a/src/tools/clippy/tests/ui/ignore_without_reason.rs b/src/tools/clippy/tests/ui/ignore_without_reason.rs new file mode 100644 index 0000000000000..53ac34c27248e --- /dev/null +++ b/src/tools/clippy/tests/ui/ignore_without_reason.rs @@ -0,0 +1,14 @@ +#![warn(clippy::ignore_without_reason)] + +fn main() {} + +#[test] +fn unignored_test() {} + +#[test] +#[ignore = "Some good reason"] +fn ignored_with_reason() {} + +#[test] +#[ignore] //~ ignore_without_reason +fn ignored_without_reason() {} diff --git a/src/tools/clippy/tests/ui/ignore_without_reason.stderr b/src/tools/clippy/tests/ui/ignore_without_reason.stderr new file mode 100644 index 0000000000000..4c0210c2bbc08 --- /dev/null +++ b/src/tools/clippy/tests/ui/ignore_without_reason.stderr @@ -0,0 +1,12 @@ +error: `#[ignore]` without reason + --> tests/ui/ignore_without_reason.rs:13:1 + | +LL | #[ignore] + | ^^^^^^^^^ + | + = help: add a reason with `= ".."` + = note: `-D clippy::ignore-without-reason` implied by `-D warnings` + = help: to override `-D warnings` add `#[allow(clippy::ignore_without_reason)]` + +error: aborting due to 1 previous error + diff --git a/src/tools/clippy/tests/ui/implicit_return.fixed b/src/tools/clippy/tests/ui/implicit_return.fixed index 1cb639b60a9af..728c6e015c155 100644 --- a/src/tools/clippy/tests/ui/implicit_return.fixed +++ b/src/tools/clippy/tests/ui/implicit_return.fixed @@ -165,3 +165,46 @@ with_span!( x } ); + +fn desugared_closure_14446() { + let _ = async || return 0; + //~^ implicit_return + #[rustfmt::skip] + let _ = async || -> i32 { return 0 }; + //~^ implicit_return + let _ = async |a: i32| return a; + //~^ implicit_return + #[rustfmt::skip] + let _ = async |a: i32| { return a }; + //~^ implicit_return + + let _ = async || return 0; + let _ = async || -> i32 { return 0 }; + let _ = async |a: i32| return a; + #[rustfmt::skip] + let _ = async |a: i32| { return a; }; + + let _ = async || return foo().await; + //~^ implicit_return + let _ = async || { + foo().await; + return foo().await + }; + //~^^ implicit_return + #[rustfmt::skip] + let _ = async || { return foo().await }; + //~^ implicit_return + let _ = async || -> bool { return foo().await }; + //~^ implicit_return + + let _ = async || return foo().await; + let _ = async || { + foo().await; + return foo().await; + }; + #[rustfmt::skip] + let _ = async || { return foo().await; }; + let _ = async || -> bool { + return foo().await; + }; +} diff --git a/src/tools/clippy/tests/ui/implicit_return.rs b/src/tools/clippy/tests/ui/implicit_return.rs index 99d75e4987e47..3381fffb6e450 100644 --- a/src/tools/clippy/tests/ui/implicit_return.rs +++ b/src/tools/clippy/tests/ui/implicit_return.rs @@ -165,3 +165,46 @@ with_span!( x } ); + +fn desugared_closure_14446() { + let _ = async || 0; + //~^ implicit_return + #[rustfmt::skip] + let _ = async || -> i32 { 0 }; + //~^ implicit_return + let _ = async |a: i32| a; + //~^ implicit_return + #[rustfmt::skip] + let _ = async |a: i32| { a }; + //~^ implicit_return + + let _ = async || return 0; + let _ = async || -> i32 { return 0 }; + let _ = async |a: i32| return a; + #[rustfmt::skip] + let _ = async |a: i32| { return a; }; + + let _ = async || foo().await; + //~^ implicit_return + let _ = async || { + foo().await; + foo().await + }; + //~^^ implicit_return + #[rustfmt::skip] + let _ = async || { foo().await }; + //~^ implicit_return + let _ = async || -> bool { foo().await }; + //~^ implicit_return + + let _ = async || return foo().await; + let _ = async || { + foo().await; + return foo().await; + }; + #[rustfmt::skip] + let _ = async || { return foo().await; }; + let _ = async || -> bool { + return foo().await; + }; +} diff --git a/src/tools/clippy/tests/ui/implicit_return.stderr b/src/tools/clippy/tests/ui/implicit_return.stderr index 02044df47ac3c..05cd7f62583b1 100644 --- a/src/tools/clippy/tests/ui/implicit_return.stderr +++ b/src/tools/clippy/tests/ui/implicit_return.stderr @@ -183,5 +183,93 @@ help: add `return` as shown LL | return true | ++++++ -error: aborting due to 16 previous errors +error: missing `return` statement + --> tests/ui/implicit_return.rs:170:22 + | +LL | let _ = async || 0; + | ^ + | +help: add `return` as shown + | +LL | let _ = async || return 0; + | ++++++ + +error: missing `return` statement + --> tests/ui/implicit_return.rs:173:31 + | +LL | let _ = async || -> i32 { 0 }; + | ^ + | +help: add `return` as shown + | +LL | let _ = async || -> i32 { return 0 }; + | ++++++ + +error: missing `return` statement + --> tests/ui/implicit_return.rs:175:28 + | +LL | let _ = async |a: i32| a; + | ^ + | +help: add `return` as shown + | +LL | let _ = async |a: i32| return a; + | ++++++ + +error: missing `return` statement + --> tests/ui/implicit_return.rs:178:30 + | +LL | let _ = async |a: i32| { a }; + | ^ + | +help: add `return` as shown + | +LL | let _ = async |a: i32| { return a }; + | ++++++ + +error: missing `return` statement + --> tests/ui/implicit_return.rs:187:22 + | +LL | let _ = async || foo().await; + | ^^^^^ + | +help: add `return` as shown + | +LL | let _ = async || return foo().await; + | ++++++ + +error: missing `return` statement + --> tests/ui/implicit_return.rs:191:9 + | +LL | foo().await + | ^^^^^ + | +help: add `return` as shown + | +LL | return foo().await + | ++++++ + +error: missing `return` statement + --> tests/ui/implicit_return.rs:195:24 + | +LL | let _ = async || { foo().await }; + | ^^^^^ + | +help: add `return` as shown + | +LL | let _ = async || { return foo().await }; + | ++++++ + +error: missing `return` statement + --> tests/ui/implicit_return.rs:197:32 + | +LL | let _ = async || -> bool { foo().await }; + | ^^^^^ + | +help: add `return` as shown + | +LL | let _ = async || -> bool { return foo().await }; + | ++++++ + +error: aborting due to 24 previous errors diff --git a/src/tools/clippy/tests/ui/items_after_test_module/root_module.fixed b/src/tools/clippy/tests/ui/items_after_test_module/root_module.fixed index f036b368a6676..c00d6440f1c6a 100644 --- a/src/tools/clippy/tests/ui/items_after_test_module/root_module.fixed +++ b/src/tools/clippy/tests/ui/items_after_test_module/root_module.fixed @@ -1,4 +1,3 @@ -#![allow(unused)] #![warn(clippy::items_after_test_module)] fn main() {} diff --git a/src/tools/clippy/tests/ui/items_after_test_module/root_module.rs b/src/tools/clippy/tests/ui/items_after_test_module/root_module.rs index de0cbb120330e..23d191e3b13b0 100644 --- a/src/tools/clippy/tests/ui/items_after_test_module/root_module.rs +++ b/src/tools/clippy/tests/ui/items_after_test_module/root_module.rs @@ -1,4 +1,3 @@ -#![allow(unused)] #![warn(clippy::items_after_test_module)] fn main() {} diff --git a/src/tools/clippy/tests/ui/items_after_test_module/root_module.stderr b/src/tools/clippy/tests/ui/items_after_test_module/root_module.stderr index bed8d4bd5a00c..952489ff5ef9a 100644 --- a/src/tools/clippy/tests/ui/items_after_test_module/root_module.stderr +++ b/src/tools/clippy/tests/ui/items_after_test_module/root_module.stderr @@ -1,5 +1,5 @@ error: items after a test module - --> tests/ui/items_after_test_module/root_module.rs:12:1 + --> tests/ui/items_after_test_module/root_module.rs:11:1 | LL | mod tests { | ^^^^^^^^^ diff --git a/src/tools/clippy/tests/ui/iter_cloned_collect.fixed b/src/tools/clippy/tests/ui/iter_cloned_collect.fixed index e9fb44e89598e..231fac7cdde7d 100644 --- a/src/tools/clippy/tests/ui/iter_cloned_collect.fixed +++ b/src/tools/clippy/tests/ui/iter_cloned_collect.fixed @@ -29,3 +29,30 @@ fn main() { let _: Vec = v.to_vec(); //~^ iter_cloned_collect } + +mod issue9119 { + + use std::iter; + + #[derive(Clone)] + struct Example(u16); + + impl iter::FromIterator for Vec { + fn from_iter(iter: T) -> Self + where + T: IntoIterator, + { + iter.into_iter().flat_map(|e| e.0.to_le_bytes()).collect() + } + } + + fn foo() { + let examples = [Example(1), Example(0x1234)]; + let encoded: Vec = examples.iter().cloned().collect(); + assert_eq!(encoded, vec![0x01, 0x00, 0x34, 0x12]); + + let a = [&&String::new()]; + let v: Vec<&&String> = a.to_vec(); + //~^ iter_cloned_collect + } +} diff --git a/src/tools/clippy/tests/ui/iter_cloned_collect.rs b/src/tools/clippy/tests/ui/iter_cloned_collect.rs index c9b8abcc9a0d0..e73b6ecae8021 100644 --- a/src/tools/clippy/tests/ui/iter_cloned_collect.rs +++ b/src/tools/clippy/tests/ui/iter_cloned_collect.rs @@ -33,3 +33,30 @@ fn main() { let _: Vec = v.iter().copied().collect(); //~^ iter_cloned_collect } + +mod issue9119 { + + use std::iter; + + #[derive(Clone)] + struct Example(u16); + + impl iter::FromIterator for Vec { + fn from_iter(iter: T) -> Self + where + T: IntoIterator, + { + iter.into_iter().flat_map(|e| e.0.to_le_bytes()).collect() + } + } + + fn foo() { + let examples = [Example(1), Example(0x1234)]; + let encoded: Vec = examples.iter().cloned().collect(); + assert_eq!(encoded, vec![0x01, 0x00, 0x34, 0x12]); + + let a = [&&String::new()]; + let v: Vec<&&String> = a.iter().cloned().collect(); + //~^ iter_cloned_collect + } +} diff --git a/src/tools/clippy/tests/ui/iter_cloned_collect.stderr b/src/tools/clippy/tests/ui/iter_cloned_collect.stderr index 119698cb46343..f8a507943270d 100644 --- a/src/tools/clippy/tests/ui/iter_cloned_collect.stderr +++ b/src/tools/clippy/tests/ui/iter_cloned_collect.stderr @@ -36,5 +36,11 @@ error: called `iter().copied().collect()` on a slice to create a `Vec`. Calling LL | let _: Vec = v.iter().copied().collect(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `.to_vec()` -error: aborting due to 5 previous errors +error: called `iter().cloned().collect()` on a slice to create a `Vec`. Calling `to_vec()` is both faster and more readable + --> tests/ui/iter_cloned_collect.rs:59:33 + | +LL | let v: Vec<&&String> = a.iter().cloned().collect(); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `.to_vec()` + +error: aborting due to 6 previous errors diff --git a/src/tools/clippy/tests/ui/iter_kv_map.fixed b/src/tools/clippy/tests/ui/iter_kv_map.fixed index 7fcab6592e26c..874f749b33d02 100644 --- a/src/tools/clippy/tests/ui/iter_kv_map.fixed +++ b/src/tools/clippy/tests/ui/iter_kv_map.fixed @@ -166,3 +166,18 @@ fn msrv_1_54() { let _ = map.values().map(|v| v + 2).collect::>(); //~^ iter_kv_map } + +fn issue14595() { + pub struct Foo(BTreeMap); + + impl AsRef> for Foo { + fn as_ref(&self) -> &BTreeMap { + &self.0 + } + } + + let map = Foo(BTreeMap::default()); + + let _ = map.as_ref().values().copied().collect::>(); + //~^ iter_kv_map +} diff --git a/src/tools/clippy/tests/ui/iter_kv_map.rs b/src/tools/clippy/tests/ui/iter_kv_map.rs index b590aef7b8031..f570e3c32cb67 100644 --- a/src/tools/clippy/tests/ui/iter_kv_map.rs +++ b/src/tools/clippy/tests/ui/iter_kv_map.rs @@ -170,3 +170,18 @@ fn msrv_1_54() { let _ = map.iter().map(|(_, v)| v + 2).collect::>(); //~^ iter_kv_map } + +fn issue14595() { + pub struct Foo(BTreeMap); + + impl AsRef> for Foo { + fn as_ref(&self) -> &BTreeMap { + &self.0 + } + } + + let map = Foo(BTreeMap::default()); + + let _ = map.as_ref().iter().map(|(_, v)| v).copied().collect::>(); + //~^ iter_kv_map +} diff --git a/src/tools/clippy/tests/ui/iter_kv_map.stderr b/src/tools/clippy/tests/ui/iter_kv_map.stderr index 00d566ed14a28..31ee76c25b7a5 100644 --- a/src/tools/clippy/tests/ui/iter_kv_map.stderr +++ b/src/tools/clippy/tests/ui/iter_kv_map.stderr @@ -263,5 +263,11 @@ error: iterating on a map's values LL | let _ = map.iter().map(|(_, v)| v + 2).collect::>(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `map.values().map(|v| v + 2)` -error: aborting due to 38 previous errors +error: iterating on a map's values + --> tests/ui/iter_kv_map.rs:185:13 + | +LL | let _ = map.as_ref().iter().map(|(_, v)| v).copied().collect::>(); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `map.as_ref().values()` + +error: aborting due to 39 previous errors diff --git a/src/tools/clippy/tests/ui/iter_overeager_cloned.fixed b/src/tools/clippy/tests/ui/iter_overeager_cloned.fixed index 9999126902903..b0e548f179093 100644 --- a/src/tools/clippy/tests/ui/iter_overeager_cloned.fixed +++ b/src/tools/clippy/tests/ui/iter_overeager_cloned.fixed @@ -59,7 +59,7 @@ fn main() { iter: impl Iterator + 'a, target: String, ) -> impl Iterator + 'a { - iter.filter(move |&(&a, b)| a == 1 && b == &target).cloned() + iter.filter(move |&&(&a, ref b)| a == 1 && b == &target).cloned() //~^ iter_overeager_cloned } diff --git a/src/tools/clippy/tests/ui/iter_overeager_cloned.rs b/src/tools/clippy/tests/ui/iter_overeager_cloned.rs index 6a860dad5afd8..cedf62a6b4730 100644 --- a/src/tools/clippy/tests/ui/iter_overeager_cloned.rs +++ b/src/tools/clippy/tests/ui/iter_overeager_cloned.rs @@ -60,7 +60,7 @@ fn main() { iter: impl Iterator + 'a, target: String, ) -> impl Iterator + 'a { - iter.cloned().filter(move |(&a, b)| a == 1 && b == &target) + iter.cloned().filter(move |&(&a, ref b)| a == 1 && b == &target) //~^ iter_overeager_cloned } diff --git a/src/tools/clippy/tests/ui/iter_overeager_cloned.stderr b/src/tools/clippy/tests/ui/iter_overeager_cloned.stderr index f3239b59582e3..1616dec95b792 100644 --- a/src/tools/clippy/tests/ui/iter_overeager_cloned.stderr +++ b/src/tools/clippy/tests/ui/iter_overeager_cloned.stderr @@ -120,10 +120,10 @@ LL | let _ = vec.iter().cloned().find(f); error: unnecessarily eager cloning of iterator items --> tests/ui/iter_overeager_cloned.rs:63:9 | -LL | iter.cloned().filter(move |(&a, b)| a == 1 && b == &target) - | ^^^^------------------------------------------------------- +LL | iter.cloned().filter(move |&(&a, ref b)| a == 1 && b == &target) + | ^^^^------------------------------------------------------------ | | - | help: try: `.filter(move |&(&a, b)| a == 1 && b == &target).cloned()` + | help: try: `.filter(move |&&(&a, ref b)| a == 1 && b == &target).cloned()` error: unnecessarily eager cloning of iterator items --> tests/ui/iter_overeager_cloned.rs:75:13 diff --git a/src/tools/clippy/tests/ui/large_futures.fixed b/src/tools/clippy/tests/ui/large_futures.fixed index c2159c58de1ec..4c7215f0abeb0 100644 --- a/src/tools/clippy/tests/ui/large_futures.fixed +++ b/src/tools/clippy/tests/ui/large_futures.fixed @@ -1,7 +1,10 @@ +#![allow( + clippy::future_not_send, + clippy::manual_async_fn, + clippy::never_loop, + clippy::uninlined_format_args +)] #![warn(clippy::large_futures)] -#![allow(clippy::never_loop)] -#![allow(clippy::future_not_send)] -#![allow(clippy::manual_async_fn)] async fn big_fut(_arg: [u8; 1024 * 16]) {} diff --git a/src/tools/clippy/tests/ui/large_futures.rs b/src/tools/clippy/tests/ui/large_futures.rs index 567f6344afeac..2b5860583f5ec 100644 --- a/src/tools/clippy/tests/ui/large_futures.rs +++ b/src/tools/clippy/tests/ui/large_futures.rs @@ -1,7 +1,10 @@ +#![allow( + clippy::future_not_send, + clippy::manual_async_fn, + clippy::never_loop, + clippy::uninlined_format_args +)] #![warn(clippy::large_futures)] -#![allow(clippy::never_loop)] -#![allow(clippy::future_not_send)] -#![allow(clippy::manual_async_fn)] async fn big_fut(_arg: [u8; 1024 * 16]) {} diff --git a/src/tools/clippy/tests/ui/large_futures.stderr b/src/tools/clippy/tests/ui/large_futures.stderr index fd6ba4e3563de..4280c9e2af284 100644 --- a/src/tools/clippy/tests/ui/large_futures.stderr +++ b/src/tools/clippy/tests/ui/large_futures.stderr @@ -1,5 +1,5 @@ error: large future with a size of 16385 bytes - --> tests/ui/large_futures.rs:10:9 + --> tests/ui/large_futures.rs:13:9 | LL | big_fut([0u8; 1024 * 16]).await; | ^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider `Box::pin` on it: `Box::pin(big_fut([0u8; 1024 * 16]))` @@ -8,37 +8,37 @@ LL | big_fut([0u8; 1024 * 16]).await; = help: to override `-D warnings` add `#[allow(clippy::large_futures)]` error: large future with a size of 16386 bytes - --> tests/ui/large_futures.rs:13:5 + --> tests/ui/large_futures.rs:16:5 | LL | f.await | ^ help: consider `Box::pin` on it: `Box::pin(f)` error: large future with a size of 16387 bytes - --> tests/ui/large_futures.rs:18:9 + --> tests/ui/large_futures.rs:21:9 | LL | wait().await; | ^^^^^^ help: consider `Box::pin` on it: `Box::pin(wait())` error: large future with a size of 16387 bytes - --> tests/ui/large_futures.rs:24:13 + --> tests/ui/large_futures.rs:27:13 | LL | wait().await; | ^^^^^^ help: consider `Box::pin` on it: `Box::pin(wait())` error: large future with a size of 65540 bytes - --> tests/ui/large_futures.rs:32:5 + --> tests/ui/large_futures.rs:35:5 | LL | foo().await; | ^^^^^ help: consider `Box::pin` on it: `Box::pin(foo())` error: large future with a size of 49159 bytes - --> tests/ui/large_futures.rs:35:5 + --> tests/ui/large_futures.rs:38:5 | LL | calls_fut(fut).await; | ^^^^^^^^^^^^^^ help: consider `Box::pin` on it: `Box::pin(calls_fut(fut))` error: large future with a size of 65540 bytes - --> tests/ui/large_futures.rs:48:5 + --> tests/ui/large_futures.rs:51:5 | LL | / async { LL | | @@ -61,7 +61,7 @@ LL + }) | error: large future with a size of 65540 bytes - --> tests/ui/large_futures.rs:61:13 + --> tests/ui/large_futures.rs:64:13 | LL | / async { LL | | diff --git a/src/tools/clippy/tests/ui/len_without_is_empty_expect.rs b/src/tools/clippy/tests/ui/len_without_is_empty_expect.rs new file mode 100644 index 0000000000000..9d1245e2d02ad --- /dev/null +++ b/src/tools/clippy/tests/ui/len_without_is_empty_expect.rs @@ -0,0 +1,28 @@ +//@no-rustfix +#![allow(clippy::len_without_is_empty)] + +// Check that the lint expectation is fulfilled even if the lint is allowed at the type level. +pub struct Empty; + +impl Empty { + #[expect(clippy::len_without_is_empty)] + pub fn len(&self) -> usize { + 0 + } +} + +// Check that the lint expectation is not triggered if it should not +pub struct Empty2; + +impl Empty2 { + #[expect(clippy::len_without_is_empty)] //~ ERROR: this lint expectation is unfulfilled + pub fn len(&self) -> usize { + 0 + } + + pub fn is_empty(&self) -> bool { + false + } +} + +fn main() {} diff --git a/src/tools/clippy/tests/ui/len_without_is_empty_expect.stderr b/src/tools/clippy/tests/ui/len_without_is_empty_expect.stderr new file mode 100644 index 0000000000000..e96870f054e43 --- /dev/null +++ b/src/tools/clippy/tests/ui/len_without_is_empty_expect.stderr @@ -0,0 +1,11 @@ +error: this lint expectation is unfulfilled + --> tests/ui/len_without_is_empty_expect.rs:18:14 + | +LL | #[expect(clippy::len_without_is_empty)] + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = note: `-D unfulfilled-lint-expectations` implied by `-D warnings` + = help: to override `-D warnings` add `#[allow(unfulfilled_lint_expectations)]` + +error: aborting due to 1 previous error + diff --git a/src/tools/clippy/tests/ui/manual_abs_diff.fixed b/src/tools/clippy/tests/ui/manual_abs_diff.fixed new file mode 100644 index 0000000000000..f1b1278ea6d22 --- /dev/null +++ b/src/tools/clippy/tests/ui/manual_abs_diff.fixed @@ -0,0 +1,106 @@ +#![warn(clippy::manual_abs_diff)] + +use std::time::Duration; + +fn main() { + let a: usize = 5; + let b: usize = 3; + let c: usize = 8; + let d: usize = 11; + + let _ = a.abs_diff(b); + //~^ manual_abs_diff + let _ = b.abs_diff(a); + //~^ manual_abs_diff + + let _ = b.abs_diff(5); + //~^ manual_abs_diff + let _ = b.abs_diff(5); + //~^ manual_abs_diff + + let _ = a.abs_diff(b); + //~^ manual_abs_diff + let _ = b.abs_diff(a); + //~^ manual_abs_diff + + #[allow(arithmetic_overflow)] + { + let _ = if a > b { b - a } else { a - b }; + let _ = if a < b { a - b } else { b - a }; + } + + let _ = (a + b).abs_diff(c + d); + let _ = (c + d).abs_diff(a + b); + + const A: usize = 5; + const B: usize = 3; + // check const context + const _: usize = A.abs_diff(B); + //~^ manual_abs_diff + + let a = Duration::from_secs(3); + let b = Duration::from_secs(5); + let _ = a.abs_diff(b); + //~^ manual_abs_diff + + let a: i32 = 3; + let b: i32 = -5; + let _ = if a > b { a - b } else { b - a }; + let _ = a.abs_diff(b); + //~^ manual_abs_diff +} + +// FIXME: bunch of patterns that should be linted +fn fixme() { + let a: usize = 5; + let b: usize = 3; + let c: usize = 8; + let d: usize = 11; + + { + let out; + if a > b { + out = a - b; + } else { + out = b - a; + } + } + + { + let mut out = 0; + if a > b { + out = a - b; + } else if a < b { + out = b - a; + } + } + + #[allow(clippy::implicit_saturating_sub)] + let _ = if a > b { + a - b + } else if a < b { + b - a + } else { + 0 + }; + + let a: i32 = 3; + let b: i32 = 5; + let _: u32 = if a > b { a - b } else { b - a } as u32; +} + +fn non_primitive_ty() { + #[derive(Eq, PartialEq, PartialOrd)] + struct S(i32); + + impl std::ops::Sub for S { + type Output = S; + + fn sub(self, rhs: Self) -> Self::Output { + Self(self.0 - rhs.0) + } + } + + let (a, b) = (S(10), S(20)); + let _ = if a < b { b - a } else { a - b }; +} diff --git a/src/tools/clippy/tests/ui/manual_abs_diff.rs b/src/tools/clippy/tests/ui/manual_abs_diff.rs new file mode 100644 index 0000000000000..60ef819c12d30 --- /dev/null +++ b/src/tools/clippy/tests/ui/manual_abs_diff.rs @@ -0,0 +1,116 @@ +#![warn(clippy::manual_abs_diff)] + +use std::time::Duration; + +fn main() { + let a: usize = 5; + let b: usize = 3; + let c: usize = 8; + let d: usize = 11; + + let _ = if a > b { a - b } else { b - a }; + //~^ manual_abs_diff + let _ = if a < b { b - a } else { a - b }; + //~^ manual_abs_diff + + let _ = if 5 > b { 5 - b } else { b - 5 }; + //~^ manual_abs_diff + let _ = if b > 5 { b - 5 } else { 5 - b }; + //~^ manual_abs_diff + + let _ = if a >= b { a - b } else { b - a }; + //~^ manual_abs_diff + let _ = if a <= b { b - a } else { a - b }; + //~^ manual_abs_diff + + #[allow(arithmetic_overflow)] + { + let _ = if a > b { b - a } else { a - b }; + let _ = if a < b { a - b } else { b - a }; + } + + let _ = if (a + b) > (c + d) { + //~^ manual_abs_diff + (a + b) - (c + d) + } else { + (c + d) - (a + b) + }; + let _ = if (a + b) < (c + d) { + //~^ manual_abs_diff + (c + d) - (a + b) + } else { + (a + b) - (c + d) + }; + + const A: usize = 5; + const B: usize = 3; + // check const context + const _: usize = if A > B { A - B } else { B - A }; + //~^ manual_abs_diff + + let a = Duration::from_secs(3); + let b = Duration::from_secs(5); + let _ = if a > b { a - b } else { b - a }; + //~^ manual_abs_diff + + let a: i32 = 3; + let b: i32 = -5; + let _ = if a > b { a - b } else { b - a }; + let _ = if a > b { (a - b) as u32 } else { (b - a) as u32 }; + //~^ manual_abs_diff +} + +// FIXME: bunch of patterns that should be linted +fn fixme() { + let a: usize = 5; + let b: usize = 3; + let c: usize = 8; + let d: usize = 11; + + { + let out; + if a > b { + out = a - b; + } else { + out = b - a; + } + } + + { + let mut out = 0; + if a > b { + out = a - b; + } else if a < b { + out = b - a; + } + } + + #[allow(clippy::implicit_saturating_sub)] + let _ = if a > b { + a - b + } else if a < b { + b - a + } else { + 0 + }; + + let a: i32 = 3; + let b: i32 = 5; + let _: u32 = if a > b { a - b } else { b - a } as u32; +} + +fn non_primitive_ty() { + #[derive(Eq, PartialEq, PartialOrd)] + struct S(i32); + + impl std::ops::Sub for S { + type Output = S; + + fn sub(self, rhs: Self) -> Self::Output { + Self(self.0 - rhs.0) + } + } + + let (a, b) = (S(10), S(20)); + let _ = if a < b { b - a } else { a - b }; +} diff --git a/src/tools/clippy/tests/ui/manual_abs_diff.stderr b/src/tools/clippy/tests/ui/manual_abs_diff.stderr new file mode 100644 index 0000000000000..c14c1dc830fbd --- /dev/null +++ b/src/tools/clippy/tests/ui/manual_abs_diff.stderr @@ -0,0 +1,83 @@ +error: manual absolute difference pattern without using `abs_diff` + --> tests/ui/manual_abs_diff.rs:11:13 + | +LL | let _ = if a > b { a - b } else { b - a }; + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: replace with `abs_diff`: `a.abs_diff(b)` + | + = note: `-D clippy::manual-abs-diff` implied by `-D warnings` + = help: to override `-D warnings` add `#[allow(clippy::manual_abs_diff)]` + +error: manual absolute difference pattern without using `abs_diff` + --> tests/ui/manual_abs_diff.rs:13:13 + | +LL | let _ = if a < b { b - a } else { a - b }; + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: replace with `abs_diff`: `b.abs_diff(a)` + +error: manual absolute difference pattern without using `abs_diff` + --> tests/ui/manual_abs_diff.rs:16:13 + | +LL | let _ = if 5 > b { 5 - b } else { b - 5 }; + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: replace with `abs_diff`: `b.abs_diff(5)` + +error: manual absolute difference pattern without using `abs_diff` + --> tests/ui/manual_abs_diff.rs:18:13 + | +LL | let _ = if b > 5 { b - 5 } else { 5 - b }; + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: replace with `abs_diff`: `b.abs_diff(5)` + +error: manual absolute difference pattern without using `abs_diff` + --> tests/ui/manual_abs_diff.rs:21:13 + | +LL | let _ = if a >= b { a - b } else { b - a }; + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: replace with `abs_diff`: `a.abs_diff(b)` + +error: manual absolute difference pattern without using `abs_diff` + --> tests/ui/manual_abs_diff.rs:23:13 + | +LL | let _ = if a <= b { b - a } else { a - b }; + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: replace with `abs_diff`: `b.abs_diff(a)` + +error: manual absolute difference pattern without using `abs_diff` + --> tests/ui/manual_abs_diff.rs:32:13 + | +LL | let _ = if (a + b) > (c + d) { + | _____________^ +LL | | +LL | | (a + b) - (c + d) +LL | | } else { +LL | | (c + d) - (a + b) +LL | | }; + | |_____^ help: replace with `abs_diff`: `(a + b).abs_diff(c + d)` + +error: manual absolute difference pattern without using `abs_diff` + --> tests/ui/manual_abs_diff.rs:38:13 + | +LL | let _ = if (a + b) < (c + d) { + | _____________^ +LL | | +LL | | (c + d) - (a + b) +LL | | } else { +LL | | (a + b) - (c + d) +LL | | }; + | |_____^ help: replace with `abs_diff`: `(c + d).abs_diff(a + b)` + +error: manual absolute difference pattern without using `abs_diff` + --> tests/ui/manual_abs_diff.rs:48:22 + | +LL | const _: usize = if A > B { A - B } else { B - A }; + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: replace with `abs_diff`: `A.abs_diff(B)` + +error: manual absolute difference pattern without using `abs_diff` + --> tests/ui/manual_abs_diff.rs:53:13 + | +LL | let _ = if a > b { a - b } else { b - a }; + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: replace with `abs_diff`: `a.abs_diff(b)` + +error: manual absolute difference pattern without using `abs_diff` + --> tests/ui/manual_abs_diff.rs:59:13 + | +LL | let _ = if a > b { (a - b) as u32 } else { (b - a) as u32 }; + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: replace with `abs_diff`: `a.abs_diff(b)` + +error: aborting due to 11 previous errors + diff --git a/src/tools/clippy/tests/ui/manual_async_fn.fixed b/src/tools/clippy/tests/ui/manual_async_fn.fixed index ad0266d39e982..a284ca9f62530 100644 --- a/src/tools/clippy/tests/ui/manual_async_fn.fixed +++ b/src/tools/clippy/tests/ui/manual_async_fn.fixed @@ -75,7 +75,7 @@ impl S { async fn elided(_: &i32) -> i32 { 42 } // should be ignored -fn elided_not_bound(_: &i32) -> impl Future { +fn elided_not_bound(_: &i32) -> impl Future + use<> { async { 42 } } @@ -84,7 +84,7 @@ async fn explicit<'a, 'b>(_: &'a i32, _: &'b i32) -> i32 { 42 } // should be ignored #[allow(clippy::needless_lifetimes)] -fn explicit_not_bound<'a, 'b>(_: &'a i32, _: &'b i32) -> impl Future { +fn explicit_not_bound<'a, 'b>(_: &'a i32, _: &'b i32) -> impl Future + use<> { async { 42 } } @@ -94,7 +94,7 @@ mod issue_5765 { struct A; impl A { - fn f(&self) -> impl Future { + fn f(&self) -> impl Future + use<> { async {} } } diff --git a/src/tools/clippy/tests/ui/manual_async_fn.rs b/src/tools/clippy/tests/ui/manual_async_fn.rs index fe367b4bc7b9b..188f8a4982c36 100644 --- a/src/tools/clippy/tests/ui/manual_async_fn.rs +++ b/src/tools/clippy/tests/ui/manual_async_fn.rs @@ -102,7 +102,7 @@ fn elided(_: &i32) -> impl Future + '_ { } // should be ignored -fn elided_not_bound(_: &i32) -> impl Future { +fn elided_not_bound(_: &i32) -> impl Future + use<> { async { 42 } } @@ -114,7 +114,7 @@ fn explicit<'a, 'b>(_: &'a i32, _: &'b i32) -> impl Future + 'a + // should be ignored #[allow(clippy::needless_lifetimes)] -fn explicit_not_bound<'a, 'b>(_: &'a i32, _: &'b i32) -> impl Future { +fn explicit_not_bound<'a, 'b>(_: &'a i32, _: &'b i32) -> impl Future + use<> { async { 42 } } @@ -124,7 +124,7 @@ mod issue_5765 { struct A; impl A { - fn f(&self) -> impl Future { + fn f(&self) -> impl Future + use<> { async {} } } diff --git a/src/tools/clippy/tests/ui/manual_dangling_ptr.fixed b/src/tools/clippy/tests/ui/manual_dangling_ptr.fixed new file mode 100644 index 0000000000000..b6afe7898906c --- /dev/null +++ b/src/tools/clippy/tests/ui/manual_dangling_ptr.fixed @@ -0,0 +1,44 @@ +#![warn(clippy::manual_dangling_ptr)] +use std::mem; + +pub fn foo(_const: *const f32, _mut: *mut i32) {} + +fn main() { + let _: *const u8 = std::ptr::dangling(); + //~^ manual_dangling_ptr + let _ = std::ptr::dangling::(); + //~^ manual_dangling_ptr + let _ = std::ptr::dangling_mut::(); + //~^ manual_dangling_ptr + + let _ = std::ptr::dangling::(); + //~^ manual_dangling_ptr + let _ = std::ptr::dangling::(); + //~^ manual_dangling_ptr + let _ = std::ptr::dangling::(); + //~^ manual_dangling_ptr + + foo(std::ptr::dangling(), std::ptr::dangling_mut()); + //~^ manual_dangling_ptr + //~| manual_dangling_ptr +} + +fn should_not_lint() { + let _ = 0x10 as *mut i32; + let _ = mem::align_of::() as *const u8; + + foo(0 as _, 0 as _); +} + +#[clippy::msrv = "1.83"] +fn _msrv_1_83() { + // `{core, std}::ptr::dangling` was stabilized in 1.84. Do not lint this + foo(4 as *const _, 4 as *mut _); +} + +#[clippy::msrv = "1.84"] +fn _msrv_1_84() { + foo(std::ptr::dangling(), std::ptr::dangling_mut()); + //~^ manual_dangling_ptr + //~| manual_dangling_ptr +} diff --git a/src/tools/clippy/tests/ui/manual_dangling_ptr.rs b/src/tools/clippy/tests/ui/manual_dangling_ptr.rs new file mode 100644 index 0000000000000..581ad50113e28 --- /dev/null +++ b/src/tools/clippy/tests/ui/manual_dangling_ptr.rs @@ -0,0 +1,44 @@ +#![warn(clippy::manual_dangling_ptr)] +use std::mem; + +pub fn foo(_const: *const f32, _mut: *mut i32) {} + +fn main() { + let _: *const u8 = 1 as *const _; + //~^ manual_dangling_ptr + let _ = 2 as *const u32; + //~^ manual_dangling_ptr + let _ = 4 as *mut f32; + //~^ manual_dangling_ptr + + let _ = mem::align_of::() as *const u8; + //~^ manual_dangling_ptr + let _ = mem::align_of::() as *const u32; + //~^ manual_dangling_ptr + let _ = mem::align_of::() as *const usize; + //~^ manual_dangling_ptr + + foo(4 as *const _, 4 as *mut _); + //~^ manual_dangling_ptr + //~| manual_dangling_ptr +} + +fn should_not_lint() { + let _ = 0x10 as *mut i32; + let _ = mem::align_of::() as *const u8; + + foo(0 as _, 0 as _); +} + +#[clippy::msrv = "1.83"] +fn _msrv_1_83() { + // `{core, std}::ptr::dangling` was stabilized in 1.84. Do not lint this + foo(4 as *const _, 4 as *mut _); +} + +#[clippy::msrv = "1.84"] +fn _msrv_1_84() { + foo(4 as *const _, 4 as *mut _); + //~^ manual_dangling_ptr + //~| manual_dangling_ptr +} diff --git a/src/tools/clippy/tests/ui/manual_dangling_ptr.stderr b/src/tools/clippy/tests/ui/manual_dangling_ptr.stderr new file mode 100644 index 0000000000000..e3bc9b16b0d93 --- /dev/null +++ b/src/tools/clippy/tests/ui/manual_dangling_ptr.stderr @@ -0,0 +1,65 @@ +error: manual creation of a dangling pointer + --> tests/ui/manual_dangling_ptr.rs:7:24 + | +LL | let _: *const u8 = 1 as *const _; + | ^^^^^^^^^^^^^ help: use: `std::ptr::dangling()` + | + = note: `-D clippy::manual-dangling-ptr` implied by `-D warnings` + = help: to override `-D warnings` add `#[allow(clippy::manual_dangling_ptr)]` + +error: manual creation of a dangling pointer + --> tests/ui/manual_dangling_ptr.rs:9:13 + | +LL | let _ = 2 as *const u32; + | ^^^^^^^^^^^^^^^ help: use: `std::ptr::dangling::()` + +error: manual creation of a dangling pointer + --> tests/ui/manual_dangling_ptr.rs:11:13 + | +LL | let _ = 4 as *mut f32; + | ^^^^^^^^^^^^^ help: use: `std::ptr::dangling_mut::()` + +error: manual creation of a dangling pointer + --> tests/ui/manual_dangling_ptr.rs:14:13 + | +LL | let _ = mem::align_of::() as *const u8; + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use: `std::ptr::dangling::()` + +error: manual creation of a dangling pointer + --> tests/ui/manual_dangling_ptr.rs:16:13 + | +LL | let _ = mem::align_of::() as *const u32; + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use: `std::ptr::dangling::()` + +error: manual creation of a dangling pointer + --> tests/ui/manual_dangling_ptr.rs:18:13 + | +LL | let _ = mem::align_of::() as *const usize; + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use: `std::ptr::dangling::()` + +error: manual creation of a dangling pointer + --> tests/ui/manual_dangling_ptr.rs:21:9 + | +LL | foo(4 as *const _, 4 as *mut _); + | ^^^^^^^^^^^^^ help: use: `std::ptr::dangling()` + +error: manual creation of a dangling pointer + --> tests/ui/manual_dangling_ptr.rs:21:24 + | +LL | foo(4 as *const _, 4 as *mut _); + | ^^^^^^^^^^^ help: use: `std::ptr::dangling_mut()` + +error: manual creation of a dangling pointer + --> tests/ui/manual_dangling_ptr.rs:41:9 + | +LL | foo(4 as *const _, 4 as *mut _); + | ^^^^^^^^^^^^^ help: use: `std::ptr::dangling()` + +error: manual creation of a dangling pointer + --> tests/ui/manual_dangling_ptr.rs:41:24 + | +LL | foo(4 as *const _, 4 as *mut _); + | ^^^^^^^^^^^ help: use: `std::ptr::dangling_mut()` + +error: aborting due to 10 previous errors + diff --git a/src/tools/clippy/tests/ui/manual_div_ceil.fixed b/src/tools/clippy/tests/ui/manual_div_ceil.fixed index 57fe8917afe88..58ee6978fc125 100644 --- a/src/tools/clippy/tests/ui/manual_div_ceil.fixed +++ b/src/tools/clippy/tests/ui/manual_div_ceil.fixed @@ -1,5 +1,21 @@ #![warn(clippy::manual_div_ceil)] +macro_rules! y { + () => { + let x = 33u32; + let _ = x.div_ceil(8); + //~^ manual_div_ceil + let _ = x.div_ceil(8); + //~^ manual_div_ceil + }; +} + +macro_rules! eight { + () => { + 8 + }; +} + fn main() { let x = 7_u32; let y = 4_u32; @@ -32,6 +48,13 @@ fn main() { let _ = (z as i32 + (y_i - 1)) / y_i; let _ = (7_u32 as i32 + (y_i - 1)) / y_i; let _ = (7_u32 as i32 + (4 - 1)) / 4; + + // Test lint with macro + y!(); + + // Also test if RHS should be result of macro expansion + let _ = 33u32.div_ceil(eight!()); + //~^ manual_div_ceil } fn issue_13843() { diff --git a/src/tools/clippy/tests/ui/manual_div_ceil.rs b/src/tools/clippy/tests/ui/manual_div_ceil.rs index ec343513e5ce3..aa0d81b22a0e2 100644 --- a/src/tools/clippy/tests/ui/manual_div_ceil.rs +++ b/src/tools/clippy/tests/ui/manual_div_ceil.rs @@ -1,5 +1,21 @@ #![warn(clippy::manual_div_ceil)] +macro_rules! y { + () => { + let x = 33u32; + let _ = (x + 7) / 8; + //~^ manual_div_ceil + let _ = (7 + x) / 8; + //~^ manual_div_ceil + }; +} + +macro_rules! eight { + () => { + 8 + }; +} + fn main() { let x = 7_u32; let y = 4_u32; @@ -32,6 +48,13 @@ fn main() { let _ = (z as i32 + (y_i - 1)) / y_i; let _ = (7_u32 as i32 + (y_i - 1)) / y_i; let _ = (7_u32 as i32 + (4 - 1)) / 4; + + // Test lint with macro + y!(); + + // Also test if RHS should be result of macro expansion + let _ = (33u32 + 7) / eight!(); + //~^ manual_div_ceil } fn issue_13843() { diff --git a/src/tools/clippy/tests/ui/manual_div_ceil.stderr b/src/tools/clippy/tests/ui/manual_div_ceil.stderr index 8e14ab274269a..9be5a19bf391f 100644 --- a/src/tools/clippy/tests/ui/manual_div_ceil.stderr +++ b/src/tools/clippy/tests/ui/manual_div_ceil.stderr @@ -1,5 +1,5 @@ error: manually reimplementing `div_ceil` - --> tests/ui/manual_div_ceil.rs:9:13 + --> tests/ui/manual_div_ceil.rs:25:13 | LL | let _ = (x + (y - 1)) / y; | ^^^^^^^^^^^^^^^^^ help: consider using `.div_ceil()`: `x.div_ceil(y)` @@ -8,94 +8,122 @@ LL | let _ = (x + (y - 1)) / y; = help: to override `-D warnings` add `#[allow(clippy::manual_div_ceil)]` error: manually reimplementing `div_ceil` - --> tests/ui/manual_div_ceil.rs:11:13 + --> tests/ui/manual_div_ceil.rs:27:13 | LL | let _ = ((y - 1) + x) / y; | ^^^^^^^^^^^^^^^^^ help: consider using `.div_ceil()`: `x.div_ceil(y)` error: manually reimplementing `div_ceil` - --> tests/ui/manual_div_ceil.rs:13:13 + --> tests/ui/manual_div_ceil.rs:29:13 | LL | let _ = (x + y - 1) / y; | ^^^^^^^^^^^^^^^ help: consider using `.div_ceil()`: `x.div_ceil(y)` error: manually reimplementing `div_ceil` - --> tests/ui/manual_div_ceil.rs:16:13 + --> tests/ui/manual_div_ceil.rs:32:13 | LL | let _ = (7_u32 + (4 - 1)) / 4; | ^^^^^^^^^^^^^^^^^^^^^ help: consider using `.div_ceil()`: `7_u32.div_ceil(4)` error: manually reimplementing `div_ceil` - --> tests/ui/manual_div_ceil.rs:18:13 + --> tests/ui/manual_div_ceil.rs:34:13 | LL | let _ = (7_i32 as u32 + (4 - 1)) / 4; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using `.div_ceil()`: `(7_i32 as u32).div_ceil(4)` error: manually reimplementing `div_ceil` - --> tests/ui/manual_div_ceil.rs:39:13 + --> tests/ui/manual_div_ceil.rs:6:17 + | +LL | let _ = (x + 7) / 8; + | ^^^^^^^^^^^ help: consider using `.div_ceil()`: `x.div_ceil(8)` +... +LL | y!(); + | ---- in this macro invocation + | + = note: this error originates in the macro `y` (in Nightly builds, run with -Z macro-backtrace for more info) + +error: manually reimplementing `div_ceil` + --> tests/ui/manual_div_ceil.rs:8:17 + | +LL | let _ = (7 + x) / 8; + | ^^^^^^^^^^^ help: consider using `.div_ceil()`: `x.div_ceil(8)` +... +LL | y!(); + | ---- in this macro invocation + | + = note: this error originates in the macro `y` (in Nightly builds, run with -Z macro-backtrace for more info) + +error: manually reimplementing `div_ceil` + --> tests/ui/manual_div_ceil.rs:56:13 + | +LL | let _ = (33u32 + 7) / eight!(); + | ^^^^^^^^^^^^^^^^^^^^^^ help: consider using `.div_ceil()`: `33u32.div_ceil(eight!())` + +error: manually reimplementing `div_ceil` + --> tests/ui/manual_div_ceil.rs:62:13 | LL | let _ = (2048 + x - 1) / x; | ^^^^^^^^^^^^^^^^^^ help: consider using `.div_ceil()`: `2048_usize.div_ceil(x)` error: manually reimplementing `div_ceil` - --> tests/ui/manual_div_ceil.rs:43:13 + --> tests/ui/manual_div_ceil.rs:66:13 | LL | let _ = (2048usize + x - 1) / x; | ^^^^^^^^^^^^^^^^^^^^^^^ help: consider using `.div_ceil()`: `2048usize.div_ceil(x)` error: manually reimplementing `div_ceil` - --> tests/ui/manual_div_ceil.rs:47:13 + --> tests/ui/manual_div_ceil.rs:70:13 | LL | let _ = (2048_usize + x - 1) / x; | ^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using `.div_ceil()`: `2048_usize.div_ceil(x)` error: manually reimplementing `div_ceil` - --> tests/ui/manual_div_ceil.rs:51:13 + --> tests/ui/manual_div_ceil.rs:74:13 | LL | let _ = (x + 4 - 1) / 4; | ^^^^^^^^^^^^^^^ help: consider using `.div_ceil()`: `x.div_ceil(4)` error: manually reimplementing `div_ceil` - --> tests/ui/manual_div_ceil.rs:54:18 + --> tests/ui/manual_div_ceil.rs:77:18 | LL | let _: u32 = (2048 + 6 - 1) / 6; | ^^^^^^^^^^^^^^^^^^ help: consider using `.div_ceil()`: `2048_u32.div_ceil(6)` error: manually reimplementing `div_ceil` - --> tests/ui/manual_div_ceil.rs:56:20 + --> tests/ui/manual_div_ceil.rs:79:20 | LL | let _: usize = (2048 + 6 - 1) / 6; | ^^^^^^^^^^^^^^^^^^ help: consider using `.div_ceil()`: `2048_usize.div_ceil(6)` error: manually reimplementing `div_ceil` - --> tests/ui/manual_div_ceil.rs:58:18 + --> tests/ui/manual_div_ceil.rs:81:18 | LL | let _: u32 = (0x2048 + 0x6 - 1) / 0x6; | ^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using `.div_ceil()`: `0x2048_u32.div_ceil(0x6)` error: manually reimplementing `div_ceil` - --> tests/ui/manual_div_ceil.rs:61:13 + --> tests/ui/manual_div_ceil.rs:84:13 | LL | let _ = (2048 + 6u32 - 1) / 6u32; | ^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using `.div_ceil()`: `2048_u32.div_ceil(6u32)` error: manually reimplementing `div_ceil` - --> tests/ui/manual_div_ceil.rs:64:13 + --> tests/ui/manual_div_ceil.rs:87:13 | LL | let _ = (1_000_000 + 6u32 - 1) / 6u32; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using `.div_ceil()`: `1_000_000_u32.div_ceil(6u32)` error: manually reimplementing `div_ceil` - --> tests/ui/manual_div_ceil.rs:70:13 + --> tests/ui/manual_div_ceil.rs:93:13 | LL | let _ = (x + 7) / 8; | ^^^^^^^^^^^ help: consider using `.div_ceil()`: `x.div_ceil(8)` error: manually reimplementing `div_ceil` - --> tests/ui/manual_div_ceil.rs:72:13 + --> tests/ui/manual_div_ceil.rs:95:13 | LL | let _ = (7 + x) / 8; | ^^^^^^^^^^^ help: consider using `.div_ceil()`: `x.div_ceil(8)` -error: aborting due to 16 previous errors +error: aborting due to 19 previous errors diff --git a/src/tools/clippy/tests/ui/manual_find.rs b/src/tools/clippy/tests/ui/manual_find.rs index 20b557f21d141..7b9846cfe429d 100644 --- a/src/tools/clippy/tests/ui/manual_find.rs +++ b/src/tools/clippy/tests/ui/manual_find.rs @@ -23,4 +23,32 @@ fn tuple(arr: Vec<(String, i32)>) -> Option { None } +mod issue9521 { + fn condition(x: u32, y: u32) -> Result { + todo!() + } + + fn find_with_early_return(v: Vec) -> Option { + for x in v { + if condition(x, 10).ok()? { + return Some(x); + } + } + None + } + + fn find_with_early_break(v: Vec) -> Option { + for x in v { + if if x < 3 { + break; + } else { + x < 10 + } { + return Some(x); + } + } + None + } +} + fn main() {} diff --git a/src/tools/clippy/tests/ui/manual_ignore_case_cmp.fixed b/src/tools/clippy/tests/ui/manual_ignore_case_cmp.fixed index c1c929585cfd3..cd7adc20b127e 100644 --- a/src/tools/clippy/tests/ui/manual_ignore_case_cmp.fixed +++ b/src/tools/clippy/tests/ui/manual_ignore_case_cmp.fixed @@ -1,5 +1,11 @@ -#![allow(clippy::all)] -#![deny(clippy::manual_ignore_case_cmp)] +#![warn(clippy::manual_ignore_case_cmp)] +#![allow( + clippy::deref_addrof, + clippy::op_ref, + clippy::ptr_arg, + clippy::short_circuit_statement, + clippy::unnecessary_operation +)] use std::ffi::{OsStr, OsString}; diff --git a/src/tools/clippy/tests/ui/manual_ignore_case_cmp.rs b/src/tools/clippy/tests/ui/manual_ignore_case_cmp.rs index ca401e595fe97..85f6719827c93 100644 --- a/src/tools/clippy/tests/ui/manual_ignore_case_cmp.rs +++ b/src/tools/clippy/tests/ui/manual_ignore_case_cmp.rs @@ -1,5 +1,11 @@ -#![allow(clippy::all)] -#![deny(clippy::manual_ignore_case_cmp)] +#![warn(clippy::manual_ignore_case_cmp)] +#![allow( + clippy::deref_addrof, + clippy::op_ref, + clippy::ptr_arg, + clippy::short_circuit_statement, + clippy::unnecessary_operation +)] use std::ffi::{OsStr, OsString}; diff --git a/src/tools/clippy/tests/ui/manual_ignore_case_cmp.stderr b/src/tools/clippy/tests/ui/manual_ignore_case_cmp.stderr index 47378a65799fc..fa7fadd910760 100644 --- a/src/tools/clippy/tests/ui/manual_ignore_case_cmp.stderr +++ b/src/tools/clippy/tests/ui/manual_ignore_case_cmp.stderr @@ -1,14 +1,11 @@ error: manual case-insensitive ASCII comparison - --> tests/ui/manual_ignore_case_cmp.rs:9:8 + --> tests/ui/manual_ignore_case_cmp.rs:15:8 | LL | if a.to_ascii_lowercase() == b.to_ascii_lowercase() { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | -note: the lint level is defined here - --> tests/ui/manual_ignore_case_cmp.rs:2:9 - | -LL | #![deny(clippy::manual_ignore_case_cmp)] - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + = note: `-D clippy::manual-ignore-case-cmp` implied by `-D warnings` + = help: to override `-D warnings` add `#[allow(clippy::manual_ignore_case_cmp)]` help: consider using `.eq_ignore_ascii_case()` instead | LL - if a.to_ascii_lowercase() == b.to_ascii_lowercase() { @@ -16,7 +13,7 @@ LL + if a.eq_ignore_ascii_case(b) { | error: manual case-insensitive ASCII comparison - --> tests/ui/manual_ignore_case_cmp.rs:13:8 + --> tests/ui/manual_ignore_case_cmp.rs:19:8 | LL | if a.to_ascii_uppercase() == b.to_ascii_uppercase() { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -28,7 +25,7 @@ LL + if a.eq_ignore_ascii_case(b) { | error: manual case-insensitive ASCII comparison - --> tests/ui/manual_ignore_case_cmp.rs:17:13 + --> tests/ui/manual_ignore_case_cmp.rs:23:13 | LL | let r = a.to_ascii_lowercase() == b.to_ascii_lowercase(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -40,7 +37,7 @@ LL + let r = a.eq_ignore_ascii_case(b); | error: manual case-insensitive ASCII comparison - --> tests/ui/manual_ignore_case_cmp.rs:19:18 + --> tests/ui/manual_ignore_case_cmp.rs:25:18 | LL | let r = r || a.to_ascii_uppercase() == b.to_ascii_uppercase(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -52,7 +49,7 @@ LL + let r = r || a.eq_ignore_ascii_case(b); | error: manual case-insensitive ASCII comparison - --> tests/ui/manual_ignore_case_cmp.rs:21:10 + --> tests/ui/manual_ignore_case_cmp.rs:27:10 | LL | r && a.to_ascii_lowercase() == b.to_uppercase().to_ascii_lowercase(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -64,7 +61,7 @@ LL + r && a.eq_ignore_ascii_case(&b.to_uppercase()); | error: manual case-insensitive ASCII comparison - --> tests/ui/manual_ignore_case_cmp.rs:24:8 + --> tests/ui/manual_ignore_case_cmp.rs:30:8 | LL | if a.to_ascii_lowercase() != b.to_ascii_lowercase() { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -76,7 +73,7 @@ LL + if !a.eq_ignore_ascii_case(b) { | error: manual case-insensitive ASCII comparison - --> tests/ui/manual_ignore_case_cmp.rs:28:8 + --> tests/ui/manual_ignore_case_cmp.rs:34:8 | LL | if a.to_ascii_uppercase() != b.to_ascii_uppercase() { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -88,7 +85,7 @@ LL + if !a.eq_ignore_ascii_case(b) { | error: manual case-insensitive ASCII comparison - --> tests/ui/manual_ignore_case_cmp.rs:32:13 + --> tests/ui/manual_ignore_case_cmp.rs:38:13 | LL | let r = a.to_ascii_lowercase() != b.to_ascii_lowercase(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -100,7 +97,7 @@ LL + let r = !a.eq_ignore_ascii_case(b); | error: manual case-insensitive ASCII comparison - --> tests/ui/manual_ignore_case_cmp.rs:34:18 + --> tests/ui/manual_ignore_case_cmp.rs:40:18 | LL | let r = r || a.to_ascii_uppercase() != b.to_ascii_uppercase(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -112,7 +109,7 @@ LL + let r = r || !a.eq_ignore_ascii_case(b); | error: manual case-insensitive ASCII comparison - --> tests/ui/manual_ignore_case_cmp.rs:36:10 + --> tests/ui/manual_ignore_case_cmp.rs:42:10 | LL | r && a.to_ascii_lowercase() != b.to_uppercase().to_ascii_lowercase(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -124,7 +121,7 @@ LL + r && !a.eq_ignore_ascii_case(&b.to_uppercase()); | error: manual case-insensitive ASCII comparison - --> tests/ui/manual_ignore_case_cmp.rs:48:5 + --> tests/ui/manual_ignore_case_cmp.rs:54:5 | LL | a.to_ascii_lowercase() == b.to_ascii_lowercase(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -136,7 +133,7 @@ LL + a.eq_ignore_ascii_case(&b); | error: manual case-insensitive ASCII comparison - --> tests/ui/manual_ignore_case_cmp.rs:52:5 + --> tests/ui/manual_ignore_case_cmp.rs:58:5 | LL | a.to_ascii_lowercase() == 'a'; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -148,7 +145,7 @@ LL + a.eq_ignore_ascii_case(&'a'); | error: manual case-insensitive ASCII comparison - --> tests/ui/manual_ignore_case_cmp.rs:54:5 + --> tests/ui/manual_ignore_case_cmp.rs:60:5 | LL | 'a' == b.to_ascii_lowercase(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -160,7 +157,7 @@ LL + 'a'.eq_ignore_ascii_case(&b); | error: manual case-insensitive ASCII comparison - --> tests/ui/manual_ignore_case_cmp.rs:58:5 + --> tests/ui/manual_ignore_case_cmp.rs:64:5 | LL | a.to_ascii_lowercase() == b.to_ascii_lowercase(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -172,7 +169,7 @@ LL + a.eq_ignore_ascii_case(&b); | error: manual case-insensitive ASCII comparison - --> tests/ui/manual_ignore_case_cmp.rs:60:5 + --> tests/ui/manual_ignore_case_cmp.rs:66:5 | LL | a.to_ascii_lowercase() == b'a'; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -184,7 +181,7 @@ LL + a.eq_ignore_ascii_case(&b'a'); | error: manual case-insensitive ASCII comparison - --> tests/ui/manual_ignore_case_cmp.rs:62:5 + --> tests/ui/manual_ignore_case_cmp.rs:68:5 | LL | b'a' == b.to_ascii_lowercase(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -196,7 +193,7 @@ LL + b'a'.eq_ignore_ascii_case(&b); | error: manual case-insensitive ASCII comparison - --> tests/ui/manual_ignore_case_cmp.rs:66:5 + --> tests/ui/manual_ignore_case_cmp.rs:72:5 | LL | a.to_ascii_lowercase() == b.to_ascii_lowercase(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -208,7 +205,7 @@ LL + a.eq_ignore_ascii_case(b); | error: manual case-insensitive ASCII comparison - --> tests/ui/manual_ignore_case_cmp.rs:68:5 + --> tests/ui/manual_ignore_case_cmp.rs:74:5 | LL | a.to_uppercase().to_ascii_lowercase() == b.to_ascii_lowercase(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -220,7 +217,7 @@ LL + a.to_uppercase().eq_ignore_ascii_case(b); | error: manual case-insensitive ASCII comparison - --> tests/ui/manual_ignore_case_cmp.rs:70:5 + --> tests/ui/manual_ignore_case_cmp.rs:76:5 | LL | a.to_ascii_lowercase() == "a"; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -232,7 +229,7 @@ LL + a.eq_ignore_ascii_case("a"); | error: manual case-insensitive ASCII comparison - --> tests/ui/manual_ignore_case_cmp.rs:72:5 + --> tests/ui/manual_ignore_case_cmp.rs:78:5 | LL | "a" == b.to_ascii_lowercase(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -244,7 +241,7 @@ LL + "a".eq_ignore_ascii_case(b); | error: manual case-insensitive ASCII comparison - --> tests/ui/manual_ignore_case_cmp.rs:76:5 + --> tests/ui/manual_ignore_case_cmp.rs:82:5 | LL | a.to_ascii_lowercase() == b.to_ascii_lowercase(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -256,7 +253,7 @@ LL + a.eq_ignore_ascii_case(b); | error: manual case-insensitive ASCII comparison - --> tests/ui/manual_ignore_case_cmp.rs:78:5 + --> tests/ui/manual_ignore_case_cmp.rs:84:5 | LL | a.to_uppercase().to_ascii_lowercase() == b.to_ascii_lowercase(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -268,7 +265,7 @@ LL + a.to_uppercase().eq_ignore_ascii_case(b); | error: manual case-insensitive ASCII comparison - --> tests/ui/manual_ignore_case_cmp.rs:80:5 + --> tests/ui/manual_ignore_case_cmp.rs:86:5 | LL | a.to_ascii_lowercase() == "a"; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -280,7 +277,7 @@ LL + a.eq_ignore_ascii_case("a"); | error: manual case-insensitive ASCII comparison - --> tests/ui/manual_ignore_case_cmp.rs:82:5 + --> tests/ui/manual_ignore_case_cmp.rs:88:5 | LL | "a" == b.to_ascii_lowercase(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -292,7 +289,7 @@ LL + "a".eq_ignore_ascii_case(b); | error: manual case-insensitive ASCII comparison - --> tests/ui/manual_ignore_case_cmp.rs:86:5 + --> tests/ui/manual_ignore_case_cmp.rs:92:5 | LL | a.to_ascii_lowercase() == b.to_ascii_lowercase(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -304,7 +301,7 @@ LL + a.eq_ignore_ascii_case(&b); | error: manual case-insensitive ASCII comparison - --> tests/ui/manual_ignore_case_cmp.rs:88:5 + --> tests/ui/manual_ignore_case_cmp.rs:94:5 | LL | a.to_ascii_lowercase() == "a"; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -316,7 +313,7 @@ LL + a.eq_ignore_ascii_case("a"); | error: manual case-insensitive ASCII comparison - --> tests/ui/manual_ignore_case_cmp.rs:90:5 + --> tests/ui/manual_ignore_case_cmp.rs:96:5 | LL | "a" == b.to_ascii_lowercase(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -328,7 +325,7 @@ LL + "a".eq_ignore_ascii_case(&b); | error: manual case-insensitive ASCII comparison - --> tests/ui/manual_ignore_case_cmp.rs:94:5 + --> tests/ui/manual_ignore_case_cmp.rs:100:5 | LL | a.to_ascii_lowercase() == "a"; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -340,7 +337,7 @@ LL + a.eq_ignore_ascii_case("a"); | error: manual case-insensitive ASCII comparison - --> tests/ui/manual_ignore_case_cmp.rs:96:5 + --> tests/ui/manual_ignore_case_cmp.rs:102:5 | LL | "a" == b.to_ascii_lowercase(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -352,7 +349,7 @@ LL + "a".eq_ignore_ascii_case(&b); | error: manual case-insensitive ASCII comparison - --> tests/ui/manual_ignore_case_cmp.rs:100:5 + --> tests/ui/manual_ignore_case_cmp.rs:106:5 | LL | a.to_ascii_lowercase() == b.to_ascii_lowercase(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -364,7 +361,7 @@ LL + a.eq_ignore_ascii_case(b); | error: manual case-insensitive ASCII comparison - --> tests/ui/manual_ignore_case_cmp.rs:102:5 + --> tests/ui/manual_ignore_case_cmp.rs:108:5 | LL | a.to_ascii_lowercase() == "a"; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -376,7 +373,7 @@ LL + a.eq_ignore_ascii_case("a"); | error: manual case-insensitive ASCII comparison - --> tests/ui/manual_ignore_case_cmp.rs:104:5 + --> tests/ui/manual_ignore_case_cmp.rs:110:5 | LL | "a" == b.to_ascii_lowercase(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -388,7 +385,7 @@ LL + "a".eq_ignore_ascii_case(b); | error: manual case-insensitive ASCII comparison - --> tests/ui/manual_ignore_case_cmp.rs:107:5 + --> tests/ui/manual_ignore_case_cmp.rs:113:5 | LL | b.to_ascii_lowercase() == a.to_ascii_lowercase(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -400,7 +397,7 @@ LL + b.eq_ignore_ascii_case(&a); | error: manual case-insensitive ASCII comparison - --> tests/ui/manual_ignore_case_cmp.rs:109:5 + --> tests/ui/manual_ignore_case_cmp.rs:115:5 | LL | b.to_ascii_lowercase() == "a"; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -412,7 +409,7 @@ LL + b.eq_ignore_ascii_case("a"); | error: manual case-insensitive ASCII comparison - --> tests/ui/manual_ignore_case_cmp.rs:111:5 + --> tests/ui/manual_ignore_case_cmp.rs:117:5 | LL | "a" == a.to_ascii_lowercase(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -424,7 +421,7 @@ LL + "a".eq_ignore_ascii_case(&a); | error: manual case-insensitive ASCII comparison - --> tests/ui/manual_ignore_case_cmp.rs:115:5 + --> tests/ui/manual_ignore_case_cmp.rs:121:5 | LL | a.to_ascii_lowercase() == b.to_ascii_lowercase(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -436,7 +433,7 @@ LL + a.eq_ignore_ascii_case(b); | error: manual case-insensitive ASCII comparison - --> tests/ui/manual_ignore_case_cmp.rs:117:5 + --> tests/ui/manual_ignore_case_cmp.rs:123:5 | LL | a.to_ascii_lowercase() == "a"; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -448,7 +445,7 @@ LL + a.eq_ignore_ascii_case("a"); | error: manual case-insensitive ASCII comparison - --> tests/ui/manual_ignore_case_cmp.rs:119:5 + --> tests/ui/manual_ignore_case_cmp.rs:125:5 | LL | "a" == b.to_ascii_lowercase(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -460,7 +457,7 @@ LL + "a".eq_ignore_ascii_case(b); | error: manual case-insensitive ASCII comparison - --> tests/ui/manual_ignore_case_cmp.rs:122:5 + --> tests/ui/manual_ignore_case_cmp.rs:128:5 | LL | b.to_ascii_lowercase() == a.to_ascii_lowercase(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -472,7 +469,7 @@ LL + b.eq_ignore_ascii_case(&a); | error: manual case-insensitive ASCII comparison - --> tests/ui/manual_ignore_case_cmp.rs:124:5 + --> tests/ui/manual_ignore_case_cmp.rs:130:5 | LL | b.to_ascii_lowercase() == "a"; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -484,7 +481,7 @@ LL + b.eq_ignore_ascii_case("a"); | error: manual case-insensitive ASCII comparison - --> tests/ui/manual_ignore_case_cmp.rs:126:5 + --> tests/ui/manual_ignore_case_cmp.rs:132:5 | LL | "a" == a.to_ascii_lowercase(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -496,7 +493,7 @@ LL + "a".eq_ignore_ascii_case(&a); | error: manual case-insensitive ASCII comparison - --> tests/ui/manual_ignore_case_cmp.rs:130:5 + --> tests/ui/manual_ignore_case_cmp.rs:136:5 | LL | a.to_ascii_lowercase() == b.to_ascii_lowercase(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -508,7 +505,7 @@ LL + a.eq_ignore_ascii_case(b); | error: manual case-insensitive ASCII comparison - --> tests/ui/manual_ignore_case_cmp.rs:134:5 + --> tests/ui/manual_ignore_case_cmp.rs:140:5 | LL | a.to_ascii_lowercase() == b.to_ascii_lowercase(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -520,7 +517,7 @@ LL + a.eq_ignore_ascii_case(&b); | error: manual case-insensitive ASCII comparison - --> tests/ui/manual_ignore_case_cmp.rs:138:5 + --> tests/ui/manual_ignore_case_cmp.rs:144:5 | LL | a.to_ascii_lowercase() == b.to_ascii_lowercase(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -532,7 +529,7 @@ LL + a.eq_ignore_ascii_case(b); | error: manual case-insensitive ASCII comparison - --> tests/ui/manual_ignore_case_cmp.rs:140:5 + --> tests/ui/manual_ignore_case_cmp.rs:146:5 | LL | b.to_ascii_lowercase() == a.to_ascii_lowercase(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -544,7 +541,7 @@ LL + b.eq_ignore_ascii_case(&a); | error: manual case-insensitive ASCII comparison - --> tests/ui/manual_ignore_case_cmp.rs:144:5 + --> tests/ui/manual_ignore_case_cmp.rs:150:5 | LL | a.to_ascii_lowercase() == b.to_ascii_lowercase(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -556,7 +553,7 @@ LL + a.eq_ignore_ascii_case(b); | error: manual case-insensitive ASCII comparison - --> tests/ui/manual_ignore_case_cmp.rs:148:5 + --> tests/ui/manual_ignore_case_cmp.rs:154:5 | LL | a.to_ascii_lowercase() == b.to_ascii_lowercase(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -568,7 +565,7 @@ LL + a.eq_ignore_ascii_case(b); | error: manual case-insensitive ASCII comparison - --> tests/ui/manual_ignore_case_cmp.rs:152:5 + --> tests/ui/manual_ignore_case_cmp.rs:158:5 | LL | a.to_ascii_lowercase() == b.to_ascii_lowercase(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -580,7 +577,7 @@ LL + a.eq_ignore_ascii_case(b); | error: manual case-insensitive ASCII comparison - --> tests/ui/manual_ignore_case_cmp.rs:154:5 + --> tests/ui/manual_ignore_case_cmp.rs:160:5 | LL | b.to_ascii_lowercase() == a.to_ascii_lowercase(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ diff --git a/src/tools/clippy/tests/ui/manual_inspect.fixed b/src/tools/clippy/tests/ui/manual_inspect.fixed index 44f15d61f8563..ec87fe217aee6 100644 --- a/src/tools/clippy/tests/ui/manual_inspect.fixed +++ b/src/tools/clippy/tests/ui/manual_inspect.fixed @@ -1,5 +1,5 @@ +#![allow(clippy::no_effect, clippy::op_ref, clippy::uninlined_format_args)] #![warn(clippy::manual_inspect)] -#![allow(clippy::no_effect, clippy::op_ref)] fn main() { let _ = Some(0).inspect(|&x| { @@ -107,7 +107,7 @@ fn main() { let _ = || { let _x = x; }; - return; + return ; } println!("test"); }); @@ -185,3 +185,12 @@ fn main() { }); } } + +#[rustfmt::skip] +fn layout_check() { + if let Some(x) = Some(1).inspect(|&x| { println!("{x}"); //~ manual_inspect + // Do not collapse code into this comment + }) { + println!("{x}"); + } +} diff --git a/src/tools/clippy/tests/ui/manual_inspect.rs b/src/tools/clippy/tests/ui/manual_inspect.rs index d34f2abce6ae1..e679636201e6a 100644 --- a/src/tools/clippy/tests/ui/manual_inspect.rs +++ b/src/tools/clippy/tests/ui/manual_inspect.rs @@ -1,5 +1,5 @@ +#![allow(clippy::no_effect, clippy::op_ref, clippy::uninlined_format_args)] #![warn(clippy::manual_inspect)] -#![allow(clippy::no_effect, clippy::op_ref)] fn main() { let _ = Some(0).map(|x| { @@ -197,3 +197,12 @@ fn main() { }); } } + +#[rustfmt::skip] +fn layout_check() { + if let Some(x) = Some(1).map(|x| { println!("{x}"); //~ manual_inspect + // Do not collapse code into this comment + x }) { + println!("{x}"); + } +} diff --git a/src/tools/clippy/tests/ui/manual_inspect.stderr b/src/tools/clippy/tests/ui/manual_inspect.stderr index 510325d2baaa9..eb98f9f5995a3 100644 --- a/src/tools/clippy/tests/ui/manual_inspect.stderr +++ b/src/tools/clippy/tests/ui/manual_inspect.stderr @@ -98,7 +98,7 @@ LL | if x.is_empty() { LL | let _ = || { LL ~ let _x = x; LL | }; -LL ~ return; +LL ~ return ; LL | } LL ~ println!("test"); | @@ -187,5 +187,18 @@ LL | LL ~ println!("{}", x); | -error: aborting due to 13 previous errors +error: using `map` over `inspect` + --> tests/ui/manual_inspect.rs:203:30 + | +LL | if let Some(x) = Some(1).map(|x| { println!("{x}"); + | ^^^ + | +help: try + | +LL ~ if let Some(x) = Some(1).inspect(|&x| { println!("{x}"); +LL | // Do not collapse code into this comment +LL ~ }) { + | + +error: aborting due to 14 previous errors diff --git a/src/tools/clippy/tests/ui/manual_is_power_of_two.fixed b/src/tools/clippy/tests/ui/manual_is_power_of_two.fixed index 6f29d76bd2109..8a1ab785dfbfd 100644 --- a/src/tools/clippy/tests/ui/manual_is_power_of_two.fixed +++ b/src/tools/clippy/tests/ui/manual_is_power_of_two.fixed @@ -1,4 +1,17 @@ #![warn(clippy::manual_is_power_of_two)] +#![allow(clippy::precedence)] + +macro_rules! binop { + ($a: expr, equal, $b: expr) => { + $a == $b + }; + ($a: expr, and, $b: expr) => { + $a & $b + }; + ($a: expr, minus, $b: expr) => { + $a - $b + }; +} fn main() { let a = 16_u64; @@ -7,6 +20,8 @@ fn main() { //~^ manual_is_power_of_two let _ = a.is_power_of_two(); //~^ manual_is_power_of_two + let _ = a.is_power_of_two(); + //~^ manual_is_power_of_two // Test different orders of expression let _ = a.is_power_of_two(); @@ -23,4 +38,23 @@ fn main() { // is_power_of_two only works for unsigned integers let _ = b.count_ones() == 1; let _ = b & (b - 1) == 0; + + let i: i32 = 3; + let _ = (i as u32).is_power_of_two(); + //~^ manual_is_power_of_two + + let _ = binop!(a.count_ones(), equal, 1); + let _ = binop!(a, and, a - 1) == 0; + let _ = a & binop!(a, minus, 1) == 0; +} + +#[clippy::msrv = "1.31"] +const fn low_msrv(a: u32) -> bool { + a & (a - 1) == 0 +} + +#[clippy::msrv = "1.32"] +const fn high_msrv(a: u32) -> bool { + a.is_power_of_two() + //~^ manual_is_power_of_two } diff --git a/src/tools/clippy/tests/ui/manual_is_power_of_two.rs b/src/tools/clippy/tests/ui/manual_is_power_of_two.rs index 0c44d7a660b43..57a3b05e0336a 100644 --- a/src/tools/clippy/tests/ui/manual_is_power_of_two.rs +++ b/src/tools/clippy/tests/ui/manual_is_power_of_two.rs @@ -1,10 +1,25 @@ #![warn(clippy::manual_is_power_of_two)] +#![allow(clippy::precedence)] + +macro_rules! binop { + ($a: expr, equal, $b: expr) => { + $a == $b + }; + ($a: expr, and, $b: expr) => { + $a & $b + }; + ($a: expr, minus, $b: expr) => { + $a - $b + }; +} fn main() { let a = 16_u64; let _ = a.count_ones() == 1; //~^ manual_is_power_of_two + let _ = u64::count_ones(a) == 1; + //~^ manual_is_power_of_two let _ = a & (a - 1) == 0; //~^ manual_is_power_of_two @@ -23,4 +38,23 @@ fn main() { // is_power_of_two only works for unsigned integers let _ = b.count_ones() == 1; let _ = b & (b - 1) == 0; + + let i: i32 = 3; + let _ = i as u32 & (i as u32 - 1) == 0; + //~^ manual_is_power_of_two + + let _ = binop!(a.count_ones(), equal, 1); + let _ = binop!(a, and, a - 1) == 0; + let _ = a & binop!(a, minus, 1) == 0; +} + +#[clippy::msrv = "1.31"] +const fn low_msrv(a: u32) -> bool { + a & (a - 1) == 0 +} + +#[clippy::msrv = "1.32"] +const fn high_msrv(a: u32) -> bool { + a & (a - 1) == 0 + //~^ manual_is_power_of_two } diff --git a/src/tools/clippy/tests/ui/manual_is_power_of_two.stderr b/src/tools/clippy/tests/ui/manual_is_power_of_two.stderr index ad12ee10565f6..5781a093d5f2b 100644 --- a/src/tools/clippy/tests/ui/manual_is_power_of_two.stderr +++ b/src/tools/clippy/tests/ui/manual_is_power_of_two.stderr @@ -1,5 +1,5 @@ error: manually reimplementing `is_power_of_two` - --> tests/ui/manual_is_power_of_two.rs:6:13 + --> tests/ui/manual_is_power_of_two.rs:19:13 | LL | let _ = a.count_ones() == 1; | ^^^^^^^^^^^^^^^^^^^ help: consider using `.is_power_of_two()`: `a.is_power_of_two()` @@ -8,34 +8,52 @@ LL | let _ = a.count_ones() == 1; = help: to override `-D warnings` add `#[allow(clippy::manual_is_power_of_two)]` error: manually reimplementing `is_power_of_two` - --> tests/ui/manual_is_power_of_two.rs:8:13 + --> tests/ui/manual_is_power_of_two.rs:21:13 + | +LL | let _ = u64::count_ones(a) == 1; + | ^^^^^^^^^^^^^^^^^^^^^^^ help: consider using `.is_power_of_two()`: `a.is_power_of_two()` + +error: manually reimplementing `is_power_of_two` + --> tests/ui/manual_is_power_of_two.rs:23:13 | LL | let _ = a & (a - 1) == 0; | ^^^^^^^^^^^^^^^^ help: consider using `.is_power_of_two()`: `a.is_power_of_two()` error: manually reimplementing `is_power_of_two` - --> tests/ui/manual_is_power_of_two.rs:12:13 + --> tests/ui/manual_is_power_of_two.rs:27:13 | LL | let _ = 1 == a.count_ones(); | ^^^^^^^^^^^^^^^^^^^ help: consider using `.is_power_of_two()`: `a.is_power_of_two()` error: manually reimplementing `is_power_of_two` - --> tests/ui/manual_is_power_of_two.rs:14:13 + --> tests/ui/manual_is_power_of_two.rs:29:13 | LL | let _ = (a - 1) & a == 0; | ^^^^^^^^^^^^^^^^ help: consider using `.is_power_of_two()`: `a.is_power_of_two()` error: manually reimplementing `is_power_of_two` - --> tests/ui/manual_is_power_of_two.rs:16:13 + --> tests/ui/manual_is_power_of_two.rs:31:13 | LL | let _ = 0 == a & (a - 1); | ^^^^^^^^^^^^^^^^ help: consider using `.is_power_of_two()`: `a.is_power_of_two()` error: manually reimplementing `is_power_of_two` - --> tests/ui/manual_is_power_of_two.rs:18:13 + --> tests/ui/manual_is_power_of_two.rs:33:13 | LL | let _ = 0 == (a - 1) & a; | ^^^^^^^^^^^^^^^^ help: consider using `.is_power_of_two()`: `a.is_power_of_two()` -error: aborting due to 6 previous errors +error: manually reimplementing `is_power_of_two` + --> tests/ui/manual_is_power_of_two.rs:43:13 + | +LL | let _ = i as u32 & (i as u32 - 1) == 0; + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using `.is_power_of_two()`: `(i as u32).is_power_of_two()` + +error: manually reimplementing `is_power_of_two` + --> tests/ui/manual_is_power_of_two.rs:58:5 + | +LL | a & (a - 1) == 0 + | ^^^^^^^^^^^^^^^^ help: consider using `.is_power_of_two()`: `a.is_power_of_two()` + +error: aborting due to 9 previous errors diff --git a/src/tools/clippy/tests/ui/manual_map_option.rs b/src/tools/clippy/tests/ui/manual_map_option.rs index 9477d0d795d2d..40133748d4599 100644 --- a/src/tools/clippy/tests/ui/manual_map_option.rs +++ b/src/tools/clippy/tests/ui/manual_map_option.rs @@ -101,7 +101,7 @@ fn main() { match &mut Some(String::new()) { //~^ manual_map - Some(ref x) => Some(x.len()), + &mut Some(ref x) => Some(x.len()), None => None, }; diff --git a/src/tools/clippy/tests/ui/manual_map_option.stderr b/src/tools/clippy/tests/ui/manual_map_option.stderr index 8f9bce4c265c9..486379c1e5f33 100644 --- a/src/tools/clippy/tests/ui/manual_map_option.stderr +++ b/src/tools/clippy/tests/ui/manual_map_option.stderr @@ -127,7 +127,7 @@ error: manual implementation of `Option::map` | LL | / match &mut Some(String::new()) { LL | | -LL | | Some(ref x) => Some(x.len()), +LL | | &mut Some(ref x) => Some(x.len()), LL | | None => None, LL | | }; | |_____^ help: try: `Some(String::new()).as_ref().map(|x| x.len())` diff --git a/src/tools/clippy/tests/ui/manual_map_option_2.fixed b/src/tools/clippy/tests/ui/manual_map_option_2.fixed index d698cc74ea65a..206c6d5d07763 100644 --- a/src/tools/clippy/tests/ui/manual_map_option_2.fixed +++ b/src/tools/clippy/tests/ui/manual_map_option_2.fixed @@ -115,7 +115,7 @@ mod with_type_coercion { fn with_fn_ret(s: &Option) -> Option<(String, &str)> { // Don't lint, `map` doesn't work as the return type is adjusted. match s { - Some(x) => Some({ if let Some(ref s) = s { (x.clone(), s) } else { panic!() } }), + Some(x) => Some({ if let Some(s) = s { (x.clone(), s) } else { panic!() } }), None => None, } } @@ -124,7 +124,7 @@ mod with_type_coercion { if true { // Don't lint, `map` doesn't work as the return type is adjusted. return match s { - Some(x) => Some({ if let Some(ref s) = s { (x.clone(), s) } else { panic!() } }), + Some(x) => Some({ if let Some(s) = s { (x.clone(), s) } else { panic!() } }), None => None, }; } @@ -136,7 +136,7 @@ mod with_type_coercion { let x: Option<(String, &'a str)>; x = { match s { - Some(x) => Some({ if let Some(ref s) = s { (x.clone(), s) } else { panic!() } }), + Some(x) => Some({ if let Some(s) = s { (x.clone(), s) } else { panic!() } }), None => None, } }; diff --git a/src/tools/clippy/tests/ui/manual_map_option_2.rs b/src/tools/clippy/tests/ui/manual_map_option_2.rs index 069c2381f6db1..a47dc950760e2 100644 --- a/src/tools/clippy/tests/ui/manual_map_option_2.rs +++ b/src/tools/clippy/tests/ui/manual_map_option_2.rs @@ -143,7 +143,7 @@ mod with_type_coercion { fn with_fn_ret(s: &Option) -> Option<(String, &str)> { // Don't lint, `map` doesn't work as the return type is adjusted. match s { - Some(x) => Some({ if let Some(ref s) = s { (x.clone(), s) } else { panic!() } }), + Some(x) => Some({ if let Some(s) = s { (x.clone(), s) } else { panic!() } }), None => None, } } @@ -152,7 +152,7 @@ mod with_type_coercion { if true { // Don't lint, `map` doesn't work as the return type is adjusted. return match s { - Some(x) => Some({ if let Some(ref s) = s { (x.clone(), s) } else { panic!() } }), + Some(x) => Some({ if let Some(s) = s { (x.clone(), s) } else { panic!() } }), None => None, }; } @@ -164,7 +164,7 @@ mod with_type_coercion { let x: Option<(String, &'a str)>; x = { match s { - Some(x) => Some({ if let Some(ref s) = s { (x.clone(), s) } else { panic!() } }), + Some(x) => Some({ if let Some(s) = s { (x.clone(), s) } else { panic!() } }), None => None, } }; diff --git a/src/tools/clippy/tests/ui/manual_ok_err.fixed b/src/tools/clippy/tests/ui/manual_ok_err.fixed index bc169b64be9fd..e6f799aa58d61 100644 --- a/src/tools/clippy/tests/ui/manual_ok_err.fixed +++ b/src/tools/clippy/tests/ui/manual_ok_err.fixed @@ -80,6 +80,11 @@ fn no_lint() { Ok(3) => None, Ok(v) => Some(v), }; + + let _ = match funcall() { + Ok(v @ 1..) => Some(v), + _ => None, + }; } const fn cf(x: Result) -> Option { diff --git a/src/tools/clippy/tests/ui/manual_ok_err.rs b/src/tools/clippy/tests/ui/manual_ok_err.rs index 03c730d4b4e46..972b2c41ee7aa 100644 --- a/src/tools/clippy/tests/ui/manual_ok_err.rs +++ b/src/tools/clippy/tests/ui/manual_ok_err.rs @@ -116,6 +116,11 @@ fn no_lint() { Ok(3) => None, Ok(v) => Some(v), }; + + let _ = match funcall() { + Ok(v @ 1..) => Some(v), + _ => None, + }; } const fn cf(x: Result) -> Option { diff --git a/src/tools/clippy/tests/ui/manual_ok_err.stderr b/src/tools/clippy/tests/ui/manual_ok_err.stderr index 13fceacda1074..040e170f397e2 100644 --- a/src/tools/clippy/tests/ui/manual_ok_err.stderr +++ b/src/tools/clippy/tests/ui/manual_ok_err.stderr @@ -94,7 +94,7 @@ LL | | }; | |_____^ help: replace with: `(-S).ok()` error: manual implementation of `ok` - --> tests/ui/manual_ok_err.rs:132:12 + --> tests/ui/manual_ok_err.rs:137:12 | LL | } else if let Ok(n) = "1".parse::() { | ____________^ diff --git a/src/tools/clippy/tests/ui/manual_retain.fixed b/src/tools/clippy/tests/ui/manual_retain.fixed index ca8491131c06c..016f520e216c0 100644 --- a/src/tools/clippy/tests/ui/manual_retain.fixed +++ b/src/tools/clippy/tests/ui/manual_retain.fixed @@ -1,5 +1,5 @@ #![warn(clippy::manual_retain)] -#![allow(unused, clippy::redundant_clone)] +#![allow(unused, clippy::needless_borrowed_reference, clippy::redundant_clone)] use std::collections::{BTreeMap, BTreeSet, BinaryHeap, HashMap, HashSet, VecDeque}; fn main() { @@ -31,7 +31,7 @@ fn binary_heap_retain() { // Do lint, because we use pattern matching let mut tuples = BinaryHeap::from([(0, 1), (1, 2), (2, 3)]); - tuples.retain(|(ref x, ref y)| *x == 0); + tuples.retain(|&(ref x, ref y)| *x == 0); //~^ manual_retain tuples.retain(|(x, y)| *x == 0); //~^ manual_retain @@ -99,7 +99,7 @@ fn btree_set_retain() { // Do lint, because we use pattern matching let mut tuples = BTreeSet::from([(0, 1), (1, 2), (2, 3)]); - tuples.retain(|(ref x, ref y)| *x == 0); + tuples.retain(|&(ref x, ref y)| *x == 0); //~^ manual_retain tuples.retain(|(x, y)| *x == 0); //~^ manual_retain @@ -166,7 +166,7 @@ fn hash_set_retain() { // Do lint, because we use pattern matching let mut tuples = HashSet::from([(0, 1), (1, 2), (2, 3)]); - tuples.retain(|(ref x, ref y)| *x == 0); + tuples.retain(|&(ref x, ref y)| *x == 0); //~^ manual_retain tuples.retain(|(x, y)| *x == 0); //~^ manual_retain @@ -220,7 +220,7 @@ fn vec_retain() { // Do lint, because we use pattern matching let mut tuples = vec![(0, 1), (1, 2), (2, 3)]; - tuples.retain(|(ref x, ref y)| *x == 0); + tuples.retain(|&(ref x, ref y)| *x == 0); //~^ manual_retain tuples.retain(|(x, y)| *x == 0); //~^ manual_retain diff --git a/src/tools/clippy/tests/ui/manual_retain.rs b/src/tools/clippy/tests/ui/manual_retain.rs index cd05a41f3f25a..62f9b7b0595d0 100644 --- a/src/tools/clippy/tests/ui/manual_retain.rs +++ b/src/tools/clippy/tests/ui/manual_retain.rs @@ -1,5 +1,5 @@ #![warn(clippy::manual_retain)] -#![allow(unused, clippy::redundant_clone)] +#![allow(unused, clippy::needless_borrowed_reference, clippy::redundant_clone)] use std::collections::{BTreeMap, BTreeSet, BinaryHeap, HashMap, HashSet, VecDeque}; fn main() { @@ -31,7 +31,7 @@ fn binary_heap_retain() { // Do lint, because we use pattern matching let mut tuples = BinaryHeap::from([(0, 1), (1, 2), (2, 3)]); - tuples = tuples.iter().filter(|(ref x, ref y)| *x == 0).copied().collect(); + tuples = tuples.iter().filter(|&&(ref x, ref y)| *x == 0).copied().collect(); //~^ manual_retain tuples = tuples.iter().filter(|(x, y)| *x == 0).copied().collect(); //~^ manual_retain @@ -103,7 +103,7 @@ fn btree_set_retain() { // Do lint, because we use pattern matching let mut tuples = BTreeSet::from([(0, 1), (1, 2), (2, 3)]); - tuples = tuples.iter().filter(|(ref x, ref y)| *x == 0).copied().collect(); + tuples = tuples.iter().filter(|&&(ref x, ref y)| *x == 0).copied().collect(); //~^ manual_retain tuples = tuples.iter().filter(|(x, y)| *x == 0).copied().collect(); //~^ manual_retain @@ -174,7 +174,7 @@ fn hash_set_retain() { // Do lint, because we use pattern matching let mut tuples = HashSet::from([(0, 1), (1, 2), (2, 3)]); - tuples = tuples.iter().filter(|(ref x, ref y)| *x == 0).copied().collect(); + tuples = tuples.iter().filter(|&&(ref x, ref y)| *x == 0).copied().collect(); //~^ manual_retain tuples = tuples.iter().filter(|(x, y)| *x == 0).copied().collect(); //~^ manual_retain @@ -228,7 +228,7 @@ fn vec_retain() { // Do lint, because we use pattern matching let mut tuples = vec![(0, 1), (1, 2), (2, 3)]; - tuples = tuples.iter().filter(|(ref x, ref y)| *x == 0).copied().collect(); + tuples = tuples.iter().filter(|&&(ref x, ref y)| *x == 0).copied().collect(); //~^ manual_retain tuples = tuples.iter().filter(|(x, y)| *x == 0).copied().collect(); //~^ manual_retain diff --git a/src/tools/clippy/tests/ui/manual_retain.stderr b/src/tools/clippy/tests/ui/manual_retain.stderr index 2f81647dd8b7b..e7d3e34b5d7d4 100644 --- a/src/tools/clippy/tests/ui/manual_retain.stderr +++ b/src/tools/clippy/tests/ui/manual_retain.stderr @@ -22,8 +22,8 @@ LL | binary_heap = binary_heap.iter().filter(|&x| x % 2 == 0).cloned().colle error: this expression can be written more simply using `.retain()` --> tests/ui/manual_retain.rs:34:5 | -LL | tuples = tuples.iter().filter(|(ref x, ref y)| *x == 0).copied().collect(); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider calling `.retain()` instead: `tuples.retain(|(ref x, ref y)| *x == 0)` +LL | tuples = tuples.iter().filter(|&&(ref x, ref y)| *x == 0).copied().collect(); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider calling `.retain()` instead: `tuples.retain(|&(ref x, ref y)| *x == 0)` error: this expression can be written more simply using `.retain()` --> tests/ui/manual_retain.rs:36:5 @@ -74,8 +74,8 @@ LL | btree_set = btree_set.into_iter().filter(|x| x % 2 == 0).collect(); error: this expression can be written more simply using `.retain()` --> tests/ui/manual_retain.rs:106:5 | -LL | tuples = tuples.iter().filter(|(ref x, ref y)| *x == 0).copied().collect(); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider calling `.retain()` instead: `tuples.retain(|(ref x, ref y)| *x == 0)` +LL | tuples = tuples.iter().filter(|&&(ref x, ref y)| *x == 0).copied().collect(); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider calling `.retain()` instead: `tuples.retain(|&(ref x, ref y)| *x == 0)` error: this expression can be written more simply using `.retain()` --> tests/ui/manual_retain.rs:108:5 @@ -126,8 +126,8 @@ LL | hash_set = hash_set.iter().filter(|&x| x % 2 == 0).cloned().collect(); error: this expression can be written more simply using `.retain()` --> tests/ui/manual_retain.rs:177:5 | -LL | tuples = tuples.iter().filter(|(ref x, ref y)| *x == 0).copied().collect(); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider calling `.retain()` instead: `tuples.retain(|(ref x, ref y)| *x == 0)` +LL | tuples = tuples.iter().filter(|&&(ref x, ref y)| *x == 0).copied().collect(); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider calling `.retain()` instead: `tuples.retain(|&(ref x, ref y)| *x == 0)` error: this expression can be written more simply using `.retain()` --> tests/ui/manual_retain.rs:179:5 @@ -162,8 +162,8 @@ LL | vec = vec.into_iter().filter(|x| x % 2 == 0).collect(); error: this expression can be written more simply using `.retain()` --> tests/ui/manual_retain.rs:231:5 | -LL | tuples = tuples.iter().filter(|(ref x, ref y)| *x == 0).copied().collect(); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider calling `.retain()` instead: `tuples.retain(|(ref x, ref y)| *x == 0)` +LL | tuples = tuples.iter().filter(|&&(ref x, ref y)| *x == 0).copied().collect(); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider calling `.retain()` instead: `tuples.retain(|&(ref x, ref y)| *x == 0)` error: this expression can be written more simply using `.retain()` --> tests/ui/manual_retain.rs:233:5 diff --git a/src/tools/clippy/tests/ui/manual_strip_fixable.fixed b/src/tools/clippy/tests/ui/manual_strip_fixable.fixed index 75a3f1645de33..b59e3719d951d 100644 --- a/src/tools/clippy/tests/ui/manual_strip_fixable.fixed +++ b/src/tools/clippy/tests/ui/manual_strip_fixable.fixed @@ -1,4 +1,5 @@ #![warn(clippy::manual_strip)] +#![allow(clippy::uninlined_format_args)] fn main() { let s = "abc"; diff --git a/src/tools/clippy/tests/ui/manual_strip_fixable.rs b/src/tools/clippy/tests/ui/manual_strip_fixable.rs index 5080068449e20..4fb3a9bf007f6 100644 --- a/src/tools/clippy/tests/ui/manual_strip_fixable.rs +++ b/src/tools/clippy/tests/ui/manual_strip_fixable.rs @@ -1,4 +1,5 @@ #![warn(clippy::manual_strip)] +#![allow(clippy::uninlined_format_args)] fn main() { let s = "abc"; diff --git a/src/tools/clippy/tests/ui/manual_strip_fixable.stderr b/src/tools/clippy/tests/ui/manual_strip_fixable.stderr index 1c276e5d8fdfe..da8b0cd08f893 100644 --- a/src/tools/clippy/tests/ui/manual_strip_fixable.stderr +++ b/src/tools/clippy/tests/ui/manual_strip_fixable.stderr @@ -1,11 +1,11 @@ error: stripping a prefix manually - --> tests/ui/manual_strip_fixable.rs:7:24 + --> tests/ui/manual_strip_fixable.rs:8:24 | LL | let stripped = &s["ab".len()..]; | ^^^^^^^^^^^^^^^^ | note: the prefix was tested here - --> tests/ui/manual_strip_fixable.rs:6:5 + --> tests/ui/manual_strip_fixable.rs:7:5 | LL | if s.starts_with("ab") { | ^^^^^^^^^^^^^^^^^^^^^^^ @@ -19,13 +19,13 @@ LL ~ println!("{stripped}{}", stripped); | error: stripping a suffix manually - --> tests/ui/manual_strip_fixable.rs:13:24 + --> tests/ui/manual_strip_fixable.rs:14:24 | LL | let stripped = &s[..s.len() - "bc".len()]; | ^^^^^^^^^^^^^^^^^^^^^^^^^^ | note: the suffix was tested here - --> tests/ui/manual_strip_fixable.rs:12:5 + --> tests/ui/manual_strip_fixable.rs:13:5 | LL | if s.ends_with("bc") { | ^^^^^^^^^^^^^^^^^^^^^ diff --git a/src/tools/clippy/tests/ui/manual_unwrap_or.fixed b/src/tools/clippy/tests/ui/manual_unwrap_or.fixed index 07e4bdd483a8c..e12287a709395 100644 --- a/src/tools/clippy/tests/ui/manual_unwrap_or.fixed +++ b/src/tools/clippy/tests/ui/manual_unwrap_or.fixed @@ -18,11 +18,9 @@ fn option_unwrap_or() { // multiline case #[rustfmt::skip] - Some(1).unwrap_or({ - 42 + 42 - + 42 + 42 + 42 - + 42 + 42 + 42 - }); + Some(1).unwrap_or(42 + 42 + + 42 + 42 + 42 + + 42 + 42 + 42); // string case Some("Bob").unwrap_or("Alice"); @@ -125,11 +123,9 @@ fn result_unwrap_or() { // multiline case #[rustfmt::skip] - Ok::(1).unwrap_or({ - 42 + 42 - + 42 + 42 + 42 - + 42 + 42 + 42 - }); + Ok::(1).unwrap_or(42 + 42 + + 42 + 42 + 42 + + 42 + 42 + 42); // string case Ok::<&str, &str>("Bob").unwrap_or("Alice"); @@ -159,11 +155,7 @@ fn result_unwrap_or() { Ok(s) => s, Err(s) => s, }; - // could lint, but unused_variables takes care of it - match Ok::<&str, &str>("Alice") { - Ok(s) => s, - Err(s) => "Bob", - }; + Ok::<&str, &str>("Alice").unwrap_or("Bob"); Ok::(1).unwrap_or(42); @@ -250,4 +242,12 @@ mod issue_13018 { } } +fn implicit_deref(v: Vec) { + let _ = if let Some(s) = v.first() { s } else { "" }; +} + +fn allowed_manual_unwrap_or_zero() -> u32 { + Some(42).unwrap_or(0) +} + fn main() {} diff --git a/src/tools/clippy/tests/ui/manual_unwrap_or.rs b/src/tools/clippy/tests/ui/manual_unwrap_or.rs index c88b6f95da68e..53cffcab5b56c 100644 --- a/src/tools/clippy/tests/ui/manual_unwrap_or.rs +++ b/src/tools/clippy/tests/ui/manual_unwrap_or.rs @@ -216,8 +216,8 @@ fn result_unwrap_or() { Ok(s) => s, Err(s) => s, }; - // could lint, but unused_variables takes care of it match Ok::<&str, &str>("Alice") { + //~^ manual_unwrap_or Ok(s) => s, Err(s) => "Bob", }; @@ -316,4 +316,17 @@ mod issue_13018 { } } +fn implicit_deref(v: Vec) { + let _ = if let Some(s) = v.first() { s } else { "" }; +} + +fn allowed_manual_unwrap_or_zero() -> u32 { + if let Some(x) = Some(42) { + //~^ manual_unwrap_or + x + } else { + 0 + } +} + fn main() {} diff --git a/src/tools/clippy/tests/ui/manual_unwrap_or.stderr b/src/tools/clippy/tests/ui/manual_unwrap_or.stderr index a5deb55786e96..320e895fb8237 100644 --- a/src/tools/clippy/tests/ui/manual_unwrap_or.stderr +++ b/src/tools/clippy/tests/ui/manual_unwrap_or.stderr @@ -44,11 +44,9 @@ LL | | }; | help: replace with | -LL ~ Some(1).unwrap_or({ -LL + 42 + 42 -LL + + 42 + 42 + 42 -LL + + 42 + 42 + 42 -LL ~ }); +LL ~ Some(1).unwrap_or(42 + 42 +LL + + 42 + 42 + 42 +LL ~ + 42 + 42 + 42); | error: this pattern reimplements `Option::unwrap_or` @@ -145,11 +143,9 @@ LL | | }; | help: replace with | -LL ~ Ok::(1).unwrap_or({ -LL + 42 + 42 -LL + + 42 + 42 + 42 -LL + + 42 + 42 + 42 -LL ~ }); +LL ~ Ok::(1).unwrap_or(42 + 42 +LL + + 42 + 42 + 42 +LL ~ + 42 + 42 + 42); | error: this pattern reimplements `Result::unwrap_or` @@ -162,6 +158,16 @@ LL | | Err(_) => "Alice", LL | | }; | |_____^ help: replace with: `Ok::<&str, &str>("Bob").unwrap_or("Alice")` +error: this pattern reimplements `Result::unwrap_or` + --> tests/ui/manual_unwrap_or.rs:219:5 + | +LL | / match Ok::<&str, &str>("Alice") { +LL | | +LL | | Ok(s) => s, +LL | | Err(s) => "Bob", +LL | | }; + | |_____^ help: replace with: `Ok::<&str, &str>("Alice").unwrap_or("Bob")` + error: this pattern reimplements `Result::unwrap_or` --> tests/ui/manual_unwrap_or.rs:225:5 | @@ -184,5 +190,16 @@ LL | | None => 0, LL | | }; | |_________^ help: replace with: `some_macro!().unwrap_or(0)` -error: aborting due to 16 previous errors +error: this pattern reimplements `Option::unwrap_or` + --> tests/ui/manual_unwrap_or.rs:324:5 + | +LL | / if let Some(x) = Some(42) { +LL | | +LL | | x +LL | | } else { +LL | | 0 +LL | | } + | |_____^ help: replace with: `Some(42).unwrap_or(0)` + +error: aborting due to 18 previous errors diff --git a/src/tools/clippy/tests/ui/manual_unwrap_or_default.fixed b/src/tools/clippy/tests/ui/manual_unwrap_or_default.fixed index 832376fa5af15..9dae9fcae079b 100644 --- a/src/tools/clippy/tests/ui/manual_unwrap_or_default.fixed +++ b/src/tools/clippy/tests/ui/manual_unwrap_or_default.fixed @@ -1,5 +1,5 @@ #![warn(clippy::manual_unwrap_or_default)] -#![allow(clippy::unnecessary_literal_unwrap, clippy::manual_unwrap_or)] +#![allow(clippy::unnecessary_literal_unwrap)] fn main() { let x: Option> = None; @@ -36,10 +36,12 @@ fn main() { // Issue #12531 unsafe fn no_deref_ptr(a: Option, b: *const Option) -> i32 { - match a { - // `*b` being correct depends on `a == Some(_)` - Some(_) => (*b).unwrap_or_default(), - _ => 0, + unsafe { + match a { + // `*b` being correct depends on `a == Some(_)` + Some(_) => (*b).unwrap_or_default(), + _ => 0, + } } } @@ -99,3 +101,8 @@ fn issue_12928() { let y = if let Some(Y(a, _)) = x { a } else { 0 }; let y = if let Some(Y(a, ..)) = x { a } else { 0 }; } + +// For symetry with `manual_unwrap_or` test +fn allowed_manual_unwrap_or_zero() -> u32 { + Some(42).unwrap_or_default() +} diff --git a/src/tools/clippy/tests/ui/manual_unwrap_or_default.rs b/src/tools/clippy/tests/ui/manual_unwrap_or_default.rs index bedb3f0af0f3e..539d7a8bbae59 100644 --- a/src/tools/clippy/tests/ui/manual_unwrap_or_default.rs +++ b/src/tools/clippy/tests/ui/manual_unwrap_or_default.rs @@ -1,5 +1,5 @@ #![warn(clippy::manual_unwrap_or_default)] -#![allow(clippy::unnecessary_literal_unwrap, clippy::manual_unwrap_or)] +#![allow(clippy::unnecessary_literal_unwrap)] fn main() { let x: Option> = None; @@ -68,14 +68,16 @@ fn main() { // Issue #12531 unsafe fn no_deref_ptr(a: Option, b: *const Option) -> i32 { - match a { - // `*b` being correct depends on `a == Some(_)` - Some(_) => match *b { - //~^ manual_unwrap_or_default - Some(v) => v, + unsafe { + match a { + // `*b` being correct depends on `a == Some(_)` + Some(_) => match *b { + //~^ manual_unwrap_or_default + Some(v) => v, + _ => 0, + }, _ => 0, - }, - _ => 0, + } } } @@ -135,3 +137,13 @@ fn issue_12928() { let y = if let Some(Y(a, _)) = x { a } else { 0 }; let y = if let Some(Y(a, ..)) = x { a } else { 0 }; } + +// For symetry with `manual_unwrap_or` test +fn allowed_manual_unwrap_or_zero() -> u32 { + if let Some(x) = Some(42) { + //~^ manual_unwrap_or_default + x + } else { + 0 + } +} diff --git a/src/tools/clippy/tests/ui/manual_unwrap_or_default.stderr b/src/tools/clippy/tests/ui/manual_unwrap_or_default.stderr index ca9aa159152e3..e8f38a2e3899e 100644 --- a/src/tools/clippy/tests/ui/manual_unwrap_or_default.stderr +++ b/src/tools/clippy/tests/ui/manual_unwrap_or_default.stderr @@ -76,15 +76,26 @@ LL | | }; | |_____^ help: replace it with: `x.unwrap_or_default()` error: match can be simplified with `.unwrap_or_default()` - --> tests/ui/manual_unwrap_or_default.rs:73:20 + --> tests/ui/manual_unwrap_or_default.rs:74:24 | -LL | Some(_) => match *b { - | ____________________^ +LL | Some(_) => match *b { + | ________________________^ LL | | -LL | | Some(v) => v, -LL | | _ => 0, -LL | | }, - | |_________^ help: replace it with: `(*b).unwrap_or_default()` +LL | | Some(v) => v, +LL | | _ => 0, +LL | | }, + | |_____________^ help: replace it with: `(*b).unwrap_or_default()` -error: aborting due to 8 previous errors +error: if let can be simplified with `.unwrap_or_default()` + --> tests/ui/manual_unwrap_or_default.rs:143:5 + | +LL | / if let Some(x) = Some(42) { +LL | | +LL | | x +LL | | } else { +LL | | 0 +LL | | } + | |_____^ help: replace it with: `Some(42).unwrap_or_default()` + +error: aborting due to 9 previous errors diff --git a/src/tools/clippy/tests/ui/map_flatten_fixable.fixed b/src/tools/clippy/tests/ui/map_flatten_fixable.fixed index 948fec970d869..f8379ed23c5b2 100644 --- a/src/tools/clippy/tests/ui/map_flatten_fixable.fixed +++ b/src/tools/clippy/tests/ui/map_flatten_fixable.fixed @@ -1,10 +1,11 @@ -#![warn(clippy::all, clippy::pedantic)] -#![allow(clippy::let_underscore_untyped)] -#![allow(clippy::missing_docs_in_private_items)] -#![allow(clippy::map_identity)] -#![allow(clippy::redundant_closure)] -#![allow(clippy::unnecessary_wraps)] #![feature(result_flattening)] +#![allow( + clippy::let_underscore_untyped, + clippy::missing_docs_in_private_items, + clippy::map_identity, + clippy::redundant_closure, + clippy::unnecessary_wraps +)] fn main() { // mapping to Option on Iterator diff --git a/src/tools/clippy/tests/ui/map_flatten_fixable.rs b/src/tools/clippy/tests/ui/map_flatten_fixable.rs index 67a91ab94147e..040a9ca85f647 100644 --- a/src/tools/clippy/tests/ui/map_flatten_fixable.rs +++ b/src/tools/clippy/tests/ui/map_flatten_fixable.rs @@ -1,10 +1,11 @@ -#![warn(clippy::all, clippy::pedantic)] -#![allow(clippy::let_underscore_untyped)] -#![allow(clippy::missing_docs_in_private_items)] -#![allow(clippy::map_identity)] -#![allow(clippy::redundant_closure)] -#![allow(clippy::unnecessary_wraps)] #![feature(result_flattening)] +#![allow( + clippy::let_underscore_untyped, + clippy::missing_docs_in_private_items, + clippy::map_identity, + clippy::redundant_closure, + clippy::unnecessary_wraps +)] fn main() { // mapping to Option on Iterator diff --git a/src/tools/clippy/tests/ui/map_flatten_fixable.stderr b/src/tools/clippy/tests/ui/map_flatten_fixable.stderr index 05d4d9a6ad85c..fe68eb7e4ab44 100644 --- a/src/tools/clippy/tests/ui/map_flatten_fixable.stderr +++ b/src/tools/clippy/tests/ui/map_flatten_fixable.stderr @@ -1,5 +1,5 @@ error: called `map(..).flatten()` on `Iterator` - --> tests/ui/map_flatten_fixable.rs:16:47 + --> tests/ui/map_flatten_fixable.rs:17:47 | LL | let _: Vec<_> = vec![5_i8; 6].into_iter().map(option_id).flatten().collect(); | ^^^^^^^^^^^^^^^^^^^^^^^^ help: try replacing `map` with `filter_map` and remove the `.flatten()`: `filter_map(option_id)` @@ -8,43 +8,43 @@ LL | let _: Vec<_> = vec![5_i8; 6].into_iter().map(option_id).flatten().coll = help: to override `-D warnings` add `#[allow(clippy::map_flatten)]` error: called `map(..).flatten()` on `Iterator` - --> tests/ui/map_flatten_fixable.rs:18:47 + --> tests/ui/map_flatten_fixable.rs:19:47 | LL | let _: Vec<_> = vec![5_i8; 6].into_iter().map(option_id_ref).flatten().collect(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try replacing `map` with `filter_map` and remove the `.flatten()`: `filter_map(option_id_ref)` error: called `map(..).flatten()` on `Iterator` - --> tests/ui/map_flatten_fixable.rs:20:47 + --> tests/ui/map_flatten_fixable.rs:21:47 | LL | let _: Vec<_> = vec![5_i8; 6].into_iter().map(option_id_closure).flatten().collect(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try replacing `map` with `filter_map` and remove the `.flatten()`: `filter_map(option_id_closure)` error: called `map(..).flatten()` on `Iterator` - --> tests/ui/map_flatten_fixable.rs:22:47 + --> tests/ui/map_flatten_fixable.rs:23:47 | LL | let _: Vec<_> = vec![5_i8; 6].into_iter().map(|x| x.checked_add(1)).flatten().collect(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try replacing `map` with `filter_map` and remove the `.flatten()`: `filter_map(|x| x.checked_add(1))` error: called `map(..).flatten()` on `Iterator` - --> tests/ui/map_flatten_fixable.rs:26:47 + --> tests/ui/map_flatten_fixable.rs:27:47 | LL | let _: Vec<_> = vec![5_i8; 6].into_iter().map(|x| 0..x).flatten().collect(); | ^^^^^^^^^^^^^^^^^^^^^^^ help: try replacing `map` with `flat_map` and remove the `.flatten()`: `flat_map(|x| 0..x)` error: called `map(..).flatten()` on `Option` - --> tests/ui/map_flatten_fixable.rs:30:40 + --> tests/ui/map_flatten_fixable.rs:31:40 | LL | let _: Option<_> = (Some(Some(1))).map(|x| x).flatten(); | ^^^^^^^^^^^^^^^^^^^^ help: try replacing `map` with `and_then` and remove the `.flatten()`: `and_then(|x| x)` error: called `map(..).flatten()` on `Result` - --> tests/ui/map_flatten_fixable.rs:34:42 + --> tests/ui/map_flatten_fixable.rs:35:42 | LL | let _: Result<_, &str> = (Ok(Ok(1))).map(|x| x).flatten(); | ^^^^^^^^^^^^^^^^^^^^ help: try replacing `map` with `and_then` and remove the `.flatten()`: `and_then(|x| x)` error: called `map(..).flatten()` on `Iterator` - --> tests/ui/map_flatten_fixable.rs:44:10 + --> tests/ui/map_flatten_fixable.rs:45:10 | LL | .map(|n| match n { | __________^ @@ -74,7 +74,7 @@ LL ~ }); | error: called `map(..).flatten()` on `Option` - --> tests/ui/map_flatten_fixable.rs:65:10 + --> tests/ui/map_flatten_fixable.rs:66:10 | LL | .map(|_| { | __________^ diff --git a/src/tools/clippy/tests/ui/match_on_vec_items.rs b/src/tools/clippy/tests/ui/match_on_vec_items.rs deleted file mode 100644 index f3174ec9734df..0000000000000 --- a/src/tools/clippy/tests/ui/match_on_vec_items.rs +++ /dev/null @@ -1,161 +0,0 @@ -#![warn(clippy::match_on_vec_items)] -#![allow(clippy::redundant_at_rest_pattern, clippy::useless_vec)] -//@no-rustfix -fn match_with_wildcard() { - let arr = vec![0, 1, 2, 3]; - let range = 1..3; - let idx = 1; - - // Lint, may panic - match arr[idx] { - //~^ match_on_vec_items - 0 => println!("0"), - 1 => println!("1"), - _ => {}, - } - - // Lint, may panic - match arr[range] { - //~^ match_on_vec_items - [0, 1] => println!("0 1"), - [1, 2] => println!("1 2"), - _ => {}, - } -} - -fn match_without_wildcard() { - let arr = vec![0, 1, 2, 3]; - let range = 1..3; - let idx = 2; - - // Lint, may panic - match arr[idx] { - //~^ match_on_vec_items - 0 => println!("0"), - 1 => println!("1"), - num => {}, - } - - // Lint, may panic - match arr[range] { - //~^ match_on_vec_items - [0, 1] => println!("0 1"), - [1, 2] => println!("1 2"), - [ref sub @ ..] => {}, - } -} - -fn match_wildcard_and_action() { - let arr = vec![0, 1, 2, 3]; - let range = 1..3; - let idx = 3; - - // Lint, may panic - match arr[idx] { - //~^ match_on_vec_items - 0 => println!("0"), - 1 => println!("1"), - _ => println!("Hello, World!"), - } - - // Lint, may panic - match arr[range] { - //~^ match_on_vec_items - [0, 1] => println!("0 1"), - [1, 2] => println!("1 2"), - _ => println!("Hello, World!"), - } -} - -fn match_vec_ref() { - let arr = &vec![0, 1, 2, 3]; - let range = 1..3; - let idx = 3; - - // Lint, may panic - match arr[idx] { - //~^ match_on_vec_items - 0 => println!("0"), - 1 => println!("1"), - _ => {}, - } - - // Lint, may panic - match arr[range] { - //~^ match_on_vec_items - [0, 1] => println!("0 1"), - [1, 2] => println!("1 2"), - _ => {}, - } -} - -fn match_with_get() { - let arr = vec![0, 1, 2, 3]; - let range = 1..3; - let idx = 3; - - // Ok - match arr.get(idx) { - Some(0) => println!("0"), - Some(1) => println!("1"), - _ => {}, - } - - // Ok - match arr.get(range) { - Some(&[0, 1]) => println!("0 1"), - Some(&[1, 2]) => println!("1 2"), - _ => {}, - } -} - -fn match_with_array() { - let arr = [0, 1, 2, 3]; - let range = 1..3; - let idx = 3; - - // Ok - match arr[idx] { - 0 => println!("0"), - 1 => println!("1"), - _ => {}, - } - - // Ok - match arr[range] { - [0, 1] => println!("0 1"), - [1, 2] => println!("1 2"), - _ => {}, - } -} - -fn match_with_endless_range() { - let arr = vec![0, 1, 2, 3]; - let range = ..; - - // Ok - match arr[range] { - [0, 1] => println!("0 1"), - [1, 2] => println!("1 2"), - [0, 1, 2, 3] => println!("0, 1, 2, 3"), - _ => {}, - } - - // Ok - match arr[..] { - [0, 1] => println!("0 1"), - [1, 2] => println!("1 2"), - [0, 1, 2, 3] => println!("0, 1, 2, 3"), - _ => {}, - } -} - -fn main() { - match_with_wildcard(); - match_without_wildcard(); - match_wildcard_and_action(); - match_vec_ref(); - match_with_get(); - match_with_array(); - match_with_endless_range(); -} diff --git a/src/tools/clippy/tests/ui/match_on_vec_items.stderr b/src/tools/clippy/tests/ui/match_on_vec_items.stderr deleted file mode 100644 index ae79e1305f7fd..0000000000000 --- a/src/tools/clippy/tests/ui/match_on_vec_items.stderr +++ /dev/null @@ -1,53 +0,0 @@ -error: indexing into a vector may panic - --> tests/ui/match_on_vec_items.rs:10:11 - | -LL | match arr[idx] { - | ^^^^^^^^ help: try: `arr.get(idx)` - | - = note: `-D clippy::match-on-vec-items` implied by `-D warnings` - = help: to override `-D warnings` add `#[allow(clippy::match_on_vec_items)]` - -error: indexing into a vector may panic - --> tests/ui/match_on_vec_items.rs:18:11 - | -LL | match arr[range] { - | ^^^^^^^^^^ help: try: `arr.get(range)` - -error: indexing into a vector may panic - --> tests/ui/match_on_vec_items.rs:32:11 - | -LL | match arr[idx] { - | ^^^^^^^^ help: try: `arr.get(idx)` - -error: indexing into a vector may panic - --> tests/ui/match_on_vec_items.rs:40:11 - | -LL | match arr[range] { - | ^^^^^^^^^^ help: try: `arr.get(range)` - -error: indexing into a vector may panic - --> tests/ui/match_on_vec_items.rs:54:11 - | -LL | match arr[idx] { - | ^^^^^^^^ help: try: `arr.get(idx)` - -error: indexing into a vector may panic - --> tests/ui/match_on_vec_items.rs:62:11 - | -LL | match arr[range] { - | ^^^^^^^^^^ help: try: `arr.get(range)` - -error: indexing into a vector may panic - --> tests/ui/match_on_vec_items.rs:76:11 - | -LL | match arr[idx] { - | ^^^^^^^^ help: try: `arr.get(idx)` - -error: indexing into a vector may panic - --> tests/ui/match_on_vec_items.rs:84:11 - | -LL | match arr[range] { - | ^^^^^^^^^^ help: try: `arr.get(range)` - -error: aborting due to 8 previous errors - diff --git a/src/tools/clippy/tests/ui/match_single_binding.fixed b/src/tools/clippy/tests/ui/match_single_binding.fixed index 3a3eee4c958b4..bdf39796ebfcb 100644 --- a/src/tools/clippy/tests/ui/match_single_binding.fixed +++ b/src/tools/clippy/tests/ui/match_single_binding.fixed @@ -171,3 +171,20 @@ fn issue_10447() -> usize { 2 } + +fn issue14634() { + macro_rules! id { + ($i:ident) => { + $i + }; + } + dbg!(3); + println!("here"); + //~^^^ match_single_binding + let id!(a) = dbg!(3); + println!("found {a}"); + //~^^^ match_single_binding + let id!(b) = dbg!(3); + let id!(_a) = dbg!(b + 1); + //~^^^ match_single_binding +} diff --git a/src/tools/clippy/tests/ui/match_single_binding.rs b/src/tools/clippy/tests/ui/match_single_binding.rs index ada51254c6cdf..419ff95d873b0 100644 --- a/src/tools/clippy/tests/ui/match_single_binding.rs +++ b/src/tools/clippy/tests/ui/match_single_binding.rs @@ -229,3 +229,23 @@ fn issue_10447() -> usize { 2 } + +fn issue14634() { + macro_rules! id { + ($i:ident) => { + $i + }; + } + match dbg!(3) { + _ => println!("here"), + } + //~^^^ match_single_binding + match dbg!(3) { + id!(a) => println!("found {a}"), + } + //~^^^ match_single_binding + let id!(_a) = match dbg!(3) { + id!(b) => dbg!(b + 1), + }; + //~^^^ match_single_binding +} diff --git a/src/tools/clippy/tests/ui/match_single_binding.stderr b/src/tools/clippy/tests/ui/match_single_binding.stderr index 7e1ec32dac2ff..bdd0134a5f1c9 100644 --- a/src/tools/clippy/tests/ui/match_single_binding.stderr +++ b/src/tools/clippy/tests/ui/match_single_binding.stderr @@ -336,5 +336,47 @@ LL | | _ => println!("1"), LL | | }, | |_________^ help: consider using the match body instead: `println!("1")` -error: aborting due to 24 previous errors +error: this match could be replaced by its scrutinee and body + --> tests/ui/match_single_binding.rs:239:5 + | +LL | / match dbg!(3) { +LL | | _ => println!("here"), +LL | | } + | |_____^ + | +help: consider using the scrutinee and body instead + | +LL ~ dbg!(3); +LL + println!("here"); + | + +error: this match could be written as a `let` statement + --> tests/ui/match_single_binding.rs:243:5 + | +LL | / match dbg!(3) { +LL | | id!(a) => println!("found {a}"), +LL | | } + | |_____^ + | +help: consider using a `let` statement + | +LL ~ let id!(a) = dbg!(3); +LL + println!("found {a}"); + | + +error: this match could be written as a `let` statement + --> tests/ui/match_single_binding.rs:247:5 + | +LL | / let id!(_a) = match dbg!(3) { +LL | | id!(b) => dbg!(b + 1), +LL | | }; + | |______^ + | +help: consider using a `let` statement + | +LL ~ let id!(b) = dbg!(3); +LL + let id!(_a) = dbg!(b + 1); + | + +error: aborting due to 27 previous errors diff --git a/src/tools/clippy/tests/ui/methods.rs b/src/tools/clippy/tests/ui/methods.rs index 76b0d131dd41d..2f4004181f6a8 100644 --- a/src/tools/clippy/tests/ui/methods.rs +++ b/src/tools/clippy/tests/ui/methods.rs @@ -1,6 +1,5 @@ //@aux-build:option_helpers.rs -#![warn(clippy::all, clippy::pedantic)] #![allow( clippy::disallowed_names, clippy::default_trait_access, @@ -19,8 +18,7 @@ clippy::wrong_self_convention, clippy::unused_async, clippy::unused_self, - clippy::useless_vec, - unused + clippy::useless_vec )] #[macro_use] diff --git a/src/tools/clippy/tests/ui/methods.stderr b/src/tools/clippy/tests/ui/methods.stderr index 353b999d7da0f..b226ce7c65dab 100644 --- a/src/tools/clippy/tests/ui/methods.stderr +++ b/src/tools/clippy/tests/ui/methods.stderr @@ -1,5 +1,5 @@ error: methods called `new` usually return `Self` - --> tests/ui/methods.rs:104:5 + --> tests/ui/methods.rs:102:5 | LL | / fn new() -> i32 { LL | | @@ -11,7 +11,7 @@ LL | | } = help: to override `-D warnings` add `#[allow(clippy::new_ret_no_self)]` error: called `filter(..).next()` on an `Iterator`. This is more succinctly expressed by calling `.find(..)` instead - --> tests/ui/methods.rs:126:13 + --> tests/ui/methods.rs:124:13 | LL | let _ = v.iter().filter(|&x| { | _____________^ diff --git a/src/tools/clippy/tests/ui/min_max.rs b/src/tools/clippy/tests/ui/min_max.rs index f3eeb85f20ed6..ee19d3ff71421 100644 --- a/src/tools/clippy/tests/ui/min_max.rs +++ b/src/tools/clippy/tests/ui/min_max.rs @@ -1,4 +1,3 @@ -#![warn(clippy::all)] #![allow(clippy::manual_clamp)] use std::cmp::{max as my_max, max, min as my_min, min}; diff --git a/src/tools/clippy/tests/ui/min_max.stderr b/src/tools/clippy/tests/ui/min_max.stderr index 84b4d37545529..87510a465a08b 100644 --- a/src/tools/clippy/tests/ui/min_max.stderr +++ b/src/tools/clippy/tests/ui/min_max.stderr @@ -1,80 +1,79 @@ error: this `min`/`max` combination leads to constant result - --> tests/ui/min_max.rs:22:5 + --> tests/ui/min_max.rs:21:5 | LL | min(1, max(3, x)); | ^^^^^^^^^^^^^^^^^ | - = note: `-D clippy::min-max` implied by `-D warnings` - = help: to override `-D warnings` add `#[allow(clippy::min_max)]` + = note: `#[deny(clippy::min_max)]` on by default error: this `min`/`max` combination leads to constant result - --> tests/ui/min_max.rs:25:5 + --> tests/ui/min_max.rs:24:5 | LL | min(max(3, x), 1); | ^^^^^^^^^^^^^^^^^ error: this `min`/`max` combination leads to constant result - --> tests/ui/min_max.rs:28:5 + --> tests/ui/min_max.rs:27:5 | LL | max(min(x, 1), 3); | ^^^^^^^^^^^^^^^^^ error: this `min`/`max` combination leads to constant result - --> tests/ui/min_max.rs:31:5 + --> tests/ui/min_max.rs:30:5 | LL | max(3, min(x, 1)); | ^^^^^^^^^^^^^^^^^ error: this `min`/`max` combination leads to constant result - --> tests/ui/min_max.rs:34:5 + --> tests/ui/min_max.rs:33:5 | LL | my_max(3, my_min(x, 1)); | ^^^^^^^^^^^^^^^^^^^^^^^ error: this `min`/`max` combination leads to constant result - --> tests/ui/min_max.rs:45:5 + --> tests/ui/min_max.rs:44:5 | LL | min("Apple", max("Zoo", s)); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: this `min`/`max` combination leads to constant result - --> tests/ui/min_max.rs:48:5 + --> tests/ui/min_max.rs:47:5 | LL | max(min(s, "Apple"), "Zoo"); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: this `min`/`max` combination leads to constant result - --> tests/ui/min_max.rs:54:5 + --> tests/ui/min_max.rs:53:5 | LL | x.min(1).max(3); | ^^^^^^^^^^^^^^^ error: this `min`/`max` combination leads to constant result - --> tests/ui/min_max.rs:57:5 + --> tests/ui/min_max.rs:56:5 | LL | x.max(3).min(1); | ^^^^^^^^^^^^^^^ error: this `min`/`max` combination leads to constant result - --> tests/ui/min_max.rs:60:5 + --> tests/ui/min_max.rs:59:5 | LL | f.max(3f32).min(1f32); | ^^^^^^^^^^^^^^^^^^^^^ error: this `min`/`max` combination leads to constant result - --> tests/ui/min_max.rs:67:5 + --> tests/ui/min_max.rs:66:5 | LL | max(x.min(1), 3); | ^^^^^^^^^^^^^^^^ error: this `min`/`max` combination leads to constant result - --> tests/ui/min_max.rs:72:5 + --> tests/ui/min_max.rs:71:5 | LL | s.max("Zoo").min("Apple"); | ^^^^^^^^^^^^^^^^^^^^^^^^^ error: this `min`/`max` combination leads to constant result - --> tests/ui/min_max.rs:75:5 + --> tests/ui/min_max.rs:74:5 | LL | s.min("Apple").max("Zoo"); | ^^^^^^^^^^^^^^^^^^^^^^^^^ diff --git a/src/tools/clippy/tests/ui/misnamed_getters.fixed b/src/tools/clippy/tests/ui/misnamed_getters.fixed index cada5307b1c8e..bc123d1a40ba2 100644 --- a/src/tools/clippy/tests/ui/misnamed_getters.fixed +++ b/src/tools/clippy/tests/ui/misnamed_getters.fixed @@ -54,63 +54,63 @@ impl B { unsafe fn a(&self) -> &u8 { //~^ misnamed_getters - &self.a + unsafe { &self.a } } unsafe fn a_mut(&mut self) -> &mut u8 { //~^ misnamed_getters - &mut self.a + unsafe { &mut self.a } } unsafe fn b(self) -> u8 { //~^ misnamed_getters - self.b + unsafe { self.b } } unsafe fn b_mut(&mut self) -> &mut u8 { //~^ misnamed_getters - &mut self.b + unsafe { &mut self.b } } unsafe fn c(&self) -> &u8 { - &self.b + unsafe { &self.b } } unsafe fn c_mut(&mut self) -> &mut u8 { - &mut self.a + unsafe { &mut self.a } } unsafe fn a_unchecked(&self) -> &u8 { //~^ misnamed_getters - &self.a + unsafe { &self.a } } unsafe fn a_unchecked_mut(&mut self) -> &mut u8 { //~^ misnamed_getters - &mut self.a + unsafe { &mut self.a } } unsafe fn b_unchecked(self) -> u8 { //~^ misnamed_getters - self.b + unsafe { self.b } } unsafe fn b_unchecked_mut(&mut self) -> &mut u8 { //~^ misnamed_getters - &mut self.b + unsafe { &mut self.b } } unsafe fn c_unchecked(&self) -> &u8 { - &self.b + unsafe { &self.b } } unsafe fn c_unchecked_mut(&mut self) -> &mut u8 { - &mut self.a + unsafe { &mut self.a } } } diff --git a/src/tools/clippy/tests/ui/misnamed_getters.rs b/src/tools/clippy/tests/ui/misnamed_getters.rs index f529c56b4717b..6590101157c3f 100644 --- a/src/tools/clippy/tests/ui/misnamed_getters.rs +++ b/src/tools/clippy/tests/ui/misnamed_getters.rs @@ -54,63 +54,63 @@ impl B { unsafe fn a(&self) -> &u8 { //~^ misnamed_getters - &self.b + unsafe { &self.b } } unsafe fn a_mut(&mut self) -> &mut u8 { //~^ misnamed_getters - &mut self.b + unsafe { &mut self.b } } unsafe fn b(self) -> u8 { //~^ misnamed_getters - self.a + unsafe { self.a } } unsafe fn b_mut(&mut self) -> &mut u8 { //~^ misnamed_getters - &mut self.a + unsafe { &mut self.a } } unsafe fn c(&self) -> &u8 { - &self.b + unsafe { &self.b } } unsafe fn c_mut(&mut self) -> &mut u8 { - &mut self.a + unsafe { &mut self.a } } unsafe fn a_unchecked(&self) -> &u8 { //~^ misnamed_getters - &self.b + unsafe { &self.b } } unsafe fn a_unchecked_mut(&mut self) -> &mut u8 { //~^ misnamed_getters - &mut self.b + unsafe { &mut self.b } } unsafe fn b_unchecked(self) -> u8 { //~^ misnamed_getters - self.a + unsafe { self.a } } unsafe fn b_unchecked_mut(&mut self) -> &mut u8 { //~^ misnamed_getters - &mut self.a + unsafe { &mut self.a } } unsafe fn c_unchecked(&self) -> &u8 { - &self.b + unsafe { &self.b } } unsafe fn c_unchecked_mut(&mut self) -> &mut u8 { - &mut self.a + unsafe { &mut self.a } } } diff --git a/src/tools/clippy/tests/ui/misnamed_getters.stderr b/src/tools/clippy/tests/ui/misnamed_getters.stderr index 5dd1d75bcf6f1..aaf21cecb9255 100644 --- a/src/tools/clippy/tests/ui/misnamed_getters.stderr +++ b/src/tools/clippy/tests/ui/misnamed_getters.stderr @@ -73,8 +73,8 @@ error: getter function appears to return the wrong field LL | / unsafe fn a(&self) -> &u8 { LL | | LL | | -LL | | &self.b - | | ------- help: consider using: `&self.a` +LL | | unsafe { &self.b } + | | ------- help: consider using: `&self.a` LL | | } | |_____^ @@ -84,8 +84,8 @@ error: getter function appears to return the wrong field LL | / unsafe fn a_mut(&mut self) -> &mut u8 { LL | | LL | | -LL | | &mut self.b - | | ----------- help: consider using: `&mut self.a` +LL | | unsafe { &mut self.b } + | | ----------- help: consider using: `&mut self.a` LL | | } | |_____^ @@ -95,8 +95,8 @@ error: getter function appears to return the wrong field LL | / unsafe fn b(self) -> u8 { LL | | LL | | -LL | | self.a - | | ------ help: consider using: `self.b` +LL | | unsafe { self.a } + | | ------ help: consider using: `self.b` LL | | } | |_____^ @@ -106,8 +106,8 @@ error: getter function appears to return the wrong field LL | / unsafe fn b_mut(&mut self) -> &mut u8 { LL | | LL | | -LL | | &mut self.a - | | ----------- help: consider using: `&mut self.b` +LL | | unsafe { &mut self.a } + | | ----------- help: consider using: `&mut self.b` LL | | } | |_____^ @@ -117,8 +117,8 @@ error: getter function appears to return the wrong field LL | / unsafe fn a_unchecked(&self) -> &u8 { LL | | LL | | -LL | | &self.b - | | ------- help: consider using: `&self.a` +LL | | unsafe { &self.b } + | | ------- help: consider using: `&self.a` LL | | } | |_____^ @@ -128,8 +128,8 @@ error: getter function appears to return the wrong field LL | / unsafe fn a_unchecked_mut(&mut self) -> &mut u8 { LL | | LL | | -LL | | &mut self.b - | | ----------- help: consider using: `&mut self.a` +LL | | unsafe { &mut self.b } + | | ----------- help: consider using: `&mut self.a` LL | | } | |_____^ @@ -139,8 +139,8 @@ error: getter function appears to return the wrong field LL | / unsafe fn b_unchecked(self) -> u8 { LL | | LL | | -LL | | self.a - | | ------ help: consider using: `self.b` +LL | | unsafe { self.a } + | | ------ help: consider using: `self.b` LL | | } | |_____^ @@ -150,8 +150,8 @@ error: getter function appears to return the wrong field LL | / unsafe fn b_unchecked_mut(&mut self) -> &mut u8 { LL | | LL | | -LL | | &mut self.a - | | ----------- help: consider using: `&mut self.b` +LL | | unsafe { &mut self.a } + | | ----------- help: consider using: `&mut self.b` LL | | } | |_____^ diff --git a/src/tools/clippy/tests/ui/misnamed_getters_2021.fixed b/src/tools/clippy/tests/ui/misnamed_getters_2021.fixed new file mode 100644 index 0000000000000..7112719a9f284 --- /dev/null +++ b/src/tools/clippy/tests/ui/misnamed_getters_2021.fixed @@ -0,0 +1,24 @@ +//@edition: 2021 +#![allow(unused)] +#![allow(clippy::struct_field_names)] +#![warn(clippy::misnamed_getters)] + +// Edition 2021 specific check, where `unsafe` blocks are not required +// inside `unsafe fn`. + +union B { + a: u8, + b: u8, +} + +impl B { + unsafe fn a(&self) -> &u8 { + //~^ misnamed_getters + + &self.a + } +} + +fn main() { + // test code goes here +} diff --git a/src/tools/clippy/tests/ui/misnamed_getters_2021.rs b/src/tools/clippy/tests/ui/misnamed_getters_2021.rs new file mode 100644 index 0000000000000..19b5d086041f4 --- /dev/null +++ b/src/tools/clippy/tests/ui/misnamed_getters_2021.rs @@ -0,0 +1,24 @@ +//@edition: 2021 +#![allow(unused)] +#![allow(clippy::struct_field_names)] +#![warn(clippy::misnamed_getters)] + +// Edition 2021 specific check, where `unsafe` blocks are not required +// inside `unsafe fn`. + +union B { + a: u8, + b: u8, +} + +impl B { + unsafe fn a(&self) -> &u8 { + //~^ misnamed_getters + + &self.b + } +} + +fn main() { + // test code goes here +} diff --git a/src/tools/clippy/tests/ui/misnamed_getters_2021.stderr b/src/tools/clippy/tests/ui/misnamed_getters_2021.stderr new file mode 100644 index 0000000000000..5495e2e3733f0 --- /dev/null +++ b/src/tools/clippy/tests/ui/misnamed_getters_2021.stderr @@ -0,0 +1,16 @@ +error: getter function appears to return the wrong field + --> tests/ui/misnamed_getters_2021.rs:15:5 + | +LL | / unsafe fn a(&self) -> &u8 { +LL | | +LL | | +LL | | &self.b + | | ------- help: consider using: `&self.a` +LL | | } + | |_____^ + | + = note: `-D clippy::misnamed-getters` implied by `-D warnings` + = help: to override `-D warnings` add `#[allow(clippy::misnamed_getters)]` + +error: aborting due to 1 previous error + diff --git a/src/tools/clippy/tests/ui/missing_asserts_for_indexing.fixed b/src/tools/clippy/tests/ui/missing_asserts_for_indexing.fixed index 3bbafe0bba3fe..9018f38100efd 100644 --- a/src/tools/clippy/tests/ui/missing_asserts_for_indexing.fixed +++ b/src/tools/clippy/tests/ui/missing_asserts_for_indexing.fixed @@ -139,4 +139,31 @@ fn issue11835(v1: &[u8], v2: &[u8], v3: &[u8], v4: &[u8]) { let _ = v4[0] + v4[1] + v4[2]; } +// ok +fn same_index_multiple_times(v1: &[u8]) { + let _ = v1[0] + v1[0]; +} + +// ok +fn highest_index_first(v1: &[u8]) { + let _ = v1[2] + v1[1] + v1[0]; +} + +fn issue14255(v1: &[u8], v2: &[u8], v3: &[u8], v4: &[u8]) { + assert!(v1.len() == 3); + assert_eq!(v2.len(), 4); + assert!(v3.len() == 3); + assert_eq!(4, v4.len()); + + let _ = v1[0] + v1[1] + v1[2]; + //~^ missing_asserts_for_indexing + + let _ = v2[0] + v2[1] + v2[2]; + + let _ = v3[0] + v3[1] + v3[2]; + //~^ missing_asserts_for_indexing + + let _ = v4[0] + v4[1] + v4[2]; +} + fn main() {} diff --git a/src/tools/clippy/tests/ui/missing_asserts_for_indexing.rs b/src/tools/clippy/tests/ui/missing_asserts_for_indexing.rs index f8ea0173c13fc..44c5eddf3d8b9 100644 --- a/src/tools/clippy/tests/ui/missing_asserts_for_indexing.rs +++ b/src/tools/clippy/tests/ui/missing_asserts_for_indexing.rs @@ -139,4 +139,31 @@ fn issue11835(v1: &[u8], v2: &[u8], v3: &[u8], v4: &[u8]) { let _ = v4[0] + v4[1] + v4[2]; } +// ok +fn same_index_multiple_times(v1: &[u8]) { + let _ = v1[0] + v1[0]; +} + +// ok +fn highest_index_first(v1: &[u8]) { + let _ = v1[2] + v1[1] + v1[0]; +} + +fn issue14255(v1: &[u8], v2: &[u8], v3: &[u8], v4: &[u8]) { + assert_eq!(v1.len(), 2); + assert_eq!(v2.len(), 4); + assert_eq!(2, v3.len()); + assert_eq!(4, v4.len()); + + let _ = v1[0] + v1[1] + v1[2]; + //~^ missing_asserts_for_indexing + + let _ = v2[0] + v2[1] + v2[2]; + + let _ = v3[0] + v3[1] + v3[2]; + //~^ missing_asserts_for_indexing + + let _ = v4[0] + v4[1] + v4[2]; +} + fn main() {} diff --git a/src/tools/clippy/tests/ui/missing_asserts_for_indexing.stderr b/src/tools/clippy/tests/ui/missing_asserts_for_indexing.stderr index 5d30920ccf521..b610de94b5308 100644 --- a/src/tools/clippy/tests/ui/missing_asserts_for_indexing.stderr +++ b/src/tools/clippy/tests/ui/missing_asserts_for_indexing.stderr @@ -301,5 +301,57 @@ LL | let _ = v3[0] + v3[1] + v3[2]; | ^^^^^ = note: asserting the length before indexing will elide bounds checks -error: aborting due to 11 previous errors +error: indexing into a slice multiple times with an `assert` that does not cover the highest index + --> tests/ui/missing_asserts_for_indexing.rs:158:13 + | +LL | assert_eq!(v1.len(), 2); + | ----------------------- help: provide the highest index that is indexed with: `assert!(v1.len() == 3)` +... +LL | let _ = v1[0] + v1[1] + v1[2]; + | ^^^^^^^^^^^^^^^^^^^^^ + | +note: slice indexed here + --> tests/ui/missing_asserts_for_indexing.rs:158:13 + | +LL | let _ = v1[0] + v1[1] + v1[2]; + | ^^^^^ +note: slice indexed here + --> tests/ui/missing_asserts_for_indexing.rs:158:21 + | +LL | let _ = v1[0] + v1[1] + v1[2]; + | ^^^^^ +note: slice indexed here + --> tests/ui/missing_asserts_for_indexing.rs:158:29 + | +LL | let _ = v1[0] + v1[1] + v1[2]; + | ^^^^^ + = note: asserting the length before indexing will elide bounds checks + +error: indexing into a slice multiple times with an `assert` that does not cover the highest index + --> tests/ui/missing_asserts_for_indexing.rs:163:13 + | +LL | assert_eq!(2, v3.len()); + | ----------------------- help: provide the highest index that is indexed with: `assert!(v3.len() == 3)` +... +LL | let _ = v3[0] + v3[1] + v3[2]; + | ^^^^^^^^^^^^^^^^^^^^^ + | +note: slice indexed here + --> tests/ui/missing_asserts_for_indexing.rs:163:13 + | +LL | let _ = v3[0] + v3[1] + v3[2]; + | ^^^^^ +note: slice indexed here + --> tests/ui/missing_asserts_for_indexing.rs:163:21 + | +LL | let _ = v3[0] + v3[1] + v3[2]; + | ^^^^^ +note: slice indexed here + --> tests/ui/missing_asserts_for_indexing.rs:163:29 + | +LL | let _ = v3[0] + v3[1] + v3[2]; + | ^^^^^ + = note: asserting the length before indexing will elide bounds checks + +error: aborting due to 13 previous errors diff --git a/src/tools/clippy/tests/ui/missing_asserts_for_indexing_unfixable.rs b/src/tools/clippy/tests/ui/missing_asserts_for_indexing_unfixable.rs index a520151a2dd94..eb98969efa47f 100644 --- a/src/tools/clippy/tests/ui/missing_asserts_for_indexing_unfixable.rs +++ b/src/tools/clippy/tests/ui/missing_asserts_for_indexing_unfixable.rs @@ -73,4 +73,17 @@ pub fn issue11856(values: &[i32]) -> usize { ascending.len() } +fn assert_after_indexing(v1: &[u8]) { + let _ = v1[1] + v1[2]; + //~^ ERROR: indexing into a slice multiple times without an `assert` + assert!(v1.len() > 2); +} + +fn issue14255(v1: &[u8]) { + assert_ne!(v1.len(), 2); + + let _ = v1[0] + v1[1] + v1[2]; + //~^ missing_asserts_for_indexing +} + fn main() {} diff --git a/src/tools/clippy/tests/ui/missing_asserts_for_indexing_unfixable.stderr b/src/tools/clippy/tests/ui/missing_asserts_for_indexing_unfixable.stderr index 24109b052a8af..a17ad02321386 100644 --- a/src/tools/clippy/tests/ui/missing_asserts_for_indexing_unfixable.stderr +++ b/src/tools/clippy/tests/ui/missing_asserts_for_indexing_unfixable.stderr @@ -180,5 +180,48 @@ LL | let _ = x[0] + x[1]; | ^^^^ = note: asserting the length before indexing will elide bounds checks -error: aborting due to 8 previous errors +error: indexing into a slice multiple times without an `assert` + --> tests/ui/missing_asserts_for_indexing_unfixable.rs:77:13 + | +LL | let _ = v1[1] + v1[2]; + | ^^^^^^^^^^^^^ + | + = help: consider asserting the length before indexing: `assert!(v1.len() > 2);` +note: slice indexed here + --> tests/ui/missing_asserts_for_indexing_unfixable.rs:77:13 + | +LL | let _ = v1[1] + v1[2]; + | ^^^^^ +note: slice indexed here + --> tests/ui/missing_asserts_for_indexing_unfixable.rs:77:21 + | +LL | let _ = v1[1] + v1[2]; + | ^^^^^ + = note: asserting the length before indexing will elide bounds checks + +error: indexing into a slice multiple times without an `assert` + --> tests/ui/missing_asserts_for_indexing_unfixable.rs:85:13 + | +LL | let _ = v1[0] + v1[1] + v1[2]; + | ^^^^^^^^^^^^^^^^^^^^^ + | + = help: consider asserting the length before indexing: `assert!(v1.len() > 2);` +note: slice indexed here + --> tests/ui/missing_asserts_for_indexing_unfixable.rs:85:13 + | +LL | let _ = v1[0] + v1[1] + v1[2]; + | ^^^^^ +note: slice indexed here + --> tests/ui/missing_asserts_for_indexing_unfixable.rs:85:21 + | +LL | let _ = v1[0] + v1[1] + v1[2]; + | ^^^^^ +note: slice indexed here + --> tests/ui/missing_asserts_for_indexing_unfixable.rs:85:29 + | +LL | let _ = v1[0] + v1[1] + v1[2]; + | ^^^^^ + = note: asserting the length before indexing will elide bounds checks + +error: aborting due to 10 previous errors diff --git a/src/tools/clippy/tests/ui/missing_const_for_fn/could_be_const.fixed b/src/tools/clippy/tests/ui/missing_const_for_fn/could_be_const.fixed index 10df44e73b85f..65eb2d5938b6b 100644 --- a/src/tools/clippy/tests/ui/missing_const_for_fn/could_be_const.fixed +++ b/src/tools/clippy/tests/ui/missing_const_for_fn/could_be_const.fixed @@ -144,7 +144,7 @@ mod msrv { #[clippy::msrv = "1.62"] mod with_extern { - const extern "C" fn c() {} + const unsafe extern "C" fn c() {} //~^ missing_const_for_fn #[rustfmt::skip] @@ -153,7 +153,7 @@ mod msrv { //~^ missing_const_for_fn // any item functions in extern block won't trigger this lint - extern "C" { + unsafe extern "C" { fn c_in_block(); } } diff --git a/src/tools/clippy/tests/ui/missing_const_for_fn/could_be_const.rs b/src/tools/clippy/tests/ui/missing_const_for_fn/could_be_const.rs index bc44b34daef7e..3690d2f799ff4 100644 --- a/src/tools/clippy/tests/ui/missing_const_for_fn/could_be_const.rs +++ b/src/tools/clippy/tests/ui/missing_const_for_fn/could_be_const.rs @@ -144,7 +144,7 @@ mod msrv { #[clippy::msrv = "1.62"] mod with_extern { - extern "C" fn c() {} + unsafe extern "C" fn c() {} //~^ missing_const_for_fn #[rustfmt::skip] @@ -153,7 +153,7 @@ mod msrv { //~^ missing_const_for_fn // any item functions in extern block won't trigger this lint - extern "C" { + unsafe extern "C" { fn c_in_block(); } } diff --git a/src/tools/clippy/tests/ui/missing_const_for_fn/could_be_const.stderr b/src/tools/clippy/tests/ui/missing_const_for_fn/could_be_const.stderr index 5df5a54ff5216..10e07d12f5a4c 100644 --- a/src/tools/clippy/tests/ui/missing_const_for_fn/could_be_const.stderr +++ b/src/tools/clippy/tests/ui/missing_const_for_fn/could_be_const.stderr @@ -212,12 +212,12 @@ LL | const fn union_access_can_be_const() { error: this could be a `const fn` --> tests/ui/missing_const_for_fn/could_be_const.rs:147:9 | -LL | extern "C" fn c() {} - | ^^^^^^^^^^^^^^^^^^^^ +LL | unsafe extern "C" fn c() {} + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ | help: make the function `const` | -LL | const extern "C" fn c() {} +LL | const unsafe extern "C" fn c() {} | +++++ error: this could be a `const fn` diff --git a/src/tools/clippy/tests/ui/missing_panics_doc.rs b/src/tools/clippy/tests/ui/missing_panics_doc.rs index 95e361c5d5556..ffdae8504f72e 100644 --- a/src/tools/clippy/tests/ui/missing_panics_doc.rs +++ b/src/tools/clippy/tests/ui/missing_panics_doc.rs @@ -151,6 +151,45 @@ pub fn debug_assertions() { debug_assert_ne!(1, 2); } +pub fn partially_const(n: usize) { + //~^ missing_panics_doc + + const { + assert!(N > 5); + } + + assert!(N > n); +} + +pub fn expect_allow(i: Option) { + #[expect(clippy::missing_panics_doc)] + i.unwrap(); + + #[allow(clippy::missing_panics_doc)] + i.unwrap(); +} + +pub fn expect_allow_with_error(i: Option) { + //~^ missing_panics_doc + + #[expect(clippy::missing_panics_doc)] + i.unwrap(); + + #[allow(clippy::missing_panics_doc)] + i.unwrap(); + + i.unwrap(); +} + +pub fn expect_after_error(x: Option, y: Option) { + //~^ missing_panics_doc + + let x = x.unwrap(); + + #[expect(clippy::missing_panics_doc)] + let y = y.unwrap(); +} + // all function must be triggered the lint. // `pub` is required, because the lint does not consider unreachable items pub mod issue10240 { diff --git a/src/tools/clippy/tests/ui/missing_panics_doc.stderr b/src/tools/clippy/tests/ui/missing_panics_doc.stderr index a83e2fa367dd3..7f0acf8de9b77 100644 --- a/src/tools/clippy/tests/ui/missing_panics_doc.stderr +++ b/src/tools/clippy/tests/ui/missing_panics_doc.stderr @@ -73,76 +73,112 @@ LL | assert_ne!(x, 0); | ^^^^^^^^^^^^^^^^ error: docs for function which may panic missing `# Panics` section - --> tests/ui/missing_panics_doc.rs:157:5 + --> tests/ui/missing_panics_doc.rs:154:1 + | +LL | pub fn partially_const(n: usize) { + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | +note: first possible panic found here + --> tests/ui/missing_panics_doc.rs:161:5 + | +LL | assert!(N > n); + | ^^^^^^^^^^^^^^ + +error: docs for function which may panic missing `# Panics` section + --> tests/ui/missing_panics_doc.rs:172:1 + | +LL | pub fn expect_allow_with_error(i: Option) { + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | +note: first possible panic found here + --> tests/ui/missing_panics_doc.rs:181:5 + | +LL | i.unwrap(); + | ^^^^^^^^^^ + +error: docs for function which may panic missing `# Panics` section + --> tests/ui/missing_panics_doc.rs:184:1 + | +LL | pub fn expect_after_error(x: Option, y: Option) { + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | +note: first possible panic found here + --> tests/ui/missing_panics_doc.rs:187:13 + | +LL | let x = x.unwrap(); + | ^^^^^^^^^^ + +error: docs for function which may panic missing `# Panics` section + --> tests/ui/missing_panics_doc.rs:196:5 | LL | pub fn option_unwrap(v: &[T]) -> &T { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | note: first possible panic found here - --> tests/ui/missing_panics_doc.rs:160:9 + --> tests/ui/missing_panics_doc.rs:199:9 | LL | o.unwrap() | ^^^^^^^^^^ error: docs for function which may panic missing `# Panics` section - --> tests/ui/missing_panics_doc.rs:163:5 + --> tests/ui/missing_panics_doc.rs:202:5 | LL | pub fn option_expect(v: &[T]) -> &T { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | note: first possible panic found here - --> tests/ui/missing_panics_doc.rs:166:9 + --> tests/ui/missing_panics_doc.rs:205:9 | LL | o.expect("passed an empty thing") | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: docs for function which may panic missing `# Panics` section - --> tests/ui/missing_panics_doc.rs:169:5 + --> tests/ui/missing_panics_doc.rs:208:5 | LL | pub fn result_unwrap(v: &[T]) -> &T { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | note: first possible panic found here - --> tests/ui/missing_panics_doc.rs:172:9 + --> tests/ui/missing_panics_doc.rs:211:9 | LL | res.unwrap() | ^^^^^^^^^^^^ error: docs for function which may panic missing `# Panics` section - --> tests/ui/missing_panics_doc.rs:175:5 + --> tests/ui/missing_panics_doc.rs:214:5 | LL | pub fn result_expect(v: &[T]) -> &T { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | note: first possible panic found here - --> tests/ui/missing_panics_doc.rs:178:9 + --> tests/ui/missing_panics_doc.rs:217:9 | LL | res.expect("passed an empty thing") | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: docs for function which may panic missing `# Panics` section - --> tests/ui/missing_panics_doc.rs:181:5 + --> tests/ui/missing_panics_doc.rs:220:5 | LL | pub fn last_unwrap(v: &[u32]) -> u32 { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | note: first possible panic found here - --> tests/ui/missing_panics_doc.rs:183:10 + --> tests/ui/missing_panics_doc.rs:222:10 | LL | *v.last().unwrap() | ^^^^^^^^^^^^^^^^^ error: docs for function which may panic missing `# Panics` section - --> tests/ui/missing_panics_doc.rs:186:5 + --> tests/ui/missing_panics_doc.rs:225:5 | LL | pub fn last_expect(v: &[u32]) -> u32 { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | note: first possible panic found here - --> tests/ui/missing_panics_doc.rs:188:10 + --> tests/ui/missing_panics_doc.rs:227:10 | LL | *v.last().expect("passed an empty thing") | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -error: aborting due to 12 previous errors +error: aborting due to 15 previous errors diff --git a/src/tools/clippy/tests/ui/missing_transmute_annotations.fixed b/src/tools/clippy/tests/ui/missing_transmute_annotations.fixed index a3c94ab139ec6..58faeaee09d46 100644 --- a/src/tools/clippy/tests/ui/missing_transmute_annotations.fixed +++ b/src/tools/clippy/tests/ui/missing_transmute_annotations.fixed @@ -18,8 +18,10 @@ fn bar(x: i32) -> i32 { } unsafe fn foo1() -> i32 { - // Should not warn! - std::mem::transmute([1u16, 2u16]) + unsafe { + // Should not warn! + std::mem::transmute([1u16, 2u16]) + } } // Should not warn! @@ -31,33 +33,35 @@ enum Foo { } unsafe fn foo2() -> i32 { - let mut i: i32 = 0; - i = std::mem::transmute::<[u16; 2], i32>([1u16, 2u16]); - //~^ ERROR: transmute used without annotations - i = std::mem::transmute::<[u16; 2], i32>([1u16, 2u16]); - //~^ ERROR: transmute used without annotations - i = std::mem::transmute::<[u16; 2], i32>([1u16, 2u16]); - //~^ ERROR: transmute used without annotations - i = std::mem::transmute::<[u16; 2], i32>([1u16, 2u16]); - //~^ ERROR: transmute used without annotations + unsafe { + let mut i: i32 = 0; + i = std::mem::transmute::<[u16; 2], i32>([1u16, 2u16]); + //~^ ERROR: transmute used without annotations + i = std::mem::transmute::<[u16; 2], i32>([1u16, 2u16]); + //~^ ERROR: transmute used without annotations + i = std::mem::transmute::<[u16; 2], i32>([1u16, 2u16]); + //~^ ERROR: transmute used without annotations + i = std::mem::transmute::<[u16; 2], i32>([1u16, 2u16]); + //~^ ERROR: transmute used without annotations - let x: i32 = bar(std::mem::transmute::<[u16; 2], i32>([1u16, 2u16])); - //~^ ERROR: transmute used without annotations - bar(std::mem::transmute::<[u16; 2], i32>([1u16, 2u16])); - //~^ ERROR: transmute used without annotations + let x: i32 = bar(std::mem::transmute::<[u16; 2], i32>([1u16, 2u16])); + //~^ ERROR: transmute used without annotations + bar(std::mem::transmute::<[u16; 2], i32>([1u16, 2u16])); + //~^ ERROR: transmute used without annotations - i = local_bad_transmute!([1u16, 2u16]); + i = local_bad_transmute!([1u16, 2u16]); - // Should not warn. - i = bad_transmute!([1u16, 2u16]); + // Should not warn. + i = bad_transmute!([1u16, 2u16]); - i = std::mem::transmute::<[i16; 2], i32>([0i16, 0i16]); - //~^ ERROR: transmute used without annotations + i = std::mem::transmute::<[i16; 2], i32>([0i16, 0i16]); + //~^ ERROR: transmute used without annotations - i = std::mem::transmute::(Foo::A); - //~^ ERROR: transmute used without annotations + i = std::mem::transmute::(Foo::A); + //~^ ERROR: transmute used without annotations - i + i + } } fn main() { diff --git a/src/tools/clippy/tests/ui/missing_transmute_annotations.rs b/src/tools/clippy/tests/ui/missing_transmute_annotations.rs index c12e1b0f8d220..c9a4c5fa83b2b 100644 --- a/src/tools/clippy/tests/ui/missing_transmute_annotations.rs +++ b/src/tools/clippy/tests/ui/missing_transmute_annotations.rs @@ -18,8 +18,10 @@ fn bar(x: i32) -> i32 { } unsafe fn foo1() -> i32 { - // Should not warn! - std::mem::transmute([1u16, 2u16]) + unsafe { + // Should not warn! + std::mem::transmute([1u16, 2u16]) + } } // Should not warn! @@ -31,33 +33,35 @@ enum Foo { } unsafe fn foo2() -> i32 { - let mut i: i32 = 0; - i = std::mem::transmute([1u16, 2u16]); - //~^ ERROR: transmute used without annotations - i = std::mem::transmute::<_, _>([1u16, 2u16]); - //~^ ERROR: transmute used without annotations - i = std::mem::transmute::<_, i32>([1u16, 2u16]); - //~^ ERROR: transmute used without annotations - i = std::mem::transmute::<[u16; 2], _>([1u16, 2u16]); - //~^ ERROR: transmute used without annotations + unsafe { + let mut i: i32 = 0; + i = std::mem::transmute([1u16, 2u16]); + //~^ ERROR: transmute used without annotations + i = std::mem::transmute::<_, _>([1u16, 2u16]); + //~^ ERROR: transmute used without annotations + i = std::mem::transmute::<_, i32>([1u16, 2u16]); + //~^ ERROR: transmute used without annotations + i = std::mem::transmute::<[u16; 2], _>([1u16, 2u16]); + //~^ ERROR: transmute used without annotations - let x: i32 = bar(std::mem::transmute::<[u16; 2], _>([1u16, 2u16])); - //~^ ERROR: transmute used without annotations - bar(std::mem::transmute::<[u16; 2], _>([1u16, 2u16])); - //~^ ERROR: transmute used without annotations + let x: i32 = bar(std::mem::transmute::<[u16; 2], _>([1u16, 2u16])); + //~^ ERROR: transmute used without annotations + bar(std::mem::transmute::<[u16; 2], _>([1u16, 2u16])); + //~^ ERROR: transmute used without annotations - i = local_bad_transmute!([1u16, 2u16]); + i = local_bad_transmute!([1u16, 2u16]); - // Should not warn. - i = bad_transmute!([1u16, 2u16]); + // Should not warn. + i = bad_transmute!([1u16, 2u16]); - i = std::mem::transmute([0i16, 0i16]); - //~^ ERROR: transmute used without annotations + i = std::mem::transmute([0i16, 0i16]); + //~^ ERROR: transmute used without annotations - i = std::mem::transmute(Foo::A); - //~^ ERROR: transmute used without annotations + i = std::mem::transmute(Foo::A); + //~^ ERROR: transmute used without annotations - i + i + } } fn main() { diff --git a/src/tools/clippy/tests/ui/missing_transmute_annotations.stderr b/src/tools/clippy/tests/ui/missing_transmute_annotations.stderr index 5903ed488ef18..63f7e28ee7dc6 100644 --- a/src/tools/clippy/tests/ui/missing_transmute_annotations.stderr +++ b/src/tools/clippy/tests/ui/missing_transmute_annotations.stderr @@ -1,41 +1,41 @@ error: transmute used without annotations - --> tests/ui/missing_transmute_annotations.rs:35:19 + --> tests/ui/missing_transmute_annotations.rs:38:23 | -LL | i = std::mem::transmute([1u16, 2u16]); - | ^^^^^^^^^ help: consider adding missing annotations: `transmute::<[u16; 2], i32>` +LL | i = std::mem::transmute([1u16, 2u16]); + | ^^^^^^^^^ help: consider adding missing annotations: `transmute::<[u16; 2], i32>` | = note: `-D clippy::missing-transmute-annotations` implied by `-D warnings` = help: to override `-D warnings` add `#[allow(clippy::missing_transmute_annotations)]` error: transmute used without annotations - --> tests/ui/missing_transmute_annotations.rs:37:19 + --> tests/ui/missing_transmute_annotations.rs:40:23 | -LL | i = std::mem::transmute::<_, _>([1u16, 2u16]); - | ^^^^^^^^^^^^^^^^^ help: consider adding missing annotations: `transmute::<[u16; 2], i32>` +LL | i = std::mem::transmute::<_, _>([1u16, 2u16]); + | ^^^^^^^^^^^^^^^^^ help: consider adding missing annotations: `transmute::<[u16; 2], i32>` error: transmute used without annotations - --> tests/ui/missing_transmute_annotations.rs:39:19 + --> tests/ui/missing_transmute_annotations.rs:42:23 | -LL | i = std::mem::transmute::<_, i32>([1u16, 2u16]); - | ^^^^^^^^^^^^^^^^^^^ help: consider adding missing annotations: `transmute::<[u16; 2], i32>` +LL | i = std::mem::transmute::<_, i32>([1u16, 2u16]); + | ^^^^^^^^^^^^^^^^^^^ help: consider adding missing annotations: `transmute::<[u16; 2], i32>` error: transmute used without annotations - --> tests/ui/missing_transmute_annotations.rs:41:19 + --> tests/ui/missing_transmute_annotations.rs:44:23 | -LL | i = std::mem::transmute::<[u16; 2], _>([1u16, 2u16]); - | ^^^^^^^^^^^^^^^^^^^^^^^^ help: consider adding missing annotations: `transmute::<[u16; 2], i32>` +LL | i = std::mem::transmute::<[u16; 2], _>([1u16, 2u16]); + | ^^^^^^^^^^^^^^^^^^^^^^^^ help: consider adding missing annotations: `transmute::<[u16; 2], i32>` error: transmute used without annotations - --> tests/ui/missing_transmute_annotations.rs:44:32 + --> tests/ui/missing_transmute_annotations.rs:47:36 | -LL | let x: i32 = bar(std::mem::transmute::<[u16; 2], _>([1u16, 2u16])); - | ^^^^^^^^^^^^^^^^^^^^^^^^ help: consider adding missing annotations: `transmute::<[u16; 2], i32>` +LL | let x: i32 = bar(std::mem::transmute::<[u16; 2], _>([1u16, 2u16])); + | ^^^^^^^^^^^^^^^^^^^^^^^^ help: consider adding missing annotations: `transmute::<[u16; 2], i32>` error: transmute used without annotations - --> tests/ui/missing_transmute_annotations.rs:46:19 + --> tests/ui/missing_transmute_annotations.rs:49:23 | -LL | bar(std::mem::transmute::<[u16; 2], _>([1u16, 2u16])); - | ^^^^^^^^^^^^^^^^^^^^^^^^ help: consider adding missing annotations: `transmute::<[u16; 2], i32>` +LL | bar(std::mem::transmute::<[u16; 2], _>([1u16, 2u16])); + | ^^^^^^^^^^^^^^^^^^^^^^^^ help: consider adding missing annotations: `transmute::<[u16; 2], i32>` error: transmute used without annotations --> tests/ui/missing_transmute_annotations.rs:11:19 @@ -43,31 +43,31 @@ error: transmute used without annotations LL | std::mem::transmute($e) | ^^^^^^^^^ help: consider adding missing annotations: `transmute::<[u16; 2], i32>` ... -LL | i = local_bad_transmute!([1u16, 2u16]); - | ---------------------------------- in this macro invocation +LL | i = local_bad_transmute!([1u16, 2u16]); + | ---------------------------------- in this macro invocation | = note: this error originates in the macro `local_bad_transmute` (in Nightly builds, run with -Z macro-backtrace for more info) error: transmute used without annotations - --> tests/ui/missing_transmute_annotations.rs:54:19 + --> tests/ui/missing_transmute_annotations.rs:57:23 | -LL | i = std::mem::transmute([0i16, 0i16]); - | ^^^^^^^^^ help: consider adding missing annotations: `transmute::<[i16; 2], i32>` +LL | i = std::mem::transmute([0i16, 0i16]); + | ^^^^^^^^^ help: consider adding missing annotations: `transmute::<[i16; 2], i32>` error: transmute used without annotations - --> tests/ui/missing_transmute_annotations.rs:57:19 + --> tests/ui/missing_transmute_annotations.rs:60:23 | -LL | i = std::mem::transmute(Foo::A); - | ^^^^^^^^^ help: consider adding missing annotations: `transmute::` +LL | i = std::mem::transmute(Foo::A); + | ^^^^^^^^^ help: consider adding missing annotations: `transmute::` error: transmute used without annotations - --> tests/ui/missing_transmute_annotations.rs:64:35 + --> tests/ui/missing_transmute_annotations.rs:68:35 | LL | let x: _ = unsafe { std::mem::transmute::<_, i32>([1u16, 2u16]) }; | ^^^^^^^^^^^^^^^^^^^ help: consider adding missing annotations: `transmute::<[u16; 2], i32>` error: transmute used without annotations - --> tests/ui/missing_transmute_annotations.rs:67:30 + --> tests/ui/missing_transmute_annotations.rs:71:30 | LL | let x: _ = std::mem::transmute::<_, i32>([1u16, 2u16]); | ^^^^^^^^^^^^^^^^^^^ help: consider adding missing annotations: `transmute::<[u16; 2], i32>` diff --git a/src/tools/clippy/tests/ui/must_use_candidates.fixed b/src/tools/clippy/tests/ui/must_use_candidates.fixed index b5d356a502170..4c1d6b1ccb596 100644 --- a/src/tools/clippy/tests/ui/must_use_candidates.fixed +++ b/src/tools/clippy/tests/ui/must_use_candidates.fixed @@ -88,11 +88,13 @@ static mut COUNTER: usize = 0; /// /// Don't ever call this from multiple threads pub unsafe fn mutates_static() -> usize { - COUNTER += 1; - COUNTER + unsafe { + COUNTER += 1; + COUNTER + } } -#[no_mangle] +#[unsafe(no_mangle)] pub extern "C" fn unmangled(i: bool) -> bool { !i } diff --git a/src/tools/clippy/tests/ui/must_use_candidates.rs b/src/tools/clippy/tests/ui/must_use_candidates.rs index 14ea16662fdb4..71d546718ae79 100644 --- a/src/tools/clippy/tests/ui/must_use_candidates.rs +++ b/src/tools/clippy/tests/ui/must_use_candidates.rs @@ -88,11 +88,13 @@ static mut COUNTER: usize = 0; /// /// Don't ever call this from multiple threads pub unsafe fn mutates_static() -> usize { - COUNTER += 1; - COUNTER + unsafe { + COUNTER += 1; + COUNTER + } } -#[no_mangle] +#[unsafe(no_mangle)] pub extern "C" fn unmangled(i: bool) -> bool { !i } diff --git a/src/tools/clippy/tests/ui/mut_from_ref.rs b/src/tools/clippy/tests/ui/mut_from_ref.rs index b8c10f3eeb8f9..1b0b351518cbb 100644 --- a/src/tools/clippy/tests/ui/mut_from_ref.rs +++ b/src/tools/clippy/tests/ui/mut_from_ref.rs @@ -1,4 +1,10 @@ -#![allow(unused, clippy::needless_lifetimes, clippy::needless_pass_by_ref_mut)] +#![allow( + unused, + clippy::needless_lifetimes, + clippy::needless_pass_by_ref_mut, + clippy::redundant_allocation, + clippy::boxed_local +)] #![warn(clippy::mut_from_ref)] struct Foo; @@ -40,6 +46,18 @@ fn fail_double<'a, 'b>(x: &'a u32, y: &'a u32, z: &'b mut u32) -> &'a mut u32 { unsafe { unimplemented!() } } +fn fail_tuples<'a>(x: (&'a u32, &'a u32)) -> &'a mut u32 { + //~^ mut_from_ref + + unsafe { unimplemented!() } +} + +fn fail_box<'a>(x: Box<&'a u32>) -> &'a mut u32 { + //~^ mut_from_ref + + unsafe { unimplemented!() } +} + // this is OK, because the result borrows y fn works<'a>(x: &u32, y: &'a mut u32) -> &'a mut u32 { unsafe { unimplemented!() } @@ -50,6 +68,20 @@ fn also_works<'a>(x: &'a u32, y: &'a mut u32) -> &'a mut u32 { unsafe { unimplemented!() } } +fn works_tuples<'a>(x: (&'a u32, &'a mut u32)) -> &'a mut u32 { + unsafe { unimplemented!() } +} + +fn works_box<'a>(x: &'a u32, y: Box<&'a mut u32>) -> &'a mut u32 { + unsafe { unimplemented!() } +} + +struct RefMut<'a>(&'a mut u32); + +fn works_parameter<'a>(x: &'a u32, y: RefMut<'a>) -> &'a mut u32 { + unsafe { unimplemented!() } +} + unsafe fn also_broken(x: &u32) -> &mut u32 { //~^ mut_from_ref diff --git a/src/tools/clippy/tests/ui/mut_from_ref.stderr b/src/tools/clippy/tests/ui/mut_from_ref.stderr index 8c3c8e0c3d851..0974268734653 100644 --- a/src/tools/clippy/tests/ui/mut_from_ref.stderr +++ b/src/tools/clippy/tests/ui/mut_from_ref.stderr @@ -1,11 +1,11 @@ error: mutable borrow from immutable input(s) - --> tests/ui/mut_from_ref.rs:7:39 + --> tests/ui/mut_from_ref.rs:13:39 | LL | fn this_wont_hurt_a_bit(&self) -> &mut Foo { | ^^^^^^^^ | note: immutable borrow here - --> tests/ui/mut_from_ref.rs:7:29 + --> tests/ui/mut_from_ref.rs:13:29 | LL | fn this_wont_hurt_a_bit(&self) -> &mut Foo { | ^^^^^ @@ -13,64 +13,88 @@ LL | fn this_wont_hurt_a_bit(&self) -> &mut Foo { = help: to override `-D warnings` add `#[allow(clippy::mut_from_ref)]` error: mutable borrow from immutable input(s) - --> tests/ui/mut_from_ref.rs:15:25 + --> tests/ui/mut_from_ref.rs:21:25 | LL | fn ouch(x: &Foo) -> &mut Foo; | ^^^^^^^^ | note: immutable borrow here - --> tests/ui/mut_from_ref.rs:15:16 + --> tests/ui/mut_from_ref.rs:21:16 | LL | fn ouch(x: &Foo) -> &mut Foo; | ^^^^ error: mutable borrow from immutable input(s) - --> tests/ui/mut_from_ref.rs:25:21 + --> tests/ui/mut_from_ref.rs:31:21 | LL | fn fail(x: &u32) -> &mut u16 { | ^^^^^^^^ | note: immutable borrow here - --> tests/ui/mut_from_ref.rs:25:12 + --> tests/ui/mut_from_ref.rs:31:12 | LL | fn fail(x: &u32) -> &mut u16 { | ^^^^ error: mutable borrow from immutable input(s) - --> tests/ui/mut_from_ref.rs:31:50 + --> tests/ui/mut_from_ref.rs:37:50 | LL | fn fail_lifetime<'a>(x: &'a u32, y: &mut u32) -> &'a mut u32 { | ^^^^^^^^^^^ | note: immutable borrow here - --> tests/ui/mut_from_ref.rs:31:25 + --> tests/ui/mut_from_ref.rs:37:25 | LL | fn fail_lifetime<'a>(x: &'a u32, y: &mut u32) -> &'a mut u32 { | ^^^^^^^ error: mutable borrow from immutable input(s) - --> tests/ui/mut_from_ref.rs:37:67 + --> tests/ui/mut_from_ref.rs:43:67 | LL | fn fail_double<'a, 'b>(x: &'a u32, y: &'a u32, z: &'b mut u32) -> &'a mut u32 { | ^^^^^^^^^^^ | note: immutable borrow here - --> tests/ui/mut_from_ref.rs:37:27 + --> tests/ui/mut_from_ref.rs:43:27 | LL | fn fail_double<'a, 'b>(x: &'a u32, y: &'a u32, z: &'b mut u32) -> &'a mut u32 { | ^^^^^^^ ^^^^^^^ error: mutable borrow from immutable input(s) - --> tests/ui/mut_from_ref.rs:53:35 + --> tests/ui/mut_from_ref.rs:49:46 + | +LL | fn fail_tuples<'a>(x: (&'a u32, &'a u32)) -> &'a mut u32 { + | ^^^^^^^^^^^ + | +note: immutable borrow here + --> tests/ui/mut_from_ref.rs:49:24 + | +LL | fn fail_tuples<'a>(x: (&'a u32, &'a u32)) -> &'a mut u32 { + | ^^^^^^^ ^^^^^^^ + +error: mutable borrow from immutable input(s) + --> tests/ui/mut_from_ref.rs:55:37 + | +LL | fn fail_box<'a>(x: Box<&'a u32>) -> &'a mut u32 { + | ^^^^^^^^^^^ + | +note: immutable borrow here + --> tests/ui/mut_from_ref.rs:55:24 + | +LL | fn fail_box<'a>(x: Box<&'a u32>) -> &'a mut u32 { + | ^^^^^^^ + +error: mutable borrow from immutable input(s) + --> tests/ui/mut_from_ref.rs:85:35 | LL | unsafe fn also_broken(x: &u32) -> &mut u32 { | ^^^^^^^^ | note: immutable borrow here - --> tests/ui/mut_from_ref.rs:53:26 + --> tests/ui/mut_from_ref.rs:85:26 | LL | unsafe fn also_broken(x: &u32) -> &mut u32 { | ^^^^ -error: aborting due to 6 previous errors +error: aborting due to 8 previous errors diff --git a/src/tools/clippy/tests/ui/mutex_atomic.rs b/src/tools/clippy/tests/ui/mutex_atomic.rs index 80a712a9286a4..7db5c9f274f6e 100644 --- a/src/tools/clippy/tests/ui/mutex_atomic.rs +++ b/src/tools/clippy/tests/ui/mutex_atomic.rs @@ -1,4 +1,3 @@ -#![warn(clippy::all)] #![warn(clippy::mutex_integer)] #![warn(clippy::mutex_atomic)] #![allow(clippy::borrow_as_ptr)] diff --git a/src/tools/clippy/tests/ui/mutex_atomic.stderr b/src/tools/clippy/tests/ui/mutex_atomic.stderr index 838fc1d7c36ee..a6d5d60fbf05b 100644 --- a/src/tools/clippy/tests/ui/mutex_atomic.stderr +++ b/src/tools/clippy/tests/ui/mutex_atomic.stderr @@ -1,5 +1,5 @@ error: consider using an `AtomicBool` instead of a `Mutex` here; if you just want the locking behavior and not the internal type, consider using `Mutex<()>` - --> tests/ui/mutex_atomic.rs:8:5 + --> tests/ui/mutex_atomic.rs:7:5 | LL | Mutex::new(true); | ^^^^^^^^^^^^^^^^ @@ -8,31 +8,31 @@ LL | Mutex::new(true); = help: to override `-D warnings` add `#[allow(clippy::mutex_atomic)]` error: consider using an `AtomicUsize` instead of a `Mutex` here; if you just want the locking behavior and not the internal type, consider using `Mutex<()>` - --> tests/ui/mutex_atomic.rs:11:5 + --> tests/ui/mutex_atomic.rs:10:5 | LL | Mutex::new(5usize); | ^^^^^^^^^^^^^^^^^^ error: consider using an `AtomicIsize` instead of a `Mutex` here; if you just want the locking behavior and not the internal type, consider using `Mutex<()>` - --> tests/ui/mutex_atomic.rs:14:5 + --> tests/ui/mutex_atomic.rs:13:5 | LL | Mutex::new(9isize); | ^^^^^^^^^^^^^^^^^^ error: consider using an `AtomicPtr` instead of a `Mutex` here; if you just want the locking behavior and not the internal type, consider using `Mutex<()>` - --> tests/ui/mutex_atomic.rs:18:5 + --> tests/ui/mutex_atomic.rs:17:5 | LL | Mutex::new(&x as *const u32); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: consider using an `AtomicPtr` instead of a `Mutex` here; if you just want the locking behavior and not the internal type, consider using `Mutex<()>` - --> tests/ui/mutex_atomic.rs:21:5 + --> tests/ui/mutex_atomic.rs:20:5 | LL | Mutex::new(&mut x as *mut u32); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: consider using an `AtomicU32` instead of a `Mutex` here; if you just want the locking behavior and not the internal type, consider using `Mutex<()>` - --> tests/ui/mutex_atomic.rs:24:5 + --> tests/ui/mutex_atomic.rs:23:5 | LL | Mutex::new(0u32); | ^^^^^^^^^^^^^^^^ @@ -41,31 +41,31 @@ LL | Mutex::new(0u32); = help: to override `-D warnings` add `#[allow(clippy::mutex_integer)]` error: consider using an `AtomicI32` instead of a `Mutex` here; if you just want the locking behavior and not the internal type, consider using `Mutex<()>` - --> tests/ui/mutex_atomic.rs:27:5 + --> tests/ui/mutex_atomic.rs:26:5 | LL | Mutex::new(0i32); | ^^^^^^^^^^^^^^^^ error: consider using an `AtomicU8` instead of a `Mutex` here; if you just want the locking behavior and not the internal type, consider using `Mutex<()>` - --> tests/ui/mutex_atomic.rs:31:5 + --> tests/ui/mutex_atomic.rs:30:5 | LL | Mutex::new(0u8); | ^^^^^^^^^^^^^^^ error: consider using an `AtomicI16` instead of a `Mutex` here; if you just want the locking behavior and not the internal type, consider using `Mutex<()>` - --> tests/ui/mutex_atomic.rs:34:5 + --> tests/ui/mutex_atomic.rs:33:5 | LL | Mutex::new(0i16); | ^^^^^^^^^^^^^^^^ error: consider using an `AtomicI8` instead of a `Mutex` here; if you just want the locking behavior and not the internal type, consider using `Mutex<()>` - --> tests/ui/mutex_atomic.rs:37:25 + --> tests/ui/mutex_atomic.rs:36:25 | LL | let _x: Mutex = Mutex::new(0); | ^^^^^^^^^^^^^ error: consider using an `AtomicI64` instead of a `Mutex` here; if you just want the locking behavior and not the internal type, consider using `Mutex<()>` - --> tests/ui/mutex_atomic.rs:41:5 + --> tests/ui/mutex_atomic.rs:40:5 | LL | Mutex::new(X); | ^^^^^^^^^^^^^ diff --git a/src/tools/clippy/tests/ui/needless_borrowed_ref.fixed b/src/tools/clippy/tests/ui/needless_borrowed_ref.fixed index e4504bc2784cc..84924cac62d52 100644 --- a/src/tools/clippy/tests/ui/needless_borrowed_ref.fixed +++ b/src/tools/clippy/tests/ui/needless_borrowed_ref.fixed @@ -89,7 +89,7 @@ fn should_not_lint( tuple_struct: TupleStruct, s: Struct, ) { - if let [ref a] = slice {} + if let [a] = slice {} if let &[ref a, b] = slice {} if let &[ref a, .., b] = slice {} diff --git a/src/tools/clippy/tests/ui/needless_borrowed_ref.rs b/src/tools/clippy/tests/ui/needless_borrowed_ref.rs index 7edfda60b9790..280cef43340cc 100644 --- a/src/tools/clippy/tests/ui/needless_borrowed_ref.rs +++ b/src/tools/clippy/tests/ui/needless_borrowed_ref.rs @@ -89,7 +89,7 @@ fn should_not_lint( tuple_struct: TupleStruct, s: Struct, ) { - if let [ref a] = slice {} + if let [a] = slice {} if let &[ref a, b] = slice {} if let &[ref a, .., b] = slice {} diff --git a/src/tools/clippy/tests/ui/needless_collect.fixed b/src/tools/clippy/tests/ui/needless_collect.fixed index 6551fa56b42ce..b09efe9888f50 100644 --- a/src/tools/clippy/tests/ui/needless_collect.fixed +++ b/src/tools/clippy/tests/ui/needless_collect.fixed @@ -126,3 +126,87 @@ fn main() { fn foo(_: impl IntoIterator) {} fn bar>(_: Vec, _: I) {} fn baz>(_: I, _: (), _: impl IntoIterator) {} + +mod issue9191 { + use std::cell::Cell; + use std::collections::HashSet; + use std::hash::Hash; + use std::marker::PhantomData; + use std::ops::Deref; + + fn captures_ref_mut(xs: Vec, mut ys: HashSet) { + if xs.iter().map(|x| ys.remove(x)).collect::>().contains(&true) { + todo!() + } + } + + #[derive(Debug, Clone)] + struct MyRef<'a>(PhantomData<&'a mut Cell>>, *mut Cell>); + + impl MyRef<'_> { + fn new(target: &mut Cell>) -> Self { + MyRef(PhantomData, target) + } + + fn get(&mut self) -> &mut Cell> { + unsafe { &mut *self.1 } + } + } + + fn captures_phantom(xs: Vec, mut ys: Cell>) { + let mut ys_ref = MyRef::new(&mut ys); + if xs + .iter() + .map({ + let mut ys_ref = ys_ref.clone(); + move |x| ys_ref.get().get_mut().remove(x) + }) + .collect::>() + .contains(&true) + { + todo!() + } + } +} + +pub fn issue8055(v: impl IntoIterator) -> Result, usize> { + let mut zeros = 0; + + let res: Vec<_> = v + .into_iter() + .filter(|i| { + if *i == 0 { + zeros += 1 + }; + *i != 0 + }) + .collect(); + + if zeros != 0 { + return Err(zeros); + } + Ok(res.into_iter()) +} + +mod issue8055_regression { + struct Foo { + inner: T, + marker: core::marker::PhantomData, + } + + impl Iterator for Foo { + type Item = T::Item; + fn next(&mut self) -> Option { + self.inner.next() + } + } + + fn foo() { + Foo { + inner: [].iter(), + marker: core::marker::PhantomData, + } + .collect::>() + .len(); + } +} diff --git a/src/tools/clippy/tests/ui/needless_collect.rs b/src/tools/clippy/tests/ui/needless_collect.rs index 973c41c687544..da4182966bb17 100644 --- a/src/tools/clippy/tests/ui/needless_collect.rs +++ b/src/tools/clippy/tests/ui/needless_collect.rs @@ -126,3 +126,87 @@ fn main() { fn foo(_: impl IntoIterator) {} fn bar>(_: Vec, _: I) {} fn baz>(_: I, _: (), _: impl IntoIterator) {} + +mod issue9191 { + use std::cell::Cell; + use std::collections::HashSet; + use std::hash::Hash; + use std::marker::PhantomData; + use std::ops::Deref; + + fn captures_ref_mut(xs: Vec, mut ys: HashSet) { + if xs.iter().map(|x| ys.remove(x)).collect::>().contains(&true) { + todo!() + } + } + + #[derive(Debug, Clone)] + struct MyRef<'a>(PhantomData<&'a mut Cell>>, *mut Cell>); + + impl MyRef<'_> { + fn new(target: &mut Cell>) -> Self { + MyRef(PhantomData, target) + } + + fn get(&mut self) -> &mut Cell> { + unsafe { &mut *self.1 } + } + } + + fn captures_phantom(xs: Vec, mut ys: Cell>) { + let mut ys_ref = MyRef::new(&mut ys); + if xs + .iter() + .map({ + let mut ys_ref = ys_ref.clone(); + move |x| ys_ref.get().get_mut().remove(x) + }) + .collect::>() + .contains(&true) + { + todo!() + } + } +} + +pub fn issue8055(v: impl IntoIterator) -> Result, usize> { + let mut zeros = 0; + + let res: Vec<_> = v + .into_iter() + .filter(|i| { + if *i == 0 { + zeros += 1 + }; + *i != 0 + }) + .collect(); + + if zeros != 0 { + return Err(zeros); + } + Ok(res.into_iter()) +} + +mod issue8055_regression { + struct Foo { + inner: T, + marker: core::marker::PhantomData, + } + + impl Iterator for Foo { + type Item = T::Item; + fn next(&mut self) -> Option { + self.inner.next() + } + } + + fn foo() { + Foo { + inner: [].iter(), + marker: core::marker::PhantomData, + } + .collect::>() + .len(); + } +} diff --git a/src/tools/clippy/tests/ui/needless_if.fixed b/src/tools/clippy/tests/ui/needless_if.fixed index 6208ca19b82b4..347dbff7c595c 100644 --- a/src/tools/clippy/tests/ui/needless_if.fixed +++ b/src/tools/clippy/tests/ui/needless_if.fixed @@ -46,9 +46,7 @@ fn main() { if let true = true && true {} - if true - && let true = true - {} + if true && let true = true {} // Can lint nested `if let`s ({ //~^ needless_if diff --git a/src/tools/clippy/tests/ui/needless_if.rs b/src/tools/clippy/tests/ui/needless_if.rs index b459ff877be61..5e0f2a14408bc 100644 --- a/src/tools/clippy/tests/ui/needless_if.rs +++ b/src/tools/clippy/tests/ui/needless_if.rs @@ -46,9 +46,7 @@ fn main() { if let true = true && true {} - if true - && let true = true - {} + if true && let true = true {} // Can lint nested `if let`s if { //~^ needless_if diff --git a/src/tools/clippy/tests/ui/needless_if.stderr b/src/tools/clippy/tests/ui/needless_if.stderr index eeb8d044526d3..62cdf2459448d 100644 --- a/src/tools/clippy/tests/ui/needless_if.stderr +++ b/src/tools/clippy/tests/ui/needless_if.stderr @@ -31,7 +31,7 @@ LL + }); | error: this `if` branch is empty - --> tests/ui/needless_if.rs:53:5 + --> tests/ui/needless_if.rs:51:5 | LL | / if { LL | | @@ -57,19 +57,19 @@ LL + } && true); | error: this `if` branch is empty - --> tests/ui/needless_if.rs:98:5 + --> tests/ui/needless_if.rs:96:5 | LL | if { maybe_side_effect() } {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: you can remove it: `({ maybe_side_effect() });` error: this `if` branch is empty - --> tests/ui/needless_if.rs:101:5 + --> tests/ui/needless_if.rs:99:5 | LL | if { maybe_side_effect() } && true {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: you can remove it: `({ maybe_side_effect() } && true);` error: this `if` branch is empty - --> tests/ui/needless_if.rs:106:5 + --> tests/ui/needless_if.rs:104:5 | LL | if true {} | ^^^^^^^^^^ help: you can remove it: `true;` diff --git a/src/tools/clippy/tests/ui/needless_late_init.fixed b/src/tools/clippy/tests/ui/needless_late_init.fixed index 391d4bc3fcc74..f832752ccd798 100644 --- a/src/tools/clippy/tests/ui/needless_late_init.fixed +++ b/src/tools/clippy/tests/ui/needless_late_init.fixed @@ -246,9 +246,7 @@ fn does_not_lint() { } let x; - if true - && let Some(n) = Some("let chains too") - { + if true && let Some(n) = Some("let chains too") { x = 1; } else { x = 2; diff --git a/src/tools/clippy/tests/ui/needless_late_init.rs b/src/tools/clippy/tests/ui/needless_late_init.rs index 6096e8300e1a1..a52fbf5292344 100644 --- a/src/tools/clippy/tests/ui/needless_late_init.rs +++ b/src/tools/clippy/tests/ui/needless_late_init.rs @@ -246,9 +246,7 @@ fn does_not_lint() { } let x; - if true - && let Some(n) = Some("let chains too") - { + if true && let Some(n) = Some("let chains too") { x = 1; } else { x = 2; diff --git a/src/tools/clippy/tests/ui/needless_late_init.stderr b/src/tools/clippy/tests/ui/needless_late_init.stderr index e7c36136847b0..b24c127588162 100644 --- a/src/tools/clippy/tests/ui/needless_late_init.stderr +++ b/src/tools/clippy/tests/ui/needless_late_init.stderr @@ -276,7 +276,7 @@ LL ~ }; | error: unneeded late initialization - --> tests/ui/needless_late_init.rs:302:5 + --> tests/ui/needless_late_init.rs:300:5 | LL | let r; | ^^^^^^ created here diff --git a/src/tools/clippy/tests/ui/needless_lifetimes.fixed b/src/tools/clippy/tests/ui/needless_lifetimes.fixed index d59393fb3f3c6..e9d811986aa49 100644 --- a/src/tools/clippy/tests/ui/needless_lifetimes.fixed +++ b/src/tools/clippy/tests/ui/needless_lifetimes.fixed @@ -534,4 +534,11 @@ mod issue13749bis { impl<'a, T: 'a> Generic {} } +pub fn issue14607<'s>(x: &'s u8) { + #[expect(clippy::redundant_closure_call)] + (|| { + let _: &'s u8 = x; + })(); +} + fn main() {} diff --git a/src/tools/clippy/tests/ui/needless_lifetimes.rs b/src/tools/clippy/tests/ui/needless_lifetimes.rs index e24907ab5fcdf..0b6eb9755b932 100644 --- a/src/tools/clippy/tests/ui/needless_lifetimes.rs +++ b/src/tools/clippy/tests/ui/needless_lifetimes.rs @@ -534,4 +534,11 @@ mod issue13749bis { impl<'a, T: 'a> Generic {} } +pub fn issue14607<'s>(x: &'s u8) { + #[expect(clippy::redundant_closure_call)] + (|| { + let _: &'s u8 = x; + })(); +} + fn main() {} diff --git a/src/tools/clippy/tests/ui/needless_pass_by_ref_mut.rs b/src/tools/clippy/tests/ui/needless_pass_by_ref_mut.rs index f0c5a716ac991..bdad3e3d5b008 100644 --- a/src/tools/clippy/tests/ui/needless_pass_by_ref_mut.rs +++ b/src/tools/clippy/tests/ui/needless_pass_by_ref_mut.rs @@ -1,8 +1,9 @@ #![allow( clippy::if_same_then_else, clippy::no_effect, + clippy::ptr_arg, clippy::redundant_closure_call, - clippy::ptr_arg + clippy::uninlined_format_args )] #![warn(clippy::needless_pass_by_ref_mut)] //@no-rustfix @@ -300,7 +301,7 @@ struct Data { } // Unsafe functions should not warn. unsafe fn get_mut_unchecked(ptr: &mut NonNull>) -> &mut T { - &mut (*ptr.as_ptr()).value + unsafe { &mut (*ptr.as_ptr()).value } } // Unsafe blocks should not warn. fn get_mut_unchecked2(ptr: &mut NonNull>) -> &mut T { diff --git a/src/tools/clippy/tests/ui/needless_pass_by_ref_mut.stderr b/src/tools/clippy/tests/ui/needless_pass_by_ref_mut.stderr index 6637a255b5f51..94d98f0e9b12d 100644 --- a/src/tools/clippy/tests/ui/needless_pass_by_ref_mut.stderr +++ b/src/tools/clippy/tests/ui/needless_pass_by_ref_mut.stderr @@ -1,5 +1,5 @@ error: this argument is a mutable reference, but not used mutably - --> tests/ui/needless_pass_by_ref_mut.rs:11:11 + --> tests/ui/needless_pass_by_ref_mut.rs:12:11 | LL | fn foo(s: &mut Vec, b: &u32, x: &mut u32) { | ^^^^^^^^^^^^^ help: consider changing to: `&Vec` @@ -8,79 +8,79 @@ LL | fn foo(s: &mut Vec, b: &u32, x: &mut u32) { = help: to override `-D warnings` add `#[allow(clippy::needless_pass_by_ref_mut)]` error: this argument is a mutable reference, but not used mutably - --> tests/ui/needless_pass_by_ref_mut.rs:37:12 + --> tests/ui/needless_pass_by_ref_mut.rs:38:12 | LL | fn foo6(s: &mut Vec) { | ^^^^^^^^^^^^^ help: consider changing to: `&Vec` error: this argument is a mutable reference, but not used mutably - --> tests/ui/needless_pass_by_ref_mut.rs:48:12 + --> tests/ui/needless_pass_by_ref_mut.rs:49:12 | LL | fn bar(&mut self) {} | ^^^^^^^^^ help: consider changing to: `&self` error: this argument is a mutable reference, but not used mutably - --> tests/ui/needless_pass_by_ref_mut.rs:51:29 + --> tests/ui/needless_pass_by_ref_mut.rs:52:29 | LL | fn mushroom(&self, vec: &mut Vec) -> usize { | ^^^^^^^^^^^^^ help: consider changing to: `&Vec` error: this argument is a mutable reference, but not used mutably - --> tests/ui/needless_pass_by_ref_mut.rs:129:16 + --> tests/ui/needless_pass_by_ref_mut.rs:130:16 | LL | async fn a1(x: &mut i32) { | ^^^^^^^^ help: consider changing to: `&i32` error: this argument is a mutable reference, but not used mutably - --> tests/ui/needless_pass_by_ref_mut.rs:134:16 + --> tests/ui/needless_pass_by_ref_mut.rs:135:16 | LL | async fn a2(x: &mut i32, y: String) { | ^^^^^^^^ help: consider changing to: `&i32` error: this argument is a mutable reference, but not used mutably - --> tests/ui/needless_pass_by_ref_mut.rs:139:16 + --> tests/ui/needless_pass_by_ref_mut.rs:140:16 | LL | async fn a3(x: &mut i32, y: String, z: String) { | ^^^^^^^^ help: consider changing to: `&i32` error: this argument is a mutable reference, but not used mutably - --> tests/ui/needless_pass_by_ref_mut.rs:144:16 + --> tests/ui/needless_pass_by_ref_mut.rs:145:16 | LL | async fn a4(x: &mut i32, y: i32) { | ^^^^^^^^ help: consider changing to: `&i32` error: this argument is a mutable reference, but not used mutably - --> tests/ui/needless_pass_by_ref_mut.rs:149:24 + --> tests/ui/needless_pass_by_ref_mut.rs:150:24 | LL | async fn a5(x: i32, y: &mut i32) { | ^^^^^^^^ help: consider changing to: `&i32` error: this argument is a mutable reference, but not used mutably - --> tests/ui/needless_pass_by_ref_mut.rs:154:24 + --> tests/ui/needless_pass_by_ref_mut.rs:155:24 | LL | async fn a6(x: i32, y: &mut i32) { | ^^^^^^^^ help: consider changing to: `&i32` error: this argument is a mutable reference, but not used mutably - --> tests/ui/needless_pass_by_ref_mut.rs:159:32 + --> tests/ui/needless_pass_by_ref_mut.rs:160:32 | LL | async fn a7(x: i32, y: i32, z: &mut i32) { | ^^^^^^^^ help: consider changing to: `&i32` error: this argument is a mutable reference, but not used mutably - --> tests/ui/needless_pass_by_ref_mut.rs:164:24 + --> tests/ui/needless_pass_by_ref_mut.rs:165:24 | LL | async fn a8(x: i32, a: &mut i32, y: i32, z: &mut i32) { | ^^^^^^^^ help: consider changing to: `&i32` error: this argument is a mutable reference, but not used mutably - --> tests/ui/needless_pass_by_ref_mut.rs:164:45 + --> tests/ui/needless_pass_by_ref_mut.rs:165:45 | LL | async fn a8(x: i32, a: &mut i32, y: i32, z: &mut i32) { | ^^^^^^^^ help: consider changing to: `&i32` error: this argument is a mutable reference, but not used mutably - --> tests/ui/needless_pass_by_ref_mut.rs:200:16 + --> tests/ui/needless_pass_by_ref_mut.rs:201:16 | LL | fn cfg_warn(s: &mut u32) {} | ^^^^^^^^ help: consider changing to: `&u32` @@ -88,7 +88,7 @@ LL | fn cfg_warn(s: &mut u32) {} = note: this is cfg-gated and may require further changes error: this argument is a mutable reference, but not used mutably - --> tests/ui/needless_pass_by_ref_mut.rs:205:20 + --> tests/ui/needless_pass_by_ref_mut.rs:206:20 | LL | fn cfg_warn(s: &mut u32) {} | ^^^^^^^^ help: consider changing to: `&u32` @@ -96,115 +96,115 @@ LL | fn cfg_warn(s: &mut u32) {} = note: this is cfg-gated and may require further changes error: this argument is a mutable reference, but not used mutably - --> tests/ui/needless_pass_by_ref_mut.rs:218:39 + --> tests/ui/needless_pass_by_ref_mut.rs:219:39 | LL | async fn inner_async2(x: &mut i32, y: &mut u32) { | ^^^^^^^^ help: consider changing to: `&u32` error: this argument is a mutable reference, but not used mutably - --> tests/ui/needless_pass_by_ref_mut.rs:227:26 + --> tests/ui/needless_pass_by_ref_mut.rs:228:26 | LL | async fn inner_async3(x: &mut i32, y: &mut u32) { | ^^^^^^^^ help: consider changing to: `&i32` error: this argument is a mutable reference, but not used mutably - --> tests/ui/needless_pass_by_ref_mut.rs:247:30 + --> tests/ui/needless_pass_by_ref_mut.rs:248:30 | LL | async fn call_in_closure1(n: &mut str) { | ^^^^^^^^ help: consider changing to: `&str` error: this argument is a mutable reference, but not used mutably - --> tests/ui/needless_pass_by_ref_mut.rs:267:16 + --> tests/ui/needless_pass_by_ref_mut.rs:268:16 | LL | fn closure2(n: &mut usize) -> impl '_ + FnMut() -> usize { | ^^^^^^^^^^ help: consider changing to: `&usize` error: this argument is a mutable reference, but not used mutably - --> tests/ui/needless_pass_by_ref_mut.rs:279:22 + --> tests/ui/needless_pass_by_ref_mut.rs:280:22 | LL | async fn closure4(n: &mut usize) { | ^^^^^^^^^^ help: consider changing to: `&usize` error: this argument is a mutable reference, but not used mutably - --> tests/ui/needless_pass_by_ref_mut.rs:334:12 + --> tests/ui/needless_pass_by_ref_mut.rs:335:12 | LL | fn bar(&mut self) {} | ^^^^^^^^^ help: consider changing to: `&self` error: this argument is a mutable reference, but not used mutably - --> tests/ui/needless_pass_by_ref_mut.rs:337:18 + --> tests/ui/needless_pass_by_ref_mut.rs:338:18 | LL | async fn foo(&mut self, u: &mut i32, v: &mut u32) { | ^^^^^^^^^ help: consider changing to: `&self` error: this argument is a mutable reference, but not used mutably - --> tests/ui/needless_pass_by_ref_mut.rs:337:45 + --> tests/ui/needless_pass_by_ref_mut.rs:338:45 | LL | async fn foo(&mut self, u: &mut i32, v: &mut u32) { | ^^^^^^^^ help: consider changing to: `&u32` error: this argument is a mutable reference, but not used mutably - --> tests/ui/needless_pass_by_ref_mut.rs:346:46 + --> tests/ui/needless_pass_by_ref_mut.rs:347:46 | LL | async fn foo2(&mut self, u: &mut i32, v: &mut u32) { | ^^^^^^^^ help: consider changing to: `&u32` error: this argument is a mutable reference, but not used mutably - --> tests/ui/needless_pass_by_ref_mut.rs:363:18 + --> tests/ui/needless_pass_by_ref_mut.rs:364:18 | LL | fn _empty_tup(x: &mut (())) {} | ^^^^^^^^^ help: consider changing to: `&()` error: this argument is a mutable reference, but not used mutably - --> tests/ui/needless_pass_by_ref_mut.rs:365:19 + --> tests/ui/needless_pass_by_ref_mut.rs:366:19 | LL | fn _single_tup(x: &mut ((i32,))) {} | ^^^^^^^^^^^^^ help: consider changing to: `&(i32,)` error: this argument is a mutable reference, but not used mutably - --> tests/ui/needless_pass_by_ref_mut.rs:367:18 + --> tests/ui/needless_pass_by_ref_mut.rs:368:18 | LL | fn _multi_tup(x: &mut ((i32, u32))) {} | ^^^^^^^^^^^^^^^^^ help: consider changing to: `&(i32, u32)` error: this argument is a mutable reference, but not used mutably - --> tests/ui/needless_pass_by_ref_mut.rs:369:11 + --> tests/ui/needless_pass_by_ref_mut.rs:370:11 | LL | fn _fn(x: &mut (fn())) {} | ^^^^^^^^^^^ help: consider changing to: `&fn()` error: this argument is a mutable reference, but not used mutably - --> tests/ui/needless_pass_by_ref_mut.rs:372:23 + --> tests/ui/needless_pass_by_ref_mut.rs:373:23 | LL | fn _extern_rust_fn(x: &mut extern "Rust" fn()) {} | ^^^^^^^^^^^^^^^^^^^^^^^ help: consider changing to: `&extern "Rust" fn()` error: this argument is a mutable reference, but not used mutably - --> tests/ui/needless_pass_by_ref_mut.rs:374:20 + --> tests/ui/needless_pass_by_ref_mut.rs:375:20 | LL | fn _extern_c_fn(x: &mut extern "C" fn()) {} | ^^^^^^^^^^^^^^^^^^^^ help: consider changing to: `&extern "C" fn()` error: this argument is a mutable reference, but not used mutably - --> tests/ui/needless_pass_by_ref_mut.rs:376:18 + --> tests/ui/needless_pass_by_ref_mut.rs:377:18 | LL | fn _unsafe_fn(x: &mut unsafe fn()) {} | ^^^^^^^^^^^^^^^^ help: consider changing to: `&unsafe fn()` error: this argument is a mutable reference, but not used mutably - --> tests/ui/needless_pass_by_ref_mut.rs:378:25 + --> tests/ui/needless_pass_by_ref_mut.rs:379:25 | LL | fn _unsafe_extern_fn(x: &mut unsafe extern "C" fn()) {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider changing to: `&unsafe extern "C" fn()` error: this argument is a mutable reference, but not used mutably - --> tests/ui/needless_pass_by_ref_mut.rs:380:20 + --> tests/ui/needless_pass_by_ref_mut.rs:381:20 | LL | fn _fn_with_arg(x: &mut unsafe extern "C" fn(i32)) {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider changing to: `&unsafe extern "C" fn(i32)` error: this argument is a mutable reference, but not used mutably - --> tests/ui/needless_pass_by_ref_mut.rs:382:20 + --> tests/ui/needless_pass_by_ref_mut.rs:383:20 | LL | fn _fn_with_ret(x: &mut unsafe extern "C" fn() -> (i32)) {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider changing to: `&unsafe extern "C" fn() -> (i32)` diff --git a/src/tools/clippy/tests/ui/needless_pass_by_ref_mut_2021.rs b/src/tools/clippy/tests/ui/needless_pass_by_ref_mut_2021.rs new file mode 100644 index 0000000000000..994eba9cae3d7 --- /dev/null +++ b/src/tools/clippy/tests/ui/needless_pass_by_ref_mut_2021.rs @@ -0,0 +1,12 @@ +//@edition: 2021 +//@check-pass +#![warn(clippy::needless_pass_by_ref_mut)] + +struct Data { + value: T, +} + +// Unsafe functions should not warn. +unsafe fn get_mut_unchecked(ptr: &mut std::ptr::NonNull>) -> &mut T { + &mut (*ptr.as_ptr()).value +} diff --git a/src/tools/clippy/tests/ui/needless_question_mark.stderr b/src/tools/clippy/tests/ui/needless_question_mark.stderr index 55da4f28976c1..8516cee48e679 100644 --- a/src/tools/clippy/tests/ui/needless_question_mark.stderr +++ b/src/tools/clippy/tests/ui/needless_question_mark.stderr @@ -1,100 +1,188 @@ -error: question mark operator is useless here +error: enclosing `Some` and `?` operator are unneeded --> tests/ui/needless_question_mark.rs:20:12 | LL | return Some(to.magic?); - | ^^^^^^^^^^^^^^^ help: try removing question mark and `Some()`: `to.magic` + | ^^^^^^^^^^^^^^^ | = note: `-D clippy::needless-question-mark` implied by `-D warnings` = help: to override `-D warnings` add `#[allow(clippy::needless_question_mark)]` +help: remove the enclosing `Some` and `?` operator + | +LL - return Some(to.magic?); +LL + return to.magic; + | -error: question mark operator is useless here +error: enclosing `Some` and `?` operator are unneeded --> tests/ui/needless_question_mark.rs:29:12 | LL | return Some(to.magic?) - | ^^^^^^^^^^^^^^^ help: try removing question mark and `Some()`: `to.magic` + | ^^^^^^^^^^^^^^^ + | +help: remove the enclosing `Some` and `?` operator + | +LL - return Some(to.magic?) +LL + return to.magic + | -error: question mark operator is useless here +error: enclosing `Some` and `?` operator are unneeded --> tests/ui/needless_question_mark.rs:35:5 | LL | Some(to.magic?) - | ^^^^^^^^^^^^^^^ help: try removing question mark and `Some()`: `to.magic` + | ^^^^^^^^^^^^^^^ + | +help: remove the enclosing `Some` and `?` operator + | +LL - Some(to.magic?) +LL + to.magic + | -error: question mark operator is useless here +error: enclosing `Some` and `?` operator are unneeded --> tests/ui/needless_question_mark.rs:41:21 | LL | to.and_then(|t| Some(t.magic?)) - | ^^^^^^^^^^^^^^ help: try removing question mark and `Some()`: `t.magic` + | ^^^^^^^^^^^^^^ + | +help: remove the enclosing `Some` and `?` operator + | +LL - to.and_then(|t| Some(t.magic?)) +LL + to.and_then(|t| t.magic) + | -error: question mark operator is useless here +error: enclosing `Some` and `?` operator are unneeded --> tests/ui/needless_question_mark.rs:51:9 | LL | Some(t.magic?) - | ^^^^^^^^^^^^^^ help: try removing question mark and `Some()`: `t.magic` + | ^^^^^^^^^^^^^^ + | +help: remove the enclosing `Some` and `?` operator + | +LL - Some(t.magic?) +LL + t.magic + | -error: question mark operator is useless here +error: enclosing `Ok` and `?` operator are unneeded --> tests/ui/needless_question_mark.rs:57:12 | LL | return Ok(tr.magic?); - | ^^^^^^^^^^^^^ help: try removing question mark and `Ok()`: `tr.magic` + | ^^^^^^^^^^^^^ + | +help: remove the enclosing `Ok` and `?` operator + | +LL - return Ok(tr.magic?); +LL + return tr.magic; + | -error: question mark operator is useless here +error: enclosing `Ok` and `?` operator are unneeded --> tests/ui/needless_question_mark.rs:65:12 | LL | return Ok(tr.magic?) - | ^^^^^^^^^^^^^ help: try removing question mark and `Ok()`: `tr.magic` + | ^^^^^^^^^^^^^ + | +help: remove the enclosing `Ok` and `?` operator + | +LL - return Ok(tr.magic?) +LL + return tr.magic + | -error: question mark operator is useless here +error: enclosing `Ok` and `?` operator are unneeded --> tests/ui/needless_question_mark.rs:70:5 | LL | Ok(tr.magic?) - | ^^^^^^^^^^^^^ help: try removing question mark and `Ok()`: `tr.magic` + | ^^^^^^^^^^^^^ + | +help: remove the enclosing `Ok` and `?` operator + | +LL - Ok(tr.magic?) +LL + tr.magic + | -error: question mark operator is useless here +error: enclosing `Ok` and `?` operator are unneeded --> tests/ui/needless_question_mark.rs:75:21 | LL | tr.and_then(|t| Ok(t.magic?)) - | ^^^^^^^^^^^^ help: try removing question mark and `Ok()`: `t.magic` + | ^^^^^^^^^^^^ + | +help: remove the enclosing `Ok` and `?` operator + | +LL - tr.and_then(|t| Ok(t.magic?)) +LL + tr.and_then(|t| t.magic) + | -error: question mark operator is useless here +error: enclosing `Ok` and `?` operator are unneeded --> tests/ui/needless_question_mark.rs:84:9 | LL | Ok(t.magic?) - | ^^^^^^^^^^^^ help: try removing question mark and `Ok()`: `t.magic` + | ^^^^^^^^^^^^ + | +help: remove the enclosing `Ok` and `?` operator + | +LL - Ok(t.magic?) +LL + t.magic + | -error: question mark operator is useless here +error: enclosing `Ok` and `?` operator are unneeded --> tests/ui/needless_question_mark.rs:92:16 | LL | return Ok(t.magic?); - | ^^^^^^^^^^^^ help: try removing question mark and `Ok()`: `t.magic` + | ^^^^^^^^^^^^ + | +help: remove the enclosing `Ok` and `?` operator + | +LL - return Ok(t.magic?); +LL + return t.magic; + | -error: question mark operator is useless here +error: enclosing `Some` and `?` operator are unneeded --> tests/ui/needless_question_mark.rs:128:27 | LL | || -> Option<_> { Some(Some($expr)?) }() - | ^^^^^^^^^^^^^^^^^^ help: try removing question mark and `Some()`: `Some($expr)` + | ^^^^^^^^^^^^^^^^^^ ... LL | let _x = some_and_qmark_in_macro!(x?); | ---------------------------- in this macro invocation | = note: this error originates in the macro `some_and_qmark_in_macro` (in Nightly builds, run with -Z macro-backtrace for more info) +help: remove the enclosing `Some` and `?` operator + | +LL - || -> Option<_> { Some(Some($expr)?) }() +LL + || -> Option<_> { Some($expr) }() + | -error: question mark operator is useless here +error: enclosing `Some` and `?` operator are unneeded --> tests/ui/needless_question_mark.rs:140:5 | LL | Some(to.magic?) - | ^^^^^^^^^^^^^^^ help: try removing question mark and `Some()`: `to.magic` + | ^^^^^^^^^^^^^^^ + | +help: remove the enclosing `Some` and `?` operator + | +LL - Some(to.magic?) +LL + to.magic + | -error: question mark operator is useless here +error: enclosing `Ok` and `?` operator are unneeded --> tests/ui/needless_question_mark.rs:149:5 | LL | Ok(s.magic?) - | ^^^^^^^^^^^^ help: try removing question mark and `Ok()`: `s.magic` + | ^^^^^^^^^^^^ + | +help: remove the enclosing `Ok` and `?` operator + | +LL - Ok(s.magic?) +LL + s.magic + | -error: question mark operator is useless here +error: enclosing `Some` and `?` operator are unneeded --> tests/ui/needless_question_mark.rs:154:7 | LL | { Some(a?) } - | ^^^^^^^^ help: try removing question mark and `Some()`: `a` + | ^^^^^^^^ + | +help: remove the enclosing `Some` and `?` operator + | +LL - { Some(a?) } +LL + { a } + | error: aborting due to 15 previous errors diff --git a/src/tools/clippy/tests/ui/neg_multiply.fixed b/src/tools/clippy/tests/ui/neg_multiply.fixed index 995470493bfb7..ff6e08300e298 100644 --- a/src/tools/clippy/tests/ui/neg_multiply.fixed +++ b/src/tools/clippy/tests/ui/neg_multiply.fixed @@ -53,3 +53,32 @@ fn main() { X * -1; // should be ok -1 * X; // should also be ok } + +fn float() { + let x = 0.0; + + -x; + //~^ neg_multiply + + -x; + //~^ neg_multiply + + 100.0 + -x; + //~^ neg_multiply + + -(100.0 + x); + //~^ neg_multiply + + -17.0; + //~^ neg_multiply + + 0.0 + -0.0; + //~^ neg_multiply + + -(3.0_f32 as f64); + //~^ neg_multiply + -(3.0_f32 as f64); + //~^ neg_multiply + + -1.0 * -1.0; // should be ok +} diff --git a/src/tools/clippy/tests/ui/neg_multiply.rs b/src/tools/clippy/tests/ui/neg_multiply.rs index 95b94e29517fa..b0f4e85c78e5d 100644 --- a/src/tools/clippy/tests/ui/neg_multiply.rs +++ b/src/tools/clippy/tests/ui/neg_multiply.rs @@ -53,3 +53,32 @@ fn main() { X * -1; // should be ok -1 * X; // should also be ok } + +fn float() { + let x = 0.0; + + x * -1.0; + //~^ neg_multiply + + -1.0 * x; + //~^ neg_multiply + + 100.0 + x * -1.0; + //~^ neg_multiply + + (100.0 + x) * -1.0; + //~^ neg_multiply + + -1.0 * 17.0; + //~^ neg_multiply + + 0.0 + 0.0 * -1.0; + //~^ neg_multiply + + 3.0_f32 as f64 * -1.0; + //~^ neg_multiply + (3.0_f32 as f64) * -1.0; + //~^ neg_multiply + + -1.0 * -1.0; // should be ok +} diff --git a/src/tools/clippy/tests/ui/neg_multiply.stderr b/src/tools/clippy/tests/ui/neg_multiply.stderr index 9efa5d3ba1f1d..2ef7e32ce05e1 100644 --- a/src/tools/clippy/tests/ui/neg_multiply.stderr +++ b/src/tools/clippy/tests/ui/neg_multiply.stderr @@ -49,5 +49,53 @@ error: this multiplication by -1 can be written more succinctly LL | (3_usize as i32) * -1; | ^^^^^^^^^^^^^^^^^^^^^ help: consider using: `-(3_usize as i32)` -error: aborting due to 8 previous errors +error: this multiplication by -1 can be written more succinctly + --> tests/ui/neg_multiply.rs:60:5 + | +LL | x * -1.0; + | ^^^^^^^^ help: consider using: `-x` + +error: this multiplication by -1 can be written more succinctly + --> tests/ui/neg_multiply.rs:63:5 + | +LL | -1.0 * x; + | ^^^^^^^^ help: consider using: `-x` + +error: this multiplication by -1 can be written more succinctly + --> tests/ui/neg_multiply.rs:66:13 + | +LL | 100.0 + x * -1.0; + | ^^^^^^^^ help: consider using: `-x` + +error: this multiplication by -1 can be written more succinctly + --> tests/ui/neg_multiply.rs:69:5 + | +LL | (100.0 + x) * -1.0; + | ^^^^^^^^^^^^^^^^^^ help: consider using: `-(100.0 + x)` + +error: this multiplication by -1 can be written more succinctly + --> tests/ui/neg_multiply.rs:72:5 + | +LL | -1.0 * 17.0; + | ^^^^^^^^^^^ help: consider using: `-17.0` + +error: this multiplication by -1 can be written more succinctly + --> tests/ui/neg_multiply.rs:75:11 + | +LL | 0.0 + 0.0 * -1.0; + | ^^^^^^^^^^ help: consider using: `-0.0` + +error: this multiplication by -1 can be written more succinctly + --> tests/ui/neg_multiply.rs:78:5 + | +LL | 3.0_f32 as f64 * -1.0; + | ^^^^^^^^^^^^^^^^^^^^^ help: consider using: `-(3.0_f32 as f64)` + +error: this multiplication by -1 can be written more succinctly + --> tests/ui/neg_multiply.rs:80:5 + | +LL | (3.0_f32 as f64) * -1.0; + | ^^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `-(3.0_f32 as f64)` + +error: aborting due to 16 previous errors diff --git a/src/tools/clippy/tests/ui/no_mangle_with_rust_abi.rs b/src/tools/clippy/tests/ui/no_mangle_with_rust_abi.rs index 0d09b3ceecde7..f4248ffc0f4d0 100644 --- a/src/tools/clippy/tests/ui/no_mangle_with_rust_abi.rs +++ b/src/tools/clippy/tests/ui/no_mangle_with_rust_abi.rs @@ -43,7 +43,7 @@ extern "C" fn c_abi_fn(arg_one: u32, arg_two: usize) {} extern "C" fn c_abi_fn_again(arg_one: u32, arg_two: usize) {} -extern "C" { +unsafe extern "C" { fn c_abi_in_block(arg_one: u32, arg_two: usize); } diff --git a/src/tools/clippy/tests/ui/non_canonical_partial_ord_impl.fixed b/src/tools/clippy/tests/ui/non_canonical_partial_ord_impl.fixed index 8774c666db11f..23dbee5a08488 100644 --- a/src/tools/clippy/tests/ui/non_canonical_partial_ord_impl.fixed +++ b/src/tools/clippy/tests/ui/non_canonical_partial_ord_impl.fixed @@ -162,3 +162,36 @@ impl PartialOrd for I { return Some(self.cmp(other)); } } + +// #13640, do not lint + +#[derive(Eq, PartialEq)] +struct J(u32); + +impl Ord for J { + fn cmp(&self, other: &Self) -> Ordering { + todo!(); + } +} + +impl PartialOrd for J { + fn partial_cmp(&self, other: &Self) -> Option { + self.cmp(other).into() + } +} + +// #13640, check that a simple `.into()` does not obliterate the lint + +#[derive(Eq, PartialEq)] +struct K(u32); + +impl Ord for K { + fn cmp(&self, other: &Self) -> Ordering { + todo!(); + } +} + +impl PartialOrd for K { + //~^ non_canonical_partial_ord_impl + fn partial_cmp(&self, other: &Self) -> Option { Some(self.cmp(other)) } +} diff --git a/src/tools/clippy/tests/ui/non_canonical_partial_ord_impl.rs b/src/tools/clippy/tests/ui/non_canonical_partial_ord_impl.rs index 568b97c8fff7b..12f055a542b89 100644 --- a/src/tools/clippy/tests/ui/non_canonical_partial_ord_impl.rs +++ b/src/tools/clippy/tests/ui/non_canonical_partial_ord_impl.rs @@ -166,3 +166,38 @@ impl PartialOrd for I { return Some(self.cmp(other)); } } + +// #13640, do not lint + +#[derive(Eq, PartialEq)] +struct J(u32); + +impl Ord for J { + fn cmp(&self, other: &Self) -> Ordering { + todo!(); + } +} + +impl PartialOrd for J { + fn partial_cmp(&self, other: &Self) -> Option { + self.cmp(other).into() + } +} + +// #13640, check that a simple `.into()` does not obliterate the lint + +#[derive(Eq, PartialEq)] +struct K(u32); + +impl Ord for K { + fn cmp(&self, other: &Self) -> Ordering { + todo!(); + } +} + +impl PartialOrd for K { + //~^ non_canonical_partial_ord_impl + fn partial_cmp(&self, other: &Self) -> Option { + Ordering::Greater.into() + } +} diff --git a/src/tools/clippy/tests/ui/non_canonical_partial_ord_impl.stderr b/src/tools/clippy/tests/ui/non_canonical_partial_ord_impl.stderr index 86845df4ea906..c7de968588f8b 100644 --- a/src/tools/clippy/tests/ui/non_canonical_partial_ord_impl.stderr +++ b/src/tools/clippy/tests/ui/non_canonical_partial_ord_impl.stderr @@ -31,5 +31,18 @@ LL - fn partial_cmp(&self, _: &Self) -> Option { LL + fn partial_cmp(&self, other: &Self) -> Option { Some(self.cmp(other)) } | -error: aborting due to 2 previous errors +error: non-canonical implementation of `partial_cmp` on an `Ord` type + --> tests/ui/non_canonical_partial_ord_impl.rs:198:1 + | +LL | / impl PartialOrd for K { +LL | | +LL | | fn partial_cmp(&self, other: &Self) -> Option { + | | _____________________________________________________________- +LL | || Ordering::Greater.into() +LL | || } + | ||_____- help: change this to: `{ Some(self.cmp(other)) }` +LL | | } + | |__^ + +error: aborting due to 3 previous errors diff --git a/src/tools/clippy/tests/ui/non_expressive_names.rs b/src/tools/clippy/tests/ui/non_expressive_names.rs index b772c754f8b76..3f34dff563d26 100644 --- a/src/tools/clippy/tests/ui/non_expressive_names.rs +++ b/src/tools/clippy/tests/ui/non_expressive_names.rs @@ -1,5 +1,4 @@ -#![warn(clippy::all)] -#![allow(unused, clippy::println_empty_string, non_snake_case, clippy::let_unit_value)] +#![allow(clippy::println_empty_string, non_snake_case, clippy::let_unit_value)] #[derive(Clone, Debug)] enum MaybeInst { diff --git a/src/tools/clippy/tests/ui/non_expressive_names.stderr b/src/tools/clippy/tests/ui/non_expressive_names.stderr index 3bd77a730fe78..11b12d2c5f103 100644 --- a/src/tools/clippy/tests/ui/non_expressive_names.stderr +++ b/src/tools/clippy/tests/ui/non_expressive_names.stderr @@ -1,5 +1,5 @@ error: consider choosing a more descriptive name - --> tests/ui/non_expressive_names.rs:28:9 + --> tests/ui/non_expressive_names.rs:27:9 | LL | let _1 = 1; | ^^ @@ -8,31 +8,31 @@ LL | let _1 = 1; = help: to override `-D warnings` add `#[allow(clippy::just_underscores_and_digits)]` error: consider choosing a more descriptive name - --> tests/ui/non_expressive_names.rs:30:9 + --> tests/ui/non_expressive_names.rs:29:9 | LL | let ____1 = 1; | ^^^^^ error: consider choosing a more descriptive name - --> tests/ui/non_expressive_names.rs:32:9 + --> tests/ui/non_expressive_names.rs:31:9 | LL | let __1___2 = 12; | ^^^^^^^ error: consider choosing a more descriptive name - --> tests/ui/non_expressive_names.rs:54:13 + --> tests/ui/non_expressive_names.rs:53:13 | LL | let _1 = 1; | ^^ error: consider choosing a more descriptive name - --> tests/ui/non_expressive_names.rs:56:13 + --> tests/ui/non_expressive_names.rs:55:13 | LL | let ____1 = 1; | ^^^^^ error: consider choosing a more descriptive name - --> tests/ui/non_expressive_names.rs:58:13 + --> tests/ui/non_expressive_names.rs:57:13 | LL | let __1___2 = 12; | ^^^^^^^ diff --git a/src/tools/clippy/tests/ui/non_send_fields_in_send_ty.rs b/src/tools/clippy/tests/ui/non_send_fields_in_send_ty.rs index 046ea70b08f16..31778f7450989 100644 --- a/src/tools/clippy/tests/ui/non_send_fields_in_send_ty.rs +++ b/src/tools/clippy/tests/ui/non_send_fields_in_send_ty.rs @@ -35,7 +35,7 @@ unsafe impl Send for ArcGuard {} //~^ ERROR: some fields in `ArcGuard` are not safe to be sent to another thread // rusb / RUSTSEC-2020-0098 -extern "C" { +unsafe extern "C" { type libusb_device_handle; } @@ -90,7 +90,7 @@ unsafe impl Send for MultiParam {} //~^ ERROR: some fields in `MultiParam` are not safe to be sent to another thread // Tests for raw pointer heuristic -extern "C" { +unsafe extern "C" { type NonSend; } diff --git a/src/tools/clippy/tests/ui/non_std_lazy_static/non_std_lazy_static_fixable.fixed b/src/tools/clippy/tests/ui/non_std_lazy_static/non_std_lazy_static_fixable.fixed index f7c56b6fffe81..2b30c8f984ebe 100644 --- a/src/tools/clippy/tests/ui/non_std_lazy_static/non_std_lazy_static_fixable.fixed +++ b/src/tools/clippy/tests/ui/non_std_lazy_static/non_std_lazy_static_fixable.fixed @@ -9,16 +9,16 @@ use once_cell::sync::Lazy; fn main() {} static LAZY_FOO: std::sync::LazyLock = std::sync::LazyLock::new(|| "foo".to_uppercase()); -//~^ ERROR: this type has been superceded by `LazyLock` in the standard library +//~^ ERROR: this type has been superseded by `LazyLock` in the standard library static LAZY_BAR: std::sync::LazyLock = std::sync::LazyLock::new(|| { - //~^ ERROR: this type has been superceded by `LazyLock` in the standard library + //~^ ERROR: this type has been superseded by `LazyLock` in the standard library let x = "bar"; x.to_uppercase() }); static LAZY_BAZ: std::sync::LazyLock = { std::sync::LazyLock::new(|| "baz".to_uppercase()) }; -//~^ ERROR: this type has been superceded by `LazyLock` in the standard library +//~^ ERROR: this type has been superseded by `LazyLock` in the standard library static LAZY_QUX: std::sync::LazyLock = { - //~^ ERROR: this type has been superceded by `LazyLock` in the standard library + //~^ ERROR: this type has been superseded by `LazyLock` in the standard library if "qux".len() == 3 { std::sync::LazyLock::new(|| "qux".to_uppercase()) } else if "qux".is_ascii() { @@ -39,11 +39,11 @@ mod once_cell_lazy_with_fns { use once_cell::sync::Lazy; static LAZY_FOO: std::sync::LazyLock = std::sync::LazyLock::new(|| "foo".to_uppercase()); - //~^ ERROR: this type has been superceded by `LazyLock` in the standard library + //~^ ERROR: this type has been superseded by `LazyLock` in the standard library static LAZY_BAR: std::sync::LazyLock = std::sync::LazyLock::new(|| "bar".to_uppercase()); - //~^ ERROR: this type has been superceded by `LazyLock` in the standard library + //~^ ERROR: this type has been superseded by `LazyLock` in the standard library static mut LAZY_BAZ: std::sync::LazyLock = std::sync::LazyLock::new(|| "baz".to_uppercase()); - //~^ ERROR: this type has been superceded by `LazyLock` in the standard library + //~^ ERROR: this type has been superseded by `LazyLock` in the standard library fn calling_replaceable_fns() { let _ = std::sync::LazyLock::force(&LAZY_FOO); diff --git a/src/tools/clippy/tests/ui/non_std_lazy_static/non_std_lazy_static_fixable.rs b/src/tools/clippy/tests/ui/non_std_lazy_static/non_std_lazy_static_fixable.rs index 90bc428137cea..c52338eee83cb 100644 --- a/src/tools/clippy/tests/ui/non_std_lazy_static/non_std_lazy_static_fixable.rs +++ b/src/tools/clippy/tests/ui/non_std_lazy_static/non_std_lazy_static_fixable.rs @@ -9,16 +9,16 @@ use once_cell::sync::Lazy; fn main() {} static LAZY_FOO: Lazy = Lazy::new(|| "foo".to_uppercase()); -//~^ ERROR: this type has been superceded by `LazyLock` in the standard library +//~^ ERROR: this type has been superseded by `LazyLock` in the standard library static LAZY_BAR: Lazy = Lazy::new(|| { - //~^ ERROR: this type has been superceded by `LazyLock` in the standard library + //~^ ERROR: this type has been superseded by `LazyLock` in the standard library let x = "bar"; x.to_uppercase() }); static LAZY_BAZ: Lazy = { Lazy::new(|| "baz".to_uppercase()) }; -//~^ ERROR: this type has been superceded by `LazyLock` in the standard library +//~^ ERROR: this type has been superseded by `LazyLock` in the standard library static LAZY_QUX: Lazy = { - //~^ ERROR: this type has been superceded by `LazyLock` in the standard library + //~^ ERROR: this type has been superseded by `LazyLock` in the standard library if "qux".len() == 3 { Lazy::new(|| "qux".to_uppercase()) } else if "qux".is_ascii() { @@ -39,11 +39,11 @@ mod once_cell_lazy_with_fns { use once_cell::sync::Lazy; static LAZY_FOO: Lazy = Lazy::new(|| "foo".to_uppercase()); - //~^ ERROR: this type has been superceded by `LazyLock` in the standard library + //~^ ERROR: this type has been superseded by `LazyLock` in the standard library static LAZY_BAR: Lazy = Lazy::new(|| "bar".to_uppercase()); - //~^ ERROR: this type has been superceded by `LazyLock` in the standard library + //~^ ERROR: this type has been superseded by `LazyLock` in the standard library static mut LAZY_BAZ: Lazy = Lazy::new(|| "baz".to_uppercase()); - //~^ ERROR: this type has been superceded by `LazyLock` in the standard library + //~^ ERROR: this type has been superseded by `LazyLock` in the standard library fn calling_replaceable_fns() { let _ = Lazy::force(&LAZY_FOO); diff --git a/src/tools/clippy/tests/ui/non_std_lazy_static/non_std_lazy_static_fixable.stderr b/src/tools/clippy/tests/ui/non_std_lazy_static/non_std_lazy_static_fixable.stderr index 333052ae1c110..bb80cd11c7199 100644 --- a/src/tools/clippy/tests/ui/non_std_lazy_static/non_std_lazy_static_fixable.stderr +++ b/src/tools/clippy/tests/ui/non_std_lazy_static/non_std_lazy_static_fixable.stderr @@ -1,4 +1,4 @@ -error: this type has been superceded by `LazyLock` in the standard library +error: this type has been superseded by `LazyLock` in the standard library --> tests/ui/non_std_lazy_static/non_std_lazy_static_fixable.rs:11:18 | LL | static LAZY_FOO: Lazy = Lazy::new(|| "foo".to_uppercase()); @@ -12,7 +12,7 @@ LL - static LAZY_FOO: Lazy = Lazy::new(|| "foo".to_uppercase()); LL + static LAZY_FOO: std::sync::LazyLock = std::sync::LazyLock::new(|| "foo".to_uppercase()); | -error: this type has been superceded by `LazyLock` in the standard library +error: this type has been superseded by `LazyLock` in the standard library --> tests/ui/non_std_lazy_static/non_std_lazy_static_fixable.rs:13:18 | LL | static LAZY_BAR: Lazy = Lazy::new(|| { @@ -24,7 +24,7 @@ LL - static LAZY_BAR: Lazy = Lazy::new(|| { LL + static LAZY_BAR: std::sync::LazyLock = std::sync::LazyLock::new(|| { | -error: this type has been superceded by `LazyLock` in the standard library +error: this type has been superseded by `LazyLock` in the standard library --> tests/ui/non_std_lazy_static/non_std_lazy_static_fixable.rs:18:18 | LL | static LAZY_BAZ: Lazy = { Lazy::new(|| "baz".to_uppercase()) }; @@ -36,7 +36,7 @@ LL - static LAZY_BAZ: Lazy = { Lazy::new(|| "baz".to_uppercase()) }; LL + static LAZY_BAZ: std::sync::LazyLock = { std::sync::LazyLock::new(|| "baz".to_uppercase()) }; | -error: this type has been superceded by `LazyLock` in the standard library +error: this type has been superseded by `LazyLock` in the standard library --> tests/ui/non_std_lazy_static/non_std_lazy_static_fixable.rs:20:18 | LL | static LAZY_QUX: Lazy = { @@ -54,7 +54,7 @@ LL | } else { LL ~ std::sync::LazyLock::new(|| "qux".to_string()) | -error: this type has been superceded by `LazyLock` in the standard library +error: this type has been superseded by `LazyLock` in the standard library --> tests/ui/non_std_lazy_static/non_std_lazy_static_fixable.rs:41:22 | LL | static LAZY_FOO: Lazy = Lazy::new(|| "foo".to_uppercase()); @@ -69,7 +69,7 @@ LL | fn calling_replaceable_fns() { LL ~ let _ = std::sync::LazyLock::force(&LAZY_FOO); | -error: this type has been superceded by `LazyLock` in the standard library +error: this type has been superseded by `LazyLock` in the standard library --> tests/ui/non_std_lazy_static/non_std_lazy_static_fixable.rs:43:22 | LL | static LAZY_BAR: Lazy = Lazy::new(|| "bar".to_uppercase()); @@ -84,7 +84,7 @@ LL | let _ = Lazy::force(&LAZY_FOO); LL ~ let _ = std::sync::LazyLock::force(&LAZY_BAR); | -error: this type has been superceded by `LazyLock` in the standard library +error: this type has been superseded by `LazyLock` in the standard library --> tests/ui/non_std_lazy_static/non_std_lazy_static_fixable.rs:45:26 | LL | static mut LAZY_BAZ: Lazy = Lazy::new(|| "baz".to_uppercase()); diff --git a/src/tools/clippy/tests/ui/non_std_lazy_static/non_std_lazy_static_unfixable.rs b/src/tools/clippy/tests/ui/non_std_lazy_static/non_std_lazy_static_unfixable.rs index 34f8dd1ccb2ea..acc8c04678f50 100644 --- a/src/tools/clippy/tests/ui/non_std_lazy_static/non_std_lazy_static_unfixable.rs +++ b/src/tools/clippy/tests/ui/non_std_lazy_static/non_std_lazy_static_unfixable.rs @@ -9,11 +9,11 @@ mod once_cell_lazy { use once_cell::sync::Lazy; static LAZY_FOO: Lazy = Lazy::new(|| "foo".to_uppercase()); - //~^ ERROR: this type has been superceded by `LazyLock` in the standard library + //~^ ERROR: this type has been superseded by `LazyLock` in the standard library static mut LAZY_BAR: Lazy = Lazy::new(|| "bar".to_uppercase()); - //~^ ERROR: this type has been superceded by `LazyLock` in the standard library + //~^ ERROR: this type has been superseded by `LazyLock` in the standard library static mut LAZY_BAZ: Lazy = Lazy::new(|| "baz".to_uppercase()); - //~^ ERROR: this type has been superceded by `LazyLock` in the standard library + //~^ ERROR: this type has been superseded by `LazyLock` in the standard library fn calling_irreplaceable_fns() { let _ = Lazy::get(&LAZY_FOO); @@ -31,13 +31,13 @@ mod lazy_static_lazy_static { lazy_static! { static ref LAZY_FOO: String = "foo".to_uppercase(); } - //~^^^ ERROR: this macro has been superceded by `std::sync::LazyLock` + //~^^^ ERROR: this macro has been superseded by `std::sync::LazyLock` lazy_static! { static ref LAZY_BAR: String = "bar".to_uppercase(); static ref LAZY_BAZ: String = "baz".to_uppercase(); } - //~^^^^ ERROR: this macro has been superceded by `std::sync::LazyLock` - //~| ERROR: this macro has been superceded by `std::sync::LazyLock` + //~^^^^ ERROR: this macro has been superseded by `std::sync::LazyLock` + //~| ERROR: this macro has been superseded by `std::sync::LazyLock` } fn main() {} diff --git a/src/tools/clippy/tests/ui/non_std_lazy_static/non_std_lazy_static_unfixable.stderr b/src/tools/clippy/tests/ui/non_std_lazy_static/non_std_lazy_static_unfixable.stderr index 216190ae4ca31..2c35cad6237ab 100644 --- a/src/tools/clippy/tests/ui/non_std_lazy_static/non_std_lazy_static_unfixable.stderr +++ b/src/tools/clippy/tests/ui/non_std_lazy_static/non_std_lazy_static_unfixable.stderr @@ -1,4 +1,4 @@ -error: this macro has been superceded by `std::sync::LazyLock` +error: this macro has been superseded by `std::sync::LazyLock` --> tests/ui/non_std_lazy_static/non_std_lazy_static_unfixable.rs:31:5 | LL | / lazy_static! { @@ -9,7 +9,7 @@ LL | | } = note: `-D clippy::non-std-lazy-statics` implied by `-D warnings` = help: to override `-D warnings` add `#[allow(clippy::non_std_lazy_statics)]` -error: this macro has been superceded by `std::sync::LazyLock` +error: this macro has been superseded by `std::sync::LazyLock` --> tests/ui/non_std_lazy_static/non_std_lazy_static_unfixable.rs:35:5 | LL | / lazy_static! { @@ -18,7 +18,7 @@ LL | | static ref LAZY_BAZ: String = "baz".to_uppercase(); LL | | } | |_____^ -error: this macro has been superceded by `std::sync::LazyLock` +error: this macro has been superseded by `std::sync::LazyLock` --> tests/ui/non_std_lazy_static/non_std_lazy_static_unfixable.rs:35:5 | LL | / lazy_static! { @@ -29,7 +29,7 @@ LL | | } | = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no` -error: this type has been superceded by `LazyLock` in the standard library +error: this type has been superseded by `LazyLock` in the standard library --> tests/ui/non_std_lazy_static/non_std_lazy_static_unfixable.rs:11:22 | LL | static LAZY_FOO: Lazy = Lazy::new(|| "foo".to_uppercase()); @@ -41,7 +41,7 @@ LL - static LAZY_FOO: Lazy = Lazy::new(|| "foo".to_uppercase()); LL + static LAZY_FOO: std::sync::LazyLock = std::sync::LazyLock::new(|| "foo".to_uppercase()); | -error: this type has been superceded by `LazyLock` in the standard library +error: this type has been superseded by `LazyLock` in the standard library --> tests/ui/non_std_lazy_static/non_std_lazy_static_unfixable.rs:13:26 | LL | static mut LAZY_BAR: Lazy = Lazy::new(|| "bar".to_uppercase()); @@ -53,7 +53,7 @@ LL - static mut LAZY_BAR: Lazy = Lazy::new(|| "bar".to_uppercase()); LL + static mut LAZY_BAR: std::sync::LazyLock = std::sync::LazyLock::new(|| "bar".to_uppercase()); | -error: this type has been superceded by `LazyLock` in the standard library +error: this type has been superseded by `LazyLock` in the standard library --> tests/ui/non_std_lazy_static/non_std_lazy_static_unfixable.rs:15:26 | LL | static mut LAZY_BAZ: Lazy = Lazy::new(|| "baz".to_uppercase()); diff --git a/src/tools/clippy/tests/ui/nonminimal_bool.rs b/src/tools/clippy/tests/ui/nonminimal_bool.rs index a155ff3508be0..1eecc3dee3dc5 100644 --- a/src/tools/clippy/tests/ui/nonminimal_bool.rs +++ b/src/tools/clippy/tests/ui/nonminimal_bool.rs @@ -216,3 +216,23 @@ fn issue14184(a: f32, b: bool) { println!("Hi"); } } + +mod issue14404 { + enum TyKind { + Ref(i32, i32, i32), + Other, + } + + struct Expr; + + fn is_mutable(expr: &Expr) -> bool { + todo!() + } + + fn should_not_give_macro(ty: TyKind, expr: Expr) { + if !(matches!(ty, TyKind::Ref(_, _, _)) && !is_mutable(&expr)) { + //~^ nonminimal_bool + todo!() + } + } +} diff --git a/src/tools/clippy/tests/ui/nonminimal_bool.stderr b/src/tools/clippy/tests/ui/nonminimal_bool.stderr index 336cce40abf0d..0e3e4cf7988e2 100644 --- a/src/tools/clippy/tests/ui/nonminimal_bool.stderr +++ b/src/tools/clippy/tests/ui/nonminimal_bool.stderr @@ -227,7 +227,13 @@ error: this boolean expression can be simplified --> tests/ui/nonminimal_bool.rs:214:8 | LL | if !(a < 2.0 && !b) { - | ^^^^^^^^^^^^^^^^ help: try: `!(a < 2.0) || b` + | ^^^^^^^^^^^^^^^^ help: try: `a >= 2.0 || b` -error: aborting due to 30 previous errors +error: this boolean expression can be simplified + --> tests/ui/nonminimal_bool.rs:233:12 + | +LL | if !(matches!(ty, TyKind::Ref(_, _, _)) && !is_mutable(&expr)) { + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `!matches!(ty, TyKind::Ref(_, _, _)) || is_mutable(&expr)` + +error: aborting due to 31 previous errors diff --git a/src/tools/clippy/tests/ui/obfuscated_if_else.fixed b/src/tools/clippy/tests/ui/obfuscated_if_else.fixed index 66f5070787b09..70ae090626b96 100644 --- a/src/tools/clippy/tests/ui/obfuscated_if_else.fixed +++ b/src/tools/clippy/tests/ui/obfuscated_if_else.fixed @@ -46,6 +46,18 @@ fn main() { let partial = true.then_some(1); partial.unwrap_or_else(|| n * 2); // not lint + + if true { () } else { Default::default() }; + //~^ obfuscated_if_else + + if true { () } else { Default::default() }; + //~^ obfuscated_if_else + + if true { 1 } else { Default::default() }; + //~^ obfuscated_if_else + + if true { 1 } else { Default::default() }; + //~^ obfuscated_if_else } fn issue11141() { diff --git a/src/tools/clippy/tests/ui/obfuscated_if_else.rs b/src/tools/clippy/tests/ui/obfuscated_if_else.rs index 4efd740eb60bb..8e1f57ca2c026 100644 --- a/src/tools/clippy/tests/ui/obfuscated_if_else.rs +++ b/src/tools/clippy/tests/ui/obfuscated_if_else.rs @@ -46,6 +46,18 @@ fn main() { let partial = true.then_some(1); partial.unwrap_or_else(|| n * 2); // not lint + + true.then_some(()).unwrap_or_default(); + //~^ obfuscated_if_else + + true.then(|| ()).unwrap_or_default(); + //~^ obfuscated_if_else + + true.then_some(1).unwrap_or_default(); + //~^ obfuscated_if_else + + true.then(|| 1).unwrap_or_default(); + //~^ obfuscated_if_else } fn issue11141() { diff --git a/src/tools/clippy/tests/ui/obfuscated_if_else.stderr b/src/tools/clippy/tests/ui/obfuscated_if_else.stderr index d676c25669570..0de7259d8bb82 100644 --- a/src/tools/clippy/tests/ui/obfuscated_if_else.stderr +++ b/src/tools/clippy/tests/ui/obfuscated_if_else.stderr @@ -68,52 +68,76 @@ LL | true.then_some(1).unwrap_or_else(Default::default); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `if true { 1 } else { Default::default() }` error: this method chain can be written more clearly with `if .. else ..` - --> tests/ui/obfuscated_if_else.rs:53:13 + --> tests/ui/obfuscated_if_else.rs:50:5 + | +LL | true.then_some(()).unwrap_or_default(); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `if true { () } else { Default::default() }` + +error: this method chain can be written more clearly with `if .. else ..` + --> tests/ui/obfuscated_if_else.rs:53:5 + | +LL | true.then(|| ()).unwrap_or_default(); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `if true { () } else { Default::default() }` + +error: this method chain can be written more clearly with `if .. else ..` + --> tests/ui/obfuscated_if_else.rs:56:5 + | +LL | true.then_some(1).unwrap_or_default(); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `if true { 1 } else { Default::default() }` + +error: this method chain can be written more clearly with `if .. else ..` + --> tests/ui/obfuscated_if_else.rs:59:5 + | +LL | true.then(|| 1).unwrap_or_default(); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `if true { 1 } else { Default::default() }` + +error: this method chain can be written more clearly with `if .. else ..` + --> tests/ui/obfuscated_if_else.rs:65:13 | LL | let _ = true.then_some(40).unwrap_or(17) | 2; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `(if true { 40 } else { 17 })` error: this method chain can be written more clearly with `if .. else ..` - --> tests/ui/obfuscated_if_else.rs:57:13 + --> tests/ui/obfuscated_if_else.rs:69:13 | LL | let _ = true.then_some(30).unwrap_or(17) | true.then_some(2).unwrap_or(3) | true.then_some(10).unwrap_or(1); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `(if true { 30 } else { 17 })` error: this method chain can be written more clearly with `if .. else ..` - --> tests/ui/obfuscated_if_else.rs:57:48 + --> tests/ui/obfuscated_if_else.rs:69:48 | LL | let _ = true.then_some(30).unwrap_or(17) | true.then_some(2).unwrap_or(3) | true.then_some(10).unwrap_or(1); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `if true { 2 } else { 3 }` error: this method chain can be written more clearly with `if .. else ..` - --> tests/ui/obfuscated_if_else.rs:57:81 + --> tests/ui/obfuscated_if_else.rs:69:81 | LL | let _ = true.then_some(30).unwrap_or(17) | true.then_some(2).unwrap_or(3) | true.then_some(10).unwrap_or(1); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `if true { 10 } else { 1 }` error: this method chain can be written more clearly with `if .. else ..` - --> tests/ui/obfuscated_if_else.rs:63:17 + --> tests/ui/obfuscated_if_else.rs:75:17 | LL | let _ = 2 | true.then_some(40).unwrap_or(17); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `if true { 40 } else { 17 }` error: this method chain can be written more clearly with `if .. else ..` - --> tests/ui/obfuscated_if_else.rs:67:13 + --> tests/ui/obfuscated_if_else.rs:79:13 | LL | let _ = true.then_some(42).unwrap_or(17) as u8; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `if true { 42 } else { 17 }` error: this method chain can be written more clearly with `if .. else ..` - --> tests/ui/obfuscated_if_else.rs:71:14 + --> tests/ui/obfuscated_if_else.rs:83:14 | LL | let _ = *true.then_some(&42).unwrap_or(&17); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `if true { &42 } else { &17 }` error: this method chain can be written more clearly with `if .. else ..` - --> tests/ui/obfuscated_if_else.rs:75:14 + --> tests/ui/obfuscated_if_else.rs:87:14 | LL | let _ = *true.then_some(&42).unwrap_or(&17) as u8; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `if true { &42 } else { &17 }` -error: aborting due to 19 previous errors +error: aborting due to 23 previous errors diff --git a/src/tools/clippy/tests/ui/op_ref.fixed b/src/tools/clippy/tests/ui/op_ref.fixed index 46a59e419cce0..f412190b9fd9e 100644 --- a/src/tools/clippy/tests/ui/op_ref.fixed +++ b/src/tools/clippy/tests/ui/op_ref.fixed @@ -98,3 +98,15 @@ impl Mul for A { self * &rhs } } + +mod issue_2597 { + fn ex1() { + let a: &str = "abc"; + let b: String = "abc".to_owned(); + println!("{}", a > &b); + } + + pub fn ex2(array: &[T], val: &T, idx: usize) -> bool { + &array[idx] < val + } +} diff --git a/src/tools/clippy/tests/ui/op_ref.rs b/src/tools/clippy/tests/ui/op_ref.rs index e10840ff4b97b..a4bbd86c7e95b 100644 --- a/src/tools/clippy/tests/ui/op_ref.rs +++ b/src/tools/clippy/tests/ui/op_ref.rs @@ -98,3 +98,15 @@ impl Mul for A { self * &rhs } } + +mod issue_2597 { + fn ex1() { + let a: &str = "abc"; + let b: String = "abc".to_owned(); + println!("{}", a > &b); + } + + pub fn ex2(array: &[T], val: &T, idx: usize) -> bool { + &array[idx] < val + } +} diff --git a/src/tools/clippy/tests/ui/option_if_let_else.fixed b/src/tools/clippy/tests/ui/option_if_let_else.fixed index ee30988960175..fe3ac9e8f92c3 100644 --- a/src/tools/clippy/tests/ui/option_if_let_else.fixed +++ b/src/tools/clippy/tests/ui/option_if_let_else.fixed @@ -288,3 +288,17 @@ mod issue13964 { }; } } + +mod issue11059 { + use std::fmt::Debug; + + fn box_coercion_unsize(o: Option) -> Box { + if let Some(o) = o { Box::new(o) } else { Box::new("foo") } + } + + static S: String = String::new(); + + fn deref_with_overload(o: Option<&str>) -> &str { + if let Some(o) = o { o } else { &S } + } +} diff --git a/src/tools/clippy/tests/ui/option_if_let_else.rs b/src/tools/clippy/tests/ui/option_if_let_else.rs index 525a5df4371c2..5b7498bc8e23b 100644 --- a/src/tools/clippy/tests/ui/option_if_let_else.rs +++ b/src/tools/clippy/tests/ui/option_if_let_else.rs @@ -351,3 +351,17 @@ mod issue13964 { }; } } + +mod issue11059 { + use std::fmt::Debug; + + fn box_coercion_unsize(o: Option) -> Box { + if let Some(o) = o { Box::new(o) } else { Box::new("foo") } + } + + static S: String = String::new(); + + fn deref_with_overload(o: Option<&str>) -> &str { + if let Some(o) = o { o } else { &S } + } +} diff --git a/src/tools/clippy/tests/ui/or_fun_call.fixed b/src/tools/clippy/tests/ui/or_fun_call.fixed index 1794ac57fe5b1..a1119d75c231b 100644 --- a/src/tools/clippy/tests/ui/or_fun_call.fixed +++ b/src/tools/clippy/tests/ui/or_fun_call.fixed @@ -179,16 +179,20 @@ fn f() -> Option<()> { mod issue6675 { unsafe fn ptr_to_ref<'a, T>(p: *const T) -> &'a T { - #[allow(unused)] - let x = vec![0; 1000]; // future-proofing, make this function expensive. - &*p + unsafe { + #[allow(unused)] + let x = vec![0; 1000]; // future-proofing, make this function expensive. + &*p + } } unsafe fn foo() { - let s = "test".to_owned(); - let s = &s as *const _; - None.unwrap_or_else(|| ptr_to_ref(s)); - //~^ or_fun_call + unsafe { + let s = "test".to_owned(); + let s = &s as *const _; + None.unwrap_or_else(|| ptr_to_ref(s)); + //~^ or_fun_call + } } fn bar() { diff --git a/src/tools/clippy/tests/ui/or_fun_call.rs b/src/tools/clippy/tests/ui/or_fun_call.rs index 256db343c0573..a7cd632bf166f 100644 --- a/src/tools/clippy/tests/ui/or_fun_call.rs +++ b/src/tools/clippy/tests/ui/or_fun_call.rs @@ -179,16 +179,20 @@ fn f() -> Option<()> { mod issue6675 { unsafe fn ptr_to_ref<'a, T>(p: *const T) -> &'a T { - #[allow(unused)] - let x = vec![0; 1000]; // future-proofing, make this function expensive. - &*p + unsafe { + #[allow(unused)] + let x = vec![0; 1000]; // future-proofing, make this function expensive. + &*p + } } unsafe fn foo() { - let s = "test".to_owned(); - let s = &s as *const _; - None.unwrap_or(ptr_to_ref(s)); - //~^ or_fun_call + unsafe { + let s = "test".to_owned(); + let s = &s as *const _; + None.unwrap_or(ptr_to_ref(s)); + //~^ or_fun_call + } } fn bar() { diff --git a/src/tools/clippy/tests/ui/or_fun_call.stderr b/src/tools/clippy/tests/ui/or_fun_call.stderr index 93c87b2f12cde..35bda7e4d3314 100644 --- a/src/tools/clippy/tests/ui/or_fun_call.stderr +++ b/src/tools/clippy/tests/ui/or_fun_call.stderr @@ -125,91 +125,91 @@ LL | let _ = Some("a".to_string()).or(Some("b".to_string())); | ^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `or_else(|| Some("b".to_string()))` error: function call inside of `unwrap_or` - --> tests/ui/or_fun_call.rs:190:14 + --> tests/ui/or_fun_call.rs:193:18 | -LL | None.unwrap_or(ptr_to_ref(s)); - | ^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_else(|| ptr_to_ref(s))` +LL | None.unwrap_or(ptr_to_ref(s)); + | ^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_else(|| ptr_to_ref(s))` error: function call inside of `unwrap_or` - --> tests/ui/or_fun_call.rs:197:14 + --> tests/ui/or_fun_call.rs:201:14 | LL | None.unwrap_or(unsafe { ptr_to_ref(s) }); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_else(|| unsafe { ptr_to_ref(s) })` error: function call inside of `unwrap_or` - --> tests/ui/or_fun_call.rs:200:14 + --> tests/ui/or_fun_call.rs:204:14 | LL | None.unwrap_or( unsafe { ptr_to_ref(s) } ); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_else(|| unsafe { ptr_to_ref(s) })` error: function call inside of `map_or` - --> tests/ui/or_fun_call.rs:276:25 + --> tests/ui/or_fun_call.rs:280:25 | LL | let _ = Some(4).map_or(g(), |v| v); | ^^^^^^^^^^^^^^^^^^ help: try: `map_or_else(g, |v| v)` error: function call inside of `map_or` - --> tests/ui/or_fun_call.rs:278:25 + --> tests/ui/or_fun_call.rs:282:25 | LL | let _ = Some(4).map_or(g(), f); | ^^^^^^^^^^^^^^ help: try: `map_or_else(g, f)` error: use of `unwrap_or_else` to construct default value - --> tests/ui/or_fun_call.rs:310:18 + --> tests/ui/or_fun_call.rs:314:18 | LL | with_new.unwrap_or_else(Vec::new); | ^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_default()` error: use of `unwrap_or_else` to construct default value - --> tests/ui/or_fun_call.rs:314:28 + --> tests/ui/or_fun_call.rs:318:28 | LL | with_default_trait.unwrap_or_else(Default::default); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_default()` error: use of `unwrap_or_else` to construct default value - --> tests/ui/or_fun_call.rs:318:27 + --> tests/ui/or_fun_call.rs:322:27 | LL | with_default_type.unwrap_or_else(u64::default); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_default()` error: use of `unwrap_or_else` to construct default value - --> tests/ui/or_fun_call.rs:322:22 + --> tests/ui/or_fun_call.rs:326:22 | LL | real_default.unwrap_or_else(::default); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_default()` error: use of `or_insert_with` to construct default value - --> tests/ui/or_fun_call.rs:326:23 + --> tests/ui/or_fun_call.rs:330:23 | LL | map.entry(42).or_insert_with(String::new); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `or_default()` error: use of `or_insert_with` to construct default value - --> tests/ui/or_fun_call.rs:330:25 + --> tests/ui/or_fun_call.rs:334:25 | LL | btree.entry(42).or_insert_with(String::new); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `or_default()` error: use of `unwrap_or_else` to construct default value - --> tests/ui/or_fun_call.rs:334:25 + --> tests/ui/or_fun_call.rs:338:25 | LL | let _ = stringy.unwrap_or_else(String::new); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_default()` error: function call inside of `unwrap_or` - --> tests/ui/or_fun_call.rs:376:17 + --> tests/ui/or_fun_call.rs:380:17 | LL | let _ = opt.unwrap_or({ f() }); // suggest `.unwrap_or_else(f)` | ^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_else(f)` error: function call inside of `unwrap_or` - --> tests/ui/or_fun_call.rs:381:17 + --> tests/ui/or_fun_call.rs:385:17 | LL | let _ = opt.unwrap_or(f() + 1); // suggest `.unwrap_or_else(|| f() + 1)` | ^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_else(|| f() + 1)` error: function call inside of `unwrap_or` - --> tests/ui/or_fun_call.rs:386:17 + --> tests/ui/or_fun_call.rs:390:17 | LL | let _ = opt.unwrap_or({ | _________________^ @@ -229,19 +229,19 @@ LL ~ }); | error: function call inside of `map_or` - --> tests/ui/or_fun_call.rs:392:17 + --> tests/ui/or_fun_call.rs:396:17 | LL | let _ = opt.map_or(f() + 1, |v| v); // suggest `.map_or_else(|| f() + 1, |v| v)` | ^^^^^^^^^^^^^^^^^^^^^^ help: try: `map_or_else(|| f() + 1, |v| v)` error: use of `unwrap_or` to construct default value - --> tests/ui/or_fun_call.rs:397:17 + --> tests/ui/or_fun_call.rs:401:17 | LL | let _ = opt.unwrap_or({ i32::default() }); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_default()` error: function call inside of `unwrap_or` - --> tests/ui/or_fun_call.rs:404:21 + --> tests/ui/or_fun_call.rs:408:21 | LL | let _ = opt_foo.unwrap_or(Foo { val: String::default() }); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_else(|| Foo { val: String::default() })` diff --git a/src/tools/clippy/tests/ui/pattern_type_mismatch/mutability.rs b/src/tools/clippy/tests/ui/pattern_type_mismatch/mutability.rs index bdac3764bf167..643d8fedda984 100644 --- a/src/tools/clippy/tests/ui/pattern_type_mismatch/mutability.rs +++ b/src/tools/clippy/tests/ui/pattern_type_mismatch/mutability.rs @@ -1,5 +1,5 @@ -#![allow(clippy::all)] #![warn(clippy::pattern_type_mismatch)] +#![allow(clippy::single_match)] fn main() {} diff --git a/src/tools/clippy/tests/ui/pattern_type_mismatch/pattern_alternatives.rs b/src/tools/clippy/tests/ui/pattern_type_mismatch/pattern_alternatives.rs index 3c789f570b038..a1c447d258346 100644 --- a/src/tools/clippy/tests/ui/pattern_type_mismatch/pattern_alternatives.rs +++ b/src/tools/clippy/tests/ui/pattern_type_mismatch/pattern_alternatives.rs @@ -1,4 +1,3 @@ -#![allow(clippy::all)] #![warn(clippy::pattern_type_mismatch)] fn main() {} diff --git a/src/tools/clippy/tests/ui/pattern_type_mismatch/pattern_alternatives.stderr b/src/tools/clippy/tests/ui/pattern_type_mismatch/pattern_alternatives.stderr index 763f688ea8975..b3ae63ec031a4 100644 --- a/src/tools/clippy/tests/ui/pattern_type_mismatch/pattern_alternatives.stderr +++ b/src/tools/clippy/tests/ui/pattern_type_mismatch/pattern_alternatives.stderr @@ -1,5 +1,5 @@ error: type of pattern does not match the expression type - --> tests/ui/pattern_type_mismatch/pattern_alternatives.rs:15:12 + --> tests/ui/pattern_type_mismatch/pattern_alternatives.rs:14:12 | LL | if let Value::B | Value::A(_) = ref_value {} | ^^^^^^^^^^^^^^^^^^^^^^ @@ -9,7 +9,7 @@ LL | if let Value::B | Value::A(_) = ref_value {} = help: to override `-D warnings` add `#[allow(clippy::pattern_type_mismatch)]` error: type of pattern does not match the expression type - --> tests/ui/pattern_type_mismatch/pattern_alternatives.rs:18:34 + --> tests/ui/pattern_type_mismatch/pattern_alternatives.rs:17:34 | LL | if let &Value::B | &Value::A(Some(_)) = ref_value {} | ^^^^^^^ @@ -17,7 +17,7 @@ LL | if let &Value::B | &Value::A(Some(_)) = ref_value {} = help: explicitly match against a `&_` pattern and adjust the enclosed variable bindings error: type of pattern does not match the expression type - --> tests/ui/pattern_type_mismatch/pattern_alternatives.rs:21:32 + --> tests/ui/pattern_type_mismatch/pattern_alternatives.rs:20:32 | LL | if let Value::B | Value::A(Some(_)) = *ref_value {} | ^^^^^^^ diff --git a/src/tools/clippy/tests/ui/pattern_type_mismatch/pattern_structs.rs b/src/tools/clippy/tests/ui/pattern_type_mismatch/pattern_structs.rs index 7fc53d591a917..c5e395c4084f2 100644 --- a/src/tools/clippy/tests/ui/pattern_type_mismatch/pattern_structs.rs +++ b/src/tools/clippy/tests/ui/pattern_type_mismatch/pattern_structs.rs @@ -1,4 +1,3 @@ -#![allow(clippy::all)] #![warn(clippy::pattern_type_mismatch)] fn main() {} diff --git a/src/tools/clippy/tests/ui/pattern_type_mismatch/pattern_structs.stderr b/src/tools/clippy/tests/ui/pattern_type_mismatch/pattern_structs.stderr index 70f7bdc389061..e18a88c2bf510 100644 --- a/src/tools/clippy/tests/ui/pattern_type_mismatch/pattern_structs.stderr +++ b/src/tools/clippy/tests/ui/pattern_type_mismatch/pattern_structs.stderr @@ -1,5 +1,5 @@ error: type of pattern does not match the expression type - --> tests/ui/pattern_type_mismatch/pattern_structs.rs:13:9 + --> tests/ui/pattern_type_mismatch/pattern_structs.rs:12:9 | LL | let Struct { .. } = ref_value; | ^^^^^^^^^^^^^ @@ -9,7 +9,7 @@ LL | let Struct { .. } = ref_value; = help: to override `-D warnings` add `#[allow(clippy::pattern_type_mismatch)]` error: type of pattern does not match the expression type - --> tests/ui/pattern_type_mismatch/pattern_structs.rs:16:33 + --> tests/ui/pattern_type_mismatch/pattern_structs.rs:15:33 | LL | if let &Struct { ref_inner: Some(_) } = ref_value {} | ^^^^^^^ @@ -17,7 +17,7 @@ LL | if let &Struct { ref_inner: Some(_) } = ref_value {} = help: explicitly match against a `&_` pattern and adjust the enclosed variable bindings error: type of pattern does not match the expression type - --> tests/ui/pattern_type_mismatch/pattern_structs.rs:19:32 + --> tests/ui/pattern_type_mismatch/pattern_structs.rs:18:32 | LL | if let Struct { ref_inner: Some(_) } = *ref_value {} | ^^^^^^^ @@ -25,7 +25,7 @@ LL | if let Struct { ref_inner: Some(_) } = *ref_value {} = help: explicitly match against a `&_` pattern and adjust the enclosed variable bindings error: type of pattern does not match the expression type - --> tests/ui/pattern_type_mismatch/pattern_structs.rs:37:12 + --> tests/ui/pattern_type_mismatch/pattern_structs.rs:36:12 | LL | if let StructEnum::Var { .. } = ref_value {} | ^^^^^^^^^^^^^^^^^^^^^^ @@ -33,7 +33,7 @@ LL | if let StructEnum::Var { .. } = ref_value {} = help: use `*` to dereference the match expression or explicitly match against a `&_` pattern and adjust the enclosed variable bindings error: type of pattern does not match the expression type - --> tests/ui/pattern_type_mismatch/pattern_structs.rs:40:12 + --> tests/ui/pattern_type_mismatch/pattern_structs.rs:39:12 | LL | if let StructEnum::Var { inner_ref: Some(_) } = ref_value {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -41,7 +41,7 @@ LL | if let StructEnum::Var { inner_ref: Some(_) } = ref_value {} = help: use `*` to dereference the match expression or explicitly match against a `&_` pattern and adjust the enclosed variable bindings error: type of pattern does not match the expression type - --> tests/ui/pattern_type_mismatch/pattern_structs.rs:43:42 + --> tests/ui/pattern_type_mismatch/pattern_structs.rs:42:42 | LL | if let &StructEnum::Var { inner_ref: Some(_) } = ref_value {} | ^^^^^^^ @@ -49,7 +49,7 @@ LL | if let &StructEnum::Var { inner_ref: Some(_) } = ref_value {} = help: explicitly match against a `&_` pattern and adjust the enclosed variable bindings error: type of pattern does not match the expression type - --> tests/ui/pattern_type_mismatch/pattern_structs.rs:46:41 + --> tests/ui/pattern_type_mismatch/pattern_structs.rs:45:41 | LL | if let StructEnum::Var { inner_ref: Some(_) } = *ref_value {} | ^^^^^^^ @@ -57,7 +57,7 @@ LL | if let StructEnum::Var { inner_ref: Some(_) } = *ref_value {} = help: explicitly match against a `&_` pattern and adjust the enclosed variable bindings error: type of pattern does not match the expression type - --> tests/ui/pattern_type_mismatch/pattern_structs.rs:49:12 + --> tests/ui/pattern_type_mismatch/pattern_structs.rs:48:12 | LL | if let StructEnum::Empty = ref_value {} | ^^^^^^^^^^^^^^^^^ diff --git a/src/tools/clippy/tests/ui/pattern_type_mismatch/pattern_tuples.rs b/src/tools/clippy/tests/ui/pattern_type_mismatch/pattern_tuples.rs index ecd95d9ae2b3b..8bec5abc88f17 100644 --- a/src/tools/clippy/tests/ui/pattern_type_mismatch/pattern_tuples.rs +++ b/src/tools/clippy/tests/ui/pattern_type_mismatch/pattern_tuples.rs @@ -1,4 +1,3 @@ -#![allow(clippy::all)] #![warn(clippy::pattern_type_mismatch)] fn main() {} diff --git a/src/tools/clippy/tests/ui/pattern_type_mismatch/pattern_tuples.stderr b/src/tools/clippy/tests/ui/pattern_type_mismatch/pattern_tuples.stderr index d47c5d509c3fa..ee307be63c1a5 100644 --- a/src/tools/clippy/tests/ui/pattern_type_mismatch/pattern_tuples.stderr +++ b/src/tools/clippy/tests/ui/pattern_type_mismatch/pattern_tuples.stderr @@ -1,5 +1,5 @@ error: type of pattern does not match the expression type - --> tests/ui/pattern_type_mismatch/pattern_tuples.rs:11:9 + --> tests/ui/pattern_type_mismatch/pattern_tuples.rs:10:9 | LL | let TupleStruct(_) = ref_value; | ^^^^^^^^^^^^^^ @@ -9,7 +9,7 @@ LL | let TupleStruct(_) = ref_value; = help: to override `-D warnings` add `#[allow(clippy::pattern_type_mismatch)]` error: type of pattern does not match the expression type - --> tests/ui/pattern_type_mismatch/pattern_tuples.rs:14:25 + --> tests/ui/pattern_type_mismatch/pattern_tuples.rs:13:25 | LL | if let &TupleStruct(Some(_)) = ref_value {} | ^^^^^^^ @@ -17,7 +17,7 @@ LL | if let &TupleStruct(Some(_)) = ref_value {} = help: explicitly match against a `&_` pattern and adjust the enclosed variable bindings error: type of pattern does not match the expression type - --> tests/ui/pattern_type_mismatch/pattern_tuples.rs:17:24 + --> tests/ui/pattern_type_mismatch/pattern_tuples.rs:16:24 | LL | if let TupleStruct(Some(_)) = *ref_value {} | ^^^^^^^ @@ -25,7 +25,7 @@ LL | if let TupleStruct(Some(_)) = *ref_value {} = help: explicitly match against a `&_` pattern and adjust the enclosed variable bindings error: type of pattern does not match the expression type - --> tests/ui/pattern_type_mismatch/pattern_tuples.rs:35:12 + --> tests/ui/pattern_type_mismatch/pattern_tuples.rs:34:12 | LL | if let TupleEnum::Var(_) = ref_value {} | ^^^^^^^^^^^^^^^^^ @@ -33,7 +33,7 @@ LL | if let TupleEnum::Var(_) = ref_value {} = help: use `*` to dereference the match expression or explicitly match against a `&_` pattern and adjust the enclosed variable bindings error: type of pattern does not match the expression type - --> tests/ui/pattern_type_mismatch/pattern_tuples.rs:38:28 + --> tests/ui/pattern_type_mismatch/pattern_tuples.rs:37:28 | LL | if let &TupleEnum::Var(Some(_)) = ref_value {} | ^^^^^^^ @@ -41,7 +41,7 @@ LL | if let &TupleEnum::Var(Some(_)) = ref_value {} = help: explicitly match against a `&_` pattern and adjust the enclosed variable bindings error: type of pattern does not match the expression type - --> tests/ui/pattern_type_mismatch/pattern_tuples.rs:41:27 + --> tests/ui/pattern_type_mismatch/pattern_tuples.rs:40:27 | LL | if let TupleEnum::Var(Some(_)) = *ref_value {} | ^^^^^^^ @@ -49,7 +49,7 @@ LL | if let TupleEnum::Var(Some(_)) = *ref_value {} = help: explicitly match against a `&_` pattern and adjust the enclosed variable bindings error: type of pattern does not match the expression type - --> tests/ui/pattern_type_mismatch/pattern_tuples.rs:44:12 + --> tests/ui/pattern_type_mismatch/pattern_tuples.rs:43:12 | LL | if let TupleEnum::Empty = ref_value {} | ^^^^^^^^^^^^^^^^ @@ -57,7 +57,7 @@ LL | if let TupleEnum::Empty = ref_value {} = help: use `*` to dereference the match expression or explicitly match against a `&_` pattern and adjust the enclosed variable bindings error: type of pattern does not match the expression type - --> tests/ui/pattern_type_mismatch/pattern_tuples.rs:60:9 + --> tests/ui/pattern_type_mismatch/pattern_tuples.rs:59:9 | LL | let (_a, _b) = ref_value; | ^^^^^^^^ @@ -65,7 +65,7 @@ LL | let (_a, _b) = ref_value; = help: use `*` to dereference the match expression or explicitly match against a `&_` pattern and adjust the enclosed variable bindings error: type of pattern does not match the expression type - --> tests/ui/pattern_type_mismatch/pattern_tuples.rs:63:18 + --> tests/ui/pattern_type_mismatch/pattern_tuples.rs:62:18 | LL | if let &(_a, Some(_)) = ref_value {} | ^^^^^^^ @@ -73,7 +73,7 @@ LL | if let &(_a, Some(_)) = ref_value {} = help: explicitly match against a `&_` pattern and adjust the enclosed variable bindings error: type of pattern does not match the expression type - --> tests/ui/pattern_type_mismatch/pattern_tuples.rs:66:17 + --> tests/ui/pattern_type_mismatch/pattern_tuples.rs:65:17 | LL | if let (_a, Some(_)) = *ref_value {} | ^^^^^^^ diff --git a/src/tools/clippy/tests/ui/pattern_type_mismatch/syntax.rs b/src/tools/clippy/tests/ui/pattern_type_mismatch/syntax.rs index 0bbc26a0c27c5..49ea1d3f7a67c 100644 --- a/src/tools/clippy/tests/ui/pattern_type_mismatch/syntax.rs +++ b/src/tools/clippy/tests/ui/pattern_type_mismatch/syntax.rs @@ -1,5 +1,10 @@ -#![allow(clippy::all)] #![warn(clippy::pattern_type_mismatch)] +#![allow( + clippy::match_ref_pats, + clippy::never_loop, + clippy::redundant_pattern_matching, + clippy::single_match +)] fn main() {} diff --git a/src/tools/clippy/tests/ui/pattern_type_mismatch/syntax.stderr b/src/tools/clippy/tests/ui/pattern_type_mismatch/syntax.stderr index 3f6b5feb9b07b..cd604d604c12c 100644 --- a/src/tools/clippy/tests/ui/pattern_type_mismatch/syntax.stderr +++ b/src/tools/clippy/tests/ui/pattern_type_mismatch/syntax.stderr @@ -1,5 +1,5 @@ error: type of pattern does not match the expression type - --> tests/ui/pattern_type_mismatch/syntax.rs:11:9 + --> tests/ui/pattern_type_mismatch/syntax.rs:16:9 | LL | Some(_) => (), | ^^^^^^^ @@ -9,7 +9,7 @@ LL | Some(_) => (), = help: to override `-D warnings` add `#[allow(clippy::pattern_type_mismatch)]` error: type of pattern does not match the expression type - --> tests/ui/pattern_type_mismatch/syntax.rs:31:12 + --> tests/ui/pattern_type_mismatch/syntax.rs:36:12 | LL | if let Some(_) = ref_value {} | ^^^^^^^ @@ -17,7 +17,7 @@ LL | if let Some(_) = ref_value {} = help: use `*` to dereference the match expression or explicitly match against a `&_` pattern and adjust the enclosed variable bindings error: type of pattern does not match the expression type - --> tests/ui/pattern_type_mismatch/syntax.rs:43:15 + --> tests/ui/pattern_type_mismatch/syntax.rs:48:15 | LL | while let Some(_) = ref_value { | ^^^^^^^ @@ -25,7 +25,7 @@ LL | while let Some(_) = ref_value { = help: use `*` to dereference the match expression or explicitly match against a `&_` pattern and adjust the enclosed variable bindings error: type of pattern does not match the expression type - --> tests/ui/pattern_type_mismatch/syntax.rs:63:9 + --> tests/ui/pattern_type_mismatch/syntax.rs:68:9 | LL | for (_a, _b) in slice.iter() {} | ^^^^^^^^ @@ -33,7 +33,7 @@ LL | for (_a, _b) in slice.iter() {} = help: explicitly match against a `&_` pattern and adjust the enclosed variable bindings error: type of pattern does not match the expression type - --> tests/ui/pattern_type_mismatch/syntax.rs:74:9 + --> tests/ui/pattern_type_mismatch/syntax.rs:79:9 | LL | let (_n, _m) = ref_value; | ^^^^^^^^ @@ -41,7 +41,7 @@ LL | let (_n, _m) = ref_value; = help: use `*` to dereference the match expression or explicitly match against a `&_` pattern and adjust the enclosed variable bindings error: type of pattern does not match the expression type - --> tests/ui/pattern_type_mismatch/syntax.rs:84:12 + --> tests/ui/pattern_type_mismatch/syntax.rs:89:12 | LL | fn foo((_a, _b): &(i32, i32)) {} | ^^^^^^^^ @@ -49,7 +49,7 @@ LL | fn foo((_a, _b): &(i32, i32)) {} = help: explicitly match against a `&_` pattern and adjust the enclosed variable bindings error: type of pattern does not match the expression type - --> tests/ui/pattern_type_mismatch/syntax.rs:99:10 + --> tests/ui/pattern_type_mismatch/syntax.rs:104:10 | LL | foo(|(_a, _b)| ()); | ^^^^^^^^ @@ -57,7 +57,7 @@ LL | foo(|(_a, _b)| ()); = help: explicitly match against a `&_` pattern and adjust the enclosed variable bindings error: type of pattern does not match the expression type - --> tests/ui/pattern_type_mismatch/syntax.rs:116:9 + --> tests/ui/pattern_type_mismatch/syntax.rs:121:9 | LL | Some(_) => (), | ^^^^^^^ @@ -65,7 +65,7 @@ LL | Some(_) => (), = help: use `*` to dereference the match expression or explicitly match against a `&_` pattern and adjust the enclosed variable bindings error: type of pattern does not match the expression type - --> tests/ui/pattern_type_mismatch/syntax.rs:137:17 + --> tests/ui/pattern_type_mismatch/syntax.rs:142:17 | LL | Some(_) => (), | ^^^^^^^ diff --git a/src/tools/clippy/tests/ui/patterns.fixed b/src/tools/clippy/tests/ui/patterns.fixed index bcb8ecfc38d25..a6dd5fd63a9f6 100644 --- a/src/tools/clippy/tests/ui/patterns.fixed +++ b/src/tools/clippy/tests/ui/patterns.fixed @@ -1,6 +1,4 @@ //@aux-build:proc_macros.rs -#![warn(clippy::all)] -#![allow(unused)] #![allow(clippy::uninlined_format_args, clippy::single_match)] #[macro_use] diff --git a/src/tools/clippy/tests/ui/patterns.rs b/src/tools/clippy/tests/ui/patterns.rs index 19639ebd13d60..64bfbdecdac2b 100644 --- a/src/tools/clippy/tests/ui/patterns.rs +++ b/src/tools/clippy/tests/ui/patterns.rs @@ -1,6 +1,4 @@ //@aux-build:proc_macros.rs -#![warn(clippy::all)] -#![allow(unused)] #![allow(clippy::uninlined_format_args, clippy::single_match)] #[macro_use] diff --git a/src/tools/clippy/tests/ui/patterns.stderr b/src/tools/clippy/tests/ui/patterns.stderr index b9950fe181cc7..ff5e1a8de90a4 100644 --- a/src/tools/clippy/tests/ui/patterns.stderr +++ b/src/tools/clippy/tests/ui/patterns.stderr @@ -1,5 +1,5 @@ error: the `y @ _` pattern can be written as just `y` - --> tests/ui/patterns.rs:14:9 + --> tests/ui/patterns.rs:12:9 | LL | y @ _ => (), | ^^^^^ help: try: `y` @@ -8,13 +8,13 @@ LL | y @ _ => (), = help: to override `-D warnings` add `#[allow(clippy::redundant_pattern)]` error: the `x @ _` pattern can be written as just `x` - --> tests/ui/patterns.rs:30:9 + --> tests/ui/patterns.rs:28:9 | LL | ref mut x @ _ => { | ^^^^^^^^^^^^^ help: try: `ref mut x` error: the `x @ _` pattern can be written as just `x` - --> tests/ui/patterns.rs:39:9 + --> tests/ui/patterns.rs:37:9 | LL | ref x @ _ => println!("vec: {:?}", x), | ^^^^^^^^^ help: try: `ref x` diff --git a/src/tools/clippy/tests/ui/pointers_in_nomem_asm_block.rs b/src/tools/clippy/tests/ui/pointers_in_nomem_asm_block.rs index 171716be26024..7f69c61b0289c 100644 --- a/src/tools/clippy/tests/ui/pointers_in_nomem_asm_block.rs +++ b/src/tools/clippy/tests/ui/pointers_in_nomem_asm_block.rs @@ -6,29 +6,37 @@ use core::arch::asm; unsafe fn nomem_bad(p: &i32) { - asm!( - "asdf {p1}, {p2}, {p3}", - p1 = in(reg) p, - //~^ pointers_in_nomem_asm_block + unsafe { + asm!( + "asdf {p1}, {p2}, {p3}", + p1 = in(reg) p, + //~^ pointers_in_nomem_asm_block - p2 = in(reg) p as *const _ as usize, - p3 = in(reg) p, - options(nomem, nostack, preserves_flags) - ); + p2 = in(reg) p as *const _ as usize, + p3 = in(reg) p, + options(nomem, nostack, preserves_flags) + ); + } } unsafe fn nomem_good(p: &i32) { - asm!("asdf {p}", p = in(reg) p, options(readonly, nostack, preserves_flags)); - let p = p as *const i32 as usize; - asm!("asdf {p}", p = in(reg) p, options(nomem, nostack, preserves_flags)); + unsafe { + asm!("asdf {p}", p = in(reg) p, options(readonly, nostack, preserves_flags)); + let p = p as *const i32 as usize; + asm!("asdf {p}", p = in(reg) p, options(nomem, nostack, preserves_flags)); + } } unsafe fn nomem_bad2(p: &mut i32) { - asm!("asdf {p}", p = in(reg) p, options(nomem, nostack, preserves_flags)); - //~^ pointers_in_nomem_asm_block + unsafe { + asm!("asdf {p}", p = in(reg) p, options(nomem, nostack, preserves_flags)); + //~^ pointers_in_nomem_asm_block + } } unsafe fn nomem_fn(p: extern "C" fn()) { - asm!("call {p}", p = in(reg) p, options(nomem)); - //~^ pointers_in_nomem_asm_block + unsafe { + asm!("call {p}", p = in(reg) p, options(nomem)); + //~^ pointers_in_nomem_asm_block + } } diff --git a/src/tools/clippy/tests/ui/pointers_in_nomem_asm_block.stderr b/src/tools/clippy/tests/ui/pointers_in_nomem_asm_block.stderr index ca24e34f63c0f..eabac2444eccc 100644 --- a/src/tools/clippy/tests/ui/pointers_in_nomem_asm_block.stderr +++ b/src/tools/clippy/tests/ui/pointers_in_nomem_asm_block.stderr @@ -1,11 +1,11 @@ error: passing pointers to nomem asm block - --> tests/ui/pointers_in_nomem_asm_block.rs:11:9 + --> tests/ui/pointers_in_nomem_asm_block.rs:12:13 | -LL | p1 = in(reg) p, - | ^^^^^^^^^^^^^^ +LL | p1 = in(reg) p, + | ^^^^^^^^^^^^^^ ... -LL | p3 = in(reg) p, - | ^^^^^^^^^^^^^^ +LL | p3 = in(reg) p, + | ^^^^^^^^^^^^^^ | = note: `nomem` means that no memory write or read happens inside the asm! block = note: if this is intentional and no pointers are read or written to, consider allowing the lint @@ -13,19 +13,19 @@ LL | p3 = in(reg) p, = help: to override `-D warnings` add `#[allow(clippy::pointers_in_nomem_asm_block)]` error: passing pointers to nomem asm block - --> tests/ui/pointers_in_nomem_asm_block.rs:27:22 + --> tests/ui/pointers_in_nomem_asm_block.rs:32:26 | -LL | asm!("asdf {p}", p = in(reg) p, options(nomem, nostack, preserves_flags)); - | ^^^^^^^^^^^^^ +LL | asm!("asdf {p}", p = in(reg) p, options(nomem, nostack, preserves_flags)); + | ^^^^^^^^^^^^^ | = note: `nomem` means that no memory write or read happens inside the asm! block = note: if this is intentional and no pointers are read or written to, consider allowing the lint error: passing pointers to nomem asm block - --> tests/ui/pointers_in_nomem_asm_block.rs:32:22 + --> tests/ui/pointers_in_nomem_asm_block.rs:39:26 | -LL | asm!("call {p}", p = in(reg) p, options(nomem)); - | ^^^^^^^^^^^^^ +LL | asm!("call {p}", p = in(reg) p, options(nomem)); + | ^^^^^^^^^^^^^ | = note: `nomem` means that no memory write or read happens inside the asm! block = note: if this is intentional and no pointers are read or written to, consider allowing the lint diff --git a/src/tools/clippy/tests/ui/ptr_cast_constness.fixed b/src/tools/clippy/tests/ui/ptr_cast_constness.fixed index 6dded72d3e191..79bfae1f7ebb4 100644 --- a/src/tools/clippy/tests/ui/ptr_cast_constness.fixed +++ b/src/tools/clippy/tests/ui/ptr_cast_constness.fixed @@ -12,11 +12,13 @@ extern crate proc_macros; use proc_macros::{external, inline_macros}; unsafe fn ptr_to_ref(p: *const T, om: *mut U) { - let _: &mut T = std::mem::transmute(p.cast_mut()); - //~^ ptr_cast_constness - let _ = &mut *p.cast_mut(); - //~^ ptr_cast_constness - let _: &T = &*(om as *const T); + unsafe { + let _: &mut T = std::mem::transmute(p.cast_mut()); + //~^ ptr_cast_constness + let _ = &mut *p.cast_mut(); + //~^ ptr_cast_constness + let _: &T = &*(om as *const T); + } } #[inline_macros] @@ -98,3 +100,9 @@ fn null_pointers() { let _ = external!(ptr::null::() as *mut u32); let _ = external!(ptr::null::().cast_mut()); } + +fn issue14621() { + let mut local = 4; + let _ = std::ptr::addr_of_mut!(local).cast_const(); + //~^ ptr_cast_constness +} diff --git a/src/tools/clippy/tests/ui/ptr_cast_constness.rs b/src/tools/clippy/tests/ui/ptr_cast_constness.rs index e9629f5290ec1..f6590dabd5b84 100644 --- a/src/tools/clippy/tests/ui/ptr_cast_constness.rs +++ b/src/tools/clippy/tests/ui/ptr_cast_constness.rs @@ -12,11 +12,13 @@ extern crate proc_macros; use proc_macros::{external, inline_macros}; unsafe fn ptr_to_ref(p: *const T, om: *mut U) { - let _: &mut T = std::mem::transmute(p as *mut T); - //~^ ptr_cast_constness - let _ = &mut *(p as *mut T); - //~^ ptr_cast_constness - let _: &T = &*(om as *const T); + unsafe { + let _: &mut T = std::mem::transmute(p as *mut T); + //~^ ptr_cast_constness + let _ = &mut *(p as *mut T); + //~^ ptr_cast_constness + let _: &T = &*(om as *const T); + } } #[inline_macros] @@ -98,3 +100,9 @@ fn null_pointers() { let _ = external!(ptr::null::() as *mut u32); let _ = external!(ptr::null::().cast_mut()); } + +fn issue14621() { + let mut local = 4; + let _ = std::ptr::addr_of_mut!(local) as *const _; + //~^ ptr_cast_constness +} diff --git a/src/tools/clippy/tests/ui/ptr_cast_constness.stderr b/src/tools/clippy/tests/ui/ptr_cast_constness.stderr index 1eeeef7470138..0b1644168ff51 100644 --- a/src/tools/clippy/tests/ui/ptr_cast_constness.stderr +++ b/src/tools/clippy/tests/ui/ptr_cast_constness.stderr @@ -1,74 +1,74 @@ error: `as` casting between raw pointers while changing only its constness - --> tests/ui/ptr_cast_constness.rs:15:41 + --> tests/ui/ptr_cast_constness.rs:16:45 | -LL | let _: &mut T = std::mem::transmute(p as *mut T); - | ^^^^^^^^^^^ help: try `pointer::cast_mut`, a safer alternative: `p.cast_mut()` +LL | let _: &mut T = std::mem::transmute(p as *mut T); + | ^^^^^^^^^^^ help: try `pointer::cast_mut`, a safer alternative: `p.cast_mut()` | = note: `-D clippy::ptr-cast-constness` implied by `-D warnings` = help: to override `-D warnings` add `#[allow(clippy::ptr_cast_constness)]` error: `as` casting between raw pointers while changing only its constness - --> tests/ui/ptr_cast_constness.rs:17:19 + --> tests/ui/ptr_cast_constness.rs:18:23 | -LL | let _ = &mut *(p as *mut T); - | ^^^^^^^^^^^^^ help: try `pointer::cast_mut`, a safer alternative: `p.cast_mut()` +LL | let _ = &mut *(p as *mut T); + | ^^^^^^^^^^^^^ help: try `pointer::cast_mut`, a safer alternative: `p.cast_mut()` error: `as` casting between raw pointers while changing only its constness - --> tests/ui/ptr_cast_constness.rs:33:17 + --> tests/ui/ptr_cast_constness.rs:35:17 | LL | let _ = *ptr_ptr as *mut u32; | ^^^^^^^^^^^^^^^^^^^^ help: try `pointer::cast_mut`, a safer alternative: `(*ptr_ptr).cast_mut()` error: `as` casting between raw pointers while changing only its constness - --> tests/ui/ptr_cast_constness.rs:37:13 + --> tests/ui/ptr_cast_constness.rs:39:13 | LL | let _ = ptr as *mut u32; | ^^^^^^^^^^^^^^^ help: try `pointer::cast_mut`, a safer alternative: `ptr.cast_mut()` error: `as` casting between raw pointers while changing only its constness - --> tests/ui/ptr_cast_constness.rs:39:13 + --> tests/ui/ptr_cast_constness.rs:41:13 | LL | let _ = mut_ptr as *const u32; | ^^^^^^^^^^^^^^^^^^^^^ help: try `pointer::cast_const`, a safer alternative: `mut_ptr.cast_const()` error: `as` casting between raw pointers while changing only its constness - --> tests/ui/ptr_cast_constness.rs:73:13 + --> tests/ui/ptr_cast_constness.rs:75:13 | LL | let _ = ptr as *mut u32; | ^^^^^^^^^^^^^^^ help: try `pointer::cast_mut`, a safer alternative: `ptr.cast_mut()` error: `as` casting between raw pointers while changing only its constness - --> tests/ui/ptr_cast_constness.rs:75:13 + --> tests/ui/ptr_cast_constness.rs:77:13 | LL | let _ = mut_ptr as *const u32; | ^^^^^^^^^^^^^^^^^^^^^ help: try `pointer::cast_const`, a safer alternative: `mut_ptr.cast_const()` error: `as` casting to make a const null pointer into a mutable null pointer - --> tests/ui/ptr_cast_constness.rs:82:13 + --> tests/ui/ptr_cast_constness.rs:84:13 | LL | let _ = ptr::null::() as *mut String; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use `null_mut()` directly instead: `std::ptr::null_mut::()` error: `as` casting to make a mutable null pointer into a const null pointer - --> tests/ui/ptr_cast_constness.rs:84:13 + --> tests/ui/ptr_cast_constness.rs:86:13 | LL | let _ = ptr::null_mut::() as *const u32; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use `null()` directly instead: `std::ptr::null::()` error: changing constness of a null pointer - --> tests/ui/ptr_cast_constness.rs:86:13 + --> tests/ui/ptr_cast_constness.rs:88:13 | LL | let _ = ptr::null::().cast_mut(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use `null_mut()` directly instead: `std::ptr::null_mut::()` error: changing constness of a null pointer - --> tests/ui/ptr_cast_constness.rs:88:13 + --> tests/ui/ptr_cast_constness.rs:90:13 | LL | let _ = ptr::null_mut::().cast_const(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use `null()` directly instead: `std::ptr::null::()` error: `as` casting to make a const null pointer into a mutable null pointer - --> tests/ui/ptr_cast_constness.rs:92:21 + --> tests/ui/ptr_cast_constness.rs:94:21 | LL | let _ = inline!(ptr::null::() as *mut u32); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use `null_mut()` directly instead: `std::ptr::null_mut::()` @@ -76,12 +76,18 @@ LL | let _ = inline!(ptr::null::() as *mut u32); = note: this error originates in the macro `__inline_mac_fn_null_pointers` (in Nightly builds, run with -Z macro-backtrace for more info) error: changing constness of a null pointer - --> tests/ui/ptr_cast_constness.rs:94:21 + --> tests/ui/ptr_cast_constness.rs:96:21 | LL | let _ = inline!(ptr::null::().cast_mut()); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use `null_mut()` directly instead: `std::ptr::null_mut::()` | = note: this error originates in the macro `__inline_mac_fn_null_pointers` (in Nightly builds, run with -Z macro-backtrace for more info) -error: aborting due to 13 previous errors +error: `as` casting between raw pointers while changing only its constness + --> tests/ui/ptr_cast_constness.rs:106:13 + | +LL | let _ = std::ptr::addr_of_mut!(local) as *const _; + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try `pointer::cast_const`, a safer alternative: `std::ptr::addr_of_mut!(local).cast_const()` + +error: aborting due to 14 previous errors diff --git a/src/tools/clippy/tests/ui/ptr_eq.fixed b/src/tools/clippy/tests/ui/ptr_eq.fixed index df6305ed497e8..9629b3eea5870 100644 --- a/src/tools/clippy/tests/ui/ptr_eq.fixed +++ b/src/tools/clippy/tests/ui/ptr_eq.fixed @@ -4,6 +4,9 @@ macro_rules! mac { ($a:expr, $b:expr) => { $a as *const _ as usize == $b as *const _ as usize }; + (cast $a:expr) => { + $a as *const [i32; 3] + }; } macro_rules! another_mac { @@ -20,23 +23,25 @@ fn main() { //~^ ptr_eq let _ = std::ptr::eq(a, b); //~^ ptr_eq - let _ = std::ptr::eq(a.as_ptr(), b as *const _); - //~^ ptr_eq - let _ = std::ptr::eq(a.as_ptr(), b.as_ptr()); - //~^ ptr_eq - // Do not lint + // Do not lint: the rhs conversion is needed + let _ = a.as_ptr() == b as *const _; + + // Do not lint: we have two raw pointers already + let _ = a.as_ptr() == b.as_ptr(); + // Do not lint let _ = mac!(a, b); let _ = another_mac!(a, b); let a = &mut [1, 2, 3]; let b = &mut [1, 2, 3]; - let _ = std::ptr::eq(a.as_mut_ptr(), b as *mut [i32] as *mut _); - //~^ ptr_eq - let _ = std::ptr::eq(a.as_mut_ptr(), b.as_mut_ptr()); - //~^ ptr_eq + // Do not lint: the rhs conversion is needed + let _ = a.as_mut_ptr() == b as *mut [i32] as *mut _; + + // Do not lint: we have two raw pointers already + let _ = a.as_mut_ptr() == b.as_mut_ptr(); let _ = a == b; let _ = core::ptr::eq(a, b); @@ -48,7 +53,15 @@ fn main() { let _ = !std::ptr::eq(x, y); //~^ ptr_eq - #[allow(clippy::eq_op)] - let _issue14337 = std::ptr::eq(main as *const (), main as *const ()); + #[expect(clippy::eq_op)] + // Do not lint: casts are needed to not change type + let _issue14337 = main as *const () == main as *const (); + + // Do not peel the content of macros + let _ = std::ptr::eq(mac!(cast a), mac!(cast b)); + //~^ ptr_eq + + // Do not peel the content of macros + let _ = std::ptr::eq(mac!(cast a), mac!(cast b)); //~^ ptr_eq } diff --git a/src/tools/clippy/tests/ui/ptr_eq.rs b/src/tools/clippy/tests/ui/ptr_eq.rs index 0ed0ff0d13716..2b741d8df4684 100644 --- a/src/tools/clippy/tests/ui/ptr_eq.rs +++ b/src/tools/clippy/tests/ui/ptr_eq.rs @@ -4,6 +4,9 @@ macro_rules! mac { ($a:expr, $b:expr) => { $a as *const _ as usize == $b as *const _ as usize }; + (cast $a:expr) => { + $a as *const [i32; 3] + }; } macro_rules! another_mac { @@ -20,23 +23,25 @@ fn main() { //~^ ptr_eq let _ = a as *const _ == b as *const _; //~^ ptr_eq + + // Do not lint: the rhs conversion is needed let _ = a.as_ptr() == b as *const _; - //~^ ptr_eq + + // Do not lint: we have two raw pointers already let _ = a.as_ptr() == b.as_ptr(); - //~^ ptr_eq // Do not lint - let _ = mac!(a, b); let _ = another_mac!(a, b); let a = &mut [1, 2, 3]; let b = &mut [1, 2, 3]; + // Do not lint: the rhs conversion is needed let _ = a.as_mut_ptr() == b as *mut [i32] as *mut _; - //~^ ptr_eq + + // Do not lint: we have two raw pointers already let _ = a.as_mut_ptr() == b.as_mut_ptr(); - //~^ ptr_eq let _ = a == b; let _ = core::ptr::eq(a, b); @@ -48,7 +53,15 @@ fn main() { let _ = x as *const u32 != y as *mut u32 as *const u32; //~^ ptr_eq - #[allow(clippy::eq_op)] + #[expect(clippy::eq_op)] + // Do not lint: casts are needed to not change type let _issue14337 = main as *const () == main as *const (); + + // Do not peel the content of macros + let _ = mac!(cast a) as *const _ == mac!(cast b) as *const _; + //~^ ptr_eq + + // Do not peel the content of macros + let _ = mac!(cast a) as *const _ == mac!(cast b) as *const _; //~^ ptr_eq } diff --git a/src/tools/clippy/tests/ui/ptr_eq.stderr b/src/tools/clippy/tests/ui/ptr_eq.stderr index 33190df284a3f..e7340624b5950 100644 --- a/src/tools/clippy/tests/ui/ptr_eq.stderr +++ b/src/tools/clippy/tests/ui/ptr_eq.stderr @@ -1,5 +1,5 @@ error: use `std::ptr::eq` when comparing raw pointers - --> tests/ui/ptr_eq.rs:19:13 + --> tests/ui/ptr_eq.rs:22:13 | LL | let _ = a as *const _ as usize == b as *const _ as usize; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `std::ptr::eq(a, b)` @@ -8,52 +8,34 @@ LL | let _ = a as *const _ as usize == b as *const _ as usize; = help: to override `-D warnings` add `#[allow(clippy::ptr_eq)]` error: use `std::ptr::eq` when comparing raw pointers - --> tests/ui/ptr_eq.rs:21:13 + --> tests/ui/ptr_eq.rs:24:13 | LL | let _ = a as *const _ == b as *const _; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `std::ptr::eq(a, b)` error: use `std::ptr::eq` when comparing raw pointers - --> tests/ui/ptr_eq.rs:23:13 - | -LL | let _ = a.as_ptr() == b as *const _; - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `std::ptr::eq(a.as_ptr(), b as *const _)` - -error: use `std::ptr::eq` when comparing raw pointers - --> tests/ui/ptr_eq.rs:25:13 - | -LL | let _ = a.as_ptr() == b.as_ptr(); - | ^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `std::ptr::eq(a.as_ptr(), b.as_ptr())` - -error: use `std::ptr::eq` when comparing raw pointers - --> tests/ui/ptr_eq.rs:36:13 - | -LL | let _ = a.as_mut_ptr() == b as *mut [i32] as *mut _; - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `std::ptr::eq(a.as_mut_ptr(), b as *mut [i32] as *mut _)` - -error: use `std::ptr::eq` when comparing raw pointers - --> tests/ui/ptr_eq.rs:38:13 - | -LL | let _ = a.as_mut_ptr() == b.as_mut_ptr(); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `std::ptr::eq(a.as_mut_ptr(), b.as_mut_ptr())` - -error: use `std::ptr::eq` when comparing raw pointers - --> tests/ui/ptr_eq.rs:45:13 + --> tests/ui/ptr_eq.rs:50:13 | LL | let _ = x as *const u32 == y as *mut u32 as *const u32; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `std::ptr::eq(x, y)` error: use `std::ptr::eq` when comparing raw pointers - --> tests/ui/ptr_eq.rs:48:13 + --> tests/ui/ptr_eq.rs:53:13 | LL | let _ = x as *const u32 != y as *mut u32 as *const u32; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `!std::ptr::eq(x, y)` error: use `std::ptr::eq` when comparing raw pointers - --> tests/ui/ptr_eq.rs:52:23 + --> tests/ui/ptr_eq.rs:61:13 + | +LL | let _ = mac!(cast a) as *const _ == mac!(cast b) as *const _; + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `std::ptr::eq(mac!(cast a), mac!(cast b))` + +error: use `std::ptr::eq` when comparing raw pointers + --> tests/ui/ptr_eq.rs:65:13 | -LL | let _issue14337 = main as *const () == main as *const (); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `std::ptr::eq(main as *const (), main as *const ())` +LL | let _ = mac!(cast a) as *const _ == mac!(cast b) as *const _; + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `std::ptr::eq(mac!(cast a), mac!(cast b))` -error: aborting due to 9 previous errors +error: aborting due to 6 previous errors diff --git a/src/tools/clippy/tests/ui/ptr_eq_no_std.fixed b/src/tools/clippy/tests/ui/ptr_eq_no_std.fixed index d8ee4ea88f843..48cbad62e1a36 100644 --- a/src/tools/clippy/tests/ui/ptr_eq_no_std.fixed +++ b/src/tools/clippy/tests/ui/ptr_eq_no_std.fixed @@ -32,23 +32,25 @@ fn main() { //~^ ptr_eq let _ = core::ptr::eq(a, b); //~^ ptr_eq - let _ = core::ptr::eq(a.as_ptr(), b as *const _); - //~^ ptr_eq - let _ = core::ptr::eq(a.as_ptr(), b.as_ptr()); - //~^ ptr_eq - // Do not lint + // Do not lint: the rhs conversion is needed + let _ = a.as_ptr() == b as *const _; + + // Do not lint: we have two raw pointers already + let _ = a.as_ptr() == b.as_ptr(); + // Do not lint let _ = mac!(a, b); let _ = another_mac!(a, b); let a = &mut [1, 2, 3]; let b = &mut [1, 2, 3]; - let _ = core::ptr::eq(a.as_mut_ptr(), b as *mut [i32] as *mut _); - //~^ ptr_eq - let _ = core::ptr::eq(a.as_mut_ptr(), b.as_mut_ptr()); - //~^ ptr_eq + // Do not lint: the rhs conversion is needed + let _ = a.as_mut_ptr() == b as *mut [i32] as *mut _; + + // Do not lint: we have two raw pointers already + let _ = a.as_mut_ptr() == b.as_mut_ptr(); let _ = a == b; let _ = core::ptr::eq(a, b); diff --git a/src/tools/clippy/tests/ui/ptr_eq_no_std.rs b/src/tools/clippy/tests/ui/ptr_eq_no_std.rs index a236314c29b77..3827178640eea 100644 --- a/src/tools/clippy/tests/ui/ptr_eq_no_std.rs +++ b/src/tools/clippy/tests/ui/ptr_eq_no_std.rs @@ -32,23 +32,25 @@ fn main() { //~^ ptr_eq let _ = a as *const _ == b as *const _; //~^ ptr_eq + + // Do not lint: the rhs conversion is needed let _ = a.as_ptr() == b as *const _; - //~^ ptr_eq + + // Do not lint: we have two raw pointers already let _ = a.as_ptr() == b.as_ptr(); - //~^ ptr_eq // Do not lint - let _ = mac!(a, b); let _ = another_mac!(a, b); let a = &mut [1, 2, 3]; let b = &mut [1, 2, 3]; + // Do not lint: the rhs conversion is needed let _ = a.as_mut_ptr() == b as *mut [i32] as *mut _; - //~^ ptr_eq + + // Do not lint: we have two raw pointers already let _ = a.as_mut_ptr() == b.as_mut_ptr(); - //~^ ptr_eq let _ = a == b; let _ = core::ptr::eq(a, b); diff --git a/src/tools/clippy/tests/ui/ptr_eq_no_std.stderr b/src/tools/clippy/tests/ui/ptr_eq_no_std.stderr index 5b8135dc8e8bc..8c7b1ff76661f 100644 --- a/src/tools/clippy/tests/ui/ptr_eq_no_std.stderr +++ b/src/tools/clippy/tests/ui/ptr_eq_no_std.stderr @@ -13,29 +13,5 @@ error: use `core::ptr::eq` when comparing raw pointers LL | let _ = a as *const _ == b as *const _; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `core::ptr::eq(a, b)` -error: use `core::ptr::eq` when comparing raw pointers - --> tests/ui/ptr_eq_no_std.rs:35:13 - | -LL | let _ = a.as_ptr() == b as *const _; - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `core::ptr::eq(a.as_ptr(), b as *const _)` - -error: use `core::ptr::eq` when comparing raw pointers - --> tests/ui/ptr_eq_no_std.rs:37:13 - | -LL | let _ = a.as_ptr() == b.as_ptr(); - | ^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `core::ptr::eq(a.as_ptr(), b.as_ptr())` - -error: use `core::ptr::eq` when comparing raw pointers - --> tests/ui/ptr_eq_no_std.rs:48:13 - | -LL | let _ = a.as_mut_ptr() == b as *mut [i32] as *mut _; - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `core::ptr::eq(a.as_mut_ptr(), b as *mut [i32] as *mut _)` - -error: use `core::ptr::eq` when comparing raw pointers - --> tests/ui/ptr_eq_no_std.rs:50:13 - | -LL | let _ = a.as_mut_ptr() == b.as_mut_ptr(); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `core::ptr::eq(a.as_mut_ptr(), b.as_mut_ptr())` - -error: aborting due to 6 previous errors +error: aborting due to 2 previous errors diff --git a/src/tools/clippy/tests/ui/question_mark.fixed b/src/tools/clippy/tests/ui/question_mark.fixed index fff41f578284d..507bc2b29d862 100644 --- a/src/tools/clippy/tests/ui/question_mark.fixed +++ b/src/tools/clippy/tests/ui/question_mark.fixed @@ -3,6 +3,8 @@ #![allow(dead_code)] #![allow(clippy::unnecessary_wraps)] +use std::sync::MutexGuard; + fn some_func(a: Option) -> Option { a?; @@ -299,6 +301,11 @@ fn pattern() -> Result<(), PatternedError> { res } +fn expect_expr(a: Option) -> Option { + #[expect(clippy::needless_question_mark)] + Some(a?) +} + fn main() {} // `?` is not the same as `return None;` if inside of a try block @@ -430,3 +437,9 @@ fn msrv_1_13(arg: Option) -> Option { println!("{}", val); Some(val) } + +fn issue_14615(a: MutexGuard>) -> Option { + let a = (*a)?; + //~^^^ question_mark + Some(format!("{a}")) +} diff --git a/src/tools/clippy/tests/ui/question_mark.rs b/src/tools/clippy/tests/ui/question_mark.rs index c71c8ee984edd..64b51b849ede0 100644 --- a/src/tools/clippy/tests/ui/question_mark.rs +++ b/src/tools/clippy/tests/ui/question_mark.rs @@ -3,6 +3,8 @@ #![allow(dead_code)] #![allow(clippy::unnecessary_wraps)] +use std::sync::MutexGuard; + fn some_func(a: Option) -> Option { if a.is_none() { //~^ question_mark @@ -369,6 +371,11 @@ fn pattern() -> Result<(), PatternedError> { res } +fn expect_expr(a: Option) -> Option { + #[expect(clippy::needless_question_mark)] + Some(a?) +} + fn main() {} // `?` is not the same as `return None;` if inside of a try block @@ -524,3 +531,11 @@ fn msrv_1_13(arg: Option) -> Option { println!("{}", val); Some(val) } + +fn issue_14615(a: MutexGuard>) -> Option { + let Some(a) = *a else { + return None; + }; + //~^^^ question_mark + Some(format!("{a}")) +} diff --git a/src/tools/clippy/tests/ui/question_mark.stderr b/src/tools/clippy/tests/ui/question_mark.stderr index 183b8866a7481..d8ce4420aeeb6 100644 --- a/src/tools/clippy/tests/ui/question_mark.stderr +++ b/src/tools/clippy/tests/ui/question_mark.stderr @@ -1,5 +1,5 @@ error: this block may be rewritten with the `?` operator - --> tests/ui/question_mark.rs:7:5 + --> tests/ui/question_mark.rs:9:5 | LL | / if a.is_none() { LL | | @@ -11,7 +11,7 @@ LL | | } = help: to override `-D warnings` add `#[allow(clippy::question_mark)]` error: this block may be rewritten with the `?` operator - --> tests/ui/question_mark.rs:53:9 + --> tests/ui/question_mark.rs:55:9 | LL | / if (self.opt).is_none() { LL | | @@ -20,7 +20,7 @@ LL | | } | |_________^ help: replace it with: `(self.opt)?;` error: this block may be rewritten with the `?` operator - --> tests/ui/question_mark.rs:58:9 + --> tests/ui/question_mark.rs:60:9 | LL | / if self.opt.is_none() { LL | | @@ -29,7 +29,7 @@ LL | | } | |_________^ help: replace it with: `self.opt?;` error: this block may be rewritten with the `?` operator - --> tests/ui/question_mark.rs:63:17 + --> tests/ui/question_mark.rs:65:17 | LL | let _ = if self.opt.is_none() { | _________________^ @@ -41,7 +41,7 @@ LL | | }; | |_________^ help: replace it with: `Some(self.opt?)` error: this block may be rewritten with the `?` operator - --> tests/ui/question_mark.rs:70:17 + --> tests/ui/question_mark.rs:72:17 | LL | let _ = if let Some(x) = self.opt { | _________________^ @@ -53,7 +53,7 @@ LL | | }; | |_________^ help: replace it with: `self.opt?` error: this block may be rewritten with the `?` operator - --> tests/ui/question_mark.rs:88:9 + --> tests/ui/question_mark.rs:90:9 | LL | / if self.opt.is_none() { LL | | @@ -62,7 +62,7 @@ LL | | } | |_________^ help: replace it with: `self.opt.as_ref()?;` error: this block may be rewritten with the `?` operator - --> tests/ui/question_mark.rs:97:9 + --> tests/ui/question_mark.rs:99:9 | LL | / if self.opt.is_none() { LL | | @@ -71,7 +71,7 @@ LL | | } | |_________^ help: replace it with: `self.opt.as_ref()?;` error: this block may be rewritten with the `?` operator - --> tests/ui/question_mark.rs:106:9 + --> tests/ui/question_mark.rs:108:9 | LL | / if self.opt.is_none() { LL | | @@ -80,7 +80,7 @@ LL | | } | |_________^ help: replace it with: `self.opt.as_ref()?;` error: this block may be rewritten with the `?` operator - --> tests/ui/question_mark.rs:114:26 + --> tests/ui/question_mark.rs:116:26 | LL | let v: &Vec<_> = if let Some(ref v) = self.opt { | __________________________^ @@ -92,7 +92,7 @@ LL | | }; | |_________^ help: replace it with: `self.opt.as_ref()?` error: this block may be rewritten with the `?` operator - --> tests/ui/question_mark.rs:125:17 + --> tests/ui/question_mark.rs:127:17 | LL | let v = if let Some(v) = self.opt { | _________________^ @@ -104,7 +104,7 @@ LL | | }; | |_________^ help: replace it with: `self.opt?` error: this block may be rewritten with the `?` operator - --> tests/ui/question_mark.rs:147:5 + --> tests/ui/question_mark.rs:149:5 | LL | / if f().is_none() { LL | | @@ -113,7 +113,7 @@ LL | | } | |_____^ help: replace it with: `f()?;` error: this `match` expression can be replaced with `?` - --> tests/ui/question_mark.rs:152:16 + --> tests/ui/question_mark.rs:154:16 | LL | let _val = match f() { | ________________^ @@ -124,7 +124,7 @@ LL | | }; | |_____^ help: try instead: `f()?` error: this `match` expression can be replaced with `?` - --> tests/ui/question_mark.rs:163:5 + --> tests/ui/question_mark.rs:165:5 | LL | / match f() { LL | | @@ -134,7 +134,7 @@ LL | | }; | |_____^ help: try instead: `f()?` error: this `match` expression can be replaced with `?` - --> tests/ui/question_mark.rs:169:5 + --> tests/ui/question_mark.rs:171:5 | LL | / match opt_none!() { LL | | @@ -144,13 +144,13 @@ LL | | }; | |_____^ help: try instead: `opt_none!()?` error: this block may be rewritten with the `?` operator - --> tests/ui/question_mark.rs:196:13 + --> tests/ui/question_mark.rs:198:13 | LL | let _ = if let Ok(x) = x { x } else { return x }; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: replace it with: `x?` error: this block may be rewritten with the `?` operator - --> tests/ui/question_mark.rs:199:5 + --> tests/ui/question_mark.rs:201:5 | LL | / if x.is_err() { LL | | @@ -159,7 +159,7 @@ LL | | } | |_____^ help: replace it with: `x?;` error: this `match` expression can be replaced with `?` - --> tests/ui/question_mark.rs:204:16 + --> tests/ui/question_mark.rs:206:16 | LL | let _val = match func_returning_result() { | ________________^ @@ -170,7 +170,7 @@ LL | | }; | |_____^ help: try instead: `func_returning_result()?` error: this `match` expression can be replaced with `?` - --> tests/ui/question_mark.rs:210:5 + --> tests/ui/question_mark.rs:212:5 | LL | / match func_returning_result() { LL | | @@ -180,7 +180,7 @@ LL | | }; | |_____^ help: try instead: `func_returning_result()?` error: this block may be rewritten with the `?` operator - --> tests/ui/question_mark.rs:302:5 + --> tests/ui/question_mark.rs:304:5 | LL | / if let Err(err) = func_returning_result() { LL | | @@ -189,7 +189,7 @@ LL | | } | |_____^ help: replace it with: `func_returning_result()?;` error: this block may be rewritten with the `?` operator - --> tests/ui/question_mark.rs:310:5 + --> tests/ui/question_mark.rs:312:5 | LL | / if let Err(err) = func_returning_result() { LL | | @@ -198,7 +198,7 @@ LL | | } | |_____^ help: replace it with: `func_returning_result()?;` error: this block may be rewritten with the `?` operator - --> tests/ui/question_mark.rs:388:13 + --> tests/ui/question_mark.rs:395:13 | LL | / if a.is_none() { LL | | @@ -208,7 +208,7 @@ LL | | } | |_____________^ help: replace it with: `a?;` error: this `let...else` may be rewritten with the `?` operator - --> tests/ui/question_mark.rs:449:5 + --> tests/ui/question_mark.rs:456:5 | LL | / let Some(v) = bar.foo.owned.clone() else { LL | | return None; @@ -216,7 +216,7 @@ LL | | }; | |______^ help: replace it with: `let v = bar.foo.owned.clone()?;` error: this `let...else` may be rewritten with the `?` operator - --> tests/ui/question_mark.rs:464:5 + --> tests/ui/question_mark.rs:471:5 | LL | / let Some(ref x) = foo.opt_x else { LL | | return None; @@ -224,7 +224,7 @@ LL | | }; | |______^ help: replace it with: `let x = foo.opt_x.as_ref()?;` error: this `let...else` may be rewritten with the `?` operator - --> tests/ui/question_mark.rs:474:5 + --> tests/ui/question_mark.rs:481:5 | LL | / let Some(ref mut x) = foo.opt_x else { LL | | return None; @@ -232,7 +232,7 @@ LL | | }; | |______^ help: replace it with: `let x = foo.opt_x.as_mut()?;` error: this `let...else` may be rewritten with the `?` operator - --> tests/ui/question_mark.rs:485:5 + --> tests/ui/question_mark.rs:492:5 | LL | / let Some(ref x @ ref y) = foo.opt_x else { LL | | return None; @@ -240,7 +240,7 @@ LL | | }; | |______^ help: replace it with: `let x @ y = foo.opt_x.as_ref()?;` error: this `let...else` may be rewritten with the `?` operator - --> tests/ui/question_mark.rs:489:5 + --> tests/ui/question_mark.rs:496:5 | LL | / let Some(ref x @ WrapperStructWithString(_)) = bar else { LL | | return None; @@ -248,7 +248,7 @@ LL | | }; | |______^ help: replace it with: `let x @ &WrapperStructWithString(_) = bar.as_ref()?;` error: this `let...else` may be rewritten with the `?` operator - --> tests/ui/question_mark.rs:493:5 + --> tests/ui/question_mark.rs:500:5 | LL | / let Some(ref mut x @ WrapperStructWithString(_)) = bar else { LL | | return None; @@ -256,7 +256,7 @@ LL | | }; | |______^ help: replace it with: `let x @ &mut WrapperStructWithString(_) = bar.as_mut()?;` error: this block may be rewritten with the `?` operator - --> tests/ui/question_mark.rs:515:5 + --> tests/ui/question_mark.rs:522:5 | LL | / if arg.is_none() { LL | | @@ -265,7 +265,7 @@ LL | | } | |_____^ help: replace it with: `arg?;` error: this `match` expression can be replaced with `?` - --> tests/ui/question_mark.rs:519:15 + --> tests/ui/question_mark.rs:526:15 | LL | let val = match arg { | _______________^ @@ -275,5 +275,13 @@ LL | | None => return None, LL | | }; | |_____^ help: try instead: `arg?` -error: aborting due to 29 previous errors +error: this `let...else` may be rewritten with the `?` operator + --> tests/ui/question_mark.rs:536:5 + | +LL | / let Some(a) = *a else { +LL | | return None; +LL | | }; + | |______^ help: replace it with: `let a = (*a)?;` + +error: aborting due to 30 previous errors diff --git a/src/tools/clippy/tests/ui/question_mark_used.stderr b/src/tools/clippy/tests/ui/question_mark_used.stderr index 53cb59c021667..82f0d32504077 100644 --- a/src/tools/clippy/tests/ui/question_mark_used.stderr +++ b/src/tools/clippy/tests/ui/question_mark_used.stderr @@ -1,4 +1,4 @@ -error: question mark operator was used +error: the `?` operator was used --> tests/ui/question_mark_used.rs:11:5 | LL | other_function()?; diff --git a/src/tools/clippy/tests/ui/redundant_allocation.rs b/src/tools/clippy/tests/ui/redundant_allocation.rs index 0562f7dcc7614..832f147c6ed53 100644 --- a/src/tools/clippy/tests/ui/redundant_allocation.rs +++ b/src/tools/clippy/tests/ui/redundant_allocation.rs @@ -1,4 +1,3 @@ -#![warn(clippy::all)] #![allow(clippy::boxed_local, clippy::disallowed_names)] pub struct MyStruct; diff --git a/src/tools/clippy/tests/ui/redundant_allocation.stderr b/src/tools/clippy/tests/ui/redundant_allocation.stderr index 44d30f95d7bc3..886ed2088c67b 100644 --- a/src/tools/clippy/tests/ui/redundant_allocation.stderr +++ b/src/tools/clippy/tests/ui/redundant_allocation.stderr @@ -1,5 +1,5 @@ error: usage of `Box>` - --> tests/ui/redundant_allocation.rs:16:30 + --> tests/ui/redundant_allocation.rs:15:30 | LL | pub fn box_test6(foo: Box>) {} | ^^^^^^^^^^ @@ -10,7 +10,7 @@ LL | pub fn box_test6(foo: Box>) {} = help: to override `-D warnings` add `#[allow(clippy::redundant_allocation)]` error: usage of `Box>` - --> tests/ui/redundant_allocation.rs:19:30 + --> tests/ui/redundant_allocation.rs:18:30 | LL | pub fn box_test7(foo: Box>) {} | ^^^^^^^^^^^ @@ -19,7 +19,7 @@ LL | pub fn box_test7(foo: Box>) {} = help: consider using just `Box` or `Arc` error: usage of `Box>>` - --> tests/ui/redundant_allocation.rs:22:27 + --> tests/ui/redundant_allocation.rs:21:27 | LL | pub fn box_test8() -> Box>> { | ^^^^^^^^^^^^^^^^^^^^ @@ -28,7 +28,7 @@ LL | pub fn box_test8() -> Box>> { = help: consider using just `Box>` or `Rc>` error: usage of `Box>` - --> tests/ui/redundant_allocation.rs:28:30 + --> tests/ui/redundant_allocation.rs:27:30 | LL | pub fn box_test9(foo: Box>) -> Box>> { | ^^^^^^^^^^^ @@ -37,7 +37,7 @@ LL | pub fn box_test9(foo: Box>) -> Box>> { = help: consider using just `Box` or `Arc` error: usage of `Box>>` - --> tests/ui/redundant_allocation.rs:28:46 + --> tests/ui/redundant_allocation.rs:27:46 | LL | pub fn box_test9(foo: Box>) -> Box>> { | ^^^^^^^^^^^^^^^^^ @@ -46,7 +46,7 @@ LL | pub fn box_test9(foo: Box>) -> Box>> { = help: consider using just `Box>` or `Arc>` error: usage of `Rc>` - --> tests/ui/redundant_allocation.rs:42:24 + --> tests/ui/redundant_allocation.rs:41:24 | LL | pub fn rc_test5(a: Rc>) {} | ^^^^^^^^^^^^^ @@ -55,7 +55,7 @@ LL | pub fn rc_test5(a: Rc>) {} = help: consider using just `Rc` or `Box` error: usage of `Rc>` - --> tests/ui/redundant_allocation.rs:45:24 + --> tests/ui/redundant_allocation.rs:44:24 | LL | pub fn rc_test7(a: Rc>) {} | ^^^^^^^^^^^^^ @@ -64,7 +64,7 @@ LL | pub fn rc_test7(a: Rc>) {} = help: consider using just `Rc` or `Arc` error: usage of `Rc>>` - --> tests/ui/redundant_allocation.rs:48:26 + --> tests/ui/redundant_allocation.rs:47:26 | LL | pub fn rc_test8() -> Rc>> { | ^^^^^^^^^^^^^^^^^^^^ @@ -73,7 +73,7 @@ LL | pub fn rc_test8() -> Rc>> { = help: consider using just `Rc>` or `Box>` error: usage of `Rc>` - --> tests/ui/redundant_allocation.rs:54:29 + --> tests/ui/redundant_allocation.rs:53:29 | LL | pub fn rc_test9(foo: Rc>) -> Rc>> { | ^^^^^^^^^^ @@ -82,7 +82,7 @@ LL | pub fn rc_test9(foo: Rc>) -> Rc>> { = help: consider using just `Rc` or `Arc` error: usage of `Rc>>` - --> tests/ui/redundant_allocation.rs:54:44 + --> tests/ui/redundant_allocation.rs:53:44 | LL | pub fn rc_test9(foo: Rc>) -> Rc>> { | ^^^^^^^^^^^^^^^^ @@ -91,7 +91,7 @@ LL | pub fn rc_test9(foo: Rc>) -> Rc>> { = help: consider using just `Rc>` or `Arc>` error: usage of `Arc>` - --> tests/ui/redundant_allocation.rs:68:25 + --> tests/ui/redundant_allocation.rs:67:25 | LL | pub fn arc_test5(a: Arc>) {} | ^^^^^^^^^^^^^^ @@ -100,7 +100,7 @@ LL | pub fn arc_test5(a: Arc>) {} = help: consider using just `Arc` or `Box` error: usage of `Arc>` - --> tests/ui/redundant_allocation.rs:71:25 + --> tests/ui/redundant_allocation.rs:70:25 | LL | pub fn arc_test6(a: Arc>) {} | ^^^^^^^^^^^^^ @@ -109,7 +109,7 @@ LL | pub fn arc_test6(a: Arc>) {} = help: consider using just `Arc` or `Rc` error: usage of `Arc>>` - --> tests/ui/redundant_allocation.rs:74:27 + --> tests/ui/redundant_allocation.rs:73:27 | LL | pub fn arc_test8() -> Arc>> { | ^^^^^^^^^^^^^^^^^^^^^ @@ -118,7 +118,7 @@ LL | pub fn arc_test8() -> Arc>> { = help: consider using just `Arc>` or `Box>` error: usage of `Arc>` - --> tests/ui/redundant_allocation.rs:80:30 + --> tests/ui/redundant_allocation.rs:79:30 | LL | pub fn arc_test9(foo: Arc>) -> Arc>> { | ^^^^^^^^^^ @@ -127,7 +127,7 @@ LL | pub fn arc_test9(foo: Arc>) -> Arc>> { = help: consider using just `Arc` or `Rc` error: usage of `Arc>>` - --> tests/ui/redundant_allocation.rs:80:45 + --> tests/ui/redundant_allocation.rs:79:45 | LL | pub fn arc_test9(foo: Arc>) -> Arc>> { | ^^^^^^^^^^^^^^^^ @@ -136,7 +136,7 @@ LL | pub fn arc_test9(foo: Arc>) -> Arc>> { = help: consider using just `Arc>` or `Rc>` error: usage of `Rc>>` - --> tests/ui/redundant_allocation.rs:105:27 + --> tests/ui/redundant_allocation.rs:104:27 | LL | pub fn test_rc_box(_: Rc>>) {} | ^^^^^^^^^^^^^^^^^^^ @@ -145,7 +145,7 @@ LL | pub fn test_rc_box(_: Rc>>) {} = help: consider using just `Rc>` or `Box>` error: usage of `Rc>>` - --> tests/ui/redundant_allocation.rs:138:31 + --> tests/ui/redundant_allocation.rs:137:31 | LL | pub fn test_rc_box_str(_: Rc>>) {} | ^^^^^^^^^^^^^^^^^ @@ -154,7 +154,7 @@ LL | pub fn test_rc_box_str(_: Rc>>) {} = help: consider using just `Rc>` or `Box>` error: usage of `Rc>>` - --> tests/ui/redundant_allocation.rs:141:33 + --> tests/ui/redundant_allocation.rs:140:33 | LL | pub fn test_rc_box_slice(_: Rc>>) {} | ^^^^^^^^^^^^^^^^^^^^^ @@ -163,7 +163,7 @@ LL | pub fn test_rc_box_slice(_: Rc>>) {} = help: consider using just `Rc>` or `Box>` error: usage of `Rc>>` - --> tests/ui/redundant_allocation.rs:144:32 + --> tests/ui/redundant_allocation.rs:143:32 | LL | pub fn test_rc_box_path(_: Rc>>) {} | ^^^^^^^^^^^^^^^^^^ @@ -172,7 +172,7 @@ LL | pub fn test_rc_box_path(_: Rc>>) {} = help: consider using just `Rc>` or `Box>` error: usage of `Rc>>` - --> tests/ui/redundant_allocation.rs:147:34 + --> tests/ui/redundant_allocation.rs:146:34 | LL | pub fn test_rc_box_custom(_: Rc>>) {} | ^^^^^^^^^^^^^^^^^^^^^^ diff --git a/src/tools/clippy/tests/ui/redundant_allocation_fixable.fixed b/src/tools/clippy/tests/ui/redundant_allocation_fixable.fixed index 7773ba11f973e..dbc6c0794d1a7 100644 --- a/src/tools/clippy/tests/ui/redundant_allocation_fixable.fixed +++ b/src/tools/clippy/tests/ui/redundant_allocation_fixable.fixed @@ -1,7 +1,5 @@ -#![warn(clippy::all)] #![allow(clippy::boxed_local, clippy::needless_pass_by_value)] -#![allow(clippy::disallowed_names, unused_variables, dead_code)] -#![allow(unused_imports)] +#![allow(clippy::disallowed_names)] pub struct MyStruct; diff --git a/src/tools/clippy/tests/ui/redundant_allocation_fixable.rs b/src/tools/clippy/tests/ui/redundant_allocation_fixable.rs index fb86ed2b3cfdf..05b6429492ce7 100644 --- a/src/tools/clippy/tests/ui/redundant_allocation_fixable.rs +++ b/src/tools/clippy/tests/ui/redundant_allocation_fixable.rs @@ -1,7 +1,5 @@ -#![warn(clippy::all)] #![allow(clippy::boxed_local, clippy::needless_pass_by_value)] -#![allow(clippy::disallowed_names, unused_variables, dead_code)] -#![allow(unused_imports)] +#![allow(clippy::disallowed_names)] pub struct MyStruct; diff --git a/src/tools/clippy/tests/ui/redundant_allocation_fixable.stderr b/src/tools/clippy/tests/ui/redundant_allocation_fixable.stderr index ed8282cc82ce9..4073766887174 100644 --- a/src/tools/clippy/tests/ui/redundant_allocation_fixable.stderr +++ b/src/tools/clippy/tests/ui/redundant_allocation_fixable.stderr @@ -1,5 +1,5 @@ error: usage of `Box<&T>` - --> tests/ui/redundant_allocation_fixable.rs:23:30 + --> tests/ui/redundant_allocation_fixable.rs:21:30 | LL | pub fn box_test1(foo: Box<&T>) {} | ^^^^^^^ help: try: `&T` @@ -9,7 +9,7 @@ LL | pub fn box_test1(foo: Box<&T>) {} = help: to override `-D warnings` add `#[allow(clippy::redundant_allocation)]` error: usage of `Box<&MyStruct>` - --> tests/ui/redundant_allocation_fixable.rs:26:27 + --> tests/ui/redundant_allocation_fixable.rs:24:27 | LL | pub fn box_test2(foo: Box<&MyStruct>) {} | ^^^^^^^^^^^^^^ help: try: `&MyStruct` @@ -17,7 +17,7 @@ LL | pub fn box_test2(foo: Box<&MyStruct>) {} = note: `&MyStruct` is already a pointer, `Box<&MyStruct>` allocates a pointer on the heap error: usage of `Box<&MyEnum>` - --> tests/ui/redundant_allocation_fixable.rs:29:27 + --> tests/ui/redundant_allocation_fixable.rs:27:27 | LL | pub fn box_test3(foo: Box<&MyEnum>) {} | ^^^^^^^^^^^^ help: try: `&MyEnum` @@ -25,7 +25,7 @@ LL | pub fn box_test3(foo: Box<&MyEnum>) {} = note: `&MyEnum` is already a pointer, `Box<&MyEnum>` allocates a pointer on the heap error: usage of `Box>` - --> tests/ui/redundant_allocation_fixable.rs:34:30 + --> tests/ui/redundant_allocation_fixable.rs:32:30 | LL | pub fn box_test5(foo: Box>) {} | ^^^^^^^^^^^ help: try: `Box` @@ -33,7 +33,7 @@ LL | pub fn box_test5(foo: Box>) {} = note: `Box` is already on the heap, `Box>` makes an extra allocation error: usage of `Rc<&T>` - --> tests/ui/redundant_allocation_fixable.rs:44:29 + --> tests/ui/redundant_allocation_fixable.rs:42:29 | LL | pub fn rc_test1(foo: Rc<&T>) {} | ^^^^^^ help: try: `&T` @@ -41,7 +41,7 @@ LL | pub fn rc_test1(foo: Rc<&T>) {} = note: `&T` is already a pointer, `Rc<&T>` allocates a pointer on the heap error: usage of `Rc<&MyStruct>` - --> tests/ui/redundant_allocation_fixable.rs:47:26 + --> tests/ui/redundant_allocation_fixable.rs:45:26 | LL | pub fn rc_test2(foo: Rc<&MyStruct>) {} | ^^^^^^^^^^^^^ help: try: `&MyStruct` @@ -49,7 +49,7 @@ LL | pub fn rc_test2(foo: Rc<&MyStruct>) {} = note: `&MyStruct` is already a pointer, `Rc<&MyStruct>` allocates a pointer on the heap error: usage of `Rc<&MyEnum>` - --> tests/ui/redundant_allocation_fixable.rs:50:26 + --> tests/ui/redundant_allocation_fixable.rs:48:26 | LL | pub fn rc_test3(foo: Rc<&MyEnum>) {} | ^^^^^^^^^^^ help: try: `&MyEnum` @@ -57,7 +57,7 @@ LL | pub fn rc_test3(foo: Rc<&MyEnum>) {} = note: `&MyEnum` is already a pointer, `Rc<&MyEnum>` allocates a pointer on the heap error: usage of `Rc>` - --> tests/ui/redundant_allocation_fixable.rs:55:24 + --> tests/ui/redundant_allocation_fixable.rs:53:24 | LL | pub fn rc_test6(a: Rc>) {} | ^^^^^^^^^^^^ help: try: `Rc` @@ -65,7 +65,7 @@ LL | pub fn rc_test6(a: Rc>) {} = note: `Rc` is already on the heap, `Rc>` makes an extra allocation error: usage of `Arc<&T>` - --> tests/ui/redundant_allocation_fixable.rs:65:30 + --> tests/ui/redundant_allocation_fixable.rs:63:30 | LL | pub fn arc_test1(foo: Arc<&T>) {} | ^^^^^^^ help: try: `&T` @@ -73,7 +73,7 @@ LL | pub fn arc_test1(foo: Arc<&T>) {} = note: `&T` is already a pointer, `Arc<&T>` allocates a pointer on the heap error: usage of `Arc<&MyStruct>` - --> tests/ui/redundant_allocation_fixable.rs:68:27 + --> tests/ui/redundant_allocation_fixable.rs:66:27 | LL | pub fn arc_test2(foo: Arc<&MyStruct>) {} | ^^^^^^^^^^^^^^ help: try: `&MyStruct` @@ -81,7 +81,7 @@ LL | pub fn arc_test2(foo: Arc<&MyStruct>) {} = note: `&MyStruct` is already a pointer, `Arc<&MyStruct>` allocates a pointer on the heap error: usage of `Arc<&MyEnum>` - --> tests/ui/redundant_allocation_fixable.rs:71:27 + --> tests/ui/redundant_allocation_fixable.rs:69:27 | LL | pub fn arc_test3(foo: Arc<&MyEnum>) {} | ^^^^^^^^^^^^ help: try: `&MyEnum` @@ -89,7 +89,7 @@ LL | pub fn arc_test3(foo: Arc<&MyEnum>) {} = note: `&MyEnum` is already a pointer, `Arc<&MyEnum>` allocates a pointer on the heap error: usage of `Arc>` - --> tests/ui/redundant_allocation_fixable.rs:76:25 + --> tests/ui/redundant_allocation_fixable.rs:74:25 | LL | pub fn arc_test7(a: Arc>) {} | ^^^^^^^^^^^^^^ help: try: `Arc` diff --git a/src/tools/clippy/tests/ui/redundant_clone.fixed b/src/tools/clippy/tests/ui/redundant_clone.fixed index 23c00b34a00a8..c1c389f7c4ed2 100644 --- a/src/tools/clippy/tests/ui/redundant_clone.fixed +++ b/src/tools/clippy/tests/ui/redundant_clone.fixed @@ -259,3 +259,35 @@ fn false_negative_5707() { let _z = x.clone(); // pr 7346 can't lint on `x` drop(y); } + +mod issue10074 { + #[derive(Debug, Clone)] + enum MyEnum { + A = 1, + } + + fn false_positive_on_as() { + let e = MyEnum::A; + let v = e.clone() as u16; + + println!("{e:?}"); + println!("{v}"); + } +} + +mod issue13900 { + use std::fmt::Display; + + fn do_something(f: impl Display + Clone) -> String { + let g = f.clone(); + format!("{} + {}", f, g) + } + + fn regression() { + let mut a = String::new(); + let mut b = String::new(); + for _ in 1..10 { + b = a.clone(); + } + } +} diff --git a/src/tools/clippy/tests/ui/redundant_clone.rs b/src/tools/clippy/tests/ui/redundant_clone.rs index f9fe8ba0236d0..78d98762efc86 100644 --- a/src/tools/clippy/tests/ui/redundant_clone.rs +++ b/src/tools/clippy/tests/ui/redundant_clone.rs @@ -259,3 +259,35 @@ fn false_negative_5707() { let _z = x.clone(); // pr 7346 can't lint on `x` drop(y); } + +mod issue10074 { + #[derive(Debug, Clone)] + enum MyEnum { + A = 1, + } + + fn false_positive_on_as() { + let e = MyEnum::A; + let v = e.clone() as u16; + + println!("{e:?}"); + println!("{v}"); + } +} + +mod issue13900 { + use std::fmt::Display; + + fn do_something(f: impl Display + Clone) -> String { + let g = f.clone(); + format!("{} + {}", f, g) + } + + fn regression() { + let mut a = String::new(); + let mut b = String::new(); + for _ in 1..10 { + b = a.clone(); + } + } +} diff --git a/src/tools/clippy/tests/ui/redundant_pattern_matching_ipaddr.fixed b/src/tools/clippy/tests/ui/redundant_pattern_matching_ipaddr.fixed index 549c97d9534ad..1cec19ab8c99d 100644 --- a/src/tools/clippy/tests/ui/redundant_pattern_matching_ipaddr.fixed +++ b/src/tools/clippy/tests/ui/redundant_pattern_matching_ipaddr.fixed @@ -1,5 +1,4 @@ -#![warn(clippy::all, clippy::redundant_pattern_matching)] -#![allow(unused_must_use)] +#![warn(clippy::redundant_pattern_matching)] #![allow( clippy::match_like_matches_macro, clippy::needless_bool, diff --git a/src/tools/clippy/tests/ui/redundant_pattern_matching_ipaddr.rs b/src/tools/clippy/tests/ui/redundant_pattern_matching_ipaddr.rs index decb1396d56dd..123573a8602b6 100644 --- a/src/tools/clippy/tests/ui/redundant_pattern_matching_ipaddr.rs +++ b/src/tools/clippy/tests/ui/redundant_pattern_matching_ipaddr.rs @@ -1,5 +1,4 @@ -#![warn(clippy::all, clippy::redundant_pattern_matching)] -#![allow(unused_must_use)] +#![warn(clippy::redundant_pattern_matching)] #![allow( clippy::match_like_matches_macro, clippy::needless_bool, diff --git a/src/tools/clippy/tests/ui/redundant_pattern_matching_ipaddr.stderr b/src/tools/clippy/tests/ui/redundant_pattern_matching_ipaddr.stderr index 66d2cecdc0c91..3be7cf81afe95 100644 --- a/src/tools/clippy/tests/ui/redundant_pattern_matching_ipaddr.stderr +++ b/src/tools/clippy/tests/ui/redundant_pattern_matching_ipaddr.stderr @@ -1,5 +1,5 @@ error: redundant pattern matching, consider using `is_ipv4()` - --> tests/ui/redundant_pattern_matching_ipaddr.rs:15:12 + --> tests/ui/redundant_pattern_matching_ipaddr.rs:14:12 | LL | if let V4(_) = &ipaddr {} | -------^^^^^---------- help: try: `if ipaddr.is_ipv4()` @@ -8,43 +8,43 @@ LL | if let V4(_) = &ipaddr {} = help: to override `-D warnings` add `#[allow(clippy::redundant_pattern_matching)]` error: redundant pattern matching, consider using `is_ipv4()` - --> tests/ui/redundant_pattern_matching_ipaddr.rs:18:12 + --> tests/ui/redundant_pattern_matching_ipaddr.rs:17:12 | LL | if let V4(_) = V4(Ipv4Addr::LOCALHOST) {} | -------^^^^^-------------------------- help: try: `if V4(Ipv4Addr::LOCALHOST).is_ipv4()` error: redundant pattern matching, consider using `is_ipv6()` - --> tests/ui/redundant_pattern_matching_ipaddr.rs:21:12 + --> tests/ui/redundant_pattern_matching_ipaddr.rs:20:12 | LL | if let V6(_) = V6(Ipv6Addr::LOCALHOST) {} | -------^^^^^-------------------------- help: try: `if V6(Ipv6Addr::LOCALHOST).is_ipv6()` error: redundant pattern matching, consider using `is_ipv4()` - --> tests/ui/redundant_pattern_matching_ipaddr.rs:25:8 + --> tests/ui/redundant_pattern_matching_ipaddr.rs:24:8 | LL | if matches!(V4(Ipv4Addr::LOCALHOST), V4(_)) {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `V4(Ipv4Addr::LOCALHOST).is_ipv4()` error: redundant pattern matching, consider using `is_ipv6()` - --> tests/ui/redundant_pattern_matching_ipaddr.rs:29:8 + --> tests/ui/redundant_pattern_matching_ipaddr.rs:28:8 | LL | if matches!(V6(Ipv6Addr::LOCALHOST), V6(_)) {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `V6(Ipv6Addr::LOCALHOST).is_ipv6()` error: redundant pattern matching, consider using `is_ipv4()` - --> tests/ui/redundant_pattern_matching_ipaddr.rs:32:15 + --> tests/ui/redundant_pattern_matching_ipaddr.rs:31:15 | LL | while let V4(_) = V4(Ipv4Addr::LOCALHOST) {} | ----------^^^^^-------------------------- help: try: `while V4(Ipv4Addr::LOCALHOST).is_ipv4()` error: redundant pattern matching, consider using `is_ipv6()` - --> tests/ui/redundant_pattern_matching_ipaddr.rs:35:15 + --> tests/ui/redundant_pattern_matching_ipaddr.rs:34:15 | LL | while let V6(_) = V6(Ipv6Addr::LOCALHOST) {} | ----------^^^^^-------------------------- help: try: `while V6(Ipv6Addr::LOCALHOST).is_ipv6()` error: redundant pattern matching, consider using `is_ipv4()` - --> tests/ui/redundant_pattern_matching_ipaddr.rs:46:5 + --> tests/ui/redundant_pattern_matching_ipaddr.rs:45:5 | LL | / match V4(Ipv4Addr::LOCALHOST) { LL | | @@ -54,7 +54,7 @@ LL | | }; | |_____^ help: try: `V4(Ipv4Addr::LOCALHOST).is_ipv4()` error: redundant pattern matching, consider using `is_ipv6()` - --> tests/ui/redundant_pattern_matching_ipaddr.rs:52:5 + --> tests/ui/redundant_pattern_matching_ipaddr.rs:51:5 | LL | / match V4(Ipv4Addr::LOCALHOST) { LL | | @@ -64,7 +64,7 @@ LL | | }; | |_____^ help: try: `V4(Ipv4Addr::LOCALHOST).is_ipv6()` error: redundant pattern matching, consider using `is_ipv6()` - --> tests/ui/redundant_pattern_matching_ipaddr.rs:58:5 + --> tests/ui/redundant_pattern_matching_ipaddr.rs:57:5 | LL | / match V6(Ipv6Addr::LOCALHOST) { LL | | @@ -74,7 +74,7 @@ LL | | }; | |_____^ help: try: `V6(Ipv6Addr::LOCALHOST).is_ipv6()` error: redundant pattern matching, consider using `is_ipv4()` - --> tests/ui/redundant_pattern_matching_ipaddr.rs:64:5 + --> tests/ui/redundant_pattern_matching_ipaddr.rs:63:5 | LL | / match V6(Ipv6Addr::LOCALHOST) { LL | | @@ -84,49 +84,49 @@ LL | | }; | |_____^ help: try: `V6(Ipv6Addr::LOCALHOST).is_ipv4()` error: redundant pattern matching, consider using `is_ipv4()` - --> tests/ui/redundant_pattern_matching_ipaddr.rs:70:20 + --> tests/ui/redundant_pattern_matching_ipaddr.rs:69:20 | LL | let _ = if let V4(_) = V4(Ipv4Addr::LOCALHOST) { | -------^^^^^-------------------------- help: try: `if V4(Ipv4Addr::LOCALHOST).is_ipv4()` error: redundant pattern matching, consider using `is_ipv4()` - --> tests/ui/redundant_pattern_matching_ipaddr.rs:79:20 + --> tests/ui/redundant_pattern_matching_ipaddr.rs:78:20 | LL | let _ = if let V4(_) = gen_ipaddr() { | -------^^^^^--------------- help: try: `if gen_ipaddr().is_ipv4()` error: redundant pattern matching, consider using `is_ipv6()` - --> tests/ui/redundant_pattern_matching_ipaddr.rs:82:19 + --> tests/ui/redundant_pattern_matching_ipaddr.rs:81:19 | LL | } else if let V6(_) = gen_ipaddr() { | -------^^^^^--------------- help: try: `if gen_ipaddr().is_ipv6()` error: redundant pattern matching, consider using `is_ipv4()` - --> tests/ui/redundant_pattern_matching_ipaddr.rs:95:12 + --> tests/ui/redundant_pattern_matching_ipaddr.rs:94:12 | LL | if let V4(_) = V4(Ipv4Addr::LOCALHOST) {} | -------^^^^^-------------------------- help: try: `if V4(Ipv4Addr::LOCALHOST).is_ipv4()` error: redundant pattern matching, consider using `is_ipv6()` - --> tests/ui/redundant_pattern_matching_ipaddr.rs:98:12 + --> tests/ui/redundant_pattern_matching_ipaddr.rs:97:12 | LL | if let V6(_) = V6(Ipv6Addr::LOCALHOST) {} | -------^^^^^-------------------------- help: try: `if V6(Ipv6Addr::LOCALHOST).is_ipv6()` error: redundant pattern matching, consider using `is_ipv4()` - --> tests/ui/redundant_pattern_matching_ipaddr.rs:101:15 + --> tests/ui/redundant_pattern_matching_ipaddr.rs:100:15 | LL | while let V4(_) = V4(Ipv4Addr::LOCALHOST) {} | ----------^^^^^-------------------------- help: try: `while V4(Ipv4Addr::LOCALHOST).is_ipv4()` error: redundant pattern matching, consider using `is_ipv6()` - --> tests/ui/redundant_pattern_matching_ipaddr.rs:104:15 + --> tests/ui/redundant_pattern_matching_ipaddr.rs:103:15 | LL | while let V6(_) = V6(Ipv6Addr::LOCALHOST) {} | ----------^^^^^-------------------------- help: try: `while V6(Ipv6Addr::LOCALHOST).is_ipv6()` error: redundant pattern matching, consider using `is_ipv4()` - --> tests/ui/redundant_pattern_matching_ipaddr.rs:107:5 + --> tests/ui/redundant_pattern_matching_ipaddr.rs:106:5 | LL | / match V4(Ipv4Addr::LOCALHOST) { LL | | @@ -136,7 +136,7 @@ LL | | }; | |_____^ help: try: `V4(Ipv4Addr::LOCALHOST).is_ipv4()` error: redundant pattern matching, consider using `is_ipv6()` - --> tests/ui/redundant_pattern_matching_ipaddr.rs:113:5 + --> tests/ui/redundant_pattern_matching_ipaddr.rs:112:5 | LL | / match V6(Ipv6Addr::LOCALHOST) { LL | | diff --git a/src/tools/clippy/tests/ui/redundant_pattern_matching_option.fixed b/src/tools/clippy/tests/ui/redundant_pattern_matching_option.fixed index 5585006dc362b..33a5308bd3574 100644 --- a/src/tools/clippy/tests/ui/redundant_pattern_matching_option.fixed +++ b/src/tools/clippy/tests/ui/redundant_pattern_matching_option.fixed @@ -1,14 +1,12 @@ -#![warn(clippy::all)] +#![feature(let_chains, if_let_guard)] #![warn(clippy::redundant_pattern_matching)] #![allow( - unused_must_use, clippy::needless_bool, clippy::needless_if, clippy::match_like_matches_macro, clippy::equatable_if_let, clippy::if_same_then_else )] -#![feature(let_chains, if_let_guard)] fn issue_11174(boolean: bool, maybe_some: Option) -> bool { maybe_some.is_none() && (!boolean) diff --git a/src/tools/clippy/tests/ui/redundant_pattern_matching_option.rs b/src/tools/clippy/tests/ui/redundant_pattern_matching_option.rs index 581a432f38e11..60bce2994ea3a 100644 --- a/src/tools/clippy/tests/ui/redundant_pattern_matching_option.rs +++ b/src/tools/clippy/tests/ui/redundant_pattern_matching_option.rs @@ -1,14 +1,12 @@ -#![warn(clippy::all)] +#![feature(let_chains, if_let_guard)] #![warn(clippy::redundant_pattern_matching)] #![allow( - unused_must_use, clippy::needless_bool, clippy::needless_if, clippy::match_like_matches_macro, clippy::equatable_if_let, clippy::if_same_then_else )] -#![feature(let_chains, if_let_guard)] fn issue_11174(boolean: bool, maybe_some: Option) -> bool { matches!(maybe_some, None if !boolean) diff --git a/src/tools/clippy/tests/ui/redundant_pattern_matching_option.stderr b/src/tools/clippy/tests/ui/redundant_pattern_matching_option.stderr index 681602567d2f2..e5a6598898aa1 100644 --- a/src/tools/clippy/tests/ui/redundant_pattern_matching_option.stderr +++ b/src/tools/clippy/tests/ui/redundant_pattern_matching_option.stderr @@ -1,5 +1,5 @@ error: redundant pattern matching, consider using `is_none()` - --> tests/ui/redundant_pattern_matching_option.rs:14:5 + --> tests/ui/redundant_pattern_matching_option.rs:12:5 | LL | matches!(maybe_some, None if !boolean) | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `maybe_some.is_none() && (!boolean)` @@ -8,55 +8,55 @@ LL | matches!(maybe_some, None if !boolean) = help: to override `-D warnings` add `#[allow(clippy::redundant_pattern_matching)]` error: redundant pattern matching, consider using `is_none()` - --> tests/ui/redundant_pattern_matching_option.rs:19:13 + --> tests/ui/redundant_pattern_matching_option.rs:17:13 | LL | let _ = matches!(maybe_some, None if boolean || boolean2); // guard needs parentheses | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `maybe_some.is_none() && (boolean || boolean2)` error: redundant pattern matching, consider using `is_none()` - --> tests/ui/redundant_pattern_matching_option.rs:35:12 + --> tests/ui/redundant_pattern_matching_option.rs:33:12 | LL | if let None = None::<()> {} | -------^^^^------------- help: try: `if None::<()>.is_none()` error: redundant pattern matching, consider using `is_some()` - --> tests/ui/redundant_pattern_matching_option.rs:38:12 + --> tests/ui/redundant_pattern_matching_option.rs:36:12 | LL | if let Some(_) = Some(42) {} | -------^^^^^^^----------- help: try: `if Some(42).is_some()` error: redundant pattern matching, consider using `is_some()` - --> tests/ui/redundant_pattern_matching_option.rs:41:12 + --> tests/ui/redundant_pattern_matching_option.rs:39:12 | LL | if let Some(_) = Some(42) { | -------^^^^^^^----------- help: try: `if Some(42).is_some()` error: redundant pattern matching, consider using `is_some()` - --> tests/ui/redundant_pattern_matching_option.rs:48:15 + --> tests/ui/redundant_pattern_matching_option.rs:46:15 | LL | while let Some(_) = Some(42) {} | ----------^^^^^^^----------- help: try: `while Some(42).is_some()` error: redundant pattern matching, consider using `is_none()` - --> tests/ui/redundant_pattern_matching_option.rs:51:15 + --> tests/ui/redundant_pattern_matching_option.rs:49:15 | LL | while let None = Some(42) {} | ----------^^^^----------- help: try: `while Some(42).is_none()` error: redundant pattern matching, consider using `is_none()` - --> tests/ui/redundant_pattern_matching_option.rs:54:15 + --> tests/ui/redundant_pattern_matching_option.rs:52:15 | LL | while let None = None::<()> {} | ----------^^^^------------- help: try: `while None::<()>.is_none()` error: redundant pattern matching, consider using `is_some()` - --> tests/ui/redundant_pattern_matching_option.rs:58:15 + --> tests/ui/redundant_pattern_matching_option.rs:56:15 | LL | while let Some(_) = v.pop() { | ----------^^^^^^^---------- help: try: `while v.pop().is_some()` error: redundant pattern matching, consider using `is_some()` - --> tests/ui/redundant_pattern_matching_option.rs:67:5 + --> tests/ui/redundant_pattern_matching_option.rs:65:5 | LL | / match Some(42) { LL | | @@ -66,7 +66,7 @@ LL | | }; | |_____^ help: try: `Some(42).is_some()` error: redundant pattern matching, consider using `is_none()` - --> tests/ui/redundant_pattern_matching_option.rs:73:5 + --> tests/ui/redundant_pattern_matching_option.rs:71:5 | LL | / match None::<()> { LL | | @@ -76,7 +76,7 @@ LL | | }; | |_____^ help: try: `None::<()>.is_none()` error: redundant pattern matching, consider using `is_none()` - --> tests/ui/redundant_pattern_matching_option.rs:79:13 + --> tests/ui/redundant_pattern_matching_option.rs:77:13 | LL | let _ = match None::<()> { | _____________^ @@ -87,55 +87,55 @@ LL | | }; | |_____^ help: try: `None::<()>.is_none()` error: redundant pattern matching, consider using `is_some()` - --> tests/ui/redundant_pattern_matching_option.rs:86:20 + --> tests/ui/redundant_pattern_matching_option.rs:84:20 | LL | let _ = if let Some(_) = opt { true } else { false }; | -------^^^^^^^------ help: try: `if opt.is_some()` error: redundant pattern matching, consider using `is_some()` - --> tests/ui/redundant_pattern_matching_option.rs:93:20 + --> tests/ui/redundant_pattern_matching_option.rs:91:20 | LL | let _ = if let Some(_) = gen_opt() { | -------^^^^^^^------------ help: try: `if gen_opt().is_some()` error: redundant pattern matching, consider using `is_none()` - --> tests/ui/redundant_pattern_matching_option.rs:96:19 + --> tests/ui/redundant_pattern_matching_option.rs:94:19 | LL | } else if let None = gen_opt() { | -------^^^^------------ help: try: `if gen_opt().is_none()` error: redundant pattern matching, consider using `is_some()` - --> tests/ui/redundant_pattern_matching_option.rs:103:12 + --> tests/ui/redundant_pattern_matching_option.rs:101:12 | LL | if let Some(..) = gen_opt() {} | -------^^^^^^^^------------ help: try: `if gen_opt().is_some()` error: redundant pattern matching, consider using `is_some()` - --> tests/ui/redundant_pattern_matching_option.rs:119:12 + --> tests/ui/redundant_pattern_matching_option.rs:117:12 | LL | if let Some(_) = Some(42) {} | -------^^^^^^^----------- help: try: `if Some(42).is_some()` error: redundant pattern matching, consider using `is_none()` - --> tests/ui/redundant_pattern_matching_option.rs:122:12 + --> tests/ui/redundant_pattern_matching_option.rs:120:12 | LL | if let None = None::<()> {} | -------^^^^------------- help: try: `if None::<()>.is_none()` error: redundant pattern matching, consider using `is_some()` - --> tests/ui/redundant_pattern_matching_option.rs:125:15 + --> tests/ui/redundant_pattern_matching_option.rs:123:15 | LL | while let Some(_) = Some(42) {} | ----------^^^^^^^----------- help: try: `while Some(42).is_some()` error: redundant pattern matching, consider using `is_none()` - --> tests/ui/redundant_pattern_matching_option.rs:128:15 + --> tests/ui/redundant_pattern_matching_option.rs:126:15 | LL | while let None = None::<()> {} | ----------^^^^------------- help: try: `while None::<()>.is_none()` error: redundant pattern matching, consider using `is_some()` - --> tests/ui/redundant_pattern_matching_option.rs:131:5 + --> tests/ui/redundant_pattern_matching_option.rs:129:5 | LL | / match Some(42) { LL | | @@ -145,7 +145,7 @@ LL | | }; | |_____^ help: try: `Some(42).is_some()` error: redundant pattern matching, consider using `is_none()` - --> tests/ui/redundant_pattern_matching_option.rs:137:5 + --> tests/ui/redundant_pattern_matching_option.rs:135:5 | LL | / match None::<()> { LL | | @@ -155,19 +155,19 @@ LL | | }; | |_____^ help: try: `None::<()>.is_none()` error: redundant pattern matching, consider using `is_none()` - --> tests/ui/redundant_pattern_matching_option.rs:146:12 + --> tests/ui/redundant_pattern_matching_option.rs:144:12 | LL | if let None = *(&None::<()>) {} | -------^^^^----------------- help: try: `if (&None::<()>).is_none()` error: redundant pattern matching, consider using `is_none()` - --> tests/ui/redundant_pattern_matching_option.rs:148:12 + --> tests/ui/redundant_pattern_matching_option.rs:146:12 | LL | if let None = *&None::<()> {} | -------^^^^--------------- help: try: `if (&None::<()>).is_none()` error: redundant pattern matching, consider using `is_some()` - --> tests/ui/redundant_pattern_matching_option.rs:155:5 + --> tests/ui/redundant_pattern_matching_option.rs:153:5 | LL | / match x { LL | | @@ -177,7 +177,7 @@ LL | | }; | |_____^ help: try: `x.is_some()` error: redundant pattern matching, consider using `is_none()` - --> tests/ui/redundant_pattern_matching_option.rs:161:5 + --> tests/ui/redundant_pattern_matching_option.rs:159:5 | LL | / match x { LL | | @@ -187,7 +187,7 @@ LL | | }; | |_____^ help: try: `x.is_none()` error: redundant pattern matching, consider using `is_none()` - --> tests/ui/redundant_pattern_matching_option.rs:167:5 + --> tests/ui/redundant_pattern_matching_option.rs:165:5 | LL | / match x { LL | | @@ -197,7 +197,7 @@ LL | | }; | |_____^ help: try: `x.is_none()` error: redundant pattern matching, consider using `is_some()` - --> tests/ui/redundant_pattern_matching_option.rs:173:5 + --> tests/ui/redundant_pattern_matching_option.rs:171:5 | LL | / match x { LL | | @@ -207,19 +207,19 @@ LL | | }; | |_____^ help: try: `x.is_some()` error: redundant pattern matching, consider using `is_some()` - --> tests/ui/redundant_pattern_matching_option.rs:189:13 + --> tests/ui/redundant_pattern_matching_option.rs:187:13 | LL | let _ = matches!(x, Some(_)); | ^^^^^^^^^^^^^^^^^^^^ help: try: `x.is_some()` error: redundant pattern matching, consider using `is_none()` - --> tests/ui/redundant_pattern_matching_option.rs:192:13 + --> tests/ui/redundant_pattern_matching_option.rs:190:13 | LL | let _ = matches!(x, None); | ^^^^^^^^^^^^^^^^^ help: try: `x.is_none()` error: redundant pattern matching, consider using `is_none()` - --> tests/ui/redundant_pattern_matching_option.rs:203:17 + --> tests/ui/redundant_pattern_matching_option.rs:201:17 | LL | let _ = matches!(*p, None); | ^^^^^^^^^^^^^^^^^^ help: try: `(*p).is_none()` diff --git a/src/tools/clippy/tests/ui/redundant_pattern_matching_poll.fixed b/src/tools/clippy/tests/ui/redundant_pattern_matching_poll.fixed index c8e18e8676f21..800889b5fda0a 100644 --- a/src/tools/clippy/tests/ui/redundant_pattern_matching_poll.fixed +++ b/src/tools/clippy/tests/ui/redundant_pattern_matching_poll.fixed @@ -1,7 +1,5 @@ -#![warn(clippy::all)] #![warn(clippy::redundant_pattern_matching)] #![allow( - unused_must_use, clippy::needless_bool, clippy::needless_if, clippy::match_like_matches_macro, diff --git a/src/tools/clippy/tests/ui/redundant_pattern_matching_poll.rs b/src/tools/clippy/tests/ui/redundant_pattern_matching_poll.rs index 727503d21a54a..1668c2ff2bbac 100644 --- a/src/tools/clippy/tests/ui/redundant_pattern_matching_poll.rs +++ b/src/tools/clippy/tests/ui/redundant_pattern_matching_poll.rs @@ -1,7 +1,5 @@ -#![warn(clippy::all)] #![warn(clippy::redundant_pattern_matching)] #![allow( - unused_must_use, clippy::needless_bool, clippy::needless_if, clippy::match_like_matches_macro, diff --git a/src/tools/clippy/tests/ui/redundant_pattern_matching_poll.stderr b/src/tools/clippy/tests/ui/redundant_pattern_matching_poll.stderr index 5f659184f7b38..5cd9d9636e466 100644 --- a/src/tools/clippy/tests/ui/redundant_pattern_matching_poll.stderr +++ b/src/tools/clippy/tests/ui/redundant_pattern_matching_poll.stderr @@ -1,5 +1,5 @@ error: redundant pattern matching, consider using `is_pending()` - --> tests/ui/redundant_pattern_matching_poll.rs:15:12 + --> tests/ui/redundant_pattern_matching_poll.rs:13:12 | LL | if let Pending = Pending::<()> {} | -------^^^^^^^---------------- help: try: `if Pending::<()>.is_pending()` @@ -8,49 +8,49 @@ LL | if let Pending = Pending::<()> {} = help: to override `-D warnings` add `#[allow(clippy::redundant_pattern_matching)]` error: redundant pattern matching, consider using `is_ready()` - --> tests/ui/redundant_pattern_matching_poll.rs:18:12 + --> tests/ui/redundant_pattern_matching_poll.rs:16:12 | LL | if let Ready(_) = Ready(42) {} | -------^^^^^^^^------------ help: try: `if Ready(42).is_ready()` error: redundant pattern matching, consider using `is_ready()` - --> tests/ui/redundant_pattern_matching_poll.rs:21:12 + --> tests/ui/redundant_pattern_matching_poll.rs:19:12 | LL | if let Ready(_) = Ready(42) { | -------^^^^^^^^------------ help: try: `if Ready(42).is_ready()` error: redundant pattern matching, consider using `is_ready()` - --> tests/ui/redundant_pattern_matching_poll.rs:29:8 + --> tests/ui/redundant_pattern_matching_poll.rs:27:8 | LL | if matches!(Ready(42), Ready(_)) {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `Ready(42).is_ready()` error: redundant pattern matching, consider using `is_pending()` - --> tests/ui/redundant_pattern_matching_poll.rs:33:8 + --> tests/ui/redundant_pattern_matching_poll.rs:31:8 | LL | if matches!(Pending::<()>, Pending) {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `Pending::<()>.is_pending()` error: redundant pattern matching, consider using `is_ready()` - --> tests/ui/redundant_pattern_matching_poll.rs:36:15 + --> tests/ui/redundant_pattern_matching_poll.rs:34:15 | LL | while let Ready(_) = Ready(42) {} | ----------^^^^^^^^------------ help: try: `while Ready(42).is_ready()` error: redundant pattern matching, consider using `is_pending()` - --> tests/ui/redundant_pattern_matching_poll.rs:39:15 + --> tests/ui/redundant_pattern_matching_poll.rs:37:15 | LL | while let Pending = Ready(42) {} | ----------^^^^^^^------------ help: try: `while Ready(42).is_pending()` error: redundant pattern matching, consider using `is_pending()` - --> tests/ui/redundant_pattern_matching_poll.rs:42:15 + --> tests/ui/redundant_pattern_matching_poll.rs:40:15 | LL | while let Pending = Pending::<()> {} | ----------^^^^^^^---------------- help: try: `while Pending::<()>.is_pending()` error: redundant pattern matching, consider using `is_ready()` - --> tests/ui/redundant_pattern_matching_poll.rs:49:5 + --> tests/ui/redundant_pattern_matching_poll.rs:47:5 | LL | / match Ready(42) { LL | | @@ -60,7 +60,7 @@ LL | | }; | |_____^ help: try: `Ready(42).is_ready()` error: redundant pattern matching, consider using `is_pending()` - --> tests/ui/redundant_pattern_matching_poll.rs:55:5 + --> tests/ui/redundant_pattern_matching_poll.rs:53:5 | LL | / match Pending::<()> { LL | | @@ -70,7 +70,7 @@ LL | | }; | |_____^ help: try: `Pending::<()>.is_pending()` error: redundant pattern matching, consider using `is_pending()` - --> tests/ui/redundant_pattern_matching_poll.rs:61:13 + --> tests/ui/redundant_pattern_matching_poll.rs:59:13 | LL | let _ = match Pending::<()> { | _____________^ @@ -81,49 +81,49 @@ LL | | }; | |_____^ help: try: `Pending::<()>.is_pending()` error: redundant pattern matching, consider using `is_ready()` - --> tests/ui/redundant_pattern_matching_poll.rs:68:20 + --> tests/ui/redundant_pattern_matching_poll.rs:66:20 | LL | let _ = if let Ready(_) = poll { true } else { false }; | -------^^^^^^^^------- help: try: `if poll.is_ready()` error: redundant pattern matching, consider using `is_ready()` - --> tests/ui/redundant_pattern_matching_poll.rs:73:20 + --> tests/ui/redundant_pattern_matching_poll.rs:71:20 | LL | let _ = if let Ready(_) = gen_poll() { | -------^^^^^^^^------------- help: try: `if gen_poll().is_ready()` error: redundant pattern matching, consider using `is_pending()` - --> tests/ui/redundant_pattern_matching_poll.rs:76:19 + --> tests/ui/redundant_pattern_matching_poll.rs:74:19 | LL | } else if let Pending = gen_poll() { | -------^^^^^^^------------- help: try: `if gen_poll().is_pending()` error: redundant pattern matching, consider using `is_ready()` - --> tests/ui/redundant_pattern_matching_poll.rs:93:12 + --> tests/ui/redundant_pattern_matching_poll.rs:91:12 | LL | if let Ready(_) = Ready(42) {} | -------^^^^^^^^------------ help: try: `if Ready(42).is_ready()` error: redundant pattern matching, consider using `is_pending()` - --> tests/ui/redundant_pattern_matching_poll.rs:96:12 + --> tests/ui/redundant_pattern_matching_poll.rs:94:12 | LL | if let Pending = Pending::<()> {} | -------^^^^^^^---------------- help: try: `if Pending::<()>.is_pending()` error: redundant pattern matching, consider using `is_ready()` - --> tests/ui/redundant_pattern_matching_poll.rs:99:15 + --> tests/ui/redundant_pattern_matching_poll.rs:97:15 | LL | while let Ready(_) = Ready(42) {} | ----------^^^^^^^^------------ help: try: `while Ready(42).is_ready()` error: redundant pattern matching, consider using `is_pending()` - --> tests/ui/redundant_pattern_matching_poll.rs:102:15 + --> tests/ui/redundant_pattern_matching_poll.rs:100:15 | LL | while let Pending = Pending::<()> {} | ----------^^^^^^^---------------- help: try: `while Pending::<()>.is_pending()` error: redundant pattern matching, consider using `is_ready()` - --> tests/ui/redundant_pattern_matching_poll.rs:105:5 + --> tests/ui/redundant_pattern_matching_poll.rs:103:5 | LL | / match Ready(42) { LL | | @@ -133,7 +133,7 @@ LL | | }; | |_____^ help: try: `Ready(42).is_ready()` error: redundant pattern matching, consider using `is_pending()` - --> tests/ui/redundant_pattern_matching_poll.rs:111:5 + --> tests/ui/redundant_pattern_matching_poll.rs:109:5 | LL | / match Pending::<()> { LL | | diff --git a/src/tools/clippy/tests/ui/redundant_pattern_matching_result.fixed b/src/tools/clippy/tests/ui/redundant_pattern_matching_result.fixed index 1158796083147..dab816716d598 100644 --- a/src/tools/clippy/tests/ui/redundant_pattern_matching_result.fixed +++ b/src/tools/clippy/tests/ui/redundant_pattern_matching_result.fixed @@ -1,6 +1,5 @@ -#![warn(clippy::all)] #![warn(clippy::redundant_pattern_matching)] -#![allow(deprecated, unused_must_use)] +#![allow(deprecated)] #![allow( clippy::if_same_then_else, clippy::match_like_matches_macro, diff --git a/src/tools/clippy/tests/ui/redundant_pattern_matching_result.rs b/src/tools/clippy/tests/ui/redundant_pattern_matching_result.rs index 35f8f91b31527..3fd70515d0847 100644 --- a/src/tools/clippy/tests/ui/redundant_pattern_matching_result.rs +++ b/src/tools/clippy/tests/ui/redundant_pattern_matching_result.rs @@ -1,6 +1,5 @@ -#![warn(clippy::all)] #![warn(clippy::redundant_pattern_matching)] -#![allow(deprecated, unused_must_use)] +#![allow(deprecated)] #![allow( clippy::if_same_then_else, clippy::match_like_matches_macro, diff --git a/src/tools/clippy/tests/ui/redundant_pattern_matching_result.stderr b/src/tools/clippy/tests/ui/redundant_pattern_matching_result.stderr index 4f78b95356c21..7e7d27d07a7f6 100644 --- a/src/tools/clippy/tests/ui/redundant_pattern_matching_result.stderr +++ b/src/tools/clippy/tests/ui/redundant_pattern_matching_result.stderr @@ -1,5 +1,5 @@ error: redundant pattern matching, consider using `is_ok()` - --> tests/ui/redundant_pattern_matching_result.rs:15:12 + --> tests/ui/redundant_pattern_matching_result.rs:14:12 | LL | if let Ok(_) = &result {} | -------^^^^^---------- help: try: `if result.is_ok()` @@ -8,31 +8,31 @@ LL | if let Ok(_) = &result {} = help: to override `-D warnings` add `#[allow(clippy::redundant_pattern_matching)]` error: redundant pattern matching, consider using `is_ok()` - --> tests/ui/redundant_pattern_matching_result.rs:18:12 + --> tests/ui/redundant_pattern_matching_result.rs:17:12 | LL | if let Ok(_) = Ok::(42) {} | -------^^^^^--------------------- help: try: `if Ok::(42).is_ok()` error: redundant pattern matching, consider using `is_err()` - --> tests/ui/redundant_pattern_matching_result.rs:21:12 + --> tests/ui/redundant_pattern_matching_result.rs:20:12 | LL | if let Err(_) = Err::(42) {} | -------^^^^^^---------------------- help: try: `if Err::(42).is_err()` error: redundant pattern matching, consider using `is_ok()` - --> tests/ui/redundant_pattern_matching_result.rs:24:15 + --> tests/ui/redundant_pattern_matching_result.rs:23:15 | LL | while let Ok(_) = Ok::(10) {} | ----------^^^^^--------------------- help: try: `while Ok::(10).is_ok()` error: redundant pattern matching, consider using `is_err()` - --> tests/ui/redundant_pattern_matching_result.rs:27:15 + --> tests/ui/redundant_pattern_matching_result.rs:26:15 | LL | while let Err(_) = Ok::(10) {} | ----------^^^^^^--------------------- help: try: `while Ok::(10).is_err()` error: redundant pattern matching, consider using `is_ok()` - --> tests/ui/redundant_pattern_matching_result.rs:38:5 + --> tests/ui/redundant_pattern_matching_result.rs:37:5 | LL | / match Ok::(42) { LL | | @@ -42,7 +42,7 @@ LL | | }; | |_____^ help: try: `Ok::(42).is_ok()` error: redundant pattern matching, consider using `is_err()` - --> tests/ui/redundant_pattern_matching_result.rs:44:5 + --> tests/ui/redundant_pattern_matching_result.rs:43:5 | LL | / match Ok::(42) { LL | | @@ -52,7 +52,7 @@ LL | | }; | |_____^ help: try: `Ok::(42).is_err()` error: redundant pattern matching, consider using `is_err()` - --> tests/ui/redundant_pattern_matching_result.rs:50:5 + --> tests/ui/redundant_pattern_matching_result.rs:49:5 | LL | / match Err::(42) { LL | | @@ -62,7 +62,7 @@ LL | | }; | |_____^ help: try: `Err::(42).is_err()` error: redundant pattern matching, consider using `is_ok()` - --> tests/ui/redundant_pattern_matching_result.rs:56:5 + --> tests/ui/redundant_pattern_matching_result.rs:55:5 | LL | / match Err::(42) { LL | | @@ -72,73 +72,73 @@ LL | | }; | |_____^ help: try: `Err::(42).is_ok()` error: redundant pattern matching, consider using `is_ok()` - --> tests/ui/redundant_pattern_matching_result.rs:62:20 + --> tests/ui/redundant_pattern_matching_result.rs:61:20 | LL | let _ = if let Ok(_) = Ok::(4) { true } else { false }; | -------^^^^^--------------------- help: try: `if Ok::(4).is_ok()` error: redundant pattern matching, consider using `is_ok()` - --> tests/ui/redundant_pattern_matching_result.rs:71:20 + --> tests/ui/redundant_pattern_matching_result.rs:70:20 | LL | let _ = if let Ok(_) = gen_res() { | -------^^^^^------------ help: try: `if gen_res().is_ok()` error: redundant pattern matching, consider using `is_err()` - --> tests/ui/redundant_pattern_matching_result.rs:74:19 + --> tests/ui/redundant_pattern_matching_result.rs:73:19 | LL | } else if let Err(_) = gen_res() { | -------^^^^^^------------ help: try: `if gen_res().is_err()` error: redundant pattern matching, consider using `is_some()` - --> tests/ui/redundant_pattern_matching_result.rs:98:19 + --> tests/ui/redundant_pattern_matching_result.rs:97:19 | LL | while let Some(_) = r#try!(result_opt()) {} | ----------^^^^^^^----------------------- help: try: `while r#try!(result_opt()).is_some()` error: redundant pattern matching, consider using `is_some()` - --> tests/ui/redundant_pattern_matching_result.rs:100:16 + --> tests/ui/redundant_pattern_matching_result.rs:99:16 | LL | if let Some(_) = r#try!(result_opt()) {} | -------^^^^^^^----------------------- help: try: `if r#try!(result_opt()).is_some()` error: redundant pattern matching, consider using `is_some()` - --> tests/ui/redundant_pattern_matching_result.rs:107:12 + --> tests/ui/redundant_pattern_matching_result.rs:106:12 | LL | if let Some(_) = m!() {} | -------^^^^^^^------- help: try: `if m!().is_some()` error: redundant pattern matching, consider using `is_some()` - --> tests/ui/redundant_pattern_matching_result.rs:109:15 + --> tests/ui/redundant_pattern_matching_result.rs:108:15 | LL | while let Some(_) = m!() {} | ----------^^^^^^^------- help: try: `while m!().is_some()` error: redundant pattern matching, consider using `is_ok()` - --> tests/ui/redundant_pattern_matching_result.rs:128:12 + --> tests/ui/redundant_pattern_matching_result.rs:127:12 | LL | if let Ok(_) = Ok::(42) {} | -------^^^^^--------------------- help: try: `if Ok::(42).is_ok()` error: redundant pattern matching, consider using `is_err()` - --> tests/ui/redundant_pattern_matching_result.rs:131:12 + --> tests/ui/redundant_pattern_matching_result.rs:130:12 | LL | if let Err(_) = Err::(42) {} | -------^^^^^^---------------------- help: try: `if Err::(42).is_err()` error: redundant pattern matching, consider using `is_ok()` - --> tests/ui/redundant_pattern_matching_result.rs:134:15 + --> tests/ui/redundant_pattern_matching_result.rs:133:15 | LL | while let Ok(_) = Ok::(10) {} | ----------^^^^^--------------------- help: try: `while Ok::(10).is_ok()` error: redundant pattern matching, consider using `is_err()` - --> tests/ui/redundant_pattern_matching_result.rs:137:15 + --> tests/ui/redundant_pattern_matching_result.rs:136:15 | LL | while let Err(_) = Ok::(10) {} | ----------^^^^^^--------------------- help: try: `while Ok::(10).is_err()` error: redundant pattern matching, consider using `is_ok()` - --> tests/ui/redundant_pattern_matching_result.rs:140:5 + --> tests/ui/redundant_pattern_matching_result.rs:139:5 | LL | / match Ok::(42) { LL | | @@ -148,7 +148,7 @@ LL | | }; | |_____^ help: try: `Ok::(42).is_ok()` error: redundant pattern matching, consider using `is_err()` - --> tests/ui/redundant_pattern_matching_result.rs:146:5 + --> tests/ui/redundant_pattern_matching_result.rs:145:5 | LL | / match Err::(42) { LL | | @@ -158,7 +158,7 @@ LL | | }; | |_____^ help: try: `Err::(42).is_err()` error: redundant pattern matching, consider using `is_ok()` - --> tests/ui/redundant_pattern_matching_result.rs:157:5 + --> tests/ui/redundant_pattern_matching_result.rs:156:5 | LL | / match x { LL | | @@ -168,7 +168,7 @@ LL | | }; | |_____^ help: try: `x.is_ok()` error: redundant pattern matching, consider using `is_err()` - --> tests/ui/redundant_pattern_matching_result.rs:163:5 + --> tests/ui/redundant_pattern_matching_result.rs:162:5 | LL | / match x { LL | | @@ -178,7 +178,7 @@ LL | | }; | |_____^ help: try: `x.is_err()` error: redundant pattern matching, consider using `is_err()` - --> tests/ui/redundant_pattern_matching_result.rs:169:5 + --> tests/ui/redundant_pattern_matching_result.rs:168:5 | LL | / match x { LL | | @@ -188,7 +188,7 @@ LL | | }; | |_____^ help: try: `x.is_err()` error: redundant pattern matching, consider using `is_ok()` - --> tests/ui/redundant_pattern_matching_result.rs:175:5 + --> tests/ui/redundant_pattern_matching_result.rs:174:5 | LL | / match x { LL | | @@ -198,13 +198,13 @@ LL | | }; | |_____^ help: try: `x.is_ok()` error: redundant pattern matching, consider using `is_ok()` - --> tests/ui/redundant_pattern_matching_result.rs:197:13 + --> tests/ui/redundant_pattern_matching_result.rs:196:13 | LL | let _ = matches!(x, Ok(_)); | ^^^^^^^^^^^^^^^^^^ help: try: `x.is_ok()` error: redundant pattern matching, consider using `is_err()` - --> tests/ui/redundant_pattern_matching_result.rs:200:13 + --> tests/ui/redundant_pattern_matching_result.rs:199:13 | LL | let _ = matches!(x, Err(_)); | ^^^^^^^^^^^^^^^^^^^ help: try: `x.is_err()` diff --git a/src/tools/clippy/tests/ui/redundant_pub_crate.fixed b/src/tools/clippy/tests/ui/redundant_pub_crate.fixed index a6450123f4c9d..8a30fedede4a4 100644 --- a/src/tools/clippy/tests/ui/redundant_pub_crate.fixed +++ b/src/tools/clippy/tests/ui/redundant_pub_crate.fixed @@ -131,6 +131,14 @@ mod m4 { } } +mod m5 { + pub mod m5_1 {} + // Test that the primary span isn't butchered for item kinds that don't have an ident. + pub use m5_1::*; //~ redundant_pub_crate + #[rustfmt::skip] + pub use m5_1::{*}; //~ redundant_pub_crate +} + pub use m4::*; mod issue_8732 { diff --git a/src/tools/clippy/tests/ui/redundant_pub_crate.rs b/src/tools/clippy/tests/ui/redundant_pub_crate.rs index 7415d34d50cc7..45ba13a63b2e2 100644 --- a/src/tools/clippy/tests/ui/redundant_pub_crate.rs +++ b/src/tools/clippy/tests/ui/redundant_pub_crate.rs @@ -131,6 +131,14 @@ mod m4 { } } +mod m5 { + pub mod m5_1 {} + // Test that the primary span isn't butchered for item kinds that don't have an ident. + pub(crate) use m5_1::*; //~ redundant_pub_crate + #[rustfmt::skip] + pub(crate) use m5_1::{*}; //~ redundant_pub_crate +} + pub use m4::*; mod issue_8732 { diff --git a/src/tools/clippy/tests/ui/redundant_pub_crate.stderr b/src/tools/clippy/tests/ui/redundant_pub_crate.stderr index 95909ea8b0663..4a47a321028d1 100644 --- a/src/tools/clippy/tests/ui/redundant_pub_crate.stderr +++ b/src/tools/clippy/tests/ui/redundant_pub_crate.stderr @@ -129,5 +129,21 @@ LL | pub(crate) fn g() {} // private due to m4_2 | | | help: consider using: `pub` -error: aborting due to 16 previous errors +error: pub(crate) import inside private module + --> tests/ui/redundant_pub_crate.rs:137:5 + | +LL | pub(crate) use m5_1::*; + | ----------^^^^^^^^^^^^^ + | | + | help: consider using: `pub` + +error: pub(crate) import inside private module + --> tests/ui/redundant_pub_crate.rs:139:27 + | +LL | pub(crate) use m5_1::{*}; + | ---------- ^ + | | + | help: consider using: `pub` + +error: aborting due to 18 previous errors diff --git a/src/tools/clippy/tests/ui/redundant_test_prefix.fixed b/src/tools/clippy/tests/ui/redundant_test_prefix.fixed new file mode 100644 index 0000000000000..b99771f0640ca --- /dev/null +++ b/src/tools/clippy/tests/ui/redundant_test_prefix.fixed @@ -0,0 +1,158 @@ +#![allow(dead_code)] +#![warn(clippy::redundant_test_prefix)] + +fn main() { + // Normal function, no redundant prefix. +} + +fn f1() { + // Normal function, no redundant prefix. +} + +fn test_f2() { + // Has prefix, but no `#[test]` attribute, ignore. +} + +#[test] +fn f3() { + //~^ redundant_test_prefix + + // Has prefix, has `#[test]` attribute. Not within a `#[cfg(test)]`. + // No collision with other functions, should emit warning. +} + +#[cfg(test)] +#[test] +fn f4() { + //~^ redundant_test_prefix + + // Has prefix, has `#[test]` attribute, within a `#[cfg(test)]`. + // No collision with other functions, should emit warning. +} + +mod m1 { + pub fn f5() {} +} + +#[cfg(test)] +#[test] +fn f6() { + //~^ redundant_test_prefix + + use m1::f5; + + f5(); + // Has prefix, has `#[test]` attribute, within a `#[cfg(test)]`. + // No collision, has function call, but it will not result in recursion. +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn foo() { + //~^ redundant_test_prefix + } + + #[test] + fn foo_with_call() { + //~^ redundant_test_prefix + + main(); + } + + #[test] + fn f1() { + //~^ redundant_test_prefix + } + + #[test] + fn f2() { + //~^ redundant_test_prefix + } + + #[test] + fn f3() { + //~^ redundant_test_prefix + } + + #[test] + fn f4() { + //~^ redundant_test_prefix + } + + #[test] + fn f5() { + //~^ redundant_test_prefix + } + + #[test] + fn f6() { + //~^ redundant_test_prefix + } +} + +mod tests_no_annotations { + use super::*; + + #[test] + fn foo() { + //~^ redundant_test_prefix + } + + #[test] + fn foo_with_call() { + //~^ redundant_test_prefix + + main(); + } + + #[test] + fn f1() { + //~^ redundant_test_prefix + } + + #[test] + fn f2() { + //~^ redundant_test_prefix + } + + #[test] + fn f3() { + //~^ redundant_test_prefix + } + + #[test] + fn f4() { + //~^ redundant_test_prefix + } + + #[test] + fn f5() { + //~^ redundant_test_prefix + } + + #[test] + fn f6() { + //~^ redundant_test_prefix + } +} + +// This test is inspired by real test in `clippy_utils/src/sugg.rs`. +// The `is_in_test_function()` checks whether any identifier within a given node's parents is +// marked with `#[test]` attribute. Thus flagging false positives when nested functions are +// prefixed with `test_`. Therefore `is_test_function()` has been defined in `clippy_utils`, +// allowing to select only functions that are immediately marked with `#[test]` annotation. +// +// This test case ensures that for such nested functions no error is emitted. +#[test] +fn not_op() { + fn test_not(foo: bool) { + assert!(foo); + } + + // Use helper function + test_not(true); + test_not(false); +} diff --git a/src/tools/clippy/tests/ui/redundant_test_prefix.rs b/src/tools/clippy/tests/ui/redundant_test_prefix.rs new file mode 100644 index 0000000000000..3aec577cffa16 --- /dev/null +++ b/src/tools/clippy/tests/ui/redundant_test_prefix.rs @@ -0,0 +1,158 @@ +#![allow(dead_code)] +#![warn(clippy::redundant_test_prefix)] + +fn main() { + // Normal function, no redundant prefix. +} + +fn f1() { + // Normal function, no redundant prefix. +} + +fn test_f2() { + // Has prefix, but no `#[test]` attribute, ignore. +} + +#[test] +fn test_f3() { + //~^ redundant_test_prefix + + // Has prefix, has `#[test]` attribute. Not within a `#[cfg(test)]`. + // No collision with other functions, should emit warning. +} + +#[cfg(test)] +#[test] +fn test_f4() { + //~^ redundant_test_prefix + + // Has prefix, has `#[test]` attribute, within a `#[cfg(test)]`. + // No collision with other functions, should emit warning. +} + +mod m1 { + pub fn f5() {} +} + +#[cfg(test)] +#[test] +fn test_f6() { + //~^ redundant_test_prefix + + use m1::f5; + + f5(); + // Has prefix, has `#[test]` attribute, within a `#[cfg(test)]`. + // No collision, has function call, but it will not result in recursion. +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_foo() { + //~^ redundant_test_prefix + } + + #[test] + fn test_foo_with_call() { + //~^ redundant_test_prefix + + main(); + } + + #[test] + fn test_f1() { + //~^ redundant_test_prefix + } + + #[test] + fn test_f2() { + //~^ redundant_test_prefix + } + + #[test] + fn test_f3() { + //~^ redundant_test_prefix + } + + #[test] + fn test_f4() { + //~^ redundant_test_prefix + } + + #[test] + fn test_f5() { + //~^ redundant_test_prefix + } + + #[test] + fn test_f6() { + //~^ redundant_test_prefix + } +} + +mod tests_no_annotations { + use super::*; + + #[test] + fn test_foo() { + //~^ redundant_test_prefix + } + + #[test] + fn test_foo_with_call() { + //~^ redundant_test_prefix + + main(); + } + + #[test] + fn test_f1() { + //~^ redundant_test_prefix + } + + #[test] + fn test_f2() { + //~^ redundant_test_prefix + } + + #[test] + fn test_f3() { + //~^ redundant_test_prefix + } + + #[test] + fn test_f4() { + //~^ redundant_test_prefix + } + + #[test] + fn test_f5() { + //~^ redundant_test_prefix + } + + #[test] + fn test_f6() { + //~^ redundant_test_prefix + } +} + +// This test is inspired by real test in `clippy_utils/src/sugg.rs`. +// The `is_in_test_function()` checks whether any identifier within a given node's parents is +// marked with `#[test]` attribute. Thus flagging false positives when nested functions are +// prefixed with `test_`. Therefore `is_test_function()` has been defined in `clippy_utils`, +// allowing to select only functions that are immediately marked with `#[test]` annotation. +// +// This test case ensures that for such nested functions no error is emitted. +#[test] +fn not_op() { + fn test_not(foo: bool) { + assert!(foo); + } + + // Use helper function + test_not(true); + test_not(false); +} diff --git a/src/tools/clippy/tests/ui/redundant_test_prefix.stderr b/src/tools/clippy/tests/ui/redundant_test_prefix.stderr new file mode 100644 index 0000000000000..d156af586df3f --- /dev/null +++ b/src/tools/clippy/tests/ui/redundant_test_prefix.stderr @@ -0,0 +1,119 @@ +error: redundant `test_` prefix in test function name + --> tests/ui/redundant_test_prefix.rs:17:4 + | +LL | fn test_f3() { + | ^^^^^^^ help: consider removing the `test_` prefix: `f3` + | + = note: `-D clippy::redundant-test-prefix` implied by `-D warnings` + = help: to override `-D warnings` add `#[allow(clippy::redundant_test_prefix)]` + +error: redundant `test_` prefix in test function name + --> tests/ui/redundant_test_prefix.rs:26:4 + | +LL | fn test_f4() { + | ^^^^^^^ help: consider removing the `test_` prefix: `f4` + +error: redundant `test_` prefix in test function name + --> tests/ui/redundant_test_prefix.rs:39:4 + | +LL | fn test_f6() { + | ^^^^^^^ help: consider removing the `test_` prefix: `f6` + +error: redundant `test_` prefix in test function name + --> tests/ui/redundant_test_prefix.rs:54:8 + | +LL | fn test_foo() { + | ^^^^^^^^ help: consider removing the `test_` prefix: `foo` + +error: redundant `test_` prefix in test function name + --> tests/ui/redundant_test_prefix.rs:59:8 + | +LL | fn test_foo_with_call() { + | ^^^^^^^^^^^^^^^^^^ help: consider removing the `test_` prefix: `foo_with_call` + +error: redundant `test_` prefix in test function name + --> tests/ui/redundant_test_prefix.rs:66:8 + | +LL | fn test_f1() { + | ^^^^^^^ help: consider removing the `test_` prefix: `f1` + +error: redundant `test_` prefix in test function name + --> tests/ui/redundant_test_prefix.rs:71:8 + | +LL | fn test_f2() { + | ^^^^^^^ help: consider removing the `test_` prefix: `f2` + +error: redundant `test_` prefix in test function name + --> tests/ui/redundant_test_prefix.rs:76:8 + | +LL | fn test_f3() { + | ^^^^^^^ help: consider removing the `test_` prefix: `f3` + +error: redundant `test_` prefix in test function name + --> tests/ui/redundant_test_prefix.rs:81:8 + | +LL | fn test_f4() { + | ^^^^^^^ help: consider removing the `test_` prefix: `f4` + +error: redundant `test_` prefix in test function name + --> tests/ui/redundant_test_prefix.rs:86:8 + | +LL | fn test_f5() { + | ^^^^^^^ help: consider removing the `test_` prefix: `f5` + +error: redundant `test_` prefix in test function name + --> tests/ui/redundant_test_prefix.rs:91:8 + | +LL | fn test_f6() { + | ^^^^^^^ help: consider removing the `test_` prefix: `f6` + +error: redundant `test_` prefix in test function name + --> tests/ui/redundant_test_prefix.rs:100:8 + | +LL | fn test_foo() { + | ^^^^^^^^ help: consider removing the `test_` prefix: `foo` + +error: redundant `test_` prefix in test function name + --> tests/ui/redundant_test_prefix.rs:105:8 + | +LL | fn test_foo_with_call() { + | ^^^^^^^^^^^^^^^^^^ help: consider removing the `test_` prefix: `foo_with_call` + +error: redundant `test_` prefix in test function name + --> tests/ui/redundant_test_prefix.rs:112:8 + | +LL | fn test_f1() { + | ^^^^^^^ help: consider removing the `test_` prefix: `f1` + +error: redundant `test_` prefix in test function name + --> tests/ui/redundant_test_prefix.rs:117:8 + | +LL | fn test_f2() { + | ^^^^^^^ help: consider removing the `test_` prefix: `f2` + +error: redundant `test_` prefix in test function name + --> tests/ui/redundant_test_prefix.rs:122:8 + | +LL | fn test_f3() { + | ^^^^^^^ help: consider removing the `test_` prefix: `f3` + +error: redundant `test_` prefix in test function name + --> tests/ui/redundant_test_prefix.rs:127:8 + | +LL | fn test_f4() { + | ^^^^^^^ help: consider removing the `test_` prefix: `f4` + +error: redundant `test_` prefix in test function name + --> tests/ui/redundant_test_prefix.rs:132:8 + | +LL | fn test_f5() { + | ^^^^^^^ help: consider removing the `test_` prefix: `f5` + +error: redundant `test_` prefix in test function name + --> tests/ui/redundant_test_prefix.rs:137:8 + | +LL | fn test_f6() { + | ^^^^^^^ help: consider removing the `test_` prefix: `f6` + +error: aborting due to 19 previous errors + diff --git a/src/tools/clippy/tests/ui/redundant_test_prefix_noautofix.rs b/src/tools/clippy/tests/ui/redundant_test_prefix_noautofix.rs new file mode 100644 index 0000000000000..6ad5d011d8b71 --- /dev/null +++ b/src/tools/clippy/tests/ui/redundant_test_prefix_noautofix.rs @@ -0,0 +1,288 @@ +//@no-rustfix: name conflicts + +#![allow(dead_code)] +#![warn(clippy::redundant_test_prefix)] + +fn main() { + // Normal function, no redundant prefix. +} + +fn f1() { + // Normal function, no redundant prefix. +} + +fn test_f2() { + // Has prefix, but no `#[test]` attribute, ignore. +} + +#[test] +fn test_f3() { + //~^ redundant_test_prefix + + // Has prefix, has `#[test]` attribute. Not within a `#[cfg(test)]`. + // No collision with other functions, should emit warning. +} + +#[cfg(test)] +#[test] +fn test_f4() { + //~^ redundant_test_prefix + + // Has prefix, has `#[test]` attribute, within a `#[cfg(test)]`. + // No collision with other functions, should emit warning. +} + +fn f5() {} + +#[cfg(test)] +#[test] +fn test_f5() { + //~^ redundant_test_prefix + + // Has prefix, has `#[test]` attribute, within a `#[cfg(test)]`. + // Collision with existing function. +} + +mod m1 { + pub fn f6() {} + pub fn f7() {} +} + +#[cfg(test)] +#[test] +fn test_f6() { + //~^ redundant_test_prefix + + use m1::f6; + + f6(); + // Has prefix, has `#[test]` attribute, within a `#[cfg(test)]`. + // No collision, but has a function call that will result in recursion. +} + +#[cfg(test)] +#[test] +fn test_f8() { + //~^ redundant_test_prefix + + use m1::f7; + + f7(); + // Has prefix, has `#[test]` attribute, within a `#[cfg(test)]`. + // No collision, has function call, but it will not result in recursion. +} + +// Although there's no direct call of `f` in the test, name collision still exists, +// since all `m3` functions are imported and then `map` is used to call `f`. +mod m2 { + mod m3 { + pub fn f(_: i32) -> i32 { + 0 + } + } + + use m3::*; + + #[cfg(test)] + #[test] + fn test_f() { + //~^ redundant_test_prefix + let a = Some(3); + let _ = a.map(f); + } +} + +mod m3 { + fn test_m3_1() { + // Has prefix, but no `#[test]` attribute, ignore. + } + + #[test] + fn test_m3_2() { + //~^ redundant_test_prefix + + // Has prefix, has `#[test]` attribute. Not within a `#[cfg(test)]`. + // No collision with other functions, should emit warning. + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_foo() { + //~^ redundant_test_prefix + } + + #[test] + fn test_foo_with_call() { + //~^ redundant_test_prefix + + main(); + } + + #[test] + fn test_f1() { + //~^ redundant_test_prefix + } + + #[test] + fn test_f2() { + //~^ redundant_test_prefix + } + + #[test] + fn test_f3() { + //~^ redundant_test_prefix + } + + #[test] + fn test_f4() { + //~^ redundant_test_prefix + } + + #[test] + fn test_f5() { + //~^ redundant_test_prefix + } + + #[test] + fn test_f6() { + //~^ redundant_test_prefix + } + + #[test] + fn test_1() { + //~^ redundant_test_prefix + + // `1` is invalid function name, so suggestion to rename is emitted + } + + #[test] + fn test_const() { + //~^ redundant_test_prefix + + // `const` is reserved keyword, so suggestion to rename is emitted + } + + #[test] + fn test_async() { + //~^ redundant_test_prefix + + // `async` is reserved keyword, so suggestion to rename is emitted + } + + #[test] + fn test_yield() { + //~^ redundant_test_prefix + + // `yield` is reserved keyword for future use, so suggestion to rename is emitted + } + + #[test] + fn test_() { + //~^ redundant_test_prefix + + // `` is invalid function name, so suggestion to rename is emitted + } +} + +mod tests_no_annotations { + use super::*; + + #[test] + fn test_foo() { + //~^ redundant_test_prefix + } + + #[test] + fn test_foo_with_call() { + //~^ redundant_test_prefix + + main(); + } + + #[test] + fn test_f1() { + //~^ redundant_test_prefix + } + + #[test] + fn test_f2() { + //~^ redundant_test_prefix + } + + #[test] + fn test_f3() { + //~^ redundant_test_prefix + } + + #[test] + fn test_f4() { + //~^ redundant_test_prefix + } + + #[test] + fn test_f5() { + //~^ redundant_test_prefix + } + + #[test] + fn test_f6() { + //~^ redundant_test_prefix + } + + #[test] + fn test_1() { + //~^ redundant_test_prefix + + // `1` is invalid function name, so suggestion to rename is emitted + } + + #[test] + fn test_const() { + //~^ redundant_test_prefix + + // `const` is reserved keyword, so suggestion to rename is emitted + } + + #[test] + fn test_async() { + //~^ redundant_test_prefix + + // `async` is reserved keyword, so suggestion to rename is emitted + } + + #[test] + fn test_yield() { + //~^ redundant_test_prefix + + // `yield` is reserved keyword for future use, so suggestion to rename is emitted + } + + #[test] + fn test_() { + //~^ redundant_test_prefix + + // `` is invalid function name, so suggestion to rename is emitted + } +} + +// This test is inspired by real test in `clippy_utils/src/sugg.rs`. +// The `is_in_test_function()` checks whether any identifier within a given node's parents is +// marked with `#[test]` attribute. Thus flagging false positives when nested functions are +// prefixed with `test_`. Therefore `is_test_function()` has been defined in `clippy_utils`, +// allowing to select only functions that are immediately marked with `#[test]` annotation. +// +// This test case ensures that for such nested functions no error is emitted. +#[test] +fn not_op() { + fn test_not(foo: bool) { + assert!(foo); + } + + // Use helper function + test_not(true); + test_not(false); +} diff --git a/src/tools/clippy/tests/ui/redundant_test_prefix_noautofix.stderr b/src/tools/clippy/tests/ui/redundant_test_prefix_noautofix.stderr new file mode 100644 index 0000000000000..6440faf1b3c83 --- /dev/null +++ b/src/tools/clippy/tests/ui/redundant_test_prefix_noautofix.stderr @@ -0,0 +1,241 @@ +error: redundant `test_` prefix in test function name + --> tests/ui/redundant_test_prefix_noautofix.rs:19:4 + | +LL | fn test_f3() { + | ^^^^^^^ help: consider removing the `test_` prefix: `f3` + | + = note: `-D clippy::redundant-test-prefix` implied by `-D warnings` + = help: to override `-D warnings` add `#[allow(clippy::redundant_test_prefix)]` + +error: redundant `test_` prefix in test function name + --> tests/ui/redundant_test_prefix_noautofix.rs:28:4 + | +LL | fn test_f4() { + | ^^^^^^^ help: consider removing the `test_` prefix: `f4` + +error: redundant `test_` prefix in test function name + --> tests/ui/redundant_test_prefix_noautofix.rs:39:4 + | +LL | fn test_f5() { + | ^^^^^^^ + | +help: consider function renaming (just removing `test_` prefix will cause a name conflict) + | +LL - fn test_f5() { +LL + fn f5_works() { + | + +error: redundant `test_` prefix in test function name + --> tests/ui/redundant_test_prefix_noautofix.rs:53:4 + | +LL | fn test_f6() { + | ^^^^^^^ + | +help: consider function renaming (just removing `test_` prefix will cause a name conflict) + | +LL - fn test_f6() { +LL + fn f6_works() { + | + +error: redundant `test_` prefix in test function name + --> tests/ui/redundant_test_prefix_noautofix.rs:65:4 + | +LL | fn test_f8() { + | ^^^^^^^ help: consider removing the `test_` prefix: `f8` + +error: redundant `test_` prefix in test function name + --> tests/ui/redundant_test_prefix_noautofix.rs:88:8 + | +LL | fn test_f() { + | ^^^^^^ + | +help: consider function renaming (just removing `test_` prefix will cause a name conflict) + | +LL - fn test_f() { +LL + fn f_works() { + | + +error: redundant `test_` prefix in test function name + --> tests/ui/redundant_test_prefix_noautofix.rs:101:8 + | +LL | fn test_m3_2() { + | ^^^^^^^^^ help: consider removing the `test_` prefix: `m3_2` + +error: redundant `test_` prefix in test function name + --> tests/ui/redundant_test_prefix_noautofix.rs:114:8 + | +LL | fn test_foo() { + | ^^^^^^^^ help: consider removing the `test_` prefix: `foo` + +error: redundant `test_` prefix in test function name + --> tests/ui/redundant_test_prefix_noautofix.rs:119:8 + | +LL | fn test_foo_with_call() { + | ^^^^^^^^^^^^^^^^^^ help: consider removing the `test_` prefix: `foo_with_call` + +error: redundant `test_` prefix in test function name + --> tests/ui/redundant_test_prefix_noautofix.rs:126:8 + | +LL | fn test_f1() { + | ^^^^^^^ help: consider removing the `test_` prefix: `f1` + +error: redundant `test_` prefix in test function name + --> tests/ui/redundant_test_prefix_noautofix.rs:131:8 + | +LL | fn test_f2() { + | ^^^^^^^ help: consider removing the `test_` prefix: `f2` + +error: redundant `test_` prefix in test function name + --> tests/ui/redundant_test_prefix_noautofix.rs:136:8 + | +LL | fn test_f3() { + | ^^^^^^^ help: consider removing the `test_` prefix: `f3` + +error: redundant `test_` prefix in test function name + --> tests/ui/redundant_test_prefix_noautofix.rs:141:8 + | +LL | fn test_f4() { + | ^^^^^^^ help: consider removing the `test_` prefix: `f4` + +error: redundant `test_` prefix in test function name + --> tests/ui/redundant_test_prefix_noautofix.rs:146:8 + | +LL | fn test_f5() { + | ^^^^^^^ help: consider removing the `test_` prefix: `f5` + +error: redundant `test_` prefix in test function name + --> tests/ui/redundant_test_prefix_noautofix.rs:151:8 + | +LL | fn test_f6() { + | ^^^^^^^ help: consider removing the `test_` prefix: `f6` + +error: redundant `test_` prefix in test function name + --> tests/ui/redundant_test_prefix_noautofix.rs:156:8 + | +LL | fn test_1() { + | ^^^^^^ + | + = help: consider function renaming (just removing `test_` prefix will produce invalid function name) + +error: redundant `test_` prefix in test function name + --> tests/ui/redundant_test_prefix_noautofix.rs:163:8 + | +LL | fn test_const() { + | ^^^^^^^^^^ + | + = help: consider function renaming (just removing `test_` prefix will produce invalid function name) + +error: redundant `test_` prefix in test function name + --> tests/ui/redundant_test_prefix_noautofix.rs:170:8 + | +LL | fn test_async() { + | ^^^^^^^^^^ + | + = help: consider function renaming (just removing `test_` prefix will produce invalid function name) + +error: redundant `test_` prefix in test function name + --> tests/ui/redundant_test_prefix_noautofix.rs:177:8 + | +LL | fn test_yield() { + | ^^^^^^^^^^ + | + = help: consider function renaming (just removing `test_` prefix will produce invalid function name) + +error: redundant `test_` prefix in test function name + --> tests/ui/redundant_test_prefix_noautofix.rs:184:8 + | +LL | fn test_() { + | ^^^^^ + | + = help: consider function renaming (just removing `test_` prefix will produce invalid function name) + +error: redundant `test_` prefix in test function name + --> tests/ui/redundant_test_prefix_noautofix.rs:195:8 + | +LL | fn test_foo() { + | ^^^^^^^^ help: consider removing the `test_` prefix: `foo` + +error: redundant `test_` prefix in test function name + --> tests/ui/redundant_test_prefix_noautofix.rs:200:8 + | +LL | fn test_foo_with_call() { + | ^^^^^^^^^^^^^^^^^^ help: consider removing the `test_` prefix: `foo_with_call` + +error: redundant `test_` prefix in test function name + --> tests/ui/redundant_test_prefix_noautofix.rs:207:8 + | +LL | fn test_f1() { + | ^^^^^^^ help: consider removing the `test_` prefix: `f1` + +error: redundant `test_` prefix in test function name + --> tests/ui/redundant_test_prefix_noautofix.rs:212:8 + | +LL | fn test_f2() { + | ^^^^^^^ help: consider removing the `test_` prefix: `f2` + +error: redundant `test_` prefix in test function name + --> tests/ui/redundant_test_prefix_noautofix.rs:217:8 + | +LL | fn test_f3() { + | ^^^^^^^ help: consider removing the `test_` prefix: `f3` + +error: redundant `test_` prefix in test function name + --> tests/ui/redundant_test_prefix_noautofix.rs:222:8 + | +LL | fn test_f4() { + | ^^^^^^^ help: consider removing the `test_` prefix: `f4` + +error: redundant `test_` prefix in test function name + --> tests/ui/redundant_test_prefix_noautofix.rs:227:8 + | +LL | fn test_f5() { + | ^^^^^^^ help: consider removing the `test_` prefix: `f5` + +error: redundant `test_` prefix in test function name + --> tests/ui/redundant_test_prefix_noautofix.rs:232:8 + | +LL | fn test_f6() { + | ^^^^^^^ help: consider removing the `test_` prefix: `f6` + +error: redundant `test_` prefix in test function name + --> tests/ui/redundant_test_prefix_noautofix.rs:237:8 + | +LL | fn test_1() { + | ^^^^^^ + | + = help: consider function renaming (just removing `test_` prefix will produce invalid function name) + +error: redundant `test_` prefix in test function name + --> tests/ui/redundant_test_prefix_noautofix.rs:244:8 + | +LL | fn test_const() { + | ^^^^^^^^^^ + | + = help: consider function renaming (just removing `test_` prefix will produce invalid function name) + +error: redundant `test_` prefix in test function name + --> tests/ui/redundant_test_prefix_noautofix.rs:251:8 + | +LL | fn test_async() { + | ^^^^^^^^^^ + | + = help: consider function renaming (just removing `test_` prefix will produce invalid function name) + +error: redundant `test_` prefix in test function name + --> tests/ui/redundant_test_prefix_noautofix.rs:258:8 + | +LL | fn test_yield() { + | ^^^^^^^^^^ + | + = help: consider function renaming (just removing `test_` prefix will produce invalid function name) + +error: redundant `test_` prefix in test function name + --> tests/ui/redundant_test_prefix_noautofix.rs:265:8 + | +LL | fn test_() { + | ^^^^^ + | + = help: consider function renaming (just removing `test_` prefix will produce invalid function name) + +error: aborting due to 33 previous errors + diff --git a/src/tools/clippy/tests/ui/ref_option/ref_option_traits.all.stderr b/src/tools/clippy/tests/ui/ref_option/ref_option_traits.all.stderr index 030a9a28ec684..886bf2b034989 100644 --- a/src/tools/clippy/tests/ui/ref_option/ref_option_traits.all.stderr +++ b/src/tools/clippy/tests/ui/ref_option/ref_option_traits.all.stderr @@ -1,5 +1,5 @@ error: it is more idiomatic to use `Option<&T>` instead of `&Option` - --> tests/ui/ref_option/ref_option_traits.rs:10:5 + --> tests/ui/ref_option/ref_option_traits.rs:9:5 | LL | fn pub_trait_opt(&self, a: &Option>); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^----------------^^ @@ -10,7 +10,7 @@ LL | fn pub_trait_opt(&self, a: &Option>); = help: to override `-D warnings` add `#[allow(clippy::ref_option)]` error: it is more idiomatic to use `Option<&T>` instead of `&Option` - --> tests/ui/ref_option/ref_option_traits.rs:12:5 + --> tests/ui/ref_option/ref_option_traits.rs:11:5 | LL | fn pub_trait_ret(&self) -> &Option>; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^----------------^ @@ -18,7 +18,7 @@ LL | fn pub_trait_ret(&self) -> &Option>; | help: change this to: `Option<&Vec>` error: it is more idiomatic to use `Option<&T>` instead of `&Option` - --> tests/ui/ref_option/ref_option_traits.rs:17:5 + --> tests/ui/ref_option/ref_option_traits.rs:16:5 | LL | fn trait_opt(&self, a: &Option); | ^^^^^^^^^^^^^^^^^^^^^^^---------------^^ @@ -26,7 +26,7 @@ LL | fn trait_opt(&self, a: &Option); | help: change this to: `Option<&String>` error: it is more idiomatic to use `Option<&T>` instead of `&Option` - --> tests/ui/ref_option/ref_option_traits.rs:19:5 + --> tests/ui/ref_option/ref_option_traits.rs:18:5 | LL | fn trait_ret(&self) -> &Option; | ^^^^^^^^^^^^^^^^^^^^^^^---------------^ diff --git a/src/tools/clippy/tests/ui/ref_option/ref_option_traits.private.stderr b/src/tools/clippy/tests/ui/ref_option/ref_option_traits.private.stderr index 2837ee80fb2ef..cfab7fa5734c3 100644 --- a/src/tools/clippy/tests/ui/ref_option/ref_option_traits.private.stderr +++ b/src/tools/clippy/tests/ui/ref_option/ref_option_traits.private.stderr @@ -1,5 +1,5 @@ error: it is more idiomatic to use `Option<&T>` instead of `&Option` - --> tests/ui/ref_option/ref_option_traits.rs:17:5 + --> tests/ui/ref_option/ref_option_traits.rs:16:5 | LL | fn trait_opt(&self, a: &Option); | ^^^^^^^^^^^^^^^^^^^^^^^---------------^^ @@ -10,7 +10,7 @@ LL | fn trait_opt(&self, a: &Option); = help: to override `-D warnings` add `#[allow(clippy::ref_option)]` error: it is more idiomatic to use `Option<&T>` instead of `&Option` - --> tests/ui/ref_option/ref_option_traits.rs:19:5 + --> tests/ui/ref_option/ref_option_traits.rs:18:5 | LL | fn trait_ret(&self) -> &Option; | ^^^^^^^^^^^^^^^^^^^^^^^---------------^ diff --git a/src/tools/clippy/tests/ui/ref_option/ref_option_traits.rs b/src/tools/clippy/tests/ui/ref_option/ref_option_traits.rs index 811da2eb4d500..4c773e84f8da8 100644 --- a/src/tools/clippy/tests/ui/ref_option/ref_option_traits.rs +++ b/src/tools/clippy/tests/ui/ref_option/ref_option_traits.rs @@ -3,7 +3,6 @@ //@[private] rustc-env:CLIPPY_CONF_DIR=tests/ui/ref_option/private //@[all] rustc-env:CLIPPY_CONF_DIR=tests/ui/ref_option/all -#![allow(unused, clippy::all)] #![warn(clippy::ref_option)] pub trait PubTrait { diff --git a/src/tools/clippy/tests/ui/rename.fixed b/src/tools/clippy/tests/ui/rename.fixed index 7964047069689..acf7914d25365 100644 --- a/src/tools/clippy/tests/ui/rename.fixed +++ b/src/tools/clippy/tests/ui/rename.fixed @@ -13,8 +13,9 @@ #![allow(clippy::disallowed_methods)] #![allow(clippy::disallowed_types)] #![allow(clippy::mixed_read_write_in_expression)] -#![allow(clippy::manual_filter_map)] #![allow(clippy::manual_find_map)] +#![allow(clippy::manual_filter_map)] +#![allow(unpredictable_function_pointer_comparisons)] #![allow(clippy::useless_conversion)] #![allow(clippy::redundant_pattern_matching)] #![allow(clippy::match_result_ok)] @@ -29,7 +30,6 @@ #![allow(clippy::unwrap_used)] #![allow(clippy::panicking_overflow_checks)] #![allow(clippy::needless_borrow)] -#![allow(clippy::reversed_empty_ranges)] #![allow(clippy::single_char_add_str)] #![allow(clippy::module_name_repetitions)] #![allow(clippy::missing_const_for_thread_local)] @@ -39,11 +39,11 @@ #![allow(invalid_reference_casting)] #![allow(suspicious_double_ref_op)] #![allow(invalid_nan_comparisons)] +#![allow(invalid_null_arguments)] #![allow(double_negations)] #![allow(drop_bounds)] #![allow(dropping_copy_types)] #![allow(dropping_references)] -#![allow(unpredictable_function_pointer_comparisons)] #![allow(useless_ptr_null_checks)] #![allow(for_loops_over_fallibles)] #![allow(forgetting_copy_types)] @@ -62,6 +62,7 @@ #![allow(unknown_lints)] #![allow(unused_labels)] #![allow(ambiguous_wide_pointer_comparisons)] +#![allow(clippy::reversed_empty_ranges)] #![warn(clippy::almost_complete_range)] //~ ERROR: lint `clippy::almost_complete_letter_range` #![warn(clippy::disallowed_names)] //~ ERROR: lint `clippy::blacklisted_name` #![warn(clippy::blocks_in_conditions)] //~ ERROR: lint `clippy::block_in_if_condition_expr` @@ -74,8 +75,9 @@ #![warn(clippy::disallowed_methods)] //~ ERROR: lint `clippy::disallowed_method` #![warn(clippy::disallowed_types)] //~ ERROR: lint `clippy::disallowed_type` #![warn(clippy::mixed_read_write_in_expression)] //~ ERROR: lint `clippy::eval_order_dependence` -#![warn(clippy::manual_filter_map)] //~ ERROR: lint `clippy::filter_map` #![warn(clippy::manual_find_map)] //~ ERROR: lint `clippy::find_map` +#![warn(clippy::manual_filter_map)] //~ ERROR: lint `clippy::filter_map` +#![warn(unpredictable_function_pointer_comparisons)] //~ ERROR: lint `clippy::fn_address_comparisons` #![warn(clippy::useless_conversion)] //~ ERROR: lint `clippy::identity_conversion` #![warn(clippy::redundant_pattern_matching)] //~ ERROR: lint `clippy::if_let_redundant_pattern_matching` #![warn(clippy::match_result_ok)] //~ ERROR: lint `clippy::if_let_some_result` @@ -94,7 +96,6 @@ #![warn(clippy::expect_used)] //~ ERROR: lint `clippy::result_expect_used` #![warn(clippy::map_unwrap_or)] //~ ERROR: lint `clippy::result_map_unwrap_or_else` #![warn(clippy::unwrap_used)] //~ ERROR: lint `clippy::result_unwrap_used` -#![warn(clippy::reversed_empty_ranges)] //~ ERROR: lint `clippy::reverse_range_loop` #![warn(clippy::single_char_add_str)] //~ ERROR: lint `clippy::single_char_push_str` #![warn(clippy::module_name_repetitions)] //~ ERROR: lint `clippy::stutter` #![warn(clippy::missing_const_for_thread_local)] //~ ERROR: lint `clippy::thread_local_initializer_can_be_made_const` @@ -104,11 +105,11 @@ #![warn(invalid_reference_casting)] //~ ERROR: lint `clippy::cast_ref_to_mut` #![warn(suspicious_double_ref_op)] //~ ERROR: lint `clippy::clone_double_ref` #![warn(invalid_nan_comparisons)] //~ ERROR: lint `clippy::cmp_nan` +#![warn(invalid_null_arguments)] //~ ERROR: lint `clippy::invalid_null_ptr_usage` #![warn(double_negations)] //~ ERROR: lint `clippy::double_neg` #![warn(drop_bounds)] //~ ERROR: lint `clippy::drop_bounds` #![warn(dropping_copy_types)] //~ ERROR: lint `clippy::drop_copy` #![warn(dropping_references)] //~ ERROR: lint `clippy::drop_ref` -#![warn(unpredictable_function_pointer_comparisons)] //~ ERROR: lint `clippy::fn_address_comparisons` #![warn(useless_ptr_null_checks)] //~ ERROR: lint `clippy::fn_null_check` #![warn(for_loops_over_fallibles)] //~ ERROR: lint `clippy::for_loop_over_option` #![warn(for_loops_over_fallibles)] //~ ERROR: lint `clippy::for_loop_over_result` @@ -119,7 +120,6 @@ #![warn(invalid_atomic_ordering)] //~ ERROR: lint `clippy::invalid_atomic_ordering` #![warn(invalid_value)] //~ ERROR: lint `clippy::invalid_ref` #![warn(invalid_from_utf8_unchecked)] //~ ERROR: lint `clippy::invalid_utf8_in_unchecked` -#![warn(invalid_null_arguments)] //~ ERROR: lint `clippy::invalid_null_ptr_usage` #![warn(let_underscore_drop)] //~ ERROR: lint `clippy::let_underscore_drop` #![warn(unexpected_cfgs)] //~ ERROR: lint `clippy::maybe_misused_cfg` #![warn(enum_intrinsics_non_enums)] //~ ERROR: lint `clippy::mem_discriminant_non_enum` @@ -131,5 +131,6 @@ #![warn(unknown_lints)] //~ ERROR: lint `clippy::unknown_clippy_lints` #![warn(unused_labels)] //~ ERROR: lint `clippy::unused_label` #![warn(ambiguous_wide_pointer_comparisons)] //~ ERROR: lint `clippy::vtable_address_comparisons` +#![warn(clippy::reversed_empty_ranges)] //~ ERROR: lint `clippy::reverse_range_loop` fn main() {} diff --git a/src/tools/clippy/tests/ui/rename.rs b/src/tools/clippy/tests/ui/rename.rs index aa7b905b4b818..32641a684a44b 100644 --- a/src/tools/clippy/tests/ui/rename.rs +++ b/src/tools/clippy/tests/ui/rename.rs @@ -13,8 +13,9 @@ #![allow(clippy::disallowed_methods)] #![allow(clippy::disallowed_types)] #![allow(clippy::mixed_read_write_in_expression)] -#![allow(clippy::manual_filter_map)] #![allow(clippy::manual_find_map)] +#![allow(clippy::manual_filter_map)] +#![allow(unpredictable_function_pointer_comparisons)] #![allow(clippy::useless_conversion)] #![allow(clippy::redundant_pattern_matching)] #![allow(clippy::match_result_ok)] @@ -29,7 +30,6 @@ #![allow(clippy::unwrap_used)] #![allow(clippy::panicking_overflow_checks)] #![allow(clippy::needless_borrow)] -#![allow(clippy::reversed_empty_ranges)] #![allow(clippy::single_char_add_str)] #![allow(clippy::module_name_repetitions)] #![allow(clippy::missing_const_for_thread_local)] @@ -39,11 +39,11 @@ #![allow(invalid_reference_casting)] #![allow(suspicious_double_ref_op)] #![allow(invalid_nan_comparisons)] +#![allow(invalid_null_arguments)] #![allow(double_negations)] #![allow(drop_bounds)] #![allow(dropping_copy_types)] #![allow(dropping_references)] -#![allow(unpredictable_function_pointer_comparisons)] #![allow(useless_ptr_null_checks)] #![allow(for_loops_over_fallibles)] #![allow(forgetting_copy_types)] @@ -62,6 +62,7 @@ #![allow(unknown_lints)] #![allow(unused_labels)] #![allow(ambiguous_wide_pointer_comparisons)] +#![allow(clippy::reversed_empty_ranges)] #![warn(clippy::almost_complete_letter_range)] //~ ERROR: lint `clippy::almost_complete_letter_range` #![warn(clippy::blacklisted_name)] //~ ERROR: lint `clippy::blacklisted_name` #![warn(clippy::block_in_if_condition_expr)] //~ ERROR: lint `clippy::block_in_if_condition_expr` @@ -74,8 +75,9 @@ #![warn(clippy::disallowed_method)] //~ ERROR: lint `clippy::disallowed_method` #![warn(clippy::disallowed_type)] //~ ERROR: lint `clippy::disallowed_type` #![warn(clippy::eval_order_dependence)] //~ ERROR: lint `clippy::eval_order_dependence` -#![warn(clippy::filter_map)] //~ ERROR: lint `clippy::filter_map` #![warn(clippy::find_map)] //~ ERROR: lint `clippy::find_map` +#![warn(clippy::filter_map)] //~ ERROR: lint `clippy::filter_map` +#![warn(clippy::fn_address_comparisons)] //~ ERROR: lint `clippy::fn_address_comparisons` #![warn(clippy::identity_conversion)] //~ ERROR: lint `clippy::identity_conversion` #![warn(clippy::if_let_redundant_pattern_matching)] //~ ERROR: lint `clippy::if_let_redundant_pattern_matching` #![warn(clippy::if_let_some_result)] //~ ERROR: lint `clippy::if_let_some_result` @@ -94,7 +96,6 @@ #![warn(clippy::result_expect_used)] //~ ERROR: lint `clippy::result_expect_used` #![warn(clippy::result_map_unwrap_or_else)] //~ ERROR: lint `clippy::result_map_unwrap_or_else` #![warn(clippy::result_unwrap_used)] //~ ERROR: lint `clippy::result_unwrap_used` -#![warn(clippy::reverse_range_loop)] //~ ERROR: lint `clippy::reverse_range_loop` #![warn(clippy::single_char_push_str)] //~ ERROR: lint `clippy::single_char_push_str` #![warn(clippy::stutter)] //~ ERROR: lint `clippy::stutter` #![warn(clippy::thread_local_initializer_can_be_made_const)] //~ ERROR: lint `clippy::thread_local_initializer_can_be_made_const` @@ -104,11 +105,11 @@ #![warn(clippy::cast_ref_to_mut)] //~ ERROR: lint `clippy::cast_ref_to_mut` #![warn(clippy::clone_double_ref)] //~ ERROR: lint `clippy::clone_double_ref` #![warn(clippy::cmp_nan)] //~ ERROR: lint `clippy::cmp_nan` +#![warn(clippy::invalid_null_ptr_usage)] //~ ERROR: lint `clippy::invalid_null_ptr_usage` #![warn(clippy::double_neg)] //~ ERROR: lint `clippy::double_neg` #![warn(clippy::drop_bounds)] //~ ERROR: lint `clippy::drop_bounds` #![warn(clippy::drop_copy)] //~ ERROR: lint `clippy::drop_copy` #![warn(clippy::drop_ref)] //~ ERROR: lint `clippy::drop_ref` -#![warn(clippy::fn_address_comparisons)] //~ ERROR: lint `clippy::fn_address_comparisons` #![warn(clippy::fn_null_check)] //~ ERROR: lint `clippy::fn_null_check` #![warn(clippy::for_loop_over_option)] //~ ERROR: lint `clippy::for_loop_over_option` #![warn(clippy::for_loop_over_result)] //~ ERROR: lint `clippy::for_loop_over_result` @@ -119,7 +120,6 @@ #![warn(clippy::invalid_atomic_ordering)] //~ ERROR: lint `clippy::invalid_atomic_ordering` #![warn(clippy::invalid_ref)] //~ ERROR: lint `clippy::invalid_ref` #![warn(clippy::invalid_utf8_in_unchecked)] //~ ERROR: lint `clippy::invalid_utf8_in_unchecked` -#![warn(clippy::invalid_null_ptr_usage)] //~ ERROR: lint `clippy::invalid_null_ptr_usage` #![warn(clippy::let_underscore_drop)] //~ ERROR: lint `clippy::let_underscore_drop` #![warn(clippy::maybe_misused_cfg)] //~ ERROR: lint `clippy::maybe_misused_cfg` #![warn(clippy::mem_discriminant_non_enum)] //~ ERROR: lint `clippy::mem_discriminant_non_enum` @@ -131,5 +131,6 @@ #![warn(clippy::unknown_clippy_lints)] //~ ERROR: lint `clippy::unknown_clippy_lints` #![warn(clippy::unused_label)] //~ ERROR: lint `clippy::unused_label` #![warn(clippy::vtable_address_comparisons)] //~ ERROR: lint `clippy::vtable_address_comparisons` +#![warn(clippy::reverse_range_loop)] //~ ERROR: lint `clippy::reverse_range_loop` fn main() {} diff --git a/src/tools/clippy/tests/ui/rename.stderr b/src/tools/clippy/tests/ui/rename.stderr index b3c88167c1115..e9d2debff91a3 100644 --- a/src/tools/clippy/tests/ui/rename.stderr +++ b/src/tools/clippy/tests/ui/rename.stderr @@ -1,5 +1,5 @@ error: lint `clippy::almost_complete_letter_range` has been renamed to `clippy::almost_complete_range` - --> tests/ui/rename.rs:65:9 + --> tests/ui/rename.rs:66:9 | LL | #![warn(clippy::almost_complete_letter_range)] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::almost_complete_range` @@ -8,347 +8,341 @@ LL | #![warn(clippy::almost_complete_letter_range)] = help: to override `-D warnings` add `#[allow(renamed_and_removed_lints)]` error: lint `clippy::blacklisted_name` has been renamed to `clippy::disallowed_names` - --> tests/ui/rename.rs:66:9 + --> tests/ui/rename.rs:67:9 | LL | #![warn(clippy::blacklisted_name)] | ^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::disallowed_names` error: lint `clippy::block_in_if_condition_expr` has been renamed to `clippy::blocks_in_conditions` - --> tests/ui/rename.rs:67:9 + --> tests/ui/rename.rs:68:9 | LL | #![warn(clippy::block_in_if_condition_expr)] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::blocks_in_conditions` error: lint `clippy::block_in_if_condition_stmt` has been renamed to `clippy::blocks_in_conditions` - --> tests/ui/rename.rs:68:9 + --> tests/ui/rename.rs:69:9 | LL | #![warn(clippy::block_in_if_condition_stmt)] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::blocks_in_conditions` error: lint `clippy::blocks_in_if_conditions` has been renamed to `clippy::blocks_in_conditions` - --> tests/ui/rename.rs:69:9 + --> tests/ui/rename.rs:70:9 | LL | #![warn(clippy::blocks_in_if_conditions)] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::blocks_in_conditions` error: lint `clippy::box_vec` has been renamed to `clippy::box_collection` - --> tests/ui/rename.rs:70:9 + --> tests/ui/rename.rs:71:9 | LL | #![warn(clippy::box_vec)] | ^^^^^^^^^^^^^^^ help: use the new name: `clippy::box_collection` error: lint `clippy::const_static_lifetime` has been renamed to `clippy::redundant_static_lifetimes` - --> tests/ui/rename.rs:71:9 + --> tests/ui/rename.rs:72:9 | LL | #![warn(clippy::const_static_lifetime)] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::redundant_static_lifetimes` error: lint `clippy::cyclomatic_complexity` has been renamed to `clippy::cognitive_complexity` - --> tests/ui/rename.rs:72:9 + --> tests/ui/rename.rs:73:9 | LL | #![warn(clippy::cyclomatic_complexity)] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::cognitive_complexity` error: lint `clippy::derive_hash_xor_eq` has been renamed to `clippy::derived_hash_with_manual_eq` - --> tests/ui/rename.rs:73:9 + --> tests/ui/rename.rs:74:9 | LL | #![warn(clippy::derive_hash_xor_eq)] | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::derived_hash_with_manual_eq` error: lint `clippy::disallowed_method` has been renamed to `clippy::disallowed_methods` - --> tests/ui/rename.rs:74:9 + --> tests/ui/rename.rs:75:9 | LL | #![warn(clippy::disallowed_method)] | ^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::disallowed_methods` error: lint `clippy::disallowed_type` has been renamed to `clippy::disallowed_types` - --> tests/ui/rename.rs:75:9 + --> tests/ui/rename.rs:76:9 | LL | #![warn(clippy::disallowed_type)] | ^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::disallowed_types` error: lint `clippy::eval_order_dependence` has been renamed to `clippy::mixed_read_write_in_expression` - --> tests/ui/rename.rs:76:9 + --> tests/ui/rename.rs:77:9 | LL | #![warn(clippy::eval_order_dependence)] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::mixed_read_write_in_expression` +error: lint `clippy::find_map` has been renamed to `clippy::manual_find_map` + --> tests/ui/rename.rs:78:9 + | +LL | #![warn(clippy::find_map)] + | ^^^^^^^^^^^^^^^^ help: use the new name: `clippy::manual_find_map` + error: lint `clippy::filter_map` has been renamed to `clippy::manual_filter_map` - --> tests/ui/rename.rs:77:9 + --> tests/ui/rename.rs:79:9 | LL | #![warn(clippy::filter_map)] | ^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::manual_filter_map` -error: lint `clippy::find_map` has been renamed to `clippy::manual_find_map` - --> tests/ui/rename.rs:78:9 +error: lint `clippy::fn_address_comparisons` has been renamed to `unpredictable_function_pointer_comparisons` + --> tests/ui/rename.rs:80:9 | -LL | #![warn(clippy::find_map)] - | ^^^^^^^^^^^^^^^^ help: use the new name: `clippy::manual_find_map` +LL | #![warn(clippy::fn_address_comparisons)] + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `unpredictable_function_pointer_comparisons` error: lint `clippy::identity_conversion` has been renamed to `clippy::useless_conversion` - --> tests/ui/rename.rs:79:9 + --> tests/ui/rename.rs:81:9 | LL | #![warn(clippy::identity_conversion)] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::useless_conversion` error: lint `clippy::if_let_redundant_pattern_matching` has been renamed to `clippy::redundant_pattern_matching` - --> tests/ui/rename.rs:80:9 + --> tests/ui/rename.rs:82:9 | LL | #![warn(clippy::if_let_redundant_pattern_matching)] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::redundant_pattern_matching` error: lint `clippy::if_let_some_result` has been renamed to `clippy::match_result_ok` - --> tests/ui/rename.rs:81:9 + --> tests/ui/rename.rs:83:9 | LL | #![warn(clippy::if_let_some_result)] | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::match_result_ok` error: lint `clippy::incorrect_clone_impl_on_copy_type` has been renamed to `clippy::non_canonical_clone_impl` - --> tests/ui/rename.rs:82:9 + --> tests/ui/rename.rs:84:9 | LL | #![warn(clippy::incorrect_clone_impl_on_copy_type)] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::non_canonical_clone_impl` error: lint `clippy::incorrect_partial_ord_impl_on_ord_type` has been renamed to `clippy::non_canonical_partial_ord_impl` - --> tests/ui/rename.rs:83:9 + --> tests/ui/rename.rs:85:9 | LL | #![warn(clippy::incorrect_partial_ord_impl_on_ord_type)] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::non_canonical_partial_ord_impl` error: lint `clippy::integer_arithmetic` has been renamed to `clippy::arithmetic_side_effects` - --> tests/ui/rename.rs:84:9 + --> tests/ui/rename.rs:86:9 | LL | #![warn(clippy::integer_arithmetic)] | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::arithmetic_side_effects` error: lint `clippy::logic_bug` has been renamed to `clippy::overly_complex_bool_expr` - --> tests/ui/rename.rs:85:9 + --> tests/ui/rename.rs:87:9 | LL | #![warn(clippy::logic_bug)] | ^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::overly_complex_bool_expr` error: lint `clippy::new_without_default_derive` has been renamed to `clippy::new_without_default` - --> tests/ui/rename.rs:86:9 + --> tests/ui/rename.rs:88:9 | LL | #![warn(clippy::new_without_default_derive)] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::new_without_default` error: lint `clippy::option_and_then_some` has been renamed to `clippy::bind_instead_of_map` - --> tests/ui/rename.rs:87:9 + --> tests/ui/rename.rs:89:9 | LL | #![warn(clippy::option_and_then_some)] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::bind_instead_of_map` error: lint `clippy::option_expect_used` has been renamed to `clippy::expect_used` - --> tests/ui/rename.rs:88:9 + --> tests/ui/rename.rs:90:9 | LL | #![warn(clippy::option_expect_used)] | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::expect_used` error: lint `clippy::option_map_unwrap_or` has been renamed to `clippy::map_unwrap_or` - --> tests/ui/rename.rs:89:9 + --> tests/ui/rename.rs:91:9 | LL | #![warn(clippy::option_map_unwrap_or)] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::map_unwrap_or` error: lint `clippy::option_map_unwrap_or_else` has been renamed to `clippy::map_unwrap_or` - --> tests/ui/rename.rs:90:9 + --> tests/ui/rename.rs:92:9 | LL | #![warn(clippy::option_map_unwrap_or_else)] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::map_unwrap_or` error: lint `clippy::option_unwrap_used` has been renamed to `clippy::unwrap_used` - --> tests/ui/rename.rs:91:9 + --> tests/ui/rename.rs:93:9 | LL | #![warn(clippy::option_unwrap_used)] | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::unwrap_used` error: lint `clippy::overflow_check_conditional` has been renamed to `clippy::panicking_overflow_checks` - --> tests/ui/rename.rs:92:9 + --> tests/ui/rename.rs:94:9 | LL | #![warn(clippy::overflow_check_conditional)] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::panicking_overflow_checks` error: lint `clippy::ref_in_deref` has been renamed to `clippy::needless_borrow` - --> tests/ui/rename.rs:93:9 + --> tests/ui/rename.rs:95:9 | LL | #![warn(clippy::ref_in_deref)] | ^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::needless_borrow` error: lint `clippy::result_expect_used` has been renamed to `clippy::expect_used` - --> tests/ui/rename.rs:94:9 + --> tests/ui/rename.rs:96:9 | LL | #![warn(clippy::result_expect_used)] | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::expect_used` error: lint `clippy::result_map_unwrap_or_else` has been renamed to `clippy::map_unwrap_or` - --> tests/ui/rename.rs:95:9 + --> tests/ui/rename.rs:97:9 | LL | #![warn(clippy::result_map_unwrap_or_else)] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::map_unwrap_or` error: lint `clippy::result_unwrap_used` has been renamed to `clippy::unwrap_used` - --> tests/ui/rename.rs:96:9 + --> tests/ui/rename.rs:98:9 | LL | #![warn(clippy::result_unwrap_used)] | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::unwrap_used` -error: lint `clippy::reverse_range_loop` has been renamed to `clippy::reversed_empty_ranges` - --> tests/ui/rename.rs:97:9 - | -LL | #![warn(clippy::reverse_range_loop)] - | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::reversed_empty_ranges` - error: lint `clippy::single_char_push_str` has been renamed to `clippy::single_char_add_str` - --> tests/ui/rename.rs:98:9 + --> tests/ui/rename.rs:99:9 | LL | #![warn(clippy::single_char_push_str)] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::single_char_add_str` error: lint `clippy::stutter` has been renamed to `clippy::module_name_repetitions` - --> tests/ui/rename.rs:99:9 + --> tests/ui/rename.rs:100:9 | LL | #![warn(clippy::stutter)] | ^^^^^^^^^^^^^^^ help: use the new name: `clippy::module_name_repetitions` error: lint `clippy::thread_local_initializer_can_be_made_const` has been renamed to `clippy::missing_const_for_thread_local` - --> tests/ui/rename.rs:100:9 + --> tests/ui/rename.rs:101:9 | LL | #![warn(clippy::thread_local_initializer_can_be_made_const)] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::missing_const_for_thread_local` error: lint `clippy::to_string_in_display` has been renamed to `clippy::recursive_format_impl` - --> tests/ui/rename.rs:101:9 + --> tests/ui/rename.rs:102:9 | LL | #![warn(clippy::to_string_in_display)] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::recursive_format_impl` error: lint `clippy::unwrap_or_else_default` has been renamed to `clippy::unwrap_or_default` - --> tests/ui/rename.rs:102:9 + --> tests/ui/rename.rs:103:9 | LL | #![warn(clippy::unwrap_or_else_default)] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::unwrap_or_default` error: lint `clippy::zero_width_space` has been renamed to `clippy::invisible_characters` - --> tests/ui/rename.rs:103:9 + --> tests/ui/rename.rs:104:9 | LL | #![warn(clippy::zero_width_space)] | ^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::invisible_characters` error: lint `clippy::cast_ref_to_mut` has been renamed to `invalid_reference_casting` - --> tests/ui/rename.rs:104:9 + --> tests/ui/rename.rs:105:9 | LL | #![warn(clippy::cast_ref_to_mut)] | ^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `invalid_reference_casting` error: lint `clippy::clone_double_ref` has been renamed to `suspicious_double_ref_op` - --> tests/ui/rename.rs:105:9 + --> tests/ui/rename.rs:106:9 | LL | #![warn(clippy::clone_double_ref)] | ^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `suspicious_double_ref_op` error: lint `clippy::cmp_nan` has been renamed to `invalid_nan_comparisons` - --> tests/ui/rename.rs:106:9 + --> tests/ui/rename.rs:107:9 | LL | #![warn(clippy::cmp_nan)] | ^^^^^^^^^^^^^^^ help: use the new name: `invalid_nan_comparisons` +error: lint `clippy::invalid_null_ptr_usage` has been renamed to `invalid_null_arguments` + --> tests/ui/rename.rs:108:9 + | +LL | #![warn(clippy::invalid_null_ptr_usage)] + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `invalid_null_arguments` + error: lint `clippy::double_neg` has been renamed to `double_negations` - --> tests/ui/rename.rs:107:9 + --> tests/ui/rename.rs:109:9 | LL | #![warn(clippy::double_neg)] | ^^^^^^^^^^^^^^^^^^ help: use the new name: `double_negations` error: lint `clippy::drop_bounds` has been renamed to `drop_bounds` - --> tests/ui/rename.rs:108:9 + --> tests/ui/rename.rs:110:9 | LL | #![warn(clippy::drop_bounds)] | ^^^^^^^^^^^^^^^^^^^ help: use the new name: `drop_bounds` error: lint `clippy::drop_copy` has been renamed to `dropping_copy_types` - --> tests/ui/rename.rs:109:9 + --> tests/ui/rename.rs:111:9 | LL | #![warn(clippy::drop_copy)] | ^^^^^^^^^^^^^^^^^ help: use the new name: `dropping_copy_types` error: lint `clippy::drop_ref` has been renamed to `dropping_references` - --> tests/ui/rename.rs:110:9 + --> tests/ui/rename.rs:112:9 | LL | #![warn(clippy::drop_ref)] | ^^^^^^^^^^^^^^^^ help: use the new name: `dropping_references` -error: lint `clippy::fn_address_comparisons` has been renamed to `unpredictable_function_pointer_comparisons` - --> tests/ui/rename.rs:111:9 - | -LL | #![warn(clippy::fn_address_comparisons)] - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `unpredictable_function_pointer_comparisons` - error: lint `clippy::fn_null_check` has been renamed to `useless_ptr_null_checks` - --> tests/ui/rename.rs:112:9 + --> tests/ui/rename.rs:113:9 | LL | #![warn(clippy::fn_null_check)] | ^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `useless_ptr_null_checks` error: lint `clippy::for_loop_over_option` has been renamed to `for_loops_over_fallibles` - --> tests/ui/rename.rs:113:9 + --> tests/ui/rename.rs:114:9 | LL | #![warn(clippy::for_loop_over_option)] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `for_loops_over_fallibles` error: lint `clippy::for_loop_over_result` has been renamed to `for_loops_over_fallibles` - --> tests/ui/rename.rs:114:9 + --> tests/ui/rename.rs:115:9 | LL | #![warn(clippy::for_loop_over_result)] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `for_loops_over_fallibles` error: lint `clippy::for_loops_over_fallibles` has been renamed to `for_loops_over_fallibles` - --> tests/ui/rename.rs:115:9 + --> tests/ui/rename.rs:116:9 | LL | #![warn(clippy::for_loops_over_fallibles)] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `for_loops_over_fallibles` error: lint `clippy::forget_copy` has been renamed to `forgetting_copy_types` - --> tests/ui/rename.rs:116:9 + --> tests/ui/rename.rs:117:9 | LL | #![warn(clippy::forget_copy)] | ^^^^^^^^^^^^^^^^^^^ help: use the new name: `forgetting_copy_types` error: lint `clippy::forget_ref` has been renamed to `forgetting_references` - --> tests/ui/rename.rs:117:9 + --> tests/ui/rename.rs:118:9 | LL | #![warn(clippy::forget_ref)] | ^^^^^^^^^^^^^^^^^^ help: use the new name: `forgetting_references` error: lint `clippy::into_iter_on_array` has been renamed to `array_into_iter` - --> tests/ui/rename.rs:118:9 + --> tests/ui/rename.rs:119:9 | LL | #![warn(clippy::into_iter_on_array)] | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `array_into_iter` error: lint `clippy::invalid_atomic_ordering` has been renamed to `invalid_atomic_ordering` - --> tests/ui/rename.rs:119:9 + --> tests/ui/rename.rs:120:9 | LL | #![warn(clippy::invalid_atomic_ordering)] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `invalid_atomic_ordering` error: lint `clippy::invalid_ref` has been renamed to `invalid_value` - --> tests/ui/rename.rs:120:9 + --> tests/ui/rename.rs:121:9 | LL | #![warn(clippy::invalid_ref)] | ^^^^^^^^^^^^^^^^^^^ help: use the new name: `invalid_value` error: lint `clippy::invalid_utf8_in_unchecked` has been renamed to `invalid_from_utf8_unchecked` - --> tests/ui/rename.rs:121:9 + --> tests/ui/rename.rs:122:9 | LL | #![warn(clippy::invalid_utf8_in_unchecked)] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `invalid_from_utf8_unchecked` -error: lint `clippy::invalid_null_ptr_usage` has been renamed to `invalid_null_arguments` - --> tests/ui/rename.rs:122:9 - | -LL | #![warn(clippy::invalid_null_ptr_usage)] - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `invalid_null_arguments` - error: lint `clippy::let_underscore_drop` has been renamed to `let_underscore_drop` --> tests/ui/rename.rs:123:9 | @@ -415,5 +409,11 @@ error: lint `clippy::vtable_address_comparisons` has been renamed to `ambiguous_ LL | #![warn(clippy::vtable_address_comparisons)] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `ambiguous_wide_pointer_comparisons` +error: lint `clippy::reverse_range_loop` has been renamed to `clippy::reversed_empty_ranges` + --> tests/ui/rename.rs:134:9 + | +LL | #![warn(clippy::reverse_range_loop)] + | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the new name: `clippy::reversed_empty_ranges` + error: aborting due to 69 previous errors diff --git a/src/tools/clippy/tests/ui/repr_packed_without_abi.stderr b/src/tools/clippy/tests/ui/repr_packed_without_abi.stderr index d1078b3e8e48e..f688e4bc744ae 100644 --- a/src/tools/clippy/tests/ui/repr_packed_without_abi.stderr +++ b/src/tools/clippy/tests/ui/repr_packed_without_abi.stderr @@ -11,7 +11,7 @@ LL | | } | |_^ | = warning: unqualified `#[repr(packed)]` defaults to `#[repr(Rust, packed)]`, which has no stable ABI - = help: qualify the desired ABI explicity via `#[repr(C, packed)]` or `#[repr(Rust, packed)]` + = help: qualify the desired ABI explicitly via `#[repr(C, packed)]` or `#[repr(Rust, packed)]` note: the lint level is defined here --> tests/ui/repr_packed_without_abi.rs:1:9 | @@ -31,7 +31,7 @@ LL | | } | |_^ | = warning: unqualified `#[repr(packed)]` defaults to `#[repr(Rust, packed)]`, which has no stable ABI - = help: qualify the desired ABI explicity via `#[repr(C, packed)]` or `#[repr(Rust, packed)]` + = help: qualify the desired ABI explicitly via `#[repr(C, packed)]` or `#[repr(Rust, packed)]` error: aborting due to 2 previous errors diff --git a/src/tools/clippy/tests/ui/result_unit_error_no_std.rs b/src/tools/clippy/tests/ui/result_unit_error_no_std.rs index 8a1849b8490ab..a64e8414d78fe 100644 --- a/src/tools/clippy/tests/ui/result_unit_error_no_std.rs +++ b/src/tools/clippy/tests/ui/result_unit_error_no_std.rs @@ -14,7 +14,7 @@ pub fn returns_unit_error_lint() -> Result { Err(()) } -#[no_mangle] +#[unsafe(no_mangle)] extern "C" fn main(_argc: core::ffi::c_int, _argv: *const *const u8) -> core::ffi::c_int { 0 } diff --git a/src/tools/clippy/tests/ui/return_and_then.stderr b/src/tools/clippy/tests/ui/return_and_then.stderr index cc611c3dba679..a7acbe7b3401c 100644 --- a/src/tools/clippy/tests/ui/return_and_then.stderr +++ b/src/tools/clippy/tests/ui/return_and_then.stderr @@ -1,4 +1,4 @@ -error: use the question mark operator instead of an `and_then` call +error: use the `?` operator instead of an `and_then` call --> tests/ui/return_and_then.rs:5:9 | LL | / opt.and_then(|n| { @@ -20,7 +20,7 @@ LL + ret += n; LL + if n > 1 { Some(ret) } else { None } | -error: use the question mark operator instead of an `and_then` call +error: use the `?` operator instead of an `and_then` call --> tests/ui/return_and_then.rs:14:9 | LL | opt.and_then(|n| test_opt_block(Some(n))) @@ -32,7 +32,7 @@ LL ~ let n = opt?; LL + test_opt_block(Some(n)) | -error: use the question mark operator instead of an `and_then` call +error: use the `?` operator instead of an `and_then` call --> tests/ui/return_and_then.rs:19:9 | LL | gen_option(1).and_then(|n| test_opt_block(Some(n))) @@ -44,7 +44,7 @@ LL ~ let n = gen_option(1)?; LL + test_opt_block(Some(n)) | -error: use the question mark operator instead of an `and_then` call +error: use the `?` operator instead of an `and_then` call --> tests/ui/return_and_then.rs:24:9 | LL | opt.and_then(|n| if n > 1 { Ok(n + 1) } else { Err(n) }) @@ -56,7 +56,7 @@ LL ~ let n = opt?; LL + if n > 1 { Ok(n + 1) } else { Err(n) } | -error: use the question mark operator instead of an `and_then` call +error: use the `?` operator instead of an `and_then` call --> tests/ui/return_and_then.rs:29:9 | LL | opt.and_then(|n| test_res_block(Ok(n))) @@ -68,7 +68,7 @@ LL ~ let n = opt?; LL + test_res_block(Ok(n)) | -error: use the question mark operator instead of an `and_then` call +error: use the `?` operator instead of an `and_then` call --> tests/ui/return_and_then.rs:35:9 | LL | Some("").and_then(|x| if x.len() > 2 { Some(3) } else { None }) @@ -80,7 +80,7 @@ LL ~ let x = Some("")?; LL + if x.len() > 2 { Some(3) } else { None } | -error: use the question mark operator instead of an `and_then` call +error: use the `?` operator instead of an `and_then` call --> tests/ui/return_and_then.rs:41:9 | LL | / Some(match (vec![1, 2, 3], vec![1, 2, 4]) { diff --git a/src/tools/clippy/tests/ui/search_is_some_fixable_none.fixed b/src/tools/clippy/tests/ui/search_is_some_fixable_none.fixed index 847e5140d3e65..cc4dbc919d81d 100644 --- a/src/tools/clippy/tests/ui/search_is_some_fixable_none.fixed +++ b/src/tools/clippy/tests/ui/search_is_some_fixable_none.fixed @@ -214,10 +214,9 @@ mod issue7392 { } fn ref_bindings() { - let _ = ![&(&1, 2), &(&3, 4), &(&5, 4)].iter().any(|(&x, y)| x == *y); - //~^ search_is_some - let _ = ![&(&1, 2), &(&3, 4), &(&5, 4)].iter().any(|(&x, y)| x == *y); - //~^ search_is_some + let _ = ![&(&1, 2), &(&3, 4), &(&5, 4)] + //~^ search_is_some + .iter().any(|&&(&x, ref y)| x == *y); } fn test_string_1(s: &str) -> bool { diff --git a/src/tools/clippy/tests/ui/search_is_some_fixable_none.rs b/src/tools/clippy/tests/ui/search_is_some_fixable_none.rs index e976d12600cc1..fa31a9ddedc66 100644 --- a/src/tools/clippy/tests/ui/search_is_some_fixable_none.rs +++ b/src/tools/clippy/tests/ui/search_is_some_fixable_none.rs @@ -221,10 +221,11 @@ mod issue7392 { } fn ref_bindings() { - let _ = [&(&1, 2), &(&3, 4), &(&5, 4)].iter().find(|(&x, y)| x == *y).is_none(); - //~^ search_is_some - let _ = [&(&1, 2), &(&3, 4), &(&5, 4)].iter().find(|&(&x, y)| x == *y).is_none(); - //~^ search_is_some + let _ = [&(&1, 2), &(&3, 4), &(&5, 4)] + //~^ search_is_some + .iter() + .find(|&&&(&x, ref y)| x == *y) + .is_none(); } fn test_string_1(s: &str) -> bool { diff --git a/src/tools/clippy/tests/ui/search_is_some_fixable_none.stderr b/src/tools/clippy/tests/ui/search_is_some_fixable_none.stderr index ccc17025222d9..b079cf7ea361b 100644 --- a/src/tools/clippy/tests/ui/search_is_some_fixable_none.stderr +++ b/src/tools/clippy/tests/ui/search_is_some_fixable_none.stderr @@ -248,116 +248,122 @@ LL | let _ = vfoo.iter().find(|v| v.by_ref(&v.bar)).is_none(); error: called `is_none()` after searching an `Iterator` with `find` --> tests/ui/search_is_some_fixable_none.rs:224:17 | -LL | let _ = [&(&1, 2), &(&3, 4), &(&5, 4)].iter().find(|(&x, y)| x == *y).is_none(); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `![&(&1, 2), &(&3, 4), &(&5, 4)].iter().any(|(&x, y)| x == *y)` - -error: called `is_none()` after searching an `Iterator` with `find` - --> tests/ui/search_is_some_fixable_none.rs:226:17 +LL | let _ = [&(&1, 2), &(&3, 4), &(&5, 4)] + | _________________^ +LL | | +LL | | .iter() +LL | | .find(|&&&(&x, ref y)| x == *y) +LL | | .is_none(); + | |______________________^ + | +help: consider using + | +LL ~ let _ = ![&(&1, 2), &(&3, 4), &(&5, 4)] +LL + +LL ~ .iter().any(|&&(&x, ref y)| x == *y); | -LL | let _ = [&(&1, 2), &(&3, 4), &(&5, 4)].iter().find(|&(&x, y)| x == *y).is_none(); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `![&(&1, 2), &(&3, 4), &(&5, 4)].iter().any(|(&x, y)| x == *y)` error: called `is_none()` after searching an `Iterator` with `find` - --> tests/ui/search_is_some_fixable_none.rs:246:17 + --> tests/ui/search_is_some_fixable_none.rs:247:17 | LL | let _ = v.iter().find(|s| s[0].is_empty()).is_none(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `!v.iter().any(|s| s[0].is_empty())` error: called `is_none()` after searching an `Iterator` with `find` - --> tests/ui/search_is_some_fixable_none.rs:248:17 + --> tests/ui/search_is_some_fixable_none.rs:249:17 | LL | let _ = v.iter().find(|s| test_string_1(&s[0])).is_none(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `!v.iter().any(|s| test_string_1(&s[0]))` error: called `is_none()` after searching an `Iterator` with `find` - --> tests/ui/search_is_some_fixable_none.rs:258:17 + --> tests/ui/search_is_some_fixable_none.rs:259:17 | LL | let _ = v.iter().find(|fp| fp.field.is_power_of_two()).is_none(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `!v.iter().any(|fp| fp.field.is_power_of_two())` error: called `is_none()` after searching an `Iterator` with `find` - --> tests/ui/search_is_some_fixable_none.rs:260:17 + --> tests/ui/search_is_some_fixable_none.rs:261:17 | LL | let _ = v.iter().find(|fp| test_u32_1(fp.field)).is_none(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `!v.iter().any(|fp| test_u32_1(fp.field))` error: called `is_none()` after searching an `Iterator` with `find` - --> tests/ui/search_is_some_fixable_none.rs:262:17 + --> tests/ui/search_is_some_fixable_none.rs:263:17 | LL | let _ = v.iter().find(|fp| test_u32_2(*fp.field)).is_none(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `!v.iter().any(|fp| test_u32_2(*fp.field))` error: called `is_none()` after searching an `Iterator` with `find` - --> tests/ui/search_is_some_fixable_none.rs:279:17 + --> tests/ui/search_is_some_fixable_none.rs:280:17 | LL | let _ = v.iter().find(|x| **x == 42).is_none(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `!v.iter().any(|x| *x == 42)` error: called `is_none()` after searching an `Iterator` with `find` - --> tests/ui/search_is_some_fixable_none.rs:281:17 + --> tests/ui/search_is_some_fixable_none.rs:282:17 | LL | Foo.bar(v.iter().find(|x| **x == 42).is_none()); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `!v.iter().any(|x| *x == 42)` error: called `is_none()` after searching an `Iterator` with `find` - --> tests/ui/search_is_some_fixable_none.rs:287:9 + --> tests/ui/search_is_some_fixable_none.rs:288:9 | LL | v.iter().find(|x| **x == 42).is_none().then(computations); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `(!v.iter().any(|x| *x == 42))` error: called `is_none()` after searching an `Iterator` with `find` - --> tests/ui/search_is_some_fixable_none.rs:293:9 + --> tests/ui/search_is_some_fixable_none.rs:294:9 | LL | v.iter().find(|x| **x == 42).is_none().then_some(0); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `(!v.iter().any(|x| *x == 42))` error: called `is_none()` after calling `find()` on a string - --> tests/ui/search_is_some_fixable_none.rs:299:17 + --> tests/ui/search_is_some_fixable_none.rs:300:17 | LL | let _ = s.find("world").is_none(); | ^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `!s.contains("world")` error: called `is_none()` after calling `find()` on a string - --> tests/ui/search_is_some_fixable_none.rs:301:17 + --> tests/ui/search_is_some_fixable_none.rs:302:17 | LL | Foo.bar(s.find("world").is_none()); | ^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `!s.contains("world")` error: called `is_none()` after calling `find()` on a string - --> tests/ui/search_is_some_fixable_none.rs:304:17 + --> tests/ui/search_is_some_fixable_none.rs:305:17 | LL | let _ = s.find("world").is_none(); | ^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `!s.contains("world")` error: called `is_none()` after calling `find()` on a string - --> tests/ui/search_is_some_fixable_none.rs:306:17 + --> tests/ui/search_is_some_fixable_none.rs:307:17 | LL | Foo.bar(s.find("world").is_none()); | ^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `!s.contains("world")` error: called `is_none()` after calling `find()` on a string - --> tests/ui/search_is_some_fixable_none.rs:312:17 + --> tests/ui/search_is_some_fixable_none.rs:313:17 | LL | let _ = s.find("world").is_none().then(computations); | ^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `(!s.contains("world"))` error: called `is_none()` after calling `find()` on a string - --> tests/ui/search_is_some_fixable_none.rs:315:17 + --> tests/ui/search_is_some_fixable_none.rs:316:17 | LL | let _ = s.find("world").is_none().then(computations); | ^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `(!s.contains("world"))` error: called `is_none()` after calling `find()` on a string - --> tests/ui/search_is_some_fixable_none.rs:321:17 + --> tests/ui/search_is_some_fixable_none.rs:322:17 | LL | let _ = s.find("world").is_none().then_some(0); | ^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `(!s.contains("world"))` error: called `is_none()` after calling `find()` on a string - --> tests/ui/search_is_some_fixable_none.rs:324:17 + --> tests/ui/search_is_some_fixable_none.rs:325:17 | LL | let _ = s.find("world").is_none().then_some(0); | ^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `(!s.contains("world"))` -error: aborting due to 55 previous errors +error: aborting due to 54 previous errors diff --git a/src/tools/clippy/tests/ui/search_is_some_fixable_none_2021.fixed b/src/tools/clippy/tests/ui/search_is_some_fixable_none_2021.fixed new file mode 100644 index 0000000000000..6e15244901c28 --- /dev/null +++ b/src/tools/clippy/tests/ui/search_is_some_fixable_none_2021.fixed @@ -0,0 +1,14 @@ +//@edition: 2021 +#![warn(clippy::search_is_some)] + +fn main() { + fn ref_bindings() { + let _ = ![&(&1, 2), &(&3, 4), &(&5, 4)].iter().any(|(&x, y)| x == *y); + //~^ search_is_some + let _ = ![&(&1, 2), &(&3, 4), &(&5, 4)].iter().any(|(&x, y)| x == *y); + //~^ search_is_some + let _ = ![&(&1, 2), &(&3, 4), &(&5, 4)] + //~^ search_is_some + .iter().any(|&&(&x, ref y)| x == *y); + } +} diff --git a/src/tools/clippy/tests/ui/search_is_some_fixable_none_2021.rs b/src/tools/clippy/tests/ui/search_is_some_fixable_none_2021.rs new file mode 100644 index 0000000000000..4b1db3f9fc328 --- /dev/null +++ b/src/tools/clippy/tests/ui/search_is_some_fixable_none_2021.rs @@ -0,0 +1,16 @@ +//@edition: 2021 +#![warn(clippy::search_is_some)] + +fn main() { + fn ref_bindings() { + let _ = [&(&1, 2), &(&3, 4), &(&5, 4)].iter().find(|(&x, y)| x == *y).is_none(); + //~^ search_is_some + let _ = [&(&1, 2), &(&3, 4), &(&5, 4)].iter().find(|&(&x, y)| x == *y).is_none(); + //~^ search_is_some + let _ = [&(&1, 2), &(&3, 4), &(&5, 4)] + //~^ search_is_some + .iter() + .find(|&&&(&x, ref y)| x == *y) + .is_none(); + } +} diff --git a/src/tools/clippy/tests/ui/search_is_some_fixable_none_2021.stderr b/src/tools/clippy/tests/ui/search_is_some_fixable_none_2021.stderr new file mode 100644 index 0000000000000..af93be1a70719 --- /dev/null +++ b/src/tools/clippy/tests/ui/search_is_some_fixable_none_2021.stderr @@ -0,0 +1,35 @@ +error: called `is_none()` after searching an `Iterator` with `find` + --> tests/ui/search_is_some_fixable_none_2021.rs:6:17 + | +LL | let _ = [&(&1, 2), &(&3, 4), &(&5, 4)].iter().find(|(&x, y)| x == *y).is_none(); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `![&(&1, 2), &(&3, 4), &(&5, 4)].iter().any(|(&x, y)| x == *y)` + | + = note: `-D clippy::search-is-some` implied by `-D warnings` + = help: to override `-D warnings` add `#[allow(clippy::search_is_some)]` + +error: called `is_none()` after searching an `Iterator` with `find` + --> tests/ui/search_is_some_fixable_none_2021.rs:8:17 + | +LL | let _ = [&(&1, 2), &(&3, 4), &(&5, 4)].iter().find(|&(&x, y)| x == *y).is_none(); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `![&(&1, 2), &(&3, 4), &(&5, 4)].iter().any(|(&x, y)| x == *y)` + +error: called `is_none()` after searching an `Iterator` with `find` + --> tests/ui/search_is_some_fixable_none_2021.rs:10:17 + | +LL | let _ = [&(&1, 2), &(&3, 4), &(&5, 4)] + | _________________^ +LL | | +LL | | .iter() +LL | | .find(|&&&(&x, ref y)| x == *y) +LL | | .is_none(); + | |______________________^ + | +help: consider using + | +LL ~ let _ = ![&(&1, 2), &(&3, 4), &(&5, 4)] +LL + +LL ~ .iter().any(|&&(&x, ref y)| x == *y); + | + +error: aborting due to 3 previous errors + diff --git a/src/tools/clippy/tests/ui/search_is_some_fixable_some.fixed b/src/tools/clippy/tests/ui/search_is_some_fixable_some.fixed index 05e88b8528f15..42b39b33b575c 100644 --- a/src/tools/clippy/tests/ui/search_is_some_fixable_some.fixed +++ b/src/tools/clippy/tests/ui/search_is_some_fixable_some.fixed @@ -214,10 +214,9 @@ mod issue7392 { } fn ref_bindings() { - let _ = [&(&1, 2), &(&3, 4), &(&5, 4)].iter().any(|(&x, y)| x == *y); - //~^ search_is_some - let _ = [&(&1, 2), &(&3, 4), &(&5, 4)].iter().any(|(&x, y)| x == *y); - //~^ search_is_some + let _ = [&(&1, 2), &(&3, 4), &(&5, 4)] + .iter() + .any(|&&(&x, ref y)| x == *y); } fn test_string_1(s: &str) -> bool { diff --git a/src/tools/clippy/tests/ui/search_is_some_fixable_some.rs b/src/tools/clippy/tests/ui/search_is_some_fixable_some.rs index caab816f24361..ca4f4d941cb2f 100644 --- a/src/tools/clippy/tests/ui/search_is_some_fixable_some.rs +++ b/src/tools/clippy/tests/ui/search_is_some_fixable_some.rs @@ -220,10 +220,11 @@ mod issue7392 { } fn ref_bindings() { - let _ = [&(&1, 2), &(&3, 4), &(&5, 4)].iter().find(|(&x, y)| x == *y).is_some(); - //~^ search_is_some - let _ = [&(&1, 2), &(&3, 4), &(&5, 4)].iter().find(|&(&x, y)| x == *y).is_some(); - //~^ search_is_some + let _ = [&(&1, 2), &(&3, 4), &(&5, 4)] + .iter() + .find(|&&&(&x, ref y)| x == *y) + //~^ search_is_some + .is_some(); } fn test_string_1(s: &str) -> bool { diff --git a/src/tools/clippy/tests/ui/search_is_some_fixable_some.stderr b/src/tools/clippy/tests/ui/search_is_some_fixable_some.stderr index af719b78831a1..8291f48d43c4d 100644 --- a/src/tools/clippy/tests/ui/search_is_some_fixable_some.stderr +++ b/src/tools/clippy/tests/ui/search_is_some_fixable_some.stderr @@ -227,70 +227,67 @@ LL | let _ = vfoo.iter().find(|v| v.by_ref(&v.bar)).is_some(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `any(|v| v.by_ref(&v.bar))` error: called `is_some()` after searching an `Iterator` with `find` - --> tests/ui/search_is_some_fixable_some.rs:223:55 + --> tests/ui/search_is_some_fixable_some.rs:225:14 | -LL | let _ = [&(&1, 2), &(&3, 4), &(&5, 4)].iter().find(|(&x, y)| x == *y).is_some(); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `any(|(&x, y)| x == *y)` - -error: called `is_some()` after searching an `Iterator` with `find` - --> tests/ui/search_is_some_fixable_some.rs:225:55 - | -LL | let _ = [&(&1, 2), &(&3, 4), &(&5, 4)].iter().find(|&(&x, y)| x == *y).is_some(); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `any(|(&x, y)| x == *y)` +LL | .find(|&&&(&x, ref y)| x == *y) + | ______________^ +LL | | +LL | | .is_some(); + | |______________________^ help: consider using: `any(|&&(&x, ref y)| x == *y)` error: called `is_some()` after searching an `Iterator` with `find` - --> tests/ui/search_is_some_fixable_some.rs:245:26 + --> tests/ui/search_is_some_fixable_some.rs:246:26 | LL | let _ = v.iter().find(|s| s[0].is_empty()).is_some(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `any(|s| s[0].is_empty())` error: called `is_some()` after searching an `Iterator` with `find` - --> tests/ui/search_is_some_fixable_some.rs:247:26 + --> tests/ui/search_is_some_fixable_some.rs:248:26 | LL | let _ = v.iter().find(|s| test_string_1(&s[0])).is_some(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `any(|s| test_string_1(&s[0]))` error: called `is_some()` after searching an `Iterator` with `find` - --> tests/ui/search_is_some_fixable_some.rs:257:26 + --> tests/ui/search_is_some_fixable_some.rs:258:26 | LL | let _ = v.iter().find(|fp| fp.field.is_power_of_two()).is_some(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `any(|fp| fp.field.is_power_of_two())` error: called `is_some()` after searching an `Iterator` with `find` - --> tests/ui/search_is_some_fixable_some.rs:259:26 + --> tests/ui/search_is_some_fixable_some.rs:260:26 | LL | let _ = v.iter().find(|fp| test_u32_1(fp.field)).is_some(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `any(|fp| test_u32_1(fp.field))` error: called `is_some()` after searching an `Iterator` with `find` - --> tests/ui/search_is_some_fixable_some.rs:261:26 + --> tests/ui/search_is_some_fixable_some.rs:262:26 | LL | let _ = v.iter().find(|fp| test_u32_2(*fp.field)).is_some(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `any(|fp| test_u32_2(*fp.field))` error: called `is_some()` after searching an `Iterator` with `find` - --> tests/ui/search_is_some_fixable_some.rs:277:18 + --> tests/ui/search_is_some_fixable_some.rs:278:18 | LL | v.iter().find(|x: &&u32| func(x)).is_some() | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `any(|x: &u32| func(&x))` error: called `is_some()` after searching an `Iterator` with `find` - --> tests/ui/search_is_some_fixable_some.rs:287:26 + --> tests/ui/search_is_some_fixable_some.rs:288:26 | LL | let _ = v.iter().find(|x: &&u32| arg_no_deref_impl(x)).is_some(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `any(|x: &u32| arg_no_deref_impl(&x))` error: called `is_some()` after searching an `Iterator` with `find` - --> tests/ui/search_is_some_fixable_some.rs:291:26 + --> tests/ui/search_is_some_fixable_some.rs:292:26 | LL | let _ = v.iter().find(|x: &&u32| arg_no_deref_dyn(x)).is_some(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `any(|x: &u32| arg_no_deref_dyn(&x))` error: called `is_some()` after searching an `Iterator` with `find` - --> tests/ui/search_is_some_fixable_some.rs:295:26 + --> tests/ui/search_is_some_fixable_some.rs:296:26 | LL | let _ = v.iter().find(|x: &&u32| (*arg_no_deref_dyn)(x)).is_some(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `any(|x: &u32| (*arg_no_deref_dyn)(&x))` -error: aborting due to 47 previous errors +error: aborting due to 46 previous errors diff --git a/src/tools/clippy/tests/ui/search_is_some_fixable_some_2021.fixed b/src/tools/clippy/tests/ui/search_is_some_fixable_some_2021.fixed new file mode 100644 index 0000000000000..d2b05db562a0b --- /dev/null +++ b/src/tools/clippy/tests/ui/search_is_some_fixable_some_2021.fixed @@ -0,0 +1,11 @@ +//@edition: 2021 +#![warn(clippy::search_is_some)] + +fn main() { + fn ref_bindings() { + let _ = [&(&1, 2), &(&3, 4), &(&5, 4)].iter().any(|(&x, y)| x == *y); + //~^ search_is_some + let _ = [&(&1, 2), &(&3, 4), &(&5, 4)].iter().any(|(&x, y)| x == *y); + //~^ search_is_some + } +} diff --git a/src/tools/clippy/tests/ui/search_is_some_fixable_some_2021.rs b/src/tools/clippy/tests/ui/search_is_some_fixable_some_2021.rs new file mode 100644 index 0000000000000..c3f5ef769dab7 --- /dev/null +++ b/src/tools/clippy/tests/ui/search_is_some_fixable_some_2021.rs @@ -0,0 +1,11 @@ +//@edition: 2021 +#![warn(clippy::search_is_some)] + +fn main() { + fn ref_bindings() { + let _ = [&(&1, 2), &(&3, 4), &(&5, 4)].iter().find(|(&x, y)| x == *y).is_some(); + //~^ search_is_some + let _ = [&(&1, 2), &(&3, 4), &(&5, 4)].iter().find(|&(&x, y)| x == *y).is_some(); + //~^ search_is_some + } +} diff --git a/src/tools/clippy/tests/ui/search_is_some_fixable_some_2021.stderr b/src/tools/clippy/tests/ui/search_is_some_fixable_some_2021.stderr new file mode 100644 index 0000000000000..91d9540e6fcf3 --- /dev/null +++ b/src/tools/clippy/tests/ui/search_is_some_fixable_some_2021.stderr @@ -0,0 +1,17 @@ +error: called `is_some()` after searching an `Iterator` with `find` + --> tests/ui/search_is_some_fixable_some_2021.rs:6:55 + | +LL | let _ = [&(&1, 2), &(&3, 4), &(&5, 4)].iter().find(|(&x, y)| x == *y).is_some(); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `any(|(&x, y)| x == *y)` + | + = note: `-D clippy::search-is-some` implied by `-D warnings` + = help: to override `-D warnings` add `#[allow(clippy::search_is_some)]` + +error: called `is_some()` after searching an `Iterator` with `find` + --> tests/ui/search_is_some_fixable_some_2021.rs:8:55 + | +LL | let _ = [&(&1, 2), &(&3, 4), &(&5, 4)].iter().find(|&(&x, y)| x == *y).is_some(); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `any(|(&x, y)| x == *y)` + +error: aborting due to 2 previous errors + diff --git a/src/tools/clippy/tests/ui/shadow.rs b/src/tools/clippy/tests/ui/shadow.rs index 7d503a1cf6c17..05009b2ddd416 100644 --- a/src/tools/clippy/tests/ui/shadow.rs +++ b/src/tools/clippy/tests/ui/shadow.rs @@ -167,4 +167,19 @@ fn issue13795(value: Issue13795) { //~^ shadow_same } +fn issue14377() { + let a; + let b; + (a, b) = (0, 1); + + struct S { + c: i32, + d: i32, + } + + let c; + let d; + S { c, d } = S { c: 1, d: 2 }; +} + fn main() {} diff --git a/src/tools/clippy/tests/ui/should_impl_trait/corner_cases.rs b/src/tools/clippy/tests/ui/should_impl_trait/corner_cases.rs index 4ec0f02d66451..53704f59cb999 100644 --- a/src/tools/clippy/tests/ui/should_impl_trait/corner_cases.rs +++ b/src/tools/clippy/tests/ui/should_impl_trait/corner_cases.rs @@ -1,6 +1,5 @@ //@ check-pass -#![warn(clippy::all, clippy::pedantic)] #![allow( clippy::missing_errors_doc, clippy::needless_pass_by_value, diff --git a/src/tools/clippy/tests/ui/should_impl_trait/method_list_1.rs b/src/tools/clippy/tests/ui/should_impl_trait/method_list_1.rs index 87b3a7d2fa0cf..e8de0e04c0c4c 100644 --- a/src/tools/clippy/tests/ui/should_impl_trait/method_list_1.rs +++ b/src/tools/clippy/tests/ui/should_impl_trait/method_list_1.rs @@ -1,4 +1,3 @@ -#![warn(clippy::all, clippy::pedantic)] #![allow( clippy::missing_errors_doc, clippy::needless_pass_by_value, diff --git a/src/tools/clippy/tests/ui/should_impl_trait/method_list_1.stderr b/src/tools/clippy/tests/ui/should_impl_trait/method_list_1.stderr index 8738b61192a3c..5609d6a21a360 100644 --- a/src/tools/clippy/tests/ui/should_impl_trait/method_list_1.stderr +++ b/src/tools/clippy/tests/ui/should_impl_trait/method_list_1.stderr @@ -1,5 +1,5 @@ error: method `add` can be confused for the standard trait method `std::ops::Add::add` - --> tests/ui/should_impl_trait/method_list_1.rs:25:5 + --> tests/ui/should_impl_trait/method_list_1.rs:24:5 | LL | / pub fn add(self, other: T) -> T { LL | | @@ -13,7 +13,7 @@ LL | | } = help: to override `-D warnings` add `#[allow(clippy::should_implement_trait)]` error: method `as_mut` can be confused for the standard trait method `std::convert::AsMut::as_mut` - --> tests/ui/should_impl_trait/method_list_1.rs:31:5 + --> tests/ui/should_impl_trait/method_list_1.rs:30:5 | LL | / pub fn as_mut(&mut self) -> &mut T { LL | | @@ -25,7 +25,7 @@ LL | | } = help: consider implementing the trait `std::convert::AsMut` or choosing a less ambiguous method name error: method `as_ref` can be confused for the standard trait method `std::convert::AsRef::as_ref` - --> tests/ui/should_impl_trait/method_list_1.rs:37:5 + --> tests/ui/should_impl_trait/method_list_1.rs:36:5 | LL | / pub fn as_ref(&self) -> &T { LL | | @@ -37,7 +37,7 @@ LL | | } = help: consider implementing the trait `std::convert::AsRef` or choosing a less ambiguous method name error: method `bitand` can be confused for the standard trait method `std::ops::BitAnd::bitand` - --> tests/ui/should_impl_trait/method_list_1.rs:43:5 + --> tests/ui/should_impl_trait/method_list_1.rs:42:5 | LL | / pub fn bitand(self, rhs: T) -> T { LL | | @@ -49,7 +49,7 @@ LL | | } = help: consider implementing the trait `std::ops::BitAnd` or choosing a less ambiguous method name error: method `bitor` can be confused for the standard trait method `std::ops::BitOr::bitor` - --> tests/ui/should_impl_trait/method_list_1.rs:49:5 + --> tests/ui/should_impl_trait/method_list_1.rs:48:5 | LL | / pub fn bitor(self, rhs: Self) -> Self { LL | | @@ -61,7 +61,7 @@ LL | | } = help: consider implementing the trait `std::ops::BitOr` or choosing a less ambiguous method name error: method `bitxor` can be confused for the standard trait method `std::ops::BitXor::bitxor` - --> tests/ui/should_impl_trait/method_list_1.rs:55:5 + --> tests/ui/should_impl_trait/method_list_1.rs:54:5 | LL | / pub fn bitxor(self, rhs: Self) -> Self { LL | | @@ -73,7 +73,7 @@ LL | | } = help: consider implementing the trait `std::ops::BitXor` or choosing a less ambiguous method name error: method `borrow` can be confused for the standard trait method `std::borrow::Borrow::borrow` - --> tests/ui/should_impl_trait/method_list_1.rs:61:5 + --> tests/ui/should_impl_trait/method_list_1.rs:60:5 | LL | / pub fn borrow(&self) -> &str { LL | | @@ -85,7 +85,7 @@ LL | | } = help: consider implementing the trait `std::borrow::Borrow` or choosing a less ambiguous method name error: method `borrow_mut` can be confused for the standard trait method `std::borrow::BorrowMut::borrow_mut` - --> tests/ui/should_impl_trait/method_list_1.rs:67:5 + --> tests/ui/should_impl_trait/method_list_1.rs:66:5 | LL | / pub fn borrow_mut(&mut self) -> &mut str { LL | | @@ -97,7 +97,7 @@ LL | | } = help: consider implementing the trait `std::borrow::BorrowMut` or choosing a less ambiguous method name error: method `clone` can be confused for the standard trait method `std::clone::Clone::clone` - --> tests/ui/should_impl_trait/method_list_1.rs:73:5 + --> tests/ui/should_impl_trait/method_list_1.rs:72:5 | LL | / pub fn clone(&self) -> Self { LL | | @@ -109,7 +109,7 @@ LL | | } = help: consider implementing the trait `std::clone::Clone` or choosing a less ambiguous method name error: method `cmp` can be confused for the standard trait method `std::cmp::Ord::cmp` - --> tests/ui/should_impl_trait/method_list_1.rs:79:5 + --> tests/ui/should_impl_trait/method_list_1.rs:78:5 | LL | / pub fn cmp(&self, other: &Self) -> Self { LL | | @@ -121,7 +121,7 @@ LL | | } = help: consider implementing the trait `std::cmp::Ord` or choosing a less ambiguous method name error: method `default` can be confused for the standard trait method `std::default::Default::default` - --> tests/ui/should_impl_trait/method_list_1.rs:85:5 + --> tests/ui/should_impl_trait/method_list_1.rs:84:5 | LL | / pub fn default() -> Self { LL | | @@ -133,7 +133,7 @@ LL | | } = help: consider implementing the trait `std::default::Default` or choosing a less ambiguous method name error: method `deref` can be confused for the standard trait method `std::ops::Deref::deref` - --> tests/ui/should_impl_trait/method_list_1.rs:91:5 + --> tests/ui/should_impl_trait/method_list_1.rs:90:5 | LL | / pub fn deref(&self) -> &Self { LL | | @@ -145,7 +145,7 @@ LL | | } = help: consider implementing the trait `std::ops::Deref` or choosing a less ambiguous method name error: method `deref_mut` can be confused for the standard trait method `std::ops::DerefMut::deref_mut` - --> tests/ui/should_impl_trait/method_list_1.rs:97:5 + --> tests/ui/should_impl_trait/method_list_1.rs:96:5 | LL | / pub fn deref_mut(&mut self) -> &mut Self { LL | | @@ -157,7 +157,7 @@ LL | | } = help: consider implementing the trait `std::ops::DerefMut` or choosing a less ambiguous method name error: method `div` can be confused for the standard trait method `std::ops::Div::div` - --> tests/ui/should_impl_trait/method_list_1.rs:103:5 + --> tests/ui/should_impl_trait/method_list_1.rs:102:5 | LL | / pub fn div(self, rhs: Self) -> Self { LL | | @@ -169,7 +169,7 @@ LL | | } = help: consider implementing the trait `std::ops::Div` or choosing a less ambiguous method name error: method `drop` can be confused for the standard trait method `std::ops::Drop::drop` - --> tests/ui/should_impl_trait/method_list_1.rs:109:5 + --> tests/ui/should_impl_trait/method_list_1.rs:108:5 | LL | / pub fn drop(&mut self) { LL | | diff --git a/src/tools/clippy/tests/ui/should_impl_trait/method_list_2.rs b/src/tools/clippy/tests/ui/should_impl_trait/method_list_2.rs index f0c4d4f15cb63..1f25ab3938a3d 100644 --- a/src/tools/clippy/tests/ui/should_impl_trait/method_list_2.rs +++ b/src/tools/clippy/tests/ui/should_impl_trait/method_list_2.rs @@ -1,4 +1,3 @@ -#![warn(clippy::all, clippy::pedantic)] #![allow( clippy::missing_errors_doc, clippy::needless_pass_by_value, diff --git a/src/tools/clippy/tests/ui/should_impl_trait/method_list_2.stderr b/src/tools/clippy/tests/ui/should_impl_trait/method_list_2.stderr index 85de74337020d..0f5818507779f 100644 --- a/src/tools/clippy/tests/ui/should_impl_trait/method_list_2.stderr +++ b/src/tools/clippy/tests/ui/should_impl_trait/method_list_2.stderr @@ -1,5 +1,5 @@ error: method `eq` can be confused for the standard trait method `std::cmp::PartialEq::eq` - --> tests/ui/should_impl_trait/method_list_2.rs:26:5 + --> tests/ui/should_impl_trait/method_list_2.rs:25:5 | LL | / pub fn eq(&self, other: &Self) -> bool { LL | | @@ -13,7 +13,7 @@ LL | | } = help: to override `-D warnings` add `#[allow(clippy::should_implement_trait)]` error: method `from_iter` can be confused for the standard trait method `std::iter::FromIterator::from_iter` - --> tests/ui/should_impl_trait/method_list_2.rs:32:5 + --> tests/ui/should_impl_trait/method_list_2.rs:31:5 | LL | / pub fn from_iter(iter: T) -> Self { LL | | @@ -25,7 +25,7 @@ LL | | } = help: consider implementing the trait `std::iter::FromIterator` or choosing a less ambiguous method name error: method `from_str` can be confused for the standard trait method `std::str::FromStr::from_str` - --> tests/ui/should_impl_trait/method_list_2.rs:38:5 + --> tests/ui/should_impl_trait/method_list_2.rs:37:5 | LL | / pub fn from_str(s: &str) -> Result { LL | | @@ -37,7 +37,7 @@ LL | | } = help: consider implementing the trait `std::str::FromStr` or choosing a less ambiguous method name error: method `hash` can be confused for the standard trait method `std::hash::Hash::hash` - --> tests/ui/should_impl_trait/method_list_2.rs:44:5 + --> tests/ui/should_impl_trait/method_list_2.rs:43:5 | LL | / pub fn hash(&self, state: &mut T) { LL | | @@ -49,7 +49,7 @@ LL | | } = help: consider implementing the trait `std::hash::Hash` or choosing a less ambiguous method name error: method `index` can be confused for the standard trait method `std::ops::Index::index` - --> tests/ui/should_impl_trait/method_list_2.rs:50:5 + --> tests/ui/should_impl_trait/method_list_2.rs:49:5 | LL | / pub fn index(&self, index: usize) -> &Self { LL | | @@ -61,7 +61,7 @@ LL | | } = help: consider implementing the trait `std::ops::Index` or choosing a less ambiguous method name error: method `index_mut` can be confused for the standard trait method `std::ops::IndexMut::index_mut` - --> tests/ui/should_impl_trait/method_list_2.rs:56:5 + --> tests/ui/should_impl_trait/method_list_2.rs:55:5 | LL | / pub fn index_mut(&mut self, index: usize) -> &mut Self { LL | | @@ -73,7 +73,7 @@ LL | | } = help: consider implementing the trait `std::ops::IndexMut` or choosing a less ambiguous method name error: method `into_iter` can be confused for the standard trait method `std::iter::IntoIterator::into_iter` - --> tests/ui/should_impl_trait/method_list_2.rs:62:5 + --> tests/ui/should_impl_trait/method_list_2.rs:61:5 | LL | / pub fn into_iter(self) -> Self { LL | | @@ -85,7 +85,7 @@ LL | | } = help: consider implementing the trait `std::iter::IntoIterator` or choosing a less ambiguous method name error: method `mul` can be confused for the standard trait method `std::ops::Mul::mul` - --> tests/ui/should_impl_trait/method_list_2.rs:68:5 + --> tests/ui/should_impl_trait/method_list_2.rs:67:5 | LL | / pub fn mul(self, rhs: Self) -> Self { LL | | @@ -97,7 +97,7 @@ LL | | } = help: consider implementing the trait `std::ops::Mul` or choosing a less ambiguous method name error: method `neg` can be confused for the standard trait method `std::ops::Neg::neg` - --> tests/ui/should_impl_trait/method_list_2.rs:74:5 + --> tests/ui/should_impl_trait/method_list_2.rs:73:5 | LL | / pub fn neg(self) -> Self { LL | | @@ -109,7 +109,7 @@ LL | | } = help: consider implementing the trait `std::ops::Neg` or choosing a less ambiguous method name error: method `next` can be confused for the standard trait method `std::iter::Iterator::next` - --> tests/ui/should_impl_trait/method_list_2.rs:80:5 + --> tests/ui/should_impl_trait/method_list_2.rs:79:5 | LL | / pub fn next(&mut self) -> Option { LL | | @@ -121,7 +121,7 @@ LL | | } = help: consider implementing the trait `std::iter::Iterator` or choosing a less ambiguous method name error: method `not` can be confused for the standard trait method `std::ops::Not::not` - --> tests/ui/should_impl_trait/method_list_2.rs:86:5 + --> tests/ui/should_impl_trait/method_list_2.rs:85:5 | LL | / pub fn not(self) -> Self { LL | | @@ -133,7 +133,7 @@ LL | | } = help: consider implementing the trait `std::ops::Not` or choosing a less ambiguous method name error: method `rem` can be confused for the standard trait method `std::ops::Rem::rem` - --> tests/ui/should_impl_trait/method_list_2.rs:92:5 + --> tests/ui/should_impl_trait/method_list_2.rs:91:5 | LL | / pub fn rem(self, rhs: Self) -> Self { LL | | @@ -145,7 +145,7 @@ LL | | } = help: consider implementing the trait `std::ops::Rem` or choosing a less ambiguous method name error: method `shl` can be confused for the standard trait method `std::ops::Shl::shl` - --> tests/ui/should_impl_trait/method_list_2.rs:98:5 + --> tests/ui/should_impl_trait/method_list_2.rs:97:5 | LL | / pub fn shl(self, rhs: Self) -> Self { LL | | @@ -157,7 +157,7 @@ LL | | } = help: consider implementing the trait `std::ops::Shl` or choosing a less ambiguous method name error: method `shr` can be confused for the standard trait method `std::ops::Shr::shr` - --> tests/ui/should_impl_trait/method_list_2.rs:104:5 + --> tests/ui/should_impl_trait/method_list_2.rs:103:5 | LL | / pub fn shr(self, rhs: Self) -> Self { LL | | @@ -169,7 +169,7 @@ LL | | } = help: consider implementing the trait `std::ops::Shr` or choosing a less ambiguous method name error: method `sub` can be confused for the standard trait method `std::ops::Sub::sub` - --> tests/ui/should_impl_trait/method_list_2.rs:110:5 + --> tests/ui/should_impl_trait/method_list_2.rs:109:5 | LL | / pub fn sub(self, rhs: Self) -> Self { LL | | diff --git a/src/tools/clippy/tests/ui/single_call_fn.rs b/src/tools/clippy/tests/ui/single_call_fn.rs index c1cc4032bec99..a1ecd7bc166cf 100644 --- a/src/tools/clippy/tests/ui/single_call_fn.rs +++ b/src/tools/clippy/tests/ui/single_call_fn.rs @@ -94,7 +94,7 @@ trait Trait { //~^ single_call_fn fn foo(&self); } -extern "C" { +unsafe extern "C" { // test some kind of foreign item fn rand() -> std::ffi::c_int; } diff --git a/src/tools/clippy/tests/ui/single_match.fixed b/src/tools/clippy/tests/ui/single_match.fixed index 0e198ec79344a..db5107600ee6d 100644 --- a/src/tools/clippy/tests/ui/single_match.fixed +++ b/src/tools/clippy/tests/ui/single_match.fixed @@ -366,3 +366,39 @@ fn irrefutable_match() { //~^^^^^^^^^ single_match //~| NOTE: you might want to preserve the comments from inside the `match` } + +fn issue_14493() { + macro_rules! mac { + (some) => { + Some(42) + }; + (any) => { + _ + }; + (str) => { + "foo" + }; + } + + if let Some(u) = mac!(some) { println!("{u}") } + //~^^^^ single_match + + // When scrutinee comes from macro, do not tell that arm will always match + // and suggest an equality check instead. + if mac!(str) == "foo" { println!("eq") } + //~^^^^ ERROR: for an equality check + + // Do not lint if any match arm come from expansion + match Some(0) { + mac!(some) => println!("eq"), + mac!(any) => println!("neq"), + } + match Some(0) { + Some(42) => println!("eq"), + mac!(any) => println!("neq"), + } + match Some(0) { + mac!(some) => println!("eq"), + _ => println!("neq"), + } +} diff --git a/src/tools/clippy/tests/ui/single_match.rs b/src/tools/clippy/tests/ui/single_match.rs index fcac65f8aaf5e..a367b94c4ca6b 100644 --- a/src/tools/clippy/tests/ui/single_match.rs +++ b/src/tools/clippy/tests/ui/single_match.rs @@ -461,3 +461,45 @@ fn irrefutable_match() { //~^^^^^^^^^ single_match //~| NOTE: you might want to preserve the comments from inside the `match` } + +fn issue_14493() { + macro_rules! mac { + (some) => { + Some(42) + }; + (any) => { + _ + }; + (str) => { + "foo" + }; + } + + match mac!(some) { + Some(u) => println!("{u}"), + _ => (), + } + //~^^^^ single_match + + // When scrutinee comes from macro, do not tell that arm will always match + // and suggest an equality check instead. + match mac!(str) { + "foo" => println!("eq"), + _ => (), + } + //~^^^^ ERROR: for an equality check + + // Do not lint if any match arm come from expansion + match Some(0) { + mac!(some) => println!("eq"), + mac!(any) => println!("neq"), + } + match Some(0) { + Some(42) => println!("eq"), + mac!(any) => println!("neq"), + } + match Some(0) { + mac!(some) => println!("eq"), + _ => println!("neq"), + } +} diff --git a/src/tools/clippy/tests/ui/single_match.stderr b/src/tools/clippy/tests/ui/single_match.stderr index 2467423b9c17d..1a4edc45c928d 100644 --- a/src/tools/clippy/tests/ui/single_match.stderr +++ b/src/tools/clippy/tests/ui/single_match.stderr @@ -321,5 +321,23 @@ LL + println!("{u}"); LL + } | -error: aborting due to 29 previous errors +error: you seem to be trying to use `match` for destructuring a single pattern. Consider using `if let` + --> tests/ui/single_match.rs:478:5 + | +LL | / match mac!(some) { +LL | | Some(u) => println!("{u}"), +LL | | _ => (), +LL | | } + | |_____^ help: try: `if let Some(u) = mac!(some) { println!("{u}") }` + +error: you seem to be trying to use `match` for an equality check. Consider using `if` + --> tests/ui/single_match.rs:486:5 + | +LL | / match mac!(str) { +LL | | "foo" => println!("eq"), +LL | | _ => (), +LL | | } + | |_____^ help: try: `if mac!(str) == "foo" { println!("eq") }` + +error: aborting due to 31 previous errors diff --git a/src/tools/clippy/tests/ui/suspicious_doc_comments.fixed b/src/tools/clippy/tests/ui/suspicious_doc_comments.fixed index 3696b0e066d28..3faa4b21ee414 100644 --- a/src/tools/clippy/tests/ui/suspicious_doc_comments.fixed +++ b/src/tools/clippy/tests/ui/suspicious_doc_comments.fixed @@ -87,4 +87,8 @@ pub mod useless_outer_doc { use std::mem; } +// Do not lint, this is not a `///!` +#[doc = "! here's some docs !"] +fn issue14265() {} + fn main() {} diff --git a/src/tools/clippy/tests/ui/suspicious_doc_comments.rs b/src/tools/clippy/tests/ui/suspicious_doc_comments.rs index 4107f5526d132..4af6ed850c2bb 100644 --- a/src/tools/clippy/tests/ui/suspicious_doc_comments.rs +++ b/src/tools/clippy/tests/ui/suspicious_doc_comments.rs @@ -87,4 +87,8 @@ pub mod useless_outer_doc { use std::mem; } +// Do not lint, this is not a `///!` +#[doc = "! here's some docs !"] +fn issue14265() {} + fn main() {} diff --git a/src/tools/clippy/tests/ui/swap.fixed b/src/tools/clippy/tests/ui/swap.fixed index 888665a17ad16..6a64e64e98fa2 100644 --- a/src/tools/clippy/tests/ui/swap.fixed +++ b/src/tools/clippy/tests/ui/swap.fixed @@ -1,14 +1,9 @@ //@aux-build: macro_rules.rs -#![warn(clippy::all)] #![allow( clippy::disallowed_names, clippy::no_effect, clippy::redundant_clone, - redundant_semicolons, - dead_code, - unused_assignments, - unused_variables, clippy::let_and_return, clippy::useless_vec, clippy::redundant_locals diff --git a/src/tools/clippy/tests/ui/swap.rs b/src/tools/clippy/tests/ui/swap.rs index 51af55ecd27c8..e2d89c47382da 100644 --- a/src/tools/clippy/tests/ui/swap.rs +++ b/src/tools/clippy/tests/ui/swap.rs @@ -1,14 +1,9 @@ //@aux-build: macro_rules.rs -#![warn(clippy::all)] #![allow( clippy::disallowed_names, clippy::no_effect, clippy::redundant_clone, - redundant_semicolons, - dead_code, - unused_assignments, - unused_variables, clippy::let_and_return, clippy::useless_vec, clippy::redundant_locals diff --git a/src/tools/clippy/tests/ui/swap.stderr b/src/tools/clippy/tests/ui/swap.stderr index 15f7566d58960..195b888187e6d 100644 --- a/src/tools/clippy/tests/ui/swap.stderr +++ b/src/tools/clippy/tests/ui/swap.stderr @@ -1,5 +1,5 @@ error: this looks like you are swapping `bar.a` and `bar.b` manually - --> tests/ui/swap.rs:28:5 + --> tests/ui/swap.rs:23:5 | LL | / let temp = bar.a; LL | | @@ -12,7 +12,7 @@ LL | | bar.b = temp; = help: to override `-D warnings` add `#[allow(clippy::manual_swap)]` error: this looks like you are swapping elements of `foo` manually - --> tests/ui/swap.rs:41:5 + --> tests/ui/swap.rs:36:5 | LL | / let temp = foo[0]; LL | | @@ -21,7 +21,7 @@ LL | | foo[1] = temp; | |__________________^ help: try: `foo.swap(0, 1);` error: this looks like you are swapping elements of `foo` manually - --> tests/ui/swap.rs:51:5 + --> tests/ui/swap.rs:46:5 | LL | / let temp = foo[0]; LL | | @@ -30,7 +30,7 @@ LL | | foo[1] = temp; | |__________________^ help: try: `foo.swap(0, 1);` error: this looks like you are swapping elements of `foo` manually - --> tests/ui/swap.rs:71:5 + --> tests/ui/swap.rs:66:5 | LL | / let temp = foo[0]; LL | | @@ -39,7 +39,7 @@ LL | | foo[1] = temp; | |__________________^ help: try: `foo.swap(0, 1);` error: this looks like you are swapping `a` and `b` manually - --> tests/ui/swap.rs:83:5 + --> tests/ui/swap.rs:78:5 | LL | / a ^= b; LL | | @@ -48,7 +48,7 @@ LL | | a ^= b; | |___________^ help: try: `std::mem::swap(&mut a, &mut b);` error: this looks like you are swapping `bar.a` and `bar.b` manually - --> tests/ui/swap.rs:92:5 + --> tests/ui/swap.rs:87:5 | LL | / bar.a ^= bar.b; LL | | @@ -57,7 +57,7 @@ LL | | bar.a ^= bar.b; | |___________________^ help: try: `std::mem::swap(&mut bar.a, &mut bar.b);` error: this looks like you are swapping elements of `foo` manually - --> tests/ui/swap.rs:101:5 + --> tests/ui/swap.rs:96:5 | LL | / foo[0] ^= foo[1]; LL | | @@ -66,7 +66,7 @@ LL | | foo[0] ^= foo[1]; | |_____________________^ help: try: `foo.swap(0, 1);` error: this looks like you are swapping `foo[0][1]` and `bar[1][0]` manually - --> tests/ui/swap.rs:131:5 + --> tests/ui/swap.rs:126:5 | LL | / let temp = foo[0][1]; LL | | @@ -77,7 +77,7 @@ LL | | bar[1][0] = temp; = note: or maybe you should use `std::mem::replace`? error: this looks like you are swapping `a` and `b` manually - --> tests/ui/swap.rs:147:7 + --> tests/ui/swap.rs:142:7 | LL | ; let t = a; | _______^ @@ -89,7 +89,7 @@ LL | | b = t; = note: or maybe you should use `std::mem::replace`? error: this looks like you are swapping `c.0` and `a` manually - --> tests/ui/swap.rs:158:7 + --> tests/ui/swap.rs:153:7 | LL | ; let t = c.0; | _______^ @@ -101,7 +101,7 @@ LL | | a = t; = note: or maybe you should use `std::mem::replace`? error: this looks like you are swapping `b` and `a` manually - --> tests/ui/swap.rs:188:5 + --> tests/ui/swap.rs:183:5 | LL | / let t = b; LL | | @@ -112,7 +112,7 @@ LL | | a = t; = note: or maybe you should use `std::mem::replace`? error: this looks like you are trying to swap `a` and `b` - --> tests/ui/swap.rs:143:5 + --> tests/ui/swap.rs:138:5 | LL | / a = b; LL | | @@ -120,11 +120,10 @@ LL | | b = a; | |_________^ help: try: `std::mem::swap(&mut a, &mut b)` | = note: or maybe you should use `std::mem::replace`? - = note: `-D clippy::almost-swapped` implied by `-D warnings` - = help: to override `-D warnings` add `#[allow(clippy::almost_swapped)]` + = note: `#[deny(clippy::almost_swapped)]` on by default error: this looks like you are trying to swap `c.0` and `a` - --> tests/ui/swap.rs:154:5 + --> tests/ui/swap.rs:149:5 | LL | / c.0 = a; LL | | @@ -134,7 +133,7 @@ LL | | a = c.0; = note: or maybe you should use `std::mem::replace`? error: this looks like you are trying to swap `a` and `b` - --> tests/ui/swap.rs:163:5 + --> tests/ui/swap.rs:158:5 | LL | / let a = b; LL | | @@ -144,7 +143,7 @@ LL | | let b = a; = note: or maybe you should use `std::mem::replace`? error: this looks like you are trying to swap `d` and `c` - --> tests/ui/swap.rs:169:5 + --> tests/ui/swap.rs:164:5 | LL | / d = c; LL | | @@ -154,7 +153,7 @@ LL | | c = d; = note: or maybe you should use `std::mem::replace`? error: this looks like you are trying to swap `a` and `b` - --> tests/ui/swap.rs:174:5 + --> tests/ui/swap.rs:169:5 | LL | / let a = b; LL | | @@ -164,7 +163,7 @@ LL | | b = a; = note: or maybe you should use `std::mem::replace`? error: this looks like you are swapping `s.0.x` and `s.0.y` manually - --> tests/ui/swap.rs:224:5 + --> tests/ui/swap.rs:219:5 | LL | / let t = s.0.x; LL | | diff --git a/src/tools/clippy/tests/ui/swap_with_temporary.fixed b/src/tools/clippy/tests/ui/swap_with_temporary.fixed new file mode 100644 index 0000000000000..4007d998ba068 --- /dev/null +++ b/src/tools/clippy/tests/ui/swap_with_temporary.fixed @@ -0,0 +1,74 @@ +#![warn(clippy::swap_with_temporary)] + +use std::mem::swap; + +fn func() -> String { + String::from("func") +} + +fn func_returning_refmut(s: &mut String) -> &mut String { + s +} + +fn main() { + let mut x = String::from("x"); + let mut y = String::from("y"); + let mut zz = String::from("zz"); + let z = &mut zz; + + // No lint + swap(&mut x, &mut y); + + y = func(); + //~^ ERROR: swapping with a temporary value is inefficient + + x = func(); + //~^ ERROR: swapping with a temporary value is inefficient + + *z = func(); + //~^ ERROR: swapping with a temporary value is inefficient + + // No lint + swap(z, func_returning_refmut(&mut x)); + + swap(&mut y, z); + + *z = func(); + //~^ ERROR: swapping with a temporary value is inefficient + + macro_rules! mac { + (refmut $x:expr) => { + &mut $x + }; + (funcall $f:ident) => { + $f() + }; + (wholeexpr) => { + swap(&mut 42, &mut 0) + }; + (ident $v:ident) => { + $v + }; + } + *z = mac!(funcall func); + //~^ ERROR: swapping with a temporary value is inefficient + *mac!(ident z) = mac!(funcall func); + //~^ ERROR: swapping with a temporary value is inefficient + *mac!(ident z) = mac!(funcall func); + //~^ ERROR: swapping with a temporary value is inefficient + *mac!(refmut y) = func(); + //~^ ERROR: swapping with a temporary value is inefficient + + // No lint if it comes from a macro as it may depend on the arguments + mac!(wholeexpr); +} + +struct S { + t: String, +} + +fn dont_lint_those(s: &mut S, v: &mut [String], w: Option<&mut String>) { + swap(&mut s.t, &mut v[0]); + swap(&mut s.t, v.get_mut(0).unwrap()); + swap(w.unwrap(), &mut s.t); +} diff --git a/src/tools/clippy/tests/ui/swap_with_temporary.rs b/src/tools/clippy/tests/ui/swap_with_temporary.rs new file mode 100644 index 0000000000000..d403c086c0f4f --- /dev/null +++ b/src/tools/clippy/tests/ui/swap_with_temporary.rs @@ -0,0 +1,74 @@ +#![warn(clippy::swap_with_temporary)] + +use std::mem::swap; + +fn func() -> String { + String::from("func") +} + +fn func_returning_refmut(s: &mut String) -> &mut String { + s +} + +fn main() { + let mut x = String::from("x"); + let mut y = String::from("y"); + let mut zz = String::from("zz"); + let z = &mut zz; + + // No lint + swap(&mut x, &mut y); + + swap(&mut func(), &mut y); + //~^ ERROR: swapping with a temporary value is inefficient + + swap(&mut x, &mut func()); + //~^ ERROR: swapping with a temporary value is inefficient + + swap(z, &mut func()); + //~^ ERROR: swapping with a temporary value is inefficient + + // No lint + swap(z, func_returning_refmut(&mut x)); + + swap(&mut y, z); + + swap(&mut func(), z); + //~^ ERROR: swapping with a temporary value is inefficient + + macro_rules! mac { + (refmut $x:expr) => { + &mut $x + }; + (funcall $f:ident) => { + $f() + }; + (wholeexpr) => { + swap(&mut 42, &mut 0) + }; + (ident $v:ident) => { + $v + }; + } + swap(&mut mac!(funcall func), z); + //~^ ERROR: swapping with a temporary value is inefficient + swap(&mut mac!(funcall func), mac!(ident z)); + //~^ ERROR: swapping with a temporary value is inefficient + swap(mac!(ident z), &mut mac!(funcall func)); + //~^ ERROR: swapping with a temporary value is inefficient + swap(mac!(refmut y), &mut func()); + //~^ ERROR: swapping with a temporary value is inefficient + + // No lint if it comes from a macro as it may depend on the arguments + mac!(wholeexpr); +} + +struct S { + t: String, +} + +fn dont_lint_those(s: &mut S, v: &mut [String], w: Option<&mut String>) { + swap(&mut s.t, &mut v[0]); + swap(&mut s.t, v.get_mut(0).unwrap()); + swap(w.unwrap(), &mut s.t); +} diff --git a/src/tools/clippy/tests/ui/swap_with_temporary.stderr b/src/tools/clippy/tests/ui/swap_with_temporary.stderr new file mode 100644 index 0000000000000..59355771a9648 --- /dev/null +++ b/src/tools/clippy/tests/ui/swap_with_temporary.stderr @@ -0,0 +1,100 @@ +error: swapping with a temporary value is inefficient + --> tests/ui/swap_with_temporary.rs:22:5 + | +LL | swap(&mut func(), &mut y); + | ^^^^^^^^^^^^^^^^^^^^^^^^^ help: use assignment instead: `y = func()` + | +note: this expression returns a temporary value + --> tests/ui/swap_with_temporary.rs:22:15 + | +LL | swap(&mut func(), &mut y); + | ^^^^^^ + = note: `-D clippy::swap-with-temporary` implied by `-D warnings` + = help: to override `-D warnings` add `#[allow(clippy::swap_with_temporary)]` + +error: swapping with a temporary value is inefficient + --> tests/ui/swap_with_temporary.rs:25:5 + | +LL | swap(&mut x, &mut func()); + | ^^^^^^^^^^^^^^^^^^^^^^^^^ help: use assignment instead: `x = func()` + | +note: this expression returns a temporary value + --> tests/ui/swap_with_temporary.rs:25:23 + | +LL | swap(&mut x, &mut func()); + | ^^^^^^ + +error: swapping with a temporary value is inefficient + --> tests/ui/swap_with_temporary.rs:28:5 + | +LL | swap(z, &mut func()); + | ^^^^^^^^^^^^^^^^^^^^ help: use assignment instead: `*z = func()` + | +note: this expression returns a temporary value + --> tests/ui/swap_with_temporary.rs:28:18 + | +LL | swap(z, &mut func()); + | ^^^^^^ + +error: swapping with a temporary value is inefficient + --> tests/ui/swap_with_temporary.rs:36:5 + | +LL | swap(&mut func(), z); + | ^^^^^^^^^^^^^^^^^^^^ help: use assignment instead: `*z = func()` + | +note: this expression returns a temporary value + --> tests/ui/swap_with_temporary.rs:36:15 + | +LL | swap(&mut func(), z); + | ^^^^^^ + +error: swapping with a temporary value is inefficient + --> tests/ui/swap_with_temporary.rs:53:5 + | +LL | swap(&mut mac!(funcall func), z); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use assignment instead: `*z = mac!(funcall func)` + | +note: this expression returns a temporary value + --> tests/ui/swap_with_temporary.rs:53:15 + | +LL | swap(&mut mac!(funcall func), z); + | ^^^^^^^^^^^^^^^^^^ + +error: swapping with a temporary value is inefficient + --> tests/ui/swap_with_temporary.rs:55:5 + | +LL | swap(&mut mac!(funcall func), mac!(ident z)); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use assignment instead: `*mac!(ident z) = mac!(funcall func)` + | +note: this expression returns a temporary value + --> tests/ui/swap_with_temporary.rs:55:15 + | +LL | swap(&mut mac!(funcall func), mac!(ident z)); + | ^^^^^^^^^^^^^^^^^^ + +error: swapping with a temporary value is inefficient + --> tests/ui/swap_with_temporary.rs:57:5 + | +LL | swap(mac!(ident z), &mut mac!(funcall func)); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use assignment instead: `*mac!(ident z) = mac!(funcall func)` + | +note: this expression returns a temporary value + --> tests/ui/swap_with_temporary.rs:57:30 + | +LL | swap(mac!(ident z), &mut mac!(funcall func)); + | ^^^^^^^^^^^^^^^^^^ + +error: swapping with a temporary value is inefficient + --> tests/ui/swap_with_temporary.rs:59:5 + | +LL | swap(mac!(refmut y), &mut func()); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use assignment instead: `*mac!(refmut y) = func()` + | +note: this expression returns a temporary value + --> tests/ui/swap_with_temporary.rs:59:31 + | +LL | swap(mac!(refmut y), &mut func()); + | ^^^^^^ + +error: aborting due to 8 previous errors + diff --git a/src/tools/clippy/tests/ui/swap_with_temporary_unfixable.rs b/src/tools/clippy/tests/ui/swap_with_temporary_unfixable.rs new file mode 100644 index 0000000000000..a974ca82abf26 --- /dev/null +++ b/src/tools/clippy/tests/ui/swap_with_temporary_unfixable.rs @@ -0,0 +1,62 @@ +//@no-rustfix +#![warn(clippy::swap_with_temporary)] + +use std::mem::swap; + +fn func() -> String { + String::from("func") +} + +fn func_returning_refmut(s: &mut String) -> &mut String { + s +} + +fn main() { + let mut x = String::from("x"); + let mut y = String::from("y"); + let mut zz = String::from("zz"); + let z = &mut zz; + + swap(&mut func(), &mut func()); + //~^ ERROR: swapping temporary values has no effect + + if matches!(swap(&mut func(), &mut func()), ()) { + //~^ ERROR: swapping temporary values has no effect + println!("Yeah"); + } + + if matches!(swap(z, &mut func()), ()) { + //~^ ERROR: swapping with a temporary value is inefficient + println!("Yeah"); + } + + macro_rules! mac { + (refmut $x:expr) => { + &mut $x + }; + (refmut) => { + mac!(refmut String::new()) + }; + (funcall $f:ident) => { + $f() + }; + } + + swap(mac!(refmut func()), z); + //~^ ERROR: swapping with a temporary value is inefficient + swap(&mut mac!(funcall func), &mut mac!(funcall func)); + //~^ ERROR: swapping temporary values has no effect + swap(mac!(refmut), mac!(refmut)); + //~^ ERROR: swapping temporary values has no effect + swap(mac!(refmut y), mac!(refmut)); + //~^ ERROR: swapping with a temporary value is inefficient +} + +fn bug(v1: &mut [i32], v2: &mut [i32]) { + // Incorrect: swapping temporary references (`&mut &mut` passed to swap) + std::mem::swap(&mut v1.last_mut().unwrap(), &mut v2.last_mut().unwrap()); + //~^ ERROR: swapping temporary values has no effect + + // Correct + std::mem::swap(v1.last_mut().unwrap(), v2.last_mut().unwrap()); +} diff --git a/src/tools/clippy/tests/ui/swap_with_temporary_unfixable.stderr b/src/tools/clippy/tests/ui/swap_with_temporary_unfixable.stderr new file mode 100644 index 0000000000000..856c5415d676c --- /dev/null +++ b/src/tools/clippy/tests/ui/swap_with_temporary_unfixable.stderr @@ -0,0 +1,125 @@ +error: swapping temporary values has no effect + --> tests/ui/swap_with_temporary_unfixable.rs:20:5 + | +LL | swap(&mut func(), &mut func()); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | +note: this expression returns a temporary value + --> tests/ui/swap_with_temporary_unfixable.rs:20:15 + | +LL | swap(&mut func(), &mut func()); + | ^^^^^^ +note: this expression returns a temporary value + --> tests/ui/swap_with_temporary_unfixable.rs:20:28 + | +LL | swap(&mut func(), &mut func()); + | ^^^^^^ + = note: `-D clippy::swap-with-temporary` implied by `-D warnings` + = help: to override `-D warnings` add `#[allow(clippy::swap_with_temporary)]` + +error: swapping temporary values has no effect + --> tests/ui/swap_with_temporary_unfixable.rs:23:17 + | +LL | if matches!(swap(&mut func(), &mut func()), ()) { + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | +note: this expression returns a temporary value + --> tests/ui/swap_with_temporary_unfixable.rs:23:27 + | +LL | if matches!(swap(&mut func(), &mut func()), ()) { + | ^^^^^^ +note: this expression returns a temporary value + --> tests/ui/swap_with_temporary_unfixable.rs:23:40 + | +LL | if matches!(swap(&mut func(), &mut func()), ()) { + | ^^^^^^ + +error: swapping with a temporary value is inefficient + --> tests/ui/swap_with_temporary_unfixable.rs:28:17 + | +LL | if matches!(swap(z, &mut func()), ()) { + | ^^^^^^^^^^^^^^^^^^^^ + | +note: this expression returns a temporary value + --> tests/ui/swap_with_temporary_unfixable.rs:28:30 + | +LL | if matches!(swap(z, &mut func()), ()) { + | ^^^^^^ + +error: swapping with a temporary value is inefficient + --> tests/ui/swap_with_temporary_unfixable.rs:45:5 + | +LL | swap(mac!(refmut func()), z); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | +note: this is a mutable reference to a temporary value + --> tests/ui/swap_with_temporary_unfixable.rs:45:10 + | +LL | swap(mac!(refmut func()), z); + | ^^^^^^^^^^^^^^^^^^^ + +error: swapping temporary values has no effect + --> tests/ui/swap_with_temporary_unfixable.rs:47:5 + | +LL | swap(&mut mac!(funcall func), &mut mac!(funcall func)); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | +note: this expression returns a temporary value + --> tests/ui/swap_with_temporary_unfixable.rs:47:15 + | +LL | swap(&mut mac!(funcall func), &mut mac!(funcall func)); + | ^^^^^^^^^^^^^^^^^^ +note: this expression returns a temporary value + --> tests/ui/swap_with_temporary_unfixable.rs:47:40 + | +LL | swap(&mut mac!(funcall func), &mut mac!(funcall func)); + | ^^^^^^^^^^^^^^^^^^ + +error: swapping temporary values has no effect + --> tests/ui/swap_with_temporary_unfixable.rs:49:5 + | +LL | swap(mac!(refmut), mac!(refmut)); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | +note: this is a mutable reference to a temporary value + --> tests/ui/swap_with_temporary_unfixable.rs:49:10 + | +LL | swap(mac!(refmut), mac!(refmut)); + | ^^^^^^^^^^^^ +note: this is a mutable reference to a temporary value + --> tests/ui/swap_with_temporary_unfixable.rs:49:24 + | +LL | swap(mac!(refmut), mac!(refmut)); + | ^^^^^^^^^^^^ + +error: swapping with a temporary value is inefficient + --> tests/ui/swap_with_temporary_unfixable.rs:51:5 + | +LL | swap(mac!(refmut y), mac!(refmut)); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | +note: this is a mutable reference to a temporary value + --> tests/ui/swap_with_temporary_unfixable.rs:51:26 + | +LL | swap(mac!(refmut y), mac!(refmut)); + | ^^^^^^^^^^^^ + +error: swapping temporary values has no effect + --> tests/ui/swap_with_temporary_unfixable.rs:57:5 + | +LL | std::mem::swap(&mut v1.last_mut().unwrap(), &mut v2.last_mut().unwrap()); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | +note: this expression returns a temporary value + --> tests/ui/swap_with_temporary_unfixable.rs:57:25 + | +LL | std::mem::swap(&mut v1.last_mut().unwrap(), &mut v2.last_mut().unwrap()); + | ^^^^^^^^^^^^^^^^^^^^^^ +note: this expression returns a temporary value + --> tests/ui/swap_with_temporary_unfixable.rs:57:54 + | +LL | std::mem::swap(&mut v1.last_mut().unwrap(), &mut v2.last_mut().unwrap()); + | ^^^^^^^^^^^^^^^^^^^^^^ + +error: aborting due to 8 previous errors + diff --git a/src/tools/clippy/tests/ui/transmute.rs b/src/tools/clippy/tests/ui/transmute.rs index 3aecde398dc3f..2b8b6c539ad3c 100644 --- a/src/tools/clippy/tests/ui/transmute.rs +++ b/src/tools/clippy/tests/ui/transmute.rs @@ -3,6 +3,7 @@ #![allow( dead_code, clippy::borrow_as_ptr, + unnecessary_transmutes, clippy::needless_lifetimes, clippy::missing_transmute_annotations )] @@ -23,19 +24,21 @@ fn my_vec() -> MyVec { #[allow(clippy::needless_lifetimes, clippy::transmute_ptr_to_ptr)] #[warn(clippy::useless_transmute)] unsafe fn _generic<'a, T, U: 'a>(t: &'a T) { - // FIXME: should lint - // let _: &'a T = core::mem::transmute(t); + unsafe { + // FIXME: should lint + // let _: &'a T = core::mem::transmute(t); - let _: &'a U = core::mem::transmute(t); + let _: &'a U = core::mem::transmute(t); - let _: *const T = core::mem::transmute(t); - //~^ useless_transmute + let _: *const T = core::mem::transmute(t); + //~^ useless_transmute - let _: *mut T = core::mem::transmute(t); - //~^ useless_transmute + let _: *mut T = core::mem::transmute(t); + //~^ useless_transmute - let _: *const U = core::mem::transmute(t); - //~^ useless_transmute + let _: *const U = core::mem::transmute(t); + //~^ useless_transmute + } } #[warn(clippy::useless_transmute)] @@ -59,7 +62,7 @@ fn useless() { let _: *const usize = std::mem::transmute(5_isize); //~^ useless_transmute - let _ = 5_isize as *const usize; + let _ = std::ptr::dangling::(); let _: *const usize = std::mem::transmute(1 + 1usize); //~^ useless_transmute @@ -68,19 +71,19 @@ fn useless() { } unsafe fn _f<'a, 'b>(x: &'a u32) -> &'b u32 { - std::mem::transmute(x) + unsafe { std::mem::transmute(x) } } unsafe fn _f2<'a, 'b>(x: *const (dyn Iterator + 'a)) -> *const (dyn Iterator + 'b) { - std::mem::transmute(x) + unsafe { std::mem::transmute(x) } } unsafe fn _f3<'a, 'b>(x: fn(&'a u32)) -> fn(&'b u32) { - std::mem::transmute(x) + unsafe { std::mem::transmute(x) } } unsafe fn _f4<'a, 'b>(x: std::borrow::Cow<'a, str>) -> std::borrow::Cow<'b, str> { - std::mem::transmute(x) + unsafe { std::mem::transmute(x) } } } diff --git a/src/tools/clippy/tests/ui/transmute.stderr b/src/tools/clippy/tests/ui/transmute.stderr index e0d28437aafc8..1bb70151965cd 100644 --- a/src/tools/clippy/tests/ui/transmute.stderr +++ b/src/tools/clippy/tests/ui/transmute.stderr @@ -1,68 +1,68 @@ error: transmute from a reference to a pointer - --> tests/ui/transmute.rs:31:23 + --> tests/ui/transmute.rs:33:27 | -LL | let _: *const T = core::mem::transmute(t); - | ^^^^^^^^^^^^^^^^^^^^^^^ help: try: `t as *const T` +LL | let _: *const T = core::mem::transmute(t); + | ^^^^^^^^^^^^^^^^^^^^^^^ help: try: `t as *const T` | = note: `-D clippy::useless-transmute` implied by `-D warnings` = help: to override `-D warnings` add `#[allow(clippy::useless_transmute)]` error: transmute from a reference to a pointer - --> tests/ui/transmute.rs:34:21 + --> tests/ui/transmute.rs:36:25 | -LL | let _: *mut T = core::mem::transmute(t); - | ^^^^^^^^^^^^^^^^^^^^^^^ help: try: `t as *const T as *mut T` +LL | let _: *mut T = core::mem::transmute(t); + | ^^^^^^^^^^^^^^^^^^^^^^^ help: try: `t as *const T as *mut T` error: transmute from a reference to a pointer - --> tests/ui/transmute.rs:37:23 + --> tests/ui/transmute.rs:39:27 | -LL | let _: *const U = core::mem::transmute(t); - | ^^^^^^^^^^^^^^^^^^^^^^^ help: try: `t as *const T as *const U` +LL | let _: *const U = core::mem::transmute(t); + | ^^^^^^^^^^^^^^^^^^^^^^^ help: try: `t as *const T as *const U` error: transmute from a type (`std::vec::Vec`) to itself - --> tests/ui/transmute.rs:44:27 + --> tests/ui/transmute.rs:47:27 | LL | let _: Vec = core::mem::transmute(my_vec()); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: transmute from a type (`std::vec::Vec`) to itself - --> tests/ui/transmute.rs:47:27 + --> tests/ui/transmute.rs:50:27 | LL | let _: Vec = core::mem::transmute(my_vec()); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: transmute from a type (`std::vec::Vec`) to itself - --> tests/ui/transmute.rs:50:27 + --> tests/ui/transmute.rs:53:27 | LL | let _: Vec = std::mem::transmute(my_vec()); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: transmute from a type (`std::vec::Vec`) to itself - --> tests/ui/transmute.rs:53:27 + --> tests/ui/transmute.rs:56:27 | LL | let _: Vec = std::mem::transmute(my_vec()); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: transmute from a type (`std::vec::Vec`) to itself - --> tests/ui/transmute.rs:56:27 + --> tests/ui/transmute.rs:59:27 | LL | let _: Vec = my_transmute(my_vec()); | ^^^^^^^^^^^^^^^^^^^^^^ error: transmute from an integer to a pointer - --> tests/ui/transmute.rs:59:31 + --> tests/ui/transmute.rs:62:31 | LL | let _: *const usize = std::mem::transmute(5_isize); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `5_isize as *const usize` error: transmute from an integer to a pointer - --> tests/ui/transmute.rs:64:31 + --> tests/ui/transmute.rs:67:31 | LL | let _: *const usize = std::mem::transmute(1 + 1usize); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `(1 + 1usize) as *const usize` error: transmute from a type (`*const Usize`) to the type that it points to (`Usize`) - --> tests/ui/transmute.rs:96:24 + --> tests/ui/transmute.rs:99:24 | LL | let _: Usize = core::mem::transmute(int_const_ptr); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -71,25 +71,25 @@ LL | let _: Usize = core::mem::transmute(int_const_ptr); = help: to override `-D warnings` add `#[allow(clippy::crosspointer_transmute)]` error: transmute from a type (`*mut Usize`) to the type that it points to (`Usize`) - --> tests/ui/transmute.rs:99:24 + --> tests/ui/transmute.rs:102:24 | LL | let _: Usize = core::mem::transmute(int_mut_ptr); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: transmute from a type (`Usize`) to a pointer to that type (`*const Usize`) - --> tests/ui/transmute.rs:102:31 + --> tests/ui/transmute.rs:105:31 | LL | let _: *const Usize = core::mem::transmute(my_int()); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: transmute from a type (`Usize`) to a pointer to that type (`*mut Usize`) - --> tests/ui/transmute.rs:105:29 + --> tests/ui/transmute.rs:108:29 | LL | let _: *mut Usize = core::mem::transmute(my_int()); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: transmute from a `u8` to a `bool` - --> tests/ui/transmute.rs:112:28 + --> tests/ui/transmute.rs:115:28 | LL | let _: bool = unsafe { std::mem::transmute(0_u8) }; | ^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `0_u8 != 0` @@ -98,7 +98,7 @@ LL | let _: bool = unsafe { std::mem::transmute(0_u8) }; = help: to override `-D warnings` add `#[allow(clippy::transmute_int_to_bool)]` error: transmute from a `u16` to a `f16` - --> tests/ui/transmute.rs:119:31 + --> tests/ui/transmute.rs:122:31 | LL | let _: f16 = unsafe { std::mem::transmute(0_u16) }; | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `f16::from_bits(0_u16)` @@ -107,97 +107,97 @@ LL | let _: f16 = unsafe { std::mem::transmute(0_u16) }; = help: to override `-D warnings` add `#[allow(clippy::transmute_int_to_float)]` error: transmute from a `i16` to a `f16` - --> tests/ui/transmute.rs:122:31 + --> tests/ui/transmute.rs:125:31 | LL | let _: f16 = unsafe { std::mem::transmute(0_i16) }; | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `f16::from_bits(0_i16 as u16)` error: transmute from a `u32` to a `f32` - --> tests/ui/transmute.rs:125:31 + --> tests/ui/transmute.rs:128:31 | LL | let _: f32 = unsafe { std::mem::transmute(0_u32) }; | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `f32::from_bits(0_u32)` error: transmute from a `i32` to a `f32` - --> tests/ui/transmute.rs:128:31 + --> tests/ui/transmute.rs:131:31 | LL | let _: f32 = unsafe { std::mem::transmute(0_i32) }; | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `f32::from_bits(0_i32 as u32)` error: transmute from a `u64` to a `f64` - --> tests/ui/transmute.rs:131:31 + --> tests/ui/transmute.rs:134:31 | LL | let _: f64 = unsafe { std::mem::transmute(0_u64) }; | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `f64::from_bits(0_u64)` error: transmute from a `i64` to a `f64` - --> tests/ui/transmute.rs:134:31 + --> tests/ui/transmute.rs:137:31 | LL | let _: f64 = unsafe { std::mem::transmute(0_i64) }; | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `f64::from_bits(0_i64 as u64)` error: transmute from a `u128` to a `f128` - --> tests/ui/transmute.rs:137:32 + --> tests/ui/transmute.rs:140:32 | LL | let _: f128 = unsafe { std::mem::transmute(0_u128) }; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `f128::from_bits(0_u128)` error: transmute from a `i128` to a `f128` - --> tests/ui/transmute.rs:140:32 + --> tests/ui/transmute.rs:143:32 | LL | let _: f128 = unsafe { std::mem::transmute(0_i128) }; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `f128::from_bits(0_i128 as u128)` error: transmute from a `u16` to a `f16` - --> tests/ui/transmute.rs:145:39 + --> tests/ui/transmute.rs:148:39 | LL | const VALUE16: f16 = unsafe { std::mem::transmute(0_u16) }; | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `f16::from_bits(0_u16)` error: transmute from a `u32` to a `f32` - --> tests/ui/transmute.rs:148:39 + --> tests/ui/transmute.rs:151:39 | LL | const VALUE32: f32 = unsafe { std::mem::transmute(0_u32) }; | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `f32::from_bits(0_u32)` error: transmute from a `i64` to a `f64` - --> tests/ui/transmute.rs:151:39 + --> tests/ui/transmute.rs:154:39 | LL | const VALUE64: f64 = unsafe { std::mem::transmute(0_i64) }; | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `f64::from_bits(0_i64 as u64)` error: transmute from a `i128` to a `f128` - --> tests/ui/transmute.rs:154:41 + --> tests/ui/transmute.rs:157:41 | LL | const VALUE128: f128 = unsafe { std::mem::transmute(0_i128) }; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `f128::from_bits(0_i128 as u128)` error: transmute from a `i16` to a `f16` - --> tests/ui/transmute.rs:158:22 + --> tests/ui/transmute.rs:161:22 | LL | unsafe { std::mem::transmute(v) } | ^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `f16::from_bits(v as u16)` error: transmute from a `i32` to a `f32` - --> tests/ui/transmute.rs:163:22 + --> tests/ui/transmute.rs:166:22 | LL | unsafe { std::mem::transmute(v) } | ^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `f32::from_bits(v as u32)` error: transmute from a `u64` to a `f64` - --> tests/ui/transmute.rs:168:22 + --> tests/ui/transmute.rs:171:22 | LL | unsafe { std::mem::transmute(v) } | ^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `f64::from_bits(v)` error: transmute from a `u128` to a `f128` - --> tests/ui/transmute.rs:173:22 + --> tests/ui/transmute.rs:176:22 | LL | unsafe { std::mem::transmute(v) } | ^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `f128::from_bits(v)` error: transmute from a `u8` to a `[u8; 1]` - --> tests/ui/transmute.rs:182:30 + --> tests/ui/transmute.rs:185:30 | LL | let _: [u8; 1] = std::mem::transmute(0u8); | ^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using `to_ne_bytes()`: `0u8.to_ne_bytes()` @@ -206,121 +206,121 @@ LL | let _: [u8; 1] = std::mem::transmute(0u8); = help: to override `-D warnings` add `#[allow(clippy::transmute_num_to_bytes)]` error: transmute from a `u32` to a `[u8; 4]` - --> tests/ui/transmute.rs:185:30 + --> tests/ui/transmute.rs:188:30 | LL | let _: [u8; 4] = std::mem::transmute(0u32); | ^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using `to_ne_bytes()`: `0u32.to_ne_bytes()` error: transmute from a `u128` to a `[u8; 16]` - --> tests/ui/transmute.rs:188:31 + --> tests/ui/transmute.rs:191:31 | LL | let _: [u8; 16] = std::mem::transmute(0u128); | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using `to_ne_bytes()`: `0u128.to_ne_bytes()` error: transmute from a `i8` to a `[u8; 1]` - --> tests/ui/transmute.rs:191:30 + --> tests/ui/transmute.rs:194:30 | LL | let _: [u8; 1] = std::mem::transmute(0i8); | ^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using `to_ne_bytes()`: `0i8.to_ne_bytes()` error: transmute from a `i32` to a `[u8; 4]` - --> tests/ui/transmute.rs:194:30 + --> tests/ui/transmute.rs:197:30 | LL | let _: [u8; 4] = std::mem::transmute(0i32); | ^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using `to_ne_bytes()`: `0i32.to_ne_bytes()` error: transmute from a `i128` to a `[u8; 16]` - --> tests/ui/transmute.rs:197:31 + --> tests/ui/transmute.rs:200:31 | LL | let _: [u8; 16] = std::mem::transmute(0i128); | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using `to_ne_bytes()`: `0i128.to_ne_bytes()` error: transmute from a `f16` to a `[u8; 2]` - --> tests/ui/transmute.rs:200:30 + --> tests/ui/transmute.rs:203:30 | LL | let _: [u8; 2] = std::mem::transmute(0.0f16); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using `to_ne_bytes()`: `0.0f16.to_ne_bytes()` error: transmute from a `f32` to a `[u8; 4]` - --> tests/ui/transmute.rs:203:30 + --> tests/ui/transmute.rs:206:30 | LL | let _: [u8; 4] = std::mem::transmute(0.0f32); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using `to_ne_bytes()`: `0.0f32.to_ne_bytes()` error: transmute from a `f64` to a `[u8; 8]` - --> tests/ui/transmute.rs:206:30 + --> tests/ui/transmute.rs:209:30 | LL | let _: [u8; 8] = std::mem::transmute(0.0f64); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using `to_ne_bytes()`: `0.0f64.to_ne_bytes()` error: transmute from a `f128` to a `[u8; 16]` - --> tests/ui/transmute.rs:209:31 + --> tests/ui/transmute.rs:212:31 | LL | let _: [u8; 16] = std::mem::transmute(0.0f128); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using `to_ne_bytes()`: `0.0f128.to_ne_bytes()` error: transmute from a `u8` to a `[u8; 1]` - --> tests/ui/transmute.rs:215:30 + --> tests/ui/transmute.rs:218:30 | LL | let _: [u8; 1] = std::mem::transmute(0u8); | ^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using `to_ne_bytes()`: `0u8.to_ne_bytes()` error: transmute from a `u32` to a `[u8; 4]` - --> tests/ui/transmute.rs:218:30 + --> tests/ui/transmute.rs:221:30 | LL | let _: [u8; 4] = std::mem::transmute(0u32); | ^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using `to_ne_bytes()`: `0u32.to_ne_bytes()` error: transmute from a `u128` to a `[u8; 16]` - --> tests/ui/transmute.rs:221:31 + --> tests/ui/transmute.rs:224:31 | LL | let _: [u8; 16] = std::mem::transmute(0u128); | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using `to_ne_bytes()`: `0u128.to_ne_bytes()` error: transmute from a `i8` to a `[u8; 1]` - --> tests/ui/transmute.rs:224:30 + --> tests/ui/transmute.rs:227:30 | LL | let _: [u8; 1] = std::mem::transmute(0i8); | ^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using `to_ne_bytes()`: `0i8.to_ne_bytes()` error: transmute from a `i32` to a `[u8; 4]` - --> tests/ui/transmute.rs:227:30 + --> tests/ui/transmute.rs:230:30 | LL | let _: [u8; 4] = std::mem::transmute(0i32); | ^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using `to_ne_bytes()`: `0i32.to_ne_bytes()` error: transmute from a `i128` to a `[u8; 16]` - --> tests/ui/transmute.rs:230:31 + --> tests/ui/transmute.rs:233:31 | LL | let _: [u8; 16] = std::mem::transmute(0i128); | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using `to_ne_bytes()`: `0i128.to_ne_bytes()` error: transmute from a `f16` to a `[u8; 2]` - --> tests/ui/transmute.rs:233:30 + --> tests/ui/transmute.rs:236:30 | LL | let _: [u8; 2] = std::mem::transmute(0.0f16); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using `to_ne_bytes()`: `0.0f16.to_ne_bytes()` error: transmute from a `f32` to a `[u8; 4]` - --> tests/ui/transmute.rs:236:30 + --> tests/ui/transmute.rs:239:30 | LL | let _: [u8; 4] = std::mem::transmute(0.0f32); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using `to_ne_bytes()`: `0.0f32.to_ne_bytes()` error: transmute from a `f64` to a `[u8; 8]` - --> tests/ui/transmute.rs:239:30 + --> tests/ui/transmute.rs:242:30 | LL | let _: [u8; 8] = std::mem::transmute(0.0f64); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using `to_ne_bytes()`: `0.0f64.to_ne_bytes()` error: transmute from a `f128` to a `[u8; 16]` - --> tests/ui/transmute.rs:242:31 + --> tests/ui/transmute.rs:245:31 | LL | let _: [u8; 16] = std::mem::transmute(0.0f128); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using `to_ne_bytes()`: `0.0f128.to_ne_bytes()` error: transmute from a `&[u8]` to a `&str` - --> tests/ui/transmute.rs:251:28 + --> tests/ui/transmute.rs:254:28 | LL | let _: &str = unsafe { std::mem::transmute(B) }; | ^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `std::str::from_utf8(B).unwrap()` @@ -329,13 +329,13 @@ LL | let _: &str = unsafe { std::mem::transmute(B) }; = help: to override `-D warnings` add `#[allow(clippy::transmute_bytes_to_str)]` error: transmute from a `&mut [u8]` to a `&mut str` - --> tests/ui/transmute.rs:254:32 + --> tests/ui/transmute.rs:257:32 | LL | let _: &mut str = unsafe { std::mem::transmute(mb) }; | ^^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `std::str::from_utf8_mut(mb).unwrap()` error: transmute from a `&[u8]` to a `&str` - --> tests/ui/transmute.rs:257:30 + --> tests/ui/transmute.rs:260:30 | LL | const _: &str = unsafe { std::mem::transmute(B) }; | ^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `std::str::from_utf8_unchecked(B)` diff --git a/src/tools/clippy/tests/ui/transmute_float_to_int.fixed b/src/tools/clippy/tests/ui/transmute_float_to_int.fixed index 1f97b997eaa0e..844445907d7c2 100644 --- a/src/tools/clippy/tests/ui/transmute_float_to_int.fixed +++ b/src/tools/clippy/tests/ui/transmute_float_to_int.fixed @@ -1,5 +1,5 @@ #![warn(clippy::transmute_float_to_int)] -#![allow(clippy::missing_transmute_annotations)] +#![allow(clippy::missing_transmute_annotations, unnecessary_transmutes)] #![feature(f128)] #![feature(f16)] diff --git a/src/tools/clippy/tests/ui/transmute_float_to_int.rs b/src/tools/clippy/tests/ui/transmute_float_to_int.rs index 788a7e1026c67..a1f3b15bbfee4 100644 --- a/src/tools/clippy/tests/ui/transmute_float_to_int.rs +++ b/src/tools/clippy/tests/ui/transmute_float_to_int.rs @@ -1,5 +1,5 @@ #![warn(clippy::transmute_float_to_int)] -#![allow(clippy::missing_transmute_annotations)] +#![allow(clippy::missing_transmute_annotations, unnecessary_transmutes)] #![feature(f128)] #![feature(f16)] diff --git a/src/tools/clippy/tests/ui/transmute_int_to_char.fixed b/src/tools/clippy/tests/ui/transmute_int_to_char.fixed index b5425a2e9e854..28644aa9ebbb7 100644 --- a/src/tools/clippy/tests/ui/transmute_int_to_char.fixed +++ b/src/tools/clippy/tests/ui/transmute_int_to_char.fixed @@ -1,5 +1,5 @@ #![warn(clippy::transmute_int_to_char)] -#![allow(clippy::missing_transmute_annotations)] +#![allow(clippy::missing_transmute_annotations, unnecessary_transmutes)] fn int_to_char() { let _: char = unsafe { std::char::from_u32(0_u32).unwrap() }; diff --git a/src/tools/clippy/tests/ui/transmute_int_to_char.rs b/src/tools/clippy/tests/ui/transmute_int_to_char.rs index b24bb177c9fc0..8c83ecc8914b6 100644 --- a/src/tools/clippy/tests/ui/transmute_int_to_char.rs +++ b/src/tools/clippy/tests/ui/transmute_int_to_char.rs @@ -1,5 +1,5 @@ #![warn(clippy::transmute_int_to_char)] -#![allow(clippy::missing_transmute_annotations)] +#![allow(clippy::missing_transmute_annotations, unnecessary_transmutes)] fn int_to_char() { let _: char = unsafe { std::mem::transmute(0_u32) }; diff --git a/src/tools/clippy/tests/ui/transmute_int_to_char_no_std.fixed b/src/tools/clippy/tests/ui/transmute_int_to_char_no_std.fixed index e525751e306ea..e6e09a2be4bf5 100644 --- a/src/tools/clippy/tests/ui/transmute_int_to_char_no_std.fixed +++ b/src/tools/clippy/tests/ui/transmute_int_to_char_no_std.fixed @@ -1,7 +1,7 @@ #![no_std] #![feature(lang_items)] #![warn(clippy::transmute_int_to_char)] -#![allow(clippy::missing_transmute_annotations)] +#![allow(clippy::missing_transmute_annotations, unnecessary_transmutes)] use core::panic::PanicInfo; diff --git a/src/tools/clippy/tests/ui/transmute_int_to_char_no_std.rs b/src/tools/clippy/tests/ui/transmute_int_to_char_no_std.rs index 7cb508ceaf3bc..0f2106df00e6c 100644 --- a/src/tools/clippy/tests/ui/transmute_int_to_char_no_std.rs +++ b/src/tools/clippy/tests/ui/transmute_int_to_char_no_std.rs @@ -1,7 +1,7 @@ #![no_std] #![feature(lang_items)] #![warn(clippy::transmute_int_to_char)] -#![allow(clippy::missing_transmute_annotations)] +#![allow(clippy::missing_transmute_annotations, unnecessary_transmutes)] use core::panic::PanicInfo; diff --git a/src/tools/clippy/tests/ui/transmute_null_to_fn.rs b/src/tools/clippy/tests/ui/transmute_null_to_fn.rs index e88f05bb662e2..4712374af934f 100644 --- a/src/tools/clippy/tests/ui/transmute_null_to_fn.rs +++ b/src/tools/clippy/tests/ui/transmute_null_to_fn.rs @@ -1,6 +1,7 @@ #![allow(dead_code)] #![warn(clippy::transmute_null_to_fn)] #![allow(clippy::zero_ptr, clippy::missing_transmute_annotations)] +#![allow(clippy::manual_dangling_ptr)] // Easy to lint because these only span one line. fn one_liners() { diff --git a/src/tools/clippy/tests/ui/transmute_null_to_fn.stderr b/src/tools/clippy/tests/ui/transmute_null_to_fn.stderr index f7d80147445d8..b5b0d4ecc7c03 100644 --- a/src/tools/clippy/tests/ui/transmute_null_to_fn.stderr +++ b/src/tools/clippy/tests/ui/transmute_null_to_fn.stderr @@ -1,5 +1,5 @@ error: transmuting a known null pointer into a function pointer - --> tests/ui/transmute_null_to_fn.rs:8:23 + --> tests/ui/transmute_null_to_fn.rs:9:23 | LL | let _: fn() = std::mem::transmute(0 as *const ()); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ this transmute results in undefined behavior @@ -9,7 +9,7 @@ LL | let _: fn() = std::mem::transmute(0 as *const ()); = help: to override `-D warnings` add `#[allow(clippy::transmute_null_to_fn)]` error: transmuting a known null pointer into a function pointer - --> tests/ui/transmute_null_to_fn.rs:11:23 + --> tests/ui/transmute_null_to_fn.rs:12:23 | LL | let _: fn() = std::mem::transmute(std::ptr::null::<()>()); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ this transmute results in undefined behavior @@ -17,7 +17,7 @@ LL | let _: fn() = std::mem::transmute(std::ptr::null::<()>()); = help: try wrapping your function pointer type in `Option` instead, and using `None` as a null pointer value error: transmuting a known null pointer into a function pointer - --> tests/ui/transmute_null_to_fn.rs:22:23 + --> tests/ui/transmute_null_to_fn.rs:23:23 | LL | let _: fn() = std::mem::transmute(ZPTR); | ^^^^^^^^^^^^^^^^^^^^^^^^^ this transmute results in undefined behavior @@ -25,7 +25,7 @@ LL | let _: fn() = std::mem::transmute(ZPTR); = help: try wrapping your function pointer type in `Option` instead, and using `None` as a null pointer value error: transmuting a known null pointer into a function pointer - --> tests/ui/transmute_null_to_fn.rs:32:23 + --> tests/ui/transmute_null_to_fn.rs:33:23 | LL | let _: fn() = std::mem::transmute(0 as *const u8 as *const ()); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ this transmute results in undefined behavior @@ -33,7 +33,7 @@ LL | let _: fn() = std::mem::transmute(0 as *const u8 as *const ()); = help: try wrapping your function pointer type in `Option` instead, and using `None` as a null pointer value error: transmuting a known null pointer into a function pointer - --> tests/ui/transmute_null_to_fn.rs:35:23 + --> tests/ui/transmute_null_to_fn.rs:36:23 | LL | let _: fn() = std::mem::transmute(std::ptr::null::<()>() as *const u8); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ this transmute results in undefined behavior @@ -41,7 +41,7 @@ LL | let _: fn() = std::mem::transmute(std::ptr::null::<()>() as *const = help: try wrapping your function pointer type in `Option` instead, and using `None` as a null pointer value error: transmuting a known null pointer into a function pointer - --> tests/ui/transmute_null_to_fn.rs:38:23 + --> tests/ui/transmute_null_to_fn.rs:39:23 | LL | let _: fn() = std::mem::transmute(ZPTR as *const u8); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ this transmute results in undefined behavior diff --git a/src/tools/clippy/tests/ui/transmute_ptr_to_ptr.fixed b/src/tools/clippy/tests/ui/transmute_ptr_to_ptr.fixed index 3a67be5f45d0b..476e7e35a1f61 100644 --- a/src/tools/clippy/tests/ui/transmute_ptr_to_ptr.fixed +++ b/src/tools/clippy/tests/ui/transmute_ptr_to_ptr.fixed @@ -8,12 +8,12 @@ use std::mem::transmute; // Make sure we can do static lifetime transmutes unsafe fn transmute_lifetime_to_static<'a, T>(t: &'a T) -> &'static T { - transmute::<&'a T, &'static T>(t) + unsafe { transmute::<&'a T, &'static T>(t) } } // Make sure we can do non-static lifetime transmutes unsafe fn transmute_lifetime<'a, 'b, T>(t: &'a T, u: &'b T) -> &'b T { - transmute::<&'a T, &'b T>(t) + unsafe { transmute::<&'a T, &'b T>(t) } } struct LifetimeParam<'a> { diff --git a/src/tools/clippy/tests/ui/transmute_ptr_to_ptr.rs b/src/tools/clippy/tests/ui/transmute_ptr_to_ptr.rs index 01ad3a3296b17..7356668bcab5a 100644 --- a/src/tools/clippy/tests/ui/transmute_ptr_to_ptr.rs +++ b/src/tools/clippy/tests/ui/transmute_ptr_to_ptr.rs @@ -8,12 +8,12 @@ use std::mem::transmute; // Make sure we can do static lifetime transmutes unsafe fn transmute_lifetime_to_static<'a, T>(t: &'a T) -> &'static T { - transmute::<&'a T, &'static T>(t) + unsafe { transmute::<&'a T, &'static T>(t) } } // Make sure we can do non-static lifetime transmutes unsafe fn transmute_lifetime<'a, 'b, T>(t: &'a T, u: &'b T) -> &'b T { - transmute::<&'a T, &'b T>(t) + unsafe { transmute::<&'a T, &'b T>(t) } } struct LifetimeParam<'a> { diff --git a/src/tools/clippy/tests/ui/transmute_ptr_to_ref.fixed b/src/tools/clippy/tests/ui/transmute_ptr_to_ref.fixed index 1bd45bc10a39b..61e3ac2fe88e3 100644 --- a/src/tools/clippy/tests/ui/transmute_ptr_to_ref.fixed +++ b/src/tools/clippy/tests/ui/transmute_ptr_to_ref.fixed @@ -6,33 +6,35 @@ )] unsafe fn _ptr_to_ref(p: *const T, m: *mut T, o: *const U, om: *mut U) { - let _: &T = &*p; - //~^ transmute_ptr_to_ref - let _: &T = &*p; + unsafe { + let _: &T = &*p; + //~^ transmute_ptr_to_ref + let _: &T = &*p; - let _: &mut T = &mut *m; - //~^ transmute_ptr_to_ref - let _: &mut T = &mut *m; + let _: &mut T = &mut *m; + //~^ transmute_ptr_to_ref + let _: &mut T = &mut *m; - let _: &T = &*m; - //~^ transmute_ptr_to_ref - let _: &T = &*m; + let _: &T = &*m; + //~^ transmute_ptr_to_ref + let _: &T = &*m; - let _: &mut T = &mut *(p as *mut T); - //~^ transmute_ptr_to_ref - let _ = &mut *(p as *mut T); + let _: &mut T = &mut *(p as *mut T); + //~^ transmute_ptr_to_ref + let _ = &mut *(p as *mut T); - let _: &T = &*(o as *const T); - //~^ transmute_ptr_to_ref - let _: &T = &*(o as *const T); + let _: &T = &*(o as *const T); + //~^ transmute_ptr_to_ref + let _: &T = &*(o as *const T); - let _: &mut T = &mut *(om as *mut T); - //~^ transmute_ptr_to_ref - let _: &mut T = &mut *(om as *mut T); + let _: &mut T = &mut *(om as *mut T); + //~^ transmute_ptr_to_ref + let _: &mut T = &mut *(om as *mut T); - let _: &T = &*(om as *const T); - //~^ transmute_ptr_to_ref - let _: &T = &*(om as *const T); + let _: &T = &*(om as *const T); + //~^ transmute_ptr_to_ref + let _: &T = &*(om as *const T); + } } fn _issue1231() { @@ -54,47 +56,53 @@ fn _issue1231() { } unsafe fn _issue8924<'a, 'b, 'c>(x: *const &'a u32, y: *const &'b u32) -> &'c &'b u32 { - match 0 { - 0 => &*x.cast::<&u32>(), - //~^ transmute_ptr_to_ref - 1 => &*y.cast::<&u32>(), - //~^ transmute_ptr_to_ref - 2 => &*x.cast::<&'b u32>(), - //~^ transmute_ptr_to_ref - _ => &*y.cast::<&'b u32>(), - //~^ transmute_ptr_to_ref + unsafe { + match 0 { + 0 => &*x.cast::<&u32>(), + //~^ transmute_ptr_to_ref + 1 => &*y.cast::<&u32>(), + //~^ transmute_ptr_to_ref + 2 => &*x.cast::<&'b u32>(), + //~^ transmute_ptr_to_ref + _ => &*y.cast::<&'b u32>(), + //~^ transmute_ptr_to_ref + } } } #[clippy::msrv = "1.38"] unsafe fn _meets_msrv<'a, 'b, 'c>(x: *const &'a u32) -> &'c &'b u32 { - let a = 0u32; - let a = &a as *const u32; - let _: &u32 = &*a; - //~^ transmute_ptr_to_ref - let _: &u32 = &*a.cast::(); - //~^ transmute_ptr_to_ref - match 0 { - 0 => &*x.cast::<&u32>(), + unsafe { + let a = 0u32; + let a = &a as *const u32; + let _: &u32 = &*a; //~^ transmute_ptr_to_ref - _ => &*x.cast::<&'b u32>(), + let _: &u32 = &*a.cast::(); //~^ transmute_ptr_to_ref + match 0 { + 0 => &*x.cast::<&u32>(), + //~^ transmute_ptr_to_ref + _ => &*x.cast::<&'b u32>(), + //~^ transmute_ptr_to_ref + } } } #[clippy::msrv = "1.37"] unsafe fn _under_msrv<'a, 'b, 'c>(x: *const &'a u32) -> &'c &'b u32 { - let a = 0u32; - let a = &a as *const u32; - let _: &u32 = &*a; - //~^ transmute_ptr_to_ref - let _: &u32 = &*(a as *const u32); - //~^ transmute_ptr_to_ref - match 0 { - 0 => &*(x as *const () as *const &u32), + unsafe { + let a = 0u32; + let a = &a as *const u32; + let _: &u32 = &*a; //~^ transmute_ptr_to_ref - _ => &*(x as *const () as *const &'b u32), + let _: &u32 = &*(a as *const u32); //~^ transmute_ptr_to_ref + match 0 { + 0 => &*(x as *const () as *const &u32), + //~^ transmute_ptr_to_ref + _ => &*(x as *const () as *const &'b u32), + //~^ transmute_ptr_to_ref + } } } diff --git a/src/tools/clippy/tests/ui/transmute_ptr_to_ref.rs b/src/tools/clippy/tests/ui/transmute_ptr_to_ref.rs index cbe64bf1ea6bc..48e2f527b554c 100644 --- a/src/tools/clippy/tests/ui/transmute_ptr_to_ref.rs +++ b/src/tools/clippy/tests/ui/transmute_ptr_to_ref.rs @@ -6,33 +6,35 @@ )] unsafe fn _ptr_to_ref(p: *const T, m: *mut T, o: *const U, om: *mut U) { - let _: &T = std::mem::transmute(p); - //~^ transmute_ptr_to_ref - let _: &T = &*p; + unsafe { + let _: &T = std::mem::transmute(p); + //~^ transmute_ptr_to_ref + let _: &T = &*p; - let _: &mut T = std::mem::transmute(m); - //~^ transmute_ptr_to_ref - let _: &mut T = &mut *m; + let _: &mut T = std::mem::transmute(m); + //~^ transmute_ptr_to_ref + let _: &mut T = &mut *m; - let _: &T = std::mem::transmute(m); - //~^ transmute_ptr_to_ref - let _: &T = &*m; + let _: &T = std::mem::transmute(m); + //~^ transmute_ptr_to_ref + let _: &T = &*m; - let _: &mut T = std::mem::transmute(p as *mut T); - //~^ transmute_ptr_to_ref - let _ = &mut *(p as *mut T); + let _: &mut T = std::mem::transmute(p as *mut T); + //~^ transmute_ptr_to_ref + let _ = &mut *(p as *mut T); - let _: &T = std::mem::transmute(o); - //~^ transmute_ptr_to_ref - let _: &T = &*(o as *const T); + let _: &T = std::mem::transmute(o); + //~^ transmute_ptr_to_ref + let _: &T = &*(o as *const T); - let _: &mut T = std::mem::transmute(om); - //~^ transmute_ptr_to_ref - let _: &mut T = &mut *(om as *mut T); + let _: &mut T = std::mem::transmute(om); + //~^ transmute_ptr_to_ref + let _: &mut T = &mut *(om as *mut T); - let _: &T = std::mem::transmute(om); - //~^ transmute_ptr_to_ref - let _: &T = &*(om as *const T); + let _: &T = std::mem::transmute(om); + //~^ transmute_ptr_to_ref + let _: &T = &*(om as *const T); + } } fn _issue1231() { @@ -54,47 +56,53 @@ fn _issue1231() { } unsafe fn _issue8924<'a, 'b, 'c>(x: *const &'a u32, y: *const &'b u32) -> &'c &'b u32 { - match 0 { - 0 => std::mem::transmute(x), - //~^ transmute_ptr_to_ref - 1 => std::mem::transmute(y), - //~^ transmute_ptr_to_ref - 2 => std::mem::transmute::<_, &&'b u32>(x), - //~^ transmute_ptr_to_ref - _ => std::mem::transmute::<_, &&'b u32>(y), - //~^ transmute_ptr_to_ref + unsafe { + match 0 { + 0 => std::mem::transmute(x), + //~^ transmute_ptr_to_ref + 1 => std::mem::transmute(y), + //~^ transmute_ptr_to_ref + 2 => std::mem::transmute::<_, &&'b u32>(x), + //~^ transmute_ptr_to_ref + _ => std::mem::transmute::<_, &&'b u32>(y), + //~^ transmute_ptr_to_ref + } } } #[clippy::msrv = "1.38"] unsafe fn _meets_msrv<'a, 'b, 'c>(x: *const &'a u32) -> &'c &'b u32 { - let a = 0u32; - let a = &a as *const u32; - let _: &u32 = std::mem::transmute(a); - //~^ transmute_ptr_to_ref - let _: &u32 = std::mem::transmute::<_, &u32>(a); - //~^ transmute_ptr_to_ref - match 0 { - 0 => std::mem::transmute(x), + unsafe { + let a = 0u32; + let a = &a as *const u32; + let _: &u32 = std::mem::transmute(a); //~^ transmute_ptr_to_ref - _ => std::mem::transmute::<_, &&'b u32>(x), + let _: &u32 = std::mem::transmute::<_, &u32>(a); //~^ transmute_ptr_to_ref + match 0 { + 0 => std::mem::transmute(x), + //~^ transmute_ptr_to_ref + _ => std::mem::transmute::<_, &&'b u32>(x), + //~^ transmute_ptr_to_ref + } } } #[clippy::msrv = "1.37"] unsafe fn _under_msrv<'a, 'b, 'c>(x: *const &'a u32) -> &'c &'b u32 { - let a = 0u32; - let a = &a as *const u32; - let _: &u32 = std::mem::transmute(a); - //~^ transmute_ptr_to_ref - let _: &u32 = std::mem::transmute::<_, &u32>(a); - //~^ transmute_ptr_to_ref - match 0 { - 0 => std::mem::transmute(x), + unsafe { + let a = 0u32; + let a = &a as *const u32; + let _: &u32 = std::mem::transmute(a); //~^ transmute_ptr_to_ref - _ => std::mem::transmute::<_, &&'b u32>(x), + let _: &u32 = std::mem::transmute::<_, &u32>(a); //~^ transmute_ptr_to_ref + match 0 { + 0 => std::mem::transmute(x), + //~^ transmute_ptr_to_ref + _ => std::mem::transmute::<_, &&'b u32>(x), + //~^ transmute_ptr_to_ref + } } } diff --git a/src/tools/clippy/tests/ui/transmute_ptr_to_ref.stderr b/src/tools/clippy/tests/ui/transmute_ptr_to_ref.stderr index 7fad9b4065a56..7685c345c8619 100644 --- a/src/tools/clippy/tests/ui/transmute_ptr_to_ref.stderr +++ b/src/tools/clippy/tests/ui/transmute_ptr_to_ref.stderr @@ -1,137 +1,137 @@ error: transmute from a pointer type (`*const T`) to a reference type (`&T`) - --> tests/ui/transmute_ptr_to_ref.rs:9:17 + --> tests/ui/transmute_ptr_to_ref.rs:10:21 | -LL | let _: &T = std::mem::transmute(p); - | ^^^^^^^^^^^^^^^^^^^^^^ help: try: `&*p` +LL | let _: &T = std::mem::transmute(p); + | ^^^^^^^^^^^^^^^^^^^^^^ help: try: `&*p` | = note: `-D clippy::transmute-ptr-to-ref` implied by `-D warnings` = help: to override `-D warnings` add `#[allow(clippy::transmute_ptr_to_ref)]` error: transmute from a pointer type (`*mut T`) to a reference type (`&mut T`) - --> tests/ui/transmute_ptr_to_ref.rs:13:21 + --> tests/ui/transmute_ptr_to_ref.rs:14:25 | -LL | let _: &mut T = std::mem::transmute(m); - | ^^^^^^^^^^^^^^^^^^^^^^ help: try: `&mut *m` +LL | let _: &mut T = std::mem::transmute(m); + | ^^^^^^^^^^^^^^^^^^^^^^ help: try: `&mut *m` error: transmute from a pointer type (`*mut T`) to a reference type (`&T`) - --> tests/ui/transmute_ptr_to_ref.rs:17:17 + --> tests/ui/transmute_ptr_to_ref.rs:18:21 | -LL | let _: &T = std::mem::transmute(m); - | ^^^^^^^^^^^^^^^^^^^^^^ help: try: `&*m` +LL | let _: &T = std::mem::transmute(m); + | ^^^^^^^^^^^^^^^^^^^^^^ help: try: `&*m` error: transmute from a pointer type (`*mut T`) to a reference type (`&mut T`) - --> tests/ui/transmute_ptr_to_ref.rs:21:21 + --> tests/ui/transmute_ptr_to_ref.rs:22:25 | -LL | let _: &mut T = std::mem::transmute(p as *mut T); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `&mut *(p as *mut T)` +LL | let _: &mut T = std::mem::transmute(p as *mut T); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `&mut *(p as *mut T)` error: transmute from a pointer type (`*const U`) to a reference type (`&T`) - --> tests/ui/transmute_ptr_to_ref.rs:25:17 + --> tests/ui/transmute_ptr_to_ref.rs:26:21 | -LL | let _: &T = std::mem::transmute(o); - | ^^^^^^^^^^^^^^^^^^^^^^ help: try: `&*(o as *const T)` +LL | let _: &T = std::mem::transmute(o); + | ^^^^^^^^^^^^^^^^^^^^^^ help: try: `&*(o as *const T)` error: transmute from a pointer type (`*mut U`) to a reference type (`&mut T`) - --> tests/ui/transmute_ptr_to_ref.rs:29:21 + --> tests/ui/transmute_ptr_to_ref.rs:30:25 | -LL | let _: &mut T = std::mem::transmute(om); - | ^^^^^^^^^^^^^^^^^^^^^^^ help: try: `&mut *(om as *mut T)` +LL | let _: &mut T = std::mem::transmute(om); + | ^^^^^^^^^^^^^^^^^^^^^^^ help: try: `&mut *(om as *mut T)` error: transmute from a pointer type (`*mut U`) to a reference type (`&T`) - --> tests/ui/transmute_ptr_to_ref.rs:33:17 + --> tests/ui/transmute_ptr_to_ref.rs:34:21 | -LL | let _: &T = std::mem::transmute(om); - | ^^^^^^^^^^^^^^^^^^^^^^^ help: try: `&*(om as *const T)` +LL | let _: &T = std::mem::transmute(om); + | ^^^^^^^^^^^^^^^^^^^^^^^ help: try: `&*(om as *const T)` error: transmute from a pointer type (`*const i32`) to a reference type (`&_issue1231::Foo<'_, u8>`) - --> tests/ui/transmute_ptr_to_ref.rs:44:32 + --> tests/ui/transmute_ptr_to_ref.rs:46:32 | LL | let _: &Foo = unsafe { std::mem::transmute::<_, &Foo<_>>(raw) }; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `&*raw.cast::>()` error: transmute from a pointer type (`*const i32`) to a reference type (`&_issue1231::Foo<'_, &u8>`) - --> tests/ui/transmute_ptr_to_ref.rs:47:33 + --> tests/ui/transmute_ptr_to_ref.rs:49:33 | LL | let _: &Foo<&u8> = unsafe { std::mem::transmute::<_, &Foo<&_>>(raw) }; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `&*raw.cast::>()` error: transmute from a pointer type (`*const i32`) to a reference type (`&u8`) - --> tests/ui/transmute_ptr_to_ref.rs:52:14 + --> tests/ui/transmute_ptr_to_ref.rs:54:14 | LL | unsafe { std::mem::transmute::<_, Bar>(raw) }; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `&*(raw as *const u8)` error: transmute from a pointer type (`*const &u32`) to a reference type (`&&u32`) - --> tests/ui/transmute_ptr_to_ref.rs:58:14 + --> tests/ui/transmute_ptr_to_ref.rs:61:18 | -LL | 0 => std::mem::transmute(x), - | ^^^^^^^^^^^^^^^^^^^^^^ help: try: `&*x.cast::<&u32>()` +LL | 0 => std::mem::transmute(x), + | ^^^^^^^^^^^^^^^^^^^^^^ help: try: `&*x.cast::<&u32>()` error: transmute from a pointer type (`*const &u32`) to a reference type (`&&u32`) - --> tests/ui/transmute_ptr_to_ref.rs:60:14 + --> tests/ui/transmute_ptr_to_ref.rs:63:18 | -LL | 1 => std::mem::transmute(y), - | ^^^^^^^^^^^^^^^^^^^^^^ help: try: `&*y.cast::<&u32>()` +LL | 1 => std::mem::transmute(y), + | ^^^^^^^^^^^^^^^^^^^^^^ help: try: `&*y.cast::<&u32>()` error: transmute from a pointer type (`*const &u32`) to a reference type (`&&u32`) - --> tests/ui/transmute_ptr_to_ref.rs:62:14 + --> tests/ui/transmute_ptr_to_ref.rs:65:18 | -LL | 2 => std::mem::transmute::<_, &&'b u32>(x), - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `&*x.cast::<&'b u32>()` +LL | 2 => std::mem::transmute::<_, &&'b u32>(x), + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `&*x.cast::<&'b u32>()` error: transmute from a pointer type (`*const &u32`) to a reference type (`&&u32`) - --> tests/ui/transmute_ptr_to_ref.rs:64:14 + --> tests/ui/transmute_ptr_to_ref.rs:67:18 | -LL | _ => std::mem::transmute::<_, &&'b u32>(y), - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `&*y.cast::<&'b u32>()` +LL | _ => std::mem::transmute::<_, &&'b u32>(y), + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `&*y.cast::<&'b u32>()` error: transmute from a pointer type (`*const u32`) to a reference type (`&u32`) - --> tests/ui/transmute_ptr_to_ref.rs:73:19 + --> tests/ui/transmute_ptr_to_ref.rs:78:23 | -LL | let _: &u32 = std::mem::transmute(a); - | ^^^^^^^^^^^^^^^^^^^^^^ help: try: `&*a` +LL | let _: &u32 = std::mem::transmute(a); + | ^^^^^^^^^^^^^^^^^^^^^^ help: try: `&*a` error: transmute from a pointer type (`*const u32`) to a reference type (`&u32`) - --> tests/ui/transmute_ptr_to_ref.rs:75:19 + --> tests/ui/transmute_ptr_to_ref.rs:80:23 | -LL | let _: &u32 = std::mem::transmute::<_, &u32>(a); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `&*a.cast::()` +LL | let _: &u32 = std::mem::transmute::<_, &u32>(a); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `&*a.cast::()` error: transmute from a pointer type (`*const &u32`) to a reference type (`&&u32`) - --> tests/ui/transmute_ptr_to_ref.rs:78:14 + --> tests/ui/transmute_ptr_to_ref.rs:83:18 | -LL | 0 => std::mem::transmute(x), - | ^^^^^^^^^^^^^^^^^^^^^^ help: try: `&*x.cast::<&u32>()` +LL | 0 => std::mem::transmute(x), + | ^^^^^^^^^^^^^^^^^^^^^^ help: try: `&*x.cast::<&u32>()` error: transmute from a pointer type (`*const &u32`) to a reference type (`&&u32`) - --> tests/ui/transmute_ptr_to_ref.rs:80:14 + --> tests/ui/transmute_ptr_to_ref.rs:85:18 | -LL | _ => std::mem::transmute::<_, &&'b u32>(x), - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `&*x.cast::<&'b u32>()` +LL | _ => std::mem::transmute::<_, &&'b u32>(x), + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `&*x.cast::<&'b u32>()` error: transmute from a pointer type (`*const u32`) to a reference type (`&u32`) - --> tests/ui/transmute_ptr_to_ref.rs:89:19 + --> tests/ui/transmute_ptr_to_ref.rs:96:23 | -LL | let _: &u32 = std::mem::transmute(a); - | ^^^^^^^^^^^^^^^^^^^^^^ help: try: `&*a` +LL | let _: &u32 = std::mem::transmute(a); + | ^^^^^^^^^^^^^^^^^^^^^^ help: try: `&*a` error: transmute from a pointer type (`*const u32`) to a reference type (`&u32`) - --> tests/ui/transmute_ptr_to_ref.rs:91:19 + --> tests/ui/transmute_ptr_to_ref.rs:98:23 | -LL | let _: &u32 = std::mem::transmute::<_, &u32>(a); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `&*(a as *const u32)` +LL | let _: &u32 = std::mem::transmute::<_, &u32>(a); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `&*(a as *const u32)` error: transmute from a pointer type (`*const &u32`) to a reference type (`&&u32`) - --> tests/ui/transmute_ptr_to_ref.rs:94:14 + --> tests/ui/transmute_ptr_to_ref.rs:101:18 | -LL | 0 => std::mem::transmute(x), - | ^^^^^^^^^^^^^^^^^^^^^^ help: try: `&*(x as *const () as *const &u32)` +LL | 0 => std::mem::transmute(x), + | ^^^^^^^^^^^^^^^^^^^^^^ help: try: `&*(x as *const () as *const &u32)` error: transmute from a pointer type (`*const &u32`) to a reference type (`&&u32`) - --> tests/ui/transmute_ptr_to_ref.rs:96:14 + --> tests/ui/transmute_ptr_to_ref.rs:103:18 | -LL | _ => std::mem::transmute::<_, &&'b u32>(x), - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `&*(x as *const () as *const &'b u32)` +LL | _ => std::mem::transmute::<_, &&'b u32>(x), + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `&*(x as *const () as *const &'b u32)` error: aborting due to 22 previous errors diff --git a/src/tools/clippy/tests/ui/transmuting_null.rs b/src/tools/clippy/tests/ui/transmuting_null.rs index bcd35bbd4e72a..f3eb5060cd0d3 100644 --- a/src/tools/clippy/tests/ui/transmuting_null.rs +++ b/src/tools/clippy/tests/ui/transmuting_null.rs @@ -3,6 +3,7 @@ #![allow(clippy::zero_ptr)] #![allow(clippy::transmute_ptr_to_ref)] #![allow(clippy::eq_op, clippy::missing_transmute_annotations)] +#![allow(clippy::manual_dangling_ptr)] // Easy to lint because these only span one line. fn one_liners() { diff --git a/src/tools/clippy/tests/ui/transmuting_null.stderr b/src/tools/clippy/tests/ui/transmuting_null.stderr index 84e6e374d5253..c68e4102e405b 100644 --- a/src/tools/clippy/tests/ui/transmuting_null.stderr +++ b/src/tools/clippy/tests/ui/transmuting_null.stderr @@ -1,5 +1,5 @@ error: transmuting a known null pointer into a reference - --> tests/ui/transmuting_null.rs:10:23 + --> tests/ui/transmuting_null.rs:11:23 | LL | let _: &u64 = std::mem::transmute(0 as *const u64); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -8,13 +8,13 @@ LL | let _: &u64 = std::mem::transmute(0 as *const u64); = help: to override `-D warnings` add `#[allow(clippy::transmuting_null)]` error: transmuting a known null pointer into a reference - --> tests/ui/transmuting_null.rs:13:23 + --> tests/ui/transmuting_null.rs:14:23 | LL | let _: &u64 = std::mem::transmute(std::ptr::null::()); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: transmuting a known null pointer into a reference - --> tests/ui/transmuting_null.rs:24:23 + --> tests/ui/transmuting_null.rs:25:23 | LL | let _: &u64 = std::mem::transmute(ZPTR); | ^^^^^^^^^^^^^^^^^^^^^^^^^ diff --git a/src/tools/clippy/tests/ui/type_complexity.rs b/src/tools/clippy/tests/ui/type_complexity.rs index 89c4955c9f6f0..9d145516d6107 100644 --- a/src/tools/clippy/tests/ui/type_complexity.rs +++ b/src/tools/clippy/tests/ui/type_complexity.rs @@ -1,6 +1,5 @@ -#![warn(clippy::all)] -#![allow(unused, clippy::needless_pass_by_value, clippy::vec_box, clippy::useless_vec)] #![feature(associated_type_defaults)] +#![allow(clippy::needless_pass_by_value, clippy::vec_box, clippy::useless_vec)] type Alias = Vec>>; // no warning here diff --git a/src/tools/clippy/tests/ui/type_complexity.stderr b/src/tools/clippy/tests/ui/type_complexity.stderr index 181e04d38e9a4..a7f6a074a4a4d 100644 --- a/src/tools/clippy/tests/ui/type_complexity.stderr +++ b/src/tools/clippy/tests/ui/type_complexity.stderr @@ -1,5 +1,5 @@ error: very complex type used. Consider factoring parts into `type` definitions - --> tests/ui/type_complexity.rs:7:12 + --> tests/ui/type_complexity.rs:6:12 | LL | const CST: (u32, (u32, (u32, (u32, u32)))) = (0, (0, (0, (0, 0)))); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -8,85 +8,85 @@ LL | const CST: (u32, (u32, (u32, (u32, u32)))) = (0, (0, (0, (0, 0)))); = help: to override `-D warnings` add `#[allow(clippy::type_complexity)]` error: very complex type used. Consider factoring parts into `type` definitions - --> tests/ui/type_complexity.rs:10:12 + --> tests/ui/type_complexity.rs:9:12 | LL | static ST: (u32, (u32, (u32, (u32, u32)))) = (0, (0, (0, (0, 0)))); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: very complex type used. Consider factoring parts into `type` definitions - --> tests/ui/type_complexity.rs:14:8 + --> tests/ui/type_complexity.rs:13:8 | LL | f: Vec>>, | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: very complex type used. Consider factoring parts into `type` definitions - --> tests/ui/type_complexity.rs:18:11 + --> tests/ui/type_complexity.rs:17:11 | LL | struct Ts(Vec>>); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: very complex type used. Consider factoring parts into `type` definitions - --> tests/ui/type_complexity.rs:22:11 + --> tests/ui/type_complexity.rs:21:11 | LL | Tuple(Vec>>), | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: very complex type used. Consider factoring parts into `type` definitions - --> tests/ui/type_complexity.rs:24:17 + --> tests/ui/type_complexity.rs:23:17 | LL | Struct { f: Vec>> }, | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: very complex type used. Consider factoring parts into `type` definitions - --> tests/ui/type_complexity.rs:29:14 + --> tests/ui/type_complexity.rs:28:14 | LL | const A: (u32, (u32, (u32, (u32, u32)))) = (0, (0, (0, (0, 0)))); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: very complex type used. Consider factoring parts into `type` definitions - --> tests/ui/type_complexity.rs:32:30 + --> tests/ui/type_complexity.rs:31:30 | LL | fn impl_method(&self, p: Vec>>) {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: very complex type used. Consider factoring parts into `type` definitions - --> tests/ui/type_complexity.rs:37:14 + --> tests/ui/type_complexity.rs:36:14 | LL | const A: Vec>>; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: very complex type used. Consider factoring parts into `type` definitions - --> tests/ui/type_complexity.rs:40:14 + --> tests/ui/type_complexity.rs:39:14 | LL | type B = Vec>>; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: very complex type used. Consider factoring parts into `type` definitions - --> tests/ui/type_complexity.rs:43:25 + --> tests/ui/type_complexity.rs:42:25 | LL | fn method(&self, p: Vec>>); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: very complex type used. Consider factoring parts into `type` definitions - --> tests/ui/type_complexity.rs:46:29 + --> tests/ui/type_complexity.rs:45:29 | LL | fn def_method(&self, p: Vec>>) {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: very complex type used. Consider factoring parts into `type` definitions - --> tests/ui/type_complexity.rs:59:15 + --> tests/ui/type_complexity.rs:58:15 | LL | fn test1() -> Vec>> { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: very complex type used. Consider factoring parts into `type` definitions - --> tests/ui/type_complexity.rs:65:14 + --> tests/ui/type_complexity.rs:64:14 | LL | fn test2(_x: Vec>>) {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: very complex type used. Consider factoring parts into `type` definitions - --> tests/ui/type_complexity.rs:69:13 + --> tests/ui/type_complexity.rs:68:13 | LL | let _y: Vec>> = vec![]; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ diff --git a/src/tools/clippy/tests/ui/uninit_vec.rs b/src/tools/clippy/tests/ui/uninit_vec.rs index a48397137992f..eeb281322da92 100644 --- a/src/tools/clippy/tests/ui/uninit_vec.rs +++ b/src/tools/clippy/tests/ui/uninit_vec.rs @@ -15,9 +15,17 @@ union MyOwnMaybeUninit { // https://github.com/rust-lang/rust/issues/119620 unsafe fn requires_paramenv() { - let mut vec = Vec::>::with_capacity(1); + unsafe { + let mut vec = Vec::>::with_capacity(1); + //~^ uninit_vec + vec.set_len(1); + } + + let mut vec = Vec::>::with_capacity(2); //~^ uninit_vec - vec.set_len(1); + unsafe { + vec.set_len(2); + } } fn main() { diff --git a/src/tools/clippy/tests/ui/uninit_vec.stderr b/src/tools/clippy/tests/ui/uninit_vec.stderr index 7ff6140a2c3ec..1b821ef004e6f 100644 --- a/src/tools/clippy/tests/ui/uninit_vec.stderr +++ b/src/tools/clippy/tests/ui/uninit_vec.stderr @@ -1,18 +1,29 @@ error: calling `set_len()` immediately after reserving a buffer creates uninitialized values - --> tests/ui/uninit_vec.rs:18:5 + --> tests/ui/uninit_vec.rs:24:5 | -LL | let mut vec = Vec::>::with_capacity(1); +LL | let mut vec = Vec::>::with_capacity(2); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -LL | -LL | vec.set_len(1); - | ^^^^^^^^^^^^^^ +... +LL | vec.set_len(2); + | ^^^^^^^^^^^^^^ | = help: initialize the buffer or wrap the content in `MaybeUninit` = note: `-D clippy::uninit-vec` implied by `-D warnings` = help: to override `-D warnings` add `#[allow(clippy::uninit_vec)]` error: calling `set_len()` immediately after reserving a buffer creates uninitialized values - --> tests/ui/uninit_vec.rs:25:5 + --> tests/ui/uninit_vec.rs:19:9 + | +LL | let mut vec = Vec::>::with_capacity(1); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +LL | +LL | vec.set_len(1); + | ^^^^^^^^^^^^^^ + | + = help: initialize the buffer or wrap the content in `MaybeUninit` + +error: calling `set_len()` immediately after reserving a buffer creates uninitialized values + --> tests/ui/uninit_vec.rs:33:5 | LL | let mut vec: Vec = Vec::with_capacity(1000); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -23,7 +34,7 @@ LL | vec.set_len(200); = help: initialize the buffer or wrap the content in `MaybeUninit` error: calling `set_len()` immediately after reserving a buffer creates uninitialized values - --> tests/ui/uninit_vec.rs:33:5 + --> tests/ui/uninit_vec.rs:41:5 | LL | vec.reserve(1000); | ^^^^^^^^^^^^^^^^^^ @@ -34,7 +45,7 @@ LL | vec.set_len(200); = help: initialize the buffer or wrap the content in `MaybeUninit` error: calling `set_len()` on empty `Vec` creates out-of-bound values - --> tests/ui/uninit_vec.rs:41:5 + --> tests/ui/uninit_vec.rs:49:5 | LL | let mut vec: Vec = Vec::new(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -43,7 +54,7 @@ LL | vec.set_len(200); | ^^^^^^^^^^^^^^^^ error: calling `set_len()` on empty `Vec` creates out-of-bound values - --> tests/ui/uninit_vec.rs:49:5 + --> tests/ui/uninit_vec.rs:57:5 | LL | let mut vec: Vec = Default::default(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -52,7 +63,7 @@ LL | vec.set_len(200); | ^^^^^^^^^^^^^^^^ error: calling `set_len()` on empty `Vec` creates out-of-bound values - --> tests/ui/uninit_vec.rs:56:5 + --> tests/ui/uninit_vec.rs:64:5 | LL | let mut vec: Vec = Vec::default(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -61,7 +72,7 @@ LL | vec.set_len(200); | ^^^^^^^^^^^^^^^^ error: calling `set_len()` immediately after reserving a buffer creates uninitialized values - --> tests/ui/uninit_vec.rs:76:5 + --> tests/ui/uninit_vec.rs:84:5 | LL | let mut vec: Vec = Vec::with_capacity(1000); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -72,7 +83,7 @@ LL | vec.set_len(200); = help: initialize the buffer or wrap the content in `MaybeUninit` error: calling `set_len()` immediately after reserving a buffer creates uninitialized values - --> tests/ui/uninit_vec.rs:87:5 + --> tests/ui/uninit_vec.rs:95:5 | LL | my_vec.vec.reserve(1000); | ^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -83,7 +94,7 @@ LL | my_vec.vec.set_len(200); = help: initialize the buffer or wrap the content in `MaybeUninit` error: calling `set_len()` immediately after reserving a buffer creates uninitialized values - --> tests/ui/uninit_vec.rs:94:5 + --> tests/ui/uninit_vec.rs:102:5 | LL | my_vec.vec = Vec::with_capacity(1000); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -94,7 +105,7 @@ LL | my_vec.vec.set_len(200); = help: initialize the buffer or wrap the content in `MaybeUninit` error: calling `set_len()` immediately after reserving a buffer creates uninitialized values - --> tests/ui/uninit_vec.rs:65:9 + --> tests/ui/uninit_vec.rs:73:9 | LL | let mut vec: Vec = Vec::with_capacity(1000); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -105,7 +116,7 @@ LL | vec.set_len(200); = help: initialize the buffer or wrap the content in `MaybeUninit` error: calling `set_len()` immediately after reserving a buffer creates uninitialized values - --> tests/ui/uninit_vec.rs:70:9 + --> tests/ui/uninit_vec.rs:78:9 | LL | vec.reserve(1000); | ^^^^^^^^^^^^^^^^^^ @@ -116,7 +127,7 @@ LL | vec.set_len(200); = help: initialize the buffer or wrap the content in `MaybeUninit` error: calling `set_len()` immediately after reserving a buffer creates uninitialized values - --> tests/ui/uninit_vec.rs:150:9 + --> tests/ui/uninit_vec.rs:158:9 | LL | let mut vec: Vec = Vec::with_capacity(1000); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -127,7 +138,7 @@ LL | vec.set_len(10); = help: initialize the buffer or wrap the content in `MaybeUninit` error: calling `set_len()` immediately after reserving a buffer creates uninitialized values - --> tests/ui/uninit_vec.rs:178:9 + --> tests/ui/uninit_vec.rs:186:9 | LL | let mut vec: Vec> = Vec::with_capacity(1); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -138,7 +149,7 @@ LL | vec.set_len(1); = help: initialize the buffer or wrap the content in `MaybeUninit` error: calling `set_len()` immediately after reserving a buffer creates uninitialized values - --> tests/ui/uninit_vec.rs:192:9 + --> tests/ui/uninit_vec.rs:200:9 | LL | let mut vec: Vec> = Vec::with_capacity(1); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -148,5 +159,5 @@ LL | vec.set_len(1); | = help: initialize the buffer or wrap the content in `MaybeUninit` -error: aborting due to 14 previous errors +error: aborting due to 15 previous errors diff --git a/src/tools/clippy/tests/ui/unnecessary_cast.fixed b/src/tools/clippy/tests/ui/unnecessary_cast.fixed index ba167e79a308b..91ff4b9ee7713 100644 --- a/src/tools/clippy/tests/ui/unnecessary_cast.fixed +++ b/src/tools/clippy/tests/ui/unnecessary_cast.fixed @@ -266,7 +266,21 @@ mod fixable { // Issue #11968: The suggestion for this lint removes the parentheses and leave the code as // `*x.pow(2)` which tries to dereference the return value rather than `x`. fn issue_11968(x: &usize) -> usize { - { *x }.pow(2) + (*x).pow(2) + //~^ unnecessary_cast + } + + #[allow(clippy::cast_lossless)] + fn issue_14640() { + let x = 5usize; + let vec: Vec = vec![1, 2, 3, 4, 5]; + assert_eq!(vec.len(), x); + //~^ unnecessary_cast + + let _ = (5i32 as i64).abs(); + //~^ unnecessary_cast + + let _ = 5i32 as i64; //~^ unnecessary_cast } } diff --git a/src/tools/clippy/tests/ui/unnecessary_cast.rs b/src/tools/clippy/tests/ui/unnecessary_cast.rs index 0f90a8b05965a..5444a914db167 100644 --- a/src/tools/clippy/tests/ui/unnecessary_cast.rs +++ b/src/tools/clippy/tests/ui/unnecessary_cast.rs @@ -269,4 +269,18 @@ mod fixable { (*x as usize).pow(2) //~^ unnecessary_cast } + + #[allow(clippy::cast_lossless)] + fn issue_14640() { + let x = 5usize; + let vec: Vec = vec![1, 2, 3, 4, 5]; + assert_eq!(vec.len(), x as usize); + //~^ unnecessary_cast + + let _ = (5i32 as i64 as i64).abs(); + //~^ unnecessary_cast + + let _ = 5i32 as i64 as i64; + //~^ unnecessary_cast + } } diff --git a/src/tools/clippy/tests/ui/unnecessary_cast.stderr b/src/tools/clippy/tests/ui/unnecessary_cast.stderr index c83770c1a2992..3e3c5eb81c105 100644 --- a/src/tools/clippy/tests/ui/unnecessary_cast.stderr +++ b/src/tools/clippy/tests/ui/unnecessary_cast.stderr @@ -245,7 +245,25 @@ error: casting to the same type is unnecessary (`usize` -> `usize`) --> tests/ui/unnecessary_cast.rs:269:9 | LL | (*x as usize).pow(2) - | ^^^^^^^^^^^^^ help: try: `{ *x }` + | ^^^^^^^^^^^^^ help: try: `(*x)` -error: aborting due to 41 previous errors +error: casting to the same type is unnecessary (`usize` -> `usize`) + --> tests/ui/unnecessary_cast.rs:277:31 + | +LL | assert_eq!(vec.len(), x as usize); + | ^^^^^^^^^^ help: try: `x` + +error: casting to the same type is unnecessary (`i64` -> `i64`) + --> tests/ui/unnecessary_cast.rs:280:17 + | +LL | let _ = (5i32 as i64 as i64).abs(); + | ^^^^^^^^^^^^^^^^^^^^ help: try: `(5i32 as i64)` + +error: casting to the same type is unnecessary (`i64` -> `i64`) + --> tests/ui/unnecessary_cast.rs:283:17 + | +LL | let _ = 5i32 as i64 as i64; + | ^^^^^^^^^^^^^^^^^^ help: try: `5i32 as i64` + +error: aborting due to 44 previous errors diff --git a/src/tools/clippy/tests/ui/unnecessary_cast_unfixable.rs b/src/tools/clippy/tests/ui/unnecessary_cast_unfixable.rs index 2bb64c3e80e34..4b1f4f76cc45e 100644 --- a/src/tools/clippy/tests/ui/unnecessary_cast_unfixable.rs +++ b/src/tools/clippy/tests/ui/unnecessary_cast_unfixable.rs @@ -17,8 +17,10 @@ mod issue11113 { impl TearOff { unsafe fn query(&self) { - ((*(*(self.object as *mut *mut _) as *mut Vtbl)).query)() - //~^ unnecessary_cast + unsafe { + ((*(*(self.object as *mut *mut _) as *mut Vtbl)).query)() + //~^ unnecessary_cast + } } } } diff --git a/src/tools/clippy/tests/ui/unnecessary_cast_unfixable.stderr b/src/tools/clippy/tests/ui/unnecessary_cast_unfixable.stderr index 6ba1c78730667..6b26bea9de2a7 100644 --- a/src/tools/clippy/tests/ui/unnecessary_cast_unfixable.stderr +++ b/src/tools/clippy/tests/ui/unnecessary_cast_unfixable.stderr @@ -8,10 +8,10 @@ LL | let _ = std::ptr::null() as *const u8; = help: to override `-D warnings` add `#[allow(clippy::unnecessary_cast)]` error: casting raw pointers to the same type and constness is unnecessary (`*mut issue11113::Vtbl` -> `*mut issue11113::Vtbl`) - --> tests/ui/unnecessary_cast_unfixable.rs:20:16 + --> tests/ui/unnecessary_cast_unfixable.rs:21:20 | -LL | ((*(*(self.object as *mut *mut _) as *mut Vtbl)).query)() - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `*(self.object as *mut *mut _)` +LL | ((*(*(self.object as *mut *mut _) as *mut Vtbl)).query)() + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `*(self.object as *mut *mut _)` error: aborting due to 2 previous errors diff --git a/src/tools/clippy/tests/ui/unnecessary_filter_map.rs b/src/tools/clippy/tests/ui/unnecessary_filter_map.rs index c4f1b6bc7e3d7..85582c399ce5f 100644 --- a/src/tools/clippy/tests/ui/unnecessary_filter_map.rs +++ b/src/tools/clippy/tests/ui/unnecessary_filter_map.rs @@ -1,5 +1,4 @@ -//@no-rustfix -#![allow(dead_code)] +#![allow(clippy::redundant_closure)] fn main() { let _ = (0..4).filter_map(|x| if x > 1 { Some(x) } else { None }); @@ -27,9 +26,7 @@ fn main() { let _ = (0..4).filter_map(Some); let _ = vec![Some(10), None].into_iter().filter_map(|x| Some(x)); - //~^ redundant_closure - //~| unnecessary_filter_map - //~| unnecessary_filter_map + //~^ unnecessary_filter_map } fn filter_map_none_changes_item_type() -> impl Iterator { diff --git a/src/tools/clippy/tests/ui/unnecessary_filter_map.stderr b/src/tools/clippy/tests/ui/unnecessary_filter_map.stderr index 6683444b72730..a879633e10f2a 100644 --- a/src/tools/clippy/tests/ui/unnecessary_filter_map.stderr +++ b/src/tools/clippy/tests/ui/unnecessary_filter_map.stderr @@ -1,14 +1,14 @@ -error: this `.filter_map` can be written more simply - --> tests/ui/unnecessary_filter_map.rs:5:13 +error: this `.filter_map(..)` can be written more simply using `.filter(..)` + --> tests/ui/unnecessary_filter_map.rs:4:13 | LL | let _ = (0..4).filter_map(|x| if x > 1 { Some(x) } else { None }); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try instead: `filter` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | = note: `-D clippy::unnecessary-filter-map` implied by `-D warnings` = help: to override `-D warnings` add `#[allow(clippy::unnecessary_filter_map)]` -error: this `.filter_map` can be written more simply - --> tests/ui/unnecessary_filter_map.rs:8:13 +error: this `.filter_map(..)` can be written more simply using `.filter(..)` + --> tests/ui/unnecessary_filter_map.rs:7:13 | LL | let _ = (0..4).filter_map(|x| { | _____________^ @@ -18,10 +18,10 @@ LL | | if x > 1 { ... | LL | | None LL | | }); - | |______^ help: try instead: `filter` + | |______^ -error: this `.filter_map` can be written more simply - --> tests/ui/unnecessary_filter_map.rs:16:13 +error: this `.filter_map(..)` can be written more simply using `.filter(..)` + --> tests/ui/unnecessary_filter_map.rs:15:13 | LL | let _ = (0..4).filter_map(|x| match x { | _____________^ @@ -29,40 +29,25 @@ LL | | LL | | 0 | 1 => None, LL | | _ => Some(x), LL | | }); - | |______^ help: try instead: `filter` + | |______^ -error: this `.filter_map` can be written more simply - --> tests/ui/unnecessary_filter_map.rs:22:13 +error: this `.filter_map(..)` can be written more simply using `.map(..)` + --> tests/ui/unnecessary_filter_map.rs:21:13 | LL | let _ = (0..4).filter_map(|x| Some(x + 1)); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try instead: `map` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -error: redundant closure - --> tests/ui/unnecessary_filter_map.rs:29:57 +error: this call to `.filter_map(..)` is unnecessary + --> tests/ui/unnecessary_filter_map.rs:28:61 | LL | let _ = vec![Some(10), None].into_iter().filter_map(|x| Some(x)); - | ^^^^^^^^^^^ help: replace the closure with the function itself: `Some` - | - = note: `-D clippy::redundant-closure` implied by `-D warnings` - = help: to override `-D warnings` add `#[allow(clippy::redundant_closure)]` - -error: filter_map is unnecessary - --> tests/ui/unnecessary_filter_map.rs:29:61 - | -LL | let _ = vec![Some(10), None].into_iter().filter_map(|x| Some(x)); - | ^^^^ help: try removing the filter_map - -error: this `.filter_map` can be written more simply - --> tests/ui/unnecessary_filter_map.rs:29:13 - | -LL | let _ = vec![Some(10), None].into_iter().filter_map(|x| Some(x)); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try instead: `map` + | ^^^^ -error: this `.filter_map` can be written more simply - --> tests/ui/unnecessary_filter_map.rs:169:14 +error: this `.filter_map(..)` can be written more simply using `.filter(..)` + --> tests/ui/unnecessary_filter_map.rs:166:14 | LL | let _x = std::iter::once(1).filter_map(|n| (n > 1).then_some(n)); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try instead: `filter` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -error: aborting due to 8 previous errors +error: aborting due to 6 previous errors diff --git a/src/tools/clippy/tests/ui/unnecessary_find_map.rs b/src/tools/clippy/tests/ui/unnecessary_find_map.rs index 8c8a3799f0216..33ba7074d623b 100644 --- a/src/tools/clippy/tests/ui/unnecessary_find_map.rs +++ b/src/tools/clippy/tests/ui/unnecessary_find_map.rs @@ -1,4 +1,3 @@ -//@no-rustfix #![allow(dead_code)] fn main() { diff --git a/src/tools/clippy/tests/ui/unnecessary_find_map.stderr b/src/tools/clippy/tests/ui/unnecessary_find_map.stderr index 94e320773a6fe..3754a3d99538e 100644 --- a/src/tools/clippy/tests/ui/unnecessary_find_map.stderr +++ b/src/tools/clippy/tests/ui/unnecessary_find_map.stderr @@ -1,14 +1,14 @@ -error: this `.find_map` can be written more simply - --> tests/ui/unnecessary_find_map.rs:5:13 +error: this `.find_map(..)` can be written more simply using `.find(..)` + --> tests/ui/unnecessary_find_map.rs:4:13 | LL | let _ = (0..4).find_map(|x| if x > 1 { Some(x) } else { None }); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try instead: `find` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | = note: `-D clippy::unnecessary-find-map` implied by `-D warnings` = help: to override `-D warnings` add `#[allow(clippy::unnecessary_find_map)]` -error: this `.find_map` can be written more simply - --> tests/ui/unnecessary_find_map.rs:8:13 +error: this `.find_map(..)` can be written more simply using `.find(..)` + --> tests/ui/unnecessary_find_map.rs:7:13 | LL | let _ = (0..4).find_map(|x| { | _____________^ @@ -18,10 +18,10 @@ LL | | if x > 1 { ... | LL | | None LL | | }); - | |______^ help: try instead: `find` + | |______^ -error: this `.find_map` can be written more simply - --> tests/ui/unnecessary_find_map.rs:16:13 +error: this `.find_map(..)` can be written more simply using `.find(..)` + --> tests/ui/unnecessary_find_map.rs:15:13 | LL | let _ = (0..4).find_map(|x| match x { | _____________^ @@ -29,19 +29,19 @@ LL | | LL | | 0 | 1 => None, LL | | _ => Some(x), LL | | }); - | |______^ help: try instead: `find` + | |______^ -error: this `.find_map` can be written more simply - --> tests/ui/unnecessary_find_map.rs:22:13 +error: this `.find_map(..)` can be written more simply using `.map(..).next()` + --> tests/ui/unnecessary_find_map.rs:21:13 | LL | let _ = (0..4).find_map(|x| Some(x + 1)); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try instead: `map(..).next()` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -error: this `.find_map` can be written more simply - --> tests/ui/unnecessary_find_map.rs:34:14 +error: this `.find_map(..)` can be written more simply using `.find(..)` + --> tests/ui/unnecessary_find_map.rs:33:14 | LL | let _x = std::iter::once(1).find_map(|n| (n > 1).then_some(n)); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try instead: `find` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: aborting due to 5 previous errors diff --git a/src/tools/clippy/tests/ui/unnecessary_iter_cloned.fixed b/src/tools/clippy/tests/ui/unnecessary_iter_cloned.fixed index aed2dbe1f1ce4..61f2e3745ad05 100644 --- a/src/tools/clippy/tests/ui/unnecessary_iter_cloned.fixed +++ b/src/tools/clippy/tests/ui/unnecessary_iter_cloned.fixed @@ -1,4 +1,4 @@ -#![allow(unused_assignments)] +#![allow(unused_assignments, clippy::uninlined_format_args)] #![warn(clippy::unnecessary_to_owned)] #[allow(dead_code)] diff --git a/src/tools/clippy/tests/ui/unnecessary_iter_cloned.rs b/src/tools/clippy/tests/ui/unnecessary_iter_cloned.rs index 12fdd150e4233..b90ca00a5fece 100644 --- a/src/tools/clippy/tests/ui/unnecessary_iter_cloned.rs +++ b/src/tools/clippy/tests/ui/unnecessary_iter_cloned.rs @@ -1,4 +1,4 @@ -#![allow(unused_assignments)] +#![allow(unused_assignments, clippy::uninlined_format_args)] #![warn(clippy::unnecessary_to_owned)] #[allow(dead_code)] diff --git a/src/tools/clippy/tests/ui/unnecessary_lazy_eval.fixed b/src/tools/clippy/tests/ui/unnecessary_lazy_eval.fixed index 9a32908163897..409a8efbfeb95 100644 --- a/src/tools/clippy/tests/ui/unnecessary_lazy_eval.fixed +++ b/src/tools/clippy/tests/ui/unnecessary_lazy_eval.fixed @@ -321,3 +321,7 @@ fn panicky_arithmetic_ops(x: usize, y: isize) { let _x = false.then_some(f1 + f2); //~^ unnecessary_lazy_evaluations } + +fn issue14578() { + let _: Box> = Box::new(true.then(async || 42).unwrap()); +} diff --git a/src/tools/clippy/tests/ui/unnecessary_lazy_eval.rs b/src/tools/clippy/tests/ui/unnecessary_lazy_eval.rs index 2d05ef5c29175..54735023a935d 100644 --- a/src/tools/clippy/tests/ui/unnecessary_lazy_eval.rs +++ b/src/tools/clippy/tests/ui/unnecessary_lazy_eval.rs @@ -321,3 +321,7 @@ fn panicky_arithmetic_ops(x: usize, y: isize) { let _x = false.then(|| f1 + f2); //~^ unnecessary_lazy_evaluations } + +fn issue14578() { + let _: Box> = Box::new(true.then(async || 42).unwrap()); +} diff --git a/src/tools/clippy/tests/ui/unnecessary_operation.fixed b/src/tools/clippy/tests/ui/unnecessary_operation.fixed index 05dfb72f48d2d..645b56fe95e74 100644 --- a/src/tools/clippy/tests/ui/unnecessary_operation.fixed +++ b/src/tools/clippy/tests/ui/unnecessary_operation.fixed @@ -1,9 +1,10 @@ #![allow( clippy::deref_addrof, - dead_code, - unused, clippy::no_effect, - clippy::unnecessary_struct_initialization + clippy::uninlined_format_args, + clippy::unnecessary_struct_initialization, + dead_code, + unused )] #![warn(clippy::unnecessary_operation)] diff --git a/src/tools/clippy/tests/ui/unnecessary_operation.rs b/src/tools/clippy/tests/ui/unnecessary_operation.rs index 6ef74c3eb1c1e..97e90269c5c0c 100644 --- a/src/tools/clippy/tests/ui/unnecessary_operation.rs +++ b/src/tools/clippy/tests/ui/unnecessary_operation.rs @@ -1,9 +1,10 @@ #![allow( clippy::deref_addrof, - dead_code, - unused, clippy::no_effect, - clippy::unnecessary_struct_initialization + clippy::uninlined_format_args, + clippy::unnecessary_struct_initialization, + dead_code, + unused )] #![warn(clippy::unnecessary_operation)] diff --git a/src/tools/clippy/tests/ui/unnecessary_operation.stderr b/src/tools/clippy/tests/ui/unnecessary_operation.stderr index eb98af09e7a3d..0fda1dfde1903 100644 --- a/src/tools/clippy/tests/ui/unnecessary_operation.stderr +++ b/src/tools/clippy/tests/ui/unnecessary_operation.stderr @@ -1,5 +1,5 @@ error: unnecessary operation - --> tests/ui/unnecessary_operation.rs:70:5 + --> tests/ui/unnecessary_operation.rs:71:5 | LL | Tuple(get_number()); | ^^^^^^^^^^^^^^^^^^^^ help: statement can be reduced to: `get_number();` @@ -8,103 +8,103 @@ LL | Tuple(get_number()); = help: to override `-D warnings` add `#[allow(clippy::unnecessary_operation)]` error: unnecessary operation - --> tests/ui/unnecessary_operation.rs:72:5 + --> tests/ui/unnecessary_operation.rs:73:5 | LL | Struct { field: get_number() }; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: statement can be reduced to: `get_number();` error: unnecessary operation - --> tests/ui/unnecessary_operation.rs:74:5 + --> tests/ui/unnecessary_operation.rs:75:5 | LL | Struct { ..get_struct() }; | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: statement can be reduced to: `get_struct();` error: unnecessary operation - --> tests/ui/unnecessary_operation.rs:76:5 + --> tests/ui/unnecessary_operation.rs:77:5 | LL | Enum::Tuple(get_number()); | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: statement can be reduced to: `get_number();` error: unnecessary operation - --> tests/ui/unnecessary_operation.rs:78:5 + --> tests/ui/unnecessary_operation.rs:79:5 | LL | Enum::Struct { field: get_number() }; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: statement can be reduced to: `get_number();` error: unnecessary operation - --> tests/ui/unnecessary_operation.rs:80:5 + --> tests/ui/unnecessary_operation.rs:81:5 | LL | 5 + get_number(); | ^^^^^^^^^^^^^^^^^ help: statement can be reduced to: `5;get_number();` error: unnecessary operation - --> tests/ui/unnecessary_operation.rs:82:5 + --> tests/ui/unnecessary_operation.rs:83:5 | LL | *&get_number(); | ^^^^^^^^^^^^^^^ help: statement can be reduced to: `get_number();` error: unnecessary operation - --> tests/ui/unnecessary_operation.rs:84:5 + --> tests/ui/unnecessary_operation.rs:85:5 | LL | &get_number(); | ^^^^^^^^^^^^^^ help: statement can be reduced to: `get_number();` error: unnecessary operation - --> tests/ui/unnecessary_operation.rs:86:5 + --> tests/ui/unnecessary_operation.rs:87:5 | LL | (5, 6, get_number()); | ^^^^^^^^^^^^^^^^^^^^^ help: statement can be reduced to: `5;6;get_number();` error: unnecessary operation - --> tests/ui/unnecessary_operation.rs:88:5 + --> tests/ui/unnecessary_operation.rs:89:5 | LL | get_number()..; | ^^^^^^^^^^^^^^^ help: statement can be reduced to: `get_number();` error: unnecessary operation - --> tests/ui/unnecessary_operation.rs:90:5 + --> tests/ui/unnecessary_operation.rs:91:5 | LL | ..get_number(); | ^^^^^^^^^^^^^^^ help: statement can be reduced to: `get_number();` error: unnecessary operation - --> tests/ui/unnecessary_operation.rs:92:5 + --> tests/ui/unnecessary_operation.rs:93:5 | LL | 5..get_number(); | ^^^^^^^^^^^^^^^^ help: statement can be reduced to: `5;get_number();` error: unnecessary operation - --> tests/ui/unnecessary_operation.rs:94:5 + --> tests/ui/unnecessary_operation.rs:95:5 | LL | [42, get_number()]; | ^^^^^^^^^^^^^^^^^^^ help: statement can be reduced to: `42;get_number();` error: unnecessary operation - --> tests/ui/unnecessary_operation.rs:96:5 + --> tests/ui/unnecessary_operation.rs:97:5 | LL | [42, 55][get_usize()]; | ^^^^^^^^^^^^^^^^^^^^^^ help: statement can be written as: `assert!([42, 55].len() > get_usize());` error: unnecessary operation - --> tests/ui/unnecessary_operation.rs:98:5 + --> tests/ui/unnecessary_operation.rs:99:5 | LL | (42, get_number()).1; | ^^^^^^^^^^^^^^^^^^^^^ help: statement can be reduced to: `42;get_number();` error: unnecessary operation - --> tests/ui/unnecessary_operation.rs:100:5 + --> tests/ui/unnecessary_operation.rs:101:5 | LL | [get_number(); 55]; | ^^^^^^^^^^^^^^^^^^^ help: statement can be reduced to: `get_number();` error: unnecessary operation - --> tests/ui/unnecessary_operation.rs:102:5 + --> tests/ui/unnecessary_operation.rs:103:5 | LL | [42; 55][get_usize()]; | ^^^^^^^^^^^^^^^^^^^^^^ help: statement can be written as: `assert!([42; 55].len() > get_usize());` error: unnecessary operation - --> tests/ui/unnecessary_operation.rs:104:5 + --> tests/ui/unnecessary_operation.rs:105:5 | LL | / { LL | | @@ -113,7 +113,7 @@ LL | | }; | |______^ help: statement can be reduced to: `get_number();` error: unnecessary operation - --> tests/ui/unnecessary_operation.rs:108:5 + --> tests/ui/unnecessary_operation.rs:109:5 | LL | / FooString { LL | | @@ -122,7 +122,7 @@ LL | | }; | |______^ help: statement can be reduced to: `String::from("blah");` error: unnecessary operation - --> tests/ui/unnecessary_operation.rs:149:5 + --> tests/ui/unnecessary_operation.rs:150:5 | LL | [42, 55][get_usize()]; | ^^^^^^^^^^^^^^^^^^^^^^ help: statement can be written as: `assert!([42, 55].len() > get_usize());` diff --git a/src/tools/clippy/tests/ui/unnecessary_os_str_debug_formatting.rs b/src/tools/clippy/tests/ui/unnecessary_os_str_debug_formatting.rs index 12663ec9a528f..6652efd9ae1d8 100644 --- a/src/tools/clippy/tests/ui/unnecessary_os_str_debug_formatting.rs +++ b/src/tools/clippy/tests/ui/unnecessary_os_str_debug_formatting.rs @@ -1,4 +1,5 @@ #![warn(clippy::unnecessary_debug_formatting)] +#![allow(clippy::uninlined_format_args)] use std::ffi::{OsStr, OsString}; diff --git a/src/tools/clippy/tests/ui/unnecessary_os_str_debug_formatting.stderr b/src/tools/clippy/tests/ui/unnecessary_os_str_debug_formatting.stderr index 001309ab817a1..382e59b046193 100644 --- a/src/tools/clippy/tests/ui/unnecessary_os_str_debug_formatting.stderr +++ b/src/tools/clippy/tests/ui/unnecessary_os_str_debug_formatting.stderr @@ -1,5 +1,5 @@ error: unnecessary `Debug` formatting in `println!` args - --> tests/ui/unnecessary_os_str_debug_formatting.rs:14:22 + --> tests/ui/unnecessary_os_str_debug_formatting.rs:15:22 | LL | println!("{:?}", os_str); | ^^^^^^ @@ -10,7 +10,7 @@ LL | println!("{:?}", os_str); = help: to override `-D warnings` add `#[allow(clippy::unnecessary_debug_formatting)]` error: unnecessary `Debug` formatting in `println!` args - --> tests/ui/unnecessary_os_str_debug_formatting.rs:15:22 + --> tests/ui/unnecessary_os_str_debug_formatting.rs:16:22 | LL | println!("{:?}", os_string); | ^^^^^^^^^ @@ -19,7 +19,7 @@ LL | println!("{:?}", os_string); = note: switching to `Display` formatting will change how the value is shown; escaped characters will no longer be escaped and surrounding quotes will be removed error: unnecessary `Debug` formatting in `println!` args - --> tests/ui/unnecessary_os_str_debug_formatting.rs:17:16 + --> tests/ui/unnecessary_os_str_debug_formatting.rs:18:16 | LL | println!("{os_str:?}"); | ^^^^^^ @@ -28,7 +28,7 @@ LL | println!("{os_str:?}"); = note: switching to `Display` formatting will change how the value is shown; escaped characters will no longer be escaped and surrounding quotes will be removed error: unnecessary `Debug` formatting in `println!` args - --> tests/ui/unnecessary_os_str_debug_formatting.rs:18:16 + --> tests/ui/unnecessary_os_str_debug_formatting.rs:19:16 | LL | println!("{os_string:?}"); | ^^^^^^^^^ @@ -37,7 +37,7 @@ LL | println!("{os_string:?}"); = note: switching to `Display` formatting will change how the value is shown; escaped characters will no longer be escaped and surrounding quotes will be removed error: unnecessary `Debug` formatting in `format!` args - --> tests/ui/unnecessary_os_str_debug_formatting.rs:20:37 + --> tests/ui/unnecessary_os_str_debug_formatting.rs:21:37 | LL | let _: String = format!("{:?}", os_str); | ^^^^^^ @@ -46,7 +46,7 @@ LL | let _: String = format!("{:?}", os_str); = note: switching to `Display` formatting will change how the value is shown; escaped characters will no longer be escaped and surrounding quotes will be removed error: unnecessary `Debug` formatting in `format!` args - --> tests/ui/unnecessary_os_str_debug_formatting.rs:21:37 + --> tests/ui/unnecessary_os_str_debug_formatting.rs:22:37 | LL | let _: String = format!("{:?}", os_string); | ^^^^^^^^^ diff --git a/src/tools/clippy/tests/ui/unnecessary_path_debug_formatting.rs b/src/tools/clippy/tests/ui/unnecessary_path_debug_formatting.rs index f14f6085c9a14..215e0d5d7802e 100644 --- a/src/tools/clippy/tests/ui/unnecessary_path_debug_formatting.rs +++ b/src/tools/clippy/tests/ui/unnecessary_path_debug_formatting.rs @@ -1,4 +1,5 @@ #![warn(clippy::unnecessary_debug_formatting)] +#![allow(clippy::uninlined_format_args)] use std::ffi::{OsStr, OsString}; use std::ops::Deref; diff --git a/src/tools/clippy/tests/ui/unnecessary_path_debug_formatting.stderr b/src/tools/clippy/tests/ui/unnecessary_path_debug_formatting.stderr index f12fa72c84b35..d244b9ad6716a 100644 --- a/src/tools/clippy/tests/ui/unnecessary_path_debug_formatting.stderr +++ b/src/tools/clippy/tests/ui/unnecessary_path_debug_formatting.stderr @@ -1,5 +1,5 @@ error: unnecessary `Debug` formatting in `println!` args - --> tests/ui/unnecessary_path_debug_formatting.rs:29:22 + --> tests/ui/unnecessary_path_debug_formatting.rs:30:22 | LL | println!("{:?}", os_str); | ^^^^^^ @@ -10,7 +10,7 @@ LL | println!("{:?}", os_str); = help: to override `-D warnings` add `#[allow(clippy::unnecessary_debug_formatting)]` error: unnecessary `Debug` formatting in `println!` args - --> tests/ui/unnecessary_path_debug_formatting.rs:30:22 + --> tests/ui/unnecessary_path_debug_formatting.rs:31:22 | LL | println!("{:?}", os_string); | ^^^^^^^^^ @@ -19,7 +19,7 @@ LL | println!("{:?}", os_string); = note: switching to `Display` formatting will change how the value is shown; escaped characters will no longer be escaped and surrounding quotes will be removed error: unnecessary `Debug` formatting in `println!` args - --> tests/ui/unnecessary_path_debug_formatting.rs:32:22 + --> tests/ui/unnecessary_path_debug_formatting.rs:33:22 | LL | println!("{:?}", path); | ^^^^ @@ -28,7 +28,7 @@ LL | println!("{:?}", path); = note: switching to `Display` formatting will change how the value is shown; escaped characters will no longer be escaped and surrounding quotes will be removed error: unnecessary `Debug` formatting in `println!` args - --> tests/ui/unnecessary_path_debug_formatting.rs:33:22 + --> tests/ui/unnecessary_path_debug_formatting.rs:34:22 | LL | println!("{:?}", path_buf); | ^^^^^^^^ @@ -37,7 +37,7 @@ LL | println!("{:?}", path_buf); = note: switching to `Display` formatting will change how the value is shown; escaped characters will no longer be escaped and surrounding quotes will be removed error: unnecessary `Debug` formatting in `println!` args - --> tests/ui/unnecessary_path_debug_formatting.rs:35:16 + --> tests/ui/unnecessary_path_debug_formatting.rs:36:16 | LL | println!("{path:?}"); | ^^^^ @@ -46,7 +46,7 @@ LL | println!("{path:?}"); = note: switching to `Display` formatting will change how the value is shown; escaped characters will no longer be escaped and surrounding quotes will be removed error: unnecessary `Debug` formatting in `println!` args - --> tests/ui/unnecessary_path_debug_formatting.rs:36:16 + --> tests/ui/unnecessary_path_debug_formatting.rs:37:16 | LL | println!("{path_buf:?}"); | ^^^^^^^^ @@ -55,7 +55,7 @@ LL | println!("{path_buf:?}"); = note: switching to `Display` formatting will change how the value is shown; escaped characters will no longer be escaped and surrounding quotes will be removed error: unnecessary `Debug` formatting in `format!` args - --> tests/ui/unnecessary_path_debug_formatting.rs:38:37 + --> tests/ui/unnecessary_path_debug_formatting.rs:39:37 | LL | let _: String = format!("{:?}", path); | ^^^^ @@ -64,7 +64,7 @@ LL | let _: String = format!("{:?}", path); = note: switching to `Display` formatting will change how the value is shown; escaped characters will no longer be escaped and surrounding quotes will be removed error: unnecessary `Debug` formatting in `format!` args - --> tests/ui/unnecessary_path_debug_formatting.rs:39:37 + --> tests/ui/unnecessary_path_debug_formatting.rs:40:37 | LL | let _: String = format!("{:?}", path_buf); | ^^^^^^^^ @@ -73,7 +73,7 @@ LL | let _: String = format!("{:?}", path_buf); = note: switching to `Display` formatting will change how the value is shown; escaped characters will no longer be escaped and surrounding quotes will be removed error: unnecessary `Debug` formatting in `println!` args - --> tests/ui/unnecessary_path_debug_formatting.rs:42:22 + --> tests/ui/unnecessary_path_debug_formatting.rs:43:22 | LL | println!("{:?}", &*deref_path); | ^^^^^^^^^^^^ diff --git a/src/tools/clippy/tests/ui/unnecessary_to_owned.fixed b/src/tools/clippy/tests/ui/unnecessary_to_owned.fixed index 5410033dbd8f4..b064a8b8f46fb 100644 --- a/src/tools/clippy/tests/ui/unnecessary_to_owned.fixed +++ b/src/tools/clippy/tests/ui/unnecessary_to_owned.fixed @@ -1,10 +1,11 @@ #![allow( + clippy::manual_async_fn, clippy::needless_borrow, clippy::needless_borrows_for_generic_args, - clippy::ptr_arg, - clippy::manual_async_fn, clippy::needless_lifetimes, - clippy::owned_cow + clippy::owned_cow, + clippy::ptr_arg, + clippy::uninlined_format_args )] #![warn(clippy::unnecessary_to_owned, clippy::redundant_clone)] diff --git a/src/tools/clippy/tests/ui/unnecessary_to_owned.rs b/src/tools/clippy/tests/ui/unnecessary_to_owned.rs index 0619dd4ddec09..7954a4ad4ce77 100644 --- a/src/tools/clippy/tests/ui/unnecessary_to_owned.rs +++ b/src/tools/clippy/tests/ui/unnecessary_to_owned.rs @@ -1,10 +1,11 @@ #![allow( + clippy::manual_async_fn, clippy::needless_borrow, clippy::needless_borrows_for_generic_args, - clippy::ptr_arg, - clippy::manual_async_fn, clippy::needless_lifetimes, - clippy::owned_cow + clippy::owned_cow, + clippy::ptr_arg, + clippy::uninlined_format_args )] #![warn(clippy::unnecessary_to_owned, clippy::redundant_clone)] diff --git a/src/tools/clippy/tests/ui/unnecessary_to_owned.stderr b/src/tools/clippy/tests/ui/unnecessary_to_owned.stderr index 8926db34da8c8..6c52be8393010 100644 --- a/src/tools/clippy/tests/ui/unnecessary_to_owned.stderr +++ b/src/tools/clippy/tests/ui/unnecessary_to_owned.stderr @@ -1,11 +1,11 @@ error: redundant clone - --> tests/ui/unnecessary_to_owned.rs:217:64 + --> tests/ui/unnecessary_to_owned.rs:218:64 | LL | require_c_str(&CString::from_vec_with_nul(vec![0]).unwrap().to_owned()); | ^^^^^^^^^^^ help: remove this | note: this value is dropped without further use - --> tests/ui/unnecessary_to_owned.rs:217:20 + --> tests/ui/unnecessary_to_owned.rs:218:20 | LL | require_c_str(&CString::from_vec_with_nul(vec![0]).unwrap().to_owned()); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -13,55 +13,55 @@ LL | require_c_str(&CString::from_vec_with_nul(vec![0]).unwrap().to_owned()) = help: to override `-D warnings` add `#[allow(clippy::redundant_clone)]` error: redundant clone - --> tests/ui/unnecessary_to_owned.rs:219:40 + --> tests/ui/unnecessary_to_owned.rs:220:40 | LL | require_os_str(&OsString::from("x").to_os_string()); | ^^^^^^^^^^^^^^^ help: remove this | note: this value is dropped without further use - --> tests/ui/unnecessary_to_owned.rs:219:21 + --> tests/ui/unnecessary_to_owned.rs:220:21 | LL | require_os_str(&OsString::from("x").to_os_string()); | ^^^^^^^^^^^^^^^^^^^ error: redundant clone - --> tests/ui/unnecessary_to_owned.rs:221:48 + --> tests/ui/unnecessary_to_owned.rs:222:48 | LL | require_path(&std::path::PathBuf::from("x").to_path_buf()); | ^^^^^^^^^^^^^^ help: remove this | note: this value is dropped without further use - --> tests/ui/unnecessary_to_owned.rs:221:19 + --> tests/ui/unnecessary_to_owned.rs:222:19 | LL | require_path(&std::path::PathBuf::from("x").to_path_buf()); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: redundant clone - --> tests/ui/unnecessary_to_owned.rs:223:35 + --> tests/ui/unnecessary_to_owned.rs:224:35 | LL | require_str(&String::from("x").to_string()); | ^^^^^^^^^^^^ help: remove this | note: this value is dropped without further use - --> tests/ui/unnecessary_to_owned.rs:223:18 + --> tests/ui/unnecessary_to_owned.rs:224:18 | LL | require_str(&String::from("x").to_string()); | ^^^^^^^^^^^^^^^^^ error: redundant clone - --> tests/ui/unnecessary_to_owned.rs:225:39 + --> tests/ui/unnecessary_to_owned.rs:226:39 | LL | require_slice(&[String::from("x")].to_owned()); | ^^^^^^^^^^^ help: remove this | note: this value is dropped without further use - --> tests/ui/unnecessary_to_owned.rs:225:20 + --> tests/ui/unnecessary_to_owned.rs:226:20 | LL | require_slice(&[String::from("x")].to_owned()); | ^^^^^^^^^^^^^^^^^^^ error: unnecessary use of `into_owned` - --> tests/ui/unnecessary_to_owned.rs:65:36 + --> tests/ui/unnecessary_to_owned.rs:66:36 | LL | require_c_str(&Cow::from(c_str).into_owned()); | ^^^^^^^^^^^^^ help: remove this @@ -70,391 +70,391 @@ LL | require_c_str(&Cow::from(c_str).into_owned()); = help: to override `-D warnings` add `#[allow(clippy::unnecessary_to_owned)]` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:67:19 + --> tests/ui/unnecessary_to_owned.rs:68:19 | LL | require_c_str(&c_str.to_owned()); | ^^^^^^^^^^^^^^^^^ help: use: `c_str` error: unnecessary use of `to_os_string` - --> tests/ui/unnecessary_to_owned.rs:70:20 + --> tests/ui/unnecessary_to_owned.rs:71:20 | LL | require_os_str(&os_str.to_os_string()); | ^^^^^^^^^^^^^^^^^^^^^^ help: use: `os_str` error: unnecessary use of `into_owned` - --> tests/ui/unnecessary_to_owned.rs:72:38 + --> tests/ui/unnecessary_to_owned.rs:73:38 | LL | require_os_str(&Cow::from(os_str).into_owned()); | ^^^^^^^^^^^^^ help: remove this error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:74:20 + --> tests/ui/unnecessary_to_owned.rs:75:20 | LL | require_os_str(&os_str.to_owned()); | ^^^^^^^^^^^^^^^^^^ help: use: `os_str` error: unnecessary use of `to_path_buf` - --> tests/ui/unnecessary_to_owned.rs:77:18 + --> tests/ui/unnecessary_to_owned.rs:78:18 | LL | require_path(&path.to_path_buf()); | ^^^^^^^^^^^^^^^^^^^ help: use: `path` error: unnecessary use of `into_owned` - --> tests/ui/unnecessary_to_owned.rs:79:34 + --> tests/ui/unnecessary_to_owned.rs:80:34 | LL | require_path(&Cow::from(path).into_owned()); | ^^^^^^^^^^^^^ help: remove this error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:81:18 + --> tests/ui/unnecessary_to_owned.rs:82:18 | LL | require_path(&path.to_owned()); | ^^^^^^^^^^^^^^^^ help: use: `path` error: unnecessary use of `to_string` - --> tests/ui/unnecessary_to_owned.rs:84:17 + --> tests/ui/unnecessary_to_owned.rs:85:17 | LL | require_str(&s.to_string()); | ^^^^^^^^^^^^^^ help: use: `s` error: unnecessary use of `into_owned` - --> tests/ui/unnecessary_to_owned.rs:86:30 + --> tests/ui/unnecessary_to_owned.rs:87:30 | LL | require_str(&Cow::from(s).into_owned()); | ^^^^^^^^^^^^^ help: remove this error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:88:17 + --> tests/ui/unnecessary_to_owned.rs:89:17 | LL | require_str(&s.to_owned()); | ^^^^^^^^^^^^^ help: use: `s` error: unnecessary use of `to_string` - --> tests/ui/unnecessary_to_owned.rs:90:17 + --> tests/ui/unnecessary_to_owned.rs:91:17 | LL | require_str(&x_ref.to_string()); | ^^^^^^^^^^^^^^^^^^ help: use: `x_ref.as_ref()` error: unnecessary use of `to_vec` - --> tests/ui/unnecessary_to_owned.rs:93:19 + --> tests/ui/unnecessary_to_owned.rs:94:19 | LL | require_slice(&slice.to_vec()); | ^^^^^^^^^^^^^^^ help: use: `slice` error: unnecessary use of `into_owned` - --> tests/ui/unnecessary_to_owned.rs:95:36 + --> tests/ui/unnecessary_to_owned.rs:96:36 | LL | require_slice(&Cow::from(slice).into_owned()); | ^^^^^^^^^^^^^ help: remove this error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:97:19 + --> tests/ui/unnecessary_to_owned.rs:98:19 | LL | require_slice(&array.to_owned()); | ^^^^^^^^^^^^^^^^^ help: use: `array.as_ref()` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:99:19 + --> tests/ui/unnecessary_to_owned.rs:100:19 | LL | require_slice(&array_ref.to_owned()); | ^^^^^^^^^^^^^^^^^^^^^ help: use: `array_ref.as_ref()` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:101:19 + --> tests/ui/unnecessary_to_owned.rs:102:19 | LL | require_slice(&slice.to_owned()); | ^^^^^^^^^^^^^^^^^ help: use: `slice` error: unnecessary use of `into_owned` - --> tests/ui/unnecessary_to_owned.rs:105:42 + --> tests/ui/unnecessary_to_owned.rs:106:42 | LL | require_x(&Cow::::Owned(x.clone()).into_owned()); | ^^^^^^^^^^^^^ help: remove this error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:109:25 + --> tests/ui/unnecessary_to_owned.rs:110:25 | LL | require_deref_c_str(c_str.to_owned()); | ^^^^^^^^^^^^^^^^ help: use: `c_str` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:111:26 + --> tests/ui/unnecessary_to_owned.rs:112:26 | LL | require_deref_os_str(os_str.to_owned()); | ^^^^^^^^^^^^^^^^^ help: use: `os_str` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:113:24 + --> tests/ui/unnecessary_to_owned.rs:114:24 | LL | require_deref_path(path.to_owned()); | ^^^^^^^^^^^^^^^ help: use: `path` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:115:23 + --> tests/ui/unnecessary_to_owned.rs:116:23 | LL | require_deref_str(s.to_owned()); | ^^^^^^^^^^^^ help: use: `s` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:117:25 + --> tests/ui/unnecessary_to_owned.rs:118:25 | LL | require_deref_slice(slice.to_owned()); | ^^^^^^^^^^^^^^^^ help: use: `slice` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:120:30 + --> tests/ui/unnecessary_to_owned.rs:121:30 | LL | require_impl_deref_c_str(c_str.to_owned()); | ^^^^^^^^^^^^^^^^ help: use: `c_str` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:122:31 + --> tests/ui/unnecessary_to_owned.rs:123:31 | LL | require_impl_deref_os_str(os_str.to_owned()); | ^^^^^^^^^^^^^^^^^ help: use: `os_str` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:124:29 + --> tests/ui/unnecessary_to_owned.rs:125:29 | LL | require_impl_deref_path(path.to_owned()); | ^^^^^^^^^^^^^^^ help: use: `path` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:126:28 + --> tests/ui/unnecessary_to_owned.rs:127:28 | LL | require_impl_deref_str(s.to_owned()); | ^^^^^^^^^^^^ help: use: `s` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:128:30 + --> tests/ui/unnecessary_to_owned.rs:129:30 | LL | require_impl_deref_slice(slice.to_owned()); | ^^^^^^^^^^^^^^^^ help: use: `slice` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:131:29 + --> tests/ui/unnecessary_to_owned.rs:132:29 | LL | require_deref_str_slice(s.to_owned(), slice.to_owned()); | ^^^^^^^^^^^^ help: use: `s` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:131:43 + --> tests/ui/unnecessary_to_owned.rs:132:43 | LL | require_deref_str_slice(s.to_owned(), slice.to_owned()); | ^^^^^^^^^^^^^^^^ help: use: `slice` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:134:29 + --> tests/ui/unnecessary_to_owned.rs:135:29 | LL | require_deref_slice_str(slice.to_owned(), s.to_owned()); | ^^^^^^^^^^^^^^^^ help: use: `slice` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:134:47 + --> tests/ui/unnecessary_to_owned.rs:135:47 | LL | require_deref_slice_str(slice.to_owned(), s.to_owned()); | ^^^^^^^^^^^^ help: use: `s` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:138:26 + --> tests/ui/unnecessary_to_owned.rs:139:26 | LL | require_as_ref_c_str(c_str.to_owned()); | ^^^^^^^^^^^^^^^^ help: use: `c_str` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:140:27 + --> tests/ui/unnecessary_to_owned.rs:141:27 | LL | require_as_ref_os_str(os_str.to_owned()); | ^^^^^^^^^^^^^^^^^ help: use: `os_str` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:142:25 + --> tests/ui/unnecessary_to_owned.rs:143:25 | LL | require_as_ref_path(path.to_owned()); | ^^^^^^^^^^^^^^^ help: use: `path` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:144:24 + --> tests/ui/unnecessary_to_owned.rs:145:24 | LL | require_as_ref_str(s.to_owned()); | ^^^^^^^^^^^^ help: use: `s` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:146:24 + --> tests/ui/unnecessary_to_owned.rs:147:24 | LL | require_as_ref_str(x.to_owned()); | ^^^^^^^^^^^^ help: use: `&x` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:148:26 + --> tests/ui/unnecessary_to_owned.rs:149:26 | LL | require_as_ref_slice(array.to_owned()); | ^^^^^^^^^^^^^^^^ help: use: `array` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:150:26 + --> tests/ui/unnecessary_to_owned.rs:151:26 | LL | require_as_ref_slice(array_ref.to_owned()); | ^^^^^^^^^^^^^^^^^^^^ help: use: `array_ref` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:152:26 + --> tests/ui/unnecessary_to_owned.rs:153:26 | LL | require_as_ref_slice(slice.to_owned()); | ^^^^^^^^^^^^^^^^ help: use: `slice` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:155:31 + --> tests/ui/unnecessary_to_owned.rs:156:31 | LL | require_impl_as_ref_c_str(c_str.to_owned()); | ^^^^^^^^^^^^^^^^ help: use: `c_str` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:157:32 + --> tests/ui/unnecessary_to_owned.rs:158:32 | LL | require_impl_as_ref_os_str(os_str.to_owned()); | ^^^^^^^^^^^^^^^^^ help: use: `os_str` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:159:30 + --> tests/ui/unnecessary_to_owned.rs:160:30 | LL | require_impl_as_ref_path(path.to_owned()); | ^^^^^^^^^^^^^^^ help: use: `path` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:161:29 + --> tests/ui/unnecessary_to_owned.rs:162:29 | LL | require_impl_as_ref_str(s.to_owned()); | ^^^^^^^^^^^^ help: use: `s` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:163:29 + --> tests/ui/unnecessary_to_owned.rs:164:29 | LL | require_impl_as_ref_str(x.to_owned()); | ^^^^^^^^^^^^ help: use: `&x` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:165:31 + --> tests/ui/unnecessary_to_owned.rs:166:31 | LL | require_impl_as_ref_slice(array.to_owned()); | ^^^^^^^^^^^^^^^^ help: use: `array` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:167:31 + --> tests/ui/unnecessary_to_owned.rs:168:31 | LL | require_impl_as_ref_slice(array_ref.to_owned()); | ^^^^^^^^^^^^^^^^^^^^ help: use: `array_ref` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:169:31 + --> tests/ui/unnecessary_to_owned.rs:170:31 | LL | require_impl_as_ref_slice(slice.to_owned()); | ^^^^^^^^^^^^^^^^ help: use: `slice` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:172:30 + --> tests/ui/unnecessary_to_owned.rs:173:30 | LL | require_as_ref_str_slice(s.to_owned(), array.to_owned()); | ^^^^^^^^^^^^ help: use: `s` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:172:44 + --> tests/ui/unnecessary_to_owned.rs:173:44 | LL | require_as_ref_str_slice(s.to_owned(), array.to_owned()); | ^^^^^^^^^^^^^^^^ help: use: `array` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:175:30 + --> tests/ui/unnecessary_to_owned.rs:176:30 | LL | require_as_ref_str_slice(s.to_owned(), array_ref.to_owned()); | ^^^^^^^^^^^^ help: use: `s` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:175:44 + --> tests/ui/unnecessary_to_owned.rs:176:44 | LL | require_as_ref_str_slice(s.to_owned(), array_ref.to_owned()); | ^^^^^^^^^^^^^^^^^^^^ help: use: `array_ref` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:178:30 + --> tests/ui/unnecessary_to_owned.rs:179:30 | LL | require_as_ref_str_slice(s.to_owned(), slice.to_owned()); | ^^^^^^^^^^^^ help: use: `s` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:178:44 + --> tests/ui/unnecessary_to_owned.rs:179:44 | LL | require_as_ref_str_slice(s.to_owned(), slice.to_owned()); | ^^^^^^^^^^^^^^^^ help: use: `slice` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:181:30 + --> tests/ui/unnecessary_to_owned.rs:182:30 | LL | require_as_ref_slice_str(array.to_owned(), s.to_owned()); | ^^^^^^^^^^^^^^^^ help: use: `array` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:181:48 + --> tests/ui/unnecessary_to_owned.rs:182:48 | LL | require_as_ref_slice_str(array.to_owned(), s.to_owned()); | ^^^^^^^^^^^^ help: use: `s` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:184:30 + --> tests/ui/unnecessary_to_owned.rs:185:30 | LL | require_as_ref_slice_str(array_ref.to_owned(), s.to_owned()); | ^^^^^^^^^^^^^^^^^^^^ help: use: `array_ref` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:184:52 + --> tests/ui/unnecessary_to_owned.rs:185:52 | LL | require_as_ref_slice_str(array_ref.to_owned(), s.to_owned()); | ^^^^^^^^^^^^ help: use: `s` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:187:30 + --> tests/ui/unnecessary_to_owned.rs:188:30 | LL | require_as_ref_slice_str(slice.to_owned(), s.to_owned()); | ^^^^^^^^^^^^^^^^ help: use: `slice` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:187:48 + --> tests/ui/unnecessary_to_owned.rs:188:48 | LL | require_as_ref_slice_str(slice.to_owned(), s.to_owned()); | ^^^^^^^^^^^^ help: use: `s` error: unnecessary use of `to_string` - --> tests/ui/unnecessary_to_owned.rs:191:20 + --> tests/ui/unnecessary_to_owned.rs:192:20 | LL | let _ = x.join(&x_ref.to_string()); | ^^^^^^^^^^^^^^^^^^ help: use: `x_ref` error: unnecessary use of `to_vec` - --> tests/ui/unnecessary_to_owned.rs:194:13 + --> tests/ui/unnecessary_to_owned.rs:195:13 | LL | let _ = slice.to_vec().into_iter(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use: `slice.iter().copied()` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:196:13 + --> tests/ui/unnecessary_to_owned.rs:197:13 | LL | let _ = slice.to_owned().into_iter(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use: `slice.iter().copied()` error: unnecessary use of `to_vec` - --> tests/ui/unnecessary_to_owned.rs:199:13 + --> tests/ui/unnecessary_to_owned.rs:200:13 | LL | let _ = IntoIterator::into_iter(slice.to_vec()); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use: `slice.iter().copied()` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:201:13 + --> tests/ui/unnecessary_to_owned.rs:202:13 | LL | let _ = IntoIterator::into_iter(slice.to_owned()); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use: `slice.iter().copied()` error: allocating a new `String` only to create a temporary `&str` from it - --> tests/ui/unnecessary_to_owned.rs:229:26 + --> tests/ui/unnecessary_to_owned.rs:230:26 | LL | let _ref_str: &str = &String::from_utf8(slice.to_vec()).expect("not UTF-8"); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -466,7 +466,7 @@ LL + let _ref_str: &str = core::str::from_utf8(&slice).expect("not UTF-8"); | error: allocating a new `String` only to create a temporary `&str` from it - --> tests/ui/unnecessary_to_owned.rs:231:26 + --> tests/ui/unnecessary_to_owned.rs:232:26 | LL | let _ref_str: &str = &String::from_utf8(b"foo".to_vec()).unwrap(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -478,7 +478,7 @@ LL + let _ref_str: &str = core::str::from_utf8(b"foo").unwrap(); | error: allocating a new `String` only to create a temporary `&str` from it - --> tests/ui/unnecessary_to_owned.rs:233:26 + --> tests/ui/unnecessary_to_owned.rs:234:26 | LL | let _ref_str: &str = &String::from_utf8(b"foo".as_slice().to_owned()).unwrap(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -490,7 +490,7 @@ LL + let _ref_str: &str = core::str::from_utf8(b"foo".as_slice()).unwrap(); | error: unnecessary use of `to_vec` - --> tests/ui/unnecessary_to_owned.rs:291:14 + --> tests/ui/unnecessary_to_owned.rs:292:14 | LL | for t in file_types.to_vec() { | ^^^^^^^^^^^^^^^^^^^ @@ -503,49 +503,49 @@ LL ~ let path = match get_file_path(t) { | error: unnecessary use of `to_string` - --> tests/ui/unnecessary_to_owned.rs:357:24 + --> tests/ui/unnecessary_to_owned.rs:358:24 | LL | Box::new(build(y.to_string())) | ^^^^^^^^^^^^^ help: use: `y` error: unnecessary use of `to_string` - --> tests/ui/unnecessary_to_owned.rs:467:12 + --> tests/ui/unnecessary_to_owned.rs:468:12 | LL | id("abc".to_string()) | ^^^^^^^^^^^^^^^^^ help: use: `"abc"` error: unnecessary use of `to_vec` - --> tests/ui/unnecessary_to_owned.rs:611:37 + --> tests/ui/unnecessary_to_owned.rs:612:37 | LL | IntoFuture::into_future(foo([].to_vec(), &0)); | ^^^^^^^^^^^ help: use: `[]` error: unnecessary use of `to_vec` - --> tests/ui/unnecessary_to_owned.rs:622:18 + --> tests/ui/unnecessary_to_owned.rs:623:18 | LL | s.remove(&a.to_vec()); | ^^^^^^^^^^^ help: replace it with: `a` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:627:14 + --> tests/ui/unnecessary_to_owned.rs:628:14 | LL | s.remove(&"b".to_owned()); | ^^^^^^^^^^^^^^^ help: replace it with: `"b"` error: unnecessary use of `to_string` - --> tests/ui/unnecessary_to_owned.rs:629:14 + --> tests/ui/unnecessary_to_owned.rs:630:14 | LL | s.remove(&"b".to_string()); | ^^^^^^^^^^^^^^^^ help: replace it with: `"b"` error: unnecessary use of `to_vec` - --> tests/ui/unnecessary_to_owned.rs:635:14 + --> tests/ui/unnecessary_to_owned.rs:636:14 | LL | s.remove(&["b"].to_vec()); | ^^^^^^^^^^^^^^^ help: replace it with: `["b"].as_slice()` error: unnecessary use of `to_vec` - --> tests/ui/unnecessary_to_owned.rs:637:14 + --> tests/ui/unnecessary_to_owned.rs:638:14 | LL | s.remove(&(&["b"]).to_vec()); | ^^^^^^^^^^^^^^^^^^ help: replace it with: `(&["b"]).as_slice()` diff --git a/src/tools/clippy/tests/ui/unnested_or_patterns.fixed b/src/tools/clippy/tests/ui/unnested_or_patterns.fixed index 791b2fa131f23..2081772d06b36 100644 --- a/src/tools/clippy/tests/ui/unnested_or_patterns.fixed +++ b/src/tools/clippy/tests/ui/unnested_or_patterns.fixed @@ -64,3 +64,16 @@ fn msrv_1_53() { if let [1 | 53] = [0] {} //~^ unnested_or_patterns } + +mod issue9952 { + fn or_in_local() { + let (0 | 1 | _) = 0; + //~^ unnested_or_patterns + + if let (0 | 1 | _) = 0 {} + //~^ unnested_or_patterns + } + + fn or_in_param((x | x | x): i32) {} + //~^ unnested_or_patterns +} diff --git a/src/tools/clippy/tests/ui/unnested_or_patterns.rs b/src/tools/clippy/tests/ui/unnested_or_patterns.rs index e7e7c7cd2e494..6bf8fce36616c 100644 --- a/src/tools/clippy/tests/ui/unnested_or_patterns.rs +++ b/src/tools/clippy/tests/ui/unnested_or_patterns.rs @@ -64,3 +64,16 @@ fn msrv_1_53() { if let [1] | [53] = [0] {} //~^ unnested_or_patterns } + +mod issue9952 { + fn or_in_local() { + let (0 | (1 | _)) = 0; + //~^ unnested_or_patterns + + if let (0 | (1 | _)) = 0 {} + //~^ unnested_or_patterns + } + + fn or_in_param((x | (x | x)): i32) {} + //~^ unnested_or_patterns +} diff --git a/src/tools/clippy/tests/ui/unnested_or_patterns.stderr b/src/tools/clippy/tests/ui/unnested_or_patterns.stderr index ec5eb983c5a01..c805dc992b1c2 100644 --- a/src/tools/clippy/tests/ui/unnested_or_patterns.stderr +++ b/src/tools/clippy/tests/ui/unnested_or_patterns.stderr @@ -204,5 +204,41 @@ LL - if let [1] | [53] = [0] {} LL + if let [1 | 53] = [0] {} | -error: aborting due to 17 previous errors +error: unnested or-patterns + --> tests/ui/unnested_or_patterns.rs:70:13 + | +LL | let (0 | (1 | _)) = 0; + | ^^^^^^^^^^^^^ + | +help: nest the patterns + | +LL - let (0 | (1 | _)) = 0; +LL + let (0 | 1 | _) = 0; + | + +error: unnested or-patterns + --> tests/ui/unnested_or_patterns.rs:73:16 + | +LL | if let (0 | (1 | _)) = 0 {} + | ^^^^^^^^^^^^^ + | +help: nest the patterns + | +LL - if let (0 | (1 | _)) = 0 {} +LL + if let (0 | 1 | _) = 0 {} + | + +error: unnested or-patterns + --> tests/ui/unnested_or_patterns.rs:77:20 + | +LL | fn or_in_param((x | (x | x)): i32) {} + | ^^^^^^^^^^^^^ + | +help: nest the patterns + | +LL - fn or_in_param((x | (x | x)): i32) {} +LL + fn or_in_param((x | x | x): i32) {} + | + +error: aborting due to 20 previous errors diff --git a/src/tools/clippy/tests/ui/unused_unit.edition2021.fixed b/src/tools/clippy/tests/ui/unused_unit.edition2021.fixed new file mode 100644 index 0000000000000..93dd58b8e9d7b --- /dev/null +++ b/src/tools/clippy/tests/ui/unused_unit.edition2021.fixed @@ -0,0 +1,146 @@ +//@revisions: edition2021 edition2024 +//@[edition2021] edition:2021 +//@[edition2024] edition:2024 + +// The output for humans should just highlight the whole span without showing +// the suggested replacement, but we also want to test that suggested +// replacement only removes one set of parentheses, rather than naïvely +// stripping away any starting or ending parenthesis characters—hence this +// test of the JSON error format. + +#![feature(custom_inner_attributes)] +#![feature(closure_lifetime_binder)] +#![rustfmt::skip] + +#![deny(clippy::unused_unit)] +#![allow(dead_code)] +#![allow(clippy::from_over_into)] + +struct Unitter; +impl Unitter { + #[allow(clippy::no_effect)] + pub fn get_unit(&self, f: F, _g: G) + //~^ unused_unit + //~| unused_unit + where G: Fn() { + //~^ unused_unit + let _y: &dyn Fn() = &f; + //~^ unused_unit + (); // this should not lint, as it's not in return type position + } +} + +impl Into<()> for Unitter { + #[rustfmt::skip] + fn into(self) { + //~^ unused_unit + + //~^ unused_unit + } +} + +trait Trait { + fn redundant(&self, _f: F, _g: G, _h: H) + //~^ unused_unit + where + G: FnMut(), + //~^ unused_unit + H: Fn(); + //~^ unused_unit +} + +impl Trait for Unitter { + fn redundant(&self, _f: F, _g: G, _h: H) + //~^ unused_unit + where + G: FnMut(), + //~^ unused_unit + H: Fn() {} + //~^ unused_unit +} + +fn return_unit() { } +//~^ unused_unit +//~| unused_unit + +#[allow(clippy::needless_return)] +#[allow(clippy::never_loop)] +#[allow(clippy::unit_cmp)] +fn main() { + let u = Unitter; + assert_eq!(u.get_unit(|| {}, return_unit), u.into()); + return_unit(); + loop { + break; + //~^ unused_unit + } + return; + //~^ unused_unit +} + +// https://github.com/rust-lang/rust-clippy/issues/4076 +fn foo() { + macro_rules! foo { + (recv($r:expr) -> $res:pat => $body:expr) => { + $body + } + } + + foo! { + recv(rx) -> _x => () + } +} + +#[rustfmt::skip] +fn test(){} +//~^ unused_unit + +#[rustfmt::skip] +fn test2(){} +//~^ unused_unit + +#[rustfmt::skip] +fn test3(){} +//~^ unused_unit + +fn macro_expr() { + macro_rules! e { + () => (()); + } + e!() +} + +mod issue9748 { + fn main() { + let _ = for<'a> |_: &'a u32| -> () {}; + } +} + +mod issue9949 { + fn main() { + #[doc = "documentation"] + () + } +} + +mod issue14577 { + trait Unit {} + impl Unit for () {} + + fn run(f: impl FnOnce() -> R) { + f(); + } + + #[allow(dependency_on_unit_never_type_fallback)] + fn bar() { + run(|| { todo!() }); + //~[edition2021]^ unused_unit + } + + struct UnitStruct; + impl UnitStruct { + fn apply Fn(&'c mut Self)>(&mut self, f: F) { + todo!() + } + } +} \ No newline at end of file diff --git a/src/tools/clippy/tests/ui/unused_unit.edition2021.stderr b/src/tools/clippy/tests/ui/unused_unit.edition2021.stderr new file mode 100644 index 0000000000000..13cc20d4d7adc --- /dev/null +++ b/src/tools/clippy/tests/ui/unused_unit.edition2021.stderr @@ -0,0 +1,128 @@ +error: unneeded unit expression + --> tests/ui/unused_unit.rs:37:9 + | +LL | () + | ^^ help: remove the final `()` + | +note: the lint level is defined here + --> tests/ui/unused_unit.rs:15:9 + | +LL | #![deny(clippy::unused_unit)] + | ^^^^^^^^^^^^^^^^^^^ + +error: unneeded unit expression + --> tests/ui/unused_unit.rs:62:26 + | +LL | fn return_unit() -> () { () } + | ^^ help: remove the final `()` + +error: unneeded unit return type + --> tests/ui/unused_unit.rs:22:28 + | +LL | pub fn get_unit (), G>(&self, f: F, _g: G) -> () + | ^^^^^^ help: remove the `-> ()` + +error: unneeded unit return type + --> tests/ui/unused_unit.rs:25:18 + | +LL | where G: Fn() -> () { + | ^^^^^^ help: remove the `-> ()` + +error: unneeded unit return type + --> tests/ui/unused_unit.rs:22:58 + | +LL | pub fn get_unit (), G>(&self, f: F, _g: G) -> () + | ^^^^^^ help: remove the `-> ()` + +error: unneeded unit return type + --> tests/ui/unused_unit.rs:27:26 + | +LL | let _y: &dyn Fn() -> () = &f; + | ^^^^^^ help: remove the `-> ()` + +error: unneeded unit return type + --> tests/ui/unused_unit.rs:35:18 + | +LL | fn into(self) -> () { + | ^^^^^^ help: remove the `-> ()` + +error: unneeded unit return type + --> tests/ui/unused_unit.rs:43:29 + | +LL | fn redundant (), G, H>(&self, _f: F, _g: G, _h: H) + | ^^^^^^ help: remove the `-> ()` + +error: unneeded unit return type + --> tests/ui/unused_unit.rs:46:19 + | +LL | G: FnMut() -> (), + | ^^^^^^ help: remove the `-> ()` + +error: unneeded unit return type + --> tests/ui/unused_unit.rs:48:16 + | +LL | H: Fn() -> (); + | ^^^^^^ help: remove the `-> ()` + +error: unneeded unit return type + --> tests/ui/unused_unit.rs:53:29 + | +LL | fn redundant (), G, H>(&self, _f: F, _g: G, _h: H) + | ^^^^^^ help: remove the `-> ()` + +error: unneeded unit return type + --> tests/ui/unused_unit.rs:56:19 + | +LL | G: FnMut() -> (), + | ^^^^^^ help: remove the `-> ()` + +error: unneeded unit return type + --> tests/ui/unused_unit.rs:58:16 + | +LL | H: Fn() -> () {} + | ^^^^^^ help: remove the `-> ()` + +error: unneeded unit return type + --> tests/ui/unused_unit.rs:62:17 + | +LL | fn return_unit() -> () { () } + | ^^^^^^ help: remove the `-> ()` + +error: unneeded `()` + --> tests/ui/unused_unit.rs:74:14 + | +LL | break(); + | ^^ help: remove the `()` + +error: unneeded `()` + --> tests/ui/unused_unit.rs:77:11 + | +LL | return(); + | ^^ help: remove the `()` + +error: unneeded unit return type + --> tests/ui/unused_unit.rs:95:10 + | +LL | fn test()->(){} + | ^^^^ help: remove the `-> ()` + +error: unneeded unit return type + --> tests/ui/unused_unit.rs:99:11 + | +LL | fn test2() ->(){} + | ^^^^^ help: remove the `-> ()` + +error: unneeded unit return type + --> tests/ui/unused_unit.rs:103:11 + | +LL | fn test3()-> (){} + | ^^^^^ help: remove the `-> ()` + +error: unneeded unit return type + --> tests/ui/unused_unit.rs:136:15 + | +LL | run(|| -> () { todo!() }); + | ^^^^^^ help: remove the `-> ()` + +error: aborting due to 20 previous errors + diff --git a/src/tools/clippy/tests/ui/unused_unit.edition2024.fixed b/src/tools/clippy/tests/ui/unused_unit.edition2024.fixed new file mode 100644 index 0000000000000..987d901b97df7 --- /dev/null +++ b/src/tools/clippy/tests/ui/unused_unit.edition2024.fixed @@ -0,0 +1,146 @@ +//@revisions: edition2021 edition2024 +//@[edition2021] edition:2021 +//@[edition2024] edition:2024 + +// The output for humans should just highlight the whole span without showing +// the suggested replacement, but we also want to test that suggested +// replacement only removes one set of parentheses, rather than naïvely +// stripping away any starting or ending parenthesis characters—hence this +// test of the JSON error format. + +#![feature(custom_inner_attributes)] +#![feature(closure_lifetime_binder)] +#![rustfmt::skip] + +#![deny(clippy::unused_unit)] +#![allow(dead_code)] +#![allow(clippy::from_over_into)] + +struct Unitter; +impl Unitter { + #[allow(clippy::no_effect)] + pub fn get_unit(&self, f: F, _g: G) + //~^ unused_unit + //~| unused_unit + where G: Fn() { + //~^ unused_unit + let _y: &dyn Fn() = &f; + //~^ unused_unit + (); // this should not lint, as it's not in return type position + } +} + +impl Into<()> for Unitter { + #[rustfmt::skip] + fn into(self) { + //~^ unused_unit + + //~^ unused_unit + } +} + +trait Trait { + fn redundant(&self, _f: F, _g: G, _h: H) + //~^ unused_unit + where + G: FnMut(), + //~^ unused_unit + H: Fn(); + //~^ unused_unit +} + +impl Trait for Unitter { + fn redundant(&self, _f: F, _g: G, _h: H) + //~^ unused_unit + where + G: FnMut(), + //~^ unused_unit + H: Fn() {} + //~^ unused_unit +} + +fn return_unit() { } +//~^ unused_unit +//~| unused_unit + +#[allow(clippy::needless_return)] +#[allow(clippy::never_loop)] +#[allow(clippy::unit_cmp)] +fn main() { + let u = Unitter; + assert_eq!(u.get_unit(|| {}, return_unit), u.into()); + return_unit(); + loop { + break; + //~^ unused_unit + } + return; + //~^ unused_unit +} + +// https://github.com/rust-lang/rust-clippy/issues/4076 +fn foo() { + macro_rules! foo { + (recv($r:expr) -> $res:pat => $body:expr) => { + $body + } + } + + foo! { + recv(rx) -> _x => () + } +} + +#[rustfmt::skip] +fn test(){} +//~^ unused_unit + +#[rustfmt::skip] +fn test2(){} +//~^ unused_unit + +#[rustfmt::skip] +fn test3(){} +//~^ unused_unit + +fn macro_expr() { + macro_rules! e { + () => (()); + } + e!() +} + +mod issue9748 { + fn main() { + let _ = for<'a> |_: &'a u32| -> () {}; + } +} + +mod issue9949 { + fn main() { + #[doc = "documentation"] + () + } +} + +mod issue14577 { + trait Unit {} + impl Unit for () {} + + fn run(f: impl FnOnce() -> R) { + f(); + } + + #[allow(dependency_on_unit_never_type_fallback)] + fn bar() { + run(|| -> () { todo!() }); + //~[edition2021]^ unused_unit + } + + struct UnitStruct; + impl UnitStruct { + fn apply Fn(&'c mut Self)>(&mut self, f: F) { + todo!() + } + } +} \ No newline at end of file diff --git a/src/tools/clippy/tests/ui/unused_unit.edition2024.stderr b/src/tools/clippy/tests/ui/unused_unit.edition2024.stderr new file mode 100644 index 0000000000000..a79e70e066bd3 --- /dev/null +++ b/src/tools/clippy/tests/ui/unused_unit.edition2024.stderr @@ -0,0 +1,122 @@ +error: unneeded unit expression + --> tests/ui/unused_unit.rs:37:9 + | +LL | () + | ^^ help: remove the final `()` + | +note: the lint level is defined here + --> tests/ui/unused_unit.rs:15:9 + | +LL | #![deny(clippy::unused_unit)] + | ^^^^^^^^^^^^^^^^^^^ + +error: unneeded unit expression + --> tests/ui/unused_unit.rs:62:26 + | +LL | fn return_unit() -> () { () } + | ^^ help: remove the final `()` + +error: unneeded unit return type + --> tests/ui/unused_unit.rs:22:28 + | +LL | pub fn get_unit (), G>(&self, f: F, _g: G) -> () + | ^^^^^^ help: remove the `-> ()` + +error: unneeded unit return type + --> tests/ui/unused_unit.rs:25:18 + | +LL | where G: Fn() -> () { + | ^^^^^^ help: remove the `-> ()` + +error: unneeded unit return type + --> tests/ui/unused_unit.rs:22:58 + | +LL | pub fn get_unit (), G>(&self, f: F, _g: G) -> () + | ^^^^^^ help: remove the `-> ()` + +error: unneeded unit return type + --> tests/ui/unused_unit.rs:27:26 + | +LL | let _y: &dyn Fn() -> () = &f; + | ^^^^^^ help: remove the `-> ()` + +error: unneeded unit return type + --> tests/ui/unused_unit.rs:35:18 + | +LL | fn into(self) -> () { + | ^^^^^^ help: remove the `-> ()` + +error: unneeded unit return type + --> tests/ui/unused_unit.rs:43:29 + | +LL | fn redundant (), G, H>(&self, _f: F, _g: G, _h: H) + | ^^^^^^ help: remove the `-> ()` + +error: unneeded unit return type + --> tests/ui/unused_unit.rs:46:19 + | +LL | G: FnMut() -> (), + | ^^^^^^ help: remove the `-> ()` + +error: unneeded unit return type + --> tests/ui/unused_unit.rs:48:16 + | +LL | H: Fn() -> (); + | ^^^^^^ help: remove the `-> ()` + +error: unneeded unit return type + --> tests/ui/unused_unit.rs:53:29 + | +LL | fn redundant (), G, H>(&self, _f: F, _g: G, _h: H) + | ^^^^^^ help: remove the `-> ()` + +error: unneeded unit return type + --> tests/ui/unused_unit.rs:56:19 + | +LL | G: FnMut() -> (), + | ^^^^^^ help: remove the `-> ()` + +error: unneeded unit return type + --> tests/ui/unused_unit.rs:58:16 + | +LL | H: Fn() -> () {} + | ^^^^^^ help: remove the `-> ()` + +error: unneeded unit return type + --> tests/ui/unused_unit.rs:62:17 + | +LL | fn return_unit() -> () { () } + | ^^^^^^ help: remove the `-> ()` + +error: unneeded `()` + --> tests/ui/unused_unit.rs:74:14 + | +LL | break(); + | ^^ help: remove the `()` + +error: unneeded `()` + --> tests/ui/unused_unit.rs:77:11 + | +LL | return(); + | ^^ help: remove the `()` + +error: unneeded unit return type + --> tests/ui/unused_unit.rs:95:10 + | +LL | fn test()->(){} + | ^^^^ help: remove the `-> ()` + +error: unneeded unit return type + --> tests/ui/unused_unit.rs:99:11 + | +LL | fn test2() ->(){} + | ^^^^^ help: remove the `-> ()` + +error: unneeded unit return type + --> tests/ui/unused_unit.rs:103:11 + | +LL | fn test3()-> (){} + | ^^^^^ help: remove the `-> ()` + +error: aborting due to 19 previous errors + diff --git a/src/tools/clippy/tests/ui/unused_unit.fixed b/src/tools/clippy/tests/ui/unused_unit.fixed index e3c02681c9fd7..6668bf90c0924 100644 --- a/src/tools/clippy/tests/ui/unused_unit.fixed +++ b/src/tools/clippy/tests/ui/unused_unit.fixed @@ -120,3 +120,24 @@ mod issue9949 { () } } + +#[clippy::msrv = "1.85"] +mod issue14577 { + trait Unit {} + impl Unit for () {} + + fn run(f: impl FnOnce() -> R) { + f(); + } + + fn bar() { + run(|| -> () { todo!() }); + } + + struct UnitStruct; + impl UnitStruct { + fn apply Fn(&'c mut Self)>(&mut self, f: F) { + todo!() + } + } +} \ No newline at end of file diff --git a/src/tools/clippy/tests/ui/unused_unit.rs b/src/tools/clippy/tests/ui/unused_unit.rs index 4353026c594c1..b7645f7b6a263 100644 --- a/src/tools/clippy/tests/ui/unused_unit.rs +++ b/src/tools/clippy/tests/ui/unused_unit.rs @@ -1,4 +1,6 @@ - +//@revisions: edition2021 edition2024 +//@[edition2021] edition:2021 +//@[edition2024] edition:2024 // The output for humans should just highlight the whole span without showing // the suggested replacement, but we also want to test that suggested @@ -120,3 +122,25 @@ mod issue9949 { () } } + +mod issue14577 { + trait Unit {} + impl Unit for () {} + + fn run(f: impl FnOnce() -> R) { + f(); + } + + #[allow(dependency_on_unit_never_type_fallback)] + fn bar() { + run(|| -> () { todo!() }); + //~[edition2021]^ unused_unit + } + + struct UnitStruct; + impl UnitStruct { + fn apply Fn(&'c mut Self)>(&mut self, f: F) { + todo!() + } + } +} \ No newline at end of file diff --git a/src/tools/clippy/tests/ui/unused_unit.stderr b/src/tools/clippy/tests/ui/unused_unit.stderr index 172fe06550281..366f2142095ff 100644 --- a/src/tools/clippy/tests/ui/unused_unit.stderr +++ b/src/tools/clippy/tests/ui/unused_unit.stderr @@ -1,8 +1,8 @@ -error: unneeded unit return type - --> tests/ui/unused_unit.rs:20:58 +error: unneeded unit expression + --> tests/ui/unused_unit.rs:35:9 | -LL | pub fn get_unit (), G>(&self, f: F, _g: G) -> () - | ^^^^^^ help: remove the `-> ()` +LL | () + | ^^ help: remove the final `()` | note: the lint level is defined here --> tests/ui/unused_unit.rs:13:9 @@ -10,6 +10,12 @@ note: the lint level is defined here LL | #![deny(clippy::unused_unit)] | ^^^^^^^^^^^^^^^^^^^ +error: unneeded unit expression + --> tests/ui/unused_unit.rs:60:26 + | +LL | fn return_unit() -> () { () } + | ^^ help: remove the final `()` + error: unneeded unit return type --> tests/ui/unused_unit.rs:20:28 | @@ -22,6 +28,12 @@ error: unneeded unit return type LL | where G: Fn() -> () { | ^^^^^^ help: remove the `-> ()` +error: unneeded unit return type + --> tests/ui/unused_unit.rs:20:58 + | +LL | pub fn get_unit (), G>(&self, f: F, _g: G) -> () + | ^^^^^^ help: remove the `-> ()` + error: unneeded unit return type --> tests/ui/unused_unit.rs:25:26 | @@ -34,12 +46,6 @@ error: unneeded unit return type LL | fn into(self) -> () { | ^^^^^^ help: remove the `-> ()` -error: unneeded unit expression - --> tests/ui/unused_unit.rs:35:9 - | -LL | () - | ^^ help: remove the final `()` - error: unneeded unit return type --> tests/ui/unused_unit.rs:41:29 | @@ -82,12 +88,6 @@ error: unneeded unit return type LL | fn return_unit() -> () { () } | ^^^^^^ help: remove the `-> ()` -error: unneeded unit expression - --> tests/ui/unused_unit.rs:60:26 - | -LL | fn return_unit() -> () { () } - | ^^ help: remove the final `()` - error: unneeded `()` --> tests/ui/unused_unit.rs:72:14 | diff --git a/src/tools/clippy/tests/ui/unwrap_or.fixed b/src/tools/clippy/tests/ui/unwrap_or.fixed index c794ed577032d..e550484b5d9f0 100644 --- a/src/tools/clippy/tests/ui/unwrap_or.fixed +++ b/src/tools/clippy/tests/ui/unwrap_or.fixed @@ -1,4 +1,4 @@ -#![warn(clippy::all, clippy::or_fun_call)] +#![warn(clippy::or_fun_call)] #![allow(clippy::unnecessary_literal_unwrap)] fn main() { diff --git a/src/tools/clippy/tests/ui/unwrap_or.rs b/src/tools/clippy/tests/ui/unwrap_or.rs index 11a6883b7403f..cdd61ac898e6d 100644 --- a/src/tools/clippy/tests/ui/unwrap_or.rs +++ b/src/tools/clippy/tests/ui/unwrap_or.rs @@ -1,4 +1,4 @@ -#![warn(clippy::all, clippy::or_fun_call)] +#![warn(clippy::or_fun_call)] #![allow(clippy::unnecessary_literal_unwrap)] fn main() { diff --git a/src/tools/clippy/tests/ui/used_underscore_items.rs b/src/tools/clippy/tests/ui/used_underscore_items.rs index 3401df6ae7438..7e8289f1406ba 100644 --- a/src/tools/clippy/tests/ui/used_underscore_items.rs +++ b/src/tools/clippy/tests/ui/used_underscore_items.rs @@ -73,7 +73,7 @@ fn external_item_call() { // should not lint foreign functions. // issue #14156 -extern "C" { +unsafe extern "C" { pub fn _exit(code: i32) -> !; } diff --git a/src/tools/clippy/tests/ui/wildcard_imports.fixed b/src/tools/clippy/tests/ui/wildcard_imports.fixed index a26b4a34190cc..17510683f03e4 100644 --- a/src/tools/clippy/tests/ui/wildcard_imports.fixed +++ b/src/tools/clippy/tests/ui/wildcard_imports.fixed @@ -16,7 +16,7 @@ use crate::fn_mod::foo; //~^ wildcard_imports use crate::mod_mod::inner_mod; //~^ wildcard_imports -use crate::multi_fn_mod::{multi_bar, multi_foo, multi_inner_mod}; +use crate::multi_fn_mod::{multi_foo, multi_bar, multi_inner_mod}; //~^ wildcard_imports #[macro_use] use crate::struct_mod::{A, inner_struct_mod}; @@ -26,7 +26,7 @@ use crate::struct_mod::{A, inner_struct_mod}; use wildcard_imports_helper::inner::inner_for_self_import; use wildcard_imports_helper::inner::inner_for_self_import::inner_extern_bar; //~^ wildcard_imports -use wildcard_imports_helper::{ExternA, extern_foo}; +use wildcard_imports_helper::{extern_foo, ExternA}; //~^ wildcard_imports use std::io::prelude::*; @@ -138,7 +138,7 @@ mod in_fn_test { fn test_extern() { use wildcard_imports_helper::inner::inner_for_self_import::{self, inner_extern_foo}; //~^ wildcard_imports - use wildcard_imports_helper::{ExternA, extern_foo}; + use wildcard_imports_helper::{extern_foo, ExternA}; //~^ wildcard_imports inner_for_self_import::inner_extern_foo(); @@ -160,7 +160,7 @@ mod in_fn_test { } fn test_extern_reexported() { - use wildcard_imports_helper::{ExternExportedEnum, ExternExportedStruct, extern_exported}; + use wildcard_imports_helper::{extern_exported, ExternExportedStruct, ExternExportedEnum}; //~^ wildcard_imports extern_exported(); @@ -190,7 +190,7 @@ mod in_fn_test { } fn test_reexported() { - use crate::in_fn_test::{ExportedEnum, ExportedStruct, exported}; + use crate::in_fn_test::{exported, ExportedStruct, ExportedEnum}; //~^ wildcard_imports exported(); diff --git a/src/tools/clippy/tests/ui/wildcard_imports.stderr b/src/tools/clippy/tests/ui/wildcard_imports.stderr index f774126102bce..26434656a509e 100644 --- a/src/tools/clippy/tests/ui/wildcard_imports.stderr +++ b/src/tools/clippy/tests/ui/wildcard_imports.stderr @@ -17,7 +17,7 @@ error: usage of wildcard import --> tests/ui/wildcard_imports.rs:19:5 | LL | use crate::multi_fn_mod::*; - | ^^^^^^^^^^^^^^^^^^^^^^ help: try: `crate::multi_fn_mod::{multi_bar, multi_foo, multi_inner_mod}` + | ^^^^^^^^^^^^^^^^^^^^^^ help: try: `crate::multi_fn_mod::{multi_foo, multi_bar, multi_inner_mod}` error: usage of wildcard import --> tests/ui/wildcard_imports.rs:22:5 @@ -35,7 +35,7 @@ error: usage of wildcard import --> tests/ui/wildcard_imports.rs:29:5 | LL | use wildcard_imports_helper::*; - | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `wildcard_imports_helper::{ExternA, extern_foo}` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `wildcard_imports_helper::{extern_foo, ExternA}` error: usage of wildcard import --> tests/ui/wildcard_imports.rs:100:13 @@ -59,7 +59,7 @@ error: usage of wildcard import --> tests/ui/wildcard_imports.rs:141:13 | LL | use wildcard_imports_helper::*; - | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `wildcard_imports_helper::{ExternA, extern_foo}` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `wildcard_imports_helper::{extern_foo, ExternA}` error: usage of wildcard import --> tests/ui/wildcard_imports.rs:154:20 @@ -77,13 +77,13 @@ error: usage of wildcard import --> tests/ui/wildcard_imports.rs:163:13 | LL | use wildcard_imports_helper::*; - | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `wildcard_imports_helper::{ExternExportedEnum, ExternExportedStruct, extern_exported}` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `wildcard_imports_helper::{extern_exported, ExternExportedStruct, ExternExportedEnum}` error: usage of wildcard import --> tests/ui/wildcard_imports.rs:193:9 | LL | use crate::in_fn_test::*; - | ^^^^^^^^^^^^^^^^^^^^ help: try: `crate::in_fn_test::{ExportedEnum, ExportedStruct, exported}` + | ^^^^^^^^^^^^^^^^^^^^ help: try: `crate::in_fn_test::{exported, ExportedStruct, ExportedEnum}` error: usage of wildcard import --> tests/ui/wildcard_imports.rs:203:9 diff --git a/src/tools/clippy/tests/ui/wildcard_imports_2021.edition2018.fixed b/src/tools/clippy/tests/ui/wildcard_imports_2021.edition2018.fixed index a3d1aebba8af4..f97b883ea231f 100644 --- a/src/tools/clippy/tests/ui/wildcard_imports_2021.edition2018.fixed +++ b/src/tools/clippy/tests/ui/wildcard_imports_2021.edition2018.fixed @@ -14,7 +14,7 @@ use crate::fn_mod::foo; //~^ wildcard_imports use crate::mod_mod::inner_mod; //~^ wildcard_imports -use crate::multi_fn_mod::{multi_bar, multi_foo, multi_inner_mod}; +use crate::multi_fn_mod::{multi_foo, multi_bar, multi_inner_mod}; //~^ wildcard_imports use crate::struct_mod::{A, inner_struct_mod}; //~^ wildcard_imports @@ -23,7 +23,7 @@ use crate::struct_mod::{A, inner_struct_mod}; use wildcard_imports_helper::inner::inner_for_self_import::inner_extern_bar; //~^ wildcard_imports use wildcard_imports_helper::prelude::v1::*; -use wildcard_imports_helper::{ExternA, extern_foo}; +use wildcard_imports_helper::{extern_foo, ExternA}; //~^ wildcard_imports use std::io::prelude::*; @@ -132,7 +132,7 @@ mod in_fn_test { fn test_extern() { use wildcard_imports_helper::inner::inner_for_self_import::{self, inner_extern_foo}; //~^ wildcard_imports - use wildcard_imports_helper::{ExternA, extern_foo}; + use wildcard_imports_helper::{extern_foo, ExternA}; //~^ wildcard_imports inner_for_self_import::inner_extern_foo(); @@ -154,7 +154,7 @@ mod in_fn_test { } fn test_extern_reexported() { - use wildcard_imports_helper::{ExternExportedEnum, ExternExportedStruct, extern_exported}; + use wildcard_imports_helper::{extern_exported, ExternExportedStruct, ExternExportedEnum}; //~^ wildcard_imports extern_exported(); @@ -184,7 +184,7 @@ mod in_fn_test { } fn test_reexported() { - use crate::in_fn_test::{ExportedEnum, ExportedStruct, exported}; + use crate::in_fn_test::{exported, ExportedStruct, ExportedEnum}; //~^ wildcard_imports exported(); diff --git a/src/tools/clippy/tests/ui/wildcard_imports_2021.edition2018.stderr b/src/tools/clippy/tests/ui/wildcard_imports_2021.edition2018.stderr index a1b557f39f0d2..873ce41b04f49 100644 --- a/src/tools/clippy/tests/ui/wildcard_imports_2021.edition2018.stderr +++ b/src/tools/clippy/tests/ui/wildcard_imports_2021.edition2018.stderr @@ -17,7 +17,7 @@ error: usage of wildcard import --> tests/ui/wildcard_imports_2021.rs:17:5 | LL | use crate::multi_fn_mod::*; - | ^^^^^^^^^^^^^^^^^^^^^^ help: try: `crate::multi_fn_mod::{multi_bar, multi_foo, multi_inner_mod}` + | ^^^^^^^^^^^^^^^^^^^^^^ help: try: `crate::multi_fn_mod::{multi_foo, multi_bar, multi_inner_mod}` error: usage of wildcard import --> tests/ui/wildcard_imports_2021.rs:19:5 @@ -35,7 +35,7 @@ error: usage of wildcard import --> tests/ui/wildcard_imports_2021.rs:26:5 | LL | use wildcard_imports_helper::*; - | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `wildcard_imports_helper::{ExternA, extern_foo}` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `wildcard_imports_helper::{extern_foo, ExternA}` error: usage of wildcard import --> tests/ui/wildcard_imports_2021.rs:95:13 @@ -59,7 +59,7 @@ error: usage of wildcard import --> tests/ui/wildcard_imports_2021.rs:135:13 | LL | use wildcard_imports_helper::*; - | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `wildcard_imports_helper::{ExternA, extern_foo}` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `wildcard_imports_helper::{extern_foo, ExternA}` error: usage of wildcard import --> tests/ui/wildcard_imports_2021.rs:148:20 @@ -77,13 +77,13 @@ error: usage of wildcard import --> tests/ui/wildcard_imports_2021.rs:157:13 | LL | use wildcard_imports_helper::*; - | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `wildcard_imports_helper::{ExternExportedEnum, ExternExportedStruct, extern_exported}` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `wildcard_imports_helper::{extern_exported, ExternExportedStruct, ExternExportedEnum}` error: usage of wildcard import --> tests/ui/wildcard_imports_2021.rs:187:9 | LL | use crate::in_fn_test::*; - | ^^^^^^^^^^^^^^^^^^^^ help: try: `crate::in_fn_test::{ExportedEnum, ExportedStruct, exported}` + | ^^^^^^^^^^^^^^^^^^^^ help: try: `crate::in_fn_test::{exported, ExportedStruct, ExportedEnum}` error: usage of wildcard import --> tests/ui/wildcard_imports_2021.rs:197:9 diff --git a/src/tools/clippy/tests/ui/wildcard_imports_2021.edition2021.fixed b/src/tools/clippy/tests/ui/wildcard_imports_2021.edition2021.fixed index a3d1aebba8af4..f97b883ea231f 100644 --- a/src/tools/clippy/tests/ui/wildcard_imports_2021.edition2021.fixed +++ b/src/tools/clippy/tests/ui/wildcard_imports_2021.edition2021.fixed @@ -14,7 +14,7 @@ use crate::fn_mod::foo; //~^ wildcard_imports use crate::mod_mod::inner_mod; //~^ wildcard_imports -use crate::multi_fn_mod::{multi_bar, multi_foo, multi_inner_mod}; +use crate::multi_fn_mod::{multi_foo, multi_bar, multi_inner_mod}; //~^ wildcard_imports use crate::struct_mod::{A, inner_struct_mod}; //~^ wildcard_imports @@ -23,7 +23,7 @@ use crate::struct_mod::{A, inner_struct_mod}; use wildcard_imports_helper::inner::inner_for_self_import::inner_extern_bar; //~^ wildcard_imports use wildcard_imports_helper::prelude::v1::*; -use wildcard_imports_helper::{ExternA, extern_foo}; +use wildcard_imports_helper::{extern_foo, ExternA}; //~^ wildcard_imports use std::io::prelude::*; @@ -132,7 +132,7 @@ mod in_fn_test { fn test_extern() { use wildcard_imports_helper::inner::inner_for_self_import::{self, inner_extern_foo}; //~^ wildcard_imports - use wildcard_imports_helper::{ExternA, extern_foo}; + use wildcard_imports_helper::{extern_foo, ExternA}; //~^ wildcard_imports inner_for_self_import::inner_extern_foo(); @@ -154,7 +154,7 @@ mod in_fn_test { } fn test_extern_reexported() { - use wildcard_imports_helper::{ExternExportedEnum, ExternExportedStruct, extern_exported}; + use wildcard_imports_helper::{extern_exported, ExternExportedStruct, ExternExportedEnum}; //~^ wildcard_imports extern_exported(); @@ -184,7 +184,7 @@ mod in_fn_test { } fn test_reexported() { - use crate::in_fn_test::{ExportedEnum, ExportedStruct, exported}; + use crate::in_fn_test::{exported, ExportedStruct, ExportedEnum}; //~^ wildcard_imports exported(); diff --git a/src/tools/clippy/tests/ui/wildcard_imports_2021.edition2021.stderr b/src/tools/clippy/tests/ui/wildcard_imports_2021.edition2021.stderr index a1b557f39f0d2..873ce41b04f49 100644 --- a/src/tools/clippy/tests/ui/wildcard_imports_2021.edition2021.stderr +++ b/src/tools/clippy/tests/ui/wildcard_imports_2021.edition2021.stderr @@ -17,7 +17,7 @@ error: usage of wildcard import --> tests/ui/wildcard_imports_2021.rs:17:5 | LL | use crate::multi_fn_mod::*; - | ^^^^^^^^^^^^^^^^^^^^^^ help: try: `crate::multi_fn_mod::{multi_bar, multi_foo, multi_inner_mod}` + | ^^^^^^^^^^^^^^^^^^^^^^ help: try: `crate::multi_fn_mod::{multi_foo, multi_bar, multi_inner_mod}` error: usage of wildcard import --> tests/ui/wildcard_imports_2021.rs:19:5 @@ -35,7 +35,7 @@ error: usage of wildcard import --> tests/ui/wildcard_imports_2021.rs:26:5 | LL | use wildcard_imports_helper::*; - | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `wildcard_imports_helper::{ExternA, extern_foo}` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `wildcard_imports_helper::{extern_foo, ExternA}` error: usage of wildcard import --> tests/ui/wildcard_imports_2021.rs:95:13 @@ -59,7 +59,7 @@ error: usage of wildcard import --> tests/ui/wildcard_imports_2021.rs:135:13 | LL | use wildcard_imports_helper::*; - | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `wildcard_imports_helper::{ExternA, extern_foo}` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `wildcard_imports_helper::{extern_foo, ExternA}` error: usage of wildcard import --> tests/ui/wildcard_imports_2021.rs:148:20 @@ -77,13 +77,13 @@ error: usage of wildcard import --> tests/ui/wildcard_imports_2021.rs:157:13 | LL | use wildcard_imports_helper::*; - | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `wildcard_imports_helper::{ExternExportedEnum, ExternExportedStruct, extern_exported}` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `wildcard_imports_helper::{extern_exported, ExternExportedStruct, ExternExportedEnum}` error: usage of wildcard import --> tests/ui/wildcard_imports_2021.rs:187:9 | LL | use crate::in_fn_test::*; - | ^^^^^^^^^^^^^^^^^^^^ help: try: `crate::in_fn_test::{ExportedEnum, ExportedStruct, exported}` + | ^^^^^^^^^^^^^^^^^^^^ help: try: `crate::in_fn_test::{exported, ExportedStruct, ExportedEnum}` error: usage of wildcard import --> tests/ui/wildcard_imports_2021.rs:197:9 diff --git a/src/tools/clippy/tests/ui/zombie_processes.rs b/src/tools/clippy/tests/ui/zombie_processes.rs index 25bbc02ffb762..395f9dd2defb5 100644 --- a/src/tools/clippy/tests/ui/zombie_processes.rs +++ b/src/tools/clippy/tests/ui/zombie_processes.rs @@ -176,3 +176,25 @@ fn return_wait() -> ExitStatus { let mut x = Command::new("").spawn().unwrap(); return x.wait().unwrap(); } + +mod issue14677 { + use std::io; + use std::process::Command; + + fn do_something Result<(), ()>>(f: F) { + todo!() + } + + fn foo() { + let mut child = Command::new("true").spawn().unwrap(); + let some_condition = true; + do_something(|| { + if some_condition { + return Err(()); + } + Ok(()) + }); + child.kill().unwrap(); + child.wait().unwrap(); + } +} diff --git a/src/tools/clippy/triagebot.toml b/src/tools/clippy/triagebot.toml index 33d3b0728f3d1..f27b109e99536 100644 --- a/src/tools/clippy/triagebot.toml +++ b/src/tools/clippy/triagebot.toml @@ -9,11 +9,32 @@ allow-unauthenticated = [ # See https://forge.rust-lang.org/triagebot/shortcuts.html [shortcut] +[merge-conflicts] + +[note] + +[canonicalize-issue-links] + +# Prevents mentions in commits to avoid users being spammed +[no-mentions] + # Have rustbot inform users about the *No Merge Policy* [no-merges] exclude_titles = ["Rustup"] # exclude syncs from rust-lang/rust labels = ["has-merge-commits", "S-waiting-on-author"] +[review-requested] +# Those labels are removed when PR author requests a review from an assignee +remove_labels = ["S-waiting-on-author"] +# Those labels are added when PR author requests a review from an assignee +add_labels = ["S-waiting-on-review"] + +[review-submitted] +# These labels are removed when a review is submitted. +review_labels = ["S-waiting-on-review"] +# This label is added when a review is submitted. +reviewed_label = "S-waiting-on-author" + [autolabel."S-waiting-on-review"] new_pr = true @@ -21,10 +42,12 @@ new_pr = true contributing_url = "https://github.com/rust-lang/rust-clippy/blob/master/CONTRIBUTING.md" users_on_vacation = [ "matthiaskrgr", + "samueltardieu", ] [assign.owners] "/.github" = ["@flip1995"] +"/triagebot.toml" = ["@flip1995"] "/book" = ["@flip1995"] "*" = [ "@Manishearth", @@ -34,4 +57,5 @@ users_on_vacation = [ "@Jarcho", "@blyxyas", "@y21", + "@samueltardieu", ] diff --git a/src/tools/clippy/util/etc/pre-commit.sh b/src/tools/clippy/util/etc/pre-commit.sh index 5dd2ba3d5f53b..528f8953b25d8 100755 --- a/src/tools/clippy/util/etc/pre-commit.sh +++ b/src/tools/clippy/util/etc/pre-commit.sh @@ -6,7 +6,6 @@ set -e # Update lints cargo dev update_lints git add clippy_lints/src/lib.rs -git add clippy_lints/src/lib.*.rs # Formatting: # Git will not automatically add the formatted code to the staged changes once diff --git a/src/tools/compiletest/Cargo.toml b/src/tools/compiletest/Cargo.toml index 4f8e475e7625f..93f7b1cb7cf2c 100644 --- a/src/tools/compiletest/Cargo.toml +++ b/src/tools/compiletest/Cargo.toml @@ -1,31 +1,34 @@ [package] name = "compiletest" version = "0.0.0" -edition = "2021" +edition = "2024" [lib] doctest = false [dependencies] +# tidy-alphabetical-start anstyle-svg = "0.1.3" +build_helper = { path = "../../build_helper" } +camino = "1" colored = "2" diff = "0.1.10" -unified-diff = "0.2.1" getopts = "0.2" +glob = "0.3.0" +home = "0.5.5" indexmap = "2.0.0" miropt-test-tools = { path = "../miropt-test-tools" } -build_helper = { path = "../../build_helper" } -tracing = "0.1" -tracing-subscriber = { version = "0.3.3", default-features = false, features = ["ansi", "env-filter", "fmt", "parking_lot", "smallvec"] } +rayon = "1.10.0" regex = "1.0" +rustfix = "0.8.1" semver = { version = "1.0.23", features = ["serde"] } serde = { version = "1.0", features = ["derive"] } serde_json = "1.0" -rustfix = "0.8.1" +tracing = "0.1" +tracing-subscriber = { version = "0.3.3", default-features = false, features = ["ansi", "env-filter", "fmt", "parking_lot", "smallvec"] } +unified-diff = "0.2.1" walkdir = "2" -glob = "0.3.0" -anyhow = "1" -home = "0.5.5" +# tidy-alphabetical-end [target.'cfg(unix)'.dependencies] libc = "0.2" diff --git a/src/tools/compiletest/src/common.rs b/src/tools/compiletest/src/common.rs index 08d3c1c343e08..4f93b49874134 100644 --- a/src/tools/compiletest/src/common.rs +++ b/src/tools/compiletest/src/common.rs @@ -1,18 +1,17 @@ use std::collections::{BTreeSet, HashMap, HashSet}; -use std::ffi::OsString; -use std::path::{Path, PathBuf}; use std::process::Command; use std::str::FromStr; use std::sync::OnceLock; use std::{fmt, iter}; use build_helper::git::GitConfig; +use camino::{Utf8Path, Utf8PathBuf}; use semver::Version; use serde::de::{Deserialize, Deserializer, Error as _}; -use test::{ColorConfig, OutputFormat}; pub use self::Mode::*; -use crate::util::{PathBufExt, add_dylib_path}; +use crate::executor::{ColorConfig, OutputFormat}; +use crate::util::{Utf8PathBufExt, add_dylib_path}; macro_rules! string_enum { ($(#[$meta:meta])* $vis:vis enum $name:ident { $($variant:ident => $repr:expr,)* }) => { @@ -178,26 +177,30 @@ pub struct Config { /// `true` to overwrite stderr/stdout files instead of complaining about changes in output. pub bless: bool, + /// Stop as soon as possible after any test fails. + /// May run a few more tests before stopping, due to threading. + pub fail_fast: bool, + /// The library paths required for running the compiler. - pub compile_lib_path: PathBuf, + pub compile_lib_path: Utf8PathBuf, /// The library paths required for running compiled programs. - pub run_lib_path: PathBuf, + pub run_lib_path: Utf8PathBuf, /// The rustc executable. - pub rustc_path: PathBuf, + pub rustc_path: Utf8PathBuf, /// The cargo executable. - pub cargo_path: Option, + pub cargo_path: Option, /// Rustc executable used to compile run-make recipes. - pub stage0_rustc_path: Option, + pub stage0_rustc_path: Option, /// The rustdoc executable. - pub rustdoc_path: Option, + pub rustdoc_path: Option, /// The coverage-dump executable. - pub coverage_dump_path: Option, + pub coverage_dump_path: Option, /// The Python executable to use for LLDB and htmldocck. pub python: String, @@ -209,27 +212,27 @@ pub struct Config { pub jsondoclint_path: Option, /// The LLVM `FileCheck` binary path. - pub llvm_filecheck: Option, + pub llvm_filecheck: Option, /// Path to LLVM's bin directory. - pub llvm_bin_dir: Option, + pub llvm_bin_dir: Option, /// The path to the Clang executable to run Clang-based tests with. If /// `None` then these tests will be ignored. pub run_clang_based_tests_with: Option, /// The directory containing the sources. - pub src_root: PathBuf, + pub src_root: Utf8PathBuf, /// The directory containing the test suite sources. Must be a subdirectory of `src_root`. - pub src_test_suite_root: PathBuf, + pub src_test_suite_root: Utf8PathBuf, /// Root build directory (e.g. `build/`). - pub build_root: PathBuf, + pub build_root: Utf8PathBuf, /// Test suite specific build directory (e.g. `build/host/test/ui/`). - pub build_test_suite_root: PathBuf, + pub build_test_suite_root: Utf8PathBuf, /// The directory containing the compiler sysroot - pub sysroot_base: PathBuf, + pub sysroot_base: Utf8PathBuf, /// The number of the stage under test. pub stage: u32, @@ -271,9 +274,6 @@ pub struct Config { /// Explicitly enable or disable running. pub run: Option, - /// Write out a parseable log of tests that were run - pub logfile: Option, - /// A command line to prefix program execution with, /// for running under valgrind for example. /// @@ -300,7 +300,7 @@ pub struct Config { pub host: String, /// Path to / name of the Microsoft Console Debugger (CDB) executable - pub cdb: Option, + pub cdb: Option, /// Version of CDB pub cdb_version: Option<[u16; 4]>, @@ -321,7 +321,7 @@ pub struct Config { pub system_llvm: bool, /// Path to the android tools - pub android_cross_path: PathBuf, + pub android_cross_path: Utf8PathBuf, /// Extra parameter to run adb on arm-linux-androideabi pub adb_path: String, @@ -345,7 +345,7 @@ pub struct Config { pub color: ColorConfig, /// where to find the remote test client process, if we're using it - pub remote_test_client: Option, + pub remote_test_client: Option, /// mode describing what file the actual ui output will be compared to pub compare_mode: Option, @@ -394,11 +394,11 @@ pub struct Config { pub target_cfgs: OnceLock, pub builtin_cfg_names: OnceLock>, + pub supported_crate_types: OnceLock>, pub nocapture: bool, // Needed both to construct build_helper::git::GitConfig - pub git_repository: String, pub nightly_branch: String, pub git_merge_commit_email: String, @@ -412,7 +412,7 @@ pub struct Config { /// Path to minicore aux library, used for `no_core` tests that need `core` stubs in /// cross-compilation scenarios that do not otherwise want/need to `-Zbuild-std`. Used in e.g. /// ABI tests. - pub minicore_path: PathBuf, + pub minicore_path: Utf8PathBuf, } impl Config { @@ -471,6 +471,11 @@ impl Config { self.builtin_cfg_names.get_or_init(|| builtin_cfg_names(self)) } + /// Get the list of crate types that the target platform supports. + pub fn supported_crate_types(&self) -> &HashSet { + self.supported_crate_types.get_or_init(|| supported_crate_types(self)) + } + pub fn has_threads(&self) -> bool { // Wasm targets don't have threads unless `-threads` is in the target // name, such as `wasm32-wasip1-threads`. @@ -500,7 +505,6 @@ impl Config { pub fn git_config(&self) -> GitConfig<'_> { GitConfig { - git_repository: &self.git_repository, nightly_branch: &self.nightly_branch, git_merge_commit_email: &self.git_merge_commit_email, } @@ -744,6 +748,31 @@ fn builtin_cfg_names(config: &Config) -> HashSet { .collect() } +pub const KNOWN_CRATE_TYPES: &[&str] = + &["bin", "cdylib", "dylib", "lib", "proc-macro", "rlib", "staticlib"]; + +fn supported_crate_types(config: &Config) -> HashSet { + let crate_types: HashSet<_> = rustc_output( + config, + &["--target", &config.target, "--print=supported-crate-types", "-Zunstable-options"], + Default::default(), + ) + .lines() + .map(|l| l.to_string()) + .collect(); + + for crate_type in crate_types.iter() { + assert!( + KNOWN_CRATE_TYPES.contains(&crate_type.as_str()), + "unexpected crate type `{}`: known crate types are {:?}", + crate_type, + KNOWN_CRATE_TYPES + ); + } + + crate_types +} + fn rustc_output(config: &Config, args: &[&str], envs: HashMap) -> String { let mut command = Command::new(&config.rustc_path); add_dylib_path(&mut command, iter::once(&config.compile_lib_path)); @@ -772,8 +801,8 @@ fn serde_parse_u32<'de, D: Deserializer<'de>>(deserializer: D) -> Result, compare_mode: &Option, kind: &str, -) -> PathBuf { +) -> Utf8PathBuf { assert!(UI_EXTENSIONS.contains(&kind)); let mut parts = Vec::new(); @@ -833,7 +862,7 @@ pub const UI_COVERAGE_MAP: &str = "cov-map"; /// ``` /// /// This is created early when tests are collected to avoid race conditions. -pub fn output_relative_path(config: &Config, relative_dir: &Path) -> PathBuf { +pub fn output_relative_path(config: &Config, relative_dir: &Utf8Path) -> Utf8PathBuf { config.build_test_suite_root.join(relative_dir) } @@ -842,10 +871,10 @@ pub fn output_testname_unique( config: &Config, testpaths: &TestPaths, revision: Option<&str>, -) -> PathBuf { +) -> Utf8PathBuf { let mode = config.compare_mode.as_ref().map_or("", |m| m.to_str()); let debugger = config.debugger.as_ref().map_or("", |m| m.to_str()); - PathBuf::from(&testpaths.file.file_stem().unwrap()) + Utf8PathBuf::from(&testpaths.file.file_stem().unwrap()) .with_extra_extension(config.mode.output_dir_disambiguator()) .with_extra_extension(revision.unwrap_or("")) .with_extra_extension(mode) @@ -855,7 +884,11 @@ pub fn output_testname_unique( /// Absolute path to the directory where all output for the given /// test/revision should reside. Example: /// /path/to/build/host-tuple/test/ui/relative/testname.revision.mode/ -pub fn output_base_dir(config: &Config, testpaths: &TestPaths, revision: Option<&str>) -> PathBuf { +pub fn output_base_dir( + config: &Config, + testpaths: &TestPaths, + revision: Option<&str>, +) -> Utf8PathBuf { output_relative_path(config, &testpaths.relative_dir) .join(output_testname_unique(config, testpaths, revision)) } @@ -863,12 +896,20 @@ pub fn output_base_dir(config: &Config, testpaths: &TestPaths, revision: Option< /// Absolute path to the base filename used as output for the given /// test/revision. Example: /// /path/to/build/host-tuple/test/ui/relative/testname.revision.mode/testname -pub fn output_base_name(config: &Config, testpaths: &TestPaths, revision: Option<&str>) -> PathBuf { +pub fn output_base_name( + config: &Config, + testpaths: &TestPaths, + revision: Option<&str>, +) -> Utf8PathBuf { output_base_dir(config, testpaths, revision).join(testpaths.file.file_stem().unwrap()) } /// Absolute path to the directory to use for incremental compilation. Example: /// /path/to/build/host-tuple/test/ui/relative/testname.mode/testname.inc -pub fn incremental_dir(config: &Config, testpaths: &TestPaths, revision: Option<&str>) -> PathBuf { +pub fn incremental_dir( + config: &Config, + testpaths: &TestPaths, + revision: Option<&str>, +) -> Utf8PathBuf { output_base_name(config, testpaths, revision).with_extension("inc") } diff --git a/src/tools/compiletest/src/compute_diff.rs b/src/tools/compiletest/src/compute_diff.rs index 4c942c51bae13..509e7e117039c 100644 --- a/src/tools/compiletest/src/compute_diff.rs +++ b/src/tools/compiletest/src/compute_diff.rs @@ -1,6 +1,7 @@ use std::collections::VecDeque; use std::fs::{File, FileType}; -use std::path::Path; + +use camino::Utf8Path; #[derive(Debug, PartialEq)] pub enum DiffLine { @@ -112,8 +113,8 @@ pub(crate) fn write_diff(expected: &str, actual: &str, context_size: usize) -> S /// Returns whether any data was actually written. pub(crate) fn write_filtered_diff( diff_filename: &str, - out_dir: &Path, - compare_dir: &Path, + out_dir: &Utf8Path, + compare_dir: &Utf8Path, verbose: bool, filter: Filter, ) -> bool @@ -123,19 +124,21 @@ where use std::io::{Read, Write}; let mut diff_output = File::create(diff_filename).unwrap(); let mut wrote_data = false; - for entry in walkdir::WalkDir::new(out_dir) { + for entry in walkdir::WalkDir::new(out_dir.as_std_path()) { let entry = entry.expect("failed to read file"); let extension = entry.path().extension().and_then(|p| p.to_str()); if filter(entry.file_type(), extension) { - let expected_path = compare_dir.join(entry.path().strip_prefix(&out_dir).unwrap()); + let expected_path = compare_dir + .as_std_path() + .join(entry.path().strip_prefix(&out_dir.as_std_path()).unwrap()); let expected = if let Ok(s) = std::fs::read(&expected_path) { s } else { continue }; let actual_path = entry.path(); let actual = std::fs::read(&actual_path).unwrap(); let diff = unified_diff::diff( &expected, - &expected_path.to_string_lossy(), + &expected_path.to_str().unwrap(), &actual, - &actual_path.to_string_lossy(), + &actual_path.to_str().unwrap(), 3, ); wrote_data |= !diff.is_empty(); diff --git a/src/tools/compiletest/src/debuggers.rs b/src/tools/compiletest/src/debuggers.rs index 20e3c8dfb9ee7..c133d7fd4fbd0 100644 --- a/src/tools/compiletest/src/debuggers.rs +++ b/src/tools/compiletest/src/debuggers.rs @@ -1,9 +1,9 @@ use std::env; -use std::ffi::OsString; -use std::path::{Path, PathBuf}; use std::process::Command; use std::sync::Arc; +use camino::{Utf8Path, Utf8PathBuf}; + use crate::common::{Config, Debugger}; pub(crate) fn configure_cdb(config: &Config) -> Option> { @@ -40,7 +40,9 @@ pub(crate) fn configure_gdb(config: &Config) -> Option> { // // we should figure out how to lift this restriction! (run them all // on different ports allocated dynamically). - env::set_var("RUST_TEST_THREADS", "1"); + // + // SAFETY: at this point we are still single-threaded. + unsafe { env::set_var("RUST_TEST_THREADS", "1") }; } Some(Arc::new(Config { debugger: Some(Debugger::Gdb), ..config.clone() })) @@ -76,12 +78,15 @@ fn is_pc_windows_msvc_target(target: &str) -> bool { target.ends_with("-pc-windows-msvc") } -fn find_cdb(target: &str) -> Option { +fn find_cdb(target: &str) -> Option { if !(cfg!(windows) && is_pc_windows_msvc_target(target)) { return None; } - let pf86 = env::var_os("ProgramFiles(x86)").or_else(|| env::var_os("ProgramFiles"))?; + let pf86 = Utf8PathBuf::from_path_buf( + env::var_os("ProgramFiles(x86)").or_else(|| env::var_os("ProgramFiles"))?.into(), + ) + .unwrap(); let cdb_arch = if cfg!(target_arch = "x86") { "x86" } else if cfg!(target_arch = "x86_64") { @@ -94,8 +99,7 @@ fn find_cdb(target: &str) -> Option { return None; // No compatible CDB.exe in the Windows 10 SDK }; - let mut path = PathBuf::new(); - path.push(pf86); + let mut path = pf86; path.push(r"Windows Kits\10\Debuggers"); // We could check 8.1 etc. too? path.push(cdb_arch); path.push(r"cdb.exe"); @@ -104,15 +108,15 @@ fn find_cdb(target: &str) -> Option { return None; } - Some(path.into_os_string()) + Some(path) } /// Returns Path to CDB pub(crate) fn analyze_cdb( cdb: Option, target: &str, -) -> (Option, Option<[u16; 4]>) { - let cdb = cdb.map(OsString::from).or_else(|| find_cdb(target)); +) -> (Option, Option<[u16; 4]>) { + let cdb = cdb.map(Utf8PathBuf::from).or_else(|| find_cdb(target)); let mut version = None; if let Some(cdb) = cdb.as_ref() { @@ -141,7 +145,7 @@ pub(crate) fn extract_cdb_version(full_version_line: &str) -> Option<[u16; 4]> { pub(crate) fn analyze_gdb( gdb: Option, target: &str, - android_cross_path: &Path, + android_cross_path: &Utf8Path, ) -> (Option, Option) { #[cfg(not(windows))] const GDB_FALLBACK: &str = "gdb"; @@ -150,10 +154,7 @@ pub(crate) fn analyze_gdb( let fallback_gdb = || { if is_android_gdb_target(target) { - let mut gdb_path = match android_cross_path.to_str() { - Some(x) => x.to_owned(), - None => panic!("cannot find android cross path"), - }; + let mut gdb_path = android_cross_path.to_string(); gdb_path.push_str("/bin/gdb"); gdb_path } else { diff --git a/src/tools/compiletest/src/directive-list.rs b/src/tools/compiletest/src/directive-list.rs index b2ad5a3b3d0bb..5757e422ae21e 100644 --- a/src/tools/compiletest/src/directive-list.rs +++ b/src/tools/compiletest/src/directive-list.rs @@ -22,6 +22,7 @@ const KNOWN_DIRECTIVE_NAMES: &[&str] = &[ "dont-check-compiler-stderr", "dont-check-compiler-stdout", "dont-check-failure-status", + "dont-require-annotations", "edition", "error-pattern", "exact-llvm-major-version", @@ -34,6 +35,7 @@ const KNOWN_DIRECTIVE_NAMES: &[&str] = &[ "ignore-32bit", "ignore-64bit", "ignore-aarch64", + "ignore-aarch64-pc-windows-msvc", "ignore-aarch64-unknown-linux-gnu", "ignore-aix", "ignore-android", @@ -43,6 +45,7 @@ const KNOWN_DIRECTIVE_NAMES: &[&str] = &[ "ignore-arm-unknown-linux-gnueabihf", "ignore-arm-unknown-linux-musleabi", "ignore-arm-unknown-linux-musleabihf", + "ignore-auxiliary", "ignore-avr", "ignore-beta", "ignore-cdb", @@ -132,6 +135,7 @@ const KNOWN_DIRECTIVE_NAMES: &[&str] = &[ "min-llvm-version", "min-system-llvm-version", "needs-asm-support", + "needs-crate-type", "needs-deterministic-layouts", "needs-dlltool", "needs-dynamic-linking", @@ -175,6 +179,7 @@ const KNOWN_DIRECTIVE_NAMES: &[&str] = &[ "only-32bit", "only-64bit", "only-aarch64", + "only-aarch64-apple-darwin", "only-aarch64-unknown-linux-gnu", "only-apple", "only-arm", @@ -188,6 +193,7 @@ const KNOWN_DIRECTIVE_NAMES: &[&str] = &[ "only-gnu", "only-i686-pc-windows-gnu", "only-i686-pc-windows-msvc", + "only-i686-unknown-linux-gnu", "only-ios", "only-linux", "only-loongarch64", @@ -219,6 +225,7 @@ const KNOWN_DIRECTIVE_NAMES: &[&str] = &[ "only-windows-msvc", "only-x86", "only-x86_64", + "only-x86_64-apple-darwin", "only-x86_64-fortanix-unknown-sgx", "only-x86_64-pc-windows-gnu", "only-x86_64-pc-windows-msvc", diff --git a/src/tools/compiletest/src/errors.rs b/src/tools/compiletest/src/errors.rs index b68f817146fd7..b5a2b7feac9d6 100644 --- a/src/tools/compiletest/src/errors.rs +++ b/src/tools/compiletest/src/errors.rs @@ -2,34 +2,49 @@ use std::fmt; use std::fs::File; use std::io::BufReader; use std::io::prelude::*; -use std::path::Path; -use std::str::FromStr; use std::sync::OnceLock; +use camino::Utf8Path; use regex::Regex; use tracing::*; -#[derive(Copy, Clone, Debug, PartialEq)] +#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)] pub enum ErrorKind { Help, Error, Note, Suggestion, Warning, + Raw, } -impl FromStr for ErrorKind { - type Err = (); - fn from_str(s: &str) -> Result { - let s = s.to_uppercase(); - let part0: &str = s.split(':').next().unwrap(); - match part0 { - "HELP" => Ok(ErrorKind::Help), - "ERROR" => Ok(ErrorKind::Error), - "NOTE" => Ok(ErrorKind::Note), - "SUGGESTION" => Ok(ErrorKind::Suggestion), - "WARN" | "WARNING" => Ok(ErrorKind::Warning), - _ => Err(()), +impl ErrorKind { + pub fn from_compiler_str(s: &str) -> ErrorKind { + match s { + "help" => ErrorKind::Help, + "error" | "error: internal compiler error" => ErrorKind::Error, + "note" | "failure-note" => ErrorKind::Note, + "warning" => ErrorKind::Warning, + _ => panic!("unexpected compiler diagnostic kind `{s}`"), + } + } + + /// Either the canonical uppercase string, or some additional versions for compatibility. + /// FIXME: consider keeping only the canonical versions here. + pub fn from_user_str(s: &str) -> ErrorKind { + match s { + "HELP" | "help" => ErrorKind::Help, + "ERROR" | "error" => ErrorKind::Error, + // `MONO_ITEM` makes annotations in `codegen-units` tests syntactically correct, + // but those tests never use the error kind later on. + "NOTE" | "note" | "MONO_ITEM" => ErrorKind::Note, + "SUGGESTION" => ErrorKind::Suggestion, + "WARN" | "WARNING" | "warn" | "warning" => ErrorKind::Warning, + "RAW" => ErrorKind::Raw, + _ => panic!( + "unexpected diagnostic kind `{s}`, expected \ + `ERROR`, `WARN`, `NOTE`, `HELP` or `SUGGESTION`" + ), } } } @@ -37,11 +52,12 @@ impl FromStr for ErrorKind { impl fmt::Display for ErrorKind { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match *self { - ErrorKind::Help => write!(f, "help message"), - ErrorKind::Error => write!(f, "error"), - ErrorKind::Note => write!(f, "note"), - ErrorKind::Suggestion => write!(f, "suggestion"), - ErrorKind::Warning => write!(f, "warning"), + ErrorKind::Help => write!(f, "HELP"), + ErrorKind::Error => write!(f, "ERROR"), + ErrorKind::Note => write!(f, "NOTE"), + ErrorKind::Suggestion => write!(f, "SUGGESTION"), + ErrorKind::Warning => write!(f, "WARN"), + ErrorKind::Raw => write!(f, "RAW"), } } } @@ -50,20 +66,18 @@ impl fmt::Display for ErrorKind { pub struct Error { pub line_num: Option, /// What kind of message we expect (e.g., warning, error, suggestion). - /// `None` if not specified or unknown message kind. - pub kind: Option, + pub kind: ErrorKind, pub msg: String, + /// For some `Error`s, like secondary lines of multi-line diagnostics, line annotations + /// are not mandatory, even if they would otherwise be mandatory for primary errors. + /// Only makes sense for "actual" errors, not for "expected" errors. + pub require_annotation: bool, } impl Error { pub fn render_for_expected(&self) -> String { use colored::Colorize; - format!( - "{: <10}line {: >3}: {}", - self.kind.map(|kind| kind.to_string()).unwrap_or_default().to_uppercase(), - self.line_num_str(), - self.msg.cyan(), - ) + format!("{: <10}line {: >3}: {}", self.kind, self.line_num_str(), self.msg.cyan()) } pub fn line_num_str(&self) -> String { @@ -81,8 +95,8 @@ impl Error { /// /// If revision is not None, then we look /// for `//[X]~` instead, where `X` is the current revision. -pub fn load_errors(testfile: &Path, revision: Option<&str>) -> Vec { - let rdr = BufReader::new(File::open(testfile).unwrap()); +pub fn load_errors(testfile: &Utf8Path, revision: Option<&str>) -> Vec { + let rdr = BufReader::new(File::open(testfile.as_std_path()).unwrap()); // `last_nonfollow_error` tracks the most recently seen // line with an error template that did not use the @@ -150,18 +164,13 @@ fn parse_expected( } // Get the part of the comment after the sigil (e.g. `~^^` or ~|). - let whole_match = captures.get(0).unwrap(); - let (_, mut msg) = line.split_at(whole_match.end()); - - let first_word = msg.split_whitespace().next().expect("Encountered unexpected empty comment"); - - // If we find `//~ ERROR foo` or something like that, skip the first word. - let kind = first_word.parse::().ok(); - if kind.is_some() { - msg = &msg.trim_start().split_at(first_word.len()).1; - } - - let msg = msg.trim().to_owned(); + let tag = captures.get(0).unwrap(); + let rest = line[tag.end()..].trim_start(); + let (kind_str, _) = + rest.split_once(|c: char| c != '_' && !c.is_ascii_alphabetic()).unwrap_or((rest, "")); + let kind = ErrorKind::from_user_str(kind_str); + let untrimmed_msg = &rest[kind_str.len()..]; + let msg = untrimmed_msg.strip_prefix(':').unwrap_or(untrimmed_msg).trim().to_owned(); let line_num_adjust = &captures["adjust"]; let (follow_prev, line_num) = if line_num_adjust == "|" { @@ -177,12 +186,12 @@ fn parse_expected( debug!( "line={:?} tag={:?} follow_prev={:?} kind={:?} msg={:?}", line_num, - whole_match.as_str(), + tag.as_str(), follow_prev, kind, msg ); - Some((follow_prev, Error { line_num, kind, msg })) + Some((follow_prev, Error { line_num, kind, msg, require_annotation: true })) } #[cfg(test)] diff --git a/src/tools/compiletest/src/executor.rs b/src/tools/compiletest/src/executor.rs new file mode 100644 index 0000000000000..e774c5e2047c8 --- /dev/null +++ b/src/tools/compiletest/src/executor.rs @@ -0,0 +1,294 @@ +//! This module contains a reimplementation of the subset of libtest +//! functionality needed by compiletest. + +use std::borrow::Cow; +use std::collections::HashMap; +use std::hash::{BuildHasherDefault, DefaultHasher}; +use std::num::NonZero; +use std::sync::{Arc, Mutex, mpsc}; +use std::{env, hint, io, mem, panic, thread}; + +use crate::common::{Config, TestPaths}; + +mod deadline; +mod json; + +pub(crate) fn run_tests(config: &Config, tests: Vec) -> bool { + let tests_len = tests.len(); + let filtered = filter_tests(config, tests); + // Iterator yielding tests that haven't been started yet. + let mut fresh_tests = (0..).map(TestId).zip(&filtered); + + let concurrency = get_concurrency(); + assert!(concurrency > 0); + let concurrent_capacity = concurrency.min(filtered.len()); + + let mut listener = json::Listener::new(); + let mut running_tests = HashMap::with_capacity_and_hasher( + concurrent_capacity, + BuildHasherDefault::::new(), + ); + let mut deadline_queue = deadline::DeadlineQueue::with_capacity(concurrent_capacity); + + let num_filtered_out = tests_len - filtered.len(); + listener.suite_started(filtered.len(), num_filtered_out); + + // Channel used by test threads to report the test outcome when done. + let (completion_tx, completion_rx) = mpsc::channel::(); + + // Unlike libtest, we don't have a separate code path for concurrency=1. + // In that case, the tests will effectively be run serially anyway. + loop { + // Spawn new test threads, up to the concurrency limit. + // FIXME(let_chains): Use a let-chain here when stable in bootstrap. + 'spawn: while running_tests.len() < concurrency { + let Some((id, test)) = fresh_tests.next() else { break 'spawn }; + listener.test_started(test); + deadline_queue.push(id, test); + let join_handle = spawn_test_thread(id, test, completion_tx.clone()); + running_tests.insert(id, RunningTest { test, join_handle }); + } + + // If all running tests have finished, and there weren't any unstarted + // tests to spawn, then we're done. + if running_tests.is_empty() { + break; + } + + let completion = deadline_queue + .read_channel_while_checking_deadlines( + &completion_rx, + |id| running_tests.contains_key(&id), + |_id, test| listener.test_timed_out(test), + ) + .expect("receive channel should never be closed early"); + + let RunningTest { test, join_handle } = running_tests.remove(&completion.id).unwrap(); + if let Some(join_handle) = join_handle { + join_handle.join().unwrap_or_else(|_| { + panic!("thread for `{}` panicked after reporting completion", test.desc.name) + }); + } + + listener.test_finished(test, &completion); + + if completion.outcome.is_failed() && config.fail_fast { + // Prevent any other in-flight threads from panicking when they + // write to the completion channel. + mem::forget(completion_rx); + break; + } + } + + let suite_passed = listener.suite_finished(); + suite_passed +} + +/// Spawns a thread to run a single test, and returns the thread's join handle. +/// +/// Returns `None` if the test was ignored, so no thread was spawned. +fn spawn_test_thread( + id: TestId, + test: &CollectedTest, + completion_tx: mpsc::Sender, +) -> Option> { + if test.desc.ignore && !test.config.run_ignored { + completion_tx + .send(TestCompletion { id, outcome: TestOutcome::Ignored, stdout: None }) + .unwrap(); + return None; + } + + let runnable_test = RunnableTest::new(test); + let should_panic = test.desc.should_panic; + let run_test = move || run_test_inner(id, should_panic, runnable_test, completion_tx); + + let thread_builder = thread::Builder::new().name(test.desc.name.clone()); + let join_handle = thread_builder.spawn(run_test).unwrap(); + Some(join_handle) +} + +/// Runs a single test, within the dedicated thread spawned by the caller. +fn run_test_inner( + id: TestId, + should_panic: ShouldPanic, + runnable_test: RunnableTest, + completion_sender: mpsc::Sender, +) { + let is_capture = !runnable_test.config.nocapture; + let capture_buf = is_capture.then(|| Arc::new(Mutex::new(vec![]))); + + if let Some(capture_buf) = &capture_buf { + io::set_output_capture(Some(Arc::clone(capture_buf))); + } + + let panic_payload = panic::catch_unwind(move || runnable_test.run()).err(); + + if is_capture { + io::set_output_capture(None); + } + + let outcome = match (should_panic, panic_payload) { + (ShouldPanic::No, None) | (ShouldPanic::Yes, Some(_)) => TestOutcome::Succeeded, + (ShouldPanic::No, Some(_)) => TestOutcome::Failed { message: None }, + (ShouldPanic::Yes, None) => { + TestOutcome::Failed { message: Some("test did not panic as expected") } + } + }; + let stdout = capture_buf.map(|mutex| mutex.lock().unwrap_or_else(|e| e.into_inner()).to_vec()); + + completion_sender.send(TestCompletion { id, outcome, stdout }).unwrap(); +} + +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] +struct TestId(usize); + +struct RunnableTest { + config: Arc, + testpaths: TestPaths, + revision: Option, +} + +impl RunnableTest { + fn new(test: &CollectedTest) -> Self { + let config = Arc::clone(&test.config); + let testpaths = test.testpaths.clone(); + let revision = test.revision.clone(); + Self { config, testpaths, revision } + } + + fn run(&self) { + __rust_begin_short_backtrace(|| { + crate::runtest::run( + Arc::clone(&self.config), + &self.testpaths, + self.revision.as_deref(), + ); + }); + } +} + +/// Fixed frame used to clean the backtrace with `RUST_BACKTRACE=1`. +#[inline(never)] +fn __rust_begin_short_backtrace T>(f: F) -> T { + let result = f(); + + // prevent this frame from being tail-call optimised away + hint::black_box(result) +} + +struct RunningTest<'a> { + test: &'a CollectedTest, + join_handle: Option>, +} + +/// Test completion message sent by individual test threads when their test +/// finishes (successfully or unsuccessfully). +struct TestCompletion { + id: TestId, + outcome: TestOutcome, + stdout: Option>, +} + +#[derive(Clone, Debug, PartialEq, Eq)] +enum TestOutcome { + Succeeded, + Failed { message: Option<&'static str> }, + Ignored, +} + +impl TestOutcome { + fn is_failed(&self) -> bool { + matches!(self, Self::Failed { .. }) + } +} + +/// Applies command-line arguments for filtering/skipping tests by name. +/// +/// Adapted from `filter_tests` in libtest. +/// +/// FIXME(#139660): After the libtest dependency is removed, redesign the whole +/// filtering system to do a better job of understanding and filtering _paths_, +/// instead of being tied to libtest's substring/exact matching behaviour. +fn filter_tests(opts: &Config, tests: Vec) -> Vec { + let mut filtered = tests; + + let matches_filter = |test: &CollectedTest, filter_str: &str| { + let test_name = &test.desc.name; + if opts.filter_exact { test_name == filter_str } else { test_name.contains(filter_str) } + }; + + // Remove tests that don't match the test filter + if !opts.filters.is_empty() { + filtered.retain(|test| opts.filters.iter().any(|filter| matches_filter(test, filter))); + } + + // Skip tests that match any of the skip filters + if !opts.skip.is_empty() { + filtered.retain(|test| !opts.skip.iter().any(|sf| matches_filter(test, sf))); + } + + filtered +} + +/// Determines the number of tests to run concurrently. +/// +/// Copied from `get_concurrency` in libtest. +/// +/// FIXME(#139660): After the libtest dependency is removed, consider making +/// bootstrap specify the number of threads on the command-line, instead of +/// propagating the `RUST_TEST_THREADS` environment variable. +fn get_concurrency() -> usize { + if let Ok(value) = env::var("RUST_TEST_THREADS") { + match value.parse::>().ok() { + Some(n) => n.get(), + _ => panic!("RUST_TEST_THREADS is `{value}`, should be a positive integer."), + } + } else { + thread::available_parallelism().map(|n| n.get()).unwrap_or(1) + } +} + +/// Information needed to create a `test::TestDescAndFn`. +pub(crate) struct CollectedTest { + pub(crate) desc: CollectedTestDesc, + pub(crate) config: Arc, + pub(crate) testpaths: TestPaths, + pub(crate) revision: Option, +} + +/// Information needed to create a `test::TestDesc`. +pub(crate) struct CollectedTestDesc { + pub(crate) name: String, + pub(crate) ignore: bool, + pub(crate) ignore_message: Option>, + pub(crate) should_panic: ShouldPanic, +} + +/// Whether console output should be colored or not. +#[derive(Copy, Clone, Default, Debug)] +pub enum ColorConfig { + #[default] + AutoColor, + AlwaysColor, + NeverColor, +} + +/// Format of the test results output. +#[derive(Copy, Clone, Debug, Default, PartialEq, Eq)] +pub enum OutputFormat { + /// Verbose output + Pretty, + /// Quiet output + #[default] + Terse, + /// JSON output + Json, +} + +/// Whether test is expected to panic or not. +#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)] +pub(crate) enum ShouldPanic { + No, + Yes, +} diff --git a/src/tools/compiletest/src/executor/deadline.rs b/src/tools/compiletest/src/executor/deadline.rs new file mode 100644 index 0000000000000..3536eff2fd80d --- /dev/null +++ b/src/tools/compiletest/src/executor/deadline.rs @@ -0,0 +1,102 @@ +use std::collections::VecDeque; +use std::sync::mpsc::{self, RecvError, RecvTimeoutError}; +use std::time::{Duration, Instant}; + +use crate::executor::{CollectedTest, TestId}; + +const TEST_WARN_TIMEOUT_S: u64 = 60; + +struct DeadlineEntry<'a> { + id: TestId, + test: &'a CollectedTest, + deadline: Instant, +} + +pub(crate) struct DeadlineQueue<'a> { + queue: VecDeque>, +} + +impl<'a> DeadlineQueue<'a> { + pub(crate) fn with_capacity(capacity: usize) -> Self { + Self { queue: VecDeque::with_capacity(capacity) } + } + + /// All calls to [`Instant::now`] go through this wrapper method. + /// This makes it easier to find all places that read the current time. + fn now(&self) -> Instant { + Instant::now() + } + + pub(crate) fn push(&mut self, id: TestId, test: &'a CollectedTest) { + let deadline = self.now() + Duration::from_secs(TEST_WARN_TIMEOUT_S); + if let Some(back) = self.queue.back() { + assert!(back.deadline <= deadline); + } + self.queue.push_back(DeadlineEntry { id, test, deadline }); + } + + /// Equivalent to `rx.recv()`, except that if a test exceeds its deadline + /// during the wait, the given callback will also be called for that test. + pub(crate) fn read_channel_while_checking_deadlines( + &mut self, + rx: &mpsc::Receiver, + is_running: impl Fn(TestId) -> bool, + mut on_deadline_passed: impl FnMut(TestId, &CollectedTest), + ) -> Result { + loop { + let Some(next_deadline) = self.next_deadline() else { + // All currently-running tests have already exceeded their + // deadline, so do a normal receive. + return rx.recv(); + }; + let next_deadline_timeout = next_deadline.saturating_duration_since(self.now()); + + let recv_result = rx.recv_timeout(next_deadline_timeout); + // Process deadlines after every receive attempt, regardless of + // outcome, so that we don't build up an unbounded backlog of stale + // entries due to a constant stream of tests finishing. + self.for_each_entry_past_deadline(&is_running, &mut on_deadline_passed); + + match recv_result { + Ok(value) => return Ok(value), + // Deadlines have already been processed, so loop and do another receive. + Err(RecvTimeoutError::Timeout) => {} + Err(RecvTimeoutError::Disconnected) => return Err(RecvError), + } + } + } + + fn next_deadline(&self) -> Option { + Some(self.queue.front()?.deadline) + } + + fn for_each_entry_past_deadline( + &mut self, + is_running: impl Fn(TestId) -> bool, + mut on_deadline_passed: impl FnMut(TestId, &CollectedTest), + ) { + let now = self.now(); + + // Clear out entries that are past their deadline, but only invoke the + // callback for tests that are still considered running. + while let Some(entry) = pop_front_if(&mut self.queue, |entry| entry.deadline <= now) { + if is_running(entry.id) { + on_deadline_passed(entry.id, entry.test); + } + } + + // Also clear out any leading entries that are no longer running, even + // if their deadline hasn't been reached. + while let Some(_) = pop_front_if(&mut self.queue, |entry| !is_running(entry.id)) {} + + if let Some(front) = self.queue.front() { + assert!(now < front.deadline); + } + } +} + +/// FIXME(vec_deque_pop_if): Use `VecDeque::pop_front_if` when it is stable in bootstrap. +fn pop_front_if(queue: &mut VecDeque, predicate: impl FnOnce(&T) -> bool) -> Option { + let first = queue.front()?; + if predicate(first) { queue.pop_front() } else { None } +} diff --git a/src/tools/compiletest/src/executor/json.rs b/src/tools/compiletest/src/executor/json.rs new file mode 100644 index 0000000000000..c74ed81a36b85 --- /dev/null +++ b/src/tools/compiletest/src/executor/json.rs @@ -0,0 +1,111 @@ +//! Collects statistics and emits suite/test events as JSON messages, using +//! the same JSON format as libtest's JSON formatter. +//! +//! These messages are then parsed by bootstrap, which replaces them with +//! user-friendly terminal output. + +use std::time::Instant; + +use serde_json::json; + +use crate::executor::{CollectedTest, TestCompletion, TestOutcome}; + +pub(crate) struct Listener { + suite_start: Option, + passed: usize, + failed: usize, + ignored: usize, + filtered_out: usize, +} + +impl Listener { + pub(crate) fn new() -> Self { + Self { suite_start: None, passed: 0, failed: 0, ignored: 0, filtered_out: 0 } + } + + fn print_message(&self, message: &serde_json::Value) { + println!("{message}"); + } + + fn now(&self) -> Instant { + Instant::now() + } + + pub(crate) fn suite_started(&mut self, test_count: usize, filtered_out: usize) { + self.suite_start = Some(self.now()); + self.filtered_out = filtered_out; + let message = json!({ "type": "suite", "event": "started", "test_count": test_count }); + self.print_message(&message); + } + + pub(crate) fn test_started(&mut self, test: &CollectedTest) { + let name = test.desc.name.as_str(); + let message = json!({ "type": "test", "event": "started", "name": name }); + self.print_message(&message); + } + + pub(crate) fn test_timed_out(&mut self, test: &CollectedTest) { + let name = test.desc.name.as_str(); + let message = json!({ "type": "test", "event": "timeout", "name": name }); + self.print_message(&message); + } + + pub(crate) fn test_finished(&mut self, test: &CollectedTest, completion: &TestCompletion) { + let event; + let name = test.desc.name.as_str(); + let mut maybe_message = None; + let maybe_stdout = completion.stdout.as_deref().map(String::from_utf8_lossy); + + match completion.outcome { + TestOutcome::Succeeded => { + self.passed += 1; + event = "ok"; + } + TestOutcome::Failed { message } => { + self.failed += 1; + maybe_message = message; + event = "failed"; + } + TestOutcome::Ignored => { + self.ignored += 1; + maybe_message = test.desc.ignore_message.as_deref(); + event = "ignored"; + } + }; + + // This emits optional fields as `null`, instead of omitting them + // completely as libtest does, but bootstrap can parse the result + // either way. + let json = json!({ + "type": "test", + "event": event, + "name": name, + "message": maybe_message, + "stdout": maybe_stdout, + }); + + self.print_message(&json); + } + + pub(crate) fn suite_finished(&mut self) -> bool { + let exec_time = self.suite_start.map(|start| (self.now() - start).as_secs_f64()); + let suite_passed = self.failed == 0; + + let event = if suite_passed { "ok" } else { "failed" }; + let message = json!({ + "type": "suite", + "event": event, + "passed": self.passed, + "failed": self.failed, + "ignored": self.ignored, + // Compiletest doesn't run any benchmarks, but we still need to set this + // field to 0 so that bootstrap's JSON parser can read our message. + "measured": 0, + "filtered_out": self.filtered_out, + "exec_time": exec_time, + }); + + self.print_message(&message); + suite_passed + } +} diff --git a/src/tools/compiletest/src/header.rs b/src/tools/compiletest/src/header.rs index f654bd9c90b51..8bee9caacc949 100644 --- a/src/tools/compiletest/src/header.rs +++ b/src/tools/compiletest/src/header.rs @@ -3,14 +3,16 @@ use std::env; use std::fs::File; use std::io::BufReader; use std::io::prelude::*; -use std::path::{Path, PathBuf}; use std::process::Command; +use camino::{Utf8Path, Utf8PathBuf}; use semver::Version; use tracing::*; use crate::common::{Config, Debugger, FailMode, Mode, PassMode}; use crate::debuggers::{extract_cdb_version, extract_gdb_version}; +use crate::errors::ErrorKind; +use crate::executor::{CollectedTestDesc, ShouldPanic}; use crate::header::auxiliary::{AuxProps, parse_and_update_aux}; use crate::header::needs::CachedNeedsConditions; use crate::util::static_regex; @@ -43,12 +45,12 @@ pub struct EarlyProps { } impl EarlyProps { - pub fn from_file(config: &Config, testfile: &Path) -> Self { - let file = File::open(testfile).expect("open test file to parse earlyprops"); + pub fn from_file(config: &Config, testfile: &Utf8Path) -> Self { + let file = File::open(testfile.as_std_path()).expect("open test file to parse earlyprops"); Self::from_reader(config, testfile, file) } - pub fn from_reader(config: &Config, testfile: &Path, rdr: R) -> Self { + pub fn from_reader(config: &Config, testfile: &Utf8Path, rdr: R) -> Self { let mut props = EarlyProps::default(); let mut poisoned = false; iter_header( @@ -64,7 +66,7 @@ impl EarlyProps { ); if poisoned { - eprintln!("errors encountered during EarlyProps parsing: {}", testfile.display()); + eprintln!("errors encountered during EarlyProps parsing: {}", testfile); panic!("errors encountered during EarlyProps parsing"); } @@ -86,7 +88,7 @@ pub struct TestProps { pub doc_flags: Vec, // If present, the name of a file that this test should match when // pretty-printed - pub pp_exact: Option, + pub pp_exact: Option, /// Auxiliary crates that should be built and made available to this test. pub(crate) aux: AuxProps, // Environment settings to use for compiling @@ -132,7 +134,7 @@ pub struct TestProps { // not set by end-users; rather it is set by the incremental // testing harness and used when generating compilation // arguments. (In particular, it propagates to the aux-builds.) - pub incremental_dir: Option, + pub incremental_dir: Option, // If `true`, this test will use incremental compilation. // // This can be set manually with the `incremental` header, or implicitly @@ -195,6 +197,8 @@ pub struct TestProps { /// Build and use `minicore` as `core` stub for `no_core` tests in cross-compilation scenarios /// that don't otherwise want/need `-Z build-std`. pub add_core_stubs: bool, + /// Whether line annotatins are required for the given error kind. + pub dont_require_annotations: HashSet, } mod directives { @@ -211,6 +215,7 @@ mod directives { pub const CHECK_RUN_RESULTS: &'static str = "check-run-results"; pub const DONT_CHECK_COMPILER_STDOUT: &'static str = "dont-check-compiler-stdout"; pub const DONT_CHECK_COMPILER_STDERR: &'static str = "dont-check-compiler-stderr"; + pub const DONT_REQUIRE_ANNOTATIONS: &'static str = "dont-require-annotations"; pub const NO_PREFER_DYNAMIC: &'static str = "no-prefer-dynamic"; pub const PRETTY_MODE: &'static str = "pretty-mode"; pub const PRETTY_COMPARE_ONLY: &'static str = "pretty-compare-only"; @@ -296,10 +301,16 @@ impl TestProps { no_auto_check_cfg: false, has_enzyme: false, add_core_stubs: false, + dont_require_annotations: Default::default(), } } - pub fn from_aux_file(&self, testfile: &Path, revision: Option<&str>, config: &Config) -> Self { + pub fn from_aux_file( + &self, + testfile: &Utf8Path, + revision: Option<&str>, + config: &Config, + ) -> Self { let mut props = TestProps::new(); // copy over select properties to the aux build: @@ -310,10 +321,10 @@ impl TestProps { props } - pub fn from_file(testfile: &Path, revision: Option<&str>, config: &Config) -> Self { + pub fn from_file(testfile: &Utf8Path, revision: Option<&str>, config: &Config) -> Self { let mut props = TestProps::new(); props.load_from(testfile, revision, config); - props.exec_env.push(("RUSTC".to_string(), config.rustc_path.display().to_string())); + props.exec_env.push(("RUSTC".to_string(), config.rustc_path.to_string())); match (props.pass_mode, props.fail_mode) { (None, None) if config.mode == Mode::Ui => props.fail_mode = Some(FailMode::Check), @@ -328,10 +339,10 @@ impl TestProps { /// tied to a particular revision `foo` (indicated by writing /// `//@[foo]`), then the property is ignored unless `test_revision` is /// `Some("foo")`. - fn load_from(&mut self, testfile: &Path, test_revision: Option<&str>, config: &Config) { + fn load_from(&mut self, testfile: &Utf8Path, test_revision: Option<&str>, config: &Config) { let mut has_edition = false; if !testfile.is_dir() { - let file = File::open(testfile).unwrap(); + let file = File::open(testfile.as_std_path()).unwrap(); let mut poisoned = false; @@ -377,14 +388,22 @@ impl TestProps { } if let Some(flags) = config.parse_name_value_directive(ln, COMPILE_FLAGS) { - self.compile_flags.extend(split_flags(&flags)); + let flags = split_flags(&flags); + for flag in &flags { + if flag == "--edition" || flag.starts_with("--edition=") { + panic!("you must use `//@ edition` to configure the edition"); + } + } + self.compile_flags.extend(flags); } if config.parse_name_value_directive(ln, INCORRECT_COMPILER_FLAGS).is_some() { panic!("`compiler-flags` directive should be spelled `compile-flags`"); } if let Some(edition) = config.parse_edition(ln) { - self.compile_flags.push(format!("--edition={}", edition.trim())); + // The edition is added at the start, since flags from //@compile-flags must + // be passed to rustc last. + self.compile_flags.insert(0, format!("--edition={}", edition.trim())); has_edition = true; } @@ -440,7 +459,7 @@ impl TestProps { ln, UNSET_EXEC_ENV, &mut self.unset_exec_env, - |r| r, + |r| r.trim().to_owned(), ); config.push_name_value_directive( ln, @@ -452,7 +471,7 @@ impl TestProps { ln, UNSET_RUSTC_ENV, &mut self.unset_rustc_env, - |r| r, + |r| r.trim().to_owned(), ); config.push_name_value_directive( ln, @@ -569,11 +588,18 @@ impl TestProps { config.set_name_directive(ln, NO_AUTO_CHECK_CFG, &mut self.no_auto_check_cfg); self.update_add_core_stubs(ln, config); + + if let Some(err_kind) = + config.parse_name_value_directive(ln, DONT_REQUIRE_ANNOTATIONS) + { + self.dont_require_annotations + .insert(ErrorKind::from_user_str(err_kind.trim())); + } }, ); if poisoned { - eprintln!("errors encountered during TestProps parsing: {}", testfile.display()); + eprintln!("errors encountered during TestProps parsing: {}", testfile); panic!("errors encountered during TestProps parsing"); } } @@ -605,7 +631,9 @@ impl TestProps { } if let (Some(edition), false) = (&config.edition, has_edition) { - self.compile_flags.push(format!("--edition={}", edition)); + // The edition is added at the start, since flags from //@compile-flags must be passed + // to rustc last. + self.compile_flags.insert(0, format!("--edition={}", edition)); } } @@ -842,7 +870,7 @@ fn iter_header( mode: Mode, _suite: &str, poisoned: &mut bool, - testfile: &Path, + testfile: &Utf8Path, rdr: impl Read, it: &mut dyn FnMut(DirectiveLine<'_>), ) { @@ -894,9 +922,7 @@ fn iter_header( eprintln!( "error: detected unknown compiletest test directive `{}` in {}:{}", - directive_line.raw_directive, - testfile.display(), - line_number, + directive_line.raw_directive, testfile, line_number, ); return; @@ -908,10 +934,7 @@ fn iter_header( eprintln!( "error: detected trailing compiletest test directive `{}` in {}:{}\n \ help: put the trailing directive in it's own line: `//@ {}`", - trailing_directive, - testfile.display(), - line_number, - trailing_directive, + trailing_directive, testfile, line_number, trailing_directive, ); return; @@ -923,7 +946,12 @@ fn iter_header( } impl Config { - fn parse_and_update_revisions(&self, testfile: &Path, line: &str, existing: &mut Vec) { + fn parse_and_update_revisions( + &self, + testfile: &Utf8Path, + line: &str, + existing: &mut Vec, + ) { const FORBIDDEN_REVISION_NAMES: [&str; 2] = [ // `//@ revisions: true false` Implying `--cfg=true` and `--cfg=false` makes it very // weird for the test, since if the test writer wants a cfg of the same revision name @@ -936,26 +964,19 @@ impl Config { if let Some(raw) = self.parse_name_value_directive(line, "revisions") { if self.mode == Mode::RunMake { - panic!("`run-make` tests do not support revisions: {}", testfile.display()); + panic!("`run-make` tests do not support revisions: {}", testfile); } let mut duplicates: HashSet<_> = existing.iter().cloned().collect(); for revision in raw.split_whitespace() { if !duplicates.insert(revision.to_string()) { - panic!( - "duplicate revision: `{}` in line `{}`: {}", - revision, - raw, - testfile.display() - ); + panic!("duplicate revision: `{}` in line `{}`: {}", revision, raw, testfile); } if FORBIDDEN_REVISION_NAMES.contains(&revision) { panic!( "revision name `{revision}` is not permitted: `{}` in line `{}`: {}", - revision, - raw, - testfile.display() + revision, raw, testfile ); } @@ -966,8 +987,7 @@ impl Config { "revision name `{revision}` is not permitted in a test suite that uses \ `FileCheck` annotations as it is confusing when used as custom `FileCheck` \ prefix: `{revision}` in line `{}`: {}", - raw, - testfile.display() + raw, testfile ); } @@ -978,23 +998,20 @@ impl Config { fn parse_env(nv: String) -> (String, String) { // nv is either FOO or FOO=BAR - let mut strs: Vec = nv.splitn(2, '=').map(str::to_owned).collect(); - - match strs.len() { - 1 => (strs.pop().unwrap(), String::new()), - 2 => { - let end = strs.pop().unwrap(); - (strs.pop().unwrap(), end) - } - n => panic!("Expected 1 or 2 strings, not {}", n), - } + // FIXME(Zalathar): The form without `=` seems to be unused; should + // we drop support for it? + let (name, value) = nv.split_once('=').unwrap_or((&nv, "")); + // Trim whitespace from the name, so that `//@ exec-env: FOO=BAR` + // sees the name as `FOO` and not ` FOO`. + let name = name.trim(); + (name.to_owned(), value.to_owned()) } - fn parse_pp_exact(&self, line: &str, testfile: &Path) -> Option { + fn parse_pp_exact(&self, line: &str, testfile: &Utf8Path) -> Option { if let Some(s) = self.parse_name_value_directive(line, "pp-exact") { - Some(PathBuf::from(&s)) + Some(Utf8PathBuf::from(&s)) } else if self.parse_name_directive(line, "pp-exact") { - testfile.file_name().map(PathBuf::from) + testfile.file_name().map(Utf8PathBuf::from) } else { None } @@ -1100,20 +1117,19 @@ fn expand_variables(mut value: String, config: &Config) -> String { if value.contains(CWD) { let cwd = env::current_dir().unwrap(); - value = value.replace(CWD, &cwd.to_string_lossy()); + value = value.replace(CWD, &cwd.to_str().unwrap()); } if value.contains(SRC_BASE) { - value = value.replace(SRC_BASE, &config.src_test_suite_root.to_str().unwrap()); + value = value.replace(SRC_BASE, &config.src_test_suite_root.as_str()); } if value.contains(TEST_SUITE_BUILD_BASE) { - value = - value.replace(TEST_SUITE_BUILD_BASE, &config.build_test_suite_root.to_str().unwrap()); + value = value.replace(TEST_SUITE_BUILD_BASE, &config.build_test_suite_root.as_str()); } if value.contains(SYSROOT_BASE) { - value = value.replace(SYSROOT_BASE, &config.sysroot_base.to_str().unwrap()); + value = value.replace(SYSROOT_BASE, &config.sysroot_base.as_str()); } if value.contains(TARGET_LINKER) { @@ -1126,9 +1142,9 @@ fn expand_variables(mut value: String, config: &Config) -> String { if value.contains(RUST_SRC_BASE) { let src_base = config.sysroot_base.join("lib/rustlib/src/rust"); - src_base.try_exists().expect(&*format!("{} should exists", src_base.display())); - let src_base = src_base.read_link().unwrap_or(src_base); - value = value.replace(RUST_SRC_BASE, &src_base.to_string_lossy()); + src_base.try_exists().expect(&*format!("{} should exists", src_base)); + let src_base = src_base.read_link_utf8().unwrap_or(src_base); + value = value.replace(RUST_SRC_BASE, &src_base.as_str()); } value @@ -1231,14 +1247,14 @@ pub fn llvm_has_libzstd(config: &Config) -> bool { // contains a path to that static lib, and that it exists. // // See compiler/rustc_llvm/build.rs for more details and similar expectations. - fn is_zstd_in_config(llvm_bin_dir: &Path) -> Option<()> { + fn is_zstd_in_config(llvm_bin_dir: &Utf8Path) -> Option<()> { let llvm_config_path = llvm_bin_dir.join("llvm-config"); let output = Command::new(llvm_config_path).arg("--system-libs").output().ok()?; assert!(output.status.success(), "running llvm-config --system-libs failed"); let libs = String::from_utf8(output.stdout).ok()?; for lib in libs.split_whitespace() { - if lib.ends_with("libzstd.a") && Path::new(lib).exists() { + if lib.ends_with("libzstd.a") && Utf8Path::new(lib).exists() { return Some(()); } } @@ -1256,7 +1272,7 @@ pub fn llvm_has_libzstd(config: &Config) -> bool { // `lld` supports it. If not, an error will be emitted: "LLVM was not built with // LLVM_ENABLE_ZSTD or did not find zstd at build time". #[cfg(unix)] - fn is_lld_built_with_zstd(llvm_bin_dir: &Path) -> Option<()> { + fn is_lld_built_with_zstd(llvm_bin_dir: &Utf8Path) -> Option<()> { let lld_path = llvm_bin_dir.join("lld"); if lld_path.exists() { // We can't call `lld` as-is, it expects to be invoked by a compiler driver using a @@ -1292,7 +1308,7 @@ pub fn llvm_has_libzstd(config: &Config) -> bool { } #[cfg(not(unix))] - fn is_lld_built_with_zstd(_llvm_bin_dir: &Path) -> Option<()> { + fn is_lld_built_with_zstd(_llvm_bin_dir: &Utf8Path) -> Option<()> { None } @@ -1355,15 +1371,15 @@ where Some((min, max)) } -pub fn make_test_description( +pub(crate) fn make_test_description( config: &Config, cache: &HeadersCache, - name: test::TestName, - path: &Path, + name: String, + path: &Utf8Path, src: R, test_revision: Option<&str>, poisoned: &mut bool, -) -> test::TestDesc { +) -> CollectedTestDesc { let mut ignore = false; let mut ignore_message = None; let mut should_fail = false; @@ -1387,13 +1403,10 @@ pub fn make_test_description( match $e { IgnoreDecision::Ignore { reason } => { ignore = true; - // The ignore reason must be a &'static str, so we have to leak memory to - // create it. This is fine, as the header is parsed only at the start of - // compiletest so it won't grow indefinitely. - ignore_message = Some(&*Box::leak(Box::::from(reason))); + ignore_message = Some(reason.into()); } IgnoreDecision::Error { message } => { - eprintln!("error: {}:{line_number}: {message}", path.display()); + eprintln!("error: {}:{line_number}: {message}", path); *poisoned = true; return; } @@ -1423,7 +1436,7 @@ pub fn make_test_description( ); if local_poisoned { - eprintln!("errors encountered when trying to make test description: {}", path.display()); + eprintln!("errors encountered when trying to make test description: {}", path); panic!("errors encountered when trying to make test description"); } @@ -1431,25 +1444,12 @@ pub fn make_test_description( // since we run the pretty printer across all tests by default. // If desired, we could add a `should-fail-pretty` annotation. let should_panic = match config.mode { - crate::common::Pretty => test::ShouldPanic::No, - _ if should_fail => test::ShouldPanic::Yes, - _ => test::ShouldPanic::No, + crate::common::Pretty => ShouldPanic::No, + _ if should_fail => ShouldPanic::Yes, + _ => ShouldPanic::No, }; - test::TestDesc { - name, - ignore, - ignore_message, - source_file: "", - start_line: 0, - start_col: 0, - end_line: 0, - end_col: 0, - should_panic, - compile_fail: false, - no_run: false, - test_type: test::TestType::Unknown, - } + CollectedTestDesc { name, ignore, ignore_message, should_panic } } fn ignore_cdb(config: &Config, line: &str) -> IgnoreDecision { @@ -1545,7 +1545,7 @@ fn ignore_lldb(config: &Config, line: &str) -> IgnoreDecision { IgnoreDecision::Continue } -fn ignore_llvm(config: &Config, path: &Path, line: &str) -> IgnoreDecision { +fn ignore_llvm(config: &Config, path: &Utf8Path, line: &str) -> IgnoreDecision { if let Some(needed_components) = config.parse_name_value_directive(line, "needs-llvm-components") { @@ -1557,8 +1557,7 @@ fn ignore_llvm(config: &Config, path: &Path, line: &str) -> IgnoreDecision { if env::var_os("COMPILETEST_REQUIRE_ALL_LLVM_COMPONENTS").is_some() { panic!( "missing LLVM component {}, and COMPILETEST_REQUIRE_ALL_LLVM_COMPONENTS is set: {}", - missing_component, - path.display() + missing_component, path ); } return IgnoreDecision::Ignore { diff --git a/src/tools/compiletest/src/header/cfg.rs b/src/tools/compiletest/src/header/cfg.rs index c369fff97f4f0..f1f1384afb971 100644 --- a/src/tools/compiletest/src/header/cfg.rs +++ b/src/tools/compiletest/src/header/cfg.rs @@ -100,6 +100,10 @@ fn parse_cfg_name_directive<'a>( name: "test", message: "always" } + condition! { + name: "auxiliary", + message: "used by another main test file" + } condition! { name: &config.target, allowed_names: &target_cfgs.all_targets, diff --git a/src/tools/compiletest/src/header/needs.rs b/src/tools/compiletest/src/header/needs.rs index 12f0790fb1040..2ace40c490bf3 100644 --- a/src/tools/compiletest/src/header/needs.rs +++ b/src/tools/compiletest/src/header/needs.rs @@ -1,4 +1,4 @@ -use crate::common::{Config, KNOWN_TARGET_HAS_ATOMIC_WIDTHS, Sanitizer}; +use crate::common::{Config, KNOWN_CRATE_TYPES, KNOWN_TARGET_HAS_ATOMIC_WIDTHS, Sanitizer}; use crate::header::{IgnoreDecision, llvm_has_libzstd}; pub(super) fn handle_needs( @@ -6,7 +6,7 @@ pub(super) fn handle_needs( config: &Config, ln: &str, ) -> IgnoreDecision { - // Note thet we intentionally still put the needs- prefix here to make the file show up when + // Note that we intentionally still put the needs- prefix here to make the file show up when // grepping for a directive name, even though we could technically strip that. let needs = &[ Need { @@ -224,6 +224,50 @@ pub(super) fn handle_needs( } } + // FIXME(jieyouxu): share multi-value directive logic with `needs-target-has-atomic` above. + if name == "needs-crate-type" { + let Some(rest) = rest else { + return IgnoreDecision::Error { + message: + "expected `needs-crate-type` to have a comma-separated list of crate types" + .to_string(), + }; + }; + + // Expect directive value to be a list of comma-separated crate-types. + let specified_crate_types = rest + .split(',') + .map(|crate_type| crate_type.trim()) + .map(ToString::to_string) + .collect::>(); + + for crate_type in &specified_crate_types { + if !KNOWN_CRATE_TYPES.contains(&crate_type.as_str()) { + return IgnoreDecision::Error { + message: format!( + "unknown crate type specified in `needs-crate-type`: `{crate_type}` is not \ + a known crate type, known values are `{:?}`", + KNOWN_CRATE_TYPES + ), + }; + } + } + + let satisfies_all_crate_types = specified_crate_types + .iter() + .all(|specified| config.supported_crate_types().contains(specified)); + if satisfies_all_crate_types { + return IgnoreDecision::Continue; + } else { + return IgnoreDecision::Ignore { + reason: format!( + "skipping test as target does not support all of the crate types `{:?}`", + specified_crate_types + ), + }; + } + } + if !name.starts_with("needs-") { return IgnoreDecision::Continue; } diff --git a/src/tools/compiletest/src/header/tests.rs b/src/tools/compiletest/src/header/tests.rs index 4d90f152ee204..e7e5ff0ab0093 100644 --- a/src/tools/compiletest/src/header/tests.rs +++ b/src/tools/compiletest/src/header/tests.rs @@ -1,6 +1,6 @@ use std::io::Read; -use std::path::Path; +use camino::Utf8Path; use semver::Version; use super::{ @@ -8,14 +8,15 @@ use super::{ parse_normalize_rule, }; use crate::common::{Config, Debugger, Mode}; +use crate::executor::{CollectedTestDesc, ShouldPanic}; fn make_test_description( config: &Config, - name: test::TestName, - path: &Path, + name: String, + path: &Utf8Path, src: R, revision: Option<&str>, -) -> test::TestDesc { +) -> CollectedTestDesc { let cache = HeadersCache::load(config); let mut poisoned = false; let test = crate::header::make_test_description( @@ -174,7 +175,6 @@ impl ConfigBuilder { self.host.as_deref().unwrap_or("x86_64-unknown-linux-gnu"), "--target", self.target.as_deref().unwrap_or("x86_64-unknown-linux-gnu"), - "--git-repository=", "--nightly-branch=", "--git-merge-commit-email=", "--minicore-path=", @@ -229,12 +229,12 @@ fn cfg() -> ConfigBuilder { fn parse_rs(config: &Config, contents: &str) -> EarlyProps { let bytes = contents.as_bytes(); - EarlyProps::from_reader(config, Path::new("a.rs"), bytes) + EarlyProps::from_reader(config, Utf8Path::new("a.rs"), bytes) } fn check_ignore(config: &Config, contents: &str) -> bool { - let tn = test::DynTestName(String::new()); - let p = Path::new("a.rs"); + let tn = String::new(); + let p = Utf8Path::new("a.rs"); let d = make_test_description(&config, tn, p, std::io::Cursor::new(contents), None); d.ignore } @@ -242,13 +242,13 @@ fn check_ignore(config: &Config, contents: &str) -> bool { #[test] fn should_fail() { let config: Config = cfg().build(); - let tn = test::DynTestName(String::new()); - let p = Path::new("a.rs"); + let tn = String::new(); + let p = Utf8Path::new("a.rs"); let d = make_test_description(&config, tn.clone(), p, std::io::Cursor::new(""), None); - assert_eq!(d.should_panic, test::ShouldPanic::No); + assert_eq!(d.should_panic, ShouldPanic::No); let d = make_test_description(&config, tn, p, std::io::Cursor::new("//@ should-fail"), None); - assert_eq!(d.should_panic, test::ShouldPanic::Yes); + assert_eq!(d.should_panic, ShouldPanic::Yes); } #[test] @@ -458,9 +458,6 @@ fn profiler_runtime() { #[test] fn asm_support() { let asms = [ - #[cfg(bootstrap)] - ("avr-unknown-gnu-atmega328", false), - #[cfg(not(bootstrap))] ("avr-none", false), ("i686-unknown-netbsd", true), ("riscv32gc-unknown-linux-gnu", true), @@ -786,7 +783,7 @@ fn threads_support() { } } -fn run_path(poisoned: &mut bool, path: &Path, buf: &[u8]) { +fn run_path(poisoned: &mut bool, path: &Utf8Path, buf: &[u8]) { let rdr = std::io::Cursor::new(&buf); iter_header(Mode::Ui, "ui", poisoned, path, rdr, &mut |_| {}); } @@ -796,7 +793,7 @@ fn test_unknown_directive_check() { let mut poisoned = false; run_path( &mut poisoned, - Path::new("a.rs"), + Utf8Path::new("a.rs"), include_bytes!("./test-auxillary/unknown_directive.rs"), ); assert!(poisoned); @@ -807,7 +804,7 @@ fn test_known_directive_check_no_error() { let mut poisoned = false; run_path( &mut poisoned, - Path::new("a.rs"), + Utf8Path::new("a.rs"), include_bytes!("./test-auxillary/known_directive.rs"), ); assert!(!poisoned); @@ -818,7 +815,7 @@ fn test_error_annotation_no_error() { let mut poisoned = false; run_path( &mut poisoned, - Path::new("a.rs"), + Utf8Path::new("a.rs"), include_bytes!("./test-auxillary/error_annotation.rs"), ); assert!(!poisoned); @@ -829,7 +826,7 @@ fn test_non_rs_unknown_directive_not_checked() { let mut poisoned = false; run_path( &mut poisoned, - Path::new("a.Makefile"), + Utf8Path::new("a.Makefile"), include_bytes!("./test-auxillary/not_rs.Makefile"), ); assert!(!poisoned); @@ -838,21 +835,21 @@ fn test_non_rs_unknown_directive_not_checked() { #[test] fn test_trailing_directive() { let mut poisoned = false; - run_path(&mut poisoned, Path::new("a.rs"), b"//@ only-x86 only-arm"); + run_path(&mut poisoned, Utf8Path::new("a.rs"), b"//@ only-x86 only-arm"); assert!(poisoned); } #[test] fn test_trailing_directive_with_comment() { let mut poisoned = false; - run_path(&mut poisoned, Path::new("a.rs"), b"//@ only-x86 only-arm with comment"); + run_path(&mut poisoned, Utf8Path::new("a.rs"), b"//@ only-x86 only-arm with comment"); assert!(poisoned); } #[test] fn test_not_trailing_directive() { let mut poisoned = false; - run_path(&mut poisoned, Path::new("a.rs"), b"//@ revisions: incremental"); + run_path(&mut poisoned, Utf8Path::new("a.rs"), b"//@ revisions: incremental"); assert!(!poisoned); } @@ -904,3 +901,47 @@ fn test_rustc_abi() { assert!(!check_ignore(&config, "//@ ignore-rustc_abi-x86-sse2")); assert!(check_ignore(&config, "//@ only-rustc_abi-x86-sse2")); } + +#[test] +fn test_supported_crate_types() { + // Basic assumptions check on under-test compiler's `--print=supported-crate-types` output based + // on knowledge about the cherry-picked `x86_64-unknown-linux-gnu` and `wasm32-unknown-unknown` + // targets. Also smoke tests the `needs-crate-type` directive itself. + + use std::collections::HashSet; + + let config = cfg().target("x86_64-unknown-linux-gnu").build(); + assert_eq!( + config.supported_crate_types().iter().map(String::as_str).collect::>(), + HashSet::from(["bin", "cdylib", "dylib", "lib", "proc-macro", "rlib", "staticlib"]), + ); + assert!(!check_ignore(&config, "//@ needs-crate-type: rlib")); + assert!(!check_ignore(&config, "//@ needs-crate-type: dylib")); + assert!(!check_ignore( + &config, + "//@ needs-crate-type: bin, cdylib, dylib, lib, proc-macro, rlib, staticlib" + )); + + let config = cfg().target("wasm32-unknown-unknown").build(); + assert_eq!( + config.supported_crate_types().iter().map(String::as_str).collect::>(), + HashSet::from(["bin", "cdylib", "lib", "rlib", "staticlib"]), + ); + + // rlib is supported + assert!(!check_ignore(&config, "//@ needs-crate-type: rlib")); + // dylib is not + assert!(check_ignore(&config, "//@ needs-crate-type: dylib")); + // If multiple crate types are specified, then all specified crate types need to be supported. + assert!(check_ignore(&config, "//@ needs-crate-type: cdylib, dylib")); + assert!(check_ignore( + &config, + "//@ needs-crate-type: bin, cdylib, dylib, lib, proc-macro, rlib, staticlib" + )); +} + +#[test] +fn test_ignore_auxiliary() { + let config = cfg().build(); + assert!(check_ignore(&config, "//@ ignore-auxiliary")); +} diff --git a/src/tools/compiletest/src/json.rs b/src/tools/compiletest/src/json.rs index 9bc26fedf8f4c..6ed2b52c66d21 100644 --- a/src/tools/compiletest/src/json.rs +++ b/src/tools/compiletest/src/json.rs @@ -1,14 +1,12 @@ //! These structs are a subset of the ones found in `rustc_errors::json`. use std::path::{Path, PathBuf}; -use std::str::FromStr; use std::sync::OnceLock; use regex::Regex; use serde::Deserialize; use crate::errors::{Error, ErrorKind}; -use crate::runtest::ProcRes; #[derive(Deserialize)] struct Diagnostic { @@ -141,44 +139,26 @@ pub fn extract_rendered(output: &str) -> String { .collect() } -pub fn parse_output(file_name: &str, output: &str, proc_res: &ProcRes) -> Vec { - output.lines().flat_map(|line| parse_line(file_name, line, output, proc_res)).collect() -} - -fn parse_line(file_name: &str, line: &str, output: &str, proc_res: &ProcRes) -> Vec { - // The compiler sometimes intermingles non-JSON stuff into the - // output. This hack just skips over such lines. Yuck. - if line.starts_with('{') { +pub fn parse_output(file_name: &str, output: &str) -> Vec { + let mut errors = Vec::new(); + for line in output.lines() { + // Compiler can emit non-json lines in non-`--error-format=json` modes, + // and in some situations even in json mode. match serde_json::from_str::(line) { - Ok(diagnostic) => { - let mut expected_errors = vec![]; - push_expected_errors(&mut expected_errors, &diagnostic, &[], file_name); - expected_errors - } - Err(error) => { - // Ignore the future compat report message - this is handled - // by `extract_rendered` - if serde_json::from_str::(line).is_ok() { - vec![] - } else { - proc_res.fatal( - Some(&format!( - "failed to decode compiler output as json: \ - `{}`\nline: {}\noutput: {}", - error, line, output - )), - || (), - ); - } - } + Ok(diagnostic) => push_actual_errors(&mut errors, &diagnostic, &[], file_name), + Err(_) => errors.push(Error { + line_num: None, + kind: ErrorKind::Raw, + msg: line.to_string(), + require_annotation: false, + }), } - } else { - vec![] } + errors } -fn push_expected_errors( - expected_errors: &mut Vec, +fn push_actual_errors( + errors: &mut Vec, diagnostic: &Diagnostic, default_spans: &[&DiagnosticSpan], file_name: &str, @@ -191,8 +171,6 @@ fn push_expected_errors( .filter(|(_, span)| Path::new(&span.file_name) == Path::new(&file_name)) .collect(); - let spans_in_this_file: Vec<_> = spans_info_in_this_file.iter().map(|(_, span)| span).collect(); - let primary_spans: Vec<_> = spans_info_in_this_file .iter() .filter(|(is_primary, _)| *is_primary) @@ -236,44 +214,47 @@ fn push_expected_errors( } }; - // Convert multi-line messages into multiple expected - // errors. We expect to replace these with something - // more structured shortly anyhow. + // Convert multi-line messages into multiple errors. + // We expect to replace these with something more structured anyhow. let mut message_lines = diagnostic.message.lines(); - if let Some(first_line) = message_lines.next() { - let ignore = |s| { - static RE: OnceLock = OnceLock::new(); - RE.get_or_init(|| { - Regex::new(r"aborting due to \d+ previous errors?|\d+ warnings? emitted").unwrap() - }) - .is_match(s) - }; - - if primary_spans.is_empty() && !ignore(first_line) { - let msg = with_code(None, first_line); - let kind = ErrorKind::from_str(&diagnostic.level).ok(); - expected_errors.push(Error { line_num: None, kind, msg }); - } else { - for span in primary_spans { - let msg = with_code(Some(span), first_line); - let kind = ErrorKind::from_str(&diagnostic.level).ok(); - expected_errors.push(Error { line_num: Some(span.line_start), kind, msg }); - } + let kind = ErrorKind::from_compiler_str(&diagnostic.level); + let first_line = message_lines.next().unwrap_or(&diagnostic.message); + if primary_spans.is_empty() { + static RE: OnceLock = OnceLock::new(); + let re_init = + || Regex::new(r"aborting due to \d+ previous errors?|\d+ warnings? emitted").unwrap(); + errors.push(Error { + line_num: None, + kind, + msg: with_code(None, first_line), + require_annotation: diagnostic.level != "failure-note" + && !RE.get_or_init(re_init).is_match(first_line), + }); + } else { + for span in primary_spans { + errors.push(Error { + line_num: Some(span.line_start), + kind, + msg: with_code(Some(span), first_line), + require_annotation: true, + }); } } for next_line in message_lines { if primary_spans.is_empty() { - expected_errors.push(Error { + errors.push(Error { line_num: None, - kind: None, + kind, msg: with_code(None, next_line), + require_annotation: false, }); } else { for span in primary_spans { - expected_errors.push(Error { + errors.push(Error { line_num: Some(span.line_start), - kind: None, + kind, msg: with_code(Some(span), next_line), + require_annotation: false, }); } } @@ -283,10 +264,13 @@ fn push_expected_errors( for span in primary_spans { if let Some(ref suggested_replacement) = span.suggested_replacement { for (index, line) in suggested_replacement.lines().enumerate() { - expected_errors.push(Error { + errors.push(Error { line_num: Some(span.line_start + index), - kind: Some(ErrorKind::Suggestion), + kind: ErrorKind::Suggestion, msg: line.to_string(), + // Empty suggestions (suggestions to remove something) are common + // and annotating them in source is not useful. + require_annotation: !line.is_empty(), }); } } @@ -295,39 +279,44 @@ fn push_expected_errors( // Add notes for the backtrace for span in primary_spans { if let Some(frame) = &span.expansion { - push_backtrace(expected_errors, frame, file_name); + push_backtrace(errors, frame, file_name); } } // Add notes for any labels that appear in the message. - for span in spans_in_this_file.iter().filter(|span| span.label.is_some()) { - expected_errors.push(Error { - line_num: Some(span.line_start), - kind: Some(ErrorKind::Note), - msg: span.label.clone().unwrap(), - }); + for (_, span) in spans_info_in_this_file { + if let Some(label) = &span.label { + errors.push(Error { + line_num: Some(span.line_start), + kind: ErrorKind::Note, + msg: label.clone(), + // Empty labels (only underlining spans) are common and do not need annotations. + require_annotation: !label.is_empty(), + }); + } } // Flatten out the children. for child in &diagnostic.children { - push_expected_errors(expected_errors, child, primary_spans, file_name); + push_actual_errors(errors, child, primary_spans, file_name); } } fn push_backtrace( - expected_errors: &mut Vec, + errors: &mut Vec, expansion: &DiagnosticSpanMacroExpansion, file_name: &str, ) { if Path::new(&expansion.span.file_name) == Path::new(&file_name) { - expected_errors.push(Error { + errors.push(Error { line_num: Some(expansion.span.line_start), - kind: Some(ErrorKind::Note), + kind: ErrorKind::Note, msg: format!("in this expansion of {}", expansion.macro_decl_name), + require_annotation: true, }); } if let Some(previous_expansion) = &expansion.span.expansion { - push_backtrace(expected_errors, previous_expansion, file_name); + push_backtrace(errors, previous_expansion, file_name); } } diff --git a/src/tools/compiletest/src/lib.rs b/src/tools/compiletest/src/lib.rs index 950566b2582a8..0db4d3f6a4100 100644 --- a/src/tools/compiletest/src/lib.rs +++ b/src/tools/compiletest/src/lib.rs @@ -1,9 +1,9 @@ #![crate_name = "compiletest"] -// The `test` crate is the only unstable feature -// allowed here, just to share similar code. -#![feature(test)] - -extern crate test; +// Needed by the "new" test executor that does not depend on libtest. +// FIXME(Zalathar): We should be able to get rid of `internal_output_capture`, +// by having `runtest` manually capture all of its println-like output instead. +// That would result in compiletest being written entirely in stable Rust! +#![feature(internal_output_capture)] #[cfg(test)] mod tests; @@ -12,6 +12,7 @@ pub mod common; pub mod compute_diff; mod debuggers; pub mod errors; +mod executor; pub mod header; mod json; mod raise_fd_limit; @@ -21,18 +22,17 @@ pub mod util; use core::panic; use std::collections::HashSet; -use std::ffi::OsString; use std::fmt::Write; use std::io::{self, ErrorKind}; -use std::path::{Path, PathBuf}; use std::process::{Command, Stdio}; use std::sync::{Arc, OnceLock}; use std::time::SystemTime; use std::{env, fs, vec}; use build_helper::git::{get_git_modified_files, get_git_untracked_files}; +use camino::{Utf8Path, Utf8PathBuf}; use getopts::Options; -use test::ColorConfig; +use rayon::iter::{ParallelBridge, ParallelIterator}; use tracing::*; use walkdir::WalkDir; @@ -41,6 +41,7 @@ use crate::common::{ CompareMode, Config, Debugger, Mode, PassMode, TestPaths, UI_EXTENSIONS, expected_output_path, output_base_dir, output_relative_path, }; +use crate::executor::{CollectedTest, ColorConfig, OutputFormat}; use crate::header::HeadersCache; use crate::util::logv; @@ -50,6 +51,12 @@ use crate::util::logv; /// some code here that inspects environment variables or even runs executables /// (e.g. when discovering debugger versions). pub fn parse_config(args: Vec) -> Config { + if env::var("RUST_TEST_NOCAPTURE").is_ok() { + eprintln!( + "WARNING: RUST_TEST_NOCAPTURE is not supported. Use the `--no-capture` flag instead." + ); + } + let mut opts = Options::new(); opts.reqopt("", "compile-lib-path", "path to host shared libraries", "PATH") .reqopt("", "run-lib-path", "path to target shared libraries", "PATH") @@ -128,10 +135,10 @@ pub fn parse_config(args: Vec) -> Config { "bless", "overwrite stderr/stdout files instead of complaining about a mismatch", ) + .optflag("", "fail-fast", "stop as soon as possible after any test fails") .optflag("", "quiet", "print one character per test instead of one line") .optopt("", "color", "coloring: auto, always, never", "WHEN") .optflag("", "json", "emit json output instead of plaintext output") - .optopt("", "logfile", "file to log test execution to", "FILE") .optopt("", "target", "the target to build for", "TARGET") .optopt("", "host", "the host to build for", "HOST") .optopt("", "cdb", "path to CDB to use for CDB debuginfo tests", "PATH") @@ -181,7 +188,6 @@ pub fn parse_config(args: Vec) -> Config { "run tests which rely on commit version being compiled into the binaries", ) .optopt("", "edition", "default Rust edition", "EDITION") - .reqopt("", "git-repository", "name of the git repository", "ORG/REPO") .reqopt("", "nightly-branch", "name of the git branch for nightly", "BRANCH") .reqopt( "", @@ -196,6 +202,7 @@ pub fn parse_config(args: Vec) -> Config { "COMMAND", ) .reqopt("", "minicore-path", "path to minicore aux library", "PATH") + .optflag("N", "no-new-executor", "disables the new test executor, and uses libtest instead") .optopt( "", "debugger", @@ -223,15 +230,19 @@ pub fn parse_config(args: Vec) -> Config { panic!() } - fn opt_path(m: &getopts::Matches, nm: &str) -> PathBuf { - match m.opt_str(nm) { - Some(s) => PathBuf::from(&s), - None => panic!("no option (=path) found for {}", nm), + fn make_absolute(path: Utf8PathBuf) -> Utf8PathBuf { + if path.is_relative() { + Utf8PathBuf::try_from(env::current_dir().unwrap()).unwrap().join(path) + } else { + path } } - fn make_absolute(path: PathBuf) -> PathBuf { - if path.is_relative() { env::current_dir().unwrap().join(path) } else { path } + fn opt_path(m: &getopts::Matches, nm: &str) -> Utf8PathBuf { + match m.opt_str(nm) { + Some(s) => Utf8PathBuf::from(&s), + None => panic!("no option (=path) found for {}", nm), + } } let target = opt_str2(matches.opt_str("target")); @@ -272,12 +283,12 @@ pub fn parse_config(args: Vec) -> Config { .free .iter() .map(|f| { - let path = Path::new(f); + let path = Utf8Path::new(f); let mut iter = path.iter().skip(1); // We skip the test folder and check if the user passed `rmake.rs`. if iter.next().is_some_and(|s| s == "rmake.rs") && iter.next().is_none() { - path.parent().unwrap().to_str().unwrap().to_string() + path.parent().unwrap().to_string() } else { f.to_string() } @@ -309,8 +320,8 @@ pub fn parse_config(args: Vec) -> Config { assert!( src_test_suite_root.starts_with(&src_root), "`src-root` must be a parent of `src-test-suite-root`: `src-root`=`{}`, `src-test-suite-root` = `{}`", - src_root.display(), - src_test_suite_root.display() + src_root, + src_test_suite_root ); let build_root = opt_path(matches, "build-root"); @@ -319,19 +330,22 @@ pub fn parse_config(args: Vec) -> Config { Config { bless: matches.opt_present("bless"), + fail_fast: matches.opt_present("fail-fast") + || env::var_os("RUSTC_TEST_FAIL_FAST").is_some(), + compile_lib_path: make_absolute(opt_path(matches, "compile-lib-path")), run_lib_path: make_absolute(opt_path(matches, "run-lib-path")), rustc_path: opt_path(matches, "rustc-path"), - cargo_path: matches.opt_str("cargo-path").map(PathBuf::from), - stage0_rustc_path: matches.opt_str("stage0-rustc-path").map(PathBuf::from), - rustdoc_path: matches.opt_str("rustdoc-path").map(PathBuf::from), - coverage_dump_path: matches.opt_str("coverage-dump-path").map(PathBuf::from), + cargo_path: matches.opt_str("cargo-path").map(Utf8PathBuf::from), + stage0_rustc_path: matches.opt_str("stage0-rustc-path").map(Utf8PathBuf::from), + rustdoc_path: matches.opt_str("rustdoc-path").map(Utf8PathBuf::from), + coverage_dump_path: matches.opt_str("coverage-dump-path").map(Utf8PathBuf::from), python: matches.opt_str("python").unwrap(), jsondocck_path: matches.opt_str("jsondocck-path"), jsondoclint_path: matches.opt_str("jsondoclint-path"), run_clang_based_tests_with: matches.opt_str("run-clang-based-tests-with"), - llvm_filecheck: matches.opt_str("llvm-filecheck").map(PathBuf::from), - llvm_bin_dir: matches.opt_str("llvm-bin-dir").map(PathBuf::from), + llvm_filecheck: matches.opt_str("llvm-filecheck").map(Utf8PathBuf::from), + llvm_bin_dir: matches.opt_str("llvm-bin-dir").map(Utf8PathBuf::from), src_root, src_test_suite_root, @@ -367,7 +381,6 @@ pub fn parse_config(args: Vec) -> Config { "never" => Some(false), _ => panic!("unknown `--run` option `{}` given", mode), }), - logfile: matches.opt_str("logfile").map(|s| PathBuf::from(&s)), runner: matches.opt_str("runner"), host_rustcflags: matches.opt_strs("host-rustcflags"), target_rustcflags: matches.opt_strs("target-rustcflags"), @@ -392,13 +405,13 @@ pub fn parse_config(args: Vec) -> Config { verbose: matches.opt_present("verbose"), format: match (matches.opt_present("quiet"), matches.opt_present("json")) { (true, true) => panic!("--quiet and --json are incompatible"), - (true, false) => test::OutputFormat::Terse, - (false, true) => test::OutputFormat::Json, - (false, false) => test::OutputFormat::Pretty, + (true, false) => OutputFormat::Terse, + (false, true) => OutputFormat::Json, + (false, false) => OutputFormat::Pretty, }, only_modified: matches.opt_present("only-modified"), color, - remote_test_client: matches.opt_str("remote-test-client").map(PathBuf::from), + remote_test_client: matches.opt_str("remote-test-client").map(Utf8PathBuf::from), compare_mode, rustfix_coverage: matches.opt_present("rustfix-coverage"), has_html_tidy, @@ -422,10 +435,10 @@ pub fn parse_config(args: Vec) -> Config { target_cfgs: OnceLock::new(), builtin_cfg_names: OnceLock::new(), + supported_crate_types: OnceLock::new(), nocapture: matches.opt_present("no-capture"), - git_repository: matches.opt_str("git-repository").unwrap(), nightly_branch: matches.opt_str("nightly-branch").unwrap(), git_merge_commit_email: matches.opt_str("git-merge-commit-email").unwrap(), @@ -440,19 +453,19 @@ pub fn parse_config(args: Vec) -> Config { pub fn log_config(config: &Config) { let c = config; logv(c, "configuration:".to_string()); - logv(c, format!("compile_lib_path: {:?}", config.compile_lib_path)); - logv(c, format!("run_lib_path: {:?}", config.run_lib_path)); - logv(c, format!("rustc_path: {:?}", config.rustc_path.display())); + logv(c, format!("compile_lib_path: {}", config.compile_lib_path)); + logv(c, format!("run_lib_path: {}", config.run_lib_path)); + logv(c, format!("rustc_path: {}", config.rustc_path)); logv(c, format!("cargo_path: {:?}", config.cargo_path)); logv(c, format!("rustdoc_path: {:?}", config.rustdoc_path)); - logv(c, format!("src_root: {}", config.src_root.display())); - logv(c, format!("src_test_suite_root: {}", config.src_test_suite_root.display())); + logv(c, format!("src_root: {}", config.src_root)); + logv(c, format!("src_test_suite_root: {}", config.src_test_suite_root)); - logv(c, format!("build_root: {}", config.build_root.display())); - logv(c, format!("build_test_suite_root: {}", config.build_test_suite_root.display())); + logv(c, format!("build_root: {}", config.build_root)); + logv(c, format!("build_test_suite_root: {}", config.build_test_suite_root)); - logv(c, format!("sysroot_base: {}", config.sysroot_base.display())); + logv(c, format!("sysroot_base: {}", config.sysroot_base)); logv(c, format!("stage: {}", config.stage)); logv(c, format!("stage_id: {}", config.stage_id)); @@ -470,16 +483,16 @@ pub fn log_config(config: &Config) { logv(c, format!("target-rustcflags: {:?}", config.target_rustcflags)); logv(c, format!("target: {}", config.target)); logv(c, format!("host: {}", config.host)); - logv(c, format!("android-cross-path: {:?}", config.android_cross_path.display())); - logv(c, format!("adb_path: {:?}", config.adb_path)); - logv(c, format!("adb_test_dir: {:?}", config.adb_test_dir)); + logv(c, format!("android-cross-path: {}", config.android_cross_path)); + logv(c, format!("adb_path: {}", config.adb_path)); + logv(c, format!("adb_test_dir: {}", config.adb_test_dir)); logv(c, format!("adb_device_status: {}", config.adb_device_status)); logv(c, format!("ar: {}", config.ar)); logv(c, format!("target-linker: {:?}", config.target_linker)); logv(c, format!("host-linker: {:?}", config.host_linker)); logv(c, format!("verbose: {}", config.verbose)); logv(c, format!("format: {:?}", config.format)); - logv(c, format!("minicore_path: {:?}", config.minicore_path.display())); + logv(c, format!("minicore_path: {}", config.minicore_path)); logv(c, "\n".to_string()); } @@ -507,7 +520,7 @@ pub fn run_tests(config: Arc) { coverage_file_path.push("rustfix_missing_coverage.txt"); if coverage_file_path.exists() { if let Err(e) = fs::remove_file(&coverage_file_path) { - panic!("Could not delete {} due to {}", coverage_file_path.display(), e) + panic!("Could not delete {} due to {}", coverage_file_path, e) } } } @@ -520,12 +533,14 @@ pub fn run_tests(config: Arc) { } // Prevent issue #21352 UAC blocking .exe containing 'patch' etc. on Windows // If #11207 is resolved (adding manifest to .exe) this becomes unnecessary - env::set_var("__COMPAT_LAYER", "RunAsInvoker"); - - // Let tests know which target they're running as - env::set_var("TARGET", &config.target); + // + // SAFETY: at this point we're still single-threaded. + unsafe { env::set_var("__COMPAT_LAYER", "RunAsInvoker") }; - let opts = test_opts(&config); + // Let tests know which target they're running as. + // + // SAFETY: at this point we're still single-threaded. + unsafe { env::set_var("TARGET", &config.target) }; let mut configs = Vec::new(); if let Mode::DebugInfo = config.mode { @@ -553,12 +568,15 @@ pub fn run_tests(config: Arc) { tests.extend(collect_and_make_tests(c)); } - tests.sort_by(|a, b| a.desc.name.as_slice().cmp(&b.desc.name.as_slice())); + tests.sort_by(|a, b| Ord::cmp(&a.desc.name, &b.desc.name)); - // Delegate to libtest to filter and run the big list of structures created - // during test discovery. When libtest decides to run a test, it will invoke - // the corresponding closure created by `make_test_closure`. - let res = test::run_tests_console(&opts, tests); + // Delegate to the executor to filter and run the big list of test structures + // created during test discovery. When the executor decides to run a test, + // it will return control to the rest of compiletest by calling `runtest::run`. + // FIXME(Zalathar): Once we're confident that we won't need to revert the + // removal of the libtest-based executor, remove this Result and other + // remnants of the old executor. + let res: io::Result = Ok(executor::run_tests(&config, tests)); // Check the outcome reported by libtest. match res { @@ -602,80 +620,54 @@ pub fn run_tests(config: Arc) { } } -pub fn test_opts(config: &Config) -> test::TestOpts { - if env::var("RUST_TEST_NOCAPTURE").is_ok() { - eprintln!( - "WARNING: RUST_TEST_NOCAPTURE is no longer used. \ - Use the `--nocapture` flag instead." - ); - } - - test::TestOpts { - exclude_should_panic: false, - filters: config.filters.clone(), - filter_exact: config.filter_exact, - run_ignored: if config.run_ignored { test::RunIgnored::Yes } else { test::RunIgnored::No }, - format: config.format, - logfile: config.logfile.clone(), - run_tests: true, - bench_benchmarks: true, - nocapture: config.nocapture, - color: config.color, - shuffle: false, - shuffle_seed: None, - test_threads: None, - skip: config.skip.clone(), - list: false, - options: test::Options::new(), - time_options: None, - force_run_in_process: false, - fail_fast: std::env::var_os("RUSTC_TEST_FAIL_FAST").is_some(), - } -} - /// Read-only context data used during test collection. struct TestCollectorCx { config: Arc, cache: HeadersCache, common_inputs_stamp: Stamp, - modified_tests: Vec, + modified_tests: Vec, } /// Mutable state used during test collection. struct TestCollector { - tests: Vec, - found_path_stems: HashSet, + tests: Vec, + found_path_stems: HashSet, poisoned: bool, } -/// Creates libtest structures for every test/revision in the test suite directory. +impl TestCollector { + fn new() -> Self { + TestCollector { tests: vec![], found_path_stems: HashSet::new(), poisoned: false } + } + + fn merge(&mut self, mut other: Self) { + self.tests.append(&mut other.tests); + self.found_path_stems.extend(other.found_path_stems); + self.poisoned |= other.poisoned; + } +} + +/// Creates test structures for every test/revision in the test suite directory. /// /// This always inspects _all_ test files in the suite (e.g. all 17k+ ui tests), /// regardless of whether any filters/tests were specified on the command-line, /// because filtering is handled later by libtest. -pub fn collect_and_make_tests(config: Arc) -> Vec { - debug!("making tests from {}", config.src_test_suite_root.display()); +pub(crate) fn collect_and_make_tests(config: Arc) -> Vec { + debug!("making tests from {}", config.src_test_suite_root); let common_inputs_stamp = common_inputs_stamp(&config); let modified_tests = modified_tests(&config, &config.src_test_suite_root).unwrap_or_else(|err| { panic!( "modified_tests got error from dir: {}, error: {}", - config.src_test_suite_root.display(), - err + config.src_test_suite_root, err ) }); let cache = HeadersCache::load(&config); let cx = TestCollectorCx { config, cache, common_inputs_stamp, modified_tests }; - let mut collector = - TestCollector { tests: vec![], found_path_stems: HashSet::new(), poisoned: false }; - - collect_tests_from_dir(&cx, &mut collector, &cx.config.src_test_suite_root, Path::new("")) + let collector = collect_tests_from_dir(&cx, &cx.config.src_test_suite_root, Utf8Path::new("")) .unwrap_or_else(|reason| { - panic!( - "Could not read tests from {}: {reason}", - cx.config.src_test_suite_root.display() - ) + panic!("Could not read tests from {}: {reason}", cx.config.src_test_suite_root) }); let TestCollector { tests, found_path_stems, poisoned } = collector; @@ -744,24 +736,29 @@ fn common_inputs_stamp(config: &Config) -> Stamp { /// the `--only-modified` flag is in use. /// /// (Might be inaccurate in some cases.) -fn modified_tests(config: &Config, dir: &Path) -> Result, String> { +fn modified_tests(config: &Config, dir: &Utf8Path) -> Result, String> { // If `--only-modified` wasn't passed, the list of modified tests won't be // used for anything, so avoid some work and just return an empty list. if !config.only_modified { return Ok(vec![]); } - let files = - get_git_modified_files(&config.git_config(), Some(dir), &vec!["rs", "stderr", "fixed"])?; + let files = get_git_modified_files( + &config.git_config(), + Some(dir.as_std_path()), + &vec!["rs", "stderr", "fixed"], + )?; // Add new test cases to the list, it will be convenient in daily development. - let untracked_files = get_git_untracked_files(&config.git_config(), None)?.unwrap_or(vec![]); + let untracked_files = get_git_untracked_files(Some(dir.as_std_path()))?.unwrap_or(vec![]); let all_paths = [&files[..], &untracked_files[..]].concat(); let full_paths = { - let mut full_paths: Vec = all_paths + let mut full_paths: Vec = all_paths .into_iter() - .map(|f| PathBuf::from(f).with_extension("").with_extension("rs")) - .filter_map(|f| if Path::new(&f).exists() { f.canonicalize().ok() } else { None }) + .map(|f| Utf8PathBuf::from(f).with_extension("").with_extension("rs")) + .filter_map( + |f| if Utf8Path::new(&f).exists() { f.canonicalize_utf8().ok() } else { None }, + ) .collect(); full_paths.dedup(); full_paths.sort_unstable(); @@ -774,25 +771,25 @@ fn modified_tests(config: &Config, dir: &Path) -> Result, String> { /// that will be handed over to libtest. fn collect_tests_from_dir( cx: &TestCollectorCx, - collector: &mut TestCollector, - dir: &Path, - relative_dir_path: &Path, -) -> io::Result<()> { + dir: &Utf8Path, + relative_dir_path: &Utf8Path, +) -> io::Result { // Ignore directories that contain a file named `compiletest-ignore-dir`. if dir.join("compiletest-ignore-dir").exists() { - return Ok(()); + return Ok(TestCollector::new()); } // For run-make tests, a "test file" is actually a directory that contains an `rmake.rs`. if cx.config.mode == Mode::RunMake { + let mut collector = TestCollector::new(); if dir.join("rmake.rs").exists() { let paths = TestPaths { file: dir.to_path_buf(), relative_dir: relative_dir_path.parent().unwrap().to_path_buf(), }; - make_test(cx, collector, &paths); + make_test(cx, &mut collector, &paths); // This directory is a test, so don't try to find other tests inside it. - return Ok(()); + return Ok(collector); } } @@ -809,42 +806,51 @@ fn collect_tests_from_dir( // subdirectories we find, except for `auxiliary` directories. // FIXME: this walks full tests tree, even if we have something to ignore // use walkdir/ignore like in tidy? - for file in fs::read_dir(dir)? { - let file = file?; - let file_path = file.path(); - let file_name = file.file_name(); - - if is_test(&file_name) - && (!cx.config.only_modified || cx.modified_tests.contains(&file_path)) - { - // We found a test file, so create the corresponding libtest structures. - debug!("found test file: {:?}", file_path.display()); - - // Record the stem of the test file, to check for overlaps later. - let rel_test_path = relative_dir_path.join(file_path.file_stem().unwrap()); - collector.found_path_stems.insert(rel_test_path); - - let paths = - TestPaths { file: file_path, relative_dir: relative_dir_path.to_path_buf() }; - make_test(cx, collector, &paths); - } else if file_path.is_dir() { - // Recurse to find more tests in a subdirectory. - let relative_file_path = relative_dir_path.join(file.file_name()); - if &file_name != "auxiliary" { - debug!("found directory: {:?}", file_path.display()); - collect_tests_from_dir(cx, collector, &file_path, &relative_file_path)?; + fs::read_dir(dir.as_std_path())? + .par_bridge() + .map(|file| { + let mut collector = TestCollector::new(); + let file = file?; + let file_path = Utf8PathBuf::try_from(file.path()).unwrap(); + let file_name = file_path.file_name().unwrap(); + + if is_test(file_name) + && (!cx.config.only_modified || cx.modified_tests.contains(&file_path)) + { + // We found a test file, so create the corresponding libtest structures. + debug!(%file_path, "found test file"); + + // Record the stem of the test file, to check for overlaps later. + let rel_test_path = relative_dir_path.join(file_path.file_stem().unwrap()); + collector.found_path_stems.insert(rel_test_path); + + let paths = + TestPaths { file: file_path, relative_dir: relative_dir_path.to_path_buf() }; + make_test(cx, &mut collector, &paths); + } else if file_path.is_dir() { + // Recurse to find more tests in a subdirectory. + let relative_file_path = relative_dir_path.join(file_name); + if file_name != "auxiliary" { + debug!(%file_path, "found directory"); + collector.merge(collect_tests_from_dir(cx, &file_path, &relative_file_path)?); + } + } else { + debug!(%file_path, "found other file/directory"); } - } else { - debug!("found other file/directory: {:?}", file_path.display()); - } - } - Ok(()) + Ok(collector) + }) + .reduce( + || Ok(TestCollector::new()), + |a, b| { + let mut a = a?; + a.merge(b?); + Ok(a) + }, + ) } /// Returns true if `file_name` looks like a proper test file name. -pub fn is_test(file_name: &OsString) -> bool { - let file_name = file_name.to_str().unwrap(); - +pub fn is_test(file_name: &str) -> bool { if !file_name.ends_with(".rs") { return false; } @@ -863,7 +869,7 @@ fn make_test(cx: &TestCollectorCx, collector: &mut TestCollector, testpaths: &Te let test_path = if cx.config.mode == Mode::RunMake { testpaths.file.join("rmake.rs") } else { - PathBuf::from(&testpaths.file) + testpaths.file.clone() }; // Scan the test file to discover its revisions, if any. @@ -882,7 +888,7 @@ fn make_test(cx: &TestCollectorCx, collector: &mut TestCollector, testpaths: &Te }; // For each revision (or the sole dummy revision), create and append a - // `test::TestDescAndFn` that can be handed over to libtest. + // `CollectedTest` that can be handed over to the test executor. collector.tests.extend(revisions.into_iter().map(|revision| { // Create a test name and description to hand over to libtest. let src_file = fs::File::open(&test_path).expect("open test file to parse ignores"); @@ -905,19 +911,20 @@ fn make_test(cx: &TestCollectorCx, collector: &mut TestCollector, testpaths: &Te if !cx.config.force_rerun && is_up_to_date(cx, testpaths, &early_props, revision) { desc.ignore = true; // Keep this in sync with the "up-to-date" message detected by bootstrap. - desc.ignore_message = Some("up-to-date"); + desc.ignore_message = Some("up-to-date".into()); } - // Create the callback that will run this test/revision when libtest calls it. - let testfn = make_test_closure(Arc::clone(&cx.config), testpaths, revision); + let config = Arc::clone(&cx.config); + let testpaths = testpaths.clone(); + let revision = revision.map(str::to_owned); - test::TestDescAndFn { desc, testfn } + CollectedTest { desc, config, testpaths, revision } })); } /// The path of the `stamp` file that gets created or updated whenever a /// particular test completes successfully. -fn stamp_file_path(config: &Config, testpaths: &TestPaths, revision: Option<&str>) -> PathBuf { +fn stamp_file_path(config: &Config, testpaths: &TestPaths, revision: Option<&str>) -> Utf8PathBuf { output_base_dir(config, testpaths, revision).join("stamp") } @@ -930,7 +937,7 @@ fn files_related_to_test( testpaths: &TestPaths, props: &EarlyProps, revision: Option<&str>, -) -> Vec { +) -> Vec { let mut related = vec![]; if testpaths.file.is_dir() { @@ -938,7 +945,7 @@ fn files_related_to_test( for entry in WalkDir::new(&testpaths.file) { let path = entry.unwrap().into_path(); if path.is_file() { - related.push(path); + related.push(Utf8PathBuf::try_from(path).unwrap()); } } } else { @@ -1009,7 +1016,7 @@ struct Stamp { impl Stamp { /// Creates a timestamp holding the last-modified time of the specified file. - fn from_path(path: &Path) -> Self { + fn from_path(path: &Utf8Path) -> Self { let mut stamp = Stamp { time: SystemTime::UNIX_EPOCH }; stamp.add_path(path); stamp @@ -1017,8 +1024,8 @@ impl Stamp { /// Updates this timestamp to the last-modified time of the specified file, /// if it is later than the currently-stored timestamp. - fn add_path(&mut self, path: &Path) { - let modified = fs::metadata(path) + fn add_path(&mut self, path: &Utf8Path) { + let modified = fs::metadata(path.as_std_path()) .and_then(|metadata| metadata.modified()) .unwrap_or(SystemTime::UNIX_EPOCH); self.time = self.time.max(modified); @@ -1027,7 +1034,8 @@ impl Stamp { /// Updates this timestamp to the most recent last-modified time of all files /// recursively contained in the given directory, if it is later than the /// currently-stored timestamp. - fn add_dir(&mut self, path: &Path) { + fn add_dir(&mut self, path: &Utf8Path) { + let path = path.as_std_path(); for entry in WalkDir::new(path) { let entry = entry.unwrap(); if entry.file_type().is_file() { @@ -1043,11 +1051,7 @@ impl Stamp { } /// Creates a name for this test/revision that can be handed over to libtest. -fn make_test_name( - config: &Config, - testpaths: &TestPaths, - revision: Option<&str>, -) -> test::TestName { +fn make_test_name(config: &Config, testpaths: &TestPaths, revision: Option<&str>) -> String { // Print the name of the file, relative to the sources root. let path = testpaths.file.strip_prefix(&config.src_root).unwrap(); let debugger = match config.debugger { @@ -1059,32 +1063,14 @@ fn make_test_name( None => String::new(), }; - test::DynTestName(format!( + format!( "[{}{}{}] {}{}", config.mode, debugger, mode_suffix, - path.display(), + path, revision.map_or("".to_string(), |rev| format!("#{}", rev)) - )) -} - -/// Creates a callback for this test/revision that libtest will call when it -/// decides to actually run the underlying test. -fn make_test_closure( - config: Arc, - testpaths: &TestPaths, - revision: Option<&str>, -) -> test::TestFn { - let testpaths = testpaths.clone(); - let revision = revision.map(str::to_owned); - - // This callback is the link between compiletest's test discovery code, - // and the parts of compiletest that know how to run an individual test. - test::DynTestFn(Box::new(move || { - runtest::run(config, &testpaths, revision.as_deref()); - Ok(()) - })) + ) } /// Checks that test discovery didn't find any tests whose name stem is a prefix @@ -1104,7 +1090,7 @@ fn make_test_closure( /// To avoid problems, we forbid test names from overlapping in this way. /// /// See for more context. -fn check_for_overlapping_test_paths(found_path_stems: &HashSet) { +fn check_for_overlapping_test_paths(found_path_stems: &HashSet) { let mut collisions = Vec::new(); for path in found_path_stems { for ancestor in path.ancestors().skip(1) { @@ -1117,7 +1103,7 @@ fn check_for_overlapping_test_paths(found_path_stems: &HashSet) { collisions.sort(); let collisions: String = collisions .into_iter() - .map(|(path, check_parent)| format!("test {path:?} clashes with {check_parent:?}\n")) + .map(|(path, check_parent)| format!("test {path} clashes with {check_parent}\n")) .collect(); panic!( "{collisions}\n\ diff --git a/src/tools/compiletest/src/raise_fd_limit.rs b/src/tools/compiletest/src/raise_fd_limit.rs index 7b12ba946b9eb..653b125a6b413 100644 --- a/src/tools/compiletest/src/raise_fd_limit.rs +++ b/src/tools/compiletest/src/raise_fd_limit.rs @@ -6,6 +6,7 @@ /// This fixes issue #7772. #[cfg(target_vendor = "apple")] #[allow(non_camel_case_types)] +// FIXME(#139616): document caller contract. pub unsafe fn raise_fd_limit() { use std::ptr::null_mut; use std::{cmp, io}; @@ -21,8 +22,10 @@ pub unsafe fn raise_fd_limit() { let mut mib: [libc::c_int; 2] = [CTL_KERN, KERN_MAXFILESPERPROC]; let mut maxfiles: libc::c_int = 0; let mut size: libc::size_t = size_of_val(&maxfiles) as libc::size_t; - if libc::sysctl(&mut mib[0], 2, &mut maxfiles as *mut _ as *mut _, &mut size, null_mut(), 0) - != 0 + // FIXME(#139616): justify why this is sound. + if unsafe { + libc::sysctl(&mut mib[0], 2, &mut maxfiles as *mut _ as *mut _, &mut size, null_mut(), 0) + } != 0 { let err = io::Error::last_os_error(); panic!("raise_fd_limit: error calling sysctl: {}", err); @@ -30,7 +33,8 @@ pub unsafe fn raise_fd_limit() { // Fetch the current resource limits let mut rlim = libc::rlimit { rlim_cur: 0, rlim_max: 0 }; - if libc::getrlimit(libc::RLIMIT_NOFILE, &mut rlim) != 0 { + // FIXME(#139616): justify why this is sound. + if unsafe { libc::getrlimit(libc::RLIMIT_NOFILE, &mut rlim) } != 0 { let err = io::Error::last_os_error(); panic!("raise_fd_limit: error calling getrlimit: {}", err); } @@ -41,7 +45,8 @@ pub unsafe fn raise_fd_limit() { rlim.rlim_cur = cmp::min(maxfiles as libc::rlim_t, rlim.rlim_max); // Set our newly-increased resource limit. - if libc::setrlimit(libc::RLIMIT_NOFILE, &rlim) != 0 { + // FIXME(#139616): justify why this is sound. + if unsafe { libc::setrlimit(libc::RLIMIT_NOFILE, &rlim) } != 0 { let err = io::Error::last_os_error(); panic!("raise_fd_limit: error calling setrlimit: {}", err); } diff --git a/src/tools/compiletest/src/read2.rs b/src/tools/compiletest/src/read2.rs index 28ca5589992a2..2213dd07160a7 100644 --- a/src/tools/compiletest/src/read2.rs +++ b/src/tools/compiletest/src/read2.rs @@ -165,6 +165,7 @@ mod imp { mut err_pipe: ChildStderr, data: &mut dyn FnMut(bool, &mut Vec, bool), ) -> io::Result<()> { + // FIXME(#139616): justify why this is sound. unsafe { libc::fcntl(out_pipe.as_raw_fd(), libc::F_SETFL, libc::O_NONBLOCK); libc::fcntl(err_pipe.as_raw_fd(), libc::F_SETFL, libc::O_NONBLOCK); @@ -175,6 +176,7 @@ mod imp { let mut out = Vec::new(); let mut err = Vec::new(); + // FIXME(#139616): justify why this is sound. let mut fds: [libc::pollfd; 2] = unsafe { mem::zeroed() }; fds[0].fd = out_pipe.as_raw_fd(); fds[0].events = libc::POLLIN; @@ -185,6 +187,7 @@ mod imp { while nfds > 0 { // wait for either pipe to become readable using `select` + // FIXME(#139616): justify why this is sound. let r = unsafe { libc::poll(fds.as_mut_ptr(), nfds, -1) }; if r == -1 { let err = io::Error::last_os_error(); @@ -256,6 +259,7 @@ mod imp { port.add_handle(0, &out_pipe)?; port.add_handle(1, &err_pipe)?; + // FIXME(#139616): justify why this is sound. unsafe { let mut out_pipe = Pipe::new(out_pipe, &mut out); let mut err_pipe = Pipe::new(err_pipe, &mut err); @@ -284,18 +288,23 @@ mod imp { } impl<'a> Pipe<'a> { + // FIXME(#139616): document caller contract. unsafe fn new(p: P, dst: &'a mut Vec) -> Pipe<'a> { Pipe { dst, - pipe: NamedPipe::from_raw_handle(p.into_raw_handle()), + // FIXME(#139616): justify why this is sound. + pipe: unsafe { NamedPipe::from_raw_handle(p.into_raw_handle()) }, overlapped: Overlapped::zero(), done: false, } } + // FIXME(#139616): document caller contract. unsafe fn read(&mut self) -> io::Result<()> { - let dst = slice_to_end(self.dst); - match self.pipe.read_overlapped(dst, self.overlapped.raw()) { + // FIXME(#139616): justify why this is sound. + let dst = unsafe { slice_to_end(self.dst) }; + // FIXME(#139616): justify why this is sound. + match unsafe { self.pipe.read_overlapped(dst, self.overlapped.raw()) } { Ok(_) => Ok(()), Err(e) => { if e.raw_os_error() == Some(ERROR_BROKEN_PIPE.0 as i32) { @@ -308,15 +317,18 @@ mod imp { } } + // FIXME(#139616): document caller contract. unsafe fn complete(&mut self, status: &CompletionStatus) { let prev = self.dst.len(); - self.dst.set_len(prev + status.bytes_transferred() as usize); + // FIXME(#139616): justify why this is sound. + unsafe { self.dst.set_len(prev + status.bytes_transferred() as usize) }; if status.bytes_transferred() == 0 { self.done = true; } } } + // FIXME(#139616): document caller contract. unsafe fn slice_to_end(v: &mut Vec) -> &mut [u8] { if v.capacity() == 0 { v.reserve(16); @@ -324,6 +336,12 @@ mod imp { if v.capacity() == v.len() { v.reserve(1); } - slice::from_raw_parts_mut(v.as_mut_ptr().offset(v.len() as isize), v.capacity() - v.len()) + // FIXME(#139616): justify why this is sound. + unsafe { + slice::from_raw_parts_mut( + v.as_mut_ptr().offset(v.len() as isize), + v.capacity() - v.len(), + ) + } } } diff --git a/src/tools/compiletest/src/runtest.rs b/src/tools/compiletest/src/runtest.rs index c8a60b68da8b2..40c9f29375b22 100644 --- a/src/tools/compiletest/src/runtest.rs +++ b/src/tools/compiletest/src/runtest.rs @@ -1,16 +1,16 @@ use std::borrow::Cow; use std::collections::{HashMap, HashSet}; -use std::ffi::{OsStr, OsString}; +use std::ffi::OsString; use std::fs::{self, File, create_dir_all}; use std::hash::{DefaultHasher, Hash, Hasher}; use std::io::prelude::*; use std::io::{self, BufReader}; -use std::path::{Path, PathBuf}; use std::process::{Child, Command, ExitStatus, Output, Stdio}; use std::sync::Arc; use std::{env, iter, str}; -use anyhow::Context; +use build_helper::fs::remove_and_create_dir_all; +use camino::{Utf8Path, Utf8PathBuf}; use colored::Colorize; use regex::{Captures, Regex}; use tracing::*; @@ -23,10 +23,10 @@ use crate::common::{ output_base_dir, output_base_name, output_testname_unique, }; use crate::compute_diff::{DiffLine, make_diff, write_diff, write_filtered_diff}; -use crate::errors::{self, Error, ErrorKind}; +use crate::errors::{Error, ErrorKind, load_errors}; use crate::header::TestProps; use crate::read2::{Truncated, read2_abbreviated}; -use crate::util::{PathBufExt, add_dylib_path, logv, static_regex}; +use crate::util::{Utf8PathBufExt, add_dylib_path, logv, static_regex}; use crate::{ColorConfig, json, stamp_file_path}; mod debugger; @@ -132,7 +132,7 @@ pub fn run(config: Arc, testpaths: &TestPaths, revision: Option<&str>) { // We're going to be dumping a lot of info. Start on a new line. print!("\n\n"); } - debug!("running {:?}", testpaths.file.display()); + debug!("running {}", testpaths.file); let mut props = TestProps::from_file(&testpaths.file, revision, &config); // For non-incremental (i.e. regular UI) tests, the incremental directory @@ -143,11 +143,11 @@ pub fn run(config: Arc, testpaths: &TestPaths, revision: Option<&str>) { } let cx = TestCx { config: &config, props: &props, testpaths, revision }; - create_dir_all(&cx.output_base_dir()) - .with_context(|| { - format!("failed to create output base directory {}", cx.output_base_dir().display()) - }) - .unwrap(); + + if let Err(e) = create_dir_all(&cx.output_base_dir()) { + panic!("failed to create output base directory {}: {e}", cx.output_base_dir()); + } + if props.incremental { cx.init_incremental_test(); } @@ -178,6 +178,7 @@ pub fn compute_stamp_hash(config: &Config) -> String { let mut hash = DefaultHasher::new(); config.stage_id.hash(&mut hash); config.run.hash(&mut hash); + config.edition.hash(&mut hash); match config.debugger { Some(Debugger::Cdb) => { @@ -207,11 +208,6 @@ pub fn compute_stamp_hash(config: &Config) -> String { format!("{:x}", hash.finish()) } -fn remove_and_create_dir_all(path: &Path) { - let _ = fs::remove_dir_all(path); - fs::create_dir_all(path).unwrap(); -} - #[derive(Copy, Clone, Debug)] struct TestCx<'test> { config: &'test Config, @@ -423,7 +419,7 @@ impl<'test> TestCx<'test> { let aux_dir = self.aux_output_dir_name(); let input: &str = match read_from { ReadFrom::Stdin(_) => "-", - ReadFrom::Path => self.testpaths.file.to_str().unwrap(), + ReadFrom::Path => self.testpaths.file.as_str(), }; let mut rustc = Command::new(&self.config.rustc_path); @@ -446,8 +442,8 @@ impl<'test> TestCx<'test> { self.compose_and_run( rustc, - self.config.compile_lib_path.to_str().unwrap(), - Some(aux_dir.to_str().unwrap()), + self.config.compile_lib_path.as_path(), + Some(aux_dir.as_path()), src, ) } @@ -522,7 +518,9 @@ impl<'test> TestCx<'test> { let mut rustc = Command::new(&self.config.rustc_path); let out_dir = self.output_base_name().with_extension("pretty-out"); - remove_and_create_dir_all(&out_dir); + remove_and_create_dir_all(&out_dir).unwrap_or_else(|e| { + panic!("failed to remove and recreate output directory `{out_dir}`: {e}") + }); let target = if self.props.force_host { &*self.config.host } else { &*self.config.target }; @@ -579,26 +577,9 @@ impl<'test> TestCx<'test> { } } - fn check_all_error_patterns( - &self, - output_to_check: &str, - proc_res: &ProcRes, - pm: Option, - ) { - if self.props.error_patterns.is_empty() && self.props.regex_error_patterns.is_empty() { - if pm.is_some() { - // FIXME(#65865) - return; - } else { - self.fatal(&format!( - "no error pattern specified in {:?}", - self.testpaths.file.display() - )); - } - } - + /// Check `error-pattern` and `regex-error-pattern` directives. + fn check_all_error_patterns(&self, output_to_check: &str, proc_res: &ProcRes) { let mut missing_patterns: Vec = Vec::new(); - self.check_error_patterns(output_to_check, &mut missing_patterns); self.check_regex_error_patterns(output_to_check, proc_res, &mut missing_patterns); @@ -675,14 +656,14 @@ impl<'test> TestCx<'test> { } } - fn check_expected_errors(&self, expected_errors: Vec, proc_res: &ProcRes) { + /// Check `//~ KIND message` annotations. + fn check_expected_errors(&self, proc_res: &ProcRes) { + let expected_errors = load_errors(&self.testpaths.file, self.revision); debug!( "check_expected_errors: expected_errors={:?} proc_res.status={:?}", expected_errors, proc_res.status ); - if proc_res.status.success() - && expected_errors.iter().any(|x| x.kind == Some(ErrorKind::Error)) - { + if proc_res.status.success() && expected_errors.iter().any(|x| x.kind == ErrorKind::Error) { self.fatal_proc_rec("process did not return an error status", proc_res); } @@ -697,39 +678,51 @@ impl<'test> TestCx<'test> { } // On Windows, translate all '\' path separators to '/' - let file_name = format!("{}", self.testpaths.file.display()).replace(r"\", "/"); + let file_name = self.testpaths.file.to_string().replace(r"\", "/"); // On Windows, keep all '\' path separators to match the paths reported in the JSON output // from the compiler let diagnostic_file_name = if self.props.remap_src_base { - let mut p = PathBuf::from(FAKE_SRC_BASE); + let mut p = Utf8PathBuf::from(FAKE_SRC_BASE); p.push(&self.testpaths.relative_dir); p.push(self.testpaths.file.file_name().unwrap()); - p.display().to_string() + p.to_string() } else { - self.testpaths.file.display().to_string() + self.testpaths.file.to_string() }; - // If the testcase being checked contains at least one expected "help" - // message, then we'll ensure that all "help" messages are expected. - // Otherwise, all "help" messages reported by the compiler will be ignored. - // This logic also applies to "note" messages. - let expect_help = expected_errors.iter().any(|ee| ee.kind == Some(ErrorKind::Help)); - let expect_note = expected_errors.iter().any(|ee| ee.kind == Some(ErrorKind::Note)); + // Errors and warnings are always expected, other diagnostics are only expected + // if one of them actually occurs in the test. + let expected_kinds: HashSet<_> = [ErrorKind::Error, ErrorKind::Warning] + .into_iter() + .chain(expected_errors.iter().map(|e| e.kind)) + .collect(); // Parse the JSON output from the compiler and extract out the messages. - let actual_errors = json::parse_output(&diagnostic_file_name, &proc_res.stderr, proc_res); + let actual_errors = json::parse_output(&diagnostic_file_name, &self.get_output(proc_res)) + .into_iter() + .map(|e| Error { msg: self.normalize_output(&e.msg, &[]), ..e }); + let mut unexpected = Vec::new(); let mut found = vec![false; expected_errors.len()]; - for mut actual_error in actual_errors { - actual_error.msg = self.normalize_output(&actual_error.msg, &[]); + for actual_error in actual_errors { + for pattern in &self.props.error_patterns { + let pattern = pattern.trim(); + if actual_error.msg.contains(pattern) { + let q = if actual_error.line_num.is_none() { "?" } else { "" }; + self.fatal(&format!( + "error pattern '{pattern}' is found in structured \ + diagnostics, use `//~{q} {} {pattern}` instead", + actual_error.kind, + )); + } + } let opt_index = expected_errors.iter().enumerate().position(|(index, expected_error)| { !found[index] && actual_error.line_num == expected_error.line_num - && (expected_error.kind.is_none() - || actual_error.kind == expected_error.kind) + && actual_error.kind == expected_error.kind && actual_error.msg.contains(&expected_error.msg) }); @@ -741,17 +734,15 @@ impl<'test> TestCx<'test> { } None => { - // If the test is a known bug, don't require that the error is annotated - if self.is_unexpected_compiler_message(&actual_error, expect_help, expect_note) + if actual_error.require_annotation + && expected_kinds.contains(&actual_error.kind) + && !self.props.dont_require_annotations.contains(&actual_error.kind) { self.error(&format!( "{}:{}: unexpected {}: '{}'", file_name, actual_error.line_num_str(), - actual_error - .kind - .as_ref() - .map_or(String::from("message"), |k| k.to_string()), + actual_error.kind, actual_error.msg )); unexpected.push(actual_error); @@ -768,7 +759,7 @@ impl<'test> TestCx<'test> { "{}:{}: expected {} not found: {}", file_name, expected_error.line_num_str(), - expected_error.kind.as_ref().map_or("message".into(), |k| k.to_string()), + expected_error.kind, expected_error.msg )); not_found.push(expected_error); @@ -800,25 +791,6 @@ impl<'test> TestCx<'test> { } } - /// Returns `true` if we should report an error about `actual_error`, - /// which did not match any of the expected error. We always require - /// errors/warnings to be explicitly listed, but only require - /// helps/notes if there are explicit helps/notes given. - fn is_unexpected_compiler_message( - &self, - actual_error: &Error, - expect_help: bool, - expect_note: bool, - ) -> bool { - !actual_error.msg.is_empty() - && match actual_error.kind { - Some(ErrorKind::Help) => expect_help, - Some(ErrorKind::Note) => expect_note, - Some(ErrorKind::Error) | Some(ErrorKind::Warning) => true, - Some(ErrorKind::Suggestion) | None => false, - } - } - fn should_emit_metadata(&self, pm: Option) -> Emit { match (pm, self.props.fail_mode, self.config.mode) { (Some(PassMode::Check), ..) | (_, Some(FailMode::Check), Ui) => Emit::Metadata, @@ -889,7 +861,7 @@ impl<'test> TestCx<'test> { /// `root_out_dir` and `root_testpaths` refer to the parameters of the actual test being run. /// Auxiliaries, no matter how deep, have the same root_out_dir and root_testpaths. - fn document(&self, root_out_dir: &Path, root_testpaths: &TestPaths) -> ProcRes { + fn document(&self, root_out_dir: &Utf8Path, root_testpaths: &TestPaths) -> ProcRes { if self.props.build_aux_docs { for rel_ab in &self.props.aux.builds { let aux_testpaths = self.compute_aux_test_paths(root_testpaths, rel_ab); @@ -918,13 +890,13 @@ impl<'test> TestCx<'test> { // actual --out-dir given to the auxiliary or test, as opposed to the root out dir for the entire // test - let out_dir: Cow<'_, Path> = if self.props.unique_doc_out_dir { + let out_dir: Cow<'_, Utf8Path> = if self.props.unique_doc_out_dir { let file_name = self.testpaths.file.file_stem().expect("file name should not be empty"); - let out_dir = PathBuf::from_iter([ + let out_dir = Utf8PathBuf::from_iter([ root_out_dir, - Path::new("docs"), - Path::new(file_name), - Path::new("doc"), + Utf8Path::new("docs"), + Utf8Path::new(file_name), + Utf8Path::new("doc"), ]); create_dir_all(&out_dir).unwrap(); Cow::Owned(out_dir) @@ -937,7 +909,7 @@ impl<'test> TestCx<'test> { rustdoc.current_dir(current_dir); rustdoc .arg("-L") - .arg(self.config.run_lib_path.to_str().unwrap()) + .arg(self.config.run_lib_path.as_path()) .arg("-L") .arg(aux_dir) .arg("-o") @@ -971,16 +943,16 @@ impl<'test> TestCx<'test> { delete_after_success: bool, ) -> ProcRes { let prepare_env = |cmd: &mut Command| { - for key in &self.props.unset_exec_env { - cmd.env_remove(key); - } - for (key, val) in &self.props.exec_env { cmd.env(key, val); } for (key, val) in env_extra { cmd.env(key, val); } + + for key in &self.props.unset_exec_env { + cmd.env_remove(key); + } }; let proc_res = match &*self.config.target { @@ -1023,8 +995,8 @@ impl<'test> TestCx<'test> { self.compose_and_run( test_client, - self.config.run_lib_path.to_str().unwrap(), - Some(aux_dir.to_str().unwrap()), + self.config.run_lib_path.as_path(), + Some(aux_dir.as_path()), None, ) } @@ -1038,8 +1010,8 @@ impl<'test> TestCx<'test> { self.compose_and_run( wr_run, - self.config.run_lib_path.to_str().unwrap(), - Some(aux_dir.to_str().unwrap()), + self.config.run_lib_path.as_path(), + Some(aux_dir.as_path()), None, ) } @@ -1053,8 +1025,8 @@ impl<'test> TestCx<'test> { self.compose_and_run( program, - self.config.run_lib_path.to_str().unwrap(), - Some(aux_dir.to_str().unwrap()), + self.config.run_lib_path.as_path(), + Some(aux_dir.as_path()), None, ) } @@ -1075,7 +1047,7 @@ impl<'test> TestCx<'test> { let test_ab = of.file.parent().expect("test file path has no parent").join("auxiliary").join(rel_ab); if !test_ab.exists() { - self.fatal(&format!("aux-build `{}` source not found", test_ab.display())) + self.fatal(&format!("aux-build `{}` source not found", test_ab)) } TestPaths { @@ -1112,23 +1084,29 @@ impl<'test> TestCx<'test> { || !self.props.aux.proc_macros.is_empty() } - fn aux_output_dir(&self) -> PathBuf { + fn aux_output_dir(&self) -> Utf8PathBuf { let aux_dir = self.aux_output_dir_name(); if !self.props.aux.builds.is_empty() { - remove_and_create_dir_all(&aux_dir); + remove_and_create_dir_all(&aux_dir).unwrap_or_else(|e| { + panic!("failed to remove and recreate output directory `{aux_dir}`: {e}") + }); } if !self.props.aux.bins.is_empty() { let aux_bin_dir = self.aux_bin_output_dir_name(); - remove_and_create_dir_all(&aux_dir); - remove_and_create_dir_all(&aux_bin_dir); + remove_and_create_dir_all(&aux_dir).unwrap_or_else(|e| { + panic!("failed to remove and recreate output directory `{aux_dir}`: {e}") + }); + remove_and_create_dir_all(&aux_bin_dir).unwrap_or_else(|e| { + panic!("failed to remove and recreate output directory `{aux_bin_dir}`: {e}") + }); } aux_dir } - fn build_all_auxiliary(&self, of: &TestPaths, aux_dir: &Path, rustc: &mut Command) { + fn build_all_auxiliary(&self, of: &TestPaths, aux_dir: &Utf8Path, rustc: &mut Command) { for rel_ab in &self.props.aux.builds { self.build_auxiliary(of, rel_ab, &aux_dir, None); } @@ -1148,12 +1126,7 @@ impl<'test> TestCx<'test> { |rustc: &mut Command, aux_name: &str, aux_path: &str, aux_type: AuxType| { let lib_name = get_lib_name(&path_to_crate_name(aux_path), aux_type); if let Some(lib_name) = lib_name { - rustc.arg("--extern").arg(format!( - "{}={}/{}", - aux_name, - aux_dir.display(), - lib_name - )); + rustc.arg("--extern").arg(format!("{}={}/{}", aux_name, aux_dir, lib_name)); } }; @@ -1174,7 +1147,7 @@ impl<'test> TestCx<'test> { let aux_type = self.build_auxiliary(of, aux_file, aux_dir, None); if let Some(lib_name) = get_lib_name(aux_file.trim_end_matches(".rs"), aux_type) { let lib_path = aux_dir.join(&lib_name); - rustc.arg(format!("-Zcodegen-backend={}", lib_path.display())); + rustc.arg(format!("-Zcodegen-backend={}", lib_path)); } } } @@ -1190,7 +1163,7 @@ impl<'test> TestCx<'test> { if self.props.add_core_stubs { let minicore_path = self.build_minicore(); rustc.arg("--extern"); - rustc.arg(&format!("minicore={}", minicore_path.to_str().unwrap())); + rustc.arg(&format!("minicore={}", minicore_path)); } let aux_dir = self.aux_output_dir(); @@ -1200,15 +1173,15 @@ impl<'test> TestCx<'test> { self.props.unset_rustc_env.iter().fold(&mut rustc, Command::env_remove); self.compose_and_run( rustc, - self.config.compile_lib_path.to_str().unwrap(), - Some(aux_dir.to_str().unwrap()), + self.config.compile_lib_path.as_path(), + Some(aux_dir.as_path()), input, ) } /// Builds `minicore`. Returns the path to the minicore rlib within the base test output /// directory. - fn build_minicore(&self) -> PathBuf { + fn build_minicore(&self) -> Utf8PathBuf { let output_file_path = self.output_base_dir().join("libminicore.rlib"); let mut rustc = self.make_compile_args( &self.config.minicore_path, @@ -1222,14 +1195,10 @@ impl<'test> TestCx<'test> { rustc.args(&["--crate-type", "rlib"]); rustc.arg("-Cpanic=abort"); - let res = - self.compose_and_run(rustc, self.config.compile_lib_path.to_str().unwrap(), None, None); + let res = self.compose_and_run(rustc, self.config.compile_lib_path.as_path(), None, None); if !res.status.success() { self.fatal_proc_rec( - &format!( - "auxiliary build of {:?} failed to compile: ", - self.config.minicore_path.display() - ), + &format!("auxiliary build of {} failed to compile: ", self.config.minicore_path), &res, ); } @@ -1244,7 +1213,7 @@ impl<'test> TestCx<'test> { &self, of: &TestPaths, source_path: &str, - aux_dir: &Path, + aux_dir: &Utf8Path, aux_type: Option, ) -> AuxType { let aux_testpaths = self.compute_aux_test_paths(of, source_path); @@ -1335,16 +1304,13 @@ impl<'test> TestCx<'test> { let auxres = aux_cx.compose_and_run( aux_rustc, - aux_cx.config.compile_lib_path.to_str().unwrap(), - Some(aux_dir.to_str().unwrap()), + aux_cx.config.compile_lib_path.as_path(), + Some(aux_dir.as_path()), None, ); if !auxres.status.success() { self.fatal_proc_rec( - &format!( - "auxiliary build of {:?} failed to compile: ", - aux_testpaths.file.display() - ), + &format!("auxiliary build of {} failed to compile: ", aux_testpaths.file), &auxres, ); } @@ -1353,8 +1319,8 @@ impl<'test> TestCx<'test> { fn read2_abbreviated(&self, child: Child) -> (Output, Truncated) { let mut filter_paths_from_len = Vec::new(); - let mut add_path = |path: &Path| { - let path = path.display().to_string(); + let mut add_path = |path: &Utf8Path| { + let path = path.to_string(); let windows = path.replace("\\", "\\\\"); if windows != path { filter_paths_from_len.push(windows); @@ -1376,8 +1342,8 @@ impl<'test> TestCx<'test> { fn compose_and_run( &self, mut command: Command, - lib_path: &str, - aux_path: Option<&str>, + lib_path: &Utf8Path, + aux_path: Option<&Utf8Path>, input: Option, ) -> ProcRes { let cmdline = { @@ -1422,17 +1388,9 @@ impl<'test> TestCx<'test> { matches!(self.config.suite.as_str(), "rustdoc-ui" | "rustdoc-js" | "rustdoc-json") } - fn get_mir_dump_dir(&self) -> PathBuf { - let mut mir_dump_dir = self.config.build_test_suite_root.clone(); - debug!("input_file: {:?}", self.testpaths.file); - mir_dump_dir.push(&self.testpaths.relative_dir); - mir_dump_dir.push(self.testpaths.file.file_stem().unwrap()); - mir_dump_dir - } - fn make_compile_args( &self, - input_file: &Path, + input_file: &Utf8Path, output_file: TargetLocation, emit: Emit, allow_unused: AllowUnused, @@ -1473,7 +1431,7 @@ impl<'test> TestCx<'test> { // Similarly, vendored sources shouldn't be shown when running from a dist tarball. rustc.arg("-Z").arg(format!( "ignore-directory-in-diagnostics-source-blocks={}", - self.config.src_root.join("vendor").to_str().unwrap(), + self.config.src_root.join("vendor"), )); // Optionally prevent default --sysroot if specified in test compile-flags. @@ -1497,7 +1455,7 @@ impl<'test> TestCx<'test> { if !is_rustdoc { if let Some(ref incremental_dir) = self.props.incremental_dir { - rustc.args(&["-C", &format!("incremental={}", incremental_dir.display())]); + rustc.args(&["-C", &format!("incremental={}", incremental_dir)]); rustc.args(&["-Z", "incremental-verify-ich"]); } @@ -1538,10 +1496,9 @@ impl<'test> TestCx<'test> { } let set_mir_dump_dir = |rustc: &mut Command| { - let mir_dump_dir = self.get_mir_dump_dir(); - remove_and_create_dir_all(&mir_dump_dir); + let mir_dump_dir = self.output_base_dir(); let mut dir_opt = "-Zdump-mir-dir=".to_string(); - dir_opt.push_str(mir_dump_dir.to_str().unwrap()); + dir_opt.push_str(mir_dump_dir.as_str()); debug!("dir_opt: {:?}", dir_opt); rustc.arg(dir_opt); }; @@ -1634,8 +1591,7 @@ impl<'test> TestCx<'test> { if self.props.remap_src_base { rustc.arg(format!( "--remap-path-prefix={}={}", - self.config.src_test_suite_root.to_str().unwrap(), - FAKE_SRC_BASE, + self.config.src_test_suite_root, FAKE_SRC_BASE, )); } @@ -1747,18 +1703,22 @@ impl<'test> TestCx<'test> { rustc.args(&self.props.compile_flags); // FIXME(jieyouxu): we should report a fatal error or warning if user wrote `-Cpanic=` with - // something that's not `abort`, however, by moving this last we should override previous - // `-Cpanic=`s + // something that's not `abort` and `-Cforce-unwind-tables` with a value that is not `yes`, + // however, by moving this last we should override previous `-Cpanic`s and + // `-Cforce-unwind-tables`s. Note that checking here is very fragile, because we'd have to + // account for all possible compile flag splittings (they have some... intricacies and are + // not yet normalized). // // `minicore` requires `#![no_std]` and `#![no_core]`, which means no unwinding panics. if self.props.add_core_stubs { rustc.arg("-Cpanic=abort"); + rustc.arg("-Cforce-unwind-tables=yes"); } rustc } - fn make_exe_name(&self) -> PathBuf { + fn make_exe_name(&self) -> Utf8PathBuf { // Using a single letter here to keep the path length down for // Windows. Some test names get very long. rustc creates `rcgu` // files with the module name appended to it which can more than @@ -1809,7 +1769,7 @@ impl<'test> TestCx<'test> { } } - fn make_cmdline(&self, command: &Command, libpath: &str) -> String { + fn make_cmdline(&self, command: &Command, libpath: &Utf8Path) -> String { use crate::util; // Linux and mac don't require adjusting the library search path @@ -1822,7 +1782,7 @@ impl<'test> TestCx<'test> { format!("{}=\"{}\"", util::lib_path_env_var(), util::make_new_path(path)) } - format!("{} {:?}", lib_path_cmd_prefix(libpath), command) + format!("{} {:?}", lib_path_cmd_prefix(libpath.as_str()), command) } } @@ -1836,20 +1796,19 @@ impl<'test> TestCx<'test> { return; } - let path = Path::new(proc_name); + let path = Utf8Path::new(proc_name); let proc_name = if path.file_stem().is_some_and(|p| p == "rmake") { - OsString::from_iter( + String::from_iter( path.parent() .unwrap() .file_name() .into_iter() - .chain(Some(OsStr::new("/"))) + .chain(Some("/")) .chain(path.file_name()), ) } else { path.file_name().unwrap().into() }; - let proc_name = proc_name.to_string_lossy(); println!("------{proc_name} stdout------------------------------"); println!("{}", out); println!("------{proc_name} stderr------------------------------"); @@ -1859,18 +1818,18 @@ impl<'test> TestCx<'test> { fn dump_output_file(&self, out: &str, extension: &str) { let outfile = self.make_out_name(extension); - fs::write(&outfile, out).unwrap(); + fs::write(outfile.as_std_path(), out).unwrap(); } /// Creates a filename for output with the given extension. /// E.g., `/.../testname.revision.mode/testname.extension`. - fn make_out_name(&self, extension: &str) -> PathBuf { + fn make_out_name(&self, extension: &str) -> Utf8PathBuf { self.output_base_name().with_extension(extension) } /// Gets the directory where auxiliary files are written. /// E.g., `/.../testname.revision.mode/auxiliary/`. - fn aux_output_dir_name(&self) -> PathBuf { + fn aux_output_dir_name(&self) -> Utf8PathBuf { self.output_base_dir() .join("auxiliary") .with_extra_extension(self.config.mode.aux_dir_disambiguator()) @@ -1878,12 +1837,12 @@ impl<'test> TestCx<'test> { /// Gets the directory where auxiliary binaries are written. /// E.g., `/.../testname.revision.mode/auxiliary/bin`. - fn aux_bin_output_dir_name(&self) -> PathBuf { + fn aux_bin_output_dir_name(&self) -> Utf8PathBuf { self.aux_output_dir_name().join("bin") } /// Generates a unique name for the test, such as `testname.revision.mode`. - fn output_testname_unique(&self) -> PathBuf { + fn output_testname_unique(&self) -> Utf8PathBuf { output_testname_unique(self.config, self.testpaths, self.safe_revision()) } @@ -1896,14 +1855,14 @@ impl<'test> TestCx<'test> { /// Gets the absolute path to the directory where all output for the given /// test/revision should reside. /// E.g., `/path/to/build/host-tuple/test/ui/relative/testname.revision.mode/`. - fn output_base_dir(&self) -> PathBuf { + fn output_base_dir(&self) -> Utf8PathBuf { output_base_dir(self.config, self.testpaths, self.safe_revision()) } /// Gets the absolute path to the base filename used as output for the given /// test/revision. /// E.g., `/.../relative/testname.revision.mode/testname`. - fn output_base_name(&self) -> PathBuf { + fn output_base_name(&self) -> Utf8PathBuf { output_base_name(self.config, self.testpaths, self.safe_revision()) } @@ -1938,7 +1897,7 @@ impl<'test> TestCx<'test> { // codegen tests (using FileCheck) - fn compile_test_and_save_ir(&self) -> (ProcRes, PathBuf) { + fn compile_test_and_save_ir(&self) -> (ProcRes, Utf8PathBuf) { let output_path = self.output_base_name().with_extension("ll"); let input_file = &self.testpaths.file; let rustc = self.make_compile_args( @@ -1954,7 +1913,7 @@ impl<'test> TestCx<'test> { (proc_res, output_path) } - fn verify_with_filecheck(&self, output: &Path) -> ProcRes { + fn verify_with_filecheck(&self, output: &Utf8Path) -> ProcRes { let mut filecheck = Command::new(self.config.llvm_filecheck.as_ref().unwrap()); filecheck.arg("--input-file").arg(output).arg(&self.testpaths.file); @@ -1983,7 +1942,8 @@ impl<'test> TestCx<'test> { // Add custom flags supplied by the `filecheck-flags:` test header. filecheck.args(&self.props.filecheck_flags); - self.compose_and_run(filecheck, "", None, None) + // FIXME(jieyouxu): don't pass an empty Path + self.compose_and_run(filecheck, Utf8Path::new(""), None, None) } fn charset() -> &'static str { @@ -1991,7 +1951,7 @@ impl<'test> TestCx<'test> { if cfg!(target_os = "freebsd") { "ISO-8859-1" } else { "UTF-8" } } - fn compare_to_default_rustdoc(&mut self, out_dir: &Path) { + fn compare_to_default_rustdoc(&mut self, out_dir: &Utf8Path) { if !self.config.has_html_tidy { return; } @@ -2000,7 +1960,9 @@ impl<'test> TestCx<'test> { let suffix = self.safe_revision().map_or("nightly".into(), |path| path.to_owned() + "-nightly"); let compare_dir = output_base_dir(self.config, self.testpaths, Some(&suffix)); - remove_and_create_dir_all(&compare_dir); + remove_and_create_dir_all(&compare_dir).unwrap_or_else(|e| { + panic!("failed to remove and recreate output directory `{compare_dir}`: {e}") + }); // We need to create a new struct for the lifetimes on `config` to work. let new_rustdoc = TestCx { @@ -2143,12 +2105,8 @@ impl<'test> TestCx<'test> { }; } - fn get_lines>( - &self, - path: &P, - mut other_files: Option<&mut Vec>, - ) -> Vec { - let content = fs::read_to_string(&path).unwrap(); + fn get_lines(&self, path: &Utf8Path, mut other_files: Option<&mut Vec>) -> Vec { + let content = fs::read_to_string(path.as_std_path()).unwrap(); let mut ignore = false; content .lines() @@ -2194,8 +2152,8 @@ impl<'test> TestCx<'test> { for other_file in other_files { let mut path = self.testpaths.file.clone(); path.set_file_name(&format!("{}.rs", other_file)); - let path = fs::canonicalize(path).expect("failed to canonicalize"); - let normalized = path.to_str().unwrap().replace('\\', "/"); + let path = path.canonicalize_utf8().expect("failed to canonicalize"); + let normalized = path.as_str().replace('\\', "/"); files.insert(normalized, self.get_lines(&path, None)); } @@ -2379,26 +2337,24 @@ impl<'test> TestCx<'test> { let mut normalized = output.to_string(); - let mut normalize_path = |from: &Path, to: &str| { - let mut from = from.display().to_string(); - if json { - from = from.replace("\\", "\\\\"); - } - normalized = normalized.replace(&from, to); + let mut normalize_path = |from: &Utf8Path, to: &str| { + let from = if json { &from.as_str().replace("\\", "\\\\") } else { from.as_str() }; + + normalized = normalized.replace(from, to); }; let parent_dir = self.testpaths.file.parent().unwrap(); normalize_path(parent_dir, "$DIR"); if self.props.remap_src_base { - let mut remapped_parent_dir = PathBuf::from(FAKE_SRC_BASE); - if self.testpaths.relative_dir != Path::new("") { + let mut remapped_parent_dir = Utf8PathBuf::from(FAKE_SRC_BASE); + if self.testpaths.relative_dir != Utf8Path::new("") { remapped_parent_dir.push(&self.testpaths.relative_dir); } normalize_path(&remapped_parent_dir, "$DIR"); } - let base_dir = Path::new("/rustc/FAKE_PREFIX"); + let base_dir = Utf8Path::new("/rustc/FAKE_PREFIX"); // Fake paths into the libstd/libcore normalize_path(&base_dir.join("library"), "$SRC_DIR"); // `ui-fulldeps` tests can show paths to the compiler source when testing macros from @@ -2408,19 +2364,26 @@ impl<'test> TestCx<'test> { // Real paths into the libstd/libcore let rust_src_dir = &self.config.sysroot_base.join("lib/rustlib/src/rust"); - rust_src_dir.try_exists().expect(&*format!("{} should exists", rust_src_dir.display())); - let rust_src_dir = rust_src_dir.read_link().unwrap_or(rust_src_dir.to_path_buf()); + rust_src_dir.try_exists().expect(&*format!("{} should exists", rust_src_dir)); + let rust_src_dir = rust_src_dir.read_link_utf8().unwrap_or(rust_src_dir.to_path_buf()); normalize_path(&rust_src_dir.join("library"), "$SRC_DIR_REAL"); // eg. // /home/user/rust/build/x86_64-unknown-linux-gnu/test/ui//$name.$revision.$mode/ normalize_path(&self.output_base_dir(), "$TEST_BUILD_DIR"); + // Same as above, but with a canonicalized path. + // This is required because some tests print canonical paths inside test build directory, + // so if the build directory is a symlink, normalization doesn't help. + // + // NOTE: There are also tests which print the non-canonical name, so we need both this and + // the above normalizations. + normalize_path(&self.output_base_dir().canonicalize_utf8().unwrap(), "$TEST_BUILD_DIR"); // eg. /home/user/rust/build normalize_path(&self.config.build_root, "$BUILD_DIR"); if json { // escaped newlines in json strings should be readable - // in the stderr files. There's no point int being correct, + // in the stderr files. There's no point in being correct, // since only humans process the stderr files. // Thus we just turn escaped newlines back into newlines. normalized = normalized.replace("\\n", "\n"); @@ -2549,7 +2512,7 @@ impl<'test> TestCx<'test> { .replace("\r\n", "\n") } - fn expected_output_path(&self, kind: &str) -> PathBuf { + fn expected_output_path(&self, kind: &str) -> Utf8PathBuf { let mut path = expected_output_path(&self.testpaths, self.revision, &self.config.compare_mode, kind); @@ -2578,19 +2541,18 @@ impl<'test> TestCx<'test> { } } - fn load_expected_output_from_path(&self, path: &Path) -> Result { - fs::read_to_string(path).map_err(|err| { - format!("failed to load expected output from `{}`: {}", path.display(), err) - }) + fn load_expected_output_from_path(&self, path: &Utf8Path) -> Result { + fs::read_to_string(path) + .map_err(|err| format!("failed to load expected output from `{}`: {}", path, err)) } - fn delete_file(&self, file: &Path) { + fn delete_file(&self, file: &Utf8Path) { if !file.exists() { // Deleting a nonexistent file would error. return; } - if let Err(e) = fs::remove_file(file) { - self.fatal(&format!("failed to delete `{}`: {}", file.display(), e,)); + if let Err(e) = fs::remove_file(file.as_std_path()) { + self.fatal(&format!("failed to delete `{}`: {}", file, e,)); } } @@ -2696,8 +2658,8 @@ impl<'test> TestCx<'test> { fn show_diff( &self, stream: &str, - expected_path: &Path, - actual_path: &Path, + expected_path: &Utf8Path, + actual_path: &Utf8Path, expected: &str, actual: &str, actual_unnormalized: &str, @@ -2836,7 +2798,7 @@ impl<'test> TestCx<'test> { fs::create_dir_all(&incremental_dir).unwrap(); if self.config.verbose { - println!("init_incremental_test: incremental_dir={}", incremental_dir.display()); + println!("init_incremental_test: incremental_dir={incremental_dir}"); } } } @@ -2894,8 +2856,8 @@ impl ProcRes { #[derive(Debug)] enum TargetLocation { - ThisFile(PathBuf), - ThisDirectory(PathBuf), + ThisFile(Utf8PathBuf), + ThisDirectory(Utf8PathBuf), } enum AllowUnused { diff --git a/src/tools/compiletest/src/runtest/assembly.rs b/src/tools/compiletest/src/runtest/assembly.rs index 89d7de58c203c..91d4f620f7194 100644 --- a/src/tools/compiletest/src/runtest/assembly.rs +++ b/src/tools/compiletest/src/runtest/assembly.rs @@ -1,4 +1,4 @@ -use std::path::PathBuf; +use camino::Utf8PathBuf; use super::{AllowUnused, Emit, LinkToAux, ProcRes, TargetLocation, TestCx}; @@ -19,7 +19,7 @@ impl TestCx<'_> { } } - fn compile_test_and_save_assembly(&self) -> (ProcRes, PathBuf) { + fn compile_test_and_save_assembly(&self) -> (ProcRes, Utf8PathBuf) { // This works with both `--emit asm` (as default output name for the assembly) // and `ptx-linker` because the latter can write output at requested location. let output_path = self.output_base_name().with_extension("s"); diff --git a/src/tools/compiletest/src/runtest/codegen_units.rs b/src/tools/compiletest/src/runtest/codegen_units.rs index 6c866cbef21ab..8dfa8d18d1a0b 100644 --- a/src/tools/compiletest/src/runtest/codegen_units.rs +++ b/src/tools/compiletest/src/runtest/codegen_units.rs @@ -26,9 +26,7 @@ impl TestCx<'_> { .stdout .lines() .filter(|line| line.starts_with(PREFIX)) - .map(|line| { - line.replace(&self.testpaths.file.display().to_string(), "TEST_PATH").to_string() - }) + .map(|line| line.replace(&self.testpaths.file.as_str(), "TEST_PATH").to_string()) .map(|line| str_to_mono_item(&line, true)) .collect(); diff --git a/src/tools/compiletest/src/runtest/coverage.rs b/src/tools/compiletest/src/runtest/coverage.rs index 56fc5baf5f248..41cfeaee35ffb 100644 --- a/src/tools/compiletest/src/runtest/coverage.rs +++ b/src/tools/compiletest/src/runtest/coverage.rs @@ -1,9 +1,9 @@ //! Code specific to the coverage test suites. use std::ffi::OsStr; -use std::path::{Path, PathBuf}; use std::process::Command; +use camino::{Utf8Path, Utf8PathBuf}; use glob::glob; use crate::common::{UI_COVERAGE, UI_COVERAGE_MAP}; @@ -11,7 +11,7 @@ use crate::runtest::{Emit, ProcRes, TestCx, WillExecute}; use crate::util::static_regex; impl<'test> TestCx<'test> { - fn coverage_dump_path(&self) -> &Path { + fn coverage_dump_path(&self) -> &Utf8Path { self.config .coverage_dump_path .as_deref() @@ -79,10 +79,8 @@ impl<'test> TestCx<'test> { std::fs::remove_file(&profdata_path).unwrap(); } - let proc_res = self.exec_compiled_test_general( - &[("LLVM_PROFILE_FILE", &profraw_path.to_str().unwrap())], - false, - ); + let proc_res = + self.exec_compiled_test_general(&[("LLVM_PROFILE_FILE", profraw_path.as_str())], false); if self.props.failure_status.is_some() { self.check_correct_failure_status(&proc_res); } else if !proc_res.status.success() { @@ -158,8 +156,8 @@ impl<'test> TestCx<'test> { /// `.profraw` files and doctest executables to the given vectors. fn run_doctests_for_coverage( &self, - profraw_paths: &mut Vec, - bin_paths: &mut Vec, + profraw_paths: &mut Vec, + bin_paths: &mut Vec, ) { // Put .profraw files and doctest executables in dedicated directories, // to make it easier to glob them all later. @@ -204,10 +202,9 @@ impl<'test> TestCx<'test> { self.fatal_proc_rec("rustdoc --test failed!", &proc_res) } - fn glob_iter(path: impl AsRef) -> impl Iterator { - let path_str = path.as_ref().to_str().unwrap(); - let iter = glob(path_str).unwrap(); - iter.map(Result::unwrap) + fn glob_iter(path: impl AsRef) -> impl Iterator { + let iter = glob(path.as_ref().as_str()).unwrap(); + iter.map(Result::unwrap).map(Utf8PathBuf::try_from).map(Result::unwrap) } // Find all profraw files in the profraw directory. diff --git a/src/tools/compiletest/src/runtest/debugger.rs b/src/tools/compiletest/src/runtest/debugger.rs index d9e5c3fa0d8fa..a4103c5b4a9a4 100644 --- a/src/tools/compiletest/src/runtest/debugger.rs +++ b/src/tools/compiletest/src/runtest/debugger.rs @@ -1,7 +1,8 @@ use std::fmt::Write; use std::fs::File; use std::io::{BufRead, BufReader}; -use std::path::{Path, PathBuf}; + +use camino::{Utf8Path, Utf8PathBuf}; use crate::common::Config; use crate::runtest::ProcRes; @@ -15,11 +16,15 @@ pub(super) struct DebuggerCommands { /// Contains the source line number to check and the line itself check_lines: Vec<(usize, String)>, /// Source file name - file: PathBuf, + file: Utf8PathBuf, } impl DebuggerCommands { - pub fn parse_from(file: &Path, config: &Config, debugger_prefix: &str) -> Result { + pub fn parse_from( + file: &Utf8Path, + config: &Config, + debugger_prefix: &str, + ) -> Result { let command_directive = format!("{debugger_prefix}-command"); let check_directive = format!("{debugger_prefix}-check"); @@ -27,7 +32,7 @@ impl DebuggerCommands { let mut commands = vec![]; let mut check_lines = vec![]; let mut counter = 0; - let reader = BufReader::new(File::open(file).unwrap()); + let reader = BufReader::new(File::open(file.as_std_path()).unwrap()); for (line_no, line) in reader.lines().enumerate() { counter += 1; let line = line.map_err(|e| format!("Error while parsing debugger commands: {}", e))?; @@ -50,7 +55,7 @@ impl DebuggerCommands { } } - Ok(Self { commands, breakpoint_lines, check_lines, file: file.to_owned() }) + Ok(Self { commands, breakpoint_lines, check_lines, file: file.to_path_buf() }) } /// Given debugger output and lines to check, ensure that every line is @@ -81,10 +86,10 @@ impl DebuggerCommands { if missing.is_empty() { Ok(()) } else { - let fname = self.file.file_name().unwrap().to_string_lossy(); + let fname = self.file.file_name().unwrap(); let mut msg = format!( "check directive(s) from `{}` not found in debugger output. errors:", - self.file.display() + self.file ); for (src_lineno, err_line) in missing { diff --git a/src/tools/compiletest/src/runtest/debuginfo.rs b/src/tools/compiletest/src/runtest/debuginfo.rs index 170b8a8099687..31240dff9a196 100644 --- a/src/tools/compiletest/src/runtest/debuginfo.rs +++ b/src/tools/compiletest/src/runtest/debuginfo.rs @@ -1,9 +1,9 @@ use std::ffi::{OsStr, OsString}; use std::fs::File; use std::io::{BufRead, BufReader, Read}; -use std::path::Path; use std::process::{Command, Output, Stdio}; +use camino::Utf8Path; use tracing::debug; use super::debugger::DebuggerCommands; @@ -73,11 +73,11 @@ impl TestCx<'_> { let mut js_extension = self.testpaths.file.clone(); js_extension.set_extension("cdb.js"); if js_extension.exists() { - script_str.push_str(&format!(".scriptload \"{}\"\n", js_extension.to_string_lossy())); + script_str.push_str(&format!(".scriptload \"{}\"\n", js_extension)); } // Set breakpoints on every line that contains the string "#break" - let source_file_name = self.testpaths.file.file_name().unwrap().to_string_lossy(); + let source_file_name = self.testpaths.file.file_name().unwrap(); for line in &dbg_cmds.breakpoint_lines { script_str.push_str(&format!("bp `{}:{}`\n", source_file_name, line)); } @@ -104,7 +104,7 @@ impl TestCx<'_> { let debugger_run_result = self.compose_and_run( cdb, - self.config.run_lib_path.to_str().unwrap(), + self.config.run_lib_path.as_path(), None, // aux_path None, // input ); @@ -151,16 +151,11 @@ impl TestCx<'_> { if is_android_gdb_target(&self.config.target) { cmds = cmds.replace("run", "continue"); - let tool_path = match self.config.android_cross_path.to_str() { - Some(x) => x.to_owned(), - None => self.fatal("cannot find android cross path"), - }; - // write debugger script let mut script_str = String::with_capacity(2048); script_str.push_str(&format!("set charset {}\n", Self::charset())); - script_str.push_str(&format!("set sysroot {}\n", tool_path)); - script_str.push_str(&format!("file {}\n", exe_file.to_str().unwrap())); + script_str.push_str(&format!("set sysroot {}\n", &self.config.android_cross_path)); + script_str.push_str(&format!("file {}\n", exe_file)); script_str.push_str("target remote :5039\n"); script_str.push_str(&format!( "set solib-search-path \ @@ -169,12 +164,8 @@ impl TestCx<'_> { )); for line in &dbg_cmds.breakpoint_lines { script_str.push_str( - format!( - "break {:?}:{}\n", - self.testpaths.file.file_name().unwrap().to_string_lossy(), - *line - ) - .as_str(), + format!("break {}:{}\n", self.testpaths.file.file_name().unwrap(), *line) + .as_str(), ); } script_str.push_str(&cmds); @@ -203,7 +194,7 @@ impl TestCx<'_> { self.config.adb_test_dir.clone(), if self.config.target.contains("aarch64") { "64" } else { "" }, self.config.adb_test_dir.clone(), - exe_file.file_name().unwrap().to_str().unwrap() + exe_file.file_name().unwrap() ); debug!("adb arg: {}", adb_arg); @@ -241,7 +232,8 @@ impl TestCx<'_> { let cmdline = { let mut gdb = Command::new(&format!("{}-gdb", self.config.target)); gdb.args(debugger_opts); - let cmdline = self.make_cmdline(&gdb, ""); + // FIXME(jieyouxu): don't pass an empty Path + let cmdline = self.make_cmdline(&gdb, Utf8Path::new("")); logv(self.config, format!("executing {}", cmdline)); cmdline }; @@ -258,7 +250,6 @@ impl TestCx<'_> { } } else { let rust_pp_module_abs_path = self.config.src_root.join("src").join("etc"); - let rust_pp_module_abs_path = rust_pp_module_abs_path.to_str().unwrap(); // write debugger script let mut script_str = String::with_capacity(2048); script_str.push_str(&format!("set charset {}\n", Self::charset())); @@ -273,17 +264,15 @@ impl TestCx<'_> { // GDB's script auto loading safe path script_str.push_str(&format!( "add-auto-load-safe-path {}\n", - rust_pp_module_abs_path.replace(r"\", r"\\") + rust_pp_module_abs_path.as_str().replace(r"\", r"\\") )); - let output_base_dir = self.output_base_dir().to_str().unwrap().to_owned(); - // Add the directory containing the output binary to // include embedded pretty printers to GDB's script // auto loading safe path script_str.push_str(&format!( "add-auto-load-safe-path {}\n", - output_base_dir.replace(r"\", r"\\") + self.output_base_dir().as_str().replace(r"\", r"\\") )); } } @@ -300,12 +289,13 @@ impl TestCx<'_> { script_str.push_str("set print pretty off\n"); // Add the pretty printer directory to GDB's source-file search path - script_str - .push_str(&format!("directory {}\n", rust_pp_module_abs_path.replace(r"\", r"\\"))); + script_str.push_str(&format!( + "directory {}\n", + rust_pp_module_abs_path.as_str().replace(r"\", r"\\") + )); // Load the target executable - script_str - .push_str(&format!("file {}\n", exe_file.to_str().unwrap().replace(r"\", r"\\"))); + script_str.push_str(&format!("file {}\n", exe_file.as_str().replace(r"\", r"\\"))); // Force GDB to print values in the Rust format. script_str.push_str("set language rust\n"); @@ -314,7 +304,7 @@ impl TestCx<'_> { for line in &dbg_cmds.breakpoint_lines { script_str.push_str(&format!( "break '{}':{}\n", - self.testpaths.file.file_name().unwrap().to_string_lossy(), + self.testpaths.file.file_name().unwrap(), *line )); } @@ -340,7 +330,7 @@ impl TestCx<'_> { gdb.args(debugger_opts).env("PYTHONPATH", pythonpath); debugger_run_result = - self.compose_and_run(gdb, self.config.run_lib_path.to_str().unwrap(), None, None); + self.compose_and_run(gdb, self.config.run_lib_path.as_path(), None, None); } if !debugger_run_result.status.success() { @@ -409,14 +399,14 @@ impl TestCx<'_> { script_str.push_str(&format!( "command script import {}/lldb_lookup.py\n", - rust_pp_module_abs_path.to_str().unwrap() + rust_pp_module_abs_path )); File::open(rust_pp_module_abs_path.join("lldb_commands")) .and_then(|mut file| file.read_to_string(&mut script_str)) .expect("Failed to read lldb_commands"); // Set breakpoints on every line that contains the string "#break" - let source_file_name = self.testpaths.file.file_name().unwrap().to_string_lossy(); + let source_file_name = self.testpaths.file.file_name().unwrap(); for line in &dbg_cmds.breakpoint_lines { script_str.push_str(&format!( "breakpoint set --file '{}' --line {}\n", @@ -450,7 +440,7 @@ impl TestCx<'_> { } } - fn run_lldb(&self, test_executable: &Path, debugger_script: &Path) -> ProcRes { + fn run_lldb(&self, test_executable: &Utf8Path, debugger_script: &Utf8Path) -> ProcRes { // Prepare the lldb_batchmode which executes the debugger script let lldb_script_path = self.config.src_root.join("src/etc/lldb_batchmode.py"); let pythonpath = if let Ok(pp) = std::env::var("PYTHONPATH") { diff --git a/src/tools/compiletest/src/runtest/incremental.rs b/src/tools/compiletest/src/runtest/incremental.rs index ea985866a0522..90cff6bab4dc6 100644 --- a/src/tools/compiletest/src/runtest/incremental.rs +++ b/src/tools/compiletest/src/runtest/incremental.rs @@ -100,16 +100,8 @@ impl TestCx<'_> { self.check_no_compiler_crash(&proc_res, self.props.should_ice); let output_to_check = self.get_output(&proc_res); - let expected_errors = errors::load_errors(&self.testpaths.file, self.revision); - if !expected_errors.is_empty() { - if !self.props.error_patterns.is_empty() || !self.props.regex_error_patterns.is_empty() - { - self.fatal("both error pattern and expected errors specified"); - } - self.check_expected_errors(expected_errors, &proc_res); - } else { - self.check_all_error_patterns(&output_to_check, &proc_res, pm); - } + self.check_expected_errors(&proc_res); + self.check_all_error_patterns(&output_to_check, &proc_res); if self.props.should_ice { match proc_res.status.code() { Some(101) => (), @@ -137,6 +129,6 @@ impl TestCx<'_> { let output_to_check = self.get_output(&proc_res); self.check_correct_failure_status(&proc_res); - self.check_all_error_patterns(&output_to_check, &proc_res, pm); + self.check_all_error_patterns(&output_to_check, &proc_res); } } diff --git a/src/tools/compiletest/src/runtest/js_doc.rs b/src/tools/compiletest/src/runtest/js_doc.rs index d630affbec104..fd53f01ca1746 100644 --- a/src/tools/compiletest/src/runtest/js_doc.rs +++ b/src/tools/compiletest/src/runtest/js_doc.rs @@ -9,8 +9,7 @@ impl TestCx<'_> { self.document(&out_dir, &self.testpaths); - let file_stem = - self.testpaths.file.file_stem().and_then(|f| f.to_str()).expect("no file stem"); + let file_stem = self.testpaths.file.file_stem().expect("no file stem"); let res = self.run_command_to_procres( Command::new(&nodejs) .arg(self.config.src_root.join("src/tools/rustdoc-js/tester.js")) diff --git a/src/tools/compiletest/src/runtest/mir_opt.rs b/src/tools/compiletest/src/runtest/mir_opt.rs index d1ec00357449d..efdb131bf14a8 100644 --- a/src/tools/compiletest/src/runtest/mir_opt.rs +++ b/src/tools/compiletest/src/runtest/mir_opt.rs @@ -1,6 +1,6 @@ use std::fs; -use std::path::{Path, PathBuf}; +use camino::{Utf8Path, Utf8PathBuf}; use glob::glob; use miropt_test_tools::{MiroptTest, MiroptTestFile, files_for_miropt_test}; use tracing::debug; @@ -14,7 +14,7 @@ impl TestCx<'_> { let should_run = self.should_run(pm); let mut test_info = files_for_miropt_test( - &self.testpaths.file, + &self.testpaths.file.as_std_path(), self.config.get_pointer_width(), self.config.target_cfg().panic.for_miropt_test_tools(), ); @@ -38,20 +38,15 @@ impl TestCx<'_> { fn check_mir_dump(&self, test_info: MiroptTest) { let test_dir = self.testpaths.file.parent().unwrap(); - let test_crate = - self.testpaths.file.file_stem().unwrap().to_str().unwrap().replace('-', "_"); + let test_crate = self.testpaths.file.file_stem().unwrap().replace('-', "_"); let MiroptTest { run_filecheck, suffix, files, passes: _ } = test_info; if self.config.bless { - for e in - glob(&format!("{}/{}.*{}.mir", test_dir.display(), test_crate, suffix)).unwrap() - { + for e in glob(&format!("{}/{}.*{}.mir", test_dir, test_crate, suffix)).unwrap() { fs::remove_file(e.unwrap()).unwrap(); } - for e in - glob(&format!("{}/{}.*{}.diff", test_dir.display(), test_crate, suffix)).unwrap() - { + for e in glob(&format!("{}/{}.*{}.diff", test_dir, test_crate, suffix)).unwrap() { fs::remove_file(e.unwrap()).unwrap(); } } @@ -60,19 +55,15 @@ impl TestCx<'_> { let dumped_string = if let Some(after) = to_file { self.diff_mir_files(from_file.into(), after.into()) } else { - let mut output_file = PathBuf::new(); - output_file.push(self.get_mir_dump_dir()); + let mut output_file = Utf8PathBuf::new(); + output_file.push(self.output_base_dir()); output_file.push(&from_file); - debug!( - "comparing the contents of: {} with {}", - output_file.display(), - expected_file.display() - ); + debug!("comparing the contents of: {} with {:?}", output_file, expected_file); if !output_file.exists() { panic!( "Output file `{}` from test does not exist, available files are in `{}`", - output_file.display(), - output_file.parent().unwrap().display() + output_file, + output_file.parent().unwrap() ); } self.check_mir_test_timestamp(&from_file, &output_file); @@ -107,21 +98,20 @@ impl TestCx<'_> { } } - fn diff_mir_files(&self, before: PathBuf, after: PathBuf) -> String { - let to_full_path = |path: PathBuf| { - let full = self.get_mir_dump_dir().join(&path); + fn diff_mir_files(&self, before: Utf8PathBuf, after: Utf8PathBuf) -> String { + let to_full_path = |path: Utf8PathBuf| { + let full = self.output_base_dir().join(&path); if !full.exists() { panic!( "the mir dump file for {} does not exist (requested in {})", - path.display(), - self.testpaths.file.display(), + path, self.testpaths.file, ); } full }; let before = to_full_path(before); let after = to_full_path(after); - debug!("comparing the contents of: {} with {}", before.display(), after.display()); + debug!("comparing the contents of: {} with {}", before, after); let before = fs::read_to_string(before).unwrap(); let after = fs::read_to_string(after).unwrap(); let before = self.normalize_output(&before, &[]); @@ -138,8 +128,8 @@ impl TestCx<'_> { dumped_string } - fn check_mir_test_timestamp(&self, test_name: &str, output_file: &Path) { - let t = |file| fs::metadata(file).unwrap().modified().unwrap(); + fn check_mir_test_timestamp(&self, test_name: &str, output_file: &Utf8Path) { + let t = |file: &Utf8Path| fs::metadata(file.as_std_path()).unwrap().modified().unwrap(); let source_file = &self.testpaths.file; let output_time = t(output_file); let source_time = t(source_file); @@ -147,8 +137,7 @@ impl TestCx<'_> { debug!("source file time: {:?} output file time: {:?}", source_time, output_time); panic!( "test source file `{}` is newer than potentially stale output file `{}`.", - source_file.display(), - test_name + source_file, test_name ); } } diff --git a/src/tools/compiletest/src/runtest/run_make.rs b/src/tools/compiletest/src/runtest/run_make.rs index 073116933bdb6..a5ce929f9b8e4 100644 --- a/src/tools/compiletest/src/runtest/run_make.rs +++ b/src/tools/compiletest/src/runtest/run_make.rs @@ -1,8 +1,8 @@ -use std::path::Path; use std::process::{Command, Output, Stdio}; use std::{env, fs}; use build_helper::fs::{ignore_not_found, recursive_remove}; +use camino::{Utf8Path, Utf8PathBuf}; use super::{ProcRes, TestCx, disable_error_reporting}; use crate::util::{copy_dir_all, dylib_env_var}; @@ -39,14 +39,16 @@ impl TestCx<'_> { // Copy all input files (apart from rmake.rs) to the temporary directory, // so that the input directory structure from `tests/run-make/` is mirrored // to the `rmake_out` directory. - for path in walkdir::WalkDir::new(&self.testpaths.file).min_depth(1) { - let path = path.unwrap().path().to_path_buf(); + for entry in walkdir::WalkDir::new(&self.testpaths.file).min_depth(1) { + let entry = entry.unwrap(); + let path = entry.path(); + let path = <&Utf8Path>::try_from(path).unwrap(); if path.file_name().is_some_and(|s| s != "rmake.rs") { let target = rmake_out_dir.join(path.strip_prefix(&self.testpaths.file).unwrap()); if path.is_dir() { - copy_dir_all(&path, target).unwrap(); + copy_dir_all(&path, &target).unwrap(); } else { - fs::copy(&path, target).unwrap(); + fs::copy(path.as_std_path(), target).unwrap(); } } } @@ -83,8 +85,10 @@ impl TestCx<'_> { // on some linux distros. // 2. Specific library paths in `self.config.compile_lib_path` needed for running rustc. - let base_dylib_search_paths = - Vec::from_iter(env::split_paths(&env::var(dylib_env_var()).unwrap())); + let base_dylib_search_paths = Vec::from_iter( + env::split_paths(&env::var(dylib_env_var()).unwrap()) + .map(|p| Utf8PathBuf::try_from(p).expect("dylib env var contains non-UTF8 paths")), + ); // Calculate the paths of the recipe binary. As previously discussed, this is placed at // `/` with `bin_name` being `rmake` or `rmake.exe` depending on @@ -113,13 +117,13 @@ impl TestCx<'_> { .arg("-o") .arg(&recipe_bin) // Specify library search paths for `run_make_support`. - .arg(format!("-Ldependency={}", &support_lib_path.parent().unwrap().to_string_lossy())) - .arg(format!("-Ldependency={}", &support_lib_deps.to_string_lossy())) - .arg(format!("-Ldependency={}", &support_lib_deps_deps.to_string_lossy())) + .arg(format!("-Ldependency={}", &support_lib_path.parent().unwrap())) + .arg(format!("-Ldependency={}", &support_lib_deps)) + .arg(format!("-Ldependency={}", &support_lib_deps_deps)) // Provide `run_make_support` as extern prelude, so test writers don't need to write // `extern run_make_support;`. .arg("--extern") - .arg(format!("run_make_support={}", &support_lib_path.to_string_lossy())) + .arg(format!("run_make_support={}", &support_lib_path)) .arg("--edition=2021") .arg(&self.testpaths.file.join("rmake.rs")) .arg("-Cprefer-dynamic"); @@ -240,7 +244,7 @@ impl TestCx<'_> { if self.config.target.contains("msvc") && !self.config.cc.is_empty() { // We need to pass a path to `lib.exe`, so assume that `cc` is `cl.exe` // and that `lib.exe` lives next to it. - let lib = Path::new(&self.config.cc).parent().unwrap().join("lib.exe"); + let lib = Utf8Path::new(&self.config.cc).parent().unwrap().join("lib.exe"); // MSYS doesn't like passing flags of the form `/foo` as it thinks it's // a path and instead passes `C:\msys64\foo`, so convert all @@ -262,8 +266,8 @@ impl TestCx<'_> { cmd.env("IS_MSVC", "1") .env("IS_WINDOWS", "1") - .env("MSVC_LIB", format!("'{}' -nologo", lib.display())) - .env("MSVC_LIB_PATH", format!("{}", lib.display())) + .env("MSVC_LIB", format!("'{}' -nologo", lib)) + .env("MSVC_LIB_PATH", &lib) // Note: we diverge from legacy run_make and don't lump `CC` the compiler and // default flags together. .env("CC_DEFAULT_FLAGS", &cflags) diff --git a/src/tools/compiletest/src/runtest/rustdoc.rs b/src/tools/compiletest/src/runtest/rustdoc.rs index 2583ae96a6788..637ea833357a2 100644 --- a/src/tools/compiletest/src/runtest/rustdoc.rs +++ b/src/tools/compiletest/src/runtest/rustdoc.rs @@ -7,7 +7,9 @@ impl TestCx<'_> { assert!(self.revision.is_none(), "revisions not relevant here"); let out_dir = self.output_base_dir(); - remove_and_create_dir_all(&out_dir); + remove_and_create_dir_all(&out_dir).unwrap_or_else(|e| { + panic!("failed to remove and recreate output directory `{out_dir}`: {e}") + }); let proc_res = self.document(&out_dir, &self.testpaths); if !proc_res.status.success() { diff --git a/src/tools/compiletest/src/runtest/rustdoc_json.rs b/src/tools/compiletest/src/runtest/rustdoc_json.rs index bf7eb2e109a46..9f88faca89268 100644 --- a/src/tools/compiletest/src/runtest/rustdoc_json.rs +++ b/src/tools/compiletest/src/runtest/rustdoc_json.rs @@ -9,7 +9,9 @@ impl TestCx<'_> { assert!(self.revision.is_none(), "revisions not relevant here"); let out_dir = self.output_base_dir(); - remove_and_create_dir_all(&out_dir); + remove_and_create_dir_all(&out_dir).unwrap_or_else(|e| { + panic!("failed to remove and recreate output directory `{out_dir}`: {e}") + }); let proc_res = self.document(&out_dir, &self.testpaths); if !proc_res.status.success() { diff --git a/src/tools/compiletest/src/runtest/ui.rs b/src/tools/compiletest/src/runtest/ui.rs index 9b5b8b56b600e..cc50a918f757a 100644 --- a/src/tools/compiletest/src/runtest/ui.rs +++ b/src/tools/compiletest/src/runtest/ui.rs @@ -6,10 +6,10 @@ use rustfix::{Filter, apply_suggestions, get_suggestions_from_json}; use tracing::debug; use super::{ - AllowUnused, Emit, ErrorKind, FailMode, LinkToAux, PassMode, TargetLocation, TestCx, - TestOutput, Truncated, UI_FIXED, WillExecute, + AllowUnused, Emit, FailMode, LinkToAux, PassMode, TargetLocation, TestCx, TestOutput, + Truncated, UI_FIXED, WillExecute, }; -use crate::{errors, json}; +use crate::json; impl TestCx<'_> { pub(super) fn run_ui_test(&self) { @@ -68,7 +68,7 @@ impl TestCx<'_> { { let mut coverage_file_path = self.config.build_test_suite_root.clone(); coverage_file_path.push("rustfix_missing_coverage.txt"); - debug!("coverage_file_path: {}", coverage_file_path.display()); + debug!("coverage_file_path: {}", coverage_file_path); let mut file = OpenOptions::new() .create(true) @@ -76,8 +76,8 @@ impl TestCx<'_> { .open(coverage_file_path.as_path()) .expect("could not create or open file"); - if let Err(e) = writeln!(file, "{}", self.testpaths.file.display()) { - panic!("couldn't write to {}: {e:?}", coverage_file_path.display()); + if let Err(e) = writeln!(file, "{}", self.testpaths.file) { + panic!("couldn't write to {}: {e:?}", coverage_file_path); } } } else if self.props.run_rustfix { @@ -119,7 +119,7 @@ impl TestCx<'_> { self.testpaths.relative_dir.join(self.testpaths.file.file_name().unwrap()); println!( "To only update this specific test, also pass `--test-args {}`", - relative_path_to_file.display(), + relative_path_to_file, ); self.fatal_proc_rec( &format!("{} errors occurred comparing output.", errors), @@ -127,9 +127,7 @@ impl TestCx<'_> { ); } - let expected_errors = errors::load_errors(&self.testpaths.file, self.revision); - - if let WillExecute::Yes = should_run { + let output_to_check = if let WillExecute::Yes = should_run { let proc_res = self.exec_compiled_test(); let run_output_errors = if self.props.check_run_results { self.load_compare_outputs(&proc_res, TestOutput::Run, explicit) @@ -150,48 +148,19 @@ impl TestCx<'_> { self.fatal_proc_rec("test run succeeded!", &proc_res); } - let output_to_check = self.get_output(&proc_res); - if !self.props.error_patterns.is_empty() || !self.props.regex_error_patterns.is_empty() - { - // "// error-pattern" comments - self.check_all_error_patterns(&output_to_check, &proc_res, pm); - } - self.check_forbid_output(&output_to_check, &proc_res) - } + self.get_output(&proc_res) + } else { + self.get_output(&proc_res) + }; debug!( - "run_ui_test: explicit={:?} config.compare_mode={:?} expected_errors={:?} \ + "run_ui_test: explicit={:?} config.compare_mode={:?} \ proc_res.status={:?} props.error_patterns={:?}", - explicit, - self.config.compare_mode, - expected_errors, - proc_res.status, - self.props.error_patterns + explicit, self.config.compare_mode, proc_res.status, self.props.error_patterns ); - let check_patterns = should_run == WillExecute::No - && (!self.props.error_patterns.is_empty() - || !self.props.regex_error_patterns.is_empty()); - if !explicit && self.config.compare_mode.is_none() { - let check_annotations = !check_patterns || !expected_errors.is_empty(); - - if check_annotations { - // "//~ERROR comments" - self.check_expected_errors(expected_errors, &proc_res); - } - } else if explicit && !expected_errors.is_empty() { - let msg = format!( - "line {}: cannot combine `--error-format` with {} annotations; use `error-pattern` instead", - expected_errors[0].line_num_str(), - expected_errors[0].kind.unwrap_or(ErrorKind::Error), - ); - self.fatal(&msg); - } - let output_to_check = self.get_output(&proc_res); - if check_patterns { - // "// error-pattern" comments - self.check_all_error_patterns(&output_to_check, &proc_res, pm); - } + self.check_expected_errors(&proc_res); + self.check_all_error_patterns(&output_to_check, &proc_res); self.check_forbid_output(&output_to_check, &proc_res); if self.props.run_rustfix && self.config.compare_mode.is_none() { @@ -215,8 +184,6 @@ impl TestCx<'_> { let crate_name = self.testpaths.file.file_stem().expect("test must have a file stem"); // crate name must be alphanumeric or `_`. - let crate_name = - crate_name.to_str().expect("crate name implies file name must be valid UTF-8"); // replace `a.foo` -> `a__foo` for crate name purposes. // replace `revision-name-with-dashes` -> `revision_name_with_underscore` let crate_name = crate_name.replace('.', "__"); diff --git a/src/tools/compiletest/src/tests.rs b/src/tools/compiletest/src/tests.rs index 43c6dc0a67e89..e3e4a81755d09 100644 --- a/src/tools/compiletest/src/tests.rs +++ b/src/tools/compiletest/src/tests.rs @@ -1,5 +1,3 @@ -use std::ffi::OsString; - use crate::debuggers::{extract_gdb_version, extract_lldb_version}; use crate::is_test; @@ -60,11 +58,11 @@ fn test_extract_lldb_version() { #[test] fn is_test_test() { - assert!(is_test(&OsString::from("a_test.rs"))); - assert!(!is_test(&OsString::from(".a_test.rs"))); - assert!(!is_test(&OsString::from("a_cat.gif"))); - assert!(!is_test(&OsString::from("#a_dog_gif"))); - assert!(!is_test(&OsString::from("~a_temp_file"))); + assert!(is_test("a_test.rs")); + assert!(!is_test(".a_test.rs")); + assert!(!is_test("a_cat.gif")); + assert!(!is_test("#a_dog_gif")); + assert!(!is_test("~a_temp_file")); } #[test] diff --git a/src/tools/compiletest/src/util.rs b/src/tools/compiletest/src/util.rs index bff02f1db9f02..81f5679aead77 100644 --- a/src/tools/compiletest/src/util.rs +++ b/src/tools/compiletest/src/util.rs @@ -1,8 +1,7 @@ use std::env; -use std::ffi::OsStr; -use std::path::{Path, PathBuf}; use std::process::Command; +use camino::{Utf8Path, Utf8PathBuf}; use tracing::*; use crate::common::Config; @@ -34,21 +33,21 @@ pub fn logv(config: &Config, s: String) { } } -pub trait PathBufExt { +pub trait Utf8PathBufExt { /// Append an extension to the path, even if it already has one. - fn with_extra_extension>(&self, extension: S) -> PathBuf; + fn with_extra_extension(&self, extension: &str) -> Utf8PathBuf; } -impl PathBufExt for PathBuf { - fn with_extra_extension>(&self, extension: S) -> PathBuf { - if extension.as_ref().is_empty() { +impl Utf8PathBufExt for Utf8PathBuf { + fn with_extra_extension(&self, extension: &str) -> Utf8PathBuf { + if extension.is_empty() { self.clone() } else { - let mut fname = self.file_name().unwrap().to_os_string(); - if !extension.as_ref().to_str().unwrap().starts_with('.') { - fname.push("."); + let mut fname = self.file_name().unwrap().to_string(); + if !extension.starts_with('.') { + fname.push_str("."); } - fname.push(extension); + fname.push_str(extension); self.with_file_name(fname) } } @@ -71,22 +70,27 @@ pub fn dylib_env_var() -> &'static str { /// Adds a list of lookup paths to `cmd`'s dynamic library lookup path. /// If the dylib_path_var is already set for this cmd, the old value will be overwritten! -pub fn add_dylib_path(cmd: &mut Command, paths: impl Iterator>) { +pub fn add_dylib_path( + cmd: &mut Command, + paths: impl Iterator>, +) { let path_env = env::var_os(dylib_env_var()); let old_paths = path_env.as_ref().map(env::split_paths); let new_paths = paths.map(Into::into).chain(old_paths.into_iter().flatten()); cmd.env(dylib_env_var(), env::join_paths(new_paths).unwrap()); } -pub fn copy_dir_all(src: impl AsRef, dst: impl AsRef) -> std::io::Result<()> { - std::fs::create_dir_all(&dst)?; - for entry in std::fs::read_dir(src)? { +pub fn copy_dir_all(src: &Utf8Path, dst: &Utf8Path) -> std::io::Result<()> { + std::fs::create_dir_all(dst.as_std_path())?; + for entry in std::fs::read_dir(src.as_std_path())? { let entry = entry?; + let path = Utf8PathBuf::try_from(entry.path()).unwrap(); + let file_name = path.file_name().unwrap(); let ty = entry.file_type()?; if ty.is_dir() { - copy_dir_all(entry.path(), dst.as_ref().join(entry.file_name()))?; + copy_dir_all(&path, &dst.join(file_name))?; } else { - std::fs::copy(entry.path(), dst.as_ref().join(entry.file_name()))?; + std::fs::copy(path.as_std_path(), dst.join(file_name).as_std_path())?; } } Ok(()) diff --git a/src/tools/compiletest/src/util/tests.rs b/src/tools/compiletest/src/util/tests.rs index b09a183b14e6a..5bcae0dcee146 100644 --- a/src/tools/compiletest/src/util/tests.rs +++ b/src/tools/compiletest/src/util/tests.rs @@ -3,12 +3,12 @@ use super::*; #[test] fn path_buf_with_extra_extension_test() { assert_eq!( - PathBuf::from("foo.rs.stderr"), - PathBuf::from("foo.rs").with_extra_extension("stderr") + Utf8PathBuf::from("foo.rs.stderr"), + Utf8PathBuf::from("foo.rs").with_extra_extension("stderr") ); assert_eq!( - PathBuf::from("foo.rs.stderr"), - PathBuf::from("foo.rs").with_extra_extension(".stderr") + Utf8PathBuf::from("foo.rs.stderr"), + Utf8PathBuf::from("foo.rs").with_extra_extension(".stderr") ); - assert_eq!(PathBuf::from("foo.rs"), PathBuf::from("foo.rs").with_extra_extension("")); + assert_eq!(Utf8PathBuf::from("foo.rs"), Utf8PathBuf::from("foo.rs").with_extra_extension("")); } diff --git a/src/tools/coverage-dump/Cargo.toml b/src/tools/coverage-dump/Cargo.toml index 7f14286b5d0c4..6f92ac50d963c 100644 --- a/src/tools/coverage-dump/Cargo.toml +++ b/src/tools/coverage-dump/Cargo.toml @@ -7,6 +7,7 @@ edition = "2021" [dependencies] anyhow = "1.0.71" +itertools = "0.12" leb128 = "0.2.5" md5 = { package = "md-5" , version = "0.10.5" } miniz_oxide = "0.7.1" diff --git a/src/tools/coverage-dump/src/covfun.rs b/src/tools/coverage-dump/src/covfun.rs index 82ebd33d0d1ca..1cc9f4dc5d6a3 100644 --- a/src/tools/coverage-dump/src/covfun.rs +++ b/src/tools/coverage-dump/src/covfun.rs @@ -1,23 +1,33 @@ use std::collections::HashMap; use std::fmt::{self, Debug, Write as _}; -use std::sync::OnceLock; +use std::sync::LazyLock; -use anyhow::{Context, anyhow}; +use anyhow::{Context, anyhow, bail, ensure}; +use itertools::Itertools; use regex::Regex; -use crate::parser::{Parser, unescape_llvm_string_contents}; +use crate::covmap::FilenameTables; +use crate::llvm_utils::unescape_llvm_string_contents; +use crate::parser::Parser; + +#[cfg(test)] +mod tests; pub(crate) fn dump_covfun_mappings( llvm_ir: &str, + filename_tables: &FilenameTables, function_names: &HashMap, ) -> anyhow::Result<()> { // Extract function coverage entries from the LLVM IR assembly, and associate // each entry with its (demangled) name. let mut covfun_entries = llvm_ir .lines() - .filter_map(covfun_line_data) - .map(|line_data| (function_names.get(&line_data.name_hash).map(String::as_str), line_data)) - .collect::>(); + .filter(|line| is_covfun_line(line)) + .map(parse_covfun_line) + .map_ok(|line_data| { + (function_names.get(&line_data.name_hash).map(String::as_str), line_data) + }) + .collect::, _>>()?; covfun_entries.sort_by(|a, b| { // Sort entries primarily by name, to help make the order consistent // across platforms and relatively insensitive to changes. @@ -41,8 +51,12 @@ pub(crate) fn dump_covfun_mappings( println!("Number of files: {num_files}"); for i in 0..num_files { - let global_file_id = parser.read_uleb128_u32()?; - println!("- file {i} => global file {global_file_id}"); + let global_file_id = parser.read_uleb128_usize()?; + let &CovfunLineData { filenames_hash, .. } = line_data; + let Some(filename) = filename_tables.lookup(filenames_hash, global_file_id) else { + bail!("couldn't resolve global file: {filenames_hash}, {global_file_id}"); + }; + println!("- file {i} => {filename}"); } let num_expressions = parser.read_uleb128_u32()?; @@ -107,36 +121,50 @@ pub(crate) fn dump_covfun_mappings( Ok(()) } +#[derive(Debug, PartialEq, Eq)] struct CovfunLineData { - name_hash: u64, is_used: bool, + name_hash: u64, + filenames_hash: u64, payload: Vec, } -/// Checks a line of LLVM IR assembly to see if it contains an `__llvm_covfun` -/// entry, and if so extracts relevant data in a `CovfunLineData`. -fn covfun_line_data(line: &str) -> Option { - let re = { - // We cheat a little bit and match variable names `@__covrec_[HASH]u` - // rather than the section name, because the section name is harder to - // extract and differs across Linux/Windows/macOS. We also extract the - // symbol name hash from the variable name rather than the data, since - // it's easier and both should match. - static RE: OnceLock = OnceLock::new(); - RE.get_or_init(|| { - Regex::new( - r#"^@__covrec_(?[0-9A-Z]+)(?u)? = .*\[[0-9]+ x i8\] c"(?[^"]*)".*$"#, - ) - .unwrap() - }) - }; +fn is_covfun_line(line: &str) -> bool { + line.starts_with("@__covrec_") +} - let captures = re.captures(line)?; - let name_hash = u64::from_str_radix(&captures["name_hash"], 16).unwrap(); +/// Given a line of LLVM IR assembly that should contain an `__llvm_covfun` +/// entry, parses it to extract relevant data in a `CovfunLineData`. +fn parse_covfun_line(line: &str) -> anyhow::Result { + ensure!(is_covfun_line(line)); + + // We cheat a little bit and match variable names `@__covrec_[HASH]u` + // rather than the section name, because the section name is harder to + // extract and differs across Linux/Windows/macOS. + const RE_STRING: &str = r#"(?x)^ + @__covrec_[0-9A-Z]+(?u)? + \ = \ # (trailing space) + .* + <\{ + \ i64 \ (? -? [0-9]+), + \ i32 \ -? [0-9]+, # (length of payload; currently unused) + \ i64 \ -? [0-9]+, # (source hash; currently unused) + \ i64 \ (? -? [0-9]+), + \ \[ [0-9]+ \ x \ i8 \] \ c"(?[^"]*)" + \ # (trailing space) + }> + .*$ + "#; + static RE: LazyLock = LazyLock::new(|| Regex::new(RE_STRING).unwrap()); + + let captures = + RE.captures(line).with_context(|| format!("couldn't parse covfun line: {line:?}"))?; let is_used = captures.name("is_used").is_some(); + let name_hash = i64::from_str_radix(&captures["name_hash"], 10).unwrap() as u64; + let filenames_hash = i64::from_str_radix(&captures["filenames_hash"], 10).unwrap() as u64; let payload = unescape_llvm_string_contents(&captures["payload"]); - Some(CovfunLineData { name_hash, is_used, payload }) + Ok(CovfunLineData { is_used, name_hash, filenames_hash, payload }) } // Extra parser methods only needed when parsing `covfun` payloads. diff --git a/src/tools/coverage-dump/src/covfun/tests.rs b/src/tools/coverage-dump/src/covfun/tests.rs new file mode 100644 index 0000000000000..1ce833784bd45 --- /dev/null +++ b/src/tools/coverage-dump/src/covfun/tests.rs @@ -0,0 +1,53 @@ +use super::{CovfunLineData, parse_covfun_line}; + +/// Integers in LLVM IR are not inherently signed/unsigned, and the text format tends +/// to emit them in signed form, so this helper function converts `i64` to `u64`. +fn as_u64(x: i64) -> u64 { + x as u64 +} + +#[test] +fn parse_covfun_line_data() { + struct Case { + line: &'static str, + expected: CovfunLineData, + } + let cases = &[ + // Copied from `trivial.ll`: + Case { + line: r#"@__covrec_49A9BAAE5F896E81u = linkonce_odr hidden constant <{ i64, i32, i64, i64, [9 x i8] }> <{ i64 5307978893922758273, i32 9, i64 445092354169400020, i64 6343436898695299756, [9 x i8] c"\01\01\00\01\01\03\01\00\0D" }>, section "__LLVM_COV,__llvm_covfun", align 8"#, + expected: CovfunLineData { + is_used: true, + name_hash: as_u64(5307978893922758273), + filenames_hash: as_u64(6343436898695299756), + payload: b"\x01\x01\x00\x01\x01\x03\x01\x00\x0D".to_vec(), + }, + }, + // Copied from `on-off-sandwich.ll`: + Case { + line: r#"@__covrec_D0CE53C5E64F319Au = linkonce_odr hidden constant <{ i64, i32, i64, i64, [14 x i8] }> <{ i64 -3400688559180533350, i32 14, i64 7307957714577672185, i64 892196767019953100, [14 x i8] c"\01\01\00\02\01\10\05\02\10\01\07\05\00\06" }>, section "__LLVM_COV,__llvm_covfun", align 8"#, + expected: CovfunLineData { + is_used: true, + name_hash: as_u64(-3400688559180533350), + filenames_hash: as_u64(892196767019953100), + payload: b"\x01\x01\x00\x02\x01\x10\x05\x02\x10\x01\x07\x05\x00\x06".to_vec(), + }, + }, + // Copied from `no-core.ll`: + Case { + line: r#"@__covrec_F8016FC82D46106u = linkonce_odr hidden constant <{ i64, i32, i64, i64, [9 x i8] }> <{ i64 1116917981370409222, i32 9, i64 -8857254680411629915, i64 -3625186110715410276, [9 x i8] c"\01\01\00\01\01\0C\01\00\0D" }>, section "__LLVM_COV,__llvm_covfun", align 8"#, + expected: CovfunLineData { + is_used: true, + name_hash: as_u64(1116917981370409222), + filenames_hash: as_u64(-3625186110715410276), + payload: b"\x01\x01\x00\x01\x01\x0C\x01\x00\x0D".to_vec(), + }, + }, + ]; + + for &Case { line, ref expected } in cases { + println!("- {line}"); + let line_data = parse_covfun_line(line).map_err(|e| e.to_string()); + assert_eq!(line_data.as_ref(), Ok(expected)); + } +} diff --git a/src/tools/coverage-dump/src/covmap.rs b/src/tools/coverage-dump/src/covmap.rs new file mode 100644 index 0000000000000..2246ca2d57574 --- /dev/null +++ b/src/tools/coverage-dump/src/covmap.rs @@ -0,0 +1,75 @@ +use std::collections::HashMap; +use std::sync::LazyLock; + +use anyhow::{Context, ensure}; +use regex::Regex; + +use crate::llvm_utils::{truncated_md5, unescape_llvm_string_contents}; +use crate::parser::Parser; + +#[derive(Debug, Default)] +pub(crate) struct FilenameTables { + map: HashMap>, +} + +impl FilenameTables { + pub(crate) fn lookup(&self, filenames_hash: u64, global_file_id: usize) -> Option<&str> { + let table = self.map.get(&filenames_hash)?; + let filename = table.get(global_file_id)?; + Some(filename) + } +} + +struct CovmapLineData { + payload: Vec, +} + +pub(crate) fn make_filename_tables(llvm_ir: &str) -> anyhow::Result { + let mut map = HashMap::default(); + + for line in llvm_ir.lines().filter(|line| is_covmap_line(line)) { + let CovmapLineData { payload } = parse_covmap_line(line)?; + + let mut parser = Parser::new(&payload); + let n_filenames = parser.read_uleb128_usize()?; + let uncompressed_bytes = parser.read_chunk_to_uncompressed_bytes()?; + parser.ensure_empty()?; + + let mut filenames_table = vec![]; + + let mut parser = Parser::new(&uncompressed_bytes); + for _ in 0..n_filenames { + let len = parser.read_uleb128_usize()?; + let bytes = parser.read_n_bytes(len)?; + let filename = str::from_utf8(bytes)?; + filenames_table.push(filename.to_owned()); + } + + let filenames_hash = truncated_md5(&payload); + map.insert(filenames_hash, filenames_table); + } + + Ok(FilenameTables { map }) +} + +fn is_covmap_line(line: &str) -> bool { + line.starts_with("@__llvm_coverage_mapping ") +} + +fn parse_covmap_line(line: &str) -> anyhow::Result { + ensure!(is_covmap_line(line)); + + const RE_STRING: &str = r#"(?x)^ + @__llvm_coverage_mapping \ = + .* + \[ [0-9]+ \ x \ i8 \] \ c"(?[^"]*)" + .*$ + "#; + static RE: LazyLock = LazyLock::new(|| Regex::new(RE_STRING).unwrap()); + + let captures = + RE.captures(line).with_context(|| format!("couldn't parse covmap line: {line:?}"))?; + let payload = unescape_llvm_string_contents(&captures["payload"]); + + Ok(CovmapLineData { payload }) +} diff --git a/src/tools/coverage-dump/src/llvm_utils.rs b/src/tools/coverage-dump/src/llvm_utils.rs new file mode 100644 index 0000000000000..92322b256a828 --- /dev/null +++ b/src/tools/coverage-dump/src/llvm_utils.rs @@ -0,0 +1,85 @@ +use std::borrow::Cow; +use std::sync::OnceLock; + +use anyhow::{anyhow, ensure}; +use regex::bytes; + +use crate::parser::Parser; + +#[cfg(test)] +mod tests; + +/// Given the raw contents of a string literal in LLVM IR assembly, decodes any +/// backslash escapes and returns a vector containing the resulting byte string. +pub(crate) fn unescape_llvm_string_contents(contents: &str) -> Vec { + let escape_re = { + static RE: OnceLock = OnceLock::new(); + // LLVM IR supports two string escapes: `\\` and `\xx`. + RE.get_or_init(|| bytes::Regex::new(r"\\\\|\\([0-9A-Za-z]{2})").unwrap()) + }; + + fn u8_from_hex_digits(digits: &[u8]) -> u8 { + // We know that the input contains exactly 2 hex digits, so these calls + // should never fail. + assert_eq!(digits.len(), 2); + let digits = std::str::from_utf8(digits).unwrap(); + u8::from_str_radix(digits, 16).unwrap() + } + + escape_re + .replace_all(contents.as_bytes(), |captures: &bytes::Captures<'_>| { + let byte = match captures.get(1) { + None => b'\\', + Some(hex_digits) => u8_from_hex_digits(hex_digits.as_bytes()), + }; + [byte] + }) + .into_owned() +} + +/// LLVM's profiler/coverage metadata often uses an MD5 hash truncated to +/// 64 bits as a way to associate data stored in different tables/sections. +pub(crate) fn truncated_md5(bytes: &[u8]) -> u64 { + use md5::{Digest, Md5}; + let mut hasher = Md5::new(); + hasher.update(bytes); + let hash: [u8; 8] = hasher.finalize().as_slice()[..8].try_into().unwrap(); + // The truncated hash is explicitly little-endian, regardless of host + // or target platform. (See `MD5Result::low` in LLVM's `MD5.h`.) + u64::from_le_bytes(hash) +} + +impl<'a> Parser<'a> { + /// Reads a sequence of: + /// - Length of uncompressed data in bytes, as ULEB128 + /// - Length of compressed data in bytes (or 0), as ULEB128 + /// - The indicated number of compressed or uncompressed bytes + /// + /// If the number of compressed bytes is 0, the subsequent bytes are + /// uncompressed. Otherwise, the subsequent bytes are compressed, and will + /// be decompressed. + /// + /// Returns the uncompressed bytes that were read directly or decompressed. + pub(crate) fn read_chunk_to_uncompressed_bytes(&mut self) -> anyhow::Result> { + let uncompressed_len = self.read_uleb128_usize()?; + let compressed_len = self.read_uleb128_usize()?; + + if compressed_len == 0 { + // The bytes are uncompressed, so read them directly. + let uncompressed_bytes = self.read_n_bytes(uncompressed_len)?; + Ok(Cow::Borrowed(uncompressed_bytes)) + } else { + // The bytes are compressed, so read and decompress them. + let compressed_bytes = self.read_n_bytes(compressed_len)?; + + let uncompressed_bytes = miniz_oxide::inflate::decompress_to_vec_zlib_with_limit( + compressed_bytes, + uncompressed_len, + ) + .map_err(|e| anyhow!("{e:?}"))?; + ensure!(uncompressed_bytes.len() == uncompressed_len); + + Ok(Cow::Owned(uncompressed_bytes)) + } + } +} diff --git a/src/tools/coverage-dump/src/llvm_utils/tests.rs b/src/tools/coverage-dump/src/llvm_utils/tests.rs new file mode 100644 index 0000000000000..506b0a6200bb1 --- /dev/null +++ b/src/tools/coverage-dump/src/llvm_utils/tests.rs @@ -0,0 +1,34 @@ +use super::unescape_llvm_string_contents; + +// Tests for `unescape_llvm_string_contents`: + +#[test] +fn unescape_empty() { + assert_eq!(unescape_llvm_string_contents(""), &[]); +} + +#[test] +fn unescape_noop() { + let input = "The quick brown fox jumps over the lazy dog."; + assert_eq!(unescape_llvm_string_contents(input), input.as_bytes()); +} + +#[test] +fn unescape_backslash() { + let input = r"\\Hello\\world\\"; + assert_eq!(unescape_llvm_string_contents(input), r"\Hello\world\".as_bytes()); +} + +#[test] +fn unescape_hex() { + let input = r"\01\02\03\04\0a\0b\0C\0D\fd\fE\FF"; + let expected: &[u8] = &[0x01, 0x02, 0x03, 0x04, 0x0a, 0x0b, 0x0c, 0x0d, 0xfd, 0xfe, 0xff]; + assert_eq!(unescape_llvm_string_contents(input), expected); +} + +#[test] +fn unescape_mixed() { + let input = r"\\01.\5c\5c"; + let expected: &[u8] = br"\01.\\"; + assert_eq!(unescape_llvm_string_contents(input), expected); +} diff --git a/src/tools/coverage-dump/src/main.rs b/src/tools/coverage-dump/src/main.rs index b21e3e292f2b4..2c76d2f246022 100644 --- a/src/tools/coverage-dump/src/main.rs +++ b/src/tools/coverage-dump/src/main.rs @@ -1,4 +1,6 @@ mod covfun; +mod covmap; +mod llvm_utils; mod parser; mod prf_names; @@ -17,8 +19,9 @@ fn main() -> anyhow::Result<()> { let llvm_ir_path = args.get(1).context("LLVM IR file not specified")?; let llvm_ir = std::fs::read_to_string(llvm_ir_path).context("couldn't read LLVM IR file")?; + let filename_tables = covmap::make_filename_tables(&llvm_ir)?; let function_names = crate::prf_names::make_function_names_table(&llvm_ir)?; - crate::covfun::dump_covfun_mappings(&llvm_ir, &function_names)?; + crate::covfun::dump_covfun_mappings(&llvm_ir, &filename_tables, &function_names)?; Ok(()) } diff --git a/src/tools/coverage-dump/src/parser.rs b/src/tools/coverage-dump/src/parser.rs index 0bd4abdae3ef2..f26a57b43b331 100644 --- a/src/tools/coverage-dump/src/parser.rs +++ b/src/tools/coverage-dump/src/parser.rs @@ -1,38 +1,4 @@ -#[cfg(test)] -mod tests; - -use std::sync::OnceLock; - use anyhow::ensure; -use regex::bytes; - -/// Given the raw contents of a string literal in LLVM IR assembly, decodes any -/// backslash escapes and returns a vector containing the resulting byte string. -pub(crate) fn unescape_llvm_string_contents(contents: &str) -> Vec { - let escape_re = { - static RE: OnceLock = OnceLock::new(); - // LLVM IR supports two string escapes: `\\` and `\xx`. - RE.get_or_init(|| bytes::Regex::new(r"\\\\|\\([0-9A-Za-z]{2})").unwrap()) - }; - - fn u8_from_hex_digits(digits: &[u8]) -> u8 { - // We know that the input contains exactly 2 hex digits, so these calls - // should never fail. - assert_eq!(digits.len(), 2); - let digits = std::str::from_utf8(digits).unwrap(); - u8::from_str_radix(digits, 16).unwrap() - } - - escape_re - .replace_all(contents.as_bytes(), |captures: &bytes::Captures<'_>| { - let byte = match captures.get(1) { - None => b'\\', - Some(hex_digits) => u8_from_hex_digits(hex_digits.as_bytes()), - }; - [byte] - }) - .into_owned() -} pub(crate) struct Parser<'a> { rest: &'a [u8], diff --git a/src/tools/coverage-dump/src/parser/tests.rs b/src/tools/coverage-dump/src/parser/tests.rs deleted file mode 100644 index a673606b9c4c8..0000000000000 --- a/src/tools/coverage-dump/src/parser/tests.rs +++ /dev/null @@ -1,38 +0,0 @@ -use super::unescape_llvm_string_contents; - -// WARNING: These tests don't necessarily run in CI, and were mainly used to -// help track down problems when originally developing this tool. -// (The tool is still tested indirectly by snapshot tests that rely on it.) - -// Tests for `unescape_llvm_string_contents`: - -#[test] -fn unescape_empty() { - assert_eq!(unescape_llvm_string_contents(""), &[]); -} - -#[test] -fn unescape_noop() { - let input = "The quick brown fox jumps over the lazy dog."; - assert_eq!(unescape_llvm_string_contents(input), input.as_bytes()); -} - -#[test] -fn unescape_backslash() { - let input = r"\\Hello\\world\\"; - assert_eq!(unescape_llvm_string_contents(input), r"\Hello\world\".as_bytes()); -} - -#[test] -fn unescape_hex() { - let input = r"\01\02\03\04\0a\0b\0C\0D\fd\fE\FF"; - let expected: &[u8] = &[0x01, 0x02, 0x03, 0x04, 0x0a, 0x0b, 0x0c, 0x0d, 0xfd, 0xfe, 0xff]; - assert_eq!(unescape_llvm_string_contents(input), expected); -} - -#[test] -fn unescape_mixed() { - let input = r"\\01.\5c\5c"; - let expected: &[u8] = br"\01.\\"; - assert_eq!(unescape_llvm_string_contents(input), expected); -} diff --git a/src/tools/coverage-dump/src/prf_names.rs b/src/tools/coverage-dump/src/prf_names.rs index 96d097c79a315..f9ab35deba505 100644 --- a/src/tools/coverage-dump/src/prf_names.rs +++ b/src/tools/coverage-dump/src/prf_names.rs @@ -1,10 +1,10 @@ use std::collections::HashMap; use std::sync::OnceLock; -use anyhow::{anyhow, ensure}; use regex::Regex; -use crate::parser::{Parser, unescape_llvm_string_contents}; +use crate::llvm_utils::{truncated_md5, unescape_llvm_string_contents}; +use crate::parser::Parser; /// Scans through the contents of an LLVM IR assembly file to find `__llvm_prf_names` /// entries, decodes them, and creates a table that maps name hash values to @@ -25,18 +25,6 @@ pub(crate) fn make_function_names_table(llvm_ir: &str) -> anyhow::Result u64 { - use md5::{Digest, Md5}; - let mut hasher = Md5::new(); - hasher.update(bytes); - let hash: [u8; 8] = hasher.finalize().as_slice()[..8].try_into().unwrap(); - // The truncated hash is explicitly little-endian, regardless of host - // or target platform. (See `MD5Result::low` in LLVM's `MD5.h`.) - u64::from_le_bytes(hash) - } - fn demangle_if_able(symbol_name_bytes: &[u8]) -> anyhow::Result { // In practice, raw symbol names should always be ASCII. let symbol_name_str = std::str::from_utf8(symbol_name_bytes)?; @@ -54,26 +42,8 @@ pub(crate) fn make_function_names_table(llvm_ir: &str) -> anyhow::Result anyhow::Result = LazyLock::new(|| { RegexBuilder::new( r#" + ^\s* //@\s+ (?P!?) - (?P[A-Za-z]+(?:-[A-Za-z]+)*) + (?P[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*) (?P.*)$ "#, ) diff --git a/src/tools/jsondoclint/src/validator/tests.rs b/src/tools/jsondoclint/src/validator/tests.rs index 28deb7e7ceef6..dd0b4ac5601a7 100644 --- a/src/tools/jsondoclint/src/validator/tests.rs +++ b/src/tools/jsondoclint/src/validator/tests.rs @@ -42,6 +42,7 @@ fn errors_on_missing_links() { )]), paths: FxHashMap::default(), external_crates: FxHashMap::default(), + target: rustdoc_json_types::Target { triple: "".to_string(), target_features: vec![] }, format_version: rustdoc_json_types::FORMAT_VERSION, }; @@ -112,6 +113,7 @@ fn errors_on_local_in_paths_and_not_index() { }, )]), external_crates: FxHashMap::default(), + target: rustdoc_json_types::Target { triple: "".to_string(), target_features: vec![] }, format_version: rustdoc_json_types::FORMAT_VERSION, }; @@ -216,6 +218,7 @@ fn errors_on_missing_path() { ItemSummary { crate_id: 0, path: vec!["foo".to_owned()], kind: ItemKind::Module }, )]), external_crates: FxHashMap::default(), + target: rustdoc_json_types::Target { triple: "".to_string(), target_features: vec![] }, format_version: rustdoc_json_types::FORMAT_VERSION, }; @@ -259,6 +262,7 @@ fn checks_local_crate_id_is_correct() { )]), paths: FxHashMap::default(), external_crates: FxHashMap::default(), + target: rustdoc_json_types::Target { triple: "".to_string(), target_features: vec![] }, format_version: FORMAT_VERSION, }; check(&krate, &[]); diff --git a/src/tools/lint-docs/src/lib.rs b/src/tools/lint-docs/src/lib.rs index 9fd33e23204e7..cacce01675fe2 100644 --- a/src/tools/lint-docs/src/lib.rs +++ b/src/tools/lint-docs/src/lib.rs @@ -312,6 +312,7 @@ impl<'a> LintExtractor<'a> { if matches!( lint.name.as_str(), "unused_features" // broken lint + | "soft_unstable" // cannot have a stable example ) { return Ok(()); } @@ -444,21 +445,15 @@ impl<'a> LintExtractor<'a> { fs::write(&tempfile, source) .map_err(|e| format!("failed to write {}: {}", tempfile.display(), e))?; let mut cmd = Command::new(self.rustc_path); - if options.contains(&"edition2024") { - cmd.arg("--edition=2024"); - cmd.arg("-Zunstable-options"); - } else if options.contains(&"edition2021") { - cmd.arg("--edition=2021"); - } else if options.contains(&"edition2018") { - cmd.arg("--edition=2018"); - } else if options.contains(&"edition2015") { - cmd.arg("--edition=2015"); - } else if options.contains(&"edition") { - panic!("lint-docs: unknown edition"); - } else { + let edition = options + .iter() + .filter_map(|opt| opt.strip_prefix("edition")) + .next() // defaults to latest edition - cmd.arg("--edition=2021"); - } + .unwrap_or("2024"); + cmd.arg(format!("--edition={edition}")); + // Just in case this is an unstable edition. + cmd.arg("-Zunstable-options"); cmd.arg("--error-format=json"); cmd.arg("--target").arg(self.rustc_target); if let Some(target_linker) = self.rustc_linker { diff --git a/src/tools/miri/.github/workflows/ci.yml b/src/tools/miri/.github/workflows/ci.yml index 59bae513a58f4..9dbf51e9796a7 100644 --- a/src/tools/miri/.github/workflows/ci.yml +++ b/src/tools/miri/.github/workflows/ci.yml @@ -89,41 +89,16 @@ jobs: # Check if all jobs that we depend on (in the needs array) were successful. jq --exit-status 'all(.result == "success")' <<< '${{ toJson(needs) }}' - cron-fail-notify: - name: cronjob failure notification + cron-rustc-pull: + name: automatic pull from rustc runs-on: ubuntu-latest permissions: # The cronjob needs to be able to push to the repo... contents: write # ... and create a PR. pull-requests: write - needs: [build, style, coverage] - if: ${{ github.event_name == 'schedule' && failure() }} + if: ${{ github.event_name == 'schedule' }} steps: - # Send a Zulip notification - - name: Install zulip-send - run: pip3 install zulip - - name: Send Zulip notification - env: - ZULIP_BOT_EMAIL: ${{ secrets.ZULIP_BOT_EMAIL }} - ZULIP_API_TOKEN: ${{ secrets.ZULIP_API_TOKEN }} - run: | - ~/.local/bin/zulip-send --user $ZULIP_BOT_EMAIL --api-key $ZULIP_API_TOKEN --site https://rust-lang.zulipchat.com \ - --stream miri --subject "Miri Build Failure ($(date -u +%Y-%m))" \ - --message 'Dear @*T-miri*, - - It would appear that the [Miri cron job build]('"https://github.com/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID"') failed. - - This likely means that rustc changed the miri directory and - we now need to do a [`./miri rustc-pull`](https://github.com/rust-lang/miri/blob/master/CONTRIBUTING.md#importing-changes-from-the-rustc-repo). - - Would you mind investigating this issue? - - Thanks in advance! - Sincerely, - The Miri Cronjobs Bot' - - # Attempt to auto-sync with rustc - uses: actions/checkout@v4 with: fetch-depth: 256 # get a bit more of the history @@ -143,18 +118,45 @@ jobs: run: | ./miri toolchain ./miri fmt --check || (./miri fmt && git commit -am "fmt") - - name: Push changes to a branch + - name: Push changes to a branch and create PR run: | + # `git diff --exit-code` "succeeds" if the diff is empty. + if git diff --exit-code HEAD^; then echo "Nothing changed in rustc, skipping PR"; exit 0; fi + # The diff is non-empty, create a PR. BRANCH="rustup-$(date -u +%Y-%m-%d)" git switch -c $BRANCH git push -u origin $BRANCH - - name: Create Pull Request - run: | - PR=$(gh pr create -B master --title 'Automatic Rustup' --body 'Please close and re-open this PR to trigger CI, then enable auto-merge.') - ~/.local/bin/zulip-send --user $ZULIP_BOT_EMAIL --api-key $ZULIP_API_TOKEN --site https://rust-lang.zulipchat.com \ - --stream miri --subject "Miri Build Failure ($(date -u +%Y-%m))" \ - --message "A PR doing a rustc-pull [has been automatically created]($PR) for your convenience." + gh pr create -B master --title 'Automatic Rustup' --body 'Please close and re-open this PR to trigger CI, then enable auto-merge.' env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} ZULIP_BOT_EMAIL: ${{ secrets.ZULIP_BOT_EMAIL }} ZULIP_API_TOKEN: ${{ secrets.ZULIP_API_TOKEN }} + + cron-fail-notify: + name: cronjob failure notification + runs-on: ubuntu-latest + needs: [build, style, coverage] + if: ${{ github.event_name == 'schedule' && failure() }} + steps: + # Send a Zulip notification + - name: Install zulip-send + run: pip3 install zulip + - name: Send Zulip notification + env: + ZULIP_BOT_EMAIL: ${{ secrets.ZULIP_BOT_EMAIL }} + ZULIP_API_TOKEN: ${{ secrets.ZULIP_API_TOKEN }} + run: | + ~/.local/bin/zulip-send --user $ZULIP_BOT_EMAIL --api-key $ZULIP_API_TOKEN --site https://rust-lang.zulipchat.com \ + --stream miri --subject "Miri Build Failure ($(date -u +%Y-%m))" \ + --message 'Dear @*T-miri*, + + It would appear that the [Miri cron job build]('"https://github.com/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID"') failed. + + This likely means that rustc changed the miri directory and + we now need to do a [`./miri rustc-pull`](https://github.com/rust-lang/miri/blob/master/CONTRIBUTING.md#importing-changes-from-the-rustc-repo). + + Would you mind investigating this issue? + + Thanks in advance! + Sincerely, + The Miri Cronjobs Bot' diff --git a/src/tools/miri/.gitignore b/src/tools/miri/.gitignore index 03c5591b78751..ed2d0ba7ba079 100644 --- a/src/tools/miri/.gitignore +++ b/src/tools/miri/.gitignore @@ -9,6 +9,9 @@ tex/*/out *.mm_profdata perf.data perf.data.old -flamegraph.svg +flamegraph*.svg +rustc-ice*.txt tests/native-lib/libtestlib.so .auto-* + +/genmc/ diff --git a/src/tools/miri/CONTRIBUTING.md b/src/tools/miri/CONTRIBUTING.md index 0d77ca06e1b48..739f0702252ab 100644 --- a/src/tools/miri/CONTRIBUTING.md +++ b/src/tools/miri/CONTRIBUTING.md @@ -19,12 +19,10 @@ When you get a review, please take care of the requested changes in new commits. existing commits. Generally avoid force-pushing. The only time you should force push is when there is a conflict with the master branch (in that case you should rebase across master, not merge), and all the way at the end of the review process when the reviewer tells you that the PR is done and you -should squash the commits. For the latter case, use `git rebase --keep-base ...` to squash without -changing the base commit your PR branches off of. Use your own judgment and the reviewer's guidance -to decide whether the PR should be squashed into a single commit or multiple logically separate -commits. (All this is to work around the fact that Github is quite bad at dealing with force pushes -and does not support `git range-diff`. Maybe one day Github will be good at git and then life can -become easier.) +should squash the commits. If you are unsure how to use `git rebase` to squash commits, use `./miri +squash` which automates the process but leaves little room for customization. (All this is to work +around the fact that Github is quite bad at dealing with force pushes and does not support `git +range-diff`. Maybe one day Github will be good at git and then life can become easier.) Most PRs bounce back and forth between the reviewer and the author several times, so it is good to keep track of who is expected to take the next step. We are using the `S-waiting-for-review` and diff --git a/src/tools/miri/Cargo.lock b/src/tools/miri/Cargo.lock index b8100d0e7ad99..6f4bd3eab51a2 100644 --- a/src/tools/miri/Cargo.lock +++ b/src/tools/miri/Cargo.lock @@ -39,31 +39,31 @@ dependencies = [ [[package]] name = "annotate-snippets" -version = "0.11.4" +version = "0.11.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "24e35ed54e5ea7997c14ed4c70ba043478db1112e98263b3b035907aa197d991" +checksum = "710e8eae58854cdc1790fcb56cca04d712a17be849eeb81da2a724bf4bae2bc4" dependencies = [ "anstyle", - "unicode-width", + "unicode-width 0.2.0", ] [[package]] name = "anstyle" -version = "1.0.8" +version = "1.0.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1bec1de6f59aedf83baf9ff929c98f2ad654b97c9510f4e70cf6f661d49fd5b1" +checksum = "55cc3b69f167a1ef2e161439aa98aed94e6028e5f9a59be9a6ffb47aef1651f9" [[package]] name = "anyhow" -version = "1.0.86" +version = "1.0.97" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b3d1d046238990b9cf5bcde22a3fb3584ee5cf65fb2765f454ed428c7a0063da" +checksum = "dcfed56ad506cb2c684a14971b8861fdc3baaaae314b9e5f9bb532cbe3ba7a4f" [[package]] name = "autocfg" -version = "1.3.0" +version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0c4b4d0bd25bd0b74681c0ad21497610ce1b7c91b1022cd21c80c6fbdd9476b0" +checksum = "ace50bade8e6234aa140d9a2f552bbee1db4d353f69b8217bc503490fc1a9f26" [[package]] name = "backtrace" @@ -82,15 +82,15 @@ dependencies = [ [[package]] name = "bitflags" -version = "2.6.0" +version = "2.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b048fb63fd8b5923fc5aa7b340d8e156aec7ec02f0c78fa8a6ddc2613f6f71de" +checksum = "5c8214115b7bf84099f1309324e63141d4c5d7cc26862f97a0a857dbefe165bd" [[package]] name = "bstr" -version = "1.10.0" +version = "1.11.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "40723b8fb387abc38f4f4a37c09073622e41dd12327033091ef8950659e6dc0c" +checksum = "531a9155a481e2ee699d4f98f43c0ca4ff8ee1bfd55c31e9e98fb29d2b176fe0" dependencies = [ "memchr", "regex-automata", @@ -98,25 +98,25 @@ dependencies = [ ] [[package]] -name = "byteorder" -version = "1.5.0" +name = "bumpalo" +version = "3.17.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" +checksum = "1628fb46dfa0b37568d12e5edd512553eccf6a22a78e8bde00bb4aed84d5bdbf" [[package]] name = "camino" -version = "1.1.7" +version = "1.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e0ec6b951b160caa93cc0c7b209e5a3bff7aae9062213451ac99493cd844c239" +checksum = "8b96ec4966b5813e2c0507c1f86115c8c5abaadc3980879c3424042a02fd1ad3" dependencies = [ "serde", ] [[package]] name = "cargo-platform" -version = "0.1.8" +version = "0.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "24b1f0365a6c6bb4020cd05806fd0d33c44d38046b8bd7f0e40814b9763cabfc" +checksum = "e35af189006b9c0f00a064685c727031e3ed2d8020f7ba284d78cc2671bd36ea" dependencies = [ "serde", ] @@ -132,14 +132,14 @@ dependencies = [ "semver", "serde", "serde_json", - "thiserror", + "thiserror 1.0.69", ] [[package]] name = "cc" -version = "1.1.22" +version = "1.2.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9540e661f81799159abee814118cc139a2004b3a3aa3ea37724a1b66530b90e0" +checksum = "1fcb57c740ae1daf453ae85f16e37396f672b039e00d9d866e07ddb24e328e3a" dependencies = [ "shlex", ] @@ -152,18 +152,18 @@ checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" [[package]] name = "chrono" -version = "0.4.38" +version = "0.4.40" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a21f936df1771bf62b77f047b726c4625ff2e8aa607c01ec06e5a05bd8463401" +checksum = "1a7964611d71df112cb1730f2ee67324fcf4d0fc6606acbbe9bfe06df124637c" dependencies = [ "num-traits", ] [[package]] name = "chrono-tz" -version = "0.10.0" +version = "0.10.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cd6dd8046d00723a59a2f8c5f295c515b9bb9a331ee4f8f3d4dd49e428acd3b6" +checksum = "efdce149c370f133a071ca8ef6ea340b7b88748ab0810097a9e2976eaa34b4f3" dependencies = [ "chrono", "chrono-tz-build", @@ -172,9 +172,9 @@ dependencies = [ [[package]] name = "chrono-tz-build" -version = "0.4.0" +version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e94fea34d77a245229e7746bd2beb786cd2a896f306ff491fb8cecb3074b10a7" +checksum = "8f10f8c9340e31fc120ff885fcdb54a0b48e474bbd77cab557f0c30a3e569402" dependencies = [ "parse-zoneinfo", "phf_codegen", @@ -219,12 +219,12 @@ dependencies = [ [[package]] name = "colored" -version = "2.1.0" +version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cbf2150cce219b664a8a70df7a1f933836724b503f8a413af9365b4dcc4d90b8" +checksum = "117725a109d387c937a1533ce01b450cbde6b88abceea8473c4d7a85853cda3c" dependencies = [ "lazy_static", - "windows-sys 0.48.0", + "windows-sys", ] [[package]] @@ -235,40 +235,40 @@ checksum = "55b672471b4e9f9e95499ea597ff64941a309b2cdbffcc46f2cc5e2d971fd335" [[package]] name = "console" -version = "0.15.8" +version = "0.15.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0e1f83fc076bd6dd27517eacdf25fef6c4dfe5f1d7448bafaaf3a26f13b5e4eb" +checksum = "054ccb5b10f9f2cbf51eb355ca1d05c2d279ce1804688d0db74b4733a5aeafd8" dependencies = [ "encode_unicode", - "lazy_static", "libc", - "unicode-width", - "windows-sys 0.52.0", + "once_cell", + "unicode-width 0.2.0", + "windows-sys", ] [[package]] name = "cpufeatures" -version = "0.2.12" +version = "0.2.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "53fe5e26ff1b7aef8bca9c6080520cfb8d9333c7568e1829cef191a9723e5504" +checksum = "59ed5838eebb26a2bb2e58f6d5b5316989ae9d08bab10e0e6d103e656d1b0280" dependencies = [ "libc", ] [[package]] name = "crossbeam-channel" -version = "0.5.13" +version = "0.5.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "33480d6946193aa8033910124896ca395333cae7e2d1113d1fef6c3272217df2" +checksum = "82b8f8f868b36967f9606790d1903570de9ceaf870a7bf9fbbd3016d636a2cb2" dependencies = [ "crossbeam-utils", ] [[package]] name = "crossbeam-utils" -version = "0.8.20" +version = "0.8.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "22ec99545bb0ed0ea7bb9b8e1e9122ea386ff8a48c0922e43f36d45ab09e0e80" +checksum = "d0a5c400df2834b80a4c3327b3aad3a4c4cd4de0629063962b03235697506a28" [[package]] name = "crypto-common" @@ -282,39 +282,39 @@ dependencies = [ [[package]] name = "directories" -version = "5.0.1" +version = "6.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9a49173b84e034382284f27f1af4dcbbd231ffa358c0fe316541a7337f376a35" +checksum = "16f5094c54661b38d03bd7e50df373292118db60b585c08a411c6d840017fe7d" dependencies = [ "dirs-sys", ] [[package]] name = "dirs-sys" -version = "0.4.1" +version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "520f05a5cbd335fae5a99ff7a6ab8627577660ee5cfd6a94a6a929b52ff0321c" +checksum = "e01a3366d27ee9890022452ee61b2b63a67e6f13f58900b651ff5665f0bb1fab" dependencies = [ "libc", "option-ext", "redox_users", - "windows-sys 0.48.0", + "windows-sys", ] [[package]] name = "encode_unicode" -version = "0.3.6" +version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a357d28ed41a50f9c765dbfe56cbc04a64e53e5fc58ba79fbc34c10ef3df831f" +checksum = "34aa73646ffb006b8f5147f3dc182bd4bcb190227ce861fc4a4844bf8e3cb2c0" [[package]] name = "errno" -version = "0.3.9" +version = "0.3.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "534c5cf6194dfab3db3242765c03bbe257cf92f22b38f6bc0c58d59108a820ba" +checksum = "976dd42dc7e85965fe702eb8164f21f450704bdde31faefd6471dba214cb594e" dependencies = [ "libc", - "windows-sys 0.52.0", + "windows-sys", ] [[package]] @@ -329,9 +329,9 @@ dependencies = [ [[package]] name = "fastrand" -version = "2.1.0" +version = "2.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9fc0510504f03c51ada170672ac806f1f105a88aa97a5281117e1ddc3368e51a" +checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be" [[package]] name = "generic-array" @@ -356,14 +356,14 @@ dependencies = [ [[package]] name = "getrandom" -version = "0.3.1" +version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "43a49c392881ce6d5c3b8cb70f98717b7c07aabbdff06687b9030dbfbe2725f8" +checksum = "73fea8450eea4bac3940448fb7ae50d91f034f941199fcd9d909a5a07aa455f0" dependencies = [ "cfg-if", "libc", - "wasi 0.13.3+wasi-0.2.2", - "windows-targets 0.52.6", + "r-efi", + "wasi 0.14.2+wasi-0.2.4", ] [[package]] @@ -380,40 +380,41 @@ checksum = "ce23b50ad8242c51a442f3ff322d56b02f08852c77e4c0b4d3fd684abc89c683" [[package]] name = "indicatif" -version = "0.17.8" +version = "0.17.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "763a5a8f45087d6bcea4222e7b72c291a054edf80e4ef6efd2a4979878c7bea3" +checksum = "183b3088984b400f4cfac3620d5e076c84da5364016b4f49473de574b2586235" dependencies = [ "console", - "instant", "number_prefix", "portable-atomic", - "unicode-width", + "unicode-width 0.2.0", + "web-time", ] [[package]] name = "inout" -version = "0.1.3" +version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a0c10553d664a4d0bcff9f4215d0aac67a639cc68ef660840afe309b807bc9f5" +checksum = "879f10e63c20629ecabbb64a8010319738c66a5cd0c29b02d63d272b03751d01" dependencies = [ "generic-array", ] [[package]] -name = "instant" -version = "0.1.13" +name = "itoa" +version = "1.0.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e0242819d153cba4b4b05a5a8f2a7e9bbf97b6055b2a002b395c96b5ff3c0222" -dependencies = [ - "cfg-if", -] +checksum = "4a5f13b858c8d314ee3e8f639011f7ccefe71f97f96e50151fb991f267928e2c" [[package]] -name = "itoa" -version = "1.0.11" +name = "js-sys" +version = "0.3.77" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "49f1f14873335454500d59611f1cf4a4b0f786f9ac11f4312a78e4cf2566695b" +checksum = "1cfaf33c695fc6e08064efbc1f72ec937429614f25eef83af942d0e227c3a28f" +dependencies = [ + "once_cell", + "wasm-bindgen", +] [[package]] name = "lazy_static" @@ -429,15 +430,15 @@ checksum = "db13adb97ab515a3691f56e4dbab09283d0b86cb45abd991d8634a9d6f501760" [[package]] name = "libc" -version = "0.2.155" +version = "0.2.171" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "97b3888a4aecf77e811145cadf6eef5901f4782c53886191b2f693f24761847c" +checksum = "c19937216e9d3aa9956d9bb8dfc0b0c8beb6058fc4f7a4dc4d850edf86a237d6" [[package]] name = "libffi" -version = "3.2.0" +version = "4.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ce826c243048e3d5cec441799724de52e2d42f820468431fc3fceee2341871e2" +checksum = "4a9434b6fc77375fb624698d5f8c49d7e80b10d59eb1219afda27d1f824d4074" dependencies = [ "libc", "libffi-sys", @@ -445,21 +446,21 @@ dependencies = [ [[package]] name = "libffi-sys" -version = "2.3.0" +version = "3.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f36115160c57e8529781b4183c2bb51fdc1f6d6d1ed345591d84be7703befb3c" +checksum = "ead36a2496acfc8edd6cc32352110e9478ac5b9b5f5b9856ebd3d28019addb84" dependencies = [ "cc", ] [[package]] name = "libloading" -version = "0.8.5" +version = "0.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4979f22fdb869068da03c9f7528f8297c6fd2606bc3a4affe42e6a823fdb8da4" +checksum = "fc2f4eb4bc735547cfed7c0a4922cbd04a4655978c09b54f1f7b228750664c34" dependencies = [ "cfg-if", - "windows-targets 0.52.6", + "windows-targets", ] [[package]] @@ -474,9 +475,9 @@ dependencies = [ [[package]] name = "linux-raw-sys" -version = "0.4.14" +version = "0.9.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "78b3ae25bc7c8c38cec158d1f2757ee79e9b3740fbc7ccf0e59e4b08d793fa89" +checksum = "fe7db12097d22ec582439daf8618b8fdd1a7bef6270e9af3b1ebcd30893cf413" [[package]] name = "lock_api" @@ -490,15 +491,15 @@ dependencies = [ [[package]] name = "log" -version = "0.4.22" +version = "0.4.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a7a70ba024b9dc04c27ea2f0c0548feb474ec5c54bba33a7f72f873a39d07b24" +checksum = "13dc2df351e3202783a1fe0d44375f7295ffb4049267b0f3018346dc122a1d94" [[package]] name = "measureme" -version = "11.0.1" +version = "12.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dfa4a40f09af7aa6faef38285402a78847d0d72bf8827006cd2a332e1e6e4a8d" +checksum = "570a507d8948a66a97f42cbbaf8a6bb9516a51017d4ee949502ad7a10a864395" dependencies = [ "log", "memmap2", @@ -537,11 +538,12 @@ name = "miri" version = "0.1.0" dependencies = [ "aes", + "bitflags", "chrono", "chrono-tz", "colored", "directories", - "getrandom 0.3.1", + "getrandom 0.3.2", "libc", "libffi", "libloading", @@ -553,7 +555,7 @@ dependencies = [ "tempfile", "tikv-jemalloc-sys", "ui_test", - "windows-sys 0.52.0", + "windows-sys", ] [[package]] @@ -582,9 +584,9 @@ dependencies = [ [[package]] name = "once_cell" -version = "1.19.0" +version = "1.21.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92" +checksum = "42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d" [[package]] name = "option-ext" @@ -604,7 +606,7 @@ version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d2ad9b889f1b12e0b9ee24db044b5129150d5eada288edc800f789928dc8c0e3" dependencies = [ - "unicode-width", + "unicode-width 0.1.14", ] [[package]] @@ -627,7 +629,7 @@ dependencies = [ "libc", "redox_syscall", "smallvec", - "windows-targets 0.52.6", + "windows-targets", ] [[package]] @@ -650,18 +652,18 @@ dependencies = [ [[package]] name = "phf" -version = "0.11.2" +version = "0.11.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ade2d8b8f33c7333b51bcf0428d37e217e9f32192ae4772156f65063b8ce03dc" +checksum = "1fd6780a80ae0c52cc120a26a1a42c1ae51b247a253e4e06113d23d2c2edd078" dependencies = [ "phf_shared", ] [[package]] name = "phf_codegen" -version = "0.11.2" +version = "0.11.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e8d39688d359e6b34654d328e262234662d16cc0f60ec8dcbe5e718709342a5a" +checksum = "aef8048c789fa5e851558d709946d6d79a8ff88c0440c587967f8e94bfb1216a" dependencies = [ "phf_generator", "phf_shared", @@ -669,9 +671,9 @@ dependencies = [ [[package]] name = "phf_generator" -version = "0.11.2" +version = "0.11.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "48e4cc64c2ad9ebe670cb8fd69dd50ae301650392e81c05f9bfcb2d5bdbc24b0" +checksum = "3c80231409c20246a13fddb31776fb942c38553c51e871f8cbd687a4cfb5843d" dependencies = [ "phf_shared", "rand 0.8.5", @@ -679,32 +681,32 @@ dependencies = [ [[package]] name = "phf_shared" -version = "0.11.2" +version = "0.11.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "90fcb95eef784c2ac79119d1dd819e162b5da872ce6f3c3abe1e8ca1c082f72b" +checksum = "67eabc2ef2a60eb7faa00097bd1ffdb5bd28e62bf39990626a582201b7a754e5" dependencies = [ "siphasher", ] [[package]] name = "pin-project-lite" -version = "0.2.14" +version = "0.2.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bda66fc9667c18cb2758a2ac84d1167245054bcf85d5d1aaa6923f45801bdd02" +checksum = "3b3cff922bd51709b605d9ead9aa71031d81447142d828eb4a6eba76fe619f9b" [[package]] name = "portable-atomic" -version = "1.7.0" +version = "1.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "da544ee218f0d287a911e9c99a39a8c9bc8fcad3cb8db5959940044ecfc67265" +checksum = "350e9b48cbc6b0e028b0473b114454c6316e57336ee184ceab6e53f72c178b3e" [[package]] name = "ppv-lite86" -version = "0.2.20" +version = "0.2.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "77957b295656769bb8ad2b6a6b09d897d94f05c41b069aede1fcdaa675eaea04" +checksum = "85eae3c4ed2f50dcfe72643da4befc30deadb458a9b590d720cde2f2b1e97da9" dependencies = [ - "zerocopy 0.7.35", + "zerocopy", ] [[package]] @@ -719,22 +721,28 @@ dependencies = [ [[package]] name = "proc-macro2" -version = "1.0.86" +version = "1.0.94" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5e719e8df665df0d1c8fbfd238015744736151d4445ec0836b8e628aae103b77" +checksum = "a31971752e70b8b2686d7e46ec17fb38dad4051d94024c88df49b667caea9c84" dependencies = [ "unicode-ident", ] [[package]] name = "quote" -version = "1.0.36" +version = "1.0.40" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0fa76aaf39101c457836aec0ce2316dbdc3ab723cdda1c6bd4e6ad4208acaca7" +checksum = "1885c039570dc00dcb4ff087a89e185fd56bae234ddc7f056a945bf36467248d" dependencies = [ "proc-macro2", ] +[[package]] +name = "r-efi" +version = "5.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "74765f6d916ee2faa39bc8e68e4f3ed8949b48cccdac59983d287a7cb71ce9c5" + [[package]] name = "rand" version = "0.8.5" @@ -751,8 +759,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3779b94aeb87e8bd4e834cee3650289ee9e0d5677f976ecdb6d219e5f4f6cd94" dependencies = [ "rand_chacha", - "rand_core 0.9.0", - "zerocopy 0.8.14", + "rand_core 0.9.3", + "zerocopy", ] [[package]] @@ -762,7 +770,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d3022b5f1df60f26e1ffddd6c66e8aa15de382ae63b3a0c1bfc0e4d3e3f325cb" dependencies = [ "ppv-lite86", - "rand_core 0.9.0", + "rand_core 0.9.3", ] [[package]] @@ -773,39 +781,38 @@ checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" [[package]] name = "rand_core" -version = "0.9.0" +version = "0.9.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b08f3c9802962f7e1b25113931d94f43ed9725bebc59db9d0c3e9a23b67e15ff" +checksum = "99d9a13982dcf210057a8a78572b2217b667c3beacbf3a0d8b454f6f82837d38" dependencies = [ - "getrandom 0.3.1", - "zerocopy 0.8.14", + "getrandom 0.3.2", ] [[package]] name = "redox_syscall" -version = "0.5.3" +version = "0.5.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2a908a6e00f1fdd0dfd9c0eb08ce85126f6d8bbda50017e74bc4a4b7d4a926a4" +checksum = "0b8c0c260b63a8219631167be35e6a988e9554dbd323f8bd08439c8ed1302bd1" dependencies = [ "bitflags", ] [[package]] name = "redox_users" -version = "0.4.5" +version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bd283d9651eeda4b2a83a43c1c91b266c40fd76ecd39a50a8c630ae69dc72891" +checksum = "dd6f9d3d47bdd2ad6945c5015a226ec6155d0bcdfd8f7cd29f86b71f8de99d2b" dependencies = [ "getrandom 0.2.15", "libredox", - "thiserror", + "thiserror 2.0.12", ] [[package]] name = "regex" -version = "1.10.6" +version = "1.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4219d74c6b67a3654a9fbebc4b419e22126d13d2f3c4a07ee0cb61ff79a79619" +checksum = "b544ef1b4eac5dc2db33ea63606ae9ffcfac26c1416a2806ae0bf5f56b201191" dependencies = [ "aho-corasick", "memchr", @@ -815,9 +822,9 @@ dependencies = [ [[package]] name = "regex-automata" -version = "0.4.7" +version = "0.4.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "38caf58cc5ef2fed281f89292ef23f6365465ed9a41b7a7754eb4e26496c92df" +checksum = "809e8dc61f6de73b46c85f4c96486310fe304c434cfa43669d7b40f711150908" dependencies = [ "aho-corasick", "memchr", @@ -826,9 +833,9 @@ dependencies = [ [[package]] name = "regex-syntax" -version = "0.8.4" +version = "0.8.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a66a03ae7c801facd77a29370b4faec201768915ac14a721ba36f20bc9c209b" +checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c" [[package]] name = "rustc-demangle" @@ -844,43 +851,43 @@ checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2" [[package]] name = "rustc_version" -version = "0.4.0" +version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bfa0f585226d2e68097d4f95d113b15b83a82e819ab25717ec0590d9584ef366" +checksum = "cfcb3a22ef46e85b45de6ee7e79d063319ebb6594faafcf1c225ea92ab6e9b92" dependencies = [ "semver", ] [[package]] name = "rustfix" -version = "0.8.5" +version = "0.8.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "70f5b7fc8060f4f8373f9381a630304b42e1183535d9beb1d3f596b236c9106a" +checksum = "82fa69b198d894d84e23afde8e9ab2af4400b2cba20d6bf2b428a8b01c222c5a" dependencies = [ "serde", "serde_json", - "thiserror", + "thiserror 1.0.69", "tracing", ] [[package]] name = "rustix" -version = "0.38.34" +version = "1.0.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "70dc5ec042f7a43c4a73241207cecc9873a06d45debb38b329f8541d85c2730f" +checksum = "d97817398dd4bb2e6da002002db259209759911da105da92bec29ccb12cf58bf" dependencies = [ "bitflags", "errno", "libc", "linux-raw-sys", - "windows-sys 0.52.0", + "windows-sys", ] [[package]] name = "ryu" -version = "1.0.18" +version = "1.0.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f3cb5ba0dc43242ce17de99c180e96db90b235b8a9fdc9543c96d2209116bd9f" +checksum = "28d3b2b1366ec20994f1fd18c3c594f05c5dd4bc44d8bb0c1c632c8d6829481f" [[package]] name = "scopeguard" @@ -890,27 +897,27 @@ checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" [[package]] name = "semver" -version = "1.0.23" +version = "1.0.26" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "61697e0a1c7e512e84a621326239844a24d8207b4669b41bc18b32ea5cbf988b" +checksum = "56e6fa9c48d24d85fb3de5ad847117517440f6beceb7798af16b4a87d616b8d0" dependencies = [ "serde", ] [[package]] name = "serde" -version = "1.0.204" +version = "1.0.219" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bc76f558e0cbb2a839d37354c575f1dc3fdc6546b5be373ba43d95f231bf7c12" +checksum = "5f0e2c6ed6606019b4e29e69dbaba95b11854410e5347d525002456dbbb786b6" dependencies = [ "serde_derive", ] [[package]] name = "serde_derive" -version = "1.0.204" +version = "1.0.219" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e0cd7e117be63d3c3678776753929474f3b04a43a080c744d6b0ae2a8c28e222" +checksum = "5b0276cf7f2c73365f7157c8123c21cd9a50fbbd844757af28ca1f5925fc2a00" dependencies = [ "proc-macro2", "quote", @@ -919,9 +926,9 @@ dependencies = [ [[package]] name = "serde_json" -version = "1.0.122" +version = "1.0.140" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "784b6203951c57ff748476b126ccb5e8e2959a5c19e5c617ab1956be3dbc68da" +checksum = "20068b6e96dc6c9bd23e01df8827e6c7e1f2fddd43c21810382803c136b99373" dependencies = [ "itoa", "memchr", @@ -946,15 +953,15 @@ checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64" [[package]] name = "siphasher" -version = "0.3.11" +version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "38b58827f4464d87d377d175e90bf58eb00fd8716ff0a62f80356b5e61555d0d" +checksum = "56199f7ddabf13fe5074ce809e7d3f42b42ae711800501b5b16ea82ad029c39d" [[package]] name = "smallvec" -version = "1.13.2" +version = "1.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3c5e1a9a646d36c3599cd173a41282daf47c44583ad367b8e6837255952e5c67" +checksum = "7fcf8323ef1faaee30a44a340193b1ac6814fd9b7b4e88e9d4519a3e4abe1cfd" [[package]] name = "spanned" @@ -968,9 +975,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.72" +version = "2.0.100" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dc4b9b9bf2add8093d3f2c0204471e951b2285580335de42f9d2534f3ae7a8af" +checksum = "b09a44accad81e1ba1cd74a32461ba89dee89095ba17b32f5d03683b1b1fc2a0" dependencies = [ "proc-macro2", "quote", @@ -979,31 +986,51 @@ dependencies = [ [[package]] name = "tempfile" -version = "3.11.0" +version = "3.19.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b8fcd239983515c23a32fb82099f97d0b11b8c72f654ed659363a95c3dad7a53" +checksum = "7437ac7763b9b123ccf33c338a5cc1bac6f69b45a136c19bdd8a65e3916435bf" dependencies = [ - "cfg-if", "fastrand", + "getrandom 0.3.2", "once_cell", "rustix", - "windows-sys 0.52.0", + "windows-sys", ] [[package]] name = "thiserror" -version = "1.0.63" +version = "1.0.69" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6aaf5339b578ea85b50e080feb250a3e8ae8cfcdff9a461c9ec2904bc923f52" +dependencies = [ + "thiserror-impl 1.0.69", +] + +[[package]] +name = "thiserror" +version = "2.0.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "567b8a2dae586314f7be2a752ec7474332959c6460e02bde30d702a66d488708" +dependencies = [ + "thiserror-impl 2.0.12", +] + +[[package]] +name = "thiserror-impl" +version = "1.0.69" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c0342370b38b6a11b6cc11d6a805569958d54cfa061a29969c3b5ce2ea405724" +checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1" dependencies = [ - "thiserror-impl", + "proc-macro2", + "quote", + "syn", ] [[package]] name = "thiserror-impl" -version = "1.0.63" +version = "2.0.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a4558b58466b9ad7ca0f102865eccc95938dca1a74a856f2b57b6629050da261" +checksum = "7f7cf42b4507d8ea322120659672cf1b9dbb93f8f2d4ecfd6e51350ff5b17a1d" dependencies = [ "proc-macro2", "quote", @@ -1032,9 +1059,9 @@ dependencies = [ [[package]] name = "tracing" -version = "0.1.40" +version = "0.1.41" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c3523ab5a71916ccf420eebdf5521fcef02141234bbc0b8a49f2fdc4544364ef" +checksum = "784e0ac535deb450455cbfa28a6f0df145ea1bb7ae51b821cf5e7927fdcfbdd0" dependencies = [ "pin-project-lite", "tracing-core", @@ -1042,9 +1069,9 @@ dependencies = [ [[package]] name = "tracing-core" -version = "0.1.32" +version = "0.1.33" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c06d3da6113f116aaee68e4d601191614c9053067f9ab7f6edbcb161237daa54" +checksum = "e672c95779cf947c5311f83787af4fa8fffd12fb27e4993211a84bdfd9610f9c" dependencies = [ "once_cell", "valuable", @@ -1052,9 +1079,9 @@ dependencies = [ [[package]] name = "tracing-error" -version = "0.2.0" +version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d686ec1c0f384b1277f097b2f279a2ecc11afe8c133c1aabf036a27cb4cd206e" +checksum = "8b1581020d7a273442f5b45074a6a57d5757ad0a47dac0e9f0bd57b81936f3db" dependencies = [ "tracing", "tracing-subscriber", @@ -1062,9 +1089,9 @@ dependencies = [ [[package]] name = "tracing-subscriber" -version = "0.3.18" +version = "0.3.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ad0f048c97dbd9faa9b7df56362b8ebcaa52adb06b498c050d2f4e32f90a7a8b" +checksum = "e8189decb5ac0fa7bc8b96b7cb9b2701d60d48805aca84a238004d665fcc4008" dependencies = [ "sharded-slab", "thread_local", @@ -1073,15 +1100,15 @@ dependencies = [ [[package]] name = "typenum" -version = "1.17.0" +version = "1.18.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825" +checksum = "1dccffe3ce07af9386bfd29e80c0ab1a8205a2fc34e4bcd40364df902cfa8f3f" [[package]] name = "ui_test" -version = "0.29.1" +version = "0.29.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "14bf63f2931a28a04af0bd24c5f850223d29f3a40afae49ed6ce442a65eb8652" +checksum = "1211b1111c752c73b33073d2958072be08825fd97c9ab4d83444da361a06634b" dependencies = [ "annotate-snippets", "anyhow", @@ -1105,21 +1132,27 @@ dependencies = [ [[package]] name = "unicode-ident" -version = "1.0.12" +version = "1.0.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b" +checksum = "5a5f39404a5da50712a4c1eecf25e90dd62b613502b7e925fd4e4d19b5c96512" [[package]] name = "unicode-width" -version = "0.1.13" +version = "0.1.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0336d538f7abc86d282a4189614dfaa90810dfc2c6f6427eaf88e16311dd225d" +checksum = "7dd6e30e90baa6f72411720665d41d89b9a3d039dc45b8faea1ddd07f617f6af" + +[[package]] +name = "unicode-width" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1fc81956842c57dac11422a97c3b8195a1ff727f06e85c84ed2e8aa277c9a0fd" [[package]] name = "valuable" -version = "0.1.0" +version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "830b7e5d4d90034032940e4ace0d9a9a057e7a45cd94e6c007832e39edb82f6d" +checksum = "ba73ea9cf16a25df0c8caa16c51acb937d5712a8429db78a3ee29d5dcacd3a65" [[package]] name = "version_check" @@ -1135,91 +1168,116 @@ checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" [[package]] name = "wasi" -version = "0.13.3+wasi-0.2.2" +version = "0.14.2+wasi-0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "26816d2e1a4a36a2940b96c5296ce403917633dff8f3440e9b236ed6f6bacad2" +checksum = "9683f9a5a998d873c0d21fcbe3c083009670149a8fab228644b8bd36b2c48cb3" dependencies = [ "wit-bindgen-rt", ] [[package]] -name = "windows-sys" -version = "0.48.0" +name = "wasm-bindgen" +version = "0.2.100" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9" +checksum = "1edc8929d7499fc4e8f0be2262a241556cfc54a0bea223790e71446f2aab1ef5" dependencies = [ - "windows-targets 0.48.5", + "cfg-if", + "once_cell", + "wasm-bindgen-macro", ] [[package]] -name = "windows-sys" -version = "0.52.0" +name = "wasm-bindgen-backend" +version = "0.2.100" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" +checksum = "2f0a0651a5c2bc21487bde11ee802ccaf4c51935d0d3d42a6101f98161700bc6" dependencies = [ - "windows-targets 0.52.6", + "bumpalo", + "log", + "proc-macro2", + "quote", + "syn", + "wasm-bindgen-shared", ] [[package]] -name = "windows-targets" -version = "0.48.5" +name = "wasm-bindgen-macro" +version = "0.2.100" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9a2fa6e2155d7247be68c096456083145c183cbbbc2764150dda45a87197940c" +checksum = "7fe63fc6d09ed3792bd0897b314f53de8e16568c2b3f7982f468c0bf9bd0b407" dependencies = [ - "windows_aarch64_gnullvm 0.48.5", - "windows_aarch64_msvc 0.48.5", - "windows_i686_gnu 0.48.5", - "windows_i686_msvc 0.48.5", - "windows_x86_64_gnu 0.48.5", - "windows_x86_64_gnullvm 0.48.5", - "windows_x86_64_msvc 0.48.5", + "quote", + "wasm-bindgen-macro-support", ] [[package]] -name = "windows-targets" -version = "0.52.6" +name = "wasm-bindgen-macro-support" +version = "0.2.100" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973" +checksum = "8ae87ea40c9f689fc23f209965b6fb8a99ad69aeeb0231408be24920604395de" dependencies = [ - "windows_aarch64_gnullvm 0.52.6", - "windows_aarch64_msvc 0.52.6", - "windows_i686_gnu 0.52.6", - "windows_i686_gnullvm", - "windows_i686_msvc 0.52.6", - "windows_x86_64_gnu 0.52.6", - "windows_x86_64_gnullvm 0.52.6", - "windows_x86_64_msvc 0.52.6", + "proc-macro2", + "quote", + "syn", + "wasm-bindgen-backend", + "wasm-bindgen-shared", ] [[package]] -name = "windows_aarch64_gnullvm" -version = "0.48.5" +name = "wasm-bindgen-shared" +version = "0.2.100" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8" +checksum = "1a05d73b933a847d6cccdda8f838a22ff101ad9bf93e33684f39c1f5f0eece3d" +dependencies = [ + "unicode-ident", +] [[package]] -name = "windows_aarch64_gnullvm" -version = "0.52.6" +name = "web-time" +version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" +checksum = "5a6580f308b1fad9207618087a65c04e7a10bc77e02c8e84e9b00dd4b12fa0bb" +dependencies = [ + "js-sys", + "wasm-bindgen", +] [[package]] -name = "windows_aarch64_msvc" -version = "0.48.5" +name = "windows-sys" +version = "0.59.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc" +checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b" +dependencies = [ + "windows-targets", +] [[package]] -name = "windows_aarch64_msvc" +name = "windows-targets" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" +checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973" +dependencies = [ + "windows_aarch64_gnullvm", + "windows_aarch64_msvc", + "windows_i686_gnu", + "windows_i686_gnullvm", + "windows_i686_msvc", + "windows_x86_64_gnu", + "windows_x86_64_gnullvm", + "windows_x86_64_msvc", +] [[package]] -name = "windows_i686_gnu" -version = "0.48.5" +name = "windows_aarch64_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e" +checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" [[package]] name = "windows_i686_gnu" @@ -1233,48 +1291,24 @@ version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" -[[package]] -name = "windows_i686_msvc" -version = "0.48.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406" - [[package]] name = "windows_i686_msvc" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" -[[package]] -name = "windows_x86_64_gnu" -version = "0.48.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e" - [[package]] name = "windows_x86_64_gnu" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" -[[package]] -name = "windows_x86_64_gnullvm" -version = "0.48.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc" - [[package]] name = "windows_x86_64_gnullvm" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" -[[package]] -name = "windows_x86_64_msvc" -version = "0.48.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538" - [[package]] name = "windows_x86_64_msvc" version = "0.52.6" @@ -1283,48 +1317,27 @@ checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" [[package]] name = "wit-bindgen-rt" -version = "0.33.0" +version = "0.39.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3268f3d866458b787f390cf61f4bbb563b922d091359f9608842999eaee3943c" +checksum = "6f42320e61fe2cfd34354ecb597f86f413484a798ba44a8ca1165c58d42da6c1" dependencies = [ "bitflags", ] [[package]] name = "zerocopy" -version = "0.7.35" +version = "0.8.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1b9b4fd18abc82b8136838da5d50bae7bdea537c574d8dc1a34ed098d6c166f0" +checksum = "2586fea28e186957ef732a5f8b3be2da217d65c5969d4b1e17f973ebbe876879" dependencies = [ - "byteorder", - "zerocopy-derive 0.7.35", -] - -[[package]] -name = "zerocopy" -version = "0.8.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a367f292d93d4eab890745e75a778da40909cab4d6ff8173693812f79c4a2468" -dependencies = [ - "zerocopy-derive 0.8.14", -] - -[[package]] -name = "zerocopy-derive" -version = "0.7.35" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fa4f8080344d4671fb4e831a13ad1e68092748387dfc4f55e356242fae12ce3e" -dependencies = [ - "proc-macro2", - "quote", - "syn", + "zerocopy-derive", ] [[package]] name = "zerocopy-derive" -version = "0.8.14" +version = "0.8.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d3931cb58c62c13adec22e38686b559c86a30565e16ad6e8510a337cedc611e1" +checksum = "a996a8f63c5c4448cd959ac1bab0aaa3306ccfd060472f85943ee0750f0169be" dependencies = [ "proc-macro2", "quote", diff --git a/src/tools/miri/Cargo.toml b/src/tools/miri/Cargo.toml index 5d8c9a8664456..7b7be97aa518f 100644 --- a/src/tools/miri/Cargo.toml +++ b/src/tools/miri/Cargo.toml @@ -22,10 +22,11 @@ getrandom = { version = "0.3", features = ["std"] } rand = "0.9" smallvec = { version = "1.7", features = ["drain_filter"] } aes = { version = "0.8.3", features = ["hazmat"] } -measureme = "11" +measureme = "12" chrono = { version = "0.4.38", default-features = false } chrono-tz = "0.10" -directories = "5" +directories = "6" +bitflags = "2.6" # Copied from `compiler/rustc/Cargo.toml`. # But only for some targets, it fails for others. Rustc configures this in its CI, but we can't @@ -36,11 +37,11 @@ features = ['unprefixed_malloc_on_supported_platforms'] [target.'cfg(unix)'.dependencies] libc = "0.2" -libffi = "3.2.0" +libffi = "4.0.0" libloading = "0.8" [target.'cfg(target_family = "windows")'.dependencies] -windows-sys = { version = "0.52", features = [ +windows-sys = { version = "0.59", features = [ "Win32_Foundation", "Win32_System_IO", "Win32_Storage_FileSystem", @@ -64,6 +65,7 @@ harness = false [features] default = ["stack-cache"] +genmc = [] stack-cache = [] stack-cache-consistency-check = ["stack-cache"] diff --git a/src/tools/miri/README.md b/src/tools/miri/README.md index 201aa1f538695..a78cc9d931980 100644 --- a/src/tools/miri/README.md +++ b/src/tools/miri/README.md @@ -277,22 +277,15 @@ Try running `cargo miri clean`. Miri adds its own set of `-Z` flags, which are usually set via the `MIRIFLAGS` environment variable. We first document the most relevant and most commonly used flags: -* `-Zmiri-address-reuse-rate=` changes the probability that a freed *non-stack* allocation - will be added to the pool for address reuse, and the probability that a new *non-stack* allocation - will be taken from the pool. Stack allocations never get added to or taken from the pool. The - default is `0.5`. -* `-Zmiri-address-reuse-cross-thread-rate=` changes the probability that an allocation which - attempts to reuse a previously freed block of memory will also consider blocks freed by *other - threads*. The default is `0.1`, which means by default, in 90% of the cases where an address reuse - attempt is made, only addresses from the same thread will be considered. Reusing an address from - another thread induces synchronization between those threads, which can mask data races and weak - memory bugs. -* `-Zmiri-compare-exchange-weak-failure-rate=` changes the failure rate of - `compare_exchange_weak` operations. The default is `0.8` (so 4 out of 5 weak ops will fail). - You can change it to any value between `0.0` and `1.0`, where `1.0` means it - will always fail and `0.0` means it will never fail. Note that setting it to - `1.0` will likely cause hangs, since it means programs using - `compare_exchange_weak` cannot make progress. +* `-Zmiri-deterministic-concurrency` makes Miri's concurrency-related behavior fully deterministic. + Strictly speaking, Miri is always fully deterministic when isolation is enabled (the default + mode), but this determinism is achieved by using an RNG with a fixed seed. Seemingly harmless + changes to the program, or just running it for a different target architecture, can thus lead to + completely different program behavior down the line. This flag disables the use of an RNG for + concurrency-related decisions. Therefore, Miri cannot find bugs that only occur under some + specific circumstances, but Miri's behavior will also be more stable across versions and targets. + This is equivalent to `-Zmiri-fixed-schedule -Zmiri-compare-exchange-weak-failure-rate=0.0 + -Zmiri-address-reuse-cross-thread-rate=0.0 -Zmiri-disable-weak-memory-emulation`. * `-Zmiri-disable-isolation` disables host isolation. As a consequence, the program has access to host resources such as environment variables, file systems, and randomness. @@ -334,9 +327,6 @@ environment variable. We first document the most relevant and most commonly used This will necessarily miss some bugs as those operations are not efficiently and accurately implementable in a sanitizer, but it will only miss bugs that concern memory/pointers which is subject to these operations. -* `-Zmiri-preemption-rate` configures the probability that at the end of a basic block, the active - thread will be preempted. The default is `0.01` (i.e., 1%). Setting this to `0` disables - preemption. * `-Zmiri-report-progress` makes Miri print the current stacktrace every now and then, so you can tell what it is doing when a program just keeps running. You can customize how frequently the report is printed via `-Zmiri-report-progress=`, which prints the report every N basic @@ -365,6 +355,22 @@ The remaining flags are for advanced use only, and more likely to change or be r Some of these are **unsound**, which means they can lead to Miri failing to detect cases of undefined behavior in a program. +* `-Zmiri-address-reuse-rate=` changes the probability that a freed *non-stack* allocation + will be added to the pool for address reuse, and the probability that a new *non-stack* allocation + will be taken from the pool. Stack allocations never get added to or taken from the pool. The + default is `0.5`. +* `-Zmiri-address-reuse-cross-thread-rate=` changes the probability that an allocation which + attempts to reuse a previously freed block of memory will also consider blocks freed by *other + threads*. The default is `0.1`, which means by default, in 90% of the cases where an address reuse + attempt is made, only addresses from the same thread will be considered. Reusing an address from + another thread induces synchronization between those threads, which can mask data races and weak + memory bugs. +* `-Zmiri-compare-exchange-weak-failure-rate=` changes the failure rate of + `compare_exchange_weak` operations. The default is `0.8` (so 4 out of 5 weak ops will fail). + You can change it to any value between `0.0` and `1.0`, where `1.0` means it + will always fail and `0.0` means it will never fail. Note that setting it to + `1.0` will likely cause hangs, since it means programs using + `compare_exchange_weak` cannot make progress. * `-Zmiri-disable-alignment-check` disables checking pointer alignment, so you can focus on other failures, but it means Miri can miss bugs in your program. Using this flag is **unsound**. @@ -383,6 +389,10 @@ to Miri failing to detect cases of undefined behavior in a program. this flag is **unsound**. * `-Zmiri-disable-weak-memory-emulation` disables the emulation of some C++11 weak memory effects. +* `-Zmiri-fixed-schedule` disables preemption (like `-Zmiri-preemption-rate=0.0`) and furthermore + disables the randomization of the next thread to be picked, instead fixing a round-robin schedule. + Note however that other aspects of Miri's concurrency behavior are still randomize; use + `-Zmiri-deterministic-concurrency` to disable them all. * `-Zmiri-native-lib=` is an experimental flag for providing support for calling native functions from inside the interpreter via FFI. The flag is supported only on Unix systems. Functions not provided by that file are still executed via the usual Miri shims. @@ -412,6 +422,10 @@ to Miri failing to detect cases of undefined behavior in a program. without an explicit value), `none` means it never recurses, `scalar` means it only recurses for types where we would also emit `noalias` annotations in the generated LLVM IR (types passed as individual scalars or pairs of scalars). Setting this to `none` is **unsound**. +* `-Zmiri-preemption-rate` configures the probability that at the end of a basic block, the active + thread will be preempted. The default is `0.01` (i.e., 1%). Setting this to `0` disables + preemption. Note that even without preemption, the schedule is still non-deterministic: + if a thread blocks or yields, the next thread is chosen randomly. * `-Zmiri-provenance-gc=` configures how often the pointer provenance garbage collector runs. The default is to search for and remove unreachable provenance once every `10000` basic blocks. Setting this to `0` disables the garbage collector, which causes some programs to have explosive memory @@ -443,9 +457,6 @@ to Miri failing to detect cases of undefined behavior in a program. casts are not supported in this mode, but that may change in the future. * `-Zmiri-force-page-size=` overrides the default page size for an architecture, in multiples of 1k. `4` is default for most targets. This value should always be a power of 2 and nonzero. -* `-Zmiri-unique-is-unique` performs additional aliasing checks for `core::ptr::Unique` to ensure - that it could theoretically be considered `noalias`. This flag is experimental and has - an effect only when used with `-Zmiri-tree-borrows`. [function ABI]: https://doc.rust-lang.org/reference/items/functions.html#extern-function-qualifier @@ -489,7 +500,7 @@ Miri knows where it is supposed to start execution: ```rust #[cfg(miri)] -#[no_mangle] +#[unsafe(no_mangle)] fn miri_start(argc: isize, argv: *const *const u8) -> isize { // Call the actual start function that your project implements, based on your target's conventions. } @@ -565,6 +576,7 @@ Definite bugs found: * [Occasional memory leak in `std::mpsc` channels](https://github.com/rust-lang/rust/issues/121582) (original code in [crossbeam](https://github.com/crossbeam-rs/crossbeam/pull/1084)) * [Weak-memory-induced memory leak in Windows thread-local storage](https://github.com/rust-lang/rust/pull/124281) * [A bug in the new `RwLock::downgrade` implementation](https://rust-lang.zulipchat.com/#narrow/channel/269128-miri/topic/Miri.20error.20library.20test) (caught by Miri before it landed in the Rust repo) +* [Mockall reading unintialized memory when mocking `std::io::Read::read`, even if all expectations are satisfied](https://github.com/asomers/mockall/issues/647) (caught by Miri running Tokio's test suite) Violations of [Stacked Borrows] found that are likely bugs (but Stacked Borrows is currently just an experiment): diff --git a/src/tools/miri/bench-cargo-miri/big-allocs/src/main.rs b/src/tools/miri/bench-cargo-miri/big-allocs/src/main.rs index a1c1708cf3baa..89797c4982067 100644 --- a/src/tools/miri/bench-cargo-miri/big-allocs/src/main.rs +++ b/src/tools/miri/bench-cargo-miri/big-allocs/src/main.rs @@ -7,7 +7,7 @@ fn main() { // We can't use too big of an allocation or this code will encounter an allocation failure in // CI. Since the allocation can't be huge, we need to do a few iterations so that the effect // we're trying to measure is clearly visible above the interpreter's startup time. - for _ in 0..10 { + for _ in 0..20 { drop(Vec::::with_capacity(512 * 1024 * 1024)); } } diff --git a/src/tools/miri/bench-cargo-miri/mse/src/main.rs b/src/tools/miri/bench-cargo-miri/mse/src/main.rs index 06d5487d1d4ea..69c7c39cdd7e2 100644 --- a/src/tools/miri/bench-cargo-miri/mse/src/main.rs +++ b/src/tools/miri/bench-cargo-miri/mse/src/main.rs @@ -13,7 +13,7 @@ fn read_i16(buffer: &[u8], index: usize) -> i16 { const SIZE: usize = size_of::(); let mut bytes: [u8; SIZE] = [0u8; SIZE]; bytes.copy_from_slice(&buffer[(index * SIZE)..(index * SIZE + SIZE)]); - unsafe { std::mem::transmute(bytes) } + i16::from_ne_bytes(bytes) } fn mse(samples: usize, frame_buf: &[i16], buf_ref: &[u8]) -> f64 { diff --git a/src/tools/miri/cargo-miri/Cargo.lock b/src/tools/miri/cargo-miri/Cargo.lock index b8e08d39a8611..bd4ca2860f35a 100644 --- a/src/tools/miri/cargo-miri/Cargo.lock +++ b/src/tools/miri/cargo-miri/Cargo.lock @@ -4,21 +4,21 @@ version = 4 [[package]] name = "anyhow" -version = "1.0.86" +version = "1.0.97" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b3d1d046238990b9cf5bcde22a3fb3584ee5cf65fb2765f454ed428c7a0063da" +checksum = "dcfed56ad506cb2c684a14971b8861fdc3baaaae314b9e5f9bb532cbe3ba7a4f" [[package]] name = "bitflags" -version = "2.6.0" +version = "2.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b048fb63fd8b5923fc5aa7b340d8e156aec7ec02f0c78fa8a6ddc2613f6f71de" +checksum = "5c8214115b7bf84099f1309324e63141d4c5d7cc26862f97a0a857dbefe165bd" [[package]] name = "camino" -version = "1.1.7" +version = "1.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e0ec6b951b160caa93cc0c7b209e5a3bff7aae9062213451ac99493cd844c239" +checksum = "8b96ec4966b5813e2c0507c1f86115c8c5abaadc3980879c3424042a02fd1ad3" dependencies = [ "serde", ] @@ -38,18 +38,18 @@ dependencies = [ [[package]] name = "cargo-platform" -version = "0.1.8" +version = "0.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "24b1f0365a6c6bb4020cd05806fd0d33c44d38046b8bd7f0e40814b9763cabfc" +checksum = "e35af189006b9c0f00a064685c727031e3ed2d8020f7ba284d78cc2671bd36ea" dependencies = [ "serde", ] [[package]] name = "cargo_metadata" -version = "0.18.1" +version = "0.19.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2d886547e41f740c616ae73108f6eb70afe6d940c7bc697cb30f13daec073037" +checksum = "dd5eb614ed4c27c5d706420e4320fbe3216ab31fa1c33cd8246ac36dae4479ba" dependencies = [ "camino", "cargo-platform", @@ -67,40 +67,40 @@ checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" [[package]] name = "directories" -version = "5.0.1" +version = "6.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9a49173b84e034382284f27f1af4dcbbd231ffa358c0fe316541a7337f376a35" +checksum = "16f5094c54661b38d03bd7e50df373292118db60b585c08a411c6d840017fe7d" dependencies = [ "dirs-sys", ] [[package]] name = "dirs-sys" -version = "0.4.1" +version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "520f05a5cbd335fae5a99ff7a6ab8627577660ee5cfd6a94a6a929b52ff0321c" +checksum = "e01a3366d27ee9890022452ee61b2b63a67e6f13f58900b651ff5665f0bb1fab" dependencies = [ "libc", "option-ext", "redox_users", - "windows-sys 0.48.0", + "windows-sys", ] [[package]] name = "errno" -version = "0.3.9" +version = "0.3.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "534c5cf6194dfab3db3242765c03bbe257cf92f22b38f6bc0c58d59108a820ba" +checksum = "976dd42dc7e85965fe702eb8164f21f450704bdde31faefd6471dba214cb594e" dependencies = [ "libc", - "windows-sys 0.52.0", + "windows-sys", ] [[package]] name = "fastrand" -version = "2.1.0" +version = "2.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9fc0510504f03c51ada170672ac806f1f105a88aa97a5281117e1ddc3368e51a" +checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be" [[package]] name = "getrandom" @@ -110,20 +110,32 @@ checksum = "c4567c8db10ae91089c99af84c68c38da3ec2f087c3f82960bcdbf3656b6f4d7" dependencies = [ "cfg-if", "libc", - "wasi", + "wasi 0.11.0+wasi-snapshot-preview1", +] + +[[package]] +name = "getrandom" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "73fea8450eea4bac3940448fb7ae50d91f034f941199fcd9d909a5a07aa455f0" +dependencies = [ + "cfg-if", + "libc", + "r-efi", + "wasi 0.14.2+wasi-0.2.4", ] [[package]] name = "itoa" -version = "1.0.11" +version = "1.0.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "49f1f14873335454500d59611f1cf4a4b0f786f9ac11f4312a78e4cf2566695b" +checksum = "4a5f13b858c8d314ee3e8f639011f7ccefe71f97f96e50151fb991f267928e2c" [[package]] name = "libc" -version = "0.2.155" +version = "0.2.171" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "97b3888a4aecf77e811145cadf6eef5901f4782c53886191b2f693f24761847c" +checksum = "c19937216e9d3aa9956d9bb8dfc0b0c8beb6058fc4f7a4dc4d850edf86a237d6" [[package]] name = "libredox" @@ -137,9 +149,9 @@ dependencies = [ [[package]] name = "linux-raw-sys" -version = "0.4.14" +version = "0.9.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "78b3ae25bc7c8c38cec158d1f2757ee79e9b3740fbc7ccf0e59e4b08d793fa89" +checksum = "fe7db12097d22ec582439daf8618b8fdd1a7bef6270e9af3b1ebcd30893cf413" [[package]] name = "memchr" @@ -149,9 +161,9 @@ checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3" [[package]] name = "once_cell" -version = "1.19.0" +version = "1.21.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92" +checksum = "42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d" [[package]] name = "option-ext" @@ -161,29 +173,35 @@ checksum = "04744f49eae99ab78e0d5c0b603ab218f515ea8cfe5a456d7629ad883a3b6e7d" [[package]] name = "proc-macro2" -version = "1.0.86" +version = "1.0.94" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5e719e8df665df0d1c8fbfd238015744736151d4445ec0836b8e628aae103b77" +checksum = "a31971752e70b8b2686d7e46ec17fb38dad4051d94024c88df49b667caea9c84" dependencies = [ "unicode-ident", ] [[package]] name = "quote" -version = "1.0.36" +version = "1.0.40" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0fa76aaf39101c457836aec0ce2316dbdc3ab723cdda1c6bd4e6ad4208acaca7" +checksum = "1885c039570dc00dcb4ff087a89e185fd56bae234ddc7f056a945bf36467248d" dependencies = [ "proc-macro2", ] +[[package]] +name = "r-efi" +version = "5.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "74765f6d916ee2faa39bc8e68e4f3ed8949b48cccdac59983d287a7cb71ce9c5" + [[package]] name = "redox_users" -version = "0.4.5" +version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bd283d9651eeda4b2a83a43c1c91b266c40fd76ecd39a50a8c630ae69dc72891" +checksum = "dd6f9d3d47bdd2ad6945c5015a226ec6155d0bcdfd8f7cd29f86b71f8de99d2b" dependencies = [ - "getrandom", + "getrandom 0.2.15", "libredox", "thiserror", ] @@ -202,37 +220,37 @@ dependencies = [ [[package]] name = "rustc_tools_util" -version = "0.4.0" +version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3316159ab19e19d1065ecc49278e87f767a9dae9fae80348d2b4d4fa4ae02d4d" +checksum = "a3b75158011a63889ba12084cf1224baad7bcad50f6ee7c842f772b74aa148ed" [[package]] name = "rustc_version" -version = "0.4.0" +version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bfa0f585226d2e68097d4f95d113b15b83a82e819ab25717ec0590d9584ef366" +checksum = "cfcb3a22ef46e85b45de6ee7e79d063319ebb6594faafcf1c225ea92ab6e9b92" dependencies = [ "semver", ] [[package]] name = "rustix" -version = "0.38.34" +version = "1.0.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "70dc5ec042f7a43c4a73241207cecc9873a06d45debb38b329f8541d85c2730f" +checksum = "d97817398dd4bb2e6da002002db259209759911da105da92bec29ccb12cf58bf" dependencies = [ "bitflags", "errno", "libc", "linux-raw-sys", - "windows-sys 0.52.0", + "windows-sys", ] [[package]] name = "ryu" -version = "1.0.18" +version = "1.0.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f3cb5ba0dc43242ce17de99c180e96db90b235b8a9fdc9543c96d2209116bd9f" +checksum = "28d3b2b1366ec20994f1fd18c3c594f05c5dd4bc44d8bb0c1c632c8d6829481f" [[package]] name = "same-file" @@ -245,27 +263,27 @@ dependencies = [ [[package]] name = "semver" -version = "1.0.23" +version = "1.0.26" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "61697e0a1c7e512e84a621326239844a24d8207b4669b41bc18b32ea5cbf988b" +checksum = "56e6fa9c48d24d85fb3de5ad847117517440f6beceb7798af16b4a87d616b8d0" dependencies = [ "serde", ] [[package]] name = "serde" -version = "1.0.204" +version = "1.0.219" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bc76f558e0cbb2a839d37354c575f1dc3fdc6546b5be373ba43d95f231bf7c12" +checksum = "5f0e2c6ed6606019b4e29e69dbaba95b11854410e5347d525002456dbbb786b6" dependencies = [ "serde_derive", ] [[package]] name = "serde_derive" -version = "1.0.204" +version = "1.0.219" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e0cd7e117be63d3c3678776753929474f3b04a43a080c744d6b0ae2a8c28e222" +checksum = "5b0276cf7f2c73365f7157c8123c21cd9a50fbbd844757af28ca1f5925fc2a00" dependencies = [ "proc-macro2", "quote", @@ -274,9 +292,9 @@ dependencies = [ [[package]] name = "serde_json" -version = "1.0.122" +version = "1.0.140" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "784b6203951c57ff748476b126ccb5e8e2959a5c19e5c617ab1956be3dbc68da" +checksum = "20068b6e96dc6c9bd23e01df8827e6c7e1f2fddd43c21810382803c136b99373" dependencies = [ "itoa", "memchr", @@ -286,9 +304,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.72" +version = "2.0.100" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dc4b9b9bf2add8093d3f2c0204471e951b2285580335de42f9d2534f3ae7a8af" +checksum = "b09a44accad81e1ba1cd74a32461ba89dee89095ba17b32f5d03683b1b1fc2a0" dependencies = [ "proc-macro2", "quote", @@ -297,31 +315,31 @@ dependencies = [ [[package]] name = "tempfile" -version = "3.11.0" +version = "3.19.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b8fcd239983515c23a32fb82099f97d0b11b8c72f654ed659363a95c3dad7a53" +checksum = "7437ac7763b9b123ccf33c338a5cc1bac6f69b45a136c19bdd8a65e3916435bf" dependencies = [ - "cfg-if", "fastrand", + "getrandom 0.3.2", "once_cell", "rustix", - "windows-sys 0.52.0", + "windows-sys", ] [[package]] name = "thiserror" -version = "1.0.63" +version = "2.0.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c0342370b38b6a11b6cc11d6a805569958d54cfa061a29969c3b5ce2ea405724" +checksum = "567b8a2dae586314f7be2a752ec7474332959c6460e02bde30d702a66d488708" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" -version = "1.0.63" +version = "2.0.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a4558b58466b9ad7ca0f102865eccc95938dca1a74a856f2b57b6629050da261" +checksum = "7f7cf42b4507d8ea322120659672cf1b9dbb93f8f2d4ecfd6e51350ff5b17a1d" dependencies = [ "proc-macro2", "quote", @@ -330,9 +348,9 @@ dependencies = [ [[package]] name = "unicode-ident" -version = "1.0.12" +version = "1.0.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b" +checksum = "5a5f39404a5da50712a4c1eecf25e90dd62b613502b7e925fd4e4d19b5c96512" [[package]] name = "walkdir" @@ -351,30 +369,21 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" [[package]] -name = "winapi-util" -version = "0.1.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb" -dependencies = [ - "windows-sys 0.59.0", -] - -[[package]] -name = "windows-sys" -version = "0.48.0" +name = "wasi" +version = "0.14.2+wasi-0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9" +checksum = "9683f9a5a998d873c0d21fcbe3c083009670149a8fab228644b8bd36b2c48cb3" dependencies = [ - "windows-targets 0.48.5", + "wit-bindgen-rt", ] [[package]] -name = "windows-sys" -version = "0.52.0" +name = "winapi-util" +version = "0.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" +checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb" dependencies = [ - "windows-targets 0.52.6", + "windows-sys", ] [[package]] @@ -383,22 +392,7 @@ version = "0.59.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b" dependencies = [ - "windows-targets 0.52.6", -] - -[[package]] -name = "windows-targets" -version = "0.48.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9a2fa6e2155d7247be68c096456083145c183cbbbc2764150dda45a87197940c" -dependencies = [ - "windows_aarch64_gnullvm 0.48.5", - "windows_aarch64_msvc 0.48.5", - "windows_i686_gnu 0.48.5", - "windows_i686_msvc 0.48.5", - "windows_x86_64_gnu 0.48.5", - "windows_x86_64_gnullvm 0.48.5", - "windows_x86_64_msvc 0.48.5", + "windows-targets", ] [[package]] @@ -407,46 +401,28 @@ version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973" dependencies = [ - "windows_aarch64_gnullvm 0.52.6", - "windows_aarch64_msvc 0.52.6", - "windows_i686_gnu 0.52.6", + "windows_aarch64_gnullvm", + "windows_aarch64_msvc", + "windows_i686_gnu", "windows_i686_gnullvm", - "windows_i686_msvc 0.52.6", - "windows_x86_64_gnu 0.52.6", - "windows_x86_64_gnullvm 0.52.6", - "windows_x86_64_msvc 0.52.6", + "windows_i686_msvc", + "windows_x86_64_gnu", + "windows_x86_64_gnullvm", + "windows_x86_64_msvc", ] -[[package]] -name = "windows_aarch64_gnullvm" -version = "0.48.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8" - [[package]] name = "windows_aarch64_gnullvm" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" -[[package]] -name = "windows_aarch64_msvc" -version = "0.48.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc" - [[package]] name = "windows_aarch64_msvc" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" -[[package]] -name = "windows_i686_gnu" -version = "0.48.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e" - [[package]] name = "windows_i686_gnu" version = "0.52.6" @@ -459,36 +435,18 @@ version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" -[[package]] -name = "windows_i686_msvc" -version = "0.48.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406" - [[package]] name = "windows_i686_msvc" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" -[[package]] -name = "windows_x86_64_gnu" -version = "0.48.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e" - [[package]] name = "windows_x86_64_gnu" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" -[[package]] -name = "windows_x86_64_gnullvm" -version = "0.48.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc" - [[package]] name = "windows_x86_64_gnullvm" version = "0.52.6" @@ -497,12 +455,15 @@ checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" [[package]] name = "windows_x86_64_msvc" -version = "0.48.5" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538" +checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" [[package]] -name = "windows_x86_64_msvc" -version = "0.52.6" +name = "wit-bindgen-rt" +version = "0.39.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" +checksum = "6f42320e61fe2cfd34354ecb597f86f413484a798ba44a8ca1165c58d42da6c1" +dependencies = [ + "bitflags", +] diff --git a/src/tools/miri/cargo-miri/Cargo.toml b/src/tools/miri/cargo-miri/Cargo.toml index de0988d6d1cd7..ed142b0e2114d 100644 --- a/src/tools/miri/cargo-miri/Cargo.toml +++ b/src/tools/miri/cargo-miri/Cargo.toml @@ -14,10 +14,10 @@ test = false # we have no unit tests doctest = false # and no doc tests [dependencies] -directories = "5" +directories = "6" rustc_version = "0.4" serde_json = "1.0.40" -cargo_metadata = "0.18.0" +cargo_metadata = "0.19" rustc-build-sysroot = "0.5.4" # Enable some feature flags that dev-dependencies need but dependencies diff --git a/src/tools/miri/cargo-miri/src/main.rs b/src/tools/miri/cargo-miri/src/main.rs index 7d9f77f3752d9..322ef0a6c2aa8 100644 --- a/src/tools/miri/cargo-miri/src/main.rs +++ b/src/tools/miri/cargo-miri/src/main.rs @@ -53,7 +53,7 @@ fn main() { // with `RustcPhase::Rustdoc`. There we perform a check-build (needed to get the expected // build failures for `compile_fail` doctests) and then store a JSON file with the // information needed to run this test. - // - We also set `--runtool` to ourselves, which ends up in `phase_runner` with + // - We also set `--test-runtool` to ourselves, which ends up in `phase_runner` with // `RunnerPhase::Rustdoc`. There we parse the JSON file written in `phase_rustc` and invoke // the Miri driver for interpretation. diff --git a/src/tools/miri/cargo-miri/src/phases.rs b/src/tools/miri/cargo-miri/src/phases.rs index 71ea07f34636a..cb62e12413c84 100644 --- a/src/tools/miri/cargo-miri/src/phases.rs +++ b/src/tools/miri/cargo-miri/src/phases.rs @@ -666,8 +666,8 @@ pub fn phase_rustdoc(mut args: impl Iterator) { if arg == "--extern" { // Patch --extern arguments to use *.rmeta files, since phase_cargo_rustc only creates stub *.rlib files. forward_patched_extern_arg(&mut args, &mut cmd); - } else if arg == "--runtool" { - // An existing --runtool flag indicates cargo is running in cross-target mode, which we don't support. + } else if arg == "--test-runtool" { + // An existing --test-runtool flag indicates cargo is running in cross-target mode, which we don't support. // Note that this is only passed when cargo is run with the unstable -Zdoctest-xcompile flag; // otherwise, we won't be called as rustdoc at all. show_error!("cross-interpreting doctests is not currently supported by Miri."); @@ -693,8 +693,8 @@ pub fn phase_rustdoc(mut args: impl Iterator) { // to let phase_cargo_rustc know to expect that. We'll use this environment variable as a flag: cmd.env("MIRI_CALLED_FROM_RUSTDOC", "1"); - // The `--test-builder` and `--runtool` arguments are unstable rustdoc features, - // which are disabled by default. We first need to enable them explicitly: + // The `--test-builder` is an unstable rustdoc features, + // which is disabled by default. We first need to enable them explicitly: cmd.arg("-Zunstable-options"); // rustdoc needs to know the right sysroot. @@ -705,7 +705,7 @@ pub fn phase_rustdoc(mut args: impl Iterator) { // Make rustdoc call us back. let cargo_miri_path = env::current_exe().expect("current executable path invalid"); cmd.arg("--test-builder").arg(&cargo_miri_path); // invoked by forwarding most arguments - cmd.arg("--runtool").arg(&cargo_miri_path); // invoked with just a single path argument + cmd.arg("--test-runtool").arg(&cargo_miri_path); // invoked with just a single path argument debug_cmd("[cargo-miri rustdoc]", verbose, &cmd); exec(cmd) diff --git a/src/tools/miri/ci/ci.sh b/src/tools/miri/ci/ci.sh index 7155d692ee5c9..755e02d02eca1 100755 --- a/src/tools/miri/ci/ci.sh +++ b/src/tools/miri/ci/ci.sh @@ -164,9 +164,9 @@ case $HOST_TARGET in # Partially supported targets (tier 2) BASIC="empty_main integer heap_alloc libc-mem vec string btreemap" # ensures we have the basics: pre-main code, system allocator UNIX="hello panic/panic panic/unwind concurrency/simple atomic libc-mem libc-misc libc-random env num_cpus" # the things that are very similar across all Unixes, and hence easily supported there - TEST_TARGET=x86_64-unknown-freebsd run_tests_minimal $BASIC $UNIX time hashmap random threadname pthread fs libc-pipe - TEST_TARGET=i686-unknown-freebsd run_tests_minimal $BASIC $UNIX time hashmap random threadname pthread fs libc-pipe - TEST_TARGET=aarch64-linux-android run_tests_minimal $BASIC $UNIX time hashmap random sync concurrency thread epoll eventfd + TEST_TARGET=x86_64-unknown-freebsd run_tests_minimal $BASIC $UNIX time hashmap random thread sync concurrency fs libc-pipe + TEST_TARGET=i686-unknown-freebsd run_tests_minimal $BASIC $UNIX time hashmap random thread sync concurrency fs libc-pipe + TEST_TARGET=aarch64-linux-android run_tests_minimal $BASIC $UNIX time hashmap random thread sync concurrency epoll eventfd TEST_TARGET=wasm32-wasip2 run_tests_minimal $BASIC wasm TEST_TARGET=wasm32-unknown-unknown run_tests_minimal no_std empty_main wasm # this target doesn't really have std TEST_TARGET=thumbv7em-none-eabihf run_tests_minimal no_std diff --git a/src/tools/miri/miri-script/Cargo.lock b/src/tools/miri/miri-script/Cargo.lock index 25c6f817575e2..3494a241ec50f 100644 --- a/src/tools/miri/miri-script/Cargo.lock +++ b/src/tools/miri/miri-script/Cargo.lock @@ -38,30 +38,31 @@ version = "1.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "79947af37f4177cfead1110013d678905c37501914fba0efea834c3fe9a8d60c" dependencies = [ - "windows-sys 0.59.0", + "windows-sys", ] [[package]] name = "anstyle-wincon" -version = "3.0.6" +version = "3.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2109dbce0e72be3ec00bed26e6a7479ca384ad226efdd66db8fa2e3a38c83125" +checksum = "ca3534e77181a9cc07539ad51f2141fe32f6c3ffd4df76db8ad92346b003ae4e" dependencies = [ "anstyle", - "windows-sys 0.59.0", + "once_cell", + "windows-sys", ] [[package]] name = "anyhow" -version = "1.0.80" +version = "1.0.97" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5ad32ce52e4161730f7098c077cd2ed6229b5804ccf99e5366be1ab72a98b4e1" +checksum = "dcfed56ad506cb2c684a14971b8861fdc3baaaae314b9e5f9bb532cbe3ba7a4f" [[package]] name = "bitflags" -version = "2.4.2" +version = "2.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ed570934406eb16438a4e976b1b4500774099c13b8cb96eec99f620f05090ddf" +checksum = "5c8214115b7bf84099f1309324e63141d4c5d7cc26862f97a0a857dbefe165bd" [[package]] name = "cfg-if" @@ -71,9 +72,9 @@ checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" [[package]] name = "clap" -version = "4.5.23" +version = "4.5.35" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3135e7ec2ef7b10c6ed8950f0f792ed96ee093fa088608f1c76e569722700c84" +checksum = "d8aa86934b44c19c50f87cc2790e19f54f7a67aedb64101c2e1a2e5ecfb73944" dependencies = [ "clap_builder", "clap_derive", @@ -81,9 +82,9 @@ dependencies = [ [[package]] name = "clap_builder" -version = "4.5.23" +version = "4.5.35" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "30582fc632330df2bd26877bde0c1f4470d57c582bbc070376afcd04d8cb4838" +checksum = "2414dbb2dd0695280da6ea9261e327479e9d37b0630f6b53ba2a11c60c679fd9" dependencies = [ "anstream", "anstyle", @@ -93,9 +94,9 @@ dependencies = [ [[package]] name = "clap_derive" -version = "4.5.18" +version = "4.5.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4ac6a0c7b1a9e9a5186361f67dfa1b88213572f427fb9ab038efb2bd8c582dab" +checksum = "09176aae279615badda0765c0c0b3f6ed53f4709118af73cf4655d85d1530cd7" dependencies = [ "heck", "proc-macro2", @@ -117,78 +118,87 @@ checksum = "5b63caa9aa9397e2d9480a9b13673856c78d8ac123288526c37d7839f2a86990" [[package]] name = "directories" -version = "5.0.1" +version = "6.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9a49173b84e034382284f27f1af4dcbbd231ffa358c0fe316541a7337f376a35" +checksum = "16f5094c54661b38d03bd7e50df373292118db60b585c08a411c6d840017fe7d" dependencies = [ "dirs-sys", ] [[package]] name = "dirs-sys" -version = "0.4.1" +version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "520f05a5cbd335fae5a99ff7a6ab8627577660ee5cfd6a94a6a929b52ff0321c" +checksum = "e01a3366d27ee9890022452ee61b2b63a67e6f13f58900b651ff5665f0bb1fab" dependencies = [ "libc", "option-ext", "redox_users", - "windows-sys 0.48.0", + "windows-sys", ] [[package]] name = "dunce" -version = "1.0.4" +version = "1.0.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "56ce8c6da7551ec6c462cbaf3bfbc75131ebbfa1c944aeaa9dab51ca1c5f0c3b" +checksum = "92773504d58c093f6de2459af4af33faa518c13451eb8f2b5698ed3d36e7c813" [[package]] name = "either" -version = "1.10.0" +version = "1.15.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "48c757948c5ede0e46177b7add2e67155f70e33c07fea8284df6576da70b3719" + +[[package]] +name = "env_home" +version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "11157ac094ffbdde99aa67b23417ebdd801842852b500e395a45a9c0aac03e4a" +checksum = "c7f84e12ccf0a7ddc17a6c41c93326024c42920d7ee630d04950e6926645c0fe" [[package]] name = "errno" -version = "0.3.9" +version = "0.3.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "534c5cf6194dfab3db3242765c03bbe257cf92f22b38f6bc0c58d59108a820ba" +checksum = "976dd42dc7e85965fe702eb8164f21f450704bdde31faefd6471dba214cb594e" dependencies = [ "libc", - "windows-sys 0.52.0", + "windows-sys", ] [[package]] name = "fastrand" -version = "2.1.1" +version = "2.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e8c02a5121d4ea3eb16a80748c74f5549a5665e4c21333c6098f283870fbdea6" +checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be" [[package]] name = "getrandom" -version = "0.2.12" +version = "0.2.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "190092ea657667030ac6a35e305e62fc4dd69fd98ac98631e5d3a2b1575a12b5" +checksum = "c4567c8db10ae91089c99af84c68c38da3ec2f087c3f82960bcdbf3656b6f4d7" dependencies = [ "cfg-if", "libc", - "wasi", + "wasi 0.11.0+wasi-snapshot-preview1", ] [[package]] -name = "heck" -version = "0.5.0" +name = "getrandom" +version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea" +checksum = "73fea8450eea4bac3940448fb7ae50d91f034f941199fcd9d909a5a07aa455f0" +dependencies = [ + "cfg-if", + "libc", + "r-efi", + "wasi 0.14.2+wasi-0.2.4", +] [[package]] -name = "home" -version = "0.5.9" +name = "heck" +version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e3d1354bf6b7235cb4a0576c2619fd4ed18183f689b12b006a0ee7329eeff9a5" -dependencies = [ - "windows-sys 0.52.0", -] +checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea" [[package]] name = "is_terminal_polyfill" @@ -198,24 +208,24 @@ checksum = "7943c866cc5cd64cbc25b2e01621d07fa8eb2a1a23160ee81ce38704e97b8ecf" [[package]] name = "itertools" -version = "0.11.0" +version = "0.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b1c173a5686ce8bfa551b3563d0c2170bf24ca44da99c7ca4bfdab5418c3fe57" +checksum = "2b192c782037fadd9cfa75548310488aabdbf3d2da73885b31bd0abd03351285" dependencies = [ "either", ] [[package]] name = "itoa" -version = "1.0.11" +version = "1.0.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "49f1f14873335454500d59611f1cf4a4b0f786f9ac11f4312a78e4cf2566695b" +checksum = "4a5f13b858c8d314ee3e8f639011f7ccefe71f97f96e50151fb991f267928e2c" [[package]] name = "libc" -version = "0.2.159" +version = "0.2.171" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "561d97a539a36e26a9a5fad1ea11a3039a67714694aaa379433e580854bc3dc5" +checksum = "c19937216e9d3aa9956d9bb8dfc0b0c8beb6058fc4f7a4dc4d850edf86a237d6" [[package]] name = "libredox" @@ -229,9 +239,15 @@ dependencies = [ [[package]] name = "linux-raw-sys" -version = "0.4.14" +version = "0.4.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d26c52dbd32dccf2d10cac7725f8eae5296885fb5703b261f7d0a0739ec807ab" + +[[package]] +name = "linux-raw-sys" +version = "0.9.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "78b3ae25bc7c8c38cec158d1f2757ee79e9b3740fbc7ccf0e59e4b08d793fa89" +checksum = "fe7db12097d22ec582439daf8618b8fdd1a7bef6270e9af3b1ebcd30893cf413" [[package]] name = "memchr" @@ -262,9 +278,9 @@ dependencies = [ [[package]] name = "once_cell" -version = "1.20.2" +version = "1.21.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1261fe7e33c73b354eab43b1273a57c8f967d0391e80353e51f764ac02cf6775" +checksum = "42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d" [[package]] name = "option-ext" @@ -280,60 +296,79 @@ checksum = "a6e819bbd49d5939f682638fa54826bf1650abddcd65d000923de8ad63cc7d15" [[package]] name = "proc-macro2" -version = "1.0.78" +version = "1.0.94" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e2422ad645d89c99f8f3e6b88a9fdeca7fabeac836b1002371c4367c8f984aae" +checksum = "a31971752e70b8b2686d7e46ec17fb38dad4051d94024c88df49b667caea9c84" dependencies = [ "unicode-ident", ] [[package]] name = "quote" -version = "1.0.35" +version = "1.0.40" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "291ec9ab5efd934aaf503a6466c5d5251535d108ee747472c3977cc5acc868ef" +checksum = "1885c039570dc00dcb4ff087a89e185fd56bae234ddc7f056a945bf36467248d" dependencies = [ "proc-macro2", ] +[[package]] +name = "r-efi" +version = "5.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "74765f6d916ee2faa39bc8e68e4f3ed8949b48cccdac59983d287a7cb71ce9c5" + [[package]] name = "redox_users" -version = "0.4.6" +version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ba009ff324d1fc1b900bd1fdb31564febe58a8ccc8a6fdbb93b543d33b13ca43" +checksum = "dd6f9d3d47bdd2ad6945c5015a226ec6155d0bcdfd8f7cd29f86b71f8de99d2b" dependencies = [ - "getrandom", + "getrandom 0.2.15", "libredox", "thiserror", ] [[package]] name = "rustc_version" -version = "0.4.0" +version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bfa0f585226d2e68097d4f95d113b15b83a82e819ab25717ec0590d9584ef366" +checksum = "cfcb3a22ef46e85b45de6ee7e79d063319ebb6594faafcf1c225ea92ab6e9b92" dependencies = [ "semver", ] [[package]] name = "rustix" -version = "0.38.37" +version = "0.38.44" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fdb5bc1ae2baa591800df16c9ca78619bf65c0488b41b96ccec5d11220d8c154" +dependencies = [ + "bitflags", + "errno", + "libc", + "linux-raw-sys 0.4.15", + "windows-sys", +] + +[[package]] +name = "rustix" +version = "1.0.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8acb788b847c24f28525660c4d7758620a7210875711f79e7f663cc152726811" +checksum = "d97817398dd4bb2e6da002002db259209759911da105da92bec29ccb12cf58bf" dependencies = [ "bitflags", "errno", "libc", - "linux-raw-sys", - "windows-sys 0.52.0", + "linux-raw-sys 0.9.3", + "windows-sys", ] [[package]] name = "ryu" -version = "1.0.18" +version = "1.0.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f3cb5ba0dc43242ce17de99c180e96db90b235b8a9fdc9543c96d2209116bd9f" +checksum = "28d3b2b1366ec20994f1fd18c3c594f05c5dd4bc44d8bb0c1c632c8d6829481f" [[package]] name = "same-file" @@ -346,24 +381,24 @@ dependencies = [ [[package]] name = "semver" -version = "1.0.22" +version = "1.0.26" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "92d43fe69e652f3df9bdc2b85b2854a0825b86e4fb76bc44d945137d053639ca" +checksum = "56e6fa9c48d24d85fb3de5ad847117517440f6beceb7798af16b4a87d616b8d0" [[package]] name = "serde" -version = "1.0.210" +version = "1.0.219" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c8e3592472072e6e22e0a54d5904d9febf8508f65fb8552499a1abc7d1078c3a" +checksum = "5f0e2c6ed6606019b4e29e69dbaba95b11854410e5347d525002456dbbb786b6" dependencies = [ "serde_derive", ] [[package]] name = "serde_derive" -version = "1.0.210" +version = "1.0.219" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "243902eda00fad750862fc144cea25caca5e20d615af0a81bee94ca738f1df1f" +checksum = "5b0276cf7f2c73365f7157c8123c21cd9a50fbbd844757af28ca1f5925fc2a00" dependencies = [ "proc-macro2", "quote", @@ -372,9 +407,9 @@ dependencies = [ [[package]] name = "serde_json" -version = "1.0.128" +version = "1.0.140" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6ff5456707a1de34e7e37f2a6fd3d3f808c318259cbd01ab6377795054b483d8" +checksum = "20068b6e96dc6c9bd23e01df8827e6c7e1f2fddd43c21810382803c136b99373" dependencies = [ "itoa", "memchr", @@ -396,9 +431,9 @@ checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f" [[package]] name = "syn" -version = "2.0.50" +version = "2.0.100" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "74f1bdc9872430ce9b75da68329d1c1746faf50ffac5f19e02b71e37ff881ffb" +checksum = "b09a44accad81e1ba1cd74a32461ba89dee89095ba17b32f5d03683b1b1fc2a0" dependencies = [ "proc-macro2", "quote", @@ -407,31 +442,31 @@ dependencies = [ [[package]] name = "tempfile" -version = "3.13.0" +version = "3.19.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f0f2c9fc62d0beef6951ccffd757e241266a2c833136efbe35af6cd2567dca5b" +checksum = "7437ac7763b9b123ccf33c338a5cc1bac6f69b45a136c19bdd8a65e3916435bf" dependencies = [ - "cfg-if", "fastrand", + "getrandom 0.3.2", "once_cell", - "rustix", - "windows-sys 0.59.0", + "rustix 1.0.5", + "windows-sys", ] [[package]] name = "thiserror" -version = "1.0.57" +version = "2.0.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e45bcbe8ed29775f228095caf2cd67af7a4ccf756ebff23a306bf3e8b47b24b" +checksum = "567b8a2dae586314f7be2a752ec7474332959c6460e02bde30d702a66d488708" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" -version = "1.0.57" +version = "2.0.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a953cb265bef375dae3de6663da4d3804eee9682ea80d8e2542529b73c531c81" +checksum = "7f7cf42b4507d8ea322120659672cf1b9dbb93f8f2d4ecfd6e51350ff5b17a1d" dependencies = [ "proc-macro2", "quote", @@ -440,9 +475,9 @@ dependencies = [ [[package]] name = "unicode-ident" -version = "1.0.12" +version = "1.0.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b" +checksum = "5a5f39404a5da50712a4c1eecf25e90dd62b613502b7e925fd4e4d19b5c96512" [[package]] name = "utf8parse" @@ -452,9 +487,9 @@ checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821" [[package]] name = "walkdir" -version = "2.4.0" +version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d71d857dc86794ca4c280d616f7da00d2dbfd8cd788846559a6813e6aa4b54ee" +checksum = "29790946404f91d9c5d06f9874efddea1dc06c5efe94541a7d6863108e3a5e4b" dependencies = [ "same-file", "winapi-util", @@ -466,15 +501,24 @@ version = "0.11.0+wasi-snapshot-preview1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" +[[package]] +name = "wasi" +version = "0.14.2+wasi-0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9683f9a5a998d873c0d21fcbe3c083009670149a8fab228644b8bd36b2c48cb3" +dependencies = [ + "wit-bindgen-rt", +] + [[package]] name = "which" -version = "6.0.3" +version = "7.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b4ee928febd44d98f2f459a4a79bd4d928591333a494a10a868418ac1b39cf1f" +checksum = "2774c861e1f072b3aadc02f8ba886c26ad6321567ecc294c935434cad06f1283" dependencies = [ "either", - "home", - "rustix", + "env_home", + "rustix 0.38.44", "winsafe", ] @@ -484,25 +528,7 @@ version = "0.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb" dependencies = [ - "windows-sys 0.59.0", -] - -[[package]] -name = "windows-sys" -version = "0.48.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9" -dependencies = [ - "windows-targets 0.48.5", -] - -[[package]] -name = "windows-sys" -version = "0.52.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" -dependencies = [ - "windows-targets 0.52.6", + "windows-sys", ] [[package]] @@ -511,22 +537,7 @@ version = "0.59.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b" dependencies = [ - "windows-targets 0.52.6", -] - -[[package]] -name = "windows-targets" -version = "0.48.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9a2fa6e2155d7247be68c096456083145c183cbbbc2764150dda45a87197940c" -dependencies = [ - "windows_aarch64_gnullvm 0.48.5", - "windows_aarch64_msvc 0.48.5", - "windows_i686_gnu 0.48.5", - "windows_i686_msvc 0.48.5", - "windows_x86_64_gnu 0.48.5", - "windows_x86_64_gnullvm 0.48.5", - "windows_x86_64_msvc 0.48.5", + "windows-targets", ] [[package]] @@ -535,46 +546,28 @@ version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973" dependencies = [ - "windows_aarch64_gnullvm 0.52.6", - "windows_aarch64_msvc 0.52.6", - "windows_i686_gnu 0.52.6", + "windows_aarch64_gnullvm", + "windows_aarch64_msvc", + "windows_i686_gnu", "windows_i686_gnullvm", - "windows_i686_msvc 0.52.6", - "windows_x86_64_gnu 0.52.6", - "windows_x86_64_gnullvm 0.52.6", - "windows_x86_64_msvc 0.52.6", + "windows_i686_msvc", + "windows_x86_64_gnu", + "windows_x86_64_gnullvm", + "windows_x86_64_msvc", ] -[[package]] -name = "windows_aarch64_gnullvm" -version = "0.48.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8" - [[package]] name = "windows_aarch64_gnullvm" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" -[[package]] -name = "windows_aarch64_msvc" -version = "0.48.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc" - [[package]] name = "windows_aarch64_msvc" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" -[[package]] -name = "windows_i686_gnu" -version = "0.48.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e" - [[package]] name = "windows_i686_gnu" version = "0.52.6" @@ -587,48 +580,24 @@ version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" -[[package]] -name = "windows_i686_msvc" -version = "0.48.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406" - [[package]] name = "windows_i686_msvc" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" -[[package]] -name = "windows_x86_64_gnu" -version = "0.48.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e" - [[package]] name = "windows_x86_64_gnu" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" -[[package]] -name = "windows_x86_64_gnullvm" -version = "0.48.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc" - [[package]] name = "windows_x86_64_gnullvm" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" -[[package]] -name = "windows_x86_64_msvc" -version = "0.48.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538" - [[package]] name = "windows_x86_64_msvc" version = "0.52.6" @@ -641,6 +610,15 @@ version = "0.0.19" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d135d17ab770252ad95e9a872d365cf3090e3be864a34ab46f48555993efc904" +[[package]] +name = "wit-bindgen-rt" +version = "0.39.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6f42320e61fe2cfd34354ecb597f86f413484a798ba44a8ca1165c58d42da6c1" +dependencies = [ + "bitflags", +] + [[package]] name = "xshell" version = "0.2.7" diff --git a/src/tools/miri/miri-script/Cargo.toml b/src/tools/miri/miri-script/Cargo.toml index 5879b2717e5d6..a04898de6abab 100644 --- a/src/tools/miri/miri-script/Cargo.toml +++ b/src/tools/miri/miri-script/Cargo.toml @@ -13,16 +13,16 @@ edition = "2021" # This is needed to make this package build on stable when the parent package uses unstable cargo features. [dependencies] -which = "6.0" +which = "7" walkdir = "2.3" -itertools = "0.11" +itertools = "0.14" path_macro = "1.0" shell-words = "1.1" anyhow = "1.0" xshell = "0.2.6" rustc_version = "0.4" dunce = "1.0.4" -directories = "5" +directories = "6" serde = "1" serde_json = "1" serde_derive = "1" diff --git a/src/tools/miri/miri-script/src/commands.rs b/src/tools/miri/miri-script/src/commands.rs index 17a7c06b52535..1c9750e2cbdc1 100644 --- a/src/tools/miri/miri-script/src/commands.rs +++ b/src/tools/miri/miri-script/src/commands.rs @@ -1,7 +1,8 @@ use std::collections::HashMap; use std::ffi::{OsStr, OsString}; -use std::fs::File; -use std::io::{BufReader, BufWriter, Write}; +use std::fmt::Write as _; +use std::fs::{self, File}; +use std::io::{self, BufRead, BufReader, BufWriter, Write as _}; use std::ops::Not; use std::path::PathBuf; use std::time::Duration; @@ -169,7 +170,8 @@ impl Command { | Command::Toolchain { .. } | Command::Bench { .. } | Command::RustcPull { .. } - | Command::RustcPush { .. } => {} + | Command::RustcPush { .. } + | Command::Squash => {} } // Then run the actual command. match self { @@ -188,6 +190,7 @@ impl Command { Command::Toolchain { flags } => Self::toolchain(flags), Command::RustcPull { commit } => Self::rustc_pull(commit.clone()), Command::RustcPush { github_user, branch } => Self::rustc_push(github_user, branch), + Command::Squash => Self::squash(), } } @@ -383,6 +386,72 @@ impl Command { Ok(()) } + fn squash() -> Result<()> { + let sh = Shell::new()?; + sh.change_dir(miri_dir()?); + // Figure out base wrt latest upstream master. + // (We can't trust any of the local ones, they can all be outdated.) + let origin_master = { + cmd!(sh, "git fetch https://github.com/rust-lang/miri/") + .quiet() + .ignore_stdout() + .ignore_stderr() + .run()?; + cmd!(sh, "git rev-parse FETCH_HEAD").read()? + }; + let base = cmd!(sh, "git merge-base HEAD {origin_master}").read()?; + // Rebase onto that, setting ourselves as the sequence editor so that we can edit the sequence programmatically. + // We want to forward the host stdin so apparently we cannot use `cmd!`. + let mut cmd = process::Command::new("git"); + cmd.arg("rebase").arg(&base).arg("--interactive"); + cmd.env("GIT_SEQUENCE_EDITOR", env::current_exe()?); + cmd.env("MIRI_SCRIPT_IS_GIT_SEQUENCE_EDITOR", "1"); + cmd.current_dir(sh.current_dir()); + let result = cmd.status()?; + if !result.success() { + bail!("`git rebase` failed"); + } + Ok(()) + } + + pub fn squash_sequence_editor() -> Result<()> { + let sequence_file = env::args().nth(1).expect("git should pass us a filename"); + if sequence_file == "fmt" { + // This is probably us being called as a git hook as part of the rebase. Let's just + // ignore this. Sadly `git rebase` does not have a flag to skip running hooks. + return Ok(()); + } + // Read the provided sequence and adjust it. + let rebase_sequence = { + let mut rebase_sequence = String::new(); + let file = fs::File::open(&sequence_file).with_context(|| { + format!("failed to read rebase sequence from {sequence_file:?}") + })?; + let file = io::BufReader::new(file); + for line in file.lines() { + let line = line?; + // The first line is left unchanged. + if rebase_sequence.is_empty() { + writeln!(rebase_sequence, "{line}").unwrap(); + continue; + } + // If this is a "pick" like, make it "squash". + if let Some(rest) = line.strip_prefix("pick ") { + writeln!(rebase_sequence, "squash {rest}").unwrap(); + continue; + } + // We've reached the end of the relevant part of the sequence, and we can stop. + break; + } + rebase_sequence + }; + // Write out the adjusted sequence. + fs::write(&sequence_file, rebase_sequence).with_context(|| { + format!("failed to write adjusted rebase sequence to {sequence_file:?}") + })?; + Ok(()) + } + fn bench( target: Option, no_install: bool, diff --git a/src/tools/miri/miri-script/src/main.rs b/src/tools/miri/miri-script/src/main.rs index 279bdf8cc3f48..6aab2f79bd78c 100644 --- a/src/tools/miri/miri-script/src/main.rs +++ b/src/tools/miri/miri-script/src/main.rs @@ -133,6 +133,8 @@ pub enum Command { #[arg(default_value = "miri-sync")] branch: String, }, + /// Squash the commits of the current feature branch into one. + Squash, } impl Command { @@ -154,7 +156,7 @@ impl Command { flags.extend(remainder); Ok(()) } - Self::Bench { .. } | Self::RustcPull { .. } | Self::RustcPush { .. } => + Self::Bench { .. } | Self::RustcPull { .. } | Self::RustcPush { .. } | Self::Squash => bail!("unexpected \"--\" found in arguments"), } } @@ -170,6 +172,11 @@ pub struct Cli { } fn main() -> Result<()> { + // If we are invoked as the git sequence editor, jump to that logic. + if !std::env::var_os("MIRI_SCRIPT_IS_GIT_SEQUENCE_EDITOR").unwrap_or_default().is_empty() { + return Command::squash_sequence_editor(); + } + // Split the arguments into the part before the `--` and the part after. // The `--` itself ends up in the second part. let miri_args: Vec<_> = std::env::args().take_while(|x| *x != "--").collect(); diff --git a/src/tools/miri/rust-version b/src/tools/miri/rust-version index cf36b6fd03854..97bc826b57a40 100644 --- a/src/tools/miri/rust-version +++ b/src/tools/miri/rust-version @@ -1 +1 @@ -4ac032f857b46037b55c1fc0fa702450aad37f43 +2ad5f8607d0e192b60b130e5cc416b477b351c18 diff --git a/src/tools/miri/src/alloc_addresses/mod.rs b/src/tools/miri/src/alloc_addresses/mod.rs index c263e86c08266..dd389d97cdce0 100644 --- a/src/tools/miri/src/alloc_addresses/mod.rs +++ b/src/tools/miri/src/alloc_addresses/mod.rs @@ -107,47 +107,6 @@ fn align_addr(addr: u64, align: u64) -> u64 { impl<'tcx> EvalContextExtPriv<'tcx> for crate::MiriInterpCx<'tcx> {} trait EvalContextExtPriv<'tcx>: crate::MiriInterpCxExt<'tcx> { - // Returns the exposed `AllocId` that corresponds to the specified addr, - // or `None` if the addr is out of bounds - fn alloc_id_from_addr(&self, addr: u64, size: i64) -> Option { - let this = self.eval_context_ref(); - let global_state = this.machine.alloc_addresses.borrow(); - assert!(global_state.provenance_mode != ProvenanceMode::Strict); - - // We always search the allocation to the right of this address. So if the size is structly - // negative, we have to search for `addr-1` instead. - let addr = if size >= 0 { addr } else { addr.saturating_sub(1) }; - let pos = global_state.int_to_ptr_map.binary_search_by_key(&addr, |(addr, _)| *addr); - - // Determine the in-bounds provenance for this pointer. - let alloc_id = match pos { - Ok(pos) => Some(global_state.int_to_ptr_map[pos].1), - Err(0) => None, - Err(pos) => { - // This is the largest of the addresses smaller than `int`, - // i.e. the greatest lower bound (glb) - let (glb, alloc_id) = global_state.int_to_ptr_map[pos - 1]; - // This never overflows because `addr >= glb` - let offset = addr - glb; - // We require this to be strict in-bounds of the allocation. This arm is only - // entered for addresses that are not the base address, so even zero-sized - // allocations will get recognized at their base address -- but all other - // allocations will *not* be recognized at their "end" address. - let size = this.get_alloc_info(alloc_id).size; - if offset < size.bytes() { Some(alloc_id) } else { None } - } - }?; - - // We only use this provenance if it has been exposed. - if global_state.exposed.contains(&alloc_id) { - // This must still be live, since we remove allocations from `int_to_ptr_map` when they get freed. - debug_assert!(this.is_alloc_live(alloc_id)); - Some(alloc_id) - } else { - None - } - } - fn addr_from_alloc_id_uncached( &self, global_state: &mut GlobalStateInner, @@ -155,8 +114,16 @@ trait EvalContextExtPriv<'tcx>: crate::MiriInterpCxExt<'tcx> { memory_kind: MemoryKind, ) -> InterpResult<'tcx, u64> { let this = self.eval_context_ref(); - let mut rng = this.machine.rng.borrow_mut(); let info = this.get_alloc_info(alloc_id); + + // Miri's address assignment leaks state across thread boundaries, which is incompatible + // with GenMC execution. So we instead let GenMC assign addresses to allocations. + if let Some(genmc_ctx) = this.machine.data_race.as_genmc_ref() { + let addr = genmc_ctx.handle_alloc(&this.machine, info.size, info.align, memory_kind)?; + return interp_ok(addr); + } + + let mut rng = this.machine.rng.borrow_mut(); // This is either called immediately after allocation (and then cached), or when // adjusting `tcx` pointers (which never get freed). So assert that we are looking // at a live allocation. This also ensures that we never re-assign an address to an @@ -238,15 +205,74 @@ trait EvalContextExtPriv<'tcx>: crate::MiriInterpCxExt<'tcx> { if global_state.next_base_addr > this.target_usize_max() { throw_exhaust!(AddressSpaceFull); } + // If we filled up more than half the address space, start aggressively reusing + // addresses to avoid running out. + if global_state.next_base_addr > u64::try_from(this.target_isize_max()).unwrap() { + global_state.reuse.address_space_shortage(); + } interp_ok(base_addr) } } +} +impl<'tcx> EvalContextExt<'tcx> for crate::MiriInterpCx<'tcx> {} +pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { + // Returns the `AllocId` that corresponds to the specified addr, + // or `None` if the addr is out of bounds. + // Setting `only_exposed_allocations` selects whether only exposed allocations are considered. + fn alloc_id_from_addr( + &self, + addr: u64, + size: i64, + only_exposed_allocations: bool, + ) -> Option { + let this = self.eval_context_ref(); + let global_state = this.machine.alloc_addresses.borrow(); + assert!(global_state.provenance_mode != ProvenanceMode::Strict); + + // We always search the allocation to the right of this address. So if the size is strictly + // negative, we have to search for `addr-1` instead. + let addr = if size >= 0 { addr } else { addr.saturating_sub(1) }; + let pos = global_state.int_to_ptr_map.binary_search_by_key(&addr, |(addr, _)| *addr); + + // Determine the in-bounds provenance for this pointer. + let alloc_id = match pos { + Ok(pos) => Some(global_state.int_to_ptr_map[pos].1), + Err(0) => None, + Err(pos) => { + // This is the largest of the addresses smaller than `int`, + // i.e. the greatest lower bound (glb) + let (glb, alloc_id) = global_state.int_to_ptr_map[pos - 1]; + // This never overflows because `addr >= glb` + let offset = addr - glb; + // We require this to be strict in-bounds of the allocation. This arm is only + // entered for addresses that are not the base address, so even zero-sized + // allocations will get recognized at their base address -- but all other + // allocations will *not* be recognized at their "end" address. + let size = this.get_alloc_info(alloc_id).size; + if offset < size.bytes() { Some(alloc_id) } else { None } + } + }?; + + // We only use this provenance if it has been exposed, or if the caller requested also non-exposed allocations + if !only_exposed_allocations || global_state.exposed.contains(&alloc_id) { + // This must still be live, since we remove allocations from `int_to_ptr_map` when they get freed. + debug_assert!(this.is_alloc_live(alloc_id)); + Some(alloc_id) + } else { + None + } + } + + /// Returns the base address of an allocation, or an error if no base address could be found + /// + /// # Panics + /// If `memory_kind = None` and the `alloc_id` is not cached, meaning that the first call to this function per `alloc_id` must get the `memory_kind`. fn addr_from_alloc_id( &self, alloc_id: AllocId, - memory_kind: MemoryKind, + memory_kind: Option, ) -> InterpResult<'tcx, u64> { let this = self.eval_context_ref(); let mut global_state = this.machine.alloc_addresses.borrow_mut(); @@ -256,8 +282,10 @@ trait EvalContextExtPriv<'tcx>: crate::MiriInterpCxExt<'tcx> { Some(&addr) => interp_ok(addr), None => { // First time we're looking for the absolute address of this allocation. + let memory_kind = + memory_kind.expect("memory_kind is required since alloc_id is not cached"); let base_addr = - self.addr_from_alloc_id_uncached(global_state, alloc_id, memory_kind)?; + this.addr_from_alloc_id_uncached(global_state, alloc_id, memory_kind)?; trace!("Assigning base address {:#x} to allocation {:?}", base_addr, alloc_id); // Store address in cache. @@ -283,10 +311,7 @@ trait EvalContextExtPriv<'tcx>: crate::MiriInterpCxExt<'tcx> { } } } -} -impl<'tcx> EvalContextExt<'tcx> for crate::MiriInterpCx<'tcx> {} -pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { fn expose_provenance(&self, provenance: Provenance) -> InterpResult<'tcx> { let this = self.eval_context_ref(); let mut global_state = this.machine.alloc_addresses.borrow_mut(); @@ -365,7 +390,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { let alloc_id = prov.alloc_id(); // Get a pointer to the beginning of this allocation. - let base_addr = this.addr_from_alloc_id(alloc_id, kind)?; + let base_addr = this.addr_from_alloc_id(alloc_id, Some(kind))?; let base_ptr = interpret::Pointer::new( Provenance::Concrete { alloc_id, tag }, Size::from_bytes(base_addr), @@ -388,7 +413,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // In native lib mode, MiriAllocBytes for global allocations are handled via `prepared_alloc_bytes`. // This additional call ensures that some `MiriAllocBytes` are always prepared, just in case // this function gets called before the first time `addr_from_alloc_id` gets called. - this.addr_from_alloc_id(id, MiriMemoryKind::Global.into())?; + this.addr_from_alloc_id(id, Some(MiriMemoryKind::Global.into()))?; // The memory we need here will have already been allocated during an earlier call to // `addr_from_alloc_id` for this allocation. So don't create a new `MiriAllocBytes` here, instead // fetch the previously prepared bytes from `prepared_alloc_bytes`. @@ -423,7 +448,8 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { alloc_id } else { // A wildcard pointer. - this.alloc_id_from_addr(addr.bytes(), size)? + let only_exposed_allocations = true; + this.alloc_id_from_addr(addr.bytes(), size, only_exposed_allocations)? }; // This cannot fail: since we already have a pointer with that provenance, adjust_alloc_root_pointer @@ -477,7 +503,7 @@ impl<'tcx> MiriMachine<'tcx> { // Also remember this address for future reuse. let thread = self.threads.active_thread(); global_state.reuse.add_addr(rng, addr, size, align, kind, thread, || { - if let Some(data_race) = &self.data_race { + if let Some(data_race) = self.data_race.as_vclocks_ref() { data_race.release_clock(&self.threads, |clock| clock.clone()) } else { VClock::default() diff --git a/src/tools/miri/src/alloc_addresses/reuse_pool.rs b/src/tools/miri/src/alloc_addresses/reuse_pool.rs index c0d24a9fbbcf9..29d4f2bb7b0f3 100644 --- a/src/tools/miri/src/alloc_addresses/reuse_pool.rs +++ b/src/tools/miri/src/alloc_addresses/reuse_pool.rs @@ -20,7 +20,7 @@ pub struct ReusePool { /// allocations as address-size pairs, the list must be sorted by the size and then the thread ID. /// /// Each of these maps has at most MAX_POOL_SIZE elements, and since alignment is limited to - /// less than 64 different possible value, that bounds the overall size of the pool. + /// less than 64 different possible values, that bounds the overall size of the pool. /// /// We also store the ID and the data-race clock of the thread that donated this pool element, /// to ensure synchronization with the thread that picks up this address. @@ -36,6 +36,15 @@ impl ReusePool { } } + /// Call this when we are using up a lot of the address space: if memory reuse is enabled at all, + /// this will bump the intra-thread reuse rate to 100% so that we can keep running this program as + /// long as possible. + pub fn address_space_shortage(&mut self) { + if self.address_reuse_rate > 0.0 { + self.address_reuse_rate = 1.0; + } + } + fn subpool(&mut self, align: Align) -> &mut Vec<(u64, Size, ThreadId, VClock)> { let pool_idx: usize = align.bytes().trailing_zeros().try_into().unwrap(); if self.pool.len() <= pool_idx { @@ -55,9 +64,7 @@ impl ReusePool { clock: impl FnOnce() -> VClock, ) { // Let's see if we even want to remember this address. - // We don't remember stack addresses: there's a lot of them (so the perf impact is big), - // and we only want to reuse stack slots within the same thread or else we'll add a lot of - // undesired synchronization. + // We don't remember stack addresses since there's so many of them (so the perf impact is big). if kind == MemoryKind::Stack || !rng.random_bool(self.address_reuse_rate) { return; } diff --git a/src/tools/miri/src/bin/miri.rs b/src/tools/miri/src/bin/miri.rs index 56ee96502b3e7..69aa035fdc3df 100644 --- a/src/tools/miri/src/bin/miri.rs +++ b/src/tools/miri/src/bin/miri.rs @@ -28,13 +28,14 @@ use std::env::{self, VarError}; use std::num::NonZero; use std::ops::Range; use std::path::PathBuf; +use std::rc::Rc; use std::str::FromStr; use std::sync::atomic::{AtomicI32, AtomicU32, Ordering}; use std::sync::{Arc, Once}; use miri::{ - BacktraceStyle, BorrowTrackerMethod, MiriConfig, MiriEntryFnType, ProvenanceMode, RetagFields, - ValidationMode, + BacktraceStyle, BorrowTrackerMethod, GenmcConfig, GenmcCtx, MiriConfig, MiriEntryFnType, + ProvenanceMode, RetagFields, ValidationMode, }; use rustc_abi::ExternAbi; use rustc_data_structures::sync; @@ -60,6 +61,8 @@ use tracing::debug; struct MiriCompilerCalls { miri_config: Option, many_seeds: Option, + /// Settings for using GenMC with Miri. + genmc_config: Option, } struct ManySeedsConfig { @@ -68,8 +71,12 @@ struct ManySeedsConfig { } impl MiriCompilerCalls { - fn new(miri_config: MiriConfig, many_seeds: Option) -> Self { - Self { miri_config: Some(miri_config), many_seeds } + fn new( + miri_config: MiriConfig, + many_seeds: Option, + genmc_config: Option, + ) -> Self { + Self { miri_config: Some(miri_config), many_seeds, genmc_config } } } @@ -106,7 +113,7 @@ fn entry_fn(tcx: TyCtxt<'_>) -> (DefId, MiriEntryFnType) { } else { tcx.dcx().fatal( "`miri_start` must have the following signature:\n\ - fn miri_start(argc: isize, argv: *const *const u8) -> isize", + fn miri_start(argc: isize, argv: *const *const u8) -> isize", ); } } else { @@ -115,7 +122,7 @@ fn entry_fn(tcx: TyCtxt<'_>) -> (DefId, MiriEntryFnType) { Alternatively, you can export a `miri_start` function:\n\ \n\ #[cfg(miri)]\n\ - #[no_mangle]\n\ + #[unsafe(no_mangle)]\n\ fn miri_start(argc: isize, argv: *const *const u8) -> isize {\ \n // Call the actual start function that your project implements, based on your target's conventions.\n\ }" @@ -179,16 +186,28 @@ impl rustc_driver::Callbacks for MiriCompilerCalls { optimizations is usually marginal at best."); } + if let Some(genmc_config) = &self.genmc_config { + let _genmc_ctx = Rc::new(GenmcCtx::new(&config, genmc_config)); + + todo!("GenMC mode not yet implemented"); + }; + if let Some(many_seeds) = self.many_seeds.take() { assert!(config.seed.is_none()); let exit_code = sync::IntoDynSyncSend(AtomicI32::new(rustc_driver::EXIT_SUCCESS)); let num_failed = sync::IntoDynSyncSend(AtomicU32::new(0)); sync::par_for_each_in(many_seeds.seeds.clone(), |seed| { let mut config = config.clone(); - config.seed = Some(seed.into()); + config.seed = Some((*seed).into()); eprintln!("Trying seed: {seed}"); - let return_code = miri::eval_entry(tcx, entry_def_id, entry_type, config) - .unwrap_or(rustc_driver::EXIT_FAILURE); + let return_code = miri::eval_entry( + tcx, + entry_def_id, + entry_type, + &config, + /* genmc_ctx */ None, + ) + .unwrap_or(rustc_driver::EXIT_FAILURE); if return_code != rustc_driver::EXIT_SUCCESS { eprintln!("FAILING SEED: {seed}"); if !many_seeds.keep_going { @@ -206,11 +225,12 @@ impl rustc_driver::Callbacks for MiriCompilerCalls { } std::process::exit(exit_code.0.into_inner()); } else { - let return_code = miri::eval_entry(tcx, entry_def_id, entry_type, config) + let return_code = miri::eval_entry(tcx, entry_def_id, entry_type, &config, None) .unwrap_or_else(|| { tcx.dcx().abort_if_errors(); rustc_driver::EXIT_FAILURE }); + std::process::exit(return_code); } @@ -506,6 +526,7 @@ fn main() { let mut many_seeds_keep_going = false; let mut miri_config = MiriConfig::default(); miri_config.env = env_snapshot; + let mut genmc_config = None; let mut rustc_args = vec![]; let mut after_dashdash = false; @@ -533,8 +554,6 @@ fn main() { } else if arg == "-Zmiri-tree-borrows" { miri_config.borrow_tracker = Some(BorrowTrackerMethod::TreeBorrows); miri_config.provenance_mode = ProvenanceMode::Strict; - } else if arg == "-Zmiri-unique-is-unique" { - miri_config.unique_is_unique = true; } else if arg == "-Zmiri-disable-data-race-detector" { miri_config.data_race_detector = false; miri_config.weak_memory_emulation = false; @@ -573,6 +592,13 @@ fn main() { miri_config.mute_stdout_stderr = true; } else if arg == "-Zmiri-retag-fields" { miri_config.retag_fields = RetagFields::Yes; + } else if arg == "-Zmiri-fixed-schedule" { + miri_config.fixed_scheduling = true; + } else if arg == "-Zmiri-deterministic-concurrency" { + miri_config.fixed_scheduling = true; + miri_config.address_reuse_cross_thread_rate = 0.0; + miri_config.cmpxchg_weak_failure_rate = 0.0; + miri_config.weak_memory_emulation = false; } else if let Some(retag_fields) = arg.strip_prefix("-Zmiri-retag-fields=") { miri_config.retag_fields = match retag_fields { "all" => RetagFields::Yes, @@ -596,6 +622,10 @@ fn main() { many_seeds = Some(0..64); } else if arg == "-Zmiri-many-seeds-keep-going" { many_seeds_keep_going = true; + } else if let Some(trimmed_arg) = arg.strip_prefix("-Zmiri-genmc") { + // FIXME(GenMC): Currently, GenMC mode is incompatible with aliasing model checking. + miri_config.borrow_tracker = None; + GenmcConfig::parse_arg(&mut genmc_config, trimmed_arg); } else if let Some(param) = arg.strip_prefix("-Zmiri-env-forward=") { miri_config.forwarded_env_vars.push(param.to_owned()); } else if let Some(param) = arg.strip_prefix("-Zmiri-env-set=") { @@ -690,14 +720,6 @@ fn main() { rustc_args.push(arg); } } - // `-Zmiri-unique-is-unique` should only be used with `-Zmiri-tree-borrows` - if miri_config.unique_is_unique - && !matches!(miri_config.borrow_tracker, Some(BorrowTrackerMethod::TreeBorrows)) - { - show_error!( - "-Zmiri-unique-is-unique only has an effect when -Zmiri-tree-borrows is also used" - ); - } // Tree Borrows implies strict provenance, and is not compatible with native calls. if matches!(miri_config.borrow_tracker, Some(BorrowTrackerMethod::TreeBorrows)) { if miri_config.provenance_mode != ProvenanceMode::Strict { @@ -727,7 +749,24 @@ fn main() { let many_seeds = many_seeds.map(|seeds| ManySeedsConfig { seeds, keep_going: many_seeds_keep_going }); + // Validate settings for data race detection and GenMC mode. + assert_eq!(genmc_config.is_some(), miri_config.genmc_mode); + if genmc_config.is_some() { + if !miri_config.data_race_detector { + show_error!("Cannot disable data race detection in GenMC mode (currently)"); + } else if !miri_config.weak_memory_emulation { + show_error!("Cannot disable weak memory emulation in GenMC mode"); + } + } else if miri_config.weak_memory_emulation && !miri_config.data_race_detector { + show_error!( + "Weak memory emulation cannot be enabled when the data race detector is disabled" + ); + }; + debug!("rustc arguments: {:?}", rustc_args); debug!("crate arguments: {:?}", miri_config.args); - run_compiler_and_exit(&rustc_args, &mut MiriCompilerCalls::new(miri_config, many_seeds)) + run_compiler_and_exit( + &rustc_args, + &mut MiriCompilerCalls::new(miri_config, many_seeds, genmc_config), + ) } diff --git a/src/tools/miri/src/borrow_tracker/mod.rs b/src/tools/miri/src/borrow_tracker/mod.rs index 9808102f4ba66..b66c561d2b8ad 100644 --- a/src/tools/miri/src/borrow_tracker/mod.rs +++ b/src/tools/miri/src/borrow_tracker/mod.rs @@ -102,8 +102,6 @@ pub struct GlobalStateInner { tracked_pointer_tags: FxHashSet, /// Whether to recurse into datatypes when searching for pointers to retag. retag_fields: RetagFields, - /// Whether `core::ptr::Unique` gets special (`Box`-like) handling. - unique_is_unique: bool, } impl VisitProvenance for GlobalStateInner { @@ -164,7 +162,6 @@ impl GlobalStateInner { borrow_tracker_method: BorrowTrackerMethod, tracked_pointer_tags: FxHashSet, retag_fields: RetagFields, - unique_is_unique: bool, ) -> Self { GlobalStateInner { borrow_tracker_method, @@ -173,7 +170,6 @@ impl GlobalStateInner { protected_tags: FxHashMap::default(), tracked_pointer_tags, retag_fields, - unique_is_unique, } } @@ -239,7 +235,6 @@ impl BorrowTrackerMethod { self, config.tracked_pointer_tags.clone(), config.retag_fields, - config.unique_is_unique, )) } } diff --git a/src/tools/miri/src/borrow_tracker/stacked_borrows/mod.rs b/src/tools/miri/src/borrow_tracker/stacked_borrows/mod.rs index 18a5a0612bb06..bc57ba697b382 100644 --- a/src/tools/miri/src/borrow_tracker/stacked_borrows/mod.rs +++ b/src/tools/miri/src/borrow_tracker/stacked_borrows/mod.rs @@ -594,7 +594,7 @@ trait EvalContextPrivExt<'tcx, 'ecx>: crate::MiriInterpCxExt<'tcx> { ) -> InterpResult<'tcx, Option> { let this = self.eval_context_mut(); // Ensure we bail out if the pointer goes out-of-bounds (see miri#1050). - this.check_ptr_access(place.ptr(), size, CheckInAllocMsg::InboundsTest)?; + this.check_ptr_access(place.ptr(), size, CheckInAllocMsg::Dereferenceable)?; // It is crucial that this gets called on all code paths, to ensure we track tag creation. let log_creation = |this: &MiriInterpCx<'tcx>, @@ -740,7 +740,7 @@ trait EvalContextPrivExt<'tcx, 'ecx>: crate::MiriInterpCxExt<'tcx> { if let Some(access) = access { assert_eq!(access, AccessKind::Write); // Make sure the data race model also knows about this. - if let Some(data_race) = alloc_extra.data_race.as_mut() { + if let Some(data_race) = alloc_extra.data_race.as_vclocks_mut() { data_race.write( alloc_id, range, @@ -789,7 +789,7 @@ trait EvalContextPrivExt<'tcx, 'ecx>: crate::MiriInterpCxExt<'tcx> { if let Some(access) = access { assert_eq!(access, AccessKind::Read); // Make sure the data race model also knows about this. - if let Some(data_race) = alloc_extra.data_race.as_ref() { + if let Some(data_race) = alloc_extra.data_race.as_vclocks_ref() { data_race.read( alloc_id, range, diff --git a/src/tools/miri/src/borrow_tracker/tree_borrows/mod.rs b/src/tools/miri/src/borrow_tracker/tree_borrows/mod.rs index f39a606513d5c..f3e32e75f2f2c 100644 --- a/src/tools/miri/src/borrow_tracker/tree_borrows/mod.rs +++ b/src/tools/miri/src/borrow_tracker/tree_borrows/mod.rs @@ -2,7 +2,6 @@ use rustc_abi::{BackendRepr, Size}; use rustc_middle::mir::{Mutability, RetagKind}; use rustc_middle::ty::layout::HasTypingEnv; use rustc_middle::ty::{self, Ty}; -use rustc_span::def_id::DefId; use crate::borrow_tracker::{GlobalState, GlobalStateInner, ProtectorKind}; use crate::concurrency::data_race::NaReadType; @@ -115,14 +114,15 @@ impl<'tcx> Tree { /// Policy for a new borrow. #[derive(Debug, Clone, Copy)] struct NewPermission { - /// Optionally ignore the actual size to do a zero-size reborrow. - /// If this is set then `dereferenceable` is not enforced. - zero_size: bool, /// Which permission should the pointer start with. initial_state: Permission, /// Whether this pointer is part of the arguments of a function call. /// `protector` is `Some(_)` for all pointers marked `noalias`. protector: Option, + /// Whether a read should be performed on a retag. This should be `false` + /// for `Cell` because this could cause data races when using thread-safe + /// data types like `Mutex`. + initial_read: bool, } impl<'tcx> NewPermission { @@ -141,18 +141,19 @@ impl<'tcx> NewPermission { // To eliminate the case of Protected Reserved IM we override interior mutability // in the case of a protected reference: protected references are always considered // "freeze" in their reservation phase. - let initial_state = match mutability { - Mutability::Mut if ty_is_unpin => Permission::new_reserved(ty_is_freeze, is_protected), - Mutability::Not if ty_is_freeze => Permission::new_frozen(), + let (initial_state, initial_read) = match mutability { + Mutability::Mut if ty_is_unpin => + (Permission::new_reserved(ty_is_freeze, is_protected), true), + Mutability::Not if ty_is_freeze => (Permission::new_frozen(), true), + Mutability::Not if !ty_is_freeze => (Permission::new_cell(), false), // Raw pointers never enter this function so they are not handled. // However raw pointers are not the only pointers that take the parent - // tag, this also happens for `!Unpin` `&mut`s and interior mutable - // `&`s, which are excluded above. + // tag, this also happens for `!Unpin` `&mut`s, which are excluded above. _ => return None, }; let protector = is_protected.then_some(ProtectorKind::StrongProtector); - Some(Self { zero_size: false, initial_state, protector }) + Some(Self { initial_state, protector, initial_read }) } /// Compute permission for `Box`-like type (`Box` always, and also `Unique` if enabled). @@ -162,19 +163,18 @@ impl<'tcx> NewPermission { ty: Ty<'tcx>, kind: RetagKind, cx: &crate::MiriInterpCx<'tcx>, - zero_size: bool, ) -> Option { let pointee = ty.builtin_deref(true).unwrap(); pointee.is_unpin(*cx.tcx, cx.typing_env()).then_some(()).map(|()| { // Regular `Unpin` box, give it `noalias` but only a weak protector // because it is valid to deallocate it within the function. - let ty_is_freeze = ty.is_freeze(*cx.tcx, cx.typing_env()); + let ty_is_freeze = pointee.is_freeze(*cx.tcx, cx.typing_env()); let protected = kind == RetagKind::FnEntry; let initial_state = Permission::new_reserved(ty_is_freeze, protected); Self { - zero_size, initial_state, protector: protected.then_some(ProtectorKind::WeakProtector), + initial_read: true, } }) } @@ -197,7 +197,7 @@ trait EvalContextPrivExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // Make sure the new permission makes sense as the initial permission of a fresh tag. assert!(new_perm.initial_state.is_initial()); // Ensure we bail out if the pointer goes out-of-bounds (see miri#1050). - this.check_ptr_access(place.ptr(), ptr_size, CheckInAllocMsg::InboundsTest)?; + this.check_ptr_access(place.ptr(), ptr_size, CheckInAllocMsg::Dereferenceable)?; // It is crucial that this gets called on all code paths, to ensure we track tag creation. let log_creation = |this: &MiriInterpCx<'tcx>, @@ -289,13 +289,15 @@ trait EvalContextPrivExt<'tcx>: crate::MiriInterpCxExt<'tcx> { let mut tree_borrows = alloc_extra.borrow_tracker_tb().borrow_mut(); // All reborrows incur a (possibly zero-sized) read access to the parent - tree_borrows.perform_access( - orig_tag, - Some((range, AccessKind::Read, diagnostics::AccessCause::Reborrow)), - this.machine.borrow_tracker.as_ref().unwrap(), - alloc_id, - this.machine.current_span(), - )?; + if new_perm.initial_read { + tree_borrows.perform_access( + orig_tag, + Some((range, AccessKind::Read, diagnostics::AccessCause::Reborrow)), + this.machine.borrow_tracker.as_ref().unwrap(), + alloc_id, + this.machine.current_span(), + )?; + } // Record the parent-child pair in the tree. tree_borrows.new_child( orig_tag, @@ -308,8 +310,8 @@ trait EvalContextPrivExt<'tcx>: crate::MiriInterpCxExt<'tcx> { drop(tree_borrows); // Also inform the data race model (but only if any bytes are actually affected). - if range.size.bytes() > 0 { - if let Some(data_race) = alloc_extra.data_race.as_ref() { + if range.size.bytes() > 0 && new_perm.initial_read { + if let Some(data_race) = alloc_extra.data_race.as_vclocks_ref() { data_race.read( alloc_id, range, @@ -333,15 +335,12 @@ trait EvalContextPrivExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // Determine the size of the reborrow. // For most types this is the entire size of the place, however // - when `extern type` is involved we use the size of the known prefix, - // - if the pointer is not reborrowed (raw pointer) or if `zero_size` is set - // then we override the size to do a zero-length reborrow. - let reborrow_size = match new_perm { - NewPermission { zero_size: false, .. } => - this.size_and_align_of_mplace(place)? - .map(|(size, _)| size) - .unwrap_or(place.layout.size), - _ => Size::from_bytes(0), - }; + // - if the pointer is not reborrowed (raw pointer) then we override the size + // to do a zero-length reborrow. + let reborrow_size = this + .size_and_align_of_mplace(place)? + .map(|(size, _)| size) + .unwrap_or(place.layout.size); trace!("Creating new permission: {:?} with size {:?}", new_perm, reborrow_size); // This new tag is not guaranteed to actually be used. @@ -405,9 +404,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { let this = self.eval_context_mut(); let options = this.machine.borrow_tracker.as_mut().unwrap().get_mut(); let retag_fields = options.retag_fields; - let unique_did = - options.unique_is_unique.then(|| this.tcx.lang_items().ptr_unique()).flatten(); - let mut visitor = RetagVisitor { ecx: this, kind, retag_fields, unique_did }; + let mut visitor = RetagVisitor { ecx: this, kind, retag_fields }; return visitor.visit_value(place); // The actual visitor. @@ -415,7 +412,6 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { ecx: &'ecx mut MiriInterpCx<'tcx>, kind: RetagKind, retag_fields: RetagFields, - unique_did: Option, } impl<'ecx, 'tcx> RetagVisitor<'ecx, 'tcx> { #[inline(always)] // yes this helps in our benchmarks @@ -446,12 +442,8 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { fn visit_box(&mut self, box_ty: Ty<'tcx>, place: &PlaceTy<'tcx>) -> InterpResult<'tcx> { // Only boxes for the global allocator get any special treatment. if box_ty.is_box_global(*self.ecx.tcx) { - let new_perm = NewPermission::from_unique_ty( - place.layout.ty, - self.kind, - self.ecx, - /* zero_size */ false, - ); + let new_perm = + NewPermission::from_unique_ty(place.layout.ty, self.kind, self.ecx); self.retag_ptr_inplace(place, new_perm)?; } interp_ok(()) @@ -485,16 +477,6 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // even if field retagging is not enabled. *shrug*) self.walk_value(place)?; } - ty::Adt(adt, _) if self.unique_did == Some(adt.did()) => { - let place = inner_ptr_of_unique(self.ecx, place)?; - let new_perm = NewPermission::from_unique_ty( - place.layout.ty, - self.kind, - self.ecx, - /* zero_size */ true, - ); - self.retag_ptr_inplace(&place, new_perm)?; - } _ => { // Not a reference/pointer/box. Only recurse if configured appropriately. let recurse = match self.retag_fields { @@ -533,8 +515,8 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // Retag it. With protection! That is the entire point. let new_perm = NewPermission { initial_state: Permission::new_reserved(ty_is_freeze, /* protected */ true), - zero_size: false, protector: Some(ProtectorKind::StrongProtector), + initial_read: true, }; this.tb_retag_place(place, new_perm) } @@ -594,27 +576,3 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { tree_borrows.give_pointer_debug_name(tag, nth_parent, name) } } - -/// Takes a place for a `Unique` and turns it into a place with the inner raw pointer. -/// I.e. input is what you get from the visitor upon encountering an `adt` that is `Unique`, -/// and output can be used by `retag_ptr_inplace`. -fn inner_ptr_of_unique<'tcx>( - ecx: &MiriInterpCx<'tcx>, - place: &PlaceTy<'tcx>, -) -> InterpResult<'tcx, PlaceTy<'tcx>> { - // Follows the same layout as `interpret/visitor.rs:walk_value` for `Box` in - // `rustc_const_eval`, just with one fewer layer. - // Here we have a `Unique(NonNull(*mut), PhantomData)` - assert_eq!(place.layout.fields.count(), 2, "Unique must have exactly 2 fields"); - let (nonnull, phantom) = (ecx.project_field(place, 0)?, ecx.project_field(place, 1)?); - assert!( - phantom.layout.ty.ty_adt_def().is_some_and(|adt| adt.is_phantom_data()), - "2nd field of `Unique` should be `PhantomData` but is `{:?}`", - phantom.layout.ty, - ); - // Now down to `NonNull(*mut)` - assert_eq!(nonnull.layout.fields.count(), 1, "NonNull must have exactly 1 field"); - let ptr = ecx.project_field(&nonnull, 0)?; - // Finally a plain `*mut` - interp_ok(ptr) -} diff --git a/src/tools/miri/src/borrow_tracker/tree_borrows/perms.rs b/src/tools/miri/src/borrow_tracker/tree_borrows/perms.rs index 5c12ce39d10da..087f6fc3f24b0 100644 --- a/src/tools/miri/src/borrow_tracker/tree_borrows/perms.rs +++ b/src/tools/miri/src/borrow_tracker/tree_borrows/perms.rs @@ -8,6 +8,10 @@ use crate::borrow_tracker::tree_borrows::tree::AccessRelatedness; /// The activation states of a pointer. #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] enum PermissionPriv { + /// represents: a shared reference to interior mutable data. + /// allows: all foreign and child accesses; + /// rejects: nothing + Cell, /// represents: a local mutable reference that has not yet been written to; /// allows: child reads, foreign reads; /// affected by: child writes (becomes Active), @@ -60,6 +64,14 @@ impl PartialOrd for PermissionPriv { use Ordering::*; Some(match (self, other) { (a, b) if a == b => Equal, + // Versions of `Reserved` with different interior mutability are incomparable with each + // other. + (ReservedIM, ReservedFrz { .. }) + | (ReservedFrz { .. }, ReservedIM) + // `Cell` is not comparable with any other permission + // since it never transitions to any other state and we + // can never get to `Cell` from another state. + | (Cell, _) | (_, Cell) => return None, (Disabled, _) => Greater, (_, Disabled) => Less, (Frozen, _) => Greater, @@ -71,9 +83,6 @@ impl PartialOrd for PermissionPriv { // `bool` is ordered such that `false <= true`, so this works as intended. c1.cmp(c2) } - // Versions of `Reserved` with different interior mutability are incomparable with each - // other. - (ReservedIM, ReservedFrz { .. }) | (ReservedFrz { .. }, ReservedIM) => return None, }) } } @@ -81,17 +90,22 @@ impl PartialOrd for PermissionPriv { impl PermissionPriv { /// Check if `self` can be the initial state of a pointer. fn is_initial(&self) -> bool { - matches!(self, ReservedFrz { conflicted: false } | Frozen | ReservedIM) + matches!(self, ReservedFrz { conflicted: false } | Frozen | ReservedIM | Cell) } /// Reject `ReservedIM` that cannot exist in the presence of a protector. fn compatible_with_protector(&self) -> bool { - !matches!(self, ReservedIM) + // FIXME(TB-Cell): It is unclear what to do here. + // `Cell` will occur with a protector but won't provide the guarantees + // of noalias (it will fail the `protected_enforces_noalias` test). + !matches!(self, ReservedIM | Cell) } /// See `foreign_access_skipping.rs`. Computes the SIFA of a permission. fn strongest_idempotent_foreign_access(&self, prot: bool) -> IdempotentForeignAccess { match self { + // Cell survives any foreign access + Cell => IdempotentForeignAccess::Write, // A protected non-conflicted Reserved will become conflicted under a foreign read, // and is hence not idempotent under it. ReservedFrz { conflicted } if prot && !conflicted => IdempotentForeignAccess::None, @@ -124,7 +138,7 @@ mod transition { Disabled => return None, // The inner data `ty_is_freeze` of `Reserved` is always irrelevant for Read // accesses, since the data is not being mutated. Hence the `{ .. }`. - readable @ (ReservedFrz { .. } | ReservedIM | Active | Frozen) => readable, + readable @ (Cell | ReservedFrz { .. } | ReservedIM | Active | Frozen) => readable, }) } @@ -132,6 +146,8 @@ mod transition { /// is protected; invalidate `Active`. fn foreign_read(state: PermissionPriv, protected: bool) -> Option { Some(match state { + // Cell ignores foreign reads. + Cell => Cell, // Non-writeable states just ignore foreign reads. non_writeable @ (Frozen | Disabled) => non_writeable, // Writeable states are more tricky, and depend on whether things are protected. @@ -167,6 +183,8 @@ mod transition { /// write permissions, `Frozen` and `Disabled` cannot obtain such permissions and produce UB. fn child_write(state: PermissionPriv, protected: bool) -> Option { Some(match state { + // Cell ignores child writes. + Cell => Cell, // If the `conflicted` flag is set, then there was a foreign read during // the function call that is still ongoing (still `protected`), // this is UB (`noalias` violation). @@ -185,6 +203,8 @@ mod transition { // types receive a `ReservedFrz` instead of `ReservedIM` when retagged under a protector, // so the result of this function does indirectly depend on (past) protector status. Some(match state { + // Cell ignores foreign writes. + Cell => Cell, res @ ReservedIM => { // We can never create a `ReservedIM` under a protector, only `ReservedFrz`. assert!(!protected); @@ -242,6 +262,11 @@ impl Permission { self.inner == Frozen } + /// Check if `self` is the shared-reference-to-interior-mutable-data state of a pointer. + pub fn is_cell(&self) -> bool { + self.inner == Cell + } + /// Default initial permission of the root of a new tree at inbounds positions. /// Must *only* be used for the root, this is not in general an "initial" permission! pub fn new_active() -> Self { @@ -278,11 +303,27 @@ impl Permission { Self { inner: Disabled } } + /// Default initial permission of a shared reference to interior mutable data. + pub fn new_cell() -> Self { + Self { inner: Cell } + } + /// Reject `ReservedIM` that cannot exist in the presence of a protector. pub fn compatible_with_protector(&self) -> bool { self.inner.compatible_with_protector() } + /// What kind of access to perform before releasing the protector. + pub fn protector_end_access(&self) -> Option { + match self.inner { + // Do not do perform access if it is a `Cell`, as this + // can cause data races when using thread-safe data types. + Cell => None, + Active => Some(AccessKind::Write), + _ => Some(AccessKind::Read), + } + } + /// Apply the transition to the inner PermissionPriv. pub fn perform_access( kind: AccessKind, @@ -306,30 +347,32 @@ impl Permission { /// remove protected parents. pub fn can_be_replaced_by_child(self, child: Self) -> bool { match (self.inner, child.inner) { - // ReservedIM can be replaced by anything, as it allows all - // transitions. + // Cell allows all transitions. + (Cell, _) => true, + // Cell is the most permissive, nothing can be replaced by Cell. + // (ReservedIM, Cell) => true, + (_, Cell) => false, + // ReservedIM can be replaced by anything besides Cell. + // ReservedIM allows all transitions, but unlike Cell, a local write + // to ReservedIM transitions to Active, while it is a no-op for Cell. (ReservedIM, _) => true, + (_, ReservedIM) => false, // Reserved (as parent, where conflictedness does not matter) - // can be replaced by all but ReservedIM, - // since ReservedIM alone would survive foreign writes - (ReservedFrz { .. }, ReservedIM) => false, + // can be replaced by all but ReservedIM and Cell, + // since ReservedIM and Cell alone would survive foreign writes (ReservedFrz { .. }, _) => true, + (_, ReservedFrz { .. }) => false, // Active can not be replaced by something surviving - // foreign reads and then remaining writable. - (Active, ReservedIM) => false, - (Active, ReservedFrz { .. }) => false, + // foreign reads and then remaining writable (i.e., Reserved*). // Replacing a state by itself is always okay, even if the child state is protected. - (Active, Active) => true, // Active can be replaced by Frozen, since it is not protected. - (Active, Frozen) => true, - (Active, Disabled) => true, + (Active, Active | Frozen | Disabled) => true, + (_, Active) => false, // Frozen can only be replaced by Disabled (and itself). - (Frozen, Frozen) => true, - (Frozen, Disabled) => true, - (Frozen, _) => false, + (Frozen, Frozen | Disabled) => true, + (_, Frozen) => false, // Disabled can not be replaced by anything else. (Disabled, Disabled) => true, - (Disabled, _) => false, } } @@ -383,6 +426,7 @@ pub mod diagnostics { f, "{}", match self { + Cell => "Cell", ReservedFrz { conflicted: false } => "Reserved", ReservedFrz { conflicted: true } => "Reserved (conflicted)", ReservedIM => "Reserved (interior mutable)", @@ -413,6 +457,7 @@ pub mod diagnostics { // and also as `diagnostics::DisplayFmtPermission.uninit` otherwise // alignment will be incorrect. match self.inner { + Cell => "Cel ", ReservedFrz { conflicted: false } => "Res ", ReservedFrz { conflicted: true } => "ResC", ReservedIM => "ReIM", @@ -459,7 +504,7 @@ pub mod diagnostics { /// (Reserved < Active < Frozen < Disabled); /// - between `self` and `err` the permission should also be increasing, /// so all permissions inside `err` should be greater than `self.1`; - /// - `Active` and `Reserved(conflicted=false)` cannot cause an error + /// - `Active`, `Reserved(conflicted=false)`, and `Cell` cannot cause an error /// due to insufficient permissions, so `err` cannot be a `ChildAccessForbidden(_)` /// of either of them; /// - `err` should not be `ProtectedDisabled(Disabled)`, because the protected @@ -492,13 +537,14 @@ pub mod diagnostics { (ReservedFrz { conflicted: true } | Active | Frozen, Disabled) => false, (ReservedFrz { conflicted: true }, Frozen) => false, - // `Active` and `Reserved` have all permissions, so a + // `Active`, `Reserved`, and `Cell` have all permissions, so a // `ChildAccessForbidden(Reserved | Active)` can never exist. - (_, Active) | (_, ReservedFrz { conflicted: false }) => + (_, Active) | (_, ReservedFrz { conflicted: false }) | (_, Cell) => unreachable!("this permission cannot cause an error"), // No transition has `Reserved { conflicted: false }` or `ReservedIM` - // as its `.to` unless it's a noop. - (ReservedFrz { conflicted: false } | ReservedIM, _) => + // as its `.to` unless it's a noop. `Cell` cannot be in its `.to` + // because all child accesses are a noop. + (ReservedFrz { conflicted: false } | ReservedIM | Cell, _) => unreachable!("self is a noop transition"), // All transitions produced in normal executions (using `apply_access`) // change permissions in the order `Reserved -> Active -> Frozen -> Disabled`. @@ -544,16 +590,17 @@ pub mod diagnostics { "permission that results in Disabled should not itself be Disabled in the first place" ), // No transition has `Reserved { conflicted: false }` or `ReservedIM` as its `.to` - // unless it's a noop. - (ReservedFrz { conflicted: false } | ReservedIM, _) => + // unless it's a noop. `Cell` cannot be in its `.to` because all child + // accesses are a noop. + (ReservedFrz { conflicted: false } | ReservedIM | Cell, _) => unreachable!("self is a noop transition"), // Permissions only evolve in the order `Reserved -> Active -> Frozen -> Disabled`, // so permissions found must be increasing in the order // `self.from < self.to <= forbidden.from < forbidden.to`. - (Disabled, ReservedFrz { .. } | ReservedIM | Active | Frozen) - | (Frozen, ReservedFrz { .. } | ReservedIM | Active) - | (Active, ReservedFrz { .. } | ReservedIM) => + (Disabled, Cell | ReservedFrz { .. } | ReservedIM | Active | Frozen) + | (Frozen, Cell | ReservedFrz { .. } | ReservedIM | Active) + | (Active, Cell | ReservedFrz { .. } | ReservedIM) => unreachable!("permissions between self and err must be increasing"), } } @@ -590,7 +637,7 @@ mod propagation_optimization_checks { impl Exhaustive for PermissionPriv { fn exhaustive() -> Box> { Box::new( - vec![Active, Frozen, Disabled, ReservedIM] + vec![Active, Frozen, Disabled, ReservedIM, Cell] .into_iter() .chain(::exhaustive().map(|conflicted| ReservedFrz { conflicted })), ) diff --git a/src/tools/miri/src/borrow_tracker/tree_borrows/tree.rs b/src/tools/miri/src/borrow_tracker/tree_borrows/tree.rs index 3389b1c602c33..47ccaadbb9e36 100644 --- a/src/tools/miri/src/borrow_tracker/tree_borrows/tree.rs +++ b/src/tools/miri/src/borrow_tracker/tree_borrows/tree.rs @@ -721,9 +721,14 @@ impl<'tcx> Tree { // visit all children, skipping none |_| ContinueTraversal::Recurse, |args: NodeAppArgs<'_>| -> Result<(), TransitionError> { - let NodeAppArgs { node, .. } = args; + let NodeAppArgs { node, perm, .. } = args; + let perm = + perm.get().copied().unwrap_or_else(|| node.default_location_state()); if global.borrow().protected_tags.get(&node.tag) == Some(&ProtectorKind::StrongProtector) + // Don't check for protector if it is a Cell (see `unsafe_cell_deallocate` in `interior_mutability.rs`). + // Related to https://github.com/rust-lang/rust/issues/55005. + && !perm.permission().is_cell() { Err(TransitionError::ProtectedDealloc) } else { @@ -865,10 +870,9 @@ impl<'tcx> Tree { let idx = self.tag_mapping.get(&tag).unwrap(); // Only visit initialized permissions if let Some(p) = perms.get(idx) + && let Some(access_kind) = p.permission.protector_end_access() && p.initialized { - let access_kind = - if p.permission.is_active() { AccessKind::Write } else { AccessKind::Read }; let access_cause = diagnostics::AccessCause::FnExit(access_kind); TreeVisitor { nodes: &mut self.nodes, tag_mapping: &self.tag_mapping, perms } .traverse_nonchildren( diff --git a/src/tools/miri/src/borrow_tracker/tree_borrows/tree/tests.rs b/src/tools/miri/src/borrow_tracker/tree_borrows/tree/tests.rs index a429940748c86..dbfa9807e3b5a 100644 --- a/src/tools/miri/src/borrow_tracker/tree_borrows/tree/tests.rs +++ b/src/tools/miri/src/borrow_tracker/tree_borrows/tree/tests.rs @@ -61,8 +61,7 @@ fn all_read_accesses_commute() { // ... and produce the same final result. assert_eq!( loc12, loc21, - "Read accesses {:?} followed by {:?} do not commute !", - rel1, rel2 + "Read accesses {rel1:?} followed by {rel2:?} do not commute !" ); } } @@ -674,8 +673,8 @@ mod spurious_read { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let (x, y) = self.retag_permissions(); write!(f, "{}; ", self.xy_rel)?; - write!(f, "y: ({}); ", y,)?; - write!(f, "retag x ({}); ", x)?; + write!(f, "y: ({y}); ")?; + write!(f, "retag x ({x}); ")?; write!(f, "; ;")?; Ok(()) @@ -730,17 +729,17 @@ mod spurious_read { // protector. final_source .distinguishable::(&final_target) - .then_some(format!("{}", final_target)) + .then_some(format!("{final_target}")) } else { Some(format!("UB")) } }; if let Some(final_target) = distinguishable { eprintln!( - "For pattern '{}', inserting a spurious read through x makes the final state '{}' instead of '{}' which is observable", - pat, final_target, final_source + "For pattern '{pat}', inserting a spurious read through x makes the final state '{final_target}' \ + instead of '{final_source}' which is observable" ); - eprintln!(" (arbitrary code instanciated with '{}')", opaque); + eprintln!(" (arbitrary code instanciated with '{opaque}')"); err += 1; // We found an instanciation of the opaque code that makes this Pattern // fail, we don't really need to check the rest. diff --git a/src/tools/miri/src/clock.rs b/src/tools/miri/src/clock.rs index c9bffc449f7f0..34465e9cac60d 100644 --- a/src/tools/miri/src/clock.rs +++ b/src/tools/miri/src/clock.rs @@ -62,12 +62,12 @@ impl Instant { /// A monotone clock used for `Instant` simulation. #[derive(Debug)] -pub struct Clock { - kind: ClockKind, +pub struct MonotonicClock { + kind: MonotonicClockKind, } #[derive(Debug)] -enum ClockKind { +enum MonotonicClockKind { Host { /// The "epoch" for this machine's monotone clock: /// the moment we consider to be time = 0. @@ -79,13 +79,13 @@ enum ClockKind { }, } -impl Clock { +impl MonotonicClock { /// Create a new clock based on the availability of communication with the host. pub fn new(communicate: bool) -> Self { let kind = if communicate { - ClockKind::Host { epoch: StdInstant::now() } + MonotonicClockKind::Host { epoch: StdInstant::now() } } else { - ClockKind::Virtual { nanoseconds: 0.into() } + MonotonicClockKind::Virtual { nanoseconds: 0.into() } }; Self { kind } @@ -94,10 +94,10 @@ impl Clock { /// Let the time pass for a small interval. pub fn tick(&self) { match &self.kind { - ClockKind::Host { .. } => { + MonotonicClockKind::Host { .. } => { // Time will pass without us doing anything. } - ClockKind::Virtual { nanoseconds } => { + MonotonicClockKind::Virtual { nanoseconds } => { nanoseconds.update(|x| x + NANOSECONDS_PER_BASIC_BLOCK); } } @@ -106,8 +106,8 @@ impl Clock { /// Sleep for the desired duration. pub fn sleep(&self, duration: Duration) { match &self.kind { - ClockKind::Host { .. } => std::thread::sleep(duration), - ClockKind::Virtual { nanoseconds } => { + MonotonicClockKind::Host { .. } => std::thread::sleep(duration), + MonotonicClockKind::Virtual { nanoseconds } => { // Just pretend that we have slept for some time. let nanos: u128 = duration.as_nanos(); nanoseconds.update(|x| { @@ -121,15 +121,17 @@ impl Clock { /// Return the `epoch` instant (time = 0), to convert between monotone instants and absolute durations. pub fn epoch(&self) -> Instant { match &self.kind { - ClockKind::Host { epoch } => Instant { kind: InstantKind::Host(*epoch) }, - ClockKind::Virtual { .. } => Instant { kind: InstantKind::Virtual { nanoseconds: 0 } }, + MonotonicClockKind::Host { epoch } => Instant { kind: InstantKind::Host(*epoch) }, + MonotonicClockKind::Virtual { .. } => + Instant { kind: InstantKind::Virtual { nanoseconds: 0 } }, } } pub fn now(&self) -> Instant { match &self.kind { - ClockKind::Host { .. } => Instant { kind: InstantKind::Host(StdInstant::now()) }, - ClockKind::Virtual { nanoseconds } => + MonotonicClockKind::Host { .. } => + Instant { kind: InstantKind::Host(StdInstant::now()) }, + MonotonicClockKind::Virtual { nanoseconds } => Instant { kind: InstantKind::Virtual { nanoseconds: nanoseconds.get() } }, } } diff --git a/src/tools/miri/src/concurrency/data_race.rs b/src/tools/miri/src/concurrency/data_race.rs index b1ca434361b4a..714eb1fba91c5 100644 --- a/src/tools/miri/src/concurrency/data_race.rs +++ b/src/tools/miri/src/concurrency/data_race.rs @@ -1,4 +1,4 @@ -//! Implementation of a data-race detector using Lamport Timestamps / Vector-clocks +//! Implementation of a data-race detector using Lamport Timestamps / Vector clocks //! based on the Dynamic Race Detection for C++: //! //! which does not report false-positives when fences are used, and gives better @@ -54,6 +54,7 @@ use rustc_span::Span; use super::vector_clock::{VClock, VTimestamp, VectorIdx}; use super::weak_memory::EvalContextExt as _; +use crate::concurrency::GlobalDataRaceHandler; use crate::diagnostics::RacingOp; use crate::*; @@ -259,7 +260,7 @@ enum AccessType { /// Per-byte vector clock metadata for data-race detection. #[derive(Clone, PartialEq, Eq, Debug)] struct MemoryCellClocks { - /// The vector-clock timestamp and the thread that did the last non-atomic write. We don't need + /// The vector clock timestamp and the thread that did the last non-atomic write. We don't need /// a full `VClock` here, it's always a single thread and nothing synchronizes, so the effective /// clock is all-0 except for the thread that did the write. write: (VectorIdx, VTimestamp), @@ -269,7 +270,7 @@ struct MemoryCellClocks { /// a deallocation of memory. write_type: NaWriteType, - /// The vector-clock of all non-atomic reads that happened since the last non-atomic write + /// The vector clock of all non-atomic reads that happened since the last non-atomic write /// (i.e., we join together the "singleton" clocks corresponding to each read). It is reset to /// zero on each write operation. read: VClock, @@ -298,7 +299,7 @@ struct ThreadExtraState { } /// Global data-race detection state, contains the currently -/// executing thread as well as the vector-clocks associated +/// executing thread as well as the vector clocks associated /// with each of the threads. // FIXME: it is probably better to have one large RefCell, than to have so many small ones. #[derive(Debug, Clone)] @@ -335,7 +336,7 @@ pub struct GlobalState { /// for use as the index for a new thread. /// Elements in this set may still require the vector index to /// report data-races, and can only be re-used after all - /// active vector-clocks catch up with the threads timestamp. + /// active vector clocks catch up with the threads timestamp. reuse_candidates: RefCell>, /// We make SC fences act like RMWs on a global location. @@ -348,6 +349,9 @@ pub struct GlobalState { /// Track when an outdated (weak memory) load happens. pub track_outdated_loads: bool, + + /// Whether weak memory emulation is enabled + pub weak_memory: bool, } impl VisitProvenance for GlobalState { @@ -381,7 +385,7 @@ impl AccessType { }); if let Some(ty) = ty { - msg.push_str(&format!(" of type `{}`", ty)); + msg.push_str(&format!(" of type `{ty}`")); } msg @@ -680,6 +684,23 @@ impl MemoryCellClocks { } } +impl GlobalDataRaceHandler { + /// Select whether data race checking is disabled. This is solely an + /// implementation detail of `allow_data_races_*` and must not be used anywhere else! + fn set_ongoing_action_data_race_free(&self, enable: bool) { + match self { + GlobalDataRaceHandler::None => {} + GlobalDataRaceHandler::Vclocks(data_race) => { + let old = data_race.ongoing_action_data_race_free.replace(enable); + assert_ne!(old, enable, "cannot nest allow_data_races"); + } + GlobalDataRaceHandler::Genmc(genmc_ctx) => { + genmc_ctx.set_ongoing_action_data_race_free(enable); + } + } + } +} + /// Evaluation context extensions. impl<'tcx> EvalContextExt<'tcx> for MiriInterpCx<'tcx> {} pub trait EvalContextExt<'tcx>: MiriInterpCxExt<'tcx> { @@ -696,6 +717,19 @@ pub trait EvalContextExt<'tcx>: MiriInterpCxExt<'tcx> { // This is fine with StackedBorrow and race checks because they don't concern metadata on // the *value* (including the associated provenance if this is an AtomicPtr) at this location. // Only metadata on the location itself is used. + + if let Some(genmc_ctx) = this.machine.data_race.as_genmc_ref() { + // FIXME(GenMC): Inform GenMC what a non-atomic read here would return, to support mixed atomics/non-atomics + let old_val = None; + return genmc_ctx.atomic_load( + this, + place.ptr().addr(), + place.layout.size, + atomic, + old_val, + ); + } + let scalar = this.allow_data_races_ref(move |this| this.read_scalar(place))?; let buffered_scalar = this.buffered_atomic_read(place, atomic, scalar, || { this.validate_atomic_load(place, atomic) @@ -717,7 +751,13 @@ pub trait EvalContextExt<'tcx>: MiriInterpCxExt<'tcx> { // The program didn't actually do a read, so suppress the memory access hooks. // This is also a very special exception where we just ignore an error -- if this read // was UB e.g. because the memory is uninitialized, we don't want to know! - let old_val = this.run_for_validation(|this| this.read_scalar(dest)).discard_err(); + let old_val = this.run_for_validation_mut(|this| this.read_scalar(dest)).discard_err(); + // Inform GenMC about the atomic store. + if let Some(genmc_ctx) = this.machine.data_race.as_genmc_ref() { + // FIXME(GenMC): Inform GenMC what a non-atomic read here would return, to support mixed atomics/non-atomics + genmc_ctx.atomic_store(this, dest.ptr().addr(), dest.layout.size, val, atomic)?; + return interp_ok(()); + } this.allow_data_races_mut(move |this| this.write_scalar(val, dest))?; this.validate_atomic_store(dest, atomic)?; this.buffered_atomic_write(val, dest, atomic, old_val) @@ -737,6 +777,21 @@ pub trait EvalContextExt<'tcx>: MiriInterpCxExt<'tcx> { let old = this.allow_data_races_mut(|this| this.read_immediate(place))?; + // Inform GenMC about the atomic rmw operation. + if let Some(genmc_ctx) = this.machine.data_race.as_genmc_ref() { + // FIXME(GenMC): Inform GenMC what a non-atomic read here would return, to support mixed atomics/non-atomics + let (old_val, new_val) = genmc_ctx.atomic_rmw_op( + this, + place.ptr().addr(), + place.layout.size, + atomic, + (op, not), + rhs.to_scalar(), + )?; + this.allow_data_races_mut(|this| this.write_scalar(new_val, place))?; + return interp_ok(ImmTy::from_scalar(old_val, old.layout)); + } + let val = this.binary_op(op, &old, rhs)?; let val = if not { this.unary_op(mir::UnOp::Not, &val)? } else { val }; this.allow_data_races_mut(|this| this.write_immediate(*val, place))?; @@ -761,6 +816,19 @@ pub trait EvalContextExt<'tcx>: MiriInterpCxExt<'tcx> { let old = this.allow_data_races_mut(|this| this.read_scalar(place))?; this.allow_data_races_mut(|this| this.write_scalar(new, place))?; + // Inform GenMC about the atomic atomic exchange. + if let Some(genmc_ctx) = this.machine.data_race.as_genmc_ref() { + // FIXME(GenMC): Inform GenMC what a non-atomic read here would return, to support mixed atomics/non-atomics + let (old_val, _is_success) = genmc_ctx.atomic_exchange( + this, + place.ptr().addr(), + place.layout.size, + new, + atomic, + )?; + return interp_ok(old_val); + } + this.validate_atomic_rmw(place, atomic)?; this.buffered_atomic_rmw(new, place, atomic, old)?; @@ -780,6 +848,23 @@ pub trait EvalContextExt<'tcx>: MiriInterpCxExt<'tcx> { this.atomic_access_check(place, AtomicAccessType::Rmw)?; let old = this.allow_data_races_mut(|this| this.read_immediate(place))?; + + // Inform GenMC about the atomic min/max operation. + if let Some(genmc_ctx) = this.machine.data_race.as_genmc_ref() { + // FIXME(GenMC): Inform GenMC what a non-atomic read here would return, to support mixed atomics/non-atomics + let (old_val, new_val) = genmc_ctx.atomic_min_max_op( + this, + place.ptr().addr(), + place.layout.size, + atomic, + min, + old.layout.backend_repr.is_signed(), + rhs.to_scalar(), + )?; + this.allow_data_races_mut(|this| this.write_scalar(new_val, place))?; + return interp_ok(ImmTy::from_scalar(old_val, old.layout)); + } + let lt = this.binary_op(mir::BinOp::Lt, &old, &rhs)?.to_scalar().to_bool()?; #[rustfmt::skip] // rustfmt makes this unreadable @@ -823,6 +908,25 @@ pub trait EvalContextExt<'tcx>: MiriInterpCxExt<'tcx> { // read ordering and write in the success case. // Read as immediate for the sake of `binary_op()` let old = this.allow_data_races_mut(|this| this.read_immediate(place))?; + + // Inform GenMC about the atomic atomic compare exchange. + if let Some(genmc_ctx) = this.machine.data_race.as_genmc_ref() { + let (old, cmpxchg_success) = genmc_ctx.atomic_compare_exchange( + this, + place.ptr().addr(), + place.layout.size, + this.read_scalar(expect_old)?, + new, + success, + fail, + can_fail_spuriously, + )?; + if cmpxchg_success { + this.allow_data_races_mut(|this| this.write_scalar(new, place))?; + } + return interp_ok(Immediate::ScalarPair(old, Scalar::from_bool(cmpxchg_success))); + } + // `binary_op` will bail if either of them is not a scalar. let eq = this.binary_op(mir::BinOp::Eq, &old, expect_old)?; // If the operation would succeed, but is "weak", fail some portion @@ -859,49 +963,11 @@ pub trait EvalContextExt<'tcx>: MiriInterpCxExt<'tcx> { /// Update the data-race detector for an atomic fence on the current thread. fn atomic_fence(&mut self, atomic: AtomicFenceOrd) -> InterpResult<'tcx> { let this = self.eval_context_mut(); - let current_span = this.machine.current_span(); - if let Some(data_race) = &mut this.machine.data_race { - data_race.maybe_perform_sync_operation( - &this.machine.threads, - current_span, - |index, mut clocks| { - trace!("Atomic fence on {:?} with ordering {:?}", index, atomic); - - // Apply data-race detection for the current fences - // this treats AcqRel and SeqCst as the same as an acquire - // and release fence applied in the same timestamp. - if atomic != AtomicFenceOrd::Release { - // Either Acquire | AcqRel | SeqCst - clocks.apply_acquire_fence(); - } - if atomic == AtomicFenceOrd::SeqCst { - // Behave like an RMW on the global fence location. This takes full care of - // all the SC fence requirements, including C++17 §32.4 [atomics.order] - // paragraph 6 (which would limit what future reads can see). It also rules - // out many legal behaviors, but we don't currently have a model that would - // be more precise. - // Also see the second bullet on page 10 of - // . - let mut sc_fence_clock = data_race.last_sc_fence.borrow_mut(); - sc_fence_clock.join(&clocks.clock); - clocks.clock.join(&sc_fence_clock); - // Also establish some sort of order with the last SC write that happened, globally - // (but this is only respected by future reads). - clocks.write_seqcst.join(&data_race.last_sc_write_per_thread.borrow()); - } - // The release fence is last, since both of the above could alter our clock, - // which should be part of what is being released. - if atomic != AtomicFenceOrd::Acquire { - // Either Release | AcqRel | SeqCst - clocks.apply_release_fence(); - } - - // Increment timestamp in case of release semantics. - interp_ok(atomic != AtomicFenceOrd::Acquire) - }, - ) - } else { - interp_ok(()) + let machine = &this.machine; + match &this.machine.data_race { + GlobalDataRaceHandler::None => interp_ok(()), + GlobalDataRaceHandler::Vclocks(data_race) => data_race.atomic_fence(machine, atomic), + GlobalDataRaceHandler::Genmc(genmc_ctx) => genmc_ctx.atomic_fence(machine, atomic), } } @@ -910,10 +976,7 @@ pub trait EvalContextExt<'tcx>: MiriInterpCxExt<'tcx> { fn allow_data_races_all_threads_done(&mut self) { let this = self.eval_context_ref(); assert!(this.have_all_terminated()); - if let Some(data_race) = &this.machine.data_race { - let old = data_race.ongoing_action_data_race_free.replace(true); - assert!(!old, "cannot nest allow_data_races"); - } + this.machine.data_race.set_ongoing_action_data_race_free(true); } /// Calls the callback with the "release" clock of the current thread. @@ -923,14 +986,16 @@ pub trait EvalContextExt<'tcx>: MiriInterpCxExt<'tcx> { /// The closure will only be invoked if data race handling is on. fn release_clock(&self, callback: impl FnOnce(&VClock) -> R) -> Option { let this = self.eval_context_ref(); - Some(this.machine.data_race.as_ref()?.release_clock(&this.machine.threads, callback)) + Some( + this.machine.data_race.as_vclocks_ref()?.release_clock(&this.machine.threads, callback), + ) } /// Acquire the given clock into the current thread, establishing synchronization with /// the moment when that clock snapshot was taken via `release_clock`. fn acquire_clock(&self, clock: &VClock) { let this = self.eval_context_ref(); - if let Some(data_race) = &this.machine.data_race { + if let Some(data_race) = this.machine.data_race.as_vclocks_ref() { data_race.acquire_clock(clock, &this.machine.threads); } } @@ -1132,7 +1197,7 @@ impl VClockAlloc { machine: &MiriMachine<'_>, ) -> InterpResult<'tcx> { let current_span = machine.current_span(); - let global = machine.data_race.as_ref().unwrap(); + let global = machine.data_race.as_vclocks_ref().unwrap(); if !global.race_detecting() { return interp_ok(()); } @@ -1174,7 +1239,7 @@ impl VClockAlloc { machine: &mut MiriMachine<'_>, ) -> InterpResult<'tcx> { let current_span = machine.current_span(); - let global = machine.data_race.as_mut().unwrap(); + let global = machine.data_race.as_vclocks_mut().unwrap(); if !global.race_detecting() { return interp_ok(()); } @@ -1228,7 +1293,7 @@ impl Default for LocalClocks { impl FrameState { pub fn local_write(&self, local: mir::Local, storage_live: bool, machine: &MiriMachine<'_>) { let current_span = machine.current_span(); - let global = machine.data_race.as_ref().unwrap(); + let global = machine.data_race.as_vclocks_ref().unwrap(); if !global.race_detecting() { return; } @@ -1258,7 +1323,7 @@ impl FrameState { pub fn local_read(&self, local: mir::Local, machine: &MiriMachine<'_>) { let current_span = machine.current_span(); - let global = machine.data_race.as_ref().unwrap(); + let global = machine.data_race.as_vclocks_ref().unwrap(); if !global.race_detecting() { return; } @@ -1281,7 +1346,7 @@ impl FrameState { alloc: &mut VClockAlloc, machine: &MiriMachine<'_>, ) { - let global = machine.data_race.as_ref().unwrap(); + let global = machine.data_race.as_vclocks_ref().unwrap(); if !global.race_detecting() { return; } @@ -1314,14 +1379,9 @@ trait EvalContextPrivExt<'tcx>: MiriInterpCxExt<'tcx> { #[inline] fn allow_data_races_ref(&self, op: impl FnOnce(&MiriInterpCx<'tcx>) -> R) -> R { let this = self.eval_context_ref(); - if let Some(data_race) = &this.machine.data_race { - let old = data_race.ongoing_action_data_race_free.replace(true); - assert!(!old, "cannot nest allow_data_races"); - } + this.machine.data_race.set_ongoing_action_data_race_free(true); let result = op(this); - if let Some(data_race) = &this.machine.data_race { - data_race.ongoing_action_data_race_free.set(false); - } + this.machine.data_race.set_ongoing_action_data_race_free(false); result } @@ -1331,14 +1391,9 @@ trait EvalContextPrivExt<'tcx>: MiriInterpCxExt<'tcx> { #[inline] fn allow_data_races_mut(&mut self, op: impl FnOnce(&mut MiriInterpCx<'tcx>) -> R) -> R { let this = self.eval_context_mut(); - if let Some(data_race) = &this.machine.data_race { - let old = data_race.ongoing_action_data_race_free.replace(true); - assert!(!old, "cannot nest allow_data_races"); - } + this.machine.data_race.set_ongoing_action_data_race_free(true); let result = op(this); - if let Some(data_race) = &this.machine.data_race { - data_race.ongoing_action_data_race_free.set(false); - } + this.machine.data_race.set_ongoing_action_data_race_free(false); result } @@ -1355,7 +1410,7 @@ trait EvalContextPrivExt<'tcx>: MiriInterpCxExt<'tcx> { let align = Align::from_bytes(place.layout.size.bytes()).unwrap(); this.check_ptr_align(place.ptr(), align)?; // Ensure the allocation is mutable. Even failing (read-only) compare_exchange need mutable - // memory on many targets (i.e., they segfault if taht memory is mapped read-only), and + // memory on many targets (i.e., they segfault if that memory is mapped read-only), and // atomic loads can be implemented via compare_exchange on some targets. There could // possibly be some very specific exceptions to this, see // for details. @@ -1486,7 +1541,9 @@ trait EvalContextPrivExt<'tcx>: MiriInterpCxExt<'tcx> { ) -> InterpResult<'tcx> { let this = self.eval_context_ref(); assert!(access.is_atomic()); - let Some(data_race) = &this.machine.data_race else { return interp_ok(()) }; + let Some(data_race) = this.machine.data_race.as_vclocks_ref() else { + return interp_ok(()); + }; if !data_race.race_detecting() { return interp_ok(()); } @@ -1494,7 +1551,7 @@ trait EvalContextPrivExt<'tcx>: MiriInterpCxExt<'tcx> { let (alloc_id, base_offset, _prov) = this.ptr_get_alloc_id(place.ptr(), 0)?; // Load and log the atomic operation. // Note that atomic loads are possible even from read-only allocations, so `get_alloc_extra_mut` is not an option. - let alloc_meta = this.get_alloc_extra(alloc_id)?.data_race.as_ref().unwrap(); + let alloc_meta = this.get_alloc_extra(alloc_id)?.data_race.as_vclocks_ref().unwrap(); trace!( "Atomic op({}) with ordering {:?} on {:?} (size={})", access.description(None, None), @@ -1565,6 +1622,7 @@ impl GlobalState { last_sc_fence: RefCell::new(VClock::default()), last_sc_write_per_thread: RefCell::new(VClock::default()), track_outdated_loads: config.track_outdated_loads, + weak_memory: config.weak_memory_emulation, }; // Setup the main-thread since it is not explicitly created: @@ -1728,7 +1786,7 @@ impl GlobalState { } } - /// On thread termination, the vector-clock may re-used + /// On thread termination, the vector clock may be re-used /// in the future once all remaining thread-clocks catch /// up with the time index of the terminated thread. /// This assigns thread termination with a unique index @@ -1750,6 +1808,50 @@ impl GlobalState { reuse.insert(current_index); } + /// Update the data-race detector for an atomic fence on the current thread. + fn atomic_fence<'tcx>( + &self, + machine: &MiriMachine<'tcx>, + atomic: AtomicFenceOrd, + ) -> InterpResult<'tcx> { + let current_span = machine.current_span(); + self.maybe_perform_sync_operation(&machine.threads, current_span, |index, mut clocks| { + trace!("Atomic fence on {:?} with ordering {:?}", index, atomic); + + // Apply data-race detection for the current fences + // this treats AcqRel and SeqCst as the same as an acquire + // and release fence applied in the same timestamp. + if atomic != AtomicFenceOrd::Release { + // Either Acquire | AcqRel | SeqCst + clocks.apply_acquire_fence(); + } + if atomic == AtomicFenceOrd::SeqCst { + // Behave like an RMW on the global fence location. This takes full care of + // all the SC fence requirements, including C++17 §32.4 [atomics.order] + // paragraph 6 (which would limit what future reads can see). It also rules + // out many legal behaviors, but we don't currently have a model that would + // be more precise. + // Also see the second bullet on page 10 of + // . + let mut sc_fence_clock = self.last_sc_fence.borrow_mut(); + sc_fence_clock.join(&clocks.clock); + clocks.clock.join(&sc_fence_clock); + // Also establish some sort of order with the last SC write that happened, globally + // (but this is only respected by future reads). + clocks.write_seqcst.join(&self.last_sc_write_per_thread.borrow()); + } + // The release fence is last, since both of the above could alter our clock, + // which should be part of what is being released. + if atomic != AtomicFenceOrd::Acquire { + // Either Release | AcqRel | SeqCst + clocks.apply_release_fence(); + } + + // Increment timestamp in case of release semantics. + interp_ok(atomic != AtomicFenceOrd::Acquire) + }) + } + /// Attempt to perform a synchronized operation, this /// will perform no operation if multi-threading is /// not currently enabled. diff --git a/src/tools/miri/src/concurrency/data_race_handler.rs b/src/tools/miri/src/concurrency/data_race_handler.rs new file mode 100644 index 0000000000000..047c37e56b8f3 --- /dev/null +++ b/src/tools/miri/src/concurrency/data_race_handler.rs @@ -0,0 +1,91 @@ +use std::rc::Rc; + +use super::{data_race, weak_memory}; +use crate::concurrency::GenmcCtx; +use crate::{VisitProvenance, VisitWith}; + +pub enum GlobalDataRaceHandler { + /// No data race detection will be done. + None, + /// State required to run in GenMC mode. + /// In this mode, the program will be executed repeatedly to explore different concurrent executions. + /// The `GenmcCtx` must persist across multiple executions, so it is behind an `Rc`. + /// + /// The `GenmcCtx` has several methods with which to inform it about events like atomic memory accesses. + /// In GenMC mode, some functionality is taken over by GenMC: + /// - Memory Allocation: Allocated addresses need to be consistent across executions, which Miri's allocator doesn't guarantee + /// - Scheduling: To influence which concurrent execution we will explore next, GenMC takes over scheduling + /// - Atomic operations: GenMC will ensure that we explore all possible values that the memory model allows + /// an atomic operation to see at any specific point of the program. + Genmc(Rc), + /// The default data race detector for Miri using vector clocks. + Vclocks(Box), +} + +#[derive(Debug)] +pub enum AllocDataRaceHandler { + None, + Genmc, + /// Data race detection via the use of vector clocks. + /// Weak memory emulation via the use of store buffers (if enabled). + Vclocks(data_race::AllocState, Option), +} + +impl GlobalDataRaceHandler { + pub fn is_none(&self) -> bool { + matches!(self, GlobalDataRaceHandler::None) + } + + pub fn as_vclocks_ref(&self) -> Option<&data_race::GlobalState> { + if let Self::Vclocks(data_race) = self { Some(data_race) } else { None } + } + + pub fn as_vclocks_mut(&mut self) -> Option<&mut data_race::GlobalState> { + if let Self::Vclocks(data_race) = self { Some(data_race) } else { None } + } + + pub fn as_genmc_ref(&self) -> Option<&GenmcCtx> { + if let Self::Genmc(genmc_ctx) = self { Some(genmc_ctx) } else { None } + } +} + +impl AllocDataRaceHandler { + pub fn as_vclocks_ref(&self) -> Option<&data_race::AllocState> { + if let Self::Vclocks(data_race, _weak_memory) = self { Some(data_race) } else { None } + } + + pub fn as_vclocks_mut(&mut self) -> Option<&mut data_race::AllocState> { + if let Self::Vclocks(data_race, _weak_memory) = self { Some(data_race) } else { None } + } + + pub fn as_weak_memory_ref(&self) -> Option<&weak_memory::AllocState> { + if let Self::Vclocks(_data_race, weak_memory) = self { weak_memory.as_ref() } else { None } + } + + pub fn as_weak_memory_mut(&mut self) -> Option<&mut weak_memory::AllocState> { + if let Self::Vclocks(_data_race, weak_memory) = self { weak_memory.as_mut() } else { None } + } +} + +impl VisitProvenance for GlobalDataRaceHandler { + fn visit_provenance(&self, visit: &mut VisitWith<'_>) { + match self { + GlobalDataRaceHandler::None => {} + GlobalDataRaceHandler::Vclocks(data_race) => data_race.visit_provenance(visit), + GlobalDataRaceHandler::Genmc(genmc_ctx) => genmc_ctx.visit_provenance(visit), + } + } +} + +impl VisitProvenance for AllocDataRaceHandler { + fn visit_provenance(&self, visit: &mut VisitWith<'_>) { + match self { + AllocDataRaceHandler::None => {} + AllocDataRaceHandler::Genmc => {} + AllocDataRaceHandler::Vclocks(data_race, weak_memory) => { + data_race.visit_provenance(visit); + weak_memory.visit_provenance(visit); + } + } + } +} diff --git a/src/tools/miri/src/concurrency/genmc/config.rs b/src/tools/miri/src/concurrency/genmc/config.rs new file mode 100644 index 0000000000000..f91211a670f65 --- /dev/null +++ b/src/tools/miri/src/concurrency/genmc/config.rs @@ -0,0 +1,19 @@ +use crate::MiriConfig; + +#[derive(Debug, Default, Clone)] +pub struct GenmcConfig { + // TODO: add fields +} + +impl GenmcConfig { + /// Function for parsing command line options for GenMC mode. + /// All GenMC arguments start with the string "-Zmiri-genmc". + /// + /// `trimmed_arg` should be the argument to be parsed, with the suffix "-Zmiri-genmc" removed + pub fn parse_arg(genmc_config: &mut Option, trimmed_arg: &str) { + if genmc_config.is_none() { + *genmc_config = Some(Default::default()); + } + todo!("implement parsing of GenMC options") + } +} diff --git a/src/tools/miri/src/concurrency/genmc/dummy.rs b/src/tools/miri/src/concurrency/genmc/dummy.rs new file mode 100644 index 0000000000000..3d0558fb68530 --- /dev/null +++ b/src/tools/miri/src/concurrency/genmc/dummy.rs @@ -0,0 +1,239 @@ +#![allow(unused)] + +use rustc_abi::{Align, Size}; +use rustc_const_eval::interpret::{InterpCx, InterpResult}; +use rustc_middle::mir; + +use crate::{ + AtomicFenceOrd, AtomicReadOrd, AtomicRwOrd, AtomicWriteOrd, MemoryKind, MiriConfig, + MiriMachine, Scalar, ThreadId, ThreadManager, VisitProvenance, VisitWith, +}; + +#[derive(Debug)] +pub struct GenmcCtx {} + +#[derive(Debug, Default, Clone)] +pub struct GenmcConfig {} + +impl GenmcCtx { + pub fn new(_miri_config: &MiriConfig, _genmc_config: &GenmcConfig) -> Self { + unreachable!() + } + + pub fn get_stuck_execution_count(&self) -> usize { + unreachable!() + } + + pub fn print_genmc_graph(&self) { + unreachable!() + } + + pub fn is_exploration_done(&self) -> bool { + unreachable!() + } + + /**** Memory access handling ****/ + + pub(crate) fn handle_execution_start(&self) { + unreachable!() + } + + pub(crate) fn handle_execution_end<'tcx>( + &self, + _ecx: &InterpCx<'tcx, MiriMachine<'tcx>>, + ) -> Result<(), String> { + unreachable!() + } + + pub(super) fn set_ongoing_action_data_race_free(&self, _enable: bool) { + unreachable!() + } + + //* might fails if there's a race, load might also not read anything (returns None) */ + pub(crate) fn atomic_load<'tcx>( + &self, + _ecx: &InterpCx<'tcx, MiriMachine<'tcx>>, + _address: Size, + _size: Size, + _ordering: AtomicReadOrd, + _old_val: Option, + ) -> InterpResult<'tcx, Scalar> { + unreachable!() + } + + pub(crate) fn atomic_store<'tcx>( + &self, + _ecx: &InterpCx<'tcx, MiriMachine<'tcx>>, + _address: Size, + _size: Size, + _value: Scalar, + _ordering: AtomicWriteOrd, + ) -> InterpResult<'tcx, ()> { + unreachable!() + } + + pub(crate) fn atomic_fence<'tcx>( + &self, + _machine: &MiriMachine<'tcx>, + _ordering: AtomicFenceOrd, + ) -> InterpResult<'tcx, ()> { + unreachable!() + } + + pub(crate) fn atomic_rmw_op<'tcx>( + &self, + _ecx: &InterpCx<'tcx, MiriMachine<'tcx>>, + _address: Size, + _size: Size, + _ordering: AtomicRwOrd, + (rmw_op, not): (mir::BinOp, bool), + _rhs_scalar: Scalar, + ) -> InterpResult<'tcx, (Scalar, Scalar)> { + unreachable!() + } + + pub(crate) fn atomic_min_max_op<'tcx>( + &self, + ecx: &InterpCx<'tcx, MiriMachine<'tcx>>, + address: Size, + size: Size, + ordering: AtomicRwOrd, + min: bool, + is_signed: bool, + rhs_scalar: Scalar, + ) -> InterpResult<'tcx, (Scalar, Scalar)> { + unreachable!() + } + + pub(crate) fn atomic_exchange<'tcx>( + &self, + _ecx: &InterpCx<'tcx, MiriMachine<'tcx>>, + _address: Size, + _size: Size, + _rhs_scalar: Scalar, + _ordering: AtomicRwOrd, + ) -> InterpResult<'tcx, (Scalar, bool)> { + unreachable!() + } + + pub(crate) fn atomic_compare_exchange<'tcx>( + &self, + _ecx: &InterpCx<'tcx, MiriMachine<'tcx>>, + _address: Size, + _size: Size, + _expected_old_value: Scalar, + _new_value: Scalar, + _success: AtomicRwOrd, + _fail: AtomicReadOrd, + _can_fail_spuriously: bool, + ) -> InterpResult<'tcx, (Scalar, bool)> { + unreachable!() + } + + pub(crate) fn memory_load<'tcx>( + &self, + _machine: &MiriMachine<'tcx>, + _address: Size, + _size: Size, + ) -> InterpResult<'tcx, ()> { + unreachable!() + } + + pub(crate) fn memory_store<'tcx>( + &self, + _machine: &MiriMachine<'tcx>, + _address: Size, + _size: Size, + ) -> InterpResult<'tcx, ()> { + unreachable!() + } + + /**** Memory (de)allocation ****/ + + pub(crate) fn handle_alloc<'tcx>( + &self, + _machine: &MiriMachine<'tcx>, + _size: Size, + _alignment: Align, + _memory_kind: MemoryKind, + ) -> InterpResult<'tcx, u64> { + unreachable!() + } + + pub(crate) fn handle_dealloc<'tcx>( + &self, + _machine: &MiriMachine<'tcx>, + _address: Size, + _size: Size, + _align: Align, + _kind: MemoryKind, + ) -> InterpResult<'tcx, ()> { + unreachable!() + } + + /**** Thread management ****/ + + pub(crate) fn handle_thread_create<'tcx>( + &self, + _threads: &ThreadManager<'tcx>, + _new_thread_id: ThreadId, + ) -> InterpResult<'tcx, ()> { + unreachable!() + } + + pub(crate) fn handle_thread_join<'tcx>( + &self, + _active_thread_id: ThreadId, + _child_thread_id: ThreadId, + ) -> InterpResult<'tcx, ()> { + unreachable!() + } + + pub(crate) fn handle_thread_stack_empty(&self, _thread_id: ThreadId) { + unreachable!() + } + + pub(crate) fn handle_thread_finish<'tcx>( + &self, + _threads: &ThreadManager<'tcx>, + ) -> InterpResult<'tcx, ()> { + unreachable!() + } + + /**** Scheduling functionality ****/ + + pub(crate) fn schedule_thread<'tcx>( + &self, + _ecx: &InterpCx<'tcx, MiriMachine<'tcx>>, + ) -> InterpResult<'tcx, ThreadId> { + unreachable!() + } + + /**** Blocking instructions ****/ + + pub(crate) fn handle_verifier_assume<'tcx>( + &self, + _machine: &MiriMachine<'tcx>, + _condition: bool, + ) -> InterpResult<'tcx, ()> { + unreachable!() + } +} + +impl VisitProvenance for GenmcCtx { + fn visit_provenance(&self, _visit: &mut VisitWith<'_>) { + unreachable!() + } +} + +impl GenmcConfig { + pub fn parse_arg(_genmc_config: &mut Option, trimmed_arg: &str) { + unimplemented!( + "GenMC feature im Miri is disabled, cannot handle argument: \"-Zmiri-genmc{trimmed_arg}\"" + ); + } + + pub fn should_print_graph(&self, _rep: usize) -> bool { + unreachable!() + } +} diff --git a/src/tools/miri/src/concurrency/genmc/mod.rs b/src/tools/miri/src/concurrency/genmc/mod.rs new file mode 100644 index 0000000000000..0dfd4b9b80f98 --- /dev/null +++ b/src/tools/miri/src/concurrency/genmc/mod.rs @@ -0,0 +1,284 @@ +#![allow(unused)] // FIXME(GenMC): remove this + +use std::cell::Cell; + +use rustc_abi::{Align, Size}; +use rustc_const_eval::interpret::{InterpCx, InterpResult, interp_ok}; +use rustc_middle::mir; + +use crate::{ + AtomicFenceOrd, AtomicReadOrd, AtomicRwOrd, AtomicWriteOrd, MemoryKind, MiriConfig, + MiriMachine, Scalar, ThreadId, ThreadManager, VisitProvenance, VisitWith, +}; + +mod config; + +pub use self::config::GenmcConfig; + +// FIXME(GenMC): add fields +pub struct GenmcCtx { + /// Some actions Miri does are allowed to cause data races. + /// GenMC will not be informed about certain actions (e.g. non-atomic loads) when this flag is set. + allow_data_races: Cell, +} + +impl GenmcCtx { + /// Create a new `GenmcCtx` from a given config. + pub fn new(miri_config: &MiriConfig, genmc_config: &GenmcConfig) -> Self { + assert!(miri_config.genmc_mode); + todo!() + } + + pub fn get_stuck_execution_count(&self) -> usize { + todo!() + } + + pub fn print_genmc_graph(&self) { + todo!() + } + + /// This function determines if we should continue exploring executions or if we are done. + /// + /// In GenMC mode, the input program should be repeatedly executed until this function returns `true` or an error is found. + pub fn is_exploration_done(&self) -> bool { + todo!() + } + + /// Inform GenMC that a new program execution has started. + /// This function should be called at the start of every execution. + pub(crate) fn handle_execution_start(&self) { + todo!() + } + + /// Inform GenMC that the program's execution has ended. + /// + /// This function must be called even when the execution got stuck (i.e., it returned a `InterpErrorKind::MachineStop` with error kind `TerminationInfo::GenmcStuckExecution`). + pub(crate) fn handle_execution_end<'tcx>( + &self, + ecx: &InterpCx<'tcx, MiriMachine<'tcx>>, + ) -> Result<(), String> { + todo!() + } + + /**** Memory access handling ****/ + + /// Select whether data race free actions should be allowed. This function should be used carefully! + /// + /// If `true` is passed, allow for data races to happen without triggering an error, until this function is called again with argument `false`. + /// This allows for racy non-atomic memory accesses to be ignored (GenMC is not informed about them at all). + /// + /// Certain operations are not permitted in GenMC mode with data races disabled and will cause a panic, e.g., atomic accesses or asking for scheduling decisions. + /// + /// # Panics + /// If data race free is attempted to be set more than once (i.e., no nesting allowed). + pub(super) fn set_ongoing_action_data_race_free(&self, enable: bool) { + let old = self.allow_data_races.replace(enable); + assert_ne!(old, enable, "cannot nest allow_data_races"); + } + + pub(crate) fn atomic_load<'tcx>( + &self, + ecx: &InterpCx<'tcx, MiriMachine<'tcx>>, + address: Size, + size: Size, + ordering: AtomicReadOrd, + old_val: Option, + ) -> InterpResult<'tcx, Scalar> { + assert!(!self.allow_data_races.get()); + todo!() + } + + pub(crate) fn atomic_store<'tcx>( + &self, + ecx: &InterpCx<'tcx, MiriMachine<'tcx>>, + address: Size, + size: Size, + value: Scalar, + ordering: AtomicWriteOrd, + ) -> InterpResult<'tcx, ()> { + assert!(!self.allow_data_races.get()); + todo!() + } + + pub(crate) fn atomic_fence<'tcx>( + &self, + machine: &MiriMachine<'tcx>, + ordering: AtomicFenceOrd, + ) -> InterpResult<'tcx, ()> { + assert!(!self.allow_data_races.get()); + todo!() + } + + /// Inform GenMC about an atomic read-modify-write operation. + /// + /// Returns `(old_val, new_val)`. + pub(crate) fn atomic_rmw_op<'tcx>( + &self, + ecx: &InterpCx<'tcx, MiriMachine<'tcx>>, + address: Size, + size: Size, + ordering: AtomicRwOrd, + (rmw_op, not): (mir::BinOp, bool), + rhs_scalar: Scalar, + ) -> InterpResult<'tcx, (Scalar, Scalar)> { + assert!(!self.allow_data_races.get()); + todo!() + } + + /// Inform GenMC about an atomic `min` or `max` operation. + /// + /// Returns `(old_val, new_val)`. + pub(crate) fn atomic_min_max_op<'tcx>( + &self, + ecx: &InterpCx<'tcx, MiriMachine<'tcx>>, + address: Size, + size: Size, + ordering: AtomicRwOrd, + min: bool, + is_signed: bool, + rhs_scalar: Scalar, + ) -> InterpResult<'tcx, (Scalar, Scalar)> { + assert!(!self.allow_data_races.get()); + todo!() + } + + pub(crate) fn atomic_exchange<'tcx>( + &self, + ecx: &InterpCx<'tcx, MiriMachine<'tcx>>, + address: Size, + size: Size, + rhs_scalar: Scalar, + ordering: AtomicRwOrd, + ) -> InterpResult<'tcx, (Scalar, bool)> { + assert!(!self.allow_data_races.get()); + todo!() + } + + pub(crate) fn atomic_compare_exchange<'tcx>( + &self, + ecx: &InterpCx<'tcx, MiriMachine<'tcx>>, + address: Size, + size: Size, + expected_old_value: Scalar, + new_value: Scalar, + success: AtomicRwOrd, + fail: AtomicReadOrd, + can_fail_spuriously: bool, + ) -> InterpResult<'tcx, (Scalar, bool)> { + assert!(!self.allow_data_races.get()); + todo!() + } + + /// Inform GenMC about a non-atomic memory load + /// + /// NOTE: Unlike for *atomic* loads, we don't return a value here. Non-atomic values are still handled by Miri. + pub(crate) fn memory_load<'tcx>( + &self, + machine: &MiriMachine<'tcx>, + address: Size, + size: Size, + ) -> InterpResult<'tcx, ()> { + todo!() + } + + pub(crate) fn memory_store<'tcx>( + &self, + machine: &MiriMachine<'tcx>, + address: Size, + size: Size, + ) -> InterpResult<'tcx, ()> { + todo!() + } + + /**** Memory (de)allocation ****/ + + pub(crate) fn handle_alloc<'tcx>( + &self, + machine: &MiriMachine<'tcx>, + size: Size, + alignment: Align, + memory_kind: MemoryKind, + ) -> InterpResult<'tcx, u64> { + todo!() + } + + pub(crate) fn handle_dealloc<'tcx>( + &self, + machine: &MiriMachine<'tcx>, + address: Size, + size: Size, + align: Align, + kind: MemoryKind, + ) -> InterpResult<'tcx, ()> { + todo!() + } + + /**** Thread management ****/ + + pub(crate) fn handle_thread_create<'tcx>( + &self, + threads: &ThreadManager<'tcx>, + new_thread_id: ThreadId, + ) -> InterpResult<'tcx, ()> { + assert!(!self.allow_data_races.get()); + todo!() + } + + pub(crate) fn handle_thread_join<'tcx>( + &self, + active_thread_id: ThreadId, + child_thread_id: ThreadId, + ) -> InterpResult<'tcx, ()> { + assert!(!self.allow_data_races.get()); + todo!() + } + + pub(crate) fn handle_thread_stack_empty(&self, thread_id: ThreadId) { + todo!() + } + + pub(crate) fn handle_thread_finish<'tcx>( + &self, + threads: &ThreadManager<'tcx>, + ) -> InterpResult<'tcx, ()> { + assert!(!self.allow_data_races.get()); + todo!() + } + + /**** Scheduling functionality ****/ + + /// Ask for a scheduling decision. This should be called before every MIR instruction. + /// + /// GenMC may realize that the execution got stuck, then this function will return a `InterpErrorKind::MachineStop` with error kind `TerminationInfo::GenmcStuckExecution`). + /// + /// This is **not** an error by iself! Treat this as if the program ended normally: `handle_execution_end` should be called next, which will determine if were are any actual errors. + pub(crate) fn schedule_thread<'tcx>( + &self, + ecx: &InterpCx<'tcx, MiriMachine<'tcx>>, + ) -> InterpResult<'tcx, ThreadId> { + assert!(!self.allow_data_races.get()); + todo!() + } + + /**** Blocking instructions ****/ + + pub(crate) fn handle_verifier_assume<'tcx>( + &self, + machine: &MiriMachine<'tcx>, + condition: bool, + ) -> InterpResult<'tcx, ()> { + if condition { interp_ok(()) } else { self.handle_user_block(machine) } + } +} + +impl VisitProvenance for GenmcCtx { + fn visit_provenance(&self, _visit: &mut VisitWith<'_>) { + // We don't have any tags. + } +} + +impl GenmcCtx { + fn handle_user_block<'tcx>(&self, machine: &MiriMachine<'tcx>) -> InterpResult<'tcx, ()> { + todo!() + } +} diff --git a/src/tools/miri/src/concurrency/init_once.rs b/src/tools/miri/src/concurrency/init_once.rs index 534f02545bdec..c26384f65f6cc 100644 --- a/src/tools/miri/src/concurrency/init_once.rs +++ b/src/tools/miri/src/concurrency/init_once.rs @@ -72,7 +72,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { init_once.status = InitOnceStatus::Complete; // Each complete happens-before the end of the wait - if let Some(data_race) = &this.machine.data_race { + if let Some(data_race) = this.machine.data_race.as_vclocks_ref() { data_race .release_clock(&this.machine.threads, |clock| init_once.clock.clone_from(clock)); } @@ -99,7 +99,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { init_once.status = InitOnceStatus::Uninitialized; // Each complete happens-before the end of the wait - if let Some(data_race) = &this.machine.data_race { + if let Some(data_race) = this.machine.data_race.as_vclocks_ref() { data_race .release_clock(&this.machine.threads, |clock| init_once.clock.clone_from(clock)); } diff --git a/src/tools/miri/src/concurrency/mod.rs b/src/tools/miri/src/concurrency/mod.rs index c5082b4e40b4c..dd33f90f153d5 100644 --- a/src/tools/miri/src/concurrency/mod.rs +++ b/src/tools/miri/src/concurrency/mod.rs @@ -1,5 +1,6 @@ pub mod cpu_affinity; pub mod data_race; +mod data_race_handler; pub mod init_once; mod range_object_map; pub mod sync; @@ -7,4 +8,19 @@ pub mod thread; mod vector_clock; pub mod weak_memory; +// Import either the real genmc adapter or a dummy module. +cfg_match! { + feature = "genmc" => { + mod genmc; + pub use self::genmc::{GenmcCtx, GenmcConfig}; + } + _ => { + #[path = "genmc/dummy.rs"] + mod genmc_dummy; + use self::genmc_dummy as genmc; + pub use self::genmc::{GenmcCtx, GenmcConfig}; + } +} + +pub use self::data_race_handler::{AllocDataRaceHandler, GlobalDataRaceHandler}; pub use self::vector_clock::VClock; diff --git a/src/tools/miri/src/concurrency/sync.rs b/src/tools/miri/src/concurrency/sync.rs index 268268848ed2f..64f34d3e21cc8 100644 --- a/src/tools/miri/src/concurrency/sync.rs +++ b/src/tools/miri/src/concurrency/sync.rs @@ -361,7 +361,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { mutex.owner = Some(thread); } mutex.lock_count = mutex.lock_count.strict_add(1); - if let Some(data_race) = &this.machine.data_race { + if let Some(data_race) = this.machine.data_race.as_vclocks_ref() { data_race.acquire_clock(&mutex.clock, &this.machine.threads); } } @@ -385,7 +385,8 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { mutex.owner = None; // The mutex is completely unlocked. Try transferring ownership // to another thread. - if let Some(data_race) = &this.machine.data_race { + + if let Some(data_race) = this.machine.data_race.as_vclocks_ref() { data_race.release_clock(&this.machine.threads, |clock| { mutex.clock.clone_from(clock) }); @@ -477,7 +478,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { let rwlock = &mut this.machine.sync.rwlocks[id]; let count = rwlock.readers.entry(thread).or_insert(0); *count = count.strict_add(1); - if let Some(data_race) = &this.machine.data_race { + if let Some(data_race) = this.machine.data_race.as_vclocks_ref() { data_race.acquire_clock(&rwlock.clock_unlocked, &this.machine.threads); } } @@ -502,7 +503,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { } Entry::Vacant(_) => return interp_ok(false), // we did not even own this lock } - if let Some(data_race) = &this.machine.data_race { + if let Some(data_race) = this.machine.data_race.as_vclocks_ref() { // Add this to the shared-release clock of all concurrent readers. data_race.release_clock(&this.machine.threads, |clock| { rwlock.clock_current_readers.join(clock) @@ -565,7 +566,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { trace!("rwlock_writer_lock: {:?} now held by {:?}", id, thread); let rwlock = &mut this.machine.sync.rwlocks[id]; rwlock.writer = Some(thread); - if let Some(data_race) = &this.machine.data_race { + if let Some(data_race) = this.machine.data_race.as_vclocks_ref() { data_race.acquire_clock(&rwlock.clock_unlocked, &this.machine.threads); } } @@ -585,7 +586,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { rwlock.writer = None; trace!("rwlock_writer_unlock: {:?} unlocked by {:?}", id, thread); // Record release clock for next lock holder. - if let Some(data_race) = &this.machine.data_race { + if let Some(data_race) = this.machine.data_race.as_vclocks_ref() { data_race.release_clock(&this.machine.threads, |clock| { rwlock.clock_unlocked.clone_from(clock) }); @@ -691,7 +692,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { match unblock { UnblockKind::Ready => { // The condvar was signaled. Make sure we get the clock for that. - if let Some(data_race) = &this.machine.data_race { + if let Some(data_race) = this.machine.data_race.as_vclocks_ref() { data_race.acquire_clock( &this.machine.sync.condvars[condvar].clock, &this.machine.threads, @@ -721,10 +722,9 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { fn condvar_signal(&mut self, id: CondvarId) -> InterpResult<'tcx, bool> { let this = self.eval_context_mut(); let condvar = &mut this.machine.sync.condvars[id]; - let data_race = &this.machine.data_race; // Each condvar signal happens-before the end of the condvar wake - if let Some(data_race) = data_race { + if let Some(data_race) = this.machine.data_race.as_vclocks_ref() { data_race.release_clock(&this.machine.threads, |clock| condvar.clock.clone_from(clock)); } let Some(waiter) = condvar.waiters.pop_front() else { @@ -764,7 +764,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { UnblockKind::Ready => { let futex = futex_ref.0.borrow(); // Acquire the clock of the futex. - if let Some(data_race) = &this.machine.data_race { + if let Some(data_race) = this.machine.data_race.as_vclocks_ref() { data_race.acquire_clock(&futex.clock, &this.machine.threads); } }, @@ -792,10 +792,9 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { ) -> InterpResult<'tcx, usize> { let this = self.eval_context_mut(); let mut futex = futex_ref.0.borrow_mut(); - let data_race = &this.machine.data_race; // Each futex-wake happens-before the end of the futex wait - if let Some(data_race) = data_race { + if let Some(data_race) = this.machine.data_race.as_vclocks_ref() { data_race.release_clock(&this.machine.threads, |clock| futex.clock.clone_from(clock)); } diff --git a/src/tools/miri/src/concurrency/thread.rs b/src/tools/miri/src/concurrency/thread.rs index 94629964ea604..8aa65e6cb612b 100644 --- a/src/tools/miri/src/concurrency/thread.rs +++ b/src/tools/miri/src/concurrency/thread.rs @@ -6,6 +6,7 @@ use std::task::Poll; use std::time::{Duration, SystemTime}; use either::Either; +use rand::seq::IteratorRandom; use rustc_abi::ExternAbi; use rustc_const_eval::CTRL_C_RECEIVED; use rustc_data_structures::fx::FxHashMap; @@ -15,7 +16,7 @@ use rustc_middle::mir::Mutability; use rustc_middle::ty::layout::TyAndLayout; use rustc_span::Span; -use crate::concurrency::data_race; +use crate::concurrency::GlobalDataRaceHandler; use crate::shims::tls; use crate::*; @@ -347,7 +348,7 @@ enum Timeout { impl Timeout { /// How long do we have to wait from now until the specified time? - fn get_wait_time(&self, clock: &Clock) -> Duration { + fn get_wait_time(&self, clock: &MonotonicClock) -> Duration { match self { Timeout::Monotonic(instant) => instant.duration_since(clock.now()), Timeout::RealTime(time) => @@ -401,6 +402,8 @@ pub struct ThreadManager<'tcx> { thread_local_allocs: FxHashMap<(DefId, ThreadId), StrictPointer>, /// A flag that indicates that we should change the active thread. yield_active_thread: bool, + /// A flag that indicates that we should do round robin scheduling of threads else randomized scheduling is used. + fixed_scheduling: bool, } impl VisitProvenance for ThreadManager<'_> { @@ -410,6 +413,7 @@ impl VisitProvenance for ThreadManager<'_> { thread_local_allocs, active_thread: _, yield_active_thread: _, + fixed_scheduling: _, } = self; for thread in threads { @@ -421,8 +425,8 @@ impl VisitProvenance for ThreadManager<'_> { } } -impl<'tcx> Default for ThreadManager<'tcx> { - fn default() -> Self { +impl<'tcx> ThreadManager<'tcx> { + pub(crate) fn new(config: &MiriConfig) -> Self { let mut threads = IndexVec::new(); // Create the main thread and add it to the list of threads. threads.push(Thread::new(Some("main"), None)); @@ -431,11 +435,10 @@ impl<'tcx> Default for ThreadManager<'tcx> { threads, thread_local_allocs: Default::default(), yield_active_thread: false, + fixed_scheduling: config.fixed_scheduling, } } -} -impl<'tcx> ThreadManager<'tcx> { pub(crate) fn init( ecx: &mut MiriInterpCx<'tcx>, on_main_stack_empty: StackEmptyCallback<'tcx>, @@ -580,13 +583,28 @@ impl<'tcx> ThreadManager<'tcx> { fn join_thread( &mut self, joined_thread_id: ThreadId, - data_race: Option<&mut data_race::GlobalState>, + data_race_handler: &mut GlobalDataRaceHandler, ) -> InterpResult<'tcx> { if self.threads[joined_thread_id].join_status == ThreadJoinStatus::Detached { // On Windows this corresponds to joining on a closed handle. throw_ub_format!("trying to join a detached thread"); } + fn after_join<'tcx>( + threads: &mut ThreadManager<'_>, + joined_thread_id: ThreadId, + data_race_handler: &mut GlobalDataRaceHandler, + ) -> InterpResult<'tcx> { + match data_race_handler { + GlobalDataRaceHandler::None => {} + GlobalDataRaceHandler::Vclocks(data_race) => + data_race.thread_joined(threads, joined_thread_id), + GlobalDataRaceHandler::Genmc(genmc_ctx) => + genmc_ctx.handle_thread_join(threads.active_thread, joined_thread_id)?, + } + interp_ok(()) + } + // Mark the joined thread as being joined so that we detect if other // threads try to join it. self.threads[joined_thread_id].join_status = ThreadJoinStatus::Joined; @@ -606,18 +624,13 @@ impl<'tcx> ThreadManager<'tcx> { } |this, unblock: UnblockKind| { assert_eq!(unblock, UnblockKind::Ready); - if let Some(data_race) = &mut this.machine.data_race { - data_race.thread_joined(&this.machine.threads, joined_thread_id); - } - interp_ok(()) + after_join(&mut this.machine.threads, joined_thread_id, &mut this.machine.data_race) } ), ); } else { // The thread has already terminated - establish happens-before - if let Some(data_race) = data_race { - data_race.thread_joined(self, joined_thread_id); - } + after_join(self, joined_thread_id, data_race_handler)?; } interp_ok(()) } @@ -627,7 +640,7 @@ impl<'tcx> ThreadManager<'tcx> { fn join_thread_exclusive( &mut self, joined_thread_id: ThreadId, - data_race: Option<&mut data_race::GlobalState>, + data_race_handler: &mut GlobalDataRaceHandler, ) -> InterpResult<'tcx> { if self.threads[joined_thread_id].join_status == ThreadJoinStatus::Joined { throw_ub_format!("trying to join an already joined thread"); @@ -645,7 +658,7 @@ impl<'tcx> ThreadManager<'tcx> { "this thread already has threads waiting for its termination" ); - self.join_thread(joined_thread_id, data_race) + self.join_thread(joined_thread_id, data_race_handler) } /// Set the name of the given thread. @@ -683,7 +696,7 @@ impl<'tcx> ThreadManager<'tcx> { } /// Get the wait time for the next timeout, or `None` if no timeout is pending. - fn next_callback_wait_time(&self, clock: &Clock) -> Option { + fn next_callback_wait_time(&self, clock: &MonotonicClock) -> Option { self.threads .iter() .filter_map(|t| { @@ -695,70 +708,6 @@ impl<'tcx> ThreadManager<'tcx> { }) .min() } - - /// Decide which action to take next and on which thread. - /// - /// The currently implemented scheduling policy is the one that is commonly - /// used in stateless model checkers such as Loom: run the active thread as - /// long as we can and switch only when we have to (the active thread was - /// blocked, terminated, or has explicitly asked to be preempted). - fn schedule(&mut self, clock: &Clock) -> InterpResult<'tcx, SchedulingAction> { - // This thread and the program can keep going. - if self.threads[self.active_thread].state.is_enabled() && !self.yield_active_thread { - // The currently active thread is still enabled, just continue with it. - return interp_ok(SchedulingAction::ExecuteStep); - } - // The active thread yielded or got terminated. Let's see if there are any timeouts to take - // care of. We do this *before* running any other thread, to ensure that timeouts "in the - // past" fire before any other thread can take an action. This ensures that for - // `pthread_cond_timedwait`, "an error is returned if [...] the absolute time specified by - // abstime has already been passed at the time of the call". - // - let potential_sleep_time = self.next_callback_wait_time(clock); - if potential_sleep_time == Some(Duration::ZERO) { - return interp_ok(SchedulingAction::ExecuteTimeoutCallback); - } - // No callbacks immediately scheduled, pick a regular thread to execute. - // The active thread blocked or yielded. So we go search for another enabled thread. - // Crucially, we start searching at the current active thread ID, rather than at 0, since we - // want to avoid always scheduling threads 0 and 1 without ever making progress in thread 2. - // - // `skip(N)` means we start iterating at thread N, so we skip 1 more to start just *after* - // the active thread. Then after that we look at `take(N)`, i.e., the threads *before* the - // active thread. - let threads = self - .threads - .iter_enumerated() - .skip(self.active_thread.index() + 1) - .chain(self.threads.iter_enumerated().take(self.active_thread.index())); - for (id, thread) in threads { - debug_assert_ne!(self.active_thread, id); - if thread.state.is_enabled() { - info!( - "---------- Now executing on thread `{}` (previous: `{}`) ----------------------------------------", - self.get_thread_display_name(id), - self.get_thread_display_name(self.active_thread) - ); - self.active_thread = id; - break; - } - } - self.yield_active_thread = false; - if self.threads[self.active_thread].state.is_enabled() { - return interp_ok(SchedulingAction::ExecuteStep); - } - // We have not found a thread to execute. - if self.threads.iter().all(|thread| thread.state.is_terminated()) { - unreachable!("all threads terminated without the main thread terminating?!"); - } else if let Some(sleep_time) = potential_sleep_time { - // All threads are currently blocked, but we have unexecuted - // timeout_callbacks, which may unblock some of the threads. Hence, - // sleep until the first callback. - interp_ok(SchedulingAction::Sleep(sleep_time)) - } else { - throw_machine_stop!(TerminationInfo::Deadlock); - } - } } impl<'tcx> EvalContextPrivExt<'tcx> for MiriInterpCx<'tcx> {} @@ -772,7 +721,7 @@ trait EvalContextPrivExt<'tcx>: MiriInterpCxExt<'tcx> { for (id, thread) in this.machine.threads.threads.iter_enumerated_mut() { match &thread.state { ThreadState::Blocked { timeout: Some(timeout), .. } - if timeout.get_wait_time(&this.machine.clock) == Duration::ZERO => + if timeout.get_wait_time(&this.machine.monotonic_clock) == Duration::ZERO => { let old_state = mem::replace(&mut thread.state, ThreadState::Enabled); let ThreadState::Blocked { callback, .. } = old_state else { unreachable!() }; @@ -806,6 +755,11 @@ trait EvalContextPrivExt<'tcx>: MiriInterpCxExt<'tcx> { #[inline] fn run_on_stack_empty(&mut self) -> InterpResult<'tcx, Poll<()>> { let this = self.eval_context_mut(); + // Inform GenMC that a thread has finished all user code. GenMC needs to know this for scheduling. + if let Some(genmc_ctx) = this.machine.data_race.as_genmc_ref() { + let thread_id = this.active_thread(); + genmc_ctx.handle_thread_stack_empty(thread_id); + } let mut callback = this .active_thread_mut() .on_stack_empty @@ -815,6 +769,102 @@ trait EvalContextPrivExt<'tcx>: MiriInterpCxExt<'tcx> { this.active_thread_mut().on_stack_empty = Some(callback); interp_ok(res) } + + /// Decide which action to take next and on which thread. + /// + /// The currently implemented scheduling policy is the one that is commonly + /// used in stateless model checkers such as Loom: run the active thread as + /// long as we can and switch only when we have to (the active thread was + /// blocked, terminated, or has explicitly asked to be preempted). + /// + /// If GenMC mode is active, the scheduling is instead handled by GenMC. + fn schedule(&mut self) -> InterpResult<'tcx, SchedulingAction> { + let this = self.eval_context_mut(); + // In GenMC mode, we let GenMC do the scheduling + if let Some(genmc_ctx) = this.machine.data_race.as_genmc_ref() { + let next_thread_id = genmc_ctx.schedule_thread(this)?; + + let thread_manager = &mut this.machine.threads; + thread_manager.active_thread = next_thread_id; + thread_manager.yield_active_thread = false; + + assert!(thread_manager.threads[thread_manager.active_thread].state.is_enabled()); + return interp_ok(SchedulingAction::ExecuteStep); + } + + // We are not in GenMC mode, so we control the schedule + let thread_manager = &mut this.machine.threads; + let clock = &this.machine.monotonic_clock; + let rng = this.machine.rng.get_mut(); + // This thread and the program can keep going. + if thread_manager.threads[thread_manager.active_thread].state.is_enabled() + && !thread_manager.yield_active_thread + { + // The currently active thread is still enabled, just continue with it. + return interp_ok(SchedulingAction::ExecuteStep); + } + // The active thread yielded or got terminated. Let's see if there are any timeouts to take + // care of. We do this *before* running any other thread, to ensure that timeouts "in the + // past" fire before any other thread can take an action. This ensures that for + // `pthread_cond_timedwait`, "an error is returned if [...] the absolute time specified by + // abstime has already been passed at the time of the call". + // + let potential_sleep_time = thread_manager.next_callback_wait_time(clock); + if potential_sleep_time == Some(Duration::ZERO) { + return interp_ok(SchedulingAction::ExecuteTimeoutCallback); + } + // No callbacks immediately scheduled, pick a regular thread to execute. + // The active thread blocked or yielded. So we go search for another enabled thread. + // We build the list of threads by starting with the threads after the current one, followed by + // the threads before the current one and then the current thread itself (i.e., this iterator acts + // like `threads.rotate_left(self.active_thread.index() + 1)`. This ensures that if we pick the first + // eligible thread, we do regular round-robin scheduling, and all threads get a chance to take a step. + let mut threads_iter = thread_manager + .threads + .iter_enumerated() + .skip(thread_manager.active_thread.index() + 1) + .chain( + thread_manager + .threads + .iter_enumerated() + .take(thread_manager.active_thread.index() + 1), + ) + .filter(|(_id, thread)| thread.state.is_enabled()); + // Pick a new thread, and switch to it. + let new_thread = if thread_manager.fixed_scheduling { + threads_iter.next() + } else { + threads_iter.choose(rng) + }; + + if let Some((id, _thread)) = new_thread { + if thread_manager.active_thread != id { + info!( + "---------- Now executing on thread `{}` (previous: `{}`) ----------------------------------------", + thread_manager.get_thread_display_name(id), + thread_manager.get_thread_display_name(thread_manager.active_thread) + ); + thread_manager.active_thread = id; + } + } + // This completes the `yield`, if any was requested. + thread_manager.yield_active_thread = false; + + if thread_manager.threads[thread_manager.active_thread].state.is_enabled() { + return interp_ok(SchedulingAction::ExecuteStep); + } + // We have not found a thread to execute. + if thread_manager.threads.iter().all(|thread| thread.state.is_terminated()) { + unreachable!("all threads terminated without the main thread terminating?!"); + } else if let Some(sleep_time) = potential_sleep_time { + // All threads are currently blocked, but we have unexecuted + // timeout_callbacks, which may unblock some of the threads. Hence, + // sleep until the first callback. + interp_ok(SchedulingAction::Sleep(sleep_time)) + } else { + throw_machine_stop!(TerminationInfo::Deadlock); + } + } } // Public interface to thread management. @@ -880,10 +930,13 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { Box::new(move |m| state.on_stack_empty(m)) }); let current_span = this.machine.current_span(); - if let Some(data_race) = &mut this.machine.data_race { - data_race.thread_created(&this.machine.threads, new_thread_id, current_span); + match &mut this.machine.data_race { + GlobalDataRaceHandler::None => {} + GlobalDataRaceHandler::Vclocks(data_race) => + data_race.thread_created(&this.machine.threads, new_thread_id, current_span), + GlobalDataRaceHandler::Genmc(genmc_ctx) => + genmc_ctx.handle_thread_create(&this.machine.threads, new_thread_id)?, } - // Write the current thread-id, switch to the next thread later // to treat this write operation as occurring on the current thread. if let Some(thread_info_place) = thread { @@ -930,12 +983,17 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { /// This is called by the eval loop when a thread's on_stack_empty returns `Ready`. fn terminate_active_thread(&mut self, tls_alloc_action: TlsAllocAction) -> InterpResult<'tcx> { let this = self.eval_context_mut(); + // Mark thread as terminated. let thread = this.active_thread_mut(); assert!(thread.stack.is_empty(), "only threads with an empty stack can be terminated"); thread.state = ThreadState::Terminated; - if let Some(ref mut data_race) = this.machine.data_race { - data_race.thread_terminated(&this.machine.threads); + match &mut this.machine.data_race { + GlobalDataRaceHandler::None => {} + GlobalDataRaceHandler::Vclocks(data_race) => + data_race.thread_terminated(&this.machine.threads), + GlobalDataRaceHandler::Genmc(genmc_ctx) => + genmc_ctx.handle_thread_finish(&this.machine.threads)?, } // Deallocate TLS. let gone_thread = this.active_thread(); @@ -1006,8 +1064,8 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { } TimeoutClock::Monotonic => Timeout::Monotonic(match anchor { - TimeoutAnchor::Absolute => this.machine.clock.epoch(), - TimeoutAnchor::Relative => this.machine.clock.now(), + TimeoutAnchor::Absolute => this.machine.monotonic_clock.epoch(), + TimeoutAnchor::Relative => this.machine.monotonic_clock.now(), }), }; anchor.add_lossy(duration) @@ -1051,7 +1109,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { #[inline] fn join_thread(&mut self, joined_thread_id: ThreadId) -> InterpResult<'tcx> { let this = self.eval_context_mut(); - this.machine.threads.join_thread(joined_thread_id, this.machine.data_race.as_mut())?; + this.machine.threads.join_thread(joined_thread_id, &mut this.machine.data_race)?; interp_ok(()) } @@ -1060,7 +1118,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { let this = self.eval_context_mut(); this.machine .threads - .join_thread_exclusive(joined_thread_id, this.machine.data_race.as_mut())?; + .join_thread_exclusive(joined_thread_id, &mut this.machine.data_race)?; interp_ok(()) } @@ -1138,7 +1196,9 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { use rand::Rng as _; let this = self.eval_context_mut(); - if this.machine.rng.get_mut().random_bool(this.machine.preemption_rate) { + if !this.machine.threads.fixed_scheduling + && this.machine.rng.get_mut().random_bool(this.machine.preemption_rate) + { this.yield_active_thread(); } } @@ -1152,7 +1212,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { this.machine.handle_abnormal_termination(); throw_machine_stop!(TerminationInfo::Interrupted); } - match this.machine.threads.schedule(&this.machine.clock)? { + match this.schedule()? { SchedulingAction::ExecuteStep => { if !this.step()? { // See if this thread can do something else. @@ -1167,7 +1227,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { this.run_timeout_callback()?; } SchedulingAction::Sleep(duration) => { - this.machine.clock.sleep(duration); + this.machine.monotonic_clock.sleep(duration); } } } diff --git a/src/tools/miri/src/concurrency/vector_clock.rs b/src/tools/miri/src/concurrency/vector_clock.rs index 345726634299b..78858fcedaec7 100644 --- a/src/tools/miri/src/concurrency/vector_clock.rs +++ b/src/tools/miri/src/concurrency/vector_clock.rs @@ -40,8 +40,8 @@ impl From for VectorIdx { } } -/// The size of the vector-clock to store inline -/// clock vectors larger than this will be stored on the heap +/// The size of the vector clock to store inline. +/// Clock vectors larger than this will be stored on the heap. const SMALL_VECTOR: usize = 4; /// The time-stamps recorded in the data-race detector consist of both @@ -136,7 +136,7 @@ impl Ord for VTimestamp { pub struct VClock(SmallVec<[VTimestamp; SMALL_VECTOR]>); impl VClock { - /// Create a new vector-clock containing all zeros except + /// Create a new vector clock containing all zeros except /// for a value at the given index pub(super) fn new_with_index(index: VectorIdx, timestamp: VTimestamp) -> VClock { if timestamp.time() == 0 { @@ -185,8 +185,8 @@ impl VClock { } } - // Join the two vector-clocks together, this - // sets each vector-element to the maximum value + // Join the two vector clocks together, this + // sets each vector element to the maximum value // of that element in either of the two source elements. pub fn join(&mut self, other: &Self) { let rhs_slice = other.as_slice(); diff --git a/src/tools/miri/src/concurrency/weak_memory.rs b/src/tools/miri/src/concurrency/weak_memory.rs index 1a3e9614f8af0..95c010be2fd21 100644 --- a/src/tools/miri/src/concurrency/weak_memory.rs +++ b/src/tools/miri/src/concurrency/weak_memory.rs @@ -77,7 +77,7 @@ // (https://github.com/ChrisLidbury/tsan11/blob/ecbd6b81e9b9454e01cba78eb9d88684168132c7/lib/tsan/rtl/tsan_relaxed.cc#L160-L167) // and here. // -// 4. W_SC ; R_SC case requires the SC load to ignore all but last store maked SC (stores not marked SC are not +// 4. W_SC ; R_SC case requires the SC load to ignore all but last store marked SC (stores not marked SC are not // affected). But this rule is applied to all loads in ReadsFromSet from the paper (last two lines of code), not just SC load. // This is implemented correctly in tsan11 // (https://github.com/ChrisLidbury/tsan11/blob/ecbd6b81e9b9454e01cba78eb9d88684168132c7/lib/tsan/rtl/tsan_relaxed.cc#L295) @@ -88,9 +88,11 @@ use std::collections::VecDeque; use rustc_data_structures::fx::FxHashMap; +use super::AllocDataRaceHandler; use super::data_race::{GlobalState as DataRaceState, ThreadClockSet}; use super::range_object_map::{AccessType, RangeObjectMap}; use super::vector_clock::{VClock, VTimestamp, VectorIdx}; +use crate::concurrency::GlobalDataRaceHandler; use crate::*; pub type AllocState = StoreBufferAlloc; @@ -459,8 +461,13 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { let this = self.eval_context_mut(); let (alloc_id, base_offset, ..) = this.ptr_get_alloc_id(place.ptr(), 0)?; if let ( - crate::AllocExtra { weak_memory: Some(alloc_buffers), .. }, - crate::MiriMachine { data_race: Some(global), threads, .. }, + crate::AllocExtra { + data_race: AllocDataRaceHandler::Vclocks(_, Some(alloc_buffers)), + .. + }, + crate::MiriMachine { + data_race: GlobalDataRaceHandler::Vclocks(global), threads, .. + }, ) = this.get_alloc_extra_mut(alloc_id)? { if atomic == AtomicRwOrd::SeqCst { @@ -484,9 +491,11 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { ) -> InterpResult<'tcx, Option> { let this = self.eval_context_ref(); 'fallback: { - if let Some(global) = &this.machine.data_race { + if let Some(global) = this.machine.data_race.as_vclocks_ref() { let (alloc_id, base_offset, ..) = this.ptr_get_alloc_id(place.ptr(), 0)?; - if let Some(alloc_buffers) = this.get_alloc_extra(alloc_id)?.weak_memory.as_ref() { + if let Some(alloc_buffers) = + this.get_alloc_extra(alloc_id)?.data_race.as_weak_memory_ref() + { if atomic == AtomicReadOrd::SeqCst { global.sc_read(&this.machine.threads); } @@ -534,8 +543,13 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { let this = self.eval_context_mut(); let (alloc_id, base_offset, ..) = this.ptr_get_alloc_id(dest.ptr(), 0)?; if let ( - crate::AllocExtra { weak_memory: Some(alloc_buffers), .. }, - crate::MiriMachine { data_race: Some(global), threads, .. }, + crate::AllocExtra { + data_race: AllocDataRaceHandler::Vclocks(_, Some(alloc_buffers)), + .. + }, + crate::MiriMachine { + data_race: GlobalDataRaceHandler::Vclocks(global), threads, .. + }, ) = this.get_alloc_extra_mut(alloc_id)? { if atomic == AtomicWriteOrd::SeqCst { @@ -561,13 +575,15 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { ) -> InterpResult<'tcx> { let this = self.eval_context_ref(); - if let Some(global) = &this.machine.data_race { + if let Some(global) = this.machine.data_race.as_vclocks_ref() { if atomic == AtomicReadOrd::SeqCst { global.sc_read(&this.machine.threads); } let size = place.layout.size; let (alloc_id, base_offset, ..) = this.ptr_get_alloc_id(place.ptr(), 0)?; - if let Some(alloc_buffers) = this.get_alloc_extra(alloc_id)?.weak_memory.as_ref() { + if let Some(alloc_buffers) = + this.get_alloc_extra(alloc_id)?.data_race.as_weak_memory_ref() + { let Some(buffer) = alloc_buffers.get_store_buffer(alloc_range(base_offset, size))? else { diff --git a/src/tools/miri/src/diagnostics.rs b/src/tools/miri/src/diagnostics.rs index 014b1299f2dd0..89768077d8780 100644 --- a/src/tools/miri/src/diagnostics.rs +++ b/src/tools/miri/src/diagnostics.rs @@ -31,6 +31,8 @@ pub enum TerminationInfo { }, Int2PtrWithStrictProvenance, Deadlock, + /// In GenMC mode, an execution can get stuck in certain cases. This is not an error. + GenmcStuckExecution, MultipleSymbolDefinitions { link_name: Symbol, first: SpanData, @@ -75,6 +77,7 @@ impl fmt::Display for TerminationInfo { StackedBorrowsUb { msg, .. } => write!(f, "{msg}"), TreeBorrowsUb { title, .. } => write!(f, "{title}"), Deadlock => write!(f, "the evaluated program deadlocked"), + GenmcStuckExecution => write!(f, "GenMC determined that the execution got stuck"), MultipleSymbolDefinitions { link_name, .. } => write!(f, "multiple definitions of symbol `{link_name}`"), SymbolShimClashing { link_name, .. } => @@ -235,6 +238,12 @@ pub fn report_error<'tcx>( StackedBorrowsUb { .. } | TreeBorrowsUb { .. } | DataRace { .. } => Some("Undefined Behavior"), Deadlock => Some("deadlock"), + GenmcStuckExecution => { + // This case should only happen in GenMC mode. We treat it like a normal program exit. + assert!(ecx.machine.data_race.as_genmc_ref().is_some()); + tracing::info!("GenMC: found stuck execution"); + return Some((0, true)); + } MultipleSymbolDefinitions { .. } | SymbolShimClashing { .. } => None, }; #[rustfmt::skip] diff --git a/src/tools/miri/src/eval.rs b/src/tools/miri/src/eval.rs index ed13f670a90e3..bb5e5d7ee8146 100644 --- a/src/tools/miri/src/eval.rs +++ b/src/tools/miri/src/eval.rs @@ -3,6 +3,7 @@ use std::ffi::{OsStr, OsString}; use std::panic::{self, AssertUnwindSafe}; use std::path::PathBuf; +use std::rc::Rc; use std::task::Poll; use std::{iter, thread}; @@ -14,6 +15,7 @@ use rustc_middle::ty::layout::{LayoutCx, LayoutOf}; use rustc_middle::ty::{self, Ty, TyCtxt}; use rustc_session::config::EntryFnType; +use crate::concurrency::GenmcCtx; use crate::concurrency::thread::TlsAllocAction; use crate::diagnostics::report_leaks; use crate::shims::tls; @@ -99,10 +101,6 @@ pub struct MiriConfig { pub validation: ValidationMode, /// Determines if Stacked Borrows or Tree Borrows is enabled. pub borrow_tracker: Option, - /// Whether `core::ptr::Unique` receives special treatment. - /// If `true` then `Unique` is reborrowed with its own new tag and permission, - /// otherwise `Unique` is just another raw pointer. - pub unique_is_unique: bool, /// Controls alignment checking. pub check_alignment: AlignmentCheck, /// Action for an op requiring communication with the host. @@ -117,16 +115,18 @@ pub struct MiriConfig { pub args: Vec, /// The seed to use when non-determinism or randomness are required (e.g. ptr-to-int cast, `getrandom()`). pub seed: Option, - /// The stacked borrows pointer ids to report about + /// The stacked borrows pointer ids to report about. pub tracked_pointer_tags: FxHashSet, /// The allocation ids to report about. pub tracked_alloc_ids: FxHashSet, /// For the tracked alloc ids, also report read/write accesses. pub track_alloc_accesses: bool, - /// Determine if data race detection should be enabled + /// Determine if data race detection should be enabled. pub data_race_detector: bool, - /// Determine if weak memory emulation should be enabled. Requires data race detection to be enabled + /// Determine if weak memory emulation should be enabled. Requires data race detection to be enabled. pub weak_memory_emulation: bool, + /// Determine if we are running in GenMC mode. In this mode, Miri will explore multiple concurrent executions of the given program. + pub genmc_mode: bool, /// Track when an outdated (weak memory) load happens. pub track_outdated_loads: bool, /// Rate of spurious failures for compare_exchange_weak atomic operations, @@ -137,7 +137,7 @@ pub struct MiriConfig { pub measureme_out: Option, /// Which style to use for printing backtraces. pub backtrace_style: BacktraceStyle, - /// Which provenance to use for int2ptr casts + /// Which provenance to use for int2ptr casts. pub provenance_mode: ProvenanceMode, /// Whether to ignore any output by the program. This is helpful when debugging miri /// as its messages don't get intermingled with the program messages. @@ -155,7 +155,7 @@ pub struct MiriConfig { pub gc_interval: u32, /// The number of CPUs to be reported by miri. pub num_cpus: u32, - /// Requires Miri to emulate pages of a certain size + /// Requires Miri to emulate pages of a certain size. pub page_size: Option, /// Whether to collect a backtrace when each allocation is created, just in case it leaks. pub collect_leak_backtraces: bool, @@ -163,6 +163,8 @@ pub struct MiriConfig { pub address_reuse_rate: f64, /// Probability for address reuse across threads. pub address_reuse_cross_thread_rate: f64, + /// Round Robin scheduling with no preemption. + pub fixed_scheduling: bool, } impl Default for MiriConfig { @@ -171,7 +173,6 @@ impl Default for MiriConfig { env: vec![], validation: ValidationMode::Shallow, borrow_tracker: Some(BorrowTrackerMethod::StackedBorrows), - unique_is_unique: false, check_alignment: AlignmentCheck::Int, isolated_op: IsolatedOp::Reject(RejectOpWith::Abort), ignore_leaks: false, @@ -184,6 +185,7 @@ impl Default for MiriConfig { track_alloc_accesses: false, data_race_detector: true, weak_memory_emulation: true, + genmc_mode: false, track_outdated_loads: false, cmpxchg_weak_failure_rate: 0.8, // 80% measureme_out: None, @@ -200,6 +202,7 @@ impl Default for MiriConfig { collect_leak_backtraces: true, address_reuse_rate: 0.5, address_reuse_cross_thread_rate: 0.1, + fixed_scheduling: false, } } } @@ -230,16 +233,22 @@ impl<'tcx> MainThreadState<'tcx> { match state.on_stack_empty(this)? { Poll::Pending => {} // just keep going Poll::Ready(()) => { - // Give background threads a chance to finish by yielding the main thread a - // couple of times -- but only if we would also preempt threads randomly. - if this.machine.preemption_rate > 0.0 { - // There is a non-zero chance they will yield back to us often enough to - // make Miri terminate eventually. - *self = Yield { remaining: MAIN_THREAD_YIELDS_AT_SHUTDOWN }; - } else { - // The other threads did not get preempted, so no need to yield back to - // them. + if this.machine.data_race.as_genmc_ref().is_some() { + // In GenMC mode, we don't yield at the end of the main thread. + // Instead, the `GenmcCtx` will ensure that unfinished threads get a chance to run at this point. *self = Done; + } else { + // Give background threads a chance to finish by yielding the main thread a + // couple of times -- but only if we would also preempt threads randomly. + if this.machine.preemption_rate > 0.0 { + // There is a non-zero chance they will yield back to us often enough to + // make Miri terminate eventually. + *self = Yield { remaining: MAIN_THREAD_YIELDS_AT_SHUTDOWN }; + } else { + // The other threads did not get preempted, so no need to yield back to + // them. + *self = Done; + } } } }, @@ -265,6 +274,17 @@ impl<'tcx> MainThreadState<'tcx> { // Deal with our thread-local memory. We do *not* want to actually free it, instead we consider TLS // to be like a global `static`, so that all memory reached by it is considered to "not leak". this.terminate_active_thread(TlsAllocAction::Leak)?; + + // Machine cleanup. Only do this if all threads have terminated; threads that are still running + // might cause Stacked Borrows errors (https://github.com/rust-lang/miri/issues/2396). + if this.have_all_terminated() { + // Even if all threads have terminated, we have to beware of data races since some threads + // might not have joined the main thread (https://github.com/rust-lang/miri/issues/2020, + // https://github.com/rust-lang/miri/issues/2508). + this.allow_data_races_all_threads_done(); + EnvVars::cleanup(this).expect("error during env var cleanup"); + } + // Stop interpreter loop. throw_machine_stop!(TerminationInfo::Exit { code: exit_code, leak_check: true }); } @@ -280,11 +300,16 @@ pub fn create_ecx<'tcx>( entry_id: DefId, entry_type: MiriEntryFnType, config: &MiriConfig, + genmc_ctx: Option>, ) -> InterpResult<'tcx, InterpCx<'tcx, MiriMachine<'tcx>>> { let typing_env = ty::TypingEnv::fully_monomorphized(); let layout_cx = LayoutCx::new(tcx, typing_env); - let mut ecx = - InterpCx::new(tcx, rustc_span::DUMMY_SP, typing_env, MiriMachine::new(config, layout_cx)); + let mut ecx = InterpCx::new( + tcx, + rustc_span::DUMMY_SP, + typing_env, + MiriMachine::new(config, layout_cx, genmc_ctx), + ); // Some parts of initialization require a full `InterpCx`. MiriMachine::late_init(&mut ecx, config, { @@ -438,12 +463,17 @@ pub fn eval_entry<'tcx>( tcx: TyCtxt<'tcx>, entry_id: DefId, entry_type: MiriEntryFnType, - config: MiriConfig, + config: &MiriConfig, + genmc_ctx: Option>, ) -> Option { // Copy setting before we move `config`. let ignore_leaks = config.ignore_leaks; - let mut ecx = match create_ecx(tcx, entry_id, entry_type, &config).report_err() { + if let Some(genmc_ctx) = &genmc_ctx { + genmc_ctx.handle_execution_start(); + } + + let mut ecx = match create_ecx(tcx, entry_id, entry_type, config, genmc_ctx).report_err() { Ok(v) => v, Err(err) => { let (kind, backtrace) = err.into_parts(); @@ -459,21 +489,25 @@ pub fn eval_entry<'tcx>( ecx.handle_ice(); panic::resume_unwind(panic_payload) }); - // `Ok` can never happen. + // `Ok` can never happen; the interpreter loop always exits with an "error" + // (but that "error" might be just "regular program termination"). let Err(err) = res.report_err(); - // Machine cleanup. Only do this if all threads have terminated; threads that are still running - // might cause Stacked Borrows errors (https://github.com/rust-lang/miri/issues/2396). - if ecx.have_all_terminated() { - // Even if all threads have terminated, we have to beware of data races since some threads - // might not have joined the main thread (https://github.com/rust-lang/miri/issues/2020, - // https://github.com/rust-lang/miri/issues/2508). - ecx.allow_data_races_all_threads_done(); - EnvVars::cleanup(&mut ecx).expect("error during env var cleanup"); + // Show diagnostic, if any. + let (return_code, leak_check) = report_error(&ecx, err)?; + + // We inform GenMC that the execution is complete. + if let Some(genmc_ctx) = ecx.machine.data_race.as_genmc_ref() + && let Err(error) = genmc_ctx.handle_execution_end(&ecx) + { + // FIXME(GenMC): Improve error reporting. + tcx.dcx().err(format!("GenMC returned an error: \"{error}\"")); + return None; } - // Process the result. - let (return_code, leak_check) = report_error(&ecx, err)?; + // If we get here there was no fatal error. + + // Possibly check for memory leaks. if leak_check && !ignore_leaks { // Check for thread leaks. if !ecx.have_all_terminated() { diff --git a/src/tools/miri/src/helpers.rs b/src/tools/miri/src/helpers.rs index 29ed94a2e4a29..a3aa8bbbfb32d 100644 --- a/src/tools/miri/src/helpers.rs +++ b/src/tools/miri/src/helpers.rs @@ -602,6 +602,9 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // We want to not actually read from memory for this visit. So, before // walking this value, we have to make sure it is not a // `Variants::Multiple`. + // FIXME: the current logic here is layout-dependent, so enums with + // multiple variants where all but 1 are uninhabited will be recursed into. + // Is that truly what we want? match v.layout.variants { Variants::Multiple { .. } => { // A multi-variant enum, or coroutine, or so. @@ -1014,7 +1017,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { } /// Check that the given `caller_fn_abi` matches the expected ABI described by - /// `callee_abi`, `callee_input_tys`, `callee_output_ty`, and the return the list of + /// `callee_abi`, `callee_input_tys`, `callee_output_ty`, and then returns the list of /// arguments. fn check_shim_abi<'a, const N: usize>( &mut self, @@ -1379,6 +1382,11 @@ pub(crate) fn bool_to_simd_element(b: bool, size: Size) -> Scalar { } pub(crate) fn simd_element_to_bool(elem: ImmTy<'_>) -> InterpResult<'_, bool> { + assert!( + matches!(elem.layout.ty.kind(), ty::Int(_) | ty::Uint(_)), + "SIMD mask element type must be an integer, but this is `{}`", + elem.layout.ty + ); let val = elem.to_scalar().to_int(elem.layout.size)?; interp_ok(match val { 0 => false, diff --git a/src/tools/miri/src/intrinsics/atomic.rs b/src/tools/miri/src/intrinsics/atomic.rs index dcafa7b6cabe5..2eb8086f578f2 100644 --- a/src/tools/miri/src/intrinsics/atomic.rs +++ b/src/tools/miri/src/intrinsics/atomic.rs @@ -149,7 +149,7 @@ trait EvalContextPrivExt<'tcx>: MiriInterpCxExt<'tcx> { // Perform regular load. let val = this.read_scalar(val)?; - // Perform atomic store + // Perform atomic store. this.write_scalar_atomic(val, &place, atomic)?; interp_ok(()) } @@ -161,7 +161,7 @@ trait EvalContextPrivExt<'tcx>: MiriInterpCxExt<'tcx> { ) -> InterpResult<'tcx> { let [] = check_intrinsic_arg_count(args)?; let _ = atomic; - //FIXME: compiler fences are currently ignored + // FIXME, FIXME(GenMC): compiler fences are currently ignored (also ignored in GenMC mode) interp_ok(()) } @@ -199,23 +199,16 @@ trait EvalContextPrivExt<'tcx>: MiriInterpCxExt<'tcx> { span_bug!(this.cur_span(), "atomic arithmetic operation type mismatch"); } - match atomic_op { - AtomicOp::Min => { - let old = this.atomic_min_max_scalar(&place, rhs, true, atomic)?; - this.write_immediate(*old, dest)?; // old value is returned - interp_ok(()) - } - AtomicOp::Max => { - let old = this.atomic_min_max_scalar(&place, rhs, false, atomic)?; - this.write_immediate(*old, dest)?; // old value is returned - interp_ok(()) - } - AtomicOp::MirOp(op, not) => { - let old = this.atomic_rmw_op_immediate(&place, &rhs, op, not, atomic)?; - this.write_immediate(*old, dest)?; // old value is returned - interp_ok(()) - } - } + let old = match atomic_op { + AtomicOp::Min => + this.atomic_min_max_scalar(&place, rhs, /* min */ true, atomic)?, + AtomicOp::Max => + this.atomic_min_max_scalar(&place, rhs, /* min */ false, atomic)?, + AtomicOp::MirOp(op, not) => + this.atomic_rmw_op_immediate(&place, &rhs, op, not, atomic)?, + }; + this.write_immediate(*old, dest)?; // old value is returned + interp_ok(()) } fn atomic_exchange( diff --git a/src/tools/miri/src/intrinsics/mod.rs b/src/tools/miri/src/intrinsics/mod.rs index 85fb280a9a908..3334c0b5edf96 100644 --- a/src/tools/miri/src/intrinsics/mod.rs +++ b/src/tools/miri/src/intrinsics/mod.rs @@ -7,13 +7,13 @@ use rand::Rng; use rustc_abi::Size; use rustc_apfloat::{Float, Round}; use rustc_middle::mir; -use rustc_middle::ty::{self, FloatTy, ScalarInt}; +use rustc_middle::ty::{self, FloatTy}; use rustc_span::{Symbol, sym}; use self::atomic::EvalContextExt as _; use self::helpers::{ToHost, ToSoft, check_intrinsic_arg_count}; use self::simd::EvalContextExt as _; -use crate::math::apply_random_float_error_ulp; +use crate::math::apply_random_float_error_to_imm; use crate::*; impl<'tcx> EvalContextExt<'tcx> for crate::MiriInterpCx<'tcx> {} @@ -391,32 +391,6 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { this.write_scalar(res, dest)?; } - #[rustfmt::skip] - | "fadd_algebraic" - | "fsub_algebraic" - | "fmul_algebraic" - | "fdiv_algebraic" - | "frem_algebraic" - => { - let [a, b] = check_intrinsic_arg_count(args)?; - let a = this.read_immediate(a)?; - let b = this.read_immediate(b)?; - let op = match intrinsic_name { - "fadd_algebraic" => mir::BinOp::Add, - "fsub_algebraic" => mir::BinOp::Sub, - "fmul_algebraic" => mir::BinOp::Mul, - "fdiv_algebraic" => mir::BinOp::Div, - "frem_algebraic" => mir::BinOp::Rem, - _ => bug!(), - }; - let res = this.binary_op(op, &a, &b)?; - // `binary_op` already called `generate_nan` if needed. - // Apply a relative error of 16ULP to simulate non-deterministic precision loss - // due to optimizations. - let res = apply_random_float_error_to_imm(this, res, 4 /* log2(16) */)?; - this.write_immediate(*res, dest)?; - } - #[rustfmt::skip] | "fadd_fast" | "fsub_fast" @@ -464,9 +438,9 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { if !float_finite(&res)? { throw_ub_format!("`{intrinsic_name}` intrinsic produced non-finite value as result"); } - // Apply a relative error of 16ULP to simulate non-deterministic precision loss + // Apply a relative error of 4ULP to simulate non-deterministic precision loss // due to optimizations. - let res = apply_random_float_error_to_imm(this, res, 4 /* log2(16) */)?; + let res = apply_random_float_error_to_imm(this, res, 2 /* log2(4) */)?; this.write_immediate(*res, dest)?; } @@ -499,26 +473,3 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { interp_ok(EmulateItemResult::NeedsReturn) } } - -/// Applies a random 16ULP floating point error to `val` and returns the new value. -/// Will fail if `val` is not a floating point number. -fn apply_random_float_error_to_imm<'tcx>( - ecx: &mut MiriInterpCx<'tcx>, - val: ImmTy<'tcx>, - ulp_exponent: u32, -) -> InterpResult<'tcx, ImmTy<'tcx>> { - let scalar = val.to_scalar_int()?; - let res: ScalarInt = match val.layout.ty.kind() { - ty::Float(FloatTy::F16) => - apply_random_float_error_ulp(ecx, scalar.to_f16(), ulp_exponent).into(), - ty::Float(FloatTy::F32) => - apply_random_float_error_ulp(ecx, scalar.to_f32(), ulp_exponent).into(), - ty::Float(FloatTy::F64) => - apply_random_float_error_ulp(ecx, scalar.to_f64(), ulp_exponent).into(), - ty::Float(FloatTy::F128) => - apply_random_float_error_ulp(ecx, scalar.to_f128(), ulp_exponent).into(), - _ => bug!("intrinsic called with non-float input type"), - }; - - interp_ok(ImmTy::from_scalar_int(res, val.layout)) -} diff --git a/src/tools/miri/src/intrinsics/simd.rs b/src/tools/miri/src/intrinsics/simd.rs index de5da6ec898a4..c9250ba1b818d 100644 --- a/src/tools/miri/src/intrinsics/simd.rs +++ b/src/tools/miri/src/intrinsics/simd.rs @@ -506,7 +506,6 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { }; let dest_len = u32::try_from(dest_len).unwrap(); - let bitmask_len = u32::try_from(bitmask_len).unwrap(); for i in 0..dest_len { let bit_i = simd_bitmask_index(i, dest_len, this.data_layout().endian); let mask = mask & 1u64.strict_shl(bit_i); @@ -517,17 +516,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { let val = if mask != 0 { yes } else { no }; this.write_immediate(*val, &dest)?; } - for i in dest_len..bitmask_len { - // If the mask is "padded", ensure that padding is all-zero. - // This deliberately does not use `simd_bitmask_index`; these bits are outside - // the bitmask. It does not matter in which order we check them. - let mask = mask & 1u64.strict_shl(i); - if mask != 0 { - throw_ub_format!( - "a SIMD bitmask less than 8 bits long must be filled with 0s for the remaining bits" - ); - } - } + // The remaining bits of the mask are ignored. } // Converts a "vector of bool" into a bitmask. "bitmask" => { diff --git a/src/tools/miri/src/lib.rs b/src/tools/miri/src/lib.rs index 03f76cfa6524c..58b93ae82a1e4 100644 --- a/src/tools/miri/src/lib.rs +++ b/src/tools/miri/src/lib.rs @@ -1,6 +1,5 @@ #![feature(rustc_private)] #![feature(cfg_match)] -#![feature(cell_update)] #![feature(float_gamma)] #![feature(float_erf)] #![feature(map_try_insert)] @@ -121,7 +120,7 @@ pub use crate::borrow_tracker::stacked_borrows::{ }; pub use crate::borrow_tracker::tree_borrows::{EvalContextExt as _, Tree}; pub use crate::borrow_tracker::{BorTag, BorrowTrackerMethod, EvalContextExt as _, RetagFields}; -pub use crate::clock::{Clock, Instant}; +pub use crate::clock::{Instant, MonotonicClock}; pub use crate::concurrency::cpu_affinity::MAX_CPUS; pub use crate::concurrency::data_race::{ AtomicFenceOrd, AtomicReadOrd, AtomicRwOrd, AtomicWriteOrd, EvalContextExt as _, @@ -134,6 +133,7 @@ pub use crate::concurrency::thread::{ BlockReason, DynUnblockCallback, EvalContextExt as _, StackEmptyCallback, ThreadId, ThreadManager, TimeoutAnchor, TimeoutClock, UnblockKind, }; +pub use crate::concurrency::{GenmcConfig, GenmcCtx}; pub use crate::diagnostics::{ EvalContextExt as _, NonHaltingDiagnostic, TerminationInfo, report_error, }; @@ -169,7 +169,7 @@ pub const MIRI_DEFAULT_ARGS: &[&str] = &[ "-Zalways-encode-mir", "-Zextra-const-ub-checks", "-Zmir-emit-retag", - "-Zmir-keep-place-mention", + "-Zmir-preserve-ub", "-Zmir-opt-level=0", "-Zmir-enable-passes=-CheckAlignment,-CheckNull", // Deduplicating diagnostics means we miss events when tracking what happens during an diff --git a/src/tools/miri/src/machine.rs b/src/tools/miri/src/machine.rs index fb99bdc51764d..6060d41dac594 100644 --- a/src/tools/miri/src/machine.rs +++ b/src/tools/miri/src/machine.rs @@ -6,6 +6,7 @@ use std::borrow::Cow; use std::cell::{Cell, RefCell}; use std::collections::hash_map::Entry; use std::path::Path; +use std::rc::Rc; use std::{fmt, process}; use rand::rngs::StdRng; @@ -27,9 +28,10 @@ use rustc_span::def_id::{CrateNum, DefId}; use rustc_span::{Span, SpanData, Symbol}; use rustc_target::callconv::FnAbi; +use crate::alloc_addresses::EvalContextExt; use crate::concurrency::cpu_affinity::{self, CpuAffinityMask}; use crate::concurrency::data_race::{self, NaReadType, NaWriteType}; -use crate::concurrency::weak_memory; +use crate::concurrency::{AllocDataRaceHandler, GenmcCtx, GlobalDataRaceHandler, weak_memory}; use crate::*; /// First real-time signal. @@ -332,12 +334,10 @@ impl ProvenanceExtra { pub struct AllocExtra<'tcx> { /// Global state of the borrow tracker, if enabled. pub borrow_tracker: Option, - /// Data race detection via the use of a vector-clock. - /// This is only added if it is enabled. - pub data_race: Option, - /// Weak memory emulation via the use of store buffers. - /// This is only added if it is enabled. - pub weak_memory: Option, + /// Extra state for data race detection. + /// + /// Invariant: The enum variant must match the enum variant in the `data_race` field on `MiriMachine` + pub data_race: AllocDataRaceHandler, /// A backtrace to where this allocation was allocated. /// As this is recorded for leak reports, it only exists /// if this allocation is leakable. The backtrace is not @@ -360,11 +360,10 @@ impl<'tcx> Clone for AllocExtra<'tcx> { impl VisitProvenance for AllocExtra<'_> { fn visit_provenance(&self, visit: &mut VisitWith<'_>) { - let AllocExtra { borrow_tracker, data_race, weak_memory, backtrace: _, sync: _ } = self; + let AllocExtra { borrow_tracker, data_race, backtrace: _, sync: _ } = self; borrow_tracker.visit_provenance(visit); data_race.visit_provenance(visit); - weak_memory.visit_provenance(visit); } } @@ -447,8 +446,12 @@ pub struct MiriMachine<'tcx> { /// Global data for borrow tracking. pub borrow_tracker: Option, - /// Data race detector global data. - pub data_race: Option, + /// Depending on settings, this will be `None`, + /// global data for a data race detector, + /// or the context required for running in GenMC mode. + /// + /// Invariant: The enum variant must match the enum variant of `AllocDataRaceHandler` in the `data_race` field of all `AllocExtra`. + pub data_race: GlobalDataRaceHandler, /// Ptr-int-cast module global data. pub alloc_addresses: alloc_addresses::GlobalState, @@ -486,7 +489,7 @@ pub struct MiriMachine<'tcx> { pub(crate) epoll_interests: shims::EpollInterestTable, /// This machine's monotone clock. - pub(crate) clock: Clock, + pub(crate) monotonic_clock: MonotonicClock, /// The set of threads. pub(crate) threads: ThreadManager<'tcx>, @@ -544,9 +547,6 @@ pub struct MiriMachine<'tcx> { /// Corresponds to -Zmiri-mute-stdout-stderr and doesn't write the output but acts as if it succeeded. pub(crate) mute_stdout_stderr: bool, - /// Whether weak memory emulation is enabled - pub(crate) weak_memory: bool, - /// The probability of the active thread being preempted at the end of each basic block. pub(crate) preemption_rate: f64, @@ -617,7 +617,11 @@ pub struct MiriMachine<'tcx> { } impl<'tcx> MiriMachine<'tcx> { - pub(crate) fn new(config: &MiriConfig, layout_cx: LayoutCx<'tcx>) -> Self { + pub(crate) fn new( + config: &MiriConfig, + layout_cx: LayoutCx<'tcx>, + genmc_ctx: Option>, + ) -> Self { let tcx = layout_cx.tcx(); let local_crates = helpers::get_local_crates(tcx); let layouts = @@ -636,7 +640,14 @@ impl<'tcx> MiriMachine<'tcx> { }); let rng = StdRng::seed_from_u64(config.seed.unwrap_or(0)); let borrow_tracker = config.borrow_tracker.map(|bt| bt.instantiate_global_state(config)); - let data_race = config.data_race_detector.then(|| data_race::GlobalState::new(config)); + let data_race = if config.genmc_mode { + // `genmc_ctx` persists across executions, so we don't create a new one here. + GlobalDataRaceHandler::Genmc(genmc_ctx.unwrap()) + } else if config.data_race_detector { + GlobalDataRaceHandler::Vclocks(Box::new(data_race::GlobalState::new(config))) + } else { + GlobalDataRaceHandler::None + }; // Determine page size, stack address, and stack size. // These values are mostly meaningless, but the stack address is also where we start // allocating physical integer addresses for all allocations. @@ -669,7 +680,7 @@ impl<'tcx> MiriMachine<'tcx> { cpu_affinity::MAX_CPUS, config.num_cpus ); - let threads = ThreadManager::default(); + let threads = ThreadManager::new(config); let mut thread_cpu_affinity = FxHashMap::default(); if matches!(&*tcx.sess.target.os, "linux" | "freebsd" | "android") { thread_cpu_affinity @@ -709,11 +720,10 @@ impl<'tcx> MiriMachine<'tcx> { check_alignment: config.check_alignment, cmpxchg_weak_failure_rate: config.cmpxchg_weak_failure_rate, mute_stdout_stderr: config.mute_stdout_stderr, - weak_memory: config.weak_memory_emulation, preemption_rate: config.preemption_rate, report_progress: config.report_progress, basic_block_count: 0, - clock: Clock::new(config.isolated_op == IsolatedOp::Allow), + monotonic_clock: MonotonicClock::new(config.isolated_op == IsolatedOp::Allow), #[cfg(unix)] native_lib: config.native_lib.as_ref().map(|lib_file_path| { let host_triple = rustc_session::config::host_tuple(); @@ -721,9 +731,8 @@ impl<'tcx> MiriMachine<'tcx> { // Check if host target == the session target. if host_triple != target_triple { panic!( - "calling external C functions in linked .so file requires host and target to be the same: host={}, target={}", - host_triple, - target_triple, + "calling native C functions in linked .so file requires host and target to be the same: \ + host={host_triple}, target={target_triple}", ); } // Note: it is the user's responsibility to provide a correct SO file. @@ -836,16 +845,25 @@ impl<'tcx> MiriMachine<'tcx> { .as_ref() .map(|bt| bt.borrow_mut().new_allocation(id, size, kind, &ecx.machine)); - let data_race = ecx.machine.data_race.as_ref().map(|data_race| { - data_race::AllocState::new_allocation( - data_race, - &ecx.machine.threads, - size, - kind, - ecx.machine.current_span(), - ) - }); - let weak_memory = ecx.machine.weak_memory.then(weak_memory::AllocState::new_allocation); + let data_race = match &ecx.machine.data_race { + GlobalDataRaceHandler::None => AllocDataRaceHandler::None, + GlobalDataRaceHandler::Vclocks(data_race) => + AllocDataRaceHandler::Vclocks( + data_race::AllocState::new_allocation( + data_race, + &ecx.machine.threads, + size, + kind, + ecx.machine.current_span(), + ), + data_race.weak_memory.then(weak_memory::AllocState::new_allocation), + ), + GlobalDataRaceHandler::Genmc(_genmc_ctx) => { + // GenMC learns about new allocations directly from the alloc_addresses module, + // since it has to be able to control the address at which they are placed. + AllocDataRaceHandler::Genmc + } + }; // If an allocation is leaked, we want to report a backtrace to indicate where it was // allocated. We don't need to record a backtrace for allocations which are allowed to @@ -863,13 +881,7 @@ impl<'tcx> MiriMachine<'tcx> { .insert(id, (ecx.machine.current_span(), None)); } - interp_ok(AllocExtra { - borrow_tracker, - data_race, - weak_memory, - backtrace, - sync: FxHashMap::default(), - }) + interp_ok(AllocExtra { borrow_tracker, data_race, backtrace, sync: FxHashMap::default() }) } } @@ -896,7 +908,7 @@ impl VisitProvenance for MiriMachine<'_> { tcx: _, isolated_op: _, validation: _, - clock: _, + monotonic_clock: _, layouts: _, static_roots: _, profiler: _, @@ -910,7 +922,6 @@ impl VisitProvenance for MiriMachine<'_> { check_alignment: _, cmpxchg_weak_failure_rate: _, mute_stdout_stderr: _, - weak_memory: _, preemption_rate: _, report_progress: _, basic_block_count: _, @@ -1199,6 +1210,14 @@ impl<'tcx> Machine<'tcx> for MiriMachine<'tcx> { ecx.generate_nan(inputs) } + #[inline(always)] + fn apply_float_nondet( + ecx: &mut InterpCx<'tcx, Self>, + val: ImmTy<'tcx>, + ) -> InterpResult<'tcx, ImmTy<'tcx>> { + crate::math::apply_random_float_error_to_imm(ecx, val, 2 /* log2(4) */) + } + #[inline(always)] fn equal_float_min_max(ecx: &MiriInterpCx<'tcx>, a: F, b: F) -> F { ecx.equal_float_min_max(a, b) @@ -1371,7 +1390,7 @@ impl<'tcx> Machine<'tcx> for MiriMachine<'tcx> { _tcx: TyCtxtAt<'tcx>, machine: &Self, alloc_extra: &AllocExtra<'tcx>, - _ptr: Pointer, + ptr: Pointer, (alloc_id, prov_extra): (AllocId, Self::ProvenanceExtra), range: AllocRange, ) -> InterpResult<'tcx> { @@ -1379,15 +1398,25 @@ impl<'tcx> Machine<'tcx> for MiriMachine<'tcx> { machine .emit_diagnostic(NonHaltingDiagnostic::AccessedAlloc(alloc_id, AccessKind::Read)); } - if let Some(data_race) = &alloc_extra.data_race { - data_race.read(alloc_id, range, NaReadType::Read, None, machine)?; + // The order of checks is deliberate, to prefer reporting a data race over a borrow tracker error. + match &machine.data_race { + GlobalDataRaceHandler::None => {} + GlobalDataRaceHandler::Genmc(genmc_ctx) => + genmc_ctx.memory_load(machine, ptr.addr(), range.size)?, + GlobalDataRaceHandler::Vclocks(_data_race) => { + let AllocDataRaceHandler::Vclocks(data_race, weak_memory) = &alloc_extra.data_race + else { + unreachable!(); + }; + data_race.read(alloc_id, range, NaReadType::Read, None, machine)?; + if let Some(weak_memory) = weak_memory { + weak_memory.memory_accessed(range, machine.data_race.as_vclocks_ref().unwrap()); + } + } } if let Some(borrow_tracker) = &alloc_extra.borrow_tracker { borrow_tracker.before_memory_read(alloc_id, prov_extra, range, machine)?; } - if let Some(weak_memory) = &alloc_extra.weak_memory { - weak_memory.memory_accessed(range, machine.data_race.as_ref().unwrap()); - } interp_ok(()) } @@ -1396,7 +1425,7 @@ impl<'tcx> Machine<'tcx> for MiriMachine<'tcx> { _tcx: TyCtxtAt<'tcx>, machine: &mut Self, alloc_extra: &mut AllocExtra<'tcx>, - _ptr: Pointer, + ptr: Pointer, (alloc_id, prov_extra): (AllocId, Self::ProvenanceExtra), range: AllocRange, ) -> InterpResult<'tcx> { @@ -1404,15 +1433,26 @@ impl<'tcx> Machine<'tcx> for MiriMachine<'tcx> { machine .emit_diagnostic(NonHaltingDiagnostic::AccessedAlloc(alloc_id, AccessKind::Write)); } - if let Some(data_race) = &mut alloc_extra.data_race { - data_race.write(alloc_id, range, NaWriteType::Write, None, machine)?; + match &machine.data_race { + GlobalDataRaceHandler::None => {} + GlobalDataRaceHandler::Genmc(genmc_ctx) => { + genmc_ctx.memory_store(machine, ptr.addr(), range.size)?; + } + GlobalDataRaceHandler::Vclocks(_global_state) => { + let AllocDataRaceHandler::Vclocks(data_race, weak_memory) = + &mut alloc_extra.data_race + else { + unreachable!() + }; + data_race.write(alloc_id, range, NaWriteType::Write, None, machine)?; + if let Some(weak_memory) = weak_memory { + weak_memory.memory_accessed(range, machine.data_race.as_vclocks_ref().unwrap()); + } + } } if let Some(borrow_tracker) = &mut alloc_extra.borrow_tracker { borrow_tracker.before_memory_write(alloc_id, prov_extra, range, machine)?; } - if let Some(weak_memory) = &alloc_extra.weak_memory { - weak_memory.memory_accessed(range, machine.data_race.as_ref().unwrap()); - } interp_ok(()) } @@ -1421,7 +1461,7 @@ impl<'tcx> Machine<'tcx> for MiriMachine<'tcx> { _tcx: TyCtxtAt<'tcx>, machine: &mut Self, alloc_extra: &mut AllocExtra<'tcx>, - _ptr: Pointer, + ptr: Pointer, (alloc_id, prove_extra): (AllocId, Self::ProvenanceExtra), size: Size, align: Align, @@ -1430,14 +1470,20 @@ impl<'tcx> Machine<'tcx> for MiriMachine<'tcx> { if machine.tracked_alloc_ids.contains(&alloc_id) { machine.emit_diagnostic(NonHaltingDiagnostic::FreedAlloc(alloc_id)); } - if let Some(data_race) = &mut alloc_extra.data_race { - data_race.write( - alloc_id, - alloc_range(Size::ZERO, size), - NaWriteType::Deallocate, - None, - machine, - )?; + match &machine.data_race { + GlobalDataRaceHandler::None => {} + GlobalDataRaceHandler::Genmc(genmc_ctx) => + genmc_ctx.handle_dealloc(machine, ptr.addr(), size, align, kind)?, + GlobalDataRaceHandler::Vclocks(_global_state) => { + let data_race = alloc_extra.data_race.as_vclocks_mut().unwrap(); + data_race.write( + alloc_id, + alloc_range(Size::ZERO, size), + NaWriteType::Deallocate, + None, + machine, + )?; + } } if let Some(borrow_tracker) = &mut alloc_extra.borrow_tracker { borrow_tracker.before_memory_deallocation(alloc_id, prove_extra, size, machine)?; @@ -1524,7 +1570,11 @@ impl<'tcx> Machine<'tcx> for MiriMachine<'tcx> { timing, is_user_relevant: ecx.machine.is_user_relevant(&frame), salt: ecx.machine.rng.borrow_mut().random_range(0..ADDRS_PER_ANON_GLOBAL), - data_race: ecx.machine.data_race.as_ref().map(|_| data_race::FrameState::default()), + data_race: ecx + .machine + .data_race + .as_vclocks_ref() + .map(|_| data_race::FrameState::default()), }; interp_ok(frame.with_extra(extra)) @@ -1568,7 +1618,7 @@ impl<'tcx> Machine<'tcx> for MiriMachine<'tcx> { ecx.maybe_preempt_active_thread(); // Make sure some time passes. - ecx.machine.clock.tick(); + ecx.machine.monotonic_clock.tick(); interp_ok(()) } @@ -1670,7 +1720,11 @@ impl<'tcx> Machine<'tcx> for MiriMachine<'tcx> { if let Some(data_race) = &machine.threads.active_thread_stack().last().unwrap().extra.data_race { - data_race.local_moved_to_memory(local, alloc_info.data_race.as_mut().unwrap(), machine); + data_race.local_moved_to_memory( + local, + alloc_info.data_race.as_vclocks_mut().unwrap(), + machine, + ); } interp_ok(()) } diff --git a/src/tools/miri/src/math.rs b/src/tools/miri/src/math.rs index fdd021f85394b..2ff29c7ac1aad 100644 --- a/src/tools/miri/src/math.rs +++ b/src/tools/miri/src/math.rs @@ -1,6 +1,9 @@ use rand::Rng as _; use rustc_apfloat::Float as _; use rustc_apfloat::ieee::IeeeFloat; +use rustc_middle::ty::{self, FloatTy, ScalarInt}; + +use crate::*; /// Disturbes a floating-point result by a relative error in the range (-2^scale, 2^scale). /// @@ -43,6 +46,29 @@ pub(crate) fn apply_random_float_error_ulp( apply_random_float_error(ecx, val, err_scale) } +/// Applies a random 16ULP floating point error to `val` and returns the new value. +/// Will fail if `val` is not a floating point number. +pub(crate) fn apply_random_float_error_to_imm<'tcx>( + ecx: &mut MiriInterpCx<'tcx>, + val: ImmTy<'tcx>, + ulp_exponent: u32, +) -> InterpResult<'tcx, ImmTy<'tcx>> { + let scalar = val.to_scalar_int()?; + let res: ScalarInt = match val.layout.ty.kind() { + ty::Float(FloatTy::F16) => + apply_random_float_error_ulp(ecx, scalar.to_f16(), ulp_exponent).into(), + ty::Float(FloatTy::F32) => + apply_random_float_error_ulp(ecx, scalar.to_f32(), ulp_exponent).into(), + ty::Float(FloatTy::F64) => + apply_random_float_error_ulp(ecx, scalar.to_f64(), ulp_exponent).into(), + ty::Float(FloatTy::F128) => + apply_random_float_error_ulp(ecx, scalar.to_f128(), ulp_exponent).into(), + _ => bug!("intrinsic called with non-float input type"), + }; + + interp_ok(ImmTy::from_scalar_int(res, val.layout)) +} + pub(crate) fn sqrt(x: IeeeFloat) -> IeeeFloat { match x.category() { // preserve zero sign diff --git a/src/tools/miri/src/shims/files.rs b/src/tools/miri/src/shims/files.rs index 6b4f4cdc922a0..42603e784bbd7 100644 --- a/src/tools/miri/src/shims/files.rs +++ b/src/tools/miri/src/shims/files.rs @@ -1,6 +1,7 @@ use std::any::Any; use std::collections::BTreeMap; -use std::io::{IsTerminal, SeekFrom, Write}; +use std::fs::{File, Metadata}; +use std::io::{IsTerminal, Seek, SeekFrom, Write}; use std::marker::CoercePointee; use std::ops::Deref; use std::rc::{Rc, Weak}; @@ -192,7 +193,7 @@ pub trait FileDescription: std::fmt::Debug + FileDescriptionExt { false } - fn as_unix(&self) -> &dyn UnixFileDescription { + fn as_unix<'tcx>(&self, _ecx: &MiriInterpCx<'tcx>) -> &dyn UnixFileDescription { panic!("Not a unix file descriptor: {}", self.name()); } } @@ -278,6 +279,97 @@ impl FileDescription for io::Stderr { } } +#[derive(Debug)] +pub struct FileHandle { + pub(crate) file: File, + pub(crate) writable: bool, +} + +impl FileDescription for FileHandle { + fn name(&self) -> &'static str { + "file" + } + + fn read<'tcx>( + self: FileDescriptionRef, + communicate_allowed: bool, + ptr: Pointer, + len: usize, + ecx: &mut MiriInterpCx<'tcx>, + finish: DynMachineCallback<'tcx, Result>, + ) -> InterpResult<'tcx> { + assert!(communicate_allowed, "isolation should have prevented even opening a file"); + + let result = ecx.read_from_host(&self.file, len, ptr)?; + finish.call(ecx, result) + } + + fn write<'tcx>( + self: FileDescriptionRef, + communicate_allowed: bool, + ptr: Pointer, + len: usize, + ecx: &mut MiriInterpCx<'tcx>, + finish: DynMachineCallback<'tcx, Result>, + ) -> InterpResult<'tcx> { + assert!(communicate_allowed, "isolation should have prevented even opening a file"); + + let result = ecx.write_to_host(&self.file, len, ptr)?; + finish.call(ecx, result) + } + + fn seek<'tcx>( + &self, + communicate_allowed: bool, + offset: SeekFrom, + ) -> InterpResult<'tcx, io::Result> { + assert!(communicate_allowed, "isolation should have prevented even opening a file"); + interp_ok((&mut &self.file).seek(offset)) + } + + fn close<'tcx>( + self, + communicate_allowed: bool, + _ecx: &mut MiriInterpCx<'tcx>, + ) -> InterpResult<'tcx, io::Result<()>> { + assert!(communicate_allowed, "isolation should have prevented even opening a file"); + // We sync the file if it was opened in a mode different than read-only. + if self.writable { + // `File::sync_all` does the checks that are done when closing a file. We do this to + // to handle possible errors correctly. + let result = self.file.sync_all(); + // Now we actually close the file and return the result. + drop(self.file); + interp_ok(result) + } else { + // We drop the file, this closes it but ignores any errors + // produced when closing it. This is done because + // `File::sync_all` cannot be done over files like + // `/dev/urandom` which are read-only. Check + // https://github.com/rust-lang/miri/issues/999#issuecomment-568920439 + // for a deeper discussion. + drop(self.file); + interp_ok(Ok(())) + } + } + + fn metadata<'tcx>(&self) -> InterpResult<'tcx, io::Result> { + interp_ok(self.file.metadata()) + } + + fn is_tty(&self, communicate_allowed: bool) -> bool { + communicate_allowed && self.file.is_terminal() + } + + fn as_unix<'tcx>(&self, ecx: &MiriInterpCx<'tcx>) -> &dyn UnixFileDescription { + assert!( + ecx.target_os_is_unix(), + "unix file operations are only available for unix targets" + ); + self + } +} + /// Like /dev/null #[derive(Debug)] pub struct NullOutput; @@ -300,10 +392,13 @@ impl FileDescription for NullOutput { } } +/// Internal type of a file-descriptor - this is what [`FdTable`] expects +pub type FdNum = i32; + /// The file descriptor table #[derive(Debug)] pub struct FdTable { - pub fds: BTreeMap, + pub fds: BTreeMap, /// Unique identifier for file description, used to differentiate between various file description. next_file_description_id: FdId, } @@ -339,12 +434,12 @@ impl FdTable { } /// Insert a new file description to the FdTable. - pub fn insert_new(&mut self, fd: impl FileDescription) -> i32 { + pub fn insert_new(&mut self, fd: impl FileDescription) -> FdNum { let fd_ref = self.new_ref(fd); self.insert(fd_ref) } - pub fn insert(&mut self, fd_ref: DynFileDescriptionRef) -> i32 { + pub fn insert(&mut self, fd_ref: DynFileDescriptionRef) -> FdNum { self.insert_with_min_num(fd_ref, 0) } @@ -352,8 +447,8 @@ impl FdTable { pub fn insert_with_min_num( &mut self, file_handle: DynFileDescriptionRef, - min_fd_num: i32, - ) -> i32 { + min_fd_num: FdNum, + ) -> FdNum { // Find the lowest unused FD, starting from min_fd. If the first such unused FD is in // between used FDs, the find_map combinator will return it. If the first such unused FD // is after all other used FDs, the find_map combinator will return None, and we will use @@ -379,16 +474,16 @@ impl FdTable { new_fd_num } - pub fn get(&self, fd_num: i32) -> Option { + pub fn get(&self, fd_num: FdNum) -> Option { let fd = self.fds.get(&fd_num)?; Some(fd.clone()) } - pub fn remove(&mut self, fd_num: i32) -> Option { + pub fn remove(&mut self, fd_num: FdNum) -> Option { self.fds.remove(&fd_num) } - pub fn is_fd_num(&self, fd_num: i32) -> bool { + pub fn is_fd_num(&self, fd_num: FdNum) -> bool { self.fds.contains_key(&fd_num) } } diff --git a/src/tools/miri/src/shims/time.rs b/src/tools/miri/src/shims/time.rs index 64b3ce6b4e494..28f4ca5bb1b76 100644 --- a/src/tools/miri/src/shims/time.rs +++ b/src/tools/miri/src/shims/time.rs @@ -21,7 +21,8 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { &mut self, clk_id_op: &OpTy<'tcx>, tp_op: &OpTy<'tcx>, - ) -> InterpResult<'tcx, Scalar> { + dest: &MPlaceTy<'tcx>, + ) -> InterpResult<'tcx> { // This clock support is deliberately minimal because a lot of clock types have fiddly // properties (is it possible for Miri to be suspended independently of the host?). If you // have a use for another clock type, please open an issue. @@ -29,8 +30,9 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { let this = self.eval_context_mut(); this.assert_target_os_is_unix("clock_gettime"); + let clockid_t_size = this.libc_ty_layout("clockid_t").size; - let clk_id = this.read_scalar(clk_id_op)?.to_i32()?; + let clk_id = this.read_scalar(clk_id_op)?.to_int(clockid_t_size)?; let tp = this.deref_pointer_as(tp_op, this.libc_ty_layout("timespec"))?; let absolute_clocks; @@ -43,34 +45,34 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // Linux further distinguishes regular and "coarse" clocks, but the "coarse" version // is just specified to be "faster and less precise", so we implement both the same way. absolute_clocks = vec![ - this.eval_libc_i32("CLOCK_REALTIME"), - this.eval_libc_i32("CLOCK_REALTIME_COARSE"), + this.eval_libc("CLOCK_REALTIME").to_int(clockid_t_size)?, + this.eval_libc("CLOCK_REALTIME_COARSE").to_int(clockid_t_size)?, ]; // The second kind is MONOTONIC clocks for which 0 is an arbitrary time point, but they are // never allowed to go backwards. We don't need to do any additional monotonicity // enforcement because std::time::Instant already guarantees that it is monotonic. relative_clocks = vec![ - this.eval_libc_i32("CLOCK_MONOTONIC"), - this.eval_libc_i32("CLOCK_MONOTONIC_COARSE"), + this.eval_libc("CLOCK_MONOTONIC").to_int(clockid_t_size)?, + this.eval_libc("CLOCK_MONOTONIC_COARSE").to_int(clockid_t_size)?, ]; } "macos" => { - absolute_clocks = vec![this.eval_libc_i32("CLOCK_REALTIME")]; - relative_clocks = vec![this.eval_libc_i32("CLOCK_MONOTONIC")]; + absolute_clocks = vec![this.eval_libc("CLOCK_REALTIME").to_int(clockid_t_size)?]; + relative_clocks = vec![this.eval_libc("CLOCK_MONOTONIC").to_int(clockid_t_size)?]; // `CLOCK_UPTIME_RAW` supposed to not increment while the system is asleep... but // that's not really something a program running inside Miri can tell, anyway. // We need to support it because std uses it. - relative_clocks.push(this.eval_libc_i32("CLOCK_UPTIME_RAW")); + relative_clocks.push(this.eval_libc("CLOCK_UPTIME_RAW").to_int(clockid_t_size)?); } "solaris" | "illumos" => { // The REALTIME clock returns the actual time since the Unix epoch. - absolute_clocks = vec![this.eval_libc_i32("CLOCK_REALTIME")]; + absolute_clocks = vec![this.eval_libc("CLOCK_REALTIME").to_int(clockid_t_size)?]; // MONOTONIC, in the other hand, is the high resolution, non-adjustable // clock from an arbitrary time in the past. // Note that the man page mentions HIGHRES but it is just // an alias of MONOTONIC and the libc crate does not expose it anyway. // https://docs.oracle.com/cd/E23824_01/html/821-1465/clock-gettime-3c.html - relative_clocks = vec![this.eval_libc_i32("CLOCK_MONOTONIC")]; + relative_clocks = vec![this.eval_libc("CLOCK_MONOTONIC").to_int(clockid_t_size)?]; } target => throw_unsup_format!("`clock_gettime` is not supported on target OS {target}"), } @@ -79,17 +81,18 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { this.check_no_isolation("`clock_gettime` with `REALTIME` clocks")?; system_time_to_duration(&SystemTime::now())? } else if relative_clocks.contains(&clk_id) { - this.machine.clock.now().duration_since(this.machine.clock.epoch()) + this.machine.monotonic_clock.now().duration_since(this.machine.monotonic_clock.epoch()) } else { - return this.set_last_error_and_return_i32(LibcError("EINVAL")); + return this.set_last_error_and_return(LibcError("EINVAL"), dest); }; let tv_sec = duration.as_secs(); let tv_nsec = duration.subsec_nanos(); this.write_int_fields(&[tv_sec.into(), tv_nsec.into()], &tp)?; + this.write_int(0, dest)?; - interp_ok(Scalar::from_i32(0)) + interp_ok(()) } fn gettimeofday( @@ -188,10 +191,10 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { tm_zone.push('+'); } let offset_hour = offset_in_seconds.abs() / 3600; - write!(tm_zone, "{:02}", offset_hour).unwrap(); + write!(tm_zone, "{offset_hour:02}").unwrap(); let offset_min = (offset_in_seconds.abs() % 3600) / 60; if offset_min != 0 { - write!(tm_zone, "{:02}", offset_min).unwrap(); + write!(tm_zone, "{offset_min:02}").unwrap(); } // Add null terminator for C string compatibility. @@ -219,16 +222,8 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { let filetime = this.deref_pointer_as(LPFILETIME_op, this.windows_ty_layout("FILETIME"))?; - let NANOS_PER_SEC = this.eval_windows_u64("time", "NANOS_PER_SEC"); - let INTERVALS_PER_SEC = this.eval_windows_u64("time", "INTERVALS_PER_SEC"); - let INTERVALS_TO_UNIX_EPOCH = this.eval_windows_u64("time", "INTERVALS_TO_UNIX_EPOCH"); - let NANOS_PER_INTERVAL = NANOS_PER_SEC / INTERVALS_PER_SEC; - let SECONDS_TO_UNIX_EPOCH = INTERVALS_TO_UNIX_EPOCH / INTERVALS_PER_SEC; - - let duration = system_time_to_duration(&SystemTime::now())? - + Duration::from_secs(SECONDS_TO_UNIX_EPOCH); - let duration_ticks = u64::try_from(duration.as_nanos() / u128::from(NANOS_PER_INTERVAL)) - .map_err(|_| err_unsup_format!("programs running more than 2^64 Windows ticks after the Windows epoch are not supported"))?; + let duration = this.system_time_since_windows_epoch(&SystemTime::now())?; + let duration_ticks = this.windows_ticks_for(duration)?; let dwLowDateTime = u32::try_from(duration_ticks & 0x00000000FFFFFFFF).unwrap(); let dwHighDateTime = u32::try_from((duration_ticks & 0xFFFFFFFF00000000) >> 32).unwrap(); @@ -248,7 +243,8 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // QueryPerformanceCounter uses a hardware counter as its basis. // Miri will emulate a counter with a resolution of 1 nanosecond. - let duration = this.machine.clock.now().duration_since(this.machine.clock.epoch()); + let duration = + this.machine.monotonic_clock.now().duration_since(this.machine.monotonic_clock.epoch()); let qpc = i64::try_from(duration.as_nanos()).map_err(|_| { err_unsup_format!("programs running longer than 2^63 nanoseconds are not supported") })?; @@ -280,6 +276,30 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { interp_ok(Scalar::from_i32(-1)) // Return non-zero on success } + #[allow(non_snake_case, clippy::arithmetic_side_effects)] + fn system_time_since_windows_epoch(&self, time: &SystemTime) -> InterpResult<'tcx, Duration> { + let this = self.eval_context_ref(); + + let INTERVALS_PER_SEC = this.eval_windows_u64("time", "INTERVALS_PER_SEC"); + let INTERVALS_TO_UNIX_EPOCH = this.eval_windows_u64("time", "INTERVALS_TO_UNIX_EPOCH"); + let SECONDS_TO_UNIX_EPOCH = INTERVALS_TO_UNIX_EPOCH / INTERVALS_PER_SEC; + + interp_ok(system_time_to_duration(time)? + Duration::from_secs(SECONDS_TO_UNIX_EPOCH)) + } + + #[allow(non_snake_case, clippy::arithmetic_side_effects)] + fn windows_ticks_for(&self, duration: Duration) -> InterpResult<'tcx, u64> { + let this = self.eval_context_ref(); + + let NANOS_PER_SEC = this.eval_windows_u64("time", "NANOS_PER_SEC"); + let INTERVALS_PER_SEC = this.eval_windows_u64("time", "INTERVALS_PER_SEC"); + let NANOS_PER_INTERVAL = NANOS_PER_SEC / INTERVALS_PER_SEC; + + let ticks = u64::try_from(duration.as_nanos() / u128::from(NANOS_PER_INTERVAL)) + .map_err(|_| err_unsup_format!("programs running more than 2^64 Windows ticks after the Windows epoch are not supported"))?; + interp_ok(ticks) + } + fn mach_absolute_time(&self) -> InterpResult<'tcx, Scalar> { let this = self.eval_context_ref(); @@ -287,7 +307,8 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // This returns a u64, with time units determined dynamically by `mach_timebase_info`. // We return plain nanoseconds. - let duration = this.machine.clock.now().duration_since(this.machine.clock.epoch()); + let duration = + this.machine.monotonic_clock.now().duration_since(this.machine.monotonic_clock.epoch()); let res = u64::try_from(duration.as_nanos()).map_err(|_| { err_unsup_format!("programs running longer than 2^64 nanoseconds are not supported") })?; diff --git a/src/tools/miri/src/shims/unix/android/thread.rs b/src/tools/miri/src/shims/unix/android/thread.rs index c7e2c4d507b22..30ec0aefcbf1a 100644 --- a/src/tools/miri/src/shims/unix/android/thread.rs +++ b/src/tools/miri/src/shims/unix/android/thread.rs @@ -42,7 +42,7 @@ pub fn prctl<'tcx>( ecx.check_ptr_access( name.to_pointer(ecx)?, Size::from_bytes(TASK_COMM_LEN), - CheckInAllocMsg::MemoryAccessTest, + CheckInAllocMsg::MemoryAccess, )?; let res = ecx.pthread_getname_np(thread, name, len, /* truncate*/ false)?; assert_eq!(res, ThreadNameResult::Ok); diff --git a/src/tools/miri/src/shims/unix/fd.rs b/src/tools/miri/src/shims/unix/fd.rs index 3f85b9ae9bd94..156814a26fa7a 100644 --- a/src/tools/miri/src/shims/unix/fd.rs +++ b/src/tools/miri/src/shims/unix/fd.rs @@ -121,7 +121,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { throw_unsup_format!("unsupported flags {:#x}", op); }; - let result = fd.as_unix().flock(this.machine.communicate(), parsed_op)?; + let result = fd.as_unix(this).flock(this.machine.communicate(), parsed_op)?; // return `0` if flock is successful let result = result.map(|()| 0i32); interp_ok(Scalar::from_i32(this.try_unwrap_io_result(result)?)) @@ -226,7 +226,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { trace!("Reading from FD {}, size {}", fd_num, count); // Check that the *entire* buffer is actually valid memory. - this.check_ptr_access(buf, Size::from_bytes(count), CheckInAllocMsg::MemoryAccessTest)?; + this.check_ptr_access(buf, Size::from_bytes(count), CheckInAllocMsg::MemoryAccess)?; // We cap the number of read bytes to the largest value that we are able to fit in both the // host's and target's `isize`. This saves us from having to handle overflows later. @@ -273,7 +273,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { let Ok(offset) = u64::try_from(offset) else { return this.set_last_error_and_return(LibcError("EINVAL"), dest); }; - fd.as_unix().pread(communicate, offset, buf, count, this, finish)? + fd.as_unix(this).pread(communicate, offset, buf, count, this, finish)? } }; interp_ok(()) @@ -292,7 +292,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // Isolation check is done via `FileDescription` trait. // Check that the *entire* buffer is actually valid memory. - this.check_ptr_access(buf, Size::from_bytes(count), CheckInAllocMsg::MemoryAccessTest)?; + this.check_ptr_access(buf, Size::from_bytes(count), CheckInAllocMsg::MemoryAccess)?; // We cap the number of written bytes to the largest value that we are able to fit in both the // host's and target's `isize`. This saves us from having to handle overflows later. @@ -333,7 +333,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { let Ok(offset) = u64::try_from(offset) else { return this.set_last_error_and_return(LibcError("EINVAL"), dest); }; - fd.as_unix().pwrite(communicate, buf, count, offset, this, finish)? + fd.as_unix(this).pwrite(communicate, buf, count, offset, this, finish)? } }; interp_ok(()) diff --git a/src/tools/miri/src/shims/unix/foreign_items.rs b/src/tools/miri/src/shims/unix/foreign_items.rs index 1770b99c0a22b..026aa1f950399 100644 --- a/src/tools/miri/src/shims/unix/foreign_items.rs +++ b/src/tools/miri/src/shims/unix/foreign_items.rs @@ -112,51 +112,122 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { match link_name.as_str() { // Environment related shims "getenv" => { - let [name] = this.check_shim(abi, Conv::C, link_name, args)?; + let [name] = this.check_shim_abi( + link_name, + abi, + ExternAbi::C { unwind: false }, + [this.machine.layouts.const_raw_ptr.ty], + this.machine.layouts.mut_raw_ptr.ty, + args, + )?; let result = this.getenv(name)?; this.write_pointer(result, dest)?; } "unsetenv" => { - let [name] = this.check_shim(abi, Conv::C, link_name, args)?; + let [name] = this.check_shim_abi( + link_name, + abi, + ExternAbi::C { unwind: false }, + [this.machine.layouts.const_raw_ptr.ty], + this.tcx.types.i32, + args, + )?; let result = this.unsetenv(name)?; this.write_scalar(result, dest)?; } "setenv" => { - let [name, value, overwrite] = this.check_shim(abi, Conv::C, link_name, args)?; + let [name, value, overwrite] = this.check_shim_abi( + link_name, + abi, + ExternAbi::C { unwind: false }, + [ + this.machine.layouts.const_raw_ptr.ty, + this.machine.layouts.const_raw_ptr.ty, + this.tcx.types.i32, + ], + this.tcx.types.i32, + args, + )?; this.read_scalar(overwrite)?.to_i32()?; let result = this.setenv(name, value)?; this.write_scalar(result, dest)?; } "getcwd" => { - let [buf, size] = this.check_shim(abi, Conv::C, link_name, args)?; + let [buf, size] = this.check_shim_abi( + link_name, + abi, + ExternAbi::C { unwind: false }, + [this.machine.layouts.mut_raw_ptr.ty, this.tcx.types.usize], + this.machine.layouts.mut_raw_ptr.ty, + args, + )?; let result = this.getcwd(buf, size)?; this.write_pointer(result, dest)?; } "chdir" => { - let [path] = this.check_shim(abi, Conv::C, link_name, args)?; + let [path] = this.check_shim_abi( + link_name, + abi, + ExternAbi::C { unwind: false }, + [this.machine.layouts.const_raw_ptr.ty], + this.tcx.types.i32, + args, + )?; let result = this.chdir(path)?; this.write_scalar(result, dest)?; } "getpid" => { - let [] = this.check_shim(abi, Conv::C, link_name, args)?; + let [] = this.check_shim_abi( + link_name, + abi, + ExternAbi::C { unwind: false }, + [], + this.libc_ty_layout("pid_t").ty, + args, + )?; let result = this.getpid()?; this.write_scalar(result, dest)?; } "sysconf" => { - let [val] = this.check_shim(abi, Conv::C, link_name, args)?; + let [val] = this.check_shim_abi( + link_name, + abi, + ExternAbi::C { unwind: false }, + [this.tcx.types.i32], + this.tcx.types.isize, + args, + )?; let result = this.sysconf(val)?; this.write_scalar(result, dest)?; } // File descriptors "read" => { - let [fd, buf, count] = this.check_shim(abi, Conv::C, link_name, args)?; + let [fd, buf, count] = this.check_shim_abi( + link_name, + abi, + ExternAbi::C { unwind: false }, + [this.tcx.types.i32, this.machine.layouts.mut_raw_ptr.ty, this.tcx.types.usize], + this.tcx.types.isize, + args, + )?; let fd = this.read_scalar(fd)?.to_i32()?; let buf = this.read_pointer(buf)?; let count = this.read_target_usize(count)?; this.read(fd, buf, count, None, dest)?; } "write" => { - let [fd, buf, n] = this.check_shim(abi, Conv::C, link_name, args)?; + let [fd, buf, n] = this.check_shim_abi( + link_name, + abi, + ExternAbi::C { unwind: false }, + [ + this.tcx.types.i32, + this.machine.layouts.const_raw_ptr.ty, + this.tcx.types.usize, + ], + this.tcx.types.isize, + args, + )?; let fd = this.read_scalar(fd)?.to_i32()?; let buf = this.read_pointer(buf)?; let count = this.read_target_usize(n)?; @@ -164,38 +235,88 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { this.write(fd, buf, count, None, dest)?; } "pread" => { - let [fd, buf, count, offset] = this.check_shim(abi, Conv::C, link_name, args)?; + let off_t = this.libc_ty_layout("off_t"); + let [fd, buf, count, offset] = this.check_shim_abi( + link_name, + abi, + ExternAbi::C { unwind: false }, + [ + this.tcx.types.i32, + this.machine.layouts.mut_raw_ptr.ty, + this.tcx.types.usize, + off_t.ty, + ], + this.tcx.types.isize, + args, + )?; let fd = this.read_scalar(fd)?.to_i32()?; let buf = this.read_pointer(buf)?; let count = this.read_target_usize(count)?; - let offset = this.read_scalar(offset)?.to_int(this.libc_ty_layout("off_t").size)?; + let offset = this.read_scalar(offset)?.to_int(off_t.size)?; this.read(fd, buf, count, Some(offset), dest)?; } "pwrite" => { - let [fd, buf, n, offset] = this.check_shim(abi, Conv::C, link_name, args)?; + let off_t = this.libc_ty_layout("off_t"); + let [fd, buf, n, offset] = this.check_shim_abi( + link_name, + abi, + ExternAbi::C { unwind: false }, + [ + this.tcx.types.i32, + this.machine.layouts.const_raw_ptr.ty, + this.tcx.types.usize, + off_t.ty, + ], + this.tcx.types.isize, + args, + )?; let fd = this.read_scalar(fd)?.to_i32()?; let buf = this.read_pointer(buf)?; let count = this.read_target_usize(n)?; - let offset = this.read_scalar(offset)?.to_int(this.libc_ty_layout("off_t").size)?; + let offset = this.read_scalar(offset)?.to_int(off_t.size)?; trace!("Called pwrite({:?}, {:?}, {:?}, {:?})", fd, buf, count, offset); this.write(fd, buf, count, Some(offset), dest)?; } "pread64" => { - let [fd, buf, count, offset] = this.check_shim(abi, Conv::C, link_name, args)?; + let off64_t = this.libc_ty_layout("off64_t"); + let [fd, buf, count, offset] = this.check_shim_abi( + link_name, + abi, + ExternAbi::C { unwind: false }, + [ + this.tcx.types.i32, + this.machine.layouts.mut_raw_ptr.ty, + this.tcx.types.usize, + off64_t.ty, + ], + this.tcx.types.isize, + args, + )?; let fd = this.read_scalar(fd)?.to_i32()?; let buf = this.read_pointer(buf)?; let count = this.read_target_usize(count)?; - let offset = - this.read_scalar(offset)?.to_int(this.libc_ty_layout("off64_t").size)?; + let offset = this.read_scalar(offset)?.to_int(off64_t.size)?; this.read(fd, buf, count, Some(offset), dest)?; } "pwrite64" => { - let [fd, buf, n, offset] = this.check_shim(abi, Conv::C, link_name, args)?; + let off64_t = this.libc_ty_layout("off64_t"); + let [fd, buf, n, offset] = this.check_shim_abi( + link_name, + abi, + ExternAbi::C { unwind: false }, + [ + this.tcx.types.i32, + this.machine.layouts.const_raw_ptr.ty, + this.tcx.types.usize, + off64_t.ty, + ], + this.tcx.types.isize, + args, + )?; let fd = this.read_scalar(fd)?.to_i32()?; let buf = this.read_pointer(buf)?; let count = this.read_target_usize(n)?; - let offset = - this.read_scalar(offset)?.to_int(this.libc_ty_layout("off64_t").size)?; + let offset = this.read_scalar(offset)?.to_int(off64_t.size)?; trace!("Called pwrite64({:?}, {:?}, {:?}, {:?})", fd, buf, count, offset); this.write(fd, buf, count, Some(offset), dest)?; } @@ -218,20 +339,43 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { this.write_scalar(result, dest)?; } "dup" => { - let [old_fd] = this.check_shim(abi, Conv::C, link_name, args)?; + let [old_fd] = this.check_shim_abi( + link_name, + abi, + ExternAbi::C { unwind: false }, + [this.tcx.types.i32], + this.tcx.types.i32, + args, + )?; let old_fd = this.read_scalar(old_fd)?.to_i32()?; let new_fd = this.dup(old_fd)?; this.write_scalar(new_fd, dest)?; } "dup2" => { - let [old_fd, new_fd] = this.check_shim(abi, Conv::C, link_name, args)?; + let [old_fd, new_fd] = this.check_shim_abi( + link_name, + abi, + ExternAbi::C { unwind: false }, + [this.tcx.types.i32, this.tcx.types.i32], + this.tcx.types.i32, + args, + )?; let old_fd = this.read_scalar(old_fd)?.to_i32()?; let new_fd = this.read_scalar(new_fd)?.to_i32()?; let result = this.dup2(old_fd, new_fd)?; this.write_scalar(result, dest)?; } "flock" => { - let [fd, op] = this.check_shim(abi, Conv::C, link_name, args)?; + // Currently this function does not exist on all Unixes, e.g. on Solaris. + this.check_target_os(&["linux", "freebsd", "macos", "illumos"], link_name)?; + let [fd, op] = this.check_shim_abi( + link_name, + abi, + ExternAbi::C { unwind: false }, + [this.tcx.types.i32, this.tcx.types.i32], + this.tcx.types.i32, + args, + )?; let fd = this.read_scalar(fd)?.to_i32()?; let op = this.read_scalar(op)?.to_i32()?; let result = this.flock(fd, op)?; @@ -248,140 +392,311 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { this.write_scalar(result, dest)?; } "unlink" => { - let [path] = this.check_shim(abi, Conv::C, link_name, args)?; + let [path] = this.check_shim_abi( + link_name, + abi, + ExternAbi::C { unwind: false }, + [this.machine.layouts.const_raw_ptr.ty], + this.tcx.types.i32, + args, + )?; let result = this.unlink(path)?; this.write_scalar(result, dest)?; } "symlink" => { - let [target, linkpath] = this.check_shim(abi, Conv::C, link_name, args)?; + let [target, linkpath] = this.check_shim_abi( + link_name, + abi, + ExternAbi::C { unwind: false }, + [this.machine.layouts.const_raw_ptr.ty, this.machine.layouts.const_raw_ptr.ty], + this.tcx.types.i32, + args, + )?; let result = this.symlink(target, linkpath)?; this.write_scalar(result, dest)?; } "rename" => { - let [oldpath, newpath] = this.check_shim(abi, Conv::C, link_name, args)?; + let [oldpath, newpath] = this.check_shim_abi( + link_name, + abi, + ExternAbi::C { unwind: false }, + [this.machine.layouts.const_raw_ptr.ty, this.machine.layouts.const_raw_ptr.ty], + this.tcx.types.i32, + args, + )?; let result = this.rename(oldpath, newpath)?; this.write_scalar(result, dest)?; } "mkdir" => { - let [path, mode] = this.check_shim(abi, Conv::C, link_name, args)?; + let [path, mode] = this.check_shim_abi( + link_name, + abi, + ExternAbi::C { unwind: false }, + [this.machine.layouts.const_raw_ptr.ty, this.libc_ty_layout("mode_t").ty], + this.tcx.types.i32, + args, + )?; let result = this.mkdir(path, mode)?; this.write_scalar(result, dest)?; } "rmdir" => { - let [path] = this.check_shim(abi, Conv::C, link_name, args)?; + let [path] = this.check_shim_abi( + link_name, + abi, + ExternAbi::C { unwind: false }, + [this.machine.layouts.const_raw_ptr.ty], + this.tcx.types.i32, + args, + )?; let result = this.rmdir(path)?; this.write_scalar(result, dest)?; } "opendir" => { - let [name] = this.check_shim(abi, Conv::C, link_name, args)?; + let [name] = this.check_shim_abi( + link_name, + abi, + ExternAbi::C { unwind: false }, + [this.machine.layouts.const_raw_ptr.ty], + this.machine.layouts.mut_raw_ptr.ty, + args, + )?; let result = this.opendir(name)?; this.write_scalar(result, dest)?; } "closedir" => { - let [dirp] = this.check_shim(abi, Conv::C, link_name, args)?; + let [dirp] = this.check_shim_abi( + link_name, + abi, + ExternAbi::C { unwind: false }, + [this.machine.layouts.mut_raw_ptr.ty], + this.tcx.types.i32, + args, + )?; let result = this.closedir(dirp)?; this.write_scalar(result, dest)?; } "lseek64" => { - let [fd, offset, whence] = this.check_shim(abi, Conv::C, link_name, args)?; + let off64_t = this.libc_ty_layout("off64_t"); + let [fd, offset, whence] = this.check_shim_abi( + link_name, + abi, + ExternAbi::C { unwind: false }, + [this.tcx.types.i32, off64_t.ty, this.tcx.types.i32], + off64_t.ty, + args, + )?; let fd = this.read_scalar(fd)?.to_i32()?; - let offset = this.read_scalar(offset)?.to_i64()?; + let offset = this.read_scalar(offset)?.to_int(off64_t.size)?; let whence = this.read_scalar(whence)?.to_i32()?; - let result = this.lseek64(fd, offset.into(), whence)?; - this.write_scalar(result, dest)?; + this.lseek64(fd, offset, whence, dest)?; } "lseek" => { - let [fd, offset, whence] = this.check_shim(abi, Conv::C, link_name, args)?; + let off_t = this.libc_ty_layout("off_t"); + let [fd, offset, whence] = this.check_shim_abi( + link_name, + abi, + ExternAbi::C { unwind: false }, + [this.tcx.types.i32, off_t.ty, this.tcx.types.i32], + off_t.ty, + args, + )?; let fd = this.read_scalar(fd)?.to_i32()?; - let offset = this.read_scalar(offset)?.to_int(this.libc_ty_layout("off_t").size)?; + let offset = this.read_scalar(offset)?.to_int(off_t.size)?; let whence = this.read_scalar(whence)?.to_i32()?; - let result = this.lseek64(fd, offset, whence)?; - this.write_scalar(result, dest)?; + this.lseek64(fd, offset, whence, dest)?; } "ftruncate64" => { - let [fd, length] = this.check_shim(abi, Conv::C, link_name, args)?; + let off64_t = this.libc_ty_layout("off64_t"); + let [fd, length] = this.check_shim_abi( + link_name, + abi, + ExternAbi::C { unwind: false }, + [this.tcx.types.i32, off64_t.ty], + this.tcx.types.i32, + args, + )?; let fd = this.read_scalar(fd)?.to_i32()?; - let length = this.read_scalar(length)?.to_i64()?; - let result = this.ftruncate64(fd, length.into())?; + let length = this.read_scalar(length)?.to_int(off64_t.size)?; + let result = this.ftruncate64(fd, length)?; this.write_scalar(result, dest)?; } "ftruncate" => { - let [fd, length] = this.check_shim(abi, Conv::C, link_name, args)?; + let off_t = this.libc_ty_layout("off_t"); + let [fd, length] = this.check_shim_abi( + link_name, + abi, + ExternAbi::C { unwind: false }, + [this.tcx.types.i32, off_t.ty], + this.tcx.types.i32, + args, + )?; let fd = this.read_scalar(fd)?.to_i32()?; - let length = this.read_scalar(length)?.to_int(this.libc_ty_layout("off_t").size)?; + let length = this.read_scalar(length)?.to_int(off_t.size)?; let result = this.ftruncate64(fd, length)?; this.write_scalar(result, dest)?; } "fsync" => { - let [fd] = this.check_shim(abi, Conv::C, link_name, args)?; + let [fd] = this.check_shim_abi( + link_name, + abi, + ExternAbi::C { unwind: false }, + [this.tcx.types.i32], + this.tcx.types.i32, + args, + )?; let result = this.fsync(fd)?; this.write_scalar(result, dest)?; } "fdatasync" => { - let [fd] = this.check_shim(abi, Conv::C, link_name, args)?; + let [fd] = this.check_shim_abi( + link_name, + abi, + ExternAbi::C { unwind: false }, + [this.tcx.types.i32], + this.tcx.types.i32, + args, + )?; let result = this.fdatasync(fd)?; this.write_scalar(result, dest)?; } "readlink" => { - let [pathname, buf, bufsize] = this.check_shim(abi, Conv::C, link_name, args)?; + let [pathname, buf, bufsize] = this.check_shim_abi( + link_name, + abi, + ExternAbi::C { unwind: false }, + [ + this.machine.layouts.const_raw_ptr.ty, + this.machine.layouts.mut_raw_ptr.ty, + this.tcx.types.usize, + ], + this.tcx.types.isize, + args, + )?; let result = this.readlink(pathname, buf, bufsize)?; this.write_scalar(Scalar::from_target_isize(result, this), dest)?; } "posix_fadvise" => { - let [fd, offset, len, advice] = this.check_shim(abi, Conv::C, link_name, args)?; + let off_t = this.libc_ty_layout("off_t"); + let [fd, offset, len, advice] = this.check_shim_abi( + link_name, + abi, + ExternAbi::C { unwind: false }, + [this.tcx.types.i32, off_t.ty, off_t.ty, this.tcx.types.i32], + this.tcx.types.i32, + args, + )?; this.read_scalar(fd)?.to_i32()?; - this.read_target_isize(offset)?; - this.read_target_isize(len)?; + this.read_scalar(offset)?.to_int(off_t.size)?; + this.read_scalar(len)?.to_int(off_t.size)?; this.read_scalar(advice)?.to_i32()?; // fadvise is only informational, we can ignore it. this.write_null(dest)?; } "realpath" => { - let [path, resolved_path] = this.check_shim(abi, Conv::C, link_name, args)?; + let [path, resolved_path] = this.check_shim_abi( + link_name, + abi, + ExternAbi::C { unwind: false }, + [this.machine.layouts.const_raw_ptr.ty, this.machine.layouts.mut_raw_ptr.ty], + this.machine.layouts.mut_raw_ptr.ty, + args, + )?; let result = this.realpath(path, resolved_path)?; this.write_scalar(result, dest)?; } "mkstemp" => { - let [template] = this.check_shim(abi, Conv::C, link_name, args)?; + let [template] = this.check_shim_abi( + link_name, + abi, + ExternAbi::C { unwind: false }, + [this.machine.layouts.mut_raw_ptr.ty], + this.tcx.types.i32, + args, + )?; let result = this.mkstemp(template)?; this.write_scalar(result, dest)?; } // Unnamed sockets and pipes "socketpair" => { - let [domain, type_, protocol, sv] = - this.check_shim(abi, Conv::C, link_name, args)?; + let [domain, type_, protocol, sv] = this.check_shim_abi( + link_name, + abi, + ExternAbi::C { unwind: false }, + [ + this.tcx.types.i32, + this.tcx.types.i32, + this.tcx.types.i32, + this.machine.layouts.mut_raw_ptr.ty, + ], + this.tcx.types.i32, + args, + )?; let result = this.socketpair(domain, type_, protocol, sv)?; this.write_scalar(result, dest)?; } "pipe" => { - let [pipefd] = this.check_shim(abi, Conv::C, link_name, args)?; + let [pipefd] = this.check_shim_abi( + link_name, + abi, + ExternAbi::C { unwind: false }, + [this.machine.layouts.mut_raw_ptr.ty], + this.tcx.types.i32, + args, + )?; let result = this.pipe2(pipefd, /*flags*/ None)?; this.write_scalar(result, dest)?; } "pipe2" => { // Currently this function does not exist on all Unixes, e.g. on macOS. this.check_target_os(&["linux", "freebsd", "solaris", "illumos"], link_name)?; - let [pipefd, flags] = this.check_shim(abi, Conv::C, link_name, args)?; + let [pipefd, flags] = this.check_shim_abi( + link_name, + abi, + ExternAbi::C { unwind: false }, + [this.machine.layouts.mut_raw_ptr.ty, this.tcx.types.i32], + this.tcx.types.i32, + args, + )?; let result = this.pipe2(pipefd, Some(flags))?; this.write_scalar(result, dest)?; } // Time "gettimeofday" => { - let [tv, tz] = this.check_shim(abi, Conv::C, link_name, args)?; + let [tv, tz] = this.check_shim_abi( + link_name, + abi, + ExternAbi::C { unwind: false }, + [this.machine.layouts.mut_raw_ptr.ty, this.machine.layouts.mut_raw_ptr.ty], + this.tcx.types.i32, + args, + )?; let result = this.gettimeofday(tv, tz)?; this.write_scalar(result, dest)?; } "localtime_r" => { - let [timep, result_op] = this.check_shim(abi, Conv::C, link_name, args)?; + let [timep, result_op] = this.check_shim_abi( + link_name, + abi, + ExternAbi::C { unwind: false }, + [this.machine.layouts.const_raw_ptr.ty, this.machine.layouts.mut_raw_ptr.ty], + this.machine.layouts.mut_raw_ptr.ty, + args, + )?; let result = this.localtime_r(timep, result_op)?; this.write_pointer(result, dest)?; } "clock_gettime" => { - let [clk_id, tp] = this.check_shim(abi, Conv::C, link_name, args)?; - let result = this.clock_gettime(clk_id, tp)?; - this.write_scalar(result, dest)?; + let [clk_id, tp] = this.check_shim_abi( + link_name, + abi, + ExternAbi::C { unwind: false }, + [this.libc_ty_layout("clockid_t").ty, this.machine.layouts.mut_raw_ptr.ty], + this.tcx.types.i32, + args, + )?; + this.clock_gettime(clk_id, tp, dest)?; } // Allocation @@ -832,7 +1147,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // These shims are enabled only when the caller is in the standard library. "pthread_attr_getguardsize" if this.frame_in_std() => { let [_attr, guard_size] = this.check_shim(abi, Conv::C, link_name, args)?; - let guard_size_layout = this.libc_ty_layout("size_t"); + let guard_size_layout = this.machine.layouts.usize; let guard_size = this.deref_pointer_as(guard_size, guard_size_layout)?; this.write_scalar( Scalar::from_uint(this.machine.page_size, guard_size_layout.size), diff --git a/src/tools/miri/src/shims/unix/freebsd/foreign_items.rs b/src/tools/miri/src/shims/unix/freebsd/foreign_items.rs index 08d06fe5d4c61..21a386b29272a 100644 --- a/src/tools/miri/src/shims/unix/freebsd/foreign_items.rs +++ b/src/tools/miri/src/shims/unix/freebsd/foreign_items.rs @@ -2,6 +2,7 @@ use rustc_middle::ty::Ty; use rustc_span::Symbol; use rustc_target::callconv::{Conv, FnAbi}; +use super::sync::EvalContextExt as _; use crate::shims::unix::*; use crate::*; @@ -55,6 +56,13 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { this.write_scalar(res, dest)?; } + // Synchronization primitives + "_umtx_op" => { + let [obj, op, val, uaddr, uaddr2] = + this.check_shim(abi, Conv::C, link_name, args)?; + this._umtx_op(obj, op, val, uaddr, uaddr2, dest)?; + } + // File related shims // For those, we both intercept `func` and `call@FBSD_1.0` symbols cases // since freebsd 12 the former form can be expected. diff --git a/src/tools/miri/src/shims/unix/freebsd/mod.rs b/src/tools/miri/src/shims/unix/freebsd/mod.rs index 09c6507b24f84..50fb2b9d32870 100644 --- a/src/tools/miri/src/shims/unix/freebsd/mod.rs +++ b/src/tools/miri/src/shims/unix/freebsd/mod.rs @@ -1 +1,2 @@ pub mod foreign_items; +pub mod sync; diff --git a/src/tools/miri/src/shims/unix/freebsd/sync.rs b/src/tools/miri/src/shims/unix/freebsd/sync.rs new file mode 100644 index 0000000000000..54650f35b2cb7 --- /dev/null +++ b/src/tools/miri/src/shims/unix/freebsd/sync.rs @@ -0,0 +1,251 @@ +//! Contains FreeBSD-specific synchronization functions + +use core::time::Duration; + +use crate::concurrency::sync::FutexRef; +use crate::*; + +pub struct FreeBsdFutex { + futex: FutexRef, +} + +/// Extended variant of the `timespec` struct. +pub struct UmtxTime { + timeout: Duration, + abs_time: bool, + timeout_clock: TimeoutClock, +} + +impl<'tcx> EvalContextExt<'tcx> for crate::MiriInterpCx<'tcx> {} +pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { + /// Implementation of the FreeBSD [`_umtx_op`](https://man.freebsd.org/cgi/man.cgi?query=_umtx_op&sektion=2&manpath=FreeBSD+14.2-RELEASE+and+Ports) syscall. + /// This is used for futex operations on FreeBSD. + /// + /// `obj`: a pointer to the futex object (can be a lot of things, mostly *AtomicU32) + /// `op`: the futex operation to run + /// `val`: the current value of the object as a `c_long` (for wait/wake) + /// `uaddr`: `op`-specific optional parameter, pointer-sized integer or pointer to an `op`-specific struct + /// `uaddr2`: `op`-specific optional parameter, pointer-sized integer or pointer to an `op`-specific struct + /// `dest`: the place this syscall returns to, 0 for success, -1 for failure + /// + /// # Note + /// Curently only the WAIT and WAKE operations are implemented. + fn _umtx_op( + &mut self, + obj: &OpTy<'tcx>, + op: &OpTy<'tcx>, + val: &OpTy<'tcx>, + uaddr: &OpTy<'tcx>, + uaddr2: &OpTy<'tcx>, + dest: &MPlaceTy<'tcx>, + ) -> InterpResult<'tcx> { + let this = self.eval_context_mut(); + + let obj = this.read_pointer(obj)?; + let op = this.read_scalar(op)?.to_i32()?; + let val = this.read_target_usize(val)?; + let uaddr = this.read_target_usize(uaddr)?; + let uaddr2 = this.read_pointer(uaddr2)?; + + let wait = this.eval_libc_i32("UMTX_OP_WAIT"); + let wait_uint = this.eval_libc_i32("UMTX_OP_WAIT_UINT"); + let wait_uint_private = this.eval_libc_i32("UMTX_OP_WAIT_UINT_PRIVATE"); + + let wake = this.eval_libc_i32("UMTX_OP_WAKE"); + let wake_private = this.eval_libc_i32("UMTX_OP_WAKE_PRIVATE"); + + let timespec_layout = this.libc_ty_layout("timespec"); + let umtx_time_layout = this.libc_ty_layout("_umtx_time"); + assert!( + timespec_layout.size != umtx_time_layout.size, + "`struct timespec` and `struct _umtx_time` should have different sizes." + ); + + match op { + // UMTX_OP_WAIT_UINT and UMTX_OP_WAIT_UINT_PRIVATE only differ in whether they work across + // processes or not. For Miri, we can treat them the same. + op if op == wait || op == wait_uint || op == wait_uint_private => { + let obj_layout = + if op == wait { this.machine.layouts.isize } else { this.machine.layouts.u32 }; + let obj = this.ptr_to_mplace(obj, obj_layout); + + // Read the Linux futex wait implementation in Miri to understand why this fence is needed. + this.atomic_fence(AtomicFenceOrd::SeqCst)?; + let obj_val = this + .read_scalar_atomic(&obj, AtomicReadOrd::Acquire)? + .to_bits(obj_layout.size)?; // isize and u32 can have different sizes + + if obj_val == u128::from(val) { + // This cannot fail since we already did an atomic acquire read on that pointer. + // Acquire reads are only allowed on mutable memory. + let futex_ref = this + .get_sync_or_init(obj.ptr(), |_| FreeBsdFutex { futex: Default::default() }) + .unwrap() + .futex + .clone(); + + // From the manual: + // The timeout is specified by passing either the address of `struct timespec`, or its + // extended variant, `struct _umtx_time`, as the `uaddr2` argument of _umtx_op(). + // They are distinguished by the `uaddr` value, which must be equal + // to the size of the structure pointed to by `uaddr2`, casted to uintptr_t. + let timeout = if this.ptr_is_null(uaddr2)? { + // no timeout parameter + None + } else { + if uaddr == umtx_time_layout.size.bytes() { + // `uaddr2` points to a `struct _umtx_time`. + let umtx_time_place = this.ptr_to_mplace(uaddr2, umtx_time_layout); + + let umtx_time = match this.read_umtx_time(&umtx_time_place)? { + Some(ut) => ut, + None => { + return this + .set_last_error_and_return(LibcError("EINVAL"), dest); + } + }; + + let anchor = if umtx_time.abs_time { + TimeoutAnchor::Absolute + } else { + TimeoutAnchor::Relative + }; + + Some((umtx_time.timeout_clock, anchor, umtx_time.timeout)) + } else if uaddr == timespec_layout.size.bytes() { + // RealTime clock can't be used in isolation mode. + this.check_no_isolation("`_umtx_op` with `timespec` timeout")?; + + // `uaddr2` points to a `struct timespec`. + let timespec = this.ptr_to_mplace(uaddr2, timespec_layout); + let duration = match this.read_timespec(×pec)? { + Some(duration) => duration, + None => { + return this + .set_last_error_and_return(LibcError("EINVAL"), dest); + } + }; + + // FreeBSD does not seem to document which clock is used when the timeout + // is passed as a `struct timespec*`. Based on discussions online and the source + // code (umtx_copyin_umtx_time() in kern_umtx.c), it seems to default to CLOCK_REALTIME, + // so that's what we also do. + // Discussion in golang: https://github.com/golang/go/issues/17168#issuecomment-250235271 + Some((TimeoutClock::RealTime, TimeoutAnchor::Relative, duration)) + } else { + return this.set_last_error_and_return(LibcError("EINVAL"), dest); + } + }; + + let dest = dest.clone(); + this.futex_wait( + futex_ref, + u32::MAX, // we set the bitset to include all bits + timeout, + callback!( + @capture<'tcx> { + dest: MPlaceTy<'tcx>, + } + |ecx, unblock: UnblockKind| match unblock { + UnblockKind::Ready => { + // From the manual: + // If successful, all requests, except UMTX_SHM_CREAT and UMTX_SHM_LOOKUP + // sub-requests of the UMTX_OP_SHM request, will return zero. + ecx.write_int(0, &dest) + } + UnblockKind::TimedOut => { + ecx.set_last_error_and_return(LibcError("ETIMEDOUT"), &dest) + } + } + ), + ); + interp_ok(()) + } else { + // The manual doesn’t specify what should happen if the futex value doesn’t match the expected one. + // On FreeBSD 14.2, testing shows that WAIT operations return 0 even when the value is incorrect. + this.write_int(0, dest)?; + interp_ok(()) + } + } + // UMTX_OP_WAKE and UMTX_OP_WAKE_PRIVATE only differ in whether they work across + // processes or not. For Miri, we can treat them the same. + op if op == wake || op == wake_private => { + let Some(futex_ref) = + this.get_sync_or_init(obj, |_| FreeBsdFutex { futex: Default::default() }) + else { + // From Linux implemenation: + // No AllocId, or no live allocation at that AllocId. + // Return an error code. (That seems nicer than silently doing something non-intuitive.) + // This means that if an address gets reused by a new allocation, + // we'll use an independent futex queue for this... that seems acceptable. + return this.set_last_error_and_return(LibcError("EFAULT"), dest); + }; + let futex_ref = futex_ref.futex.clone(); + + // Saturating cast for when usize is smaller than u64. + let count = usize::try_from(val).unwrap_or(usize::MAX); + + // Read the Linux futex wake implementation in Miri to understand why this fence is needed. + this.atomic_fence(AtomicFenceOrd::SeqCst)?; + + // `_umtx_op` doesn't return the amount of woken threads. + let _woken = this.futex_wake( + &futex_ref, + u32::MAX, // we set the bitset to include all bits + count, + )?; + + // From the manual: + // If successful, all requests, except UMTX_SHM_CREAT and UMTX_SHM_LOOKUP + // sub-requests of the UMTX_OP_SHM request, will return zero. + this.write_int(0, dest)?; + interp_ok(()) + } + op => { + throw_unsup_format!("Miri does not support `_umtx_op` syscall with op={}", op) + } + } + } + + /// Parses a `_umtx_time` struct. + /// Returns `None` if the underlying `timespec` struct is invalid. + fn read_umtx_time(&mut self, ut: &MPlaceTy<'tcx>) -> InterpResult<'tcx, Option> { + let this = self.eval_context_mut(); + // Only flag allowed is UMTX_ABSTIME. + let abs_time = this.eval_libc_u32("UMTX_ABSTIME"); + + let timespec_place = this.project_field(ut, 0)?; + // Inner `timespec` must still be valid. + let duration = match this.read_timespec(×pec_place)? { + Some(dur) => dur, + None => return interp_ok(None), + }; + + let flags_place = this.project_field(ut, 1)?; + let flags = this.read_scalar(&flags_place)?.to_u32()?; + let abs_time_flag = flags == abs_time; + + let clock_id_place = this.project_field(ut, 2)?; + let clock_id = this.read_scalar(&clock_id_place)?.to_i32()?; + let timeout_clock = this.translate_umtx_time_clock_id(clock_id)?; + + interp_ok(Some(UmtxTime { timeout: duration, abs_time: abs_time_flag, timeout_clock })) + } + + /// Translate raw FreeBSD clockid to a Miri TimeoutClock. + /// FIXME: share this code with the pthread and clock_gettime shims. + fn translate_umtx_time_clock_id(&mut self, raw_id: i32) -> InterpResult<'tcx, TimeoutClock> { + let this = self.eval_context_mut(); + + let timeout = if raw_id == this.eval_libc_i32("CLOCK_REALTIME") { + // RealTime clock can't be used in isolation mode. + this.check_no_isolation("`_umtx_op` with `CLOCK_REALTIME` timeout")?; + TimeoutClock::RealTime + } else if raw_id == this.eval_libc_i32("CLOCK_MONOTONIC") { + TimeoutClock::Monotonic + } else { + throw_unsup_format!("unsupported clock id {raw_id}"); + }; + interp_ok(timeout) + } +} diff --git a/src/tools/miri/src/shims/unix/fs.rs b/src/tools/miri/src/shims/unix/fs.rs index f8e0c638c90d5..1f6acff0787a6 100644 --- a/src/tools/miri/src/shims/unix/fs.rs +++ b/src/tools/miri/src/shims/unix/fs.rs @@ -2,10 +2,9 @@ use std::borrow::Cow; use std::fs::{ - DirBuilder, File, FileType, Metadata, OpenOptions, ReadDir, read_dir, remove_dir, remove_file, - rename, + DirBuilder, File, FileType, OpenOptions, ReadDir, read_dir, remove_dir, remove_file, rename, }; -use std::io::{self, ErrorKind, IsTerminal, Read, Seek, SeekFrom, Write}; +use std::io::{self, ErrorKind, Read, Seek, SeekFrom, Write}; use std::path::{Path, PathBuf}; use std::time::SystemTime; @@ -14,98 +13,11 @@ use rustc_data_structures::fx::FxHashMap; use self::shims::time::system_time_to_duration; use crate::helpers::check_min_vararg_count; -use crate::shims::files::{EvalContextExt as _, FileDescription, FileDescriptionRef}; +use crate::shims::files::FileHandle; use crate::shims::os_str::bytes_to_os_str; use crate::shims::unix::fd::{FlockOp, UnixFileDescription}; use crate::*; -#[derive(Debug)] -struct FileHandle { - file: File, - writable: bool, -} - -impl FileDescription for FileHandle { - fn name(&self) -> &'static str { - "file" - } - - fn read<'tcx>( - self: FileDescriptionRef, - communicate_allowed: bool, - ptr: Pointer, - len: usize, - ecx: &mut MiriInterpCx<'tcx>, - finish: DynMachineCallback<'tcx, Result>, - ) -> InterpResult<'tcx> { - assert!(communicate_allowed, "isolation should have prevented even opening a file"); - - let result = ecx.read_from_host(&self.file, len, ptr)?; - finish.call(ecx, result) - } - - fn write<'tcx>( - self: FileDescriptionRef, - communicate_allowed: bool, - ptr: Pointer, - len: usize, - ecx: &mut MiriInterpCx<'tcx>, - finish: DynMachineCallback<'tcx, Result>, - ) -> InterpResult<'tcx> { - assert!(communicate_allowed, "isolation should have prevented even opening a file"); - - let result = ecx.write_to_host(&self.file, len, ptr)?; - finish.call(ecx, result) - } - - fn seek<'tcx>( - &self, - communicate_allowed: bool, - offset: SeekFrom, - ) -> InterpResult<'tcx, io::Result> { - assert!(communicate_allowed, "isolation should have prevented even opening a file"); - interp_ok((&mut &self.file).seek(offset)) - } - - fn close<'tcx>( - self, - communicate_allowed: bool, - _ecx: &mut MiriInterpCx<'tcx>, - ) -> InterpResult<'tcx, io::Result<()>> { - assert!(communicate_allowed, "isolation should have prevented even opening a file"); - // We sync the file if it was opened in a mode different than read-only. - if self.writable { - // `File::sync_all` does the checks that are done when closing a file. We do this to - // to handle possible errors correctly. - let result = self.file.sync_all(); - // Now we actually close the file and return the result. - drop(self.file); - interp_ok(result) - } else { - // We drop the file, this closes it but ignores any errors - // produced when closing it. This is done because - // `File::sync_all` cannot be done over files like - // `/dev/urandom` which are read-only. Check - // https://github.com/rust-lang/miri/issues/999#issuecomment-568920439 - // for a deeper discussion. - drop(self.file); - interp_ok(Ok(())) - } - } - - fn metadata<'tcx>(&self) -> InterpResult<'tcx, io::Result> { - interp_ok(self.file.metadata()) - } - - fn is_tty(&self, communicate_allowed: bool) -> bool { - communicate_allowed && self.file.is_terminal() - } - - fn as_unix(&self) -> &dyn UnixFileDescription { - self - } -} - impl UnixFileDescription for FileHandle { fn pread<'tcx>( &self, @@ -590,7 +502,13 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { interp_ok(Scalar::from_i32(this.try_unwrap_io_result(fd)?)) } - fn lseek64(&mut self, fd_num: i32, offset: i128, whence: i32) -> InterpResult<'tcx, Scalar> { + fn lseek64( + &mut self, + fd_num: i32, + offset: i128, + whence: i32, + dest: &MPlaceTy<'tcx>, + ) -> InterpResult<'tcx> { let this = self.eval_context_mut(); // Isolation check is done via `FileDescription` trait. @@ -598,7 +516,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { let seek_from = if whence == this.eval_libc_i32("SEEK_SET") { if offset < 0 { // Negative offsets return `EINVAL`. - return this.set_last_error_and_return_i64(LibcError("EINVAL")); + return this.set_last_error_and_return(LibcError("EINVAL"), dest); } else { SeekFrom::Start(u64::try_from(offset).unwrap()) } @@ -607,19 +525,20 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { } else if whence == this.eval_libc_i32("SEEK_END") { SeekFrom::End(i64::try_from(offset).unwrap()) } else { - return this.set_last_error_and_return_i64(LibcError("EINVAL")); + return this.set_last_error_and_return(LibcError("EINVAL"), dest); }; let communicate = this.machine.communicate(); let Some(fd) = this.machine.fds.get(fd_num) else { - return this.set_last_error_and_return_i64(LibcError("EBADF")); + return this.set_last_error_and_return(LibcError("EBADF"), dest); }; let result = fd.seek(communicate, seek_from)?.map(|offset| i64::try_from(offset).unwrap()); drop(fd); let result = this.try_unwrap_io_result(result)?; - interp_ok(Scalar::from_i64(result)) + this.write_int(result, dest)?; + interp_ok(()) } fn unlink(&mut self, path_op: &OpTy<'tcx>) -> InterpResult<'tcx, Scalar> { diff --git a/src/tools/miri/src/shims/unix/linux_like/epoll.rs b/src/tools/miri/src/shims/unix/linux_like/epoll.rs index de8bcb54aef5b..b489595b4cd04 100644 --- a/src/tools/miri/src/shims/unix/linux_like/epoll.rs +++ b/src/tools/miri/src/shims/unix/linux_like/epoll.rs @@ -153,7 +153,7 @@ impl FileDescription for Epoll { interp_ok(Ok(())) } - fn as_unix(&self) -> &dyn UnixFileDescription { + fn as_unix<'tcx>(&self, _ecx: &MiriInterpCx<'tcx>) -> &dyn UnixFileDescription { self } } @@ -590,7 +590,7 @@ fn check_and_update_one_event_interest<'tcx>( ecx: &MiriInterpCx<'tcx>, ) -> InterpResult<'tcx, bool> { // Get the bitmask of ready events for a file description. - let ready_events_bitmask = fd_ref.as_unix().get_epoll_ready_events()?.get_event_bitmask(ecx); + let ready_events_bitmask = fd_ref.as_unix(ecx).get_epoll_ready_events()?.get_event_bitmask(ecx); let epoll_event_interest = interest.borrow(); let epfd = epoll_event_interest.weak_epfd.upgrade().unwrap(); // This checks if any of the events specified in epoll_event_interest.events diff --git a/src/tools/miri/src/shims/unix/linux_like/eventfd.rs b/src/tools/miri/src/shims/unix/linux_like/eventfd.rs index 936d436bd82d6..ee7deb8d38308 100644 --- a/src/tools/miri/src/shims/unix/linux_like/eventfd.rs +++ b/src/tools/miri/src/shims/unix/linux_like/eventfd.rs @@ -100,7 +100,7 @@ impl FileDescription for EventFd { eventfd_write(buf_place, self, ecx, finish) } - fn as_unix(&self) -> &dyn UnixFileDescription { + fn as_unix<'tcx>(&self, _ecx: &MiriInterpCx<'tcx>) -> &dyn UnixFileDescription { self } } diff --git a/src/tools/miri/src/shims/unix/macos/foreign_items.rs b/src/tools/miri/src/shims/unix/macos/foreign_items.rs index 918fd8dd52dfd..5046e96508227 100644 --- a/src/tools/miri/src/shims/unix/macos/foreign_items.rs +++ b/src/tools/miri/src/shims/unix/macos/foreign_items.rs @@ -222,7 +222,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { this.write_scalar(res, dest)?; } - // Futex primitives + // Synchronization primitives "os_sync_wait_on_address" => { let [addr_op, value_op, size_op, flags_op] = this.check_shim(abi, Conv::C, link_name, args)?; @@ -273,7 +273,6 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { addr_op, size_op, flags_op, /* all */ true, dest, )?; } - "os_unfair_lock_lock" => { let [lock_op] = this.check_shim(abi, Conv::C, link_name, args)?; this.os_unfair_lock_lock(lock_op)?; diff --git a/src/tools/miri/src/shims/unix/unnamed_socket.rs b/src/tools/miri/src/shims/unix/unnamed_socket.rs index e183bfdf0e137..135d8f6bee7e1 100644 --- a/src/tools/miri/src/shims/unix/unnamed_socket.rs +++ b/src/tools/miri/src/shims/unix/unnamed_socket.rs @@ -107,7 +107,7 @@ impl FileDescription for AnonSocket { anonsocket_write(self, ptr, len, ecx, finish) } - fn as_unix(&self) -> &dyn UnixFileDescription { + fn as_unix<'tcx>(&self, _ecx: &MiriInterpCx<'tcx>) -> &dyn UnixFileDescription { self } } diff --git a/src/tools/miri/src/shims/windows/foreign_items.rs b/src/tools/miri/src/shims/windows/foreign_items.rs index fae6170a9e72c..c80858c63639a 100644 --- a/src/tools/miri/src/shims/windows/foreign_items.rs +++ b/src/tools/miri/src/shims/windows/foreign_items.rs @@ -9,14 +9,9 @@ use rustc_target::callconv::{Conv, FnAbi}; use self::shims::windows::handle::{Handle, PseudoHandle}; use crate::shims::os_str::bytes_to_os_str; -use crate::shims::windows::handle::HandleError; use crate::shims::windows::*; use crate::*; -// The NTSTATUS STATUS_INVALID_HANDLE (0xC0000008) encoded as a HRESULT by setting the N bit. -// (https://learn.microsoft.com/en-us/openspecs/windows_protocols/ms-erref/0642cb2f-2075-4469-918c-4441e69c548a) -const STATUS_INVALID_HANDLE: u32 = 0xD0000008; - pub fn is_dyn_sym(name: &str) -> bool { // std does dynamic detection for these symbols matches!( @@ -26,57 +21,107 @@ pub fn is_dyn_sym(name: &str) -> bool { } #[cfg(windows)] -fn win_absolute<'tcx>(path: &Path) -> InterpResult<'tcx, io::Result> { +fn win_get_full_path_name<'tcx>(path: &Path) -> InterpResult<'tcx, io::Result> { // We are on Windows so we can simply let the host do this. interp_ok(path::absolute(path)) } #[cfg(unix)] #[expect(clippy::get_first, clippy::arithmetic_side_effects)] -fn win_absolute<'tcx>(path: &Path) -> InterpResult<'tcx, io::Result> { - // We are on Unix, so we need to implement parts of the logic ourselves. +fn win_get_full_path_name<'tcx>(path: &Path) -> InterpResult<'tcx, io::Result> { + use std::sync::LazyLock; + + use rustc_data_structures::fx::FxHashSet; + + // We are on Unix, so we need to implement parts of the logic ourselves. `path` will use `/` + // separators, and the result should also use `/`. + // See for more + // information about Windows paths. + // This does not handle all corner cases correctly, see + // for more cursed + // examples. let bytes = path.as_os_str().as_encoded_bytes(); - // If it starts with `//` (these were backslashes but are already converted) - // then this is a magic special path, we just leave it unchanged. - if bytes.get(0).copied() == Some(b'/') && bytes.get(1).copied() == Some(b'/') { + // If it starts with `//./` or `//?/` then this is a magic special path, we just leave it + // unchanged. + if bytes.get(0).copied() == Some(b'/') + && bytes.get(1).copied() == Some(b'/') + && matches!(bytes.get(2), Some(b'.' | b'?')) + && bytes.get(3).copied() == Some(b'/') + { return interp_ok(Ok(path.into())); }; - // Special treatment for Windows' magic filenames: they are treated as being relative to `\\.\`. - let magic_filenames = &[ - "CON", "PRN", "AUX", "NUL", "COM1", "COM2", "COM3", "COM4", "COM5", "COM6", "COM7", "COM8", - "COM9", "LPT1", "LPT2", "LPT3", "LPT4", "LPT5", "LPT6", "LPT7", "LPT8", "LPT9", - ]; - if magic_filenames.iter().any(|m| m.as_bytes() == bytes) { - let mut result: Vec = br"//./".into(); + let is_unc = bytes.starts_with(b"//"); + // Special treatment for Windows' magic filenames: they are treated as being relative to `//./`. + static MAGIC_FILENAMES: LazyLock> = LazyLock::new(|| { + FxHashSet::from_iter([ + "CON", "PRN", "AUX", "NUL", "COM1", "COM2", "COM3", "COM4", "COM5", "COM6", "COM7", + "COM8", "COM9", "LPT1", "LPT2", "LPT3", "LPT4", "LPT5", "LPT6", "LPT7", "LPT8", "LPT9", + ]) + }); + if str::from_utf8(bytes).is_ok_and(|s| MAGIC_FILENAMES.contains(&*s.to_ascii_uppercase())) { + let mut result: Vec = b"//./".into(); result.extend(bytes); return interp_ok(Ok(bytes_to_os_str(&result)?.into())); } // Otherwise we try to do something kind of close to what Windows does, but this is probably not - // right in all cases. We iterate over the components between `/`, and remove trailing `.`, - // except that trailing `..` remain unchanged. - let mut result = vec![]; + // right in all cases. + let mut result: Vec<&[u8]> = vec![]; // will be a vector of components, joined by `/`. let mut bytes = bytes; // the remaining bytes to process - loop { - let len = bytes.iter().position(|&b| b == b'/').unwrap_or(bytes.len()); - let mut component = &bytes[..len]; - if len >= 2 && component[len - 1] == b'.' && component[len - 2] != b'.' { - // Strip trailing `.` - component = &component[..len - 1]; + let mut stop = false; + while !stop { + // Find next component, and advance `bytes`. + let mut component = match bytes.iter().position(|&b| b == b'/') { + Some(pos) => { + let (component, tail) = bytes.split_at(pos); + bytes = &tail[1..]; // remove the `/`. + component + } + None => { + // There's no more `/`. + stop = true; + let component = bytes; + bytes = &[]; + component + } + }; + // `NUL` and only `NUL` also gets changed to be relative to `//./` later in the path. + // (This changed with Windows 11; previously, all magic filenames behaved like this.) + // Also, this does not apply to UNC paths. + if !is_unc && component.eq_ignore_ascii_case(b"NUL") { + let mut result: Vec = b"//./".into(); + result.extend(component); + return interp_ok(Ok(bytes_to_os_str(&result)?.into())); } - // Add this component to output. - result.extend(component); - // Prepare next iteration. - if len < bytes.len() { - // There's a component after this; add `/` and process remaining bytes. - result.push(b'/'); - bytes = &bytes[len + 1..]; + // Deal with `..` -- Windows handles this entirely syntactically. + if component == b".." { + // Remove previous component, unless we are at the "root" already, then just ignore the `..`. + let is_root = { + // Paths like `/C:`. + result.len() == 2 && matches!(result[0], []) && matches!(result[1], [_, b':']) + } || { + // Paths like `//server/share` + result.len() == 4 && matches!(result[0], []) && matches!(result[1], []) + }; + if !is_root { + result.pop(); + } continue; - } else { - // This was the last component and it did not have a trailing `/`. - break; } + // Preserve this component. + // Strip trailing `.`, but preserve trailing `..`. But not for UNC paths! + let len = component.len(); + if !is_unc && len >= 2 && component[len - 1] == b'.' && component[len - 2] != b'.' { + component = &component[..len - 1]; + } + // Add this component to output. + result.push(component); } - // Let the host `absolute` function do working-dir handling + // Drive letters must be followed by a `/`. + if result.len() == 2 && matches!(result[0], []) && matches!(result[1], [_, b':']) { + result.push(&[]); + } + // Let the host `absolute` function do working-dir handling. + let result = result.join(&b'/'); interp_ok(path::absolute(bytes_to_os_str(&result)?)) } @@ -231,7 +276,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { } let filename = this.read_path_from_wide_str(filename)?; - let result = match win_absolute(&filename)? { + let result = match win_get_full_path_name(&filename)? { Err(err) => { this.set_last_error(err)?; Scalar::from_u32(0) // return zero upon failure @@ -246,6 +291,37 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { }; this.write_scalar(result, dest)?; } + "CreateFileW" => { + let [ + file_name, + desired_access, + share_mode, + security_attributes, + creation_disposition, + flags_and_attributes, + template_file, + ] = this.check_shim(abi, sys_conv, link_name, args)?; + let handle = this.CreateFileW( + file_name, + desired_access, + share_mode, + security_attributes, + creation_disposition, + flags_and_attributes, + template_file, + )?; + this.write_scalar(handle.to_scalar(this), dest)?; + } + "GetFileInformationByHandle" => { + let [handle, info] = this.check_shim(abi, sys_conv, link_name, args)?; + let res = this.GetFileInformationByHandle(handle, info)?; + this.write_scalar(res, dest)?; + } + "DeleteFileW" => { + let [file_name] = this.check_shim(abi, sys_conv, link_name, args)?; + let res = this.DeleteFileW(file_name)?; + this.write_scalar(res, dest)?; + } // Allocation "HeapAlloc" => { @@ -325,6 +401,25 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { let last_error = this.get_last_error()?; this.write_scalar(last_error, dest)?; } + "RtlNtStatusToDosError" => { + let [status] = this.check_shim(abi, sys_conv, link_name, args)?; + let status = this.read_scalar(status)?.to_u32()?; + let err = match status { + // STATUS_MEDIA_WRITE_PROTECTED => ERROR_WRITE_PROTECT + 0xC00000A2 => 19, + // STATUS_FILE_INVALID => ERROR_FILE_INVALID + 0xC0000098 => 1006, + // STATUS_DISK_FULL => ERROR_DISK_FULL + 0xC000007F => 112, + // STATUS_IO_DEVICE_ERROR => ERROR_IO_DEVICE + 0xC0000185 => 1117, + // STATUS_ACCESS_DENIED => ERROR_ACCESS_DENIED + 0xC0000022 => 5, + // Anything without an error code => ERROR_MR_MID_NOT_FOUND + _ => 317, + }; + this.write_scalar(Scalar::from_i32(err), dest)?; + } // Querying system information "GetSystemInfo" => { @@ -498,52 +593,37 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { "SetThreadDescription" => { let [handle, name] = this.check_shim(abi, sys_conv, link_name, args)?; - let handle = this.read_scalar(handle)?; + let handle = this.read_handle(handle, "SetThreadDescription")?; let name = this.read_wide_str(this.read_pointer(name)?)?; - let thread = match Handle::try_from_scalar(handle, this)? { - Ok(Handle::Thread(thread)) => Ok(thread), - Ok(Handle::Pseudo(PseudoHandle::CurrentThread)) => Ok(this.active_thread()), - Ok(_) | Err(HandleError::InvalidHandle) => - this.invalid_handle("SetThreadDescription")?, - Err(HandleError::ThreadNotFound(e)) => Err(e), - }; - let res = match thread { - Ok(thread) => { - // FIXME: use non-lossy conversion - this.set_thread_name(thread, String::from_utf16_lossy(&name).into_bytes()); - Scalar::from_u32(0) - } - Err(_) => Scalar::from_u32(STATUS_INVALID_HANDLE), + let thread = match handle { + Handle::Thread(thread) => thread, + Handle::Pseudo(PseudoHandle::CurrentThread) => this.active_thread(), + _ => this.invalid_handle("SetThreadDescription")?, }; - - this.write_scalar(res, dest)?; + // FIXME: use non-lossy conversion + this.set_thread_name(thread, String::from_utf16_lossy(&name).into_bytes()); + this.write_scalar(Scalar::from_u32(0), dest)?; } "GetThreadDescription" => { let [handle, name_ptr] = this.check_shim(abi, sys_conv, link_name, args)?; - let handle = this.read_scalar(handle)?; + let handle = this.read_handle(handle, "GetThreadDescription")?; let name_ptr = this.deref_pointer_as(name_ptr, this.machine.layouts.mut_raw_ptr)?; // the pointer where we should store the ptr to the name - let thread = match Handle::try_from_scalar(handle, this)? { - Ok(Handle::Thread(thread)) => Ok(thread), - Ok(Handle::Pseudo(PseudoHandle::CurrentThread)) => Ok(this.active_thread()), - Ok(_) | Err(HandleError::InvalidHandle) => - this.invalid_handle("GetThreadDescription")?, - Err(HandleError::ThreadNotFound(e)) => Err(e), - }; - let (name, res) = match thread { - Ok(thread) => { - // Looks like the default thread name is empty. - let name = this.get_thread_name(thread).unwrap_or(b"").to_owned(); - let name = this.alloc_os_str_as_wide_str( - bytes_to_os_str(&name)?, - MiriMemoryKind::WinLocal.into(), - )?; - (Scalar::from_maybe_pointer(name, this), Scalar::from_u32(0)) - } - Err(_) => (Scalar::null_ptr(this), Scalar::from_u32(STATUS_INVALID_HANDLE)), + let thread = match handle { + Handle::Thread(thread) => thread, + Handle::Pseudo(PseudoHandle::CurrentThread) => this.active_thread(), + _ => this.invalid_handle("GetThreadDescription")?, }; + // Looks like the default thread name is empty. + let name = this.get_thread_name(thread).unwrap_or(b"").to_owned(); + let name = this.alloc_os_str_as_wide_str( + bytes_to_os_str(&name)?, + MiriMemoryKind::WinLocal.into(), + )?; + let name = Scalar::from_maybe_pointer(name, this); + let res = Scalar::from_u32(0); this.write_scalar(name, &name_ptr)?; this.write_scalar(res, dest)?; @@ -638,11 +718,11 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { let [handle, filename, size] = this.check_shim(abi, sys_conv, link_name, args)?; this.check_no_isolation("`GetModuleFileNameW`")?; - let handle = this.read_target_usize(handle)?; + let handle = this.read_handle(handle, "GetModuleFileNameW")?; let filename = this.read_pointer(filename)?; let size = this.read_scalar(size)?.to_u32()?; - if handle != 0 { + if handle != Handle::Null { throw_unsup_format!("`GetModuleFileNameW` only supports the NULL handle"); } diff --git a/src/tools/miri/src/shims/windows/fs.rs b/src/tools/miri/src/shims/windows/fs.rs new file mode 100644 index 0000000000000..7561bf45219ba --- /dev/null +++ b/src/tools/miri/src/shims/windows/fs.rs @@ -0,0 +1,421 @@ +use std::fs::{Metadata, OpenOptions}; +use std::io; +use std::path::PathBuf; +use std::time::SystemTime; + +use bitflags::bitflags; + +use crate::shims::files::{FileDescription, FileHandle}; +use crate::shims::windows::handle::{EvalContextExt as _, Handle}; +use crate::*; + +#[derive(Debug)] +pub struct DirHandle { + pub(crate) path: PathBuf, +} + +impl FileDescription for DirHandle { + fn name(&self) -> &'static str { + "directory" + } + + fn metadata<'tcx>(&self) -> InterpResult<'tcx, io::Result> { + interp_ok(self.path.metadata()) + } + + fn close<'tcx>( + self, + _communicate_allowed: bool, + _ecx: &mut MiriInterpCx<'tcx>, + ) -> InterpResult<'tcx, io::Result<()>> { + interp_ok(Ok(())) + } +} + +/// Windows supports handles without any read/write/delete permissions - these handles can get +/// metadata, but little else. We represent that by storing the metadata from the time the handle +/// was opened. +#[derive(Debug)] +pub struct MetadataHandle { + pub(crate) meta: Metadata, +} + +impl FileDescription for MetadataHandle { + fn name(&self) -> &'static str { + "metadata-only" + } + + fn metadata<'tcx>(&self) -> InterpResult<'tcx, io::Result> { + interp_ok(Ok(self.meta.clone())) + } + + fn close<'tcx>( + self, + _communicate_allowed: bool, + _ecx: &mut MiriInterpCx<'tcx>, + ) -> InterpResult<'tcx, io::Result<()>> { + interp_ok(Ok(())) + } +} + +#[derive(Copy, Clone, Debug, PartialEq)] +enum CreationDisposition { + CreateAlways, + CreateNew, + OpenAlways, + OpenExisting, + TruncateExisting, +} + +impl CreationDisposition { + fn new<'tcx>( + value: u32, + ecx: &mut MiriInterpCx<'tcx>, + ) -> InterpResult<'tcx, CreationDisposition> { + let create_always = ecx.eval_windows_u32("c", "CREATE_ALWAYS"); + let create_new = ecx.eval_windows_u32("c", "CREATE_NEW"); + let open_always = ecx.eval_windows_u32("c", "OPEN_ALWAYS"); + let open_existing = ecx.eval_windows_u32("c", "OPEN_EXISTING"); + let truncate_existing = ecx.eval_windows_u32("c", "TRUNCATE_EXISTING"); + + let out = if value == create_always { + CreationDisposition::CreateAlways + } else if value == create_new { + CreationDisposition::CreateNew + } else if value == open_always { + CreationDisposition::OpenAlways + } else if value == open_existing { + CreationDisposition::OpenExisting + } else if value == truncate_existing { + CreationDisposition::TruncateExisting + } else { + throw_unsup_format!("CreateFileW: Unsupported creation disposition: {value}"); + }; + interp_ok(out) + } +} + +bitflags! { + #[derive(PartialEq)] + struct FileAttributes: u32 { + const ZERO = 0; + const NORMAL = 1 << 0; + /// This must be passed to allow getting directory handles. If not passed, we error on trying + /// to open directories + const BACKUP_SEMANTICS = 1 << 1; + /// Open a reparse point as a regular file - this is basically similar to 'readlink' in Unix + /// terminology. A reparse point is a file with custom logic when navigated to, of which + /// a symlink is one specific example. + const OPEN_REPARSE = 1 << 2; + } +} + +impl FileAttributes { + fn new<'tcx>( + mut value: u32, + ecx: &mut MiriInterpCx<'tcx>, + ) -> InterpResult<'tcx, FileAttributes> { + let file_attribute_normal = ecx.eval_windows_u32("c", "FILE_ATTRIBUTE_NORMAL"); + let file_flag_backup_semantics = ecx.eval_windows_u32("c", "FILE_FLAG_BACKUP_SEMANTICS"); + let file_flag_open_reparse_point = + ecx.eval_windows_u32("c", "FILE_FLAG_OPEN_REPARSE_POINT"); + + let mut out = FileAttributes::ZERO; + if value & file_flag_backup_semantics != 0 { + value &= !file_flag_backup_semantics; + out |= FileAttributes::BACKUP_SEMANTICS; + } + if value & file_flag_open_reparse_point != 0 { + value &= !file_flag_open_reparse_point; + out |= FileAttributes::OPEN_REPARSE; + } + if value & file_attribute_normal != 0 { + value &= !file_attribute_normal; + out |= FileAttributes::NORMAL; + } + + if value != 0 { + throw_unsup_format!("CreateFileW: Unsupported flags_and_attributes: {value}"); + } + + if out == FileAttributes::ZERO { + // NORMAL is equivalent to 0. Avoid needing to check both cases by unifying the two. + out = FileAttributes::NORMAL; + } + interp_ok(out) + } +} + +impl<'tcx> EvalContextExt<'tcx> for crate::MiriInterpCx<'tcx> {} +#[allow(non_snake_case)] +pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { + fn CreateFileW( + &mut self, + file_name: &OpTy<'tcx>, // LPCWSTR + desired_access: &OpTy<'tcx>, // DWORD + share_mode: &OpTy<'tcx>, // DWORD + security_attributes: &OpTy<'tcx>, // LPSECURITY_ATTRIBUTES + creation_disposition: &OpTy<'tcx>, // DWORD + flags_and_attributes: &OpTy<'tcx>, // DWORD + template_file: &OpTy<'tcx>, // HANDLE + ) -> InterpResult<'tcx, Handle> { + // ^ Returns HANDLE + use CreationDisposition::*; + + let this = self.eval_context_mut(); + this.assert_target_os("windows", "CreateFileW"); + this.check_no_isolation("`CreateFileW`")?; + + // This function appears to always set the error to 0. This is important for some flag + // combinations, which may set error code on success. + this.set_last_error(IoError::Raw(Scalar::from_i32(0)))?; + + let file_name = this.read_path_from_wide_str(this.read_pointer(file_name)?)?; + let mut desired_access = this.read_scalar(desired_access)?.to_u32()?; + let share_mode = this.read_scalar(share_mode)?.to_u32()?; + let security_attributes = this.read_pointer(security_attributes)?; + let creation_disposition = this.read_scalar(creation_disposition)?.to_u32()?; + let flags_and_attributes = this.read_scalar(flags_and_attributes)?.to_u32()?; + let template_file = this.read_target_usize(template_file)?; + + let generic_read = this.eval_windows_u32("c", "GENERIC_READ"); + let generic_write = this.eval_windows_u32("c", "GENERIC_WRITE"); + + let file_share_delete = this.eval_windows_u32("c", "FILE_SHARE_DELETE"); + let file_share_read = this.eval_windows_u32("c", "FILE_SHARE_READ"); + let file_share_write = this.eval_windows_u32("c", "FILE_SHARE_WRITE"); + + let creation_disposition = CreationDisposition::new(creation_disposition, this)?; + let attributes = FileAttributes::new(flags_and_attributes, this)?; + + if share_mode != (file_share_delete | file_share_read | file_share_write) { + throw_unsup_format!("CreateFileW: Unsupported share mode: {share_mode}"); + } + if !this.ptr_is_null(security_attributes)? { + throw_unsup_format!("CreateFileW: Security attributes are not supported"); + } + + if attributes.contains(FileAttributes::OPEN_REPARSE) && creation_disposition == CreateAlways + { + throw_machine_stop!(TerminationInfo::Abort("Invalid CreateFileW argument combination: FILE_FLAG_OPEN_REPARSE_POINT with CREATE_ALWAYS".to_string())); + } + + if template_file != 0 { + throw_unsup_format!("CreateFileW: Template files are not supported"); + } + + // We need to know if the file is a directory to correctly open directory handles. + // This is racy, but currently the stdlib doesn't appear to offer a better solution. + let is_dir = file_name.is_dir(); + + // BACKUP_SEMANTICS is how Windows calls the act of opening a directory handle. + if !attributes.contains(FileAttributes::BACKUP_SEMANTICS) && is_dir { + this.set_last_error(IoError::WindowsError("ERROR_ACCESS_DENIED"))?; + return interp_ok(Handle::Invalid); + } + + let desired_read = desired_access & generic_read != 0; + let desired_write = desired_access & generic_write != 0; + + let mut options = OpenOptions::new(); + if desired_read { + desired_access &= !generic_read; + options.read(true); + } + if desired_write { + desired_access &= !generic_write; + options.write(true); + } + + if desired_access != 0 { + throw_unsup_format!( + "CreateFileW: Unsupported bits set for access mode: {desired_access:#x}" + ); + } + + // Per the documentation: + // If the specified file exists and is writable, the function truncates the file, + // the function succeeds, and last-error code is set to ERROR_ALREADY_EXISTS. + // If the specified file does not exist and is a valid path, a new file is created, + // the function succeeds, and the last-error code is set to zero. + // https://learn.microsoft.com/en-us/windows/win32/api/fileapi/nf-fileapi-createfilew + // + // This is racy, but there doesn't appear to be an std API that both succeeds if a + // file exists but tells us it isn't new. Either we accept racing one way or another, + // or we use an iffy heuristic like file creation time. This implementation prefers + // to fail in the direction of erroring more often. + if let CreateAlways | OpenAlways = creation_disposition + && file_name.exists() + { + this.set_last_error(IoError::WindowsError("ERROR_ALREADY_EXISTS"))?; + } + + let handle = if is_dir { + // Open this as a directory. + let fd_num = this.machine.fds.insert_new(DirHandle { path: file_name }); + Ok(Handle::File(fd_num)) + } else if creation_disposition == OpenExisting && !(desired_read || desired_write) { + // Windows supports handles with no permissions. These allow things such as reading + // metadata, but not file content. + file_name.metadata().map(|meta| { + let fd_num = this.machine.fds.insert_new(MetadataHandle { meta }); + Handle::File(fd_num) + }) + } else { + // Open this as a standard file. + match creation_disposition { + CreateAlways | OpenAlways => { + options.create(true); + if creation_disposition == CreateAlways { + options.truncate(true); + } + } + CreateNew => { + options.create_new(true); + // Per `create_new` documentation: + // The file must be opened with write or append access in order to create a new file. + // https://doc.rust-lang.org/std/fs/struct.OpenOptions.html#method.create_new + if !desired_write { + options.append(true); + } + } + OpenExisting => {} // Default options + TruncateExisting => { + options.truncate(true); + } + } + + options.open(file_name).map(|file| { + let fd_num = + this.machine.fds.insert_new(FileHandle { file, writable: desired_write }); + Handle::File(fd_num) + }) + }; + + match handle { + Ok(handle) => interp_ok(handle), + Err(e) => { + this.set_last_error(e)?; + interp_ok(Handle::Invalid) + } + } + } + + fn GetFileInformationByHandle( + &mut self, + file: &OpTy<'tcx>, // HANDLE + file_information: &OpTy<'tcx>, // LPBY_HANDLE_FILE_INFORMATION + ) -> InterpResult<'tcx, Scalar> { + // ^ Returns BOOL (i32 on Windows) + let this = self.eval_context_mut(); + this.assert_target_os("windows", "GetFileInformationByHandle"); + this.check_no_isolation("`GetFileInformationByHandle`")?; + + let file = this.read_handle(file, "GetFileInformationByHandle")?; + let file_information = this.deref_pointer_as( + file_information, + this.windows_ty_layout("BY_HANDLE_FILE_INFORMATION"), + )?; + + let fd_num = if let Handle::File(fd_num) = file { + fd_num + } else { + this.invalid_handle("GetFileInformationByHandle")? + }; + + let Some(desc) = this.machine.fds.get(fd_num) else { + this.invalid_handle("GetFileInformationByHandle")? + }; + + let metadata = match desc.metadata()? { + Ok(meta) => meta, + Err(e) => { + this.set_last_error(e)?; + return interp_ok(this.eval_windows("c", "FALSE")); + } + }; + + let size = metadata.len(); + + let file_type = metadata.file_type(); + let attributes = if file_type.is_dir() { + this.eval_windows_u32("c", "FILE_ATTRIBUTE_DIRECTORY") + } else if file_type.is_file() { + this.eval_windows_u32("c", "FILE_ATTRIBUTE_NORMAL") + } else { + this.eval_windows_u32("c", "FILE_ATTRIBUTE_DEVICE") + }; + + // Per the Windows documentation: + // "If the underlying file system does not support the [...] time, this member is zero (0)." + // https://learn.microsoft.com/en-us/windows/win32/api/fileapi/ns-fileapi-by_handle_file_information + let created = extract_windows_epoch(this, metadata.created())?.unwrap_or((0, 0)); + let accessed = extract_windows_epoch(this, metadata.accessed())?.unwrap_or((0, 0)); + let written = extract_windows_epoch(this, metadata.modified())?.unwrap_or((0, 0)); + + this.write_int_fields_named(&[("dwFileAttributes", attributes.into())], &file_information)?; + write_filetime_field(this, &file_information, "ftCreationTime", created)?; + write_filetime_field(this, &file_information, "ftLastAccessTime", accessed)?; + write_filetime_field(this, &file_information, "ftLastWriteTime", written)?; + this.write_int_fields_named( + &[ + ("dwVolumeSerialNumber", 0), + ("nFileSizeHigh", (size >> 32).into()), + ("nFileSizeLow", (size & 0xFFFFFFFF).into()), + ("nNumberOfLinks", 1), + ("nFileIndexHigh", 0), + ("nFileIndexLow", 0), + ], + &file_information, + )?; + + interp_ok(this.eval_windows("c", "TRUE")) + } + + fn DeleteFileW( + &mut self, + file_name: &OpTy<'tcx>, // LPCWSTR + ) -> InterpResult<'tcx, Scalar> { + // ^ Returns BOOL (i32 on Windows) + let this = self.eval_context_mut(); + this.assert_target_os("windows", "DeleteFileW"); + this.check_no_isolation("`DeleteFileW`")?; + + let file_name = this.read_path_from_wide_str(this.read_pointer(file_name)?)?; + match std::fs::remove_file(file_name) { + Ok(_) => interp_ok(this.eval_windows("c", "TRUE")), + Err(e) => { + this.set_last_error(e)?; + interp_ok(this.eval_windows("c", "FALSE")) + } + } + } +} + +/// Windows FILETIME is measured in 100-nanosecs since 1601 +fn extract_windows_epoch<'tcx>( + ecx: &MiriInterpCx<'tcx>, + time: io::Result, +) -> InterpResult<'tcx, Option<(u32, u32)>> { + match time.ok() { + Some(time) => { + let duration = ecx.system_time_since_windows_epoch(&time)?; + let duration_ticks = ecx.windows_ticks_for(duration)?; + #[allow(clippy::cast_possible_truncation)] + interp_ok(Some((duration_ticks as u32, (duration_ticks >> 32) as u32))) + } + None => interp_ok(None), + } +} + +fn write_filetime_field<'tcx>( + cx: &mut MiriInterpCx<'tcx>, + val: &MPlaceTy<'tcx>, + name: &str, + (low, high): (u32, u32), +) -> InterpResult<'tcx> { + cx.write_int_fields_named( + &[("dwLowDateTime", low.into()), ("dwHighDateTime", high.into())], + &cx.project_field_named(val, name)?, + ) +} diff --git a/src/tools/miri/src/shims/windows/handle.rs b/src/tools/miri/src/shims/windows/handle.rs index c4eb11fbd3f97..eec6c62bebc73 100644 --- a/src/tools/miri/src/shims/windows/handle.rs +++ b/src/tools/miri/src/shims/windows/handle.rs @@ -3,6 +3,7 @@ use std::mem::variant_count; use rustc_abi::HasDataLayout; use crate::concurrency::thread::ThreadNotFound; +use crate::shims::files::FdNum; use crate::*; #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] @@ -16,6 +17,8 @@ pub enum Handle { Null, Pseudo(PseudoHandle), Thread(ThreadId), + File(FdNum), + Invalid, } impl PseudoHandle { @@ -47,12 +50,18 @@ impl Handle { const NULL_DISCRIMINANT: u32 = 0; const PSEUDO_DISCRIMINANT: u32 = 1; const THREAD_DISCRIMINANT: u32 = 2; + const FILE_DISCRIMINANT: u32 = 3; + // Chosen to ensure Handle::Invalid encodes to -1. Update this value if there are ever more than + // 8 discriminants. + const INVALID_DISCRIMINANT: u32 = 7; fn discriminant(self) -> u32 { match self { Self::Null => Self::NULL_DISCRIMINANT, Self::Pseudo(_) => Self::PSEUDO_DISCRIMINANT, Self::Thread(_) => Self::THREAD_DISCRIMINANT, + Self::File(_) => Self::FILE_DISCRIMINANT, + Self::Invalid => Self::INVALID_DISCRIMINANT, } } @@ -61,17 +70,27 @@ impl Handle { Self::Null => 0, Self::Pseudo(pseudo_handle) => pseudo_handle.value(), Self::Thread(thread) => thread.to_u32(), + #[expect(clippy::cast_sign_loss)] + Self::File(fd) => fd as u32, + // INVALID_HANDLE_VALUE is -1. This fact is explicitly declared or implied in several + // pages of Windows documentation. + // 1: https://learn.microsoft.com/en-us/dotnet/api/microsoft.win32.safehandles.safefilehandle?view=net-9.0 + // 2: https://learn.microsoft.com/en-us/cpp/c-runtime-library/reference/get-osfhandle?view=msvc-170 + Self::Invalid => 0x1FFFFFFF, } } fn packed_disc_size() -> u32 { - // ceil(log2(x)) is how many bits it takes to store x numbers + // ceil(log2(x)) is how many bits it takes to store x numbers. + // We ensure that INVALID_HANDLE_VALUE (0xFFFFFFFF) decodes to Handle::Invalid. + // see https://devblogs.microsoft.com/oldnewthing/20230914-00/?p=108766 for more detail on + // INVALID_HANDLE_VALUE. let variant_count = variant_count::(); - // however, std's ilog2 is floor(log2(x)) + // However, std's ilog2 is floor(log2(x)). let floor_log2 = variant_count.ilog2(); - // we need to add one for non powers of two to compensate for the difference + // We need to add one for non powers of two to compensate for the difference. #[expect(clippy::arithmetic_side_effects)] // cannot overflow if variant_count.is_power_of_two() { floor_log2 } else { floor_log2 + 1 } } @@ -105,6 +124,13 @@ impl Handle { Self::NULL_DISCRIMINANT if data == 0 => Some(Self::Null), Self::PSEUDO_DISCRIMINANT => Some(Self::Pseudo(PseudoHandle::from_value(data)?)), Self::THREAD_DISCRIMINANT => Some(Self::Thread(ThreadId::new_unchecked(data))), + #[expect(clippy::cast_possible_wrap)] + Self::FILE_DISCRIMINANT => { + // This cast preserves all bits. + assert_eq!(size_of_val(&data), size_of::()); + Some(Self::File(data as FdNum)) + } + Self::INVALID_DISCRIMINANT => Some(Self::Invalid), _ => None, } } @@ -139,7 +165,7 @@ impl Handle { /// Structurally invalid handles return [`HandleError::InvalidHandle`]. /// If the handle is structurally valid but semantically invalid, e.g. a for non-existent thread /// ID, returns [`HandleError::ThreadNotFound`]. - pub fn try_from_scalar<'tcx>( + fn try_from_scalar<'tcx>( handle: Scalar, cx: &MiriInterpCx<'tcx>, ) -> InterpResult<'tcx, Result> { @@ -171,6 +197,27 @@ impl<'tcx> EvalContextExt<'tcx> for crate::MiriInterpCx<'tcx> {} #[allow(non_snake_case)] pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { + /// Convert a scalar into a structured `Handle`. + /// If the handle is invalid, or references a non-existent item, execution is aborted. + #[track_caller] + fn read_handle(&self, handle: &OpTy<'tcx>, function_name: &str) -> InterpResult<'tcx, Handle> { + let this = self.eval_context_ref(); + let handle = this.read_scalar(handle)?; + match Handle::try_from_scalar(handle, this)? { + Ok(handle) => interp_ok(handle), + Err(HandleError::InvalidHandle) => + throw_machine_stop!(TerminationInfo::Abort(format!( + "invalid handle {} passed to {function_name}", + handle.to_target_isize(this)?, + ))), + Err(HandleError::ThreadNotFound(_)) => + throw_machine_stop!(TerminationInfo::Abort(format!( + "invalid thread ID {} passed to {function_name}", + handle.to_target_isize(this)?, + ))), + } + } + fn invalid_handle(&mut self, function_name: &str) -> InterpResult<'tcx, !> { throw_machine_stop!(TerminationInfo::Abort(format!( "invalid handle passed to `{function_name}`" @@ -180,15 +227,38 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { fn CloseHandle(&mut self, handle_op: &OpTy<'tcx>) -> InterpResult<'tcx, Scalar> { let this = self.eval_context_mut(); - let handle = this.read_scalar(handle_op)?; - let ret = match Handle::try_from_scalar(handle, this)? { - Ok(Handle::Thread(thread)) => { + let handle = this.read_handle(handle_op, "CloseHandle")?; + let ret = match handle { + Handle::Thread(thread) => { this.detach_thread(thread, /*allow_terminated_joined*/ true)?; this.eval_windows("c", "TRUE") } + Handle::File(fd_num) => + if let Some(fd) = this.machine.fds.remove(fd_num) { + let err = fd.close_ref(this.machine.communicate(), this)?; + if let Err(e) = err { + this.set_last_error(e)?; + this.eval_windows("c", "FALSE") + } else { + this.eval_windows("c", "TRUE") + } + } else { + this.invalid_handle("CloseHandle")? + }, _ => this.invalid_handle("CloseHandle")?, }; interp_ok(ret) } } + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_invalid_encoding() { + // Ensure the invalid handle encodes to `u32::MAX`/`INVALID_HANDLE_VALUE`. + assert_eq!(Handle::Invalid.to_packed(), u32::MAX) + } +} diff --git a/src/tools/miri/src/shims/windows/mod.rs b/src/tools/miri/src/shims/windows/mod.rs index 892bd6924fc93..442c5a0dd11fd 100644 --- a/src/tools/miri/src/shims/windows/mod.rs +++ b/src/tools/miri/src/shims/windows/mod.rs @@ -1,12 +1,14 @@ pub mod foreign_items; mod env; +mod fs; mod handle; mod sync; mod thread; // All the Windows-specific extension traits pub use self::env::{EvalContextExt as _, WindowsEnvVars}; +pub use self::fs::EvalContextExt as _; pub use self::handle::EvalContextExt as _; pub use self::sync::EvalContextExt as _; pub use self::thread::EvalContextExt as _; diff --git a/src/tools/miri/src/shims/windows/thread.rs b/src/tools/miri/src/shims/windows/thread.rs index 5db554044227c..d5f9ed4e968ea 100644 --- a/src/tools/miri/src/shims/windows/thread.rs +++ b/src/tools/miri/src/shims/windows/thread.rs @@ -62,14 +62,14 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { ) -> InterpResult<'tcx, Scalar> { let this = self.eval_context_mut(); - let handle = this.read_scalar(handle_op)?; + let handle = this.read_handle(handle_op, "WaitForSingleObject")?; let timeout = this.read_scalar(timeout_op)?.to_u32()?; - let thread = match Handle::try_from_scalar(handle, this)? { - Ok(Handle::Thread(thread)) => thread, + let thread = match handle { + Handle::Thread(thread) => thread, // Unlike on posix, the outcome of joining the current thread is not documented. // On current Windows, it just deadlocks. - Ok(Handle::Pseudo(PseudoHandle::CurrentThread)) => this.active_thread(), + Handle::Pseudo(PseudoHandle::CurrentThread) => this.active_thread(), _ => this.invalid_handle("WaitForSingleObject")?, }; diff --git a/src/tools/miri/test-cargo-miri/Cargo.lock b/src/tools/miri/test-cargo-miri/Cargo.lock index 8f618e7ffb38f..32119426184d4 100644 --- a/src/tools/miri/test-cargo-miri/Cargo.lock +++ b/src/tools/miri/test-cargo-miri/Cargo.lock @@ -1,12 +1,12 @@ # This file is automatically @generated by Cargo. # It is not intended for manual editing. -version = 3 +version = 4 [[package]] name = "autocfg" -version = "1.2.0" +version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f1fdabc7756949593fe60f30ec81974b613357de856987752631dea1e3394c80" +checksum = "ace50bade8e6234aa140d9a2f552bbee1db4d353f69b8217bc503490fc1a9f26" [[package]] name = "byteorder" @@ -96,15 +96,15 @@ version = "0.1.0" [[package]] name = "once_cell" -version = "1.19.0" +version = "1.21.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92" +checksum = "42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d" [[package]] name = "proc-macro2" -version = "1.0.79" +version = "1.0.94" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e835ff2298f5721608eb1a980ecaee1aef2c132bf95ecc026a11b7bf3c01c02e" +checksum = "a31971752e70b8b2686d7e46ec17fb38dad4051d94024c88df49b667caea9c84" dependencies = [ "unicode-ident", ] @@ -129,6 +129,6 @@ version = "0.1.0" [[package]] name = "unicode-ident" -version = "1.0.12" +version = "1.0.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b" +checksum = "5a5f39404a5da50712a4c1eecf25e90dd62b613502b7e925fd4e4d19b5c96512" diff --git a/src/tools/miri/test-cargo-miri/Cargo.toml b/src/tools/miri/test-cargo-miri/Cargo.toml index 574f1d05a6fae..f5092a4748f31 100644 --- a/src/tools/miri/test-cargo-miri/Cargo.toml +++ b/src/tools/miri/test-cargo-miri/Cargo.toml @@ -6,7 +6,7 @@ exclude = ["no-std-smoke"] # it wants to be panic="abort" name = "cargo-miri-test" version = "0.1.0" authors = ["Miri Team"] -edition = "2018" +edition = "2024" [dependencies] byteorder = "1.0" diff --git a/src/tools/miri/test-cargo-miri/run-test.py b/src/tools/miri/test-cargo-miri/run-test.py index 5b77092979d34..a9d09ac7a9d6e 100755 --- a/src/tools/miri/test-cargo-miri/run-test.py +++ b/src/tools/miri/test-cargo-miri/run-test.py @@ -136,7 +136,7 @@ def test_cargo_miri_run(): cargo_miri("run") + ["--target-dir=custom-run", "--", "--target-dir=target/custom-run"], "run.args.stdout.ref", "run.custom-target-dir.stderr.ref", ) - test("`cargo miri run --package=test-local-crate-detection` (test local crate detection)", + test("`cargo miri run` (test local crate detection)", cargo_miri("run") + ["--package=test-local-crate-detection"], "run.local_crate.stdout.ref", "run.local_crate.stderr.ref", ) @@ -147,49 +147,46 @@ def test_cargo_miri_test(): default_ref = "test.cross-target.stdout.ref" if is_foreign else "test.default.stdout.ref" filter_ref = "test.filter.cross-target.stdout.ref" if is_foreign else "test.filter.stdout.ref" - # macOS needs permissive provenance inside getrandom_1. test("`cargo miri test`", cargo_miri("test"), - default_ref, "test.stderr-empty.ref", - env={'MIRIFLAGS': "-Zmiri-permissive-provenance -Zmiri-seed=4242"}, + default_ref, "test.empty.ref", + env={'MIRIFLAGS': "-Zmiri-seed=4242"}, ) test("`cargo miri test` (no isolation, no doctests)", cargo_miri("test") + ["--bins", "--tests"], # no `--lib`, we disabled that in `Cargo.toml` - "test.cross-target.stdout.ref", "test.stderr-empty.ref", - env={'MIRIFLAGS': "-Zmiri-permissive-provenance -Zmiri-disable-isolation"}, + "test.cross-target.stdout.ref", "test.empty.ref", + env={'MIRIFLAGS': "-Zmiri-disable-isolation"}, ) test("`cargo miri test` (with filter)", cargo_miri("test") + ["--", "--format=pretty", "pl"], - filter_ref, "test.stderr-empty.ref", + filter_ref, "test.empty.ref", ) test("`cargo miri test` (test target)", cargo_miri("test") + ["--test", "test", "--", "--format=pretty"], - "test.test-target.stdout.ref", "test.stderr-empty.ref", - env={'MIRIFLAGS': "-Zmiri-permissive-provenance"}, + "test.test-target.stdout.ref", "test.empty.ref", ) test("`cargo miri test` (bin target)", cargo_miri("test") + ["--bin", "cargo-miri-test", "--", "--format=pretty"], - "test.bin-target.stdout.ref", "test.stderr-empty.ref", + "test.bin-target.stdout.ref", "test.empty.ref", ) test("`cargo miri t` (subcrate, no isolation)", cargo_miri("t") + ["-p", "subcrate"], - "test.subcrate.stdout.ref", "test.stderr-proc-macro.ref", + "test.subcrate.cross-target.stdout.ref" if is_foreign else "test.subcrate.stdout.ref", + "test.empty.ref", env={'MIRIFLAGS': "-Zmiri-disable-isolation"}, ) - test("`cargo miri test` (subcrate, doctests)", - cargo_miri("test") + ["-p", "subcrate", "--doc"], - "test.stdout-empty.ref", "test.stderr-proc-macro-doctest.ref", + test("`cargo miri test` (proc-macro crate)", + cargo_miri("test") + ["-p", "proc_macro_crate"], + "test.empty.ref", "test.proc-macro.stderr.ref", ) test("`cargo miri test` (custom target dir)", cargo_miri("test") + ["--target-dir=custom-test"], - default_ref, "test.stderr-empty.ref", - env={'MIRIFLAGS': "-Zmiri-permissive-provenance"}, + default_ref, "test.empty.ref", ) del os.environ["CARGO_TARGET_DIR"] # this overrides `build.target-dir` passed by `--config`, so unset it test("`cargo miri test` (config-cli)", cargo_miri("test") + ["--config=build.target-dir=\"config-cli\""], - default_ref, "test.stderr-empty.ref", - env={'MIRIFLAGS': "-Zmiri-permissive-provenance"}, + default_ref, "test.empty.ref", ) if ARGS.multi_target: test_cargo_miri_multi_target() @@ -198,8 +195,7 @@ def test_cargo_miri_test(): def test_cargo_miri_multi_target(): test("`cargo miri test` (multiple targets)", cargo_miri("test", targets = ["aarch64-unknown-linux-gnu", "s390x-unknown-linux-gnu"]), - "test.multiple_targets.stdout.ref", "test.stderr-empty.ref", - env={'MIRIFLAGS': "-Zmiri-permissive-provenance"}, + "test.multiple_targets.stdout.ref", "test.empty.ref", ) args_parser = argparse.ArgumentParser(description='`cargo miri` testing') diff --git a/src/tools/miri/test-cargo-miri/src/lib.rs b/src/tools/miri/test-cargo-miri/src/lib.rs index 003341d0974ce..3b63f8afc9023 100644 --- a/src/tools/miri/test-cargo-miri/src/lib.rs +++ b/src/tools/miri/test-cargo-miri/src/lib.rs @@ -26,7 +26,8 @@ /// /// let _val = Fail::::C; /// ``` -#[no_mangle] +// This is imported in `main.rs`. +#[unsafe(no_mangle)] pub fn make_true() -> bool { proc_macro_crate::use_the_dependency!(); issue_1567::use_the_dependency(); diff --git a/src/tools/miri/test-cargo-miri/src/main.rs b/src/tools/miri/test-cargo-miri/src/main.rs index efe95bf3abab7..00a239a9161a1 100644 --- a/src/tools/miri/test-cargo-miri/src/main.rs +++ b/src/tools/miri/test-cargo-miri/src/main.rs @@ -30,7 +30,7 @@ fn main() { let mut out = Vec::with_capacity(1024); unsafe { - extern "Rust" { + unsafe extern "Rust" { fn miri_host_to_target_path( path: *const c_char, out: *mut c_char, @@ -81,7 +81,7 @@ mod test { // Test calling exported symbols in (transitive) dependencies. // Repeat calls to make sure the `Instance` cache is not broken. for _ in 0..3 { - extern "Rust" { + unsafe extern "Rust" { fn exported_symbol() -> i32; fn assoc_fn_as_exported_symbol() -> i32; fn make_true() -> bool; diff --git a/src/tools/miri/test-cargo-miri/subcrate/Cargo.toml b/src/tools/miri/test-cargo-miri/subcrate/Cargo.toml index 06b1ce1cba4b8..f2f6360f2d219 100644 --- a/src/tools/miri/test-cargo-miri/subcrate/Cargo.toml +++ b/src/tools/miri/test-cargo-miri/subcrate/Cargo.toml @@ -2,11 +2,11 @@ name = "subcrate" version = "0.1.0" authors = ["Miri Team"] +# This is deliberately *not* on the 2024 edition to ensure doctests keep working +# on old editions. edition = "2018" [lib] -proc-macro = true -doctest = false [[bin]] name = "subcrate" diff --git a/src/tools/miri/test-cargo-miri/subcrate/src/lib.rs b/src/tools/miri/test-cargo-miri/subcrate/src/lib.rs index 98c22fef07664..b9278c54dbed1 100644 --- a/src/tools/miri/test-cargo-miri/subcrate/src/lib.rs +++ b/src/tools/miri/test-cargo-miri/subcrate/src/lib.rs @@ -1,16 +1,8 @@ -// This is a proc-macro crate. - -extern crate proc_macro; // make sure proc_macro is in the sysroot - -#[cfg(doctest)] -compile_error!("rustdoc should not touch me"); - -#[cfg(miri)] -compile_error!("Miri should not touch me"); - -use proc_macro::TokenStream; - -#[proc_macro] -pub fn make_answer(_item: TokenStream) -> TokenStream { - "fn answer() -> u32 { 42 }".parse().unwrap() +/// Doc-test test +/// +/// ```rust +/// assert!(subcrate::make_true()); +/// ``` +pub fn make_true() -> bool { + true } diff --git a/src/tools/miri/test-cargo-miri/test.stderr-empty.ref b/src/tools/miri/test-cargo-miri/test.empty.ref similarity index 100% rename from src/tools/miri/test-cargo-miri/test.stderr-empty.ref rename to src/tools/miri/test-cargo-miri/test.empty.ref diff --git a/src/tools/miri/test-cargo-miri/test.proc-macro.stderr.ref b/src/tools/miri/test-cargo-miri/test.proc-macro.stderr.ref new file mode 100644 index 0000000000000..b95474208b27a --- /dev/null +++ b/src/tools/miri/test-cargo-miri/test.proc-macro.stderr.ref @@ -0,0 +1,2 @@ +Running unit tests of `proc-macro` crates is not currently supported by Miri. +Running doctests of `proc-macro` crates is not currently supported by Miri. diff --git a/src/tools/miri/test-cargo-miri/test.stderr-proc-macro-doctest.ref b/src/tools/miri/test-cargo-miri/test.stderr-proc-macro-doctest.ref deleted file mode 100644 index ca5e3a2392db8..0000000000000 --- a/src/tools/miri/test-cargo-miri/test.stderr-proc-macro-doctest.ref +++ /dev/null @@ -1 +0,0 @@ -Running doctests of `proc-macro` crates is not currently supported by Miri. diff --git a/src/tools/miri/test-cargo-miri/test.stderr-proc-macro.ref b/src/tools/miri/test-cargo-miri/test.stderr-proc-macro.ref deleted file mode 100644 index 4983250917b59..0000000000000 --- a/src/tools/miri/test-cargo-miri/test.stderr-proc-macro.ref +++ /dev/null @@ -1 +0,0 @@ -Running unit tests of `proc-macro` crates is not currently supported by Miri. diff --git a/src/tools/miri/test-cargo-miri/test.subcrate.cross-target.stdout.ref b/src/tools/miri/test-cargo-miri/test.subcrate.cross-target.stdout.ref new file mode 100644 index 0000000000000..436e6e4fbbbca --- /dev/null +++ b/src/tools/miri/test-cargo-miri/test.subcrate.cross-target.stdout.ref @@ -0,0 +1,11 @@ + +running 0 tests + +test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out; finished in $TIME + + +running 0 tests + +test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out; finished in $TIME + +subcrate testing diff --git a/src/tools/miri/test-cargo-miri/test.subcrate.stdout.ref b/src/tools/miri/test-cargo-miri/test.subcrate.stdout.ref index e50838ebc838b..c7c7bc8351b27 100644 --- a/src/tools/miri/test-cargo-miri/test.subcrate.stdout.ref +++ b/src/tools/miri/test-cargo-miri/test.subcrate.stdout.ref @@ -1,6 +1,16 @@ +running 0 tests + +test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out; finished in $TIME + + running 0 tests test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out; finished in $TIME subcrate testing + +running 1 test +. +test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out; finished in $TIME + diff --git a/src/tools/miri/test_dependencies/Cargo.lock b/src/tools/miri/test_dependencies/Cargo.lock index af92f9d0dec49..276c518e74f34 100644 --- a/src/tools/miri/test_dependencies/Cargo.lock +++ b/src/tools/miri/test_dependencies/Cargo.lock @@ -4,60 +4,51 @@ version = 4 [[package]] name = "addr2line" -version = "0.22.0" +version = "0.24.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6e4503c46a5c0c7844e948c9a4d6acd9f50cccb4de1c48eb9e291ea17470c678" +checksum = "dfbe277e56a376000877090da837660b4427aad530e3028d44e0bffe4f89a1c1" dependencies = [ "gimli", ] [[package]] -name = "adler" -version = "1.0.2" +name = "adler2" +version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe" +checksum = "512761e0bb2578dd7380c6baaa0f4ce03e84f95e960231d1dec8bf4d7d6e2627" [[package]] name = "backtrace" -version = "0.3.73" +version = "0.3.74" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5cc23269a4f8976d0a4d2e7109211a419fe30e8d88d677cd60b6bc79c5732e0a" +checksum = "8d82cb332cdfaed17ae235a638438ac4d4839913cc2af585c3c6746e8f8bee1a" dependencies = [ "addr2line", - "cc", "cfg-if", "libc", "miniz_oxide", "object", "rustc-demangle", + "windows-targets", ] [[package]] name = "bitflags" -version = "2.6.0" +version = "2.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b048fb63fd8b5923fc5aa7b340d8e156aec7ec02f0c78fa8a6ddc2613f6f71de" +checksum = "5c8214115b7bf84099f1309324e63141d4c5d7cc26862f97a0a857dbefe165bd" [[package]] name = "bumpalo" -version = "3.16.0" +version = "3.17.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "79296716171880943b8470b5f8d03aa55eb2e645a4874bdbb28adb49162e012c" +checksum = "1628fb46dfa0b37568d12e5edd512553eccf6a22a78e8bde00bb4aed84d5bdbf" [[package]] name = "bytes" -version = "1.7.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8318a53db07bb3f8dca91a600466bdb3f2eaadeedfdbcf02e1accbad9271ba50" - -[[package]] -name = "cc" -version = "1.1.22" +version = "1.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9540e661f81799159abee814118cc139a2004b3a3aa3ea37724a1b66530b90e0" -dependencies = [ - "shlex", -] +checksum = "d71b6127be86fdcfddb610f7182ac57211d4b18a3e9c82eb2d17662f2227ad6a" [[package]] name = "cfg-if" @@ -67,19 +58,19 @@ checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" [[package]] name = "errno" -version = "0.3.9" +version = "0.3.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "534c5cf6194dfab3db3242765c03bbe257cf92f22b38f6bc0c58d59108a820ba" +checksum = "976dd42dc7e85965fe702eb8164f21f450704bdde31faefd6471dba214cb594e" dependencies = [ "libc", - "windows-sys", + "windows-sys 0.59.0", ] [[package]] name = "fastrand" -version = "2.1.0" +version = "2.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9fc0510504f03c51ada170672ac806f1f105a88aa97a5281117e1ddc3368e51a" +checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be" [[package]] name = "getrandom" @@ -107,21 +98,21 @@ dependencies = [ [[package]] name = "getrandom" -version = "0.3.1" +version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "43a49c392881ce6d5c3b8cb70f98717b7c07aabbdff06687b9030dbfbe2725f8" +checksum = "73fea8450eea4bac3940448fb7ae50d91f034f941199fcd9d909a5a07aa455f0" dependencies = [ "cfg-if", "libc", - "wasi 0.13.3+wasi-0.2.2", - "windows-targets", + "r-efi", + "wasi 0.14.2+wasi-0.2.4", ] [[package]] name = "gimli" -version = "0.29.0" +version = "0.31.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "40ecd4077b5ae9fd2e9e169b102c6c330d0605168eb0e8bf79952b256dbefffd" +checksum = "07e28edb80900c19c28f1072f2e8aeca7fa06b23cd4169cefe1af5aa3260783f" [[package]] name = "hermit-abi" @@ -131,30 +122,31 @@ checksum = "d231dfb89cfffdbc30e7fc41579ed6066ad03abda9e567ccafae602b97ec5024" [[package]] name = "js-sys" -version = "0.3.69" +version = "0.3.77" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "29c15563dc2726973df627357ce0c9ddddbea194836909d655df6a75d2cf296d" +checksum = "1cfaf33c695fc6e08064efbc1f72ec937429614f25eef83af942d0e227c3a28f" dependencies = [ + "once_cell", "wasm-bindgen", ] [[package]] name = "libc" -version = "0.2.161" +version = "0.2.171" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e9489c2807c139ffd9c1794f4af0ebe86a828db53ecdc7fea2111d0fed085d1" +checksum = "c19937216e9d3aa9956d9bb8dfc0b0c8beb6058fc4f7a4dc4d850edf86a237d6" [[package]] name = "linux-raw-sys" -version = "0.4.14" +version = "0.9.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "78b3ae25bc7c8c38cec158d1f2757ee79e9b3740fbc7ccf0e59e4b08d793fa89" +checksum = "fe7db12097d22ec582439daf8618b8fdd1a7bef6270e9af3b1ebcd30893cf413" [[package]] name = "log" -version = "0.4.22" +version = "0.4.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a7a70ba024b9dc04c27ea2f0c0548feb474ec5c54bba33a7f72f873a39d07b24" +checksum = "13dc2df351e3202783a1fe0d44375f7295ffb4049267b0f3018346dc122a1d94" [[package]] name = "memchr" @@ -164,23 +156,22 @@ checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3" [[package]] name = "miniz_oxide" -version = "0.7.4" +version = "0.8.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b8a240ddb74feaf34a79a7add65a741f3167852fba007066dcac1ca548d89c08" +checksum = "3be647b768db090acb35d5ec5db2b0e1f1de11133ca123b9eacf5137868f892a" dependencies = [ - "adler", + "adler2", ] [[package]] name = "mio" -version = "1.0.1" +version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4569e456d394deccd22ce1c1913e6ea0e54519f577285001215d33557431afe4" +checksum = "2886843bf800fba2e3377cff24abf6379b4c4d5c6681eaf9ea5b0d15090450bd" dependencies = [ - "hermit-abi", "libc", "wasi 0.11.0+wasi-snapshot-preview1", - "windows-sys", + "windows-sys 0.52.0", ] [[package]] @@ -190,13 +181,13 @@ dependencies = [ "cfg-if", "getrandom 0.1.16", "getrandom 0.2.15", - "getrandom 0.3.1", + "getrandom 0.3.2", "libc", "num_cpus", "page_size", "tempfile", "tokio", - "windows-sys", + "windows-sys 0.59.0", ] [[package]] @@ -211,18 +202,18 @@ dependencies = [ [[package]] name = "object" -version = "0.36.2" +version = "0.36.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3f203fa8daa7bb185f760ae12bd8e097f63d17041dcdcaf675ac54cdf863170e" +checksum = "62948e14d923ea95ea2c7c86c71013138b66525b86bdc08d2dcc262bdb497b87" dependencies = [ "memchr", ] [[package]] name = "once_cell" -version = "1.19.0" +version = "1.21.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92" +checksum = "42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d" [[package]] name = "page_size" @@ -236,28 +227,34 @@ dependencies = [ [[package]] name = "pin-project-lite" -version = "0.2.14" +version = "0.2.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bda66fc9667c18cb2758a2ac84d1167245054bcf85d5d1aaa6923f45801bdd02" +checksum = "3b3cff922bd51709b605d9ead9aa71031d81447142d828eb4a6eba76fe619f9b" [[package]] name = "proc-macro2" -version = "1.0.86" +version = "1.0.94" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5e719e8df665df0d1c8fbfd238015744736151d4445ec0836b8e628aae103b77" +checksum = "a31971752e70b8b2686d7e46ec17fb38dad4051d94024c88df49b667caea9c84" dependencies = [ "unicode-ident", ] [[package]] name = "quote" -version = "1.0.36" +version = "1.0.40" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0fa76aaf39101c457836aec0ce2316dbdc3ab723cdda1c6bd4e6ad4208acaca7" +checksum = "1885c039570dc00dcb4ff087a89e185fd56bae234ddc7f056a945bf36467248d" dependencies = [ "proc-macro2", ] +[[package]] +name = "r-efi" +version = "5.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "74765f6d916ee2faa39bc8e68e4f3ed8949b48cccdac59983d287a7cb71ce9c5" + [[package]] name = "rustc-demangle" version = "0.1.24" @@ -266,23 +263,17 @@ checksum = "719b953e2095829ee67db738b3bfa9fa368c94900df327b3f07fe6e794d2fe1f" [[package]] name = "rustix" -version = "0.38.34" +version = "1.0.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "70dc5ec042f7a43c4a73241207cecc9873a06d45debb38b329f8541d85c2730f" +checksum = "d97817398dd4bb2e6da002002db259209759911da105da92bec29ccb12cf58bf" dependencies = [ "bitflags", "errno", "libc", "linux-raw-sys", - "windows-sys", + "windows-sys 0.59.0", ] -[[package]] -name = "shlex" -version = "1.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64" - [[package]] name = "signal-hook-registry" version = "1.4.2" @@ -294,19 +285,19 @@ dependencies = [ [[package]] name = "socket2" -version = "0.5.7" +version = "0.5.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ce305eb0b4296696835b71df73eb912e0f1ffd2556a501fcede6e0c50349191c" +checksum = "4f5fd57c80058a56cf5c777ab8a126398ece8e442983605d280a44ce79d0edef" dependencies = [ "libc", - "windows-sys", + "windows-sys 0.52.0", ] [[package]] name = "syn" -version = "2.0.72" +version = "2.0.100" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dc4b9b9bf2add8093d3f2c0204471e951b2285580335de42f9d2534f3ae7a8af" +checksum = "b09a44accad81e1ba1cd74a32461ba89dee89095ba17b32f5d03683b1b1fc2a0" dependencies = [ "proc-macro2", "quote", @@ -315,22 +306,22 @@ dependencies = [ [[package]] name = "tempfile" -version = "3.11.0" +version = "3.19.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b8fcd239983515c23a32fb82099f97d0b11b8c72f654ed659363a95c3dad7a53" +checksum = "7437ac7763b9b123ccf33c338a5cc1bac6f69b45a136c19bdd8a65e3916435bf" dependencies = [ - "cfg-if", "fastrand", + "getrandom 0.3.2", "once_cell", "rustix", - "windows-sys", + "windows-sys 0.59.0", ] [[package]] name = "tokio" -version = "1.39.2" +version = "1.44.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "daa4fb1bc778bd6f04cbfc4bb2d06a7396a8f299dc33ea1900cedaa316f467b1" +checksum = "e6b88822cbe49de4185e3a4cbf8321dd487cf5fe0c5c65695fef6346371e9c48" dependencies = [ "backtrace", "bytes", @@ -340,14 +331,14 @@ dependencies = [ "signal-hook-registry", "socket2", "tokio-macros", - "windows-sys", + "windows-sys 0.52.0", ] [[package]] name = "tokio-macros" -version = "2.4.0" +version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "693d596312e88961bc67d7f1f97af8a70227d9f90c31bba5806eec004978d752" +checksum = "6e06d43f1345a3bcd39f6a56dbb7dcab2ba47e68e8ac134855e7e2bdbaf8cab8" dependencies = [ "proc-macro2", "quote", @@ -356,9 +347,9 @@ dependencies = [ [[package]] name = "unicode-ident" -version = "1.0.12" +version = "1.0.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b" +checksum = "5a5f39404a5da50712a4c1eecf25e90dd62b613502b7e925fd4e4d19b5c96512" [[package]] name = "wasi" @@ -374,32 +365,32 @@ checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" [[package]] name = "wasi" -version = "0.13.3+wasi-0.2.2" +version = "0.14.2+wasi-0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "26816d2e1a4a36a2940b96c5296ce403917633dff8f3440e9b236ed6f6bacad2" +checksum = "9683f9a5a998d873c0d21fcbe3c083009670149a8fab228644b8bd36b2c48cb3" dependencies = [ "wit-bindgen-rt", ] [[package]] name = "wasm-bindgen" -version = "0.2.92" +version = "0.2.100" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4be2531df63900aeb2bca0daaaddec08491ee64ceecbee5076636a3b026795a8" +checksum = "1edc8929d7499fc4e8f0be2262a241556cfc54a0bea223790e71446f2aab1ef5" dependencies = [ "cfg-if", + "once_cell", "wasm-bindgen-macro", ] [[package]] name = "wasm-bindgen-backend" -version = "0.2.92" +version = "0.2.100" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "614d787b966d3989fa7bb98a654e369c762374fd3213d212cfc0251257e747da" +checksum = "2f0a0651a5c2bc21487bde11ee802ccaf4c51935d0d3d42a6101f98161700bc6" dependencies = [ "bumpalo", "log", - "once_cell", "proc-macro2", "quote", "syn", @@ -408,9 +399,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro" -version = "0.2.92" +version = "0.2.100" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a1f8823de937b71b9460c0c34e25f3da88250760bec0ebac694b49997550d726" +checksum = "7fe63fc6d09ed3792bd0897b314f53de8e16568c2b3f7982f468c0bf9bd0b407" dependencies = [ "quote", "wasm-bindgen-macro-support", @@ -418,9 +409,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro-support" -version = "0.2.92" +version = "0.2.100" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e94f17b526d0a461a191c78ea52bbce64071ed5c04c9ffe424dcb38f74171bb7" +checksum = "8ae87ea40c9f689fc23f209965b6fb8a99ad69aeeb0231408be24920604395de" dependencies = [ "proc-macro2", "quote", @@ -431,9 +422,12 @@ dependencies = [ [[package]] name = "wasm-bindgen-shared" -version = "0.2.92" +version = "0.2.100" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "af190c94f2773fdb3729c55b007a722abb5384da03bc0986df4c289bf5567e96" +checksum = "1a05d73b933a847d6cccdda8f838a22ff101ad9bf93e33684f39c1f5f0eece3d" +dependencies = [ + "unicode-ident", +] [[package]] name = "winapi" @@ -466,6 +460,15 @@ dependencies = [ "windows-targets", ] +[[package]] +name = "windows-sys" +version = "0.59.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b" +dependencies = [ + "windows-targets", +] + [[package]] name = "windows-targets" version = "0.52.6" @@ -532,9 +535,9 @@ checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" [[package]] name = "wit-bindgen-rt" -version = "0.33.0" +version = "0.39.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3268f3d866458b787f390cf61f4bbb563b922d091359f9608842999eaee3943c" +checksum = "6f42320e61fe2cfd34354ecb597f86f413484a798ba44a8ca1165c58d42da6c1" dependencies = [ "bitflags", ] diff --git a/src/tools/miri/test_dependencies/Cargo.toml b/src/tools/miri/test_dependencies/Cargo.toml index 78dddaf11dff5..653228a5e3db6 100644 --- a/src/tools/miri/test_dependencies/Cargo.toml +++ b/src/tools/miri/test_dependencies/Cargo.toml @@ -22,9 +22,9 @@ tempfile = "3" page_size = "0.6" # Avoid pulling in all of tokio's dependencies. # However, without `net` and `signal`, tokio uses fewer relevant system APIs. -tokio = { version = "1.24", features = ["macros", "rt-multi-thread", "time", "net", "fs", "sync", "signal", "io-util"] } +tokio = { version = "1", features = ["macros", "rt-multi-thread", "time", "net", "fs", "sync", "signal", "io-util"] } [target.'cfg(windows)'.dependencies] -windows-sys = { version = "0.52", features = [ "Win32_Foundation", "Win32_System_Threading" ] } +windows-sys = { version = "0.59", features = ["Win32_Foundation", "Win32_System_Threading", "Win32_Storage_FileSystem", "Win32_Security"] } [workspace] diff --git a/src/tools/miri/tests/fail-dep/concurrency/libc_pthread_join_self.rs b/src/tools/miri/tests/fail-dep/concurrency/libc_pthread_join_self.rs index 53760b05a3127..f75f306e5a631 100644 --- a/src/tools/miri/tests/fail-dep/concurrency/libc_pthread_join_self.rs +++ b/src/tools/miri/tests/fail-dep/concurrency/libc_pthread_join_self.rs @@ -1,6 +1,6 @@ //@ignore-target: windows # No pthreads on Windows // We are making scheduler assumptions here. -//@compile-flags: -Zmiri-preemption-rate=0 +//@compile-flags: -Zmiri-deterministic-concurrency // Joining itself is undefined behavior. diff --git a/src/tools/miri/tests/fail-dep/concurrency/libc_pthread_mutex_deadlock.rs b/src/tools/miri/tests/fail-dep/concurrency/libc_pthread_mutex_deadlock.rs index e3d5da26aeae4..68dce8f865423 100644 --- a/src/tools/miri/tests/fail-dep/concurrency/libc_pthread_mutex_deadlock.rs +++ b/src/tools/miri/tests/fail-dep/concurrency/libc_pthread_mutex_deadlock.rs @@ -1,5 +1,7 @@ //@ignore-target: windows # No pthreads on Windows //@error-in-other-file: deadlock +// We are making scheduler assumptions here. +//@compile-flags: -Zmiri-deterministic-concurrency use std::cell::UnsafeCell; use std::sync::Arc; diff --git a/src/tools/miri/tests/fail-dep/concurrency/libc_pthread_rwlock_write_read_deadlock.rs b/src/tools/miri/tests/fail-dep/concurrency/libc_pthread_rwlock_write_read_deadlock.rs index 3a985122e22e0..3a932404238db 100644 --- a/src/tools/miri/tests/fail-dep/concurrency/libc_pthread_rwlock_write_read_deadlock.rs +++ b/src/tools/miri/tests/fail-dep/concurrency/libc_pthread_rwlock_write_read_deadlock.rs @@ -1,5 +1,7 @@ //@ignore-target: windows # No pthreads on Windows //@error-in-other-file: deadlock +// We are making scheduler assumptions here. +//@compile-flags: -Zmiri-deterministic-concurrency use std::cell::UnsafeCell; use std::sync::Arc; diff --git a/src/tools/miri/tests/fail-dep/concurrency/libc_pthread_rwlock_write_write_deadlock.rs b/src/tools/miri/tests/fail-dep/concurrency/libc_pthread_rwlock_write_write_deadlock.rs index 6d7bb80d8e6c6..3b21738846357 100644 --- a/src/tools/miri/tests/fail-dep/concurrency/libc_pthread_rwlock_write_write_deadlock.rs +++ b/src/tools/miri/tests/fail-dep/concurrency/libc_pthread_rwlock_write_write_deadlock.rs @@ -1,5 +1,7 @@ //@ignore-target: windows # No pthreads on Windows //@error-in-other-file: deadlock +// We are making scheduler assumptions here. +//@compile-flags: -Zmiri-deterministic-concurrency use std::cell::UnsafeCell; use std::sync::Arc; diff --git a/src/tools/miri/tests/fail-dep/concurrency/windows_join_main.rs b/src/tools/miri/tests/fail-dep/concurrency/windows_join_main.rs index 279201df867cf..2980d257a2925 100644 --- a/src/tools/miri/tests/fail-dep/concurrency/windows_join_main.rs +++ b/src/tools/miri/tests/fail-dep/concurrency/windows_join_main.rs @@ -1,6 +1,6 @@ //@only-target: windows # Uses win32 api functions // We are making scheduler assumptions here. -//@compile-flags: -Zmiri-preemption-rate=0 +//@compile-flags: -Zmiri-deterministic-concurrency //@error-in-other-file: deadlock // On windows, joining main is not UB, but it will block a thread forever. @@ -13,7 +13,7 @@ use windows_sys::Win32::System::Threading::{INFINITE, WaitForSingleObject}; // XXX HACK: This is how miri represents the handle for thread 0. // This value can be "legitimately" obtained by using `GetCurrentThread` with `DuplicateHandle` // but miri does not implement `DuplicateHandle` yet. -const MAIN_THREAD: HANDLE = (2i32 << 30) as HANDLE; +const MAIN_THREAD: HANDLE = (2i32 << 29) as HANDLE; fn main() { thread::spawn(|| { diff --git a/src/tools/miri/tests/fail-dep/concurrency/windows_join_self.rs b/src/tools/miri/tests/fail-dep/concurrency/windows_join_self.rs index eee2979f3cff3..85672ec860f56 100644 --- a/src/tools/miri/tests/fail-dep/concurrency/windows_join_self.rs +++ b/src/tools/miri/tests/fail-dep/concurrency/windows_join_self.rs @@ -1,6 +1,6 @@ //@only-target: windows # Uses win32 api functions // We are making scheduler assumptions here. -//@compile-flags: -Zmiri-preemption-rate=0 +//@compile-flags: -Zmiri-deterministic-concurrency //@error-in-other-file: deadlock // On windows, a thread joining itself is not UB, but it will deadlock. diff --git a/src/tools/miri/tests/fail-dep/libc/affinity.stderr b/src/tools/miri/tests/fail-dep/libc/affinity.stderr index 5a226c6a44b8f..cc3daa47e00bc 100644 --- a/src/tools/miri/tests/fail-dep/libc/affinity.stderr +++ b/src/tools/miri/tests/fail-dep/libc/affinity.stderr @@ -1,8 +1,8 @@ -error: Undefined Behavior: memory access failed: expected a pointer to 129 bytes of memory, but got ALLOC which is only 128 bytes from the end of the allocation +error: Undefined Behavior: memory access failed: attempting to access 129 bytes, but got ALLOC which is only 128 bytes from the end of the allocation --> tests/fail-dep/libc/affinity.rs:LL:CC | LL | let err = unsafe { sched_setaffinity(PID, size_of::() + 1, &cpuset) }; - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ memory access failed: expected a pointer to 129 bytes of memory, but got ALLOC which is only 128 bytes from the end of the allocation + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ memory access failed: attempting to access 129 bytes, but got ALLOC which is only 128 bytes from the end of the allocation | = help: this indicates a bug in the program: it performed an invalid operation, and caused Undefined Behavior = help: see https://doc.rust-lang.org/nightly/reference/behavior-considered-undefined.html for further information diff --git a/src/tools/miri/tests/fail-dep/libc/env-set_var-data-race.rs b/src/tools/miri/tests/fail-dep/libc/env-set_var-data-race.rs index 3a832bb0ce0c1..5c80f6425eaae 100644 --- a/src/tools/miri/tests/fail-dep/libc/env-set_var-data-race.rs +++ b/src/tools/miri/tests/fail-dep/libc/env-set_var-data-race.rs @@ -1,4 +1,4 @@ -//@compile-flags: -Zmiri-disable-isolation -Zmiri-preemption-rate=0 +//@compile-flags: -Zmiri-disable-isolation -Zmiri-deterministic-concurrency //@ignore-target: windows # No libc env support on Windows use std::{env, thread}; diff --git a/src/tools/miri/tests/fail-dep/libc/eventfd_block_read_twice.rs b/src/tools/miri/tests/fail-dep/libc/eventfd_block_read_twice.rs index 0d893663fd6d3..9dc554030c0e1 100644 --- a/src/tools/miri/tests/fail-dep/libc/eventfd_block_read_twice.rs +++ b/src/tools/miri/tests/fail-dep/libc/eventfd_block_read_twice.rs @@ -1,7 +1,7 @@ //@only-target: linux android illumos //~^ERROR: deadlocked //~^^ERROR: deadlocked -//@compile-flags: -Zmiri-preemption-rate=0 +//@compile-flags: -Zmiri-deterministic-concurrency //@error-in-other-file: deadlock use std::thread; diff --git a/src/tools/miri/tests/fail-dep/libc/eventfd_block_write_twice.rs b/src/tools/miri/tests/fail-dep/libc/eventfd_block_write_twice.rs index 9fed47c17d405..5297a3297750a 100644 --- a/src/tools/miri/tests/fail-dep/libc/eventfd_block_write_twice.rs +++ b/src/tools/miri/tests/fail-dep/libc/eventfd_block_write_twice.rs @@ -1,7 +1,7 @@ //@only-target: linux android illumos //~^ERROR: deadlocked //~^^ERROR: deadlocked -//@compile-flags: -Zmiri-preemption-rate=0 +//@compile-flags: -Zmiri-deterministic-concurrency //@error-in-other-file: deadlock use std::thread; diff --git a/src/tools/miri/tests/fail-dep/libc/libc-epoll-data-race.rs b/src/tools/miri/tests/fail-dep/libc/libc-epoll-data-race.rs index 45f6bf6da0968..314ce90cfb5d4 100644 --- a/src/tools/miri/tests/fail-dep/libc/libc-epoll-data-race.rs +++ b/src/tools/miri/tests/fail-dep/libc/libc-epoll-data-race.rs @@ -4,7 +4,7 @@ //! to be considered synchronized. //@only-target: linux android illumos // ensure deterministic schedule -//@compile-flags: -Zmiri-preemption-rate=0 +//@compile-flags: -Zmiri-deterministic-concurrency use std::convert::TryInto; use std::thread; diff --git a/src/tools/miri/tests/fail-dep/libc/libc_epoll_block_two_thread.rs b/src/tools/miri/tests/fail-dep/libc/libc_epoll_block_two_thread.rs index 059b24cb8c0be..f6f2e2b93121b 100644 --- a/src/tools/miri/tests/fail-dep/libc/libc_epoll_block_two_thread.rs +++ b/src/tools/miri/tests/fail-dep/libc/libc_epoll_block_two_thread.rs @@ -1,4 +1,4 @@ -//@compile-flags: -Zmiri-preemption-rate=0 +//@compile-flags: -Zmiri-deterministic-concurrency //~^ERROR: deadlocked //~^^ERROR: deadlocked //@only-target: linux android illumos diff --git a/src/tools/miri/tests/fail-dep/libc/memchr_null.stderr b/src/tools/miri/tests/fail-dep/libc/memchr_null.stderr index 6d3ff176c3559..5690277a04606 100644 --- a/src/tools/miri/tests/fail-dep/libc/memchr_null.stderr +++ b/src/tools/miri/tests/fail-dep/libc/memchr_null.stderr @@ -1,8 +1,8 @@ -error: Undefined Behavior: out-of-bounds pointer use: expected a pointer to some allocation, but got a null pointer +error: Undefined Behavior: pointer not dereferenceable: pointer must point to some allocation, but got null pointer --> tests/fail-dep/libc/memchr_null.rs:LL:CC | LL | libc::memchr(ptr::null(), 0, 0); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ out-of-bounds pointer use: expected a pointer to some allocation, but got a null pointer + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ pointer not dereferenceable: pointer must point to some allocation, but got null pointer | = help: this indicates a bug in the program: it performed an invalid operation, and caused Undefined Behavior = help: see https://doc.rust-lang.org/nightly/reference/behavior-considered-undefined.html for further information diff --git a/src/tools/miri/tests/fail-dep/libc/memcmp_null.stderr b/src/tools/miri/tests/fail-dep/libc/memcmp_null.stderr index a4ca205c37704..bd3a0719fa33a 100644 --- a/src/tools/miri/tests/fail-dep/libc/memcmp_null.stderr +++ b/src/tools/miri/tests/fail-dep/libc/memcmp_null.stderr @@ -1,8 +1,8 @@ -error: Undefined Behavior: out-of-bounds pointer use: expected a pointer to some allocation, but got a null pointer +error: Undefined Behavior: pointer not dereferenceable: pointer must point to some allocation, but got null pointer --> tests/fail-dep/libc/memcmp_null.rs:LL:CC | LL | libc::memcmp(ptr::null(), ptr::null(), 0); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ out-of-bounds pointer use: expected a pointer to some allocation, but got a null pointer + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ pointer not dereferenceable: pointer must point to some allocation, but got null pointer | = help: this indicates a bug in the program: it performed an invalid operation, and caused Undefined Behavior = help: see https://doc.rust-lang.org/nightly/reference/behavior-considered-undefined.html for further information diff --git a/src/tools/miri/tests/fail-dep/libc/memcmp_zero.stderr b/src/tools/miri/tests/fail-dep/libc/memcmp_zero.stderr index d7b046c18235f..2044c15476176 100644 --- a/src/tools/miri/tests/fail-dep/libc/memcmp_zero.stderr +++ b/src/tools/miri/tests/fail-dep/libc/memcmp_zero.stderr @@ -1,8 +1,8 @@ -error: Undefined Behavior: out-of-bounds pointer use: expected a pointer to some allocation, but got 0x2a[noalloc] which is a dangling pointer (it has no provenance) +error: Undefined Behavior: pointer not dereferenceable: pointer must point to some allocation, but got 0x2a[noalloc] which is a dangling pointer (it has no provenance) --> tests/fail-dep/libc/memcmp_zero.rs:LL:CC | LL | libc::memcmp(ptr.cast(), ptr.cast(), 0); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ out-of-bounds pointer use: expected a pointer to some allocation, but got 0x2a[noalloc] which is a dangling pointer (it has no provenance) + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ pointer not dereferenceable: pointer must point to some allocation, but got 0x2a[noalloc] which is a dangling pointer (it has no provenance) | = help: this indicates a bug in the program: it performed an invalid operation, and caused Undefined Behavior = help: see https://doc.rust-lang.org/nightly/reference/behavior-considered-undefined.html for further information diff --git a/src/tools/miri/tests/fail-dep/libc/memcpy_zero.stderr b/src/tools/miri/tests/fail-dep/libc/memcpy_zero.stderr index 336113e344088..789e9daf43bc5 100644 --- a/src/tools/miri/tests/fail-dep/libc/memcpy_zero.stderr +++ b/src/tools/miri/tests/fail-dep/libc/memcpy_zero.stderr @@ -1,8 +1,8 @@ -error: Undefined Behavior: out-of-bounds pointer use: expected a pointer to some allocation, but got 0x17[noalloc] which is a dangling pointer (it has no provenance) +error: Undefined Behavior: pointer not dereferenceable: pointer must point to some allocation, but got 0x17[noalloc] which is a dangling pointer (it has no provenance) --> tests/fail-dep/libc/memcpy_zero.rs:LL:CC | LL | libc::memcpy(to.cast(), from.cast(), 0); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ out-of-bounds pointer use: expected a pointer to some allocation, but got 0x17[noalloc] which is a dangling pointer (it has no provenance) + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ pointer not dereferenceable: pointer must point to some allocation, but got 0x17[noalloc] which is a dangling pointer (it has no provenance) | = help: this indicates a bug in the program: it performed an invalid operation, and caused Undefined Behavior = help: see https://doc.rust-lang.org/nightly/reference/behavior-considered-undefined.html for further information diff --git a/src/tools/miri/tests/fail-dep/libc/memrchr_null.stderr b/src/tools/miri/tests/fail-dep/libc/memrchr_null.stderr index ce759f3e17a56..27e7a9855d101 100644 --- a/src/tools/miri/tests/fail-dep/libc/memrchr_null.stderr +++ b/src/tools/miri/tests/fail-dep/libc/memrchr_null.stderr @@ -1,8 +1,8 @@ -error: Undefined Behavior: out-of-bounds pointer use: expected a pointer to some allocation, but got a null pointer +error: Undefined Behavior: pointer not dereferenceable: pointer must point to some allocation, but got null pointer --> tests/fail-dep/libc/memrchr_null.rs:LL:CC | LL | libc::memrchr(ptr::null(), 0, 0); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ out-of-bounds pointer use: expected a pointer to some allocation, but got a null pointer + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ pointer not dereferenceable: pointer must point to some allocation, but got null pointer | = help: this indicates a bug in the program: it performed an invalid operation, and caused Undefined Behavior = help: see https://doc.rust-lang.org/nightly/reference/behavior-considered-undefined.html for further information diff --git a/src/tools/miri/tests/fail-dep/libc/socketpair-close-while-blocked.rs b/src/tools/miri/tests/fail-dep/libc/socketpair-close-while-blocked.rs index 8413e118819cd..0699dec6556d4 100644 --- a/src/tools/miri/tests/fail-dep/libc/socketpair-close-while-blocked.rs +++ b/src/tools/miri/tests/fail-dep/libc/socketpair-close-while-blocked.rs @@ -2,7 +2,7 @@ //! faulty logic around `release_clock` that led to this code not reporting a data race. //~^^ERROR: deadlock //@ignore-target: windows # no libc socketpair on Windows -//@compile-flags: -Zmiri-preemption-rate=0 -Zmiri-address-reuse-rate=0 +//@compile-flags: -Zmiri-deterministic-concurrency //@error-in-other-file: deadlock use std::thread; diff --git a/src/tools/miri/tests/fail-dep/libc/socketpair-data-race.rs b/src/tools/miri/tests/fail-dep/libc/socketpair-data-race.rs index 55491da9f60d7..37fac436ff3e1 100644 --- a/src/tools/miri/tests/fail-dep/libc/socketpair-data-race.rs +++ b/src/tools/miri/tests/fail-dep/libc/socketpair-data-race.rs @@ -1,7 +1,7 @@ //! This is a regression test for : we had some //! faulty logic around `release_clock` that led to this code not reporting a data race. //@ignore-target: windows # no libc socketpair on Windows -//@compile-flags: -Zmiri-preemption-rate=0 -Zmiri-address-reuse-rate=0 +//@compile-flags: -Zmiri-deterministic-concurrency use std::thread; fn main() { diff --git a/src/tools/miri/tests/fail-dep/libc/socketpair_block_read_twice.rs b/src/tools/miri/tests/fail-dep/libc/socketpair_block_read_twice.rs index d3e4c43f2b75c..b383985950029 100644 --- a/src/tools/miri/tests/fail-dep/libc/socketpair_block_read_twice.rs +++ b/src/tools/miri/tests/fail-dep/libc/socketpair_block_read_twice.rs @@ -2,7 +2,7 @@ //~^ERROR: deadlocked //~^^ERROR: deadlocked // test_race depends on a deterministic schedule. -//@compile-flags: -Zmiri-preemption-rate=0 +//@compile-flags: -Zmiri-deterministic-concurrency //@error-in-other-file: deadlock use std::thread; diff --git a/src/tools/miri/tests/fail-dep/libc/socketpair_block_write_twice.rs b/src/tools/miri/tests/fail-dep/libc/socketpair_block_write_twice.rs index 4f951acb2c31a..7d84d87ebbb14 100644 --- a/src/tools/miri/tests/fail-dep/libc/socketpair_block_write_twice.rs +++ b/src/tools/miri/tests/fail-dep/libc/socketpair_block_write_twice.rs @@ -2,7 +2,7 @@ //~^ERROR: deadlocked //~^^ERROR: deadlocked // test_race depends on a deterministic schedule. -//@compile-flags: -Zmiri-preemption-rate=0 +//@compile-flags: -Zmiri-deterministic-concurrency //@error-in-other-file: deadlock use std::thread; diff --git a/src/tools/miri/tests/fail/both_borrows/issue-miri-1050-1.rs b/src/tools/miri/tests/fail/both_borrows/issue-miri-1050-1.rs index 49de3dd0b1052..dd7dae9cecf91 100644 --- a/src/tools/miri/tests/fail/both_borrows/issue-miri-1050-1.rs +++ b/src/tools/miri/tests/fail/both_borrows/issue-miri-1050-1.rs @@ -1,6 +1,6 @@ //@revisions: stack tree //@[tree]compile-flags: -Zmiri-tree-borrows -//@error-in-other-file: expected a pointer to 4 bytes of memory +//@error-in-other-file: pointer not dereferenceable fn main() { unsafe { diff --git a/src/tools/miri/tests/fail/both_borrows/issue-miri-1050-1.stack.stderr b/src/tools/miri/tests/fail/both_borrows/issue-miri-1050-1.stack.stderr index cd27bb818e766..0e6d838dfff69 100644 --- a/src/tools/miri/tests/fail/both_borrows/issue-miri-1050-1.stack.stderr +++ b/src/tools/miri/tests/fail/both_borrows/issue-miri-1050-1.stack.stderr @@ -1,8 +1,8 @@ -error: Undefined Behavior: out-of-bounds pointer use: expected a pointer to 4 bytes of memory, but got ALLOC which is only 2 bytes from the end of the allocation +error: Undefined Behavior: pointer not dereferenceable: pointer must be dereferenceable for 4 bytes, but got ALLOC which is only 2 bytes from the end of the allocation --> RUSTLIB/alloc/src/boxed.rs:LL:CC | LL | Box(unsafe { Unique::new_unchecked(raw) }, alloc) - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ out-of-bounds pointer use: expected a pointer to 4 bytes of memory, but got ALLOC which is only 2 bytes from the end of the allocation + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ pointer not dereferenceable: pointer must be dereferenceable for 4 bytes, but got ALLOC which is only 2 bytes from the end of the allocation | = help: this indicates a bug in the program: it performed an invalid operation, and caused Undefined Behavior = help: see https://doc.rust-lang.org/nightly/reference/behavior-considered-undefined.html for further information diff --git a/src/tools/miri/tests/fail/both_borrows/issue-miri-1050-1.tree.stderr b/src/tools/miri/tests/fail/both_borrows/issue-miri-1050-1.tree.stderr index cd27bb818e766..0e6d838dfff69 100644 --- a/src/tools/miri/tests/fail/both_borrows/issue-miri-1050-1.tree.stderr +++ b/src/tools/miri/tests/fail/both_borrows/issue-miri-1050-1.tree.stderr @@ -1,8 +1,8 @@ -error: Undefined Behavior: out-of-bounds pointer use: expected a pointer to 4 bytes of memory, but got ALLOC which is only 2 bytes from the end of the allocation +error: Undefined Behavior: pointer not dereferenceable: pointer must be dereferenceable for 4 bytes, but got ALLOC which is only 2 bytes from the end of the allocation --> RUSTLIB/alloc/src/boxed.rs:LL:CC | LL | Box(unsafe { Unique::new_unchecked(raw) }, alloc) - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ out-of-bounds pointer use: expected a pointer to 4 bytes of memory, but got ALLOC which is only 2 bytes from the end of the allocation + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ pointer not dereferenceable: pointer must be dereferenceable for 4 bytes, but got ALLOC which is only 2 bytes from the end of the allocation | = help: this indicates a bug in the program: it performed an invalid operation, and caused Undefined Behavior = help: see https://doc.rust-lang.org/nightly/reference/behavior-considered-undefined.html for further information diff --git a/src/tools/miri/tests/fail/both_borrows/issue-miri-1050-2.stack.stderr b/src/tools/miri/tests/fail/both_borrows/issue-miri-1050-2.stack.stderr index 04e5765371e7f..861173f549686 100644 --- a/src/tools/miri/tests/fail/both_borrows/issue-miri-1050-2.stack.stderr +++ b/src/tools/miri/tests/fail/both_borrows/issue-miri-1050-2.stack.stderr @@ -1,8 +1,8 @@ -error: Undefined Behavior: out-of-bounds pointer use: expected a pointer to 4 bytes of memory, but got 0x4[noalloc] which is a dangling pointer (it has no provenance) +error: Undefined Behavior: pointer not dereferenceable: pointer must be dereferenceable for 4 bytes, but got 0x4[noalloc] which is a dangling pointer (it has no provenance) --> RUSTLIB/alloc/src/boxed.rs:LL:CC | LL | Box(unsafe { Unique::new_unchecked(raw) }, alloc) - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ out-of-bounds pointer use: expected a pointer to 4 bytes of memory, but got 0x4[noalloc] which is a dangling pointer (it has no provenance) + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ pointer not dereferenceable: pointer must be dereferenceable for 4 bytes, but got 0x4[noalloc] which is a dangling pointer (it has no provenance) | = help: this indicates a bug in the program: it performed an invalid operation, and caused Undefined Behavior = help: see https://doc.rust-lang.org/nightly/reference/behavior-considered-undefined.html for further information diff --git a/src/tools/miri/tests/fail/both_borrows/issue-miri-1050-2.tree.stderr b/src/tools/miri/tests/fail/both_borrows/issue-miri-1050-2.tree.stderr index 04e5765371e7f..861173f549686 100644 --- a/src/tools/miri/tests/fail/both_borrows/issue-miri-1050-2.tree.stderr +++ b/src/tools/miri/tests/fail/both_borrows/issue-miri-1050-2.tree.stderr @@ -1,8 +1,8 @@ -error: Undefined Behavior: out-of-bounds pointer use: expected a pointer to 4 bytes of memory, but got 0x4[noalloc] which is a dangling pointer (it has no provenance) +error: Undefined Behavior: pointer not dereferenceable: pointer must be dereferenceable for 4 bytes, but got 0x4[noalloc] which is a dangling pointer (it has no provenance) --> RUSTLIB/alloc/src/boxed.rs:LL:CC | LL | Box(unsafe { Unique::new_unchecked(raw) }, alloc) - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ out-of-bounds pointer use: expected a pointer to 4 bytes of memory, but got 0x4[noalloc] which is a dangling pointer (it has no provenance) + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ pointer not dereferenceable: pointer must be dereferenceable for 4 bytes, but got 0x4[noalloc] which is a dangling pointer (it has no provenance) | = help: this indicates a bug in the program: it performed an invalid operation, and caused Undefined Behavior = help: see https://doc.rust-lang.org/nightly/reference/behavior-considered-undefined.html for further information diff --git a/src/tools/miri/tests/fail/both_borrows/retag_data_race_write.rs b/src/tools/miri/tests/fail/both_borrows/retag_data_race_write.rs index 0061679eaa4e7..31f1a22f9f6d5 100644 --- a/src/tools/miri/tests/fail/both_borrows/retag_data_race_write.rs +++ b/src/tools/miri/tests/fail/both_borrows/retag_data_race_write.rs @@ -1,8 +1,6 @@ //! Make sure that a retag acts like a write for the data race model. //@revisions: stack tree -//@compile-flags: -Zmiri-preemption-rate=0 -// Avoid accidental synchronization via address reuse inside `thread::spawn`. -//@compile-flags: -Zmiri-address-reuse-cross-thread-rate=0 +//@compile-flags: -Zmiri-deterministic-concurrency //@[tree]compile-flags: -Zmiri-tree-borrows #[derive(Copy, Clone)] struct SendPtr(*mut u8); diff --git a/src/tools/miri/tests/fail/dangling_pointers/dangling_pointer_offset.rs b/src/tools/miri/tests/fail/dangling_pointers/dangling_pointer_offset.rs index 65eca07a0708c..54b3280e71acf 100644 --- a/src/tools/miri/tests/fail/dangling_pointers/dangling_pointer_offset.rs +++ b/src/tools/miri/tests/fail/dangling_pointers/dangling_pointer_offset.rs @@ -6,6 +6,6 @@ fn main() { let b = Box::new(42); &*b as *const i32 }; - let x = unsafe { p.offset(42) }; //~ ERROR: /out-of-bounds pointer arithmetic: .* has been freed/ + let x = unsafe { p.offset(42) }; //~ ERROR: /in-bounds pointer arithmetic failed: .* has been freed/ panic!("this should never print: {:?}", x); } diff --git a/src/tools/miri/tests/fail/dangling_pointers/dangling_pointer_offset.stderr b/src/tools/miri/tests/fail/dangling_pointers/dangling_pointer_offset.stderr index 076d688046186..fd1a5e7faafe7 100644 --- a/src/tools/miri/tests/fail/dangling_pointers/dangling_pointer_offset.stderr +++ b/src/tools/miri/tests/fail/dangling_pointers/dangling_pointer_offset.stderr @@ -1,8 +1,8 @@ -error: Undefined Behavior: out-of-bounds pointer arithmetic: ALLOC has been freed, so this pointer is dangling +error: Undefined Behavior: in-bounds pointer arithmetic failed: ALLOC has been freed, so this pointer is dangling --> tests/fail/dangling_pointers/dangling_pointer_offset.rs:LL:CC | LL | let x = unsafe { p.offset(42) }; - | ^^^^^^^^^^^^ out-of-bounds pointer arithmetic: ALLOC has been freed, so this pointer is dangling + | ^^^^^^^^^^^^ in-bounds pointer arithmetic failed: ALLOC has been freed, so this pointer is dangling | = help: this indicates a bug in the program: it performed an invalid operation, and caused Undefined Behavior = help: see https://doc.rust-lang.org/nightly/reference/behavior-considered-undefined.html for further information diff --git a/src/tools/miri/tests/fail/dangling_pointers/dangling_pointer_project_underscore_let.rs b/src/tools/miri/tests/fail/dangling_pointers/dangling_pointer_project_underscore_let.rs index 22a5ce8ea7419..02ea09efab636 100644 --- a/src/tools/miri/tests/fail/dangling_pointers/dangling_pointer_project_underscore_let.rs +++ b/src/tools/miri/tests/fail/dangling_pointers/dangling_pointer_project_underscore_let.rs @@ -7,6 +7,6 @@ fn main() { &*b as *const i32 as *const (u8, u8, u8, u8) }; unsafe { - let _ = (*p).1; //~ ERROR: out-of-bounds pointer arithmetic + let _ = (*p).1; //~ ERROR: in-bounds pointer arithmetic failed } } diff --git a/src/tools/miri/tests/fail/dangling_pointers/dangling_pointer_project_underscore_let.stderr b/src/tools/miri/tests/fail/dangling_pointers/dangling_pointer_project_underscore_let.stderr index ffb525e398142..ffb8bc9507b56 100644 --- a/src/tools/miri/tests/fail/dangling_pointers/dangling_pointer_project_underscore_let.stderr +++ b/src/tools/miri/tests/fail/dangling_pointers/dangling_pointer_project_underscore_let.stderr @@ -1,8 +1,8 @@ -error: Undefined Behavior: out-of-bounds pointer arithmetic: ALLOC has been freed, so this pointer is dangling +error: Undefined Behavior: in-bounds pointer arithmetic failed: ALLOC has been freed, so this pointer is dangling --> tests/fail/dangling_pointers/dangling_pointer_project_underscore_let.rs:LL:CC | LL | let _ = (*p).1; - | ^^^^^^ out-of-bounds pointer arithmetic: ALLOC has been freed, so this pointer is dangling + | ^^^^^^ in-bounds pointer arithmetic failed: ALLOC has been freed, so this pointer is dangling | = help: this indicates a bug in the program: it performed an invalid operation, and caused Undefined Behavior = help: see https://doc.rust-lang.org/nightly/reference/behavior-considered-undefined.html for further information diff --git a/src/tools/miri/tests/fail/dangling_pointers/dangling_pointer_project_underscore_let_type_annotation.rs b/src/tools/miri/tests/fail/dangling_pointers/dangling_pointer_project_underscore_let_type_annotation.rs index fc10a826c1e14..7ab295cb6c6c1 100644 --- a/src/tools/miri/tests/fail/dangling_pointers/dangling_pointer_project_underscore_let_type_annotation.rs +++ b/src/tools/miri/tests/fail/dangling_pointers/dangling_pointer_project_underscore_let_type_annotation.rs @@ -7,6 +7,6 @@ fn main() { &*b as *const i32 as *const (u8, u8, u8, u8) }; unsafe { - let _: u8 = (*p).1; //~ ERROR: out-of-bounds pointer arithmetic + let _: u8 = (*p).1; //~ ERROR: in-bounds pointer arithmetic failed } } diff --git a/src/tools/miri/tests/fail/dangling_pointers/dangling_pointer_project_underscore_let_type_annotation.stderr b/src/tools/miri/tests/fail/dangling_pointers/dangling_pointer_project_underscore_let_type_annotation.stderr index 14dfa43b2d687..cf3e1db13d3d7 100644 --- a/src/tools/miri/tests/fail/dangling_pointers/dangling_pointer_project_underscore_let_type_annotation.stderr +++ b/src/tools/miri/tests/fail/dangling_pointers/dangling_pointer_project_underscore_let_type_annotation.stderr @@ -1,8 +1,8 @@ -error: Undefined Behavior: out-of-bounds pointer arithmetic: ALLOC has been freed, so this pointer is dangling +error: Undefined Behavior: in-bounds pointer arithmetic failed: ALLOC has been freed, so this pointer is dangling --> tests/fail/dangling_pointers/dangling_pointer_project_underscore_let_type_annotation.rs:LL:CC | LL | let _: u8 = (*p).1; - | ^^^^^^ out-of-bounds pointer arithmetic: ALLOC has been freed, so this pointer is dangling + | ^^^^^^ in-bounds pointer arithmetic failed: ALLOC has been freed, so this pointer is dangling | = help: this indicates a bug in the program: it performed an invalid operation, and caused Undefined Behavior = help: see https://doc.rust-lang.org/nightly/reference/behavior-considered-undefined.html for further information diff --git a/src/tools/miri/tests/fail/dangling_pointers/dangling_pointer_project_underscore_match.rs b/src/tools/miri/tests/fail/dangling_pointers/dangling_pointer_project_underscore_match.rs index 8541da848578b..2855dd13fdc4e 100644 --- a/src/tools/miri/tests/fail/dangling_pointers/dangling_pointer_project_underscore_match.rs +++ b/src/tools/miri/tests/fail/dangling_pointers/dangling_pointer_project_underscore_match.rs @@ -8,7 +8,7 @@ fn main() { }; unsafe { match (*p).1 { - //~^ ERROR: out-of-bounds pointer arithmetic + //~^ ERROR: in-bounds pointer arithmetic failed _ => {} } } diff --git a/src/tools/miri/tests/fail/dangling_pointers/dangling_pointer_project_underscore_match.stderr b/src/tools/miri/tests/fail/dangling_pointers/dangling_pointer_project_underscore_match.stderr index ff39e1475738f..e2d04433b6399 100644 --- a/src/tools/miri/tests/fail/dangling_pointers/dangling_pointer_project_underscore_match.stderr +++ b/src/tools/miri/tests/fail/dangling_pointers/dangling_pointer_project_underscore_match.stderr @@ -1,8 +1,8 @@ -error: Undefined Behavior: out-of-bounds pointer arithmetic: ALLOC has been freed, so this pointer is dangling +error: Undefined Behavior: in-bounds pointer arithmetic failed: ALLOC has been freed, so this pointer is dangling --> tests/fail/dangling_pointers/dangling_pointer_project_underscore_match.rs:LL:CC | LL | match (*p).1 { - | ^^^^^^ out-of-bounds pointer arithmetic: ALLOC has been freed, so this pointer is dangling + | ^^^^^^ in-bounds pointer arithmetic failed: ALLOC has been freed, so this pointer is dangling | = help: this indicates a bug in the program: it performed an invalid operation, and caused Undefined Behavior = help: see https://doc.rust-lang.org/nightly/reference/behavior-considered-undefined.html for further information diff --git a/src/tools/miri/tests/fail/dangling_pointers/dangling_pointer_to_raw_pointer.stderr b/src/tools/miri/tests/fail/dangling_pointers/dangling_pointer_to_raw_pointer.stderr index 99194d6e07256..5a2b85696abc8 100644 --- a/src/tools/miri/tests/fail/dangling_pointers/dangling_pointer_to_raw_pointer.stderr +++ b/src/tools/miri/tests/fail/dangling_pointers/dangling_pointer_to_raw_pointer.stderr @@ -1,8 +1,8 @@ -error: Undefined Behavior: out-of-bounds pointer use: expected a pointer to 4 bytes of memory, but got 0x10[noalloc] which is a dangling pointer (it has no provenance) +error: Undefined Behavior: pointer not dereferenceable: pointer must be dereferenceable for 4 bytes, but got 0x10[noalloc] which is a dangling pointer (it has no provenance) --> tests/fail/dangling_pointers/dangling_pointer_to_raw_pointer.rs:LL:CC | LL | unsafe { &(*x).0 as *const i32 } - | ^^^^^^^ out-of-bounds pointer use: expected a pointer to 4 bytes of memory, but got 0x10[noalloc] which is a dangling pointer (it has no provenance) + | ^^^^^^^ pointer not dereferenceable: pointer must be dereferenceable for 4 bytes, but got 0x10[noalloc] which is a dangling pointer (it has no provenance) | = help: this indicates a bug in the program: it performed an invalid operation, and caused Undefined Behavior = help: see https://doc.rust-lang.org/nightly/reference/behavior-considered-undefined.html for further information diff --git a/src/tools/miri/tests/fail/dangling_pointers/deref-invalid-ptr.stderr b/src/tools/miri/tests/fail/dangling_pointers/deref-invalid-ptr.stderr index 09a201983b1f3..ad4280c2d7447 100644 --- a/src/tools/miri/tests/fail/dangling_pointers/deref-invalid-ptr.stderr +++ b/src/tools/miri/tests/fail/dangling_pointers/deref-invalid-ptr.stderr @@ -1,8 +1,8 @@ -error: Undefined Behavior: out-of-bounds pointer use: expected a pointer to 4 bytes of memory, but got 0x10[noalloc] which is a dangling pointer (it has no provenance) +error: Undefined Behavior: pointer not dereferenceable: pointer must be dereferenceable for 4 bytes, but got 0x10[noalloc] which is a dangling pointer (it has no provenance) --> tests/fail/dangling_pointers/deref-invalid-ptr.rs:LL:CC | LL | let _y = unsafe { &*x as *const u32 }; - | ^^^ out-of-bounds pointer use: expected a pointer to 4 bytes of memory, but got 0x10[noalloc] which is a dangling pointer (it has no provenance) + | ^^^ pointer not dereferenceable: pointer must be dereferenceable for 4 bytes, but got 0x10[noalloc] which is a dangling pointer (it has no provenance) | = help: this indicates a bug in the program: it performed an invalid operation, and caused Undefined Behavior = help: see https://doc.rust-lang.org/nightly/reference/behavior-considered-undefined.html for further information diff --git a/src/tools/miri/tests/fail/dangling_pointers/null_pointer_deref.stderr b/src/tools/miri/tests/fail/dangling_pointers/null_pointer_deref.stderr index d87a8bc59e951..3135db9dc6d5b 100644 --- a/src/tools/miri/tests/fail/dangling_pointers/null_pointer_deref.stderr +++ b/src/tools/miri/tests/fail/dangling_pointers/null_pointer_deref.stderr @@ -1,8 +1,8 @@ -error: Undefined Behavior: memory access failed: expected a pointer to 4 bytes of memory, but got a null pointer +error: Undefined Behavior: memory access failed: attempting to access 4 bytes, but got null pointer --> tests/fail/dangling_pointers/null_pointer_deref.rs:LL:CC | LL | let x: i32 = unsafe { *std::ptr::null() }; - | ^^^^^^^^^^^^^^^^^ memory access failed: expected a pointer to 4 bytes of memory, but got a null pointer + | ^^^^^^^^^^^^^^^^^ memory access failed: attempting to access 4 bytes, but got null pointer | = help: this indicates a bug in the program: it performed an invalid operation, and caused Undefined Behavior = help: see https://doc.rust-lang.org/nightly/reference/behavior-considered-undefined.html for further information diff --git a/src/tools/miri/tests/fail/dangling_pointers/null_pointer_write.stderr b/src/tools/miri/tests/fail/dangling_pointers/null_pointer_write.stderr index 39d861a63882b..012b38ee5a6cd 100644 --- a/src/tools/miri/tests/fail/dangling_pointers/null_pointer_write.stderr +++ b/src/tools/miri/tests/fail/dangling_pointers/null_pointer_write.stderr @@ -1,8 +1,8 @@ -error: Undefined Behavior: memory access failed: expected a pointer to 4 bytes of memory, but got a null pointer +error: Undefined Behavior: memory access failed: attempting to access 4 bytes, but got null pointer --> tests/fail/dangling_pointers/null_pointer_write.rs:LL:CC | LL | unsafe { *std::ptr::null_mut() = 0i32 }; - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ memory access failed: expected a pointer to 4 bytes of memory, but got a null pointer + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ memory access failed: attempting to access 4 bytes, but got null pointer | = help: this indicates a bug in the program: it performed an invalid operation, and caused Undefined Behavior = help: see https://doc.rust-lang.org/nightly/reference/behavior-considered-undefined.html for further information diff --git a/src/tools/miri/tests/fail/dangling_pointers/out_of_bounds_project.rs b/src/tools/miri/tests/fail/dangling_pointers/out_of_bounds_project.rs index b596ba428ae59..7a2ad483d015f 100644 --- a/src/tools/miri/tests/fail/dangling_pointers/out_of_bounds_project.rs +++ b/src/tools/miri/tests/fail/dangling_pointers/out_of_bounds_project.rs @@ -7,6 +7,6 @@ fn main() { let ptr = addr_of!(v).cast::<(u32, u32, u32)>(); unsafe { let _field = addr_of!((*ptr).1); // still just in-bounds - let _field = addr_of!((*ptr).2); //~ ERROR: out-of-bounds pointer arithmetic + let _field = addr_of!((*ptr).2); //~ ERROR: in-bounds pointer arithmetic failed } } diff --git a/src/tools/miri/tests/fail/dangling_pointers/out_of_bounds_project.stderr b/src/tools/miri/tests/fail/dangling_pointers/out_of_bounds_project.stderr index 27a437c7483d0..c11ccdb45a7f6 100644 --- a/src/tools/miri/tests/fail/dangling_pointers/out_of_bounds_project.stderr +++ b/src/tools/miri/tests/fail/dangling_pointers/out_of_bounds_project.stderr @@ -1,8 +1,8 @@ -error: Undefined Behavior: out-of-bounds pointer arithmetic: expected a pointer to 8 bytes of memory, but got ALLOC which is only 4 bytes from the end of the allocation +error: Undefined Behavior: in-bounds pointer arithmetic failed: attempting to offset pointer by 8 bytes, but got ALLOC which is only 4 bytes from the end of the allocation --> tests/fail/dangling_pointers/out_of_bounds_project.rs:LL:CC | LL | let _field = addr_of!((*ptr).2); - | ^^^^^^^^^^^^^^^^^^ out-of-bounds pointer arithmetic: expected a pointer to 8 bytes of memory, but got ALLOC which is only 4 bytes from the end of the allocation + | ^^^^^^^^^^^^^^^^^^ in-bounds pointer arithmetic failed: attempting to offset pointer by 8 bytes, but got ALLOC which is only 4 bytes from the end of the allocation | = help: this indicates a bug in the program: it performed an invalid operation, and caused Undefined Behavior = help: see https://doc.rust-lang.org/nightly/reference/behavior-considered-undefined.html for further information diff --git a/src/tools/miri/tests/fail/dangling_pointers/out_of_bounds_read.rs b/src/tools/miri/tests/fail/dangling_pointers/out_of_bounds_read.rs index 595a229baa5aa..78a7b7e460bee 100644 --- a/src/tools/miri/tests/fail/dangling_pointers/out_of_bounds_read.rs +++ b/src/tools/miri/tests/fail/dangling_pointers/out_of_bounds_read.rs @@ -1,6 +1,6 @@ fn main() { let v: Vec = vec![1, 2]; // This read is also misaligned. We make sure that the OOB message has priority. - let x = unsafe { *v.as_ptr().wrapping_byte_add(5) }; //~ ERROR: expected a pointer to 2 bytes of memory + let x = unsafe { *v.as_ptr().wrapping_byte_add(5) }; //~ ERROR: attempting to access 2 bytes panic!("this should never print: {}", x); } diff --git a/src/tools/miri/tests/fail/dangling_pointers/out_of_bounds_read.stderr b/src/tools/miri/tests/fail/dangling_pointers/out_of_bounds_read.stderr index 813bcef54f12c..1de4b806da205 100644 --- a/src/tools/miri/tests/fail/dangling_pointers/out_of_bounds_read.stderr +++ b/src/tools/miri/tests/fail/dangling_pointers/out_of_bounds_read.stderr @@ -1,8 +1,8 @@ -error: Undefined Behavior: memory access failed: expected a pointer to 2 bytes of memory, but got ALLOC+0x5 which is at or beyond the end of the allocation of size 4 bytes +error: Undefined Behavior: memory access failed: attempting to access 2 bytes, but got ALLOC+0x5 which is at or beyond the end of the allocation of size 4 bytes --> tests/fail/dangling_pointers/out_of_bounds_read.rs:LL:CC | LL | let x = unsafe { *v.as_ptr().wrapping_byte_add(5) }; - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ memory access failed: expected a pointer to 2 bytes of memory, but got ALLOC+0x5 which is at or beyond the end of the allocation of size 4 bytes + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ memory access failed: attempting to access 2 bytes, but got ALLOC+0x5 which is at or beyond the end of the allocation of size 4 bytes | = help: this indicates a bug in the program: it performed an invalid operation, and caused Undefined Behavior = help: see https://doc.rust-lang.org/nightly/reference/behavior-considered-undefined.html for further information diff --git a/src/tools/miri/tests/fail/dangling_pointers/out_of_bounds_write.rs b/src/tools/miri/tests/fail/dangling_pointers/out_of_bounds_write.rs index 054e1c66cc18d..f83601b44f8d3 100644 --- a/src/tools/miri/tests/fail/dangling_pointers/out_of_bounds_write.rs +++ b/src/tools/miri/tests/fail/dangling_pointers/out_of_bounds_write.rs @@ -1,5 +1,5 @@ fn main() { let mut v: Vec = vec![1, 2]; // This read is also misaligned. We make sure that the OOB message has priority. - unsafe { *v.as_mut_ptr().wrapping_byte_add(5) = 0 }; //~ ERROR: expected a pointer to 2 bytes of memory + unsafe { *v.as_mut_ptr().wrapping_byte_add(5) = 0 }; //~ ERROR: attempting to access 2 bytes } diff --git a/src/tools/miri/tests/fail/dangling_pointers/out_of_bounds_write.stderr b/src/tools/miri/tests/fail/dangling_pointers/out_of_bounds_write.stderr index 1056a739a436f..db16d70704e8a 100644 --- a/src/tools/miri/tests/fail/dangling_pointers/out_of_bounds_write.stderr +++ b/src/tools/miri/tests/fail/dangling_pointers/out_of_bounds_write.stderr @@ -1,8 +1,8 @@ -error: Undefined Behavior: memory access failed: expected a pointer to 2 bytes of memory, but got ALLOC+0x5 which is at or beyond the end of the allocation of size 4 bytes +error: Undefined Behavior: memory access failed: attempting to access 2 bytes, but got ALLOC+0x5 which is at or beyond the end of the allocation of size 4 bytes --> tests/fail/dangling_pointers/out_of_bounds_write.rs:LL:CC | LL | unsafe { *v.as_mut_ptr().wrapping_byte_add(5) = 0 }; - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ memory access failed: expected a pointer to 2 bytes of memory, but got ALLOC+0x5 which is at or beyond the end of the allocation of size 4 bytes + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ memory access failed: attempting to access 2 bytes, but got ALLOC+0x5 which is at or beyond the end of the allocation of size 4 bytes | = help: this indicates a bug in the program: it performed an invalid operation, and caused Undefined Behavior = help: see https://doc.rust-lang.org/nightly/reference/behavior-considered-undefined.html for further information diff --git a/src/tools/miri/tests/fail/dangling_pointers/storage_dead_dangling.stderr b/src/tools/miri/tests/fail/dangling_pointers/storage_dead_dangling.stderr index 9061121494d0e..e97427ab7ebc2 100644 --- a/src/tools/miri/tests/fail/dangling_pointers/storage_dead_dangling.stderr +++ b/src/tools/miri/tests/fail/dangling_pointers/storage_dead_dangling.stderr @@ -1,8 +1,8 @@ -error: Undefined Behavior: out-of-bounds pointer use: expected a pointer to 4 bytes of memory, but got $HEX[noalloc] which is a dangling pointer (it has no provenance) +error: Undefined Behavior: pointer not dereferenceable: pointer must be dereferenceable for 4 bytes, but got $HEX[noalloc] which is a dangling pointer (it has no provenance) --> tests/fail/dangling_pointers/storage_dead_dangling.rs:LL:CC | LL | let _ref = unsafe { &mut *(LEAK as *mut i32) }; - | ^^^^^^^^^^^^^^^^^^^^^^^^ out-of-bounds pointer use: expected a pointer to 4 bytes of memory, but got $HEX[noalloc] which is a dangling pointer (it has no provenance) + | ^^^^^^^^^^^^^^^^^^^^^^^^ pointer not dereferenceable: pointer must be dereferenceable for 4 bytes, but got $HEX[noalloc] which is a dangling pointer (it has no provenance) | = help: this indicates a bug in the program: it performed an invalid operation, and caused Undefined Behavior = help: see https://doc.rust-lang.org/nightly/reference/behavior-considered-undefined.html for further information diff --git a/src/tools/miri/tests/fail/dangling_pointers/wild_pointer_deref.stderr b/src/tools/miri/tests/fail/dangling_pointers/wild_pointer_deref.stderr index 3e7aac4724dcc..79e27fa346174 100644 --- a/src/tools/miri/tests/fail/dangling_pointers/wild_pointer_deref.stderr +++ b/src/tools/miri/tests/fail/dangling_pointers/wild_pointer_deref.stderr @@ -1,8 +1,8 @@ -error: Undefined Behavior: memory access failed: expected a pointer to 4 bytes of memory, but got 0x2c[noalloc] which is a dangling pointer (it has no provenance) +error: Undefined Behavior: memory access failed: attempting to access 4 bytes, but got 0x2c[noalloc] which is a dangling pointer (it has no provenance) --> tests/fail/dangling_pointers/wild_pointer_deref.rs:LL:CC | LL | let x = unsafe { *p }; - | ^^ memory access failed: expected a pointer to 4 bytes of memory, but got 0x2c[noalloc] which is a dangling pointer (it has no provenance) + | ^^ memory access failed: attempting to access 4 bytes, but got 0x2c[noalloc] which is a dangling pointer (it has no provenance) | = help: this indicates a bug in the program: it performed an invalid operation, and caused Undefined Behavior = help: see https://doc.rust-lang.org/nightly/reference/behavior-considered-undefined.html for further information diff --git a/src/tools/miri/tests/fail/data_race/alloc_read_race.rs b/src/tools/miri/tests/fail/data_race/alloc_read_race.rs index 312b7ba05d31c..7c5116989943e 100644 --- a/src/tools/miri/tests/fail/data_race/alloc_read_race.rs +++ b/src/tools/miri/tests/fail/data_race/alloc_read_race.rs @@ -1,6 +1,5 @@ -//@compile-flags: -Zmiri-disable-weak-memory-emulation -Zmiri-preemption-rate=0 -Zmiri-disable-stacked-borrows -// Avoid accidental synchronization via address reuse inside `thread::spawn`. -//@compile-flags: -Zmiri-address-reuse-cross-thread-rate=0 +// We want to control preemption here. Stacked borrows interferes by having its own accesses. +//@compile-flags: -Zmiri-deterministic-concurrency -Zmiri-disable-stacked-borrows use std::mem::MaybeUninit; use std::ptr::null_mut; diff --git a/src/tools/miri/tests/fail/data_race/alloc_write_race.rs b/src/tools/miri/tests/fail/data_race/alloc_write_race.rs index f1f308b37e7bf..ba8a888de9ea0 100644 --- a/src/tools/miri/tests/fail/data_race/alloc_write_race.rs +++ b/src/tools/miri/tests/fail/data_race/alloc_write_race.rs @@ -1,6 +1,5 @@ -//@compile-flags: -Zmiri-disable-weak-memory-emulation -Zmiri-preemption-rate=0 -Zmiri-disable-stacked-borrows -// Avoid accidental synchronization via address reuse inside `thread::spawn`. -//@compile-flags: -Zmiri-address-reuse-cross-thread-rate=0 +// We want to control preemption here. Stacked borrows interferes by having its own accesses. +//@compile-flags: -Zmiri-deterministic-concurrency -Zmiri-disable-stacked-borrows use std::ptr::null_mut; use std::sync::atomic::{AtomicPtr, Ordering}; diff --git a/src/tools/miri/tests/fail/data_race/atomic_read_na_write_race1.rs b/src/tools/miri/tests/fail/data_race/atomic_read_na_write_race1.rs index 4003892f0a609..8cce54603ce57 100644 --- a/src/tools/miri/tests/fail/data_race/atomic_read_na_write_race1.rs +++ b/src/tools/miri/tests/fail/data_race/atomic_read_na_write_race1.rs @@ -1,7 +1,5 @@ // We want to control preemption here. Stacked borrows interferes by having its own accesses. -//@compile-flags: -Zmiri-preemption-rate=0 -Zmiri-disable-stacked-borrows -// Avoid accidental synchronization via address reuse inside `thread::spawn`. -//@compile-flags: -Zmiri-address-reuse-cross-thread-rate=0 +//@compile-flags: -Zmiri-deterministic-concurrency -Zmiri-disable-stacked-borrows use std::sync::atomic::{AtomicUsize, Ordering}; use std::thread::spawn; diff --git a/src/tools/miri/tests/fail/data_race/atomic_read_na_write_race2.rs b/src/tools/miri/tests/fail/data_race/atomic_read_na_write_race2.rs index c67ce65eb3404..b6c0ef37cb920 100644 --- a/src/tools/miri/tests/fail/data_race/atomic_read_na_write_race2.rs +++ b/src/tools/miri/tests/fail/data_race/atomic_read_na_write_race2.rs @@ -1,7 +1,5 @@ // We want to control preemption here. Stacked borrows interferes by having its own accesses. -//@compile-flags: -Zmiri-preemption-rate=0 -Zmiri-disable-stacked-borrows -// Avoid accidental synchronization via address reuse inside `thread::spawn`. -//@compile-flags: -Zmiri-address-reuse-cross-thread-rate=0 +//@compile-flags: -Zmiri-deterministic-concurrency -Zmiri-disable-stacked-borrows use std::sync::atomic::{AtomicUsize, Ordering}; use std::thread::spawn; diff --git a/src/tools/miri/tests/fail/data_race/atomic_write_na_read_race1.rs b/src/tools/miri/tests/fail/data_race/atomic_write_na_read_race1.rs index 5e328740e8517..03ae6895c5745 100644 --- a/src/tools/miri/tests/fail/data_race/atomic_write_na_read_race1.rs +++ b/src/tools/miri/tests/fail/data_race/atomic_write_na_read_race1.rs @@ -1,7 +1,5 @@ // We want to control preemption here. Stacked borrows interferes by having its own accesses. -//@compile-flags: -Zmiri-preemption-rate=0 -Zmiri-disable-stacked-borrows -// Avoid accidental synchronization via address reuse inside `thread::spawn`. -//@compile-flags: -Zmiri-address-reuse-cross-thread-rate=0 +//@compile-flags: -Zmiri-deterministic-concurrency -Zmiri-disable-stacked-borrows use std::sync::atomic::{AtomicUsize, Ordering}; use std::thread::spawn; diff --git a/src/tools/miri/tests/fail/data_race/atomic_write_na_read_race2.rs b/src/tools/miri/tests/fail/data_race/atomic_write_na_read_race2.rs index e0876a93fdd8c..4a5edf5cc14dd 100644 --- a/src/tools/miri/tests/fail/data_race/atomic_write_na_read_race2.rs +++ b/src/tools/miri/tests/fail/data_race/atomic_write_na_read_race2.rs @@ -1,7 +1,5 @@ // We want to control preemption here. Stacked borrows interferes by having its own accesses. -//@compile-flags: -Zmiri-preemption-rate=0 -Zmiri-disable-stacked-borrows -// Avoid accidental synchronization via address reuse inside `thread::spawn`. -//@compile-flags: -Zmiri-address-reuse-cross-thread-rate=0 +//@compile-flags: -Zmiri-deterministic-concurrency -Zmiri-disable-stacked-borrows use std::sync::atomic::{AtomicUsize, Ordering}; use std::thread::spawn; diff --git a/src/tools/miri/tests/fail/data_race/atomic_write_na_write_race1.rs b/src/tools/miri/tests/fail/data_race/atomic_write_na_write_race1.rs index 1010216a4976b..e8d930a51dee3 100644 --- a/src/tools/miri/tests/fail/data_race/atomic_write_na_write_race1.rs +++ b/src/tools/miri/tests/fail/data_race/atomic_write_na_write_race1.rs @@ -1,7 +1,5 @@ // We want to control preemption here. Stacked borrows interferes by having its own accesses. -//@compile-flags: -Zmiri-preemption-rate=0 -Zmiri-disable-stacked-borrows -// Avoid accidental synchronization via address reuse inside `thread::spawn`. -//@compile-flags: -Zmiri-address-reuse-cross-thread-rate=0 +//@compile-flags: -Zmiri-deterministic-concurrency -Zmiri-disable-stacked-borrows use std::sync::atomic::{AtomicUsize, Ordering}; use std::thread::spawn; diff --git a/src/tools/miri/tests/fail/data_race/atomic_write_na_write_race2.rs b/src/tools/miri/tests/fail/data_race/atomic_write_na_write_race2.rs index fdc0f9e20f079..4c67d2d765415 100644 --- a/src/tools/miri/tests/fail/data_race/atomic_write_na_write_race2.rs +++ b/src/tools/miri/tests/fail/data_race/atomic_write_na_write_race2.rs @@ -1,7 +1,5 @@ // We want to control preemption here. Stacked borrows interferes by having its own accesses. -//@compile-flags: -Zmiri-preemption-rate=0 -Zmiri-disable-stacked-borrows -// Avoid accidental synchronization via address reuse inside `thread::spawn`. -//@compile-flags: -Zmiri-address-reuse-cross-thread-rate=0 +//@compile-flags: -Zmiri-deterministic-concurrency -Zmiri-disable-stacked-borrows use std::sync::atomic::{AtomicUsize, Ordering}; use std::thread::spawn; diff --git a/src/tools/miri/tests/fail/data_race/dangling_thread_async_race.rs b/src/tools/miri/tests/fail/data_race/dangling_thread_async_race.rs index dffafe3cfaa97..fbb2c01e5a9aa 100644 --- a/src/tools/miri/tests/fail/data_race/dangling_thread_async_race.rs +++ b/src/tools/miri/tests/fail/data_race/dangling_thread_async_race.rs @@ -1,7 +1,5 @@ // We want to control preemption here. Stacked borrows interferes by having its own accesses. -//@compile-flags: -Zmiri-preemption-rate=0 -Zmiri-disable-stacked-borrows -// Avoid accidental synchronization via address reuse. -//@compile-flags: -Zmiri-address-reuse-cross-thread-rate=0 +//@compile-flags: -Zmiri-deterministic-concurrency -Zmiri-disable-stacked-borrows use std::mem; use std::thread::{sleep, spawn}; diff --git a/src/tools/miri/tests/fail/data_race/dangling_thread_race.rs b/src/tools/miri/tests/fail/data_race/dangling_thread_race.rs index 8dc35c7ea720e..7431bc589ff95 100644 --- a/src/tools/miri/tests/fail/data_race/dangling_thread_race.rs +++ b/src/tools/miri/tests/fail/data_race/dangling_thread_race.rs @@ -1,7 +1,5 @@ // We want to control preemption here. Stacked borrows interferes by having its own accesses. -//@compile-flags: -Zmiri-preemption-rate=0 -Zmiri-disable-stacked-borrows -// Avoid accidental synchronization via address reuse. -//@compile-flags: -Zmiri-address-reuse-cross-thread-rate=0 +//@compile-flags: -Zmiri-deterministic-concurrency -Zmiri-disable-stacked-borrows use std::mem; use std::thread::{sleep, spawn}; diff --git a/src/tools/miri/tests/fail/data_race/dealloc_read_race1.rs b/src/tools/miri/tests/fail/data_race/dealloc_read_race1.rs index d0a2848205410..999cc2392f5af 100644 --- a/src/tools/miri/tests/fail/data_race/dealloc_read_race1.rs +++ b/src/tools/miri/tests/fail/data_race/dealloc_read_race1.rs @@ -1,7 +1,5 @@ // We want to control preemption here. Stacked borrows interferes by having its own accesses. -//@compile-flags: -Zmiri-preemption-rate=0 -Zmiri-disable-stacked-borrows -// Avoid accidental synchronization via address reuse inside `thread::spawn`. -//@compile-flags: -Zmiri-address-reuse-cross-thread-rate=0 +//@compile-flags: -Zmiri-deterministic-concurrency -Zmiri-disable-stacked-borrows #![feature(rustc_attrs)] diff --git a/src/tools/miri/tests/fail/data_race/dealloc_read_race2.rs b/src/tools/miri/tests/fail/data_race/dealloc_read_race2.rs index f56c44cabc234..bd3b037e58381 100644 --- a/src/tools/miri/tests/fail/data_race/dealloc_read_race2.rs +++ b/src/tools/miri/tests/fail/data_race/dealloc_read_race2.rs @@ -1,7 +1,5 @@ // We want to control preemption here. Stacked borrows interferes by having its own accesses. -//@compile-flags: -Zmiri-preemption-rate=0 -Zmiri-disable-stacked-borrows -// Avoid accidental synchronization via address reuse inside `thread::spawn`. -//@compile-flags: -Zmiri-address-reuse-cross-thread-rate=0 +//@compile-flags: -Zmiri-deterministic-concurrency -Zmiri-disable-stacked-borrows #![feature(rustc_attrs)] diff --git a/src/tools/miri/tests/fail/data_race/dealloc_read_race_stack.rs b/src/tools/miri/tests/fail/data_race/dealloc_read_race_stack.rs index c67e03d362b05..e3d06660aab34 100644 --- a/src/tools/miri/tests/fail/data_race/dealloc_read_race_stack.rs +++ b/src/tools/miri/tests/fail/data_race/dealloc_read_race_stack.rs @@ -1,6 +1,5 @@ -//@compile-flags: -Zmiri-disable-weak-memory-emulation -Zmiri-preemption-rate=0 -Zmiri-disable-stacked-borrows -// Avoid accidental synchronization via address reuse inside `thread::spawn`. -//@compile-flags: -Zmiri-address-reuse-cross-thread-rate=0 +// We want to control preemption here. Stacked borrows interferes by having its own accesses. +//@compile-flags: -Zmiri-deterministic-concurrency -Zmiri-disable-stacked-borrows use std::ptr::null_mut; use std::sync::atomic::{AtomicPtr, Ordering}; diff --git a/src/tools/miri/tests/fail/data_race/dealloc_write_race1.rs b/src/tools/miri/tests/fail/data_race/dealloc_write_race1.rs index a16ea25e11ce8..90e87f8c49564 100644 --- a/src/tools/miri/tests/fail/data_race/dealloc_write_race1.rs +++ b/src/tools/miri/tests/fail/data_race/dealloc_write_race1.rs @@ -1,7 +1,5 @@ // We want to control preemption here. Stacked borrows interferes by having its own accesses. -//@compile-flags: -Zmiri-preemption-rate=0 -Zmiri-disable-stacked-borrows -// Avoid accidental synchronization via address reuse inside `thread::spawn`. -//@compile-flags: -Zmiri-address-reuse-cross-thread-rate=0 +//@compile-flags: -Zmiri-deterministic-concurrency -Zmiri-disable-stacked-borrows #![feature(rustc_attrs)] diff --git a/src/tools/miri/tests/fail/data_race/dealloc_write_race2.rs b/src/tools/miri/tests/fail/data_race/dealloc_write_race2.rs index f3855e33c98d9..d9b1af80af493 100644 --- a/src/tools/miri/tests/fail/data_race/dealloc_write_race2.rs +++ b/src/tools/miri/tests/fail/data_race/dealloc_write_race2.rs @@ -1,7 +1,5 @@ // We want to control preemption here. Stacked borrows interferes by having its own accesses. -//@compile-flags: -Zmiri-preemption-rate=0 -Zmiri-disable-stacked-borrows -// Avoid accidental synchronization via address reuse inside `thread::spawn`. -//@compile-flags: -Zmiri-address-reuse-cross-thread-rate=0 +//@compile-flags: -Zmiri-deterministic-concurrency -Zmiri-disable-stacked-borrows #![feature(rustc_attrs)] diff --git a/src/tools/miri/tests/fail/data_race/dealloc_write_race_stack.rs b/src/tools/miri/tests/fail/data_race/dealloc_write_race_stack.rs index 8e63bc1dc7b41..c1ab1942c6884 100644 --- a/src/tools/miri/tests/fail/data_race/dealloc_write_race_stack.rs +++ b/src/tools/miri/tests/fail/data_race/dealloc_write_race_stack.rs @@ -1,6 +1,5 @@ -//@compile-flags: -Zmiri-disable-weak-memory-emulation -Zmiri-preemption-rate=0 -Zmiri-disable-stacked-borrows -// Avoid accidental synchronization via address reuse inside `thread::spawn`. -//@compile-flags: -Zmiri-address-reuse-cross-thread-rate=0 +// We want to control preemption here. Stacked borrows interferes by having its own accesses. +//@compile-flags: -Zmiri-deterministic-concurrency -Zmiri-disable-stacked-borrows use std::ptr::null_mut; use std::sync::atomic::{AtomicPtr, Ordering}; diff --git a/src/tools/miri/tests/fail/data_race/enable_after_join_to_main.rs b/src/tools/miri/tests/fail/data_race/enable_after_join_to_main.rs index 53050608d2715..67af6862737dc 100644 --- a/src/tools/miri/tests/fail/data_race/enable_after_join_to_main.rs +++ b/src/tools/miri/tests/fail/data_race/enable_after_join_to_main.rs @@ -1,7 +1,5 @@ // We want to control preemption here. Stacked borrows interferes by having its own accesses. -//@compile-flags: -Zmiri-preemption-rate=0 -Zmiri-disable-stacked-borrows -// Avoid accidental synchronization via address reuse inside `thread::spawn`. -//@compile-flags: -Zmiri-address-reuse-cross-thread-rate=0 +//@compile-flags: -Zmiri-deterministic-concurrency -Zmiri-disable-stacked-borrows use std::thread::spawn; diff --git a/src/tools/miri/tests/fail/data_race/fence_after_load.rs b/src/tools/miri/tests/fail/data_race/fence_after_load.rs index 5dfb260c20bdc..b1eb1dda6d886 100644 --- a/src/tools/miri/tests/fail/data_race/fence_after_load.rs +++ b/src/tools/miri/tests/fail/data_race/fence_after_load.rs @@ -1,7 +1,5 @@ // We want to control preemption here. Stacked borrows interferes by having its own accesses. -//@compile-flags: -Zmiri-preemption-rate=0 -Zmiri-disable-stacked-borrows -// Avoid accidental synchronization via address reuse inside `thread::spawn`. -//@compile-flags: -Zmiri-address-reuse-cross-thread-rate=0 +//@compile-flags: -Zmiri-deterministic-concurrency -Zmiri-disable-stacked-borrows use std::sync::Arc; use std::sync::atomic::{AtomicUsize, Ordering, fence}; diff --git a/src/tools/miri/tests/fail/data_race/local_variable_alloc_race.rs b/src/tools/miri/tests/fail/data_race/local_variable_alloc_race.rs index 751a308a39998..647f209e8bf88 100644 --- a/src/tools/miri/tests/fail/data_race/local_variable_alloc_race.rs +++ b/src/tools/miri/tests/fail/data_race/local_variable_alloc_race.rs @@ -1,4 +1,4 @@ -//@compile-flags: -Zmiri-preemption-rate=0.0 -Zmiri-disable-weak-memory-emulation +//@compile-flags:-Zmiri-deterministic-concurrency #![feature(core_intrinsics)] #![feature(custom_mir)] diff --git a/src/tools/miri/tests/fail/data_race/local_variable_read_race.rs b/src/tools/miri/tests/fail/data_race/local_variable_read_race.rs index 16a23f595ee47..f83d6c89fe5f9 100644 --- a/src/tools/miri/tests/fail/data_race/local_variable_read_race.rs +++ b/src/tools/miri/tests/fail/data_race/local_variable_read_race.rs @@ -1,4 +1,4 @@ -//@compile-flags: -Zmiri-preemption-rate=0.0 -Zmiri-disable-weak-memory-emulation +//@compile-flags:-Zmiri-deterministic-concurrency use std::sync::atomic::Ordering::*; use std::sync::atomic::*; diff --git a/src/tools/miri/tests/fail/data_race/local_variable_write_race.rs b/src/tools/miri/tests/fail/data_race/local_variable_write_race.rs index 7e00573146c25..ee1bef7ba5c08 100644 --- a/src/tools/miri/tests/fail/data_race/local_variable_write_race.rs +++ b/src/tools/miri/tests/fail/data_race/local_variable_write_race.rs @@ -1,4 +1,4 @@ -//@compile-flags: -Zmiri-preemption-rate=0.0 -Zmiri-disable-weak-memory-emulation +//@compile-flags:-Zmiri-deterministic-concurrency use std::sync::atomic::Ordering::*; use std::sync::atomic::*; diff --git a/src/tools/miri/tests/fail/data_race/mixed_size_read_read_write.rs b/src/tools/miri/tests/fail/data_race/mixed_size_read_read_write.rs index e76654806bb1e..720602d011e36 100644 --- a/src/tools/miri/tests/fail/data_race/mixed_size_read_read_write.rs +++ b/src/tools/miri/tests/fail/data_race/mixed_size_read_read_write.rs @@ -1,6 +1,4 @@ -//@compile-flags: -Zmiri-preemption-rate=0.0 -Zmiri-disable-weak-memory-emulation -// Avoid accidental synchronization via address reuse inside `thread::spawn`. -//@compile-flags: -Zmiri-address-reuse-cross-thread-rate=0 +//@compile-flags:-Zmiri-deterministic-concurrency // Two variants: the atomic store matches the size of the first or second atomic load. //@revisions: match_first_load match_second_load diff --git a/src/tools/miri/tests/fail/data_race/mixed_size_read_write.rs b/src/tools/miri/tests/fail/data_race/mixed_size_read_write.rs index 53016bab78045..78bba173ed7d0 100644 --- a/src/tools/miri/tests/fail/data_race/mixed_size_read_write.rs +++ b/src/tools/miri/tests/fail/data_race/mixed_size_read_write.rs @@ -1,6 +1,4 @@ -//@compile-flags: -Zmiri-preemption-rate=0.0 -Zmiri-disable-weak-memory-emulation -// Avoid accidental synchronization via address reuse inside `thread::spawn`. -//@compile-flags: -Zmiri-address-reuse-cross-thread-rate=0 +//@compile-flags:-Zmiri-deterministic-concurrency // Two revisions, depending on which access goes first. //@revisions: read_write write_read diff --git a/src/tools/miri/tests/fail/data_race/mixed_size_write_write.rs b/src/tools/miri/tests/fail/data_race/mixed_size_write_write.rs index 545e354a0372c..808280a4b3113 100644 --- a/src/tools/miri/tests/fail/data_race/mixed_size_write_write.rs +++ b/src/tools/miri/tests/fail/data_race/mixed_size_write_write.rs @@ -1,6 +1,4 @@ -//@compile-flags: -Zmiri-preemption-rate=0.0 -Zmiri-disable-weak-memory-emulation -// Avoid accidental synchronization via address reuse inside `thread::spawn`. -//@compile-flags: -Zmiri-address-reuse-cross-thread-rate=0 +//@compile-flags:-Zmiri-deterministic-concurrency //@revisions: fst snd use std::sync::atomic::{AtomicU8, AtomicU16, Ordering}; diff --git a/src/tools/miri/tests/fail/data_race/read_write_race.rs b/src/tools/miri/tests/fail/data_race/read_write_race.rs index adf19dda9d3db..2aadef36c5b95 100644 --- a/src/tools/miri/tests/fail/data_race/read_write_race.rs +++ b/src/tools/miri/tests/fail/data_race/read_write_race.rs @@ -1,7 +1,5 @@ // We want to control preemption here. Stacked borrows interferes by having its own accesses. -//@compile-flags: -Zmiri-preemption-rate=0 -Zmiri-disable-stacked-borrows -// Avoid accidental synchronization via address reuse inside `thread::spawn`. -//@compile-flags: -Zmiri-address-reuse-cross-thread-rate=0 +//@compile-flags: -Zmiri-deterministic-concurrency -Zmiri-disable-stacked-borrows use std::thread::spawn; diff --git a/src/tools/miri/tests/fail/data_race/read_write_race_stack.rs b/src/tools/miri/tests/fail/data_race/read_write_race_stack.rs index f411767f7b57e..cca39bb002c17 100644 --- a/src/tools/miri/tests/fail/data_race/read_write_race_stack.rs +++ b/src/tools/miri/tests/fail/data_race/read_write_race_stack.rs @@ -1,6 +1,5 @@ -//@compile-flags: -Zmiri-disable-weak-memory-emulation -Zmiri-preemption-rate=0 -Zmiri-disable-stacked-borrows -// Avoid accidental synchronization via address reuse inside `thread::spawn`. -//@compile-flags: -Zmiri-address-reuse-cross-thread-rate=0 +// We want to control preemption here. Stacked borrows interferes by having its own accesses. +//@compile-flags: -Zmiri-deterministic-concurrency -Zmiri-disable-stacked-borrows use std::ptr::null_mut; use std::sync::atomic::{AtomicPtr, Ordering}; diff --git a/src/tools/miri/tests/fail/data_race/relax_acquire_race.rs b/src/tools/miri/tests/fail/data_race/relax_acquire_race.rs index c4f943808229c..262c039e4ae17 100644 --- a/src/tools/miri/tests/fail/data_race/relax_acquire_race.rs +++ b/src/tools/miri/tests/fail/data_race/relax_acquire_race.rs @@ -1,6 +1,5 @@ -//@compile-flags: -Zmiri-disable-weak-memory-emulation -Zmiri-preemption-rate=0 -Zmiri-disable-stacked-borrows -// Avoid accidental synchronization via address reuse inside `thread::spawn`. -//@compile-flags: -Zmiri-address-reuse-cross-thread-rate=0 +// We want to control preemption here. Stacked borrows interferes by having its own accesses. +//@compile-flags: -Zmiri-deterministic-concurrency -Zmiri-disable-stacked-borrows use std::sync::atomic::{AtomicUsize, Ordering}; use std::thread::spawn; diff --git a/src/tools/miri/tests/fail/data_race/release_seq_race.rs b/src/tools/miri/tests/fail/data_race/release_seq_race.rs index f03ab3efa0624..8aeb6ee6ef1d1 100644 --- a/src/tools/miri/tests/fail/data_race/release_seq_race.rs +++ b/src/tools/miri/tests/fail/data_race/release_seq_race.rs @@ -1,6 +1,5 @@ -//@compile-flags: -Zmiri-disable-weak-memory-emulation -Zmiri-preemption-rate=0 -Zmiri-disable-stacked-borrows -// Avoid accidental synchronization via address reuse inside `thread::spawn`. -//@compile-flags: -Zmiri-address-reuse-cross-thread-rate=0 +// We want to control preemption here. Stacked borrows interferes by having its own accesses. +//@compile-flags: -Zmiri-deterministic-concurrency -Zmiri-disable-stacked-borrows use std::sync::atomic::{AtomicUsize, Ordering}; use std::thread::{sleep, spawn}; diff --git a/src/tools/miri/tests/fail/data_race/release_seq_race_same_thread.rs b/src/tools/miri/tests/fail/data_race/release_seq_race_same_thread.rs index 88ae01b3ca1c7..f465160718f45 100644 --- a/src/tools/miri/tests/fail/data_race/release_seq_race_same_thread.rs +++ b/src/tools/miri/tests/fail/data_race/release_seq_race_same_thread.rs @@ -1,6 +1,5 @@ -//@compile-flags: -Zmiri-disable-weak-memory-emulation -Zmiri-preemption-rate=0 -Zmiri-disable-stacked-borrows -// Avoid accidental synchronization via address reuse inside `thread::spawn`. -//@compile-flags: -Zmiri-address-reuse-cross-thread-rate=0 +// We want to control preemption here. Stacked borrows interferes by having its own accesses. +//@compile-flags: -Zmiri-deterministic-concurrency -Zmiri-disable-stacked-borrows use std::sync::atomic::{AtomicUsize, Ordering}; use std::thread::spawn; diff --git a/src/tools/miri/tests/fail/data_race/rmw_race.rs b/src/tools/miri/tests/fail/data_race/rmw_race.rs index d738caa105879..39588c15ec7ea 100644 --- a/src/tools/miri/tests/fail/data_race/rmw_race.rs +++ b/src/tools/miri/tests/fail/data_race/rmw_race.rs @@ -1,6 +1,5 @@ -//@compile-flags: -Zmiri-disable-weak-memory-emulation -Zmiri-preemption-rate=0 -Zmiri-disable-stacked-borrows -// Avoid accidental synchronization via address reuse inside `thread::spawn`. -//@compile-flags: -Zmiri-address-reuse-cross-thread-rate=0 +// We want to control preemption here. Stacked borrows interferes by having its own accesses. +//@compile-flags: -Zmiri-deterministic-concurrency -Zmiri-disable-stacked-borrows use std::sync::atomic::{AtomicUsize, Ordering}; use std::thread::spawn; diff --git a/src/tools/miri/tests/fail/data_race/stack_pop_race.rs b/src/tools/miri/tests/fail/data_race/stack_pop_race.rs index 762a8e51f692a..5138bcbf8f74d 100644 --- a/src/tools/miri/tests/fail/data_race/stack_pop_race.rs +++ b/src/tools/miri/tests/fail/data_race/stack_pop_race.rs @@ -1,6 +1,5 @@ -//@compile-flags: -Zmiri-preemption-rate=0 -Zmiri-disable-stacked-borrows -// Avoid accidental synchronization via address reuse inside `thread::spawn`. -//@compile-flags: -Zmiri-address-reuse-cross-thread-rate=0 +// We want to control preemption here. Stacked borrows interferes by having its own accesses. +//@compile-flags: -Zmiri-deterministic-concurrency -Zmiri-disable-stacked-borrows use std::thread; diff --git a/src/tools/miri/tests/fail/data_race/write_write_race.rs b/src/tools/miri/tests/fail/data_race/write_write_race.rs index 993d8d25b4c17..b1a6b08b4c886 100644 --- a/src/tools/miri/tests/fail/data_race/write_write_race.rs +++ b/src/tools/miri/tests/fail/data_race/write_write_race.rs @@ -1,7 +1,5 @@ -// We want to control preemption here. -//@compile-flags: -Zmiri-preemption-rate=0 -Zmiri-disable-stacked-borrows -// Avoid accidental synchronization via address reuse inside `thread::spawn`. -//@compile-flags: -Zmiri-address-reuse-cross-thread-rate=0 +// We want to control preemption here. Stacked borrows interferes by having its own accesses. +//@compile-flags: -Zmiri-deterministic-concurrency -Zmiri-disable-stacked-borrows use std::thread::spawn; diff --git a/src/tools/miri/tests/fail/data_race/write_write_race_stack.rs b/src/tools/miri/tests/fail/data_race/write_write_race_stack.rs index 8070a7f4fc2a0..cd21b0a8fa6c1 100644 --- a/src/tools/miri/tests/fail/data_race/write_write_race_stack.rs +++ b/src/tools/miri/tests/fail/data_race/write_write_race_stack.rs @@ -1,6 +1,5 @@ -//@compile-flags: -Zmiri-disable-weak-memory-emulation -Zmiri-preemption-rate=0 -Zmiri-disable-stacked-borrows -// Avoid accidental synchronization via address reuse inside `thread::spawn`. -//@compile-flags: -Zmiri-address-reuse-cross-thread-rate=0 +// We want to control preemption here. Stacked borrows interferes by having its own accesses. +//@compile-flags: -Zmiri-deterministic-concurrency -Zmiri-disable-stacked-borrows use std::ptr::null_mut; use std::sync::atomic::{AtomicPtr, Ordering}; diff --git a/src/tools/miri/tests/fail/function_pointers/cast_int_to_fn_ptr.stderr b/src/tools/miri/tests/fail/function_pointers/cast_int_to_fn_ptr.stderr index f2d9933188d7b..37d3beefcd7bc 100644 --- a/src/tools/miri/tests/fail/function_pointers/cast_int_to_fn_ptr.stderr +++ b/src/tools/miri/tests/fail/function_pointers/cast_int_to_fn_ptr.stderr @@ -1,8 +1,8 @@ -error: Undefined Behavior: out-of-bounds pointer use: expected a pointer to some allocation, but got 0x2a[noalloc] which is a dangling pointer (it has no provenance) +error: Undefined Behavior: pointer not dereferenceable: pointer must point to some allocation, but got 0x2a[noalloc] which is a dangling pointer (it has no provenance) --> tests/fail/function_pointers/cast_int_to_fn_ptr.rs:LL:CC | LL | g(42) - | ^^^^^ out-of-bounds pointer use: expected a pointer to some allocation, but got 0x2a[noalloc] which is a dangling pointer (it has no provenance) + | ^^^^^ pointer not dereferenceable: pointer must point to some allocation, but got 0x2a[noalloc] which is a dangling pointer (it has no provenance) | = help: this indicates a bug in the program: it performed an invalid operation, and caused Undefined Behavior = help: see https://doc.rust-lang.org/nightly/reference/behavior-considered-undefined.html for further information diff --git a/src/tools/miri/tests/fail/intrinsics/ctlz_nonzero.rs b/src/tools/miri/tests/fail/intrinsics/ctlz_nonzero.rs index f73c1b6acb70f..3da54b9188262 100644 --- a/src/tools/miri/tests/fail/intrinsics/ctlz_nonzero.rs +++ b/src/tools/miri/tests/fail/intrinsics/ctlz_nonzero.rs @@ -1,13 +1,8 @@ -#![feature(intrinsics)] - -mod rusti { - #[rustc_intrinsic] - pub unsafe fn ctlz_nonzero(x: T) -> u32; -} +#![feature(core_intrinsics)] pub fn main() { unsafe { - use crate::rusti::*; + use std::intrinsics::*; ctlz_nonzero(0u8); //~ ERROR: `ctlz_nonzero` called on 0 } diff --git a/src/tools/miri/tests/fail/intrinsics/cttz_nonzero.rs b/src/tools/miri/tests/fail/intrinsics/cttz_nonzero.rs index a41cb8b15536f..2b68f6713d806 100644 --- a/src/tools/miri/tests/fail/intrinsics/cttz_nonzero.rs +++ b/src/tools/miri/tests/fail/intrinsics/cttz_nonzero.rs @@ -1,13 +1,8 @@ -#![feature(intrinsics)] - -mod rusti { - #[rustc_intrinsic] - pub unsafe fn cttz_nonzero(x: T) -> u32; -} +#![feature(core_intrinsics)] pub fn main() { unsafe { - use crate::rusti::*; + use std::intrinsics::*; cttz_nonzero(0u8); //~ ERROR: `cttz_nonzero` called on 0 } diff --git a/src/tools/miri/tests/fail/intrinsics/float_to_int_32_inf1.rs b/src/tools/miri/tests/fail/intrinsics/float_to_int_32_inf1.rs index 7ee0117ffb33f..831a65966ce20 100644 --- a/src/tools/miri/tests/fail/intrinsics/float_to_int_32_inf1.rs +++ b/src/tools/miri/tests/fail/intrinsics/float_to_int_32_inf1.rs @@ -1,8 +1,6 @@ -#![feature(intrinsics)] - +#![feature(core_intrinsics)] // Directly call intrinsic to avoid debug assertions in libstd -#[rustc_intrinsic] -unsafe fn float_to_int_unchecked(value: Float) -> Int; +use std::intrinsics::float_to_int_unchecked; fn main() { unsafe { diff --git a/src/tools/miri/tests/fail/intrinsics/float_to_int_32_infneg1.rs b/src/tools/miri/tests/fail/intrinsics/float_to_int_32_infneg1.rs index 22bf881cef0ac..a7032e97430e7 100644 --- a/src/tools/miri/tests/fail/intrinsics/float_to_int_32_infneg1.rs +++ b/src/tools/miri/tests/fail/intrinsics/float_to_int_32_infneg1.rs @@ -1,8 +1,6 @@ -#![feature(intrinsics)] - +#![feature(core_intrinsics)] // Directly call intrinsic to avoid debug assertions in libstd -#[rustc_intrinsic] -unsafe fn float_to_int_unchecked(value: Float) -> Int; +use std::intrinsics::float_to_int_unchecked; fn main() { unsafe { diff --git a/src/tools/miri/tests/fail/intrinsics/float_to_int_32_nan.rs b/src/tools/miri/tests/fail/intrinsics/float_to_int_32_nan.rs index 571121f4019fa..e8c987619568d 100644 --- a/src/tools/miri/tests/fail/intrinsics/float_to_int_32_nan.rs +++ b/src/tools/miri/tests/fail/intrinsics/float_to_int_32_nan.rs @@ -1,8 +1,6 @@ -#![feature(intrinsics)] - +#![feature(core_intrinsics)] // Directly call intrinsic to avoid debug assertions in libstd -#[rustc_intrinsic] -unsafe fn float_to_int_unchecked(value: Float) -> Int; +use std::intrinsics::float_to_int_unchecked; fn main() { unsafe { diff --git a/src/tools/miri/tests/fail/intrinsics/float_to_int_32_nanneg.rs b/src/tools/miri/tests/fail/intrinsics/float_to_int_32_nanneg.rs index 12600ef612558..c8b29cbcfe99f 100644 --- a/src/tools/miri/tests/fail/intrinsics/float_to_int_32_nanneg.rs +++ b/src/tools/miri/tests/fail/intrinsics/float_to_int_32_nanneg.rs @@ -1,8 +1,6 @@ -#![feature(intrinsics)] - +#![feature(core_intrinsics)] // Directly call intrinsic to avoid debug assertions in libstd -#[rustc_intrinsic] -unsafe fn float_to_int_unchecked(value: Float) -> Int; +use std::intrinsics::float_to_int_unchecked; fn main() { unsafe { diff --git a/src/tools/miri/tests/fail/intrinsics/float_to_int_32_neg.rs b/src/tools/miri/tests/fail/intrinsics/float_to_int_32_neg.rs index f848a137c2797..0996d0244e882 100644 --- a/src/tools/miri/tests/fail/intrinsics/float_to_int_32_neg.rs +++ b/src/tools/miri/tests/fail/intrinsics/float_to_int_32_neg.rs @@ -1,8 +1,6 @@ -#![feature(intrinsics)] - +#![feature(core_intrinsics)] // Directly call intrinsic to avoid debug assertions in libstd -#[rustc_intrinsic] -unsafe fn float_to_int_unchecked(value: Float) -> Int; +use std::intrinsics::float_to_int_unchecked; fn main() { unsafe { diff --git a/src/tools/miri/tests/fail/intrinsics/float_to_int_32_too_big1.rs b/src/tools/miri/tests/fail/intrinsics/float_to_int_32_too_big1.rs index 43ef4a95738f8..f28227134d868 100644 --- a/src/tools/miri/tests/fail/intrinsics/float_to_int_32_too_big1.rs +++ b/src/tools/miri/tests/fail/intrinsics/float_to_int_32_too_big1.rs @@ -1,8 +1,6 @@ -#![feature(intrinsics)] - +#![feature(core_intrinsics)] // Directly call intrinsic to avoid debug assertions in libstd -#[rustc_intrinsic] -unsafe fn float_to_int_unchecked(value: Float) -> Int; +use std::intrinsics::float_to_int_unchecked; fn main() { unsafe { diff --git a/src/tools/miri/tests/fail/intrinsics/float_to_int_32_too_big2.rs b/src/tools/miri/tests/fail/intrinsics/float_to_int_32_too_big2.rs index 83432c2b77c0b..9e400b4ad4065 100644 --- a/src/tools/miri/tests/fail/intrinsics/float_to_int_32_too_big2.rs +++ b/src/tools/miri/tests/fail/intrinsics/float_to_int_32_too_big2.rs @@ -1,8 +1,6 @@ -#![feature(intrinsics)] - +#![feature(core_intrinsics)] // Directly call intrinsic to avoid debug assertions in libstd -#[rustc_intrinsic] -unsafe fn float_to_int_unchecked(value: Float) -> Int; +use std::intrinsics::float_to_int_unchecked; fn main() { unsafe { diff --git a/src/tools/miri/tests/fail/intrinsics/float_to_int_32_too_small1.rs b/src/tools/miri/tests/fail/intrinsics/float_to_int_32_too_small1.rs index 609443e6d4efd..9aca349b918c5 100644 --- a/src/tools/miri/tests/fail/intrinsics/float_to_int_32_too_small1.rs +++ b/src/tools/miri/tests/fail/intrinsics/float_to_int_32_too_small1.rs @@ -1,8 +1,6 @@ -#![feature(intrinsics)] - +#![feature(core_intrinsics)] // Directly call intrinsic to avoid debug assertions in libstd -#[rustc_intrinsic] -unsafe fn float_to_int_unchecked(value: Float) -> Int; +use std::intrinsics::float_to_int_unchecked; fn main() { unsafe { diff --git a/src/tools/miri/tests/fail/intrinsics/float_to_int_64_inf1.rs b/src/tools/miri/tests/fail/intrinsics/float_to_int_64_inf1.rs index fb3eb11c0bb54..54361f6c32c4a 100644 --- a/src/tools/miri/tests/fail/intrinsics/float_to_int_64_inf1.rs +++ b/src/tools/miri/tests/fail/intrinsics/float_to_int_64_inf1.rs @@ -1,8 +1,6 @@ -#![feature(intrinsics)] - +#![feature(core_intrinsics)] // Directly call intrinsic to avoid debug assertions in libstd -#[rustc_intrinsic] -unsafe fn float_to_int_unchecked(value: Float) -> Int; +use std::intrinsics::float_to_int_unchecked; fn main() { unsafe { diff --git a/src/tools/miri/tests/fail/intrinsics/float_to_int_64_infneg1.rs b/src/tools/miri/tests/fail/intrinsics/float_to_int_64_infneg1.rs index b46c4777ba71f..75f5229636730 100644 --- a/src/tools/miri/tests/fail/intrinsics/float_to_int_64_infneg1.rs +++ b/src/tools/miri/tests/fail/intrinsics/float_to_int_64_infneg1.rs @@ -1,8 +1,6 @@ -#![feature(intrinsics)] - +#![feature(core_intrinsics)] // Directly call intrinsic to avoid debug assertions in libstd -#[rustc_intrinsic] -unsafe fn float_to_int_unchecked(value: Float) -> Int; +use std::intrinsics::float_to_int_unchecked; fn main() { unsafe { diff --git a/src/tools/miri/tests/fail/intrinsics/float_to_int_64_infneg2.rs b/src/tools/miri/tests/fail/intrinsics/float_to_int_64_infneg2.rs index 8a3b9dbdc7105..20d8fa1ae801f 100644 --- a/src/tools/miri/tests/fail/intrinsics/float_to_int_64_infneg2.rs +++ b/src/tools/miri/tests/fail/intrinsics/float_to_int_64_infneg2.rs @@ -1,8 +1,6 @@ -#![feature(intrinsics)] - +#![feature(core_intrinsics)] // Directly call intrinsic to avoid debug assertions in libstd -#[rustc_intrinsic] -unsafe fn float_to_int_unchecked(value: Float) -> Int; +use std::intrinsics::float_to_int_unchecked; fn main() { unsafe { diff --git a/src/tools/miri/tests/fail/intrinsics/float_to_int_64_nan.rs b/src/tools/miri/tests/fail/intrinsics/float_to_int_64_nan.rs index e0c826cb0463e..611a0ade0a56f 100644 --- a/src/tools/miri/tests/fail/intrinsics/float_to_int_64_nan.rs +++ b/src/tools/miri/tests/fail/intrinsics/float_to_int_64_nan.rs @@ -1,8 +1,6 @@ -#![feature(intrinsics)] - +#![feature(core_intrinsics)] // Directly call intrinsic to avoid debug assertions in libstd -#[rustc_intrinsic] -unsafe fn float_to_int_unchecked(value: Float) -> Int; +use std::intrinsics::float_to_int_unchecked; fn main() { unsafe { diff --git a/src/tools/miri/tests/fail/intrinsics/float_to_int_64_neg.rs b/src/tools/miri/tests/fail/intrinsics/float_to_int_64_neg.rs index c7c5bf402264f..8ff8c3bee00be 100644 --- a/src/tools/miri/tests/fail/intrinsics/float_to_int_64_neg.rs +++ b/src/tools/miri/tests/fail/intrinsics/float_to_int_64_neg.rs @@ -1,8 +1,6 @@ -#![feature(intrinsics)] - +#![feature(core_intrinsics)] // Directly call intrinsic to avoid debug assertions in libstd -#[rustc_intrinsic] -unsafe fn float_to_int_unchecked(value: Float) -> Int; +use std::intrinsics::float_to_int_unchecked; fn main() { unsafe { diff --git a/src/tools/miri/tests/fail/intrinsics/float_to_int_64_too_big1.rs b/src/tools/miri/tests/fail/intrinsics/float_to_int_64_too_big1.rs index fb3d7bda4e478..1f662e6c32a46 100644 --- a/src/tools/miri/tests/fail/intrinsics/float_to_int_64_too_big1.rs +++ b/src/tools/miri/tests/fail/intrinsics/float_to_int_64_too_big1.rs @@ -1,8 +1,6 @@ -#![feature(intrinsics)] - +#![feature(core_intrinsics)] // Directly call intrinsic to avoid debug assertions in libstd -#[rustc_intrinsic] -unsafe fn float_to_int_unchecked(value: Float) -> Int; +use std::intrinsics::float_to_int_unchecked; fn main() { unsafe { diff --git a/src/tools/miri/tests/fail/intrinsics/float_to_int_64_too_big2.rs b/src/tools/miri/tests/fail/intrinsics/float_to_int_64_too_big2.rs index 2cf27b33553cc..fad172801eaa7 100644 --- a/src/tools/miri/tests/fail/intrinsics/float_to_int_64_too_big2.rs +++ b/src/tools/miri/tests/fail/intrinsics/float_to_int_64_too_big2.rs @@ -1,8 +1,6 @@ -#![feature(intrinsics)] - +#![feature(core_intrinsics)] // Directly call intrinsic to avoid debug assertions in libstd -#[rustc_intrinsic] -unsafe fn float_to_int_unchecked(value: Float) -> Int; +use std::intrinsics::float_to_int_unchecked; fn main() { unsafe { diff --git a/src/tools/miri/tests/fail/intrinsics/float_to_int_64_too_big3.rs b/src/tools/miri/tests/fail/intrinsics/float_to_int_64_too_big3.rs index 22dca505e64b6..7eb3559527aa0 100644 --- a/src/tools/miri/tests/fail/intrinsics/float_to_int_64_too_big3.rs +++ b/src/tools/miri/tests/fail/intrinsics/float_to_int_64_too_big3.rs @@ -1,8 +1,6 @@ -#![feature(intrinsics)] - +#![feature(core_intrinsics)] // Directly call intrinsic to avoid debug assertions in libstd -#[rustc_intrinsic] -unsafe fn float_to_int_unchecked(value: Float) -> Int; +use std::intrinsics::float_to_int_unchecked; fn main() { unsafe { diff --git a/src/tools/miri/tests/fail/intrinsics/float_to_int_64_too_big4.rs b/src/tools/miri/tests/fail/intrinsics/float_to_int_64_too_big4.rs index b59c8fa8e0c2a..351fc6c6f1cba 100644 --- a/src/tools/miri/tests/fail/intrinsics/float_to_int_64_too_big4.rs +++ b/src/tools/miri/tests/fail/intrinsics/float_to_int_64_too_big4.rs @@ -1,8 +1,6 @@ -#![feature(intrinsics)] - +#![feature(core_intrinsics)] // Directly call intrinsic to avoid debug assertions in libstd -#[rustc_intrinsic] -unsafe fn float_to_int_unchecked(value: Float) -> Int; +use std::intrinsics::float_to_int_unchecked; fn main() { unsafe { diff --git a/src/tools/miri/tests/fail/intrinsics/float_to_int_64_too_big5.rs b/src/tools/miri/tests/fail/intrinsics/float_to_int_64_too_big5.rs index 4ad0cd343a406..a6f73c7971073 100644 --- a/src/tools/miri/tests/fail/intrinsics/float_to_int_64_too_big5.rs +++ b/src/tools/miri/tests/fail/intrinsics/float_to_int_64_too_big5.rs @@ -1,8 +1,6 @@ -#![feature(intrinsics)] - +#![feature(core_intrinsics)] // Directly call intrinsic to avoid debug assertions in libstd -#[rustc_intrinsic] -unsafe fn float_to_int_unchecked(value: Float) -> Int; +use std::intrinsics::float_to_int_unchecked; fn main() { unsafe { diff --git a/src/tools/miri/tests/fail/intrinsics/float_to_int_64_too_big6.rs b/src/tools/miri/tests/fail/intrinsics/float_to_int_64_too_big6.rs index fd47dfc03d7a6..b01ff3aafc039 100644 --- a/src/tools/miri/tests/fail/intrinsics/float_to_int_64_too_big6.rs +++ b/src/tools/miri/tests/fail/intrinsics/float_to_int_64_too_big6.rs @@ -1,8 +1,6 @@ -#![feature(intrinsics)] - +#![feature(core_intrinsics)] // Directly call intrinsic to avoid debug assertions in libstd -#[rustc_intrinsic] -unsafe fn float_to_int_unchecked(value: Float) -> Int; +use std::intrinsics::float_to_int_unchecked; fn main() { unsafe { diff --git a/src/tools/miri/tests/fail/intrinsics/float_to_int_64_too_big7.rs b/src/tools/miri/tests/fail/intrinsics/float_to_int_64_too_big7.rs index 680ebda1c96bf..a573e4e852c84 100644 --- a/src/tools/miri/tests/fail/intrinsics/float_to_int_64_too_big7.rs +++ b/src/tools/miri/tests/fail/intrinsics/float_to_int_64_too_big7.rs @@ -1,8 +1,6 @@ -#![feature(intrinsics)] - +#![feature(core_intrinsics)] // Directly call intrinsic to avoid debug assertions in libstd -#[rustc_intrinsic] -unsafe fn float_to_int_unchecked(value: Float) -> Int; +use std::intrinsics::float_to_int_unchecked; fn main() { unsafe { diff --git a/src/tools/miri/tests/fail/intrinsics/float_to_int_64_too_small1.rs b/src/tools/miri/tests/fail/intrinsics/float_to_int_64_too_small1.rs index e4cb36c5d2ebe..4fb38c9bc2e79 100644 --- a/src/tools/miri/tests/fail/intrinsics/float_to_int_64_too_small1.rs +++ b/src/tools/miri/tests/fail/intrinsics/float_to_int_64_too_small1.rs @@ -1,8 +1,6 @@ -#![feature(intrinsics)] - +#![feature(core_intrinsics)] // Directly call intrinsic to avoid debug assertions in libstd -#[rustc_intrinsic] -unsafe fn float_to_int_unchecked(value: Float) -> Int; +use std::intrinsics::float_to_int_unchecked; fn main() { unsafe { diff --git a/src/tools/miri/tests/fail/intrinsics/float_to_int_64_too_small2.rs b/src/tools/miri/tests/fail/intrinsics/float_to_int_64_too_small2.rs index fe4bac92bd3e3..c4c0d3c17f03b 100644 --- a/src/tools/miri/tests/fail/intrinsics/float_to_int_64_too_small2.rs +++ b/src/tools/miri/tests/fail/intrinsics/float_to_int_64_too_small2.rs @@ -1,8 +1,6 @@ -#![feature(intrinsics)] - +#![feature(core_intrinsics)] // Directly call intrinsic to avoid debug assertions in libstd -#[rustc_intrinsic] -unsafe fn float_to_int_unchecked(value: Float) -> Int; +use std::intrinsics::float_to_int_unchecked; fn main() { unsafe { diff --git a/src/tools/miri/tests/fail/intrinsics/float_to_int_64_too_small3.rs b/src/tools/miri/tests/fail/intrinsics/float_to_int_64_too_small3.rs index 219efd8031623..de7d2215fd624 100644 --- a/src/tools/miri/tests/fail/intrinsics/float_to_int_64_too_small3.rs +++ b/src/tools/miri/tests/fail/intrinsics/float_to_int_64_too_small3.rs @@ -1,8 +1,6 @@ -#![feature(intrinsics)] - +#![feature(core_intrinsics)] // Directly call intrinsic to avoid debug assertions in libstd -#[rustc_intrinsic] -unsafe fn float_to_int_unchecked(value: Float) -> Int; +use std::intrinsics::float_to_int_unchecked; fn main() { unsafe { diff --git a/src/tools/miri/tests/fail/intrinsics/ptr_offset_int_plus_int.stderr b/src/tools/miri/tests/fail/intrinsics/ptr_offset_int_plus_int.stderr index c87ce321784b5..b7ed36f6428fa 100644 --- a/src/tools/miri/tests/fail/intrinsics/ptr_offset_int_plus_int.stderr +++ b/src/tools/miri/tests/fail/intrinsics/ptr_offset_int_plus_int.stderr @@ -1,8 +1,8 @@ -error: Undefined Behavior: out-of-bounds pointer arithmetic: expected a pointer to 1 byte of memory, but got 0x1[noalloc] which is a dangling pointer (it has no provenance) +error: Undefined Behavior: in-bounds pointer arithmetic failed: attempting to offset pointer by 1 byte, but got 0x1[noalloc] which is a dangling pointer (it has no provenance) --> tests/fail/intrinsics/ptr_offset_int_plus_int.rs:LL:CC | LL | let _val = (1 as *mut u8).offset(1); - | ^^^^^^^^^^^^^^^^^^^^^^^^ out-of-bounds pointer arithmetic: expected a pointer to 1 byte of memory, but got 0x1[noalloc] which is a dangling pointer (it has no provenance) + | ^^^^^^^^^^^^^^^^^^^^^^^^ in-bounds pointer arithmetic failed: attempting to offset pointer by 1 byte, but got 0x1[noalloc] which is a dangling pointer (it has no provenance) | = help: this indicates a bug in the program: it performed an invalid operation, and caused Undefined Behavior = help: see https://doc.rust-lang.org/nightly/reference/behavior-considered-undefined.html for further information diff --git a/src/tools/miri/tests/fail/intrinsics/ptr_offset_int_plus_ptr.stderr b/src/tools/miri/tests/fail/intrinsics/ptr_offset_int_plus_ptr.stderr index 78239d501378f..29d9e1c64bd7d 100644 --- a/src/tools/miri/tests/fail/intrinsics/ptr_offset_int_plus_ptr.stderr +++ b/src/tools/miri/tests/fail/intrinsics/ptr_offset_int_plus_ptr.stderr @@ -1,8 +1,8 @@ -error: Undefined Behavior: out-of-bounds pointer arithmetic: expected a pointer to $BYTES bytes of memory, but got 0x1[noalloc] which is a dangling pointer (it has no provenance) +error: Undefined Behavior: in-bounds pointer arithmetic failed: attempting to offset pointer by $BYTES bytes, but got 0x1[noalloc] which is a dangling pointer (it has no provenance) --> tests/fail/intrinsics/ptr_offset_int_plus_ptr.rs:LL:CC | LL | let _val = (1 as *mut u8).offset(ptr as isize); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ out-of-bounds pointer arithmetic: expected a pointer to $BYTES bytes of memory, but got 0x1[noalloc] which is a dangling pointer (it has no provenance) + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ in-bounds pointer arithmetic failed: attempting to offset pointer by $BYTES bytes, but got 0x1[noalloc] which is a dangling pointer (it has no provenance) | = help: this indicates a bug in the program: it performed an invalid operation, and caused Undefined Behavior = help: see https://doc.rust-lang.org/nightly/reference/behavior-considered-undefined.html for further information diff --git a/src/tools/miri/tests/fail/intrinsics/ptr_offset_out_of_bounds.rs b/src/tools/miri/tests/fail/intrinsics/ptr_offset_out_of_bounds.rs index 905fc678f6d50..e44d398c998cb 100644 --- a/src/tools/miri/tests/fail/intrinsics/ptr_offset_out_of_bounds.rs +++ b/src/tools/miri/tests/fail/intrinsics/ptr_offset_out_of_bounds.rs @@ -1,6 +1,6 @@ fn main() { let v = [0i8; 4]; let x = &v as *const i8; - let x = unsafe { x.offset(5) }; //~ERROR: expected a pointer to 5 bytes of memory + let x = unsafe { x.offset(5) }; //~ERROR: is only 4 bytes from the end of the allocation panic!("this should never print: {:?}", x); } diff --git a/src/tools/miri/tests/fail/intrinsics/ptr_offset_out_of_bounds.stderr b/src/tools/miri/tests/fail/intrinsics/ptr_offset_out_of_bounds.stderr index 4f6b45b897b45..143fae8587b63 100644 --- a/src/tools/miri/tests/fail/intrinsics/ptr_offset_out_of_bounds.stderr +++ b/src/tools/miri/tests/fail/intrinsics/ptr_offset_out_of_bounds.stderr @@ -1,8 +1,8 @@ -error: Undefined Behavior: out-of-bounds pointer arithmetic: expected a pointer to 5 bytes of memory, but got ALLOC which is only 4 bytes from the end of the allocation +error: Undefined Behavior: in-bounds pointer arithmetic failed: attempting to offset pointer by 5 bytes, but got ALLOC which is only 4 bytes from the end of the allocation --> tests/fail/intrinsics/ptr_offset_out_of_bounds.rs:LL:CC | LL | let x = unsafe { x.offset(5) }; - | ^^^^^^^^^^^ out-of-bounds pointer arithmetic: expected a pointer to 5 bytes of memory, but got ALLOC which is only 4 bytes from the end of the allocation + | ^^^^^^^^^^^ in-bounds pointer arithmetic failed: attempting to offset pointer by 5 bytes, but got ALLOC which is only 4 bytes from the end of the allocation | = help: this indicates a bug in the program: it performed an invalid operation, and caused Undefined Behavior = help: see https://doc.rust-lang.org/nightly/reference/behavior-considered-undefined.html for further information diff --git a/src/tools/miri/tests/fail/intrinsics/ptr_offset_out_of_bounds_neg.rs b/src/tools/miri/tests/fail/intrinsics/ptr_offset_out_of_bounds_neg.rs index bd1d5c064c065..5ad1737356693 100644 --- a/src/tools/miri/tests/fail/intrinsics/ptr_offset_out_of_bounds_neg.rs +++ b/src/tools/miri/tests/fail/intrinsics/ptr_offset_out_of_bounds_neg.rs @@ -1,6 +1,6 @@ fn main() { let v = [0i8; 4]; let x = &v as *const i8; - let x = unsafe { x.offset(-1) }; //~ERROR: expected a pointer to the end of 1 byte of memory + let x = unsafe { x.offset(-1) }; //~ERROR: is at the beginning of the allocation panic!("this should never print: {:?}", x); } diff --git a/src/tools/miri/tests/fail/intrinsics/ptr_offset_out_of_bounds_neg.stderr b/src/tools/miri/tests/fail/intrinsics/ptr_offset_out_of_bounds_neg.stderr index 2dd4c943e864d..14163d9240419 100644 --- a/src/tools/miri/tests/fail/intrinsics/ptr_offset_out_of_bounds_neg.stderr +++ b/src/tools/miri/tests/fail/intrinsics/ptr_offset_out_of_bounds_neg.stderr @@ -1,8 +1,8 @@ -error: Undefined Behavior: out-of-bounds pointer arithmetic: expected a pointer to the end of 1 byte of memory, but got ALLOC which is at the beginning of the allocation +error: Undefined Behavior: in-bounds pointer arithmetic failed: attempting to offset pointer by -1 bytes, but got ALLOC which is at the beginning of the allocation --> tests/fail/intrinsics/ptr_offset_out_of_bounds_neg.rs:LL:CC | LL | let x = unsafe { x.offset(-1) }; - | ^^^^^^^^^^^^ out-of-bounds pointer arithmetic: expected a pointer to the end of 1 byte of memory, but got ALLOC which is at the beginning of the allocation + | ^^^^^^^^^^^^ in-bounds pointer arithmetic failed: attempting to offset pointer by -1 bytes, but got ALLOC which is at the beginning of the allocation | = help: this indicates a bug in the program: it performed an invalid operation, and caused Undefined Behavior = help: see https://doc.rust-lang.org/nightly/reference/behavior-considered-undefined.html for further information diff --git a/src/tools/miri/tests/fail/intrinsics/ptr_offset_overflow.rs b/src/tools/miri/tests/fail/intrinsics/ptr_offset_overflow.rs index 6839431223270..6767b1f117a8f 100644 --- a/src/tools/miri/tests/fail/intrinsics/ptr_offset_overflow.rs +++ b/src/tools/miri/tests/fail/intrinsics/ptr_offset_overflow.rs @@ -3,6 +3,6 @@ fn main() { let v = [0i8; 4]; let x = &v as *const i8; - let x = unsafe { x.offset(isize::MIN) }; //~ERROR: out-of-bounds pointer arithmetic + let x = unsafe { x.offset(isize::MIN) }; //~ERROR: in-bounds pointer arithmetic failed panic!("this should never print: {:?}", x); } diff --git a/src/tools/miri/tests/fail/intrinsics/ptr_offset_overflow.stderr b/src/tools/miri/tests/fail/intrinsics/ptr_offset_overflow.stderr index d03c9f870e24a..af08bfb3c947e 100644 --- a/src/tools/miri/tests/fail/intrinsics/ptr_offset_overflow.stderr +++ b/src/tools/miri/tests/fail/intrinsics/ptr_offset_overflow.stderr @@ -1,8 +1,8 @@ -error: Undefined Behavior: out-of-bounds pointer arithmetic: expected a pointer to the end of $BYTES bytes of memory, but got ALLOC which is at the beginning of the allocation +error: Undefined Behavior: in-bounds pointer arithmetic failed: attempting to offset pointer by -$BYTES bytes, but got ALLOC which is at the beginning of the allocation --> tests/fail/intrinsics/ptr_offset_overflow.rs:LL:CC | LL | let x = unsafe { x.offset(isize::MIN) }; - | ^^^^^^^^^^^^^^^^^^^^ out-of-bounds pointer arithmetic: expected a pointer to the end of $BYTES bytes of memory, but got ALLOC which is at the beginning of the allocation + | ^^^^^^^^^^^^^^^^^^^^ in-bounds pointer arithmetic failed: attempting to offset pointer by -$BYTES bytes, but got ALLOC which is at the beginning of the allocation | = help: this indicates a bug in the program: it performed an invalid operation, and caused Undefined Behavior = help: see https://doc.rust-lang.org/nightly/reference/behavior-considered-undefined.html for further information diff --git a/src/tools/miri/tests/fail/intrinsics/simd-gather.rs b/src/tools/miri/tests/fail/intrinsics/simd-gather.rs index b837395245179..45a3dfa577244 100644 --- a/src/tools/miri/tests/fail/intrinsics/simd-gather.rs +++ b/src/tools/miri/tests/fail/intrinsics/simd-gather.rs @@ -6,6 +6,6 @@ fn main() { let vec: &[i8] = &[10, 11, 12, 13, 14, 15, 16, 17, 18]; let idxs = Simd::from_array([9, 3, 0, 17]); let _result = Simd::gather_select_unchecked(&vec, Mask::splat(true), idxs, Simd::splat(0)); - //~^ERROR: expected a pointer to 1 byte of memory + //~^ERROR: attempting to access 1 byte } } diff --git a/src/tools/miri/tests/fail/intrinsics/simd-gather.stderr b/src/tools/miri/tests/fail/intrinsics/simd-gather.stderr index ee1c900961080..e91d5d2185f37 100644 --- a/src/tools/miri/tests/fail/intrinsics/simd-gather.stderr +++ b/src/tools/miri/tests/fail/intrinsics/simd-gather.stderr @@ -1,8 +1,8 @@ -error: Undefined Behavior: memory access failed: expected a pointer to 1 byte of memory, but got ALLOC+0x9 which is at or beyond the end of the allocation of size 9 bytes +error: Undefined Behavior: memory access failed: attempting to access 1 byte, but got ALLOC+0x9 which is at or beyond the end of the allocation of size 9 bytes --> tests/fail/intrinsics/simd-gather.rs:LL:CC | LL | let _result = Simd::gather_select_unchecked(&vec, Mask::splat(true), idxs, Simd::splat(0)); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ memory access failed: expected a pointer to 1 byte of memory, but got ALLOC+0x9 which is at or beyond the end of the allocation of size 9 bytes + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ memory access failed: attempting to access 1 byte, but got ALLOC+0x9 which is at or beyond the end of the allocation of size 9 bytes | = help: this indicates a bug in the program: it performed an invalid operation, and caused Undefined Behavior = help: see https://doc.rust-lang.org/nightly/reference/behavior-considered-undefined.html for further information diff --git a/src/tools/miri/tests/fail/intrinsics/simd-scatter.rs b/src/tools/miri/tests/fail/intrinsics/simd-scatter.rs index bb8c9dbe4c7a8..4e727edd6244a 100644 --- a/src/tools/miri/tests/fail/intrinsics/simd-scatter.rs +++ b/src/tools/miri/tests/fail/intrinsics/simd-scatter.rs @@ -6,7 +6,7 @@ fn main() { let mut vec: Vec = vec![10, 11, 12, 13, 14, 15, 16, 17, 18]; let idxs = Simd::from_array([9, 3, 0, 17]); Simd::from_array([-27, 82, -41, 124]).scatter_select_unchecked( - //~^ERROR: expected a pointer to 1 byte of memory + //~^ERROR: attempting to access 1 byte &mut vec, Mask::splat(true), idxs, diff --git a/src/tools/miri/tests/fail/intrinsics/simd-scatter.stderr b/src/tools/miri/tests/fail/intrinsics/simd-scatter.stderr index aaacb94f458df..56c8e7b38b648 100644 --- a/src/tools/miri/tests/fail/intrinsics/simd-scatter.stderr +++ b/src/tools/miri/tests/fail/intrinsics/simd-scatter.stderr @@ -1,4 +1,4 @@ -error: Undefined Behavior: memory access failed: expected a pointer to 1 byte of memory, but got ALLOC+0x9 which is at or beyond the end of the allocation of size 9 bytes +error: Undefined Behavior: memory access failed: attempting to access 1 byte, but got ALLOC+0x9 which is at or beyond the end of the allocation of size 9 bytes --> tests/fail/intrinsics/simd-scatter.rs:LL:CC | LL | / Simd::from_array([-27, 82, -41, 124]).scatter_select_unchecked( @@ -7,7 +7,7 @@ LL | | &mut vec, LL | | Mask::splat(true), LL | | idxs, LL | | ); - | |_________^ memory access failed: expected a pointer to 1 byte of memory, but got ALLOC+0x9 which is at or beyond the end of the allocation of size 9 bytes + | |_________^ memory access failed: attempting to access 1 byte, but got ALLOC+0x9 which is at or beyond the end of the allocation of size 9 bytes | = help: this indicates a bug in the program: it performed an invalid operation, and caused Undefined Behavior = help: see https://doc.rust-lang.org/nightly/reference/behavior-considered-undefined.html for further information diff --git a/src/tools/miri/tests/fail/intrinsics/simd-select-bitmask-invalid.rs b/src/tools/miri/tests/fail/intrinsics/simd-select-bitmask-invalid.rs deleted file mode 100644 index 409098ac3b5df..0000000000000 --- a/src/tools/miri/tests/fail/intrinsics/simd-select-bitmask-invalid.rs +++ /dev/null @@ -1,15 +0,0 @@ -#![feature(core_intrinsics, repr_simd)] - -use std::intrinsics::simd::simd_select_bitmask; - -#[repr(simd)] -#[allow(non_camel_case_types)] -#[derive(Copy, Clone)] -struct i32x2([i32; 2]); - -fn main() { - unsafe { - let x = i32x2([0, 1]); - simd_select_bitmask(0b11111111u8, x, x); //~ERROR: bitmask less than 8 bits long must be filled with 0s for the remaining bits - } -} diff --git a/src/tools/miri/tests/fail/intrinsics/simd-select-bitmask-invalid.stderr b/src/tools/miri/tests/fail/intrinsics/simd-select-bitmask-invalid.stderr deleted file mode 100644 index 9acb51d8c5f36..0000000000000 --- a/src/tools/miri/tests/fail/intrinsics/simd-select-bitmask-invalid.stderr +++ /dev/null @@ -1,15 +0,0 @@ -error: Undefined Behavior: a SIMD bitmask less than 8 bits long must be filled with 0s for the remaining bits - --> tests/fail/intrinsics/simd-select-bitmask-invalid.rs:LL:CC - | -LL | simd_select_bitmask(0b11111111u8, x, x); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ a SIMD bitmask less than 8 bits long must be filled with 0s for the remaining bits - | - = help: this indicates a bug in the program: it performed an invalid operation, and caused Undefined Behavior - = help: see https://doc.rust-lang.org/nightly/reference/behavior-considered-undefined.html for further information - = note: BACKTRACE: - = note: inside `main` at tests/fail/intrinsics/simd-select-bitmask-invalid.rs:LL:CC - -note: some details are omitted, run with `MIRIFLAGS=-Zmiri-backtrace=full` for a verbose backtrace - -error: aborting due to 1 previous error - diff --git a/src/tools/miri/tests/fail/no_main.stderr b/src/tools/miri/tests/fail/no_main.stderr index e9b9e5d65b1d1..e7f63be794f42 100644 --- a/src/tools/miri/tests/fail/no_main.stderr +++ b/src/tools/miri/tests/fail/no_main.stderr @@ -2,7 +2,7 @@ error: Miri can only run programs that have a main function. Alternatively, you can export a `miri_start` function: #[cfg(miri)] - #[no_mangle] + #[unsafe(no_mangle)] fn miri_start(argc: isize, argv: *const *const u8) -> isize { // Call the actual start function that your project implements, based on your target's conventions. } diff --git a/src/tools/miri/tests/fail/panic/tls_macro_const_drop_panic.stderr b/src/tools/miri/tests/fail/panic/tls_macro_const_drop_panic.stderr index aadb9976609c3..1dcdb4a399680 100644 --- a/src/tools/miri/tests/fail/panic/tls_macro_const_drop_panic.stderr +++ b/src/tools/miri/tests/fail/panic/tls_macro_const_drop_panic.stderr @@ -1,7 +1,7 @@ thread $NAME panicked at tests/fail/panic/tls_macro_const_drop_panic.rs:LL:CC: ow -fatal runtime error: thread local panicked on drop +fatal runtime error: thread local panicked on drop, aborting error: abnormal termination: the program aborted execution error: aborting due to 1 previous error diff --git a/src/tools/miri/tests/fail/panic/tls_macro_drop_panic.stderr b/src/tools/miri/tests/fail/panic/tls_macro_drop_panic.stderr index 546ee7e1ed214..7e4907abd9336 100644 --- a/src/tools/miri/tests/fail/panic/tls_macro_drop_panic.stderr +++ b/src/tools/miri/tests/fail/panic/tls_macro_drop_panic.stderr @@ -1,7 +1,7 @@ thread $NAME panicked at tests/fail/panic/tls_macro_drop_panic.rs:LL:CC: ow -fatal runtime error: thread local panicked on drop +fatal runtime error: thread local panicked on drop, aborting error: abnormal termination: the program aborted execution error: aborting due to 1 previous error diff --git a/src/tools/miri/tests/fail/provenance/int_copy_looses_provenance3.stderr b/src/tools/miri/tests/fail/provenance/int_copy_looses_provenance3.stderr index 62e3bd2e954cf..4e741fe8329b5 100644 --- a/src/tools/miri/tests/fail/provenance/int_copy_looses_provenance3.stderr +++ b/src/tools/miri/tests/fail/provenance/int_copy_looses_provenance3.stderr @@ -1,8 +1,8 @@ -error: Undefined Behavior: memory access failed: expected a pointer to 4 bytes of memory, but got $HEX[noalloc] which is a dangling pointer (it has no provenance) +error: Undefined Behavior: memory access failed: attempting to access 4 bytes, but got $HEX[noalloc] which is a dangling pointer (it has no provenance) --> tests/fail/provenance/int_copy_looses_provenance3.rs:LL:CC | LL | let _val = unsafe { *ptr }; - | ^^^^ memory access failed: expected a pointer to 4 bytes of memory, but got $HEX[noalloc] which is a dangling pointer (it has no provenance) + | ^^^^ memory access failed: attempting to access 4 bytes, but got $HEX[noalloc] which is a dangling pointer (it has no provenance) | = help: this indicates a bug in the program: it performed an invalid operation, and caused Undefined Behavior = help: see https://doc.rust-lang.org/nightly/reference/behavior-considered-undefined.html for further information diff --git a/src/tools/miri/tests/fail/provenance/pointer_partial_overwrite.stderr b/src/tools/miri/tests/fail/provenance/pointer_partial_overwrite.stderr index 6bc92fffd5afb..370f9463b73ca 100644 --- a/src/tools/miri/tests/fail/provenance/pointer_partial_overwrite.stderr +++ b/src/tools/miri/tests/fail/provenance/pointer_partial_overwrite.stderr @@ -1,8 +1,8 @@ -error: Undefined Behavior: memory access failed: expected a pointer to 4 bytes of memory, but got $HEX[noalloc] which is a dangling pointer (it has no provenance) +error: Undefined Behavior: memory access failed: attempting to access 4 bytes, but got $HEX[noalloc] which is a dangling pointer (it has no provenance) --> tests/fail/provenance/pointer_partial_overwrite.rs:LL:CC | LL | let x = *p; - | ^^ memory access failed: expected a pointer to 4 bytes of memory, but got $HEX[noalloc] which is a dangling pointer (it has no provenance) + | ^^ memory access failed: attempting to access 4 bytes, but got $HEX[noalloc] which is a dangling pointer (it has no provenance) | = help: this indicates a bug in the program: it performed an invalid operation, and caused Undefined Behavior = help: see https://doc.rust-lang.org/nightly/reference/behavior-considered-undefined.html for further information diff --git a/src/tools/miri/tests/fail/provenance/provenance_transmute.stderr b/src/tools/miri/tests/fail/provenance/provenance_transmute.stderr index 7403f4382de21..38e2e19009a94 100644 --- a/src/tools/miri/tests/fail/provenance/provenance_transmute.stderr +++ b/src/tools/miri/tests/fail/provenance/provenance_transmute.stderr @@ -1,8 +1,8 @@ -error: Undefined Behavior: memory access failed: expected a pointer to 1 byte of memory, but got $HEX[noalloc] which is a dangling pointer (it has no provenance) +error: Undefined Behavior: memory access failed: attempting to access 1 byte, but got $HEX[noalloc] which is a dangling pointer (it has no provenance) --> tests/fail/provenance/provenance_transmute.rs:LL:CC | LL | let _val = *left_ptr; - | ^^^^^^^^^ memory access failed: expected a pointer to 1 byte of memory, but got $HEX[noalloc] which is a dangling pointer (it has no provenance) + | ^^^^^^^^^ memory access failed: attempting to access 1 byte, but got $HEX[noalloc] which is a dangling pointer (it has no provenance) | = help: this indicates a bug in the program: it performed an invalid operation, and caused Undefined Behavior = help: see https://doc.rust-lang.org/nightly/reference/behavior-considered-undefined.html for further information diff --git a/src/tools/miri/tests/fail/provenance/ptr_copy_loses_partial_provenance0.stderr b/src/tools/miri/tests/fail/provenance/ptr_copy_loses_partial_provenance0.stderr index 5ed83951c60a9..5225ab328652f 100644 --- a/src/tools/miri/tests/fail/provenance/ptr_copy_loses_partial_provenance0.stderr +++ b/src/tools/miri/tests/fail/provenance/ptr_copy_loses_partial_provenance0.stderr @@ -1,8 +1,8 @@ -error: Undefined Behavior: memory access failed: expected a pointer to 4 bytes of memory, but got $HEX[noalloc] which is a dangling pointer (it has no provenance) +error: Undefined Behavior: memory access failed: attempting to access 4 bytes, but got $HEX[noalloc] which is a dangling pointer (it has no provenance) --> tests/fail/provenance/ptr_copy_loses_partial_provenance0.rs:LL:CC | LL | let _val = *ptr; - | ^^^^ memory access failed: expected a pointer to 4 bytes of memory, but got $HEX[noalloc] which is a dangling pointer (it has no provenance) + | ^^^^ memory access failed: attempting to access 4 bytes, but got $HEX[noalloc] which is a dangling pointer (it has no provenance) | = help: this indicates a bug in the program: it performed an invalid operation, and caused Undefined Behavior = help: see https://doc.rust-lang.org/nightly/reference/behavior-considered-undefined.html for further information diff --git a/src/tools/miri/tests/fail/provenance/ptr_copy_loses_partial_provenance1.stderr b/src/tools/miri/tests/fail/provenance/ptr_copy_loses_partial_provenance1.stderr index 3675653cbe79a..c17c98fa10568 100644 --- a/src/tools/miri/tests/fail/provenance/ptr_copy_loses_partial_provenance1.stderr +++ b/src/tools/miri/tests/fail/provenance/ptr_copy_loses_partial_provenance1.stderr @@ -1,8 +1,8 @@ -error: Undefined Behavior: memory access failed: expected a pointer to 4 bytes of memory, but got $HEX[noalloc] which is a dangling pointer (it has no provenance) +error: Undefined Behavior: memory access failed: attempting to access 4 bytes, but got $HEX[noalloc] which is a dangling pointer (it has no provenance) --> tests/fail/provenance/ptr_copy_loses_partial_provenance1.rs:LL:CC | LL | let _val = *ptr; - | ^^^^ memory access failed: expected a pointer to 4 bytes of memory, but got $HEX[noalloc] which is a dangling pointer (it has no provenance) + | ^^^^ memory access failed: attempting to access 4 bytes, but got $HEX[noalloc] which is a dangling pointer (it has no provenance) | = help: this indicates a bug in the program: it performed an invalid operation, and caused Undefined Behavior = help: see https://doc.rust-lang.org/nightly/reference/behavior-considered-undefined.html for further information diff --git a/src/tools/miri/tests/fail/provenance/ptr_int_unexposed.stderr b/src/tools/miri/tests/fail/provenance/ptr_int_unexposed.stderr index 1b6518612efcc..78290d4ed63bd 100644 --- a/src/tools/miri/tests/fail/provenance/ptr_int_unexposed.stderr +++ b/src/tools/miri/tests/fail/provenance/ptr_int_unexposed.stderr @@ -1,8 +1,8 @@ -error: Undefined Behavior: memory access failed: expected a pointer to 4 bytes of memory, but got $HEX[noalloc] which is a dangling pointer (it has no provenance) +error: Undefined Behavior: memory access failed: attempting to access 4 bytes, but got $HEX[noalloc] which is a dangling pointer (it has no provenance) --> tests/fail/provenance/ptr_int_unexposed.rs:LL:CC | LL | assert_eq!(unsafe { *ptr }, 3); - | ^^^^ memory access failed: expected a pointer to 4 bytes of memory, but got $HEX[noalloc] which is a dangling pointer (it has no provenance) + | ^^^^ memory access failed: attempting to access 4 bytes, but got $HEX[noalloc] which is a dangling pointer (it has no provenance) | = help: this indicates a bug in the program: it performed an invalid operation, and caused Undefined Behavior = help: see https://doc.rust-lang.org/nightly/reference/behavior-considered-undefined.html for further information diff --git a/src/tools/miri/tests/fail/provenance/ptr_invalid.stderr b/src/tools/miri/tests/fail/provenance/ptr_invalid.stderr index 84347ec7a11d3..ff73fbb9d1b3e 100644 --- a/src/tools/miri/tests/fail/provenance/ptr_invalid.stderr +++ b/src/tools/miri/tests/fail/provenance/ptr_invalid.stderr @@ -1,8 +1,8 @@ -error: Undefined Behavior: memory access failed: expected a pointer to 4 bytes of memory, but got $HEX[noalloc] which is a dangling pointer (it has no provenance) +error: Undefined Behavior: memory access failed: attempting to access 4 bytes, but got $HEX[noalloc] which is a dangling pointer (it has no provenance) --> tests/fail/provenance/ptr_invalid.rs:LL:CC | LL | let _val = unsafe { *xptr_invalid }; - | ^^^^^^^^^^^^^ memory access failed: expected a pointer to 4 bytes of memory, but got $HEX[noalloc] which is a dangling pointer (it has no provenance) + | ^^^^^^^^^^^^^ memory access failed: attempting to access 4 bytes, but got $HEX[noalloc] which is a dangling pointer (it has no provenance) | = help: this indicates a bug in the program: it performed an invalid operation, and caused Undefined Behavior = help: see https://doc.rust-lang.org/nightly/reference/behavior-considered-undefined.html for further information diff --git a/src/tools/miri/tests/fail/provenance/ptr_invalid_offset.stderr b/src/tools/miri/tests/fail/provenance/ptr_invalid_offset.stderr index 3910bc4df4b08..07556540f13ef 100644 --- a/src/tools/miri/tests/fail/provenance/ptr_invalid_offset.stderr +++ b/src/tools/miri/tests/fail/provenance/ptr_invalid_offset.stderr @@ -1,8 +1,8 @@ -error: Undefined Behavior: out-of-bounds pointer arithmetic: expected a pointer to 1 byte of memory, but got $HEX[noalloc] which is a dangling pointer (it has no provenance) +error: Undefined Behavior: in-bounds pointer arithmetic failed: attempting to offset pointer by 1 byte, but got $HEX[noalloc] which is a dangling pointer (it has no provenance) --> tests/fail/provenance/ptr_invalid_offset.rs:LL:CC | LL | let _ = unsafe { roundtrip.offset(1) }; - | ^^^^^^^^^^^^^^^^^^^ out-of-bounds pointer arithmetic: expected a pointer to 1 byte of memory, but got $HEX[noalloc] which is a dangling pointer (it has no provenance) + | ^^^^^^^^^^^^^^^^^^^ in-bounds pointer arithmetic failed: attempting to offset pointer by 1 byte, but got $HEX[noalloc] which is a dangling pointer (it has no provenance) | = help: this indicates a bug in the program: it performed an invalid operation, and caused Undefined Behavior = help: see https://doc.rust-lang.org/nightly/reference/behavior-considered-undefined.html for further information diff --git a/src/tools/miri/tests/fail/rc_as_ptr.stderr b/src/tools/miri/tests/fail/rc_as_ptr.stderr index 0fcb0faf49718..e1d0e5780a074 100644 --- a/src/tools/miri/tests/fail/rc_as_ptr.stderr +++ b/src/tools/miri/tests/fail/rc_as_ptr.stderr @@ -1,8 +1,8 @@ -error: Undefined Behavior: out-of-bounds pointer use: ALLOC has been freed, so this pointer is dangling +error: Undefined Behavior: pointer not dereferenceable: ALLOC has been freed, so this pointer is dangling --> tests/fail/rc_as_ptr.rs:LL:CC | LL | assert_eq!(42, **unsafe { &*Weak::as_ptr(&weak) }); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ out-of-bounds pointer use: ALLOC has been freed, so this pointer is dangling + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ pointer not dereferenceable: ALLOC has been freed, so this pointer is dangling | = help: this indicates a bug in the program: it performed an invalid operation, and caused Undefined Behavior = help: see https://doc.rust-lang.org/nightly/reference/behavior-considered-undefined.html for further information diff --git a/src/tools/miri/tests/fail/read_from_trivial_switch.rs b/src/tools/miri/tests/fail/read_from_trivial_switch.rs new file mode 100644 index 0000000000000..d34b1cd582009 --- /dev/null +++ b/src/tools/miri/tests/fail/read_from_trivial_switch.rs @@ -0,0 +1,14 @@ +// Ensure that we don't optimize out `SwitchInt` reads even if that terminator +// branches to the same basic block on every target, since the operand may have +// side-effects that affect analysis of the MIR. +// +// See . + +use std::mem::MaybeUninit; + +fn main() { + let uninit: MaybeUninit = MaybeUninit::uninit(); + let bad_ref: &i32 = unsafe { uninit.assume_init_ref() }; + let &(0 | _) = bad_ref; + //~^ ERROR: Undefined Behavior: using uninitialized data, but this operation requires initialized memory +} diff --git a/src/tools/miri/tests/fail/read_from_trivial_switch.stderr b/src/tools/miri/tests/fail/read_from_trivial_switch.stderr new file mode 100644 index 0000000000000..6b3d4539b9681 --- /dev/null +++ b/src/tools/miri/tests/fail/read_from_trivial_switch.stderr @@ -0,0 +1,15 @@ +error: Undefined Behavior: using uninitialized data, but this operation requires initialized memory + --> tests/fail/read_from_trivial_switch.rs:LL:CC + | +LL | let &(0 | _) = bad_ref; + | ^^^^^^^^ using uninitialized data, but this operation requires initialized memory + | + = help: this indicates a bug in the program: it performed an invalid operation, and caused Undefined Behavior + = help: see https://doc.rust-lang.org/nightly/reference/behavior-considered-undefined.html for further information + = note: BACKTRACE: + = note: inside `main` at tests/fail/read_from_trivial_switch.rs:LL:CC + +note: some details are omitted, run with `MIRIFLAGS=-Zmiri-backtrace=full` for a verbose backtrace + +error: aborting due to 1 previous error + diff --git a/src/tools/miri/tests/fail/reading_half_a_pointer.stderr b/src/tools/miri/tests/fail/reading_half_a_pointer.stderr index 921796441694a..61fb9cd4e5260 100644 --- a/src/tools/miri/tests/fail/reading_half_a_pointer.stderr +++ b/src/tools/miri/tests/fail/reading_half_a_pointer.stderr @@ -1,8 +1,8 @@ -error: Undefined Behavior: memory access failed: expected a pointer to 1 byte of memory, but got $HEX[noalloc] which is a dangling pointer (it has no provenance) +error: Undefined Behavior: memory access failed: attempting to access 1 byte, but got $HEX[noalloc] which is a dangling pointer (it has no provenance) --> tests/fail/reading_half_a_pointer.rs:LL:CC | LL | let _val = *x; - | ^^ memory access failed: expected a pointer to 1 byte of memory, but got $HEX[noalloc] which is a dangling pointer (it has no provenance) + | ^^ memory access failed: attempting to access 1 byte, but got $HEX[noalloc] which is a dangling pointer (it has no provenance) | = help: this indicates a bug in the program: it performed an invalid operation, and caused Undefined Behavior = help: see https://doc.rust-lang.org/nightly/reference/behavior-considered-undefined.html for further information diff --git a/src/tools/miri/tests/fail/shims/backtrace/bad-backtrace-ptr.rs b/src/tools/miri/tests/fail/shims/backtrace/bad-backtrace-ptr.rs index 75f7aae9718d6..a398eb1ae0c22 100644 --- a/src/tools/miri/tests/fail/shims/backtrace/bad-backtrace-ptr.rs +++ b/src/tools/miri/tests/fail/shims/backtrace/bad-backtrace-ptr.rs @@ -4,6 +4,6 @@ extern "Rust" { fn main() { unsafe { - miri_resolve_frame(std::ptr::null_mut(), 0); //~ ERROR: got a null pointer + miri_resolve_frame(std::ptr::null_mut(), 0); //~ ERROR: null pointer } } diff --git a/src/tools/miri/tests/fail/shims/backtrace/bad-backtrace-ptr.stderr b/src/tools/miri/tests/fail/shims/backtrace/bad-backtrace-ptr.stderr index 7ae9558fad70a..126f41fbb0e62 100644 --- a/src/tools/miri/tests/fail/shims/backtrace/bad-backtrace-ptr.stderr +++ b/src/tools/miri/tests/fail/shims/backtrace/bad-backtrace-ptr.stderr @@ -1,8 +1,8 @@ -error: Undefined Behavior: out-of-bounds pointer use: expected a pointer to some allocation, but got a null pointer +error: Undefined Behavior: pointer not dereferenceable: pointer must point to some allocation, but got null pointer --> tests/fail/shims/backtrace/bad-backtrace-ptr.rs:LL:CC | LL | miri_resolve_frame(std::ptr::null_mut(), 0); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ out-of-bounds pointer use: expected a pointer to some allocation, but got a null pointer + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ pointer not dereferenceable: pointer must point to some allocation, but got null pointer | = help: this indicates a bug in the program: it performed an invalid operation, and caused Undefined Behavior = help: see https://doc.rust-lang.org/nightly/reference/behavior-considered-undefined.html for further information diff --git a/src/tools/miri/tests/fail/shims/vararg_caller_signature_mismatch.rs b/src/tools/miri/tests/fail/shims/vararg_caller_signature_mismatch.rs index 515e467fb54d3..ac6e221fcd8d3 100644 --- a/src/tools/miri/tests/fail/shims/vararg_caller_signature_mismatch.rs +++ b/src/tools/miri/tests/fail/shims/vararg_caller_signature_mismatch.rs @@ -9,6 +9,6 @@ extern "C" { fn main() { let mut fds = [-1, -1]; let res = unsafe { pipe(fds.as_mut_ptr()) }; - //~^ ERROR: calling a non-variadic function with a variadic caller-side signature + //~^ ERROR: ABI mismatch: calling a non-variadic function with a variadic caller-side signature assert_eq!(res, 0); } diff --git a/src/tools/miri/tests/fail/shims/vararg_caller_signature_mismatch.stderr b/src/tools/miri/tests/fail/shims/vararg_caller_signature_mismatch.stderr index 2782f3b3269cf..0f642aca322af 100644 --- a/src/tools/miri/tests/fail/shims/vararg_caller_signature_mismatch.stderr +++ b/src/tools/miri/tests/fail/shims/vararg_caller_signature_mismatch.stderr @@ -1,8 +1,8 @@ -error: Undefined Behavior: calling a non-variadic function with a variadic caller-side signature +error: Undefined Behavior: ABI mismatch: calling a non-variadic function with a variadic caller-side signature --> tests/fail/shims/vararg_caller_signature_mismatch.rs:LL:CC | LL | let res = unsafe { pipe(fds.as_mut_ptr()) }; - | ^^^^^^^^^^^^^^^^^^^^^^ calling a non-variadic function with a variadic caller-side signature + | ^^^^^^^^^^^^^^^^^^^^^^ ABI mismatch: calling a non-variadic function with a variadic caller-side signature | = help: this indicates a bug in the program: it performed an invalid operation, and caused Undefined Behavior = help: see https://doc.rust-lang.org/nightly/reference/behavior-considered-undefined.html for further information diff --git a/src/tools/miri/tests/fail/stacked_borrows/retag_data_race_protected_read.rs b/src/tools/miri/tests/fail/stacked_borrows/retag_data_race_protected_read.rs index a6ee7b40c340b..6ff69554387e7 100644 --- a/src/tools/miri/tests/fail/stacked_borrows/retag_data_race_protected_read.rs +++ b/src/tools/miri/tests/fail/stacked_borrows/retag_data_race_protected_read.rs @@ -1,5 +1,4 @@ -// Avoid accidental synchronization via address reuse. -//@compile-flags: -Zmiri-preemption-rate=0 -Zmiri-address-reuse-cross-thread-rate=0 +//@compile-flags:-Zmiri-deterministic-concurrency use std::thread; #[derive(Copy, Clone)] diff --git a/src/tools/miri/tests/fail/stacked_borrows/retag_data_race_read.rs b/src/tools/miri/tests/fail/stacked_borrows/retag_data_race_read.rs index 949f659e7e8e5..f46f13a39e7ad 100644 --- a/src/tools/miri/tests/fail/stacked_borrows/retag_data_race_read.rs +++ b/src/tools/miri/tests/fail/stacked_borrows/retag_data_race_read.rs @@ -1,6 +1,5 @@ //! Make sure that a retag acts like a read for the data race model. -// Avoid accidental synchronization via address reuse. -//@compile-flags: -Zmiri-preemption-rate=0 -Zmiri-address-reuse-cross-thread-rate=0 +//@compile-flags:-Zmiri-deterministic-concurrency #[derive(Copy, Clone)] struct SendPtr(*mut u8); diff --git a/src/tools/miri/tests/fail/tree_borrows/children-can-alias.default.stderr b/src/tools/miri/tests/fail/tree_borrows/children-can-alias.default.stderr deleted file mode 100644 index b9651e21ecece..0000000000000 --- a/src/tools/miri/tests/fail/tree_borrows/children-can-alias.default.stderr +++ /dev/null @@ -1,15 +0,0 @@ -error: Undefined Behavior: entering unreachable code - --> tests/fail/tree_borrows/children-can-alias.rs:LL:CC - | -LL | std::hint::unreachable_unchecked(); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ entering unreachable code - | - = help: this indicates a bug in the program: it performed an invalid operation, and caused Undefined Behavior - = help: see https://doc.rust-lang.org/nightly/reference/behavior-considered-undefined.html for further information - = note: BACKTRACE: - = note: inside `main` at tests/fail/tree_borrows/children-can-alias.rs:LL:CC - -note: some details are omitted, run with `MIRIFLAGS=-Zmiri-backtrace=full` for a verbose backtrace - -error: aborting due to 1 previous error - diff --git a/src/tools/miri/tests/fail/tree_borrows/children-can-alias.rs b/src/tools/miri/tests/fail/tree_borrows/children-can-alias.rs deleted file mode 100644 index d3f272dfbf81d..0000000000000 --- a/src/tools/miri/tests/fail/tree_borrows/children-can-alias.rs +++ /dev/null @@ -1,58 +0,0 @@ -//@revisions: default uniq -//@compile-flags: -Zmiri-tree-borrows -//@[uniq]compile-flags: -Zmiri-unique-is-unique - -//! This is NOT intended behavior. -//! We should eventually find a solution so that the version with `Unique` passes too, -//! otherwise `Unique` is more strict than `&mut`! - -#![feature(ptr_internals)] - -use core::ptr::{Unique, addr_of_mut}; - -fn main() { - let mut data = 0u8; - let raw = addr_of_mut!(data); - unsafe { - raw_children_of_refmut_can_alias(&mut *raw); - raw_children_of_unique_can_alias(Unique::new_unchecked(raw)); - - // Ultimately the intended behavior is that both above tests would - // succeed. - std::hint::unreachable_unchecked(); - //~[default]^ ERROR: entering unreachable code - } -} - -unsafe fn raw_children_of_refmut_can_alias(x: &mut u8) { - let child1 = addr_of_mut!(*x); - let child2 = addr_of_mut!(*x); - // We create two raw aliases of `x`: they have the exact same - // tag and can be used interchangeably. - child1.write(1); - child2.write(2); - child1.write(1); - child2.write(2); -} - -unsafe fn raw_children_of_unique_can_alias(x: Unique) { - let child1 = x.as_ptr(); - let child2 = x.as_ptr(); - // Under `-Zmiri-unique-is-unique`, `Unique` accidentally offers more guarantees - // than `&mut`. Not because it responds differently to accesses but because - // there is no easy way to obtain a copy with the same tag. - // - // The closest (non-hack) attempt is two calls to `as_ptr`. - // - Without `-Zmiri-unique-is-unique`, independent `as_ptr` calls return pointers - // with the same tag that can thus be used interchangeably. - // - With the current implementation of `-Zmiri-unique-is-unique`, they return cousin - // tags with permissions that do not tolerate aliasing. - // Eventually we should make such aliasing allowed in some situations - // (e.g. when there is no protector), which will probably involve - // introducing a new kind of permission. - child1.write(1); - child2.write(2); - //~[uniq]^ ERROR: /write access through .* is forbidden/ - child1.write(1); - child2.write(2); -} diff --git a/src/tools/miri/tests/fail/tree_borrows/children-can-alias.uniq.stderr b/src/tools/miri/tests/fail/tree_borrows/children-can-alias.uniq.stderr deleted file mode 100644 index 83c506abb2d73..0000000000000 --- a/src/tools/miri/tests/fail/tree_borrows/children-can-alias.uniq.stderr +++ /dev/null @@ -1,31 +0,0 @@ -error: Undefined Behavior: write access through at ALLOC[0x0] is forbidden - --> tests/fail/tree_borrows/children-can-alias.rs:LL:CC - | -LL | child2.write(2); - | ^^^^^^^^^^^^^^^ write access through at ALLOC[0x0] is forbidden - | - = help: this indicates a potential bug in the program: it performed an invalid operation, but the Tree Borrows rules it violated are still experimental - = help: the accessed tag has state Disabled which forbids this child write access -help: the accessed tag was created here, in the initial state Reserved - --> tests/fail/tree_borrows/children-can-alias.rs:LL:CC - | -LL | let child2 = x.as_ptr(); - | ^^^^^^^^^^ -help: the accessed tag later transitioned to Disabled due to a foreign write access at offsets [0x0..0x1] - --> tests/fail/tree_borrows/children-can-alias.rs:LL:CC - | -LL | child1.write(1); - | ^^^^^^^^^^^^^^^ - = help: this transition corresponds to a loss of read and write permissions - = note: BACKTRACE (of the first span): - = note: inside `raw_children_of_unique_can_alias` at tests/fail/tree_borrows/children-can-alias.rs:LL:CC -note: inside `main` - --> tests/fail/tree_borrows/children-can-alias.rs:LL:CC - | -LL | raw_children_of_unique_can_alias(Unique::new_unchecked(raw)); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -note: some details are omitted, run with `MIRIFLAGS=-Zmiri-backtrace=full` for a verbose backtrace - -error: aborting due to 1 previous error - diff --git a/src/tools/miri/tests/fail/tree_borrows/reserved/cell-protected-write.rs b/src/tools/miri/tests/fail/tree_borrows/reserved/cell-protected-write.rs index 7af1a7636fad7..bf963f6a8f774 100644 --- a/src/tools/miri/tests/fail/tree_borrows/reserved/cell-protected-write.rs +++ b/src/tools/miri/tests/fail/tree_borrows/reserved/cell-protected-write.rs @@ -12,16 +12,16 @@ use std::cell::UnsafeCell; fn main() { unsafe { let n = &mut UnsafeCell::new(0u8); - name!(n.get(), "base"); + name!(n as *mut _, "base"); let x = &mut *(n as *mut UnsafeCell<_>); - name!(x.get(), "x"); - let y = (&mut *n).get(); + name!(x as *mut _, "x"); + let y = (&mut *n) as *mut UnsafeCell<_> as *mut _; name!(y); write_second(x, y); unsafe fn write_second(x: &mut UnsafeCell, y: *mut u8) { let alloc_id = alloc_id!(x.get()); - name!(x.get(), "callee:x"); - name!(x.get()=>1, "caller:x"); + name!(x as *mut _, "callee:x"); + name!((x as *mut _)=>1, "caller:x"); name!(y, "callee:y"); name!(y, "caller:y"); print_state!(alloc_id); diff --git a/src/tools/miri/tests/fail/tree_borrows/reserved/cell-protected-write.stderr b/src/tools/miri/tests/fail/tree_borrows/reserved/cell-protected-write.stderr index 03f79fe0a5d74..10414df6a6a26 100644 --- a/src/tools/miri/tests/fail/tree_borrows/reserved/cell-protected-write.stderr +++ b/src/tools/miri/tests/fail/tree_borrows/reserved/cell-protected-write.stderr @@ -21,7 +21,7 @@ LL | *y = 1; help: the accessed tag was created here --> tests/fail/tree_borrows/reserved/cell-protected-write.rs:LL:CC | -LL | let y = (&mut *n).get(); +LL | let y = (&mut *n) as *mut UnsafeCell<_> as *mut _; | ^^^^^^^^^ help: the protected tag was created here, in the initial state Reserved --> tests/fail/tree_borrows/reserved/cell-protected-write.rs:LL:CC diff --git a/src/tools/miri/tests/fail/tree_borrows/reservedim_spurious_write.rs b/src/tools/miri/tests/fail/tree_borrows/reservedim_spurious_write.rs index 73f227fee2fcb..024a14600b1ba 100644 --- a/src/tools/miri/tests/fail/tree_borrows/reservedim_spurious_write.rs +++ b/src/tools/miri/tests/fail/tree_borrows/reservedim_spurious_write.rs @@ -1,7 +1,7 @@ // Illustrating a problematic interaction between Reserved, interior mutability, // and protectors, that makes spurious writes fail in the previous model of Tree Borrows. // As for all similar tests, we disable preemption so that the error message is deterministic. -//@compile-flags: -Zmiri-tree-borrows -Zmiri-preemption-rate=0 +//@compile-flags: -Zmiri-tree-borrows -Zmiri-deterministic-concurrency // // One revision without spurious read (default source code) and one with spurious read. // Both are expected to be UB. Both revisions are expected to have the *same* error diff --git a/src/tools/miri/tests/fail/tree_borrows/spurious_read.rs b/src/tools/miri/tests/fail/tree_borrows/spurious_read.rs index 50ef0ceef91e7..4ca0fb9433813 100644 --- a/src/tools/miri/tests/fail/tree_borrows/spurious_read.rs +++ b/src/tools/miri/tests/fail/tree_borrows/spurious_read.rs @@ -1,8 +1,8 @@ // We ensure a deterministic execution. // Note that we are *also* using barriers: the barriers enforce the -// specific interleaving of operations that we want, but only the preemption -// rate guarantees that the error message is also deterministic. -//@compile-flags: -Zmiri-preemption-rate=0 +// specific interleaving of operations that we want, but we need to disable +// preemption to ensure that the error message is also deterministic. +//@compile-flags: -Zmiri-deterministic-concurrency //@compile-flags: -Zmiri-tree-borrows use std::sync::{Arc, Barrier}; diff --git a/src/tools/miri/tests/fail/tree_borrows/unique.rs b/src/tools/miri/tests/fail/tree_borrows/unique.rs deleted file mode 100644 index 0844dd21a5923..0000000000000 --- a/src/tools/miri/tests/fail/tree_borrows/unique.rs +++ /dev/null @@ -1,27 +0,0 @@ -//@revisions: default uniq -//@compile-flags: -Zmiri-tree-borrows -//@[uniq]compile-flags: -Zmiri-unique-is-unique - -// A pattern that detects if `Unique` is treated as exclusive or not: -// activate the pointer behind a `Unique` then do a read that is parent -// iff `Unique` was specially reborrowed. - -#![feature(ptr_internals)] -use core::ptr::Unique; - -fn main() { - let mut data = 0u8; - let refmut = &mut data; - let rawptr = refmut as *mut u8; - - unsafe { - let uniq = Unique::new_unchecked(rawptr); - *uniq.as_ptr() = 1; // activation - let _maybe_parent = *rawptr; // maybe becomes Frozen - *uniq.as_ptr() = 2; - //~[uniq]^ ERROR: /write access through .* is forbidden/ - let _definitely_parent = data; // definitely Frozen by now - *uniq.as_ptr() = 3; - //~[default]^ ERROR: /write access through .* is forbidden/ - } -} diff --git a/src/tools/miri/tests/fail/tree_borrows/unique.uniq.stderr b/src/tools/miri/tests/fail/tree_borrows/unique.uniq.stderr deleted file mode 100644 index 4ecff3ea0e1f1..0000000000000 --- a/src/tools/miri/tests/fail/tree_borrows/unique.uniq.stderr +++ /dev/null @@ -1,38 +0,0 @@ -error: Undefined Behavior: write access through at ALLOC[0x0] is forbidden - --> tests/fail/tree_borrows/unique.rs:LL:CC - | -LL | *uniq.as_ptr() = 2; - | ^^^^^^^^^^^^^^^^^^ write access through at ALLOC[0x0] is forbidden - | - = help: this indicates a potential bug in the program: it performed an invalid operation, but the Tree Borrows rules it violated are still experimental - = help: the accessed tag is a child of the conflicting tag - = help: the conflicting tag has state Frozen which forbids this child write access -help: the accessed tag was created here - --> tests/fail/tree_borrows/unique.rs:LL:CC - | -LL | *uniq.as_ptr() = 2; - | ^^^^^^^^^^^^^ -help: the conflicting tag was created here, in the initial state Reserved - --> tests/fail/tree_borrows/unique.rs:LL:CC - | -LL | let uniq = Unique::new_unchecked(rawptr); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -help: the conflicting tag later transitioned to Active due to a child write access at offsets [0x0..0x1] - --> tests/fail/tree_borrows/unique.rs:LL:CC - | -LL | *uniq.as_ptr() = 1; // activation - | ^^^^^^^^^^^^^^^^^^ - = help: this transition corresponds to the first write to a 2-phase borrowed mutable reference -help: the conflicting tag later transitioned to Frozen due to a foreign read access at offsets [0x0..0x1] - --> tests/fail/tree_borrows/unique.rs:LL:CC - | -LL | let _maybe_parent = *rawptr; // maybe becomes Frozen - | ^^^^^^^ - = help: this transition corresponds to a loss of write permissions - = note: BACKTRACE (of the first span): - = note: inside `main` at tests/fail/tree_borrows/unique.rs:LL:CC - -note: some details are omitted, run with `MIRIFLAGS=-Zmiri-backtrace=full` for a verbose backtrace - -error: aborting due to 1 previous error - diff --git a/src/tools/miri/tests/fail/validity/invalid_bool.rs b/src/tools/miri/tests/fail/validity/invalid_bool.rs index 4f11bb2629f5f..bd448af834df7 100644 --- a/src/tools/miri/tests/fail/validity/invalid_bool.rs +++ b/src/tools/miri/tests/fail/validity/invalid_bool.rs @@ -1,3 +1,4 @@ +#![allow(unnecessary_transmutes)] fn main() { let _b = unsafe { std::mem::transmute::(2) }; //~ ERROR: expected a boolean } diff --git a/src/tools/miri/tests/fail/validity/invalid_bool_op.rs b/src/tools/miri/tests/fail/validity/invalid_bool_op.rs index fe9bb3bed7f01..0cbe2d76dc6ca 100644 --- a/src/tools/miri/tests/fail/validity/invalid_bool_op.rs +++ b/src/tools/miri/tests/fail/validity/invalid_bool_op.rs @@ -2,6 +2,7 @@ // Make sure we find these even with many checks disabled. //@compile-flags: -Zmiri-disable-alignment-check -Zmiri-disable-stacked-borrows -Zmiri-disable-validation +#![allow(unnecessary_transmutes)] fn main() { let b = unsafe { std::mem::transmute::(2) }; let _x = b == std::hint::black_box(true); //~ ERROR: interpreting an invalid 8-bit value as a bool diff --git a/src/tools/miri/tests/fail/validity/invalid_char.rs b/src/tools/miri/tests/fail/validity/invalid_char.rs index 568892e591096..d57c933dac17e 100644 --- a/src/tools/miri/tests/fail/validity/invalid_char.rs +++ b/src/tools/miri/tests/fail/validity/invalid_char.rs @@ -1,3 +1,4 @@ +#![allow(unnecessary_transmutes)] fn main() { assert!(std::char::from_u32(-1_i32 as u32).is_none()); let _val = match unsafe { std::mem::transmute::(-1) } { diff --git a/src/tools/miri/tests/fail/validity/invalid_char_op.rs b/src/tools/miri/tests/fail/validity/invalid_char_op.rs index 699248229445f..e3a5f837e1891 100644 --- a/src/tools/miri/tests/fail/validity/invalid_char_op.rs +++ b/src/tools/miri/tests/fail/validity/invalid_char_op.rs @@ -2,6 +2,7 @@ // Make sure we find these even with many checks disabled. //@compile-flags: -Zmiri-disable-alignment-check -Zmiri-disable-stacked-borrows -Zmiri-disable-validation +#![allow(unnecessary_transmutes)] fn main() { let c = 0xFFFFFFu32; assert!(std::char::from_u32(c).is_none()); diff --git a/src/tools/miri/tests/fail/weak_memory/weak_uninit.rs b/src/tools/miri/tests/fail/weak_memory/weak_uninit.rs index 79c97a5b75273..b4b4b08498773 100644 --- a/src/tools/miri/tests/fail/weak_memory/weak_uninit.rs +++ b/src/tools/miri/tests/fail/weak_memory/weak_uninit.rs @@ -1,4 +1,4 @@ -//@compile-flags: -Zmiri-ignore-leaks -Zmiri-preemption-rate=0 +//@compile-flags: -Zmiri-ignore-leaks -Zmiri-fixed-schedule // Tests showing weak memory behaviours are exhibited. All tests // return true when the desired behaviour is seen. diff --git a/src/tools/miri/tests/fail/zst_local_oob.rs b/src/tools/miri/tests/fail/zst_local_oob.rs index ab48b7d330b37..2bf184c72bae2 100644 --- a/src/tools/miri/tests/fail/zst_local_oob.rs +++ b/src/tools/miri/tests/fail/zst_local_oob.rs @@ -1,5 +1,5 @@ fn main() { // make sure ZST locals cannot be accessed let x = &() as *const () as *const i8; - let _val = unsafe { *x }; //~ ERROR: expected a pointer to 1 byte of memory + let _val = unsafe { *x }; //~ ERROR: attempting to access 1 byte } diff --git a/src/tools/miri/tests/fail/zst_local_oob.stderr b/src/tools/miri/tests/fail/zst_local_oob.stderr index 26911948eff71..e9423096226eb 100644 --- a/src/tools/miri/tests/fail/zst_local_oob.stderr +++ b/src/tools/miri/tests/fail/zst_local_oob.stderr @@ -1,8 +1,8 @@ -error: Undefined Behavior: memory access failed: expected a pointer to 1 byte of memory, but got ALLOC which is at or beyond the end of the allocation of size 0 bytes +error: Undefined Behavior: memory access failed: attempting to access 1 byte, but got ALLOC which is at or beyond the end of the allocation of size 0 bytes --> tests/fail/zst_local_oob.rs:LL:CC | LL | let _val = unsafe { *x }; - | ^^ memory access failed: expected a pointer to 1 byte of memory, but got ALLOC which is at or beyond the end of the allocation of size 0 bytes + | ^^ memory access failed: attempting to access 1 byte, but got ALLOC which is at or beyond the end of the allocation of size 0 bytes | = help: this indicates a bug in the program: it performed an invalid operation, and caused Undefined Behavior = help: see https://doc.rust-lang.org/nightly/reference/behavior-considered-undefined.html for further information diff --git a/src/tools/miri/tests/pass-dep/concurrency/apple-futex.rs b/src/tools/miri/tests/pass-dep/concurrency/apple-futex.rs index becb90eb92307..a28f08c3bb1a0 100644 --- a/src/tools/miri/tests/pass-dep/concurrency/apple-futex.rs +++ b/src/tools/miri/tests/pass-dep/concurrency/apple-futex.rs @@ -1,5 +1,5 @@ //@only-target: darwin -//@compile-flags: -Zmiri-preemption-rate=0 +//@compile-flags: -Zmiri-deterministic-concurrency use std::time::{Duration, Instant}; use std::{io, ptr, thread}; diff --git a/src/tools/miri/tests/pass-dep/concurrency/env-cleanup-data-race.rs b/src/tools/miri/tests/pass-dep/concurrency/env-cleanup-data-race.rs index c9c9dc5dfd2ac..91cf24a944ad6 100644 --- a/src/tools/miri/tests/pass-dep/concurrency/env-cleanup-data-race.rs +++ b/src/tools/miri/tests/pass-dep/concurrency/env-cleanup-data-race.rs @@ -1,4 +1,4 @@ -//@compile-flags: -Zmiri-disable-isolation -Zmiri-preemption-rate=0 +//@compile-flags: -Zmiri-disable-isolation -Zmiri-deterministic-concurrency //@ignore-target: windows # No libc env support on Windows use std::ffi::CStr; diff --git a/src/tools/miri/tests/pass-dep/concurrency/freebsd-futex.rs b/src/tools/miri/tests/pass-dep/concurrency/freebsd-futex.rs new file mode 100644 index 0000000000000..b10ded8e4d008 --- /dev/null +++ b/src/tools/miri/tests/pass-dep/concurrency/freebsd-futex.rs @@ -0,0 +1,260 @@ +//@only-target: freebsd +//@compile-flags: -Zmiri-deterministic-concurrency -Zmiri-disable-isolation + +use std::mem::{self, MaybeUninit}; +use std::ptr::{self, addr_of}; +use std::sync::atomic::AtomicU32; +use std::time::Instant; +use std::{io, thread}; + +fn wait_wake() { + fn wake_nobody() { + // Current thread waits on futex. + // New thread wakes up 0 threads waiting on that futex. + // Current thread should time out. + static mut FUTEX: u32 = 0; + + let waker = thread::spawn(|| { + unsafe { + assert_eq!( + libc::_umtx_op( + addr_of!(FUTEX) as *mut _, + libc::UMTX_OP_WAKE_PRIVATE, + 0, // wake up 0 waiters + ptr::null_mut::(), + ptr::null_mut::(), + ), + 0 + ); + } + }); + + // 10ms should be enough. + let mut timeout = libc::timespec { tv_sec: 0, tv_nsec: 10_000_000 }; + let timeout_size_arg = + ptr::without_provenance_mut::(mem::size_of::()); + unsafe { + assert_eq!( + libc::_umtx_op( + addr_of!(FUTEX) as *mut _, + libc::UMTX_OP_WAIT_UINT_PRIVATE, + 0, + timeout_size_arg, + &mut timeout as *mut _ as _, + ), + -1 + ); + // Main thread did not get woken up, so it timed out. + assert_eq!(io::Error::last_os_error().raw_os_error().unwrap(), libc::ETIMEDOUT); + } + + waker.join().unwrap(); + } + + fn wake_two_of_three() { + // We create 2 threads that wait on a futex with a 100ms timeout. + // The main thread wakes up 2 threads waiting on this futex and after this + // checks that only those threads woke up and the other one timed out. + static mut FUTEX: u32 = 0; + + fn waiter() -> bool { + let mut timeout = libc::timespec { tv_sec: 0, tv_nsec: 100_000_000 }; + let timeout_size_arg = + ptr::without_provenance_mut::(mem::size_of::()); + unsafe { + libc::_umtx_op( + addr_of!(FUTEX) as *mut _, + libc::UMTX_OP_WAIT_UINT_PRIVATE, + 0, // FUTEX is 0 + timeout_size_arg, + &mut timeout as *mut _ as _, + ); + // Return true if this thread woke up. + io::Error::last_os_error().raw_os_error().unwrap() != libc::ETIMEDOUT + } + } + + let t1 = thread::spawn(waiter); + let t2 = thread::spawn(waiter); + let t3 = thread::spawn(waiter); + + // Run all the waiters, so they can go to sleep. + thread::yield_now(); + + // Wake up 2 thread and make sure 1 is still waiting. + unsafe { + assert_eq!( + libc::_umtx_op( + addr_of!(FUTEX) as *mut _, + libc::UMTX_OP_WAKE_PRIVATE, + 2, + ptr::null_mut::(), + ptr::null_mut::(), + ), + 0 + ); + } + + // Treat the booleans as numbers to simplify checking how many threads were woken up. + let t1 = t1.join().unwrap() as usize; + let t2 = t2.join().unwrap() as usize; + let t3 = t3.join().unwrap() as usize; + let woken_up_count = t1 + t2 + t3; + assert!(woken_up_count == 2, "Expected 2 threads to wake up got: {woken_up_count}"); + } + + wake_nobody(); + wake_two_of_three(); +} + +fn wake_dangling() { + let futex = Box::new(0); + let ptr: *const u32 = &*futex; + drop(futex); + + // Expect error since this is now "unmapped" memory. + unsafe { + assert_eq!( + libc::_umtx_op( + ptr as *const AtomicU32 as *mut _, + libc::UMTX_OP_WAKE_PRIVATE, + 0, + ptr::null_mut::(), + ptr::null_mut::(), + ), + -1 + ); + assert_eq!(io::Error::last_os_error().raw_os_error().unwrap(), libc::EFAULT); + } +} + +fn wait_wrong_val() { + let futex: u32 = 123; + + // Wait with a wrong value just returns 0 + unsafe { + assert_eq!( + libc::_umtx_op( + ptr::from_ref(&futex).cast_mut().cast(), + libc::UMTX_OP_WAIT_UINT_PRIVATE, + 456, + ptr::null_mut::(), + ptr::null_mut::(), + ), + 0 + ); + } +} + +fn wait_relative_timeout() { + fn without_timespec() { + let start = Instant::now(); + + let futex: u32 = 123; + + let mut timeout = libc::timespec { tv_sec: 0, tv_nsec: 200_000_000 }; + let timeout_size_arg = + ptr::without_provenance_mut::(mem::size_of::()); + // Wait for 200ms, with nobody waking us up early + unsafe { + assert_eq!( + libc::_umtx_op( + ptr::from_ref(&futex).cast_mut().cast(), + libc::UMTX_OP_WAIT_UINT_PRIVATE, + 123, + timeout_size_arg, + &mut timeout as *mut _ as _, + ), + -1 + ); + assert_eq!(io::Error::last_os_error().raw_os_error().unwrap(), libc::ETIMEDOUT); + } + + assert!((200..1000).contains(&start.elapsed().as_millis())); + } + + fn with_timespec() { + let futex: u32 = 123; + let mut timeout = libc::_umtx_time { + _timeout: libc::timespec { tv_sec: 0, tv_nsec: 200_000_000 }, + _flags: 0, + _clockid: libc::CLOCK_MONOTONIC as u32, + }; + let timeout_size_arg = + ptr::without_provenance_mut::(mem::size_of::()); + + let start = Instant::now(); + + // Wait for 200ms, with nobody waking us up early + unsafe { + assert_eq!( + libc::_umtx_op( + ptr::from_ref(&futex).cast_mut().cast(), + libc::UMTX_OP_WAIT_UINT_PRIVATE, + 123, + timeout_size_arg, + &mut timeout as *mut _ as _, + ), + -1 + ); + assert_eq!(io::Error::last_os_error().raw_os_error().unwrap(), libc::ETIMEDOUT); + } + assert!((200..1000).contains(&start.elapsed().as_millis())); + } + + without_timespec(); + with_timespec(); +} + +fn wait_absolute_timeout() { + let start = Instant::now(); + + // Get the current monotonic timestamp as timespec. + let mut timeout = unsafe { + let mut now: MaybeUninit = MaybeUninit::uninit(); + assert_eq!(libc::clock_gettime(libc::CLOCK_MONOTONIC, now.as_mut_ptr()), 0); + now.assume_init() + }; + + // Add 200ms. + timeout.tv_nsec += 200_000_000; + if timeout.tv_nsec > 1_000_000_000 { + timeout.tv_nsec -= 1_000_000_000; + timeout.tv_sec += 1; + } + + // Create umtx_timeout struct with that absolute timeout. + let umtx_timeout = libc::_umtx_time { + _timeout: timeout, + _flags: libc::UMTX_ABSTIME, + _clockid: libc::CLOCK_MONOTONIC as u32, + }; + let umtx_timeout_ptr = &umtx_timeout as *const _; + let umtx_timeout_size = ptr::without_provenance_mut(mem::size_of_val(&umtx_timeout)); + + let futex: u32 = 123; + + // Wait for 200ms from now, with nobody waking us up early. + unsafe { + assert_eq!( + libc::_umtx_op( + ptr::from_ref(&futex).cast_mut().cast(), + libc::UMTX_OP_WAIT_UINT_PRIVATE, + 123, + umtx_timeout_size, + umtx_timeout_ptr as *mut _, + ), + -1 + ); + assert_eq!(io::Error::last_os_error().raw_os_error().unwrap(), libc::ETIMEDOUT); + } + assert!((200..1000).contains(&start.elapsed().as_millis())); +} + +fn main() { + wait_wake(); + wake_dangling(); + wait_wrong_val(); + wait_relative_timeout(); + wait_absolute_timeout(); +} diff --git a/src/tools/miri/tests/pass-dep/concurrency/windows_detach_terminated.rs b/src/tools/miri/tests/pass-dep/concurrency/windows_detach_terminated.rs index fe2d20bb76f8e..41b26c84393d2 100644 --- a/src/tools/miri/tests/pass-dep/concurrency/windows_detach_terminated.rs +++ b/src/tools/miri/tests/pass-dep/concurrency/windows_detach_terminated.rs @@ -1,6 +1,6 @@ //@only-target: windows # Uses win32 api functions // We are making scheduler assumptions here. -//@compile-flags: -Zmiri-preemption-rate=0 +//@compile-flags: -Zmiri-deterministic-concurrency use std::os::windows::io::IntoRawHandle; use std::thread; diff --git a/src/tools/miri/tests/pass-dep/concurrency/windows_init_once.rs b/src/tools/miri/tests/pass-dep/concurrency/windows_init_once.rs index 6853395686aa9..0a1fb11750794 100644 --- a/src/tools/miri/tests/pass-dep/concurrency/windows_init_once.rs +++ b/src/tools/miri/tests/pass-dep/concurrency/windows_init_once.rs @@ -1,6 +1,6 @@ //@only-target: windows # Uses win32 api functions // We are making scheduler assumptions here. -//@compile-flags: -Zmiri-preemption-rate=0 +//@compile-flags: -Zmiri-deterministic-concurrency use std::ptr::null_mut; use std::thread; diff --git a/src/tools/miri/tests/pass-dep/concurrency/windows_join_multiple.rs b/src/tools/miri/tests/pass-dep/concurrency/windows_join_multiple.rs index ce829eee22704..acd9270ad16ad 100644 --- a/src/tools/miri/tests/pass-dep/concurrency/windows_join_multiple.rs +++ b/src/tools/miri/tests/pass-dep/concurrency/windows_join_multiple.rs @@ -1,6 +1,6 @@ //@only-target: windows # Uses win32 api functions // We are making scheduler assumptions here. -//@compile-flags: -Zmiri-preemption-rate=0 +//@compile-flags: -Zmiri-deterministic-concurrency use std::os::windows::io::IntoRawHandle; use std::sync::atomic::{AtomicBool, Ordering}; @@ -9,6 +9,10 @@ use std::thread; use windows_sys::Win32::Foundation::{HANDLE, WAIT_OBJECT_0}; use windows_sys::Win32::System::Threading::{INFINITE, WaitForSingleObject}; +#[derive(Copy, Clone)] +struct UnsafeSendWrapper(T); +unsafe impl Send for UnsafeSendWrapper {} + fn main() { static FLAG: AtomicBool = AtomicBool::new(false); @@ -17,10 +21,12 @@ fn main() { thread::yield_now(); } }) - .into_raw_handle() as HANDLE; + .into_raw_handle(); + let blocker = UnsafeSendWrapper(blocker as HANDLE); let waiter = move || unsafe { - assert_eq!(WaitForSingleObject(blocker, INFINITE), WAIT_OBJECT_0); + let blocker = blocker; // circumvent per-field capturing + assert_eq!(WaitForSingleObject(blocker.0, INFINITE), WAIT_OBJECT_0); }; let waiter1 = thread::spawn(waiter); diff --git a/src/tools/miri/tests/pass-dep/libc/libc-epoll-blocking.rs b/src/tools/miri/tests/pass-dep/libc/libc-epoll-blocking.rs index 825e1355848bf..54ebfa9d198d1 100644 --- a/src/tools/miri/tests/pass-dep/libc/libc-epoll-blocking.rs +++ b/src/tools/miri/tests/pass-dep/libc/libc-epoll-blocking.rs @@ -1,6 +1,6 @@ //@only-target: linux android illumos // test_epoll_block_then_unblock and test_epoll_race depend on a deterministic schedule. -//@compile-flags: -Zmiri-preemption-rate=0 +//@compile-flags: -Zmiri-deterministic-concurrency use std::convert::TryInto; use std::thread; diff --git a/src/tools/miri/tests/pass-dep/libc/libc-eventfd.rs b/src/tools/miri/tests/pass-dep/libc/libc-eventfd.rs index 30e1bbb8fa1b1..56d215d0ed633 100644 --- a/src/tools/miri/tests/pass-dep/libc/libc-eventfd.rs +++ b/src/tools/miri/tests/pass-dep/libc/libc-eventfd.rs @@ -1,6 +1,6 @@ //@only-target: linux android illumos // test_race, test_blocking_read and test_blocking_write depend on a deterministic schedule. -//@compile-flags: -Zmiri-preemption-rate=0 +//@compile-flags: -Zmiri-deterministic-concurrency // FIXME(static_mut_refs): Do not allow `static_mut_refs` lint #![allow(static_mut_refs)] diff --git a/src/tools/miri/tests/pass-dep/libc/libc-fs-flock.rs b/src/tools/miri/tests/pass-dep/libc/libc-fs-flock.rs index 99d6d2b38f8de..116cde4b425c1 100644 --- a/src/tools/miri/tests/pass-dep/libc/libc-fs-flock.rs +++ b/src/tools/miri/tests/pass-dep/libc/libc-fs-flock.rs @@ -1,4 +1,5 @@ //@ignore-target: windows # File handling is not implemented yet +//@ignore-target: solaris # Does not have flock //@compile-flags: -Zmiri-disable-isolation use std::fs::File; diff --git a/src/tools/miri/tests/pass-dep/libc/libc-pipe.rs b/src/tools/miri/tests/pass-dep/libc/libc-pipe.rs index d6072c2569e92..05f6c870c3d77 100644 --- a/src/tools/miri/tests/pass-dep/libc/libc-pipe.rs +++ b/src/tools/miri/tests/pass-dep/libc/libc-pipe.rs @@ -1,6 +1,6 @@ //@ignore-target: windows # No libc pipe on Windows // test_race depends on a deterministic schedule. -//@compile-flags: -Zmiri-preemption-rate=0 +//@compile-flags: -Zmiri-deterministic-concurrency use std::thread; fn main() { test_pipe(); diff --git a/src/tools/miri/tests/pass-dep/libc/libc-socketpair.rs b/src/tools/miri/tests/pass-dep/libc/libc-socketpair.rs index 9163fd3d06fa6..9e48410f7045e 100644 --- a/src/tools/miri/tests/pass-dep/libc/libc-socketpair.rs +++ b/src/tools/miri/tests/pass-dep/libc/libc-socketpair.rs @@ -1,6 +1,6 @@ //@ignore-target: windows # No libc socketpair on Windows // test_race depends on a deterministic schedule. -//@compile-flags: -Zmiri-preemption-rate=0 +//@compile-flags: -Zmiri-deterministic-concurrency // FIXME(static_mut_refs): Do not allow `static_mut_refs` lint #![allow(static_mut_refs)] diff --git a/src/tools/miri/tests/pass-dep/libc/pthread-sync.rs b/src/tools/miri/tests/pass-dep/libc/pthread-sync.rs index fa11b5b129906..255944662940d 100644 --- a/src/tools/miri/tests/pass-dep/libc/pthread-sync.rs +++ b/src/tools/miri/tests/pass-dep/libc/pthread-sync.rs @@ -1,6 +1,6 @@ //@ignore-target: windows # No pthreads on Windows // We use `yield` to test specific interleavings, so disable automatic preemption. -//@compile-flags: -Zmiri-preemption-rate=0 +//@compile-flags: -Zmiri-deterministic-concurrency #![feature(sync_unsafe_cell)] use std::cell::SyncUnsafeCell; diff --git a/src/tools/miri/tests/pass-dep/shims/windows-fs.rs b/src/tools/miri/tests/pass-dep/shims/windows-fs.rs new file mode 100644 index 0000000000000..698ca4e0b4ba6 --- /dev/null +++ b/src/tools/miri/tests/pass-dep/shims/windows-fs.rs @@ -0,0 +1,224 @@ +//@only-target: windows # this directly tests windows-only functions +//@compile-flags: -Zmiri-disable-isolation +#![allow(nonstandard_style)] + +use std::io::ErrorKind; +use std::os::windows::ffi::OsStrExt; +use std::path::Path; +use std::ptr; + +#[path = "../../utils/mod.rs"] +mod utils; + +use windows_sys::Win32::Foundation::{ + CloseHandle, ERROR_ACCESS_DENIED, ERROR_ALREADY_EXISTS, ERROR_IO_DEVICE, GENERIC_READ, + GENERIC_WRITE, GetLastError, RtlNtStatusToDosError, STATUS_ACCESS_DENIED, + STATUS_IO_DEVICE_ERROR, +}; +use windows_sys::Win32::Storage::FileSystem::{ + BY_HANDLE_FILE_INFORMATION, CREATE_ALWAYS, CREATE_NEW, CreateFileW, DeleteFileW, + FILE_ATTRIBUTE_DIRECTORY, FILE_ATTRIBUTE_NORMAL, FILE_FLAG_BACKUP_SEMANTICS, + FILE_FLAG_OPEN_REPARSE_POINT, FILE_SHARE_DELETE, FILE_SHARE_READ, FILE_SHARE_WRITE, + GetFileInformationByHandle, OPEN_ALWAYS, OPEN_EXISTING, +}; + +fn main() { + unsafe { + test_create_dir_file(); + test_create_normal_file(); + test_create_always_twice(); + test_open_always_twice(); + test_open_dir_reparse(); + test_delete_file(); + test_ntstatus_to_dos(); + } +} + +unsafe fn test_create_dir_file() { + let temp = utils::tmp(); + let raw_path = to_wide_cstr(&temp); + // Open the `temp` directory. + let handle = CreateFileW( + raw_path.as_ptr(), + GENERIC_READ, + FILE_SHARE_DELETE | FILE_SHARE_READ | FILE_SHARE_WRITE, + ptr::null_mut(), + OPEN_EXISTING, + FILE_FLAG_BACKUP_SEMANTICS, + ptr::null_mut(), + ); + assert_ne!(handle.addr(), usize::MAX, "CreateFileW Failed: {}", GetLastError()); + let mut info = std::mem::zeroed::(); + if GetFileInformationByHandle(handle, &mut info) == 0 { + panic!("Failed to get file information") + }; + assert!(info.dwFileAttributes & FILE_ATTRIBUTE_DIRECTORY != 0); + if CloseHandle(handle) == 0 { + panic!("Failed to close file") + }; +} + +unsafe fn test_create_normal_file() { + let temp = utils::tmp().join("test.txt"); + let raw_path = to_wide_cstr(&temp); + let handle = CreateFileW( + raw_path.as_ptr(), + GENERIC_READ | GENERIC_WRITE, + FILE_SHARE_DELETE | FILE_SHARE_READ | FILE_SHARE_WRITE, + ptr::null_mut(), + CREATE_NEW, + 0, + ptr::null_mut(), + ); + assert_ne!(handle.addr(), usize::MAX, "CreateFileW Failed: {}", GetLastError()); + let mut info = std::mem::zeroed::(); + if GetFileInformationByHandle(handle, &mut info) == 0 { + panic!("Failed to get file information: {}", GetLastError()) + }; + assert!(info.dwFileAttributes & FILE_ATTRIBUTE_NORMAL != 0); + if CloseHandle(handle) == 0 { + panic!("Failed to close file") + }; + + // Test metadata-only handle + let handle = CreateFileW( + raw_path.as_ptr(), + 0, + FILE_SHARE_DELETE | FILE_SHARE_READ | FILE_SHARE_WRITE, + ptr::null_mut(), + OPEN_EXISTING, + 0, + ptr::null_mut(), + ); + assert_ne!(handle.addr(), usize::MAX, "CreateFileW Failed: {}", GetLastError()); + let mut info = std::mem::zeroed::(); + if GetFileInformationByHandle(handle, &mut info) == 0 { + panic!("Failed to get file information: {}", GetLastError()) + }; + assert!(info.dwFileAttributes & FILE_ATTRIBUTE_NORMAL != 0); + if CloseHandle(handle) == 0 { + panic!("Failed to close file") + }; +} + +/// Tests that CREATE_ALWAYS sets the error value correctly based on whether the file already exists +unsafe fn test_create_always_twice() { + let temp = utils::tmp().join("test_create_always.txt"); + let raw_path = to_wide_cstr(&temp); + let handle = CreateFileW( + raw_path.as_ptr(), + GENERIC_READ | GENERIC_WRITE, + FILE_SHARE_DELETE | FILE_SHARE_READ | FILE_SHARE_WRITE, + ptr::null_mut(), + CREATE_ALWAYS, + 0, + ptr::null_mut(), + ); + assert_ne!(handle.addr(), usize::MAX, "CreateFileW Failed: {}", GetLastError()); + assert_eq!(GetLastError(), 0); + if CloseHandle(handle) == 0 { + panic!("Failed to close file") + }; + + let handle = CreateFileW( + raw_path.as_ptr(), + GENERIC_READ | GENERIC_WRITE, + FILE_SHARE_DELETE | FILE_SHARE_READ | FILE_SHARE_WRITE, + ptr::null_mut(), + CREATE_ALWAYS, + 0, + ptr::null_mut(), + ); + assert_ne!(handle.addr(), usize::MAX, "CreateFileW Failed: {}", GetLastError()); + assert_eq!(GetLastError(), ERROR_ALREADY_EXISTS); + if CloseHandle(handle) == 0 { + panic!("Failed to close file") + }; +} + +/// Tests that OPEN_ALWAYS sets the error value correctly based on whether the file already exists +unsafe fn test_open_always_twice() { + let temp = utils::tmp().join("test_open_always.txt"); + let raw_path = to_wide_cstr(&temp); + let handle = CreateFileW( + raw_path.as_ptr(), + GENERIC_READ | GENERIC_WRITE, + FILE_SHARE_DELETE | FILE_SHARE_READ | FILE_SHARE_WRITE, + ptr::null_mut(), + OPEN_ALWAYS, + 0, + ptr::null_mut(), + ); + assert_ne!(handle.addr(), usize::MAX, "CreateFileW Failed: {}", GetLastError()); + assert_eq!(GetLastError(), 0); + if CloseHandle(handle) == 0 { + panic!("Failed to close file") + }; + + let handle = CreateFileW( + raw_path.as_ptr(), + GENERIC_READ | GENERIC_WRITE, + FILE_SHARE_DELETE | FILE_SHARE_READ | FILE_SHARE_WRITE, + ptr::null_mut(), + OPEN_ALWAYS, + 0, + ptr::null_mut(), + ); + assert_ne!(handle.addr(), usize::MAX, "CreateFileW Failed: {}", GetLastError()); + assert_eq!(GetLastError(), ERROR_ALREADY_EXISTS); + if CloseHandle(handle) == 0 { + panic!("Failed to close file") + }; +} + +// TODO: Once we support more of the std API, it would be nice to test against an actual symlink +unsafe fn test_open_dir_reparse() { + let temp = utils::tmp(); + let raw_path = to_wide_cstr(&temp); + // Open the `temp` directory. + let handle = CreateFileW( + raw_path.as_ptr(), + GENERIC_READ, + FILE_SHARE_DELETE | FILE_SHARE_READ | FILE_SHARE_WRITE, + ptr::null_mut(), + OPEN_EXISTING, + FILE_FLAG_BACKUP_SEMANTICS | FILE_FLAG_OPEN_REPARSE_POINT, + ptr::null_mut(), + ); + assert_ne!(handle.addr(), usize::MAX, "CreateFileW Failed: {}", GetLastError()); + let mut info = std::mem::zeroed::(); + if GetFileInformationByHandle(handle, &mut info) == 0 { + panic!("Failed to get file information") + }; + assert!(info.dwFileAttributes & FILE_ATTRIBUTE_DIRECTORY != 0); + if CloseHandle(handle) == 0 { + panic!("Failed to close file") + }; +} + +unsafe fn test_delete_file() { + let temp = utils::tmp().join("test_delete_file.txt"); + let raw_path = to_wide_cstr(&temp); + let _ = std::fs::File::create(&temp).unwrap(); + + if DeleteFileW(raw_path.as_ptr()) == 0 { + panic!("Failed to delete file"); + } + + match std::fs::File::open(temp) { + Ok(_) => panic!("File not deleted"), + Err(e) => assert!(e.kind() == ErrorKind::NotFound, "File not deleted"), + } +} + +unsafe fn test_ntstatus_to_dos() { + // We won't test all combinations, just a couple common ones + assert_eq!(RtlNtStatusToDosError(STATUS_IO_DEVICE_ERROR), ERROR_IO_DEVICE); + assert_eq!(RtlNtStatusToDosError(STATUS_ACCESS_DENIED), ERROR_ACCESS_DENIED); +} + +fn to_wide_cstr(path: &Path) -> Vec { + let mut raw_path = path.as_os_str().encode_wide().collect::>(); + raw_path.extend([0, 0]); + raw_path +} diff --git a/src/tools/miri/tests/pass/async-drop.rs b/src/tools/miri/tests/pass/async-drop.rs index 6d556b77795d9..4fa84384d9bdd 100644 --- a/src/tools/miri/tests/pass/async-drop.rs +++ b/src/tools/miri/tests/pass/async-drop.rs @@ -4,7 +4,7 @@ // WARNING: If you would ever want to modify this test, // please consider modifying rustc's async drop test at -// `tests/ui/async-await/async-drop.rs`. +// `tests/ui/async-await/async-drop/async-drop-initial.rs`. #![feature(async_drop, impl_trait_in_assoc_type)] #![allow(incomplete_features, dead_code)] @@ -68,7 +68,8 @@ fn main() { test_async_drop(SyncThenAsync { i: 15, a: AsyncInt(16), b: SyncInt(17), c: AsyncInt(18) }) .await; - let async_drop_fut = pin!(core::future::async_drop(AsyncInt(19))); + let mut ptr19 = mem::MaybeUninit::new(AsyncInt(19)); + let async_drop_fut = pin!(unsafe { async_drop_in_place(ptr19.as_mut_ptr()) }); test_idempotency(async_drop_fut).await; let foo = AsyncInt(20); @@ -89,13 +90,14 @@ fn main() { struct AsyncInt(i32); +impl Drop for AsyncInt { + fn drop(&mut self) { + println!("AsyncInt::drop: {}", self.0); + } +} impl AsyncDrop for AsyncInt { - type Dropper<'a> = impl Future; - - fn async_drop(self: Pin<&mut Self>) -> Self::Dropper<'_> { - async move { - println!("AsyncInt::Dropper::poll: {}", self.0); - } + async fn drop(self: Pin<&mut Self>) { + println!("AsyncInt::async_drop: {}", self.0); } } @@ -124,16 +126,14 @@ struct AsyncReference<'a> { foo: &'a AsyncInt, } +impl Drop for AsyncReference<'_> { + fn drop(&mut self) { + println!("AsyncReference::drop: {}", self.foo.0); + } +} impl AsyncDrop for AsyncReference<'_> { - type Dropper<'a> - = impl Future - where - Self: 'a; - - fn async_drop(self: Pin<&mut Self>) -> Self::Dropper<'_> { - async move { - println!("AsyncReference::Dropper::poll: {}", self.foo.0); - } + async fn drop(self: Pin<&mut Self>) { + println!("AsyncReference::async_drop: {}", self.foo.0); } } @@ -145,13 +145,14 @@ struct AsyncStruct { b: AsyncInt, } +impl Drop for AsyncStruct { + fn drop(&mut self) { + println!("AsyncStruct::drop: {}", self.i); + } +} impl AsyncDrop for AsyncStruct { - type Dropper<'a> = impl Future; - - fn async_drop(self: Pin<&mut Self>) -> Self::Dropper<'_> { - async move { - println!("AsyncStruct::Dropper::poll: {}", self.i); - } + async fn drop(self: Pin<&mut Self>) { + println!("AsyncStruct::async_drop: {}", self.i); } } @@ -160,23 +161,34 @@ enum AsyncEnum { B(SyncInt), } +impl Drop for AsyncEnum { + fn drop(&mut self) { + let new_self = match self { + AsyncEnum::A(foo) => { + println!("AsyncEnum(A)::drop: {}", foo.0); + AsyncEnum::B(SyncInt(foo.0)) + } + AsyncEnum::B(foo) => { + println!("AsyncEnum(B)::drop: {}", foo.0); + AsyncEnum::A(AsyncInt(foo.0)) + } + }; + mem::forget(mem::replace(&mut *self, new_self)); + } +} impl AsyncDrop for AsyncEnum { - type Dropper<'a> = impl Future; - - fn async_drop(mut self: Pin<&mut Self>) -> Self::Dropper<'_> { - async move { - let new_self = match &*self { - AsyncEnum::A(foo) => { - println!("AsyncEnum(A)::Dropper::poll: {}", foo.0); - AsyncEnum::B(SyncInt(foo.0)) - } - AsyncEnum::B(foo) => { - println!("AsyncEnum(B)::Dropper::poll: {}", foo.0); - AsyncEnum::A(AsyncInt(foo.0)) - } - }; - mem::forget(mem::replace(&mut *self, new_self)); - } + async fn drop(mut self: Pin<&mut Self>) { + let new_self = match &*self { + AsyncEnum::A(foo) => { + println!("AsyncEnum(A)::async_drop: {}", foo.0); + AsyncEnum::B(SyncInt(foo.0)) + } + AsyncEnum::B(foo) => { + println!("AsyncEnum(B)::async_drop: {}", foo.0); + AsyncEnum::A(AsyncInt(foo.0)) + } + }; + mem::forget(mem::replace(&mut *self, new_self)); } } @@ -186,14 +198,15 @@ union AsyncUnion { unsigned: u32, } +impl Drop for AsyncUnion { + fn drop(&mut self) { + println!("AsyncUnion::drop: {}, {}", unsafe { self.signed }, unsafe { self.unsigned },); + } +} impl AsyncDrop for AsyncUnion { - type Dropper<'a> = impl Future; - - fn async_drop(self: Pin<&mut Self>) -> Self::Dropper<'_> { - async move { - println!("AsyncUnion::Dropper::poll: {}, {}", unsafe { self.signed }, unsafe { - self.unsigned - }); - } + async fn drop(self: Pin<&mut Self>) { + println!("AsyncUnion::async_drop: {}, {}", unsafe { self.signed }, unsafe { + self.unsigned + }); } } diff --git a/src/tools/miri/tests/pass/async-drop.stack.stdout b/src/tools/miri/tests/pass/async-drop.stack.stdout index 9cae4331caf92..fc53df2f1b485 100644 --- a/src/tools/miri/tests/pass/async-drop.stack.stdout +++ b/src/tools/miri/tests/pass/async-drop.stack.stdout @@ -1,22 +1,23 @@ -AsyncInt::Dropper::poll: 0 -AsyncInt::Dropper::poll: 1 -AsyncInt::Dropper::poll: 2 -AsyncInt::Dropper::poll: 3 -AsyncInt::Dropper::poll: 4 -AsyncStruct::Dropper::poll: 6 -AsyncInt::Dropper::poll: 7 -AsyncInt::Dropper::poll: 8 -AsyncReference::Dropper::poll: 10 -AsyncInt::Dropper::poll: 11 -AsyncEnum(A)::Dropper::poll: 12 +AsyncInt::async_drop: 0 +AsyncInt::async_drop: 1 +AsyncInt::async_drop: 2 +AsyncInt::async_drop: 3 +AsyncInt::async_drop: 4 +AsyncStruct::async_drop: 6 +AsyncInt::async_drop: 7 +AsyncInt::async_drop: 8 +AsyncReference::async_drop: 10 +AsyncInt::async_drop: 11 +AsyncEnum(A)::async_drop: 12 SyncInt::drop: 12 -AsyncEnum(B)::Dropper::poll: 13 -AsyncInt::Dropper::poll: 13 +AsyncEnum(B)::async_drop: 13 +AsyncInt::async_drop: 13 SyncInt::drop: 14 SyncThenAsync::drop: 15 -AsyncInt::Dropper::poll: 16 +AsyncInt::async_drop: 16 SyncInt::drop: 17 -AsyncInt::Dropper::poll: 18 -AsyncInt::Dropper::poll: 19 -AsyncInt::Dropper::poll: 20 -AsyncUnion::Dropper::poll: 21, 21 +AsyncInt::async_drop: 18 +AsyncInt::async_drop: 19 +AsyncInt::async_drop: 20 +AsyncUnion::async_drop: 21, 21 +AsyncInt::async_drop: 10 diff --git a/src/tools/miri/tests/pass/async-drop.tree.stdout b/src/tools/miri/tests/pass/async-drop.tree.stdout index 9cae4331caf92..fc53df2f1b485 100644 --- a/src/tools/miri/tests/pass/async-drop.tree.stdout +++ b/src/tools/miri/tests/pass/async-drop.tree.stdout @@ -1,22 +1,23 @@ -AsyncInt::Dropper::poll: 0 -AsyncInt::Dropper::poll: 1 -AsyncInt::Dropper::poll: 2 -AsyncInt::Dropper::poll: 3 -AsyncInt::Dropper::poll: 4 -AsyncStruct::Dropper::poll: 6 -AsyncInt::Dropper::poll: 7 -AsyncInt::Dropper::poll: 8 -AsyncReference::Dropper::poll: 10 -AsyncInt::Dropper::poll: 11 -AsyncEnum(A)::Dropper::poll: 12 +AsyncInt::async_drop: 0 +AsyncInt::async_drop: 1 +AsyncInt::async_drop: 2 +AsyncInt::async_drop: 3 +AsyncInt::async_drop: 4 +AsyncStruct::async_drop: 6 +AsyncInt::async_drop: 7 +AsyncInt::async_drop: 8 +AsyncReference::async_drop: 10 +AsyncInt::async_drop: 11 +AsyncEnum(A)::async_drop: 12 SyncInt::drop: 12 -AsyncEnum(B)::Dropper::poll: 13 -AsyncInt::Dropper::poll: 13 +AsyncEnum(B)::async_drop: 13 +AsyncInt::async_drop: 13 SyncInt::drop: 14 SyncThenAsync::drop: 15 -AsyncInt::Dropper::poll: 16 +AsyncInt::async_drop: 16 SyncInt::drop: 17 -AsyncInt::Dropper::poll: 18 -AsyncInt::Dropper::poll: 19 -AsyncInt::Dropper::poll: 20 -AsyncUnion::Dropper::poll: 21, 21 +AsyncInt::async_drop: 18 +AsyncInt::async_drop: 19 +AsyncInt::async_drop: 20 +AsyncUnion::async_drop: 21, 21 +AsyncInt::async_drop: 10 diff --git a/src/tools/miri/tests/pass/both_borrows/2phase.rs b/src/tools/miri/tests/pass/both_borrows/2phase.rs new file mode 100644 index 0000000000000..7a3962a7c1a44 --- /dev/null +++ b/src/tools/miri/tests/pass/both_borrows/2phase.rs @@ -0,0 +1,70 @@ +//@revisions: stack tree +//@[tree]compile-flags: -Zmiri-tree-borrows + +trait S: Sized { + fn tpb(&mut self, _s: Self) {} +} + +impl S for i32 {} + +fn two_phase1() { + let mut x = 3; + x.tpb(x); +} + +fn two_phase2() { + let mut v = vec![]; + v.push(v.len()); +} + +fn two_phase3(b: bool) { + let mut x = &mut vec![]; + let mut y = vec![]; + x.push(( + { + if b { + x = &mut y; + } + 22 + }, + x.len(), + )); +} + +fn two_phase_raw() { + let x: &mut Vec = &mut vec![]; + #[allow(unreachable_code)] // The `push` itself never gets reached. + x.push({ + // Unfortunately this does not trigger the problem of creating a + // raw ponter from a pointer that had a two-phase borrow derived from + // it because of the implicit &mut reborrow. + let raw = x as *mut _; + unsafe { + *raw = vec![1]; + } + return; + }); +} + +fn two_phase_overlapping1() { + let mut x = vec![]; + let p = &x; + x.push(p.len()); +} + +fn two_phase_overlapping2() { + use std::ops::AddAssign; + let mut x = 1; + let l = &x; + x.add_assign(x + *l); +} + +fn main() { + two_phase1(); + two_phase2(); + two_phase3(false); + two_phase3(true); + two_phase_raw(); + two_phase_overlapping1(); + two_phase_overlapping2(); +} diff --git a/src/tools/miri/tests/pass/both_borrows/basic_aliasing_model.rs b/src/tools/miri/tests/pass/both_borrows/basic_aliasing_model.rs new file mode 100644 index 0000000000000..c2b6a7e68be50 --- /dev/null +++ b/src/tools/miri/tests/pass/both_borrows/basic_aliasing_model.rs @@ -0,0 +1,272 @@ +//@revisions: stack tree +//@[tree]compile-flags: -Zmiri-tree-borrows +#![feature(allocator_api)] +use std::ptr; + +// Test various aliasing-model-related things. +fn main() { + read_does_not_invalidate1(); + read_does_not_invalidate2(); + mut_raw_then_mut_shr(); + mut_shr_then_mut_raw(); + mut_raw_mut(); + partially_invalidate_mut(); + drop_after_sharing(); + // direct_mut_to_const_raw(); + two_raw(); + shr_and_raw(); + disjoint_mutable_subborrows(); + raw_ref_to_part(); + array_casts(); + mut_below_shr(); + wide_raw_ptr_in_tuple(); + not_unpin_not_protected(); + write_does_not_invalidate_all_aliases(); + box_into_raw_allows_interior_mutable_alias(); +} + +// Make sure that reading from an `&mut` does, like reborrowing to `&`, +// NOT invalidate other reborrows. +fn read_does_not_invalidate1() { + fn foo(x: &mut (i32, i32)) -> &i32 { + let xraw = x as *mut (i32, i32); + let ret = unsafe { &(*xraw).1 }; + let _val = x.1; // we just read, this does NOT invalidate the reborrows. + ret + } + assert_eq!(*foo(&mut (1, 2)), 2); +} +// Same as above, but this time we first create a raw, then read from `&mut` +// and then freeze from the raw. +fn read_does_not_invalidate2() { + fn foo(x: &mut (i32, i32)) -> &i32 { + let xraw = x as *mut (i32, i32); + let _val = x.1; // we just read, this does NOT invalidate the raw reborrow. + let ret = unsafe { &(*xraw).1 }; + ret + } + assert_eq!(*foo(&mut (1, 2)), 2); +} + +// Escape a mut to raw, then share the same mut and use the share, then the raw. +// That should work. +fn mut_raw_then_mut_shr() { + let mut x = 2; + let xref = &mut x; + let xraw = &mut *xref as *mut _; + let xshr = &*xref; + assert_eq!(*xshr, 2); + unsafe { + *xraw = 4; + } + assert_eq!(x, 4); +} + +// Create first a shared reference and then a raw pointer from a `&mut` +// should permit mutation through that raw pointer. +fn mut_shr_then_mut_raw() { + let xref = &mut 2; + let _xshr = &*xref; + let xraw = xref as *mut _; + unsafe { + *xraw = 3; + } + assert_eq!(*xref, 3); +} + +// Ensure that if we derive from a mut a raw, and then from that a mut, +// and then read through the original mut, that does not invalidate the raw. +// This shows that the read-exception for `&mut` applies even if the `Shr` item +// on the stack is not at the top. +fn mut_raw_mut() { + let mut x = 2; + { + let xref1 = &mut x; + let xraw = xref1 as *mut _; + let _xref2 = unsafe { &mut *xraw }; + let _val = *xref1; + unsafe { + *xraw = 4; + } + // we can now use both xraw and xref1, for reading + assert_eq!(*xref1, 4); + assert_eq!(unsafe { *xraw }, 4); + assert_eq!(*xref1, 4); + assert_eq!(unsafe { *xraw }, 4); + // we cannot use xref2; see `compile-fail/stacked-borrows/illegal_read4.rs` + } + assert_eq!(x, 4); +} + +fn partially_invalidate_mut() { + let data = &mut (0u8, 0u8); + let reborrow = &mut *data as *mut (u8, u8); + let shard = unsafe { &mut (*reborrow).0 }; + data.1 += 1; // the deref overlaps with `shard`, but that is ok; the access does not overlap. + *shard += 1; // so we can still use `shard`. + assert_eq!(*data, (1, 1)); +} + +// Make sure that we can handle the situation where a location is frozen when being dropped. +fn drop_after_sharing() { + let x = String::from("hello!"); + let _len = x.len(); +} + +// Make sure that we can create two raw pointers from a mutable reference and use them both. +fn two_raw() { + unsafe { + let x = &mut 0; + let y1 = x as *mut _; + let y2 = x as *mut _; + *y1 += 2; + *y2 += 1; + } +} + +// Make sure that creating a *mut does not invalidate existing shared references. +fn shr_and_raw() { + unsafe { + use std::mem; + let x = &mut 0; + let y1: &i32 = mem::transmute(&*x); // launder lifetimes + let y2 = x as *mut _; + let _val = *y1; + *y2 += 1; + } +} + +fn disjoint_mutable_subborrows() { + struct Foo { + a: String, + b: Vec, + } + + unsafe fn borrow_field_a<'a>(this: *mut Foo) -> &'a mut String { + &mut (*this).a + } + + unsafe fn borrow_field_b<'a>(this: *mut Foo) -> &'a mut Vec { + &mut (*this).b + } + + let mut foo = Foo { a: "hello".into(), b: vec![0, 1, 2] }; + + let ptr = &mut foo as *mut Foo; + + let a = unsafe { borrow_field_a(ptr) }; + let b = unsafe { borrow_field_b(ptr) }; + b.push(4); + a.push_str(" world"); + assert_eq!(format!("{:?} {:?}", a, b), r#""hello world" [0, 1, 2, 4]"#); +} + +fn raw_ref_to_part() { + struct Part { + _lame: i32, + } + + #[repr(C)] + struct Whole { + part: Part, + extra: i32, + } + + let it = Box::new(Whole { part: Part { _lame: 0 }, extra: 42 }); + let whole = ptr::addr_of_mut!(*Box::leak(it)); + let part = unsafe { ptr::addr_of_mut!((*whole).part) }; + let typed = unsafe { &mut *(part as *mut Whole) }; + assert!(typed.extra == 42); + drop(unsafe { Box::from_raw(whole) }); +} + +/// When casting an array reference to a raw element ptr, that should cover the whole array. +fn array_casts() { + let mut x: [usize; 2] = [0, 0]; + let p = &mut x as *mut usize; + unsafe { + *p.add(1) = 1; + } + + let x: [usize; 2] = [0, 1]; + let p = &x as *const usize; + assert_eq!(unsafe { *p.add(1) }, 1); +} + +/// Transmuting &&i32 to &&mut i32 is fine. +fn mut_below_shr() { + let x = 0; + let y = &x; + let p = unsafe { core::mem::transmute::<&&i32, &&mut i32>(&y) }; + let r = &**p; + let _val = *r; +} + +fn wide_raw_ptr_in_tuple() { + let mut x: Box = Box::new("ouch"); + let r = &mut *x as *mut dyn std::any::Any; + // This triggers the visitor-based recursive retagging. It is *not* supposed to retag raw + // pointers, but then the visitor might recurse into the "fields" of a wide raw pointer and + // finds a reference (to a vtable) there that it wants to retag... and that would be Wrong. + let pair = (r, &0); + let r = unsafe { &mut *pair.0 }; + // Make sure the fn ptr part of the vtable is still fine. + r.type_id(); +} + +fn not_unpin_not_protected() { + // `&mut !Unpin`, at least for now, does not get `noalias` nor `dereferenceable`, so we also + // don't add protectors. (We could, but until we have a better idea for where we want to go with + // the self-referential-coroutine situation, it does not seem worth the potential trouble.) + use std::marker::PhantomPinned; + + pub struct NotUnpin(#[allow(dead_code)] i32, PhantomPinned); + + fn inner(x: &mut NotUnpin, f: fn(&mut NotUnpin)) { + // `f` is allowed to deallocate `x`. + f(x) + } + + inner(Box::leak(Box::new(NotUnpin(0, PhantomPinned))), |x| { + let raw = x as *mut _; + drop(unsafe { Box::from_raw(raw) }); + }); +} + +fn write_does_not_invalidate_all_aliases() { + mod other { + /// Some private memory to store stuff in. + static mut S: *mut i32 = 0 as *mut i32; + + pub fn lib1(x: &&mut i32) { + unsafe { + S = (x as *const &mut i32).cast::<*mut i32>().read(); + } + } + + pub fn lib2() { + unsafe { + *S = 1337; + } + } + } + + let x = &mut 0; + other::lib1(&x); + *x = 42; // a write to x -- invalidates other pointers? + other::lib2(); + assert_eq!(*x, 1337); // oops, the value changed! I guess not all pointers were invalidated +} + +fn box_into_raw_allows_interior_mutable_alias() { + unsafe { + let b = Box::new(std::cell::Cell::new(42)); + let raw = Box::into_raw(b); + let c = &*raw; + let d = raw.cast::(); // bypassing `Cell` -- only okay in Miri tests + // `c` and `d` should permit arbitrary aliasing with each other now. + *d = 1; + c.set(2); + drop(Box::from_raw(raw)); + } +} diff --git a/src/tools/miri/tests/pass/stacked-borrows/interior_mutability.rs b/src/tools/miri/tests/pass/both_borrows/interior_mutability.rs similarity index 83% rename from src/tools/miri/tests/pass/stacked-borrows/interior_mutability.rs rename to src/tools/miri/tests/pass/both_borrows/interior_mutability.rs index 830e9c33847cf..f095e215e0043 100644 --- a/src/tools/miri/tests/pass/stacked-borrows/interior_mutability.rs +++ b/src/tools/miri/tests/pass/both_borrows/interior_mutability.rs @@ -1,3 +1,7 @@ +//@revisions: stack tree +//@[tree]compile-flags: -Zmiri-tree-borrows +#![allow(dangerous_implicit_autorefs)] + use std::cell::{Cell, Ref, RefCell, RefMut, UnsafeCell}; use std::mem::{self, MaybeUninit}; @@ -12,6 +16,7 @@ fn main() { ref_protector(); ref_mut_protector(); rust_issue_68303(); + two_phase(); } fn aliasing_mut_and_shr() { @@ -100,7 +105,14 @@ fn unsafe_cell_invalidate() { let ref1 = unsafe { &mut *raw1 }; let raw2 = ref1 as *mut _; // Now the borrow stack is: raw1, ref2, raw2. - // So using raw1 invalidates raw2. + // + // For TB, the tree is + // + // Act x + // Res `- raw1 + // Res `- ref1, raw2 + // + // Either way, using raw1 invalidates raw2. f(unsafe { mem::transmute(raw2) }, raw1); } @@ -136,7 +148,7 @@ fn refcell_basic() { } } -// Adding a Stacked Borrows protector for `Ref` would break this +// Adding a protector for `Ref` would break this fn ref_protector() { fn break_it(rc: &RefCell, r: Ref<'_, i32>) { // `r` has a shared reference, it is passed in as argument and hence @@ -172,3 +184,27 @@ fn rust_issue_68303() { assert!(optional.is_some()); *handle = true; } + +fn two_phase() { + use std::cell::Cell; + + trait Thing: Sized { + fn do_the_thing(&mut self, _s: i32) {} + } + + impl Thing for Cell {} + + let mut x = Cell::new(1); + let l = &x; + + x.do_the_thing({ + // In TB terms: + // Several Foreign accesses (both Reads and Writes) to the location + // being reborrowed. Reserved + unprotected + interior mut + // makes the pointer immune to everything as long as all accesses + // are child accesses to its parent pointer x. + x.set(3); + l.set(4); + x.get() + l.get() + }); +} diff --git a/src/tools/miri/tests/pass/concurrency/data_race.rs b/src/tools/miri/tests/pass/concurrency/data_race.rs index d16de0ae8e232..d5dd1deb2d9dd 100644 --- a/src/tools/miri/tests/pass/concurrency/data_race.rs +++ b/src/tools/miri/tests/pass/concurrency/data_race.rs @@ -1,4 +1,5 @@ -//@compile-flags: -Zmiri-disable-weak-memory-emulation -Zmiri-preemption-rate=0 +// This tests carefully crafted schedules to ensure they are not considered races. +//@compile-flags: -Zmiri-deterministic-concurrency use std::sync::atomic::*; use std::thread::{self, spawn}; diff --git a/src/tools/miri/tests/pass/concurrency/disable_data_race_detector.rs b/src/tools/miri/tests/pass/concurrency/disable_data_race_detector.rs index 354a4bef932e9..ecc4ca59bd18a 100644 --- a/src/tools/miri/tests/pass/concurrency/disable_data_race_detector.rs +++ b/src/tools/miri/tests/pass/concurrency/disable_data_race_detector.rs @@ -1,6 +1,6 @@ //@compile-flags: -Zmiri-disable-data-race-detector // Avoid non-determinism -//@compile-flags: -Zmiri-preemption-rate=0 -Zmiri-address-reuse-cross-thread-rate=0 +//@compile-flags: -Zmiri-deterministic-concurrency use std::thread::spawn; diff --git a/src/tools/miri/tests/pass/concurrency/spin_loops_nopreempt.rs b/src/tools/miri/tests/pass/concurrency/spin_loops_nopreempt.rs index 44b16e1ac74d4..4361f1da92407 100644 --- a/src/tools/miri/tests/pass/concurrency/spin_loops_nopreempt.rs +++ b/src/tools/miri/tests/pass/concurrency/spin_loops_nopreempt.rs @@ -1,5 +1,5 @@ // This specifically tests behavior *without* preemption. -//@compile-flags: -Zmiri-preemption-rate=0 +//@compile-flags: -Zmiri-deterministic-concurrency use std::cell::Cell; use std::sync::atomic::{AtomicBool, AtomicUsize, Ordering}; diff --git a/src/tools/miri/tests/pass/concurrency/sync.rs b/src/tools/miri/tests/pass/concurrency/sync.rs index 91c67b215a1a9..a92359758dadf 100644 --- a/src/tools/miri/tests/pass/concurrency/sync.rs +++ b/src/tools/miri/tests/pass/concurrency/sync.rs @@ -1,7 +1,7 @@ //@revisions: stack tree //@[tree]compile-flags: -Zmiri-tree-borrows // We use `yield` to test specific interleavings, so disable automatic preemption. -//@compile-flags: -Zmiri-disable-isolation -Zmiri-strict-provenance -Zmiri-preemption-rate=0 +//@compile-flags: -Zmiri-disable-isolation -Zmiri-deterministic-concurrency use std::sync::{Arc, Barrier, Condvar, Mutex, Once, RwLock}; use std::thread; diff --git a/src/tools/miri/tests/pass/concurrency/sync_nopreempt.rs b/src/tools/miri/tests/pass/concurrency/sync_nopreempt.rs index c6cff038f81e0..bea8f87243aa7 100644 --- a/src/tools/miri/tests/pass/concurrency/sync_nopreempt.rs +++ b/src/tools/miri/tests/pass/concurrency/sync_nopreempt.rs @@ -1,5 +1,5 @@ // We are making scheduler assumptions here. -//@compile-flags: -Zmiri-strict-provenance -Zmiri-preemption-rate=0 +//@compile-flags: -Zmiri-deterministic-concurrency use std::sync::{Arc, Condvar, Mutex, RwLock}; use std::thread; diff --git a/src/tools/miri/tests/pass/dst-raw.rs b/src/tools/miri/tests/pass/dst-raw.rs index f26191a1d5998..3d0b843b3da22 100644 --- a/src/tools/miri/tests/pass/dst-raw.rs +++ b/src/tools/miri/tests/pass/dst-raw.rs @@ -1,5 +1,7 @@ // Test DST raw pointers +#![allow(dangerous_implicit_autorefs)] + trait Trait { fn foo(&self) -> isize; } diff --git a/src/tools/miri/tests/pass/float.rs b/src/tools/miri/tests/pass/float.rs index 05ac5e82b564e..98a88cfd62dc8 100644 --- a/src/tools/miri/tests/pass/float.rs +++ b/src/tools/miri/tests/pass/float.rs @@ -6,6 +6,7 @@ #![feature(f16)] #![allow(arithmetic_overflow)] #![allow(internal_features)] +#![allow(unnecessary_transmutes)] use std::any::type_name; use std::cmp::min; @@ -38,8 +39,9 @@ macro_rules! assert_approx_eq { }}; ($a:expr, $b: expr) => { - // accept up to 64ULP (16ULP for host floats and 16ULP for miri artificial error and 32 for any rounding errors) - assert_approx_eq!($a, $b, 64); + // accept up to 12ULP (4ULP for host floats and 4ULP for miri artificial error and 4 for any additional effects + // due to having multiple error sources. + assert_approx_eq!($a, $b, 12); }; } @@ -1290,8 +1292,7 @@ fn test_non_determinism() { } } // We saw the same thing N times. - // FIXME: temporarily disabled as it breaks std tests. - //panic!("expected non-determinism, got {rounds} times the same result: {first:?}"); + panic!("expected non-determinism, got {rounds} times the same result: {first:?}"); } macro_rules! test_operations_f { @@ -1317,66 +1318,68 @@ fn test_non_determinism() { } pub fn test_operations_f32(a: f32, b: f32) { test_operations_f!(a, b); - ensure_nondet(|| a.log(b)); - ensure_nondet(|| a.exp()); - ensure_nondet(|| 10f32.exp2()); - ensure_nondet(|| f32::consts::E.ln()); - ensure_nondet(|| 1f32.ln_1p()); - ensure_nondet(|| 10f32.log10()); - ensure_nondet(|| 8f32.log2()); - ensure_nondet(|| 27.0f32.cbrt()); - ensure_nondet(|| 3.0f32.hypot(4.0f32)); - ensure_nondet(|| 1f32.sin()); - ensure_nondet(|| 0f32.cos()); - // On i686-pc-windows-msvc , these functions are implemented by calling the `f64` version, - // which means the little rounding errors Miri introduces are discard by the cast down to `f32`. - // Just skip the test for them. - if !cfg!(all(target_os = "windows", target_env = "msvc", target_arch = "x86")) { - ensure_nondet(|| 1.0f32.tan()); - ensure_nondet(|| 1.0f32.asin()); - ensure_nondet(|| 5.0f32.acos()); - ensure_nondet(|| 1.0f32.atan()); - ensure_nondet(|| 1.0f32.atan2(2.0f32)); - ensure_nondet(|| 1.0f32.sinh()); - ensure_nondet(|| 1.0f32.cosh()); - ensure_nondet(|| 1.0f32.tanh()); - } - ensure_nondet(|| 1.0f32.asinh()); - ensure_nondet(|| 2.0f32.acosh()); - ensure_nondet(|| 0.5f32.atanh()); - ensure_nondet(|| 5.0f32.gamma()); - ensure_nondet(|| 5.0f32.ln_gamma()); - ensure_nondet(|| 5.0f32.erf()); - ensure_nondet(|| 5.0f32.erfc()); + // FIXME: temporarily disabled as it breaks std tests. + // ensure_nondet(|| a.log(b)); + // ensure_nondet(|| a.exp()); + // ensure_nondet(|| 10f32.exp2()); + // ensure_nondet(|| f32::consts::E.ln()); + // ensure_nondet(|| 1f32.ln_1p()); + // ensure_nondet(|| 10f32.log10()); + // ensure_nondet(|| 8f32.log2()); + // ensure_nondet(|| 27.0f32.cbrt()); + // ensure_nondet(|| 3.0f32.hypot(4.0f32)); + // ensure_nondet(|| 1f32.sin()); + // ensure_nondet(|| 0f32.cos()); + // // On i686-pc-windows-msvc , these functions are implemented by calling the `f64` version, + // // which means the little rounding errors Miri introduces are discard by the cast down to `f32`. + // // Just skip the test for them. + // if !cfg!(all(target_os = "windows", target_env = "msvc", target_arch = "x86")) { + // ensure_nondet(|| 1.0f32.tan()); + // ensure_nondet(|| 1.0f32.asin()); + // ensure_nondet(|| 5.0f32.acos()); + // ensure_nondet(|| 1.0f32.atan()); + // ensure_nondet(|| 1.0f32.atan2(2.0f32)); + // ensure_nondet(|| 1.0f32.sinh()); + // ensure_nondet(|| 1.0f32.cosh()); + // ensure_nondet(|| 1.0f32.tanh()); + // } + // ensure_nondet(|| 1.0f32.asinh()); + // ensure_nondet(|| 2.0f32.acosh()); + // ensure_nondet(|| 0.5f32.atanh()); + // ensure_nondet(|| 5.0f32.gamma()); + // ensure_nondet(|| 5.0f32.ln_gamma()); + // ensure_nondet(|| 5.0f32.erf()); + // ensure_nondet(|| 5.0f32.erfc()); } pub fn test_operations_f64(a: f64, b: f64) { test_operations_f!(a, b); - ensure_nondet(|| a.log(b)); - ensure_nondet(|| a.exp()); - ensure_nondet(|| 50f64.exp2()); - ensure_nondet(|| 3f64.ln()); - ensure_nondet(|| 1f64.ln_1p()); - ensure_nondet(|| f64::consts::E.log10()); - ensure_nondet(|| f64::consts::E.log2()); - ensure_nondet(|| 27.0f64.cbrt()); - ensure_nondet(|| 3.0f64.hypot(4.0f64)); - ensure_nondet(|| 1f64.sin()); - ensure_nondet(|| 0f64.cos()); - ensure_nondet(|| 1.0f64.tan()); - ensure_nondet(|| 1.0f64.asin()); - ensure_nondet(|| 5.0f64.acos()); - ensure_nondet(|| 1.0f64.atan()); - ensure_nondet(|| 1.0f64.atan2(2.0f64)); - ensure_nondet(|| 1.0f64.sinh()); - ensure_nondet(|| 1.0f64.cosh()); - ensure_nondet(|| 1.0f64.tanh()); - ensure_nondet(|| 1.0f64.asinh()); - ensure_nondet(|| 3.0f64.acosh()); - ensure_nondet(|| 0.5f64.atanh()); - ensure_nondet(|| 5.0f64.gamma()); - ensure_nondet(|| 5.0f64.ln_gamma()); - ensure_nondet(|| 5.0f64.erf()); - ensure_nondet(|| 5.0f64.erfc()); + // FIXME: temporarily disabled as it breaks std tests. + // ensure_nondet(|| a.log(b)); + // ensure_nondet(|| a.exp()); + // ensure_nondet(|| 50f64.exp2()); + // ensure_nondet(|| 3f64.ln()); + // ensure_nondet(|| 1f64.ln_1p()); + // ensure_nondet(|| f64::consts::E.log10()); + // ensure_nondet(|| f64::consts::E.log2()); + // ensure_nondet(|| 27.0f64.cbrt()); + // ensure_nondet(|| 3.0f64.hypot(4.0f64)); + // ensure_nondet(|| 1f64.sin()); + // ensure_nondet(|| 0f64.cos()); + // ensure_nondet(|| 1.0f64.tan()); + // ensure_nondet(|| 1.0f64.asin()); + // ensure_nondet(|| 5.0f64.acos()); + // ensure_nondet(|| 1.0f64.atan()); + // ensure_nondet(|| 1.0f64.atan2(2.0f64)); + // ensure_nondet(|| 1.0f64.sinh()); + // ensure_nondet(|| 1.0f64.cosh()); + // ensure_nondet(|| 1.0f64.tanh()); + // ensure_nondet(|| 1.0f64.asinh()); + // ensure_nondet(|| 3.0f64.acosh()); + // ensure_nondet(|| 0.5f64.atanh()); + // ensure_nondet(|| 5.0f64.gamma()); + // ensure_nondet(|| 5.0f64.ln_gamma()); + // ensure_nondet(|| 5.0f64.erf()); + // ensure_nondet(|| 5.0f64.erfc()); } pub fn test_operations_f128(a: f128, b: f128) { test_operations_f!(a, b); diff --git a/src/tools/miri/tests/pass/fn_align.rs b/src/tools/miri/tests/pass/fn_align.rs new file mode 100644 index 0000000000000..550bb1cb4d718 --- /dev/null +++ b/src/tools/miri/tests/pass/fn_align.rs @@ -0,0 +1,21 @@ +//@compile-flags: -Zmin-function-alignment=8 +#![feature(fn_align)] + +// When a function uses `repr(align(N))`, the function address should be a multiple of `N`. + +#[repr(align(256))] +fn foo() {} + +#[repr(align(16))] +fn bar() {} + +#[repr(align(4))] +fn baz() {} + +fn main() { + assert!((foo as usize).is_multiple_of(256)); + assert!((bar as usize).is_multiple_of(16)); + + // The maximum of `repr(align(N))` and `-Zmin-function-alignment=N` is used. + assert!((baz as usize).is_multiple_of(8)); +} diff --git a/src/tools/miri/tests/pass/intrinsics/portable-simd.rs b/src/tools/miri/tests/pass/intrinsics/portable-simd.rs index cc753dac2156f..e14ce51f35a3f 100644 --- a/src/tools/miri/tests/pass/intrinsics/portable-simd.rs +++ b/src/tools/miri/tests/pass/intrinsics/portable-simd.rs @@ -331,6 +331,19 @@ fn simd_mask() { ); assert_eq!(selected1, i32x4::from_array([0, 0, 0, 1])); assert_eq!(selected2, selected1); + // Non-zero "padding" (the extra bits) is also allowed. + let selected1 = simd_select_bitmask::( + if cfg!(target_endian = "little") { 0b11111000 } else { 0b11110001 }, + i32x4::splat(1), // yes + i32x4::splat(0), // no + ); + let selected2 = simd_select_bitmask::<[u8; 1], _>( + if cfg!(target_endian = "little") { [0b11111000] } else { [0b11110001] }, + i32x4::splat(1), // yes + i32x4::splat(0), // no + ); + assert_eq!(selected1, i32x4::from_array([0, 0, 0, 1])); + assert_eq!(selected2, selected1); } // Non-power-of-2 multi-byte mask. diff --git a/src/tools/miri/tests/pass/issues/issue-134713-swap_nonoverlapping_untyped.rs b/src/tools/miri/tests/pass/issues/issue-134713-swap_nonoverlapping_untyped.rs new file mode 100644 index 0000000000000..27ea12398d88a --- /dev/null +++ b/src/tools/miri/tests/pass/issues/issue-134713-swap_nonoverlapping_untyped.rs @@ -0,0 +1,30 @@ +use std::mem::{align_of, size_of}; + +// See + +#[repr(C)] +struct Foo(usize, u8); + +fn main() { + let buf1: [usize; 2] = [1000, 2000]; + let buf2: [usize; 2] = [3000, 4000]; + + // Foo and [usize; 2] have the same size and alignment, + // so swap_nonoverlapping should treat them the same + assert_eq!(size_of::(), size_of::<[usize; 2]>()); + assert_eq!(align_of::(), align_of::<[usize; 2]>()); + + let mut b1 = buf1; + let mut b2 = buf2; + // Safety: b1 and b2 are distinct local variables, + // with the same size and alignment as Foo. + unsafe { + std::ptr::swap_nonoverlapping( + b1.as_mut_ptr().cast::(), + b2.as_mut_ptr().cast::(), + 1, + ); + } + assert_eq!(b1, buf2); + assert_eq!(b2, buf1); +} diff --git a/src/tools/miri/tests/pass/issues/issue-139553.rs b/src/tools/miri/tests/pass/issues/issue-139553.rs new file mode 100644 index 0000000000000..119d589d1eada --- /dev/null +++ b/src/tools/miri/tests/pass/issues/issue-139553.rs @@ -0,0 +1,45 @@ +//@compile-flags: -Zmiri-preemption-rate=0 -Zmiri-compare-exchange-weak-failure-rate=0 +use std::sync::mpsc::channel; +use std::thread; + +/// This test aims to trigger a race condition that causes a double free in the unbounded channel +/// implementation. The test relies on a particular thread scheduling to happen as annotated by the +/// comments below. +fn main() { + let (s1, r) = channel::(); + let s2 = s1.clone(); + + let t1 = thread::spawn(move || { + // 1. The first action executed is an attempt to send the first value in the channel. This + // will begin to initialize the channel but will stop at a critical momement as + // indicated by the `yield_now()` call in the `start_send` method of the implementation. + let _ = s1.send(42); + // 4. The sender is re-scheduled and it finishes the initialization of the channel by + // setting head.block to the same value as tail.block. It then proceeds to publish its + // value but observes that the channel has already disconnected (due to the concurrent + // call of `discard_all_messages`) and aborts the send. + }); + std::thread::yield_now(); + + // 2. A second sender attempts to send a value while the channel is in a half-initialized + // state. Here, half-initialized means that the `tail.block` pointer points to a valid block + // but `head.block` is still null. This condition is ensured by the yield of step 1. When + // this call returns the channel state has tail.index != head.index, tail.block != NULL, and + // head.block = NULL. + s2.send(42).unwrap(); + // 3. This thread continues with dropping the one and only receiver. When all receivers are + // gone `discard_all_messages` will attempt to drop all currently sent values and + // de-allocate all the blocks. If `tail.block != NULL` but `head.block = NULL` the + // implementation waits for the initializing sender to finish by spinning/yielding. + drop(r); + // 5. This thread is rescheduled and `discard_all_messages` observes the head.block pointer set + // by step 4 and proceeds with deallocation. In the problematic version of the code + // `head.block` is simply read via an `Acquire` load and not swapped with NULL. After this + // call returns the channel state has tail.index = head.index, tail.block = NULL, and + // head.block != NULL. + t1.join().unwrap(); + // 6. The last sender (s2) is dropped here which also attempts to cleanup any data in the + // channel. It observes `tail.index = head.index` and so it doesn't attempt to cleanup any + // messages but it also observes that `head.block != NULL` and attempts to deallocate it. + // This is however already deallocated by `discard_all_messages`, leading to a double free. +} diff --git a/src/tools/miri/tests/pass/issues/issue-miri-184.rs b/src/tools/miri/tests/pass/issues/issue-miri-184.rs index 39c841403ef0c..964d850298fbf 100644 --- a/src/tools/miri/tests/pass/issues/issue-miri-184.rs +++ b/src/tools/miri/tests/pass/issues/issue-miri-184.rs @@ -1,3 +1,4 @@ +#![allow(unnecessary_transmutes)] pub fn main() { let bytes: [u8; 8] = unsafe { ::std::mem::transmute(0u64) }; let _val: &[u8] = &bytes; diff --git a/src/tools/miri/tests/pass/panic/concurrent-panic.rs b/src/tools/miri/tests/pass/panic/concurrent-panic.rs index e804df90977a9..4bf645960f705 100644 --- a/src/tools/miri/tests/pass/panic/concurrent-panic.rs +++ b/src/tools/miri/tests/pass/panic/concurrent-panic.rs @@ -1,5 +1,5 @@ // We are making scheduler assumptions here. -//@compile-flags: -Zmiri-preemption-rate=0 +//@compile-flags: -Zmiri-deterministic-concurrency //! Cause a panic in one thread while another thread is unwinding. This checks //! that separate threads have their own panicking state. diff --git a/src/tools/miri/tests/pass/path.rs b/src/tools/miri/tests/pass/path.rs index 299ee6cfe9dde..7428d0afcc674 100644 --- a/src/tools/miri/tests/pass/path.rs +++ b/src/tools/miri/tests/pass/path.rs @@ -6,7 +6,11 @@ mod utils; #[track_caller] fn assert_absolute_eq(in_: &str, out: &str) { - assert_eq!(absolute(in_).unwrap().as_os_str(), Path::new(out).as_os_str()); + assert_eq!( + absolute(in_).unwrap().as_os_str(), + Path::new(out).as_os_str(), + "incorrect absolute path for {in_:?}" + ); } fn test_absolute() { @@ -29,11 +33,28 @@ fn test_absolute() { assert_absolute_eq(r"\\?\C:\path\to\file", r"\\?\C:\path\to\file"); assert_absolute_eq(r"\\?\UNC\server\share\to\file", r"\\?\UNC\server\share\to\file"); assert_absolute_eq(r"\\?\PIPE\name", r"\\?\PIPE\name"); + assert_absolute_eq(r"\\server\share\NUL", r"\\server\share\NUL"); + // This fails on Windows 10 hosts. FIXME: enable this once GHA runners are on Windows 11. + //assert_absolute_eq(r"C:\path\to\COM1", r"C:\path\to\COM1"); // Verbatim paths are always unchanged, no matter what. assert_absolute_eq(r"\\?\path.\to/file..", r"\\?\path.\to/file.."); - + // Trailing dot is removed here. assert_absolute_eq(r"C:\path..\to.\file.", r"C:\path..\to\file"); + // `..` is resolved here. + assert_absolute_eq(r"C:\path\to\..\file", r"C:\path\file"); + assert_absolute_eq(r"C:\path\to\..\..\file", r"C:\file"); + assert_absolute_eq(r"C:\path\to\..\..\..\..\..\..\file", r"C:\file"); + assert_absolute_eq(r"C:\..", r"C:\"); + assert_absolute_eq(r"\\server\share\to\path\with\..\file", r"\\server\share\to\path\file"); + assert_absolute_eq(r"\\server\share\to\..\..\..\..\file", r"\\server\share\file"); + assert_absolute_eq(r"\\server\share\..", r"\\server\share"); + // Magic filenames. + assert_absolute_eq(r"NUL", r"\\.\NUL"); + assert_absolute_eq(r"nul", r"\\.\nul"); assert_absolute_eq(r"COM1", r"\\.\COM1"); + assert_absolute_eq(r"com1", r"\\.\com1"); + assert_absolute_eq(r"C:\path\to\NUL", r"\\.\NUL"); + assert_absolute_eq(r"C:\path\to\nul", r"\\.\nul"); } else { panic!("unsupported OS"); } diff --git a/src/tools/miri/tests/pass/shims/env/var.rs b/src/tools/miri/tests/pass/shims/env/var.rs index 655b29674e340..31e59c4a41cbc 100644 --- a/src/tools/miri/tests/pass/shims/env/var.rs +++ b/src/tools/miri/tests/pass/shims/env/var.rs @@ -1,4 +1,4 @@ -//@compile-flags: -Zmiri-preemption-rate=0 +//@compile-flags: -Zmiri-deterministic-concurrency use std::{env, thread}; fn main() { diff --git a/src/tools/miri/tests/pass/shims/fs.rs b/src/tools/miri/tests/pass/shims/fs.rs index 289c6aa2fcec9..d0a7f245ee0ff 100644 --- a/src/tools/miri/tests/pass/shims/fs.rs +++ b/src/tools/miri/tests/pass/shims/fs.rs @@ -1,4 +1,3 @@ -//@ignore-target: windows # File handling is not implemented yet //@compile-flags: -Zmiri-disable-isolation #![feature(io_error_more)] @@ -18,20 +17,23 @@ mod utils; fn main() { test_path_conversion(); - test_file(); - test_file_clone(); test_file_create_new(); - test_seek(); - test_metadata(); - test_file_set_len(); - test_file_sync(); - test_errors(); - test_rename(); - test_directory(); - test_canonicalize(); - test_from_raw_os_error(); - #[cfg(unix)] - test_pread_pwrite(); + // Windows file handling is very incomplete. + if cfg!(not(windows)) { + test_file(); + test_seek(); + test_file_clone(); + test_metadata(); + test_file_set_len(); + test_file_sync(); + test_errors(); + test_rename(); + test_directory(); + test_canonicalize(); + test_from_raw_os_error(); + #[cfg(unix)] + test_pread_pwrite(); + } } fn test_path_conversion() { @@ -144,10 +146,10 @@ fn test_metadata() { let path = utils::prepare_with_content("miri_test_fs_metadata.txt", bytes); // Test that metadata of an absolute path is correct. - check_metadata(bytes, &path).unwrap(); + check_metadata(bytes, &path).expect("absolute path metadata"); // Test that metadata of a relative path is correct. std::env::set_current_dir(path.parent().unwrap()).unwrap(); - check_metadata(bytes, Path::new(path.file_name().unwrap())).unwrap(); + check_metadata(bytes, Path::new(path.file_name().unwrap())).expect("relative path metadata"); // Removing file should succeed. remove_file(&path).unwrap(); diff --git a/src/tools/miri/tests/pass/shims/x86/intrinsics-x86-gfni.rs b/src/tools/miri/tests/pass/shims/x86/intrinsics-x86-gfni.rs index a629e2acfe998..882b5e3f79524 100644 --- a/src/tools/miri/tests/pass/shims/x86/intrinsics-x86-gfni.rs +++ b/src/tools/miri/tests/pass/shims/x86/intrinsics-x86-gfni.rs @@ -368,7 +368,7 @@ unsafe fn load_m256i_word(data: &[T], word_index: usize) -> __m256i { #[target_feature(enable = "avx512f")] unsafe fn load_m512i_word(data: &[T], word_index: usize) -> __m512i { let byte_offset = word_index * 64 / size_of::(); - let pointer = data.as_ptr().add(byte_offset) as *const i32; + let pointer = data.as_ptr().add(byte_offset) as *const __m512i; _mm512_loadu_si512(black_box(pointer)) } diff --git a/src/tools/miri/tests/pass/shims/x86/intrinsics-x86-sse.rs b/src/tools/miri/tests/pass/shims/x86/intrinsics-x86-sse.rs index 6f7ab3b3c9fb9..be3f961e10ffa 100644 --- a/src/tools/miri/tests/pass/shims/x86/intrinsics-x86-sse.rs +++ b/src/tools/miri/tests/pass/shims/x86/intrinsics-x86-sse.rs @@ -1,5 +1,6 @@ // We're testing x86 target specific features //@only-target: x86_64 i686 +#![allow(unnecessary_transmutes)] #[cfg(target_arch = "x86")] use std::arch::x86::*; diff --git a/src/tools/miri/tests/pass/shims/x86/intrinsics-x86-sse2.rs b/src/tools/miri/tests/pass/shims/x86/intrinsics-x86-sse2.rs index 7aaf9c2624f9c..731d8b577637a 100644 --- a/src/tools/miri/tests/pass/shims/x86/intrinsics-x86-sse2.rs +++ b/src/tools/miri/tests/pass/shims/x86/intrinsics-x86-sse2.rs @@ -1,5 +1,6 @@ // We're testing x86 target specific features //@only-target: x86_64 i686 +#![allow(unnecessary_transmutes)] #[cfg(target_arch = "x86")] use std::arch::x86::*; diff --git a/src/tools/miri/tests/pass/slices.rs b/src/tools/miri/tests/pass/slices.rs index dd18a061cdd51..686683c3a25cf 100644 --- a/src/tools/miri/tests/pass/slices.rs +++ b/src/tools/miri/tests/pass/slices.rs @@ -1,7 +1,6 @@ //@revisions: stack tree //@[tree]compile-flags: -Zmiri-tree-borrows //@compile-flags: -Zmiri-strict-provenance -#![feature(slice_as_chunks)] #![feature(slice_partition_dedup)] #![feature(layout_for_ptr)] @@ -227,7 +226,7 @@ fn test_for_invalidated_pointers() { buffer.reverse(); - // Calls `fn as_chunks_unchecked_mut` internally (requires unstable `#![feature(slice_as_chunks)]`): + // Calls `fn as_chunks_unchecked_mut` internally: assert_eq!(2, buffer.as_chunks_mut::<32>().0.len()); for chunk in buffer.as_chunks_mut::<32>().0 { for elem in chunk { diff --git a/src/tools/miri/tests/pass/stacked-borrows/2phase.rs b/src/tools/miri/tests/pass/stacked-borrows/2phase.rs deleted file mode 100644 index fb4ba60583736..0000000000000 --- a/src/tools/miri/tests/pass/stacked-borrows/2phase.rs +++ /dev/null @@ -1,110 +0,0 @@ -// FIXME: this miscompiles with optimizations, see . -//@compile-flags: -Zmir-opt-level=0 - -trait S: Sized { - fn tpb(&mut self, _s: Self) {} -} - -impl S for i32 {} - -fn two_phase1() { - let mut x = 3; - x.tpb(x); -} - -fn two_phase2() { - let mut v = vec![]; - v.push(v.len()); -} - -fn two_phase3(b: bool) { - let mut x = &mut vec![]; - let mut y = vec![]; - x.push(( - { - if b { - x = &mut y; - } - 22 - }, - x.len(), - )); -} - -#[allow(unreachable_code)] -fn two_phase_raw() { - let x: &mut Vec = &mut vec![]; - x.push({ - // Unfortunately this does not trigger the problem of creating a - // raw ponter from a pointer that had a two-phase borrow derived from - // it because of the implicit &mut reborrow. - let raw = x as *mut _; - unsafe { - *raw = vec![1]; - } - return; - }); -} - -fn two_phase_overlapping1() { - let mut x = vec![]; - let p = &x; - x.push(p.len()); -} - -fn two_phase_overlapping2() { - use std::ops::AddAssign; - let mut x = 1; - let l = &x; - x.add_assign(x + *l); -} - -fn with_interior_mutability() { - use std::cell::Cell; - - trait Thing: Sized { - fn do_the_thing(&mut self, _s: i32) {} - } - - impl Thing for Cell {} - - let mut x = Cell::new(1); - let l = &x; - - x.do_the_thing({ - x.set(3); - l.set(4); - x.get() + l.get() - }); -} - -// This one really shouldn't be accepted, but since we treat 2phase as raw, we do accept it. -// Tree Borrows rejects it. -fn aliasing_violation() { - struct Foo(u64); - impl Foo { - fn add(&mut self, n: u64) -> u64 { - self.0 + n - } - } - - let mut f = Foo(0); - let alias = &mut f.0 as *mut u64; - let res = f.add(unsafe { - *alias = 42; - 0 - }); - assert_eq!(res, 42); -} - -fn main() { - two_phase1(); - two_phase2(); - two_phase3(false); - two_phase3(true); - two_phase_raw(); - with_interior_mutability(); - two_phase_overlapping1(); - two_phase_overlapping2(); - aliasing_violation(); -} diff --git a/src/tools/miri/tests/pass/stacked-borrows/stacked-borrows.rs b/src/tools/miri/tests/pass/stacked-borrows/stacked-borrows.rs deleted file mode 100644 index 4261f411eea47..0000000000000 --- a/src/tools/miri/tests/pass/stacked-borrows/stacked-borrows.rs +++ /dev/null @@ -1,292 +0,0 @@ -#![feature(allocator_api)] -use std::ptr; - -// Test various stacked-borrows-related things. -fn main() { - read_does_not_invalidate1(); - read_does_not_invalidate2(); - mut_raw_then_mut_shr(); - mut_shr_then_mut_raw(); - mut_raw_mut(); - mut_raw_mut2(); - partially_invalidate_mut(); - drop_after_sharing(); - // direct_mut_to_const_raw(); - two_raw(); - shr_and_raw(); - disjoint_mutable_subborrows(); - raw_ref_to_part(); - array_casts(); - mut_below_shr(); - wide_raw_ptr_in_tuple(); - not_unpin_not_protected(); - write_does_not_invalidate_all_aliases(); - box_into_raw_allows_interior_mutable_alias(); -} - -// Make sure that reading from an `&mut` does, like reborrowing to `&`, -// NOT invalidate other reborrows. -fn read_does_not_invalidate1() { - fn foo(x: &mut (i32, i32)) -> &i32 { - let xraw = x as *mut (i32, i32); - let ret = unsafe { &(*xraw).1 }; - let _val = x.1; // we just read, this does NOT invalidate the reborrows. - ret - } - assert_eq!(*foo(&mut (1, 2)), 2); -} -// Same as above, but this time we first create a raw, then read from `&mut` -// and then freeze from the raw. -fn read_does_not_invalidate2() { - fn foo(x: &mut (i32, i32)) -> &i32 { - let xraw = x as *mut (i32, i32); - let _val = x.1; // we just read, this does NOT invalidate the raw reborrow. - let ret = unsafe { &(*xraw).1 }; - ret - } - assert_eq!(*foo(&mut (1, 2)), 2); -} - -// Escape a mut to raw, then share the same mut and use the share, then the raw. -// That should work. -fn mut_raw_then_mut_shr() { - let mut x = 2; - let xref = &mut x; - let xraw = &mut *xref as *mut _; - let xshr = &*xref; - assert_eq!(*xshr, 2); - unsafe { - *xraw = 4; - } - assert_eq!(x, 4); -} - -// Create first a shared reference and then a raw pointer from a `&mut` -// should permit mutation through that raw pointer. -fn mut_shr_then_mut_raw() { - let xref = &mut 2; - let _xshr = &*xref; - let xraw = xref as *mut _; - unsafe { - *xraw = 3; - } - assert_eq!(*xref, 3); -} - -// Ensure that if we derive from a mut a raw, and then from that a mut, -// and then read through the original mut, that does not invalidate the raw. -// This shows that the read-exception for `&mut` applies even if the `Shr` item -// on the stack is not at the top. -fn mut_raw_mut() { - let mut x = 2; - { - let xref1 = &mut x; - let xraw = xref1 as *mut _; - let _xref2 = unsafe { &mut *xraw }; - let _val = *xref1; - unsafe { - *xraw = 4; - } - // we can now use both xraw and xref1, for reading - assert_eq!(*xref1, 4); - assert_eq!(unsafe { *xraw }, 4); - assert_eq!(*xref1, 4); - assert_eq!(unsafe { *xraw }, 4); - // we cannot use xref2; see `compile-fail/stacked-borrows/illegal_read4.rs` - } - assert_eq!(x, 4); -} - -// A variant of `mut_raw_mut` that does *not* get accepted by Tree Borrows. -// It's kind of an accident that we accept it in Stacked Borrows... -fn mut_raw_mut2() { - unsafe { - let mut root = 0; - let to = &mut root as *mut i32; - *to = 0; - let _val = root; - *to = 0; - } -} - -fn partially_invalidate_mut() { - let data = &mut (0u8, 0u8); - let reborrow = &mut *data as *mut (u8, u8); - let shard = unsafe { &mut (*reborrow).0 }; - data.1 += 1; // the deref overlaps with `shard`, but that is ok; the access does not overlap. - *shard += 1; // so we can still use `shard`. - assert_eq!(*data, (1, 1)); -} - -// Make sure that we can handle the situation where a location is frozen when being dropped. -fn drop_after_sharing() { - let x = String::from("hello!"); - let _len = x.len(); -} - -// Make sure that coercing &mut T to *const T produces a writeable pointer. -// TODO: This is currently disabled, waiting on a decision on -/*fn direct_mut_to_const_raw() { - let x = &mut 0; - let y: *const i32 = x; - unsafe { *(y as *mut i32) = 1; } - assert_eq!(*x, 1); -}*/ - -// Make sure that we can create two raw pointers from a mutable reference and use them both. -fn two_raw() { - unsafe { - let x = &mut 0; - let y1 = x as *mut _; - let y2 = x as *mut _; - *y1 += 2; - *y2 += 1; - } -} - -// Make sure that creating a *mut does not invalidate existing shared references. -fn shr_and_raw() { - unsafe { - use std::mem; - let x = &mut 0; - let y1: &i32 = mem::transmute(&*x); // launder lifetimes - let y2 = x as *mut _; - let _val = *y1; - *y2 += 1; - } -} - -fn disjoint_mutable_subborrows() { - struct Foo { - a: String, - b: Vec, - } - - unsafe fn borrow_field_a<'a>(this: *mut Foo) -> &'a mut String { - &mut (*this).a - } - - unsafe fn borrow_field_b<'a>(this: *mut Foo) -> &'a mut Vec { - &mut (*this).b - } - - let mut foo = Foo { a: "hello".into(), b: vec![0, 1, 2] }; - - let ptr = &mut foo as *mut Foo; - - let a = unsafe { borrow_field_a(ptr) }; - let b = unsafe { borrow_field_b(ptr) }; - b.push(4); - a.push_str(" world"); - eprintln!("{:?} {:?}", a, b); -} - -fn raw_ref_to_part() { - struct Part { - _lame: i32, - } - - #[repr(C)] - struct Whole { - part: Part, - extra: i32, - } - - let it = Box::new(Whole { part: Part { _lame: 0 }, extra: 42 }); - let whole = ptr::addr_of_mut!(*Box::leak(it)); - let part = unsafe { ptr::addr_of_mut!((*whole).part) }; - let typed = unsafe { &mut *(part as *mut Whole) }; - assert!(typed.extra == 42); - drop(unsafe { Box::from_raw(whole) }); -} - -/// When casting an array reference to a raw element ptr, that should cover the whole array. -fn array_casts() { - let mut x: [usize; 2] = [0, 0]; - let p = &mut x as *mut usize; - unsafe { - *p.add(1) = 1; - } - - let x: [usize; 2] = [0, 1]; - let p = &x as *const usize; - assert_eq!(unsafe { *p.add(1) }, 1); -} - -/// Transmuting &&i32 to &&mut i32 is fine. -fn mut_below_shr() { - let x = 0; - let y = &x; - let p = unsafe { core::mem::transmute::<&&i32, &&mut i32>(&y) }; - let r = &**p; - let _val = *r; -} - -fn wide_raw_ptr_in_tuple() { - let mut x: Box = Box::new("ouch"); - let r = &mut *x as *mut dyn std::any::Any; - // This triggers the visitor-based recursive retagging. It is *not* supposed to retag raw - // pointers, but then the visitor might recurse into the "fields" of a wide raw pointer and - // finds a reference (to a vtable) there that it wants to retag... and that would be Wrong. - let pair = (r, &0); - let r = unsafe { &mut *pair.0 }; - // Make sure the fn ptr part of the vtable is still fine. - r.type_id(); -} - -fn not_unpin_not_protected() { - // `&mut !Unpin`, at least for now, does not get `noalias` nor `dereferenceable`, so we also - // don't add protectors. (We could, but until we have a better idea for where we want to go with - // the self-referential-coroutine situation, it does not seem worth the potential trouble.) - use std::marker::PhantomPinned; - - pub struct NotUnpin(#[allow(dead_code)] i32, PhantomPinned); - - fn inner(x: &mut NotUnpin, f: fn(&mut NotUnpin)) { - // `f` may mutate, but it may not deallocate! - f(x) - } - - inner(Box::leak(Box::new(NotUnpin(0, PhantomPinned))), |x| { - let raw = x as *mut _; - drop(unsafe { Box::from_raw(raw) }); - }); -} - -fn write_does_not_invalidate_all_aliases() { - mod other { - /// Some private memory to store stuff in. - static mut S: *mut i32 = 0 as *mut i32; - - pub fn lib1(x: &&mut i32) { - unsafe { - S = (x as *const &mut i32).cast::<*mut i32>().read(); - } - } - - pub fn lib2() { - unsafe { - *S = 1337; - } - } - } - - let x = &mut 0; - other::lib1(&x); - *x = 42; // a write to x -- invalidates other pointers? - other::lib2(); - assert_eq!(*x, 1337); // oops, the value changed! I guess not all pointers were invalidated -} - -fn box_into_raw_allows_interior_mutable_alias() { - unsafe { - let b = Box::new(std::cell::Cell::new(42)); - let raw = Box::into_raw(b); - let c = &*raw; - let d = raw.cast::(); // bypassing `Cell` -- only okay in Miri tests - // `c` and `d` should permit arbitrary aliasing with each other now. - *d = 1; - c.set(2); - drop(Box::from_raw(raw)); - } -} diff --git a/src/tools/miri/tests/pass/stacked-borrows/stacked-borrows.stderr b/src/tools/miri/tests/pass/stacked-borrows/stacked-borrows.stderr deleted file mode 100644 index 8ee4e25dbef84..0000000000000 --- a/src/tools/miri/tests/pass/stacked-borrows/stacked-borrows.stderr +++ /dev/null @@ -1 +0,0 @@ -"hello world" [0, 1, 2, 4] diff --git a/src/tools/miri/tests/pass/stacked-borrows/coroutine-self-referential.rs b/src/tools/miri/tests/pass/stacked_borrows/coroutine-self-referential.rs similarity index 100% rename from src/tools/miri/tests/pass/stacked-borrows/coroutine-self-referential.rs rename to src/tools/miri/tests/pass/stacked_borrows/coroutine-self-referential.rs diff --git a/src/tools/miri/tests/pass/stacked-borrows/int-to-ptr.rs b/src/tools/miri/tests/pass/stacked_borrows/int-to-ptr.rs similarity index 100% rename from src/tools/miri/tests/pass/stacked-borrows/int-to-ptr.rs rename to src/tools/miri/tests/pass/stacked_borrows/int-to-ptr.rs diff --git a/src/tools/miri/tests/pass/stacked-borrows/issue-miri-2389.rs b/src/tools/miri/tests/pass/stacked_borrows/issue-miri-2389.rs similarity index 100% rename from src/tools/miri/tests/pass/stacked-borrows/issue-miri-2389.rs rename to src/tools/miri/tests/pass/stacked_borrows/issue-miri-2389.rs diff --git a/src/tools/miri/tests/pass/stacked-borrows/issue-miri-2389.stderr b/src/tools/miri/tests/pass/stacked_borrows/issue-miri-2389.stderr similarity index 89% rename from src/tools/miri/tests/pass/stacked-borrows/issue-miri-2389.stderr rename to src/tools/miri/tests/pass/stacked_borrows/issue-miri-2389.stderr index bcb7a65e90f05..8ca3c6c618ea7 100644 --- a/src/tools/miri/tests/pass/stacked-borrows/issue-miri-2389.stderr +++ b/src/tools/miri/tests/pass/stacked_borrows/issue-miri-2389.stderr @@ -1,5 +1,5 @@ warning: integer-to-pointer cast - --> tests/pass/stacked-borrows/issue-miri-2389.rs:LL:CC + --> tests/pass/stacked_borrows/issue-miri-2389.rs:LL:CC | LL | let wildcard = &root0 as *const Cell as usize as *const Cell; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ integer-to-pointer cast @@ -10,5 +10,5 @@ LL | let wildcard = &root0 as *const Cell as usize as *const Cell. +//@compile-flags: -Zmir-opt-level=0 + +// Test various stacked-borrows-specific things +// (i.e., these do not work the same under TB). +fn main() { + mut_raw_mut2(); + // direct_mut_to_const_raw(); + two_phase_aliasing_violation(); +} + +// A variant of `mut_raw_mut` that does *not* get accepted by Tree Borrows. +// It's kind of an accident that we accept it in Stacked Borrows... +fn mut_raw_mut2() { + unsafe { + let mut root = 0; + let to = &mut root as *mut i32; + *to = 0; + let _val = root; + *to = 0; + } +} + +// Make sure that coercing &mut T to *const T produces a writeable pointer. +// TODO: This is currently disabled, waiting on a decision on +/*fn direct_mut_to_const_raw() { + let x = &mut 0; + let y: *const i32 = x; + unsafe { *(y as *mut i32) = 1; } + assert_eq!(*x, 1); +}*/ + +// This one really shouldn't be accepted, but since we treat 2phase as raw, we do accept it. +// Tree Borrows rejects it. +fn two_phase_aliasing_violation() { + struct Foo(u64); + impl Foo { + fn add(&mut self, n: u64) -> u64 { + self.0 + n + } + } + + let mut f = Foo(0); + let alias = &mut f.0 as *mut u64; + let res = f.add(unsafe { + *alias = 42; + 0 + }); + assert_eq!(res, 42); +} diff --git a/src/tools/miri/tests/pass/stacked-borrows/unknown-bottom-gc.rs b/src/tools/miri/tests/pass/stacked_borrows/unknown-bottom-gc.rs similarity index 100% rename from src/tools/miri/tests/pass/stacked-borrows/unknown-bottom-gc.rs rename to src/tools/miri/tests/pass/stacked_borrows/unknown-bottom-gc.rs diff --git a/src/tools/miri/tests/pass/stacked-borrows/zst-field-retagging-terminates.rs b/src/tools/miri/tests/pass/stacked_borrows/zst-field-retagging-terminates.rs similarity index 100% rename from src/tools/miri/tests/pass/stacked-borrows/zst-field-retagging-terminates.rs rename to src/tools/miri/tests/pass/stacked_borrows/zst-field-retagging-terminates.rs diff --git a/src/tools/miri/tests/pass/tls/tls_leak_main_thread_allowed.rs b/src/tools/miri/tests/pass/tls/tls_leak_main_thread_allowed.rs index 341b2280e0109..abc0968f7c4c6 100644 --- a/src/tools/miri/tests/pass/tls/tls_leak_main_thread_allowed.rs +++ b/src/tools/miri/tests/pass/tls/tls_leak_main_thread_allowed.rs @@ -13,7 +13,7 @@ pub fn main() { TLS.set(Some(Box::leak(Box::new(123)))); // We can only ignore leaks on targets that use `#[thread_local]` statics to implement - // `thread_local!`. Ignore the test on targest that don't. + // `thread_local!`. Ignore the test on targets that don't. if cfg!(target_thread_local) { thread_local! { static TLS_KEY: Cell> = Cell::new(None); diff --git a/src/tools/miri/tests/pass/tree_borrows/2phase-interiormut.rs b/src/tools/miri/tests/pass/tree_borrows/2phase-interiormut.rs deleted file mode 100644 index af52f53791a4f..0000000000000 --- a/src/tools/miri/tests/pass/tree_borrows/2phase-interiormut.rs +++ /dev/null @@ -1,27 +0,0 @@ -//@compile-flags: -Zmiri-tree-borrows - -// Counterpart to tests/fail/tree-borrows/write-during-2phase.rs, -// this is the opposite situation: the Write is not problematic because -// the Protector has not yet been added and the Reserved has interior -// mutability. -use core::cell::Cell; - -trait Thing: Sized { - fn do_the_thing(&mut self, _s: i32) {} -} -impl Thing for Cell {} - -fn main() { - let mut x = Cell::new(1); - let l = &x; - - x.do_the_thing({ - // Several Foreign accesses (both Reads and Writes) to the location - // being reborrowed. Reserved + unprotected + interior mut - // makes the pointer immune to everything as long as all accesses - // are child accesses to its parent pointer x. - x.set(3); - l.set(4); - x.get() + l.get() - }); -} diff --git a/src/tools/miri/tests/pass/tree_borrows/cell-alternate-writes.rs b/src/tools/miri/tests/pass/tree_borrows/cell-alternate-writes.rs index 3269acb511938..019ea369811d9 100644 --- a/src/tools/miri/tests/pass/tree_borrows/cell-alternate-writes.rs +++ b/src/tools/miri/tests/pass/tree_borrows/cell-alternate-writes.rs @@ -6,16 +6,16 @@ mod utils; use std::cell::UnsafeCell; -// UnsafeCells use the parent tag, so it is possible to use them with +// UnsafeCells use the `Cell` state, so it is possible to use them with // few restrictions when only among themselves. fn main() { unsafe { let data = &mut UnsafeCell::new(0u8); - name!(data.get(), "data"); + name!(data as *mut _, "data"); let x = &*data; - name!(x.get(), "x"); + name!(x as *const _, "x"); let y = &*data; - name!(y.get(), "y"); + name!(y as *const _, "y"); let alloc_id = alloc_id!(data.get()); print_state!(alloc_id); // y and x tolerate alternating Writes diff --git a/src/tools/miri/tests/pass/tree_borrows/cell-alternate-writes.stderr b/src/tools/miri/tests/pass/tree_borrows/cell-alternate-writes.stderr index d13e9ad0215bc..75a30c9a08375 100644 --- a/src/tools/miri/tests/pass/tree_borrows/cell-alternate-writes.stderr +++ b/src/tools/miri/tests/pass/tree_borrows/cell-alternate-writes.stderr @@ -2,11 +2,15 @@ Warning: this tree is indicative only. Some tags may have been hidden. 0.. 1 | Act | └─┬── -| ReIM| └──── +| ReIM| └─┬── +| Cel | ├──── +| Cel | └──── ────────────────────────────────────────────────── ────────────────────────────────────────────────── Warning: this tree is indicative only. Some tags may have been hidden. 0.. 1 | Act | └─┬── -| Act | └──── +| Act | └─┬── +| Cel | ├──── +| Cel | └──── ────────────────────────────────────────────────── diff --git a/src/tools/miri/tests/pass/tree_borrows/cell-inside-box.rs b/src/tools/miri/tests/pass/tree_borrows/cell-inside-box.rs new file mode 100644 index 0000000000000..adf2f4e845b5b --- /dev/null +++ b/src/tools/miri/tests/pass/tree_borrows/cell-inside-box.rs @@ -0,0 +1,35 @@ +//@compile-flags: -Zmiri-tree-borrows +#![feature(box_as_ptr)] +#[path = "../../utils/mod.rs"] +#[macro_use] +mod utils; + +use std::cell::UnsafeCell; + +pub fn main() { + let cell = UnsafeCell::new(42); + let box1 = Box::new(cell); + + unsafe { + let ptr1: *mut UnsafeCell = Box::into_raw(box1); + name!(ptr1); + + let mut box2 = Box::from_raw(ptr1); + // `ptr2` will be a descendant of `ptr1`. + let ptr2: *mut UnsafeCell = Box::as_mut_ptr(&mut box2); + name!(ptr2); + + // We perform a write through `x`. + // Because `ptr1` is ReservedIM, a child write will make it transition to Active. + // Because `ptr2` is ReservedIM, a foreign write doesn't have any effect on it. + let x = (*ptr1).get(); + *x = 1; + + // We can still read from `ptr2`. + let val = *(*ptr2).get(); + assert_eq!(val, 1); + + let alloc_id = alloc_id!(ptr1); + print_state!(alloc_id); + } +} diff --git a/src/tools/miri/tests/pass/tree_borrows/cell-inside-box.stderr b/src/tools/miri/tests/pass/tree_borrows/cell-inside-box.stderr new file mode 100644 index 0000000000000..5dbfff718b1e6 --- /dev/null +++ b/src/tools/miri/tests/pass/tree_borrows/cell-inside-box.stderr @@ -0,0 +1,7 @@ +────────────────────────────────────────────────── +Warning: this tree is indicative only. Some tags may have been hidden. +0.. 4 +| Act | └─┬── +| Act | └─┬── +| ReIM| └──── +────────────────────────────────────────────────── diff --git a/src/tools/miri/tests/pass/tree_borrows/read_retag_no_race.rs b/src/tools/miri/tests/pass/tree_borrows/read_retag_no_race.rs index d9897a1033fbe..71b7817a9c700 100644 --- a/src/tools/miri/tests/pass/tree_borrows/read_retag_no_race.rs +++ b/src/tools/miri/tests/pass/tree_borrows/read_retag_no_race.rs @@ -2,7 +2,7 @@ // This test relies on a specific interleaving that cannot be enforced // with just barriers. We must remove preemption so that the execution and the // error messages are deterministic. -//@compile-flags: -Zmiri-preemption-rate=0 +//@compile-flags: -Zmiri-deterministic-concurrency use std::ptr::addr_of_mut; use std::sync::{Arc, Barrier}; use std::thread; diff --git a/src/tools/miri/tests/pass/tree_borrows/reserved.rs b/src/tools/miri/tests/pass/tree_borrows/reserved.rs index f93cac8361e31..c57cd7fcf0abe 100644 --- a/src/tools/miri/tests/pass/tree_borrows/reserved.rs +++ b/src/tools/miri/tests/pass/tree_borrows/reserved.rs @@ -43,11 +43,11 @@ unsafe fn read_second(x: &mut T, y: *mut u8) { unsafe fn cell_protected_read() { print("[interior mut + protected] Foreign Read: Re* -> Frz"); let base = &mut UnsafeCell::new(0u8); - name!(base.get(), "base"); + name!(base as *mut _, "base"); let alloc_id = alloc_id!(base.get()); let x = &mut *(base as *mut UnsafeCell); - name!(x.get(), "x"); - let y = (&mut *base).get(); + name!(x as *mut _, "x"); + let y = &mut *base as *mut UnsafeCell as *mut u8; name!(y); read_second(x, y); // Foreign Read for callee:x print_state!(alloc_id); @@ -57,11 +57,11 @@ unsafe fn cell_protected_read() { unsafe fn cell_unprotected_read() { print("[interior mut] Foreign Read: Re* -> Re*"); let base = &mut UnsafeCell::new(0u64); - name!(base.get(), "base"); + name!(base as *mut _, "base"); let alloc_id = alloc_id!(base.get()); let x = &mut *(base as *mut UnsafeCell<_>); - name!(x.get(), "x"); - let y = (&mut *base).get(); + name!(x as *mut _, "x"); + let y = &mut *base as *mut UnsafeCell as *mut u64; name!(y); let _val = *y; // Foreign Read for x print_state!(alloc_id); @@ -72,11 +72,11 @@ unsafe fn cell_unprotected_read() { unsafe fn cell_unprotected_write() { print("[interior mut] Foreign Write: Re* -> Re*"); let base = &mut UnsafeCell::new(0u64); - name!(base.get(), "base"); + name!(base as *mut _, "base"); let alloc_id = alloc_id!(base.get()); let x = &mut *(base as *mut UnsafeCell); - name!(x.get(), "x"); - let y = (&mut *base).get(); + name!(x as *mut _, "x"); + let y = &mut *base as *mut UnsafeCell as *mut u64; name!(y); *y = 1; // Foreign Write for x print_state!(alloc_id); diff --git a/src/tools/miri/tests/pass/tree_borrows/spurious_read.rs b/src/tools/miri/tests/pass/tree_borrows/spurious_read.rs index 71e93d2f84f57..840832c633cf5 100644 --- a/src/tools/miri/tests/pass/tree_borrows/spurious_read.rs +++ b/src/tools/miri/tests/pass/tree_borrows/spurious_read.rs @@ -2,7 +2,7 @@ // Note that we are *also* using barriers: the barriers enforce the // specific interleaving of operations that we want, but only the preemption // rate guarantees that the error message is also deterministic. -//@compile-flags: -Zmiri-preemption-rate=0 +//@compile-flags: -Zmiri-deterministic-concurrency //@compile-flags: -Zmiri-tree-borrows use std::sync::{Arc, Barrier}; diff --git a/src/tools/miri/tests/pass/tree_borrows/tree-borrows.rs b/src/tools/miri/tests/pass/tree_borrows/tree-borrows.rs index b9d5ca06ed058..87eb447049d61 100644 --- a/src/tools/miri/tests/pass/tree_borrows/tree-borrows.rs +++ b/src/tools/miri/tests/pass/tree_borrows/tree-borrows.rs @@ -1,10 +1,10 @@ -//@revisions: default uniq //@compile-flags: -Zmiri-tree-borrows -//@[uniq]compile-flags: -Zmiri-unique-is-unique #![feature(allocator_api)] use std::{mem, ptr}; +// Test various tree-borrows-specific things +// (i.e., these do not work the same under SB). fn main() { aliasing_read_only_mutable_refs(); string_as_mut_ptr(); @@ -12,24 +12,6 @@ fn main() { direct_mut_to_const_raw(); local_addr_of_mut(); returned_mut_is_usable(); - - // Stacked Borrows tests - read_does_not_invalidate1(); - read_does_not_invalidate2(); - mut_raw_then_mut_shr(); - mut_shr_then_mut_raw(); - mut_raw_mut(); - partially_invalidate_mut(); - drop_after_sharing(); - two_raw(); - shr_and_raw(); - disjoint_mutable_subborrows(); - raw_ref_to_part(); - array_casts(); - mut_below_shr(); - wide_raw_ptr_in_tuple(); - not_unpin_not_protected(); - write_does_not_invalidate_all_aliases(); } #[allow(unused_assignments)] @@ -111,96 +93,6 @@ fn returned_mut_is_usable() { *y = 1; } -// ----- The tests below were taken from Stacked Borrows ---- - -// Make sure that reading from an `&mut` does, like reborrowing to `&`, -// NOT invalidate other reborrows. -fn read_does_not_invalidate1() { - fn foo(x: &mut (i32, i32)) -> &i32 { - let xraw = x as *mut (i32, i32); - let ret = unsafe { &(*xraw).1 }; - let _val = x.1; // we just read, this does NOT invalidate the reborrows. - ret - } - assert_eq!(*foo(&mut (1, 2)), 2); -} -// Same as above, but this time we first create a raw, then read from `&mut` -// and then freeze from the raw. -fn read_does_not_invalidate2() { - fn foo(x: &mut (i32, i32)) -> &i32 { - let xraw = x as *mut (i32, i32); - let _val = x.1; // we just read, this does NOT invalidate the raw reborrow. - let ret = unsafe { &(*xraw).1 }; - ret - } - assert_eq!(*foo(&mut (1, 2)), 2); -} - -// Escape a mut to raw, then share the same mut and use the share, then the raw. -// That should work. -fn mut_raw_then_mut_shr() { - let mut x = 2; - let xref = &mut x; - let xraw = &mut *xref as *mut _; - let xshr = &*xref; - assert_eq!(*xshr, 2); - unsafe { - *xraw = 4; - } - assert_eq!(x, 4); -} - -// Create first a shared reference and then a raw pointer from a `&mut` -// should permit mutation through that raw pointer. -fn mut_shr_then_mut_raw() { - let xref = &mut 2; - let _xshr = &*xref; - let xraw = xref as *mut _; - unsafe { - *xraw = 3; - } - assert_eq!(*xref, 3); -} - -// Ensure that if we derive from a mut a raw, and then from that a mut, -// and then read through the original mut, that does not invalidate the raw. -// This shows that the read-exception for `&mut` applies even if the `Shr` item -// on the stack is not at the top. -fn mut_raw_mut() { - let mut x = 2; - { - let xref1 = &mut x; - let xraw = xref1 as *mut _; - let _xref2 = unsafe { &mut *xraw }; - let _val = *xref1; - unsafe { - *xraw = 4; - } - // we can now use both xraw and xref1, for reading - assert_eq!(*xref1, 4); - assert_eq!(unsafe { *xraw }, 4); - assert_eq!(*xref1, 4); - assert_eq!(unsafe { *xraw }, 4); - // we cannot use xref2; see `compile-fail/stacked-borrows/illegal_read4.rs` - } - assert_eq!(x, 4); -} - -fn partially_invalidate_mut() { - let data = &mut (0u8, 0u8); - let reborrow = &mut *data as *mut (u8, u8); - let shard = unsafe { &mut (*reborrow).0 }; - data.1 += 1; // the deref overlaps with `shard`, but that is ok; the access does not overlap. - *shard += 1; // so we can still use `shard`. - assert_eq!(*data, (1, 1)); -} - -// Make sure that we can handle the situation where a location is frozen when being dropped. -fn drop_after_sharing() { - let x = String::from("hello!"); - let _len = x.len(); -} - // Make sure that coercing &mut T to *const T produces a writeable pointer. fn direct_mut_to_const_raw() { let x = &mut 0; @@ -210,150 +102,3 @@ fn direct_mut_to_const_raw() { } assert_eq!(*x, 1); } - -// Make sure that we can create two raw pointers from a mutable reference and use them both. -fn two_raw() { - unsafe { - let x = &mut 0; - let y1 = x as *mut _; - let y2 = x as *mut _; - *y1 += 2; - *y2 += 1; - } -} - -// Make sure that creating a *mut does not invalidate existing shared references. -fn shr_and_raw() { - unsafe { - let x = &mut 0; - let y1: &i32 = mem::transmute(&*x); // launder lifetimes - let y2 = x as *mut _; - let _val = *y1; - *y2 += 1; - } -} - -fn disjoint_mutable_subborrows() { - struct Foo { - a: String, - b: Vec, - } - - unsafe fn borrow_field_a<'a>(this: *mut Foo) -> &'a mut String { - &mut (*this).a - } - - unsafe fn borrow_field_b<'a>(this: *mut Foo) -> &'a mut Vec { - &mut (*this).b - } - - let mut foo = Foo { a: "hello".into(), b: vec![0, 1, 2] }; - - let ptr = &mut foo as *mut Foo; - - let a = unsafe { borrow_field_a(ptr) }; - let b = unsafe { borrow_field_b(ptr) }; - b.push(4); - a.push_str(" world"); - assert_eq!(format!("{:?} {:?}", a, b), r#""hello world" [0, 1, 2, 4]"#); -} - -fn raw_ref_to_part() { - struct Part { - _lame: i32, - } - - #[repr(C)] - struct Whole { - part: Part, - extra: i32, - } - - let it = Box::new(Whole { part: Part { _lame: 0 }, extra: 42 }); - let whole = ptr::addr_of_mut!(*Box::leak(it)); - let part = unsafe { ptr::addr_of_mut!((*whole).part) }; - let typed = unsafe { &mut *(part as *mut Whole) }; - assert!(typed.extra == 42); - drop(unsafe { Box::from_raw(whole) }); -} - -/// When casting an array reference to a raw element ptr, that should cover the whole array. -fn array_casts() { - let mut x: [usize; 2] = [0, 0]; - let p = &mut x as *mut usize; - unsafe { - *p.add(1) = 1; - } - - let x: [usize; 2] = [0, 1]; - let p = &x as *const usize; - assert_eq!(unsafe { *p.add(1) }, 1); -} - -/// Transmuting &&i32 to &&mut i32 is fine. -fn mut_below_shr() { - let x = 0; - let y = &x; - let p = unsafe { core::mem::transmute::<&&i32, &&mut i32>(&y) }; - let r = &**p; - let _val = *r; -} - -fn wide_raw_ptr_in_tuple() { - let mut x: Box = Box::new("ouch"); - let r = &mut *x as *mut dyn std::any::Any; - // This triggers the visitor-based recursive retagging. It is *not* supposed to retag raw - // pointers, but then the visitor might recurse into the "fields" of a wide raw pointer and - // finds a reference (to a vtable) there that it wants to retag... and that would be Wrong. - let pair = (r, &0); - let r = unsafe { &mut *pair.0 }; - // Make sure the fn ptr part of the vtable is still fine. - r.type_id(); -} - -fn not_unpin_not_protected() { - // `&mut !Unpin`, at least for now, does not get `noalias` nor `dereferenceable`, so we also - // don't add protectors. (We could, but until we have a better idea for where we want to go with - // the self-referential-coroutine situation, it does not seem worth the potential trouble.) - use std::marker::PhantomPinned; - - pub struct NotUnpin(#[allow(dead_code)] i32, PhantomPinned); - - fn inner(x: &mut NotUnpin, f: fn(&mut NotUnpin)) { - // `f` is allowed to deallocate `x`. - f(x) - } - - inner(Box::leak(Box::new(NotUnpin(0, PhantomPinned))), |x| { - let raw = x as *mut _; - drop(unsafe { Box::from_raw(raw) }); - }); -} - -fn write_does_not_invalidate_all_aliases() { - // In TB there are other ways to do that (`addr_of!(*x)` has the same tag as `x`), - // but let's still make sure this SB test keeps working. - - mod other { - /// Some private memory to store stuff in. - static mut S: *mut i32 = 0 as *mut i32; - - pub fn lib1(x: &&mut i32) { - unsafe { - S = (x as *const &mut i32).cast::<*mut i32>().read(); - } - } - - pub fn lib2() { - unsafe { - *S = 1337; - } - } - } - - let x = &mut 0; - other::lib1(&x); - *x = 42; // a write to x -- invalidates other pointers? - other::lib2(); - assert_eq!(*x, 1337); // oops, the value changed! I guess not all pointers were invalidated -} diff --git a/src/tools/miri/tests/pass/tree_borrows/unique.default.stderr b/src/tools/miri/tests/pass/tree_borrows/unique.default.stderr deleted file mode 100644 index 6098c855bde72..0000000000000 --- a/src/tools/miri/tests/pass/tree_borrows/unique.default.stderr +++ /dev/null @@ -1,21 +0,0 @@ -────────────────────────────────────────────────── -Warning: this tree is indicative only. Some tags may have been hidden. -0.. 1 -| Act | └─┬── -| Res | └─┬── -| Res | └──── -────────────────────────────────────────────────── -────────────────────────────────────────────────── -Warning: this tree is indicative only. Some tags may have been hidden. -0.. 1 -| Act | └─┬── -| Act | └─┬── -| Act | └──── -────────────────────────────────────────────────── -────────────────────────────────────────────────── -Warning: this tree is indicative only. Some tags may have been hidden. -0.. 1 -| Act | └─┬── -| Act | └─┬── -| Act | └──── -────────────────────────────────────────────────── diff --git a/src/tools/miri/tests/pass/tree_borrows/unique.rs b/src/tools/miri/tests/pass/tree_borrows/unique.rs deleted file mode 100644 index f1ca1b51aa857..0000000000000 --- a/src/tools/miri/tests/pass/tree_borrows/unique.rs +++ /dev/null @@ -1,67 +0,0 @@ -//@revisions: default uniq -// We disable the GC for this test because it would change what is printed. -//@compile-flags: -Zmiri-tree-borrows -Zmiri-provenance-gc=0 -//@[uniq]compile-flags: -Zmiri-unique-is-unique - -#![feature(ptr_internals)] - -#[path = "../../utils/mod.rs"] -#[macro_use] -mod utils; - -use core::ptr::Unique; - -// Check general handling of Unique - -fn main() { - unsafe { - let base = &mut 5u8; - let alloc_id = alloc_id!(base); - name!(base); - - let raw = &mut *base as *mut u8; - name!(raw); - - // We create a `Unique` and expect it to have a fresh tag - // and uninitialized permissions. - let uniq = Unique::new_unchecked(raw); - - // With `-Zmiri-unique-is-unique`, `Unique::as_ptr` (which is called by - // `Vec::as_ptr`) generates pointers with a fresh tag, so to name the actual - // `base` pointer we care about we have to walk up the tree a bit. - // - // We care about naming this specific parent tag because it is the one - // that stays `Active` during the entire execution, unlike the leaves - // that will be invalidated the next time `as_ptr` is called. - // - // (We name it twice so that we have an indicator in the output of - // whether we got the distance correct: - // If the output shows - // - // |- - // '- - // - // then `nth_parent` is not big enough. - // The correct value for `nth_parent` should be the minimum - // integer for which the output shows - // - // '- - // ) - // - // Ultimately we want pointers obtained through independent - // calls of `as_ptr` to be able to alias, which will probably involve - // a new permission that allows aliasing when there is no protector. - let nth_parent = if cfg!(uniq) { 2 } else { 0 }; - name!(uniq.as_ptr()=>nth_parent, "uniq"); - name!(uniq.as_ptr()=>nth_parent, "uniq"); - print_state!(alloc_id); - - // We can activate the Unique and use it mutably. - *uniq.as_ptr() = 42; - print_state!(alloc_id); - - // Write through the raw parent disables the Unique - *raw = 42; - print_state!(alloc_id); - } -} diff --git a/src/tools/miri/tests/pass/tree_borrows/unique.uniq.stderr b/src/tools/miri/tests/pass/tree_borrows/unique.uniq.stderr deleted file mode 100644 index 960c7e216e1d0..0000000000000 --- a/src/tools/miri/tests/pass/tree_borrows/unique.uniq.stderr +++ /dev/null @@ -1,24 +0,0 @@ -────────────────────────────────────────────────── -Warning: this tree is indicative only. Some tags may have been hidden. -0.. 1 -| Act | └─┬── -| Res | └─┬── -| Res | └─┬── -|-----| └──── -────────────────────────────────────────────────── -────────────────────────────────────────────────── -Warning: this tree is indicative only. Some tags may have been hidden. -0.. 1 -| Act | └─┬── -| Act | └─┬── -| Act | └─┬── -| Act | └──── -────────────────────────────────────────────────── -────────────────────────────────────────────────── -Warning: this tree is indicative only. Some tags may have been hidden. -0.. 1 -| Act | └─┬── -| Act | └─┬── -| Act | └─┬── -| Dis | └──── -────────────────────────────────────────────────── diff --git a/src/tools/miri/tests/pass/tree_borrows/vec_unique.rs b/src/tools/miri/tests/pass/tree_borrows/vec_unique.rs deleted file mode 100644 index af4c3b0693191..0000000000000 --- a/src/tools/miri/tests/pass/tree_borrows/vec_unique.rs +++ /dev/null @@ -1,69 +0,0 @@ -//@revisions: default uniq -// We disable the GC for this test because it would change what is printed. -//@compile-flags: -Zmiri-tree-borrows -Zmiri-provenance-gc=0 -//@[uniq]compile-flags: -Zmiri-unique-is-unique - -#![feature(vec_into_raw_parts)] - -#[path = "../../utils/mod.rs"] -#[macro_use] -mod utils; - -// Check general handling of `Unique`: -// there is no *explicit* `Unique` being used here, but there is one -// hidden a few layers inside `Vec` that should be reflected in the tree structure. - -fn main() { - unsafe { - let base = vec![0u8, 1]; - let alloc_id = alloc_id!(base.as_ptr()); - - // With `-Zmiri-unique-is-unique`, `Unique::as_ptr` (which is called by - // `Vec::as_ptr`) generates pointers with a fresh tag, so to name the actual - // `base` pointer we care about we have to walk up the tree a bit. - // - // We care about naming this specific parent tag because it is the one - // that stays `Active` during the entire execution, unlike the leaves - // that will be invalidated the next time `as_ptr` is called. - // - // (We name it twice so that we have an indicator in the output of - // whether we got the distance correct: - // If the output shows - // - // ├─ - // └─ - // - // then `nth_parent` is not big enough. - // The correct value for `nth_parent` should be the minimum - // integer for which the output shows - // - // └─ - // ) - // - // Ultimately we want pointers obtained through independent - // calls of `as_ptr` to be able to alias, which will probably involve - // a new permission that allows aliasing when there is no protector. - let nth_parent = if cfg!(uniq) { 9 } else { 0 }; - name!(base.as_ptr()=>nth_parent); - name!(base.as_ptr()=>nth_parent); - - // Destruct the `Vec` - let (ptr, len, cap) = base.into_raw_parts(); - - // Expect this to be again the same pointer as the one obtained from `as_ptr`. - // Under `-Zmiri-unique-is-unique`, this will be a strict child. - name!(ptr, "raw_parts.0"); - - // This is where the presence of `Unique` has implications, - // because there will be a reborrow here iff the exclusivity of `Unique` - // is enforced. - let reconstructed = Vec::from_raw_parts(ptr, len, cap); - - // The `as_ptr` here (twice for the same reason as above) return either - // the same pointer once more (default) or a strict child (uniq). - name!(reconstructed.as_ptr()=>nth_parent); - name!(reconstructed.as_ptr()=>nth_parent); - - print_state!(alloc_id, false); - } -} diff --git a/src/tools/miri/tests/pass/tree_borrows/vec_unique.uniq.stderr b/src/tools/miri/tests/pass/tree_borrows/vec_unique.uniq.stderr deleted file mode 100644 index 7942e9884f401..0000000000000 --- a/src/tools/miri/tests/pass/tree_borrows/vec_unique.uniq.stderr +++ /dev/null @@ -1,8 +0,0 @@ -────────────────────────────────────────────────── -Warning: this tree is indicative only. Some tags may have been hidden. -0.. 2 -| Act | └─┬── -|-----| └─┬── -|-----| └─┬── -|-----| └──── -────────────────────────────────────────────────── diff --git a/src/tools/miri/tests/pass/vec.rs b/src/tools/miri/tests/pass/vec.rs index 4ab2bcb7f2281..3e526813bb457 100644 --- a/src/tools/miri/tests/pass/vec.rs +++ b/src/tools/miri/tests/pass/vec.rs @@ -1,7 +1,6 @@ -//@revisions: stack tree tree_uniq +//@revisions: stack tree //@compile-flags: -Zmiri-strict-provenance //@[tree]compile-flags: -Zmiri-tree-borrows -//@[tree_uniq]compile-flags: -Zmiri-tree-borrows -Zmiri-unique-is-unique #![feature(iter_advance_by, iter_next_chunk)] // Gather all references from a mutable iterator and make sure Miri notices if diff --git a/src/tools/miri/tests/pass/vecdeque.rs b/src/tools/miri/tests/pass/vecdeque.rs index 9153c428e184d..bdf57f281a08f 100644 --- a/src/tools/miri/tests/pass/vecdeque.rs +++ b/src/tools/miri/tests/pass/vecdeque.rs @@ -1,8 +1,6 @@ -//@revisions: stack tree tree_uniq +//@revisions: stack tree //@compile-flags: -Zmiri-strict-provenance //@[tree]compile-flags: -Zmiri-tree-borrows -//@[tree_uniq]compile-flags: -Zmiri-tree-borrows -Zmiri-unique-is-unique - use std::collections::VecDeque; fn test_all_refs<'a, T: 'a>(dummy: &mut T, iter: impl Iterator) { diff --git a/src/tools/miri/tests/pass/weak_memory/weak.rs b/src/tools/miri/tests/pass/weak_memory/weak.rs index 5d636431d8675..eeab4ebf129e8 100644 --- a/src/tools/miri/tests/pass/weak_memory/weak.rs +++ b/src/tools/miri/tests/pass/weak_memory/weak.rs @@ -1,4 +1,4 @@ -//@compile-flags: -Zmiri-ignore-leaks -Zmiri-preemption-rate=0 +//@compile-flags: -Zmiri-ignore-leaks -Zmiri-fixed-schedule // Tests showing weak memory behaviours are exhibited. All tests // return true when the desired behaviour is seen. diff --git a/src/tools/miri/triagebot.toml b/src/tools/miri/triagebot.toml index 4e013764d8713..60e80c3f67330 100644 --- a/src/tools/miri/triagebot.toml +++ b/src/tools/miri/triagebot.toml @@ -40,3 +40,9 @@ unless = ["S-blocked", "S-waiting-on-team", "S-waiting-on-review"] # Automatically close and reopen PRs made by bots to run CI on them [bot-pull-requests] + +# Canonicalize issue numbers to avoid closing the wrong issue when upstreaming this subtree +[canonicalize-issue-links] + +# Prevents mentions in commits to avoid users being spammed +[no-mentions] diff --git a/src/tools/nix-dev-shell/flake.nix b/src/tools/nix-dev-shell/flake.nix index 1b838bd2f7b37..b8287de5fcf09 100644 --- a/src/tools/nix-dev-shell/flake.nix +++ b/src/tools/nix-dev-shell/flake.nix @@ -1,32 +1,24 @@ { description = "rustc dev shell"; - inputs = { - nixpkgs.url = "github:NixOS/nixpkgs/nixos-unstable"; - flake-utils.url = "github:numtide/flake-utils"; - }; + inputs.nixpkgs.url = "github:NixOS/nixpkgs/nixos-unstable"; - outputs = { self, nixpkgs, flake-utils, ... }: - flake-utils.lib.eachDefaultSystem (system: - let - pkgs = import nixpkgs { inherit system; }; - x = import ./x { inherit pkgs; }; - in - { - devShells.default = with pkgs; mkShell { - name = "rustc-dev-shell"; - nativeBuildInputs = with pkgs; [ - binutils cmake ninja pkg-config python3 git curl cacert patchelf nix - ]; - buildInputs = with pkgs; [ - openssl glibc.out glibc.static x - ]; - # Avoid creating text files for ICEs. - RUSTC_ICE = "0"; - # Provide `libstdc++.so.6` for the self-contained lld. - # Provide `libz.so.1`. - LD_LIBRARY_PATH = "${with pkgs; lib.makeLibraryPath [stdenv.cc.cc.lib zlib]}"; - }; - } - ); + outputs = + { + self, + nixpkgs, + }: + let + inherit (nixpkgs) lib; + forEachSystem = lib.genAttrs lib.systems.flakeExposed; + in + { + devShells = forEachSystem (system: { + default = nixpkgs.legacyPackages.${system}.callPackage ./shell.nix { }; + }); + + packages = forEachSystem (system: { + default = nixpkgs.legacyPackages.${system}.callPackage ./x { }; + }); + }; } diff --git a/src/tools/nix-dev-shell/shell.nix b/src/tools/nix-dev-shell/shell.nix index a3f5969bd812d..0adbacf7e8d56 100644 --- a/src/tools/nix-dev-shell/shell.nix +++ b/src/tools/nix-dev-shell/shell.nix @@ -1,18 +1,26 @@ -{ pkgs ? import {} }: -let - x = import ./x { inherit pkgs; }; +{ + pkgs ? import { }, +}: +let + inherit (pkgs.lib) lists attrsets; + + x = pkgs.callPackage ./x { }; + inherit (x.passthru) cacert env; in pkgs.mkShell { - name = "rustc"; - nativeBuildInputs = with pkgs; [ - binutils cmake ninja pkg-config python3 git curl cacert patchelf nix - ]; - buildInputs = with pkgs; [ - openssl glibc.out glibc.static x - ]; - # Avoid creating text files for ICEs. - RUSTC_ICE = "0"; - # Provide `libstdc++.so.6` for the self-contained lld. - # Provide `libz.so.1` - LD_LIBRARY_PATH = "${with pkgs; lib.makeLibraryPath [stdenv.cc.cc.lib zlib]}"; + name = "rustc-shell"; + + inputsFrom = [ x ]; + packages = [ + pkgs.git + pkgs.nix + x + # Get the runtime deps of the x wrapper + ] ++ lists.flatten (attrsets.attrValues env); + + env = { + # Avoid creating text files for ICEs. + RUSTC_ICE = 0; + SSL_CERT_FILE = cacert; + }; } diff --git a/src/tools/nix-dev-shell/x/default.nix b/src/tools/nix-dev-shell/x/default.nix index e6dfbad6f19c8..422c1c4a2aed8 100644 --- a/src/tools/nix-dev-shell/x/default.nix +++ b/src/tools/nix-dev-shell/x/default.nix @@ -1,22 +1,83 @@ { - pkgs ? import { }, + pkgs, + lib, + stdenv, + rustc, + python3, + makeBinaryWrapper, + # Bootstrap + curl, + pkg-config, + libiconv, + openssl, + patchelf, + cacert, + zlib, + # LLVM Deps + ninja, + cmake, + glibc, }: -pkgs.stdenv.mkDerivation { - name = "x"; +stdenv.mkDerivation (self: { + strictDeps = true; + name = "x-none"; + + outputs = [ + "out" + "unwrapped" + ]; src = ./x.rs; dontUnpack = true; - nativeBuildInputs = with pkgs; [ rustc ]; + nativeBuildInputs = [ + rustc + makeBinaryWrapper + ]; + env.PYTHON = python3.interpreter; buildPhase = '' - PYTHON=${pkgs.lib.getExe pkgs.python3} rustc -Copt-level=3 --crate-name x $src --out-dir $out/bin + rustc -Copt-level=3 --crate-name x $src --out-dir $unwrapped/bin ''; - meta = with pkgs.lib; { + installPhase = + let + inherit (self.passthru) cacert env; + in + '' + makeWrapper $unwrapped/bin/x $out/bin/x \ + --set-default SSL_CERT_FILE ${cacert} \ + --prefix CPATH ";" "${lib.makeSearchPath "include" env.cpath}" \ + --prefix PATH : ${lib.makeBinPath env.path} \ + --prefix LD_LIBRARY_PATH : ${lib.makeLibraryPath env.ldLib} + ''; + + # For accessing them in the devshell + passthru = { + env = { + cpath = [ libiconv ]; + path = [ + python3 + patchelf + curl + pkg-config + cmake + ninja + stdenv.cc + ]; + ldLib = [ + openssl + zlib + stdenv.cc.cc.lib + ]; + }; + cacert = "${cacert}/etc/ssl/certs/ca-bundle.crt"; + }; + + meta = { description = "Helper for rust-lang/rust x.py"; homepage = "https://github.com/rust-lang/rust/blob/master/src/tools/x"; - license = licenses.mit; + license = lib.licenses.mit; mainProgram = "x"; }; -} +}) diff --git a/src/tools/opt-dist/Cargo.toml b/src/tools/opt-dist/Cargo.toml index cea234cc74cb2..dfa884bc3f771 100644 --- a/src/tools/opt-dist/Cargo.toml +++ b/src/tools/opt-dist/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "opt-dist" version = "0.1.0" -edition = "2021" +edition = "2024" [dependencies] build_helper = { path = "../../build_helper" } @@ -10,12 +10,11 @@ log = "0.4" anyhow = "1" humantime = "2" humansize = "2" -sysinfo = { version = "0.31.2", default-features = false, features = ["disk"] } +sysinfo = { version = "0.35.0", default-features = false, features = ["disk"] } fs_extra = "1" camino = "1" tar = "0.4" xz = { version = "0.1", package = "xz2" } -serde = { version = "1", features = ["derive"] } serde_json = "1" glob = "0.3" tempfile = "3.5" diff --git a/src/tools/opt-dist/src/environment.rs b/src/tools/opt-dist/src/environment.rs index 90d0ca717b25c..946e926a3c010 100644 --- a/src/tools/opt-dist/src/environment.rs +++ b/src/tools/opt-dist/src/environment.rs @@ -25,6 +25,8 @@ pub struct Environment { prebuilt_rustc_perf: Option, use_bolt: bool, shared_llvm: bool, + run_tests: bool, + fast_try_build: bool, } impl Environment { @@ -101,6 +103,14 @@ impl Environment { pub fn benchmark_cargo_config(&self) -> &[String] { &self.benchmark_cargo_config } + + pub fn run_tests(&self) -> bool { + self.run_tests + } + + pub fn is_fast_try_build(&self) -> bool { + self.fast_try_build + } } /// What is the extension of binary executables on this platform? diff --git a/src/tools/opt-dist/src/exec.rs b/src/tools/opt-dist/src/exec.rs index deff69a7f9c00..64ce5cc377522 100644 --- a/src/tools/opt-dist/src/exec.rs +++ b/src/tools/opt-dist/src/exec.rs @@ -113,13 +113,16 @@ impl Bootstrap { "library/std", ]) .env("RUST_BACKTRACE", "full"); + let cmd = add_shared_x_flags(env, cmd); + Self { cmd, metrics_path } } pub fn dist(env: &Environment, dist_args: &[String]) -> Self { let metrics_path = env.build_root().join("build").join("metrics.json"); - let cmd = cmd(&dist_args.iter().map(|arg| arg.as_str()).collect::>()) - .env("RUST_BACKTRACE", "full"); + let args = dist_args.iter().map(|arg| arg.as_str()).collect::>(); + let cmd = cmd(&args).env("RUST_BACKTRACE", "full"); + let cmd = add_shared_x_flags(env, cmd); Self { cmd, metrics_path } } @@ -184,3 +187,7 @@ impl Bootstrap { Ok(()) } } + +fn add_shared_x_flags(env: &Environment, cmd: CmdBuilder) -> CmdBuilder { + if env.is_fast_try_build() { cmd.arg("--set").arg("rust.deny-warnings=false") } else { cmd } +} diff --git a/src/tools/opt-dist/src/main.rs b/src/tools/opt-dist/src/main.rs index ac5d294f07ed1..d2827ec01ca7d 100644 --- a/src/tools/opt-dist/src/main.rs +++ b/src/tools/opt-dist/src/main.rs @@ -76,7 +76,7 @@ enum EnvironmentCmd { rustc_perf_checkout_dir: Option, /// Is LLVM for `rustc` built in shared library mode? - #[arg(long, default_value_t = true)] + #[arg(long, default_value_t = true, action(clap::ArgAction::Set))] llvm_shared: bool, /// Should BOLT optimization be used? If yes, host LLVM must have BOLT binaries @@ -94,6 +94,10 @@ enum EnvironmentCmd { /// Arguments passed to `rustc-perf --cargo-config ` when running benchmarks. #[arg(long)] benchmark_cargo_config: Vec, + + /// Perform tests after final build if it's not a fast try build + #[arg(long)] + run_tests: bool, }, /// Perform an optimized build on Linux CI, from inside Docker. LinuxCi { @@ -107,11 +111,14 @@ enum EnvironmentCmd { }, } -fn is_try_build() -> bool { +/// For a fast try build, we want to only build the bare minimum of components to get a +/// working toolchain, and not run any tests. +fn is_fast_try_build() -> bool { std::env::var("DIST_TRY_BUILD").unwrap_or_else(|_| "0".to_string()) != "0" } fn create_environment(args: Args) -> anyhow::Result<(Environment, Vec)> { + let is_fast_try_build = is_fast_try_build(); let (env, args) = match args.env { EnvironmentCmd::Local { target_triple, @@ -125,6 +132,7 @@ fn create_environment(args: Args) -> anyhow::Result<(Environment, Vec)> skipped_tests, benchmark_cargo_config, shared, + run_tests, } => { let env = EnvironmentBuilder::default() .host_tuple(target_triple) @@ -138,6 +146,8 @@ fn create_environment(args: Args) -> anyhow::Result<(Environment, Vec)> .use_bolt(use_bolt) .skipped_tests(skipped_tests) .benchmark_cargo_config(benchmark_cargo_config) + .run_tests(run_tests) + .fast_try_build(is_fast_try_build) .build()?; (env, shared.build_args) @@ -160,6 +170,8 @@ fn create_environment(args: Args) -> anyhow::Result<(Environment, Vec)> // FIXME: Enable bolt for aarch64 once it's fixed upstream. Broken as of December 2024. .use_bolt(!is_aarch64) .skipped_tests(vec![]) + .run_tests(true) + .fast_try_build(is_fast_try_build) .build()?; (env, shared.build_args) @@ -179,6 +191,8 @@ fn create_environment(args: Args) -> anyhow::Result<(Environment, Vec)> .shared_llvm(false) .use_bolt(false) .skipped_tests(vec![]) + .run_tests(true) + .fast_try_build(is_fast_try_build) .build()?; (env, shared.build_args) @@ -342,9 +356,8 @@ fn execute_pipeline( // After dist has finished, run a subset of the test suite on the optimized artifacts to discover // possible regressions. - // The tests are not executed for try builds, which can be in various broken states, so we don't - // want to gatekeep them with tests. - if !is_try_build() { + // The tests are not executed for fast try builds, which can be broken and might not pass them. + if !is_fast_try_build() && env.run_tests() { timer.section("Run tests", |_| run_tests(env))?; } @@ -353,7 +366,10 @@ fn execute_pipeline( fn main() -> anyhow::Result<()> { // Make sure that we get backtraces for easier debugging in CI - std::env::set_var("RUST_BACKTRACE", "1"); + unsafe { + // SAFETY: we are the only thread running at this point + std::env::set_var("RUST_BACKTRACE", "1"); + } env_logger::builder() .filter_level(LevelFilter::Info) @@ -385,9 +401,9 @@ fn main() -> anyhow::Result<()> { let (env, mut build_args) = create_environment(args).context("Cannot create environment")?; - // Skip components that are not needed for try builds to speed them up - if is_try_build() { - log::info!("Skipping building of unimportant components for a try build"); + // Skip components that are not needed for fast try builds to speed them up + if is_fast_try_build() { + log::info!("Skipping building of unimportant components for a fast try build"); for target in [ "rust-docs", "rustc-docs", diff --git a/src/tools/opt-dist/src/training.rs b/src/tools/opt-dist/src/training.rs index 30c79f9594744..47159a43140f6 100644 --- a/src/tools/opt-dist/src/training.rs +++ b/src/tools/opt-dist/src/training.rs @@ -70,7 +70,9 @@ fn merge_llvm_profiles( profdata: LlvmProfdata, ) -> anyhow::Result<()> { let llvm_profdata = match profdata { - LlvmProfdata::Host => env.host_llvm_dir().join("bin/llvm-profdata"), + LlvmProfdata::Host => { + env.host_llvm_dir().join(format!("bin/llvm-profdata{}", executable_extension())) + } LlvmProfdata::Target => env .build_artifacts() .join("llvm") diff --git a/src/tools/opt-dist/src/utils/mod.rs b/src/tools/opt-dist/src/utils/mod.rs index 32d88a59af92d..fb4f14ea41aea 100644 --- a/src/tools/opt-dist/src/utils/mod.rs +++ b/src/tools/opt-dist/src/utils/mod.rs @@ -36,7 +36,9 @@ pub fn clear_llvm_files(env: &Environment) -> anyhow::Result<()> { // directories ourselves. log::info!("Clearing LLVM build files"); delete_directory(&env.build_artifacts().join("llvm"))?; - delete_directory(&env.build_artifacts().join("lld"))?; + if env.build_artifacts().join("lld").is_dir() { + delete_directory(&env.build_artifacts().join("lld"))?; + } Ok(()) } diff --git a/src/tools/run-make-support/Cargo.toml b/src/tools/run-make-support/Cargo.toml index f9beffec75085..15ed03ad5c23d 100644 --- a/src/tools/run-make-support/Cargo.toml +++ b/src/tools/run-make-support/Cargo.toml @@ -14,9 +14,5 @@ build_helper = { path = "../../build_helper" } serde_json = "1.0" libc = "0.2" -# FIXME(#137532): replace `os_pipe` with `anonymous_pipe` once it stabilizes and -# reaches beta. -os_pipe = "1.2.1" - [lib] crate-type = ["lib", "dylib"] diff --git a/src/tools/run-make-support/src/artifact_names.rs b/src/tools/run-make-support/src/artifact_names.rs index 8968f831542e6..b0d588d3550ac 100644 --- a/src/tools/run-make-support/src/artifact_names.rs +++ b/src/tools/run-make-support/src/artifact_names.rs @@ -1,11 +1,11 @@ //! A collection of helpers to construct artifact names, such as names of dynamic or static -//! librarys which are target-dependent. - -// FIXME(jieyouxu): convert these to return `PathBuf`s instead of strings! +//! libraries which are target-dependent. +use crate::target; use crate::targets::is_msvc; /// Construct the static library name based on the target. +#[track_caller] #[must_use] pub fn static_lib_name(name: &str) -> String { assert!(!name.contains(char::is_whitespace), "static library name cannot contain whitespace"); @@ -14,15 +14,34 @@ pub fn static_lib_name(name: &str) -> String { } /// Construct the dynamic library name based on the target. +#[track_caller] #[must_use] pub fn dynamic_lib_name(name: &str) -> String { assert!(!name.contains(char::is_whitespace), "dynamic library name cannot contain whitespace"); - format!("{}{name}.{}", std::env::consts::DLL_PREFIX, std::env::consts::DLL_EXTENSION) + format!("{}{name}.{}", dynamic_lib_prefix(), dynamic_lib_extension()) +} + +fn dynamic_lib_prefix() -> &'static str { + if target().contains("windows") { "" } else { "lib" } } -/// Construct the name of the import library for the dynamic library, exclusive to MSVC and -/// accepted by link.exe. +/// Construct the dynamic library extension based on the target. +#[must_use] +pub fn dynamic_lib_extension() -> &'static str { + let target = target(); + + if target.contains("apple") { + "dylib" + } else if target.contains("windows") { + "dll" + } else { + "so" + } +} + +/// Construct the name of the import library for the dynamic library, exclusive to MSVC and accepted +/// by link.exe. #[track_caller] #[must_use] pub fn msvc_import_dynamic_lib_name(name: &str) -> String { @@ -32,20 +51,28 @@ pub fn msvc_import_dynamic_lib_name(name: &str) -> String { format!("{name}.dll.lib") } -/// Construct the dynamic library extension based on the target. -#[must_use] -pub fn dynamic_lib_extension() -> &'static str { - std::env::consts::DLL_EXTENSION -} - /// Construct the name of a rust library (rlib). +#[track_caller] #[must_use] pub fn rust_lib_name(name: &str) -> String { format!("lib{name}.rlib") } /// Construct the binary (executable) name based on the target. +#[track_caller] #[must_use] pub fn bin_name(name: &str) -> String { - format!("{name}{}", std::env::consts::EXE_SUFFIX) + let target = target(); + + if target.contains("windows") { + format!("{name}.exe") + } else if target.contains("uefi") { + format!("{name}.efi") + } else if target.contains("wasm") { + format!("{name}.wasm") + } else if target.contains("nvptx") { + format!("{name}.ptx") + } else { + name.to_string() + } } diff --git a/src/tools/run-make-support/src/external_deps/cargo.rs b/src/tools/run-make-support/src/external_deps/cargo.rs index e91d101cb995b..8da9f002c41b3 100644 --- a/src/tools/run-make-support/src/external_deps/cargo.rs +++ b/src/tools/run-make-support/src/external_deps/cargo.rs @@ -1,8 +1,11 @@ use crate::command::Command; use crate::env_var; +use crate::util::set_host_compiler_dylib_path; /// Returns a command that can be used to invoke cargo. The cargo is provided by compiletest /// through the `CARGO` env var. pub fn cargo() -> Command { - Command::new(env_var("CARGO")) + let mut cmd = Command::new(env_var("CARGO")); + set_host_compiler_dylib_path(&mut cmd); + cmd } diff --git a/src/tools/run-make-support/src/external_deps/rustc.rs b/src/tools/run-make-support/src/external_deps/rustc.rs index 0e2239147f122..a7081d4f86a29 100644 --- a/src/tools/run-make-support/src/external_deps/rustc.rs +++ b/src/tools/run-make-support/src/external_deps/rustc.rs @@ -22,12 +22,6 @@ pub fn bare_rustc() -> Rustc { Rustc::bare() } -/// Construct a new `rustc` aux-build invocation. -#[track_caller] -pub fn aux_build() -> Rustc { - Rustc::new_aux_build() -} - /// A `rustc` invocation builder. #[derive(Debug)] #[must_use] @@ -67,14 +61,6 @@ impl Rustc { Self { cmd } } - /// Construct a new `rustc` invocation with `aux_build` preset (setting `--crate-type=lib`). - #[track_caller] - pub fn new_aux_build() -> Self { - let mut cmd = setup_common(); - cmd.arg("--crate-type=lib"); - Self { cmd } - } - // Argument provider methods /// Configure the compilation environment. diff --git a/src/tools/run-make-support/src/lib.rs b/src/tools/run-make-support/src/lib.rs index e0ad3ee9bedaa..f37b38ac0b151 100644 --- a/src/tools/run-make-support/src/lib.rs +++ b/src/tools/run-make-support/src/lib.rs @@ -17,6 +17,7 @@ pub mod assertion_helpers; pub mod diff; pub mod env; pub mod external_deps; +pub mod linker; pub mod path_helpers; pub mod run; pub mod scoped_run; @@ -40,8 +41,6 @@ pub use bstr; pub use gimli; pub use libc; pub use object; -// FIXME(#137532): replace with std `anonymous_pipe` once it stabilizes and reaches beta. -pub use os_pipe; pub use regex; pub use serde_json; pub use similar; @@ -68,7 +67,7 @@ pub use llvm::{ LlvmFilecheck, LlvmNm, LlvmObjcopy, LlvmObjdump, LlvmProfdata, LlvmReadobj, }; pub use python::python_command; -pub use rustc::{aux_build, bare_rustc, rustc, rustc_path, Rustc}; +pub use rustc::{bare_rustc, rustc, rustc_path, Rustc}; pub use rustdoc::{rustdoc, Rustdoc}; /// [`diff`][mod@diff] is implemented in terms of the [similar] library. diff --git a/src/tools/run-make-support/src/linker.rs b/src/tools/run-make-support/src/linker.rs new file mode 100644 index 0000000000000..89093cf011393 --- /dev/null +++ b/src/tools/run-make-support/src/linker.rs @@ -0,0 +1,36 @@ +use regex::Regex; + +use crate::{Rustc, is_msvc}; + +/// Asserts that `rustc` uses LLD for linking when executed. +pub fn assert_rustc_uses_lld(rustc: &mut Rustc) { + let stderr = get_stderr_with_linker_messages(rustc); + assert!( + has_lld_version_in_logs(&stderr), + "LLD version should be present in rustc stderr:\n{stderr}" + ); +} + +/// Asserts that `rustc` doesn't use LLD for linking when executed. +pub fn assert_rustc_doesnt_use_lld(rustc: &mut Rustc) { + let stderr = get_stderr_with_linker_messages(rustc); + assert!( + !has_lld_version_in_logs(&stderr), + "LLD version should NOT be present in rustc stderr:\n{stderr}" + ); +} + +fn get_stderr_with_linker_messages(rustc: &mut Rustc) -> String { + // lld-link is used if msvc, otherwise a gnu-compatible lld is used. + let linker_version_flag = if is_msvc() { "--version" } else { "-Wl,-v" }; + + let output = rustc.arg("-Wlinker-messages").link_arg(linker_version_flag).run(); + output.stderr_utf8() +} + +fn has_lld_version_in_logs(stderr: &str) -> bool { + // Strip the `-Wlinker-messages` wrappers prefixing the linker output. + let stderr = Regex::new(r"warning: linker std(out|err):").unwrap().replace_all(&stderr, ""); + let lld_version_re = Regex::new(r"^LLD [0-9]+\.[0-9]+\.[0-9]+").unwrap(); + stderr.lines().any(|line| lld_version_re.is_match(line.trim())) +} diff --git a/src/tools/rust-analyzer/.github/ISSUE_TEMPLATE/bug_report.md b/src/tools/rust-analyzer/.github/ISSUE_TEMPLATE/bug_report.md index 0d99d06bcddee..8333cf08929f8 100644 --- a/src/tools/rust-analyzer/.github/ISSUE_TEMPLATE/bug_report.md +++ b/src/tools/rust-analyzer/.github/ISSUE_TEMPLATE/bug_report.md @@ -8,7 +8,7 @@ assignees: '' --- diff --git a/src/tools/rust-analyzer/.github/workflows/ci.yaml b/src/tools/rust-analyzer/.github/workflows/ci.yaml index 7a6b43a053155..79fb7a2d2ea96 100644 --- a/src/tools/rust-analyzer/.github/workflows/ci.yaml +++ b/src/tools/rust-analyzer/.github/workflows/ci.yaml @@ -15,7 +15,6 @@ env: CARGO_NET_RETRY: 10 CI: 1 RUST_BACKTRACE: short - RUSTFLAGS: "-D warnings -D elided_lifetimes_in_paths -D explicit_outlives_requirements -D unsafe_op_in_unsafe_fn -D unused_extern_crates -D unused_lifetimes -D unreachable_pub" RUSTUP_MAX_RETRIES: 10 jobs: @@ -25,7 +24,6 @@ jobs: pull-requests: read outputs: typescript: ${{ steps.filter.outputs.typescript }} - proc_macros: ${{ steps.filter.outputs.proc_macros }} steps: - uses: actions/checkout@v4 - uses: dorny/paths-filter@1441771bbfdd59dcd748680ee64ebd8faab1a242 @@ -34,52 +32,54 @@ jobs: filters: | typescript: - 'editors/code/**' - proc_macros: - - 'crates/tt/**' - - 'crates/proc-macro-api/**' - - 'crates/proc-macro-srv/**' - - 'crates/proc-macro-srv-cli/**' proc-macro-srv: - needs: changes - if: github.repository == 'rust-lang/rust-analyzer' && needs.changes.outputs.proc_macros == 'true' + if: github.repository == 'rust-lang/rust-analyzer' name: proc-macro-srv runs-on: ubuntu-latest + env: + RUSTFLAGS: "-D warnings" + steps: - name: Checkout repository uses: actions/checkout@v4 with: ref: ${{ github.event.pull_request.head.sha }} + - name: Install rustup-toolchain-install-master + run: cargo install rustup-toolchain-install-master@1.6.0 + + # Install a pinned rustc commit to avoid surprises - name: Install Rust toolchain run: | - rustup update --no-self-update nightly - rustup default nightly - rustup component add --toolchain nightly rust-src rustfmt + RUSTC_VERSION=`cat rust-version` + rustup-toolchain-install-master ${RUSTC_VERSION} -c rust-src -c rustfmt + rustup default ${RUSTC_VERSION} + + # Emulate a nightly toolchain, because the toolchain installed above does not have "nightly" + # in its version string. + - name: Emulate a nightly toolchain + run: echo "RUSTC_BOOTSTRAP=1" >> $GITHUB_ENV + # https://github.com/actions-rust-lang/setup-rust-toolchain/blob/main/rust.json - name: Install Rust Problem Matcher - if: matrix.os == 'ubuntu-latest' run: echo "::add-matcher::.github/rust.json" - - name: Cache Dependencies - uses: Swatinem/rust-cache@9bdad043e88c75890e36ad3bbc8d27f0090dd609 - - - name: Bump opt-level - if: matrix.os == 'ubuntu-latest' - run: sed -i '/\[profile.dev]/a opt-level=1' Cargo.toml - - name: Test - run: cargo test --features sysroot-abi -p rust-analyzer -p proc-macro-srv -p proc-macro-srv-cli -p proc-macro-api -- --quiet + run: cargo test --features sysroot-abi -p proc-macro-srv -p proc-macro-srv-cli -p proc-macro-api -- --quiet + + - name: Check salsa dependency + run: "! (cargo tree -p proc-macro-srv-cli | grep -q salsa)" rust: if: github.repository == 'rust-lang/rust-analyzer' name: Rust runs-on: ${{ matrix.os }} env: + RUSTFLAGS: "-Dwarnings" CC: deny_c strategy: - fail-fast: false matrix: os: [ubuntu-latest, windows-latest, macos-latest] @@ -93,7 +93,7 @@ jobs: run: | rustup update --no-self-update stable rustup default stable - rustup component add --toolchain stable rust-src + rustup component add --toolchain stable rust-src clippy # We always use a nightly rustfmt, regardless of channel, because we need # --file-lines. rustup toolchain install nightly --profile minimal --component rustfmt @@ -102,51 +102,105 @@ jobs: if: matrix.os == 'ubuntu-latest' run: echo "::add-matcher::.github/rust.json" - - name: Cache Dependencies - uses: Swatinem/rust-cache@9bdad043e88c75890e36ad3bbc8d27f0090dd609 + # - name: Cache Dependencies + # uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 + # with: + # workspaces: | + # . -> target + # ./crates/proc-macro-srv/proc-macro-test/imp -> target - - name: Bump opt-level - if: matrix.os == 'ubuntu-latest' - run: sed -i '/\[profile.dev]/a opt-level=1' Cargo.toml + - name: Install nextest + uses: taiki-e/install-action@nextest - name: Codegen checks (rust-analyzer) + if: matrix.os == 'ubuntu-latest' run: cargo codegen --check - - name: Compile (tests) - run: cargo test --no-run --locked + - name: Compile tests + run: cargo test --no-run - # It's faster to `test` before `build` ¯\_(ツ)_/¯ - - name: Compile (rust-analyzer) - if: matrix.os == 'ubuntu-latest' - run: cargo build --quiet + - name: Run tests + run: cargo nextest run --no-fail-fast --hide-progress-bar --status-level fail - - name: Test - if: matrix.os == 'ubuntu-latest' || matrix.os == 'windows-latest' || github.event_name == 'push' - run: cargo test -- --quiet + - name: Cancel parallel jobs + if: failure() + run: | + # https://docs.github.com/en/rest/actions/workflow-runs?apiVersion=2022-11-28#cancel-a-workflow-run + curl -L \ + -X POST \ + -H "Accept: application/vnd.github.v3+json" \ + -H "Authorization: Bearer ${{ secrets.GITHUB_TOKEN }}" \ + -H "X-GitHub-Api-Version: 2022-11-28" \ + https://api.github.com/repos/${{ github.repository }}/actions/runs/${{ github.run_id }}/cancel + + - name: Run Clippy + if: matrix.os == 'macos-latest' + run: cargo clippy --all-targets -- -D clippy::disallowed_macros -D clippy::dbg_macro -D clippy::todo -D clippy::print_stdout -D clippy::print_stderr - - name: Switch to stable toolchain + analysis-stats: + if: github.repository == 'rust-lang/rust-analyzer' + runs-on: ubuntu-latest + env: + RUSTC_BOOTSTRAP: 1 + + steps: + - name: Checkout repository + uses: actions/checkout@v4 + + - name: Install Rust toolchain run: | rustup update --no-self-update stable - rustup component add --toolchain stable rust-src clippy rustup default stable + rustup component add rustfmt - - name: Run analysis-stats on rust-analyzer - if: matrix.os == 'ubuntu-latest' - run: target/${{ matrix.target }}/debug/rust-analyzer analysis-stats . + # - name: Cache Dependencies + # uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 - - name: Run analysis-stats on the rust standard libraries - if: matrix.os == 'ubuntu-latest' - env: - RUSTC_BOOTSTRAP: 1 - run: target/${{ matrix.target }}/debug/rust-analyzer analysis-stats --with-deps --no-sysroot --no-test $(rustc --print sysroot)/lib/rustlib/src/rust/library/ + - name: Bump opt-level + run: sed -i '/\[profile.dev]/a opt-level=1' Cargo.toml - - name: clippy - if: matrix.os == 'windows-latest' - run: cargo clippy --all-targets -- -D clippy::disallowed_macros -D clippy::dbg_macro -D clippy::todo -D clippy::print_stdout -D clippy::print_stderr + - run: cargo build -p rust-analyzer - - name: rustfmt - if: matrix.os == 'ubuntu-latest' - run: cargo fmt -- --check + - name: ./rust-analyzer + run: ./target/debug/rust-analyzer analysis-stats . -q + + - name: sysroot/lib/rustlib/src/rust/library/ + run: ./target/debug/rust-analyzer analysis-stats --with-deps --no-sysroot --no-test $(rustc --print sysroot)/lib/rustlib/src/rust/library/ -q + + rustfmt: + if: github.repository == 'rust-lang/rust-analyzer' + runs-on: ubuntu-latest + + steps: + - name: Checkout repository + uses: actions/checkout@v4 + + - name: Install Rust toolchain + run: | + rustup update --no-self-update stable + rustup default stable + rustup component add rustfmt + + - run: cargo fmt -- --check + + miri: + if: github.repository == 'rust-lang/rust-analyzer' + runs-on: ubuntu-latest + + steps: + - name: Checkout repository + uses: actions/checkout@v4 + + - name: Install Rust toolchain + run: | + rustup update --no-self-update nightly + rustup default nightly + rustup component add miri + + # - name: Cache Dependencies + # uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 + + - run: cargo miri test -p intern # Weird targets to catch non-portable code rust-cross: @@ -154,11 +208,16 @@ jobs: name: Rust Cross runs-on: ubuntu-latest + strategy: + matrix: + target: [powerpc-unknown-linux-gnu, x86_64-unknown-linux-musl, wasm32-unknown-unknown] + include: + # The rust-analyzer binary is not expected to compile on WASM, but the IDE + # crate should + - target: wasm32-unknown-unknown + ide-only: true env: - targets: "powerpc-unknown-linux-gnu x86_64-unknown-linux-musl" - # The rust-analyzer binary is not expected to compile on WASM, but the IDE - # crate should - targets_ide: "wasm32-unknown-unknown" + RUSTFLAGS: "-Dwarnings" steps: - name: Checkout repository @@ -167,19 +226,15 @@ jobs: - name: Install Rust toolchain run: | rustup update --no-self-update stable - rustup target add ${{ env.targets }} ${{ env.targets_ide }} + rustup target add ${{ matrix.target }} - - name: Cache Dependencies - uses: Swatinem/rust-cache@9bdad043e88c75890e36ad3bbc8d27f0090dd609 + # - name: Cache Dependencies + # uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 - - name: Check - run: | - for target in ${{ env.targets }}; do - cargo check --target=$target --all-targets - done - for target in ${{ env.targets_ide }}; do - cargo check -p ide --target=$target --all-targets - done + - run: cargo check --target=${{ matrix.target }} --all-targets -p ide + if: ${{ matrix.ide-only }} + - run: cargo check --target=${{ matrix.target }} --all-targets + if: ${{ !matrix.ide-only }} typescript: needs: changes @@ -261,7 +316,7 @@ jobs: run: typos conclusion: - needs: [rust, rust-cross, typescript, typo-check, proc-macro-srv] + needs: [rust, rust-cross, typescript, typo-check, proc-macro-srv, miri, rustfmt, analysis-stats] # We need to ensure this job does *not* get skipped if its dependencies fail, # because a skipped job is considered a success by GitHub. So we have to # overwrite `if:`. We use `!cancelled()` to ensure the job does still not get run diff --git a/src/tools/rust-analyzer/.github/workflows/release.yaml b/src/tools/rust-analyzer/.github/workflows/release.yaml index c8e6de72ce98f..a758ecfd46796 100644 --- a/src/tools/rust-analyzer/.github/workflows/release.yaml +++ b/src/tools/rust-analyzer/.github/workflows/release.yaml @@ -29,19 +29,25 @@ jobs: - os: windows-latest target: x86_64-pc-windows-msvc code-target: win32-x64 + pgo: clap-rs/clap@v4.5.36 - os: windows-latest target: i686-pc-windows-msvc + pgo: clap-rs/clap@v4.5.36 - os: windows-latest target: aarch64-pc-windows-msvc code-target: win32-arm64 - os: ubuntu-latest target: x86_64-unknown-linux-gnu - zig_target: x86_64-unknown-linux-gnu.2.28 + # Use a container with glibc 2.28 + # Zig is not used because it doesn't work with PGO + container: quay.io/pypa/manylinux_2_28_x86_64 code-target: linux-x64 - - os: ubuntu-latest + pgo: clap-rs/clap@v4.5.36 + - os: ubuntu-24.04-arm target: aarch64-unknown-linux-gnu - zig_target: aarch64-unknown-linux-gnu.2.28 + container: quay.io/pypa/manylinux_2_28_aarch64 code-target: linux-arm64 + pgo: clap-rs/clap@v4.5.36 - os: ubuntu-latest target: arm-unknown-linux-gnueabihf zig_target: arm-unknown-linux-gnueabihf.2.28 @@ -49,9 +55,11 @@ jobs: - os: macos-13 target: x86_64-apple-darwin code-target: darwin-x64 - - os: macos-13 + pgo: clap-rs/clap@v4.5.36 + - os: macos-14 target: aarch64-apple-darwin code-target: darwin-arm64 + pgo: clap-rs/clap@v4.5.36 name: dist (${{ matrix.target }}) runs-on: ${{ matrix.os }} @@ -71,10 +79,17 @@ jobs: with: node-version: 22 + - name: Install rustup + if: ${{ matrix.container }} + run: | + curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y --profile minimal + echo "$HOME/.cargo/bin" >> $GITHUB_PATH + - name: Install Rust toolchain run: | rustup update --no-self-update stable - rustup component add rust-src + # llvm-tools contain the llvm-profdata tool which is needed for PGO + rustup component add rust-src ${{ matrix.pgo && 'llvm-tools' || '' }} rustup target add ${{ matrix.target }} - name: Install Zig toolchain @@ -87,11 +102,11 @@ jobs: - name: Dist (plain) if: ${{ !matrix.zig_target }} - run: cargo xtask dist --client-patch-version ${{ github.run_number }} + run: cargo xtask dist --client-patch-version ${{ github.run_number }} ${{ matrix.pgo && format('--pgo {0}', matrix.pgo) || ''}} - name: Dist (using zigbuild) if: ${{ matrix.zig_target }} - run: RA_TARGET=${{ matrix.zig_target}} cargo xtask dist --client-patch-version ${{ github.run_number }} --zig + run: RA_TARGET=${{ matrix.zig_target}} cargo xtask dist --client-patch-version ${{ github.run_number }} --zig ${{ matrix.pgo && format('--pgo {0}', matrix.pgo) || ''}} - run: npm ci working-directory: editors/code diff --git a/src/tools/rust-analyzer/Cargo.lock b/src/tools/rust-analyzer/Cargo.lock index 1e1d68f778247..8d6c8284e44ef 100644 --- a/src/tools/rust-analyzer/Cargo.lock +++ b/src/tools/rust-analyzer/Cargo.lock @@ -1,65 +1,77 @@ # This file is automatically @generated by Cargo. # It is not intended for manual editing. -version = 3 +version = 4 [[package]] name = "addr2line" -version = "0.22.0" +version = "0.24.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6e4503c46a5c0c7844e948c9a4d6acd9f50cccb4de1c48eb9e291ea17470c678" +checksum = "dfbe277e56a376000877090da837660b4427aad530e3028d44e0bffe4f89a1c1" dependencies = [ "gimli", ] [[package]] -name = "adler" -version = "1.0.2" +name = "adler2" +version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe" +checksum = "512761e0bb2578dd7380c6baaa0f4ce03e84f95e960231d1dec8bf4d7d6e2627" [[package]] -name = "always-assert" -version = "0.2.0" +name = "aho-corasick" +version = "1.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e60d3430d3a69478ad0993f19238d2df97c507009a52b3c10addcd7f6bcb916" +dependencies = [ + "memchr", +] + +[[package]] +name = "allocator-api2" +version = "0.2.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a1078fa1ce1e34b1872d8611ad921196d76bdd7027e949fbe31231abde201892" +checksum = "683d7910e743518b0e34f1186f92494becacb047c7b6bf616c96772180fef923" [[package]] name = "anyhow" -version = "1.0.86" +version = "1.0.97" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b3d1d046238990b9cf5bcde22a3fb3584ee5cf65fb2765f454ed428c7a0063da" +checksum = "dcfed56ad506cb2c684a14971b8861fdc3baaaae314b9e5f9bb532cbe3ba7a4f" [[package]] name = "arbitrary" -version = "1.3.2" +version = "1.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7d5a26814d8dcb93b0e5a0ff3c6d80a8843bafb21b39e8e18a6f05471870e110" +checksum = "dde20b3d026af13f561bdd0f15edf01fc734f0dafcedbaf42bba506a9517f223" +dependencies = [ + "derive_arbitrary", +] [[package]] name = "arrayvec" -version = "0.7.4" +version = "0.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "96d30a06541fbafbc7f82ed10c06164cfbd2c401138f6addd8404629c4b16711" +checksum = "7c02d123df017efcdfbd739ef81735b36c5ba83ec3c59c80a9d7ecc718f92e50" [[package]] name = "autocfg" -version = "1.3.0" +version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0c4b4d0bd25bd0b74681c0ad21497610ce1b7c91b1022cd21c80c6fbdd9476b0" +checksum = "ace50bade8e6234aa140d9a2f552bbee1db4d353f69b8217bc503490fc1a9f26" [[package]] name = "backtrace" -version = "0.3.73" +version = "0.3.74" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5cc23269a4f8976d0a4d2e7109211a419fe30e8d88d677cd60b6bc79c5732e0a" +checksum = "8d82cb332cdfaed17ae235a638438ac4d4839913cc2af585c3c6746e8f8bee1a" dependencies = [ "addr2line", - "cc", "cfg-if", "libc", "miniz_oxide", - "object 0.36.3", + "object", "rustc-demangle", + "windows-targets 0.52.6", ] [[package]] @@ -67,14 +79,15 @@ name = "base-db" version = "0.0.0" dependencies = [ "cfg", + "dashmap", "intern", "la-arena 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", - "lz4_flex", - "rustc-hash 2.0.0", + "query-group-macro", + "rustc-hash 2.1.1", "salsa", + "salsa-macros", "semver", "span", - "stdx", "syntax", "tracing", "triomphe", @@ -95,62 +108,65 @@ checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" [[package]] name = "bitflags" -version = "2.7.0" +version = "2.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1be3f42a67d6d345ecd59f675f3f012d6974981560836e938c22b424b85ce1be" +checksum = "5c8214115b7bf84099f1309324e63141d4c5d7cc26862f97a0a857dbefe165bd" [[package]] name = "borsh" -version = "1.5.1" +version = "1.5.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a6362ed55def622cddc70a4746a68554d7b687713770de539e59a739b249f8ed" +checksum = "5430e3be710b68d984d1391c854eb431a9d548640711faa54eecb1df93db91cc" dependencies = [ - "cfg_aliases 0.2.1", + "cfg_aliases", ] [[package]] -name = "byteorder" -version = "1.5.0" +name = "boxcar" +version = "0.2.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" +checksum = "6740c6e2fc6360fa57c35214c7493826aee95993926092606f27c983b40837be" +dependencies = [ + "loom", +] [[package]] name = "camino" -version = "1.1.7" +version = "1.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e0ec6b951b160caa93cc0c7b209e5a3bff7aae9062213451ac99493cd844c239" +checksum = "8b96ec4966b5813e2c0507c1f86115c8c5abaadc3980879c3424042a02fd1ad3" dependencies = [ "serde", ] [[package]] name = "cargo-platform" -version = "0.1.8" +version = "0.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "24b1f0365a6c6bb4020cd05806fd0d33c44d38046b8bd7f0e40814b9763cabfc" +checksum = "e35af189006b9c0f00a064685c727031e3ed2d8020f7ba284d78cc2671bd36ea" dependencies = [ "serde", ] [[package]] name = "cargo_metadata" -version = "0.18.1" +version = "0.19.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2d886547e41f740c616ae73108f6eb70afe6d940c7bc697cb30f13daec073037" +checksum = "dd5eb614ed4c27c5d706420e4320fbe3216ab31fa1c33cd8246ac36dae4479ba" dependencies = [ "camino", "cargo-platform", "semver", "serde", "serde_json", - "thiserror", + "thiserror 2.0.12", ] [[package]] name = "cc" -version = "1.1.22" +version = "1.2.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9540e661f81799159abee814118cc139a2004b3a3aa3ea37724a1b66530b90e0" +checksum = "be714c154be609ec7f5dad223a33bf1482fff90472de28f7362806e6d4832b8c" dependencies = [ "shlex", ] @@ -164,7 +180,7 @@ dependencies = [ "expect-test", "intern", "oorandom", - "rustc-hash 2.0.0", + "rustc-hash 2.1.1", "syntax", "syntax-bridge", "tracing", @@ -177,12 +193,6 @@ version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" -[[package]] -name = "cfg_aliases" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fd16c4719339c4530435d38e511904438d07cce7950afa3718a84ac36c10e89e" - [[package]] name = "cfg_aliases" version = "0.2.1" @@ -191,9 +201,9 @@ checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724" [[package]] name = "chalk-derive" -version = "0.100.0" +version = "0.102.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ab2d131019373f0d0d1f2af0abd4f719739f6583c1b33965112455f643a910af" +checksum = "feb14e3ff0ebac26d8e58b6ed1417afb60c4a0a44b6425546ee7eb9c75ebb336" dependencies = [ "proc-macro2", "quote", @@ -203,19 +213,19 @@ dependencies = [ [[package]] name = "chalk-ir" -version = "0.100.0" +version = "0.102.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4f114996bda14c0213f014a4ef31a7867dcf5f539a3900477fc6b20138e7a17b" +checksum = "72f0a61621a088af69fee8df39ec63cf5b6d0b9ab663a740cdeb376aabf2f244" dependencies = [ - "bitflags 2.7.0", + "bitflags 2.9.0", "chalk-derive", ] [[package]] name = "chalk-recursive" -version = "0.100.0" +version = "0.102.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "551e956e031c09057c7b21f17d48d91de99c9b6b6e34bceaf5e7202d71021268" +checksum = "cbd3415cc540015533aa4a8ad007696d585dd9c5f81e7c099872f1dd4bf14894" dependencies = [ "chalk-derive", "chalk-ir", @@ -226,15 +236,15 @@ dependencies = [ [[package]] name = "chalk-solve" -version = "0.100.0" +version = "0.102.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cd7ca50181156ce649efe8e5dd00580f573651554e4dcd11afa4e2ac93f53324" +checksum = "747707b0c082b3ecf4b1ae28d0d8df708a46cddd22a386f9cc85a312a4de25ff" dependencies = [ "chalk-derive", "chalk-ir", "ena", "indexmap", - "itertools", + "itertools 0.12.1", "petgraph", "rustc-hash 1.1.0", "tracing", @@ -263,18 +273,18 @@ dependencies = [ [[package]] name = "crossbeam-channel" -version = "0.5.13" +version = "0.5.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "33480d6946193aa8033910124896ca395333cae7e2d1113d1fef6c3272217df2" +checksum = "82b8f8f868b36967f9606790d1903570de9ceaf870a7bf9fbbd3016d636a2cb2" dependencies = [ "crossbeam-utils", ] [[package]] name = "crossbeam-deque" -version = "0.8.5" +version = "0.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "613f8cc01fe9cf1a3eb3d7f488fd2fa8388403e97039e2f73692932e291a770d" +checksum = "9dd111b7b7f7d55b72c0a6ae361660ee5853c9af73f70c3c2ef6858b950e2e51" dependencies = [ "crossbeam-epoch", "crossbeam-utils", @@ -289,30 +299,40 @@ dependencies = [ "crossbeam-utils", ] +[[package]] +name = "crossbeam-queue" +version = "0.3.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0f58bbc28f91df819d0aa2a2c00cd19754769c2fad90579b3592b1c9ba7a3115" +dependencies = [ + "crossbeam-utils", +] + [[package]] name = "crossbeam-utils" -version = "0.8.20" +version = "0.8.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "22ec99545bb0ed0ea7bb9b8e1e9122ea386ff8a48c0922e43f36d45ab09e0e80" +checksum = "d0a5c400df2834b80a4c3327b3aad3a4c4cd4de0629063962b03235697506a28" [[package]] name = "ctrlc" -version = "3.4.4" +version = "3.4.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "672465ae37dc1bc6380a6547a8883d5dd397b0f1faaad4f265726cc7042a5345" +checksum = "90eeab0aa92f3f9b4e87f258c72b139c207d251f9cbc1080a0086b86a8870dd3" dependencies = [ "nix", - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] name = "dashmap" -version = "5.5.3" +version = "6.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "978747c1d849a7d2ee5e8adc0159961c48fb7e5db2f06af6723b80123bb53856" +checksum = "5041cc499144891f3790297212f32a74fb938e5136a14943f338ef9e0ae276cf" dependencies = [ "cfg-if", - "hashbrown", + "crossbeam-utils", + "hashbrown 0.14.5", "lock_api", "once_cell", "parking_lot_core", @@ -320,18 +340,18 @@ dependencies = [ [[package]] name = "deranged" -version = "0.3.11" +version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b42b6fa04a440b495c8b04d0e71b707c585f83cb9cb28cf8cd0d976c315e31b4" +checksum = "9c9e6a11ca8224451684bc0d7d5a7adbf8f2fd6887261a1cfc3c0432f9d4068e" dependencies = [ "powerfmt", ] [[package]] name = "derive_arbitrary" -version = "1.3.2" +version = "1.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "67e77553c4162a157adbf834ebae5b415acbecbeafc7a74b0e886657506a7611" +checksum = "30542c1ad912e0e3d22a1935c290e12e8a29d704a420177a31faad4a601a0800" dependencies = [ "proc-macro2", "quote", @@ -340,39 +360,50 @@ dependencies = [ [[package]] name = "directories" -version = "5.0.1" +version = "6.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9a49173b84e034382284f27f1af4dcbbd231ffa358c0fe316541a7337f376a35" +checksum = "16f5094c54661b38d03bd7e50df373292118db60b585c08a411c6d840017fe7d" dependencies = [ "dirs-sys", ] [[package]] name = "dirs" -version = "5.0.1" +version = "6.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "44c45a9d03d6676652bcb5e724c7e988de1acad23a711b5217ab9cbecbec2225" +checksum = "c3e8aa94d75141228480295a7d0e7feb620b1a5ad9f12bc40be62411e38cce4e" dependencies = [ "dirs-sys", ] [[package]] name = "dirs-sys" -version = "0.4.1" +version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "520f05a5cbd335fae5a99ff7a6ab8627577660ee5cfd6a94a6a929b52ff0321c" +checksum = "e01a3366d27ee9890022452ee61b2b63a67e6f13f58900b651ff5665f0bb1fab" dependencies = [ "libc", "option-ext", "redox_users", - "windows-sys 0.48.0", + "windows-sys 0.59.0", +] + +[[package]] +name = "displaydoc" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" +dependencies = [ + "proc-macro2", + "quote", + "syn", ] [[package]] name = "dissimilar" -version = "1.0.9" +version = "1.0.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "59f8e79d1fbf76bdfbde321e902714bf6c49df88a7dda6fc682fc2979226962d" +checksum = "8975ffdaa0ef3661bfe02dbdcc06c9f829dfafe6a3c474de366a8d5e44276921" [[package]] name = "dot" @@ -392,9 +423,9 @@ version = "0.0.0" [[package]] name = "either" -version = "1.13.0" +version = "1.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "60b1af1c220855b6ceac025d3f6ecdd2b7c4894bfe9cd9bda4fbb4bc7c0d4cf0" +checksum = "48c757948c5ede0e46177b7add2e67155f70e33c07fea8284df6576da70b3719" [[package]] name = "ena" @@ -407,15 +438,15 @@ dependencies = [ [[package]] name = "equivalent" -version = "1.0.1" +version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" +checksum = "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f" [[package]] name = "expect-test" -version = "1.5.0" +version = "1.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9e0be0a561335815e06dab7c62e50353134c796e7a6155402a64bcff66b6a5e0" +checksum = "63af43ff4431e848fb47472a920f14fa71c24de13255a5692e93d4e90302acb0" dependencies = [ "dissimilar", "once_cell", @@ -423,9 +454,9 @@ dependencies = [ [[package]] name = "filetime" -version = "0.2.24" +version = "0.2.25" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bf401df4a4e3872c4fe8151134cf483738e74b67fc934d6532c882b3d24a4550" +checksum = "35c0522e981e68cbfa8c3f978441a5f34b30b96e146b33cd3359176b50fe8586" dependencies = [ "cfg-if", "libc", @@ -441,14 +472,20 @@ checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80" [[package]] name = "flate2" -version = "1.0.31" +version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f211bbe8e69bbd0cfdea405084f128ae8b4aaa6b0b522fc8f2b009084797920" +checksum = "11faaf5a5236997af9848be0bef4db95824b1d534ebc64d0f0c6cf3e67bd38dc" dependencies = [ "crc32fast", "miniz_oxide", ] +[[package]] +name = "foldhash" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a0d2fde1f7b3d48b8395d5f2de76c18a528bd6a9cdde438df747bfcba3e05d6f" + [[package]] name = "form_urlencoded" version = "1.2.1" @@ -473,6 +510,19 @@ version = "0.4.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7ab85b9b05e3978cc9a9cf8fea7f01b494e1a09ed3037e16ba39edc7a29eb61a" +[[package]] +name = "generator" +version = "0.8.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cc6bd114ceda131d3b1d665eba35788690ad37f5916457286b32ab6fd3c438dd" +dependencies = [ + "cfg-if", + "libc", + "log", + "rustversion", + "windows 0.58.0", +] + [[package]] name = "getrandom" version = "0.2.15" @@ -486,9 +536,9 @@ dependencies = [ [[package]] name = "gimli" -version = "0.29.0" +version = "0.31.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "40ecd4077b5ae9fd2e9e169b102c6c330d0605168eb0e8bf79952b256dbefffd" +checksum = "07e28edb80900c19c28f1072f2e8aeca7fa06b23cd4169cefe1af5aa3260783f" [[package]] name = "hashbrown" @@ -496,11 +546,31 @@ version = "0.14.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1" +[[package]] +name = "hashbrown" +version = "0.15.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bf151400ff0baff5465007dd2f3e717f3fe502074ca563069ce3a6629d07b289" +dependencies = [ + "allocator-api2", + "equivalent", + "foldhash", +] + +[[package]] +name = "hashlink" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7382cf6263419f2d8df38c55d7da83da5c18aef87fc7a7fc1fb1e344edfe14c1" +dependencies = [ + "hashbrown 0.15.2", +] + [[package]] name = "heck" -version = "0.4.1" +version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8" +checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea" [[package]] name = "hermit-abi" @@ -522,8 +592,8 @@ dependencies = [ "hir-ty", "indexmap", "intern", - "itertools", - "rustc-hash 2.0.0", + "itertools 0.14.0", + "rustc-hash 2.1.1", "smallvec", "span", "stdx", @@ -542,26 +612,26 @@ version = "0.0.0" dependencies = [ "arrayvec", "base-db", - "bitflags 2.7.0", + "bitflags 2.9.0", "cfg", "cov-mark", - "dashmap", "drop_bomb", "either", "expect-test", "fst", - "hashbrown", "hir-expand", "indexmap", "intern", - "itertools", + "itertools 0.14.0", "la-arena 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", "mbe", + "query-group-macro", "ra-ap-rustc_abi", - "ra-ap-rustc_hashes", "ra-ap-rustc_parse_format", - "rustc-hash 2.0.0", + "rustc-hash 2.1.1", "rustc_apfloat", + "salsa", + "salsa-macros", "smallvec", "span", "stdx", @@ -570,6 +640,7 @@ dependencies = [ "test-fixture", "test-utils", "text-size", + "thin-vec", "tracing", "triomphe", "tt", @@ -584,13 +655,14 @@ dependencies = [ "cov-mark", "either", "expect-test", - "hashbrown", "intern", - "itertools", - "la-arena 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", + "itertools 0.14.0", "mbe", "parser", - "rustc-hash 2.0.0", + "query-group-macro", + "rustc-hash 2.1.1", + "salsa", + "salsa-macros", "smallvec", "span", "stdx", @@ -607,7 +679,7 @@ version = "0.0.0" dependencies = [ "arrayvec", "base-db", - "bitflags 2.7.0", + "bitflags 2.9.0", "chalk-derive", "chalk-ir", "chalk-recursive", @@ -620,16 +692,18 @@ dependencies = [ "hir-expand", "indexmap", "intern", - "itertools", + "itertools 0.14.0", "la-arena 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", - "nohash-hasher", "oorandom", "project-model", + "query-group-macro", "ra-ap-rustc_abi", "ra-ap-rustc_index", "ra-ap-rustc_pattern_analysis", - "rustc-hash 2.0.0", + "rustc-hash 2.1.1", "rustc_apfloat", + "salsa", + "salsa-macros", "scoped-tls", "smallvec", "span", @@ -646,11 +720,129 @@ dependencies = [ [[package]] name = "home" -version = "0.5.9" +version = "0.5.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e3d1354bf6b7235cb4a0576c2619fd4ed18183f689b12b006a0ee7329eeff9a5" +checksum = "589533453244b0995c858700322199b2becb13b627df2851f64a2775d024abcf" dependencies = [ - "windows-sys 0.52.0", + "windows-sys 0.59.0", +] + +[[package]] +name = "icu_collections" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "db2fa452206ebee18c4b5c2274dbf1de17008e874b4dc4f0aea9d01ca79e4526" +dependencies = [ + "displaydoc", + "yoke", + "zerofrom", + "zerovec", +] + +[[package]] +name = "icu_locid" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "13acbb8371917fc971be86fc8057c41a64b521c184808a698c02acc242dbf637" +dependencies = [ + "displaydoc", + "litemap", + "tinystr", + "writeable", + "zerovec", +] + +[[package]] +name = "icu_locid_transform" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "01d11ac35de8e40fdeda00d9e1e9d92525f3f9d887cdd7aa81d727596788b54e" +dependencies = [ + "displaydoc", + "icu_locid", + "icu_locid_transform_data", + "icu_provider", + "tinystr", + "zerovec", +] + +[[package]] +name = "icu_locid_transform_data" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fdc8ff3388f852bede6b579ad4e978ab004f139284d7b28715f773507b946f6e" + +[[package]] +name = "icu_normalizer" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "19ce3e0da2ec68599d193c93d088142efd7f9c5d6fc9b803774855747dc6a84f" +dependencies = [ + "displaydoc", + "icu_collections", + "icu_normalizer_data", + "icu_properties", + "icu_provider", + "smallvec", + "utf16_iter", + "utf8_iter", + "write16", + "zerovec", +] + +[[package]] +name = "icu_normalizer_data" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f8cafbf7aa791e9b22bec55a167906f9e1215fd475cd22adfcf660e03e989516" + +[[package]] +name = "icu_properties" +version = "1.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "93d6020766cfc6302c15dbbc9c8778c37e62c14427cb7f6e601d849e092aeef5" +dependencies = [ + "displaydoc", + "icu_collections", + "icu_locid_transform", + "icu_properties_data", + "icu_provider", + "tinystr", + "zerovec", +] + +[[package]] +name = "icu_properties_data" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "67a8effbc3dd3e4ba1afa8ad918d5684b8868b3b26500753effea8d2eed19569" + +[[package]] +name = "icu_provider" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6ed421c8a8ef78d3e2dbc98a973be2f3770cb42b606e3ab18d6237c4dfde68d9" +dependencies = [ + "displaydoc", + "icu_locid", + "icu_provider_macros", + "stable_deref_trait", + "tinystr", + "writeable", + "yoke", + "zerofrom", + "zerovec", +] + +[[package]] +name = "icu_provider_macros" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1ec89e9337638ecdc08744df490b221a7399bf8d164eb52a665454e60e075ad6" +dependencies = [ + "proc-macro2", + "quote", + "syn", ] [[package]] @@ -669,7 +861,7 @@ dependencies = [ "ide-db", "ide-diagnostics", "ide-ssr", - "itertools", + "itertools 0.14.0", "nohash-hasher", "oorandom", "profile", @@ -697,7 +889,7 @@ dependencies = [ "expect-test", "hir", "ide-db", - "itertools", + "itertools 0.14.0", "smallvec", "stdx", "syntax", @@ -715,7 +907,7 @@ dependencies = [ "expect-test", "hir", "ide-db", - "itertools", + "itertools 0.14.0", "smallvec", "stdx", "syntax", @@ -730,7 +922,7 @@ version = "0.0.0" dependencies = [ "arrayvec", "base-db", - "bitflags 2.7.0", + "bitflags 2.9.0", "cov-mark", "crossbeam-channel", "either", @@ -738,14 +930,17 @@ dependencies = [ "fst", "hir", "indexmap", - "itertools", + "itertools 0.14.0", "line-index 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", "memchr", "nohash-hasher", "parser", "profile", + "query-group-macro", "rayon", - "rustc-hash 2.0.0", + "rustc-hash 2.1.1", + "salsa", + "salsa-macros", "span", "stdx", "syntax", @@ -753,6 +948,7 @@ dependencies = [ "test-utils", "tracing", "triomphe", + "vfs", ] [[package]] @@ -765,7 +961,7 @@ dependencies = [ "expect-test", "hir", "ide-db", - "itertools", + "itertools 0.14.0", "paths", "serde_json", "stdx", @@ -783,10 +979,8 @@ dependencies = [ "expect-test", "hir", "ide-db", - "itertools", - "nohash-hasher", + "itertools 0.14.0", "parser", - "stdx", "syntax", "test-fixture", "test-utils", @@ -795,22 +989,34 @@ dependencies = [ [[package]] name = "idna" -version = "0.5.0" +version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "634d9b1461af396cad843f47fdba5597a4f9e6ddd4bfb6ff5d85028c25cb12f6" +checksum = "686f825264d630750a544639377bae737628043f20d38bbc029e8f29ea968a7e" dependencies = [ - "unicode-bidi", - "unicode-normalization", + "idna_adapter", + "smallvec", + "utf8_iter", +] + +[[package]] +name = "idna_adapter" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "daca1df1c957320b2cf139ac61e7bd64fed304c5040df000a745aa1de3b4ef71" +dependencies = [ + "icu_normalizer", + "icu_properties", ] [[package]] name = "indexmap" -version = "2.3.0" +version = "2.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "de3fc2e30ba82dd1b3911c8de1ffc143c74a914a14e99514d7637e3099df5ea0" +checksum = "3954d50fe15b02142bf25d3b8bdadb634ec3948f103d04ffe3031bc8fe9d7058" dependencies = [ "equivalent", - "hashbrown", + "hashbrown 0.15.2", + "serde", ] [[package]] @@ -819,7 +1025,7 @@ version = "0.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f37dccff2791ab604f9babef0ba14fbe0be30bd368dc541e2b08d07c8aa908f3" dependencies = [ - "bitflags 2.7.0", + "bitflags 2.9.0", "inotify-sys", "libc", ] @@ -838,8 +1044,8 @@ name = "intern" version = "0.0.0" dependencies = [ "dashmap", - "hashbrown", - "rustc-hash 2.0.0", + "hashbrown 0.14.5", + "rustc-hash 2.1.1", "triomphe", ] @@ -852,17 +1058,26 @@ dependencies = [ "either", ] +[[package]] +name = "itertools" +version = "0.14.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2b192c782037fadd9cfa75548310488aabdbf3d2da73885b31bd0abd03351285" +dependencies = [ + "either", +] + [[package]] name = "itoa" -version = "1.0.11" +version = "1.0.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "49f1f14873335454500d59611f1cf4a4b0f786f9ac11f4312a78e4cf2566695b" +checksum = "4a5f13b858c8d314ee3e8f639011f7ccefe71f97f96e50151fb991f267928e2c" [[package]] name = "jod-thread" -version = "0.1.2" +version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8b23360e99b8717f20aaa4598f5a6541efbe30630039fbc7706cf954a87947ae" +checksum = "a037eddb7d28de1d0fc42411f501b53b75838d313908078d6698d064f3029b24" [[package]] name = "kqueue" @@ -902,15 +1117,15 @@ checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" [[package]] name = "libc" -version = "0.2.169" +version = "0.2.172" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b5aba8db14291edd000dfcc4d620c7ebfb122c613afb886ca8803fa4e128a20a" +checksum = "d750af042f7ef4f724306de029d18836c26c1765a54a6a3f094cbd23a7267ffa" [[package]] name = "libloading" -version = "0.8.5" +version = "0.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4979f22fdb869068da03c9f7528f8297c6fd2606bc3a4affe42e6a823fdb8da4" +checksum = "fc2f4eb4bc735547cfed7c0a4922cbd04a4655978c09b54f1f7b228750664c34" dependencies = [ "cfg-if", "windows-targets 0.52.6", @@ -918,9 +1133,9 @@ dependencies = [ [[package]] name = "libmimalloc-sys" -version = "0.1.39" +version = "0.1.40" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "23aa6811d3bd4deb8a84dde645f943476d13b248d818edcf8ce0b2f37f036b44" +checksum = "07d0e07885d6a754b9c7993f2625187ad694ee985d60f23355ff0e7077261502" dependencies = [ "cc", "libc", @@ -932,7 +1147,7 @@ version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c0ff37bd590ca25063e35af745c343cb7a0271906fb7b37e4813e8f79f00268d" dependencies = [ - "bitflags 2.7.0", + "bitflags 2.9.0", "libc", "redox_syscall", ] @@ -957,10 +1172,10 @@ dependencies = [ ] [[package]] -name = "linked-hash-map" -version = "0.5.6" +name = "litemap" +version = "0.7.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0717cef1bc8b636c6e1c1bbdefc09e6322da8a9321966e8928ef80d20f7f770f" +checksum = "23fb14cb19457329c82206317a5663005a4d404783dc74f4252769b0d5f42856" [[package]] name = "load-cargo" @@ -971,8 +1186,7 @@ dependencies = [ "hir-expand", "ide-db", "intern", - "itertools", - "paths", + "itertools 0.14.0", "proc-macro-api", "project-model", "span", @@ -994,30 +1208,44 @@ dependencies = [ [[package]] name = "log" -version = "0.4.22" +version = "0.4.26" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a7a70ba024b9dc04c27ea2f0c0548feb474ec5c54bba33a7f72f873a39d07b24" +checksum = "30bde2b3dc3671ae49d8e2e9f044c7c005836e7a023ee57cffa25ab82764bb9e" [[package]] -name = "lsp-server" -version = "0.7.7" +name = "loom" +version = "0.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "550446e84739dcaf6d48a4a093973850669e13e8a34d8f8d64851041be267cd9" +checksum = "419e0dc8046cb947daa77eb95ae174acfbddb7673b4151f56d1eed8e93fbfaca" +dependencies = [ + "cfg-if", + "generator", + "scoped-tls", + "tracing", + "tracing-subscriber", +] + +[[package]] +name = "lsp-server" +version = "0.7.8" dependencies = [ "crossbeam-channel", + "ctrlc", "log", + "lsp-types", "serde", + "serde_derive", "serde_json", ] [[package]] name = "lsp-server" version = "0.7.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9462c4dc73e17f971ec1f171d44bfffb72e65a130117233388a0ebc7ec5656f9" dependencies = [ "crossbeam-channel", - "ctrlc", "log", - "lsp-types", "serde", "serde_derive", "serde_json", @@ -1037,10 +1265,13 @@ dependencies = [ ] [[package]] -name = "lz4_flex" -version = "0.11.3" +name = "matchers" +version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "75761162ae2b0e580d7e7c390558127e5f01b4194debd6221fd8c207fc80e3f5" +checksum = "8263075bb86c5a1b1427b5ae862e8889656f126e9f77c484496e8b47cf5c5558" +dependencies = [ + "regex-automata 0.1.10", +] [[package]] name = "mbe" @@ -1052,14 +1283,13 @@ dependencies = [ "intern", "parser", "ra-ap-rustc_lexer", - "rustc-hash 2.0.0", + "rustc-hash 2.1.1", "smallvec", "span", "stdx", "syntax", "syntax-bridge", "test-utils", - "tracing", "tt", ] @@ -1071,9 +1301,9 @@ checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3" [[package]] name = "memmap2" -version = "0.5.10" +version = "0.9.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "83faa42c0a078c393f6b29d5db232d8be22776a891f8f56e5284faee4a20b327" +checksum = "fd3f7eed9d3848f8b98834af67102b720745c4ec028fcd0aa0239277e7de374f" dependencies = [ "libc", ] @@ -1089,20 +1319,20 @@ dependencies = [ [[package]] name = "mimalloc" -version = "0.1.43" +version = "0.1.44" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "68914350ae34959d83f732418d51e2427a794055d0b9529f48259ac07af65633" +checksum = "99585191385958383e13f6b822e6b6d8d9cf928e7d286ceb092da92b43c87bc1" dependencies = [ "libmimalloc-sys", ] [[package]] name = "miniz_oxide" -version = "0.7.4" +version = "0.8.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b8a240ddb74feaf34a79a7add65a741f3167852fba007066dcac1ca548d89c08" +checksum = "8e3e04debbb59698c15bacbb6d93584a8c0ca9cc3213cb423d31f760d8843ce5" dependencies = [ - "adler", + "adler2", ] [[package]] @@ -1128,13 +1358,13 @@ dependencies = [ [[package]] name = "nix" -version = "0.28.0" +version = "0.29.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ab2156c4fce2f8df6c499cc1c763e4394b7482525bf2a9701c9d79d215f519e4" +checksum = "71e2746dc3a24dd78b3cfcb7be93368c6de9963d30f43a6a73998a9cf4b17b46" dependencies = [ - "bitflags 2.7.0", + "bitflags 2.9.0", "cfg-if", - "cfg_aliases 0.1.1", + "cfg_aliases", "libc", ] @@ -1150,7 +1380,7 @@ version = "8.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2fee8403b3d66ac7b26aee6e40a897d85dc5ce26f44da36b8b73e987cc52e943" dependencies = [ - "bitflags 2.7.0", + "bitflags 2.9.0", "filetime", "fsevent-sys", "inotify", @@ -1169,6 +1399,16 @@ version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5e0826a989adedc2a244799e823aece04662b66609d96af8dff7ac6df9a8925d" +[[package]] +name = "nu-ansi-term" +version = "0.46.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77a8165726e8236064dbb45459242600304b42a5ea24ee2948e18e023bf7ba84" +dependencies = [ + "overload", + "winapi", +] + [[package]] name = "nu-ansi-term" version = "0.50.1" @@ -1205,33 +1445,24 @@ dependencies = [ [[package]] name = "object" -version = "0.33.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d8dd6c0cdf9429bce006e1362bfce61fa1bfd8c898a643ed8d2b471934701d3d" -dependencies = [ - "memchr", -] - -[[package]] -name = "object" -version = "0.36.3" +version = "0.36.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "27b64972346851a39438c60b341ebc01bba47464ae329e55cf343eb93964efd9" +checksum = "62948e14d923ea95ea2c7c86c71013138b66525b86bdc08d2dcc262bdb497b87" dependencies = [ "memchr", ] [[package]] name = "once_cell" -version = "1.19.0" +version = "1.21.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92" +checksum = "d75b0bedcc4fe52caa0e03d9f1151a323e4aa5e2d78ba3580400cd3c9e2bc4bc" [[package]] name = "oorandom" -version = "11.1.4" +version = "11.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b410bbe7e14ab526a0e86877eb47c6996a2bd7746f027ba551028c925390e4e9" +checksum = "d6790f58c7ff633d8771f42965289203411a5e5c68388703c06e14f24770b41e" [[package]] name = "option-ext" @@ -1239,6 +1470,12 @@ version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "04744f49eae99ab78e0d5c0b603ab218f515ea8cfe5a456d7629ad883a3b6e7d" +[[package]] +name = "overload" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39" + [[package]] name = "parking_lot" version = "0.12.3" @@ -1270,6 +1507,7 @@ dependencies = [ "edition", "expect-test", "ra-ap-rustc_lexer", + "rustc-literal-escaper", "stdx", "tracing", ] @@ -1324,24 +1562,21 @@ dependencies = [ [[package]] name = "pin-project-lite" -version = "0.2.14" +version = "0.2.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bda66fc9667c18cb2758a2ac84d1167245054bcf85d5d1aaa6923f45801bdd02" +checksum = "3b3cff922bd51709b605d9ead9aa71031d81447142d828eb4a6eba76fe619f9b" [[package]] -name = "powerfmt" -version = "0.2.0" +name = "portable-atomic" +version = "1.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391" +checksum = "350e9b48cbc6b0e028b0473b114454c6316e57336ee184ceab6e53f72c178b3e" [[package]] -name = "ppv-lite86" -version = "0.2.20" +name = "powerfmt" +version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "77957b295656769bb8ad2b6a6b09d897d94f05c41b069aede1fcdaa675eaea04" -dependencies = [ - "zerocopy", -] +checksum = "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391" [[package]] name = "proc-macro-api" @@ -1350,7 +1585,7 @@ dependencies = [ "indexmap", "intern", "paths", - "rustc-hash 2.0.0", + "rustc-hash 2.1.1", "serde", "serde_derive", "serde_json", @@ -1369,12 +1604,11 @@ dependencies = [ "libc", "libloading", "memmap2", - "object 0.33.0", + "object", "paths", "proc-macro-test", "ra-ap-rustc_lexer", "span", - "stdx", "syntax-bridge", "tt", ] @@ -1397,23 +1631,23 @@ dependencies = [ [[package]] name = "proc-macro2" -version = "1.0.93" +version = "1.0.94" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "60946a68e5f9d28b0dc1c21bb8a97ee7d018a8b322fa57838ba31cc878e22d99" +checksum = "a31971752e70b8b2686d7e46ec17fb38dad4051d94024c88df49b667caea9c84" dependencies = [ "unicode-ident", ] [[package]] name = "process-wrap" -version = "8.0.2" +version = "8.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "38ee68ae331824036479c84060534b18254c864fa73366c58d86db3b7b811619" +checksum = "d35f4dc9988d1326b065b4def5e950c3ed727aa03e3151b86cc9e2aec6b03f54" dependencies = [ "indexmap", "nix", "tracing", - "windows", + "windows 0.59.0", ] [[package]] @@ -1437,10 +1671,10 @@ dependencies = [ "cfg", "expect-test", "intern", - "itertools", + "itertools 0.14.0", "la-arena 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", "paths", - "rustc-hash 2.0.0", + "rustc-hash 2.1.1", "semver", "serde", "serde_derive", @@ -1460,7 +1694,7 @@ checksum = "a3a7c64d9bf75b1b8d981124c14c179074e8caa7dfe7b6a12e6222ddcd0c8f72" dependencies = [ "once_cell", "protobuf-support", - "thiserror", + "thiserror 1.0.69", ] [[package]] @@ -1469,7 +1703,7 @@ version = "3.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b088fd20b938a875ea00843b6faf48579462630015c3788d397ad6a786663252" dependencies = [ - "thiserror", + "thiserror 1.0.69", ] [[package]] @@ -1478,7 +1712,7 @@ version = "0.9.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "57206b407293d2bcd3af849ce869d52068623f19e1b5ff8e8778e3309439682b" dependencies = [ - "bitflags 2.7.0", + "bitflags 2.9.0", "memchr", "unicase", ] @@ -1492,22 +1726,34 @@ dependencies = [ "pulldown-cmark", ] +[[package]] +name = "query-group-macro" +version = "0.0.0" +dependencies = [ + "expect-test", + "proc-macro2", + "quote", + "salsa", + "salsa-macros", + "syn", +] + [[package]] name = "quote" -version = "1.0.36" +version = "1.0.40" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0fa76aaf39101c457836aec0ce2316dbdc3ab723cdda1c6bd4e6ad4208acaca7" +checksum = "1885c039570dc00dcb4ff087a89e185fd56bae234ddc7f056a945bf36467248d" dependencies = [ "proc-macro2", ] [[package]] name = "ra-ap-rustc_abi" -version = "0.100.0" +version = "0.110.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f1651b0f7e8c3eb7c27a88f39d277e69c32bfe58e3be174d286c1a24d6a7a4d8" +checksum = "912228bd8ed3beff1f6f9e5e2d4b37c0827ba3e2070060bf3858a311d0e29e30" dependencies = [ - "bitflags 2.7.0", + "bitflags 2.9.0", "ra-ap-rustc_hashes", "ra-ap-rustc_index", "tracing", @@ -1515,18 +1761,18 @@ dependencies = [ [[package]] name = "ra-ap-rustc_hashes" -version = "0.100.0" +version = "0.110.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2bcd85e93dc0ea850bcfe7957a115957df799ccbc9eea488bdee5ec6780d212b" +checksum = "ba520764daf057a9d963fa769f4762eaf87ac5d4900ae76195eeead64cd35afd" dependencies = [ "rustc-stable-hash", ] [[package]] name = "ra-ap-rustc_index" -version = "0.100.0" +version = "0.110.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "62b295fc0640cd9fe0ecab872ee4a17a96f90a3998ec9f0c4765e9b8415c12cc" +checksum = "b76b5f9ee55f2d0e5a65bea23f6d738893349ce8d3d17a6720933e647ab04978" dependencies = [ "ra-ap-rustc_index_macros", "smallvec", @@ -1534,9 +1780,9 @@ dependencies = [ [[package]] name = "ra-ap-rustc_index_macros" -version = "0.100.0" +version = "0.110.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c675f4257023aa933882906f13802cae287e88cc39ab13cbb96809083db0c801" +checksum = "ddd972eb1face2fcaa0d94c01d97862fb955b5561d4f5932003bce8a6cadd8c6" dependencies = [ "proc-macro2", "quote", @@ -1545,9 +1791,9 @@ dependencies = [ [[package]] name = "ra-ap-rustc_lexer" -version = "0.100.0" +version = "0.110.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c8358702c2a510ea84ba5801ddc047d9ad9520902cfb0e6173277610cdce2c9c" +checksum = "ba3a9876456fb2521097deef33ddeac1c18260c8eafb68054d986f8b9d6ce9fa" dependencies = [ "memchr", "unicode-properties", @@ -1556,97 +1802,111 @@ dependencies = [ [[package]] name = "ra-ap-rustc_parse_format" -version = "0.100.0" +version = "0.110.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b98f402011d46732c35c47bfd111dec0495747fef2ec900ddee7fe15d78449a7" +checksum = "8e85de58dfcc60a5f9d5ec0157a657e3f84abd8f22c8a0c4d707cfb42c9011f4" dependencies = [ - "ra-ap-rustc_index", "ra-ap-rustc_lexer", + "rustc-literal-escaper", ] [[package]] name = "ra-ap-rustc_pattern_analysis" -version = "0.100.0" +version = "0.110.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bef3ff73fa4653252ffe1d1e9177a446f49ef46d97140e4816b7ff2dad59ed53" +checksum = "ceadf9db550db67deff7eff2e2765109b860c9d7e5bdfca144863020289c823d" dependencies = [ "ra-ap-rustc_index", - "rustc-hash 2.0.0", + "rustc-hash 2.1.1", "rustc_apfloat", "smallvec", "tracing", ] [[package]] -name = "rand" -version = "0.8.5" +name = "rayon" +version = "1.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" +checksum = "b418a60154510ca1a002a752ca9714984e21e4241e804d32555251faf8b78ffa" dependencies = [ - "libc", - "rand_chacha", - "rand_core", + "either", + "rayon-core", ] [[package]] -name = "rand_chacha" -version = "0.3.1" +name = "rayon-core" +version = "1.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" +checksum = "1465873a3dfdaa8ae7cb14b4383657caab0b3e8a0aa9ae8e04b044854c8dfce2" dependencies = [ - "ppv-lite86", - "rand_core", + "crossbeam-deque", + "crossbeam-utils", ] [[package]] -name = "rand_core" -version = "0.6.4" +name = "redox_syscall" +version = "0.5.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" +checksum = "0b8c0c260b63a8219631167be35e6a988e9554dbd323f8bd08439c8ed1302bd1" dependencies = [ - "getrandom", + "bitflags 2.9.0", ] [[package]] -name = "rayon" -version = "1.10.0" +name = "redox_users" +version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b418a60154510ca1a002a752ca9714984e21e4241e804d32555251faf8b78ffa" +checksum = "dd6f9d3d47bdd2ad6945c5015a226ec6155d0bcdfd8f7cd29f86b71f8de99d2b" dependencies = [ - "either", - "rayon-core", + "getrandom", + "libredox", + "thiserror 2.0.12", ] [[package]] -name = "rayon-core" -version = "1.12.1" +name = "regex" +version = "1.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1465873a3dfdaa8ae7cb14b4383657caab0b3e8a0aa9ae8e04b044854c8dfce2" +checksum = "b544ef1b4eac5dc2db33ea63606ae9ffcfac26c1416a2806ae0bf5f56b201191" dependencies = [ - "crossbeam-deque", - "crossbeam-utils", + "aho-corasick", + "memchr", + "regex-automata 0.4.9", + "regex-syntax 0.8.5", ] [[package]] -name = "redox_syscall" -version = "0.5.3" +name = "regex-automata" +version = "0.1.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2a908a6e00f1fdd0dfd9c0eb08ce85126f6d8bbda50017e74bc4a4b7d4a926a4" +checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132" dependencies = [ - "bitflags 2.7.0", + "regex-syntax 0.6.29", ] [[package]] -name = "redox_users" -version = "0.4.5" +name = "regex-automata" +version = "0.4.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bd283d9651eeda4b2a83a43c1c91b266c40fd76ecd39a50a8c630ae69dc72891" +checksum = "809e8dc61f6de73b46c85f4c96486310fe304c434cfa43669d7b40f711150908" dependencies = [ - "getrandom", - "libredox", - "thiserror", + "aho-corasick", + "memchr", + "regex-syntax 0.8.5", ] +[[package]] +name = "regex-syntax" +version = "0.6.29" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1" + +[[package]] +name = "regex-syntax" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c" + [[package]] name = "rowan" version = "0.15.15" @@ -1654,7 +1914,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "32a58fa8a7ccff2aec4f39cc45bf5f985cec7125ab271cf681c279fd00192b49" dependencies = [ "countme", - "hashbrown", + "hashbrown 0.14.5", "memoffset", "rustc-hash 1.1.0", "text-size", @@ -1664,7 +1924,6 @@ dependencies = [ name = "rust-analyzer" version = "0.0.0" dependencies = [ - "always-assert", "anyhow", "base64", "cargo_metadata", @@ -1680,10 +1939,11 @@ dependencies = [ "ide-completion", "ide-db", "ide-ssr", + "indexmap", "intern", - "itertools", + "itertools 0.14.0", "load-cargo", - "lsp-server 0.7.7", + "lsp-server 0.7.8 (registry+https://github.com/rust-lang/crates.io-index)", "lsp-types", "memchr", "mimalloc", @@ -1698,7 +1958,7 @@ dependencies = [ "profile", "project-model", "rayon", - "rustc-hash 2.0.0", + "rustc-hash 2.1.1", "scip", "semver", "serde", @@ -1739,60 +1999,84 @@ checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2" [[package]] name = "rustc-hash" -version = "2.0.0" +version = "2.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "583034fd73374156e66797ed8e5b0d5690409c9226b22d87cb7f19821c05d152" +checksum = "357703d41365b4b27c590e3ed91eabb1b663f07c4c084095e60cbed4362dff0d" + +[[package]] +name = "rustc-literal-escaper" +version = "0.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0041b6238913c41fe704213a4a9329e2f685a156d1781998128b4149c230ad04" [[package]] name = "rustc-stable-hash" -version = "0.1.1" +version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2febf9acc5ee5e99d1ad0afcdbccc02d87aa3f857a1f01f825b80eacf8edfcd1" +checksum = "781442f29170c5c93b7185ad559492601acdc71d5bb0706f5868094f45cfcd08" [[package]] name = "rustc_apfloat" -version = "0.2.1+llvm-462a31f5a5ab" +version = "0.2.2+llvm-462a31f5a5ab" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "886d94c63c812a8037c4faca2607453a0fa4cf82f734665266876b022244543f" +checksum = "121e2195ff969977a4e2b5c9965ea867fce7e4cb5aee5b09dee698a7932d574f" dependencies = [ - "bitflags 1.3.2", + "bitflags 2.9.0", "smallvec", ] +[[package]] +name = "rustversion" +version = "1.0.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eded382c5f5f786b989652c49544c4877d9f015cc22e145a5ea8ea66c2921cd2" + [[package]] name = "ryu" -version = "1.0.18" +version = "1.0.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f3cb5ba0dc43242ce17de99c180e96db90b235b8a9fdc9543c96d2209116bd9f" +checksum = "28d3b2b1366ec20994f1fd18c3c594f05c5dd4bc44d8bb0c1c632c8d6829481f" [[package]] name = "salsa" -version = "0.0.0" +version = "0.21.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6f80d5cf3c3fcab2cef898012f242a670477a1baa609267376af9cb4409026c5" dependencies = [ - "dissimilar", - "expect-test", + "boxcar", + "crossbeam-queue", + "dashmap", + "hashbrown 0.15.2", + "hashlink", "indexmap", - "itertools", - "linked-hash-map", - "lock_api", - "oorandom", "parking_lot", - "rand", - "rustc-hash 2.0.0", + "portable-atomic", + "rayon", + "rustc-hash 2.1.1", + "salsa-macro-rules", "salsa-macros", "smallvec", + "thin-vec", "tracing", - "triomphe", ] +[[package]] +name = "salsa-macro-rules" +version = "0.21.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "05303d72606fbf2b9c9523cda2039bb8ecb00304027a3cd7e52b02a65c7d9185" + [[package]] name = "salsa-macros" -version = "0.0.0" +version = "0.21.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eb2f0e2a30c65cb3cd63440c491dde68d9af7e1be2b77832ac7057141107db50" dependencies = [ "heck", "proc-macro2", "quote", "syn", + "synstructure", ] [[package]] @@ -1806,9 +2090,9 @@ dependencies = [ [[package]] name = "scip" -version = "0.5.1" +version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8dfafd2fa14c6237fa1fc4310f739d02fa915d92977fa069426591f1de046f81" +checksum = "fb2b449a5e4660ce817676a0871cd1b4e2ff1023e33a1ac046670fa594b543a2" dependencies = [ "protobuf", ] @@ -1827,27 +2111,27 @@ checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" [[package]] name = "semver" -version = "1.0.23" +version = "1.0.26" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "61697e0a1c7e512e84a621326239844a24d8207b4669b41bc18b32ea5cbf988b" +checksum = "56e6fa9c48d24d85fb3de5ad847117517440f6beceb7798af16b4a87d616b8d0" dependencies = [ "serde", ] [[package]] name = "serde" -version = "1.0.216" +version = "1.0.219" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b9781016e935a97e8beecf0c933758c97a5520d32930e460142b4cd80c6338e" +checksum = "5f0e2c6ed6606019b4e29e69dbaba95b11854410e5347d525002456dbbb786b6" dependencies = [ "serde_derive", ] [[package]] name = "serde_derive" -version = "1.0.216" +version = "1.0.219" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "46f859dbbf73865c6627ed570e78961cd3ac92407a2d117204c49232485da55e" +checksum = "5b0276cf7f2c73365f7157c8123c21cd9a50fbbd844757af28ca1f5925fc2a00" dependencies = [ "proc-macro2", "quote", @@ -1856,9 +2140,9 @@ dependencies = [ [[package]] name = "serde_json" -version = "1.0.124" +version = "1.0.140" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "66ad62847a56b3dba58cc891acd13884b9c61138d330c0d7b6181713d4fce38d" +checksum = "20068b6e96dc6c9bd23e01df8827e6c7e1f2fddd43c21810382803c136b99373" dependencies = [ "indexmap", "itoa", @@ -1869,9 +2153,9 @@ dependencies = [ [[package]] name = "serde_repr" -version = "0.1.19" +version = "0.1.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6c64451ba24fc7a6a2d60fc75dd9c83c90903b19028d4eff35e88fc1e86564e9" +checksum = "175ee3e80ae9982737ca543e96133087cbd9a485eecc3bc4de9c1a37b47ea59c" dependencies = [ "proc-macro2", "quote", @@ -1880,9 +2164,9 @@ dependencies = [ [[package]] name = "serde_spanned" -version = "0.6.7" +version = "0.6.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eb5b1b31579f3811bf615c144393417496f152e12ac8b7663bf664f4a815306d" +checksum = "87607cb1398ed59d48732e575a4c28a7a8ebf2454b964fe3f224f2afc07909e1" dependencies = [ "serde", ] @@ -1904,9 +2188,9 @@ checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64" [[package]] name = "smallvec" -version = "1.13.2" +version = "1.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3c5e1a9a646d36c3599cd173a41282daf47c44583ad367b8e6837255952e5c67" +checksum = "7fcf8323ef1faaee30a44a340193b1ac6814fd9b7b4e88e9d4519a3e4abe1cfd" [[package]] name = "smol_str" @@ -1922,9 +2206,9 @@ dependencies = [ name = "span" version = "0.0.0" dependencies = [ - "hashbrown", + "hashbrown 0.14.5", "la-arena 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", - "rustc-hash 2.0.0", + "rustc-hash 2.1.1", "salsa", "stdx", "syntax", @@ -1932,13 +2216,19 @@ dependencies = [ "vfs", ] +[[package]] +name = "stable_deref_trait" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3" + [[package]] name = "stdx" version = "0.0.0" dependencies = [ "backtrace", "crossbeam-channel", - "itertools", + "itertools 0.14.0", "jod-thread", "libc", "miow", @@ -1948,9 +2238,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.87" +version = "2.0.100" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "25aa4ce346d03a6dcd68dd8b4010bcb74e54e62c90c573f394c46eae99aba32d" +checksum = "b09a44accad81e1ba1cd74a32461ba89dee89095ba17b32f5d03683b1b1fc2a0" dependencies = [ "proc-macro2", "quote", @@ -1972,16 +2262,14 @@ dependencies = [ name = "syntax" version = "0.0.0" dependencies = [ - "cov-mark", "either", "expect-test", - "indexmap", - "itertools", + "itertools 0.14.0", "parser", - "ra-ap-rustc_lexer", "rayon", "rowan", - "rustc-hash 2.0.0", + "rustc-hash 2.1.1", + "rustc-literal-escaper", "rustc_apfloat", "smol_str", "stdx", @@ -1996,12 +2284,11 @@ version = "0.0.0" dependencies = [ "intern", "parser", - "rustc-hash 2.0.0", + "rustc-hash 2.1.1", "span", "stdx", "syntax", "test-utils", - "tracing", "tt", ] @@ -2019,10 +2306,12 @@ dependencies = [ "cfg", "hir-expand", "intern", - "rustc-hash 2.0.0", + "paths", + "rustc-hash 2.1.1", "span", "stdx", "test-utils", + "triomphe", "tt", ] @@ -2033,10 +2322,9 @@ dependencies = [ "dissimilar", "paths", "profile", - "rustc-hash 2.0.0", + "rustc-hash 2.1.1", "stdx", "text-size", - "tracing", ] [[package]] @@ -2045,20 +2333,46 @@ version = "1.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f18aa187839b2bdb1ad2fa35ead8c4c2976b64e4363c386d45ac0f7ee85c9233" +[[package]] +name = "thin-vec" +version = "0.2.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "144f754d318415ac792f9d69fc87abbbfc043ce2ef041c60f16ad828f638717d" + [[package]] name = "thiserror" -version = "1.0.63" +version = "1.0.69" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6aaf5339b578ea85b50e080feb250a3e8ae8cfcdff9a461c9ec2904bc923f52" +dependencies = [ + "thiserror-impl 1.0.69", +] + +[[package]] +name = "thiserror" +version = "2.0.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "567b8a2dae586314f7be2a752ec7474332959c6460e02bde30d702a66d488708" +dependencies = [ + "thiserror-impl 2.0.12", +] + +[[package]] +name = "thiserror-impl" +version = "1.0.69" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c0342370b38b6a11b6cc11d6a805569958d54cfa061a29969c3b5ce2ea405724" +checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1" dependencies = [ - "thiserror-impl", + "proc-macro2", + "quote", + "syn", ] [[package]] name = "thiserror-impl" -version = "1.0.63" +version = "2.0.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a4558b58466b9ad7ca0f102865eccc95938dca1a74a856f2b57b6629050da261" +checksum = "7f7cf42b4507d8ea322120659672cf1b9dbb93f8f2d4ecfd6e51350ff5b17a1d" dependencies = [ "proc-macro2", "quote", @@ -2108,9 +2422,9 @@ dependencies = [ [[package]] name = "time" -version = "0.3.36" +version = "0.3.40" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5dfd88e563464686c916c7e46e623e520ddc6d79fa6641390f2e3fa86e83e885" +checksum = "9d9c75b47bdff86fa3334a3db91356b8d7d86a9b839dab7d0bdc5c3d3a077618" dependencies = [ "deranged", "itoa", @@ -2125,40 +2439,35 @@ dependencies = [ [[package]] name = "time-core" -version = "0.1.2" +version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ef927ca75afb808a4d64dd374f00a2adf8d0fcff8e7b184af886c3c87ec4a3f3" +checksum = "c9e9a38711f559d9e3ce1cdb06dd7c5b8ea546bc90052da6d06bb76da74bb07c" [[package]] name = "time-macros" -version = "0.2.18" +version = "0.2.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3f252a68540fde3a3877aeea552b832b40ab9a69e318efd078774a01ddee1ccf" +checksum = "29aa485584182073ed57fd5004aa09c371f021325014694e432313345865fd04" dependencies = [ "num-conv", "time-core", ] [[package]] -name = "tinyvec" -version = "1.8.0" +name = "tinystr" +version = "0.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "445e881f4f6d382d5f27c034e25eb92edd7c784ceab92a0937db7f2e9471b938" +checksum = "9117f5d4db391c1cf6927e7bea3db74b9a1c1add8f7eda9ffd5364f40f57b82f" dependencies = [ - "tinyvec_macros", + "displaydoc", + "zerovec", ] -[[package]] -name = "tinyvec_macros" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" - [[package]] name = "toml" -version = "0.8.19" +version = "0.8.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a1ed1f98e3fdc28d6d910e6737ae6ab1a93bf1985935a1193e68f93eeb68d24e" +checksum = "cd87a5cdd6ffab733b2f74bc4fd7ee5fff6634124999ac278c35fc78c6120148" dependencies = [ "serde", "serde_spanned", @@ -2177,9 +2486,9 @@ dependencies = [ [[package]] name = "toml_edit" -version = "0.22.20" +version = "0.22.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "583c44c02ad26b0c3f3066fe629275e50627026c51ac2e595cca4c230ce1ce1d" +checksum = "17b4795ff5edd201c7cd6dca065ae59972ce77d1b80fa0a84d94950ece7d1474" dependencies = [ "indexmap", "serde", @@ -2198,9 +2507,9 @@ dependencies = [ [[package]] name = "tracing" -version = "0.1.40" +version = "0.1.41" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c3523ab5a71916ccf420eebdf5521fcef02141234bbc0b8a49f2fdc4544364ef" +checksum = "784e0ac535deb450455cbfa28a6f0df145ea1bb7ae51b821cf5e7927fdcfbdd0" dependencies = [ "pin-project-lite", "tracing-attributes", @@ -2209,9 +2518,9 @@ dependencies = [ [[package]] name = "tracing-attributes" -version = "0.1.27" +version = "0.1.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7" +checksum = "395ae124c09f9e6918a2310af6038fba074bcf474ac352496d5910dd59a2226d" dependencies = [ "proc-macro2", "quote", @@ -2220,9 +2529,9 @@ dependencies = [ [[package]] name = "tracing-core" -version = "0.1.32" +version = "0.1.33" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c06d3da6113f116aaee68e4d601191614c9053067f9ab7f6edbcb161237daa54" +checksum = "e672c95779cf947c5311f83787af4fa8fffd12fb27e4993211a84bdfd9610f9c" dependencies = [ "once_cell", "valuable", @@ -2241,24 +2550,30 @@ dependencies = [ [[package]] name = "tracing-subscriber" -version = "0.3.18" +version = "0.3.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ad0f048c97dbd9faa9b7df56362b8ebcaa52adb06b498c050d2f4e32f90a7a8b" +checksum = "e8189decb5ac0fa7bc8b96b7cb9b2701d60d48805aca84a238004d665fcc4008" dependencies = [ + "matchers", + "nu-ansi-term 0.46.0", + "once_cell", + "regex", "sharded-slab", + "smallvec", "thread_local", "time", + "tracing", "tracing-core", "tracing-log", ] [[package]] name = "tracing-tree" -version = "0.3.1" +version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b56c62d2c80033cb36fae448730a2f2ef99410fe3ecbffc916681a32f6807dbe" +checksum = "f459ca79f1b0d5f71c54ddfde6debfc59c8b6eeb46808ae492077f739dc7b49c" dependencies = [ - "nu-ansi-term", + "nu-ansi-term 0.50.1", "tracing-core", "tracing-log", "tracing-subscriber", @@ -2295,51 +2610,33 @@ checksum = "a3e5df347f0bf3ec1d670aad6ca5c6a1859cd9ea61d2113125794654ccced68f" [[package]] name = "unicase" -version = "2.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f7d2d4dafb69621809a81864c9c1b864479e1235c0dd4e199924b9742439ed89" -dependencies = [ - "version_check", -] - -[[package]] -name = "unicode-bidi" -version = "0.3.15" +version = "2.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "08f95100a766bf4f8f28f90d77e0a5461bbdb219042e7679bebe79004fed8d75" +checksum = "75b844d17643ee918803943289730bec8aac480150456169e647ed0b576ba539" [[package]] name = "unicode-ident" -version = "1.0.12" +version = "1.0.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b" - -[[package]] -name = "unicode-normalization" -version = "0.1.23" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a56d1686db2308d901306f92a263857ef59ea39678a5458e7cb17f01415101f5" -dependencies = [ - "tinyvec", -] +checksum = "5a5f39404a5da50712a4c1eecf25e90dd62b613502b7e925fd4e4d19b5c96512" [[package]] name = "unicode-properties" -version = "0.1.1" +version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e4259d9d4425d9f0661581b804cb85fe66a4c631cadd8f490d1c13a35d5d9291" +checksum = "e70f2a8b45122e719eb623c01822704c4e0907e7e426a05927e1a1cfff5b75d0" [[package]] name = "unicode-xid" -version = "0.2.4" +version = "0.2.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f962df74c8c05a667b5ee8bcf162993134c104e96440b663c8daa176dc772d8c" +checksum = "ebc1c04c71510c7f702b52b7c350734c9ff1295c464a03335b00bb84fc54f853" [[package]] name = "url" -version = "2.5.2" +version = "2.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "22784dbdf76fdde8af1aeda5622b546b422b6fc585325248a2bf9f5e41e94d6c" +checksum = "32f8b686cadd1473f4bd0117a5d28d36b1ade384ea9b5069a1c40aefed7fda60" dependencies = [ "form_urlencoded", "idna", @@ -2348,16 +2645,22 @@ dependencies = [ ] [[package]] -name = "valuable" -version = "0.1.0" +name = "utf16_iter" +version = "1.0.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "830b7e5d4d90034032940e4ace0d9a9a057e7a45cd94e6c007832e39edb82f6d" +checksum = "c8232dd3cdaed5356e0f716d285e4b40b932ac434100fe9b7e0e8e935b9e6246" [[package]] -name = "version_check" -version = "0.9.5" +name = "utf8_iter" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6c140620e7ffbb22c2dee59cafe6084a59b5ffc27a8859a5f0d494b5d52b6be" + +[[package]] +name = "valuable" +version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a" +checksum = "ba73ea9cf16a25df0c8caa16c51acb937d5712a8429db78a3ee29d5dcacd3a65" [[package]] name = "vfs" @@ -2368,7 +2671,7 @@ dependencies = [ "indexmap", "nohash-hasher", "paths", - "rustc-hash 2.0.0", + "rustc-hash 2.1.1", "stdx", "tracing", ] @@ -2381,7 +2684,7 @@ dependencies = [ "notify", "paths", "rayon", - "rustc-hash 2.0.0", + "rustc-hash 2.1.1", "stdx", "tracing", "vfs", @@ -2404,6 +2707,22 @@ version = "0.11.0+wasi-snapshot-preview1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" +[[package]] +name = "winapi" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" +dependencies = [ + "winapi-i686-pc-windows-gnu", + "winapi-x86_64-pc-windows-gnu", +] + +[[package]] +name = "winapi-i686-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" + [[package]] name = "winapi-util" version = "0.1.9" @@ -2413,33 +2732,74 @@ dependencies = [ "windows-sys 0.59.0", ] +[[package]] +name = "winapi-x86_64-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" + [[package]] name = "windows" -version = "0.56.0" +version = "0.58.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1de69df01bdf1ead2f4ac895dc77c9351aefff65b2f3db429a343f9cbf05e132" +checksum = "dd04d41d93c4992d421894c18c8b43496aa748dd4c081bac0dc93eb0489272b6" dependencies = [ - "windows-core", + "windows-core 0.58.0", "windows-targets 0.52.6", ] +[[package]] +name = "windows" +version = "0.59.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f919aee0a93304be7f62e8e5027811bbba96bcb1de84d6618be56e43f8a32a1" +dependencies = [ + "windows-core 0.59.0", + "windows-targets 0.53.0", +] + [[package]] name = "windows-core" -version = "0.56.0" +version = "0.58.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4698e52ed2d08f8658ab0c39512a7c00ee5fe2688c65f8c0a4f06750d729f2a6" +checksum = "6ba6d44ec8c2591c134257ce647b7ea6b20335bf6379a27dac5f1641fcf59f99" dependencies = [ - "windows-implement", - "windows-interface", - "windows-result", + "windows-implement 0.58.0", + "windows-interface 0.58.0", + "windows-result 0.2.0", + "windows-strings 0.1.0", "windows-targets 0.52.6", ] +[[package]] +name = "windows-core" +version = "0.59.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "810ce18ed2112484b0d4e15d022e5f598113e220c53e373fb31e67e21670c1ce" +dependencies = [ + "windows-implement 0.59.0", + "windows-interface 0.59.0", + "windows-result 0.3.1", + "windows-strings 0.3.1", + "windows-targets 0.53.0", +] + +[[package]] +name = "windows-implement" +version = "0.58.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2bbd5b46c938e506ecbce286b6628a02171d56153ba733b6c741fc627ec9579b" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + [[package]] name = "windows-implement" -version = "0.56.0" +version = "0.59.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f6fc35f58ecd95a9b71c4f2329b911016e6bec66b3f2e6a4aad86bd2e99e2f9b" +checksum = "83577b051e2f49a058c308f17f273b570a6a758386fc291b5f6a934dd84e48c1" dependencies = [ "proc-macro2", "quote", @@ -2448,24 +2808,69 @@ dependencies = [ [[package]] name = "windows-interface" -version = "0.56.0" +version = "0.58.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "08990546bf4edef8f431fa6326e032865f27138718c587dc21bc0265bbcb57cc" +checksum = "053c4c462dc91d3b1504c6fe5a726dd15e216ba718e84a0e46a88fbe5ded3515" dependencies = [ "proc-macro2", "quote", "syn", ] +[[package]] +name = "windows-interface" +version = "0.59.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cb26fd936d991781ea39e87c3a27285081e3c0da5ca0fcbc02d368cc6f52ff01" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "windows-link" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6dccfd733ce2b1753b03b6d3c65edf020262ea35e20ccdf3e288043e6dd620e3" + +[[package]] +name = "windows-result" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d1043d8214f791817bab27572aaa8af63732e11bf84aa21a45a78d6c317ae0e" +dependencies = [ + "windows-targets 0.52.6", +] + [[package]] name = "windows-result" -version = "0.1.2" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "06374efe858fab7e4f881500e6e86ec8bc28f9462c47e5a9941a0142ad86b189" +dependencies = [ + "windows-link", +] + +[[package]] +name = "windows-strings" +version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5e383302e8ec8515204254685643de10811af0ed97ea37210dc26fb0032647f8" +checksum = "4cd9b125c486025df0eabcb585e62173c6c9eddcec5d117d3b6e8c30e2ee4d10" dependencies = [ + "windows-result 0.2.0", "windows-targets 0.52.6", ] +[[package]] +name = "windows-strings" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87fa48cc5d406560701792be122a10132491cff9d0aeb23583cc2dcafc847319" +dependencies = [ + "windows-link", +] + [[package]] name = "windows-sys" version = "0.48.0" @@ -2517,13 +2922,29 @@ dependencies = [ "windows_aarch64_gnullvm 0.52.6", "windows_aarch64_msvc 0.52.6", "windows_i686_gnu 0.52.6", - "windows_i686_gnullvm", + "windows_i686_gnullvm 0.52.6", "windows_i686_msvc 0.52.6", "windows_x86_64_gnu 0.52.6", "windows_x86_64_gnullvm 0.52.6", "windows_x86_64_msvc 0.52.6", ] +[[package]] +name = "windows-targets" +version = "0.53.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b1e4c7e8ceaaf9cb7d7507c974735728ab453b67ef8f18febdd7c11fe59dca8b" +dependencies = [ + "windows_aarch64_gnullvm 0.53.0", + "windows_aarch64_msvc 0.53.0", + "windows_i686_gnu 0.53.0", + "windows_i686_gnullvm 0.53.0", + "windows_i686_msvc 0.53.0", + "windows_x86_64_gnu 0.53.0", + "windows_x86_64_gnullvm 0.53.0", + "windows_x86_64_msvc 0.53.0", +] + [[package]] name = "windows_aarch64_gnullvm" version = "0.48.5" @@ -2536,6 +2957,12 @@ version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.53.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "86b8d5f90ddd19cb4a147a5fa63ca848db3df085e25fee3cc10b39b6eebae764" + [[package]] name = "windows_aarch64_msvc" version = "0.48.5" @@ -2548,6 +2975,12 @@ version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" +[[package]] +name = "windows_aarch64_msvc" +version = "0.53.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c7651a1f62a11b8cbd5e0d42526e55f2c99886c77e007179efff86c2b137e66c" + [[package]] name = "windows_i686_gnu" version = "0.48.5" @@ -2560,12 +2993,24 @@ version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" +[[package]] +name = "windows_i686_gnu" +version = "0.53.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c1dc67659d35f387f5f6c479dc4e28f1d4bb90ddd1a5d3da2e5d97b42d6272c3" + [[package]] name = "windows_i686_gnullvm" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" +[[package]] +name = "windows_i686_gnullvm" +version = "0.53.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ce6ccbdedbf6d6354471319e781c0dfef054c81fbc7cf83f338a4296c0cae11" + [[package]] name = "windows_i686_msvc" version = "0.48.5" @@ -2578,6 +3023,12 @@ version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" +[[package]] +name = "windows_i686_msvc" +version = "0.53.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "581fee95406bb13382d2f65cd4a908ca7b1e4c2f1917f143ba16efe98a589b5d" + [[package]] name = "windows_x86_64_gnu" version = "0.48.5" @@ -2590,6 +3041,12 @@ version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" +[[package]] +name = "windows_x86_64_gnu" +version = "0.53.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2e55b5ac9ea33f2fc1716d1742db15574fd6fc8dadc51caab1c16a3d3b4190ba" + [[package]] name = "windows_x86_64_gnullvm" version = "0.48.5" @@ -2602,6 +3059,12 @@ version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.53.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0a6e035dd0599267ce1ee132e51c27dd29437f63325753051e71dd9e42406c57" + [[package]] name = "windows_x86_64_msvc" version = "0.48.5" @@ -2614,11 +3077,17 @@ version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" +[[package]] +name = "windows_x86_64_msvc" +version = "0.53.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "271414315aff87387382ec3d271b52d7ae78726f5d44ac98b4f4030c91880486" + [[package]] name = "winnow" -version = "0.6.18" +version = "0.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "68a9bda4691f099d435ad181000724da8e5899daa10713c2d432552b9ccd3a6f" +checksum = "0e7f4ea97f6f78012141bcdb6a216b2609f0979ada50b20ca5b52dde2eac2bb1" dependencies = [ "memchr", ] @@ -2629,6 +3098,18 @@ version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "23f6174b2566cc4a74f95e1367ec343e7fa80c93cc8087f5c4a3d6a1088b2118" +[[package]] +name = "write16" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d1890f4022759daae28ed4fe62859b1236caebfc61ede2f63ed4e695f3f6d936" + +[[package]] +name = "writeable" +version = "0.5.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e9df38ee2d2c3c5948ea468a8406ff0db0b29ae1ffde1bcf20ef305bcc95c51" + [[package]] name = "xflags" version = "0.3.2" @@ -2668,7 +3149,7 @@ dependencies = [ "edition", "either", "flate2", - "itertools", + "itertools 0.14.0", "proc-macro2", "quote", "stdx", @@ -2681,20 +3162,66 @@ dependencies = [ ] [[package]] -name = "zerocopy" -version = "0.7.35" +name = "yoke" +version = "0.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "120e6aef9aa629e3d4f52dc8cc43a015c7724194c97dfaf45180d2daf2b77f40" +dependencies = [ + "serde", + "stable_deref_trait", + "yoke-derive", + "zerofrom", +] + +[[package]] +name = "yoke-derive" +version = "0.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2380878cad4ac9aac1e2435f3eb4020e8374b5f13c296cb75b4620ff8e229154" +dependencies = [ + "proc-macro2", + "quote", + "syn", + "synstructure", +] + +[[package]] +name = "zerofrom" +version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1b9b4fd18abc82b8136838da5d50bae7bdea537c574d8dc1a34ed098d6c166f0" +checksum = "50cc42e0333e05660c3587f3bf9d0478688e15d870fab3346451ce7f8c9fbea5" dependencies = [ - "byteorder", - "zerocopy-derive", + "zerofrom-derive", ] [[package]] -name = "zerocopy-derive" -version = "0.7.35" +name = "zerofrom-derive" +version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fa4f8080344d4671fb4e831a13ad1e68092748387dfc4f55e356242fae12ce3e" +checksum = "d71e5d6e06ab090c67b5e44993ec16b72dcbaabc526db883a360057678b48502" +dependencies = [ + "proc-macro2", + "quote", + "syn", + "synstructure", +] + +[[package]] +name = "zerovec" +version = "0.10.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aa2b893d79df23bfb12d5461018d408ea19dfafe76c2c7ef6d4eba614f8ff079" +dependencies = [ + "yoke", + "zerofrom", + "zerovec-derive", +] + +[[package]] +name = "zerovec-derive" +version = "0.10.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6eafa6dfb17584ea3e2bd6e76e0cc15ad7af12b09abdd1ca55961bed9b1063c6" dependencies = [ "proc-macro2", "quote", @@ -2703,13 +3230,17 @@ dependencies = [ [[package]] name = "zip" -version = "0.6.6" +version = "2.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "760394e246e4c28189f19d488c058bf16f564016aefac5d32bb1f3b51d5e9261" +checksum = "fabe6324e908f85a1c52063ce7aa26b68dcb7eb6dbc83a2d148403c9bc3eba50" dependencies = [ - "byteorder", + "arbitrary", "crc32fast", "crossbeam-utils", + "displaydoc", "flate2", + "indexmap", + "memchr", + "thiserror 2.0.12", "time", ] diff --git a/src/tools/rust-analyzer/Cargo.toml b/src/tools/rust-analyzer/Cargo.toml index ce2d66000e396..c4c2fdf34bae9 100644 --- a/src/tools/rust-analyzer/Cargo.toml +++ b/src/tools/rust-analyzer/Cargo.toml @@ -4,8 +4,8 @@ exclude = ["crates/proc-macro-srv/proc-macro-test/imp"] resolver = "2" [workspace.package] -rust-version = "1.84" -edition = "2021" +rust-version = "1.86" +edition = "2024" license = "MIT OR Apache-2.0" authors = ["rust-analyzer team"] repository = "https://github.com/rust-lang/rust-analyzer" @@ -46,7 +46,7 @@ debug = 2 # ungrammar = { path = "../ungrammar" } -# rust-analyzer-salsa = { path = "../salsa" } +# salsa = { path = "../salsa" } [workspace.dependencies] # local crates @@ -72,7 +72,7 @@ proc-macro-srv = { path = "./crates/proc-macro-srv", version = "0.0.0" } proc-macro-srv-cli = { path = "./crates/proc-macro-srv-cli", version = "0.0.0" } profile = { path = "./crates/profile", version = "0.0.0" } project-model = { path = "./crates/project-model", version = "0.0.0" } -ra-salsa = { path = "./crates/ra-salsa", package = "salsa", version = "0.0.0" } +query-group = { package = "query-group-macro", path = "./crates/query-group-macro", version = "0.0.0" } span = { path = "./crates/span", version = "0.0.0" } stdx = { path = "./crates/stdx", version = "0.0.0" } syntax = { path = "./crates/syntax", version = "0.0.0" } @@ -85,71 +85,71 @@ vfs-notify = { path = "./crates/vfs-notify", version = "0.0.0" } vfs = { path = "./crates/vfs", version = "0.0.0" } edition = { path = "./crates/edition", version = "0.0.0" } -ra-ap-rustc_hashes = { version = "0.100", default-features = false } -ra-ap-rustc_lexer = { version = "0.100", default-features = false } -ra-ap-rustc_parse_format = { version = "0.100", default-features = false } -ra-ap-rustc_index = { version = "0.100", default-features = false } -ra-ap-rustc_abi = { version = "0.100", default-features = false } -ra-ap-rustc_pattern_analysis = { version = "0.100", default-features = false } +ra-ap-rustc_lexer = { version = "0.110", default-features = false } +ra-ap-rustc_parse_format = { version = "0.110", default-features = false } +ra-ap-rustc_index = { version = "0.110", default-features = false } +ra-ap-rustc_abi = { version = "0.110", default-features = false } +ra-ap-rustc_pattern_analysis = { version = "0.110", default-features = false } # local crates that aren't published to crates.io. These should not have versions. # in-tree crates that are published separately and follow semver. See lib/README.md line-index = { version = "0.1.2" } la-arena = { version = "0.3.1" } -lsp-server = { version = "0.7.6" } +lsp-server = { version = "0.7.8" } # non-local crates -anyhow = "1.0.75" -arrayvec = "0.7.4" -bitflags = "2.4.1" -cargo_metadata = "0.18.1" -camino = "1.1.6" -chalk-solve = { version = "0.100.0", default-features = false } -chalk-ir = "0.100.0" -chalk-recursive = { version = "0.100.0", default-features = false } -chalk-derive = "0.100.0" -crossbeam-channel = "0.5.8" -dissimilar = "1.0.7" +anyhow = "1.0.97" +arrayvec = "0.7.6" +bitflags = "2.9.0" +cargo_metadata = "0.19.2" +camino = "1.1.9" +chalk-solve = { version = "0.102.0", default-features = false } +chalk-ir = "0.102.0" +chalk-recursive = { version = "0.102.0", default-features = false } +chalk-derive = "0.102.0" +crossbeam-channel = "0.5.15" +dissimilar = "1.0.10" dot = "0.1.4" -either = "1.9.0" -expect-test = "1.4.0" -hashbrown = { version = "0.14", features = [ - "inline-more", -], default-features = false } -indexmap = "2.1.0" -itertools = "0.12.0" -libc = "0.2.150" -libloading = "0.8.0" -memmap2 = "0.5.4" +either = "1.15.0" +expect-test = "1.5.1" +indexmap = { version = "2.8.0", features = ["serde"] } +itertools = "0.14.0" +libc = "0.2.171" +libloading = "0.8.6" +memmap2 = "0.9.5" nohash-hasher = "0.2.0" -oorandom = "11.1.3" -object = { version = "0.33.0", default-features = false, features = [ +oorandom = "11.1.5" +object = { version = "0.36.7", default-features = false, features = [ "std", "read_core", "elf", "macho", "pe", ] } -process-wrap = { version = "8.0.2", features = ["std"] } +process-wrap = { version = "8.2.0", features = ["std"] } pulldown-cmark-to-cmark = "10.0.4" -pulldown-cmark = { version = "0.9.0", default-features = false } -rayon = "1.8.0" -rustc-hash = "2.0.0" -semver = "1.0.14" -serde = { version = "1.0.192" } -serde_derive = { version = "1.0.192" } -serde_json = "1.0.108" -smallvec = { version = "1.10.0", features = [ +pulldown-cmark = { version = "0.9.6", default-features = false } +rayon = "1.10.0" +rowan = "=0.15.15" +salsa = { version = "0.21.1", default-features = false, features = ["rayon","salsa_unstable"] } +salsa-macros = "0.21.1" +semver = "1.0.26" +serde = { version = "1.0.219" } +serde_derive = { version = "1.0.219" } +serde_json = "1.0.140" +rustc-hash = "2.1.1" +rustc-literal-escaper = "0.0.2" +smallvec = { version = "1.14.0", features = [ "const_new", "union", "const_generics", ] } smol_str = "0.3.2" text-size = "1.1.1" -tracing = "0.1.40" -tracing-tree = "0.3.0" -tracing-subscriber = { version = "0.3.18", default-features = false, features = [ +tracing = "0.1.41" +tracing-tree = "0.4.0" +tracing-subscriber = { version = "0.3.19", default-features = false, features = [ "registry", "fmt", "local-time", @@ -158,12 +158,15 @@ tracing-subscriber = { version = "0.3.18", default-features = false, features = "tracing-log", ] } triomphe = { version = "0.1.14", default-features = false, features = ["std"] } -url = "2.3.1" -xshell = "0.2.5" - +url = "2.5.4" +xshell = "0.2.7" # We need to freeze the version of the crate, as the raw-api feature is considered unstable -dashmap = { version = "=5.5.3", features = ["raw-api"] } +dashmap = { version = "=6.1.0", features = ["raw-api", "inline"] } +# We need to freeze the version of the crate, as it needs to match with dashmap +hashbrown = { version = "0.14.0", features = [ + "inline-more", +], default-features = false } [workspace.lints.rust] # remember to update RUSTFLAGS in ci.yml if you add something here @@ -171,6 +174,7 @@ dashmap = { version = "=5.5.3", features = ["raw-api"] } elided_lifetimes_in_paths = "warn" explicit_outlives_requirements = "warn" unsafe_op_in_unsafe_fn = "warn" +unexpected_cfgs = { level = "warn", check-cfg = ['cfg(bootstrap)'] } unused_extern_crates = "warn" unused_lifetimes = "warn" unreachable_pub = "warn" diff --git a/src/tools/rust-analyzer/bench_data/numerous_macro_rules b/src/tools/rust-analyzer/bench_data/numerous_macro_rules index 7610a3ae1e3cb..60997065b2408 100644 --- a/src/tools/rust-analyzer/bench_data/numerous_macro_rules +++ b/src/tools/rust-analyzer/bench_data/numerous_macro_rules @@ -528,7 +528,7 @@ macro_rules! __ra_macro_fixture526 {($expr : expr )=>{|| -> _ { Some ($expr )}( macro_rules! __ra_macro_fixture527 {($($arg : tt )*)=>($crate :: io :: _print ($crate :: format_args ! ($($arg )*))); } macro_rules! __ra_macro_fixture528 {($fmt : literal , $($tt : tt ),*)=>{ mbe :: ExpandError :: ProcMacroError ( tt :: ExpansionError :: Unknown ( format ! ($fmt , $($tt ),*)))}; ($fmt : literal )=>{ mbe :: ExpandError :: ProcMacroError ( tt :: ExpansionError :: Unknown ($fmt . to_string ()))}} macro_rules! __ra_macro_fixture529 {($($tt : tt )* )=>{$crate :: quote :: IntoTt :: to_subtree ($crate :: __quote ! ($($tt )*))}} -macro_rules! __ra_macro_fixture530 {()=>{ Vec ::< tt :: TokenTree >:: new ()}; (@ SUBTREE $delim : ident $($tt : tt )* )=>{{ let children = $crate :: __quote ! ($($tt )*); tt :: Subtree { delimiter : Some ( tt :: Delimiter { kind : tt :: DelimiterKind ::$delim , id : tt :: TokenId :: unspecified (), }), token_trees : $crate :: quote :: IntoTt :: to_tokens ( children ), }}}; (@ PUNCT $first : literal )=>{{ vec ! [ tt :: Leaf :: Punct ( tt :: Punct { char : $first , spacing : tt :: Spacing :: Alone , id : tt :: TokenId :: unspecified (), }). into ()]}}; (@ PUNCT $first : literal , $sec : literal )=>{{ vec ! [ tt :: Leaf :: Punct ( tt :: Punct { char : $first , spacing : tt :: Spacing :: Joint , id : tt :: TokenId :: unspecified (), }). into (), tt :: Leaf :: Punct ( tt :: Punct { char : $sec , spacing : tt :: Spacing :: Alone , id : tt :: TokenId :: unspecified (), }). into ()]}}; (# $first : ident $($tail : tt )* )=>{{ let token = $crate :: quote :: ToTokenTree :: to_token ($first ); let mut tokens = vec ! [ token . into ()]; let mut tail_tokens = $crate :: quote :: IntoTt :: to_tokens ($crate :: __quote ! ($($tail )*)); tokens . append (& mut tail_tokens ); tokens }}; (## $first : ident $($tail : tt )* )=>{{ let mut tokens = $first . into_iter (). map ($crate :: quote :: ToTokenTree :: to_token ). collect ::< Vec < tt :: TokenTree >> (); let mut tail_tokens = $crate :: quote :: IntoTt :: to_tokens ($crate :: __quote ! ($($tail )*)); tokens . append (& mut tail_tokens ); tokens }}; ({$($tt : tt )* })=>{$crate :: __quote ! (@ SUBTREE Brace $($tt )*)}; ([$($tt : tt )* ])=>{$crate :: __quote ! (@ SUBTREE Bracket $($tt )*)}; (($($tt : tt )* ))=>{$crate :: __quote ! (@ SUBTREE Parenthesis $($tt )*)}; ($tt : literal )=>{ vec ! [$crate :: quote :: ToTokenTree :: to_token ($tt ). into ()]}; ($tt : ident )=>{ vec ! [{ tt :: Leaf :: Ident ( tt :: Ident { text : stringify ! ($tt ). into (), id : tt :: TokenId :: unspecified (), }). into ()}]}; (-> )=>{$crate :: __quote ! (@ PUNCT '-' , '>' )}; (& )=>{$crate :: __quote ! (@ PUNCT '&' )}; (, )=>{$crate :: __quote ! (@ PUNCT ',' )}; (: )=>{$crate :: __quote ! (@ PUNCT ':' )}; (; )=>{$crate :: __quote ! (@ PUNCT ';' )}; (:: )=>{$crate :: __quote ! (@ PUNCT ':' , ':' )}; (. )=>{$crate :: __quote ! (@ PUNCT '.' )}; (< )=>{$crate :: __quote ! (@ PUNCT '<' )}; (> )=>{$crate :: __quote ! (@ PUNCT '>' )}; ($first : tt $($tail : tt )+ )=>{{ let mut tokens = $crate :: quote :: IntoTt :: to_tokens ($crate :: __quote ! ($first )); let mut tail_tokens = $crate :: quote :: IntoTt :: to_tokens ($crate :: __quote ! ($($tail )*)); tokens . append (& mut tail_tokens ); tokens }}; } +macro_rules! __ra_macro_fixture530 {()=>{ Vec ::< tt :: TokenTree >:: new ()}; (@ SUBTREE $delim : ident $($tt : tt )* )=>{{ let children = $crate :: __quote ! ($($tt )*); tt :: Subtree { delimiter : Some ( tt :: Delimiter { kind : tt :: DelimiterKind ::$delim , id : tt :: TokenId :: unspecified (), }), token_trees : $crate :: quote :: IntoTt :: to_tokens ( children ), }}}; (@ PUNCT $first : literal )=>{{ vec ! [ tt :: Leaf :: Punct ( tt :: Punct { char : $first , spacing : tt :: Spacing :: Alone , id : tt :: TokenId :: unspecified (), }). into ()]}}; (@ PUNCT $first : literal , $sec : literal )=>{{ vec ! [ tt :: Leaf :: Punct ( tt :: Punct { char : $first , spacing : tt :: Spacing :: Joint , id : tt :: TokenId :: unspecified (), }). into (), tt :: Leaf :: Punct ( tt :: Punct { char : $sec , spacing : tt :: Spacing :: Alone , id : tt :: TokenId :: unspecified (), }). into ()]}}; (# $first : ident $($tail : tt )* )=>{{ let token = $crate :: quote :: ToTokenTree :: to_token ($first ); let mut tokens = vec ! [ token . into ()]; let mut tail_tokens = $crate :: quote :: IntoTt :: to_tokens ($crate :: __quote ! ($($tail )*)); tokens . append (& mut tail_tokens ); tokens }}; (# # $first : ident $($tail : tt )* )=>{{ let mut tokens = $first . into_iter (). map ($crate :: quote :: ToTokenTree :: to_token ). collect ::< Vec < tt :: TokenTree >> (); let mut tail_tokens = $crate :: quote :: IntoTt :: to_tokens ($crate :: __quote ! ($($tail )*)); tokens . append (& mut tail_tokens ); tokens }}; ({$($tt : tt )* })=>{$crate :: __quote ! (@ SUBTREE Brace $($tt )*)}; ([$($tt : tt )* ])=>{$crate :: __quote ! (@ SUBTREE Bracket $($tt )*)}; (($($tt : tt )* ))=>{$crate :: __quote ! (@ SUBTREE Parenthesis $($tt )*)}; ($tt : literal )=>{ vec ! [$crate :: quote :: ToTokenTree :: to_token ($tt ). into ()]}; ($tt : ident )=>{ vec ! [{ tt :: Leaf :: Ident ( tt :: Ident { text : stringify ! ($tt ). into (), id : tt :: TokenId :: unspecified (), }). into ()}]}; (-> )=>{$crate :: __quote ! (@ PUNCT '-' , '>' )}; (& )=>{$crate :: __quote ! (@ PUNCT '&' )}; (, )=>{$crate :: __quote ! (@ PUNCT ',' )}; (: )=>{$crate :: __quote ! (@ PUNCT ':' )}; (; )=>{$crate :: __quote ! (@ PUNCT ';' )}; (:: )=>{$crate :: __quote ! (@ PUNCT ':' , ':' )}; (. )=>{$crate :: __quote ! (@ PUNCT '.' )}; (< )=>{$crate :: __quote ! (@ PUNCT '<' )}; (> )=>{$crate :: __quote ! (@ PUNCT '>' )}; ($first : tt $($tail : tt )+ )=>{{ let mut tokens = $crate :: quote :: IntoTt :: to_tokens ($crate :: __quote ! ($first )); let mut tail_tokens = $crate :: quote :: IntoTt :: to_tokens ($crate :: __quote ! ($($tail )*)); tokens . append (& mut tail_tokens ); tokens }}; } macro_rules! __ra_macro_fixture531 {($($name : ident )*)=>{$(if let Some ( it )= & self .$name { f . field ( stringify ! ($name ), it ); })*}} macro_rules! __ra_macro_fixture532 {($fmt : expr )=>{ RenameError ( format ! ($fmt ))}; ($fmt : expr , $($arg : tt )+)=>{ RenameError ( format ! ($fmt , $($arg )+))}} macro_rules! __ra_macro_fixture533 {($($tokens : tt )*)=>{ return Err ( format_err ! ($($tokens )*))}} diff --git a/src/tools/rust-analyzer/crates/base-db/Cargo.toml b/src/tools/rust-analyzer/crates/base-db/Cargo.toml index 042dd36488aa9..e2e3253773fe3 100644 --- a/src/tools/rust-analyzer/crates/base-db/Cargo.toml +++ b/src/tools/rust-analyzer/crates/base-db/Cargo.toml @@ -12,10 +12,11 @@ rust-version.workspace = true [lib] [dependencies] -lz4_flex = { version = "0.11", default-features = false } - la-arena.workspace = true -ra-salsa.workspace = true +dashmap.workspace = true +salsa.workspace = true +salsa-macros.workspace = true +query-group.workspace = true rustc-hash.workspace = true triomphe.workspace = true semver.workspace = true @@ -23,7 +24,6 @@ tracing.workspace = true # local deps cfg.workspace = true -stdx.workspace = true syntax.workspace = true vfs.workspace = true span.workspace = true diff --git a/src/tools/rust-analyzer/crates/base-db/src/change.rs b/src/tools/rust-analyzer/crates/base-db/src/change.rs index 7e40f5408f144..da2fb27571c2f 100644 --- a/src/tools/rust-analyzer/crates/base-db/src/change.rs +++ b/src/tools/rust-analyzer/crates/base-db/src/change.rs @@ -3,23 +3,18 @@ use std::fmt; -use ra_salsa::Durability; -use rustc_hash::FxHashMap; +use salsa::Durability; use triomphe::Arc; use vfs::FileId; -use crate::{ - CrateGraph, CrateId, CrateWorkspaceData, SourceDatabaseFileInputExt, SourceRoot, - SourceRootDatabase, SourceRootId, -}; +use crate::{CrateGraphBuilder, CratesIdMap, RootQueryDb, SourceRoot, SourceRootId}; /// Encapsulate a bunch of raw `.set` calls on the database. #[derive(Default)] pub struct FileChange { pub roots: Option>, pub files_changed: Vec<(FileId, Option)>, - pub crate_graph: Option, - pub ws_data: Option>>, + pub crate_graph: Option, } impl fmt::Debug for FileChange { @@ -39,10 +34,6 @@ impl fmt::Debug for FileChange { } impl FileChange { - pub fn new() -> Self { - FileChange::default() - } - pub fn set_roots(&mut self, roots: Vec) { self.roots = Some(roots); } @@ -51,48 +42,45 @@ impl FileChange { self.files_changed.push((file_id, new_text)) } - pub fn set_crate_graph(&mut self, graph: CrateGraph) { + pub fn set_crate_graph(&mut self, graph: CrateGraphBuilder) { self.crate_graph = Some(graph); } - pub fn set_ws_data(&mut self, data: FxHashMap>) { - self.ws_data = Some(data); - } - - pub fn apply(self, db: &mut dyn SourceRootDatabase) { + pub fn apply(self, db: &mut dyn RootQueryDb) -> Option { let _p = tracing::info_span!("FileChange::apply").entered(); if let Some(roots) = self.roots { for (idx, root) in roots.into_iter().enumerate() { let root_id = SourceRootId(idx as u32); - let durability = durability(&root); + let durability = source_root_durability(&root); for file_id in root.iter() { db.set_file_source_root_with_durability(file_id, root_id, durability); } + db.set_source_root_with_durability(root_id, Arc::new(root), durability); } } for (file_id, text) in self.files_changed { let source_root_id = db.file_source_root(file_id); - let source_root = db.source_root(source_root_id); - let durability = durability(&source_root); + let source_root = db.source_root(source_root_id.source_root_id(db)); + + let durability = file_text_durability(&source_root.source_root(db)); // XXX: can't actually remove the file, just reset the text let text = text.unwrap_or_default(); db.set_file_text_with_durability(file_id, &text, durability) } + if let Some(crate_graph) = self.crate_graph { - db.set_crate_graph_with_durability(Arc::new(crate_graph), Durability::HIGH); - } - if let Some(data) = self.ws_data { - db.set_crate_workspace_data_with_durability(Arc::new(data), Durability::HIGH); + return Some(crate_graph.set_in_db(db)); } + None } } -fn durability(source_root: &SourceRoot) -> Durability { - if source_root.is_library { - Durability::HIGH - } else { - Durability::LOW - } +fn source_root_durability(source_root: &SourceRoot) -> Durability { + if source_root.is_library { Durability::MEDIUM } else { Durability::LOW } +} + +fn file_text_durability(source_root: &SourceRoot) -> Durability { + if source_root.is_library { Durability::HIGH } else { Durability::LOW } } diff --git a/src/tools/rust-analyzer/crates/base-db/src/input.rs b/src/tools/rust-analyzer/crates/base-db/src/input.rs index bd08387b58219..9660e6e87cca8 100644 --- a/src/tools/rust-analyzer/crates/base-db/src/input.rs +++ b/src/tools/rust-analyzer/crates/base-db/src/input.rs @@ -6,17 +6,23 @@ //! actual IO. See `vfs` and `project_model` in the `rust-analyzer` crate for how //! actual IO is done and lowered to input. +use std::hash::BuildHasherDefault; use std::{fmt, mem, ops}; -use cfg::CfgOptions; +use cfg::{CfgOptions, HashableCfgOptions}; +use dashmap::DashMap; +use dashmap::mapref::entry::Entry; use intern::Symbol; use la_arena::{Arena, Idx, RawIdx}; -use rustc_hash::{FxHashMap, FxHashSet}; -use span::{Edition, EditionedFileId}; +use rustc_hash::{FxHashMap, FxHashSet, FxHasher}; +use salsa::{Durability, Setter}; +use span::Edition; use triomphe::Arc; -use vfs::{file_set::FileSet, AbsPathBuf, AnchoredPath, FileId, VfsPath}; +use vfs::{AbsPathBuf, AnchoredPath, FileId, VfsPath, file_set::FileSet}; -pub type ProcMacroPaths = FxHashMap>; +use crate::{CrateWorkspaceData, EditionedFileId, RootQueryDb}; + +pub type ProcMacroPaths = FxHashMap>; #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)] pub struct SourceRootId(pub u32); @@ -64,30 +70,31 @@ impl SourceRoot { } } -/// `CrateGraph` is a bit of information which turns a set of text files into a -/// number of Rust crates. -/// -/// Each crate is defined by the `FileId` of its root module, the set of enabled -/// `cfg` flags and the set of dependencies. -/// -/// Note that, due to cfg's, there might be several crates for a single `FileId`! -/// -/// For the purposes of analysis, a crate does not have a name. Instead, names -/// are specified on dependency edges. That is, a crate might be known under -/// different names in different dependent crates. -/// -/// Note that `CrateGraph` is build-system agnostic: it's a concept of the Rust -/// language proper, not a concept of the build system. In practice, we get -/// `CrateGraph` by lowering `cargo metadata` output. -/// -/// `CrateGraph` is `!Serialize` by design, see -/// -#[derive(Clone, Default)] -pub struct CrateGraph { - arena: Arena, +#[derive(Default, Clone)] +pub struct CrateGraphBuilder { + arena: Arena, +} + +pub type CrateBuilderId = Idx; + +impl ops::Index for CrateGraphBuilder { + type Output = CrateBuilder; + + fn index(&self, index: CrateBuilderId) -> &Self::Output { + &self.arena[index] + } } -impl fmt::Debug for CrateGraph { +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct CrateBuilder { + pub basic: CrateDataBuilder, + pub extra: ExtraCrateData, + pub cfg_options: CfgOptions, + pub env: Env, + ws_data: Arc, +} + +impl fmt::Debug for CrateGraphBuilder { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_map() .entries(self.arena.iter().map(|(id, data)| (u32::from(id.into_raw()), data))) @@ -95,8 +102,6 @@ impl fmt::Debug for CrateGraph { } } -pub type CrateId = Idx; - #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct CrateName(Symbol); @@ -105,11 +110,7 @@ impl CrateName { /// Dashes are not allowed in the crate names, /// hence the input string is returned as `Err` for those cases. pub fn new(name: &str) -> Result { - if name.contains('-') { - Err(name) - } else { - Ok(Self(Symbol::intern(name))) - } + if name.contains('-') { Err(name) } else { Ok(Self(Symbol::intern(name))) } } /// Creates a crate name, unconditionally replacing the dashes with underscores. @@ -272,10 +273,49 @@ impl ReleaseChannel { } } -#[derive(Debug, Clone, PartialEq, Eq)] -pub struct CrateData { +/// The crate data from which we derive the `Crate`. +/// +/// We want this to contain as little data as possible, because if it contains dependencies and +/// something changes, this crate and all of its dependencies ids are invalidated, which causes +/// pretty much everything to be recomputed. If the crate id is not invalidated, only this crate's +/// information needs to be recomputed. +/// +/// *Most* different crates have different root files (actually, pretty much all of them). +/// Still, it is possible to have crates distinguished by other factors (e.g. dependencies). +/// So we store only the root file - unless we find that this crate has the same root file as +/// another crate, in which case we store all data for one of them (if one is a dependency of +/// the other, we store for it, because it has more dependencies to be invalidated). +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct UniqueCrateData { + root_file_id: FileId, + disambiguator: Option>, +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct CrateData { pub root_file_id: FileId, pub edition: Edition, + /// The dependencies of this crate. + /// + /// Note that this may contain more dependencies than the crate actually uses. + /// A common example is the test crate which is included but only actually is active when + /// declared in source via `extern crate test`. + pub dependencies: Vec>, + pub origin: CrateOrigin, + pub is_proc_macro: bool, + /// The working directory to run proc-macros in invoked in the context of this crate. + /// This is the workspace root of the cargo workspace for workspace members, the crate manifest + /// dir otherwise. + // FIXME: This ought to be a `VfsPath` or something opaque. + pub proc_macro_cwd: Arc, +} + +pub type CrateDataBuilder = CrateData; +pub type BuiltCrateData = CrateData; + +/// Crate data unrelated to analysis. +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct ExtraCrateData { pub version: Option, /// A name used in the package's project declaration: for Cargo projects, /// its `[package].name` can be different for other project types or even @@ -284,21 +324,8 @@ pub struct CrateData { /// For purposes of analysis, crates are anonymous (only names in /// `Dependency` matters), this name should only be used for UI. pub display_name: Option, - pub cfg_options: Arc, /// The cfg options that could be used by the crate - pub potential_cfg_options: Option>, - pub env: Env, - /// The dependencies of this crate. - /// - /// Note that this may contain more dependencies than the crate actually uses. - /// A common example is the test crate which is included but only actually is active when - /// declared in source via `extern crate test`. - pub dependencies: Vec, - pub origin: CrateOrigin, - pub is_proc_macro: bool, - /// The working directory to run proc-macros in. This is the workspace root of the cargo workspace - /// for workspace members, the crate manifest dir otherwise. - pub proc_macro_cwd: Option, + pub potential_cfg_options: Option, } #[derive(Default, Clone, PartialEq, Eq)] @@ -326,22 +353,32 @@ impl fmt::Debug for Env { } #[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub struct Dependency { - pub crate_id: CrateId, +pub struct Dependency { + pub crate_id: Id, pub name: CrateName, prelude: bool, sysroot: bool, } -impl Dependency { - pub fn new(name: CrateName, crate_id: CrateId) -> Self { +pub type DependencyBuilder = Dependency; +pub type BuiltDependency = Dependency; + +impl DependencyBuilder { + pub fn new(name: CrateName, crate_id: CrateBuilderId) -> Self { Self { name, crate_id, prelude: true, sysroot: false } } - pub fn with_prelude(name: CrateName, crate_id: CrateId, prelude: bool, sysroot: bool) -> Self { + pub fn with_prelude( + name: CrateName, + crate_id: CrateBuilderId, + prelude: bool, + sysroot: bool, + ) -> Self { Self { name, crate_id, prelude, sysroot } } +} +impl BuiltDependency { /// Whether this dependency is to be added to the depending crate's extern prelude. pub fn is_prelude(&self) -> bool { self.prelude @@ -353,41 +390,71 @@ impl Dependency { } } -impl CrateGraph { +pub type CratesIdMap = FxHashMap; + +#[salsa_macros::input] +#[derive(Debug)] +pub struct Crate { + #[return_ref] + pub data: BuiltCrateData, + /// Crate data that is not needed for analysis. + /// + /// This is split into a separate field to increase incrementality. + #[return_ref] + pub extra_data: ExtraCrateData, + // This is in `Arc` because it is shared for all crates in a workspace. + #[return_ref] + pub workspace_data: Arc, + #[return_ref] + pub cfg_options: CfgOptions, + #[return_ref] + pub env: Env, +} + +/// The mapping from [`UniqueCrateData`] to their [`Crate`] input. +#[derive(Debug, Default)] +pub struct CratesMap(DashMap>); + +impl CrateGraphBuilder { pub fn add_crate_root( &mut self, root_file_id: FileId, edition: Edition, display_name: Option, version: Option, - cfg_options: Arc, - potential_cfg_options: Option>, + mut cfg_options: CfgOptions, + mut potential_cfg_options: Option, mut env: Env, origin: CrateOrigin, is_proc_macro: bool, - proc_macro_cwd: Option, - ) -> CrateId { + proc_macro_cwd: Arc, + ws_data: Arc, + ) -> CrateBuilderId { env.entries.shrink_to_fit(); - let data = CrateData { - root_file_id, - edition, - version, - display_name, + cfg_options.shrink_to_fit(); + if let Some(potential_cfg_options) = &mut potential_cfg_options { + potential_cfg_options.shrink_to_fit(); + } + self.arena.alloc(CrateBuilder { + basic: CrateData { + root_file_id, + edition, + dependencies: Vec::new(), + origin, + is_proc_macro, + proc_macro_cwd, + }, + extra: ExtraCrateData { version, display_name, potential_cfg_options }, cfg_options, - potential_cfg_options, env, - dependencies: Vec::new(), - origin, - is_proc_macro, - proc_macro_cwd, - }; - self.arena.alloc(data) + ws_data, + }) } pub fn add_dep( &mut self, - from: CrateId, - dep: Dependency, + from: CrateBuilderId, + dep: DependencyBuilder, ) -> Result<(), CyclicDependenciesError> { let _p = tracing::info_span!("add_dep").entered(); @@ -395,37 +462,154 @@ impl CrateGraph { // that out, look for a path in the *opposite* direction, from `to` to // `from`. if let Some(path) = self.find_path(&mut FxHashSet::default(), dep.crate_id, from) { - let path = path.into_iter().map(|it| (it, self[it].display_name.clone())).collect(); + let path = + path.into_iter().map(|it| (it, self[it].extra.display_name.clone())).collect(); let err = CyclicDependenciesError { path }; assert!(err.from().0 == from && err.to().0 == dep.crate_id); return Err(err); } - self.arena[from].add_dep(dep); + self.arena[from].basic.dependencies.push(dep); Ok(()) } - pub fn is_empty(&self) -> bool { - self.arena.is_empty() - } + pub fn set_in_db(self, db: &mut dyn RootQueryDb) -> CratesIdMap { + let mut all_crates = Vec::with_capacity(self.arena.len()); + let mut visited = FxHashMap::default(); + let mut visited_root_files = FxHashSet::default(); - pub fn len(&self) -> usize { - self.arena.len() - } + let old_all_crates = db.all_crates(); - pub fn iter(&self) -> impl Iterator + '_ { - self.arena.iter().map(|(idx, _)| idx) + let crates_map = db.crates_map(); + // salsa doesn't compare new input to old input to see if they are the same, so here we are doing all the work ourselves. + for krate in self.iter() { + go( + &self, + db, + &crates_map, + &mut visited, + &mut visited_root_files, + &mut all_crates, + krate, + ); + } + + if **old_all_crates != *all_crates { + db.set_all_crates_with_durability( + Arc::new(all_crates.into_boxed_slice()), + Durability::MEDIUM, + ); + } + + return visited; + + fn go( + graph: &CrateGraphBuilder, + db: &mut dyn RootQueryDb, + crates_map: &CratesMap, + visited: &mut FxHashMap, + visited_root_files: &mut FxHashSet, + all_crates: &mut Vec, + source: CrateBuilderId, + ) -> Crate { + if let Some(&crate_id) = visited.get(&source) { + return crate_id; + } + let krate = &graph[source]; + let dependencies = krate + .basic + .dependencies + .iter() + .map(|dep| BuiltDependency { + crate_id: go( + graph, + db, + crates_map, + visited, + visited_root_files, + all_crates, + dep.crate_id, + ), + name: dep.name.clone(), + prelude: dep.prelude, + sysroot: dep.sysroot, + }) + .collect::>(); + let crate_data = BuiltCrateData { + dependencies, + edition: krate.basic.edition, + is_proc_macro: krate.basic.is_proc_macro, + origin: krate.basic.origin.clone(), + root_file_id: krate.basic.root_file_id, + proc_macro_cwd: krate.basic.proc_macro_cwd.clone(), + }; + let disambiguator = if visited_root_files.insert(krate.basic.root_file_id) { + None + } else { + Some(Box::new((crate_data.clone(), krate.cfg_options.to_hashable()))) + }; + + let unique_crate_data = + UniqueCrateData { root_file_id: krate.basic.root_file_id, disambiguator }; + let crate_input = match crates_map.0.entry(unique_crate_data) { + Entry::Occupied(entry) => { + let old_crate = *entry.get(); + if crate_data != *old_crate.data(db) { + old_crate.set_data(db).with_durability(Durability::MEDIUM).to(crate_data); + } + if krate.extra != *old_crate.extra_data(db) { + old_crate + .set_extra_data(db) + .with_durability(Durability::MEDIUM) + .to(krate.extra.clone()); + } + if krate.cfg_options != *old_crate.cfg_options(db) { + old_crate + .set_cfg_options(db) + .with_durability(Durability::MEDIUM) + .to(krate.cfg_options.clone()); + } + if krate.env != *old_crate.env(db) { + old_crate + .set_env(db) + .with_durability(Durability::MEDIUM) + .to(krate.env.clone()); + } + if krate.ws_data != *old_crate.workspace_data(db) { + old_crate + .set_workspace_data(db) + .with_durability(Durability::MEDIUM) + .to(krate.ws_data.clone()); + } + old_crate + } + Entry::Vacant(entry) => { + let input = Crate::builder( + crate_data, + krate.extra.clone(), + krate.ws_data.clone(), + krate.cfg_options.clone(), + krate.env.clone(), + ) + .durability(Durability::MEDIUM) + .new(db); + entry.insert(input); + input + } + }; + all_crates.push(crate_input); + visited.insert(source, crate_input); + crate_input + } } - // FIXME: used for fixing up the toolchain sysroot, should be removed and done differently - #[doc(hidden)] - pub fn iter_mut(&mut self) -> impl Iterator + '_ { - self.arena.iter_mut() + pub fn iter(&self) -> impl Iterator + '_ { + self.arena.iter().map(|(idx, _)| idx) } /// Returns an iterator over all transitive dependencies of the given crate, /// including the crate itself. - pub fn transitive_deps(&self, of: CrateId) -> impl Iterator { + pub fn transitive_deps(&self, of: CrateBuilderId) -> impl Iterator { let mut worklist = vec![of]; let mut deps = FxHashSet::default(); @@ -434,42 +618,15 @@ impl CrateGraph { continue; } - worklist.extend(self[krate].dependencies.iter().map(|dep| dep.crate_id)); + worklist.extend(self[krate].basic.dependencies.iter().map(|dep| dep.crate_id)); } deps.into_iter() } - /// Returns all transitive reverse dependencies of the given crate, - /// including the crate itself. - pub fn transitive_rev_deps(&self, of: CrateId) -> impl Iterator { - let mut worklist = vec![of]; - let mut rev_deps = FxHashSet::default(); - rev_deps.insert(of); - - let mut inverted_graph = FxHashMap::<_, Vec<_>>::default(); - self.arena.iter().for_each(|(krate, data)| { - data.dependencies - .iter() - .for_each(|dep| inverted_graph.entry(dep.crate_id).or_default().push(krate)) - }); - - while let Some(krate) = worklist.pop() { - if let Some(krate_rev_deps) = inverted_graph.get(&krate) { - krate_rev_deps - .iter() - .copied() - .filter(|&rev_dep| rev_deps.insert(rev_dep)) - .for_each(|rev_dep| worklist.push(rev_dep)); - } - } - - rev_deps.into_iter() - } - /// Returns all crates in the graph, sorted in topological order (ie. dependencies of a crate /// come before the crate itself). - pub fn crates_in_topological_order(&self) -> Vec { + fn crates_in_topological_order(&self) -> Vec { let mut res = Vec::new(); let mut visited = FxHashSet::default(); @@ -480,15 +637,15 @@ impl CrateGraph { return res; fn go( - graph: &CrateGraph, - visited: &mut FxHashSet, - res: &mut Vec, - source: CrateId, + graph: &CrateGraphBuilder, + visited: &mut FxHashSet, + res: &mut Vec, + source: CrateBuilderId, ) { if !visited.insert(source) { return; } - for dep in graph[source].dependencies.iter() { + for dep in graph[source].basic.dependencies.iter() { go(graph, visited, res, dep.crate_id) } res.push(source) @@ -504,23 +661,27 @@ impl CrateGraph { /// Returns a map mapping `other`'s IDs to the new IDs in `self`. pub fn extend( &mut self, - mut other: CrateGraph, + mut other: CrateGraphBuilder, proc_macros: &mut ProcMacroPaths, - ) -> FxHashMap { + ) -> FxHashMap { // Sorting here is a bit pointless because the input is likely already sorted. // However, the overhead is small and it makes the `extend` method harder to misuse. self.arena .iter_mut() - .for_each(|(_, data)| data.dependencies.sort_by_key(|dep| dep.crate_id)); + .for_each(|(_, data)| data.basic.dependencies.sort_by_key(|dep| dep.crate_id)); - let m = self.len(); + let m = self.arena.len(); let topo = other.crates_in_topological_order(); - let mut id_map: FxHashMap = FxHashMap::default(); + let mut id_map: FxHashMap = FxHashMap::default(); for topo in topo { let crate_data = &mut other.arena[topo]; - crate_data.dependencies.iter_mut().for_each(|dep| dep.crate_id = id_map[&dep.crate_id]); - crate_data.dependencies.sort_by_key(|dep| dep.crate_id); + crate_data + .basic + .dependencies + .iter_mut() + .for_each(|dep| dep.crate_id = id_map[&dep.crate_id]); + crate_data.basic.dependencies.sort_by_key(|dep| dep.crate_id); let find = self.arena.iter().take(m).find_map(|(k, v)| (v == crate_data).then_some(k)); let new_id = find.unwrap_or_else(|| self.arena.alloc(crate_data.clone())); @@ -534,10 +695,10 @@ impl CrateGraph { fn find_path( &self, - visited: &mut FxHashSet, - from: CrateId, - to: CrateId, - ) -> Option> { + visited: &mut FxHashSet, + from: CrateBuilderId, + to: CrateBuilderId, + ) -> Option> { if !visited.insert(from) { return None; } @@ -546,7 +707,7 @@ impl CrateGraph { return Some(vec![to]); } - for dep in &self[from].dependencies { + for dep in &self[from].basic.dependencies { let crate_id = dep.crate_id; if let Some(mut path) = self.find_path(visited, crate_id, to) { path.push(from); @@ -559,7 +720,10 @@ impl CrateGraph { /// Removes all crates from this crate graph except for the ones in `to_keep` and fixes up the dependencies. /// Returns a mapping from old crate ids to new crate ids. - pub fn remove_crates_except(&mut self, to_keep: &[CrateId]) -> Vec> { + pub fn remove_crates_except( + &mut self, + to_keep: &[CrateBuilderId], + ) -> Vec> { let mut id_map = vec![None; self.arena.len()]; self.arena = std::mem::take(&mut self.arena) .into_iter() @@ -567,12 +731,12 @@ impl CrateGraph { .enumerate() .map(|(new_id, (id, data))| { id_map[id.into_raw().into_u32() as usize] = - Some(CrateId::from_raw(RawIdx::from_u32(new_id as u32))); + Some(CrateBuilderId::from_raw(RawIdx::from_u32(new_id as u32))); data }) .collect(); for (_, data) in self.arena.iter_mut() { - data.dependencies.iter_mut().for_each(|dep| { + data.basic.dependencies.iter_mut().for_each(|dep| { dep.crate_id = id_map[dep.crate_id.into_raw().into_u32() as usize].expect("crate was filtered") }); @@ -585,22 +749,36 @@ impl CrateGraph { } } -impl ops::Index for CrateGraph { - type Output = CrateData; - fn index(&self, crate_id: CrateId) -> &CrateData { - &self.arena[crate_id] +pub(crate) fn transitive_rev_deps(db: &dyn RootQueryDb, of: Crate) -> FxHashSet { + let mut worklist = vec![of]; + let mut rev_deps = FxHashSet::default(); + rev_deps.insert(of); + + let mut inverted_graph = FxHashMap::<_, Vec<_>>::default(); + db.all_crates().iter().for_each(|&krate| { + krate + .data(db) + .dependencies + .iter() + .for_each(|dep| inverted_graph.entry(dep.crate_id).or_default().push(krate)) + }); + + while let Some(krate) = worklist.pop() { + if let Some(crate_rev_deps) = inverted_graph.get(&krate) { + crate_rev_deps + .iter() + .copied() + .filter(|&rev_dep| rev_deps.insert(rev_dep)) + .for_each(|rev_dep| worklist.push(rev_dep)); + } } -} -impl CrateData { - /// Add a dependency to `self` without checking if the dependency - // is existent among `self.dependencies`. - fn add_dep(&mut self, dep: Dependency) { - self.dependencies.push(dep) - } + rev_deps +} - pub fn root_file_id(&self) -> EditionedFileId { - EditionedFileId::new(self.root_file_id, self.edition) +impl BuiltCrateData { + pub fn root_file_id(&self, db: &dyn salsa::Database) -> EditionedFileId { + EditionedFileId::new(db, self.root_file_id, self.edition) } } @@ -657,21 +835,21 @@ impl<'a> IntoIterator for &'a Env { #[derive(Debug)] pub struct CyclicDependenciesError { - path: Vec<(CrateId, Option)>, + path: Vec<(CrateBuilderId, Option)>, } impl CyclicDependenciesError { - fn from(&self) -> &(CrateId, Option) { + fn from(&self) -> &(CrateBuilderId, Option) { self.path.first().unwrap() } - fn to(&self) -> &(CrateId, Option) { + fn to(&self) -> &(CrateBuilderId, Option) { self.path.last().unwrap() } } impl fmt::Display for CyclicDependenciesError { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - let render = |(id, name): &(CrateId, Option)| match name { + let render = |(id, name): &(CrateBuilderId, Option)| match name { Some(it) => format!("{it}({id:?})"), None => format!("{id:?}"), }; @@ -688,13 +866,20 @@ impl fmt::Display for CyclicDependenciesError { #[cfg(test)] mod tests { - use crate::CrateOrigin; + use triomphe::Arc; + use vfs::AbsPathBuf; + + use crate::{CrateWorkspaceData, DependencyBuilder}; - use super::{CrateGraph, CrateName, Dependency, Edition::Edition2018, Env, FileId}; + use super::{CrateGraphBuilder, CrateName, CrateOrigin, Edition::Edition2018, Env, FileId}; + + fn empty_ws_data() -> Arc { + Arc::new(CrateWorkspaceData { data_layout: Err("".into()), toolchain: None }) + } #[test] fn detect_cyclic_dependency_indirect() { - let mut graph = CrateGraph::default(); + let mut graph = CrateGraphBuilder::default(); let crate1 = graph.add_crate_root( FileId::from_raw(1u32), Edition2018, @@ -705,7 +890,8 @@ mod tests { Env::default(), CrateOrigin::Local { repo: None, name: None }, false, - None, + Arc::new(AbsPathBuf::assert_utf8(std::env::current_dir().unwrap())), + empty_ws_data(), ); let crate2 = graph.add_crate_root( FileId::from_raw(2u32), @@ -717,7 +903,8 @@ mod tests { Env::default(), CrateOrigin::Local { repo: None, name: None }, false, - None, + Arc::new(AbsPathBuf::assert_utf8(std::env::current_dir().unwrap())), + empty_ws_data(), ); let crate3 = graph.add_crate_root( FileId::from_raw(3u32), @@ -729,22 +916,29 @@ mod tests { Env::default(), CrateOrigin::Local { repo: None, name: None }, false, - None, + Arc::new(AbsPathBuf::assert_utf8(std::env::current_dir().unwrap())), + empty_ws_data(), + ); + assert!( + graph + .add_dep(crate1, DependencyBuilder::new(CrateName::new("crate2").unwrap(), crate2,)) + .is_ok() + ); + assert!( + graph + .add_dep(crate2, DependencyBuilder::new(CrateName::new("crate3").unwrap(), crate3,)) + .is_ok() + ); + assert!( + graph + .add_dep(crate3, DependencyBuilder::new(CrateName::new("crate1").unwrap(), crate1,)) + .is_err() ); - assert!(graph - .add_dep(crate1, Dependency::new(CrateName::new("crate2").unwrap(), crate2,)) - .is_ok()); - assert!(graph - .add_dep(crate2, Dependency::new(CrateName::new("crate3").unwrap(), crate3,)) - .is_ok()); - assert!(graph - .add_dep(crate3, Dependency::new(CrateName::new("crate1").unwrap(), crate1,)) - .is_err()); } #[test] fn detect_cyclic_dependency_direct() { - let mut graph = CrateGraph::default(); + let mut graph = CrateGraphBuilder::default(); let crate1 = graph.add_crate_root( FileId::from_raw(1u32), Edition2018, @@ -755,7 +949,8 @@ mod tests { Env::default(), CrateOrigin::Local { repo: None, name: None }, false, - None, + Arc::new(AbsPathBuf::assert_utf8(std::env::current_dir().unwrap())), + empty_ws_data(), ); let crate2 = graph.add_crate_root( FileId::from_raw(2u32), @@ -767,19 +962,24 @@ mod tests { Env::default(), CrateOrigin::Local { repo: None, name: None }, false, - None, + Arc::new(AbsPathBuf::assert_utf8(std::env::current_dir().unwrap())), + empty_ws_data(), + ); + assert!( + graph + .add_dep(crate1, DependencyBuilder::new(CrateName::new("crate2").unwrap(), crate2,)) + .is_ok() + ); + assert!( + graph + .add_dep(crate2, DependencyBuilder::new(CrateName::new("crate2").unwrap(), crate2,)) + .is_err() ); - assert!(graph - .add_dep(crate1, Dependency::new(CrateName::new("crate2").unwrap(), crate2,)) - .is_ok()); - assert!(graph - .add_dep(crate2, Dependency::new(CrateName::new("crate2").unwrap(), crate2,)) - .is_err()); } #[test] fn it_works() { - let mut graph = CrateGraph::default(); + let mut graph = CrateGraphBuilder::default(); let crate1 = graph.add_crate_root( FileId::from_raw(1u32), Edition2018, @@ -790,7 +990,8 @@ mod tests { Env::default(), CrateOrigin::Local { repo: None, name: None }, false, - None, + Arc::new(AbsPathBuf::assert_utf8(std::env::current_dir().unwrap())), + empty_ws_data(), ); let crate2 = graph.add_crate_root( FileId::from_raw(2u32), @@ -802,7 +1003,8 @@ mod tests { Env::default(), CrateOrigin::Local { repo: None, name: None }, false, - None, + Arc::new(AbsPathBuf::assert_utf8(std::env::current_dir().unwrap())), + empty_ws_data(), ); let crate3 = graph.add_crate_root( FileId::from_raw(3u32), @@ -814,19 +1016,24 @@ mod tests { Env::default(), CrateOrigin::Local { repo: None, name: None }, false, - None, + Arc::new(AbsPathBuf::assert_utf8(std::env::current_dir().unwrap())), + empty_ws_data(), + ); + assert!( + graph + .add_dep(crate1, DependencyBuilder::new(CrateName::new("crate2").unwrap(), crate2,)) + .is_ok() + ); + assert!( + graph + .add_dep(crate2, DependencyBuilder::new(CrateName::new("crate3").unwrap(), crate3,)) + .is_ok() ); - assert!(graph - .add_dep(crate1, Dependency::new(CrateName::new("crate2").unwrap(), crate2,)) - .is_ok()); - assert!(graph - .add_dep(crate2, Dependency::new(CrateName::new("crate3").unwrap(), crate3,)) - .is_ok()); } #[test] fn dashes_are_normalized() { - let mut graph = CrateGraph::default(); + let mut graph = CrateGraphBuilder::default(); let crate1 = graph.add_crate_root( FileId::from_raw(1u32), Edition2018, @@ -837,7 +1044,8 @@ mod tests { Env::default(), CrateOrigin::Local { repo: None, name: None }, false, - None, + Arc::new(AbsPathBuf::assert_utf8(std::env::current_dir().unwrap())), + empty_ws_data(), ); let crate2 = graph.add_crate_root( FileId::from_raw(2u32), @@ -849,17 +1057,25 @@ mod tests { Env::default(), CrateOrigin::Local { repo: None, name: None }, false, - None, + Arc::new(AbsPathBuf::assert_utf8(std::env::current_dir().unwrap())), + empty_ws_data(), + ); + assert!( + graph + .add_dep( + crate1, + DependencyBuilder::new( + CrateName::normalize_dashes("crate-name-with-dashes"), + crate2, + ) + ) + .is_ok() ); - assert!(graph - .add_dep( - crate1, - Dependency::new(CrateName::normalize_dashes("crate-name-with-dashes"), crate2,) - ) - .is_ok()); assert_eq!( - graph[crate1].dependencies, - vec![Dependency::new(CrateName::new("crate_name_with_dashes").unwrap(), crate2,)] + graph.arena[crate1].basic.dependencies, + vec![ + DependencyBuilder::new(CrateName::new("crate_name_with_dashes").unwrap(), crate2,) + ] ); } } diff --git a/src/tools/rust-analyzer/crates/base-db/src/lib.rs b/src/tools/rust-analyzer/crates/base-db/src/lib.rs index eed8c88683951..a67fbf75c02f1 100644 --- a/src/tools/rust-analyzer/crates/base-db/src/lib.rs +++ b/src/tools/rust-analyzer/crates/base-db/src/lib.rs @@ -1,203 +1,433 @@ //! base_db defines basic database traits. The concrete DB is defined by ide. + +pub use salsa; +pub use salsa_macros; + // FIXME: Rename this crate, base db is non descriptive mod change; mod input; -use std::panic; - -use ra_salsa::Durability; -use rustc_hash::FxHashMap; -use span::EditionedFileId; -use syntax::{ast, Parse, SourceFile, SyntaxError}; -use triomphe::Arc; -use vfs::FileId; +use std::{cell::RefCell, hash::BuildHasherDefault, panic, sync::Once}; pub use crate::{ change::FileChange, input::{ - CrateData, CrateDisplayName, CrateGraph, CrateId, CrateName, CrateOrigin, Dependency, Env, - LangCrateOrigin, ProcMacroPaths, ReleaseChannel, SourceRoot, SourceRootId, - TargetLayoutLoadResult, + BuiltCrateData, BuiltDependency, Crate, CrateBuilder, CrateBuilderId, CrateDataBuilder, + CrateDisplayName, CrateGraphBuilder, CrateName, CrateOrigin, CratesIdMap, CratesMap, + DependencyBuilder, Env, ExtraCrateData, LangCrateOrigin, ProcMacroPaths, ReleaseChannel, + SourceRoot, SourceRootId, TargetLayoutLoadResult, UniqueCrateData, }, }; -pub use ra_salsa::{self, Cancelled}; -pub use vfs::{file_set::FileSet, AnchoredPath, AnchoredPathBuf, VfsPath}; - +use dashmap::{DashMap, mapref::entry::Entry}; +pub use query_group::{self}; +use rustc_hash::{FxHashSet, FxHasher}; +use salsa::{Durability, Setter}; pub use semver::{BuildMetadata, Prerelease, Version, VersionReq}; +use span::Edition; +use syntax::{Parse, SyntaxError, ast}; +use triomphe::Arc; +pub use vfs::{AnchoredPath, AnchoredPathBuf, FileId, VfsPath, file_set::FileSet}; #[macro_export] macro_rules! impl_intern_key { - ($name:ident) => { - impl $crate::ra_salsa::InternKey for $name { - fn from_intern_id(v: $crate::ra_salsa::InternId) -> Self { - $name(v) - } - fn as_intern_id(&self) -> $crate::ra_salsa::InternId { - self.0 + ($id:ident, $loc:ident) => { + #[salsa_macros::interned(no_lifetime)] + pub struct $id { + pub loc: $loc, + } + + // If we derive this salsa prints the values recursively, and this causes us to blow. + impl ::std::fmt::Debug for $id { + fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result { + f.debug_tuple(stringify!($id)) + .field(&format_args!("{:04x}", self.0.as_u32())) + .finish() } } }; } -pub trait Upcast { - fn upcast(&self) -> &T; -} - pub const DEFAULT_FILE_TEXT_LRU_CAP: u16 = 16; pub const DEFAULT_PARSE_LRU_CAP: u16 = 128; pub const DEFAULT_BORROWCK_LRU_CAP: u16 = 2024; -pub trait FileLoader { - fn resolve_path(&self, path: AnchoredPath<'_>) -> Option; - /// Crates whose root's source root is the same as the source root of `file_id` - fn relevant_crates(&self, file_id: FileId) -> Arc<[CrateId]>; +#[derive(Debug, Default)] +pub struct Files { + files: Arc>>, + source_roots: Arc>>, + file_source_roots: Arc>>, } -/// Database which stores all significant input facts: source code and project -/// model. Everything else in rust-analyzer is derived from these queries. -#[ra_salsa::query_group(SourceDatabaseStorage)] -pub trait SourceDatabase: FileLoader + std::fmt::Debug { - #[ra_salsa::input] - fn compressed_file_text(&self, file_id: FileId) -> Arc<[u8]>; +impl Files { + pub fn file_text(&self, file_id: vfs::FileId) -> FileText { + match self.files.get(&file_id) { + Some(text) => *text, + None => { + panic!("Unable to fetch file text for `vfs::FileId`: {file_id:?}; this is a bug") + } + } + } - /// Text of the file. - #[ra_salsa::lru] - fn file_text(&self, file_id: FileId) -> Arc; + pub fn set_file_text(&self, db: &mut dyn SourceDatabase, file_id: vfs::FileId, text: &str) { + match self.files.entry(file_id) { + Entry::Occupied(mut occupied) => { + occupied.get_mut().set_text(db).to(Arc::from(text)); + } + Entry::Vacant(vacant) => { + let text = FileText::new(db, Arc::from(text), file_id); + vacant.insert(text); + } + }; + } - /// Parses the file into the syntax tree. - #[ra_salsa::lru] - fn parse(&self, file_id: EditionedFileId) -> Parse; + pub fn set_file_text_with_durability( + &self, + db: &mut dyn SourceDatabase, + file_id: vfs::FileId, + text: &str, + durability: Durability, + ) { + match self.files.entry(file_id) { + Entry::Occupied(mut occupied) => { + occupied.get_mut().set_text(db).with_durability(durability).to(Arc::from(text)); + } + Entry::Vacant(vacant) => { + let text = + FileText::builder(Arc::from(text), file_id).durability(durability).new(db); + vacant.insert(text); + } + }; + } - /// Returns the set of errors obtained from parsing the file including validation errors. - fn parse_errors(&self, file_id: EditionedFileId) -> Option>; + /// Source root of the file. + pub fn source_root(&self, source_root_id: SourceRootId) -> SourceRootInput { + let source_root = match self.source_roots.get(&source_root_id) { + Some(source_root) => source_root, + None => panic!( + "Unable to fetch `SourceRootInput` with `SourceRootId` ({source_root_id:?}); this is a bug" + ), + }; + + *source_root + } - /// The crate graph. - #[ra_salsa::input] - fn crate_graph(&self) -> Arc; + pub fn set_source_root_with_durability( + &self, + db: &mut dyn SourceDatabase, + source_root_id: SourceRootId, + source_root: Arc, + durability: Durability, + ) { + match self.source_roots.entry(source_root_id) { + Entry::Occupied(mut occupied) => { + occupied.get_mut().set_source_root(db).with_durability(durability).to(source_root); + } + Entry::Vacant(vacant) => { + let source_root = + SourceRootInput::builder(source_root).durability(durability).new(db); + vacant.insert(source_root); + } + }; + } - #[ra_salsa::input] - fn crate_workspace_data(&self) -> Arc>>; + pub fn file_source_root(&self, id: vfs::FileId) -> FileSourceRootInput { + let file_source_root = match self.file_source_roots.get(&id) { + Some(file_source_root) => file_source_root, + None => panic!( + "Unable to get `FileSourceRootInput` with `vfs::FileId` ({id:?}); this is a bug", + ), + }; + *file_source_root + } - #[ra_salsa::transparent] - fn toolchain_channel(&self, krate: CrateId) -> Option; + pub fn set_file_source_root_with_durability( + &self, + db: &mut dyn SourceDatabase, + id: vfs::FileId, + source_root_id: SourceRootId, + durability: Durability, + ) { + match self.file_source_roots.entry(id) { + Entry::Occupied(mut occupied) => { + occupied + .get_mut() + .set_source_root_id(db) + .with_durability(durability) + .to(source_root_id); + } + Entry::Vacant(vacant) => { + let file_source_root = + FileSourceRootInput::builder(source_root_id).durability(durability).new(db); + vacant.insert(file_source_root); + } + }; + } } -/// Crate related data shared by the whole workspace. -#[derive(Debug, PartialEq, Eq, Hash, Clone)] -pub struct CrateWorkspaceData { - // FIXME: Consider removing this, making HirDatabase::target_data_layout an input query - pub data_layout: TargetLayoutLoadResult, - /// Toolchain version used to compile the crate. - pub toolchain: Option, +#[salsa_macros::interned(no_lifetime, debug, constructor=from_span)] +pub struct EditionedFileId { + pub editioned_file_id: span::EditionedFileId, } -fn toolchain_channel(db: &dyn SourceDatabase, krate: CrateId) -> Option { - db.crate_workspace_data() - .get(&krate)? - .toolchain - .as_ref() - .and_then(|v| ReleaseChannel::from_str(&v.pre)) +impl EditionedFileId { + // Salsa already uses the name `new`... + #[inline] + pub fn new(db: &dyn salsa::Database, file_id: FileId, edition: Edition) -> Self { + EditionedFileId::from_span(db, span::EditionedFileId::new(file_id, edition)) + } + + #[inline] + pub fn current_edition(db: &dyn salsa::Database, file_id: FileId) -> Self { + EditionedFileId::new(db, file_id, Edition::CURRENT) + } + + #[inline] + pub fn file_id(self, db: &dyn salsa::Database) -> vfs::FileId { + let id = self.editioned_file_id(db); + id.file_id() + } + + #[inline] + pub fn unpack(self, db: &dyn salsa::Database) -> (vfs::FileId, span::Edition) { + let id = self.editioned_file_id(db); + (id.file_id(), id.edition()) + } + + #[inline] + pub fn edition(self, db: &dyn SourceDatabase) -> Edition { + self.editioned_file_id(db).edition() + } } -fn parse(db: &dyn SourceDatabase, file_id: EditionedFileId) -> Parse { - let _p = tracing::info_span!("parse", ?file_id).entered(); - let (file_id, edition) = file_id.unpack(); - let text = db.file_text(file_id); - SourceFile::parse(&text, edition) +#[salsa_macros::input(debug)] +pub struct FileText { + pub text: Arc, + pub file_id: vfs::FileId, } -fn parse_errors(db: &dyn SourceDatabase, file_id: EditionedFileId) -> Option> { - let errors = db.parse(file_id).errors(); - match &*errors { - [] => None, - [..] => Some(errors.into()), - } +#[salsa_macros::input(debug)] +pub struct FileSourceRootInput { + pub source_root_id: SourceRootId, } -fn file_text(db: &dyn SourceDatabase, file_id: FileId) -> Arc { - let bytes = db.compressed_file_text(file_id); - let bytes = - lz4_flex::decompress_size_prepended(&bytes).expect("lz4 decompression should not fail"); - let text = std::str::from_utf8(&bytes).expect("file contents should be valid UTF-8"); - Arc::from(text) +#[salsa_macros::input(debug)] +pub struct SourceRootInput { + pub source_root: Arc, } -/// We don't want to give HIR knowledge of source roots, hence we extract these -/// methods into a separate DB. -#[ra_salsa::query_group(SourceRootDatabaseStorage)] -pub trait SourceRootDatabase: SourceDatabase { - /// Path to a file, relative to the root of its source root. - /// Source root of the file. - #[ra_salsa::input] - fn file_source_root(&self, file_id: FileId) -> SourceRootId; - /// Contents of the source root. - #[ra_salsa::input] - fn source_root(&self, id: SourceRootId) -> Arc; +/// Database which stores all significant input facts: source code and project +/// model. Everything else in rust-analyzer is derived from these queries. +#[query_group::query_group] +pub trait RootQueryDb: SourceDatabase + salsa::Database { + /// Parses the file into the syntax tree. + #[salsa::invoke(parse)] + #[salsa::lru(128)] + fn parse(&self, file_id: EditionedFileId) -> Parse; + + /// Returns the set of errors obtained from parsing the file including validation errors. + #[salsa::transparent] + fn parse_errors(&self, file_id: EditionedFileId) -> Option<&[SyntaxError]>; + + #[salsa::transparent] + fn toolchain_channel(&self, krate: Crate) -> Option; /// Crates whose root file is in `id`. - fn source_root_crates(&self, id: SourceRootId) -> Arc<[CrateId]>; + #[salsa::invoke_interned(source_root_crates)] + fn source_root_crates(&self, id: SourceRootId) -> Arc<[Crate]>; + + #[salsa::transparent] + fn relevant_crates(&self, file_id: FileId) -> Arc<[Crate]>; + + /// Returns the crates in topological order. + /// + /// **Warning**: do not use this query in `hir-*` crates! It kills incrementality across crate metadata modifications. + #[salsa::input] + fn all_crates(&self) -> Arc>; + + /// Returns an iterator over all transitive dependencies of the given crate, + /// including the crate itself. + /// + /// **Warning**: do not use this query in `hir-*` crates! It kills incrementality across crate metadata modifications. + #[salsa::transparent] + fn transitive_deps(&self, crate_id: Crate) -> FxHashSet; + + /// Returns all transitive reverse dependencies of the given crate, + /// including the crate itself. + /// + /// **Warning**: do not use this query in `hir-*` crates! It kills incrementality across crate metadata modifications. + #[salsa::invoke(input::transitive_rev_deps)] + #[salsa::transparent] + fn transitive_rev_deps(&self, of: Crate) -> FxHashSet; } -pub trait SourceDatabaseFileInputExt { - fn set_file_text(&mut self, file_id: FileId, text: &str) { - self.set_file_text_with_durability(file_id, text, Durability::LOW); +pub fn transitive_deps(db: &dyn SourceDatabase, crate_id: Crate) -> FxHashSet { + // There is a bit of duplication here and in `CrateGraphBuilder` in the same method, but it's not terrible + // and removing that is a bit difficult. + let mut worklist = vec![crate_id]; + let mut deps = FxHashSet::default(); + + while let Some(krate) = worklist.pop() { + if !deps.insert(krate) { + continue; + } + + worklist.extend(krate.data(db).dependencies.iter().map(|dep| dep.crate_id)); } + deps +} + +#[salsa_macros::db] +pub trait SourceDatabase: salsa::Database { + /// Text of the file. + fn file_text(&self, file_id: vfs::FileId) -> FileText; + + fn set_file_text(&mut self, file_id: vfs::FileId, text: &str); + fn set_file_text_with_durability( &mut self, - file_id: FileId, + file_id: vfs::FileId, text: &str, durability: Durability, ); -} -impl SourceDatabaseFileInputExt for Db { - fn set_file_text_with_durability( + /// Contents of the source root. + fn source_root(&self, id: SourceRootId) -> SourceRootInput; + + fn file_source_root(&self, id: vfs::FileId) -> FileSourceRootInput; + + fn set_file_source_root_with_durability( &mut self, - file_id: FileId, - text: &str, + id: vfs::FileId, + source_root_id: SourceRootId, durability: Durability, - ) { - let bytes = text.as_bytes(); - let compressed = lz4_flex::compress_prepend_size(bytes); - self.set_compressed_file_text_with_durability( - file_id, - Arc::from(compressed.as_slice()), - durability, - ) + ); + + /// Source root of the file. + fn set_source_root_with_durability( + &mut self, + source_root_id: SourceRootId, + source_root: Arc, + durability: Durability, + ); + + fn resolve_path(&self, path: AnchoredPath<'_>) -> Option { + // FIXME: this *somehow* should be platform agnostic... + let source_root = self.file_source_root(path.anchor); + let source_root = self.source_root(source_root.source_root_id(self)); + source_root.source_root(self).resolve_path(path) + } + + #[doc(hidden)] + fn crates_map(&self) -> Arc; +} + +/// Crate related data shared by the whole workspace. +#[derive(Debug, PartialEq, Eq, Hash, Clone)] +pub struct CrateWorkspaceData { + // FIXME: Consider removing this, making HirDatabase::target_data_layout an input query + pub data_layout: TargetLayoutLoadResult, + /// Toolchain version used to compile the crate. + pub toolchain: Option, +} + +impl CrateWorkspaceData { + pub fn is_atleast_187(&self) -> bool { + const VERSION_187: Version = Version { + major: 1, + minor: 87, + patch: 0, + pre: Prerelease::EMPTY, + build: BuildMetadata::EMPTY, + }; + self.toolchain.as_ref().map_or(false, |v| *v >= VERSION_187) } } -fn source_root_crates(db: &dyn SourceRootDatabase, id: SourceRootId) -> Arc<[CrateId]> { - let graph = db.crate_graph(); - let mut crates = graph +fn toolchain_channel(db: &dyn RootQueryDb, krate: Crate) -> Option { + krate.workspace_data(db).toolchain.as_ref().and_then(|v| ReleaseChannel::from_str(&v.pre)) +} + +fn parse(db: &dyn RootQueryDb, file_id: EditionedFileId) -> Parse { + let _p = tracing::info_span!("parse", ?file_id).entered(); + let (file_id, edition) = file_id.unpack(db.as_dyn_database()); + let text = db.file_text(file_id).text(db); + ast::SourceFile::parse(&text, edition) +} + +fn parse_errors(db: &dyn RootQueryDb, file_id: EditionedFileId) -> Option<&[SyntaxError]> { + #[salsa_macros::tracked(return_ref)] + fn parse_errors(db: &dyn RootQueryDb, file_id: EditionedFileId) -> Option> { + let errors = db.parse(file_id).errors(); + match &*errors { + [] => None, + [..] => Some(errors.into()), + } + } + parse_errors(db, file_id).as_ref().map(|it| &**it) +} + +fn source_root_crates(db: &dyn RootQueryDb, id: SourceRootId) -> Arc<[Crate]> { + let crates = db.all_crates(); + crates .iter() + .copied() .filter(|&krate| { - let root_file = graph[krate].root_file_id; - db.file_source_root(root_file) == id + let root_file = krate.data(db).root_file_id; + db.file_source_root(root_file).source_root_id(db) == id }) - .collect::>(); - crates.sort(); - crates.dedup(); - crates.into_iter().collect() + .collect() } -// FIXME: Would be nice to get rid of this somehow -/// Silly workaround for cyclic deps due to the SourceRootDatabase and SourceDatabase split -/// regarding FileLoader -pub struct FileLoaderDelegate(pub T); +fn relevant_crates(db: &dyn RootQueryDb, file_id: FileId) -> Arc<[Crate]> { + let _p = tracing::info_span!("relevant_crates").entered(); -impl FileLoader for FileLoaderDelegate<&'_ T> { - fn resolve_path(&self, path: AnchoredPath<'_>) -> Option { - // FIXME: this *somehow* should be platform agnostic... - let source_root = self.0.file_source_root(path.anchor); - let source_root = self.0.source_root(source_root); - source_root.resolve_path(path) + let source_root = db.file_source_root(file_id); + db.source_root_crates(source_root.source_root_id(db)) +} + +#[must_use] +#[non_exhaustive] +pub struct DbPanicContext; + +impl Drop for DbPanicContext { + fn drop(&mut self) { + Self::with_ctx(|ctx| assert!(ctx.pop().is_some())); + } +} + +impl DbPanicContext { + pub fn enter(frame: String) -> DbPanicContext { + #[expect(clippy::print_stderr, reason = "already panicking anyway")] + fn set_hook() { + let default_hook = panic::take_hook(); + panic::set_hook(Box::new(move |panic_info| { + default_hook(panic_info); + if let Some(backtrace) = salsa::Backtrace::capture() { + eprintln!("{backtrace:#}"); + } + DbPanicContext::with_ctx(|ctx| { + if !ctx.is_empty() { + eprintln!("additional context:"); + for (idx, frame) in ctx.iter().enumerate() { + eprintln!("{idx:>4}: {frame}\n"); + } + } + }); + })); + } + + static SET_HOOK: Once = Once::new(); + SET_HOOK.call_once(set_hook); + + Self::with_ctx(|ctx| ctx.push(frame)); + DbPanicContext } - fn relevant_crates(&self, file_id: FileId) -> Arc<[CrateId]> { - let _p = tracing::info_span!("relevant_crates").entered(); - let source_root = self.0.file_source_root(file_id); - self.0.source_root_crates(source_root) + fn with_ctx(f: impl FnOnce(&mut Vec)) { + thread_local! { + static CTX: RefCell> = const { RefCell::new(Vec::new()) }; + } + CTX.with(|ctx| f(&mut ctx.borrow_mut())); } } diff --git a/src/tools/rust-analyzer/crates/cfg/Cargo.toml b/src/tools/rust-analyzer/crates/cfg/Cargo.toml index e887368ef28f8..d7764a16c044c 100644 --- a/src/tools/rust-analyzer/crates/cfg/Cargo.toml +++ b/src/tools/rust-analyzer/crates/cfg/Cargo.toml @@ -20,13 +20,13 @@ tt = { workspace = true, optional = true } intern.workspace = true [dev-dependencies] -expect-test = "1.4.1" -oorandom = "11.1.3" +expect-test = "1.5.1" +oorandom = "11.1.5" # We depend on both individually instead of using `features = ["derive"]` to microoptimize the # build graph: if the feature was enabled, syn would be built early on in the graph if `smolstr` # supports `arbitrary`. This way, we avoid feature unification. -arbitrary = "1.3.2" -derive_arbitrary = "1.3.2" +arbitrary = "1.4.1" +derive_arbitrary = "1.4.1" # local deps syntax-bridge.workspace = true diff --git a/src/tools/rust-analyzer/crates/cfg/src/lib.rs b/src/tools/rust-analyzer/crates/cfg/src/lib.rs index 08545b685119d..906106ca5db0b 100644 --- a/src/tools/rust-analyzer/crates/cfg/src/lib.rs +++ b/src/tools/rust-analyzer/crates/cfg/src/lib.rs @@ -9,7 +9,7 @@ use std::fmt; use rustc_hash::FxHashSet; -use intern::{sym, Symbol}; +use intern::{Symbol, sym}; pub use cfg_expr::{CfgAtom, CfgExpr}; pub use dnf::DnfExpr; @@ -31,7 +31,7 @@ pub struct CfgOptions { impl Default for CfgOptions { fn default() -> Self { - Self { enabled: FxHashSet::from_iter([CfgAtom::Flag(sym::true_.clone())]) } + Self { enabled: FxHashSet::from_iter([CfgAtom::Flag(sym::true_)]) } } } @@ -104,6 +104,17 @@ impl CfgOptions { _ => None, }) } + + pub fn to_hashable(&self) -> HashableCfgOptions { + let mut enabled = self.enabled.iter().cloned().collect::>(); + enabled.sort_unstable(); + HashableCfgOptions { _enabled: enabled } + } + + #[inline] + pub fn shrink_to_fit(&mut self) { + self.enabled.shrink_to_fit(); + } } impl Extend for CfgOptions { @@ -256,3 +267,9 @@ impl fmt::Display for InactiveReason { Ok(()) } } + +/// A `CfgOptions` that implements `Hash`, for the sake of hashing only. +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct HashableCfgOptions { + _enabled: Box<[CfgAtom]>, +} diff --git a/src/tools/rust-analyzer/crates/cfg/src/tests.rs b/src/tools/rust-analyzer/crates/cfg/src/tests.rs index 6d87d83ad9300..6766748097f00 100644 --- a/src/tools/rust-analyzer/crates/cfg/src/tests.rs +++ b/src/tools/rust-analyzer/crates/cfg/src/tests.rs @@ -1,10 +1,11 @@ use arbitrary::{Arbitrary, Unstructured}; -use expect_test::{expect, Expect}; +use expect_test::{Expect, expect}; use intern::Symbol; -use syntax::{ast, AstNode, Edition}; +use syntax::{AstNode, Edition, ast}; use syntax_bridge::{ - dummy_test_span_utils::{DummyTestSpanMap, DUMMY}, - syntax_node_to_token_tree, DocCommentDesugarMode, + DocCommentDesugarMode, + dummy_test_span_utils::{DUMMY, DummyTestSpanMap}, + syntax_node_to_token_tree, }; use crate::{CfgAtom, CfgExpr, CfgOptions, DnfExpr}; diff --git a/src/tools/rust-analyzer/crates/edition/src/lib.rs b/src/tools/rust-analyzer/crates/edition/src/lib.rs index 7e9c94af408c1..f1a1fe596493b 100644 --- a/src/tools/rust-analyzer/crates/edition/src/lib.rs +++ b/src/tools/rust-analyzer/crates/edition/src/lib.rs @@ -15,9 +15,19 @@ pub enum Edition { impl Edition { pub const DEFAULT: Edition = Edition::Edition2015; pub const LATEST: Edition = Edition::Edition2024; - pub const CURRENT: Edition = Edition::Edition2021; + pub const CURRENT: Edition = Edition::Edition2024; /// The current latest stable edition, note this is usually not the right choice in code. - pub const CURRENT_FIXME: Edition = Edition::Edition2021; + pub const CURRENT_FIXME: Edition = Edition::Edition2024; + + pub fn from_u32(u32: u32) -> Edition { + match u32 { + 0 => Edition::Edition2015, + 1 => Edition::Edition2018, + 2 => Edition::Edition2021, + 3 => Edition::Edition2024, + _ => panic!("invalid edition"), + } + } pub fn at_least_2024(self) -> bool { self >= Edition::Edition2024 @@ -31,6 +41,15 @@ impl Edition { self >= Edition::Edition2018 } + pub fn number(&self) -> usize { + match self { + Edition::Edition2015 => 2015, + Edition::Edition2018 => 2018, + Edition::Edition2021 => 2021, + Edition::Edition2024 => 2024, + } + } + pub fn iter() -> impl Iterator { [Edition::Edition2015, Edition::Edition2018, Edition::Edition2021, Edition::Edition2024] .iter() diff --git a/src/tools/rust-analyzer/crates/hir-def/Cargo.toml b/src/tools/rust-analyzer/crates/hir-def/Cargo.toml index a22961c26c84c..c1c89e8d1cc3a 100644 --- a/src/tools/rust-analyzer/crates/hir-def/Cargo.toml +++ b/src/tools/rust-analyzer/crates/hir-def/Cargo.toml @@ -14,8 +14,7 @@ rust-version.workspace = true [dependencies] arrayvec.workspace = true bitflags.workspace = true -cov-mark = "2.0.0-pre.1" -dashmap.workspace = true +cov-mark = "2.0.0" drop_bomb = "0.1.5" either.workspace = true fst = { version = "0.4.7", default-features = false } @@ -25,12 +24,13 @@ la-arena.workspace = true rustc-hash.workspace = true tracing.workspace = true smallvec.workspace = true -hashbrown.workspace = true triomphe.workspace = true -rustc_apfloat = "0.2.0" +rustc_apfloat = "0.2.2" text-size.workspace = true +salsa.workspace = true +salsa-macros.workspace = true +query-group.workspace = true -ra-ap-rustc_hashes.workspace = true ra-ap-rustc_parse_format.workspace = true ra-ap-rustc_abi.workspace = true @@ -44,7 +44,7 @@ mbe.workspace = true cfg.workspace = true tt.workspace = true span.workspace = true - +thin-vec = "0.2.14" [dev-dependencies] expect-test.workspace = true @@ -53,6 +53,7 @@ expect-test.workspace = true test-utils.workspace = true test-fixture.workspace = true syntax-bridge.workspace = true + [features] in-rust-tree = ["hir-expand/in-rust-tree"] diff --git a/src/tools/rust-analyzer/crates/hir-def/src/attr.rs b/src/tools/rust-analyzer/crates/hir-def/src/attr.rs index 710bffcefe900..bb6222b1d4648 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/attr.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/attr.rs @@ -1,32 +1,34 @@ //! A higher level attributes based on TokenTree, with also some shortcuts. -use std::{borrow::Cow, hash::Hash, ops}; +use std::{borrow::Cow, convert::identity, hash::Hash, ops}; -use base_db::CrateId; +use base_db::Crate; use cfg::{CfgExpr, CfgOptions}; use either::Either; use hir_expand::{ - attrs::{collect_attrs, Attr, AttrId, RawAttrs}, HirFileId, InFile, + attrs::{Attr, AttrId, RawAttrs, collect_attrs}, + span_map::SpanMapRef, }; -use intern::{sym, Symbol}; +use intern::{Symbol, sym}; use la_arena::{ArenaMap, Idx, RawIdx}; use mbe::DelimiterKind; +use rustc_abi::ReprOptions; use syntax::{ - ast::{self, HasAttrs}, AstPtr, + ast::{self, HasAttrs}, }; use triomphe::Arc; use tt::iter::{TtElement, TtIter}; use crate::{ + AdtId, AttrDefId, GenericParamId, HasModule, ItemTreeLoc, LocalFieldId, Lookup, MacroId, + VariantId, db::DefDatabase, item_tree::{AttrOwner, FieldParent, ItemTreeNode}, lang_item::LangItem, nameres::{ModuleOrigin, ModuleSource}, src::{HasChildSource, HasSource}, - AdtId, AttrDefId, GenericParamId, HasModule, ItemTreeLoc, LocalFieldId, Lookup, MacroId, - VariantId, }; /// Desugared attributes of an item post `cfg_attr` expansion. @@ -44,8 +46,27 @@ impl Attrs { (**self).iter().find(|attr| attr.id == id) } - pub(crate) fn filter(db: &dyn DefDatabase, krate: CrateId, raw_attrs: RawAttrs) -> Attrs { - Attrs(raw_attrs.filter(db.upcast(), krate)) + pub(crate) fn expand_cfg_attr( + db: &dyn DefDatabase, + krate: Crate, + raw_attrs: RawAttrs, + ) -> Attrs { + Attrs(raw_attrs.expand_cfg_attr(db, krate)) + } + + pub(crate) fn is_cfg_enabled_for( + db: &dyn DefDatabase, + owner: &dyn ast::HasAttrs, + span_map: SpanMapRef<'_>, + cfg_options: &CfgOptions, + ) -> Result<(), CfgExpr> { + RawAttrs::attrs_iter_expanded::(db, owner, span_map, cfg_options) + .filter_map(|attr| attr.cfg()) + .find_map(|cfg| match cfg_options.check(&cfg).is_none_or(identity) { + true => None, + false => Some(cfg), + }) + .map_or(Ok(()), Err) } } @@ -75,8 +96,6 @@ impl Attrs { let _p = tracing::info_span!("fields_attrs_query").entered(); // FIXME: There should be some proper form of mapping between item tree field ids and hir field ids let mut res = ArenaMap::default(); - - let crate_graph = db.crate_graph(); let item_tree; let (parent, fields, krate) = match v { VariantId::EnumVariantId(it) => { @@ -84,7 +103,7 @@ impl Attrs { let krate = loc.parent.lookup(db).container.krate; item_tree = loc.id.item_tree(db); let variant = &item_tree[loc.id.value]; - (FieldParent::Variant(loc.id.value), &variant.fields, krate) + (FieldParent::EnumVariant(loc.id.value), &variant.fields, krate) } VariantId::StructId(it) => { let loc = it.lookup(db); @@ -102,7 +121,7 @@ impl Attrs { } }; - let cfg_options = &crate_graph[krate].cfg_options; + let cfg_options = krate.cfg_options(db); let mut idx = 0; for (id, _field) in fields.iter().enumerate() { @@ -118,17 +137,20 @@ impl Attrs { } impl Attrs { - pub fn by_key<'attrs>(&'attrs self, key: &'attrs Symbol) -> AttrQuery<'attrs> { + #[inline] + pub fn by_key(&self, key: Symbol) -> AttrQuery<'_> { AttrQuery { attrs: self, key } } + #[inline] pub fn rust_analyzer_tool(&self) -> impl Iterator { self.iter() .filter(|&attr| attr.path.segments().first().is_some_and(|s| *s == sym::rust_analyzer)) } + #[inline] pub fn cfg(&self) -> Option { - let mut cfgs = self.by_key(&sym::cfg).tt_values().map(CfgExpr::parse); + let mut cfgs = self.by_key(sym::cfg).tt_values().map(CfgExpr::parse); let first = cfgs.next()?; match cfgs.next() { Some(second) => { @@ -139,10 +161,12 @@ impl Attrs { } } + #[inline] pub fn cfgs(&self) -> impl Iterator + '_ { - self.by_key(&sym::cfg).tt_values().map(CfgExpr::parse) + self.by_key(sym::cfg).tt_values().map(CfgExpr::parse) } + #[inline] pub(crate) fn is_cfg_enabled(&self, cfg_options: &CfgOptions) -> bool { match self.cfg() { None => true, @@ -150,80 +174,227 @@ impl Attrs { } } + #[inline] pub fn lang(&self) -> Option<&Symbol> { - self.by_key(&sym::lang).string_value() + self.by_key(sym::lang).string_value() } + #[inline] pub fn lang_item(&self) -> Option { - self.by_key(&sym::lang).string_value().and_then(LangItem::from_symbol) + self.by_key(sym::lang).string_value().and_then(LangItem::from_symbol) } + #[inline] pub fn has_doc_hidden(&self) -> bool { - self.by_key(&sym::doc).tt_values().any(|tt| { + self.by_key(sym::doc).tt_values().any(|tt| { tt.top_subtree().delimiter.kind == DelimiterKind::Parenthesis && matches!(tt.token_trees().flat_tokens(), [tt::TokenTree::Leaf(tt::Leaf::Ident(ident))] if ident.sym == sym::hidden) }) } + #[inline] pub fn has_doc_notable_trait(&self) -> bool { - self.by_key(&sym::doc).tt_values().any(|tt| { + self.by_key(sym::doc).tt_values().any(|tt| { tt.top_subtree().delimiter.kind == DelimiterKind::Parenthesis && matches!(tt.token_trees().flat_tokens(), [tt::TokenTree::Leaf(tt::Leaf::Ident(ident))] if ident.sym == sym::notable_trait) }) } + #[inline] pub fn doc_exprs(&self) -> impl Iterator + '_ { - self.by_key(&sym::doc).tt_values().map(DocExpr::parse) + self.by_key(sym::doc).tt_values().map(DocExpr::parse) } + #[inline] pub fn doc_aliases(&self) -> impl Iterator + '_ { self.doc_exprs().flat_map(|doc_expr| doc_expr.aliases().to_vec()) } + #[inline] pub fn export_name(&self) -> Option<&Symbol> { - self.by_key(&sym::export_name).string_value() + self.by_key(sym::export_name).string_value() } + #[inline] pub fn is_proc_macro(&self) -> bool { - self.by_key(&sym::proc_macro).exists() + self.by_key(sym::proc_macro).exists() } + #[inline] pub fn is_proc_macro_attribute(&self) -> bool { - self.by_key(&sym::proc_macro_attribute).exists() + self.by_key(sym::proc_macro_attribute).exists() } + #[inline] pub fn is_proc_macro_derive(&self) -> bool { - self.by_key(&sym::proc_macro_derive).exists() + self.by_key(sym::proc_macro_derive).exists() } + #[inline] pub fn is_test(&self) -> bool { self.iter().any(|it| { it.path() .segments() .iter() .rev() - .zip( - [sym::core.clone(), sym::prelude.clone(), sym::v1.clone(), sym::test.clone()] - .iter() - .rev(), - ) + .zip([sym::core, sym::prelude, sym::v1, sym::test].iter().rev()) .all(|it| it.0 == it.1) }) } + #[inline] pub fn is_ignore(&self) -> bool { - self.by_key(&sym::ignore).exists() + self.by_key(sym::ignore).exists() } + #[inline] pub fn is_bench(&self) -> bool { - self.by_key(&sym::bench).exists() + self.by_key(sym::bench).exists() } + #[inline] pub fn is_unstable(&self) -> bool { - self.by_key(&sym::unstable).exists() + self.by_key(sym::unstable).exists() + } + + #[inline] + pub fn rustc_legacy_const_generics(&self) -> Option>> { + self.by_key(sym::rustc_legacy_const_generics) + .tt_values() + .next() + .map(parse_rustc_legacy_const_generics) + .filter(|it| !it.is_empty()) + .map(Box::new) + } + + #[inline] + pub fn repr(&self) -> Option { + self.by_key(sym::repr).tt_values().filter_map(parse_repr_tt).fold(None, |acc, repr| { + acc.map_or(Some(repr), |mut acc| { + merge_repr(&mut acc, repr); + Some(acc) + }) + }) + } +} + +fn parse_rustc_legacy_const_generics(tt: &crate::tt::TopSubtree) -> Box<[u32]> { + let mut indices = Vec::new(); + let mut iter = tt.iter(); + while let (Some(first), second) = (iter.next(), iter.next()) { + match first { + TtElement::Leaf(tt::Leaf::Literal(lit)) => match lit.symbol.as_str().parse() { + Ok(index) => indices.push(index), + Err(_) => break, + }, + _ => break, + } + + if let Some(comma) = second { + match comma { + TtElement::Leaf(tt::Leaf::Punct(punct)) if punct.char == ',' => {} + _ => break, + } + } + } + + indices.into_boxed_slice() +} + +fn merge_repr(this: &mut ReprOptions, other: ReprOptions) { + let ReprOptions { int, align, pack, flags, field_shuffle_seed: _ } = this; + flags.insert(other.flags); + *align = (*align).max(other.align); + *pack = match (*pack, other.pack) { + (Some(pack), None) | (None, Some(pack)) => Some(pack), + _ => (*pack).min(other.pack), + }; + if other.int.is_some() { + *int = other.int; } } +fn parse_repr_tt(tt: &crate::tt::TopSubtree) -> Option { + use crate::builtin_type::{BuiltinInt, BuiltinUint}; + use rustc_abi::{Align, Integer, IntegerType, ReprFlags, ReprOptions}; + + match tt.top_subtree().delimiter { + tt::Delimiter { kind: DelimiterKind::Parenthesis, .. } => {} + _ => return None, + } + + let mut acc = ReprOptions::default(); + let mut tts = tt.iter(); + while let Some(tt) = tts.next() { + let TtElement::Leaf(tt::Leaf::Ident(ident)) = tt else { + continue; + }; + let repr = match &ident.sym { + s if *s == sym::packed => { + let pack = if let Some(TtElement::Subtree(_, mut tt_iter)) = tts.peek() { + tts.next(); + if let Some(TtElement::Leaf(tt::Leaf::Literal(lit))) = tt_iter.next() { + lit.symbol.as_str().parse().unwrap_or_default() + } else { + 0 + } + } else { + 0 + }; + let pack = Some(Align::from_bytes(pack).unwrap_or(Align::ONE)); + ReprOptions { pack, ..Default::default() } + } + s if *s == sym::align => { + let mut align = None; + if let Some(TtElement::Subtree(_, mut tt_iter)) = tts.peek() { + tts.next(); + if let Some(TtElement::Leaf(tt::Leaf::Literal(lit))) = tt_iter.next() { + if let Ok(a) = lit.symbol.as_str().parse() { + align = Align::from_bytes(a).ok(); + } + } + } + ReprOptions { align, ..Default::default() } + } + s if *s == sym::C => ReprOptions { flags: ReprFlags::IS_C, ..Default::default() }, + s if *s == sym::transparent => { + ReprOptions { flags: ReprFlags::IS_TRANSPARENT, ..Default::default() } + } + s if *s == sym::simd => ReprOptions { flags: ReprFlags::IS_SIMD, ..Default::default() }, + repr => { + let mut int = None; + if let Some(builtin) = BuiltinInt::from_suffix_sym(repr) + .map(Either::Left) + .or_else(|| BuiltinUint::from_suffix_sym(repr).map(Either::Right)) + { + int = Some(match builtin { + Either::Left(bi) => match bi { + BuiltinInt::Isize => IntegerType::Pointer(true), + BuiltinInt::I8 => IntegerType::Fixed(Integer::I8, true), + BuiltinInt::I16 => IntegerType::Fixed(Integer::I16, true), + BuiltinInt::I32 => IntegerType::Fixed(Integer::I32, true), + BuiltinInt::I64 => IntegerType::Fixed(Integer::I64, true), + BuiltinInt::I128 => IntegerType::Fixed(Integer::I128, true), + }, + Either::Right(bu) => match bu { + BuiltinUint::Usize => IntegerType::Pointer(false), + BuiltinUint::U8 => IntegerType::Fixed(Integer::I8, false), + BuiltinUint::U16 => IntegerType::Fixed(Integer::I16, false), + BuiltinUint::U32 => IntegerType::Fixed(Integer::I32, false), + BuiltinUint::U64 => IntegerType::Fixed(Integer::I64, false), + BuiltinUint::U128 => IntegerType::Fixed(Integer::I128, false), + }, + }); + } + ReprOptions { int, ..Default::default() } + } + }; + merge_repr(&mut acc, repr); + } + + Some(acc) +} + #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub enum DocAtom { /// eg. `#[doc(hidden)]` @@ -371,38 +542,41 @@ impl AttrsWithOwner { GenericParamId::ConstParamId(it) => { let src = it.parent().child_source(db); // FIXME: We should be never getting `None` here. - match src.value.get(it.local_id()) { - Some(val) => RawAttrs::from_attrs_owner( - db.upcast(), - src.with_value(val), + return Attrs(match src.value.get(it.local_id()) { + Some(val) => RawAttrs::new_expanded( + db, + val, db.span_map(src.file_id).as_ref(), + def.krate(db).cfg_options(db), ), None => RawAttrs::EMPTY, - } + }); } GenericParamId::TypeParamId(it) => { let src = it.parent().child_source(db); // FIXME: We should be never getting `None` here. - match src.value.get(it.local_id()) { - Some(val) => RawAttrs::from_attrs_owner( - db.upcast(), - src.with_value(val), + return Attrs(match src.value.get(it.local_id()) { + Some(val) => RawAttrs::new_expanded( + db, + val, db.span_map(src.file_id).as_ref(), + def.krate(db).cfg_options(db), ), None => RawAttrs::EMPTY, - } + }); } GenericParamId::LifetimeParamId(it) => { let src = it.parent.child_source(db); // FIXME: We should be never getting `None` here. - match src.value.get(it.local_id) { - Some(val) => RawAttrs::from_attrs_owner( - db.upcast(), - src.with_value(val), + return Attrs(match src.value.get(it.local_id) { + Some(val) => RawAttrs::new_expanded( + db, + val, db.span_map(src.file_id).as_ref(), + def.krate(db).cfg_options(db), ), None => RawAttrs::EMPTY, - } + }); } }, AttrDefId::ExternBlockId(it) => attrs_from_item_tree_loc(db, it), @@ -410,7 +584,7 @@ impl AttrsWithOwner { AttrDefId::UseId(it) => attrs_from_item_tree_loc(db, it), }; - let attrs = raw_attrs.filter(db.upcast(), def.krate(db)); + let attrs = raw_attrs.expand_cfg_attr(db, def.krate(db)); Attrs(attrs) } @@ -547,36 +721,42 @@ impl AttrSourceMap { } } -#[derive(Debug, Clone, Copy)] +#[derive(Debug, Clone)] pub struct AttrQuery<'attr> { attrs: &'attr Attrs, - key: &'attr Symbol, + key: Symbol, } impl<'attr> AttrQuery<'attr> { + #[inline] pub fn tt_values(self) -> impl Iterator { self.attrs().filter_map(|attr| attr.token_tree_value()) } + #[inline] pub fn string_value(self) -> Option<&'attr Symbol> { self.attrs().find_map(|attr| attr.string_value()) } + #[inline] pub fn string_value_with_span(self) -> Option<(&'attr Symbol, span::Span)> { self.attrs().find_map(|attr| attr.string_value_with_span()) } + #[inline] pub fn string_value_unescape(self) -> Option> { self.attrs().find_map(|attr| attr.string_value_unescape()) } + #[inline] pub fn exists(self) -> bool { self.attrs().next().is_some() } + #[inline] pub fn attrs(self) -> impl Iterator + Clone { let key = self.key; - self.attrs.iter().filter(move |attr| attr.path.as_ident().is_some_and(|s| *s == *key)) + self.attrs.iter().filter(move |attr| attr.path.as_ident().is_some_and(|s| *s == key)) } /// Find string value for a specific key inside token tree @@ -585,10 +765,11 @@ impl<'attr> AttrQuery<'attr> { /// #[doc(html_root_url = "url")] /// ^^^^^^^^^^^^^ key /// ``` - pub fn find_string_value_in_tt(self, key: &'attr Symbol) -> Option<&'attr str> { + #[inline] + pub fn find_string_value_in_tt(self, key: Symbol) -> Option<&'attr str> { self.tt_values().find_map(|tt| { let name = tt.iter() - .skip_while(|tt| !matches!(tt, TtElement::Leaf(tt::Leaf::Ident(tt::Ident { sym, ..} )) if *sym == *key)) + .skip_while(|tt| !matches!(tt, TtElement::Leaf(tt::Leaf::Ident(tt::Ident { sym, ..} )) if *sym == key)) .nth(2); match name { @@ -601,17 +782,14 @@ impl<'attr> AttrQuery<'attr> { fn any_has_attrs<'db>( db: &(dyn DefDatabase + 'db), - id: impl Lookup< - Database<'db> = dyn DefDatabase + 'db, - Data = impl HasSource, - >, + id: impl Lookup>, ) -> InFile { id.lookup(db).source(db).map(ast::AnyHasAttrs::new) } fn attrs_from_item_tree_loc<'db, N: ItemTreeNode>( db: &(dyn DefDatabase + 'db), - lookup: impl Lookup = dyn DefDatabase + 'db, Data = impl ItemTreeLoc>, + lookup: impl Lookup>, ) -> RawAttrs { let id = lookup.lookup(db).item_tree_id(); let tree = id.item_tree(db); @@ -649,8 +827,8 @@ mod tests { use hir_expand::span_map::{RealSpanMap, SpanMap}; use span::FileId; - use syntax::{ast, AstNode, TextRange}; - use syntax_bridge::{syntax_node_to_token_tree, DocCommentDesugarMode}; + use syntax::{AstNode, TextRange, ast}; + use syntax_bridge::{DocCommentDesugarMode, syntax_node_to_token_tree}; use crate::attr::{DocAtom, DocExpr}; diff --git a/src/tools/rust-analyzer/crates/hir-def/src/builtin_type.rs b/src/tools/rust-analyzer/crates/hir-def/src/builtin_type.rs index 14b9af84e6ffb..8b61c6a9f0349 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/builtin_type.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/builtin_type.rs @@ -6,7 +6,7 @@ use std::fmt; use hir_expand::name::{AsName, Name}; -use intern::{sym, Symbol}; +use intern::{Symbol, sym}; /// Different signed int types. #[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)] pub enum BuiltinInt { @@ -51,28 +51,28 @@ impl BuiltinType { #[rustfmt::skip] pub fn all_builtin_types() -> [(Name, BuiltinType); 19] { [ - (Name::new_symbol_root(sym::char.clone()), BuiltinType::Char), - (Name::new_symbol_root(sym::bool.clone()), BuiltinType::Bool), - (Name::new_symbol_root(sym::str.clone()), BuiltinType::Str), - - (Name::new_symbol_root(sym::isize.clone()), BuiltinType::Int(BuiltinInt::Isize)), - (Name::new_symbol_root(sym::i8.clone()), BuiltinType::Int(BuiltinInt::I8)), - (Name::new_symbol_root(sym::i16.clone()), BuiltinType::Int(BuiltinInt::I16)), - (Name::new_symbol_root(sym::i32.clone()), BuiltinType::Int(BuiltinInt::I32)), - (Name::new_symbol_root(sym::i64.clone()), BuiltinType::Int(BuiltinInt::I64)), - (Name::new_symbol_root(sym::i128.clone()), BuiltinType::Int(BuiltinInt::I128)), - - (Name::new_symbol_root(sym::usize.clone()), BuiltinType::Uint(BuiltinUint::Usize)), - (Name::new_symbol_root(sym::u8.clone()), BuiltinType::Uint(BuiltinUint::U8)), - (Name::new_symbol_root(sym::u16.clone()), BuiltinType::Uint(BuiltinUint::U16)), - (Name::new_symbol_root(sym::u32.clone()), BuiltinType::Uint(BuiltinUint::U32)), - (Name::new_symbol_root(sym::u64.clone()), BuiltinType::Uint(BuiltinUint::U64)), - (Name::new_symbol_root(sym::u128.clone()), BuiltinType::Uint(BuiltinUint::U128)), - - (Name::new_symbol_root(sym::f16.clone()), BuiltinType::Float(BuiltinFloat::F16)), - (Name::new_symbol_root(sym::f32.clone()), BuiltinType::Float(BuiltinFloat::F32)), - (Name::new_symbol_root(sym::f64.clone()), BuiltinType::Float(BuiltinFloat::F64)), - (Name::new_symbol_root(sym::f128.clone()), BuiltinType::Float(BuiltinFloat::F128)), + (Name::new_symbol_root(sym::char), BuiltinType::Char), + (Name::new_symbol_root(sym::bool), BuiltinType::Bool), + (Name::new_symbol_root(sym::str), BuiltinType::Str), + + (Name::new_symbol_root(sym::isize), BuiltinType::Int(BuiltinInt::Isize)), + (Name::new_symbol_root(sym::i8), BuiltinType::Int(BuiltinInt::I8)), + (Name::new_symbol_root(sym::i16), BuiltinType::Int(BuiltinInt::I16)), + (Name::new_symbol_root(sym::i32), BuiltinType::Int(BuiltinInt::I32)), + (Name::new_symbol_root(sym::i64), BuiltinType::Int(BuiltinInt::I64)), + (Name::new_symbol_root(sym::i128), BuiltinType::Int(BuiltinInt::I128)), + + (Name::new_symbol_root(sym::usize), BuiltinType::Uint(BuiltinUint::Usize)), + (Name::new_symbol_root(sym::u8), BuiltinType::Uint(BuiltinUint::U8)), + (Name::new_symbol_root(sym::u16), BuiltinType::Uint(BuiltinUint::U16)), + (Name::new_symbol_root(sym::u32), BuiltinType::Uint(BuiltinUint::U32)), + (Name::new_symbol_root(sym::u64), BuiltinType::Uint(BuiltinUint::U64)), + (Name::new_symbol_root(sym::u128), BuiltinType::Uint(BuiltinUint::U128)), + + (Name::new_symbol_root(sym::f16), BuiltinType::Float(BuiltinFloat::F16)), + (Name::new_symbol_root(sym::f32), BuiltinType::Float(BuiltinFloat::F32)), + (Name::new_symbol_root(sym::f64), BuiltinType::Float(BuiltinFloat::F64)), + (Name::new_symbol_root(sym::f128), BuiltinType::Float(BuiltinFloat::F128)), ] } @@ -86,30 +86,30 @@ impl BuiltinType { impl AsName for BuiltinType { fn as_name(&self) -> Name { match self { - BuiltinType::Char => Name::new_symbol_root(sym::char.clone()), - BuiltinType::Bool => Name::new_symbol_root(sym::bool.clone()), - BuiltinType::Str => Name::new_symbol_root(sym::str.clone()), + BuiltinType::Char => Name::new_symbol_root(sym::char), + BuiltinType::Bool => Name::new_symbol_root(sym::bool), + BuiltinType::Str => Name::new_symbol_root(sym::str), BuiltinType::Int(it) => match it { - BuiltinInt::Isize => Name::new_symbol_root(sym::isize.clone()), - BuiltinInt::I8 => Name::new_symbol_root(sym::i8.clone()), - BuiltinInt::I16 => Name::new_symbol_root(sym::i16.clone()), - BuiltinInt::I32 => Name::new_symbol_root(sym::i32.clone()), - BuiltinInt::I64 => Name::new_symbol_root(sym::i64.clone()), - BuiltinInt::I128 => Name::new_symbol_root(sym::i128.clone()), + BuiltinInt::Isize => Name::new_symbol_root(sym::isize), + BuiltinInt::I8 => Name::new_symbol_root(sym::i8), + BuiltinInt::I16 => Name::new_symbol_root(sym::i16), + BuiltinInt::I32 => Name::new_symbol_root(sym::i32), + BuiltinInt::I64 => Name::new_symbol_root(sym::i64), + BuiltinInt::I128 => Name::new_symbol_root(sym::i128), }, BuiltinType::Uint(it) => match it { - BuiltinUint::Usize => Name::new_symbol_root(sym::usize.clone()), - BuiltinUint::U8 => Name::new_symbol_root(sym::u8.clone()), - BuiltinUint::U16 => Name::new_symbol_root(sym::u16.clone()), - BuiltinUint::U32 => Name::new_symbol_root(sym::u32.clone()), - BuiltinUint::U64 => Name::new_symbol_root(sym::u64.clone()), - BuiltinUint::U128 => Name::new_symbol_root(sym::u128.clone()), + BuiltinUint::Usize => Name::new_symbol_root(sym::usize), + BuiltinUint::U8 => Name::new_symbol_root(sym::u8), + BuiltinUint::U16 => Name::new_symbol_root(sym::u16), + BuiltinUint::U32 => Name::new_symbol_root(sym::u32), + BuiltinUint::U64 => Name::new_symbol_root(sym::u64), + BuiltinUint::U128 => Name::new_symbol_root(sym::u128), }, BuiltinType::Float(it) => match it { - BuiltinFloat::F16 => Name::new_symbol_root(sym::f16.clone()), - BuiltinFloat::F32 => Name::new_symbol_root(sym::f32.clone()), - BuiltinFloat::F64 => Name::new_symbol_root(sym::f64.clone()), - BuiltinFloat::F128 => Name::new_symbol_root(sym::f128.clone()), + BuiltinFloat::F16 => Name::new_symbol_root(sym::f16), + BuiltinFloat::F32 => Name::new_symbol_root(sym::f32), + BuiltinFloat::F64 => Name::new_symbol_root(sym::f64), + BuiltinFloat::F128 => Name::new_symbol_root(sym::f128), }, } } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/data.rs b/src/tools/rust-analyzer/crates/hir-def/src/data.rs deleted file mode 100644 index bec662787728c..0000000000000 --- a/src/tools/rust-analyzer/crates/hir-def/src/data.rs +++ /dev/null @@ -1,843 +0,0 @@ -//! Contains basic data about various HIR declarations. - -pub mod adt; - -use base_db::CrateId; -use hir_expand::{ - name::Name, AstId, ExpandResult, HirFileId, InFile, MacroCallId, MacroCallKind, MacroDefKind, -}; -use intern::{sym, Symbol}; -use la_arena::{Idx, RawIdx}; -use smallvec::SmallVec; -use syntax::{ast, Parse}; -use triomphe::Arc; -use tt::iter::TtElement; - -use crate::{ - db::DefDatabase, - expander::{Expander, Mark}, - item_tree::{self, AssocItem, FnFlags, ItemTree, ItemTreeId, MacroCall, ModItem, TreeId}, - macro_call_as_call_id, - nameres::{ - attr_resolution::ResolvedAttr, - diagnostics::{DefDiagnostic, DefDiagnostics}, - proc_macro::{parse_macro_name_and_helper_attrs, ProcMacroKind}, - DefMap, MacroSubNs, - }, - path::ImportAlias, - type_ref::{TraitRef, TypeBound, TypeRefId, TypesMap}, - visibility::RawVisibility, - AssocItemId, AstIdWithPath, ConstId, ConstLoc, ExternCrateId, FunctionId, FunctionLoc, - HasModule, ImplId, Intern, ItemContainerId, ItemLoc, Lookup, Macro2Id, MacroRulesId, ModuleId, - ProcMacroId, StaticId, TraitAliasId, TraitId, TypeAliasId, TypeAliasLoc, -}; - -#[derive(Debug, Clone, PartialEq, Eq)] -pub struct FunctionData { - pub name: Name, - pub params: Box<[TypeRefId]>, - pub ret_type: TypeRefId, - pub visibility: RawVisibility, - pub abi: Option, - pub legacy_const_generics_indices: Option>>, - pub rustc_allow_incoherent_impl: bool, - pub types_map: Arc, - flags: FnFlags, -} - -impl FunctionData { - pub(crate) fn fn_data_query(db: &dyn DefDatabase, func: FunctionId) -> Arc { - let loc = func.lookup(db); - let krate = loc.container.module(db).krate; - let item_tree = loc.id.item_tree(db); - let func = &item_tree[loc.id.value]; - let visibility = if let ItemContainerId::TraitId(trait_id) = loc.container { - trait_vis(db, trait_id) - } else { - item_tree[func.visibility].clone() - }; - - let crate_graph = db.crate_graph(); - let cfg_options = &crate_graph[krate].cfg_options; - let attr_owner = |idx| { - item_tree::AttrOwner::Param(loc.id.value, Idx::from_raw(RawIdx::from(idx as u32))) - }; - - let mut flags = func.flags; - if flags.contains(FnFlags::HAS_SELF_PARAM) { - // If there's a self param in the syntax, but it is cfg'd out, remove the flag. - let is_cfgd_out = - !item_tree.attrs(db, krate, attr_owner(0usize)).is_cfg_enabled(cfg_options); - if is_cfgd_out { - cov_mark::hit!(cfgd_out_self_param); - flags.remove(FnFlags::HAS_SELF_PARAM); - } - } - if flags.contains(FnFlags::IS_VARARGS) { - if let Some((_, param)) = func.params.iter().enumerate().rev().find(|&(idx, _)| { - item_tree.attrs(db, krate, attr_owner(idx)).is_cfg_enabled(cfg_options) - }) { - if param.type_ref.is_some() { - flags.remove(FnFlags::IS_VARARGS); - } - } else { - flags.remove(FnFlags::IS_VARARGS); - } - } - - let attrs = item_tree.attrs(db, krate, ModItem::from(loc.id.value).into()); - let legacy_const_generics_indices = attrs - .by_key(&sym::rustc_legacy_const_generics) - .tt_values() - .next() - .map(parse_rustc_legacy_const_generics) - .filter(|it| !it.is_empty()) - .map(Box::new); - let rustc_allow_incoherent_impl = attrs.by_key(&sym::rustc_allow_incoherent_impl).exists(); - if flags.contains(FnFlags::HAS_UNSAFE_KW) - && attrs.by_key(&sym::rustc_deprecated_safe_2024).exists() - { - flags.remove(FnFlags::HAS_UNSAFE_KW); - flags.insert(FnFlags::DEPRECATED_SAFE_2024); - } - - if attrs.by_key(&sym::target_feature).exists() { - flags.insert(FnFlags::HAS_TARGET_FEATURE); - } - - Arc::new(FunctionData { - name: func.name.clone(), - params: func - .params - .iter() - .enumerate() - .filter(|&(idx, _)| { - item_tree.attrs(db, krate, attr_owner(idx)).is_cfg_enabled(cfg_options) - }) - .filter_map(|(_, param)| param.type_ref) - .collect(), - ret_type: func.ret_type, - visibility, - abi: func.abi.clone(), - legacy_const_generics_indices, - types_map: func.types_map.clone(), - flags, - rustc_allow_incoherent_impl, - }) - } - - pub fn has_body(&self) -> bool { - self.flags.contains(FnFlags::HAS_BODY) - } - - /// True if the first param is `self`. This is relevant to decide whether this - /// can be called as a method. - pub fn has_self_param(&self) -> bool { - self.flags.contains(FnFlags::HAS_SELF_PARAM) - } - - pub fn is_default(&self) -> bool { - self.flags.contains(FnFlags::HAS_DEFAULT_KW) - } - - pub fn is_const(&self) -> bool { - self.flags.contains(FnFlags::HAS_CONST_KW) - } - - pub fn is_async(&self) -> bool { - self.flags.contains(FnFlags::HAS_ASYNC_KW) - } - - pub fn is_unsafe(&self) -> bool { - self.flags.contains(FnFlags::HAS_UNSAFE_KW) - } - - pub fn is_deprecated_safe_2024(&self) -> bool { - self.flags.contains(FnFlags::DEPRECATED_SAFE_2024) - } - - pub fn is_safe(&self) -> bool { - self.flags.contains(FnFlags::HAS_SAFE_KW) - } - - pub fn is_varargs(&self) -> bool { - self.flags.contains(FnFlags::IS_VARARGS) - } - - pub fn has_target_feature(&self) -> bool { - self.flags.contains(FnFlags::HAS_TARGET_FEATURE) - } -} - -fn parse_rustc_legacy_const_generics(tt: &crate::tt::TopSubtree) -> Box<[u32]> { - let mut indices = Vec::new(); - let mut iter = tt.iter(); - while let (Some(first), second) = (iter.next(), iter.next()) { - match first { - TtElement::Leaf(tt::Leaf::Literal(lit)) => match lit.symbol.as_str().parse() { - Ok(index) => indices.push(index), - Err(_) => break, - }, - _ => break, - } - - if let Some(comma) = second { - match comma { - TtElement::Leaf(tt::Leaf::Punct(punct)) if punct.char == ',' => {} - _ => break, - } - } - } - - indices.into_boxed_slice() -} - -#[derive(Debug, Clone, PartialEq, Eq)] -pub struct TypeAliasData { - pub name: Name, - pub type_ref: Option, - pub visibility: RawVisibility, - pub is_extern: bool, - pub rustc_has_incoherent_inherent_impls: bool, - pub rustc_allow_incoherent_impl: bool, - /// Bounds restricting the type alias itself (eg. `type Ty: Bound;` in a trait or impl). - pub bounds: Box<[TypeBound]>, - pub types_map: Arc, -} - -impl TypeAliasData { - pub(crate) fn type_alias_data_query( - db: &dyn DefDatabase, - typ: TypeAliasId, - ) -> Arc { - let loc = typ.lookup(db); - let item_tree = loc.id.item_tree(db); - let typ = &item_tree[loc.id.value]; - let visibility = if let ItemContainerId::TraitId(trait_id) = loc.container { - trait_vis(db, trait_id) - } else { - item_tree[typ.visibility].clone() - }; - - let attrs = item_tree.attrs( - db, - loc.container.module(db).krate(), - ModItem::from(loc.id.value).into(), - ); - let rustc_has_incoherent_inherent_impls = - attrs.by_key(&sym::rustc_has_incoherent_inherent_impls).exists(); - let rustc_allow_incoherent_impl = attrs.by_key(&sym::rustc_allow_incoherent_impl).exists(); - - Arc::new(TypeAliasData { - name: typ.name.clone(), - type_ref: typ.type_ref, - visibility, - is_extern: matches!(loc.container, ItemContainerId::ExternBlockId(_)), - rustc_has_incoherent_inherent_impls, - rustc_allow_incoherent_impl, - bounds: typ.bounds.clone(), - types_map: typ.types_map.clone(), - }) - } -} - -bitflags::bitflags! { - #[derive(Debug, Clone, Copy, Eq, PartialEq, Default)] - pub struct TraitFlags: u8 { - const IS_AUTO = 1 << 0; - const IS_UNSAFE = 1 << 1; - const IS_FUNDAMENTAL = 1 << 2; - const RUSTC_HAS_INCOHERENT_INHERENT_IMPLS = 1 << 3; - const SKIP_ARRAY_DURING_METHOD_DISPATCH = 1 << 4; - const SKIP_BOXED_SLICE_DURING_METHOD_DISPATCH = 1 << 5; - const RUSTC_PAREN_SUGAR = 1 << 6; - } -} - -#[derive(Debug, Clone, PartialEq, Eq)] -pub struct TraitData { - pub name: Name, - pub items: Box<[(Name, AssocItemId)]>, - pub flags: TraitFlags, - pub visibility: RawVisibility, - // box it as the vec is usually empty anyways - pub macro_calls: Option, MacroCallId)>>>, -} - -impl TraitData { - #[inline] - pub(crate) fn trait_data_query(db: &dyn DefDatabase, tr: TraitId) -> Arc { - db.trait_data_with_diagnostics(tr).0 - } - - pub(crate) fn trait_data_with_diagnostics_query( - db: &dyn DefDatabase, - tr: TraitId, - ) -> (Arc, DefDiagnostics) { - let ItemLoc { container: module_id, id: tree_id } = tr.lookup(db); - let item_tree = tree_id.item_tree(db); - let tr_def = &item_tree[tree_id.value]; - let name = tr_def.name.clone(); - let visibility = item_tree[tr_def.visibility].clone(); - let attrs = item_tree.attrs(db, module_id.krate(), ModItem::from(tree_id.value).into()); - - let mut flags = TraitFlags::empty(); - - if tr_def.is_auto { - flags |= TraitFlags::IS_AUTO; - } - if tr_def.is_unsafe { - flags |= TraitFlags::IS_UNSAFE; - } - if attrs.by_key(&sym::fundamental).exists() { - flags |= TraitFlags::IS_FUNDAMENTAL; - } - if attrs.by_key(&sym::rustc_has_incoherent_inherent_impls).exists() { - flags |= TraitFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS; - } - if attrs.by_key(&sym::rustc_paren_sugar).exists() { - flags |= TraitFlags::RUSTC_PAREN_SUGAR; - } - - let mut skip_array_during_method_dispatch = - attrs.by_key(&sym::rustc_skip_array_during_method_dispatch).exists(); - let mut skip_boxed_slice_during_method_dispatch = false; - for tt in attrs.by_key(&sym::rustc_skip_during_method_dispatch).tt_values() { - for tt in tt.iter() { - if let tt::iter::TtElement::Leaf(tt::Leaf::Ident(ident)) = tt { - skip_array_during_method_dispatch |= ident.sym == sym::array; - skip_boxed_slice_during_method_dispatch |= ident.sym == sym::boxed_slice; - } - } - } - - if skip_array_during_method_dispatch { - flags |= TraitFlags::SKIP_ARRAY_DURING_METHOD_DISPATCH; - } - if skip_boxed_slice_during_method_dispatch { - flags |= TraitFlags::SKIP_BOXED_SLICE_DURING_METHOD_DISPATCH; - } - - let mut collector = - AssocItemCollector::new(db, module_id, tree_id.file_id(), ItemContainerId::TraitId(tr)); - collector.collect(&item_tree, tree_id.tree_id(), &tr_def.items); - let (items, macro_calls, diagnostics) = collector.finish(); - - ( - Arc::new(TraitData { name, macro_calls, items, visibility, flags }), - DefDiagnostics::new(diagnostics), - ) - } - - pub fn associated_types(&self) -> impl Iterator + '_ { - self.items.iter().filter_map(|(_name, item)| match item { - AssocItemId::TypeAliasId(t) => Some(*t), - _ => None, - }) - } - - pub fn associated_type_by_name(&self, name: &Name) -> Option { - self.items.iter().find_map(|(item_name, item)| match item { - AssocItemId::TypeAliasId(t) if item_name == name => Some(*t), - _ => None, - }) - } - - pub fn method_by_name(&self, name: &Name) -> Option { - self.items.iter().find_map(|(item_name, item)| match item { - AssocItemId::FunctionId(t) if item_name == name => Some(*t), - _ => None, - }) - } - - pub fn attribute_calls(&self) -> impl Iterator, MacroCallId)> + '_ { - self.macro_calls.iter().flat_map(|it| it.iter()).copied() - } -} - -#[derive(Debug, Clone, PartialEq, Eq)] -pub struct TraitAliasData { - pub name: Name, - pub visibility: RawVisibility, -} - -impl TraitAliasData { - pub(crate) fn trait_alias_query(db: &dyn DefDatabase, id: TraitAliasId) -> Arc { - let loc = id.lookup(db); - let item_tree = loc.id.item_tree(db); - let alias = &item_tree[loc.id.value]; - let visibility = item_tree[alias.visibility].clone(); - - Arc::new(TraitAliasData { name: alias.name.clone(), visibility }) - } -} - -#[derive(Debug, PartialEq, Eq)] -pub struct ImplData { - pub target_trait: Option, - pub self_ty: TypeRefId, - pub items: Box<[(Name, AssocItemId)]>, - pub is_negative: bool, - pub is_unsafe: bool, - // box it as the vec is usually empty anyways - pub macro_calls: Option, MacroCallId)>>>, - pub types_map: Arc, -} - -impl ImplData { - #[inline] - pub(crate) fn impl_data_query(db: &dyn DefDatabase, id: ImplId) -> Arc { - db.impl_data_with_diagnostics(id).0 - } - - pub(crate) fn impl_data_with_diagnostics_query( - db: &dyn DefDatabase, - id: ImplId, - ) -> (Arc, DefDiagnostics) { - let _p = tracing::info_span!("impl_data_with_diagnostics_query").entered(); - let ItemLoc { container: module_id, id: tree_id } = id.lookup(db); - - let item_tree = tree_id.item_tree(db); - let impl_def = &item_tree[tree_id.value]; - let target_trait = impl_def.target_trait; - let self_ty = impl_def.self_ty; - let is_negative = impl_def.is_negative; - let is_unsafe = impl_def.is_unsafe; - - let mut collector = - AssocItemCollector::new(db, module_id, tree_id.file_id(), ItemContainerId::ImplId(id)); - collector.collect(&item_tree, tree_id.tree_id(), &impl_def.items); - - let (items, macro_calls, diagnostics) = collector.finish(); - - ( - Arc::new(ImplData { - target_trait, - self_ty, - items, - is_negative, - is_unsafe, - macro_calls, - types_map: impl_def.types_map.clone(), - }), - DefDiagnostics::new(diagnostics), - ) - } - - pub fn attribute_calls(&self) -> impl Iterator, MacroCallId)> + '_ { - self.macro_calls.iter().flat_map(|it| it.iter()).copied() - } -} - -#[derive(Debug, Clone, PartialEq, Eq)] -pub struct Macro2Data { - pub name: Name, - pub visibility: RawVisibility, - // It's a bit wasteful as currently this is only for builtin `Default` derive macro, but macro2 - // are rarely used in practice so I think it's okay for now. - /// Derive helpers, if this is a derive rustc_builtin_macro - pub helpers: Option>, -} - -impl Macro2Data { - pub(crate) fn macro2_data_query(db: &dyn DefDatabase, makro: Macro2Id) -> Arc { - let loc = makro.lookup(db); - let item_tree = loc.id.item_tree(db); - let makro = &item_tree[loc.id.value]; - - let helpers = item_tree - .attrs(db, loc.container.krate(), ModItem::from(loc.id.value).into()) - .by_key(&sym::rustc_builtin_macro) - .tt_values() - .next() - .and_then(parse_macro_name_and_helper_attrs) - .map(|(_, helpers)| helpers); - - Arc::new(Macro2Data { - name: makro.name.clone(), - visibility: item_tree[makro.visibility].clone(), - helpers, - }) - } -} -#[derive(Debug, Clone, PartialEq, Eq)] -pub struct MacroRulesData { - pub name: Name, - pub macro_export: bool, -} - -impl MacroRulesData { - pub(crate) fn macro_rules_data_query( - db: &dyn DefDatabase, - makro: MacroRulesId, - ) -> Arc { - let loc = makro.lookup(db); - let item_tree = loc.id.item_tree(db); - let makro = &item_tree[loc.id.value]; - - let macro_export = item_tree - .attrs(db, loc.container.krate(), ModItem::from(loc.id.value).into()) - .by_key(&sym::macro_export) - .exists(); - - Arc::new(MacroRulesData { name: makro.name.clone(), macro_export }) - } -} - -#[derive(Debug, Clone, PartialEq, Eq)] -pub struct ProcMacroData { - pub name: Name, - /// Derive helpers, if this is a derive - pub helpers: Option>, -} - -impl ProcMacroData { - pub(crate) fn proc_macro_data_query( - db: &dyn DefDatabase, - makro: ProcMacroId, - ) -> Arc { - let loc = makro.lookup(db); - let item_tree = loc.id.item_tree(db); - let makro = &item_tree[loc.id.value]; - - let (name, helpers) = if let Some(def) = item_tree - .attrs(db, loc.container.krate(), ModItem::from(loc.id.value).into()) - .parse_proc_macro_decl(&makro.name) - { - ( - def.name, - match def.kind { - ProcMacroKind::Derive { helpers } => Some(helpers), - ProcMacroKind::Bang | ProcMacroKind::Attr => None, - }, - ) - } else { - // eeeh... - stdx::never!("proc macro declaration is not a proc macro"); - (makro.name.clone(), None) - }; - Arc::new(ProcMacroData { name, helpers }) - } -} - -#[derive(Debug, Clone, PartialEq, Eq)] -pub struct ExternCrateDeclData { - pub name: Name, - pub alias: Option, - pub visibility: RawVisibility, - pub crate_id: Option, -} - -impl ExternCrateDeclData { - pub(crate) fn extern_crate_decl_data_query( - db: &dyn DefDatabase, - extern_crate: ExternCrateId, - ) -> Arc { - let loc = extern_crate.lookup(db); - let item_tree = loc.id.item_tree(db); - let extern_crate = &item_tree[loc.id.value]; - - let name = extern_crate.name.clone(); - let krate = loc.container.krate(); - let crate_id = if name == sym::self_.clone() { - Some(krate) - } else { - db.crate_graph()[krate].dependencies.iter().find_map(|dep| { - if dep.name.symbol() == name.symbol() { - Some(dep.crate_id) - } else { - None - } - }) - }; - - Arc::new(Self { - name, - visibility: item_tree[extern_crate.visibility].clone(), - alias: extern_crate.alias.clone(), - crate_id, - }) - } -} - -#[derive(Debug, Clone, PartialEq, Eq)] -pub struct ConstData { - /// `None` for `const _: () = ();` - pub name: Option, - pub type_ref: TypeRefId, - pub visibility: RawVisibility, - pub rustc_allow_incoherent_impl: bool, - pub has_body: bool, - pub types_map: Arc, -} - -impl ConstData { - pub(crate) fn const_data_query(db: &dyn DefDatabase, konst: ConstId) -> Arc { - let loc = konst.lookup(db); - let item_tree = loc.id.item_tree(db); - let konst = &item_tree[loc.id.value]; - let visibility = if let ItemContainerId::TraitId(trait_id) = loc.container { - trait_vis(db, trait_id) - } else { - item_tree[konst.visibility].clone() - }; - - let rustc_allow_incoherent_impl = item_tree - .attrs(db, loc.container.module(db).krate(), ModItem::from(loc.id.value).into()) - .by_key(&sym::rustc_allow_incoherent_impl) - .exists(); - - Arc::new(ConstData { - name: konst.name.clone(), - type_ref: konst.type_ref, - visibility, - rustc_allow_incoherent_impl, - has_body: konst.has_body, - types_map: konst.types_map.clone(), - }) - } -} - -#[derive(Debug, Clone, PartialEq, Eq)] -pub struct StaticData { - pub name: Name, - pub type_ref: TypeRefId, - pub visibility: RawVisibility, - pub mutable: bool, - pub is_extern: bool, - pub has_safe_kw: bool, - pub has_unsafe_kw: bool, - pub types_map: Arc, -} - -impl StaticData { - pub(crate) fn static_data_query(db: &dyn DefDatabase, konst: StaticId) -> Arc { - let loc = konst.lookup(db); - let item_tree = loc.id.item_tree(db); - let statik = &item_tree[loc.id.value]; - - Arc::new(StaticData { - name: statik.name.clone(), - type_ref: statik.type_ref, - visibility: item_tree[statik.visibility].clone(), - mutable: statik.mutable, - is_extern: matches!(loc.container, ItemContainerId::ExternBlockId(_)), - has_safe_kw: statik.has_safe_kw, - has_unsafe_kw: statik.has_unsafe_kw, - types_map: statik.types_map.clone(), - }) - } -} - -struct AssocItemCollector<'a> { - db: &'a dyn DefDatabase, - module_id: ModuleId, - def_map: Arc, - diagnostics: Vec, - container: ItemContainerId, - expander: Expander, - - items: Vec<(Name, AssocItemId)>, - macro_calls: Vec<(AstId, MacroCallId)>, -} - -impl<'a> AssocItemCollector<'a> { - fn new( - db: &'a dyn DefDatabase, - module_id: ModuleId, - file_id: HirFileId, - container: ItemContainerId, - ) -> Self { - Self { - db, - module_id, - def_map: module_id.def_map(db), - container, - expander: Expander::new(db, file_id, module_id), - items: Vec::new(), - macro_calls: Vec::new(), - diagnostics: Vec::new(), - } - } - - fn finish( - self, - ) -> ( - Box<[(Name, AssocItemId)]>, - Option, MacroCallId)>>>, - Vec, - ) { - ( - self.items.into_boxed_slice(), - if self.macro_calls.is_empty() { None } else { Some(Box::new(self.macro_calls)) }, - self.diagnostics, - ) - } - - fn collect(&mut self, item_tree: &ItemTree, tree_id: TreeId, assoc_items: &[AssocItem]) { - let container = self.container; - self.items.reserve(assoc_items.len()); - - 'items: for &item in assoc_items { - let attrs = item_tree.attrs(self.db, self.module_id.krate, ModItem::from(item).into()); - if !attrs.is_cfg_enabled(self.expander.cfg_options()) { - self.diagnostics.push(DefDiagnostic::unconfigured_code( - self.module_id.local_id, - tree_id, - ModItem::from(item).into(), - attrs.cfg().unwrap(), - self.expander.cfg_options().clone(), - )); - continue; - } - - 'attrs: for attr in &*attrs { - let ast_id = - AstId::new(self.expander.current_file_id(), item.ast_id(item_tree).upcast()); - let ast_id_with_path = AstIdWithPath { path: attr.path.clone(), ast_id }; - - match self.def_map.resolve_attr_macro( - self.db, - self.module_id.local_id, - ast_id_with_path, - attr, - ) { - Ok(ResolvedAttr::Macro(call_id)) => { - let loc = self.db.lookup_intern_macro_call(call_id); - if let MacroDefKind::ProcMacro(_, exp, _) = loc.def.kind { - // If there's no expander for the proc macro (e.g. the - // proc macro is ignored, or building the proc macro - // crate failed), skip expansion like we would if it was - // disabled. This is analogous to the handling in - // `DefCollector::collect_macros`. - if let Some(err) = exp.as_expand_error(self.module_id.krate) { - self.diagnostics.push(DefDiagnostic::macro_error( - self.module_id.local_id, - ast_id, - (*attr.path).clone(), - err, - )); - continue 'attrs; - } - } - - self.macro_calls.push((ast_id, call_id)); - let res = - self.expander.enter_expand_id::(self.db, call_id); - self.collect_macro_items(res); - continue 'items; - } - Ok(_) => (), - Err(_) => { - self.diagnostics.push(DefDiagnostic::unresolved_macro_call( - self.module_id.local_id, - MacroCallKind::Attr { - ast_id, - attr_args: None, - invoc_attr_index: attr.id, - }, - attr.path().clone(), - )); - } - } - } - - self.collect_item(item_tree, tree_id, container, item); - } - } - - fn collect_item( - &mut self, - item_tree: &ItemTree, - tree_id: TreeId, - container: ItemContainerId, - item: AssocItem, - ) { - match item { - AssocItem::Function(id) => { - let item = &item_tree[id]; - let def = - FunctionLoc { container, id: ItemTreeId::new(tree_id, id) }.intern(self.db); - self.items.push((item.name.clone(), def.into())); - } - AssocItem::TypeAlias(id) => { - let item = &item_tree[id]; - let def = - TypeAliasLoc { container, id: ItemTreeId::new(tree_id, id) }.intern(self.db); - self.items.push((item.name.clone(), def.into())); - } - AssocItem::Const(id) => { - let item = &item_tree[id]; - let Some(name) = item.name.clone() else { return }; - let def = ConstLoc { container, id: ItemTreeId::new(tree_id, id) }.intern(self.db); - self.items.push((name, def.into())); - } - AssocItem::MacroCall(call) => { - let file_id = self.expander.current_file_id(); - let MacroCall { ast_id, expand_to, ctxt, ref path } = item_tree[call]; - let module = self.expander.module.local_id; - - let resolver = |path: &_| { - self.def_map - .resolve_path( - self.db, - module, - path, - crate::item_scope::BuiltinShadowMode::Other, - Some(MacroSubNs::Bang), - ) - .0 - .take_macros() - .map(|it| self.db.macro_def(it)) - }; - match macro_call_as_call_id( - self.db.upcast(), - &AstIdWithPath::new(file_id, ast_id, Clone::clone(path)), - ctxt, - expand_to, - self.expander.krate(), - resolver, - ) { - Ok(Some(call_id)) => { - let res = - self.expander.enter_expand_id::(self.db, call_id); - self.macro_calls.push((InFile::new(file_id, ast_id.upcast()), call_id)); - self.collect_macro_items(res); - } - Ok(None) => (), - Err(_) => { - self.diagnostics.push(DefDiagnostic::unresolved_macro_call( - self.module_id.local_id, - MacroCallKind::FnLike { - ast_id: InFile::new(file_id, ast_id), - expand_to, - eager: None, - }, - Clone::clone(path), - )); - } - } - } - } - } - - fn collect_macro_items(&mut self, res: ExpandResult)>>) { - let Some((mark, _parse)) = res.value else { return }; - - let tree_id = item_tree::TreeId::new(self.expander.current_file_id(), None); - let item_tree = tree_id.item_tree(self.db); - let iter: SmallVec<[_; 2]> = - item_tree.top_level_items().iter().filter_map(ModItem::as_assoc_item).collect(); - - self.collect(&item_tree, tree_id, &iter); - - self.expander.exit(mark); - } -} - -fn trait_vis(db: &dyn DefDatabase, trait_id: TraitId) -> RawVisibility { - let ItemLoc { id: tree_id, .. } = trait_id.lookup(db); - let item_tree = tree_id.item_tree(db); - let tr_def = &item_tree[tree_id.value]; - item_tree[tr_def.visibility].clone() -} diff --git a/src/tools/rust-analyzer/crates/hir-def/src/data/adt.rs b/src/tools/rust-analyzer/crates/hir-def/src/data/adt.rs deleted file mode 100644 index c94622016d355..0000000000000 --- a/src/tools/rust-analyzer/crates/hir-def/src/data/adt.rs +++ /dev/null @@ -1,489 +0,0 @@ -//! Defines hir-level representation of structs, enums and unions - -use base_db::CrateId; -use bitflags::bitflags; -use cfg::CfgOptions; -use either::Either; - -use hir_expand::name::Name; -use intern::sym; -use la_arena::Arena; -use rustc_abi::{Align, Integer, IntegerType, ReprFlags, ReprOptions}; -use rustc_hashes::Hash64; -use triomphe::Arc; -use tt::iter::TtElement; - -use crate::{ - builtin_type::{BuiltinInt, BuiltinUint}, - db::DefDatabase, - hir::Expr, - item_tree::{ - AttrOwner, Field, FieldParent, FieldsShape, ItemTree, ModItem, RawVisibilityId, TreeId, - }, - lang_item::LangItem, - nameres::diagnostics::{DefDiagnostic, DefDiagnostics}, - tt::{Delimiter, DelimiterKind, Leaf, TopSubtree}, - type_ref::{TypeRefId, TypesMap}, - visibility::RawVisibility, - EnumId, EnumVariantId, LocalFieldId, LocalModuleId, Lookup, StructId, UnionId, VariantId, -}; - -/// Note that we use `StructData` for unions as well! -#[derive(Debug, Clone, PartialEq, Eq)] -pub struct StructData { - pub name: Name, - pub variant_data: Arc, - pub repr: Option, - pub visibility: RawVisibility, - pub flags: StructFlags, -} - -bitflags! { - #[derive(Debug, Copy, Clone, PartialEq, Eq)] - pub struct StructFlags: u8 { - const NO_FLAGS = 0; - /// Indicates whether the struct is `PhantomData`. - const IS_PHANTOM_DATA = 1 << 2; - /// Indicates whether the struct has a `#[fundamental]` attribute. - const IS_FUNDAMENTAL = 1 << 3; - // FIXME: should this be a flag? - /// Indicates whether the struct has a `#[rustc_has_incoherent_inherent_impls]` attribute. - const IS_RUSTC_HAS_INCOHERENT_INHERENT_IMPL = 1 << 4; - /// Indicates whether this struct is `Box`. - const IS_BOX = 1 << 5; - /// Indicates whether this struct is `ManuallyDrop`. - const IS_MANUALLY_DROP = 1 << 6; - /// Indicates whether this struct is `UnsafeCell`. - const IS_UNSAFE_CELL = 1 << 7; - } -} - -#[derive(Debug, Clone, PartialEq, Eq)] -pub struct EnumData { - pub name: Name, - pub variants: Box<[(EnumVariantId, Name)]>, - pub repr: Option, - pub visibility: RawVisibility, - pub rustc_has_incoherent_inherent_impls: bool, -} - -#[derive(Debug, Clone, PartialEq, Eq)] -pub struct EnumVariantData { - pub name: Name, - pub variant_data: Arc, -} - -#[derive(Debug, Clone, PartialEq, Eq)] -pub enum VariantData { - Record { fields: Arena, types_map: Arc }, - Tuple { fields: Arena, types_map: Arc }, - Unit, -} - -/// A single field of an enum variant or struct -#[derive(Debug, Clone, PartialEq, Eq)] -pub struct FieldData { - pub name: Name, - pub type_ref: TypeRefId, - pub visibility: RawVisibility, -} - -fn repr_from_value( - db: &dyn DefDatabase, - krate: CrateId, - item_tree: &ItemTree, - of: AttrOwner, -) -> Option { - item_tree.attrs(db, krate, of).by_key(&sym::repr).tt_values().find_map(parse_repr_tt) -} - -fn parse_repr_tt(tt: &TopSubtree) -> Option { - match tt.top_subtree().delimiter { - Delimiter { kind: DelimiterKind::Parenthesis, .. } => {} - _ => return None, - } - - let mut flags = ReprFlags::empty(); - let mut int = None; - let mut max_align: Option = None; - let mut min_pack: Option = None; - - let mut tts = tt.iter(); - while let Some(tt) = tts.next() { - if let TtElement::Leaf(Leaf::Ident(ident)) = tt { - flags.insert(match &ident.sym { - s if *s == sym::packed => { - let pack = if let Some(TtElement::Subtree(_, mut tt_iter)) = tts.peek() { - tts.next(); - if let Some(TtElement::Leaf(Leaf::Literal(lit))) = tt_iter.next() { - lit.symbol.as_str().parse().unwrap_or_default() - } else { - 0 - } - } else { - 0 - }; - let pack = Align::from_bytes(pack).unwrap_or(Align::ONE); - min_pack = - Some(if let Some(min_pack) = min_pack { min_pack.min(pack) } else { pack }); - ReprFlags::empty() - } - s if *s == sym::align => { - if let Some(TtElement::Subtree(_, mut tt_iter)) = tts.peek() { - tts.next(); - if let Some(TtElement::Leaf(Leaf::Literal(lit))) = tt_iter.next() { - if let Ok(align) = lit.symbol.as_str().parse() { - let align = Align::from_bytes(align).ok(); - max_align = max_align.max(align); - } - } - } - ReprFlags::empty() - } - s if *s == sym::C => ReprFlags::IS_C, - s if *s == sym::transparent => ReprFlags::IS_TRANSPARENT, - s if *s == sym::simd => ReprFlags::IS_SIMD, - repr => { - if let Some(builtin) = BuiltinInt::from_suffix_sym(repr) - .map(Either::Left) - .or_else(|| BuiltinUint::from_suffix_sym(repr).map(Either::Right)) - { - int = Some(match builtin { - Either::Left(bi) => match bi { - BuiltinInt::Isize => IntegerType::Pointer(true), - BuiltinInt::I8 => IntegerType::Fixed(Integer::I8, true), - BuiltinInt::I16 => IntegerType::Fixed(Integer::I16, true), - BuiltinInt::I32 => IntegerType::Fixed(Integer::I32, true), - BuiltinInt::I64 => IntegerType::Fixed(Integer::I64, true), - BuiltinInt::I128 => IntegerType::Fixed(Integer::I128, true), - }, - Either::Right(bu) => match bu { - BuiltinUint::Usize => IntegerType::Pointer(false), - BuiltinUint::U8 => IntegerType::Fixed(Integer::I8, false), - BuiltinUint::U16 => IntegerType::Fixed(Integer::I16, false), - BuiltinUint::U32 => IntegerType::Fixed(Integer::I32, false), - BuiltinUint::U64 => IntegerType::Fixed(Integer::I64, false), - BuiltinUint::U128 => IntegerType::Fixed(Integer::I128, false), - }, - }); - } - ReprFlags::empty() - } - }) - } - } - - Some(ReprOptions { - int, - align: max_align, - pack: min_pack, - flags, - field_shuffle_seed: Hash64::ZERO, - }) -} - -impl StructData { - #[inline] - pub(crate) fn struct_data_query(db: &dyn DefDatabase, id: StructId) -> Arc { - db.struct_data_with_diagnostics(id).0 - } - - pub(crate) fn struct_data_with_diagnostics_query( - db: &dyn DefDatabase, - id: StructId, - ) -> (Arc, DefDiagnostics) { - let loc = id.lookup(db); - let krate = loc.container.krate; - let item_tree = loc.id.item_tree(db); - let repr = repr_from_value(db, krate, &item_tree, ModItem::from(loc.id.value).into()); - let attrs = item_tree.attrs(db, krate, ModItem::from(loc.id.value).into()); - - let mut flags = StructFlags::NO_FLAGS; - if attrs.by_key(&sym::rustc_has_incoherent_inherent_impls).exists() { - flags |= StructFlags::IS_RUSTC_HAS_INCOHERENT_INHERENT_IMPL; - } - if attrs.by_key(&sym::fundamental).exists() { - flags |= StructFlags::IS_FUNDAMENTAL; - } - if let Some(lang) = attrs.lang_item() { - match lang { - LangItem::PhantomData => flags |= StructFlags::IS_PHANTOM_DATA, - LangItem::OwnedBox => flags |= StructFlags::IS_BOX, - LangItem::ManuallyDrop => flags |= StructFlags::IS_MANUALLY_DROP, - LangItem::UnsafeCell => flags |= StructFlags::IS_UNSAFE_CELL, - _ => (), - } - } - - let strukt = &item_tree[loc.id.value]; - let (fields, diagnostics) = lower_fields( - db, - krate, - loc.container.local_id, - loc.id.tree_id(), - &item_tree, - &db.crate_graph()[krate].cfg_options, - FieldParent::Struct(loc.id.value), - &strukt.fields, - None, - ); - let types_map = strukt.types_map.clone(); - ( - Arc::new(StructData { - name: strukt.name.clone(), - variant_data: Arc::new(match strukt.shape { - FieldsShape::Record => VariantData::Record { fields, types_map }, - FieldsShape::Tuple => VariantData::Tuple { fields, types_map }, - FieldsShape::Unit => VariantData::Unit, - }), - repr, - visibility: item_tree[strukt.visibility].clone(), - flags, - }), - DefDiagnostics::new(diagnostics), - ) - } - - #[inline] - pub(crate) fn union_data_query(db: &dyn DefDatabase, id: UnionId) -> Arc { - db.union_data_with_diagnostics(id).0 - } - - pub(crate) fn union_data_with_diagnostics_query( - db: &dyn DefDatabase, - id: UnionId, - ) -> (Arc, DefDiagnostics) { - let loc = id.lookup(db); - let krate = loc.container.krate; - let item_tree = loc.id.item_tree(db); - let repr = repr_from_value(db, krate, &item_tree, ModItem::from(loc.id.value).into()); - let attrs = item_tree.attrs(db, krate, ModItem::from(loc.id.value).into()); - let mut flags = StructFlags::NO_FLAGS; - if attrs.by_key(&sym::rustc_has_incoherent_inherent_impls).exists() { - flags |= StructFlags::IS_RUSTC_HAS_INCOHERENT_INHERENT_IMPL; - } - if attrs.by_key(&sym::fundamental).exists() { - flags |= StructFlags::IS_FUNDAMENTAL; - } - - let union = &item_tree[loc.id.value]; - let (fields, diagnostics) = lower_fields( - db, - krate, - loc.container.local_id, - loc.id.tree_id(), - &item_tree, - &db.crate_graph()[krate].cfg_options, - FieldParent::Union(loc.id.value), - &union.fields, - None, - ); - let types_map = union.types_map.clone(); - ( - Arc::new(StructData { - name: union.name.clone(), - variant_data: Arc::new(VariantData::Record { fields, types_map }), - repr, - visibility: item_tree[union.visibility].clone(), - flags, - }), - DefDiagnostics::new(diagnostics), - ) - } -} - -impl EnumData { - pub(crate) fn enum_data_query(db: &dyn DefDatabase, e: EnumId) -> Arc { - let loc = e.lookup(db); - let krate = loc.container.krate; - let item_tree = loc.id.item_tree(db); - let repr = repr_from_value(db, krate, &item_tree, ModItem::from(loc.id.value).into()); - let rustc_has_incoherent_inherent_impls = item_tree - .attrs(db, loc.container.krate, ModItem::from(loc.id.value).into()) - .by_key(&sym::rustc_has_incoherent_inherent_impls) - .exists(); - - let enum_ = &item_tree[loc.id.value]; - - Arc::new(EnumData { - name: enum_.name.clone(), - variants: loc.container.def_map(db).enum_definitions[&e] - .iter() - .map(|&id| (id, item_tree[id.lookup(db).id.value].name.clone())) - .collect(), - repr, - visibility: item_tree[enum_.visibility].clone(), - rustc_has_incoherent_inherent_impls, - }) - } - - pub fn variant(&self, name: &Name) -> Option { - let &(id, _) = self.variants.iter().find(|(_id, n)| n == name)?; - Some(id) - } - - pub fn variant_body_type(&self) -> IntegerType { - match self.repr { - Some(ReprOptions { int: Some(builtin), .. }) => builtin, - _ => IntegerType::Pointer(true), - } - } - - // [Adopted from rustc](https://github.com/rust-lang/rust/blob/bd53aa3bf7a24a70d763182303bd75e5fc51a9af/compiler/rustc_middle/src/ty/adt.rs#L446-L448) - pub fn is_payload_free(&self, db: &dyn DefDatabase) -> bool { - self.variants.iter().all(|(v, _)| { - // The condition check order is slightly modified from rustc - // to improve performance by early returning with relatively fast checks - let variant = &db.enum_variant_data(*v).variant_data; - if !variant.fields().is_empty() { - return false; - } - // The outer if condition is whether this variant has const ctor or not - if !matches!(variant.kind(), StructKind::Unit) { - let body = db.body((*v).into()); - // A variant with explicit discriminant - if body.exprs[body.body_expr] != Expr::Missing { - return false; - } - } - true - }) - } -} - -impl EnumVariantData { - #[inline] - pub(crate) fn enum_variant_data_query( - db: &dyn DefDatabase, - e: EnumVariantId, - ) -> Arc { - db.enum_variant_data_with_diagnostics(e).0 - } - - pub(crate) fn enum_variant_data_with_diagnostics_query( - db: &dyn DefDatabase, - e: EnumVariantId, - ) -> (Arc, DefDiagnostics) { - let loc = e.lookup(db); - let container = loc.parent.lookup(db).container; - let krate = container.krate; - let item_tree = loc.id.item_tree(db); - let variant = &item_tree[loc.id.value]; - - let (fields, diagnostics) = lower_fields( - db, - krate, - container.local_id, - loc.id.tree_id(), - &item_tree, - &db.crate_graph()[krate].cfg_options, - FieldParent::Variant(loc.id.value), - &variant.fields, - Some(item_tree[loc.parent.lookup(db).id.value].visibility), - ); - let types_map = variant.types_map.clone(); - - ( - Arc::new(EnumVariantData { - name: variant.name.clone(), - variant_data: Arc::new(match variant.shape { - FieldsShape::Record => VariantData::Record { fields, types_map }, - FieldsShape::Tuple => VariantData::Tuple { fields, types_map }, - FieldsShape::Unit => VariantData::Unit, - }), - }), - DefDiagnostics::new(diagnostics), - ) - } -} - -impl VariantData { - pub fn fields(&self) -> &Arena { - const EMPTY: &Arena = &Arena::new(); - match &self { - VariantData::Record { fields, .. } | VariantData::Tuple { fields, .. } => fields, - _ => EMPTY, - } - } - - pub fn types_map(&self) -> &TypesMap { - match &self { - VariantData::Record { types_map, .. } | VariantData::Tuple { types_map, .. } => { - types_map - } - VariantData::Unit => TypesMap::EMPTY, - } - } - - // FIXME: Linear lookup - pub fn field(&self, name: &Name) -> Option { - self.fields().iter().find_map(|(id, data)| if &data.name == name { Some(id) } else { None }) - } - - pub fn kind(&self) -> StructKind { - match self { - VariantData::Record { .. } => StructKind::Record, - VariantData::Tuple { .. } => StructKind::Tuple, - VariantData::Unit => StructKind::Unit, - } - } - - #[allow(clippy::self_named_constructors)] - pub(crate) fn variant_data(db: &dyn DefDatabase, id: VariantId) -> Arc { - match id { - VariantId::StructId(it) => db.struct_data(it).variant_data.clone(), - VariantId::EnumVariantId(it) => db.enum_variant_data(it).variant_data.clone(), - VariantId::UnionId(it) => db.union_data(it).variant_data.clone(), - } - } -} - -#[derive(Debug, Copy, Clone, PartialEq, Eq)] -pub enum StructKind { - Tuple, - Record, - Unit, -} - -fn lower_fields( - db: &dyn DefDatabase, - krate: CrateId, - container: LocalModuleId, - tree_id: TreeId, - item_tree: &ItemTree, - cfg_options: &CfgOptions, - parent: FieldParent, - fields: &[Field], - override_visibility: Option, -) -> (Arena, Vec) { - let mut diagnostics = Vec::new(); - let mut arena = Arena::new(); - for (idx, field) in fields.iter().enumerate() { - let attr_owner = AttrOwner::make_field_indexed(parent, idx); - let attrs = item_tree.attrs(db, krate, attr_owner); - if attrs.is_cfg_enabled(cfg_options) { - arena.alloc(lower_field(item_tree, field, override_visibility)); - } else { - diagnostics.push(DefDiagnostic::unconfigured_code( - container, - tree_id, - attr_owner, - attrs.cfg().unwrap(), - cfg_options.clone(), - )) - } - } - (arena, diagnostics) -} - -fn lower_field( - item_tree: &ItemTree, - field: &Field, - override_visibility: Option, -) -> FieldData { - FieldData { - name: field.name.clone(), - type_ref: field.type_ref, - visibility: item_tree[override_visibility.unwrap_or(field.visibility)].clone(), - } -} diff --git a/src/tools/rust-analyzer/crates/hir-def/src/db.rs b/src/tools/rust-analyzer/crates/hir-def/src/db.rs index 598a850898bb6..2cbdbe16f9bb6 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/db.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/db.rs @@ -1,273 +1,367 @@ //! Defines database & queries for name resolution. -use base_db::{ra_salsa, CrateId, SourceDatabase, Upcast}; +use base_db::{Crate, RootQueryDb, SourceDatabase}; use either::Either; -use hir_expand::{db::ExpandDatabase, HirFileId, MacroDefId}; +use hir_expand::{EditionedFileId, HirFileId, MacroCallId, MacroDefId, db::ExpandDatabase}; use intern::sym; use la_arena::ArenaMap; -use span::{EditionedFileId, MacroCallId}; -use syntax::{ast, AstPtr}; +use syntax::{AstPtr, ast}; +use thin_vec::ThinVec; use triomphe::Arc; use crate::{ + AttrDefId, BlockId, BlockLoc, ConstId, ConstLoc, DefWithBodyId, EnumId, EnumLoc, EnumVariantId, + EnumVariantLoc, ExternBlockId, ExternBlockLoc, ExternCrateId, ExternCrateLoc, FunctionId, + FunctionLoc, GenericDefId, ImplId, ImplLoc, LocalFieldId, Macro2Id, Macro2Loc, MacroId, + MacroRulesId, MacroRulesLoc, MacroRulesLocFlags, ProcMacroId, ProcMacroLoc, StaticId, + StaticLoc, StructId, StructLoc, TraitAliasId, TraitAliasLoc, TraitId, TraitLoc, TypeAliasId, + TypeAliasLoc, UnionId, UnionLoc, UseId, UseLoc, VariantId, attr::{Attrs, AttrsWithOwner}, - data::{ - adt::{EnumData, EnumVariantData, StructData, VariantData}, - ConstData, ExternCrateDeclData, FunctionData, ImplData, Macro2Data, MacroRulesData, - ProcMacroData, StaticData, TraitAliasData, TraitData, TypeAliasData, + expr_store::{ + Body, BodySourceMap, ExpressionStore, ExpressionStoreSourceMap, scope::ExprScopes, }, - expr_store::{scope::ExprScopes, Body, BodySourceMap}, - generics::GenericParams, + hir::generics::GenericParams, import_map::ImportMap, - item_tree::{AttrOwner, ItemTree, ItemTreeSourceMaps}, - lang_item::{self, LangItem, LangItemTarget, LangItems}, - nameres::{diagnostics::DefDiagnostics, DefMap}, + item_tree::{AttrOwner, ItemTree}, + lang_item::{self, LangItem}, + nameres::{ + DefMap, LocalDefMap, + assoc::{ImplItems, TraitItems}, + diagnostics::DefDiagnostics, + }, + signatures::{ + ConstSignature, EnumSignature, EnumVariants, FunctionSignature, ImplSignature, + InactiveEnumVariantCode, StaticSignature, StructSignature, TraitAliasSignature, + TraitSignature, TypeAliasSignature, UnionSignature, VariantFields, + }, tt, - type_ref::TypesSourceMap, visibility::{self, Visibility}, - AttrDefId, BlockId, BlockLoc, ConstBlockId, ConstBlockLoc, ConstId, ConstLoc, DefWithBodyId, - EnumId, EnumLoc, EnumVariantId, EnumVariantLoc, ExternBlockId, ExternBlockLoc, ExternCrateId, - ExternCrateLoc, FunctionId, FunctionLoc, GenericDefId, ImplId, ImplLoc, InTypeConstId, - InTypeConstLoc, LocalFieldId, Macro2Id, Macro2Loc, MacroId, MacroRulesId, MacroRulesLoc, - MacroRulesLocFlags, ProcMacroId, ProcMacroLoc, StaticId, StaticLoc, StructId, StructLoc, - TraitAliasId, TraitAliasLoc, TraitId, TraitLoc, TypeAliasId, TypeAliasLoc, UnionId, UnionLoc, - UseId, UseLoc, VariantId, }; -#[ra_salsa::query_group(InternDatabaseStorage)] -pub trait InternDatabase: SourceDatabase { +use salsa::plumbing::AsId; + +#[query_group::query_group(InternDatabaseStorage)] +pub trait InternDatabase: RootQueryDb { // region: items - #[ra_salsa::interned] + #[salsa::interned] fn intern_use(&self, loc: UseLoc) -> UseId; - #[ra_salsa::interned] + + #[salsa::interned] fn intern_extern_crate(&self, loc: ExternCrateLoc) -> ExternCrateId; - #[ra_salsa::interned] + + #[salsa::interned] fn intern_function(&self, loc: FunctionLoc) -> FunctionId; - #[ra_salsa::interned] + + #[salsa::interned] fn intern_struct(&self, loc: StructLoc) -> StructId; - #[ra_salsa::interned] + + #[salsa::interned] fn intern_union(&self, loc: UnionLoc) -> UnionId; - #[ra_salsa::interned] + + #[salsa::interned] fn intern_enum(&self, loc: EnumLoc) -> EnumId; - #[ra_salsa::interned] + + #[salsa::interned] fn intern_enum_variant(&self, loc: EnumVariantLoc) -> EnumVariantId; - #[ra_salsa::interned] + + #[salsa::interned] fn intern_const(&self, loc: ConstLoc) -> ConstId; - #[ra_salsa::interned] + + #[salsa::interned] fn intern_static(&self, loc: StaticLoc) -> StaticId; - #[ra_salsa::interned] + + #[salsa::interned] fn intern_trait(&self, loc: TraitLoc) -> TraitId; - #[ra_salsa::interned] + + #[salsa::interned] fn intern_trait_alias(&self, loc: TraitAliasLoc) -> TraitAliasId; - #[ra_salsa::interned] + + #[salsa::interned] fn intern_type_alias(&self, loc: TypeAliasLoc) -> TypeAliasId; - #[ra_salsa::interned] + + #[salsa::interned] fn intern_impl(&self, loc: ImplLoc) -> ImplId; - #[ra_salsa::interned] + + #[salsa::interned] fn intern_extern_block(&self, loc: ExternBlockLoc) -> ExternBlockId; - #[ra_salsa::interned] + + #[salsa::interned] fn intern_macro2(&self, loc: Macro2Loc) -> Macro2Id; - #[ra_salsa::interned] + + #[salsa::interned] fn intern_proc_macro(&self, loc: ProcMacroLoc) -> ProcMacroId; - #[ra_salsa::interned] + + #[salsa::interned] fn intern_macro_rules(&self, loc: MacroRulesLoc) -> MacroRulesId; - // endregion: items + // // endregion: items - #[ra_salsa::interned] + #[salsa::interned] fn intern_block(&self, loc: BlockLoc) -> BlockId; - #[ra_salsa::interned] - fn intern_anonymous_const(&self, id: ConstBlockLoc) -> ConstBlockId; - #[ra_salsa::interned] - fn intern_in_type_const(&self, id: InTypeConstLoc) -> InTypeConstId; } -#[ra_salsa::query_group(DefDatabaseStorage)] -pub trait DefDatabase: InternDatabase + ExpandDatabase + Upcast { +#[query_group::query_group] +pub trait DefDatabase: InternDatabase + ExpandDatabase + SourceDatabase { /// Whether to expand procedural macros during name resolution. - #[ra_salsa::input] + #[salsa::input] fn expand_proc_attr_macros(&self) -> bool; /// Computes an [`ItemTree`] for the given file or macro expansion. - #[ra_salsa::invoke(ItemTree::file_item_tree_query)] + #[salsa::invoke(ItemTree::file_item_tree_query)] fn file_item_tree(&self, file_id: HirFileId) -> Arc; - #[ra_salsa::invoke(ItemTree::block_item_tree_query)] + #[salsa::invoke(ItemTree::block_item_tree_query)] fn block_item_tree(&self, block_id: BlockId) -> Arc; - #[ra_salsa::invoke(ItemTree::file_item_tree_with_source_map_query)] - fn file_item_tree_with_source_map( - &self, - file_id: HirFileId, - ) -> (Arc, Arc); - - #[ra_salsa::invoke(ItemTree::block_item_tree_with_source_map_query)] - fn block_item_tree_with_source_map( - &self, - block_id: BlockId, - ) -> (Arc, Arc); + #[salsa::invoke(DefMap::crate_local_def_map_query)] + fn crate_local_def_map(&self, krate: Crate) -> (Arc, Arc); - #[ra_salsa::invoke(DefMap::crate_def_map_query)] - fn crate_def_map(&self, krate: CrateId) -> Arc; + #[salsa::invoke(DefMap::crate_def_map_query)] + fn crate_def_map(&self, krate: Crate) -> Arc; /// Computes the block-level `DefMap`. - #[ra_salsa::invoke(DefMap::block_def_map_query)] + #[salsa::invoke(DefMap::block_def_map_query)] fn block_def_map(&self, block: BlockId) -> Arc; /// Turns a MacroId into a MacroDefId, describing the macro's definition post name resolution. + #[salsa::invoke(macro_def)] fn macro_def(&self, m: MacroId) -> MacroDefId; // region:data - #[ra_salsa::transparent] - #[ra_salsa::invoke(StructData::struct_data_query)] - fn struct_data(&self, id: StructId) -> Arc; + #[salsa::invoke(VariantFields::query)] + fn variant_fields_with_source_map( + &self, + id: VariantId, + ) -> (Arc, Arc); - #[ra_salsa::invoke(StructData::struct_data_with_diagnostics_query)] - fn struct_data_with_diagnostics(&self, id: StructId) -> (Arc, DefDiagnostics); + #[salsa::tracked] + fn enum_variants(&self, id: EnumId) -> Arc { + self.enum_variants_with_diagnostics(id).0 + } - #[ra_salsa::transparent] - #[ra_salsa::invoke(StructData::union_data_query)] - fn union_data(&self, id: UnionId) -> Arc; + #[salsa::invoke(EnumVariants::enum_variants_query)] + fn enum_variants_with_diagnostics( + &self, + id: EnumId, + ) -> (Arc, Option>>); - #[ra_salsa::invoke(StructData::union_data_with_diagnostics_query)] - fn union_data_with_diagnostics(&self, id: UnionId) -> (Arc, DefDiagnostics); + #[salsa::transparent] + #[salsa::invoke(ImplItems::impl_items_query)] + fn impl_items(&self, e: ImplId) -> Arc; - #[ra_salsa::invoke(EnumData::enum_data_query)] - fn enum_data(&self, e: EnumId) -> Arc; + #[salsa::invoke(ImplItems::impl_items_with_diagnostics_query)] + fn impl_items_with_diagnostics(&self, e: ImplId) -> (Arc, DefDiagnostics); - #[ra_salsa::transparent] - #[ra_salsa::invoke(EnumVariantData::enum_variant_data_query)] - fn enum_variant_data(&self, id: EnumVariantId) -> Arc; + #[salsa::transparent] + #[salsa::invoke(TraitItems::trait_items_query)] + fn trait_items(&self, e: TraitId) -> Arc; - #[ra_salsa::invoke(EnumVariantData::enum_variant_data_with_diagnostics_query)] - fn enum_variant_data_with_diagnostics( - &self, - id: EnumVariantId, - ) -> (Arc, DefDiagnostics); + #[salsa::invoke(TraitItems::trait_items_with_diagnostics_query)] + fn trait_items_with_diagnostics(&self, tr: TraitId) -> (Arc, DefDiagnostics); + + #[salsa::tracked] + fn variant_fields(&self, id: VariantId) -> Arc { + self.variant_fields_with_source_map(id).0 + } + + #[salsa::tracked] + fn trait_signature(&self, trait_: TraitId) -> Arc { + self.trait_signature_with_source_map(trait_).0 + } + + #[salsa::tracked] + fn impl_signature(&self, impl_: ImplId) -> Arc { + self.impl_signature_with_source_map(impl_).0 + } + + #[salsa::tracked] + fn struct_signature(&self, struct_: StructId) -> Arc { + self.struct_signature_with_source_map(struct_).0 + } - #[ra_salsa::transparent] - #[ra_salsa::invoke(VariantData::variant_data)] - fn variant_data(&self, id: VariantId) -> Arc; - #[ra_salsa::transparent] - #[ra_salsa::invoke(ImplData::impl_data_query)] - fn impl_data(&self, e: ImplId) -> Arc; + #[salsa::tracked] + fn union_signature(&self, union_: UnionId) -> Arc { + self.union_signature_with_source_map(union_).0 + } - #[ra_salsa::invoke(ImplData::impl_data_with_diagnostics_query)] - fn impl_data_with_diagnostics(&self, e: ImplId) -> (Arc, DefDiagnostics); + #[salsa::tracked] + fn enum_signature(&self, e: EnumId) -> Arc { + self.enum_signature_with_source_map(e).0 + } - #[ra_salsa::transparent] - #[ra_salsa::invoke(TraitData::trait_data_query)] - fn trait_data(&self, e: TraitId) -> Arc; + #[salsa::tracked] + fn const_signature(&self, e: ConstId) -> Arc { + self.const_signature_with_source_map(e).0 + } - #[ra_salsa::invoke(TraitData::trait_data_with_diagnostics_query)] - fn trait_data_with_diagnostics(&self, tr: TraitId) -> (Arc, DefDiagnostics); + #[salsa::tracked] + fn static_signature(&self, e: StaticId) -> Arc { + self.static_signature_with_source_map(e).0 + } - #[ra_salsa::invoke(TraitAliasData::trait_alias_query)] - fn trait_alias_data(&self, e: TraitAliasId) -> Arc; + #[salsa::tracked] + fn function_signature(&self, e: FunctionId) -> Arc { + self.function_signature_with_source_map(e).0 + } - #[ra_salsa::invoke(TypeAliasData::type_alias_data_query)] - fn type_alias_data(&self, e: TypeAliasId) -> Arc; + #[salsa::tracked] + fn trait_alias_signature(&self, e: TraitAliasId) -> Arc { + self.trait_alias_signature_with_source_map(e).0 + } - #[ra_salsa::invoke(FunctionData::fn_data_query)] - fn function_data(&self, func: FunctionId) -> Arc; + #[salsa::tracked] + fn type_alias_signature(&self, e: TypeAliasId) -> Arc { + self.type_alias_signature_with_source_map(e).0 + } - #[ra_salsa::invoke(ConstData::const_data_query)] - fn const_data(&self, konst: ConstId) -> Arc; + #[salsa::invoke(TraitSignature::query)] + fn trait_signature_with_source_map( + &self, + trait_: TraitId, + ) -> (Arc, Arc); - #[ra_salsa::invoke(StaticData::static_data_query)] - fn static_data(&self, statik: StaticId) -> Arc; + #[salsa::invoke(ImplSignature::query)] + fn impl_signature_with_source_map( + &self, + impl_: ImplId, + ) -> (Arc, Arc); - #[ra_salsa::invoke(Macro2Data::macro2_data_query)] - fn macro2_data(&self, makro: Macro2Id) -> Arc; + #[salsa::invoke(StructSignature::query)] + fn struct_signature_with_source_map( + &self, + struct_: StructId, + ) -> (Arc, Arc); - #[ra_salsa::invoke(MacroRulesData::macro_rules_data_query)] - fn macro_rules_data(&self, makro: MacroRulesId) -> Arc; + #[salsa::invoke(UnionSignature::query)] + fn union_signature_with_source_map( + &self, + union_: UnionId, + ) -> (Arc, Arc); - #[ra_salsa::invoke(ProcMacroData::proc_macro_data_query)] - fn proc_macro_data(&self, makro: ProcMacroId) -> Arc; + #[salsa::invoke(EnumSignature::query)] + fn enum_signature_with_source_map( + &self, + e: EnumId, + ) -> (Arc, Arc); - #[ra_salsa::invoke(ExternCrateDeclData::extern_crate_decl_data_query)] - fn extern_crate_decl_data(&self, extern_crate: ExternCrateId) -> Arc; + #[salsa::invoke(ConstSignature::query)] + fn const_signature_with_source_map( + &self, + e: ConstId, + ) -> (Arc, Arc); + + #[salsa::invoke(StaticSignature::query)] + fn static_signature_with_source_map( + &self, + e: StaticId, + ) -> (Arc, Arc); + + #[salsa::invoke(FunctionSignature::query)] + fn function_signature_with_source_map( + &self, + e: FunctionId, + ) -> (Arc, Arc); + + #[salsa::invoke(TraitAliasSignature::query)] + fn trait_alias_signature_with_source_map( + &self, + e: TraitAliasId, + ) -> (Arc, Arc); + + #[salsa::invoke(TypeAliasSignature::query)] + fn type_alias_signature_with_source_map( + &self, + e: TypeAliasId, + ) -> (Arc, Arc); // endregion:data - #[ra_salsa::invoke(Body::body_with_source_map_query)] - #[ra_salsa::lru] + #[salsa::invoke(Body::body_with_source_map_query)] + #[salsa::lru(512)] fn body_with_source_map(&self, def: DefWithBodyId) -> (Arc, Arc); - #[ra_salsa::invoke(Body::body_query)] + #[salsa::invoke(Body::body_query)] fn body(&self, def: DefWithBodyId) -> Arc; - #[ra_salsa::invoke(ExprScopes::expr_scopes_query)] + #[salsa::invoke(ExprScopes::expr_scopes_query)] fn expr_scopes(&self, def: DefWithBodyId) -> Arc; - #[ra_salsa::invoke(GenericParams::generic_params_query)] + #[salsa::transparent] + #[salsa::invoke(GenericParams::new)] fn generic_params(&self, def: GenericDefId) -> Arc; - /// If this returns `None` for the source map, that means it is the same as with the item tree. - #[ra_salsa::invoke(GenericParams::generic_params_with_source_map_query)] - fn generic_params_with_source_map( + #[salsa::transparent] + #[salsa::invoke(GenericParams::generic_params_and_store)] + fn generic_params_and_store( &self, def: GenericDefId, - ) -> (Arc, Option>); + ) -> (Arc, Arc); + + #[salsa::transparent] + #[salsa::invoke(GenericParams::generic_params_and_store_and_source_map)] + fn generic_params_and_store_and_source_map( + &self, + def: GenericDefId, + ) -> (Arc, Arc, Arc); // region:attrs - #[ra_salsa::invoke(Attrs::fields_attrs_query)] + #[salsa::invoke(Attrs::fields_attrs_query)] fn fields_attrs(&self, def: VariantId) -> Arc>; // should this really be a query? - #[ra_salsa::invoke(crate::attr::fields_attrs_source_map)] + #[salsa::invoke(crate::attr::fields_attrs_source_map)] fn fields_attrs_source_map( &self, def: VariantId, ) -> Arc>>>; - #[ra_salsa::invoke(AttrsWithOwner::attrs_query)] + // FIXME: Make this a non-interned query. + #[salsa::invoke_interned(AttrsWithOwner::attrs_query)] fn attrs(&self, def: AttrDefId) -> Attrs; - #[ra_salsa::transparent] - #[ra_salsa::invoke(lang_item::lang_attr)] + #[salsa::transparent] + #[salsa::invoke(lang_item::lang_attr)] fn lang_attr(&self, def: AttrDefId) -> Option; // endregion:attrs - #[ra_salsa::invoke(LangItems::lang_item_query)] - fn lang_item(&self, start_crate: CrateId, item: LangItem) -> Option; - - #[ra_salsa::invoke(ImportMap::import_map_query)] - fn import_map(&self, krate: CrateId) -> Arc; + #[salsa::invoke(ImportMap::import_map_query)] + fn import_map(&self, krate: Crate) -> Arc; // region:visibilities - #[ra_salsa::invoke(visibility::field_visibilities_query)] + #[salsa::invoke(visibility::field_visibilities_query)] fn field_visibilities(&self, var: VariantId) -> Arc>; // FIXME: unify function_visibility and const_visibility? - #[ra_salsa::invoke(visibility::function_visibility_query)] + + #[salsa::invoke(visibility::function_visibility_query)] fn function_visibility(&self, def: FunctionId) -> Visibility; - #[ra_salsa::invoke(visibility::const_visibility_query)] + #[salsa::invoke(visibility::const_visibility_query)] fn const_visibility(&self, def: ConstId) -> Visibility; - // endregion:visibilities + #[salsa::invoke(visibility::type_alias_visibility_query)] + fn type_alias_visibility(&self, def: TypeAliasId) -> Visibility; - #[ra_salsa::invoke(LangItems::crate_lang_items_query)] - fn crate_lang_items(&self, krate: CrateId) -> Option>; + // endregion:visibilities - #[ra_salsa::invoke(crate::lang_item::notable_traits_in_deps)] - fn notable_traits_in_deps(&self, krate: CrateId) -> Arc<[Arc<[TraitId]>]>; - #[ra_salsa::invoke(crate::lang_item::crate_notable_traits)] - fn crate_notable_traits(&self, krate: CrateId) -> Option>; + #[salsa::invoke(crate::lang_item::notable_traits_in_deps)] + fn notable_traits_in_deps(&self, krate: Crate) -> Arc<[Arc<[TraitId]>]>; + #[salsa::invoke(crate::lang_item::crate_notable_traits)] + fn crate_notable_traits(&self, krate: Crate) -> Option>; - fn crate_supports_no_std(&self, crate_id: CrateId) -> bool; + #[salsa::invoke(crate_supports_no_std)] + fn crate_supports_no_std(&self, crate_id: Crate) -> bool; - fn include_macro_invoc(&self, crate_id: CrateId) -> Arc<[(MacroCallId, EditionedFileId)]>; + #[salsa::invoke(include_macro_invoc)] + fn include_macro_invoc(&self, crate_id: Crate) -> Arc<[(MacroCallId, EditionedFileId)]>; } // return: macro call id and include file id fn include_macro_invoc( db: &dyn DefDatabase, - krate: CrateId, + krate: Crate, ) -> Arc<[(MacroCallId, EditionedFileId)]> { db.crate_def_map(krate) .modules @@ -275,20 +369,20 @@ fn include_macro_invoc( .flat_map(|m| m.scope.iter_macro_invoc()) .filter_map(|invoc| { db.lookup_intern_macro_call(*invoc.1) - .include_file_id(db.upcast(), *invoc.1) + .include_file_id(db, *invoc.1) .map(|x| (*invoc.1, x)) }) .collect() } -fn crate_supports_no_std(db: &dyn DefDatabase, crate_id: CrateId) -> bool { - let file = db.crate_graph()[crate_id].root_file_id(); +fn crate_supports_no_std(db: &dyn DefDatabase, crate_id: Crate) -> bool { + let file = crate_id.data(db).root_file_id(db); let item_tree = db.file_item_tree(file.into()); let attrs = item_tree.raw_attrs(AttrOwner::TopLevel); for attr in &**attrs { match attr.path().as_ident() { - Some(ident) if *ident == sym::no_std.clone() => return true, - Some(ident) if *ident == sym::cfg_attr.clone() => {} + Some(ident) if *ident == sym::no_std => return true, + Some(ident) if *ident == sym::cfg_attr => {} _ => continue, } @@ -304,7 +398,7 @@ fn crate_supports_no_std(db: &dyn DefDatabase, crate_id: CrateId) -> bool { for output in segments.skip(1) { match output.flat_tokens() { [tt::TokenTree::Leaf(tt::Leaf::Ident(ident))] if ident.sym == sym::no_std => { - return true + return true; } _ => {} } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/dyn_map.rs b/src/tools/rust-analyzer/crates/hir-def/src/dyn_map.rs index 8868bc0cd95bd..eed1490a7af62 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/dyn_map.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/dyn_map.rs @@ -27,15 +27,15 @@ pub mod keys { use std::marker::PhantomData; - use hir_expand::{attrs::AttrId, MacroCallId}; + use hir_expand::{MacroCallId, attrs::AttrId}; use rustc_hash::FxHashMap; - use syntax::{ast, AstNode, AstPtr}; + use syntax::{AstNode, AstPtr, ast}; use crate::{ - dyn_map::{DynMap, Policy}, BlockId, ConstId, EnumId, EnumVariantId, ExternBlockId, ExternCrateId, FieldId, FunctionId, ImplId, LifetimeParamId, Macro2Id, MacroRulesId, ProcMacroId, StaticId, StructId, TraitAliasId, TraitId, TypeAliasId, TypeOrConstParamId, UnionId, UseId, + dyn_map::{DynMap, Policy}, }; pub type Key = crate::dyn_map::Key, V, AstPtrPolicy>; @@ -112,6 +112,10 @@ pub struct Key { } impl Key { + #[allow( + clippy::new_without_default, + reason = "this a const fn, so it can't be default yet. See " + )] pub(crate) const fn new() -> Key { Key { _phantom: PhantomData } } @@ -148,16 +152,11 @@ impl Policy for (K, V) { } } +#[derive(Default)] pub struct DynMap { pub(crate) map: Map, } -impl Default for DynMap { - fn default() -> Self { - DynMap { map: Map::new() } - } -} - #[repr(transparent)] pub struct KeyMap { map: DynMap, diff --git a/src/tools/rust-analyzer/crates/hir-def/src/expander.rs b/src/tools/rust-analyzer/crates/hir-def/src/expander.rs deleted file mode 100644 index a1b3123c9914e..0000000000000 --- a/src/tools/rust-analyzer/crates/hir-def/src/expander.rs +++ /dev/null @@ -1,243 +0,0 @@ -//! Macro expansion utilities. - -use std::cell::OnceCell; - -use base_db::CrateId; -use cfg::CfgOptions; -use drop_bomb::DropBomb; -use hir_expand::{ - attrs::RawAttrs, mod_path::ModPath, span_map::SpanMap, ExpandError, ExpandErrorKind, - ExpandResult, HirFileId, InFile, Lookup, MacroCallId, -}; -use span::{Edition, SyntaxContextId}; -use syntax::{ast, Parse}; -use triomphe::Arc; - -use crate::type_ref::{TypesMap, TypesSourceMap}; -use crate::{ - attr::Attrs, db::DefDatabase, lower::LowerCtx, path::Path, AsMacroCall, MacroId, ModuleId, - UnresolvedMacro, -}; - -#[derive(Debug)] -pub struct Expander { - cfg_options: Arc, - span_map: OnceCell, - current_file_id: HirFileId, - pub(crate) module: ModuleId, - /// `recursion_depth == usize::MAX` indicates that the recursion limit has been reached. - recursion_depth: u32, - recursion_limit: usize, -} - -impl Expander { - pub fn new(db: &dyn DefDatabase, current_file_id: HirFileId, module: ModuleId) -> Expander { - let recursion_limit = module.def_map(db).recursion_limit() as usize; - let recursion_limit = if cfg!(test) { - // Without this, `body::tests::your_stack_belongs_to_me` stack-overflows in debug - std::cmp::min(32, recursion_limit) - } else { - recursion_limit - }; - Expander { - current_file_id, - module, - recursion_depth: 0, - recursion_limit, - cfg_options: db.crate_graph()[module.krate].cfg_options.clone(), - span_map: OnceCell::new(), - } - } - - pub(crate) fn span_map(&self, db: &dyn DefDatabase) -> &SpanMap { - self.span_map.get_or_init(|| db.span_map(self.current_file_id)) - } - - pub fn krate(&self) -> CrateId { - self.module.krate - } - - pub fn syntax_context(&self) -> SyntaxContextId { - // FIXME: - SyntaxContextId::root(Edition::CURRENT) - } - - pub fn enter_expand( - &mut self, - db: &dyn DefDatabase, - macro_call: ast::MacroCall, - resolver: impl Fn(&ModPath) -> Option, - ) -> Result)>>, UnresolvedMacro> { - // FIXME: within_limit should support this, instead of us having to extract the error - let mut unresolved_macro_err = None; - - let result = self.within_limit(db, |this| { - let macro_call = this.in_file(¯o_call); - match macro_call.as_call_id_with_errors(db.upcast(), this.module.krate(), |path| { - resolver(path).map(|it| db.macro_def(it)) - }) { - Ok(call_id) => call_id, - Err(resolve_err) => { - unresolved_macro_err = Some(resolve_err); - ExpandResult { value: None, err: None } - } - } - }); - - if let Some(err) = unresolved_macro_err { - Err(err) - } else { - Ok(result) - } - } - - pub fn enter_expand_id( - &mut self, - db: &dyn DefDatabase, - call_id: MacroCallId, - ) -> ExpandResult)>> { - self.within_limit(db, |_this| ExpandResult::ok(Some(call_id))) - } - - pub fn exit(&mut self, mut mark: Mark) { - self.span_map = mark.span_map; - self.current_file_id = mark.file_id; - if self.recursion_depth == u32::MAX { - // Recursion limit has been reached somewhere in the macro expansion tree. Reset the - // depth only when we get out of the tree. - if !self.current_file_id.is_macro() { - self.recursion_depth = 0; - } - } else { - self.recursion_depth -= 1; - } - mark.bomb.defuse(); - } - - pub fn ctx<'a>( - &self, - db: &'a dyn DefDatabase, - types_map: &'a mut TypesMap, - types_source_map: &'a mut TypesSourceMap, - ) -> LowerCtx<'a> { - LowerCtx::with_span_map_cell( - db, - self.current_file_id, - self.span_map.clone(), - types_map, - types_source_map, - ) - } - - pub(crate) fn in_file(&self, value: T) -> InFile { - InFile { file_id: self.current_file_id, value } - } - - pub(crate) fn parse_attrs(&self, db: &dyn DefDatabase, owner: &dyn ast::HasAttrs) -> Attrs { - Attrs::filter( - db, - self.krate(), - RawAttrs::new( - db.upcast(), - owner, - self.span_map.get_or_init(|| db.span_map(self.current_file_id)).as_ref(), - ), - ) - } - - pub(crate) fn cfg_options(&self) -> &CfgOptions { - &self.cfg_options - } - - pub fn current_file_id(&self) -> HirFileId { - self.current_file_id - } - - pub(crate) fn parse_path( - &mut self, - db: &dyn DefDatabase, - path: ast::Path, - types_map: &mut TypesMap, - types_source_map: &mut TypesSourceMap, - ) -> Option { - let mut ctx = LowerCtx::with_span_map_cell( - db, - self.current_file_id, - self.span_map.clone(), - types_map, - types_source_map, - ); - Path::from_src(&mut ctx, path) - } - - fn within_limit( - &mut self, - db: &dyn DefDatabase, - op: F, - ) -> ExpandResult)>> - where - F: FnOnce(&mut Self) -> ExpandResult>, - { - if self.recursion_depth == u32::MAX { - // Recursion limit has been reached somewhere in the macro expansion tree. We should - // stop expanding other macro calls in this tree, or else this may result in - // exponential number of macro expansions, leading to a hang. - // - // The overflow error should have been reported when it occurred (see the next branch), - // so don't return overflow error here to avoid diagnostics duplication. - cov_mark::hit!(overflow_but_not_me); - return ExpandResult::ok(None); - } - - let ExpandResult { value, err } = op(self); - let Some(call_id) = value else { - return ExpandResult { value: None, err }; - }; - if self.recursion_depth as usize > self.recursion_limit { - self.recursion_depth = u32::MAX; - cov_mark::hit!(your_stack_belongs_to_me); - return ExpandResult::only_err(ExpandError::new( - db.macro_arg_considering_derives(call_id, &call_id.lookup(db.upcast()).kind).2, - ExpandErrorKind::RecursionOverflow, - )); - } - - let macro_file = call_id.as_macro_file(); - let res = db.parse_macro_expansion(macro_file); - - let err = err.or(res.err); - ExpandResult { - value: match &err { - // If proc-macro is disabled or unresolved, we want to expand to a missing expression - // instead of an empty tree which might end up in an empty block. - Some(e) if matches!(e.kind(), ExpandErrorKind::MissingProcMacroExpander(_)) => None, - _ => (|| { - let parse = res.value.0.cast::()?; - - self.recursion_depth += 1; - let old_span_map = OnceCell::new(); - if let Some(prev) = self.span_map.take() { - _ = old_span_map.set(prev); - }; - _ = self.span_map.set(SpanMap::ExpansionSpanMap(res.value.1)); - let old_file_id = - std::mem::replace(&mut self.current_file_id, macro_file.into()); - let mark = Mark { - file_id: old_file_id, - span_map: old_span_map, - bomb: DropBomb::new("expansion mark dropped"), - }; - Some((mark, parse)) - })(), - }, - err, - } - } -} - -#[derive(Debug)] -pub struct Mark { - file_id: HirFileId, - span_map: OnceCell, - bomb: DropBomb, -} diff --git a/src/tools/rust-analyzer/crates/hir-def/src/expr_store.rs b/src/tools/rust-analyzer/crates/hir-def/src/expr_store.rs index 5ff6a7ffe5669..e3775c4931ae8 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/expr_store.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/expr_store.rs @@ -1,10 +1,11 @@ //! Defines `ExpressionStore`: a lowered representation of functions, statics and //! consts. -mod body; -mod lower; -mod pretty; +pub mod body; +mod expander; +pub mod lower; +pub mod path; +pub mod pretty; pub mod scope; - #[cfg(test)] mod tests; @@ -12,45 +13,49 @@ use std::ops::{Deref, Index}; use cfg::{CfgExpr, CfgOptions}; use either::Either; -use hir_expand::{name::Name, ExpandError, InFile}; +use hir_expand::{ExpandError, InFile, MacroCallId, mod_path::ModPath, name::Name}; use la_arena::{Arena, ArenaMap}; use rustc_hash::FxHashMap; use smallvec::SmallVec; -use span::{Edition, MacroFileId, SyntaxContextData}; -use syntax::{ast, AstPtr, SyntaxNodePtr}; +use span::{Edition, SyntaxContext}; +use syntax::{AstPtr, SyntaxNodePtr, ast}; use triomphe::Arc; use tt::TextRange; use crate::{ + BlockId, SyntheticSyntax, db::DefDatabase, + expr_store::path::Path, hir::{ Array, AsmOperand, Binding, BindingId, Expr, ExprId, ExprOrPatId, Label, LabelId, Pat, PatId, RecordFieldPat, Statement, }, nameres::DefMap, - path::{ModPath, Path}, - type_ref::{TypeRef, TypeRefId, TypesMap, TypesSourceMap}, - BlockId, DefWithBodyId, Lookup, SyntheticSyntax, + type_ref::{LifetimeRef, LifetimeRefId, PathId, TypeRef, TypeRefId}, }; pub use self::body::{Body, BodySourceMap}; +pub use self::lower::{ + hir_assoc_type_binding_to_ast, hir_generic_arg_to_ast, hir_segment_to_ast_segment, +}; /// A wrapper around [`span::SyntaxContextId`] that is intended only for comparisons. #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -pub struct HygieneId(span::SyntaxContextId); +pub struct HygieneId(span::SyntaxContext); impl HygieneId { // The edition doesn't matter here, we only use this for comparisons and to lookup the macro. - pub const ROOT: Self = Self(span::SyntaxContextId::root(Edition::Edition2015)); + pub const ROOT: Self = Self(span::SyntaxContext::root(Edition::Edition2015)); - pub fn new(mut ctx: span::SyntaxContextId) -> Self { + pub fn new(mut ctx: span::SyntaxContext) -> Self { // See `Name` for why we're doing that. ctx.remove_root_edition(); Self(ctx) } - pub(crate) fn lookup(self, db: &dyn DefDatabase) -> SyntaxContextData { - db.lookup_intern_syntax_context(self.0) + // FIXME: Inline this + pub(crate) fn lookup(self) -> SyntaxContext { + self.0 } pub(crate) fn is_root(self) -> bool { @@ -79,27 +84,26 @@ pub type ExprOrPatSource = InFile; pub type SelfParamPtr = AstPtr; pub type MacroCallPtr = AstPtr; +pub type TypePtr = AstPtr; +pub type TypeSource = InFile; + +pub type LifetimePtr = AstPtr; +pub type LifetimeSource = InFile; + #[derive(Debug, Eq, PartialEq)] pub struct ExpressionStore { pub exprs: Arena, pub pats: Arena, pub bindings: Arena, pub labels: Arena::E::F", &["A"]); +} + +#[test] +fn hir_to_ast_plain_path() { + check_hir_to_ast("A::B::C::D::E::F", &[]); +} + +#[test] +fn hir_to_ast_crate_path() { + check_hir_to_ast("crate::A::B::C", &[]); + check_hir_to_ast("crate::super::super::A::B::C", &[]); +} + +#[test] +fn hir_to_ast_self_path() { + check_hir_to_ast("self::A::B::C", &[]); + check_hir_to_ast("self::super::super::A::B::C", &[]); +} + +#[test] +fn hir_to_ast_super_path() { + check_hir_to_ast("super::A::B::C", &[]); + check_hir_to_ast("super::super::super::A::B::C", &[]); +} + +#[test] +fn hir_to_ast_type_anchor_path() { + check_hir_to_ast("::C::D", &["A", "B"]); +} + +#[test] +fn hir_to_ast_path_super_in_middle() { + check_hir_to_ast("A::super::B::super::super::C::D", &[]); +} + +#[track_caller] +fn check_fail_lowering(path: &str) { + let (_, _, lowered_path) = lower_path(make::path_from_text(path)); + assert!(lowered_path.is_none(), "path `{path}` should fail lowering"); +} + +#[test] +fn keywords_in_middle_fail_lowering1() { + check_fail_lowering("self::A::self::B::super::C::crate::D"); +} + +#[test] +fn keywords_in_middle_fail_lowering2() { + check_fail_lowering("A::super::self::C::D"); +} + +#[test] +fn keywords_in_middle_fail_lowering3() { + check_fail_lowering("A::crate::B::C::D"); +} + +#[track_caller] +fn check_path_lowering(path: &str, expected: Expect) { + let (db, store, lowered_path) = lower_path(make::path_from_text(path)); + let lowered_path = lowered_path.expect("failed to lower path"); + let buf = pretty::print_path(&db, &store, &lowered_path, Edition::CURRENT); + expected.assert_eq(&buf); +} + +#[test] +fn fn_like_path_with_coloncolon() { + check_path_lowering("Fn::(A, B) -> C", expect![[r#"Fn::<(A, B), Output = C>"#]]); + check_path_lowering("Fn::(A, B)", expect![[r#"Fn::<(A, B), Output = ()>"#]]); +} diff --git a/src/tools/rust-analyzer/crates/hir-def/src/expr_store/path.rs b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/path.rs new file mode 100644 index 0000000000000..db83e73a0b95f --- /dev/null +++ b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/path.rs @@ -0,0 +1,311 @@ +//! A desugared representation of paths like `crate::foo` or `::bar`. + +use std::iter; + +use crate::{ + lang_item::LangItemTarget, + type_ref::{ConstRef, LifetimeRefId, TypeBound, TypeRefId}, +}; +use hir_expand::{ + mod_path::{ModPath, PathKind}, + name::Name, +}; +use intern::Interned; + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub enum Path { + /// `BarePath` is used when the path has neither generics nor type anchor, since the vast majority of paths + /// are in this category, and splitting `Path` this way allows it to be more thin. When the path has either generics + /// or type anchor, it is `Path::Normal` with the generics filled with `None` even if there are none (practically + /// this is not a problem since many more paths have generics than a type anchor). + BarePath(Interned), + /// `Path::Normal` will always have either generics or type anchor. + Normal(Box), + /// A link to a lang item. It is used in desugaring of things like `it?`. We can show these + /// links via a normal path since they might be private and not accessible in the usage place. + LangItem(LangItemTarget, Option), +} + +// This type is being used a lot, make sure it doesn't grow unintentionally. +#[cfg(target_arch = "x86_64")] +const _: () = { + assert!(size_of::() == 16); + assert!(size_of::>() == 16); +}; + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct NormalPath { + pub generic_args: Box<[Option]>, + pub type_anchor: Option, + pub mod_path: Interned, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub enum GenericArgsParentheses { + No, + /// Bounds of the form `Type::method(..): Send` or `impl Trait`, + /// aka. Return Type Notation or RTN. + ReturnTypeNotation, + /// `Fn`-family parenthesized traits, e.g. `impl Fn(u32) -> String`. + /// + /// This is desugared into one generic argument containing a tuple of all arguments, + /// and an associated type binding for `Output` for the return type. + ParenSugar, +} + +/// Generic arguments to a path segment (e.g. the `i32` in `Option`). This +/// also includes bindings of associated types, like in `Iterator`. +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct GenericArgs { + pub args: Box<[GenericArg]>, + /// This specifies whether the args contain a Self type as the first + /// element. This is the case for path segments like ``, where + /// `T` is actually a type parameter for the path `Trait` specifying the + /// Self type. Otherwise, when we have a path `Trait`, the Self type + /// is left out. + pub has_self_type: bool, + /// Associated type bindings like in `Iterator`. + pub bindings: Box<[AssociatedTypeBinding]>, + /// Whether these generic args were written with parentheses and how. + pub parenthesized: GenericArgsParentheses, +} + +/// An associated type binding like in `Iterator`. +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct AssociatedTypeBinding { + /// The name of the associated type. + pub name: Name, + /// The generic arguments to the associated type. e.g. For `Trait = &'a T>`, this + /// would be `['a, T]`. + pub args: Option, + /// The type bound to this associated type (in `Item = T`, this would be the + /// `T`). This can be `None` if there are bounds instead. + pub type_ref: Option, + /// Bounds for the associated type, like in `Iterator`. (This is the unstable `associated_type_bounds` + /// feature.) + pub bounds: Box<[TypeBound]>, +} + +/// A single generic argument. +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub enum GenericArg { + Type(TypeRefId), + Lifetime(LifetimeRefId), + Const(ConstRef), +} + +impl Path { + /// Converts a known mod path to `Path`. + pub fn from_known_path(path: ModPath, generic_args: Vec>) -> Path { + Path::Normal(Box::new(NormalPath { + generic_args: generic_args.into_boxed_slice(), + type_anchor: None, + mod_path: Interned::new(path), + })) + } + + /// Converts a known mod path to `Path`. + pub fn from_known_path_with_no_generic(path: ModPath) -> Path { + Path::BarePath(Interned::new(path)) + } + + #[inline] + pub fn kind(&self) -> &PathKind { + match self { + Path::BarePath(mod_path) => &mod_path.kind, + Path::Normal(path) => &path.mod_path.kind, + Path::LangItem(..) => &PathKind::Abs, + } + } + + #[inline] + pub fn type_anchor(&self) -> Option { + match self { + Path::Normal(path) => path.type_anchor, + Path::LangItem(..) | Path::BarePath(_) => None, + } + } + + #[inline] + pub fn generic_args(&self) -> Option<&[Option]> { + match self { + Path::Normal(path) => Some(&path.generic_args), + Path::LangItem(..) | Path::BarePath(_) => None, + } + } + + pub fn segments(&self) -> PathSegments<'_> { + match self { + Path::BarePath(mod_path) => { + PathSegments { segments: mod_path.segments(), generic_args: None } + } + Path::Normal(path) => PathSegments { + segments: path.mod_path.segments(), + generic_args: Some(&path.generic_args), + }, + Path::LangItem(_, seg) => PathSegments { segments: seg.as_slice(), generic_args: None }, + } + } + + pub fn mod_path(&self) -> Option<&ModPath> { + match self { + Path::BarePath(mod_path) => Some(mod_path), + Path::Normal(path) => Some(&path.mod_path), + Path::LangItem(..) => None, + } + } + + pub fn qualifier(&self) -> Option { + match self { + Path::BarePath(mod_path) => { + if mod_path.is_ident() { + return None; + } + Some(Path::BarePath(Interned::new(ModPath::from_segments( + mod_path.kind, + mod_path.segments()[..mod_path.segments().len() - 1].iter().cloned(), + )))) + } + Path::Normal(path) => { + let mod_path = &path.mod_path; + if mod_path.is_ident() { + return None; + } + let type_anchor = path.type_anchor; + let generic_args = &path.generic_args; + let qualifier_mod_path = Interned::new(ModPath::from_segments( + mod_path.kind, + mod_path.segments()[..mod_path.segments().len() - 1].iter().cloned(), + )); + let qualifier_generic_args = &generic_args[..generic_args.len() - 1]; + if type_anchor.is_none() && qualifier_generic_args.iter().all(|it| it.is_none()) { + Some(Path::BarePath(qualifier_mod_path)) + } else { + Some(Path::Normal(Box::new(NormalPath { + type_anchor, + mod_path: qualifier_mod_path, + generic_args: qualifier_generic_args.iter().cloned().collect(), + }))) + } + } + Path::LangItem(..) => None, + } + } + + pub fn is_self_type(&self) -> bool { + match self { + Path::BarePath(mod_path) => mod_path.is_Self(), + Path::Normal(path) => { + path.type_anchor.is_none() + && path.mod_path.is_Self() + && path.generic_args.iter().all(|args| args.is_none()) + } + Path::LangItem(..) => false, + } + } +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct PathSegment<'a> { + pub name: &'a Name, + pub args_and_bindings: Option<&'a GenericArgs>, +} + +impl PathSegment<'_> { + pub const MISSING: PathSegment<'static> = + PathSegment { name: &Name::missing(), args_and_bindings: None }; +} + +#[derive(Debug, Clone, Copy)] +pub struct PathSegments<'a> { + segments: &'a [Name], + generic_args: Option<&'a [Option]>, +} + +impl<'a> PathSegments<'a> { + pub const EMPTY: PathSegments<'static> = PathSegments { segments: &[], generic_args: None }; + pub fn is_empty(&self) -> bool { + self.len() == 0 + } + pub fn len(&self) -> usize { + self.segments.len() + } + pub fn first(&self) -> Option> { + self.get(0) + } + pub fn last(&self) -> Option> { + self.get(self.len().checked_sub(1)?) + } + + pub fn get(&self, idx: usize) -> Option> { + let res = PathSegment { + name: self.segments.get(idx)?, + args_and_bindings: self.generic_args.and_then(|it| it.get(idx)?.as_ref()), + }; + Some(res) + } + + pub fn skip(&self, len: usize) -> PathSegments<'a> { + PathSegments { + segments: self.segments.get(len..).unwrap_or(&[]), + generic_args: self.generic_args.and_then(|it| it.get(len..)), + } + } + + pub fn take(&self, len: usize) -> PathSegments<'a> { + PathSegments { + segments: self.segments.get(..len).unwrap_or(self.segments), + generic_args: self.generic_args.map(|it| it.get(..len).unwrap_or(it)), + } + } + + pub fn strip_last(&self) -> PathSegments<'a> { + PathSegments { + segments: self.segments.split_last().map_or(&[], |it| it.1), + generic_args: self.generic_args.map(|it| it.split_last().map_or(&[][..], |it| it.1)), + } + } + + pub fn strip_last_two(&self) -> PathSegments<'a> { + PathSegments { + segments: self.segments.get(..self.segments.len().saturating_sub(2)).unwrap_or(&[]), + generic_args: self + .generic_args + .map(|it| it.get(..it.len().saturating_sub(2)).unwrap_or(&[])), + } + } + + pub fn iter(&self) -> impl Iterator> { + self.segments + .iter() + .zip(self.generic_args.into_iter().flatten().chain(iter::repeat(&None))) + .map(|(name, args)| PathSegment { name, args_and_bindings: args.as_ref() }) + } +} + +impl GenericArgs { + pub(crate) fn empty() -> GenericArgs { + GenericArgs { + args: Box::default(), + has_self_type: false, + bindings: Box::default(), + parenthesized: GenericArgsParentheses::No, + } + } + + pub(crate) fn return_type_notation() -> GenericArgs { + GenericArgs { + args: Box::default(), + has_self_type: false, + bindings: Box::default(), + parenthesized: GenericArgsParentheses::ReturnTypeNotation, + } + } +} + +impl From for Path { + fn from(name: Name) -> Path { + Path::BarePath(Interned::new(ModPath::from_segments(PathKind::Plain, iter::once(name)))) + } +} diff --git a/src/tools/rust-analyzer/crates/hir-def/src/expr_store/pretty.rs b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/pretty.rs index 82ad756dc2c6a..f12a9b7a5445b 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/expr_store/pretty.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/pretty.rs @@ -1,56 +1,83 @@ //! A pretty-printer for HIR. +#![allow(dead_code)] -use std::fmt::{self, Write}; +use std::{ + fmt::{self, Write}, + mem, +}; +use hir_expand::{Lookup, mod_path::PathKind}; use itertools::Itertools; use span::Edition; use crate::{ - hir::{Array, BindingAnnotation, CaptureBy, ClosureKind, Literal, Movability, Statement}, - pretty::{print_generic_args, print_path, print_type_ref}, + AdtId, DefWithBodyId, GenericDefId, ItemTreeLoc, TypeParamId, VariantId, + expr_store::path::{GenericArg, GenericArgs}, + hir::{ + Array, BindingAnnotation, CaptureBy, ClosureKind, Literal, Movability, Statement, + generics::{GenericParams, WherePredicate}, + }, + lang_item::LangItemTarget, + signatures::{FnFlags, FunctionSignature, StructSignature}, + type_ref::{ConstRef, LifetimeRef, Mutability, TraitBoundModifier, TypeBound, UseArgRef}, }; +use crate::{LifetimeParamId, signatures::StructFlags}; +use crate::{item_tree::FieldsShape, signatures::FieldData}; use super::*; +macro_rules! w { + ($dst:expr, $($arg:tt)*) => { + { let _ = write!($dst, $($arg)*); } + }; +} + +macro_rules! wln { + ($dst:expr) => { + { $dst.newline(); } + }; + ($dst:expr, $($arg:tt)*) => { + { let _ = w!($dst, $($arg)*); $dst.newline(); } + }; +} + #[derive(Debug, Clone, Copy, PartialEq, Eq)] -pub(super) enum LineFormat { +pub enum LineFormat { Oneline, Newline, Indentation, } -pub(super) fn print_body_hir( +pub fn print_body_hir( db: &dyn DefDatabase, body: &Body, owner: DefWithBodyId, edition: Edition, ) -> String { let header = match owner { - DefWithBodyId::FunctionId(it) => it - .lookup(db) - .id - .resolved(db, |it| format!("fn {}", it.name.display(db.upcast(), edition))), + DefWithBodyId::FunctionId(it) => { + it.lookup(db).id.resolved(db, |it| format!("fn {}", it.name.display(db, edition))) + } DefWithBodyId::StaticId(it) => it .lookup(db) .id - .resolved(db, |it| format!("static {} = ", it.name.display(db.upcast(), edition))), + .resolved(db, |it| format!("static {} = ", it.name.display(db, edition))), DefWithBodyId::ConstId(it) => it.lookup(db).id.resolved(db, |it| { format!( "const {} = ", match &it.name { - Some(name) => name.display(db.upcast(), edition).to_string(), + Some(name) => name.display(db, edition).to_string(), None => "_".to_owned(), } ) }), - DefWithBodyId::InTypeConstId(_) => "In type const = ".to_owned(), DefWithBodyId::VariantId(it) => { let loc = it.lookup(db); let enum_loc = loc.parent.lookup(db); format!( "enum {}::{}", - enum_loc.id.item_tree(db)[enum_loc.id.value].name.display(db.upcast(), edition), - loc.id.item_tree(db)[loc.id.value].name.display(db.upcast(), edition), + enum_loc.id.item_tree(db)[enum_loc.id.value].name.display(db, edition), + loc.id.item_tree(db)[loc.id.value].name.display(db, edition), ) } }; @@ -63,32 +90,21 @@ pub(super) fn print_body_hir( line_format: LineFormat::Newline, edition, }; - if let DefWithBodyId::FunctionId(it) = owner { + if let DefWithBodyId::FunctionId(_) = owner { p.buf.push('('); - let function_data = db.function_data(it); - let (mut params, ret_type) = (function_data.params.iter(), &function_data.ret_type); if let Some(self_param) = body.self_param { p.print_binding(self_param); - p.buf.push_str(": "); - if let Some(ty) = params.next() { - p.print_type_ref(*ty, &function_data.types_map); - p.buf.push_str(", "); - } + p.buf.push_str(", "); } - body.params.iter().zip(params).for_each(|(¶m, ty)| { - p.print_pat(param); - p.buf.push_str(": "); - p.print_type_ref(*ty, &function_data.types_map); + body.params.iter().for_each(|param| { + p.print_pat(*param); p.buf.push_str(", "); }); // remove the last ", " in param list - if body.params.len() > 0 { + if !body.params.is_empty() { p.buf.truncate(p.buf.len() - 2); } p.buf.push(')'); - // return type - p.buf.push_str(" -> "); - p.print_type_ref(*ret_type, &function_data.types_map); p.buf.push(' '); } p.print_expr(body.body_expr); @@ -98,7 +114,298 @@ pub(super) fn print_body_hir( p.buf } -pub(super) fn print_expr_hir( +pub fn print_variant_body_hir(db: &dyn DefDatabase, owner: VariantId, edition: Edition) -> String { + let header = match owner { + VariantId::StructId(it) => { + it.lookup(db).id.resolved(db, |it| format!("struct {}", it.name.display(db, edition))) + } + VariantId::EnumVariantId(enum_variant_id) => { + let loc = enum_variant_id.lookup(db); + let enum_loc = loc.parent.lookup(db); + format!( + "enum {}::{}", + enum_loc.id.item_tree(db)[enum_loc.id.value].name.display(db, edition), + loc.id.item_tree(db)[loc.id.value].name.display(db, edition), + ) + } + VariantId::UnionId(union_id) => union_id + .lookup(db) + .id + .resolved(db, |it| format!("union {}", it.name.display(db, edition))), + }; + + let fields = db.variant_fields(owner); + + let mut p = Printer { + db, + store: &fields.store, + buf: header, + indent_level: 0, + line_format: LineFormat::Newline, + edition, + }; + match fields.shape { + FieldsShape::Record => wln!(p, " {{"), + FieldsShape::Tuple => wln!(p, "("), + FieldsShape::Unit => (), + } + + for (_, data) in fields.fields().iter() { + let FieldData { name, type_ref, visibility, is_unsafe } = data; + match visibility { + crate::item_tree::RawVisibility::Module(interned, _visibility_explicitness) => { + w!(p, "{}", interned.display(db, p.edition)) + } + crate::item_tree::RawVisibility::Public => w!(p, "pub "), + } + if *is_unsafe { + w!(p, "unsafe "); + } + w!(p, "{}: ", name.display(db, p.edition)); + p.print_type_ref(*type_ref); + } + + match fields.shape { + FieldsShape::Record => wln!(p, "}}"), + FieldsShape::Tuple => wln!(p, ");"), + FieldsShape::Unit => wln!(p, ";"), + } + p.buf +} + +pub fn print_signature(db: &dyn DefDatabase, owner: GenericDefId, edition: Edition) -> String { + match owner { + GenericDefId::AdtId(id) => match id { + AdtId::StructId(id) => { + let signature = db.struct_signature(id); + print_struct(db, &signature, edition) + } + AdtId::UnionId(id) => { + format!("unimplemented {id:?}") + } + AdtId::EnumId(id) => { + format!("unimplemented {id:?}") + } + }, + GenericDefId::ConstId(id) => format!("unimplemented {id:?}"), + GenericDefId::FunctionId(id) => { + let signature = db.function_signature(id); + print_function(db, &signature, edition) + } + GenericDefId::ImplId(id) => format!("unimplemented {id:?}"), + GenericDefId::StaticId(id) => format!("unimplemented {id:?}"), + GenericDefId::TraitAliasId(id) => format!("unimplemented {id:?}"), + GenericDefId::TraitId(id) => format!("unimplemented {id:?}"), + GenericDefId::TypeAliasId(id) => format!("unimplemented {id:?}"), + } +} + +pub fn print_path( + db: &dyn DefDatabase, + store: &ExpressionStore, + path: &Path, + edition: Edition, +) -> String { + let mut p = Printer { + db, + store, + buf: String::new(), + indent_level: 0, + line_format: LineFormat::Newline, + edition, + }; + p.print_path(path); + p.buf +} + +pub fn print_struct( + db: &dyn DefDatabase, + StructSignature { name, generic_params, store, flags, shape, repr }: &StructSignature, + edition: Edition, +) -> String { + let mut p = Printer { + db, + store, + buf: String::new(), + indent_level: 0, + line_format: LineFormat::Newline, + edition, + }; + if let Some(repr) = repr { + if repr.c() { + wln!(p, "#[repr(C)]"); + } + if let Some(align) = repr.align { + wln!(p, "#[repr(align({}))]", align.bytes()); + } + if let Some(pack) = repr.pack { + wln!(p, "#[repr(pack({}))]", pack.bytes()); + } + } + if flags.contains(StructFlags::FUNDAMENTAL) { + wln!(p, "#[fundamental]"); + } + w!(p, "struct "); + w!(p, "{}", name.display(db, edition)); + print_generic_params(db, generic_params, &mut p); + match shape { + FieldsShape::Record => wln!(p, " {{...}}"), + FieldsShape::Tuple => wln!(p, "(...)"), + FieldsShape::Unit => (), + } + + print_where_clauses(db, generic_params, &mut p); + + match shape { + FieldsShape::Record => wln!(p), + FieldsShape::Tuple => wln!(p, ";"), + FieldsShape::Unit => wln!(p, ";"), + } + + p.buf +} + +pub fn print_function( + db: &dyn DefDatabase, + FunctionSignature { + name, + generic_params, + store, + params, + ret_type, + abi, + flags, + legacy_const_generics_indices, + }: &FunctionSignature, + edition: Edition, +) -> String { + let mut p = Printer { + db, + store, + buf: String::new(), + indent_level: 0, + line_format: LineFormat::Newline, + edition, + }; + if flags.contains(FnFlags::CONST) { + w!(p, "const "); + } + if flags.contains(FnFlags::ASYNC) { + w!(p, "async "); + } + if flags.contains(FnFlags::UNSAFE) { + w!(p, "unsafe "); + } + if flags.contains(FnFlags::EXPLICIT_SAFE) { + w!(p, "safe "); + } + if let Some(abi) = abi { + w!(p, "extern \"{}\" ", abi.as_str()); + } + w!(p, "fn "); + w!(p, "{}", name.display(db, edition)); + print_generic_params(db, generic_params, &mut p); + w!(p, "("); + for (i, param) in params.iter().enumerate() { + if i != 0 { + w!(p, ", "); + } + if legacy_const_generics_indices.as_ref().is_some_and(|idx| idx.contains(&(i as u32))) { + w!(p, "const: "); + } + p.print_type_ref(*param); + } + w!(p, ")"); + if let Some(ret_type) = ret_type { + w!(p, " -> "); + p.print_type_ref(*ret_type); + } + + print_where_clauses(db, generic_params, &mut p); + wln!(p, " {{...}}"); + + p.buf +} + +fn print_where_clauses(db: &dyn DefDatabase, generic_params: &GenericParams, p: &mut Printer<'_>) { + if !generic_params.where_predicates.is_empty() { + w!(p, "\nwhere\n"); + p.indented(|p| { + for (i, pred) in generic_params.where_predicates.iter().enumerate() { + if i != 0 { + w!(p, ",\n"); + } + match pred { + WherePredicate::TypeBound { target, bound } => { + p.print_type_ref(*target); + w!(p, ": "); + p.print_type_bounds(std::slice::from_ref(bound)); + } + WherePredicate::Lifetime { target, bound } => { + p.print_lifetime_ref(*target); + w!(p, ": "); + p.print_lifetime_ref(*bound); + } + WherePredicate::ForLifetime { lifetimes, target, bound } => { + w!(p, "for<"); + for (i, lifetime) in lifetimes.iter().enumerate() { + if i != 0 { + w!(p, ", "); + } + w!(p, "{}", lifetime.display(db, p.edition)); + } + w!(p, "> "); + p.print_type_ref(*target); + w!(p, ": "); + p.print_type_bounds(std::slice::from_ref(bound)); + } + } + } + }); + wln!(p); + } +} + +fn print_generic_params(db: &dyn DefDatabase, generic_params: &GenericParams, p: &mut Printer<'_>) { + if !generic_params.is_empty() { + w!(p, "<"); + let mut first = true; + for (_i, param) in generic_params.iter_lt() { + if !first { + w!(p, ", "); + } + first = false; + w!(p, "{}", param.name.display(db, p.edition)); + } + for (i, param) in generic_params.iter_type_or_consts() { + if !first { + w!(p, ", "); + } + first = false; + if let Some(const_param) = param.const_param() { + w!(p, "const {}: ", const_param.name.display(db, p.edition)); + p.print_type_ref(const_param.ty); + if let Some(default) = const_param.default { + w!(p, " = "); + p.print_expr(default.expr); + } + } + if let Some(type_param) = param.type_param() { + match &type_param.name { + Some(name) => w!(p, "{}", name.display(db, p.edition)), + None => w!(p, "Param[{}]", i.into_raw()), + } + if let Some(default) = type_param.default { + w!(p, " = "); + p.print_type_ref(default); + } + } + } + w!(p, ">"); + } +} + +pub fn print_expr_hir( db: &dyn DefDatabase, store: &ExpressionStore, _owner: DefWithBodyId, @@ -117,7 +424,7 @@ pub(super) fn print_expr_hir( p.buf } -pub(super) fn print_pat_hir( +pub fn print_pat_hir( db: &dyn DefDatabase, store: &ExpressionStore, _owner: DefWithBodyId, @@ -137,21 +444,6 @@ pub(super) fn print_pat_hir( p.buf } -macro_rules! w { - ($dst:expr, $($arg:tt)*) => { - { let _ = write!($dst, $($arg)*); } - }; -} - -macro_rules! wln { - ($dst:expr) => { - { $dst.newline(); } - }; - ($dst:expr, $($arg:tt)*) => { - { let _ = w!($dst, $($arg)*); $dst.newline(); } - }; -} - struct Printer<'a> { db: &'a dyn DefDatabase, store: &'a ExpressionStore, @@ -238,7 +530,7 @@ impl Printer<'_> { Expr::InlineAsm(_) => w!(self, "builtin#asm(_)"), Expr::OffsetOf(offset_of) => { w!(self, "builtin#offset_of("); - self.print_type_ref(offset_of.container, &self.store.types); + self.print_type_ref(offset_of.container); let edition = self.edition; w!( self, @@ -246,7 +538,7 @@ impl Printer<'_> { offset_of .fields .iter() - .format_with(".", |field, f| f(&field.display(self.db.upcast(), edition))) + .format_with(".", |field, f| f(&field.display(self.db, edition))) ); } Expr::Path(path) => self.print_path(path), @@ -268,7 +560,7 @@ impl Printer<'_> { } Expr::Loop { body, label } => { if let Some(lbl) = label { - w!(self, "{}: ", self.store[*lbl].name.display(self.db.upcast(), self.edition)); + w!(self, "{}: ", self.store[*lbl].name.display(self.db, self.edition)); } w!(self, "loop "); self.print_expr(*body); @@ -288,11 +580,10 @@ impl Printer<'_> { } Expr::MethodCall { receiver, method_name, args, generic_args } => { self.print_expr(*receiver); - w!(self, ".{}", method_name.display(self.db.upcast(), self.edition)); + w!(self, ".{}", method_name.display(self.db, self.edition)); if let Some(args) = generic_args { w!(self, "::<"); - let edition = self.edition; - print_generic_args(self.db, args, &self.store.types, self, edition).unwrap(); + self.print_generic_args(args); w!(self, ">"); } w!(self, "("); @@ -327,13 +618,13 @@ impl Printer<'_> { Expr::Continue { label } => { w!(self, "continue"); if let Some(lbl) = label { - w!(self, " {}", self.store[*lbl].name.display(self.db.upcast(), self.edition)); + w!(self, " {}", self.store[*lbl].name.display(self.db, self.edition)); } } Expr::Break { expr, label } => { w!(self, "break"); if let Some(lbl) = label { - w!(self, " {}", self.store[*lbl].name.display(self.db.upcast(), self.edition)); + w!(self, " {}", self.store[*lbl].name.display(self.db, self.edition)); } if let Some(expr) = expr { self.whitespace(); @@ -378,7 +669,7 @@ impl Printer<'_> { let edition = self.edition; self.indented(|p| { for field in &**fields { - w!(p, "{}: ", field.name.display(self.db.upcast(), edition)); + w!(p, "{}: ", field.name.display(self.db, edition)); p.print_expr(field.expr); wln!(p, ","); } @@ -392,7 +683,7 @@ impl Printer<'_> { } Expr::Field { expr, name } => { self.print_expr(*expr); - w!(self, ".{}", name.display(self.db.upcast(), self.edition)); + w!(self, ".{}", name.display(self.db, self.edition)); } Expr::Await { expr } => { self.print_expr(*expr); @@ -401,7 +692,7 @@ impl Printer<'_> { Expr::Cast { expr, type_ref } => { self.print_expr(*expr); w!(self, " as "); - self.print_type_ref(*type_ref, &self.store.types); + self.print_type_ref(*type_ref); } Expr::Ref { expr, rawness, mutability } => { w!(self, "&"); @@ -489,13 +780,13 @@ impl Printer<'_> { self.print_pat(*pat); if let Some(ty) = ty { w!(self, ": "); - self.print_type_ref(*ty, &self.store.types); + self.print_type_ref(*ty); } } w!(self, "|"); if let Some(ret_ty) = ret_type { w!(self, " -> "); - self.print_type_ref(*ret_ty, &self.store.types); + self.print_type_ref(*ret_ty); } self.whitespace(); self.print_expr(*body); @@ -531,7 +822,7 @@ impl Printer<'_> { Expr::Literal(lit) => self.print_literal(lit), Expr::Block { id: _, statements, tail, label } => { let label = label.map(|lbl| { - format!("{}: ", self.store[lbl].name.display(self.db.upcast(), self.edition)) + format!("{}: ", self.store[lbl].name.display(self.db, self.edition)) }); self.print_block(label.as_deref(), statements, tail); } @@ -617,7 +908,7 @@ impl Printer<'_> { let oneline = matches!(self.line_format, LineFormat::Oneline); self.indented(|p| { for (idx, arg) in args.iter().enumerate() { - let field_name = arg.name.display(self.db.upcast(), edition).to_string(); + let field_name = arg.name.display(self.db, edition).to_string(); let mut same_name = false; if let Pat::Bind { id, subpat: None } = &self.store[arg.pat] { @@ -731,7 +1022,7 @@ impl Printer<'_> { self.print_pat(*pat); if let Some(ty) = type_ref { w!(self, ": "); - self.print_type_ref(*ty, &self.store.types); + self.print_type_ref(*ty); } if let Some(init) = initializer { w!(self, " = "); @@ -782,16 +1073,6 @@ impl Printer<'_> { } } - fn print_type_ref(&mut self, ty: TypeRefId, map: &TypesMap) { - let edition = self.edition; - print_type_ref(self.db, ty, map, self, edition).unwrap(); - } - - fn print_path(&mut self, path: &Path) { - let edition = self.edition; - print_path(self.db, path, &self.store.types, self, edition).unwrap(); - } - fn print_binding(&mut self, id: BindingId) { let Binding { name, mode, .. } = &self.store.bindings[id]; let mode = match mode { @@ -800,6 +1081,288 @@ impl Printer<'_> { BindingAnnotation::Ref => "ref ", BindingAnnotation::RefMut => "ref mut ", }; - w!(self, "{}{}", mode, name.display(self.db.upcast(), self.edition)); + w!(self, "{}{}", mode, name.display(self.db, self.edition)); + } + + fn print_path(&mut self, path: &Path) { + if let Path::LangItem(it, s) = path { + w!(self, "builtin#lang("); + macro_rules! write_name { + ($it:ident) => {{ + let loc = $it.lookup(self.db); + let tree = loc.item_tree_id().item_tree(self.db); + let name = &tree[loc.id.value].name; + w!(self, "{}", name.display(self.db, self.edition)); + }}; + } + match *it { + LangItemTarget::ImplDef(it) => w!(self, "{it:?}"), + LangItemTarget::EnumId(it) => write_name!(it), + LangItemTarget::Function(it) => write_name!(it), + LangItemTarget::Static(it) => write_name!(it), + LangItemTarget::Struct(it) => write_name!(it), + LangItemTarget::Union(it) => write_name!(it), + LangItemTarget::TypeAlias(it) => write_name!(it), + LangItemTarget::Trait(it) => write_name!(it), + LangItemTarget::EnumVariant(it) => write_name!(it), + } + + if let Some(s) = s { + w!(self, "::{}", s.display(self.db, self.edition)); + } + return w!(self, ")"); + } + match path.type_anchor() { + Some(anchor) => { + w!(self, "<"); + self.print_type_ref(anchor); + w!(self, ">::"); + } + None => match path.kind() { + PathKind::Plain => {} + &PathKind::SELF => w!(self, "self"), + PathKind::Super(n) => { + for i in 0..*n { + if i == 0 { + w!(self, "super"); + } else { + w!(self, "::super"); + } + } + } + PathKind::Crate => w!(self, "crate"), + PathKind::Abs => {} + PathKind::DollarCrate(krate) => w!( + self, + "{}", + krate + .extra_data(self.db) + .display_name + .as_ref() + .map(|it| it.crate_name().symbol().as_str()) + .unwrap_or("$crate") + ), + }, + } + + for (i, segment) in path.segments().iter().enumerate() { + if i != 0 || !matches!(path.kind(), PathKind::Plain) { + w!(self, "::"); + } + + w!(self, "{}", segment.name.display(self.db, self.edition)); + if let Some(generics) = segment.args_and_bindings { + w!(self, "::<"); + self.print_generic_args(generics); + + w!(self, ">"); + } + } + } + + pub(crate) fn print_generic_args(&mut self, generics: &GenericArgs) { + let mut first = true; + let args = if generics.has_self_type { + let (self_ty, args) = generics.args.split_first().unwrap(); + w!(self, "Self="); + self.print_generic_arg(self_ty); + first = false; + args + } else { + &generics.args + }; + for arg in args { + if !first { + w!(self, ", "); + } + first = false; + self.print_generic_arg(arg); + } + for binding in generics.bindings.iter() { + if !first { + w!(self, ", "); + } + first = false; + w!(self, "{}", binding.name.display(self.db, self.edition)); + if !binding.bounds.is_empty() { + w!(self, ": "); + self.print_type_bounds(&binding.bounds); + } + if let Some(ty) = binding.type_ref { + w!(self, " = "); + self.print_type_ref(ty); + } + } + } + + pub(crate) fn print_generic_arg(&mut self, arg: &GenericArg) { + match arg { + GenericArg::Type(ty) => self.print_type_ref(*ty), + GenericArg::Const(ConstRef { expr }) => self.print_expr(*expr), + GenericArg::Lifetime(lt) => self.print_lifetime_ref(*lt), + } + } + + pub(crate) fn print_type_param(&mut self, param: TypeParamId) { + let generic_params = self.db.generic_params(param.parent()); + + match generic_params[param.local_id()].name() { + Some(name) => w!(self, "{}", name.display(self.db, self.edition)), + None => w!(self, "Param[{}]", param.local_id().into_raw()), + } + } + + pub(crate) fn print_lifetime_param(&mut self, param: LifetimeParamId) { + let generic_params = self.db.generic_params(param.parent); + w!(self, "{}", generic_params[param.local_id].name.display(self.db, self.edition)) + } + + pub(crate) fn print_lifetime_ref(&mut self, lt_ref: LifetimeRefId) { + match &self.store[lt_ref] { + LifetimeRef::Static => w!(self, "'static"), + LifetimeRef::Named(lt) => { + w!(self, "{}", lt.display(self.db, self.edition)) + } + LifetimeRef::Placeholder => w!(self, "'_"), + LifetimeRef::Error => w!(self, "'{{error}}"), + &LifetimeRef::Param(p) => self.print_lifetime_param(p), + } + } + + pub(crate) fn print_type_ref(&mut self, type_ref: TypeRefId) { + // FIXME: deduplicate with `HirDisplay` impl + match &self.store[type_ref] { + TypeRef::Never => w!(self, "!"), + &TypeRef::TypeParam(p) => self.print_type_param(p), + TypeRef::Placeholder => w!(self, "_"), + TypeRef::Tuple(fields) => { + w!(self, "("); + for (i, field) in fields.iter().enumerate() { + if i != 0 { + w!(self, ", "); + } + self.print_type_ref(*field); + } + w!(self, ")"); + } + TypeRef::Path(path) => self.print_path(path), + TypeRef::RawPtr(pointee, mtbl) => { + let mtbl = match mtbl { + Mutability::Shared => "*const", + Mutability::Mut => "*mut", + }; + w!(self, "{mtbl} "); + self.print_type_ref(*pointee); + } + TypeRef::Reference(ref_) => { + let mtbl = match ref_.mutability { + Mutability::Shared => "", + Mutability::Mut => "mut ", + }; + w!(self, "&"); + if let Some(lt) = &ref_.lifetime { + self.print_lifetime_ref(*lt); + w!(self, " "); + } + w!(self, "{mtbl}"); + self.print_type_ref(ref_.ty); + } + TypeRef::Array(array) => { + w!(self, "["); + self.print_type_ref(array.ty); + w!(self, "; "); + self.print_generic_arg(&GenericArg::Const(array.len)); + w!(self, "]"); + } + TypeRef::Slice(elem) => { + w!(self, "["); + self.print_type_ref(*elem); + w!(self, "]"); + } + TypeRef::Fn(fn_) => { + let ((_, return_type), args) = + fn_.params.split_last().expect("TypeRef::Fn is missing return type"); + if fn_.is_unsafe { + w!(self, "unsafe "); + } + if let Some(abi) = &fn_.abi { + w!(self, "extern "); + w!(self, "{}", abi.as_str()); + w!(self, " "); + } + w!(self, "fn("); + for (i, (_, typeref)) in args.iter().enumerate() { + if i != 0 { + w!(self, ", "); + } + self.print_type_ref(*typeref); + } + if fn_.is_varargs { + if !args.is_empty() { + w!(self, ", "); + } + w!(self, "..."); + } + w!(self, ") -> "); + self.print_type_ref(*return_type); + } + TypeRef::Error => w!(self, "{{error}}"), + TypeRef::ImplTrait(bounds) => { + w!(self, "impl "); + self.print_type_bounds(bounds); + } + TypeRef::DynTrait(bounds) => { + w!(self, "dyn "); + self.print_type_bounds(bounds); + } + } + } + + pub(crate) fn print_type_bounds(&mut self, bounds: &[TypeBound]) { + for (i, bound) in bounds.iter().enumerate() { + if i != 0 { + w!(self, " + "); + } + + match bound { + TypeBound::Path(path, modifier) => { + match modifier { + TraitBoundModifier::None => (), + TraitBoundModifier::Maybe => w!(self, "?"), + } + self.print_path(&self.store[*path]); + } + TypeBound::ForLifetime(lifetimes, path) => { + w!( + self, + "for<{}> ", + lifetimes + .iter() + .map(|it| it.display(self.db, self.edition)) + .format(", ") + .to_string() + ); + self.print_path(&self.store[*path]); + } + TypeBound::Lifetime(lt) => self.print_lifetime_ref(*lt), + TypeBound::Use(args) => { + w!(self, "use<"); + let mut first = true; + for arg in args { + if !mem::take(&mut first) { + w!(self, ", "); + } + match arg { + UseArgRef::Name(it) => { + w!(self, "{}", it.display(self.db, self.edition)) + } + UseArgRef::Lifetime(it) => self.print_lifetime_ref(*it), + } + } + w!(self, ">") + } + TypeBound::Error => w!(self, "{{unknown}}"), + } + } } } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/expr_store/scope.rs b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/scope.rs index 859a706177aab..431ea9eb1d465 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/expr_store/scope.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/scope.rs @@ -1,13 +1,13 @@ //! Name resolution for expressions. -use hir_expand::{name::Name, MacroDefId}; +use hir_expand::{MacroDefId, name::Name}; use la_arena::{Arena, ArenaMap, Idx, IdxRange, RawIdx}; use triomphe::Arc; use crate::{ + BlockId, DefWithBodyId, db::DefDatabase, expr_store::{Body, ExpressionStore, HygieneId}, hir::{Binding, BindingId, Expr, ExprId, Item, LabelId, Pat, PatId, Statement}, - BlockId, ConstBlockId, DefWithBodyId, }; pub type ScopeId = Idx; @@ -53,9 +53,7 @@ pub struct ScopeData { impl ExprScopes { pub(crate) fn expr_scopes_query(db: &dyn DefDatabase, def: DefWithBodyId) -> Arc { let body = db.body(def); - let mut scopes = ExprScopes::new_body(&body, |const_block| { - db.lookup_intern_anonymous_const(const_block).root - }); + let mut scopes = ExprScopes::new_body(&body); scopes.shrink_to_fit(); Arc::new(scopes) } @@ -104,10 +102,7 @@ fn empty_entries(idx: usize) -> IdxRange { } impl ExprScopes { - fn new_body( - body: &Body, - resolve_const_block: impl (Fn(ConstBlockId) -> ExprId) + Copy, - ) -> ExprScopes { + fn new_body(body: &Body) -> ExprScopes { let mut scopes = ExprScopes { scopes: Arena::default(), scope_entries: Arena::default(), @@ -118,7 +113,7 @@ impl ExprScopes { scopes.add_bindings(body, root, self_param, body.binding_hygiene(self_param)); } scopes.add_params_bindings(body, root, &body.params); - compute_expr_scopes(body.body_expr, body, &mut scopes, &mut root, resolve_const_block); + compute_expr_scopes(body.body_expr, body, &mut scopes, &mut root); scopes } @@ -221,23 +216,22 @@ fn compute_block_scopes( store: &ExpressionStore, scopes: &mut ExprScopes, scope: &mut ScopeId, - resolve_const_block: impl (Fn(ConstBlockId) -> ExprId) + Copy, ) { for stmt in statements { match stmt { Statement::Let { pat, initializer, else_branch, .. } => { if let Some(expr) = initializer { - compute_expr_scopes(*expr, store, scopes, scope, resolve_const_block); + compute_expr_scopes(*expr, store, scopes, scope); } if let Some(expr) = else_branch { - compute_expr_scopes(*expr, store, scopes, scope, resolve_const_block); + compute_expr_scopes(*expr, store, scopes, scope); } *scope = scopes.new_scope(*scope); scopes.add_pat_bindings(store, *scope, *pat); } Statement::Expr { expr, .. } => { - compute_expr_scopes(*expr, store, scopes, scope, resolve_const_block); + compute_expr_scopes(*expr, store, scopes, scope); } Statement::Item(Item::MacroDef(macro_id)) => { *scope = scopes.new_macro_def_scope(*scope, macro_id.clone()); @@ -246,7 +240,7 @@ fn compute_block_scopes( } } if let Some(expr) = tail { - compute_expr_scopes(expr, store, scopes, scope, resolve_const_block); + compute_expr_scopes(expr, store, scopes, scope); } } @@ -255,13 +249,12 @@ fn compute_expr_scopes( store: &ExpressionStore, scopes: &mut ExprScopes, scope: &mut ScopeId, - resolve_const_block: impl (Fn(ConstBlockId) -> ExprId) + Copy, ) { let make_label = |label: &Option| label.map(|label| (label, store.labels[label].name.clone())); let compute_expr_scopes = |scopes: &mut ExprScopes, expr: ExprId, scope: &mut ScopeId| { - compute_expr_scopes(expr, store, scopes, scope, resolve_const_block) + compute_expr_scopes(expr, store, scopes, scope) }; scopes.set_scope(expr, *scope); @@ -271,18 +264,18 @@ fn compute_expr_scopes( // Overwrite the old scope for the block expr, so that every block scope can be found // via the block itself (important for blocks that only contain items, no expressions). scopes.set_scope(expr, scope); - compute_block_scopes(statements, *tail, store, scopes, &mut scope, resolve_const_block); + compute_block_scopes(statements, *tail, store, scopes, &mut scope); } Expr::Const(id) => { let mut scope = scopes.root_scope(); - compute_expr_scopes(scopes, resolve_const_block(*id), &mut scope); + compute_expr_scopes(scopes, *id, &mut scope); } Expr::Unsafe { id, statements, tail } | Expr::Async { id, statements, tail } => { let mut scope = scopes.new_block_scope(*scope, *id, None); // Overwrite the old scope for the block expr, so that every block scope can be found // via the block itself (important for blocks that only contain items, no expressions). scopes.set_scope(expr, scope); - compute_block_scopes(statements, *tail, store, scopes, &mut scope, resolve_const_block); + compute_block_scopes(statements, *tail, store, scopes, &mut scope); } Expr::Loop { body: body_expr, label } => { let mut scope = scopes.new_labeled_scope(*scope, make_label(label)); @@ -324,20 +317,20 @@ fn compute_expr_scopes( #[cfg(test)] mod tests { - use base_db::SourceDatabase; - use hir_expand::{name::AsName, InFile}; + use base_db::RootQueryDb; + use hir_expand::{InFile, name::AsName}; use span::FileId; - use syntax::{algo::find_node_at_offset, ast, AstNode}; + use syntax::{AstNode, algo::find_node_at_offset, ast}; use test_fixture::WithFixture; use test_utils::{assert_eq_text, extract_offset}; - use crate::{db::DefDatabase, test_db::TestDB, FunctionId, ModuleDefId}; + use crate::{FunctionId, ModuleDefId, db::DefDatabase, test_db::TestDB}; fn find_function(db: &TestDB, file_id: FileId) -> FunctionId { let krate = db.test_crate(); let crate_def_map = db.crate_def_map(krate); - let module = crate_def_map.modules_for_file(file_id).next().unwrap(); + let module = crate_def_map.modules_for_file(db, file_id).next().unwrap(); let (_, def) = crate_def_map[module].scope.entries().next().unwrap(); match def.take_values().unwrap() { ModuleDefId::FunctionId(it) => it, @@ -357,18 +350,20 @@ mod tests { }; let (db, position) = TestDB::with_position(&code); - let file_id = position.file_id; + let editioned_file_id = position.file_id; let offset = position.offset; - let file_syntax = db.parse(file_id).syntax_node(); + let (file_id, _) = editioned_file_id.unpack(&db); + + let file_syntax = db.parse(editioned_file_id).syntax_node(); let marker: ast::PathExpr = find_node_at_offset(&file_syntax, offset).unwrap(); - let function = find_function(&db, file_id.file_id()); + let function = find_function(&db, file_id); let scopes = db.expr_scopes(function.into()); let (_body, source_map) = db.body_with_source_map(function.into()); let expr_id = source_map - .node_expr(InFile { file_id: file_id.into(), value: &marker.into() }) + .node_expr(InFile { file_id: editioned_file_id.into(), value: &marker.into() }) .unwrap() .as_expr() .unwrap(); @@ -511,15 +506,17 @@ fn foo() { fn do_check_local_name(#[rust_analyzer::rust_fixture] ra_fixture: &str, expected_offset: u32) { let (db, position) = TestDB::with_position(ra_fixture); - let file_id = position.file_id; + let editioned_file_id = position.file_id; let offset = position.offset; - let file = db.parse(file_id).ok().unwrap(); + let (file_id, _) = editioned_file_id.unpack(&db); + + let file = db.parse(editioned_file_id).ok().unwrap(); let expected_name = find_node_at_offset::(file.syntax(), expected_offset.into()) .expect("failed to find a name at the target offset"); let name_ref: ast::NameRef = find_node_at_offset(file.syntax(), offset).unwrap(); - let function = find_function(&db, file_id.file_id()); + let function = find_function(&db, file_id); let scopes = db.expr_scopes(function.into()); let (_, source_map) = db.body_with_source_map(function.into()); @@ -527,7 +524,7 @@ fn foo() { let expr_scope = { let expr_ast = name_ref.syntax().ancestors().find_map(ast::Expr::cast).unwrap(); let expr_id = source_map - .node_expr(InFile { file_id: file_id.into(), value: &expr_ast }) + .node_expr(InFile { file_id: editioned_file_id.into(), value: &expr_ast }) .unwrap() .as_expr() .unwrap(); diff --git a/src/tools/rust-analyzer/crates/hir-def/src/expr_store/tests.rs b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/tests.rs index 16bf46d3e3f95..f09ee6f0b9981 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/expr_store/tests.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/tests.rs @@ -1,503 +1,2 @@ -mod block; - -use crate::{hir::MatchArm, test_db::TestDB, ModuleDefId}; -use expect_test::{expect, Expect}; -use la_arena::RawIdx; -use test_fixture::WithFixture; - -use super::*; - -fn lower(#[rust_analyzer::rust_fixture] ra_fixture: &str) -> (TestDB, Arc, DefWithBodyId) { - let db = TestDB::with_files(ra_fixture); - - let krate = db.fetch_test_crate(); - let def_map = db.crate_def_map(krate); - let mut fn_def = None; - 'outer: for (_, module) in def_map.modules() { - for decl in module.scope.declarations() { - if let ModuleDefId::FunctionId(it) = decl { - fn_def = Some(it); - break 'outer; - } - } - } - let fn_def = fn_def.unwrap().into(); - - let body = db.body(fn_def); - (db, body, fn_def) -} - -fn def_map_at(#[rust_analyzer::rust_fixture] ra_fixture: &str) -> String { - let (db, position) = TestDB::with_position(ra_fixture); - - let module = db.module_at_position(position); - module.def_map(&db).dump(&db) -} - -fn check_block_scopes_at(#[rust_analyzer::rust_fixture] ra_fixture: &str, expect: Expect) { - let (db, position) = TestDB::with_position(ra_fixture); - - let module = db.module_at_position(position); - let actual = module.def_map(&db).dump_block_scopes(&db); - expect.assert_eq(&actual); -} - -fn check_at(#[rust_analyzer::rust_fixture] ra_fixture: &str, expect: Expect) { - let actual = def_map_at(ra_fixture); - expect.assert_eq(&actual); -} - -#[test] -fn your_stack_belongs_to_me() { - cov_mark::check!(your_stack_belongs_to_me); - lower( - r#" -#![recursion_limit = "32"] -macro_rules! n_nuple { - ($e:tt) => (); - ($($rest:tt)*) => {{ - (n_nuple!($($rest)*)None,) - }}; -} -fn main() { n_nuple!(1,2,3); } -"#, - ); -} - -#[test] -fn your_stack_belongs_to_me2() { - cov_mark::check!(overflow_but_not_me); - lower( - r#" -#![recursion_limit = "32"] -macro_rules! foo { - () => {{ foo!(); foo!(); }} -} -fn main() { foo!(); } -"#, - ); -} - -#[test] -fn recursion_limit() { - lower( - r#" -#![recursion_limit = "2"] -macro_rules! n_nuple { - ($e:tt) => (); - ($first:tt $($rest:tt)*) => {{ - n_nuple!($($rest)*) - }}; -} -fn main() { n_nuple!(1,2,3); } -"#, - ); -} - -#[test] -fn issue_3642_bad_macro_stackover() { - lower( - r#" -#[macro_export] -macro_rules! match_ast { - (match $node:ident { $($tt:tt)* }) => { match_ast!(match ($node) { $($tt)* }) }; - - (match ($node:expr) { - $( ast::$ast:ident($it:ident) => $res:expr, )* - _ => $catch_all:expr $(,)? - }) => {{ - $( if let Some($it) = ast::$ast::cast($node.clone()) { $res } else )* - { $catch_all } - }}; -} - -fn main() { - let anchor = match_ast! { - match parent { - as => {}, - _ => return None - } - }; -}"#, - ); -} - -#[test] -fn macro_resolve() { - // Regression test for a path resolution bug introduced with inner item handling. - lower( - r#" -macro_rules! vec { - () => { () }; - ($elem:expr; $n:expr) => { () }; - ($($x:expr),+ $(,)?) => { () }; -} -mod m { - fn outer() { - let _ = vec![FileSet::default(); self.len()]; - } -} -"#, - ); -} - -#[test] -fn desugar_for_loop() { - let (db, body, def) = lower( - r#" -//- minicore: iterator -fn main() { - for ident in 0..10 { - foo(); - bar() - } -} -"#, - ); - - expect![[r#" - fn main() -> () { - match builtin#lang(into_iter)( - (0) ..(10) , - ) { - mut 11 => loop { - match builtin#lang(next)( - &mut 11, - ) { - builtin#lang(None) => break, - builtin#lang(Some)(ident) => { - foo(); - bar() - }, - } - }, - } - }"#]] - .assert_eq(&body.pretty_print(&db, def, Edition::CURRENT)) -} - -#[test] -fn desugar_builtin_format_args() { - let (db, body, def) = lower( - r#" -//- minicore: fmt -fn main() { - let are = "are"; - let count = 10; - builtin#format_args("\u{1b}hello {count:02} {} friends, we {are:?} {0}{last}", "fancy", last = "!"); -} -"#, - ); - - expect![[r#" - fn main() -> () { - let are = "are"; - let count = 10; - builtin#lang(Arguments::new_v1_formatted)( - &[ - "\u{1b}hello ", " ", " friends, we ", " ", "", - ], - &[ - builtin#lang(Argument::new_display)( - &count, - ), builtin#lang(Argument::new_display)( - &"fancy", - ), builtin#lang(Argument::new_debug)( - &are, - ), builtin#lang(Argument::new_display)( - &"!", - ), - ], - &[ - builtin#lang(Placeholder::new)( - 0usize, - ' ', - builtin#lang(Alignment::Unknown), - 8u32, - builtin#lang(Count::Implied), - builtin#lang(Count::Is)( - 2usize, - ), - ), builtin#lang(Placeholder::new)( - 1usize, - ' ', - builtin#lang(Alignment::Unknown), - 0u32, - builtin#lang(Count::Implied), - builtin#lang(Count::Implied), - ), builtin#lang(Placeholder::new)( - 2usize, - ' ', - builtin#lang(Alignment::Unknown), - 0u32, - builtin#lang(Count::Implied), - builtin#lang(Count::Implied), - ), builtin#lang(Placeholder::new)( - 1usize, - ' ', - builtin#lang(Alignment::Unknown), - 0u32, - builtin#lang(Count::Implied), - builtin#lang(Count::Implied), - ), builtin#lang(Placeholder::new)( - 3usize, - ' ', - builtin#lang(Alignment::Unknown), - 0u32, - builtin#lang(Count::Implied), - builtin#lang(Count::Implied), - ), - ], - unsafe { - builtin#lang(UnsafeArg::new)() - }, - ); - }"#]] - .assert_eq(&body.pretty_print(&db, def, Edition::CURRENT)) -} - -#[test] -fn test_macro_hygiene() { - let (db, body, def) = lower( - r##" -//- minicore: fmt, from -//- /main.rs -mod error; - -use crate::error::error; - -fn main() { - // _ = forces body expansion instead of block def map expansion - _ = error!("Failed to resolve path `{}`", node.text()); -} -//- /error.rs -macro_rules! _error { - ($fmt:expr, $($arg:tt)+) => {$crate::error::intermediate!(format_args!($fmt, $($arg)+))} -} -pub(crate) use _error as error; -macro_rules! _intermediate { - ($arg:expr) => {$crate::error::SsrError::new($arg)} -} -pub(crate) use _intermediate as intermediate; - -pub struct SsrError(pub(crate) core::fmt::Arguments); - -impl SsrError { - pub(crate) fn new(message: impl Into) -> SsrError { - SsrError(message.into()) - } -} -"##, - ); - - assert_eq!(db.body_with_source_map(def).1.diagnostics(), &[]); - expect![[r#" - fn main() -> () { - _ = $crate::error::SsrError::new( - builtin#lang(Arguments::new_v1_formatted)( - &[ - "Failed to resolve path `", "`", - ], - &[ - builtin#lang(Argument::new_display)( - &node.text(), - ), - ], - &[ - builtin#lang(Placeholder::new)( - 0usize, - ' ', - builtin#lang(Alignment::Unknown), - 0u32, - builtin#lang(Count::Implied), - builtin#lang(Count::Implied), - ), - ], - unsafe { - builtin#lang(UnsafeArg::new)() - }, - ), - ); - }"#]] - .assert_eq(&body.pretty_print(&db, def, Edition::CURRENT)) -} - -#[test] -fn regression_10300() { - let (db, body, def) = lower( - r#" -//- minicore: concat, panic -mod private { - pub use core::concat; -} - -macro_rules! m { - () => { - panic!(concat!($crate::private::concat!("cc"))); - }; -} - -fn f(a: i32, b: u32) -> String { - m!(); -} -"#, - ); - - let (_, source_map) = db.body_with_source_map(def); - assert_eq!(source_map.diagnostics(), &[]); - - for (_, def_map) in body.blocks(&db) { - assert_eq!(def_map.diagnostics(), &[]); - } - - expect![[r#" - fn f(a: i32, b: u32) -> String { - { - $crate::panicking::panic_fmt( - builtin#lang(Arguments::new_v1_formatted)( - &[ - "cc", - ], - &[], - &[], - unsafe { - builtin#lang(UnsafeArg::new)() - }, - ), - ); - }; - }"#]] - .assert_eq(&body.pretty_print(&db, def, Edition::CURRENT)) -} - -#[test] -fn destructuring_assignment_tuple_macro() { - // This is a funny one. `let m!()() = Bar()` is an error in rustc, because `m!()()` isn't a valid pattern, - // but in destructuring assignment it is valid, because `m!()()` is a valid expression, and destructuring - // assignments start their lives as expressions. So we have to do the same. - - let (db, body, def) = lower( - r#" -struct Bar(); - -macro_rules! m { - () => { Bar }; -} - -fn foo() { - m!()() = Bar(); -} -"#, - ); - - let (_, source_map) = db.body_with_source_map(def); - assert_eq!(source_map.diagnostics(), &[]); - - for (_, def_map) in body.blocks(&db) { - assert_eq!(def_map.diagnostics(), &[]); - } - - expect![[r#" - fn foo() -> () { - Bar() = Bar(); - }"#]] - .assert_eq(&body.pretty_print(&db, def, Edition::CURRENT)) -} - -#[test] -fn shadowing_record_variant() { - let (_, body, _) = lower( - r#" -enum A { - B { field: i32 }, -} -fn f() { - use A::*; - match () { - B => {} - }; -} - "#, - ); - assert_eq!(body.bindings.len(), 1, "should have a binding for `B`"); - assert_eq!( - body.bindings[BindingId::from_raw(RawIdx::from_u32(0))].name.as_str(), - "B", - "should have a binding for `B`", - ); -} - -#[test] -fn regression_pretty_print_bind_pat() { - let (db, body, owner) = lower( - r#" -fn foo() { - let v @ u = 123; -} -"#, - ); - let printed = body.pretty_print(&db, owner, Edition::CURRENT); - assert_eq!( - printed, - r#"fn foo() -> () { - let v @ u = 123; -}"# - ); -} - -#[test] -fn skip_skips_body() { - let (db, body, owner) = lower( - r#" -#[rust_analyzer::skip] -async fn foo(a: (), b: i32) -> u32 { - 0 + 1 + b() -} -"#, - ); - let printed = body.pretty_print(&db, owner, Edition::CURRENT); - expect!["fn foo(�: (), �: i32) -> impl ::core::future::Future:: �"] - .assert_eq(&printed); -} - -#[test] -fn range_bounds_are_hir_exprs() { - let (_, body, _) = lower( - r#" -pub const L: i32 = 6; -mod x { - pub const R: i32 = 100; -} -const fn f(x: i32) -> i32 { - match x { - -1..=5 => x * 10, - L..=x::R => x * 100, - _ => x, - } -}"#, - ); - - let mtch_arms = body - .exprs - .iter() - .find_map(|(_, expr)| { - if let Expr::Match { arms, .. } = expr { - return Some(arms); - } - - None - }) - .unwrap(); - - let MatchArm { pat, .. } = mtch_arms[1]; - match body.pats[pat] { - Pat::Range { start, end } => { - let hir_start = &body.exprs[start.unwrap()]; - let hir_end = &body.exprs[end.unwrap()]; - - assert!(matches!(hir_start, Expr::Path { .. })); - assert!(matches!(hir_end, Expr::Path { .. })); - } - _ => {} - } -} +mod body; +mod signatures; diff --git a/src/tools/rust-analyzer/crates/hir-def/src/expr_store/tests/block.rs b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/tests/block.rs deleted file mode 100644 index e136dd18a55e5..0000000000000 --- a/src/tools/rust-analyzer/crates/hir-def/src/expr_store/tests/block.rs +++ /dev/null @@ -1,592 +0,0 @@ -use super::*; -use expect_test::expect; - -#[test] -fn inner_item_smoke() { - check_at( - r#" -struct inner {} -fn outer() { - $0 - fn inner() {} -} -"#, - expect![[r#" - block scope - inner: v - - crate - inner: t - outer: v - "#]], - ); -} - -#[test] -fn use_from_crate() { - check_at( - r#" -struct Struct {} -fn outer() { - fn Struct() {} - use Struct as PlainStruct; - use crate::Struct as CrateStruct; - use self::Struct as SelfStruct; - use super::Struct as SuperStruct; - $0 -} -"#, - expect![[r#" - block scope - CrateStruct: ti - PlainStruct: ti vi - SelfStruct: ti - Struct: v - SuperStruct: _ - - crate - Struct: t - outer: v - "#]], - ); -} - -#[test] -fn merge_namespaces() { - check_at( - r#" -struct name {} -fn outer() { - fn name() {} - - use name as imported; // should import both `name`s - - $0 -} -"#, - expect![[r#" - block scope - imported: ti vi - name: v - - crate - name: t - outer: v - "#]], - ); -} - -#[test] -fn nested_blocks() { - check_at( - r#" -fn outer() { - struct inner1 {} - fn inner() { - use inner1; - use outer; - fn inner2() {} - $0 - } -} -"#, - expect![[r#" - block scope - inner1: ti - inner2: v - outer: vi - - block scope - inner: v - inner1: t - - crate - outer: v - "#]], - ); -} - -#[test] -fn super_imports() { - check_at( - r#" -mod module { - fn f() { - use super::Struct; - $0 - } -} - -struct Struct {} -"#, - expect![[r#" - block scope - Struct: ti - - crate - Struct: t - module: t - - crate::module - f: v - "#]], - ); -} - -#[test] -fn super_imports_2() { - check_at( - r#" -fn outer() { - mod m { - struct ResolveMe {} - fn middle() { - mod m2 { - fn inner() { - use super::ResolveMe; - $0 - } - } - } - } -} -"#, - expect![[r#" - block scope - ResolveMe: ti - - block scope - m2: t - - block scope::m2 - inner: v - - block scope - m: t - - block scope::m - ResolveMe: t - middle: v - - crate - outer: v - "#]], - ); -} - -#[test] -fn nested_module_scoping() { - check_block_scopes_at( - r#" -fn f() { - mod module { - struct Struct {} - fn f() { - use self::Struct; - $0 - } - } -} - "#, - expect![[r#" - BlockId(1) in BlockRelativeModuleId { block: Some(BlockId(0)), local_id: Idx::(1) } - BlockId(0) in BlockRelativeModuleId { block: None, local_id: Idx::(0) } - crate scope - "#]], - ); -} - -#[test] -fn self_imports() { - check_at( - r#" -fn f() { - mod m { - struct ResolveMe {} - fn g() { - fn h() { - use self::ResolveMe; - $0 - } - } - } -} -"#, - expect![[r#" - block scope - ResolveMe: ti - - block scope - h: v - - block scope - m: t - - block scope::m - ResolveMe: t - g: v - - crate - f: v - "#]], - ); -} - -#[test] -fn legacy_macro_items() { - // Checks that legacy-scoped `macro_rules!` from parent namespaces are resolved and expanded - // correctly. - check_at( - r#" -macro_rules! mark { - () => { - struct Hit {} - } -} - -fn f() { - mark!(); - $0 -} -"#, - expect![[r#" - block scope - Hit: t - - crate - f: v - "#]], - ); -} - -#[test] -fn macro_resolve() { - check_at( - r#" -//- /lib.rs crate:lib deps:core -use core::cov_mark; - -fn f() { - fn nested() { - cov_mark::mark!(Hit); - $0 - } -} -//- /core.rs crate:core -pub mod cov_mark { - #[macro_export] - macro_rules! _mark { - ($name:ident) => { - struct $name {} - } - } - - pub use crate::_mark as mark; -} -"#, - expect![[r#" - block scope - Hit: t - - block scope - nested: v - - crate - cov_mark: ti - f: v - "#]], - ); -} - -#[test] -fn macro_exported_in_block_mod() { - check_at( - r#" -#[macro_export] -macro_rules! foo { - () => { pub struct FooWorks; }; -} -macro_rules! bar { - () => { pub struct BarWorks; }; -} -fn main() { - mod module { - foo!(); - bar!(); - $0 - } -} -"#, - expect![[r#" - block scope - module: t - - block scope::module - BarWorks: t v - FooWorks: t v - - crate - foo: m - main: v - "#]], - ); -} - -#[test] -fn macro_resolve_legacy() { - check_at( - r#" -//- /lib.rs -mod module; - -//- /module.rs -macro_rules! m { - () => { - struct Def {} - }; -} - -fn f() { - { - m!(); - $0 - } -} - "#, - expect![[r#" - block scope - Def: t - - crate - module: t - - crate::module - f: v - "#]], - ) -} - -#[test] -fn super_does_not_resolve_to_block_module() { - check_at( - r#" -fn main() { - struct Struct {} - mod module { - use super::Struct; - - $0 - } -} - "#, - expect![[r#" - block scope - Struct: t - module: t - - block scope::module - Struct: _ - - crate - main: v - "#]], - ); -} - -#[test] -fn underscore_import() { - // This used to panic, because the default (private) visibility inside block expressions would - // point into the containing `DefMap`, which visibilities should never be able to do. - cov_mark::check!(adjust_vis_in_block_def_map); - check_at( - r#" -mod m { - fn main() { - use Tr as _; - trait Tr {} - $0 - } -} - "#, - expect![[r#" - block scope - _: t - Tr: t - - crate - m: t - - crate::m - main: v - "#]], - ); -} - -#[test] -fn nested_macro_item_decl() { - cov_mark::check!(macro_call_in_macro_stmts_is_added_to_item_tree); - check_at( - r#" -macro_rules! inner_declare { - ($ident:ident) => { - static $ident: u32 = 0; - }; -} -macro_rules! declare { - ($ident:ident) => { - inner_declare!($ident); - }; -} - -fn foo() { - declare!(bar); - bar; - $0 -} - "#, - expect![[r#" - block scope - bar: v - - crate - foo: v - "#]], - ) -} - -#[test] -fn is_visible_from_same_def_map() { - // Regression test for https://github.com/rust-lang/rust-analyzer/issues/9481 - cov_mark::check!(is_visible_from_same_block_def_map); - check_at( - r#" -fn outer() { - mod tests { - use super::*; - } - use crate::name; - $0 -} - "#, - expect![[r#" - block scope - name: _ - tests: t - - block scope::tests - name: _ - outer: vg - - crate - outer: v - "#]], - ); -} - -#[test] -fn stmt_macro_expansion_with_trailing_expr() { - cov_mark::check!(macro_stmt_with_trailing_macro_expr); - check_at( - r#" -macro_rules! mac { - () => { mac!($) }; - ($x:tt) => { fn inner() {} }; -} -fn foo() { - mac!(); - $0 -} - "#, - expect![[r#" - block scope - inner: v - - crate - foo: v - "#]], - ) -} - -#[test] -fn trailing_expr_macro_expands_stmts() { - check_at( - r#" -macro_rules! foo { - () => { const FOO: u32 = 0;const BAR: u32 = 0; }; -} -fn f() {$0 - foo!{} -}; - "#, - expect![[r#" - block scope - BAR: v - FOO: v - - crate - f: v - "#]], - ) -} - -#[test] -fn resolve_extern_prelude_in_block() { - check_at( - r#" -//- /main.rs crate:main deps:core -fn main() { - mod f { - use core::S; - $0 - } -} - -//- /core.rs crate:core -pub struct S; - "#, - expect![[r#" - block scope - f: t - - block scope::f - S: ti vi - - crate - main: v - "#]], - ) -} - -#[test] -fn shadow_extern_prelude_in_block() { - check_at( - r#" -//- /main.rs crate:main deps:core -fn main() { - mod core { pub struct S; } - { - fn inner() {} // forces a block def map - use core::S; // should resolve to the local one - $0 - } -} - -//- /core.rs crate:core -pub const S; - "#, - expect![[r#" - block scope - S: ti vi - inner: v - - block scope - core: t - - block scope::core - S: t v - - crate - main: v - "#]], - ) -} diff --git a/src/tools/rust-analyzer/crates/hir-def/src/expr_store/tests/body.rs b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/tests/body.rs new file mode 100644 index 0000000000000..d6645dc1d1d38 --- /dev/null +++ b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/tests/body.rs @@ -0,0 +1,502 @@ +mod block; + +use crate::{DefWithBodyId, ModuleDefId, hir::MatchArm, test_db::TestDB}; +use expect_test::{Expect, expect}; +use la_arena::RawIdx; +use test_fixture::WithFixture; + +use super::super::*; + +fn lower(#[rust_analyzer::rust_fixture] ra_fixture: &str) -> (TestDB, Arc, DefWithBodyId) { + let db = TestDB::with_files(ra_fixture); + + let krate = db.fetch_test_crate(); + let def_map = db.crate_def_map(krate); + let mut fn_def = None; + 'outer: for (_, module) in def_map.modules() { + for decl in module.scope.declarations() { + if let ModuleDefId::FunctionId(it) = decl { + fn_def = Some(it); + break 'outer; + } + } + } + let fn_def = fn_def.unwrap().into(); + + let body = db.body(fn_def); + (db, body, fn_def) +} + +fn def_map_at(#[rust_analyzer::rust_fixture] ra_fixture: &str) -> String { + let (db, position) = TestDB::with_position(ra_fixture); + + let module = db.module_at_position(position); + module.def_map(&db).dump(&db) +} + +fn check_block_scopes_at(#[rust_analyzer::rust_fixture] ra_fixture: &str, expect: Expect) { + let (db, position) = TestDB::with_position(ra_fixture); + + let module = db.module_at_position(position); + let actual = module.def_map(&db).dump_block_scopes(&db); + expect.assert_eq(&actual); +} + +fn check_at(#[rust_analyzer::rust_fixture] ra_fixture: &str, expect: Expect) { + let actual = def_map_at(ra_fixture); + expect.assert_eq(&actual); +} + +#[test] +fn your_stack_belongs_to_me() { + cov_mark::check!(your_stack_belongs_to_me); + lower( + r#" +#![recursion_limit = "32"] +macro_rules! n_nuple { + ($e:tt) => (); + ($($rest:tt)*) => {{ + (n_nuple!($($rest)*)None,) + }}; +} +fn main() { n_nuple!(1,2,3); } +"#, + ); +} + +#[test] +fn your_stack_belongs_to_me2() { + cov_mark::check!(overflow_but_not_me); + lower( + r#" +#![recursion_limit = "32"] +macro_rules! foo { + () => {{ foo!(); foo!(); }} +} +fn main() { foo!(); } +"#, + ); +} + +#[test] +fn recursion_limit() { + lower( + r#" +#![recursion_limit = "2"] +macro_rules! n_nuple { + ($e:tt) => (); + ($first:tt $($rest:tt)*) => {{ + n_nuple!($($rest)*) + }}; +} +fn main() { n_nuple!(1,2,3); } +"#, + ); +} + +#[test] +fn issue_3642_bad_macro_stackover() { + lower( + r#" +#[macro_export] +macro_rules! match_ast { + (match $node:ident { $($tt:tt)* }) => { match_ast!(match ($node) { $($tt)* }) }; + + (match ($node:expr) { + $( ast::$ast:ident($it:ident) => $res:expr, )* + _ => $catch_all:expr $(,)? + }) => {{ + $( if let Some($it) = ast::$ast::cast($node.clone()) { $res } else )* + { $catch_all } + }}; +} + +fn main() { + let anchor = match_ast! { + match parent { + as => {}, + _ => return None + } + }; +}"#, + ); +} + +#[test] +fn macro_resolve() { + // Regression test for a path resolution bug introduced with inner item handling. + lower( + r#" +macro_rules! vec { + () => { () }; + ($elem:expr; $n:expr) => { () }; + ($($x:expr),+ $(,)?) => { () }; +} +mod m { + fn outer() { + let _ = vec![FileSet::default(); self.len()]; + } +} +"#, + ); +} + +#[test] +fn desugar_for_loop() { + let (db, body, def) = lower( + r#" +//- minicore: iterator +fn main() { + for ident in 0..10 { + foo(); + bar() + } +} +"#, + ); + + expect![[r#" + fn main() { + match builtin#lang(into_iter)( + (0) ..(10) , + ) { + mut 11 => loop { + match builtin#lang(next)( + &mut 11, + ) { + builtin#lang(None) => break, + builtin#lang(Some)(ident) => { + foo(); + bar() + }, + } + }, + } + }"#]] + .assert_eq(&body.pretty_print(&db, def, Edition::CURRENT)) +} + +#[test] +fn desugar_builtin_format_args() { + let (db, body, def) = lower( + r#" +//- minicore: fmt +fn main() { + let are = "are"; + let count = 10; + builtin#format_args("\u{1b}hello {count:02} {} friends, we {are:?} {0}{last}", "fancy", last = "!"); +} +"#, + ); + + expect![[r#" + fn main() { + let are = "are"; + let count = 10; + builtin#lang(Arguments::new_v1_formatted)( + &[ + "\u{1b}hello ", " ", " friends, we ", " ", "", + ], + &[ + builtin#lang(Argument::new_display)( + &count, + ), builtin#lang(Argument::new_display)( + &"fancy", + ), builtin#lang(Argument::new_debug)( + &are, + ), builtin#lang(Argument::new_display)( + &"!", + ), + ], + &[ + builtin#lang(Placeholder::new)( + 0usize, + ' ', + builtin#lang(Alignment::Unknown), + 8u32, + builtin#lang(Count::Implied), + builtin#lang(Count::Is)( + 2, + ), + ), builtin#lang(Placeholder::new)( + 1usize, + ' ', + builtin#lang(Alignment::Unknown), + 0u32, + builtin#lang(Count::Implied), + builtin#lang(Count::Implied), + ), builtin#lang(Placeholder::new)( + 2usize, + ' ', + builtin#lang(Alignment::Unknown), + 0u32, + builtin#lang(Count::Implied), + builtin#lang(Count::Implied), + ), builtin#lang(Placeholder::new)( + 1usize, + ' ', + builtin#lang(Alignment::Unknown), + 0u32, + builtin#lang(Count::Implied), + builtin#lang(Count::Implied), + ), builtin#lang(Placeholder::new)( + 3usize, + ' ', + builtin#lang(Alignment::Unknown), + 0u32, + builtin#lang(Count::Implied), + builtin#lang(Count::Implied), + ), + ], + unsafe { + builtin#lang(UnsafeArg::new)() + }, + ); + }"#]] + .assert_eq(&body.pretty_print(&db, def, Edition::CURRENT)) +} + +#[test] +fn test_macro_hygiene() { + let (db, body, def) = lower( + r##" +//- minicore: fmt, from +//- /main.rs +mod error; + +use crate::error::error; + +fn main() { + // _ = forces body expansion instead of block def map expansion + _ = error!("Failed to resolve path `{}`", node.text()); +} +//- /error.rs +macro_rules! _error { + ($fmt:expr, $($arg:tt)+) => {$crate::error::intermediate!(format_args!($fmt, $($arg)+))} +} +pub(crate) use _error as error; +macro_rules! _intermediate { + ($arg:expr) => {$crate::error::SsrError::new($arg)} +} +pub(crate) use _intermediate as intermediate; + +pub struct SsrError(pub(crate) core::fmt::Arguments); + +impl SsrError { + pub(crate) fn new(message: impl Into) -> SsrError { + SsrError(message.into()) + } +} +"##, + ); + + assert_eq!(db.body_with_source_map(def).1.diagnostics(), &[]); + expect![[r#" + fn main() { + _ = ra_test_fixture::error::SsrError::new( + builtin#lang(Arguments::new_v1_formatted)( + &[ + "Failed to resolve path `", "`", + ], + &[ + builtin#lang(Argument::new_display)( + &node.text(), + ), + ], + &[ + builtin#lang(Placeholder::new)( + 0usize, + ' ', + builtin#lang(Alignment::Unknown), + 0u32, + builtin#lang(Count::Implied), + builtin#lang(Count::Implied), + ), + ], + unsafe { + builtin#lang(UnsafeArg::new)() + }, + ), + ); + }"#]] + .assert_eq(&body.pretty_print(&db, def, Edition::CURRENT)) +} + +#[test] +fn regression_10300() { + let (db, body, def) = lower( + r#" +//- minicore: concat, panic +mod private { + pub use core::concat; +} + +macro_rules! m { + () => { + panic!(concat!($crate::private::concat!("cc"))); + }; +} + +fn f(a: i32, b: u32) -> String { + m!(); +} +"#, + ); + + let (_, source_map) = db.body_with_source_map(def); + assert_eq!(source_map.diagnostics(), &[]); + + for (_, def_map) in body.blocks(&db) { + assert_eq!(def_map.diagnostics(), &[]); + } + + expect![[r#" + fn f(a, b) { + { + core::panicking::panic_fmt( + builtin#lang(Arguments::new_v1_formatted)( + &[ + "cc", + ], + &[], + &[], + unsafe { + builtin#lang(UnsafeArg::new)() + }, + ), + ); + }; + }"#]] + .assert_eq(&body.pretty_print(&db, def, Edition::CURRENT)) +} + +#[test] +fn destructuring_assignment_tuple_macro() { + // This is a funny one. `let m!()() = Bar()` is an error in rustc, because `m!()()` isn't a valid pattern, + // but in destructuring assignment it is valid, because `m!()()` is a valid expression, and destructuring + // assignments start their lives as expressions. So we have to do the same. + + let (db, body, def) = lower( + r#" +struct Bar(); + +macro_rules! m { + () => { Bar }; +} + +fn foo() { + m!()() = Bar(); +} +"#, + ); + + let (_, source_map) = db.body_with_source_map(def); + assert_eq!(source_map.diagnostics(), &[]); + + for (_, def_map) in body.blocks(&db) { + assert_eq!(def_map.diagnostics(), &[]); + } + + expect![[r#" + fn foo() { + Bar() = Bar(); + }"#]] + .assert_eq(&body.pretty_print(&db, def, Edition::CURRENT)) +} + +#[test] +fn shadowing_record_variant() { + let (_, body, _) = lower( + r#" +enum A { + B { field: i32 }, +} +fn f() { + use A::*; + match () { + B => {} + }; +} + "#, + ); + assert_eq!(body.bindings.len(), 1, "should have a binding for `B`"); + assert_eq!( + body.bindings[BindingId::from_raw(RawIdx::from_u32(0))].name.as_str(), + "B", + "should have a binding for `B`", + ); +} + +#[test] +fn regression_pretty_print_bind_pat() { + let (db, body, owner) = lower( + r#" +fn foo() { + let v @ u = 123; +} +"#, + ); + let printed = body.pretty_print(&db, owner, Edition::CURRENT); + + expect![[r#" + fn foo() { + let v @ u = 123; + }"#]] + .assert_eq(&printed); +} + +#[test] +fn skip_skips_body() { + let (db, body, owner) = lower( + r#" +#[rust_analyzer::skip] +async fn foo(a: (), b: i32) -> u32 { + 0 + 1 + b() +} +"#, + ); + let printed = body.pretty_print(&db, owner, Edition::CURRENT); + expect!["fn foo(�, �) �"].assert_eq(&printed); +} + +#[test] +fn range_bounds_are_hir_exprs() { + let (_, body, _) = lower( + r#" +pub const L: i32 = 6; +mod x { + pub const R: i32 = 100; +} +const fn f(x: i32) -> i32 { + match x { + -1..=5 => x * 10, + L..=x::R => x * 100, + _ => x, + } +}"#, + ); + + let mtch_arms = body + .exprs + .iter() + .find_map(|(_, expr)| { + if let Expr::Match { arms, .. } = expr { + return Some(arms); + } + + None + }) + .unwrap(); + + let MatchArm { pat, .. } = mtch_arms[1]; + match body.pats[pat] { + Pat::Range { start, end } => { + let hir_start = &body.exprs[start.unwrap()]; + let hir_end = &body.exprs[end.unwrap()]; + + assert!(matches!(hir_start, Expr::Path { .. })); + assert!(matches!(hir_end, Expr::Path { .. })); + } + _ => {} + } +} diff --git a/src/tools/rust-analyzer/crates/hir-def/src/expr_store/tests/body/block.rs b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/tests/body/block.rs new file mode 100644 index 0000000000000..da3b65d4203d1 --- /dev/null +++ b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/tests/body/block.rs @@ -0,0 +1,592 @@ +use super::*; +use expect_test::expect; + +#[test] +fn inner_item_smoke() { + check_at( + r#" +struct inner {} +fn outer() { + $0 + fn inner() {} +} +"#, + expect![[r#" + block scope + inner: v + + crate + inner: t + outer: v + "#]], + ); +} + +#[test] +fn use_from_crate() { + check_at( + r#" +struct Struct {} +fn outer() { + fn Struct() {} + use Struct as PlainStruct; + use crate::Struct as CrateStruct; + use self::Struct as SelfStruct; + use super::Struct as SuperStruct; + $0 +} +"#, + expect![[r#" + block scope + CrateStruct: ti + PlainStruct: ti vi + SelfStruct: ti + Struct: v + SuperStruct: _ + + crate + Struct: t + outer: v + "#]], + ); +} + +#[test] +fn merge_namespaces() { + check_at( + r#" +struct name {} +fn outer() { + fn name() {} + + use name as imported; // should import both `name`s + + $0 +} +"#, + expect![[r#" + block scope + imported: ti vi + name: v + + crate + name: t + outer: v + "#]], + ); +} + +#[test] +fn nested_blocks() { + check_at( + r#" +fn outer() { + struct inner1 {} + fn inner() { + use inner1; + use outer; + fn inner2() {} + $0 + } +} +"#, + expect![[r#" + block scope + inner1: ti + inner2: v + outer: vi + + block scope + inner: v + inner1: t + + crate + outer: v + "#]], + ); +} + +#[test] +fn super_imports() { + check_at( + r#" +mod module { + fn f() { + use super::Struct; + $0 + } +} + +struct Struct {} +"#, + expect![[r#" + block scope + Struct: ti + + crate + Struct: t + module: t + + crate::module + f: v + "#]], + ); +} + +#[test] +fn super_imports_2() { + check_at( + r#" +fn outer() { + mod m { + struct ResolveMe {} + fn middle() { + mod m2 { + fn inner() { + use super::ResolveMe; + $0 + } + } + } + } +} +"#, + expect![[r#" + block scope + ResolveMe: ti + + block scope + m2: t + + block scope::m2 + inner: v + + block scope + m: t + + block scope::m + ResolveMe: t + middle: v + + crate + outer: v + "#]], + ); +} + +#[test] +fn nested_module_scoping() { + check_block_scopes_at( + r#" +fn f() { + mod module { + struct Struct {} + fn f() { + use self::Struct; + $0 + } + } +} + "#, + expect![[r#" + BlockId(3801) in BlockRelativeModuleId { block: Some(BlockId(3800)), local_id: Idx::(1) } + BlockId(3800) in BlockRelativeModuleId { block: None, local_id: Idx::(0) } + crate scope + "#]], + ); +} + +#[test] +fn self_imports() { + check_at( + r#" +fn f() { + mod m { + struct ResolveMe {} + fn g() { + fn h() { + use self::ResolveMe; + $0 + } + } + } +} +"#, + expect![[r#" + block scope + ResolveMe: ti + + block scope + h: v + + block scope + m: t + + block scope::m + ResolveMe: t + g: v + + crate + f: v + "#]], + ); +} + +#[test] +fn legacy_macro_items() { + // Checks that legacy-scoped `macro_rules!` from parent namespaces are resolved and expanded + // correctly. + check_at( + r#" +macro_rules! mark { + () => { + struct Hit {} + } +} + +fn f() { + mark!(); + $0 +} +"#, + expect![[r#" + block scope + Hit: t + + crate + f: v + "#]], + ); +} + +#[test] +fn macro_resolve() { + check_at( + r#" +//- /lib.rs crate:lib deps:core +use core::cov_mark; + +fn f() { + fn nested() { + cov_mark::mark!(Hit); + $0 + } +} +//- /core.rs crate:core +pub mod cov_mark { + #[macro_export] + macro_rules! _mark { + ($name:ident) => { + struct $name {} + } + } + + pub use crate::_mark as mark; +} +"#, + expect![[r#" + block scope + Hit: t + + block scope + nested: v + + crate + cov_mark: ti + f: v + "#]], + ); +} + +#[test] +fn macro_exported_in_block_mod() { + check_at( + r#" +#[macro_export] +macro_rules! foo { + () => { pub struct FooWorks; }; +} +macro_rules! bar { + () => { pub struct BarWorks; }; +} +fn main() { + mod module { + foo!(); + bar!(); + $0 + } +} +"#, + expect![[r#" + block scope + module: t + + block scope::module + BarWorks: t v + FooWorks: t v + + crate + foo: m + main: v + "#]], + ); +} + +#[test] +fn macro_resolve_legacy() { + check_at( + r#" +//- /lib.rs +mod module; + +//- /module.rs +macro_rules! m { + () => { + struct Def {} + }; +} + +fn f() { + { + m!(); + $0 + } +} + "#, + expect![[r#" + block scope + Def: t + + crate + module: t + + crate::module + f: v + "#]], + ) +} + +#[test] +fn super_does_not_resolve_to_block_module() { + check_at( + r#" +fn main() { + struct Struct {} + mod module { + use super::Struct; + + $0 + } +} + "#, + expect![[r#" + block scope + Struct: t + module: t + + block scope::module + Struct: _ + + crate + main: v + "#]], + ); +} + +#[test] +fn underscore_import() { + // This used to panic, because the default (private) visibility inside block expressions would + // point into the containing `DefMap`, which visibilities should never be able to do. + cov_mark::check!(adjust_vis_in_block_def_map); + check_at( + r#" +mod m { + fn main() { + use Tr as _; + trait Tr {} + $0 + } +} + "#, + expect![[r#" + block scope + _: t + Tr: t + + crate + m: t + + crate::m + main: v + "#]], + ); +} + +#[test] +fn nested_macro_item_decl() { + cov_mark::check!(macro_call_in_macro_stmts_is_added_to_item_tree); + check_at( + r#" +macro_rules! inner_declare { + ($ident:ident) => { + static $ident: u32 = 0; + }; +} +macro_rules! declare { + ($ident:ident) => { + inner_declare!($ident); + }; +} + +fn foo() { + declare!(bar); + bar; + $0 +} + "#, + expect![[r#" + block scope + bar: v + + crate + foo: v + "#]], + ) +} + +#[test] +fn is_visible_from_same_def_map() { + // Regression test for https://github.com/rust-lang/rust-analyzer/issues/9481 + cov_mark::check!(is_visible_from_same_block_def_map); + check_at( + r#" +fn outer() { + mod tests { + use super::*; + } + use crate::name; + $0 +} + "#, + expect![[r#" + block scope + name: _ + tests: t + + block scope::tests + name: _ + outer: vg + + crate + outer: v + "#]], + ); +} + +#[test] +fn stmt_macro_expansion_with_trailing_expr() { + cov_mark::check!(macro_stmt_with_trailing_macro_expr); + check_at( + r#" +macro_rules! mac { + () => { mac!($) }; + ($x:tt) => { fn inner() {} }; +} +fn foo() { + mac!(); + $0 +} + "#, + expect![[r#" + block scope + inner: v + + crate + foo: v + "#]], + ) +} + +#[test] +fn trailing_expr_macro_expands_stmts() { + check_at( + r#" +macro_rules! foo { + () => { const FOO: u32 = 0;const BAR: u32 = 0; }; +} +fn f() {$0 + foo!{} +}; + "#, + expect![[r#" + block scope + BAR: v + FOO: v + + crate + f: v + "#]], + ) +} + +#[test] +fn resolve_extern_prelude_in_block() { + check_at( + r#" +//- /main.rs crate:main deps:core +fn main() { + mod f { + use core::S; + $0 + } +} + +//- /core.rs crate:core +pub struct S; + "#, + expect![[r#" + block scope + f: t + + block scope::f + S: ti vi + + crate + main: v + "#]], + ) +} + +#[test] +fn shadow_extern_prelude_in_block() { + check_at( + r#" +//- /main.rs crate:main deps:core +fn main() { + mod core { pub struct S; } + { + fn inner() {} // forces a block def map + use core::S; // should resolve to the local one + $0 + } +} + +//- /core.rs crate:core +pub const S; + "#, + expect![[r#" + block scope + S: ti vi + inner: v + + block scope + core: t + + block scope::core + S: t v + + crate + main: v + "#]], + ) +} diff --git a/src/tools/rust-analyzer/crates/hir-def/src/expr_store/tests/signatures.rs b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/tests/signatures.rs new file mode 100644 index 0000000000000..80561d6470830 --- /dev/null +++ b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/tests/signatures.rs @@ -0,0 +1,190 @@ +use crate::{ + GenericDefId, ModuleDefId, + expr_store::pretty::{print_function, print_struct}, + test_db::TestDB, +}; +use expect_test::{Expect, expect}; +use test_fixture::WithFixture; + +use super::super::*; + +fn lower_and_print(#[rust_analyzer::rust_fixture] ra_fixture: &str, expect: Expect) { + let db = TestDB::with_files(ra_fixture); + + let krate = db.fetch_test_crate(); + let def_map = db.crate_def_map(krate); + let mut defs = vec![]; + for (_, module) in def_map.modules() { + for decl in module.scope.declarations() { + let def: GenericDefId = match decl { + ModuleDefId::ModuleId(_) => continue, + ModuleDefId::FunctionId(id) => id.into(), + ModuleDefId::AdtId(id) => id.into(), + ModuleDefId::ConstId(id) => id.into(), + ModuleDefId::StaticId(id) => id.into(), + ModuleDefId::TraitId(id) => id.into(), + ModuleDefId::TraitAliasId(id) => id.into(), + ModuleDefId::TypeAliasId(id) => id.into(), + ModuleDefId::EnumVariantId(_) => continue, + ModuleDefId::BuiltinType(_) => continue, + ModuleDefId::MacroId(_) => continue, + }; + defs.push(def); + } + } + + let mut out = String::new(); + for def in defs { + match def { + GenericDefId::AdtId(adt_id) => match adt_id { + crate::AdtId::StructId(struct_id) => { + out += &print_struct(&db, &db.struct_signature(struct_id), Edition::CURRENT); + } + crate::AdtId::UnionId(_id) => (), + crate::AdtId::EnumId(_id) => (), + }, + GenericDefId::ConstId(_id) => (), + GenericDefId::FunctionId(function_id) => { + out += &print_function(&db, &db.function_signature(function_id), Edition::CURRENT) + } + + GenericDefId::ImplId(_id) => (), + GenericDefId::StaticId(_id) => (), + GenericDefId::TraitAliasId(_id) => (), + GenericDefId::TraitId(_id) => (), + GenericDefId::TypeAliasId(_id) => (), + } + } + + expect.assert_eq(&out); +} + +#[test] +fn structs() { + lower_and_print( + r" +struct S { field: foo, } +struct S(i32, u32, &'static str); +#[repr(Rust)] +struct S; + +struct S<'a, 'b, T: Clone, const C: usize = 3, X = ()> where X: Default, for<'a, 'c> fn() -> i32: for<'b> Trait<'a, Item = Boo>; +#[repr(C, packed)] +struct S {} +", + expect![[r#" + struct S {...} + struct S(...) + ; + struct S; + struct S<'a, 'b, T, const C: usize = 3, X = ()> + where + T: Clone, + X: Default, + for<'a, 'c> fn() -> i32: for<'b> Trait::<'a, Item = Boo> + ; + #[repr(C)] + #[repr(pack(1))] + struct S {...} + "#]], + ); +} + +#[test] +fn functions() { + lower_and_print( + r#" +fn foo<'a, const C: usize = 314235, T: Trait = B>(Struct { foo: bar }: &Struct, _: (), a: u32) -> &'a dyn Fn() -> i32 where (): Default {} +const async unsafe extern "C" fn a() {} +fn ret_impl_trait() -> impl Trait {} +"#, + expect![[r#" + fn foo<'a, const C: usize = 314235, T = B>(&Struct, (), u32) -> &'a dyn Fn::<(), Output = i32> + where + T: Trait::, + (): Default + {...} + const async unsafe extern "C" fn a() -> impl ::core::future::Future:: {...} + fn ret_impl_trait() -> impl Trait {...} + "#]], + ); +} + +#[test] +fn argument_position_impl_trait_functions() { + lower_and_print( + r" +fn impl_trait_args(_: impl Trait) {} +fn impl_trait_args2(_: impl Trait) {} + +fn impl_trait_ret() -> impl Trait {} +fn impl_trait_ret2() -> impl Trait {} + +fn not_allowed1(f: impl Fn(impl Foo)) { + let foo = S; + f(foo); +} + +// This caused stack overflow in #17498 +fn not_allowed2(f: impl Fn(&impl Foo)) { + let foo = S; + f(&foo); +} + +fn not_allowed3(bar: impl Bar) {} + +// This also caused stack overflow +fn not_allowed4(bar: impl Bar<&impl Foo>) {} + +fn allowed1(baz: impl Baz) {} + +fn allowed2<'a>(baz: impl Baz) {} + +fn allowed3(baz: impl Baz>) {} +", + expect![[r#" + fn impl_trait_args(Param[1]) + where + Param[1]: Trait + {...} + fn impl_trait_args2(Param[1]) + where + Param[1]: Trait::<{error}> + {...} + fn impl_trait_ret() -> impl Trait {...} + fn impl_trait_ret2() -> impl Trait::<{error}> {...} + fn not_allowed1(Param[0]) + where + Param[0]: Fn::<({error}), Output = ()> + {...} + fn not_allowed2(Param[0]) + where + Param[0]: Fn::<(&{error}), Output = ()> + {...} + fn not_allowed3(Param[0]) + where + Param[0]: Bar::<{error}> + {...} + fn not_allowed4(Param[0]) + where + Param[0]: Bar::<&{error}> + {...} + fn allowed1(Param[1]) + where + Param[0]: Foo, + Param[1]: Baz:: + {...} + fn allowed2<'a, Param[0], Param[1]>(Param[1]) + where + Param[0]: Foo, + Param[0]: 'a, + Param[1]: Baz:: + {...} + fn allowed3(Param[1]) + where + Param[0]: Foo, + Param[1]: Baz::> + {...} + "#]], + ); +} diff --git a/src/tools/rust-analyzer/crates/hir-def/src/find_path.rs b/src/tools/rust-analyzer/crates/hir-def/src/find_path.rs index c30ad0163b9db..9d62d9ce6526c 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/find_path.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/find_path.rs @@ -2,21 +2,21 @@ use std::{cell::Cell, cmp::Ordering, iter}; -use base_db::{CrateId, CrateOrigin, LangCrateOrigin}; +use base_db::{Crate, CrateOrigin, LangCrateOrigin}; use hir_expand::{ - name::{AsName, Name}, Lookup, + mod_path::{ModPath, PathKind}, + name::{AsName, Name}, }; use intern::sym; use rustc_hash::FxHashSet; use crate::{ + ImportPathConfig, ModuleDefId, ModuleId, db::DefDatabase, item_scope::ItemInNs, nameres::DefMap, - path::{ModPath, PathKind}, visibility::{Visibility, VisibilityExplicitness}, - ImportPathConfig, ModuleDefId, ModuleId, }; /// Find a path that can be used to refer to a certain item. This can depend on @@ -50,7 +50,7 @@ pub fn find_path( prefix: prefix_kind, cfg, ignore_local_imports, - is_std_item: db.crate_graph()[item_module.krate()].origin.is_lang(), + is_std_item: item_module.krate().data(db).origin.is_lang(), from, from_def_map: &from.def_map(db), fuel: Cell::new(FIND_PATH_FUEL), @@ -134,10 +134,11 @@ fn find_path_inner(ctx: &FindPathCtx<'_>, item: ItemInNs, max_len: usize) -> Opt if let Some(ModuleDefId::EnumVariantId(variant)) = item.as_module_def_id() { // - if the item is an enum variant, refer to it via the enum - if let Some(mut path) = - find_path_inner(ctx, ItemInNs::Types(variant.lookup(ctx.db).parent.into()), max_len) - { - path.push_segment(ctx.db.enum_variant_data(variant).name.clone()); + let loc = variant.lookup(ctx.db); + if let Some(mut path) = find_path_inner(ctx, ItemInNs::Types(loc.parent.into()), max_len) { + path.push_segment( + ctx.db.enum_variants(loc.parent).variants[loc.index as usize].1.clone(), + ); return Some(path); } // If this doesn't work, it seems we have no way of referring to the @@ -174,9 +175,9 @@ fn find_path_for_module( } // - otherwise if the item is the crate root of a dependency crate, return the name from the extern prelude - let root_def_map = ctx.from.derive_crate_root().def_map(ctx.db); + let root_local_def_map = ctx.from.derive_crate_root().local_def_map(ctx.db).1; // rev here so we prefer looking at renamed extern decls first - for (name, (def_id, _extern_crate)) in root_def_map.extern_prelude().rev() { + for (name, (def_id, _extern_crate)) in root_local_def_map.extern_prelude().rev() { if crate_root != def_id { continue; } @@ -360,7 +361,7 @@ fn calculate_best_path( // too (unless we can't name it at all). It could *also* be (re)exported by the same crate // that wants to import it here, but we always prefer to use the external path here. - ctx.db.crate_graph()[ctx.from.krate].dependencies.iter().for_each(|dep| { + ctx.from.krate.data(ctx.db).dependencies.iter().for_each(|dep| { find_in_dep(ctx, visited_modules, item, max_len, best_choice, dep.crate_id) }); } @@ -373,11 +374,10 @@ fn find_in_sysroot( max_len: usize, best_choice: &mut Option, ) { - let crate_graph = ctx.db.crate_graph(); - let dependencies = &crate_graph[ctx.from.krate].dependencies; + let dependencies = &ctx.from.krate.data(ctx.db).dependencies; let mut search = |lang, best_choice: &mut _| { if let Some(dep) = dependencies.iter().filter(|it| it.is_sysroot()).find(|dep| { - match crate_graph[dep.crate_id].origin { + match dep.crate_id.data(ctx.db).origin { CrateOrigin::Lang(l) => l == lang, _ => false, } @@ -419,7 +419,7 @@ fn find_in_dep( item: ItemInNs, max_len: usize, best_choice: &mut Option, - dep: CrateId, + dep: Crate, ) { let import_map = ctx.db.import_map(dep); let Some(import_info_for) = import_map.import_info_for(item) else { @@ -652,7 +652,7 @@ fn find_local_import_locations( #[cfg(test)] mod tests { - use expect_test::{expect, Expect}; + use expect_test::{Expect, expect}; use hir_expand::db::ExpandDatabase; use itertools::Itertools; use span::Edition; @@ -688,9 +688,10 @@ mod tests { }) .unwrap(); - let def_map = module.def_map(&db); + let (def_map, local_def_map) = module.local_def_map(&db); let resolved = def_map .resolve_path( + &local_def_map, &db, module.local_id, &mod_path, diff --git a/src/tools/rust-analyzer/crates/hir-def/src/generics.rs b/src/tools/rust-analyzer/crates/hir-def/src/generics.rs deleted file mode 100644 index e2b36da79b232..0000000000000 --- a/src/tools/rust-analyzer/crates/hir-def/src/generics.rs +++ /dev/null @@ -1,900 +0,0 @@ -//! Many kinds of items or constructs can have generic parameters: functions, -//! structs, impls, traits, etc. This module provides a common HIR for these -//! generic parameters. See also the `Generics` type and the `generics_of` query -//! in rustc. - -use std::{ops, sync::LazyLock}; - -use either::Either; -use hir_expand::{ - name::{AsName, Name}, - ExpandResult, -}; -use la_arena::{Arena, RawIdx}; -use stdx::{ - impl_from, - thin_vec::{EmptyOptimizedThinVec, ThinVec}, -}; -use syntax::ast::{self, HasGenericParams, HasName, HasTypeBounds}; -use triomphe::Arc; - -use crate::{ - db::DefDatabase, - expander::Expander, - item_tree::{AttrOwner, FileItemTreeId, GenericModItem, GenericsItemTreeNode, ItemTree}, - lower::LowerCtx, - nameres::{DefMap, MacroSubNs}, - path::{AssociatedTypeBinding, GenericArg, GenericArgs, NormalPath, Path}, - type_ref::{ - ArrayType, ConstRef, FnType, LifetimeRef, PathId, RefType, TypeBound, TypeRef, TypeRefId, - TypesMap, TypesSourceMap, - }, - AdtId, ConstParamId, GenericDefId, HasModule, ItemTreeLoc, LifetimeParamId, - LocalLifetimeParamId, LocalTypeOrConstParamId, Lookup, TypeOrConstParamId, TypeParamId, -}; - -/// The index of the self param in the generic of the non-parent definition. -const SELF_PARAM_ID_IN_SELF: la_arena::Idx = - LocalTypeOrConstParamId::from_raw(RawIdx::from_u32(0)); - -/// Data about a generic type parameter (to a function, struct, impl, ...). -#[derive(Clone, PartialEq, Eq, Debug, Hash)] -pub struct TypeParamData { - /// [`None`] only if the type ref is an [`TypeRef::ImplTrait`]. FIXME: Might be better to just - /// make it always be a value, giving impl trait a special name. - pub name: Option, - pub default: Option, - pub provenance: TypeParamProvenance, -} - -/// Data about a generic lifetime parameter (to a function, struct, impl, ...). -#[derive(Clone, PartialEq, Eq, Debug, Hash)] -pub struct LifetimeParamData { - pub name: Name, -} - -/// Data about a generic const parameter (to a function, struct, impl, ...). -#[derive(Clone, PartialEq, Eq, Debug, Hash)] -pub struct ConstParamData { - pub name: Name, - pub ty: TypeRefId, - pub default: Option, -} - -#[derive(Copy, Clone, PartialEq, Eq, Debug, Hash)] -pub enum TypeParamProvenance { - TypeParamList, - TraitSelf, - ArgumentImplTrait, -} - -#[derive(Clone, PartialEq, Eq, Debug, Hash)] -pub enum TypeOrConstParamData { - TypeParamData(TypeParamData), - ConstParamData(ConstParamData), -} - -impl TypeOrConstParamData { - pub fn name(&self) -> Option<&Name> { - match self { - TypeOrConstParamData::TypeParamData(it) => it.name.as_ref(), - TypeOrConstParamData::ConstParamData(it) => Some(&it.name), - } - } - - pub fn has_default(&self) -> bool { - match self { - TypeOrConstParamData::TypeParamData(it) => it.default.is_some(), - TypeOrConstParamData::ConstParamData(it) => it.default.is_some(), - } - } - - pub fn type_param(&self) -> Option<&TypeParamData> { - match self { - TypeOrConstParamData::TypeParamData(it) => Some(it), - TypeOrConstParamData::ConstParamData(_) => None, - } - } - - pub fn const_param(&self) -> Option<&ConstParamData> { - match self { - TypeOrConstParamData::TypeParamData(_) => None, - TypeOrConstParamData::ConstParamData(it) => Some(it), - } - } - - pub fn is_trait_self(&self) -> bool { - match self { - TypeOrConstParamData::TypeParamData(it) => { - it.provenance == TypeParamProvenance::TraitSelf - } - TypeOrConstParamData::ConstParamData(_) => false, - } - } -} - -impl_from!(TypeParamData, ConstParamData for TypeOrConstParamData); - -#[derive(Clone, PartialEq, Eq, Debug, Hash)] -pub enum GenericParamData { - TypeParamData(TypeParamData), - ConstParamData(ConstParamData), - LifetimeParamData(LifetimeParamData), -} - -impl GenericParamData { - pub fn name(&self) -> Option<&Name> { - match self { - GenericParamData::TypeParamData(it) => it.name.as_ref(), - GenericParamData::ConstParamData(it) => Some(&it.name), - GenericParamData::LifetimeParamData(it) => Some(&it.name), - } - } - - pub fn type_param(&self) -> Option<&TypeParamData> { - match self { - GenericParamData::TypeParamData(it) => Some(it), - _ => None, - } - } - - pub fn const_param(&self) -> Option<&ConstParamData> { - match self { - GenericParamData::ConstParamData(it) => Some(it), - _ => None, - } - } - - pub fn lifetime_param(&self) -> Option<&LifetimeParamData> { - match self { - GenericParamData::LifetimeParamData(it) => Some(it), - _ => None, - } - } -} - -impl_from!(TypeParamData, ConstParamData, LifetimeParamData for GenericParamData); - -pub enum GenericParamDataRef<'a> { - TypeParamData(&'a TypeParamData), - ConstParamData(&'a ConstParamData), - LifetimeParamData(&'a LifetimeParamData), -} - -/// Data about the generic parameters of a function, struct, impl, etc. -#[derive(Clone, PartialEq, Eq, Debug, Hash)] -pub struct GenericParams { - type_or_consts: Arena, - lifetimes: Arena, - where_predicates: Box<[WherePredicate]>, - pub types_map: TypesMap, -} - -impl ops::Index for GenericParams { - type Output = TypeOrConstParamData; - fn index(&self, index: LocalTypeOrConstParamId) -> &TypeOrConstParamData { - &self.type_or_consts[index] - } -} - -impl ops::Index for GenericParams { - type Output = LifetimeParamData; - fn index(&self, index: LocalLifetimeParamId) -> &LifetimeParamData { - &self.lifetimes[index] - } -} - -/// A single predicate from a where clause, i.e. `where Type: Trait`. Combined -/// where clauses like `where T: Foo + Bar` are turned into multiple of these. -/// It might still result in multiple actual predicates though, because of -/// associated type bindings like `Iterator`. -#[derive(Clone, PartialEq, Eq, Debug, Hash)] -pub enum WherePredicate { - TypeBound { target: WherePredicateTypeTarget, bound: TypeBound }, - Lifetime { target: LifetimeRef, bound: LifetimeRef }, - ForLifetime { lifetimes: Box<[Name]>, target: WherePredicateTypeTarget, bound: TypeBound }, -} - -#[derive(Clone, PartialEq, Eq, Debug, Hash)] -pub enum WherePredicateTypeTarget { - TypeRef(TypeRefId), - /// For desugared where predicates that can directly refer to a type param. - TypeOrConstParam(LocalTypeOrConstParamId), -} - -impl GenericParams { - /// Number of Generic parameters (type_or_consts + lifetimes) - #[inline] - pub fn len(&self) -> usize { - self.type_or_consts.len() + self.lifetimes.len() - } - - #[inline] - pub fn len_lifetimes(&self) -> usize { - self.lifetimes.len() - } - - #[inline] - pub fn len_type_or_consts(&self) -> usize { - self.type_or_consts.len() - } - - #[inline] - pub fn is_empty(&self) -> bool { - self.len() == 0 - } - - #[inline] - pub fn no_predicates(&self) -> bool { - self.where_predicates.is_empty() - } - - #[inline] - pub fn where_predicates(&self) -> std::slice::Iter<'_, WherePredicate> { - self.where_predicates.iter() - } - - /// Iterator of type_or_consts field - #[inline] - pub fn iter_type_or_consts( - &self, - ) -> impl DoubleEndedIterator { - self.type_or_consts.iter() - } - - /// Iterator of lifetimes field - #[inline] - pub fn iter_lt( - &self, - ) -> impl DoubleEndedIterator { - self.lifetimes.iter() - } - - pub fn find_type_by_name(&self, name: &Name, parent: GenericDefId) -> Option { - self.type_or_consts.iter().find_map(|(id, p)| { - if p.name().as_ref() == Some(&name) && p.type_param().is_some() { - Some(TypeParamId::from_unchecked(TypeOrConstParamId { local_id: id, parent })) - } else { - None - } - }) - } - - pub fn find_const_by_name(&self, name: &Name, parent: GenericDefId) -> Option { - self.type_or_consts.iter().find_map(|(id, p)| { - if p.name().as_ref() == Some(&name) && p.const_param().is_some() { - Some(ConstParamId::from_unchecked(TypeOrConstParamId { local_id: id, parent })) - } else { - None - } - }) - } - - #[inline] - pub fn trait_self_param(&self) -> Option { - if self.type_or_consts.is_empty() { - return None; - } - matches!( - self.type_or_consts[SELF_PARAM_ID_IN_SELF], - TypeOrConstParamData::TypeParamData(TypeParamData { - provenance: TypeParamProvenance::TraitSelf, - .. - }) - ) - .then(|| SELF_PARAM_ID_IN_SELF) - } - - pub fn find_lifetime_by_name( - &self, - name: &Name, - parent: GenericDefId, - ) -> Option { - self.lifetimes.iter().find_map(|(id, p)| { - if &p.name == name { - Some(LifetimeParamId { local_id: id, parent }) - } else { - None - } - }) - } - - pub(crate) fn generic_params_query( - db: &dyn DefDatabase, - def: GenericDefId, - ) -> Arc { - db.generic_params_with_source_map(def).0 - } - - pub(crate) fn generic_params_with_source_map_query( - db: &dyn DefDatabase, - def: GenericDefId, - ) -> (Arc, Option>) { - let _p = tracing::info_span!("generic_params_query").entered(); - - let krate = def.krate(db); - let cfg_options = db.crate_graph(); - let cfg_options = &cfg_options[krate].cfg_options; - - // Returns the generic parameters that are enabled under the current `#[cfg]` options - let enabled_params = - |params: &Arc, item_tree: &ItemTree, parent: GenericModItem| { - let enabled = |param| item_tree.attrs(db, krate, param).is_cfg_enabled(cfg_options); - let attr_owner_ct = |param| AttrOwner::TypeOrConstParamData(parent, param); - let attr_owner_lt = |param| AttrOwner::LifetimeParamData(parent, param); - - // In the common case, no parameters will by disabled by `#[cfg]` attributes. - // Therefore, make a first pass to check if all parameters are enabled and, if so, - // clone the `Interned` instead of recreating an identical copy. - let all_type_or_consts_enabled = - params.type_or_consts.iter().all(|(idx, _)| enabled(attr_owner_ct(idx))); - let all_lifetimes_enabled = - params.lifetimes.iter().all(|(idx, _)| enabled(attr_owner_lt(idx))); - - if all_type_or_consts_enabled && all_lifetimes_enabled { - params.clone() - } else { - Arc::new(GenericParams { - type_or_consts: all_type_or_consts_enabled - .then(|| params.type_or_consts.clone()) - .unwrap_or_else(|| { - params - .type_or_consts - .iter() - .filter(|&(idx, _)| enabled(attr_owner_ct(idx))) - .map(|(_, param)| param.clone()) - .collect() - }), - lifetimes: all_lifetimes_enabled - .then(|| params.lifetimes.clone()) - .unwrap_or_else(|| { - params - .lifetimes - .iter() - .filter(|&(idx, _)| enabled(attr_owner_lt(idx))) - .map(|(_, param)| param.clone()) - .collect() - }), - where_predicates: params.where_predicates.clone(), - types_map: params.types_map.clone(), - }) - } - }; - fn id_to_generics( - db: &dyn DefDatabase, - id: impl for<'db> Lookup< - Database<'db> = dyn DefDatabase + 'db, - Data = impl ItemTreeLoc, - >, - enabled_params: impl Fn( - &Arc, - &ItemTree, - GenericModItem, - ) -> Arc, - ) -> (Arc, Option>) - where - FileItemTreeId: Into, - { - let id = id.lookup(db).item_tree_id(); - let tree = id.item_tree(db); - let item = &tree[id.value]; - (enabled_params(item.generic_params(), &tree, id.value.into()), None) - } - - match def { - GenericDefId::FunctionId(id) => { - let loc = id.lookup(db); - let tree = loc.id.item_tree(db); - let item = &tree[loc.id.value]; - - let enabled_params = - enabled_params(&item.explicit_generic_params, &tree, loc.id.value.into()); - - let module = loc.container.module(db); - let func_data = db.function_data(id); - if func_data.params.is_empty() { - (enabled_params, None) - } else { - let source_maps = loc.id.item_tree_with_source_map(db).1; - let item_source_maps = source_maps.function(loc.id.value); - let mut generic_params = GenericParamsCollector { - type_or_consts: enabled_params.type_or_consts.clone(), - lifetimes: enabled_params.lifetimes.clone(), - where_predicates: enabled_params.where_predicates.clone().into(), - }; - - let (mut types_map, mut types_source_maps) = - (enabled_params.types_map.clone(), item_source_maps.generics().clone()); - // Don't create an `Expander` if not needed since this - // could cause a reparse after the `ItemTree` has been created due to the spanmap. - let mut expander = None; - for ¶m in func_data.params.iter() { - generic_params.fill_implicit_impl_trait_args( - db, - &mut types_map, - &mut types_source_maps, - &mut expander, - &mut || { - (module.def_map(db), Expander::new(db, loc.id.file_id(), module)) - }, - param, - &item.types_map, - item_source_maps.item(), - ); - } - let generics = generic_params.finish(types_map, &mut types_source_maps); - (generics, Some(Arc::new(types_source_maps))) - } - } - GenericDefId::AdtId(AdtId::StructId(id)) => id_to_generics(db, id, enabled_params), - GenericDefId::AdtId(AdtId::EnumId(id)) => id_to_generics(db, id, enabled_params), - GenericDefId::AdtId(AdtId::UnionId(id)) => id_to_generics(db, id, enabled_params), - GenericDefId::TraitId(id) => id_to_generics(db, id, enabled_params), - GenericDefId::TraitAliasId(id) => id_to_generics(db, id, enabled_params), - GenericDefId::TypeAliasId(id) => id_to_generics(db, id, enabled_params), - GenericDefId::ImplId(id) => id_to_generics(db, id, enabled_params), - GenericDefId::ConstId(_) | GenericDefId::StaticId(_) => ( - Arc::new(GenericParams { - type_or_consts: Default::default(), - lifetimes: Default::default(), - where_predicates: Default::default(), - types_map: Default::default(), - }), - None, - ), - } - } -} - -#[derive(Clone, Default)] -pub(crate) struct GenericParamsCollector { - pub(crate) type_or_consts: Arena, - lifetimes: Arena, - where_predicates: Vec, -} - -impl GenericParamsCollector { - pub(crate) fn fill( - &mut self, - lower_ctx: &mut LowerCtx<'_>, - node: &dyn HasGenericParams, - add_param_attrs: impl FnMut( - Either, - ast::GenericParam, - ), - ) { - if let Some(params) = node.generic_param_list() { - self.fill_params(lower_ctx, params, add_param_attrs) - } - if let Some(where_clause) = node.where_clause() { - self.fill_where_predicates(lower_ctx, where_clause); - } - } - - pub(crate) fn fill_bounds( - &mut self, - lower_ctx: &mut LowerCtx<'_>, - type_bounds: Option, - target: Either, - ) { - for bound in type_bounds.iter().flat_map(|type_bound_list| type_bound_list.bounds()) { - self.add_where_predicate_from_bound(lower_ctx, bound, None, target.clone()); - } - } - - fn fill_params( - &mut self, - lower_ctx: &mut LowerCtx<'_>, - params: ast::GenericParamList, - mut add_param_attrs: impl FnMut( - Either, - ast::GenericParam, - ), - ) { - for type_or_const_param in params.type_or_const_params() { - match type_or_const_param { - ast::TypeOrConstParam::Type(type_param) => { - let name = type_param.name().map_or_else(Name::missing, |it| it.as_name()); - // FIXME: Use `Path::from_src` - let default = - type_param.default_type().map(|it| TypeRef::from_ast(lower_ctx, it)); - let param = TypeParamData { - name: Some(name.clone()), - default, - provenance: TypeParamProvenance::TypeParamList, - }; - let idx = self.type_or_consts.alloc(param.into()); - let type_ref = lower_ctx.alloc_type_ref_desugared(TypeRef::Path(name.into())); - self.fill_bounds( - lower_ctx, - type_param.type_bound_list(), - Either::Left(type_ref), - ); - add_param_attrs(Either::Left(idx), ast::GenericParam::TypeParam(type_param)); - } - ast::TypeOrConstParam::Const(const_param) => { - let name = const_param.name().map_or_else(Name::missing, |it| it.as_name()); - let ty = TypeRef::from_ast_opt(lower_ctx, const_param.ty()); - let param = ConstParamData { - name, - ty, - default: ConstRef::from_const_param(lower_ctx, &const_param), - }; - let idx = self.type_or_consts.alloc(param.into()); - add_param_attrs(Either::Left(idx), ast::GenericParam::ConstParam(const_param)); - } - } - } - for lifetime_param in params.lifetime_params() { - let name = - lifetime_param.lifetime().map_or_else(Name::missing, |lt| Name::new_lifetime(<)); - let param = LifetimeParamData { name: name.clone() }; - let idx = self.lifetimes.alloc(param); - let lifetime_ref = LifetimeRef::new_name(name); - self.fill_bounds( - lower_ctx, - lifetime_param.type_bound_list(), - Either::Right(lifetime_ref), - ); - add_param_attrs(Either::Right(idx), ast::GenericParam::LifetimeParam(lifetime_param)); - } - } - - fn fill_where_predicates( - &mut self, - lower_ctx: &mut LowerCtx<'_>, - where_clause: ast::WhereClause, - ) { - for pred in where_clause.predicates() { - let target = if let Some(type_ref) = pred.ty() { - Either::Left(TypeRef::from_ast(lower_ctx, type_ref)) - } else if let Some(lifetime) = pred.lifetime() { - Either::Right(LifetimeRef::new(&lifetime)) - } else { - continue; - }; - - let lifetimes: Option> = pred.generic_param_list().map(|param_list| { - // Higher-Ranked Trait Bounds - param_list - .lifetime_params() - .map(|lifetime_param| { - lifetime_param - .lifetime() - .map_or_else(Name::missing, |lt| Name::new_lifetime(<)) - }) - .collect() - }); - for bound in pred.type_bound_list().iter().flat_map(|l| l.bounds()) { - self.add_where_predicate_from_bound( - lower_ctx, - bound, - lifetimes.as_deref(), - target.clone(), - ); - } - } - } - - fn add_where_predicate_from_bound( - &mut self, - lower_ctx: &mut LowerCtx<'_>, - bound: ast::TypeBound, - hrtb_lifetimes: Option<&[Name]>, - target: Either, - ) { - let bound = TypeBound::from_ast(lower_ctx, bound); - self.fill_impl_trait_bounds(lower_ctx.take_impl_traits_bounds()); - let predicate = match (target, bound) { - (Either::Left(type_ref), bound) => match hrtb_lifetimes { - Some(hrtb_lifetimes) => WherePredicate::ForLifetime { - lifetimes: hrtb_lifetimes.to_vec().into_boxed_slice(), - target: WherePredicateTypeTarget::TypeRef(type_ref), - bound, - }, - None => WherePredicate::TypeBound { - target: WherePredicateTypeTarget::TypeRef(type_ref), - bound, - }, - }, - (Either::Right(lifetime), TypeBound::Lifetime(bound)) => { - WherePredicate::Lifetime { target: lifetime, bound } - } - _ => return, - }; - self.where_predicates.push(predicate); - } - - fn fill_impl_trait_bounds(&mut self, impl_bounds: Vec>) { - for bounds in impl_bounds { - let param = TypeParamData { - name: None, - default: None, - provenance: TypeParamProvenance::ArgumentImplTrait, - }; - let param_id = self.type_or_consts.alloc(param.into()); - for bound in &bounds { - self.where_predicates.push(WherePredicate::TypeBound { - target: WherePredicateTypeTarget::TypeOrConstParam(param_id), - bound: bound.clone(), - }); - } - } - } - - fn fill_implicit_impl_trait_args( - &mut self, - db: &dyn DefDatabase, - generics_types_map: &mut TypesMap, - generics_types_source_map: &mut TypesSourceMap, - // FIXME: Change this back to `LazyCell` if https://github.com/rust-lang/libs-team/issues/429 is accepted. - exp: &mut Option<(Arc, Expander)>, - exp_fill: &mut dyn FnMut() -> (Arc, Expander), - type_ref: TypeRefId, - types_map: &TypesMap, - types_source_map: &TypesSourceMap, - ) { - TypeRef::walk(type_ref, types_map, &mut |type_ref| { - if let TypeRef::ImplTrait(bounds) = type_ref { - let param = TypeParamData { - name: None, - default: None, - provenance: TypeParamProvenance::ArgumentImplTrait, - }; - let param_id = self.type_or_consts.alloc(param.into()); - for bound in bounds { - let bound = copy_type_bound( - bound, - types_map, - types_source_map, - generics_types_map, - generics_types_source_map, - ); - self.where_predicates.push(WherePredicate::TypeBound { - target: WherePredicateTypeTarget::TypeOrConstParam(param_id), - bound, - }); - } - } - - if let TypeRef::Macro(mc) = type_ref { - let macro_call = mc.to_node(db.upcast()); - let (def_map, expander) = exp.get_or_insert_with(&mut *exp_fill); - - let module = expander.module.local_id; - let resolver = |path: &_| { - def_map - .resolve_path( - db, - module, - path, - crate::item_scope::BuiltinShadowMode::Other, - Some(MacroSubNs::Bang), - ) - .0 - .take_macros() - }; - if let Ok(ExpandResult { value: Some((mark, expanded)), .. }) = - expander.enter_expand(db, macro_call, resolver) - { - let (mut macro_types_map, mut macro_types_source_map) = - (TypesMap::default(), TypesSourceMap::default()); - let mut ctx = - expander.ctx(db, &mut macro_types_map, &mut macro_types_source_map); - let type_ref = TypeRef::from_ast(&mut ctx, expanded.tree()); - self.fill_implicit_impl_trait_args( - db, - generics_types_map, - generics_types_source_map, - &mut *exp, - exp_fill, - type_ref, - ¯o_types_map, - ¯o_types_source_map, - ); - exp.get_or_insert_with(&mut *exp_fill).1.exit(mark); - } - } - }); - } - - pub(crate) fn finish( - self, - mut generics_types_map: TypesMap, - generics_types_source_map: &mut TypesSourceMap, - ) -> Arc { - let Self { mut lifetimes, mut type_or_consts, mut where_predicates } = self; - - if lifetimes.is_empty() && type_or_consts.is_empty() && where_predicates.is_empty() { - static EMPTY: LazyLock> = LazyLock::new(|| { - Arc::new(GenericParams { - lifetimes: Arena::new(), - type_or_consts: Arena::new(), - where_predicates: Box::default(), - types_map: TypesMap::default(), - }) - }); - return Arc::clone(&EMPTY); - } - - lifetimes.shrink_to_fit(); - type_or_consts.shrink_to_fit(); - where_predicates.shrink_to_fit(); - generics_types_map.shrink_to_fit(); - generics_types_source_map.shrink_to_fit(); - Arc::new(GenericParams { - type_or_consts, - lifetimes, - where_predicates: where_predicates.into_boxed_slice(), - types_map: generics_types_map, - }) - } -} - -/// Copies a `TypeRef` from a `TypesMap` (accompanied with `TypesSourceMap`) into another `TypesMap` -/// (and `TypesSourceMap`). -fn copy_type_ref( - type_ref: TypeRefId, - from: &TypesMap, - from_source_map: &TypesSourceMap, - to: &mut TypesMap, - to_source_map: &mut TypesSourceMap, -) -> TypeRefId { - let result = match &from[type_ref] { - TypeRef::Fn(fn_) => { - let params = fn_.params().iter().map(|(name, param_type)| { - (name.clone(), copy_type_ref(*param_type, from, from_source_map, to, to_source_map)) - }); - TypeRef::Fn(FnType::new(fn_.is_varargs(), fn_.is_unsafe(), fn_.abi().clone(), params)) - } - TypeRef::Tuple(types) => TypeRef::Tuple(EmptyOptimizedThinVec::from_iter( - types.iter().map(|&t| copy_type_ref(t, from, from_source_map, to, to_source_map)), - )), - &TypeRef::RawPtr(type_ref, mutbl) => TypeRef::RawPtr( - copy_type_ref(type_ref, from, from_source_map, to, to_source_map), - mutbl, - ), - TypeRef::Reference(ref_) => TypeRef::Reference(Box::new(RefType { - ty: copy_type_ref(ref_.ty, from, from_source_map, to, to_source_map), - lifetime: ref_.lifetime.clone(), - mutability: ref_.mutability, - })), - TypeRef::Array(array) => TypeRef::Array(Box::new(ArrayType { - ty: copy_type_ref(array.ty, from, from_source_map, to, to_source_map), - len: array.len.clone(), - })), - &TypeRef::Slice(type_ref) => { - TypeRef::Slice(copy_type_ref(type_ref, from, from_source_map, to, to_source_map)) - } - TypeRef::ImplTrait(bounds) => TypeRef::ImplTrait(ThinVec::from_iter(copy_type_bounds( - bounds, - from, - from_source_map, - to, - to_source_map, - ))), - TypeRef::DynTrait(bounds) => TypeRef::DynTrait(ThinVec::from_iter(copy_type_bounds( - bounds, - from, - from_source_map, - to, - to_source_map, - ))), - TypeRef::Path(path) => { - TypeRef::Path(copy_path(path, from, from_source_map, to, to_source_map)) - } - TypeRef::Never => TypeRef::Never, - TypeRef::Placeholder => TypeRef::Placeholder, - TypeRef::Macro(macro_call) => TypeRef::Macro(*macro_call), - TypeRef::Error => TypeRef::Error, - }; - let id = to.types.alloc(result); - if let Some(&ptr) = from_source_map.types_map_back.get(id) { - to_source_map.types_map_back.insert(id, ptr); - } - id -} - -fn copy_path( - path: &Path, - from: &TypesMap, - from_source_map: &TypesSourceMap, - to: &mut TypesMap, - to_source_map: &mut TypesSourceMap, -) -> Path { - match path { - Path::BarePath(mod_path) => Path::BarePath(mod_path.clone()), - Path::Normal(path) => { - let type_anchor = path - .type_anchor() - .map(|type_ref| copy_type_ref(type_ref, from, from_source_map, to, to_source_map)); - let mod_path = path.mod_path().clone(); - let generic_args = path.generic_args().iter().map(|generic_args| { - copy_generic_args(generic_args, from, from_source_map, to, to_source_map) - }); - Path::Normal(NormalPath::new(type_anchor, mod_path, generic_args)) - } - Path::LangItem(lang_item, name) => Path::LangItem(*lang_item, name.clone()), - } -} - -fn copy_generic_args( - generic_args: &Option, - from: &TypesMap, - from_source_map: &TypesSourceMap, - to: &mut TypesMap, - to_source_map: &mut TypesSourceMap, -) -> Option { - generic_args.as_ref().map(|generic_args| { - let args = generic_args - .args - .iter() - .map(|arg| match arg { - &GenericArg::Type(ty) => { - GenericArg::Type(copy_type_ref(ty, from, from_source_map, to, to_source_map)) - } - GenericArg::Lifetime(lifetime) => GenericArg::Lifetime(lifetime.clone()), - GenericArg::Const(konst) => GenericArg::Const(konst.clone()), - }) - .collect(); - let bindings = generic_args - .bindings - .iter() - .map(|binding| { - let name = binding.name.clone(); - let args = - copy_generic_args(&binding.args, from, from_source_map, to, to_source_map); - let type_ref = binding.type_ref.map(|type_ref| { - copy_type_ref(type_ref, from, from_source_map, to, to_source_map) - }); - let bounds = - copy_type_bounds(&binding.bounds, from, from_source_map, to, to_source_map) - .collect(); - AssociatedTypeBinding { name, args, type_ref, bounds } - }) - .collect(); - GenericArgs { - args, - has_self_type: generic_args.has_self_type, - bindings, - desugared_from_fn: generic_args.desugared_from_fn, - } - }) -} - -fn copy_type_bounds<'a>( - bounds: &'a [TypeBound], - from: &'a TypesMap, - from_source_map: &'a TypesSourceMap, - to: &'a mut TypesMap, - to_source_map: &'a mut TypesSourceMap, -) -> impl stdx::thin_vec::TrustedLen + 'a { - bounds.iter().map(|bound| copy_type_bound(bound, from, from_source_map, to, to_source_map)) -} - -fn copy_type_bound( - bound: &TypeBound, - from: &TypesMap, - from_source_map: &TypesSourceMap, - to: &mut TypesMap, - to_source_map: &mut TypesSourceMap, -) -> TypeBound { - let mut copy_path_id = |path: PathId| { - let new_path = copy_path(&from[path], from, from_source_map, to, to_source_map); - let new_path_id = to.types.alloc(TypeRef::Path(new_path)); - if let Some(&ptr) = from_source_map.types_map_back.get(path.type_ref()) { - to_source_map.types_map_back.insert(new_path_id, ptr); - } - PathId::from_type_ref_unchecked(new_path_id) - }; - - match bound { - &TypeBound::Path(path, modifier) => TypeBound::Path(copy_path_id(path), modifier), - TypeBound::ForLifetime(lifetimes, path) => { - TypeBound::ForLifetime(lifetimes.clone(), copy_path_id(*path)) - } - TypeBound::Lifetime(lifetime) => TypeBound::Lifetime(lifetime.clone()), - TypeBound::Use(use_args) => TypeBound::Use(use_args.clone()), - TypeBound::Error => TypeBound::Error, - } -} diff --git a/src/tools/rust-analyzer/crates/hir-def/src/hir.rs b/src/tools/rust-analyzer/crates/hir-def/src/hir.rs index 494644d8eff9d..0fc7857d9781a 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/hir.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/hir.rs @@ -13,11 +13,12 @@ //! See also a neighboring `body` module. pub mod format_args; +pub mod generics; pub mod type_ref; use std::fmt; -use hir_expand::{name::Name, MacroDefId}; +use hir_expand::{MacroDefId, name::Name}; use intern::Symbol; use la_arena::Idx; use rustc_apfloat::ieee::{Half as f16, Quad as f128}; @@ -25,10 +26,13 @@ use syntax::ast; use type_ref::TypeRefId; use crate::{ + BlockId, builtin_type::{BuiltinFloat, BuiltinInt, BuiltinUint}, - path::{GenericArgs, Path}, + expr_store::{ + HygieneId, + path::{GenericArgs, Path}, + }, type_ref::{Mutability, Rawness}, - BlockId, ConstBlockId, }; pub use syntax::ast::{ArithOp, BinaryOp, CmpOp, LogicOp, Ordering, RangeOp, UnaryOp}; @@ -137,11 +141,7 @@ pub enum LiteralOrConst { impl Literal { pub fn negate(self) -> Option { - if let Literal::Int(i, k) = self { - Some(Literal::Int(-i, k)) - } else { - None - } + if let Literal::Int(i, k) = self { Some(Literal::Int(-i, k)) } else { None } } } @@ -212,7 +212,7 @@ pub enum Expr { statements: Box<[Statement]>, tail: Option, }, - Const(ConstBlockId), + Const(ExprId), // FIXME: Fold this into Block with an unsafe flag? Unsafe { id: Option, @@ -555,6 +555,9 @@ pub struct Binding { pub name: Name, pub mode: BindingAnnotation, pub problems: Option, + /// Note that this may not be the direct `SyntaxContextId` of the binding's expansion, because transparent + /// expansions are attributed to their parent expansion (recursively). + pub hygiene: HygieneId, } #[derive(Debug, Clone, Eq, PartialEq)] diff --git a/src/tools/rust-analyzer/crates/hir-def/src/hir/format_args.rs b/src/tools/rust-analyzer/crates/hir-def/src/hir/format_args.rs index 24badc52f25ac..f27a4062a63b6 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/hir/format_args.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/hir/format_args.rs @@ -4,11 +4,11 @@ use either::Either; use hir_expand::name::Name; use intern::Symbol; use rustc_parse_format as parse; -use span::SyntaxContextId; +use span::SyntaxContext; use stdx::TupleExt; use syntax::{ - ast::{self, IsString}, TextRange, + ast::{self, IsString}, }; use crate::hir::ExprId; @@ -176,7 +176,7 @@ pub(crate) fn parse( is_direct_literal: bool, mut synth: impl FnMut(Name, Option) -> ExprId, mut record_usage: impl FnMut(Name, Option), - call_ctx: SyntaxContextId, + call_ctx: SyntaxContext, ) -> FormatArgs { let Ok(text) = s.value() else { return FormatArgs { @@ -214,7 +214,7 @@ pub(crate) fn parse( }; } - let to_span = |inner_span: parse::InnerSpan| { + let to_span = |inner_span: std::ops::Range| { is_source_literal.then(|| { TextRange::new(inner_span.start.try_into().unwrap(), inner_span.end.try_into().unwrap()) }) @@ -297,7 +297,7 @@ pub(crate) fn parse( unfinished_literal.clear(); } - let span = parser.arg_places.get(placeholder_index).and_then(|&s| to_span(s)); + let span = parser.arg_places.get(placeholder_index).and_then(|s| to_span(s.clone())); placeholder_index += 1; let position_span = to_span(position_span); @@ -460,10 +460,6 @@ impl FormatArgumentsCollector { } } - pub fn new() -> Self { - Default::default() - } - pub fn add(&mut self, arg: FormatArgument) -> usize { let index = self.arguments.len(); if let Some(name) = arg.kind.ident() { diff --git a/src/tools/rust-analyzer/crates/hir-def/src/hir/generics.rs b/src/tools/rust-analyzer/crates/hir-def/src/hir/generics.rs new file mode 100644 index 0000000000000..a9a0e36312c1a --- /dev/null +++ b/src/tools/rust-analyzer/crates/hir-def/src/hir/generics.rs @@ -0,0 +1,403 @@ +//! Pre-type IR item generics +use std::{ops, sync::LazyLock}; + +use hir_expand::name::Name; +use la_arena::{Arena, Idx, RawIdx}; +use stdx::impl_from; +use thin_vec::ThinVec; +use triomphe::Arc; + +use crate::{ + AdtId, ConstParamId, GenericDefId, LifetimeParamId, TypeOrConstParamId, TypeParamId, + db::DefDatabase, + expr_store::{ExpressionStore, ExpressionStoreSourceMap}, + type_ref::{ConstRef, LifetimeRefId, TypeBound, TypeRefId}, +}; + +pub type LocalTypeOrConstParamId = Idx; +pub type LocalLifetimeParamId = Idx; + +/// Data about a generic type parameter (to a function, struct, impl, ...). +#[derive(Clone, PartialEq, Eq, Debug, Hash)] +pub struct TypeParamData { + /// [`None`] only if the type ref is an [`TypeRef::ImplTrait`]. FIXME: Might be better to just + /// make it always be a value, giving impl trait a special name. + pub name: Option, + pub default: Option, + pub provenance: TypeParamProvenance, +} + +/// Data about a generic lifetime parameter (to a function, struct, impl, ...). +#[derive(Clone, PartialEq, Eq, Debug, Hash)] +pub struct LifetimeParamData { + pub name: Name, +} + +/// Data about a generic const parameter (to a function, struct, impl, ...). +#[derive(Clone, PartialEq, Eq, Debug, Hash)] +pub struct ConstParamData { + pub name: Name, + pub ty: TypeRefId, + pub default: Option, +} + +#[derive(Copy, Clone, PartialEq, Eq, Debug, Hash)] +pub enum TypeParamProvenance { + TypeParamList, + TraitSelf, + ArgumentImplTrait, +} + +#[derive(Clone, PartialEq, Eq, Debug, Hash)] +pub enum TypeOrConstParamData { + TypeParamData(TypeParamData), + ConstParamData(ConstParamData), +} + +impl TypeOrConstParamData { + pub fn name(&self) -> Option<&Name> { + match self { + TypeOrConstParamData::TypeParamData(it) => it.name.as_ref(), + TypeOrConstParamData::ConstParamData(it) => Some(&it.name), + } + } + + pub fn has_default(&self) -> bool { + match self { + TypeOrConstParamData::TypeParamData(it) => it.default.is_some(), + TypeOrConstParamData::ConstParamData(it) => it.default.is_some(), + } + } + + pub fn type_param(&self) -> Option<&TypeParamData> { + match self { + TypeOrConstParamData::TypeParamData(it) => Some(it), + TypeOrConstParamData::ConstParamData(_) => None, + } + } + + pub fn const_param(&self) -> Option<&ConstParamData> { + match self { + TypeOrConstParamData::TypeParamData(_) => None, + TypeOrConstParamData::ConstParamData(it) => Some(it), + } + } + + pub fn is_trait_self(&self) -> bool { + match self { + TypeOrConstParamData::TypeParamData(it) => { + it.provenance == TypeParamProvenance::TraitSelf + } + TypeOrConstParamData::ConstParamData(_) => false, + } + } +} + +impl_from!(TypeParamData, ConstParamData for TypeOrConstParamData); + +#[derive(Clone, PartialEq, Eq, Debug, Hash)] +pub enum GenericParamData { + TypeParamData(TypeParamData), + ConstParamData(ConstParamData), + LifetimeParamData(LifetimeParamData), +} + +impl GenericParamData { + pub fn name(&self) -> Option<&Name> { + match self { + GenericParamData::TypeParamData(it) => it.name.as_ref(), + GenericParamData::ConstParamData(it) => Some(&it.name), + GenericParamData::LifetimeParamData(it) => Some(&it.name), + } + } + + pub fn type_param(&self) -> Option<&TypeParamData> { + match self { + GenericParamData::TypeParamData(it) => Some(it), + _ => None, + } + } + + pub fn const_param(&self) -> Option<&ConstParamData> { + match self { + GenericParamData::ConstParamData(it) => Some(it), + _ => None, + } + } + + pub fn lifetime_param(&self) -> Option<&LifetimeParamData> { + match self { + GenericParamData::LifetimeParamData(it) => Some(it), + _ => None, + } + } +} + +impl_from!(TypeParamData, ConstParamData, LifetimeParamData for GenericParamData); + +#[derive(Debug, Clone, Copy)] +pub enum GenericParamDataRef<'a> { + TypeParamData(&'a TypeParamData), + ConstParamData(&'a ConstParamData), + LifetimeParamData(&'a LifetimeParamData), +} + +/// Data about the generic parameters of a function, struct, impl, etc. +#[derive(Clone, PartialEq, Eq, Debug, Hash)] +pub struct GenericParams { + pub(crate) type_or_consts: Arena, + pub(crate) lifetimes: Arena, + pub(crate) where_predicates: Box<[WherePredicate]>, +} + +impl ops::Index for GenericParams { + type Output = TypeOrConstParamData; + fn index(&self, index: LocalTypeOrConstParamId) -> &TypeOrConstParamData { + &self.type_or_consts[index] + } +} + +impl ops::Index for GenericParams { + type Output = LifetimeParamData; + fn index(&self, index: LocalLifetimeParamId) -> &LifetimeParamData { + &self.lifetimes[index] + } +} + +/// A single predicate from a where clause, i.e. `where Type: Trait`. Combined +/// where clauses like `where T: Foo + Bar` are turned into multiple of these. +/// It might still result in multiple actual predicates though, because of +/// associated type bindings like `Iterator`. +#[derive(Clone, PartialEq, Eq, Debug, Hash)] +pub enum WherePredicate { + TypeBound { target: TypeRefId, bound: TypeBound }, + Lifetime { target: LifetimeRefId, bound: LifetimeRefId }, + ForLifetime { lifetimes: ThinVec, target: TypeRefId, bound: TypeBound }, +} + +static EMPTY: LazyLock> = LazyLock::new(|| { + Arc::new(GenericParams { + type_or_consts: Arena::default(), + lifetimes: Arena::default(), + where_predicates: Box::default(), + }) +}); + +impl GenericParams { + /// The index of the self param in the generic of the non-parent definition. + pub(crate) const SELF_PARAM_ID_IN_SELF: la_arena::Idx = + LocalTypeOrConstParamId::from_raw(RawIdx::from_u32(0)); + + pub fn new(db: &dyn DefDatabase, def: GenericDefId) -> Arc { + match def { + GenericDefId::AdtId(AdtId::EnumId(it)) => db.enum_signature(it).generic_params.clone(), + GenericDefId::AdtId(AdtId::StructId(it)) => { + db.struct_signature(it).generic_params.clone() + } + GenericDefId::AdtId(AdtId::UnionId(it)) => { + db.union_signature(it).generic_params.clone() + } + GenericDefId::ConstId(_) => EMPTY.clone(), + GenericDefId::FunctionId(function_id) => { + db.function_signature(function_id).generic_params.clone() + } + GenericDefId::ImplId(impl_id) => db.impl_signature(impl_id).generic_params.clone(), + GenericDefId::StaticId(_) => EMPTY.clone(), + GenericDefId::TraitAliasId(trait_alias_id) => { + db.trait_alias_signature(trait_alias_id).generic_params.clone() + } + GenericDefId::TraitId(trait_id) => db.trait_signature(trait_id).generic_params.clone(), + GenericDefId::TypeAliasId(type_alias_id) => { + db.type_alias_signature(type_alias_id).generic_params.clone() + } + } + } + + pub fn generic_params_and_store( + db: &dyn DefDatabase, + def: GenericDefId, + ) -> (Arc, Arc) { + match def { + GenericDefId::AdtId(AdtId::EnumId(id)) => { + let sig = db.enum_signature(id); + (sig.generic_params.clone(), sig.store.clone()) + } + GenericDefId::AdtId(AdtId::StructId(id)) => { + let sig = db.struct_signature(id); + (sig.generic_params.clone(), sig.store.clone()) + } + GenericDefId::AdtId(AdtId::UnionId(id)) => { + let sig = db.union_signature(id); + (sig.generic_params.clone(), sig.store.clone()) + } + GenericDefId::ConstId(id) => { + let sig = db.const_signature(id); + (EMPTY.clone(), sig.store.clone()) + } + GenericDefId::FunctionId(id) => { + let sig = db.function_signature(id); + (sig.generic_params.clone(), sig.store.clone()) + } + GenericDefId::ImplId(id) => { + let sig = db.impl_signature(id); + (sig.generic_params.clone(), sig.store.clone()) + } + GenericDefId::StaticId(id) => { + let sig = db.static_signature(id); + (EMPTY.clone(), sig.store.clone()) + } + GenericDefId::TraitAliasId(id) => { + let sig = db.trait_alias_signature(id); + (sig.generic_params.clone(), sig.store.clone()) + } + GenericDefId::TraitId(id) => { + let sig = db.trait_signature(id); + (sig.generic_params.clone(), sig.store.clone()) + } + GenericDefId::TypeAliasId(id) => { + let sig = db.type_alias_signature(id); + (sig.generic_params.clone(), sig.store.clone()) + } + } + } + + pub fn generic_params_and_store_and_source_map( + db: &dyn DefDatabase, + def: GenericDefId, + ) -> (Arc, Arc, Arc) { + match def { + GenericDefId::AdtId(AdtId::EnumId(id)) => { + let (sig, sm) = db.enum_signature_with_source_map(id); + (sig.generic_params.clone(), sig.store.clone(), sm) + } + GenericDefId::AdtId(AdtId::StructId(id)) => { + let (sig, sm) = db.struct_signature_with_source_map(id); + (sig.generic_params.clone(), sig.store.clone(), sm) + } + GenericDefId::AdtId(AdtId::UnionId(id)) => { + let (sig, sm) = db.union_signature_with_source_map(id); + (sig.generic_params.clone(), sig.store.clone(), sm) + } + GenericDefId::ConstId(id) => { + let (sig, sm) = db.const_signature_with_source_map(id); + (EMPTY.clone(), sig.store.clone(), sm) + } + GenericDefId::FunctionId(id) => { + let (sig, sm) = db.function_signature_with_source_map(id); + (sig.generic_params.clone(), sig.store.clone(), sm) + } + GenericDefId::ImplId(id) => { + let (sig, sm) = db.impl_signature_with_source_map(id); + (sig.generic_params.clone(), sig.store.clone(), sm) + } + GenericDefId::StaticId(id) => { + let (sig, sm) = db.static_signature_with_source_map(id); + (EMPTY.clone(), sig.store.clone(), sm) + } + GenericDefId::TraitAliasId(id) => { + let (sig, sm) = db.trait_alias_signature_with_source_map(id); + (sig.generic_params.clone(), sig.store.clone(), sm) + } + GenericDefId::TraitId(id) => { + let (sig, sm) = db.trait_signature_with_source_map(id); + (sig.generic_params.clone(), sig.store.clone(), sm) + } + GenericDefId::TypeAliasId(id) => { + let (sig, sm) = db.type_alias_signature_with_source_map(id); + (sig.generic_params.clone(), sig.store.clone(), sm) + } + } + } + + /// Number of Generic parameters (type_or_consts + lifetimes) + #[inline] + pub fn len(&self) -> usize { + self.type_or_consts.len() + self.lifetimes.len() + } + + #[inline] + pub fn len_lifetimes(&self) -> usize { + self.lifetimes.len() + } + + #[inline] + pub fn len_type_or_consts(&self) -> usize { + self.type_or_consts.len() + } + + #[inline] + pub fn is_empty(&self) -> bool { + self.len() == 0 + } + + #[inline] + pub fn no_predicates(&self) -> bool { + self.where_predicates.is_empty() + } + + #[inline] + pub fn where_predicates(&self) -> std::slice::Iter<'_, WherePredicate> { + self.where_predicates.iter() + } + + /// Iterator of type_or_consts field + #[inline] + pub fn iter_type_or_consts( + &self, + ) -> impl DoubleEndedIterator { + self.type_or_consts.iter() + } + + /// Iterator of lifetimes field + #[inline] + pub fn iter_lt( + &self, + ) -> impl DoubleEndedIterator { + self.lifetimes.iter() + } + + pub fn find_type_by_name(&self, name: &Name, parent: GenericDefId) -> Option { + self.type_or_consts.iter().find_map(|(id, p)| { + if p.name().as_ref() == Some(&name) && p.type_param().is_some() { + Some(TypeParamId::from_unchecked(TypeOrConstParamId { local_id: id, parent })) + } else { + None + } + }) + } + + pub fn find_const_by_name(&self, name: &Name, parent: GenericDefId) -> Option { + self.type_or_consts.iter().find_map(|(id, p)| { + if p.name().as_ref() == Some(&name) && p.const_param().is_some() { + Some(ConstParamId::from_unchecked(TypeOrConstParamId { local_id: id, parent })) + } else { + None + } + }) + } + + #[inline] + pub fn trait_self_param(&self) -> Option { + if self.type_or_consts.is_empty() { + return None; + } + matches!( + self.type_or_consts[Self::SELF_PARAM_ID_IN_SELF], + TypeOrConstParamData::TypeParamData(TypeParamData { + provenance: TypeParamProvenance::TraitSelf, + .. + }) + ) + .then(|| Self::SELF_PARAM_ID_IN_SELF) + } + + pub fn find_lifetime_by_name( + &self, + name: &Name, + parent: GenericDefId, + ) -> Option { + self.lifetimes.iter().find_map(|(id, p)| { + if &p.name == name { Some(LifetimeParamId { local_id: id, parent }) } else { None } + }) + } +} diff --git a/src/tools/rust-analyzer/crates/hir-def/src/hir/type_ref.rs b/src/tools/rust-analyzer/crates/hir-def/src/hir/type_ref.rs index 6de4026dff75b..eb3b92d31f1c1 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/hir/type_ref.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/hir/type_ref.rs @@ -1,29 +1,21 @@ //! HIR for references to types. Paths in these are not yet resolved. They can //! be directly created from an ast::TypeRef, without further queries. -use core::fmt; -use std::{fmt::Write, ops::Index}; +use std::fmt::Write; -use hir_expand::{ - db::ExpandDatabase, - name::{AsName, Name}, - AstId, InFile, -}; -use intern::{sym, Symbol}; -use la_arena::{Arena, ArenaMap, Idx}; -use span::Edition; -use stdx::thin_vec::{thin_vec_with_header_struct, EmptyOptimizedThinVec, ThinVec}; -use syntax::{ - ast::{self, HasGenericArgs, HasName, IsString}, - AstPtr, -}; +use hir_expand::name::Name; +use intern::Symbol; +use la_arena::Idx; +use thin_vec::ThinVec; use crate::{ + LifetimeParamId, TypeParamId, builtin_type::{BuiltinInt, BuiltinType, BuiltinUint}, - hir::Literal, - lower::LowerCtx, - path::{GenericArg, Path}, - SyntheticSyntax, + expr_store::{ + ExpressionStore, + path::{GenericArg, Path}, + }, + hir::{ExprId, Literal}, }; #[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)] @@ -34,11 +26,7 @@ pub enum Mutability { impl Mutability { pub fn from_mutable(mutable: bool) -> Mutability { - if mutable { - Mutability::Mut - } else { - Mutability::Shared - } + if mutable { Mutability::Mut } else { Mutability::Shared } } pub fn as_keyword_for_ref(self) -> &'static str { @@ -80,11 +68,7 @@ pub enum Rawness { impl Rawness { pub fn from_raw(is_raw: bool) -> Rawness { - if is_raw { - Rawness::RawPtr - } else { - Rawness::Ref - } + if is_raw { Rawness::RawPtr } else { Rawness::Ref } } pub fn is_raw(&self) -> bool { @@ -114,40 +98,32 @@ pub struct TraitRef { pub path: PathId, } -impl TraitRef { - /// Converts an `ast::PathType` to a `hir::TraitRef`. - pub(crate) fn from_ast(ctx: &mut LowerCtx<'_>, node: ast::Type) -> Option { - // FIXME: Use `Path::from_src` - match &node { - ast::Type::PathType(path) => path - .path() - .and_then(|it| ctx.lower_path(it)) - .map(|path| TraitRef { path: ctx.alloc_path(path, AstPtr::new(&node)) }), - _ => None, - } - } +#[derive(Clone, PartialEq, Eq, Hash, Debug)] +pub struct FnType { + pub params: Box<[(Option, TypeRefId)]>, + pub is_varargs: bool, + pub is_unsafe: bool, + pub abi: Option, } -thin_vec_with_header_struct! { - pub new(pub(crate)) struct FnType, FnTypeHeader { - pub params: [(Option, TypeRefId)], - pub is_varargs: bool, - pub is_unsafe: bool, - pub abi: Option; ref, +impl FnType { + #[inline] + pub fn split_params_and_ret(&self) -> (&[(Option, TypeRefId)], TypeRefId) { + let (ret, params) = self.params.split_last().expect("should have at least return type"); + (params, ret.1) } } #[derive(Clone, PartialEq, Eq, Hash, Debug)] pub struct ArrayType { pub ty: TypeRefId, - // FIXME: This should be Ast pub len: ConstRef, } #[derive(Clone, PartialEq, Eq, Hash, Debug)] pub struct RefType { pub ty: TypeRefId, - pub lifetime: Option, + pub lifetime: Option, pub mutability: Mutability, } @@ -156,17 +132,19 @@ pub struct RefType { pub enum TypeRef { Never, Placeholder, - Tuple(EmptyOptimizedThinVec), + Tuple(ThinVec), Path(Path), RawPtr(TypeRefId, Mutability), + // FIXME: Unbox this once `Idx` has a niche, + // as `RefType` should shrink by 4 bytes then Reference(Box), - Array(Box), + Array(ArrayType), Slice(TypeRefId), /// A fn pointer. Last element of the vector is the return type. - Fn(FnType), + Fn(Box), ImplTrait(ThinVec), DynTrait(ThinVec), - Macro(AstId), + TypeParam(TypeParamId), Error, } @@ -175,97 +153,33 @@ const _: () = assert!(size_of::() == 16); pub type TypeRefId = Idx; -#[derive(Default, Clone, PartialEq, Eq, Debug, Hash)] -pub struct TypesMap { - pub(crate) types: Arena, -} - -impl TypesMap { - pub const EMPTY: &TypesMap = &TypesMap { types: Arena::new() }; - - pub(crate) fn shrink_to_fit(&mut self) { - let TypesMap { types } = self; - types.shrink_to_fit(); - } -} - -impl Index for TypesMap { - type Output = TypeRef; - - #[inline] - fn index(&self, index: TypeRefId) -> &Self::Output { - &self.types[index] - } -} - -impl Index for TypesMap { - type Output = Path; - - #[inline] - fn index(&self, index: PathId) -> &Self::Output { - let TypeRef::Path(path) = &self[index.type_ref()] else { - unreachable!("`PathId` always points to `TypeRef::Path`"); - }; - path - } -} - -pub type TypePtr = AstPtr; -pub type TypeSource = InFile; - -#[derive(Default, Clone, PartialEq, Eq, Debug, Hash)] -pub struct TypesSourceMap { - pub(crate) types_map_back: ArenaMap, -} - -impl TypesSourceMap { - pub const EMPTY: Self = Self { types_map_back: ArenaMap::new() }; - - pub fn type_syntax(&self, id: TypeRefId) -> Result { - self.types_map_back.get(id).cloned().ok_or(SyntheticSyntax) - } - - pub(crate) fn shrink_to_fit(&mut self) { - let TypesSourceMap { types_map_back } = self; - types_map_back.shrink_to_fit(); - } -} +pub type LifetimeRefId = Idx; #[derive(Clone, PartialEq, Eq, Hash, Debug)] -pub struct LifetimeRef { - pub name: Name, -} - -impl LifetimeRef { - pub(crate) fn new_name(name: Name) -> Self { - LifetimeRef { name } - } - - pub(crate) fn new(lifetime: &ast::Lifetime) -> Self { - LifetimeRef { name: Name::new_lifetime(lifetime) } - } - - pub fn missing() -> LifetimeRef { - LifetimeRef { name: Name::missing() } - } +pub enum LifetimeRef { + Named(Name), + Static, + Placeholder, + Param(LifetimeParamId), + Error, } #[derive(Clone, PartialEq, Eq, Hash, Debug)] pub enum TypeBound { Path(PathId, TraitBoundModifier), - ForLifetime(Box<[Name]>, PathId), - Lifetime(LifetimeRef), - Use(Box<[UseArgRef]>), + ForLifetime(ThinVec, PathId), + Lifetime(LifetimeRefId), + Use(ThinVec), Error, } #[cfg(target_pointer_width = "64")] -const _: [(); 24] = [(); size_of::()]; +const _: [(); 16] = [(); size_of::()]; #[derive(Clone, PartialEq, Eq, Hash, Debug)] pub enum UseArgRef { Name(Name), - Lifetime(LifetimeRef), + Lifetime(LifetimeRefId), } /// A modifier on a bound, currently this is only used for `?Sized`, where the @@ -277,124 +191,19 @@ pub enum TraitBoundModifier { } impl TypeRef { - /// Converts an `ast::TypeRef` to a `hir::TypeRef`. - pub fn from_ast(ctx: &mut LowerCtx<'_>, node: ast::Type) -> TypeRefId { - let ty = match &node { - ast::Type::ParenType(inner) => return TypeRef::from_ast_opt(ctx, inner.ty()), - ast::Type::TupleType(inner) => TypeRef::Tuple(EmptyOptimizedThinVec::from_iter( - Vec::from_iter(inner.fields().map(|it| TypeRef::from_ast(ctx, it))), - )), - ast::Type::NeverType(..) => TypeRef::Never, - ast::Type::PathType(inner) => { - // FIXME: Use `Path::from_src` - inner - .path() - .and_then(|it| ctx.lower_path(it)) - .map(TypeRef::Path) - .unwrap_or(TypeRef::Error) - } - ast::Type::PtrType(inner) => { - let inner_ty = TypeRef::from_ast_opt(ctx, inner.ty()); - let mutability = Mutability::from_mutable(inner.mut_token().is_some()); - TypeRef::RawPtr(inner_ty, mutability) - } - ast::Type::ArrayType(inner) => { - let len = ConstRef::from_const_arg(ctx, inner.const_arg()); - TypeRef::Array(Box::new(ArrayType { - ty: TypeRef::from_ast_opt(ctx, inner.ty()), - len, - })) - } - ast::Type::SliceType(inner) => TypeRef::Slice(TypeRef::from_ast_opt(ctx, inner.ty())), - ast::Type::RefType(inner) => { - let inner_ty = TypeRef::from_ast_opt(ctx, inner.ty()); - let lifetime = inner.lifetime().map(|lt| LifetimeRef::new(<)); - let mutability = Mutability::from_mutable(inner.mut_token().is_some()); - TypeRef::Reference(Box::new(RefType { ty: inner_ty, lifetime, mutability })) - } - ast::Type::InferType(_inner) => TypeRef::Placeholder, - ast::Type::FnPtrType(inner) => { - let ret_ty = inner - .ret_type() - .and_then(|rt| rt.ty()) - .map(|it| TypeRef::from_ast(ctx, it)) - .unwrap_or_else(|| ctx.alloc_type_ref_desugared(TypeRef::unit())); - let mut is_varargs = false; - let mut params = if let Some(pl) = inner.param_list() { - if let Some(param) = pl.params().last() { - is_varargs = param.dotdotdot_token().is_some(); - } - - pl.params() - .map(|it| { - let type_ref = TypeRef::from_ast_opt(ctx, it.ty()); - let name = match it.pat() { - Some(ast::Pat::IdentPat(it)) => Some( - it.name().map(|nr| nr.as_name()).unwrap_or_else(Name::missing), - ), - _ => None, - }; - (name, type_ref) - }) - .collect() - } else { - Vec::with_capacity(1) - }; - fn lower_abi(abi: ast::Abi) -> Symbol { - match abi.abi_string() { - Some(tok) => Symbol::intern(tok.text_without_quotes()), - // `extern` default to be `extern "C"`. - _ => sym::C.clone(), - } - } - - let abi = inner.abi().map(lower_abi); - params.push((None, ret_ty)); - TypeRef::Fn(FnType::new(is_varargs, inner.unsafe_token().is_some(), abi, params)) - } - // for types are close enough for our purposes to the inner type for now... - ast::Type::ForType(inner) => return TypeRef::from_ast_opt(ctx, inner.ty()), - ast::Type::ImplTraitType(inner) => { - if ctx.outer_impl_trait() { - // Disallow nested impl traits - TypeRef::Error - } else { - ctx.with_outer_impl_trait_scope(true, |ctx| { - TypeRef::ImplTrait(type_bounds_from_ast(ctx, inner.type_bound_list())) - }) - } - } - ast::Type::DynTraitType(inner) => { - TypeRef::DynTrait(type_bounds_from_ast(ctx, inner.type_bound_list())) - } - ast::Type::MacroType(mt) => match mt.macro_call() { - Some(mc) => TypeRef::Macro(ctx.ast_id(&mc)), - None => TypeRef::Error, - }, - }; - ctx.alloc_type_ref(ty, AstPtr::new(&node)) - } - - pub(crate) fn from_ast_opt(ctx: &mut LowerCtx<'_>, node: Option) -> TypeRefId { - match node { - Some(node) => TypeRef::from_ast(ctx, node), - None => ctx.alloc_error_type(), - } - } - pub(crate) fn unit() -> TypeRef { - TypeRef::Tuple(EmptyOptimizedThinVec::empty()) + TypeRef::Tuple(ThinVec::new()) } - pub fn walk(this: TypeRefId, map: &TypesMap, f: &mut impl FnMut(&TypeRef)) { + pub fn walk(this: TypeRefId, map: &ExpressionStore, f: &mut impl FnMut(&TypeRef)) { go(this, f, map); - fn go(type_ref: TypeRefId, f: &mut impl FnMut(&TypeRef), map: &TypesMap) { + fn go(type_ref: TypeRefId, f: &mut impl FnMut(&TypeRef), map: &ExpressionStore) { let type_ref = &map[type_ref]; f(type_ref); match type_ref { TypeRef::Fn(fn_) => { - fn_.params().iter().for_each(|&(_, param_type)| go(param_type, f, map)) + fn_.params.iter().for_each(|&(_, param_type)| go(param_type, f, map)) } TypeRef::Tuple(types) => types.iter().for_each(|&t| go(t, f, map)), TypeRef::RawPtr(type_ref, _) | TypeRef::Slice(type_ref) => go(*type_ref, f, map), @@ -411,11 +220,11 @@ impl TypeRef { } } TypeRef::Path(path) => go_path(path, f, map), - TypeRef::Never | TypeRef::Placeholder | TypeRef::Macro(_) | TypeRef::Error => {} + TypeRef::Never | TypeRef::Placeholder | TypeRef::Error | TypeRef::TypeParam(_) => {} }; } - fn go_path(path: &Path, f: &mut impl FnMut(&TypeRef), map: &TypesMap) { + fn go_path(path: &Path, f: &mut impl FnMut(&TypeRef), map: &ExpressionStore) { if let Some(type_ref) = path.type_anchor() { go(type_ref, f, map); } @@ -448,71 +257,8 @@ impl TypeRef { } } -pub(crate) fn type_bounds_from_ast( - lower_ctx: &mut LowerCtx<'_>, - type_bounds_opt: Option, -) -> ThinVec { - if let Some(type_bounds) = type_bounds_opt { - ThinVec::from_iter(Vec::from_iter( - type_bounds.bounds().map(|it| TypeBound::from_ast(lower_ctx, it)), - )) - } else { - ThinVec::from_iter([]) - } -} - impl TypeBound { - pub(crate) fn from_ast(ctx: &mut LowerCtx<'_>, node: ast::TypeBound) -> Self { - let mut lower_path_type = |path_type: &ast::PathType| ctx.lower_path(path_type.path()?); - - match node.kind() { - ast::TypeBoundKind::PathType(path_type) => { - let m = match node.question_mark_token() { - Some(_) => TraitBoundModifier::Maybe, - None => TraitBoundModifier::None, - }; - lower_path_type(&path_type) - .map(|p| { - TypeBound::Path(ctx.alloc_path(p, AstPtr::new(&path_type).upcast()), m) - }) - .unwrap_or(TypeBound::Error) - } - ast::TypeBoundKind::ForType(for_type) => { - let lt_refs = match for_type.generic_param_list() { - Some(gpl) => gpl - .lifetime_params() - .flat_map(|lp| lp.lifetime().map(|lt| Name::new_lifetime(<))) - .collect(), - None => Box::default(), - }; - let path = for_type.ty().and_then(|ty| match &ty { - ast::Type::PathType(path_type) => lower_path_type(path_type).map(|p| (p, ty)), - _ => None, - }); - match path { - Some((p, ty)) => { - TypeBound::ForLifetime(lt_refs, ctx.alloc_path(p, AstPtr::new(&ty))) - } - None => TypeBound::Error, - } - } - ast::TypeBoundKind::Use(gal) => TypeBound::Use( - gal.use_bound_generic_args() - .map(|p| match p { - ast::UseBoundGenericArg::Lifetime(l) => { - UseArgRef::Lifetime(LifetimeRef::new(&l)) - } - ast::UseBoundGenericArg::NameRef(n) => UseArgRef::Name(n.as_name()), - }) - .collect(), - ), - ast::TypeBoundKind::Lifetime(lifetime) => { - TypeBound::Lifetime(LifetimeRef::new(&lifetime)) - } - } - } - - pub fn as_path<'a>(&self, map: &'a TypesMap) -> Option<(&'a Path, TraitBoundModifier)> { + pub fn as_path<'a>(&self, map: &'a ExpressionStore) -> Option<(&'a Path, TraitBoundModifier)> { match self { &TypeBound::Path(p, m) => Some((&map[p], m)), &TypeBound::ForLifetime(_, p) => Some((&map[p], TraitBoundModifier::None)), @@ -521,90 +267,9 @@ impl TypeBound { } } -#[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub enum ConstRef { - Scalar(Box), - Path(Name), - Complex(AstId), -} - -impl ConstRef { - pub(crate) fn from_const_arg(lower_ctx: &LowerCtx<'_>, arg: Option) -> Self { - if let Some(arg) = arg { - if let Some(expr) = arg.expr() { - return Self::from_expr(expr, Some(lower_ctx.ast_id(&arg))); - } - } - Self::Scalar(Box::new(LiteralConstRef::Unknown)) - } - - pub(crate) fn from_const_param( - lower_ctx: &LowerCtx<'_>, - param: &ast::ConstParam, - ) -> Option { - param.default_val().map(|default| Self::from_const_arg(lower_ctx, Some(default))) - } - - pub fn display<'a>( - &'a self, - db: &'a dyn ExpandDatabase, - edition: Edition, - ) -> impl fmt::Display + 'a { - struct Display<'a>(&'a dyn ExpandDatabase, &'a ConstRef, Edition); - impl fmt::Display for Display<'_> { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match self.1 { - ConstRef::Scalar(s) => s.fmt(f), - ConstRef::Path(n) => n.display(self.0, self.2).fmt(f), - ConstRef::Complex(_) => f.write_str("{const}"), - } - } - } - Display(db, self, edition) - } - - // We special case literals and single identifiers, to speed up things. - fn from_expr(expr: ast::Expr, ast_id: Option>) -> Self { - fn is_path_ident(p: &ast::PathExpr) -> bool { - let Some(path) = p.path() else { - return false; - }; - if path.coloncolon_token().is_some() { - return false; - } - if let Some(s) = path.segment() { - if s.coloncolon_token().is_some() || s.generic_arg_list().is_some() { - return false; - } - } - true - } - match expr { - ast::Expr::PathExpr(p) if is_path_ident(&p) => { - match p.path().and_then(|it| it.segment()).and_then(|it| it.name_ref()) { - Some(it) => Self::Path(it.as_name()), - None => Self::Scalar(Box::new(LiteralConstRef::Unknown)), - } - } - ast::Expr::Literal(literal) => Self::Scalar(Box::new(match literal.kind() { - ast::LiteralKind::IntNumber(num) => { - num.value().map(LiteralConstRef::UInt).unwrap_or(LiteralConstRef::Unknown) - } - ast::LiteralKind::Char(c) => { - c.value().map(LiteralConstRef::Char).unwrap_or(LiteralConstRef::Unknown) - } - ast::LiteralKind::Bool(f) => LiteralConstRef::Bool(f), - _ => LiteralConstRef::Unknown, - })), - _ => { - if let Some(ast_id) = ast_id { - Self::Complex(ast_id) - } else { - Self::Scalar(Box::new(LiteralConstRef::Unknown)) - } - } - } - } +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +pub struct ConstRef { + pub expr: ExprId, } /// A literal constant value diff --git a/src/tools/rust-analyzer/crates/hir-def/src/import_map.rs b/src/tools/rust-analyzer/crates/hir-def/src/import_map.rs index d43776b8a66ad..db571f045d740 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/import_map.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/import_map.rs @@ -2,22 +2,22 @@ use std::fmt; -use base_db::CrateId; -use fst::{raw::IndexedValue, Automaton, Streamer}; +use base_db::Crate; +use fst::{Automaton, Streamer, raw::IndexedValue}; use hir_expand::name::Name; use itertools::Itertools; use rustc_hash::FxHashSet; use smallvec::SmallVec; use span::Edition; -use stdx::{format_to, TupleExt}; +use stdx::format_to; use triomphe::Arc; use crate::{ + AssocItemId, AttrDefId, Complete, FxIndexMap, ModuleDefId, ModuleId, TraitId, db::DefDatabase, item_scope::{ImportOrExternCrate, ItemInNs}, nameres::DefMap, visibility::Visibility, - AssocItemId, FxIndexMap, ModuleDefId, ModuleId, TraitId, }; /// Item import details stored in the `ImportMap`. @@ -31,6 +31,8 @@ pub struct ImportInfo { pub is_doc_hidden: bool, /// Whether this item is annotated with `#[unstable(..)]`. pub is_unstable: bool, + /// The value of `#[rust_analyzer::completions(...)]`, if exists. + pub complete: Complete, } /// A map from publicly exported items to its name. @@ -66,19 +68,14 @@ impl ImportMap { for (k, v) in self.item_to_info_map.iter() { format_to!(out, "{:?} ({:?}) -> ", k, v.1); for v in &v.0 { - format_to!( - out, - "{}:{:?}, ", - v.name.display(db.upcast(), Edition::CURRENT), - v.container - ); + format_to!(out, "{}:{:?}, ", v.name.display(db, Edition::CURRENT), v.container); } format_to!(out, "\n"); } out } - pub(crate) fn import_map_query(db: &dyn DefDatabase, krate: CrateId) -> Arc { + pub(crate) fn import_map_query(db: &dyn DefDatabase, krate: Crate) -> Arc { let _p = tracing::info_span!("import_map_query").entered(); let map = Self::collect_import_map(db, krate); @@ -129,7 +126,7 @@ impl ImportMap { self.item_to_info_map.get(&item).map(|(info, _)| &**info) } - fn collect_import_map(db: &dyn DefDatabase, krate: CrateId) -> ImportMapIndex { + fn collect_import_map(db: &dyn DefDatabase, krate: Crate) -> ImportMapIndex { let _p = tracing::info_span!("collect_import_map").entered(); let def_map = db.crate_def_map(krate); @@ -155,11 +152,7 @@ impl ImportMap { let visible_items = mod_data.scope.entries().filter_map(|(name, per_ns)| { let per_ns = per_ns.filter_visibility(|vis| vis == Visibility::Public); - if per_ns.is_none() { - None - } else { - Some((name, per_ns)) - } + if per_ns.is_none() { None } else { Some((name, per_ns)) } }); for (name, per_ns) in visible_items { @@ -176,16 +169,22 @@ impl ImportMap { ItemInNs::Macros(id) => Some(id.into()), } }; - let (is_doc_hidden, is_unstable) = attr_id.map_or((false, false), |attr_id| { - let attrs = db.attrs(attr_id); - (attrs.has_doc_hidden(), attrs.is_unstable()) - }); + let (is_doc_hidden, is_unstable, do_not_complete) = match attr_id { + None => (false, false, Complete::Yes), + Some(attr_id) => { + let attrs = db.attrs(attr_id); + let do_not_complete = + Complete::extract(matches!(attr_id, AttrDefId::TraitId(_)), &attrs); + (attrs.has_doc_hidden(), attrs.is_unstable(), do_not_complete) + } + }; let import_info = ImportInfo { name: name.clone(), container: module, is_doc_hidden, is_unstable, + complete: do_not_complete, }; if let Some(ModuleDefId::TraitId(tr)) = item.as_module_def_id() { @@ -222,7 +221,7 @@ impl ImportMap { trait_import_info: &ImportInfo, ) { let _p = tracing::info_span!("collect_trait_assoc_items").entered(); - for &(ref assoc_item_name, item) in &db.trait_data(tr).items { + for &(ref assoc_item_name, item) in &db.trait_items(tr).items { let module_def_id = match item { AssocItemId::FunctionId(f) => ModuleDefId::from(f), AssocItemId::ConstId(c) => ModuleDefId::from(c), @@ -239,12 +238,17 @@ impl ImportMap { ItemInNs::Values(module_def_id) }; - let attrs = &db.attrs(item.into()); + let attr_id = item.into(); + let attrs = &db.attrs(attr_id); + let item_do_not_complete = Complete::extract(false, attrs); + let do_not_complete = + Complete::for_trait_item(trait_import_info.complete, item_do_not_complete); let assoc_item_info = ImportInfo { container: trait_import_info.container, name: assoc_item_name.clone(), is_doc_hidden: attrs.has_doc_hidden(), is_unstable: attrs.is_unstable(), + complete: do_not_complete, }; let (infos, _) = @@ -400,15 +404,13 @@ impl Query { /// This returns a list of items that could be imported from dependencies of `krate`. pub fn search_dependencies( db: &dyn DefDatabase, - krate: CrateId, + krate: Crate, query: &Query, -) -> FxHashSet { +) -> FxHashSet<(ItemInNs, Complete)> { let _p = tracing::info_span!("search_dependencies", ?query).entered(); - let graph = db.crate_graph(); - let import_maps: Vec<_> = - graph[krate].dependencies.iter().map(|dep| db.import_map(dep.crate_id)).collect(); + krate.data(db).dependencies.iter().map(|dep| db.import_map(dep.crate_id)).collect(); let mut op = fst::map::OpBuilder::new(); @@ -445,7 +447,7 @@ fn search_maps( import_maps: &[Arc], mut stream: fst::map::Union<'_>, query: &Query, -) -> FxHashSet { +) -> FxHashSet<(ItemInNs, Complete)> { let mut res = FxHashSet::default(); while let Some((_, indexed_values)) = stream.next() { for &IndexedValue { index: import_map_idx, value } in indexed_values { @@ -465,8 +467,9 @@ fn search_maps( }) .filter(|&(_, info)| { query.search_mode.check(&query.query, query.case_sensitive, info.name.as_str()) - }); - res.extend(iter.map(TupleExt::head)); + }) + .map(|(item, import_info)| (item, import_info.complete)); + res.extend(iter); } } @@ -475,11 +478,11 @@ fn search_maps( #[cfg(test)] mod tests { - use base_db::{SourceDatabase, Upcast}; - use expect_test::{expect, Expect}; + use base_db::RootQueryDb; + use expect_test::{Expect, expect}; use test_fixture::WithFixture; - use crate::{test_db::TestDB, ItemContainerId, Lookup}; + use crate::{ItemContainerId, Lookup, test_db::TestDB}; use super::*; @@ -512,21 +515,23 @@ mod tests { expect: Expect, ) { let db = TestDB::with_files(ra_fixture); - let crate_graph = db.crate_graph(); - let krate = crate_graph + let all_crates = db.all_crates(); + let krate = all_crates .iter() + .copied() .find(|&krate| { - crate_graph[krate] + krate + .extra_data(&db) .display_name .as_ref() .is_some_and(|it| it.crate_name().as_str() == crate_name) }) .expect("could not find crate"); - let actual = search_dependencies(db.upcast(), krate, &query) + let actual = search_dependencies(&db, krate, &query) .into_iter() - .filter_map(|dependency| { - let dependency_krate = dependency.krate(db.upcast())?; + .filter_map(|(dependency, _)| { + let dependency_krate = dependency.krate(&db)?; let dependency_imports = db.import_map(dependency_krate); let (path, mark) = match assoc_item_path(&db, &dependency_imports, dependency) { @@ -545,7 +550,7 @@ mod tests { Some(format!( "{}::{} ({})\n", - crate_graph[dependency_krate].display_name.as_ref()?, + dependency_krate.extra_data(&db).display_name.as_ref()?, path, mark )) @@ -575,8 +580,8 @@ mod tests { let trait_info = dependency_imports.import_info_for(ItemInNs::Types(trait_id.into()))?; - let trait_data = db.trait_data(trait_id); - let (assoc_item_name, _) = trait_data + let trait_items = db.trait_items(trait_id); + let (assoc_item_name, _) = trait_items .items .iter() .find(|(_, assoc_item_id)| &dependency_assoc_item_id == assoc_item_id)?; @@ -584,23 +589,24 @@ mod tests { Some(format!( "{}::{}", render_path(db, &trait_info[0]), - assoc_item_name.display(db.upcast(), Edition::CURRENT) + assoc_item_name.display(db, Edition::CURRENT) )) } fn check(#[rust_analyzer::rust_fixture] ra_fixture: &str, expect: Expect) { let db = TestDB::with_files(ra_fixture); - let crate_graph = db.crate_graph(); + let all_crates = db.all_crates(); - let actual = crate_graph + let actual = all_crates .iter() + .copied() .filter_map(|krate| { - let cdata = &crate_graph[krate]; + let cdata = &krate.extra_data(&db); let name = cdata.display_name.as_ref()?; let map = db.import_map(krate); - Some(format!("{name}:\n{}\n", map.fmt_for_test(db.upcast()))) + Some(format!("{name}:\n{}\n", map.fmt_for_test(&db))) }) .sorted() .collect::(); @@ -623,7 +629,7 @@ mod tests { module = parent; } - segments.iter().rev().map(|it| it.display(db.upcast(), Edition::CURRENT)).join("::") + segments.iter().rev().map(|it| it.display(db, Edition::CURRENT)).join("::") } #[test] diff --git a/src/tools/rust-analyzer/crates/hir-def/src/item_scope.rs b/src/tools/rust-analyzer/crates/hir-def/src/item_scope.rs index 0ca1eb9bcfe37..5362c0588dbe9 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/item_scope.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/item_scope.rs @@ -3,23 +3,23 @@ use std::sync::LazyLock; -use base_db::CrateId; -use hir_expand::{attrs::AttrId, db::ExpandDatabase, name::Name, AstId, MacroCallId}; +use base_db::Crate; +use hir_expand::{AstId, MacroCallId, attrs::AttrId, db::ExpandDatabase, name::Name}; use indexmap::map::Entry; use itertools::Itertools; use la_arena::Idx; use rustc_hash::{FxHashMap, FxHashSet}; -use smallvec::{smallvec, SmallVec}; +use smallvec::{SmallVec, smallvec}; use span::Edition; use stdx::format_to; use syntax::ast; use crate::{ + AdtId, BuiltinType, ConstId, ExternBlockId, ExternCrateId, FxIndexMap, HasModule, ImplId, + LocalModuleId, Lookup, MacroId, ModuleDefId, ModuleId, TraitId, UseId, db::DefDatabase, per_ns::{Item, MacrosItem, PerNs, TypesItem, ValuesItem}, visibility::{Visibility, VisibilityExplicitness}, - AdtId, BuiltinType, ConstId, ExternBlockId, ExternCrateId, FxIndexMap, HasModule, ImplId, - LocalModuleId, Lookup, MacroId, ModuleDefId, ModuleId, TraitId, UseId, }; #[derive(Debug, Default)] @@ -167,7 +167,7 @@ pub struct ItemScope { // the resolutions of the imports of this scope use_imports_types: FxHashMap, use_imports_values: FxHashMap, - use_imports_macros: FxHashMap, + use_imports_macros: FxHashMap, use_decls: Vec, extern_crate_decls: Vec, @@ -242,7 +242,7 @@ impl ItemScope { self.types.iter().map(|(n, &i)| (n, i)) } - pub fn macros(&self) -> impl Iterator)> + '_ { + pub fn macros(&self) -> impl Iterator)> + '_ { self.macros.iter().map(|(n, &i)| (n, i)) } @@ -250,9 +250,9 @@ impl ItemScope { self.use_imports_types .keys() .copied() + .chain(self.use_imports_macros.keys().copied()) .filter_map(ImportOrExternCrate::import_or_glob) .chain(self.use_imports_values.keys().copied()) - .chain(self.use_imports_macros.keys().copied()) .filter_map(ImportOrGlob::into_import) .sorted() .dedup() @@ -263,7 +263,7 @@ impl ItemScope { let mut def_map; let mut scope = self; - while let Some(&m) = scope.use_imports_macros.get(&ImportOrGlob::Import(import)) { + while let Some(&m) = scope.use_imports_macros.get(&ImportOrExternCrate::Import(import)) { match m { ImportOrDef::Import(i) => { let module_id = i.use_.lookup(db).container; @@ -358,7 +358,7 @@ impl ItemScope { } /// Get a name from current module scope, legacy macros are not included - pub(crate) fn get(&self, name: &Name) -> PerNs { + pub fn get(&self, name: &Name) -> PerNs { PerNs { types: self.types.get(name).copied(), values: self.values.get(name).copied(), @@ -453,7 +453,7 @@ impl ItemScope { ) } - pub(crate) fn macro_invoc(&self, call: AstId) -> Option { + pub fn macro_invoc(&self, call: AstId) -> Option { self.macro_invocations.get(&call).copied() } @@ -682,7 +682,6 @@ impl ItemScope { } _ => _ = glob_imports.macros.remove(&lookup), } - let import = import.and_then(ImportOrExternCrate::import_or_glob); let prev = std::mem::replace(&mut fld.import, import); if let Some(import) = import { self.use_imports_macros.insert( @@ -698,7 +697,6 @@ impl ItemScope { { if glob_imports.macros.remove(&lookup) { cov_mark::hit!(import_shadowed); - let import = import.and_then(ImportOrExternCrate::import_or_glob); let prev = std::mem::replace(&mut fld.import, import); if let Some(import) = import { self.use_imports_macros.insert( @@ -783,8 +781,9 @@ impl ItemScope { if let Some(Item { import, .. }) = def.macros { buf.push_str(" m"); match import { - Some(ImportOrGlob::Import(_)) => buf.push('i'), - Some(ImportOrGlob::Glob(_)) => buf.push('g'), + Some(ImportOrExternCrate::Import(_)) => buf.push('i'), + Some(ImportOrExternCrate::Glob(_)) => buf.push('g'), + Some(ImportOrExternCrate::ExternCrate(_)) => buf.push('e'), None => (), } } @@ -893,9 +892,7 @@ impl PerNs { ModuleDefId::TraitAliasId(_) => PerNs::types(def, v, import), ModuleDefId::TypeAliasId(_) => PerNs::types(def, v, import), ModuleDefId::BuiltinType(_) => PerNs::types(def, v, import), - ModuleDefId::MacroId(mac) => { - PerNs::macros(mac, v, import.and_then(ImportOrExternCrate::import_or_glob)) - } + ModuleDefId::MacroId(mac) => PerNs::macros(mac, v, import), } } } @@ -916,7 +913,7 @@ impl ItemInNs { } /// Returns the crate defining this item (or `None` if `self` is built-in). - pub fn krate(&self, db: &dyn DefDatabase) -> Option { + pub fn krate(&self, db: &dyn DefDatabase) -> Option { match self { ItemInNs::Types(id) | ItemInNs::Values(id) => id.module(db).map(|m| m.krate), ItemInNs::Macros(id) => Some(id.module(db).krate), diff --git a/src/tools/rust-analyzer/crates/hir-def/src/item_tree.rs b/src/tools/rust-analyzer/crates/hir-def/src/item_tree.rs index 382afbcb1dd4f..1b97eb72b6f20 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/item_tree.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/item_tree.rs @@ -44,27 +44,23 @@ use std::{ }; use ast::{AstNode, StructKind}; -use base_db::CrateId; -use either::Either; -use hir_expand::{attrs::RawAttrs, name::Name, ExpandTo, HirFileId, InFile}; +use base_db::Crate; +use hir_expand::{ + ExpandTo, HirFileId, InFile, + attrs::RawAttrs, + mod_path::{ModPath, PathKind}, + name::Name, +}; use intern::{Interned, Symbol}; use la_arena::{Arena, Idx, RawIdx}; use rustc_hash::FxHashMap; use smallvec::SmallVec; -use span::{AstIdNode, Edition, FileAstId, SyntaxContextId}; +use span::{AstIdNode, Edition, FileAstId, SyntaxContext}; use stdx::never; -use syntax::{ast, match_ast, SyntaxKind}; +use syntax::{SyntaxKind, ast, match_ast}; use triomphe::Arc; -use crate::{ - attr::Attrs, - db::DefDatabase, - generics::GenericParams, - path::{GenericArgs, ImportAlias, ModPath, Path, PathKind}, - type_ref::{Mutability, TraitRef, TypeBound, TypeRefId, TypesMap, TypesSourceMap}, - visibility::{RawVisibility, VisibilityExplicitness}, - BlockId, LocalLifetimeParamId, LocalTypeOrConstParamId, Lookup, -}; +use crate::{BlockId, Lookup, attr::Attrs, db::DefDatabase}; #[derive(Copy, Clone, Eq, PartialEq)] pub struct RawVisibilityId(u32); @@ -100,23 +96,16 @@ pub struct ItemTree { impl ItemTree { pub(crate) fn file_item_tree_query(db: &dyn DefDatabase, file_id: HirFileId) -> Arc { - db.file_item_tree_with_source_map(file_id).0 - } - - pub(crate) fn file_item_tree_with_source_map_query( - db: &dyn DefDatabase, - file_id: HirFileId, - ) -> (Arc, Arc) { let _p = tracing::info_span!("file_item_tree_query", ?file_id).entered(); - static EMPTY: OnceLock<(Arc, Arc)> = OnceLock::new(); + static EMPTY: OnceLock> = OnceLock::new(); let ctx = lower::Ctx::new(db, file_id); let syntax = db.parse_or_expand(file_id); let mut top_attrs = None; - let (mut item_tree, source_maps) = match_ast! { + let mut item_tree = match_ast! { match syntax { ast::SourceFile(file) => { - top_attrs = Some(RawAttrs::new(db.upcast(), &file, ctx.span_map())); + top_attrs = Some(RawAttrs::new(db, &file, ctx.span_map())); ctx.lower_module_items(&file) }, ast::MacroItems(items) => { @@ -143,55 +132,42 @@ impl ItemTree { { EMPTY .get_or_init(|| { - ( - Arc::new(ItemTree { - top_level: SmallVec::new_const(), - attrs: FxHashMap::default(), - data: None, - }), - Arc::default(), - ) + Arc::new(ItemTree { + top_level: SmallVec::new_const(), + attrs: FxHashMap::default(), + data: None, + }) }) .clone() } else { item_tree.shrink_to_fit(); - (Arc::new(item_tree), Arc::new(source_maps)) + Arc::new(item_tree) } } pub(crate) fn block_item_tree_query(db: &dyn DefDatabase, block: BlockId) -> Arc { - db.block_item_tree_with_source_map(block).0 - } - - pub(crate) fn block_item_tree_with_source_map_query( - db: &dyn DefDatabase, - block: BlockId, - ) -> (Arc, Arc) { let _p = tracing::info_span!("block_item_tree_query", ?block).entered(); - static EMPTY: OnceLock<(Arc, Arc)> = OnceLock::new(); + static EMPTY: OnceLock> = OnceLock::new(); let loc = block.lookup(db); - let block = loc.ast_id.to_node(db.upcast()); + let block = loc.ast_id.to_node(db); let ctx = lower::Ctx::new(db, loc.ast_id.file_id); - let (mut item_tree, source_maps) = ctx.lower_block(&block); + let mut item_tree = ctx.lower_block(&block); if item_tree.data.is_none() && item_tree.top_level.is_empty() && item_tree.attrs.is_empty() { EMPTY .get_or_init(|| { - ( - Arc::new(ItemTree { - top_level: SmallVec::new_const(), - attrs: FxHashMap::default(), - data: None, - }), - Arc::default(), - ) + Arc::new(ItemTree { + top_level: SmallVec::new_const(), + attrs: FxHashMap::default(), + data: None, + }) }) .clone() } else { item_tree.shrink_to_fit(); - (Arc::new(item_tree), Arc::new(source_maps)) + Arc::new(item_tree) } } @@ -202,8 +178,8 @@ impl ItemTree { } /// Returns the inner attributes of the source file. - pub fn top_level_attrs(&self, db: &dyn DefDatabase, krate: CrateId) -> Attrs { - Attrs::filter( + pub fn top_level_attrs(&self, db: &dyn DefDatabase, krate: Crate) -> Attrs { + Attrs::expand_cfg_attr( db, krate, self.attrs.get(&AttrOwner::TopLevel).unwrap_or(&RawAttrs::EMPTY).clone(), @@ -214,8 +190,24 @@ impl ItemTree { self.attrs.get(&of).unwrap_or(&RawAttrs::EMPTY) } - pub(crate) fn attrs(&self, db: &dyn DefDatabase, krate: CrateId, of: AttrOwner) -> Attrs { - Attrs::filter(db, krate, self.raw_attrs(of).clone()) + pub(crate) fn attrs(&self, db: &dyn DefDatabase, krate: Crate, of: AttrOwner) -> Attrs { + Attrs::expand_cfg_attr(db, krate, self.raw_attrs(of).clone()) + } + + /// Returns a count of a few, expensive items. + /// + /// For more detail, see [`ItemTreeDataStats`]. + pub fn item_tree_stats(&self) -> ItemTreeDataStats { + match self.data { + Some(ref data) => ItemTreeDataStats { + traits: data.traits.len(), + impls: data.impls.len(), + mods: data.mods.len(), + macro_calls: data.macro_calls.len(), + macro_rules: data.macro_rules.len(), + }, + None => ItemTreeDataStats::default(), + } } pub fn pretty_print(&self, db: &dyn DefDatabase, edition: Edition) -> String { @@ -231,7 +223,10 @@ impl ItemTree { } fn shrink_to_fit(&mut self) { - if let Some(data) = &mut self.data { + let ItemTree { top_level, attrs, data } = self; + top_level.shrink_to_fit(); + attrs.shrink_to_fit(); + if let Some(data) = data { let ItemTreeData { uses, extern_crates, @@ -329,157 +324,12 @@ struct ItemTreeData { } #[derive(Default, Debug, Eq, PartialEq)] -pub struct ItemTreeSourceMaps { - all_concatenated: Box<[TypesSourceMap]>, - structs_offset: u32, - unions_offset: u32, - enum_generics_offset: u32, - variants_offset: u32, - consts_offset: u32, - statics_offset: u32, - trait_generics_offset: u32, - trait_alias_generics_offset: u32, - impls_offset: u32, - type_aliases_offset: u32, -} - -#[derive(Clone, Copy)] -pub struct GenericItemSourceMap<'a>(&'a [TypesSourceMap; 2]); - -impl<'a> GenericItemSourceMap<'a> { - #[inline] - pub fn item(self) -> &'a TypesSourceMap { - &self.0[0] - } - - #[inline] - pub fn generics(self) -> &'a TypesSourceMap { - &self.0[1] - } -} - -#[derive(Default, Debug, Eq, PartialEq)] -pub struct GenericItemSourceMapBuilder { - pub item: TypesSourceMap, - pub generics: TypesSourceMap, -} - -#[derive(Default, Debug, Eq, PartialEq)] -struct ItemTreeSourceMapsBuilder { - functions: Vec, - structs: Vec, - unions: Vec, - enum_generics: Vec, - variants: Vec, - consts: Vec, - statics: Vec, - trait_generics: Vec, - trait_alias_generics: Vec, - impls: Vec, - type_aliases: Vec, -} - -impl ItemTreeSourceMapsBuilder { - fn build(self) -> ItemTreeSourceMaps { - let ItemTreeSourceMapsBuilder { - functions, - structs, - unions, - enum_generics, - variants, - consts, - statics, - trait_generics, - trait_alias_generics, - impls, - type_aliases, - } = self; - let structs_offset = functions.len() as u32 * 2; - let unions_offset = structs_offset + (structs.len() as u32 * 2); - let enum_generics_offset = unions_offset + (unions.len() as u32 * 2); - let variants_offset = enum_generics_offset + (enum_generics.len() as u32); - let consts_offset = variants_offset + (variants.len() as u32); - let statics_offset = consts_offset + (consts.len() as u32); - let trait_generics_offset = statics_offset + (statics.len() as u32); - let trait_alias_generics_offset = trait_generics_offset + (trait_generics.len() as u32); - let impls_offset = trait_alias_generics_offset + (trait_alias_generics.len() as u32); - let type_aliases_offset = impls_offset + (impls.len() as u32 * 2); - let all_concatenated = generics_concat(functions) - .chain(generics_concat(structs)) - .chain(generics_concat(unions)) - .chain(enum_generics) - .chain(variants) - .chain(consts) - .chain(statics) - .chain(trait_generics) - .chain(trait_alias_generics) - .chain(generics_concat(impls)) - .chain(generics_concat(type_aliases)) - .collect(); - return ItemTreeSourceMaps { - all_concatenated, - structs_offset, - unions_offset, - enum_generics_offset, - variants_offset, - consts_offset, - statics_offset, - trait_generics_offset, - trait_alias_generics_offset, - impls_offset, - type_aliases_offset, - }; - - fn generics_concat( - source_maps: Vec, - ) -> impl Iterator { - source_maps.into_iter().flat_map(|it| [it.item, it.generics]) - } - } -} - -impl ItemTreeSourceMaps { - #[inline] - fn generic_item(&self, offset: u32, index: u32) -> GenericItemSourceMap<'_> { - GenericItemSourceMap( - self.all_concatenated[(offset + (index * 2)) as usize..][..2].try_into().unwrap(), - ) - } - - #[inline] - fn non_generic_item(&self, offset: u32, index: u32) -> &TypesSourceMap { - &self.all_concatenated[(offset + index) as usize] - } - - #[inline] - pub fn function(&self, index: FileItemTreeId) -> GenericItemSourceMap<'_> { - self.generic_item(0, index.0.into_raw().into_u32()) - } -} - -macro_rules! index_item_source_maps { - ( $( $name:ident; $field:ident[$tree_id:ident]; $fn:ident; $ret:ty, )* ) => { - impl ItemTreeSourceMaps { - $( - #[inline] - pub fn $name(&self, index: FileItemTreeId<$tree_id>) -> $ret { - self.$fn(self.$field, index.0.into_raw().into_u32()) - } - )* - } - }; -} -index_item_source_maps! { - strukt; structs_offset[Struct]; generic_item; GenericItemSourceMap<'_>, - union; unions_offset[Union]; generic_item; GenericItemSourceMap<'_>, - enum_generic; enum_generics_offset[Enum]; non_generic_item; &TypesSourceMap, - variant; variants_offset[Variant]; non_generic_item; &TypesSourceMap, - konst; consts_offset[Const]; non_generic_item; &TypesSourceMap, - statik; statics_offset[Static]; non_generic_item; &TypesSourceMap, - trait_generic; trait_generics_offset[Trait]; non_generic_item; &TypesSourceMap, - trait_alias_generic; trait_alias_generics_offset[TraitAlias]; non_generic_item; &TypesSourceMap, - impl_; impls_offset[Impl]; generic_item; GenericItemSourceMap<'_>, - type_alias; type_aliases_offset[TypeAlias]; generic_item; GenericItemSourceMap<'_>, +pub struct ItemTreeDataStats { + pub traits: usize, + pub impls: usize, + pub mods: usize, + pub macro_calls: usize, + pub macro_rules: usize, } #[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)] @@ -490,10 +340,8 @@ pub enum AttrOwner { TopLevel, Variant(FileItemTreeId), + // while not relevant to early name resolution, fields can contain visibility Field(FieldParent, ItemTreeFieldId), - Param(FileItemTreeId, ItemTreeParamId), - TypeOrConstParamData(GenericModItem, LocalTypeOrConstParamId), - LifetimeParamData(GenericModItem, LocalLifetimeParamId), } impl AttrOwner { @@ -506,10 +354,9 @@ impl AttrOwner { pub enum FieldParent { Struct(FileItemTreeId), Union(FileItemTreeId), - Variant(FileItemTreeId), + EnumVariant(FileItemTreeId), } -pub type ItemTreeParamId = Idx; pub type ItemTreeFieldId = Idx; macro_rules! from_attrs { @@ -536,9 +383,6 @@ pub trait ItemTreeNode: Clone { fn lookup(tree: &ItemTree, index: Idx) -> &Self; fn attr_owner(id: FileItemTreeId) -> AttrOwner; } -pub trait GenericsItemTreeNode: ItemTreeNode { - fn generic_params(&self) -> &Arc; -} pub struct FileItemTreeId(Idx); @@ -591,7 +435,7 @@ pub struct TreeId { } impl TreeId { - pub(crate) fn new(file: HirFileId, block: Option) -> Self { + pub fn new(file: HirFileId, block: Option) -> Self { Self { file, block } } @@ -602,16 +446,6 @@ impl TreeId { } } - pub fn item_tree_with_source_map( - &self, - db: &dyn DefDatabase, - ) -> (Arc, Arc) { - match self.block { - Some(block) => db.block_item_tree_with_source_map(block), - None => db.file_item_tree_with_source_map(self.file), - } - } - pub fn file_id(self) -> HirFileId { self.file } @@ -644,13 +478,6 @@ impl ItemTreeId { self.tree.item_tree(db) } - pub fn item_tree_with_source_map( - self, - db: &dyn DefDatabase, - ) -> (Arc, Arc) { - self.tree.item_tree_with_source_map(db) - } - pub fn resolved(self, db: &dyn DefDatabase, cb: impl FnOnce(&N) -> R) -> R where ItemTree: Index, Output = N>, @@ -682,7 +509,7 @@ impl Hash for ItemTreeId { } macro_rules! mod_items { - ( $( $typ:ident $(<$generic_params:ident>)? in $fld:ident -> $ast:ty ),+ $(,)? ) => { + ( $( $typ:ident in $fld:ident -> $ast:ty ),+ $(,)? ) => { #[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)] pub enum ModItem { $( @@ -690,16 +517,6 @@ macro_rules! mod_items { )+ } - #[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)] - pub enum GenericModItem { - $( - $( - #[cfg_attr(ignore_fragment, $generic_params)] - $typ(FileItemTreeId<$typ>), - )? - )+ - } - impl ModItem { pub fn ast_id(&self, tree: &ItemTree) -> FileAstId { match self { @@ -708,52 +525,12 @@ macro_rules! mod_items { } } - impl GenericModItem { - pub fn ast_id(&self, tree: &ItemTree) -> FileAstId { - match self { - $( - $( - #[cfg_attr(ignore_fragment, $generic_params)] - GenericModItem::$typ(it) => tree[it.index()].ast_id().upcast(), - )? - )+ - } - } - } - - impl From for ModItem { - fn from(id: GenericModItem) -> ModItem { - match id { - $( - $( - #[cfg_attr(ignore_fragment, $generic_params)] - GenericModItem::$typ(id) => ModItem::$typ(id), - )? - )+ - } - } - } - - impl From for AttrOwner { - fn from(t: GenericModItem) -> AttrOwner { - AttrOwner::ModItem(t.into()) - } - } - $( impl From> for ModItem { fn from(id: FileItemTreeId<$typ>) -> ModItem { ModItem::$typ(id) } } - $( - #[cfg_attr(ignore_fragment, $generic_params)] - impl From> for GenericModItem { - fn from(id: FileItemTreeId<$typ>) -> GenericModItem { - GenericModItem::$typ(id) - } - } - )? )+ $( @@ -780,14 +557,6 @@ macro_rules! mod_items { &self.data().$fld[index] } } - - $( - impl GenericsItemTreeNode for $typ { - fn generic_params(&self) -> &Arc { - &self.$generic_params - } - } - )? )+ }; } @@ -796,16 +565,16 @@ mod_items! { Use in uses -> ast::Use, ExternCrate in extern_crates -> ast::ExternCrate, ExternBlock in extern_blocks -> ast::ExternBlock, - Function in functions -> ast::Fn, - Struct in structs -> ast::Struct, - Union in unions -> ast::Union, - Enum in enums -> ast::Enum, + Function in functions -> ast::Fn, + Struct in structs -> ast::Struct, + Union in unions -> ast::Union, + Enum in enums -> ast::Enum, Const in consts -> ast::Const, Static in statics -> ast::Static, - Trait in traits -> ast::Trait, - TraitAlias in trait_aliases -> ast::TraitAlias, - Impl in impls -> ast::Impl, - TypeAlias in type_aliases -> ast::TypeAlias, + Trait in traits -> ast::Trait, + TraitAlias in trait_aliases -> ast::TraitAlias, + Impl in impls -> ast::Impl, + TypeAlias in type_aliases -> ast::TypeAlias, Mod in mods -> ast::Module, MacroCall in macro_calls -> ast::MacroCall, MacroRules in macro_rules -> ast::MacroRules, @@ -881,6 +650,34 @@ pub struct UseTree { kind: UseTreeKind, } +#[derive(Debug, Clone, PartialEq, Eq)] +pub enum ImportAlias { + /// Unnamed alias, as in `use Foo as _;` + Underscore, + /// Named alias + Alias(Name), +} + +impl ImportAlias { + pub fn display(&self, edition: Edition) -> impl fmt::Display + '_ { + ImportAliasDisplay { value: self, edition } + } +} + +struct ImportAliasDisplay<'a> { + value: &'a ImportAlias, + edition: Edition, +} + +impl fmt::Display for ImportAliasDisplay<'_> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self.value { + ImportAlias::Underscore => f.write_str("_"), + ImportAlias::Alias(name) => fmt::Display::fmt(&name.display_no_db(self.edition), f), + } + } +} + #[derive(Debug, Clone, Eq, PartialEq)] pub enum UseTreeKind { /// ```ignore @@ -921,66 +718,30 @@ pub struct ExternBlock { pub struct Function { pub name: Name, pub visibility: RawVisibilityId, - pub explicit_generic_params: Arc, - pub abi: Option, - pub params: Box<[Param]>, - pub ret_type: TypeRefId, pub ast_id: FileAstId, - pub types_map: Arc, - pub(crate) flags: FnFlags, -} - -#[derive(Debug, Clone, PartialEq, Eq)] -pub struct Param { - pub type_ref: Option, -} - -bitflags::bitflags! { - #[derive(Debug, Clone, Copy, Eq, PartialEq, Default)] - pub(crate) struct FnFlags: u16 { - const HAS_SELF_PARAM = 1 << 0; - const HAS_BODY = 1 << 1; - const HAS_DEFAULT_KW = 1 << 2; - const HAS_CONST_KW = 1 << 3; - const HAS_ASYNC_KW = 1 << 4; - const HAS_UNSAFE_KW = 1 << 5; - const IS_VARARGS = 1 << 6; - const HAS_SAFE_KW = 1 << 7; - /// The `#[target_feature]` attribute is necessary to check safety (with RFC 2396), - /// but keeping it for all functions will consume a lot of memory when there are - /// only very few functions with it. So we only encode its existence here, and lookup - /// it if needed. - const HAS_TARGET_FEATURE = 1 << 8; - const DEPRECATED_SAFE_2024 = 1 << 9; - } } #[derive(Debug, Clone, Eq, PartialEq)] pub struct Struct { pub name: Name, pub visibility: RawVisibilityId, - pub generic_params: Arc, pub fields: Box<[Field]>, pub shape: FieldsShape, pub ast_id: FileAstId, - pub types_map: Arc, } #[derive(Debug, Clone, Eq, PartialEq)] pub struct Union { pub name: Name, pub visibility: RawVisibilityId, - pub generic_params: Arc, pub fields: Box<[Field]>, pub ast_id: FileAstId, - pub types_map: Arc, } #[derive(Debug, Clone, Eq, PartialEq)] pub struct Enum { pub name: Name, pub visibility: RawVisibilityId, - pub generic_params: Arc, pub variants: Range>, pub ast_id: FileAstId, } @@ -991,7 +752,6 @@ pub struct Variant { pub fields: Box<[Field]>, pub shape: FieldsShape, pub ast_id: FileAstId, - pub types_map: Arc, } #[derive(Debug, Copy, Clone, PartialEq, Eq)] @@ -1001,12 +761,38 @@ pub enum FieldsShape { Unit, } +/// Visibility of an item, not yet resolved. +#[derive(Debug, Clone, PartialEq, Eq)] +pub enum RawVisibility { + /// `pub(in module)`, `pub(crate)` or `pub(super)`. Also private, which is + /// equivalent to `pub(self)`. + Module(Interned, VisibilityExplicitness), + /// `pub`. + Public, +} + +/// Whether the item was imported through an explicit `pub(crate) use` or just a `use` without +/// visibility. +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +pub enum VisibilityExplicitness { + Explicit, + Implicit, +} + +impl VisibilityExplicitness { + pub fn is_explicit(&self) -> bool { + matches!(self, Self::Explicit) + } +} + +// FIXME: Remove this from item tree? /// A single field of an enum variant or struct #[derive(Debug, Clone, PartialEq, Eq)] pub struct Field { pub name: Name, - pub type_ref: TypeRefId, pub visibility: RawVisibilityId, + // FIXME: Not an item tree property + pub is_unsafe: bool, } #[derive(Debug, Clone, Eq, PartialEq)] @@ -1014,32 +800,20 @@ pub struct Const { /// `None` for `const _: () = ();` pub name: Option, pub visibility: RawVisibilityId, - pub type_ref: TypeRefId, pub ast_id: FileAstId, - pub has_body: bool, - pub types_map: Arc, } #[derive(Debug, Clone, Eq, PartialEq)] pub struct Static { pub name: Name, pub visibility: RawVisibilityId, - // TODO: use bitflags when we have more flags - pub mutable: bool, - pub has_safe_kw: bool, - pub has_unsafe_kw: bool, - pub type_ref: TypeRefId, pub ast_id: FileAstId, - pub types_map: Arc, } #[derive(Debug, Clone, Eq, PartialEq)] pub struct Trait { pub name: Name, pub visibility: RawVisibilityId, - pub generic_params: Arc, - pub is_auto: bool, - pub is_unsafe: bool, pub items: Box<[AssocItem]>, pub ast_id: FileAstId, } @@ -1048,32 +822,20 @@ pub struct Trait { pub struct TraitAlias { pub name: Name, pub visibility: RawVisibilityId, - pub generic_params: Arc, pub ast_id: FileAstId, } #[derive(Debug, Clone, Eq, PartialEq)] pub struct Impl { - pub generic_params: Arc, - pub target_trait: Option, - pub self_ty: TypeRefId, - pub is_negative: bool, - pub is_unsafe: bool, pub items: Box<[AssocItem]>, pub ast_id: FileAstId, - pub types_map: Arc, } #[derive(Debug, Clone, PartialEq, Eq)] pub struct TypeAlias { pub name: Name, pub visibility: RawVisibilityId, - /// Bounds on the type alias itself. Only valid in trait declarations, eg. `type Assoc: Copy;`. - pub bounds: Box<[TypeBound]>, - pub generic_params: Arc, - pub type_ref: Option, pub ast_id: FileAstId, - pub types_map: Arc, } #[derive(Debug, Clone, Eq, PartialEq)] @@ -1098,7 +860,7 @@ pub struct MacroCall { pub path: Interned, pub ast_id: FileAstId, pub expand_to: ExpandTo, - pub ctxt: SyntaxContextId, + pub ctxt: SyntaxContext, } #[derive(Debug, Clone, Eq, PartialEq)] @@ -1126,7 +888,7 @@ impl Use { ) -> ast::UseTree { // Re-lower the AST item and get the source map. // Note: The AST unwraps are fine, since if they fail we should have never obtained `index`. - let ast = InFile::new(file_id, self.ast_id).to_node(db.upcast()); + let ast = InFile::new(file_id, self.ast_id).to_node(db); let ast_use_tree = ast.use_tree().expect("missing `use_tree`"); let (_, source_map) = lower::lower_use_tree(db, ast_use_tree, &mut |range| { db.span_map(file_id).span_for_range(range).ctx @@ -1143,7 +905,7 @@ impl Use { ) -> Arena { // Re-lower the AST item and get the source map. // Note: The AST unwraps are fine, since if they fail we should have never obtained `index`. - let ast = InFile::new(file_id, self.ast_id).to_node(db.upcast()); + let ast = InFile::new(file_id, self.ast_id).to_node(db); let ast_use_tree = ast.use_tree().expect("missing `use_tree`"); lower::lower_use_tree(db, ast_use_tree, &mut |range| { db.span_map(file_id).span_for_range(range).ctx diff --git a/src/tools/rust-analyzer/crates/hir-def/src/item_tree/lower.rs b/src/tools/rust-analyzer/crates/hir-def/src/item_tree/lower.rs index 71848845a84df..b490e1683c01f 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/item_tree/lower.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/item_tree/lower.rs @@ -3,42 +3,29 @@ use std::{cell::OnceCell, collections::hash_map::Entry}; use hir_expand::{ - mod_path::path, + HirFileId, + mod_path::PathKind, name::AsName, span_map::{SpanMap, SpanMapRef}, - HirFileId, }; -use intern::{sym, Symbol}; +use intern::{Symbol, sym}; use la_arena::Arena; -use rustc_hash::FxHashMap; -use span::{AstIdMap, SyntaxContextId}; -use stdx::thin_vec::ThinVec; +use span::{AstIdMap, SyntaxContext}; use syntax::{ - ast::{self, HasModuleItem, HasName, HasTypeBounds, IsString}, AstNode, + ast::{self, HasModuleItem, HasName, IsString}, }; use triomphe::Arc; use crate::{ db::DefDatabase, - generics::{GenericParams, GenericParamsCollector, TypeParamData, TypeParamProvenance}, item_tree::{ - AssocItem, AttrOwner, Const, Either, Enum, ExternBlock, ExternCrate, Field, FieldParent, - FieldsShape, FileItemTreeId, FnFlags, Function, GenericArgs, GenericItemSourceMapBuilder, - GenericModItem, Idx, Impl, ImportAlias, Interned, ItemTree, ItemTreeData, - ItemTreeSourceMaps, ItemTreeSourceMapsBuilder, Macro2, MacroCall, MacroRules, Mod, ModItem, - ModKind, ModPath, Mutability, Name, Param, Path, Range, RawAttrs, RawIdx, RawVisibilityId, - Static, Struct, StructKind, Trait, TraitAlias, TypeAlias, Union, Use, UseTree, UseTreeKind, - Variant, - }, - lower::LowerCtx, - path::AssociatedTypeBinding, - type_ref::{ - LifetimeRef, PathId, RefType, TraitBoundModifier, TraitRef, TypeBound, TypeRef, TypeRefId, - TypesMap, TypesSourceMap, + AssocItem, AttrOwner, Const, Enum, ExternBlock, ExternCrate, Field, FieldParent, + FieldsShape, FileItemTreeId, Function, Idx, Impl, ImportAlias, Interned, ItemTree, + ItemTreeData, Macro2, MacroCall, MacroRules, Mod, ModItem, ModKind, ModPath, Name, Range, + RawAttrs, RawIdx, RawVisibility, RawVisibilityId, Static, Struct, StructKind, Trait, + TraitAlias, TypeAlias, Union, Use, UseTree, UseTreeKind, Variant, VisibilityExplicitness, }, - visibility::RawVisibility, - LocalLifetimeParamId, LocalTypeOrConstParamId, }; fn id(index: Idx) -> FileItemTreeId { @@ -49,11 +36,8 @@ pub(super) struct Ctx<'a> { db: &'a dyn DefDatabase, tree: ItemTree, source_ast_id_map: Arc, - generic_param_attr_buffer: - FxHashMap, RawAttrs>, span_map: OnceCell, file: HirFileId, - source_maps: ItemTreeSourceMapsBuilder, } impl<'a> Ctx<'a> { @@ -61,11 +45,9 @@ impl<'a> Ctx<'a> { Self { db, tree: ItemTree::default(), - generic_param_attr_buffer: FxHashMap::default(), source_ast_id_map: db.ast_id_map(file), file, span_map: OnceCell::new(), - source_maps: ItemTreeSourceMapsBuilder::default(), } } @@ -73,39 +55,13 @@ impl<'a> Ctx<'a> { self.span_map.get_or_init(|| self.db.span_map(self.file)).as_ref() } - fn body_ctx<'b, 'c>( - &self, - types_map: &'b mut TypesMap, - types_source_map: &'b mut TypesSourceMap, - ) -> LowerCtx<'c> - where - 'a: 'c, - 'b: 'c, - { - // FIXME: This seems a bit wasteful that if `LowerCtx` will initialize the span map we won't benefit. - LowerCtx::with_span_map_cell( - self.db, - self.file, - self.span_map.clone(), - types_map, - types_source_map, - ) - } - - pub(super) fn lower_module_items( - mut self, - item_owner: &dyn HasModuleItem, - ) -> (ItemTree, ItemTreeSourceMaps) { + pub(super) fn lower_module_items(mut self, item_owner: &dyn HasModuleItem) -> ItemTree { self.tree.top_level = item_owner.items().flat_map(|item| self.lower_mod_item(&item)).collect(); - assert!(self.generic_param_attr_buffer.is_empty()); - (self.tree, self.source_maps.build()) + self.tree } - pub(super) fn lower_macro_stmts( - mut self, - stmts: ast::MacroStmts, - ) -> (ItemTree, ItemTreeSourceMaps) { + pub(super) fn lower_macro_stmts(mut self, stmts: ast::MacroStmts) -> ItemTree { self.tree.top_level = stmts .statements() .filter_map(|stmt| { @@ -135,14 +91,11 @@ impl<'a> Ctx<'a> { } } - assert!(self.generic_param_attr_buffer.is_empty()); - (self.tree, self.source_maps.build()) + self.tree } - pub(super) fn lower_block(mut self, block: &ast::BlockExpr) -> (ItemTree, ItemTreeSourceMaps) { - self.tree - .attrs - .insert(AttrOwner::TopLevel, RawAttrs::new(self.db.upcast(), block, self.span_map())); + pub(super) fn lower_block(mut self, block: &ast::BlockExpr) -> ItemTree { + self.tree.attrs.insert(AttrOwner::TopLevel, RawAttrs::new(self.db, block, self.span_map())); self.tree.top_level = block .statements() .filter_map(|stmt| match stmt { @@ -164,8 +117,7 @@ impl<'a> Ctx<'a> { } } - assert!(self.generic_param_attr_buffer.is_empty()); - (self.tree, self.source_maps.build()) + self.tree } fn data(&mut self) -> &mut ItemTreeData { @@ -192,7 +144,7 @@ impl<'a> Ctx<'a> { ast::Item::MacroDef(ast) => self.lower_macro_def(ast)?.into(), ast::Item::ExternBlock(ast) => self.lower_extern_block(ast).into(), }; - let attrs = RawAttrs::new(self.db.upcast(), item, self.span_map()); + let attrs = RawAttrs::new(self.db, item, self.span_map()); self.add_attrs(mod_item.into(), attrs); Some(mod_item) @@ -218,7 +170,7 @@ impl<'a> Ctx<'a> { ast::AssocItem::Const(ast) => Some(self.lower_const(ast).into()), ast::AssocItem::MacroCall(ast) => self.lower_macro_call(ast).map(Into::into), }?; - let attrs = RawAttrs::new(self.db.upcast(), item_node, self.span_map()); + let attrs = RawAttrs::new(self.db, item_node, self.span_map()); self.add_attrs( match item { AssocItem::Function(it) => AttrOwner::ModItem(ModItem::Function(it)), @@ -232,31 +184,13 @@ impl<'a> Ctx<'a> { } fn lower_struct(&mut self, strukt: &ast::Struct) -> Option> { - let (mut types_map, mut types_source_map) = - (TypesMap::default(), TypesSourceMap::default()); - let mut body_ctx = self.body_ctx(&mut types_map, &mut types_source_map); let visibility = self.lower_visibility(strukt); let name = strukt.name()?.as_name(); let ast_id = self.source_ast_id_map.ast_id(strukt); - let (fields, kind, attrs) = self.lower_fields(&strukt.kind(), &mut body_ctx); - let (generic_params, generics_source_map) = - self.lower_generic_params(HasImplicitSelf::No, strukt); - types_map.shrink_to_fit(); - types_source_map.shrink_to_fit(); - let res = Struct { - name, - visibility, - generic_params, - fields, - shape: kind, - ast_id, - types_map: Arc::new(types_map), - }; + let (fields, kind, attrs) = self.lower_fields(&strukt.kind()); + let res = Struct { name, visibility, fields, shape: kind, ast_id }; let id = id(self.data().structs.alloc(res)); - self.source_maps.structs.push(GenericItemSourceMapBuilder { - item: types_source_map, - generics: generics_source_map, - }); + for (idx, attr) in attrs { self.add_attrs( AttrOwner::Field( @@ -266,14 +200,12 @@ impl<'a> Ctx<'a> { attr, ); } - self.write_generic_params_attributes(id.into()); Some(id) } fn lower_fields( &mut self, strukt_kind: &ast::StructKind, - body_ctx: &mut LowerCtx<'_>, ) -> (Box<[Field]>, FieldsShape, Vec<(usize, RawAttrs)>) { match strukt_kind { ast::StructKind::Record(it) => { @@ -281,9 +213,9 @@ impl<'a> Ctx<'a> { let mut attrs = vec![]; for (i, field) in it.fields().enumerate() { - let data = self.lower_record_field(&field, body_ctx); + let data = self.lower_record_field(&field); fields.push(data); - let attr = RawAttrs::new(self.db.upcast(), &field, self.span_map()); + let attr = RawAttrs::new(self.db, &field, self.span_map()); if !attr.is_empty() { attrs.push((i, attr)) } @@ -295,9 +227,9 @@ impl<'a> Ctx<'a> { let mut attrs = vec![]; for (i, field) in it.fields().enumerate() { - let data = self.lower_tuple_field(i, &field, body_ctx); + let data = self.lower_tuple_field(i, &field); fields.push(data); - let attr = RawAttrs::new(self.db.upcast(), &field, self.span_map()); + let attr = RawAttrs::new(self.db, &field, self.span_map()); if !attr.is_empty() { attrs.push((i, attr)) } @@ -308,63 +240,32 @@ impl<'a> Ctx<'a> { } } - fn lower_record_field( - &mut self, - field: &ast::RecordField, - body_ctx: &mut LowerCtx<'_>, - ) -> Field { + fn lower_record_field(&mut self, field: &ast::RecordField) -> Field { let name = match field.name() { Some(name) => name.as_name(), None => Name::missing(), }; let visibility = self.lower_visibility(field); - let type_ref = TypeRef::from_ast_opt(body_ctx, field.ty()); - Field { name, type_ref, visibility } + Field { name, visibility, is_unsafe: field.unsafe_token().is_some() } } - fn lower_tuple_field( - &mut self, - idx: usize, - field: &ast::TupleField, - body_ctx: &mut LowerCtx<'_>, - ) -> Field { + fn lower_tuple_field(&mut self, idx: usize, field: &ast::TupleField) -> Field { let name = Name::new_tuple_field(idx); let visibility = self.lower_visibility(field); - let type_ref = TypeRef::from_ast_opt(body_ctx, field.ty()); - Field { name, type_ref, visibility } + Field { name, visibility, is_unsafe: false } } fn lower_union(&mut self, union: &ast::Union) -> Option> { - let (mut types_map, mut types_source_map) = - (TypesMap::default(), TypesSourceMap::default()); - let mut body_ctx = self.body_ctx(&mut types_map, &mut types_source_map); let visibility = self.lower_visibility(union); let name = union.name()?.as_name(); let ast_id = self.source_ast_id_map.ast_id(union); let (fields, _, attrs) = match union.record_field_list() { - Some(record_field_list) => { - self.lower_fields(&StructKind::Record(record_field_list), &mut body_ctx) - } + Some(record_field_list) => self.lower_fields(&StructKind::Record(record_field_list)), None => (Box::default(), FieldsShape::Record, Vec::default()), }; - let (generic_params, generics_source_map) = - self.lower_generic_params(HasImplicitSelf::No, union); - types_map.shrink_to_fit(); - types_source_map.shrink_to_fit(); - let res = Union { - name, - visibility, - generic_params, - fields, - ast_id, - types_map: Arc::new(types_map), - }; + let res = Union { name, visibility, fields, ast_id }; let id = id(self.data().unions.alloc(res)); - self.source_maps.unions.push(GenericItemSourceMapBuilder { - item: types_source_map, - generics: generics_source_map, - }); for (idx, attr) in attrs { self.add_attrs( AttrOwner::Field( @@ -374,7 +275,6 @@ impl<'a> Ctx<'a> { attr, ); } - self.write_generic_params_attributes(id.into()); Some(id) } @@ -388,12 +288,8 @@ impl<'a> Ctx<'a> { FileItemTreeId(self.next_variant_idx())..FileItemTreeId(self.next_variant_idx()) } }; - let (generic_params, generics_source_map) = - self.lower_generic_params(HasImplicitSelf::No, enum_); - let res = Enum { name, visibility, generic_params, variants, ast_id }; + let res = Enum { name, visibility, variants, ast_id }; let id = id(self.data().enums.alloc(res)); - self.source_maps.enum_generics.push(generics_source_map); - self.write_generic_params_attributes(id.into()); Some(id) } @@ -401,34 +297,25 @@ impl<'a> Ctx<'a> { let start = self.next_variant_idx(); for variant in variants.variants() { let idx = self.lower_variant(&variant); - self.add_attrs( - id(idx).into(), - RawAttrs::new(self.db.upcast(), &variant, self.span_map()), - ); + self.add_attrs(id(idx).into(), RawAttrs::new(self.db, &variant, self.span_map())); } let end = self.next_variant_idx(); FileItemTreeId(start)..FileItemTreeId(end) } fn lower_variant(&mut self, variant: &ast::Variant) -> Idx { - let (mut types_map, mut types_source_map) = - (TypesMap::default(), TypesSourceMap::default()); - let mut body_ctx = self.body_ctx(&mut types_map, &mut types_source_map); let name = match variant.name() { Some(name) => name.as_name(), None => Name::missing(), }; - let (fields, kind, attrs) = self.lower_fields(&variant.kind(), &mut body_ctx); + let (fields, kind, attrs) = self.lower_fields(&variant.kind()); let ast_id = self.source_ast_id_map.ast_id(variant); - types_map.shrink_to_fit(); - types_source_map.shrink_to_fit(); - let res = Variant { name, fields, shape: kind, ast_id, types_map: Arc::new(types_map) }; + let res = Variant { name, fields, shape: kind, ast_id }; let id = self.data().variants.alloc(res); - self.source_maps.variants.push(types_source_map); for (idx, attr) in attrs { self.add_attrs( AttrOwner::Field( - FieldParent::Variant(FileItemTreeId(id)), + FieldParent::EnumVariant(FileItemTreeId(id)), Idx::from_raw(RawIdx::from_u32(idx as u32)), ), attr, @@ -438,144 +325,14 @@ impl<'a> Ctx<'a> { } fn lower_function(&mut self, func: &ast::Fn) -> Option> { - let (mut types_map, mut types_source_map) = - (TypesMap::default(), TypesSourceMap::default()); - let mut body_ctx = self.body_ctx(&mut types_map, &mut types_source_map); - let visibility = self.lower_visibility(func); let name = func.name()?.as_name(); - let mut has_self_param = false; - let mut has_var_args = false; - let mut params = vec![]; - let mut attrs = vec![]; - let mut push_attr = |idx, attr: RawAttrs| { - if !attr.is_empty() { - attrs.push((idx, attr)) - } - }; - if let Some(param_list) = func.param_list() { - if let Some(self_param) = param_list.self_param() { - push_attr( - params.len(), - RawAttrs::new(self.db.upcast(), &self_param, self.span_map()), - ); - let self_type = match self_param.ty() { - Some(type_ref) => TypeRef::from_ast(&mut body_ctx, type_ref), - None => { - let self_type = body_ctx.alloc_type_ref_desugared(TypeRef::Path( - Name::new_symbol_root(sym::Self_.clone()).into(), - )); - match self_param.kind() { - ast::SelfParamKind::Owned => self_type, - ast::SelfParamKind::Ref => body_ctx.alloc_type_ref_desugared( - TypeRef::Reference(Box::new(RefType { - ty: self_type, - lifetime: self_param.lifetime().as_ref().map(LifetimeRef::new), - mutability: Mutability::Shared, - })), - ), - ast::SelfParamKind::MutRef => body_ctx.alloc_type_ref_desugared( - TypeRef::Reference(Box::new(RefType { - ty: self_type, - lifetime: self_param.lifetime().as_ref().map(LifetimeRef::new), - mutability: Mutability::Mut, - })), - ), - } - } - }; - params.push(Param { type_ref: Some(self_type) }); - has_self_param = true; - } - for param in param_list.params() { - push_attr(params.len(), RawAttrs::new(self.db.upcast(), ¶m, self.span_map())); - let param = match param.dotdotdot_token() { - Some(_) => { - has_var_args = true; - Param { type_ref: None } - } - None => { - let type_ref = TypeRef::from_ast_opt(&mut body_ctx, param.ty()); - Param { type_ref: Some(type_ref) } - } - }; - params.push(param); - } - } - - let ret_type = match func.ret_type() { - Some(rt) => match rt.ty() { - Some(type_ref) => TypeRef::from_ast(&mut body_ctx, type_ref), - None if rt.thin_arrow_token().is_some() => body_ctx.alloc_error_type(), - None => body_ctx.alloc_type_ref_desugared(TypeRef::unit()), - }, - None => body_ctx.alloc_type_ref_desugared(TypeRef::unit()), - }; - - let ret_type = if func.async_token().is_some() { - let future_impl = desugar_future_path(&mut body_ctx, ret_type); - let ty_bound = TypeBound::Path(future_impl, TraitBoundModifier::None); - body_ctx.alloc_type_ref_desugared(TypeRef::ImplTrait(ThinVec::from_iter([ty_bound]))) - } else { - ret_type - }; - - let abi = func.abi().map(lower_abi); - let ast_id = self.source_ast_id_map.ast_id(func); - let mut flags = FnFlags::default(); - if func.body().is_some() { - flags |= FnFlags::HAS_BODY; - } - if has_self_param { - flags |= FnFlags::HAS_SELF_PARAM; - } - if func.default_token().is_some() { - flags |= FnFlags::HAS_DEFAULT_KW; - } - if func.const_token().is_some() { - flags |= FnFlags::HAS_CONST_KW; - } - if func.async_token().is_some() { - flags |= FnFlags::HAS_ASYNC_KW; - } - if func.unsafe_token().is_some() { - flags |= FnFlags::HAS_UNSAFE_KW; - } - if func.safe_token().is_some() { - flags |= FnFlags::HAS_SAFE_KW; - } - if has_var_args { - flags |= FnFlags::IS_VARARGS; - } - - types_map.shrink_to_fit(); - types_source_map.shrink_to_fit(); - let (generic_params, generics_source_map) = - self.lower_generic_params(HasImplicitSelf::No, func); - let res = Function { - name, - visibility, - explicit_generic_params: generic_params, - abi, - params: params.into_boxed_slice(), - ret_type, - ast_id, - types_map: Arc::new(types_map), - flags, - }; + let res = Function { name, visibility, ast_id }; let id = id(self.data().functions.alloc(res)); - self.source_maps.functions.push(GenericItemSourceMapBuilder { - item: types_source_map, - generics: generics_source_map, - }); - for (idx, attr) in attrs { - self.add_attrs(AttrOwner::Param(id, Idx::from_raw(RawIdx::from_u32(idx as u32))), attr); - } - self.write_generic_params_attributes(id.into()); Some(id) } @@ -583,82 +340,27 @@ impl<'a> Ctx<'a> { &mut self, type_alias: &ast::TypeAlias, ) -> Option> { - let (mut types_map, mut types_source_map) = - (TypesMap::default(), TypesSourceMap::default()); - let mut body_ctx = self.body_ctx(&mut types_map, &mut types_source_map); let name = type_alias.name()?.as_name(); - let type_ref = type_alias.ty().map(|it| TypeRef::from_ast(&mut body_ctx, it)); let visibility = self.lower_visibility(type_alias); - let bounds = self.lower_type_bounds(type_alias, &mut body_ctx); let ast_id = self.source_ast_id_map.ast_id(type_alias); - let (generic_params, generics_source_map) = - self.lower_generic_params(HasImplicitSelf::No, type_alias); - types_map.shrink_to_fit(); - types_source_map.shrink_to_fit(); - let res = TypeAlias { - name, - visibility, - bounds, - generic_params, - type_ref, - ast_id, - types_map: Arc::new(types_map), - }; + let res = TypeAlias { name, visibility, ast_id }; let id = id(self.data().type_aliases.alloc(res)); - self.source_maps.type_aliases.push(GenericItemSourceMapBuilder { - item: types_source_map, - generics: generics_source_map, - }); - self.write_generic_params_attributes(id.into()); Some(id) } fn lower_static(&mut self, static_: &ast::Static) -> Option> { - let (mut types_map, mut types_source_map) = - (TypesMap::default(), TypesSourceMap::default()); - let mut body_ctx = self.body_ctx(&mut types_map, &mut types_source_map); let name = static_.name()?.as_name(); - let type_ref = TypeRef::from_ast_opt(&mut body_ctx, static_.ty()); let visibility = self.lower_visibility(static_); - let mutable = static_.mut_token().is_some(); - let has_safe_kw = static_.safe_token().is_some(); - let has_unsafe_kw = static_.unsafe_token().is_some(); let ast_id = self.source_ast_id_map.ast_id(static_); - types_map.shrink_to_fit(); - types_source_map.shrink_to_fit(); - let res = Static { - name, - visibility, - mutable, - type_ref, - ast_id, - has_safe_kw, - has_unsafe_kw, - types_map: Arc::new(types_map), - }; - self.source_maps.statics.push(types_source_map); + let res = Static { name, visibility, ast_id }; Some(id(self.data().statics.alloc(res))) } fn lower_const(&mut self, konst: &ast::Const) -> FileItemTreeId { - let (mut types_map, mut types_source_map) = - (TypesMap::default(), TypesSourceMap::default()); - let mut body_ctx = self.body_ctx(&mut types_map, &mut types_source_map); let name = konst.name().map(|it| it.as_name()); - let type_ref = TypeRef::from_ast_opt(&mut body_ctx, konst.ty()); let visibility = self.lower_visibility(konst); let ast_id = self.source_ast_id_map.ast_id(konst); - types_map.shrink_to_fit(); - types_source_map.shrink_to_fit(); - let res = Const { - name, - visibility, - type_ref, - ast_id, - has_body: konst.body().is_some(), - types_map: Arc::new(types_map), - }; - self.source_maps.consts.push(types_source_map); + let res = Const { name, visibility, ast_id }; id(self.data().consts.alloc(res)) } @@ -687,8 +389,6 @@ impl<'a> Ctx<'a> { let name = trait_def.name()?.as_name(); let visibility = self.lower_visibility(trait_def); let ast_id = self.source_ast_id_map.ast_id(trait_def); - let is_auto = trait_def.auto_token().is_some(); - let is_unsafe = trait_def.unsafe_token().is_some(); let items = trait_def .assoc_item_list() @@ -697,12 +397,8 @@ impl<'a> Ctx<'a> { .filter_map(|item_node| self.lower_assoc_item(&item_node)) .collect(); - let (generic_params, generics_source_map) = - self.lower_generic_params(HasImplicitSelf::Yes(trait_def.type_bound_list()), trait_def); - let def = Trait { name, visibility, generic_params, is_auto, is_unsafe, items, ast_id }; + let def = Trait { name, visibility, items, ast_id }; let id = id(self.data().traits.alloc(def)); - self.source_maps.trait_generics.push(generics_source_map); - self.write_generic_params_attributes(id.into()); Some(id) } @@ -713,32 +409,14 @@ impl<'a> Ctx<'a> { let name = trait_alias_def.name()?.as_name(); let visibility = self.lower_visibility(trait_alias_def); let ast_id = self.source_ast_id_map.ast_id(trait_alias_def); - let (generic_params, generics_source_map) = self.lower_generic_params( - HasImplicitSelf::Yes(trait_alias_def.type_bound_list()), - trait_alias_def, - ); - let alias = TraitAlias { name, visibility, generic_params, ast_id }; + let alias = TraitAlias { name, visibility, ast_id }; let id = id(self.data().trait_aliases.alloc(alias)); - self.source_maps.trait_alias_generics.push(generics_source_map); - self.write_generic_params_attributes(id.into()); Some(id) } fn lower_impl(&mut self, impl_def: &ast::Impl) -> FileItemTreeId { - let (mut types_map, mut types_source_map) = - (TypesMap::default(), TypesSourceMap::default()); - let mut body_ctx = self.body_ctx(&mut types_map, &mut types_source_map); - let ast_id = self.source_ast_id_map.ast_id(impl_def); - // FIXME: If trait lowering fails, due to a non PathType for example, we treat this impl - // as if it was an non-trait impl. Ideally we want to create a unique missing ref that only - // equals itself. - let self_ty = TypeRef::from_ast_opt(&mut body_ctx, impl_def.self_ty()); - let target_trait = impl_def.trait_().and_then(|tr| TraitRef::from_ast(&mut body_ctx, tr)); - let is_negative = impl_def.excl_token().is_some(); - let is_unsafe = impl_def.unsafe_token().is_some(); - // We cannot use `assoc_items()` here as that does not include macro calls. let items = impl_def .assoc_item_list() @@ -748,27 +426,8 @@ impl<'a> Ctx<'a> { .collect(); // Note that trait impls don't get implicit `Self` unlike traits, because here they are a // type alias rather than a type parameter, so this is handled by the resolver. - let (generic_params, generics_source_map) = - self.lower_generic_params(HasImplicitSelf::No, impl_def); - types_map.shrink_to_fit(); - types_source_map.shrink_to_fit(); - let res = Impl { - generic_params, - target_trait, - self_ty, - is_negative, - is_unsafe, - items, - ast_id, - types_map: Arc::new(types_map), - }; - let id = id(self.data().impls.alloc(res)); - self.source_maps.impls.push(GenericItemSourceMapBuilder { - item: types_source_map, - generics: generics_source_map, - }); - self.write_generic_params_attributes(id.into()); - id + let res = Impl { items, ast_id }; + id(self.data().impls.alloc(res)) } fn lower_use(&mut self, use_item: &ast::Use) -> Option> { @@ -801,7 +460,7 @@ impl<'a> Ctx<'a> { let span_map = self.span_map(); let path = m.path()?; let range = path.syntax().text_range(); - let path = Interned::new(ModPath::from_src(self.db.upcast(), path, &mut |range| { + let path = Interned::new(ModPath::from_src(self.db, path, &mut |range| { span_map.span_for_range(range).ctx })?); let ast_id = self.source_ast_id_map.ast_id(m); @@ -844,7 +503,7 @@ impl<'a> Ctx<'a> { ast::ExternItem::TypeAlias(ty) => self.lower_type_alias(ty)?.into(), ast::ExternItem::MacroCall(call) => self.lower_macro_call(call)?.into(), }; - let attrs = RawAttrs::new(self.db.upcast(), &item, self.span_map()); + let attrs = RawAttrs::new(self.db, &item, self.span_map()); self.add_attrs(mod_item.into(), attrs); Some(mod_item) }) @@ -855,75 +514,8 @@ impl<'a> Ctx<'a> { id(self.data().extern_blocks.alloc(res)) } - fn write_generic_params_attributes(&mut self, parent: GenericModItem) { - self.generic_param_attr_buffer.drain().for_each(|(idx, attrs)| { - self.tree.attrs.insert( - match idx { - Either::Left(id) => AttrOwner::TypeOrConstParamData(parent, id), - Either::Right(id) => AttrOwner::LifetimeParamData(parent, id), - }, - attrs, - ); - }) - } - - fn lower_generic_params( - &mut self, - has_implicit_self: HasImplicitSelf, - node: &dyn ast::HasGenericParams, - ) -> (Arc, TypesSourceMap) { - let (mut types_map, mut types_source_map) = - (TypesMap::default(), TypesSourceMap::default()); - let mut body_ctx = self.body_ctx(&mut types_map, &mut types_source_map); - debug_assert!(self.generic_param_attr_buffer.is_empty(),); - body_ctx.take_impl_traits_bounds(); - let mut generics = GenericParamsCollector::default(); - - if let HasImplicitSelf::Yes(bounds) = has_implicit_self { - // Traits and trait aliases get the Self type as an implicit first type parameter. - generics.type_or_consts.alloc( - TypeParamData { - name: Some(Name::new_symbol_root(sym::Self_.clone())), - default: None, - provenance: TypeParamProvenance::TraitSelf, - } - .into(), - ); - // add super traits as bounds on Self - // i.e., `trait Foo: Bar` is equivalent to `trait Foo where Self: Bar` - let bound_target = Either::Left(body_ctx.alloc_type_ref_desugared(TypeRef::Path( - Name::new_symbol_root(sym::Self_.clone()).into(), - ))); - generics.fill_bounds(&mut body_ctx, bounds, bound_target); - } - - let span_map = body_ctx.span_map().clone(); - let add_param_attrs = |item: Either, - param| { - let attrs = RawAttrs::new(self.db.upcast(), ¶m, span_map.as_ref()); - debug_assert!(self.generic_param_attr_buffer.insert(item, attrs).is_none()); - }; - generics.fill(&mut body_ctx, node, add_param_attrs); - - let generics = generics.finish(types_map, &mut types_source_map); - (generics, types_source_map) - } - - fn lower_type_bounds( - &mut self, - node: &dyn ast::HasTypeBounds, - body_ctx: &mut LowerCtx<'_>, - ) -> Box<[TypeBound]> { - match node.type_bound_list() { - Some(bound_list) => { - bound_list.bounds().map(|it| TypeBound::from_ast(body_ctx, it)).collect() - } - None => Box::default(), - } - } - fn lower_visibility(&mut self, item: &dyn ast::HasVisibility) -> RawVisibilityId { - let vis = RawVisibility::from_ast(self.db, item.visibility(), &mut |range| { + let vis = visibility_from_ast(self.db, item.visibility(), &mut |range| { self.span_map().span_for_range(range).ctx }); self.data().vis.alloc(vis) @@ -936,33 +528,11 @@ impl<'a> Ctx<'a> { } } -fn desugar_future_path(ctx: &mut LowerCtx<'_>, orig: TypeRefId) -> PathId { - let path = path![core::future::Future]; - let mut generic_args: Vec<_> = - std::iter::repeat(None).take(path.segments().len() - 1).collect(); - let binding = AssociatedTypeBinding { - name: Name::new_symbol_root(sym::Output.clone()), - args: None, - type_ref: Some(orig), - bounds: Box::default(), - }; - generic_args.push(Some(GenericArgs { bindings: Box::new([binding]), ..GenericArgs::empty() })); - - let path = Path::from_known_path(path, generic_args); - PathId::from_type_ref_unchecked(ctx.alloc_type_ref_desugared(TypeRef::Path(path))) -} - -enum HasImplicitSelf { - /// Inner list is a type bound list for the implicit `Self`. - Yes(Option), - No, -} - fn lower_abi(abi: ast::Abi) -> Symbol { match abi.abi_string() { Some(tok) => Symbol::intern(tok.text_without_quotes()), // `extern` default to be `extern "C"`. - _ => sym::C.clone(), + _ => sym::C, } } @@ -975,7 +545,7 @@ impl UseTreeLowering<'_> { fn lower_use_tree( &mut self, tree: ast::UseTree, - span_for_range: &mut dyn FnMut(::tt::TextRange) -> SyntaxContextId, + span_for_range: &mut dyn FnMut(::tt::TextRange) -> SyntaxContext, ) -> Option { if let Some(use_tree_list) = tree.use_tree_list() { let prefix = match tree.path() { @@ -984,7 +554,7 @@ impl UseTreeLowering<'_> { // E.g. `use something::{inner}` (prefix is `None`, path is `something`) // or `use something::{path::{inner::{innerer}}}` (prefix is `something::path`, path is `inner`) Some(path) => { - match ModPath::from_src(self.db.upcast(), path, span_for_range) { + match ModPath::from_src(self.db, path, span_for_range) { Some(it) => Some(it), None => return None, // FIXME: report errors somewhere } @@ -1005,7 +575,7 @@ impl UseTreeLowering<'_> { } else { let is_glob = tree.star_token().is_some(); let path = match tree.path() { - Some(path) => Some(ModPath::from_src(self.db.upcast(), path, span_for_range)?), + Some(path) => Some(ModPath::from_src(self.db, path, span_for_range)?), None => None, }; let alias = tree.rename().map(|a| { @@ -1042,9 +612,38 @@ impl UseTreeLowering<'_> { pub(crate) fn lower_use_tree( db: &dyn DefDatabase, tree: ast::UseTree, - span_for_range: &mut dyn FnMut(::tt::TextRange) -> SyntaxContextId, + span_for_range: &mut dyn FnMut(::tt::TextRange) -> SyntaxContext, ) -> Option<(UseTree, Arena)> { let mut lowering = UseTreeLowering { db, mapping: Arena::new() }; let tree = lowering.lower_use_tree(tree, span_for_range)?; Some((tree, lowering.mapping)) } + +fn private_vis() -> RawVisibility { + RawVisibility::Module( + Interned::new(ModPath::from_kind(PathKind::SELF)), + VisibilityExplicitness::Implicit, + ) +} + +fn visibility_from_ast( + db: &dyn DefDatabase, + node: Option, + span_for_range: &mut dyn FnMut(::tt::TextRange) -> SyntaxContext, +) -> RawVisibility { + let Some(node) = node else { return private_vis() }; + let path = match node.kind() { + ast::VisibilityKind::In(path) => { + let path = ModPath::from_src(db, path, span_for_range); + match path { + None => return private_vis(), + Some(path) => path, + } + } + ast::VisibilityKind::PubCrate => ModPath::from_kind(PathKind::Crate), + ast::VisibilityKind::PubSuper => ModPath::from_kind(PathKind::Super(1)), + ast::VisibilityKind::PubSelf => ModPath::from_kind(PathKind::SELF), + ast::VisibilityKind::Pub => return RawVisibility::Public, + }; + RawVisibility::Module(Interned::new(path), VisibilityExplicitness::Explicit) +} diff --git a/src/tools/rust-analyzer/crates/hir-def/src/item_tree/pretty.rs b/src/tools/rust-analyzer/crates/hir-def/src/item_tree/pretty.rs index 70bf2f13c88a1..47c6eb13293f5 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/item_tree/pretty.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/item_tree/pretty.rs @@ -6,16 +6,12 @@ use la_arena::{Idx, RawIdx}; use span::{Edition, ErasedFileAstId}; use crate::{ - generics::{TypeOrConstParamData, WherePredicate, WherePredicateTypeTarget}, item_tree::{ AttrOwner, Const, DefDatabase, Enum, ExternBlock, ExternCrate, Field, FieldParent, - FieldsShape, FileItemTreeId, FnFlags, Function, GenericModItem, GenericParams, Impl, - ItemTree, Macro2, MacroCall, MacroRules, Mod, ModItem, ModKind, Param, Path, RawAttrs, - RawVisibilityId, Static, Struct, Trait, TraitAlias, TypeAlias, TypeBound, Union, Use, - UseTree, UseTreeKind, Variant, + FieldsShape, FileItemTreeId, Function, Impl, ItemTree, Macro2, MacroCall, MacroRules, Mod, + ModItem, ModKind, RawAttrs, RawVisibilityId, Static, Struct, Trait, TraitAlias, TypeAlias, + Union, Use, UseTree, UseTreeKind, Variant, }, - pretty::{print_path, print_type_bounds, print_type_ref}, - type_ref::{TypeRefId, TypesMap}, visibility::RawVisibility, }; @@ -100,7 +96,7 @@ impl Printer<'_> { self, "#{}[{}{}]{}", inner, - attr.path.display(self.db.upcast(), self.edition), + attr.path.display(self.db, self.edition), attr.input.as_ref().map(|it| it.to_string()).unwrap_or_default(), separated_by, ); @@ -116,34 +112,30 @@ impl Printer<'_> { fn print_visibility(&mut self, vis: RawVisibilityId) { match &self.tree[vis] { RawVisibility::Module(path, _expl) => { - w!(self, "pub({}) ", path.display(self.db.upcast(), self.edition)) + w!(self, "pub({}) ", path.display(self.db, self.edition)) } RawVisibility::Public => w!(self, "pub "), }; } - fn print_fields( - &mut self, - parent: FieldParent, - kind: FieldsShape, - fields: &[Field], - map: &TypesMap, - ) { + fn print_fields(&mut self, parent: FieldParent, kind: FieldsShape, fields: &[Field]) { let edition = self.edition; match kind { FieldsShape::Record => { self.whitespace(); w!(self, "{{"); self.indented(|this| { - for (idx, Field { name, type_ref, visibility }) in fields.iter().enumerate() { + for (idx, Field { name, visibility, is_unsafe }) in fields.iter().enumerate() { this.print_attrs_of( AttrOwner::Field(parent, Idx::from_raw(RawIdx::from(idx as u32))), "\n", ); this.print_visibility(*visibility); - w!(this, "{}: ", name.display(self.db.upcast(), edition)); - this.print_type_ref(*type_ref, map); - wln!(this, ","); + if *is_unsafe { + w!(this, "unsafe "); + } + + wln!(this, "{},", name.display(self.db, edition)); } }); w!(self, "}}"); @@ -151,15 +143,16 @@ impl Printer<'_> { FieldsShape::Tuple => { w!(self, "("); self.indented(|this| { - for (idx, Field { name, type_ref, visibility }) in fields.iter().enumerate() { + for (idx, Field { name, visibility, is_unsafe }) in fields.iter().enumerate() { this.print_attrs_of( AttrOwner::Field(parent, Idx::from_raw(RawIdx::from(idx as u32))), "\n", ); this.print_visibility(*visibility); - w!(this, "{}: ", name.display(self.db.upcast(), edition)); - this.print_type_ref(*type_ref, map); - wln!(this, ","); + if *is_unsafe { + w!(this, "unsafe "); + } + wln!(this, "{},", name.display(self.db, edition)); } }); w!(self, ")"); @@ -168,49 +161,23 @@ impl Printer<'_> { } } - fn print_fields_and_where_clause( - &mut self, - parent: FieldParent, - kind: FieldsShape, - fields: &[Field], - params: &GenericParams, - map: &TypesMap, - ) { - match kind { - FieldsShape::Record => { - if self.print_where_clause(params) { - wln!(self); - } - self.print_fields(parent, kind, fields, map); - } - FieldsShape::Unit => { - self.print_where_clause(params); - self.print_fields(parent, kind, fields, map); - } - FieldsShape::Tuple => { - self.print_fields(parent, kind, fields, map); - self.print_where_clause(params); - } - } - } - fn print_use_tree(&mut self, use_tree: &UseTree) { match &use_tree.kind { UseTreeKind::Single { path, alias } => { - w!(self, "{}", path.display(self.db.upcast(), self.edition)); + w!(self, "{}", path.display(self.db, self.edition)); if let Some(alias) = alias { w!(self, " as {}", alias.display(self.edition)); } } UseTreeKind::Glob { path } => { if let Some(path) = path { - w!(self, "{}::", path.display(self.db.upcast(), self.edition)); + w!(self, "{}::", path.display(self.db, self.edition)); } w!(self, "*"); } UseTreeKind::Prefixed { prefix, list } => { if let Some(prefix) = prefix { - w!(self, "{}::", prefix.display(self.db.upcast(), self.edition)); + w!(self, "{}::", prefix.display(self.db, self.edition)); } w!(self, "{{"); for (i, tree) in list.iter().enumerate() { @@ -240,7 +207,7 @@ impl Printer<'_> { let ExternCrate { name, alias, visibility, ast_id } = &self.tree[it]; self.print_ast_id(ast_id.erase()); self.print_visibility(*visibility); - w!(self, "extern crate {}", name.display(self.db.upcast(), self.edition)); + w!(self, "extern crate {}", name.display(self.db, self.edition)); if let Some(alias) = alias { w!(self, " as {}", alias.display(self.edition)); } @@ -262,89 +229,17 @@ impl Printer<'_> { wln!(self, "}}"); } ModItem::Function(it) => { - let Function { - name, - visibility, - explicit_generic_params, - abi, - params, - ret_type, - ast_id, - types_map, - flags, - } = &self.tree[it]; + let Function { name, visibility, ast_id } = &self.tree[it]; self.print_ast_id(ast_id.erase()); self.print_visibility(*visibility); - if flags.contains(FnFlags::HAS_DEFAULT_KW) { - w!(self, "default "); - } - if flags.contains(FnFlags::HAS_CONST_KW) { - w!(self, "const "); - } - if flags.contains(FnFlags::HAS_ASYNC_KW) { - w!(self, "async "); - } - if flags.contains(FnFlags::HAS_UNSAFE_KW) { - w!(self, "unsafe "); - } - if flags.contains(FnFlags::HAS_SAFE_KW) { - w!(self, "safe "); - } - if let Some(abi) = abi { - w!(self, "extern \"{}\" ", abi); - } - w!(self, "fn {}", name.display(self.db.upcast(), self.edition)); - self.print_generic_params(explicit_generic_params, it.into()); - w!(self, "("); - if !params.is_empty() { - self.indented(|this| { - for (idx, Param { type_ref }) in params.iter().enumerate() { - this.print_attrs_of( - AttrOwner::Param(it, Idx::from_raw(RawIdx::from(idx as u32))), - "\n", - ); - if idx == 0 && flags.contains(FnFlags::HAS_SELF_PARAM) { - w!(this, "self: "); - } - if let Some(type_ref) = type_ref { - this.print_type_ref(*type_ref, types_map); - } else { - wln!(this, "..."); - } - wln!(this, ","); - } - }); - } - w!(self, ") -> "); - self.print_type_ref(*ret_type, types_map); - self.print_where_clause(explicit_generic_params); - if flags.contains(FnFlags::HAS_BODY) { - wln!(self, " {{ ... }}"); - } else { - wln!(self, ";"); - } + wln!(self, "fn {};", name.display(self.db, self.edition)); } ModItem::Struct(it) => { - let Struct { - visibility, - name, - fields, - shape: kind, - generic_params, - ast_id, - types_map, - } = &self.tree[it]; + let Struct { visibility, name, fields, shape: kind, ast_id } = &self.tree[it]; self.print_ast_id(ast_id.erase()); self.print_visibility(*visibility); - w!(self, "struct {}", name.display(self.db.upcast(), self.edition)); - self.print_generic_params(generic_params, it.into()); - self.print_fields_and_where_clause( - FieldParent::Struct(it), - *kind, - fields, - generic_params, - types_map, - ); + w!(self, "struct {}", name.display(self.db, self.edition)); + self.print_fields(FieldParent::Struct(it), *kind, fields); if matches!(kind, FieldsShape::Record) { wln!(self); } else { @@ -352,98 +247,56 @@ impl Printer<'_> { } } ModItem::Union(it) => { - let Union { name, visibility, fields, generic_params, ast_id, types_map } = - &self.tree[it]; + let Union { name, visibility, fields, ast_id } = &self.tree[it]; self.print_ast_id(ast_id.erase()); self.print_visibility(*visibility); - w!(self, "union {}", name.display(self.db.upcast(), self.edition)); - self.print_generic_params(generic_params, it.into()); - self.print_fields_and_where_clause( - FieldParent::Union(it), - FieldsShape::Record, - fields, - generic_params, - types_map, - ); + w!(self, "union {}", name.display(self.db, self.edition)); + self.print_fields(FieldParent::Union(it), FieldsShape::Record, fields); wln!(self); } ModItem::Enum(it) => { - let Enum { name, visibility, variants, generic_params, ast_id } = &self.tree[it]; + let Enum { name, visibility, variants, ast_id } = &self.tree[it]; self.print_ast_id(ast_id.erase()); self.print_visibility(*visibility); - w!(self, "enum {}", name.display(self.db.upcast(), self.edition)); - self.print_generic_params(generic_params, it.into()); - self.print_where_clause_and_opening_brace(generic_params); + w!(self, "enum {}", name.display(self.db, self.edition)); let edition = self.edition; self.indented(|this| { for variant in FileItemTreeId::range_iter(variants.clone()) { - let Variant { name, fields, shape: kind, ast_id, types_map } = - &this.tree[variant]; + let Variant { name, fields, shape: kind, ast_id } = &this.tree[variant]; this.print_ast_id(ast_id.erase()); this.print_attrs_of(variant, "\n"); - w!(this, "{}", name.display(self.db.upcast(), edition)); - this.print_fields(FieldParent::Variant(variant), *kind, fields, types_map); + w!(this, "{}", name.display(self.db, edition)); + this.print_fields(FieldParent::EnumVariant(variant), *kind, fields); wln!(this, ","); } }); wln!(self, "}}"); } ModItem::Const(it) => { - let Const { name, visibility, type_ref, ast_id, has_body: _, types_map } = - &self.tree[it]; + let Const { name, visibility, ast_id } = &self.tree[it]; self.print_ast_id(ast_id.erase()); self.print_visibility(*visibility); w!(self, "const "); match name { - Some(name) => w!(self, "{}", name.display(self.db.upcast(), self.edition)), + Some(name) => w!(self, "{}", name.display(self.db, self.edition)), None => w!(self, "_"), } - w!(self, ": "); - self.print_type_ref(*type_ref, types_map); wln!(self, " = _;"); } ModItem::Static(it) => { - let Static { - name, - visibility, - mutable, - type_ref, - ast_id, - has_safe_kw, - has_unsafe_kw, - types_map, - } = &self.tree[it]; + let Static { name, visibility, ast_id } = &self.tree[it]; self.print_ast_id(ast_id.erase()); self.print_visibility(*visibility); - if *has_safe_kw { - w!(self, "safe "); - } - if *has_unsafe_kw { - w!(self, "unsafe "); - } w!(self, "static "); - if *mutable { - w!(self, "mut "); - } - w!(self, "{}: ", name.display(self.db.upcast(), self.edition)); - self.print_type_ref(*type_ref, types_map); + w!(self, "{}", name.display(self.db, self.edition)); w!(self, " = _;"); wln!(self); } ModItem::Trait(it) => { - let Trait { name, visibility, is_auto, is_unsafe, items, generic_params, ast_id } = - &self.tree[it]; + let Trait { name, visibility, items, ast_id } = &self.tree[it]; self.print_ast_id(ast_id.erase()); self.print_visibility(*visibility); - if *is_unsafe { - w!(self, "unsafe "); - } - if *is_auto { - w!(self, "auto "); - } - w!(self, "trait {}", name.display(self.db.upcast(), self.edition)); - self.print_generic_params(generic_params, it.into()); - self.print_where_clause_and_opening_brace(generic_params); + w!(self, "trait {} {{", name.display(self.db, self.edition)); self.indented(|this| { for item in &**items { this.print_mod_item((*item).into()); @@ -452,43 +305,15 @@ impl Printer<'_> { wln!(self, "}}"); } ModItem::TraitAlias(it) => { - let TraitAlias { name, visibility, generic_params, ast_id } = &self.tree[it]; + let TraitAlias { name, visibility, ast_id } = &self.tree[it]; self.print_ast_id(ast_id.erase()); self.print_visibility(*visibility); - w!(self, "trait {}", name.display(self.db.upcast(), self.edition)); - self.print_generic_params(generic_params, it.into()); - w!(self, " = "); - self.print_where_clause(generic_params); - w!(self, ";"); - wln!(self); + wln!(self, "trait {} = ..;", name.display(self.db, self.edition)); } ModItem::Impl(it) => { - let Impl { - target_trait, - self_ty, - is_negative, - is_unsafe, - items, - generic_params, - ast_id, - types_map, - } = &self.tree[it]; + let Impl { items, ast_id } = &self.tree[it]; self.print_ast_id(ast_id.erase()); - if *is_unsafe { - w!(self, "unsafe"); - } - w!(self, "impl"); - self.print_generic_params(generic_params, it.into()); - w!(self, " "); - if *is_negative { - w!(self, "!"); - } - if let Some(tr) = target_trait { - self.print_path(&types_map[tr.path], types_map); - w!(self, " for "); - } - self.print_type_ref(*self_ty, types_map); - self.print_where_clause_and_opening_brace(generic_params); + w!(self, "impl {{"); self.indented(|this| { for item in &**items { this.print_mod_item((*item).into()); @@ -497,28 +322,10 @@ impl Printer<'_> { wln!(self, "}}"); } ModItem::TypeAlias(it) => { - let TypeAlias { - name, - visibility, - bounds, - type_ref, - generic_params, - ast_id, - types_map, - } = &self.tree[it]; + let TypeAlias { name, visibility, ast_id } = &self.tree[it]; self.print_ast_id(ast_id.erase()); self.print_visibility(*visibility); - w!(self, "type {}", name.display(self.db.upcast(), self.edition)); - self.print_generic_params(generic_params, it.into()); - if !bounds.is_empty() { - w!(self, ": "); - self.print_type_bounds(bounds, types_map); - } - if let Some(ty) = type_ref { - w!(self, " = "); - self.print_type_ref(*ty, types_map); - } - self.print_where_clause(generic_params); + w!(self, "type {}", name.display(self.db, self.edition)); w!(self, ";"); wln!(self); } @@ -526,7 +333,7 @@ impl Printer<'_> { let Mod { name, visibility, kind, ast_id } = &self.tree[it]; self.print_ast_id(ast_id.erase()); self.print_visibility(*visibility); - w!(self, "mod {}", name.display(self.db.upcast(), self.edition)); + w!(self, "mod {}", name.display(self.db, self.edition)); match kind { ModKind::Inline { items } => { w!(self, " {{"); @@ -546,145 +353,29 @@ impl Printer<'_> { let MacroCall { path, ast_id, expand_to, ctxt } = &self.tree[it]; let _ = writeln!( self, - "// AstId: {:?}, SyntaxContext: {}, ExpandTo: {:?}", + "// AstId: {:?}, SyntaxContextId: {}, ExpandTo: {:?}", ast_id.erase().into_raw(), ctxt, expand_to ); - wln!(self, "{}!(...);", path.display(self.db.upcast(), self.edition)); + wln!(self, "{}!(...);", path.display(self.db, self.edition)); } ModItem::MacroRules(it) => { let MacroRules { name, ast_id } = &self.tree[it]; self.print_ast_id(ast_id.erase()); - wln!( - self, - "macro_rules! {} {{ ... }}", - name.display(self.db.upcast(), self.edition) - ); + wln!(self, "macro_rules! {} {{ ... }}", name.display(self.db, self.edition)); } ModItem::Macro2(it) => { let Macro2 { name, visibility, ast_id } = &self.tree[it]; self.print_ast_id(ast_id.erase()); self.print_visibility(*visibility); - wln!(self, "macro {} {{ ... }}", name.display(self.db.upcast(), self.edition)); + wln!(self, "macro {} {{ ... }}", name.display(self.db, self.edition)); } } self.blank(); } - fn print_type_ref(&mut self, type_ref: TypeRefId, map: &TypesMap) { - let edition = self.edition; - print_type_ref(self.db, type_ref, map, self, edition).unwrap(); - } - - fn print_type_bounds(&mut self, bounds: &[TypeBound], map: &TypesMap) { - let edition = self.edition; - print_type_bounds(self.db, bounds, map, self, edition).unwrap(); - } - - fn print_path(&mut self, path: &Path, map: &TypesMap) { - let edition = self.edition; - print_path(self.db, path, map, self, edition).unwrap(); - } - - fn print_generic_params(&mut self, params: &GenericParams, parent: GenericModItem) { - if params.is_empty() { - return; - } - - w!(self, "<"); - let mut first = true; - for (idx, lt) in params.iter_lt() { - if !first { - w!(self, ", "); - } - first = false; - self.print_attrs_of(AttrOwner::LifetimeParamData(parent, idx), " "); - w!(self, "{}", lt.name.display(self.db.upcast(), self.edition)); - } - for (idx, x) in params.iter_type_or_consts() { - if !first { - w!(self, ", "); - } - first = false; - self.print_attrs_of(AttrOwner::TypeOrConstParamData(parent, idx), " "); - match x { - TypeOrConstParamData::TypeParamData(ty) => match &ty.name { - Some(name) => w!(self, "{}", name.display(self.db.upcast(), self.edition)), - None => w!(self, "_anon_{}", idx.into_raw()), - }, - TypeOrConstParamData::ConstParamData(konst) => { - w!(self, "const {}: ", konst.name.display(self.db.upcast(), self.edition)); - self.print_type_ref(konst.ty, ¶ms.types_map); - } - } - } - w!(self, ">"); - } - - fn print_where_clause_and_opening_brace(&mut self, params: &GenericParams) { - if self.print_where_clause(params) { - w!(self, "\n{{"); - } else { - self.whitespace(); - w!(self, "{{"); - } - } - - fn print_where_clause(&mut self, params: &GenericParams) -> bool { - if params.where_predicates().next().is_none() { - return false; - } - - w!(self, "\nwhere"); - let edition = self.edition; - self.indented(|this| { - for (i, pred) in params.where_predicates().enumerate() { - if i != 0 { - wln!(this, ","); - } - - let (target, bound) = match pred { - WherePredicate::TypeBound { target, bound } => (target, bound), - WherePredicate::Lifetime { target, bound } => { - w!( - this, - "{}: {}", - target.name.display(self.db.upcast(), edition), - bound.name.display(self.db.upcast(), edition) - ); - continue; - } - WherePredicate::ForLifetime { lifetimes, target, bound } => { - w!(this, "for<"); - for (i, lt) in lifetimes.iter().enumerate() { - if i != 0 { - w!(this, ", "); - } - w!(this, "{}", lt.display(self.db.upcast(), edition)); - } - w!(this, "> "); - (target, bound) - } - }; - - match target { - WherePredicateTypeTarget::TypeRef(ty) => { - this.print_type_ref(*ty, ¶ms.types_map) - } - WherePredicateTypeTarget::TypeOrConstParam(id) => match params[*id].name() { - Some(name) => w!(this, "{}", name.display(self.db.upcast(), edition)), - None => w!(this, "_anon_{}", id.into_raw()), - }, - } - w!(this, ": "); - this.print_type_bounds(std::slice::from_ref(bound), ¶ms.types_map); - } - }); - true - } - fn print_ast_id(&mut self, ast_id: ErasedFileAstId) { wln!(self, "// AstId: {:?}", ast_id.into_raw()); } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/item_tree/tests.rs b/src/tools/rust-analyzer/crates/hir-def/src/item_tree/tests.rs index 80b699649fba8..824fbfa5921a7 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/item_tree/tests.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/item_tree/tests.rs @@ -1,4 +1,4 @@ -use expect_test::{expect, Expect}; +use expect_test::{Expect, expect}; use span::Edition; use test_fixture::WithFixture; @@ -83,11 +83,11 @@ extern "C" { #[on_extern_static] // AstId: 3 - pub(self) static EX_STATIC: u8 = _; + pub(self) static EX_STATIC = _; #[on_extern_fn] // AstId: 4 - pub(self) fn ex_fn() -> (); + pub(self) fn ex_fn; } "##]], ); @@ -131,35 +131,35 @@ enum E { // AstId: 2 pub(self) struct Struct { #[doc = " fld docs"] - pub(self) fld: (), + pub(self) fld, } // AstId: 3 pub(self) struct Tuple( #[attr] - pub(self) 0: u8, + pub(self) 0, ); // AstId: 4 pub(self) union Ize { - pub(self) a: (), - pub(self) b: (), + pub(self) a, + pub(self) b, } // AstId: 5 - pub(self) enum E { + pub(self) enum E // AstId: 6 #[doc = " comment on Unit"] Unit, // AstId: 7 #[doc = " comment on Tuple"] Tuple( - pub(self) 0: u8, + pub(self) 0, ), // AstId: 8 Struct { #[doc = " comment on a: u8"] - pub(self) a: u8, + pub(self) a, }, } "#]], @@ -186,33 +186,23 @@ trait Tr: SuperTrait + 'lifetime { "#, expect![[r#" // AstId: 1 - pub static mut ST: () = _; + pub static ST = _; // AstId: 2 - pub(self) const _: Anon = _; + pub(self) const _ = _; #[attr] #[inner_attr_in_fn] // AstId: 3 - pub(self) fn f( - #[attr] - u8, - (), - ) -> () { ... } + pub(self) fn f; // AstId: 4 - pub(self) trait Tr - where - Self: SuperTrait, - Self: 'lifetime - { + pub(self) trait Tr { // AstId: 6 - pub(self) type Assoc: AssocBound = Default; + pub(self) type Assoc; // AstId: 7 - pub(self) fn method( - self: &Self, - ) -> (); + pub(self) fn method; } "#]], ); @@ -242,7 +232,7 @@ mod outline; pub(self) use super::*; // AstId: 4 - pub(self) fn fn_in_module() -> () { ... } + pub(self) fn fn_in_module; } // AstId: 2 @@ -270,159 +260,12 @@ m!(); // AstId: 2 pub macro m2 { ... } - // AstId: 3, SyntaxContext: 2, ExpandTo: Items + // AstId: 3, SyntaxContextId: ROOT2024, ExpandTo: Items m!(...); "#]], ); } -#[test] -fn mod_paths() { - check( - r#" -struct S { - a: self::Ty, - b: super::SuperTy, - c: super::super::SuperSuperTy, - d: ::abs::Path, - e: crate::Crate, - f: plain::path::Ty, -} - "#, - expect![[r#" - // AstId: 1 - pub(self) struct S { - pub(self) a: self::Ty, - pub(self) b: super::SuperTy, - pub(self) c: super::super::SuperSuperTy, - pub(self) d: ::abs::Path, - pub(self) e: crate::Crate, - pub(self) f: plain::path::Ty, - } - "#]], - ) -} - -#[test] -fn types() { - check( - r#" -struct S { - a: Mixed<'a, T, Item=(), OtherItem=u8>, - b: ::Syntax, - c: ::Path::<'a>, - d: dyn for<'a> Trait<'a>, -} - "#, - expect![[r#" - // AstId: 1 - pub(self) struct S { - pub(self) a: Mixed::<'a, T, Item = (), OtherItem = u8>, - pub(self) b: Qualified::::Syntax, - pub(self) c: ::Path::<'a>, - pub(self) d: dyn for<'a> Trait::<'a>, - } - "#]], - ) -} - -#[test] -fn generics() { - check( - r#" -struct S<'a, 'b: 'a, T: Copy + 'a + 'b, const K: u8 = 0> { - field: &'a &'b T, -} - -struct Tuple(T, U); - -impl<'a, 'b: 'a, T: Copy + 'a + 'b, const K: u8 = 0> S<'a, 'b, T, K> { - fn f(arg: impl Copy) -> impl Copy {} -} - -enum Enum<'a, T, const U: u8> {} -union Union<'a, T, const U: u8> {} - -trait Tr<'a, T: 'a>: Super where Self: for<'a> Tr<'a, T> {} - "#, - expect![[r#" - // AstId: 1 - pub(self) struct S<'a, 'b, T, const K: u8> - where - T: Copy, - T: 'a, - T: 'b, - 'b: 'a - { - pub(self) field: &'a &'b T, - } - - // AstId: 2 - pub(self) struct Tuple( - pub(self) 0: T, - pub(self) 1: U, - ) - where - T: Copy, - U: ?Sized; - - // AstId: 3 - impl<'a, 'b, T, const K: u8> S::<'a, 'b, T, K> - where - T: Copy, - T: 'a, - T: 'b, - 'b: 'a - { - // AstId: 9 - pub(self) fn f( - impl Copy, - ) -> impl Copy - where - G: 'a { ... } - } - - // AstId: 4 - pub(self) enum Enum<'a, T, const U: u8> { - } - - // AstId: 5 - pub(self) union Union<'a, T, const U: u8> { - } - - // AstId: 6 - pub(self) trait Tr<'a, Self, T> - where - Self: Super, - T: 'a, - Self: for<'a> Tr::<'a, T> - { - } - "#]], - ) -} - -#[test] -fn generics_with_attributes() { - check( - r#" -struct S<#[cfg(never)] T>; -struct S; -struct S; - "#, - expect![[r#" - // AstId: 1 - pub(self) struct S<#[cfg(never)] T>; - - // AstId: 2 - pub(self) struct S; - - // AstId: 3 - pub(self) struct S; - "#]], - ) -} - #[test] fn pub_self() { check( diff --git a/src/tools/rust-analyzer/crates/hir-def/src/lang_item.rs b/src/tools/rust-analyzer/crates/hir-def/src/lang_item.rs index 59f51db9f7401..51a833b5f150f 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/lang_item.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/lang_item.rs @@ -3,13 +3,13 @@ //! This attribute to tell the compiler about semi built-in std library //! features, such as Fn family of traits. use hir_expand::name::Name; -use intern::{sym, Symbol}; +use intern::{Symbol, sym}; use rustc_hash::FxHashMap; use triomphe::Arc; use crate::{ - db::DefDatabase, path::Path, AdtId, AssocItemId, AttrDefId, CrateId, EnumId, EnumVariantId, - FunctionId, ImplId, ModuleDefId, StaticId, StructId, TraitId, TypeAliasId, UnionId, + AdtId, AssocItemId, AttrDefId, Crate, EnumId, EnumVariantId, FunctionId, ImplId, ModuleDefId, + StaticId, StructId, TraitId, TypeAliasId, UnionId, db::DefDatabase, expr_store::path::Path, }; #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] @@ -83,111 +83,99 @@ impl LangItemTarget { } } -#[derive(Default, Debug, Clone, PartialEq, Eq)] -pub struct LangItems { - items: FxHashMap, -} - -impl LangItems { - pub fn target(&self, item: LangItem) -> Option { - self.items.get(&item).copied() - } +/// Salsa query. This will look for lang items in a specific crate. +#[salsa_macros::tracked(return_ref)] +pub fn crate_lang_items(db: &dyn DefDatabase, krate: Crate) -> Option> { + let _p = tracing::info_span!("crate_lang_items_query").entered(); - /// Salsa query. This will look for lang items in a specific crate. - pub(crate) fn crate_lang_items_query( - db: &dyn DefDatabase, - krate: CrateId, - ) -> Option> { - let _p = tracing::info_span!("crate_lang_items_query").entered(); + let mut lang_items = LangItems::default(); - let mut lang_items = LangItems::default(); + let crate_def_map = db.crate_def_map(krate); - let crate_def_map = db.crate_def_map(krate); + for (_, module_data) in crate_def_map.modules() { + for impl_def in module_data.scope.impls() { + lang_items.collect_lang_item(db, impl_def, LangItemTarget::ImplDef); + for &(_, assoc) in db.impl_items(impl_def).items.iter() { + match assoc { + AssocItemId::FunctionId(f) => { + lang_items.collect_lang_item(db, f, LangItemTarget::Function) + } + AssocItemId::TypeAliasId(t) => { + lang_items.collect_lang_item(db, t, LangItemTarget::TypeAlias) + } + AssocItemId::ConstId(_) => (), + } + } + } - for (_, module_data) in crate_def_map.modules() { - for impl_def in module_data.scope.impls() { - lang_items.collect_lang_item(db, impl_def, LangItemTarget::ImplDef); - for &(_, assoc) in db.impl_data(impl_def).items.iter() { - match assoc { + for def in module_data.scope.declarations() { + match def { + ModuleDefId::TraitId(trait_) => { + lang_items.collect_lang_item(db, trait_, LangItemTarget::Trait); + db.trait_items(trait_).items.iter().for_each(|&(_, assoc_id)| match assoc_id { AssocItemId::FunctionId(f) => { - lang_items.collect_lang_item(db, f, LangItemTarget::Function) + lang_items.collect_lang_item(db, f, LangItemTarget::Function); } - AssocItemId::TypeAliasId(t) => { - lang_items.collect_lang_item(db, t, LangItemTarget::TypeAlias) + AssocItemId::TypeAliasId(alias) => { + lang_items.collect_lang_item(db, alias, LangItemTarget::TypeAlias) } - AssocItemId::ConstId(_) => (), - } + AssocItemId::ConstId(_) => {} + }); } - } - - for def in module_data.scope.declarations() { - match def { - ModuleDefId::TraitId(trait_) => { - lang_items.collect_lang_item(db, trait_, LangItemTarget::Trait); - db.trait_data(trait_).items.iter().for_each( - |&(_, assoc_id)| match assoc_id { - AssocItemId::FunctionId(f) => { - lang_items.collect_lang_item(db, f, LangItemTarget::Function); - } - AssocItemId::TypeAliasId(alias) => lang_items.collect_lang_item( - db, - alias, - LangItemTarget::TypeAlias, - ), - AssocItemId::ConstId(_) => {} - }, - ); - } - ModuleDefId::AdtId(AdtId::EnumId(e)) => { - lang_items.collect_lang_item(db, e, LangItemTarget::EnumId); - crate_def_map.enum_definitions[&e].iter().for_each(|&id| { - lang_items.collect_lang_item(db, id, LangItemTarget::EnumVariant); - }); - } - ModuleDefId::AdtId(AdtId::StructId(s)) => { - lang_items.collect_lang_item(db, s, LangItemTarget::Struct); - } - ModuleDefId::AdtId(AdtId::UnionId(u)) => { - lang_items.collect_lang_item(db, u, LangItemTarget::Union); - } - ModuleDefId::FunctionId(f) => { - lang_items.collect_lang_item(db, f, LangItemTarget::Function); - } - ModuleDefId::StaticId(s) => { - lang_items.collect_lang_item(db, s, LangItemTarget::Static); - } - ModuleDefId::TypeAliasId(t) => { - lang_items.collect_lang_item(db, t, LangItemTarget::TypeAlias); - } - _ => {} + ModuleDefId::AdtId(AdtId::EnumId(e)) => { + lang_items.collect_lang_item(db, e, LangItemTarget::EnumId); + db.enum_variants(e).variants.iter().for_each(|&(id, _)| { + lang_items.collect_lang_item(db, id, LangItemTarget::EnumVariant); + }); + } + ModuleDefId::AdtId(AdtId::StructId(s)) => { + lang_items.collect_lang_item(db, s, LangItemTarget::Struct); + } + ModuleDefId::AdtId(AdtId::UnionId(u)) => { + lang_items.collect_lang_item(db, u, LangItemTarget::Union); + } + ModuleDefId::FunctionId(f) => { + lang_items.collect_lang_item(db, f, LangItemTarget::Function); + } + ModuleDefId::StaticId(s) => { + lang_items.collect_lang_item(db, s, LangItemTarget::Static); } + ModuleDefId::TypeAliasId(t) => { + lang_items.collect_lang_item(db, t, LangItemTarget::TypeAlias); + } + _ => {} } } + } - if lang_items.items.is_empty() { - None - } else { - Some(Arc::new(lang_items)) - } + if lang_items.items.is_empty() { None } else { Some(Box::new(lang_items)) } +} + +/// Salsa query. Look for a lang item, starting from the specified crate and recursively +/// traversing its dependencies. +#[salsa_macros::tracked] +pub fn lang_item( + db: &dyn DefDatabase, + start_crate: Crate, + item: LangItem, +) -> Option { + let _p = tracing::info_span!("lang_item_query").entered(); + if let Some(target) = + crate_lang_items(db, start_crate).as_ref().and_then(|it| it.items.get(&item).copied()) + { + return Some(target); } + start_crate.data(db).dependencies.iter().find_map(|dep| lang_item(db, dep.crate_id, item)) +} - /// Salsa query. Look for a lang item, starting from the specified crate and recursively - /// traversing its dependencies. - pub(crate) fn lang_item_query( - db: &dyn DefDatabase, - start_crate: CrateId, - item: LangItem, - ) -> Option { - let _p = tracing::info_span!("lang_item_query").entered(); - if let Some(target) = - db.crate_lang_items(start_crate).and_then(|it| it.items.get(&item).copied()) - { - return Some(target); - } - db.crate_graph()[start_crate] - .dependencies - .iter() - .find_map(|dep| db.lang_item(dep.crate_id, item)) +#[derive(Default, Debug, Clone, PartialEq, Eq)] +pub struct LangItems { + items: FxHashMap, +} + +impl LangItems { + pub fn target(&self, item: LangItem) -> Option { + self.items.get(&item).copied() } fn collect_lang_item( @@ -209,19 +197,14 @@ pub(crate) fn lang_attr(db: &dyn DefDatabase, item: AttrDefId) -> Option Arc<[Arc<[TraitId]>]> { +pub(crate) fn notable_traits_in_deps(db: &dyn DefDatabase, krate: Crate) -> Arc<[Arc<[TraitId]>]> { let _p = tracing::info_span!("notable_traits_in_deps", ?krate).entered(); - let crate_graph = db.crate_graph(); - Arc::from_iter( - crate_graph.transitive_deps(krate).filter_map(|krate| db.crate_notable_traits(krate)), + db.transitive_deps(krate).into_iter().filter_map(|krate| db.crate_notable_traits(krate)), ) } -pub(crate) fn crate_notable_traits(db: &dyn DefDatabase, krate: CrateId) -> Option> { +pub(crate) fn crate_notable_traits(db: &dyn DefDatabase, krate: Crate) -> Option> { let _p = tracing::info_span!("crate_notable_traits", ?krate).entered(); let mut traits = Vec::new(); @@ -238,11 +221,7 @@ pub(crate) fn crate_notable_traits(db: &dyn DefDatabase, krate: CrateId) -> Opti } } - if traits.is_empty() { - None - } else { - Some(traits.into_iter().collect()) - } + if traits.is_empty() { None } else { Some(traits.into_iter().collect()) } } pub enum GenericRequirement { @@ -285,23 +264,38 @@ macro_rules! language_item_table { } impl LangItem { + pub fn resolve_function(self, db: &dyn DefDatabase, start_crate: Crate) -> Option { + lang_item(db, start_crate, self).and_then(|t| t.as_function()) + } + + pub fn resolve_trait(self, db: &dyn DefDatabase, start_crate: Crate) -> Option { + lang_item(db, start_crate, self).and_then(|t| t.as_trait()) + } + + pub fn resolve_enum(self, db: &dyn DefDatabase, start_crate: Crate) -> Option { + lang_item(db, start_crate, self).and_then(|t| t.as_enum()) + } + + pub fn resolve_type_alias( + self, + db: &dyn DefDatabase, + start_crate: Crate, + ) -> Option { + lang_item(db, start_crate, self).and_then(|t| t.as_type_alias()) + } + /// Opposite of [`LangItem::name`] pub fn from_name(name: &hir_expand::name::Name) -> Option { Self::from_symbol(name.symbol()) } - pub fn path(&self, db: &dyn DefDatabase, start_crate: CrateId) -> Option { - let t = db.lang_item(start_crate, *self)?; + pub fn path(&self, db: &dyn DefDatabase, start_crate: Crate) -> Option { + let t = lang_item(db, start_crate, *self)?; Some(Path::LangItem(t, None)) } - pub fn ty_rel_path( - &self, - db: &dyn DefDatabase, - start_crate: CrateId, - seg: Name, - ) -> Option { - let t = db.lang_item(start_crate, *self)?; + pub fn ty_rel_path(&self, db: &dyn DefDatabase, start_crate: Crate, seg: Name) -> Option { + let t = lang_item(db, start_crate, *self)?; Some(Path::LangItem(t, Some(seg))) } } @@ -366,6 +360,7 @@ language_item_table! { IndexMut, sym::index_mut, index_mut_trait, Target::Trait, GenericRequirement::Exact(1); UnsafeCell, sym::unsafe_cell, unsafe_cell_type, Target::Struct, GenericRequirement::None; + UnsafePinned, sym::unsafe_pinned, unsafe_pinned_type, Target::Struct, GenericRequirement::None; VaList, sym::va_list, va_list, Target::Struct, GenericRequirement::None; Deref, sym::deref, deref_trait, Target::Trait, GenericRequirement::Exact(0); diff --git a/src/tools/rust-analyzer/crates/hir-def/src/lib.rs b/src/tools/rust-analyzer/crates/hir-def/src/lib.rs index 9c947df35e990..28011bda7c543 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/lib.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/lib.rs @@ -18,32 +18,22 @@ extern crate ra_ap_rustc_parse_format as rustc_parse_format; #[cfg(feature = "in-rust-tree")] extern crate rustc_abi; -#[cfg(feature = "in-rust-tree")] -extern crate rustc_hashes; - #[cfg(not(feature = "in-rust-tree"))] extern crate ra_ap_rustc_abi as rustc_abi; -#[cfg(not(feature = "in-rust-tree"))] -extern crate ra_ap_rustc_hashes as rustc_hashes; - pub mod db; pub mod attr; pub mod builtin_type; pub mod item_scope; -pub mod path; pub mod per_ns; -pub mod expander; -pub mod lower; +pub mod signatures; pub mod dyn_map; pub mod item_tree; -pub mod data; -pub mod generics; pub mod lang_item; pub mod hir; @@ -59,57 +49,54 @@ pub mod find_path; pub mod import_map; pub mod visibility; -use intern::Interned; +use intern::{Interned, sym}; pub use rustc_abi as layout; use triomphe::Arc; +pub use crate::signatures::LocalFieldId; + #[cfg(test)] mod macro_expansion_tests; -mod pretty; #[cfg(test)] mod test_db; -use std::{ - hash::{Hash, Hasher}, - panic::{RefUnwindSafe, UnwindSafe}, -}; +use std::hash::{Hash, Hasher}; -use base_db::{ - impl_intern_key, - ra_salsa::{self, InternValueTrivial}, - CrateId, -}; +use base_db::{Crate, impl_intern_key}; use hir_expand::{ + AstId, ExpandResult, ExpandTo, HirFileId, InFile, MacroCallId, MacroCallKind, MacroDefId, + MacroDefKind, builtin::{BuiltinAttrExpander, BuiltinDeriveExpander, BuiltinFnLikeExpander, EagerExpander}, db::ExpandDatabase, eager::expand_eager_macro_input, impl_intern_lookup, + mod_path::ModPath, name::Name, proc_macro::{CustomProcMacroExpander, ProcMacroKind}, - AstId, ExpandError, ExpandResult, ExpandTo, HirFileId, InFile, MacroCallId, MacroCallKind, - MacroDefId, MacroDefKind, }; use item_tree::ExternBlock; use la_arena::Idx; use nameres::DefMap; -use span::{AstIdNode, Edition, FileAstId, SyntaxContextId}; +use span::{AstIdNode, Edition, FileAstId, SyntaxContext}; use stdx::impl_from; -use syntax::{ast, AstNode}; +use syntax::ast; -pub use hir_expand::{tt, Intern, Lookup}; +pub use hir_expand::{Intern, Lookup, tt}; use crate::{ + attr::Attrs, builtin_type::BuiltinType, - data::adt::VariantData, db::DefDatabase, + hir::generics::{LocalLifetimeParamId, LocalTypeOrConstParamId}, item_tree::{ Const, Enum, ExternCrate, Function, Impl, ItemTreeId, ItemTreeNode, Macro2, MacroRules, Static, Struct, Trait, TraitAlias, TypeAlias, Union, Use, Variant, }, + nameres::LocalDefMap, + signatures::VariantFields, }; -type FxIndexMap = - indexmap::IndexMap>; +type FxIndexMap = indexmap::IndexMap; /// A wrapper around three booleans #[derive(Debug, Clone, PartialEq, Eq, Hash, Copy)] pub struct ImportPathConfig { @@ -192,8 +179,7 @@ pub trait ItemTreeLoc { macro_rules! impl_intern { ($id:ident, $loc:ident, $intern:ident, $lookup:ident) => { - impl_intern_key!($id); - impl InternValueTrivial for $loc {} + impl_intern_key!($id, $loc); impl_intern_lookup!(DefDatabase, $id, $loc, $intern, $lookup); }; } @@ -213,87 +199,58 @@ macro_rules! impl_loc { }; } -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -pub struct FunctionId(ra_salsa::InternId); type FunctionLoc = AssocItemLoc; impl_intern!(FunctionId, FunctionLoc, intern_function, lookup_intern_function); impl_loc!(FunctionLoc, id: Function, container: ItemContainerId); -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)] -pub struct StructId(ra_salsa::InternId); type StructLoc = ItemLoc; impl_intern!(StructId, StructLoc, intern_struct, lookup_intern_struct); impl_loc!(StructLoc, id: Struct, container: ModuleId); -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)] -pub struct UnionId(ra_salsa::InternId); pub type UnionLoc = ItemLoc; impl_intern!(UnionId, UnionLoc, intern_union, lookup_intern_union); impl_loc!(UnionLoc, id: Union, container: ModuleId); -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)] -pub struct EnumId(ra_salsa::InternId); pub type EnumLoc = ItemLoc; impl_intern!(EnumId, EnumLoc, intern_enum, lookup_intern_enum); impl_loc!(EnumLoc, id: Enum, container: ModuleId); -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -pub struct ConstId(ra_salsa::InternId); type ConstLoc = AssocItemLoc; impl_intern!(ConstId, ConstLoc, intern_const, lookup_intern_const); impl_loc!(ConstLoc, id: Const, container: ItemContainerId); -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -pub struct StaticId(ra_salsa::InternId); pub type StaticLoc = AssocItemLoc; impl_intern!(StaticId, StaticLoc, intern_static, lookup_intern_static); impl_loc!(StaticLoc, id: Static, container: ItemContainerId); -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)] -pub struct TraitId(ra_salsa::InternId); pub type TraitLoc = ItemLoc; impl_intern!(TraitId, TraitLoc, intern_trait, lookup_intern_trait); impl_loc!(TraitLoc, id: Trait, container: ModuleId); -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -pub struct TraitAliasId(ra_salsa::InternId); pub type TraitAliasLoc = ItemLoc; impl_intern!(TraitAliasId, TraitAliasLoc, intern_trait_alias, lookup_intern_trait_alias); impl_loc!(TraitAliasLoc, id: TraitAlias, container: ModuleId); -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -pub struct TypeAliasId(ra_salsa::InternId); type TypeAliasLoc = AssocItemLoc; impl_intern!(TypeAliasId, TypeAliasLoc, intern_type_alias, lookup_intern_type_alias); impl_loc!(TypeAliasLoc, id: TypeAlias, container: ItemContainerId); -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Ord, PartialOrd)] -pub struct ImplId(ra_salsa::InternId); type ImplLoc = ItemLoc; impl_intern!(ImplId, ImplLoc, intern_impl, lookup_intern_impl); impl_loc!(ImplLoc, id: Impl, container: ModuleId); -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Ord, PartialOrd)] -pub struct UseId(ra_salsa::InternId); type UseLoc = ItemLoc; impl_intern!(UseId, UseLoc, intern_use, lookup_intern_use); impl_loc!(UseLoc, id: Use, container: ModuleId); -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Ord, PartialOrd)] -pub struct ExternCrateId(ra_salsa::InternId); type ExternCrateLoc = ItemLoc; impl_intern!(ExternCrateId, ExternCrateLoc, intern_extern_crate, lookup_intern_extern_crate); impl_loc!(ExternCrateLoc, id: ExternCrate, container: ModuleId); -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Ord, PartialOrd)] -pub struct ExternBlockId(ra_salsa::InternId); type ExternBlockLoc = ItemLoc; impl_intern!(ExternBlockId, ExternBlockLoc, intern_extern_block, lookup_intern_extern_block); impl_loc!(ExternBlockLoc, id: ExternBlock, container: ModuleId); -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -pub struct EnumVariantId(ra_salsa::InternId); - #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub struct EnumVariantLoc { pub id: ItemTreeId, @@ -302,9 +259,6 @@ pub struct EnumVariantLoc { } impl_intern!(EnumVariantId, EnumVariantLoc, intern_enum_variant, lookup_intern_enum_variant); impl_loc!(EnumVariantLoc, id: Variant, parent: EnumId); - -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Ord, PartialOrd)] -pub struct Macro2Id(ra_salsa::InternId); #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub struct Macro2Loc { pub container: ModuleId, @@ -316,8 +270,6 @@ pub struct Macro2Loc { impl_intern!(Macro2Id, Macro2Loc, intern_macro2, lookup_intern_macro2); impl_loc!(Macro2Loc, id: Macro2, container: ModuleId); -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Ord, PartialOrd)] -pub struct MacroRulesId(ra_salsa::InternId); #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub struct MacroRulesLoc { pub container: ModuleId, @@ -345,8 +297,7 @@ pub enum MacroExpander { BuiltInDerive(BuiltinDeriveExpander), BuiltInEager(EagerExpander), } -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Ord, PartialOrd)] -pub struct ProcMacroId(ra_salsa::InternId); + #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub struct ProcMacroLoc { pub container: CrateRootModuleId, @@ -358,8 +309,6 @@ pub struct ProcMacroLoc { impl_intern!(ProcMacroId, ProcMacroLoc, intern_proc_macro, lookup_intern_proc_macro); impl_loc!(ProcMacroLoc, id: Function, container: CrateRootModuleId); -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Ord, PartialOrd)] -pub struct BlockId(ra_salsa::InternId); #[derive(Debug, Hash, PartialEq, Eq, Clone)] pub struct BlockLoc { pub ast_id: AstId, @@ -368,24 +317,10 @@ pub struct BlockLoc { } impl_intern!(BlockId, BlockLoc, intern_block, lookup_intern_block); -/// Id of the anonymous const block expression and patterns. This is very similar to `ClosureId` and -/// shouldn't be a `DefWithBodyId` since its type inference is dependent on its parent. -#[derive(Debug, Clone, Copy, Eq, PartialEq, Hash)] -pub struct ConstBlockId(ra_salsa::InternId); -impl_intern!(ConstBlockId, ConstBlockLoc, intern_anonymous_const, lookup_intern_anonymous_const); - -#[derive(Debug, Hash, PartialEq, Eq, Clone)] -pub struct ConstBlockLoc { - /// The parent of the anonymous const block. - pub parent: DefWithBodyId, - /// The root expression of this const block in the parent body. - pub root: hir::ExprId, -} - /// A `ModuleId` that is always a crate's root module. #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub struct CrateRootModuleId { - krate: CrateId, + krate: Crate, } impl CrateRootModuleId { @@ -393,7 +328,11 @@ impl CrateRootModuleId { db.crate_def_map(self.krate) } - pub fn krate(self) -> CrateId { + pub(crate) fn local_def_map(&self, db: &dyn DefDatabase) -> (Arc, Arc) { + db.crate_local_def_map(self.krate) + } + + pub fn krate(self) -> Crate { self.krate } } @@ -421,8 +360,8 @@ impl From for ModuleDefId { } } -impl From for CrateRootModuleId { - fn from(krate: CrateId) -> Self { +impl From for CrateRootModuleId { + fn from(krate: Crate) -> Self { CrateRootModuleId { krate } } } @@ -441,7 +380,7 @@ impl TryFrom for CrateRootModuleId { #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)] pub struct ModuleId { - krate: CrateId, + krate: Crate, /// If this `ModuleId` was derived from a `DefMap` for a block expression, this stores the /// `BlockId` of that block expression. If `None`, this module is part of the crate-level /// `DefMap` of `krate`. @@ -458,11 +397,22 @@ impl ModuleId { } } + pub(crate) fn local_def_map(self, db: &dyn DefDatabase) -> (Arc, Arc) { + match self.block { + Some(block) => (db.block_def_map(block), self.only_local_def_map(db)), + None => db.crate_local_def_map(self.krate), + } + } + + pub(crate) fn only_local_def_map(self, db: &dyn DefDatabase) -> Arc { + db.crate_local_def_map(self.krate).1 + } + pub fn crate_def_map(self, db: &dyn DefDatabase) -> Arc { db.crate_def_map(self.krate) } - pub fn krate(self) -> CrateId { + pub fn krate(self) -> Crate { self.krate } @@ -470,11 +420,7 @@ impl ModuleId { let def_map = self.def_map(db); let parent = def_map[self.local_id].parent?; def_map[parent].children.iter().find_map(|(name, module_id)| { - if *module_id == self.local_id { - Some(name.clone()) - } else { - None - } + if *module_id == self.local_id { Some(name.clone()) } else { None } }) } @@ -525,8 +471,6 @@ pub struct FieldId { pub local_id: LocalFieldId, } -pub type LocalFieldId = Idx; - #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub struct TupleId(pub u32); @@ -536,12 +480,11 @@ pub struct TupleFieldId { pub index: u32, } -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)] pub struct TypeOrConstParamId { pub parent: GenericDefId, pub local_id: LocalTypeOrConstParamId, } -impl InternValueTrivial for TypeOrConstParamId {} /// A TypeOrConstParamId with an invariant that it actually belongs to a type #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] @@ -595,15 +538,11 @@ impl From for TypeOrConstParamId { } } -pub type LocalTypeOrConstParamId = Idx; - #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub struct LifetimeParamId { pub parent: GenericDefId, pub local_id: LocalLifetimeParamId, } -pub type LocalLifetimeParamId = Idx; -impl InternValueTrivial for LifetimeParamId {} #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub enum ItemContainerId { @@ -615,7 +554,7 @@ pub enum ItemContainerId { impl_from!(ModuleId for ItemContainerId); /// A Data Type -#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)] +#[derive(Debug, PartialOrd, Ord, Clone, Copy, PartialEq, Eq, Hash, salsa_macros::Supertype)] pub enum AdtId { StructId(StructId), UnionId(UnionId), @@ -624,7 +563,7 @@ pub enum AdtId { impl_from!(StructId, UnionId, EnumId for AdtId); /// A macro -#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)] +#[derive(Debug, PartialOrd, Ord, Clone, Copy, PartialEq, Eq, Hash, salsa_macros::Supertype)] pub enum MacroId { Macro2Id(Macro2Id), MacroRulesId(MacroRulesId), @@ -678,222 +617,59 @@ impl_from!( for ModuleDefId ); -/// Something that holds types, required for the current const arg lowering implementation as they -/// need to be able to query where they are defined. -#[derive(Debug, Clone, Copy, Eq, PartialEq, Hash)] -pub enum TypeOwnerId { - FunctionId(FunctionId), - StaticId(StaticId), - ConstId(ConstId), - InTypeConstId(InTypeConstId), - AdtId(AdtId), - TraitId(TraitId), - TraitAliasId(TraitAliasId), - TypeAliasId(TypeAliasId), - ImplId(ImplId), - EnumVariantId(EnumVariantId), -} - -impl TypeOwnerId { - fn as_generic_def_id(self, db: &dyn DefDatabase) -> Option { - Some(match self { - TypeOwnerId::FunctionId(it) => GenericDefId::FunctionId(it), - TypeOwnerId::ConstId(it) => GenericDefId::ConstId(it), - TypeOwnerId::StaticId(it) => GenericDefId::StaticId(it), - TypeOwnerId::AdtId(it) => GenericDefId::AdtId(it), - TypeOwnerId::TraitId(it) => GenericDefId::TraitId(it), - TypeOwnerId::TraitAliasId(it) => GenericDefId::TraitAliasId(it), - TypeOwnerId::TypeAliasId(it) => GenericDefId::TypeAliasId(it), - TypeOwnerId::ImplId(it) => GenericDefId::ImplId(it), - TypeOwnerId::EnumVariantId(it) => { - GenericDefId::AdtId(AdtId::EnumId(it.lookup(db).parent)) - } - TypeOwnerId::InTypeConstId(_) => return None, - }) - } -} - -impl_from!( - FunctionId, - StaticId, - ConstId, - InTypeConstId, - AdtId, - TraitId, - TraitAliasId, - TypeAliasId, - ImplId, - EnumVariantId - for TypeOwnerId -); - -// Every `DefWithBodyId` is a type owner, since bodies can contain type (e.g. `{ let it: Type = _; }`) -impl From for TypeOwnerId { - fn from(value: DefWithBodyId) -> Self { - match value { - DefWithBodyId::FunctionId(it) => it.into(), - DefWithBodyId::StaticId(it) => it.into(), - DefWithBodyId::ConstId(it) => it.into(), - DefWithBodyId::InTypeConstId(it) => it.into(), - DefWithBodyId::VariantId(it) => it.into(), - } - } -} - -impl From for TypeOwnerId { - fn from(value: GenericDefId) -> Self { - match value { - GenericDefId::FunctionId(it) => it.into(), - GenericDefId::AdtId(it) => it.into(), - GenericDefId::TraitId(it) => it.into(), - GenericDefId::TraitAliasId(it) => it.into(), - GenericDefId::TypeAliasId(it) => it.into(), - GenericDefId::ImplId(it) => it.into(), - GenericDefId::ConstId(it) => it.into(), - GenericDefId::StaticId(it) => it.into(), - } - } -} - -// FIXME: This should not be a thing -/// A thing that we want to store in interned ids, but we don't know its type in `hir-def`. This is -/// currently only used in `InTypeConstId` for storing the type (which has type `Ty` defined in -/// the `hir-ty` crate) of the constant in its id, which is a temporary hack so we may want -/// to remove this after removing that. -pub trait OpaqueInternableThing: - std::any::Any + std::fmt::Debug + Sync + Send + UnwindSafe + RefUnwindSafe -{ - fn as_any(&self) -> &dyn std::any::Any; - fn box_any(&self) -> Box; - fn dyn_hash(&self, state: &mut dyn Hasher); - fn dyn_eq(&self, other: &dyn OpaqueInternableThing) -> bool; - fn dyn_clone(&self) -> Box; -} - -impl Hash for dyn OpaqueInternableThing { - fn hash(&self, state: &mut H) { - self.dyn_hash(state); - } -} - -impl PartialEq for dyn OpaqueInternableThing { - fn eq(&self, other: &Self) -> bool { - self.dyn_eq(other) - } -} - -impl Eq for dyn OpaqueInternableThing {} - -impl Clone for Box { - fn clone(&self) -> Self { - self.dyn_clone() - } -} - -// FIXME(const-generic-body): Use an stable id for in type consts. -// -// The current id uses `AstId` which will be changed by every change in the code. Ideally -// we should use an id which is relative to the type owner, so that every change will only invalidate the -// id if it happens inside of the type owner. -// -// The solution probably is to have some query on `TypeOwnerId` to traverse its constant children and store -// their `AstId` in a list (vector or arena), and use the index of that list in the id here. That query probably -// needs name resolution, and might go far and handles the whole path lowering or type lowering for a `TypeOwnerId`. -// -// Whatever path the solution takes, it should answer 3 questions at the same time: -// * Is the id stable enough? -// * How to find a constant id using an ast node / position in the source code? This is needed when we want to -// provide ide functionalities inside an in type const (which we currently don't support) e.g. go to definition -// for a local defined there. A complex id might have some trouble in this reverse mapping. -// * How to find the return type of a constant using its id? We have this data when we are doing type lowering -// and the name of the struct that contains this constant is resolved, so a query that only traverses the -// type owner by its syntax tree might have a hard time here. - -/// A constant in a type as a substitution for const generics (like `Foo<{ 2 + 2 }>`) or as an array -/// length (like `[u8; 2 + 2]`). These constants are body owner and are a variant of `DefWithBodyId`. These -/// are not called `AnonymousConstId` to prevent confusion with [`ConstBlockId`]. -#[derive(Debug, Clone, Copy, Eq, PartialEq, Hash)] -pub struct InTypeConstId(ra_salsa::InternId); -impl_intern!(InTypeConstId, InTypeConstLoc, intern_in_type_const, lookup_intern_in_type_const); - -// We would like to set `derive(PartialEq)` -// but the compiler complains about that `.expected_ty` does not implement the `Copy` trait. -#[allow(clippy::derived_hash_with_manual_eq)] -#[derive(Debug, Hash, Eq, Clone)] -pub struct InTypeConstLoc { - pub id: AstId, - /// The thing this const arg appears in - pub owner: TypeOwnerId, - // FIXME(const-generic-body): The expected type should not be - pub expected_ty: Box, -} - -impl PartialEq for InTypeConstLoc { - fn eq(&self, other: &Self) -> bool { - self.id == other.id && self.owner == other.owner && *self.expected_ty == *other.expected_ty - } -} - -impl InTypeConstId { - pub fn source(&self, db: &dyn DefDatabase) -> ast::ConstArg { - let src = self.lookup(db).id; - let file_id = src.file_id; - let root = &db.parse_or_expand(file_id); - db.ast_id_map(file_id).get(src.value).to_node(root) - } -} - /// A constant, which might appears as a const item, an anonymous const block in expressions /// or patterns, or as a constant in types with const generics. -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, salsa_macros::Supertype)] pub enum GeneralConstId { ConstId(ConstId), StaticId(StaticId), - ConstBlockId(ConstBlockId), - InTypeConstId(InTypeConstId), } -impl_from!(ConstId, StaticId, ConstBlockId, InTypeConstId for GeneralConstId); +impl_from!(ConstId, StaticId for GeneralConstId); impl GeneralConstId { - pub fn generic_def(self, db: &dyn DefDatabase) -> Option { + pub fn generic_def(self, _db: &dyn DefDatabase) -> Option { match self { GeneralConstId::ConstId(it) => Some(it.into()), GeneralConstId::StaticId(it) => Some(it.into()), - GeneralConstId::ConstBlockId(it) => it.lookup(db).parent.as_generic_def_id(db), - GeneralConstId::InTypeConstId(it) => it.lookup(db).owner.as_generic_def_id(db), } } pub fn name(self, db: &dyn DefDatabase) -> String { match self { GeneralConstId::StaticId(it) => { - db.static_data(it).name.display(db.upcast(), Edition::CURRENT).to_string() + let loc = it.lookup(db); + let tree = loc.item_tree_id().item_tree(db); + let name = tree[loc.id.value].name.display(db, Edition::CURRENT); + name.to_string() + } + GeneralConstId::ConstId(const_id) => { + let loc = const_id.lookup(db); + let tree = loc.item_tree_id().item_tree(db); + tree[loc.id.value].name.as_ref().map_or_else( + || "_".to_owned(), + |name| name.display(db, Edition::CURRENT).to_string(), + ) } - GeneralConstId::ConstId(const_id) => db - .const_data(const_id) - .name - .as_ref() - .map(|it| it.as_str()) - .unwrap_or("_") - .to_owned(), - GeneralConstId::ConstBlockId(id) => format!("{{anonymous const {id:?}}}"), - GeneralConstId::InTypeConstId(id) => format!("{{in type const {id:?}}}"), } } } -/// The defs which have a body. -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +/// The defs which have a body (have root expressions for type inference). +#[derive(Debug, PartialOrd, Ord, Clone, Copy, PartialEq, Eq, Hash, salsa_macros::Supertype)] pub enum DefWithBodyId { FunctionId(FunctionId), StaticId(StaticId), ConstId(ConstId), - InTypeConstId(InTypeConstId), VariantId(EnumVariantId), + // /// All fields of a variant are inference roots + // VariantId(VariantId), + // /// The signature can contain inference roots in a bunch of places + // /// like const parameters or const arguments in paths + // This should likely be kept on its own with a separate query + // GenericDefId(GenericDefId), } - -impl_from!(FunctionId, ConstId, StaticId, InTypeConstId for DefWithBodyId); +impl_from!(FunctionId, ConstId, StaticId for DefWithBodyId); impl From for DefWithBodyId { fn from(id: EnumVariantId) -> Self { @@ -908,9 +684,6 @@ impl DefWithBodyId { DefWithBodyId::StaticId(s) => Some(s.into()), DefWithBodyId::ConstId(c) => Some(c.into()), DefWithBodyId::VariantId(c) => Some(c.lookup(db).parent.into()), - // FIXME: stable rust doesn't allow generics in constants, but we should - // use `TypeOwnerId::as_generic_def_id` when it does. - DefWithBodyId::InTypeConstId(_) => None, } } } @@ -928,7 +701,7 @@ pub enum AssocItemId { // casting them, and somehow making the constructors private, which would be annoying. impl_from!(FunctionId, ConstId, TypeAliasId for AssocItemId); -#[derive(Clone, Copy, PartialEq, Eq, Debug, Hash)] +#[derive(Debug, PartialOrd, Ord, Clone, Copy, PartialEq, Eq, Hash, salsa_macros::Supertype)] pub enum GenericDefId { AdtId(AdtId), // consts can have type parameters from their parents (i.e. associated consts of traits) @@ -962,7 +735,7 @@ impl GenericDefId { ) -> (HirFileId, Option) { fn file_id_and_params_of_item_loc( db: &dyn DefDatabase, - def: impl for<'db> Lookup = dyn DefDatabase + 'db, Data = Loc>, + def: impl Lookup, ) -> (HirFileId, Option) where Loc: src::HasSource, @@ -1017,15 +790,13 @@ impl From for GenericDefId { } } -#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] +#[derive(Debug, PartialOrd, Ord, Clone, Copy, PartialEq, Eq, Hash, salsa_macros::Supertype)] pub enum CallableDefId { FunctionId(FunctionId), StructId(StructId), EnumVariantId(EnumVariantId), } -impl InternValueTrivial for CallableDefId {} - impl_from!(FunctionId, StructId, EnumVariantId for CallableDefId); impl From for ModuleDefId { fn from(def: CallableDefId) -> ModuleDefId { @@ -1038,7 +809,7 @@ impl From for ModuleDefId { } impl CallableDefId { - pub fn krate(self, db: &dyn DefDatabase) -> CrateId { + pub fn krate(self, db: &dyn DefDatabase) -> Crate { match self { CallableDefId::FunctionId(f) => f.krate(db), CallableDefId::StructId(s) => s.krate(db), @@ -1135,7 +906,7 @@ impl From for AttrDefId { } } -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, salsa_macros::Supertype)] pub enum VariantId { EnumVariantId(EnumVariantId), StructId(StructId), @@ -1144,12 +915,8 @@ pub enum VariantId { impl_from!(EnumVariantId, StructId, UnionId for VariantId); impl VariantId { - pub fn variant_data(self, db: &dyn DefDatabase) -> Arc { - match self { - VariantId::StructId(it) => db.struct_data(it).variant_data.clone(), - VariantId::UnionId(it) => db.union_data(it).variant_data.clone(), - VariantId::EnumVariantId(it) => db.enum_variant_data(it).variant_data.clone(), - } + pub fn variant_data(self, db: &dyn DefDatabase) -> Arc { + db.variant_fields(self) } pub fn file_id(self, db: &dyn DefDatabase) -> HirFileId { @@ -1175,7 +942,7 @@ pub trait HasModule { /// Returns the crate this thing is defined within. #[inline] #[doc(alias = "crate")] - fn krate(&self, db: &dyn DefDatabase) -> CrateId { + fn krate(&self, db: &dyn DefDatabase) -> Crate { self.module(db).krate } } @@ -1197,7 +964,7 @@ pub trait HasModule { impl HasModule for ItemId where N: ItemTreeNode, - ItemId: for<'db> Lookup = dyn DefDatabase + 'db, Data = ItemLoc> + Copy, + ItemId: Lookup> + Copy, { #[inline] fn module(&self, db: &dyn DefDatabase) -> ModuleId { @@ -1222,7 +989,7 @@ where #[inline] fn module_for_assoc_item_loc<'db>( db: &(dyn 'db + DefDatabase), - id: impl Lookup = dyn DefDatabase + 'db, Data = AssocItemLoc>, + id: impl Lookup>, ) -> ModuleId { id.lookup(db).container.module(db) } @@ -1325,23 +1092,6 @@ impl HasModule for MacroId { } } -impl HasModule for TypeOwnerId { - fn module(&self, db: &dyn DefDatabase) -> ModuleId { - match *self { - TypeOwnerId::FunctionId(it) => it.module(db), - TypeOwnerId::StaticId(it) => it.module(db), - TypeOwnerId::ConstId(it) => it.module(db), - TypeOwnerId::AdtId(it) => it.module(db), - TypeOwnerId::TraitId(it) => it.module(db), - TypeOwnerId::TraitAliasId(it) => it.module(db), - TypeOwnerId::TypeAliasId(it) => it.module(db), - TypeOwnerId::ImplId(it) => it.module(db), - TypeOwnerId::EnumVariantId(it) => it.module(db), - TypeOwnerId::InTypeConstId(it) => it.lookup(db).owner.module(db), - } - } -} - impl HasModule for DefWithBodyId { fn module(&self, db: &dyn DefDatabase) -> ModuleId { match self { @@ -1349,7 +1099,6 @@ impl HasModule for DefWithBodyId { DefWithBodyId::StaticId(it) => it.module(db), DefWithBodyId::ConstId(it) => it.module(db), DefWithBodyId::VariantId(it) => it.module(db), - DefWithBodyId::InTypeConstId(it) => it.lookup(db).owner.module(db), } } } @@ -1417,112 +1166,31 @@ impl ModuleDefId { }) } } - -/// A helper trait for converting to MacroCallId -pub trait AsMacroCall { - fn as_call_id( - &self, - db: &dyn ExpandDatabase, - krate: CrateId, - resolver: impl Fn(&path::ModPath) -> Option + Copy, - ) -> Option { - self.as_call_id_with_errors(db, krate, resolver).ok()?.value - } - - fn as_call_id_with_errors( - &self, - db: &dyn ExpandDatabase, - krate: CrateId, - resolver: impl Fn(&path::ModPath) -> Option + Copy, - ) -> Result>, UnresolvedMacro>; -} - -impl AsMacroCall for InFile<&ast::MacroCall> { - fn as_call_id_with_errors( - &self, - db: &dyn ExpandDatabase, - krate: CrateId, - resolver: impl Fn(&path::ModPath) -> Option + Copy, - ) -> Result>, UnresolvedMacro> { - let expands_to = hir_expand::ExpandTo::from_call_site(self.value); - let ast_id = AstId::new(self.file_id, db.ast_id_map(self.file_id).ast_id(self.value)); - let span_map = db.span_map(self.file_id); - let path = self.value.path().and_then(|path| { - let range = path.syntax().text_range(); - let mod_path = path::ModPath::from_src(db, path, &mut |range| { - span_map.as_ref().span_for_range(range).ctx - })?; - let call_site = span_map.span_for_range(range); - Some((call_site, mod_path)) - }); - - let Some((call_site, path)) = path else { - return Ok(ExpandResult::only_err(ExpandError::other( - span_map.span_for_range(self.value.syntax().text_range()), - "malformed macro invocation", - ))); - }; - - macro_call_as_call_id_with_eager( - db, - ast_id, - &path, - call_site.ctx, - expands_to, - krate, - resolver, - resolver, - ) - } -} - /// Helper wrapper for `AstId` with `ModPath` #[derive(Clone, Debug, Eq, PartialEq)] struct AstIdWithPath { ast_id: AstId, - path: Interned, + path: Interned, } impl AstIdWithPath { - fn new( - file_id: HirFileId, - ast_id: FileAstId, - path: Interned, - ) -> AstIdWithPath { + fn new(file_id: HirFileId, ast_id: FileAstId, path: Interned) -> AstIdWithPath { AstIdWithPath { ast_id: AstId::new(file_id, ast_id), path } } } -fn macro_call_as_call_id( - db: &dyn ExpandDatabase, - call: &AstIdWithPath, - call_site: SyntaxContextId, - expand_to: ExpandTo, - krate: CrateId, - resolver: impl Fn(&path::ModPath) -> Option + Copy, -) -> Result, UnresolvedMacro> { - macro_call_as_call_id_with_eager( - db, - call.ast_id, - &call.path, - call_site, - expand_to, - krate, - resolver, - resolver, - ) - .map(|res| res.value) -} - -fn macro_call_as_call_id_with_eager( +pub fn macro_call_as_call_id( db: &dyn ExpandDatabase, ast_id: AstId, - path: &path::ModPath, - call_site: SyntaxContextId, + path: &ModPath, + call_site: SyntaxContext, expand_to: ExpandTo, - krate: CrateId, - resolver: impl FnOnce(&path::ModPath) -> Option, - eager_resolver: impl Fn(&path::ModPath) -> Option, + krate: Crate, + resolver: impl Fn(&ModPath) -> Option + Copy, + eager_callback: &mut dyn FnMut( + InFile<(syntax::AstPtr, span::FileAstId)>, + MacroCallId, + ), ) -> Result>, UnresolvedMacro> { let def = resolver(path).ok_or_else(|| UnresolvedMacro { path: path.clone() })?; @@ -1534,7 +1202,8 @@ fn macro_call_as_call_id_with_eager( ast_id, def, call_site, - &|path| eager_resolver(path).filter(MacroDefId::is_fn_like), + &|path| resolver(path).filter(MacroDefId::is_fn_like), + eager_callback, ), _ if def.is_fn_like() => ExpandResult { value: Some(def.make_call( @@ -1552,8 +1221,86 @@ fn macro_call_as_call_id_with_eager( #[derive(Debug)] pub struct UnresolvedMacro { - pub path: hir_expand::mod_path::ModPath, + pub path: ModPath, } #[derive(Default, Debug, Eq, PartialEq, Clone, Copy)] pub struct SyntheticSyntax; + +// Feature: Completions Attribute +// Crate authors can opt their type out of completions in some cases. +// This is done with the `#[rust_analyzer::completions(...)]` attribute. +// +// All completeable things support `#[rust_analyzer::completions(ignore_flyimport)]`, +// which causes the thing to get excluded from flyimport completion. It will still +// be completed when in scope. This is analogous to the setting `rust-analyzer.completion.autoimport.exclude` +// with `"type": "always"`. +// +// In addition, traits support two more modes: `#[rust_analyzer::completions(ignore_flyimport_methods)]`, +// which means the trait itself may still be flyimported but its methods won't, and +// `#[rust_analyzer::completions(ignore_methods)]`, which means the methods won't be completed even when +// the trait is in scope (but the trait itself may still be completed). The methods will still be completed +// on `dyn Trait`, `impl Trait` or where the trait is specified in bounds. These modes correspond to +// the settings `rust-analyzer.completion.autoimport.exclude` with `"type": "methods"` and +// `rust-analyzer.completion.excludeTraits`, respectively. +// +// Malformed attributes will be ignored without warnings. +// +// Note that users have no way to override this attribute, so be careful and only include things +// users definitely do not want to be completed! + +/// `#[rust_analyzer::completions(...)]` options. +#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub enum Complete { + /// No `#[rust_analyzer::completions(...)]`. + Yes, + /// `#[rust_analyzer::completions(ignore_flyimport)]`. + IgnoreFlyimport, + /// `#[rust_analyzer::completions(ignore_flyimport_methods)]` (on a trait only). + IgnoreFlyimportMethods, + /// `#[rust_analyzer::completions(ignore_methods)]` (on a trait only). + IgnoreMethods, +} + +impl Complete { + pub fn extract(is_trait: bool, attrs: &Attrs) -> Complete { + let mut do_not_complete = Complete::Yes; + for ra_attr in attrs.rust_analyzer_tool() { + let segments = ra_attr.path.segments(); + if segments.len() != 2 { + continue; + } + let action = segments[1].symbol(); + if *action == sym::completions { + match ra_attr.token_tree_value().map(|tt| tt.token_trees().flat_tokens()) { + Some([tt::TokenTree::Leaf(tt::Leaf::Ident(ident))]) => { + if ident.sym == sym::ignore_flyimport { + do_not_complete = Complete::IgnoreFlyimport; + } else if is_trait { + if ident.sym == sym::ignore_methods { + do_not_complete = Complete::IgnoreMethods; + } else if ident.sym == sym::ignore_flyimport_methods { + do_not_complete = Complete::IgnoreFlyimportMethods; + } + } + } + _ => {} + } + } + } + do_not_complete + } + + #[inline] + pub fn for_trait_item(trait_attr: Complete, item_attr: Complete) -> Complete { + match (trait_attr, item_attr) { + ( + Complete::IgnoreFlyimportMethods + | Complete::IgnoreFlyimport + | Complete::IgnoreMethods, + _, + ) => Complete::IgnoreFlyimport, + _ => item_attr, + } + } +} diff --git a/src/tools/rust-analyzer/crates/hir-def/src/lower.rs b/src/tools/rust-analyzer/crates/hir-def/src/lower.rs deleted file mode 100644 index 7cddd48eb174c..0000000000000 --- a/src/tools/rust-analyzer/crates/hir-def/src/lower.rs +++ /dev/null @@ -1,149 +0,0 @@ -//! Context for lowering paths. -use std::{cell::OnceCell, mem}; - -use hir_expand::{span_map::SpanMap, AstId, HirFileId, InFile}; -use span::{AstIdMap, AstIdNode, Edition, EditionedFileId, FileId, RealSpanMap}; -use stdx::thin_vec::ThinVec; -use syntax::ast; -use triomphe::Arc; - -use crate::{ - db::DefDatabase, - path::Path, - type_ref::{PathId, TypeBound, TypePtr, TypeRef, TypeRefId, TypesMap, TypesSourceMap}, -}; - -pub struct LowerCtx<'a> { - pub db: &'a dyn DefDatabase, - file_id: HirFileId, - span_map: OnceCell, - ast_id_map: OnceCell>, - impl_trait_bounds: Vec>, - // Prevent nested impl traits like `impl Foo`. - outer_impl_trait: bool, - types_map: &'a mut TypesMap, - types_source_map: &'a mut TypesSourceMap, -} - -impl<'a> LowerCtx<'a> { - pub fn new( - db: &'a dyn DefDatabase, - file_id: HirFileId, - types_map: &'a mut TypesMap, - types_source_map: &'a mut TypesSourceMap, - ) -> Self { - LowerCtx { - db, - file_id, - span_map: OnceCell::new(), - ast_id_map: OnceCell::new(), - impl_trait_bounds: Vec::new(), - outer_impl_trait: false, - types_map, - types_source_map, - } - } - - pub fn with_span_map_cell( - db: &'a dyn DefDatabase, - file_id: HirFileId, - span_map: OnceCell, - types_map: &'a mut TypesMap, - types_source_map: &'a mut TypesSourceMap, - ) -> Self { - LowerCtx { - db, - file_id, - span_map, - ast_id_map: OnceCell::new(), - impl_trait_bounds: Vec::new(), - outer_impl_trait: false, - types_map, - types_source_map, - } - } - - /// Prepares a `LowerCtx` for synthetic AST that needs to be lowered. This is intended for IDE things. - pub fn for_synthetic_ast( - db: &'a dyn DefDatabase, - ast_id_map: Arc, - types_map: &'a mut TypesMap, - types_source_map: &'a mut TypesSourceMap, - ) -> Self { - let file_id = EditionedFileId::new( - FileId::from_raw(EditionedFileId::MAX_FILE_ID), - Edition::Edition2015, - ); - LowerCtx { - db, - // Make up an invalid file id, so that if we will try to actually access it salsa will panic. - file_id: file_id.into(), - span_map: SpanMap::RealSpanMap(Arc::new(RealSpanMap::absolute(file_id))).into(), - ast_id_map: ast_id_map.into(), - impl_trait_bounds: Vec::new(), - outer_impl_trait: false, - types_map, - types_source_map, - } - } - - pub(crate) fn span_map(&self) -> &SpanMap { - self.span_map.get_or_init(|| self.db.span_map(self.file_id)) - } - - pub(crate) fn lower_path(&mut self, ast: ast::Path) -> Option { - Path::from_src(self, ast) - } - - pub(crate) fn ast_id(&self, item: &N) -> AstId { - InFile::new( - self.file_id, - self.ast_id_map.get_or_init(|| self.db.ast_id_map(self.file_id)).ast_id(item), - ) - } - - pub fn update_impl_traits_bounds_from_type_ref(&mut self, type_ref: TypeRefId) { - TypeRef::walk(type_ref, self.types_map, &mut |tr| { - if let TypeRef::ImplTrait(bounds) = tr { - self.impl_trait_bounds.push(bounds.clone()); - } - }); - } - - pub fn take_impl_traits_bounds(&mut self) -> Vec> { - mem::take(&mut self.impl_trait_bounds) - } - - pub(crate) fn outer_impl_trait(&self) -> bool { - self.outer_impl_trait - } - - pub(crate) fn with_outer_impl_trait_scope( - &mut self, - impl_trait: bool, - f: impl FnOnce(&mut Self) -> R, - ) -> R { - let old = mem::replace(&mut self.outer_impl_trait, impl_trait); - let result = f(self); - self.outer_impl_trait = old; - result - } - - pub(crate) fn alloc_type_ref(&mut self, type_ref: TypeRef, node: TypePtr) -> TypeRefId { - let id = self.types_map.types.alloc(type_ref); - self.types_source_map.types_map_back.insert(id, InFile::new(self.file_id, node)); - id - } - - pub(crate) fn alloc_type_ref_desugared(&mut self, type_ref: TypeRef) -> TypeRefId { - self.types_map.types.alloc(type_ref) - } - - pub(crate) fn alloc_error_type(&mut self) -> TypeRefId { - self.types_map.types.alloc(TypeRef::Error) - } - - pub(crate) fn alloc_path(&mut self, path: Path, node: TypePtr) -> PathId { - PathId::from_type_ref_unchecked(self.alloc_type_ref(TypeRef::Path(path), node)) - } -} diff --git a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/builtin_derive_macro.rs b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/builtin_derive_macro.rs index c31d322132897..777953d3f212b 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/builtin_derive_macro.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/builtin_derive_macro.rs @@ -336,7 +336,7 @@ enum Command { } impl <> $crate::cmp::PartialOrd for Command< > where { - fn partial_cmp(&self , other: &Self ) -> $crate::option::Option::Option<$crate::cmp::Ordering> { + fn partial_cmp(&self , other: &Self ) -> $crate::option::Option<$crate::cmp::Ordering> { match $crate::intrinsics::discriminant_value(self ).partial_cmp(&$crate::intrinsics::discriminant_value(other)) { $crate::option::Option::Some($crate::cmp::Ordering::Equal)=> { match (self , other) { diff --git a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/builtin_fn_macro.rs b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/builtin_fn_macro.rs index a3b48831a0b0f..e21d1415aa29e 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/builtin_fn_macro.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/builtin_fn_macro.rs @@ -285,8 +285,6 @@ fn main() { /* parse error: expected expression */ builtin #format_args (x = ); /* parse error: expected expression */ -/* parse error: expected R_PAREN */ -/* parse error: expected expression, item or let statement */ builtin #format_args (x = , x = 2); /* parse error: expected expression */ builtin #format_args ("{}", x = ); diff --git a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe.rs b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe.rs index 8c5bd3b6d3696..38fc4b3d118ae 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe.rs @@ -35,9 +35,9 @@ macro_rules! f { }; } -struct#0:1@58..64#4# MyTraitMap2#0:2@31..42#2# {#0:1@72..73#4# - map#0:1@86..89#4#:#0:1@89..90#4# #0:1@89..90#4#::#0:1@91..93#4#std#0:1@93..96#4#::#0:1@96..98#4#collections#0:1@98..109#4#::#0:1@109..111#4#HashSet#0:1@111..118#4#<#0:1@118..119#4#(#0:1@119..120#4#)#0:1@120..121#4#>#0:1@121..122#4#,#0:1@122..123#4# -}#0:1@132..133#4# +struct#0:1@58..64#14336# MyTraitMap2#0:2@31..42#ROOT2024# {#0:1@72..73#14336# + map#0:1@86..89#14336#:#0:1@89..90#14336# #0:1@89..90#14336#::#0:1@91..93#14336#std#0:1@93..96#14336#::#0:1@96..98#14336#collections#0:1@98..109#14336#::#0:1@109..111#14336#HashSet#0:1@111..118#14336#<#0:1@118..119#14336#(#0:1@119..120#14336#)#0:1@120..121#14336#>#0:1@121..122#14336#,#0:1@122..123#14336# +}#0:1@132..133#14336# "#]], ); } @@ -75,12 +75,12 @@ macro_rules! f { }; } -fn#0:2@30..32#2# main#0:2@33..37#2#(#0:2@37..38#2#)#0:2@38..39#2# {#0:2@40..41#2# - 1#0:2@50..51#2#;#0:2@51..52#2# - 1.0#0:2@61..64#2#;#0:2@64..65#2# - (#0:2@74..75#2#(#0:2@75..76#2#1#0:2@76..77#2#,#0:2@77..78#2# )#0:2@78..79#2#,#0:2@79..80#2# )#0:2@80..81#2#.#0:2@81..82#2#0#0:2@82..85#2#.#0:2@82..85#2#0#0:2@82..85#2#;#0:2@85..86#2# - let#0:2@95..98#2# x#0:2@99..100#2# =#0:2@101..102#2# 1#0:2@103..104#2#;#0:2@104..105#2# -}#0:2@110..111#2# +fn#0:2@30..32#ROOT2024# main#0:2@33..37#ROOT2024#(#0:2@37..38#ROOT2024#)#0:2@38..39#ROOT2024# {#0:2@40..41#ROOT2024# + 1#0:2@50..51#ROOT2024#;#0:2@51..52#ROOT2024# + 1.0#0:2@61..64#ROOT2024#;#0:2@64..65#ROOT2024# + (#0:2@74..75#ROOT2024#(#0:2@75..76#ROOT2024#1#0:2@76..77#ROOT2024#,#0:2@77..78#ROOT2024# )#0:2@78..79#ROOT2024#,#0:2@79..80#ROOT2024# )#0:2@80..81#ROOT2024#.#0:2@81..82#ROOT2024#0#0:2@82..85#ROOT2024#.#0:2@82..85#ROOT2024#0#0:2@82..85#ROOT2024#;#0:2@85..86#ROOT2024# + let#0:2@95..98#ROOT2024# x#0:2@99..100#ROOT2024# =#0:2@101..102#ROOT2024# 1#0:2@103..104#ROOT2024#;#0:2@104..105#ROOT2024# +}#0:2@110..111#ROOT2024# "#]], @@ -171,7 +171,7 @@ fn main(foo: ()) { } fn main(foo: ()) { - /* error: unresolved macro unresolved */"helloworld!"#0:3@236..321#2#; + /* error: unresolved macro unresolved */"helloworld!"#0:3@236..321#ROOT2024#; } } @@ -197,7 +197,7 @@ macro_rules! mk_struct { #[macro_use] mod foo; -struct#1:1@59..65#4# Foo#0:2@32..35#2#(#1:1@70..71#4#u32#0:2@41..44#2#)#1:1@74..75#4#;#1:1@75..76#4# +struct#1:1@59..65#14336# Foo#0:2@32..35#ROOT2024#(#1:1@70..71#14336#u32#0:2@41..44#ROOT2024#)#1:1@74..75#14336#;#1:1@75..76#14336# "#]], ); } @@ -423,10 +423,10 @@ m! { foo, bar } macro_rules! m { ($($i:ident),*) => ( impl Bar { $(fn $i() {})* } ); } -impl#\4# Bar#\4# {#\4# - fn#\4# foo#\2#(#\4#)#\4# {#\4#}#\4# - fn#\4# bar#\2#(#\4#)#\4# {#\4#}#\4# -}#\4# +impl#\14336# Bar#\14336# {#\14336# + fn#\14336# foo#\ROOT2024#(#\14336#)#\14336# {#\14336#}#\14336# + fn#\14336# bar#\ROOT2024#(#\14336#)#\14336# {#\14336#}#\14336# +}#\14336# "#]], ); } @@ -1408,7 +1408,7 @@ ok!(); macro_rules! m2 { ($($a:expr => $b:ident)* _ => $c:expr) => { ok!(); } } -ok!(); +/* error: unexpected token in input */ok!(); "#]], ); } @@ -1979,3 +1979,53 @@ fn f() { "#]], ); } + +#[test] +fn semicolon_does_not_glue() { + check( + r#" +macro_rules! bug { + ($id: expr) => { + true + }; + ($id: expr; $($attr: ident),*) => { + true + }; + ($id: expr; $($attr: ident),*; $norm: expr) => { + true + }; + ($id: expr; $($attr: ident),*;; $print: expr) => { + true + }; + ($id: expr; $($attr: ident),*; $norm: expr; $print: expr) => { + true + }; +} +fn f() { + let _ = bug!(a;;;test); +} + "#, + expect![[r#" +macro_rules! bug { + ($id: expr) => { + true + }; + ($id: expr; $($attr: ident),*) => { + true + }; + ($id: expr; $($attr: ident),*; $norm: expr) => { + true + }; + ($id: expr; $($attr: ident),*;; $print: expr) => { + true + }; + ($id: expr; $($attr: ident),*; $norm: expr; $print: expr) => { + true + }; +} +fn f() { + let _ = true; +} + "#]], + ); +} diff --git a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe/matching.rs b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe/matching.rs index e9a977da913bf..e33a366769b09 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe/matching.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe/matching.rs @@ -162,9 +162,10 @@ fn test() { } #[test] -fn expr_dont_match_inline_const() { +fn expr_inline_const() { check( r#" +//- /lib.rs edition:2021 macro_rules! foo { ($e:expr) => { $e } } @@ -181,6 +182,30 @@ macro_rules! foo { fn test() { /* error: no rule matches input tokens */missing; } +"#]], + ); + check( + r#" +//- /lib.rs edition:2024 +macro_rules! foo { + ($e:expr) => { $e } +} + +fn test() { + foo!(const { 3 }); +} +"#, + expect![[r#" +macro_rules! foo { + ($e:expr) => { $e } +} + +fn test() { + (const { + 3 + } + ); +} "#]], ); } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe/regression.rs b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe/regression.rs index 1bbed01443de8..cb4fcd887d8a5 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe/regression.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe/regression.rs @@ -582,8 +582,8 @@ macro_rules! arbitrary { } impl $crate::arbitrary::Arbitrary for Vec { - type Parameters = RangedParams1; - type Strategy = VecStrategy; + type Parameters = RangedParams1 ; + type Strategy = VecStrategy ; fn arbitrary_with(args: Self::Parameters) -> Self::Strategy { { let product_unpack![range, a] = args; vec(any_with::(a), range) diff --git a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mod.rs b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mod.rs index a2d0ba3deb845..800c96ebdae07 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mod.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mod.rs @@ -16,34 +16,33 @@ mod proc_macros; use std::{iter, ops::Range, sync}; -use base_db::SourceDatabase; +use base_db::RootQueryDb; use expect_test::Expect; use hir_expand::{ + AstId, InFile, MacroCallId, MacroCallKind, MacroKind, db::ExpandDatabase, proc_macro::{ProcMacro, ProcMacroExpander, ProcMacroExpansionError, ProcMacroKind}, span_map::SpanMapRef, - InFile, MacroCallKind, MacroFileId, MacroFileIdExt, MacroKind, }; use intern::Symbol; use itertools::Itertools; use span::{Edition, Span}; use stdx::{format_to, format_to_acc}; use syntax::{ - ast::{self, edit::IndentLevel}, - AstNode, + AstNode, AstPtr, SyntaxKind::{COMMENT, EOF, IDENT, LIFETIME_IDENT}, SyntaxNode, T, + ast::{self, edit::IndentLevel}, }; use test_fixture::WithFixture; use crate::{ + AdtId, Lookup, ModuleDefId, db::DefDatabase, - nameres::{DefMap, MacroSubNs, ModuleSource}, - resolver::HasResolver, + nameres::{DefMap, ModuleSource}, src::HasSource, test_db::TestDB, tt::TopSubtree, - AdtId, AsMacroCall, Lookup, ModuleDefId, }; #[track_caller] @@ -63,9 +62,11 @@ fn check_errors(#[rust_analyzer::rust_fixture] ra_fixture: &str, expect: Expect) MacroCallKind::Derive { ast_id, .. } => ast_id.map(|it| it.erase()), MacroCallKind::Attr { ast_id, .. } => ast_id.map(|it| it.erase()), }; - let ast = db - .parse(ast_id.file_id.file_id().expect("macros inside macros are not supported")) - .syntax_node(); + + let editioned_file_id = + ast_id.file_id.file_id().expect("macros inside macros are not supported"); + + let ast = db.parse(editioned_file_id).syntax_node(); let ast_id_map = db.ast_id_map(ast_id.file_id); let node = ast_id_map.get_erased(ast_id.value).to_node(&ast); Some((node.text_range(), errors)) @@ -76,7 +77,6 @@ fn check_errors(#[rust_analyzer::rust_fixture] ra_fixture: &str, expect: Expect) expect.assert_eq(&errors); } -#[track_caller] fn check(#[rust_analyzer::rust_fixture] ra_fixture: &str, mut expect: Expect) { let extra_proc_macros = vec![( r#" @@ -93,50 +93,59 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream disabled: false, }, )]; + + fn resolve( + db: &dyn DefDatabase, + def_map: &DefMap, + ast_id: AstId, + ast_ptr: InFile>, + ) -> Option { + def_map.modules().find_map(|module| { + for decl in + module.1.scope.declarations().chain(module.1.scope.unnamed_consts().map(Into::into)) + { + let body = match decl { + ModuleDefId::FunctionId(it) => it.into(), + ModuleDefId::ConstId(it) => it.into(), + ModuleDefId::StaticId(it) => it.into(), + _ => continue, + }; + + let (body, sm) = db.body_with_source_map(body); + if let Some(it) = + body.blocks(db).find_map(|block| resolve(db, &block.1, ast_id, ast_ptr)) + { + return Some(it); + } + if let Some((_, res)) = sm.macro_calls().find(|it| it.0 == ast_ptr) { + return Some(res); + } + } + module.1.scope.macro_invoc(ast_id) + }) + } + let db = TestDB::with_files_extra_proc_macros(ra_fixture, extra_proc_macros); let krate = db.fetch_test_crate(); let def_map = db.crate_def_map(krate); let local_id = DefMap::ROOT; - let module = def_map.module_id(local_id); - let resolver = module.resolver(&db); let source = def_map[local_id].definition_source(&db); let source_file = match source.value { ModuleSource::SourceFile(it) => it, ModuleSource::Module(_) | ModuleSource::BlockExpr(_) => panic!(), }; - // What we want to do is to replace all macros (fn-like, derive, attr) with - // their expansions. Turns out, we don't actually store enough information - // to do this precisely though! Specifically, if a macro expands to nothing, - // it leaves zero traces in def-map, so we can't get its expansion after the - // fact. - // - // This is the usual - // - // resolve/record tension! - // - // So here we try to do a resolve, which is necessary a heuristic. For macro - // calls, we use `as_call_id_with_errors`. For derives, we look at the impls - // in the module and assume that, if impls's source is a different - // `HirFileId`, than it came from macro expansion. - let mut text_edits = Vec::new(); let mut expansions = Vec::new(); - for macro_call in source_file.syntax().descendants().filter_map(ast::MacroCall::cast) { - let macro_call = InFile::new(source.file_id, ¯o_call); - let res = macro_call - .as_call_id_with_errors(&db, krate, |path| { - resolver - .resolve_path_as_macro(&db, path, Some(MacroSubNs::Bang)) - .map(|(it, _)| db.macro_def(it)) - }) - .unwrap(); - let macro_call_id = res.value.unwrap(); - let macro_file = MacroFileId { macro_call_id }; - let mut expansion_result = db.parse_macro_expansion(macro_file); - expansion_result.err = expansion_result.err.or(res.err); - expansions.push((macro_call.value.clone(), expansion_result)); + for macro_call_node in source_file.syntax().descendants().filter_map(ast::MacroCall::cast) { + let ast_id = db.ast_id_map(source.file_id).ast_id(¯o_call_node); + let ast_id = InFile::new(source.file_id, ast_id); + let ptr = InFile::new(source.file_id, AstPtr::new(¯o_call_node)); + let macro_call_id = resolve(&db, &def_map, ast_id, ptr) + .unwrap_or_else(|| panic!("unable to find semantic macro call {macro_call_node}")); + let expansion_result = db.parse_macro_expansion(macro_call_id); + expansions.push((macro_call_node.clone(), expansion_result)); } for (call, exp) in expansions.into_iter().rev() { @@ -357,7 +366,7 @@ impl ProcMacroExpander for IdentityWhenValidProcMacroExpander { _: Span, _: Span, _: Span, - _: Option, + _: String, ) -> Result { let (parse, _) = syntax_bridge::token_tree_to_syntax_node( subtree, @@ -371,4 +380,8 @@ impl ProcMacroExpander for IdentityWhenValidProcMacroExpander { panic!("got invalid macro input: {:?}", parse.errors()); } } + + fn eq_dyn(&self, other: &dyn ProcMacroExpander) -> bool { + other.as_any().type_id() == std::any::TypeId::of::() + } } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/proc_macros.rs b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/proc_macros.rs index a43c0eb9d70bb..b2e1adc3650d4 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/proc_macros.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/proc_macros.rs @@ -181,9 +181,9 @@ fn foo(&self) { self.0. 1; } -fn#0:1@45..47#2# foo#0:1@48..51#2#(#0:1@51..52#2#�:1@52..53#2#self#0:1@53..57#2# )#0:1@57..58#2# {#0:1@59..60#2# - self#0:1@65..69#2# .#0:1@69..70#2#0#0:1@70..71#2#.#0:1@71..72#2#1#0:1@73..74#2#;#0:1@74..75#2# -}#0:1@76..77#2#"#]], +fn#0:1@45..47#ROOT2024# foo#0:1@48..51#ROOT2024#(#0:1@51..52#ROOT2024#�:1@52..53#ROOT2024#self#0:1@53..57#ROOT2024# )#0:1@57..58#ROOT2024# {#0:1@59..60#ROOT2024# + self#0:1@65..69#ROOT2024# .#0:1@69..70#ROOT2024#0#0:1@70..71#ROOT2024#.#0:1@71..72#ROOT2024#1#0:1@73..74#ROOT2024#;#0:1@74..75#ROOT2024# +}#0:1@76..77#ROOT2024#"#]], ); } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres.rs index 3b6e3c5916e32..fc66d8e28d8c6 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/nameres.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres.rs @@ -47,6 +47,7 @@ //! path and, upon success, we run macro expansion and "collect module" phase on //! the result +pub mod assoc; pub mod attr_resolution; mod collector; pub mod diagnostics; @@ -59,30 +60,30 @@ mod tests; use std::ops::Deref; -use base_db::CrateId; +use base_db::Crate; use hir_expand::{ - name::Name, proc_macro::ProcMacroKind, ErasedAstId, HirFileId, InFile, MacroCallId, MacroDefId, + EditionedFileId, ErasedAstId, HirFileId, InFile, MacroCallId, MacroDefId, mod_path::ModPath, + name::Name, proc_macro::ProcMacroKind, }; use intern::Symbol; use itertools::Itertools; use la_arena::Arena; use rustc_hash::{FxHashMap, FxHashSet}; -use span::{Edition, EditionedFileId, FileAstId, FileId, ROOT_ERASED_FILE_AST_ID}; +use span::{Edition, FileAstId, FileId, ROOT_ERASED_FILE_AST_ID}; use stdx::format_to; -use syntax::{ast, AstNode, SmolStr, SyntaxNode}; +use syntax::{AstNode, SmolStr, SyntaxNode, ToSmolStr, ast}; use triomphe::Arc; use tt::TextRange; use crate::{ + AstId, BlockId, BlockLoc, CrateRootModuleId, ExternCrateId, FunctionId, FxIndexMap, + LocalModuleId, Lookup, MacroExpander, MacroId, ModuleId, ProcMacroId, UseId, db::DefDatabase, item_scope::{BuiltinShadowMode, ItemScope}, item_tree::{ItemTreeId, Mod, TreeId}, nameres::{diagnostics::DefDiagnostic, path_resolution::ResolveMode}, - path::ModPath, per_ns::PerNs, visibility::{Visibility, VisibilityExplicitness}, - AstId, BlockId, BlockLoc, CrateRootModuleId, EnumId, EnumVariantId, ExternCrateId, FunctionId, - FxIndexMap, LocalModuleId, Lookup, MacroExpander, MacroId, ModuleId, ProcMacroId, UseId, }; pub use self::path_resolution::ResolvePathResultPrefixInfo; @@ -95,6 +96,39 @@ const PREDEFINED_TOOLS: &[SmolStr] = &[ SmolStr::new_static("rust_analyzer"), ]; +/// Parts of the def map that are only needed when analyzing code in the same crate. +/// +/// There are some data in the def map (e.g. extern prelude) that is only needed when analyzing +/// things in the same crate (and maybe in the IDE layer), e.g. the extern prelude. If we put +/// it in the DefMap dependant DefMaps will be invalidated when they change (e.g. when we add +/// a dependency to the crate). Instead we split them out of the DefMap into a LocalDefMap struct. +/// `crate_local_def_map()` returns both, and `crate_def_map()` returns only the external-relevant +/// DefMap. +#[derive(Debug, PartialEq, Eq, Default)] +pub struct LocalDefMap { + // FIXME: There are probably some other things that could be here, but this is less severe and you + // need to be careful with things that block def maps also have. + /// The extern prelude which contains all root modules of external crates that are in scope. + extern_prelude: FxIndexMap)>, +} + +impl LocalDefMap { + pub(crate) const EMPTY: &Self = + &Self { extern_prelude: FxIndexMap::with_hasher(rustc_hash::FxBuildHasher) }; + + fn shrink_to_fit(&mut self) { + let Self { extern_prelude } = self; + extern_prelude.shrink_to_fit(); + } + + pub(crate) fn extern_prelude( + &self, + ) -> impl DoubleEndedIterator))> + '_ + { + self.extern_prelude.iter().map(|(name, &def)| (name, def)) + } +} + /// Contains the results of (early) name resolution. /// /// A `DefMap` stores the module tree and the definitions that are in scope in every module after @@ -107,7 +141,7 @@ const PREDEFINED_TOOLS: &[SmolStr] = &[ #[derive(Debug, PartialEq, Eq)] pub struct DefMap { /// The crate this `DefMap` belongs to. - krate: CrateId, + krate: Crate, /// When this is a block def map, this will hold the block id of the block and module that /// contains this block. block: Option, @@ -124,12 +158,15 @@ pub struct DefMap { /// this contains all kinds of macro, not just `macro_rules!` macro. /// ExternCrateId being None implies it being imported from the general prelude import. macro_use_prelude: FxHashMap)>, - pub(crate) enum_definitions: FxHashMap>, + // FIXME: AstId's are fairly unstable /// Tracks which custom derives are in scope for an item, to allow resolution of derive helper /// attributes. // FIXME: Figure out a better way for the IDE layer to resolve these? derive_helpers_in_scope: FxHashMap, Vec<(Name, MacroId, MacroCallId)>>, + // FIXME: AstId's are fairly unstable + /// A mapping from [`hir_expand::MacroDefId`] to [`crate::MacroId`]. + pub macro_def_to_macro_id: FxHashMap, /// The diagnostics that need to be emitted for this crate. diagnostics: Vec, @@ -141,9 +178,6 @@ pub struct DefMap { /// Data that belongs to a crate which is shared between a crate's def map and all its block def maps. #[derive(Clone, Debug, PartialEq, Eq)] struct DefMapCrateData { - /// The extern prelude which contains all root modules of external crates that are in scope. - extern_prelude: FxIndexMap)>, - /// Side table for resolving derive helpers. exported_derives: FxHashMap>, fn_proc_macro_mapping: FxHashMap, @@ -166,7 +200,6 @@ struct DefMapCrateData { impl DefMapCrateData { fn new(edition: Edition) -> Self { Self { - extern_prelude: FxIndexMap::default(), exported_derives: FxHashMap::default(), fn_proc_macro_mapping: FxHashMap::default(), registered_attrs: Vec::new(), @@ -182,7 +215,6 @@ impl DefMapCrateData { fn shrink_to_fit(&mut self) { let Self { - extern_prelude, exported_derives, fn_proc_macro_mapping, registered_attrs, @@ -194,7 +226,6 @@ impl DefMapCrateData { edition: _, recursion_limit: _, } = self; - extern_prelude.shrink_to_fit(); exported_derives.shrink_to_fit(); fn_proc_macro_mapping.shrink_to_fit(); registered_attrs.shrink_to_fit(); @@ -219,11 +250,11 @@ struct BlockRelativeModuleId { } impl BlockRelativeModuleId { - fn def_map(self, db: &dyn DefDatabase, krate: CrateId) -> Arc { + fn def_map(self, db: &dyn DefDatabase, krate: Crate) -> Arc { self.into_module(krate).def_map(db) } - fn into_module(self, krate: CrateId) -> ModuleId { + fn into_module(self, krate: Crate) -> ModuleId { ModuleId { krate, block: self.block, local_id: self.local_id } } @@ -295,18 +326,19 @@ impl ModuleOrigin { /// That is, a file or a `mod foo {}` with items. pub fn definition_source(&self, db: &dyn DefDatabase) -> InFile { match self { - &ModuleOrigin::File { definition, .. } | &ModuleOrigin::CrateRoot { definition } => { - let sf = db.parse(definition).tree(); - InFile::new(definition.into(), ModuleSource::SourceFile(sf)) + &ModuleOrigin::File { definition: editioned_file_id, .. } + | &ModuleOrigin::CrateRoot { definition: editioned_file_id } => { + let sf = db.parse(editioned_file_id).tree(); + InFile::new(editioned_file_id.into(), ModuleSource::SourceFile(sf)) } &ModuleOrigin::Inline { definition, definition_tree_id } => InFile::new( definition_tree_id.file_id(), ModuleSource::Module( - AstId::new(definition_tree_id.file_id(), definition).to_node(db.upcast()), + AstId::new(definition_tree_id.file_id(), definition).to_node(db), ), ), ModuleOrigin::BlockExpr { block, .. } => { - InFile::new(block.file_id, ModuleSource::BlockExpr(block.to_node(db.upcast()))) + InFile::new(block.file_id, ModuleSource::BlockExpr(block.to_node(db))) } } } @@ -334,14 +366,28 @@ impl DefMap { self.data.edition } - pub(crate) fn crate_def_map_query(db: &dyn DefDatabase, crate_id: CrateId) -> Arc { - let crate_graph = db.crate_graph(); - let krate = &crate_graph[crate_id]; - let name = krate.display_name.as_deref().map(Symbol::as_str).unwrap_or_default(); - let _p = tracing::info_span!("crate_def_map_query", ?name).entered(); + pub(crate) fn crate_def_map_query(db: &dyn DefDatabase, crate_id: Crate) -> Arc { + db.crate_local_def_map(crate_id).0 + } + + pub(crate) fn crate_local_def_map_query( + db: &dyn DefDatabase, + crate_id: Crate, + ) -> (Arc, Arc) { + let krate = crate_id.data(db); + let _p = tracing::info_span!( + "crate_def_map_query", + name=?crate_id + .extra_data(db) + .display_name + .as_ref() + .map(|it| it.crate_name().to_smolstr()) + .unwrap_or_default() + ) + .entered(); let module_data = ModuleData::new( - ModuleOrigin::CrateRoot { definition: krate.root_file_id() }, + ModuleOrigin::CrateRoot { definition: krate.root_file_id(db) }, Visibility::Public, ); @@ -351,10 +397,14 @@ impl DefMap { module_data, None, ); - let def_map = - collector::collect_defs(db, def_map, TreeId::new(krate.root_file_id().into(), None)); + let (def_map, local_def_map) = collector::collect_defs( + db, + def_map, + TreeId::new(krate.root_file_id(db).into(), None), + None, + ); - Arc::new(def_map) + (Arc::new(def_map), Arc::new(local_def_map)) } pub(crate) fn block_def_map_query(db: &dyn DefDatabase, block_id: BlockId) -> Arc { @@ -367,10 +417,10 @@ impl DefMap { let module_data = ModuleData::new(ModuleOrigin::BlockExpr { block: ast_id, id: block_id }, visibility); - let parent_map = module.def_map(db); + let (crate_map, crate_local_map) = db.crate_local_def_map(module.krate); let def_map = DefMap::empty( module.krate, - parent_map.data.clone(), + crate_map.data.clone(), module_data, Some(BlockInfo { block: block_id, @@ -378,13 +428,17 @@ impl DefMap { }), ); - let def_map = - collector::collect_defs(db, def_map, TreeId::new(ast_id.file_id, Some(block_id))); + let (def_map, _) = collector::collect_defs( + db, + def_map, + TreeId::new(ast_id.file_id, Some(block_id)), + Some(crate_local_map), + ); Arc::new(def_map) } fn empty( - krate: CrateId, + krate: Crate, crate_data: Arc, module_data: ModuleData, block: Option, @@ -401,8 +455,8 @@ impl DefMap { macro_use_prelude: FxHashMap::default(), derive_helpers_in_scope: FxHashMap::default(), diagnostics: Vec::new(), - enum_definitions: FxHashMap::default(), data: crate_data, + macro_def_to_macro_id: FxHashMap::default(), } } fn shrink_to_fit(&mut self) { @@ -416,14 +470,14 @@ impl DefMap { krate: _, prelude: _, data: _, - enum_definitions, + macro_def_to_macro_id, } = self; + macro_def_to_macro_id.shrink_to_fit(); macro_use_prelude.shrink_to_fit(); diagnostics.shrink_to_fit(); modules.shrink_to_fit(); derive_helpers_in_scope.shrink_to_fit(); - enum_definitions.shrink_to_fit(); for (_, module) in modules.iter_mut() { module.children.shrink_to_fit(); module.scope.shrink_to_fit(); @@ -432,11 +486,15 @@ impl DefMap { } impl DefMap { - pub fn modules_for_file(&self, file_id: FileId) -> impl Iterator + '_ { + pub fn modules_for_file<'a>( + &'a self, + db: &'a dyn DefDatabase, + file_id: FileId, + ) -> impl Iterator + 'a { self.modules .iter() .filter(move |(_id, data)| { - data.origin.file_id().map(EditionedFileId::file_id) == Some(file_id) + data.origin.file_id().map(|file_id| file_id.file_id(db)) == Some(file_id) }) .map(|(id, _data)| id) } @@ -476,7 +534,7 @@ impl DefMap { self.data.fn_proc_macro_mapping.get(&id).copied() } - pub fn krate(&self) -> CrateId { + pub fn krate(&self) -> Crate { self.krate } @@ -551,12 +609,12 @@ impl DefMap { ) { format_to!(buf, "{}\n", path); - map.modules[module].scope.dump(db.upcast(), buf); + map.modules[module].scope.dump(db, buf); for (name, child) in map.modules[module].children.iter().sorted_by(|a, b| Ord::cmp(&a.0, &b.0)) { - let path = format!("{path}::{}", name.display(db.upcast(), Edition::LATEST)); + let path = format!("{path}::{}", name.display(db, Edition::LATEST)); buf.push('\n'); go(buf, db, map, &path, *child); } @@ -587,19 +645,13 @@ impl DefMap { self.prelude } - pub(crate) fn extern_prelude( - &self, - ) -> impl DoubleEndedIterator))> + '_ - { - self.data.extern_prelude.iter().map(|(name, &def)| (name, def)) - } - pub(crate) fn macro_use_prelude(&self) -> &FxHashMap)> { &self.macro_use_prelude } pub(crate) fn resolve_path( &self, + local_def_map: &LocalDefMap, db: &dyn DefDatabase, original_module: LocalModuleId, path: &ModPath, @@ -607,6 +659,7 @@ impl DefMap { expected_macro_subns: Option, ) -> (PerNs, Option) { let res = self.resolve_path_fp_with_macro( + local_def_map, db, ResolveMode::Other, original_module, @@ -621,12 +674,14 @@ impl DefMap { /// points at the unresolved segments. pub(crate) fn resolve_path_locally( &self, + local_def_map: &LocalDefMap, db: &dyn DefDatabase, original_module: LocalModuleId, path: &ModPath, shadow: BuiltinShadowMode, ) -> (PerNs, Option, ResolvePathResultPrefixInfo) { let res = self.resolve_path_fp_with_macro_single( + local_def_map, db, ResolveMode::Other, original_module, @@ -695,17 +750,14 @@ impl ModuleData { &ModuleOrigin::File { definition, .. } | &ModuleOrigin::CrateRoot { definition } => { InFile::new( definition.into(), - ErasedAstId::new(definition.into(), ROOT_ERASED_FILE_AST_ID) - .to_range(db.upcast()), + ErasedAstId::new(definition.into(), ROOT_ERASED_FILE_AST_ID).to_range(db), ) } &ModuleOrigin::Inline { definition, definition_tree_id } => InFile::new( definition_tree_id.file_id(), - AstId::new(definition_tree_id.file_id(), definition).to_range(db.upcast()), + AstId::new(definition_tree_id.file_id(), definition).to_range(db), ), - ModuleOrigin::BlockExpr { block, .. } => { - InFile::new(block.file_id, block.to_range(db.upcast())) - } + ModuleOrigin::BlockExpr { block, .. } => InFile::new(block.file_id, block.to_range(db)), } } @@ -713,7 +765,7 @@ impl ModuleData { /// `None` for the crate root or block. pub fn declaration_source(&self, db: &dyn DefDatabase) -> Option> { let decl = self.origin.declaration()?; - let value = decl.to_node(db.upcast()); + let value = decl.to_node(db); Some(InFile { file_id: decl.file_id, value }) } @@ -721,7 +773,7 @@ impl ModuleData { /// `None` for the crate root or block. pub fn declaration_source_range(&self, db: &dyn DefDatabase) -> Option> { let decl = self.origin.declaration()?; - Some(InFile { file_id: decl.file_id, value: decl.to_range(db.upcast()) }) + Some(InFile { file_id: decl.file_id, value: decl.to_range(db) }) } } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/assoc.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/assoc.rs new file mode 100644 index 0000000000000..448b908936a28 --- /dev/null +++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/assoc.rs @@ -0,0 +1,311 @@ +//! Expansion of associated items + +use hir_expand::{AstId, InFile, Intern, Lookup, MacroCallKind, MacroDefKind, name::Name}; +use syntax::ast; +use triomphe::Arc; + +use crate::{ + AssocItemId, AstIdWithPath, ConstLoc, FunctionId, FunctionLoc, ImplId, ItemContainerId, + ItemLoc, MacroCallId, ModuleId, TraitId, TypeAliasId, TypeAliasLoc, + db::DefDatabase, + item_tree::{AssocItem, ItemTree, ItemTreeId, MacroCall, ModItem, TreeId}, + macro_call_as_call_id, + nameres::{ + DefMap, LocalDefMap, MacroSubNs, + attr_resolution::ResolvedAttr, + diagnostics::{DefDiagnostic, DefDiagnostics}, + }, +}; + +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct TraitItems { + pub items: Box<[(Name, AssocItemId)]>, + // box it as the vec is usually empty anyways + // FIXME: AstIds are rather unstable... + pub macro_calls: Option, MacroCallId)>>>, +} + +impl TraitItems { + #[inline] + pub(crate) fn trait_items_query(db: &dyn DefDatabase, tr: TraitId) -> Arc { + db.trait_items_with_diagnostics(tr).0 + } + + pub(crate) fn trait_items_with_diagnostics_query( + db: &dyn DefDatabase, + tr: TraitId, + ) -> (Arc, DefDiagnostics) { + let ItemLoc { container: module_id, id: tree_id } = tr.lookup(db); + + let collector = AssocItemCollector::new(db, module_id, ItemContainerId::TraitId(tr)); + let item_tree = tree_id.item_tree(db); + let (items, macro_calls, diagnostics) = + collector.collect(&item_tree, tree_id.tree_id(), &item_tree[tree_id.value].items); + + (Arc::new(TraitItems { macro_calls, items }), DefDiagnostics::new(diagnostics)) + } + + pub fn associated_types(&self) -> impl Iterator + '_ { + self.items.iter().filter_map(|(_name, item)| match item { + AssocItemId::TypeAliasId(t) => Some(*t), + _ => None, + }) + } + + pub fn associated_type_by_name(&self, name: &Name) -> Option { + self.items.iter().find_map(|(item_name, item)| match item { + AssocItemId::TypeAliasId(t) if item_name == name => Some(*t), + _ => None, + }) + } + + pub fn method_by_name(&self, name: &Name) -> Option { + self.items.iter().find_map(|(item_name, item)| match item { + AssocItemId::FunctionId(t) if item_name == name => Some(*t), + _ => None, + }) + } + + pub fn attribute_calls(&self) -> impl Iterator, MacroCallId)> + '_ { + self.macro_calls.iter().flat_map(|it| it.iter()).copied() + } +} + +#[derive(Debug, PartialEq, Eq)] +pub struct ImplItems { + pub items: Box<[(Name, AssocItemId)]>, + // box it as the vec is usually empty anyways + // FIXME: AstIds are rather unstable... + pub macro_calls: Option, MacroCallId)>>>, +} + +impl ImplItems { + #[inline] + pub(crate) fn impl_items_query(db: &dyn DefDatabase, id: ImplId) -> Arc { + db.impl_items_with_diagnostics(id).0 + } + + pub(crate) fn impl_items_with_diagnostics_query( + db: &dyn DefDatabase, + id: ImplId, + ) -> (Arc, DefDiagnostics) { + let _p = tracing::info_span!("impl_items_with_diagnostics_query").entered(); + let ItemLoc { container: module_id, id: tree_id } = id.lookup(db); + + let collector = AssocItemCollector::new(db, module_id, ItemContainerId::ImplId(id)); + let item_tree = tree_id.item_tree(db); + let (items, macro_calls, diagnostics) = + collector.collect(&item_tree, tree_id.tree_id(), &item_tree[tree_id.value].items); + + (Arc::new(ImplItems { items, macro_calls }), DefDiagnostics::new(diagnostics)) + } + + pub fn attribute_calls(&self) -> impl Iterator, MacroCallId)> + '_ { + self.macro_calls.iter().flat_map(|it| it.iter()).copied() + } +} + +struct AssocItemCollector<'a> { + db: &'a dyn DefDatabase, + module_id: ModuleId, + def_map: Arc, + local_def_map: Arc, + diagnostics: Vec, + container: ItemContainerId, + + depth: usize, + items: Vec<(Name, AssocItemId)>, + macro_calls: Vec<(AstId, MacroCallId)>, +} + +impl<'a> AssocItemCollector<'a> { + fn new(db: &'a dyn DefDatabase, module_id: ModuleId, container: ItemContainerId) -> Self { + let (def_map, local_def_map) = module_id.local_def_map(db); + Self { + db, + module_id, + def_map, + local_def_map, + container, + items: Vec::new(), + + depth: 0, + macro_calls: Vec::new(), + diagnostics: Vec::new(), + } + } + + fn collect( + mut self, + item_tree: &ItemTree, + tree_id: TreeId, + assoc_items: &[AssocItem], + ) -> ( + Box<[(Name, AssocItemId)]>, + Option, MacroCallId)>>>, + Vec, + ) { + self.items.reserve(assoc_items.len()); + for &item in assoc_items { + self.collect_item(item_tree, tree_id, item); + } + ( + self.items.into_boxed_slice(), + if self.macro_calls.is_empty() { None } else { Some(Box::new(self.macro_calls)) }, + self.diagnostics, + ) + } + + fn collect_item(&mut self, item_tree: &ItemTree, tree_id: TreeId, item: AssocItem) { + let attrs = item_tree.attrs(self.db, self.module_id.krate, ModItem::from(item).into()); + if !attrs.is_cfg_enabled(self.module_id.krate.cfg_options(self.db)) { + self.diagnostics.push(DefDiagnostic::unconfigured_code( + self.module_id.local_id, + tree_id, + ModItem::from(item).into(), + attrs.cfg().unwrap(), + self.module_id.krate.cfg_options(self.db).clone(), + )); + return; + } + + 'attrs: for attr in &*attrs { + let ast_id = AstId::new(tree_id.file_id(), item.ast_id(item_tree).upcast()); + let ast_id_with_path = AstIdWithPath { path: attr.path.clone(), ast_id }; + + match self.def_map.resolve_attr_macro( + &self.local_def_map, + self.db, + self.module_id.local_id, + ast_id_with_path, + attr, + ) { + Ok(ResolvedAttr::Macro(call_id)) => { + let loc = self.db.lookup_intern_macro_call(call_id); + if let MacroDefKind::ProcMacro(_, exp, _) = loc.def.kind { + // If there's no expander for the proc macro (e.g. the + // proc macro is ignored, or building the proc macro + // crate failed), skip expansion like we would if it was + // disabled. This is analogous to the handling in + // `DefCollector::collect_macros`. + if let Some(err) = exp.as_expand_error(self.module_id.krate) { + self.diagnostics.push(DefDiagnostic::macro_error( + self.module_id.local_id, + ast_id, + (*attr.path).clone(), + err, + )); + continue 'attrs; + } + } + + self.macro_calls.push((ast_id, call_id)); + self.collect_macro_items(call_id); + return; + } + Ok(_) => (), + Err(_) => { + self.diagnostics.push(DefDiagnostic::unresolved_macro_call( + self.module_id.local_id, + MacroCallKind::Attr { ast_id, attr_args: None, invoc_attr_index: attr.id }, + attr.path().clone(), + )); + } + } + } + + self.record_item(item_tree, tree_id, item); + } + + fn record_item(&mut self, item_tree: &ItemTree, tree_id: TreeId, item: AssocItem) { + match item { + AssocItem::Function(id) => { + let item = &item_tree[id]; + let def = + FunctionLoc { container: self.container, id: ItemTreeId::new(tree_id, id) } + .intern(self.db); + self.items.push((item.name.clone(), def.into())); + } + AssocItem::TypeAlias(id) => { + let item = &item_tree[id]; + let def = + TypeAliasLoc { container: self.container, id: ItemTreeId::new(tree_id, id) } + .intern(self.db); + self.items.push((item.name.clone(), def.into())); + } + AssocItem::Const(id) => { + let item = &item_tree[id]; + let Some(name) = item.name.clone() else { return }; + let def = ConstLoc { container: self.container, id: ItemTreeId::new(tree_id, id) } + .intern(self.db); + self.items.push((name, def.into())); + } + AssocItem::MacroCall(call) => { + let MacroCall { ast_id, expand_to, ctxt, ref path } = item_tree[call]; + + let resolver = |path: &_| { + self.def_map + .resolve_path( + &self.local_def_map, + self.db, + self.module_id.local_id, + path, + crate::item_scope::BuiltinShadowMode::Other, + Some(MacroSubNs::Bang), + ) + .0 + .take_macros() + .map(|it| self.db.macro_def(it)) + }; + match macro_call_as_call_id( + self.db, + InFile::new(tree_id.file_id(), ast_id), + path, + ctxt, + expand_to, + self.module_id.krate(), + resolver, + &mut |ptr, call_id| { + self.macro_calls.push((ptr.map(|(_, it)| it.upcast()), call_id)) + }, + ) { + // FIXME: Expansion error? + Ok(call_id) => match call_id.value { + Some(call_id) => { + self.macro_calls + .push((InFile::new(tree_id.file_id(), ast_id.upcast()), call_id)); + self.collect_macro_items(call_id); + } + None => (), + }, + Err(_) => { + self.diagnostics.push(DefDiagnostic::unresolved_macro_call( + self.module_id.local_id, + MacroCallKind::FnLike { + ast_id: InFile::new(tree_id.file_id(), ast_id), + expand_to, + eager: None, + }, + Clone::clone(path), + )); + } + } + } + } + } + + fn collect_macro_items(&mut self, macro_call_id: MacroCallId) { + if self.depth > self.def_map.recursion_limit() as usize { + tracing::warn!("macro expansion is too deep"); + return; + } + let tree_id = TreeId::new(macro_call_id.into(), None); + let item_tree = self.db.file_item_tree(macro_call_id.into()); + + self.depth += 1; + for item in item_tree.top_level_items().iter().filter_map(ModItem::as_assoc_item) { + self.collect_item(&item_tree, tree_id, item); + } + self.depth -= 1; + } +} diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/attr_resolution.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/attr_resolution.rs index d1f6ed023c2fa..e7e96804ae737 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/nameres/attr_resolution.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/attr_resolution.rs @@ -1,21 +1,21 @@ //! Post-nameres attribute resolution. -use base_db::CrateId; +use base_db::Crate; use hir_expand::{ + MacroCallId, MacroCallKind, MacroDefId, attrs::{Attr, AttrId, AttrInput}, inert_attr_macro::find_builtin_attr_idx, - MacroCallId, MacroCallKind, MacroDefId, + mod_path::{ModPath, PathKind}, }; -use span::SyntaxContextId; +use span::SyntaxContext; use syntax::ast; use triomphe::Arc; use crate::{ + AstIdWithPath, LocalModuleId, MacroId, UnresolvedMacro, db::DefDatabase, item_scope::BuiltinShadowMode, - nameres::path_resolution::ResolveMode, - path::{self, ModPath, PathKind}, - AstIdWithPath, LocalModuleId, MacroId, UnresolvedMacro, + nameres::{LocalDefMap, path_resolution::ResolveMode}, }; use super::{DefMap, MacroSubNs}; @@ -30,6 +30,7 @@ pub enum ResolvedAttr { impl DefMap { pub(crate) fn resolve_attr_macro( &self, + local_def_map: &LocalDefMap, db: &dyn DefDatabase, original_module: LocalModuleId, ast_id: AstIdWithPath, @@ -42,6 +43,7 @@ impl DefMap { } let resolved_res = self.resolve_path_fp_with_macro( + local_def_map, db, ResolveMode::Other, original_module, @@ -105,7 +107,7 @@ pub(super) fn attr_macro_as_call_id( db: &dyn DefDatabase, item_attr: &AstIdWithPath, macro_attr: &Attr, - krate: CrateId, + krate: Crate, def: MacroDefId, ) -> MacroCallId { let arg = match macro_attr.input.as_deref() { @@ -119,7 +121,7 @@ pub(super) fn attr_macro_as_call_id( }; def.make_call( - db.upcast(), + db, krate, MacroCallKind::Attr { ast_id: item_attr.ast_id, @@ -135,16 +137,16 @@ pub(super) fn derive_macro_as_call_id( item_attr: &AstIdWithPath, derive_attr_index: AttrId, derive_pos: u32, - call_site: SyntaxContextId, - krate: CrateId, - resolver: impl Fn(&path::ModPath) -> Option<(MacroId, MacroDefId)>, + call_site: SyntaxContext, + krate: Crate, + resolver: impl Fn(&ModPath) -> Option<(MacroId, MacroDefId)>, derive_macro_id: MacroCallId, ) -> Result<(MacroId, MacroDefId, MacroCallId), UnresolvedMacro> { let (macro_id, def_id) = resolver(&item_attr.path) .filter(|(_, def_id)| def_id.is_derive()) .ok_or_else(|| UnresolvedMacro { path: item_attr.path.as_ref().clone() })?; let call_id = def_id.make_call( - db.upcast(), + db, krate, MacroCallKind::Derive { ast_id: item_attr.ast_id, diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs index 16f3fd56eb9ed..8df0f092cd0b7 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs @@ -5,62 +5,66 @@ use std::{cmp::Ordering, iter, mem, ops::Not}; -use base_db::{CrateId, CrateOrigin, Dependency, LangCrateOrigin}; +use base_db::{BuiltDependency, Crate, CrateOrigin, LangCrateOrigin}; use cfg::{CfgAtom, CfgExpr, CfgOptions}; use either::Either; use hir_expand::{ + EditionedFileId, ExpandTo, HirFileId, InFile, MacroCallId, MacroCallKind, MacroDefId, + MacroDefKind, attrs::{Attr, AttrId}, builtin::{find_builtin_attr, find_builtin_derive, find_builtin_macro}, + mod_path::{ModPath, PathKind}, name::{AsName, Name}, proc_macro::CustomProcMacroExpander, - ExpandTo, HirFileId, InFile, MacroCallId, MacroCallKind, MacroDefId, MacroDefKind, - MacroFileIdExt, }; -use intern::{sym, Interned}; -use itertools::{izip, Itertools}; +use intern::{Interned, sym}; +use itertools::{Itertools, izip}; use la_arena::Idx; use rustc_hash::{FxHashMap, FxHashSet}; -use span::{Edition, EditionedFileId, FileAstId, SyntaxContextId}; +use span::{Edition, FileAstId, SyntaxContext}; use syntax::ast; use triomphe::Arc; use crate::{ + AdtId, AstId, AstIdWithPath, ConstLoc, CrateRootModuleId, EnumLoc, ExternBlockLoc, + ExternCrateId, ExternCrateLoc, FunctionId, FunctionLoc, ImplLoc, Intern, ItemContainerId, + LocalModuleId, Lookup, Macro2Id, Macro2Loc, MacroExpander, MacroId, MacroRulesId, + MacroRulesLoc, MacroRulesLocFlags, ModuleDefId, ModuleId, ProcMacroId, ProcMacroLoc, StaticLoc, + StructLoc, TraitAliasLoc, TraitLoc, TypeAliasLoc, UnionLoc, UnresolvedMacro, UseId, UseLoc, attr::Attrs, db::DefDatabase, item_scope::{GlobId, ImportId, ImportOrExternCrate, PerNsGlobImports}, item_tree::{ - self, AttrOwner, FieldsShape, FileItemTreeId, ImportKind, ItemTree, ItemTreeId, - ItemTreeNode, Macro2, MacroCall, MacroRules, Mod, ModItem, ModKind, TreeId, UseTreeKind, + self, AttrOwner, FieldsShape, FileItemTreeId, ImportAlias, ImportKind, ItemTree, + ItemTreeId, ItemTreeNode, Macro2, MacroCall, MacroRules, Mod, ModItem, ModKind, TreeId, + UseTreeKind, }, - macro_call_as_call_id, macro_call_as_call_id_with_eager, + macro_call_as_call_id, nameres::{ + BuiltinShadowMode, DefMap, LocalDefMap, MacroSubNs, ModuleData, ModuleOrigin, ResolveMode, attr_resolution::{attr_macro_as_call_id, derive_macro_as_call_id}, diagnostics::DefDiagnostic, mod_resolution::ModDir, path_resolution::ReachedFixedPoint, - proc_macro::{parse_macro_name_and_helper_attrs, ProcMacroDef, ProcMacroKind}, - sub_namespace_match, BuiltinShadowMode, DefMap, MacroSubNs, ModuleData, ModuleOrigin, - ResolveMode, + proc_macro::{ProcMacroDef, ProcMacroKind, parse_macro_name_and_helper_attrs}, + sub_namespace_match, }, - path::{ImportAlias, ModPath, PathKind}, per_ns::{Item, PerNs}, tt, visibility::{RawVisibility, Visibility}, - AdtId, AstId, AstIdWithPath, ConstLoc, CrateRootModuleId, EnumLoc, EnumVariantLoc, - ExternBlockLoc, ExternCrateId, ExternCrateLoc, FunctionId, FunctionLoc, ImplLoc, Intern, - ItemContainerId, LocalModuleId, Lookup, Macro2Id, Macro2Loc, MacroExpander, MacroId, - MacroRulesId, MacroRulesLoc, MacroRulesLocFlags, ModuleDefId, ModuleId, ProcMacroId, - ProcMacroLoc, StaticLoc, StructLoc, TraitAliasLoc, TraitLoc, TypeAliasLoc, UnionLoc, - UnresolvedMacro, UseId, UseLoc, }; const GLOB_RECURSION_LIMIT: usize = 100; const FIXED_POINT_LIMIT: usize = 8192; -pub(super) fn collect_defs(db: &dyn DefDatabase, def_map: DefMap, tree_id: TreeId) -> DefMap { - let crate_graph = db.crate_graph(); - - let krate = &crate_graph[def_map.krate]; +pub(super) fn collect_defs( + db: &dyn DefDatabase, + def_map: DefMap, + tree_id: TreeId, + crate_local_def_map: Option>, +) -> (DefMap, LocalDefMap) { + let krate = &def_map.krate.data(db); + let cfg_options = def_map.krate.cfg_options(db); // populate external prelude and dependency list let mut deps = @@ -72,8 +76,10 @@ pub(super) fn collect_defs(db: &dyn DefDatabase, def_map: DefMap, tree_id: TreeI } let proc_macros = if krate.is_proc_macro { - db.proc_macros() - .for_crate(def_map.krate, db.syntax_context(tree_id.file_id(), krate.edition)) + db.proc_macros_for_crate(def_map.krate) + .and_then(|proc_macros| { + proc_macros.list(db.syntax_context(tree_id.file_id(), krate.edition)) + }) .unwrap_or_default() } else { Default::default() @@ -82,13 +88,15 @@ pub(super) fn collect_defs(db: &dyn DefDatabase, def_map: DefMap, tree_id: TreeI let mut collector = DefCollector { db, def_map, + local_def_map: LocalDefMap::default(), + crate_local_def_map, deps, glob_imports: FxHashMap::default(), unresolved_imports: Vec::new(), indeterminate_imports: Vec::new(), unresolved_macros: Vec::new(), mod_dirs: FxHashMap::default(), - cfg_options: &krate.cfg_options, + cfg_options, proc_macros, from_glob_import: Default::default(), skip_attrs: Default::default(), @@ -101,9 +109,10 @@ pub(super) fn collect_defs(db: &dyn DefDatabase, def_map: DefMap, tree_id: TreeI collector.seed_with_top_level(); } collector.collect(); - let mut def_map = collector.finish(); + let (mut def_map, mut local_def_map) = collector.finish(); def_map.shrink_to_fit(); - def_map + local_def_map.shrink_to_fit(); + (def_map, local_def_map) } #[derive(Copy, Clone, Debug, Eq, PartialEq)] @@ -183,13 +192,13 @@ enum MacroDirectiveKind { FnLike { ast_id: AstIdWithPath, expand_to: ExpandTo, - ctxt: SyntaxContextId, + ctxt: SyntaxContext, }, Derive { ast_id: AstIdWithPath, derive_attr: AttrId, derive_pos: usize, - ctxt: SyntaxContextId, + ctxt: SyntaxContext, /// The "parent" macro it is resolved to. derive_macro_id: MacroCallId, }, @@ -205,8 +214,11 @@ enum MacroDirectiveKind { struct DefCollector<'a> { db: &'a dyn DefDatabase, def_map: DefMap, + local_def_map: LocalDefMap, + /// Set only in case of blocks. + crate_local_def_map: Option>, // The dependencies of the current crate, including optional deps like `test`. - deps: FxHashMap, + deps: FxHashMap, glob_imports: FxHashMap>, unresolved_imports: Vec, indeterminate_imports: Vec<(ImportDirective, PerNs)>, @@ -238,8 +250,7 @@ impl DefCollector<'_> { fn seed_with_top_level(&mut self) { let _p = tracing::info_span!("seed_with_top_level").entered(); - let crate_graph = self.db.crate_graph(); - let file_id = crate_graph[self.def_map.krate].root_file_id(); + let file_id = self.def_map.krate.data(self.db).root_file_id(self.db); let item_tree = self.db.file_item_tree(file_id.into()); let attrs = item_tree.top_level_attrs(self.db, self.def_map.krate); let crate_data = Arc::get_mut(&mut self.def_map.data).unwrap(); @@ -257,41 +268,40 @@ impl DefCollector<'_> { let Some(attr_name) = attr.path.as_ident() else { continue }; match () { - () if *attr_name == sym::recursion_limit.clone() => { + () if *attr_name == sym::recursion_limit => { if let Some(limit) = attr.string_value() { if let Ok(limit) = limit.as_str().parse() { crate_data.recursion_limit = Some(limit); } } } - () if *attr_name == sym::crate_type.clone() => { + () if *attr_name == sym::crate_type => { if attr.string_value() == Some(&sym::proc_dash_macro) { self.is_proc_macro = true; } } - () if *attr_name == sym::no_core.clone() => crate_data.no_core = true, - () if *attr_name == sym::no_std.clone() => crate_data.no_std = true, - () if *attr_name == sym::rustc_coherence_is_core.clone() => { + () if *attr_name == sym::no_core => crate_data.no_core = true, + () if *attr_name == sym::no_std => crate_data.no_std = true, + () if *attr_name == sym::rustc_coherence_is_core => { crate_data.rustc_coherence_is_core = true; } - () if *attr_name == sym::feature.clone() => { - let features = attr - .parse_path_comma_token_tree(self.db.upcast()) - .into_iter() - .flatten() - .filter_map(|(feat, _)| match feat.segments() { - [name] => Some(name.symbol().clone()), - _ => None, - }); + () if *attr_name == sym::feature => { + let features = + attr.parse_path_comma_token_tree(self.db).into_iter().flatten().filter_map( + |(feat, _)| match feat.segments() { + [name] => Some(name.symbol().clone()), + _ => None, + }, + ); crate_data.unstable_features.extend(features); } - () if *attr_name == sym::register_attr.clone() => { + () if *attr_name == sym::register_attr => { if let Some(ident) = attr.single_ident_value() { crate_data.registered_attrs.push(ident.sym.clone()); cov_mark::hit!(register_attr); } } - () if *attr_name == sym::register_tool.clone() => { + () if *attr_name == sym::register_tool => { if let Some(ident) = attr.single_ident_value() { crate_data.registered_tools.push(ident.sym.clone()); cov_mark::hit!(register_tool); @@ -310,20 +320,24 @@ impl DefCollector<'_> { // don't do pre-configured attribute resolution yet. // So here check if we are no_core / no_std and we are trying to add the // corresponding dep from the sysroot - let skip = match crate_graph[dep.crate_id].origin { - CrateOrigin::Lang(LangCrateOrigin::Core) => { - crate_data.no_core && dep.is_sysroot() - } - CrateOrigin::Lang(LangCrateOrigin::Std) => { - crate_data.no_std && dep.is_sysroot() - } - _ => false, - }; + + // Depending on the crate data of a dependency seems bad for incrementality, but + // we only do that for sysroot crates (this is why the order of the `&&` is important) + // - which are normally standard library crate, which realistically aren't going + // to have their crate ID invalidated, because they stay on the same root file and + // they're dependencies of everything else, so if some collision miraculously occurs + // we will resolve it by disambiguating the other crate. + let skip = dep.is_sysroot() + && match dep.crate_id.data(self.db).origin { + CrateOrigin::Lang(LangCrateOrigin::Core) => crate_data.no_core, + CrateOrigin::Lang(LangCrateOrigin::Std) => crate_data.no_std, + _ => false, + }; if skip { continue; } - crate_data + self.local_def_map .extern_prelude .insert(name.clone(), (CrateRootModuleId { krate: dep.crate_id }, None)); } @@ -376,7 +390,7 @@ impl DefCollector<'_> { 'resolve_attr: loop { let _p = tracing::info_span!("resolve_macros loop").entered(); 'resolve_macros: loop { - self.db.unwind_if_cancelled(); + self.db.unwind_if_revision_cancelled(); { let _p = tracing::info_span!("resolve_imports loop").entered(); @@ -493,20 +507,20 @@ impl DefCollector<'_> { } let krate = if self.def_map.data.no_std { - Name::new_symbol_root(sym::core.clone()) - } else if self.def_map.extern_prelude().any(|(name, _)| *name == sym::std.clone()) { - Name::new_symbol_root(sym::std.clone()) + Name::new_symbol_root(sym::core) + } else if self.local_def_map().extern_prelude().any(|(name, _)| *name == sym::std) { + Name::new_symbol_root(sym::std) } else { // If `std` does not exist for some reason, fall back to core. This mostly helps // keep r-a's own tests minimal. - Name::new_symbol_root(sym::core.clone()) + Name::new_symbol_root(sym::core) }; let edition = match self.def_map.data.edition { - Edition::Edition2015 => Name::new_symbol_root(sym::rust_2015.clone()), - Edition::Edition2018 => Name::new_symbol_root(sym::rust_2018.clone()), - Edition::Edition2021 => Name::new_symbol_root(sym::rust_2021.clone()), - Edition::Edition2024 => Name::new_symbol_root(sym::rust_2024.clone()), + Edition::Edition2015 => Name::new_symbol_root(sym::rust_2015), + Edition::Edition2018 => Name::new_symbol_root(sym::rust_2018), + Edition::Edition2021 => Name::new_symbol_root(sym::rust_2021), + Edition::Edition2024 => Name::new_symbol_root(sym::rust_2024), }; let path_kind = match self.def_map.data.edition { @@ -515,11 +529,17 @@ impl DefCollector<'_> { }; let path = ModPath::from_segments( path_kind, - [krate, Name::new_symbol_root(sym::prelude.clone()), edition], + [krate, Name::new_symbol_root(sym::prelude), edition], ); - let (per_ns, _) = - self.def_map.resolve_path(self.db, DefMap::ROOT, &path, BuiltinShadowMode::Other, None); + let (per_ns, _) = self.def_map.resolve_path( + self.crate_local_def_map.as_deref().unwrap_or(&self.local_def_map), + self.db, + DefMap::ROOT, + &path, + BuiltinShadowMode::Other, + None, + ); match per_ns.types { Some(Item { def: ModuleDefId::ModuleId(m), import, .. }) => { @@ -528,13 +548,17 @@ impl DefCollector<'_> { types => { tracing::debug!( "could not resolve prelude path `{}` to module (resolved to {:?})", - path.display(self.db.upcast(), Edition::LATEST), + path.display(self.db, Edition::LATEST), types ); } } } + fn local_def_map(&mut self) -> &LocalDefMap { + self.crate_local_def_map.as_deref().unwrap_or(&self.local_def_map) + } + /// Adds a definition of procedural macro `name` to the root module. /// /// # Notes on procedural macro resolution @@ -555,6 +579,7 @@ impl DefCollector<'_> { &mut self, def: ProcMacroDef, id: ItemTreeId, + ast_id: AstId, fn_id: FunctionId, ) { let kind = def.kind.to_basedb_kind(); @@ -578,6 +603,8 @@ impl DefCollector<'_> { edition: self.def_map.data.edition, } .intern(self.db); + + self.def_map.macro_def_to_macro_id.insert(ast_id.erase(), proc_macro_id.into()); self.define_proc_macro(def.name.clone(), proc_macro_id); let crate_data = Arc::get_mut(&mut self.def_map.data).unwrap(); if let ProcMacroKind::Derive { helpers } = def.kind { @@ -660,7 +687,13 @@ impl DefCollector<'_> { ) { let vis = self .def_map - .resolve_visibility(self.db, module_id, vis, false) + .resolve_visibility( + self.crate_local_def_map.as_deref().unwrap_or(&self.local_def_map), + self.db, + module_id, + vis, + false, + ) .unwrap_or(Visibility::Public); self.def_map.modules[module_id].scope.declare(macro_.into()); self.update( @@ -694,7 +727,7 @@ impl DefCollector<'_> { /// created by `use` in the root module, ignoring the visibility of `use`. fn import_macros_from_extern_crate( &mut self, - krate: CrateId, + krate: Crate, names: Option>, extern_crate: Option, ) { @@ -775,10 +808,11 @@ impl DefCollector<'_> { } fn resolve_import(&self, module_id: LocalModuleId, import: &Import) -> PartialResolvedImport { - let _p = tracing::info_span!("resolve_import", import_path = %import.path.display(self.db.upcast(), Edition::LATEST)) + let _p = tracing::info_span!("resolve_import", import_path = %import.path.display(self.db, Edition::LATEST)) .entered(); tracing::debug!("resolving import: {:?} ({:?})", import, self.def_map.data.edition); let res = self.def_map.resolve_path_fp_with_macro( + self.crate_local_def_map.as_deref().unwrap_or(&self.local_def_map), self.db, ResolveMode::Import, module_id, @@ -814,7 +848,13 @@ impl DefCollector<'_> { let mut def = directive.status.namespaces(); let vis = self .def_map - .resolve_visibility(self.db, module_id, &directive.import.visibility, false) + .resolve_visibility( + self.crate_local_def_map.as_deref().unwrap_or(&self.local_def_map), + self.db, + module_id, + &directive.import.visibility, + false, + ) .unwrap_or(Visibility::Public); match import.source { @@ -929,27 +969,16 @@ impl DefCollector<'_> { Some(ModuleDefId::AdtId(AdtId::EnumId(e))) => { cov_mark::hit!(glob_enum); // glob import from enum => just import all the variants - - // We need to check if the def map the enum is from is us, if it is we can't - // call the def-map query since we are currently constructing it! - let loc = e.lookup(self.db); - let tree = loc.id.item_tree(self.db); - let current_def_map = self.def_map.krate == loc.container.krate - && self.def_map.block_id() == loc.container.block; - let def_map; - let resolutions = if current_def_map { - &self.def_map.enum_definitions[&e] - } else { - def_map = loc.container.def_map(self.db); - &def_map.enum_definitions[&e] - } - .iter() - .map(|&variant| { - let name = tree[variant.lookup(self.db).id.value].name.clone(); - let res = PerNs::both(variant.into(), variant.into(), vis, None); - (Some(name), res) - }) - .collect::>(); + let resolutions = self + .db + .enum_variants(e) + .variants + .iter() + .map(|&(variant, ref name)| { + let res = PerNs::both(variant.into(), variant.into(), vis, None); + (Some(name.clone()), res) + }) + .collect::>(); self.update( module_id, &resolutions, @@ -977,7 +1006,7 @@ impl DefCollector<'_> { vis: Visibility, import: Option, ) { - self.db.unwind_if_cancelled(); + self.db.unwind_if_revision_cancelled(); self.update_recursive(module_id, resolutions, vis, import, 0) } @@ -1199,6 +1228,7 @@ impl DefCollector<'_> { No, } + let mut eager_callback_buffer = vec![]; let mut res = ReachedFixedPoint::Yes; // Retain unresolved macros after this round of resolution. let mut retain = |directive: &MacroDirective| { @@ -1210,6 +1240,7 @@ impl DefCollector<'_> { }; let resolver = |path: &_| { let resolved_res = self.def_map.resolve_path_fp_with_macro( + self.crate_local_def_map.as_deref().unwrap_or(&self.local_def_map), self.db, ResolveMode::Other, directive.module_id, @@ -1224,22 +1255,29 @@ impl DefCollector<'_> { match &directive.kind { MacroDirectiveKind::FnLike { ast_id, expand_to, ctxt: call_site } => { let call_id = macro_call_as_call_id( - self.db.upcast(), - ast_id, + self.db, + ast_id.ast_id, + &ast_id.path, *call_site, *expand_to, self.def_map.krate, resolver_def_id, + &mut |ptr, call_id| { + eager_callback_buffer.push((directive.module_id, ptr, call_id)); + }, ); - if let Ok(Some(call_id)) = call_id { - self.def_map.modules[directive.module_id] - .scope - .add_macro_invoc(ast_id.ast_id, call_id); + if let Ok(call_id) = call_id { + // FIXME: Expansion error + if let Some(call_id) = call_id.value { + self.def_map.modules[directive.module_id] + .scope + .add_macro_invoc(ast_id.ast_id, call_id); - push_resolved(directive, call_id); + push_resolved(directive, call_id); - res = ReachedFixedPoint::No; - return Resolved::Yes; + res = ReachedFixedPoint::No; + return Resolved::Yes; + } } } MacroDirectiveKind::Derive { @@ -1339,8 +1377,7 @@ impl DefCollector<'_> { MacroDefKind::BuiltInAttr(_, expander) if expander.is_test() || expander.is_bench() || expander.is_test_case() ) { - let test_is_active = - self.cfg_options.check_atom(&CfgAtom::Flag(sym::test.clone())); + let test_is_active = self.cfg_options.check_atom(&CfgAtom::Flag(sym::test)); if test_is_active { return recollect_without(self); } @@ -1375,7 +1412,7 @@ impl DefCollector<'_> { let ast_id = ast_id.with_value(ast_adt_id); - match attr.parse_path_comma_token_tree(self.db.upcast()) { + match attr.parse_path_comma_token_tree(self.db) { Some(derive_macros) => { let call_id = call_id(); let mut len = 0; @@ -1455,6 +1492,10 @@ impl DefCollector<'_> { macros.extend(mem::take(&mut self.unresolved_macros)); self.unresolved_macros = macros; + for (module_id, ptr, call_id) in eager_callback_buffer { + self.def_map.modules[module_id].scope.add_macro_invoc(ptr.map(|(_, it)| it), call_id); + } + for (module_id, depth, container, macro_call_id) in resolved { self.collect_macro_expansion(module_id, macro_call_id, depth, container); } @@ -1474,11 +1515,11 @@ impl DefCollector<'_> { tracing::warn!("macro expansion is too deep"); return; } - let file_id = macro_call_id.as_file(); + let file_id = macro_call_id.into(); let item_tree = self.db.file_item_tree(file_id); - let mod_dir = if macro_call_id.as_macro_file().is_include_macro(self.db.upcast()) { + let mod_dir = if macro_call_id.is_include_macro(self.db) { ModDir::root() } else { self.mod_dirs[&module_id].clone() @@ -1495,7 +1536,7 @@ impl DefCollector<'_> { .collect(item_tree.top_level_items(), container); } - fn finish(mut self) -> DefMap { + fn finish(mut self) -> (DefMap, LocalDefMap) { // Emit diagnostics for all remaining unexpanded macros. let _p = tracing::info_span!("DefCollector::finish").entered(); @@ -1504,13 +1545,15 @@ impl DefCollector<'_> { MacroDirectiveKind::FnLike { ast_id, expand_to, ctxt: call_site } => { // FIXME: we shouldn't need to re-resolve the macro here just to get the unresolved error! let macro_call_as_call_id = macro_call_as_call_id( - self.db.upcast(), - ast_id, + self.db, + ast_id.ast_id, + &ast_id.path, *call_site, *expand_to, self.def_map.krate, |path| { let resolved_res = self.def_map.resolve_path_fp_with_macro( + self.crate_local_def_map.as_deref().unwrap_or(&self.local_def_map), self.db, ResolveMode::Other, directive.module_id, @@ -1520,6 +1563,7 @@ impl DefCollector<'_> { ); resolved_res.resolved_def.take_macros().map(|it| self.db.macro_def(it)) }, + &mut |_, _| (), ); if let Err(UnresolvedMacro { path }) = macro_call_as_call_id { self.def_map.diagnostics.push(DefDiagnostic::unresolved_macro_call( @@ -1582,7 +1626,7 @@ impl DefCollector<'_> { )); } - self.def_map + (self.def_map, self.local_def_map) } } @@ -1635,9 +1679,9 @@ impl ModCollector<'_, '_> { None, ) }; - let resolve_vis = |def_map: &DefMap, visibility| { + let resolve_vis = |def_map: &DefMap, local_def_map: &LocalDefMap, visibility| { def_map - .resolve_visibility(db, module_id, visibility, false) + .resolve_visibility(local_def_map, db, module_id, visibility, false) .unwrap_or(Visibility::Public) }; @@ -1658,6 +1702,11 @@ impl ModCollector<'_, '_> { let module = self.def_collector.def_map.module_id(module_id); let def_map = &mut self.def_collector.def_map; + let local_def_map = self + .def_collector + .crate_local_def_map + .as_deref() + .unwrap_or(&self.def_collector.local_def_map); match item { ModItem::Mod(m) => self.collect_module(m, &attrs), @@ -1667,7 +1716,7 @@ impl ModCollector<'_, '_> { id: ItemTreeId::new(self.tree_id, item_tree_id), } .intern(db); - let is_prelude = attrs.by_key(&sym::prelude_import).exists(); + let is_prelude = attrs.by_key(sym::prelude_import).exists(); Import::from_use( self.item_tree, ItemTreeId::new(self.tree_id, item_tree_id), @@ -1711,13 +1760,13 @@ impl ModCollector<'_, '_> { }; if let Some(resolved) = resolved { - let vis = resolve_vis(def_map, &self.item_tree[*visibility]); + let vis = resolve_vis(def_map, local_def_map, &self.item_tree[*visibility]); if is_crate_root { // extern crates in the crate root are special-cased to insert entries into the extern prelude: rust-lang/rust#54658 if let Some(name) = name { - Arc::get_mut(&mut def_map.data) - .unwrap() + self.def_collector + .local_def_map .extern_prelude .insert(name.clone(), (resolved, Some(id))); } @@ -1725,7 +1774,7 @@ impl ModCollector<'_, '_> { if !is_self { self.process_macro_use_extern_crate( id, - attrs.by_key(&sym::macro_use).attrs(), + attrs.by_key(sym::macro_use).attrs(), resolved.krate, ); } @@ -1784,7 +1833,7 @@ impl ModCollector<'_, '_> { let fn_id = FunctionLoc { container, id: ItemTreeId::new(self.tree_id, id) }.intern(db); - let vis = resolve_vis(def_map, &self.item_tree[it.visibility]); + let vis = resolve_vis(def_map, local_def_map, &self.item_tree[it.visibility]); if self.def_collector.def_map.block.is_none() && self.def_collector.is_proc_macro @@ -1794,6 +1843,7 @@ impl ModCollector<'_, '_> { self.def_collector.export_proc_macro( proc_macro, ItemTreeId::new(self.tree_id, id), + InFile::new(self.file_id(), self.item_tree[id].ast_id()), fn_id, ); } @@ -1804,7 +1854,7 @@ impl ModCollector<'_, '_> { ModItem::Struct(id) => { let it = &self.item_tree[id]; - let vis = resolve_vis(def_map, &self.item_tree[it.visibility]); + let vis = resolve_vis(def_map, local_def_map, &self.item_tree[it.visibility]); update_def( self.def_collector, StructLoc { container: module, id: ItemTreeId::new(self.tree_id, id) } @@ -1818,7 +1868,7 @@ impl ModCollector<'_, '_> { ModItem::Union(id) => { let it = &self.item_tree[id]; - let vis = resolve_vis(def_map, &self.item_tree[it.visibility]); + let vis = resolve_vis(def_map, local_def_map, &self.item_tree[it.visibility]); update_def( self.def_collector, UnionLoc { container: module, id: ItemTreeId::new(self.tree_id, id) } @@ -1835,41 +1885,8 @@ impl ModCollector<'_, '_> { EnumLoc { container: module, id: ItemTreeId::new(self.tree_id, id) } .intern(db); - let vis = resolve_vis(def_map, &self.item_tree[it.visibility]); + let vis = resolve_vis(def_map, local_def_map, &self.item_tree[it.visibility]); update_def(self.def_collector, enum_.into(), &it.name, vis, false); - - let mut index = 0; - let variants = FileItemTreeId::range_iter(it.variants.clone()) - .filter_map(|variant| { - let is_enabled = self - .item_tree - .attrs(db, krate, variant.into()) - .cfg() - .and_then(|cfg| self.is_cfg_enabled(&cfg).not().then_some(cfg)) - .map_or(Ok(()), Err); - match is_enabled { - Err(cfg) => { - self.emit_unconfigured_diagnostic( - self.tree_id, - variant.into(), - &cfg, - ); - None - } - Ok(()) => Some({ - let loc = EnumVariantLoc { - id: ItemTreeId::new(self.tree_id, variant), - parent: enum_, - index, - } - .intern(db); - index += 1; - loc - }), - } - }) - .collect(); - self.def_collector.def_map.enum_definitions.insert(enum_, variants); } ModItem::Const(id) => { let it = &self.item_tree[id]; @@ -1878,7 +1895,8 @@ impl ModCollector<'_, '_> { match &it.name { Some(name) => { - let vis = resolve_vis(def_map, &self.item_tree[it.visibility]); + let vis = + resolve_vis(def_map, local_def_map, &self.item_tree[it.visibility]); update_def(self.def_collector, const_id.into(), name, vis, false); } None => { @@ -1892,7 +1910,7 @@ impl ModCollector<'_, '_> { ModItem::Static(id) => { let it = &self.item_tree[id]; - let vis = resolve_vis(def_map, &self.item_tree[it.visibility]); + let vis = resolve_vis(def_map, local_def_map, &self.item_tree[it.visibility]); update_def( self.def_collector, StaticLoc { container, id: ItemTreeId::new(self.tree_id, id) } @@ -1906,7 +1924,7 @@ impl ModCollector<'_, '_> { ModItem::Trait(id) => { let it = &self.item_tree[id]; - let vis = resolve_vis(def_map, &self.item_tree[it.visibility]); + let vis = resolve_vis(def_map, local_def_map, &self.item_tree[it.visibility]); update_def( self.def_collector, TraitLoc { container: module, id: ItemTreeId::new(self.tree_id, id) } @@ -1920,7 +1938,7 @@ impl ModCollector<'_, '_> { ModItem::TraitAlias(id) => { let it = &self.item_tree[id]; - let vis = resolve_vis(def_map, &self.item_tree[it.visibility]); + let vis = resolve_vis(def_map, local_def_map, &self.item_tree[it.visibility]); update_def( self.def_collector, TraitAliasLoc { container: module, id: ItemTreeId::new(self.tree_id, id) } @@ -1934,7 +1952,7 @@ impl ModCollector<'_, '_> { ModItem::TypeAlias(id) => { let it = &self.item_tree[id]; - let vis = resolve_vis(def_map, &self.item_tree[it.visibility]); + let vis = resolve_vis(def_map, local_def_map, &self.item_tree[it.visibility]); update_def( self.def_collector, TypeAliasLoc { container, id: ItemTreeId::new(self.tree_id, id) } @@ -1971,13 +1989,12 @@ impl ModCollector<'_, '_> { &mut self, extern_crate_id: ExternCrateId, macro_use_attrs: impl Iterator, - target_crate: CrateId, + target_crate: Crate, ) { cov_mark::hit!(macro_rules_from_other_crates_are_visible_with_macro_use); let mut single_imports = Vec::new(); for attr in macro_use_attrs { - let Some(paths) = attr.parse_path_comma_token_tree(self.def_collector.db.upcast()) - else { + let Some(paths) = attr.parse_path_comma_token_tree(self.def_collector.db) else { // `#[macro_use]` (without any paths) found, forget collected names and just import // all visible macros. self.def_collector.import_macros_from_extern_crate( @@ -2002,8 +2019,8 @@ impl ModCollector<'_, '_> { } fn collect_module(&mut self, module_id: FileItemTreeId, attrs: &Attrs) { - let path_attr = attrs.by_key(&sym::path).string_value_unescape(); - let is_macro_use = attrs.by_key(&sym::macro_use).exists(); + let path_attr = attrs.by_key(sym::path).string_value_unescape(); + let is_macro_use = attrs.by_key(sym::macro_use).exists(); let module = &self.item_tree[module_id]; match &module.kind { // inline module, just recurse @@ -2080,7 +2097,7 @@ impl ModCollector<'_, '_> { let is_macro_use = is_macro_use || item_tree .top_level_attrs(db, krate) - .by_key(&sym::macro_use) + .by_key(sym::macro_use) .exists(); if is_macro_use { self.import_all_legacy_macros(module_id); @@ -2115,7 +2132,16 @@ impl ModCollector<'_, '_> { ) -> LocalModuleId { let def_map = &mut self.def_collector.def_map; let vis = def_map - .resolve_visibility(self.def_collector.db, self.module_id, visibility, false) + .resolve_visibility( + self.def_collector + .crate_local_def_map + .as_deref() + .unwrap_or(&self.def_collector.local_def_map), + self.def_collector.db, + self.module_id, + visibility, + false, + ) .unwrap_or(Visibility::Public); let origin = match definition { None => ModuleOrigin::Inline { @@ -2198,7 +2224,7 @@ impl ModCollector<'_, '_> { } tracing::debug!( "non-builtin attribute {}", - attr.path.display(self.def_collector.db.upcast(), Edition::LATEST) + attr.path.display(self.def_collector.db, Edition::LATEST) ); let ast_id = AstIdWithPath::new( @@ -2230,11 +2256,11 @@ impl ModCollector<'_, '_> { let attrs = self.item_tree.attrs(self.def_collector.db, krate, ModItem::from(id).into()); let ast_id = InFile::new(self.file_id(), mac.ast_id.upcast()); - let export_attr = attrs.by_key(&sym::macro_export); + let export_attr = || attrs.by_key(sym::macro_export); - let is_export = export_attr.exists(); + let is_export = export_attr().exists(); let local_inner = if is_export { - export_attr.tt_values().flat_map(|it| it.iter()).any(|it| match it { + export_attr().tt_values().flat_map(|it| it.iter()).any(|it| match it { tt::TtElement::Leaf(tt::Leaf::Ident(ident)) => ident.sym == sym::local_inner_macros, _ => false, }) @@ -2243,17 +2269,17 @@ impl ModCollector<'_, '_> { }; // Case 1: builtin macros - let expander = if attrs.by_key(&sym::rustc_builtin_macro).exists() { + let expander = if attrs.by_key(sym::rustc_builtin_macro).exists() { // `#[rustc_builtin_macro = "builtin_name"]` overrides the `macro_rules!` name. let name; - let name = match attrs.by_key(&sym::rustc_builtin_macro).string_value_with_span() { + let name = match attrs.by_key(sym::rustc_builtin_macro).string_value_with_span() { Some((it, span)) => { name = Name::new_symbol(it.clone(), span.ctx); &name } None => { let explicit_name = - attrs.by_key(&sym::rustc_builtin_macro).tt_values().next().and_then(|tt| { + attrs.by_key(sym::rustc_builtin_macro).tt_values().next().and_then(|tt| { match tt.token_trees().flat_tokens().first() { Some(tt::TokenTree::Leaf(tt::Leaf::Ident(name))) => Some(name), _ => None, @@ -2283,7 +2309,7 @@ impl ModCollector<'_, '_> { // Case 2: normal `macro_rules!` macro MacroExpander::Declarative }; - let allow_internal_unsafe = attrs.by_key(&sym::allow_internal_unsafe).exists(); + let allow_internal_unsafe = attrs.by_key(sym::allow_internal_unsafe).exists(); let mut flags = MacroRulesLocFlags::empty(); flags.set(MacroRulesLocFlags::LOCAL_INNER, local_inner); @@ -2297,6 +2323,10 @@ impl ModCollector<'_, '_> { edition: self.def_collector.def_map.data.edition, } .intern(self.def_collector.db); + self.def_collector.def_map.macro_def_to_macro_id.insert( + InFile::new(self.file_id(), self.item_tree[id].ast_id()).erase(), + macro_id.into(), + ); self.def_collector.define_macro_rules( self.module_id, mac.name.clone(), @@ -2313,14 +2343,14 @@ impl ModCollector<'_, '_> { // Case 1: builtin macros let mut helpers_opt = None; let attrs = self.item_tree.attrs(self.def_collector.db, krate, ModItem::from(id).into()); - let expander = if attrs.by_key(&sym::rustc_builtin_macro).exists() { + let expander = if attrs.by_key(sym::rustc_builtin_macro).exists() { if let Some(expander) = find_builtin_macro(&mac.name) { match expander { Either::Left(it) => MacroExpander::BuiltIn(it), Either::Right(it) => MacroExpander::BuiltInEager(it), } } else if let Some(expander) = find_builtin_derive(&mac.name) { - if let Some(attr) = attrs.by_key(&sym::rustc_builtin_macro).tt_values().next() { + if let Some(attr) = attrs.by_key(sym::rustc_builtin_macro).tt_values().next() { // NOTE: The item *may* have both `#[rustc_builtin_macro]` and `#[proc_macro_derive]`, // in which case rustc ignores the helper attributes from the latter, but it // "doesn't make sense in practice" (see rust-lang/rust#87027). @@ -2331,8 +2361,8 @@ impl ModCollector<'_, '_> { stdx::always!( name == mac.name, "built-in macro {} has #[rustc_builtin_macro] which declares different name {}", - mac.name.display(self.def_collector.db.upcast(), Edition::LATEST), - name.display(self.def_collector.db.upcast(), Edition::LATEST), + mac.name.display(self.def_collector.db, Edition::LATEST), + name.display(self.def_collector.db, Edition::LATEST), ); helpers_opt = Some(helpers); } @@ -2351,7 +2381,7 @@ impl ModCollector<'_, '_> { // Case 2: normal `macro` MacroExpander::Declarative }; - let allow_internal_unsafe = attrs.by_key(&sym::allow_internal_unsafe).exists(); + let allow_internal_unsafe = attrs.by_key(sym::allow_internal_unsafe).exists(); let macro_id = Macro2Loc { container: module, @@ -2361,6 +2391,10 @@ impl ModCollector<'_, '_> { edition: self.def_collector.def_map.data.edition, } .intern(self.def_collector.db); + self.def_collector.def_map.macro_def_to_macro_id.insert( + InFile::new(self.file_id(), self.item_tree[id].ast_id()).erase(), + macro_id.into(), + ); self.def_collector.define_macro_def( self.module_id, mac.name.clone(), @@ -2389,9 +2423,10 @@ impl ModCollector<'_, '_> { // new legacy macros that create textual scopes. We need a way to resolve names in textual // scopes without eager expansion. + let mut eager_callback_buffer = vec![]; // Case 1: try to resolve macro calls with single-segment name and expand macro_rules - if let Ok(res) = macro_call_as_call_id_with_eager( - db.upcast(), + if let Ok(res) = macro_call_as_call_id( + db, ast_id.ast_id, &ast_id.path, ctxt, @@ -2415,18 +2450,13 @@ impl ModCollector<'_, '_> { .map(|it| self.def_collector.db.macro_def(it)) }) }, - |path| { - let resolved_res = self.def_collector.def_map.resolve_path_fp_with_macro( - db, - ResolveMode::Other, - self.module_id, - path, - BuiltinShadowMode::Module, - Some(MacroSubNs::Bang), - ); - resolved_res.resolved_def.take_macros().map(|it| db.macro_def(it)) - }, + &mut |ptr, call_id| eager_callback_buffer.push((ptr, call_id)), ) { + for (ptr, call_id) in eager_callback_buffer { + self.def_collector.def_map.modules[self.module_id] + .scope + .add_macro_invoc(ptr.map(|(_, it)| it), call_id); + } // FIXME: if there were errors, this might've been in the eager expansion from an // unresolved macro, so we need to push this into late macro resolution. see fixme above if res.err.is_none() { @@ -2517,7 +2547,6 @@ impl ModCollector<'_, '_> { #[cfg(test)] mod tests { - use base_db::SourceDatabase; use test_fixture::WithFixture; use crate::{nameres::DefMapCrateData, test_db::TestDB}; @@ -2528,6 +2557,8 @@ mod tests { let mut collector = DefCollector { db, def_map, + local_def_map: LocalDefMap::default(), + crate_local_def_map: None, deps: FxHashMap::default(), glob_imports: FxHashMap::default(), unresolved_imports: Vec::new(), @@ -2550,7 +2581,7 @@ mod tests { let (db, file_id) = TestDB::with_single_file(not_ra_fixture); let krate = db.test_crate(); - let edition = db.crate_graph()[krate].edition; + let edition = krate.data(&db).edition; let module_origin = ModuleOrigin::CrateRoot { definition: file_id }; let def_map = DefMap::empty( krate, @@ -2588,7 +2619,7 @@ foo!(KABOOM); // the release mode. That's why the argument is not an ra_fixture -- // otherwise injection highlighting gets stuck. // - // We need to find a way to fail this faster. + // We need to find a way to fail this faster! do_resolve( r#" macro_rules! foo { diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/diagnostics.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/diagnostics.rs index bc1617c55b029..de3d2f48367f7 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/nameres/diagnostics.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/diagnostics.rs @@ -3,15 +3,14 @@ use std::ops::Not; use cfg::{CfgExpr, CfgOptions}; -use hir_expand::{attrs::AttrId, ExpandErrorKind, MacroCallKind}; +use hir_expand::{ExpandErrorKind, MacroCallKind, attrs::AttrId, mod_path::ModPath}; use la_arena::Idx; use syntax::ast; use crate::{ + AstId, item_tree::{self, AttrOwner, ItemTreeId, TreeId}, nameres::LocalModuleId, - path::ModPath, - AstId, }; #[derive(Debug, PartialEq, Eq)] diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/mod_resolution.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/mod_resolution.rs index 17d09bcbd0478..0c50f13edfb6c 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/nameres/mod_resolution.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/mod_resolution.rs @@ -1,10 +1,9 @@ //! This module resolves `mod foo;` declaration to file. use arrayvec::ArrayVec; use base_db::AnchoredPath; -use hir_expand::{name::Name, HirFileIdExt}; -use span::EditionedFileId; +use hir_expand::{EditionedFileId, name::Name}; -use crate::{db::DefDatabase, HirFileId}; +use crate::{HirFileId, db::DefDatabase}; const MOD_DEPTH_LIMIT: usize = 32; @@ -77,9 +76,9 @@ impl ModDir { } }; - let orig_file_id = file_id.original_file_respecting_includes(db.upcast()); + let orig_file_id = file_id.original_file_respecting_includes(db); for candidate in candidate_files.iter() { - let path = AnchoredPath { anchor: orig_file_id.file_id(), path: candidate.as_str() }; + let path = AnchoredPath { anchor: orig_file_id.file_id(db), path: candidate.as_str() }; if let Some(file_id) = db.resolve_path(path) { let is_mod_rs = candidate.ends_with("/mod.rs"); @@ -87,12 +86,12 @@ impl ModDir { let dir_path = if root_dir_owner { DirPath::empty() } else { - DirPath::new(format!("{}/", name)) + DirPath::new(format!("{name}/")) }; if let Some(mod_dir) = self.child(dir_path, !root_dir_owner) { return Ok(( // FIXME: Edition, is this rightr? - EditionedFileId::new(file_id, orig_file_id.edition()), + EditionedFileId::new(db, file_id, orig_file_id.edition(db)), is_mod_rs, mod_dir, )); diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/path_resolution.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/path_resolution.rs index 47c08d3d1dc67..a49155d878ca1 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/nameres/path_resolution.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/path_resolution.rs @@ -11,19 +11,22 @@ //! `ReachedFixedPoint` signals about this. use either::Either; -use hir_expand::{name::Name, Lookup}; +use hir_expand::{ + Lookup, + mod_path::{ModPath, PathKind}, + name::Name, +}; use span::Edition; use triomphe::Arc; use crate::{ + AdtId, LocalModuleId, ModuleDefId, db::DefDatabase, - item_scope::{ImportOrExternCrate, BUILTIN_SCOPE}, + item_scope::{BUILTIN_SCOPE, ImportOrExternCrate}, item_tree::FieldsShape, - nameres::{sub_namespace_match, BlockInfo, BuiltinShadowMode, DefMap, MacroSubNs}, - path::{ModPath, PathKind}, + nameres::{BlockInfo, BuiltinShadowMode, DefMap, LocalDefMap, MacroSubNs, sub_namespace_match}, per_ns::PerNs, visibility::{RawVisibility, Visibility}, - AdtId, LocalModuleId, ModuleDefId, }; #[derive(Debug, Clone, Copy, PartialEq, Eq)] @@ -91,6 +94,7 @@ impl PerNs { impl DefMap { pub(crate) fn resolve_visibility( &self, + local_def_map: &LocalDefMap, db: &dyn DefDatabase, // module to import to original_module: LocalModuleId, @@ -101,8 +105,14 @@ impl DefMap { ) -> Option { let mut vis = match visibility { RawVisibility::Module(path, explicitness) => { - let (result, remaining) = - self.resolve_path(db, original_module, path, BuiltinShadowMode::Module, None); + let (result, remaining) = self.resolve_path( + local_def_map, + db, + original_module, + path, + BuiltinShadowMode::Module, + None, + ); if remaining.is_some() { return None; } @@ -137,6 +147,7 @@ impl DefMap { // the result. pub(super) fn resolve_path_fp_with_macro( &self, + local_def_map: &LocalDefMap, db: &dyn DefDatabase, mode: ResolveMode, // module to import to @@ -148,6 +159,7 @@ impl DefMap { expected_macro_subns: Option, ) -> ResolvePathResult { let mut result = self.resolve_path_fp_with_macro_single( + local_def_map, db, mode, original_module, @@ -196,6 +208,7 @@ impl DefMap { current_map = &arc; let new = current_map.resolve_path_fp_in_all_preludes( + local_def_map, db, mode, original_module, @@ -210,6 +223,7 @@ impl DefMap { } let new = current_map.resolve_path_fp_with_macro_single( + local_def_map, db, mode, original_module, @@ -224,6 +238,7 @@ impl DefMap { pub(super) fn resolve_path_fp_with_macro_single( &self, + local_def_map: &LocalDefMap, db: &dyn DefDatabase, mode: ResolveMode, original_module: LocalModuleId, @@ -258,7 +273,12 @@ impl DefMap { None => return ResolvePathResult::empty(ReachedFixedPoint::Yes), }; tracing::debug!("resolving {:?} in crate root (+ extern prelude)", segment); - self.resolve_name_in_crate_root_or_extern_prelude(db, original_module, segment) + self.resolve_name_in_crate_root_or_extern_prelude( + local_def_map, + db, + original_module, + segment, + ) } PathKind::Plain => { let (_, segment) = match segments.next() { @@ -276,6 +296,7 @@ impl DefMap { tracing::debug!("resolving {:?} in module", segment); self.resolve_name_in_module( + local_def_map, db, original_module, segment, @@ -321,7 +342,9 @@ impl DefMap { // with), resolve the remaining path segments in that `DefMap`. let path = ModPath::from_segments(PathKind::SELF, path.segments().iter().cloned()); + // This is the same crate, so the local def map is the same. return def_map.resolve_path_fp_with_macro( + local_def_map, db, mode, local_id, @@ -333,10 +356,10 @@ impl DefMap { PerNs::types(module.into(), Visibility::Public, None) } - PathKind::Abs => match self.resolve_path_abs(&mut segments, path) { + PathKind::Abs => match self.resolve_path_abs(local_def_map, &mut segments, path) { Either::Left(it) => it, Either::Right(reached_fixed_point) => { - return ResolvePathResult::empty(reached_fixed_point) + return ResolvePathResult::empty(reached_fixed_point); } }, }; @@ -347,6 +370,7 @@ impl DefMap { /// Resolves a path only in the preludes, without accounting for item scopes. pub(super) fn resolve_path_fp_in_all_preludes( &self, + local_def_map: &LocalDefMap, db: &dyn DefDatabase, mode: ResolveMode, original_module: LocalModuleId, @@ -368,7 +392,7 @@ impl DefMap { None => return ResolvePathResult::empty(ReachedFixedPoint::Yes), }; tracing::debug!("resolving {:?} in crate root (+ extern prelude)", segment); - self.resolve_name_in_extern_prelude(segment) + self.resolve_name_in_extern_prelude(local_def_map, segment) } PathKind::Plain => { let (_, segment) = match segments.next() { @@ -376,16 +400,16 @@ impl DefMap { None => return ResolvePathResult::empty(ReachedFixedPoint::Yes), }; tracing::debug!("resolving {:?} in module", segment); - self.resolve_name_in_all_preludes(db, segment) + self.resolve_name_in_all_preludes(local_def_map, db, segment) } - PathKind::Abs => match self.resolve_path_abs(&mut segments, path) { + PathKind::Abs => match self.resolve_path_abs(local_def_map, &mut segments, path) { Either::Left(it) => it, Either::Right(reached_fixed_point) => { - return ResolvePathResult::empty(reached_fixed_point) + return ResolvePathResult::empty(reached_fixed_point); } }, PathKind::DollarCrate(_) | PathKind::Crate | PathKind::Super(_) => { - return ResolvePathResult::empty(ReachedFixedPoint::Yes) + return ResolvePathResult::empty(ReachedFixedPoint::Yes); } }; @@ -395,6 +419,7 @@ impl DefMap { /// 2018-style absolute path -- only extern prelude fn resolve_path_abs<'a>( &self, + local_def_map: &LocalDefMap, segments: &mut impl Iterator, path: &ModPath, ) -> Either { @@ -402,7 +427,7 @@ impl DefMap { Some((_, segment)) => segment, None => return Either::Right(ReachedFixedPoint::Yes), }; - if let Some(&(def, extern_crate)) = self.data.extern_prelude.get(segment) { + if let Some(&(def, extern_crate)) = local_def_map.extern_prelude.get(segment) { tracing::debug!("absolute path {:?} resolved to crate {:?}", path, def); Either::Left(PerNs::types( def.into(), @@ -451,6 +476,7 @@ impl DefMap { // this point, we know we're resolving a multi-segment path so macro kind // expectation is discarded. let resolution = defp_map.resolve_path_fp_with_macro( + LocalDefMap::EMPTY, db, ResolveMode::Other, module.local_id, @@ -483,33 +509,24 @@ impl DefMap { ModuleDefId::AdtId(AdtId::EnumId(e)) => { // enum variant cov_mark::hit!(can_import_enum_variant); - let def_map; - let loc = e.lookup(db); - let tree = loc.id.item_tree(db); - let current_def_map = - self.krate == loc.container.krate && self.block_id() == loc.container.block; - let res = if current_def_map { - &self.enum_definitions[&e] - } else { - def_map = loc.container.def_map(db); - &def_map.enum_definitions[&e] - } - .iter() - .find_map(|&variant| { - let variant_data = &tree[variant.lookup(db).id.value]; - (variant_data.name == *segment).then(|| match variant_data.shape { - FieldsShape::Record => { - PerNs::types(variant.into(), Visibility::Public, None) - } - FieldsShape::Tuple | FieldsShape::Unit => PerNs::both( - variant.into(), - variant.into(), - Visibility::Public, - None, - ), - }) - }); + let res = + db.enum_variants(e).variants.iter().find(|(_, name)| name == segment).map( + |&(variant, _)| { + let item_tree_id = variant.lookup(db).id; + match item_tree_id.item_tree(db)[item_tree_id.value].shape { + FieldsShape::Record => { + PerNs::types(variant.into(), Visibility::Public, None) + } + FieldsShape::Tuple | FieldsShape::Unit => PerNs::both( + variant.into(), + variant.into(), + Visibility::Public, + None, + ), + } + }, + ); // FIXME: Need to filter visibility here and below? Not sure. return match res { Some(res) => { @@ -568,6 +585,7 @@ impl DefMap { fn resolve_name_in_module( &self, + local_def_map: &LocalDefMap, db: &dyn DefDatabase, module: LocalModuleId, name: &Name, @@ -611,7 +629,7 @@ impl DefMap { // they might been shadowed by local names. return PerNs::none(); } - self.resolve_name_in_extern_prelude(name) + self.resolve_name_in_extern_prelude(local_def_map, name) }; let macro_use_prelude = || self.resolve_in_macro_use_prelude(name); let prelude = || { @@ -628,19 +646,24 @@ impl DefMap { .or_else(prelude) } - fn resolve_name_in_all_preludes(&self, db: &dyn DefDatabase, name: &Name) -> PerNs { + fn resolve_name_in_all_preludes( + &self, + local_def_map: &LocalDefMap, + db: &dyn DefDatabase, + name: &Name, + ) -> PerNs { // Resolve in: // - extern prelude / macro_use prelude // - std prelude - let extern_prelude = self.resolve_name_in_extern_prelude(name); + let extern_prelude = self.resolve_name_in_extern_prelude(local_def_map, name); let macro_use_prelude = || self.resolve_in_macro_use_prelude(name); let prelude = || self.resolve_in_prelude(db, name); extern_prelude.or_else(macro_use_prelude).or_else(prelude) } - fn resolve_name_in_extern_prelude(&self, name: &Name) -> PerNs { - self.data.extern_prelude.get(name).map_or(PerNs::none(), |&(it, extern_crate)| { + fn resolve_name_in_extern_prelude(&self, local_def_map: &LocalDefMap, name: &Name) -> PerNs { + local_def_map.extern_prelude.get(name).map_or(PerNs::none(), |&(it, extern_crate)| { PerNs::types( it.into(), Visibility::Public, @@ -650,18 +673,18 @@ impl DefMap { } fn resolve_in_macro_use_prelude(&self, name: &Name) -> PerNs { - self.macro_use_prelude.get(name).map_or(PerNs::none(), |&(it, _extern_crate)| { + self.macro_use_prelude.get(name).map_or(PerNs::none(), |&(it, extern_crate)| { PerNs::macros( it, Visibility::Public, - // FIXME? - None, // extern_crate.map(ImportOrExternCrate::ExternCrate), + extern_crate.map(ImportOrExternCrate::ExternCrate), ) }) } fn resolve_name_in_crate_root_or_extern_prelude( &self, + local_def_map: &LocalDefMap, db: &dyn DefDatabase, module: LocalModuleId, name: &Name, @@ -678,7 +701,7 @@ impl DefMap { // Don't resolve extern prelude in pseudo-module of a block. return PerNs::none(); } - self.resolve_name_in_extern_prelude(name) + self.resolve_name_in_extern_prelude(local_def_map, name) }; from_crate_root.or_else(from_extern_prelude) diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/proc_macro.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/proc_macro.rs index b93a1c87b432f..cd8882183bb4d 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/nameres/proc_macro.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/proc_macro.rs @@ -30,26 +30,36 @@ impl ProcMacroKind { } impl Attrs { - #[rustfmt::skip] pub fn parse_proc_macro_decl(&self, func_name: &Name) -> Option { if self.is_proc_macro() { Some(ProcMacroDef { name: func_name.clone(), kind: ProcMacroKind::Bang }) } else if self.is_proc_macro_attribute() { Some(ProcMacroDef { name: func_name.clone(), kind: ProcMacroKind::Attr }) - } else if self.by_key(&sym::proc_macro_derive).exists() { - let derive = self.by_key(&sym::proc_macro_derive).tt_values().next()?; - let def = parse_macro_name_and_helper_attrs(derive) - .map(|(name, helpers)| ProcMacroDef { name, kind: ProcMacroKind::Derive { helpers } }); - - if def.is_none() { - tracing::trace!("malformed `#[proc_macro_derive]`: {}", derive); - } - - def + } else if self.by_key(sym::proc_macro_derive).exists() { + let derive = self.parse_proc_macro_derive(); + Some(match derive { + Some((name, helpers)) => { + ProcMacroDef { name, kind: ProcMacroKind::Derive { helpers } } + } + None => ProcMacroDef { + name: func_name.clone(), + kind: ProcMacroKind::Derive { helpers: Box::default() }, + }, + }) } else { None } } + + pub fn parse_proc_macro_derive(&self) -> Option<(Name, Box<[Name]>)> { + let derive = self.by_key(sym::proc_macro_derive).tt_values().next()?; + parse_macro_name_and_helper_attrs(derive) + } + + pub fn parse_rustc_builtin_macro(&self) -> Option<(Name, Box<[Name]>)> { + let derive = self.by_key(sym::rustc_builtin_macro).tt_values().next()?; + parse_macro_name_and_helper_attrs(derive) + } } // This fn is intended for `#[proc_macro_derive(..)]` and `#[rustc_builtin_macro(..)]`, which have diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests.rs index 73fc6787bfe81..3fd095a9a98a8 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests.rs @@ -4,8 +4,8 @@ mod macros; mod mod_resolution; mod primitives; -use base_db::SourceDatabase; -use expect_test::{expect, Expect}; +use base_db::RootQueryDb; +use expect_test::{Expect, expect}; use test_fixture::WithFixture; use triomphe::Arc; diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/incremental.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/incremental.rs index c8b7ec463a0fd..179a9c8fec21b 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/incremental.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/incremental.rs @@ -1,7 +1,13 @@ -use base_db::SourceDatabaseFileInputExt as _; +use base_db::{ + CrateDisplayName, CrateGraphBuilder, CrateName, CrateOrigin, CrateWorkspaceData, + DependencyBuilder, Env, RootQueryDb, SourceDatabase, +}; +use intern::Symbol; +use span::Edition; use test_fixture::WithFixture; +use triomphe::Arc; -use crate::{db::DefDatabase, nameres::tests::TestDB, AdtId, ModuleDefId}; +use crate::{AdtId, ModuleDefId, db::DefDatabase, nameres::tests::TestDB}; fn check_def_map_is_not_recomputed(ra_fixture_initial: &str, ra_fixture_change: &str) { let (mut db, pos) = TestDB::with_position(ra_fixture_initial); @@ -12,7 +18,7 @@ fn check_def_map_is_not_recomputed(ra_fixture_initial: &str, ra_fixture_change: }); assert!(format!("{events:?}").contains("crate_def_map"), "{events:#?}") } - db.set_file_text(pos.file_id.file_id(), ra_fixture_change); + db.set_file_text(pos.file_id.file_id(&db), ra_fixture_change); { let events = db.log_executed(|| { @@ -22,6 +28,80 @@ fn check_def_map_is_not_recomputed(ra_fixture_initial: &str, ra_fixture_change: } } +#[test] +fn crate_metadata_changes_should_not_invalidate_unrelated_def_maps() { + let (mut db, files) = TestDB::with_many_files( + r#" +//- /a.rs crate:a +pub fn foo() {} + +//- /b.rs crate:b +pub struct Bar; + +//- /c.rs crate:c deps:b +pub const BAZ: u32 = 0; + "#, + ); + + for &krate in db.all_crates().iter() { + db.crate_def_map(krate); + } + + let all_crates_before = db.all_crates(); + + { + // Add a dependency a -> b. + let mut new_crate_graph = CrateGraphBuilder::default(); + + let mut add_crate = |crate_name, root_file_idx: usize| { + new_crate_graph.add_crate_root( + files[root_file_idx].file_id(&db), + Edition::CURRENT, + Some(CrateDisplayName::from_canonical_name(crate_name)), + None, + Default::default(), + None, + Env::default(), + CrateOrigin::Local { repo: None, name: Some(Symbol::intern(crate_name)) }, + false, + Arc::new( + // FIXME: This is less than ideal + TryFrom::try_from( + &*std::env::current_dir().unwrap().as_path().to_string_lossy(), + ) + .unwrap(), + ), + Arc::new(CrateWorkspaceData { data_layout: Err("".into()), toolchain: None }), + ) + }; + let a = add_crate("a", 0); + let b = add_crate("b", 1); + let c = add_crate("c", 2); + new_crate_graph + .add_dep(c, DependencyBuilder::new(CrateName::new("b").unwrap(), b)) + .unwrap(); + new_crate_graph + .add_dep(b, DependencyBuilder::new(CrateName::new("a").unwrap(), a)) + .unwrap(); + new_crate_graph.set_in_db(&mut db); + } + + let all_crates_after = db.all_crates(); + assert!( + Arc::ptr_eq(&all_crates_before, &all_crates_after), + "the all_crates list should not have been invalidated" + ); + + let events = db.log_executed(|| { + for &krate in db.all_crates().iter() { + db.crate_def_map(krate); + } + }); + let invalidated_def_maps = + events.iter().filter(|event| event.contains("crate_def_map")).count(); + assert_eq!(invalidated_def_maps, 1, "{events:#?}") +} + #[test] fn typing_inside_a_function_should_not_invalidate_def_map() { check_def_map_is_not_recomputed( @@ -255,10 +335,10 @@ m!(Z); assert_eq!(module_data.scope.resolutions().count(), 4); }); let n_recalculated_item_trees = - events.iter().filter(|it| it.contains("item_tree(")).count(); + events.iter().filter(|it| it.contains("file_item_tree_shim")).count(); assert_eq!(n_recalculated_item_trees, 6); let n_reparsed_macros = - events.iter().filter(|it| it.contains("parse_macro_expansion(")).count(); + events.iter().filter(|it| it.contains("parse_macro_expansion_shim")).count(); assert_eq!(n_reparsed_macros, 3); } @@ -268,7 +348,7 @@ fn quux() { 92 } m!(Y); m!(Z); "#; - db.set_file_text(pos.file_id.file_id(), new_text); + db.set_file_text(pos.file_id.file_id(&db), new_text); { let events = db.log_executed(|| { @@ -276,10 +356,11 @@ m!(Z); let (_, module_data) = crate_def_map.modules.iter().last().unwrap(); assert_eq!(module_data.scope.resolutions().count(), 4); }); - let n_recalculated_item_trees = events.iter().filter(|it| it.contains("item_tree")).count(); - assert_eq!(n_recalculated_item_trees, 1); + let n_recalculated_item_trees = + events.iter().filter(|it| it.contains("file_item_tree_shim")).count(); + assert_eq!(n_recalculated_item_trees, 1, "{events:#?}"); let n_reparsed_macros = - events.iter().filter(|it| it.contains("parse_macro_expansion(")).count(); + events.iter().filter(|it| it.contains("parse_macro_expansion_shim")).count(); assert_eq!(n_reparsed_macros, 0); } } @@ -310,14 +391,15 @@ pub type Ty = (); let events = db.log_executed(|| { db.file_item_tree(pos.file_id.into()); }); - let n_calculated_item_trees = events.iter().filter(|it| it.contains("item_tree(")).count(); + let n_calculated_item_trees = + events.iter().filter(|it| it.contains("file_item_tree_shim")).count(); assert_eq!(n_calculated_item_trees, 1); - let n_parsed_files = events.iter().filter(|it| it.contains("parse(")).count(); + let n_parsed_files = events.iter().filter(|it| it.contains("parse")).count(); assert_eq!(n_parsed_files, 1); } - // Delete the parse tree. - base_db::ParseQuery.in_db(&db).purge(); + // FIXME(salsa-transition): bring this back + // base_db::ParseQuery.in_db(&db).purge(); { let events = db.log_executed(|| { @@ -327,22 +409,22 @@ pub type Ty = (); assert_eq!(module_data.scope.impls().count(), 1); for imp in module_data.scope.impls() { - db.impl_data(imp); + db.impl_signature(imp); } for (_, res) in module_data.scope.resolutions() { match res.values.map(|it| it.def).or(res.types.map(|it| it.def)).unwrap() { - ModuleDefId::FunctionId(f) => _ = db.function_data(f), + ModuleDefId::FunctionId(f) => _ = db.function_signature(f), ModuleDefId::AdtId(adt) => match adt { - AdtId::StructId(it) => _ = db.struct_data(it), - AdtId::UnionId(it) => _ = db.union_data(it), - AdtId::EnumId(it) => _ = db.enum_data(it), + AdtId::StructId(it) => _ = db.struct_signature(it), + AdtId::UnionId(it) => _ = db.union_signature(it), + AdtId::EnumId(it) => _ = db.enum_signature(it), }, - ModuleDefId::ConstId(it) => _ = db.const_data(it), - ModuleDefId::StaticId(it) => _ = db.static_data(it), - ModuleDefId::TraitId(it) => _ = db.trait_data(it), - ModuleDefId::TraitAliasId(it) => _ = db.trait_alias_data(it), - ModuleDefId::TypeAliasId(it) => _ = db.type_alias_data(it), + ModuleDefId::ConstId(it) => _ = db.const_signature(it), + ModuleDefId::StaticId(it) => _ = db.static_signature(it), + ModuleDefId::TraitId(it) => _ = db.trait_signature(it), + ModuleDefId::TraitAliasId(it) => _ = db.trait_alias_signature(it), + ModuleDefId::TypeAliasId(it) => _ = db.type_alias_signature(it), ModuleDefId::EnumVariantId(_) | ModuleDefId::ModuleId(_) | ModuleDefId::MacroId(_) diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/macros.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/macros.rs index 610886d55f40f..5f8a01523d820 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/macros.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/macros.rs @@ -1095,7 +1095,7 @@ pub fn derive_macro_2(_item: TokenStream) -> TokenStream { } "#, ); - let krate = db.crate_graph().iter().next().unwrap(); + let krate = *db.all_crates().last().expect("no crate graph present"); let def_map = db.crate_def_map(krate); assert_eq!(def_map.data.exported_derives.len(), 1); @@ -1445,7 +1445,7 @@ struct TokenStream; fn proc_attr(a: TokenStream, b: TokenStream) -> TokenStream { a } "#, ); - let krate = db.crate_graph().iter().next().unwrap(); + let krate = *db.all_crates().last().expect("no crate graph present"); let def_map = db.crate_def_map(krate); let root_module = &def_map[DefMap::ROOT].scope; diff --git a/src/tools/rust-analyzer/crates/hir-def/src/path.rs b/src/tools/rust-analyzer/crates/hir-def/src/path.rs deleted file mode 100644 index 713e7389736a0..0000000000000 --- a/src/tools/rust-analyzer/crates/hir-def/src/path.rs +++ /dev/null @@ -1,338 +0,0 @@ -//! A desugared representation of paths like `crate::foo` or `::bar`. -mod lower; -#[cfg(test)] -mod tests; - -use std::{ - fmt::{self, Display}, - iter, -}; - -use crate::{ - lang_item::LangItemTarget, - lower::LowerCtx, - type_ref::{ConstRef, LifetimeRef, TypeBound, TypeRefId}, -}; -use hir_expand::name::Name; -use intern::Interned; -use span::Edition; -use stdx::thin_vec::thin_vec_with_header_struct; -use syntax::ast; - -pub use hir_expand::mod_path::{path, ModPath, PathKind}; - -pub use lower::hir_segment_to_ast_segment; - -#[derive(Debug, Clone, PartialEq, Eq)] -pub enum ImportAlias { - /// Unnamed alias, as in `use Foo as _;` - Underscore, - /// Named alias - Alias(Name), -} - -impl ImportAlias { - pub fn display(&self, edition: Edition) -> impl Display + '_ { - ImportAliasDisplay { value: self, edition } - } -} - -struct ImportAliasDisplay<'a> { - value: &'a ImportAlias, - edition: Edition, -} -impl Display for ImportAliasDisplay<'_> { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match self.value { - ImportAlias::Underscore => f.write_str("_"), - ImportAlias::Alias(name) => Display::fmt(&name.display_no_db(self.edition), f), - } - } -} - -#[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub enum Path { - /// `BarePath` is used when the path has neither generics nor type anchor, since the vast majority of paths - /// are in this category, and splitting `Path` this way allows it to be more thin. When the path has either generics - /// or type anchor, it is `Path::Normal` with the generics filled with `None` even if there are none (practically - /// this is not a problem since many more paths have generics than a type anchor). - BarePath(Interned), - /// `Path::Normal` will always have either generics or type anchor. - Normal(NormalPath), - /// A link to a lang item. It is used in desugaring of things like `it?`. We can show these - /// links via a normal path since they might be private and not accessible in the usage place. - LangItem(LangItemTarget, Option), -} - -// This type is being used a lot, make sure it doesn't grow unintentionally. -#[cfg(target_arch = "x86_64")] -const _: () = { - assert!(size_of::() == 16); - assert!(size_of::>() == 16); -}; - -thin_vec_with_header_struct! { - pub new(pub(crate)) struct NormalPath, NormalPathHeader { - pub generic_args: [Option], - pub type_anchor: Option, - pub mod_path: Interned; ref, - } -} - -/// Generic arguments to a path segment (e.g. the `i32` in `Option`). This -/// also includes bindings of associated types, like in `Iterator`. -#[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub struct GenericArgs { - pub args: Box<[GenericArg]>, - /// This specifies whether the args contain a Self type as the first - /// element. This is the case for path segments like ``, where - /// `T` is actually a type parameter for the path `Trait` specifying the - /// Self type. Otherwise, when we have a path `Trait`, the Self type - /// is left out. - pub has_self_type: bool, - /// Associated type bindings like in `Iterator`. - pub bindings: Box<[AssociatedTypeBinding]>, - /// Whether these generic args were desugared from `Trait(Arg) -> Output` - /// parenthesis notation typically used for the `Fn` traits. - pub desugared_from_fn: bool, -} - -/// An associated type binding like in `Iterator`. -#[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub struct AssociatedTypeBinding { - /// The name of the associated type. - pub name: Name, - /// The generic arguments to the associated type. e.g. For `Trait = &'a T>`, this - /// would be `['a, T]`. - pub args: Option, - /// The type bound to this associated type (in `Item = T`, this would be the - /// `T`). This can be `None` if there are bounds instead. - pub type_ref: Option, - /// Bounds for the associated type, like in `Iterator`. (This is the unstable `associated_type_bounds` - /// feature.) - pub bounds: Box<[TypeBound]>, -} - -/// A single generic argument. -#[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub enum GenericArg { - Type(TypeRefId), - Lifetime(LifetimeRef), - Const(ConstRef), -} - -impl Path { - /// Converts an `ast::Path` to `Path`. Works with use trees. - /// It correctly handles `$crate` based path from macro call. - pub fn from_src(ctx: &mut LowerCtx<'_>, path: ast::Path) -> Option { - lower::lower_path(ctx, path) - } - - /// Converts a known mod path to `Path`. - pub fn from_known_path(path: ModPath, generic_args: Vec>) -> Path { - Path::Normal(NormalPath::new(None, Interned::new(path), generic_args)) - } - - /// Converts a known mod path to `Path`. - pub fn from_known_path_with_no_generic(path: ModPath) -> Path { - Path::BarePath(Interned::new(path)) - } - - #[inline] - pub fn kind(&self) -> &PathKind { - match self { - Path::BarePath(mod_path) => &mod_path.kind, - Path::Normal(path) => &path.mod_path().kind, - Path::LangItem(..) => &PathKind::Abs, - } - } - - #[inline] - pub fn type_anchor(&self) -> Option { - match self { - Path::Normal(path) => path.type_anchor(), - Path::LangItem(..) | Path::BarePath(_) => None, - } - } - - #[inline] - pub fn generic_args(&self) -> Option<&[Option]> { - match self { - Path::Normal(path) => Some(path.generic_args()), - Path::LangItem(..) | Path::BarePath(_) => None, - } - } - - pub fn segments(&self) -> PathSegments<'_> { - match self { - Path::BarePath(mod_path) => { - PathSegments { segments: mod_path.segments(), generic_args: None } - } - Path::Normal(path) => PathSegments { - segments: path.mod_path().segments(), - generic_args: Some(path.generic_args()), - }, - Path::LangItem(_, seg) => PathSegments { segments: seg.as_slice(), generic_args: None }, - } - } - - pub fn mod_path(&self) -> Option<&ModPath> { - match self { - Path::BarePath(mod_path) => Some(mod_path), - Path::Normal(path) => Some(path.mod_path()), - Path::LangItem(..) => None, - } - } - - pub fn qualifier(&self) -> Option { - match self { - Path::BarePath(mod_path) => { - if mod_path.is_ident() { - return None; - } - Some(Path::BarePath(Interned::new(ModPath::from_segments( - mod_path.kind, - mod_path.segments()[..mod_path.segments().len() - 1].iter().cloned(), - )))) - } - Path::Normal(path) => { - let mod_path = path.mod_path(); - if mod_path.is_ident() { - return None; - } - let type_anchor = path.type_anchor(); - let generic_args = path.generic_args(); - let qualifier_mod_path = Interned::new(ModPath::from_segments( - mod_path.kind, - mod_path.segments()[..mod_path.segments().len() - 1].iter().cloned(), - )); - let qualifier_generic_args = &generic_args[..generic_args.len() - 1]; - if type_anchor.is_none() && qualifier_generic_args.iter().all(|it| it.is_none()) { - Some(Path::BarePath(qualifier_mod_path)) - } else { - Some(Path::Normal(NormalPath::new( - type_anchor, - qualifier_mod_path, - qualifier_generic_args.iter().cloned(), - ))) - } - } - Path::LangItem(..) => None, - } - } - - pub fn is_self_type(&self) -> bool { - match self { - Path::BarePath(mod_path) => mod_path.is_Self(), - Path::Normal(path) => { - path.type_anchor().is_none() - && path.mod_path().is_Self() - && path.generic_args().iter().all(|args| args.is_none()) - } - Path::LangItem(..) => false, - } - } -} - -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -pub struct PathSegment<'a> { - pub name: &'a Name, - pub args_and_bindings: Option<&'a GenericArgs>, -} - -impl PathSegment<'_> { - pub const MISSING: PathSegment<'static> = - PathSegment { name: &Name::missing(), args_and_bindings: None }; -} - -#[derive(Debug, Clone, Copy)] -pub struct PathSegments<'a> { - segments: &'a [Name], - generic_args: Option<&'a [Option]>, -} - -impl<'a> PathSegments<'a> { - pub const EMPTY: PathSegments<'static> = PathSegments { segments: &[], generic_args: None }; - pub fn is_empty(&self) -> bool { - self.len() == 0 - } - pub fn len(&self) -> usize { - self.segments.len() - } - pub fn first(&self) -> Option> { - self.get(0) - } - pub fn last(&self) -> Option> { - self.get(self.len().checked_sub(1)?) - } - - pub fn get(&self, idx: usize) -> Option> { - let res = PathSegment { - name: self.segments.get(idx)?, - args_and_bindings: self.generic_args.and_then(|it| it.get(idx)?.as_ref()), - }; - Some(res) - } - - pub fn skip(&self, len: usize) -> PathSegments<'a> { - PathSegments { - segments: self.segments.get(len..).unwrap_or(&[]), - generic_args: self.generic_args.and_then(|it| it.get(len..)), - } - } - - pub fn take(&self, len: usize) -> PathSegments<'a> { - PathSegments { - segments: self.segments.get(..len).unwrap_or(self.segments), - generic_args: self.generic_args.map(|it| it.get(..len).unwrap_or(it)), - } - } - - pub fn strip_last(&self) -> PathSegments<'a> { - PathSegments { - segments: self.segments.split_last().map_or(&[], |it| it.1), - generic_args: self.generic_args.map(|it| it.split_last().map_or(&[][..], |it| it.1)), - } - } - - pub fn strip_last_two(&self) -> PathSegments<'a> { - PathSegments { - segments: self.segments.get(..self.segments.len().saturating_sub(2)).unwrap_or(&[]), - generic_args: self - .generic_args - .map(|it| it.get(..it.len().saturating_sub(2)).unwrap_or(&[])), - } - } - - pub fn iter(&self) -> impl Iterator> { - self.segments - .iter() - .zip(self.generic_args.into_iter().flatten().chain(iter::repeat(&None))) - .map(|(name, args)| PathSegment { name, args_and_bindings: args.as_ref() }) - } -} - -impl GenericArgs { - pub(crate) fn from_ast( - lower_ctx: &mut LowerCtx<'_>, - node: ast::GenericArgList, - ) -> Option { - lower::lower_generic_args(lower_ctx, node) - } - - pub(crate) fn empty() -> GenericArgs { - GenericArgs { - args: Box::default(), - has_self_type: false, - bindings: Box::default(), - desugared_from_fn: false, - } - } -} - -impl From for Path { - fn from(name: Name) -> Path { - Path::BarePath(Interned::new(ModPath::from_segments(PathKind::Plain, iter::once(name)))) - } -} diff --git a/src/tools/rust-analyzer/crates/hir-def/src/path/lower.rs b/src/tools/rust-analyzer/crates/hir-def/src/path/lower.rs deleted file mode 100644 index 3b7e7653fba55..0000000000000 --- a/src/tools/rust-analyzer/crates/hir-def/src/path/lower.rs +++ /dev/null @@ -1,357 +0,0 @@ -//! Transforms syntax into `Path` objects, ideally with accounting for hygiene - -use std::iter; - -use crate::{lower::LowerCtx, path::NormalPath, type_ref::ConstRef}; - -use hir_expand::{ - mod_path::resolve_crate_root, - name::{AsName, Name}, -}; -use intern::{sym, Interned}; -use stdx::thin_vec::EmptyOptimizedThinVec; -use syntax::ast::{self, AstNode, HasGenericArgs, HasTypeBounds}; - -use crate::{ - path::{AssociatedTypeBinding, GenericArg, GenericArgs, ModPath, Path, PathKind}, - type_ref::{LifetimeRef, TypeBound, TypeRef}, -}; - -#[cfg(test)] -thread_local! { - /// This is used to test `hir_segment_to_ast_segment()`. It's a hack, but it makes testing much easier. - pub(super) static SEGMENT_LOWERING_MAP: std::cell::RefCell> = std::cell::RefCell::default(); -} - -/// Converts an `ast::Path` to `Path`. Works with use trees. -/// It correctly handles `$crate` based path from macro call. -// If you modify the logic of the lowering, make sure to check if `hir_segment_to_ast_segment()` -// also needs an update. -pub(super) fn lower_path(ctx: &mut LowerCtx<'_>, mut path: ast::Path) -> Option { - let mut kind = PathKind::Plain; - let mut type_anchor = None; - let mut segments = Vec::new(); - let mut generic_args = Vec::new(); - #[cfg(test)] - let mut ast_segments = Vec::new(); - #[cfg(test)] - let mut ast_segments_offset = 0; - #[allow(unused_mut)] - let mut push_segment = |_segment: &ast::PathSegment, segments: &mut Vec, name| { - #[cfg(test)] - ast_segments.push(_segment.clone()); - segments.push(name); - }; - loop { - let segment = path.segment()?; - - if segment.coloncolon_token().is_some() { - kind = PathKind::Abs; - } - - match segment.kind()? { - ast::PathSegmentKind::Name(name_ref) => { - if name_ref.text() == "$crate" { - if path.qualifier().is_some() { - // FIXME: Report an error. - return None; - } - break kind = resolve_crate_root( - ctx.db.upcast(), - ctx.span_map().span_for_range(name_ref.syntax().text_range()).ctx, - ) - .map(PathKind::DollarCrate) - .unwrap_or(PathKind::Crate); - } - let name = name_ref.as_name(); - let args = segment - .generic_arg_list() - .and_then(|it| lower_generic_args(ctx, it)) - .or_else(|| { - lower_generic_args_from_fn_path( - ctx, - segment.parenthesized_arg_list(), - segment.ret_type(), - ) - }); - if args.is_some() { - generic_args.resize(segments.len(), None); - generic_args.push(args); - } - push_segment(&segment, &mut segments, name); - } - ast::PathSegmentKind::SelfTypeKw => { - push_segment(&segment, &mut segments, Name::new_symbol_root(sym::Self_.clone())); - } - ast::PathSegmentKind::Type { type_ref, trait_ref } => { - assert!(path.qualifier().is_none()); // this can only occur at the first segment - - let self_type = TypeRef::from_ast(ctx, type_ref?); - - match trait_ref { - // ::foo - None => { - type_anchor = Some(self_type); - kind = PathKind::Plain; - } - // >::Foo desugars to Trait::Foo - Some(trait_ref) => { - let path = Path::from_src(ctx, trait_ref.path()?)?; - let mod_path = path.mod_path()?; - let path_generic_args = path.generic_args(); - let num_segments = mod_path.segments().len(); - kind = mod_path.kind; - - segments.extend(mod_path.segments().iter().cloned().rev()); - #[cfg(test)] - { - ast_segments_offset = mod_path.segments().len(); - } - if let Some(path_generic_args) = path_generic_args { - generic_args.resize(segments.len() - num_segments, None); - generic_args.extend(Vec::from(path_generic_args).into_iter().rev()); - } else { - generic_args.resize(segments.len(), None); - } - - let self_type = GenericArg::Type(self_type); - - // Insert the type reference (T in the above example) as Self parameter for the trait - let last_segment = generic_args.get_mut(segments.len() - num_segments)?; - *last_segment = Some(match last_segment.take() { - Some(it) => GenericArgs { - args: iter::once(self_type) - .chain(it.args.iter().cloned()) - .collect(), - - has_self_type: true, - bindings: it.bindings.clone(), - desugared_from_fn: it.desugared_from_fn, - }, - None => GenericArgs { - args: Box::new([self_type]), - has_self_type: true, - ..GenericArgs::empty() - }, - }); - } - } - } - ast::PathSegmentKind::CrateKw => { - if path.qualifier().is_some() { - // FIXME: Report an error. - return None; - } - kind = PathKind::Crate; - break; - } - ast::PathSegmentKind::SelfKw => { - if path.qualifier().is_some() { - // FIXME: Report an error. - return None; - } - // don't break out if `self` is the last segment of a path, this mean we got a - // use tree like `foo::{self}` which we want to resolve as `foo` - if !segments.is_empty() { - kind = PathKind::SELF; - break; - } - } - ast::PathSegmentKind::SuperKw => { - let nested_super_count = if let PathKind::Super(n) = kind { n } else { 0 }; - kind = PathKind::Super(nested_super_count + 1); - } - } - path = match qualifier(&path) { - Some(it) => it, - None => break, - }; - } - segments.reverse(); - if !generic_args.is_empty() || type_anchor.is_some() { - generic_args.resize(segments.len(), None); - generic_args.reverse(); - } - - if segments.is_empty() && kind == PathKind::Plain && type_anchor.is_none() { - // plain empty paths don't exist, this means we got a single `self` segment as our path - kind = PathKind::SELF; - } - - // handle local_inner_macros : - // Basically, even in rustc it is quite hacky: - // https://github.com/rust-lang/rust/blob/614f273e9388ddd7804d5cbc80b8865068a3744e/src/librustc_resolve/macros.rs#L456 - // We follow what it did anyway :) - if segments.len() == 1 && kind == PathKind::Plain { - if let Some(_macro_call) = path.syntax().parent().and_then(ast::MacroCall::cast) { - let syn_ctxt = ctx.span_map().span_for_range(path.segment()?.syntax().text_range()).ctx; - if let Some(macro_call_id) = ctx.db.lookup_intern_syntax_context(syn_ctxt).outer_expn { - if ctx.db.lookup_intern_macro_call(macro_call_id).def.local_inner { - kind = match resolve_crate_root(ctx.db.upcast(), syn_ctxt) { - Some(crate_root) => PathKind::DollarCrate(crate_root), - None => PathKind::Crate, - } - } - } - } - } - - #[cfg(test)] - { - ast_segments.reverse(); - SEGMENT_LOWERING_MAP - .with_borrow_mut(|map| map.extend(ast_segments.into_iter().zip(ast_segments_offset..))); - } - - let mod_path = Interned::new(ModPath::from_segments(kind, segments)); - if type_anchor.is_none() && generic_args.is_empty() { - return Some(Path::BarePath(mod_path)); - } else { - return Some(Path::Normal(NormalPath::new(type_anchor, mod_path, generic_args))); - } - - fn qualifier(path: &ast::Path) -> Option { - if let Some(q) = path.qualifier() { - return Some(q); - } - // FIXME: this bottom up traversal is not too precise. - // Should we handle do a top-down analysis, recording results? - let use_tree_list = path.syntax().ancestors().find_map(ast::UseTreeList::cast)?; - let use_tree = use_tree_list.parent_use_tree(); - use_tree.path() - } -} - -/// This function finds the AST segment that corresponds to the HIR segment -/// with index `segment_idx` on the path that is lowered from `path`. -pub fn hir_segment_to_ast_segment(path: &ast::Path, segment_idx: u32) -> Option { - // Too tightly coupled to `lower_path()`, but unfortunately we cannot decouple them, - // as keeping source maps for all paths segments will have a severe impact on memory usage. - - let mut segments = path.segments(); - if let Some(ast::PathSegmentKind::Type { trait_ref: Some(trait_ref), .. }) = - segments.clone().next().and_then(|it| it.kind()) - { - segments.next(); - return find_segment(trait_ref.path()?.segments().chain(segments), segment_idx); - } - return find_segment(segments, segment_idx); - - fn find_segment( - segments: impl Iterator, - segment_idx: u32, - ) -> Option { - segments - .filter(|segment| match segment.kind() { - Some( - ast::PathSegmentKind::CrateKw - | ast::PathSegmentKind::SelfKw - | ast::PathSegmentKind::SuperKw - | ast::PathSegmentKind::Type { .. }, - ) - | None => false, - Some(ast::PathSegmentKind::Name(name)) => name.text() != "$crate", - Some(ast::PathSegmentKind::SelfTypeKw) => true, - }) - .nth(segment_idx as usize) - } -} - -pub(super) fn lower_generic_args( - lower_ctx: &mut LowerCtx<'_>, - node: ast::GenericArgList, -) -> Option { - let mut args = Vec::new(); - let mut bindings = Vec::new(); - for generic_arg in node.generic_args() { - match generic_arg { - ast::GenericArg::TypeArg(type_arg) => { - let type_ref = TypeRef::from_ast_opt(lower_ctx, type_arg.ty()); - lower_ctx.update_impl_traits_bounds_from_type_ref(type_ref); - args.push(GenericArg::Type(type_ref)); - } - ast::GenericArg::AssocTypeArg(assoc_type_arg) => { - if assoc_type_arg.param_list().is_some() { - // We currently ignore associated return type bounds. - continue; - } - if let Some(name_ref) = assoc_type_arg.name_ref() { - // Nested impl traits like `impl Foo` are allowed - lower_ctx.with_outer_impl_trait_scope(false, |lower_ctx| { - let name = name_ref.as_name(); - let args = assoc_type_arg - .generic_arg_list() - .and_then(|args| lower_generic_args(lower_ctx, args)); - let type_ref = - assoc_type_arg.ty().map(|it| TypeRef::from_ast(lower_ctx, it)); - let type_ref = type_ref - .inspect(|&tr| lower_ctx.update_impl_traits_bounds_from_type_ref(tr)); - let bounds = if let Some(l) = assoc_type_arg.type_bound_list() { - l.bounds().map(|it| TypeBound::from_ast(lower_ctx, it)).collect() - } else { - Box::default() - }; - bindings.push(AssociatedTypeBinding { name, args, type_ref, bounds }); - }); - } - } - ast::GenericArg::LifetimeArg(lifetime_arg) => { - if let Some(lifetime) = lifetime_arg.lifetime() { - let lifetime_ref = LifetimeRef::new(&lifetime); - args.push(GenericArg::Lifetime(lifetime_ref)) - } - } - ast::GenericArg::ConstArg(arg) => { - let arg = ConstRef::from_const_arg(lower_ctx, Some(arg)); - args.push(GenericArg::Const(arg)) - } - } - } - - if args.is_empty() && bindings.is_empty() { - return None; - } - Some(GenericArgs { - args: args.into_boxed_slice(), - has_self_type: false, - bindings: bindings.into_boxed_slice(), - desugared_from_fn: false, - }) -} - -/// Collect `GenericArgs` from the parts of a fn-like path, i.e. `Fn(X, Y) -/// -> Z` (which desugars to `Fn<(X, Y), Output=Z>`). -fn lower_generic_args_from_fn_path( - ctx: &mut LowerCtx<'_>, - args: Option, - ret_type: Option, -) -> Option { - let params = args?; - let mut param_types = Vec::new(); - for param in params.type_args() { - let type_ref = TypeRef::from_ast_opt(ctx, param.ty()); - param_types.push(type_ref); - } - let args = Box::new([GenericArg::Type( - ctx.alloc_type_ref_desugared(TypeRef::Tuple(EmptyOptimizedThinVec::from_iter(param_types))), - )]); - let bindings = if let Some(ret_type) = ret_type { - let type_ref = TypeRef::from_ast_opt(ctx, ret_type.ty()); - Box::new([AssociatedTypeBinding { - name: Name::new_symbol_root(sym::Output.clone()), - args: None, - type_ref: Some(type_ref), - bounds: Box::default(), - }]) - } else { - // -> () - let type_ref = ctx.alloc_type_ref_desugared(TypeRef::unit()); - Box::new([AssociatedTypeBinding { - name: Name::new_symbol_root(sym::Output.clone()), - args: None, - type_ref: Some(type_ref), - bounds: Box::default(), - }]) - }; - Some(GenericArgs { args, has_self_type: false, bindings, desugared_from_fn: true }) -} diff --git a/src/tools/rust-analyzer/crates/hir-def/src/path/tests.rs b/src/tools/rust-analyzer/crates/hir-def/src/path/tests.rs deleted file mode 100644 index 67a27bf85e89c..0000000000000 --- a/src/tools/rust-analyzer/crates/hir-def/src/path/tests.rs +++ /dev/null @@ -1,126 +0,0 @@ -use expect_test::{expect, Expect}; -use span::Edition; -use syntax::ast::{self, make}; -use test_fixture::WithFixture; - -use crate::{ - lower::LowerCtx, - path::{ - lower::{hir_segment_to_ast_segment, SEGMENT_LOWERING_MAP}, - Path, - }, - pretty, - test_db::TestDB, - type_ref::{TypesMap, TypesSourceMap}, -}; - -fn lower_path(path: ast::Path) -> (TestDB, TypesMap, Option) { - let (db, file_id) = TestDB::with_single_file(""); - let mut types_map = TypesMap::default(); - let mut types_source_map = TypesSourceMap::default(); - let mut ctx = LowerCtx::new(&db, file_id.into(), &mut types_map, &mut types_source_map); - let lowered_path = ctx.lower_path(path); - (db, types_map, lowered_path) -} - -#[track_caller] -fn check_hir_to_ast(path: &str, ignore_segments: &[&str]) { - let path = make::path_from_text(path); - SEGMENT_LOWERING_MAP.with_borrow_mut(|map| map.clear()); - let _ = lower_path(path.clone()).2.expect("failed to lower path"); - SEGMENT_LOWERING_MAP.with_borrow(|map| { - for (segment, segment_idx) in map { - if ignore_segments.contains(&&*segment.to_string()) { - continue; - } - - let restored_segment = hir_segment_to_ast_segment(&path, *segment_idx as u32) - .unwrap_or_else(|| { - panic!( - "failed to map back segment `{segment}` \ - numbered {segment_idx} in HIR from path `{path}`" - ) - }); - assert_eq!( - segment, &restored_segment, - "mapping back `{segment}` numbered {segment_idx} in HIR \ - from path `{path}` produced incorrect segment `{restored_segment}`" - ); - } - }); -} - -#[test] -fn hir_to_ast_trait_ref() { - check_hir_to_ast("::E::F", &["A"]); -} - -#[test] -fn hir_to_ast_plain_path() { - check_hir_to_ast("A::B::C::D::E::F", &[]); -} - -#[test] -fn hir_to_ast_crate_path() { - check_hir_to_ast("crate::A::B::C", &[]); - check_hir_to_ast("crate::super::super::A::B::C", &[]); -} - -#[test] -fn hir_to_ast_self_path() { - check_hir_to_ast("self::A::B::C", &[]); - check_hir_to_ast("self::super::super::A::B::C", &[]); -} - -#[test] -fn hir_to_ast_super_path() { - check_hir_to_ast("super::A::B::C", &[]); - check_hir_to_ast("super::super::super::A::B::C", &[]); -} - -#[test] -fn hir_to_ast_type_anchor_path() { - check_hir_to_ast("::C::D", &["A", "B"]); -} - -#[test] -fn hir_to_ast_path_super_in_middle() { - check_hir_to_ast("A::super::B::super::super::C::D", &[]); -} - -#[track_caller] -fn check_fail_lowering(path: &str) { - let (_, _, lowered_path) = lower_path(make::path_from_text(path)); - assert!(lowered_path.is_none(), "path `{path}` should fail lowering"); -} - -#[test] -fn keywords_in_middle_fail_lowering1() { - check_fail_lowering("self::A::self::B::super::C::crate::D"); -} - -#[test] -fn keywords_in_middle_fail_lowering2() { - check_fail_lowering("A::super::self::C::D"); -} - -#[test] -fn keywords_in_middle_fail_lowering3() { - check_fail_lowering("A::crate::B::C::D"); -} - -#[track_caller] -fn check_path_lowering(path: &str, expected: Expect) { - let (db, types_map, lowered_path) = lower_path(make::path_from_text(path)); - let lowered_path = lowered_path.expect("failed to lower path"); - let mut buf = String::new(); - pretty::print_path(&db, &lowered_path, &types_map, &mut buf, Edition::CURRENT) - .expect("failed to pretty-print path"); - expected.assert_eq(&buf); -} - -#[test] -fn fn_like_path_with_coloncolon() { - check_path_lowering("Fn::(A, B) -> C", expect![[r#"Fn::<(A, B), Output = C>"#]]); - check_path_lowering("Fn::(A, B)", expect![[r#"Fn::<(A, B), Output = ()>"#]]); -} diff --git a/src/tools/rust-analyzer/crates/hir-def/src/per_ns.rs b/src/tools/rust-analyzer/crates/hir-def/src/per_ns.rs index c2d3f67f17e77..8721cd65dbac7 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/per_ns.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/per_ns.rs @@ -6,9 +6,9 @@ use bitflags::bitflags; use crate::{ + MacroId, ModuleDefId, item_scope::{ImportId, ImportOrExternCrate, ImportOrGlob, ItemInNs}, visibility::Visibility, - MacroId, ModuleDefId, }; #[derive(PartialEq, Eq, Hash, Copy, Clone, Debug)] @@ -37,7 +37,8 @@ pub struct Item { pub type TypesItem = Item; pub type ValuesItem = Item; -pub type MacrosItem = Item; +// May be Externcrate for `[macro_use]`'d macros +pub type MacrosItem = Item; #[derive(Clone, Copy, Debug, Default, Eq, Hash, PartialEq)] pub struct PerNs { @@ -84,7 +85,7 @@ impl PerNs { } } - pub fn macros(def: MacroId, vis: Visibility, import: Option) -> PerNs { + pub fn macros(def: MacroId, vis: Visibility, import: Option) -> PerNs { PerNs { types: None, values: None, macros: Some(Item { def, vis, import }) } } @@ -116,7 +117,7 @@ impl PerNs { self.macros.map(|it| it.def) } - pub fn take_macros_import(self) -> Option<(MacroId, Option)> { + pub fn take_macros_import(self) -> Option<(MacroId, Option)> { self.macros.map(|it| (it.def, it.import)) } @@ -146,11 +147,7 @@ impl PerNs { } pub fn or_else(self, f: impl FnOnce() -> PerNs) -> PerNs { - if self.is_full() { - self - } else { - self.or(f()) - } + if self.is_full() { self } else { self.or(f()) } } pub fn iter_items(self) -> impl Iterator)> { @@ -162,9 +159,6 @@ impl PerNs { self.values .map(|it| (ItemInNs::Values(it.def), it.import.map(ImportOrExternCrate::from))), ) - .chain( - self.macros - .map(|it| (ItemInNs::Macros(it.def), it.import.map(ImportOrExternCrate::from))), - ) + .chain(self.macros.map(|it| (ItemInNs::Macros(it.def), it.import))) } } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/pretty.rs b/src/tools/rust-analyzer/crates/hir-def/src/pretty.rs deleted file mode 100644 index eb9488feaa914..0000000000000 --- a/src/tools/rust-analyzer/crates/hir-def/src/pretty.rs +++ /dev/null @@ -1,306 +0,0 @@ -//! Display and pretty printing routines. - -use std::{ - fmt::{self, Write}, - mem, -}; - -use hir_expand::mod_path::PathKind; -use itertools::Itertools; -use span::Edition; - -use crate::{ - db::DefDatabase, - lang_item::LangItemTarget, - path::{GenericArg, GenericArgs, Path}, - type_ref::{ - Mutability, TraitBoundModifier, TypeBound, TypeRef, TypeRefId, TypesMap, UseArgRef, - }, -}; - -pub(crate) fn print_path( - db: &dyn DefDatabase, - path: &Path, - map: &TypesMap, - buf: &mut dyn Write, - edition: Edition, -) -> fmt::Result { - if let Path::LangItem(it, s) = path { - write!(buf, "builtin#lang(")?; - match *it { - LangItemTarget::ImplDef(it) => write!(buf, "{it:?}")?, - LangItemTarget::EnumId(it) => { - write!(buf, "{}", db.enum_data(it).name.display(db.upcast(), edition))? - } - LangItemTarget::Function(it) => { - write!(buf, "{}", db.function_data(it).name.display(db.upcast(), edition))? - } - LangItemTarget::Static(it) => { - write!(buf, "{}", db.static_data(it).name.display(db.upcast(), edition))? - } - LangItemTarget::Struct(it) => { - write!(buf, "{}", db.struct_data(it).name.display(db.upcast(), edition))? - } - LangItemTarget::Union(it) => { - write!(buf, "{}", db.union_data(it).name.display(db.upcast(), edition))? - } - LangItemTarget::TypeAlias(it) => { - write!(buf, "{}", db.type_alias_data(it).name.display(db.upcast(), edition))? - } - LangItemTarget::Trait(it) => { - write!(buf, "{}", db.trait_data(it).name.display(db.upcast(), edition))? - } - LangItemTarget::EnumVariant(it) => { - write!(buf, "{}", db.enum_variant_data(it).name.display(db.upcast(), edition))? - } - } - - if let Some(s) = s { - write!(buf, "::{}", s.display(db.upcast(), edition))?; - } - return write!(buf, ")"); - } - match path.type_anchor() { - Some(anchor) => { - write!(buf, "<")?; - print_type_ref(db, anchor, map, buf, edition)?; - write!(buf, ">::")?; - } - None => match path.kind() { - PathKind::Plain => {} - &PathKind::SELF => write!(buf, "self")?, - PathKind::Super(n) => { - for i in 0..*n { - if i == 0 { - buf.write_str("super")?; - } else { - buf.write_str("::super")?; - } - } - } - PathKind::Crate => write!(buf, "crate")?, - PathKind::Abs => {} - PathKind::DollarCrate(_) => write!(buf, "$crate")?, - }, - } - - for (i, segment) in path.segments().iter().enumerate() { - if i != 0 || !matches!(path.kind(), PathKind::Plain) { - write!(buf, "::")?; - } - - write!(buf, "{}", segment.name.display(db.upcast(), edition))?; - if let Some(generics) = segment.args_and_bindings { - write!(buf, "::<")?; - print_generic_args(db, generics, map, buf, edition)?; - - write!(buf, ">")?; - } - } - - Ok(()) -} - -pub(crate) fn print_generic_args( - db: &dyn DefDatabase, - generics: &GenericArgs, - map: &TypesMap, - buf: &mut dyn Write, - edition: Edition, -) -> fmt::Result { - let mut first = true; - let args = if generics.has_self_type { - let (self_ty, args) = generics.args.split_first().unwrap(); - write!(buf, "Self=")?; - print_generic_arg(db, self_ty, map, buf, edition)?; - first = false; - args - } else { - &generics.args - }; - for arg in args { - if !first { - write!(buf, ", ")?; - } - first = false; - print_generic_arg(db, arg, map, buf, edition)?; - } - for binding in generics.bindings.iter() { - if !first { - write!(buf, ", ")?; - } - first = false; - write!(buf, "{}", binding.name.display(db.upcast(), edition))?; - if !binding.bounds.is_empty() { - write!(buf, ": ")?; - print_type_bounds(db, &binding.bounds, map, buf, edition)?; - } - if let Some(ty) = binding.type_ref { - write!(buf, " = ")?; - print_type_ref(db, ty, map, buf, edition)?; - } - } - Ok(()) -} - -pub(crate) fn print_generic_arg( - db: &dyn DefDatabase, - arg: &GenericArg, - map: &TypesMap, - buf: &mut dyn Write, - edition: Edition, -) -> fmt::Result { - match arg { - GenericArg::Type(ty) => print_type_ref(db, *ty, map, buf, edition), - GenericArg::Const(c) => write!(buf, "{}", c.display(db.upcast(), edition)), - GenericArg::Lifetime(lt) => write!(buf, "{}", lt.name.display(db.upcast(), edition)), - } -} - -pub(crate) fn print_type_ref( - db: &dyn DefDatabase, - type_ref: TypeRefId, - map: &TypesMap, - buf: &mut dyn Write, - edition: Edition, -) -> fmt::Result { - // FIXME: deduplicate with `HirDisplay` impl - match &map[type_ref] { - TypeRef::Never => write!(buf, "!")?, - TypeRef::Placeholder => write!(buf, "_")?, - TypeRef::Tuple(fields) => { - write!(buf, "(")?; - for (i, field) in fields.iter().enumerate() { - if i != 0 { - write!(buf, ", ")?; - } - print_type_ref(db, *field, map, buf, edition)?; - } - write!(buf, ")")?; - } - TypeRef::Path(path) => print_path(db, path, map, buf, edition)?, - TypeRef::RawPtr(pointee, mtbl) => { - let mtbl = match mtbl { - Mutability::Shared => "*const", - Mutability::Mut => "*mut", - }; - write!(buf, "{mtbl} ")?; - print_type_ref(db, *pointee, map, buf, edition)?; - } - TypeRef::Reference(ref_) => { - let mtbl = match ref_.mutability { - Mutability::Shared => "", - Mutability::Mut => "mut ", - }; - write!(buf, "&")?; - if let Some(lt) = &ref_.lifetime { - write!(buf, "{} ", lt.name.display(db.upcast(), edition))?; - } - write!(buf, "{mtbl}")?; - print_type_ref(db, ref_.ty, map, buf, edition)?; - } - TypeRef::Array(array) => { - write!(buf, "[")?; - print_type_ref(db, array.ty, map, buf, edition)?; - write!(buf, "; {}]", array.len.display(db.upcast(), edition))?; - } - TypeRef::Slice(elem) => { - write!(buf, "[")?; - print_type_ref(db, *elem, map, buf, edition)?; - write!(buf, "]")?; - } - TypeRef::Fn(fn_) => { - let ((_, return_type), args) = - fn_.params().split_last().expect("TypeRef::Fn is missing return type"); - if fn_.is_unsafe() { - write!(buf, "unsafe ")?; - } - if let Some(abi) = fn_.abi() { - buf.write_str("extern ")?; - buf.write_str(abi.as_str())?; - buf.write_char(' ')?; - } - write!(buf, "fn(")?; - for (i, (_, typeref)) in args.iter().enumerate() { - if i != 0 { - write!(buf, ", ")?; - } - print_type_ref(db, *typeref, map, buf, edition)?; - } - if fn_.is_varargs() { - if !args.is_empty() { - write!(buf, ", ")?; - } - write!(buf, "...")?; - } - write!(buf, ") -> ")?; - print_type_ref(db, *return_type, map, buf, edition)?; - } - TypeRef::Macro(_ast_id) => { - write!(buf, "")?; - } - TypeRef::Error => write!(buf, "{{unknown}}")?, - TypeRef::ImplTrait(bounds) => { - write!(buf, "impl ")?; - print_type_bounds(db, bounds, map, buf, edition)?; - } - TypeRef::DynTrait(bounds) => { - write!(buf, "dyn ")?; - print_type_bounds(db, bounds, map, buf, edition)?; - } - } - - Ok(()) -} - -pub(crate) fn print_type_bounds( - db: &dyn DefDatabase, - bounds: &[TypeBound], - map: &TypesMap, - buf: &mut dyn Write, - edition: Edition, -) -> fmt::Result { - for (i, bound) in bounds.iter().enumerate() { - if i != 0 { - write!(buf, " + ")?; - } - - match bound { - TypeBound::Path(path, modifier) => { - match modifier { - TraitBoundModifier::None => (), - TraitBoundModifier::Maybe => write!(buf, "?")?, - } - print_path(db, &map[*path], map, buf, edition)?; - } - TypeBound::ForLifetime(lifetimes, path) => { - write!( - buf, - "for<{}> ", - lifetimes.iter().map(|it| it.display(db.upcast(), edition)).format(", ") - )?; - print_path(db, &map[*path], map, buf, edition)?; - } - TypeBound::Lifetime(lt) => write!(buf, "{}", lt.name.display(db.upcast(), edition))?, - TypeBound::Use(args) => { - write!(buf, "use<")?; - let mut first = true; - for arg in args { - if !mem::take(&mut first) { - write!(buf, ", ")?; - } - match arg { - UseArgRef::Name(it) => write!(buf, "{}", it.display(db.upcast(), edition))?, - UseArgRef::Lifetime(it) => { - write!(buf, "{}", it.name.display(db.upcast(), edition))? - } - } - } - write!(buf, ">")? - } - TypeBound::Error => write!(buf, "{{unknown}}")?, - } - } - - Ok(()) -} diff --git a/src/tools/rust-analyzer/crates/hir-def/src/resolver.rs b/src/tools/rust-analyzer/crates/hir-def/src/resolver.rs index a2e6e4cc04368..8a8d17018c1bd 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/resolver.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/resolver.rs @@ -1,37 +1,43 @@ //! Name resolution façade. -use std::{fmt, iter, mem}; +use std::{fmt, mem}; -use base_db::CrateId; -use hir_expand::{name::Name, MacroDefId}; -use intern::{sym, Symbol}; +use base_db::Crate; +use hir_expand::{ + MacroDefId, + mod_path::{ModPath, PathKind}, + name::Name, +}; +use intern::{Symbol, sym}; use itertools::Itertools as _; use rustc_hash::FxHashSet; -use smallvec::{smallvec, SmallVec}; -use span::SyntaxContextId; +use smallvec::{SmallVec, smallvec}; +use span::SyntaxContext; use triomphe::Arc; use crate::{ + AdtId, ConstId, ConstParamId, CrateRootModuleId, DefWithBodyId, EnumId, EnumVariantId, + ExternBlockId, ExternCrateId, FunctionId, FxIndexMap, GenericDefId, GenericParamId, HasModule, + ImplId, ItemContainerId, ItemTreeLoc, LifetimeParamId, LocalModuleId, Lookup, Macro2Id, + MacroId, MacroRulesId, ModuleDefId, ModuleId, ProcMacroId, StaticId, StructId, TraitAliasId, + TraitId, TypeAliasId, TypeOrConstParamId, TypeParamId, UseId, VariantId, builtin_type::BuiltinType, - data::ExternCrateDeclData, db::DefDatabase, expr_store::{ - scope::{ExprScopes, ScopeId}, HygieneId, + path::Path, + scope::{ExprScopes, ScopeId}, }, - generics::{GenericParams, TypeOrConstParamData}, - hir::{BindingId, ExprId, LabelId}, - item_scope::{BuiltinShadowMode, ImportOrExternCrate, ImportOrGlob, BUILTIN_SCOPE}, + hir::{ + BindingId, ExprId, LabelId, + generics::{GenericParams, TypeOrConstParamData}, + }, + item_scope::{BUILTIN_SCOPE, BuiltinShadowMode, ImportOrExternCrate, ImportOrGlob, ItemScope}, + item_tree::ImportAlias, lang_item::LangItemTarget, - nameres::{DefMap, MacroSubNs, ResolvePathResultPrefixInfo}, - path::{ModPath, Path, PathKind}, + nameres::{DefMap, LocalDefMap, MacroSubNs, ResolvePathResultPrefixInfo}, per_ns::PerNs, - type_ref::{LifetimeRef, TypesMap}, + type_ref::LifetimeRef, visibility::{RawVisibility, Visibility}, - AdtId, ConstId, ConstParamId, CrateRootModuleId, DefWithBodyId, EnumId, EnumVariantId, - ExternBlockId, ExternCrateId, FunctionId, FxIndexMap, GenericDefId, GenericParamId, HasModule, - ImplId, ItemContainerId, ItemTreeLoc, LifetimeParamId, LocalModuleId, Lookup, Macro2Id, - MacroId, MacroRulesId, ModuleDefId, ModuleId, ProcMacroId, StaticId, StructId, TraitAliasId, - TraitId, TypeAliasId, TypeOrConstParamId, TypeOwnerId, TypeParamId, UseId, VariantId, }; #[derive(Debug, Clone)] @@ -47,6 +53,7 @@ pub struct Resolver { #[derive(Clone)] struct ModuleItemMap { def_map: Arc, + local_def_map: Arc, module_id: LocalModuleId, } @@ -76,16 +83,13 @@ impl fmt::Debug for ExprScope { enum Scope { /// All the items and imported names of a module BlockScope(ModuleItemMap), - /// Brings the generic parameters of an item into scope + /// Brings the generic parameters of an item into scope as well as the `Self` type alias / + /// generic for ADTs and impls. GenericParams { def: GenericDefId, params: Arc }, - /// Brings `Self` in `impl` block into scope - ImplDefScope(ImplId), - /// Brings `Self` in enum, struct and union definitions into scope - AdtScope(AdtId), /// Local bindings ExprScope(ExprScope), /// Macro definition inside bodies that affects all paths after it in the same block. - MacroDefScope(Box), + MacroDefScope(MacroDefId), } #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] @@ -101,9 +105,8 @@ pub enum TypeNs { BuiltinType(BuiltinType), TraitId(TraitId), TraitAliasId(TraitAliasId), - // Module belong to type ns, but the resolver is used when all module paths - // are fully resolved. - // ModuleId(ModuleId) + + ModuleId(ModuleId), } #[derive(Debug, Clone, PartialEq, Eq, Hash)] @@ -180,7 +183,7 @@ impl Resolver { { let path = match path { Path::BarePath(mod_path) => mod_path, - Path::Normal(it) => it.mod_path(), + Path::Normal(it) => &it.mod_path, Path::LangItem(l, seg) => { let type_ns = match *l { LangItemTarget::Union(it) => TypeNs::AdtId(it.into()), @@ -207,12 +210,33 @@ impl Resolver { return self.module_scope.resolve_path_in_type_ns(db, path); } - let remaining_idx = || if path.segments().len() == 1 { None } else { Some(1) }; + let remaining_idx = || { + if path.segments().len() == 1 { None } else { Some(1) } + }; for scope in self.scopes() { match scope { Scope::ExprScope(_) | Scope::MacroDefScope(_) => continue, Scope::GenericParams { params, def } => { + if let &GenericDefId::ImplId(impl_) = def { + if *first_name == sym::Self_ { + return Some(( + TypeNs::SelfType(impl_), + remaining_idx(), + None, + ResolvePathResultPrefixInfo::default(), + )); + } + } else if let &GenericDefId::AdtId(adt) = def { + if *first_name == sym::Self_ { + return Some(( + TypeNs::AdtSelfType(adt), + remaining_idx(), + None, + ResolvePathResultPrefixInfo::default(), + )); + } + } if let Some(id) = params.find_type_by_name(first_name, *def) { return Some(( TypeNs::GenericParam(id), @@ -222,28 +246,26 @@ impl Resolver { )); } } - &Scope::ImplDefScope(impl_) => { - if *first_name == sym::Self_.clone() { - return Some(( - TypeNs::SelfType(impl_), - remaining_idx(), - None, - ResolvePathResultPrefixInfo::default(), - )); - } - } - &Scope::AdtScope(adt) => { - if *first_name == sym::Self_.clone() { - return Some(( - TypeNs::AdtSelfType(adt), - remaining_idx(), - None, - ResolvePathResultPrefixInfo::default(), - )); - } - } Scope::BlockScope(m) => { if let Some(res) = m.resolve_path_in_type_ns(db, path) { + let res = match res.0 { + TypeNs::ModuleId(_) if res.1.is_none() => { + if let Some(ModuleDefId::BuiltinType(builtin)) = BUILTIN_SCOPE + .get(first_name) + .and_then(|builtin| builtin.take_types()) + { + ( + TypeNs::BuiltinType(builtin), + remaining_idx(), + None, + ResolvePathResultPrefixInfo::default(), + ) + } else { + res + } + } + _ => res, + }; return Some(res); } } @@ -269,11 +291,18 @@ impl Resolver { db: &dyn DefDatabase, visibility: &RawVisibility, ) -> Option { - let within_impl = self.scopes().any(|scope| matches!(scope, Scope::ImplDefScope(_))); match visibility { RawVisibility::Module(_, _) => { - let (item_map, module) = self.item_scope(); - item_map.resolve_visibility(db, module, visibility, within_impl) + let (item_map, item_local_map, module) = self.item_scope_(); + item_map.resolve_visibility( + item_local_map, + db, + module, + visibility, + self.scopes().any(|scope| { + matches!(scope, Scope::GenericParams { def: GenericDefId::ImplId(_), .. }) + }), + ) } RawVisibility::Public => Some(Visibility::Public), } @@ -296,7 +325,7 @@ impl Resolver { ) -> Option<(ResolveValueResult, ResolvePathResultPrefixInfo)> { let path = match path { Path::BarePath(mod_path) => mod_path, - Path::Normal(it) => it.mod_path(), + Path::Normal(it) => &it.mod_path, Path::LangItem(l, None) => { return Some(( ResolveValueResult::ValueNs( @@ -314,7 +343,7 @@ impl Resolver { None, ), ResolvePathResultPrefixInfo::default(), - )) + )); } Path::LangItem(l, Some(_)) => { let type_ns = match *l { @@ -336,7 +365,7 @@ impl Resolver { } }; let n_segments = path.segments().len(); - let tmp = Name::new_symbol_root(sym::self_.clone()); + let tmp = Name::new_symbol_root(sym::self_); let first_name = if path.is_self() { &tmp } else { path.segments().first()? }; let skip_to_mod = path.kind != PathKind::Plain && !path.is_self(); if skip_to_mod { @@ -367,6 +396,14 @@ impl Resolver { handle_macro_def_scope(db, &mut hygiene_id, &mut hygiene_info, macro_id) } Scope::GenericParams { params, def } => { + if let &GenericDefId::ImplId(impl_) = def { + if *first_name == sym::Self_ { + return Some(( + ResolveValueResult::ValueNs(ValueNs::ImplSelf(impl_), None), + ResolvePathResultPrefixInfo::default(), + )); + } + } if let Some(id) = params.find_const_by_name(first_name, *def) { let val = ValueNs::GenericParam(id); return Some(( @@ -375,16 +412,6 @@ impl Resolver { )); } } - &Scope::ImplDefScope(impl_) => { - if *first_name == sym::Self_.clone() { - return Some(( - ResolveValueResult::ValueNs(ValueNs::ImplSelf(impl_), None), - ResolvePathResultPrefixInfo::default(), - )); - } - } - // bare `Self` doesn't work in the value namespace in a struct/enum definition - Scope::AdtScope(_) => continue, Scope::BlockScope(m) => { if let Some(def) = m.resolve_path_in_value_ns(db, path) { return Some(def); @@ -397,6 +424,22 @@ impl Resolver { match scope { Scope::ExprScope(_) | Scope::MacroDefScope(_) => continue, Scope::GenericParams { params, def } => { + if let &GenericDefId::ImplId(impl_) = def { + if *first_name == sym::Self_ { + return Some(( + ResolveValueResult::Partial(TypeNs::SelfType(impl_), 1, None), + ResolvePathResultPrefixInfo::default(), + )); + } + } else if let &GenericDefId::AdtId(adt) = def { + if *first_name == sym::Self_ { + let ty = TypeNs::AdtSelfType(adt); + return Some(( + ResolveValueResult::Partial(ty, 1, None), + ResolvePathResultPrefixInfo::default(), + )); + } + } if let Some(id) = params.find_type_by_name(first_name, *def) { let ty = TypeNs::GenericParam(id); return Some(( @@ -405,23 +448,6 @@ impl Resolver { )); } } - &Scope::ImplDefScope(impl_) => { - if *first_name == sym::Self_.clone() { - return Some(( - ResolveValueResult::Partial(TypeNs::SelfType(impl_), 1, None), - ResolvePathResultPrefixInfo::default(), - )); - } - } - Scope::AdtScope(adt) => { - if *first_name == sym::Self_.clone() { - let ty = TypeNs::AdtSelfType(*adt); - return Some(( - ResolveValueResult::Partial(ty, 1, None), - ResolvePathResultPrefixInfo::default(), - )); - } - } Scope::BlockScope(m) => { if let Some(def) = m.resolve_path_in_value_ns(db, path) { return Some(def); @@ -467,10 +493,17 @@ impl Resolver { db: &dyn DefDatabase, path: &ModPath, expected_macro_kind: Option, - ) -> Option<(MacroId, Option)> { - let (item_map, module) = self.item_scope(); + ) -> Option<(MacroId, Option)> { + let (item_map, item_local_map, module) = self.item_scope_(); item_map - .resolve_path(db, module, path, BuiltinShadowMode::Other, expected_macro_kind) + .resolve_path( + item_local_map, + db, + module, + path, + BuiltinShadowMode::Other, + expected_macro_kind, + ) .0 .take_macros_import() } @@ -485,16 +518,19 @@ impl Resolver { } pub fn resolve_lifetime(&self, lifetime: &LifetimeRef) -> Option { - if lifetime.name == sym::tick_static.clone() { - return Some(LifetimeNs::Static); - } - - self.scopes().find_map(|scope| match scope { - Scope::GenericParams { def, params } => { - params.find_lifetime_by_name(&lifetime.name, *def).map(LifetimeNs::LifetimeParam) + match lifetime { + LifetimeRef::Static => Some(LifetimeNs::Static), + LifetimeRef::Named(name) => self.scopes().find_map(|scope| match scope { + Scope::GenericParams { def, params } => { + params.find_lifetime_by_name(name, *def).map(LifetimeNs::LifetimeParam) + } + _ => None, + }), + LifetimeRef::Placeholder | LifetimeRef::Error => None, + LifetimeRef::Param(lifetime_param_id) => { + Some(LifetimeNs::LifetimeParam(*lifetime_param_id)) } - _ => None, - }) + } } /// Returns a set of names available in the current scope. @@ -544,7 +580,7 @@ impl Resolver { for scope in self.scopes() { scope.process_names(&mut res, db); } - let ModuleItemMap { ref def_map, module_id } = self.module_scope; + let ModuleItemMap { ref def_map, module_id, ref local_def_map } = self.module_scope; // FIXME: should we provide `self` here? // f( // Name::self_param(), @@ -566,7 +602,7 @@ impl Resolver { res.add(name, ScopeDef::ModuleDef(def.into())); }, ); - def_map.extern_prelude().for_each(|(name, (def, _extern_crate))| { + local_def_map.extern_prelude().for_each(|(name, (def, _extern_crate))| { res.add(name, ScopeDef::ModuleDef(ModuleDefId::ModuleId(def.into()))); }); BUILTIN_SCOPE.iter().for_each(|(name, &def)| { @@ -581,6 +617,7 @@ impl Resolver { res.map } + /// Note: Not to be used directly within hir-def/hir-ty pub fn extern_crate_decls_in_scope<'a>( &'a self, db: &'a dyn DefDatabase, @@ -588,12 +625,22 @@ impl Resolver { self.module_scope.def_map[self.module_scope.module_id] .scope .extern_crate_decls() - .map(|id| ExternCrateDeclData::extern_crate_decl_data_query(db, id).name.clone()) + .filter_map(|id| { + let loc = id.lookup(db); + let tree = loc.item_tree_id().item_tree(db); + match &tree[loc.id.value].alias { + Some(alias) => match alias { + ImportAlias::Underscore => None, + ImportAlias::Alias(name) => Some(name.clone()), + }, + None => Some(tree[loc.id.value].name.clone()), + } + }) } pub fn extern_crates_in_scope(&self) -> impl Iterator + '_ { self.module_scope - .def_map + .local_def_map .extern_prelude() .map(|(name, module_id)| (name.clone(), module_id.0.into())) } @@ -606,13 +653,12 @@ impl Resolver { for scope in self.scopes() { match scope { Scope::BlockScope(m) => traits.extend(m.def_map[m.module_id].scope.traits()), - &Scope::ImplDefScope(impl_) => { - let impl_data = db.impl_data(impl_); + &Scope::GenericParams { def: GenericDefId::ImplId(impl_), .. } => { + let impl_data = db.impl_signature(impl_); if let Some(target_trait) = impl_data.target_trait { - if let Some(TypeNs::TraitId(trait_)) = self.resolve_path_in_type_ns_fully( - db, - &impl_data.types_map[target_trait.path], - ) { + if let Some(TypeNs::TraitId(trait_)) = self + .resolve_path_in_type_ns_fully(db, &impl_data.store[target_trait.path]) + { traits.insert(trait_); } } @@ -641,29 +687,21 @@ impl Resolver { } pub fn module(&self) -> ModuleId { - let (def_map, local_id) = self.item_scope(); + let (def_map, _, local_id) = self.item_scope_(); def_map.module_id(local_id) } - pub fn krate(&self) -> CrateId { - self.module_scope.def_map.krate() + pub fn item_scope(&self) -> &ItemScope { + let (def_map, _, local_id) = self.item_scope_(); + &def_map[local_id].scope } - pub fn def_map(&self) -> &DefMap { - self.item_scope().0 + pub fn krate(&self) -> Crate { + self.module_scope.def_map.krate() } - pub fn where_predicates_in_scope( - &self, - ) -> impl Iterator { - self.scopes() - .filter_map(|scope| match scope { - Scope::GenericParams { params, def } => Some((params, def)), - _ => None, - }) - .flat_map(|(params, def)| { - params.where_predicates().zip(iter::repeat((def, ¶ms.types_map))) - }) + pub fn def_map(&self) -> &DefMap { + self.item_scope_().0 } pub fn generic_def(&self) -> Option { @@ -694,19 +732,9 @@ impl Resolver { }) } - pub fn type_owner(&self) -> Option { - self.scopes().find_map(|scope| match scope { - Scope::BlockScope(_) | Scope::MacroDefScope(_) => None, - &Scope::GenericParams { def, .. } => Some(def.into()), - &Scope::ImplDefScope(id) => Some(id.into()), - &Scope::AdtScope(adt) => Some(adt.into()), - Scope::ExprScope(it) => Some(it.owner.into()), - }) - } - pub fn impl_def(&self) -> Option { self.scopes().find_map(|scope| match scope { - Scope::ImplDefScope(def) => Some(*def), + &Scope::GenericParams { def: GenericDefId::ImplId(def), .. } => Some(def), _ => None, }) } @@ -748,7 +776,6 @@ impl Resolver { return None; } } - Scope::AdtScope(_) | Scope::ImplDefScope(_) => continue, Scope::BlockScope(m) => { if m.resolve_path_in_value_ns(db, current_name_as_path).is_some() { // It does not resolve to our renamed variable. @@ -801,7 +828,6 @@ impl Resolver { return None; } } - Scope::AdtScope(_) | Scope::ImplDefScope(_) => continue, Scope::BlockScope(m) => { if m.resolve_path_in_value_ns(db, name_as_path).is_some() { return None; @@ -829,7 +855,7 @@ impl Resolver { scope_id: ScopeId, ) { if let Some(macro_id) = expr_scopes.macro_def(scope_id) { - resolver.scopes.push(Scope::MacroDefScope(macro_id.clone())); + resolver.scopes.push(Scope::MacroDefScope(**macro_id)); } resolver.scopes.push(Scope::ExprScope(ExprScope { owner, @@ -838,9 +864,12 @@ impl Resolver { })); if let Some(block) = expr_scopes.block(scope_id) { let def_map = db.block_def_map(block); - resolver - .scopes - .push(Scope::BlockScope(ModuleItemMap { def_map, module_id: DefMap::ROOT })); + let local_def_map = block.lookup(db).module.only_local_def_map(db); + resolver.scopes.push(Scope::BlockScope(ModuleItemMap { + def_map, + local_def_map, + module_id: DefMap::ROOT, + })); // FIXME: This adds as many module scopes as there are blocks, but resolving in each // already traverses all parents, so this is O(n²). I think we could only store the // innermost module scope instead? @@ -881,7 +910,7 @@ impl Resolver { fn handle_macro_def_scope( db: &dyn DefDatabase, hygiene_id: &mut HygieneId, - hygiene_info: &mut Option<(SyntaxContextId, MacroDefId)>, + hygiene_info: &mut Option<(SyntaxContext, MacroDefId)>, macro_id: &MacroDefId, ) { if let Some((parent_ctx, label_macro_id)) = hygiene_info { @@ -889,11 +918,10 @@ fn handle_macro_def_scope( // A macro is allowed to refer to variables from before its declaration. // Therefore, if we got to the rib of its declaration, give up its hygiene // and use its parent expansion. - let parent_ctx = db.lookup_intern_syntax_context(*parent_ctx); - *hygiene_id = HygieneId::new(parent_ctx.opaque_and_semitransparent); - *hygiene_info = parent_ctx.outer_expn.map(|expansion| { - let expansion = db.lookup_intern_macro_call(expansion); - (parent_ctx.parent, expansion.def) + *hygiene_id = HygieneId::new(parent_ctx.opaque_and_semitransparent(db)); + *hygiene_info = parent_ctx.outer_expn(db).map(|expansion| { + let expansion = db.lookup_intern_macro_call(expansion.into()); + (parent_ctx.parent(db), expansion.def) }); } } @@ -903,12 +931,12 @@ fn handle_macro_def_scope( fn hygiene_info( db: &dyn DefDatabase, hygiene_id: HygieneId, -) -> Option<(SyntaxContextId, MacroDefId)> { +) -> Option<(SyntaxContext, MacroDefId)> { if !hygiene_id.is_root() { - let ctx = hygiene_id.lookup(db); - ctx.outer_expn.map(|expansion| { - let expansion = db.lookup_intern_macro_call(expansion); - (ctx.parent, expansion.def) + let ctx = hygiene_id.lookup(); + ctx.outer_expn(db).map(|expansion| { + let expansion = db.lookup_intern_macro_call(expansion.into()); + (ctx.parent(db), expansion.def) }) } else { None @@ -928,9 +956,10 @@ impl Resolver { path: &ModPath, shadow: BuiltinShadowMode, ) -> PerNs { - let (item_map, module) = self.item_scope(); + let (item_map, item_local_map, module) = self.item_scope_(); // This method resolves `path` just like import paths, so no expected macro subns is given. - let (module_res, segment_index) = item_map.resolve_path(db, module, path, shadow, None); + let (module_res, segment_index) = + item_map.resolve_path(item_local_map, db, module, path, shadow, None); if segment_index.is_some() { return PerNs::none(); } @@ -938,13 +967,17 @@ impl Resolver { } /// The innermost block scope that contains items or the module scope that contains this resolver. - fn item_scope(&self) -> (&DefMap, LocalModuleId) { + fn item_scope_(&self) -> (&DefMap, &LocalDefMap, LocalModuleId) { self.scopes() .find_map(|scope| match scope { - Scope::BlockScope(m) => Some((&*m.def_map, m.module_id)), + Scope::BlockScope(m) => Some((&*m.def_map, &*m.local_def_map, m.module_id)), _ => None, }) - .unwrap_or((&self.module_scope.def_map, self.module_scope.module_id)) + .unwrap_or(( + &self.module_scope.def_map, + &self.module_scope.local_def_map, + self.module_scope.module_id, + )) } } @@ -972,8 +1005,13 @@ impl Scope { }) }); } - Scope::GenericParams { params, def: parent } => { - let parent = *parent; + &Scope::GenericParams { ref params, def: parent } => { + if let GenericDefId::ImplId(impl_) = parent { + acc.add(&Name::new_symbol_root(sym::Self_), ScopeDef::ImplSelfType(impl_)); + } else if let GenericDefId::AdtId(adt) = parent { + acc.add(&Name::new_symbol_root(sym::Self_), ScopeDef::AdtSelfType(adt)); + } + for (local_id, param) in params.iter_type_or_consts() { if let Some(name) = ¶m.name() { let id = TypeOrConstParamId { parent, local_id }; @@ -996,12 +1034,6 @@ impl Scope { acc.add(¶m.name, ScopeDef::GenericParam(id.into())) } } - Scope::ImplDefScope(i) => { - acc.add(&Name::new_symbol_root(sym::Self_.clone()), ScopeDef::ImplSelfType(*i)); - } - Scope::AdtScope(i) => { - acc.add(&Name::new_symbol_root(sym::Self_.clone()), ScopeDef::AdtSelfType(*i)); - } Scope::ExprScope(scope) => { if let Some((label, name)) = scope.expr_scopes.label(scope.scope_id) { acc.add(&name, ScopeDef::Label(label)) @@ -1045,13 +1077,14 @@ fn resolver_for_scope_( for scope in scope_chain.into_iter().rev() { if let Some(block) = scopes.block(scope) { let def_map = db.block_def_map(block); - r = r.push_block_scope(def_map); + let local_def_map = block.lookup(db).module.only_local_def_map(db); + r = r.push_block_scope(def_map, local_def_map); // FIXME: This adds as many module scopes as there are blocks, but resolving in each // already traverses all parents, so this is O(n²). I think we could only store the // innermost module scope instead? } if let Some(macro_id) = scopes.macro_def(scope) { - r = r.push_scope(Scope::MacroDefScope(macro_id.clone())); + r = r.push_scope(Scope::MacroDefScope(**macro_id)); } r = r.push_expr_scope(owner, Arc::clone(&scopes), scope); @@ -1070,13 +1103,12 @@ impl Resolver { self.push_scope(Scope::GenericParams { def, params }) } - fn push_impl_def_scope(self, impl_def: ImplId) -> Resolver { - self.push_scope(Scope::ImplDefScope(impl_def)) - } - - fn push_block_scope(self, def_map: Arc) -> Resolver { - debug_assert!(def_map.block_id().is_some()); - self.push_scope(Scope::BlockScope(ModuleItemMap { def_map, module_id: DefMap::ROOT })) + fn push_block_scope(self, def_map: Arc, local_def_map: Arc) -> Resolver { + self.push_scope(Scope::BlockScope(ModuleItemMap { + def_map, + local_def_map, + module_id: DefMap::ROOT, + })) } fn push_expr_scope( @@ -1095,8 +1127,13 @@ impl ModuleItemMap { db: &dyn DefDatabase, path: &ModPath, ) -> Option<(ResolveValueResult, ResolvePathResultPrefixInfo)> { - let (module_def, unresolved_idx, prefix_info) = - self.def_map.resolve_path_locally(db, self.module_id, path, BuiltinShadowMode::Other); + let (module_def, unresolved_idx, prefix_info) = self.def_map.resolve_path_locally( + &self.local_def_map, + db, + self.module_id, + path, + BuiltinShadowMode::Other, + ); match unresolved_idx { None => { let (value, import) = to_value_ns(module_def)?; @@ -1129,8 +1166,13 @@ impl ModuleItemMap { path: &ModPath, ) -> Option<(TypeNs, Option, Option, ResolvePathResultPrefixInfo)> { - let (module_def, idx, prefix_info) = - self.def_map.resolve_path_locally(db, self.module_id, path, BuiltinShadowMode::Other); + let (module_def, idx, prefix_info) = self.def_map.resolve_path_locally( + &self.local_def_map, + db, + self.module_id, + path, + BuiltinShadowMode::Other, + ); let (res, import) = to_type_ns(module_def)?; Some((res, idx, import, prefix_info)) } @@ -1168,11 +1210,12 @@ fn to_type_ns(per_ns: PerNs) -> Option<(TypeNs, Option)> { ModuleDefId::TraitId(it) => TypeNs::TraitId(it), ModuleDefId::TraitAliasId(it) => TypeNs::TraitAliasId(it), + ModuleDefId::ModuleId(it) => TypeNs::ModuleId(it), + ModuleDefId::FunctionId(_) | ModuleDefId::ConstId(_) | ModuleDefId::MacroId(_) - | ModuleDefId::StaticId(_) - | ModuleDefId::ModuleId(_) => return None, + | ModuleDefId::StaticId(_) => return None, }; Some((res, def.import)) } @@ -1225,11 +1268,14 @@ pub trait HasResolver: Copy { impl HasResolver for ModuleId { fn resolver(self, db: &dyn DefDatabase) -> Resolver { - let mut def_map = self.def_map(db); + let (mut def_map, local_def_map) = self.local_def_map(db); let mut module_id = self.local_id; if !self.is_block_module() { - return Resolver { scopes: vec![], module_scope: ModuleItemMap { def_map, module_id } }; + return Resolver { + scopes: vec![], + module_scope: ModuleItemMap { def_map, local_def_map, module_id }, + }; } let mut modules: SmallVec<[_; 1]> = smallvec![]; @@ -1243,10 +1289,14 @@ impl HasResolver for ModuleId { } let mut resolver = Resolver { scopes: Vec::with_capacity(modules.len()), - module_scope: ModuleItemMap { def_map, module_id }, + module_scope: ModuleItemMap { + def_map, + local_def_map: local_def_map.clone(), + module_id, + }, }; for def_map in modules.into_iter().rev() { - resolver = resolver.push_block_scope(def_map); + resolver = resolver.push_block_scope(def_map, local_def_map.clone()); } resolver } @@ -1254,9 +1304,10 @@ impl HasResolver for ModuleId { impl HasResolver for CrateRootModuleId { fn resolver(self, db: &dyn DefDatabase) -> Resolver { + let (def_map, local_def_map) = self.local_def_map(db); Resolver { scopes: vec![], - module_scope: ModuleItemMap { def_map: self.def_map(db), module_id: DefMap::ROOT }, + module_scope: ModuleItemMap { def_map, local_def_map, module_id: DefMap::ROOT }, } } } @@ -1276,10 +1327,7 @@ impl HasResolver for TraitAliasId { impl + Copy> HasResolver for T { fn resolver(self, db: &dyn DefDatabase) -> Resolver { let def = self.into(); - def.module(db) - .resolver(db) - .push_generic_params_scope(db, def.into()) - .push_scope(Scope::AdtScope(def)) + def.module(db).resolver(db).push_generic_params_scope(db, def.into()) } } @@ -1309,11 +1357,7 @@ impl HasResolver for TypeAliasId { impl HasResolver for ImplId { fn resolver(self, db: &dyn DefDatabase) -> Resolver { - self.lookup(db) - .container - .resolver(db) - .push_generic_params_scope(db, self.into()) - .push_impl_def_scope(self) + self.lookup(db).container.resolver(db).push_generic_params_scope(db, self.into()) } } @@ -1336,23 +1380,6 @@ impl HasResolver for UseId { } } -impl HasResolver for TypeOwnerId { - fn resolver(self, db: &dyn DefDatabase) -> Resolver { - match self { - TypeOwnerId::FunctionId(it) => it.resolver(db), - TypeOwnerId::StaticId(it) => it.resolver(db), - TypeOwnerId::ConstId(it) => it.resolver(db), - TypeOwnerId::InTypeConstId(it) => it.lookup(db).owner.resolver(db), - TypeOwnerId::AdtId(it) => it.resolver(db), - TypeOwnerId::TraitId(it) => it.resolver(db), - TypeOwnerId::TraitAliasId(it) => it.resolver(db), - TypeOwnerId::TypeAliasId(it) => it.resolver(db), - TypeOwnerId::ImplId(it) => it.resolver(db), - TypeOwnerId::EnumVariantId(it) => it.resolver(db), - } - } -} - impl HasResolver for DefWithBodyId { fn resolver(self, db: &dyn DefDatabase) -> Resolver { match self { @@ -1360,7 +1387,6 @@ impl HasResolver for DefWithBodyId { DefWithBodyId::FunctionId(f) => f.resolver(db), DefWithBodyId::StaticId(s) => s.resolver(db), DefWithBodyId::VariantId(v) => v.resolver(db), - DefWithBodyId::InTypeConstId(c) => c.lookup(db).owner.resolver(db), } } } @@ -1438,7 +1464,7 @@ impl HasResolver for MacroRulesId { fn lookup_resolver<'db>( db: &(dyn DefDatabase + 'db), lookup: impl Lookup< - Database<'db> = dyn DefDatabase + 'db, + Database = dyn DefDatabase, Data = impl ItemTreeLoc, >, ) -> Resolver { diff --git a/src/tools/rust-analyzer/crates/hir-def/src/signatures.rs b/src/tools/rust-analyzer/crates/hir-def/src/signatures.rs new file mode 100644 index 0000000000000..44cfd72c48f5e --- /dev/null +++ b/src/tools/rust-analyzer/crates/hir-def/src/signatures.rs @@ -0,0 +1,975 @@ +//! Item signature IR definitions + +use std::ops::Not as _; + +use bitflags::bitflags; +use cfg::{CfgExpr, CfgOptions}; +use either::Either; +use hir_expand::{InFile, Intern, Lookup, name::Name}; +use intern::{Symbol, sym}; +use la_arena::{Arena, Idx}; +use rustc_abi::{IntegerType, ReprOptions}; +use syntax::{ + AstNode, SyntaxNodePtr, + ast::{self, HasGenericParams, IsString}, +}; +use thin_vec::ThinVec; +use triomphe::Arc; + +use crate::{ + ConstId, EnumId, EnumVariantId, EnumVariantLoc, FunctionId, HasModule, ImplId, ItemContainerId, + ModuleId, StaticId, StructId, TraitAliasId, TraitId, TypeAliasId, UnionId, VariantId, + db::DefDatabase, + expr_store::{ + ExpressionStore, ExpressionStoreSourceMap, + lower::{ + ExprCollector, lower_function, lower_generic_params, lower_trait, lower_trait_alias, + lower_type_alias, + }, + }, + hir::{ExprId, PatId, generics::GenericParams}, + item_tree::{ + AttrOwner, Field, FieldParent, FieldsShape, FileItemTreeId, ItemTree, ItemTreeId, ModItem, + RawVisibility, RawVisibilityId, + }, + lang_item::LangItem, + src::HasSource, + type_ref::{TraitRef, TypeBound, TypeRefId}, +}; + +#[derive(Debug, PartialEq, Eq)] +pub struct StructSignature { + pub name: Name, + pub generic_params: Arc, + pub store: Arc, + pub flags: StructFlags, + pub shape: FieldsShape, + pub repr: Option, +} + +bitflags! { + #[derive(Debug, Copy, Clone, PartialEq, Eq)] + pub struct StructFlags: u8 { + /// Indicates whether the struct has a `#[rustc_has_incoherent_inherent_impls]` attribute. + const RUSTC_HAS_INCOHERENT_INHERENT_IMPLS = 1 << 1; + /// Indicates whether the struct has a `#[fundamental]` attribute. + const FUNDAMENTAL = 1 << 2; + /// Indicates whether the struct is `PhantomData`. + const IS_PHANTOM_DATA = 1 << 3; + /// Indicates whether this struct is `Box`. + const IS_BOX = 1 << 4; + /// Indicates whether this struct is `ManuallyDrop`. + const IS_MANUALLY_DROP = 1 << 5; + /// Indicates whether this struct is `UnsafeCell`. + const IS_UNSAFE_CELL = 1 << 6; + /// Indicates whether this struct is `UnsafePinned`. + const IS_UNSAFE_PINNED = 1 << 7; + } +} + +impl StructSignature { + pub fn query(db: &dyn DefDatabase, id: StructId) -> (Arc, Arc) { + let loc = id.lookup(db); + let item_tree = loc.id.item_tree(db); + let attrs = item_tree.attrs(db, loc.container.krate, ModItem::from(loc.id.value).into()); + + let mut flags = StructFlags::empty(); + if attrs.by_key(sym::rustc_has_incoherent_inherent_impls).exists() { + flags |= StructFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS; + } + if attrs.by_key(sym::fundamental).exists() { + flags |= StructFlags::FUNDAMENTAL; + } + if let Some(lang) = attrs.lang_item() { + match lang { + LangItem::PhantomData => flags |= StructFlags::IS_PHANTOM_DATA, + LangItem::OwnedBox => flags |= StructFlags::IS_BOX, + LangItem::ManuallyDrop => flags |= StructFlags::IS_MANUALLY_DROP, + LangItem::UnsafeCell => flags |= StructFlags::IS_UNSAFE_CELL, + LangItem::UnsafePinned => flags |= StructFlags::IS_UNSAFE_PINNED, + _ => (), + } + } + let repr = attrs.repr(); + + let hir_expand::files::InFileWrapper { file_id, value } = loc.source(db); + let (store, generic_params, source_map) = lower_generic_params( + db, + loc.container, + id.into(), + file_id, + value.generic_param_list(), + value.where_clause(), + ); + ( + Arc::new(StructSignature { + generic_params, + store, + flags, + shape: item_tree[loc.id.value].shape, + name: item_tree[loc.id.value].name.clone(), + repr, + }), + Arc::new(source_map), + ) + } +} + +#[derive(Debug, PartialEq, Eq)] +pub struct UnionSignature { + pub name: Name, + pub generic_params: Arc, + pub store: Arc, + pub flags: StructFlags, + pub repr: Option, +} + +impl UnionSignature { + pub fn query(db: &dyn DefDatabase, id: UnionId) -> (Arc, Arc) { + let loc = id.lookup(db); + let krate = loc.container.krate; + let item_tree = loc.id.item_tree(db); + let attrs = item_tree.attrs(db, krate, ModItem::from(loc.id.value).into()); + let mut flags = StructFlags::empty(); + if attrs.by_key(sym::rustc_has_incoherent_inherent_impls).exists() { + flags |= StructFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS; + } + if attrs.by_key(sym::fundamental).exists() { + flags |= StructFlags::FUNDAMENTAL; + } + + let repr = attrs.repr(); + + let hir_expand::files::InFileWrapper { file_id, value } = loc.source(db); + let (store, generic_params, source_map) = lower_generic_params( + db, + loc.container, + id.into(), + file_id, + value.generic_param_list(), + value.where_clause(), + ); + ( + Arc::new(UnionSignature { + generic_params, + store, + flags, + repr, + name: item_tree[loc.id.value].name.clone(), + }), + Arc::new(source_map), + ) + } +} + +bitflags! { + #[derive(Debug, Copy, Clone, PartialEq, Eq)] + pub struct EnumFlags: u8 { + const RUSTC_HAS_INCOHERENT_INHERENT_IMPLS = 1 << 1; + } +} + +#[derive(Debug, PartialEq, Eq)] +pub struct EnumSignature { + pub name: Name, + pub generic_params: Arc, + pub store: Arc, + pub flags: EnumFlags, + pub repr: Option, +} + +impl EnumSignature { + pub fn query(db: &dyn DefDatabase, id: EnumId) -> (Arc, Arc) { + let loc = id.lookup(db); + let item_tree = loc.id.item_tree(db); + let attrs = item_tree.attrs(db, loc.container.krate, ModItem::from(loc.id.value).into()); + let mut flags = EnumFlags::empty(); + if attrs.by_key(sym::rustc_has_incoherent_inherent_impls).exists() { + flags |= EnumFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS; + } + + let repr = attrs.repr(); + + let hir_expand::files::InFileWrapper { file_id, value } = loc.source(db); + let (store, generic_params, source_map) = lower_generic_params( + db, + loc.container, + id.into(), + file_id, + value.generic_param_list(), + value.where_clause(), + ); + + ( + Arc::new(EnumSignature { + generic_params, + store, + flags, + repr, + name: item_tree[loc.id.value].name.clone(), + }), + Arc::new(source_map), + ) + } + + pub fn variant_body_type(&self) -> IntegerType { + match self.repr { + Some(ReprOptions { int: Some(builtin), .. }) => builtin, + _ => IntegerType::Pointer(true), + } + } +} +bitflags::bitflags! { + #[derive(Debug, Clone, Copy, Eq, PartialEq, Default)] + pub struct ConstFlags: u8 { + const HAS_BODY = 1 << 1; + const RUSTC_ALLOW_INCOHERENT_IMPL = 1 << 7; + } +} + +#[derive(Debug, PartialEq, Eq)] +pub struct ConstSignature { + pub name: Option, + // generic_params: Arc, + pub store: Arc, + pub type_ref: TypeRefId, + pub flags: ConstFlags, +} + +impl ConstSignature { + pub fn query(db: &dyn DefDatabase, id: ConstId) -> (Arc, Arc) { + let loc = id.lookup(db); + let item_tree = loc.id.item_tree(db); + + let module = loc.container.module(db); + let attrs = item_tree.attrs(db, module.krate, ModItem::from(loc.id.value).into()); + let mut flags = ConstFlags::empty(); + if attrs.by_key(sym::rustc_allow_incoherent_impl).exists() { + flags |= ConstFlags::RUSTC_ALLOW_INCOHERENT_IMPL; + } + let source = loc.source(db); + if source.value.body().is_some() { + flags.insert(ConstFlags::HAS_BODY); + } + + let (store, source_map, type_ref) = + crate::expr_store::lower::lower_type_ref(db, module, source.map(|it| it.ty())); + + ( + Arc::new(ConstSignature { + store: Arc::new(store), + type_ref, + flags, + name: item_tree[loc.id.value].name.clone(), + }), + Arc::new(source_map), + ) + } + + pub fn has_body(&self) -> bool { + self.flags.contains(ConstFlags::HAS_BODY) + } +} + +bitflags::bitflags! { + #[derive(Debug, Clone, Copy, Eq, PartialEq, Default)] + pub struct StaticFlags: u8 { + const HAS_BODY = 1 << 1; + const MUTABLE = 1 << 3; + const UNSAFE = 1 << 4; + const EXPLICIT_SAFE = 1 << 5; + const EXTERN = 1 << 6; + const RUSTC_ALLOW_INCOHERENT_IMPL = 1 << 7; + } +} + +#[derive(Debug, PartialEq, Eq)] +pub struct StaticSignature { + pub name: Name, + + // generic_params: Arc, + pub store: Arc, + pub type_ref: TypeRefId, + pub flags: StaticFlags, +} +impl StaticSignature { + pub fn query(db: &dyn DefDatabase, id: StaticId) -> (Arc, Arc) { + let loc = id.lookup(db); + let item_tree = loc.id.item_tree(db); + + let module = loc.container.module(db); + let attrs = item_tree.attrs(db, module.krate, ModItem::from(loc.id.value).into()); + let mut flags = StaticFlags::empty(); + if attrs.by_key(sym::rustc_allow_incoherent_impl).exists() { + flags |= StaticFlags::RUSTC_ALLOW_INCOHERENT_IMPL; + } + + if matches!(loc.container, ItemContainerId::ExternBlockId(_)) { + flags.insert(StaticFlags::EXTERN); + } + + let source = loc.source(db); + if source.value.body().is_some() { + flags.insert(StaticFlags::HAS_BODY); + } + if source.value.mut_token().is_some() { + flags.insert(StaticFlags::MUTABLE); + } + if source.value.unsafe_token().is_some() { + flags.insert(StaticFlags::UNSAFE); + } + if source.value.safe_token().is_some() { + flags.insert(StaticFlags::EXPLICIT_SAFE); + } + + let (store, source_map, type_ref) = + crate::expr_store::lower::lower_type_ref(db, module, source.map(|it| it.ty())); + + ( + Arc::new(StaticSignature { + store: Arc::new(store), + type_ref, + flags, + name: item_tree[loc.id.value].name.clone(), + }), + Arc::new(source_map), + ) + } +} + +bitflags::bitflags! { + #[derive(Debug, Clone, Copy, Eq, PartialEq, Default)] + pub struct ImplFlags: u8 { + const NEGATIVE = 1 << 1; + const UNSAFE = 1 << 3; + } +} + +#[derive(Debug, PartialEq, Eq)] +pub struct ImplSignature { + pub generic_params: Arc, + pub store: Arc, + pub self_ty: TypeRefId, + pub target_trait: Option, + pub flags: ImplFlags, +} + +impl ImplSignature { + pub fn query(db: &dyn DefDatabase, id: ImplId) -> (Arc, Arc) { + let loc = id.lookup(db); + + let mut flags = ImplFlags::empty(); + let src = loc.source(db); + if src.value.unsafe_token().is_some() { + flags.insert(ImplFlags::UNSAFE); + } + if src.value.excl_token().is_some() { + flags.insert(ImplFlags::NEGATIVE); + } + + let (store, source_map, self_ty, target_trait, generic_params) = + crate::expr_store::lower::lower_impl(db, loc.container, src, id); + + ( + Arc::new(ImplSignature { + store: Arc::new(store), + generic_params, + self_ty, + target_trait, + flags, + }), + Arc::new(source_map), + ) + } +} + +bitflags::bitflags! { + #[derive(Debug, Clone, Copy, Eq, PartialEq, Default)] + pub struct TraitFlags: u8 { + const RUSTC_HAS_INCOHERENT_INHERENT_IMPLS = 1 << 1; + const FUNDAMENTAL = 1 << 2; + const UNSAFE = 1 << 3; + const AUTO = 1 << 4; + const SKIP_ARRAY_DURING_METHOD_DISPATCH = 1 << 5; + const SKIP_BOXED_SLICE_DURING_METHOD_DISPATCH = 1 << 6; + const RUSTC_PAREN_SUGAR = 1 << 7; + } +} + +#[derive(Debug, PartialEq, Eq)] +pub struct TraitSignature { + pub name: Name, + pub generic_params: Arc, + pub store: Arc, + pub flags: TraitFlags, +} + +impl TraitSignature { + pub fn query(db: &dyn DefDatabase, id: TraitId) -> (Arc, Arc) { + let loc = id.lookup(db); + let item_tree = loc.id.item_tree(db); + + let mut flags = TraitFlags::empty(); + let attrs = item_tree.attrs(db, loc.container.krate, ModItem::from(loc.id.value).into()); + let source = loc.source(db); + if source.value.auto_token().is_some() { + flags.insert(TraitFlags::AUTO); + } + if source.value.unsafe_token().is_some() { + flags.insert(TraitFlags::UNSAFE); + } + if attrs.by_key(sym::fundamental).exists() { + flags |= TraitFlags::FUNDAMENTAL; + } + if attrs.by_key(sym::rustc_has_incoherent_inherent_impls).exists() { + flags |= TraitFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS; + } + if attrs.by_key(sym::rustc_paren_sugar).exists() { + flags |= TraitFlags::RUSTC_PAREN_SUGAR; + } + let mut skip_array_during_method_dispatch = + attrs.by_key(sym::rustc_skip_array_during_method_dispatch).exists(); + let mut skip_boxed_slice_during_method_dispatch = false; + for tt in attrs.by_key(sym::rustc_skip_during_method_dispatch).tt_values() { + for tt in tt.iter() { + if let tt::iter::TtElement::Leaf(tt::Leaf::Ident(ident)) = tt { + skip_array_during_method_dispatch |= ident.sym == sym::array; + skip_boxed_slice_during_method_dispatch |= ident.sym == sym::boxed_slice; + } + } + } + + if skip_array_during_method_dispatch { + flags |= TraitFlags::SKIP_ARRAY_DURING_METHOD_DISPATCH; + } + if skip_boxed_slice_during_method_dispatch { + flags |= TraitFlags::SKIP_BOXED_SLICE_DURING_METHOD_DISPATCH; + } + + let (store, source_map, generic_params) = lower_trait(db, loc.container, source, id); + + ( + Arc::new(TraitSignature { + store: Arc::new(store), + generic_params, + flags, + name: item_tree[loc.id.value].name.clone(), + }), + Arc::new(source_map), + ) + } +} + +#[derive(Debug, PartialEq, Eq)] +pub struct TraitAliasSignature { + pub name: Name, + pub generic_params: Arc, + pub store: Arc, +} + +impl TraitAliasSignature { + pub fn query( + db: &dyn DefDatabase, + id: TraitAliasId, + ) -> (Arc, Arc) { + let loc = id.lookup(db); + let item_tree = loc.id.item_tree(db); + + let source = loc.source(db); + let (store, source_map, generic_params) = lower_trait_alias(db, loc.container, source, id); + + ( + Arc::new(TraitAliasSignature { + generic_params, + store: Arc::new(store), + name: item_tree[loc.id.value].name.clone(), + }), + Arc::new(source_map), + ) + } +} + +bitflags! { + #[derive(Debug, Clone, Copy, Eq, PartialEq, Default)] + pub struct FnFlags: u16 { + const HAS_BODY = 1 << 1; + const DEFAULT = 1 << 2; + const CONST = 1 << 3; + const ASYNC = 1 << 4; + const UNSAFE = 1 << 5; + const HAS_VARARGS = 1 << 6; + const RUSTC_ALLOW_INCOHERENT_IMPL = 1 << 7; + const HAS_SELF_PARAM = 1 << 8; + /// The `#[target_feature]` attribute is necessary to check safety (with RFC 2396), + /// but keeping it for all functions will consume a lot of memory when there are + /// only very few functions with it. So we only encode its existence here, and lookup + /// it if needed. + const HAS_TARGET_FEATURE = 1 << 9; + const DEPRECATED_SAFE_2024 = 1 << 10; + const EXPLICIT_SAFE = 1 << 11; + } +} + +#[derive(Debug, PartialEq, Eq)] +pub struct FunctionSignature { + pub name: Name, + pub generic_params: Arc, + pub store: Arc, + pub params: Box<[TypeRefId]>, + pub ret_type: Option, + pub abi: Option, + pub flags: FnFlags, + // FIXME: we should put this behind a fn flags + query to avoid bloating the struct + pub legacy_const_generics_indices: Option>>, +} + +impl FunctionSignature { + pub fn query( + db: &dyn DefDatabase, + id: FunctionId, + ) -> (Arc, Arc) { + let loc = id.lookup(db); + let module = loc.container.module(db); + let item_tree = loc.id.item_tree(db); + + let mut flags = FnFlags::empty(); + let attrs = item_tree.attrs(db, module.krate, ModItem::from(loc.id.value).into()); + if attrs.by_key(sym::rustc_allow_incoherent_impl).exists() { + flags.insert(FnFlags::RUSTC_ALLOW_INCOHERENT_IMPL); + } + + if attrs.by_key(sym::target_feature).exists() { + flags.insert(FnFlags::HAS_TARGET_FEATURE); + } + let legacy_const_generics_indices = attrs.rustc_legacy_const_generics(); + + let source = loc.source(db); + + if source.value.unsafe_token().is_some() { + if attrs.by_key(sym::rustc_deprecated_safe_2024).exists() { + flags.insert(FnFlags::DEPRECATED_SAFE_2024); + } else { + flags.insert(FnFlags::UNSAFE); + } + } + if source.value.async_token().is_some() { + flags.insert(FnFlags::ASYNC); + } + if source.value.const_token().is_some() { + flags.insert(FnFlags::CONST); + } + if source.value.default_token().is_some() { + flags.insert(FnFlags::DEFAULT); + } + if source.value.safe_token().is_some() { + flags.insert(FnFlags::EXPLICIT_SAFE); + } + if source.value.body().is_some() { + flags.insert(FnFlags::HAS_BODY); + } + + let abi = source.value.abi().map(|abi| { + abi.abi_string().map_or_else(|| sym::C, |it| Symbol::intern(it.text_without_quotes())) + }); + let (store, source_map, generic_params, params, ret_type, self_param, variadic) = + lower_function(db, module, source, id); + if self_param { + flags.insert(FnFlags::HAS_SELF_PARAM); + } + if variadic { + flags.insert(FnFlags::HAS_VARARGS); + } + ( + Arc::new(FunctionSignature { + generic_params, + store: Arc::new(store), + params, + ret_type, + abi, + flags, + legacy_const_generics_indices, + name: item_tree[loc.id.value].name.clone(), + }), + Arc::new(source_map), + ) + } + + pub fn has_body(&self) -> bool { + self.flags.contains(FnFlags::HAS_BODY) + } + + /// True if the first param is `self`. This is relevant to decide whether this + /// can be called as a method. + pub fn has_self_param(&self) -> bool { + self.flags.contains(FnFlags::HAS_SELF_PARAM) + } + + pub fn is_default(&self) -> bool { + self.flags.contains(FnFlags::DEFAULT) + } + + pub fn is_const(&self) -> bool { + self.flags.contains(FnFlags::CONST) + } + + pub fn is_async(&self) -> bool { + self.flags.contains(FnFlags::ASYNC) + } + + pub fn is_unsafe(&self) -> bool { + self.flags.contains(FnFlags::UNSAFE) + } + + pub fn is_deprecated_safe_2024(&self) -> bool { + self.flags.contains(FnFlags::DEPRECATED_SAFE_2024) + } + + pub fn is_safe(&self) -> bool { + self.flags.contains(FnFlags::EXPLICIT_SAFE) + } + + pub fn is_varargs(&self) -> bool { + self.flags.contains(FnFlags::HAS_VARARGS) + } + + pub fn has_target_feature(&self) -> bool { + self.flags.contains(FnFlags::HAS_TARGET_FEATURE) + } +} + +bitflags! { + #[derive(Debug, Clone, Copy, Eq, PartialEq, Default)] + pub struct TypeAliasFlags: u8 { + const RUSTC_HAS_INCOHERENT_INHERENT_IMPL = 1 << 1; + const IS_EXTERN = 1 << 6; + const RUSTC_ALLOW_INCOHERENT_IMPL = 1 << 7; + } +} + +#[derive(Debug, PartialEq, Eq)] +pub struct TypeAliasSignature { + pub name: Name, + pub generic_params: Arc, + pub store: Arc, + pub bounds: Box<[TypeBound]>, + pub ty: Option, + pub flags: TypeAliasFlags, +} + +impl TypeAliasSignature { + pub fn query( + db: &dyn DefDatabase, + id: TypeAliasId, + ) -> (Arc, Arc) { + let loc = id.lookup(db); + let item_tree = loc.id.item_tree(db); + + let mut flags = TypeAliasFlags::empty(); + let attrs = item_tree.attrs( + db, + loc.container.module(db).krate(), + ModItem::from(loc.id.value).into(), + ); + if attrs.by_key(sym::rustc_has_incoherent_inherent_impls).exists() { + flags.insert(TypeAliasFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPL); + } + if attrs.by_key(sym::rustc_allow_incoherent_impl).exists() { + flags.insert(TypeAliasFlags::RUSTC_ALLOW_INCOHERENT_IMPL); + } + if matches!(loc.container, ItemContainerId::ExternBlockId(_)) { + flags.insert(TypeAliasFlags::IS_EXTERN); + } + let source = loc.source(db); + let (store, source_map, generic_params, bounds, ty) = + lower_type_alias(db, loc.container.module(db), source, id); + + ( + Arc::new(TypeAliasSignature { + store: Arc::new(store), + generic_params, + flags, + bounds, + name: item_tree[loc.id.value].name.clone(), + ty, + }), + Arc::new(source_map), + ) + } +} + +#[derive(Debug, PartialEq, Eq)] +pub struct FunctionBody { + pub store: Arc, + pub parameters: Box<[PatId]>, +} + +#[derive(Debug, PartialEq, Eq)] +pub struct SimpleBody { + pub store: Arc, +} +pub type StaticBody = SimpleBody; +pub type ConstBody = SimpleBody; +pub type EnumVariantBody = SimpleBody; + +#[derive(Debug, PartialEq, Eq)] +pub struct VariantFieldsBody { + pub store: Arc, + pub fields: Box<[Option]>, +} + +/// A single field of an enum variant or struct +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct FieldData { + pub name: Name, + pub type_ref: TypeRefId, + pub visibility: RawVisibility, + pub is_unsafe: bool, +} + +pub type LocalFieldId = Idx; + +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct VariantFields { + fields: Arena, + pub store: Arc, + pub shape: FieldsShape, +} +impl VariantFields { + #[inline] + pub(crate) fn query( + db: &dyn DefDatabase, + id: VariantId, + ) -> (Arc, Arc) { + let (shape, (fields, store, source_map)) = match id { + VariantId::EnumVariantId(id) => { + let loc = id.lookup(db); + let item_tree = loc.id.item_tree(db); + let parent = loc.parent.lookup(db); + let variant = &item_tree[loc.id.value]; + ( + variant.shape, + lower_fields( + db, + parent.container, + &item_tree, + FieldParent::EnumVariant(loc.id.value), + loc.source(db).map(|src| { + variant.fields.iter().zip( + src.field_list() + .map(|it| { + match it { + ast::FieldList::RecordFieldList(record_field_list) => { + Either::Left(record_field_list.fields().map(|it| { + (SyntaxNodePtr::new(it.syntax()), it.ty()) + })) + } + ast::FieldList::TupleFieldList(field_list) => { + Either::Right(field_list.fields().map(|it| { + (SyntaxNodePtr::new(it.syntax()), it.ty()) + })) + } + } + .into_iter() + }) + .into_iter() + .flatten(), + ) + }), + Some(item_tree[parent.id.value].visibility), + ), + ) + } + VariantId::StructId(id) => { + let loc = id.lookup(db); + let item_tree = loc.id.item_tree(db); + let strukt = &item_tree[loc.id.value]; + ( + strukt.shape, + lower_fields( + db, + loc.container, + &item_tree, + FieldParent::Struct(loc.id.value), + loc.source(db).map(|src| { + strukt.fields.iter().zip( + src.field_list() + .map(|it| { + match it { + ast::FieldList::RecordFieldList(record_field_list) => { + Either::Left(record_field_list.fields().map(|it| { + (SyntaxNodePtr::new(it.syntax()), it.ty()) + })) + } + ast::FieldList::TupleFieldList(field_list) => { + Either::Right(field_list.fields().map(|it| { + (SyntaxNodePtr::new(it.syntax()), it.ty()) + })) + } + } + .into_iter() + }) + .into_iter() + .flatten(), + ) + }), + None, + ), + ) + } + VariantId::UnionId(id) => { + let loc = id.lookup(db); + let item_tree = loc.id.item_tree(db); + let union = &item_tree[loc.id.value]; + ( + FieldsShape::Record, + lower_fields( + db, + loc.container, + &item_tree, + FieldParent::Union(loc.id.value), + loc.source(db).map(|src| { + union.fields.iter().zip( + src.record_field_list() + .map(|it| { + it.fields() + .map(|it| (SyntaxNodePtr::new(it.syntax()), it.ty())) + }) + .into_iter() + .flatten(), + ) + }), + None, + ), + ) + } + }; + + (Arc::new(VariantFields { fields, store: Arc::new(store), shape }), Arc::new(source_map)) + } + + pub fn len(&self) -> usize { + self.fields.len() + } + + pub fn fields(&self) -> &Arena { + &self.fields + } + + pub fn field(&self, name: &Name) -> Option { + self.fields().iter().find_map(|(id, data)| if &data.name == name { Some(id) } else { None }) + } +} + +fn lower_fields<'a>( + db: &dyn DefDatabase, + module: ModuleId, + item_tree: &ItemTree, + parent: FieldParent, + fields: InFile))>>, + override_visibility: Option, +) -> (Arena, ExpressionStore, ExpressionStoreSourceMap) { + let mut arena = Arena::new(); + let cfg_options = module.krate.cfg_options(db); + let mut col = ExprCollector::new(db, module, fields.file_id); + for (idx, (field, (ptr, ty))) in fields.value.enumerate() { + let attr_owner = AttrOwner::make_field_indexed(parent, idx); + let attrs = item_tree.attrs(db, module.krate, attr_owner); + if attrs.is_cfg_enabled(cfg_options) { + arena.alloc(FieldData { + name: field.name.clone(), + type_ref: col + .lower_type_ref_opt(ty, &mut ExprCollector::impl_trait_error_allocator), + visibility: item_tree[override_visibility.unwrap_or(field.visibility)].clone(), + is_unsafe: field.is_unsafe, + }); + } else { + col.source_map.diagnostics.push( + crate::expr_store::ExpressionStoreDiagnostics::InactiveCode { + node: InFile::new(fields.file_id, ptr), + cfg: attrs.cfg().unwrap(), + opts: cfg_options.clone(), + }, + ); + } + } + let store = col.store.finish(); + (arena, store, col.source_map) +} + +#[derive(Debug, PartialEq, Eq)] +pub struct InactiveEnumVariantCode { + pub cfg: CfgExpr, + pub opts: CfgOptions, + pub ast_id: span::FileAstId, +} + +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct EnumVariants { + pub variants: Box<[(EnumVariantId, Name)]>, +} + +impl EnumVariants { + pub(crate) fn enum_variants_query( + db: &dyn DefDatabase, + e: EnumId, + ) -> (Arc, Option>>) { + let loc = e.lookup(db); + let item_tree = loc.id.item_tree(db); + + let mut diagnostics = ThinVec::new(); + let cfg_options = loc.container.krate.cfg_options(db); + let mut index = 0; + let variants = FileItemTreeId::range_iter(item_tree[loc.id.value].variants.clone()) + .filter_map(|variant| { + let attrs = item_tree.attrs(db, loc.container.krate, variant.into()); + if attrs.is_cfg_enabled(cfg_options) { + let enum_variant = EnumVariantLoc { + id: ItemTreeId::new(loc.id.tree_id(), variant), + parent: e, + index, + } + .intern(db); + index += 1; + Some((enum_variant, item_tree[variant].name.clone())) + } else { + diagnostics.push(InactiveEnumVariantCode { + ast_id: item_tree[variant].ast_id, + cfg: attrs.cfg().unwrap(), + opts: cfg_options.clone(), + }); + None + } + }) + .collect(); + + ( + Arc::new(EnumVariants { variants }), + diagnostics.is_empty().not().then(|| Arc::new(diagnostics)), + ) + } + + pub fn variant(&self, name: &Name) -> Option { + self.variants.iter().find_map(|(v, n)| if n == name { Some(*v) } else { None }) + } + + // [Adopted from rustc](https://github.com/rust-lang/rust/blob/bd53aa3bf7a24a70d763182303bd75e5fc51a9af/compiler/rustc_middle/src/ty/adt.rs#L446-L448) + pub fn is_payload_free(&self, db: &dyn DefDatabase) -> bool { + self.variants.iter().all(|&(v, _)| { + // The condition check order is slightly modified from rustc + // to improve performance by early returning with relatively fast checks + let variant = &db.variant_fields(v.into()); + if !variant.fields().is_empty() { + return false; + } + // The outer if condition is whether this variant has const ctor or not + if !matches!(variant.shape, FieldsShape::Unit) { + let body = db.body(v.into()); + // A variant with explicit discriminant + if body.exprs[body.body_expr] != crate::hir::Expr::Missing { + return false; + } + } + true + }) + } +} diff --git a/src/tools/rust-analyzer/crates/hir-def/src/src.rs b/src/tools/rust-analyzer/crates/hir-def/src/src.rs index c7ebfeecf5141..3867f39b8b173 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/src.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/src.rs @@ -3,13 +3,13 @@ use either::Either; use hir_expand::InFile; use la_arena::ArenaMap; -use syntax::{ast, AstNode, AstPtr}; +use syntax::{AstNode, AstPtr, ast}; use crate::{ - db::DefDatabase, - item_tree::{AttrOwner, FieldParent, ItemTreeNode}, GenericDefId, ItemTreeLoc, LocalFieldId, LocalLifetimeParamId, LocalTypeOrConstParamId, Lookup, UseId, VariantId, + db::DefDatabase, + item_tree::{AttrOwner, FieldParent, ItemTreeNode}, }; pub trait HasSource { @@ -131,7 +131,7 @@ impl HasChildSource for VariantId { item_tree = lookup.id.item_tree(db); ( lookup.source(db).map(|it| it.kind()), - FieldParent::Variant(lookup.id.value), + FieldParent::EnumVariant(lookup.id.value), lookup.parent.lookup(db).container, ) } @@ -158,7 +158,7 @@ impl HasChildSource for VariantId { let mut map = ArenaMap::new(); match &src.value { ast::StructKind::Tuple(fl) => { - let cfg_options = &db.crate_graph()[container.krate].cfg_options; + let cfg_options = container.krate.cfg_options(db); let mut idx = 0; for (i, fd) in fl.fields().enumerate() { let attrs = item_tree.attrs( @@ -177,7 +177,7 @@ impl HasChildSource for VariantId { } } ast::StructKind::Record(fl) => { - let cfg_options = &db.crate_graph()[container.krate].cfg_options; + let cfg_options = container.krate.cfg_options(db); let mut idx = 0; for (i, fd) in fl.fields().enumerate() { let attrs = item_tree.attrs( diff --git a/src/tools/rust-analyzer/crates/hir-def/src/test_db.rs b/src/tools/rust-analyzer/crates/hir-def/src/test_db.rs index 54e6c1fd206d8..4709754829516 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/test_db.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/test_db.rs @@ -3,58 +3,53 @@ use std::{fmt, panic, sync::Mutex}; use base_db::{ - ra_salsa::{self, Durability}, - AnchoredPath, CrateId, FileLoader, FileLoaderDelegate, SourceDatabase, Upcast, + Crate, CrateGraphBuilder, CratesMap, FileSourceRootInput, FileText, RootQueryDb, + SourceDatabase, SourceRoot, SourceRootId, SourceRootInput, }; -use hir_expand::{db::ExpandDatabase, files::FilePosition, InFile}; -use span::{EditionedFileId, FileId}; -use syntax::{algo, ast, AstNode}; +use hir_expand::{InFile, files::FilePosition}; +use salsa::{AsDynDatabase, Durability}; +use span::FileId; +use syntax::{AstNode, algo, ast}; use triomphe::Arc; use crate::{ + LocalModuleId, Lookup, ModuleDefId, ModuleId, db::DefDatabase, nameres::{DefMap, ModuleSource}, src::HasSource, - LocalModuleId, Lookup, ModuleDefId, ModuleId, }; -#[ra_salsa::database( - base_db::SourceRootDatabaseStorage, - base_db::SourceDatabaseStorage, - hir_expand::db::ExpandDatabaseStorage, - crate::db::InternDatabaseStorage, - crate::db::DefDatabaseStorage -)] +#[salsa_macros::db] +#[derive(Clone)] pub(crate) struct TestDB { - storage: ra_salsa::Storage, - events: Mutex>>, + storage: salsa::Storage, + files: Arc, + crates_map: Arc, + events: Arc>>>, } impl Default for TestDB { fn default() -> Self { - let mut this = Self { storage: Default::default(), events: Default::default() }; - this.setup_syntax_context_root(); + let mut this = Self { + storage: Default::default(), + events: Default::default(), + files: Default::default(), + crates_map: Default::default(), + }; this.set_expand_proc_attr_macros_with_durability(true, Durability::HIGH); + // This needs to be here otherwise `CrateGraphBuilder` panics. + this.set_all_crates(Arc::new(Box::new([]))); + CrateGraphBuilder::default().set_in_db(&mut this); this } } -impl Upcast for TestDB { - fn upcast(&self) -> &(dyn ExpandDatabase + 'static) { - self - } -} - -impl Upcast for TestDB { - fn upcast(&self) -> &(dyn DefDatabase + 'static) { - self - } -} - -impl ra_salsa::Database for TestDB { - fn salsa_event(&self, event: ra_salsa::Event) { +#[salsa_macros::db] +impl salsa::Database for TestDB { + fn salsa_event(&self, event: &dyn std::ops::Fn() -> salsa::Event) { let mut events = self.events.lock().unwrap(); if let Some(events) = &mut *events { + let event = event(); events.push(event); } } @@ -68,34 +63,79 @@ impl fmt::Debug for TestDB { impl panic::RefUnwindSafe for TestDB {} -impl FileLoader for TestDB { - fn resolve_path(&self, path: AnchoredPath<'_>) -> Option { - FileLoaderDelegate(self).resolve_path(path) +#[salsa_macros::db] +impl SourceDatabase for TestDB { + fn file_text(&self, file_id: base_db::FileId) -> FileText { + self.files.file_text(file_id) + } + + fn set_file_text(&mut self, file_id: base_db::FileId, text: &str) { + let files = Arc::clone(&self.files); + files.set_file_text(self, file_id, text); } - fn relevant_crates(&self, file_id: FileId) -> Arc<[CrateId]> { - FileLoaderDelegate(self).relevant_crates(file_id) + + fn set_file_text_with_durability( + &mut self, + file_id: base_db::FileId, + text: &str, + durability: Durability, + ) { + let files = Arc::clone(&self.files); + files.set_file_text_with_durability(self, file_id, text, durability); + } + + /// Source root of the file. + fn source_root(&self, source_root_id: SourceRootId) -> SourceRootInput { + self.files.source_root(source_root_id) + } + + fn set_source_root_with_durability( + &mut self, + source_root_id: SourceRootId, + source_root: Arc, + durability: Durability, + ) { + let files = Arc::clone(&self.files); + files.set_source_root_with_durability(self, source_root_id, source_root, durability); + } + + fn file_source_root(&self, id: base_db::FileId) -> FileSourceRootInput { + self.files.file_source_root(id) + } + + fn set_file_source_root_with_durability( + &mut self, + id: base_db::FileId, + source_root_id: SourceRootId, + durability: Durability, + ) { + let files = Arc::clone(&self.files); + files.set_file_source_root_with_durability(self, id, source_root_id, durability); + } + + fn crates_map(&self) -> Arc { + self.crates_map.clone() } } impl TestDB { - pub(crate) fn fetch_test_crate(&self) -> CrateId { - let crate_graph = self.crate_graph(); - let it = crate_graph + pub(crate) fn fetch_test_crate(&self) -> Crate { + let all_crates = self.all_crates(); + all_crates .iter() - .find(|&idx| { - crate_graph[idx].display_name.as_ref().map(|it| it.canonical_name().as_str()) + .copied() + .find(|&krate| { + krate.extra_data(self).display_name.as_ref().map(|it| it.canonical_name().as_str()) == Some("ra_test_fixture") }) - .or_else(|| crate_graph.iter().next()) - .unwrap(); - it + .unwrap_or(*all_crates.last().unwrap()) } pub(crate) fn module_for_file(&self, file_id: FileId) -> ModuleId { for &krate in self.relevant_crates(file_id).iter() { let crate_def_map = self.crate_def_map(krate); for (local_id, data) in crate_def_map.modules() { - if data.origin.file_id().map(EditionedFileId::file_id) == Some(file_id) { + if data.origin.file_id().map(|file_id| file_id.file_id(self)) == Some(file_id) { return crate_def_map.module_id(local_id); } } @@ -104,7 +144,7 @@ impl TestDB { } pub(crate) fn module_at_position(&self, position: FilePosition) -> ModuleId { - let file_module = self.module_for_file(position.file_id.file_id()); + let file_module = self.module_for_file(position.file_id.file_id(self)); let mut def_map = file_module.def_map(self); let module = self.mod_at_position(&def_map, position); @@ -203,12 +243,12 @@ impl TestDB { // Find the innermost block expression that has a `DefMap`. let def_with_body = fn_def?.into(); - let (_, source_map) = self.body_with_source_map(def_with_body); + let source_map = self.body_with_source_map(def_with_body).1; let scopes = self.expr_scopes(def_with_body); - let root = self.parse(position.file_id); - let scope_iter = algo::ancestors_at_offset(&root.syntax_node(), position.offset) - .filter_map(|node| { + let root_syntax_node = self.parse(position.file_id).syntax_node(); + let scope_iter = + algo::ancestors_at_offset(&root_syntax_node, position.offset).filter_map(|node| { let block = ast::BlockExpr::cast(node)?; let expr = ast::Expr::from(block); let expr_id = source_map @@ -231,7 +271,7 @@ impl TestDB { None } - pub(crate) fn log(&self, f: impl FnOnce()) -> Vec { + pub(crate) fn log(&self, f: impl FnOnce()) -> Vec { *self.events.lock().unwrap() = Some(Vec::new()); f(); self.events.lock().unwrap().take().unwrap() @@ -244,8 +284,11 @@ impl TestDB { .filter_map(|e| match e.kind { // This is pretty horrible, but `Debug` is the only way to inspect // QueryDescriptor at the moment. - ra_salsa::EventKind::WillExecute { database_key } => { - Some(format!("{:?}", database_key.debug(self))) + salsa::EventKind::WillExecute { database_key } => { + let ingredient = self + .as_dyn_database() + .ingredient_debug_name(database_key.ingredient_index()); + Some(ingredient.to_string()) } _ => None, }) diff --git a/src/tools/rust-analyzer/crates/hir-def/src/visibility.rs b/src/tools/rust-analyzer/crates/hir-def/src/visibility.rs index c4473e454a1bc..b42c8d383d4a6 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/visibility.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/visibility.rs @@ -2,80 +2,19 @@ use std::iter; -use intern::Interned; +use hir_expand::Lookup; use la_arena::ArenaMap; -use span::SyntaxContextId; -use syntax::ast; use triomphe::Arc; use crate::{ + ConstId, FunctionId, HasModule, ItemContainerId, ItemLoc, ItemTreeLoc, LocalFieldId, + LocalModuleId, ModuleId, TraitId, TypeAliasId, VariantId, db::DefDatabase, nameres::DefMap, - path::{ModPath, PathKind}, - resolver::HasResolver, - ConstId, FunctionId, HasModule, LocalFieldId, LocalModuleId, ModuleId, VariantId, + resolver::{HasResolver, Resolver}, }; -/// Visibility of an item, not yet resolved. -#[derive(Debug, Clone, PartialEq, Eq)] -pub enum RawVisibility { - /// `pub(in module)`, `pub(crate)` or `pub(super)`. Also private, which is - /// equivalent to `pub(self)`. - Module(Interned, VisibilityExplicitness), - /// `pub`. - Public, -} - -impl RawVisibility { - pub(crate) fn private() -> RawVisibility { - RawVisibility::Module( - Interned::new(ModPath::from_kind(PathKind::SELF)), - VisibilityExplicitness::Implicit, - ) - } - - pub(crate) fn from_ast( - db: &dyn DefDatabase, - node: Option, - span_for_range: &mut dyn FnMut(::tt::TextRange) -> SyntaxContextId, - ) -> RawVisibility { - let node = match node { - None => return RawVisibility::private(), - Some(node) => node, - }; - Self::from_ast_with_span_map(db, node, span_for_range) - } - - fn from_ast_with_span_map( - db: &dyn DefDatabase, - node: ast::Visibility, - span_for_range: &mut dyn FnMut(::tt::TextRange) -> SyntaxContextId, - ) -> RawVisibility { - let path = match node.kind() { - ast::VisibilityKind::In(path) => { - let path = ModPath::from_src(db.upcast(), path, span_for_range); - match path { - None => return RawVisibility::private(), - Some(path) => path, - } - } - ast::VisibilityKind::PubCrate => ModPath::from_kind(PathKind::Crate), - ast::VisibilityKind::PubSuper => ModPath::from_kind(PathKind::Super(1)), - ast::VisibilityKind::PubSelf => ModPath::from_kind(PathKind::SELF), - ast::VisibilityKind::Pub => return RawVisibility::Public, - }; - RawVisibility::Module(Interned::new(path), VisibilityExplicitness::Explicit) - } - - pub fn resolve( - &self, - db: &dyn DefDatabase, - resolver: &crate::resolver::Resolver, - ) -> Visibility { - // we fall back to public visibility (i.e. fail open) if the path can't be resolved - resolver.resolve_visibility(db, self).unwrap_or(Visibility::Public) - } -} +pub use crate::item_tree::{RawVisibility, VisibilityExplicitness}; /// Visibility of an item, with the path resolved. #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] @@ -87,6 +26,15 @@ pub enum Visibility { } impl Visibility { + pub fn resolve( + db: &dyn DefDatabase, + resolver: &crate::resolver::Resolver, + raw_vis: &RawVisibility, + ) -> Self { + // we fall back to public visibility (i.e. fail open) if the path can't be resolved + resolver.resolve_visibility(db, raw_vis).unwrap_or(Visibility::Public) + } + pub(crate) fn is_visible_from_other_crate(self) -> bool { matches!(self, Visibility::Public) } @@ -254,30 +202,20 @@ impl Visibility { } } -/// Whether the item was imported through an explicit `pub(crate) use` or just a `use` without -/// visibility. -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] -pub enum VisibilityExplicitness { - Explicit, - Implicit, -} - -impl VisibilityExplicitness { - pub fn is_explicit(&self) -> bool { - matches!(self, Self::Explicit) - } -} - /// Resolve visibility of all specific fields of a struct or union variant. pub(crate) fn field_visibilities_query( db: &dyn DefDatabase, variant_id: VariantId, ) -> Arc> { - let var_data = variant_id.variant_data(db); + let variant_fields = db.variant_fields(variant_id); + let fields = variant_fields.fields(); + if fields.is_empty() { + return Arc::default(); + } let resolver = variant_id.module(db).resolver(db); let mut res = ArenaMap::default(); - for (field_id, field_data) in var_data.fields().iter() { - res.insert(field_id, field_data.visibility.resolve(db, &resolver)); + for (field_id, field_data) in fields.iter() { + res.insert(field_id, Visibility::resolve(db, &resolver, &field_data.visibility)); } Arc::new(res) } @@ -285,11 +223,43 @@ pub(crate) fn field_visibilities_query( /// Resolve visibility of a function. pub(crate) fn function_visibility_query(db: &dyn DefDatabase, def: FunctionId) -> Visibility { let resolver = def.resolver(db); - db.function_data(def).visibility.resolve(db, &resolver) + let loc = def.lookup(db); + let tree = loc.item_tree_id().item_tree(db); + if let ItemContainerId::TraitId(trait_id) = loc.container { + trait_vis(db, &resolver, trait_id) + } else { + Visibility::resolve(db, &resolver, &tree[tree[loc.id.value].visibility]) + } } /// Resolve visibility of a const. pub(crate) fn const_visibility_query(db: &dyn DefDatabase, def: ConstId) -> Visibility { let resolver = def.resolver(db); - db.const_data(def).visibility.resolve(db, &resolver) + let loc = def.lookup(db); + let tree = loc.item_tree_id().item_tree(db); + if let ItemContainerId::TraitId(trait_id) = loc.container { + trait_vis(db, &resolver, trait_id) + } else { + Visibility::resolve(db, &resolver, &tree[tree[loc.id.value].visibility]) + } +} + +/// Resolve visibility of a type alias. +pub(crate) fn type_alias_visibility_query(db: &dyn DefDatabase, def: TypeAliasId) -> Visibility { + let resolver = def.resolver(db); + let loc = def.lookup(db); + let tree = loc.item_tree_id().item_tree(db); + if let ItemContainerId::TraitId(trait_id) = loc.container { + trait_vis(db, &resolver, trait_id) + } else { + Visibility::resolve(db, &resolver, &tree[tree[loc.id.value].visibility]) + } +} + +#[inline] +fn trait_vis(db: &dyn DefDatabase, resolver: &Resolver, trait_id: TraitId) -> Visibility { + let ItemLoc { id: tree_id, .. } = trait_id.lookup(db); + let item_tree = tree_id.item_tree(db); + let tr_def = &item_tree[tree_id.value]; + Visibility::resolve(db, resolver, &item_tree[tr_def.visibility]) } diff --git a/src/tools/rust-analyzer/crates/hir-expand/Cargo.toml b/src/tools/rust-analyzer/crates/hir-expand/Cargo.toml index 7d561e0527d91..ed818c5be3f71 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/Cargo.toml +++ b/src/tools/rust-analyzer/crates/hir-expand/Cargo.toml @@ -12,15 +12,16 @@ rust-version.workspace = true [lib] [dependencies] -cov-mark = "2.0.0-pre.1" +cov-mark = "2.0.0" tracing.workspace = true either.workspace = true rustc-hash.workspace = true -la-arena.workspace = true itertools.workspace = true -hashbrown.workspace = true smallvec.workspace = true triomphe.workspace = true +query-group.workspace = true +salsa.workspace = true +salsa-macros.workspace = true # local deps stdx.workspace = true @@ -35,7 +36,7 @@ parser.workspace = true syntax-bridge.workspace = true [dev-dependencies] -expect-test = "1.4.0" +expect-test = "1.5.1" [features] in-rust-tree = ["syntax/in-rust-tree"] diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/attrs.rs b/src/tools/rust-analyzer/crates/hir-expand/src/attrs.rs index c9c793d54f26c..bb17eb0627606 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/attrs.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/attrs.rs @@ -1,26 +1,25 @@ //! A higher level attributes based on TokenTree, with also some shortcuts. use std::{borrow::Cow, fmt, ops}; -use base_db::CrateId; -use cfg::CfgExpr; +use base_db::Crate; +use cfg::{CfgExpr, CfgOptions}; use either::Either; -use intern::{sym, Interned, Symbol}; +use intern::{Interned, Symbol, sym}; use mbe::{DelimiterKind, Punct}; -use smallvec::{smallvec, SmallVec}; -use span::{Span, SyntaxContextId}; +use smallvec::{SmallVec, smallvec}; +use span::{Span, SyntaxContext}; use syntax::unescape; -use syntax::{ast, match_ast, AstNode, AstToken, SyntaxNode}; -use syntax_bridge::{desugar_doc_comment_text, syntax_node_to_token_tree, DocCommentDesugarMode}; +use syntax::{AstNode, AstToken, SyntaxNode, ast, match_ast}; +use syntax_bridge::{DocCommentDesugarMode, desugar_doc_comment_text, syntax_node_to_token_tree}; use triomphe::ThinArc; -use crate::name::Name; use crate::{ db::ExpandDatabase, mod_path::ModPath, + name::Name, span_map::SpanMapRef, - tt::{self, token_to_literal, TopSubtree}, - InFile, + tt::{self, TopSubtree, token_to_literal}, }; /// Syntactical attributes, without filtering of `cfg_attr`s. @@ -49,32 +48,7 @@ impl RawAttrs { owner: &dyn ast::HasAttrs, span_map: SpanMapRef<'_>, ) -> Self { - let entries: Vec<_> = collect_attrs(owner) - .filter_map(|(id, attr)| match attr { - Either::Left(attr) => { - attr.meta().and_then(|meta| Attr::from_src(db, meta, span_map, id)) - } - Either::Right(comment) => comment.doc_comment().map(|doc| { - let span = span_map.span_for_range(comment.syntax().text_range()); - let (text, kind) = - desugar_doc_comment_text(doc, DocCommentDesugarMode::ProcMacro); - Attr { - id, - input: Some(Box::new(AttrInput::Literal(tt::Literal { - symbol: text, - span, - kind, - suffix: None, - }))), - path: Interned::new(ModPath::from(Name::new_symbol( - sym::doc.clone(), - span.ctx, - ))), - ctxt: span.ctx, - } - }), - }) - .collect(); + let entries: Vec<_> = Self::attrs_iter::(db, owner, span_map).collect(); let entries = if entries.is_empty() { None @@ -85,12 +59,61 @@ impl RawAttrs { RawAttrs { entries } } - pub fn from_attrs_owner( + /// A [`RawAttrs`] that has its `#[cfg_attr(...)]` attributes expanded. + pub fn new_expanded( db: &dyn ExpandDatabase, - owner: InFile<&dyn ast::HasAttrs>, + owner: &dyn ast::HasAttrs, span_map: SpanMapRef<'_>, + cfg_options: &CfgOptions, ) -> Self { - Self::new(db, owner.value, span_map) + let entries: Vec<_> = + Self::attrs_iter_expanded::(db, owner, span_map, cfg_options).collect(); + + let entries = if entries.is_empty() { + None + } else { + Some(ThinArc::from_header_and_iter((), entries.into_iter())) + }; + + RawAttrs { entries } + } + + pub fn attrs_iter( + db: &dyn ExpandDatabase, + owner: &dyn ast::HasAttrs, + span_map: SpanMapRef<'_>, + ) -> impl Iterator { + collect_attrs(owner).filter_map(move |(id, attr)| match attr { + Either::Left(attr) => { + attr.meta().and_then(|meta| Attr::from_src(db, meta, span_map, id)) + } + Either::Right(comment) if DESUGAR_COMMENTS => comment.doc_comment().map(|doc| { + let span = span_map.span_for_range(comment.syntax().text_range()); + let (text, kind) = desugar_doc_comment_text(doc, DocCommentDesugarMode::ProcMacro); + Attr { + id, + input: Some(Box::new(AttrInput::Literal(tt::Literal { + symbol: text, + span, + kind, + suffix: None, + }))), + path: Interned::new(ModPath::from(Name::new_symbol(sym::doc, span.ctx))), + ctxt: span.ctx, + } + }), + Either::Right(_) => None, + }) + } + + pub fn attrs_iter_expanded( + db: &dyn ExpandDatabase, + owner: &dyn ast::HasAttrs, + span_map: SpanMapRef<'_>, + cfg_options: &CfgOptions, + ) -> impl Iterator { + Self::attrs_iter::(db, owner, span_map) + .flat_map(|attr| attr.expand_cfg_attr(db, cfg_options)) } pub fn merge(&self, other: Self) -> Self { @@ -117,52 +140,20 @@ impl RawAttrs { } } - /// Processes `cfg_attr`s, returning the resulting semantic `Attrs`. - // FIXME: This should return a different type, signaling it was filtered? - pub fn filter(self, db: &dyn ExpandDatabase, krate: CrateId) -> RawAttrs { - let has_cfg_attrs = self - .iter() - .any(|attr| attr.path.as_ident().is_some_and(|name| *name == sym::cfg_attr.clone())); + /// Processes `cfg_attr`s + pub fn expand_cfg_attr(self, db: &dyn ExpandDatabase, krate: Crate) -> RawAttrs { + let has_cfg_attrs = + self.iter().any(|attr| attr.path.as_ident().is_some_and(|name| *name == sym::cfg_attr)); if !has_cfg_attrs { return self; } - let crate_graph = db.crate_graph(); - let new_attrs = - self.iter() - .flat_map(|attr| -> SmallVec<[_; 1]> { - let is_cfg_attr = - attr.path.as_ident().is_some_and(|name| *name == sym::cfg_attr.clone()); - if !is_cfg_attr { - return smallvec![attr.clone()]; - } - - let subtree = match attr.token_tree_value() { - Some(it) => it, - _ => return smallvec![attr.clone()], - }; - - let (cfg, parts) = match parse_cfg_attr_input(subtree) { - Some(it) => it, - None => return smallvec![attr.clone()], - }; - let index = attr.id; - let attrs = parts.enumerate().take(1 << AttrId::CFG_ATTR_BITS).filter_map( - |(idx, attr)| Attr::from_tt(db, attr, index.with_cfg_attr(idx)), - ); - - let cfg_options = &crate_graph[krate].cfg_options; - let cfg = TopSubtree::from_token_trees(subtree.top_subtree().delimiter, cfg); - let cfg = CfgExpr::parse(&cfg); - if cfg_options.check(&cfg) == Some(false) { - smallvec![] - } else { - cov_mark::hit!(cfg_attr_active); - - attrs.collect() - } - }) - .collect::>(); + let cfg_options = krate.cfg_options(db); + let new_attrs = self + .iter() + .cloned() + .flat_map(|attr| attr.expand_cfg_attr(db, cfg_options)) + .collect::>(); let entries = if new_attrs.is_empty() { None } else { @@ -211,7 +202,7 @@ pub struct Attr { pub id: AttrId, pub path: Interned, pub input: Option>, - pub ctxt: SyntaxContextId, + pub ctxt: SyntaxContext, } #[derive(Debug, Clone, PartialEq, Eq, Hash)] @@ -306,13 +297,12 @@ impl Attr { Some(Box::new(AttrInput::TokenTree(tt::TopSubtree::from_subtree(tree)))) } (Some(tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { char: '=', .. }))), _) => { - let input = match input.flat_tokens().get(1) { + match input.flat_tokens().get(1) { Some(tt::TokenTree::Leaf(tt::Leaf::Literal(lit))) => { Some(Box::new(AttrInput::Literal(lit.clone()))) } _ => None, - }; - input + } } _ => None, }; @@ -322,6 +312,42 @@ impl Attr { pub fn path(&self) -> &ModPath { &self.path } + + pub fn expand_cfg_attr( + self, + db: &dyn ExpandDatabase, + cfg_options: &CfgOptions, + ) -> impl IntoIterator { + let is_cfg_attr = self.path.as_ident().is_some_and(|name| *name == sym::cfg_attr); + if !is_cfg_attr { + return smallvec![self]; + } + + let subtree = match self.token_tree_value() { + Some(it) => it, + _ => return smallvec![self.clone()], + }; + + let (cfg, parts) = match parse_cfg_attr_input(subtree) { + Some(it) => it, + None => return smallvec![self.clone()], + }; + let index = self.id; + let attrs = parts + .enumerate() + .take(1 << AttrId::CFG_ATTR_BITS) + .filter_map(|(idx, attr)| Attr::from_tt(db, attr, index.with_cfg_attr(idx))); + + let cfg = TopSubtree::from_token_trees(subtree.top_subtree().delimiter, cfg); + let cfg = CfgExpr::parse(&cfg); + if cfg_options.check(&cfg) == Some(false) { + smallvec![] + } else { + cov_mark::hit!(cfg_attr_active); + + attrs.collect::>() + } + } } impl Attr { @@ -403,7 +429,7 @@ impl Attr { } pub fn cfg(&self) -> Option { - if *self.path.as_ident()? == sym::cfg.clone() { + if *self.path.as_ident()? == sym::cfg { self.token_tree_value().map(CfgExpr::parse) } else { None diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/builtin.rs b/src/tools/rust-analyzer/crates/hir-expand/src/builtin.rs index 7b9b7f36e2cdb..0bf4943b60cec 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/builtin.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/builtin.rs @@ -7,9 +7,9 @@ mod derive_macro; mod fn_macro; pub use self::{ - attr_macro::{find_builtin_attr, pseudo_derive_attr_expansion, BuiltinAttrExpander}, - derive_macro::{find_builtin_derive, BuiltinDeriveExpander}, + attr_macro::{BuiltinAttrExpander, find_builtin_attr, pseudo_derive_attr_expansion}, + derive_macro::{BuiltinDeriveExpander, find_builtin_derive}, fn_macro::{ - find_builtin_macro, include_input_to_file_id, BuiltinFnLikeExpander, EagerExpander, + BuiltinFnLikeExpander, EagerExpander, find_builtin_macro, include_input_to_file_id, }, }; diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/builtin/attr_macro.rs b/src/tools/rust-analyzer/crates/hir-expand/src/builtin/attr_macro.rs index e9dc17a28f688..25dd933f26772 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/builtin/attr_macro.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/builtin/attr_macro.rs @@ -1,8 +1,8 @@ //! Builtin attributes. use intern::sym; -use span::{MacroCallId, Span}; +use span::Span; -use crate::{db::ExpandDatabase, name, tt, ExpandResult, MacroCallKind}; +use crate::{ExpandResult, MacroCallId, MacroCallKind, db::ExpandDatabase, name, tt}; use super::quote; @@ -130,7 +130,7 @@ fn derive_expand( return ExpandResult::ok(tt::TopSubtree::empty(tt::DelimSpan { open: span, close: span, - })) + })); } }; pseudo_derive_attr_expansion(tt, derives, span) diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/builtin/derive_macro.rs b/src/tools/rust-analyzer/crates/hir-expand/src/builtin/derive_macro.rs index 28b6812139446..68283b916d74b 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/builtin/derive_macro.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/builtin/derive_macro.rs @@ -1,26 +1,27 @@ //! Builtin derives. use intern::sym; -use itertools::{izip, Itertools}; +use itertools::{Itertools, izip}; use parser::SyntaxKind; use rustc_hash::FxHashSet; -use span::{Edition, MacroCallId, Span, SyntaxContextId}; +use span::{Edition, Span, SyntaxContext}; use stdx::never; use syntax_bridge::DocCommentDesugarMode; use tracing::debug; use crate::{ + ExpandError, ExpandResult, MacroCallId, builtin::quote::{dollar_crate, quote}, db::ExpandDatabase, hygiene::span_with_def_site_ctxt, name::{self, AsName, Name}, span_map::ExpansionSpanMap, - tt, ExpandError, ExpandResult, + tt, }; use syntax::{ ast::{ - self, edit_in_place::GenericParamsOwnerEdit, make, AstNode, FieldList, HasAttrs, - HasGenericArgs, HasGenericParams, HasModuleItem, HasName, HasTypeBounds, + self, AstNode, FieldList, HasAttrs, HasGenericArgs, HasGenericParams, HasModuleItem, + HasName, HasTypeBounds, edit_in_place::GenericParamsOwnerEdit, make, }, ted, }; @@ -58,7 +59,7 @@ impl BuiltinDeriveExpander { tt: &tt::TopSubtree, span: Span, ) -> ExpandResult { - let span = span_with_def_site_ctxt(db, span, id, Edition::CURRENT); + let span = span_with_def_site_ctxt(db, span, id.into(), Edition::CURRENT); self.expander()(db, span, tt) } } @@ -117,7 +118,7 @@ impl VariantShape { quote! {span => #it : #mapped , } }); quote! {span => - #path { ##fields } + #path { # #fields } } } &VariantShape::Tuple(n) => { @@ -128,7 +129,7 @@ impl VariantShape { } }); quote! {span => - #path ( ##fields ) + #path ( # #fields ) } } VariantShape::Unit => path, @@ -237,7 +238,7 @@ fn parse_adt( fn parse_adt_from_syntax( adt: &ast::Adt, - tm: &span::SpanMap, + tm: &span::SpanMap, call_site: Span, ) -> Result { let (name, generic_param_list, where_clause, shape) = match &adt { @@ -389,7 +390,7 @@ fn to_adt_syntax( db: &dyn ExpandDatabase, tt: &tt::TopSubtree, call_site: Span, -) -> Result<(ast::Adt, span::SpanMap), ExpandError> { +) -> Result<(ast::Adt, span::SpanMap), ExpandError> { let (parsed, tm) = crate::db::token_tree_to_syntax_node( db, tt, @@ -464,7 +465,7 @@ fn expand_simple_derive( return ExpandResult::new( tt::TopSubtree::empty(tt::DelimSpan { open: invoc_span, close: invoc_span }), e, - ) + ); } }; ExpandResult::ok(expand_simple_derive_with_parsed( @@ -523,7 +524,7 @@ fn expand_simple_derive_with_parsed( let name = info.name; quote! {invoc_span => - impl < ##params #extra_impl_params > #trait_path for #name < ##args > where ##where_block { #trait_body } + impl < # #params #extra_impl_params > #trait_path for #name < # #args > where # #where_block { #trait_body } } } @@ -572,7 +573,7 @@ fn clone_expand( quote! {span => fn clone(&self) -> Self { match self { - ##arms + # #arms } } } @@ -650,7 +651,7 @@ fn debug_expand( } }); quote! {span => - f.debug_struct(#name) ##for_fields .finish() + f.debug_struct(#name) # #for_fields .finish() } } VariantShape::Tuple(n) => { @@ -660,7 +661,7 @@ fn debug_expand( } }); quote! {span => - f.debug_tuple(#name) ##for_fields .finish() + f.debug_tuple(#name) # #for_fields .finish() } } VariantShape::Unit => quote! {span => @@ -703,7 +704,7 @@ fn debug_expand( quote! {span => fn fmt(&self, f: &mut #krate::fmt::Formatter) -> #krate::fmt::Result { match self { - ##arms + # #arms } } } @@ -736,7 +737,7 @@ fn hash_expand( let it = names.iter().map(|it| quote! {span => #it . hash(ra_expand_state); }); quote! {span => { - ##it + # #it } } }; let fat_arrow = fat_arrow(span); @@ -754,7 +755,7 @@ fn hash_expand( fn hash(&self, ra_expand_state: &mut H) { #check_discriminant match self { - ##arms + # #arms } } } @@ -803,7 +804,7 @@ fn partial_eq_expand( let t2 = tt::Ident::new(&format!("{}_other", first.sym), first.span); quote!(span =>#t1 .eq( #t2 )) }; - quote!(span =>#first ##rest) + quote!(span =>#first # #rest) } }; quote! {span => ( #pat1 , #pat2 ) #fat_arrow #body , } @@ -814,7 +815,7 @@ fn partial_eq_expand( quote! {span => fn eq(&self, other: &Self) -> bool { match (self, other) { - ##arms + # #arms _unused #fat_arrow false } } @@ -891,7 +892,7 @@ fn ord_expand( let fat_arrow = fat_arrow(span); let mut body = quote! {span => match (self, other) { - ##arms + # #arms _unused #fat_arrow #krate::cmp::Ordering::Equal } }; @@ -961,14 +962,14 @@ fn partial_ord_expand( right, quote! {span => match (self, other) { - ##arms + # #arms _unused #fat_arrow #krate::option::Option::Some(#krate::cmp::Ordering::Equal) } }, span, ); quote! {span => - fn partial_cmp(&self, other: &Self) -> #krate::option::Option::Option<#krate::cmp::Ordering> { + fn partial_cmp(&self, other: &Self) -> #krate::option::Option<#krate::cmp::Ordering> { #body } } @@ -1072,7 +1073,7 @@ fn coerce_pointee_expand( "exactly one generic type parameter must be marked \ as `#[pointee]` to derive `CoercePointee` traits", ), - ) + ); } (Some(_), Some(_)) => { return ExpandResult::new( @@ -1082,7 +1083,7 @@ fn coerce_pointee_expand( "only one type parameter can be marked as `#[pointee]` \ when deriving `CoercePointee` traits", ), - ) + ); } } }; @@ -1120,7 +1121,9 @@ fn coerce_pointee_expand( tt::TopSubtree::empty(tt::DelimSpan::from_single(span)), ExpandError::other( span, - format!("`derive(CoercePointee)` requires `{pointee_param_name}` to be marked `?Sized`"), + format!( + "`derive(CoercePointee)` requires `{pointee_param_name}` to be marked `?Sized`" + ), ), ); } @@ -1311,15 +1314,15 @@ fn coerce_pointee_expand( } }) }); - let self_for_traits = make::path_from_segments( + + make::path_from_segments( [make::generic_ty_path_segment( make::name_ref(&struct_name.text()), self_params_for_traits, )], false, ) - .clone_for_update(); - self_for_traits + .clone_for_update() }; let mut span_map = span::SpanMap::empty(); @@ -1335,7 +1338,7 @@ fn coerce_pointee_expand( let info = match parse_adt_from_syntax(&adt, &span_map, span) { Ok(it) => it, Err(err) => { - return ExpandResult::new(tt::TopSubtree::empty(tt::DelimSpan::from_single(span)), err) + return ExpandResult::new(tt::TopSubtree::empty(tt::DelimSpan::from_single(span)), err); } }; diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/builtin/fn_macro.rs b/src/tools/rust-analyzer/crates/hir-expand/src/builtin/fn_macro.rs index 55242ab3e57d1..621e174cac997 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/builtin/fn_macro.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/builtin/fn_macro.rs @@ -3,24 +3,27 @@ use base_db::AnchoredPath; use cfg::CfgExpr; use either::Either; -use intern::{sym, Symbol}; -use mbe::{expect_fragment, DelimiterKind}; -use span::{Edition, EditionedFileId, Span}; +use intern::{ + Symbol, + sym::{self}, +}; +use mbe::{DelimiterKind, expect_fragment}; +use span::{Edition, FileId, Span}; use stdx::format_to; use syntax::{ format_smolstr, - unescape::{unescape_byte, unescape_char, unescape_unicode, Mode}, + unescape::{Mode, unescape_byte, unescape_char, unescape_unicode}, }; use syntax_bridge::syntax_node_to_token_tree; use crate::{ - builtin::quote::{dollar_crate, quote, WithDelimiter}, + EditionedFileId, ExpandError, ExpandResult, Lookup as _, MacroCallId, + builtin::quote::{WithDelimiter, dollar_crate, quote}, db::ExpandDatabase, hygiene::{span_with_call_site_ctxt, span_with_def_site_ctxt}, name, span_map::SpanMap, tt::{self, DelimSpan, TtElement, TtIter}, - ExpandError, ExpandResult, HirFileIdExt, Lookup as _, MacroCallId, }; macro_rules! register_builtin { @@ -69,7 +72,7 @@ impl BuiltinFnLikeExpander { tt: &tt::TopSubtree, span: Span, ) -> ExpandResult { - let span = span_with_def_site_ctxt(db, span, id, Edition::CURRENT); + let span = span_with_def_site_ctxt(db, span, id.into(), Edition::CURRENT); self.expander()(db, id, tt, span) } @@ -86,7 +89,7 @@ impl EagerExpander { tt: &tt::TopSubtree, span: Span, ) -> ExpandResult { - let span = span_with_def_site_ctxt(db, span, id, Edition::CURRENT); + let span = span_with_def_site_ctxt(db, span, id.into(), Edition::CURRENT); self.expander()(db, id, tt, span) } @@ -174,10 +177,10 @@ fn line_expand( ExpandResult::ok(tt::TopSubtree::invisible_from_leaves( span, [tt::Leaf::Literal(tt::Literal { - symbol: sym::INTEGER_0.clone(), + symbol: sym::INTEGER_0, span, kind: tt::LitKind::Integer, - suffix: Some(sym::u32.clone()), + suffix: Some(sym::u32), })], )) } @@ -221,14 +224,14 @@ fn assert_expand( tt: &tt::TopSubtree, span: Span, ) -> ExpandResult { - let call_site_span = span_with_call_site_ctxt(db, span, id, Edition::CURRENT); + let call_site_span = span_with_call_site_ctxt(db, span, id.into(), Edition::CURRENT); let mut iter = tt.iter(); let cond = expect_fragment( &mut iter, parser::PrefixEntryPoint::Expr, - db.crate_graph()[id.lookup(db).krate].edition, + id.lookup(db).krate.data(db).edition, tt.top_subtree().delimiter.delim_span(), ); _ = iter.expect_char(','); @@ -237,9 +240,9 @@ fn assert_expand( let dollar_crate = dollar_crate(span); let panic_args = rest.iter(); let mac = if use_panic_2021(db, span) { - quote! {call_site_span => #dollar_crate::panic::panic_2021!(##panic_args) } + quote! {call_site_span => #dollar_crate::panic::panic_2021!(# #panic_args) } } else { - quote! {call_site_span => #dollar_crate::panic!(##panic_args) } + quote! {call_site_span => #dollar_crate::panic!(# #panic_args) } }; let value = cond.value; let expanded = quote! {call_site_span =>{ @@ -330,7 +333,7 @@ fn cfg_expand( ) -> ExpandResult { let loc = db.lookup_intern_macro_call(id); let expr = CfgExpr::parse(tt); - let enabled = db.crate_graph()[loc.krate].cfg_options.check(&expr) != Some(false); + let enabled = loc.krate.cfg_options(db).check(&expr) != Some(false); let expanded = if enabled { quote!(span=>true) } else { quote!(span=>false) }; ExpandResult::ok(expanded) } @@ -342,13 +345,9 @@ fn panic_expand( span: Span, ) -> ExpandResult { let dollar_crate = dollar_crate(span); - let call_site_span = span_with_call_site_ctxt(db, span, id, Edition::CURRENT); + let call_site_span = span_with_call_site_ctxt(db, span, id.into(), Edition::CURRENT); - let mac = if use_panic_2021(db, call_site_span) { - sym::panic_2021.clone() - } else { - sym::panic_2015.clone() - }; + let mac = if use_panic_2021(db, call_site_span) { sym::panic_2021 } else { sym::panic_2015 }; // Pass the original arguments let subtree = WithDelimiter { @@ -373,12 +372,12 @@ fn unreachable_expand( span: Span, ) -> ExpandResult { let dollar_crate = dollar_crate(span); - let call_site_span = span_with_call_site_ctxt(db, span, id, Edition::CURRENT); + let call_site_span = span_with_call_site_ctxt(db, span, id.into(), Edition::CURRENT); let mac = if use_panic_2021(db, call_site_span) { - sym::unreachable_2021.clone() + sym::unreachable_2021 } else { - sym::unreachable_2015.clone() + sym::unreachable_2015 }; // Pass the original arguments @@ -401,14 +400,14 @@ fn use_panic_2021(db: &dyn ExpandDatabase, span: Span) -> bool { // stack that does not have #[allow_internal_unstable(edition_panic)]. // (To avoid using the edition of e.g. the assert!() or debug_assert!() definition.) loop { - let Some(expn) = db.lookup_intern_syntax_context(span.ctx).outer_expn else { + let Some(expn) = span.ctx.outer_expn(db) else { break false; }; - let expn = db.lookup_intern_macro_call(expn); + let expn = db.lookup_intern_macro_call(expn.into()); // FIXME: Record allow_internal_unstable in the macro def (not been done yet because it // would consume quite a bit extra memory for all call locs...) // if let Some(features) = expn.def.allow_internal_unstable { - // if features.iter().any(|&f| f == sym::edition_panic.clone()) { + // if features.iter().any(|&f| f == sym::edition_panic) { // span = expn.call_site; // continue; // } @@ -424,12 +423,15 @@ fn compile_error_expand( span: Span, ) -> ExpandResult { let err = match &*tt.0 { - [_, tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal { - symbol: text, - span: _, - kind: tt::LitKind::Str | tt::LitKind::StrRaw(_), - suffix: _, - }))] => ExpandError::other(span, Box::from(unescape_str(text).as_str())), + [ + _, + tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal { + symbol: text, + span: _, + kind: tt::LitKind::Str | tt::LitKind::StrRaw(_), + suffix: _, + })), + ] => ExpandError::other(span, Box::from(unescape_str(text).as_str())), _ => ExpandError::other(span, "`compile_error!` argument must be a string"), }; @@ -656,17 +658,17 @@ fn relative_file( allow_recursion: bool, err_span: Span, ) -> Result { - let lookup = call_id.lookup(db); - let call_site = lookup.kind.file_id().original_file_respecting_includes(db).file_id(); + let lookup = db.lookup_intern_macro_call(call_id); + let call_site = lookup.kind.file_id().original_file_respecting_includes(db).file_id(db); let path = AnchoredPath { anchor: call_site, path: path_str }; - let res = db + let res: FileId = db .resolve_path(path) .ok_or_else(|| ExpandError::other(err_span, format!("failed to load file `{path_str}`")))?; // Prevent include itself if res == call_site && !allow_recursion { Err(ExpandError::other(err_span, format!("recursive inclusion of `{path_str}`"))) } else { - Ok(EditionedFileId::new(res, db.crate_graph()[lookup.krate].edition)) + Ok(EditionedFileId::new(db, res, lookup.krate.data(db).edition)) } } @@ -725,19 +727,19 @@ fn include_expand( tt: &tt::TopSubtree, span: Span, ) -> ExpandResult { - let file_id = match include_input_to_file_id(db, arg_id, tt) { - Ok(it) => it, + let editioned_file_id = match include_input_to_file_id(db, arg_id, tt) { + Ok(editioned_file_id) => editioned_file_id, Err(e) => { return ExpandResult::new( tt::TopSubtree::empty(DelimSpan { open: span, close: span }), e, - ) + ); } }; - let span_map = db.real_span_map(file_id); + let span_map = db.real_span_map(editioned_file_id); // FIXME: Parse errors ExpandResult::ok(syntax_node_to_token_tree( - &db.parse(file_id).syntax_node(), + &db.parse(editioned_file_id).syntax_node(), SpanMap::RealSpanMap(span_map), span, syntax_bridge::DocCommentDesugarMode::ProcMacro, @@ -776,15 +778,15 @@ fn include_str_expand( db: &dyn ExpandDatabase, arg_id: MacroCallId, tt: &tt::TopSubtree, - span: Span, + call_site: Span, ) -> ExpandResult { - let (path, span) = match parse_string(tt) { + let (path, input_span) = match parse_string(tt) { Ok(it) => it, Err(e) => { return ExpandResult::new( - tt::TopSubtree::empty(DelimSpan { open: span, close: span }), + tt::TopSubtree::empty(DelimSpan { open: call_site, close: call_site }), e, - ) + ); } }; @@ -792,22 +794,22 @@ fn include_str_expand( // it's unusual to `include_str!` a Rust file), but we can return an empty string. // Ideally, we'd be able to offer a precise expansion if the user asks for macro // expansion. - let file_id = match relative_file(db, arg_id, path.as_str(), true, span) { + let file_id = match relative_file(db, arg_id, path.as_str(), true, input_span) { Ok(file_id) => file_id, Err(_) => { - return ExpandResult::ok(quote!(span =>"")); + return ExpandResult::ok(quote!(call_site =>"")); } }; - let text = db.file_text(file_id.file_id()); - let text = &*text; + let text = db.file_text(file_id.file_id(db)); + let text = &*text.text(db); - ExpandResult::ok(quote!(span =>#text)) + ExpandResult::ok(quote!(call_site =>#text)) } fn get_env_inner(db: &dyn ExpandDatabase, arg_id: MacroCallId, key: &Symbol) -> Option { let krate = db.lookup_intern_macro_call(arg_id).krate; - db.crate_graph()[krate].env.get(key.as_str()) + krate.env(db).get(key.as_str()) } fn env_expand( @@ -822,7 +824,7 @@ fn env_expand( return ExpandResult::new( tt::TopSubtree::empty(DelimSpan { open: span, close: span }), e, - ) + ); } }; @@ -860,7 +862,7 @@ fn option_env_expand( return ExpandResult::new( tt::TopSubtree::empty(DelimSpan { open: call_site, close: call_site }), e, - ) + ); } }; let dollar_crate = dollar_crate(call_site); diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/builtin/quote.rs b/src/tools/rust-analyzer/crates/hir-expand/src/builtin/quote.rs index 9b637fc768446..62b7b638e7b67 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/builtin/quote.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/builtin/quote.rs @@ -1,7 +1,7 @@ //! A simplified version of quote-crate like quasi quote macro #![allow(clippy::crate_in_macro_def)] -use intern::{sym, Symbol}; +use intern::{Symbol, sym}; use span::Span; use syntax::ToSmolStr; use tt::IdentIsRaw; @@ -9,7 +9,7 @@ use tt::IdentIsRaw; use crate::{name::Name, tt::TopSubtreeBuilder}; pub(crate) fn dollar_crate(span: Span) -> tt::Ident { - tt::Ident { sym: sym::dollar_crate.clone(), span, is_raw: tt::IdentIsRaw::No } + tt::Ident { sym: sym::dollar_crate, span, is_raw: tt::IdentIsRaw::No } } // A helper macro quote macro @@ -61,7 +61,7 @@ macro_rules! quote_impl__ { $crate::builtin::quote::__quote!($span $builder $($tail)*); }; - ($span:ident $builder:ident ## $first:ident $($tail:tt)* ) => {{ + ($span:ident $builder:ident # # $first:ident $($tail:tt)* ) => {{ ::std::iter::IntoIterator::into_iter($first).for_each(|it| $crate::builtin::quote::ToTokenTree::to_tokens(it, $span, $builder)); $crate::builtin::quote::__quote!($span $builder $($tail)*); }}; @@ -203,7 +203,7 @@ impl_to_to_tokentrees! { span: u32 => self { crate::tt::Literal{symbol: Symbol::integer(self as _), span, kind: tt::LitKind::Integer, suffix: None } }; span: usize => self { crate::tt::Literal{symbol: Symbol::integer(self as _), span, kind: tt::LitKind::Integer, suffix: None } }; span: i32 => self { crate::tt::Literal{symbol: Symbol::integer(self as _), span, kind: tt::LitKind::Integer, suffix: None } }; - span: bool => self { crate::tt::Ident{sym: if self { sym::true_.clone() } else { sym::false_.clone() }, span, is_raw: tt::IdentIsRaw::No } }; + span: bool => self { crate::tt::Ident{sym: if self { sym::true_ } else { sym::false_ }, span, is_raw: tt::IdentIsRaw::No } }; _span: crate::tt::Leaf => self { self }; _span: crate::tt::Literal => self { self }; _span: crate::tt::Ident => self { self }; @@ -226,7 +226,7 @@ mod tests { use ::tt::IdentIsRaw; use expect_test::expect; use intern::Symbol; - use span::{Edition, SpanAnchor, SyntaxContextId, ROOT_ERASED_FILE_AST_ID}; + use span::{Edition, ROOT_ERASED_FILE_AST_ID, SpanAnchor, SyntaxContext}; use syntax::{TextRange, TextSize}; use super::quote; @@ -240,7 +240,7 @@ mod tests { ), ast_id: ROOT_ERASED_FILE_AST_ID, }, - ctx: SyntaxContextId::root(Edition::CURRENT), + ctx: SyntaxContext::root(Edition::CURRENT), }; #[test] @@ -277,8 +277,8 @@ mod tests { assert_eq!(quoted.to_string(), "hello"); let t = format!("{quoted:#?}"); expect![[r#" - SUBTREE $$ 937550:0@0..0#2 937550:0@0..0#2 - IDENT hello 937550:0@0..0#2"#]] + SUBTREE $$ 937550:0@0..0#ROOT2024 937550:0@0..0#ROOT2024 + IDENT hello 937550:0@0..0#ROOT2024"#]] .assert_eq(&t); } @@ -324,6 +324,9 @@ mod tests { } }; - assert_eq!(quoted.to_string(), "impl Clone for Foo {fn clone (& self) -> Self {Self {name : self . name . clone () , id : self . id . clone () ,}}}"); + assert_eq!( + quoted.to_string(), + "impl Clone for Foo {fn clone (& self) -> Self {Self {name : self . name . clone () , id : self . id . clone () ,}}}" + ); } } diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/cfg_process.rs b/src/tools/rust-analyzer/crates/hir-expand/src/cfg_process.rs index 626a82ae08eab..c6ea4a3a33db8 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/cfg_process.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/cfg_process.rs @@ -1,28 +1,28 @@ //! Processes out #[cfg] and #[cfg_attr] attributes from the input for the derive macro use std::iter::Peekable; -use base_db::CrateId; +use base_db::Crate; use cfg::{CfgAtom, CfgExpr}; -use intern::{sym, Symbol}; +use intern::{Symbol, sym}; use rustc_hash::FxHashSet; use syntax::{ - ast::{self, Attr, HasAttrs, Meta, TokenTree, VariantList}, AstNode, NodeOrToken, SyntaxElement, SyntaxKind, SyntaxNode, T, + ast::{self, Attr, HasAttrs, Meta, TokenTree, VariantList}, }; use tracing::{debug, warn}; -use crate::{db::ExpandDatabase, proc_macro::ProcMacroKind, MacroCallLoc, MacroDefKind}; +use crate::{MacroCallLoc, MacroDefKind, db::ExpandDatabase, proc_macro::ProcMacroKind}; -fn check_cfg(db: &dyn ExpandDatabase, attr: &Attr, krate: CrateId) -> Option { +fn check_cfg(db: &dyn ExpandDatabase, attr: &Attr, krate: Crate) -> Option { if !attr.simple_name().as_deref().map(|v| v == "cfg")? { return None; } let cfg = parse_from_attr_token_tree(&attr.meta()?.token_tree()?)?; - let enabled = db.crate_graph()[krate].cfg_options.check(&cfg) != Some(false); + let enabled = krate.cfg_options(db).check(&cfg) != Some(false); Some(enabled) } -fn check_cfg_attr(db: &dyn ExpandDatabase, attr: &Attr, krate: CrateId) -> Option { +fn check_cfg_attr(db: &dyn ExpandDatabase, attr: &Attr, krate: Crate) -> Option { if !attr.simple_name().as_deref().map(|v| v == "cfg_attr")? { return None; } @@ -32,17 +32,17 @@ fn check_cfg_attr(db: &dyn ExpandDatabase, attr: &Attr, krate: CrateId) -> Optio pub fn check_cfg_attr_value( db: &dyn ExpandDatabase, attr: &TokenTree, - krate: CrateId, + krate: Crate, ) -> Option { let cfg_expr = parse_from_attr_token_tree(attr)?; - let enabled = db.crate_graph()[krate].cfg_options.check(&cfg_expr) != Some(false); + let enabled = krate.cfg_options(db).check(&cfg_expr) != Some(false); Some(enabled) } fn process_has_attrs_with_possible_comma( db: &dyn ExpandDatabase, items: impl Iterator, - krate: CrateId, + krate: Crate, remove: &mut FxHashSet, ) -> Option<()> { for item in items { @@ -144,7 +144,7 @@ fn remove_possible_comma(item: &impl AstNode, res: &mut FxHashSet fn process_enum( db: &dyn ExpandDatabase, variants: VariantList, - krate: CrateId, + krate: Crate, remove: &mut FxHashSet, ) -> Option<()> { 'variant: for variant in variants.variants() { @@ -344,8 +344,8 @@ where #[cfg(test)] mod tests { use cfg::DnfExpr; - use expect_test::{expect, Expect}; - use syntax::{ast::Attr, AstNode, SourceFile}; + use expect_test::{Expect, expect}; + use syntax::{AstNode, SourceFile, ast::Attr}; use crate::cfg_process::parse_from_attr_token_tree; diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/change.rs b/src/tools/rust-analyzer/crates/hir-expand/src/change.rs index 1fdf251ba5210..3959741e6f13e 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/change.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/change.rs @@ -1,29 +1,26 @@ //! Defines a unit of change that can applied to the database to get the next //! state. Changes are transactional. -use base_db::{ - ra_salsa::Durability, CrateGraph, CrateId, CrateWorkspaceData, FileChange, SourceRoot, - SourceRootDatabase, -}; -use rustc_hash::FxHashMap; +use base_db::{CrateGraphBuilder, FileChange, SourceRoot, salsa::Durability}; use span::FileId; use triomphe::Arc; -use crate::{db::ExpandDatabase, proc_macro::ProcMacros}; +use crate::{db::ExpandDatabase, proc_macro::ProcMacrosBuilder}; #[derive(Debug, Default)] pub struct ChangeWithProcMacros { pub source_change: FileChange, - pub proc_macros: Option, + pub proc_macros: Option, } impl ChangeWithProcMacros { - pub fn new() -> Self { - Self::default() - } - - pub fn apply(self, db: &mut (impl ExpandDatabase + SourceRootDatabase)) { - self.source_change.apply(db); + pub fn apply(self, db: &mut impl ExpandDatabase) { + let crates_id_map = self.source_change.apply(db); if let Some(proc_macros) = self.proc_macros { + let proc_macros = proc_macros.build( + crates_id_map + .as_ref() + .expect("cannot set proc macros without setting the crate graph too"), + ); db.set_proc_macros_with_durability(Arc::new(proc_macros), Durability::HIGH); } } @@ -32,16 +29,11 @@ impl ChangeWithProcMacros { self.source_change.change_file(file_id, new_text) } - pub fn set_crate_graph( - &mut self, - graph: CrateGraph, - ws_data: FxHashMap>, - ) { + pub fn set_crate_graph(&mut self, graph: CrateGraphBuilder) { self.source_change.set_crate_graph(graph); - self.source_change.set_ws_data(ws_data); } - pub fn set_proc_macros(&mut self, proc_macros: ProcMacros) { + pub fn set_proc_macros(&mut self, proc_macros: ProcMacrosBuilder) { self.proc_macros = Some(proc_macros); } diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/db.rs b/src/tools/rust-analyzer/crates/hir-expand/src/db.rs index 8ca8bf1ba4a6e..7cb1b6c02075b 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/db.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/db.rs @@ -1,30 +1,27 @@ //! Defines database & queries for macro expansion. -use base_db::{ra_salsa, CrateId, SourceDatabase}; +use base_db::{Crate, RootQueryDb}; use either::Either; use mbe::MatchedArmIndex; use rustc_hash::FxHashSet; -use span::{AstIdMap, Edition, EditionedFileId, Span, SyntaxContextData, SyntaxContextId}; -use syntax::{ast, AstNode, Parse, SyntaxElement, SyntaxError, SyntaxNode, SyntaxToken, T}; -use syntax_bridge::{syntax_node_to_token_tree, DocCommentDesugarMode}; +use span::{AstIdMap, Edition, Span, SyntaxContext}; +use syntax::{AstNode, Parse, SyntaxElement, SyntaxError, SyntaxNode, SyntaxToken, T, ast}; +use syntax_bridge::{DocCommentDesugarMode, syntax_node_to_token_tree}; use triomphe::Arc; use crate::{ - attrs::{collect_attrs, AttrId}, + AstId, BuiltinAttrExpander, BuiltinDeriveExpander, BuiltinFnLikeExpander, EagerCallInfo, + EagerExpander, EditionedFileId, ExpandError, ExpandResult, ExpandTo, HirFileId, MacroCallId, + MacroCallKind, MacroCallLoc, MacroDefId, MacroDefKind, + attrs::{AttrId, collect_attrs}, builtin::pseudo_derive_attr_expansion, cfg_process, declarative::DeclarativeMacroExpander, fixup::{self, SyntaxFixupUndoInfo}, - hygiene::{ - span_with_call_site_ctxt, span_with_def_site_ctxt, span_with_mixed_site_ctxt, - SyntaxContextExt as _, - }, - proc_macro::ProcMacros, - span_map::{RealSpanMap, SpanMap, SpanMapRef}, - tt, AstId, BuiltinAttrExpander, BuiltinDeriveExpander, BuiltinFnLikeExpander, - CustomProcMacroExpander, EagerCallInfo, EagerExpander, ExpandError, ExpandResult, ExpandTo, - ExpansionSpanMap, HirFileId, HirFileIdRepr, Lookup, MacroCallId, MacroCallKind, MacroCallLoc, - MacroDefId, MacroDefKind, MacroFileId, + hygiene::{span_with_call_site_ctxt, span_with_def_site_ctxt, span_with_mixed_site_ctxt}, + proc_macro::{CrateProcMacros, CustomProcMacroExpander, ProcMacros}, + span_map::{ExpansionSpanMap, RealSpanMap, SpanMap, SpanMapRef}, + tt, }; /// This is just to ensure the types of smart_macro_arg and macro_arg are the same type MacroArgResult = (Arc, SyntaxFixupUndoInfo, Span); @@ -52,32 +49,37 @@ pub enum TokenExpander { ProcMacro(CustomProcMacroExpander), } -#[ra_salsa::query_group(ExpandDatabaseStorage)] -pub trait ExpandDatabase: SourceDatabase { - /// The proc macros. - #[ra_salsa::input] +#[query_group::query_group] +pub trait ExpandDatabase: RootQueryDb { + /// The proc macros. Do not use this! Use `proc_macros_for_crate()` instead. + #[salsa::input] fn proc_macros(&self) -> Arc; + /// Incrementality query to prevent queries from directly depending on `ExpandDatabase::proc_macros`. + #[salsa::invoke(crate::proc_macro::proc_macros_for_crate)] + fn proc_macros_for_crate(&self, krate: Crate) -> Option>; + + #[salsa::invoke(ast_id_map)] fn ast_id_map(&self, file_id: HirFileId) -> Arc; - /// Main public API -- parses a hir file, not caring whether it's a real - /// file or a macro expansion. - #[ra_salsa::transparent] + #[salsa::transparent] fn parse_or_expand(&self, file_id: HirFileId) -> SyntaxNode; + /// Implementation for the macro case. - #[ra_salsa::lru] + #[salsa::lru(512)] fn parse_macro_expansion( &self, - macro_file: MacroFileId, + macro_file: MacroCallId, ) -> ExpandResult<(Parse, Arc)>; - #[ra_salsa::transparent] - #[ra_salsa::invoke(SpanMap::new)] + + #[salsa::transparent] + #[salsa::invoke(SpanMap::new)] fn span_map(&self, file_id: HirFileId) -> SpanMap; - #[ra_salsa::transparent] - #[ra_salsa::invoke(crate::span_map::expansion_span_map)] - fn expansion_span_map(&self, file_id: MacroFileId) -> Arc; - #[ra_salsa::invoke(crate::span_map::real_span_map)] + #[salsa::transparent] + #[salsa::invoke(crate::span_map::expansion_span_map)] + fn expansion_span_map(&self, file_id: MacroCallId) -> Arc; + #[salsa::invoke(crate::span_map::real_span_map)] fn real_span_map(&self, file_id: EditionedFileId) -> Arc; /// Macro ids. That's probably the tricksiest bit in rust-analyzer, and the @@ -85,66 +87,74 @@ pub trait ExpandDatabase: SourceDatabase { /// /// We encode macro definitions into ids of macro calls, this what allows us /// to be incremental. - #[ra_salsa::interned] + #[salsa::transparent] fn intern_macro_call(&self, macro_call: MacroCallLoc) -> MacroCallId; - #[ra_salsa::interned] - fn intern_syntax_context(&self, ctx: SyntaxContextData) -> SyntaxContextId; - - #[ra_salsa::transparent] - fn setup_syntax_context_root(&self) -> (); - #[ra_salsa::transparent] - #[ra_salsa::invoke(crate::hygiene::dump_syntax_contexts)] - fn dump_syntax_contexts(&self) -> String; + #[salsa::transparent] + fn lookup_intern_macro_call(&self, macro_call: MacroCallId) -> MacroCallLoc; /// Lowers syntactic macro call to a token tree representation. That's a firewall /// query, only typing in the macro call itself changes the returned /// subtree. #[deprecated = "calling this is incorrect, call `macro_arg_considering_derives` instead"] + #[salsa::invoke(macro_arg)] fn macro_arg(&self, id: MacroCallId) -> MacroArgResult; - #[ra_salsa::transparent] + + #[salsa::transparent] fn macro_arg_considering_derives( &self, id: MacroCallId, kind: &MacroCallKind, ) -> MacroArgResult; + /// Fetches the expander for this macro. - #[ra_salsa::transparent] - #[ra_salsa::invoke(TokenExpander::macro_expander)] + #[salsa::transparent] + #[salsa::invoke(TokenExpander::macro_expander)] fn macro_expander(&self, id: MacroDefId) -> TokenExpander; + /// Fetches (and compiles) the expander of this decl macro. - #[ra_salsa::invoke(DeclarativeMacroExpander::expander)] + #[salsa::invoke(DeclarativeMacroExpander::expander)] fn decl_macro_expander( &self, - def_crate: CrateId, + def_crate: Crate, id: AstId, ) -> Arc; + /// Special case of the previous query for procedural macros. We can't LRU /// proc macros, since they are not deterministic in general, and /// non-determinism breaks salsa in a very, very, very bad way. /// @edwin0cheng heroically debugged this once! See #4315 for details + #[salsa::invoke(expand_proc_macro)] fn expand_proc_macro(&self, call: MacroCallId) -> ExpandResult>; /// Retrieves the span to be used for a proc-macro expansions spans. /// This is a firewall query as it requires parsing the file, which we don't want proc-macros to /// directly depend on as that would cause to frequent invalidations, mainly because of the /// parse queries being LRU cached. If they weren't the invalidations would only happen if the /// user wrote in the file that defines the proc-macro. + #[salsa::invoke_interned(proc_macro_span)] fn proc_macro_span(&self, fun: AstId) -> Span; + /// Firewall query that returns the errors from the `parse_macro_expansion` query. + #[salsa::invoke(parse_macro_expansion_error)] fn parse_macro_expansion_error( &self, macro_call: MacroCallId, ) -> Option>>>; - #[ra_salsa::transparent] - fn syntax_context(&self, file: HirFileId, edition: Edition) -> SyntaxContextId; + + #[salsa::transparent] + fn syntax_context(&self, file: HirFileId, edition: Edition) -> SyntaxContext; +} + +#[salsa_macros::interned(no_lifetime, id = span::SyntaxContext)] +pub struct SyntaxContextWrapper { + pub data: SyntaxContext, } -fn syntax_context(db: &dyn ExpandDatabase, file: HirFileId, edition: Edition) -> SyntaxContextId { - match file.repr() { - HirFileIdRepr::FileId(_) => SyntaxContextId::root(edition), - HirFileIdRepr::MacroFile(m) => { - db.macro_arg_considering_derives(m.macro_call_id, &m.macro_call_id.lookup(db).kind) - .2 - .ctx +fn syntax_context(db: &dyn ExpandDatabase, file: HirFileId, edition: Edition) -> SyntaxContext { + match file { + HirFileId::FileId(_) => SyntaxContext::root(edition), + HirFileId::MacroFile(m) => { + let kind = db.lookup_intern_macro_call(m).kind; + db.macro_arg_considering_derives(m, &kind).2.ctx } } } @@ -272,9 +282,9 @@ pub fn expand_speculative( loc.krate, &tt, attr_arg.as_ref(), - span_with_def_site_ctxt(db, span, actual_macro_call, loc.def.edition), - span_with_call_site_ctxt(db, span, actual_macro_call, loc.def.edition), - span_with_mixed_site_ctxt(db, span, actual_macro_call, loc.def.edition), + span_with_def_site_ctxt(db, span, actual_macro_call.into(), loc.def.edition), + span_with_call_site_ctxt(db, span, actual_macro_call.into(), loc.def.edition), + span_with_mixed_site_ctxt(db, span, actual_macro_call.into(), loc.def.edition), ) } MacroDefKind::BuiltInAttr(_, it) if it.is_derive() => { @@ -318,14 +328,16 @@ pub fn expand_speculative( Some((node.syntax_node(), token)) } -fn ast_id_map(db: &dyn ExpandDatabase, file_id: span::HirFileId) -> triomphe::Arc { +fn ast_id_map(db: &dyn ExpandDatabase, file_id: HirFileId) -> triomphe::Arc { triomphe::Arc::new(AstIdMap::from_source(&db.parse_or_expand(file_id))) } +/// Main public API -- parses a hir file, not caring whether it's a real +/// file or a macro expansion. fn parse_or_expand(db: &dyn ExpandDatabase, file_id: HirFileId) -> SyntaxNode { - match file_id.repr() { - HirFileIdRepr::FileId(file_id) => db.parse(file_id).syntax_node(), - HirFileIdRepr::MacroFile(macro_file) => { + match file_id { + HirFileId::FileId(file_id) => db.parse(file_id).syntax_node(), + HirFileId::MacroFile(macro_file) => { db.parse_macro_expansion(macro_file).value.0.syntax_node() } } @@ -335,14 +347,13 @@ fn parse_or_expand(db: &dyn ExpandDatabase, file_id: HirFileId) -> SyntaxNode { // instead of having it be untyped fn parse_macro_expansion( db: &dyn ExpandDatabase, - macro_file: MacroFileId, + macro_file: MacroCallId, ) -> ExpandResult<(Parse, Arc)> { let _p = tracing::info_span!("parse_macro_expansion").entered(); - let loc = db.lookup_intern_macro_call(macro_file.macro_call_id); + let loc = db.lookup_intern_macro_call(macro_file); let def_edition = loc.def.edition; let expand_to = loc.expand_to(); - let mbe::ValueResult { value: (tt, matched_arm), err } = - macro_expand(db, macro_file.macro_call_id, loc); + let mbe::ValueResult { value: (tt, matched_arm), err } = macro_expand(db, macro_file, loc); let (parse, mut rev_token_map) = token_tree_to_syntax_node( db, @@ -363,23 +374,19 @@ fn parse_macro_expansion_error( macro_call_id: MacroCallId, ) -> Option>>> { let e: ExpandResult> = - db.parse_macro_expansion(MacroFileId { macro_call_id }).map(|it| Arc::from(it.0.errors())); - if e.value.is_empty() && e.err.is_none() { - None - } else { - Some(Arc::new(e)) - } + db.parse_macro_expansion(macro_call_id).map(|it| Arc::from(it.0.errors())); + if e.value.is_empty() && e.err.is_none() { None } else { Some(Arc::new(e)) } } pub(crate) fn parse_with_map( db: &dyn ExpandDatabase, file_id: HirFileId, ) -> (Parse, SpanMap) { - match file_id.repr() { - HirFileIdRepr::FileId(file_id) => { + match file_id { + HirFileId::FileId(file_id) => { (db.parse(file_id).to_syntax(), SpanMap::RealSpanMap(db.real_span_map(file_id))) } - HirFileIdRepr::MacroFile(macro_file) => { + HirFileId::MacroFile(macro_file) => { let (parse, map) = db.parse_macro_expansion(macro_file).value; (parse, SpanMap::ExpansionSpanMap(map)) } @@ -597,7 +604,7 @@ fn macro_expand( let (ExpandResult { value: (tt, matched_arm), err }, span) = match loc.def.kind { MacroDefKind::ProcMacro(..) => { - return db.expand_proc_macro(macro_call_id).map(CowArc::Arc).zip_val(None) + return db.expand_proc_macro(macro_call_id).map(CowArc::Arc).zip_val(None); } _ => { let (macro_arg, undo_info, span) = @@ -699,9 +706,9 @@ fn expand_proc_macro( loc.krate, ¯o_arg, attr_arg, - span_with_def_site_ctxt(db, span, id, loc.def.edition), - span_with_call_site_ctxt(db, span, id, loc.def.edition), - span_with_mixed_site_ctxt(db, span, id, loc.def.edition), + span_with_def_site_ctxt(db, span, id.into(), loc.def.edition), + span_with_call_site_ctxt(db, span, id.into(), loc.def.edition), + span_with_mixed_site_ctxt(db, span, id.into(), loc.def.edition), ) }; @@ -728,12 +735,7 @@ pub(crate) fn token_tree_to_syntax_node( ExpandTo::Type => syntax_bridge::TopEntryPoint::Type, ExpandTo::Expr => syntax_bridge::TopEntryPoint::Expr, }; - syntax_bridge::token_tree_to_syntax_node( - tt, - entry_point, - &mut |ctx| ctx.lookup(db).edition, - edition, - ) + syntax_bridge::token_tree_to_syntax_node(tt, entry_point, &mut |ctx| ctx.edition(db), edition) } fn check_tt_count(tt: &tt::TopSubtree) -> Result<(), ExpandResult<()>> { @@ -747,16 +749,17 @@ fn check_tt_count(tt: &tt::TopSubtree) -> Result<(), ExpandResult<()>> { err: Some(ExpandError::other( tt.delimiter.open, format!( - "macro invocation exceeds token limit: produced {} tokens, limit is {}", - count, TOKEN_LIMIT, + "macro invocation exceeds token limit: produced {count} tokens, limit is {TOKEN_LIMIT}", ), )), }) } } -fn setup_syntax_context_root(db: &dyn ExpandDatabase) { - for edition in Edition::iter() { - db.intern_syntax_context(SyntaxContextData::root(edition)); - } +fn intern_macro_call(db: &dyn ExpandDatabase, macro_call: MacroCallLoc) -> MacroCallId { + MacroCallId::new(db, macro_call) +} + +fn lookup_intern_macro_call(db: &dyn ExpandDatabase, macro_call: MacroCallId) -> MacroCallLoc { + macro_call.loc(db) } diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/declarative.rs b/src/tools/rust-analyzer/crates/hir-expand/src/declarative.rs index fef77acb7bbb7..0d100c1364ab1 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/declarative.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/declarative.rs @@ -1,18 +1,19 @@ //! Compiled declarative macro expanders (`macro_rules!` and `macro`) -use base_db::CrateId; +use base_db::Crate; use intern::sym; -use span::{Edition, HirFileIdRepr, MacroCallId, Span, SyntaxContextId}; +use span::{Edition, Span, SyntaxContext}; use stdx::TupleExt; -use syntax::{ast, AstNode}; +use syntax::{AstNode, ast}; use syntax_bridge::DocCommentDesugarMode; use triomphe::Arc; use crate::{ + AstId, ExpandError, ExpandErrorKind, ExpandResult, HirFileId, Lookup, MacroCallId, attrs::RawAttrs, db::ExpandDatabase, - hygiene::{apply_mark, Transparency}, - tt, AstId, ExpandError, ExpandErrorKind, ExpandResult, Lookup, + hygiene::{Transparency, apply_mark}, + tt, }; /// Old-style `macro_rules` or the new macros 2.0 @@ -41,7 +42,10 @@ impl DeclarativeMacroExpander { .mac .expand( &tt, - |s| s.ctx = apply_mark(db, s.ctx, call_id, self.transparency, self.edition), + |s| { + s.ctx = + apply_mark(db, s.ctx, call_id.into(), self.transparency, self.edition) + }, span, loc.def.edition, ) @@ -70,7 +74,7 @@ impl DeclarativeMacroExpander { pub(crate) fn expander( db: &dyn ExpandDatabase, - def_crate: CrateId, + def_crate: Crate, id: AstId, ) -> Arc { let (root, map) = crate::db::parse_with_map(db, id.file_id); @@ -78,13 +82,13 @@ impl DeclarativeMacroExpander { let transparency = |node| { // ... would be nice to have the item tree here - let attrs = RawAttrs::new(db, node, map.as_ref()).filter(db, def_crate); + let attrs = RawAttrs::new_expanded(db, node, map.as_ref(), def_crate.cfg_options(db)); match attrs .iter() .find(|it| { it.path .as_ident() - .map(|it| *it == sym::rustc_macro_transparency.clone()) + .map(|it| *it == sym::rustc_macro_transparency) .unwrap_or(false) })? .token_tree_value()? @@ -100,14 +104,14 @@ impl DeclarativeMacroExpander { _ => None, } }; - let ctx_edition = |ctx: SyntaxContextId| { - let crate_graph = db.crate_graph(); + let ctx_edition = |ctx: SyntaxContext| { if ctx.is_root() { - crate_graph[def_crate].edition + def_crate.data(db).edition } else { - let data = db.lookup_intern_syntax_context(ctx); // UNWRAP-SAFETY: Only the root context has no outer expansion - crate_graph[data.outer_expn.unwrap().lookup(db).def.krate].edition + let krate = + db.lookup_intern_macro_call(ctx.outer_expn(db).unwrap().into()).def.krate; + krate.data(db).edition } }; let (mac, transparency) = match id.to_ptr(db).to_node(&root) { @@ -160,9 +164,9 @@ impl DeclarativeMacroExpander { transparency(¯o_def).unwrap_or(Transparency::Opaque), ), }; - let edition = ctx_edition(match id.file_id.repr() { - HirFileIdRepr::MacroFile(macro_file) => macro_file.macro_call_id.lookup(db).ctxt, - HirFileIdRepr::FileId(file) => SyntaxContextId::root(file.edition()), + let edition = ctx_edition(match id.file_id { + HirFileId::MacroFile(macro_file) => macro_file.lookup(db).ctxt, + HirFileId::FileId(file) => SyntaxContext::root(file.edition(db)), }); Arc::new(DeclarativeMacroExpander { mac, transparency, edition }) } diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/eager.rs b/src/tools/rust-analyzer/crates/hir-expand/src/eager.rs index f476d1b564c4c..28d3fcdab9dba 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/eager.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/eager.rs @@ -18,28 +18,34 @@ //! //! //! See the full discussion : -use base_db::CrateId; -use span::SyntaxContextId; -use syntax::{ted, Parse, SyntaxElement, SyntaxNode, TextSize, WalkEvent}; +use base_db::Crate; +use span::SyntaxContext; +use syntax::{AstPtr, Parse, SyntaxElement, SyntaxNode, TextSize, WalkEvent, ted}; use syntax_bridge::DocCommentDesugarMode; use triomphe::Arc; use crate::{ + AstId, EagerCallInfo, ExpandError, ExpandResult, ExpandTo, ExpansionSpanMap, InFile, + MacroCallId, MacroCallKind, MacroCallLoc, MacroDefId, MacroDefKind, ast::{self, AstNode}, db::ExpandDatabase, mod_path::ModPath, - AstId, EagerCallInfo, ExpandError, ExpandResult, ExpandTo, ExpansionSpanMap, InFile, Intern, - MacroCallId, MacroCallKind, MacroCallLoc, MacroDefId, MacroDefKind, }; +pub type EagerCallBackFn<'a> = &'a mut dyn FnMut( + InFile<(syntax::AstPtr, span::FileAstId)>, + MacroCallId, +); + pub fn expand_eager_macro_input( db: &dyn ExpandDatabase, - krate: CrateId, + krate: Crate, macro_call: &ast::MacroCall, ast_id: AstId, def: MacroDefId, - call_site: SyntaxContextId, + call_site: SyntaxContext, resolver: &dyn Fn(&ModPath) -> Option, + eager_callback: EagerCallBackFn<'_>, ) -> ExpandResult> { let expand_to = ExpandTo::from_call_site(macro_call); @@ -47,17 +53,17 @@ pub fn expand_eager_macro_input( // When `lazy_expand` is called, its *parent* file must already exist. // Here we store an eager macro id for the argument expanded subtree // for that purpose. - let arg_id = MacroCallLoc { + let loc = MacroCallLoc { def, krate, kind: MacroCallKind::FnLike { ast_id, expand_to: ExpandTo::Expr, eager: None }, ctxt: call_site, - } - .intern(db); + }; + let arg_id = db.intern_macro_call(loc); #[allow(deprecated)] // builtin eager macros are never derives let (_, _, span) = db.macro_arg(arg_id); let ExpandResult { value: (arg_exp, arg_exp_map), err: parse_err } = - db.parse_macro_expansion(arg_id.as_macro_file()); + db.parse_macro_expansion(arg_id); let mut arg_map = ExpansionSpanMap::empty(); @@ -67,10 +73,11 @@ pub fn expand_eager_macro_input( &arg_exp_map, &mut arg_map, TextSize::new(0), - InFile::new(arg_id.as_file(), arg_exp.syntax_node()), + InFile::new(arg_id.into(), arg_exp.syntax_node()), krate, call_site, resolver, + eager_callback, ) }; let err = parse_err.or(err); @@ -107,7 +114,7 @@ pub fn expand_eager_macro_input( ctxt: call_site, }; - ExpandResult { value: Some(loc.intern(db)), err } + ExpandResult { value: Some(db.intern_macro_call(loc)), err } } fn lazy_expand( @@ -115,8 +122,9 @@ fn lazy_expand( def: &MacroDefId, macro_call: &ast::MacroCall, ast_id: AstId, - krate: CrateId, - call_site: SyntaxContextId, + krate: Crate, + call_site: SyntaxContext, + eager_callback: EagerCallBackFn<'_>, ) -> ExpandResult<(InFile>, Arc)> { let expand_to = ExpandTo::from_call_site(macro_call); let id = def.make_call( @@ -125,10 +133,9 @@ fn lazy_expand( MacroCallKind::FnLike { ast_id, expand_to, eager: None }, call_site, ); - let macro_file = id.as_macro_file(); + eager_callback(ast_id.map(|ast_id| (AstPtr::new(macro_call), ast_id)), id); - db.parse_macro_expansion(macro_file) - .map(|parse| (InFile::new(macro_file.into(), parse.0), parse.1)) + db.parse_macro_expansion(id).map(|parse| (InFile::new(id.into(), parse.0), parse.1)) } fn eager_macro_recur( @@ -137,9 +144,10 @@ fn eager_macro_recur( expanded_map: &mut ExpansionSpanMap, mut offset: TextSize, curr: InFile, - krate: CrateId, - call_site: SyntaxContextId, + krate: Crate, + call_site: SyntaxContext, macro_resolver: &dyn Fn(&ModPath) -> Option, + eager_callback: EagerCallBackFn<'_>, ) -> ExpandResult> { let original = curr.value.clone_for_update(); @@ -176,7 +184,7 @@ fn eager_macro_recur( Some(path) => match macro_resolver(&path) { Some(def) => def, None => { - let edition = db.crate_graph()[krate].edition; + let edition = krate.data(db).edition; error = Some(ExpandError::other( span_map.span_at(call.syntax().text_range().start()), format!("unresolved macro {}", path.display(db, edition)), @@ -205,11 +213,16 @@ fn eager_macro_recur( def, call_site, macro_resolver, + eager_callback, ); match value { Some(call_id) => { + eager_callback( + curr.with_value(ast_id).map(|ast_id| (AstPtr::new(&call), ast_id)), + call_id, + ); let ExpandResult { value: (parse, map), err: err2 } = - db.parse_macro_expansion(call_id.as_macro_file()); + db.parse_macro_expansion(call_id); map.iter().for_each(|(o, span)| expanded_map.push(o + offset, span)); @@ -230,8 +243,15 @@ fn eager_macro_recur( | MacroDefKind::BuiltInAttr(..) | MacroDefKind::BuiltInDerive(..) | MacroDefKind::ProcMacro(..) => { - let ExpandResult { value: (parse, tm), err } = - lazy_expand(db, &def, &call, curr.with_value(ast_id), krate, call_site); + let ExpandResult { value: (parse, tm), err } = lazy_expand( + db, + &def, + &call, + curr.with_value(ast_id), + krate, + call_site, + eager_callback, + ); // replace macro inside let ExpandResult { value, err: error } = eager_macro_recur( @@ -244,6 +264,7 @@ fn eager_macro_recur( krate, call_site, macro_resolver, + eager_callback, ); let err = err.or(error); diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/files.rs b/src/tools/rust-analyzer/crates/hir-expand/src/files.rs index f3bcc77268224..321ee8feb9a8c 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/files.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/files.rs @@ -2,15 +2,13 @@ use std::borrow::Borrow; use either::Either; -use span::{ - AstIdNode, EditionedFileId, ErasedFileAstId, FileAstId, HirFileId, HirFileIdRepr, MacroFileId, - SyntaxContextId, -}; +use span::{AstIdNode, ErasedFileAstId, FileAstId, FileId, SyntaxContext}; use syntax::{AstNode, AstPtr, SyntaxNode, SyntaxNodePtr, SyntaxToken, TextRange, TextSize}; use crate::{ + EditionedFileId, HirFileId, MacroCallId, MacroKind, db::{self, ExpandDatabase}, - map_node_range_up, map_node_range_up_rooted, span_for_offset, MacroFileIdExt, MacroKind, + map_node_range_up, map_node_range_up_rooted, span_for_offset, }; /// `InFile` stores a value of `T` inside a particular file/syntax tree. @@ -26,7 +24,7 @@ pub struct InFileWrapper { pub value: T, } pub type InFile = InFileWrapper; -pub type InMacroFile = InFileWrapper; +pub type InMacroFile = InFileWrapper; pub type InRealFile = InFileWrapper; #[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)] @@ -35,12 +33,13 @@ pub struct FilePositionWrapper { pub offset: TextSize, } pub type HirFilePosition = FilePositionWrapper; -pub type MacroFilePosition = FilePositionWrapper; +pub type MacroFilePosition = FilePositionWrapper; pub type FilePosition = FilePositionWrapper; -impl From> for FilePositionWrapper { - fn from(value: FilePositionWrapper) -> Self { - FilePositionWrapper { file_id: value.file_id.into(), offset: value.offset } +impl FilePosition { + #[inline] + pub fn into_file_id(self, db: &dyn ExpandDatabase) -> FilePositionWrapper { + FilePositionWrapper { file_id: self.file_id.file_id(db), offset: self.offset } } } #[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)] @@ -49,12 +48,13 @@ pub struct FileRangeWrapper { pub range: TextRange, } pub type HirFileRange = FileRangeWrapper; -pub type MacroFileRange = FileRangeWrapper; +pub type MacroFileRange = FileRangeWrapper; pub type FileRange = FileRangeWrapper; -impl From> for FileRangeWrapper { - fn from(value: FileRangeWrapper) -> Self { - FileRangeWrapper { file_id: value.file_id.into(), range: value.range } +impl FileRange { + #[inline] + pub fn into_file_id(self, db: &dyn ExpandDatabase) -> FileRangeWrapper { + FileRangeWrapper { file_id: self.file_id.file_id(db), range: self.range } } } @@ -76,6 +76,9 @@ impl AstId { pub fn to_ptr(&self, db: &dyn ExpandDatabase) -> AstPtr { db.ast_id_map(self.file_id).get(self.value) } + pub fn erase(&self) -> ErasedAstId { + crate::InFile::new(self.file_id, self.value.erase()) + } } pub type ErasedAstId = crate::InFile; @@ -162,7 +165,7 @@ impl FileIdToSyntax for EditionedFileId { db.parse(self).syntax_node() } } -impl FileIdToSyntax for MacroFileId { +impl FileIdToSyntax for MacroCallId { fn file_syntax(self, db: &dyn db::ExpandDatabase) -> SyntaxNode { db.parse_macro_expansion(self).value.0.syntax_node() } @@ -215,7 +218,7 @@ impl> InFile { let succ = move |node: &InFile| match node.value.parent() { Some(parent) => Some(node.with_value(parent)), None => db - .lookup_intern_macro_call(node.file_id.macro_file()?.macro_call_id) + .lookup_intern_macro_call(node.file_id.macro_file()?) .to_node_item(db) .syntax() .cloned() @@ -232,7 +235,7 @@ impl> InFile { let succ = move |node: &InFile| match node.value.parent() { Some(parent) => Some(node.with_value(parent)), None => db - .lookup_intern_macro_call(node.file_id.macro_file()?.macro_call_id) + .lookup_intern_macro_call(node.file_id.macro_file()?) .to_node_item(db) .syntax() .cloned() @@ -272,11 +275,11 @@ impl> InFile { ) -> Option> { // This kind of upmapping can only be achieved in attribute expanded files, // as we don't have node inputs otherwise and therefore can't find an `N` node in the input - let file_id = match self.file_id.repr() { - HirFileIdRepr::FileId(file_id) => { - return Some(InRealFile { file_id, value: self.value.borrow().clone() }) + let file_id = match self.file_id { + HirFileId::FileId(file_id) => { + return Some(InRealFile { file_id, value: self.value.borrow().clone() }); } - HirFileIdRepr::MacroFile(m) + HirFileId::MacroFile(m) if matches!(m.kind(db), MacroKind::Attr | MacroKind::AttrBuiltIn) => { m @@ -284,7 +287,7 @@ impl> InFile { _ => return None, }; - let FileRange { file_id, range } = map_node_range_up_rooted( + let FileRange { file_id: editioned_file_id, range } = map_node_range_up_rooted( db, &db.expansion_span_map(file_id), self.value.borrow().text_range(), @@ -292,13 +295,13 @@ impl> InFile { let kind = self.kind(); let value = db - .parse(file_id) + .parse(editioned_file_id) .syntax_node() .covering_element(range) .ancestors() .take_while(|it| it.text_range() == range) .find(|it| it.kind() == kind)?; - Some(InRealFile::new(file_id, value)) + Some(InRealFile::new(editioned_file_id, value)) } } @@ -307,7 +310,7 @@ impl InFile<&SyntaxNode> { pub fn original_file_range_opt( self, db: &dyn db::ExpandDatabase, - ) -> Option<(FileRange, SyntaxContextId)> { + ) -> Option<(FileRange, SyntaxContext)> { self.borrow().map(SyntaxNode::text_range).original_node_file_range_opt(db) } } @@ -324,9 +327,9 @@ impl InMacroFile { impl InFile { /// Falls back to the macro call range if the node cannot be mapped up fully. pub fn original_file_range(self, db: &dyn db::ExpandDatabase) -> FileRange { - match self.file_id.repr() { - HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value.text_range() }, - HirFileIdRepr::MacroFile(mac_file) => { + match self.file_id { + HirFileId::FileId(file_id) => FileRange { file_id, range: self.value.text_range() }, + HirFileId::MacroFile(mac_file) => { let (range, ctxt) = span_for_offset( db, &db.expansion_span_map(mac_file), @@ -340,7 +343,7 @@ impl InFile { } // Fall back to whole macro call. - let loc = db.lookup_intern_macro_call(mac_file.macro_call_id); + let loc = db.lookup_intern_macro_call(mac_file); loc.kind.original_call_range(db) } } @@ -348,11 +351,11 @@ impl InFile { /// Attempts to map the syntax node back up its macro calls. pub fn original_file_range_opt(self, db: &dyn db::ExpandDatabase) -> Option { - match self.file_id.repr() { - HirFileIdRepr::FileId(file_id) => { + match self.file_id { + HirFileId::FileId(file_id) => { Some(FileRange { file_id, range: self.value.text_range() }) } - HirFileIdRepr::MacroFile(mac_file) => { + HirFileId::MacroFile(mac_file) => { let (range, ctxt) = span_for_offset( db, &db.expansion_span_map(mac_file), @@ -361,18 +364,14 @@ impl InFile { // FIXME: Figure out an API that makes proper use of ctx, this only exists to // keep pre-token map rewrite behaviour. - if ctxt.is_root() { - Some(range) - } else { - None - } + if ctxt.is_root() { Some(range) } else { None } } } } } impl InMacroFile { - pub fn original_file_range(self, db: &dyn db::ExpandDatabase) -> (FileRange, SyntaxContextId) { + pub fn original_file_range(self, db: &dyn db::ExpandDatabase) -> (FileRange, SyntaxContext) { span_for_offset(db, &db.expansion_span_map(self.file_id), self.value) } } @@ -381,17 +380,17 @@ impl InFile { pub fn original_node_file_range( self, db: &dyn db::ExpandDatabase, - ) -> (FileRange, SyntaxContextId) { - match self.file_id.repr() { - HirFileIdRepr::FileId(file_id) => { - (FileRange { file_id, range: self.value }, SyntaxContextId::root(file_id.edition())) + ) -> (FileRange, SyntaxContext) { + match self.file_id { + HirFileId::FileId(file_id) => { + (FileRange { file_id, range: self.value }, SyntaxContext::root(file_id.edition(db))) } - HirFileIdRepr::MacroFile(mac_file) => { + HirFileId::MacroFile(mac_file) => { match map_node_range_up(db, &db.expansion_span_map(mac_file), self.value) { Some(it) => it, None => { - let loc = db.lookup_intern_macro_call(mac_file.macro_call_id); - (loc.kind.original_call_range(db), SyntaxContextId::root(loc.def.edition)) + let loc = db.lookup_intern_macro_call(mac_file); + (loc.kind.original_call_range(db), SyntaxContext::root(loc.def.edition)) } } } @@ -399,13 +398,13 @@ impl InFile { } pub fn original_node_file_range_rooted(self, db: &dyn db::ExpandDatabase) -> FileRange { - match self.file_id.repr() { - HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value }, - HirFileIdRepr::MacroFile(mac_file) => { + match self.file_id { + HirFileId::FileId(file_id) => FileRange { file_id, range: self.value }, + HirFileId::MacroFile(mac_file) => { match map_node_range_up_rooted(db, &db.expansion_span_map(mac_file), self.value) { Some(it) => it, _ => { - let loc = db.lookup_intern_macro_call(mac_file.macro_call_id); + let loc = db.lookup_intern_macro_call(mac_file); loc.kind.original_call_range(db) } } @@ -417,13 +416,13 @@ impl InFile { self, db: &dyn db::ExpandDatabase, ) -> FileRange { - match self.file_id.repr() { - HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value }, - HirFileIdRepr::MacroFile(mac_file) => { + match self.file_id { + HirFileId::FileId(file_id) => FileRange { file_id, range: self.value }, + HirFileId::MacroFile(mac_file) => { match map_node_range_up_rooted(db, &db.expansion_span_map(mac_file), self.value) { Some(it) => it, _ => { - let loc = db.lookup_intern_macro_call(mac_file.macro_call_id); + let loc = db.lookup_intern_macro_call(mac_file); loc.kind.original_call_range_with_body(db) } } @@ -434,13 +433,13 @@ impl InFile { pub fn original_node_file_range_opt( self, db: &dyn db::ExpandDatabase, - ) -> Option<(FileRange, SyntaxContextId)> { - match self.file_id.repr() { - HirFileIdRepr::FileId(file_id) => Some(( + ) -> Option<(FileRange, SyntaxContext)> { + match self.file_id { + HirFileId::FileId(file_id) => Some(( FileRange { file_id, range: self.value }, - SyntaxContextId::root(file_id.edition()), + SyntaxContext::root(file_id.edition(db)), )), - HirFileIdRepr::MacroFile(mac_file) => { + HirFileId::MacroFile(mac_file) => { map_node_range_up(db, &db.expansion_span_map(mac_file), self.value) } } @@ -451,34 +450,34 @@ impl InFile { pub fn original_ast_node_rooted(self, db: &dyn db::ExpandDatabase) -> Option> { // This kind of upmapping can only be achieved in attribute expanded files, // as we don't have node inputs otherwise and therefore can't find an `N` node in the input - let file_id = match self.file_id.repr() { - HirFileIdRepr::FileId(file_id) => { - return Some(InRealFile { file_id, value: self.value }) + let file_id = match self.file_id { + HirFileId::FileId(file_id) => { + return Some(InRealFile { file_id, value: self.value }); } - HirFileIdRepr::MacroFile(m) => m, + HirFileId::MacroFile(m) => m, }; if !matches!(file_id.kind(db), MacroKind::Attr | MacroKind::AttrBuiltIn) { return None; } - let FileRange { file_id, range } = map_node_range_up_rooted( + let FileRange { file_id: editioned_file_id, range } = map_node_range_up_rooted( db, &db.expansion_span_map(file_id), self.value.syntax().text_range(), )?; // FIXME: This heuristic is brittle and with the right macro may select completely unrelated nodes? - let anc = db.parse(file_id).syntax_node().covering_element(range); + let anc = db.parse(editioned_file_id).syntax_node().covering_element(range); let value = anc.ancestors().find_map(N::cast)?; - Some(InRealFile::new(file_id, value)) + Some(InRealFile::new(editioned_file_id, value)) } } impl InFile { pub fn into_real_file(self) -> Result, InFile> { - match self.file_id.repr() { - HirFileIdRepr::FileId(file_id) => Ok(InRealFile { file_id, value: self.value }), - HirFileIdRepr::MacroFile(_) => Err(self), + match self.file_id { + HirFileId::FileId(file_id) => Ok(InRealFile { file_id, value: self.value }), + HirFileId::MacroFile(_) => Err(self), } } } diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/fixup.rs b/src/tools/rust-analyzer/crates/hir-expand/src/fixup.rs index 28894537d48f7..4a4a3e52aea43 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/fixup.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/fixup.rs @@ -4,13 +4,14 @@ use intern::sym; use rustc_hash::{FxHashMap, FxHashSet}; use span::{ - ErasedFileAstId, Span, SpanAnchor, SyntaxContextId, FIXUP_ERASED_FILE_AST_ID_MARKER, - ROOT_ERASED_FILE_AST_ID, + ErasedFileAstId, FIXUP_ERASED_FILE_AST_ID_MARKER, ROOT_ERASED_FILE_AST_ID, Span, SpanAnchor, + SyntaxContext, }; use stdx::never; use syntax::{ + SyntaxElement, SyntaxKind, SyntaxNode, TextRange, TextSize, ast::{self, AstNode, HasLoopBody}, - match_ast, SyntaxElement, SyntaxKind, SyntaxNode, TextRange, TextSize, + match_ast, }; use syntax_bridge::DocCommentDesugarMode; use triomphe::Arc; @@ -81,7 +82,7 @@ pub(crate) fn fixup_syntax( original.push(original_tree); let span = span_map.span_for_range(node_range); let replacement = Leaf::Ident(Ident { - sym: sym::__ra_fixup.clone(), + sym: sym::__ra_fixup, span: Span { range: TextRange::new(TextSize::new(idx), FIXUP_DUMMY_RANGE_END), anchor: SpanAnchor { ast_id: FIXUP_DUMMY_AST_ID, ..span.anchor }, @@ -101,7 +102,7 @@ pub(crate) fn fixup_syntax( // incomplete field access: some_expr.| append.insert(node.clone().into(), vec![ Leaf::Ident(Ident { - sym: sym::__ra_fixup.clone(), + sym: sym::__ra_fixup, span: fake_span(node_range), is_raw: tt::IdentIsRaw::No }), @@ -140,7 +141,7 @@ pub(crate) fn fixup_syntax( }; append.insert(if_token.into(), vec![ Leaf::Ident(Ident { - sym: sym::__ra_fixup.clone(), + sym: sym::__ra_fixup, span: fake_span(node_range), is_raw: tt::IdentIsRaw::No }), @@ -170,7 +171,7 @@ pub(crate) fn fixup_syntax( }; append.insert(while_token.into(), vec![ Leaf::Ident(Ident { - sym: sym::__ra_fixup.clone(), + sym: sym::__ra_fixup, span: fake_span(node_range), is_raw: tt::IdentIsRaw::No }), @@ -216,7 +217,7 @@ pub(crate) fn fixup_syntax( }; append.insert(match_token.into(), vec![ Leaf::Ident(Ident { - sym: sym::__ra_fixup.clone(), + sym: sym::__ra_fixup, span: fake_span(node_range), is_raw: tt::IdentIsRaw::No }), @@ -245,9 +246,9 @@ pub(crate) fn fixup_syntax( }; let [pat, in_token, iter] = [ - sym::underscore.clone(), - sym::in_.clone(), - sym::__ra_fixup.clone(), + sym::underscore, + sym::in_, + sym::__ra_fixup, ].map(|sym| Leaf::Ident(Ident { sym, @@ -283,7 +284,7 @@ pub(crate) fn fixup_syntax( if it.name_ref().is_some() && it.expr().is_none() { append.insert(colon.into(), vec![ Leaf::Ident(Ident { - sym: sym::__ra_fixup.clone(), + sym: sym::__ra_fixup, span: fake_span(node_range), is_raw: tt::IdentIsRaw::No }) @@ -296,7 +297,7 @@ pub(crate) fn fixup_syntax( if it.segment().is_none() { append.insert(colon.into(), vec![ Leaf::Ident(Ident { - sym: sym::__ra_fixup.clone(), + sym: sym::__ra_fixup, span: fake_span(node_range), is_raw: tt::IdentIsRaw::No }) @@ -308,7 +309,7 @@ pub(crate) fn fixup_syntax( if it.body().is_none() { append.insert(node.into(), vec![ Leaf::Ident(Ident { - sym: sym::__ra_fixup.clone(), + sym: sym::__ra_fixup, span: fake_span(node_range), is_raw: tt::IdentIsRaw::No }) @@ -353,7 +354,7 @@ pub(crate) fn reverse_fixups(tt: &mut TopSubtree, undo_info: &SyntaxFixupUndoInf let span = |file_id| Span { range: TextRange::empty(TextSize::new(0)), anchor: SpanAnchor { file_id, ast_id: ROOT_ERASED_FILE_AST_ID }, - ctx: SyntaxContextId::root(span::Edition::Edition2015), + ctx: SyntaxContext::root(span::Edition::Edition2015), }; delimiter.open = span(delimiter.open.anchor.file_id); delimiter.close = span(delimiter.close.anchor.file_id); @@ -465,7 +466,7 @@ fn reverse_fixups_(tt: &mut TopSubtree, undo_info: &[TopSubtree]) { #[cfg(test)] mod tests { - use expect_test::{expect, Expect}; + use expect_test::{Expect, expect}; use span::{Edition, EditionedFileId, FileId}; use syntax::TextRange; use syntax_bridge::DocCommentDesugarMode; diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/hygiene.rs b/src/tools/rust-analyzer/crates/hir-expand/src/hygiene.rs index fe05af0ac9d31..28800c6fabdbd 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/hygiene.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/hygiene.rs @@ -22,11 +22,11 @@ // FIXME: Move this into the span crate? Not quite possible today as that depends on `MacroCallLoc` // which contains a bunch of unrelated things -use std::iter; +use std::convert::identity; -use span::{Edition, MacroCallId, Span, SyntaxContextData, SyntaxContextId}; +use span::{Edition, MacroCallId, Span, SyntaxContext}; -use crate::db::{ExpandDatabase, InternSyntaxContextQuery}; +use crate::db::ExpandDatabase; pub use span::Transparency; @@ -65,23 +65,23 @@ fn span_with_ctxt_from_mark( edition: Edition, ) -> Span { Span { - ctx: apply_mark(db, SyntaxContextId::root(edition), expn_id, transparency, edition), + ctx: apply_mark(db, SyntaxContext::root(edition), expn_id, transparency, edition), ..span } } pub(super) fn apply_mark( db: &dyn ExpandDatabase, - ctxt: SyntaxContextId, - call_id: MacroCallId, + ctxt: span::SyntaxContext, + call_id: span::MacroCallId, transparency: Transparency, edition: Edition, -) -> SyntaxContextId { +) -> SyntaxContext { if transparency == Transparency::Opaque { return apply_mark_internal(db, ctxt, call_id, transparency, edition); } - let call_site_ctxt = db.lookup_intern_macro_call(call_id).ctxt; + let call_site_ctxt = db.lookup_intern_macro_call(call_id.into()).ctxt; let mut call_site_ctxt = if transparency == Transparency::SemiTransparent { call_site_ctxt.normalize_to_macros_2_0(db) } else { @@ -109,167 +109,35 @@ pub(super) fn apply_mark( fn apply_mark_internal( db: &dyn ExpandDatabase, - ctxt: SyntaxContextId, + ctxt: SyntaxContext, call_id: MacroCallId, transparency: Transparency, edition: Edition, -) -> SyntaxContextId { - use base_db::ra_salsa; - +) -> SyntaxContext { let call_id = Some(call_id); - let syntax_context_data = db.lookup_intern_syntax_context(ctxt); - let mut opaque = syntax_context_data.opaque; - let mut opaque_and_semitransparent = syntax_context_data.opaque_and_semitransparent; + let mut opaque = ctxt.opaque(db); + let mut opaque_and_semitransparent = ctxt.opaque_and_semitransparent(db); if transparency >= Transparency::Opaque { let parent = opaque; - opaque = ra_salsa::plumbing::get_query_table::(db).get_or_insert( - (parent, call_id, transparency, edition), - |new_opaque| SyntaxContextData { - outer_expn: call_id, - outer_transparency: transparency, - parent, - opaque: new_opaque, - opaque_and_semitransparent: new_opaque, - edition, - }, - ); + opaque = SyntaxContext::new(db, call_id, transparency, edition, parent, identity, identity); } if transparency >= Transparency::SemiTransparent { let parent = opaque_and_semitransparent; opaque_and_semitransparent = - ra_salsa::plumbing::get_query_table::(db).get_or_insert( - (parent, call_id, transparency, edition), - |new_opaque_and_semitransparent| SyntaxContextData { - outer_expn: call_id, - outer_transparency: transparency, - parent, - opaque, - opaque_and_semitransparent: new_opaque_and_semitransparent, - edition, - }, - ); + SyntaxContext::new(db, call_id, transparency, edition, parent, |_| opaque, identity); } let parent = ctxt; - db.intern_syntax_context(SyntaxContextData { - outer_expn: call_id, - outer_transparency: transparency, - parent, - opaque, - opaque_and_semitransparent, + SyntaxContext::new( + db, + call_id, + transparency, edition, - }) -} - -pub trait SyntaxContextExt { - fn normalize_to_macro_rules(self, db: &dyn ExpandDatabase) -> Self; - fn normalize_to_macros_2_0(self, db: &dyn ExpandDatabase) -> Self; - fn parent_ctxt(self, db: &dyn ExpandDatabase) -> Self; - fn remove_mark(&mut self, db: &dyn ExpandDatabase) -> (Option, Transparency); - fn outer_mark(self, db: &dyn ExpandDatabase) -> (Option, Transparency); - fn marks(self, db: &dyn ExpandDatabase) -> Vec<(MacroCallId, Transparency)>; - fn is_opaque(self, db: &dyn ExpandDatabase) -> bool; -} - -impl SyntaxContextExt for SyntaxContextId { - fn normalize_to_macro_rules(self, db: &dyn ExpandDatabase) -> Self { - db.lookup_intern_syntax_context(self).opaque_and_semitransparent - } - fn normalize_to_macros_2_0(self, db: &dyn ExpandDatabase) -> Self { - db.lookup_intern_syntax_context(self).opaque - } - fn parent_ctxt(self, db: &dyn ExpandDatabase) -> Self { - db.lookup_intern_syntax_context(self).parent - } - fn outer_mark(self, db: &dyn ExpandDatabase) -> (Option, Transparency) { - let data = db.lookup_intern_syntax_context(self); - (data.outer_expn, data.outer_transparency) - } - fn remove_mark(&mut self, db: &dyn ExpandDatabase) -> (Option, Transparency) { - let data = db.lookup_intern_syntax_context(*self); - *self = data.parent; - (data.outer_expn, data.outer_transparency) - } - fn marks(self, db: &dyn ExpandDatabase) -> Vec<(MacroCallId, Transparency)> { - let mut marks = marks_rev(self, db).collect::>(); - marks.reverse(); - marks - } - fn is_opaque(self, db: &dyn ExpandDatabase) -> bool { - !self.is_root() && db.lookup_intern_syntax_context(self).outer_transparency.is_opaque() - } -} - -// FIXME: Make this a SyntaxContextExt method once we have RPIT -pub fn marks_rev( - ctxt: SyntaxContextId, - db: &dyn ExpandDatabase, -) -> impl Iterator + '_ { - iter::successors(Some(ctxt), move |&mark| Some(mark.parent_ctxt(db))) - .take_while(|&it| !it.is_root()) - .map(|ctx| { - let mark = ctx.outer_mark(db); - // We stop before taking the root expansion, as such we cannot encounter a `None` outer - // expansion, as only the ROOT has it. - (mark.0.unwrap(), mark.1) - }) -} - -pub(crate) fn dump_syntax_contexts(db: &dyn ExpandDatabase) -> String { - use crate::db::{InternMacroCallLookupQuery, InternSyntaxContextLookupQuery}; - use base_db::ra_salsa::debug::DebugQueryTable; - - let mut s = String::from("Expansions:"); - let mut entries = InternMacroCallLookupQuery.in_db(db).entries::>(); - entries.sort_by_key(|e| e.key); - for e in entries { - let id = e.key; - let expn_data = e.value.as_ref().unwrap(); - s.push_str(&format!( - "\n{:?}: parent: {:?}, call_site_ctxt: {:?}, kind: {:?}", - id, - expn_data.kind.file_id(), - expn_data.ctxt, - expn_data.kind.descr(), - )); - } - - s.push_str("\n\nSyntaxContexts:\n"); - let mut entries = InternSyntaxContextLookupQuery.in_db(db).entries::>(); - entries.sort_by_key(|e| e.key); - for e in entries { - struct SyntaxContextDebug<'a>( - &'a dyn ExpandDatabase, - SyntaxContextId, - &'a SyntaxContextData, - ); - - impl std::fmt::Debug for SyntaxContextDebug<'_> { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - fancy_debug(self.2, self.1, self.0, f) - } - } - - fn fancy_debug( - this: &SyntaxContextData, - self_id: SyntaxContextId, - db: &dyn ExpandDatabase, - f: &mut std::fmt::Formatter<'_>, - ) -> std::fmt::Result { - write!(f, "#{self_id} parent: #{}, outer_mark: (", this.parent)?; - match this.outer_expn { - Some(id) => { - write!(f, "{:?}::{{{{expn{:?}}}}}", db.lookup_intern_macro_call(id).krate, id)? - } - None => write!(f, "root")?, - } - write!(f, ", {:?})", this.outer_transparency) - } - - stdx::format_to!(s, "{:?}\n", SyntaxContextDebug(db, e.key, &e.value.unwrap())); - } - s + parent, + |_| opaque, + |_| opaque_and_semitransparent, + ) } diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/inert_attr_macro.rs b/src/tools/rust-analyzer/crates/hir-expand/src/inert_attr_macro.rs index 4c4174e2680f5..543ac0619dd3e 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/inert_attr_macro.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/inert_attr_macro.rs @@ -562,7 +562,7 @@ pub const INERT_ATTRIBUTES: &[BuiltinAttribute] = &[ ), BuiltinAttribute { - // name: sym::rustc_diagnostic_item.clone(), + // name: sym::rustc_diagnostic_item, name: "rustc_diagnostic_item", // FIXME: This can be `true` once we always use `tcx.is_diagnostic_item`. // only_local: false, @@ -571,7 +571,7 @@ pub const INERT_ATTRIBUTES: &[BuiltinAttribute] = &[ // duplicates: ErrorFollowing, // gate: Gated( // Stability::Unstable, - // sym::rustc_attrs.clone(), + // sym::rustc_attrs, // "diagnostic items compiler internal support for linting", // cfg_fn!(rustc_attrs), // ), diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/lib.rs b/src/tools/rust-analyzer/crates/hir-expand/src/lib.rs index c1d808cbf2c5a..d844d8f41eeff 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/lib.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/lib.rs @@ -27,28 +27,26 @@ mod prettify_macro_expansion_; use attrs::collect_attrs; use rustc_hash::FxHashMap; +use salsa::plumbing::{AsId, FromId}; use stdx::TupleExt; use triomphe::Arc; use core::fmt; use std::hash::Hash; -use base_db::{ra_salsa::InternValueTrivial, CrateId}; +use base_db::Crate; use either::Either; -use span::{ - Edition, EditionedFileId, ErasedFileAstId, FileAstId, HirFileIdRepr, Span, SpanAnchor, - SyntaxContextData, SyntaxContextId, -}; +use span::{Edition, ErasedFileAstId, FileAstId, Span, SpanAnchor, SyntaxContext}; use syntax::{ - ast::{self, AstNode}, SyntaxNode, SyntaxToken, TextRange, TextSize, + ast::{self, AstNode}, }; use crate::{ attrs::AttrId, builtin::{ - include_input_to_file_id, BuiltinAttrExpander, BuiltinDeriveExpander, - BuiltinFnLikeExpander, EagerExpander, + BuiltinAttrExpander, BuiltinDeriveExpander, BuiltinFnLikeExpander, EagerExpander, + include_input_to_file_id, }, db::ExpandDatabase, mod_path::ModPath, @@ -62,12 +60,12 @@ pub use crate::{ prettify_macro_expansion_::prettify_macro_expansion, }; +pub use base_db::EditionedFileId; pub use mbe::{DeclarativeMacro, ValueResult}; -pub use span::{HirFileId, MacroCallId, MacroFileId}; pub mod tt { pub use span::Span; - pub use tt::{token_to_literal, DelimiterKind, IdentIsRaw, LitKind, Spacing}; + pub use tt::{DelimiterKind, IdentIsRaw, LitKind, Spacing, token_to_literal}; pub type Delimiter = ::tt::Delimiter; pub type DelimSpan = ::tt::DelimSpan; @@ -89,17 +87,17 @@ pub mod tt { macro_rules! impl_intern_lookup { ($db:ident, $id:ident, $loc:ident, $intern:ident, $lookup:ident) => { impl $crate::Intern for $loc { - type Database<'db> = dyn $db + 'db; + type Database = dyn $db; type ID = $id; - fn intern(self, db: &Self::Database<'_>) -> $id { + fn intern(self, db: &Self::Database) -> Self::ID { db.$intern(self) } } impl $crate::Lookup for $id { - type Database<'db> = dyn $db + 'db; + type Database = dyn $db; type Data = $loc; - fn lookup(&self, db: &Self::Database<'_>) -> $loc { + fn lookup(&self, db: &Self::Database) -> Self::Data { db.$lookup(*self) } } @@ -108,15 +106,15 @@ macro_rules! impl_intern_lookup { // ideally these would be defined in base-db, but the orphan rule doesn't let us pub trait Intern { - type Database<'db>: ?Sized; + type Database: ?Sized; type ID; - fn intern(self, db: &Self::Database<'_>) -> Self::ID; + fn intern(self, db: &Self::Database) -> Self::ID; } pub trait Lookup { - type Database<'db>: ?Sized; + type Database: ?Sized; type Data; - fn lookup(&self, db: &Self::Database<'_>) -> Self::Data; + fn lookup(&self, db: &Self::Database) -> Self::Data; } impl_intern_lookup!( @@ -127,14 +125,6 @@ impl_intern_lookup!( lookup_intern_macro_call ); -impl_intern_lookup!( - ExpandDatabase, - SyntaxContextId, - SyntaxContextData, - intern_syntax_context, - lookup_intern_syntax_context -); - pub type ExpandResult = ValueResult; #[derive(Debug, PartialEq, Eq, Clone, Hash)] @@ -165,7 +155,7 @@ impl ExpandError { pub enum ExpandErrorKind { /// Attribute macro expansion is disabled. ProcMacroAttrExpansionDisabled, - MissingProcMacroExpander(CrateId), + MissingProcMacroExpander(Crate), /// The macro for this call is disabled. MacroDisabled, /// The macro definition has errors. @@ -208,14 +198,16 @@ impl ExpandErrorKind { kind: RenderedExpandError::DISABLED, }, &ExpandErrorKind::MissingProcMacroExpander(def_crate) => { - match db.proc_macros().get_error_for_crate(def_crate) { + match db.proc_macros_for_crate(def_crate).as_ref().and_then(|it| it.get_error()) { Some((e, hard_err)) => RenderedExpandError { message: e.to_owned(), error: hard_err, kind: RenderedExpandError::GENERAL_KIND, }, None => RenderedExpandError { - message: format!("internal error: proc-macro map is missing error entry for crate {def_crate:?}"), + message: format!( + "internal error: proc-macro map is missing error entry for crate {def_crate:?}" + ), error: true, kind: RenderedExpandError::GENERAL_KIND, }, @@ -258,15 +250,14 @@ impl From for ExpandError { #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct MacroCallLoc { pub def: MacroDefId, - pub krate: CrateId, + pub krate: Crate, pub kind: MacroCallKind, - pub ctxt: SyntaxContextId, + pub ctxt: SyntaxContext, } -impl InternValueTrivial for MacroCallLoc {} #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub struct MacroDefId { - pub krate: CrateId, + pub krate: Crate, pub edition: Edition, pub kind: MacroDefKind, pub local_inner: bool, @@ -288,6 +279,17 @@ impl MacroDefKind { pub fn is_declarative(&self) -> bool { matches!(self, MacroDefKind::Declarative(..)) } + + pub fn erased_ast_id(&self) -> ErasedAstId { + match *self { + MacroDefKind::ProcMacro(id, ..) => id.erase(), + MacroDefKind::BuiltIn(id, _) + | MacroDefKind::BuiltInAttr(id, _) + | MacroDefKind::BuiltInDerive(id, _) + | MacroDefKind::BuiltInEager(id, _) + | MacroDefKind::Declarative(id, ..) => id.erase(), + } + } } #[derive(Debug, Clone, PartialEq, Eq, Hash)] @@ -338,51 +340,34 @@ pub enum MacroCallKind { }, } -pub trait HirFileIdExt { - fn edition(self, db: &dyn ExpandDatabase) -> Edition; - /// Returns the original file of this macro call hierarchy. - fn original_file(self, db: &dyn ExpandDatabase) -> EditionedFileId; - - /// Returns the original file of this macro call hierarchy while going into the included file if - /// one of the calls comes from an `include!``. - fn original_file_respecting_includes(self, db: &dyn ExpandDatabase) -> EditionedFileId; - - /// If this is a macro call, returns the syntax node of the very first macro call this file resides in. - fn original_call_node(self, db: &dyn ExpandDatabase) -> Option>; - - fn as_builtin_derive_attr_node(&self, db: &dyn ExpandDatabase) -> Option>; -} - -impl HirFileIdExt for HirFileId { - fn edition(self, db: &dyn ExpandDatabase) -> Edition { - match self.repr() { - HirFileIdRepr::FileId(file_id) => file_id.edition(), - HirFileIdRepr::MacroFile(m) => m.macro_call_id.lookup(db).def.edition, +impl HirFileId { + pub fn edition(self, db: &dyn ExpandDatabase) -> Edition { + match self { + HirFileId::FileId(file_id) => file_id.editioned_file_id(db).edition(), + HirFileId::MacroFile(m) => db.lookup_intern_macro_call(m).def.edition, } } - fn original_file(self, db: &dyn ExpandDatabase) -> EditionedFileId { + pub fn original_file(self, db: &dyn ExpandDatabase) -> EditionedFileId { let mut file_id = self; loop { - match file_id.repr() { - HirFileIdRepr::FileId(id) => break id, - HirFileIdRepr::MacroFile(MacroFileId { macro_call_id }) => { - file_id = macro_call_id.lookup(db).kind.file_id(); + match file_id { + HirFileId::FileId(id) => break id, + HirFileId::MacroFile(macro_call_id) => { + file_id = db.lookup_intern_macro_call(macro_call_id).kind.file_id() } } } } - fn original_file_respecting_includes(mut self, db: &dyn ExpandDatabase) -> EditionedFileId { + pub fn original_file_respecting_includes(mut self, db: &dyn ExpandDatabase) -> EditionedFileId { loop { - match self.repr() { - HirFileIdRepr::FileId(id) => break id, - HirFileIdRepr::MacroFile(file) => { - let loc = db.lookup_intern_macro_call(file.macro_call_id); + match self { + HirFileId::FileId(id) => break id, + HirFileId::MacroFile(file) => { + let loc = db.lookup_intern_macro_call(file); if loc.def.is_include() { if let MacroCallKind::FnLike { eager: Some(eager), .. } = &loc.kind { - if let Ok(it) = - include_input_to_file_id(db, file.macro_call_id, &eager.arg) - { + if let Ok(it) = include_input_to_file_id(db, file, &eager.arg) { break it; } } @@ -393,23 +378,26 @@ impl HirFileIdExt for HirFileId { } } - fn original_call_node(self, db: &dyn ExpandDatabase) -> Option> { - let mut call = db.lookup_intern_macro_call(self.macro_file()?.macro_call_id).to_node(db); + pub fn original_call_node(self, db: &dyn ExpandDatabase) -> Option> { + let mut call = db.lookup_intern_macro_call(self.macro_file()?).to_node(db); loop { - match call.file_id.repr() { - HirFileIdRepr::FileId(file_id) => { - break Some(InRealFile { file_id, value: call.value }) + match call.file_id { + HirFileId::FileId(file_id) => { + break Some(InRealFile { file_id, value: call.value }); } - HirFileIdRepr::MacroFile(MacroFileId { macro_call_id }) => { + HirFileId::MacroFile(macro_call_id) => { call = db.lookup_intern_macro_call(macro_call_id).to_node(db); } } } } - fn as_builtin_derive_attr_node(&self, db: &dyn ExpandDatabase) -> Option> { + pub fn as_builtin_derive_attr_node( + &self, + db: &dyn ExpandDatabase, + ) -> Option> { let macro_file = self.macro_file()?; - let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id); + let loc = db.lookup_intern_macro_call(macro_file); let attr = match loc.def.kind { MacroDefKind::BuiltInDerive(..) => loc.to_node(db), _ => return None, @@ -436,57 +424,34 @@ pub enum MacroKind { ProcMacro, } -pub trait MacroFileIdExt { - fn is_env_or_option_env(&self, db: &dyn ExpandDatabase) -> bool; - fn is_include_like_macro(&self, db: &dyn ExpandDatabase) -> bool; - fn eager_arg(&self, db: &dyn ExpandDatabase) -> Option; - fn expansion_level(self, db: &dyn ExpandDatabase) -> u32; - /// If this is a macro call, returns the syntax node of the call. - fn call_node(self, db: &dyn ExpandDatabase) -> InFile; - fn parent(self, db: &dyn ExpandDatabase) -> HirFileId; - - fn expansion_info(self, db: &dyn ExpandDatabase) -> ExpansionInfo; - - fn kind(&self, db: &dyn ExpandDatabase) -> MacroKind; - - /// Return whether this file is an include macro - fn is_include_macro(&self, db: &dyn ExpandDatabase) -> bool; - - fn is_eager(&self, db: &dyn ExpandDatabase) -> bool; - - /// Return whether this file is the pseudo expansion of the derive attribute. - /// See [`crate::builtin_attr_macro::derive_attr_expand`]. - fn is_derive_attr_pseudo_expansion(&self, db: &dyn ExpandDatabase) -> bool; -} - -impl MacroFileIdExt for MacroFileId { - fn call_node(self, db: &dyn ExpandDatabase) -> InFile { - db.lookup_intern_macro_call(self.macro_call_id).to_node(db) +impl MacroCallId { + pub fn call_node(self, db: &dyn ExpandDatabase) -> InFile { + db.lookup_intern_macro_call(self).to_node(db) } - fn expansion_level(self, db: &dyn ExpandDatabase) -> u32 { + pub fn expansion_level(self, db: &dyn ExpandDatabase) -> u32 { let mut level = 0; let mut macro_file = self; loop { - let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id); + let loc = db.lookup_intern_macro_call(macro_file); level += 1; - macro_file = match loc.kind.file_id().repr() { - HirFileIdRepr::FileId(_) => break level, - HirFileIdRepr::MacroFile(it) => it, + macro_file = match loc.kind.file_id() { + HirFileId::FileId(_) => break level, + HirFileId::MacroFile(it) => it, }; } } - fn parent(self, db: &dyn ExpandDatabase) -> HirFileId { - self.macro_call_id.lookup(db).kind.file_id() + pub fn parent(self, db: &dyn ExpandDatabase) -> HirFileId { + db.lookup_intern_macro_call(self).kind.file_id() } /// Return expansion information if it is a macro-expansion file - fn expansion_info(self, db: &dyn ExpandDatabase) -> ExpansionInfo { + pub fn expansion_info(self, db: &dyn ExpandDatabase) -> ExpansionInfo { ExpansionInfo::new(db, self) } - fn kind(&self, db: &dyn ExpandDatabase) -> MacroKind { - match db.lookup_intern_macro_call(self.macro_call_id).def.kind { + pub fn kind(self, db: &dyn ExpandDatabase) -> MacroKind { + match db.lookup_intern_macro_call(self).def.kind { MacroDefKind::Declarative(..) => MacroKind::Declarative, MacroDefKind::BuiltIn(..) | MacroDefKind::BuiltInEager(..) => { MacroKind::DeclarativeBuiltIn @@ -499,33 +464,33 @@ impl MacroFileIdExt for MacroFileId { } } - fn is_include_macro(&self, db: &dyn ExpandDatabase) -> bool { - db.lookup_intern_macro_call(self.macro_call_id).def.is_include() + pub fn is_include_macro(self, db: &dyn ExpandDatabase) -> bool { + db.lookup_intern_macro_call(self).def.is_include() } - fn is_include_like_macro(&self, db: &dyn ExpandDatabase) -> bool { - db.lookup_intern_macro_call(self.macro_call_id).def.is_include_like() + pub fn is_include_like_macro(self, db: &dyn ExpandDatabase) -> bool { + db.lookup_intern_macro_call(self).def.is_include_like() } - fn is_env_or_option_env(&self, db: &dyn ExpandDatabase) -> bool { - db.lookup_intern_macro_call(self.macro_call_id).def.is_env_or_option_env() + pub fn is_env_or_option_env(self, db: &dyn ExpandDatabase) -> bool { + db.lookup_intern_macro_call(self).def.is_env_or_option_env() } - fn is_eager(&self, db: &dyn ExpandDatabase) -> bool { - let loc = db.lookup_intern_macro_call(self.macro_call_id); + pub fn is_eager(self, db: &dyn ExpandDatabase) -> bool { + let loc = db.lookup_intern_macro_call(self); matches!(loc.def.kind, MacroDefKind::BuiltInEager(..)) } - fn eager_arg(&self, db: &dyn ExpandDatabase) -> Option { - let loc = db.lookup_intern_macro_call(self.macro_call_id); + pub fn eager_arg(self, db: &dyn ExpandDatabase) -> Option { + let loc = db.lookup_intern_macro_call(self); match &loc.kind { MacroCallKind::FnLike { eager, .. } => eager.as_ref().map(|it| it.arg_id), _ => None, } } - fn is_derive_attr_pseudo_expansion(&self, db: &dyn ExpandDatabase) -> bool { - let loc = db.lookup_intern_macro_call(self.macro_call_id); + pub fn is_derive_attr_pseudo_expansion(self, db: &dyn ExpandDatabase) -> bool { + let loc = db.lookup_intern_macro_call(self); loc.def.is_attribute_derive() } } @@ -534,11 +499,11 @@ impl MacroDefId { pub fn make_call( self, db: &dyn ExpandDatabase, - krate: CrateId, + krate: Crate, kind: MacroCallKind, - ctxt: SyntaxContextId, + ctxt: SyntaxContext, ) -> MacroCallId { - MacroCallLoc { def: self, krate, kind, ctxt }.intern(db) + db.intern_macro_call(MacroCallLoc { def: self, krate, kind, ctxt }) } pub fn definition_range(&self, db: &dyn ExpandDatabase) -> InFile { @@ -692,7 +657,7 @@ impl MacroCallLoc { } impl MacroCallKind { - fn descr(&self) -> &'static str { + pub fn descr(&self) -> &'static str { match self { MacroCallKind::FnLike { .. } => "macro call", MacroCallKind::Derive { .. } => "derive macro", @@ -723,11 +688,11 @@ impl MacroCallKind { pub fn original_call_range_with_body(self, db: &dyn ExpandDatabase) -> FileRange { let mut kind = self; let file_id = loop { - match kind.file_id().repr() { - HirFileIdRepr::MacroFile(file) => { - kind = db.lookup_intern_macro_call(file.macro_call_id).kind; + match kind.file_id() { + HirFileId::MacroFile(file) => { + kind = db.lookup_intern_macro_call(file).kind; } - HirFileIdRepr::FileId(file_id) => break file_id, + HirFileId::FileId(file_id) => break file_id, } }; @@ -748,11 +713,11 @@ impl MacroCallKind { pub fn original_call_range(self, db: &dyn ExpandDatabase) -> FileRange { let mut kind = self; let file_id = loop { - match kind.file_id().repr() { - HirFileIdRepr::MacroFile(file) => { - kind = db.lookup_intern_macro_call(file.macro_call_id).kind; + match kind.file_id() { + HirFileId::MacroFile(file) => { + kind = db.lookup_intern_macro_call(file).kind; } - HirFileIdRepr::FileId(file_id) => break file_id, + HirFileId::FileId(file_id) => break file_id, } }; @@ -840,7 +805,7 @@ impl ExpansionInfo { pub fn map_range_down_exact( &self, span: Span, - ) -> Option + '_>> { + ) -> Option + '_>> { let tokens = self.exp_map.ranges_with_span_exact(span).flat_map(move |(range, ctx)| { self.expanded.value.covering_element(range).into_token().zip(Some(ctx)) }); @@ -855,7 +820,7 @@ impl ExpansionInfo { pub fn map_range_down( &self, span: Span, - ) -> Option + '_>> { + ) -> Option + '_>> { let tokens = self.exp_map.ranges_with_span(span).flat_map(move |(range, ctx)| { self.expanded.value.covering_element(range).into_token().zip(Some(ctx)) }); @@ -868,7 +833,7 @@ impl ExpansionInfo { &self, db: &dyn ExpandDatabase, offset: TextSize, - ) -> (FileRange, SyntaxContextId) { + ) -> (FileRange, SyntaxContext) { debug_assert!(self.expanded.value.text_range().contains(offset)); span_for_offset(db, &self.exp_map, offset) } @@ -878,7 +843,7 @@ impl ExpansionInfo { &self, db: &dyn ExpandDatabase, range: TextRange, - ) -> Option<(FileRange, SyntaxContextId)> { + ) -> Option<(FileRange, SyntaxContext)> { debug_assert!(self.expanded.value.text_range().contains_range(range)); map_node_range_up(db, &self.exp_map, range) } @@ -893,7 +858,7 @@ impl ExpansionInfo { let span = self.exp_map.span_at(token.start()); match &self.arg_map { SpanMap::RealSpanMap(_) => { - let file_id = span.anchor.file_id.into(); + let file_id = EditionedFileId::from_span(db, span.anchor.file_id).into(); let anchor_offset = db.ast_id_map(file_id).get_erased(span.anchor.ast_id).text_range().start(); InFile { file_id, value: smallvec::smallvec![span.range + anchor_offset] } @@ -916,9 +881,9 @@ impl ExpansionInfo { } } - pub fn new(db: &dyn ExpandDatabase, macro_file: MacroFileId) -> ExpansionInfo { + pub fn new(db: &dyn ExpandDatabase, macro_file: MacroCallId) -> ExpansionInfo { let _p = tracing::info_span!("ExpansionInfo::new").entered(); - let loc = db.lookup_intern_macro_call(macro_file.macro_call_id); + let loc = db.lookup_intern_macro_call(macro_file); let arg_tt = loc.kind.arg(db); let arg_map = db.span_map(arg_tt.file_id); @@ -950,9 +915,10 @@ pub fn map_node_range_up_rooted( start = start.min(span.range.start()); end = end.max(span.range.end()); } + let file_id = EditionedFileId::from_span(db, anchor.file_id); let anchor_offset = - db.ast_id_map(anchor.file_id.into()).get_erased(anchor.ast_id).text_range().start(); - Some(FileRange { file_id: anchor.file_id, range: TextRange::new(start, end) + anchor_offset }) + db.ast_id_map(file_id.into()).get_erased(anchor.ast_id).text_range().start(); + Some(FileRange { file_id, range: TextRange::new(start, end) + anchor_offset }) } /// Maps up the text range out of the expansion hierarchy back into the original file its from. @@ -962,7 +928,7 @@ pub fn map_node_range_up( db: &dyn ExpandDatabase, exp_map: &ExpansionSpanMap, range: TextRange, -) -> Option<(FileRange, SyntaxContextId)> { +) -> Option<(FileRange, SyntaxContext)> { let mut spans = exp_map.spans_for_range(range); let Span { range, anchor, ctx } = spans.next()?; let mut start = range.start(); @@ -975,12 +941,10 @@ pub fn map_node_range_up( start = start.min(span.range.start()); end = end.max(span.range.end()); } + let file_id = EditionedFileId::from_span(db, anchor.file_id); let anchor_offset = - db.ast_id_map(anchor.file_id.into()).get_erased(anchor.ast_id).text_range().start(); - Some(( - FileRange { file_id: anchor.file_id, range: TextRange::new(start, end) + anchor_offset }, - ctx, - )) + db.ast_id_map(file_id.into()).get_erased(anchor.ast_id).text_range().start(); + Some((FileRange { file_id, range: TextRange::new(start, end) + anchor_offset }, ctx)) } /// Maps up the text range out of the expansion hierarchy back into the original file its from. @@ -989,7 +953,7 @@ pub fn map_node_range_up_aggregated( db: &dyn ExpandDatabase, exp_map: &ExpansionSpanMap, range: TextRange, -) -> FxHashMap<(SpanAnchor, SyntaxContextId), TextRange> { +) -> FxHashMap<(SpanAnchor, SyntaxContext), TextRange> { let mut map = FxHashMap::default(); for span in exp_map.spans_for_range(range) { let range = map.entry((span.anchor, span.ctx)).or_insert_with(|| span.range); @@ -999,8 +963,9 @@ pub fn map_node_range_up_aggregated( ); } for ((anchor, _), range) in &mut map { + let file_id = EditionedFileId::from_span(db, anchor.file_id); let anchor_offset = - db.ast_id_map(anchor.file_id.into()).get_erased(anchor.ast_id).text_range().start(); + db.ast_id_map(file_id.into()).get_erased(anchor.ast_id).text_range().start(); *range += anchor_offset; } map @@ -1011,14 +976,12 @@ pub fn span_for_offset( db: &dyn ExpandDatabase, exp_map: &ExpansionSpanMap, offset: TextSize, -) -> (FileRange, SyntaxContextId) { +) -> (FileRange, SyntaxContext) { let span = exp_map.span_at(offset); - let anchor_offset = db - .ast_id_map(span.anchor.file_id.into()) - .get_erased(span.anchor.ast_id) - .text_range() - .start(); - (FileRange { file_id: span.anchor.file_id, range: span.range + anchor_offset }, span.ctx) + let file_id = EditionedFileId::from_span(db, span.anchor.file_id); + let anchor_offset = + db.ast_id_map(file_id.into()).get_erased(span.anchor.ast_id).text_range().start(); + (FileRange { file_id, range: span.range + anchor_offset }, span.ctx) } /// In Rust, macros expand token trees to token trees. When we want to turn a @@ -1086,3 +1049,77 @@ impl ExpandTo { } intern::impl_internable!(ModPath, attrs::AttrInput); + +#[salsa_macros::interned(no_lifetime, debug)] +#[doc(alias = "MacroFileId")] +pub struct MacroCallId { + pub loc: MacroCallLoc, +} + +impl From for MacroCallId { + #[inline] + fn from(value: span::MacroCallId) -> Self { + MacroCallId::from_id(value.0) + } +} + +impl From for span::MacroCallId { + #[inline] + fn from(value: MacroCallId) -> span::MacroCallId { + span::MacroCallId(value.as_id()) + } +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, salsa_macros::Supertype)] +pub enum HirFileId { + FileId(EditionedFileId), + MacroFile(MacroCallId), +} + +impl From for HirFileId { + #[inline] + fn from(file_id: EditionedFileId) -> Self { + HirFileId::FileId(file_id) + } +} + +impl From for HirFileId { + #[inline] + fn from(file_id: MacroCallId) -> Self { + HirFileId::MacroFile(file_id) + } +} + +impl HirFileId { + #[inline] + pub fn macro_file(self) -> Option { + match self { + HirFileId::FileId(_) => None, + HirFileId::MacroFile(it) => Some(it), + } + } + + #[inline] + pub fn is_macro(self) -> bool { + matches!(self, HirFileId::MacroFile(_)) + } + + #[inline] + pub fn file_id(self) -> Option { + match self { + HirFileId::FileId(it) => Some(it), + HirFileId::MacroFile(_) => None, + } + } +} + +impl PartialEq for HirFileId { + fn eq(&self, &other: &EditionedFileId) -> bool { + *self == HirFileId::from(other) + } +} +impl PartialEq for EditionedFileId { + fn eq(&self, &other: &HirFileId) -> bool { + other == HirFileId::from(*self) + } +} diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/mod_path.rs b/src/tools/rust-analyzer/crates/hir-expand/src/mod_path.rs index 75b5861454056..9f1e3879e1eeb 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/mod_path.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/mod_path.rs @@ -7,15 +7,15 @@ use std::{ use crate::{ db::ExpandDatabase, - hygiene::{marks_rev, SyntaxContextExt, Transparency}, + hygiene::Transparency, name::{AsName, Name}, tt, }; -use base_db::CrateId; +use base_db::Crate; use intern::sym; use smallvec::SmallVec; -use span::{Edition, SyntaxContextId}; -use syntax::{ast, AstNode}; +use span::{Edition, SyntaxContext}; +use syntax::{AstNode, ast}; #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] pub struct ModPath { @@ -33,7 +33,7 @@ pub enum PathKind { Abs, // FIXME: Can we remove this somehow? /// `$crate` from macro expansion - DollarCrate(CrateId), + DollarCrate(Crate), } impl PathKind { @@ -44,7 +44,7 @@ impl ModPath { pub fn from_src( db: &dyn ExpandDatabase, path: ast::Path, - span_for_range: &mut dyn FnMut(::tt::TextRange) -> SyntaxContextId, + span_for_range: &mut dyn FnMut(::tt::TextRange) -> SyntaxContext, ) -> Option { convert_path(db, path, span_for_range) } @@ -111,8 +111,7 @@ impl ModPath { #[allow(non_snake_case)] pub fn is_Self(&self) -> bool { - self.kind == PathKind::Plain - && matches!(&*self.segments, [name] if *name == sym::Self_.clone()) + self.kind == PathKind::Plain && matches!(&*self.segments, [name] if *name == sym::Self_) } /// If this path is a single identifier, like `foo`, return its name. @@ -209,7 +208,7 @@ fn display_fmt_path( fn convert_path( db: &dyn ExpandDatabase, path: ast::Path, - span_for_range: &mut dyn FnMut(::tt::TextRange) -> SyntaxContextId, + span_for_range: &mut dyn FnMut(::tt::TextRange) -> SyntaxContext, ) -> Option { let mut segments = path.segments(); @@ -251,7 +250,7 @@ fn convert_path( } } ast::PathSegmentKind::SelfTypeKw => { - ModPath::from_segments(PathKind::Plain, Some(Name::new_symbol_root(sym::Self_.clone()))) + ModPath::from_segments(PathKind::Plain, Some(Name::new_symbol_root(sym::Self_))) } ast::PathSegmentKind::CrateKw => ModPath::from_segments(PathKind::Crate, iter::empty()), ast::PathSegmentKind::SelfKw => handle_super_kw(0)?, @@ -277,8 +276,8 @@ fn convert_path( if mod_path.segments.len() == 1 && mod_path.kind == PathKind::Plain { if let Some(_macro_call) = path.syntax().parent().and_then(ast::MacroCall::cast) { let syn_ctx = span_for_range(segment.syntax().text_range()); - if let Some(macro_call_id) = db.lookup_intern_syntax_context(syn_ctx).outer_expn { - if db.lookup_intern_macro_call(macro_call_id).def.local_inner { + if let Some(macro_call_id) = syn_ctx.outer_expn(db) { + if db.lookup_intern_macro_call(macro_call_id.into()).def.local_inner { mod_path.kind = match resolve_crate_root(db, syn_ctx) { Some(crate_root) => PathKind::DollarCrate(crate_root), None => PathKind::Crate, @@ -333,15 +332,15 @@ fn convert_path_tt(db: &dyn ExpandDatabase, tt: tt::TokenTreesView<'_>) -> Optio Some(ModPath { kind, segments }) } -pub fn resolve_crate_root(db: &dyn ExpandDatabase, mut ctxt: SyntaxContextId) -> Option { +pub fn resolve_crate_root(db: &dyn ExpandDatabase, mut ctxt: SyntaxContext) -> Option { // When resolving `$crate` from a `macro_rules!` invoked in a `macro`, // we don't want to pretend that the `macro_rules!` definition is in the `macro` - // as described in `SyntaxContext::apply_mark`, so we ignore prepended opaque marks. + // as described in `SyntaxContextId::apply_mark`, so we ignore prepended opaque marks. // FIXME: This is only a guess and it doesn't work correctly for `macro_rules!` // definitions actually produced by `macro` and `macro` definitions produced by // `macro_rules!`, but at least such configurations are not stable yet. ctxt = ctxt.normalize_to_macro_rules(db); - let mut iter = marks_rev(ctxt, db).peekable(); + let mut iter = ctxt.marks_rev(db).peekable(); let mut result_mark = None; // Find the last opaque mark from the end if it exists. while let Some(&(mark, Transparency::Opaque)) = iter.peek() { @@ -353,7 +352,7 @@ pub fn resolve_crate_root(db: &dyn ExpandDatabase, mut ctxt: SyntaxContextId) -> result_mark = Some(mark); } - result_mark.map(|call| db.lookup_intern_macro_call(call).def.krate) + result_mark.map(|call| db.lookup_intern_macro_call(call.into()).def.krate) } pub use crate::name as __name; @@ -399,7 +398,7 @@ pub use crate::__path as path; macro_rules! __tool_path { ($start:ident $(:: $seg:ident)*) => ({ $crate::mod_path::ModPath::from_segments($crate::mod_path::PathKind::Plain, vec![ - $crate::name::Name::new_symbol_root($crate::intern::sym::rust_analyzer.clone()), $crate::name::Name::new_symbol_root($crate::intern::sym::$start.clone()), $($crate::name::Name::new_symbol_root($crate::intern::sym::$seg.clone()),)* + $crate::name::Name::new_symbol_root($crate::intern::sym::rust_analyzer), $crate::name::Name::new_symbol_root($crate::intern::sym::$start.clone()), $($crate::name::Name::new_symbol_root($crate::intern::sym::$seg.clone()),)* ]) }); } diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/name.rs b/src/tools/rust-analyzer/crates/hir-expand/src/name.rs index 0758bd4515ef2..217d991d110d5 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/name.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/name.rs @@ -2,8 +2,8 @@ use std::fmt; -use intern::{sym, Symbol}; -use span::{Edition, SyntaxContextId}; +use intern::{Symbol, sym}; +use span::{Edition, SyntaxContext}; use syntax::utils::is_raw_identifier; use syntax::{ast, format_smolstr}; @@ -74,7 +74,7 @@ impl Name { Name { symbol: Symbol::intern(text), ctx: () } } - pub fn new(text: &str, mut ctx: SyntaxContextId) -> Name { + pub fn new(text: &str, mut ctx: SyntaxContext) -> Name { // For comparisons etc. we remove the edition, because sometimes we search for some `Name` // and we don't know which edition it came from. // Can't do that for all `SyntaxContextId`s because it breaks Salsa. @@ -88,41 +88,40 @@ impl Name { pub fn new_root(text: &str) -> Name { // The edition doesn't matter for hygiene. - Self::new(text, SyntaxContextId::root(Edition::Edition2015)) + Self::new(text, SyntaxContext::root(Edition::Edition2015)) } pub fn new_tuple_field(idx: usize) -> Name { let symbol = match idx { - 0 => sym::INTEGER_0.clone(), - 1 => sym::INTEGER_1.clone(), - 2 => sym::INTEGER_2.clone(), - 3 => sym::INTEGER_3.clone(), - 4 => sym::INTEGER_4.clone(), - 5 => sym::INTEGER_5.clone(), - 6 => sym::INTEGER_6.clone(), - 7 => sym::INTEGER_7.clone(), - 8 => sym::INTEGER_8.clone(), - 9 => sym::INTEGER_9.clone(), - 10 => sym::INTEGER_10.clone(), - 11 => sym::INTEGER_11.clone(), - 12 => sym::INTEGER_12.clone(), - 13 => sym::INTEGER_13.clone(), - 14 => sym::INTEGER_14.clone(), - 15 => sym::INTEGER_15.clone(), + 0 => sym::INTEGER_0, + 1 => sym::INTEGER_1, + 2 => sym::INTEGER_2, + 3 => sym::INTEGER_3, + 4 => sym::INTEGER_4, + 5 => sym::INTEGER_5, + 6 => sym::INTEGER_6, + 7 => sym::INTEGER_7, + 8 => sym::INTEGER_8, + 9 => sym::INTEGER_9, + 10 => sym::INTEGER_10, + 11 => sym::INTEGER_11, + 12 => sym::INTEGER_12, + 13 => sym::INTEGER_13, + 14 => sym::INTEGER_14, + 15 => sym::INTEGER_15, _ => Symbol::intern(&idx.to_string()), }; Name { symbol, ctx: () } } - pub fn new_lifetime(lt: &ast::Lifetime) -> Name { - let text = lt.text(); - match text.strip_prefix("'r#") { - Some(text) => Self::new_text(&format_smolstr!("'{text}")), - None => Self::new_text(text.as_str()), + pub fn new_lifetime(lt: &str) -> Name { + match lt.strip_prefix("'r#") { + Some(lt) => Self::new_text(&format_smolstr!("'{lt}")), + None => Self::new_text(lt), } } - pub fn new_symbol(symbol: Symbol, ctx: SyntaxContextId) -> Self { + pub fn new_symbol(symbol: Symbol, ctx: SyntaxContext) -> Self { debug_assert!(!symbol.as_str().starts_with("r#")); _ = ctx; Self { symbol, ctx: () } @@ -130,7 +129,7 @@ impl Name { // FIXME: This needs to go once we have hygiene pub fn new_symbol_root(sym: Symbol) -> Self { - Self::new_symbol(sym, SyntaxContextId::root(Edition::Edition2015)) + Self::new_symbol(sym, SyntaxContext::root(Edition::Edition2015)) } /// A fake name for things missing in the source code. @@ -143,7 +142,7 @@ impl Name { /// name is equal only to itself. It's not clear how to implement this in /// salsa though, so we punt on that bit for a moment. pub const fn missing() -> Name { - Name { symbol: sym::consts::MISSING_NAME, ctx: () } + Name { symbol: sym::MISSING_NAME, ctx: () } } /// Returns true if this is a fake name for things missing in the source code. See @@ -192,7 +191,7 @@ impl Name { // FIXME: Remove this in favor of `display`, see fixme on `as_str` #[doc(hidden)] pub fn display_no_db(&self, edition: Edition) -> impl fmt::Display + '_ { - Display { name: self, needs_escaping: is_raw_identifier(self.symbol.as_str(), edition) } + Display { name: self, edition } } pub fn symbol(&self) -> &Symbol { @@ -202,15 +201,28 @@ impl Name { struct Display<'a> { name: &'a Name, - needs_escaping: bool, + edition: Edition, } impl fmt::Display for Display<'_> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - if self.needs_escaping { - write!(f, "r#")?; + let mut symbol = self.name.symbol.as_str(); + + if symbol == "'static" { + // FIXME: '`static` can also be a label, and there it does need escaping. + // But knowing where it is will require adding a parameter to `display()`, + // and that is an infectious change. + return f.write_str(symbol); + } + + if let Some(s) = symbol.strip_prefix('\'') { + f.write_str("'")?; + symbol = s; + } + if is_raw_identifier(symbol, self.edition) { + f.write_str("r#")?; } - fmt::Display::fmt(self.name.symbol.as_str(), f) + f.write_str(symbol) } } @@ -260,7 +272,7 @@ impl AsName for ast::FieldKind { } } -impl AsName for base_db::Dependency { +impl AsName for base_db::BuiltDependency { fn as_name(&self) -> Name { Name::new_symbol_root((*self.name).clone()) } diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/prettify_macro_expansion_.rs b/src/tools/rust-analyzer/crates/hir-expand/src/prettify_macro_expansion_.rs index c744fbce77b7c..11cc434c2d826 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/prettify_macro_expansion_.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/prettify_macro_expansion_.rs @@ -1,9 +1,9 @@ //! Pretty printing of macros output. -use base_db::CrateId; +use base_db::Crate; use rustc_hash::FxHashMap; use syntax::NodeOrToken; -use syntax::{ast::make, SyntaxNode}; +use syntax::{SyntaxNode, ast::make}; use crate::{db::ExpandDatabase, span_map::ExpansionSpanMap}; @@ -13,22 +13,20 @@ pub fn prettify_macro_expansion( db: &dyn ExpandDatabase, syn: SyntaxNode, span_map: &ExpansionSpanMap, - target_crate_id: CrateId, + target_crate_id: Crate, ) -> SyntaxNode { // Because `syntax_bridge::prettify_macro_expansion::prettify_macro_expansion()` clones subtree for `syn`, // that means it will be offsetted to the beginning. let span_offset = syn.text_range().start(); - let crate_graph = db.crate_graph(); - let target_crate = &crate_graph[target_crate_id]; + let target_crate = target_crate_id.data(db); let mut syntax_ctx_id_to_dollar_crate_replacement = FxHashMap::default(); syntax_bridge::prettify_macro_expansion::prettify_macro_expansion(syn, &mut |dollar_crate| { let ctx = span_map.span_at(dollar_crate.text_range().start() + span_offset).ctx; let replacement = syntax_ctx_id_to_dollar_crate_replacement.entry(ctx).or_insert_with(|| { - let ctx_data = db.lookup_intern_syntax_context(ctx); let macro_call_id = - ctx_data.outer_expn.expect("`$crate` cannot come from `SyntaxContextId::ROOT`"); - let macro_call = db.lookup_intern_macro_call(macro_call_id); + ctx.outer_expn(db).expect("`$crate` cannot come from `SyntaxContextId::ROOT`"); + let macro_call = db.lookup_intern_macro_call(macro_call_id.into()); let macro_def_crate = macro_call.def.krate; // First, if this is the same crate as the macro, nothing will work but `crate`. // If not, if the target trait has the macro's crate as a dependency, using the dependency name @@ -42,7 +40,7 @@ pub fn prettify_macro_expansion( target_crate.dependencies.iter().find(|dep| dep.crate_id == macro_def_crate) { make::tokens::ident(dep.name.as_str()) - } else if let Some(crate_name) = &crate_graph[macro_def_crate].display_name { + } else if let Some(crate_name) = ¯o_def_crate.extra_data(db).display_name { make::tokens::ident(crate_name.crate_name().as_str()) } else { return dollar_crate.clone(); diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/proc_macro.rs b/src/tools/rust-analyzer/crates/hir-expand/src/proc_macro.rs index 3dc3dcd760cd3..8a1a33d7e3b42 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/proc_macro.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/proc_macro.rs @@ -1,24 +1,36 @@ //! Proc Macro Expander stuff use core::fmt; +use std::any::Any; use std::{panic::RefUnwindSafe, sync}; -use base_db::{CrateId, Env}; +use base_db::{Crate, CrateBuilderId, CratesIdMap, Env}; use intern::Symbol; use rustc_hash::FxHashMap; use span::Span; +use triomphe::Arc; -use crate::{db::ExpandDatabase, tt, ExpandError, ExpandErrorKind, ExpandResult}; +use crate::{ExpandError, ExpandErrorKind, ExpandResult, db::ExpandDatabase, tt}; -#[derive(Copy, Clone, Eq, PartialEq, Debug, Hash)] +#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug, Hash)] pub enum ProcMacroKind { CustomDerive, Bang, Attr, } +pub trait AsAny: Any { + fn as_any(&self) -> &dyn Any; +} + +impl AsAny for T { + fn as_any(&self) -> &dyn Any { + self + } +} + /// A proc-macro expander implementation. -pub trait ProcMacroExpander: fmt::Debug + Send + Sync + RefUnwindSafe { +pub trait ProcMacroExpander: fmt::Debug + Send + Sync + RefUnwindSafe + AsAny { /// Run the expander with the given input subtree, optional attribute input subtree (for /// [`ProcMacroKind::Attr`]), environment variables, and span information. fn expand( @@ -29,10 +41,20 @@ pub trait ProcMacroExpander: fmt::Debug + Send + Sync + RefUnwindSafe { def_site: Span, call_site: Span, mixed_site: Span, - current_dir: Option, + current_dir: String, ) -> Result; + + fn eq_dyn(&self, other: &dyn ProcMacroExpander) -> bool; } +impl PartialEq for dyn ProcMacroExpander { + fn eq(&self, other: &Self) -> bool { + self.eq_dyn(other) + } +} + +impl Eq for dyn ProcMacroExpander {} + #[derive(Debug)] pub enum ProcMacroExpansionError { /// The proc-macro panicked. @@ -45,41 +67,70 @@ pub type ProcMacroLoadResult = Result, (String, bool)>; type StoredProcMacroLoadResult = Result, (Box, bool)>; #[derive(Default, Debug)] -pub struct ProcMacrosBuilder(FxHashMap); +pub struct ProcMacrosBuilder(FxHashMap>); + impl ProcMacrosBuilder { - pub fn insert(&mut self, proc_macros_crate: CrateId, proc_macro: ProcMacroLoadResult) { + pub fn insert( + &mut self, + proc_macros_crate: CrateBuilderId, + mut proc_macro: ProcMacroLoadResult, + ) { + if let Ok(proc_macros) = &mut proc_macro { + // Sort proc macros to improve incrementality when only their order has changed (ideally the build system + // will not change their order, but just to be sure). + proc_macros.sort_unstable_by(|proc_macro, proc_macro2| { + (proc_macro.name.as_str(), proc_macro.kind) + .cmp(&(proc_macro2.name.as_str(), proc_macro2.kind)) + }); + } self.0.insert( proc_macros_crate, match proc_macro { - Ok(it) => Ok(it.into_boxed_slice()), - Err((e, hard_err)) => Err((e.into_boxed_str(), hard_err)), + Ok(it) => Arc::new(CrateProcMacros(Ok(it.into_boxed_slice()))), + Err((e, hard_err)) => { + Arc::new(CrateProcMacros(Err((e.into_boxed_str(), hard_err)))) + } }, ); } - pub fn build(mut self) -> ProcMacros { - self.0.shrink_to_fit(); - ProcMacros(self.0) + + pub(crate) fn build(self, crates_id_map: &CratesIdMap) -> ProcMacros { + let mut map = self + .0 + .into_iter() + .map(|(krate, proc_macro)| (crates_id_map[&krate], proc_macro)) + .collect::>(); + map.shrink_to_fit(); + ProcMacros(map) } } -#[derive(Default, Debug)] -pub struct ProcMacros(FxHashMap); - -impl FromIterator<(CrateId, ProcMacroLoadResult)> for ProcMacros { - fn from_iter>(iter: T) -> Self { +impl FromIterator<(CrateBuilderId, ProcMacroLoadResult)> for ProcMacrosBuilder { + fn from_iter>(iter: T) -> Self { let mut builder = ProcMacrosBuilder::default(); for (k, v) in iter { builder.insert(k, v); } - builder.build() + builder } } +#[derive(Debug, PartialEq, Eq)] +pub struct CrateProcMacros(StoredProcMacroLoadResult); + +#[derive(Default, Debug)] +pub struct ProcMacros(FxHashMap>); impl ProcMacros { - fn get(&self, krate: CrateId, idx: u32, err_span: Span) -> Result<&ProcMacro, ExpandError> { - let proc_macros = match self.0.get(&krate) { - Some(Ok(proc_macros)) => proc_macros, - Some(Err(_)) | None => { + fn get(&self, krate: Crate) -> Option> { + self.0.get(&krate).cloned() + } +} + +impl CrateProcMacros { + fn get(&self, idx: u32, err_span: Span) -> Result<&ProcMacro, ExpandError> { + let proc_macros = match &self.0 { + Ok(proc_macros) => proc_macros, + Err(_) => { return Err(ExpandError::other( err_span, "internal error: no proc macros for crate", @@ -98,18 +149,17 @@ impl ProcMacros { ) } - pub fn get_error_for_crate(&self, krate: CrateId) -> Option<(&str, bool)> { - self.0.get(&krate).and_then(|it| it.as_ref().err()).map(|(e, hard_err)| (&**e, *hard_err)) + pub fn get_error(&self) -> Option<(&str, bool)> { + self.0.as_ref().err().map(|(e, hard_err)| (&**e, *hard_err)) } /// Fetch the [`CustomProcMacroExpander`]s and their corresponding names for the given crate. - pub fn for_crate( + pub fn list( &self, - krate: CrateId, - def_site_ctx: span::SyntaxContextId, + def_site_ctx: span::SyntaxContext, ) -> Option> { - match self.0.get(&krate) { - Some(Ok(proc_macros)) => Some({ + match &self.0 { + Ok(proc_macros) => Some( proc_macros .iter() .enumerate() @@ -117,15 +167,15 @@ impl ProcMacros { let name = crate::name::Name::new_symbol(it.name.clone(), def_site_ctx); (name, CustomProcMacroExpander::new(idx as u32), it.disabled) }) - .collect() - }), + .collect(), + ), _ => None, } } } /// A loaded proc-macro. -#[derive(Debug, Clone)] +#[derive(Debug, Clone, Eq)] pub struct ProcMacro { /// The name of the proc macro. pub name: Symbol, @@ -137,6 +187,23 @@ pub struct ProcMacro { pub disabled: bool, } +// `#[derive(PartialEq)]` generates a strange "cannot move" error. +impl PartialEq for ProcMacro { + fn eq(&self, other: &Self) -> bool { + let Self { name, kind, expander, disabled } = self; + let Self { + name: other_name, + kind: other_kind, + expander: other_expander, + disabled: other_disabled, + } = other; + name == other_name + && kind == other_kind + && expander == other_expander + && disabled == other_disabled + } +} + /// A custom proc-macro expander handle. This handle together with its crate resolves to a [`ProcMacro`] #[derive(Debug, Clone, Copy, Eq, PartialEq, Hash)] pub struct CustomProcMacroExpander { @@ -186,8 +253,7 @@ impl CustomProcMacroExpander { self.proc_macro_id == Self::PROC_MACRO_ATTR_DISABLED } - /// The macro is explicitly disabled due to proc-macro attribute expansion being disabled. - pub fn as_expand_error(&self, def_crate: CrateId) -> Option { + pub fn as_expand_error(&self, def_crate: Crate) -> Option { match self.proc_macro_id { Self::PROC_MACRO_ATTR_DISABLED => Some(ExpandErrorKind::ProcMacroAttrExpansionDisabled), Self::DISABLED_ID => Some(ExpandErrorKind::MacroDisabled), @@ -199,8 +265,8 @@ impl CustomProcMacroExpander { pub fn expand( self, db: &dyn ExpandDatabase, - def_crate: CrateId, - calling_crate: CrateId, + def_crate: Crate, + calling_crate: Crate, tt: &tt::TopSubtree, attr_arg: Option<&tt::TopSubtree>, def_site: Span, @@ -221,8 +287,22 @@ impl CustomProcMacroExpander { ExpandError::new(call_site, ExpandErrorKind::MacroDisabled), ), id => { - let proc_macros = db.proc_macros(); - let proc_macro = match proc_macros.get(def_crate, id, call_site) { + let proc_macros = match db.proc_macros_for_crate(def_crate) { + Some(it) => it, + None => { + return ExpandResult::new( + tt::TopSubtree::empty(tt::DelimSpan { + open: call_site, + close: call_site, + }), + ExpandError::other( + call_site, + "internal error: no proc macros for crate", + ), + ); + } + }; + let proc_macro = match proc_macros.get(id, call_site) { Ok(proc_macro) => proc_macro, Err(e) => { return ExpandResult::new( @@ -231,15 +311,14 @@ impl CustomProcMacroExpander { close: call_site, }), e, - ) + ); } }; - let krate_graph = db.crate_graph(); // Proc macros have access to the environment variables of the invoking crate. - let env = &krate_graph[calling_crate].env; - let current_dir = - krate_graph[calling_crate].proc_macro_cwd.as_deref().map(ToString::to_string); + let env = calling_crate.env(db); + // FIXME: Can we avoid the string allocation here? + let current_dir = calling_crate.data(db).proc_macro_cwd.to_string(); match proc_macro.expander.expand( tt, @@ -278,3 +357,10 @@ impl CustomProcMacroExpander { } } } + +pub(crate) fn proc_macros_for_crate( + db: &dyn ExpandDatabase, + krate: Crate, +) -> Option> { + db.proc_macros().get(krate) +} diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/span_map.rs b/src/tools/rust-analyzer/crates/hir-expand/src/span_map.rs index 740c27b89cea1..e5a778a95c7c9 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/span_map.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/span_map.rs @@ -1,15 +1,15 @@ //! Span maps for real files and macro expansions. -use span::{EditionedFileId, HirFileId, HirFileIdRepr, MacroFileId, Span, SyntaxContextId}; +use span::{Span, SyntaxContext}; use stdx::TupleExt; -use syntax::{ast, AstNode, TextRange}; +use syntax::{AstNode, TextRange, ast}; use triomphe::Arc; pub use span::RealSpanMap; -use crate::{attrs::collect_attrs, db::ExpandDatabase}; +use crate::{HirFileId, MacroCallId, attrs::collect_attrs, db::ExpandDatabase}; -pub type ExpansionSpanMap = span::SpanMap; +pub type ExpansionSpanMap = span::SpanMap; /// Spanmap for a macro file or a real file #[derive(Clone, Debug, PartialEq, Eq)] @@ -61,9 +61,9 @@ impl SpanMap { #[inline] pub(crate) fn new(db: &dyn ExpandDatabase, file_id: HirFileId) -> SpanMap { - match file_id.repr() { - HirFileIdRepr::FileId(file_id) => SpanMap::RealSpanMap(db.real_span_map(file_id)), - HirFileIdRepr::MacroFile(m) => { + match file_id { + HirFileId::FileId(file_id) => SpanMap::RealSpanMap(db.real_span_map(file_id)), + HirFileId::MacroFile(m) => { SpanMap::ExpansionSpanMap(db.parse_macro_expansion(m).value.1) } } @@ -79,11 +79,15 @@ impl SpanMapRef<'_> { } } -pub(crate) fn real_span_map(db: &dyn ExpandDatabase, file_id: EditionedFileId) -> Arc { +pub(crate) fn real_span_map( + db: &dyn ExpandDatabase, + editioned_file_id: base_db::EditionedFileId, +) -> Arc { use syntax::ast::HasModuleItem; let mut pairs = vec![(syntax::TextSize::new(0), span::ROOT_ERASED_FILE_AST_ID)]; - let ast_id_map = db.ast_id_map(file_id.into()); - let tree = db.parse(file_id).tree(); + let ast_id_map = db.ast_id_map(editioned_file_id.into()); + + let tree = db.parse(editioned_file_id).tree(); // This is an incrementality layer. Basically we can't use absolute ranges for our spans as that // would mean we'd invalidate everything whenever we type. So instead we make the text ranges // relative to some AstIds reducing the risk of invalidation as typing somewhere no longer @@ -134,7 +138,7 @@ pub(crate) fn real_span_map(db: &dyn ExpandDatabase, file_id: EditionedFileId) - }); Arc::new(RealSpanMap::from_file( - file_id, + editioned_file_id.editioned_file_id(db), pairs.into_boxed_slice(), tree.syntax().text_range().end(), )) @@ -142,7 +146,7 @@ pub(crate) fn real_span_map(db: &dyn ExpandDatabase, file_id: EditionedFileId) - pub(crate) fn expansion_span_map( db: &dyn ExpandDatabase, - file_id: MacroFileId, + file_id: MacroCallId, ) -> Arc { db.parse_macro_expansion(file_id).value.1 } diff --git a/src/tools/rust-analyzer/crates/hir-ty/Cargo.toml b/src/tools/rust-analyzer/crates/hir-ty/Cargo.toml index 1d12bee646c4f..efa544cf39651 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/Cargo.toml +++ b/src/tools/rust-analyzer/crates/hir-ty/Cargo.toml @@ -12,27 +12,29 @@ rust-version.workspace = true [lib] [dependencies] -cov-mark = "2.0.0-pre.1" +cov-mark = "2.0.0" itertools.workspace = true arrayvec.workspace = true bitflags.workspace = true smallvec.workspace = true -ena = "0.14.0" +ena = "0.14.3" either.workspace = true -oorandom = "11.1.3" +oorandom = "11.1.5" tracing.workspace = true rustc-hash.workspace = true -scoped-tls = "1.0.0" +scoped-tls = "1.0.1" chalk-solve.workspace = true chalk-ir.workspace = true chalk-recursive.workspace = true chalk-derive.workspace = true la-arena.workspace = true triomphe.workspace = true -nohash-hasher.workspace = true -typed-arena = "2.0.1" +typed-arena = "2.0.2" indexmap.workspace = true -rustc_apfloat = "0.2.0" +rustc_apfloat = "0.2.2" +query-group.workspace = true +salsa.workspace = true +salsa-macros.workspace = true ra-ap-rustc_abi.workspace = true ra-ap-rustc_index.workspace = true @@ -49,7 +51,7 @@ syntax.workspace = true span.workspace = true [dev-dependencies] -expect-test = "1.4.0" +expect-test = "1.5.1" tracing.workspace = true tracing-subscriber.workspace = true tracing-tree.workspace = true diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/autoderef.rs b/src/tools/rust-analyzer/crates/hir-ty/src/autoderef.rs index 171ba001c4a79..7acc9456ec9cb 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/autoderef.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/autoderef.rs @@ -12,8 +12,8 @@ use intern::sym; use triomphe::Arc; use crate::{ - db::HirDatabase, infer::unify::InferenceTable, Canonical, Goal, Interner, ProjectionTyExt, - TraitEnvironment, Ty, TyBuilder, TyKind, + Canonical, Goal, Interner, ProjectionTyExt, TraitEnvironment, Ty, TyBuilder, TyKind, + db::HirDatabase, infer::unify::InferenceTable, }; const AUTODEREF_RECURSION_LIMIT: usize = 20; @@ -198,20 +198,17 @@ pub(crate) fn deref_by_trait( // blanked impl on `Deref`. #[expect(clippy::overly_complex_bool_expr)] if use_receiver_trait && false { - if let Some(receiver) = - db.lang_item(table.trait_env.krate, LangItem::Receiver).and_then(|l| l.as_trait()) - { + if let Some(receiver) = LangItem::Receiver.resolve_trait(db, table.trait_env.krate) { return Some(receiver); } } // Old rustc versions might not have `Receiver` trait. // Fallback to `Deref` if they don't - db.lang_item(table.trait_env.krate, LangItem::Deref).and_then(|l| l.as_trait()) + LangItem::Deref.resolve_trait(db, table.trait_env.krate) }; let trait_id = trait_id()?; - let target = db - .trait_data(trait_id) - .associated_type_by_name(&Name::new_symbol_root(sym::Target.clone()))?; + let target = + db.trait_items(trait_id).associated_type_by_name(&Name::new_symbol_root(sym::Target))?; let projection = { let b = TyBuilder::subst_for_def(db, trait_id, None); diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/builder.rs b/src/tools/rust-analyzer/crates/hir-ty/src/builder.rs index 76d9c60f6f903..77d15a73af6ff 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/builder.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/builder.rs @@ -3,21 +3,21 @@ use std::iter; use chalk_ir::{ + AdtId, DebruijnIndex, Scalar, cast::{Cast, CastTo, Caster}, fold::TypeFoldable, interner::HasInterner, - AdtId, DebruijnIndex, Scalar, }; use hir_def::{ - builtin_type::BuiltinType, DefWithBodyId, GenericDefId, GenericParamId, TraitId, TypeAliasId, + DefWithBodyId, GenericDefId, GenericParamId, TraitId, TypeAliasId, builtin_type::BuiltinType, }; use smallvec::SmallVec; use crate::{ - consteval::unknown_const_as_generic, db::HirDatabase, error_lifetime, generics::generics, - infer::unify::InferenceTable, primitive, to_assoc_type_id, to_chalk_trait_id, Binders, - BoundVar, CallableSig, GenericArg, GenericArgData, Interner, ProjectionTy, Substitution, - TraitRef, Ty, TyDefId, TyExt, TyKind, + Binders, BoundVar, CallableSig, GenericArg, GenericArgData, Interner, ProjectionTy, + Substitution, TraitRef, Ty, TyDefId, TyExt, TyKind, consteval::unknown_const_as_generic, + db::HirDatabase, error_lifetime, generics::generics, infer::unify::InferenceTable, primitive, + to_assoc_type_id, to_chalk_trait_id, }; #[derive(Debug, Clone, PartialEq, Eq)] @@ -76,7 +76,7 @@ impl TyBuilder { } let subst = Substitution::from_iter( Interner, - self.vec.into_iter().chain(self.parent_subst.iter(Interner).cloned()), + self.parent_subst.iter(Interner).cloned().chain(self.vec), ); (self.data, subst) } @@ -209,12 +209,12 @@ impl TyBuilder<()> { } pub fn placeholder_subst(db: &dyn HirDatabase, def: impl Into) -> Substitution { - let params = generics(db.upcast(), def.into()); + let params = generics(db, def.into()); params.placeholder_subst(db) } pub fn unknown_subst(db: &dyn HirDatabase, def: impl Into) -> Substitution { - let params = generics(db.upcast(), def.into()); + let params = generics(db, def.into()); Substitution::from_iter( Interner, params.iter_id().map(|id| match id { @@ -233,7 +233,7 @@ impl TyBuilder<()> { def: impl Into, parent_subst: Option, ) -> TyBuilder<()> { - let generics = generics(db.upcast(), def.into()); + let generics = generics(db, def.into()); assert!(generics.parent_generics().is_some() == parent_subst.is_some()); let params = generics .iter_self() @@ -259,11 +259,10 @@ impl TyBuilder<()> { /// This method prepopulates the builder with placeholder substitution of `parent`, so you /// should only push exactly 3 `GenericArg`s before building. pub fn subst_for_coroutine(db: &dyn HirDatabase, parent: DefWithBodyId) -> TyBuilder<()> { - let parent_subst = parent - .as_generic_def_id(db.upcast()) - .map(|p| generics(db.upcast(), p).placeholder_subst(db)); + let parent_subst = + parent.as_generic_def_id(db).map(|p| generics(db, p).placeholder_subst(db)); // These represent resume type, yield type, and return type of coroutine. - let params = std::iter::repeat(ParamKind::Type).take(3).collect(); + let params = std::iter::repeat_n(ParamKind::Type, 3).collect(); TyBuilder::new((), params, parent_subst) } @@ -274,13 +273,15 @@ impl TyBuilder<()> { ) -> Substitution { let sig_ty = sig_ty.cast(Interner); let self_subst = iter::once(&sig_ty); - let Some(parent) = parent.as_generic_def_id(db.upcast()) else { + let Some(parent) = parent.as_generic_def_id(db) else { return Substitution::from_iter(Interner, self_subst); }; Substitution::from_iter( Interner, - self_subst - .chain(generics(db.upcast(), parent).placeholder_subst(db).iter(Interner)) + generics(db, parent) + .placeholder_subst(db) + .iter(Interner) + .chain(self_subst) .cloned() .collect::>(), ) @@ -305,29 +306,28 @@ impl TyBuilder { // Note that we're building ADT, so we never have parent generic parameters. let defaults = db.generic_defaults(self.data.into()); - for default_ty in &defaults[self.vec.len()..] { - // NOTE(skip_binders): we only check if the arg type is error type. - if let Some(x) = default_ty.skip_binders().ty(Interner) { - if x.is_unknown() { - self.vec.push(fallback().cast(Interner)); - continue; + if let Some(defaults) = defaults.get(self.vec.len()..) { + for default_ty in defaults { + // NOTE(skip_binders): we only check if the arg type is error type. + if let Some(x) = default_ty.skip_binders().ty(Interner) { + if x.is_unknown() { + self.vec.push(fallback().cast(Interner)); + continue; + } } + // Each default can only depend on the previous parameters. + self.vec.push(default_ty.clone().substitute(Interner, &*self.vec).cast(Interner)); } - // Each default can only depend on the previous parameters. - let subst_so_far = Substitution::from_iter( - Interner, - self.vec - .iter() - .cloned() - .chain(self.param_kinds[self.vec.len()..].iter().map(|it| match it { - ParamKind::Type => TyKind::Error.intern(Interner).cast(Interner), - ParamKind::Lifetime => error_lifetime().cast(Interner), - ParamKind::Const(ty) => unknown_const_as_generic(ty.clone()), - })) - .take(self.param_kinds.len()), - ); - self.vec.push(default_ty.clone().substitute(Interner, &subst_so_far).cast(Interner)); } + + // The defaults may be missing if no param has default, so fill that. + let filler = self.param_kinds[self.vec.len()..].iter().map(|x| match x { + ParamKind::Type => fallback().cast(Interner), + ParamKind::Const(ty) => unknown_const_as_generic(ty.clone()), + ParamKind::Lifetime => error_lifetime().cast(Interner), + }); + self.vec.extend(filler.casted(Interner)); + self } @@ -340,7 +340,7 @@ impl TyBuilder { pub struct Tuple(usize); impl TyBuilder { pub fn tuple(size: usize) -> TyBuilder { - TyBuilder::new(Tuple(size), iter::repeat(ParamKind::Type).take(size).collect(), None) + TyBuilder::new(Tuple(size), std::iter::repeat_n(ParamKind::Type, size).collect(), None) } pub fn build(self) -> Ty { @@ -356,7 +356,7 @@ impl TyBuilder { let elements = elements.into_iter(); let len = elements.len(); let mut b = - TyBuilder::new(Tuple(len), iter::repeat(ParamKind::Type).take(len).collect(), None); + TyBuilder::new(Tuple(len), std::iter::repeat_n(ParamKind::Type, len).collect(), None); for e in elements { b = b.push(e); } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/chalk_db.rs b/src/tools/rust-analyzer/crates/hir-ty/src/chalk_db.rs index 65fb342f75258..cd799c03ddf7f 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/chalk_db.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/chalk_db.rs @@ -8,31 +8,33 @@ use intern::sym; use span::Edition; use tracing::debug; -use chalk_ir::{cast::Caster, fold::shift::Shift, CanonicalVarKinds}; +use chalk_ir::{CanonicalVarKinds, cast::Caster, fold::shift::Shift}; use chalk_solve::rust_ir::{self, OpaqueTyDatumBound, WellKnownTrait}; -use base_db::CrateId; +use base_db::Crate; use hir_def::{ - data::{adt::StructFlags, TraitFlags}, - hir::Movability, - lang_item::{LangItem, LangItemTarget}, AssocItemId, BlockId, CallableDefId, GenericDefId, HasModule, ItemContainerId, Lookup, TypeAliasId, VariantId, + hir::Movability, + lang_item::LangItem, + signatures::{ImplFlags, StructFlags, TraitFlags}, }; use crate::{ + AliasEq, AliasTy, BoundVar, DebruijnIndex, Interner, ProjectionTy, ProjectionTyExt, + QuantifiedWhereClause, Substitution, TraitRef, TraitRefExt, Ty, TyBuilder, TyExt, TyKind, + WhereClause, db::{HirDatabase, InternedCoroutine}, from_assoc_type_id, from_chalk_trait_id, from_foreign_def_id, generics::generics, + lower::LifetimeElisionKind, make_binders, make_single_type_binders, - mapping::{from_chalk, ToChalk, TypeAliasAsValue}, - method_resolution::{TraitImpls, TyFingerprint, ALL_FLOAT_FPS, ALL_INT_FPS}, + mapping::{ToChalk, TypeAliasAsValue, from_chalk}, + method_resolution::{ALL_FLOAT_FPS, ALL_INT_FPS, TraitImpls, TyFingerprint}, to_assoc_type_id, to_chalk_trait_id, traits::ChalkContext, utils::ClosureSubst, - wrap_empty_binders, AliasEq, AliasTy, BoundVar, DebruijnIndex, FnDefId, Interner, ProjectionTy, - ProjectionTyExt, QuantifiedWhereClause, Substitution, TraitRef, TraitRefExt, Ty, TyBuilder, - TyExt, TyKind, WhereClause, + wrap_empty_binders, }; pub(crate) type AssociatedTyDatum = chalk_solve::rust_ir::AssociatedTyDatum; @@ -52,7 +54,23 @@ pub(crate) type Variances = chalk_ir::Variances; impl chalk_solve::RustIrDatabase for ChalkContext<'_> { fn associated_ty_data(&self, id: AssocTypeId) -> Arc { - self.db.associated_ty_data(id) + self.db.associated_ty_data(from_assoc_type_id(id)) + } + fn associated_ty_from_impl( + &self, + impl_id: chalk_ir::ImplId, + assoc_type_id: chalk_ir::AssocTypeId, + ) -> Option> { + let alias_id = from_assoc_type_id(assoc_type_id); + let trait_sig = self.db.type_alias_signature(alias_id); + self.db.impl_items(hir_def::ImplId::from_chalk(self.db, impl_id)).items.iter().find_map( + |(name, item)| match item { + AssocItemId::TypeAliasId(alias) if &trait_sig.name == name => { + Some(TypeAliasAsValue(*alias).to_chalk(self.db)) + } + _ => None, + }, + ) } fn trait_datum(&self, trait_id: TraitId) -> Arc { self.db.trait_datum(self.krate, trait_id) @@ -67,7 +85,7 @@ impl chalk_solve::RustIrDatabase for ChalkContext<'_> { fn discriminant_type(&self, ty: chalk_ir::Ty) -> chalk_ir::Ty { if let chalk_ir::TyKind::Adt(id, _) = ty.kind(Interner) { if let hir_def::AdtId::EnumId(e) = id.0 { - let enum_data = self.db.enum_data(e); + let enum_data = self.db.enum_signature(e); let ty = enum_data.repr.unwrap_or_default().discr_type(); return chalk_ir::TyKind::Scalar(match ty { hir_def::layout::IntegerType::Pointer(is_signed) => match is_signed { @@ -104,7 +122,7 @@ impl chalk_solve::RustIrDatabase for ChalkContext<'_> { &self, fn_def_id: chalk_ir::FnDefId, ) -> Arc> { - self.db.fn_def_datum(fn_def_id) + self.db.fn_def_datum(from_chalk(self.db, fn_def_id)) } fn impls_for_trait( @@ -137,7 +155,7 @@ impl chalk_solve::RustIrDatabase for ChalkContext<'_> { let fps: &[TyFingerprint] = match binder_kind(&ty, binders) { Some(chalk_ir::TyVariableKind::Integer) => &ALL_INT_FPS, Some(chalk_ir::TyVariableKind::Float) => &ALL_FLOAT_FPS, - _ => self_ty_fp.as_ref().map(std::slice::from_ref).unwrap_or(&[]), + _ => self_ty_fp.as_slice(), }; let id_to_chalk = |id: hir_def::ImplId| id.to_chalk(self.db); @@ -145,19 +163,18 @@ impl chalk_solve::RustIrDatabase for ChalkContext<'_> { let mut result = vec![]; if fps.is_empty() { debug!("Unrestricted search for {:?} impls...", trait_); - let _ = self.for_trait_impls(trait_, self_ty_fp, |impls| { + _ = self.for_trait_impls(trait_, self_ty_fp, |impls| { result.extend(impls.for_trait(trait_).map(id_to_chalk)); ControlFlow::Continue(()) }); } else { - let _ = self.for_trait_impls(trait_, self_ty_fp, |impls| { - result.extend( - fps.iter().flat_map(move |fp| { + _ = + self.for_trait_impls(trait_, self_ty_fp, |impls| { + result.extend(fps.iter().flat_map(move |fp| { impls.for_trait_and_self_ty(trait_, *fp).map(id_to_chalk) - }), - ); - ControlFlow::Continue(()) - }); + })); + ControlFlow::Continue(()) + }); }; debug!("impls_for_trait returned {} impls", result.len()); @@ -245,10 +262,7 @@ impl chalk_solve::RustIrDatabase for ChalkContext<'_> { well_known_trait: rust_ir::WellKnownTrait, ) -> Option> { let lang_attr = lang_item_from_well_known_trait(well_known_trait); - let trait_ = match self.db.lang_item(self.krate, lang_attr) { - Some(LangItemTarget::Trait(trait_)) => trait_, - _ => return None, - }; + let trait_ = lang_attr.resolve_trait(self.db, self.krate)?; Some(to_chalk_trait_id(trait_)) } @@ -290,15 +304,13 @@ impl chalk_solve::RustIrDatabase for ChalkContext<'_> { } crate::ImplTraitId::AsyncBlockTypeImplTrait(..) => { if let Some((future_trait, future_output)) = - self.db - .lang_item(self.krate, LangItem::Future) - .and_then(|item| item.as_trait()) - .and_then(|trait_| { - let alias = self.db.trait_data(trait_).associated_type_by_name( - &Name::new_symbol_root(sym::Output.clone()), - )?; - Some((trait_, alias)) - }) + LangItem::Future.resolve_trait(self.db, self.krate).and_then(|trait_| { + let alias = self + .db + .trait_items(trait_) + .associated_type_by_name(&Name::new_symbol_root(sym::Output))?; + Some((trait_, alias)) + }) { // Making up Symbol’s value as variable is void: AsyncBlock: // @@ -320,10 +332,7 @@ impl chalk_solve::RustIrDatabase for ChalkContext<'_> { }); let mut binder = vec![]; binder.push(crate::wrap_empty_binders(impl_bound)); - let sized_trait = self - .db - .lang_item(self.krate, LangItem::Sized) - .and_then(|item| item.as_trait()); + let sized_trait = LangItem::Sized.resolve_trait(self.db, self.krate); if let Some(sized_trait_) = sized_trait { let sized_bound = WhereClause::Implemented(TraitRef { trait_id: to_chalk_trait_id(sized_trait_), @@ -426,19 +435,19 @@ impl chalk_solve::RustIrDatabase for ChalkContext<'_> { fn trait_name(&self, trait_id: chalk_ir::TraitId) -> String { let id = from_chalk_trait_id(trait_id); - self.db.trait_data(id).name.display(self.db.upcast(), self.edition()).to_string() + self.db.trait_signature(id).name.display(self.db, self.edition()).to_string() } fn adt_name(&self, chalk_ir::AdtId(adt_id): AdtId) -> String { let edition = self.edition(); match adt_id { hir_def::AdtId::StructId(id) => { - self.db.struct_data(id).name.display(self.db.upcast(), edition).to_string() + self.db.struct_signature(id).name.display(self.db, edition).to_string() } hir_def::AdtId::EnumId(id) => { - self.db.enum_data(id).name.display(self.db.upcast(), edition).to_string() + self.db.enum_signature(id).name.display(self.db, edition).to_string() } hir_def::AdtId::UnionId(id) => { - self.db.union_data(id).name.display(self.db.upcast(), edition).to_string() + self.db.union_signature(id).name.display(self.db, edition).to_string() } } } @@ -447,14 +456,14 @@ impl chalk_solve::RustIrDatabase for ChalkContext<'_> { Arc::new(rust_ir::AdtSizeAlign::from_one_zst(false)) } fn assoc_type_name(&self, assoc_ty_id: chalk_ir::AssocTypeId) -> String { - let id = self.db.associated_ty_data(assoc_ty_id).name; - self.db.type_alias_data(id).name.display(self.db.upcast(), self.edition()).to_string() + let id = self.db.associated_ty_data(from_assoc_type_id(assoc_ty_id)).name; + self.db.type_alias_signature(id).name.display(self.db, self.edition()).to_string() } fn opaque_type_name(&self, opaque_ty_id: chalk_ir::OpaqueTyId) -> String { - format!("Opaque_{}", opaque_ty_id.0) + format!("Opaque_{:?}", opaque_ty_id.0) } fn fn_def_name(&self, fn_def_id: chalk_ir::FnDefId) -> String { - format!("fn_{}", fn_def_id.0) + format!("fn_{:?}", fn_def_id.0) } fn coroutine_datum( &self, @@ -467,12 +476,13 @@ impl chalk_solve::RustIrDatabase for ChalkContext<'_> { // `resume_type`, `yield_type`, and `return_type` of the coroutine in question. let subst = TyBuilder::subst_for_coroutine(self.db, parent).fill_with_unknown().build(); + let len = subst.len(Interner); let input_output = rust_ir::CoroutineInputOutputDatum { - resume_type: TyKind::BoundVar(BoundVar::new(DebruijnIndex::INNERMOST, 0)) + resume_type: TyKind::BoundVar(BoundVar::new(DebruijnIndex::INNERMOST, len - 3)) .intern(Interner), - yield_type: TyKind::BoundVar(BoundVar::new(DebruijnIndex::INNERMOST, 1)) + yield_type: TyKind::BoundVar(BoundVar::new(DebruijnIndex::INNERMOST, len - 2)) .intern(Interner), - return_type: TyKind::BoundVar(BoundVar::new(DebruijnIndex::INNERMOST, 2)) + return_type: TyKind::BoundVar(BoundVar::new(DebruijnIndex::INNERMOST, len - 1)) .intern(Interner), // FIXME: calculate upvars upvars: vec![], @@ -523,7 +533,7 @@ impl chalk_solve::RustIrDatabase for ChalkContext<'_> { impl ChalkContext<'_> { fn edition(&self) -> Edition { - self.db.crate_graph()[self.krate].edition + self.krate.data(self.db).edition } fn for_trait_impls( @@ -537,13 +547,13 @@ impl ChalkContext<'_> { // `impl_datum` relies on that and will panic if the trait can't be resolved. let in_deps = self.db.trait_impls_in_deps(self.krate); let in_self = self.db.trait_impls_in_crate(self.krate); - let trait_module = trait_id.module(self.db.upcast()); + let trait_module = trait_id.module(self.db); let type_module = match self_ty_fp { - Some(TyFingerprint::Adt(adt_id)) => Some(adt_id.module(self.db.upcast())), + Some(TyFingerprint::Adt(adt_id)) => Some(adt_id.module(self.db)), Some(TyFingerprint::ForeignType(type_id)) => { - Some(from_foreign_def_id(type_id).module(self.db.upcast())) + Some(from_foreign_def_id(type_id).module(self.db)) } - Some(TyFingerprint::Dyn(trait_id)) => Some(trait_id.module(self.db.upcast())), + Some(TyFingerprint::Dyn(trait_id)) => Some(trait_id.module(self.db)), _ => None, }; @@ -552,7 +562,7 @@ impl ChalkContext<'_> { let block_impls = iter::successors(self.block, |&block_id| { cov_mark::hit!(block_local_impls); - self.db.block_def_map(block_id).parent().and_then(|module| module.containing_block()) + block_id.loc(self.db).module.containing_block() }) .inspect(|&block_id| { // make sure we don't search the same block twice @@ -583,17 +593,17 @@ impl chalk_ir::UnificationDatabase for &dyn HirDatabase { &self, fn_def_id: chalk_ir::FnDefId, ) -> chalk_ir::Variances { - HirDatabase::fn_def_variance(*self, fn_def_id) + HirDatabase::fn_def_variance(*self, from_chalk(*self, fn_def_id)) } fn adt_variance(&self, adt_id: chalk_ir::AdtId) -> chalk_ir::Variances { - HirDatabase::adt_variance(*self, adt_id) + HirDatabase::adt_variance(*self, adt_id.0) } } pub(crate) fn program_clauses_for_chalk_env_query( db: &dyn HirDatabase, - krate: CrateId, + krate: Crate, block: Option, environment: chalk_ir::Environment, ) -> chalk_ir::ProgramClauses { @@ -602,28 +612,35 @@ pub(crate) fn program_clauses_for_chalk_env_query( pub(crate) fn associated_ty_data_query( db: &dyn HirDatabase, - id: AssocTypeId, + type_alias: TypeAliasId, ) -> Arc { - debug!("associated_ty_data {:?}", id); - let type_alias: TypeAliasId = from_assoc_type_id(id); - let trait_ = match type_alias.lookup(db.upcast()).container { + debug!("associated_ty_data {:?}", type_alias); + let trait_ = match type_alias.lookup(db).container { ItemContainerId::TraitId(t) => t, _ => panic!("associated type not in trait"), }; // Lower bounds -- we could/should maybe move this to a separate query in `lower` - let type_alias_data = db.type_alias_data(type_alias); - let generic_params = generics(db.upcast(), type_alias.into()); - let resolver = hir_def::resolver::HasResolver::resolver(type_alias, db.upcast()); - let mut ctx = - crate::TyLoweringContext::new(db, &resolver, &type_alias_data.types_map, type_alias.into()) - .with_type_param_mode(crate::lower::ParamLoweringMode::Variable); + let type_alias_data = db.type_alias_signature(type_alias); + let generic_params = generics(db, type_alias.into()); + let resolver = hir_def::resolver::HasResolver::resolver(type_alias, db); + let mut ctx = crate::TyLoweringContext::new( + db, + &resolver, + &type_alias_data.store, + type_alias.into(), + LifetimeElisionKind::AnonymousReportError, + ) + .with_type_param_mode(crate::lower::ParamLoweringMode::Variable); let trait_subst = TyBuilder::subst_for_def(db, trait_, None) - .fill_with_bound_vars(crate::DebruijnIndex::INNERMOST, generic_params.len_self()) + .fill_with_bound_vars(crate::DebruijnIndex::INNERMOST, 0) .build(); let pro_ty = TyBuilder::assoc_type_projection(db, type_alias, Some(trait_subst)) - .fill_with_bound_vars(crate::DebruijnIndex::INNERMOST, 0) + .fill_with_bound_vars( + crate::DebruijnIndex::INNERMOST, + generic_params.parent_generics().map_or(0, |it| it.len()), + ) .build(); let self_ty = TyKind::Alias(AliasTy::Projection(pro_ty)).intern(Interner); @@ -637,9 +654,8 @@ pub(crate) fn associated_ty_data_query( } if !ctx.unsized_types.contains(&self_ty) { - let sized_trait = db - .lang_item(resolver.krate(), LangItem::Sized) - .and_then(|lang_item| lang_item.as_trait().map(to_chalk_trait_id)); + let sized_trait = + LangItem::Sized.resolve_trait(db, resolver.krate()).map(to_chalk_trait_id); let sized_bound = sized_trait.into_iter().map(|sized_trait| { let trait_bound = rust_ir::TraitBound { trait_id: sized_trait, args_no_self: Default::default() }; @@ -656,7 +672,7 @@ pub(crate) fn associated_ty_data_query( let bound_data = rust_ir::AssociatedTyDatumBound { bounds, where_clauses: vec![] }; let datum = AssociatedTyDatum { trait_id: to_chalk_trait_id(trait_), - id, + id: to_assoc_type_id(type_alias), name: type_alias, binders: make_binders(db, &generic_params, bound_data), }; @@ -665,26 +681,27 @@ pub(crate) fn associated_ty_data_query( pub(crate) fn trait_datum_query( db: &dyn HirDatabase, - krate: CrateId, + krate: Crate, trait_id: TraitId, ) -> Arc { debug!("trait_datum {:?}", trait_id); let trait_ = from_chalk_trait_id(trait_id); - let trait_data = db.trait_data(trait_); + let trait_data = db.trait_signature(trait_); debug!("trait {:?} = {:?}", trait_id, trait_data.name); - let generic_params = generics(db.upcast(), trait_.into()); + let generic_params = generics(db, trait_.into()); let bound_vars = generic_params.bound_vars_subst(db, DebruijnIndex::INNERMOST); let flags = rust_ir::TraitFlags { - auto: trait_data.flags.contains(TraitFlags::IS_AUTO), - upstream: trait_.lookup(db.upcast()).container.krate() != krate, + auto: trait_data.flags.contains(TraitFlags::AUTO), + upstream: trait_.lookup(db).container.krate() != krate, non_enumerable: true, coinductive: false, // only relevant for Chalk testing // FIXME: set these flags correctly marker: false, - fundamental: trait_data.flags.contains(TraitFlags::IS_FUNDAMENTAL), + fundamental: trait_data.flags.contains(TraitFlags::FUNDAMENTAL), }; let where_clauses = convert_where_clauses(db, trait_.into(), &bound_vars); - let associated_ty_ids = trait_data.associated_types().map(to_assoc_type_id).collect(); + let associated_ty_ids = + db.trait_items(trait_).associated_types().map(to_assoc_type_id).collect(); let trait_datum_bound = rust_ir::TraitDatumBound { where_clauses }; let well_known = db.lang_attr(trait_.into()).and_then(well_known_trait_from_lang_item); let trait_datum = TraitDatum { @@ -750,35 +767,32 @@ fn lang_item_from_well_known_trait(trait_: WellKnownTrait) -> LangItem { pub(crate) fn adt_datum_query( db: &dyn HirDatabase, - krate: CrateId, + krate: Crate, chalk_ir::AdtId(adt_id): AdtId, ) -> Arc { debug!("adt_datum {:?}", adt_id); - let generic_params = generics(db.upcast(), adt_id.into()); + let generic_params = generics(db, adt_id.into()); let bound_vars_subst = generic_params.bound_vars_subst(db, DebruijnIndex::INNERMOST); let where_clauses = convert_where_clauses(db, adt_id.into(), &bound_vars_subst); let (fundamental, phantom_data) = match adt_id { hir_def::AdtId::StructId(s) => { - let flags = db.struct_data(s).flags; - ( - flags.contains(StructFlags::IS_FUNDAMENTAL), - flags.contains(StructFlags::IS_PHANTOM_DATA), - ) + let flags = db.struct_signature(s).flags; + (flags.contains(StructFlags::FUNDAMENTAL), flags.contains(StructFlags::IS_PHANTOM_DATA)) } // FIXME set fundamental flags correctly hir_def::AdtId::UnionId(_) => (false, false), hir_def::AdtId::EnumId(_) => (false, false), }; let flags = rust_ir::AdtFlags { - upstream: adt_id.module(db.upcast()).krate() != krate, + upstream: adt_id.module(db).krate() != krate, fundamental, phantom_data, }; // this slows down rust-analyzer by quite a bit unfortunately, so enabling this is currently not worth it let _variant_id_to_fields = |id: VariantId| { - let variant_data = &id.variant_data(db.upcast()); + let variant_data = &id.variant_data(db); let fields = if variant_data.fields().is_empty() { vec![] } else { @@ -800,7 +814,7 @@ pub(crate) fn adt_datum_query( } hir_def::AdtId::EnumId(id) => { let variants = db - .enum_data(id) + .enum_variants(id) .variants .iter() .map(|&(variant_id, _)| variant_id_to_fields(variant_id.into())) @@ -824,7 +838,7 @@ pub(crate) fn adt_datum_query( pub(crate) fn impl_datum_query( db: &dyn HirDatabase, - krate: CrateId, + krate: Crate, impl_id: ImplId, ) -> Arc { let _p = tracing::info_span!("impl_datum_query").entered(); @@ -833,35 +847,31 @@ pub(crate) fn impl_datum_query( impl_def_datum(db, krate, impl_) } -fn impl_def_datum( - db: &dyn HirDatabase, - krate: CrateId, - impl_id: hir_def::ImplId, -) -> Arc { +fn impl_def_datum(db: &dyn HirDatabase, krate: Crate, impl_id: hir_def::ImplId) -> Arc { let trait_ref = db .impl_trait(impl_id) // ImplIds for impls where the trait ref can't be resolved should never reach Chalk .expect("invalid impl passed to Chalk") .into_value_and_skipped_binders() .0; - let impl_data = db.impl_data(impl_id); + let impl_data = db.impl_signature(impl_id); - let generic_params = generics(db.upcast(), impl_id.into()); + let generic_params = generics(db, impl_id.into()); let bound_vars = generic_params.bound_vars_subst(db, DebruijnIndex::INNERMOST); let trait_ = trait_ref.hir_trait_id(); - let impl_type = if impl_id.lookup(db.upcast()).container.krate() == krate { + let impl_type = if impl_id.lookup(db).container.krate() == krate { rust_ir::ImplType::Local } else { rust_ir::ImplType::External }; let where_clauses = convert_where_clauses(db, impl_id.into(), &bound_vars); - let negative = impl_data.is_negative; - + let negative = impl_data.flags.contains(ImplFlags::NEGATIVE); let polarity = if negative { rust_ir::Polarity::Negative } else { rust_ir::Polarity::Positive }; let impl_datum_bound = rust_ir::ImplDatumBound { trait_ref, where_clauses }; - let trait_data = db.trait_data(trait_); - let associated_ty_value_ids = impl_data + let trait_data = db.trait_items(trait_); + let associated_ty_value_ids = db + .impl_items(impl_id) .items .iter() .filter_map(|(_, item)| match item { @@ -870,7 +880,7 @@ fn impl_def_datum( }) .filter(|&type_alias| { // don't include associated types that don't exist in the trait - let name = &db.type_alias_data(type_alias).name; + let name = &db.type_alias_signature(type_alias).name; trait_data.associated_type_by_name(name).is_some() }) .map(|type_alias| TypeAliasAsValue(type_alias).to_chalk(db)) @@ -887,7 +897,7 @@ fn impl_def_datum( pub(crate) fn associated_ty_value_query( db: &dyn HirDatabase, - krate: CrateId, + krate: Crate, id: AssociatedTyValueId, ) -> Arc { let type_alias: TypeAliasAsValue = from_chalk(db, id); @@ -896,11 +906,11 @@ pub(crate) fn associated_ty_value_query( fn type_alias_associated_ty_value( db: &dyn HirDatabase, - _krate: CrateId, + _krate: Crate, type_alias: TypeAliasId, ) -> Arc { - let type_alias_data = db.type_alias_data(type_alias); - let impl_id = match type_alias.lookup(db.upcast()).container { + let type_alias_data = db.type_alias_signature(type_alias); + let impl_id = match type_alias.lookup(db).container { ItemContainerId::ImplId(it) => it, _ => panic!("assoc ty value should be in impl"), }; @@ -912,7 +922,7 @@ fn type_alias_associated_ty_value( .0; // we don't return any assoc ty values if the impl'd trait can't be resolved let assoc_ty = db - .trait_data(trait_ref.hir_trait_id()) + .trait_items(trait_ref.hir_trait_id()) .associated_type_by_name(&type_alias_data.name) .expect("assoc ty value should not exist"); // validated when building the impl data as well let (ty, binders) = db.ty(type_alias.into()).into_value_and_skipped_binders(); @@ -925,10 +935,12 @@ fn type_alias_associated_ty_value( Arc::new(value) } -pub(crate) fn fn_def_datum_query(db: &dyn HirDatabase, fn_def_id: FnDefId) -> Arc { - let callable_def: CallableDefId = from_chalk(db, fn_def_id); - let generic_def = GenericDefId::from_callable(db.upcast(), callable_def); - let generic_params = generics(db.upcast(), generic_def); +pub(crate) fn fn_def_datum_query( + db: &dyn HirDatabase, + callable_def: CallableDefId, +) -> Arc { + let generic_def = GenericDefId::from_callable(db, callable_def); + let generic_params = generics(db, generic_def); let (sig, binders) = db.callable_item_signature(callable_def).into_value_and_skipped_binders(); let bound_vars = generic_params.bound_vars_subst(db, DebruijnIndex::INNERMOST); let where_clauses = convert_where_clauses(db, generic_def, &bound_vars); @@ -945,7 +957,7 @@ pub(crate) fn fn_def_datum_query(db: &dyn HirDatabase, fn_def_id: FnDefId) -> Ar where_clauses, }; let datum = FnDefDatum { - id: fn_def_id, + id: callable_def.to_chalk(db), sig: chalk_ir::FnSig { abi: sig.abi, safety: chalk_ir::Safety::Safe, @@ -956,11 +968,13 @@ pub(crate) fn fn_def_datum_query(db: &dyn HirDatabase, fn_def_id: FnDefId) -> Ar Arc::new(datum) } -pub(crate) fn fn_def_variance_query(db: &dyn HirDatabase, fn_def_id: FnDefId) -> Variances { - let callable_def: CallableDefId = from_chalk(db, fn_def_id); +pub(crate) fn fn_def_variance_query( + db: &dyn HirDatabase, + callable_def: CallableDefId, +) -> Variances { Variances::from_iter( Interner, - db.variances_of(GenericDefId::from_callable(db.upcast(), callable_def)) + db.variances_of(GenericDefId::from_callable(db, callable_def)) .as_deref() .unwrap_or_default() .iter() @@ -973,10 +987,7 @@ pub(crate) fn fn_def_variance_query(db: &dyn HirDatabase, fn_def_id: FnDefId) -> ) } -pub(crate) fn adt_variance_query( - db: &dyn HirDatabase, - chalk_ir::AdtId(adt_id): AdtId, -) -> Variances { +pub(crate) fn adt_variance_query(db: &dyn HirDatabase, adt_id: hir_def::AdtId) -> Variances { Variances::from_iter( Interner, db.variances_of(adt_id.into()).as_deref().unwrap_or_default().iter().map(|v| match v { @@ -1026,10 +1037,10 @@ pub(super) fn generic_predicate_to_inline_bound( Some(chalk_ir::Binders::new(binders, rust_ir::InlineBound::TraitBound(trait_bound))) } WhereClause::AliasEq(AliasEq { alias: AliasTy::Projection(projection_ty), ty }) => { - let generics = - generics(db.upcast(), from_assoc_type_id(projection_ty.associated_ty_id).into()); - let (assoc_args, trait_args) = - projection_ty.substitution.as_slice(Interner).split_at(generics.len_self()); + let generics = generics(db, from_assoc_type_id(projection_ty.associated_ty_id).into()); + let parent_len = generics.parent_generics().map_or(0, |g| g.len_self()); + let (trait_args, assoc_args) = + projection_ty.substitution.as_slice(Interner).split_at(parent_len); let (self_ty, args_no_self) = trait_args.split_first().expect("projection without trait self type"); if self_ty.assert_ty_ref(Interner) != &self_ty_shifted_in { diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/chalk_ext.rs b/src/tools/rust-analyzer/crates/hir-ty/src/chalk_ext.rs index 51c178b90d72b..aabc4c4234dbc 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/chalk_ext.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/chalk_ext.rs @@ -1,22 +1,22 @@ //! Various extensions traits for Chalk types. use chalk_ir::{ - cast::Cast, FloatTy, IntTy, Mutability, Scalar, TyVariableKind, TypeOutlives, UintTy, + FloatTy, IntTy, Mutability, Scalar, TyVariableKind, TypeOutlives, UintTy, cast::Cast, }; use hir_def::{ + DefWithBodyId, FunctionId, GenericDefId, HasModule, ItemContainerId, Lookup, TraitId, builtin_type::{BuiltinFloat, BuiltinInt, BuiltinType, BuiltinUint}, - generics::TypeOrConstParamData, + hir::generics::{TypeOrConstParamData, TypeParamProvenance}, lang_item::LangItem, type_ref::Rawness, - DefWithBodyId, FunctionId, GenericDefId, HasModule, ItemContainerId, Lookup, TraitId, }; use crate::{ - db::HirDatabase, from_assoc_type_id, from_chalk_trait_id, from_foreign_def_id, - from_placeholder_idx, generics::generics, to_chalk_trait_id, utils::ClosureSubst, AdtId, - AliasEq, AliasTy, Binders, CallableDefId, CallableSig, Canonical, CanonicalVarKinds, ClosureId, - DynTy, FnPointer, ImplTraitId, InEnvironment, Interner, Lifetime, ProjectionTy, + AdtId, AliasEq, AliasTy, Binders, CallableDefId, CallableSig, Canonical, CanonicalVarKinds, + ClosureId, DynTy, FnPointer, ImplTraitId, InEnvironment, Interner, Lifetime, ProjectionTy, QuantifiedWhereClause, Substitution, TraitRef, Ty, TyBuilder, TyKind, TypeFlags, WhereClause, + db::HirDatabase, from_assoc_type_id, from_chalk_trait_id, from_foreign_def_id, + from_placeholder_idx, generics::generics, to_chalk_trait_id, utils::ClosureSubst, }; pub trait TyExt { @@ -191,7 +191,7 @@ impl TyExt for Ty { match *self.kind(Interner) { TyKind::Adt(AdtId(adt), ..) => Some(adt.into()), TyKind::FnDef(callable, ..) => Some(GenericDefId::from_callable( - db.upcast(), + db, db.lookup_intern_callable_def(callable.into()), )), TyKind::AssociatedType(type_alias, ..) => Some(from_assoc_type_id(type_alias).into()), @@ -250,10 +250,8 @@ impl TyExt for Ty { TyKind::OpaqueType(opaque_ty_id, subst) => { match db.lookup_intern_impl_trait_id((*opaque_ty_id).into()) { ImplTraitId::AsyncBlockTypeImplTrait(def, _expr) => { - let krate = def.module(db.upcast()).krate(); - if let Some(future_trait) = - db.lang_item(krate, LangItem::Future).and_then(|item| item.as_trait()) - { + let krate = def.module(db).krate(); + if let Some(future_trait) = LangItem::Future.resolve_trait(db, krate) { // This is only used by type walking. // Parameters will be walked outside, and projection predicate is not used. // So just provide the Future trait. @@ -314,7 +312,7 @@ impl TyExt for Ty { let param_data = &generic_params[id.local_id]; match param_data { TypeOrConstParamData::TypeParamData(p) => match p.provenance { - hir_def::generics::TypeParamProvenance::ArgumentImplTrait => { + TypeParamProvenance::ArgumentImplTrait => { let substs = TyBuilder::placeholder_subst(db, id.parent); let predicates = db .generic_predicates(id.parent) @@ -348,17 +346,12 @@ impl TyExt for Ty { fn associated_type_parent_trait(&self, db: &dyn HirDatabase) -> Option { match self.kind(Interner) { - TyKind::AssociatedType(id, ..) => { - match from_assoc_type_id(*id).lookup(db.upcast()).container { - ItemContainerId::TraitId(trait_id) => Some(trait_id), - _ => None, - } - } + TyKind::AssociatedType(id, ..) => match from_assoc_type_id(*id).lookup(db).container { + ItemContainerId::TraitId(trait_id) => Some(trait_id), + _ => None, + }, TyKind::Alias(AliasTy::Projection(projection_ty)) => { - match from_assoc_type_id(projection_ty.associated_ty_id) - .lookup(db.upcast()) - .container - { + match from_assoc_type_id(projection_ty.associated_ty_id).lookup(db).container { ItemContainerId::TraitId(trait_id) => Some(trait_id), _ => None, } @@ -368,9 +361,8 @@ impl TyExt for Ty { } fn is_copy(self, db: &dyn HirDatabase, owner: DefWithBodyId) -> bool { - let crate_id = owner.module(db.upcast()).krate(); - let Some(copy_trait) = db.lang_item(crate_id, LangItem::Copy).and_then(|it| it.as_trait()) - else { + let crate_id = owner.module(db).krate(); + let Some(copy_trait) = LangItem::Copy.resolve_trait(db, crate_id) else { return false; }; let trait_ref = TyBuilder::trait_ref(db, copy_trait).push(self).build(); @@ -422,16 +414,15 @@ pub trait ProjectionTyExt { impl ProjectionTyExt for ProjectionTy { fn trait_ref(&self, db: &dyn HirDatabase) -> TraitRef { // FIXME: something like `Split` trait from chalk-solve might be nice. - let generics = generics(db.upcast(), from_assoc_type_id(self.associated_ty_id).into()); - let substitution = Substitution::from_iter( - Interner, - self.substitution.iter(Interner).skip(generics.len_self()), - ); + let generics = generics(db, from_assoc_type_id(self.associated_ty_id).into()); + let parent_len = generics.parent_generics().map_or(0, |g| g.len_self()); + let substitution = + Substitution::from_iter(Interner, self.substitution.iter(Interner).take(parent_len)); TraitRef { trait_id: to_chalk_trait_id(self.trait_(db)), substitution } } fn trait_(&self, db: &dyn HirDatabase) -> TraitId { - match from_assoc_type_id(self.associated_ty_id).lookup(db.upcast()).container { + match from_assoc_type_id(self.associated_ty_id).lookup(db).container { ItemContainerId::TraitId(it) => it, _ => panic!("projection ty without parent trait"), } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/consteval.rs b/src/tools/rust-analyzer/crates/hir-ty/src/consteval.rs index fb604569f439d..d1a1e135ffffa 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/consteval.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/consteval.rs @@ -1,27 +1,25 @@ //! Constant evaluation details -use base_db::{ra_salsa::Cycle, CrateId}; -use chalk_ir::{cast::Cast, BoundVar, DebruijnIndex}; +use base_db::Crate; +use chalk_ir::{BoundVar, DebruijnIndex, cast::Cast}; use hir_def::{ - expr_store::{Body, HygieneId}, + EnumVariantId, GeneralConstId, HasModule as _, StaticId, + expr_store::{Body, HygieneId, path::Path}, hir::{Expr, ExprId}, - path::Path, resolver::{Resolver, ValueNs}, type_ref::LiteralConstRef, - ConstBlockLoc, EnumVariantId, GeneralConstId, HasModule as _, StaticId, }; use hir_expand::Lookup; use stdx::never; use triomphe::Arc; use crate::{ - db::HirDatabase, display::DisplayTarget, generics::Generics, infer::InferenceContext, - lower::ParamLoweringMode, mir::monomorphize_mir_body_bad, to_placeholder_idx, Const, ConstData, - ConstScalar, ConstValue, GenericArg, Interner, MemoryMap, Substitution, TraitEnvironment, Ty, - TyBuilder, + Const, ConstData, ConstScalar, ConstValue, GenericArg, Interner, MemoryMap, Substitution, + TraitEnvironment, Ty, TyBuilder, db::HirDatabase, display::DisplayTarget, generics::Generics, + infer::InferenceContext, lower::ParamLoweringMode, to_placeholder_idx, }; -use super::mir::{interpret_mir, lower_to_mir, pad16, MirEvalError, MirLowerError}; +use super::mir::{MirEvalError, MirLowerError, interpret_mir, lower_to_mir, pad16}; /// Extension trait for [`Const`] pub trait ConstExt { @@ -96,11 +94,11 @@ pub(crate) fn path_to_const<'g>( resolver: &Resolver, path: &Path, mode: ParamLoweringMode, - args: impl FnOnce() -> Option<&'g Generics>, + args: impl FnOnce() -> &'g Generics, debruijn: DebruijnIndex, expected_ty: Ty, ) -> Option { - match resolver.resolve_path_in_value_ns_fully(db.upcast(), path, HygieneId::ROOT) { + match resolver.resolve_path_in_value_ns_fully(db, path, HygieneId::ROOT) { Some(ValueNs::GenericParam(p)) => { let ty = db.const_param_ty(p); let value = match mode { @@ -109,7 +107,7 @@ pub(crate) fn path_to_const<'g>( } ParamLoweringMode::Variable => { let args = args(); - match args.and_then(|args| args.type_or_const_param_idx(p.into())) { + match args.type_or_const_param_idx(p.into()) { Some(it) => ConstValue::BoundVar(BoundVar::new(debruijn, it)), None => { never!( @@ -157,17 +155,17 @@ pub fn intern_const_ref( db: &dyn HirDatabase, value: &LiteralConstRef, ty: Ty, - krate: CrateId, + krate: Crate, ) -> Const { - let layout = db.layout_of_ty(ty.clone(), TraitEnvironment::empty(krate)); + let layout = || db.layout_of_ty(ty.clone(), TraitEnvironment::empty(krate)); let bytes = match value { LiteralConstRef::Int(i) => { // FIXME: We should handle failure of layout better. - let size = layout.map(|it| it.size.bytes_usize()).unwrap_or(16); + let size = layout().map(|it| it.size.bytes_usize()).unwrap_or(16); ConstScalar::Bytes(i.to_le_bytes()[0..size].into(), MemoryMap::default()) } LiteralConstRef::UInt(i) => { - let size = layout.map(|it| it.size.bytes_usize()).unwrap_or(16); + let size = layout().map(|it| it.size.bytes_usize()).unwrap_or(16); ConstScalar::Bytes(i.to_le_bytes()[0..size].into(), MemoryMap::default()) } LiteralConstRef::Bool(b) => ConstScalar::Bytes(Box::new([*b as u8]), MemoryMap::default()), @@ -180,7 +178,7 @@ pub fn intern_const_ref( } /// Interns a possibly-unknown target usize -pub fn usize_const(db: &dyn HirDatabase, value: Option, krate: CrateId) -> Const { +pub fn usize_const(db: &dyn HirDatabase, value: Option, krate: Crate) -> Const { intern_const_ref( db, &value.map_or(LiteralConstRef::Unknown, LiteralConstRef::UInt), @@ -221,28 +219,25 @@ pub fn try_const_isize(db: &dyn HirDatabase, c: &Const) -> Option { } } -pub(crate) fn const_eval_recover( +pub(crate) fn const_eval_cycle_result( _: &dyn HirDatabase, - _: &Cycle, - _: &GeneralConstId, - _: &Substitution, - _: &Option>, + _: GeneralConstId, + _: Substitution, + _: Option>, ) -> Result { Err(ConstEvalError::MirLowerError(MirLowerError::Loop)) } -pub(crate) fn const_eval_static_recover( +pub(crate) fn const_eval_static_cycle_result( _: &dyn HirDatabase, - _: &Cycle, - _: &StaticId, + _: StaticId, ) -> Result { Err(ConstEvalError::MirLowerError(MirLowerError::Loop)) } -pub(crate) fn const_eval_discriminant_recover( +pub(crate) fn const_eval_discriminant_cycle_result( _: &dyn HirDatabase, - _: &Cycle, - _: &EnumVariantId, + _: EnumVariantId, ) -> Result { Err(ConstEvalError::MirLowerError(MirLowerError::Loop)) } @@ -258,21 +253,9 @@ pub(crate) fn const_eval_query( db.monomorphized_mir_body(c.into(), subst, db.trait_environment(c.into()))? } GeneralConstId::StaticId(s) => { - let krate = s.module(db.upcast()).krate(); + let krate = s.module(db).krate(); db.monomorphized_mir_body(s.into(), subst, TraitEnvironment::empty(krate))? } - GeneralConstId::ConstBlockId(c) => { - let ConstBlockLoc { parent, root } = db.lookup_intern_anonymous_const(c); - let body = db.body(parent); - let infer = db.infer(parent); - Arc::new(monomorphize_mir_body_bad( - db, - lower_to_mir(db, parent, &body, &infer, root)?, - subst, - db.trait_environment_for_body(parent), - )?) - } - GeneralConstId::InTypeConstId(c) => db.mir_body(c.into())?, }; let c = interpret_mir(db, body, false, trait_env)?.0?; Ok(c) @@ -297,13 +280,13 @@ pub(crate) fn const_eval_discriminant_variant( ) -> Result { let def = variant_id.into(); let body = db.body(def); - let loc = variant_id.lookup(db.upcast()); + let loc = variant_id.lookup(db); if body.exprs[body.body_expr] == Expr::Missing { let prev_idx = loc.index.checked_sub(1); let value = match prev_idx { Some(prev_idx) => { 1 + db.const_eval_discriminant( - db.enum_data(loc.parent).variants[prev_idx as usize].0, + db.enum_variants(loc.parent).variants[prev_idx as usize].0, )? } _ => 0, @@ -311,7 +294,7 @@ pub(crate) fn const_eval_discriminant_variant( return Ok(value); } - let repr = db.enum_data(loc.parent).repr; + let repr = db.enum_signature(loc.parent).repr; let is_signed = repr.and_then(|repr| repr.int).is_none_or(|int| int.is_signed()); let mir_body = db.monomorphized_mir_body( diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/consteval/tests.rs b/src/tools/rust-analyzer/crates/hir-ty/src/consteval/tests.rs index 26a3b7022976f..6449a4dc7e8c6 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/consteval/tests.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/consteval/tests.rs @@ -1,17 +1,17 @@ -use base_db::SourceDatabase; +use base_db::RootQueryDb; use chalk_ir::Substitution; use hir_def::db::DefDatabase; +use hir_expand::EditionedFileId; use rustc_apfloat::{ - ieee::{Half as f16, Quad as f128}, Float, + ieee::{Half as f16, Quad as f128}, }; -use span::EditionedFileId; use test_fixture::WithFixture; use test_utils::skip_slow_tests; use crate::{ - consteval::try_const_usize, db::HirDatabase, display::DisplayTarget, mir::pad16, - test_db::TestDB, Const, ConstScalar, Interner, MemoryMap, + Const, ConstScalar, Interner, MemoryMap, consteval::try_const_usize, db::HirDatabase, + display::DisplayTarget, mir::pad16, test_db::TestDB, }; use super::{ @@ -101,10 +101,8 @@ fn check_answer( fn pretty_print_err(e: ConstEvalError, db: TestDB) -> String { let mut err = String::new(); let span_formatter = |file, range| format!("{file:?} {range:?}"); - let display_target = DisplayTarget::from_crate( - &db, - *db.crate_graph().crates_in_topological_order().last().unwrap(), - ); + let display_target = + DisplayTarget::from_crate(&db, *db.all_crates().last().expect("no crate graph present")); match e { ConstEvalError::MirLowerError(e) => { e.pretty_print(&mut err, &db, span_formatter, display_target) @@ -118,14 +116,14 @@ fn pretty_print_err(e: ConstEvalError, db: TestDB) -> String { } fn eval_goal(db: &TestDB, file_id: EditionedFileId) -> Result { - let module_id = db.module_for_file(file_id.file_id()); + let module_id = db.module_for_file(file_id.file_id(db)); let def_map = module_id.def_map(db); let scope = &def_map[module_id.local_id].scope; let const_id = scope .declarations() .find_map(|x| match x { hir_def::ModuleDefId::ConstId(x) => { - if db.const_data(x).name.as_ref()?.display(db, file_id.edition()).to_string() + if db.const_signature(x).name.as_ref()?.display(db, file_id.edition(db)).to_string() == "GOAL" { Some(x) @@ -2460,6 +2458,8 @@ fn extern_weak_statics() { } #[test] +// FIXME +#[should_panic] fn from_ne_bytes() { check_number( r#" @@ -2536,6 +2536,8 @@ fn const_transfer_memory() { } #[test] +// FIXME +#[should_panic] fn anonymous_const_block() { check_number( r#" diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/db.rs b/src/tools/rust-analyzer/crates/hir-ty/src/db.rs index 76031491d9a07..980ee264b0271 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/db.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/db.rs @@ -3,23 +3,22 @@ use std::sync; -use base_db::{ - impl_intern_key, - ra_salsa::{self, InternValueTrivial}, - CrateId, Upcast, -}; +use base_db::{Crate, impl_intern_key}; use hir_def::{ - db::DefDatabase, hir::ExprId, layout::TargetDataLayout, AdtId, BlockId, CallableDefId, - ConstParamId, DefWithBodyId, EnumVariantId, FunctionId, GeneralConstId, GenericDefId, ImplId, - LifetimeParamId, LocalFieldId, StaticId, TraitId, TypeAliasId, TypeOrConstParamId, VariantId, + AdtId, BlockId, CallableDefId, ConstParamId, DefWithBodyId, EnumVariantId, FunctionId, + GeneralConstId, GenericDefId, ImplId, LifetimeParamId, LocalFieldId, StaticId, TraitId, + TypeAliasId, TypeOrConstParamId, VariantId, db::DefDatabase, hir::ExprId, + layout::TargetDataLayout, }; use hir_expand::name::Name; use la_arena::ArenaMap; +use salsa::plumbing::AsId; use smallvec::SmallVec; use triomphe::Arc; use crate::{ - chalk_db, + Binders, Const, ImplTraitId, ImplTraits, InferenceResult, Interner, PolyFnSig, Substitution, + TraitEnvironment, TraitRef, Ty, TyDefId, ValueTyDefId, chalk_db, consteval::ConstEvalError, drop::DropGlue, dyn_compatibility::DynCompatibilityViolation, @@ -27,26 +26,24 @@ use crate::{ lower::{Diagnostics, GenericDefaults, GenericPredicates}, method_resolution::{InherentImpls, TraitImpls, TyFingerprint}, mir::{BorrowckResult, MirBody, MirLowerError}, - Binders, ClosureId, Const, FnDefId, ImplTraitId, ImplTraits, InferenceResult, Interner, - PolyFnSig, Substitution, TraitEnvironment, TraitRef, Ty, TyDefId, ValueTyDefId, }; -#[ra_salsa::query_group(HirDatabaseStorage)] -pub trait HirDatabase: DefDatabase + Upcast { - #[ra_salsa::invoke(crate::infer::infer_query)] +#[query_group::query_group] +pub trait HirDatabase: DefDatabase + std::fmt::Debug { + #[salsa::invoke(crate::infer::infer_query)] fn infer(&self, def: DefWithBodyId) -> Arc; // region:mir - #[ra_salsa::invoke(crate::mir::mir_body_query)] - #[ra_salsa::cycle(crate::mir::mir_body_recover)] + #[salsa::invoke(crate::mir::mir_body_query)] + #[salsa::cycle(cycle_result = crate::mir::mir_body_cycle_result)] fn mir_body(&self, def: DefWithBodyId) -> Result, MirLowerError>; - #[ra_salsa::invoke(crate::mir::mir_body_for_closure_query)] - fn mir_body_for_closure(&self, def: ClosureId) -> Result, MirLowerError>; + #[salsa::invoke(crate::mir::mir_body_for_closure_query)] + fn mir_body_for_closure(&self, def: InternedClosureId) -> Result, MirLowerError>; - #[ra_salsa::invoke(crate::mir::monomorphized_mir_body_query)] - #[ra_salsa::cycle(crate::mir::monomorphized_mir_body_recover)] + #[salsa::invoke(crate::mir::monomorphized_mir_body_query)] + #[salsa::cycle(cycle_result = crate::mir::monomorphized_mir_body_cycle_result)] fn monomorphized_mir_body( &self, def: DefWithBodyId, @@ -54,20 +51,20 @@ pub trait HirDatabase: DefDatabase + Upcast { env: Arc, ) -> Result, MirLowerError>; - #[ra_salsa::invoke(crate::mir::monomorphized_mir_body_for_closure_query)] + #[salsa::invoke(crate::mir::monomorphized_mir_body_for_closure_query)] fn monomorphized_mir_body_for_closure( &self, - def: ClosureId, + def: InternedClosureId, subst: Substitution, env: Arc, ) -> Result, MirLowerError>; - #[ra_salsa::invoke(crate::mir::borrowck_query)] - #[ra_salsa::lru] + #[salsa::invoke(crate::mir::borrowck_query)] + #[salsa::lru(2024)] fn borrowck(&self, def: DefWithBodyId) -> Result, MirLowerError>; - #[ra_salsa::invoke(crate::consteval::const_eval_query)] - #[ra_salsa::cycle(crate::consteval::const_eval_recover)] + #[salsa::invoke(crate::consteval::const_eval_query)] + #[salsa::cycle(cycle_result = crate::consteval::const_eval_cycle_result)] fn const_eval( &self, def: GeneralConstId, @@ -75,15 +72,15 @@ pub trait HirDatabase: DefDatabase + Upcast { trait_env: Option>, ) -> Result; - #[ra_salsa::invoke(crate::consteval::const_eval_static_query)] - #[ra_salsa::cycle(crate::consteval::const_eval_static_recover)] + #[salsa::invoke(crate::consteval::const_eval_static_query)] + #[salsa::cycle(cycle_result = crate::consteval::const_eval_static_cycle_result)] fn const_eval_static(&self, def: StaticId) -> Result; - #[ra_salsa::invoke(crate::consteval::const_eval_discriminant_variant)] - #[ra_salsa::cycle(crate::consteval::const_eval_discriminant_recover)] + #[salsa::invoke(crate::consteval::const_eval_discriminant_variant)] + #[salsa::cycle(cycle_result = crate::consteval::const_eval_discriminant_cycle_result)] fn const_eval_discriminant(&self, def: EnumVariantId) -> Result; - #[ra_salsa::invoke(crate::method_resolution::lookup_impl_method_query)] + #[salsa::invoke(crate::method_resolution::lookup_impl_method_query)] fn lookup_impl_method( &self, env: Arc, @@ -93,8 +90,8 @@ pub trait HirDatabase: DefDatabase + Upcast { // endregion:mir - #[ra_salsa::invoke(crate::layout::layout_of_adt_query)] - #[ra_salsa::cycle(crate::layout::layout_of_adt_recover)] + #[salsa::invoke(crate::layout::layout_of_adt_query)] + #[salsa::cycle(cycle_result = crate::layout::layout_of_adt_cycle_result)] fn layout_of_adt( &self, def: AdtId, @@ -102,63 +99,73 @@ pub trait HirDatabase: DefDatabase + Upcast { env: Arc, ) -> Result, LayoutError>; - #[ra_salsa::invoke(crate::layout::layout_of_ty_query)] - #[ra_salsa::cycle(crate::layout::layout_of_ty_recover)] + #[salsa::invoke(crate::layout::layout_of_ty_query)] + #[salsa::cycle(cycle_result = crate::layout::layout_of_ty_cycle_result)] fn layout_of_ty(&self, ty: Ty, env: Arc) -> Result, LayoutError>; - #[ra_salsa::invoke(crate::layout::target_data_layout_query)] - fn target_data_layout(&self, krate: CrateId) -> Result, Arc>; + #[salsa::invoke(crate::layout::target_data_layout_query)] + fn target_data_layout(&self, krate: Crate) -> Result, Arc>; - #[ra_salsa::invoke(crate::dyn_compatibility::dyn_compatibility_of_trait_query)] + #[salsa::invoke(crate::dyn_compatibility::dyn_compatibility_of_trait_query)] fn dyn_compatibility_of_trait(&self, trait_: TraitId) -> Option; - #[ra_salsa::invoke(crate::lower::ty_query)] - #[ra_salsa::cycle(crate::lower::ty_recover)] + #[salsa::invoke(crate::lower::ty_query)] + #[salsa::transparent] fn ty(&self, def: TyDefId) -> Binders; - #[ra_salsa::invoke(crate::lower::type_for_type_alias_with_diagnostics_query)] + #[salsa::invoke(crate::lower::type_for_type_alias_with_diagnostics_query)] + #[salsa::cycle(cycle_result = crate::lower::type_for_type_alias_with_diagnostics_cycle_result)] fn type_for_type_alias_with_diagnostics(&self, def: TypeAliasId) -> (Binders, Diagnostics); /// Returns the type of the value of the given constant, or `None` if the `ValueTyDefId` is /// a `StructId` or `EnumVariantId` with a record constructor. - #[ra_salsa::invoke(crate::lower::value_ty_query)] + #[salsa::invoke(crate::lower::value_ty_query)] fn value_ty(&self, def: ValueTyDefId) -> Option>; - #[ra_salsa::invoke(crate::lower::impl_self_ty_with_diagnostics_query)] - #[ra_salsa::cycle(crate::lower::impl_self_ty_with_diagnostics_recover)] + #[salsa::invoke(crate::lower::impl_self_ty_with_diagnostics_query)] + #[salsa::cycle(cycle_result = crate::lower::impl_self_ty_with_diagnostics_cycle_result)] fn impl_self_ty_with_diagnostics(&self, def: ImplId) -> (Binders, Diagnostics); - #[ra_salsa::invoke(crate::lower::impl_self_ty_query)] + + #[salsa::invoke(crate::lower::impl_self_ty_query)] + #[salsa::transparent] fn impl_self_ty(&self, def: ImplId) -> Binders; - #[ra_salsa::invoke(crate::lower::const_param_ty_with_diagnostics_query)] + // FIXME: Make this a non-interned query. + #[salsa::invoke_interned(crate::lower::const_param_ty_with_diagnostics_query)] fn const_param_ty_with_diagnostics(&self, def: ConstParamId) -> (Ty, Diagnostics); - #[ra_salsa::invoke(crate::lower::const_param_ty_query)] + + #[salsa::invoke(crate::lower::const_param_ty_query)] + #[salsa::transparent] fn const_param_ty(&self, def: ConstParamId) -> Ty; - #[ra_salsa::invoke(crate::lower::impl_trait_with_diagnostics_query)] + #[salsa::invoke(crate::lower::impl_trait_with_diagnostics_query)] fn impl_trait_with_diagnostics(&self, def: ImplId) -> Option<(Binders, Diagnostics)>; - #[ra_salsa::invoke(crate::lower::impl_trait_query)] + + #[salsa::invoke(crate::lower::impl_trait_query)] + #[salsa::transparent] fn impl_trait(&self, def: ImplId) -> Option>; - #[ra_salsa::invoke(crate::lower::field_types_with_diagnostics_query)] + #[salsa::invoke(crate::lower::field_types_with_diagnostics_query)] fn field_types_with_diagnostics( &self, var: VariantId, ) -> (Arc>>, Diagnostics); - #[ra_salsa::invoke(crate::lower::field_types_query)] + + #[salsa::invoke(crate::lower::field_types_query)] + #[salsa::transparent] fn field_types(&self, var: VariantId) -> Arc>>; - #[ra_salsa::invoke(crate::lower::callable_item_sig)] + #[salsa::invoke(crate::lower::callable_item_signature_query)] fn callable_item_signature(&self, def: CallableDefId) -> PolyFnSig; - #[ra_salsa::invoke(crate::lower::return_type_impl_traits)] + #[salsa::invoke(crate::lower::return_type_impl_traits)] fn return_type_impl_traits(&self, def: FunctionId) -> Option>>; - #[ra_salsa::invoke(crate::lower::type_alias_impl_traits)] + #[salsa::invoke(crate::lower::type_alias_impl_traits)] fn type_alias_impl_traits(&self, def: TypeAliasId) -> Option>>; - #[ra_salsa::invoke(crate::lower::generic_predicates_for_param_query)] - #[ra_salsa::cycle(crate::lower::generic_predicates_for_param_recover)] + #[salsa::invoke(crate::lower::generic_predicates_for_param_query)] + #[salsa::cycle(cycle_result = crate::lower::generic_predicates_for_param_cycle_result)] fn generic_predicates_for_param( &self, def: GenericDefId, @@ -166,150 +173,156 @@ pub trait HirDatabase: DefDatabase + Upcast { assoc_name: Option, ) -> GenericPredicates; - #[ra_salsa::invoke(crate::lower::generic_predicates_query)] + #[salsa::invoke(crate::lower::generic_predicates_query)] fn generic_predicates(&self, def: GenericDefId) -> GenericPredicates; - #[ra_salsa::invoke(crate::lower::generic_predicates_without_parent_with_diagnostics_query)] + #[salsa::invoke(crate::lower::generic_predicates_without_parent_with_diagnostics_query)] fn generic_predicates_without_parent_with_diagnostics( &self, def: GenericDefId, ) -> (GenericPredicates, Diagnostics); - #[ra_salsa::invoke(crate::lower::generic_predicates_without_parent_query)] + + #[salsa::invoke(crate::lower::generic_predicates_without_parent_query)] + #[salsa::transparent] fn generic_predicates_without_parent(&self, def: GenericDefId) -> GenericPredicates; - #[ra_salsa::invoke(crate::lower::trait_environment_for_body_query)] - #[ra_salsa::transparent] + #[salsa::invoke(crate::lower::trait_environment_for_body_query)] + #[salsa::transparent] fn trait_environment_for_body(&self, def: DefWithBodyId) -> Arc; - #[ra_salsa::invoke(crate::lower::trait_environment_query)] + #[salsa::invoke(crate::lower::trait_environment_query)] fn trait_environment(&self, def: GenericDefId) -> Arc; - #[ra_salsa::invoke(crate::lower::generic_defaults_with_diagnostics_query)] - #[ra_salsa::cycle(crate::lower::generic_defaults_with_diagnostics_recover)] + #[salsa::invoke(crate::lower::generic_defaults_with_diagnostics_query)] + #[salsa::cycle(cycle_result = crate::lower::generic_defaults_with_diagnostics_cycle_result)] fn generic_defaults_with_diagnostics( &self, def: GenericDefId, ) -> (GenericDefaults, Diagnostics); - #[ra_salsa::invoke(crate::lower::generic_defaults_query)] + + /// This returns an empty list if no parameter has default. + /// + /// The binders of the returned defaults are only up to (not including) this parameter. + #[salsa::invoke(crate::lower::generic_defaults_query)] + #[salsa::transparent] fn generic_defaults(&self, def: GenericDefId) -> GenericDefaults; - #[ra_salsa::invoke(InherentImpls::inherent_impls_in_crate_query)] - fn inherent_impls_in_crate(&self, krate: CrateId) -> Arc; + #[salsa::invoke(InherentImpls::inherent_impls_in_crate_query)] + fn inherent_impls_in_crate(&self, krate: Crate) -> Arc; - #[ra_salsa::invoke(InherentImpls::inherent_impls_in_block_query)] + #[salsa::invoke(InherentImpls::inherent_impls_in_block_query)] fn inherent_impls_in_block(&self, block: BlockId) -> Option>; /// Collects all crates in the dependency graph that have impls for the /// given fingerprint. This is only used for primitive types and types /// annotated with `rustc_has_incoherent_inherent_impls`; for other types /// we just look at the crate where the type is defined. - #[ra_salsa::invoke(crate::method_resolution::incoherent_inherent_impl_crates)] + #[salsa::invoke(crate::method_resolution::incoherent_inherent_impl_crates)] fn incoherent_inherent_impl_crates( &self, - krate: CrateId, + krate: Crate, fp: TyFingerprint, - ) -> SmallVec<[CrateId; 2]>; + ) -> SmallVec<[Crate; 2]>; - #[ra_salsa::invoke(TraitImpls::trait_impls_in_crate_query)] - fn trait_impls_in_crate(&self, krate: CrateId) -> Arc; + #[salsa::invoke(TraitImpls::trait_impls_in_crate_query)] + fn trait_impls_in_crate(&self, krate: Crate) -> Arc; - #[ra_salsa::invoke(TraitImpls::trait_impls_in_block_query)] + #[salsa::invoke(TraitImpls::trait_impls_in_block_query)] fn trait_impls_in_block(&self, block: BlockId) -> Option>; - #[ra_salsa::invoke(TraitImpls::trait_impls_in_deps_query)] - fn trait_impls_in_deps(&self, krate: CrateId) -> Arc<[Arc]>; + #[salsa::invoke(TraitImpls::trait_impls_in_deps_query)] + fn trait_impls_in_deps(&self, krate: Crate) -> Arc<[Arc]>; // Interned IDs for Chalk integration - #[ra_salsa::interned] + #[salsa::interned] fn intern_callable_def(&self, callable_def: CallableDefId) -> InternedCallableDefId; - #[ra_salsa::interned] + + #[salsa::interned] fn intern_type_or_const_param_id( &self, param_id: TypeOrConstParamId, ) -> InternedTypeOrConstParamId; - #[ra_salsa::interned] + + #[salsa::interned] fn intern_lifetime_param_id(&self, param_id: LifetimeParamId) -> InternedLifetimeParamId; - #[ra_salsa::interned] + + #[salsa::interned] fn intern_impl_trait_id(&self, id: ImplTraitId) -> InternedOpaqueTyId; - #[ra_salsa::interned] + + #[salsa::interned] fn intern_closure(&self, id: InternedClosure) -> InternedClosureId; - #[ra_salsa::interned] + + #[salsa::interned] fn intern_coroutine(&self, id: InternedCoroutine) -> InternedCoroutineId; - #[ra_salsa::invoke(chalk_db::associated_ty_data_query)] - fn associated_ty_data( - &self, - id: chalk_db::AssocTypeId, - ) -> sync::Arc; + #[salsa::invoke(chalk_db::associated_ty_data_query)] + fn associated_ty_data(&self, id: TypeAliasId) -> sync::Arc; - #[ra_salsa::invoke(chalk_db::trait_datum_query)] + #[salsa::invoke(chalk_db::trait_datum_query)] fn trait_datum( &self, - krate: CrateId, + krate: Crate, trait_id: chalk_db::TraitId, ) -> sync::Arc; - #[ra_salsa::invoke(chalk_db::adt_datum_query)] - fn adt_datum( - &self, - krate: CrateId, - struct_id: chalk_db::AdtId, - ) -> sync::Arc; + #[salsa::invoke(chalk_db::adt_datum_query)] + fn adt_datum(&self, krate: Crate, struct_id: chalk_db::AdtId) -> sync::Arc; - #[ra_salsa::invoke(chalk_db::impl_datum_query)] - fn impl_datum( - &self, - krate: CrateId, - impl_id: chalk_db::ImplId, - ) -> sync::Arc; + #[salsa::invoke(chalk_db::impl_datum_query)] + fn impl_datum(&self, krate: Crate, impl_id: chalk_db::ImplId) + -> sync::Arc; - #[ra_salsa::invoke(chalk_db::fn_def_datum_query)] - fn fn_def_datum(&self, fn_def_id: FnDefId) -> sync::Arc; + #[salsa::invoke(chalk_db::fn_def_datum_query)] + fn fn_def_datum(&self, fn_def_id: CallableDefId) -> sync::Arc; - #[ra_salsa::invoke(chalk_db::fn_def_variance_query)] - fn fn_def_variance(&self, fn_def_id: FnDefId) -> chalk_db::Variances; + #[salsa::invoke(chalk_db::fn_def_variance_query)] + fn fn_def_variance(&self, fn_def_id: CallableDefId) -> chalk_db::Variances; - #[ra_salsa::invoke(chalk_db::adt_variance_query)] - fn adt_variance(&self, adt_id: chalk_db::AdtId) -> chalk_db::Variances; + #[salsa::invoke(chalk_db::adt_variance_query)] + fn adt_variance(&self, adt_id: AdtId) -> chalk_db::Variances; - #[ra_salsa::invoke(crate::variance::variances_of)] - #[ra_salsa::cycle(crate::variance::variances_of_cycle)] + #[salsa::invoke(crate::variance::variances_of)] + #[salsa::cycle( + // cycle_fn = crate::variance::variances_of_cycle_fn, + // cycle_initial = crate::variance::variances_of_cycle_initial, + cycle_result = crate::variance::variances_of_cycle_initial, + )] fn variances_of(&self, def: GenericDefId) -> Option>; - #[ra_salsa::invoke(chalk_db::associated_ty_value_query)] + #[salsa::invoke(chalk_db::associated_ty_value_query)] fn associated_ty_value( &self, - krate: CrateId, + krate: Crate, id: chalk_db::AssociatedTyValueId, ) -> sync::Arc; - #[ra_salsa::invoke(crate::traits::normalize_projection_query)] - #[ra_salsa::transparent] + #[salsa::invoke(crate::traits::normalize_projection_query)] + #[salsa::transparent] fn normalize_projection( &self, projection: crate::ProjectionTy, env: Arc, ) -> Ty; - #[ra_salsa::invoke(crate::traits::trait_solve_query)] + #[salsa::invoke(crate::traits::trait_solve_query)] fn trait_solve( &self, - krate: CrateId, + krate: Crate, block: Option, goal: crate::Canonical>, ) -> Option; - #[ra_salsa::invoke(chalk_db::program_clauses_for_chalk_env_query)] + #[salsa::invoke(chalk_db::program_clauses_for_chalk_env_query)] fn program_clauses_for_chalk_env( &self, - krate: CrateId, + krate: Crate, block: Option, env: chalk_ir::Environment, ) -> chalk_ir::ProgramClauses; - #[ra_salsa::invoke(crate::drop::has_drop_glue)] - #[ra_salsa::cycle(crate::drop::has_drop_glue_recover)] - fn has_drop_glue(&self, ty: Ty, env: Arc) -> DropGlue {} + #[salsa::invoke(crate::drop::has_drop_glue)] + #[salsa::cycle(cycle_result = crate::drop::has_drop_glue_cycle_result)] + fn has_drop_glue(&self, ty: Ty, env: Arc) -> DropGlue; } #[test] @@ -317,41 +330,22 @@ fn hir_database_is_dyn_compatible() { fn _assert_dyn_compatible(_: &dyn HirDatabase) {} } -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -pub struct InternedTypeOrConstParamId(ra_salsa::InternId); -impl_intern_key!(InternedTypeOrConstParamId); - -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -pub struct InternedLifetimeParamId(ra_salsa::InternId); -impl_intern_key!(InternedLifetimeParamId); +impl_intern_key!(InternedTypeOrConstParamId, TypeOrConstParamId); -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -pub struct InternedConstParamId(ra_salsa::InternId); -impl_intern_key!(InternedConstParamId); +impl_intern_key!(InternedLifetimeParamId, LifetimeParamId); -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -pub struct InternedOpaqueTyId(ra_salsa::InternId); -impl_intern_key!(InternedOpaqueTyId); +impl_intern_key!(InternedConstParamId, ConstParamId); -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -pub struct InternedClosureId(ra_salsa::InternId); -impl_intern_key!(InternedClosureId); +impl_intern_key!(InternedOpaqueTyId, ImplTraitId); -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)] pub struct InternedClosure(pub DefWithBodyId, pub ExprId); +impl_intern_key!(InternedClosureId, InternedClosure); -impl InternValueTrivial for InternedClosure {} - -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -pub struct InternedCoroutineId(ra_salsa::InternId); -impl_intern_key!(InternedCoroutineId); - -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)] pub struct InternedCoroutine(pub DefWithBodyId, pub ExprId); -impl InternValueTrivial for InternedCoroutine {} +impl_intern_key!(InternedCoroutineId, InternedCoroutine); -/// This exists just for Chalk, because Chalk just has a single `FnDefId` where -/// we have different IDs for struct and enum variant constructors. -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Ord, PartialOrd)] -pub struct InternedCallableDefId(ra_salsa::InternId); -impl_intern_key!(InternedCallableDefId); +// This exists just for Chalk, because Chalk just has a single `FnDefId` where +// we have different IDs for struct and enum variant constructors. +impl_intern_key!(InternedCallableDefId, CallableDefId); diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics.rs index 845d333335365..047a348fb09a7 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics.rs @@ -5,12 +5,12 @@ mod match_check; mod unsafe_check; pub use crate::diagnostics::{ - decl_check::{incorrect_case, CaseType, IncorrectCase}, + decl_check::{CaseType, IncorrectCase, incorrect_case}, expr::{ - record_literal_missing_fields, record_pattern_missing_fields, BodyValidationDiagnostic, + BodyValidationDiagnostic, record_literal_missing_fields, record_pattern_missing_fields, }, unsafe_check::{ - missing_unsafe, unsafe_operations, unsafe_operations_for_body, InsideUnsafeBlock, - UnsafetyReason, + InsideUnsafeBlock, UnsafetyReason, missing_unsafe, unsafe_operations, + unsafe_operations_for_body, }, }; diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check.rs index 774991560e9ca..099100a73288d 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check.rs @@ -16,20 +16,20 @@ mod case_conv; use std::fmt; use hir_def::{ - data::adt::VariantData, db::DefDatabase, hir::Pat, src::HasSource, AdtId, ConstId, EnumId, - EnumVariantId, FunctionId, HasModule, ItemContainerId, Lookup, ModuleDefId, ModuleId, StaticId, - StructId, TraitId, TypeAliasId, + AdtId, ConstId, EnumId, EnumVariantId, FunctionId, HasModule, ItemContainerId, Lookup, + ModuleDefId, ModuleId, StaticId, StructId, TraitId, TypeAliasId, db::DefDatabase, hir::Pat, + item_tree::FieldsShape, signatures::StaticFlags, src::HasSource, }; use hir_expand::{ + HirFileId, name::{AsName, Name}, - HirFileId, HirFileIdExt, }; use intern::sym; use stdx::{always, never}; use syntax::{ + AstNode, AstPtr, ToSmolStr, ast::{self, HasName}, utils::is_raw_identifier, - AstNode, AstPtr, ToSmolStr, }; use crate::db::HirDatabase; @@ -154,7 +154,7 @@ impl<'a> DeclValidator<'a> { fn validate_module(&mut self, module_id: ModuleId) { // Check the module name. - let Some(module_name) = module_id.name(self.db.upcast()) else { return }; + let Some(module_name) = module_id.name(self.db) else { return }; let Some(module_name_replacement) = to_lower_snake_case(module_name.as_str()).map(|new_name| Replacement { current_name: module_name, @@ -164,8 +164,8 @@ impl<'a> DeclValidator<'a> { else { return; }; - let module_data = &module_id.def_map(self.db.upcast())[module_id.local_id]; - let Some(module_src) = module_data.declaration_source(self.db.upcast()) else { + let module_data = &module_id.def_map(self.db)[module_id.local_id]; + let Some(module_src) = module_data.declaration_source(self.db) else { return; }; self.create_incorrect_case_diagnostic_for_ast_node( @@ -178,7 +178,7 @@ impl<'a> DeclValidator<'a> { fn validate_trait(&mut self, trait_id: TraitId) { // Check the trait name. - let data = self.db.trait_data(trait_id); + let data = self.db.trait_signature(trait_id); self.create_incorrect_case_diagnostic_for_item_name( trait_id, &data.name, @@ -188,7 +188,7 @@ impl<'a> DeclValidator<'a> { } fn validate_func(&mut self, func: FunctionId) { - let container = func.lookup(self.db.upcast()).container; + let container = func.lookup(self.db).container; if matches!(container, ItemContainerId::ExternBlockId(_)) { cov_mark::hit!(extern_func_incorrect_case_ignored); return; @@ -197,11 +197,11 @@ impl<'a> DeclValidator<'a> { // Check the function name. // Skipped if function is an associated item of a trait implementation. if !self.is_trait_impl_container(container) { - let data = self.db.function_data(func); + let data = self.db.function_signature(func); // Don't run the lint on extern "[not Rust]" fn items with the // #[no_mangle] attribute. - let no_mangle = self.db.attrs(func.into()).by_key(&sym::no_mangle).exists(); + let no_mangle = self.db.attrs(func.into()).by_key(sym::no_mangle).exists(); if no_mangle && data.abi.as_ref().is_some_and(|abi| *abi != sym::Rust) { cov_mark::hit!(extern_func_no_mangle_ignored); } else { @@ -251,7 +251,7 @@ impl<'a> DeclValidator<'a> { return; } - let (_, source_map) = self.db.body_with_source_map(func.into()); + let source_map = self.db.body_with_source_map(func.into()).1; for (id, replacement) in pats_replacements { let Ok(source_ptr) = source_map.pat_syntax(id) else { continue; @@ -259,7 +259,7 @@ impl<'a> DeclValidator<'a> { let Some(ptr) = source_ptr.value.cast::() else { continue; }; - let root = source_ptr.file_syntax(self.db.upcast()); + let root = source_ptr.file_syntax(self.db); let ident_pat = ptr.to_node(&root); let Some(parent) = ident_pat.syntax().parent() else { continue; @@ -287,13 +287,13 @@ impl<'a> DeclValidator<'a> { } fn edition(&self, id: impl HasModule) -> span::Edition { - let krate = id.krate(self.db.upcast()); - self.db.crate_graph()[krate].edition + let krate = id.krate(self.db); + krate.data(self.db).edition } fn validate_struct(&mut self, struct_id: StructId) { // Check the structure name. - let data = self.db.struct_data(struct_id); + let data = self.db.struct_signature(struct_id); self.create_incorrect_case_diagnostic_for_item_name( struct_id, &data.name, @@ -307,12 +307,13 @@ impl<'a> DeclValidator<'a> { /// Check incorrect names for struct fields. fn validate_struct_fields(&mut self, struct_id: StructId) { - let data = self.db.struct_data(struct_id); - let VariantData::Record { fields, .. } = data.variant_data.as_ref() else { + let data = self.db.variant_fields(struct_id.into()); + if data.shape != FieldsShape::Record { return; }; let edition = self.edition(struct_id); - let mut struct_fields_replacements = fields + let mut struct_fields_replacements = data + .fields() .iter() .filter_map(|(_, field)| { to_lower_snake_case(&field.name.display_no_db(edition).to_smolstr()).map( @@ -330,8 +331,8 @@ impl<'a> DeclValidator<'a> { return; } - let struct_loc = struct_id.lookup(self.db.upcast()); - let struct_src = struct_loc.source(self.db.upcast()); + let struct_loc = struct_id.lookup(self.db); + let struct_src = struct_loc.source(self.db); let Some(ast::FieldList::RecordFieldList(struct_fields_list)) = struct_src.value.field_list() @@ -378,7 +379,7 @@ impl<'a> DeclValidator<'a> { } fn validate_enum(&mut self, enum_id: EnumId) { - let data = self.db.enum_data(enum_id); + let data = self.db.enum_signature(enum_id); // Check the enum name. self.create_incorrect_case_diagnostic_for_item_name( @@ -394,7 +395,7 @@ impl<'a> DeclValidator<'a> { /// Check incorrect names for enum variants. fn validate_enum_variants(&mut self, enum_id: EnumId) { - let data = self.db.enum_data(enum_id); + let data = self.db.enum_variants(enum_id); for (variant_id, _) in data.variants.iter() { self.validate_enum_variant_fields(*variant_id); @@ -420,8 +421,8 @@ impl<'a> DeclValidator<'a> { return; } - let enum_loc = enum_id.lookup(self.db.upcast()); - let enum_src = enum_loc.source(self.db.upcast()); + let enum_loc = enum_id.lookup(self.db); + let enum_src = enum_loc.source(self.db); let Some(enum_variants_list) = enum_src.value.variant_list() else { always!( @@ -467,12 +468,13 @@ impl<'a> DeclValidator<'a> { /// Check incorrect names for fields of enum variant. fn validate_enum_variant_fields(&mut self, variant_id: EnumVariantId) { - let variant_data = self.db.enum_variant_data(variant_id); - let VariantData::Record { fields, .. } = variant_data.variant_data.as_ref() else { + let variant_data = self.db.variant_fields(variant_id.into()); + if variant_data.shape != FieldsShape::Record { return; }; let edition = self.edition(variant_id); - let mut variant_field_replacements = fields + let mut variant_field_replacements = variant_data + .fields() .iter() .filter_map(|(_, field)| { to_lower_snake_case(&field.name.display_no_db(edition).to_smolstr()).map( @@ -490,8 +492,8 @@ impl<'a> DeclValidator<'a> { return; } - let variant_loc = variant_id.lookup(self.db.upcast()); - let variant_src = variant_loc.source(self.db.upcast()); + let variant_loc = variant_id.lookup(self.db); + let variant_src = variant_loc.source(self.db); let Some(ast::FieldList::RecordFieldList(variant_fields_list)) = variant_src.value.field_list() @@ -538,13 +540,13 @@ impl<'a> DeclValidator<'a> { } fn validate_const(&mut self, const_id: ConstId) { - let container = const_id.lookup(self.db.upcast()).container; + let container = const_id.lookup(self.db).container; if self.is_trait_impl_container(container) { cov_mark::hit!(trait_impl_assoc_const_incorrect_case_ignored); return; } - let data = self.db.const_data(const_id); + let data = self.db.const_signature(const_id); let Some(name) = &data.name else { return; }; @@ -557,8 +559,8 @@ impl<'a> DeclValidator<'a> { } fn validate_static(&mut self, static_id: StaticId) { - let data = self.db.static_data(static_id); - if data.is_extern { + let data = self.db.static_signature(static_id); + if data.flags.contains(StaticFlags::EXTERN) { cov_mark::hit!(extern_static_incorrect_case_ignored); return; } @@ -572,14 +574,14 @@ impl<'a> DeclValidator<'a> { } fn validate_type_alias(&mut self, type_alias_id: TypeAliasId) { - let container = type_alias_id.lookup(self.db.upcast()).container; + let container = type_alias_id.lookup(self.db).container; if self.is_trait_impl_container(container) { cov_mark::hit!(trait_impl_assoc_type_incorrect_case_ignored); return; } // Check the type alias name. - let data = self.db.type_alias_data(type_alias_id); + let data = self.db.type_alias_signature(type_alias_id); self.create_incorrect_case_diagnostic_for_item_name( type_alias_id, &data.name, @@ -597,7 +599,7 @@ impl<'a> DeclValidator<'a> { ) where N: AstNode + HasName + fmt::Debug, S: HasSource, - L: Lookup = dyn DefDatabase + 'a> + HasModule + Copy, + L: Lookup + HasModule + Copy, { let to_expected_case_type = match expected_case { CaseType::LowerSnakeCase => to_lower_snake_case, @@ -605,19 +607,16 @@ impl<'a> DeclValidator<'a> { CaseType::UpperCamelCase => to_camel_case, }; let edition = self.edition(item_id); - let Some(replacement) = to_expected_case_type( - &name.display(self.db.upcast(), edition).to_smolstr(), - ) - .map(|new_name| Replacement { - current_name: name.clone(), - suggested_text: new_name, - expected_case, - }) else { + let Some(replacement) = + to_expected_case_type(&name.display(self.db, edition).to_smolstr()).map(|new_name| { + Replacement { current_name: name.clone(), suggested_text: new_name, expected_case } + }) + else { return; }; - let item_loc = item_id.lookup(self.db.upcast()); - let item_src = item_loc.source(self.db.upcast()); + let item_loc = item_id.lookup(self.db); + let item_src = item_loc.source(self.db); self.create_incorrect_case_diagnostic_for_ast_node( replacement, item_src.file_id, @@ -645,13 +644,13 @@ impl<'a> DeclValidator<'a> { return; }; - let edition = file_id.original_file(self.db.upcast()).edition(); + let edition = file_id.original_file(self.db).edition(self.db); let diagnostic = IncorrectCase { file: file_id, ident_type, ident: AstPtr::new(&name_ast), expected_case: replacement.expected_case, - ident_text: replacement.current_name.display(self.db.upcast(), edition).to_string(), + ident_text: replacement.current_name.display(self.db, edition).to_string(), suggested_text: replacement.suggested_text, }; diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check/case_conv.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check/case_conv.rs index 348f8a0f4a856..234c7e4b03c3c 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check/case_conv.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check/case_conv.rs @@ -97,7 +97,7 @@ fn is_snake_case bool>(ident: &str, wrong_case: F) -> bool { #[cfg(test)] mod tests { use super::*; - use expect_test::{expect, Expect}; + use expect_test::{Expect, expect}; fn check Option>(fun: F, input: &str, expect: Expect) { // `None` is translated to empty string, meaning that there is nothing to fix. diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/expr.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/expr.rs index cc6f4d9e52eb8..57106412765a2 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/expr.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/expr.rs @@ -4,40 +4,40 @@ use std::fmt; -use base_db::CrateId; +use base_db::Crate; use chalk_solve::rust_ir::AdtKind; use either::Either; use hir_def::{ + AdtId, AssocItemId, DefWithBodyId, HasModule, ItemContainerId, Lookup, lang_item::LangItem, resolver::{HasResolver, ValueNs}, - AdtId, AssocItemId, DefWithBodyId, HasModule, ItemContainerId, Lookup, }; use intern::sym; use itertools::Itertools; use rustc_hash::FxHashSet; use rustc_pattern_analysis::constructor::Constructor; use syntax::{ - ast::{self, UnaryOp}, AstNode, + ast::{self, UnaryOp}, }; use tracing::debug; use triomphe::Arc; use typed_arena::Arena; use crate::{ + Adjust, InferenceResult, Interner, Ty, TyExt, TyKind, db::HirDatabase, diagnostics::match_check::{ self, pat_analysis::{self, DeconstructedPat, MatchCheckCtx, WitnessPat}, }, display::{DisplayTarget, HirDisplay}, - Adjust, InferenceResult, Interner, Ty, TyExt, TyKind, }; pub(crate) use hir_def::{ + LocalFieldId, VariantId, expr_store::Body, hir::{Expr, ExprId, MatchArm, Pat, PatId, Statement}, - LocalFieldId, VariantId, }; pub enum BodyValidationDiagnostic { @@ -164,9 +164,8 @@ impl ExprValidator { None => return, }; - let checker = filter_map_next_checker.get_or_insert_with(|| { - FilterMapNextChecker::new(&self.owner.resolver(db.upcast()), db) - }); + let checker = filter_map_next_checker + .get_or_insert_with(|| FilterMapNextChecker::new(&self.owner.resolver(db), db)); if checker.check(call_id, receiver, &callee).is_some() { self.diagnostics.push(BodyValidationDiagnostic::ReplaceFilterMapNextWithFindMap { @@ -191,7 +190,7 @@ impl ExprValidator { return; } - let cx = MatchCheckCtx::new(self.owner.module(db.upcast()), self.owner, db); + let cx = MatchCheckCtx::new(self.owner.module(db), self.owner, db); let pattern_arena = Arena::new(); let mut m_arms = Vec::with_capacity(arms.len()); @@ -264,7 +263,7 @@ impl ExprValidator { scrut_ty, witnesses, m_arms.is_empty(), - self.owner.krate(db.upcast()), + self.owner.krate(db), ), }); } @@ -288,17 +287,16 @@ impl ExprValidator { match &self.body[scrutinee_expr] { Expr::UnaryOp { op: UnaryOp::Deref, .. } => false, Expr::Path(path) => { - let value_or_partial = - self.owner.resolver(db.upcast()).resolve_path_in_value_ns_fully( - db.upcast(), - path, - self.body.expr_path_hygiene(scrutinee_expr), - ); + let value_or_partial = self.owner.resolver(db).resolve_path_in_value_ns_fully( + db, + path, + self.body.expr_path_hygiene(scrutinee_expr), + ); value_or_partial.is_none_or(|v| !matches!(v, ValueNs::StaticId(_))) } Expr::Field { expr, .. } => match self.infer.type_of_expr[*expr].kind(Interner) { TyKind::Adt(adt, ..) - if db.adt_datum(self.owner.krate(db.upcast()), *adt).kind == AdtKind::Union => + if db.adt_datum(self.owner.krate(db), *adt).kind == AdtKind::Union => { false } @@ -319,7 +317,7 @@ impl ExprValidator { return; }; let pattern_arena = Arena::new(); - let cx = MatchCheckCtx::new(self.owner.module(db.upcast()), self.owner, db); + let cx = MatchCheckCtx::new(self.owner.module(db), self.owner, db); for stmt in &**statements { let &Statement::Let { pat, initializer, else_branch: None, .. } = stmt else { continue; @@ -359,7 +357,7 @@ impl ExprValidator { ty, witnesses, false, - self.owner.krate(db.upcast()), + self.owner.krate(db), ), }); } @@ -434,11 +432,11 @@ impl ExprValidator { let last_then_expr_ty = &self.infer[last_then_expr]; if last_then_expr_ty.is_never() { // Only look at sources if the then branch diverges and we have an else branch. - let (_, source_map) = db.body_with_source_map(self.owner); + let source_map = db.body_with_source_map(self.owner).1; let Ok(source_ptr) = source_map.expr_syntax(id) else { return; }; - let root = source_ptr.file_syntax(db.upcast()); + let root = source_ptr.file_syntax(db); let either::Left(ast::Expr::IfExpr(if_expr)) = source_ptr.value.to_node(&root) else { @@ -484,19 +482,16 @@ struct FilterMapNextChecker { impl FilterMapNextChecker { fn new(resolver: &hir_def::resolver::Resolver, db: &dyn HirDatabase) -> Self { // Find and store the FunctionIds for Iterator::filter_map and Iterator::next - let (next_function_id, filter_map_function_id) = match db - .lang_item(resolver.krate(), LangItem::IteratorNext) - .and_then(|it| it.as_function()) + let (next_function_id, filter_map_function_id) = match LangItem::IteratorNext + .resolve_function(db, resolver.krate()) { Some(next_function_id) => ( Some(next_function_id), - match next_function_id.lookup(db.upcast()).container { + match next_function_id.lookup(db).container { ItemContainerId::TraitId(iterator_trait_id) => { - let iterator_trait_items = &db.trait_data(iterator_trait_id).items; + let iterator_trait_items = &db.trait_items(iterator_trait_id).items; iterator_trait_items.iter().find_map(|(name, it)| match it { - &AssocItemId::FunctionId(id) if *name == sym::filter_map.clone() => { - Some(id) - } + &AssocItemId::FunctionId(id) if *name == sym::filter_map => Some(id), _ => None, }) } @@ -558,7 +553,7 @@ pub fn record_literal_missing_fields( return None; } - let variant_data = variant_def.variant_data(db.upcast()); + let variant_data = variant_def.variant_data(db); let specified_fields: FxHashSet<_> = fields.iter().map(|f| &f.name).collect(); let missed_fields: Vec = variant_data @@ -588,7 +583,7 @@ pub fn record_pattern_missing_fields( return None; } - let variant_data = variant_def.variant_data(db.upcast()); + let variant_data = variant_def.variant_data(db); let specified_fields: FxHashSet<_> = fields.iter().map(|f| &f.name).collect(); let missed_fields: Vec = variant_data @@ -630,7 +625,7 @@ fn missing_match_arms<'p>( scrut_ty: &Ty, witnesses: Vec>, arms_is_empty: bool, - krate: CrateId, + krate: Crate, ) -> String { struct DisplayWitness<'a, 'p>(&'a WitnessPat<'p>, &'a MatchCheckCtx<'p>, DisplayTarget); impl fmt::Display for DisplayWitness<'_, '_> { @@ -642,7 +637,7 @@ fn missing_match_arms<'p>( } let non_empty_enum = match scrut_ty.as_adt() { - Some((AdtId::EnumId(e), _)) => !cx.db.enum_data(e).variants.is_empty(), + Some((AdtId::EnumId(e), _)) => !cx.db.enum_variants(e).variants.is_empty(), _ => false, }; let display_target = DisplayTarget::from_crate(cx.db, krate); diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check.rs index b0f9fc53e29ee..7df22a45cb4ef 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check.rs @@ -11,19 +11,21 @@ pub(crate) mod pat_analysis; use chalk_ir::Mutability; use hir_def::{ - data::adt::VariantData, expr_store::Body, hir::PatId, AdtId, EnumVariantId, LocalFieldId, - VariantId, + AdtId, EnumVariantId, LocalFieldId, Lookup, VariantId, + expr_store::{Body, path::Path}, + hir::PatId, + item_tree::FieldsShape, }; use hir_expand::name::Name; use span::Edition; use stdx::{always, never}; use crate::{ + InferenceResult, Interner, Substitution, Ty, TyExt, TyKind, db::HirDatabase, display::{HirDisplay, HirDisplayError, HirFormatter}, infer::BindingMode, lang_items::is_box, - InferenceResult, Interner, Substitution, Ty, TyExt, TyKind, }; use self::pat_util::EnumerateAndAdjustIterator; @@ -155,7 +157,7 @@ impl<'a> PatCtxt<'a> { (BindingMode::Ref(_), _) => { never!( "`ref {}` has wrong type {:?}", - name.display(self.db.upcast(), Edition::LATEST), + name.display(self.db, Edition::LATEST), ty ); self.errors.push(PatternError::UnexpectedType); @@ -167,13 +169,13 @@ impl<'a> PatCtxt<'a> { } hir_def::hir::Pat::TupleStruct { ref args, ellipsis, .. } if variant.is_some() => { - let expected_len = variant.unwrap().variant_data(self.db.upcast()).fields().len(); + let expected_len = variant.unwrap().variant_data(self.db).fields().len(); let subpatterns = self.lower_tuple_subpats(args, expected_len, ellipsis); self.lower_variant_or_leaf(pat, ty, subpatterns) } hir_def::hir::Pat::Record { ref args, .. } if variant.is_some() => { - let variant_data = variant.unwrap().variant_data(self.db.upcast()); + let variant_data = variant.unwrap().variant_data(self.db); let subpatterns = args .iter() .map(|field| { @@ -242,7 +244,7 @@ impl<'a> PatCtxt<'a> { ty: &Ty, subpatterns: Vec, ) -> PatKind { - let kind = match self.infer.variant_resolution_for_pat(pat) { + match self.infer.variant_resolution_for_pat(pat) { Some(variant_id) => { if let VariantId::EnumVariantId(enum_variant) = variant_id { let substs = match ty.kind(Interner) { @@ -266,11 +268,10 @@ impl<'a> PatCtxt<'a> { self.errors.push(PatternError::UnresolvedVariant); PatKind::Wild } - }; - kind + } } - fn lower_path(&mut self, pat: PatId, _path: &hir_def::path::Path) -> Pat { + fn lower_path(&mut self, pat: PatId, _path: &Path) -> Pat { let ty = &self.infer[pat]; let pat_from_kind = |kind| Pat { ty: ty.clone(), kind: Box::new(kind) }; @@ -303,7 +304,7 @@ impl HirDisplay for Pat { PatKind::Wild => write!(f, "_"), PatKind::Never => write!(f, "!"), PatKind::Binding { name, subpattern } => { - write!(f, "{}", name.display(f.db.upcast(), f.edition()))?; + write!(f, "{}", name.display(f.db, f.edition()))?; if let Some(subpattern) = subpattern { write!(f, " @ ")?; subpattern.hir_fmt(f)?; @@ -323,26 +324,29 @@ impl HirDisplay for Pat { if let Some(variant) = variant { match variant { VariantId::EnumVariantId(v) => { + let loc = v.lookup(f.db); write!( f, "{}", - f.db.enum_variant_data(v).name.display(f.db.upcast(), f.edition()) + f.db.enum_variants(loc.parent).variants[loc.index as usize] + .1 + .display(f.db, f.edition()) )?; } VariantId::StructId(s) => write!( f, "{}", - f.db.struct_data(s).name.display(f.db.upcast(), f.edition()) + f.db.struct_signature(s).name.display(f.db, f.edition()) )?, VariantId::UnionId(u) => write!( f, "{}", - f.db.union_data(u).name.display(f.db.upcast(), f.edition()) + f.db.union_signature(u).name.display(f.db, f.edition()) )?, }; - let variant_data = variant.variant_data(f.db.upcast()); - if let VariantData::Record { fields: rec_fields, .. } = &*variant_data { + let variant_data = variant.variant_data(f.db); + if variant_data.shape == FieldsShape::Record { write!(f, " {{ ")?; let mut printed = 0; @@ -351,20 +355,20 @@ impl HirDisplay for Pat { .filter(|p| !matches!(*p.pattern.kind, PatKind::Wild)) .map(|p| { printed += 1; - WriteWith(move |f| { + WriteWith(|f| { write!( f, "{}: ", - rec_fields[p.field] + variant_data.fields()[p.field] .name - .display(f.db.upcast(), f.edition()) + .display(f.db, f.edition()) )?; p.pattern.hir_fmt(f) }) }); f.write_joined(subpats, ", ")?; - if printed < rec_fields.len() { + if printed < variant_data.fields().len() { write!(f, "{}..", if printed > 0 { ", " } else { "" })?; } @@ -372,8 +376,8 @@ impl HirDisplay for Pat { } } - let num_fields = variant - .map_or(subpatterns.len(), |v| v.variant_data(f.db.upcast()).fields().len()); + let num_fields = + variant.map_or(subpatterns.len(), |v| v.variant_data(f.db).fields().len()); if num_fields != 0 || variant.is_none() { write!(f, "(")?; let subpats = (0..num_fields).map(|i| { diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs index 91eb59fb3140f..068fc22f2cac3 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs @@ -6,21 +6,21 @@ use std::fmt; use hir_def::{DefWithBodyId, EnumId, EnumVariantId, HasModule, LocalFieldId, ModuleId, VariantId}; use intern::sym; use rustc_pattern_analysis::{ - constructor::{Constructor, ConstructorSet, VariantVisibility}, - usefulness::{compute_match_usefulness, PlaceValidity, UsefulnessReport}, Captures, IndexVec, PatCx, PrivateUninhabitedField, + constructor::{Constructor, ConstructorSet, VariantVisibility}, + usefulness::{PlaceValidity, UsefulnessReport, compute_match_usefulness}, }; -use smallvec::{smallvec, SmallVec}; +use smallvec::{SmallVec, smallvec}; use stdx::never; use crate::{ + AdtId, Interner, Scalar, Ty, TyExt, TyKind, db::HirDatabase, infer::normalize, inhabitedness::{is_enum_variant_uninhabited_from, is_ty_uninhabited_from}, - AdtId, Interner, Scalar, Ty, TyExt, TyKind, }; -use super::{is_box, FieldPat, Pat, PatKind}; +use super::{FieldPat, Pat, PatKind, is_box}; use Constructor::*; @@ -44,12 +44,12 @@ impl EnumVariantContiguousIndex { fn from_enum_variant_id(db: &dyn HirDatabase, target_evid: EnumVariantId) -> Self { // Find the index of this variant in the list of variants. use hir_def::Lookup; - let i = target_evid.lookup(db.upcast()).index as usize; + let i = target_evid.lookup(db).index as usize; EnumVariantContiguousIndex(i) } fn to_enum_variant_id(self, db: &dyn HirDatabase, eid: EnumId) -> EnumVariantId { - db.enum_data(eid).variants[self.0].0 + db.enum_variants(eid).variants[self.0].0 } } @@ -105,8 +105,8 @@ impl<'db> MatchCheckCtx<'db> { /// Returns whether the given ADT is from another crate declared `#[non_exhaustive]`. fn is_foreign_non_exhaustive(&self, adt: hir_def::AdtId) -> bool { - let is_local = adt.krate(self.db.upcast()) == self.module.krate(); - !is_local && self.db.attrs(adt.into()).by_key(&sym::non_exhaustive).exists() + let is_local = adt.krate(self.db) == self.module.krate(); + !is_local && self.db.attrs(adt.into()).by_key(sym::non_exhaustive).exists() } fn variant_id_for_adt( @@ -139,7 +139,7 @@ impl<'db> MatchCheckCtx<'db> { let (_, substs) = ty.as_adt().unwrap(); let field_tys = self.db.field_types(variant); - let fields_len = variant.variant_data(self.db.upcast()).fields().len() as u32; + let fields_len = variant.variant_data(self.db).fields().len() as u32; (0..fields_len).map(|idx| LocalFieldId::from_raw(idx.into())).map(move |fid| { let ty = field_tys[fid].clone().substitute(Interner, substs); @@ -222,7 +222,7 @@ impl<'db> MatchCheckCtx<'db> { } }; let variant = Self::variant_id_for_adt(self.db, &ctor, adt).unwrap(); - arity = variant.variant_data(self.db.upcast()).fields().len(); + arity = variant.variant_data(self.db).fields().len(); } _ => { never!("pattern has unexpected type: pat: {:?}, ty: {:?}", pat, &pat.ty); @@ -301,6 +301,7 @@ impl<'db> MatchCheckCtx<'db> { // ignore this issue. Ref => PatKind::Deref { subpattern: subpatterns.next().unwrap() }, Slice(_) => unimplemented!(), + DerefPattern(_) => unimplemented!(), &Str(void) => match void {}, Wildcard | NonExhaustive | Hidden | PrivateUninhabited => PatKind::Wild, Never => PatKind::Never, @@ -341,7 +342,7 @@ impl PatCx for MatchCheckCtx<'_> { 1 } else { let variant = Self::variant_id_for_adt(self.db, ctor, adt).unwrap(); - variant.variant_data(self.db.upcast()).fields().len() + variant.variant_data(self.db).fields().len() } } _ => { @@ -351,6 +352,7 @@ impl PatCx for MatchCheckCtx<'_> { }, Ref => 1, Slice(..) => unimplemented!(), + DerefPattern(..) => unimplemented!(), Never | Bool(..) | IntRange(..) | F16Range(..) | F32Range(..) | F64Range(..) | F128Range(..) | Str(..) | Opaque(..) | NonExhaustive | PrivateUninhabited | Hidden | Missing | Wildcard => 0, @@ -389,8 +391,7 @@ impl PatCx for MatchCheckCtx<'_> { .map(move |(fid, ty)| { let is_visible = || { matches!(adt, hir_def::AdtId::EnumId(..)) - || visibilities[fid] - .is_visible_from(self.db.upcast(), self.module) + || visibilities[fid].is_visible_from(self.db, self.module) }; let is_uninhabited = self.is_uninhabited(&ty); let private_uninhabited = is_uninhabited && !is_visible(); @@ -412,6 +413,7 @@ impl PatCx for MatchCheckCtx<'_> { } }, Slice(_) => unreachable!("Found a `Slice` constructor in match checking"), + DerefPattern(_) => unreachable!("Found a `DerefPattern` constructor in match checking"), Never | Bool(..) | IntRange(..) | F16Range(..) | F32Range(..) | F64Range(..) | F128Range(..) | Str(..) | Opaque(..) | NonExhaustive | PrivateUninhabited | Hidden | Missing | Wildcard => { @@ -449,7 +451,7 @@ impl PatCx for MatchCheckCtx<'_> { TyKind::Scalar(Scalar::Int(..) | Scalar::Uint(..)) => unhandled(), TyKind::Array(..) | TyKind::Slice(..) => unhandled(), &TyKind::Adt(AdtId(adt @ hir_def::AdtId::EnumId(enum_id)), ref subst) => { - let enum_data = cx.db.enum_data(enum_id); + let enum_data = cx.db.enum_variants(enum_id); let is_declared_nonexhaustive = cx.is_foreign_non_exhaustive(adt); if enum_data.variants.is_empty() && !is_declared_nonexhaustive { @@ -493,13 +495,13 @@ impl PatCx for MatchCheckCtx<'_> { // if let Some(variant) = variant { // match variant { // VariantId::EnumVariantId(v) => { - // write!(f, "{}", db.enum_variant_data(v).name.display(db.upcast()))?; + // write!(f, "{}", db.enum_variant_data(v).name.display(db))?; // } // VariantId::StructId(s) => { - // write!(f, "{}", db.struct_data(s).name.display(db.upcast()))? + // write!(f, "{}", db.struct_data(s).name.display(db))? // } // VariantId::UnionId(u) => { - // write!(f, "{}", db.union_data(u).name.display(db.upcast()))? + // write!(f, "{}", db.union_data(u).name.display(db))? // } // } // } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/unsafe_check.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/unsafe_check.rs index d2b908839c42e..73b99db726841 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/unsafe_check.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/unsafe_check.rs @@ -5,18 +5,18 @@ use std::mem; use either::Either; use hir_def::{ - expr_store::Body, + AdtId, DefWithBodyId, FieldId, FunctionId, VariantId, + expr_store::{Body, path::Path}, hir::{Expr, ExprId, ExprOrPatId, Pat, PatId, Statement, UnaryOp}, - path::Path, resolver::{HasResolver, ResolveValueResult, Resolver, ValueNs}, + signatures::StaticFlags, type_ref::Rawness, - AdtId, DefWithBodyId, FieldId, FunctionId, VariantId, }; use span::Edition; use crate::{ - db::HirDatabase, utils::is_fn_unsafe_to_call, InferenceResult, Interner, TargetFeatures, TyExt, - TyKind, + InferenceResult, Interner, TargetFeatures, TyExt, TyKind, db::HirDatabase, + utils::is_fn_unsafe_to_call, }; #[derive(Debug, Default)] @@ -31,11 +31,10 @@ pub fn missing_unsafe(db: &dyn HirDatabase, def: DefWithBodyId) -> MissingUnsafe let _p = tracing::info_span!("missing_unsafe").entered(); let is_unsafe = match def { - DefWithBodyId::FunctionId(it) => db.function_data(it).is_unsafe(), - DefWithBodyId::StaticId(_) - | DefWithBodyId::ConstId(_) - | DefWithBodyId::VariantId(_) - | DefWithBodyId::InTypeConstId(_) => false, + DefWithBodyId::FunctionId(it) => db.function_signature(it).is_unsafe(), + DefWithBodyId::StaticId(_) | DefWithBodyId::ConstId(_) | DefWithBodyId::VariantId(_) => { + false + } }; let mut res = MissingUnsafeResult { fn_is_unsafe: is_unsafe, ..MissingUnsafeResult::default() }; @@ -128,7 +127,7 @@ pub fn unsafe_operations( } }; let mut visitor = UnsafeVisitor::new(db, infer, body, def, &mut visitor_callback); - _ = visitor.resolver.update_to_inner_scope(db.upcast(), def, current); + _ = visitor.resolver.update_to_inner_scope(db, def, current); visitor.walk_expr(current); } @@ -155,12 +154,12 @@ impl<'a> UnsafeVisitor<'a> { def: DefWithBodyId, unsafe_expr_cb: &'a mut dyn FnMut(UnsafeDiagnostic), ) -> Self { - let resolver = def.resolver(db.upcast()); + let resolver = def.resolver(db); let def_target_features = match def { DefWithBodyId::FunctionId(func) => TargetFeatures::from_attrs(&db.attrs(func.into())), _ => TargetFeatures::default(), }; - let edition = db.crate_graph()[resolver.module().krate()].edition; + let edition = resolver.module().krate().data(db).edition; Self { db, infer, @@ -201,7 +200,7 @@ impl<'a> UnsafeVisitor<'a> { } fn walk_pats_top(&mut self, pats: impl Iterator, parent_expr: ExprId) { - let guard = self.resolver.update_to_inner_scope(self.db.upcast(), self.def, parent_expr); + let guard = self.resolver.update_to_inner_scope(self.db, self.def, parent_expr); pats.for_each(|pat| self.walk_pat(pat)); self.resolver.reset_to_guard(guard); } @@ -269,8 +268,7 @@ impl<'a> UnsafeVisitor<'a> { } } Expr::Path(path) => { - let guard = - self.resolver.update_to_inner_scope(self.db.upcast(), self.def, current); + let guard = self.resolver.update_to_inner_scope(self.db, self.def, current); self.mark_unsafe_path(current.into(), path); self.resolver.reset_to_guard(guard); } @@ -350,6 +348,7 @@ impl<'a> UnsafeVisitor<'a> { Expr::Closure { args, .. } => { self.walk_pats_top(args.iter().copied(), current); } + Expr::Const(e) => self.walk_expr(*e), _ => {} } @@ -358,13 +357,14 @@ impl<'a> UnsafeVisitor<'a> { fn mark_unsafe_path(&mut self, node: ExprOrPatId, path: &Path) { let hygiene = self.body.expr_or_pat_path_hygiene(node); - let value_or_partial = - self.resolver.resolve_path_in_value_ns(self.db.upcast(), path, hygiene); + let value_or_partial = self.resolver.resolve_path_in_value_ns(self.db, path, hygiene); if let Some(ResolveValueResult::ValueNs(ValueNs::StaticId(id), _)) = value_or_partial { - let static_data = self.db.static_data(id); - if static_data.mutable { + let static_data = self.db.static_signature(id); + if static_data.flags.contains(StaticFlags::MUTABLE) { self.on_unsafe_op(node, UnsafetyReason::MutableStatic); - } else if static_data.is_extern && !static_data.has_safe_kw { + } else if static_data.flags.contains(StaticFlags::EXTERN) + && !static_data.flags.contains(StaticFlags::EXPLICIT_SAFE) + { self.on_unsafe_op(node, UnsafetyReason::ExternStatic); } } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/display.rs b/src/tools/rust-analyzer/crates/hir-ty/src/display.rs index 95ce36390d33d..f0989d9de91f0 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/display.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/display.rs @@ -7,32 +7,34 @@ use std::{ mem, }; -use base_db::CrateId; +use base_db::Crate; use chalk_ir::{BoundVar, Safety, TyKind}; use either::Either; use hir_def::{ - data::adt::VariantData, + GenericDefId, HasModule, ImportPathConfig, ItemContainerId, LocalFieldId, Lookup, ModuleDefId, + ModuleId, TraitId, db::DefDatabase, + expr_store::{ExpressionStore, path::Path}, find_path::{self, PrefixKind}, - generics::{TypeOrConstParamData, TypeParamProvenance}, + hir::generics::{TypeOrConstParamData, TypeParamProvenance, WherePredicate}, item_scope::ItemInNs, - lang_item::{LangItem, LangItemTarget}, + item_tree::FieldsShape, + lang_item::LangItem, nameres::DefMap, - path::{Path, PathKind}, + signatures::VariantFields, type_ref::{ - TraitBoundModifier, TypeBound, TypeRef, TypeRefId, TypesMap, TypesSourceMap, UseArgRef, + ConstRef, LifetimeRef, LifetimeRefId, TraitBoundModifier, TypeBound, TypeRef, TypeRefId, + UseArgRef, }, visibility::Visibility, - GenericDefId, HasModule, ImportPathConfig, ItemContainerId, LocalFieldId, Lookup, ModuleDefId, - ModuleId, TraitId, }; -use hir_expand::name::Name; -use intern::{sym, Internable, Interned}; +use hir_expand::{mod_path::PathKind, name::Name}; +use intern::{Internable, Interned, sym}; use itertools::Itertools; use la_arena::ArenaMap; use rustc_apfloat::{ - ieee::{Half as f16, Quad as f128}, Float, + ieee::{Half as f16, Quad as f128}, }; use rustc_hash::FxHashSet; use smallvec::SmallVec; @@ -41,6 +43,11 @@ use stdx::never; use triomphe::Arc; use crate::{ + AdtId, AliasEq, AliasTy, Binders, CallableDefId, CallableSig, ConcreteConst, Const, + ConstScalar, ConstValue, DomainGoal, FnAbi, GenericArg, ImplTraitId, Interner, Lifetime, + LifetimeData, LifetimeOutlives, MemoryMap, Mutability, OpaqueTy, ProjectionTy, ProjectionTyExt, + QuantifiedWhereClause, Scalar, Substitution, TraitEnvironment, TraitRef, TraitRefExt, Ty, + TyExt, WhereClause, consteval::try_const_usize, db::{HirDatabase, InternedClosure}, from_assoc_type_id, from_foreign_def_id, from_placeholder_idx, @@ -51,12 +58,7 @@ use crate::{ mapping::from_chalk, mir::pad16, primitive, to_assoc_type_id, - utils::{self, detect_variant_from_bytes, ClosureSubst}, - AdtId, AliasEq, AliasTy, Binders, CallableDefId, CallableSig, ConcreteConst, Const, - ConstScalar, ConstValue, DomainGoal, FnAbi, GenericArg, ImplTraitId, Interner, Lifetime, - LifetimeData, LifetimeOutlives, MemoryMap, Mutability, OpaqueTy, ProjectionTy, ProjectionTyExt, - QuantifiedWhereClause, Scalar, Substitution, TraitEnvironment, TraitRef, TraitRefExt, Ty, - TyExt, WhereClause, + utils::{self, ClosureSubst, detect_variant_from_bytes}, }; pub trait HirWrite: fmt::Write { @@ -88,11 +90,26 @@ pub struct HirFormatter<'a> { show_container_bounds: bool, omit_verbose_types: bool, closure_style: ClosureStyle, + display_lifetimes: DisplayLifetime, display_kind: DisplayKind, display_target: DisplayTarget, bounds_formatting_ctx: BoundsFormattingCtx, } +// FIXME: To consider, ref and dyn trait lifetimes can be omitted if they are `'_`, path args should +// not be when in signatures +// So this enum does not encode this well enough +// Also 'static can be omitted for ref and dyn trait lifetimes in static/const item types +// FIXME: Also named lifetimes may be rendered in places where their name is not in scope? +#[derive(Copy, Clone)] +pub enum DisplayLifetime { + Always, + OnlyStatic, + OnlyNamed, + OnlyNamedOrStatic, + Never, +} + #[derive(Default)] enum BoundsFormattingCtx { Entered { @@ -153,6 +170,21 @@ impl HirFormatter<'_> { } } } + + fn render_lifetime(&self, lifetime: &Lifetime) -> bool { + match self.display_lifetimes { + DisplayLifetime::Always => true, + DisplayLifetime::OnlyStatic => matches!(***lifetime.interned(), LifetimeData::Static), + DisplayLifetime::OnlyNamed => { + matches!(***lifetime.interned(), LifetimeData::Placeholder(_)) + } + DisplayLifetime::OnlyNamedOrStatic => matches!( + ***lifetime.interned(), + LifetimeData::Static | LifetimeData::Placeholder(_) + ), + DisplayLifetime::Never => false, + } + } } pub trait HirDisplay { @@ -187,6 +219,7 @@ pub trait HirDisplay { display_kind, closure_style, show_container_bounds, + display_lifetimes: DisplayLifetime::OnlyNamedOrStatic, } } @@ -210,6 +243,7 @@ pub trait HirDisplay { display_target, display_kind: DisplayKind::Diagnostics, show_container_bounds: false, + display_lifetimes: DisplayLifetime::OnlyNamedOrStatic, } } @@ -234,6 +268,7 @@ pub trait HirDisplay { display_target, display_kind: DisplayKind::Diagnostics, show_container_bounds: false, + display_lifetimes: DisplayLifetime::OnlyNamedOrStatic, } } @@ -258,6 +293,7 @@ pub trait HirDisplay { display_target, display_kind: DisplayKind::Diagnostics, show_container_bounds: false, + display_lifetimes: DisplayLifetime::OnlyNamedOrStatic, } } @@ -282,6 +318,7 @@ pub trait HirDisplay { display_target: DisplayTarget::from_crate(db, module_id.krate()), display_kind: DisplayKind::SourceCode { target_module_id: module_id, allow_opaque }, show_container_bounds: false, + display_lifetimes: DisplayLifetime::OnlyNamedOrStatic, bounds_formatting_ctx: Default::default(), }) { Ok(()) => {} @@ -310,6 +347,7 @@ pub trait HirDisplay { display_target, display_kind: DisplayKind::Test, show_container_bounds: false, + display_lifetimes: DisplayLifetime::Always, } } @@ -334,12 +372,13 @@ pub trait HirDisplay { display_target, display_kind: DisplayKind::Diagnostics, show_container_bounds, + display_lifetimes: DisplayLifetime::OnlyNamedOrStatic, } } } impl HirFormatter<'_> { - pub fn krate(&self) -> CrateId { + pub fn krate(&self) -> Crate { self.display_target.krate } @@ -408,13 +447,13 @@ impl HirFormatter<'_> { #[derive(Debug, Clone, Copy)] pub struct DisplayTarget { - krate: CrateId, + krate: Crate, pub edition: Edition, } impl DisplayTarget { - pub fn from_crate(db: &dyn HirDatabase, krate: CrateId) -> Self { - let edition = db.crate_graph()[krate].edition; + pub fn from_crate(db: &dyn HirDatabase, krate: Crate) -> Self { + let edition = krate.data(db).edition; Self { krate, edition } } } @@ -478,6 +517,7 @@ pub struct HirDisplayWrapper<'a, T> { display_kind: DisplayKind, display_target: DisplayTarget, show_container_bounds: bool, + display_lifetimes: DisplayLifetime, } #[derive(Debug, PartialEq, Eq, Clone, Copy)] @@ -500,7 +540,7 @@ impl HirDisplayWrapper<'_, T> { self.t.hir_fmt(&mut HirFormatter { db: self.db, fmt: f, - buf: String::with_capacity(20), + buf: String::with_capacity(self.max_size.unwrap_or(20)), curr_size: 0, max_size: self.max_size, entity_limit: self.limited_size, @@ -509,6 +549,7 @@ impl HirDisplayWrapper<'_, T> { display_target: self.display_target, closure_style: self.closure_style, show_container_bounds: self.show_container_bounds, + display_lifetimes: self.display_lifetimes, bounds_formatting_ctx: Default::default(), }) } @@ -517,6 +558,11 @@ impl HirDisplayWrapper<'_, T> { self.closure_style = c; self } + + pub fn with_lifetime_display(mut self, l: DisplayLifetime) -> Self { + self.display_lifetimes = l; + self + } } impl fmt::Display for HirDisplayWrapper<'_, T> @@ -529,7 +575,9 @@ where Err(HirDisplayError::FmtError) => Err(fmt::Error), Err(HirDisplayError::DisplaySourceCodeError(_)) => { // This should never happen - panic!("HirDisplay::hir_fmt failed with DisplaySourceCodeError when calling Display::fmt!") + panic!( + "HirDisplay::hir_fmt failed with DisplaySourceCodeError when calling Display::fmt!" + ) } } } @@ -565,7 +613,7 @@ impl HirDisplay for ProjectionTy { if !f.bounds_formatting_ctx.contains(self) { let db = f.db; let id = from_placeholder_idx(db, *idx); - let generics = generics(db.upcast(), id.parent); + let generics = generics(db, id.parent); let substs = generics.placeholder_subst(db); let bounds = db @@ -612,13 +660,12 @@ impl HirDisplay for ProjectionTy { write!( f, ">::{}", - f.db.type_alias_data(from_assoc_type_id(self.associated_ty_id)) + f.db.type_alias_signature(from_assoc_type_id(self.associated_ty_id)) .name - .display(f.db.upcast(), f.edition()) + .display(f.db, f.edition()) )?; - let proj_params_count = - self.substitution.len(Interner) - trait_ref.substitution.len(Interner); - let proj_params = &self.substitution.as_slice(Interner)[..proj_params_count]; + let proj_params = + &self.substitution.as_slice(Interner)[trait_ref.substitution.len(Interner)..]; hir_fmt_generics(f, proj_params, None, None) } } @@ -651,21 +698,16 @@ impl HirDisplay for Const { ConstValue::InferenceVar(..) => write!(f, "#c#"), ConstValue::Placeholder(idx) => { let id = from_placeholder_idx(f.db, *idx); - let generics = generics(f.db.upcast(), id.parent); + let generics = generics(f.db, id.parent); let param_data = &generics[id.local_id]; - write!(f, "{}", param_data.name().unwrap().display(f.db.upcast(), f.edition()))?; + write!(f, "{}", param_data.name().unwrap().display(f.db, f.edition()))?; Ok(()) } ConstValue::Concrete(c) => match &c.interned { ConstScalar::Bytes(b, m) => render_const_scalar(f, b, m, &data.ty), ConstScalar::UnevaluatedConst(c, parameters) => { - write!(f, "{}", c.name(f.db.upcast()))?; - hir_fmt_generics( - f, - parameters.as_slice(Interner), - c.generic_def(f.db.upcast()), - None, - )?; + write!(f, "{}", c.name(f.db))?; + hir_fmt_generics(f, parameters.as_slice(Interner), c.generic_def(f.db), None)?; Ok(()) } ConstScalar::Unknown => f.write_char('_'), @@ -784,8 +826,8 @@ fn render_const_scalar( } TyKind::Adt(adt, _) if b.len() == 2 * size_of::() => match adt.0 { hir_def::AdtId::StructId(s) => { - let data = f.db.struct_data(s); - write!(f, "&{}", data.name.display(f.db.upcast(), f.edition()))?; + let data = f.db.struct_signature(s); + write!(f, "&{}", data.name.display(f.db, f.edition()))?; Ok(()) } _ => f.write_str(""), @@ -842,11 +884,11 @@ fn render_const_scalar( }; match adt.0 { hir_def::AdtId::StructId(s) => { - let data = f.db.struct_data(s); - write!(f, "{}", data.name.display(f.db.upcast(), f.edition()))?; + let data = f.db.struct_signature(s); + write!(f, "{}", data.name.display(f.db, f.edition()))?; let field_types = f.db.field_types(s.into()); render_variant_after_name( - &data.variant_data, + &f.db.variant_fields(s.into()), f, &field_types, f.db.trait_environment(adt.0.into()), @@ -857,7 +899,7 @@ fn render_const_scalar( ) } hir_def::AdtId::UnionId(u) => { - write!(f, "{}", f.db.union_data(u).name.display(f.db.upcast(), f.edition())) + write!(f, "{}", f.db.union_signature(u).name.display(f.db, f.edition())) } hir_def::AdtId::EnumId(e) => { let Ok(target_data_layout) = f.db.target_data_layout(trait_env.krate) else { @@ -868,11 +910,17 @@ fn render_const_scalar( else { return f.write_str(""); }; - let data = f.db.enum_variant_data(var_id); - write!(f, "{}", data.name.display(f.db.upcast(), f.edition()))?; + let loc = var_id.lookup(f.db); + write!( + f, + "{}", + f.db.enum_variants(loc.parent).variants[loc.index as usize] + .1 + .display(f.db, f.edition()) + )?; let field_types = f.db.field_types(var_id.into()); render_variant_after_name( - &data.variant_data, + &f.db.variant_fields(var_id.into()), f, &field_types, f.db.trait_environment(adt.0.into()), @@ -930,7 +978,7 @@ fn render_const_scalar( } fn render_variant_after_name( - data: &VariantData, + data: &VariantFields, f: &mut HirFormatter<'_>, field_types: &ArenaMap>, trait_env: Arc, @@ -939,8 +987,8 @@ fn render_variant_after_name( b: &[u8], memory_map: &MemoryMap, ) -> Result<(), HirDisplayError> { - match data { - VariantData::Record { fields, .. } | VariantData::Tuple { fields, .. } => { + match data.shape { + FieldsShape::Record | FieldsShape::Tuple => { let render_field = |f: &mut HirFormatter<'_>, id: LocalFieldId| { let offset = layout.fields.offset(u32::from(id.into_raw()) as usize).bytes_usize(); let ty = field_types[id].clone().substitute(Interner, subst); @@ -950,15 +998,15 @@ fn render_variant_after_name( let size = layout.size.bytes_usize(); render_const_scalar(f, &b[offset..offset + size], memory_map, &ty) }; - let mut it = fields.iter(); - if matches!(data, VariantData::Record { .. }) { + let mut it = data.fields().iter(); + if matches!(data.shape, FieldsShape::Record) { write!(f, " {{")?; if let Some((id, data)) = it.next() { - write!(f, " {}: ", data.name.display(f.db.upcast(), f.edition()))?; + write!(f, " {}: ", data.name.display(f.db, f.edition()))?; render_field(f, id)?; } for (id, data) in it { - write!(f, ", {}: ", data.name.display(f.db.upcast(), f.edition()))?; + write!(f, ", {}: ", data.name.display(f.db, f.edition()))?; render_field(f, id)?; } write!(f, " }}")?; @@ -976,7 +1024,7 @@ fn render_variant_after_name( } Ok(()) } - VariantData::Unit => Ok(()), + FieldsShape::Unit => Ok(()), } } @@ -1018,9 +1066,7 @@ impl HirDisplay for Ty { kind @ (TyKind::Raw(m, t) | TyKind::Ref(m, _, t)) => { if let TyKind::Ref(_, l, _) = kind { f.write_char('&')?; - if cfg!(test) { - // rendering these unconditionally is probably too much (at least for inlay - // hints) so we gate it to testing only for the time being + if f.render_lifetime(l) { l.hir_fmt(f)?; f.write_char(' ')?; } @@ -1044,16 +1090,17 @@ impl HirDisplay for Ty { bounds.iter().any(|bound| { if let WhereClause::Implemented(trait_ref) = bound.skip_binders() { let trait_ = trait_ref.hir_trait_id(); - fn_traits(db.upcast(), trait_).any(|it| it == trait_) + fn_traits(db, trait_).any(|it| it == trait_) } else { false } }) }; let (preds_to_print, has_impl_fn_pred) = match t.kind(Interner) { - TyKind::Dyn(dyn_ty) if dyn_ty.bounds.skip_binders().interned().len() > 1 => { + TyKind::Dyn(dyn_ty) => { let bounds = dyn_ty.bounds.skip_binders().interned(); - (bounds.len(), contains_impl_fn(bounds)) + let render_lifetime = f.render_lifetime(&dyn_ty.lifetime); + (bounds.len() + render_lifetime as usize, contains_impl_fn(bounds)) } TyKind::Alias(AliasTy::Opaque(OpaqueTy { opaque_ty_id, @@ -1072,8 +1119,7 @@ impl HirDisplay for Ty { // Don't count Sized but count when it absent // (i.e. when explicit ?Sized bound is set). - let default_sized = - SizedByDefault::Sized { anchor: func.krate(db.upcast()) }; + let default_sized = SizedByDefault::Sized { anchor: func.krate(db) }; let sized_bounds = bounds .skip_binders() .iter() @@ -1083,7 +1129,7 @@ impl HirDisplay for Ty { WhereClause::Implemented(trait_ref) if default_sized.is_sized_trait( trait_ref.hir_trait_id(), - db.upcast(), + db, ), ) }) @@ -1151,25 +1197,28 @@ impl HirDisplay for Ty { write!(f, "fn ")?; f.start_location_link(def.into()); match def { - CallableDefId::FunctionId(ff) => write!( - f, - "{}", - db.function_data(ff).name.display(f.db.upcast(), f.edition()) - )?, + CallableDefId::FunctionId(ff) => { + write!(f, "{}", db.function_signature(ff).name.display(f.db, f.edition()))? + } CallableDefId::StructId(s) => { - write!(f, "{}", db.struct_data(s).name.display(f.db.upcast(), f.edition()))? + write!(f, "{}", db.struct_signature(s).name.display(f.db, f.edition()))? + } + CallableDefId::EnumVariantId(e) => { + let loc = e.lookup(db); + write!( + f, + "{}", + db.enum_variants(loc.parent).variants[loc.index as usize] + .1 + .display(db, f.edition()) + )? } - CallableDefId::EnumVariantId(e) => write!( - f, - "{}", - db.enum_variant_data(e).name.display(f.db.upcast(), f.edition()) - )?, }; f.end_location_link(); if parameters.len(Interner) > 0 { - let generic_def_id = GenericDefId::from_callable(db.upcast(), def); - let generics = generics(db.upcast(), generic_def_id); + let generic_def_id = GenericDefId::from_callable(db, def); + let generics = generics(db, generic_def_id); let (parent_len, self_param, type_, const_, impl_, lifetime) = generics.provenance_split(); let parameters = parameters.as_slice(Interner); @@ -1188,27 +1237,31 @@ impl HirDisplay for Ty { // Normally, functions cannot have default parameters, but they can, // for function-like things such as struct names or enum variants. - // The former cannot have defaults but parents, and the later cannot have - // parents but defaults. - // So, if `parent_len` > 0, it have a parent and thus it doesn't have any - // default. Therefore, we shouldn't subtract defaults because those defaults - // are from their parents. - // And if `parent_len` == 0, either parents don't exists or they don't have - // any defaults. Thus, we can - and should - subtract defaults. - let without_impl = if parent_len > 0 { - params_len - parent_len - impl_ + // The former cannot have defaults but does have parents, + // but the latter cannot have parents but can have defaults. + // + // However, it's also true that *traits* can have defaults too. + // In this case, there can be no function params. + let parent_end = if parent_len > 0 { + // If `parent_len` > 0, then there cannot be defaults on the function + // and all defaults must come from the parent. + parent_len - defaults } else { - params_len - parent_len - impl_ - defaults + parent_len }; - // parent's params (those from enclosing impl or trait, if any). - let (fn_params, parent_params) = parameters.split_at(without_impl + impl_); + let fn_params_no_impl_or_defaults = parameters.len() - parent_end - impl_; + let (parent_params, fn_params) = parameters.split_at(parent_end); write!(f, "<")?; hir_fmt_generic_arguments(f, parent_params, None)?; if !parent_params.is_empty() && !fn_params.is_empty() { write!(f, ", ")?; } - hir_fmt_generic_arguments(f, &fn_params[0..without_impl], None)?; + hir_fmt_generic_arguments( + f, + &fn_params[..fn_params_no_impl_or_defaults], + None, + )?; write!(f, ">")?; } } @@ -1224,17 +1277,17 @@ impl HirDisplay for Ty { TyKind::Adt(AdtId(def_id), parameters) => { f.start_location_link((*def_id).into()); match f.display_kind { - DisplayKind::Diagnostics { .. } | DisplayKind::Test { .. } => { + DisplayKind::Diagnostics | DisplayKind::Test => { let name = match *def_id { - hir_def::AdtId::StructId(it) => db.struct_data(it).name.clone(), - hir_def::AdtId::UnionId(it) => db.union_data(it).name.clone(), - hir_def::AdtId::EnumId(it) => db.enum_data(it).name.clone(), + hir_def::AdtId::StructId(it) => db.struct_signature(it).name.clone(), + hir_def::AdtId::UnionId(it) => db.union_signature(it).name.clone(), + hir_def::AdtId::EnumId(it) => db.enum_signature(it).name.clone(), }; - write!(f, "{}", name.display(f.db.upcast(), f.edition()))?; + write!(f, "{}", name.display(f.db, f.edition()))?; } DisplayKind::SourceCode { target_module_id: module_id, allow_opaque: _ } => { if let Some(path) = find_path::find_path( - db.upcast(), + db, ItemInNs::Types((*def_id).into()), module_id, PrefixKind::Plain, @@ -1247,7 +1300,7 @@ impl HirDisplay for Ty { allow_unstable: true, }, ) { - write!(f, "{}", path.display(f.db.upcast(), f.edition()))?; + write!(f, "{}", path.display(f.db, f.edition()))?; } else { return Err(HirDisplayError::DisplaySourceCodeError( DisplaySourceCodeError::PathNotFound, @@ -1263,22 +1316,22 @@ impl HirDisplay for Ty { } TyKind::AssociatedType(assoc_type_id, parameters) => { let type_alias = from_assoc_type_id(*assoc_type_id); - let trait_ = match type_alias.lookup(db.upcast()).container { + let trait_ = match type_alias.lookup(db).container { ItemContainerId::TraitId(it) => it, _ => panic!("not an associated type"), }; - let trait_data = db.trait_data(trait_); - let type_alias_data = db.type_alias_data(type_alias); + let trait_data = db.trait_signature(trait_); + let type_alias_data = db.type_alias_signature(type_alias); // Use placeholder associated types when the target is test (https://rust-lang.github.io/chalk/book/clauses/type_equality.html#placeholder-associated-types) if f.display_kind.is_test() { f.start_location_link(trait_.into()); - write!(f, "{}", trait_data.name.display(f.db.upcast(), f.edition()))?; + write!(f, "{}", trait_data.name.display(f.db, f.edition()))?; f.end_location_link(); write!(f, "::")?; f.start_location_link(type_alias.into()); - write!(f, "{}", type_alias_data.name.display(f.db.upcast(), f.edition()))?; + write!(f, "{}", type_alias_data.name.display(f.db, f.edition()))?; f.end_location_link(); // Note that the generic args for the associated type come before those for the // trait (including the self type). @@ -1294,9 +1347,9 @@ impl HirDisplay for Ty { } TyKind::Foreign(type_alias) => { let alias = from_foreign_def_id(*type_alias); - let type_alias = db.type_alias_data(alias); + let type_alias = db.type_alias_signature(alias); f.start_location_link(alias.into()); - write!(f, "{}", type_alias.name.display(f.db.upcast(), f.edition()))?; + write!(f, "{}", type_alias.name.display(f.db, f.edition()))?; f.end_location_link(); } TyKind::OpaqueType(opaque_ty_id, parameters) => { @@ -1313,7 +1366,7 @@ impl HirDisplay for Ty { let data = (*datas).as_ref().map(|rpit| rpit.impl_traits[idx].bounds.clone()); let bounds = data.substitute(Interner, ¶meters); - let krate = func.krate(db.upcast()); + let krate = func.krate(db); write_bounds_like_dyn_trait_with_prefix( f, "impl", @@ -1328,7 +1381,7 @@ impl HirDisplay for Ty { db.type_alias_impl_traits(alias).expect("impl trait id without data"); let data = (*datas).as_ref().map(|it| it.impl_traits[idx].bounds.clone()); let bounds = data.substitute(Interner, ¶meters); - let krate = alias.krate(db.upcast()); + let krate = alias.krate(db); write_bounds_like_dyn_trait_with_prefix( f, "impl", @@ -1338,13 +1391,11 @@ impl HirDisplay for Ty { )?; } ImplTraitId::AsyncBlockTypeImplTrait(body, ..) => { - let future_trait = db - .lang_item(body.module(db.upcast()).krate(), LangItem::Future) - .and_then(LangItemTarget::as_trait); + let future_trait = + LangItem::Future.resolve_trait(db, body.module(db).krate()); let output = future_trait.and_then(|t| { - db.trait_data(t).associated_type_by_name(&Name::new_symbol_root( - sym::Output.clone(), - )) + db.trait_items(t) + .associated_type_by_name(&Name::new_symbol_root(sym::Output)) }); write!(f, "impl ")?; if let Some(t) = future_trait { @@ -1381,7 +1432,7 @@ impl HirDisplay for Ty { match f.closure_style { ClosureStyle::Hide => return write!(f, "{TYPE_HINT_TRUNCATION}"), ClosureStyle::ClosureWithId => { - return write!(f, "{{closure#{:?}}}", id.0.as_u32()) + return write!(f, "{{closure#{:?}}}", id.0.as_u32()); } ClosureStyle::ClosureWithSubst => { write!(f, "{{closure#{:?}}}", id.0.as_u32())?; @@ -1420,7 +1471,7 @@ impl HirDisplay for Ty { } TyKind::Placeholder(idx) => { let id = from_placeholder_idx(db, *idx); - let generics = generics(db.upcast(), id.parent); + let generics = generics(db, id.parent); let param_data = &generics[id.local_id]; match param_data { TypeOrConstParamData::TypeParamData(p) => match p.provenance { @@ -1431,7 +1482,7 @@ impl HirDisplay for Ty { p.name .clone() .unwrap_or_else(Name::missing) - .display(f.db.upcast(), f.edition()) + .display(f.db, f.edition()) )? } TypeParamProvenance::ArgumentImplTrait => { @@ -1453,7 +1504,7 @@ impl HirDisplay for Ty { WhereClause::LifetimeOutlives(_) => false, }) .collect::>(); - let krate = id.parent.module(db.upcast()).krate(); + let krate = id.parent.module(db).krate(); write_bounds_like_dyn_trait_with_prefix( f, "impl", @@ -1464,14 +1515,14 @@ impl HirDisplay for Ty { } }, TypeOrConstParamData::ConstParamData(p) => { - write!(f, "{}", p.name.display(f.db.upcast(), f.edition()))?; + write!(f, "{}", p.name.display(f.db, f.edition()))?; } } } TyKind::BoundVar(idx) => idx.hir_fmt(f)?, TyKind::Dyn(dyn_ty) => { // Reorder bounds to satisfy `write_bounds_like_dyn_trait()`'s expectation. - // FIXME: `Iterator::partition_in_place()` or `Vec::drain_filter()` may make it + // FIXME: `Iterator::partition_in_place()` or `Vec::extract_if()` may make it // more efficient when either of them hits stable. let mut bounds: SmallVec<[_; 4]> = dyn_ty.bounds.skip_binders().iter(Interner).cloned().collect(); @@ -1480,6 +1531,17 @@ impl HirDisplay for Ty { bounds.extend(others); bounds.extend(auto_traits); + if f.render_lifetime(&dyn_ty.lifetime) { + // we skip the binders in `write_bounds_like_dyn_trait_with_prefix` + bounds.push(Binders::empty( + Interner, + chalk_ir::WhereClause::TypeOutlives(chalk_ir::TypeOutlives { + ty: self.clone(), + lifetime: dyn_ty.lifetime.clone(), + }), + )); + } + write_bounds_like_dyn_trait_with_prefix( f, "dyn", @@ -1503,7 +1565,7 @@ impl HirDisplay for Ty { let data = (*datas).as_ref().map(|rpit| rpit.impl_traits[idx].bounds.clone()); let bounds = data.substitute(Interner, &opaque_ty.substitution); - let krate = func.krate(db.upcast()); + let krate = func.krate(db); write_bounds_like_dyn_trait_with_prefix( f, "impl", @@ -1518,7 +1580,7 @@ impl HirDisplay for Ty { let data = (*datas).as_ref().map(|rpit| rpit.impl_traits[idx].bounds.clone()); let bounds = data.substitute(Interner, &opaque_ty.substitution); - let krate = alias.krate(db.upcast()); + let krate = alias.krate(db); write_bounds_like_dyn_trait_with_prefix( f, "impl", @@ -1630,7 +1692,7 @@ fn generic_args_sans_defaults<'ga>( Some(default_parameter) => { // !is_err(default_parameter.skip_binders()) // && - arg != &default_parameter.clone().substitute(Interner, ¶meters) + arg != &default_parameter.clone().substitute(Interner, ¶meters[..i]) } } }; @@ -1711,7 +1773,7 @@ fn fn_traits(db: &dyn DefDatabase, trait_: TraitId) -> impl Iterator false, Self::Sized { anchor } => { - let sized_trait = db - .lang_item(anchor, LangItem::Sized) - .and_then(|lang_item| lang_item.as_trait()); + let sized_trait = LangItem::Sized.resolve_trait(db, anchor); Some(trait_) == sized_trait } } @@ -1766,7 +1826,7 @@ fn write_bounds_like_dyn_trait( match p.skip_binders() { WhereClause::Implemented(trait_ref) => { let trait_ = trait_ref.hir_trait_id(); - if default_sized.is_sized_trait(trait_, f.db.upcast()) { + if default_sized.is_sized_trait(trait_, f.db) { is_sized = true; if matches!(default_sized, SizedByDefault::Sized { .. }) { // Don't print +Sized, but rather +?Sized if absent. @@ -1774,7 +1834,7 @@ fn write_bounds_like_dyn_trait( } } if !is_fn_trait { - is_fn_trait = fn_traits(f.db.upcast(), trait_).any(|it| it == trait_); + is_fn_trait = fn_traits(f.db, trait_).any(|it| it == trait_); } if !is_fn_trait && angle_open { write!(f, ">")?; @@ -1787,7 +1847,7 @@ fn write_bounds_like_dyn_trait( // existential) here, which is the only thing that's // possible in actual Rust, and hence don't print it f.start_location_link(trait_.into()); - write!(f, "{}", f.db.trait_data(trait_).name.display(f.db.upcast(), f.edition()))?; + write!(f, "{}", f.db.trait_signature(trait_).name.display(f.db, f.edition()))?; f.end_location_link(); if is_fn_trait { if let [self_, params @ ..] = trait_ref.substitution.as_slice(Interner) { @@ -1859,17 +1919,18 @@ fn write_bounds_like_dyn_trait( } if let AliasTy::Projection(proj) = alias { let assoc_ty_id = from_assoc_type_id(proj.associated_ty_id); - let type_alias = f.db.type_alias_data(assoc_ty_id); + let type_alias = f.db.type_alias_signature(assoc_ty_id); f.start_location_link(assoc_ty_id.into()); - write!(f, "{}", type_alias.name.display(f.db.upcast(), f.edition()))?; + write!(f, "{}", type_alias.name.display(f.db, f.edition()))?; f.end_location_link(); - let proj_arg_count = generics(f.db.upcast(), assoc_ty_id.into()).len_self(); + let proj_arg_count = generics(f.db, assoc_ty_id.into()).len_self(); + let parent_len = proj.substitution.len(Interner) - proj_arg_count; if proj_arg_count > 0 { write!(f, "<")?; hir_fmt_generic_arguments( f, - &proj.substitution.as_slice(Interner)[..proj_arg_count], + &proj.substitution.as_slice(Interner)[parent_len..], None, )?; write!(f, ">")?; @@ -1885,8 +1946,7 @@ fn write_bounds_like_dyn_trait( write!(f, ">")?; } if let SizedByDefault::Sized { anchor } = default_sized { - let sized_trait = - f.db.lang_item(anchor, LangItem::Sized).and_then(|lang_item| lang_item.as_trait()); + let sized_trait = LangItem::Sized.resolve_trait(f.db, anchor); if !is_sized { if !first { write!(f, " + ")?; @@ -1912,7 +1972,7 @@ impl HirDisplay for TraitRef { fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> { let trait_ = self.hir_trait_id(); f.start_location_link(trait_.into()); - write!(f, "{}", f.db.trait_data(trait_).name.display(f.db.upcast(), f.edition()))?; + write!(f, "{}", f.db.trait_signature(trait_).name.display(f.db, f.edition()))?; f.end_location_link(); let substs = self.substitution.as_slice(Interner); hir_fmt_generics(f, &substs[1..], None, substs[0].ty(Interner)) @@ -1943,7 +2003,7 @@ impl HirDisplay for WhereClause { write!( f, "{}", - f.db.type_alias_data(type_alias).name.display(f.db.upcast(), f.edition()), + f.db.type_alias_signature(type_alias).name.display(f.db, f.edition()), )?; f.end_location_link(); write!(f, " = ")?; @@ -1978,12 +2038,11 @@ impl HirDisplay for LifetimeData { match self { LifetimeData::Placeholder(idx) => { let id = lt_from_placeholder_idx(f.db, *idx); - let generics = generics(f.db.upcast(), id.parent); + let generics = generics(f.db, id.parent); let param_data = &generics[id.local_id]; - write!(f, "{}", param_data.name.display(f.db.upcast(), f.edition()))?; + write!(f, "{}", param_data.name.display(f.db, f.edition()))?; Ok(()) } - _ if f.display_kind.is_source_code() => write!(f, "'_"), LifetimeData::BoundVar(idx) => idx.hir_fmt(f), LifetimeData::InferenceVar(_) => write!(f, "_"), LifetimeData::Static => write!(f, "'static"), @@ -2022,14 +2081,14 @@ pub fn write_visibility( match vis { Visibility::Public => write!(f, "pub "), Visibility::Module(vis_id, _) => { - let def_map = module_id.def_map(f.db.upcast()); + let def_map = module_id.def_map(f.db); let root_module_id = def_map.module_id(DefMap::ROOT); if vis_id == module_id { // pub(self) or omitted Ok(()) } else if root_module_id == vis_id { write!(f, "pub(crate) ") - } else if module_id.containing_module(f.db.upcast()) == Some(vis_id) { + } else if module_id.containing_module(f.db) == Some(vis_id) { write!(f, "pub(super) ") } else { write!(f, "pub(in ...) ") @@ -2038,70 +2097,119 @@ pub fn write_visibility( } } -pub trait HirDisplayWithTypesMap { +pub trait HirDisplayWithExpressionStore { fn hir_fmt( &self, f: &mut HirFormatter<'_>, - types_map: &TypesMap, + store: &ExpressionStore, ) -> Result<(), HirDisplayError>; } -impl HirDisplayWithTypesMap for &'_ T { +impl HirDisplayWithExpressionStore for &'_ T { fn hir_fmt( &self, f: &mut HirFormatter<'_>, - types_map: &TypesMap, + store: &ExpressionStore, ) -> Result<(), HirDisplayError> { - T::hir_fmt(&**self, f, types_map) + T::hir_fmt(&**self, f, store) } } -pub fn hir_display_with_types_map<'a, T: HirDisplayWithTypesMap + 'a>( +pub fn hir_display_with_store<'a, T: HirDisplayWithExpressionStore + 'a>( value: T, - types_map: &'a TypesMap, + store: &'a ExpressionStore, ) -> impl HirDisplay + 'a { - TypesMapAdapter(value, types_map) + ExpressionStoreAdapter(value, store) } -struct TypesMapAdapter<'a, T>(T, &'a TypesMap); +struct ExpressionStoreAdapter<'a, T>(T, &'a ExpressionStore); -impl<'a, T> TypesMapAdapter<'a, T> { - fn wrap(types_map: &'a TypesMap) -> impl Fn(T) -> TypesMapAdapter<'a, T> { - move |value| TypesMapAdapter(value, types_map) +impl<'a, T> ExpressionStoreAdapter<'a, T> { + fn wrap(store: &'a ExpressionStore) -> impl Fn(T) -> ExpressionStoreAdapter<'a, T> { + move |value| ExpressionStoreAdapter(value, store) } } -impl HirDisplay for TypesMapAdapter<'_, T> { +impl HirDisplay for ExpressionStoreAdapter<'_, T> { fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> { T::hir_fmt(&self.0, f, self.1) } } +impl HirDisplayWithExpressionStore for LifetimeRefId { + fn hir_fmt( + &self, + f: &mut HirFormatter<'_>, + store: &ExpressionStore, + ) -> Result<(), HirDisplayError> { + match &store[*self] { + LifetimeRef::Named(name) => write!(f, "{}", name.display(f.db, f.edition())), + LifetimeRef::Static => write!(f, "'static"), + LifetimeRef::Placeholder => write!(f, "'_"), + LifetimeRef::Error => write!(f, "'{{error}}"), + &LifetimeRef::Param(lifetime_param_id) => { + let generic_params = f.db.generic_params(lifetime_param_id.parent); + write!( + f, + "{}", + generic_params[lifetime_param_id.local_id].name.display(f.db, f.edition()) + ) + } + } + } +} -impl HirDisplayWithTypesMap for TypeRefId { +impl HirDisplayWithExpressionStore for TypeRefId { fn hir_fmt( &self, f: &mut HirFormatter<'_>, - types_map: &TypesMap, + store: &ExpressionStore, ) -> Result<(), HirDisplayError> { - match &types_map[*self] { + match &store[*self] { TypeRef::Never => write!(f, "!")?, + TypeRef::TypeParam(param) => { + let generic_params = f.db.generic_params(param.parent()); + match generic_params[param.local_id()].name() { + Some(name) => write!(f, "{}", name.display(f.db, f.edition()))?, + None => { + write!(f, "impl ")?; + f.write_joined( + generic_params + .where_predicates() + .filter_map(|it| match it { + WherePredicate::TypeBound { target, bound } + | WherePredicate::ForLifetime { lifetimes: _, target, bound } + if matches!( + store[*target], + TypeRef::TypeParam(t) if t == *param + ) => + { + Some(bound) + } + _ => None, + }) + .map(ExpressionStoreAdapter::wrap(store)), + " + ", + )?; + } + } + } TypeRef::Placeholder => write!(f, "_")?, TypeRef::Tuple(elems) => { write!(f, "(")?; - f.write_joined(elems.iter().map(TypesMapAdapter::wrap(types_map)), ", ")?; + f.write_joined(elems.iter().map(ExpressionStoreAdapter::wrap(store)), ", ")?; if elems.len() == 1 { write!(f, ",")?; } write!(f, ")")?; } - TypeRef::Path(path) => path.hir_fmt(f, types_map)?, + TypeRef::Path(path) => path.hir_fmt(f, store)?, TypeRef::RawPtr(inner, mutability) => { let mutability = match mutability { hir_def::type_ref::Mutability::Shared => "*const ", hir_def::type_ref::Mutability::Mut => "*mut ", }; write!(f, "{mutability}")?; - inner.hir_fmt(f, types_map)?; + inner.hir_fmt(f, store)?; } TypeRef::Reference(ref_) => { let mutability = match ref_.mutability { @@ -2110,83 +2218,67 @@ impl HirDisplayWithTypesMap for TypeRefId { }; write!(f, "&")?; if let Some(lifetime) = &ref_.lifetime { - write!(f, "{} ", lifetime.name.display(f.db.upcast(), f.edition()))?; + lifetime.hir_fmt(f, store)?; + write!(f, " ")?; } write!(f, "{mutability}")?; - ref_.ty.hir_fmt(f, types_map)?; + ref_.ty.hir_fmt(f, store)?; } TypeRef::Array(array) => { write!(f, "[")?; - array.ty.hir_fmt(f, types_map)?; - write!(f, "; {}]", array.len.display(f.db.upcast(), f.edition()))?; + array.ty.hir_fmt(f, store)?; + write!(f, "; ")?; + array.len.hir_fmt(f, store)?; + write!(f, "]")?; } TypeRef::Slice(inner) => { write!(f, "[")?; - inner.hir_fmt(f, types_map)?; + inner.hir_fmt(f, store)?; write!(f, "]")?; } TypeRef::Fn(fn_) => { - if fn_.is_unsafe() { + if fn_.is_unsafe { write!(f, "unsafe ")?; } - if let Some(abi) = fn_.abi() { + if let Some(abi) = &fn_.abi { f.write_str("extern \"")?; f.write_str(abi.as_str())?; f.write_str("\" ")?; } write!(f, "fn(")?; - if let Some(((_, return_type), function_parameters)) = fn_.params().split_last() { + if let Some(((_, return_type), function_parameters)) = fn_.params.split_last() { for index in 0..function_parameters.len() { let (param_name, param_type) = &function_parameters[index]; if let Some(name) = param_name { - write!(f, "{}: ", name.display(f.db.upcast(), f.edition()))?; + write!(f, "{}: ", name.display(f.db, f.edition()))?; } - param_type.hir_fmt(f, types_map)?; + param_type.hir_fmt(f, store)?; if index != function_parameters.len() - 1 { write!(f, ", ")?; } } - if fn_.is_varargs() { - write!(f, "{}...", if fn_.params().len() == 1 { "" } else { ", " })?; + if fn_.is_varargs { + write!(f, "{}...", if fn_.params.len() == 1 { "" } else { ", " })?; } write!(f, ")")?; - match &types_map[*return_type] { + match &store[*return_type] { TypeRef::Tuple(tup) if tup.is_empty() => {} _ => { write!(f, " -> ")?; - return_type.hir_fmt(f, types_map)?; + return_type.hir_fmt(f, store)?; } } } } TypeRef::ImplTrait(bounds) => { write!(f, "impl ")?; - f.write_joined(bounds.iter().map(TypesMapAdapter::wrap(types_map)), " + ")?; + f.write_joined(bounds.iter().map(ExpressionStoreAdapter::wrap(store)), " + ")?; } TypeRef::DynTrait(bounds) => { write!(f, "dyn ")?; - f.write_joined(bounds.iter().map(TypesMapAdapter::wrap(types_map)), " + ")?; - } - TypeRef::Macro(macro_call) => { - let (mut types_map, mut types_source_map) = - (TypesMap::default(), TypesSourceMap::default()); - let mut ctx = hir_def::lower::LowerCtx::new( - f.db.upcast(), - macro_call.file_id, - &mut types_map, - &mut types_source_map, - ); - let macro_call = macro_call.to_node(f.db.upcast()); - match macro_call.path() { - Some(path) => match Path::from_src(&mut ctx, path) { - Some(path) => path.hir_fmt(f, &types_map)?, - None => write!(f, "{{macro}}")?, - }, - None => write!(f, "{{macro}}")?, - } - write!(f, "!(..)")?; + f.write_joined(bounds.iter().map(ExpressionStoreAdapter::wrap(store)), " + ")?; } TypeRef::Error => write!(f, "{{error}}")?, } @@ -2194,11 +2286,24 @@ impl HirDisplayWithTypesMap for TypeRefId { } } -impl HirDisplayWithTypesMap for TypeBound { +impl HirDisplayWithExpressionStore for ConstRef { fn hir_fmt( &self, f: &mut HirFormatter<'_>, - types_map: &TypesMap, + _store: &ExpressionStore, + ) -> Result<(), HirDisplayError> { + // FIXME + write!(f, "{{const}}")?; + + Ok(()) + } +} + +impl HirDisplayWithExpressionStore for TypeBound { + fn hir_fmt( + &self, + f: &mut HirFormatter<'_>, + store: &ExpressionStore, ) -> Result<(), HirDisplayError> { match self { &TypeBound::Path(path, modifier) => { @@ -2206,48 +2311,47 @@ impl HirDisplayWithTypesMap for TypeBound { TraitBoundModifier::None => (), TraitBoundModifier::Maybe => write!(f, "?")?, } - types_map[path].hir_fmt(f, types_map) - } - TypeBound::Lifetime(lifetime) => { - write!(f, "{}", lifetime.name.display(f.db.upcast(), f.edition())) + store[path].hir_fmt(f, store) } + TypeBound::Lifetime(lifetime) => lifetime.hir_fmt(f, store), TypeBound::ForLifetime(lifetimes, path) => { let edition = f.edition(); write!( f, "for<{}> ", - lifetimes.iter().map(|it| it.display(f.db.upcast(), edition)).format(", ") + lifetimes.iter().map(|it| it.display(f.db, edition)).format(", ") )?; - types_map[*path].hir_fmt(f, types_map) + store[*path].hir_fmt(f, store) } TypeBound::Use(args) => { let edition = f.edition(); - write!( - f, - "use<{}> ", - args.iter() - .map(|it| match it { - UseArgRef::Lifetime(lt) => lt.name.display(f.db.upcast(), edition), - UseArgRef::Name(n) => n.display(f.db.upcast(), edition), - }) - .format(", ") - ) + let last = args.len().saturating_sub(1); + for (idx, arg) in args.iter().enumerate() { + match arg { + UseArgRef::Lifetime(lt) => lt.hir_fmt(f, store)?, + UseArgRef::Name(n) => write!(f, "{}", n.display(f.db, edition))?, + } + if idx != last { + write!(f, ", ")?; + } + } + write!(f, "> ") } TypeBound::Error => write!(f, "{{error}}"), } } } -impl HirDisplayWithTypesMap for Path { +impl HirDisplayWithExpressionStore for Path { fn hir_fmt( &self, f: &mut HirFormatter<'_>, - types_map: &TypesMap, + store: &ExpressionStore, ) -> Result<(), HirDisplayError> { match (self.type_anchor(), self.kind()) { (Some(anchor), _) => { write!(f, "<")?; - anchor.hir_fmt(f, types_map)?; + anchor.hir_fmt(f, store)?; write!(f, ">")?; } (_, PathKind::Plain) => {} @@ -2266,12 +2370,12 @@ impl HirDisplayWithTypesMap for Path { // Resolve `$crate` to the crate's display name. // FIXME: should use the dependency name instead if available, but that depends on // the crate invoking `HirDisplay` - let crate_graph = f.db.crate_graph(); - let name = crate_graph[*id] + let crate_data = id.extra_data(f.db); + let name = crate_data .display_name .as_ref() - .map(|name| name.canonical_name()) - .unwrap_or(&sym::dollar_crate); + .map(|name| (*name.canonical_name()).clone()) + .unwrap_or(sym::dollar_crate); write!(f, "{name}")? } } @@ -2290,7 +2394,7 @@ impl HirDisplayWithTypesMap for Path { }); if let Some(ty) = trait_self_ty { write!(f, "<")?; - ty.hir_fmt(f, types_map)?; + ty.hir_fmt(f, store)?; write!(f, " as ")?; // Now format the path of the trait... } @@ -2299,81 +2403,89 @@ impl HirDisplayWithTypesMap for Path { if !matches!(self.kind(), PathKind::Plain) || seg_idx > 0 { write!(f, "::")?; } - write!(f, "{}", segment.name.display(f.db.upcast(), f.edition()))?; + write!(f, "{}", segment.name.display(f.db, f.edition()))?; if let Some(generic_args) = segment.args_and_bindings { // We should be in type context, so format as `Foo` instead of `Foo::`. // Do we actually format expressions? - if generic_args.desugared_from_fn { - // First argument will be a tuple, which already includes the parentheses. - // If the tuple only contains 1 item, write it manually to avoid the trailing `,`. - let tuple = match generic_args.args[0] { - hir_def::path::GenericArg::Type(ty) => match &types_map[ty] { - TypeRef::Tuple(it) => Some(it), + match generic_args.parenthesized { + hir_def::expr_store::path::GenericArgsParentheses::ReturnTypeNotation => { + write!(f, "(..)")?; + } + hir_def::expr_store::path::GenericArgsParentheses::ParenSugar => { + // First argument will be a tuple, which already includes the parentheses. + // If the tuple only contains 1 item, write it manually to avoid the trailing `,`. + let tuple = match generic_args.args[0] { + hir_def::expr_store::path::GenericArg::Type(ty) => match &store[ty] { + TypeRef::Tuple(it) => Some(it), + _ => None, + }, _ => None, - }, - _ => None, - }; - if let Some(v) = tuple { - if v.len() == 1 { - write!(f, "(")?; - v[0].hir_fmt(f, types_map)?; - write!(f, ")")?; - } else { - generic_args.args[0].hir_fmt(f, types_map)?; + }; + if let Some(v) = tuple { + if v.len() == 1 { + write!(f, "(")?; + v[0].hir_fmt(f, store)?; + write!(f, ")")?; + } else { + generic_args.args[0].hir_fmt(f, store)?; + } } - } - if let Some(ret) = generic_args.bindings[0].type_ref { - if !matches!(&types_map[ret], TypeRef::Tuple(v) if v.is_empty()) { - write!(f, " -> ")?; - ret.hir_fmt(f, types_map)?; + if let Some(ret) = generic_args.bindings[0].type_ref { + if !matches!(&store[ret], TypeRef::Tuple(v) if v.is_empty()) { + write!(f, " -> ")?; + ret.hir_fmt(f, store)?; + } } } - return Ok(()); - } - - let mut first = true; - // Skip the `Self` bound if exists. It's handled outside the loop. - for arg in &generic_args.args[generic_args.has_self_type as usize..] { - if first { - first = false; - write!(f, "<")?; - } else { - write!(f, ", ")?; - } - arg.hir_fmt(f, types_map)?; - } - for binding in generic_args.bindings.iter() { - if first { - first = false; - write!(f, "<")?; - } else { - write!(f, ", ")?; - } - write!(f, "{}", binding.name.display(f.db.upcast(), f.edition()))?; - match &binding.type_ref { - Some(ty) => { - write!(f, " = ")?; - ty.hir_fmt(f, types_map)? + hir_def::expr_store::path::GenericArgsParentheses::No => { + let mut first = true; + // Skip the `Self` bound if exists. It's handled outside the loop. + for arg in &generic_args.args[generic_args.has_self_type as usize..] { + if first { + first = false; + write!(f, "<")?; + } else { + write!(f, ", ")?; + } + arg.hir_fmt(f, store)?; } - None => { - write!(f, ": ")?; - f.write_joined( - binding.bounds.iter().map(TypesMapAdapter::wrap(types_map)), - " + ", - )?; + for binding in generic_args.bindings.iter() { + if first { + first = false; + write!(f, "<")?; + } else { + write!(f, ", ")?; + } + write!(f, "{}", binding.name.display(f.db, f.edition()))?; + match &binding.type_ref { + Some(ty) => { + write!(f, " = ")?; + ty.hir_fmt(f, store)? + } + None => { + write!(f, ": ")?; + f.write_joined( + binding + .bounds + .iter() + .map(ExpressionStoreAdapter::wrap(store)), + " + ", + )?; + } + } } - } - } - // There may be no generic arguments to print, in case of a trait having only a - // single `Self` bound which is converted to `::Assoc`. - if !first { - write!(f, ">")?; - } + // There may be no generic arguments to print, in case of a trait having only a + // single `Self` bound which is converted to `::Assoc`. + if !first { + write!(f, ">")?; + } - // Current position: `|` - if generic_args.has_self_type { - write!(f, ">")?; + // Current position: `|` + if generic_args.has_self_type { + write!(f, ">")?; + } + } } } } @@ -2382,20 +2494,19 @@ impl HirDisplayWithTypesMap for Path { } } -impl HirDisplayWithTypesMap for hir_def::path::GenericArg { +impl HirDisplayWithExpressionStore for hir_def::expr_store::path::GenericArg { fn hir_fmt( &self, f: &mut HirFormatter<'_>, - types_map: &TypesMap, + store: &ExpressionStore, ) -> Result<(), HirDisplayError> { match self { - hir_def::path::GenericArg::Type(ty) => ty.hir_fmt(f, types_map), - hir_def::path::GenericArg::Const(c) => { - write!(f, "{}", c.display(f.db.upcast(), f.edition())) - } - hir_def::path::GenericArg::Lifetime(lifetime) => { - write!(f, "{}", lifetime.name.display(f.db.upcast(), f.edition())) + hir_def::expr_store::path::GenericArg::Type(ty) => ty.hir_fmt(f, store), + hir_def::expr_store::path::GenericArg::Const(_c) => { + // write!(f, "{}", c.display(f.db, f.edition())) + write!(f, "") } + hir_def::expr_store::path::GenericArg::Lifetime(lifetime) => lifetime.hir_fmt(f, store), } } } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/drop.rs b/src/tools/rust-analyzer/crates/hir-ty/src/drop.rs index 351926c86c473..70763759ef0ee 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/drop.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/drop.rs @@ -1,18 +1,17 @@ //! Utilities for computing drop info about types. -use base_db::ra_salsa; use chalk_ir::cast::Cast; -use hir_def::data::adt::StructFlags; -use hir_def::lang_item::LangItem; use hir_def::AdtId; +use hir_def::lang_item::LangItem; +use hir_def::signatures::StructFlags; use stdx::never; use triomphe::Arc; use crate::{ - db::HirDatabase, method_resolution::TyFingerprint, AliasTy, Canonical, CanonicalVarKinds, - InEnvironment, Interner, ProjectionTy, TraitEnvironment, Ty, TyBuilder, TyKind, + AliasTy, Canonical, CanonicalVarKinds, ConcreteConst, ConstScalar, ConstValue, InEnvironment, + Interner, ProjectionTy, TraitEnvironment, Ty, TyBuilder, TyKind, db::HirDatabase, + method_resolution::TyFingerprint, }; -use crate::{ConcreteConst, ConstScalar, ConstValue}; fn has_destructor(db: &dyn HirDatabase, adt: AdtId) -> bool { let module = match adt { @@ -20,9 +19,7 @@ fn has_destructor(db: &dyn HirDatabase, adt: AdtId) -> bool { AdtId::StructId(id) => db.lookup_intern_struct(id).container, AdtId::UnionId(id) => db.lookup_intern_union(id).container, }; - let Some(drop_trait) = - db.lang_item(module.krate(), LangItem::Drop).and_then(|it| it.as_trait()) - else { + let Some(drop_trait) = LangItem::Drop.resolve_trait(db, module.krate()) else { return false; }; let impls = match module.containing_block() { @@ -32,8 +29,7 @@ fn has_destructor(db: &dyn HirDatabase, adt: AdtId) -> bool { }, None => db.trait_impls_in_crate(module.krate()), }; - let result = impls.for_trait_and_self_ty(drop_trait, TyFingerprint::Adt(adt)).next().is_some(); - result + impls.for_trait_and_self_ty(drop_trait, TyFingerprint::Adt(adt)).next().is_some() } #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)] @@ -55,7 +51,7 @@ pub(crate) fn has_drop_glue(db: &dyn HirDatabase, ty: Ty, env: Arc { - if db.struct_data(id).flags.contains(StructFlags::IS_MANUALLY_DROP) { + if db.struct_signature(id).flags.contains(StructFlags::IS_MANUALLY_DROP) { return DropGlue::None; } db.field_types(id.into()) @@ -72,7 +68,7 @@ pub(crate) fn has_drop_glue(db: &dyn HirDatabase, ty: Ty, env: Arc DropGlue::None, AdtId::EnumId(id) => db - .enum_data(id) + .enum_variants(id) .variants .iter() .map(|&(variant, _)| { @@ -176,19 +172,14 @@ fn projection_has_drop_glue( let normalized = db.normalize_projection(projection, env.clone()); match normalized.kind(Interner) { TyKind::Alias(AliasTy::Projection(_)) | TyKind::AssociatedType(..) => { - if is_copy(db, ty, env) { - DropGlue::None - } else { - DropGlue::DependOnParams - } + if is_copy(db, ty, env) { DropGlue::None } else { DropGlue::DependOnParams } } _ => db.has_drop_glue(normalized, env), } } fn is_copy(db: &dyn HirDatabase, ty: Ty, env: Arc) -> bool { - let Some(copy_trait) = db.lang_item(env.krate, LangItem::Copy).and_then(|it| it.as_trait()) - else { + let Some(copy_trait) = LangItem::Copy.resolve_trait(db, env.krate) else { return false; }; let trait_ref = TyBuilder::trait_ref(db, copy_trait).push(ty).build(); @@ -199,11 +190,10 @@ fn is_copy(db: &dyn HirDatabase, ty: Ty, env: Arc) -> bool { db.trait_solve(env.krate, env.block, goal).is_some() } -pub(crate) fn has_drop_glue_recover( +pub(crate) fn has_drop_glue_cycle_result( _db: &dyn HirDatabase, - _cycle: &ra_salsa::Cycle, - _ty: &Ty, - _env: &Arc, + _ty: Ty, + _env: Arc, ) -> DropGlue { DropGlue::None } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/dyn_compatibility.rs b/src/tools/rust-analyzer/crates/hir-ty/src/dyn_compatibility.rs index e042c35d0c6f2..106b996b13ef0 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/dyn_compatibility.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/dyn_compatibility.rs @@ -3,28 +3,26 @@ use std::ops::ControlFlow; use chalk_ir::{ + DebruijnIndex, cast::Cast, visit::{TypeSuperVisitable, TypeVisitable, TypeVisitor}, - DebruijnIndex, }; use chalk_solve::rust_ir::InlineBound; use hir_def::{ - data::TraitFlags, lang_item::LangItem, AssocItemId, ConstId, FunctionId, GenericDefId, - HasModule, TraitId, TypeAliasId, + AssocItemId, ConstId, FunctionId, GenericDefId, HasModule, TraitId, TypeAliasId, + lang_item::LangItem, signatures::TraitFlags, }; use rustc_hash::FxHashSet; use smallvec::SmallVec; use crate::{ - all_super_traits, + AliasEq, AliasTy, Binders, BoundVar, CallableSig, GoalData, ImplTraitId, Interner, OpaqueTyId, + ProjectionTyExt, Solution, Substitution, TraitRef, Ty, TyKind, WhereClause, all_super_traits, db::HirDatabase, from_assoc_type_id, from_chalk_trait_id, generics::{generics, trait_self_param_idx}, - lower::callable_item_sig, - to_assoc_type_id, to_chalk_trait_id, + to_chalk_trait_id, utils::elaborate_clause_supertraits, - AliasEq, AliasTy, Binders, BoundVar, CallableSig, GoalData, ImplTraitId, Interner, OpaqueTyId, - ProjectionTyExt, Solution, Substitution, TraitRef, Ty, TyKind, WhereClause, }; #[derive(Debug, Clone, PartialEq, Eq, Hash)] @@ -54,7 +52,7 @@ pub fn dyn_compatibility( db: &dyn HirDatabase, trait_: TraitId, ) -> Option { - for super_trait in all_super_traits(db.upcast(), trait_).into_iter().skip(1).rev() { + for super_trait in all_super_traits(db, trait_).into_iter().skip(1).rev() { if db.dyn_compatibility_of_trait(super_trait).is_some() { return Some(DynCompatibilityViolation::HasNonCompatibleSuperTrait(super_trait)); } @@ -71,7 +69,7 @@ pub fn dyn_compatibility_with_callback( where F: FnMut(DynCompatibilityViolation) -> ControlFlow<()>, { - for super_trait in all_super_traits(db.upcast(), trait_).into_iter().skip(1).rev() { + for super_trait in all_super_traits(db, trait_).into_iter().skip(1).rev() { if db.dyn_compatibility_of_trait(super_trait).is_some() { cb(DynCompatibilityViolation::HasNonCompatibleSuperTrait(trait_))?; } @@ -103,7 +101,7 @@ where // rustc checks for non-lifetime binders here, but we don't support HRTB yet - let trait_data = db.trait_data(trait_); + let trait_data = db.trait_items(trait_); for (_, assoc_item) in &trait_data.items { dyn_compatibility_violation_for_assoc_item(db, trait_, *assoc_item, cb)?; } @@ -116,7 +114,7 @@ pub fn dyn_compatibility_of_trait_query( trait_: TraitId, ) -> Option { let mut res = None; - let _ = dyn_compatibility_of_trait_with_callback(db, trait_, &mut |osv| { + _ = dyn_compatibility_of_trait_with_callback(db, trait_, &mut |osv| { res = Some(osv); ControlFlow::Break(()) }); @@ -125,12 +123,12 @@ pub fn dyn_compatibility_of_trait_query( } fn generics_require_sized_self(db: &dyn HirDatabase, def: GenericDefId) -> bool { - let krate = def.module(db.upcast()).krate(); - let Some(sized) = db.lang_item(krate, LangItem::Sized).and_then(|l| l.as_trait()) else { + let krate = def.module(db).krate(); + let Some(sized) = LangItem::Sized.resolve_trait(db, krate) else { return false; }; - let Some(trait_self_param_idx) = trait_self_param_idx(db.upcast(), def) else { + let Some(trait_self_param_idx) = trait_self_param_idx(db, def) else { return false; }; @@ -166,14 +164,13 @@ fn predicates_reference_self(db: &dyn HirDatabase, trait_: TraitId) -> bool { // Same as the above, `predicates_reference_self` fn bounds_reference_self(db: &dyn HirDatabase, trait_: TraitId) -> bool { - let trait_data = db.trait_data(trait_); + let trait_data = db.trait_items(trait_); trait_data .items .iter() .filter_map(|(_, it)| match *it { AssocItemId::TypeAliasId(id) => { - let assoc_ty_id = to_assoc_type_id(id); - let assoc_ty_data = db.associated_ty_data(assoc_ty_id); + let assoc_ty_data = db.associated_ty_data(id); Some(assoc_ty_data) } _ => None, @@ -256,7 +253,7 @@ fn contains_illegal_self_type_reference>( outer_binder: DebruijnIndex, allow_self_projection: AllowSelfProjection, ) -> bool { - let Some(trait_self_param_idx) = trait_self_param_idx(db.upcast(), def) else { + let Some(trait_self_param_idx) = trait_self_param_idx(db, def) else { return false; }; struct IllegalSelfTypeVisitor<'a> { @@ -290,8 +287,7 @@ fn contains_illegal_self_type_reference>( AllowSelfProjection::Yes => { let trait_ = proj.trait_(self.db); if self.super_traits.is_none() { - self.super_traits = - Some(all_super_traits(self.db.upcast(), self.trait_)); + self.super_traits = Some(all_super_traits(self.db, self.trait_)); } if self.super_traits.as_ref().is_some_and(|s| s.contains(&trait_)) { ControlFlow::Continue(()) @@ -347,7 +343,7 @@ where }) } AssocItemId::TypeAliasId(it) => { - let def_map = db.crate_def_map(trait_.krate(db.upcast())); + let def_map = db.crate_def_map(trait_.krate(db)); if def_map.is_unstable_feature_enabled(&intern::sym::generic_associated_type_extended) { ControlFlow::Continue(()) } else { @@ -371,7 +367,7 @@ fn virtual_call_violations_for_method( where F: FnMut(MethodViolationCode) -> ControlFlow<()>, { - let func_data = db.function_data(func); + let func_data = db.function_signature(func); if !func_data.has_self_param() { cb(MethodViolationCode::StaticMethod)?; } @@ -380,7 +376,7 @@ where cb(MethodViolationCode::AsyncFn)?; } - let sig = callable_item_sig(db, func.into()); + let sig = db.callable_item_signature(func.into()); if sig.skip_binders().params().iter().skip(1).any(|ty| { contains_illegal_self_type_reference( db, @@ -421,7 +417,7 @@ where } let predicates = &*db.generic_predicates_without_parent(func.into()); - let trait_self_idx = trait_self_param_idx(db.upcast(), func.into()); + let trait_self_idx = trait_self_param_idx(db, func.into()); for pred in predicates { let pred = pred.skip_binders().skip_binders(); @@ -431,8 +427,8 @@ where // Allow `impl AutoTrait` predicates if let WhereClause::Implemented(TraitRef { trait_id, substitution }) = pred { - let trait_data = db.trait_data(from_chalk_trait_id(*trait_id)); - if trait_data.flags.contains(TraitFlags::IS_AUTO) + let trait_data = db.trait_signature(from_chalk_trait_id(*trait_id)); + if trait_data.flags.contains(TraitFlags::AUTO) && substitution .as_slice(Interner) .first() @@ -468,7 +464,7 @@ fn receiver_is_dispatchable( func: FunctionId, sig: &Binders, ) -> bool { - let Some(trait_self_idx) = trait_self_param_idx(db.upcast(), func.into()) else { + let Some(trait_self_idx) = trait_self_param_idx(db, func.into()) else { return false; }; @@ -486,17 +482,17 @@ fn receiver_is_dispatchable( return true; } - let placeholder_subst = generics(db.upcast(), func.into()).placeholder_subst(db); + let placeholder_subst = generics(db, func.into()).placeholder_subst(db); let substituted_sig = sig.clone().substitute(Interner, &placeholder_subst); let Some(receiver_ty) = substituted_sig.params().first() else { return false; }; - let krate = func.module(db.upcast()).krate(); + let krate = func.module(db).krate(); let traits = ( - db.lang_item(krate, LangItem::Unsize).and_then(|it| it.as_trait()), - db.lang_item(krate, LangItem::DispatchFromDyn).and_then(|it| it.as_trait()), + LangItem::Unsize.resolve_trait(db, krate), + LangItem::DispatchFromDyn.resolve_trait(db, krate), ); let (Some(unsize_did), Some(dispatch_from_dyn_did)) = traits else { return false; @@ -519,7 +515,7 @@ fn receiver_is_dispatchable( trait_id: to_chalk_trait_id(trait_), substitution: Substitution::from_iter( Interner, - std::iter::once(unsized_self_ty.clone().cast(Interner)) + std::iter::once(unsized_self_ty.cast(Interner)) .chain(placeholder_subst.iter(Interner).skip(1).cloned()), ), }); @@ -552,20 +548,16 @@ fn receiver_is_dispatchable( } fn receiver_for_self_ty(db: &dyn HirDatabase, func: FunctionId, ty: Ty) -> Option { - let generics = generics(db.upcast(), func.into()); - let trait_self_idx = trait_self_param_idx(db.upcast(), func.into())?; + let generics = generics(db, func.into()); + let trait_self_idx = trait_self_param_idx(db, func.into())?; let subst = generics.placeholder_subst(db); let subst = Substitution::from_iter( Interner, subst.iter(Interner).enumerate().map(|(idx, arg)| { - if idx == trait_self_idx { - ty.clone().cast(Interner) - } else { - arg.clone() - } + if idx == trait_self_idx { ty.clone().cast(Interner) } else { arg.clone() } }), ); - let sig = callable_item_sig(db, func.into()); + let sig = db.callable_item_signature(func.into()); let sig = sig.substitute(Interner, &subst); sig.params_and_return.first().cloned() } @@ -597,7 +589,7 @@ fn contains_illegal_impl_trait_in_trait( let ret = sig.skip_binders().ret(); let mut visitor = OpaqueTypeCollector(FxHashSet::default()); - let _ = ret.visit_with(visitor.as_dyn(), DebruijnIndex::INNERMOST); + _ = ret.visit_with(visitor.as_dyn(), DebruijnIndex::INNERMOST); // Since we haven't implemented RPITIT in proper way like rustc yet, // just check whether `ret` contains RPIT for now diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/dyn_compatibility/tests.rs b/src/tools/rust-analyzer/crates/hir-ty/src/dyn_compatibility/tests.rs index 50851325bd519..5078e8cfaa8b9 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/dyn_compatibility/tests.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/dyn_compatibility/tests.rs @@ -33,15 +33,18 @@ fn check_dyn_compatibility<'a>( expected.into_iter().map(|(id, osvs)| (id, FxHashSet::from_iter(osvs))).collect(); let (db, file_ids) = TestDB::with_many_files(ra_fixture); for (trait_id, name) in file_ids.into_iter().flat_map(|file_id| { - let module_id = db.module_for_file(file_id); + let module_id = db.module_for_file(file_id.file_id(&db)); let def_map = module_id.def_map(&db); let scope = &def_map[module_id.local_id].scope; scope .declarations() .filter_map(|def| { if let hir_def::ModuleDefId::TraitId(trait_id) = def { - let name = - db.trait_data(trait_id).name.display_no_db(file_id.edition()).to_smolstr(); + let name = db + .trait_signature(trait_id) + .name + .display_no_db(file_id.edition(&db)) + .to_smolstr(); Some((trait_id, name)) } else { None @@ -53,7 +56,7 @@ fn check_dyn_compatibility<'a>( continue; }; let mut osvs = FxHashSet::default(); - let _ = dyn_compatibility_with_callback(&db, trait_id, &mut |osv| { + _ = dyn_compatibility_with_callback(&db, trait_id, &mut |osv| { osvs.insert(match osv { DynCompatibilityViolation::SizedSelf => SizedSelf, DynCompatibilityViolation::SelfReferential => SelfReferential, diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/generics.rs b/src/tools/rust-analyzer/crates/hir-ty/src/generics.rs index 18cf6e5ce36ef..bb4aaf7889589 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/generics.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/generics.rs @@ -1,41 +1,41 @@ //! Utilities for working with generics. //! //! The layout for generics as expected by chalk are as follows: +//! - Parent parameters //! - Optional Self parameter //! - Lifetime parameters //! - Type or Const parameters -//! - Parent parameters //! //! where parent follows the same scheme. use std::ops; -use chalk_ir::{cast::Cast as _, BoundVar, DebruijnIndex}; +use chalk_ir::{BoundVar, DebruijnIndex, cast::Cast as _}; use hir_def::{ + ConstParamId, GenericDefId, GenericParamId, ItemContainerId, LifetimeParamId, Lookup, + TypeOrConstParamId, TypeParamId, db::DefDatabase, - generics::{ - GenericParamDataRef, GenericParams, LifetimeParamData, TypeOrConstParamData, - TypeParamProvenance, + expr_store::ExpressionStore, + hir::generics::{ + GenericParamDataRef, GenericParams, LifetimeParamData, LocalLifetimeParamId, + LocalTypeOrConstParamId, TypeOrConstParamData, TypeParamProvenance, WherePredicate, }, - type_ref::TypesMap, - ConstParamId, GenericDefId, GenericParamId, ItemContainerId, LifetimeParamId, - LocalLifetimeParamId, LocalTypeOrConstParamId, Lookup, TypeOrConstParamId, TypeParamId, }; use itertools::chain; -use stdx::TupleExt; use triomphe::Arc; -use crate::{db::HirDatabase, lt_to_placeholder_idx, to_placeholder_idx, Interner, Substitution}; +use crate::{Interner, Substitution, db::HirDatabase, lt_to_placeholder_idx, to_placeholder_idx}; pub fn generics(db: &dyn DefDatabase, def: GenericDefId) -> Generics { let parent_generics = parent_generic_def(db, def).map(|def| Box::new(generics(db, def))); - let params = db.generic_params(def); + let (params, store) = db.generic_params_and_store(def); let has_trait_self_param = params.trait_self_param().is_some(); - Generics { def, params, parent_generics, has_trait_self_param } + Generics { def, params, parent_generics, has_trait_self_param, store } } #[derive(Clone, Debug)] pub struct Generics { def: GenericDefId, params: Arc, + store: Arc, parent_generics: Option>, has_trait_self_param: bool, } @@ -55,12 +55,16 @@ impl Generics { self.def } - pub(crate) fn self_types_map(&self) -> &TypesMap { - &self.params.types_map + pub(crate) fn store(&self) -> &ExpressionStore { + &self.store + } + + pub(crate) fn where_predicates(&self) -> impl Iterator { + self.params.where_predicates() } pub(crate) fn iter_id(&self) -> impl Iterator + '_ { - self.iter_self_id().chain(self.iter_parent_id()) + self.iter_parent_id().chain(self.iter_self_id()) } pub(crate) fn iter_self_id(&self) -> impl Iterator + '_ { @@ -73,31 +77,26 @@ impl Generics { pub(crate) fn iter_self_type_or_consts( &self, - ) -> impl DoubleEndedIterator { - self.params.iter_type_or_consts() - } - - pub(crate) fn iter_self_type_or_consts_id( - &self, - ) -> impl DoubleEndedIterator + '_ { - self.params.iter_type_or_consts().map(from_toc_id(self)).map(TupleExt::head) + ) -> impl DoubleEndedIterator + '_ + { + let mut toc = self.params.iter_type_or_consts(); + let trait_self_param = self.has_trait_self_param.then(|| toc.next()).flatten(); + chain!(trait_self_param, toc) } - /// Iterate over the params followed by the parent params. + /// Iterate over the parent params followed by self params. pub(crate) fn iter( &self, ) -> impl DoubleEndedIterator)> + '_ { - self.iter_self().chain(self.iter_parent()) + self.iter_parent().chain(self.iter_self()) } - pub(crate) fn iter_parents_with_types_map( + pub(crate) fn iter_parents_with_store( &self, - ) -> impl Iterator), &TypesMap)> + '_ { - self.iter_parent().zip( - self.parent_generics() - .into_iter() - .flat_map(|it| std::iter::repeat(&it.params.types_map)), - ) + ) -> impl Iterator), &ExpressionStore)> + '_ + { + self.iter_parent() + .zip(self.parent_generics().into_iter().flat_map(|it| std::iter::repeat(&*it.store))) } /// Iterate over the params without parent params. @@ -110,7 +109,7 @@ impl Generics { } /// Iterator over types and const params of parent. - fn iter_parent( + pub(crate) fn iter_parent( &self, ) -> impl DoubleEndedIterator)> + '_ { self.parent_generics().into_iter().flat_map(|it| { @@ -132,6 +131,10 @@ impl Generics { self.params.len() } + pub(crate) fn len_lifetimes_self(&self) -> usize { + self.params.len_lifetimes() + } + /// (parent total, self param, type params, const params, impl trait list, lifetimes) pub(crate) fn provenance_split(&self) -> (usize, bool, usize, usize, usize, usize) { let mut self_param = false; @@ -147,7 +150,7 @@ impl Generics { TypeOrConstParamData::ConstParamData(_) => const_params += 1, }); - let lifetime_params = self.params.iter_lt().count(); + let lifetime_params = self.params.len_lifetimes(); let parent_len = self.parent_generics().map_or(0, Generics::len); (parent_len, self_param, type_params, const_params, impl_trait_params, lifetime_params) @@ -160,17 +163,19 @@ impl Generics { fn find_type_or_const_param(&self, param: TypeOrConstParamId) -> Option { if param.parent == self.def { let idx = param.local_id.into_raw().into_u32() as usize; - debug_assert!(idx <= self.params.len_type_or_consts()); + debug_assert!( + idx <= self.params.len_type_or_consts(), + "idx: {} len: {}", + idx, + self.params.len_type_or_consts() + ); if self.params.trait_self_param() == Some(param.local_id) { return Some(idx); } - Some(self.params.len_lifetimes() + idx) + Some(self.parent_generics().map_or(0, |g| g.len()) + self.params.len_lifetimes() + idx) } else { debug_assert_eq!(self.parent_generics().map(|it| it.def), Some(param.parent)); - self.parent_generics() - .and_then(|g| g.find_type_or_const_param(param)) - // Remember that parent parameters come after parameters for self. - .map(|idx| self.len_self() + idx) + self.parent_generics().and_then(|g| g.find_type_or_const_param(param)) } } @@ -182,12 +187,14 @@ impl Generics { if lifetime.parent == self.def { let idx = lifetime.local_id.into_raw().into_u32() as usize; debug_assert!(idx <= self.params.len_lifetimes()); - Some(self.params.trait_self_param().is_some() as usize + idx) + Some( + self.parent_generics().map_or(0, |g| g.len()) + + self.params.trait_self_param().is_some() as usize + + idx, + ) } else { debug_assert_eq!(self.parent_generics().map(|it| it.def), Some(lifetime.parent)); - self.parent_generics() - .and_then(|g| g.find_lifetime(lifetime)) - .map(|idx| self.len_self() + idx) + self.parent_generics().and_then(|g| g.find_lifetime(lifetime)) } } @@ -251,8 +258,7 @@ pub(crate) fn trait_self_param_idx(db: &dyn DefDatabase, def: GenericDefId) -> O let parent_def = parent_generic_def(db, def)?; let parent_params = db.generic_params(parent_def); let parent_self_idx = parent_params.trait_self_param()?.into_raw().into_u32() as usize; - let self_params = db.generic_params(def); - Some(self_params.len() + parent_self_idx) + Some(parent_self_idx) } } } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs index 3e0ce7f1933a3..f0ec31db8bb91 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs @@ -26,27 +26,26 @@ pub(crate) mod unify; use std::{cell::OnceCell, convert::identity, iter, ops::Index}; use chalk_ir::{ + DebruijnIndex, Mutability, Safety, Scalar, TyKind, TypeFlags, Variance, cast::Cast, fold::TypeFoldable, interner::HasInterner, visit::{TypeSuperVisitable, TypeVisitable, TypeVisitor}, - DebruijnIndex, Mutability, Safety, Scalar, TyKind, TypeFlags, Variance, }; use either::Either; use hir_def::{ + AdtId, AssocItemId, ConstId, DefWithBodyId, FieldId, FunctionId, GenericDefId, GenericParamId, + ImplId, ItemContainerId, Lookup, TraitId, TupleFieldId, TupleId, TypeAliasId, VariantId, builtin_type::{BuiltinInt, BuiltinType, BuiltinUint}, - data::{ConstData, StaticData}, - expr_store::{Body, HygieneId}, + expr_store::{Body, ExpressionStore, HygieneId, path::Path}, hir::{BindingAnnotation, BindingId, ExprId, ExprOrPatId, LabelId, PatId}, - lang_item::{LangItem, LangItemTarget}, + lang_item::{LangItem, LangItemTarget, lang_item}, layout::Integer, - path::{ModPath, Path}, resolver::{HasResolver, ResolveValueResult, Resolver, TypeNs, ValueNs}, - type_ref::{LifetimeRef, TypeRefId, TypesMap}, - AdtId, AssocItemId, DefWithBodyId, FieldId, FunctionId, ImplId, ItemContainerId, Lookup, - TraitId, TupleFieldId, TupleId, TypeAliasId, VariantId, + signatures::{ConstSignature, StaticSignature}, + type_ref::{ConstRef, LifetimeRefId, TypeRefId}, }; -use hir_expand::name::Name; +use hir_expand::{mod_path::ModPath, name::Name}; use indexmap::IndexSet; use intern::sym; use la_arena::{ArenaMap, Entry}; @@ -55,6 +54,10 @@ use stdx::{always, never}; use triomphe::Arc; use crate::{ + AliasEq, AliasTy, Binders, ClosureId, Const, DomainGoal, GenericArg, Goal, ImplTraitId, + ImplTraitIdx, InEnvironment, IncorrectGenericsLenKind, Interner, Lifetime, OpaqueTyId, + ParamLoweringMode, PathLoweringDiagnostic, ProjectionTy, Substitution, TraitEnvironment, Ty, + TyBuilder, TyExt, db::HirDatabase, fold_tys, generics::Generics, @@ -64,14 +67,11 @@ use crate::{ expr::ExprIsRead, unify::InferenceTable, }, - lower::{diagnostics::TyLoweringDiagnostic, ImplTraitLoweringMode}, + lower::{ImplTraitLoweringMode, LifetimeElisionKind, diagnostics::TyLoweringDiagnostic}, mir::MirSpan, - to_assoc_type_id, + static_lifetime, to_assoc_type_id, traits::FnTrait, - utils::{InTypeConstIdMetadata, UnevaluatedConstEvaluatorFolder}, - AliasEq, AliasTy, Binders, ClosureId, Const, DomainGoal, GenericArg, Goal, ImplTraitId, - ImplTraitIdx, InEnvironment, Interner, Lifetime, OpaqueTyId, ParamLoweringMode, - PathLoweringDiagnostic, ProjectionTy, Substitution, TraitEnvironment, Ty, TyBuilder, TyExt, + utils::UnevaluatedConstEvaluatorFolder, }; // This lint has a false positive here. See the link below for details. @@ -88,7 +88,7 @@ pub(crate) use closure::{CaptureKind, CapturedItem, CapturedItemWithoutTy}; /// The entry point of type inference. pub(crate) fn infer_query(db: &dyn HirDatabase, def: DefWithBodyId) -> Arc { let _p = tracing::info_span!("infer_query").entered(); - let resolver = def.resolver(db.upcast()); + let resolver = def.resolver(db); let body = db.body(def); let mut ctx = InferenceContext::new(db, def, &body, resolver); @@ -96,11 +96,11 @@ pub(crate) fn infer_query(db: &dyn HirDatabase, def: DefWithBodyId) -> Arc { ctx.collect_fn(f); } - DefWithBodyId::ConstId(c) => ctx.collect_const(&db.const_data(c)), - DefWithBodyId::StaticId(s) => ctx.collect_static(&db.static_data(s)), + DefWithBodyId::ConstId(c) => ctx.collect_const(c, &db.const_signature(c)), + DefWithBodyId::StaticId(s) => ctx.collect_static(&db.static_signature(s)), DefWithBodyId::VariantId(v) => { ctx.return_ty = TyBuilder::builtin( - match db.enum_data(v.lookup(db.upcast()).parent).variant_body_type() { + match db.enum_signature(v.lookup(db).parent).variant_body_type() { hir_def::layout::IntegerType::Pointer(signed) => match signed { true => BuiltinType::Int(BuiltinInt::Isize), false => BuiltinType::Uint(BuiltinUint::Usize), @@ -124,16 +124,6 @@ pub(crate) fn infer_query(db: &dyn HirDatabase, def: DefWithBodyId) -> Arc { - // FIXME(const-generic-body): We should not get the return type in this way. - ctx.return_ty = c - .lookup(db.upcast()) - .expected_ty - .box_any() - .downcast::() - .unwrap() - .0; - } } ctx.infer_body(); @@ -286,6 +276,20 @@ pub enum InferenceDiagnostic { node: ExprOrPatId, diag: PathLoweringDiagnostic, }, + MethodCallIncorrectGenericsLen { + expr: ExprId, + provided_count: u32, + expected_count: u32, + kind: IncorrectGenericsLenKind, + def: GenericDefId, + }, + MethodCallIncorrectGenericsOrder { + expr: ExprId, + param_id: GenericParamId, + arg_idx: u32, + /// Whether the `GenericArgs` contains a `Self` arg. + has_self_arg: bool, + }, } /// A mismatch between an expected and an inferred type. @@ -489,7 +493,7 @@ pub struct InferenceResult { /// ``` /// the first `rest` has implicit `ref` binding mode, but the second `rest` binding mode is `move`. pub binding_modes: ArenaMap, - pub expr_adjustments: FxHashMap>, + pub expr_adjustments: FxHashMap>, pub(crate) closure_info: FxHashMap, FnTrait)>, // FIXME: remove this field pub mutated_bindings_in_closure: FxHashSet, @@ -597,7 +601,8 @@ pub(crate) struct InferenceContext<'a> { /// Generally you should not resolve things via this resolver. Instead create a TyLoweringContext /// and resolve the path via its methods. This will ensure proper error reporting. pub(crate) resolver: Resolver, - generics: OnceCell>, + generic_def: GenericDefId, + generics: OnceCell, table: unify::InferenceTable<'a>, /// The traits in scope, disregarding block modules. This is used for caching purposes. traits_in_scope: FxHashSet, @@ -708,8 +713,14 @@ impl<'a> InferenceContext<'a> { return_coercion: None, db, owner, + generic_def: match owner { + DefWithBodyId::FunctionId(it) => it.into(), + DefWithBodyId::StaticId(it) => it.into(), + DefWithBodyId::ConstId(it) => it.into(), + DefWithBodyId::VariantId(it) => it.lookup(db).parent.into(), + }, body, - traits_in_scope: resolver.traits_in_scope(db.upcast()), + traits_in_scope: resolver.traits_in_scope(db), resolver, diverges: Diverges::Maybe, breakables: Vec::new(), @@ -724,14 +735,8 @@ impl<'a> InferenceContext<'a> { } } - pub(crate) fn generics(&self) -> Option<&Generics> { - self.generics - .get_or_init(|| { - self.resolver - .generic_def() - .map(|def| crate::generics::generics(self.db.upcast(), def)) - }) - .as_ref() + pub(crate) fn generics(&self) -> &Generics { + self.generics.get_or_init(|| crate::generics::generics(self.db, self.generic_def)) } // FIXME: This function should be private in module. It is currently only used in the consteval, since we need @@ -780,8 +785,8 @@ impl<'a> InferenceContext<'a> { // Comment from rustc: // Even though coercion casts provide type hints, we check casts after fallback for // backwards compatibility. This makes fallback a stronger type hint than a cast coercion. - let mut apply_adjustments = |expr, adj| { - expr_adjustments.insert(expr, adj); + let mut apply_adjustments = |expr, adj: Vec<_>| { + expr_adjustments.insert(expr, adj.into_boxed_slice()); }; let mut set_coercion_cast = |expr| { coercion_casts.insert(expr); @@ -803,22 +808,27 @@ impl<'a> InferenceContext<'a> { *ty = table.resolve_completely(ty.clone()); *has_errors = *has_errors || ty.contains_unknown(); } + type_of_expr.shrink_to_fit(); for ty in type_of_pat.values_mut() { *ty = table.resolve_completely(ty.clone()); *has_errors = *has_errors || ty.contains_unknown(); } + type_of_pat.shrink_to_fit(); for ty in type_of_binding.values_mut() { *ty = table.resolve_completely(ty.clone()); *has_errors = *has_errors || ty.contains_unknown(); } + type_of_binding.shrink_to_fit(); for ty in type_of_rpit.values_mut() { *ty = table.resolve_completely(ty.clone()); *has_errors = *has_errors || ty.contains_unknown(); } + type_of_rpit.shrink_to_fit(); for ty in type_of_for_iterator.values_mut() { *ty = table.resolve_completely(ty.clone()); *has_errors = *has_errors || ty.contains_unknown(); } + type_of_for_iterator.shrink_to_fit(); *has_errors |= !type_mismatches.is_empty(); @@ -833,6 +843,7 @@ impl<'a> InferenceContext<'a> { ) .is_ok() }); + type_mismatches.shrink_to_fit(); diagnostics.retain_mut(|diagnostic| { use InferenceDiagnostic::*; match diagnostic { @@ -861,24 +872,29 @@ impl<'a> InferenceContext<'a> { } true }); + diagnostics.shrink_to_fit(); for (_, subst) in method_resolutions.values_mut() { *subst = table.resolve_completely(subst.clone()); *has_errors = *has_errors || subst.type_parameters(Interner).any(|ty| ty.contains_unknown()); } + method_resolutions.shrink_to_fit(); for (_, subst) in assoc_resolutions.values_mut() { *subst = table.resolve_completely(subst.clone()); *has_errors = *has_errors || subst.type_parameters(Interner).any(|ty| ty.contains_unknown()); } + assoc_resolutions.shrink_to_fit(); for adjustment in expr_adjustments.values_mut().flatten() { adjustment.target = table.resolve_completely(adjustment.target.clone()); *has_errors = *has_errors || adjustment.target.contains_unknown(); } + expr_adjustments.shrink_to_fit(); for adjustment in pat_adjustments.values_mut().flatten() { *adjustment = table.resolve_completely(adjustment.clone()); *has_errors = *has_errors || adjustment.contains_unknown(); } + pat_adjustments.shrink_to_fit(); result.tuple_field_access_types = tuple_field_accesses_rev .into_iter() .enumerate() @@ -888,15 +904,20 @@ impl<'a> InferenceContext<'a> { *has_errors || subst.type_parameters(Interner).any(|ty| ty.contains_unknown()); }) .collect(); + result.tuple_field_access_types.shrink_to_fit(); result.diagnostics = diagnostics; result } - fn collect_const(&mut self, data: &ConstData) { - let return_ty = - self.make_ty(data.type_ref, &data.types_map, InferenceTyDiagnosticSource::Signature); + fn collect_const(&mut self, id: ConstId, data: &ConstSignature) { + let return_ty = self.make_ty( + data.type_ref, + &data.store, + InferenceTyDiagnosticSource::Signature, + LifetimeElisionKind::for_const(id.loc(self.db).container), + ); // Constants might be defining usage sites of TAITs. self.make_tait_coercion_table(iter::once(&return_ty)); @@ -904,9 +925,13 @@ impl<'a> InferenceContext<'a> { self.return_ty = return_ty; } - fn collect_static(&mut self, data: &StaticData) { - let return_ty = - self.make_ty(data.type_ref, &data.types_map, InferenceTyDiagnosticSource::Signature); + fn collect_static(&mut self, data: &StaticSignature) { + let return_ty = self.make_ty( + data.type_ref, + &data.store, + InferenceTyDiagnosticSource::Signature, + LifetimeElisionKind::Elided(static_lifetime()), + ); // Statics might be defining usage sites of TAITs. self.make_tait_coercion_table(iter::once(&return_ty)); @@ -915,13 +940,17 @@ impl<'a> InferenceContext<'a> { } fn collect_fn(&mut self, func: FunctionId) { - let data = self.db.function_data(func); - let mut param_tys = - self.with_ty_lowering(&data.types_map, InferenceTyDiagnosticSource::Signature, |ctx| { - ctx.type_param_mode(ParamLoweringMode::Placeholder) - .impl_trait_mode(ImplTraitLoweringMode::Param); + let data = self.db.function_signature(func); + let mut param_tys = self.with_ty_lowering( + &data.store, + InferenceTyDiagnosticSource::Signature, + LifetimeElisionKind::for_fn_params(&data), + |ctx| { + ctx.type_param_mode(ParamLoweringMode::Placeholder); data.params.iter().map(|&type_ref| ctx.lower_ty(type_ref)).collect::>() - }); + }, + ); + // Check if function contains a va_list, if it does then we append it to the parameter types // that are collected from the function data if data.is_varargs() { @@ -956,35 +985,44 @@ impl<'a> InferenceContext<'a> { tait_candidates.insert(ty); } } - let return_ty = data.ret_type; - - let return_ty = - self.with_ty_lowering(&data.types_map, InferenceTyDiagnosticSource::Signature, |ctx| { - ctx.type_param_mode(ParamLoweringMode::Placeholder) - .impl_trait_mode(ImplTraitLoweringMode::Opaque) - .lower_ty(return_ty) - }); - let return_ty = self.insert_type_vars(return_ty); - - let return_ty = if let Some(rpits) = self.db.return_type_impl_traits(func) { - // RPIT opaque types use substitution of their parent function. - let fn_placeholders = TyBuilder::placeholder_subst(self.db, func); - let mut mode = ImplTraitReplacingMode::ReturnPosition(FxHashSet::default()); - let result = - self.insert_inference_vars_for_impl_trait(return_ty, fn_placeholders, &mut mode); - if let ImplTraitReplacingMode::ReturnPosition(taits) = mode { - tait_candidates.extend(taits); - } - let rpits = rpits.skip_binders(); - for (id, _) in rpits.impl_traits.iter() { - if let Entry::Vacant(e) = self.result.type_of_rpit.entry(id) { - never!("Missed RPIT in `insert_inference_vars_for_rpit`"); - e.insert(TyKind::Error.intern(Interner)); + let return_ty = match data.ret_type { + Some(return_ty) => { + let return_ty = self.with_ty_lowering( + &data.store, + InferenceTyDiagnosticSource::Signature, + LifetimeElisionKind::for_fn_ret(), + |ctx| { + ctx.type_param_mode(ParamLoweringMode::Placeholder) + .impl_trait_mode(ImplTraitLoweringMode::Opaque); + ctx.lower_ty(return_ty) + }, + ); + let return_ty = self.insert_type_vars(return_ty); + if let Some(rpits) = self.db.return_type_impl_traits(func) { + // RPIT opaque types use substitution of their parent function. + let fn_placeholders = TyBuilder::placeholder_subst(self.db, func); + let mut mode = ImplTraitReplacingMode::ReturnPosition(FxHashSet::default()); + let result = self.insert_inference_vars_for_impl_trait( + return_ty, + fn_placeholders, + &mut mode, + ); + if let ImplTraitReplacingMode::ReturnPosition(taits) = mode { + tait_candidates.extend(taits); + } + let rpits = rpits.skip_binders(); + for (id, _) in rpits.impl_traits.iter() { + if let Entry::Vacant(e) = self.result.type_of_rpit.entry(id) { + never!("Missed RPIT in `insert_inference_vars_for_rpit`"); + e.insert(TyKind::Error.intern(Interner)); + } + } + result + } else { + return_ty } } - result - } else { - return_ty + None => self.result.standard_types.unit.clone(), }; self.return_ty = self.normalize_associated_types_in(return_ty); @@ -1143,7 +1181,7 @@ impl<'a> InferenceContext<'a> { non_assocs: FxHashMap::default(), }; for ty in tait_candidates { - let _ = ty.visit_with(collector.as_dyn(), DebruijnIndex::INNERMOST); + _ = ty.visit_with(collector.as_dyn(), DebruijnIndex::INNERMOST); } // Non-assoc TAITs can be define-used everywhere as long as they are @@ -1190,11 +1228,7 @@ impl<'a> InferenceContext<'a> { if let Some(impl_id) = impl_id { taits.extend(collector.assocs.into_iter().filter_map(|(id, (impl_, ty))| { - if impl_ == impl_id { - Some((id, ty)) - } else { - None - } + if impl_ == impl_id { Some((id, ty)) } else { None } })); } @@ -1239,7 +1273,7 @@ impl<'a> InferenceContext<'a> { self.result.type_of_expr.insert(expr, ty); } - fn write_expr_adj(&mut self, expr: ExprId, adjustments: Vec) { + fn write_expr_adj(&mut self, expr: ExprId, adjustments: Box<[Adjustment]>) { if adjustments.is_empty() { return; } @@ -1291,48 +1325,91 @@ impl<'a> InferenceContext<'a> { fn with_ty_lowering( &mut self, - types_map: &TypesMap, + store: &ExpressionStore, types_source: InferenceTyDiagnosticSource, + lifetime_elision: LifetimeElisionKind, f: impl FnOnce(&mut TyLoweringContext<'_>) -> R, ) -> R { let mut ctx = TyLoweringContext::new( self.db, &self.resolver, - types_map, - self.owner.into(), + store, &self.diagnostics, types_source, + self.generic_def, + lifetime_elision, ); f(&mut ctx) } fn with_body_ty_lowering(&mut self, f: impl FnOnce(&mut TyLoweringContext<'_>) -> R) -> R { - self.with_ty_lowering(&self.body.types, InferenceTyDiagnosticSource::Body, f) + self.with_ty_lowering( + self.body, + InferenceTyDiagnosticSource::Body, + LifetimeElisionKind::Infer, + f, + ) } fn make_ty( &mut self, type_ref: TypeRefId, - types_map: &TypesMap, + store: &ExpressionStore, type_source: InferenceTyDiagnosticSource, + lifetime_elision: LifetimeElisionKind, ) -> Ty { - let ty = self.with_ty_lowering(types_map, type_source, |ctx| ctx.lower_ty(type_ref)); + let ty = self + .with_ty_lowering(store, type_source, lifetime_elision, |ctx| ctx.lower_ty(type_ref)); let ty = self.insert_type_vars(ty); self.normalize_associated_types_in(ty) } fn make_body_ty(&mut self, type_ref: TypeRefId) -> Ty { - self.make_ty(type_ref, &self.body.types, InferenceTyDiagnosticSource::Body) + self.make_ty( + type_ref, + self.body, + InferenceTyDiagnosticSource::Body, + LifetimeElisionKind::Infer, + ) + } + + fn make_body_const(&mut self, const_ref: ConstRef, ty: Ty) -> Const { + let const_ = self.with_ty_lowering( + self.body, + InferenceTyDiagnosticSource::Body, + LifetimeElisionKind::Infer, + |ctx| { + ctx.type_param_mode = ParamLoweringMode::Placeholder; + ctx.lower_const(&const_ref, ty) + }, + ); + self.insert_type_vars(const_) + } + + fn make_path_as_body_const(&mut self, path: &Path, ty: Ty) -> Const { + let const_ = self.with_ty_lowering( + self.body, + InferenceTyDiagnosticSource::Body, + LifetimeElisionKind::Infer, + |ctx| { + ctx.type_param_mode = ParamLoweringMode::Placeholder; + ctx.lower_path_as_const(path, ty) + }, + ); + self.insert_type_vars(const_) } fn err_ty(&self) -> Ty { self.result.standard_types.unknown.clone() } - fn make_body_lifetime(&mut self, lifetime_ref: &LifetimeRef) -> Lifetime { - let lt = self.with_ty_lowering(TypesMap::EMPTY, InferenceTyDiagnosticSource::Body, |ctx| { - ctx.lower_lifetime(lifetime_ref) - }); + fn make_body_lifetime(&mut self, lifetime_ref: LifetimeRefId) -> Lifetime { + let lt = self.with_ty_lowering( + self.body, + InferenceTyDiagnosticSource::Body, + LifetimeElisionKind::Infer, + |ctx| ctx.lower_lifetime(lifetime_ref), + ); self.insert_type_vars(lt) } @@ -1460,7 +1537,7 @@ impl<'a> InferenceContext<'a> { ) -> Ty { match assoc_ty { Some(res_assoc_ty) => { - let trait_ = match res_assoc_ty.lookup(self.db.upcast()).container { + let trait_ = match res_assoc_ty.lookup(self.db).container { hir_def::ItemContainerId::TraitId(trait_) => trait_, _ => panic!("resolve_associated_type called with non-associated type"), }; @@ -1498,10 +1575,11 @@ impl<'a> InferenceContext<'a> { let mut ctx = TyLoweringContext::new( self.db, &self.resolver, - &self.body.types, - self.owner.into(), + &self.body.store, &self.diagnostics, InferenceTyDiagnosticSource::Body, + self.generic_def, + LifetimeElisionKind::Infer, ); let mut path_ctx = ctx.at_path(path, node); let (resolution, unresolved) = if value_ns { @@ -1511,14 +1589,14 @@ impl<'a> InferenceContext<'a> { match res { ResolveValueResult::ValueNs(value, _) => match value { ValueNs::EnumVariantId(var) => { - let substs = path_ctx.substs_from_path(var.into(), true); + let substs = path_ctx.substs_from_path(var.into(), true, false); drop(ctx); - let ty = self.db.ty(var.lookup(self.db.upcast()).parent.into()); + let ty = self.db.ty(var.lookup(self.db).parent.into()); let ty = self.insert_type_vars(ty.substitute(Interner, &substs)); return (ty, Some(var.into())); } ValueNs::StructId(strukt) => { - let substs = path_ctx.substs_from_path(strukt.into(), true); + let substs = path_ctx.substs_from_path(strukt.into(), true, false); drop(ctx); let ty = self.db.ty(strukt.into()); let ty = self.insert_type_vars(ty.substitute(Interner, &substs)); @@ -1538,39 +1616,39 @@ impl<'a> InferenceContext<'a> { None => return (self.err_ty(), None), } }; - let Some(mod_path) = path.mod_path() else { - never!("resolver should always resolve lang item paths"); - return (self.err_ty(), None); - }; return match resolution { TypeNs::AdtId(AdtId::StructId(strukt)) => { - let substs = path_ctx.substs_from_path(strukt.into(), true); + let substs = path_ctx.substs_from_path(strukt.into(), true, false); drop(ctx); let ty = self.db.ty(strukt.into()); let ty = self.insert_type_vars(ty.substitute(Interner, &substs)); forbid_unresolved_segments((ty, Some(strukt.into())), unresolved) } TypeNs::AdtId(AdtId::UnionId(u)) => { - let substs = path_ctx.substs_from_path(u.into(), true); + let substs = path_ctx.substs_from_path(u.into(), true, false); drop(ctx); let ty = self.db.ty(u.into()); let ty = self.insert_type_vars(ty.substitute(Interner, &substs)); forbid_unresolved_segments((ty, Some(u.into())), unresolved) } TypeNs::EnumVariantId(var) => { - let substs = path_ctx.substs_from_path(var.into(), true); + let substs = path_ctx.substs_from_path(var.into(), true, false); drop(ctx); - let ty = self.db.ty(var.lookup(self.db.upcast()).parent.into()); + let ty = self.db.ty(var.lookup(self.db).parent.into()); let ty = self.insert_type_vars(ty.substitute(Interner, &substs)); forbid_unresolved_segments((ty, Some(var.into())), unresolved) } TypeNs::SelfType(impl_id) => { - let generics = crate::generics::generics(self.db.upcast(), impl_id.into()); + let generics = crate::generics::generics(self.db, impl_id.into()); let substs = generics.placeholder_subst(self.db); let mut ty = self.db.impl_self_ty(impl_id).substitute(Interner, &substs); let Some(remaining_idx) = unresolved else { drop(ctx); + let Some(mod_path) = path.mod_path() else { + never!("resolver should always resolve lang item paths"); + return (self.err_ty(), None); + }; return self.resolve_variant_on_alias(ty, None, mod_path); }; @@ -1587,7 +1665,7 @@ impl<'a> InferenceContext<'a> { // If we can resolve to an enum variant, it takes priority over associated type // of the same name. if let Some((AdtId::EnumId(id), _)) = ty.as_adt() { - let enum_data = self.db.enum_data(id); + let enum_data = self.db.enum_variants(id); if let Some(variant) = enum_data.variant(current_segment.name) { return if remaining_segments.len() == 1 { (ty, Some(variant.into())) @@ -1609,7 +1687,7 @@ impl<'a> InferenceContext<'a> { // `lower_partly_resolved_path()` returns `None` as type namespace unless // `remaining_segments` is empty, which is never the case here. We don't know // which namespace the new `ty` is in until normalized anyway. - (ty, _) = path_ctx.lower_partly_resolved_path(resolution, false); + (ty, _) = path_ctx.lower_partly_resolved_path(resolution, true); tried_resolving_once = true; ty = self.table.insert_type_vars(ty); @@ -1634,7 +1712,11 @@ impl<'a> InferenceContext<'a> { (ty, variant) } TypeNs::TypeAliasId(it) => { - let substs = path_ctx.substs_from_path_segment(it.into(), true, None); + let Some(mod_path) = path.mod_path() else { + never!("resolver should always resolve lang item paths"); + return (self.err_ty(), None); + }; + let substs = path_ctx.substs_from_path_segment(it.into(), true, None, false); drop(ctx); let ty = self.db.ty(it.into()); let ty = self.insert_type_vars(ty.substitute(Interner, &substs)); @@ -1652,7 +1734,8 @@ impl<'a> InferenceContext<'a> { TypeNs::AdtId(AdtId::EnumId(_)) | TypeNs::BuiltinType(_) | TypeNs::TraitId(_) - | TypeNs::TraitAliasId(_) => { + | TypeNs::TraitAliasId(_) + | TypeNs::ModuleId(_) => { // FIXME diagnostic (self.err_ty(), None) } @@ -1701,7 +1784,7 @@ impl<'a> InferenceContext<'a> { let segment = path.segments().last().unwrap(); // this could be an enum variant or associated type if let Some((AdtId::EnumId(enum_id), _)) = ty.as_adt() { - let enum_data = self.db.enum_data(enum_id); + let enum_data = self.db.enum_variants(enum_id); if let Some(variant) = enum_data.variant(segment) { return (ty, Some(variant.into())); } @@ -1718,13 +1801,11 @@ impl<'a> InferenceContext<'a> { fn resolve_lang_item(&self, item: LangItem) -> Option { let krate = self.resolver.krate(); - self.db.lang_item(krate, item) + lang_item(self.db, krate, item) } fn resolve_output_on(&self, trait_: TraitId) -> Option { - self.db - .trait_data(trait_) - .associated_type_by_name(&Name::new_symbol_root(sym::Output.clone())) + self.db.trait_items(trait_).associated_type_by_name(&Name::new_symbol_root(sym::Output)) } fn resolve_lang_trait(&self, lang: LangItem) -> Option { @@ -1743,7 +1824,7 @@ impl<'a> InferenceContext<'a> { let ItemContainerId::TraitId(trait_) = self .resolve_lang_item(LangItem::IntoFutureIntoFuture)? .as_function()? - .lookup(self.db.upcast()) + .lookup(self.db) .container else { return None; @@ -1914,11 +1995,7 @@ impl Expectation { match self { Expectation::HasType(ety) => { let ety = table.resolve_ty_shallow(ety); - if ety.is_ty_var() { - Expectation::None - } else { - Expectation::HasType(ety) - } + if ety.is_ty_var() { Expectation::None } else { Expectation::HasType(ety) } } Expectation::RValueLikeUnsized(ety) => Expectation::RValueLikeUnsized(ety.clone()), _ => Expectation::None, @@ -2044,7 +2121,7 @@ impl chalk_ir::zip::Zipper for UnknownMismatch<'_> { | (_, TyKind::Error) | (TyKind::Alias(AliasTy::Projection(_)) | TyKind::AssociatedType(_, _), _) | (_, TyKind::Alias(AliasTy::Projection(_)) | TyKind::AssociatedType(_, _)) => { - return Err(chalk_ir::NoSolution) + return Err(chalk_ir::NoSolution); } _ => (), } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/cast.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/cast.rs index eb193686e967f..10d85792c9d60 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/cast.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/cast.rs @@ -1,13 +1,13 @@ //! Type cast logic. Basically coercion + additional casts. use chalk_ir::{Mutability, Scalar, TyVariableKind, UintTy}; -use hir_def::{hir::ExprId, AdtId}; +use hir_def::{AdtId, hir::ExprId}; use stdx::never; use crate::{ - infer::{coerce::CoerceNever, unify::InferenceTable}, Adjustment, Binders, DynTy, InferenceDiagnostic, Interner, PlaceholderIndex, QuantifiedWhereClauses, Ty, TyExt, TyKind, TypeFlags, WhereClause, + infer::{coerce::CoerceNever, unify::InferenceTable}, }; #[derive(Debug)] @@ -43,14 +43,10 @@ impl CastTy { let (AdtId::EnumId(id), _) = t.as_adt()? else { return None; }; - let enum_data = table.db.enum_data(id); - if enum_data.is_payload_free(table.db.upcast()) { - Some(Self::Int(Int::CEnum)) - } else { - None - } + let enum_data = table.db.enum_variants(id); + if enum_data.is_payload_free(table.db) { Some(Self::Int(Int::CEnum)) } else { None } } - TyKind::Raw(m, ty) => Some(Self::Ptr(table.resolve_ty_shallow(ty), *m)), + TyKind::Raw(m, ty) => Some(Self::Ptr(ty.clone(), *m)), TyKind::Function(_) => Some(Self::FnPtr), _ => None, } @@ -105,9 +101,8 @@ impl CastCheck { F: FnMut(ExprId, Vec), G: FnMut(ExprId), { - table.resolve_obligations_as_possible(); - self.expr_ty = table.resolve_ty_shallow(&self.expr_ty); - self.cast_ty = table.resolve_ty_shallow(&self.cast_ty); + self.expr_ty = table.eagerly_normalize_and_resolve_shallow_in(self.expr_ty.clone()); + self.cast_ty = table.eagerly_normalize_and_resolve_shallow_in(self.cast_ty.clone()); if self.expr_ty.contains_unknown() || self.cast_ty.contains_unknown() { return Ok(()); @@ -153,7 +148,7 @@ impl CastCheck { (None, Some(t_cast)) => match self.expr_ty.kind(Interner) { TyKind::FnDef(..) => { let sig = self.expr_ty.callable_sig(table.db).expect("FnDef had no sig"); - let sig = table.normalize_associated_types_in(sig); + let sig = table.eagerly_normalize_and_resolve_shallow_in(sig); let fn_ptr = TyKind::Function(sig.to_fn_ptr()).intern(Interner); if let Ok((adj, _)) = table.coerce(&self.expr_ty, &fn_ptr, CoerceNever::Yes) { @@ -165,7 +160,6 @@ impl CastCheck { (CastTy::FnPtr, t_cast) } TyKind::Ref(mutbl, _, inner_ty) => { - let inner_ty = table.resolve_ty_shallow(inner_ty); return match t_cast { CastTy::Int(_) | CastTy::Float => match inner_ty.kind(Interner) { TyKind::Scalar( @@ -180,13 +174,13 @@ impl CastCheck { }, // array-ptr-cast CastTy::Ptr(t, m) => { - let t = table.resolve_ty_shallow(&t); + let t = table.eagerly_normalize_and_resolve_shallow_in(t); if !table.is_sized(&t) { return Err(CastError::IllegalCast); } self.check_ref_cast( table, - &inner_ty, + inner_ty, *mutbl, &t, m, @@ -359,7 +353,7 @@ impl CastCheck { } } -#[derive(PartialEq, Eq)] +#[derive(Debug, PartialEq, Eq)] enum PointerKind { // thin pointer Thin, @@ -373,8 +367,7 @@ enum PointerKind { } fn pointer_kind(ty: &Ty, table: &mut InferenceTable<'_>) -> Result, ()> { - let ty = table.resolve_ty_shallow(ty); - let ty = table.normalize_associated_types_in(ty); + let ty = table.eagerly_normalize_and_resolve_shallow_in(ty.clone()); if table.is_sized(&ty) { return Ok(Some(PointerKind::Thin)); @@ -389,8 +382,8 @@ fn pointer_kind(ty: &Ty, table: &mut InferenceTable<'_>) -> Result bool { use std::ops::ControlFlow; use chalk_ir::{ - visit::{TypeSuperVisitable, TypeVisitable, TypeVisitor}, DebruijnIndex, + visit::{TypeSuperVisitable, TypeVisitable, TypeVisitor}, }; struct DynTraitVisitor; diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/closure.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/closure.rs index 9283c46d0f611..800897c6fc3a2 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/closure.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/closure.rs @@ -1,49 +1,148 @@ //! Inference of closure parameter types based on the closure's expected type. -use std::{cmp, convert::Infallible, mem}; +use std::{cmp, convert::Infallible, mem, ops::ControlFlow}; use chalk_ir::{ - cast::Cast, - fold::{FallibleTypeFolder, TypeFoldable}, BoundVar, DebruijnIndex, FnSubst, Mutability, TyKind, + cast::Cast, + fold::{FallibleTypeFolder, Shift, TypeFoldable}, + visit::{TypeSuperVisitable, TypeVisitable, TypeVisitor}, }; use either::Either; use hir_def::{ - data::adt::VariantData, + DefWithBodyId, FieldId, HasModule, TupleFieldId, TupleId, VariantId, + expr_store::path::Path, hir::{ - Array, AsmOperand, BinaryOp, BindingId, CaptureBy, Expr, ExprId, ExprOrPatId, Pat, PatId, - Statement, UnaryOp, + Array, AsmOperand, BinaryOp, BindingId, CaptureBy, ClosureKind, Expr, ExprId, ExprOrPatId, + Pat, PatId, Statement, UnaryOp, }, + item_tree::FieldsShape, lang_item::LangItem, - path::Path, resolver::ValueNs, - DefWithBodyId, FieldId, HasModule, TupleFieldId, TupleId, VariantId, }; +use hir_def::{Lookup, type_ref::TypeRefId}; use hir_expand::name::Name; use intern::sym; -use rustc_hash::FxHashMap; -use smallvec::{smallvec, SmallVec}; +use rustc_hash::{FxHashMap, FxHashSet}; +use smallvec::{SmallVec, smallvec}; use stdx::{format_to, never}; use syntax::utils::is_raw_identifier; use crate::{ - db::{HirDatabase, InternedClosure}, - error_lifetime, from_chalk_trait_id, from_placeholder_idx, + Adjust, Adjustment, AliasEq, AliasTy, Binders, BindingMode, ChalkTraitId, ClosureId, DynTy, + DynTyExt, FnAbi, FnPointer, FnSig, GenericArg, Interner, OpaqueTy, ProjectionTy, + ProjectionTyExt, Substitution, Ty, TyBuilder, TyExt, WhereClause, + db::{HirDatabase, InternedClosure, InternedCoroutine}, + error_lifetime, from_assoc_type_id, from_chalk_trait_id, from_placeholder_idx, generics::Generics, - infer::coerce::CoerceNever, + infer::{BreakableKind, CoerceMany, Diverges, coerce::CoerceNever}, make_binders, mir::{BorrowKind, MirSpan, MutBorrowKind, ProjectionElem}, to_chalk_trait_id, traits::FnTrait, utils::{self, elaborate_clause_supertraits}, - Adjust, Adjustment, AliasEq, AliasTy, Binders, BindingMode, ChalkTraitId, ClosureId, DynTy, - DynTyExt, FnAbi, FnPointer, FnSig, Interner, OpaqueTy, ProjectionTyExt, Substitution, Ty, - TyExt, WhereClause, }; use super::{Expectation, InferenceContext}; +#[derive(Debug)] +pub(super) struct ClosureSignature { + pub(super) ret_ty: Ty, + pub(super) expected_sig: FnPointer, +} + impl InferenceContext<'_> { + pub(super) fn infer_closure( + &mut self, + body: &ExprId, + args: &[PatId], + ret_type: &Option, + arg_types: &[Option], + closure_kind: ClosureKind, + tgt_expr: ExprId, + expected: &Expectation, + ) -> Ty { + assert_eq!(args.len(), arg_types.len()); + + let (expected_sig, expected_kind) = match expected.to_option(&mut self.table) { + Some(expected_ty) => self.deduce_closure_signature(&expected_ty, closure_kind), + None => (None, None), + }; + + let ClosureSignature { expected_sig: bound_sig, ret_ty: body_ret_ty } = + self.sig_of_closure(body, ret_type, arg_types, closure_kind, expected_sig); + let bound_sig = self.normalize_associated_types_in(bound_sig); + let sig_ty = TyKind::Function(bound_sig.clone()).intern(Interner); + + let (id, ty, resume_yield_tys) = match closure_kind { + ClosureKind::Coroutine(_) => { + let sig_tys = bound_sig.substitution.0.as_slice(Interner); + // FIXME: report error when there are more than 1 parameter. + let resume_ty = match sig_tys.first() { + // When `sig_tys.len() == 1` the first type is the return type, not the + // first parameter type. + Some(ty) if sig_tys.len() > 1 => ty.assert_ty_ref(Interner).clone(), + _ => self.result.standard_types.unit.clone(), + }; + let yield_ty = self.table.new_type_var(); + + let subst = TyBuilder::subst_for_coroutine(self.db, self.owner) + .push(resume_ty.clone()) + .push(yield_ty.clone()) + .push(body_ret_ty.clone()) + .build(); + + let coroutine_id = + self.db.intern_coroutine(InternedCoroutine(self.owner, tgt_expr)).into(); + let coroutine_ty = TyKind::Coroutine(coroutine_id, subst).intern(Interner); + + (None, coroutine_ty, Some((resume_ty, yield_ty))) + } + ClosureKind::Closure | ClosureKind::Async => { + let closure_id = + self.db.intern_closure(InternedClosure(self.owner, tgt_expr)).into(); + let closure_ty = TyKind::Closure( + closure_id, + TyBuilder::subst_for_closure(self.db, self.owner, sig_ty.clone()), + ) + .intern(Interner); + self.deferred_closures.entry(closure_id).or_default(); + self.add_current_closure_dependency(closure_id); + (Some(closure_id), closure_ty, None) + } + }; + + // Eagerly try to relate the closure type with the expected + // type, otherwise we often won't have enough information to + // infer the body. + self.deduce_closure_type_from_expectations(tgt_expr, &ty, &sig_ty, expected, expected_kind); + + // Now go through the argument patterns + for (arg_pat, arg_ty) in args.iter().zip(bound_sig.substitution.0.as_slice(Interner).iter()) + { + self.infer_top_pat(*arg_pat, arg_ty.assert_ty_ref(Interner), None); + } + + // FIXME: lift these out into a struct + let prev_diverges = mem::replace(&mut self.diverges, Diverges::Maybe); + let prev_closure = mem::replace(&mut self.current_closure, id); + let prev_ret_ty = mem::replace(&mut self.return_ty, body_ret_ty.clone()); + let prev_ret_coercion = self.return_coercion.replace(CoerceMany::new(body_ret_ty)); + let prev_resume_yield_tys = mem::replace(&mut self.resume_yield_tys, resume_yield_tys); + + self.with_breakable_ctx(BreakableKind::Border, None, None, |this| { + this.infer_return(*body); + }); + + self.diverges = prev_diverges; + self.return_ty = prev_ret_ty; + self.return_coercion = prev_ret_coercion; + self.current_closure = prev_closure; + self.resume_yield_tys = prev_resume_yield_tys; + + self.table.normalize_associated_types_in(ty) + } + // This function handles both closures and coroutines. pub(super) fn deduce_closure_type_from_expectations( &mut self, @@ -51,19 +150,21 @@ impl InferenceContext<'_> { closure_ty: &Ty, sig_ty: &Ty, expectation: &Expectation, + expected_kind: Option, ) { let expected_ty = match expectation.to_option(&mut self.table) { Some(ty) => ty, None => return, }; - if let TyKind::Closure(closure_id, _) = closure_ty.kind(Interner) { - if let Some(closure_kind) = self.deduce_closure_kind_from_expectations(&expected_ty) { + match (closure_ty.kind(Interner), expected_kind) { + (TyKind::Closure(closure_id, _), Some(closure_kind)) => { self.result .closure_info .entry(*closure_id) .or_insert_with(|| (Vec::new(), closure_kind)); } + _ => {} } // Deduction from where-clauses in scope, as well as fn-pointer coercion are handled here. @@ -86,63 +187,153 @@ impl InferenceContext<'_> { // Closure kind deductions are mostly from `rustc_hir_typeck/src/closure.rs`. // Might need to port closure sig deductions too. - fn deduce_closure_kind_from_expectations(&mut self, expected_ty: &Ty) -> Option { + pub(super) fn deduce_closure_signature( + &mut self, + expected_ty: &Ty, + closure_kind: ClosureKind, + ) -> (Option>, Option) { match expected_ty.kind(Interner) { TyKind::Alias(AliasTy::Opaque(OpaqueTy { .. })) | TyKind::OpaqueType(..) => { - let clauses = expected_ty - .impl_trait_bounds(self.db) - .into_iter() - .flatten() - .map(|b| b.into_value_and_skipped_binders().0); - self.deduce_closure_kind_from_predicate_clauses(clauses) + let clauses = expected_ty.impl_trait_bounds(self.db).into_iter().flatten().map( + |b: chalk_ir::Binders>| { + b.into_value_and_skipped_binders().0 + }, + ); + self.deduce_closure_kind_from_predicate_clauses(expected_ty, clauses, closure_kind) + } + TyKind::Dyn(dyn_ty) => { + let sig = + dyn_ty.bounds.skip_binders().as_slice(Interner).iter().find_map(|bound| { + if let WhereClause::AliasEq(AliasEq { + alias: AliasTy::Projection(projection_ty), + ty: projected_ty, + }) = bound.skip_binders() + { + if let Some(sig) = self.deduce_sig_from_projection( + closure_kind, + projection_ty, + projected_ty, + ) { + return Some(sig); + } + } + None + }); + + let kind = dyn_ty.principal().and_then(|principal_trait_ref| { + self.fn_trait_kind_from_trait_id(from_chalk_trait_id( + principal_trait_ref.skip_binders().skip_binders().trait_id, + )) + }); + + (sig, kind) } - TyKind::Dyn(dyn_ty) => dyn_ty.principal_id().and_then(|trait_id| { - self.fn_trait_kind_from_trait_id(from_chalk_trait_id(trait_id)) - }), TyKind::InferenceVar(ty, chalk_ir::TyVariableKind::General) => { let clauses = self.clauses_for_self_ty(*ty); - self.deduce_closure_kind_from_predicate_clauses(clauses.into_iter()) + self.deduce_closure_kind_from_predicate_clauses( + expected_ty, + clauses.into_iter(), + closure_kind, + ) } - TyKind::Function(_) => Some(FnTrait::Fn), - _ => None, + TyKind::Function(fn_ptr) => match closure_kind { + ClosureKind::Closure => (Some(fn_ptr.substitution.clone()), Some(FnTrait::Fn)), + ClosureKind::Async | ClosureKind::Coroutine(_) => (None, None), + }, + _ => (None, None), } } fn deduce_closure_kind_from_predicate_clauses( &self, + expected_ty: &Ty, clauses: impl DoubleEndedIterator, - ) -> Option { + closure_kind: ClosureKind, + ) -> (Option>, Option) { + let mut expected_sig = None; let mut expected_kind = None; for clause in elaborate_clause_supertraits(self.db, clauses.rev()) { + if expected_sig.is_none() { + if let WhereClause::AliasEq(AliasEq { + alias: AliasTy::Projection(projection), + ty, + }) = &clause + { + let inferred_sig = + self.deduce_sig_from_projection(closure_kind, projection, ty); + // Make sure that we didn't infer a signature that mentions itself. + // This can happen when we elaborate certain supertrait bounds that + // mention projections containing the `Self` type. See rust-lang/rust#105401. + struct MentionsTy<'a> { + expected_ty: &'a Ty, + } + impl TypeVisitor for MentionsTy<'_> { + type BreakTy = (); + + fn interner(&self) -> Interner { + Interner + } + + fn as_dyn( + &mut self, + ) -> &mut dyn TypeVisitor + { + self + } + + fn visit_ty( + &mut self, + t: &Ty, + db: chalk_ir::DebruijnIndex, + ) -> ControlFlow<()> { + if t == self.expected_ty { + ControlFlow::Break(()) + } else { + t.super_visit_with(self, db) + } + } + } + if inferred_sig + .visit_with( + &mut MentionsTy { expected_ty }, + chalk_ir::DebruijnIndex::INNERMOST, + ) + .is_continue() + { + expected_sig = inferred_sig; + } + } + } + let trait_id = match clause { WhereClause::AliasEq(AliasEq { alias: AliasTy::Projection(projection), .. - }) => Some(projection.trait_(self.db)), - WhereClause::Implemented(trait_ref) => { - Some(from_chalk_trait_id(trait_ref.trait_id)) - } - _ => None, + }) => projection.trait_(self.db), + WhereClause::Implemented(trait_ref) => from_chalk_trait_id(trait_ref.trait_id), + _ => continue, }; - if let Some(closure_kind) = - trait_id.and_then(|trait_id| self.fn_trait_kind_from_trait_id(trait_id)) - { - // `FnX`'s variants order is opposite from rustc, so use `cmp::max` instead of `cmp::min` - expected_kind = Some( - expected_kind - .map_or_else(|| closure_kind, |current| cmp::max(current, closure_kind)), - ); + if let Some(closure_kind) = self.fn_trait_kind_from_trait_id(trait_id) { + // always use the closure kind that is more permissive. + match (expected_kind, closure_kind) { + (None, _) => expected_kind = Some(closure_kind), + (Some(FnTrait::FnMut), FnTrait::Fn) => expected_kind = Some(FnTrait::Fn), + (Some(FnTrait::FnOnce), FnTrait::Fn | FnTrait::FnMut) => { + expected_kind = Some(closure_kind) + } + _ => {} + } } } - expected_kind + (expected_sig, expected_kind) } fn deduce_sig_from_dyn_ty(&self, dyn_ty: &DynTy) -> Option { // Search for a predicate like `<$self as FnX>::Output == Ret` let fn_traits: SmallVec<[ChalkTraitId; 3]> = - utils::fn_traits(self.db.upcast(), self.owner.module(self.db.upcast()).krate()) + utils::fn_traits(self.db, self.owner.module(self.db).krate()) .map(to_chalk_trait_id) .collect(); @@ -153,7 +344,8 @@ impl InferenceContext<'_> { if let WhereClause::AliasEq(AliasEq { alias: AliasTy::Projection(projection), ty }) = bound.skip_binders() { - let assoc_data = self.db.associated_ty_data(projection.associated_ty_id); + let assoc_data = + self.db.associated_ty_data(from_assoc_type_id(projection.associated_ty_id)); if !fn_traits.contains(&assoc_data.trait_id) { return None; } @@ -185,9 +377,176 @@ impl InferenceContext<'_> { None } + fn deduce_sig_from_projection( + &self, + closure_kind: ClosureKind, + projection_ty: &ProjectionTy, + projected_ty: &Ty, + ) -> Option> { + let container = + from_assoc_type_id(projection_ty.associated_ty_id).lookup(self.db).container; + let trait_ = match container { + hir_def::ItemContainerId::TraitId(trait_) => trait_, + _ => return None, + }; + + // For now, we only do signature deduction based off of the `Fn` and `AsyncFn` traits, + // for closures and async closures, respectively. + match closure_kind { + ClosureKind::Closure | ClosureKind::Async + if self.fn_trait_kind_from_trait_id(trait_).is_some() => + { + self.extract_sig_from_projection(projection_ty, projected_ty) + } + _ => None, + } + } + + fn extract_sig_from_projection( + &self, + projection_ty: &ProjectionTy, + projected_ty: &Ty, + ) -> Option> { + let arg_param_ty = projection_ty.substitution.as_slice(Interner)[1].assert_ty_ref(Interner); + + let TyKind::Tuple(_, input_tys) = arg_param_ty.kind(Interner) else { + return None; + }; + + let ret_param_ty = projected_ty; + + Some(FnSubst(Substitution::from_iter( + Interner, + input_tys.iter(Interner).map(|t| t.cast(Interner)).chain(Some(GenericArg::new( + Interner, + chalk_ir::GenericArgData::Ty(ret_param_ty.clone()), + ))), + ))) + } + fn fn_trait_kind_from_trait_id(&self, trait_id: hir_def::TraitId) -> Option { FnTrait::from_lang_item(self.db.lang_attr(trait_id.into())?) } + + fn supplied_sig_of_closure( + &mut self, + body: &ExprId, + ret_type: &Option, + arg_types: &[Option], + closure_kind: ClosureKind, + ) -> ClosureSignature { + let mut sig_tys = Vec::with_capacity(arg_types.len() + 1); + + // collect explicitly written argument types + for arg_type in arg_types.iter() { + let arg_ty = match arg_type { + // FIXME: I think rustc actually lowers closure params with `LifetimeElisionKind::AnonymousCreateParameter` + // (but the return type with infer). + Some(type_ref) => self.make_body_ty(*type_ref), + None => self.table.new_type_var(), + }; + sig_tys.push(arg_ty); + } + + // add return type + let ret_ty = match ret_type { + Some(type_ref) => self.make_body_ty(*type_ref), + None => self.table.new_type_var(), + }; + if let ClosureKind::Async = closure_kind { + sig_tys.push(self.lower_async_block_type_impl_trait(ret_ty.clone(), *body)); + } else { + sig_tys.push(ret_ty.clone()); + } + + let expected_sig = FnPointer { + num_binders: 0, + sig: FnSig { abi: FnAbi::RustCall, safety: chalk_ir::Safety::Safe, variadic: false }, + substitution: FnSubst( + Substitution::from_iter(Interner, sig_tys.iter().cloned()).shifted_in(Interner), + ), + }; + + ClosureSignature { ret_ty, expected_sig } + } + + /// The return type is the signature of the closure, and the return type + /// *as represented inside the body* (so, for async closures, the `Output` ty) + pub(super) fn sig_of_closure( + &mut self, + body: &ExprId, + ret_type: &Option, + arg_types: &[Option], + closure_kind: ClosureKind, + expected_sig: Option>, + ) -> ClosureSignature { + if let Some(e) = expected_sig { + self.sig_of_closure_with_expectation(body, ret_type, arg_types, closure_kind, e) + } else { + self.sig_of_closure_no_expectation(body, ret_type, arg_types, closure_kind) + } + } + + fn sig_of_closure_no_expectation( + &mut self, + body: &ExprId, + ret_type: &Option, + arg_types: &[Option], + closure_kind: ClosureKind, + ) -> ClosureSignature { + self.supplied_sig_of_closure(body, ret_type, arg_types, closure_kind) + } + + fn sig_of_closure_with_expectation( + &mut self, + body: &ExprId, + ret_type: &Option, + arg_types: &[Option], + closure_kind: ClosureKind, + expected_sig: FnSubst, + ) -> ClosureSignature { + let expected_sig = FnPointer { + num_binders: 0, + sig: FnSig { abi: FnAbi::RustCall, safety: chalk_ir::Safety::Safe, variadic: false }, + substitution: expected_sig, + }; + + // If the expected signature does not match the actual arg types, + // then just return the expected signature + if expected_sig.substitution.0.len(Interner) != arg_types.len() + 1 { + let ret_ty = match ret_type { + Some(type_ref) => self.make_body_ty(*type_ref), + None => self.table.new_type_var(), + }; + return ClosureSignature { expected_sig, ret_ty }; + } + + self.merge_supplied_sig_with_expectation( + body, + ret_type, + arg_types, + closure_kind, + expected_sig, + ) + } + + fn merge_supplied_sig_with_expectation( + &mut self, + body: &ExprId, + ret_type: &Option, + arg_types: &[Option], + closure_kind: ClosureKind, + expected_sig: FnPointer, + ) -> ClosureSignature { + let supplied_sig = self.supplied_sig_of_closure(body, ret_type, arg_types, closure_kind); + + let snapshot = self.table.snapshot(); + if !self.table.unify(&expected_sig.substitution, &supplied_sig.expected_sig.substitution) { + self.table.rollback_to(snapshot); + } + + supplied_sig + } } // The below functions handle capture and closure kind (Fn, FnMut, ..) @@ -208,7 +567,7 @@ impl HirPlace { |_, _, _| { unreachable!("Closure field only happens in MIR"); }, - ctx.owner.module(ctx.db.upcast()).krate(), + ctx.owner.module(ctx.db).krate(), ); } ty @@ -223,7 +582,7 @@ impl HirPlace { kind: MutBorrowKind::Default | MutBorrowKind::TwoPhasedBorrow, }) = current_capture { - if self.projections[len..].iter().any(|it| *it == ProjectionElem::Deref) { + if self.projections[len..].contains(&ProjectionElem::Deref) { current_capture = CaptureKind::ByRef(BorrowKind::Mut { kind: MutBorrowKind::ClosureCapture }); } @@ -282,18 +641,20 @@ impl CapturedItem { match proj { ProjectionElem::Deref => {} ProjectionElem::Field(Either::Left(f)) => { - match &*f.parent.variant_data(db.upcast()) { - VariantData::Record { fields, .. } => { + let variant_data = f.parent.variant_data(db); + match variant_data.shape { + FieldsShape::Record => { result.push('_'); - result.push_str(fields[f.local_id].name.as_str()) + result.push_str(variant_data.fields()[f.local_id].name.as_str()) } - VariantData::Tuple { fields, .. } => { - let index = fields.iter().position(|it| it.0 == f.local_id); + FieldsShape::Tuple => { + let index = + variant_data.fields().iter().position(|it| it.0 == f.local_id); if let Some(index) = index { format_to!(result, "_{index}"); } } - VariantData::Unit => {} + FieldsShape::Unit => {} } } ProjectionElem::Field(Either::Right(f)) => format_to!(result, "_{}", f.index), @@ -307,7 +668,7 @@ impl CapturedItem { } } } - if is_raw_identifier(&result, db.crate_graph()[owner.module(db.upcast()).krate()].edition) { + if is_raw_identifier(&result, owner.module(db).krate().data(db).edition) { result.insert_str(0, "r#"); } result @@ -315,27 +676,31 @@ impl CapturedItem { pub fn display_place_source_code(&self, owner: DefWithBodyId, db: &dyn HirDatabase) -> String { let body = db.body(owner); - let krate = owner.krate(db.upcast()); - let edition = db.crate_graph()[krate].edition; - let mut result = body[self.place.local].name.display(db.upcast(), edition).to_string(); + let krate = owner.krate(db); + let edition = krate.data(db).edition; + let mut result = body[self.place.local].name.display(db, edition).to_string(); for proj in &self.place.projections { match proj { // In source code autoderef kicks in. ProjectionElem::Deref => {} ProjectionElem::Field(Either::Left(f)) => { - let variant_data = f.parent.variant_data(db.upcast()); - match &*variant_data { - VariantData::Record { fields, .. } => format_to!( + let variant_data = f.parent.variant_data(db); + match variant_data.shape { + FieldsShape::Record => format_to!( result, ".{}", - fields[f.local_id].name.display(db.upcast(), edition) + variant_data.fields()[f.local_id].name.display(db, edition) ), - VariantData::Tuple { fields, .. } => format_to!( + FieldsShape::Tuple => format_to!( result, ".{}", - fields.iter().position(|it| it.0 == f.local_id).unwrap_or_default() + variant_data + .fields() + .iter() + .position(|it| it.0 == f.local_id) + .unwrap_or_default() ), - VariantData::Unit => {} + FieldsShape::Unit => {} } } ProjectionElem::Field(Either::Right(f)) => { @@ -367,9 +732,9 @@ impl CapturedItem { pub fn display_place(&self, owner: DefWithBodyId, db: &dyn HirDatabase) -> String { let body = db.body(owner); - let krate = owner.krate(db.upcast()); - let edition = db.crate_graph()[krate].edition; - let mut result = body[self.place.local].name.display(db.upcast(), edition).to_string(); + let krate = owner.krate(db); + let edition = krate.data(db).edition; + let mut result = body[self.place.local].name.display(db, edition).to_string(); let mut field_need_paren = false; for proj in &self.place.projections { match proj { @@ -381,17 +746,18 @@ impl CapturedItem { if field_need_paren { result = format!("({result})"); } - let variant_data = f.parent.variant_data(db.upcast()); - let field = match &*variant_data { - VariantData::Record { fields, .. } => { - fields[f.local_id].name.as_str().to_owned() + let variant_data = f.parent.variant_data(db); + let field = match variant_data.shape { + FieldsShape::Record => { + variant_data.fields()[f.local_id].name.as_str().to_owned() } - VariantData::Tuple { fields, .. } => fields + FieldsShape::Tuple => variant_data + .fields() .iter() .position(|it| it.0 == f.local_id) .unwrap_or_default() .to_string(), - VariantData::Unit => "[missing field]".to_owned(), + FieldsShape::Unit => "[missing field]".to_owned(), }; result = format!("{result}.{field}"); field_need_paren = false; @@ -493,10 +859,7 @@ impl CapturedItemWithoutTy { Ok(BoundVar::new(outer_binder, idx).to_ty(Interner)) } } - let Some(generics) = ctx.generics() else { - return Binders::empty(Interner, ty); - }; - let filler = &mut Filler { db: ctx.db, generics }; + let filler = &mut Filler { db: ctx.db, generics: ctx.generics() }; let result = ty.clone().try_fold_with(filler, DebruijnIndex::INNERMOST).unwrap_or(ty); make_binders(ctx.db, filler.generics, result) } @@ -506,8 +869,8 @@ impl CapturedItemWithoutTy { impl InferenceContext<'_> { fn place_of_expr(&mut self, tgt_expr: ExprId) -> Option { let r = self.place_of_expr_without_adjust(tgt_expr)?; - let default = vec![]; - let adjustments = self.result.expr_adjustments.get(&tgt_expr).unwrap_or(&default); + let adjustments = + self.result.expr_adjustments.get(&tgt_expr).map(|it| &**it).unwrap_or_default(); apply_adjusts_to_place(&mut self.current_capture_span_stack, r, adjustments) } @@ -517,10 +880,8 @@ impl InferenceContext<'_> { return None; } let hygiene = self.body.expr_or_pat_path_hygiene(id); - let result = self - .resolver - .resolve_path_in_value_ns_fully(self.db.upcast(), path, hygiene) - .and_then(|result| match result { + self.resolver.resolve_path_in_value_ns_fully(self.db, path, hygiene).and_then(|result| { + match result { ValueNs::LocalBinding(binding) => { let mir_span = match id { ExprOrPatId::ExprId(id) => MirSpan::ExprId(id), @@ -530,8 +891,8 @@ impl InferenceContext<'_> { Some(HirPlace { local: binding, projections: Vec::new() }) } _ => None, - }); - result + } + }) } /// Changes `current_capture_span_stack` to contain the stack of spans for this expr. @@ -540,7 +901,7 @@ impl InferenceContext<'_> { match &self.body[tgt_expr] { Expr::Path(p) => { let resolver_guard = - self.resolver.update_to_inner_scope(self.db.upcast(), self.owner, tgt_expr); + self.resolver.update_to_inner_scope(self.db, self.owner, tgt_expr); let result = self.path_place(p, tgt_expr.into()); self.resolver.reset_to_guard(resolver_guard); return result; @@ -815,8 +1176,8 @@ impl InferenceContext<'_> { { if let Some(deref_fn) = self .db - .trait_data(deref_trait) - .method_by_name(&Name::new_symbol_root(sym::deref_mut.clone())) + .trait_items(deref_trait) + .method_by_name(&Name::new_symbol_root(sym::deref_mut)) { break 'b deref_fn == f; } @@ -902,7 +1263,7 @@ impl InferenceContext<'_> { &Expr::Assignment { target, value } => { self.walk_expr(value); let resolver_guard = - self.resolver.update_to_inner_scope(self.db.upcast(), self.owner, tgt_expr); + self.resolver.update_to_inner_scope(self.db, self.owner, tgt_expr); match self.place_of_expr(value) { Some(rhs_place) => { self.inside_assignment = true; @@ -961,9 +1322,9 @@ impl InferenceContext<'_> { | Pat::Or(_) => (), Pat::TupleStruct { .. } | Pat::Record { .. } => { if let Some(variant) = self.result.variant_resolution_for_pat(p) { - let adt = variant.adt_id(self.db.upcast()); + let adt = variant.adt_id(self.db); let is_multivariant = match adt { - hir_def::AdtId::EnumId(e) => self.db.enum_data(e).variants.len() != 1, + hir_def::AdtId::EnumId(e) => self.db.enum_variants(e).variants.len() != 1, _ => false, }; if is_multivariant { @@ -1052,7 +1413,7 @@ impl InferenceContext<'_> { |_, _, _| { unreachable!("Closure field only happens in MIR"); }, - self.owner.module(self.db.upcast()).krate(), + self.owner.module(self.db).krate(), ); if ty.as_raw_ptr().is_some() || ty.is_union() { capture.kind = CaptureKind::ByRef(BorrowKind::Shared); @@ -1159,7 +1520,7 @@ impl InferenceContext<'_> { self.consume_place(place) } VariantId::StructId(s) => { - let vd = &*self.db.struct_data(s).variant_data; + let vd = &*self.db.variant_fields(s.into()); for field_pat in args.iter() { let arg = field_pat.pat; let Some(local_id) = vd.field(&field_pat.name) else { @@ -1211,7 +1572,7 @@ impl InferenceContext<'_> { self.consume_place(place) } VariantId::StructId(s) => { - let vd = &*self.db.struct_data(s).variant_data; + let vd = &*self.db.variant_fields(s.into()); let (al, ar) = args.split_at(ellipsis.map_or(args.len(), |it| it as usize)); let fields = vd.fields().iter(); @@ -1340,7 +1701,7 @@ impl InferenceContext<'_> { for (derefed_callee, callee_ty, params, expr) in exprs { if let &Expr::Call { callee, .. } = &self.body[expr] { let mut adjustments = - self.result.expr_adjustments.remove(&callee).unwrap_or_default(); + self.result.expr_adjustments.remove(&callee).unwrap_or_default().into_vec(); self.write_fn_trait_method_resolution( kind, &derefed_callee, @@ -1349,7 +1710,7 @@ impl InferenceContext<'_> { ¶ms, expr, ); - self.result.expr_adjustments.insert(callee, adjustments); + self.result.expr_adjustments.insert(callee, adjustments.into_boxed_slice()); } } } @@ -1387,8 +1748,42 @@ impl InferenceContext<'_> { } } } + assert!(deferred_closures.is_empty(), "we should have analyzed all closures"); result } + + pub(super) fn add_current_closure_dependency(&mut self, dep: ClosureId) { + if let Some(c) = self.current_closure { + if !dep_creates_cycle(&self.closure_dependencies, &mut FxHashSet::default(), c, dep) { + self.closure_dependencies.entry(c).or_default().push(dep); + } + } + + fn dep_creates_cycle( + closure_dependencies: &FxHashMap>, + visited: &mut FxHashSet, + from: ClosureId, + to: ClosureId, + ) -> bool { + if !visited.insert(from) { + return false; + } + + if from == to { + return true; + } + + if let Some(deps) = closure_dependencies.get(&to) { + for dep in deps { + if dep_creates_cycle(closure_dependencies, visited, from, *dep) { + return true; + } + } + } + + false + } + } } /// Call this only when the last span in the stack isn't a split. diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/coerce.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/coerce.rs index acd86b1f3ed8a..39bd90849fe8f 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/coerce.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/coerce.rs @@ -7,15 +7,14 @@ use std::iter; -use chalk_ir::{cast::Cast, BoundVar, Goal, Mutability, TyKind, TyVariableKind}; -use hir_def::{ - hir::ExprId, - lang_item::{LangItem, LangItemTarget}, -}; +use chalk_ir::{BoundVar, Goal, Mutability, TyKind, TyVariableKind, cast::Cast}; +use hir_def::{hir::ExprId, lang_item::LangItem}; use stdx::always; use triomphe::Arc; use crate::{ + Canonical, DomainGoal, FnAbi, FnPointer, FnSig, Guidance, InEnvironment, Interner, Lifetime, + Solution, Substitution, TraitEnvironment, Ty, TyBuilder, TyExt, autoderef::{Autoderef, AutoderefKind}, db::HirDatabase, infer::{ @@ -23,8 +22,6 @@ use crate::{ TypeError, TypeMismatch, }, utils::ClosureSubst, - Canonical, DomainGoal, FnAbi, FnPointer, FnSig, Guidance, InEnvironment, Interner, Lifetime, - Solution, Substitution, TraitEnvironment, Ty, TyBuilder, TyExt, }; use super::unify::InferenceTable; @@ -148,11 +145,11 @@ impl CoerceMany { if let (Ok(result1), Ok(result2)) = (result1, result2) { ctx.table.register_infer_ok(InferOk { value: (), goals: result1.goals }); for &e in &self.expressions { - ctx.write_expr_adj(e, result1.value.0.clone()); + ctx.write_expr_adj(e, result1.value.0.clone().into_boxed_slice()); } ctx.table.register_infer_ok(InferOk { value: (), goals: result2.goals }); if let Some(expr) = expr { - ctx.write_expr_adj(expr, result2.value.0); + ctx.write_expr_adj(expr, result2.value.0.into_boxed_slice()); self.expressions.push(expr); } return self.final_ty = Some(target_ty); @@ -182,7 +179,7 @@ impl CoerceMany { { self.final_ty = Some(res); for &e in &self.expressions { - ctx.write_expr_adj(e, adjustments.clone()); + ctx.write_expr_adj(e, adjustments.clone().into_boxed_slice()); } } else { match cause { @@ -263,7 +260,7 @@ impl InferenceContext<'_> { ) -> Result { let (adjustments, ty) = self.coerce_inner(from_ty, to_ty, coerce_never)?; if let Some(expr) = expr { - self.write_expr_adj(expr, adjustments); + self.write_expr_adj(expr, adjustments.into_boxed_slice()); } Ok(ty) } @@ -701,8 +698,8 @@ impl InferenceTable<'_> { reborrow.as_ref().map_or_else(|| from_ty.clone(), |(_, adj)| adj.target.clone()); let krate = self.trait_env.krate; - let coerce_unsized_trait = match self.db.lang_item(krate, LangItem::CoerceUnsized) { - Some(LangItemTarget::Trait(trait_)) => trait_, + let coerce_unsized_trait = match LangItem::CoerceUnsized.resolve_trait(self.db, krate) { + Some(trait_) => trait_, _ => return Err(TypeError), }; diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/diagnostics.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/diagnostics.rs index e4f5b5ed378dc..e3c4f5562d5c4 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/diagnostics.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/diagnostics.rs @@ -6,13 +6,17 @@ use std::cell::RefCell; use std::ops::{Deref, DerefMut}; use either::Either; -use hir_def::{hir::ExprOrPatId, path::Path, resolver::Resolver, type_ref::TypesMap, TypeOwnerId}; +use hir_def::GenericDefId; +use hir_def::expr_store::ExpressionStore; +use hir_def::expr_store::path::Path; +use hir_def::{hir::ExprOrPatId, resolver::Resolver}; use la_arena::{Idx, RawIdx}; +use crate::lower::LifetimeElisionKind; use crate::{ + InferenceDiagnostic, InferenceTyDiagnosticSource, TyLoweringContext, TyLoweringDiagnostic, db::HirDatabase, lower::path::{PathDiagnosticCallback, PathLoweringContext}, - InferenceDiagnostic, InferenceTyDiagnosticSource, TyLoweringContext, TyLoweringDiagnostic, }; // Unfortunately, this struct needs to use interior mutability (but we encapsulate it) @@ -58,12 +62,17 @@ impl<'a> InferenceTyLoweringContext<'a> { pub(super) fn new( db: &'a dyn HirDatabase, resolver: &'a Resolver, - types_map: &'a TypesMap, - owner: TypeOwnerId, + store: &'a ExpressionStore, diagnostics: &'a Diagnostics, source: InferenceTyDiagnosticSource, + generic_def: GenericDefId, + lifetime_elision: LifetimeElisionKind, ) -> Self { - Self { ctx: TyLoweringContext::new(db, resolver, types_map, owner), diagnostics, source } + Self { + ctx: TyLoweringContext::new(db, resolver, store, generic_def, lifetime_elision), + diagnostics, + source, + } } #[inline] diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs index 80e3ca1fa282e..8084b394d044b 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs @@ -5,17 +5,17 @@ use std::{ mem, }; -use chalk_ir::{cast::Cast, fold::Shift, DebruijnIndex, Mutability, TyVariableKind}; +use chalk_ir::{DebruijnIndex, Mutability, TyVariableKind, cast::Cast}; use either::Either; use hir_def::{ + BlockId, FieldId, GenericDefId, GenericParamId, ItemContainerId, Lookup, TupleFieldId, TupleId, + expr_store::path::{GenericArg, GenericArgs, Path}, hir::{ - ArithOp, Array, AsmOperand, AsmOptions, BinaryOp, ClosureKind, Expr, ExprId, ExprOrPatId, - LabelId, Literal, Pat, PatId, Statement, UnaryOp, + ArithOp, Array, AsmOperand, AsmOptions, BinaryOp, Expr, ExprId, ExprOrPatId, LabelId, + Literal, Pat, PatId, Statement, UnaryOp, generics::GenericParamDataRef, }, lang_item::{LangItem, LangItemTarget}, - path::{GenericArg, GenericArgs, Path}, resolver::ValueNs, - BlockId, FieldId, GenericDefId, GenericParamId, ItemContainerId, Lookup, TupleFieldId, TupleId, }; use hir_expand::name::Name; use intern::sym; @@ -23,34 +23,33 @@ use stdx::always; use syntax::ast::RangeOp; use crate::{ - autoderef::{builtin_deref, deref_by_trait, Autoderef}, + Adjust, Adjustment, AdtId, AutoBorrow, Binders, CallableDefId, CallableSig, DeclContext, + DeclOrigin, IncorrectGenericsLenKind, Interner, Rawness, Scalar, Substitution, + TraitEnvironment, TraitRef, Ty, TyBuilder, TyExt, TyKind, + autoderef::{Autoderef, builtin_deref, deref_by_trait}, consteval, - db::{InternedClosure, InternedCoroutine}, - error_lifetime, - generics::{generics, Generics}, + generics::generics, infer::{ + BreakableKind, coerce::{CoerceMany, CoerceNever, CoercionCause}, find_continuable, pat::contains_explicit_ref_binding, - BreakableKind, }, lang_items::lang_items_for_bin_op, lower::{ - const_or_path_to_chalk, generic_arg_to_chalk, lower_to_chalk_mutability, ParamLoweringMode, + LifetimeElisionKind, ParamLoweringMode, lower_to_chalk_mutability, + path::{GenericArgsLowerer, TypeLikeConst, substs_from_args_and_bindings}, }, - mapping::{from_chalk, ToChalk}, + mapping::{ToChalk, from_chalk}, method_resolution::{self, VisibleFromModule}, primitive::{self, UintTy}, static_lifetime, to_chalk_trait_id, traits::FnTrait, - Adjust, Adjustment, AdtId, AutoBorrow, Binders, CallableDefId, CallableSig, DeclContext, - DeclOrigin, FnAbi, FnPointer, FnSig, FnSubst, Interner, Rawness, Scalar, Substitution, - TraitEnvironment, TraitRef, Ty, TyBuilder, TyExt, TyKind, }; use super::{ - cast::CastCheck, coerce::auto_deref_adjust_steps, find_breakable, BreakableContext, Diverges, - Expectation, InferenceContext, InferenceDiagnostic, TypeMismatch, + BreakableContext, Diverges, Expectation, InferenceContext, InferenceDiagnostic, TypeMismatch, + cast::CastCheck, coerce::auto_deref_adjust_steps, find_breakable, }; #[derive(Clone, Copy, PartialEq, Eq)] @@ -198,14 +197,10 @@ impl InferenceContext<'_> { match &self.body[expr] { // Lang item paths cannot currently be local variables or statics. Expr::Path(Path::LangItem(_, _)) => false, - Expr::Path(Path::Normal(path)) => path.type_anchor().is_none(), + Expr::Path(Path::Normal(path)) => path.type_anchor.is_none(), Expr::Path(path) => self .resolver - .resolve_path_in_value_ns_fully( - self.db.upcast(), - path, - self.body.expr_path_hygiene(expr), - ) + .resolve_path_in_value_ns_fully(self.db, path, self.body.expr_path_hygiene(expr)) .is_none_or(|res| matches!(res, ValueNs::LocalBinding(_) | ValueNs::StaticId(_))), Expr::Underscore => true, Expr::UnaryOp { op: UnaryOp::Deref, .. } => true, @@ -289,7 +284,7 @@ impl InferenceContext<'_> { expected: &Expectation, is_read: ExprIsRead, ) -> Ty { - self.db.unwind_if_cancelled(); + self.db.unwind_if_revision_cancelled(); let ty = match &self.body[tgt_expr] { Expr::Missing => self.err_ty(), @@ -349,8 +344,7 @@ impl InferenceContext<'_> { } Expr::Const(id) => { self.with_breakable_ctx(BreakableKind::Border, None, None, |this| { - let loc = this.db.lookup_intern_anonymous_const(*id); - this.infer_expr(loc.root, expected, ExprIsRead::Yes) + this.infer_expr(*id, expected, ExprIsRead::Yes) }) .1 } @@ -378,117 +372,8 @@ impl InferenceContext<'_> { None => self.result.standard_types.never.clone(), } } - Expr::Closure { body, args, ret_type, arg_types, closure_kind, capture_by: _ } => { - assert_eq!(args.len(), arg_types.len()); - - let mut sig_tys = Vec::with_capacity(arg_types.len() + 1); - - // collect explicitly written argument types - for arg_type in arg_types.iter() { - let arg_ty = match arg_type { - Some(type_ref) => self.make_body_ty(*type_ref), - None => self.table.new_type_var(), - }; - sig_tys.push(arg_ty); - } - - // add return type - let ret_ty = match ret_type { - Some(type_ref) => self.make_body_ty(*type_ref), - None => self.table.new_type_var(), - }; - if let ClosureKind::Async = closure_kind { - sig_tys.push(self.lower_async_block_type_impl_trait(ret_ty.clone(), *body)); - } else { - sig_tys.push(ret_ty.clone()); - } - - let sig_ty = TyKind::Function(FnPointer { - num_binders: 0, - sig: FnSig { - abi: FnAbi::RustCall, - safety: chalk_ir::Safety::Safe, - variadic: false, - }, - substitution: FnSubst( - Substitution::from_iter(Interner, sig_tys.iter().cloned()) - .shifted_in(Interner), - ), - }) - .intern(Interner); - - let (id, ty, resume_yield_tys) = match closure_kind { - ClosureKind::Coroutine(_) => { - // FIXME: report error when there are more than 1 parameter. - let resume_ty = match sig_tys.first() { - // When `sig_tys.len() == 1` the first type is the return type, not the - // first parameter type. - Some(ty) if sig_tys.len() > 1 => ty.clone(), - _ => self.result.standard_types.unit.clone(), - }; - let yield_ty = self.table.new_type_var(); - - let subst = TyBuilder::subst_for_coroutine(self.db, self.owner) - .push(resume_ty.clone()) - .push(yield_ty.clone()) - .push(ret_ty.clone()) - .build(); - - let coroutine_id = self - .db - .intern_coroutine(InternedCoroutine(self.owner, tgt_expr)) - .into(); - let coroutine_ty = TyKind::Coroutine(coroutine_id, subst).intern(Interner); - - (None, coroutine_ty, Some((resume_ty, yield_ty))) - } - ClosureKind::Closure | ClosureKind::Async => { - let closure_id = - self.db.intern_closure(InternedClosure(self.owner, tgt_expr)).into(); - let closure_ty = TyKind::Closure( - closure_id, - TyBuilder::subst_for_closure(self.db, self.owner, sig_ty.clone()), - ) - .intern(Interner); - self.deferred_closures.entry(closure_id).or_default(); - if let Some(c) = self.current_closure { - self.closure_dependencies.entry(c).or_default().push(closure_id); - } - (Some(closure_id), closure_ty, None) - } - }; - - // Eagerly try to relate the closure type with the expected - // type, otherwise we often won't have enough information to - // infer the body. - self.deduce_closure_type_from_expectations(tgt_expr, &ty, &sig_ty, expected); - - // Now go through the argument patterns - for (arg_pat, arg_ty) in args.iter().zip(&sig_tys) { - self.infer_top_pat(*arg_pat, arg_ty, None); - } - - // FIXME: lift these out into a struct - let prev_diverges = mem::replace(&mut self.diverges, Diverges::Maybe); - let prev_closure = mem::replace(&mut self.current_closure, id); - let prev_ret_ty = mem::replace(&mut self.return_ty, ret_ty.clone()); - let prev_ret_coercion = - mem::replace(&mut self.return_coercion, Some(CoerceMany::new(ret_ty))); - let prev_resume_yield_tys = - mem::replace(&mut self.resume_yield_tys, resume_yield_tys); - - self.with_breakable_ctx(BreakableKind::Border, None, None, |this| { - this.infer_return(*body); - }); - - self.diverges = prev_diverges; - self.return_ty = prev_ret_ty; - self.return_coercion = prev_ret_coercion; - self.current_closure = prev_closure; - self.resume_yield_tys = prev_resume_yield_tys; - - ty - } + Expr::Closure { body, args, ret_type, arg_types, closure_kind, capture_by: _ } => self + .infer_closure(body, args, ret_type, arg_types, *closure_kind, tgt_expr, expected), Expr::Call { callee, args, .. } => self.infer_call(tgt_expr, *callee, args, expected), Expr::MethodCall { receiver, args, method_name, generic_args } => self .infer_method_call( @@ -657,16 +542,15 @@ impl InferenceContext<'_> { _ if fields.is_empty() => {} Some(def) => { let field_types = self.db.field_types(def); - let variant_data = def.variant_data(self.db.upcast()); + let variant_data = def.variant_data(self.db); let visibilities = self.db.field_visibilities(def); for field in fields.iter() { let field_def = { match variant_data.field(&field.name) { Some(local_id) => { - if !visibilities[local_id].is_visible_from( - self.db.upcast(), - self.resolver.module(), - ) { + if !visibilities[local_id] + .is_visible_from(self.db, self.resolver.module()) + { self.push_diagnostic( InferenceDiagnostic::NoSuchField { field: field.expr.into(), @@ -772,8 +656,8 @@ impl InferenceContext<'_> { if let Some(deref_trait) = self.resolve_lang_trait(LangItem::Deref) { if let Some(deref_fn) = self .db - .trait_data(deref_trait) - .method_by_name(&Name::new_symbol_root(sym::deref.clone())) + .trait_items(deref_trait) + .method_by_name(&Name::new_symbol_root(sym::deref)) { // FIXME: this is wrong in multiple ways, subst is empty, and we emit it even for builtin deref (note that // the mutability is not wrong, and will be fixed in `self.infer_mut`). @@ -858,7 +742,7 @@ impl InferenceContext<'_> { } else { let rhs_ty = self.infer_expr(value, &Expectation::none(), ExprIsRead::Yes); let resolver_guard = - self.resolver.update_to_inner_scope(self.db.upcast(), self.owner, tgt_expr); + self.resolver.update_to_inner_scope(self.db, self.owner, tgt_expr); self.inside_assignment = true; self.infer_top_pat(target, &rhs_ty, None); self.inside_assignment = false; @@ -921,17 +805,18 @@ impl InferenceContext<'_> { .map_or((self.err_ty(), Vec::new()), |adj| { adj.apply(&mut self.table, base_ty) }); + // mutability will be fixed up in `InferenceContext::infer_mut`; adj.push(Adjustment::borrow( Mutability::Not, self_ty.clone(), self.table.new_lifetime_var(), )); - self.write_expr_adj(*base, adj); + self.write_expr_adj(*base, adj.into_boxed_slice()); if let Some(func) = self .db - .trait_data(index_trait) - .method_by_name(&Name::new_symbol_root(sym::index.clone())) + .trait_items(index_trait) + .method_by_name(&Name::new_symbol_root(sym::index)) { let subst = TyBuilder::subst_for_def(self.db, index_trait, None); if subst.remaining() != 2 { @@ -942,9 +827,9 @@ impl InferenceContext<'_> { } let assoc = self.resolve_ops_index_output(); self.resolve_associated_type_with_params( - self_ty.clone(), + self_ty, assoc, - &[index_ty.clone().cast(Interner)], + &[index_ty.cast(Interner)], ) } else { self.err_ty() @@ -1143,7 +1028,7 @@ impl InferenceContext<'_> { } fn infer_expr_path(&mut self, path: &Path, id: ExprOrPatId, scope_id: ExprId) -> Ty { - let g = self.resolver.update_to_inner_scope(self.db.upcast(), self.owner, scope_id); + let g = self.resolver.update_to_inner_scope(self.db, self.owner, scope_id); let ty = match self.infer_path(path, id) { Some(ty) => ty, None => { @@ -1168,8 +1053,7 @@ impl InferenceContext<'_> { let ret_ty = self.table.new_type_var(); let prev_diverges = mem::replace(&mut self.diverges, Diverges::Maybe); let prev_ret_ty = mem::replace(&mut self.return_ty, ret_ty.clone()); - let prev_ret_coercion = - mem::replace(&mut self.return_coercion, Some(CoerceMany::new(ret_ty.clone()))); + let prev_ret_coercion = self.return_coercion.replace(CoerceMany::new(ret_ty.clone())); // FIXME: We should handle async blocks like we handle closures let expected = &Expectation::has_type(ret_ty); @@ -1258,7 +1142,7 @@ impl InferenceContext<'_> { let Some(trait_) = fn_x.get_id(self.db, self.table.trait_env.krate) else { return; }; - let trait_data = self.db.trait_data(trait_); + let trait_data = self.db.trait_items(trait_); if let Some(func) = trait_data.method_by_name(&fn_x.method_name()) { let subst = TyBuilder::subst_for_def(self.db, trait_, None) .push(callee_ty.clone()) @@ -1426,7 +1310,7 @@ impl InferenceContext<'_> { let trait_func = lang_items_for_bin_op(op).and_then(|(name, lang_item)| { let trait_id = self.resolve_lang_item(lang_item)?.as_trait()?; - let func = self.db.trait_data(trait_id).method_by_name(&name)?; + let func = self.db.trait_items(trait_id).method_by_name(&name)?; Some((trait_id, func)) }); let (trait_, func) = match trait_func { @@ -1472,10 +1356,10 @@ impl InferenceContext<'_> { if let TyKind::Ref(mtbl, lt, _) = p_left.kind(Interner) { self.write_expr_adj( lhs, - vec![Adjustment { + Box::new([Adjustment { kind: Adjust::Borrow(AutoBorrow::Ref(lt.clone(), *mtbl)), target: p_left.clone(), - }], + }]), ); } } @@ -1484,10 +1368,10 @@ impl InferenceContext<'_> { if let TyKind::Ref(mtbl, lt, _) = p_right.kind(Interner) { self.write_expr_adj( rhs, - vec![Adjustment { + Box::new([Adjustment { kind: Adjust::Borrow(AutoBorrow::Ref(lt.clone(), *mtbl)), target: p_right.clone(), - }], + }]), ); } } @@ -1517,7 +1401,7 @@ impl InferenceContext<'_> { expected: &Expectation, ) -> Ty { let coerce_ty = expected.coercion_target_type(&mut self.table); - let g = self.resolver.update_to_inner_scope(self.db.upcast(), self.owner, expr); + let g = self.resolver.update_to_inner_scope(self.db, self.owner, expr); let prev_env = block_id.map(|block_id| { let prev_env = self.table.trait_env.clone(); TraitEnvironment::with_block(&mut self.table.trait_env, block_id); @@ -1556,11 +1440,7 @@ impl InferenceContext<'_> { target_is_read, ) }; - if type_ref.is_some() { - decl_ty - } else { - ty - } + if type_ref.is_some() { decl_ty } else { ty } } else { decl_ty }; @@ -1681,20 +1561,20 @@ impl InferenceContext<'_> { }) }); } - TyKind::Adt(AdtId(hir_def::AdtId::StructId(s)), parameters) => { - let local_id = self.db.struct_data(*s).variant_data.field(name)?; - let field = FieldId { parent: (*s).into(), local_id }; + &TyKind::Adt(AdtId(hir_def::AdtId::StructId(s)), ref parameters) => { + let local_id = self.db.variant_fields(s.into()).field(name)?; + let field = FieldId { parent: s.into(), local_id }; (field, parameters.clone()) } - TyKind::Adt(AdtId(hir_def::AdtId::UnionId(u)), parameters) => { - let local_id = self.db.union_data(*u).variant_data.field(name)?; - let field = FieldId { parent: (*u).into(), local_id }; + &TyKind::Adt(AdtId(hir_def::AdtId::UnionId(u)), ref parameters) => { + let local_id = self.db.variant_fields(u.into()).field(name)?; + let field = FieldId { parent: u.into(), local_id }; (field, parameters.clone()) } _ => return None, }; let is_visible = self.db.field_visibilities(field_id.parent)[field_id.local_id] - .is_visible_from(self.db.upcast(), self.resolver.module()); + .is_visible_from(self.db, self.resolver.module()); if !is_visible { if private_field.is_none() { private_field = Some((field_id, parameters)); @@ -1747,7 +1627,7 @@ impl InferenceContext<'_> { match self.lookup_field(&receiver_ty, name) { Some((ty, field_id, adjustments, is_public)) => { - self.write_expr_adj(receiver, adjustments); + self.write_expr_adj(receiver, adjustments.into_boxed_slice()); self.result.field_resolutions.insert(tgt_expr, field_id); if !is_public { if let Either::Left(field) = field_id { @@ -1781,9 +1661,8 @@ impl InferenceContext<'_> { match resolved { Some((adjust, func, _)) => { let (ty, adjustments) = adjust.apply(&mut self.table, receiver_ty); - let generics = generics(self.db.upcast(), func.into()); - let substs = self.substs_for_method_call(generics, None); - self.write_expr_adj(receiver, adjustments); + let substs = self.substs_for_method_call(tgt_expr, func.into(), None); + self.write_expr_adj(receiver, adjustments.into_boxed_slice()); self.write_method_resolution(tgt_expr, func, substs.clone()); self.check_method_call( @@ -1828,9 +1707,7 @@ impl InferenceContext<'_> { if let TyKind::Closure(c, _) = self.table.resolve_completely(callee_ty.clone()).kind(Interner) { - if let Some(par) = self.current_closure { - self.closure_dependencies.entry(par).or_default().push(*c); - } + self.add_current_closure_dependency(*c); self.deferred_closures.entry(*c).or_default().push(( derefed_callee.clone(), callee_ty.clone(), @@ -1848,7 +1725,7 @@ impl InferenceContext<'_> { tgt_expr, ); } - self.write_expr_adj(callee, adjustments); + self.write_expr_adj(callee, adjustments.into_boxed_slice()); (params, ret_ty) } None => { @@ -1932,10 +1809,9 @@ impl InferenceContext<'_> { } let (ty, adjustments) = adjust.apply(&mut self.table, receiver_ty); - self.write_expr_adj(receiver, adjustments); + self.write_expr_adj(receiver, adjustments.into_boxed_slice()); - let generics = generics(self.db.upcast(), func.into()); - let substs = self.substs_for_method_call(generics, generic_args); + let substs = self.substs_for_method_call(tgt_expr, func.into(), generic_args); self.write_method_resolution(tgt_expr, func, substs.clone()); self.check_method_call( tgt_expr, @@ -1952,7 +1828,7 @@ impl InferenceContext<'_> { let field_with_same_name_exists = match self.lookup_field(&receiver_ty, method_name) { Some((ty, field_id, adjustments, _public)) => { - self.write_expr_adj(receiver, adjustments); + self.write_expr_adj(receiver, adjustments.into_boxed_slice()); self.result.field_resolutions.insert(tgt_expr, field_id); Some(ty) } @@ -1985,8 +1861,7 @@ impl InferenceContext<'_> { let recovered = match assoc_func_with_same_name { Some(f) => { - let generics = generics(self.db.upcast(), f.into()); - let substs = self.substs_for_method_call(generics, generic_args); + let substs = self.substs_for_method_call(tgt_expr, f.into(), generic_args); let f = self .db .value_ty(f.into()) @@ -2176,87 +2051,147 @@ impl InferenceContext<'_> { fn substs_for_method_call( &mut self, - def_generics: Generics, + expr: ExprId, + def: GenericDefId, generic_args: Option<&GenericArgs>, ) -> Substitution { - let ( - parent_params, - has_self_param, - type_params, - const_params, - impl_trait_params, - lifetime_params, - ) = def_generics.provenance_split(); - assert!(!has_self_param); // method shouldn't have another Self param - let total_len = - parent_params + type_params + const_params + impl_trait_params + lifetime_params; - let mut substs = Vec::with_capacity(total_len); - - // handle provided arguments - if let Some(generic_args) = generic_args { - // if args are provided, it should be all of them, but we can't rely on that - let self_params = type_params + const_params + lifetime_params; - - let mut args = generic_args.args.iter().peekable(); - for kind_id in def_generics.iter_self_id().take(self_params) { - let arg = args.peek(); - let arg = match (kind_id, arg) { - // Lifetimes can be inferred. - // Once we have implemented lifetime inference correctly, - // this should be handled in a proper way. - ( - GenericParamId::LifetimeParamId(_), - None | Some(GenericArg::Type(_) | GenericArg::Const(_)), - ) => error_lifetime().cast(Interner), - - // If we run out of `generic_args`, stop pushing substs - (_, None) => break, - - // Normal cases - (_, Some(_)) => generic_arg_to_chalk( - self.db, - kind_id, - args.next().unwrap(), // `peek()` is `Some(_)`, so guaranteed no panic - self, - &self.body.types, - |this, type_ref| this.make_body_ty(type_ref), - |this, c, ty| { - const_or_path_to_chalk( - this.db, - &this.resolver, - this.owner.into(), - ty, - c, - ParamLoweringMode::Placeholder, - || this.generics(), - DebruijnIndex::INNERMOST, - ) - }, - |this, lt_ref| this.make_body_lifetime(lt_ref), - ), - }; + struct LowererCtx<'a, 'b> { + ctx: &'a mut InferenceContext<'b>, + expr: ExprId, + } - substs.push(arg); + impl GenericArgsLowerer for LowererCtx<'_, '_> { + fn report_len_mismatch( + &mut self, + def: GenericDefId, + provided_count: u32, + expected_count: u32, + kind: IncorrectGenericsLenKind, + ) { + self.ctx.push_diagnostic(InferenceDiagnostic::MethodCallIncorrectGenericsLen { + expr: self.expr, + provided_count, + expected_count, + kind, + def, + }); + } + + fn report_arg_mismatch( + &mut self, + param_id: GenericParamId, + arg_idx: u32, + has_self_arg: bool, + ) { + self.ctx.push_diagnostic(InferenceDiagnostic::MethodCallIncorrectGenericsOrder { + expr: self.expr, + param_id, + arg_idx, + has_self_arg, + }); + } + + fn provided_kind( + &mut self, + param_id: GenericParamId, + param: GenericParamDataRef<'_>, + arg: &GenericArg, + ) -> crate::GenericArg { + match (param, arg) { + (GenericParamDataRef::LifetimeParamData(_), GenericArg::Lifetime(lifetime)) => { + self.ctx.make_body_lifetime(*lifetime).cast(Interner) + } + (GenericParamDataRef::TypeParamData(_), GenericArg::Type(type_ref)) => { + self.ctx.make_body_ty(*type_ref).cast(Interner) + } + (GenericParamDataRef::ConstParamData(_), GenericArg::Const(konst)) => { + let GenericParamId::ConstParamId(const_id) = param_id else { + unreachable!("non-const param ID for const param"); + }; + let const_ty = self.ctx.db.const_param_ty(const_id); + self.ctx.make_body_const(*konst, const_ty).cast(Interner) + } + _ => unreachable!("unmatching param kinds were passed to `provided_kind()`"), + } } - }; - // Handle everything else as unknown. This also handles generic arguments for the method's - // parent (impl or trait), which should come after those for the method. - for (id, _data) in def_generics.iter().skip(substs.len()) { - match id { - GenericParamId::TypeParamId(_) => { - substs.push(self.table.new_type_var().cast(Interner)) + fn provided_type_like_const( + &mut self, + const_ty: Ty, + arg: TypeLikeConst<'_>, + ) -> crate::Const { + match arg { + TypeLikeConst::Path(path) => self.ctx.make_path_as_body_const(path, const_ty), + TypeLikeConst::Infer => self.ctx.table.new_const_var(const_ty), } - GenericParamId::ConstParamId(id) => { - substs.push(self.table.new_const_var(self.db.const_param_ty(id)).cast(Interner)) + } + + fn inferred_kind( + &mut self, + _def: GenericDefId, + param_id: GenericParamId, + _param: GenericParamDataRef<'_>, + _infer_args: bool, + _preceding_args: &[crate::GenericArg], + ) -> crate::GenericArg { + // Always create an inference var, even when `infer_args == false`. This helps with diagnostics, + // and I think it's also required in the presence of `impl Trait` (that must be inferred). + match param_id { + GenericParamId::TypeParamId(_) => self.ctx.table.new_type_var().cast(Interner), + GenericParamId::ConstParamId(const_id) => self + .ctx + .table + .new_const_var(self.ctx.db.const_param_ty(const_id)) + .cast(Interner), + GenericParamId::LifetimeParamId(_) => { + self.ctx.table.new_lifetime_var().cast(Interner) + } } - GenericParamId::LifetimeParamId(_) => { - substs.push(self.table.new_lifetime_var().cast(Interner)) + } + + fn parent_arg(&mut self, param_id: GenericParamId) -> crate::GenericArg { + match param_id { + GenericParamId::TypeParamId(_) => self.ctx.table.new_type_var().cast(Interner), + GenericParamId::ConstParamId(const_id) => self + .ctx + .table + .new_const_var(self.ctx.db.const_param_ty(const_id)) + .cast(Interner), + GenericParamId::LifetimeParamId(_) => { + self.ctx.table.new_lifetime_var().cast(Interner) + } } } + + fn report_elided_lifetimes_in_path( + &mut self, + _def: GenericDefId, + _expected_count: u32, + _hard_error: bool, + ) { + unreachable!("we set `LifetimeElisionKind::Infer`") + } + + fn report_elision_failure(&mut self, _def: GenericDefId, _expected_count: u32) { + unreachable!("we set `LifetimeElisionKind::Infer`") + } + + fn report_missing_lifetime(&mut self, _def: GenericDefId, _expected_count: u32) { + unreachable!("we set `LifetimeElisionKind::Infer`") + } } - assert_eq!(substs.len(), total_len); - Substitution::from_iter(Interner, substs) + + substs_from_args_and_bindings( + self.db, + self.body, + generic_args, + def, + true, + LifetimeElisionKind::Infer, + false, + None, + &mut LowererCtx { ctx: self, expr }, + ) } fn register_obligations_for_call(&mut self, callable_ty: &Ty) { @@ -2264,7 +2199,7 @@ impl InferenceContext<'_> { if let TyKind::FnDef(fn_def, parameters) = callable_ty.kind(Interner) { let def: CallableDefId = from_chalk(self.db, *fn_def); let generic_predicates = - self.db.generic_predicates(GenericDefId::from_callable(self.db.upcast(), def)); + self.db.generic_predicates(GenericDefId::from_callable(self.db, def)); for predicate in generic_predicates.iter() { let (predicate, binders) = predicate .clone() @@ -2276,15 +2211,14 @@ impl InferenceContext<'_> { // add obligation for trait implementation, if this is a trait method match def { CallableDefId::FunctionId(f) => { - if let ItemContainerId::TraitId(trait_) = f.lookup(self.db.upcast()).container { + if let ItemContainerId::TraitId(trait_) = f.lookup(self.db).container { // construct a TraitRef - let params_len = parameters.len(Interner); - let trait_params_len = generics(self.db.upcast(), trait_.into()).len(); + let trait_params_len = generics(self.db, trait_.into()).len(); let substs = Substitution::from_iter( Interner, // The generic parameters for the trait come after those for the // function. - ¶meters.as_slice(Interner)[params_len - trait_params_len..], + ¶meters.as_slice(Interner)[..trait_params_len], ); self.push_obligation( TraitRef { trait_id: to_chalk_trait_id(trait_), substitution: substs } @@ -2311,7 +2245,7 @@ impl InferenceContext<'_> { _ => return Default::default(), }; - let data = self.db.function_data(func); + let data = self.db.function_signature(func); let Some(legacy_const_generics_indices) = &data.legacy_const_generics_indices else { return Default::default(); }; @@ -2402,11 +2336,7 @@ impl InferenceContext<'_> { BinaryOp::Assignment { .. } => unreachable!("handled above"), }; - if is_assign { - self.result.standard_types.unit.clone() - } else { - output_ty - } + if is_assign { self.result.standard_types.unit.clone() } else { output_ty } } fn is_builtin_binop(&mut self, lhs: &Ty, rhs: &Ty, op: BinaryOp) -> bool { @@ -2468,7 +2398,7 @@ impl InferenceContext<'_> { } } - fn with_breakable_ctx( + pub(super) fn with_breakable_ctx( &mut self, kind: BreakableKind, ty: Option, diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/mutability.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/mutability.rs index d74a383f44ef4..ac450c0b5591a 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/mutability.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/mutability.rs @@ -1,7 +1,7 @@ //! Finds if an expression is an immutable context or a mutable context, which is used in selecting //! between `Deref` and `DerefMut` or `Index` and `IndexMut` or similar. -use chalk_ir::{cast::Cast, Mutability}; +use chalk_ir::{Mutability, cast::Cast}; use hir_def::{ hir::{ Array, AsmOperand, BinaryOp, BindingAnnotation, Expr, ExprId, Pat, PatId, Statement, @@ -13,9 +13,9 @@ use hir_expand::name::Name; use intern::sym; use crate::{ - infer::{expr::ExprIsRead, Expectation, InferenceContext}, - lower::lower_to_chalk_mutability, Adjust, Adjustment, AutoBorrow, Interner, OverloadedDeref, TyBuilder, TyKind, + infer::{Expectation, InferenceContext, expr::ExprIsRead}, + lower::lower_to_chalk_mutability, }; impl InferenceContext<'_> { @@ -69,8 +69,7 @@ impl InferenceContext<'_> { } } Expr::Const(id) => { - let loc = self.db.lookup_intern_anonymous_const(*id); - self.infer_mut_expr(loc.root, Mutability::Not); + self.infer_mut_expr(*id, Mutability::Not); } Expr::Let { pat, expr } => self.infer_mut_expr(*expr, self.pat_bound_mutability(*pat)), Expr::Block { id: _, statements, tail, label: _ } @@ -127,15 +126,13 @@ impl InferenceContext<'_> { &Expr::Index { base, index } => { if mutability == Mutability::Mut { if let Some((f, _)) = self.result.method_resolutions.get_mut(&tgt_expr) { - if let Some(index_trait) = self - .db - .lang_item(self.table.trait_env.krate, LangItem::IndexMut) - .and_then(|l| l.as_trait()) + if let Some(index_trait) = + LangItem::IndexMut.resolve_trait(self.db, self.table.trait_env.krate) { if let Some(index_fn) = self .db - .trait_data(index_trait) - .method_by_name(&Name::new_symbol_root(sym::index_mut.clone())) + .trait_items(index_trait) + .method_by_name(&Name::new_symbol_root(sym::index_mut)) { *f = index_fn; let mut base_ty = None; @@ -184,10 +181,8 @@ impl InferenceContext<'_> { let mut mutability = mutability; if let Some((f, _)) = self.result.method_resolutions.get_mut(&tgt_expr) { if mutability == Mutability::Mut { - if let Some(deref_trait) = self - .db - .lang_item(self.table.trait_env.krate, LangItem::DerefMut) - .and_then(|l| l.as_trait()) + if let Some(deref_trait) = + LangItem::DerefMut.resolve_trait(self.db, self.table.trait_env.krate) { let ty = self.result.type_of_expr.get(*expr); let is_mut_ptr = ty.is_some_and(|ty| { @@ -201,8 +196,8 @@ impl InferenceContext<'_> { mutability = Mutability::Not; } else if let Some(deref_fn) = self .db - .trait_data(deref_trait) - .method_by_name(&Name::new_symbol_root(sym::deref_mut.clone())) + .trait_items(deref_trait) + .method_by_name(&Name::new_symbol_root(sym::deref_mut)) { *f = deref_fn; } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/pat.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/pat.rs index db93116f1071a..a9a3265858e4e 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/pat.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/pat.rs @@ -3,24 +3,24 @@ use std::iter::repeat_with; use hir_def::{ - expr_store::Body, - hir::{Binding, BindingAnnotation, BindingId, Expr, ExprId, Literal, Pat, PatId}, - path::Path, HasModule, + expr_store::{Body, path::Path}, + hir::{Binding, BindingAnnotation, BindingId, Expr, ExprId, Literal, Pat, PatId}, }; use hir_expand::name::Name; use stdx::TupleExt; use crate::{ + DeclContext, DeclOrigin, InferenceDiagnostic, Interner, Mutability, Scalar, Substitution, Ty, + TyBuilder, TyExt, TyKind, consteval::{self, try_const_usize, usize_const}, infer::{ - coerce::CoerceNever, expr::ExprIsRead, BindingMode, Expectation, InferenceContext, - TypeMismatch, + BindingMode, Expectation, InferenceContext, TypeMismatch, coerce::CoerceNever, + expr::ExprIsRead, }, lower::lower_to_chalk_mutability, primitive::UintTy, - static_lifetime, DeclContext, DeclOrigin, InferenceDiagnostic, Interner, Mutability, Scalar, - Substitution, Ty, TyBuilder, TyExt, TyKind, + static_lifetime, }; impl InferenceContext<'_> { @@ -38,7 +38,7 @@ impl InferenceContext<'_> { decl: Option, ) -> Ty { let (ty, def) = self.resolve_variant(id.into(), path, true); - let var_data = def.map(|it| it.variant_data(self.db.upcast())); + let var_data = def.map(|it| it.variant_data(self.db)); if let Some(variant) = def { self.write_variant_resolution(id.into(), variant); } @@ -60,7 +60,7 @@ impl InferenceContext<'_> { _ if subs.is_empty() => {} Some(def) => { let field_types = self.db.field_types(def); - let variant_data = def.variant_data(self.db.upcast()); + let variant_data = def.variant_data(self.db); let visibilities = self.db.field_visibilities(def); let (pre, post) = match ellipsis { @@ -79,7 +79,7 @@ impl InferenceContext<'_> { match variant_data.field(&Name::new_tuple_field(i)) { Some(local_id) => { if !visibilities[local_id] - .is_visible_from(self.db.upcast(), self.resolver.module()) + .is_visible_from(self.db, self.resolver.module()) { // FIXME(DIAGNOSE): private tuple field } @@ -129,7 +129,7 @@ impl InferenceContext<'_> { _ if subs.len() == 0 => {} Some(def) => { let field_types = self.db.field_types(def); - let variant_data = def.variant_data(self.db.upcast()); + let variant_data = def.variant_data(self.db); let visibilities = self.db.field_visibilities(def); let substs = ty.as_adt().map(TupleExt::tail); @@ -139,7 +139,7 @@ impl InferenceContext<'_> { match variant_data.field(&name) { Some(local_id) => { if !visibilities[local_id] - .is_visible_from(self.db.upcast(), self.resolver.module()) + .is_visible_from(self.db, self.resolver.module()) { self.push_diagnostic(InferenceDiagnostic::NoSuchField { field: inner.into(), @@ -435,7 +435,7 @@ impl InferenceContext<'_> { decl: Option, ) -> Ty { let (expectation_type, expectation_lt) = match expected.as_reference() { - Some((inner_ty, lifetime, _exp_mut)) => (inner_ty.clone(), lifetime.clone()), + Some((inner_ty, lifetime, _exp_mut)) => (inner_ty.clone(), lifetime), None => { let inner_ty = self.table.new_type_var(); let inner_lt = self.table.new_lifetime_var(); @@ -594,11 +594,10 @@ impl InferenceContext<'_> { } let len = before.len() + suffix.len(); - let size = - consteval::usize_const(self.db, Some(len as u128), self.owner.krate(self.db.upcast())); + let size = consteval::usize_const(self.db, Some(len as u128), self.owner.krate(self.db)); let elem_ty = self.table.new_type_var(); - let array_ty = TyKind::Array(elem_ty.clone(), size).intern(Interner); + let array_ty = TyKind::Array(elem_ty, size).intern(Interner); Some(array_ty) } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/path.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/path.rs index 6254bc12392b0..9d4bbe53464dc 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/path.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/path.rs @@ -2,21 +2,23 @@ use chalk_ir::cast::Cast; use hir_def::{ - path::{Path, PathSegment}, - resolver::{ResolveValueResult, TypeNs, ValueNs}, AdtId, AssocItemId, GenericDefId, ItemContainerId, Lookup, + expr_store::path::{Path, PathSegment}, + resolver::{ResolveValueResult, TypeNs, ValueNs}, }; use hir_expand::name::Name; use stdx::never; use crate::{ + InferenceDiagnostic, Interner, Substitution, TraitRef, TraitRefExt, Ty, TyBuilder, TyExt, + TyKind, ValueTyDefId, builder::ParamKind, consteval, error_lifetime, generics::generics, infer::diagnostics::InferenceTyLoweringContext as TyLoweringContext, + lower::LifetimeElisionKind, method_resolution::{self, VisibleFromModule}, - to_chalk_trait_id, InferenceDiagnostic, Interner, Substitution, TraitRef, TraitRefExt, Ty, - TyBuilder, TyExt, TyKind, ValueTyDefId, + to_chalk_trait_id, }; use super::{ExprOrPatId, InferenceContext, InferenceTyDiagnosticSource}; @@ -63,10 +65,10 @@ impl InferenceContext<'_> { never!("uninferred pattern?"); None } - } + }; } ValueNs::ImplSelf(impl_id) => { - let generics = crate::generics::generics(self.db.upcast(), impl_id.into()); + let generics = crate::generics::generics(self.db, impl_id.into()); let substs = generics.placeholder_subst(self.db); let ty = self.db.impl_self_ty(impl_id).substitute(Interner, &substs); return if let Some((AdtId::StructId(struct_id), substs)) = ty.as_adt() { @@ -81,7 +83,7 @@ impl InferenceContext<'_> { }; } ValueNs::GenericParam(it) => { - return Some(ValuePathResolution::NonGeneric(self.db.const_param_ty(it))) + return Some(ValuePathResolution::NonGeneric(self.db.const_param_ty(it))); } }; @@ -99,16 +101,14 @@ impl InferenceContext<'_> { if let Some(last_segment) = last_segment { path_ctx.set_current_segment(last_segment) } - path_ctx.substs_from_path(value_def, true) + path_ctx.substs_from_path(value_def, true, false) }); let substs = substs.as_slice(Interner); if let ValueNs::EnumVariantId(_) = value { - let mut it = self_subst - .as_ref() - .map_or(&[][..], |s| s.as_slice(Interner)) + let mut it = substs .iter() - .chain(substs) + .chain(self_subst.as_ref().map_or(&[][..], |s| s.as_slice(Interner))) .cloned(); let builder = TyBuilder::subst_for_def(self.db, generic_def, None); let substs = builder @@ -127,13 +127,13 @@ impl InferenceContext<'_> { } let parent_substs = self_subst.or_else(|| { - let generics = generics(self.db.upcast(), generic_def); + let generics = generics(self.db, generic_def); let parent_params_len = generics.parent_generics()?.len(); - let parent_args = &substs[substs.len() - parent_params_len..]; + let parent_args = &substs[..parent_params_len]; Some(Substitution::from_iter(Interner, parent_args)) }); let parent_substs_len = parent_substs.as_ref().map_or(0, |s| s.len(Interner)); - let mut it = substs.iter().take(substs.len() - parent_substs_len).cloned(); + let mut it = substs.iter().skip(parent_substs_len).cloned(); let builder = TyBuilder::subst_for_def(self.db, generic_def, parent_substs); let substs = builder .fill(|x| { @@ -158,10 +158,11 @@ impl InferenceContext<'_> { let mut ctx = TyLoweringContext::new( self.db, &self.resolver, - &self.body.types, - self.owner.into(), + self.body, &self.diagnostics, InferenceTyDiagnosticSource::Body, + self.generic_def, + LifetimeElisionKind::Infer, ); let mut path_ctx = if no_diagnostics { ctx.at_path_forget_diagnostics(path) @@ -176,7 +177,7 @@ impl InferenceContext<'_> { let ty = self.table.normalize_associated_types_in(ty); path_ctx.ignore_last_segment(); - let (ty, _) = path_ctx.lower_ty_relative_path(ty, orig_ns); + let (ty, _) = path_ctx.lower_ty_relative_path(ty, orig_ns, true); drop_ctx(ctx, no_diagnostics); let ty = self.table.insert_type_vars(ty); let ty = self.table.normalize_associated_types_in(ty); @@ -206,7 +207,7 @@ impl InferenceContext<'_> { (TypeNs::TraitId(trait_), true) => { let self_ty = self.table.new_type_var(); let trait_ref = - path_ctx.lower_trait_ref_from_resolved_path(trait_, self_ty); + path_ctx.lower_trait_ref_from_resolved_path(trait_, self_ty, true); drop_ctx(ctx, no_diagnostics); self.resolve_trait_assoc_item(trait_ref, last_segment, id) } @@ -254,15 +255,15 @@ impl InferenceContext<'_> { // We need to add `Self: Trait` obligation when `def` is a trait assoc item. let container = match def { - GenericDefId::FunctionId(id) => id.lookup(self.db.upcast()).container, - GenericDefId::ConstId(id) => id.lookup(self.db.upcast()).container, + GenericDefId::FunctionId(id) => id.lookup(self.db).container, + GenericDefId::ConstId(id) => id.lookup(self.db).container, _ => return, }; if let ItemContainerId::TraitId(trait_) = container { - let param_len = generics(self.db.upcast(), def).len_self(); + let parent_len = generics(self.db, def).parent_generics().map_or(0, |g| g.len_self()); let parent_subst = - Substitution::from_iter(Interner, subst.iter(Interner).skip(param_len)); + Substitution::from_iter(Interner, subst.iter(Interner).take(parent_len)); let trait_ref = TraitRef { trait_id: to_chalk_trait_id(trait_), substitution: parent_subst }; self.push_obligation(trait_ref.cast(Interner)); @@ -277,10 +278,10 @@ impl InferenceContext<'_> { ) -> Option<(ValueNs, Substitution)> { let trait_ = trait_ref.hir_trait_id(); let item = - self.db.trait_data(trait_).items.iter().map(|(_name, id)| *id).find_map(|item| { + self.db.trait_items(trait_).items.iter().map(|(_name, id)| *id).find_map(|item| { match item { AssocItemId::FunctionId(func) => { - if segment.name == &self.db.function_data(func).name { + if segment.name == &self.db.function_signature(func).name { Some(AssocItemId::FunctionId(func)) } else { None @@ -288,7 +289,7 @@ impl InferenceContext<'_> { } AssocItemId::ConstId(konst) => { - if self.db.const_data(konst).name.as_ref() == Some(segment.name) { + if self.db.const_signature(konst).name.as_ref() == Some(segment.name) { Some(AssocItemId::ConstId(konst)) } else { None @@ -350,10 +351,8 @@ impl InferenceContext<'_> { let (item, visible) = res?; let (def, container) = match item { - AssocItemId::FunctionId(f) => { - (ValueNs::FunctionId(f), f.lookup(self.db.upcast()).container) - } - AssocItemId::ConstId(c) => (ValueNs::ConstId(c), c.lookup(self.db.upcast()).container), + AssocItemId::FunctionId(f) => (ValueNs::FunctionId(f), f.lookup(self.db).container), + AssocItemId::ConstId(c) => (ValueNs::ConstId(c), c.lookup(self.db).container), AssocItemId::TypeAliasId(_) => unreachable!(), }; let substs = match container { @@ -398,7 +397,7 @@ impl InferenceContext<'_> { Some((AdtId::EnumId(e), subst)) => (e, subst), _ => return None, }; - let enum_data = self.db.enum_data(enum_id); + let enum_data = self.db.enum_variants(enum_id); let variant = enum_data.variant(name)?; self.write_variant_resolution(id, variant.into()); Some((ValueNs::EnumVariantId(variant), subst.clone())) diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/unify.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/unify.rs index 8a8992cf372da..631b571465fe1 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/unify.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/unify.rs @@ -1,15 +1,15 @@ //! Unification and canonicalization logic. -use std::{fmt, iter, mem}; +use std::{fmt, mem}; use chalk_ir::{ - cast::Cast, fold::TypeFoldable, interner::HasInterner, zip::Zip, CanonicalVarKind, FloatTy, - IntTy, TyVariableKind, UniverseIndex, + CanonicalVarKind, FloatTy, IntTy, TyVariableKind, UniverseIndex, cast::Cast, + fold::TypeFoldable, interner::HasInterner, zip::Zip, }; use chalk_solve::infer::ParameterEnaVariableExt; use either::Either; use ena::unify::UnifyKey; -use hir_def::{lang_item::LangItem, AdtId}; +use hir_def::{AdtId, lang_item::LangItem}; use hir_expand::name::Name; use intern::sym; use rustc_hash::FxHashMap; @@ -18,12 +18,12 @@ use triomphe::Arc; use super::{InferOk, InferResult, InferenceContext, TypeError}; use crate::{ + AliasEq, AliasTy, BoundVar, Canonical, Const, ConstValue, DebruijnIndex, DomainGoal, + GenericArg, GenericArgData, Goal, GoalData, Guidance, InEnvironment, InferenceVar, Interner, + Lifetime, OpaqueTyId, ParamKind, ProjectionTy, ProjectionTyExt, Scalar, Solution, Substitution, + TraitEnvironment, TraitRef, Ty, TyBuilder, TyExt, TyKind, VariableKind, WhereClause, consteval::unknown_const, db::HirDatabase, fold_generic_args, fold_tys_and_consts, - to_chalk_trait_id, traits::FnTrait, AliasEq, AliasTy, BoundVar, Canonical, Const, ConstValue, - DebruijnIndex, DomainGoal, GenericArg, GenericArgData, Goal, GoalData, Guidance, InEnvironment, - InferenceVar, Interner, Lifetime, OpaqueTyId, ParamKind, ProjectionTy, ProjectionTyExt, Scalar, - Solution, Substitution, TraitEnvironment, TraitRef, Ty, TyBuilder, TyExt, TyKind, VariableKind, - WhereClause, + to_chalk_trait_id, traits::FnTrait, }; impl InferenceContext<'_> { @@ -364,6 +364,64 @@ impl<'a> InferenceTable<'a> { ) } + /// Works almost same as [`Self::normalize_associated_types_in`], but this also resolves shallow + /// the inference variables + pub(crate) fn eagerly_normalize_and_resolve_shallow_in(&mut self, ty: T) -> T + where + T: HasInterner + TypeFoldable, + { + fn eagerly_resolve_ty( + table: &mut InferenceTable<'_>, + ty: Ty, + mut tys: SmallVec<[Ty; N]>, + ) -> Ty { + if tys.contains(&ty) { + return ty; + } + tys.push(ty.clone()); + + match ty.kind(Interner) { + TyKind::Alias(AliasTy::Projection(proj_ty)) => { + let ty = table.normalize_projection_ty(proj_ty.clone()); + eagerly_resolve_ty(table, ty, tys) + } + TyKind::InferenceVar(..) => { + let ty = table.resolve_ty_shallow(&ty); + eagerly_resolve_ty(table, ty, tys) + } + _ => ty, + } + } + + fold_tys_and_consts( + ty, + |e, _| match e { + Either::Left(ty) => { + Either::Left(eagerly_resolve_ty::<8>(self, ty, SmallVec::new())) + } + Either::Right(c) => Either::Right(match &c.data(Interner).value { + chalk_ir::ConstValue::Concrete(cc) => match &cc.interned { + crate::ConstScalar::UnevaluatedConst(c_id, subst) => { + // FIXME: same as `normalize_associated_types_in` + if subst.len(Interner) == 0 { + if let Ok(eval) = self.db.const_eval(*c_id, subst.clone(), None) { + eval + } else { + unknown_const(c.data(Interner).ty.clone()) + } + } else { + unknown_const(c.data(Interner).ty.clone()) + } + } + _ => c, + }, + _ => c, + }), + }, + DebruijnIndex::INNERMOST, + ) + } + pub(crate) fn normalize_projection_ty(&mut self, proj_ty: ProjectionTy) -> Ty { let var = self.new_type_var(); let alias_eq = AliasEq { alias: AliasTy::Projection(proj_ty), ty: var.clone() }; @@ -386,7 +444,7 @@ impl<'a> InferenceTable<'a> { } fn extend_type_variable_table(&mut self, to_index: usize) { let count = to_index - self.type_variable_table.len() + 1; - self.type_variable_table.extend(iter::repeat(TypeVariableFlags::default()).take(count)); + self.type_variable_table.extend(std::iter::repeat_n(TypeVariableFlags::default(), count)); } fn new_var(&mut self, kind: TyVariableKind, diverging: bool) -> Ty { @@ -795,13 +853,13 @@ impl<'a> InferenceTable<'a> { num_args: usize, ) -> Option<(FnTrait, Vec, Ty)> { for (fn_trait_name, output_assoc_name, subtraits) in [ - (FnTrait::FnOnce, sym::Output.clone(), &[FnTrait::Fn, FnTrait::FnMut][..]), - (FnTrait::AsyncFnMut, sym::CallRefFuture.clone(), &[FnTrait::AsyncFn]), - (FnTrait::AsyncFnOnce, sym::CallOnceFuture.clone(), &[]), + (FnTrait::FnOnce, sym::Output, &[FnTrait::Fn, FnTrait::FnMut][..]), + (FnTrait::AsyncFnMut, sym::CallRefFuture, &[FnTrait::AsyncFn]), + (FnTrait::AsyncFnOnce, sym::CallOnceFuture, &[]), ] { let krate = self.trait_env.krate; let fn_trait = fn_trait_name.get_id(self.db, krate)?; - let trait_data = self.db.trait_data(fn_trait); + let trait_data = self.db.trait_items(fn_trait); let output_assoc_type = trait_data.associated_type_by_name(&Name::new_symbol_root(output_assoc_name))?; @@ -890,11 +948,7 @@ impl<'a> InferenceTable<'a> { TyKind::Error => self.new_type_var(), TyKind::InferenceVar(..) => { let ty_resolved = self.resolve_ty_shallow(&ty); - if ty_resolved.is_unknown() { - self.new_type_var() - } else { - ty - } + if ty_resolved.is_unknown() { self.new_type_var() } else { ty } } _ => ty, } @@ -922,15 +976,33 @@ impl<'a> InferenceTable<'a> { /// Check if given type is `Sized` or not pub(crate) fn is_sized(&mut self, ty: &Ty) -> bool { + fn short_circuit_trivial_tys(ty: &Ty) -> Option { + match ty.kind(Interner) { + TyKind::Scalar(..) + | TyKind::Ref(..) + | TyKind::Raw(..) + | TyKind::Never + | TyKind::FnDef(..) + | TyKind::Array(..) + | TyKind::Function(..) => Some(true), + TyKind::Slice(..) | TyKind::Str | TyKind::Dyn(..) => Some(false), + _ => None, + } + } + let mut ty = ty.clone(); + ty = self.eagerly_normalize_and_resolve_shallow_in(ty); + if let Some(sized) = short_circuit_trivial_tys(&ty) { + return sized; + } + { let mut structs = SmallVec::<[_; 8]>::new(); // Must use a loop here and not recursion because otherwise users will conduct completely // artificial examples of structs that have themselves as the tail field and complain r-a crashes. while let Some((AdtId::StructId(id), subst)) = ty.as_adt() { - let struct_data = self.db.struct_data(id); - if let Some((last_field, _)) = struct_data.variant_data.fields().iter().next_back() - { + let struct_data = self.db.variant_fields(id.into()); + if let Some((last_field, _)) = struct_data.fields().iter().next_back() { let last_field_ty = self.db.field_types(id.into())[last_field] .clone() .substitute(Interner, subst); @@ -942,36 +1014,22 @@ impl<'a> InferenceTable<'a> { // Structs can have DST as its last field and such cases are not handled // as unsized by the chalk, so we do this manually. ty = last_field_ty; + ty = self.eagerly_normalize_and_resolve_shallow_in(ty); + if let Some(sized) = short_circuit_trivial_tys(&ty) { + return sized; + } } else { break; }; } } - // Early return for some obvious types - if matches!( - ty.kind(Interner), - TyKind::Scalar(..) - | TyKind::Ref(..) - | TyKind::Raw(..) - | TyKind::Never - | TyKind::FnDef(..) - | TyKind::Array(..) - | TyKind::Function(_) - ) { - return true; - } - - let Some(sized) = self - .db - .lang_item(self.trait_env.krate, LangItem::Sized) - .and_then(|sized| sized.as_trait()) - else { + let Some(sized) = LangItem::Sized.resolve_trait(self.db, self.trait_env.krate) else { return false; }; let sized_pred = WhereClause::Implemented(TraitRef { trait_id: to_chalk_trait_id(sized), - substitution: Substitution::from1(Interner, ty.clone()), + substitution: Substitution::from1(Interner, ty), }); let goal = GoalData::DomainGoal(chalk_ir::DomainGoal::Holds(sized_pred)).intern(Interner); matches!(self.try_obligation(goal), Some(Solution::Unique(_))) @@ -1032,7 +1090,7 @@ mod resolve { .assert_ty_ref(Interner) .clone(); } - let result = if let Some(known_ty) = self.table.var_unification_table.probe_var(var) { + if let Some(known_ty) = self.table.var_unification_table.probe_var(var) { // known_ty may contain other variables that are known by now self.var_stack.push(var); let result = known_ty.fold_with(self, outer_binder); @@ -1043,8 +1101,7 @@ mod resolve { (self.fallback)(var, VariableKind::Ty(kind), default, outer_binder) .assert_ty_ref(Interner) .clone() - }; - result + } } fn fold_inference_const( diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/inhabitedness.rs b/src/tools/rust-analyzer/crates/hir-ty/src/inhabitedness.rs index d6039c548b6f5..e0c3279d3fb01 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/inhabitedness.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/inhabitedness.rs @@ -2,14 +2,14 @@ use std::ops::ControlFlow::{self, Break, Continue}; use chalk_ir::{ - visit::{TypeSuperVisitable, TypeVisitable, TypeVisitor}, DebruijnIndex, + visit::{TypeSuperVisitable, TypeVisitable, TypeVisitor}, }; -use hir_def::{visibility::Visibility, AdtId, EnumVariantId, ModuleId, VariantId}; +use hir_def::{AdtId, EnumVariantId, ModuleId, VariantId, visibility::Visibility}; use rustc_hash::FxHashSet; use crate::{ - consteval::try_const_usize, db::HirDatabase, Binders, Interner, Substitution, Ty, TyKind, + Binders, Interner, Substitution, Ty, TyKind, consteval::try_const_usize, db::HirDatabase, }; // FIXME: Turn this into a query, it can be quite slow @@ -98,7 +98,7 @@ impl UninhabitedFrom<'_> { AdtId::UnionId(_) => CONTINUE_OPAQUELY_INHABITED, AdtId::StructId(s) => self.visit_variant(s.into(), subst), AdtId::EnumId(e) => { - let enum_data = self.db.enum_data(e); + let enum_data = self.db.enum_variants(e); for &(variant, _) in enum_data.variants.iter() { let variant_inhabitedness = self.visit_variant(variant.into(), subst); @@ -117,7 +117,7 @@ impl UninhabitedFrom<'_> { variant: VariantId, subst: &Substitution, ) -> ControlFlow { - let variant_data = self.db.variant_data(variant); + let variant_data = self.db.variant_fields(variant); let fields = variant_data.fields(); if fields.is_empty() { return CONTINUE_OPAQUELY_INHABITED; @@ -139,7 +139,7 @@ impl UninhabitedFrom<'_> { ty: &Binders, subst: &Substitution, ) -> ControlFlow { - if vis.is_none_or(|it| it.is_visible_from(self.db.upcast(), self.target_mod)) { + if vis.is_none_or(|it| it.is_visible_from(self.db, self.target_mod)) { let ty = ty.clone().substitute(Interner, subst); ty.visit_with(self, DebruijnIndex::INNERMOST) } else { diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/interner.rs b/src/tools/rust-analyzer/crates/hir-ty/src/interner.rs index 804c3aea3a5c9..fecb3f4242a92 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/interner.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/interner.rs @@ -2,16 +2,15 @@ //! representation of the various objects Chalk deals with (types, goals etc.). use crate::{ - chalk_db, tls, AliasTy, CanonicalVarKind, CanonicalVarKinds, ClosureId, Const, ConstData, - ConstScalar, Constraint, Constraints, FnAbi, FnDefId, GenericArg, GenericArgData, Goal, - GoalData, Goals, InEnvironment, Lifetime, LifetimeData, OpaqueTy, OpaqueTyId, ProgramClause, - ProgramClauseData, ProgramClauses, ProjectionTy, QuantifiedWhereClause, QuantifiedWhereClauses, - Substitution, Ty, TyData, TyKind, VariableKind, VariableKinds, + AliasTy, CanonicalVarKind, CanonicalVarKinds, ClosureId, Const, ConstData, ConstScalar, + Constraint, Constraints, FnAbi, FnDefId, GenericArg, GenericArgData, Goal, GoalData, Goals, + InEnvironment, Lifetime, LifetimeData, OpaqueTy, OpaqueTyId, ProgramClause, ProgramClauseData, + ProgramClauses, ProjectionTy, QuantifiedWhereClause, QuantifiedWhereClauses, Substitution, Ty, + TyData, TyKind, VariableKind, VariableKinds, chalk_db, tls, }; -use base_db::ra_salsa::InternId; use chalk_ir::{ProgramClauseImplication, SeparatorTraitRef, Variance}; use hir_def::TypeAliasId; -use intern::{impl_internable, Interned}; +use intern::{Interned, impl_internable}; use smallvec::SmallVec; use std::fmt; use triomphe::Arc; @@ -44,7 +43,7 @@ impl_internable!( InternedWrapper, InternedWrapper, InternedWrapper>, - InternedWrapper>, + InternedWrapper>, InternedWrapper>, InternedWrapper>, ); @@ -61,14 +60,14 @@ impl chalk_ir::interner::Interner for Interner { type InternedGoal = Arc; type InternedGoals = Vec; type InternedSubstitution = Interned>>; - type InternedProgramClauses = Interned>>; + type InternedProgramClauses = Interned>>; type InternedProgramClause = ProgramClauseData; type InternedQuantifiedWhereClauses = Interned>>; type InternedVariableKinds = Interned>>; type InternedCanonicalVarKinds = Interned>>; type InternedConstraints = Vec>; type InternedVariances = SmallVec<[Variance; 16]>; - type DefId = InternId; + type DefId = salsa::Id; type InternedAdtId = hir_def::AdtId; type Identifier = TypeAliasId; type FnAbi = FnAbi; @@ -98,7 +97,7 @@ impl chalk_ir::interner::Interner for Interner { opaque_ty_id: OpaqueTyId, fmt: &mut fmt::Formatter<'_>, ) -> Option { - Some(write!(fmt, "OpaqueTy#{}", opaque_ty_id.0)) + Some(write!(fmt, "OpaqueTy#{:?}", opaque_ty_id.0)) } fn debug_fn_def_id(fn_def_id: FnDefId, fmt: &mut fmt::Formatter<'_>) -> Option { diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/lang_items.rs b/src/tools/rust-analyzer/crates/hir-ty/src/lang_items.rs index ff9c52fbb6c17..3ef7f50c9a2ea 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/lang_items.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/lang_items.rs @@ -1,6 +1,6 @@ //! Functions to detect special lang items -use hir_def::{data::adt::StructFlags, lang_item::LangItem, AdtId}; +use hir_def::{AdtId, lang_item::LangItem, signatures::StructFlags}; use hir_expand::name::Name; use intern::sym; @@ -8,13 +8,7 @@ use crate::db::HirDatabase; pub fn is_box(db: &dyn HirDatabase, adt: AdtId) -> bool { let AdtId::StructId(id) = adt else { return false }; - db.struct_data(id).flags.contains(StructFlags::IS_BOX) -} - -pub fn is_unsafe_cell(db: &dyn HirDatabase, adt: AdtId) -> bool { - let AdtId::StructId(id) = adt else { return false }; - - db.struct_data(id).flags.contains(StructFlags::IS_UNSAFE_CELL) + db.struct_signature(id).flags.contains(StructFlags::IS_BOX) } pub fn lang_items_for_bin_op(op: syntax::ast::BinaryOp) -> Option<(Name, LangItem)> { @@ -22,53 +16,43 @@ pub fn lang_items_for_bin_op(op: syntax::ast::BinaryOp) -> Option<(Name, LangIte Some(match op { BinaryOp::LogicOp(_) => return None, BinaryOp::ArithOp(aop) => match aop { - ArithOp::Add => (Name::new_symbol_root(sym::add.clone()), LangItem::Add), - ArithOp::Mul => (Name::new_symbol_root(sym::mul.clone()), LangItem::Mul), - ArithOp::Sub => (Name::new_symbol_root(sym::sub.clone()), LangItem::Sub), - ArithOp::Div => (Name::new_symbol_root(sym::div.clone()), LangItem::Div), - ArithOp::Rem => (Name::new_symbol_root(sym::rem.clone()), LangItem::Rem), - ArithOp::Shl => (Name::new_symbol_root(sym::shl.clone()), LangItem::Shl), - ArithOp::Shr => (Name::new_symbol_root(sym::shr.clone()), LangItem::Shr), - ArithOp::BitXor => (Name::new_symbol_root(sym::bitxor.clone()), LangItem::BitXor), - ArithOp::BitOr => (Name::new_symbol_root(sym::bitor.clone()), LangItem::BitOr), - ArithOp::BitAnd => (Name::new_symbol_root(sym::bitand.clone()), LangItem::BitAnd), + ArithOp::Add => (Name::new_symbol_root(sym::add), LangItem::Add), + ArithOp::Mul => (Name::new_symbol_root(sym::mul), LangItem::Mul), + ArithOp::Sub => (Name::new_symbol_root(sym::sub), LangItem::Sub), + ArithOp::Div => (Name::new_symbol_root(sym::div), LangItem::Div), + ArithOp::Rem => (Name::new_symbol_root(sym::rem), LangItem::Rem), + ArithOp::Shl => (Name::new_symbol_root(sym::shl), LangItem::Shl), + ArithOp::Shr => (Name::new_symbol_root(sym::shr), LangItem::Shr), + ArithOp::BitXor => (Name::new_symbol_root(sym::bitxor), LangItem::BitXor), + ArithOp::BitOr => (Name::new_symbol_root(sym::bitor), LangItem::BitOr), + ArithOp::BitAnd => (Name::new_symbol_root(sym::bitand), LangItem::BitAnd), }, BinaryOp::Assignment { op: Some(aop) } => match aop { - ArithOp::Add => (Name::new_symbol_root(sym::add_assign.clone()), LangItem::AddAssign), - ArithOp::Mul => (Name::new_symbol_root(sym::mul_assign.clone()), LangItem::MulAssign), - ArithOp::Sub => (Name::new_symbol_root(sym::sub_assign.clone()), LangItem::SubAssign), - ArithOp::Div => (Name::new_symbol_root(sym::div_assign.clone()), LangItem::DivAssign), - ArithOp::Rem => (Name::new_symbol_root(sym::rem_assign.clone()), LangItem::RemAssign), - ArithOp::Shl => (Name::new_symbol_root(sym::shl_assign.clone()), LangItem::ShlAssign), - ArithOp::Shr => (Name::new_symbol_root(sym::shr_assign.clone()), LangItem::ShrAssign), - ArithOp::BitXor => { - (Name::new_symbol_root(sym::bitxor_assign.clone()), LangItem::BitXorAssign) - } - ArithOp::BitOr => { - (Name::new_symbol_root(sym::bitor_assign.clone()), LangItem::BitOrAssign) - } - ArithOp::BitAnd => { - (Name::new_symbol_root(sym::bitand_assign.clone()), LangItem::BitAndAssign) - } + ArithOp::Add => (Name::new_symbol_root(sym::add_assign), LangItem::AddAssign), + ArithOp::Mul => (Name::new_symbol_root(sym::mul_assign), LangItem::MulAssign), + ArithOp::Sub => (Name::new_symbol_root(sym::sub_assign), LangItem::SubAssign), + ArithOp::Div => (Name::new_symbol_root(sym::div_assign), LangItem::DivAssign), + ArithOp::Rem => (Name::new_symbol_root(sym::rem_assign), LangItem::RemAssign), + ArithOp::Shl => (Name::new_symbol_root(sym::shl_assign), LangItem::ShlAssign), + ArithOp::Shr => (Name::new_symbol_root(sym::shr_assign), LangItem::ShrAssign), + ArithOp::BitXor => (Name::new_symbol_root(sym::bitxor_assign), LangItem::BitXorAssign), + ArithOp::BitOr => (Name::new_symbol_root(sym::bitor_assign), LangItem::BitOrAssign), + ArithOp::BitAnd => (Name::new_symbol_root(sym::bitand_assign), LangItem::BitAndAssign), }, BinaryOp::CmpOp(cop) => match cop { - CmpOp::Eq { negated: false } => { - (Name::new_symbol_root(sym::eq.clone()), LangItem::PartialEq) - } - CmpOp::Eq { negated: true } => { - (Name::new_symbol_root(sym::ne.clone()), LangItem::PartialEq) - } + CmpOp::Eq { negated: false } => (Name::new_symbol_root(sym::eq), LangItem::PartialEq), + CmpOp::Eq { negated: true } => (Name::new_symbol_root(sym::ne), LangItem::PartialEq), CmpOp::Ord { ordering: Ordering::Less, strict: false } => { - (Name::new_symbol_root(sym::le.clone()), LangItem::PartialOrd) + (Name::new_symbol_root(sym::le), LangItem::PartialOrd) } CmpOp::Ord { ordering: Ordering::Less, strict: true } => { - (Name::new_symbol_root(sym::lt.clone()), LangItem::PartialOrd) + (Name::new_symbol_root(sym::lt), LangItem::PartialOrd) } CmpOp::Ord { ordering: Ordering::Greater, strict: false } => { - (Name::new_symbol_root(sym::ge.clone()), LangItem::PartialOrd) + (Name::new_symbol_root(sym::ge), LangItem::PartialOrd) } CmpOp::Ord { ordering: Ordering::Greater, strict: true } => { - (Name::new_symbol_root(sym::gt.clone()), LangItem::PartialOrd) + (Name::new_symbol_root(sym::gt), LangItem::PartialOrd) } }, BinaryOp::Assignment { op: None } => return None, diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/layout.rs b/src/tools/rust-analyzer/crates/hir-ty/src/layout.rs index bbd419d9659bf..c253fe25672f5 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/layout.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/layout.rs @@ -2,14 +2,13 @@ use std::fmt; -use base_db::ra_salsa::Cycle; use chalk_ir::{AdtId, FloatTy, IntTy, TyKind, UintTy}; use hir_def::{ + LocalFieldId, StructId, layout::{ Float, Integer, LayoutCalculator, LayoutCalculatorError, LayoutData, Primitive, ReprOptions, Scalar, StructKind, TargetDataLayout, WrappingRange, }, - LocalFieldId, StructId, }; use la_arena::{Idx, RawIdx}; use rustc_abi::AddressSpace; @@ -18,17 +17,15 @@ use rustc_index::IndexVec; use triomphe::Arc; use crate::{ + Interner, ProjectionTy, Substitution, TraitEnvironment, Ty, consteval::try_const_usize, db::{HirDatabase, InternedClosure}, infer::normalize, utils::ClosureSubst, - Interner, ProjectionTy, Substitution, TraitEnvironment, Ty, }; -pub use self::{ - adt::{layout_of_adt_query, layout_of_adt_recover}, - target::target_data_layout_query, -}; +pub(crate) use self::adt::layout_of_adt_cycle_result; +pub use self::{adt::layout_of_adt_query, target::target_data_layout_query}; mod adt; mod target; @@ -168,7 +165,7 @@ pub fn layout_of_ty_query( let result = match kind { TyKind::Adt(AdtId(def), subst) => { if let hir_def::AdtId::StructId(s) = def { - let data = db.struct_data(*s); + let data = db.struct_signature(*s); let repr = data.repr.unwrap_or_default(); if repr.simd() { return layout_of_simd_ty(db, *s, repr.packed(), subst, trait_env, &target); @@ -322,7 +319,7 @@ pub fn layout_of_ty_query( return Err(LayoutError::NotImplemented); } crate::ImplTraitId::AsyncBlockTypeImplTrait(_, _) => { - return Err(LayoutError::NotImplemented) + return Err(LayoutError::NotImplemented); } } } @@ -344,7 +341,7 @@ pub fn layout_of_ty_query( cx.calc.univariant(&fields, &ReprOptions::default(), StructKind::AlwaysSized)? } TyKind::Coroutine(_, _) | TyKind::CoroutineWitness(_, _) => { - return Err(LayoutError::NotImplemented) + return Err(LayoutError::NotImplemented); } TyKind::Error => return Err(LayoutError::HasErrorType), TyKind::AssociatedType(id, subst) => { @@ -367,28 +364,36 @@ pub fn layout_of_ty_query( Ok(Arc::new(result)) } -pub fn layout_of_ty_recover( +pub(crate) fn layout_of_ty_cycle_result( _: &dyn HirDatabase, - _: &Cycle, - _: &Ty, - _: &Arc, + _: Ty, + _: Arc, ) -> Result, LayoutError> { Err(LayoutError::RecursiveTypeWithoutIndirection) } fn struct_tail_erasing_lifetimes(db: &dyn HirDatabase, pointee: Ty) -> Ty { match pointee.kind(Interner) { - TyKind::Adt(AdtId(hir_def::AdtId::StructId(i)), subst) => { - let data = db.struct_data(*i); - let mut it = data.variant_data.fields().iter().rev(); + &TyKind::Adt(AdtId(hir_def::AdtId::StructId(i)), ref subst) => { + let data = db.variant_fields(i.into()); + let mut it = data.fields().iter().rev(); match it.next() { Some((f, _)) => { - let last_field_ty = field_ty(db, (*i).into(), f, subst); + let last_field_ty = field_ty(db, i.into(), f, subst); struct_tail_erasing_lifetimes(db, last_field_ty) } None => pointee, } } + TyKind::Tuple(_, subst) => { + if let Some(last_field_ty) = + subst.iter(Interner).last().and_then(|arg| arg.ty(Interner)) + { + struct_tail_erasing_lifetimes(db, last_field_ty.clone()) + } else { + pointee + } + } _ => pointee, } } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/layout/adt.rs b/src/tools/rust-analyzer/crates/hir-ty/src/layout/adt.rs index eb4729fab8426..3a020bf050d68 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/layout/adt.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/layout/adt.rs @@ -2,11 +2,10 @@ use std::{cmp, ops::Bound}; -use base_db::ra_salsa::Cycle; use hir_def::{ - data::adt::VariantData, - layout::{Integer, ReprOptions, TargetDataLayout}, AdtId, VariantId, + layout::{Integer, ReprOptions, TargetDataLayout}, + signatures::{StructFlags, VariantFields}, }; use intern::sym; use rustc_index::IndexVec; @@ -14,10 +13,9 @@ use smallvec::SmallVec; use triomphe::Arc; use crate::{ - db::HirDatabase, - lang_items::is_unsafe_cell, - layout::{field_ty, Layout, LayoutError}, Substitution, TraitEnvironment, + db::HirDatabase, + layout::{Layout, LayoutError, field_ty}, }; use super::LayoutCx; @@ -34,33 +32,37 @@ pub fn layout_of_adt_query( }; let dl = &*target; let cx = LayoutCx::new(dl); - let handle_variant = |def: VariantId, var: &VariantData| { + let handle_variant = |def: VariantId, var: &VariantFields| { var.fields() .iter() .map(|(fd, _)| db.layout_of_ty(field_ty(db, def, fd, &subst), trait_env.clone())) .collect::, _>>() }; - let (variants, repr) = match def { + let (variants, repr, is_special_no_niche) = match def { AdtId::StructId(s) => { - let data = db.struct_data(s); + let sig = db.struct_signature(s); let mut r = SmallVec::<[_; 1]>::new(); - r.push(handle_variant(s.into(), &data.variant_data)?); - (r, data.repr.unwrap_or_default()) + r.push(handle_variant(s.into(), &db.variant_fields(s.into()))?); + ( + r, + sig.repr.unwrap_or_default(), + sig.flags.intersects(StructFlags::IS_UNSAFE_CELL | StructFlags::IS_UNSAFE_PINNED), + ) } AdtId::UnionId(id) => { - let data = db.union_data(id); + let data = db.union_signature(id); let mut r = SmallVec::new(); - r.push(handle_variant(id.into(), &data.variant_data)?); - (r, data.repr.unwrap_or_default()) + r.push(handle_variant(id.into(), &db.variant_fields(id.into()))?); + (r, data.repr.unwrap_or_default(), false) } AdtId::EnumId(e) => { - let data = db.enum_data(e); - let r = data + let variants = db.enum_variants(e); + let r = variants .variants .iter() - .map(|&(v, _)| handle_variant(v.into(), &db.enum_variant_data(v).variant_data)) + .map(|&(v, _)| handle_variant(v.into(), &db.variant_fields(v.into()))) .collect::, _>>()?; - (r, data.repr.unwrap_or_default()) + (r, db.enum_signature(e).repr.unwrap_or_default(), false) } }; let variants = variants @@ -75,12 +77,12 @@ pub fn layout_of_adt_query( &repr, &variants, matches!(def, AdtId::EnumId(..)), - is_unsafe_cell(db, def), + is_special_no_niche, layout_scalar_valid_range(db, def), |min, max| repr_discr(dl, &repr, min, max).unwrap_or((Integer::I8, false)), variants.iter_enumerated().filter_map(|(id, _)| { let AdtId::EnumId(e) = def else { return None }; - let d = db.const_eval_discriminant(db.enum_data(e).variants[id.0].0).ok()?; + let d = db.const_eval_discriminant(db.enum_variants(e).variants[id.0].0).ok()?; Some((id, d)) }), // FIXME: The current code for niche-filling relies on variant indices @@ -125,18 +127,14 @@ fn layout_scalar_valid_range(db: &dyn HirDatabase, def: AdtId) -> (Bound, } Bound::Unbounded }; - ( - get(&sym::rustc_layout_scalar_valid_range_start), - get(&sym::rustc_layout_scalar_valid_range_end), - ) + (get(sym::rustc_layout_scalar_valid_range_start), get(sym::rustc_layout_scalar_valid_range_end)) } -pub fn layout_of_adt_recover( +pub(crate) fn layout_of_adt_cycle_result( _: &dyn HirDatabase, - _: &Cycle, - _: &AdtId, - _: &Substitution, - _: &Arc, + _: AdtId, + _: Substitution, + _: Arc, ) -> Result, LayoutError> { Err(LayoutError::RecursiveTypeWithoutIndirection) } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/layout/target.rs b/src/tools/rust-analyzer/crates/hir-ty/src/layout/target.rs index 7d77f6d0731a3..e1e1c44996cde 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/layout/target.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/layout/target.rs @@ -1,6 +1,6 @@ //! Target dependent parameters needed for layouts -use base_db::CrateId; +use base_db::Crate; use hir_def::layout::TargetDataLayout; use rustc_abi::{AlignFromBytesError, TargetDataLayoutErrors}; use triomphe::Arc; @@ -9,9 +9,9 @@ use crate::db::HirDatabase; pub fn target_data_layout_query( db: &dyn HirDatabase, - krate: CrateId, + krate: Crate, ) -> Result, Arc> { - match &db.crate_workspace_data()[&krate].data_layout { + match &krate.workspace_data(db).data_layout { Ok(it) => match TargetDataLayout::parse_from_llvm_datalayout_string(it) { Ok(it) => Ok(Arc::new(it)), Err(e) => { diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/layout/tests.rs b/src/tools/rust-analyzer/crates/hir-ty/src/layout/tests.rs index 8b74b7328bd83..cc7d74f4fb0a3 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/layout/tests.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/layout/tests.rs @@ -1,17 +1,17 @@ use chalk_ir::{AdtId, TyKind}; use either::Either; use hir_def::db::DefDatabase; -use project_model::{toolchain_info::QueryConfig, Sysroot}; +use project_model::{Sysroot, toolchain_info::QueryConfig}; use rustc_hash::FxHashMap; use syntax::ToSmolStr; use test_fixture::WithFixture; use triomphe::Arc; use crate::{ + Interner, Substitution, db::HirDatabase, layout::{Layout, LayoutError}, test_db::TestDB, - Interner, Substitution, }; mod closure; @@ -38,27 +38,36 @@ fn eval_goal( let adt_or_type_alias_id = file_ids .into_iter() .find_map(|file_id| { - let module_id = db.module_for_file(file_id.file_id()); + let module_id = db.module_for_file(file_id.file_id(&db)); let def_map = module_id.def_map(&db); let scope = &def_map[module_id.local_id].scope; let adt_or_type_alias_id = scope.declarations().find_map(|x| match x { hir_def::ModuleDefId::AdtId(x) => { let name = match x { - hir_def::AdtId::StructId(x) => { - db.struct_data(x).name.display_no_db(file_id.edition()).to_smolstr() - } - hir_def::AdtId::UnionId(x) => { - db.union_data(x).name.display_no_db(file_id.edition()).to_smolstr() - } - hir_def::AdtId::EnumId(x) => { - db.enum_data(x).name.display_no_db(file_id.edition()).to_smolstr() - } + hir_def::AdtId::StructId(x) => db + .struct_signature(x) + .name + .display_no_db(file_id.edition(&db)) + .to_smolstr(), + hir_def::AdtId::UnionId(x) => db + .union_signature(x) + .name + .display_no_db(file_id.edition(&db)) + .to_smolstr(), + hir_def::AdtId::EnumId(x) => db + .enum_signature(x) + .name + .display_no_db(file_id.edition(&db)) + .to_smolstr(), }; (name == "Goal").then_some(Either::Left(x)) } hir_def::ModuleDefId::TypeAliasId(x) => { - let name = - db.type_alias_data(x).name.display_no_db(file_id.edition()).to_smolstr(); + let name = db + .type_alias_signature(x) + .name + .display_no_db(file_id.edition(&db)) + .to_smolstr(); (name == "Goal").then_some(Either::Right(x)) } _ => None, @@ -94,14 +103,15 @@ fn eval_expr( ); let (db, file_id) = TestDB::with_single_file(&ra_fixture); - let module_id = db.module_for_file(file_id.file_id()); + let module_id = db.module_for_file(file_id.file_id(&db)); let def_map = module_id.def_map(&db); let scope = &def_map[module_id.local_id].scope; let function_id = scope .declarations() .find_map(|x| match x { hir_def::ModuleDefId::FunctionId(x) => { - let name = db.function_data(x).name.display_no_db(file_id.edition()).to_smolstr(); + let name = + db.function_signature(x).name.display_no_db(file_id.edition(&db)).to_smolstr(); (name == "main").then_some(x) } _ => None, @@ -111,7 +121,7 @@ fn eval_expr( let b = hir_body .bindings .iter() - .find(|x| x.1.name.display_no_db(file_id.edition()).to_smolstr() == "goal") + .find(|x| x.1.name.display_no_db(file_id.edition(&db)).to_smolstr() == "goal") .unwrap() .0; let infer = db.infer(function_id.into()); @@ -284,6 +294,18 @@ fn repr_packed() { check_size_and_align("#[repr(Rust, packed(5))] struct Goal(i32);", "", 4, 1); } +#[test] +fn multiple_repr_attrs() { + size_and_align!( + #[repr(C)] + #[repr(packed)] + struct Goal { + id: i32, + u: u8, + } + ) +} + #[test] fn generic() { size_and_align! { @@ -468,6 +490,16 @@ fn tuple() { } } +#[test] +fn tuple_ptr_with_dst_tail() { + size_and_align!( + struct Goal(*const ([u8],)); + ); + size_and_align!( + struct Goal(*const (u128, [u8])); + ); +} + #[test] fn non_zero_and_non_null() { size_and_align! { @@ -490,10 +522,7 @@ fn niche_optimization() { } #[test] -fn const_eval() { - size_and_align! { - struct Goal([i32; 2 + 2]); - } +fn const_eval_simple() { size_and_align! { const X: usize = 5; struct Goal([i32; X]); @@ -505,6 +534,15 @@ fn const_eval() { struct Ar([T; foo::BAR]); struct Goal(Ar>); } +} + +#[test] +// FIXME +#[should_panic] +fn const_eval_complex() { + size_and_align! { + struct Goal([i32; 2 + 2]); + } size_and_align! { type Goal = [u8; 2 + 2]; } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs b/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs index cc02b71f05c19..128569d55dc9b 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs @@ -56,21 +56,20 @@ mod variance; use std::hash::Hash; -use base_db::ra_salsa::InternValueTrivial; use chalk_ir::{ + NoSolution, fold::{Shift, TypeFoldable}, interner::HasInterner, - NoSolution, }; use either::Either; -use hir_def::{hir::ExprId, type_ref::Rawness, CallableDefId, GeneralConstId, TypeOrConstParamId}; +use hir_def::{CallableDefId, GeneralConstId, TypeOrConstParamId, hir::ExprId, type_ref::Rawness}; use hir_expand::name::Name; -use indexmap::{map::Entry, IndexMap}; -use intern::{sym, Symbol}; +use indexmap::{IndexMap, map::Entry}; +use intern::{Symbol, sym}; use la_arena::{Arena, Idx}; use mir::{MirEvalError, VTableMap}; use rustc_hash::{FxBuildHasher, FxHashMap, FxHashSet}; -use syntax::ast::{make, ConstArg}; +use syntax::ast::{ConstArg, make}; use traits::FnTrait; use triomphe::Arc; @@ -87,16 +86,16 @@ pub use builder::{ParamKind, TyBuilder}; pub use chalk_ext::*; pub use drop::DropGlue; pub use infer::{ + Adjust, Adjustment, AutoBorrow, BindingMode, InferenceDiagnostic, InferenceResult, + InferenceTyDiagnosticSource, OverloadedDeref, PointerCast, cast::CastError, closure::{CaptureKind, CapturedItem}, - could_coerce, could_unify, could_unify_deeply, Adjust, Adjustment, AutoBorrow, BindingMode, - InferenceDiagnostic, InferenceResult, InferenceTyDiagnosticSource, OverloadedDeref, - PointerCast, + could_coerce, could_unify, could_unify_deeply, }; pub use interner::Interner; pub use lower::{ - associated_type_shorthand_candidates, diagnostics::*, ImplTraitLoweringMode, ParamLoweringMode, - TyDefId, TyLoweringContext, ValueTyDefId, + ImplTraitLoweringMode, LifetimeElisionKind, ParamLoweringMode, TyDefId, TyLoweringContext, + ValueTyDefId, associated_type_shorthand_candidates, diagnostics::*, }; pub use mapping::{ from_assoc_type_id, from_chalk_trait_id, from_foreign_def_id, from_placeholder_idx, @@ -106,13 +105,13 @@ pub use mapping::{ pub use method_resolution::check_orphan_rules; pub use target_feature::TargetFeatures; pub use traits::TraitEnvironment; -pub use utils::{all_super_traits, direct_super_traits, is_fn_unsafe_to_call, Unsafety}; +pub use utils::{Unsafety, all_super_traits, direct_super_traits, is_fn_unsafe_to_call}; pub use variance::Variance; pub use chalk_ir::{ + AdtId, BoundVar, DebruijnIndex, Mutability, Safety, Scalar, TyVariableKind, cast::Cast, visit::{TypeSuperVisitable, TypeVisitable, TypeVisitor}, - AdtId, BoundVar, DebruijnIndex, Mutability, Safety, Scalar, TyVariableKind, }; pub type ForeignDefId = chalk_ir::ForeignDefId; @@ -302,7 +301,7 @@ impl Hash for ConstScalar { /// Return an index of a parameter in the generic type parameter list by it's id. pub fn param_idx(db: &dyn HirDatabase, id: TypeOrConstParamId) -> Option { - generics::generics(db.upcast(), id.parent).type_or_const_param_idx(id) + generics::generics(db, id.parent).type_or_const_param_idx(id) } pub(crate) fn wrap_empty_binders(value: T) -> Binders @@ -348,20 +347,24 @@ pub(crate) fn make_binders>( generics: &Generics, value: T, ) -> Binders { - Binders::new( - VariableKinds::from_iter( - Interner, - generics.iter_id().map(|x| match x { - hir_def::GenericParamId::ConstParamId(id) => { - chalk_ir::VariableKind::Const(db.const_param_ty(id)) - } - hir_def::GenericParamId::TypeParamId(_) => { - chalk_ir::VariableKind::Ty(chalk_ir::TyVariableKind::General) - } - hir_def::GenericParamId::LifetimeParamId(_) => chalk_ir::VariableKind::Lifetime, - }), - ), - value, + Binders::new(variable_kinds_from_iter(db, generics.iter_id()), value) +} + +pub(crate) fn variable_kinds_from_iter( + db: &dyn HirDatabase, + iter: impl Iterator, +) -> VariableKinds { + VariableKinds::from_iter( + Interner, + iter.map(|x| match x { + hir_def::GenericParamId::ConstParamId(id) => { + chalk_ir::VariableKind::Const(db.const_param_ty(id)) + } + hir_def::GenericParamId::TypeParamId(_) => { + chalk_ir::VariableKind::Ty(chalk_ir::TyVariableKind::General) + } + hir_def::GenericParamId::LifetimeParamId(_) => chalk_ir::VariableKind::Lifetime, + }), ) } @@ -526,13 +529,13 @@ pub type PolyFnSig = Binders; impl CallableSig { pub fn from_params_and_return( - params: impl ExactSizeIterator, + params: impl Iterator, ret: Ty, is_varargs: bool, safety: Safety, abi: FnAbi, ) -> CallableSig { - let mut params_and_return = Vec::with_capacity(params.len() + 1); + let mut params_and_return = Vec::with_capacity(params.size_hint().0 + 1); params_and_return.extend(params); params_and_return.push(ret); CallableSig { params_and_return: params_and_return.into(), is_varargs, safety, abi } @@ -610,7 +613,6 @@ pub enum ImplTraitId { TypeAliasImplTrait(hir_def::TypeAliasId, ImplTraitIdx), AsyncBlockTypeImplTrait(hir_def::DefWithBodyId, ExprId), } -impl InternValueTrivial for ImplTraitId {} #[derive(PartialEq, Eq, Debug, Hash)] pub struct ImplTraits { @@ -647,10 +649,8 @@ pub(crate) fn fold_free_vars + TypeFoldable< F1: FnMut(BoundVar, DebruijnIndex) -> Ty, F2: FnMut(Ty, BoundVar, DebruijnIndex) -> Const, >(F1, F2); - impl< - F1: FnMut(BoundVar, DebruijnIndex) -> Ty, - F2: FnMut(Ty, BoundVar, DebruijnIndex) -> Const, - > TypeFolder for FreeVarFolder + impl Ty, F2: FnMut(Ty, BoundVar, DebruijnIndex) -> Const> + TypeFolder for FreeVarFolder { fn as_dyn(&mut self) -> &mut dyn TypeFolder { self @@ -780,8 +780,8 @@ where T: HasInterner + TypeFoldable + Clone, { use chalk_ir::{ - fold::{FallibleTypeFolder, TypeSuperFoldable}, Fallible, + fold::{FallibleTypeFolder, TypeSuperFoldable}, }; struct ErrorReplacer { vars: usize, @@ -842,11 +842,7 @@ where _var: InferenceVar, _outer_binder: DebruijnIndex, ) -> Fallible { - if cfg!(debug_assertions) { - Err(NoSolution) - } else { - Ok(unknown_const(ty)) - } + if cfg!(debug_assertions) { Err(NoSolution) } else { Ok(unknown_const(ty)) } } fn try_fold_free_var_const( @@ -855,11 +851,7 @@ where _bound_var: BoundVar, _outer_binder: DebruijnIndex, ) -> Fallible { - if cfg!(debug_assertions) { - Err(NoSolution) - } else { - Ok(unknown_const(ty)) - } + if cfg!(debug_assertions) { Err(NoSolution) } else { Ok(unknown_const(ty)) } } fn try_fold_inference_lifetime( @@ -867,11 +859,7 @@ where _var: InferenceVar, _outer_binder: DebruijnIndex, ) -> Fallible { - if cfg!(debug_assertions) { - Err(NoSolution) - } else { - Ok(error_lifetime()) - } + if cfg!(debug_assertions) { Err(NoSolution) } else { Ok(error_lifetime()) } } fn try_fold_free_var_lifetime( @@ -879,11 +867,7 @@ where _bound_var: BoundVar, _outer_binder: DebruijnIndex, ) -> Fallible { - if cfg!(debug_assertions) { - Err(NoSolution) - } else { - Ok(error_lifetime()) - } + if cfg!(debug_assertions) { Err(NoSolution) } else { Ok(error_lifetime()) } } } let mut error_replacer = ErrorReplacer { vars: 0 }; @@ -908,8 +892,8 @@ pub fn callable_sig_from_fn_trait( let krate = trait_env.krate; let fn_once_trait = FnTrait::FnOnce.get_id(db, krate)?; let output_assoc_type = db - .trait_data(fn_once_trait) - .associated_type_by_name(&Name::new_symbol_root(sym::Output.clone()))?; + .trait_items(fn_once_trait) + .associated_type_by_name(&Name::new_symbol_root(sym::Output))?; let mut table = InferenceTable::new(db, trait_env.clone()); let b = TyBuilder::trait_ref(db, fn_once_trait); @@ -1033,7 +1017,7 @@ where T: ?Sized + TypeVisitable, { let mut collector = PlaceholderCollector { db, placeholders: FxHashSet::default() }; - let _ = value.visit_with(&mut collector, DebruijnIndex::INNERMOST); + _ = value.visit_with(&mut collector, DebruijnIndex::INNERMOST); collector.placeholders.into_iter().collect() } @@ -1042,15 +1026,6 @@ pub fn known_const_to_ast( db: &dyn HirDatabase, display_target: DisplayTarget, ) -> Option { - if let ConstValue::Concrete(c) = &konst.interned().value { - match c.interned { - ConstScalar::UnevaluatedConst(GeneralConstId::InTypeConstId(cid), _) => { - return Some(cid.source(db.upcast())); - } - ConstScalar::Unknown => return None, - _ => (), - } - } Some(make::expr_const_value(konst.display(db, display_target).to_string().as_str())) } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs b/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs index af73b5ed9a7b4..9def39d5f979b 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs @@ -14,65 +14,57 @@ use std::{ ops::{self, Not as _}, }; -use base_db::{ra_salsa::Cycle, CrateId}; +use base_db::Crate; use chalk_ir::{ + Mutability, Safety, TypeOutlives, cast::Cast, fold::{Shift, TypeFoldable}, interner::HasInterner, - Mutability, Safety, TypeOutlives, }; use either::Either; use hir_def::{ + AdtId, AssocItemId, CallableDefId, ConstId, ConstParamId, DefWithBodyId, EnumId, EnumVariantId, + FunctionId, GenericDefId, GenericParamId, HasModule, ImplId, ItemContainerId, LocalFieldId, + Lookup, StaticId, StructId, TypeAliasId, TypeOrConstParamId, UnionId, VariantId, builtin_type::BuiltinType, - data::{adt::StructKind, TraitFlags}, - expander::Expander, - generics::{ - GenericParamDataRef, TypeOrConstParamData, TypeParamProvenance, WherePredicate, - WherePredicateTypeTarget, - }, + expr_store::{ExpressionStore, path::Path}, + hir::generics::{GenericParamDataRef, TypeOrConstParamData, WherePredicate}, + item_tree::FieldsShape, lang_item::LangItem, - nameres::MacroSubNs, - path::{GenericArg, ModPath, Path, PathKind}, resolver::{HasResolver, LifetimeNs, Resolver, TypeNs}, + signatures::{FunctionSignature, TraitFlags, TypeAliasFlags}, type_ref::{ - ConstRef, LifetimeRef, PathId, TraitBoundModifier, TraitRef as HirTraitRef, TypeBound, - TypeRef, TypeRefId, TypesMap, TypesSourceMap, + ConstRef, LifetimeRefId, LiteralConstRef, PathId, TraitBoundModifier, + TraitRef as HirTraitRef, TypeBound, TypeRef, TypeRefId, }, - AdtId, AssocItemId, CallableDefId, ConstId, ConstParamId, DefWithBodyId, EnumId, EnumVariantId, - FunctionId, GenericDefId, GenericParamId, HasModule, ImplId, InTypeConstLoc, LocalFieldId, - Lookup, StaticId, StructId, TypeAliasId, TypeOrConstParamId, TypeOwnerId, UnionId, VariantId, }; -use hir_expand::{name::Name, ExpandResult}; +use hir_expand::name::Name; use la_arena::{Arena, ArenaMap}; use rustc_hash::FxHashSet; use rustc_pattern_analysis::Captures; use stdx::{impl_from, never}; -use syntax::ast; use triomphe::{Arc, ThinArc}; use crate::{ + AliasTy, Binders, BoundVar, CallableSig, Const, DebruijnIndex, DynTy, FnAbi, FnPointer, FnSig, + FnSubst, ImplTrait, ImplTraitId, ImplTraits, Interner, Lifetime, LifetimeData, + LifetimeOutlives, PolyFnSig, ProgramClause, QuantifiedWhereClause, QuantifiedWhereClauses, + Substitution, TraitEnvironment, TraitRef, TraitRefExt, Ty, TyBuilder, TyKind, WhereClause, all_super_traits, - consteval::{ - intern_const_ref, intern_const_scalar, path_to_const, unknown_const, - unknown_const_as_generic, - }, + consteval::{intern_const_ref, path_to_const, unknown_const, unknown_const_as_generic}, db::HirDatabase, error_lifetime, - generics::{generics, trait_self_param_idx, Generics}, + generics::{Generics, generics, trait_self_param_idx}, lower::{ diagnostics::*, path::{PathDiagnosticCallback, PathLoweringContext}, }, make_binders, - mapping::{from_chalk_trait_id, lt_to_placeholder_idx, ToChalk}, + mapping::{ToChalk, from_chalk_trait_id, lt_to_placeholder_idx}, static_lifetime, to_chalk_trait_id, to_placeholder_idx, - utils::{all_super_trait_refs, InTypeConstIdMetadata}, - AliasTy, Binders, BoundVar, CallableSig, Const, ConstScalar, DebruijnIndex, DynTy, FnAbi, - FnPointer, FnSig, FnSubst, ImplTrait, ImplTraitId, ImplTraits, Interner, Lifetime, - LifetimeData, LifetimeOutlives, ParamKind, PolyFnSig, ProgramClause, QuantifiedWhereClause, - QuantifiedWhereClauses, Substitution, TraitEnvironment, TraitRef, TraitRefExt, Ty, TyBuilder, - TyKind, WhereClause, + utils::all_super_trait_refs, + variable_kinds_from_iter, }; #[derive(Debug, Default)] @@ -83,72 +75,108 @@ struct ImplTraitLoweringState { mode: ImplTraitLoweringMode, // This is structured as a struct with fields and not as an enum because it helps with the borrow checker. opaque_type_data: Arena, - param_and_variable_counter: u16, } impl ImplTraitLoweringState { fn new(mode: ImplTraitLoweringMode) -> ImplTraitLoweringState { - Self { mode, opaque_type_data: Arena::new(), param_and_variable_counter: 0 } + Self { mode, opaque_type_data: Arena::new() } } - fn param(counter: u16) -> Self { - Self { - mode: ImplTraitLoweringMode::Param, - opaque_type_data: Arena::new(), - param_and_variable_counter: counter, +} + +pub(crate) struct PathDiagnosticCallbackData(TypeRefId); + +#[derive(Debug, Clone)] +pub enum LifetimeElisionKind { + /// Create a new anonymous lifetime parameter and reference it. + /// + /// If `report_in_path`, report an error when encountering lifetime elision in a path: + /// ```compile_fail + /// struct Foo<'a> { x: &'a () } + /// async fn foo(x: Foo) {} + /// ``` + /// + /// Note: the error should not trigger when the elided lifetime is in a pattern or + /// expression-position path: + /// ``` + /// struct Foo<'a> { x: &'a () } + /// async fn foo(Foo { x: _ }: Foo<'_>) {} + /// ``` + AnonymousCreateParameter { report_in_path: bool }, + + /// Replace all anonymous lifetimes by provided lifetime. + Elided(Lifetime), + + /// Give a hard error when either `&` or `'_` is written. Used to + /// rule out things like `where T: Foo<'_>`. Does not imply an + /// error on default object bounds (e.g., `Box`). + AnonymousReportError, + + /// Resolves elided lifetimes to `'static` if there are no other lifetimes in scope, + /// otherwise give a warning that the previous behavior of introducing a new early-bound + /// lifetime is a bug and will be removed (if `only_lint` is enabled). + StaticIfNoLifetimeInScope { only_lint: bool }, + + /// Signal we cannot find which should be the anonymous lifetime. + ElisionFailure, + + /// Infer all elided lifetimes. + Infer, +} + +impl LifetimeElisionKind { + #[inline] + pub(crate) fn for_const(const_parent: ItemContainerId) -> LifetimeElisionKind { + match const_parent { + ItemContainerId::ExternBlockId(_) | ItemContainerId::ModuleId(_) => { + LifetimeElisionKind::Elided(static_lifetime()) + } + ItemContainerId::ImplId(_) => { + LifetimeElisionKind::StaticIfNoLifetimeInScope { only_lint: true } + } + ItemContainerId::TraitId(_) => { + LifetimeElisionKind::StaticIfNoLifetimeInScope { only_lint: false } + } } } - fn variable(counter: u16) -> Self { - Self { - mode: ImplTraitLoweringMode::Variable, - opaque_type_data: Arena::new(), - param_and_variable_counter: counter, - } + + #[inline] + pub(crate) fn for_fn_params(data: &FunctionSignature) -> LifetimeElisionKind { + LifetimeElisionKind::AnonymousCreateParameter { report_in_path: data.is_async() } } -} -pub(crate) struct PathDiagnosticCallbackData(TypeRefId); + #[inline] + pub(crate) fn for_fn_ret() -> LifetimeElisionKind { + // FIXME: We should use the elided lifetime here, or `ElisionFailure`. + LifetimeElisionKind::Elided(error_lifetime()) + } +} #[derive(Debug)] pub struct TyLoweringContext<'a> { pub db: &'a dyn HirDatabase, resolver: &'a Resolver, - generics: OnceCell>, - types_map: &'a TypesMap, - /// If this is set, that means we're in a context of a freshly expanded macro, and that means - /// we should not use `TypeRefId` in diagnostics because the caller won't have the `TypesMap`, - /// instead we need to put `TypeSource` from the source map. - types_source_map: Option<&'a TypesSourceMap>, + store: &'a ExpressionStore, + def: GenericDefId, + generics: OnceCell, in_binders: DebruijnIndex, - // FIXME: Should not be an `Option` but `Resolver` currently does not return owners in all cases - // where expected - owner: Option, /// Note: Conceptually, it's thinkable that we could be in a location where /// some type params should be represented as placeholders, and others /// should be converted to variables. I think in practice, this isn't /// possible currently, so this should be fine for now. pub type_param_mode: ParamLoweringMode, impl_trait_mode: ImplTraitLoweringState, - expander: Option, /// Tracks types with explicit `?Sized` bounds. pub(crate) unsized_types: FxHashSet, pub(crate) diagnostics: Vec, + lifetime_elision: LifetimeElisionKind, } impl<'a> TyLoweringContext<'a> { pub fn new( db: &'a dyn HirDatabase, resolver: &'a Resolver, - types_map: &'a TypesMap, - owner: TypeOwnerId, - ) -> Self { - Self::new_maybe_unowned(db, resolver, types_map, None, Some(owner)) - } - - pub fn new_maybe_unowned( - db: &'a dyn HirDatabase, - resolver: &'a Resolver, - types_map: &'a TypesMap, - types_source_map: Option<&'a TypesSourceMap>, - owner: Option, + store: &'a ExpressionStore, + def: GenericDefId, + lifetime_elision: LifetimeElisionKind, ) -> Self { let impl_trait_mode = ImplTraitLoweringState::new(ImplTraitLoweringMode::Disallowed); let type_param_mode = ParamLoweringMode::Placeholder; @@ -156,16 +184,15 @@ impl<'a> TyLoweringContext<'a> { Self { db, resolver, - generics: OnceCell::new(), - types_map, - types_source_map, - owner, + def, + generics: Default::default(), + store, in_binders, impl_trait_mode, type_param_mode, - expander: None, unsized_types: FxHashSet::default(), diagnostics: Vec::new(), + lifetime_elision, } } @@ -188,6 +215,17 @@ impl<'a> TyLoweringContext<'a> { self.with_debruijn(self.in_binders.shifted_in_from(debruijn), f) } + fn with_lifetime_elision( + &mut self, + lifetime_elision: LifetimeElisionKind, + f: impl FnOnce(&mut TyLoweringContext<'_>) -> T, + ) -> T { + let old_lifetime_elision = mem::replace(&mut self.lifetime_elision, lifetime_elision); + let result = f(self); + self.lifetime_elision = old_lifetime_elision; + result + } + pub fn with_impl_trait_mode(self, impl_trait_mode: ImplTraitLoweringMode) -> Self { Self { impl_trait_mode: ImplTraitLoweringState::new(impl_trait_mode), ..self } } @@ -207,17 +245,7 @@ impl<'a> TyLoweringContext<'a> { } pub fn push_diagnostic(&mut self, type_ref: TypeRefId, kind: TyLoweringDiagnosticKind) { - let source = match self.types_source_map { - Some(source_map) => { - let Ok(source) = source_map.type_syntax(type_ref) else { - stdx::never!("error in synthetic type"); - return; - }; - Either::Right(source) - } - None => Either::Left(type_ref), - }; - self.diagnostics.push(TyLoweringDiagnostic { source, kind }); + self.diagnostics.push(TyLoweringDiagnostic { source: type_ref, kind }); } } @@ -228,15 +256,6 @@ pub enum ImplTraitLoweringMode { /// i.e. for arguments of the function we're currently checking, and return /// types of functions we're calling. Opaque, - /// `impl Trait` gets lowered into a type variable. Used for argument - /// position impl Trait when inside the respective function, since it allows - /// us to support that without Chalk. - Param, - /// `impl Trait` gets lowered into a variable that can unify with some - /// type. This is used in places where values flow 'in', i.e. for arguments - /// of functions we're calling, and the return type of the function we're - /// currently checking. - Variable, /// `impl Trait` is disallowed and will be an error. #[default] Disallowed, @@ -254,29 +273,57 @@ impl<'a> TyLoweringContext<'a> { } pub fn lower_const(&mut self, const_ref: &ConstRef, const_type: Ty) -> Const { - let Some(owner) = self.owner else { return unknown_const(const_type) }; - let debruijn = self.in_binders; - const_or_path_to_chalk( + let const_ref = &self.store[const_ref.expr]; + match const_ref { + hir_def::hir::Expr::Path(path) => path_to_const( + self.db, + self.resolver, + path, + self.type_param_mode, + || self.generics(), + self.in_binders, + const_type.clone(), + ) + .unwrap_or_else(|| unknown_const(const_type)), + hir_def::hir::Expr::Literal(literal) => intern_const_ref( + self.db, + &match *literal { + hir_def::hir::Literal::Float(_, _) + | hir_def::hir::Literal::String(_) + | hir_def::hir::Literal::ByteString(_) + | hir_def::hir::Literal::CString(_) => LiteralConstRef::Unknown, + hir_def::hir::Literal::Char(c) => LiteralConstRef::Char(c), + hir_def::hir::Literal::Bool(b) => LiteralConstRef::Bool(b), + hir_def::hir::Literal::Int(val, _) => LiteralConstRef::Int(val), + hir_def::hir::Literal::Uint(val, _) => LiteralConstRef::UInt(val), + }, + const_type, + self.resolver.krate(), + ), + _ => unknown_const(const_type), + } + } + + pub fn lower_path_as_const(&mut self, path: &Path, const_type: Ty) -> Const { + path_to_const( self.db, self.resolver, - owner, - const_type, - const_ref, + path, self.type_param_mode, || self.generics(), - debruijn, + self.in_binders, + const_type.clone(), ) + .unwrap_or_else(|| unknown_const(const_type)) } - fn generics(&self) -> Option<&Generics> { - self.generics - .get_or_init(|| self.resolver.generic_def().map(|def| generics(self.db.upcast(), def))) - .as_ref() + fn generics(&self) -> &Generics { + self.generics.get_or_init(|| generics(self.db, self.def)) } pub fn lower_ty_ext(&mut self, type_ref_id: TypeRefId) -> (Ty, Option) { let mut res = None; - let type_ref = &self.types_map[type_ref_id]; + let type_ref = &self.store[type_ref_id]; let ty = match type_ref { TypeRef::Never => TyKind::Never.intern(Interner), TypeRef::Tuple(inner) => { @@ -290,6 +337,20 @@ impl<'a> TyLoweringContext<'a> { res = res_; ty } + &TypeRef::TypeParam(type_param_id) => { + res = Some(TypeNs::GenericParam(type_param_id)); + match self.type_param_mode { + ParamLoweringMode::Placeholder => { + TyKind::Placeholder(to_placeholder_idx(self.db, type_param_id.into())) + } + ParamLoweringMode::Variable => { + let idx = + self.generics().type_or_const_param_idx(type_param_id.into()).unwrap(); + TyKind::BoundVar(BoundVar::new(self.in_binders, idx)) + } + } + .intern(Interner) + } &TypeRef::RawPtr(inner, mutability) => { let inner_ty = self.lower_ty(inner); TyKind::Raw(lower_to_chalk_mutability(mutability), inner_ty).intern(Interner) @@ -309,24 +370,32 @@ impl<'a> TyLoweringContext<'a> { let lifetime = ref_ .lifetime .as_ref() - .map_or_else(error_lifetime, |lr| self.lower_lifetime(lr)); + .map_or_else(error_lifetime, |&lr| self.lower_lifetime(lr)); TyKind::Ref(lower_to_chalk_mutability(ref_.mutability), lifetime, inner_ty) .intern(Interner) } TypeRef::Placeholder => TyKind::Error.intern(Interner), TypeRef::Fn(fn_) => { let substs = self.with_shifted_in(DebruijnIndex::ONE, |ctx| { - Substitution::from_iter( - Interner, - fn_.params().iter().map(|&(_, tr)| ctx.lower_ty(tr)), - ) + let (params, ret) = fn_.split_params_and_ret(); + let mut subst = Vec::with_capacity(fn_.params.len()); + ctx.with_lifetime_elision( + LifetimeElisionKind::AnonymousCreateParameter { report_in_path: false }, + |ctx| { + subst.extend(params.iter().map(|&(_, tr)| ctx.lower_ty(tr))); + }, + ); + ctx.with_lifetime_elision(LifetimeElisionKind::for_fn_ret(), |ctx| { + subst.push(ctx.lower_ty(ret)); + }); + Substitution::from_iter(Interner, subst) }); TyKind::Function(FnPointer { num_binders: 0, // FIXME lower `for<'a> fn()` correctly sig: FnSig { - abi: fn_.abi().as_ref().map_or(FnAbi::Rust, FnAbi::from_symbol), - safety: if fn_.is_unsafe() { Safety::Unsafe } else { Safety::Safe }, - variadic: fn_.is_varargs(), + abi: fn_.abi.as_ref().map_or(FnAbi::Rust, FnAbi::from_symbol), + safety: if fn_.is_unsafe { Safety::Unsafe } else { Safety::Safe }, + variadic: fn_.is_varargs, }, substitution: FnSubst(substs), }) @@ -336,9 +405,9 @@ impl<'a> TyLoweringContext<'a> { TypeRef::ImplTrait(bounds) => { match self.impl_trait_mode.mode { ImplTraitLoweringMode::Opaque => { - let origin = match self.resolver.generic_def() { - Some(GenericDefId::FunctionId(it)) => Either::Left(it), - Some(GenericDefId::TypeAliasId(it)) => Either::Right(it), + let origin = match self.def { + GenericDefId::FunctionId(it) => Either::Left(it), + GenericDefId::TypeAliasId(it) => Either::Right(it), _ => panic!( "opaque impl trait lowering must be in function or type alias" ), @@ -370,144 +439,16 @@ impl<'a> TyLoweringContext<'a> { |a| ImplTraitId::TypeAliasImplTrait(a, idx), ); let opaque_ty_id = self.db.intern_impl_trait_id(impl_trait_id).into(); - let generics = - generics(self.db.upcast(), origin.either(|f| f.into(), |a| a.into())); + let generics = generics(self.db, origin.either(|f| f.into(), |a| a.into())); let parameters = generics.bound_vars_subst(self.db, self.in_binders); TyKind::OpaqueType(opaque_ty_id, parameters).intern(Interner) } - ImplTraitLoweringMode::Param => { - let idx = self.impl_trait_mode.param_and_variable_counter; - // Count the number of `impl Trait` things that appear within our bounds. - // Since those have been emitted as implicit type args already. - self.impl_trait_mode.param_and_variable_counter = - idx + self.count_impl_traits(type_ref_id) as u16; - let db = self.db; - let kind = self - .generics() - .expect("param impl trait lowering must be in a generic def") - .iter() - .filter_map(|(id, data)| match (id, data) { - ( - GenericParamId::TypeParamId(id), - GenericParamDataRef::TypeParamData(data), - ) if data.provenance == TypeParamProvenance::ArgumentImplTrait => { - Some(id) - } - _ => None, - }) - .nth(idx as usize) - .map_or(TyKind::Error, |id| { - TyKind::Placeholder(to_placeholder_idx(db, id.into())) - }); - kind.intern(Interner) - } - ImplTraitLoweringMode::Variable => { - let idx = self.impl_trait_mode.param_and_variable_counter; - // Count the number of `impl Trait` things that appear within our bounds. - // Since t hose have been emitted as implicit type args already. - self.impl_trait_mode.param_and_variable_counter = - idx + self.count_impl_traits(type_ref_id) as u16; - let debruijn = self.in_binders; - let kind = self - .generics() - .expect("variable impl trait lowering must be in a generic def") - .iter() - .enumerate() - .filter_map(|(i, (id, data))| match (id, data) { - ( - GenericParamId::TypeParamId(_), - GenericParamDataRef::TypeParamData(data), - ) if data.provenance == TypeParamProvenance::ArgumentImplTrait => { - Some(i) - } - _ => None, - }) - .nth(idx as usize) - .map_or(TyKind::Error, |id| { - TyKind::BoundVar(BoundVar { debruijn, index: id }) - }); - kind.intern(Interner) - } ImplTraitLoweringMode::Disallowed => { // FIXME: report error TyKind::Error.intern(Interner) } } } - TypeRef::Macro(macro_call) => { - let (expander, recursion_start) = { - match &mut self.expander { - // There already is an expander here, this means we are already recursing - Some(expander) => (expander, false), - // No expander was created yet, so we are at the start of the expansion recursion - // and therefore have to create an expander. - None => { - let expander = self.expander.insert(Expander::new( - self.db.upcast(), - macro_call.file_id, - self.resolver.module(), - )); - (expander, true) - } - } - }; - let ty = { - let macro_call = macro_call.to_node(self.db.upcast()); - let resolver = |path: &_| { - self.resolver - .resolve_path_as_macro(self.db.upcast(), path, Some(MacroSubNs::Bang)) - .map(|(it, _)| it) - }; - match expander.enter_expand::(self.db.upcast(), macro_call, resolver) - { - Ok(ExpandResult { value: Some((mark, expanded)), .. }) => { - let (mut types_map, mut types_source_map) = - (TypesMap::default(), TypesSourceMap::default()); - - let mut ctx = expander.ctx( - self.db.upcast(), - &mut types_map, - &mut types_source_map, - ); - // FIXME: Report syntax errors in expansion here - let type_ref = TypeRef::from_ast(&mut ctx, expanded.tree()); - - // Can't mutate `self`, must create a new instance, because of the lifetimes. - let mut inner_ctx = TyLoweringContext { - db: self.db, - resolver: self.resolver, - generics: self.generics.clone(), - types_map: &types_map, - types_source_map: Some(&types_source_map), - in_binders: self.in_binders, - owner: self.owner, - type_param_mode: self.type_param_mode, - impl_trait_mode: mem::take(&mut self.impl_trait_mode), - expander: self.expander.take(), - unsized_types: mem::take(&mut self.unsized_types), - diagnostics: mem::take(&mut self.diagnostics), - }; - - let ty = inner_ctx.lower_ty(type_ref); - - self.impl_trait_mode = inner_ctx.impl_trait_mode; - self.expander = inner_ctx.expander; - self.unsized_types = inner_ctx.unsized_types; - self.diagnostics = inner_ctx.diagnostics; - - self.expander.as_mut().unwrap().exit(mark); - Some(ty) - } - _ => None, - } - }; - - // drop the expander, resetting it to pre-recursion state - if recursion_start { - self.expander = None; - } - ty.unwrap_or_else(|| TyKind::Error.intern(Interner)) - } TypeRef::Error => TyKind::Error.intern(Interner), }; (ty, res) @@ -517,9 +458,10 @@ impl<'a> TyLoweringContext<'a> { /// lower the self types of the predicates since that could lead to cycles. /// So we just check here if the `type_ref` resolves to a generic param, and which. fn lower_ty_only_param(&mut self, type_ref_id: TypeRefId) -> Option { - let type_ref = &self.types_map[type_ref_id]; + let type_ref = &self.store[type_ref_id]; let path = match type_ref { TypeRef::Path(path) => path, + &TypeRef::TypeParam(idx) => return Some(idx.into()), _ => return None, }; if path.type_anchor().is_some() { @@ -555,7 +497,7 @@ impl<'a> TyLoweringContext<'a> { PathLoweringContext::new( self, Self::on_path_diagnostic_callback(path_id.type_ref()), - &self.types_map[path_id], + &self.store[path_id], ) } @@ -564,7 +506,7 @@ impl<'a> TyLoweringContext<'a> { if let Some(type_ref) = path.type_anchor() { let (ty, res) = self.lower_ty_ext(type_ref); let mut ctx = self.at_path(path_id); - return ctx.lower_ty_relative_path(ty, res); + return ctx.lower_ty_relative_path(ty, res, false); } let mut ctx = self.at_path(path_id); @@ -594,7 +536,7 @@ impl<'a> TyLoweringContext<'a> { TypeNs::TraitId(tr) => tr, _ => return None, }; - Some((ctx.lower_trait_ref_from_resolved_path(resolved, explicit_self_ty), ctx)) + Some((ctx.lower_trait_ref_from_resolved_path(resolved, explicit_self_ty, false), ctx)) } fn lower_trait_ref( @@ -605,36 +547,21 @@ impl<'a> TyLoweringContext<'a> { self.lower_trait_ref_from_path(trait_ref.path, explicit_self_ty).map(|it| it.0) } + /// When lowering predicates from parents (impl, traits) for children defs (fns, consts, types), `generics` should + /// contain the `Generics` for the **child**, while `predicate_owner` should contain the `GenericDefId` of the + /// **parent**. This is important so we generate the correct bound var/placeholder. pub(crate) fn lower_where_predicate<'b>( &'b mut self, where_predicate: &'b WherePredicate, - &def: &GenericDefId, ignore_bindings: bool, ) -> impl Iterator + use<'a, 'b> { match where_predicate { WherePredicate::ForLifetime { target, bound, .. } | WherePredicate::TypeBound { target, bound } => { - let self_ty = match target { - WherePredicateTypeTarget::TypeRef(type_ref) => self.lower_ty(*type_ref), - &WherePredicateTypeTarget::TypeOrConstParam(local_id) => { - let param_id = hir_def::TypeOrConstParamId { parent: def, local_id }; - match self.type_param_mode { - ParamLoweringMode::Placeholder => { - TyKind::Placeholder(to_placeholder_idx(self.db, param_id)) - } - ParamLoweringMode::Variable => { - let idx = generics(self.db.upcast(), def) - .type_or_const_param_idx(param_id) - .expect("matching generics"); - TyKind::BoundVar(BoundVar::new(DebruijnIndex::INNERMOST, idx)) - } - } - .intern(Interner) - } - }; + let self_ty = self.lower_ty(*target); Either::Left(self.lower_type_bound(bound, self_ty, ignore_bindings)) } - WherePredicate::Lifetime { bound, target } => Either::Right(iter::once( + &WherePredicate::Lifetime { bound, target } => Either::Right(iter::once( crate::wrap_empty_binders(WhereClause::LifetimeOutlives(LifetimeOutlives { a: self.lower_lifetime(bound), b: self.lower_lifetime(target), @@ -657,17 +584,13 @@ impl<'a> TyLoweringContext<'a> { // FIXME Don't silently drop the hrtb lifetimes here if let Some((trait_ref, ctx)) = self.lower_trait_ref_from_path(path, self_ty) { if !ignore_bindings { - assoc_bounds = - ctx.assoc_type_bindings_from_type_bound(bound, trait_ref.clone()); + assoc_bounds = ctx.assoc_type_bindings_from_type_bound(trait_ref.clone()); } clause = Some(crate::wrap_empty_binders(WhereClause::Implemented(trait_ref))); } } &TypeBound::Path(path, TraitBoundModifier::Maybe) => { - let sized_trait = self - .db - .lang_item(self.resolver.krate(), LangItem::Sized) - .and_then(|lang_item| lang_item.as_trait()); + let sized_trait = LangItem::Sized.resolve_trait(self.db, self.resolver.krate()); // Don't lower associated type bindings as the only possible relaxed trait bound // `?Sized` has no of them. // If we got another trait here ignore the bound completely. @@ -678,7 +601,7 @@ impl<'a> TyLoweringContext<'a> { self.unsized_types.insert(self_ty); } } - TypeBound::Lifetime(l) => { + &TypeBound::Lifetime(l) => { let lifetime = self.lower_lifetime(l); clause = Some(crate::wrap_empty_binders(WhereClause::TypeOutlives(TypeOutlives { ty: self_ty, @@ -725,15 +648,15 @@ impl<'a> TyLoweringContext<'a> { let lhs_id = lhs.trait_id; let lhs_is_auto = ctx .db - .trait_data(from_chalk_trait_id(lhs_id)) + .trait_signature(from_chalk_trait_id(lhs_id)) .flags - .contains(TraitFlags::IS_AUTO); + .contains(TraitFlags::AUTO); let rhs_id = rhs.trait_id; let rhs_is_auto = ctx .db - .trait_data(from_chalk_trait_id(rhs_id)) + .trait_signature(from_chalk_trait_id(rhs_id)) .flags - .contains(TraitFlags::IS_AUTO); + .contains(TraitFlags::AUTO); if !lhs_is_auto && !rhs_is_auto { multiple_regular_traits = true; @@ -800,7 +723,7 @@ impl<'a> TyLoweringContext<'a> { } } - fn lower_impl_trait(&mut self, bounds: &[TypeBound], krate: CrateId) -> ImplTrait { + fn lower_impl_trait(&mut self, bounds: &[TypeBound], krate: Crate) -> ImplTrait { cov_mark::hit!(lower_rpit); let self_ty = TyKind::BoundVar(BoundVar::new(DebruijnIndex::INNERMOST, 0)).intern(Interner); let predicates = self.with_shifted_in(DebruijnIndex::ONE, |ctx| { @@ -810,10 +733,8 @@ impl<'a> TyLoweringContext<'a> { } if !ctx.unsized_types.contains(&self_ty) { - let sized_trait = ctx - .db - .lang_item(krate, LangItem::Sized) - .and_then(|lang_item| lang_item.as_trait().map(to_chalk_trait_id)); + let sized_trait = + LangItem::Sized.resolve_trait(ctx.db, krate).map(to_chalk_trait_id); let sized_clause = sized_trait.map(|trait_id| { let clause = WhereClause::Implemented(TraitRef { trait_id, @@ -829,8 +750,8 @@ impl<'a> TyLoweringContext<'a> { ImplTrait { bounds: crate::make_single_type_binders(predicates) } } - pub fn lower_lifetime(&self, lifetime: &LifetimeRef) -> Lifetime { - match self.resolver.resolve_lifetime(lifetime) { + pub fn lower_lifetime(&self, lifetime: LifetimeRefId) -> Lifetime { + match self.resolver.resolve_lifetime(&self.store[lifetime]) { Some(resolution) => match resolution { LifetimeNs::Static => static_lifetime(), LifetimeNs::LifetimeParam(id) => match self.type_param_mode { @@ -838,8 +759,7 @@ impl<'a> TyLoweringContext<'a> { LifetimeData::Placeholder(lt_to_placeholder_idx(self.db, id)) } ParamLoweringMode::Variable => { - let generics = self.generics().expect("generics in scope"); - let idx = match generics.lifetime_idx(id) { + let idx = match self.generics().lifetime_idx(id) { None => return error_lifetime(), Some(idx) => idx, }; @@ -852,21 +772,10 @@ impl<'a> TyLoweringContext<'a> { None => error_lifetime(), } } - - // FIXME: This does not handle macros! - fn count_impl_traits(&self, type_ref: TypeRefId) -> usize { - let mut count = 0; - TypeRef::walk(type_ref, self.types_map, &mut |type_ref| { - if matches!(type_ref, TypeRef::ImplTrait(_)) { - count += 1; - } - }); - count - } } /// Build the signature of a callable item (function, struct or enum variant). -pub(crate) fn callable_item_sig(db: &dyn HirDatabase, def: CallableDefId) -> PolyFnSig { +pub(crate) fn callable_item_signature_query(db: &dyn HirDatabase, def: CallableDefId) -> PolyFnSig { match def { CallableDefId::FunctionId(f) => fn_sig_for_fn(db, f), CallableDefId::StructId(s) => fn_sig_for_struct_constructor(db, s), @@ -897,7 +806,7 @@ fn named_associated_type_shorthand_candidates( ) -> Option { let mut search = |t| { all_super_trait_refs(db, t, |t| { - let data = db.trait_data(t.hir_trait_id()); + let data = db.trait_items(t.hir_trait_id()); for (name, assoc_id) in &data.items { if let AssocItemId::TypeAliasId(alias) = assoc_id { @@ -918,14 +827,8 @@ fn named_associated_type_shorthand_candidates( let impl_id_as_generic_def: GenericDefId = impl_id.into(); if impl_id_as_generic_def != def { - // `trait_ref` contains `BoundVar`s bound by impl's `Binders`, but here we need - // `BoundVar`s from `def`'s point of view. - // FIXME: A `HirDatabase` query may be handy if this process is needed in more - // places. It'd be almost identical as `impl_trait_query` where `resolver` would be - // of `def` instead of `impl_id`. - let starting_idx = generics(db.upcast(), def).len_self(); let subst = TyBuilder::subst_for_def(db, impl_id, None) - .fill_with_bound_vars(DebruijnIndex::INNERMOST, starting_idx) + .fill_with_bound_vars(DebruijnIndex::INNERMOST, 0) .build(); let trait_ref = subst.apply(trait_ref, Interner); search(trait_ref) @@ -949,18 +852,10 @@ fn named_associated_type_shorthand_candidates( } // Handle `Self::Type` referring to own associated type in trait definitions if let GenericDefId::TraitId(trait_id) = param_id.parent() { - let trait_generics = generics(db.upcast(), trait_id.into()); + let trait_generics = generics(db, trait_id.into()); if trait_generics[param_id.local_id()].is_trait_self() { - let def_generics = generics(db.upcast(), def); - let starting_idx = match def { - GenericDefId::TraitId(_) => 0, - // `def` is an item within trait. We need to substitute `BoundVar`s but - // remember that they are for parent (i.e. trait) generic params so they - // come after our own params. - _ => def_generics.len_self(), - }; let trait_ref = TyBuilder::trait_ref(db, trait_id) - .fill_with_bound_vars(DebruijnIndex::INNERMOST, starting_idx) + .fill_with_bound_vars(DebruijnIndex::INNERMOST, 0) .build(); return search(trait_ref); } @@ -989,18 +884,22 @@ pub(crate) fn field_types_with_diagnostics_query( db: &dyn HirDatabase, variant_id: VariantId, ) -> (Arc>>, Diagnostics) { - let var_data = variant_id.variant_data(db.upcast()); + let var_data = db.variant_fields(variant_id); let (resolver, def): (_, GenericDefId) = match variant_id { - VariantId::StructId(it) => (it.resolver(db.upcast()), it.into()), - VariantId::UnionId(it) => (it.resolver(db.upcast()), it.into()), - VariantId::EnumVariantId(it) => { - (it.resolver(db.upcast()), it.lookup(db.upcast()).parent.into()) - } + VariantId::StructId(it) => (it.resolver(db), it.into()), + VariantId::UnionId(it) => (it.resolver(db), it.into()), + VariantId::EnumVariantId(it) => (it.resolver(db), it.lookup(db).parent.into()), }; - let generics = generics(db.upcast(), def); + let generics = generics(db, def); let mut res = ArenaMap::default(); - let mut ctx = TyLoweringContext::new(db, &resolver, var_data.types_map(), def.into()) - .with_type_param_mode(ParamLoweringMode::Variable); + let mut ctx = TyLoweringContext::new( + db, + &resolver, + &var_data.store, + def, + LifetimeElisionKind::AnonymousReportError, + ) + .with_type_param_mode(ParamLoweringMode::Variable); for (field_id, field_data) in var_data.fields().iter() { res.insert(field_id, make_binders(db, &generics, ctx.lower_ty(field_data.type_ref))); } @@ -1021,34 +920,26 @@ pub(crate) fn generic_predicates_for_param_query( param_id: TypeOrConstParamId, assoc_name: Option, ) -> GenericPredicates { - let resolver = def.resolver(db.upcast()); - let mut ctx = if let GenericDefId::FunctionId(_) = def { - TyLoweringContext::new(db, &resolver, TypesMap::EMPTY, def.into()) - .with_impl_trait_mode(ImplTraitLoweringMode::Variable) - .with_type_param_mode(ParamLoweringMode::Variable) - } else { - TyLoweringContext::new(db, &resolver, TypesMap::EMPTY, def.into()) - .with_type_param_mode(ParamLoweringMode::Variable) - }; - let generics = generics(db.upcast(), def); + let generics = generics(db, def); + let resolver = def.resolver(db); + let mut ctx = TyLoweringContext::new( + db, + &resolver, + generics.store(), + def, + LifetimeElisionKind::AnonymousReportError, + ) + .with_type_param_mode(ParamLoweringMode::Variable); // we have to filter out all other predicates *first*, before attempting to lower them - let predicate = |pred: &_, def: &_, ctx: &mut TyLoweringContext<'_>| match pred { + let predicate = |pred: &_, ctx: &mut TyLoweringContext<'_>| match pred { WherePredicate::ForLifetime { target, bound, .. } | WherePredicate::TypeBound { target, bound, .. } => { - let invalid_target = match target { - WherePredicateTypeTarget::TypeRef(type_ref) => { - ctx.lower_ty_only_param(*type_ref) != Some(param_id) - } - &WherePredicateTypeTarget::TypeOrConstParam(local_id) => { - let target_id = TypeOrConstParamId { parent: *def, local_id }; - target_id != param_id - } - }; + let invalid_target = { ctx.lower_ty_only_param(*target) != Some(param_id) }; if invalid_target { // If this is filtered out without lowering, `?Sized` is not gathered into `ctx.unsized_types` if let TypeBound::Path(_, TraitBoundModifier::Maybe) = bound { - ctx.lower_where_predicate(pred, def, true).for_each(drop); + ctx.lower_where_predicate(pred, true).for_each(drop); } return false; } @@ -1057,17 +948,17 @@ pub(crate) fn generic_predicates_for_param_query( &TypeBound::ForLifetime(_, path) | &TypeBound::Path(path, _) => { // Only lower the bound if the trait could possibly define the associated // type we're looking for. - let path = &ctx.types_map[path]; + let path = &ctx.store[path]; let Some(assoc_name) = &assoc_name else { return true }; let Some(TypeNs::TraitId(tr)) = - resolver.resolve_path_in_type_ns_fully(db.upcast(), path) + resolver.resolve_path_in_type_ns_fully(db, path) else { return false; }; - all_super_traits(db.upcast(), tr).iter().any(|tr| { - db.trait_data(*tr).items.iter().any(|(name, item)| { + all_super_traits(db, tr).iter().any(|tr| { + db.trait_items(*tr).items.iter().any(|(name, item)| { matches!(item, AssocItemId::TypeAliasId(_)) && name == assoc_name }) }) @@ -1078,13 +969,14 @@ pub(crate) fn generic_predicates_for_param_query( WherePredicate::Lifetime { .. } => false, }; let mut predicates = Vec::new(); - for (params, def) in resolver.all_generic_params() { - ctx.types_map = ¶ms.types_map; - for pred in params.where_predicates() { - if predicate(pred, def, &mut ctx) { + for maybe_parent_generics in + std::iter::successors(Some(&generics), |generics| generics.parent_generics()) + { + ctx.store = maybe_parent_generics.store(); + for pred in maybe_parent_generics.where_predicates() { + if predicate(pred, &mut ctx) { predicates.extend( - ctx.lower_where_predicate(pred, def, true) - .map(|p| make_binders(db, &generics, p)), + ctx.lower_where_predicate(pred, true).map(|p| make_binders(db, &generics, p)), ); } } @@ -1109,12 +1001,11 @@ pub(crate) fn generic_predicates_for_param_query( GenericPredicates(predicates.is_empty().not().then(|| predicates.into())) } -pub(crate) fn generic_predicates_for_param_recover( +pub(crate) fn generic_predicates_for_param_cycle_result( _db: &dyn HirDatabase, - _cycle: &Cycle, - _def: &GenericDefId, - _param_id: &TypeOrConstParamId, - _assoc_name: &Option, + _def: GenericDefId, + _param_id: TypeOrConstParamId, + _assoc_name: Option, ) -> GenericPredicates { GenericPredicates(None) } @@ -1123,8 +1014,8 @@ pub(crate) fn trait_environment_for_body_query( db: &dyn HirDatabase, def: DefWithBodyId, ) -> Arc { - let Some(def) = def.as_generic_def_id(db.upcast()) else { - let krate = def.module(db.upcast()).krate(); + let Some(def) = def.as_generic_def_id(db) else { + let krate = def.module(db).krate(); return TraitEnvironment::empty(krate); }; db.trait_environment(def) @@ -1134,21 +1025,24 @@ pub(crate) fn trait_environment_query( db: &dyn HirDatabase, def: GenericDefId, ) -> Arc { - let resolver = def.resolver(db.upcast()); - let mut ctx = if let GenericDefId::FunctionId(_) = def { - TyLoweringContext::new(db, &resolver, TypesMap::EMPTY, def.into()) - .with_impl_trait_mode(ImplTraitLoweringMode::Param) - .with_type_param_mode(ParamLoweringMode::Placeholder) - } else { - TyLoweringContext::new(db, &resolver, TypesMap::EMPTY, def.into()) - .with_type_param_mode(ParamLoweringMode::Placeholder) - }; + let generics = generics(db, def); + let resolver = def.resolver(db); + let mut ctx = TyLoweringContext::new( + db, + &resolver, + generics.store(), + def, + LifetimeElisionKind::AnonymousReportError, + ) + .with_type_param_mode(ParamLoweringMode::Placeholder); let mut traits_in_scope = Vec::new(); let mut clauses = Vec::new(); - for (params, def) in resolver.all_generic_params() { - ctx.types_map = ¶ms.types_map; - for pred in params.where_predicates() { - for pred in ctx.lower_where_predicate(pred, def, false) { + for maybe_parent_generics in + std::iter::successors(Some(&generics), |generics| generics.parent_generics()) + { + ctx.store = maybe_parent_generics.store(); + for pred in maybe_parent_generics.where_predicates() { + for pred in ctx.lower_where_predicate(pred, false) { if let WhereClause::Implemented(tr) = pred.skip_binders() { traits_in_scope .push((tr.self_type_parameter(Interner).clone(), tr.hir_trait_id())); @@ -1159,7 +1053,7 @@ pub(crate) fn trait_environment_query( } } - if let Some(trait_id) = def.assoc_trait_container(db.upcast()) { + if let Some(trait_id) = def.assoc_trait_container(db) { // add `Self: Trait` to the environment in trait // function default implementations (and speculative code // inside consts or type aliases) @@ -1170,7 +1064,7 @@ pub(crate) fn trait_environment_query( clauses.push(pred.cast::(Interner).into_from_env_clause(Interner)); } - let subst = generics(db.upcast(), def).placeholder_subst(db); + let subst = generics.placeholder_subst(db); if !subst.is_empty(Interner) { let explicitly_unsized_tys = ctx.unsized_types; if let Some(implicitly_sized_clauses) = @@ -1221,7 +1115,7 @@ pub(crate) fn generic_predicates_without_parent_with_diagnostics_query( db: &dyn HirDatabase, def: GenericDefId, ) -> (GenericPredicates, Diagnostics) { - generic_predicates_filtered_by(db, def, |_, d| *d == def) + generic_predicates_filtered_by(db, def, |_, d| d == def) } /// Resolve the where clause(s) of an item with generics, @@ -1232,28 +1126,30 @@ fn generic_predicates_filtered_by( filter: F, ) -> (GenericPredicates, Diagnostics) where - F: Fn(&WherePredicate, &GenericDefId) -> bool, + F: Fn(&WherePredicate, GenericDefId) -> bool, { - let resolver = def.resolver(db.upcast()); - let (impl_trait_lowering, param_lowering) = match def { - GenericDefId::FunctionId(_) => { - (ImplTraitLoweringMode::Variable, ParamLoweringMode::Variable) - } - _ => (ImplTraitLoweringMode::Disallowed, ParamLoweringMode::Variable), - }; - let mut ctx = TyLoweringContext::new(db, &resolver, TypesMap::EMPTY, def.into()) - .with_impl_trait_mode(impl_trait_lowering) - .with_type_param_mode(param_lowering); - let generics = generics(db.upcast(), def); + let generics = generics(db, def); + let resolver = def.resolver(db); + let mut ctx = TyLoweringContext::new( + db, + &resolver, + generics.store(), + def, + LifetimeElisionKind::AnonymousReportError, + ) + .with_type_param_mode(ParamLoweringMode::Variable); let mut predicates = Vec::new(); - for (params, def) in resolver.all_generic_params() { - ctx.types_map = ¶ms.types_map; - for pred in params.where_predicates() { - if filter(pred, def) { + for maybe_parent_generics in + std::iter::successors(Some(&generics), |generics| generics.parent_generics()) + { + ctx.store = maybe_parent_generics.store(); + for pred in maybe_parent_generics.where_predicates() { + if filter(pred, maybe_parent_generics.def()) { + // We deliberately use `generics` and not `maybe_parent_generics` here. This is not a mistake! + // If we use the parent generics predicates.extend( - ctx.lower_where_predicate(pred, def, false) - .map(|p| make_binders(db, &generics, p)), + ctx.lower_where_predicate(pred, false).map(|p| make_binders(db, &generics, p)), ); } } @@ -1271,6 +1167,7 @@ where ); }; } + ( GenericPredicates(predicates.is_empty().not().then(|| predicates.into())), create_diagnostics(ctx.diagnostics), @@ -1286,11 +1183,9 @@ fn implicitly_sized_clauses<'a, 'subst: 'a>( substitution: &'subst Substitution, resolver: &Resolver, ) -> Option + Captures<'a> + Captures<'subst>> { - let sized_trait = db - .lang_item(resolver.krate(), LangItem::Sized) - .and_then(|lang_item| lang_item.as_trait().map(to_chalk_trait_id))?; + let sized_trait = LangItem::Sized.resolve_trait(db, resolver.krate()).map(to_chalk_trait_id)?; - let trait_self_idx = trait_self_param_idx(db.upcast(), def); + let trait_self_idx = trait_self_param_idx(db, def); Some( substitution @@ -1298,11 +1193,7 @@ fn implicitly_sized_clauses<'a, 'subst: 'a>( .enumerate() .filter_map( move |(idx, generic_arg)| { - if Some(idx) == trait_self_idx { - None - } else { - Some(generic_arg) - } + if Some(idx) == trait_self_idx { None } else { Some(generic_arg) } }, ) .filter_map(|generic_arg| generic_arg.ty(Interner)) @@ -1338,35 +1229,46 @@ pub(crate) fn generic_defaults_with_diagnostics_query( db: &dyn HirDatabase, def: GenericDefId, ) -> (GenericDefaults, Diagnostics) { - let generic_params = generics(db.upcast(), def); + let generic_params = generics(db, def); if generic_params.len() == 0 { return (GenericDefaults(None), None); } - let resolver = def.resolver(db.upcast()); - let parent_start_idx = generic_params.len_self(); + let resolver = def.resolver(db); - let mut ctx = - TyLoweringContext::new(db, &resolver, generic_params.self_types_map(), def.into()) - .with_impl_trait_mode(ImplTraitLoweringMode::Disallowed) - .with_type_param_mode(ParamLoweringMode::Variable); + let mut ctx = TyLoweringContext::new( + db, + &resolver, + generic_params.store(), + def, + LifetimeElisionKind::AnonymousReportError, + ) + .with_impl_trait_mode(ImplTraitLoweringMode::Disallowed) + .with_type_param_mode(ParamLoweringMode::Variable); let mut idx = 0; + let mut has_any_default = false; let mut defaults = generic_params - .iter_self() - .map(|(id, p)| { - let result = - handle_generic_param(&mut ctx, idx, id, p, parent_start_idx, &generic_params); + .iter_parents_with_store() + .map(|((id, p), store)| { + ctx.store = store; + let (result, has_default) = handle_generic_param(&mut ctx, idx, id, p, &generic_params); + has_any_default |= has_default; idx += 1; result }) .collect::>(); - let diagnostics = create_diagnostics(mem::take(&mut ctx.diagnostics)); - defaults.extend(generic_params.iter_parents_with_types_map().map(|((id, p), types_map)| { - ctx.types_map = types_map; - let result = handle_generic_param(&mut ctx, idx, id, p, parent_start_idx, &generic_params); + ctx.diagnostics.clear(); // Don't include diagnostics from the parent. + defaults.extend(generic_params.iter_self().map(|(id, p)| { + let (result, has_default) = handle_generic_param(&mut ctx, idx, id, p, &generic_params); + has_any_default |= has_default; idx += 1; result })); - let defaults = GenericDefaults(Some(Arc::from_iter(defaults))); + let diagnostics = create_diagnostics(mem::take(&mut ctx.diagnostics)); + let defaults = if has_any_default { + GenericDefaults(Some(Arc::from_iter(defaults))) + } else { + GenericDefaults(None) + }; return (defaults, diagnostics); fn handle_generic_param( @@ -1374,18 +1276,21 @@ pub(crate) fn generic_defaults_with_diagnostics_query( idx: usize, id: GenericParamId, p: GenericParamDataRef<'_>, - parent_start_idx: usize, generic_params: &Generics, - ) -> Binders { + ) -> (Binders, bool) { + let binders = variable_kinds_from_iter(ctx.db, generic_params.iter_id().take(idx)); match p { GenericParamDataRef::TypeParamData(p) => { - let ty = p.default.as_ref().map_or(TyKind::Error.intern(Interner), |ty| { - // Each default can only refer to previous parameters. - // Type variable default referring to parameter coming - // after it is forbidden (FIXME: report diagnostic) - fallback_bound_vars(ctx.lower_ty(*ty), idx, parent_start_idx) - }); - crate::make_binders(ctx.db, generic_params, ty.cast(Interner)) + let ty = p.default.as_ref().map_or_else( + || TyKind::Error.intern(Interner), + |ty| { + // Each default can only refer to previous parameters. + // Type variable default referring to parameter coming + // after it is forbidden (FIXME: report diagnostic) + fallback_bound_vars(ctx.lower_ty(*ty), idx) + }, + ); + (Binders::new(binders, ty.cast(Interner)), p.default.is_some()) } GenericParamDataRef::ConstParamData(p) => { let GenericParamId::ConstParamId(id) = id else { @@ -1401,50 +1306,52 @@ pub(crate) fn generic_defaults_with_diagnostics_query( }, ); // Each default can only refer to previous parameters, see above. - val = fallback_bound_vars(val, idx, parent_start_idx); - make_binders(ctx.db, generic_params, val) + val = fallback_bound_vars(val, idx); + (Binders::new(binders, val), p.default.is_some()) } GenericParamDataRef::LifetimeParamData(_) => { - make_binders(ctx.db, generic_params, error_lifetime().cast(Interner)) + (Binders::new(binders, error_lifetime().cast(Interner)), false) } } } } -pub(crate) fn generic_defaults_with_diagnostics_recover( - db: &dyn HirDatabase, - _cycle: &Cycle, - def: &GenericDefId, +pub(crate) fn generic_defaults_with_diagnostics_cycle_result( + _db: &dyn HirDatabase, + _def: GenericDefId, ) -> (GenericDefaults, Diagnostics) { - let generic_params = generics(db.upcast(), *def); - if generic_params.len() == 0 { - return (GenericDefaults(None), None); - } - // FIXME: this code is not covered in tests. - // we still need one default per parameter - let defaults = GenericDefaults(Some(Arc::from_iter(generic_params.iter_id().map(|id| { - let val = match id { - GenericParamId::TypeParamId(_) => TyKind::Error.intern(Interner).cast(Interner), - GenericParamId::ConstParamId(id) => unknown_const_as_generic(db.const_param_ty(id)), - GenericParamId::LifetimeParamId(_) => error_lifetime().cast(Interner), - }; - crate::make_binders(db, &generic_params, val) - })))); - (defaults, None) + (GenericDefaults(None), None) } fn fn_sig_for_fn(db: &dyn HirDatabase, def: FunctionId) -> PolyFnSig { - let data = db.function_data(def); - let resolver = def.resolver(db.upcast()); - let mut ctx_params = TyLoweringContext::new(db, &resolver, &data.types_map, def.into()) - .with_impl_trait_mode(ImplTraitLoweringMode::Variable) - .with_type_param_mode(ParamLoweringMode::Variable); + let data = db.function_signature(def); + let resolver = def.resolver(db); + let mut ctx_params = TyLoweringContext::new( + db, + &resolver, + &data.store, + def.into(), + LifetimeElisionKind::for_fn_params(&data), + ) + .with_type_param_mode(ParamLoweringMode::Variable); let params = data.params.iter().map(|&tr| ctx_params.lower_ty(tr)); - let mut ctx_ret = TyLoweringContext::new(db, &resolver, &data.types_map, def.into()) - .with_impl_trait_mode(ImplTraitLoweringMode::Opaque) - .with_type_param_mode(ParamLoweringMode::Variable); - let ret = ctx_ret.lower_ty(data.ret_type); - let generics = generics(db.upcast(), def.into()); + + let ret = match data.ret_type { + Some(ret_type) => { + let mut ctx_ret = TyLoweringContext::new( + db, + &resolver, + &data.store, + def.into(), + LifetimeElisionKind::for_fn_ret(), + ) + .with_impl_trait_mode(ImplTraitLoweringMode::Opaque) + .with_type_param_mode(ParamLoweringMode::Variable); + ctx_ret.lower_ty(ret_type) + } + None => TyKind::Tuple(0, Substitution::empty(Interner)).intern(Interner), + }; + let generics = generics(db, def.into()); let sig = CallableSig::from_params_and_return( params, ret, @@ -1458,7 +1365,7 @@ fn fn_sig_for_fn(db: &dyn HirDatabase, def: FunctionId) -> PolyFnSig { /// Build the declared type of a function. This should not need to look at the /// function body. fn type_for_fn(db: &dyn HirDatabase, def: FunctionId) -> Binders { - let generics = generics(db.upcast(), def.into()); + let generics = generics(db, def.into()); let substs = generics.bound_vars_subst(db, DebruijnIndex::INNERMOST); make_binders( db, @@ -1469,36 +1376,40 @@ fn type_for_fn(db: &dyn HirDatabase, def: FunctionId) -> Binders { /// Build the declared type of a const. fn type_for_const(db: &dyn HirDatabase, def: ConstId) -> Binders { - let data = db.const_data(def); - let generics = generics(db.upcast(), def.into()); - let resolver = def.resolver(db.upcast()); - let mut ctx = TyLoweringContext::new(db, &resolver, &data.types_map, def.into()) - .with_type_param_mode(ParamLoweringMode::Variable); + let data = db.const_signature(def); + let generics = generics(db, def.into()); + let resolver = def.resolver(db); + let parent = def.loc(db).container; + let mut ctx = TyLoweringContext::new( + db, + &resolver, + &data.store, + def.into(), + LifetimeElisionKind::for_const(parent), + ) + .with_type_param_mode(ParamLoweringMode::Variable); make_binders(db, &generics, ctx.lower_ty(data.type_ref)) } /// Build the declared type of a static. fn type_for_static(db: &dyn HirDatabase, def: StaticId) -> Binders { - let data = db.static_data(def); - let resolver = def.resolver(db.upcast()); - let mut ctx = TyLoweringContext::new(db, &resolver, &data.types_map, def.into()); + let data = db.static_signature(def); + let resolver = def.resolver(db); + let mut ctx = TyLoweringContext::new( + db, + &resolver, + &data.store, + def.into(), + LifetimeElisionKind::Elided(static_lifetime()), + ); Binders::empty(Interner, ctx.lower_ty(data.type_ref)) } fn fn_sig_for_struct_constructor(db: &dyn HirDatabase, def: StructId) -> PolyFnSig { - let struct_data = db.struct_data(def); - let fields = struct_data.variant_data.fields(); - let resolver = def.resolver(db.upcast()); - let mut ctx = TyLoweringContext::new( - db, - &resolver, - struct_data.variant_data.types_map(), - AdtId::from(def).into(), - ) - .with_type_param_mode(ParamLoweringMode::Variable); - let params = fields.iter().map(|(_, field)| ctx.lower_ty(field.type_ref)); + let field_tys = db.field_types(def.into()); + let params = field_tys.iter().map(|(_, ty)| ty.skip_binders().clone()); let (ret, binders) = type_for_adt(db, def.into()).into_value_and_skipped_binders(); Binders::new( binders, @@ -1508,12 +1419,12 @@ fn fn_sig_for_struct_constructor(db: &dyn HirDatabase, def: StructId) -> PolyFnS /// Build the type of a tuple struct constructor. fn type_for_struct_constructor(db: &dyn HirDatabase, def: StructId) -> Option> { - let struct_data = db.struct_data(def); - match struct_data.variant_data.kind() { - StructKind::Record => None, - StructKind::Unit => Some(type_for_adt(db, def.into())), - StructKind::Tuple => { - let generics = generics(db.upcast(), AdtId::from(def).into()); + let struct_data = db.variant_fields(def.into()); + match struct_data.shape { + FieldsShape::Record => None, + FieldsShape::Unit => Some(type_for_adt(db, def.into())), + FieldsShape::Tuple => { + let generics = generics(db, AdtId::from(def).into()); let substs = generics.bound_vars_subst(db, DebruijnIndex::INNERMOST); Some(make_binders( db, @@ -1525,19 +1436,10 @@ fn type_for_struct_constructor(db: &dyn HirDatabase, def: StructId) -> Option PolyFnSig { - let var_data = db.enum_variant_data(def); - let fields = var_data.variant_data.fields(); - let resolver = def.resolver(db.upcast()); - let mut ctx = TyLoweringContext::new( - db, - &resolver, - var_data.variant_data.types_map(), - DefWithBodyId::VariantId(def).into(), - ) - .with_type_param_mode(ParamLoweringMode::Variable); - let params = fields.iter().map(|(_, field)| ctx.lower_ty(field.type_ref)); - let (ret, binders) = - type_for_adt(db, def.lookup(db.upcast()).parent.into()).into_value_and_skipped_binders(); + let field_tys = db.field_types(def.into()); + let params = field_tys.iter().map(|(_, ty)| ty.skip_binders().clone()); + let parent = def.lookup(db).parent; + let (ret, binders) = type_for_adt(db, parent.into()).into_value_and_skipped_binders(); Binders::new( binders, CallableSig::from_params_and_return(params, ret, false, Safety::Safe, FnAbi::RustCall), @@ -1549,12 +1451,12 @@ fn type_for_enum_variant_constructor( db: &dyn HirDatabase, def: EnumVariantId, ) -> Option> { - let e = def.lookup(db.upcast()).parent; - match db.enum_variant_data(def).variant_data.kind() { - StructKind::Record => None, - StructKind::Unit => Some(type_for_adt(db, e.into())), - StructKind::Tuple => { - let generics = generics(db.upcast(), e.into()); + let e = def.lookup(db).parent; + match db.variant_fields(def.into()).shape { + FieldsShape::Record => None, + FieldsShape::Unit => Some(type_for_adt(db, e.into())), + FieldsShape::Tuple => { + let generics = generics(db, e.into()); let substs = generics.bound_vars_subst(db, DebruijnIndex::INNERMOST); Some(make_binders( db, @@ -1566,8 +1468,18 @@ fn type_for_enum_variant_constructor( } } +#[salsa_macros::tracked(cycle_result = type_for_adt_cycle_result)] +fn type_for_adt_tracked(db: &dyn HirDatabase, adt: AdtId) -> Binders { + type_for_adt(db, adt) +} + +fn type_for_adt_cycle_result(db: &dyn HirDatabase, adt: AdtId) -> Binders { + let generics = generics(db, adt.into()); + make_binders(db, &generics, TyKind::Error.intern(Interner)) +} + fn type_for_adt(db: &dyn HirDatabase, adt: AdtId) -> Binders { - let generics = generics(db.upcast(), adt.into()); + let generics = generics(db, adt.into()); let subst = generics.bound_vars_subst(db, DebruijnIndex::INNERMOST); let ty = TyKind::Adt(crate::AdtId(adt), subst).intern(Interner); make_binders(db, &generics, ty) @@ -1577,21 +1489,40 @@ pub(crate) fn type_for_type_alias_with_diagnostics_query( db: &dyn HirDatabase, t: TypeAliasId, ) -> (Binders, Diagnostics) { - let generics = generics(db.upcast(), t.into()); - let resolver = t.resolver(db.upcast()); - let type_alias_data = db.type_alias_data(t); - let mut ctx = TyLoweringContext::new(db, &resolver, &type_alias_data.types_map, t.into()) - .with_impl_trait_mode(ImplTraitLoweringMode::Opaque) - .with_type_param_mode(ParamLoweringMode::Variable); - let inner = if type_alias_data.is_extern { + let generics = generics(db, t.into()); + let type_alias_data = db.type_alias_signature(t); + let mut diags = None; + let inner = if type_alias_data.flags.contains(TypeAliasFlags::IS_EXTERN) { TyKind::Foreign(crate::to_foreign_def_id(t)).intern(Interner) } else { - type_alias_data - .type_ref + let resolver = t.resolver(db); + let alias = db.type_alias_signature(t); + let mut ctx = TyLoweringContext::new( + db, + &resolver, + &alias.store, + t.into(), + LifetimeElisionKind::AnonymousReportError, + ) + .with_impl_trait_mode(ImplTraitLoweringMode::Opaque) + .with_type_param_mode(ParamLoweringMode::Variable); + let res = alias + .ty .map(|type_ref| ctx.lower_ty(type_ref)) - .unwrap_or_else(|| TyKind::Error.intern(Interner)) + .unwrap_or_else(|| TyKind::Error.intern(Interner)); + diags = create_diagnostics(ctx.diagnostics); + res }; - (make_binders(db, &generics, inner), create_diagnostics(ctx.diagnostics)) + + (make_binders(db, &generics, inner), diags) +} + +pub(crate) fn type_for_type_alias_with_diagnostics_cycle_result( + db: &dyn HirDatabase, + adt: TypeAliasId, +) -> (Binders, Diagnostics) { + let generics = generics(db, adt.into()); + (make_binders(db, &generics, TyKind::Error.intern(Interner)), None) } #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] @@ -1602,7 +1533,7 @@ pub enum TyDefId { } impl_from!(BuiltinType, AdtId(StructId, EnumId, UnionId), TypeAliasId for TyDefId); -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, salsa_macros::Supertype)] pub enum ValueTyDefId { FunctionId(FunctionId), StructId(StructId), @@ -1619,7 +1550,7 @@ impl ValueTyDefId { Self::FunctionId(id) => id.into(), Self::StructId(id) => id.into(), Self::UnionId(id) => id.into(), - Self::EnumVariantId(var) => var.lookup(db.upcast()).parent.into(), + Self::EnumVariantId(var) => var.lookup(db).parent.into(), Self::ConstId(id) => id.into(), Self::StaticId(id) => id.into(), } @@ -1633,20 +1564,11 @@ impl ValueTyDefId { pub(crate) fn ty_query(db: &dyn HirDatabase, def: TyDefId) -> Binders { match def { TyDefId::BuiltinType(it) => Binders::empty(Interner, TyBuilder::builtin(it)), - TyDefId::AdtId(it) => type_for_adt(db, it), + TyDefId::AdtId(it) => type_for_adt_tracked(db, it), TyDefId::TypeAliasId(it) => db.type_for_type_alias_with_diagnostics(it).0, } } -pub(crate) fn ty_recover(db: &dyn HirDatabase, _cycle: &Cycle, def: &TyDefId) -> Binders { - let generics = match *def { - TyDefId::BuiltinType(_) => return Binders::empty(Interner, TyKind::Error.intern(Interner)), - TyDefId::AdtId(it) => generics(db.upcast(), it.into()), - TyDefId::TypeAliasId(it) => generics(db.upcast(), it.into()), - }; - make_binders(db, &generics, TyKind::Error.intern(Interner)) -} - pub(crate) fn value_ty_query(db: &dyn HirDatabase, def: ValueTyDefId) -> Option> { match def { ValueTyDefId::FunctionId(it) => Some(type_for_fn(db, it)), @@ -1666,11 +1588,17 @@ pub(crate) fn impl_self_ty_with_diagnostics_query( db: &dyn HirDatabase, impl_id: ImplId, ) -> (Binders, Diagnostics) { - let impl_data = db.impl_data(impl_id); - let resolver = impl_id.resolver(db.upcast()); - let generics = generics(db.upcast(), impl_id.into()); - let mut ctx = TyLoweringContext::new(db, &resolver, &impl_data.types_map, impl_id.into()) - .with_type_param_mode(ParamLoweringMode::Variable); + let impl_data = db.impl_signature(impl_id); + let resolver = impl_id.resolver(db); + let generics = generics(db, impl_id.into()); + let mut ctx = TyLoweringContext::new( + db, + &resolver, + &impl_data.store, + impl_id.into(), + LifetimeElisionKind::AnonymousCreateParameter { report_in_path: true }, + ) + .with_type_param_mode(ParamLoweringMode::Variable); ( make_binders(db, &generics, ctx.lower_ty(impl_data.self_ty)), create_diagnostics(ctx.diagnostics), @@ -1686,11 +1614,16 @@ pub(crate) fn const_param_ty_with_diagnostics_query( db: &dyn HirDatabase, def: ConstParamId, ) -> (Ty, Diagnostics) { - let parent_data = db.generic_params(def.parent()); + let (parent_data, store) = db.generic_params_and_store(def.parent()); let data = &parent_data[def.local_id()]; - let resolver = def.parent().resolver(db.upcast()); - let mut ctx = - TyLoweringContext::new(db, &resolver, &parent_data.types_map, def.parent().into()); + let resolver = def.parent().resolver(db); + let mut ctx = TyLoweringContext::new( + db, + &resolver, + &store, + def.parent(), + LifetimeElisionKind::AnonymousReportError, + ); let ty = match data { TypeOrConstParamData::TypeParamData(_) => { never!(); @@ -1701,12 +1634,11 @@ pub(crate) fn const_param_ty_with_diagnostics_query( (ty, create_diagnostics(ctx.diagnostics)) } -pub(crate) fn impl_self_ty_with_diagnostics_recover( +pub(crate) fn impl_self_ty_with_diagnostics_cycle_result( db: &dyn HirDatabase, - _cycle: &Cycle, - impl_id: &ImplId, + impl_id: ImplId, ) -> (Binders, Diagnostics) { - let generics = generics(db.upcast(), (*impl_id).into()); + let generics = generics(db, impl_id.into()); (make_binders(db, &generics, TyKind::Error.intern(Interner)), None) } @@ -1718,10 +1650,16 @@ pub(crate) fn impl_trait_with_diagnostics_query( db: &dyn HirDatabase, impl_id: ImplId, ) -> Option<(Binders, Diagnostics)> { - let impl_data = db.impl_data(impl_id); - let resolver = impl_id.resolver(db.upcast()); - let mut ctx = TyLoweringContext::new(db, &resolver, &impl_data.types_map, impl_id.into()) - .with_type_param_mode(ParamLoweringMode::Variable); + let impl_data = db.impl_signature(impl_id); + let resolver = impl_id.resolver(db); + let mut ctx = TyLoweringContext::new( + db, + &resolver, + &impl_data.store, + impl_id.into(), + LifetimeElisionKind::AnonymousCreateParameter { report_in_path: true }, + ) + .with_type_param_mode(ParamLoweringMode::Variable); let (self_ty, binders) = db.impl_self_ty(impl_id).into_value_and_skipped_binders(); let target_trait = impl_data.target_trait.as_ref()?; let trait_ref = Binders::new(binders, ctx.lower_trait_ref(target_trait, self_ty)?); @@ -1733,13 +1671,16 @@ pub(crate) fn return_type_impl_traits( def: hir_def::FunctionId, ) -> Option>> { // FIXME unify with fn_sig_for_fn instead of doing lowering twice, maybe - let data = db.function_data(def); - let resolver = def.resolver(db.upcast()); - let mut ctx_ret = TyLoweringContext::new(db, &resolver, &data.types_map, def.into()) - .with_impl_trait_mode(ImplTraitLoweringMode::Opaque) - .with_type_param_mode(ParamLoweringMode::Variable); - let _ret = ctx_ret.lower_ty(data.ret_type); - let generics = generics(db.upcast(), def.into()); + let data = db.function_signature(def); + let resolver = def.resolver(db); + let mut ctx_ret = + TyLoweringContext::new(db, &resolver, &data.store, def.into(), LifetimeElisionKind::Infer) + .with_impl_trait_mode(ImplTraitLoweringMode::Opaque) + .with_type_param_mode(ParamLoweringMode::Variable); + if let Some(ret_type) = data.ret_type { + let _ret = ctx_ret.lower_ty(ret_type); + } + let generics = generics(db, def.into()); let return_type_impl_traits = ImplTraits { impl_traits: ctx_ret.impl_trait_mode.opaque_type_data }; if return_type_impl_traits.impl_traits.is_empty() { @@ -1753,19 +1694,25 @@ pub(crate) fn type_alias_impl_traits( db: &dyn HirDatabase, def: hir_def::TypeAliasId, ) -> Option>> { - let data = db.type_alias_data(def); - let resolver = def.resolver(db.upcast()); - let mut ctx = TyLoweringContext::new(db, &resolver, &data.types_map, def.into()) - .with_impl_trait_mode(ImplTraitLoweringMode::Opaque) - .with_type_param_mode(ParamLoweringMode::Variable); - if let Some(type_ref) = data.type_ref { + let data = db.type_alias_signature(def); + let resolver = def.resolver(db); + let mut ctx = TyLoweringContext::new( + db, + &resolver, + &data.store, + def.into(), + LifetimeElisionKind::AnonymousReportError, + ) + .with_impl_trait_mode(ImplTraitLoweringMode::Opaque) + .with_type_param_mode(ParamLoweringMode::Variable); + if let Some(type_ref) = data.ty { let _ty = ctx.lower_ty(type_ref); } let type_alias_impl_traits = ImplTraits { impl_traits: ctx.impl_trait_mode.opaque_type_data }; if type_alias_impl_traits.impl_traits.is_empty() { None } else { - let generics = generics(db.upcast(), def.into()); + let generics = generics(db, def.into()); Some(Arc::new(make_binders(db, &generics, type_alias_impl_traits))) } } @@ -1777,132 +1724,14 @@ pub(crate) fn lower_to_chalk_mutability(m: hir_def::type_ref::Mutability) -> Mut } } -/// Checks if the provided generic arg matches its expected kind, then lower them via -/// provided closures. Use unknown if there was kind mismatch. -/// -pub(crate) fn generic_arg_to_chalk<'a, T>( - db: &dyn HirDatabase, - kind_id: GenericParamId, - arg: &'a GenericArg, - this: &mut T, - types_map: &TypesMap, - for_type: impl FnOnce(&mut T, TypeRefId) -> Ty + 'a, - for_const: impl FnOnce(&mut T, &ConstRef, Ty) -> Const + 'a, - for_lifetime: impl FnOnce(&mut T, &LifetimeRef) -> Lifetime + 'a, -) -> crate::GenericArg { - let kind = match kind_id { - GenericParamId::TypeParamId(_) => ParamKind::Type, - GenericParamId::ConstParamId(id) => { - let ty = db.const_param_ty(id); - ParamKind::Const(ty) - } - GenericParamId::LifetimeParamId(_) => ParamKind::Lifetime, - }; - match (arg, kind) { - (GenericArg::Type(type_ref), ParamKind::Type) => for_type(this, *type_ref).cast(Interner), - (GenericArg::Const(c), ParamKind::Const(c_ty)) => for_const(this, c, c_ty).cast(Interner), - (GenericArg::Lifetime(lifetime_ref), ParamKind::Lifetime) => { - for_lifetime(this, lifetime_ref).cast(Interner) - } - (GenericArg::Const(_), ParamKind::Type) => TyKind::Error.intern(Interner).cast(Interner), - (GenericArg::Lifetime(_), ParamKind::Type) => TyKind::Error.intern(Interner).cast(Interner), - (GenericArg::Type(t), ParamKind::Const(c_ty)) => { - // We want to recover simple idents, which parser detects them - // as types. Maybe here is not the best place to do it, but - // it works. - if let TypeRef::Path(p) = &types_map[*t] { - if let Some(p) = p.mod_path() { - if p.kind == PathKind::Plain { - if let [n] = p.segments() { - let c = ConstRef::Path(n.clone()); - return for_const(this, &c, c_ty).cast(Interner); - } - } - } - } - unknown_const_as_generic(c_ty) - } - (GenericArg::Lifetime(_), ParamKind::Const(c_ty)) => unknown_const_as_generic(c_ty), - (GenericArg::Type(_), ParamKind::Lifetime) => error_lifetime().cast(Interner), - (GenericArg::Const(_), ParamKind::Lifetime) => error_lifetime().cast(Interner), - } -} - -pub(crate) fn const_or_path_to_chalk<'g>( - db: &dyn HirDatabase, - resolver: &Resolver, - owner: TypeOwnerId, - expected_ty: Ty, - value: &ConstRef, - mode: ParamLoweringMode, - args: impl FnOnce() -> Option<&'g Generics>, - debruijn: DebruijnIndex, -) -> Const { - match value { - ConstRef::Scalar(s) => intern_const_ref(db, s, expected_ty, resolver.krate()), - ConstRef::Path(n) => { - let path = ModPath::from_segments(PathKind::Plain, Some(n.clone())); - path_to_const( - db, - resolver, - &Path::from_known_path_with_no_generic(path), - mode, - args, - debruijn, - expected_ty.clone(), - ) - .unwrap_or_else(|| unknown_const(expected_ty)) - } - &ConstRef::Complex(it) => { - let crate_data = &db.crate_graph()[resolver.krate()]; - if crate_data.env.get("__ra_is_test_fixture").is_none() && crate_data.origin.is_local() - { - // FIXME: current `InTypeConstId` is very unstable, so we only use it in non local crate - // that are unlikely to be edited. - return unknown_const(expected_ty); - } - let c = db - .intern_in_type_const(InTypeConstLoc { - id: it, - owner, - expected_ty: Box::new(InTypeConstIdMetadata(expected_ty.clone())), - }) - .into(); - intern_const_scalar( - ConstScalar::UnevaluatedConst(c, Substitution::empty(Interner)), - expected_ty, - ) - } - } -} - /// Replaces any 'free' `BoundVar`s in `s` by `TyKind::Error` from the perspective of generic -/// parameter whose index is `param_index`. A `BoundVar` is free when it is or (syntactically) -/// appears after the generic parameter of `param_index`. +/// parameter whose index is `param_index`. A `BoundVar` is free when it appears after the +/// generic parameter of `param_index`. fn fallback_bound_vars + HasInterner>( s: T, param_index: usize, - parent_start: usize, ) -> T { - // Keep in mind that parent generic parameters, if any, come *after* those of the item in - // question. In the diagrams below, `c*` and `p*` represent generic parameters of the item and - // its parent respectively. - let is_allowed = |index| { - if param_index < parent_start { - // The parameter of `param_index` is one from the item in question. Any parent generic - // parameters or the item's generic parameters that come before `param_index` is - // allowed. - // [c1, .., cj, .., ck, p1, .., pl] where cj is `param_index` - // ^^^^^^ ^^^^^^^^^^ these are allowed - !(param_index..parent_start).contains(&index) - } else { - // The parameter of `param_index` is one from the parent generics. Only parent generic - // parameters that come before `param_index` are allowed. - // [c1, .., ck, p1, .., pj, .., pl] where pj is `param_index` - // ^^^^^^ these are allowed - (parent_start..param_index).contains(&index) - } - }; + let is_allowed = |index| (0..param_index).contains(&index); crate::fold_free_vars( s, diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/lower/diagnostics.rs b/src/tools/rust-analyzer/crates/hir-ty/src/lower/diagnostics.rs index 5c77bcd0736ab..009f047109dfb 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/lower/diagnostics.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/lower/diagnostics.rs @@ -1,13 +1,11 @@ //! This files contains the declaration of diagnostics kinds for ty and path lowering. -use either::Either; use hir_def::type_ref::TypeRefId; - -type TypeSource = Either; +use hir_def::{GenericDefId, GenericParamId}; #[derive(Debug, PartialEq, Eq, Clone)] pub struct TyLoweringDiagnostic { - pub source: TypeSource, + pub source: TypeRefId, pub kind: TyLoweringDiagnosticKind, } @@ -24,13 +22,69 @@ pub enum GenericArgsProhibitedReason { PrimitiveTy, Const, Static, + LocalVariable, /// When there is a generic enum, within the expression `Enum::Variant`, /// either `Enum` or `Variant` are allowed to have generic arguments, but not both. EnumVariant, } +/// A path can have many generic arguments: each segment may have one associated with the +/// segment, and in addition, each associated type binding may have generic arguments. This +/// enum abstracts over both. +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum PathGenericsSource { + /// Generic arguments directly on the segment. + Segment(u32), + /// Generic arguments on an associated type, e.g. `Foo = C>` or `Foo: Bound>`. + AssocType { segment: u32, assoc_type: u32 }, +} + #[derive(Debug, PartialEq, Eq, Clone)] pub enum PathLoweringDiagnostic { - GenericArgsProhibited { segment: u32, reason: GenericArgsProhibitedReason }, - ParenthesizedGenericArgsWithoutFnTrait { segment: u32 }, + GenericArgsProhibited { + segment: u32, + reason: GenericArgsProhibitedReason, + }, + ParenthesizedGenericArgsWithoutFnTrait { + segment: u32, + }, + /// The expected lifetimes & types and consts counts can be found by inspecting the `GenericDefId`. + IncorrectGenericsLen { + generics_source: PathGenericsSource, + provided_count: u32, + expected_count: u32, + kind: IncorrectGenericsLenKind, + def: GenericDefId, + }, + IncorrectGenericsOrder { + generics_source: PathGenericsSource, + param_id: GenericParamId, + arg_idx: u32, + /// Whether the `GenericArgs` contains a `Self` arg. + has_self_arg: bool, + }, + ElidedLifetimesInPath { + generics_source: PathGenericsSource, + def: GenericDefId, + expected_count: u32, + hard_error: bool, + }, + /// An elided lifetimes was used (either implicitly, by not specifying lifetimes, or explicitly, by using `'_`), + /// but lifetime elision could not find a lifetime to replace it with. + ElisionFailure { + generics_source: PathGenericsSource, + def: GenericDefId, + expected_count: u32, + }, + MissingLifetime { + generics_source: PathGenericsSource, + def: GenericDefId, + expected_count: u32, + }, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum IncorrectGenericsLenKind { + Lifetimes, + TypesAndConsts, } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/lower/path.rs b/src/tools/rust-analyzer/crates/hir-ty/src/lower/path.rs index a165932ddcc8c..726eaf8b0a1dc 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/lower/path.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/lower/path.rs @@ -1,33 +1,35 @@ //! A wrapper around [`TyLoweringContext`] specifically for lowering paths. -use std::iter; - -use chalk_ir::{cast::Cast, fold::Shift, BoundVar}; +use chalk_ir::{BoundVar, cast::Cast, fold::Shift}; use either::Either; use hir_def::{ - data::TraitFlags, - expr_store::HygieneId, - generics::{TypeParamProvenance, WherePredicate, WherePredicateTypeTarget}, - path::{GenericArg, GenericArgs, Path, PathSegment, PathSegments}, + GenericDefId, GenericParamId, Lookup, TraitId, + expr_store::{ + ExpressionStore, HygieneId, + path::{GenericArg, GenericArgs, GenericArgsParentheses, Path, PathSegment, PathSegments}, + }, + hir::generics::{ + GenericParamDataRef, TypeOrConstParamData, TypeParamData, TypeParamProvenance, + }, resolver::{ResolveValueResult, TypeNs, ValueNs}, - type_ref::{TypeBound, TypeRef, TypesMap}, - GenericDefId, GenericParamId, ItemContainerId, Lookup, TraitId, + signatures::TraitFlags, + type_ref::{TypeRef, TypeRefId}, }; use smallvec::SmallVec; use stdx::never; use crate::{ - consteval::unknown_const_as_generic, + AliasEq, AliasTy, GenericArgsProhibitedReason, ImplTraitLoweringMode, IncorrectGenericsLenKind, + Interner, ParamLoweringMode, PathGenericsSource, PathLoweringDiagnostic, ProjectionTy, + QuantifiedWhereClause, Substitution, TraitRef, Ty, TyBuilder, TyDefId, TyKind, + TyLoweringContext, ValueTyDefId, WhereClause, + consteval::{unknown_const, unknown_const_as_generic}, + db::HirDatabase, error_lifetime, - generics::generics, - lower::{ - generic_arg_to_chalk, named_associated_type_shorthand_candidates, ImplTraitLoweringState, - }, - to_assoc_type_id, to_chalk_trait_id, to_placeholder_idx, + generics::{Generics, generics}, + lower::{LifetimeElisionKind, named_associated_type_shorthand_candidates}, + static_lifetime, to_assoc_type_id, to_chalk_trait_id, to_placeholder_idx, utils::associated_type_by_name_including_super_traits, - AliasEq, AliasTy, GenericArgsProhibitedReason, ImplTraitLoweringMode, Interner, - ParamLoweringMode, PathLoweringDiagnostic, ProjectionTy, QuantifiedWhereClause, Substitution, - TraitRef, Ty, TyBuilder, TyDefId, TyKind, TyLoweringContext, ValueTyDefId, WhereClause, }; type CallbackData<'a> = Either< @@ -117,17 +119,31 @@ impl<'a, 'b> PathLoweringContext<'a, 'b> { .expect("invalid segment passed to PathLoweringContext::set_current_segment()"); } + #[inline] + fn with_lifetime_elision( + &mut self, + lifetime_elision: LifetimeElisionKind, + f: impl FnOnce(&mut PathLoweringContext<'_, '_>) -> T, + ) -> T { + let old_lifetime_elision = + std::mem::replace(&mut self.ctx.lifetime_elision, lifetime_elision); + let result = f(self); + self.ctx.lifetime_elision = old_lifetime_elision; + result + } + pub(crate) fn lower_ty_relative_path( &mut self, ty: Ty, // We need the original resolution to lower `Self::AssocTy` correctly res: Option, + infer_args: bool, ) -> (Ty, Option) { match self.segments.len() - self.current_segment_idx { 0 => (ty, res), 1 => { // resolve unselected assoc types - (self.select_associated_type(res), None) + (self.select_associated_type(res, infer_args), None) } _ => { // FIXME report error (ambiguous associated type) @@ -136,19 +152,6 @@ impl<'a, 'b> PathLoweringContext<'a, 'b> { } } - fn prohibit_parenthesized_generic_args(&mut self) -> bool { - if let Some(generic_args) = self.current_or_prev_segment.args_and_bindings { - if generic_args.desugared_from_fn { - let segment = self.current_segment_u32(); - self.on_diagnostic( - PathLoweringDiagnostic::ParenthesizedGenericArgsWithoutFnTrait { segment }, - ); - return true; - } - } - false - } - // When calling this, the current segment is the resolved segment (we don't advance it yet). pub(crate) fn lower_partly_resolved_path( &mut self, @@ -164,12 +167,13 @@ impl<'a, 'b> PathLoweringContext<'a, 'b> { let trait_ref = self.lower_trait_ref_from_resolved_path( trait_, TyKind::Error.intern(Interner), + infer_args, ); self.skip_resolved_segment(); let segment = self.current_or_prev_segment; let found = - self.ctx.db.trait_data(trait_).associated_type_by_name(segment.name); + self.ctx.db.trait_items(trait_).associated_type_by_name(segment.name); match found { Some(associated_ty) => { @@ -179,17 +183,17 @@ impl<'a, 'b> PathLoweringContext<'a, 'b> { // this point (`trait_ref.substitution`). let substitution = self.substs_from_path_segment( associated_ty.into(), - false, + infer_args, None, + true, ); - let len_self = - generics(self.ctx.db.upcast(), associated_ty.into()).len_self(); let substitution = Substitution::from_iter( Interner, - substitution - .iter(Interner) - .take(len_self) - .chain(trait_ref.substitution.iter(Interner)), + trait_ref.substitution.iter(Interner).chain( + substitution + .iter(Interner) + .skip(trait_ref.substitution.len(Interner)), + ), ); TyKind::Alias(AliasTy::Projection(ProjectionTy { associated_ty_id: to_assoc_type_id(associated_ty), @@ -225,12 +229,7 @@ impl<'a, 'b> PathLoweringContext<'a, 'b> { TyKind::Placeholder(to_placeholder_idx(self.ctx.db, param_id.into())) } ParamLoweringMode::Variable => { - let idx = match self - .ctx - .generics() - .expect("generics in scope") - .type_or_const_param_idx(param_id.into()) - { + let idx = match self.ctx.generics().type_or_const_param_idx(param_id.into()) { None => { never!("no matching generics"); return (TyKind::Error.intern(Interner), None); @@ -243,7 +242,7 @@ impl<'a, 'b> PathLoweringContext<'a, 'b> { } .intern(Interner), TypeNs::SelfType(impl_id) => { - let generics = self.ctx.generics().expect("impl should have generic param scope"); + let generics = self.ctx.generics(); match self.ctx.type_param_mode { ParamLoweringMode::Placeholder => { @@ -253,22 +252,13 @@ impl<'a, 'b> PathLoweringContext<'a, 'b> { let subst = generics.placeholder_subst(self.ctx.db); self.ctx.db.impl_self_ty(impl_id).substitute(Interner, &subst) } - ParamLoweringMode::Variable => { - let starting_from = match generics.def() { - GenericDefId::ImplId(_) => 0, - // `def` is an item within impl. We need to substitute `BoundVar`s but - // remember that they are for parent (i.e. impl) generic params so they - // come after our own params. - _ => generics.len_self(), - }; - TyBuilder::impl_self_ty(self.ctx.db, impl_id) - .fill_with_bound_vars(self.ctx.in_binders, starting_from) - .build() - } + ParamLoweringMode::Variable => TyBuilder::impl_self_ty(self.ctx.db, impl_id) + .fill_with_bound_vars(self.ctx.in_binders, 0) + .build(), } } TypeNs::AdtSelfType(adt) => { - let generics = generics(self.ctx.db.upcast(), adt.into()); + let generics = generics(self.ctx.db, adt.into()); let substs = match self.ctx.type_param_mode { ParamLoweringMode::Placeholder => generics.placeholder_subst(self.ctx.db), ParamLoweringMode::Variable => { @@ -282,11 +272,13 @@ impl<'a, 'b> PathLoweringContext<'a, 'b> { TypeNs::BuiltinType(it) => self.lower_path_inner(it.into(), infer_args), TypeNs::TypeAliasId(it) => self.lower_path_inner(it.into(), infer_args), // FIXME: report error - TypeNs::EnumVariantId(_) => return (TyKind::Error.intern(Interner), None), + TypeNs::EnumVariantId(_) | TypeNs::ModuleId(_) => { + return (TyKind::Error.intern(Interner), None); + } }; self.skip_resolved_segment(); - self.lower_ty_relative_path(ty, Some(resolution)) + self.lower_ty_relative_path(ty, Some(resolution), infer_args) } fn handle_type_ns_resolution(&mut self, resolution: &TypeNs) { @@ -313,6 +305,9 @@ impl<'a, 'b> PathLoweringContext<'a, 'b> { TypeNs::BuiltinType(_) => { prohibit_generics_on_resolved(GenericArgsProhibitedReason::PrimitiveTy) } + TypeNs::ModuleId(_) => { + prohibit_generics_on_resolved(GenericArgsProhibitedReason::Module) + } TypeNs::AdtId(_) | TypeNs::EnumVariantId(_) | TypeNs::TypeAliasId(_) @@ -330,10 +325,8 @@ impl<'a, 'b> PathLoweringContext<'a, 'b> { } pub(crate) fn resolve_path_in_type_ns(&mut self) -> Option<(TypeNs, Option)> { - let (resolution, remaining_index, _, prefix_info) = self - .ctx - .resolver - .resolve_path_in_type_ns_with_prefix_info(self.ctx.db.upcast(), self.path)?; + let (resolution, remaining_index, _, prefix_info) = + self.ctx.resolver.resolve_path_in_type_ns_with_prefix_info(self.ctx.db, self.path)?; let segments = self.segments; if segments.is_empty() || matches!(self.path, Path::LangItem(..)) { @@ -388,7 +381,7 @@ impl<'a, 'b> PathLoweringContext<'a, 'b> { hygiene_id: HygieneId, ) -> Option { let (res, prefix_info) = self.ctx.resolver.resolve_path_in_value_ns_with_prefix_info( - self.ctx.db.upcast(), + self.ctx.db, self.path, hygiene_id, )?; @@ -459,14 +452,19 @@ impl<'a, 'b> PathLoweringContext<'a, 'b> { // and statics can be generic, or just because it was easier for rustc implementors. // That means we'll show the wrong error code. Because of us it's easier to do it // this way :) - ValueNs::GenericParam(_) | ValueNs::ConstId(_) => { + ValueNs::GenericParam(_) => { prohibit_generics_on_resolved(GenericArgsProhibitedReason::Const) } ValueNs::StaticId(_) => { prohibit_generics_on_resolved(GenericArgsProhibitedReason::Static) } - ValueNs::FunctionId(_) | ValueNs::StructId(_) | ValueNs::EnumVariantId(_) => {} - ValueNs::LocalBinding(_) => {} + ValueNs::LocalBinding(_) => { + prohibit_generics_on_resolved(GenericArgsProhibitedReason::LocalVariable) + } + ValueNs::FunctionId(_) + | ValueNs::StructId(_) + | ValueNs::EnumVariantId(_) + | ValueNs::ConstId(_) => {} } } ResolveValueResult::Partial(resolution, _, _) => { @@ -476,22 +474,21 @@ impl<'a, 'b> PathLoweringContext<'a, 'b> { Some(res) } - fn select_associated_type(&mut self, res: Option) -> Ty { - let Some((generics, res)) = self.ctx.generics().zip(res) else { + fn select_associated_type(&mut self, res: Option, infer_args: bool) -> Ty { + let Some(res) = res else { return TyKind::Error.intern(Interner); }; let segment = self.current_or_prev_segment; let ty = named_associated_type_shorthand_candidates( self.ctx.db, - generics.def(), + self.ctx.def, res, Some(segment.name.clone()), move |name, t, associated_ty| { - let generics = self.ctx.generics().unwrap(); - if name != segment.name { return None; } + let generics = self.ctx.generics(); let parent_subst = t.substitution.clone(); let parent_subst = match self.ctx.type_param_mode { @@ -511,15 +508,14 @@ impl<'a, 'b> PathLoweringContext<'a, 'b> { // generic params. It's inefficient to splice the `Substitution`s, so we may want // that method to optionally take parent `Substitution` as we already know them at // this point (`t.substitution`). - let substs = self.substs_from_path_segment(associated_ty.into(), false, None); - - let len_self = - crate::generics::generics(self.ctx.db.upcast(), associated_ty.into()) - .len_self(); + let substs = + self.substs_from_path_segment(associated_ty.into(), infer_args, None, true); let substs = Substitution::from_iter( Interner, - substs.iter(Interner).take(len_self).chain(parent_subst.iter(Interner)), + parent_subst + .iter(Interner) + .chain(substs.iter(Interner).skip(parent_subst.len(Interner))), ); Some( @@ -541,7 +537,7 @@ impl<'a, 'b> PathLoweringContext<'a, 'b> { TyDefId::AdtId(it) => it.into(), TyDefId::TypeAliasId(it) => it.into(), }; - let substs = self.substs_from_path_segment(generic_def, infer_args, None); + let substs = self.substs_from_path_segment(generic_def, infer_args, None, false); self.ctx.db.ty(typeable).substitute(Interner, &substs) } @@ -554,6 +550,7 @@ impl<'a, 'b> PathLoweringContext<'a, 'b> { // special-case enum variants resolved: ValueTyDefId, infer_args: bool, + lowering_assoc_type_generics: bool, ) -> Substitution { let prev_current_segment_idx = self.current_segment_idx; let prev_current_segment = self.current_or_prev_segment; @@ -587,10 +584,15 @@ impl<'a, 'b> PathLoweringContext<'a, 'b> { self.current_or_prev_segment = penultimate; } } - var.lookup(self.ctx.db.upcast()).parent.into() + var.lookup(self.ctx.db).parent.into() } }; - let result = self.substs_from_path_segment(generic_def, infer_args, None); + let result = self.substs_from_path_segment( + generic_def, + infer_args, + None, + lowering_assoc_type_generics, + ); self.current_segment_idx = prev_current_segment_idx; self.current_or_prev_segment = prev_current_segment; result @@ -601,16 +603,41 @@ impl<'a, 'b> PathLoweringContext<'a, 'b> { def: GenericDefId, infer_args: bool, explicit_self_ty: Option, + lowering_assoc_type_generics: bool, ) -> Substitution { - let prohibit_parens = match def { - GenericDefId::TraitId(trait_) => { - let trait_data = self.ctx.db.trait_data(trait_); - !trait_data.flags.contains(TraitFlags::RUSTC_PAREN_SUGAR) + let mut lifetime_elision = self.ctx.lifetime_elision.clone(); + + if let Some(args) = self.current_or_prev_segment.args_and_bindings { + if args.parenthesized != GenericArgsParentheses::No { + let prohibit_parens = match def { + GenericDefId::TraitId(trait_) => { + // RTN is prohibited anyways if we got here. + let is_rtn = + args.parenthesized == GenericArgsParentheses::ReturnTypeNotation; + let is_fn_trait = self + .ctx + .db + .trait_signature(trait_) + .flags + .contains(TraitFlags::RUSTC_PAREN_SUGAR); + is_rtn || !is_fn_trait + } + _ => true, + }; + + if prohibit_parens { + let segment = self.current_segment_u32(); + self.on_diagnostic( + PathLoweringDiagnostic::ParenthesizedGenericArgsWithoutFnTrait { segment }, + ); + + return TyBuilder::unknown_subst(self.ctx.db, def); + } + + // `Fn()`-style generics are treated like functions for the purpose of lifetime elision. + lifetime_elision = + LifetimeElisionKind::AnonymousCreateParameter { report_in_path: false }; } - _ => true, - }; - if prohibit_parens && self.prohibit_parenthesized_generic_args() { - return TyBuilder::unknown_subst(self.ctx.db, def); } self.substs_from_args_and_bindings( @@ -618,6 +645,9 @@ impl<'a, 'b> PathLoweringContext<'a, 'b> { def, infer_args, explicit_self_ty, + PathGenericsSource::Segment(self.current_segment_u32()), + lowering_assoc_type_generics, + lifetime_elision, ) } @@ -627,152 +657,185 @@ impl<'a, 'b> PathLoweringContext<'a, 'b> { def: GenericDefId, infer_args: bool, explicit_self_ty: Option, + generics_source: PathGenericsSource, + lowering_assoc_type_generics: bool, + lifetime_elision: LifetimeElisionKind, ) -> Substitution { - // Order is - // - Optional Self parameter - // - Lifetime parameters - // - Type or Const parameters - // - Parent parameters - let def_generics = generics(self.ctx.db.upcast(), def); - let ( - parent_params, - self_param, - type_params, - const_params, - impl_trait_params, - lifetime_params, - ) = def_generics.provenance_split(); - let item_len = - self_param as usize + type_params + const_params + impl_trait_params + lifetime_params; - let total_len = parent_params + item_len; - - let mut substs = Vec::new(); - - // we need to iterate the lifetime and type/const params separately as our order of them - // differs from the supplied syntax - - let ty_error = || TyKind::Error.intern(Interner).cast(Interner); - let mut def_toc_iter = def_generics.iter_self_type_or_consts_id(); - let fill_self_param = || { - if self_param { - let self_ty = explicit_self_ty.map(|x| x.cast(Interner)).unwrap_or_else(ty_error); - - if let Some(id) = def_toc_iter.next() { - assert!(matches!(id, GenericParamId::TypeParamId(_))); - substs.push(self_ty); + struct LowererCtx<'a, 'b, 'c> { + ctx: &'a mut PathLoweringContext<'b, 'c>, + generics_source: PathGenericsSource, + } + + impl GenericArgsLowerer for LowererCtx<'_, '_, '_> { + fn report_len_mismatch( + &mut self, + def: GenericDefId, + provided_count: u32, + expected_count: u32, + kind: IncorrectGenericsLenKind, + ) { + self.ctx.on_diagnostic(PathLoweringDiagnostic::IncorrectGenericsLen { + generics_source: self.generics_source, + provided_count, + expected_count, + kind, + def, + }); + } + + fn report_arg_mismatch( + &mut self, + param_id: GenericParamId, + arg_idx: u32, + has_self_arg: bool, + ) { + self.ctx.on_diagnostic(PathLoweringDiagnostic::IncorrectGenericsOrder { + generics_source: self.generics_source, + param_id, + arg_idx, + has_self_arg, + }); + } + + fn provided_kind( + &mut self, + param_id: GenericParamId, + param: GenericParamDataRef<'_>, + arg: &GenericArg, + ) -> crate::GenericArg { + match (param, arg) { + (GenericParamDataRef::LifetimeParamData(_), GenericArg::Lifetime(lifetime)) => { + self.ctx.ctx.lower_lifetime(*lifetime).cast(Interner) + } + (GenericParamDataRef::TypeParamData(_), GenericArg::Type(type_ref)) => { + self.ctx.ctx.lower_ty(*type_ref).cast(Interner) + } + (GenericParamDataRef::ConstParamData(_), GenericArg::Const(konst)) => { + let GenericParamId::ConstParamId(const_id) = param_id else { + unreachable!("non-const param ID for const param"); + }; + self.ctx + .ctx + .lower_const(konst, self.ctx.ctx.db.const_param_ty(const_id)) + .cast(Interner) + } + _ => unreachable!("unmatching param kinds were passed to `provided_kind()`"), } } - }; - let mut had_explicit_args = false; - - if let Some(&GenericArgs { ref args, has_self_type, .. }) = args_and_bindings { - // Fill in the self param first - if has_self_type && self_param { - had_explicit_args = true; - if let Some(id) = def_toc_iter.next() { - assert!(matches!(id, GenericParamId::TypeParamId(_))); - had_explicit_args = true; - if let GenericArg::Type(ty) = &args[0] { - substs.push(self.ctx.lower_ty(*ty).cast(Interner)); + + fn provided_type_like_const( + &mut self, + const_ty: Ty, + arg: TypeLikeConst<'_>, + ) -> crate::Const { + match arg { + TypeLikeConst::Path(path) => self.ctx.ctx.lower_path_as_const(path, const_ty), + TypeLikeConst::Infer => unknown_const(const_ty), + } + } + + fn inferred_kind( + &mut self, + def: GenericDefId, + param_id: GenericParamId, + param: GenericParamDataRef<'_>, + infer_args: bool, + preceding_args: &[crate::GenericArg], + ) -> crate::GenericArg { + let default = || { + self.ctx + .ctx + .db + .generic_defaults(def) + .get(preceding_args.len()) + .map(|default| default.clone().substitute(Interner, preceding_args)) + }; + match param { + GenericParamDataRef::LifetimeParamData(_) => error_lifetime().cast(Interner), + GenericParamDataRef::TypeParamData(param) => { + if !infer_args && param.default.is_some() { + if let Some(default) = default() { + return default; + } + } + TyKind::Error.intern(Interner).cast(Interner) + } + GenericParamDataRef::ConstParamData(param) => { + if !infer_args && param.default.is_some() { + if let Some(default) = default() { + return default; + } + } + let GenericParamId::ConstParamId(const_id) = param_id else { + unreachable!("non-const param ID for const param"); + }; + unknown_const_as_generic(self.ctx.ctx.db.const_param_ty(const_id)) + .cast(Interner) } } - } else { - fill_self_param() - }; - - // Then fill in the supplied lifetime args, or error lifetimes if there are too few - // (default lifetimes aren't a thing) - for arg in args - .iter() - .filter_map(|arg| match arg { - GenericArg::Lifetime(arg) => Some(self.ctx.lower_lifetime(arg)), - _ => None, - }) - .chain(iter::repeat(error_lifetime())) - .take(lifetime_params) - { - substs.push(arg.cast(Interner)); - } - - let skip = if has_self_type { 1 } else { 0 }; - // Fill in supplied type and const args - // Note if non-lifetime args are provided, it should be all of them, but we can't rely on that - for (arg, id) in args - .iter() - .filter(|arg| !matches!(arg, GenericArg::Lifetime(_))) - .skip(skip) - .take(type_params + const_params) - .zip(def_toc_iter) - { - had_explicit_args = true; - let arg = generic_arg_to_chalk( - self.ctx.db, - id, - arg, - self.ctx, - self.ctx.types_map, - |ctx, type_ref| ctx.lower_ty(type_ref), - |ctx, const_ref, ty| ctx.lower_const(const_ref, ty), - |ctx, lifetime_ref| ctx.lower_lifetime(lifetime_ref), - ); - substs.push(arg); } - } else { - fill_self_param(); - } - let param_to_err = |id| match id { - GenericParamId::ConstParamId(x) => { - unknown_const_as_generic(self.ctx.db.const_param_ty(x)) + fn parent_arg(&mut self, param_id: GenericParamId) -> crate::GenericArg { + match param_id { + GenericParamId::TypeParamId(_) => TyKind::Error.intern(Interner).cast(Interner), + GenericParamId::ConstParamId(const_id) => { + unknown_const_as_generic(self.ctx.ctx.db.const_param_ty(const_id)) + } + GenericParamId::LifetimeParamId(_) => error_lifetime().cast(Interner), + } } - GenericParamId::TypeParamId(_) => ty_error(), - GenericParamId::LifetimeParamId(_) => error_lifetime().cast(Interner), - }; - // handle defaults. In expression or pattern path segments without - // explicitly specified type arguments, missing type arguments are inferred - // (i.e. defaults aren't used). - // Generic parameters for associated types are not supposed to have defaults, so we just - // ignore them. - let is_assoc_ty = || match def { - GenericDefId::TypeAliasId(id) => { - matches!(id.lookup(self.ctx.db.upcast()).container, ItemContainerId::TraitId(_)) - } - _ => false, - }; - let fill_defaults = (!infer_args || had_explicit_args) && !is_assoc_ty(); - if fill_defaults { - let defaults = &*self.ctx.db.generic_defaults(def); - let (item, _parent) = defaults.split_at(item_len); - let parent_from = item_len - substs.len(); - - let mut rem = - def_generics.iter_id().skip(substs.len()).map(param_to_err).collect::>(); - // Fill in defaults for type/const params - for (idx, default_ty) in item[substs.len()..].iter().enumerate() { - // each default can depend on the previous parameters - let substs_so_far = Substitution::from_iter( - Interner, - substs.iter().cloned().chain(rem[idx..].iter().cloned()), - ); - substs.push(default_ty.clone().substitute(Interner, &substs_so_far)); + + fn report_elided_lifetimes_in_path( + &mut self, + def: GenericDefId, + expected_count: u32, + hard_error: bool, + ) { + self.ctx.on_diagnostic(PathLoweringDiagnostic::ElidedLifetimesInPath { + generics_source: self.generics_source, + def, + expected_count, + hard_error, + }); + } + + fn report_elision_failure(&mut self, def: GenericDefId, expected_count: u32) { + self.ctx.on_diagnostic(PathLoweringDiagnostic::ElisionFailure { + generics_source: self.generics_source, + def, + expected_count, + }); + } + + fn report_missing_lifetime(&mut self, def: GenericDefId, expected_count: u32) { + self.ctx.on_diagnostic(PathLoweringDiagnostic::MissingLifetime { + generics_source: self.generics_source, + def, + expected_count, + }); } - // Fill in remaining parent params - substs.extend(rem.drain(parent_from..)); - } else { - // Fill in remaining def params and parent params - substs.extend(def_generics.iter_id().skip(substs.len()).map(param_to_err)); } - assert_eq!(substs.len(), total_len, "expected {} substs, got {}", total_len, substs.len()); - Substitution::from_iter(Interner, substs) + substs_from_args_and_bindings( + self.ctx.db, + self.ctx.store, + args_and_bindings, + def, + infer_args, + lifetime_elision, + lowering_assoc_type_generics, + explicit_self_ty, + &mut LowererCtx { ctx: self, generics_source }, + ) } pub(crate) fn lower_trait_ref_from_resolved_path( &mut self, resolved: TraitId, explicit_self_ty: Ty, + infer_args: bool, ) -> TraitRef { - let substs = self.trait_ref_substs_from_path(resolved, explicit_self_ty); + let substs = self.trait_ref_substs_from_path(resolved, explicit_self_ty, infer_args); TraitRef { trait_id: to_chalk_trait_id(resolved), substitution: substs } } @@ -780,17 +843,17 @@ impl<'a, 'b> PathLoweringContext<'a, 'b> { &mut self, resolved: TraitId, explicit_self_ty: Ty, + infer_args: bool, ) -> Substitution { - self.substs_from_path_segment(resolved.into(), false, Some(explicit_self_ty)) + self.substs_from_path_segment(resolved.into(), infer_args, Some(explicit_self_ty), false) } pub(super) fn assoc_type_bindings_from_type_bound<'c>( mut self, - bound: &'c TypeBound, trait_ref: TraitRef, ) -> Option + use<'a, 'b, 'c>> { self.current_or_prev_segment.args_and_bindings.map(|args_and_bindings| { - args_and_bindings.bindings.iter().flat_map(move |binding| { + args_and_bindings.bindings.iter().enumerate().flat_map(move |(binding_idx, binding)| { let found = associated_type_by_name_including_super_traits( self.ctx.db, trait_ref.clone(), @@ -800,23 +863,32 @@ impl<'a, 'b> PathLoweringContext<'a, 'b> { None => return SmallVec::new(), Some(t) => t, }; - // FIXME: `substs_from_path_segment()` pushes `TyKind::Error` for every parent - // generic params. It's inefficient to splice the `Substitution`s, so we may want - // that method to optionally take parent `Substitution` as we already know them at - // this point (`super_trait_ref.substitution`). - let substitution = self.substs_from_args_and_bindings( - binding.args.as_ref(), - associated_ty.into(), - false, // this is not relevant - Some(super_trait_ref.self_type_parameter(Interner)), - ); - let self_params = generics(self.ctx.db.upcast(), associated_ty.into()).len_self(); + let substitution = + self.with_lifetime_elision(LifetimeElisionKind::AnonymousReportError, |this| { + // FIXME: `substs_from_path_segment()` pushes `TyKind::Error` for every parent + // generic params. It's inefficient to splice the `Substitution`s, so we may want + // that method to optionally take parent `Substitution` as we already know them at + // this point (`super_trait_ref.substitution`). + this.substs_from_args_and_bindings( + binding.args.as_ref(), + associated_ty.into(), + false, // this is not relevant + Some(super_trait_ref.self_type_parameter(Interner)), + PathGenericsSource::AssocType { + segment: this.current_segment_u32(), + assoc_type: binding_idx as u32, + }, + false, + this.ctx.lifetime_elision.clone(), + ) + }); let substitution = Substitution::from_iter( Interner, - substitution - .iter(Interner) - .take(self_params) - .chain(super_trait_ref.substitution.iter(Interner)), + super_trait_ref.substitution.iter(Interner).chain( + substitution + .iter(Interner) + .skip(super_trait_ref.substitution.len(Interner)), + ), ); let projection_ty = ProjectionTy { associated_ty_id: to_assoc_type_id(associated_ty), @@ -825,93 +897,397 @@ impl<'a, 'b> PathLoweringContext<'a, 'b> { let mut predicates: SmallVec<[_; 1]> = SmallVec::with_capacity( binding.type_ref.as_ref().map_or(0, |_| 1) + binding.bounds.len(), ); + if let Some(type_ref) = binding.type_ref { - match (&self.ctx.types_map[type_ref], self.ctx.impl_trait_mode.mode) { - (TypeRef::ImplTrait(_), ImplTraitLoweringMode::Disallowed) => (), - (_, ImplTraitLoweringMode::Disallowed | ImplTraitLoweringMode::Opaque) => { - let ty = self.ctx.lower_ty(type_ref); - let alias_eq = - AliasEq { alias: AliasTy::Projection(projection_ty.clone()), ty }; - predicates - .push(crate::wrap_empty_binders(WhereClause::AliasEq(alias_eq))); - } - (_, ImplTraitLoweringMode::Param | ImplTraitLoweringMode::Variable) => { - // Find the generic index for the target of our `bound` - let target_param_idx = - self.ctx.resolver.where_predicates_in_scope().find_map( - |(p, (_, types_map))| match p { - WherePredicate::TypeBound { - target: WherePredicateTypeTarget::TypeOrConstParam(idx), - bound: b, - } if std::ptr::eq::( - self.ctx.types_map, - types_map, - ) && bound == b => - { - Some(idx) - } - _ => None, - }, - ); - let ty = if let Some(target_param_idx) = target_param_idx { - let mut counter = 0; - let generics = self.ctx.generics().expect("generics in scope"); - for (idx, data) in generics.iter_self_type_or_consts() { - // Count the number of `impl Trait` things that appear before - // the target of our `bound`. - // Our counter within `impl_trait_mode` should be that number - // to properly lower each types within `type_ref` - if data.type_param().is_some_and(|p| { - p.provenance == TypeParamProvenance::ArgumentImplTrait - }) { - counter += 1; - } - if idx == *target_param_idx { - break; - } - } - let mut ext = TyLoweringContext::new_maybe_unowned( - self.ctx.db, - self.ctx.resolver, - self.ctx.types_map, - self.ctx.types_source_map, - self.ctx.owner, - ) - .with_type_param_mode(self.ctx.type_param_mode); - match self.ctx.impl_trait_mode.mode { - ImplTraitLoweringMode::Param => { - ext.impl_trait_mode = - ImplTraitLoweringState::param(counter); - } - ImplTraitLoweringMode::Variable => { - ext.impl_trait_mode = - ImplTraitLoweringState::variable(counter); - } - _ => unreachable!(), - } - let ty = ext.lower_ty(type_ref); - self.ctx.diagnostics.extend(ext.diagnostics); - ty - } else { - self.ctx.lower_ty(type_ref) - }; - - let alias_eq = - AliasEq { alias: AliasTy::Projection(projection_ty.clone()), ty }; - predicates - .push(crate::wrap_empty_binders(WhereClause::AliasEq(alias_eq))); + let lifetime_elision = + if args_and_bindings.parenthesized == GenericArgsParentheses::ParenSugar { + // `Fn()`-style generics are elided like functions. This is `Output` (we lower to it in hir-def). + LifetimeElisionKind::for_fn_ret() + } else { + self.ctx.lifetime_elision.clone() + }; + self.with_lifetime_elision(lifetime_elision, |this| { + match (&this.ctx.store[type_ref], this.ctx.impl_trait_mode.mode) { + (TypeRef::ImplTrait(_), ImplTraitLoweringMode::Disallowed) => (), + ( + _, + ImplTraitLoweringMode::Disallowed | ImplTraitLoweringMode::Opaque, + ) => { + let ty = this.ctx.lower_ty(type_ref); + let alias_eq = AliasEq { + alias: AliasTy::Projection(projection_ty.clone()), + ty, + }; + predicates.push(crate::wrap_empty_binders(WhereClause::AliasEq( + alias_eq, + ))); + } } - } - } - for bound in binding.bounds.iter() { - predicates.extend(self.ctx.lower_type_bound( - bound, - TyKind::Alias(AliasTy::Projection(projection_ty.clone())).intern(Interner), - false, - )); + }); } + + self.with_lifetime_elision(LifetimeElisionKind::AnonymousReportError, |this| { + for bound in binding.bounds.iter() { + predicates.extend( + this.ctx.lower_type_bound( + bound, + TyKind::Alias(AliasTy::Projection(projection_ty.clone())) + .intern(Interner), + false, + ), + ); + } + }); + predicates }) }) } } + +/// A const that were parsed like a type. +pub(crate) enum TypeLikeConst<'a> { + Infer, + Path(&'a Path), +} + +pub(crate) trait GenericArgsLowerer { + fn report_elided_lifetimes_in_path( + &mut self, + def: GenericDefId, + expected_count: u32, + hard_error: bool, + ); + + fn report_elision_failure(&mut self, def: GenericDefId, expected_count: u32); + + fn report_missing_lifetime(&mut self, def: GenericDefId, expected_count: u32); + + fn report_len_mismatch( + &mut self, + def: GenericDefId, + provided_count: u32, + expected_count: u32, + kind: IncorrectGenericsLenKind, + ); + + fn report_arg_mismatch(&mut self, param_id: GenericParamId, arg_idx: u32, has_self_arg: bool); + + fn provided_kind( + &mut self, + param_id: GenericParamId, + param: GenericParamDataRef<'_>, + arg: &GenericArg, + ) -> crate::GenericArg; + + fn provided_type_like_const(&mut self, const_ty: Ty, arg: TypeLikeConst<'_>) -> crate::Const; + + fn inferred_kind( + &mut self, + def: GenericDefId, + param_id: GenericParamId, + param: GenericParamDataRef<'_>, + infer_args: bool, + preceding_args: &[crate::GenericArg], + ) -> crate::GenericArg; + + fn parent_arg(&mut self, param_id: GenericParamId) -> crate::GenericArg; +} + +/// Returns true if there was an error. +fn check_generic_args_len( + args_and_bindings: Option<&GenericArgs>, + def: GenericDefId, + def_generics: &Generics, + infer_args: bool, + lifetime_elision: &LifetimeElisionKind, + lowering_assoc_type_generics: bool, + ctx: &mut impl GenericArgsLowerer, +) -> bool { + let mut had_error = false; + + let (mut provided_lifetimes_count, mut provided_types_and_consts_count) = (0usize, 0usize); + if let Some(args_and_bindings) = args_and_bindings { + let args_no_self = &args_and_bindings.args[usize::from(args_and_bindings.has_self_type)..]; + for arg in args_no_self { + match arg { + GenericArg::Lifetime(_) => provided_lifetimes_count += 1, + GenericArg::Type(_) | GenericArg::Const(_) => provided_types_and_consts_count += 1, + } + } + } + + let lifetime_args_len = def_generics.len_lifetimes_self(); + if provided_lifetimes_count == 0 && lifetime_args_len > 0 && !lowering_assoc_type_generics { + // In generic associated types, we never allow inferring the lifetimes. + match lifetime_elision { + &LifetimeElisionKind::AnonymousCreateParameter { report_in_path } => { + ctx.report_elided_lifetimes_in_path(def, lifetime_args_len as u32, report_in_path); + had_error |= report_in_path; + } + LifetimeElisionKind::AnonymousReportError => { + ctx.report_missing_lifetime(def, lifetime_args_len as u32); + had_error = true + } + LifetimeElisionKind::ElisionFailure => { + ctx.report_elision_failure(def, lifetime_args_len as u32); + had_error = true; + } + LifetimeElisionKind::StaticIfNoLifetimeInScope { only_lint: _ } => { + // FIXME: Check there are other lifetimes in scope, and error/lint. + } + LifetimeElisionKind::Elided(_) => { + ctx.report_elided_lifetimes_in_path(def, lifetime_args_len as u32, false); + } + LifetimeElisionKind::Infer => { + // Allow eliding lifetimes. + } + } + } else if lifetime_args_len != provided_lifetimes_count { + ctx.report_len_mismatch( + def, + provided_lifetimes_count as u32, + lifetime_args_len as u32, + IncorrectGenericsLenKind::Lifetimes, + ); + had_error = true; + } + + let defaults_count = + def_generics.iter_self_type_or_consts().filter(|(_, param)| param.has_default()).count(); + let named_type_and_const_params_count = def_generics + .iter_self_type_or_consts() + .filter(|(_, param)| match param { + TypeOrConstParamData::TypeParamData(param) => { + param.provenance == TypeParamProvenance::TypeParamList + } + TypeOrConstParamData::ConstParamData(_) => true, + }) + .count(); + let expected_max = named_type_and_const_params_count; + let expected_min = + if infer_args { 0 } else { named_type_and_const_params_count - defaults_count }; + if provided_types_and_consts_count < expected_min + || expected_max < provided_types_and_consts_count + { + ctx.report_len_mismatch( + def, + provided_types_and_consts_count as u32, + named_type_and_const_params_count as u32, + IncorrectGenericsLenKind::TypesAndConsts, + ); + had_error = true; + } + + had_error +} + +pub(crate) fn substs_from_args_and_bindings( + db: &dyn HirDatabase, + store: &ExpressionStore, + args_and_bindings: Option<&GenericArgs>, + def: GenericDefId, + mut infer_args: bool, + lifetime_elision: LifetimeElisionKind, + lowering_assoc_type_generics: bool, + explicit_self_ty: Option, + ctx: &mut impl GenericArgsLowerer, +) -> Substitution { + // Order is + // - Parent parameters + // - Optional Self parameter + // - Lifetime parameters + // - Type or Const parameters + let def_generics = generics(db, def); + let args_slice = args_and_bindings.map(|it| &*it.args).unwrap_or_default(); + + // We do not allow inference if there are specified args, i.e. we do not allow partial inference. + let has_non_lifetime_args = + args_slice.iter().any(|arg| !matches!(arg, GenericArg::Lifetime(_))); + infer_args &= !has_non_lifetime_args; + + let had_count_error = check_generic_args_len( + args_and_bindings, + def, + &def_generics, + infer_args, + &lifetime_elision, + lowering_assoc_type_generics, + ctx, + ); + + let mut substs = Vec::with_capacity(def_generics.len()); + + substs.extend(def_generics.iter_parent_id().map(|id| ctx.parent_arg(id))); + + let mut args = args_slice.iter().enumerate().peekable(); + let mut params = def_generics.iter_self().peekable(); + + // If we encounter a type or const when we expect a lifetime, we infer the lifetimes. + // If we later encounter a lifetime, we know that the arguments were provided in the + // wrong order. `force_infer_lt` records the type or const that forced lifetimes to be + // inferred, so we can use it for diagnostics later. + let mut force_infer_lt = None; + + let has_self_arg = args_and_bindings.is_some_and(|it| it.has_self_type); + // First, handle `Self` parameter. Consume it from the args if provided, otherwise from `explicit_self_ty`, + // and lastly infer it. + if let Some(&( + self_param_id, + self_param @ GenericParamDataRef::TypeParamData(TypeParamData { + provenance: TypeParamProvenance::TraitSelf, + .. + }), + )) = params.peek() + { + let self_ty = if has_self_arg { + let (_, self_ty) = args.next().expect("has_self_type=true, should have Self type"); + ctx.provided_kind(self_param_id, self_param, self_ty) + } else { + explicit_self_ty.map(|it| it.cast(Interner)).unwrap_or_else(|| { + ctx.inferred_kind(def, self_param_id, self_param, infer_args, &substs) + }) + }; + params.next(); + substs.push(self_ty); + } + + loop { + // We're going to iterate through the generic arguments that the user + // provided, matching them with the generic parameters we expect. + // Mismatches can occur as a result of elided lifetimes, or for malformed + // input. We try to handle both sensibly. + match (args.peek(), params.peek()) { + (Some(&(arg_idx, arg)), Some(&(param_id, param))) => match (arg, param) { + (GenericArg::Type(_), GenericParamDataRef::TypeParamData(type_param)) + if type_param.provenance == TypeParamProvenance::ArgumentImplTrait => + { + // Do not allow specifying `impl Trait` explicitly. We already err at that, but if we won't handle it here + // we will handle it as if it was specified, instead of inferring it. + substs.push(ctx.inferred_kind(def, param_id, param, infer_args, &substs)); + params.next(); + } + (GenericArg::Lifetime(_), GenericParamDataRef::LifetimeParamData(_)) + | (GenericArg::Type(_), GenericParamDataRef::TypeParamData(_)) + | (GenericArg::Const(_), GenericParamDataRef::ConstParamData(_)) => { + substs.push(ctx.provided_kind(param_id, param, arg)); + args.next(); + params.next(); + } + ( + GenericArg::Type(_) | GenericArg::Const(_), + GenericParamDataRef::LifetimeParamData(_), + ) => { + // We expected a lifetime argument, but got a type or const + // argument. That means we're inferring the lifetime. + substs.push(ctx.inferred_kind(def, param_id, param, infer_args, &substs)); + params.next(); + force_infer_lt = Some((arg_idx as u32, param_id)); + } + (GenericArg::Type(type_ref), GenericParamDataRef::ConstParamData(_)) => { + if let Some(konst) = type_looks_like_const(store, *type_ref) { + let GenericParamId::ConstParamId(param_id) = param_id else { + panic!("unmatching param kinds"); + }; + let const_ty = db.const_param_ty(param_id); + substs.push(ctx.provided_type_like_const(const_ty, konst).cast(Interner)); + args.next(); + params.next(); + } else { + // See the `_ => { ... }` branch. + if !had_count_error { + ctx.report_arg_mismatch(param_id, arg_idx as u32, has_self_arg); + } + while args.next().is_some() {} + } + } + _ => { + // We expected one kind of parameter, but the user provided + // another. This is an error. However, if we already know that + // the arguments don't match up with the parameters, we won't issue + // an additional error, as the user already knows what's wrong. + if !had_count_error { + ctx.report_arg_mismatch(param_id, arg_idx as u32, has_self_arg); + } + + // We've reported the error, but we want to make sure that this + // problem doesn't bubble down and create additional, irrelevant + // errors. In this case, we're simply going to ignore the argument + // and any following arguments. The rest of the parameters will be + // inferred. + while args.next().is_some() {} + } + }, + + (Some(&(_, arg)), None) => { + // We should never be able to reach this point with well-formed input. + // There are two situations in which we can encounter this issue. + // + // 1. The number of arguments is incorrect. In this case, an error + // will already have been emitted, and we can ignore it. + // 2. We've inferred some lifetimes, which have been provided later (i.e. + // after a type or const). We want to throw an error in this case. + if !had_count_error { + assert!( + matches!(arg, GenericArg::Lifetime(_)), + "the only possible situation here is incorrect lifetime order" + ); + let (provided_arg_idx, param_id) = + force_infer_lt.expect("lifetimes ought to have been inferred"); + ctx.report_arg_mismatch(param_id, provided_arg_idx, has_self_arg); + } + + break; + } + + (None, Some(&(param_id, param))) => { + // If there are fewer arguments than parameters, it means we're inferring the remaining arguments. + let param = if let GenericParamId::LifetimeParamId(_) = param_id { + match &lifetime_elision { + LifetimeElisionKind::ElisionFailure + | LifetimeElisionKind::AnonymousCreateParameter { report_in_path: true } + | LifetimeElisionKind::AnonymousReportError => { + assert!(had_count_error); + ctx.inferred_kind(def, param_id, param, infer_args, &substs) + } + LifetimeElisionKind::StaticIfNoLifetimeInScope { only_lint: _ } => { + static_lifetime().cast(Interner) + } + LifetimeElisionKind::Elided(lifetime) => lifetime.clone().cast(Interner), + LifetimeElisionKind::AnonymousCreateParameter { report_in_path: false } + | LifetimeElisionKind::Infer => { + // FIXME: With `AnonymousCreateParameter`, we need to create a new lifetime parameter here + // (but this will probably be done in hir-def lowering instead). + ctx.inferred_kind(def, param_id, param, infer_args, &substs) + } + } + } else { + ctx.inferred_kind(def, param_id, param, infer_args, &substs) + }; + substs.push(param); + params.next(); + } + + (None, None) => break, + } + } + + Substitution::from_iter(Interner, substs) +} + +fn type_looks_like_const( + store: &ExpressionStore, + type_ref: TypeRefId, +) -> Option> { + // A path/`_` const will be parsed as a type, instead of a const, because when parsing/lowering + // in hir-def we don't yet know the expected argument kind. rustc does this a bit differently, + // when lowering to HIR it resolves the path, and if it doesn't resolve to the type namespace + // it is lowered as a const. Our behavior could deviate from rustc when the value is resolvable + // in both the type and value namespaces, but I believe we only allow more code. + let type_ref = &store[type_ref]; + match type_ref { + TypeRef::Path(path) => Some(TypeLikeConst::Path(path)), + TypeRef::Placeholder => Some(TypeLikeConst::Infer), + _ => None, + } +} diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mapping.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mapping.rs index 2f38e8fa14c0b..2abc1ac62a99a 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/mapping.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/mapping.rs @@ -5,12 +5,15 @@ use chalk_solve::rust_ir; -use base_db::ra_salsa::{self, InternKey}; use hir_def::{LifetimeParamId, TraitId, TypeAliasId, TypeOrConstParamId}; +use salsa::{ + Id, + plumbing::{AsId, FromId}, +}; use crate::{ - chalk_db, db::HirDatabase, AssocTypeId, CallableDefId, ChalkTraitId, FnDefId, ForeignDefId, - Interner, OpaqueTyId, PlaceholderIndex, + AssocTypeId, CallableDefId, ChalkTraitId, FnDefId, ForeignDefId, Interner, OpaqueTyId, + PlaceholderIndex, chalk_db, db::HirDatabase, }; pub(crate) trait ToChalk { @@ -30,11 +33,11 @@ impl ToChalk for hir_def::ImplId { type Chalk = chalk_db::ImplId; fn to_chalk(self, _db: &dyn HirDatabase) -> chalk_db::ImplId { - chalk_ir::ImplId(self.as_intern_id()) + chalk_ir::ImplId(self.as_id()) } fn from_chalk(_db: &dyn HirDatabase, impl_id: chalk_db::ImplId) -> hir_def::ImplId { - InternKey::from_intern_id(impl_id.0) + FromId::from_id(impl_id.0.as_id()) } } @@ -56,84 +59,85 @@ impl ToChalk for TypeAliasAsValue { type Chalk = chalk_db::AssociatedTyValueId; fn to_chalk(self, _db: &dyn HirDatabase) -> chalk_db::AssociatedTyValueId { - rust_ir::AssociatedTyValueId(self.0.as_intern_id()) + rust_ir::AssociatedTyValueId(self.0.as_id()) } fn from_chalk( _db: &dyn HirDatabase, assoc_ty_value_id: chalk_db::AssociatedTyValueId, ) -> TypeAliasAsValue { - TypeAliasAsValue(TypeAliasId::from_intern_id(assoc_ty_value_id.0)) + TypeAliasAsValue(TypeAliasId::from_id(assoc_ty_value_id.0)) } } impl From for crate::db::InternedCallableDefId { fn from(fn_def_id: FnDefId) -> Self { - InternKey::from_intern_id(fn_def_id.0) + Self::from_id(fn_def_id.0) } } impl From for FnDefId { fn from(callable_def_id: crate::db::InternedCallableDefId) -> Self { - chalk_ir::FnDefId(callable_def_id.as_intern_id()) + chalk_ir::FnDefId(callable_def_id.as_id()) } } impl From for crate::db::InternedOpaqueTyId { fn from(id: OpaqueTyId) -> Self { - InternKey::from_intern_id(id.0) + FromId::from_id(id.0) } } impl From for OpaqueTyId { fn from(id: crate::db::InternedOpaqueTyId) -> Self { - chalk_ir::OpaqueTyId(id.as_intern_id()) + chalk_ir::OpaqueTyId(id.as_id()) } } impl From> for crate::db::InternedClosureId { fn from(id: chalk_ir::ClosureId) -> Self { - Self::from_intern_id(id.0) + FromId::from_id(id.0) } } impl From for chalk_ir::ClosureId { fn from(id: crate::db::InternedClosureId) -> Self { - chalk_ir::ClosureId(id.as_intern_id()) + chalk_ir::ClosureId(id.as_id()) } } impl From> for crate::db::InternedCoroutineId { fn from(id: chalk_ir::CoroutineId) -> Self { - Self::from_intern_id(id.0) + Self::from_id(id.0) } } impl From for chalk_ir::CoroutineId { fn from(id: crate::db::InternedCoroutineId) -> Self { - chalk_ir::CoroutineId(id.as_intern_id()) + chalk_ir::CoroutineId(id.as_id()) } } pub fn to_foreign_def_id(id: TypeAliasId) -> ForeignDefId { - chalk_ir::ForeignDefId(ra_salsa::InternKey::as_intern_id(&id)) + chalk_ir::ForeignDefId(id.as_id()) } pub fn from_foreign_def_id(id: ForeignDefId) -> TypeAliasId { - ra_salsa::InternKey::from_intern_id(id.0) + FromId::from_id(id.0) } pub fn to_assoc_type_id(id: TypeAliasId) -> AssocTypeId { - chalk_ir::AssocTypeId(ra_salsa::InternKey::as_intern_id(&id)) + chalk_ir::AssocTypeId(id.as_id()) } pub fn from_assoc_type_id(id: AssocTypeId) -> TypeAliasId { - ra_salsa::InternKey::from_intern_id(id.0) + FromId::from_id(id.0) } pub fn from_placeholder_idx(db: &dyn HirDatabase, idx: PlaceholderIndex) -> TypeOrConstParamId { assert_eq!(idx.ui, chalk_ir::UniverseIndex::ROOT); - let interned_id = ra_salsa::InternKey::from_intern_id(ra_salsa::InternId::from(idx.idx)); + // SAFETY: We cannot really encapsulate this unfortunately, so just hope this is sound. + let interned_id = FromId::from_id(unsafe { Id::from_u32(idx.idx.try_into().unwrap()) }); db.lookup_intern_type_or_const_param_id(interned_id) } @@ -141,13 +145,14 @@ pub fn to_placeholder_idx(db: &dyn HirDatabase, id: TypeOrConstParamId) -> Place let interned_id = db.intern_type_or_const_param_id(id); PlaceholderIndex { ui: chalk_ir::UniverseIndex::ROOT, - idx: ra_salsa::InternKey::as_intern_id(&interned_id).as_usize(), + idx: interned_id.as_id().as_u32() as usize, } } pub fn lt_from_placeholder_idx(db: &dyn HirDatabase, idx: PlaceholderIndex) -> LifetimeParamId { assert_eq!(idx.ui, chalk_ir::UniverseIndex::ROOT); - let interned_id = ra_salsa::InternKey::from_intern_id(ra_salsa::InternId::from(idx.idx)); + // SAFETY: We cannot really encapsulate this unfortunately, so just hope this is sound. + let interned_id = FromId::from_id(unsafe { Id::from_u32(idx.idx.try_into().unwrap()) }); db.lookup_intern_lifetime_param_id(interned_id) } @@ -155,14 +160,14 @@ pub fn lt_to_placeholder_idx(db: &dyn HirDatabase, id: LifetimeParamId) -> Place let interned_id = db.intern_lifetime_param_id(id); PlaceholderIndex { ui: chalk_ir::UniverseIndex::ROOT, - idx: ra_salsa::InternKey::as_intern_id(&interned_id).as_usize(), + idx: interned_id.as_id().as_u32() as usize, } } pub fn to_chalk_trait_id(id: TraitId) -> ChalkTraitId { - chalk_ir::TraitId(ra_salsa::InternKey::as_intern_id(&id)) + chalk_ir::TraitId(id.as_id()) } pub fn from_chalk_trait_id(id: ChalkTraitId) -> TraitId { - ra_salsa::InternKey::from_intern_id(id.0) + FromId::from_id(id.0) } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs b/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs index c722800527190..8e549ca0cbd52 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs @@ -5,34 +5,34 @@ use std::ops::ControlFlow; use arrayvec::ArrayVec; -use base_db::CrateId; -use chalk_ir::{cast::Cast, UniverseIndex, WithKind}; +use base_db::Crate; +use chalk_ir::{UniverseIndex, WithKind, cast::Cast}; use hir_def::{ - data::{adt::StructFlags, ImplData, TraitFlags}, - nameres::DefMap, AssocItemId, BlockId, ConstId, FunctionId, HasModule, ImplId, ItemContainerId, Lookup, ModuleId, TraitId, + nameres::{DefMap, assoc::ImplItems}, + signatures::{ConstFlags, EnumFlags, FnFlags, StructFlags, TraitFlags, TypeAliasFlags}, }; use hir_expand::name::Name; use intern::sym; use rustc_hash::{FxHashMap, FxHashSet}; -use smallvec::{smallvec, SmallVec}; +use smallvec::{SmallVec, smallvec}; use stdx::never; use triomphe::Arc; use crate::{ + AdtId, Canonical, CanonicalVarKinds, DebruijnIndex, DynTyExt, ForeignDefId, GenericArgData, + Goal, Guidance, InEnvironment, Interner, Mutability, Scalar, Solution, Substitution, + TraitEnvironment, TraitRef, TraitRefExt, Ty, TyBuilder, TyExt, TyKind, TyVariableKind, + VariableKind, WhereClause, autoderef::{self, AutoderefKind}, db::HirDatabase, error_lifetime, from_chalk_trait_id, from_foreign_def_id, - infer::{unify::InferenceTable, Adjust, Adjustment, OverloadedDeref, PointerCast}, + infer::{Adjust, Adjustment, OverloadedDeref, PointerCast, unify::InferenceTable}, lang_items::is_box, primitive::{FloatTy, IntTy, UintTy}, to_chalk_trait_id, utils::all_super_traits, - AdtId, Canonical, CanonicalVarKinds, DebruijnIndex, DynTyExt, ForeignDefId, GenericArgData, - Goal, Guidance, InEnvironment, Interner, Mutability, Scalar, Solution, Substitution, - TraitEnvironment, TraitRef, TraitRefExt, Ty, TyBuilder, TyExt, TyKind, TyVariableKind, - VariableKind, WhereClause, }; /// This is used as a key for indexing impls. @@ -148,7 +148,7 @@ pub struct TraitImpls { } impl TraitImpls { - pub(crate) fn trait_impls_in_crate_query(db: &dyn HirDatabase, krate: CrateId) -> Arc { + pub(crate) fn trait_impls_in_crate_query(db: &dyn HirDatabase, krate: Crate) -> Arc { let _p = tracing::info_span!("trait_impls_in_crate_query", ?krate).entered(); let mut impls = FxHashMap::default(); @@ -166,22 +166,16 @@ impl TraitImpls { Self::collect_def_map(db, &mut impls, &db.block_def_map(block)); - if impls.is_empty() { - None - } else { - Some(Arc::new(Self::finish(impls))) - } + if impls.is_empty() { None } else { Some(Arc::new(Self::finish(impls))) } } pub(crate) fn trait_impls_in_deps_query( db: &dyn HirDatabase, - krate: CrateId, + krate: Crate, ) -> Arc<[Arc]> { let _p = tracing::info_span!("trait_impls_in_deps_query", ?krate).entered(); - let crate_graph = db.crate_graph(); - Arc::from_iter( - crate_graph.transitive_deps(krate).map(|krate| db.trait_impls_in_crate(krate)), + db.transitive_deps(krate).into_iter().map(|krate| db.trait_impls_in_crate(krate)), ) } @@ -203,7 +197,7 @@ impl TraitImpls { // FIXME: Reservation impls should be considered during coherence checks. If we are // (ever) to implement coherence checks, this filtering should be done by the trait // solver. - if db.attrs(impl_id.into()).by_key(&sym::rustc_reservation_impl).exists() { + if db.attrs(impl_id.into()).by_key(sym::rustc_reservation_impl).exists() { continue; } let target_trait = match db.impl_trait(impl_id) { @@ -219,7 +213,7 @@ impl TraitImpls { // const _: () = { ... }; for konst in module_data.scope.unnamed_consts() { let body = db.body(konst.into()); - for (_, block_def_map) in body.blocks(db.upcast()) { + for (_, block_def_map) in body.blocks(db) { Self::collect_def_map(db, map, &block_def_map); } } @@ -282,7 +276,7 @@ pub struct InherentImpls { } impl InherentImpls { - pub(crate) fn inherent_impls_in_crate_query(db: &dyn HirDatabase, krate: CrateId) -> Arc { + pub(crate) fn inherent_impls_in_crate_query(db: &dyn HirDatabase, krate: Crate) -> Arc { let _p = tracing::info_span!("inherent_impls_in_crate_query", ?krate).entered(); let mut impls = Self { map: FxHashMap::default(), invalid_impls: Vec::default() }; @@ -319,7 +313,7 @@ impl InherentImpls { fn collect_def_map(&mut self, db: &dyn HirDatabase, def_map: &DefMap) { for (_module_id, module_data) in def_map.modules() { for impl_id in module_data.scope.impls() { - let data = db.impl_data(impl_id); + let data = db.impl_signature(impl_id); if data.target_trait.is_some() { continue; } @@ -327,7 +321,7 @@ impl InherentImpls { let self_ty = db.impl_self_ty(impl_id); let self_ty = self_ty.skip_binders(); - match is_inherent_impl_coherent(db, def_map, &data, self_ty) { + match is_inherent_impl_coherent(db, def_map, impl_id, self_ty) { true => { // `fp` should only be `None` in error cases (either erroneous code or incomplete name resolution) if let Some(fp) = TyFingerprint::for_inherent_impl(self_ty) { @@ -342,7 +336,7 @@ impl InherentImpls { // const _: () = { ... }; for konst in module_data.scope.unnamed_consts() { let body = db.body(konst.into()); - for (_, block_def_map) in body.blocks(db.upcast()) { + for (_, block_def_map) in body.blocks(db) { self.collect_def_map(db, &block_def_map); } } @@ -367,16 +361,15 @@ impl InherentImpls { pub(crate) fn incoherent_inherent_impl_crates( db: &dyn HirDatabase, - krate: CrateId, + krate: Crate, fp: TyFingerprint, -) -> SmallVec<[CrateId; 2]> { +) -> SmallVec<[Crate; 2]> { let _p = tracing::info_span!("incoherent_inherent_impl_crates").entered(); let mut res = SmallVec::new(); - let crate_graph = db.crate_graph(); // should pass crate for finger print and do reverse deps - for krate in crate_graph.transitive_deps(krate) { + for krate in db.transitive_deps(krate) { let impls = db.inherent_impls_in_crate(krate); if impls.map.get(&fp).is_some_and(|v| !v.is_empty()) { res.push(krate); @@ -386,49 +379,54 @@ pub(crate) fn incoherent_inherent_impl_crates( res } -pub fn def_crates( - db: &dyn HirDatabase, - ty: &Ty, - cur_crate: CrateId, -) -> Option> { +pub fn def_crates(db: &dyn HirDatabase, ty: &Ty, cur_crate: Crate) -> Option> { match ty.kind(Interner) { &TyKind::Adt(AdtId(def_id), _) => { let rustc_has_incoherent_inherent_impls = match def_id { hir_def::AdtId::StructId(id) => db - .struct_data(id) + .struct_signature(id) .flags - .contains(StructFlags::IS_RUSTC_HAS_INCOHERENT_INHERENT_IMPL), + .contains(StructFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS), hir_def::AdtId::UnionId(id) => db - .union_data(id) + .union_signature(id) + .flags + .contains(StructFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS), + hir_def::AdtId::EnumId(id) => db + .enum_signature(id) .flags - .contains(StructFlags::IS_RUSTC_HAS_INCOHERENT_INHERENT_IMPL), - hir_def::AdtId::EnumId(id) => db.enum_data(id).rustc_has_incoherent_inherent_impls, + .contains(EnumFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS), }; Some(if rustc_has_incoherent_inherent_impls { db.incoherent_inherent_impl_crates(cur_crate, TyFingerprint::Adt(def_id)) } else { - smallvec![def_id.module(db.upcast()).krate()] + smallvec![def_id.module(db).krate()] }) } &TyKind::Foreign(id) => { let alias = from_foreign_def_id(id); - Some(if db.type_alias_data(alias).rustc_has_incoherent_inherent_impls { - db.incoherent_inherent_impl_crates(cur_crate, TyFingerprint::ForeignType(id)) - } else { - smallvec![alias.module(db.upcast()).krate()] - }) + Some( + if db + .type_alias_signature(alias) + .flags + .contains(TypeAliasFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPL) + { + db.incoherent_inherent_impl_crates(cur_crate, TyFingerprint::ForeignType(id)) + } else { + smallvec![alias.module(db).krate()] + }, + ) } TyKind::Dyn(_) => { let trait_id = ty.dyn_trait()?; Some( if db - .trait_data(trait_id) + .trait_signature(trait_id) .flags .contains(TraitFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS) { db.incoherent_inherent_impl_crates(cur_crate, TyFingerprint::Dyn(trait_id)) } else { - smallvec![trait_id.module(db.upcast()).krate()] + smallvec![trait_id.module(db).krate()] }, ) } @@ -517,9 +515,15 @@ impl From> for VisibleFromModule { } } +#[derive(Debug, Clone)] +pub enum AutorefOrPtrAdjustment { + Autoref(Mutability), + ToConstPtr, +} + #[derive(Debug, Clone, Default)] pub struct ReceiverAdjustments { - autoref: Option, + autoref: Option, autoderefs: usize, unsize_array: bool, } @@ -537,10 +541,15 @@ impl ReceiverAdjustments { } Some((kind, new_ty)) => { ty = new_ty.clone(); + let mutbl = match self.autoref { + Some(AutorefOrPtrAdjustment::Autoref(m)) => Some(m), + Some(AutorefOrPtrAdjustment::ToConstPtr) => Some(Mutability::Not), + // FIXME should we know the mutability here, when autoref is `None`? + None => None, + }; adjust.push(Adjustment { kind: Adjust::Deref(match kind { - // FIXME should we know the mutability here, when autoref is `None`? - AutoderefKind::Overloaded => Some(OverloadedDeref(self.autoref)), + AutoderefKind::Overloaded => Some(OverloadedDeref(mutbl)), AutoderefKind::Builtin => None, }), target: new_ty, @@ -548,11 +557,27 @@ impl ReceiverAdjustments { } } } - if let Some(m) = self.autoref { + if let Some(autoref) = &self.autoref { let lt = table.new_lifetime_var(); - let a = Adjustment::borrow(m, ty, lt); - ty = a.target.clone(); - adjust.push(a); + match autoref { + AutorefOrPtrAdjustment::Autoref(m) => { + let a = Adjustment::borrow(*m, ty, lt); + ty = a.target.clone(); + adjust.push(a); + } + AutorefOrPtrAdjustment::ToConstPtr => { + if let TyKind::Raw(Mutability::Mut, pointee) = ty.kind(Interner) { + let a = Adjustment { + kind: Adjust::Pointer(PointerCast::MutToConstPointer), + target: TyKind::Raw(Mutability::Not, pointee.clone()).intern(Interner), + }; + ty = a.target.clone(); + adjust.push(a); + } else { + never!("`ToConstPtr` target is not a raw mutable pointer"); + } + } + }; } if self.unsize_array { ty = 'it: { @@ -577,8 +602,8 @@ impl ReceiverAdjustments { (ty, adjust) } - fn with_autoref(&self, m: Mutability) -> ReceiverAdjustments { - Self { autoref: Some(m), ..*self } + fn with_autoref(&self, a: AutorefOrPtrAdjustment) -> ReceiverAdjustments { + Self { autoref: Some(a), ..*self } } } @@ -596,7 +621,7 @@ pub(crate) fn iterate_method_candidates( mut callback: impl FnMut(ReceiverAdjustments, AssocItemId, bool) -> Option, ) -> Option { let mut slot = None; - let _ = iterate_method_candidates_dyn( + _ = iterate_method_candidates_dyn( ty, db, env, @@ -622,15 +647,15 @@ pub fn lookup_impl_const( const_id: ConstId, subs: Substitution, ) -> (ConstId, Substitution) { - let trait_id = match const_id.lookup(db.upcast()).container { + let trait_id = match const_id.lookup(db).container { ItemContainerId::TraitId(id) => id, _ => return (const_id, subs), }; let substitution = Substitution::from_iter(Interner, subs.iter(Interner)); let trait_ref = TraitRef { trait_id: to_chalk_trait_id(trait_id), substitution }; - let const_data = db.const_data(const_id); - let name = match const_data.name.as_ref() { + let const_signature = db.const_signature(const_id); + let name = match const_signature.name.as_ref() { Some(name) => name, None => return (const_id, subs), }; @@ -650,14 +675,14 @@ pub fn is_dyn_method( func: FunctionId, fn_subst: Substitution, ) -> Option { - let ItemContainerId::TraitId(trait_id) = func.lookup(db.upcast()).container else { + let ItemContainerId::TraitId(trait_id) = func.lookup(db).container else { return None; }; let trait_params = db.generic_params(trait_id.into()).len(); let fn_params = fn_subst.len(Interner) - trait_params; let trait_ref = TraitRef { trait_id: to_chalk_trait_id(trait_id), - substitution: Substitution::from_iter(Interner, fn_subst.iter(Interner).skip(fn_params)), + substitution: Substitution::from_iter(Interner, fn_subst.iter(Interner).take(trait_params)), }; let self_ty = trait_ref.self_type_parameter(Interner); if let TyKind::Dyn(d) = self_ty.kind(Interner) { @@ -669,7 +694,7 @@ pub fn is_dyn_method( .map(|it| it.skip_binders()) .flat_map(|it| match it { WhereClause::Implemented(tr) => { - all_super_traits(db.upcast(), from_chalk_trait_id(tr.trait_id)) + all_super_traits(db, from_chalk_trait_id(tr.trait_id)) } _ => smallvec![], }) @@ -692,33 +717,29 @@ pub(crate) fn lookup_impl_method_query( func: FunctionId, fn_subst: Substitution, ) -> (FunctionId, Substitution) { - let ItemContainerId::TraitId(trait_id) = func.lookup(db.upcast()).container else { + let ItemContainerId::TraitId(trait_id) = func.lookup(db).container else { return (func, fn_subst); }; let trait_params = db.generic_params(trait_id.into()).len(); - let fn_params = fn_subst.len(Interner) - trait_params; let trait_ref = TraitRef { trait_id: to_chalk_trait_id(trait_id), - substitution: Substitution::from_iter(Interner, fn_subst.iter(Interner).skip(fn_params)), + substitution: Substitution::from_iter(Interner, fn_subst.iter(Interner).take(trait_params)), }; - let name = &db.function_data(func).name; + let name = &db.function_signature(func).name; let Some((impl_fn, impl_subst)) = lookup_impl_assoc_item_for_trait_ref(trait_ref, db, env, name).and_then(|assoc| { - if let (AssocItemId::FunctionId(id), subst) = assoc { - Some((id, subst)) - } else { - None - } + if let (AssocItemId::FunctionId(id), subst) = assoc { Some((id, subst)) } else { None } }) else { return (func, fn_subst); }; + ( impl_fn, Substitution::from_iter( Interner, - fn_subst.iter(Interner).take(fn_params).chain(impl_subst.iter(Interner)), + impl_subst.iter(Interner).chain(fn_subst.iter(Interner).skip(trait_params)), ), ) } @@ -734,13 +755,11 @@ fn lookup_impl_assoc_item_for_trait_ref( let self_ty_fp = TyFingerprint::for_trait_impl(&self_ty)?; let impls = db.trait_impls_in_deps(env.krate); - let trait_module = hir_trait_id.module(db.upcast()); + let trait_module = hir_trait_id.module(db); let type_module = match self_ty_fp { - TyFingerprint::Adt(adt_id) => Some(adt_id.module(db.upcast())), - TyFingerprint::ForeignType(type_id) => { - Some(from_foreign_def_id(type_id).module(db.upcast())) - } - TyFingerprint::Dyn(trait_id) => Some(trait_id.module(db.upcast())), + TyFingerprint::Adt(adt_id) => Some(adt_id.module(db)), + TyFingerprint::ForeignType(type_id) => Some(from_foreign_def_id(type_id).module(db)), + TyFingerprint::Dyn(trait_id) => Some(trait_id.module(db)), _ => None, }; @@ -771,11 +790,10 @@ fn find_matching_impl( mut impls: impl Iterator, mut table: InferenceTable<'_>, actual_trait_ref: TraitRef, -) -> Option<(Arc, Substitution)> { +) -> Option<(Arc, Substitution)> { let db = table.db; impls.find_map(|impl_| { table.run_in_snapshot(|table| { - let impl_data = db.impl_data(impl_); let impl_substs = TyBuilder::subst_for_def(db, impl_, None).fill_with_inference_vars(table).build(); let trait_ref = db @@ -793,7 +811,7 @@ fn find_matching_impl( let goal = crate::Goal::all(Interner, wcs); table.try_obligation(goal.clone())?; table.register_obligation(goal); - Some((impl_data, table.resolve_completely(impl_substs))) + Some((db.impl_items(impl_), table.resolve_completely(impl_substs))) }) }) } @@ -801,7 +819,7 @@ fn find_matching_impl( fn is_inherent_impl_coherent( db: &dyn HirDatabase, def_map: &DefMap, - impl_data: &ImplData, + impl_id: ImplId, self_ty: &Ty, ) -> bool { let self_ty = self_ty.kind(Interner); @@ -816,9 +834,9 @@ fn is_inherent_impl_coherent( | TyKind::Str | TyKind::Scalar(_) => def_map.is_rustc_coherence_is_core(), - &TyKind::Adt(AdtId(adt), _) => adt.module(db.upcast()).krate() == def_map.krate(), + &TyKind::Adt(AdtId(adt), _) => adt.module(db).krate() == def_map.krate(), TyKind::Dyn(it) => it.principal_id().is_some_and(|trait_id| { - from_chalk_trait_id(trait_id).module(db.upcast()).krate() == def_map.krate() + from_chalk_trait_id(trait_id).module(db).krate() == def_map.krate() }), _ => true, @@ -837,29 +855,40 @@ fn is_inherent_impl_coherent( &TyKind::Adt(AdtId(adt), _) => match adt { hir_def::AdtId::StructId(id) => db - .struct_data(id) + .struct_signature(id) .flags - .contains(StructFlags::IS_RUSTC_HAS_INCOHERENT_INHERENT_IMPL), + .contains(StructFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS), hir_def::AdtId::UnionId(id) => db - .union_data(id) + .union_signature(id) + .flags + .contains(StructFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS), + hir_def::AdtId::EnumId(it) => db + .enum_signature(it) .flags - .contains(StructFlags::IS_RUSTC_HAS_INCOHERENT_INHERENT_IMPL), - hir_def::AdtId::EnumId(it) => db.enum_data(it).rustc_has_incoherent_inherent_impls, + .contains(EnumFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS), }, TyKind::Dyn(it) => it.principal_id().is_some_and(|trait_id| { - db.trait_data(from_chalk_trait_id(trait_id)) + db.trait_signature(from_chalk_trait_id(trait_id)) .flags .contains(TraitFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS) }), _ => false, }; + let items = db.impl_items(impl_id); rustc_has_incoherent_inherent_impls - && !impl_data.items.is_empty() - && impl_data.items.iter().all(|&(_, assoc)| match assoc { - AssocItemId::FunctionId(it) => db.function_data(it).rustc_allow_incoherent_impl, - AssocItemId::ConstId(it) => db.const_data(it).rustc_allow_incoherent_impl, - AssocItemId::TypeAliasId(it) => db.type_alias_data(it).rustc_allow_incoherent_impl, + && !items.items.is_empty() + && items.items.iter().all(|&(_, assoc)| match assoc { + AssocItemId::FunctionId(it) => { + db.function_signature(it).flags.contains(FnFlags::RUSTC_ALLOW_INCOHERENT_IMPL) + } + AssocItemId::ConstId(it) => { + db.const_signature(it).flags.contains(ConstFlags::RUSTC_ALLOW_INCOHERENT_IMPL) + } + AssocItemId::TypeAliasId(it) => db + .type_alias_signature(it) + .flags + .contains(TypeAliasFlags::RUSTC_ALLOW_INCOHERENT_IMPL), }) } } @@ -878,45 +907,52 @@ pub fn check_orphan_rules(db: &dyn HirDatabase, impl_: ImplId) -> bool { return true; }; - let local_crate = impl_.lookup(db.upcast()).container.krate(); + let local_crate = impl_.lookup(db).container.krate(); let is_local = |tgt_crate| tgt_crate == local_crate; let trait_ref = impl_trait.substitute(Interner, &substs); let trait_id = from_chalk_trait_id(trait_ref.trait_id); - if is_local(trait_id.module(db.upcast()).krate()) { + if is_local(trait_id.module(db).krate()) { // trait to be implemented is local return true; } - let unwrap_fundamental = |ty: Ty| match ty.kind(Interner) { - TyKind::Ref(_, _, referenced) => referenced.clone(), - &TyKind::Adt(AdtId(hir_def::AdtId::StructId(s)), ref subs) => { - let struct_data = db.struct_data(s); - if struct_data.flags.contains(StructFlags::IS_FUNDAMENTAL) { - let next = subs.type_parameters(Interner).next(); - match next { - Some(ty) => ty, - None => ty, + let unwrap_fundamental = |mut ty: Ty| { + // Unwrap all layers of fundamental types with a loop. + loop { + match ty.kind(Interner) { + TyKind::Ref(_, _, referenced) => ty = referenced.clone(), + &TyKind::Adt(AdtId(hir_def::AdtId::StructId(s)), ref subs) => { + let struct_signature = db.struct_signature(s); + if struct_signature.flags.contains(StructFlags::FUNDAMENTAL) { + let next = subs.type_parameters(Interner).next(); + match next { + Some(it) => ty = it, + None => break ty, + } + } else { + break ty; + } } - } else { - ty + _ => break ty, } } - _ => ty, }; // - At least one of the types `T0..=Tn`` must be a local type. Let `Ti`` be the first such type. + + // FIXME: param coverage + // - No uncovered type parameters `P1..=Pn` may appear in `T0..Ti`` (excluding `Ti`) let is_not_orphan = trait_ref.substitution.type_parameters(Interner).any(|ty| { match unwrap_fundamental(ty).kind(Interner) { - &TyKind::Adt(AdtId(id), _) => is_local(id.module(db.upcast()).krate()), + &TyKind::Adt(AdtId(id), _) => is_local(id.module(db).krate()), TyKind::Error => true, - TyKind::Dyn(it) => it.principal_id().is_some_and(|trait_id| { - is_local(from_chalk_trait_id(trait_id).module(db.upcast()).krate()) - }), + TyKind::Dyn(it) => it + .principal_id() + .is_some_and(|trait_id| is_local(from_chalk_trait_id(trait_id).module(db).krate())), _ => false, } }); - // FIXME: param coverage - // - No uncovered type parameters `P1..=Pn` may appear in `T0..Ti`` (excluding `Ti`) + #[allow(clippy::let_and_return)] is_not_orphan } @@ -1042,7 +1078,7 @@ fn iterate_method_candidates_with_autoref( let mut maybe_reborrowed = first_adjustment.clone(); if let Some((_, _, m)) = receiver_ty.value.as_reference() { // Prefer reborrow of references to move - maybe_reborrowed.autoref = Some(m); + maybe_reborrowed.autoref = Some(AutorefOrPtrAdjustment::Autoref(m)); maybe_reborrowed.autoderefs += 1; } @@ -1054,15 +1090,34 @@ fn iterate_method_candidates_with_autoref( binders: receiver_ty.binders.clone(), }; - iterate_method_candidates_by_receiver(refed, first_adjustment.with_autoref(Mutability::Not))?; + iterate_method_candidates_by_receiver( + refed, + first_adjustment.with_autoref(AutorefOrPtrAdjustment::Autoref(Mutability::Not)), + )?; let ref_muted = Canonical { value: TyKind::Ref(Mutability::Mut, error_lifetime(), receiver_ty.value.clone()) .intern(Interner), - binders: receiver_ty.binders, + binders: receiver_ty.binders.clone(), }; - iterate_method_candidates_by_receiver(ref_muted, first_adjustment.with_autoref(Mutability::Mut)) + iterate_method_candidates_by_receiver( + ref_muted, + first_adjustment.with_autoref(AutorefOrPtrAdjustment::Autoref(Mutability::Mut)), + )?; + + if let Some((ty, Mutability::Mut)) = receiver_ty.value.as_raw_ptr() { + let const_ptr_ty = Canonical { + value: TyKind::Raw(Mutability::Not, ty.clone()).intern(Interner), + binders: receiver_ty.binders, + }; + iterate_method_candidates_by_receiver( + const_ptr_ty, + first_adjustment.with_autoref(AutorefOrPtrAdjustment::ToConstPtr), + )?; + } + + ControlFlow::Continue(()) } pub trait MethodCandidateCallback { @@ -1213,7 +1268,7 @@ fn iterate_trait_method_candidates( let TraitEnvironment { krate, block, .. } = *table.trait_env; 'traits: for &t in traits_in_scope { - let data = db.trait_data(t); + let data = db.trait_signature(t); // Traits annotated with `#[rustc_skip_during_method_dispatch]` are skipped during // method resolution, if the receiver is an array, and we're compiling for editions before @@ -1225,7 +1280,7 @@ fn iterate_trait_method_candidates( { // FIXME: this should really be using the edition of the method name's span, in case it // comes from a macro - if !db.crate_graph()[krate].edition.at_least_2021() { + if !krate.data(db).edition.at_least_2021() { continue; } } @@ -1238,7 +1293,7 @@ fn iterate_trait_method_candidates( { // FIXME: this should really be using the edition of the method name's span, in case it // comes from a macro - if !db.crate_graph()[krate].edition.at_least_2024() { + if !krate.data(db).edition.at_least_2024() { continue; } } @@ -1247,7 +1302,7 @@ fn iterate_trait_method_candidates( // trait, but if we find out it doesn't, we'll skip the rest of the // iteration let mut known_implemented = false; - for &(_, item) in data.items.iter() { + for &(_, item) in db.trait_items(t).items.iter() { // Don't pass a `visible_from_module` down to `is_valid_candidate`, // since only inherent methods should be included into visibility checking. let visible = @@ -1291,7 +1346,7 @@ fn iterate_inherent_methods( let env = table.trait_env.clone(); let traits = env .traits_in_scope_from_clauses(self_ty.clone()) - .flat_map(|t| all_super_traits(db.upcast(), t)); + .flat_map(|t| all_super_traits(db, t)); iterate_inherent_trait_methods( self_ty, table, @@ -1304,7 +1359,7 @@ fn iterate_inherent_methods( } TyKind::Dyn(_) => { if let Some(principal_trait) = self_ty.dyn_trait() { - let traits = all_super_traits(db.upcast(), principal_trait); + let traits = all_super_traits(db, principal_trait); iterate_inherent_trait_methods( self_ty, table, @@ -1374,7 +1429,7 @@ fn iterate_inherent_methods( ) -> ControlFlow<()> { let db = table.db; for t in traits { - let data = db.trait_data(t); + let data = db.trait_items(t); for &(_, item) in data.items.iter() { // We don't pass `visible_from_module` as all trait items should be visible. let visible = match is_valid_trait_method_candidate( @@ -1407,7 +1462,7 @@ fn iterate_inherent_methods( callback: &mut dyn FnMut(ReceiverAdjustments, AssocItemId, bool) -> ControlFlow<()>, ) -> ControlFlow<()> { for &impl_id in impls.for_self_ty(self_ty) { - for &(ref item_name, item) in table.db.impl_data(impl_id).items.iter() { + for &(ref item_name, item) in table.db.impl_items(impl_id).items.iter() { let visible = match is_valid_impl_method_candidate( table, self_ty, @@ -1495,7 +1550,7 @@ fn is_valid_impl_method_candidate( check_that!(name.is_none_or(|n| n == item_name)); if let Some(from_module) = visible_from_module { - if !db.const_visibility(c).is_visible_from(db.upcast(), from_module) { + if !db.const_visibility(c).is_visible_from(db, from_module) { cov_mark::hit!(const_candidate_not_visible); return IsValidCandidate::NotVisible; } @@ -1528,7 +1583,7 @@ fn is_valid_trait_method_candidate( let db = table.db; match item { AssocItemId::FunctionId(fn_id) => { - let data = db.function_data(fn_id); + let data = db.function_signature(fn_id); check_that!(name.is_none_or(|n| n == &data.name)); @@ -1559,7 +1614,7 @@ fn is_valid_trait_method_candidate( } AssocItemId::ConstId(c) => { check_that!(receiver_ty.is_none()); - check_that!(name.is_none_or(|n| db.const_data(c).name.as_ref() == Some(n))); + check_that!(name.is_none_or(|n| db.const_signature(c).name.as_ref() == Some(n))); IsValidCandidate::Yes } @@ -1581,10 +1636,10 @@ fn is_valid_impl_fn_candidate( check_that!(name.is_none_or(|n| n == item_name)); let db = table.db; - let data = db.function_data(fn_id); + let data = db.function_signature(fn_id); if let Some(from_module) = visible_from_module { - if !db.function_visibility(fn_id).is_visible_from(db.upcast(), from_module) { + if !db.function_visibility(fn_id).is_visible_from(db, from_module) { cov_mark::hit!(autoderef_candidate_not_visible); return IsValidCandidate::NotVisible; } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir.rs index 56c431ef8dab6..bf80ed7967aa8 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/mir.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir.rs @@ -3,22 +3,22 @@ use std::{collections::hash_map::Entry, fmt::Display, iter}; use crate::{ + CallableDefId, ClosureId, Const, ConstScalar, InferenceResult, Interner, MemoryMap, + Substitution, TraitEnvironment, Ty, TyExt, TyKind, consteval::usize_const, db::HirDatabase, display::{DisplayTarget, HirDisplay}, - infer::{normalize, PointerCast}, + infer::{PointerCast, normalize}, lang_items::is_box, mapping::ToChalk, - CallableDefId, ClosureId, Const, ConstScalar, InferenceResult, Interner, MemoryMap, - Substitution, TraitEnvironment, Ty, TyExt, TyKind, }; -use base_db::CrateId; +use base_db::Crate; use chalk_ir::Mutability; use either::Either; use hir_def::{ + DefWithBodyId, FieldId, StaticId, TupleFieldId, UnionId, VariantId, expr_store::Body, hir::{BindingAnnotation, BindingId, Expr, ExprId, Ordering, PatId}, - DefWithBodyId, FieldId, StaticId, TupleFieldId, UnionId, VariantId, }; use la_arena::{Arena, ArenaMap, Idx, RawIdx}; @@ -28,21 +28,22 @@ mod lower; mod monomorphization; mod pretty; -pub use borrowck::{borrowck_query, BorrowckResult, MutabilityReason}; +pub use borrowck::{BorrowckResult, MutabilityReason, borrowck_query}; pub use eval::{ - interpret_mir, pad16, render_const_using_debug_impl, Evaluator, MirEvalError, VTableMap, -}; -pub use lower::{ - lower_to_mir, mir_body_for_closure_query, mir_body_query, mir_body_recover, MirLowerError, + Evaluator, MirEvalError, VTableMap, interpret_mir, pad16, render_const_using_debug_impl, }; +pub use lower::{MirLowerError, lower_to_mir, mir_body_for_closure_query, mir_body_query}; pub use monomorphization::{ monomorphize_mir_body_bad, monomorphized_mir_body_for_closure_query, - monomorphized_mir_body_query, monomorphized_mir_body_recover, + monomorphized_mir_body_query, }; use rustc_hash::FxHashMap; -use smallvec::{smallvec, SmallVec}; +use smallvec::{SmallVec, smallvec}; use stdx::{impl_from, never}; +pub(crate) use lower::mir_body_cycle_result; +pub(crate) use monomorphization::monomorphized_mir_body_cycle_result; + use super::consteval::{intern_const_scalar, try_const_usize}; pub type BasicBlockId = Idx; @@ -76,7 +77,14 @@ pub struct Local { /// currently implements it, but it seems like this may be something to check against in the /// validator. #[derive(Debug, PartialEq, Eq, Clone)] -pub enum Operand { +pub struct Operand { + kind: OperandKind, + // FIXME : This should actually just be of type `MirSpan`. + span: Option, +} + +#[derive(Debug, PartialEq, Eq, Clone)] +pub enum OperandKind { /// Creates a value by loading the given place. /// /// Before drop elaboration, the type of the place must be `Copy`. After drop elaboration there @@ -100,7 +108,13 @@ pub enum Operand { impl Operand { fn from_concrete_const(data: Box<[u8]>, memory_map: MemoryMap, ty: Ty) -> Self { - Operand::Constant(intern_const_scalar(ConstScalar::Bytes(data, memory_map), ty)) + Operand { + kind: OperandKind::Constant(intern_const_scalar( + ConstScalar::Bytes(data, memory_map), + ty, + )), + span: None, + } } fn from_bytes(data: Box<[u8]>, ty: Ty) -> Self { @@ -142,7 +156,7 @@ impl ProjectionElem { mut base: Ty, db: &dyn HirDatabase, closure_field: impl FnOnce(ClosureId, &Substitution, usize) -> Ty, - krate: CrateId, + krate: Crate, ) -> Ty { // we only bail on mir building when there are type mismatches // but error types may pop up resulting in us still attempting to build the mir @@ -1075,11 +1089,11 @@ impl MirBody { f: &mut impl FnMut(&mut Place, &mut ProjectionStore), store: &mut ProjectionStore, ) { - match op { - Operand::Copy(p) | Operand::Move(p) => { + match &mut op.kind { + OperandKind::Copy(p) | OperandKind::Move(p) => { f(p, store); } - Operand::Constant(_) | Operand::Static(_) => (), + OperandKind::Constant(_) | OperandKind::Static(_) => (), } } for (_, block) in self.basic_blocks.iter_mut() { diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/borrowck.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/borrowck.rs index fbcca388e781d..fb0c0dee095f1 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/borrowck.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/borrowck.rs @@ -12,16 +12,16 @@ use stdx::never; use triomphe::Arc; use crate::{ + ClosureId, Interner, Substitution, Ty, TyExt, TypeFlags, db::{HirDatabase, InternedClosure}, display::DisplayTarget, - mir::Operand, + mir::OperandKind, utils::ClosureSubst, - ClosureId, Interner, Substitution, Ty, TyExt, TypeFlags, }; use super::{ - BasicBlockId, BorrowKind, LocalId, MirBody, MirLowerError, MirSpan, MutBorrowKind, Place, - ProjectionElem, Rvalue, StatementKind, TerminatorKind, + BasicBlockId, BorrowKind, LocalId, MirBody, MirLowerError, MirSpan, MutBorrowKind, Operand, + Place, ProjectionElem, Rvalue, StatementKind, TerminatorKind, }; #[derive(Debug, Clone, PartialEq, Eq)] @@ -71,7 +71,7 @@ fn all_mir_bodies( c: ClosureId, cb: &mut impl FnMut(Arc), ) -> Result<(), MirLowerError> { - match db.mir_body_for_closure(c) { + match db.mir_body_for_closure(c.into()) { Ok(body) => { cb(body.clone()); body.closures.iter().try_for_each(|&it| for_closure(db, it, cb)) @@ -120,8 +120,8 @@ fn make_fetch_closure_field( fn moved_out_of_ref(db: &dyn HirDatabase, body: &MirBody) -> Vec { let mut result = vec![]; - let mut for_operand = |op: &Operand, span: MirSpan| match op { - Operand::Copy(p) | Operand::Move(p) => { + let mut for_operand = |op: &Operand, span: MirSpan| match op.kind { + OperandKind::Copy(p) | OperandKind::Move(p) => { let mut ty: Ty = body.locals[p.local].ty.clone(); let mut is_dereference_of_ref = false; for proj in p.projection.lookup(&body.projection_store) { @@ -132,20 +132,20 @@ fn moved_out_of_ref(db: &dyn HirDatabase, body: &MirBody) -> Vec ty, db, make_fetch_closure_field(db), - body.owner.module(db.upcast()).krate(), + body.owner.module(db).krate(), ); } if is_dereference_of_ref && !ty.clone().is_copy(db, body.owner) && !ty.data(Interner).flags.intersects(TypeFlags::HAS_ERROR) { - result.push(MovedOutOfRef { span, ty }); + result.push(MovedOutOfRef { span: op.span.unwrap_or(span), ty }); } } - Operand::Constant(_) | Operand::Static(_) => (), + OperandKind::Constant(_) | OperandKind::Static(_) => (), }; for (_, block) in body.basic_blocks.iter() { - db.unwind_if_cancelled(); + db.unwind_if_revision_cancelled(); for statement in &block.statements { match &statement.kind { StatementKind::Assign(_, r) => match r { @@ -215,15 +215,15 @@ fn moved_out_of_ref(db: &dyn HirDatabase, body: &MirBody) -> Vec fn partially_moved(db: &dyn HirDatabase, body: &MirBody) -> Vec { let mut result = vec![]; - let mut for_operand = |op: &Operand, span: MirSpan| match op { - Operand::Copy(p) | Operand::Move(p) => { + let mut for_operand = |op: &Operand, span: MirSpan| match op.kind { + OperandKind::Copy(p) | OperandKind::Move(p) => { let mut ty: Ty = body.locals[p.local].ty.clone(); for proj in p.projection.lookup(&body.projection_store) { ty = proj.projected_ty( ty, db, make_fetch_closure_field(db), - body.owner.module(db.upcast()).krate(), + body.owner.module(db).krate(), ); } if !ty.clone().is_copy(db, body.owner) @@ -232,10 +232,10 @@ fn partially_moved(db: &dyn HirDatabase, body: &MirBody) -> Vec result.push(PartiallyMoved { span, ty, local: p.local }); } } - Operand::Constant(_) | Operand::Static(_) => (), + OperandKind::Constant(_) | OperandKind::Static(_) => (), }; for (_, block) in body.basic_blocks.iter() { - db.unwind_if_cancelled(); + db.unwind_if_revision_cancelled(); for statement in &block.statements { match &statement.kind { StatementKind::Assign(_, r) => match r { @@ -306,7 +306,7 @@ fn partially_moved(db: &dyn HirDatabase, body: &MirBody) -> Vec fn borrow_regions(db: &dyn HirDatabase, body: &MirBody) -> Vec { let mut borrows = FxHashMap::default(); for (_, block) in body.basic_blocks.iter() { - db.unwind_if_cancelled(); + db.unwind_if_revision_cancelled(); for statement in &block.statements { if let StatementKind::Assign(_, Rvalue::Ref(kind, p)) = &statement.kind { borrows @@ -369,18 +369,9 @@ fn place_case(db: &dyn HirDatabase, body: &MirBody, lvalue: &Place) -> Projectio } ProjectionElem::OpaqueCast(_) => (), } - ty = proj.projected_ty( - ty, - db, - make_fetch_closure_field(db), - body.owner.module(db.upcast()).krate(), - ); - } - if is_part_of { - ProjectionCase::DirectPart - } else { - ProjectionCase::Direct + ty = proj.projected_ty(ty, db, make_fetch_closure_field(db), body.owner.module(db).krate()); } + if is_part_of { ProjectionCase::DirectPart } else { ProjectionCase::Direct } } /// Returns a map from basic blocks to the set of locals that might be ever initialized before @@ -423,10 +414,7 @@ fn ever_initialized_map( let Some(terminator) = &block.terminator else { never!( "Terminator should be none only in construction.\nThe body:\n{}", - body.pretty_print( - db, - DisplayTarget::from_crate(db, body.owner.krate(db.upcast())) - ) + body.pretty_print(db, DisplayTarget::from_crate(db, body.owner.krate(db))) ); return; }; @@ -477,7 +465,7 @@ fn ever_initialized_map( dfs(db, body, l, &mut stack, &mut result); } for l in body.locals.iter().map(|it| it.0) { - db.unwind_if_cancelled(); + db.unwind_if_revision_cancelled(); if !result[body.start_block].contains_idx(l) { result[body.start_block].insert(l, false); stack.clear(); @@ -504,7 +492,7 @@ fn record_usage(local: LocalId, result: &mut ArenaMap } fn record_usage_for_operand(arg: &Operand, result: &mut ArenaMap) { - if let Operand::Copy(p) | Operand::Move(p) = arg { + if let OperandKind::Copy(p) | OperandKind::Move(p) = arg.kind { record_usage(p.local, result); } } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval.rs index 74a34e2981710..21e5428520e2a 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval.rs @@ -2,26 +2,27 @@ use std::{borrow::Cow, cell::RefCell, fmt::Write, iter, mem, ops::Range}; -use base_db::CrateId; -use chalk_ir::{cast::Cast, Mutability}; +use base_db::Crate; +use chalk_ir::{Mutability, cast::Cast}; use either::Either; use hir_def::{ + AdtId, DefWithBodyId, EnumVariantId, FunctionId, HasModule, ItemContainerId, Lookup, StaticId, + VariantId, builtin_type::BuiltinType, - data::adt::{StructFlags, VariantData}, expr_store::HygieneId, + item_tree::FieldsShape, lang_item::LangItem, layout::{TagEncoding, Variants}, resolver::{HasResolver, TypeNs, ValueNs}, - AdtId, DefWithBodyId, EnumVariantId, FunctionId, HasModule, ItemContainerId, Lookup, StaticId, - VariantId, + signatures::{StaticFlags, StructFlags}, }; -use hir_expand::{mod_path::path, name::Name, HirFileIdExt, InFile}; +use hir_expand::{InFile, mod_path::path, name::Name}; use intern::sym; use la_arena::ArenaMap; use rustc_abi::TargetDataLayout; use rustc_apfloat::{ - ieee::{Half as f16, Quad as f128}, Float, + ieee::{Half as f16, Quad as f128}, }; use rustc_hash::{FxHashMap, FxHashSet}; use span::FileId; @@ -30,7 +31,9 @@ use syntax::{SyntaxNodePtr, TextRange}; use triomphe::Arc; use crate::{ - consteval::{intern_const_scalar, try_const_usize, ConstEvalError}, + CallableDefId, ClosureId, ComplexMemoryMap, Const, ConstData, ConstScalar, FnDefId, Interner, + MemoryMap, Substitution, TraitEnvironment, Ty, TyBuilder, TyExt, TyKind, + consteval::{ConstEvalError, intern_const_scalar, try_const_usize}, db::{HirDatabase, InternedClosure}, display::{ClosureStyle, DisplayTarget, HirDisplay}, infer::PointerCast, @@ -39,15 +42,13 @@ use crate::{ method_resolution::{is_dyn_method, lookup_impl_const}, static_lifetime, traits::FnTrait, - utils::{detect_variant_from_bytes, ClosureSubst}, - CallableDefId, ClosureId, ComplexMemoryMap, Const, ConstData, ConstScalar, FnDefId, Interner, - MemoryMap, Substitution, TraitEnvironment, Ty, TyBuilder, TyExt, TyKind, + utils::{ClosureSubst, detect_variant_from_bytes}, }; use super::{ - return_slot, AggregateKind, BasicBlockId, BinOp, CastKind, LocalId, MirBody, MirLowerError, - MirSpan, Operand, Place, PlaceElem, ProjectionElem, ProjectionStore, Rvalue, StatementKind, - TerminatorKind, UnOp, + AggregateKind, BasicBlockId, BinOp, CastKind, LocalId, MirBody, MirLowerError, MirSpan, + Operand, OperandKind, Place, PlaceElem, ProjectionElem, ProjectionStore, Rvalue, StatementKind, + TerminatorKind, UnOp, return_slot, }; mod shim; @@ -186,7 +187,7 @@ pub struct Evaluator<'a> { cached_fn_trait_func: Option, cached_fn_mut_trait_func: Option, cached_fn_once_trait_func: Option, - crate_id: CrateId, + crate_id: Crate, // FIXME: This is a workaround, see the comment on `interpret_mir` assert_placeholder_ty_is_unused: bool, /// A general limit on execution, to prevent non terminating programs from breaking r-a main process @@ -368,11 +369,11 @@ impl MirEvalError { for (func, span, def) in stack.iter().take(30).rev() { match func { Either::Left(func) => { - let function_name = db.function_data(*func); + let function_name = db.function_signature(*func); writeln!( f, "In function {} ({:?})", - function_name.name.display(db.upcast(), display_target.edition), + function_name.name.display(db, display_target.edition), func )?; } @@ -406,9 +407,9 @@ impl MirEvalError { }, MirSpan::Unknown => continue, }; - let file_id = span.file_id.original_file(db.upcast()); + let file_id = span.file_id.original_file(db); let text_range = span.value.text_range(); - writeln!(f, "{}", span_formatter(file_id.file_id(), text_range))?; + writeln!(f, "{}", span_formatter(file_id.file_id(db), text_range))?; } } match err { @@ -421,10 +422,10 @@ impl MirEvalError { )?; } MirEvalError::MirLowerError(func, err) => { - let function_name = db.function_data(*func); - let self_ = match func.lookup(db.upcast()).container { + let function_name = db.function_signature(*func); + let self_ = match func.lookup(db).container { ItemContainerId::ImplId(impl_id) => Some({ - let generics = crate::generics::generics(db.upcast(), impl_id.into()); + let generics = crate::generics::generics(db, impl_id.into()); let substs = generics.placeholder_subst(db); db.impl_self_ty(impl_id) .substitute(Interner, &substs) @@ -432,10 +433,7 @@ impl MirEvalError { .to_string() }), ItemContainerId::TraitId(it) => Some( - db.trait_data(it) - .name - .display(db.upcast(), display_target.edition) - .to_string(), + db.trait_signature(it).name.display(db, display_target.edition).to_string(), ), _ => None, }; @@ -444,7 +442,7 @@ impl MirEvalError { "MIR lowering for function `{}{}{}` ({:?}) failed due:", self_.as_deref().unwrap_or_default(), if self_.is_some() { "::" } else { "" }, - function_name.name.display(db.upcast(), display_target.edition), + function_name.name.display(db, display_target.edition), func )?; err.pretty_print(f, db, span_formatter, display_target)?; @@ -627,7 +625,7 @@ impl Evaluator<'_> { assert_placeholder_ty_is_unused: bool, trait_env: Option>, ) -> Result> { - let crate_id = owner.module(db.upcast()).krate(); + let crate_id = owner.module(db).krate(); let target_data_layout = match db.target_data_layout(crate_id) { Ok(target_data_layout) => target_data_layout, Err(e) => return Err(MirEvalError::TargetDataLayoutNotAvailable(e)), @@ -657,24 +655,15 @@ impl Evaluator<'_> { mir_or_dyn_index_cache: RefCell::new(Default::default()), unused_locals_store: RefCell::new(Default::default()), cached_ptr_size, - cached_fn_trait_func: db - .lang_item(crate_id, LangItem::Fn) - .and_then(|x| x.as_trait()) - .and_then(|x| { - db.trait_data(x).method_by_name(&Name::new_symbol_root(sym::call.clone())) - }), - cached_fn_mut_trait_func: db - .lang_item(crate_id, LangItem::FnMut) - .and_then(|x| x.as_trait()) - .and_then(|x| { - db.trait_data(x).method_by_name(&Name::new_symbol_root(sym::call_mut.clone())) - }), - cached_fn_once_trait_func: db - .lang_item(crate_id, LangItem::FnOnce) - .and_then(|x| x.as_trait()) - .and_then(|x| { - db.trait_data(x).method_by_name(&Name::new_symbol_root(sym::call_once.clone())) - }), + cached_fn_trait_func: LangItem::Fn + .resolve_trait(db, crate_id) + .and_then(|x| db.trait_items(x).method_by_name(&Name::new_symbol_root(sym::call))), + cached_fn_mut_trait_func: LangItem::FnMut.resolve_trait(db, crate_id).and_then(|x| { + db.trait_items(x).method_by_name(&Name::new_symbol_root(sym::call_mut)) + }), + cached_fn_once_trait_func: LangItem::FnOnce.resolve_trait(db, crate_id).and_then(|x| { + db.trait_items(x).method_by_name(&Name::new_symbol_root(sym::call_once)) + }), }) } @@ -820,12 +809,12 @@ impl Evaluator<'_> { Variants::Multiple { variants, .. } => { &variants[match f.parent { hir_def::VariantId::EnumVariantId(it) => { - RustcEnumVariantIdx(it.lookup(self.db.upcast()).index as usize) + RustcEnumVariantIdx(it.lookup(self.db).index as usize) } _ => { return Err(MirEvalError::InternalError( "mismatched layout".into(), - )) + )); } }] } @@ -867,10 +856,10 @@ impl Evaluator<'_> { } fn operand_ty(&self, o: &Operand, locals: &Locals) -> Result { - Ok(match o { - Operand::Copy(p) | Operand::Move(p) => self.place_ty(p, locals)?, - Operand::Constant(c) => c.data(Interner).ty.clone(), - &Operand::Static(s) => { + Ok(match &o.kind { + OperandKind::Copy(p) | OperandKind::Move(p) => self.place_ty(p, locals)?, + OperandKind::Constant(c) => c.data(Interner).ty.clone(), + &OperandKind::Static(s) => { let ty = self.db.infer(s.into())[self.db.body(s.into()).body_expr].clone(); TyKind::Ref(Mutability::Not, static_lifetime(), ty).intern(Interner) } @@ -1119,7 +1108,7 @@ impl Evaluator<'_> { "Stack overflow. Tried to grow stack to {stack_size} bytes" ))); } - self.stack.extend(iter::repeat(0).take(stack_size)); + self.stack.extend(std::iter::repeat_n(0, stack_size)); Ok((locals, prev_stack_pointer)) } @@ -1641,7 +1630,8 @@ impl Evaluator<'_> { match &layout.variants { Variants::Empty => unreachable!(), Variants::Single { index } => { - let r = self.const_eval_discriminant(self.db.enum_data(e).variants[index.0].0)?; + let r = + self.const_eval_discriminant(self.db.enum_variants(e).variants[index.0].0)?; Ok(r) } Variants::Multiple { tag, tag_encoding, variants, .. } => { @@ -1666,7 +1656,7 @@ impl Evaluator<'_> { .unwrap_or(*untagged_variant) .0; let result = - self.const_eval_discriminant(self.db.enum_data(e).variants[idx].0)?; + self.const_eval_discriminant(self.db.enum_variants(e).variants[idx].0)?; Ok(result) } } @@ -1760,7 +1750,7 @@ impl Evaluator<'_> { AdtId::EnumId(_) => not_supported!("unsizing enums"), }; let Some((last_field, _)) = - self.db.struct_data(id).variant_data.fields().iter().next_back() + self.db.variant_fields(id.into()).fields().iter().next_back() else { not_supported!("unsizing struct without field"); }; @@ -1788,11 +1778,11 @@ impl Evaluator<'_> { subst: Substitution, locals: &Locals, ) -> Result<(usize, Arc, Option<(usize, usize, i128)>)> { - let adt = it.adt_id(self.db.upcast()); + let adt = it.adt_id(self.db); if let DefWithBodyId::VariantId(f) = locals.body.owner { if let VariantId::EnumVariantId(it) = it { if let AdtId::EnumId(e) = adt { - if f.lookup(self.db.upcast()).parent == e { + if f.lookup(self.db).parent == e { // Computing the exact size of enums require resolving the enum discriminants. In order to prevent loops (and // infinite sized type errors) we use a dummy layout let i = self.const_eval_discriminant(it)?; @@ -1810,7 +1800,7 @@ impl Evaluator<'_> { _ => not_supported!("multi variant layout for non-enums"), }; let mut discriminant = self.const_eval_discriminant(enum_variant_id)?; - let lookup = enum_variant_id.lookup(self.db.upcast()); + let lookup = enum_variant_id.lookup(self.db); let rustc_enum_variant_idx = RustcEnumVariantIdx(lookup.index as usize); let variant_layout = variants[rustc_enum_variant_idx].clone(); let have_tag = match tag_encoding { @@ -1863,7 +1853,7 @@ impl Evaluator<'_> { "encoded tag ({offset}, {size}, {value}) is out of bounds 0..{size}" ) .into(), - )) + )); } } } @@ -1875,7 +1865,7 @@ impl Evaluator<'_> { None => { return Err(MirEvalError::InternalError( format!("field offset ({offset}) is out of bounds 0..{size}").into(), - )) + )); } } } @@ -1883,16 +1873,16 @@ impl Evaluator<'_> { } fn eval_operand(&mut self, it: &Operand, locals: &mut Locals) -> Result { - Ok(match it { - Operand::Copy(p) | Operand::Move(p) => { + Ok(match &it.kind { + OperandKind::Copy(p) | OperandKind::Move(p) => { locals.drop_flags.remove_place(p, &locals.body.projection_store); self.eval_place(p, locals)? } - Operand::Static(st) => { + OperandKind::Static(st) => { let addr = self.eval_static(*st, locals)?; Interval::new(addr, self.ptr_size()) } - Operand::Constant(konst) => self.allocate_const_in_heap(locals, konst)?, + OperandKind::Constant(konst) => self.allocate_const_in_heap(locals, konst)?, }) } @@ -1917,7 +1907,7 @@ impl Evaluator<'_> { .db .const_eval(const_id, subst, Some(self.trait_env.clone())) .map_err(|e| { - let name = const_id.name(self.db.upcast()); + let name = const_id.name(self.db); MirEvalError::ConstEvalError(name, Box::new(e)) })?; if let chalk_ir::ConstValue::Concrete(c) = &result_owner.data(Interner).value { @@ -2053,7 +2043,7 @@ impl Evaluator<'_> { _ => { return Err(MirEvalError::UndefinedBehavior(format!( "invalid memory write at address {addr:?}" - ))) + ))); } } @@ -2068,7 +2058,7 @@ impl Evaluator<'_> { } if let DefWithBodyId::VariantId(f) = locals.body.owner { if let Some((AdtId::EnumId(e), _)) = ty.as_adt() { - if f.lookup(self.db.upcast()).parent == e { + if f.lookup(self.db).parent == e { // Computing the exact size of enums require resolving the enum discriminants. In order to prevent loops (and // infinite sized type errors) we use a dummy size return Ok(Some((16, 16))); @@ -2121,7 +2111,7 @@ impl Evaluator<'_> { return Err(MirEvalError::Panic(format!("Memory allocation of {size} bytes failed"))); } let pos = self.heap.len(); - self.heap.extend(iter::repeat(0).take(size)); + self.heap.extend(std::iter::repeat_n(0, size)); Ok(Address::Heap(pos)) } @@ -2242,10 +2232,10 @@ impl Evaluator<'_> { } chalk_ir::TyKind::Adt(adt, subst) => match adt.0 { AdtId::StructId(s) => { - let data = this.db.struct_data(s); + let data = this.db.variant_fields(s.into()); let layout = this.layout(ty)?; let field_types = this.db.field_types(s.into()); - for (f, _) in data.variant_data.fields().iter() { + for (f, _) in data.fields().iter() { let offset = layout .fields .offset(u32::from(f.into_raw()) as usize) @@ -2271,7 +2261,7 @@ impl Evaluator<'_> { bytes, e, ) { - let data = &this.db.enum_variant_data(v).variant_data; + let data = &this.db.variant_fields(v.into()); let field_types = this.db.field_types(v.into()); for (f, _) in data.fields().iter() { let offset = @@ -2451,7 +2441,7 @@ impl Evaluator<'_> { let mir_body = self .db .monomorphized_mir_body_for_closure( - closure, + closure.into(), generic_args.clone(), self.trait_env.clone(), ) @@ -2558,6 +2548,7 @@ impl Evaluator<'_> { } else { let (imp, generic_args) = self.db.lookup_impl_method(self.trait_env.clone(), def, generic_args.clone()); + let mir_body = self .db .monomorphized_mir_body(imp.into(), generic_args, self.trait_env.clone()) @@ -2616,13 +2607,10 @@ impl Evaluator<'_> { let ty = ty.clone().cast(Interner); let generics_for_target = Substitution::from_iter( Interner, - generic_args.iter(Interner).enumerate().map(|(i, it)| { - if i == self_ty_idx { - &ty - } else { - it - } - }), + generic_args + .iter(Interner) + .enumerate() + .map(|(i, it)| if i == self_ty_idx { &ty } else { it }), ); self.exec_fn_with_args( def, @@ -2756,8 +2744,8 @@ impl Evaluator<'_> { if let Some(o) = self.static_locations.get(&st) { return Ok(*o); }; - let static_data = self.db.static_data(st); - let result = if !static_data.is_extern { + let static_data = self.db.static_signature(st); + let result = if !static_data.flags.contains(StaticFlags::EXTERN) { let konst = self.db.const_eval_static(st).map_err(|e| { MirEvalError::ConstEvalError(static_data.name.as_str().to_owned(), Box::new(e)) })?; @@ -2781,14 +2769,14 @@ impl Evaluator<'_> { match r { Ok(r) => Ok(r), Err(e) => { - let db = self.db.upcast(); + let db = self.db; let loc = variant.lookup(db); let enum_loc = loc.parent.lookup(db); - let edition = self.db.crate_graph()[self.crate_id].edition; + let edition = self.crate_id.data(self.db).edition; let name = format!( "{}::{}", - enum_loc.id.item_tree(db)[enum_loc.id.value].name.display(db.upcast(), edition), - loc.id.item_tree(db)[loc.id.value].name.display(db.upcast(), edition), + enum_loc.id.item_tree(db)[enum_loc.id.value].name.display(db, edition), + loc.id.item_tree(db)[loc.id.value].name.display(db, edition), ); Err(MirEvalError::ConstEvalError(name, Box::new(e))) } @@ -2816,8 +2804,8 @@ impl Evaluator<'_> { span: MirSpan, ) -> Result<()> { let Some(drop_fn) = (|| { - let drop_trait = self.db.lang_item(self.crate_id, LangItem::Drop)?.as_trait()?; - self.db.trait_data(drop_trait).method_by_name(&Name::new_symbol_root(sym::drop.clone())) + let drop_trait = LangItem::Drop.resolve_trait(self.db, self.crate_id)?; + self.db.trait_items(drop_trait).method_by_name(&Name::new_symbol_root(sym::drop)) })() else { // in some tests we don't have drop trait in minicore, and // we can ignore drop in them. @@ -2842,16 +2830,16 @@ impl Evaluator<'_> { TyKind::Adt(id, subst) => { match id.0 { AdtId::StructId(s) => { - let data = self.db.struct_data(s); + let data = self.db.struct_signature(s); if data.flags.contains(StructFlags::IS_MANUALLY_DROP) { return Ok(()); } let layout = self.layout_adt(id.0, subst.clone())?; - match data.variant_data.as_ref() { - VariantData::Record { fields, .. } - | VariantData::Tuple { fields, .. } => { + let variant_fields = self.db.variant_fields(s.into()); + match variant_fields.shape { + FieldsShape::Record | FieldsShape::Tuple => { let field_types = self.db.field_types(s.into()); - for (field, _) in fields.iter() { + for (field, _) in variant_fields.fields().iter() { let offset = layout .fields .offset(u32::from(field.into_raw()) as usize) @@ -2861,7 +2849,7 @@ impl Evaluator<'_> { self.run_drop_glue_deep(ty, locals, addr, &[], span)?; } } - VariantData::Unit => (), + FieldsShape::Unit => (), } } AdtId::UnionId(_) => (), // union fields don't need drop @@ -2919,15 +2907,15 @@ pub fn render_const_using_debug_impl( drop_flags: DropFlags::default(), }; let data = evaluator.allocate_const_in_heap(locals, c)?; - let resolver = owner.resolver(db.upcast()); + let resolver = owner.resolver(db); let Some(TypeNs::TraitId(debug_trait)) = resolver.resolve_path_in_type_ns_fully( - db.upcast(), - &hir_def::path::Path::from_known_path_with_no_generic(path![core::fmt::Debug]), + db, + &hir_def::expr_store::path::Path::from_known_path_with_no_generic(path![core::fmt::Debug]), ) else { not_supported!("core::fmt::Debug not found"); }; let Some(debug_fmt_fn) = - db.trait_data(debug_trait).method_by_name(&Name::new_symbol_root(sym::fmt.clone())) + db.trait_items(debug_trait).method_by_name(&Name::new_symbol_root(sym::fmt)) else { not_supported!("core::fmt::Debug::fmt not found"); }; @@ -2952,8 +2940,8 @@ pub fn render_const_using_debug_impl( evaluator.write_memory(a3.offset(2 * evaluator.ptr_size()), &a2.to_bytes())?; evaluator.write_memory(a3.offset(3 * evaluator.ptr_size()), &[1])?; let Some(ValueNs::FunctionId(format_fn)) = resolver.resolve_path_in_value_ns_fully( - db.upcast(), - &hir_def::path::Path::from_known_path_with_no_generic(path![std::fmt::format]), + db, + &hir_def::expr_store::path::Path::from_known_path_with_no_generic(path![std::fmt::format]), HygieneId::ROOT, ) else { not_supported!("std::fmt::format not found"); diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/shim.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/shim.rs index f61ecabb7e41d..26ef95d264be0 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/shim.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/shim.rs @@ -6,23 +6,22 @@ use std::cmp::{self, Ordering}; use chalk_ir::TyKind; use hir_def::{ builtin_type::{BuiltinInt, BuiltinUint}, - lang_item::LangItemTarget, resolver::HasResolver, }; use hir_expand::name::Name; -use intern::{sym, Symbol}; +use intern::{Symbol, sym}; use stdx::never; use crate::{ + DropGlue, display::DisplayTarget, error_lifetime, mir::eval::{ - pad16, Address, AdtId, Arc, BuiltinType, Evaluator, FunctionId, HasModule, HirDisplay, + Address, AdtId, Arc, BuiltinType, Evaluator, FunctionId, HasModule, HirDisplay, InternedClosure, Interner, Interval, IntervalAndTy, IntervalOrOwned, ItemContainerId, LangItem, Layout, Locals, Lookup, MirEvalError, MirSpan, Mutability, Result, Substitution, - Ty, TyBuilder, TyExt, + Ty, TyBuilder, TyExt, pad16, }, - DropGlue, }; mod simd; @@ -57,16 +56,16 @@ impl Evaluator<'_> { return Ok(false); } - let function_data = self.db.function_data(def); + let function_data = self.db.function_signature(def); let attrs = self.db.attrs(def.into()); - let is_intrinsic = attrs.by_key(&sym::rustc_intrinsic).exists() + let is_intrinsic = attrs.by_key(sym::rustc_intrinsic).exists() // Keep this around for a bit until extern "rustc-intrinsic" abis are no longer used || (match &function_data.abi { Some(abi) => *abi == sym::rust_dash_intrinsic, - None => match def.lookup(self.db.upcast()).container { + None => match def.lookup(self.db).container { hir_def::ItemContainerId::ExternBlockId(block) => { - let id = block.lookup(self.db.upcast()).id; - id.item_tree(self.db.upcast())[id.value].abi.as_ref() + let id = block.lookup(self.db).id; + id.item_tree(self.db)[id.value].abi.as_ref() == Some(&sym::rust_dash_intrinsic) } _ => false, @@ -82,13 +81,13 @@ impl Evaluator<'_> { locals, span, !function_data.has_body() - || attrs.by_key(&sym::rustc_intrinsic_must_be_overridden).exists(), + || attrs.by_key(sym::rustc_intrinsic_must_be_overridden).exists(), ); } - let is_extern_c = match def.lookup(self.db.upcast()).container { + let is_extern_c = match def.lookup(self.db).container { hir_def::ItemContainerId::ExternBlockId(block) => { - let id = block.lookup(self.db.upcast()).id; - id.item_tree(self.db.upcast())[id.value].abi.as_ref() == Some(&sym::C) + let id = block.lookup(self.db).id; + id.item_tree(self.db)[id.value].abi.as_ref() == Some(&sym::C) } _ => false, }; @@ -124,7 +123,7 @@ impl Evaluator<'_> { destination.write_from_bytes(self, &result)?; return Ok(true); } - if let ItemContainerId::TraitId(t) = def.lookup(self.db.upcast()).container { + if let ItemContainerId::TraitId(t) = def.lookup(self.db).container { if self.db.lang_attr(t.into()) == Some(LangItem::Clone) { let [self_ty] = generic_args.as_slice(Interner) else { not_supported!("wrong generic arg count for clone"); @@ -154,11 +153,10 @@ impl Evaluator<'_> { ) -> Result> { // `PanicFmt` is redirected to `ConstPanicFmt` if let Some(LangItem::PanicFmt) = self.db.lang_attr(def.into()) { - let resolver = - self.db.crate_def_map(self.crate_id).crate_root().resolver(self.db.upcast()); + let resolver = self.db.crate_def_map(self.crate_id).crate_root().resolver(self.db); - let Some(hir_def::lang_item::LangItemTarget::Function(const_panic_fmt)) = - self.db.lang_item(resolver.krate(), LangItem::ConstPanicFmt) + let Some(const_panic_fmt) = + LangItem::ConstPanicFmt.resolve_function(self.db, resolver.krate()) else { not_supported!("const_panic_fmt lang item not found or not a function"); }; @@ -300,7 +298,7 @@ impl Evaluator<'_> { use LangItem::*; let attrs = self.db.attrs(def.into()); - if attrs.by_key(&sym::rustc_const_panic_str).exists() { + if attrs.by_key(sym::rustc_const_panic_str).exists() { // `#[rustc_const_panic_str]` is treated like `lang = "begin_panic"` by rustc CTFE. return Some(LangItem::BeginPanic); } @@ -569,7 +567,7 @@ impl Evaluator<'_> { } String::from_utf8_lossy(&name_buf) }; - let value = self.db.crate_graph()[self.crate_id].env.get(&name); + let value = self.crate_id.env(self.db).get(&name); match value { None => { // Write null as fail @@ -828,14 +826,14 @@ impl Evaluator<'_> { }; let ty_name = match ty.display_source_code( self.db, - locals.body.owner.module(self.db.upcast()), + locals.body.owner.module(self.db), true, ) { Ok(ty_name) => ty_name, // Fallback to human readable display in case of `Err`. Ideally we want to use `display_source_code` to // render full paths. Err(_) => { - let krate = locals.body.owner.krate(self.db.upcast()); + let krate = locals.body.owner.krate(self.db); ty.display(self.db, DisplayTarget::from_crate(self.db, krate)).to_string() } }; @@ -1258,12 +1256,12 @@ impl Evaluator<'_> { let addr = tuple.interval.addr.offset(offset); args.push(IntervalAndTy::new(addr, field, self, locals)?); } - if let Some(target) = self.db.lang_item(self.crate_id, LangItem::FnOnce) { - if let Some(def) = target.as_trait().and_then(|it| { - self.db - .trait_data(it) - .method_by_name(&Name::new_symbol_root(sym::call_once.clone())) - }) { + if let Some(target) = LangItem::FnOnce.resolve_trait(self.db, self.crate_id) { + if let Some(def) = self + .db + .trait_items(target) + .method_by_name(&Name::new_symbol_root(sym::call_once)) + { self.exec_fn_trait( def, &args, @@ -1357,7 +1355,7 @@ impl Evaluator<'_> { _ => { return Err(MirEvalError::InternalError( "three_way_compare expects an integral type".into(), - )) + )); } }; let rhs = rhs.get(self)?; @@ -1377,9 +1375,7 @@ impl Evaluator<'_> { } } } - if let Some(LangItemTarget::EnumId(e)) = - self.db.lang_item(self.crate_id, LangItem::Ordering) - { + if let Some(e) = LangItem::Ordering.resolve_enum(self.db, self.crate_id) { let ty = self.db.ty(e.into()); let r = self .compute_discriminant(ty.skip_binders().clone(), &[result as i8 as u8])?; diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/shim/simd.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/shim/simd.rs index e229a4ab31727..984648cfec328 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/shim/simd.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/shim/simd.rs @@ -2,8 +2,8 @@ use std::cmp::Ordering; -use crate::consteval::try_const_usize; use crate::TyKind; +use crate::consteval::try_const_usize; use super::*; @@ -31,8 +31,8 @@ impl Evaluator<'_> { Some(len) => len, _ => { if let AdtId::StructId(id) = id.0 { - let struct_data = self.db.struct_data(id); - let fields = struct_data.variant_data.fields(); + let struct_data = self.db.variant_fields(id.into()); + let fields = struct_data.fields(); let Some((first_field, _)) = fields.iter().next() else { not_supported!("simd type with no field"); }; @@ -127,7 +127,7 @@ impl Evaluator<'_> { Ordering::Greater => ["ge", "gt", "ne"].contains(&name), }; let result = if result { 255 } else { 0 }; - destination_bytes.extend(std::iter::repeat(result).take(dest_size)); + destination_bytes.extend(std::iter::repeat_n(result, dest_size)); } destination.write_from_bytes(self, &destination_bytes) @@ -164,7 +164,7 @@ impl Evaluator<'_> { None => { return Err(MirEvalError::InternalError( "simd type with unevaluatable len param".into(), - )) + )); } }; let (left_len, _) = self.detect_simd_ty(&left.ty)?; @@ -179,7 +179,7 @@ impl Evaluator<'_> { None => { return Err(MirEvalError::InternalError( "out of bound access in simd shuffle".into(), - )) + )); } }; result.extend(val); diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/tests.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/tests.rs index 084c391d26cbb..3abbbe45e6f87 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/tests.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/tests.rs @@ -1,22 +1,24 @@ use hir_def::db::DefDatabase; -use span::{Edition, EditionedFileId}; +use hir_expand::EditionedFileId; +use span::Edition; use syntax::{TextRange, TextSize}; use test_fixture::WithFixture; use crate::display::DisplayTarget; -use crate::{db::HirDatabase, mir::MirLowerError, test_db::TestDB, Interner, Substitution}; +use crate::{Interner, Substitution, db::HirDatabase, mir::MirLowerError, test_db::TestDB}; -use super::{interpret_mir, MirEvalError}; +use super::{MirEvalError, interpret_mir}; fn eval_main(db: &TestDB, file_id: EditionedFileId) -> Result<(String, String), MirEvalError> { - let module_id = db.module_for_file(file_id); + let module_id = db.module_for_file(file_id.file_id(db)); let def_map = module_id.def_map(db); let scope = &def_map[module_id.local_id].scope; let func_id = scope .declarations() .find_map(|x| match x { hir_def::ModuleDefId::FunctionId(x) => { - if db.function_data(x).name.display(db, Edition::CURRENT).to_string() == "main" { + if db.function_signature(x).name.display(db, Edition::CURRENT).to_string() == "main" + { Some(x) } else { None @@ -68,7 +70,7 @@ fn check_pass_and_stdio( let span_formatter = |file, range: TextRange| { format!("{:?} {:?}..{:?}", file, line_index(range.start()), line_index(range.end())) }; - let krate = db.module_for_file(file_id).krate(); + let krate = db.module_for_file(file_id.file_id(&db)).krate(); e.pretty_print(&mut err, &db, span_formatter, DisplayTarget::from_crate(&db, krate)) .unwrap(); panic!("Error in interpreting: {err}"); diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower.rs index 520717e799521..7b48b15d9ea91 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower.rs @@ -2,21 +2,19 @@ use std::{fmt::Write, iter, mem}; -use base_db::{ra_salsa::Cycle, CrateId}; +use base_db::Crate; use chalk_ir::{BoundVar, ConstData, DebruijnIndex, TyKind}; use hir_def::{ - data::adt::{StructKind, VariantData}, - expr_store::{Body, HygieneId}, + AdtId, DefWithBodyId, EnumVariantId, GeneralConstId, HasModule, ItemContainerId, LocalFieldId, + Lookup, TraitId, TupleId, TypeOrConstParamId, + expr_store::{Body, ExpressionStore, HygieneId, path::Path}, hir::{ ArithOp, Array, BinaryOp, BindingAnnotation, BindingId, ExprId, LabelId, Literal, MatchArm, Pat, PatId, RecordFieldPat, RecordLitField, }, - lang_item::{LangItem, LangItemTarget}, - path::Path, + item_tree::FieldsShape, + lang_item::{LangItem, LangItemTarget, lang_item}, resolver::{HasResolver, ResolveValueResult, Resolver, ValueNs}, - type_ref::TypesMap, - AdtId, DefWithBodyId, EnumVariantId, GeneralConstId, HasModule, ItemContainerId, LocalFieldId, - Lookup, TraitId, TupleId, TypeOrConstParamId, }; use hir_expand::name::Name; use la_arena::ArenaMap; @@ -27,29 +25,31 @@ use syntax::TextRange; use triomphe::Arc; use crate::{ + Adjust, Adjustment, AutoBorrow, CallableDefId, TyBuilder, TyExt, consteval::ConstEvalError, - db::{HirDatabase, InternedClosure}, - display::{hir_display_with_types_map, DisplayTarget, HirDisplay}, + db::{HirDatabase, InternedClosure, InternedClosureId}, + display::{DisplayTarget, HirDisplay, hir_display_with_store}, error_lifetime, generics::generics, - infer::{cast::CastTy, unify::InferenceTable, CaptureKind, CapturedItem, TypeMismatch}, + infer::{CaptureKind, CapturedItem, TypeMismatch, cast::CastTy, unify::InferenceTable}, inhabitedness::is_ty_uninhabited_from, layout::LayoutError, mapping::ToChalk, mir::{ - intern_const_scalar, return_slot, AggregateKind, Arena, BasicBlock, BasicBlockId, BinOp, - BorrowKind, CastKind, ClosureId, ConstScalar, Either, Expr, FieldId, Idx, InferenceResult, - Interner, Local, LocalId, MemoryMap, MirBody, MirSpan, Mutability, Operand, Place, - PlaceElem, PointerCast, ProjectionElem, ProjectionStore, RawIdx, Rvalue, Statement, - StatementKind, Substitution, SwitchTargets, Terminator, TerminatorKind, TupleFieldId, Ty, - UnOp, VariantId, + AggregateKind, Arena, BasicBlock, BasicBlockId, BinOp, BorrowKind, CastKind, ConstScalar, + Either, Expr, FieldId, Idx, InferenceResult, Interner, Local, LocalId, MemoryMap, MirBody, + MirSpan, Mutability, Operand, Place, PlaceElem, PointerCast, ProjectionElem, + ProjectionStore, RawIdx, Rvalue, Statement, StatementKind, Substitution, SwitchTargets, + Terminator, TerminatorKind, TupleFieldId, Ty, UnOp, VariantId, intern_const_scalar, + return_slot, }, static_lifetime, traits::FnTrait, utils::ClosureSubst, - Adjust, Adjustment, AutoBorrow, CallableDefId, TyBuilder, TyExt, }; +use super::OperandKind; + mod as_place; mod pattern_matching; @@ -179,7 +179,7 @@ impl MirLowerError { writeln!( f, "Missing function definition for {}", - body.pretty_print_expr(db.upcast(), *owner, *it, display_target.edition) + body.pretty_print_expr(db, *owner, *it, display_target.edition) )?; } MirLowerError::HasErrors => writeln!(f, "Type inference result contains errors")?, @@ -195,10 +195,7 @@ impl MirLowerError { writeln!( f, "Generic arg not provided for {}", - param - .name() - .unwrap_or(&Name::missing()) - .display(db.upcast(), display_target.edition) + param.name().unwrap_or(&Name::missing()).display(db, display_target.edition) )?; writeln!(f, "Provided args: [")?; for g in subst.iter(Interner) { @@ -255,10 +252,10 @@ impl MirLowerError { db: &dyn HirDatabase, p: &Path, display_target: DisplayTarget, - types_map: &TypesMap, + store: &ExpressionStore, ) -> Self { Self::UnresolvedName( - hir_display_with_types_map(p, types_map).display(db, display_target).to_string(), + hir_display_with_store(p, store).display(db, display_target).to_string(), ) } } @@ -290,7 +287,7 @@ impl<'ctx> MirLowerCtx<'ctx> { owner, closures: vec![], }; - let resolver = owner.resolver(db.upcast()); + let resolver = owner.resolver(db); MirLowerCtx { result: mir, @@ -329,7 +326,7 @@ impl<'ctx> MirLowerCtx<'ctx> { let Some((p, current)) = self.lower_expr_as_place(current, expr_id, true)? else { return Ok(None); }; - Ok(Some((Operand::Copy(p), current))) + Ok(Some((Operand { kind: OperandKind::Copy(p), span: Some(expr_id.into()) }, current))) } fn lower_expr_to_place_with_adjust( @@ -352,7 +349,12 @@ impl<'ctx> MirLowerCtx<'ctx> { else { return Ok(None); }; - self.push_assignment(current, place, Operand::Copy(p).into(), expr_id.into()); + self.push_assignment( + current, + place, + Operand { kind: OperandKind::Copy(p), span: None }.into(), + expr_id.into(), + ); Ok(Some(current)) } Adjust::Borrow(AutoBorrow::Ref(_, m) | AutoBorrow::RawPtr(m)) => { @@ -376,7 +378,7 @@ impl<'ctx> MirLowerCtx<'ctx> { place, Rvalue::Cast( CastKind::PointerCoercion(*cast), - Operand::Copy(p), + Operand { kind: OperandKind::Copy(p), span: None }, last.target.clone(), ), expr_id.into(), @@ -415,63 +417,62 @@ impl<'ctx> MirLowerCtx<'ctx> { } Expr::Missing => { if let DefWithBodyId::FunctionId(f) = self.owner { - let assoc = f.lookup(self.db.upcast()); + let assoc = f.lookup(self.db); if let ItemContainerId::TraitId(t) = assoc.container { - let name = &self.db.function_data(f).name; + let name = &self.db.function_signature(f).name; return Err(MirLowerError::TraitFunctionDefinition(t, name.clone())); } } Err(MirLowerError::IncompleteExpr) } Expr::Path(p) => { - let pr = if let Some((assoc, subst)) = - self.infer.assoc_resolutions_for_expr(expr_id) - { - match assoc { - hir_def::AssocItemId::ConstId(c) => { - self.lower_const( - c.into(), - current, - place, - subst, - expr_id.into(), - self.expr_ty_without_adjust(expr_id), - )?; - return Ok(Some(current)); - } - hir_def::AssocItemId::FunctionId(_) => { - // FnDefs are zero sized, no action is needed. - return Ok(Some(current)); + let pr = + if let Some((assoc, subst)) = self.infer.assoc_resolutions_for_expr(expr_id) { + match assoc { + hir_def::AssocItemId::ConstId(c) => { + self.lower_const( + c.into(), + current, + place, + subst, + expr_id.into(), + self.expr_ty_without_adjust(expr_id), + )?; + return Ok(Some(current)); + } + hir_def::AssocItemId::FunctionId(_) => { + // FnDefs are zero sized, no action is needed. + return Ok(Some(current)); + } + hir_def::AssocItemId::TypeAliasId(_) => { + // FIXME: If it is unreachable, use proper error instead of `not_supported`. + not_supported!("associated functions and types") + } } - hir_def::AssocItemId::TypeAliasId(_) => { - // FIXME: If it is unreachable, use proper error instead of `not_supported`. - not_supported!("associated functions and types") + } else if let Some(variant) = self.infer.variant_resolution_for_expr(expr_id) { + match variant { + VariantId::EnumVariantId(e) => ValueNs::EnumVariantId(e), + VariantId::StructId(s) => ValueNs::StructId(s), + VariantId::UnionId(_) => implementation_error!("Union variant as path"), } - } - } else if let Some(variant) = self.infer.variant_resolution_for_expr(expr_id) { - match variant { - VariantId::EnumVariantId(e) => ValueNs::EnumVariantId(e), - VariantId::StructId(s) => ValueNs::StructId(s), - VariantId::UnionId(_) => implementation_error!("Union variant as path"), - } - } else { - let resolver_guard = - self.resolver.update_to_inner_scope(self.db.upcast(), self.owner, expr_id); - let hygiene = self.body.expr_path_hygiene(expr_id); - let result = self - .resolver - .resolve_path_in_value_ns_fully(self.db.upcast(), p, hygiene) - .ok_or_else(|| { - MirLowerError::unresolved_path( - self.db, - p, - DisplayTarget::from_crate(self.db, self.krate()), - &self.body.types, - ) - })?; - self.resolver.reset_to_guard(resolver_guard); - result - }; + } else { + let resolver_guard = + self.resolver.update_to_inner_scope(self.db, self.owner, expr_id); + let hygiene = self.body.expr_path_hygiene(expr_id); + let result = self + .resolver + .resolve_path_in_value_ns_fully(self.db, p, hygiene) + .ok_or_else(|| { + MirLowerError::unresolved_path( + self.db, + p, + DisplayTarget::from_crate(self.db, self.krate()), + self.body, + ) + })?; + self.resolver.reset_to_guard(resolver_guard); + result + }; match pr { ValueNs::LocalBinding(_) | ValueNs::StaticId(_) => { let Some((temp, current)) = @@ -482,7 +483,7 @@ impl<'ctx> MirLowerCtx<'ctx> { self.push_assignment( current, place, - Operand::Copy(temp).into(), + Operand { kind: OperandKind::Copy(temp), span: None }.into(), expr_id.into(), ); Ok(Some(current)) @@ -499,8 +500,8 @@ impl<'ctx> MirLowerCtx<'ctx> { Ok(Some(current)) } ValueNs::EnumVariantId(variant_id) => { - let variant_data = &self.db.enum_variant_data(variant_id).variant_data; - if variant_data.kind() == StructKind::Unit { + let variant_fields = &self.db.variant_fields(variant_id.into()); + if variant_fields.shape == FieldsShape::Unit { let ty = self.infer.type_of_expr[expr_id].clone(); current = self.lower_enum_variant( variant_id, @@ -515,29 +516,31 @@ impl<'ctx> MirLowerCtx<'ctx> { Ok(Some(current)) } ValueNs::GenericParam(p) => { - let Some(def) = self.owner.as_generic_def_id(self.db.upcast()) else { + let Some(def) = self.owner.as_generic_def_id(self.db) else { not_supported!("owner without generic def id"); }; - let gen = generics(self.db.upcast(), def); + let generics = generics(self.db, def); let ty = self.expr_ty_without_adjust(expr_id); self.push_assignment( current, place, - Operand::Constant( - ConstData { - ty, - value: chalk_ir::ConstValue::BoundVar(BoundVar::new( - DebruijnIndex::INNERMOST, - gen.type_or_const_param_idx(p.into()).ok_or( - MirLowerError::TypeError( - "fail to lower const generic param", - ), - )?, - )), - } - .intern(Interner), - ) - .into(), + Rvalue::from(Operand { + kind: OperandKind::Constant( + ConstData { + ty, + value: chalk_ir::ConstValue::BoundVar(BoundVar::new( + DebruijnIndex::INNERMOST, + generics.type_or_const_param_idx(p.into()).ok_or( + MirLowerError::TypeError( + "fail to lower const generic param", + ), + )?, + )), + } + .intern(Interner), + ), + span: None, + }), expr_id.into(), ); Ok(Some(current)) @@ -579,7 +582,7 @@ impl<'ctx> MirLowerCtx<'ctx> { }; self.push_fake_read(current, cond_place, expr_id.into()); let resolver_guard = - self.resolver.update_to_inner_scope(self.db.upcast(), self.owner, expr_id); + self.resolver.update_to_inner_scope(self.db, self.owner, expr_id); let (then_target, else_target) = self.pattern_match(current, None, cond_place, *pat)?; self.resolver.reset_to_guard(resolver_guard); @@ -695,7 +698,7 @@ impl<'ctx> MirLowerCtx<'ctx> { let (func_id, generic_args) = self.infer.method_resolution(expr_id).ok_or_else(|| { MirLowerError::UnresolvedMethod( - method_name.display(self.db.upcast(), self.edition()).to_string(), + method_name.display(self.db, self.edition()).to_string(), ) })?; let func = Operand::from_fn(self.db, func_id, generic_args); @@ -717,7 +720,7 @@ impl<'ctx> MirLowerCtx<'ctx> { self.push_fake_read(current, cond_place, expr_id.into()); let mut end = None; let resolver_guard = - self.resolver.update_to_inner_scope(self.db.upcast(), self.owner, expr_id); + self.resolver.update_to_inner_scope(self.db, self.owner, expr_id); for MatchArm { pat, guard, expr } in arms.iter() { let (then, mut otherwise) = self.pattern_match(current, None, cond_place, *pat)?; @@ -840,7 +843,7 @@ impl<'ctx> MirLowerCtx<'ctx> { let variant_id = self.infer.variant_resolution_for_expr(expr_id).ok_or_else(|| match path { Some(p) => MirLowerError::UnresolvedName( - hir_display_with_types_map(&**p, &self.body.types) + hir_display_with_store(&**p, self.body) .display(self.db, self.display_target()) .to_string(), ), @@ -850,13 +853,13 @@ impl<'ctx> MirLowerCtx<'ctx> { TyKind::Adt(_, s) => s.clone(), _ => not_supported!("Non ADT record literal"), }; - let variant_data = variant_id.variant_data(self.db.upcast()); + let variant_fields = self.db.variant_fields(variant_id); match variant_id { VariantId::EnumVariantId(_) | VariantId::StructId(_) => { - let mut operands = vec![None; variant_data.fields().len()]; + let mut operands = vec![None; variant_fields.fields().len()]; for RecordLitField { name, expr } in fields.iter() { let field_id = - variant_data.field(name).ok_or(MirLowerError::UnresolvedField)?; + variant_fields.field(name).ok_or(MirLowerError::UnresolvedField)?; let Some((op, c)) = self.lower_expr_to_some_operand(*expr, current)? else { return Ok(None); @@ -882,7 +885,7 @@ impl<'ctx> MirLowerCtx<'ctx> { })), &mut self.result.projection_store, ); - Operand::Copy(p) + Operand { kind: OperandKind::Copy(p), span: None } } }) .collect(), @@ -899,7 +902,7 @@ impl<'ctx> MirLowerCtx<'ctx> { not_supported!("Union record literal with more than one field"); }; let local_id = - variant_data.field(name).ok_or(MirLowerError::UnresolvedField)?; + variant_fields.field(name).ok_or(MirLowerError::UnresolvedField)?; let place = place.project( PlaceElem::Field(Either::Left(FieldId { parent: union_id.into(), @@ -914,17 +917,18 @@ impl<'ctx> MirLowerCtx<'ctx> { Expr::Await { .. } => not_supported!("await"), Expr::Yeet { .. } => not_supported!("yeet"), Expr::Async { .. } => not_supported!("async block"), - &Expr::Const(id) => { - let subst = self.placeholder_subst(); - self.lower_const( - id.into(), - current, - place, - subst, - expr_id.into(), - self.expr_ty_without_adjust(expr_id), - )?; - Ok(Some(current)) + &Expr::Const(_) => { + // let subst = self.placeholder_subst(); + // self.lower_const( + // id.into(), + // current, + // place, + // subst, + // expr_id.into(), + // self.expr_ty_without_adjust(expr_id), + // )?; + // Ok(Some(current)) + not_supported!("const block") } Expr::Cast { expr, type_ref: _ } => { let Some((it, current)) = self.lower_expr_to_some_operand(*expr, current)? else { @@ -984,7 +988,12 @@ impl<'ctx> MirLowerCtx<'ctx> { else { return Ok(None); }; - self.push_assignment(current, place, Operand::Copy(p).into(), expr_id.into()); + self.push_assignment( + current, + place, + Operand { kind: OperandKind::Copy(p), span: None }.into(), + expr_id.into(), + ); Ok(Some(current)) } Expr::UnaryOp { @@ -1061,8 +1070,11 @@ impl<'ctx> MirLowerCtx<'ctx> { else { return Ok(None); }; - let r_value = - Rvalue::CheckedBinaryOp(op.into(), Operand::Copy(lhs_place), rhs_op); + let r_value = Rvalue::CheckedBinaryOp( + op.into(), + Operand { kind: OperandKind::Copy(lhs_place), span: None }, + rhs_op, + ); self.push_assignment(current, lhs_place, r_value, expr_id.into()); return Ok(Some(current)); } @@ -1130,7 +1142,7 @@ impl<'ctx> MirLowerCtx<'ctx> { }; self.push_fake_read(current, value, expr_id.into()); let resolver_guard = - self.resolver.update_to_inner_scope(self.db.upcast(), self.owner, expr_id); + self.resolver.update_to_inner_scope(self.db, self.owner, expr_id); current = self.pattern_match_assignment(current, value, target)?; self.resolver.reset_to_guard(resolver_guard); Ok(Some(current)) @@ -1165,8 +1177,7 @@ impl<'ctx> MirLowerCtx<'ctx> { Rvalue::Aggregate( AggregateKind::Adt(st.into(), subst.clone()), self.db - .struct_data(st) - .variant_data + .variant_fields(st.into()) .fields() .iter() .map(|it| { @@ -1238,9 +1249,11 @@ impl<'ctx> MirLowerCtx<'ctx> { Rvalue::Ref(*bk, p), capture_spans[0], ); - operands.push(Operand::Move(tmp)); + operands.push(Operand { kind: OperandKind::Move(tmp), span: None }); + } + CaptureKind::ByValue => { + operands.push(Operand { kind: OperandKind::Move(p), span: None }) } - CaptureKind::ByValue => operands.push(Operand::Move(p)), } } self.push_assignment( @@ -1279,7 +1292,7 @@ impl<'ctx> MirLowerCtx<'ctx> { _ => { return Err(MirLowerError::TypeError( "Array expression with non array type", - )) + )); } }; let Some(values) = elements @@ -1311,7 +1324,7 @@ impl<'ctx> MirLowerCtx<'ctx> { _ => { return Err(MirLowerError::TypeError( "Array repeat expression with non array type", - )) + )); } }; let r = Rvalue::Repeat(init, len); @@ -1330,7 +1343,7 @@ impl<'ctx> MirLowerCtx<'ctx> { } fn placeholder_subst(&mut self) -> Substitution { - match self.owner.as_generic_def_id(self.db.upcast()) { + match self.owner.as_generic_def_id(self.db) { Some(it) => TyBuilder::placeholder_subst(self.db, it), None => Substitution::empty(Interner), } @@ -1371,13 +1384,13 @@ impl<'ctx> MirLowerCtx<'ctx> { MirLowerError::unresolved_path( self.db, c, - DisplayTarget::from_crate(db, owner.krate(db.upcast())), - &self.body.types, + DisplayTarget::from_crate(db, owner.krate(db)), + self.body, ) }; let pr = self .resolver - .resolve_path_in_value_ns(self.db.upcast(), c, HygieneId::ROOT) + .resolve_path_in_value_ns(self.db, c, HygieneId::ROOT) .ok_or_else(unresolved_name)?; match pr { ResolveValueResult::ValueNs(v, _) => { @@ -1442,7 +1455,7 @@ impl<'ctx> MirLowerCtx<'ctx> { _ => { return Err(MirLowerError::TypeError( "float with size other than 2, 4, 8 or 16 bytes", - )) + )); } }, }; @@ -1477,12 +1490,12 @@ impl<'ctx> MirLowerCtx<'ctx> { // We can't evaluate constant with substitution now, as generics are not monomorphized in lowering. intern_const_scalar(ConstScalar::UnevaluatedConst(const_id, subst), ty) } else { - let name = const_id.name(self.db.upcast()); + let name = const_id.name(self.db); self.db .const_eval(const_id, subst, None) .map_err(|e| MirLowerError::ConstEvalError(name.into(), Box::new(e)))? }; - Ok(Operand::Constant(c)) + Ok(Operand { kind: OperandKind::Constant(c), span: None }) } fn write_bytes_to_place( @@ -1636,10 +1649,12 @@ impl<'ctx> MirLowerCtx<'ctx> { f: impl FnOnce(&mut MirLowerCtx<'_>, BasicBlockId) -> Result<()>, ) -> Result> { let begin = self.new_basic_block(); - let prev = mem::replace( - &mut self.current_loop_blocks, - Some(LoopBlocks { begin, end: None, place, drop_scope_index: self.drop_scopes.len() }), - ); + let prev = self.current_loop_blocks.replace(LoopBlocks { + begin, + end: None, + place, + drop_scope_index: self.drop_scopes.len(), + }); let prev_label = if let Some(label) = label { // We should generate the end now, to make sure that it wouldn't change later. It is // bad as we may emit end (unnecessary unreachable block) for unterminating loop, but @@ -1708,7 +1723,7 @@ impl<'ctx> MirLowerCtx<'ctx> { } fn is_uninhabited(&self, expr_id: ExprId) -> bool { - is_ty_uninhabited_from(self.db, &self.infer[expr_id], self.owner.module(self.db.upcast())) + is_ty_uninhabited_from(self.db, &self.infer[expr_id], self.owner.module(self.db)) } /// This function push `StorageLive` statement for the binding, and applies changes to add `StorageDead` and @@ -1730,8 +1745,8 @@ impl<'ctx> MirLowerCtx<'ctx> { } fn resolve_lang_item(&self, item: LangItem) -> Result { - let crate_id = self.owner.module(self.db.upcast()).krate(); - self.db.lang_item(crate_id, item).ok_or(MirLowerError::LangItemNotFound(item)) + let crate_id = self.owner.module(self.db).krate(); + lang_item(self.db, crate_id, item).ok_or(MirLowerError::LangItemNotFound(item)) } fn lower_block_to_place( @@ -1758,11 +1773,8 @@ impl<'ctx> MirLowerCtx<'ctx> { self.push_fake_read(current, init_place, span); // Using the initializer for the resolver scope is good enough for us, as it cannot create new declarations // and has all declarations of the `let`. - let resolver_guard = self.resolver.update_to_inner_scope( - self.db.upcast(), - self.owner, - *expr_id, - ); + let resolver_guard = + self.resolver.update_to_inner_scope(self.db, self.owner, *expr_id); (current, else_block) = self.pattern_match(current, None, init_place, *pat)?; self.resolver.reset_to_guard(resolver_guard); @@ -1906,13 +1918,13 @@ impl<'ctx> MirLowerCtx<'ctx> { Ok(r) => Ok(r), Err(e) => { let edition = self.edition(); - let db = self.db.upcast(); + let db = self.db; let loc = variant.lookup(db); let enum_loc = loc.parent.lookup(db); let name = format!( "{}::{}", - enum_loc.id.item_tree(db)[enum_loc.id.value].name.display(db.upcast(), edition), - loc.id.item_tree(db)[loc.id.value].name.display(db.upcast(), edition), + enum_loc.id.item_tree(db)[enum_loc.id.value].name.display(db, edition), + loc.id.item_tree(db)[loc.id.value].name.display(db, edition), ); Err(MirLowerError::ConstEvalError(name.into(), Box::new(e))) } @@ -1920,11 +1932,11 @@ impl<'ctx> MirLowerCtx<'ctx> { } fn edition(&self) -> Edition { - self.db.crate_graph()[self.krate()].edition + self.krate().data(self.db).edition } - fn krate(&self) -> CrateId { - self.owner.krate(self.db.upcast()) + fn krate(&self) -> Crate { + self.owner.krate(self.db) } fn display_target(&self) -> DisplayTarget { @@ -2016,9 +2028,9 @@ fn cast_kind(table: &mut InferenceTable<'_>, source_ty: &Ty, target_ty: &Ty) -> pub fn mir_body_for_closure_query( db: &dyn HirDatabase, - closure: ClosureId, + closure: InternedClosureId, ) -> Result> { - let InternedClosure(owner, expr) = db.lookup_intern_closure(closure.into()); + let InternedClosure(owner, expr) = db.lookup_intern_closure(closure); let body = db.body(owner); let infer = db.infer(owner); let Expr::Closure { args, body: root, .. } = &body[expr] else { @@ -2027,7 +2039,7 @@ pub fn mir_body_for_closure_query( let TyKind::Closure(_, substs) = &infer[expr].kind(Interner) else { implementation_error!("closure expression is not closure"); }; - let (captures, kind) = infer.closure_info(&closure); + let (captures, kind) = infer.closure_info(&closure.into()); let mut ctx = MirLowerCtx::new(db, owner, &body, &infer); // 0 is return local ctx.result.locals.alloc(Local { ty: infer[*root].clone() }); @@ -2046,7 +2058,7 @@ pub fn mir_body_for_closure_query( let Some(sig) = ClosureSubst(substs).sig_ty().callable_sig(db) else { implementation_error!("closure has not callable sig"); }; - let resolver_guard = ctx.resolver.update_to_inner_scope(db.upcast(), owner, expr); + let resolver_guard = ctx.resolver.update_to_inner_scope(db, owner, expr); let current = ctx.lower_params_and_bindings( args.iter().zip(sig.params().iter()).map(|(it, y)| (*it, y.clone())), None, @@ -2120,26 +2132,29 @@ pub fn mir_body_for_closure_query( } pub fn mir_body_query(db: &dyn HirDatabase, def: DefWithBodyId) -> Result> { - let krate = def.krate(db.upcast()); - let edition = db.crate_graph()[krate].edition; + let krate = def.krate(db); + let edition = krate.data(db).edition; let detail = match def { DefWithBodyId::FunctionId(it) => { - db.function_data(it).name.display(db.upcast(), edition).to_string() + db.function_signature(it).name.display(db, edition).to_string() } DefWithBodyId::StaticId(it) => { - db.static_data(it).name.display(db.upcast(), edition).to_string() + db.static_signature(it).name.display(db, edition).to_string() } DefWithBodyId::ConstId(it) => db - .const_data(it) + .const_signature(it) .name .clone() .unwrap_or_else(Name::missing) - .display(db.upcast(), edition) + .display(db, edition) .to_string(), DefWithBodyId::VariantId(it) => { - db.enum_variant_data(it).name.display(db.upcast(), edition).to_string() + let loc = it.lookup(db); + db.enum_variants(loc.parent).variants[loc.index as usize] + .1 + .display(db, edition) + .to_string() } - DefWithBodyId::InTypeConstId(it) => format!("in type const {it:?}"), }; let _p = tracing::info_span!("mir_body_query", ?detail).entered(); let body = db.body(def); @@ -2149,10 +2164,9 @@ pub fn mir_body_query(db: &dyn HirDatabase, def: DefWithBodyId) -> Result Result> { Err(MirLowerError::Loop) } @@ -2174,11 +2188,7 @@ pub fn lower_to_mir( ctx.result.locals.alloc(Local { ty: ctx.expr_ty_after_adjustments(root_expr) }); let binding_picker = |b: BindingId| { let owner = ctx.body.binding_owners.get(&b).copied(); - if root_expr == body.body_expr { - owner.is_none() - } else { - owner == Some(root_expr) - } + if root_expr == body.body_expr { owner.is_none() } else { owner == Some(root_expr) } }; // 1 to param_len is for params // FIXME: replace with let chain once it becomes stable diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/as_place.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/as_place.rs index 420f2aaff46d6..c22bada7a903a 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/as_place.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/as_place.rs @@ -1,6 +1,6 @@ //! MIR lowering for places -use crate::mir::MutBorrowKind; +use crate::mir::{MutBorrowKind, Operand, OperandKind}; use super::*; use hir_def::FunctionId; @@ -136,10 +136,9 @@ impl MirLowerCtx<'_> { match &self.body.exprs[expr_id] { Expr::Path(p) => { let resolver_guard = - self.resolver.update_to_inner_scope(self.db.upcast(), self.owner, expr_id); + self.resolver.update_to_inner_scope(self.db, self.owner, expr_id); let hygiene = self.body.expr_path_hygiene(expr_id); - let resolved = - self.resolver.resolve_path_in_value_ns_fully(self.db.upcast(), p, hygiene); + let resolved = self.resolver.resolve_path_in_value_ns_fully(self.db, p, hygiene); self.resolver.reset_to_guard(resolver_guard); let Some(pr) = resolved else { return try_rvalue(self); @@ -156,7 +155,7 @@ impl MirLowerCtx<'_> { self.push_assignment( current, temp, - Operand::Static(s).into(), + Operand { kind: OperandKind::Static(s), span: None }.into(), expr_id.into(), ); Ok(Some(( @@ -194,10 +193,10 @@ impl MirLowerCtx<'_> { if let Some(deref_trait) = self.resolve_lang_item(LangItem::DerefMut)?.as_trait() { - if let Some(deref_fn) = - self.db.trait_data(deref_trait).method_by_name( - &Name::new_symbol_root(sym::deref_mut.clone()), - ) + if let Some(deref_fn) = self + .db + .trait_items(deref_trait) + .method_by_name(&Name::new_symbol_root(sym::deref_mut)) { break 'b deref_fn == f; } @@ -306,7 +305,7 @@ impl MirLowerCtx<'_> { ); let Some(current) = self.lower_call( index_fn_op, - Box::new([Operand::Copy(place), index_operand]), + Box::new([Operand { kind: OperandKind::Copy(place), span: None }, index_operand]), result, current, false, @@ -332,14 +331,14 @@ impl MirLowerCtx<'_> { ( Mutability::Not, LangItem::Deref, - Name::new_symbol_root(sym::deref.clone()), + Name::new_symbol_root(sym::deref), BorrowKind::Shared, ) } else { ( Mutability::Mut, LangItem::DerefMut, - Name::new_symbol_root(sym::deref_mut.clone()), + Name::new_symbol_root(sym::deref_mut), BorrowKind::Mut { kind: MutBorrowKind::Default }, ) }; @@ -353,7 +352,7 @@ impl MirLowerCtx<'_> { .ok_or(MirLowerError::LangItemNotFound(trait_lang_item))?; let deref_fn = self .db - .trait_data(deref_trait) + .trait_items(deref_trait) .method_by_name(&trait_method_name) .ok_or(MirLowerError::LangItemNotFound(trait_lang_item))?; let deref_fn_op = Operand::const_zst( @@ -366,7 +365,7 @@ impl MirLowerCtx<'_> { let mut result: Place = self.temp(target_ty_ref, current, span)?.into(); let Some(current) = self.lower_call( deref_fn_op, - Box::new([Operand::Copy(ref_place)]), + Box::new([Operand { kind: OperandKind::Copy(ref_place), span: None }]), result, current, false, diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/pattern_matching.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/pattern_matching.rs index 783f92b2043f6..b3c1f6f387f22 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/pattern_matching.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/pattern_matching.rs @@ -1,19 +1,19 @@ //! MIR lowering for patterns -use hir_def::{hir::ExprId, AssocItemId}; +use hir_def::{AssocItemId, hir::ExprId, signatures::VariantFields}; use crate::{ + BindingMode, mir::{ + LocalId, MutBorrowKind, Operand, OperandKind, lower::{ BasicBlockId, BinOp, BindingId, BorrowKind, Either, Expr, FieldId, Idx, Interner, - MemoryMap, MirLowerCtx, MirLowerError, MirSpan, Mutability, Operand, Pat, PatId, Place, + MemoryMap, MirLowerCtx, MirLowerError, MirSpan, Mutability, Pat, PatId, Place, PlaceElem, ProjectionElem, RecordFieldPat, ResolveValueResult, Result, Rvalue, Substitution, SwitchTargets, TerminatorKind, TupleFieldId, TupleId, TyBuilder, TyKind, - ValueNs, VariantData, VariantId, + ValueNs, VariantId, }, - LocalId, MutBorrowKind, }, - BindingMode, }; macro_rules! not_supported { @@ -139,7 +139,7 @@ impl MirLowerCtx<'_> { _ => { return Err(MirLowerError::TypeError( "non tuple type matched with tuple pattern", - )) + )); } }; self.pattern_match_tuple_like( @@ -217,10 +217,14 @@ impl MirLowerCtx<'_> { self.push_assignment( current, discr, - Rvalue::CheckedBinaryOp(binop, lv, Operand::Copy(cond_place)), + Rvalue::CheckedBinaryOp( + binop, + lv, + Operand { kind: OperandKind::Copy(cond_place), span: None }, + ), pattern.into(), ); - let discr = Operand::Copy(discr); + let discr = Operand { kind: OperandKind::Copy(discr), span: None }; self.set_terminator( current, TerminatorKind::SwitchInt { @@ -262,7 +266,10 @@ impl MirLowerCtx<'_> { self.set_terminator( current, TerminatorKind::SwitchInt { - discr: Operand::Copy(place_len), + discr: Operand { + kind: OperandKind::Copy(place_len), + span: None, + }, targets: SwitchTargets::static_if( pattern_len as u128, next, @@ -282,10 +289,14 @@ impl MirLowerCtx<'_> { self.push_assignment( current, discr, - Rvalue::CheckedBinaryOp(BinOp::Le, c, Operand::Copy(place_len)), + Rvalue::CheckedBinaryOp( + BinOp::Le, + c, + Operand { kind: OperandKind::Copy(place_len), span: None }, + ), pattern.into(), ); - let discr = Operand::Copy(discr); + let discr = Operand { kind: OperandKind::Copy(discr), span: None }; self.set_terminator( current, TerminatorKind::SwitchInt { @@ -350,17 +361,12 @@ impl MirLowerCtx<'_> { )?, None => { let unresolved_name = || { - MirLowerError::unresolved_path( - self.db, - p, - self.display_target(), - &self.body.types, - ) + MirLowerError::unresolved_path(self.db, p, self.display_target(), self.body) }; let hygiene = self.body.pat_path_hygiene(pattern); let pr = self .resolver - .resolve_path_in_value_ns(self.db.upcast(), p, hygiene) + .resolve_path_in_value_ns(self.db, p, hygiene) .ok_or_else(unresolved_name)?; if let ( @@ -412,8 +418,8 @@ impl MirLowerCtx<'_> { tmp2, Rvalue::CheckedBinaryOp( BinOp::Eq, - Operand::Copy(tmp), - Operand::Copy(cond_place), + Operand { kind: OperandKind::Copy(tmp), span: None }, + Operand { kind: OperandKind::Copy(cond_place), span: None }, ), span, ); @@ -422,7 +428,7 @@ impl MirLowerCtx<'_> { self.set_terminator( current, TerminatorKind::SwitchInt { - discr: Operand::Copy(tmp2), + discr: Operand { kind: OperandKind::Copy(tmp2), span: None }, targets: SwitchTargets::static_if(1, next, else_target), }, span, @@ -491,7 +497,7 @@ impl MirLowerCtx<'_> { self.push_assignment( current, lhs_place, - Operand::Copy(cond_place).into(), + Operand { kind: OperandKind::Copy(cond_place), span: None }.into(), expr.into(), ); (current, current_else) @@ -528,7 +534,9 @@ impl MirLowerCtx<'_> { current, target_place.into(), match mode { - BindingMode::Move => Operand::Copy(cond_place).into(), + BindingMode::Move => { + Operand { kind: OperandKind::Copy(cond_place), span: None }.into() + } BindingMode::Ref(Mutability::Not) => Rvalue::Ref(BorrowKind::Shared, cond_place), BindingMode::Ref(Mutability::Mut) => { Rvalue::Ref(BorrowKind::Mut { kind: MutBorrowKind::Default }, cond_place) @@ -552,10 +560,14 @@ impl MirLowerCtx<'_> { self.push_assignment( current, discr, - Rvalue::CheckedBinaryOp(BinOp::Eq, c, Operand::Copy(cond_place)), + Rvalue::CheckedBinaryOp( + BinOp::Eq, + c, + Operand { kind: OperandKind::Copy(cond_place), span: None }, + ), pattern.into(), ); - let discr = Operand::Copy(discr); + let discr = Operand { kind: OperandKind::Copy(discr), span: None }; self.set_terminator( current, TerminatorKind::SwitchInt { @@ -588,7 +600,7 @@ impl MirLowerCtx<'_> { self.set_terminator( current, TerminatorKind::SwitchInt { - discr: Operand::Copy(tmp), + discr: Operand { kind: OperandKind::Copy(tmp), span: None }, targets: SwitchTargets::static_if(e, next, *else_target), }, span, @@ -597,7 +609,7 @@ impl MirLowerCtx<'_> { } self.pattern_matching_variant_fields( shape, - &self.db.enum_variant_data(v).variant_data, + &self.db.variant_fields(v.into()), variant, current, current_else, @@ -607,7 +619,7 @@ impl MirLowerCtx<'_> { } VariantId::StructId(s) => self.pattern_matching_variant_fields( shape, - &self.db.struct_data(s).variant_data, + &self.db.variant_fields(s.into()), variant, current, current_else, @@ -615,7 +627,7 @@ impl MirLowerCtx<'_> { mode, )?, VariantId::UnionId(_) => { - return Err(MirLowerError::TypeError("pattern matching on union")) + return Err(MirLowerError::TypeError("pattern matching on union")); } }) } @@ -623,7 +635,7 @@ impl MirLowerCtx<'_> { fn pattern_matching_variant_fields( &mut self, shape: AdtPatternShape<'_>, - variant_data: &VariantData, + variant_data: &VariantFields, v: VariantId, current: BasicBlockId, current_else: Option, diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/monomorphization.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/monomorphization.rs index 92132fa047362..d8f443145ca06 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/monomorphization.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/monomorphization.rs @@ -9,24 +9,23 @@ use std::mem; -use base_db::ra_salsa::Cycle; use chalk_ir::{ - fold::{FallibleTypeFolder, TypeFoldable, TypeSuperFoldable}, ConstData, DebruijnIndex, + fold::{FallibleTypeFolder, TypeFoldable, TypeSuperFoldable}, }; use hir_def::DefWithBodyId; use triomphe::Arc; use crate::{ + Const, Interner, ProjectionTy, Substitution, TraitEnvironment, Ty, TyKind, consteval::{intern_const_scalar, unknown_const}, - db::{HirDatabase, InternedClosure}, + db::{HirDatabase, InternedClosure, InternedClosureId}, from_placeholder_idx, - generics::{generics, Generics}, + generics::{Generics, generics}, infer::normalize, - ClosureId, Const, Interner, ProjectionTy, Substitution, TraitEnvironment, Ty, TyKind, }; -use super::{MirBody, MirLowerError, Operand, Rvalue, StatementKind, TerminatorKind}; +use super::{MirBody, MirLowerError, Operand, OperandKind, Rvalue, StatementKind, TerminatorKind}; macro_rules! not_supported { ($it: expr) => { @@ -78,7 +77,7 @@ impl FallibleTypeFolder for Filler<'_> { owner: self.owner, trait_env: self.trait_env.clone(), subst: &subst, - generics: Some(generics(self.db.upcast(), func.into())), + generics: Some(generics(self.db, func.into())), }; filler.try_fold_ty(infer.type_of_rpit[idx].clone(), outer_binder) } @@ -171,8 +170,8 @@ impl Filler<'_> { } fn fill_operand(&mut self, op: &mut Operand) -> Result<(), MirLowerError> { - match op { - Operand::Constant(c) => { + match &mut op.kind { + OperandKind::Constant(c) => { match &c.data(Interner).value { chalk_ir::ConstValue::BoundVar(b) => { let resolved = self @@ -216,7 +215,7 @@ impl Filler<'_> { } self.fill_const(c)?; } - Operand::Copy(_) | Operand::Move(_) | Operand::Static(_) => (), + OperandKind::Copy(_) | OperandKind::Move(_) | OperandKind::Static(_) => (), } Ok(()) } @@ -306,7 +305,7 @@ pub fn monomorphized_mir_body_query( subst: Substitution, trait_env: Arc, ) -> Result, MirLowerError> { - let generics = owner.as_generic_def_id(db.upcast()).map(|g_def| generics(db.upcast(), g_def)); + let generics = owner.as_generic_def_id(db).map(|g_def| generics(db, g_def)); let filler = &mut Filler { db, subst: &subst, trait_env, generics, owner }; let body = db.mir_body(owner)?; let mut body = (*body).clone(); @@ -314,24 +313,23 @@ pub fn monomorphized_mir_body_query( Ok(Arc::new(body)) } -pub fn monomorphized_mir_body_recover( - _: &dyn HirDatabase, - _: &Cycle, - _: &DefWithBodyId, - _: &Substitution, - _: &Arc, +pub(crate) fn monomorphized_mir_body_cycle_result( + _db: &dyn HirDatabase, + _: DefWithBodyId, + _: Substitution, + _: Arc, ) -> Result, MirLowerError> { Err(MirLowerError::Loop) } pub fn monomorphized_mir_body_for_closure_query( db: &dyn HirDatabase, - closure: ClosureId, + closure: InternedClosureId, subst: Substitution, trait_env: Arc, ) -> Result, MirLowerError> { - let InternedClosure(owner, _) = db.lookup_intern_closure(closure.into()); - let generics = owner.as_generic_def_id(db.upcast()).map(|g_def| generics(db.upcast(), g_def)); + let InternedClosure(owner, _) = db.lookup_intern_closure(closure); + let generics = owner.as_generic_def_id(db).map(|g_def| generics(db, g_def)); let filler = &mut Filler { db, subst: &subst, trait_env, generics, owner }; let body = db.mir_body_for_closure(closure)?; let mut body = (*body).clone(); @@ -347,7 +345,7 @@ pub fn monomorphize_mir_body_bad( trait_env: Arc, ) -> Result { let owner = body.owner; - let generics = owner.as_generic_def_id(db.upcast()).map(|g_def| generics(db.upcast(), g_def)); + let generics = owner.as_generic_def_id(db).map(|g_def| generics(db, g_def)); let filler = &mut Filler { db, subst: &subst, trait_env, generics, owner }; filler.fill_body(&mut body)?; Ok(body) diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/pretty.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/pretty.rs index 7d7d4106cb955..7ae6e907e7adb 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/pretty.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/pretty.rs @@ -7,19 +7,19 @@ use std::{ use either::Either; use hir_def::{expr_store::Body, hir::BindingId}; -use hir_expand::{name::Name, Lookup}; +use hir_expand::{Lookup, name::Name}; use la_arena::ArenaMap; use crate::{ + ClosureId, db::HirDatabase, display::{ClosureStyle, DisplayTarget, HirDisplay}, mir::{PlaceElem, ProjectionElem, StatementKind, TerminatorKind}, - ClosureId, }; use super::{ - AggregateKind, BasicBlockId, BorrowKind, LocalId, MirBody, MutBorrowKind, Operand, Place, - Rvalue, UnOp, + AggregateKind, BasicBlockId, BorrowKind, LocalId, MirBody, MutBorrowKind, Operand, OperandKind, + Place, Rvalue, UnOp, }; macro_rules! w { @@ -43,45 +43,38 @@ impl MirBody { let mut ctx = MirPrettyCtx::new(self, &hir_body, db, display_target); ctx.for_body(|this| match ctx.body.owner { hir_def::DefWithBodyId::FunctionId(id) => { - let data = db.function_data(id); - w!(this, "fn {}() ", data.name.display(db.upcast(), this.display_target.edition)); + let data = db.function_signature(id); + w!(this, "fn {}() ", data.name.display(db, this.display_target.edition)); } hir_def::DefWithBodyId::StaticId(id) => { - let data = db.static_data(id); - w!( - this, - "static {}: _ = ", - data.name.display(db.upcast(), this.display_target.edition) - ); + let data = db.static_signature(id); + w!(this, "static {}: _ = ", data.name.display(db, this.display_target.edition)); } hir_def::DefWithBodyId::ConstId(id) => { - let data = db.const_data(id); + let data = db.const_signature(id); w!( this, "const {}: _ = ", data.name .as_ref() .unwrap_or(&Name::missing()) - .display(db.upcast(), this.display_target.edition) + .display(db, this.display_target.edition) ); } hir_def::DefWithBodyId::VariantId(id) => { - let loc = id.lookup(db.upcast()); - let enum_loc = loc.parent.lookup(db.upcast()); + let loc = id.lookup(db); + let enum_loc = loc.parent.lookup(db); w!( this, "enum {}::{} = ", - enum_loc.id.item_tree(db.upcast())[enum_loc.id.value] + enum_loc.id.item_tree(db)[enum_loc.id.value] .name - .display(db.upcast(), this.display_target.edition), - loc.id.item_tree(db.upcast())[loc.id.value] + .display(db, this.display_target.edition), + loc.id.item_tree(db)[loc.id.value] .name - .display(db.upcast(), this.display_target.edition), + .display(db, this.display_target.edition), ) } - hir_def::DefWithBodyId::InTypeConstId(id) => { - w!(this, "in type const {id:?} = "); - } }); ctx.result } @@ -134,7 +127,7 @@ impl HirDisplay for LocalName { match self { LocalName::Unknown(l) => write!(f, "_{}", u32::from(l.into_raw())), LocalName::Binding(n, l) => { - write!(f, "{}_{}", n.display(f.db.upcast(), f.edition()), u32::from(l.into_raw())) + write!(f, "{}_{}", n.display(f.db, f.edition()), u32::from(l.into_raw())) } } } @@ -154,7 +147,7 @@ impl<'a> MirPrettyCtx<'a> { } fn for_closure(&mut self, closure: ClosureId) { - let body = match self.db.mir_body_for_closure(closure) { + let body = match self.db.mir_body_for_closure(closure.into()) { Ok(it) => it, Err(e) => { wln!(self, "// error in {closure:?}: {e:?}"); @@ -333,27 +326,25 @@ impl<'a> MirPrettyCtx<'a> { w!(this, ")"); } ProjectionElem::Field(Either::Left(field)) => { - let variant_data = field.parent.variant_data(this.db.upcast()); - let name = &variant_data.fields()[field.local_id].name; + let variant_fields = this.db.variant_fields(field.parent); + let name = &variant_fields.fields()[field.local_id].name; match field.parent { hir_def::VariantId::EnumVariantId(e) => { w!(this, "("); f(this, local, head); - let variant_name = &this.db.enum_variant_data(e).name; + let loc = e.lookup(this.db); w!( this, " as {}).{}", - variant_name.display(this.db.upcast(), this.display_target.edition), - name.display(this.db.upcast(), this.display_target.edition) + this.db.enum_variants(loc.parent).variants[loc.index as usize] + .1 + .display(this.db, this.display_target.edition), + name.display(this.db, this.display_target.edition) ); } hir_def::VariantId::StructId(_) | hir_def::VariantId::UnionId(_) => { f(this, local, head); - w!( - this, - ".{}", - name.display(this.db.upcast(), this.display_target.edition) - ); + w!(this, ".{}", name.display(this.db, this.display_target.edition)); } } } @@ -383,14 +374,14 @@ impl<'a> MirPrettyCtx<'a> { } fn operand(&mut self, r: &Operand) { - match r { - Operand::Copy(p) | Operand::Move(p) => { + match &r.kind { + OperandKind::Copy(p) | OperandKind::Move(p) => { // MIR at the time of writing doesn't have difference between move and copy, so we show them // equally. Feel free to change it. self.place(p); } - Operand::Constant(c) => w!(self, "Const({})", self.hir_display(c)), - Operand::Static(s) => w!(self, "Static({:?})", s), + OperandKind::Constant(c) => w!(self, "Const({})", self.hir_display(c)), + OperandKind::Static(s) => w!(self, "Static({:?})", s), } } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/target_feature.rs b/src/tools/rust-analyzer/crates/hir-ty/src/target_feature.rs index fe9416c6cfc69..9d1238701bcfa 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/target_feature.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/target_feature.rs @@ -4,7 +4,7 @@ use std::sync::LazyLock; use hir_def::attr::Attrs; use hir_def::tt; -use intern::{sym, Symbol}; +use intern::{Symbol, sym}; use rustc_hash::{FxHashMap, FxHashSet}; #[derive(Debug, Default)] @@ -36,17 +36,19 @@ impl TargetFeatures { /// Retrieves the target features from the attributes, and does not expand the target features implied by them. pub(crate) fn from_attrs_no_implications(attrs: &Attrs) -> Self { let enabled = attrs - .by_key(&sym::target_feature) + .by_key(sym::target_feature) .tt_values() - .filter_map(|tt| { - match tt.token_trees().flat_tokens() { - [ - tt::TokenTree::Leaf(tt::Leaf::Ident(enable_ident)), - tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { char: '=', .. })), - tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal { kind: tt::LitKind::Str, symbol: features, .. })), - ] if enable_ident.sym == sym::enable => Some(features), - _ => None, - } + .filter_map(|tt| match tt.token_trees().flat_tokens() { + [ + tt::TokenTree::Leaf(tt::Leaf::Ident(enable_ident)), + tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { char: '=', .. })), + tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal { + kind: tt::LitKind::Str, + symbol: features, + .. + })), + ] if enable_ident.sym == sym::enable => Some(features), + _ => None, }) .flat_map(|features| features.as_str().split(',').map(Symbol::intern)) .collect(); diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/test_db.rs b/src/tools/rust-analyzer/crates/hir-ty/src/test_db.rs index f37dd91d8e90f..bcd8aa6c4e956 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/test_db.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/test_db.rs @@ -3,35 +3,40 @@ use std::{fmt, panic, sync::Mutex}; use base_db::{ - ra_salsa::{self, Durability}, - AnchoredPath, CrateId, FileLoader, FileLoaderDelegate, SourceDatabase, Upcast, + CrateGraphBuilder, CratesMap, FileSourceRootInput, FileText, RootQueryDb, SourceDatabase, + SourceRoot, SourceRootId, SourceRootInput, }; -use hir_def::{db::DefDatabase, ModuleId}; -use hir_expand::db::ExpandDatabase; + +use hir_def::{ModuleId, db::DefDatabase}; +use hir_expand::EditionedFileId; use rustc_hash::FxHashMap; -use span::{EditionedFileId, FileId}; +use salsa::{AsDynDatabase, Durability}; +use span::FileId; use syntax::TextRange; use test_utils::extract_annotations; use triomphe::Arc; -#[ra_salsa::database( - base_db::SourceRootDatabaseStorage, - base_db::SourceDatabaseStorage, - hir_expand::db::ExpandDatabaseStorage, - hir_def::db::InternDatabaseStorage, - hir_def::db::DefDatabaseStorage, - crate::db::HirDatabaseStorage -)] +#[salsa_macros::db] +#[derive(Clone)] pub(crate) struct TestDB { - storage: ra_salsa::Storage, - events: Mutex>>, + storage: salsa::Storage, + files: Arc, + crates_map: Arc, + events: Arc>>>, } impl Default for TestDB { fn default() -> Self { - let mut this = Self { storage: Default::default(), events: Default::default() }; - this.setup_syntax_context_root(); + let mut this = Self { + storage: Default::default(), + events: Default::default(), + files: Default::default(), + crates_map: Default::default(), + }; this.set_expand_proc_attr_macros_with_durability(true, Durability::HIGH); + // This needs to be here otherwise `CrateGraphBuilder` panics. + this.set_all_crates(Arc::new(Box::new([]))); + CrateGraphBuilder::default().set_in_db(&mut this); this } } @@ -42,54 +47,80 @@ impl fmt::Debug for TestDB { } } -impl Upcast for TestDB { - fn upcast(&self) -> &(dyn ExpandDatabase + 'static) { - self +#[salsa_macros::db] +impl SourceDatabase for TestDB { + fn file_text(&self, file_id: base_db::FileId) -> FileText { + self.files.file_text(file_id) } -} -impl Upcast for TestDB { - fn upcast(&self) -> &(dyn DefDatabase + 'static) { - self + fn set_file_text(&mut self, file_id: base_db::FileId, text: &str) { + let files = Arc::clone(&self.files); + files.set_file_text(self, file_id, text); } -} -impl ra_salsa::Database for TestDB { - fn salsa_event(&self, event: ra_salsa::Event) { - let mut events = self.events.lock().unwrap(); - if let Some(events) = &mut *events { - events.push(event); - } + fn set_file_text_with_durability( + &mut self, + file_id: base_db::FileId, + text: &str, + durability: Durability, + ) { + let files = Arc::clone(&self.files); + files.set_file_text_with_durability(self, file_id, text, durability); } -} -impl ra_salsa::ParallelDatabase for TestDB { - fn snapshot(&self) -> ra_salsa::Snapshot { - ra_salsa::Snapshot::new(TestDB { - storage: self.storage.snapshot(), - events: Default::default(), - }) + /// Source root of the file. + fn source_root(&self, source_root_id: SourceRootId) -> SourceRootInput { + self.files.source_root(source_root_id) } -} -impl panic::RefUnwindSafe for TestDB {} + fn set_source_root_with_durability( + &mut self, + source_root_id: SourceRootId, + source_root: Arc, + durability: Durability, + ) { + let files = Arc::clone(&self.files); + files.set_source_root_with_durability(self, source_root_id, source_root, durability); + } -impl FileLoader for TestDB { - fn resolve_path(&self, path: AnchoredPath<'_>) -> Option { - FileLoaderDelegate(self).resolve_path(path) + fn file_source_root(&self, id: base_db::FileId) -> FileSourceRootInput { + self.files.file_source_root(id) } - fn relevant_crates(&self, file_id: FileId) -> Arc<[CrateId]> { - FileLoaderDelegate(self).relevant_crates(file_id) + + fn set_file_source_root_with_durability( + &mut self, + id: base_db::FileId, + source_root_id: SourceRootId, + durability: Durability, + ) { + let files = Arc::clone(&self.files); + files.set_file_source_root_with_durability(self, id, source_root_id, durability); + } + + fn crates_map(&self) -> Arc { + self.crates_map.clone() } } +#[salsa_macros::db] +impl salsa::Database for TestDB { + fn salsa_event(&self, event: &dyn std::ops::Fn() -> salsa::Event) { + let mut events = self.events.lock().unwrap(); + if let Some(events) = &mut *events { + events.push(event()); + } + } +} + +impl panic::RefUnwindSafe for TestDB {} + impl TestDB { pub(crate) fn module_for_file_opt(&self, file_id: impl Into) -> Option { let file_id = file_id.into(); for &krate in self.relevant_crates(file_id).iter() { let crate_def_map = self.crate_def_map(krate); for (local_id, data) in crate_def_map.modules() { - if data.origin.file_id().map(EditionedFileId::file_id) == Some(file_id) { + if data.origin.file_id().map(|file_id| file_id.file_id(self)) == Some(file_id) { return Some(crate_def_map.module_id(local_id)); } } @@ -105,8 +136,7 @@ impl TestDB { &self, ) -> FxHashMap> { let mut files = Vec::new(); - let crate_graph = self.crate_graph(); - for krate in crate_graph.iter() { + for &krate in self.all_crates().iter() { let crate_def_map = self.crate_def_map(krate); for (module_id, _) in crate_def_map.modules() { let file_id = crate_def_map[module_id].origin.file_id(); @@ -116,8 +146,8 @@ impl TestDB { files .into_iter() .filter_map(|file_id| { - let text = self.file_text(file_id.file_id()); - let annotations = extract_annotations(&text); + let text = self.file_text(file_id.file_id(self)); + let annotations = extract_annotations(&text.text(self)); if annotations.is_empty() { return None; } @@ -128,7 +158,7 @@ impl TestDB { } impl TestDB { - pub(crate) fn log(&self, f: impl FnOnce()) -> Vec { + pub(crate) fn log(&self, f: impl FnOnce()) -> Vec { *self.events.lock().unwrap() = Some(Vec::new()); f(); self.events.lock().unwrap().take().unwrap() @@ -141,8 +171,11 @@ impl TestDB { .filter_map(|e| match e.kind { // This is pretty horrible, but `Debug` is the only way to inspect // QueryDescriptor at the moment. - ra_salsa::EventKind::WillExecute { database_key } => { - Some(format!("{:?}", database_key.debug(self))) + salsa::EventKind::WillExecute { database_key } => { + let ingredient = self + .as_dyn_database() + .ingredient_debug_name(database_key.ingredient_index()); + Some(ingredient.to_string()) } _ => None, }) diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests.rs index 81e38be2285ab..cc37f65c26c21 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/tests.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests.rs @@ -15,36 +15,36 @@ mod type_alias_impl_traits; use std::env; use std::sync::LazyLock; -use base_db::{CrateId, SourceDatabaseFileInputExt as _}; +use base_db::{Crate, SourceDatabase}; use expect_test::Expect; use hir_def::{ + AssocItemId, DefWithBodyId, HasModule, LocalModuleId, Lookup, ModuleDefId, SyntheticSyntax, db::DefDatabase, expr_store::{Body, BodySourceMap}, hir::{ExprId, Pat, PatId}, item_scope::ItemScope, nameres::DefMap, src::HasSource, - AssocItemId, DefWithBodyId, HasModule, LocalModuleId, Lookup, ModuleDefId, SyntheticSyntax, }; -use hir_expand::{db::ExpandDatabase, FileRange, InFile}; +use hir_expand::{FileRange, InFile, db::ExpandDatabase}; use itertools::Itertools; use rustc_hash::FxHashMap; use stdx::format_to; use syntax::{ - ast::{self, AstNode, HasName}, SyntaxNode, + ast::{self, AstNode, HasName}, }; use test_fixture::WithFixture; -use tracing_subscriber::{layer::SubscriberExt, Registry}; +use tracing_subscriber::{Registry, layer::SubscriberExt}; use tracing_tree::HierarchicalLayer; use triomphe::Arc; use crate::{ + InferenceResult, Ty, db::HirDatabase, display::{DisplayTarget, HirDisplay}, infer::{Adjustment, TypeMismatch}, test_db::TestDB, - InferenceResult, Ty, }; // These tests compare the inference results for all expressions in a file @@ -124,9 +124,9 @@ fn check_impl( } assert!(had_annotations || allow_none, "no `//^` annotations found"); - let mut defs: Vec<(DefWithBodyId, CrateId)> = Vec::new(); + let mut defs: Vec<(DefWithBodyId, Crate)> = Vec::new(); for file_id in files { - let module = db.module_for_file_opt(file_id); + let module = db.module_for_file_opt(file_id.file_id(&db)); let module = match module { Some(m) => m, None => continue, @@ -160,7 +160,6 @@ fn check_impl( let loc = it.lookup(&db); loc.source(&db).value.syntax().text_range().start() } - DefWithBodyId::InTypeConstId(it) => it.source(&db).syntax().text_range().start(), }); let mut unexpected_type_mismatches = String::new(); for (def, krate) in defs { @@ -302,7 +301,7 @@ fn infer_with_mismatches(content: &str, include_mismatches: bool) -> String { let mut infer_def = |inference_result: Arc, body: Arc, body_source_map: Arc, - krate: CrateId| { + krate: Crate| { let display_target = DisplayTarget::from_crate(&db, krate); let mut types: Vec<(InFile, &Ty)> = Vec::new(); let mut mismatches: Vec<(InFile, &TypeMismatch)> = Vec::new(); @@ -388,10 +387,10 @@ fn infer_with_mismatches(content: &str, include_mismatches: bool) -> String { } }; - let module = db.module_for_file(file_id); + let module = db.module_for_file(file_id.file_id(&db)); let def_map = module.def_map(&db); - let mut defs: Vec<(DefWithBodyId, CrateId)> = Vec::new(); + let mut defs: Vec<(DefWithBodyId, Crate)> = Vec::new(); visit_module(&db, &def_map, module.local_id, &mut |it| { let def = match it { ModuleDefId::FunctionId(it) => it.into(), @@ -419,7 +418,6 @@ fn infer_with_mismatches(content: &str, include_mismatches: bool) -> String { let loc = it.lookup(&db); loc.source(&db).value.syntax().text_range().start() } - DefWithBodyId::InTypeConstId(it) => it.source(&db).syntax().text_range().start(), }); for (def, krate) in defs { let (body, source_map) = db.body_with_source_map(def); @@ -439,7 +437,7 @@ pub(crate) fn visit_module( ) { visit_scope(db, crate_def_map, &crate_def_map[module_id].scope, cb); for impl_id in crate_def_map[module_id].scope.impls() { - let impl_data = db.impl_data(impl_id); + let impl_data = db.impl_items(impl_id); for &(_, item) in impl_data.items.iter() { match item { AssocItemId::FunctionId(it) => { @@ -481,14 +479,14 @@ pub(crate) fn visit_module( visit_body(db, &body, cb); } ModuleDefId::AdtId(hir_def::AdtId::EnumId(it)) => { - db.enum_data(it).variants.iter().for_each(|&(it, _)| { + db.enum_variants(it).variants.iter().for_each(|&(it, _)| { let body = db.body(it.into()); cb(it.into()); visit_body(db, &body, cb); }); } ModuleDefId::TraitId(it) => { - let trait_data = db.trait_data(it); + let trait_data = db.trait_items(it); for &(_, item) in trait_data.items.iter() { match item { AssocItemId::FunctionId(it) => cb(it.into()), @@ -570,7 +568,7 @@ fn salsa_bug() { ", ); - let module = db.module_for_file(pos.file_id); + let module = db.module_for_file(pos.file_id.file_id(&db)); let crate_def_map = module.def_map(&db); visit_module(&db, &crate_def_map, module.local_id, &mut |def| { db.infer(match def { @@ -607,9 +605,9 @@ fn salsa_bug() { } "; - db.set_file_text(pos.file_id.file_id(), new_text); + db.set_file_text(pos.file_id.file_id(&db), new_text); - let module = db.module_for_file(pos.file_id); + let module = db.module_for_file(pos.file_id.file_id(&db)); let crate_def_map = module.def_map(&db); visit_module(&db, &crate_def_map, module.local_id, &mut |def| { db.infer(match def { diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/closure_captures.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/closure_captures.rs index 6f7bfc4ea7a00..73f1ae56457d6 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/tests/closure_captures.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/closure_captures.rs @@ -1,9 +1,9 @@ -use base_db::ra_salsa::InternKey; -use expect_test::{expect, Expect}; +use expect_test::{Expect, expect}; use hir_def::db::DefDatabase; -use hir_expand::files::InFileWrapper; +use hir_expand::{HirFileId, files::InFileWrapper}; use itertools::Itertools; -use span::{HirFileId, TextRange}; +use salsa::plumbing::FromId; +use span::TextRange; use syntax::{AstNode, AstPtr}; use test_fixture::WithFixture; @@ -16,7 +16,7 @@ use super::visit_module; fn check_closure_captures(#[rust_analyzer::rust_fixture] ra_fixture: &str, expect: Expect) { let (db, file_id) = TestDB::with_single_file(ra_fixture); - let module = db.module_for_file(file_id); + let module = db.module_for_file(file_id.file_id(&db)); let def_map = module.def_map(&db); let mut defs = Vec::new(); @@ -34,8 +34,8 @@ fn check_closure_captures(#[rust_analyzer::rust_fixture] ra_fixture: &str, expec let infer = db.infer(def); let db = &db; captures_info.extend(infer.closure_info.iter().flat_map(|(closure_id, (captures, _))| { - let closure = db.lookup_intern_closure(InternedClosureId::from_intern_id(closure_id.0)); - let (_, source_map) = db.body_with_source_map(closure.0); + let closure = db.lookup_intern_closure(InternedClosureId::from_id(closure_id.0)); + let source_map = db.body_with_source_map(closure.0).1; let closure_text_range = source_map .expr_syntax(closure.1) .expect("failed to map closure to SyntaxNode") @@ -384,7 +384,9 @@ fn main() { }; } "#, - expect!["57..149;20..25;78..80,98..100,118..124,134..135 ByRef(Mut { kind: Default }) a &'? mut bool"], + expect![ + "57..149;20..25;78..80,98..100,118..124,134..135 ByRef(Mut { kind: Default }) a &'? mut bool" + ], ); } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/coercion.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/coercion.rs index 7e7c1f835c787..ddc5b715194df 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/tests/coercion.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/coercion.rs @@ -22,9 +22,9 @@ struct S { a: T } fn f(_: &[T]) -> T { loop {} } fn g(_: S<&[T]>) -> T { loop {} } -fn gen() -> *mut [T; 2] { loop {} } +fn generate() -> *mut [T; 2] { loop {} } fn test1() -> *mut [U] { - gen() + generate() } fn test2() { @@ -561,7 +561,7 @@ trait Foo {} fn test(f: impl Foo, g: &(impl Foo + ?Sized)) { let _: &dyn Foo = &f; let _: &dyn Foo = g; - //^ expected &'? dyn Foo, got &'? impl Foo + ?Sized + //^ expected &'? (dyn Foo + 'static), got &'? impl Foo + ?Sized } "#, ); @@ -827,11 +827,11 @@ struct V { t: T } fn main() { let a: V<&dyn Tr>; (a,) = V { t: &S }; - //^^^^expected V<&'? S>, got (V<&'? dyn Tr>,) + //^^^^expected V<&'? S>, got (V<&'? (dyn Tr + '?)>,) let mut a: V<&dyn Tr> = V { t: &S }; (a,) = V { t: &S }; - //^^^^expected V<&'? S>, got (V<&'? dyn Tr>,) + //^^^^expected V<&'? S>, got (V<&'? (dyn Tr + '?)>,) } "#, ); diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/display_source_code.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/display_source_code.rs index 60c03b52246c4..a986b54a7b064 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/tests/display_source_code.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/display_source_code.rs @@ -65,13 +65,13 @@ trait A { } trait B: A {} -fn test( +fn test<'a>( _: &(dyn A + Send), - //^ &'_ (dyn A + Send) - _: &(dyn Send + A), - //^ &'_ (dyn A + Send) + //^ &(dyn A + Send + 'static) + _: &'a (dyn Send + A), + //^ &'a (dyn A + Send + 'static) _: &dyn B, - //^ &'_ (dyn B) + //^ &(dyn B + 'static) ) {} "#, ); @@ -85,7 +85,7 @@ fn render_dyn_for_ty() { trait Foo<'a> {} fn foo(foo: &dyn for<'a> Foo<'a>) {} - // ^^^ &'_ dyn Foo<'_> + // ^^^ &(dyn Foo<'?> + 'static) "#, ); } @@ -111,11 +111,11 @@ fn test( b; //^ impl Foo c; - //^ &'_ impl Foo + ?Sized + //^ &impl Foo + ?Sized d; //^ S ref_any; - //^^^^^^^ &'_ impl ?Sized + //^^^^^^^ &impl ?Sized empty; } //^^^^^ impl Sized "#, @@ -192,7 +192,7 @@ fn test( b; //^ fn(impl Foo) -> impl Foo c; -} //^ fn(&'_ impl Foo + ?Sized) -> &'_ impl Foo + ?Sized +} //^ fn(&impl Foo + ?Sized) -> &impl Foo + ?Sized "#, ); } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/incremental.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/incremental.rs index 3757d722ac83b..0542be0ba896d 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/tests/incremental.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/incremental.rs @@ -1,4 +1,4 @@ -use base_db::SourceDatabaseFileInputExt as _; +use base_db::SourceDatabase; use hir_def::ModuleDefId; use test_fixture::WithFixture; @@ -17,7 +17,7 @@ fn foo() -> i32 { ); { let events = db.log_executed(|| { - let module = db.module_for_file(pos.file_id.file_id()); + let module = db.module_for_file(pos.file_id.file_id(&db)); let crate_def_map = module.def_map(&db); visit_module(&db, &crate_def_map, module.local_id, &mut |def| { if let ModuleDefId::FunctionId(it) = def { @@ -25,7 +25,7 @@ fn foo() -> i32 { } }); }); - assert!(format!("{events:?}").contains("infer")) + assert!(format!("{events:?}").contains("infer_shim")) } let new_text = " @@ -35,11 +35,11 @@ fn foo() -> i32 { 1 }"; - db.set_file_text(pos.file_id.file_id(), new_text); + db.set_file_text(pos.file_id.file_id(&db), new_text); { let events = db.log_executed(|| { - let module = db.module_for_file(pos.file_id.file_id()); + let module = db.module_for_file(pos.file_id.file_id(&db)); let crate_def_map = module.def_map(&db); visit_module(&db, &crate_def_map, module.local_id, &mut |def| { if let ModuleDefId::FunctionId(it) = def { @@ -47,7 +47,7 @@ fn foo() -> i32 { } }); }); - assert!(!format!("{events:?}").contains("infer"), "{events:#?}") + assert!(!format!("{events:?}").contains("infer_shim"), "{events:#?}") } } @@ -68,7 +68,7 @@ fn baz() -> i32 { ); { let events = db.log_executed(|| { - let module = db.module_for_file(pos.file_id.file_id()); + let module = db.module_for_file(pos.file_id.file_id(&db)); let crate_def_map = module.def_map(&db); visit_module(&db, &crate_def_map, module.local_id, &mut |def| { if let ModuleDefId::FunctionId(it) = def { @@ -76,7 +76,7 @@ fn baz() -> i32 { } }); }); - assert!(format!("{events:?}").contains("infer")) + assert!(format!("{events:?}").contains("infer_shim")) } let new_text = " @@ -91,11 +91,11 @@ fn baz() -> i32 { } "; - db.set_file_text(pos.file_id.file_id(), new_text); + db.set_file_text(pos.file_id.file_id(&db), new_text); { let events = db.log_executed(|| { - let module = db.module_for_file(pos.file_id.file_id()); + let module = db.module_for_file(pos.file_id.file_id(&db)); let crate_def_map = module.def_map(&db); visit_module(&db, &crate_def_map, module.local_id, &mut |def| { if let ModuleDefId::FunctionId(it) = def { @@ -103,6 +103,6 @@ fn baz() -> i32 { } }); }); - assert!(format!("{events:?}").matches("infer").count() == 1, "{events:#?}") + assert_eq!(format!("{events:?}").matches("infer_shim").count(), 1, "{events:#?}") } } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/method_resolution.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/method_resolution.rs index 3a258ecad10a6..94826acca305f 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/tests/method_resolution.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/method_resolution.rs @@ -1153,9 +1153,9 @@ fn dyn_trait_super_trait_not_in_scope() { 51..55 'self': &'? Self 64..69 '{ 0 }': u32 66..67 '0': u32 - 176..177 'd': &'? dyn Trait + 176..177 'd': &'? (dyn Trait + 'static) 191..207 '{ ...o(); }': () - 197..198 'd': &'? dyn Trait + 197..198 'd': &'? (dyn Trait + 'static) 197..204 'd.foo()': u32 "#]], ); @@ -2019,10 +2019,10 @@ impl dyn Error + Send { /// Attempts to downcast the box to a concrete type. pub fn downcast(self: Box) -> Result, Box> { let err: Box = self; - // ^^^^ expected Box, got Box + // ^^^^ expected Box, got Box // FIXME, type mismatch should not occur ::downcast(err).map_err(|_| loop {}) - //^^^^^^^^^^^^^^^^^^^^^ type: fn downcast<{unknown}>(Box) -> Result, Box> + //^^^^^^^^^^^^^^^^^^^^^ type: fn downcast<{unknown}>(Box) -> Result, Box> } } "#, @@ -2170,3 +2170,26 @@ fn main() { "#, ); } + +#[test] +fn mut_to_const_pointer() { + check( + r#" +pub trait X { + fn perform(self) -> u64; +} + +impl X for *const u8 { + fn perform(self) -> u64 { + 42 + } +} + +fn test(x: *mut u8) { + let _v = x.perform(); + // ^ adjustments: Pointer(MutToConstPointer) + // ^^^^^^^^^^^ type: u64 +} +"#, + ); +} diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/regression.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/regression.rs index c4822a90f9e7d..47c695c69748d 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/tests/regression.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/regression.rs @@ -629,7 +629,7 @@ fn issue_4053_diesel_where_clauses() { 488..522 '{ ... }': () 498..502 'self': SelectStatement 498..508 'self.order': O - 498..515 'self.o...into()': dyn QueryFragment + 498..515 'self.o...into()': dyn QueryFragment + 'static "#]], ); } @@ -773,7 +773,7 @@ fn issue_4800() { "#, expect![[r#" 379..383 'self': &'? mut PeerSet - 401..424 '{ ... }': dyn Future + 401..424 '{ ... }': dyn Future + 'static 411..418 'loop {}': ! 416..418 '{}': () 575..579 'self': &'? mut Self @@ -1584,23 +1584,6 @@ type Member = ConstGen; ); } -#[test] -fn cfgd_out_self_param() { - cov_mark::check!(cfgd_out_self_param); - check_no_mismatches( - r#" -struct S; -impl S { - fn f(#[cfg(never)] &self) {} -} - -fn f(s: S) { - s.f(); -} -"#, - ); -} - #[test] fn tuple_struct_pattern_with_unmatched_args_crash() { check_infer( @@ -2295,3 +2278,26 @@ fn test(x: bool) { "#]], ); } + +#[test] +fn issue_19730() { + check_infer( + r#" +trait Trait {} + +trait Foo { + type Bar: Trait; + + fn foo(bar: Self::Bar) { + let _ = bar; + } +} +"#, + expect![[r#" + 83..86 'bar': Foo::Bar + 105..133 '{ ... }': () + 119..120 '_': Foo::Bar + 123..126 'bar': Foo::Bar + "#]], + ); +} diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/simple.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/simple.rs index 4c5cca21655d0..eeebe38f1826d 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/tests/simple.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/simple.rs @@ -1784,6 +1784,8 @@ impl Foo for u8 { } #[test] +// FIXME +#[should_panic] fn const_eval_in_function_signature() { check_types( r#" @@ -2739,11 +2741,11 @@ impl B for Astruct {} 715..744 '#[rust...1i32])': Box<[i32; 1], Global> 737..743 '[1i32]': [i32; 1] 738..742 '1i32': i32 - 755..756 'v': Vec, Global> - 776..793 '<[_]> ...to_vec': fn into_vec, Global>(Box<[Box], Global>) -> Vec, Global> - 776..850 '<[_]> ...ct)]))': Vec, Global> - 794..849 '#[rust...uct)])': Box<[Box; 1], Global> - 816..848 '[#[rus...ruct)]': [Box; 1] + 755..756 'v': Vec, Global> + 776..793 '<[_]> ...to_vec': fn into_vec, Global>(Box<[Box], Global>) -> Vec, Global> + 776..850 '<[_]> ...ct)]))': Vec, Global> + 794..849 '#[rust...uct)])': Box<[Box; 1], Global> + 816..848 '[#[rus...ruct)]': [Box; 1] 817..847 '#[rust...truct)': Box 839..846 'Astruct': Astruct "#]], diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/traits.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/traits.rs index dda7bfb2baf9a..14137605c9f21 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/tests/traits.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/traits.rs @@ -1475,26 +1475,26 @@ fn test(x: Box>, y: &dyn Trait) { expect![[r#" 29..33 'self': &'? Self 54..58 'self': &'? Self - 198..200 '{}': Box> - 210..211 'x': Box> - 234..235 'y': &'? dyn Trait + 198..200 '{}': Box + 'static> + 210..211 'x': Box + 'static> + 234..235 'y': &'? (dyn Trait + 'static) 254..371 '{ ...2(); }': () - 260..261 'x': Box> - 267..268 'y': &'? dyn Trait - 278..279 'z': Box> - 282..285 'bar': fn bar() -> Box> - 282..287 'bar()': Box> - 293..294 'x': Box> + 260..261 'x': Box + 'static> + 267..268 'y': &'? (dyn Trait + 'static) + 278..279 'z': Box + 'static> + 282..285 'bar': fn bar() -> Box + 'static> + 282..287 'bar()': Box + 'static> + 293..294 'x': Box + 'static> 293..300 'x.foo()': u64 - 306..307 'y': &'? dyn Trait + 306..307 'y': &'? (dyn Trait + 'static) 306..313 'y.foo()': u64 - 319..320 'z': Box> + 319..320 'z': Box + 'static> 319..326 'z.foo()': u64 - 332..333 'x': Box> + 332..333 'x': Box + 'static> 332..340 'x.foo2()': i64 - 346..347 'y': &'? dyn Trait + 346..347 'y': &'? (dyn Trait + 'static) 346..354 'y.foo2()': i64 - 360..361 'z': Box> + 360..361 'z': Box + 'static> 360..368 'z.foo2()': i64 "#]], ); @@ -1523,14 +1523,14 @@ fn test(s: S) { expect![[r#" 32..36 'self': &'? Self 102..106 'self': &'? S - 128..139 '{ loop {} }': &'? dyn Trait + 128..139 '{ loop {} }': &'? (dyn Trait + 'static) 130..137 'loop {}': ! 135..137 '{}': () 175..179 'self': &'? Self 251..252 's': S 267..289 '{ ...z(); }': () 273..274 's': S - 273..280 's.bar()': &'? dyn Trait + 273..280 's.bar()': &'? (dyn Trait + 'static) 273..286 's.bar().baz()': (u32, i32) "#]], ); @@ -1556,20 +1556,20 @@ fn test(x: Trait, y: &Trait) -> u64 { }"#, expect![[r#" 26..30 'self': &'? Self - 60..62 '{}': dyn Trait - 72..73 'x': dyn Trait - 82..83 'y': &'? dyn Trait + 60..62 '{}': dyn Trait + 'static + 72..73 'x': dyn Trait + 'static + 82..83 'y': &'? (dyn Trait + 'static) 100..175 '{ ...o(); }': u64 - 106..107 'x': dyn Trait - 113..114 'y': &'? dyn Trait - 124..125 'z': dyn Trait - 128..131 'bar': fn bar() -> dyn Trait - 128..133 'bar()': dyn Trait - 139..140 'x': dyn Trait + 106..107 'x': dyn Trait + 'static + 113..114 'y': &'? (dyn Trait + 'static) + 124..125 'z': dyn Trait + 'static + 128..131 'bar': fn bar() -> dyn Trait + 'static + 128..133 'bar()': dyn Trait + 'static + 139..140 'x': dyn Trait + 'static 139..146 'x.foo()': u64 - 152..153 'y': &'? dyn Trait + 152..153 'y': &'? (dyn Trait + 'static) 152..159 'y.foo()': u64 - 165..166 'z': dyn Trait + 165..166 'z': dyn Trait + 'static 165..172 'z.foo()': u64 "#]], ); @@ -1589,10 +1589,10 @@ fn main() { expect![[r#" 31..35 'self': &'? S 37..39 '{}': () - 47..48 '_': &'? dyn Fn(S) + 47..48 '_': &'? (dyn Fn(S) + 'static) 58..60 '{}': () 71..105 '{ ...()); }': () - 77..78 'f': fn f(&'? dyn Fn(S)) + 77..78 'f': fn f(&'? (dyn Fn(S) + 'static)) 77..102 'f(&|nu...foo())': () 79..101 '&|numb....foo()': &'? impl Fn(S) 80..101 '|numbe....foo()': impl Fn(S) @@ -2927,13 +2927,13 @@ fn test(x: &dyn Foo) { foo(x); }"#, expect![[r#" - 21..22 'x': &'? dyn Foo + 21..22 'x': &'? (dyn Foo + 'static) 34..36 '{}': () - 46..47 'x': &'? dyn Foo + 46..47 'x': &'? (dyn Foo + 'static) 59..74 '{ foo(x); }': () - 65..68 'foo': fn foo(&'? dyn Foo) + 65..68 'foo': fn foo(&'? (dyn Foo + 'static)) 65..71 'foo(x)': () - 69..70 'x': &'? dyn Foo + 69..70 'x': &'? (dyn Foo + 'static) "#]], ); } @@ -3210,13 +3210,13 @@ fn foo() { 218..324 '{ ...&s); }': () 228..229 's': Option 232..236 'None': Option - 246..247 'f': Box)> - 281..310 'Box { ... {}) }': Box)> + 246..247 'f': Box) + 'static> + 281..310 'Box { ... {}) }': Box) + 'static> 294..308 '&mut (|ps| {})': &'? mut impl FnOnce(&'? Option) 300..307 '|ps| {}': impl FnOnce(&'? Option) 301..303 'ps': &'? Option 305..307 '{}': () - 316..317 'f': Box)> + 316..317 'f': Box) + 'static> 316..321 'f(&s)': () 318..320 '&s': &'? Option 319..320 's': Option @@ -4213,7 +4213,7 @@ fn g<'a, T: 'a>(v: impl Trait = &'a T>) { let a = v.get::(); //^ &'a T let a = v.get::<()>(); - //^ Trait::Assoc<(), impl Trait = &'a T>> + //^ Trait::Assoc = &'a T>, ()> } fn h<'a>(v: impl Trait = &'a i32> + Trait = &'a i64>) { let a = v.get::(); @@ -4252,9 +4252,9 @@ fn f<'a>(v: &dyn Trait = &'a i32>) { "#, expect![[r#" 90..94 'self': &'? Self - 127..128 'v': &'? (dyn Trait = &'a i32>) + 127..128 'v': &'? (dyn Trait = &'a i32> + 'static) 164..195 '{ ...f(); }': () - 170..171 'v': &'? (dyn Trait = &'a i32>) + 170..171 'v': &'? (dyn Trait = &'a i32> + 'static) 170..184 'v.get::()': &'? i32 170..192 'v.get:...eref()': &'? i32 "#]], @@ -4280,7 +4280,7 @@ where let a = t.get::(); //^ usize let a = t.get::<()>(); - //^ Trait::Assoc<(), T> + //^ Trait::Assoc } "#, @@ -4735,7 +4735,7 @@ pub async fn foo_async<'a>() -> Box { fn foo() { foo_async(); - //^^^^^^^^^^^impl Future> + ?Sized + //^^^^^^^^^^^impl Future> + ?Sized } "#, ) diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tls.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tls.rs index 6cb59491fac82..f5911e2161d0c 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/tls.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/tls.rs @@ -5,8 +5,8 @@ use itertools::Itertools; use span::Edition; use crate::{ - chalk_db, db::HirDatabase, from_assoc_type_id, from_chalk_trait_id, mapping::from_chalk, - CallableDefId, Interner, ProjectionTyExt, + CallableDefId, Interner, ProjectionTyExt, chalk_db, db::HirDatabase, from_assoc_type_id, + from_chalk_trait_id, mapping::from_chalk, }; use hir_def::{AdtId, ItemContainerId, Lookup, TypeAliasId}; @@ -21,11 +21,11 @@ impl DebugContext<'_> { f: &mut fmt::Formatter<'_>, ) -> Result<(), fmt::Error> { let name = match id.0 { - AdtId::StructId(it) => self.0.struct_data(it).name.clone(), - AdtId::UnionId(it) => self.0.union_data(it).name.clone(), - AdtId::EnumId(it) => self.0.enum_data(it).name.clone(), + AdtId::StructId(it) => self.0.struct_signature(it).name.clone(), + AdtId::UnionId(it) => self.0.union_signature(it).name.clone(), + AdtId::EnumId(it) => self.0.enum_signature(it).name.clone(), }; - name.display(self.0.upcast(), Edition::LATEST).fmt(f)?; + name.display(self.0, Edition::LATEST).fmt(f)?; Ok(()) } @@ -35,8 +35,8 @@ impl DebugContext<'_> { f: &mut fmt::Formatter<'_>, ) -> Result<(), fmt::Error> { let trait_: hir_def::TraitId = from_chalk_trait_id(id); - let trait_data = self.0.trait_data(trait_); - trait_data.name.display(self.0.upcast(), Edition::LATEST).fmt(f)?; + let trait_data = self.0.trait_signature(trait_); + trait_data.name.display(self.0, Edition::LATEST).fmt(f)?; Ok(()) } @@ -46,17 +46,17 @@ impl DebugContext<'_> { fmt: &mut fmt::Formatter<'_>, ) -> Result<(), fmt::Error> { let type_alias: TypeAliasId = from_assoc_type_id(id); - let type_alias_data = self.0.type_alias_data(type_alias); - let trait_ = match type_alias.lookup(self.0.upcast()).container { + let type_alias_data = self.0.type_alias_signature(type_alias); + let trait_ = match type_alias.lookup(self.0).container { ItemContainerId::TraitId(t) => t, _ => panic!("associated type not in trait"), }; - let trait_data = self.0.trait_data(trait_); + let trait_data = self.0.trait_signature(trait_); write!( fmt, "{}::{}", - trait_data.name.display(self.0.upcast(), Edition::LATEST), - type_alias_data.name.display(self.0.upcast(), Edition::LATEST) + trait_data.name.display(self.0, Edition::LATEST), + type_alias_data.name.display(self.0, Edition::LATEST) )?; Ok(()) } @@ -67,16 +67,16 @@ impl DebugContext<'_> { fmt: &mut fmt::Formatter<'_>, ) -> Result<(), fmt::Error> { let type_alias = from_assoc_type_id(projection_ty.associated_ty_id); - let type_alias_data = self.0.type_alias_data(type_alias); - let trait_ = match type_alias.lookup(self.0.upcast()).container { + let type_alias_data = self.0.type_alias_signature(type_alias); + let trait_ = match type_alias.lookup(self.0).container { ItemContainerId::TraitId(t) => t, _ => panic!("associated type not in trait"), }; - let trait_name = &self.0.trait_data(trait_).name; + let trait_name = &self.0.trait_signature(trait_).name; let trait_ref = projection_ty.trait_ref(self.0); let trait_params = trait_ref.substitution.as_slice(Interner); let self_ty = trait_ref.self_type_parameter(Interner); - write!(fmt, "<{self_ty:?} as {}", trait_name.display(self.0.upcast(), Edition::LATEST))?; + write!(fmt, "<{self_ty:?} as {}", trait_name.display(self.0, Edition::LATEST))?; if trait_params.len() > 1 { write!( fmt, @@ -84,10 +84,9 @@ impl DebugContext<'_> { trait_params[1..].iter().format_with(", ", |x, f| f(&format_args!("{x:?}"))), )?; } - write!(fmt, ">::{}", type_alias_data.name.display(self.0.upcast(), Edition::LATEST))?; + write!(fmt, ">::{}", type_alias_data.name.display(self.0, Edition::LATEST))?; - let proj_params_count = projection_ty.substitution.len(Interner) - trait_params.len(); - let proj_params = &projection_ty.substitution.as_slice(Interner)[..proj_params_count]; + let proj_params = &projection_ty.substitution.as_slice(Interner)[trait_params.len()..]; if !proj_params.is_empty() { write!( fmt, @@ -106,16 +105,19 @@ impl DebugContext<'_> { ) -> Result<(), fmt::Error> { let def: CallableDefId = from_chalk(self.0, fn_def_id); let name = match def { - CallableDefId::FunctionId(ff) => self.0.function_data(ff).name.clone(), - CallableDefId::StructId(s) => self.0.struct_data(s).name.clone(), - CallableDefId::EnumVariantId(e) => self.0.enum_variant_data(e).name.clone(), + CallableDefId::FunctionId(ff) => self.0.function_signature(ff).name.clone(), + CallableDefId::StructId(s) => self.0.struct_signature(s).name.clone(), + CallableDefId::EnumVariantId(e) => { + let loc = e.lookup(self.0); + self.0.enum_variants(loc.parent).variants[loc.index as usize].1.clone() + } }; match def { CallableDefId::FunctionId(_) => { - write!(fmt, "{{fn {}}}", name.display(self.0.upcast(), Edition::LATEST)) + write!(fmt, "{{fn {}}}", name.display(self.0, Edition::LATEST)) } CallableDefId::StructId(_) | CallableDefId::EnumVariantId(_) => { - write!(fmt, "{{ctor {}}}", name.display(self.0.upcast(), Edition::LATEST)) + write!(fmt, "{{ctor {}}}", name.display(self.0, Edition::LATEST)) } } } @@ -131,11 +133,7 @@ mod unsafe_tls { pub(crate) fn with_current_program( op: impl for<'a> FnOnce(Option<&'a DebugContext<'a>>) -> R, ) -> R { - if PROGRAM.is_set() { - PROGRAM.with(|prog| op(Some(prog))) - } else { - op(None) - } + if PROGRAM.is_set() { PROGRAM.with(|prog| op(Some(prog))) } else { op(None) } } pub(crate) fn set_current_program(p: &dyn HirDatabase, op: OP) -> R diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/traits.rs b/src/tools/rust-analyzer/crates/hir-ty/src/traits.rs index 8cb7dbf60f37b..f9f8776cff7cb 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/traits.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/traits.rs @@ -3,15 +3,12 @@ use core::fmt; use std::env::var; -use chalk_ir::{fold::TypeFoldable, DebruijnIndex, GoalData}; +use chalk_ir::{DebruijnIndex, GoalData, fold::TypeFoldable}; use chalk_recursive::Cache; -use chalk_solve::{logging_db::LoggingRustIrDatabase, rust_ir, Solver}; +use chalk_solve::{Solver, logging_db::LoggingRustIrDatabase, rust_ir}; -use base_db::CrateId; -use hir_def::{ - lang_item::{LangItem, LangItemTarget}, - BlockId, TraitId, -}; +use base_db::Crate; +use hir_def::{BlockId, TraitId, lang_item::LangItem}; use hir_expand::name::Name; use intern::sym; use span::Edition; @@ -19,9 +16,9 @@ use stdx::{never, panic_context}; use triomphe::Arc; use crate::{ - db::HirDatabase, infer::unify::InferenceTable, utils::UnevaluatedConstEvaluatorFolder, AliasEq, - AliasTy, Canonical, DomainGoal, Goal, Guidance, InEnvironment, Interner, ProjectionTy, - ProjectionTyExt, Solution, TraitRefExt, Ty, TyKind, TypeFlags, WhereClause, + AliasEq, AliasTy, Canonical, DomainGoal, Goal, Guidance, InEnvironment, Interner, ProjectionTy, + ProjectionTyExt, Solution, TraitRefExt, Ty, TyKind, TypeFlags, WhereClause, db::HirDatabase, + infer::unify::InferenceTable, utils::UnevaluatedConstEvaluatorFolder, }; /// This controls how much 'time' we give the Chalk solver before giving up. @@ -30,7 +27,7 @@ const CHALK_SOLVER_FUEL: i32 = 1000; #[derive(Debug, Copy, Clone)] pub(crate) struct ChalkContext<'a> { pub(crate) db: &'a dyn HirDatabase, - pub(crate) krate: CrateId, + pub(crate) krate: Crate, pub(crate) block: Option, } @@ -48,7 +45,7 @@ fn create_chalk_solver() -> chalk_recursive::RecursiveSolver { /// we assume that `T: Default`. #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct TraitEnvironment { - pub krate: CrateId, + pub krate: Crate, pub block: Option, // FIXME make this a BTreeMap traits_from_clauses: Box<[(Ty, TraitId)]>, @@ -56,7 +53,7 @@ pub struct TraitEnvironment { } impl TraitEnvironment { - pub fn empty(krate: CrateId) -> Arc { + pub fn empty(krate: Crate) -> Arc { Arc::new(TraitEnvironment { krate, block: None, @@ -66,7 +63,7 @@ impl TraitEnvironment { } pub fn new( - krate: CrateId, + krate: Crate, block: Option, traits_from_clauses: Box<[(Ty, TraitId)]>, env: chalk_ir::Environment, @@ -109,19 +106,20 @@ pub(crate) fn normalize_projection_query( /// Solve a trait goal using Chalk. pub(crate) fn trait_solve_query( db: &dyn HirDatabase, - krate: CrateId, + krate: Crate, block: Option, goal: Canonical>, ) -> Option { - let detail = match &goal.value.goal.data(Interner) { - GoalData::DomainGoal(DomainGoal::Holds(WhereClause::Implemented(it))) => { - db.trait_data(it.hir_trait_id()).name.display(db.upcast(), Edition::LATEST).to_string() - } + let _p = tracing::info_span!("trait_solve_query", detail = ?match &goal.value.goal.data(Interner) { + GoalData::DomainGoal(DomainGoal::Holds(WhereClause::Implemented(it))) => db + .trait_signature(it.hir_trait_id()) + .name + .display(db, Edition::LATEST) + .to_string(), GoalData::DomainGoal(DomainGoal::Holds(WhereClause::AliasEq(_))) => "alias_eq".to_owned(), _ => "??".to_owned(), - }; - let _p = tracing::info_span!("trait_solve_query", ?detail).entered(); - tracing::info!("trait_solve_query({:?})", goal.value.goal); + }) + .entered(); if let GoalData::DomainGoal(DomainGoal::Holds(WhereClause::AliasEq(AliasEq { alias: AliasTy::Projection(projection_ty), @@ -148,7 +146,7 @@ pub(crate) fn trait_solve_query( fn solve( db: &dyn HirDatabase, - krate: CrateId, + krate: Crate, block: Option, goal: &chalk_ir::UCanonical>>, ) -> Option> { @@ -160,7 +158,7 @@ fn solve( let fuel = std::cell::Cell::new(CHALK_SOLVER_FUEL); let should_continue = || { - db.unwind_if_cancelled(); + db.unwind_if_revision_cancelled(); let remaining = fuel.get(); fuel.set(remaining - 1); if remaining == 0 { @@ -190,11 +188,7 @@ fn solve( // don't set the TLS for Chalk unless Chalk debugging is active, to make // extra sure we only use it for debugging - if is_chalk_debug() { - crate::tls::set_current_program(db, solve) - } else { - solve() - } + if is_chalk_debug() { crate::tls::set_current_program(db, solve) } else { solve() } } struct LoggingRustIrDatabaseLoggingOnDrop<'a>(LoggingRustIrDatabase>); @@ -285,20 +279,16 @@ impl FnTrait { pub fn method_name(self) -> Name { match self { - FnTrait::FnOnce => Name::new_symbol_root(sym::call_once.clone()), - FnTrait::FnMut => Name::new_symbol_root(sym::call_mut.clone()), - FnTrait::Fn => Name::new_symbol_root(sym::call.clone()), - FnTrait::AsyncFnOnce => Name::new_symbol_root(sym::async_call_once.clone()), - FnTrait::AsyncFnMut => Name::new_symbol_root(sym::async_call_mut.clone()), - FnTrait::AsyncFn => Name::new_symbol_root(sym::async_call.clone()), + FnTrait::FnOnce => Name::new_symbol_root(sym::call_once), + FnTrait::FnMut => Name::new_symbol_root(sym::call_mut), + FnTrait::Fn => Name::new_symbol_root(sym::call), + FnTrait::AsyncFnOnce => Name::new_symbol_root(sym::async_call_once), + FnTrait::AsyncFnMut => Name::new_symbol_root(sym::async_call_mut), + FnTrait::AsyncFn => Name::new_symbol_root(sym::async_call), } } - pub fn get_id(self, db: &dyn HirDatabase, krate: CrateId) -> Option { - let target = db.lang_item(krate, self.lang_item())?; - match target { - LangItemTarget::Trait(t) => Some(t), - _ => None, - } + pub fn get_id(self, db: &dyn HirDatabase, krate: Crate) -> Option { + self.lang_item().resolve_trait(db, krate) } } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/utils.rs b/src/tools/rust-analyzer/crates/hir-ty/src/utils.rs index 89d89fe2230af..1e0ff423ded62 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/utils.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/utils.rs @@ -1,47 +1,42 @@ //! Helper functions for working with def, which don't need to be a separate //! query, but can't be computed directly from `*Data` (ie, which need a `db`). -use std::{hash::Hash, iter}; +use std::iter; -use base_db::CrateId; +use base_db::Crate; use chalk_ir::{ - fold::{FallibleTypeFolder, Shift}, DebruijnIndex, + fold::{FallibleTypeFolder, Shift}, }; use hir_def::{ + EnumId, EnumVariantId, FunctionId, Lookup, TraitId, TypeAliasId, TypeOrConstParamId, db::DefDatabase, - generics::{WherePredicate, WherePredicateTypeTarget}, + hir::generics::WherePredicate, lang_item::LangItem, resolver::{HasResolver, TypeNs}, type_ref::{TraitBoundModifier, TypeRef}, - EnumId, EnumVariantId, FunctionId, Lookup, OpaqueInternableThing, TraitId, TypeAliasId, - TypeOrConstParamId, }; use hir_expand::name::Name; use intern::sym; use rustc_abi::TargetDataLayout; use rustc_hash::FxHashSet; -use smallvec::{smallvec, SmallVec}; +use smallvec::{SmallVec, smallvec}; use span::Edition; use stdx::never; use crate::{ + ChalkTraitId, Const, ConstScalar, GenericArg, Interner, Substitution, TargetFeatures, TraitRef, + TraitRefExt, Ty, WhereClause, consteval::unknown_const, db::HirDatabase, layout::{Layout, TagEncoding}, mir::pad16, - ChalkTraitId, Const, ConstScalar, GenericArg, Interner, Substitution, TargetFeatures, TraitRef, - TraitRefExt, Ty, WhereClause, }; -pub(crate) fn fn_traits( - db: &dyn DefDatabase, - krate: CrateId, -) -> impl Iterator + '_ { +pub(crate) fn fn_traits(db: &dyn DefDatabase, krate: Crate) -> impl Iterator + '_ { [LangItem::Fn, LangItem::FnMut, LangItem::FnOnce] .into_iter() - .filter_map(move |lang| db.lang_item(krate, lang)) - .flat_map(|it| it.as_trait()) + .filter_map(move |lang| lang.resolve_trait(db, krate)) } /// Returns an iterator over the direct super traits (including the trait itself). @@ -167,26 +162,20 @@ impl Iterator for ClauseElaborator<'_> { fn direct_super_traits_cb(db: &dyn DefDatabase, trait_: TraitId, cb: impl FnMut(TraitId)) { let resolver = trait_.resolver(db); - let generic_params = db.generic_params(trait_.into()); + let (generic_params, store) = db.generic_params_and_store(trait_.into()); let trait_self = generic_params.trait_self_param(); generic_params .where_predicates() .filter_map(|pred| match pred { WherePredicate::ForLifetime { target, bound, .. } | WherePredicate::TypeBound { target, bound } => { - let is_trait = match target { - WherePredicateTypeTarget::TypeRef(type_ref) => { - match &generic_params.types_map[*type_ref] { - TypeRef::Path(p) => p.is_self_type(), - _ => false, - } - } - WherePredicateTypeTarget::TypeOrConstParam(local_id) => { - Some(*local_id) == trait_self - } + let is_trait = match &store[*target] { + TypeRef::Path(p) => p.is_self_type(), + TypeRef::TypeParam(p) => Some(p.local_id()) == trait_self, + _ => false, }; match is_trait { - true => bound.as_path(&generic_params.types_map), + true => bound.as_path(&store), false => None, } } @@ -229,14 +218,14 @@ pub(super) fn associated_type_by_name_including_super_traits( name: &Name, ) -> Option<(TraitRef, TypeAliasId)> { all_super_trait_refs(db, trait_ref, |t| { - let assoc_type = db.trait_data(t.hir_trait_id()).associated_type_by_name(name)?; + let assoc_type = db.trait_items(t.hir_trait_id()).associated_type_by_name(name)?; Some((t, assoc_type)) }) } /// It is a bit different from the rustc equivalent. Currently it stores: -/// - 0: the function signature, encoded as a function pointer type -/// - 1..n: generics of the parent +/// - 0..n-1: generics of the parent +/// - n: the function signature, encoded as a function pointer type /// /// and it doesn't store the closure types and fields. /// @@ -247,7 +236,7 @@ pub(crate) struct ClosureSubst<'a>(pub(crate) &'a Substitution); impl<'a> ClosureSubst<'a> { pub(crate) fn parent_subst(&self) -> &'a [GenericArg] { match self.0.as_slice(Interner) { - [_, x @ ..] => x, + [x @ .., _] => x, _ => { never!("Closure missing parameter"); &[] @@ -257,7 +246,7 @@ impl<'a> ClosureSubst<'a> { pub(crate) fn sig_ty(&self) -> &'a Ty { match self.0.as_slice(Interner) { - [x, ..] => x.assert_ty_ref(Interner), + [.., x] => x.assert_ty_ref(Interner), _ => { unreachable!("Closure missing sig_ty parameter"); } @@ -279,7 +268,7 @@ pub fn is_fn_unsafe_to_call( caller_target_features: &TargetFeatures, call_edition: Edition, ) -> Unsafety { - let data = db.function_data(func); + let data = db.function_signature(func); if data.is_unsafe() { return Unsafety::Unsafe; } @@ -301,16 +290,16 @@ pub fn is_fn_unsafe_to_call( } } - let loc = func.lookup(db.upcast()); + let loc = func.lookup(db); match loc.container { hir_def::ItemContainerId::ExternBlockId(block) => { - let id = block.lookup(db.upcast()).id; + let id = block.lookup(db).id; let is_intrinsic_block = - id.item_tree(db.upcast())[id.value].abi.as_ref() == Some(&sym::rust_dash_intrinsic); + id.item_tree(db)[id.value].abi.as_ref() == Some(&sym::rust_dash_intrinsic); if is_intrinsic_block { // legacy intrinsics // extern "rust-intrinsic" intrinsics are unsafe unless they have the rustc_safe_intrinsic attribute - if db.attrs(func.into()).by_key(&sym::rustc_safe_intrinsic).exists() { + if db.attrs(func.into()).by_key(sym::rustc_safe_intrinsic).exists() { Unsafety::Safe } else { Unsafety::Unsafe @@ -318,11 +307,7 @@ pub fn is_fn_unsafe_to_call( } else { // Function in an `extern` block are always unsafe to call, except when // it is marked as `safe`. - if data.is_safe() { - Unsafety::Safe - } else { - Unsafety::Unsafe - } + if data.is_safe() { Unsafety::Safe } else { Unsafety::Unsafe } } } _ => Unsafety::Safe, @@ -372,7 +357,7 @@ pub(crate) fn detect_variant_from_bytes<'a>( let (var_id, var_layout) = match &layout.variants { hir_def::layout::Variants::Empty => unreachable!(), hir_def::layout::Variants::Single { index } => { - (db.enum_data(e).variants[index.0].0, layout) + (db.enum_variants(e).variants[index.0].0, layout) } hir_def::layout::Variants::Multiple { tag, tag_encoding, variants, .. } => { let size = tag.size(target_data_layout).bytes_usize(); @@ -382,7 +367,7 @@ pub(crate) fn detect_variant_from_bytes<'a>( TagEncoding::Direct => { let (var_idx, layout) = variants.iter_enumerated().find_map(|(var_idx, v)| { - let def = db.enum_data(e).variants[var_idx.0].0; + let def = db.enum_variants(e).variants[var_idx.0].0; (db.const_eval_discriminant(def) == Ok(tag)).then_some((def, v)) })?; (var_idx, layout) @@ -395,35 +380,10 @@ pub(crate) fn detect_variant_from_bytes<'a>( .filter(|x| x != untagged_variant) .nth(candidate_tag) .unwrap_or(*untagged_variant); - (db.enum_data(e).variants[variant.0].0, &variants[variant]) + (db.enum_variants(e).variants[variant.0].0, &variants[variant]) } } } }; Some((var_id, var_layout)) } - -#[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub(crate) struct InTypeConstIdMetadata(pub(crate) Ty); - -impl OpaqueInternableThing for InTypeConstIdMetadata { - fn dyn_hash(&self, mut state: &mut dyn std::hash::Hasher) { - self.hash(&mut state); - } - - fn dyn_eq(&self, other: &dyn OpaqueInternableThing) -> bool { - other.as_any().downcast_ref::() == Some(self) - } - - fn dyn_clone(&self) -> Box { - Box::new(self.clone()) - } - - fn as_any(&self) -> &dyn std::any::Any { - self - } - - fn box_any(&self) -> Box { - Box::new(self.clone()) - } -} diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/variance.rs b/src/tools/rust-analyzer/crates/hir-ty/src/variance.rs index 3a22158ce6f1d..6e1cd9a310f15 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/variance.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/variance.rs @@ -14,14 +14,13 @@ //! while installing firewall per item queries to prevent invalidation issues. use crate::db::HirDatabase; -use crate::generics::{generics, Generics}; +use crate::generics::{Generics, generics}; use crate::{ AliasTy, Const, ConstScalar, DynTyExt, GenericArg, GenericArgData, Interner, Lifetime, LifetimeData, Ty, TyKind, }; -use base_db::ra_salsa::Cycle; use chalk_ir::Mutability; -use hir_def::data::adt::StructFlags; +use hir_def::signatures::StructFlags; use hir_def::{AdtId, GenericDefId, GenericParamId, VariantId}; use std::fmt; use std::ops::Not; @@ -34,7 +33,7 @@ pub(crate) fn variances_of(db: &dyn HirDatabase, def: GenericDefId) -> Option (), GenericDefId::AdtId(adt) => { if let AdtId::StructId(id) = adt { - let flags = &db.struct_data(id).flags; + let flags = &db.struct_signature(id).flags; if flags.contains(StructFlags::IS_UNSAFE_CELL) { return Some(Arc::from_iter(vec![Variance::Invariant; 1])); } else if flags.contains(StructFlags::IS_PHANTOM_DATA) { @@ -45,7 +44,7 @@ pub(crate) fn variances_of(db: &dyn HirDatabase, def: GenericDefId) -> Option return None, } - let generics = generics(db.upcast(), def); + let generics = generics(db, def); let count = generics.len(); if count == 0 { return None; @@ -55,12 +54,20 @@ pub(crate) fn variances_of(db: &dyn HirDatabase, def: GenericDefId) -> Option>, +// _count: u32, +// _def: GenericDefId, +// ) -> salsa::CycleRecoveryAction>> { +// salsa::CycleRecoveryAction::Iterate +// } + +pub(crate) fn variances_of_cycle_initial( db: &dyn HirDatabase, - _cycle: &Cycle, - def: &GenericDefId, + def: GenericDefId, ) -> Option> { - let generics = generics(db.upcast(), *def); + let generics = generics(db, def); let count = generics.len(); if count == 0 { @@ -206,7 +213,7 @@ impl Context<'_> { AdtId::StructId(s) => add_constraints_from_variant(VariantId::StructId(s)), AdtId::UnionId(u) => add_constraints_from_variant(VariantId::UnionId(u)), AdtId::EnumId(e) => { - db.enum_data(e).variants.iter().for_each(|&(variant, _)| { + db.enum_variants(e).variants.iter().for_each(|&(variant, _)| { add_constraints_from_variant(VariantId::EnumVariantId(variant)) }); } @@ -487,13 +494,13 @@ impl Context<'_> { #[cfg(test)] mod tests { - use expect_test::{expect, Expect}; + use expect_test::{Expect, expect}; use hir_def::{ - generics::GenericParamDataRef, src::HasSource, AdtId, GenericDefId, ModuleDefId, + AdtId, GenericDefId, ModuleDefId, hir::generics::GenericParamDataRef, src::HasSource, }; use itertools::Itertools; use stdx::format_to; - use syntax::{ast::HasName, AstNode}; + use syntax::{AstNode, ast::HasName}; use test_fixture::WithFixture; use hir_def::Lookup; @@ -953,10 +960,6 @@ struct S3(S); #[test] fn prove_fixedpoint() { - // FIXME: This is wrong, this should be `FixedPoint[T: covariant, U: covariant, V: covariant]` - // This is a limitation of current salsa where a cycle may only set a fallback value to the - // query result, but we need to solve a fixpoint here. The new salsa will have this - // fortunately. check( r#" struct FixedPoint(&'static FixedPoint<(), T, U>, V); @@ -979,7 +982,7 @@ struct FixedPoint(&'static FixedPoint<(), T, U>, V); let (db, file_id) = TestDB::with_single_file(ra_fixture); let mut defs: Vec = Vec::new(); - let module = db.module_for_file_opt(file_id).unwrap(); + let module = db.module_for_file_opt(file_id.file_id(&db)).unwrap(); let def_map = module.def_map(&db); crate::tests::visit_module(&db, &def_map, module.local_id, &mut |it| { defs.push(match it { diff --git a/src/tools/rust-analyzer/crates/hir/src/attrs.rs b/src/tools/rust-analyzer/crates/hir/src/attrs.rs index 4e45b5a250eb5..b1c478d1bf401 100644 --- a/src/tools/rust-analyzer/crates/hir/src/attrs.rs +++ b/src/tools/rust-analyzer/crates/hir/src/attrs.rs @@ -3,14 +3,17 @@ use std::ops::ControlFlow; use hir_def::{ + AssocItemId, AttrDefId, ModuleDefId, attr::AttrsWithOwner, + expr_store::path::Path, item_scope::ItemInNs, - path::{ModPath, Path}, per_ns::Namespace, resolver::{HasResolver, Resolver, TypeNs}, - AssocItemId, AttrDefId, ModuleDefId, }; -use hir_expand::{mod_path::PathKind, name::Name}; +use hir_expand::{ + mod_path::{ModPath, PathKind}, + name::Name, +}; use hir_ty::{db::HirDatabase, method_resolution}; use crate::{ @@ -30,7 +33,7 @@ macro_rules! impl_has_attrs { impl HasAttrs for $def { fn attrs(self, db: &dyn HirDatabase) -> AttrsWithOwner { let def = AttrDefId::$def_id(self.into()); - AttrsWithOwner::new(db.upcast(), def) + AttrsWithOwner::new(db, def) } fn attr_id(self) -> AttrDefId { AttrDefId::$def_id(self.into()) @@ -92,7 +95,7 @@ impl HasAttrs for AssocItem { impl HasAttrs for crate::Crate { fn attrs(self, db: &dyn HirDatabase) -> AttrsWithOwner { let def = AttrDefId::ModuleId(self.root_module().id); - AttrsWithOwner::new(db.upcast(), def) + AttrsWithOwner::new(db, def) } fn attr_id(self) -> AttrDefId { AttrDefId::ModuleId(self.root_module().id) @@ -116,27 +119,27 @@ fn resolve_doc_path_on_( ns: Option, ) -> Option { let resolver = match attr_id { - AttrDefId::ModuleId(it) => it.resolver(db.upcast()), - AttrDefId::FieldId(it) => it.parent.resolver(db.upcast()), - AttrDefId::AdtId(it) => it.resolver(db.upcast()), - AttrDefId::FunctionId(it) => it.resolver(db.upcast()), - AttrDefId::EnumVariantId(it) => it.resolver(db.upcast()), - AttrDefId::StaticId(it) => it.resolver(db.upcast()), - AttrDefId::ConstId(it) => it.resolver(db.upcast()), - AttrDefId::TraitId(it) => it.resolver(db.upcast()), - AttrDefId::TraitAliasId(it) => it.resolver(db.upcast()), - AttrDefId::TypeAliasId(it) => it.resolver(db.upcast()), - AttrDefId::ImplId(it) => it.resolver(db.upcast()), - AttrDefId::ExternBlockId(it) => it.resolver(db.upcast()), - AttrDefId::UseId(it) => it.resolver(db.upcast()), - AttrDefId::MacroId(it) => it.resolver(db.upcast()), - AttrDefId::ExternCrateId(it) => it.resolver(db.upcast()), + AttrDefId::ModuleId(it) => it.resolver(db), + AttrDefId::FieldId(it) => it.parent.resolver(db), + AttrDefId::AdtId(it) => it.resolver(db), + AttrDefId::FunctionId(it) => it.resolver(db), + AttrDefId::EnumVariantId(it) => it.resolver(db), + AttrDefId::StaticId(it) => it.resolver(db), + AttrDefId::ConstId(it) => it.resolver(db), + AttrDefId::TraitId(it) => it.resolver(db), + AttrDefId::TraitAliasId(it) => it.resolver(db), + AttrDefId::TypeAliasId(it) => it.resolver(db), + AttrDefId::ImplId(it) => it.resolver(db), + AttrDefId::ExternBlockId(it) => it.resolver(db), + AttrDefId::UseId(it) => it.resolver(db), + AttrDefId::MacroId(it) => it.resolver(db), + AttrDefId::ExternCrateId(it) => it.resolver(db), AttrDefId::GenericParamId(_) => return None, }; let mut modpath = doc_modpath_from_str(link)?; - let resolved = resolver.resolve_module_path_in_items(db.upcast(), &modpath); + let resolved = resolver.resolve_module_path_in_items(db, &modpath); if resolved.is_none() { let last_name = modpath.pop_segment()?; resolve_assoc_or_field(db, resolver, modpath, last_name, ns) @@ -165,7 +168,7 @@ fn resolve_assoc_or_field( let path = Path::from_known_path_with_no_generic(path); // FIXME: This does not handle `Self` on trait definitions, which we should resolve to the // trait itself. - let base_def = resolver.resolve_path_in_type_ns_fully(db.upcast(), &path)?; + let base_def = resolver.resolve_path_in_type_ns_fully(db, &path)?; let ty = match base_def { TypeNs::SelfType(id) => Impl::from(id).self_ty(db), @@ -194,7 +197,7 @@ fn resolve_assoc_or_field( // Doc paths in this context may only resolve to an item of this trait // (i.e. no items of its supertraits), so we need to handle them here // independently of others. - return db.trait_data(id).items.iter().find(|it| it.0 == name).map(|(_, assoc_id)| { + return db.trait_items(id).items.iter().find(|it| it.0 == name).map(|(_, assoc_id)| { let def = match *assoc_id { AssocItemId::FunctionId(it) => ModuleDef::Function(it.into()), AssocItemId::ConstId(it) => ModuleDef::Const(it.into()), @@ -207,6 +210,9 @@ fn resolve_assoc_or_field( // XXX: Do these get resolved? return None; } + TypeNs::ModuleId(_) => { + return None; + } }; // Resolve inherent items first, then trait items, then fields. @@ -252,7 +258,7 @@ fn resolve_impl_trait_item( let environment = resolver .generic_def() .map_or_else(|| crate::TraitEnvironment::empty(krate.id), |d| db.trait_environment(d)); - let traits_in_scope = resolver.traits_in_scope(db.upcast()); + let traits_in_scope = resolver.traits_in_scope(db); let mut result = None; @@ -260,7 +266,7 @@ fn resolve_impl_trait_item( // attributes here. Use path resolution directly instead. // // FIXME: resolve type aliases (which are not yielded by iterate_path_candidates) - let _ = method_resolution::iterate_path_candidates( + _ = method_resolution::iterate_path_candidates( &canonical, db, environment, @@ -273,11 +279,7 @@ fn resolve_impl_trait_item( // disambiguation) so we just pick the first one we find as well. result = as_module_def_if_namespace_matches(assoc_item_id.into(), ns); - if result.is_some() { - ControlFlow::Break(()) - } else { - ControlFlow::Continue(()) - } + if result.is_some() { ControlFlow::Break(()) } else { ControlFlow::Continue(()) } }, ); diff --git a/src/tools/rust-analyzer/crates/hir/src/db.rs b/src/tools/rust-analyzer/crates/hir/src/db.rs index 22760c41aaecf..64d97b3f2a238 100644 --- a/src/tools/rust-analyzer/crates/hir/src/db.rs +++ b/src/tools/rust-analyzer/crates/hir/src/db.rs @@ -3,44 +3,43 @@ //! we didn't do that. //! //! But we need this for at least LRU caching at the query level. -pub use hir_def::db::{ - AttrsQuery, BlockDefMapQuery, BlockItemTreeQuery, BlockItemTreeWithSourceMapQuery, BodyQuery, - BodyWithSourceMapQuery, ConstDataQuery, ConstVisibilityQuery, CrateDefMapQuery, - CrateLangItemsQuery, CrateNotableTraitsQuery, CrateSupportsNoStdQuery, DefDatabase, - DefDatabaseStorage, EnumDataQuery, EnumVariantDataWithDiagnosticsQuery, - ExpandProcAttrMacrosQuery, ExprScopesQuery, ExternCrateDeclDataQuery, FieldVisibilitiesQuery, - FieldsAttrsQuery, FieldsAttrsSourceMapQuery, FileItemTreeQuery, FileItemTreeWithSourceMapQuery, - FunctionDataQuery, FunctionVisibilityQuery, GenericParamsQuery, - GenericParamsWithSourceMapQuery, ImplDataWithDiagnosticsQuery, ImportMapQuery, - IncludeMacroInvocQuery, InternAnonymousConstQuery, InternBlockQuery, InternConstQuery, - InternDatabase, InternDatabaseStorage, InternEnumQuery, InternExternBlockQuery, - InternExternCrateQuery, InternFunctionQuery, InternImplQuery, InternInTypeConstQuery, - InternMacro2Query, InternMacroRulesQuery, InternProcMacroQuery, InternStaticQuery, - InternStructQuery, InternTraitAliasQuery, InternTraitQuery, InternTypeAliasQuery, - InternUnionQuery, InternUseQuery, LangItemQuery, Macro2DataQuery, MacroDefQuery, - MacroRulesDataQuery, NotableTraitsInDepsQuery, ProcMacroDataQuery, StaticDataQuery, - StructDataWithDiagnosticsQuery, TraitAliasDataQuery, TraitDataWithDiagnosticsQuery, - TypeAliasDataQuery, UnionDataWithDiagnosticsQuery, -}; -pub use hir_expand::db::{ - AstIdMapQuery, DeclMacroExpanderQuery, ExpandDatabase, ExpandDatabaseStorage, - ExpandProcMacroQuery, InternMacroCallQuery, InternSyntaxContextQuery, MacroArgQuery, - ParseMacroExpansionErrorQuery, ParseMacroExpansionQuery, ProcMacroSpanQuery, ProcMacrosQuery, - RealSpanMapQuery, -}; -pub use hir_ty::db::{ - AdtDatumQuery, AdtVarianceQuery, AssociatedTyDataQuery, AssociatedTyValueQuery, BorrowckQuery, - CallableItemSignatureQuery, ConstEvalDiscriminantQuery, ConstEvalQuery, ConstEvalStaticQuery, - ConstParamTyQuery, DynCompatibilityOfTraitQuery, FieldTypesQuery, FnDefDatumQuery, - FnDefVarianceQuery, GenericDefaultsQuery, GenericPredicatesForParamQuery, - GenericPredicatesQuery, GenericPredicatesWithoutParentQuery, HirDatabase, HirDatabaseStorage, - ImplDatumQuery, ImplSelfTyQuery, ImplTraitQuery, IncoherentInherentImplCratesQuery, InferQuery, - InherentImplsInBlockQuery, InherentImplsInCrateQuery, InternCallableDefQuery, - InternClosureQuery, InternCoroutineQuery, InternImplTraitIdQuery, InternLifetimeParamIdQuery, - InternTypeOrConstParamIdQuery, LayoutOfAdtQuery, LayoutOfTyQuery, LookupImplMethodQuery, - MirBodyForClosureQuery, MirBodyQuery, MonomorphizedMirBodyForClosureQuery, - MonomorphizedMirBodyQuery, ProgramClausesForChalkEnvQuery, ReturnTypeImplTraitsQuery, - TargetDataLayoutQuery, TraitDatumQuery, TraitEnvironmentQuery, TraitImplsInBlockQuery, - TraitImplsInCrateQuery, TraitImplsInDepsQuery, TraitSolveQuery, TyQuery, - TypeAliasImplTraitsQuery, ValueTyQuery, -}; +pub use hir_def::db::DefDatabase; +// AttrsQuery, BlockDefMapQuery, BlockItemTreeQuery, BlockItemTreeWithSourceMapQuery, BodyQuery, +// BodyWithSourceMapQuery, ConstDataQuery, ConstVisibilityQuery, CrateDefMapQuery, +// CrateLangItemsQuery, CrateNotableTraitsQuery, CrateSupportsNoStdQuery, DefDatabase, +// DefDatabaseStorage, EnumDataQuery, EnumVariantDataWithDiagnosticsQuery, +// ExpandProcAttrMacrosQuery, ExprScopesQuery, ExternCrateDeclDataQuery, FieldVisibilitiesQuery, +// FieldsAttrsQuery, FieldsAttrsSourceMapQuery, FileItemTreeQuery, FileItemTreeWithSourceMapQuery, +// FunctionDataQuery, FunctionVisibilityQuery, GenericParamsQuery, +// GenericParamsWithSourceMapQuery, ImplItemsWithDiagnosticsQuery, ImportMapQuery, +// IncludeMacroInvocQuery, InternAnonymousConstQuery, InternBlockQuery, InternConstQuery, +// InternDatabase, InternDatabaseStorage, InternEnumQuery, InternExternBlockQuery, +// InternExternCrateQuery, InternFunctionQuery, InternImplQuery, InternInTypeConstQuery, +// InternMacro2Query, InternMacroRulesQuery, InternProcMacroQuery, InternStaticQuery, +// InternStructQuery, InternTraitAliasQuery, InternTraitQuery, InternTypeAliasQuery, +// InternUnionQuery, InternUseQuery, LangItemQuery, Macro2DataQuery, MacroDefQuery, +// MacroRulesDataQuery, NotableTraitsInDepsQuery, ProcMacroDataQuery, StaticDataQuery, +// StructDataWithDiagnosticsQuery, TraitAliasDataQuery, TraitItemsWithDiagnosticsQuery, +// TypeAliasDataQuery, UnionDataWithDiagnosticsQuery, +// }; +pub use hir_expand::db::ExpandDatabase; +// AstIdMapQuery, DeclMacroExpanderQuery, ExpandDatabase, ExpandDatabaseStorage, +// ExpandProcMacroQuery, InternMacroCallQuery, InternSyntaxContextQuery, MacroArgQuery, +// ParseMacroExpansionErrorQuery, ParseMacroExpansionQuery, ProcMacroSpanQuery, ProcMacrosQuery, +// RealSpanMapQuery, +pub use hir_ty::db::HirDatabase; +// AdtDatumQuery, AdtVarianceQuery, AssociatedTyDataQuery, AssociatedTyValueQuery, BorrowckQuery, +// CallableItemSignatureQuery, ConstEvalDiscriminantQuery, ConstEvalQuery, ConstEvalStaticQuery, +// ConstParamTyQuery, DynCompatibilityOfTraitQuery, FieldTypesQuery, FnDefDatumQuery, +// FnDefVarianceQuery, GenericDefaultsQuery, GenericPredicatesForParamQuery, +// GenericPredicatesQuery, GenericPredicatesWithoutParentQuery, HirDatabase, HirDatabaseStorage, +// ImplDatumQuery, ImplSelfTyQuery, ImplTraitQuery, IncoherentInherentImplCratesQuery, InferQuery, +// InherentImplsInBlockQuery, InherentImplsInCrateQuery, InternCallableDefQuery, +// InternClosureQuery, InternCoroutineQuery, InternImplTraitIdQuery, InternLifetimeParamIdQuery, +// InternTypeOrConstParamIdQuery, LayoutOfAdtQuery, LayoutOfTyQuery, LookupImplMethodQuery, +// MirBodyForClosureQuery, MirBodyQuery, MonomorphizedMirBodyForClosureQuery, +// MonomorphizedMirBodyQuery, ProgramClausesForChalkEnvQuery, ReturnTypeImplTraitsQuery, +// TargetDataLayoutQuery, TraitDatumQuery, TraitEnvironmentQuery, TraitImplsInBlockQuery, +// TraitImplsInCrateQuery, TraitImplsInDepsQuery, TraitSolveQuery, TyQuery, +// TypeAliasImplTraitsQuery, ValueTyQuery, +// }; diff --git a/src/tools/rust-analyzer/crates/hir/src/diagnostics.rs b/src/tools/rust-analyzer/crates/hir/src/diagnostics.rs index 1ed0daa375630..b6e3002ed5d4e 100644 --- a/src/tools/rust-analyzer/crates/hir/src/diagnostics.rs +++ b/src/tools/rust-analyzer/crates/hir/src/diagnostics.rs @@ -6,31 +6,33 @@ use cfg::{CfgExpr, CfgOptions}; use either::Either; use hir_def::{ - expr_store::ExprOrPatPtr, + DefWithBodyId, GenericParamId, SyntheticSyntax, + expr_store::{ + ExprOrPatPtr, ExpressionStoreSourceMap, hir_assoc_type_binding_to_ast, + hir_generic_arg_to_ast, hir_segment_to_ast_segment, + }, hir::ExprOrPatId, - path::{hir_segment_to_ast_segment, ModPath}, - type_ref::TypesSourceMap, - DefWithBodyId, SyntheticSyntax, }; -use hir_expand::{name::Name, HirFileId, InFile}; +use hir_expand::{HirFileId, InFile, mod_path::ModPath, name::Name}; use hir_ty::{ + CastError, InferenceDiagnostic, InferenceTyDiagnosticSource, PathGenericsSource, + PathLoweringDiagnostic, TyLoweringDiagnostic, TyLoweringDiagnosticKind, db::HirDatabase, diagnostics::{BodyValidationDiagnostic, UnsafetyReason}, - CastError, InferenceDiagnostic, InferenceTyDiagnosticSource, PathLoweringDiagnostic, - TyLoweringDiagnostic, TyLoweringDiagnosticKind, }; use syntax::{ + AstNode, AstPtr, SyntaxError, SyntaxNodePtr, TextRange, ast::{self, HasGenericArgs}, - match_ast, AstNode, AstPtr, SyntaxError, SyntaxNodePtr, TextRange, + match_ast, }; use triomphe::Arc; -use crate::{AssocItem, Field, Function, Local, Trait, Type}; +use crate::{AssocItem, Field, Function, GenericDef, Local, Trait, Type}; pub use hir_def::VariantId; pub use hir_ty::{ + GenericArgsProhibitedReason, IncorrectGenericsLenKind, diagnostics::{CaseType, IncorrectCase}, - GenericArgsProhibitedReason, }; macro_rules! diagnostics { @@ -113,6 +115,11 @@ diagnostics![ UnusedVariable, GenericArgsProhibited, ParenthesizedGenericArgsWithoutFnTrait, + BadRtn, + IncorrectGenericsLen, + IncorrectGenericsOrder, + MissingLifetime, + ElidedLifetimesInPath, ]; #[derive(Debug)] @@ -420,6 +427,61 @@ pub struct ParenthesizedGenericArgsWithoutFnTrait { pub args: InFile>, } +#[derive(Debug)] +pub struct BadRtn { + pub rtn: InFile>, +} + +#[derive(Debug)] +pub struct IncorrectGenericsLen { + /// Points at the name if there are no generics. + pub generics_or_segment: InFile>>, + pub kind: IncorrectGenericsLenKind, + pub provided: u32, + pub expected: u32, + pub def: GenericDef, +} + +#[derive(Debug)] +pub struct MissingLifetime { + /// Points at the name if there are no generics. + pub generics_or_segment: InFile>>, + pub expected: u32, + pub def: GenericDef, +} + +#[derive(Debug)] +pub struct ElidedLifetimesInPath { + /// Points at the name if there are no generics. + pub generics_or_segment: InFile>>, + pub expected: u32, + pub def: GenericDef, + pub hard_error: bool, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum GenericArgKind { + Lifetime, + Type, + Const, +} + +impl GenericArgKind { + fn from_id(id: GenericParamId) -> Self { + match id { + GenericParamId::TypeParamId(_) => GenericArgKind::Type, + GenericParamId::ConstParamId(_) => GenericArgKind::Const, + GenericParamId::LifetimeParamId(_) => GenericArgKind::Lifetime, + } + } +} + +#[derive(Debug)] +pub struct IncorrectGenericsOrder { + pub provided_arg: InFile>, + pub expected_kind: GenericArgKind, +} + impl AnyDiagnostic { pub(crate) fn body_validation_diagnostic( db: &dyn HirDatabase, @@ -428,7 +490,7 @@ impl AnyDiagnostic { ) -> Option { match diagnostic { BodyValidationDiagnostic::RecordMissingFields { record, variant, missed_fields } => { - let variant_data = variant.variant_data(db.upcast()); + let variant_data = variant.variant_data(db); let missed_fields = missed_fields .into_iter() .map(|idx| variant_data.fields()[idx].name.clone()) @@ -439,7 +501,7 @@ impl AnyDiagnostic { Either::Right(record_pat) => source_map.pat_syntax(record_pat).ok()?, }; let file = record.file_id; - let root = record.file_syntax(db.upcast()); + let root = record.file_syntax(db); match record.value.to_node(&root) { Either::Left(ast::Expr::RecordExpr(record_expr)) => { if record_expr.record_expr_field_list().is_some() { @@ -488,7 +550,7 @@ impl AnyDiagnostic { BodyValidationDiagnostic::MissingMatchArms { match_expr, uncovered_patterns } => { match source_map.expr_syntax(match_expr) { Ok(source_ptr) => { - let root = source_ptr.file_syntax(db.upcast()); + let root = source_ptr.file_syntax(db); if let Either::Left(ast::Expr::MatchExpr(match_expr)) = &source_ptr.value.to_node(&root) { @@ -559,14 +621,21 @@ impl AnyDiagnostic { db: &dyn HirDatabase, def: DefWithBodyId, d: &InferenceDiagnostic, - outer_types_source_map: &TypesSourceMap, source_map: &hir_def::expr_store::BodySourceMap, + sig_map: &hir_def::expr_store::ExpressionStoreSourceMap, ) -> Option { let expr_syntax = |expr| { - source_map.expr_syntax(expr).inspect_err(|_| stdx::never!("synthetic syntax")).ok() + source_map + .expr_syntax(expr) + .inspect_err(|_| stdx::never!("inference diagnostic in desugared expr")) + .ok() + }; + let pat_syntax = |pat| { + source_map + .pat_syntax(pat) + .inspect_err(|_| stdx::never!("inference diagnostic in desugared pattern")) + .ok() }; - let pat_syntax = - |pat| source_map.pat_syntax(pat).inspect_err(|_| stdx::never!("synthetic syntax")).ok(); let expr_or_pat_syntax = |id| match id { ExprOrPatId::ExprId(expr) => expr_syntax(expr), ExprOrPatId::PatId(pat) => pat_syntax(pat), @@ -682,8 +751,8 @@ impl AnyDiagnostic { } InferenceDiagnostic::TyDiagnostic { source, diag } => { let source_map = match source { - InferenceTyDiagnosticSource::Body => &source_map.types, - InferenceTyDiagnosticSource::Signature => outer_types_source_map, + InferenceTyDiagnosticSource::Body => source_map, + InferenceTyDiagnosticSource::Signature => sig_map, }; Self::ty_diagnostic(diag, source_map, db)? } @@ -702,6 +771,47 @@ impl AnyDiagnostic { }; Self::path_diagnostic(diag, source.with_value(path))? } + &InferenceDiagnostic::MethodCallIncorrectGenericsLen { + expr, + provided_count, + expected_count, + kind, + def, + } => { + let syntax = expr_syntax(expr)?; + let file_id = syntax.file_id; + let syntax = + syntax.with_value(syntax.value.cast::()?).to_node(db); + let generics_or_name = syntax + .generic_arg_list() + .map(Either::Left) + .or_else(|| syntax.name_ref().map(Either::Right))?; + let generics_or_name = InFile::new(file_id, AstPtr::new(&generics_or_name)); + IncorrectGenericsLen { + generics_or_segment: generics_or_name, + kind, + provided: provided_count, + expected: expected_count, + def: def.into(), + } + .into() + } + &InferenceDiagnostic::MethodCallIncorrectGenericsOrder { + expr, + param_id, + arg_idx, + has_self_arg, + } => { + let syntax = expr_syntax(expr)?; + let file_id = syntax.file_id; + let syntax = + syntax.with_value(syntax.value.cast::()?).to_node(db); + let generic_args = syntax.generic_arg_list()?; + let provided_arg = hir_generic_arg_to_ast(&generic_args, arg_idx, has_self_arg)?; + let provided_arg = InFile::new(file_id, AstPtr::new(&provided_arg)); + let expected_kind = GenericArgKind::from_id(param_id); + IncorrectGenericsOrder { provided_arg, expected_kind }.into() + } }) } @@ -712,6 +822,12 @@ impl AnyDiagnostic { Some(match *diag { PathLoweringDiagnostic::GenericArgsProhibited { segment, reason } => { let segment = hir_segment_to_ast_segment(&path.value, segment)?; + + if let Some(rtn) = segment.return_type_syntax() { + // RTN errors are emitted as `GenericArgsProhibited` or `ParenthesizedGenericArgsWithoutFnTrait`. + return Some(BadRtn { rtn: path.with_value(AstPtr::new(&rtn)) }.into()); + } + let args = if let Some(generics) = segment.generic_arg_list() { AstPtr::new(&generics).wrap_left() } else { @@ -722,27 +838,84 @@ impl AnyDiagnostic { } PathLoweringDiagnostic::ParenthesizedGenericArgsWithoutFnTrait { segment } => { let segment = hir_segment_to_ast_segment(&path.value, segment)?; + + if let Some(rtn) = segment.return_type_syntax() { + // RTN errors are emitted as `GenericArgsProhibited` or `ParenthesizedGenericArgsWithoutFnTrait`. + return Some(BadRtn { rtn: path.with_value(AstPtr::new(&rtn)) }.into()); + } + let args = AstPtr::new(&segment.parenthesized_arg_list()?); let args = path.with_value(args); ParenthesizedGenericArgsWithoutFnTrait { args }.into() } + PathLoweringDiagnostic::IncorrectGenericsLen { + generics_source, + provided_count, + expected_count, + kind, + def, + } => { + let generics_or_segment = + path_generics_source_to_ast(&path.value, generics_source)?; + let generics_or_segment = path.with_value(AstPtr::new(&generics_or_segment)); + IncorrectGenericsLen { + generics_or_segment, + kind, + provided: provided_count, + expected: expected_count, + def: def.into(), + } + .into() + } + PathLoweringDiagnostic::IncorrectGenericsOrder { + generics_source, + param_id, + arg_idx, + has_self_arg, + } => { + let generic_args = + path_generics_source_to_ast(&path.value, generics_source)?.left()?; + let provided_arg = hir_generic_arg_to_ast(&generic_args, arg_idx, has_self_arg)?; + let provided_arg = path.with_value(AstPtr::new(&provided_arg)); + let expected_kind = GenericArgKind::from_id(param_id); + IncorrectGenericsOrder { provided_arg, expected_kind }.into() + } + PathLoweringDiagnostic::MissingLifetime { generics_source, expected_count, def } + | PathLoweringDiagnostic::ElisionFailure { generics_source, expected_count, def } => { + let generics_or_segment = + path_generics_source_to_ast(&path.value, generics_source)?; + let generics_or_segment = path.with_value(AstPtr::new(&generics_or_segment)); + MissingLifetime { generics_or_segment, expected: expected_count, def: def.into() } + .into() + } + PathLoweringDiagnostic::ElidedLifetimesInPath { + generics_source, + expected_count, + def, + hard_error, + } => { + let generics_or_segment = + path_generics_source_to_ast(&path.value, generics_source)?; + let generics_or_segment = path.with_value(AstPtr::new(&generics_or_segment)); + ElidedLifetimesInPath { + generics_or_segment, + expected: expected_count, + def: def.into(), + hard_error, + } + .into() + } }) } pub(crate) fn ty_diagnostic( diag: &TyLoweringDiagnostic, - source_map: &TypesSourceMap, + source_map: &ExpressionStoreSourceMap, db: &dyn HirDatabase, ) -> Option { - let source = match diag.source { - Either::Left(type_ref_id) => { - let Ok(source) = source_map.type_syntax(type_ref_id) else { - stdx::never!("error on synthetic type syntax"); - return None; - }; - source - } - Either::Right(source) => source, + let Ok(source) = source_map.type_syntax(diag.source) else { + stdx::never!("error on synthetic type syntax"); + return None; }; let syntax = || source.value.to_node(&db.parse_or_expand(source.file_id)); Some(match &diag.kind { @@ -753,3 +926,27 @@ impl AnyDiagnostic { }) } } + +fn path_generics_source_to_ast( + path: &ast::Path, + generics_source: PathGenericsSource, +) -> Option> { + Some(match generics_source { + PathGenericsSource::Segment(segment) => { + let segment = hir_segment_to_ast_segment(path, segment)?; + segment + .generic_arg_list() + .map(Either::Left) + .or_else(|| segment.name_ref().map(Either::Right))? + } + PathGenericsSource::AssocType { segment, assoc_type } => { + let segment = hir_segment_to_ast_segment(path, segment)?; + let segment_args = segment.generic_arg_list()?; + let assoc = hir_assoc_type_binding_to_ast(&segment_args, assoc_type)?; + assoc + .generic_arg_list() + .map(Either::Left) + .or_else(|| assoc.name_ref().map(Either::Right))? + } + }) +} diff --git a/src/tools/rust-analyzer/crates/hir/src/display.rs b/src/tools/rust-analyzer/crates/hir/src/display.rs index 6f4168ab0867d..124ab8e274af8 100644 --- a/src/tools/rust-analyzer/crates/hir/src/display.rs +++ b/src/tools/rust-analyzer/crates/hir/src/display.rs @@ -1,59 +1,59 @@ //! HirDisplay implementations for various hir types. + use either::Either; use hir_def::{ - data::{ - adt::{StructKind, VariantData}, - TraitFlags, - }, - generics::{ - GenericParams, TypeOrConstParamData, TypeParamProvenance, WherePredicate, - WherePredicateTypeTarget, - }, - lang_item::LangItem, - type_ref::{TypeBound, TypeRef}, AdtId, GenericDefId, + expr_store::ExpressionStore, + hir::generics::{GenericParams, TypeOrConstParamData, TypeParamProvenance, WherePredicate}, + item_tree::FieldsShape, + lang_item::LangItem, + signatures::{StaticFlags, TraitFlags}, + type_ref::{TypeBound, TypeRef, TypeRefId}, }; use hir_ty::{ + AliasEq, AliasTy, Interner, ProjectionTyExt, TraitRefExt, TyKind, WhereClause, + db::HirDatabase, display::{ - hir_display_with_types_map, write_bounds_like_dyn_trait_with_prefix, write_visibility, - HirDisplay, HirDisplayError, HirDisplayWithTypesMap, HirFormatter, SizedByDefault, + HirDisplay, HirDisplayError, HirDisplayWithExpressionStore, HirFormatter, SizedByDefault, + hir_display_with_store, write_bounds_like_dyn_trait_with_prefix, write_visibility, }, - AliasEq, AliasTy, Interner, ProjectionTyExt, TraitRefExt, TyKind, WhereClause, }; use itertools::Itertools; use crate::{ Adt, AsAssocItem, AssocItem, AssocItemContainer, Const, ConstParam, Crate, Enum, ExternCrateDecl, Field, Function, GenericParam, HasCrate, HasVisibility, Impl, LifetimeParam, - Macro, Module, SelfParam, Static, Struct, Trait, TraitAlias, TraitRef, TupleField, TyBuilder, - Type, TypeAlias, TypeOrConstParam, TypeParam, Union, Variant, + Macro, Module, SelfParam, Static, Struct, StructKind, Trait, TraitAlias, TraitRef, TupleField, + TyBuilder, Type, TypeAlias, TypeOrConstParam, TypeParam, Union, Variant, }; impl HirDisplay for Function { fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> { let db = f.db; - let data = db.function_data(self.id); + let data = db.function_signature(self.id); let container = self.as_assoc_item(db).map(|it| it.container(db)); let mut module = self.module(db); // Write container (trait or impl) let container_params = match container { Some(AssocItemContainer::Trait(trait_)) => { - let params = f.db.generic_params(trait_.id.into()); + let (params, params_store) = f.db.generic_params_and_store(trait_.id.into()); if f.show_container_bounds() && !params.is_empty() { write_trait_header(&trait_, f)?; f.write_char('\n')?; - has_disaplayable_predicates(¶ms).then_some(params) + has_disaplayable_predicates(f.db, ¶ms, ¶ms_store) + .then_some((params, params_store)) } else { None } } Some(AssocItemContainer::Impl(impl_)) => { - let params = f.db.generic_params(impl_.id.into()); + let (params, params_store) = f.db.generic_params_and_store(impl_.id.into()); if f.show_container_bounds() && !params.is_empty() { write_impl_header(&impl_, f)?; f.write_char('\n')?; - has_disaplayable_predicates(¶ms).then_some(params) + has_disaplayable_predicates(f.db, ¶ms, ¶ms_store) + .then_some((params, params_store)) } else { None } @@ -88,7 +88,7 @@ impl HirDisplay for Function { if let Some(abi) = &data.abi { write!(f, "extern \"{}\" ", abi.as_str())?; } - write!(f, "fn {}", data.name.display(f.db.upcast(), f.edition()))?; + write!(f, "fn {}", data.name.display(f.db, f.edition()))?; write_generic_params(GenericDefId::FunctionId(self.id), f)?; @@ -112,12 +112,11 @@ impl HirDisplay for Function { } let pat_id = body.params[param.idx - body.self_param.is_some() as usize]; - let pat_str = - body.pretty_print_pat(db.upcast(), self.id.into(), pat_id, true, f.edition()); + let pat_str = body.pretty_print_pat(db, self.id.into(), pat_id, true, f.edition()); f.write_str(&pat_str)?; f.write_str(": ")?; - type_ref.hir_fmt(f, &data.types_map)?; + type_ref.hir_fmt(f, &data.store)?; } if data.is_varargs() { @@ -133,12 +132,12 @@ impl HirDisplay for Function { // Use ugly pattern match to strip the Future trait. // Better way? let ret_type = if !data.is_async() { - Some(data.ret_type) - } else { - match &data.types_map[data.ret_type] { + data.ret_type + } else if let Some(ret_type) = data.ret_type { + match &data.store[ret_type] { TypeRef::ImplTrait(bounds) => match &bounds[0] { &TypeBound::Path(path, _) => Some( - *data.types_map[path] + *data.store[path] .segments() .iter() .last() @@ -154,21 +153,23 @@ impl HirDisplay for Function { }, _ => None, } + } else { + None }; if let Some(ret_type) = ret_type { - match &data.types_map[ret_type] { + match &data.store[ret_type] { TypeRef::Tuple(tup) if tup.is_empty() => {} _ => { f.write_str(" -> ")?; - ret_type.hir_fmt(f, &data.types_map)?; + ret_type.hir_fmt(f, &data.store)?; } } } // Write where clauses let has_written_where = write_where_clause(GenericDefId::FunctionId(self.id), f)?; - if let Some(container_params) = container_params { + if let Some((container_params, container_params_store)) = container_params { if !has_written_where { f.write_str("\nwhere")?; } @@ -177,7 +178,7 @@ impl HirDisplay for Function { AssocItemContainer::Impl(_) => "impl", }; write!(f, "\n // Bounds from {container_name}:",)?; - write_where_predicates(&container_params, f)?; + write_where_predicates(&container_params, &container_params_store, f)?; } Ok(()) } @@ -191,8 +192,8 @@ fn write_impl_header(impl_: &Impl, f: &mut HirFormatter<'_>) -> Result<(), HirDi write_generic_params(def_id, f)?; if let Some(trait_) = impl_.trait_(db) { - let trait_data = db.trait_data(trait_.id); - write!(f, " {} for", trait_data.name.display(db.upcast(), f.edition()))?; + let trait_data = db.trait_signature(trait_.id); + write!(f, " {} for", trait_data.name.display(db, f.edition()))?; } f.write_char(' ')?; @@ -203,15 +204,16 @@ fn write_impl_header(impl_: &Impl, f: &mut HirFormatter<'_>) -> Result<(), HirDi impl HirDisplay for SelfParam { fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> { - let data = f.db.function_data(self.func); + let data = f.db.function_signature(self.func); let param = *data.params.first().unwrap(); - match &data.types_map[param] { + match &data.store[param] { TypeRef::Path(p) if p.is_self_type() => f.write_str("self"), - TypeRef::Reference(ref_) if matches!(&data.types_map[ref_.ty], TypeRef::Path(p) if p.is_self_type()) => + TypeRef::Reference(ref_) if matches!(&data.store[ref_.ty], TypeRef::Path(p) if p.is_self_type()) => { f.write_char('&')?; if let Some(lifetime) = &ref_.lifetime { - write!(f, "{} ", lifetime.name.display(f.db.upcast(), f.edition()))?; + lifetime.hir_fmt(f, &data.store)?; + f.write_char(' ')?; } if let hir_def::type_ref::Mutability::Mut = ref_.mutability { f.write_str("mut ")?; @@ -220,7 +222,7 @@ impl HirDisplay for SelfParam { } _ => { f.write_str("self: ")?; - param.hir_fmt(f, &data.types_map) + param.hir_fmt(f, &data.store) } } } @@ -242,12 +244,12 @@ impl HirDisplay for Struct { // FIXME: Render repr if its set explicitly? write_visibility(module_id, self.visibility(f.db), f)?; f.write_str("struct ")?; - write!(f, "{}", self.name(f.db).display(f.db.upcast(), f.edition()))?; + write!(f, "{}", self.name(f.db).display(f.db, f.edition()))?; let def_id = GenericDefId::AdtId(AdtId::StructId(self.id)); write_generic_params(def_id, f)?; - let variant_data = self.variant_data(f.db); - match variant_data.kind() { + let variant_data = self.variant_fields(f.db); + match self.kind(f.db) { StructKind::Tuple => { f.write_char('(')?; let mut it = variant_data.fields().iter().peekable(); @@ -281,7 +283,7 @@ impl HirDisplay for Enum { fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> { write_visibility(self.module(f.db).id, self.visibility(f.db), f)?; f.write_str("enum ")?; - write!(f, "{}", self.name(f.db).display(f.db.upcast(), f.edition()))?; + write!(f, "{}", self.name(f.db).display(f.db, f.edition()))?; let def_id = GenericDefId::AdtId(AdtId::EnumId(self.id)); write_generic_params(def_id, f)?; @@ -298,7 +300,7 @@ impl HirDisplay for Union { fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> { write_visibility(self.module(f.db).id, self.visibility(f.db), f)?; f.write_str("union ")?; - write!(f, "{}", self.name(f.db).display(f.db.upcast(), f.edition()))?; + write!(f, "{}", self.name(f.db).display(f.db, f.edition()))?; let def_id = GenericDefId::AdtId(AdtId::UnionId(self.id)); write_generic_params(def_id, f)?; @@ -358,7 +360,7 @@ fn write_variants( } else { f.write_str("{\n")?; for variant in &variants[..count] { - write!(f, " {}", variant.name(f.db).display(f.db.upcast(), f.edition()))?; + write!(f, " {}", variant.name(f.db).display(f.db, f.edition()))?; match variant.kind(f.db) { StructKind::Tuple => { let fields_str = @@ -387,39 +389,39 @@ fn write_variants( impl HirDisplay for Field { fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> { write_visibility(self.parent.module(f.db).id, self.visibility(f.db), f)?; - write!(f, "{}: ", self.name(f.db).display(f.db.upcast(), f.edition()))?; + write!(f, "{}: ", self.name(f.db).display(f.db, f.edition()))?; self.ty(f.db).hir_fmt(f) } } impl HirDisplay for TupleField { fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> { - write!(f, "pub {}: ", self.name().display(f.db.upcast(), f.edition()))?; + write!(f, "pub {}: ", self.name().display(f.db, f.edition()))?; self.ty(f.db).hir_fmt(f) } } impl HirDisplay for Variant { fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> { - write!(f, "{}", self.name(f.db).display(f.db.upcast(), f.edition()))?; - let data = self.variant_data(f.db); - match &*data { - VariantData::Unit => {} - VariantData::Tuple { fields, types_map } => { + write!(f, "{}", self.name(f.db).display(f.db, f.edition()))?; + let data = f.db.variant_fields(self.id.into()); + match data.shape { + FieldsShape::Unit => {} + FieldsShape::Tuple => { f.write_char('(')?; let mut first = true; - for (_, field) in fields.iter() { + for (_, field) in data.fields().iter() { if first { first = false; } else { f.write_str(", ")?; } // Enum variant fields must be pub. - field.type_ref.hir_fmt(f, types_map)?; + field.type_ref.hir_fmt(f, &data.store)?; } f.write_char(')')?; } - VariantData::Record { .. } => { + FieldsShape::Record => { if let Some(limit) = f.entity_limit { write_fields(&self.fields(f.db), false, limit, true, f)?; } @@ -439,7 +441,7 @@ impl HirDisplay for ExternCrateDecl { fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> { write_visibility(self.module(f.db).id, self.visibility(f.db), f)?; f.write_str("extern crate ")?; - write!(f, "{}", self.name(f.db).display(f.db.upcast(), f.edition()))?; + write!(f, "{}", self.name(f.db).display(f.db, f.edition()))?; if let Some(alias) = self.alias(f.db) { write!(f, " as {}", alias.display(f.edition()))?; } @@ -493,7 +495,7 @@ impl HirDisplay for TypeParam { match param_data { TypeOrConstParamData::TypeParamData(p) => match p.provenance { TypeParamProvenance::TypeParamList | TypeParamProvenance::TraitSelf => { - write!(f, "{}", p.name.clone().unwrap().display(f.db.upcast(), f.edition()))? + write!(f, "{}", p.name.clone().unwrap().display(f.db, f.edition()))? } TypeParamProvenance::ArgumentImplTrait => { return write_bounds_like_dyn_trait_with_prefix( @@ -506,7 +508,7 @@ impl HirDisplay for TypeParam { } }, TypeOrConstParamData::ConstParamData(p) => { - write!(f, "{}", p.name.display(f.db.upcast(), f.edition()))?; + write!(f, "{}", p.name.display(f.db, f.edition()))?; } } @@ -514,8 +516,7 @@ impl HirDisplay for TypeParam { return Ok(()); } - let sized_trait = - f.db.lang_item(krate, LangItem::Sized).and_then(|lang_item| lang_item.as_trait()); + let sized_trait = LangItem::Sized.resolve_trait(f.db, krate); let has_only_sized_bound = predicates.iter().all(move |pred| match pred.skip_binders() { WhereClause::Implemented(it) => Some(it.hir_trait_id()) == sized_trait, _ => false, @@ -540,13 +541,13 @@ impl HirDisplay for TypeParam { impl HirDisplay for LifetimeParam { fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> { - write!(f, "{}", self.name(f.db).display(f.db.upcast(), f.edition())) + write!(f, "{}", self.name(f.db).display(f.db, f.edition())) } } impl HirDisplay for ConstParam { fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> { - write!(f, "const {}: ", self.name(f.db).display(f.db.upcast(), f.edition()))?; + write!(f, "const {}: ", self.name(f.db).display(f.db, f.edition()))?; self.ty(f.db).hir_fmt(f) } } @@ -555,7 +556,7 @@ fn write_generic_params( def: GenericDefId, f: &mut HirFormatter<'_>, ) -> Result<(), HirDisplayError> { - let params = f.db.generic_params(def); + let (params, store) = f.db.generic_params_and_store(def); if params.iter_lt().next().is_none() && params.iter_type_or_consts().all(|it| it.1.const_param().is_none()) && params @@ -578,7 +579,7 @@ fn write_generic_params( }; for (_, lifetime) in params.iter_lt() { delim(f)?; - write!(f, "{}", lifetime.name.display(f.db.upcast(), f.edition()))?; + write!(f, "{}", lifetime.name.display(f.db, f.edition()))?; } for (_, ty) in params.iter_type_or_consts() { if let Some(name) = &ty.name() { @@ -588,20 +589,20 @@ fn write_generic_params( continue; } delim(f)?; - write!(f, "{}", name.display(f.db.upcast(), f.edition()))?; + write!(f, "{}", name.display(f.db, f.edition()))?; if let Some(default) = &ty.default { f.write_str(" = ")?; - default.hir_fmt(f, ¶ms.types_map)?; + default.hir_fmt(f, &store)?; } } TypeOrConstParamData::ConstParamData(c) => { delim(f)?; - write!(f, "const {}: ", name.display(f.db.upcast(), f.edition()))?; - c.ty.hir_fmt(f, ¶ms.types_map)?; + write!(f, "const {}: ", name.display(f.db, f.edition()))?; + c.ty.hir_fmt(f, &store)?; if let Some(default) = &c.default { f.write_str(" = ")?; - write!(f, "{}", default.display(f.db.upcast(), f.edition()))?; + default.hir_fmt(f, &store)?; } } } @@ -616,48 +617,47 @@ fn write_where_clause( def: GenericDefId, f: &mut HirFormatter<'_>, ) -> Result { - let params = f.db.generic_params(def); - if !has_disaplayable_predicates(¶ms) { + let (params, store) = f.db.generic_params_and_store(def); + if !has_disaplayable_predicates(f.db, ¶ms, &store) { return Ok(false); } f.write_str("\nwhere")?; - write_where_predicates(¶ms, f)?; + write_where_predicates(¶ms, &store, f)?; Ok(true) } -fn has_disaplayable_predicates(params: &GenericParams) -> bool { +fn has_disaplayable_predicates( + db: &dyn HirDatabase, + params: &GenericParams, + store: &ExpressionStore, +) -> bool { params.where_predicates().any(|pred| { !matches!( pred, - WherePredicate::TypeBound { target: WherePredicateTypeTarget::TypeOrConstParam(id), .. } - if params[*id].name().is_none() + WherePredicate::TypeBound { target, .. } + if matches!(store[*target], + TypeRef::TypeParam(id) if db.generic_params(id.parent())[id.local_id()].name().is_none() + ) ) }) } fn write_where_predicates( params: &GenericParams, + store: &ExpressionStore, f: &mut HirFormatter<'_>, ) -> Result<(), HirDisplayError> { use WherePredicate::*; // unnamed type targets are displayed inline with the argument itself, e.g. `f: impl Y`. - let is_unnamed_type_target = |params: &GenericParams, target: &WherePredicateTypeTarget| { - matches!(target, - WherePredicateTypeTarget::TypeOrConstParam(id) if params[*id].name().is_none() + let is_unnamed_type_target = |target: TypeRefId| { + matches!(store[target], + TypeRef::TypeParam(id) if f.db.generic_params(id.parent())[id.local_id()].name().is_none() ) }; - let write_target = |target: &WherePredicateTypeTarget, f: &mut HirFormatter<'_>| match target { - WherePredicateTypeTarget::TypeRef(ty) => ty.hir_fmt(f, ¶ms.types_map), - WherePredicateTypeTarget::TypeOrConstParam(id) => match params[*id].name() { - Some(name) => write!(f, "{}", name.display(f.db.upcast(), f.edition())), - None => f.write_str("{unnamed}"), - }, - }; - let check_same_target = |pred1: &WherePredicate, pred2: &WherePredicate| match (pred1, pred2) { (TypeBound { target: t1, .. }, TypeBound { target: t2, .. }) => t1 == t2, (Lifetime { target: t1, .. }, Lifetime { target: t2, .. }) => t1 == t2, @@ -670,41 +670,36 @@ fn write_where_predicates( let mut iter = params.where_predicates().peekable(); while let Some(pred) = iter.next() { - if matches!(pred, TypeBound { target, .. } if is_unnamed_type_target(params, target)) { + if matches!(pred, TypeBound { target, .. } if is_unnamed_type_target(*target)) { continue; } f.write_str("\n ")?; match pred { TypeBound { target, bound } => { - write_target(target, f)?; + target.hir_fmt(f, store)?; f.write_str(": ")?; - bound.hir_fmt(f, ¶ms.types_map)?; + bound.hir_fmt(f, store)?; } Lifetime { target, bound } => { - let target = target.name.display(f.db.upcast(), f.edition()); - let bound = bound.name.display(f.db.upcast(), f.edition()); - write!(f, "{target}: {bound}")?; + target.hir_fmt(f, store)?; + write!(f, ": ")?; + bound.hir_fmt(f, store)?; } ForLifetime { lifetimes, target, bound } => { - let lifetimes = - lifetimes.iter().map(|it| it.display(f.db.upcast(), f.edition())).join(", "); + let lifetimes = lifetimes.iter().map(|it| it.display(f.db, f.edition())).join(", "); write!(f, "for<{lifetimes}> ")?; - write_target(target, f)?; + target.hir_fmt(f, store)?; f.write_str(": ")?; - bound.hir_fmt(f, ¶ms.types_map)?; + bound.hir_fmt(f, store)?; } } while let Some(nxt) = iter.next_if(|nxt| check_same_target(pred, nxt)) { f.write_str(" + ")?; match nxt { - TypeBound { bound, .. } | ForLifetime { bound, .. } => { - bound.hir_fmt(f, ¶ms.types_map)? - } - Lifetime { bound, .. } => { - write!(f, "{}", bound.name.display(f.db.upcast(), f.edition()))? - } + TypeBound { bound, .. } | ForLifetime { bound, .. } => bound.hir_fmt(f, store)?, + Lifetime { bound, .. } => bound.hir_fmt(f, store)?, } } f.write_str(",")?; @@ -723,13 +718,13 @@ impl HirDisplay for Const { module = module.nearest_non_block_module(db); } write_visibility(module.id, self.visibility(db), f)?; - let data = db.const_data(self.id); + let data = db.const_signature(self.id); f.write_str("const ")?; match &data.name { - Some(name) => write!(f, "{}: ", name.display(f.db.upcast(), f.edition()))?, + Some(name) => write!(f, "{}: ", name.display(f.db, f.edition()))?, None => f.write_str("_: ")?, } - data.type_ref.hir_fmt(f, &data.types_map)?; + data.type_ref.hir_fmt(f, &data.store)?; Ok(()) } } @@ -737,13 +732,13 @@ impl HirDisplay for Const { impl HirDisplay for Static { fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> { write_visibility(self.module(f.db).id, self.visibility(f.db), f)?; - let data = f.db.static_data(self.id); + let data = f.db.static_signature(self.id); f.write_str("static ")?; - if data.mutable { + if data.flags.contains(StaticFlags::MUTABLE) { f.write_str("mut ")?; } - write!(f, "{}: ", data.name.display(f.db.upcast(), f.edition()))?; - data.type_ref.hir_fmt(f, &data.types_map)?; + write!(f, "{}: ", data.name.display(f.db, f.edition()))?; + data.type_ref.hir_fmt(f, &data.store)?; Ok(()) } } @@ -795,14 +790,14 @@ impl HirDisplay for Trait { fn write_trait_header(trait_: &Trait, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> { write_visibility(trait_.module(f.db).id, trait_.visibility(f.db), f)?; - let data = f.db.trait_data(trait_.id); - if data.flags.contains(TraitFlags::IS_UNSAFE) { + let data = f.db.trait_signature(trait_.id); + if data.flags.contains(TraitFlags::UNSAFE) { f.write_str("unsafe ")?; } - if data.flags.contains(TraitFlags::IS_AUTO) { + if data.flags.contains(TraitFlags::AUTO) { f.write_str("auto ")?; } - write!(f, "trait {}", data.name.display(f.db.upcast(), f.edition()))?; + write!(f, "trait {}", data.name.display(f.db, f.edition()))?; write_generic_params(GenericDefId::TraitId(trait_.id), f)?; Ok(()) } @@ -810,8 +805,8 @@ fn write_trait_header(trait_: &Trait, f: &mut HirFormatter<'_>) -> Result<(), Hi impl HirDisplay for TraitAlias { fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> { write_visibility(self.module(f.db).id, self.visibility(f.db), f)?; - let data = f.db.trait_alias_data(self.id); - write!(f, "trait {}", data.name.display(f.db.upcast(), f.edition()))?; + let data = f.db.trait_alias_signature(self.id); + write!(f, "trait {}", data.name.display(f.db, f.edition()))?; let def_id = GenericDefId::TraitAliasId(self.id); write_generic_params(def_id, f)?; f.write_str(" = ")?; @@ -826,20 +821,20 @@ impl HirDisplay for TraitAlias { impl HirDisplay for TypeAlias { fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> { write_visibility(self.module(f.db).id, self.visibility(f.db), f)?; - let data = f.db.type_alias_data(self.id); - write!(f, "type {}", data.name.display(f.db.upcast(), f.edition()))?; + let data = f.db.type_alias_signature(self.id); + write!(f, "type {}", data.name.display(f.db, f.edition()))?; let def_id = GenericDefId::TypeAliasId(self.id); write_generic_params(def_id, f)?; if !data.bounds.is_empty() { f.write_str(": ")?; f.write_joined( - data.bounds.iter().map(|bound| hir_display_with_types_map(bound, &data.types_map)), + data.bounds.iter().map(|bound| hir_display_with_store(bound, &data.store)), " + ", )?; } - if let Some(ty) = data.type_ref { + if let Some(ty) = data.ty { f.write_str(" = ")?; - ty.hir_fmt(f, &data.types_map)?; + ty.hir_fmt(f, &data.store)?; } write_where_clause(def_id, f)?; Ok(()) @@ -854,11 +849,11 @@ impl HirDisplay for Module { return match self.krate(f.db).display_name(f.db) { Some(name) => write!(f, "extern crate {name}"), None => f.write_str("extern crate {unknown}"), - } + }; } } match self.name(f.db) { - Some(name) => write!(f, "mod {}", name.display(f.db.upcast(), f.edition())), + Some(name) => write!(f, "mod {}", name.display(f.db, f.edition())), None => f.write_str("mod {unknown}"), } } @@ -880,6 +875,6 @@ impl HirDisplay for Macro { hir_def::MacroId::MacroRulesId(_) => f.write_str("macro_rules!"), hir_def::MacroId::ProcMacroId(_) => f.write_str("proc_macro"), }?; - write!(f, " {}", self.name(f.db).display(f.db.upcast(), f.edition())) + write!(f, " {}", self.name(f.db).display(f.db, f.edition())) } } diff --git a/src/tools/rust-analyzer/crates/hir/src/from_id.rs b/src/tools/rust-analyzer/crates/hir/src/from_id.rs index 72df07ef8c0cc..c6446693df3e4 100644 --- a/src/tools/rust-analyzer/crates/hir/src/from_id.rs +++ b/src/tools/rust-analyzer/crates/hir/src/from_id.rs @@ -4,9 +4,9 @@ //! are splitting the hir. use hir_def::{ - hir::{BindingId, LabelId}, AdtId, AssocItemId, DefWithBodyId, EnumVariantId, FieldId, GenericDefId, GenericParamId, ModuleDefId, VariantId, + hir::{BindingId, LabelId}, }; use crate::{ @@ -30,7 +30,7 @@ macro_rules! from_id { } from_id![ - (base_db::CrateId, crate::Crate), + (base_db::Crate, crate::Crate), (hir_def::ModuleId, crate::Module), (hir_def::StructId, crate::Struct), (hir_def::UnionId, crate::Union), @@ -40,7 +40,6 @@ from_id![ (hir_def::TraitAliasId, crate::TraitAlias), (hir_def::StaticId, crate::Static), (hir_def::ConstId, crate::Const), - (hir_def::InTypeConstId, crate::InTypeConst), (hir_def::FunctionId, crate::Function), (hir_def::ImplId, crate::Impl), (hir_def::TypeOrConstParamId, crate::TypeOrConstParam), @@ -147,7 +146,6 @@ impl From for DefWithBodyId { DefWithBody::Static(it) => DefWithBodyId::StaticId(it.id), DefWithBody::Const(it) => DefWithBodyId::ConstId(it.id), DefWithBody::Variant(it) => DefWithBodyId::VariantId(it.into()), - DefWithBody::InTypeConst(it) => DefWithBodyId::InTypeConstId(it.id), } } } @@ -159,7 +157,6 @@ impl From for DefWithBody { DefWithBodyId::StaticId(it) => DefWithBody::Static(it.into()), DefWithBodyId::ConstId(it) => DefWithBody::Const(it.into()), DefWithBodyId::VariantId(it) => DefWithBody::Variant(it.into()), - DefWithBodyId::InTypeConstId(it) => DefWithBody::InTypeConst(it.into()), } } } diff --git a/src/tools/rust-analyzer/crates/hir/src/has_source.rs b/src/tools/rust-analyzer/crates/hir/src/has_source.rs index 372c725293441..fe7429c867254 100644 --- a/src/tools/rust-analyzer/crates/hir/src/has_source.rs +++ b/src/tools/rust-analyzer/crates/hir/src/has_source.rs @@ -2,20 +2,20 @@ use either::Either; use hir_def::{ + CallableDefId, Lookup, MacroId, VariantId, nameres::{ModuleOrigin, ModuleSource}, src::{HasChildSource, HasSource as _}, - CallableDefId, Lookup, MacroId, VariantId, }; -use hir_expand::{HirFileId, InFile}; +use hir_expand::{EditionedFileId, HirFileId, InFile}; use hir_ty::db::InternedClosure; -use span::EditionedFileId; use syntax::ast; use tt::TextRange; use crate::{ - db::HirDatabase, Adt, Callee, Const, Enum, ExternCrateDecl, Field, FieldSource, Function, Impl, + Adt, Callee, Const, Enum, ExternCrateDecl, Field, FieldSource, Function, Impl, InlineAsmOperand, Label, LifetimeParam, LocalSource, Macro, Module, Param, SelfParam, Static, Struct, Trait, TraitAlias, TypeAlias, TypeOrConstParam, Union, Variant, VariantDef, + db::HirDatabase, }; pub trait HasSource { @@ -35,23 +35,23 @@ pub trait HasSource { impl Module { /// Returns a node which defines this module. That is, a file or a `mod foo {}` with items. pub fn definition_source(self, db: &dyn HirDatabase) -> InFile { - let def_map = self.id.def_map(db.upcast()); - def_map[self.id.local_id].definition_source(db.upcast()) + let def_map = self.id.def_map(db); + def_map[self.id.local_id].definition_source(db) } /// Returns a node which defines this module. That is, a file or a `mod foo {}` with items. pub fn definition_source_range(self, db: &dyn HirDatabase) -> InFile { - let def_map = self.id.def_map(db.upcast()); - def_map[self.id.local_id].definition_source_range(db.upcast()) + let def_map = self.id.def_map(db); + def_map[self.id.local_id].definition_source_range(db) } pub fn definition_source_file_id(self, db: &dyn HirDatabase) -> HirFileId { - let def_map = self.id.def_map(db.upcast()); + let def_map = self.id.def_map(db); def_map[self.id.local_id].definition_source_file_id() } pub fn is_mod_rs(self, db: &dyn HirDatabase) -> bool { - let def_map = self.id.def_map(db.upcast()); + let def_map = self.id.def_map(db); match def_map[self.id.local_id].origin { ModuleOrigin::File { is_mod_rs, .. } => is_mod_rs, _ => false, @@ -59,7 +59,7 @@ impl Module { } pub fn as_source_file_id(self, db: &dyn HirDatabase) -> Option { - let def_map = self.id.def_map(db.upcast()); + let def_map = self.id.def_map(db); match def_map[self.id.local_id].origin { ModuleOrigin::File { definition, .. } | ModuleOrigin::CrateRoot { definition, .. } => { Some(definition) @@ -69,22 +69,22 @@ impl Module { } pub fn is_inline(self, db: &dyn HirDatabase) -> bool { - let def_map = self.id.def_map(db.upcast()); + let def_map = self.id.def_map(db); def_map[self.id.local_id].origin.is_inline() } /// Returns a node which declares this module, either a `mod foo;` or a `mod foo {}`. /// `None` for the crate root. pub fn declaration_source(self, db: &dyn HirDatabase) -> Option> { - let def_map = self.id.def_map(db.upcast()); - def_map[self.id.local_id].declaration_source(db.upcast()) + let def_map = self.id.def_map(db); + def_map[self.id.local_id].declaration_source(db) } /// Returns a text range which declares this module, either a `mod foo;` or a `mod foo {}`. /// `None` for the crate root. pub fn declaration_source_range(self, db: &dyn HirDatabase) -> Option> { - let def_map = self.id.def_map(db.upcast()); - def_map[self.id.local_id].declaration_source_range(db.upcast()) + let def_map = self.id.def_map(db); + def_map[self.id.local_id].declaration_source_range(db) } } @@ -92,7 +92,7 @@ impl HasSource for Field { type Ast = FieldSource; fn source(self, db: &dyn HirDatabase) -> Option> { let var = VariantId::from(self.parent); - let src = var.child_source(db.upcast()); + let src = var.child_source(db); let field_source = src.map(|it| match it[self.id].clone() { Either::Left(it) => FieldSource::Pos(it), Either::Right(it) => FieldSource::Named(it), @@ -123,96 +123,88 @@ impl HasSource for VariantDef { impl HasSource for Struct { type Ast = ast::Struct; fn source(self, db: &dyn HirDatabase) -> Option> { - Some(self.id.lookup(db.upcast()).source(db.upcast())) + Some(self.id.lookup(db).source(db)) } } impl HasSource for Union { type Ast = ast::Union; fn source(self, db: &dyn HirDatabase) -> Option> { - Some(self.id.lookup(db.upcast()).source(db.upcast())) + Some(self.id.lookup(db).source(db)) } } impl HasSource for Enum { type Ast = ast::Enum; fn source(self, db: &dyn HirDatabase) -> Option> { - Some(self.id.lookup(db.upcast()).source(db.upcast())) + Some(self.id.lookup(db).source(db)) } } impl HasSource for Variant { type Ast = ast::Variant; fn source(self, db: &dyn HirDatabase) -> Option> { - Some(self.id.lookup(db.upcast()).source(db.upcast())) + Some(self.id.lookup(db).source(db)) } } impl HasSource for Function { type Ast = ast::Fn; fn source(self, db: &dyn HirDatabase) -> Option> { - Some(self.id.lookup(db.upcast()).source(db.upcast())) + Some(self.id.lookup(db).source(db)) } } impl HasSource for Const { type Ast = ast::Const; fn source(self, db: &dyn HirDatabase) -> Option> { - Some(self.id.lookup(db.upcast()).source(db.upcast())) + Some(self.id.lookup(db).source(db)) } } impl HasSource for Static { type Ast = ast::Static; fn source(self, db: &dyn HirDatabase) -> Option> { - Some(self.id.lookup(db.upcast()).source(db.upcast())) + Some(self.id.lookup(db).source(db)) } } impl HasSource for Trait { type Ast = ast::Trait; fn source(self, db: &dyn HirDatabase) -> Option> { - Some(self.id.lookup(db.upcast()).source(db.upcast())) + Some(self.id.lookup(db).source(db)) } } impl HasSource for TraitAlias { type Ast = ast::TraitAlias; fn source(self, db: &dyn HirDatabase) -> Option> { - Some(self.id.lookup(db.upcast()).source(db.upcast())) + Some(self.id.lookup(db).source(db)) } } impl HasSource for TypeAlias { type Ast = ast::TypeAlias; fn source(self, db: &dyn HirDatabase) -> Option> { - Some(self.id.lookup(db.upcast()).source(db.upcast())) + Some(self.id.lookup(db).source(db)) } } impl HasSource for Macro { type Ast = Either; fn source(self, db: &dyn HirDatabase) -> Option> { match self.id { - MacroId::Macro2Id(it) => Some( - it.lookup(db.upcast()) - .source(db.upcast()) - .map(ast::Macro::MacroDef) - .map(Either::Left), - ), - MacroId::MacroRulesId(it) => Some( - it.lookup(db.upcast()) - .source(db.upcast()) - .map(ast::Macro::MacroRules) - .map(Either::Left), - ), - MacroId::ProcMacroId(it) => { - Some(it.lookup(db.upcast()).source(db.upcast()).map(Either::Right)) + MacroId::Macro2Id(it) => { + Some(it.lookup(db).source(db).map(ast::Macro::MacroDef).map(Either::Left)) + } + MacroId::MacroRulesId(it) => { + Some(it.lookup(db).source(db).map(ast::Macro::MacroRules).map(Either::Left)) } + MacroId::ProcMacroId(it) => Some(it.lookup(db).source(db).map(Either::Right)), } } } impl HasSource for Impl { type Ast = ast::Impl; fn source(self, db: &dyn HirDatabase) -> Option> { - Some(self.id.lookup(db.upcast()).source(db.upcast())) + Some(self.id.lookup(db).source(db)) } } impl HasSource for TypeOrConstParam { type Ast = Either; fn source(self, db: &dyn HirDatabase) -> Option> { - let child_source = self.id.parent.child_source(db.upcast()); + let child_source = self.id.parent.child_source(db); child_source.map(|it| it.get(self.id.local_id).cloned()).transpose() } } @@ -220,7 +212,7 @@ impl HasSource for TypeOrConstParam { impl HasSource for LifetimeParam { type Ast = ast::LifetimeParam; fn source(self, db: &dyn HirDatabase) -> Option> { - let child_source = self.id.parent.child_source(db.upcast()); + let child_source = self.id.parent.child_source(db); child_source.map(|it| it.get(self.id.local_id).cloned()).transpose() } } @@ -290,7 +282,7 @@ impl HasSource for Label { fn source(self, db: &dyn HirDatabase) -> Option> { let (_body, source_map) = db.body_with_source_map(self.parent); let src = source_map.label_syntax(self.label_id); - let root = src.file_syntax(db.upcast()); + let root = src.file_syntax(db); Some(src.map(|ast| ast.to_node(&root))) } } @@ -299,16 +291,16 @@ impl HasSource for ExternCrateDecl { type Ast = ast::ExternCrate; fn source(self, db: &dyn HirDatabase) -> Option> { - Some(self.id.lookup(db.upcast()).source(db.upcast())) + Some(self.id.lookup(db).source(db)) } } impl HasSource for InlineAsmOperand { type Ast = ast::AsmOperandNamed; fn source(self, db: &dyn HirDatabase) -> Option> { - let (_body, source_map) = db.body_with_source_map(self.owner); + let source_map = db.body_with_source_map(self.owner).1; if let Ok(src) = source_map.expr_syntax(self.expr) { - let root = src.file_syntax(db.upcast()); + let root = src.file_syntax(db); return src .map(|ast| match ast.to_node(&root) { Either::Left(ast::Expr::AsmExpr(asm)) => asm diff --git a/src/tools/rust-analyzer/crates/hir/src/lib.rs b/src/tools/rust-analyzer/crates/hir/src/lib.rs index 29f4584665087..3f1d5bb01f2a6 100644 --- a/src/tools/rust-analyzer/crates/hir/src/lib.rs +++ b/src/tools/rust-analyzer/crates/hir/src/lib.rs @@ -39,62 +39,61 @@ use std::{ }; use arrayvec::ArrayVec; -use base_db::{CrateDisplayName, CrateId, CrateOrigin, LangCrateOrigin}; +use base_db::{CrateDisplayName, CrateOrigin, LangCrateOrigin}; use either::Either; use hir_def::{ - data::{adt::VariantData, TraitFlags}, - expr_store::ExpressionStoreDiagnostics, - generics::{LifetimeParamData, TypeOrConstParamData, TypeParamProvenance}, - hir::{BindingAnnotation, BindingId, Expr, ExprId, ExprOrPatId, LabelId, Pat}, - item_tree::{AttrOwner, FieldParent, ItemTreeFieldId, ItemTreeNode}, - lang_item::LangItemTarget, + AdtId, AssocItemId, AssocItemLoc, AttrDefId, CallableDefId, ConstId, ConstParamId, + CrateRootModuleId, DefWithBodyId, EnumId, EnumVariantId, ExternBlockId, ExternCrateId, + FunctionId, GenericDefId, GenericParamId, HasModule, ImplId, ItemContainerId, LifetimeParamId, + LocalFieldId, Lookup, MacroExpander, MacroId, ModuleId, StaticId, StructId, SyntheticSyntax, + TraitAliasId, TupleId, TypeAliasId, TypeOrConstParamId, TypeParamId, UnionId, + expr_store::{ExpressionStoreDiagnostics, ExpressionStoreSourceMap}, + hir::{ + BindingAnnotation, BindingId, Expr, ExprId, ExprOrPatId, LabelId, Pat, + generics::{LifetimeParamData, TypeOrConstParamData, TypeParamProvenance}, + }, + item_tree::{AttrOwner, FieldParent, ImportAlias, ItemTreeFieldId, ItemTreeNode}, layout::{self, ReprOptions, TargetDataLayout}, nameres::{self, diagnostics::DefDiagnostic}, - path::ImportAlias, per_ns::PerNs, resolver::{HasResolver, Resolver}, - type_ref::TypesSourceMap, - AdtId, AssocItemId, AssocItemLoc, AttrDefId, CallableDefId, ConstId, ConstParamId, - CrateRootModuleId, DefWithBodyId, EnumId, EnumVariantId, ExternBlockId, ExternCrateId, - FunctionId, GenericDefId, GenericParamId, HasModule, ImplId, InTypeConstId, ItemContainerId, - LifetimeParamId, LocalFieldId, Lookup, MacroExpander, MacroId, ModuleId, StaticId, StructId, - SyntheticSyntax, TraitAliasId, TupleId, TypeAliasId, TypeOrConstParamId, TypeParamId, UnionId, + signatures::{ImplFlags, StaticFlags, TraitFlags, VariantFields}, }; use hir_expand::{ - attrs::collect_attrs, proc_macro::ProcMacroKind, AstId, MacroCallKind, RenderedExpandError, - ValueResult, + AstId, MacroCallKind, RenderedExpandError, ValueResult, attrs::collect_attrs, + proc_macro::ProcMacroKind, }; use hir_ty::{ - all_super_traits, autoderef, check_orphan_rules, - consteval::{try_const_usize, unknown_const_as_generic, ConstExt}, + AliasTy, CallableSig, Canonical, CanonicalVarKinds, Cast, ClosureId, GenericArg, + GenericArgData, Interner, ParamKind, QuantifiedWhereClause, Scalar, Substitution, + TraitEnvironment, TraitRefExt, Ty, TyBuilder, TyDefId, TyExt, TyKind, TyLoweringDiagnostic, + ValueTyDefId, WhereClause, all_super_traits, autoderef, check_orphan_rules, + consteval::{ConstExt, try_const_usize, unknown_const_as_generic}, diagnostics::BodyValidationDiagnostic, direct_super_traits, error_lifetime, known_const_to_ast, layout::{Layout as TyLayout, RustcEnumVariantIdx, RustcFieldIdx, TagEncoding}, method_resolution, - mir::{interpret_mir, MutBorrowKind}, + mir::{MutBorrowKind, interpret_mir}, primitive::UintTy, traits::FnTrait, - AliasTy, CallableSig, Canonical, CanonicalVarKinds, Cast, ClosureId, GenericArg, - GenericArgData, Interner, ParamKind, QuantifiedWhereClause, Scalar, Substitution, - TraitEnvironment, TraitRefExt, Ty, TyBuilder, TyDefId, TyExt, TyKind, TyLoweringDiagnostic, - ValueTyDefId, WhereClause, }; use itertools::Itertools; use nameres::diagnostics::DefDiagnosticKind; use rustc_hash::FxHashSet; use smallvec::SmallVec; -use span::{Edition, EditionedFileId, FileId, MacroCallId}; +use span::{Edition, FileId}; use stdx::{format_to, impl_from, never}; use syntax::{ - ast::{self, HasAttrs as _, HasGenericParams, HasName}, - format_smolstr, AstNode, AstPtr, SmolStr, SyntaxNode, SyntaxNodePtr, TextRange, ToSmolStr, T, + AstNode, AstPtr, SmolStr, SyntaxNode, SyntaxNodePtr, T, TextRange, ToSmolStr, + ast::{self, HasAttrs as _, HasName}, + format_smolstr, }; use triomphe::{Arc, ThinArc}; use crate::db::{DefDatabase, HirDatabase}; pub use crate::{ - attrs::{resolve_doc_path_on, HasAttrs}, + attrs::{HasAttrs, resolve_doc_path_on}, diagnostics::*, has_source::HasSource, semantics::{ @@ -114,22 +113,22 @@ pub use crate::{ pub use { cfg::{CfgAtom, CfgExpr, CfgOptions}, hir_def::{ + Complete, + ImportPathConfig, attr::{AttrSourceMap, Attrs, AttrsWithOwner}, - data::adt::StructKind, find_path::PrefixKind, import_map, lang_item::LangItem, nameres::{DefMap, ModuleSource}, - path::{ModPath, PathKind}, per_ns::Namespace, type_ref::{Mutability, TypeRef}, visibility::Visibility, - ImportPathConfig, // FIXME: This is here since some queries take it as input that are used // outside of hir. {ModuleDefId, TraitId}, }, hir_expand::{ + EditionedFileId, ExpandResult, HirFileId, MacroCallId, MacroKind, attrs::{Attr, AttrId}, change::ChangeWithProcMacros, files::{ @@ -137,15 +136,15 @@ pub use { HirFileRange, InFile, InFileWrapper, InMacroFile, InRealFile, MacroFilePosition, MacroFileRange, }, - hygiene::{marks_rev, SyntaxContextExt}, inert_attr_macro::AttributeTemplate, - mod_path::tool_path, + mod_path::{ModPath, PathKind, tool_path}, name::Name, prettify_macro_expansion, proc_macro::{ProcMacros, ProcMacrosBuilder}, - tt, ExpandResult, HirFileId, HirFileIdExt, MacroFileId, MacroFileIdExt, MacroKind, + tt, }, hir_ty::{ + CastError, DropGlue, FnAbi, PointerCast, Safety, Variance, consteval::ConstEvalError, diagnostics::UnsafetyReason, display::{ClosureStyle, DisplayTarget, HirDisplay, HirDisplayError, HirWrite}, @@ -153,18 +152,17 @@ pub use { layout::LayoutError, method_resolution::TyFingerprint, mir::{MirEvalError, MirLowerError}, - CastError, DropGlue, FnAbi, PointerCast, Safety, Variance, }, // FIXME: Properly encapsulate mir - hir_ty::{mir, Interner as ChalkTyInterner}, - intern::{sym, Symbol}, + hir_ty::{Interner as ChalkTyInterner, mir}, + intern::{Symbol, sym}, }; // These are negative re-exports: pub using these names is forbidden, they // should remain private to hir internals. #[allow(unused)] use { - hir_def::path::Path, + hir_def::expr_store::path::Path, hir_expand::{ name::AsName, span_map::{ExpansionSpanMap, RealSpanMap, SpanMap, SpanMapRef}, @@ -176,7 +174,7 @@ use { /// root module. #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub struct Crate { - pub(crate) id: CrateId, + pub(crate) id: base_db::Crate, } #[derive(Debug)] @@ -187,7 +185,7 @@ pub struct CrateDependency { impl Crate { pub fn origin(self, db: &dyn HirDatabase) -> CrateOrigin { - db.crate_graph()[self.id].origin.clone() + self.id.data(db).origin.clone() } pub fn is_builtin(self, db: &dyn HirDatabase) -> bool { @@ -195,7 +193,8 @@ impl Crate { } pub fn dependencies(self, db: &dyn HirDatabase) -> Vec { - db.crate_graph()[self.id] + self.id + .data(db) .dependencies .iter() .map(|dep| { @@ -207,12 +206,11 @@ impl Crate { } pub fn reverse_dependencies(self, db: &dyn HirDatabase) -> Vec { - let crate_graph = db.crate_graph(); - crate_graph + let all_crates = db.all_crates(); + all_crates .iter() - .filter(|&krate| { - crate_graph[krate].dependencies.iter().any(|it| it.crate_id == self.id) - }) + .copied() + .filter(|&krate| krate.data(db).dependencies.iter().any(|it| it.crate_id == self.id)) .map(|id| Crate { id }) .collect() } @@ -221,7 +219,7 @@ impl Crate { self, db: &dyn HirDatabase, ) -> impl Iterator { - db.crate_graph().transitive_rev_deps(self.id).map(|id| Crate { id }) + db.transitive_rev_deps(self.id).into_iter().map(|id| Crate { id }) } pub fn root_module(self) -> Module { @@ -234,54 +232,57 @@ impl Crate { } pub fn root_file(self, db: &dyn HirDatabase) -> FileId { - db.crate_graph()[self.id].root_file_id + self.id.data(db).root_file_id } pub fn edition(self, db: &dyn HirDatabase) -> Edition { - db.crate_graph()[self.id].edition + self.id.data(db).edition } pub fn version(self, db: &dyn HirDatabase) -> Option { - db.crate_graph()[self.id].version.clone() + self.id.extra_data(db).version.clone() } pub fn display_name(self, db: &dyn HirDatabase) -> Option { - db.crate_graph()[self.id].display_name.clone() + self.id.extra_data(db).display_name.clone() } pub fn query_external_importables( self, db: &dyn DefDatabase, query: import_map::Query, - ) -> impl Iterator> { + ) -> impl Iterator, Complete)> { let _p = tracing::info_span!("query_external_importables").entered(); - import_map::search_dependencies(db, self.into(), &query).into_iter().map(|item| { - match ItemInNs::from(item) { - ItemInNs::Types(mod_id) | ItemInNs::Values(mod_id) => Either::Left(mod_id), - ItemInNs::Macros(mac_id) => Either::Right(mac_id), - } - }) + import_map::search_dependencies(db, self.into(), &query).into_iter().map( + |(item, do_not_complete)| { + let item = match ItemInNs::from(item) { + ItemInNs::Types(mod_id) | ItemInNs::Values(mod_id) => Either::Left(mod_id), + ItemInNs::Macros(mac_id) => Either::Right(mac_id), + }; + (item, do_not_complete) + }, + ) } pub fn all(db: &dyn HirDatabase) -> Vec { - db.crate_graph().iter().map(|id| Crate { id }).collect() + db.all_crates().iter().map(|&id| Crate { id }).collect() } /// Try to get the root URL of the documentation of a crate. pub fn get_html_root_url(self: &Crate, db: &dyn HirDatabase) -> Option { // Look for #![doc(html_root_url = "...")] let attrs = db.attrs(AttrDefId::ModuleId(self.root_module().into())); - let doc_url = attrs.by_key(&sym::doc).find_string_value_in_tt(&sym::html_root_url); + let doc_url = attrs.by_key(sym::doc).find_string_value_in_tt(sym::html_root_url); doc_url.map(|s| s.trim_matches('"').trim_end_matches('/').to_owned() + "/") } - pub fn cfg(&self, db: &dyn HirDatabase) -> Arc { - db.crate_graph()[self.id].cfg_options.clone() + pub fn cfg<'db>(&self, db: &'db dyn HirDatabase) -> &'db CfgOptions { + self.id.cfg_options(db) } - pub fn potential_cfg(&self, db: &dyn HirDatabase) -> Arc { - let data = &db.crate_graph()[self.id]; - data.potential_cfg_options.clone().unwrap_or_else(|| data.cfg_options.clone()) + pub fn potential_cfg<'db>(&self, db: &'db dyn HirDatabase) -> &'db CfgOptions { + let data = self.id.extra_data(db); + data.potential_cfg_options.as_ref().unwrap_or_else(|| self.id.cfg_options(db)) } pub fn to_display_target(self, db: &dyn HirDatabase) -> DisplayTarget { @@ -289,14 +290,13 @@ impl Crate { } fn core(db: &dyn HirDatabase) -> Option { - let crate_graph = db.crate_graph(); - let result = crate_graph + db.all_crates() .iter() + .copied() .find(|&krate| { - matches!(crate_graph[krate].origin, CrateOrigin::Lang(LangCrateOrigin::Core)) + matches!(krate.data(db).origin, CrateOrigin::Lang(LangCrateOrigin::Core)) }) - .map(Crate::from); - result + .map(Crate::from) } } @@ -370,7 +370,7 @@ impl ModuleDef { segments.extend(m.name(db)) } segments.reverse(); - Some(segments.iter().map(|it| it.display(db.upcast(), edition)).join("::")) + Some(segments.iter().map(|it| it.display(db, edition)).join("::")) } pub fn canonical_module_path( @@ -490,9 +490,7 @@ impl HasCrate for ModuleDef { fn krate(&self, db: &dyn HirDatabase) -> Crate { match self.module(db) { Some(module) => module.krate(), - None => Crate::core(db).unwrap_or_else(|| { - (*db.crate_graph().crates_in_topological_order().last().unwrap()).into() - }), + None => Crate::core(db).unwrap_or_else(|| db.all_crates()[0].into()), } } } @@ -518,7 +516,7 @@ impl HasVisibility for ModuleDef { impl Module { /// Name of this module. pub fn name(self, db: &dyn HirDatabase) -> Option { - self.id.name(db.upcast()) + self.id.name(db) } /// Returns the crate this module is part of. @@ -540,7 +538,7 @@ impl Module { /// Iterates over all child modules. pub fn children(self, db: &dyn HirDatabase) -> impl Iterator { - let def_map = self.id.def_map(db.upcast()); + let def_map = self.id.def_map(db); let children = def_map[self.id.local_id] .children .values() @@ -551,7 +549,7 @@ impl Module { /// Finds a parent module. pub fn parent(self, db: &dyn HirDatabase) -> Option { - let def_map = self.id.def_map(db.upcast()); + let def_map = self.id.def_map(db); let parent_id = def_map.containing_module(self.id.local_id)?; Some(Module { id: parent_id }) } @@ -560,7 +558,7 @@ impl Module { pub fn nearest_non_block_module(self, db: &dyn HirDatabase) -> Module { let mut id = self.id; while id.is_block_module() { - id = id.containing_module(db.upcast()).expect("block without parent module"); + id = id.containing_module(db).expect("block without parent module"); } Module { id } } @@ -581,18 +579,13 @@ impl Module { db: &dyn HirDatabase, visible_from: Option, ) -> Vec<(Name, ScopeDef)> { - self.id.def_map(db.upcast())[self.id.local_id] + self.id.def_map(db)[self.id.local_id] .scope .entries() .filter_map(|(name, def)| { if let Some(m) = visible_from { - let filtered = - def.filter_visibility(|vis| vis.is_visible_from(db.upcast(), m.id)); - if filtered.is_none() && !def.is_none() { - None - } else { - Some((name, filtered)) - } + let filtered = def.filter_visibility(|vis| vis.is_visible_from(db, m.id)); + if filtered.is_none() && !def.is_none() { None } else { Some((name, filtered)) } } else { Some((name, def)) } @@ -603,6 +596,18 @@ impl Module { .collect() } + pub fn resolve_mod_path( + &self, + db: &dyn HirDatabase, + segments: impl IntoIterator, + ) -> Option> { + let items = self + .id + .resolver(db) + .resolve_module_path_in_items(db, &ModPath::from_segments(PathKind::Plain, segments)); + Some(items.iter_items().map(|(item, _)| item.into())) + } + /// Fills `acc` with the module's diagnostics. pub fn diagnostics( self, @@ -611,8 +616,8 @@ impl Module { style_lints: bool, ) { let _p = tracing::info_span!("diagnostics", name = ?self.name(db)).entered(); - let edition = db.crate_graph()[self.id.krate()].edition; - let def_map = self.id.def_map(db.upcast()); + let edition = self.id.krate().data(db).edition; + let def_map = self.id.def_map(db); for diag in def_map.diagnostics() { if diag.in_module != self.id.local_id { // FIXME: This is accidentally quadratic. @@ -637,7 +642,7 @@ impl Module { acc.extend(def.diagnostics(db, style_lints)) } ModuleDef::Trait(t) => { - for diag in db.trait_data_with_diagnostics(t.id).1.iter() { + for diag in db.trait_items_with_diagnostics(t.id).1.iter() { emit_def_diagnostic(db, acc, diag, edition); } @@ -654,46 +659,59 @@ impl Module { ModuleDef::Adt(adt) => { match adt { Adt::Struct(s) => { - let tree_id = s.id.lookup(db.upcast()).id; - let tree_source_maps = tree_id.item_tree_with_source_map(db.upcast()).1; + let source_map = db.struct_signature_with_source_map(s.id).1; + expr_store_diagnostics(db, acc, &source_map); + let source_map = db.variant_fields_with_source_map(s.id.into()).1; + expr_store_diagnostics(db, acc, &source_map); push_ty_diagnostics( db, acc, db.field_types_with_diagnostics(s.id.into()).1, - tree_source_maps.strukt(tree_id.value).item(), + &source_map, ); - for diag in db.struct_data_with_diagnostics(s.id).1.iter() { - emit_def_diagnostic(db, acc, diag, edition); - } } Adt::Union(u) => { - let tree_id = u.id.lookup(db.upcast()).id; - let tree_source_maps = tree_id.item_tree_with_source_map(db.upcast()).1; + let source_map = db.union_signature_with_source_map(u.id).1; + expr_store_diagnostics(db, acc, &source_map); + let source_map = db.variant_fields_with_source_map(u.id.into()).1; + expr_store_diagnostics(db, acc, &source_map); push_ty_diagnostics( db, acc, db.field_types_with_diagnostics(u.id.into()).1, - tree_source_maps.union(tree_id.value).item(), + &source_map, ); - for diag in db.union_data_with_diagnostics(u.id).1.iter() { - emit_def_diagnostic(db, acc, diag, edition); - } } Adt::Enum(e) => { - for v in e.variants(db) { - let tree_id = v.id.lookup(db.upcast()).id; - let tree_source_maps = - tree_id.item_tree_with_source_map(db.upcast()).1; + let source_map = db.enum_signature_with_source_map(e.id).1; + expr_store_diagnostics(db, acc, &source_map); + let (variants, diagnostics) = db.enum_variants_with_diagnostics(e.id); + let file = e.id.lookup(db).id.file_id(); + let ast_id_map = db.ast_id_map(file); + if let Some(diagnostics) = &diagnostics { + for diag in diagnostics.iter() { + acc.push( + InactiveCode { + node: InFile::new( + file, + ast_id_map.get(diag.ast_id).syntax_node_ptr(), + ), + cfg: diag.cfg.clone(), + opts: diag.opts.clone(), + } + .into(), + ); + } + } + for &(v, _) in &variants.variants { + let source_map = db.variant_fields_with_source_map(v.into()).1; push_ty_diagnostics( db, acc, - db.field_types_with_diagnostics(v.id.into()).1, - tree_source_maps.variant(tree_id.value), + db.field_types_with_diagnostics(v.into()).1, + &source_map, ); - acc.extend(ModuleDef::Variant(v).diagnostics(db, style_lints)); - for diag in db.enum_variant_data_with_diagnostics(v.id).1.iter() { - emit_def_diagnostic(db, acc, diag, edition); - } + expr_store_diagnostics(db, acc, &source_map); } } } @@ -701,13 +719,13 @@ impl Module { } ModuleDef::Macro(m) => emit_macro_def_diagnostics(db, acc, m), ModuleDef::TypeAlias(type_alias) => { - let tree_id = type_alias.id.lookup(db.upcast()).id; - let tree_source_maps = tree_id.item_tree_with_source_map(db.upcast()).1; + let source_map = db.type_alias_signature_with_source_map(type_alias.id).1; + expr_store_diagnostics(db, acc, &source_map); push_ty_diagnostics( db, acc, db.type_for_type_alias_with_diagnostics(type_alias.id).1, - tree_source_maps.type_alias(tree_id.value).item(), + &source_map, ); acc.extend(def.diagnostics(db, style_lints)); } @@ -722,15 +740,14 @@ impl Module { for impl_def in self.impl_defs(db) { GenericDef::Impl(impl_def).diagnostics(db, acc); - let loc = impl_def.id.lookup(db.upcast()); - let (tree, tree_source_maps) = loc.id.item_tree_with_source_map(db.upcast()); - let source_map = tree_source_maps.impl_(loc.id.value).item(); + let loc = impl_def.id.lookup(db); + let tree = loc.id.item_tree(db); + let source_map = db.impl_signature_with_source_map(impl_def.id).1; + expr_store_diagnostics(db, acc, &source_map); + let node = &tree[loc.id.value]; let file_id = loc.id.file_id(); - if file_id - .macro_file() - .is_some_and(|it| it.kind(db.upcast()) == MacroKind::DeriveBuiltIn) - { + if file_id.macro_file().is_some_and(|it| it.kind(db) == MacroKind::DeriveBuiltIn) { // these expansion come from us, diagnosing them is a waste of resources // FIXME: Once we diagnose the inputs to builtin derives, we should at least extract those diagnostics somehow continue; @@ -742,7 +759,7 @@ impl Module { let ast_id_map = db.ast_id_map(file_id); - for diag in db.impl_data_with_diagnostics(impl_def.id).1.iter() { + for diag in db.impl_items_with_diagnostics(impl_def.id).1.iter() { emit_def_diagnostic(db, acc, diag, edition); } @@ -762,7 +779,7 @@ impl Module { let drop_maybe_dangle = (|| { // FIXME: This can be simplified a lot by exposing hir-ty's utils.rs::Generics helper let trait_ = trait_?; - let drop_trait = db.lang_item(self.krate().into(), LangItem::Drop)?.as_trait()?; + let drop_trait = LangItem::Drop.resolve_trait(db, self.krate().into())?; if drop_trait != trait_.into() { return None; } @@ -780,7 +797,7 @@ impl Module { )) }); let res = type_params.chain(lifetime_params).any(|p| { - db.attrs(AttrDefId::GenericParamId(p)).by_key(&sym::may_dangle).exists() + db.attrs(AttrDefId::GenericParamId(p)).by_key(sym::may_dangle).exists() }); Some(res) })() @@ -800,13 +817,13 @@ impl Module { // Negative impls can't have items, don't emit missing items diagnostic for them if let (false, Some(trait_)) = (impl_is_negative, trait_) { - let items = &db.trait_data(trait_.into()).items; + let items = &db.trait_items(trait_.into()).items; let required_items = items.iter().filter(|&(_, assoc)| match *assoc { - AssocItemId::FunctionId(it) => !db.function_data(it).has_body(), - AssocItemId::ConstId(id) => !db.const_data(id).has_body, - AssocItemId::TypeAliasId(it) => db.type_alias_data(it).type_ref.is_none(), + AssocItemId::FunctionId(it) => !db.function_signature(it).has_body(), + AssocItemId::ConstId(id) => !db.const_signature(id).has_body(), + AssocItemId::TypeAliasId(it) => db.type_alias_signature(it).ty.is_none(), }); - impl_assoc_items_scratch.extend(db.impl_data(impl_def.id).items.iter().cloned()); + impl_assoc_items_scratch.extend(db.impl_items(impl_def.id).items.iter().cloned()); let redundant = impl_assoc_items_scratch .iter() @@ -853,23 +870,23 @@ impl Module { db, acc, db.impl_self_ty_with_diagnostics(impl_def.id).1, - source_map, + &source_map, ); push_ty_diagnostics( db, acc, db.impl_trait_with_diagnostics(impl_def.id).and_then(|it| it.1), - source_map, + &source_map, ); - for &(_, item) in db.impl_data(impl_def.id).items.iter() { + for &(_, item) in db.impl_items(impl_def.id).items.iter() { AssocItem::from(item).diagnostics(db, acc, style_lints); } } } pub fn declarations(self, db: &dyn HirDatabase) -> Vec { - let def_map = self.id.def_map(db.upcast()); + let def_map = self.id.def_map(db); let scope = &def_map[self.id.local_id].scope; scope .declarations() @@ -879,13 +896,13 @@ impl Module { } pub fn legacy_macros(self, db: &dyn HirDatabase) -> Vec { - let def_map = self.id.def_map(db.upcast()); + let def_map = self.id.def_map(db); let scope = &def_map[self.id.local_id].scope; scope.legacy_macros().flat_map(|(_, it)| it).map(|&it| it.into()).collect() } pub fn impl_defs(self, db: &dyn HirDatabase) -> Vec { - let def_map = self.id.def_map(db.upcast()); + let def_map = self.id.def_map(db); def_map[self.id.local_id].scope.impls().map(Impl::from).collect() } @@ -934,11 +951,12 @@ fn macro_call_diagnostics( let file_id = loc.kind.file_id(); let node = InFile::new(file_id, db.ast_id_map(file_id).get_erased(loc.kind.erased_ast_id())); - let RenderedExpandError { message, error, kind } = err.render_to_string(db.upcast()); - let precise_location = if err.span().anchor.file_id == file_id { + let RenderedExpandError { message, error, kind } = err.render_to_string(db); + let editioned_file_id = EditionedFileId::from_span(db, err.span().anchor.file_id); + let precise_location = if editioned_file_id == file_id { Some( err.span().range - + db.ast_id_map(err.span().anchor.file_id.into()) + + db.ast_id_map(editioned_file_id.into()) .get_erased(err.span().anchor.ast_id) .text_range() .start(), @@ -967,8 +985,8 @@ fn emit_macro_def_diagnostics(db: &dyn HirDatabase, acc: &mut Vec never!("declarative expander for non decl-macro: {:?}", e); return; }; - let krate = HasModule::krate(&m.id, db.upcast()); - let edition = db.crate_graph()[krate].edition; + let krate = HasModule::krate(&m.id, db); + let edition = krate.data(db).edition; emit_def_diagnostic_( db, acc, @@ -996,7 +1014,7 @@ fn emit_def_diagnostic_( ) { match diag { DefDiagnosticKind::UnresolvedModule { ast: declaration, candidates } => { - let decl = declaration.to_ptr(db.upcast()); + let decl = declaration.to_ptr(db); acc.push( UnresolvedModule { decl: InFile::new(declaration.file_id, decl), @@ -1006,18 +1024,18 @@ fn emit_def_diagnostic_( ) } DefDiagnosticKind::UnresolvedExternCrate { ast } => { - let item = ast.to_ptr(db.upcast()); + let item = ast.to_ptr(db); acc.push(UnresolvedExternCrate { decl: InFile::new(ast.file_id, item) }.into()); } DefDiagnosticKind::MacroError { ast, path, err } => { - let item = ast.to_ptr(db.upcast()); - let RenderedExpandError { message, error, kind } = err.render_to_string(db.upcast()); + let item = ast.to_ptr(db); + let RenderedExpandError { message, error, kind } = err.render_to_string(db); acc.push( MacroError { node: InFile::new(ast.file_id, item.syntax_node_ptr()), precise_location: None, - message: format!("{}: {message}", path.display(db.upcast(), edition)), + message: format!("{}: {message}", path.display(db, edition)), error, kind, } @@ -1026,17 +1044,17 @@ fn emit_def_diagnostic_( } DefDiagnosticKind::UnresolvedImport { id, index } => { let file_id = id.file_id(); - let item_tree = id.item_tree(db.upcast()); + let item_tree = id.item_tree(db); let import = &item_tree[id.value]; - let use_tree = import.use_tree_to_ast(db.upcast(), file_id, *index); + let use_tree = import.use_tree_to_ast(db, file_id, *index); acc.push( UnresolvedImport { decl: InFile::new(file_id, AstPtr::new(&use_tree)) }.into(), ); } DefDiagnosticKind::UnconfiguredCode { tree, item, cfg, opts } => { - let item_tree = tree.item_tree(db.upcast()); + let item_tree = tree.item_tree(db); let ast_id_map = db.ast_id_map(tree.file_id()); // FIXME: This parses... We could probably store relative ranges for the children things // here in the item tree? @@ -1058,7 +1076,7 @@ fn emit_def_diagnostic_( AttrOwner::Variant(it) => { ast_id_map.get(item_tree[it].ast_id).syntax_node_ptr() } - AttrOwner::Field(FieldParent::Variant(parent), idx) => process_field_list( + AttrOwner::Field(FieldParent::EnumVariant(parent), idx) => process_field_list( ast_id_map .get(item_tree[parent].ast_id) .to_node(&db.parse_or_expand(tree.file_id())) @@ -1081,33 +1099,6 @@ fn emit_def_diagnostic_( .nth(idx.into_raw().into_u32() as usize)? .syntax(), ), - AttrOwner::Param(parent, idx) => SyntaxNodePtr::new( - ast_id_map - .get(item_tree[parent.index()].ast_id) - .to_node(&db.parse_or_expand(tree.file_id())) - .param_list()? - .params() - .nth(idx.into_raw().into_u32() as usize)? - .syntax(), - ), - AttrOwner::TypeOrConstParamData(parent, idx) => SyntaxNodePtr::new( - ast_id_map - .get(parent.ast_id(&item_tree)) - .to_node(&db.parse_or_expand(tree.file_id())) - .generic_param_list()? - .type_or_const_params() - .nth(idx.into_raw().into_u32() as usize)? - .syntax(), - ), - AttrOwner::LifetimeParamData(parent, idx) => SyntaxNodePtr::new( - ast_id_map - .get(parent.ast_id(&item_tree)) - .to_node(&db.parse_or_expand(tree.file_id())) - .generic_param_list()? - .lifetime_params() - .nth(idx.into_raw().into_u32() as usize)? - .syntax(), - ), }; acc.push( InactiveCode { @@ -1133,7 +1124,7 @@ fn emit_def_diagnostic_( ); } DefDiagnosticKind::UnimplementedBuiltinMacro { ast } => { - let node = ast.to_node(db.upcast()); + let node = ast.to_node(db); // Must have a name, otherwise we wouldn't emit it. let name = node.name().expect("unimplemented builtin macro with no name"); acc.push( @@ -1144,7 +1135,7 @@ fn emit_def_diagnostic_( ); } DefDiagnosticKind::InvalidDeriveTarget { ast, id } => { - let node = ast.to_node(db.upcast()); + let node = ast.to_node(db); let derive = node.attrs().nth(*id); match derive { Some(derive) => { @@ -1159,7 +1150,7 @@ fn emit_def_diagnostic_( } } DefDiagnosticKind::MalformedDerive { ast, id } => { - let node = ast.to_node(db.upcast()); + let node = ast.to_node(db); let derive = node.attrs().nth(*id); match derive { Some(derive) => { @@ -1174,7 +1165,7 @@ fn emit_def_diagnostic_( } } DefDiagnosticKind::MacroDefError { ast, message } => { - let node = ast.to_node(db.upcast()); + let node = ast.to_node(db); acc.push( MacroDefError { node: InFile::new(ast.file_id, AstPtr::new(&node)), @@ -1195,7 +1186,7 @@ fn precise_macro_call_location( // - e.g. the full attribute for macro errors, but only the name for name resolution match ast { MacroCallKind::FnLike { ast_id, .. } => { - let node = ast_id.to_node(db.upcast()); + let node = ast_id.to_node(db); ( ast_id.with_value(SyntaxNodePtr::from(AstPtr::new(&node))), node.path() @@ -1205,7 +1196,7 @@ fn precise_macro_call_location( ) } MacroCallKind::Derive { ast_id, derive_attr_index, derive_index, .. } => { - let node = ast_id.to_node(db.upcast()); + let node = ast_id.to_node(db); // Compute the precise location of the macro name's token in the derive // list. let token = (|| { @@ -1213,15 +1204,15 @@ fn precise_macro_call_location( .nth(derive_attr_index.ast_index()) .and_then(|x| Either::left(x.1))?; let token_tree = derive_attr.meta()?.token_tree()?; - let group_by = token_tree + let chunk_by = token_tree .syntax() .children_with_tokens() .filter_map(|elem| match elem { syntax::NodeOrToken::Token(tok) => Some(tok), _ => None, }) - .group_by(|t| t.kind() == T![,]); - let (_, mut group) = group_by + .chunk_by(|t| t.kind() == T![,]); + let (_, mut group) = chunk_by .into_iter() .filter(|&(comma, _)| !comma) .nth(*derive_index as usize)?; @@ -1233,7 +1224,7 @@ fn precise_macro_call_location( ) } MacroCallKind::Attr { ast_id, invoc_attr_index, .. } => { - let node = ast_id.to_node(db.upcast()); + let node = ast_id.to_node(db); let attr = collect_attrs(&node) .nth(invoc_attr_index.ast_index()) .and_then(|x| Either::left(x.1)) @@ -1251,7 +1242,7 @@ fn precise_macro_call_location( impl HasVisibility for Module { fn visibility(&self, db: &dyn HirDatabase) -> Visibility { - let def_map = self.id.def_map(db.upcast()); + let def_map = self.id.def_map(db); let module_data = &def_map[self.id.local_id]; module_data.visibility } @@ -1323,7 +1314,7 @@ impl AstNode for FieldSource { impl Field { pub fn name(&self, db: &dyn HirDatabase) -> Name { - self.parent.variant_data(db).fields()[self.id].name.clone() + db.variant_fields(self.parent.into()).fields()[self.id].name.clone() } pub fn index(&self) -> usize { @@ -1338,7 +1329,7 @@ impl Field { let generic_def_id: GenericDefId = match self.parent { VariantDef::Struct(it) => it.id.into(), VariantDef::Union(it) => it.id.into(), - VariantDef::Variant(it) => it.id.lookup(db.upcast()).parent.into(), + VariantDef::Variant(it) => it.id.lookup(db).parent.into(), }; let substs = TyBuilder::placeholder_subst(db, generic_def_id); let ty = db.field_types(var_id)[self.id].clone().substitute(Interner, &substs); @@ -1372,7 +1363,7 @@ impl Field { self.ty(db).ty, db.trait_environment(match hir_def::VariantId::from(self.parent) { hir_def::VariantId::EnumVariantId(id) => { - GenericDefId::AdtId(id.lookup(db.upcast()).parent.into()) + GenericDefId::AdtId(id.lookup(db).parent.into()) } hir_def::VariantId::StructId(id) => GenericDefId::AdtId(id.into()), hir_def::VariantId::UnionId(id) => GenericDefId::AdtId(id.into()), @@ -1388,10 +1379,11 @@ impl Field { impl HasVisibility for Field { fn visibility(&self, db: &dyn HirDatabase) -> Visibility { - let variant_data = self.parent.variant_data(db); + let variant_data = db.variant_fields(self.parent.into()); let visibility = &variant_data.fields()[self.id].visibility; let parent_id: hir_def::VariantId = self.parent.into(); - visibility.resolve(db.upcast(), &parent_id.resolver(db.upcast())) + // FIXME: RawVisibility::Public doesn't need to construct a resolver + Visibility::resolve(db, &parent_id.resolver(db), visibility) } } @@ -1402,16 +1394,15 @@ pub struct Struct { impl Struct { pub fn module(self, db: &dyn HirDatabase) -> Module { - Module { id: self.id.lookup(db.upcast()).container } + Module { id: self.id.lookup(db).container } } pub fn name(self, db: &dyn HirDatabase) -> Name { - db.struct_data(self.id).name.clone() + db.struct_signature(self.id).name.clone() } pub fn fields(self, db: &dyn HirDatabase) -> Vec { - db.struct_data(self.id) - .variant_data + db.variant_fields(self.id.into()) .fields() .iter() .map(|(id, _)| Field { parent: self.into(), id }) @@ -1431,15 +1422,19 @@ impl Struct { } pub fn repr(self, db: &dyn HirDatabase) -> Option { - db.struct_data(self.id).repr + db.struct_signature(self.id).repr } pub fn kind(self, db: &dyn HirDatabase) -> StructKind { - self.variant_data(db).kind() + match self.variant_fields(db).shape { + hir_def::item_tree::FieldsShape::Record => StructKind::Record, + hir_def::item_tree::FieldsShape::Tuple => StructKind::Tuple, + hir_def::item_tree::FieldsShape::Unit => StructKind::Unit, + } } - fn variant_data(self, db: &dyn HirDatabase) -> Arc { - db.struct_data(self.id).variant_data.clone() + fn variant_fields(self, db: &dyn HirDatabase) -> Arc { + db.variant_fields(self.id.into()) } pub fn is_unstable(self, db: &dyn HirDatabase) -> bool { @@ -1449,7 +1444,13 @@ impl Struct { impl HasVisibility for Struct { fn visibility(&self, db: &dyn HirDatabase) -> Visibility { - db.struct_data(self.id).visibility.resolve(db.upcast(), &self.id.resolver(db.upcast())) + let loc = self.id.lookup(db); + let item_tree = loc.id.item_tree(db); + Visibility::resolve( + db, + &self.id.resolver(db), + &item_tree[item_tree[loc.id.value].visibility], + ) } } @@ -1460,11 +1461,11 @@ pub struct Union { impl Union { pub fn name(self, db: &dyn HirDatabase) -> Name { - db.union_data(self.id).name.clone() + db.union_signature(self.id).name.clone() } pub fn module(self, db: &dyn HirDatabase) -> Module { - Module { id: self.id.lookup(db.upcast()).container } + Module { id: self.id.lookup(db).container } } pub fn ty(self, db: &dyn HirDatabase) -> Type { @@ -1479,19 +1480,21 @@ impl Union { Type::from_value_def(db, self.id) } + pub fn kind(self, db: &dyn HirDatabase) -> StructKind { + match db.variant_fields(self.id.into()).shape { + hir_def::item_tree::FieldsShape::Record => StructKind::Record, + hir_def::item_tree::FieldsShape::Tuple => StructKind::Tuple, + hir_def::item_tree::FieldsShape::Unit => StructKind::Unit, + } + } + pub fn fields(self, db: &dyn HirDatabase) -> Vec { - db.union_data(self.id) - .variant_data + db.variant_fields(self.id.into()) .fields() .iter() .map(|(id, _)| Field { parent: self.into(), id }) .collect() } - - fn variant_data(self, db: &dyn HirDatabase) -> Arc { - db.union_data(self.id).variant_data.clone() - } - pub fn is_unstable(self, db: &dyn HirDatabase) -> bool { db.attrs(self.id.into()).is_unstable() } @@ -1499,7 +1502,13 @@ impl Union { impl HasVisibility for Union { fn visibility(&self, db: &dyn HirDatabase) -> Visibility { - db.union_data(self.id).visibility.resolve(db.upcast(), &self.id.resolver(db.upcast())) + let loc = self.id.lookup(db); + let item_tree = loc.id.item_tree(db); + Visibility::resolve( + db, + &self.id.resolver(db), + &item_tree[item_tree[loc.id.value].visibility], + ) } } @@ -1510,19 +1519,23 @@ pub struct Enum { impl Enum { pub fn module(self, db: &dyn HirDatabase) -> Module { - Module { id: self.id.lookup(db.upcast()).container } + Module { id: self.id.lookup(db).container } } pub fn name(self, db: &dyn HirDatabase) -> Name { - db.enum_data(self.id).name.clone() + db.enum_signature(self.id).name.clone() } pub fn variants(self, db: &dyn HirDatabase) -> Vec { - db.enum_data(self.id).variants.iter().map(|&(id, _)| Variant { id }).collect() + db.enum_variants(self.id).variants.iter().map(|&(id, _)| Variant { id }).collect() + } + + pub fn num_variants(self, db: &dyn HirDatabase) -> usize { + db.enum_variants(self.id).variants.len() } pub fn repr(self, db: &dyn HirDatabase) -> Option { - db.enum_data(self.id).repr + db.enum_signature(self.id).repr } pub fn ty(self, db: &dyn HirDatabase) -> Type { @@ -1536,8 +1549,8 @@ impl Enum { /// The type of the enum variant bodies. pub fn variant_body_ty(self, db: &dyn HirDatabase) -> Type { Type::new_for_crate( - self.id.lookup(db.upcast()).container.krate(), - TyBuilder::builtin(match db.enum_data(self.id).variant_body_type() { + self.id.lookup(db).container.krate(), + TyBuilder::builtin(match db.enum_signature(self.id).variant_body_type() { layout::IntegerType::Pointer(sign) => match sign { true => hir_def::builtin_type::BuiltinType::Int( hir_def::builtin_type::BuiltinInt::Isize, @@ -1582,7 +1595,13 @@ impl Enum { impl HasVisibility for Enum { fn visibility(&self, db: &dyn HirDatabase) -> Visibility { - db.enum_data(self.id).visibility.resolve(db.upcast(), &self.id.resolver(db.upcast())) + let loc = self.id.lookup(db); + let item_tree = loc.id.item_tree(db); + Visibility::resolve( + db, + &self.id.resolver(db), + &item_tree[item_tree[loc.id.value].visibility], + ) } } @@ -1600,11 +1619,11 @@ pub struct Variant { impl Variant { pub fn module(self, db: &dyn HirDatabase) -> Module { - Module { id: self.id.module(db.upcast()) } + Module { id: self.id.module(db) } } pub fn parent_enum(self, db: &dyn HirDatabase) -> Enum { - self.id.lookup(db.upcast()).parent.into() + self.id.lookup(db).parent.into() } pub fn constructor_ty(self, db: &dyn HirDatabase) -> Type { @@ -1612,11 +1631,13 @@ impl Variant { } pub fn name(self, db: &dyn HirDatabase) -> Name { - db.enum_variant_data(self.id).name.clone() + let lookup = self.id.lookup(db); + let enum_ = lookup.parent; + db.enum_variants(enum_).variants[lookup.index as usize].1.clone() } pub fn fields(self, db: &dyn HirDatabase) -> Vec { - self.variant_data(db) + db.variant_fields(self.id.into()) .fields() .iter() .map(|(id, _)| Field { parent: self.into(), id }) @@ -1624,11 +1645,11 @@ impl Variant { } pub fn kind(self, db: &dyn HirDatabase) -> StructKind { - self.variant_data(db).kind() - } - - pub(crate) fn variant_data(self, db: &dyn HirDatabase) -> Arc { - db.enum_variant_data(self.id).variant_data.clone() + match db.variant_fields(self.id.into()).shape { + hir_def::item_tree::FieldsShape::Record => StructKind::Record, + hir_def::item_tree::FieldsShape::Tuple => StructKind::Tuple, + hir_def::item_tree::FieldsShape::Unit => StructKind::Unit, + } } pub fn value(self, db: &dyn HirDatabase) -> Option { @@ -1645,7 +1666,7 @@ impl Variant { Ok(match &parent_layout.0.variants { layout::Variants::Multiple { variants, .. } => Layout( { - let lookup = self.id.lookup(db.upcast()); + let lookup = self.id.lookup(db); let rustc_enum_variant_idx = RustcEnumVariantIdx(lookup.index as usize); Arc::new(variants[rustc_enum_variant_idx].clone()) }, @@ -1660,6 +1681,13 @@ impl Variant { } } +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub enum StructKind { + Record, + Tuple, + Unit, +} + /// Variants inherit visibility from the parent enum. impl HasVisibility for Variant { fn visibility(&self, db: &dyn HirDatabase) -> Visibility { @@ -1679,10 +1707,11 @@ impl_from!(Struct, Union, Enum for Adt); impl Adt { pub fn has_non_default_type_params(self, db: &dyn HirDatabase) -> bool { let subst = db.generic_defaults(self.into()); - subst.iter().any(|ty| match ty.skip_binders().data(Interner) { - GenericArgData::Ty(it) => it.is_unknown(), - _ => false, - }) + (subst.is_empty() && db.generic_params(self.into()).len_type_or_consts() != 0) + || subst.iter().any(|ty| match ty.skip_binders().data(Interner) { + GenericArgData::Ty(it) => it.is_unknown(), + _ => false, + }) } pub fn layout(self, db: &dyn HirDatabase) -> Result { @@ -1741,9 +1770,9 @@ impl Adt { /// Returns the lifetime of the DataType pub fn lifetime(&self, db: &dyn HirDatabase) -> Option { let resolver = match self { - Adt::Struct(s) => s.id.resolver(db.upcast()), - Adt::Union(u) => u.id.resolver(db.upcast()), - Adt::Enum(e) => e.id.resolver(db.upcast()), + Adt::Struct(s) => s.id.resolver(db), + Adt::Union(u) => u.id.resolver(db), + Adt::Enum(e) => e.id.resolver(db), }; resolver .generic_params() @@ -1757,19 +1786,11 @@ impl Adt { } pub fn as_struct(&self) -> Option { - if let Self::Struct(v) = self { - Some(*v) - } else { - None - } + if let Self::Struct(v) = self { Some(*v) } else { None } } pub fn as_enum(&self) -> Option { - if let Self::Enum(v) = self { - Some(*v) - } else { - None - } + if let Self::Enum(v) = self { Some(*v) } else { None } } } @@ -1815,14 +1836,6 @@ impl VariantDef { VariantDef::Variant(e) => e.name(db), } } - - pub(crate) fn variant_data(self, db: &dyn HirDatabase) -> Arc { - match self { - VariantDef::Struct(it) => it.variant_data(db), - VariantDef::Union(it) => it.variant_data(db), - VariantDef::Variant(it) => it.variant_data(db), - } - } } /// The defs which have a body. @@ -1832,9 +1845,8 @@ pub enum DefWithBody { Static(Static), Const(Const), Variant(Variant), - InTypeConst(InTypeConst), } -impl_from!(Function, Const, Static, Variant, InTypeConst for DefWithBody); +impl_from!(Function, Const, Static, Variant for DefWithBody); impl DefWithBody { pub fn module(self, db: &dyn HirDatabase) -> Module { @@ -1843,7 +1855,6 @@ impl DefWithBody { DefWithBody::Function(f) => f.module(db), DefWithBody::Static(s) => s.module(db), DefWithBody::Variant(v) => v.module(db), - DefWithBody::InTypeConst(c) => c.module(db), } } @@ -1853,7 +1864,6 @@ impl DefWithBody { DefWithBody::Static(s) => Some(s.name(db)), DefWithBody::Const(c) => c.name(db), DefWithBody::Variant(v) => Some(v.name(db)), - DefWithBody::InTypeConst(_) => None, } } @@ -1864,11 +1874,6 @@ impl DefWithBody { DefWithBody::Static(it) => it.ty(db), DefWithBody::Const(it) => it.ty(db), DefWithBody::Variant(it) => it.parent_enum(db).variant_body_ty(db), - DefWithBody::InTypeConst(it) => Type::new_with_resolver_inner( - db, - &DefWithBodyId::from(it.id).resolver(db.upcast()), - TyKind::Error.intern(Interner), - ), } } @@ -1878,14 +1883,13 @@ impl DefWithBody { DefWithBody::Static(it) => it.id.into(), DefWithBody::Const(it) => it.id.into(), DefWithBody::Variant(it) => it.into(), - DefWithBody::InTypeConst(it) => it.id.into(), } } /// A textual representation of the HIR of this def's body for debugging purposes. pub fn debug_hir(self, db: &dyn HirDatabase) -> String { let body = db.body(self.id()); - body.pretty_print(db.upcast(), self.id(), Edition::CURRENT) + body.pretty_print(db, self.id(), Edition::CURRENT) } /// A textual representation of the MIR of this def's body for debugging purposes. @@ -1906,83 +1910,25 @@ impl DefWithBody { let krate = self.module(db).id.krate(); let (body, source_map) = db.body_with_source_map(self.into()); - let item_tree_source_maps; - let outer_types_source_map = match self { - DefWithBody::Function(function) => { - let function = function.id.lookup(db.upcast()).id; - item_tree_source_maps = function.item_tree_with_source_map(db.upcast()).1; - item_tree_source_maps.function(function.value).item() + let sig_source_map = match self { + DefWithBody::Function(id) => db.function_signature_with_source_map(id.into()).1, + DefWithBody::Static(id) => db.static_signature_with_source_map(id.into()).1, + DefWithBody::Const(id) => db.const_signature_with_source_map(id.into()).1, + DefWithBody::Variant(variant) => { + let enum_id = variant.parent_enum(db).id; + db.enum_signature_with_source_map(enum_id).1 } - DefWithBody::Static(statik) => { - let statik = statik.id.lookup(db.upcast()).id; - item_tree_source_maps = statik.item_tree_with_source_map(db.upcast()).1; - item_tree_source_maps.statik(statik.value) - } - DefWithBody::Const(konst) => { - let konst = konst.id.lookup(db.upcast()).id; - item_tree_source_maps = konst.item_tree_with_source_map(db.upcast()).1; - item_tree_source_maps.konst(konst.value) - } - DefWithBody::Variant(_) | DefWithBody::InTypeConst(_) => &TypesSourceMap::EMPTY, }; - for (_, def_map) in body.blocks(db.upcast()) { + for (_, def_map) in body.blocks(db) { Module { id: def_map.module_id(DefMap::ROOT) }.diagnostics(db, acc, style_lints); } source_map .macro_calls() - .for_each(|(_ast_id, call_id)| macro_call_diagnostics(db, call_id.macro_call_id, acc)); + .for_each(|(_ast_id, call_id)| macro_call_diagnostics(db, call_id, acc)); - for diag in source_map.diagnostics() { - acc.push(match diag { - ExpressionStoreDiagnostics::InactiveCode { node, cfg, opts } => { - InactiveCode { node: *node, cfg: cfg.clone(), opts: opts.clone() }.into() - } - ExpressionStoreDiagnostics::MacroError { node, err } => { - let RenderedExpandError { message, error, kind } = - err.render_to_string(db.upcast()); - - let precise_location = if err.span().anchor.file_id == node.file_id { - Some( - err.span().range - + db.ast_id_map(err.span().anchor.file_id.into()) - .get_erased(err.span().anchor.ast_id) - .text_range() - .start(), - ) - } else { - None - }; - MacroError { - node: (*node).map(|it| it.into()), - precise_location, - message, - error, - kind, - } - .into() - } - ExpressionStoreDiagnostics::UnresolvedMacroCall { node, path } => { - UnresolvedMacroCall { - macro_call: (*node).map(|ast_ptr| ast_ptr.into()), - precise_location: None, - path: path.clone(), - is_bang: true, - } - .into() - } - ExpressionStoreDiagnostics::AwaitOutsideOfAsync { node, location } => { - AwaitOutsideOfAsync { node: *node, location: location.clone() }.into() - } - ExpressionStoreDiagnostics::UnreachableLabel { node, name } => { - UnreachableLabel { node: *node, name: name.clone() }.into() - } - ExpressionStoreDiagnostics::UndeclaredLabel { node, name } => { - UndeclaredLabel { node: *node, name: name.clone() }.into() - } - }); - } + expr_store_diagnostics(db, acc, &source_map); let infer = db.infer(self.into()); for d in &infer.diagnostics { @@ -1990,8 +1936,8 @@ impl DefWithBody { db, self.into(), d, - outer_types_source_map, &source_map, + &sig_source_map, )); } @@ -2109,7 +2055,7 @@ impl DefWithBody { continue; } let mut need_mut = &mol[local]; - if body[binding_id].name == sym::self_.clone() + if body[binding_id].name == sym::self_ && need_mut == &mir::MutabilityReason::Unused { need_mut = &mir::MutabilityReason::Not; @@ -2179,14 +2125,66 @@ impl DefWithBody { DefWithBody::Static(it) => it.into(), DefWithBody::Const(it) => it.into(), DefWithBody::Variant(it) => it.into(), - // FIXME: don't ignore diagnostics for in type const - DefWithBody::InTypeConst(_) => return, }; for diag in hir_ty::diagnostics::incorrect_case(db, def.into()) { acc.push(diag.into()) } } } + +fn expr_store_diagnostics( + db: &dyn HirDatabase, + acc: &mut Vec, + source_map: &ExpressionStoreSourceMap, +) { + for diag in source_map.diagnostics() { + acc.push(match diag { + ExpressionStoreDiagnostics::InactiveCode { node, cfg, opts } => { + InactiveCode { node: *node, cfg: cfg.clone(), opts: opts.clone() }.into() + } + ExpressionStoreDiagnostics::MacroError { node, err } => { + let RenderedExpandError { message, error, kind } = err.render_to_string(db); + + let editioned_file_id = EditionedFileId::from_span(db, err.span().anchor.file_id); + let precise_location = if editioned_file_id == node.file_id { + Some( + err.span().range + + db.ast_id_map(editioned_file_id.into()) + .get_erased(err.span().anchor.ast_id) + .text_range() + .start(), + ) + } else { + None + }; + MacroError { + node: (node).map(|it| it.into()), + precise_location, + message, + error, + kind, + } + .into() + } + ExpressionStoreDiagnostics::UnresolvedMacroCall { node, path } => UnresolvedMacroCall { + macro_call: (*node).map(|ast_ptr| ast_ptr.into()), + precise_location: None, + path: path.clone(), + is_bang: true, + } + .into(), + ExpressionStoreDiagnostics::AwaitOutsideOfAsync { node, location } => { + AwaitOutsideOfAsync { node: *node, location: location.clone() }.into() + } + ExpressionStoreDiagnostics::UnreachableLabel { node, name } => { + UnreachableLabel { node: *node, name: name.clone() }.into() + } + ExpressionStoreDiagnostics::UndeclaredLabel { node, name } => { + UndeclaredLabel { node: *node, name: name.clone() }.into() + } + }); + } +} #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub struct Function { pub(crate) id: FunctionId, @@ -2194,11 +2192,11 @@ pub struct Function { impl Function { pub fn module(self, db: &dyn HirDatabase) -> Module { - self.id.module(db.upcast()).into() + self.id.module(db).into() } pub fn name(self, db: &dyn HirDatabase) -> Name { - db.function_data(self.id).name.clone() + db.function_signature(self.id).name.clone() } pub fn ty(self, db: &dyn HirDatabase) -> Type { @@ -2206,7 +2204,7 @@ impl Function { } pub fn fn_ptr_type(self, db: &dyn HirDatabase) -> Type { - let resolver = self.id.resolver(db.upcast()); + let resolver = self.id.resolver(db); let substs = TyBuilder::placeholder_subst(db, self.id); let callable_sig = db.callable_item_signature(self.id.into()).substitute(Interner, &substs); let ty = TyKind::Function(callable_sig.to_fn_ptr()).intern(Interner); @@ -2215,7 +2213,7 @@ impl Function { /// Get this function's return type pub fn ret_type(self, db: &dyn HirDatabase) -> Type { - let resolver = self.id.resolver(db.upcast()); + let resolver = self.id.resolver(db); let substs = TyBuilder::placeholder_subst(db, self.id); let callable_sig = db.callable_item_signature(self.id.into()).substitute(Interner, &substs); let ty = callable_sig.ret().clone(); @@ -2228,8 +2226,8 @@ impl Function { db: &dyn HirDatabase, generics: impl Iterator, ) -> Type { - let resolver = self.id.resolver(db.upcast()); - let parent_id: Option = match self.id.lookup(db.upcast()).container { + let resolver = self.id.resolver(db); + let parent_id: Option = match self.id.lookup(db).container { ItemContainerId::ImplId(it) => Some(it.into()), ItemContainerId::TraitId(it) => Some(it.into()), ItemContainerId::ModuleId(_) | ItemContainerId::ExternBlockId(_) => None, @@ -2256,7 +2254,7 @@ impl Function { if !self.is_async(db) { return None; } - let resolver = self.id.resolver(db.upcast()); + let resolver = self.id.resolver(db); let substs = TyBuilder::placeholder_subst(db, self.id); let callable_sig = db.callable_item_signature(self.id.into()).substitute(Interner, &substs); let ret_ty = callable_sig.ret().clone(); @@ -2269,7 +2267,7 @@ impl Function { } pub fn has_self_param(self, db: &dyn HirDatabase) -> bool { - db.function_data(self.id).has_self_param() + db.function_signature(self.id).has_self_param() } pub fn self_param(self, db: &dyn HirDatabase) -> Option { @@ -2292,7 +2290,7 @@ impl Function { } pub fn num_params(self, db: &dyn HirDatabase) -> usize { - db.function_data(self.id).params.len() + db.function_signature(self.id).params.len() } pub fn method_params(self, db: &dyn HirDatabase) -> Option> { @@ -2304,7 +2302,7 @@ impl Function { let environment = db.trait_environment(self.id.into()); let substs = TyBuilder::placeholder_subst(db, self.id); let callable_sig = db.callable_item_signature(self.id.into()).substitute(Interner, &substs); - let skip = if db.function_data(self.id).has_self_param() { 1 } else { 0 }; + let skip = if db.function_signature(self.id).has_self_param() { 1 } else { 0 }; callable_sig .params() .iter() @@ -2324,7 +2322,7 @@ impl Function { generics: impl Iterator, ) -> Vec { let environment = db.trait_environment(self.id.into()); - let parent_id: Option = match self.id.lookup(db.upcast()).container { + let parent_id: Option = match self.id.lookup(db).container { ItemContainerId::ImplId(it) => Some(it.into()), ItemContainerId::TraitId(it) => Some(it.into()), ItemContainerId::ModuleId(_) | ItemContainerId::ExternBlockId(_) => None, @@ -2350,7 +2348,7 @@ impl Function { }) .build(); let callable_sig = db.callable_item_signature(self.id.into()).substitute(Interner, &substs); - let skip = if db.function_data(self.id).has_self_param() { 1 } else { 0 }; + let skip = if db.function_signature(self.id).has_self_param() { 1 } else { 0 }; callable_sig .params() .iter() @@ -2364,15 +2362,19 @@ impl Function { } pub fn is_const(self, db: &dyn HirDatabase) -> bool { - db.function_data(self.id).is_const() + db.function_signature(self.id).is_const() } pub fn is_async(self, db: &dyn HirDatabase) -> bool { - db.function_data(self.id).is_async() + db.function_signature(self.id).is_async() + } + + pub fn is_varargs(self, db: &dyn HirDatabase) -> bool { + db.function_signature(self.id).is_varargs() } pub fn extern_block(self, db: &dyn HirDatabase) -> Option { - match self.id.lookup(db.upcast()).container { + match self.id.lookup(db).container { ItemContainerId::ExternBlockId(id) => Some(ExternBlock { id }), _ => None, } @@ -2384,14 +2386,11 @@ impl Function { } let Some(impl_traits) = self.ret_type(db).as_impl_traits(db) else { return false }; - let Some(future_trait_id) = - db.lang_item(self.ty(db).env.krate, LangItem::Future).and_then(|t| t.as_trait()) + let Some(future_trait_id) = LangItem::Future.resolve_trait(db, self.ty(db).env.krate) else { return false; }; - let Some(sized_trait_id) = - db.lang_item(self.ty(db).env.krate, LangItem::Sized).and_then(|t| t.as_trait()) - else { + let Some(sized_trait_id) = LangItem::Sized.resolve_trait(db, self.ty(db).env.krate) else { return false; }; @@ -2415,7 +2414,7 @@ impl Function { /// is this a `fn main` or a function with an `export_name` of `main`? pub fn is_main(self, db: &dyn HirDatabase) -> bool { db.attrs(self.id.into()).export_name() == Some(&sym::main) - || self.module(db).is_crate_root() && db.function_data(self.id).name == sym::main + || self.module(db).is_crate_root() && db.function_signature(self.id).name == sym::main } /// Is this a function with an `export_name` of `main`? @@ -2457,7 +2456,7 @@ impl Function { /// /// This is false in the case of required (not provided) trait methods. pub fn has_body(self, db: &dyn HirDatabase) -> bool { - db.function_data(self.id).has_body() + db.function_signature(self.id).has_body() } pub fn as_proc_macro(self, db: &dyn HirDatabase) -> Option { @@ -2469,7 +2468,7 @@ impl Function { { return None; } - let def_map = db.crate_def_map(HasModule::krate(&self.id, db.upcast())); + let def_map = db.crate_def_map(HasModule::krate(&self.id, db)); def_map.fn_as_proc_macro(self.id).map(|id| Macro { id: id.into() }) } @@ -2601,11 +2600,11 @@ pub struct SelfParam { impl SelfParam { pub fn access(self, db: &dyn HirDatabase) -> Access { - let func_data = db.function_data(self.func); + let func_data = db.function_signature(self.func); func_data .params .first() - .map(|¶m| match &func_data.types_map[param] { + .map(|¶m| match &func_data.store[param] { TypeRef::Reference(ref_) => match ref_.mutability { hir_def::type_ref::Mutability::Shared => Access::Shared, hir_def::type_ref::Mutability::Mut => Access::Exclusive, @@ -2630,7 +2629,7 @@ impl SelfParam { // FIXME: Find better API to also handle const generics pub fn ty_with_args(&self, db: &dyn HirDatabase, generics: impl Iterator) -> Type { - let parent_id: GenericDefId = match self.func.lookup(db.upcast()).container { + let parent_id: GenericDefId = match self.func.lookup(db).container { ItemContainerId::ImplId(it) => it.into(), ItemContainerId::TraitId(it) => it.into(), ItemContainerId::ModuleId(_) | ItemContainerId::ExternBlockId(_) => { @@ -2671,48 +2670,57 @@ pub struct ExternCrateDecl { impl ExternCrateDecl { pub fn module(self, db: &dyn HirDatabase) -> Module { - self.id.module(db.upcast()).into() + self.id.module(db).into() } pub fn resolved_crate(self, db: &dyn HirDatabase) -> Option { - db.extern_crate_decl_data(self.id).crate_id.map(Into::into) + let loc = self.id.lookup(db); + let item_tree = loc.id.item_tree(db); + let krate = loc.container.krate(); + let name = &item_tree[loc.id.value].name; + if *name == sym::self_ { + Some(krate.into()) + } else { + krate.data(db).dependencies.iter().find_map(|dep| { + if dep.name.symbol() == name.symbol() { Some(dep.crate_id.into()) } else { None } + }) + } } pub fn name(self, db: &dyn HirDatabase) -> Name { - db.extern_crate_decl_data(self.id).name.clone() + let loc = self.id.lookup(db); + let item_tree = loc.id.item_tree(db); + item_tree[loc.id.value].name.clone() } pub fn alias(self, db: &dyn HirDatabase) -> Option { - db.extern_crate_decl_data(self.id).alias.clone() + let loc = self.id.lookup(db); + let item_tree = loc.id.item_tree(db); + item_tree[loc.id.value].alias.clone() } /// Returns the name under which this crate is made accessible, taking `_` into account. pub fn alias_or_name(self, db: &dyn HirDatabase) -> Option { - let extern_crate_decl_data = db.extern_crate_decl_data(self.id); - match &extern_crate_decl_data.alias { + let loc = self.id.lookup(db); + let item_tree = loc.id.item_tree(db); + + match &item_tree[loc.id.value].alias { Some(ImportAlias::Underscore) => None, Some(ImportAlias::Alias(alias)) => Some(alias.clone()), - None => Some(extern_crate_decl_data.name.clone()), + None => Some(item_tree[loc.id.value].name.clone()), } } } impl HasVisibility for ExternCrateDecl { fn visibility(&self, db: &dyn HirDatabase) -> Visibility { - db.extern_crate_decl_data(self.id) - .visibility - .resolve(db.upcast(), &self.id.resolver(db.upcast())) - } -} - -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -pub struct InTypeConst { - pub(crate) id: InTypeConstId, -} - -impl InTypeConst { - pub fn module(self, db: &dyn HirDatabase) -> Module { - Module { id: self.id.lookup(db.upcast()).owner.module(db.upcast()) } + let loc = self.id.lookup(db); + let item_tree = loc.id.item_tree(db); + Visibility::resolve( + db, + &self.id.resolver(db), + &item_tree[item_tree[loc.id.value].visibility], + ) } } @@ -2723,11 +2731,11 @@ pub struct Const { impl Const { pub fn module(self, db: &dyn HirDatabase) -> Module { - Module { id: self.id.module(db.upcast()) } + Module { id: self.id.module(db) } } pub fn name(self, db: &dyn HirDatabase) -> Option { - db.const_data(self.id).name.clone() + db.const_signature(self.id).name.clone() } pub fn value(self, db: &dyn HirDatabase) -> Option { @@ -2796,15 +2804,15 @@ pub struct Static { impl Static { pub fn module(self, db: &dyn HirDatabase) -> Module { - Module { id: self.id.module(db.upcast()) } + Module { id: self.id.module(db) } } pub fn name(self, db: &dyn HirDatabase) -> Name { - db.static_data(self.id).name.clone() + db.static_signature(self.id).name.clone() } pub fn is_mut(self, db: &dyn HirDatabase) -> bool { - db.static_data(self.id).mutable + db.static_signature(self.id).flags.contains(StaticFlags::MUTABLE) } pub fn value(self, db: &dyn HirDatabase) -> Option { @@ -2816,7 +2824,7 @@ impl Static { } pub fn extern_block(self, db: &dyn HirDatabase) -> Option { - match self.id.lookup(db.upcast()).container { + match self.id.lookup(db).container { ItemContainerId::ExternBlockId(id) => Some(ExternBlock { id }), _ => None, } @@ -2831,7 +2839,13 @@ impl Static { impl HasVisibility for Static { fn visibility(&self, db: &dyn HirDatabase) -> Visibility { - db.static_data(self.id).visibility.resolve(db.upcast(), &self.id.resolver(db.upcast())) + let loc = self.id.lookup(db); + let item_tree = loc.id.item_tree(db); + Visibility::resolve( + db, + &self.id.resolver(db), + &item_tree[item_tree[loc.id.value].visibility], + ) } } @@ -2842,40 +2856,37 @@ pub struct Trait { impl Trait { pub fn lang(db: &dyn HirDatabase, krate: Crate, name: &Name) -> Option { - db.lang_item(krate.into(), LangItem::from_name(name)?) - .and_then(LangItemTarget::as_trait) - .map(Into::into) + LangItem::from_name(name)?.resolve_trait(db, krate.into()).map(Into::into) } pub fn module(self, db: &dyn HirDatabase) -> Module { - Module { id: self.id.lookup(db.upcast()).container } + Module { id: self.id.lookup(db).container } } pub fn name(self, db: &dyn HirDatabase) -> Name { - db.trait_data(self.id).name.clone() + db.trait_signature(self.id).name.clone() } pub fn direct_supertraits(self, db: &dyn HirDatabase) -> Vec { - let traits = direct_super_traits(db.upcast(), self.into()); + let traits = direct_super_traits(db, self.into()); traits.iter().map(|tr| Trait::from(*tr)).collect() } pub fn all_supertraits(self, db: &dyn HirDatabase) -> Vec { - let traits = all_super_traits(db.upcast(), self.into()); + let traits = all_super_traits(db, self.into()); traits.iter().map(|tr| Trait::from(*tr)).collect() } pub fn function(self, db: &dyn HirDatabase, name: impl PartialEq) -> Option { - db.trait_data(self.id).items.iter().find(|(n, _)| name == *n).and_then( - |&(_, it)| match it { - AssocItemId::FunctionId(id) => Some(Function { id }), - _ => None, - }, - ) + db.trait_items(self.id).items.iter().find(|(n, _)| name == *n).and_then(|&(_, it)| match it + { + AssocItemId::FunctionId(id) => Some(Function { id }), + _ => None, + }) } pub fn items(self, db: &dyn HirDatabase) -> Vec { - db.trait_data(self.id).items.iter().map(|(_name, it)| (*it).into()).collect() + db.trait_items(self.id).items.iter().map(|(_name, it)| (*it).into()).collect() } pub fn items_with_supertraits(self, db: &dyn HirDatabase) -> Vec { @@ -2883,11 +2894,11 @@ impl Trait { } pub fn is_auto(self, db: &dyn HirDatabase) -> bool { - db.trait_data(self.id).flags.contains(TraitFlags::IS_AUTO) + db.trait_signature(self.id).flags.contains(TraitFlags::AUTO) } pub fn is_unsafe(&self, db: &dyn HirDatabase) -> bool { - db.trait_data(self.id).flags.contains(TraitFlags::IS_UNSAFE) + db.trait_signature(self.id).flags.contains(TraitFlags::UNSAFE) } pub fn type_or_const_param_count( @@ -2911,25 +2922,40 @@ impl Trait { db: &dyn HirDatabase, ) -> Option> { let mut violations = vec![]; - let _ = hir_ty::dyn_compatibility::dyn_compatibility_with_callback(db, self.id, &mut |violation| { - violations.push(violation); - ControlFlow::Continue(()) - }); + _ = hir_ty::dyn_compatibility::dyn_compatibility_with_callback( + db, + self.id, + &mut |violation| { + violations.push(violation); + ControlFlow::Continue(()) + }, + ); violations.is_empty().not().then_some(violations) } fn all_macro_calls(&self, db: &dyn HirDatabase) -> Box<[(AstId, MacroCallId)]> { - db.trait_data(self.id) + db.trait_items(self.id) .macro_calls .as_ref() .map(|it| it.as_ref().clone().into_boxed_slice()) .unwrap_or_default() } + + /// `#[rust_analyzer::completions(...)]` mode. + pub fn complete(self, db: &dyn HirDatabase) -> Complete { + Complete::extract(true, &self.attrs(db)) + } } impl HasVisibility for Trait { fn visibility(&self, db: &dyn HirDatabase) -> Visibility { - db.trait_data(self.id).visibility.resolve(db.upcast(), &self.id.resolver(db.upcast())) + let loc = self.id.lookup(db); + let item_tree = loc.id.item_tree(db); + Visibility::resolve( + db, + &self.id.resolver(db), + &item_tree[item_tree[loc.id.value].visibility], + ) } } @@ -2940,17 +2966,23 @@ pub struct TraitAlias { impl TraitAlias { pub fn module(self, db: &dyn HirDatabase) -> Module { - Module { id: self.id.lookup(db.upcast()).container } + Module { id: self.id.lookup(db).container } } pub fn name(self, db: &dyn HirDatabase) -> Name { - db.trait_alias_data(self.id).name.clone() + db.trait_alias_signature(self.id).name.clone() } } impl HasVisibility for TraitAlias { fn visibility(&self, db: &dyn HirDatabase) -> Visibility { - db.trait_alias_data(self.id).visibility.resolve(db.upcast(), &self.id.resolver(db.upcast())) + let loc = self.id.lookup(db); + let item_tree = loc.id.item_tree(db); + Visibility::resolve( + db, + &self.id.resolver(db), + &item_tree[item_tree[loc.id.value].visibility], + ) } } @@ -2962,14 +2994,15 @@ pub struct TypeAlias { impl TypeAlias { pub fn has_non_default_type_params(self, db: &dyn HirDatabase) -> bool { let subst = db.generic_defaults(self.id.into()); - subst.iter().any(|ty| match ty.skip_binders().data(Interner) { - GenericArgData::Ty(it) => it.is_unknown(), - _ => false, - }) + (subst.is_empty() && db.generic_params(self.id.into()).len_type_or_consts() != 0) + || subst.iter().any(|ty| match ty.skip_binders().data(Interner) { + GenericArgData::Ty(it) => it.is_unknown(), + _ => false, + }) } pub fn module(self, db: &dyn HirDatabase) -> Module { - Module { id: self.id.module(db.upcast()) } + Module { id: self.id.module(db) } } pub fn ty(self, db: &dyn HirDatabase) -> Type { @@ -2981,15 +3014,13 @@ impl TypeAlias { } pub fn name(self, db: &dyn HirDatabase) -> Name { - db.type_alias_data(self.id).name.clone() + db.type_alias_signature(self.id).name.clone() } } impl HasVisibility for TypeAlias { fn visibility(&self, db: &dyn HirDatabase) -> Visibility { - let function_data = db.type_alias_data(self.id); - let visibility = &function_data.visibility; - visibility.resolve(db.upcast(), &self.id.resolver(db.upcast())) + db.type_alias_visibility(self.id) } } @@ -3000,7 +3031,7 @@ pub struct ExternBlock { impl ExternBlock { pub fn module(self, db: &dyn HirDatabase) -> Module { - Module { id: self.id.module(db.upcast()) } + Module { id: self.id.module(db) } } } @@ -3009,7 +3040,7 @@ pub struct StaticLifetime; impl StaticLifetime { pub fn name(self) -> Name { - Name::new_symbol_root(sym::tick_static.clone()) + Name::new_symbol_root(sym::tick_static) } } @@ -3024,7 +3055,8 @@ impl BuiltinType { } pub fn ty(self, db: &dyn HirDatabase) -> Type { - Type::new_for_crate(db.crate_graph().iter().next().unwrap(), TyBuilder::builtin(self.inner)) + let core = Crate::core(db).map(|core| core.id).unwrap_or_else(|| db.all_crates()[0]); + Type::new_for_crate(core, TyBuilder::builtin(self.inner)) } pub fn name(self) -> Name { @@ -3091,19 +3123,39 @@ pub struct Macro { impl Macro { pub fn module(self, db: &dyn HirDatabase) -> Module { - Module { id: self.id.module(db.upcast()) } + Module { id: self.id.module(db) } } pub fn name(self, db: &dyn HirDatabase) -> Name { match self.id { - MacroId::Macro2Id(id) => db.macro2_data(id).name.clone(), - MacroId::MacroRulesId(id) => db.macro_rules_data(id).name.clone(), - MacroId::ProcMacroId(id) => db.proc_macro_data(id).name.clone(), + MacroId::Macro2Id(id) => { + let loc = id.lookup(db); + let item_tree = loc.id.item_tree(db); + item_tree[loc.id.value].name.clone() + } + MacroId::MacroRulesId(id) => { + let loc = id.lookup(db); + let item_tree = loc.id.item_tree(db); + item_tree[loc.id.value].name.clone() + } + MacroId::ProcMacroId(id) => { + let loc = id.lookup(db); + let item_tree = loc.id.item_tree(db); + match loc.kind { + ProcMacroKind::CustomDerive => db + .attrs(id.into()) + .parse_proc_macro_derive() + .map_or_else(|| item_tree[loc.id.value].name.clone(), |(it, _)| it), + ProcMacroKind::Bang | ProcMacroKind::Attr => { + item_tree[loc.id.value].name.clone() + } + } + } } } pub fn is_macro_export(self, db: &dyn HirDatabase) -> bool { - matches!(self.id, MacroId::MacroRulesId(id) if db.macro_rules_data(id).macro_export) + matches!(self.id, MacroId::MacroRulesId(_) if db.attrs(self.id.into()).by_key(sym::macro_export).exists()) } pub fn is_proc_macro(self) -> bool { @@ -3112,7 +3164,7 @@ impl Macro { pub fn kind(&self, db: &dyn HirDatabase) -> MacroKind { match self.id { - MacroId::Macro2Id(it) => match it.lookup(db.upcast()).expander { + MacroId::Macro2Id(it) => match it.lookup(db).expander { MacroExpander::Declarative => MacroKind::Declarative, MacroExpander::BuiltIn(_) | MacroExpander::BuiltInEager(_) => { MacroKind::DeclarativeBuiltIn @@ -3120,7 +3172,7 @@ impl Macro { MacroExpander::BuiltInAttr(_) => MacroKind::AttrBuiltIn, MacroExpander::BuiltInDerive(_) => MacroKind::DeriveBuiltIn, }, - MacroId::MacroRulesId(it) => match it.lookup(db.upcast()).expander { + MacroId::MacroRulesId(it) => match it.lookup(db).expander { MacroExpander::Declarative => MacroKind::Declarative, MacroExpander::BuiltIn(_) | MacroExpander::BuiltInEager(_) => { MacroKind::DeclarativeBuiltIn @@ -3128,7 +3180,7 @@ impl Macro { MacroExpander::BuiltInAttr(_) => MacroKind::AttrBuiltIn, MacroExpander::BuiltInDerive(_) => MacroKind::DeriveBuiltIn, }, - MacroId::ProcMacroId(it) => match it.lookup(db.upcast()).kind { + MacroId::ProcMacroId(it) => match it.lookup(db).kind { ProcMacroKind::CustomDerive => MacroKind::Derive, ProcMacroKind::Bang => MacroKind::ProcMacro, ProcMacroKind::Attr => MacroKind::Attr, @@ -3146,10 +3198,10 @@ impl Macro { pub fn is_builtin_derive(&self, db: &dyn HirDatabase) -> bool { match self.id { MacroId::Macro2Id(it) => { - matches!(it.lookup(db.upcast()).expander, MacroExpander::BuiltInDerive(_)) + matches!(it.lookup(db).expander, MacroExpander::BuiltInDerive(_)) } MacroId::MacroRulesId(it) => { - matches!(it.lookup(db.upcast()).expander, MacroExpander::BuiltInDerive(_)) + matches!(it.lookup(db).expander, MacroExpander::BuiltInDerive(_)) } MacroId::ProcMacroId(_) => false, } @@ -3158,10 +3210,10 @@ impl Macro { pub fn is_env_or_option_env(&self, db: &dyn HirDatabase) -> bool { match self.id { MacroId::Macro2Id(it) => { - matches!(it.lookup(db.upcast()).expander, MacroExpander::BuiltInEager(eager) if eager.is_env_or_option_env()) + matches!(it.lookup(db).expander, MacroExpander::BuiltInEager(eager) if eager.is_env_or_option_env()) } MacroId::MacroRulesId(it) => { - matches!(it.lookup(db.upcast()).expander, MacroExpander::BuiltInEager(eager) if eager.is_env_or_option_env()) + matches!(it.lookup(db).expander, MacroExpander::BuiltInEager(eager) if eager.is_env_or_option_env()) } MacroId::ProcMacroId(_) => false, } @@ -3170,10 +3222,10 @@ impl Macro { pub fn is_asm_or_global_asm(&self, db: &dyn HirDatabase) -> bool { match self.id { MacroId::Macro2Id(it) => { - matches!(it.lookup(db.upcast()).expander, MacroExpander::BuiltIn(m) if m.is_asm()) + matches!(it.lookup(db).expander, MacroExpander::BuiltIn(m) if m.is_asm()) } MacroId::MacroRulesId(it) => { - matches!(it.lookup(db.upcast()).expander, MacroExpander::BuiltIn(m) if m.is_asm()) + matches!(it.lookup(db).expander, MacroExpander::BuiltIn(m) if m.is_asm()) } MacroId::ProcMacroId(_) => false, } @@ -3192,9 +3244,13 @@ impl HasVisibility for Macro { fn visibility(&self, db: &dyn HirDatabase) -> Visibility { match self.id { MacroId::Macro2Id(id) => { - let data = db.macro2_data(id); - let visibility = &data.visibility; - visibility.resolve(db.upcast(), &self.id.resolver(db.upcast())) + let loc = id.lookup(db); + let item_tree = loc.id.item_tree(db); + Visibility::resolve( + db, + &id.resolver(db), + &item_tree[item_tree[loc.id.value].visibility], + ) } MacroId::MacroRulesId(_) => Visibility::Public, MacroId::ProcMacroId(_) => Visibility::Public, @@ -3335,7 +3391,7 @@ impl AsAssocItem for DefWithBody { match self { DefWithBody::Function(it) => it.as_assoc_item(db), DefWithBody::Const(it) => it.as_assoc_item(db), - DefWithBody::Static(_) | DefWithBody::Variant(_) | DefWithBody::InTypeConst(_) => None, + DefWithBody::Static(_) | DefWithBody::Variant(_) => None, } } } @@ -3346,11 +3402,11 @@ fn as_assoc_item<'db, ID, DEF, LOC>( id: ID, ) -> Option where - ID: Lookup = dyn DefDatabase + 'db, Data = AssocItemLoc>, + ID: Lookup>, DEF: From, LOC: ItemTreeNode, { - match id.lookup(db.upcast()).container { + match id.lookup(db).container { ItemContainerId::TraitId(_) | ItemContainerId::ImplId(_) => Some(ctor(DEF::from(id))), ItemContainerId::ModuleId(_) | ItemContainerId::ExternBlockId(_) => None, } @@ -3362,11 +3418,11 @@ fn as_extern_assoc_item<'db, ID, DEF, LOC>( id: ID, ) -> Option where - ID: Lookup = dyn DefDatabase + 'db, Data = AssocItemLoc>, + ID: Lookup>, DEF: From, LOC: ItemTreeNode, { - match id.lookup(db.upcast()).container { + match id.lookup(db).container { ItemContainerId::ExternBlockId(_) => Some(ctor(DEF::from(id))), ItemContainerId::TraitId(_) | ItemContainerId::ImplId(_) | ItemContainerId::ModuleId(_) => { None @@ -3432,9 +3488,9 @@ impl AssocItem { pub fn container(self, db: &dyn HirDatabase) -> AssocItemContainer { let container = match self { - AssocItem::Function(it) => it.id.lookup(db.upcast()).container, - AssocItem::Const(it) => it.id.lookup(db.upcast()).container, - AssocItem::TypeAlias(it) => it.id.lookup(db.upcast()).container, + AssocItem::Function(it) => it.id.lookup(db).container, + AssocItem::Const(it) => it.id.lookup(db).container, + AssocItem::TypeAlias(it) => it.id.lookup(db).container, }; match container { ItemContainerId::TraitId(id) => AssocItemContainer::Trait(id.into()), @@ -3506,17 +3562,16 @@ impl AssocItem { DefWithBody::from(func).diagnostics(db, acc, style_lints); } AssocItem::Const(const_) => { + GenericDef::Const(const_).diagnostics(db, acc); DefWithBody::from(const_).diagnostics(db, acc, style_lints); } AssocItem::TypeAlias(type_alias) => { GenericDef::TypeAlias(type_alias).diagnostics(db, acc); - let tree_id = type_alias.id.lookup(db.upcast()).id; - let tree_source_maps = tree_id.item_tree_with_source_map(db.upcast()).1; push_ty_diagnostics( db, acc, db.type_for_type_alias_with_diagnostics(type_alias.id).1, - tree_source_maps.type_alias(tree_id.value).item(), + &db.type_alias_signature_with_source_map(type_alias.id).1, ); for diag in hir_ty::diagnostics::incorrect_case(db, type_alias.id.into()) { acc.push(diag.into()); @@ -3623,67 +3678,32 @@ impl GenericDef { pub fn diagnostics(self, db: &dyn HirDatabase, acc: &mut Vec) { let def = self.id(); - let item_tree_source_maps; - let (generics, generics_source_map) = db.generic_params_with_source_map(def); + let generics = db.generic_params(def); if generics.is_empty() && generics.no_predicates() { return; } - let source_map = match &generics_source_map { - Some(it) => it, - None => match def { - GenericDefId::FunctionId(it) => { - let id = it.lookup(db.upcast()).id; - item_tree_source_maps = id.item_tree_with_source_map(db.upcast()).1; - item_tree_source_maps.function(id.value).generics() - } - GenericDefId::AdtId(AdtId::EnumId(it)) => { - let id = it.lookup(db.upcast()).id; - item_tree_source_maps = id.item_tree_with_source_map(db.upcast()).1; - item_tree_source_maps.enum_generic(id.value) - } - GenericDefId::AdtId(AdtId::StructId(it)) => { - let id = it.lookup(db.upcast()).id; - item_tree_source_maps = id.item_tree_with_source_map(db.upcast()).1; - item_tree_source_maps.strukt(id.value).generics() - } - GenericDefId::AdtId(AdtId::UnionId(it)) => { - let id = it.lookup(db.upcast()).id; - item_tree_source_maps = id.item_tree_with_source_map(db.upcast()).1; - item_tree_source_maps.union(id.value).generics() - } - GenericDefId::TraitId(it) => { - let id = it.lookup(db.upcast()).id; - item_tree_source_maps = id.item_tree_with_source_map(db.upcast()).1; - item_tree_source_maps.trait_generic(id.value) - } - GenericDefId::TraitAliasId(it) => { - let id = it.lookup(db.upcast()).id; - item_tree_source_maps = id.item_tree_with_source_map(db.upcast()).1; - item_tree_source_maps.trait_alias_generic(id.value) - } - GenericDefId::TypeAliasId(it) => { - let id = it.lookup(db.upcast()).id; - item_tree_source_maps = id.item_tree_with_source_map(db.upcast()).1; - item_tree_source_maps.type_alias(id.value).generics() - } - GenericDefId::ImplId(it) => { - let id = it.lookup(db.upcast()).id; - item_tree_source_maps = id.item_tree_with_source_map(db.upcast()).1; - item_tree_source_maps.impl_(id.value).generics() - } - GenericDefId::ConstId(_) => return, - GenericDefId::StaticId(_) => return, - }, + let source_map = match def { + GenericDefId::AdtId(AdtId::EnumId(it)) => db.enum_signature_with_source_map(it).1, + GenericDefId::AdtId(AdtId::StructId(it)) => db.struct_signature_with_source_map(it).1, + GenericDefId::AdtId(AdtId::UnionId(it)) => db.union_signature_with_source_map(it).1, + GenericDefId::ConstId(_) => return, + GenericDefId::FunctionId(it) => db.function_signature_with_source_map(it).1, + GenericDefId::ImplId(it) => db.impl_signature_with_source_map(it).1, + GenericDefId::StaticId(_) => return, + GenericDefId::TraitAliasId(it) => db.trait_alias_signature_with_source_map(it).1, + GenericDefId::TraitId(it) => db.trait_signature_with_source_map(it).1, + GenericDefId::TypeAliasId(it) => db.type_alias_signature_with_source_map(it).1, }; - push_ty_diagnostics(db, acc, db.generic_defaults_with_diagnostics(def).1, source_map); + expr_store_diagnostics(db, acc, &source_map); + push_ty_diagnostics(db, acc, db.generic_defaults_with_diagnostics(def).1, &source_map); push_ty_diagnostics( db, acc, db.generic_predicates_without_parent_with_diagnostics(def).1, - source_map, + &source_map, ); for (param_id, param) in generics.iter_type_or_consts() { if let TypeOrConstParamData::ConstParamData(_) = param { @@ -3694,11 +3714,28 @@ impl GenericDef { TypeOrConstParamId { parent: def, local_id: param_id }, )) .1, - source_map, + &source_map, ); } } } + + /// Returns a string describing the kind of this type. + #[inline] + pub fn description(self) -> &'static str { + match self { + GenericDef::Function(_) => "function", + GenericDef::Adt(Adt::Struct(_)) => "struct", + GenericDef::Adt(Adt::Enum(_)) => "enum", + GenericDef::Adt(Adt::Union(_)) => "union", + GenericDef::Trait(_) => "trait", + GenericDef::TraitAlias(_) => "trait alias", + GenericDef::TypeAlias(_) => "type alias", + GenericDef::Impl(_) => "impl", + GenericDef::Const(_) => "constant", + GenericDef::Static(_) => "static", + } + } } // We cannot call this `Substitution` unfortunately... @@ -3716,9 +3753,9 @@ impl GenericSubstitution { pub fn types(&self, db: &dyn HirDatabase) -> Vec<(Symbol, Type)> { let container = match self.def { - GenericDefId::ConstId(id) => Some(id.lookup(db.upcast()).container), - GenericDefId::FunctionId(id) => Some(id.lookup(db.upcast()).container), - GenericDefId::TypeAliasId(id) => Some(id.lookup(db.upcast()).container), + GenericDefId::ConstId(id) => Some(id.lookup(db).container), + GenericDefId::FunctionId(id) => Some(id.lookup(db).container), + GenericDefId::TypeAliasId(id) => Some(id.lookup(db).container), _ => None, }; let container_type_params = container @@ -3741,12 +3778,19 @@ impl GenericSubstitution { TypeOrConstParamData::TypeParamData(param) => Some(param.name.clone()), TypeOrConstParamData::ConstParamData(_) => None, }); - // The `Substitution` is first self then container, we want the reverse order. - let self_params = self.subst.type_parameters(Interner).zip(type_params); - let container_params = self.subst.as_slice(Interner)[generics.len()..] + let parent_len = self.subst.len(Interner) + - generics + .iter_type_or_consts() + .filter(|g| matches!(g.1, TypeOrConstParamData::TypeParamData(..))) + .count(); + let container_params = self.subst.as_slice(Interner)[..parent_len] .iter() .filter_map(|param| param.ty(Interner).cloned()) .zip(container_type_params.into_iter().flatten()); + let self_params = self.subst.as_slice(Interner)[parent_len..] + .iter() + .filter_map(|param| param.ty(Interner).cloned()) + .zip(type_params); container_params .chain(self_params) .filter_map(|(ty, name)| { @@ -3784,7 +3828,7 @@ impl LocalSource { } pub fn original_file(&self, db: &dyn HirDatabase) -> EditionedFileId { - self.source.file_id.original_file(db.upcast()) + self.source.file_id.original_file(db) } pub fn file(&self) -> HirFileId { @@ -3832,7 +3876,7 @@ impl Local { } pub fn is_self(self, db: &dyn HirDatabase) -> bool { - self.name(db) == sym::self_.clone() + self.name(db) == sym::self_ } pub fn is_mut(self, db: &dyn HirDatabase) -> bool { @@ -3865,7 +3909,7 @@ impl Local { let (body, source_map) = db.body_with_source_map(self.parent); match body.self_param.zip(source_map.self_param_syntax()) { Some((param, source)) if param == self.binding_id => { - let root = source.file_syntax(db.upcast()); + let root = source.file_syntax(db); vec![LocalSource { local: self, source: source.map(|ast| Either::Right(ast.to_node(&root))), @@ -3876,7 +3920,7 @@ impl Local { .iter() .map(|&definition| { let src = source_map.pat_syntax(definition).unwrap(); // Hmm... - let root = src.file_syntax(db.upcast()); + let root = src.file_syntax(db); LocalSource { local: self, source: src.map(|ast| match ast.to_node(&root) { @@ -3894,7 +3938,7 @@ impl Local { let (body, source_map) = db.body_with_source_map(self.parent); match body.self_param.zip(source_map.self_param_syntax()) { Some((param, source)) if param == self.binding_id => { - let root = source.file_syntax(db.upcast()); + let root = source.file_syntax(db); LocalSource { local: self, source: source.map(|ast| Either::Right(ast.to_node(&root))), @@ -3905,7 +3949,7 @@ impl Local { .first() .map(|&definition| { let src = source_map.pat_syntax(definition).unwrap(); // Hmm... - let root = src.file_syntax(db.upcast()); + let root = src.file_syntax(db); LocalSource { local: self, source: src.map(|ast| match ast.to_node(&root) { @@ -3944,19 +3988,15 @@ impl DeriveHelper { pub fn name(&self, db: &dyn HirDatabase) -> Name { match self.derive { - MacroId::Macro2Id(it) => db - .macro2_data(it) - .helpers - .as_deref() - .and_then(|it| it.get(self.idx as usize)) - .cloned(), + makro @ MacroId::Macro2Id(_) => db + .attrs(makro.into()) + .parse_rustc_builtin_macro() + .and_then(|(_, helpers)| helpers.get(self.idx as usize).cloned()), MacroId::MacroRulesId(_) => None, - MacroId::ProcMacroId(proc_macro) => db - .proc_macro_data(proc_macro) - .helpers - .as_deref() - .and_then(|it| it.get(self.idx as usize)) - .cloned(), + makro @ MacroId::ProcMacroId(_) => db + .attrs(makro.into()) + .parse_proc_macro_derive() + .and_then(|(_, helpers)| helpers.get(self.idx as usize).cloned()), } .unwrap_or_else(Name::missing) } @@ -3965,7 +4005,7 @@ impl DeriveHelper { // FIXME: Wrong name? This is could also be a registered attribute #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] pub struct BuiltinAttr { - krate: Option, + krate: Option, idx: u32, } @@ -4011,7 +4051,7 @@ impl BuiltinAttr { #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] pub struct ToolModule { - krate: CrateId, + krate: base_db::Crate, idx: u32, } @@ -4096,7 +4136,7 @@ impl GenericParam { GenericParam::ConstParam(_) => return None, GenericParam::LifetimeParam(it) => it.id.parent, }; - let generics = hir_ty::generics::generics(db.upcast(), parent); + let generics = hir_ty::generics::generics(db, parent); let index = match self { GenericParam::TypeParam(it) => generics.type_or_const_param_idx(it.id.into())?, GenericParam::ConstParam(_) => return None, @@ -4121,7 +4161,7 @@ impl TypeParam { } pub fn module(self, db: &dyn HirDatabase) -> Module { - self.id.parent().module(db.upcast()).into() + self.id.parent().module(db).into() } /// Is this type parameter implicitly introduced (eg. `Self` in a trait or an `impl Trait` @@ -4130,14 +4170,13 @@ impl TypeParam { let params = db.generic_params(self.id.parent()); let data = ¶ms[self.id.local_id()]; match data.type_param().unwrap().provenance { - hir_def::generics::TypeParamProvenance::TypeParamList => false, - hir_def::generics::TypeParamProvenance::TraitSelf - | hir_def::generics::TypeParamProvenance::ArgumentImplTrait => true, + TypeParamProvenance::TypeParamList => false, + TypeParamProvenance::TraitSelf | TypeParamProvenance::ArgumentImplTrait => true, } } pub fn ty(self, db: &dyn HirDatabase) -> Type { - let resolver = self.id.parent().resolver(db.upcast()); + let resolver = self.id.parent().resolver(db); let ty = TyKind::Placeholder(hir_ty::to_placeholder_idx(db, self.id.into())).intern(Interner); Type::new_with_resolver_inner(db, &resolver, ty) @@ -4160,7 +4199,7 @@ impl TypeParam { pub fn default(self, db: &dyn HirDatabase) -> Option { let ty = generic_arg_from_param(db, self.id.into())?; - let resolver = self.id.parent().resolver(db.upcast()); + let resolver = self.id.parent().resolver(db); match ty.data(Interner) { GenericArgData::Ty(it) if *it.kind(Interner) != TyKind::Error => { Some(Type::new_with_resolver_inner(db, &resolver, it.clone())) @@ -4186,7 +4225,7 @@ impl LifetimeParam { } pub fn module(self, db: &dyn HirDatabase) -> Module { - self.id.parent.module(db.upcast()).into() + self.id.parent.module(db).into() } pub fn parent(self, _db: &dyn HirDatabase) -> GenericDef { @@ -4216,7 +4255,7 @@ impl ConstParam { } pub fn module(self, db: &dyn HirDatabase) -> Module { - self.id.parent().module(db.upcast()).into() + self.id.parent().module(db).into() } pub fn parent(self, _db: &dyn HirDatabase) -> GenericDef { @@ -4241,7 +4280,8 @@ fn generic_arg_from_param(db: &dyn HirDatabase, id: TypeOrConstParamId) -> Optio let local_idx = hir_ty::param_idx(db, id)?; let defaults = db.generic_defaults(id.parent); let ty = defaults.get(local_idx)?.clone(); - let subst = TyBuilder::placeholder_subst(db, id.parent); + let full_subst = TyBuilder::placeholder_subst(db, id.parent); + let subst = &full_subst.as_slice(Interner)[..local_idx]; Some(ty.substitute(Interner, &subst)) } @@ -4260,7 +4300,7 @@ impl TypeOrConstParam { } pub fn module(self, db: &dyn HirDatabase) -> Module { - self.id.parent.module(db.upcast()).into() + self.id.parent.module(db).into() } pub fn parent(self, _db: &dyn HirDatabase) -> GenericDef { @@ -4270,10 +4310,10 @@ impl TypeOrConstParam { pub fn split(self, db: &dyn HirDatabase) -> Either { let params = db.generic_params(self.id.parent); match ¶ms[self.id.local_id] { - hir_def::generics::TypeOrConstParamData::TypeParamData(_) => { + TypeOrConstParamData::TypeParamData(_) => { Either::Right(TypeParam { id: TypeParamId::from_unchecked(self.id) }) } - hir_def::generics::TypeOrConstParamData::ConstParamData(_) => { + TypeOrConstParamData::ConstParamData(_) => { Either::Left(ConstParam { id: ConstParamId::from_unchecked(self.id) }) } } @@ -4289,18 +4329,18 @@ impl TypeOrConstParam { pub fn as_type_param(self, db: &dyn HirDatabase) -> Option { let params = db.generic_params(self.id.parent); match ¶ms[self.id.local_id] { - hir_def::generics::TypeOrConstParamData::TypeParamData(_) => { + TypeOrConstParamData::TypeParamData(_) => { Some(TypeParam { id: TypeParamId::from_unchecked(self.id) }) } - hir_def::generics::TypeOrConstParamData::ConstParamData(_) => None, + TypeOrConstParamData::ConstParamData(_) => None, } } pub fn as_const_param(self, db: &dyn HirDatabase) -> Option { let params = db.generic_params(self.id.parent); match ¶ms[self.id.local_id] { - hir_def::generics::TypeOrConstParamData::TypeParamData(_) => None, - hir_def::generics::TypeOrConstParamData::ConstParamData(_) => { + TypeOrConstParamData::TypeParamData(_) => None, + TypeOrConstParamData::ConstParamData(_) => { Some(ConstParam { id: ConstParamId::from_unchecked(self.id) }) } } @@ -4321,7 +4361,7 @@ impl Impl { } pub fn all_in_module(db: &dyn HirDatabase, module: Module) -> Vec { - module.id.def_map(db.upcast())[module.id.local_id].scope.impls().map(Into::into).collect() + module.id.def_map(db)[module.id.local_id].scope.impls().map(Into::into).collect() } pub fn all_for_type(db: &dyn HirDatabase, Type { ty, env }: Type) -> Vec { @@ -4367,8 +4407,7 @@ impl Impl { ); } - if let Some(block) = - ty.adt_id(Interner).and_then(|def| def.0.module(db.upcast()).containing_block()) + if let Some(block) = ty.adt_id(Interner).and_then(|def| def.0.module(db).containing_block()) { if let Some(inherent_impls) = db.inherent_impls_in_block(block) { all.extend( @@ -4413,41 +4452,41 @@ impl Impl { pub fn trait_ref(self, db: &dyn HirDatabase) -> Option { let substs = TyBuilder::placeholder_subst(db, self.id); let trait_ref = db.impl_trait(self.id)?.substitute(Interner, &substs); - let resolver = self.id.resolver(db.upcast()); + let resolver = self.id.resolver(db); Some(TraitRef::new_with_resolver(db, &resolver, trait_ref)) } pub fn self_ty(self, db: &dyn HirDatabase) -> Type { - let resolver = self.id.resolver(db.upcast()); + let resolver = self.id.resolver(db); let substs = TyBuilder::placeholder_subst(db, self.id); let ty = db.impl_self_ty(self.id).substitute(Interner, &substs); Type::new_with_resolver_inner(db, &resolver, ty) } pub fn items(self, db: &dyn HirDatabase) -> Vec { - db.impl_data(self.id).items.iter().map(|&(_, it)| it.into()).collect() + db.impl_items(self.id).items.iter().map(|&(_, it)| it.into()).collect() } pub fn is_negative(self, db: &dyn HirDatabase) -> bool { - db.impl_data(self.id).is_negative + db.impl_signature(self.id).flags.contains(ImplFlags::NEGATIVE) } pub fn is_unsafe(self, db: &dyn HirDatabase) -> bool { - db.impl_data(self.id).is_unsafe + db.impl_signature(self.id).flags.contains(ImplFlags::UNSAFE) } pub fn module(self, db: &dyn HirDatabase) -> Module { - self.id.lookup(db.upcast()).container.into() + self.id.lookup(db).container.into() } pub fn as_builtin_derive_path(self, db: &dyn HirDatabase) -> Option> { let src = self.source(db)?; let macro_file = src.file_id.macro_file()?; - let loc = macro_file.macro_call_id.lookup(db.upcast()); + let loc = macro_file.lookup(db); let (derive_attr, derive_index) = match loc.kind { MacroCallKind::Derive { ast_id, derive_attr_index, derive_index, .. } => { - let module_id = self.id.lookup(db.upcast()).container; + let module_id = self.id.lookup(db).container; ( db.crate_def_map(module_id.krate())[module_id.local_id] .scope @@ -4457,9 +4496,8 @@ impl Impl { } _ => return None, }; - let file_id = MacroFileId { macro_call_id: derive_attr }; let path = db - .parse_macro_expansion(file_id) + .parse_macro_expansion(derive_attr) .value .0 .syntax_node() @@ -4467,7 +4505,7 @@ impl Impl { .nth(derive_index as usize) .and_then(::cast) .and_then(|it| it.path())?; - Some(InMacroFile { file_id, value: path }) + Some(InMacroFile { file_id: derive_attr, value: path }) } pub fn check_orphan_rules(self, db: &dyn HirDatabase) -> bool { @@ -4475,7 +4513,7 @@ impl Impl { } fn all_macro_calls(&self, db: &dyn HirDatabase) -> Box<[(AstId, MacroCallId)]> { - db.impl_data(self.id) + db.impl_items(self.id) .macro_calls .as_ref() .map(|it| it.as_ref().clone().into_boxed_slice()) @@ -4656,6 +4694,7 @@ pub struct CaptureUsages { impl CaptureUsages { pub fn sources(&self, db: &dyn HirDatabase) -> Vec { let (body, source_map) = db.body_with_source_map(self.parent); + let mut result = Vec::with_capacity(self.spans.len()); for &span in self.spans.iter() { let is_ref = span.is_ref_span(&body); @@ -4728,12 +4767,12 @@ impl Type { Type { env: environment, ty } } - pub(crate) fn new_for_crate(krate: CrateId, ty: Ty) -> Type { + pub(crate) fn new_for_crate(krate: base_db::Crate, ty: Ty) -> Type { Type { env: TraitEnvironment::empty(krate), ty } } fn new(db: &dyn HirDatabase, lexical_env: impl HasResolver, ty: Ty) -> Type { - let resolver = lexical_env.resolver(db.upcast()); + let resolver = lexical_env.resolver(db); let environment = resolver .generic_def() .map_or_else(|| TraitEnvironment::empty(resolver.krate()), |d| db.trait_environment(d)); @@ -4778,7 +4817,7 @@ impl Type { ValueTyDefId::StructId(it) => GenericDefId::AdtId(AdtId::StructId(it)), ValueTyDefId::UnionId(it) => GenericDefId::AdtId(AdtId::UnionId(it)), ValueTyDefId::EnumVariantId(it) => { - GenericDefId::AdtId(AdtId::EnumId(it.lookup(db.upcast()).parent)) + GenericDefId::AdtId(AdtId::EnumId(it.lookup(db).parent)) } ValueTyDefId::StaticId(_) => return Type::new(db, def, ty.skip_binders().clone()), }, @@ -4790,7 +4829,7 @@ impl Type { Type { env: ty.env, ty: TyBuilder::slice(ty.ty) } } - pub fn new_tuple(krate: CrateId, tys: &[Type]) -> Type { + pub fn new_tuple(krate: base_db::Crate, tys: &[Type]) -> Type { let tys = tys.iter().map(|it| it.ty.clone()); Type { env: TraitEnvironment::empty(krate), ty: TyBuilder::tuple_with(tys) } } @@ -4822,7 +4861,7 @@ impl Type { pub fn contains_reference(&self, db: &dyn HirDatabase) -> bool { return go(db, self.env.krate, &self.ty); - fn go(db: &dyn HirDatabase, krate: CrateId, ty: &Ty) -> bool { + fn go(db: &dyn HirDatabase, krate: base_db::Crate, ty: &Ty) -> bool { match ty.kind(Interner) { // Reference itself TyKind::Ref(_, _, _) => true, @@ -4935,18 +4974,14 @@ impl Type { /// `std::future::Future` and returns the `Output` associated type. /// This function is used in `.await` syntax completion. pub fn into_future_output(&self, db: &dyn HirDatabase) -> Option { - let trait_ = db - .lang_item(self.env.krate, LangItem::IntoFutureIntoFuture) - .and_then(|it| { - let into_future_fn = it.as_function()?; + let trait_ = LangItem::IntoFutureIntoFuture + .resolve_function(db, self.env.krate) + .and_then(|into_future_fn| { let assoc_item = as_assoc_item(db, AssocItem::Function, into_future_fn)?; let into_future_trait = assoc_item.container_or_implemented_trait(db)?; Some(into_future_trait.id) }) - .or_else(|| { - let future_trait = db.lang_item(self.env.krate, LangItem::Future)?; - future_trait.as_trait() - })?; + .or_else(|| LangItem::Future.resolve_trait(db, self.env.krate))?; let canonical_ty = Canonical { value: self.ty.clone(), binders: CanonicalVarKinds::empty(Interner) }; @@ -4954,32 +4989,28 @@ impl Type { return None; } - let output_assoc_type = db - .trait_data(trait_) - .associated_type_by_name(&Name::new_symbol_root(sym::Output.clone()))?; + let output_assoc_type = + db.trait_items(trait_).associated_type_by_name(&Name::new_symbol_root(sym::Output))?; self.normalize_trait_assoc_type(db, &[], output_assoc_type.into()) } /// This does **not** resolve `IntoFuture`, only `Future`. pub fn future_output(self, db: &dyn HirDatabase) -> Option { - let future_output = - db.lang_item(self.env.krate, LangItem::FutureOutput)?.as_type_alias()?; + let future_output = LangItem::FutureOutput.resolve_type_alias(db, self.env.krate)?; self.normalize_trait_assoc_type(db, &[], future_output.into()) } /// This does **not** resolve `IntoIterator`, only `Iterator`. pub fn iterator_item(self, db: &dyn HirDatabase) -> Option { - let iterator_trait = db.lang_item(self.env.krate, LangItem::Iterator)?.as_trait()?; + let iterator_trait = LangItem::Iterator.resolve_trait(db, self.env.krate)?; let iterator_item = db - .trait_data(iterator_trait) - .associated_type_by_name(&Name::new_symbol_root(sym::Item.clone()))?; + .trait_items(iterator_trait) + .associated_type_by_name(&Name::new_symbol_root(sym::Item))?; self.normalize_trait_assoc_type(db, &[], iterator_item.into()) } pub fn impls_iterator(self, db: &dyn HirDatabase) -> bool { - let Some(iterator_trait) = - db.lang_item(self.env.krate, LangItem::Iterator).and_then(|it| it.as_trait()) - else { + let Some(iterator_trait) = LangItem::Iterator.resolve_trait(db, self.env.krate) else { return false; }; let canonical_ty = @@ -4989,12 +5020,13 @@ impl Type { /// Resolves the projection `::IntoIter` and returns the resulting type pub fn into_iterator_iter(self, db: &dyn HirDatabase) -> Option { - let trait_ = db.lang_item(self.env.krate, LangItem::IntoIterIntoIter).and_then(|it| { - let into_iter_fn = it.as_function()?; - let assoc_item = as_assoc_item(db, AssocItem::Function, into_iter_fn)?; - let into_iter_trait = assoc_item.container_or_implemented_trait(db)?; - Some(into_iter_trait.id) - })?; + let trait_ = LangItem::IntoIterIntoIter.resolve_function(db, self.env.krate).and_then( + |into_iter_fn| { + let assoc_item = as_assoc_item(db, AssocItem::Function, into_iter_fn)?; + let into_iter_trait = assoc_item.container_or_implemented_trait(db)?; + Some(into_iter_trait.id) + }, + )?; let canonical_ty = Canonical { value: self.ty.clone(), binders: CanonicalVarKinds::empty(Interner) }; @@ -5003,8 +5035,8 @@ impl Type { } let into_iter_assoc_type = db - .trait_data(trait_) - .associated_type_by_name(&Name::new_symbol_root(sym::IntoIter.clone()))?; + .trait_items(trait_) + .associated_type_by_name(&Name::new_symbol_root(sym::IntoIter))?; self.normalize_trait_assoc_type(db, &[], into_iter_assoc_type.into()) } @@ -5057,7 +5089,7 @@ impl Type { alias: TypeAlias, ) -> Option { let mut args = args.iter(); - let trait_id = match alias.id.lookup(db.upcast()).container { + let trait_id = match alias.id.lookup(db).container { ItemContainerId::TraitId(id) => id, _ => unreachable!("non assoc type alias reached in normalize_trait_assoc_type()"), }; @@ -5076,18 +5108,12 @@ impl Type { let projection = TyBuilder::assoc_type_projection(db, alias.id, Some(parent_subst)).build(); let ty = db.normalize_projection(projection, self.env.clone()); - if ty.is_unknown() { - None - } else { - Some(self.derived(ty)) - } + if ty.is_unknown() { None } else { Some(self.derived(ty)) } } pub fn is_copy(&self, db: &dyn HirDatabase) -> bool { - let lang_item = db.lang_item(self.env.krate, LangItem::Copy); - let copy_trait = match lang_item { - Some(LangItemTarget::Trait(it)) => it, - _ => return false, + let Some(copy_trait) = LangItem::Copy.resolve_trait(db, self.env.krate) else { + return false; }; self.impls_trait(db, copy_trait.into(), &[]) } @@ -5257,7 +5283,10 @@ impl Type { /// Returns types that this type dereferences to (including this type itself). The returned /// iterator won't yield the same type more than once even if the deref chain contains a cycle. - pub fn autoderef(&self, db: &dyn HirDatabase) -> impl Iterator + '_ { + pub fn autoderef<'db>( + &self, + db: &'db dyn HirDatabase, + ) -> impl Iterator + use<'_, 'db> { self.autoderef_(db).map(move |ty| self.derived(ty)) } @@ -5297,7 +5326,7 @@ impl Type { let impls = db.inherent_impls_in_crate(krate); for impl_def in impls.for_self_ty(&self.ty) { - for &(_, item) in db.impl_data(*impl_def).items.iter() { + for &(_, item) in db.impl_items(*impl_def).items.iter() { if callback(item) { return; } @@ -5497,7 +5526,7 @@ impl Type { .generic_def() .map_or_else(|| TraitEnvironment::empty(krate.id), |d| db.trait_environment(d)); - let _ = method_resolution::iterate_method_candidates_dyn( + _ = method_resolution::iterate_method_candidates_dyn( &canonical, db, environment, @@ -5584,7 +5613,7 @@ impl Type { .generic_def() .map_or_else(|| TraitEnvironment::empty(krate.id), |d| db.trait_environment(d)); - let _ = method_resolution::iterate_path_candidates( + _ = method_resolution::iterate_path_candidates( &canonical, db, environment, @@ -5617,7 +5646,7 @@ impl Type { let _p = tracing::info_span!("applicable_inherent_traits").entered(); self.autoderef_(db) .filter_map(|ty| ty.dyn_trait()) - .flat_map(move |dyn_trait_id| hir_ty::all_super_traits(db.upcast(), dyn_trait_id)) + .flat_map(move |dyn_trait_id| hir_ty::all_super_traits(db, dyn_trait_id)) .map(Trait::from) } @@ -5628,12 +5657,15 @@ impl Type { .flat_map(|ty| { self.env .traits_in_scope_from_clauses(ty) - .flat_map(|t| hir_ty::all_super_traits(db.upcast(), t)) + .flat_map(|t| hir_ty::all_super_traits(db, t)) }) .map(Trait::from) } - pub fn as_impl_traits(&self, db: &dyn HirDatabase) -> Option> { + pub fn as_impl_traits( + &self, + db: &dyn HirDatabase, + ) -> Option + use<>> { self.ty.impl_trait_bounds(db).map(|it| { it.into_iter().filter_map(|pred| match pred.skip_binders() { hir_ty::WhereClause::Implemented(trait_ref) => { @@ -6072,7 +6104,7 @@ pub trait HasVisibility { fn visibility(&self, db: &dyn HirDatabase) -> Visibility; fn is_visible_from(&self, db: &dyn HirDatabase, module: Module) -> bool { let vis = self.visibility(db); - vis.is_visible_from(db.upcast(), module.id) + vis.is_visible_from(db, module.id) } } @@ -6083,7 +6115,7 @@ pub trait HasCrate { impl HasCrate for T { fn krate(&self, db: &dyn HirDatabase) -> Crate { - self.module(db.upcast()).krate().into() + self.module(db).krate().into() } } @@ -6195,78 +6227,78 @@ pub trait HasContainer { impl HasContainer for ExternCrateDecl { fn container(&self, db: &dyn HirDatabase) -> ItemContainer { - container_id_to_hir(self.id.lookup(db.upcast()).container.into()) + container_id_to_hir(self.id.lookup(db).container.into()) } } impl HasContainer for Module { fn container(&self, db: &dyn HirDatabase) -> ItemContainer { // FIXME: handle block expressions as modules (their parent is in a different DefMap) - let def_map = self.id.def_map(db.upcast()); + let def_map = self.id.def_map(db); match def_map[self.id.local_id].parent { Some(parent_id) => ItemContainer::Module(Module { id: def_map.module_id(parent_id) }), - None => ItemContainer::Crate(def_map.krate()), + None => ItemContainer::Crate(def_map.krate().into()), } } } impl HasContainer for Function { fn container(&self, db: &dyn HirDatabase) -> ItemContainer { - container_id_to_hir(self.id.lookup(db.upcast()).container) + container_id_to_hir(self.id.lookup(db).container) } } impl HasContainer for Struct { fn container(&self, db: &dyn HirDatabase) -> ItemContainer { - ItemContainer::Module(Module { id: self.id.lookup(db.upcast()).container }) + ItemContainer::Module(Module { id: self.id.lookup(db).container }) } } impl HasContainer for Union { fn container(&self, db: &dyn HirDatabase) -> ItemContainer { - ItemContainer::Module(Module { id: self.id.lookup(db.upcast()).container }) + ItemContainer::Module(Module { id: self.id.lookup(db).container }) } } impl HasContainer for Enum { fn container(&self, db: &dyn HirDatabase) -> ItemContainer { - ItemContainer::Module(Module { id: self.id.lookup(db.upcast()).container }) + ItemContainer::Module(Module { id: self.id.lookup(db).container }) } } impl HasContainer for TypeAlias { fn container(&self, db: &dyn HirDatabase) -> ItemContainer { - container_id_to_hir(self.id.lookup(db.upcast()).container) + container_id_to_hir(self.id.lookup(db).container) } } impl HasContainer for Const { fn container(&self, db: &dyn HirDatabase) -> ItemContainer { - container_id_to_hir(self.id.lookup(db.upcast()).container) + container_id_to_hir(self.id.lookup(db).container) } } impl HasContainer for Static { fn container(&self, db: &dyn HirDatabase) -> ItemContainer { - container_id_to_hir(self.id.lookup(db.upcast()).container) + container_id_to_hir(self.id.lookup(db).container) } } impl HasContainer for Trait { fn container(&self, db: &dyn HirDatabase) -> ItemContainer { - ItemContainer::Module(Module { id: self.id.lookup(db.upcast()).container }) + ItemContainer::Module(Module { id: self.id.lookup(db).container }) } } impl HasContainer for TraitAlias { fn container(&self, db: &dyn HirDatabase) -> ItemContainer { - ItemContainer::Module(Module { id: self.id.lookup(db.upcast()).container }) + ItemContainer::Module(Module { id: self.id.lookup(db).container }) } } impl HasContainer for ExternBlock { fn container(&self, db: &dyn HirDatabase) -> ItemContainer { - ItemContainer::Module(Module { id: self.id.lookup(db.upcast()).container }) + ItemContainer::Module(Module { id: self.id.lookup(db).container }) } } @@ -6285,7 +6317,7 @@ pub enum ItemContainer { Impl(Impl), Module(Module), ExternBlock(ExternBlock), - Crate(CrateId), + Crate(Crate), } /// Subset of `ide_db::Definition` that doc links can resolve to. @@ -6299,7 +6331,7 @@ fn push_ty_diagnostics( db: &dyn HirDatabase, acc: &mut Vec, diagnostics: Option>, - source_map: &TypesSourceMap, + source_map: &ExpressionStoreSourceMap, ) { if let Some(diagnostics) = diagnostics { acc.extend( @@ -6348,3 +6380,48 @@ where self(item) } } + +pub fn resolve_absolute_path<'a, I: Iterator + Clone + 'a>( + db: &'a dyn HirDatabase, + mut segments: I, +) -> impl Iterator + use<'a, I> { + segments + .next() + .into_iter() + .flat_map(move |crate_name| { + db.all_crates() + .iter() + .filter(|&krate| { + krate + .extra_data(db) + .display_name + .as_ref() + .is_some_and(|name| *name.crate_name().symbol() == crate_name) + }) + .filter_map(|&krate| { + let segments = segments.clone(); + let mut def_map = db.crate_def_map(krate); + let mut module = &def_map[DefMap::ROOT]; + let mut segments = segments.with_position().peekable(); + while let Some((_, segment)) = segments.next_if(|&(position, _)| { + !matches!(position, itertools::Position::Last | itertools::Position::Only) + }) { + let res = module + .scope + .get(&Name::new_symbol_root(segment)) + .take_types() + .and_then(|res| match res { + ModuleDefId::ModuleId(it) => Some(it), + _ => None, + })?; + def_map = res.def_map(db); + module = &def_map[res.local_id]; + } + let (_, item_name) = segments.next()?; + let res = module.scope.get(&Name::new_symbol_root(item_name)); + Some(res.iter_items().map(|(item, _)| item.into())) + }) + .collect::>() + }) + .flatten() +} diff --git a/src/tools/rust-analyzer/crates/hir/src/semantics.rs b/src/tools/rust-analyzer/crates/hir/src/semantics.rs index 5e2eebcd13c69..4d092c1f0bb0e 100644 --- a/src/tools/rust-analyzer/crates/hir/src/semantics.rs +++ b/src/tools/rust-analyzer/crates/hir/src/semantics.rs @@ -12,49 +12,46 @@ use std::{ use either::Either; use hir_def::{ - expr_store::{Body, ExprOrPatSource}, + DefWithBodyId, FunctionId, MacroId, StructId, TraitId, VariantId, + expr_store::{Body, ExprOrPatSource, path::Path}, hir::{BindingId, Expr, ExprId, ExprOrPatId, Pat}, - lower::LowerCtx, - nameres::{MacroSubNs, ModuleOrigin}, - path::ModPath, + nameres::ModuleOrigin, resolver::{self, HasResolver, Resolver, TypeNs}, - type_ref::{Mutability, TypesMap, TypesSourceMap}, - AsMacroCall, DefWithBodyId, FunctionId, MacroId, StructId, TraitId, VariantId, + type_ref::Mutability, }; use hir_expand::{ + EditionedFileId, ExpandResult, FileRange, HirFileId, InMacroFile, MacroCallId, attrs::collect_attrs, builtin::{BuiltinFnLikeExpander, EagerExpander}, db::ExpandDatabase, - files::InRealFile, - hygiene::SyntaxContextExt as _, + files::{FileRangeWrapper, InRealFile}, inert_attr_macro::find_builtin_attr_idx, + mod_path::{ModPath, PathKind}, name::AsName, - ExpandResult, FileRange, InMacroFile, MacroCallId, MacroFileId, MacroFileIdExt, }; use hir_ty::diagnostics::unsafe_operations_for_body; -use intern::{sym, Symbol}; +use intern::{Interned, Symbol, sym}; use itertools::Itertools; use rustc_hash::{FxHashMap, FxHashSet}; -use smallvec::{smallvec, SmallVec}; -use span::{AstIdMap, EditionedFileId, FileId, HirFileIdRepr, SyntaxContextId}; +use smallvec::{SmallVec, smallvec}; +use span::{Edition, FileId, SyntaxContext}; use stdx::TupleExt; use syntax::{ - algo::skip_trivia_token, - ast::{self, HasAttrs as _, HasGenericParams}, AstNode, AstToken, Direction, SyntaxKind, SyntaxNode, SyntaxNodePtr, SyntaxToken, TextRange, TextSize, + algo::skip_trivia_token, + ast::{self, HasAttrs as _, HasGenericParams}, }; -use triomphe::Arc; use crate::{ + Adjust, Adjustment, Adt, AutoBorrow, BindingMode, BuiltinAttr, Callable, Const, ConstParam, + Crate, DefWithBody, DeriveHelper, Enum, Field, Function, GenericSubstitution, HasSource, Impl, + InFile, InlineAsmOperand, ItemInNs, Label, LifetimeParam, Local, Macro, Module, ModuleDef, + Name, OverloadedDeref, ScopeDef, Static, Struct, ToolModule, Trait, TraitAlias, TupleField, + Type, TypeAlias, TypeParam, Union, Variant, VariantDef, db::HirDatabase, semantics::source_to_def::{ChildContainer, SourceToDefCache, SourceToDefCtx}, - source_analyzer::{name_hygiene, resolve_hir_path, SourceAnalyzer}, - Adjust, Adjustment, Adt, AutoBorrow, BindingMode, BuiltinAttr, Callable, Const, ConstParam, - Crate, DefWithBody, DeriveHelper, Enum, Field, Function, GenericSubstitution, HasSource, - HirFileId, Impl, InFile, InlineAsmOperand, ItemInNs, Label, LifetimeParam, Local, Macro, - Module, ModuleDef, Name, OverloadedDeref, Path, ScopeDef, Static, Struct, ToolModule, Trait, - TraitAlias, TupleField, Type, TypeAlias, TypeParam, Union, Variant, VariantDef, + source_analyzer::{SourceAnalyzer, name_hygiene, resolve_hir_path}, }; const CONTINUE_NO_BREAKS: ControlFlow = ControlFlow::Continue(()); @@ -138,8 +135,8 @@ pub struct Semantics<'db, DB> { pub struct SemanticsImpl<'db> { pub db: &'db dyn HirDatabase, s2d_cache: RefCell, - /// MacroCall to its expansion's MacroFileId cache - macro_call_cache: RefCell, MacroFileId>>, + /// MacroCall to its expansion's MacroCallId cache + macro_call_cache: RefCell, MacroCallId>>, } impl fmt::Debug for Semantics<'_, DB> { @@ -308,21 +305,23 @@ impl<'db> SemanticsImpl<'db> { } pub fn parse(&self, file_id: EditionedFileId) -> ast::SourceFile { + let hir_file_id = file_id.into(); let tree = self.db.parse(file_id).tree(); - self.cache(tree.syntax().clone(), file_id.into()); + self.cache(tree.syntax().clone(), hir_file_id); tree } - /// If not crate is found for the file, returns the last crate in topological order. - pub fn first_crate_or_default(&self, file: FileId) -> Crate { + /// If not crate is found for the file, try to return the last crate in topological order. + pub fn first_crate(&self, file: FileId) -> Option { match self.file_to_module_defs(file).next() { - Some(module) => module.krate(), - None => (*self.db.crate_graph().crates_in_topological_order().last().unwrap()).into(), + Some(module) => Some(module.krate()), + None => self.db.all_crates().last().copied().map(Into::into), } } pub fn attach_first_edition(&self, file: FileId) -> Option { Some(EditionedFileId::new( + self.db, file, self.file_to_module_defs(file).next()?.krate().edition(self.db), )) @@ -331,23 +330,24 @@ impl<'db> SemanticsImpl<'db> { pub fn parse_guess_edition(&self, file_id: FileId) -> ast::SourceFile { let file_id = self .attach_first_edition(file_id) - .unwrap_or_else(|| EditionedFileId::current_edition(file_id)); + .unwrap_or_else(|| EditionedFileId::new(self.db, file_id, Edition::CURRENT)); + let tree = self.db.parse(file_id).tree(); self.cache(tree.syntax().clone(), file_id.into()); tree } pub fn find_parent_file(&self, file_id: HirFileId) -> Option> { - match file_id.repr() { - HirFileIdRepr::FileId(file_id) => { - let module = self.file_to_module_defs(file_id.file_id()).next()?; + match file_id { + HirFileId::FileId(file_id) => { + let module = self.file_to_module_defs(file_id.file_id(self.db)).next()?; let def_map = self.db.crate_def_map(module.krate().id); match def_map[module.id.local_id].origin { ModuleOrigin::CrateRoot { .. } => None, ModuleOrigin::File { declaration, declaration_tree_id, .. } => { let file_id = declaration_tree_id.file_id(); let in_file = InFile::new(file_id, declaration); - let node = in_file.to_node(self.db.upcast()); + let node = in_file.to_node(self.db); let root = find_root(node.syntax()); self.cache(root, file_id); Some(in_file.with_value(node.syntax().clone())) @@ -355,11 +355,8 @@ impl<'db> SemanticsImpl<'db> { _ => unreachable!("FileId can only belong to a file module"), } } - HirFileIdRepr::MacroFile(macro_file) => { - let node = self - .db - .lookup_intern_macro_call(macro_file.macro_call_id) - .to_node(self.db.upcast()); + HirFileId::MacroFile(macro_file) => { + let node = self.db.lookup_intern_macro_call(macro_file).to_node(self.db); let root = find_root(&node.value); self.cache(root, node.file_id); Some(node) @@ -370,8 +367,8 @@ impl<'db> SemanticsImpl<'db> { /// Returns the `SyntaxNode` of the module. If this is a file module, returns /// the `SyntaxNode` of the *definition* file, not of the *declaration*. pub fn module_definition_node(&self, module: Module) -> InFile { - let def_map = module.id.def_map(self.db.upcast()); - let definition = def_map[module.id.local_id].origin.definition_source(self.db.upcast()); + let def_map = module.id.def_map(self.db); + let definition = def_map[module.id.local_id].origin.definition_source(self.db); let definition = definition.map(|it| it.node()); let root_node = find_root(&definition.value); self.cache(root_node, definition.file_id); @@ -384,7 +381,7 @@ impl<'db> SemanticsImpl<'db> { node } - pub fn expand(&self, file_id: MacroFileId) -> ExpandResult { + pub fn expand(&self, file_id: MacroCallId) -> ExpandResult { let res = self.db.parse_macro_expansion(file_id).map(|it| it.0.syntax_node()); self.cache(res.value.clone(), file_id.into()); res @@ -394,13 +391,7 @@ impl<'db> SemanticsImpl<'db> { let sa = self.analyze_no_infer(macro_call.syntax())?; let macro_call = InFile::new(sa.file_id, macro_call); - let file_id = if let Some(call) = - ::to_def(self, macro_call) - { - call.as_macro_file() - } else { - sa.expand(self.db, macro_call)? - }; + let file_id = sa.expand(self.db, macro_call)?; let node = self.parse_or_expand(file_id.into()); Some(node) @@ -408,15 +399,13 @@ impl<'db> SemanticsImpl<'db> { pub fn check_cfg_attr(&self, attr: &ast::TokenTree) -> Option { let file_id = self.find_file(attr.syntax()).file_id; - let krate = match file_id.repr() { - HirFileIdRepr::FileId(file_id) => { - self.file_to_module_defs(file_id.file_id()).next()?.krate().id - } - HirFileIdRepr::MacroFile(macro_file) => { - self.db.lookup_intern_macro_call(macro_file.macro_call_id).krate + let krate = match file_id { + HirFileId::FileId(file_id) => { + self.file_to_module_defs(file_id.file_id(self.db)).next()?.krate().id } + HirFileId::MacroFile(macro_file) => self.db.lookup_intern_macro_call(macro_file).krate, }; - hir_expand::check_cfg_attr_value(self.db.upcast(), attr, krate) + hir_expand::check_cfg_attr_value(self.db, attr, krate) } /// Expands the macro if it isn't one of the built-in ones that expand to custom syntax or dummy @@ -428,14 +417,8 @@ impl<'db> SemanticsImpl<'db> { let sa = self.analyze_no_infer(macro_call.syntax())?; let macro_call = InFile::new(sa.file_id, macro_call); - let file_id = if let Some(call) = - ::to_def(self, macro_call) - { - call.as_macro_file() - } else { - sa.expand(self.db, macro_call)? - }; - let macro_call = self.db.lookup_intern_macro_call(file_id.macro_call_id); + let file_id = sa.expand(self.db, macro_call)?; + let macro_call = self.db.lookup_intern_macro_call(file_id); let skip = matches!( macro_call.def.kind, @@ -468,7 +451,7 @@ impl<'db> SemanticsImpl<'db> { pub fn expand_attr_macro(&self, item: &ast::Item) -> Option> { let src = self.wrap_node_infile(item.clone()); let macro_call_id = self.with_ctx(|ctx| ctx.item_to_macro_call(src.as_ref()))?; - Some(self.expand(macro_call_id.as_macro_file())) + Some(self.expand(macro_call_id)) } pub fn expand_derive_as_pseudo_attr_macro(&self, attr: &ast::Attr) -> Option { @@ -477,7 +460,7 @@ impl<'db> SemanticsImpl<'db> { let call_id = self.with_ctx(|ctx| { ctx.attr_to_derive_macro_call(src.with_value(&adt), src).map(|(_, it, _)| it) })?; - Some(self.parse_or_expand(call_id.as_file())) + Some(self.parse_or_expand(call_id.into())) } pub fn resolve_derive_macro(&self, attr: &ast::Attr) -> Option>> { @@ -497,7 +480,7 @@ impl<'db> SemanticsImpl<'db> { .derive_macro_calls(attr)? .into_iter() .flat_map(|call| { - let file_id = call?.as_macro_file(); + let file_id = call?; let ExpandResult { value, err } = self.db.parse_macro_expansion(file_id); let root_node = value.0.syntax_node(); self.cache(root_node.clone(), file_id.into()); @@ -538,7 +521,7 @@ impl<'db> SemanticsImpl<'db> { Some(result) } - pub fn derive_helper(&self, attr: &ast::Attr) -> Option> { + pub fn derive_helper(&self, attr: &ast::Attr) -> Option> { let adt = attr.syntax().ancestors().find_map(ast::Item::cast).and_then(|it| match it { ast::Item::Struct(it) => Some(ast::Adt::Struct(it)), ast::Item::Enum(it) => Some(ast::Adt::Enum(it)), @@ -554,7 +537,7 @@ impl<'db> SemanticsImpl<'db> { .derive_helpers_in_scope(InFile::new(sa.file_id, id))? .iter() .filter(|&(name, _, _)| *name == attr_name) - .map(|&(_, macro_, call)| (macro_.into(), call.as_macro_file())) + .map(|&(_, macro_, call)| (macro_.into(), call)) .collect(); res.is_empty().not().then_some(res) } @@ -571,16 +554,12 @@ impl<'db> SemanticsImpl<'db> { speculative_args: &ast::TokenTree, token_to_map: SyntaxToken, ) -> Option<(SyntaxNode, Vec<(SyntaxToken, u8)>)> { - let SourceAnalyzer { file_id, resolver, .. } = - self.analyze_no_infer(actual_macro_call.syntax())?; - let macro_call = InFile::new(file_id, actual_macro_call); - let krate = resolver.krate(); - let macro_call_id = macro_call.as_call_id(self.db.upcast(), krate, |path| { - resolver.resolve_path_as_macro_def(self.db.upcast(), path, Some(MacroSubNs::Bang)) - })?; + let analyzer = self.analyze_no_infer(actual_macro_call.syntax())?; + let macro_call = InFile::new(analyzer.file_id, actual_macro_call); + let macro_file = analyzer.expansion(macro_call)?; hir_expand::db::expand_speculative( - self.db.upcast(), - macro_call_id, + self.db, + macro_file, speculative_args.syntax(), token_to_map, ) @@ -588,16 +567,11 @@ impl<'db> SemanticsImpl<'db> { pub fn speculative_expand_raw( &self, - macro_file: MacroFileId, + macro_file: MacroCallId, speculative_args: &SyntaxNode, token_to_map: SyntaxToken, ) -> Option<(SyntaxNode, Vec<(SyntaxToken, u8)>)> { - hir_expand::db::expand_speculative( - self.db.upcast(), - macro_file.macro_call_id, - speculative_args, - token_to_map, - ) + hir_expand::db::expand_speculative(self.db, macro_file, speculative_args, token_to_map) } /// Expand the macro call with a different item as the input, mapping the `token_to_map` down into the @@ -611,7 +585,7 @@ impl<'db> SemanticsImpl<'db> { let macro_call = self.wrap_node_infile(actual_macro_call.clone()); let macro_call_id = self.with_ctx(|ctx| ctx.item_to_macro_call(macro_call.as_ref()))?; hir_expand::db::expand_speculative( - self.db.upcast(), + self.db, macro_call_id, speculative_args.syntax(), token_to_map, @@ -630,7 +604,7 @@ impl<'db> SemanticsImpl<'db> { ctx.attr_to_derive_macro_call(attr.with_value(&adt), attr).map(|(_, it, _)| it) })?; hir_expand::db::expand_speculative( - self.db.upcast(), + self.db, macro_call_id, speculative_args.syntax(), token_to_map, @@ -641,7 +615,7 @@ impl<'db> SemanticsImpl<'db> { /// and returns the conflicting locals. pub fn rename_conflicts(&self, to_be_renamed: &Local, new_name: &str) -> Vec { let body = self.db.body(to_be_renamed.parent); - let resolver = to_be_renamed.parent.resolver(self.db.upcast()); + let resolver = to_be_renamed.parent.resolver(self.db); let starting_expr = body.binding_owners.get(&to_be_renamed.binding_id).copied().unwrap_or(body.body_expr); let mut visitor = RenameConflictsVisitor { @@ -755,6 +729,35 @@ impl<'db> SemanticsImpl<'db> { } } + pub fn debug_hir_at(&self, token: SyntaxToken) -> Option { + self.analyze_no_infer(&token.parent()?).and_then(|it| { + Some(match it.body_or_sig.as_ref()? { + crate::source_analyzer::BodyOrSig::Body { def, body, .. } => { + hir_def::expr_store::pretty::print_body_hir( + self.db, + body, + *def, + it.file_id.edition(self.db), + ) + } + &crate::source_analyzer::BodyOrSig::VariantFields { def, .. } => { + hir_def::expr_store::pretty::print_variant_body_hir( + self.db, + def, + it.file_id.edition(self.db), + ) + } + &crate::source_analyzer::BodyOrSig::Sig { def, .. } => { + hir_def::expr_store::pretty::print_signature( + self.db, + def, + it.file_id.edition(self.db), + ) + } + }) + }) + } + /// Maps a node down by mapping its first and last token down. pub fn descend_node_into_attributes(&self, node: N) -> SmallVec<[N; 1]> { // This might not be the correct way to do this, but it works for now @@ -873,7 +876,7 @@ impl<'db> SemanticsImpl<'db> { pub fn descend_into_macros_cb( &self, token: SyntaxToken, - mut cb: impl FnMut(InFile, SyntaxContextId), + mut cb: impl FnMut(InFile, SyntaxContext), ) { if let Ok(token) = self.wrap_token_infile(token).into_real_file() { self.descend_into_macros_impl(token, &mut |t, ctx| { @@ -897,13 +900,17 @@ impl<'db> SemanticsImpl<'db> { res } - pub fn descend_into_macros_no_opaque(&self, token: SyntaxToken) -> SmallVec<[SyntaxToken; 1]> { + pub fn descend_into_macros_no_opaque( + &self, + token: SyntaxToken, + ) -> SmallVec<[InFile; 1]> { let mut res = smallvec![]; - if let Ok(token) = self.wrap_token_infile(token.clone()).into_real_file() { + let token = self.wrap_token_infile(token); + if let Ok(token) = token.clone().into_real_file() { self.descend_into_macros_impl(token, &mut |t, ctx| { - if !ctx.is_opaque(self.db.upcast()) { + if !ctx.is_opaque(self.db) { // Don't descend into opaque contexts - res.push(t.value); + res.push(t); } CONTINUE_NO_BREAKS }); @@ -917,9 +924,9 @@ impl<'db> SemanticsImpl<'db> { pub fn descend_into_macros_breakable( &self, token: InRealFile, - mut cb: impl FnMut(InFile, SyntaxContextId) -> ControlFlow, + mut cb: impl FnMut(InFile, SyntaxContext) -> ControlFlow, ) -> Option { - self.descend_into_macros_impl(token.clone(), &mut cb) + self.descend_into_macros_impl(token, &mut cb) } /// Descends the token into expansions, returning the tokens that matches the input @@ -934,7 +941,7 @@ impl<'db> SemanticsImpl<'db> { let any_ident_match = || kind.is_any_identifier() && value.kind().is_any_identifier(); let matches = (kind == mapped_kind || any_ident_match()) && text == value.text() - && !ctx.is_opaque(self.db.upcast()); + && !ctx.is_opaque(self.db); if matches { r.push(value); } @@ -951,21 +958,13 @@ impl<'db> SemanticsImpl<'db> { let text = token.text(); let kind = token.kind(); if let Ok(token) = self.wrap_token_infile(token.clone()).into_real_file() { - self.descend_into_macros_breakable( - token.clone(), - |InFile { value, file_id: _ }, _ctx| { - let mapped_kind = value.kind(); - let any_ident_match = - || kind.is_any_identifier() && value.kind().is_any_identifier(); - let matches = - (kind == mapped_kind || any_ident_match()) && text == value.text(); - if matches { - ControlFlow::Break(value) - } else { - ControlFlow::Continue(()) - } - }, - ) + self.descend_into_macros_breakable(token, |InFile { value, file_id: _ }, _ctx| { + let mapped_kind = value.kind(); + let any_ident_match = + || kind.is_any_identifier() && value.kind().is_any_identifier(); + let matches = (kind == mapped_kind || any_ident_match()) && text == value.text(); + if matches { ControlFlow::Break(value) } else { ControlFlow::Continue(()) } + }) } else { None } @@ -975,7 +974,7 @@ impl<'db> SemanticsImpl<'db> { fn descend_into_macros_impl( &self, InRealFile { value: token, file_id }: InRealFile, - f: &mut dyn FnMut(InFile, SyntaxContextId) -> ControlFlow, + f: &mut dyn FnMut(InFile, SyntaxContext) -> ControlFlow, ) -> Option { let _p = tracing::info_span!("descend_into_macros_impl").entered(); @@ -1012,7 +1011,7 @@ impl<'db> SemanticsImpl<'db> { None => { stack.push(( file_id.into(), - smallvec![(token, SyntaxContextId::root(file_id.edition()))], + smallvec![(token, SyntaxContext::root(file_id.edition(self.db)))], )); } } @@ -1041,7 +1040,6 @@ impl<'db> SemanticsImpl<'db> { }) }); if let Some((call_id, item)) = containing_attribute_macro_call { - let file_id = call_id.as_macro_file(); let attr_id = match self.db.lookup_intern_macro_call(call_id).kind { hir_expand::MacroCallKind::Attr { invoc_attr_index, .. } => { invoc_attr_index.ast_index() @@ -1070,7 +1068,7 @@ impl<'db> SemanticsImpl<'db> { .unwrap_or_else(|| text_range.start()); let text_range = TextRange::new(start, text_range.end()); filter_duplicates(tokens, text_range); - return process_expansion_for_token(&mut stack, file_id); + return process_expansion_for_token(&mut stack, call_id); } // Then check for token trees, that means we are either in a function-like macro or @@ -1093,24 +1091,16 @@ impl<'db> SemanticsImpl<'db> { let file_id = match m_cache.get(&mcall) { Some(&it) => it, None => { - let it = if let Some(call) = - ::to_def( - self, - mcall.as_ref(), - ) { - call.as_macro_file() - } else { - token - .parent() - .and_then(|parent| { - self.analyze_impl( - InFile::new(expansion, &parent), - None, - false, - ) - })? - .expand(self.db, mcall.as_ref())? - }; + let it = token + .parent() + .and_then(|parent| { + self.analyze_impl( + InFile::new(expansion, &parent), + None, + false, + ) + })? + .expand(self.db, mcall.as_ref())?; m_cache.insert(mcall, it); it } @@ -1119,10 +1109,10 @@ impl<'db> SemanticsImpl<'db> { filter_duplicates(tokens, text_range); process_expansion_for_token(&mut stack, file_id).or(file_id - .eager_arg(self.db.upcast()) + .eager_arg(self.db) .and_then(|arg| { // also descend into eager expansions - process_expansion_for_token(&mut stack, arg.as_macro_file()) + process_expansion_for_token(&mut stack, arg) })) } // derive or derive helper @@ -1146,7 +1136,6 @@ impl<'db> SemanticsImpl<'db> { match derive_call { Some(call_id) => { // resolved to a derive - let file_id = call_id.as_macro_file(); let text_range = attr.syntax().text_range(); // remove any other token in this macro input, all their mappings are the // same as this @@ -1154,7 +1143,7 @@ impl<'db> SemanticsImpl<'db> { !text_range.contains_range(t.text_range()) }); return process_expansion_for_token( - &mut stack, file_id, + &mut stack, call_id, ); } None => Some(adt), @@ -1202,10 +1191,7 @@ impl<'db> SemanticsImpl<'db> { // as there may be multiple derives registering the same helper // name, we gotta make sure to call this for all of them! // FIXME: We need to call `f` for all of them as well though! - res = res.or(process_expansion_for_token( - &mut stack, - derive.as_macro_file(), - )); + res = res.or(process_expansion_for_token(&mut stack, *derive)); } res } @@ -1251,21 +1237,19 @@ impl<'db> SemanticsImpl<'db> { /// macro file the node resides in. pub fn original_range(&self, node: &SyntaxNode) -> FileRange { let node = self.find_file(node); - node.original_file_range_rooted(self.db.upcast()) + node.original_file_range_rooted(self.db) } /// Attempts to map the node out of macro expanded files returning the original file range. pub fn original_range_opt(&self, node: &SyntaxNode) -> Option { let node = self.find_file(node); - node.original_file_range_opt(self.db.upcast()) - .filter(|(_, ctx)| ctx.is_root()) - .map(TupleExt::head) + node.original_file_range_opt(self.db).filter(|(_, ctx)| ctx.is_root()).map(TupleExt::head) } /// Attempts to map the node out of macro expanded files. /// This only work for attribute expansions, as other ones do not have nodes as input. pub fn original_ast_node(&self, node: N) -> Option { - self.wrap_node_infile(node).original_ast_node_rooted(self.db.upcast()).map( + self.wrap_node_infile(node).original_ast_node_rooted(self.db).map( |InRealFile { file_id, value }| { self.cache(find_root(value.syntax()), file_id.into()); value @@ -1277,7 +1261,7 @@ impl<'db> SemanticsImpl<'db> { /// This only work for attribute expansions, as other ones do not have nodes as input. pub fn original_syntax_node_rooted(&self, node: &SyntaxNode) -> Option { let InFile { file_id, .. } = self.find_file(node); - InFile::new(file_id, node).original_syntax_node_rooted(self.db.upcast()).map( + InFile::new(file_id, node).original_syntax_node_rooted(self.db).map( |InRealFile { file_id, value }| { self.cache(find_root(&value), file_id.into()); value @@ -1285,10 +1269,14 @@ impl<'db> SemanticsImpl<'db> { ) } - pub fn diagnostics_display_range(&self, src: InFile) -> FileRange { + pub fn diagnostics_display_range( + &self, + src: InFile, + ) -> FileRangeWrapper { let root = self.parse_or_expand(src.file_id); let node = src.map(|it| it.to_node(&root)); - node.as_ref().original_file_range_rooted(self.db.upcast()) + let FileRange { file_id, range } = node.as_ref().original_file_range_rooted(self.db); + FileRangeWrapper { file_id: file_id.file_id(self.db), range } } fn token_ancestors_with_macros( @@ -1349,31 +1337,19 @@ impl<'db> SemanticsImpl<'db> { pub fn resolve_type(&self, ty: &ast::Type) -> Option { let analyze = self.analyze(ty.syntax())?; - let (mut types_map, mut types_source_map) = - (TypesMap::default(), TypesSourceMap::default()); - let mut ctx = - LowerCtx::new(self.db.upcast(), analyze.file_id, &mut types_map, &mut types_source_map); - let type_ref = crate::TypeRef::from_ast(&mut ctx, ty.clone()); - let ty = hir_ty::TyLoweringContext::new_maybe_unowned( - self.db, - &analyze.resolver, - &types_map, - None, - analyze.resolver.type_owner(), - ) - .lower_ty(type_ref); - Some(Type::new_with_resolver(self.db, &analyze.resolver, ty)) + analyze.type_of_type(self.db, ty) } pub fn resolve_trait(&self, path: &ast::Path) -> Option { + let parent_ty = path.syntax().parent().and_then(ast::Type::cast)?; let analyze = self.analyze(path.syntax())?; - let (mut types_map, mut types_source_map) = - (TypesMap::default(), TypesSourceMap::default()); - let mut ctx = - LowerCtx::new(self.db.upcast(), analyze.file_id, &mut types_map, &mut types_source_map); - let hir_path = Path::from_src(&mut ctx, path.clone())?; - match analyze.resolver.resolve_path_in_type_ns_fully(self.db.upcast(), &hir_path)? { - TypeNs::TraitId(id) => Some(Trait { id }), + let ty = analyze.store_sm()?.node_type(InFile::new(analyze.file_id, &parent_ty))?; + let path = match &analyze.store()?.types[ty] { + hir_def::type_ref::TypeRef::Path(path) => path, + _ => return None, + }; + match analyze.resolver.resolve_path_in_type_ns_fully(self.db, path)? { + TypeNs::TraitId(trait_id) => Some(trait_id.into()), _ => None, } } @@ -1388,7 +1364,7 @@ impl<'db> SemanticsImpl<'db> { let (mut source_ty, _) = analyzer.type_of_expr(self.db, expr)?; - analyzer.expr_adjustments(self.db, expr).map(|it| { + analyzer.expr_adjustments(expr).map(|it| { it.iter() .map(|adjust| { let target = @@ -1521,7 +1497,7 @@ impl<'db> SemanticsImpl<'db> { } pub fn resolve_field(&self, field: &ast::FieldExpr) -> Option> { - self.analyze(field.syntax())?.resolve_field(self.db, field) + self.analyze(field.syntax())?.resolve_field(field) } pub fn resolve_field_fallback( @@ -1641,30 +1617,25 @@ impl<'db> SemanticsImpl<'db> { self.analyze(name.syntax())?.resolve_use_type_arg(name) } + pub fn resolve_offset_of_field( + &self, + name_ref: &ast::NameRef, + ) -> Option<(Either, GenericSubstitution)> { + self.analyze_no_infer(name_ref.syntax())?.resolve_offset_of_field(self.db, name_ref) + } + pub fn resolve_mod_path( &self, scope: &SyntaxNode, path: &ModPath, ) -> Option> { let analyze = self.analyze(scope)?; - let items = analyze.resolver.resolve_module_path_in_items(self.db.upcast(), path); - Some(items.iter_items().map(|(item, _)| item.into())) - } - - pub fn resolve_mod_path_relative( - &self, - to: Module, - segments: impl IntoIterator, - ) -> Option> { - let items = to.id.resolver(self.db.upcast()).resolve_module_path_in_items( - self.db.upcast(), - &ModPath::from_segments(hir_def::path::PathKind::Plain, segments), - ); + let items = analyze.resolver.resolve_module_path_in_items(self.db, path); Some(items.iter_items().map(|(item, _)| item.into())) } fn resolve_variant(&self, record_lit: ast::RecordExpr) -> Option { - self.analyze(record_lit.syntax())?.resolve_variant(self.db, record_lit) + self.analyze(record_lit.syntax())?.resolve_variant(record_lit) } pub fn resolve_bind_pat_to_const(&self, pat: &ast::IdentPat) -> Option { @@ -1764,6 +1735,7 @@ impl<'db> SemanticsImpl<'db> { &self, node: InFile<&SyntaxNode>, offset: Option, + // replace this, just make the inference result a `LazyCell` infer_body: bool, ) -> Option { let _p = tracing::info_span!("SemanticsImpl::analyze_impl").entered(); @@ -1776,16 +1748,30 @@ impl<'db> SemanticsImpl<'db> { SourceAnalyzer::new_for_body(self.db, def, node, offset) } else { SourceAnalyzer::new_for_body_no_infer(self.db, def, node, offset) - }) + }); + } + ChildContainer::VariantId(def) => { + return Some(SourceAnalyzer::new_variant_body(self.db, def, node, offset)); } - ChildContainer::TraitId(it) => it.resolver(self.db.upcast()), - ChildContainer::TraitAliasId(it) => it.resolver(self.db.upcast()), - ChildContainer::ImplId(it) => it.resolver(self.db.upcast()), - ChildContainer::ModuleId(it) => it.resolver(self.db.upcast()), - ChildContainer::EnumId(it) => it.resolver(self.db.upcast()), - ChildContainer::VariantId(it) => it.resolver(self.db.upcast()), - ChildContainer::TypeAliasId(it) => it.resolver(self.db.upcast()), - ChildContainer::GenericDefId(it) => it.resolver(self.db.upcast()), + ChildContainer::TraitId(it) => { + return Some(SourceAnalyzer::new_generic_def(self.db, it.into(), node, offset)); + } + ChildContainer::TraitAliasId(it) => { + return Some(SourceAnalyzer::new_generic_def(self.db, it.into(), node, offset)); + } + ChildContainer::ImplId(it) => { + return Some(SourceAnalyzer::new_generic_def(self.db, it.into(), node, offset)); + } + ChildContainer::EnumId(it) => { + return Some(SourceAnalyzer::new_generic_def(self.db, it.into(), node, offset)); + } + ChildContainer::TypeAliasId(it) => { + return Some(SourceAnalyzer::new_generic_def(self.db, it.into(), node, offset)); + } + ChildContainer::GenericDefId(it) => { + return Some(SourceAnalyzer::new_generic_def(self.db, it, node, offset)); + } + ChildContainer::ModuleId(it) => it.resolver(self.db), }; Some(SourceAnalyzer::new_for_resolver(resolver, node)) } @@ -1891,22 +1877,21 @@ impl<'db> SemanticsImpl<'db> { } } +// FIXME This can't be the best way to do this fn macro_call_to_macro_id( ctx: &mut SourceToDefCtx<'_, '_>, macro_call_id: MacroCallId, ) -> Option { - use span::HirFileIdRepr; - - let db: &dyn ExpandDatabase = ctx.db.upcast(); + let db: &dyn ExpandDatabase = ctx.db; let loc = db.lookup_intern_macro_call(macro_call_id); match loc.def.ast_id() { Either::Left(it) => { - let node = match it.file_id.repr() { - HirFileIdRepr::FileId(file_id) => { + let node = match it.file_id { + HirFileId::FileId(file_id) => { it.to_ptr(db).to_node(&db.parse(file_id).syntax_node()) } - HirFileIdRepr::MacroFile(macro_file) => { + HirFileId::MacroFile(macro_file) => { let expansion_info = ctx.cache.get_or_insert_expansion(ctx.db, macro_file); it.to_ptr(db).to_node(&expansion_info.expanded().value) } @@ -1914,11 +1899,11 @@ fn macro_call_to_macro_id( ctx.macro_to_def(InFile::new(it.file_id, &node)) } Either::Right(it) => { - let node = match it.file_id.repr() { - HirFileIdRepr::FileId(file_id) => { + let node = match it.file_id { + HirFileId::FileId(file_id) => { it.to_ptr(db).to_node(&db.parse(file_id).syntax_node()) } - HirFileIdRepr::MacroFile(macro_file) => { + HirFileId::MacroFile(macro_file) => { let expansion_info = ctx.cache.get_or_insert_expansion(ctx.db, macro_file); it.to_ptr(db).to_node(&expansion_info.expanded().value) } @@ -2028,12 +2013,12 @@ impl SemanticsScope<'_> { /// Note: `VisibleTraits` should be treated as an opaque type, passed into `Type pub fn visible_traits(&self) -> VisibleTraits { let resolver = &self.resolver; - VisibleTraits(resolver.traits_in_scope(self.db.upcast())) + VisibleTraits(resolver.traits_in_scope(self.db)) } /// Calls the passed closure `f` on all names in scope. pub fn process_all_names(&self, f: &mut dyn FnMut(Name, ScopeDef)) { - let scope = self.resolver.names_in_scope(self.db.upcast()); + let scope = self.resolver.names_in_scope(self.db); for (name, entries) in scope { for entry in entries { let def = match entry { @@ -2059,28 +2044,45 @@ impl SemanticsScope<'_> { /// Resolve a path as-if it was written at the given scope. This is /// necessary a heuristic, as it doesn't take hygiene into account. pub fn speculative_resolve(&self, ast_path: &ast::Path) -> Option { - let root = ast_path.syntax().ancestors().last().unwrap(); - let ast_id_map = Arc::new(AstIdMap::from_source(&root)); - let (mut types_map, mut types_source_map) = - (TypesMap::default(), TypesSourceMap::default()); - let mut ctx = LowerCtx::for_synthetic_ast( - self.db.upcast(), - ast_id_map, - &mut types_map, - &mut types_source_map, - ); - let path = Path::from_src(&mut ctx, ast_path.clone())?; + let mut kind = PathKind::Plain; + let mut segments = vec![]; + let mut first = true; + for segment in ast_path.segments() { + if first { + first = false; + if segment.coloncolon_token().is_some() { + kind = PathKind::Abs; + } + } + + let Some(k) = segment.kind() else { continue }; + match k { + ast::PathSegmentKind::Name(name_ref) => segments.push(name_ref.as_name()), + ast::PathSegmentKind::Type { .. } => continue, + ast::PathSegmentKind::SelfTypeKw => { + segments.push(Name::new_symbol_root(sym::Self_)) + } + ast::PathSegmentKind::SelfKw => kind = PathKind::Super(0), + ast::PathSegmentKind::SuperKw => match kind { + PathKind::Super(s) => kind = PathKind::Super(s + 1), + PathKind::Plain => kind = PathKind::Super(1), + PathKind::Crate | PathKind::Abs | PathKind::DollarCrate(_) => continue, + }, + ast::PathSegmentKind::CrateKw => kind = PathKind::Crate, + } + } + resolve_hir_path( self.db, &self.resolver, - &path, + &Path::BarePath(Interned::new(ModPath::from_segments(kind, segments))), name_hygiene(self.db, InFile::new(self.file_id, ast_path.syntax())), - &types_map, + None, ) } - pub fn resolve_mod_path(&self, path: &ModPath) -> impl Iterator { - let items = self.resolver.resolve_module_path_in_items(self.db.upcast(), path); + pub fn resolve_mod_path(&self, path: &ModPath) -> impl Iterator + use<> { + let items = self.resolver.resolve_module_path_in_items(self.db, path); items.iter_items().map(|(item, _)| item.into()) } @@ -2109,7 +2111,7 @@ impl SemanticsScope<'_> { } pub fn extern_crate_decls(&self) -> impl Iterator + '_ { - self.resolver.extern_crate_decls_in_scope(self.db.upcast()) + self.resolver.extern_crate_decls_in_scope(self.db) } pub fn has_same_self_type(&self, other: &SemanticsScope<'_>) -> bool { @@ -2145,7 +2147,7 @@ impl RenameConflictsVisitor<'_> { if let Some(name) = path.as_ident() { if *name.symbol() == self.new_name { if let Some(conflicting) = self.resolver.rename_will_conflict_with_renamed( - self.db.upcast(), + self.db, name, path, self.body.expr_or_pat_path_hygiene(node), @@ -2156,7 +2158,7 @@ impl RenameConflictsVisitor<'_> { } else if *name.symbol() == self.old_name { if let Some(conflicting) = self.resolver.rename_will_conflict_with_another_variable( - self.db.upcast(), + self.db, name, path, self.body.expr_or_pat_path_hygiene(node), @@ -2174,12 +2176,12 @@ impl RenameConflictsVisitor<'_> { fn rename_conflicts(&mut self, expr: ExprId) { match &self.body[expr] { Expr::Path(path) => { - let guard = self.resolver.update_to_inner_scope(self.db.upcast(), self.owner, expr); + let guard = self.resolver.update_to_inner_scope(self.db, self.owner, expr); self.resolve_path(expr.into(), path); self.resolver.reset_to_guard(guard); } &Expr::Assignment { target, .. } => { - let guard = self.resolver.update_to_inner_scope(self.db.upcast(), self.owner, expr); + let guard = self.resolver.update_to_inner_scope(self.db, self.owner, expr); self.body.walk_pats(target, &mut |pat| { if let Pat::Path(path) = &self.body[pat] { self.resolve_path(pat.into(), path); diff --git a/src/tools/rust-analyzer/crates/hir/src/semantics/child_by_source.rs b/src/tools/rust-analyzer/crates/hir/src/semantics/child_by_source.rs index d0fdf5cbdf7a3..9393d08ad3f96 100644 --- a/src/tools/rust-analyzer/crates/hir/src/semantics/child_by_source.rs +++ b/src/tools/rust-analyzer/crates/hir/src/semantics/child_by_source.rs @@ -5,22 +5,22 @@ //! node for a *child*, and get its hir. use either::Either; -use hir_expand::{attrs::collect_attrs, HirFileId}; -use syntax::{ast, AstPtr}; +use hir_expand::{HirFileId, attrs::collect_attrs}; +use syntax::{AstPtr, ast}; use hir_def::{ + AdtId, AssocItemId, DefWithBodyId, EnumId, FieldId, GenericDefId, ImplId, ItemTreeLoc, + LifetimeParamId, Lookup, MacroId, ModuleDefId, ModuleId, TraitId, TypeOrConstParamId, + VariantId, db::DefDatabase, dyn_map::{ - keys::{self, Key}, DynMap, + keys::{self, Key}, }, item_scope::ItemScope, item_tree::ItemTreeNode, nameres::DefMap, src::{HasChildSource, HasSource}, - AdtId, AssocItemId, DefWithBodyId, EnumId, FieldId, GenericDefId, ImplId, ItemTreeLoc, - LifetimeParamId, Lookup, MacroId, ModuleDefId, ModuleId, TraitId, TypeOrConstParamId, - VariantId, }; pub(crate) trait ChildBySource { @@ -34,11 +34,11 @@ pub(crate) trait ChildBySource { impl ChildBySource for TraitId { fn child_by_source_to(&self, db: &dyn DefDatabase, res: &mut DynMap, file_id: HirFileId) { - let data = db.trait_data(*self); + let data = db.trait_items(*self); data.attribute_calls().filter(|(ast_id, _)| ast_id.file_id == file_id).for_each( |(ast_id, call_id)| { - res[keys::ATTR_MACRO_CALL].insert(ast_id.to_ptr(db.upcast()), call_id); + res[keys::ATTR_MACRO_CALL].insert(ast_id.to_ptr(db), call_id); }, ); data.items.iter().for_each(|&(_, item)| { @@ -49,11 +49,11 @@ impl ChildBySource for TraitId { impl ChildBySource for ImplId { fn child_by_source_to(&self, db: &dyn DefDatabase, res: &mut DynMap, file_id: HirFileId) { - let data = db.impl_data(*self); + let data = db.impl_items(*self); // FIXME: Macro calls data.attribute_calls().filter(|(ast_id, _)| ast_id.file_id == file_id).for_each( |(ast_id, call_id)| { - res[keys::ATTR_MACRO_CALL].insert(ast_id.to_ptr(db.upcast()), call_id); + res[keys::ATTR_MACRO_CALL].insert(ast_id.to_ptr(db), call_id); }, ); data.items.iter().for_each(|&(_, item)| { @@ -84,7 +84,7 @@ impl ChildBySource for ItemScope { .for_each(|konst| insert_item_loc(db, res, file_id, konst, keys::CONST)); self.attr_macro_invocs().filter(|(id, _)| id.file_id == file_id).for_each( |(ast_id, call_id)| { - res[keys::ATTR_MACRO_CALL].insert(ast_id.to_ptr(db.upcast()), call_id); + res[keys::ATTR_MACRO_CALL].insert(ast_id.to_ptr(db), call_id); }, ); self.legacy_macros().for_each(|(_, ids)| { @@ -99,7 +99,7 @@ impl ChildBySource for ItemScope { }); self.derive_macro_invocs().filter(|(id, _)| id.file_id == file_id).for_each( |(ast_id, calls)| { - let adt = ast_id.to_node(db.upcast()); + let adt = ast_id.to_node(db); calls.for_each(|(attr_id, call_id, calls)| { if let Some((_, Either::Left(attr))) = collect_attrs(&adt).nth(attr_id.ast_index()) @@ -112,7 +112,7 @@ impl ChildBySource for ItemScope { ); self.iter_macro_invoc().filter(|(id, _)| id.file_id == file_id).for_each( |(ast_id, &call)| { - let ast = ast_id.to_ptr(db.upcast()); + let ast = ast_id.to_ptr(db); res[keys::MACRO_CALL].insert(ast, call); }, ); @@ -182,7 +182,7 @@ impl ChildBySource for EnumId { let tree = loc.id.item_tree(db); let ast_id_map = db.ast_id_map(loc.id.file_id()); - db.enum_data(*self).variants.iter().for_each(|&(variant, _)| { + db.enum_variants(*self).variants.iter().for_each(|&(variant, _)| { res[keys::ENUM_VARIANT] .insert(ast_id_map.get(tree[variant.lookup(db).id.value].ast_id), variant); }); @@ -197,14 +197,14 @@ impl ChildBySource for DefWithBodyId { } sm.expansions().filter(|(ast, _)| ast.file_id == file_id).for_each(|(ast, &exp_id)| { - res[keys::MACRO_CALL].insert(ast.value, exp_id.macro_call_id); + res[keys::MACRO_CALL].insert(ast.value, exp_id); }); for (block, def_map) in body.blocks(db) { // All block expressions are merged into the same map, because they logically all add // inner items to the containing `DefWithBodyId`. def_map[DefMap::ROOT].scope.child_by_source_to(db, res, file_id); - res[keys::BLOCK].insert(block.lookup(db).ast_id.to_ptr(db.upcast()), block); + res[keys::BLOCK].insert(block.lookup(db).ast_id.to_ptr(db), block); } } } @@ -254,7 +254,7 @@ fn insert_item_loc( id: ID, key: Key, ) where - ID: for<'db> Lookup = dyn DefDatabase + 'db, Data = Data> + 'static, + ID: Lookup + 'static, Data: ItemTreeLoc, N: ItemTreeNode, N::Source: 'static, diff --git a/src/tools/rust-analyzer/crates/hir/src/semantics/source_to_def.rs b/src/tools/rust-analyzer/crates/hir/src/semantics/source_to_def.rs index 18cbaa15aeaed..587c51d8cc99a 100644 --- a/src/tools/rust-analyzer/crates/hir/src/semantics/source_to_def.rs +++ b/src/tools/rust-analyzer/crates/hir/src/semantics/source_to_def.rs @@ -87,37 +87,38 @@ use either::Either; use hir_def::{ - dyn_map::{ - keys::{self, Key}, - DynMap, - }, - hir::{BindingId, Expr, LabelId}, AdtId, BlockId, ConstId, ConstParamId, DefWithBodyId, EnumId, EnumVariantId, ExternBlockId, ExternCrateId, FieldId, FunctionId, GenericDefId, GenericParamId, ImplId, LifetimeParamId, Lookup, MacroId, ModuleId, StaticId, StructId, TraitAliasId, TraitId, TypeAliasId, TypeParamId, UnionId, UseId, VariantId, + dyn_map::{ + DynMap, + keys::{self, Key}, + }, + hir::{BindingId, Expr, LabelId}, }; use hir_expand::{ - attrs::AttrId, name::AsName, ExpansionInfo, HirFileId, HirFileIdExt, InMacroFile, MacroCallId, - MacroFileIdExt, + EditionedFileId, ExpansionInfo, HirFileId, InMacroFile, MacroCallId, attrs::AttrId, + name::AsName, }; use rustc_hash::FxHashMap; use smallvec::SmallVec; -use span::{EditionedFileId, FileId, MacroFileId}; +use span::FileId; use stdx::impl_from; use syntax::{ - ast::{self, HasName}, AstNode, AstPtr, SyntaxNode, + ast::{self, HasName}, }; +use tt::TextRange; -use crate::{db::HirDatabase, semantics::child_by_source::ChildBySource, InFile, InlineAsmOperand}; +use crate::{InFile, InlineAsmOperand, db::HirDatabase, semantics::child_by_source::ChildBySource}; #[derive(Default)] pub(super) struct SourceToDefCache { pub(super) dynmap_cache: FxHashMap<(ChildContainer, HirFileId), DynMap>, - expansion_info_cache: FxHashMap, + expansion_info_cache: FxHashMap, pub(super) file_to_def_cache: FxHashMap>, - pub(super) included_file_cache: FxHashMap>, + pub(super) included_file_cache: FxHashMap>, /// Rootnode to HirFileId cache pub(super) root_to_file_cache: FxHashMap, } @@ -137,14 +138,14 @@ impl SourceToDefCache { &mut self, db: &dyn HirDatabase, file: EditionedFileId, - ) -> Option { + ) -> Option { if let Some(&m) = self.included_file_cache.get(&file) { return m; } self.included_file_cache.insert(file, None); - for &crate_id in db.relevant_crates(file.into()).iter() { + for &crate_id in db.relevant_crates(file.file_id(db)).iter() { db.include_macro_invoc(crate_id).iter().for_each(|&(macro_call_id, file_id)| { - self.included_file_cache.insert(file_id, Some(MacroFileId { macro_call_id })); + self.included_file_cache.insert(file_id, Some(macro_call_id)); }); } self.included_file_cache.get(&file).copied().flatten() @@ -153,10 +154,10 @@ impl SourceToDefCache { pub(super) fn get_or_insert_expansion( &mut self, db: &dyn HirDatabase, - macro_file: MacroFileId, + macro_file: MacroCallId, ) -> &ExpansionInfo { self.expansion_info_cache.entry(macro_file).or_insert_with(|| { - let exp_info = macro_file.expansion_info(db.upcast()); + let exp_info = macro_file.expansion_info(db); let InMacroFile { file_id, value } = exp_info.expanded(); Self::cache(&mut self.root_to_file_cache, value, file_id.into()); @@ -176,13 +177,14 @@ impl SourceToDefCtx<'_, '_> { let _p = tracing::info_span!("SourceToDefCtx::file_to_def").entered(); self.cache.file_to_def_cache.entry(file).or_insert_with(|| { let mut mods = SmallVec::new(); + for &crate_id in self.db.relevant_crates(file).iter() { // Note: `mod` declarations in block modules cannot be supported here let crate_def_map = self.db.crate_def_map(crate_id); let n_mods = mods.len(); let modules = |file| { crate_def_map - .modules_for_file(file) + .modules_for_file(self.db, file) .map(|local_id| crate_def_map.module_id(local_id)) }; mods.extend(modules(file)); @@ -191,18 +193,16 @@ impl SourceToDefCtx<'_, '_> { self.db .include_macro_invoc(crate_id) .iter() - .filter(|&&(_, file_id)| file_id == file) + .filter(|&&(_, file_id)| file_id.file_id(self.db) == file) .flat_map(|&(macro_call_id, file_id)| { - self.cache - .included_file_cache - .insert(file_id, Some(MacroFileId { macro_call_id })); + self.cache.included_file_cache.insert(file_id, Some(macro_call_id)); modules( macro_call_id - .lookup(self.db.upcast()) + .lookup(self.db) .kind .file_id() - .original_file(self.db.upcast()) - .file_id(), + .original_file(self.db) + .file_id(self.db), ) }), ); @@ -218,7 +218,7 @@ impl SourceToDefCtx<'_, '_> { pub(super) fn module_to_def(&mut self, src: InFile<&ast::Module>) -> Option { let _p = tracing::info_span!("module_to_def").entered(); let parent_declaration = self - .ancestors_with_macros(src.syntax_ref(), |_, ancestor| { + .parent_ancestors_with_macros(src.syntax_ref(), |_, ancestor, _| { ancestor.map(Either::::cast).transpose() }) .map(|it| it.transpose()); @@ -231,21 +231,21 @@ impl SourceToDefCtx<'_, '_> { self.module_to_def(parent_declaration.as_ref()) } None => { - let file_id = src.file_id.original_file(self.db.upcast()); - self.file_to_def(file_id.file_id()).first().copied() + let file_id = src.file_id.original_file(self.db); + self.file_to_def(file_id.file_id(self.db)).first().copied() } }?; let child_name = src.value.name()?.as_name(); - let def_map = parent_module.def_map(self.db.upcast()); + let def_map = parent_module.def_map(self.db); let &child_id = def_map[parent_module.local_id].children.get(&child_name)?; Some(def_map.module_id(child_id)) } pub(super) fn source_file_to_def(&mut self, src: InFile<&ast::SourceFile>) -> Option { let _p = tracing::info_span!("source_file_to_def").entered(); - let file_id = src.file_id.original_file(self.db.upcast()); - self.file_to_def(file_id.file_id()).first().copied() + let file_id = src.file_id.original_file(self.db); + self.file_to_def(file_id.file_id(self.db)).first().copied() } pub(super) fn trait_to_def(&mut self, src: InFile<&ast::Trait>) -> Option { @@ -344,7 +344,7 @@ impl SourceToDefCtx<'_, '_> { }) .position(|it| it == *src.value)?; let container = self.find_pat_or_label_container(src.syntax_ref())?; - let (_, source_map) = self.db.body_with_source_map(container); + let source_map = self.db.body_with_source_map(container).1; let expr = source_map.node_expr(src.with_value(&ast::Expr::AsmExpr(asm)))?.as_expr()?; Some(InlineAsmOperand { owner: container, expr, index }) } @@ -377,7 +377,8 @@ impl SourceToDefCtx<'_, '_> { src: InFile<&ast::Label>, ) -> Option<(DefWithBodyId, LabelId)> { let container = self.find_pat_or_label_container(src.syntax_ref())?; - let (_body, source_map) = self.db.body_with_source_map(container); + let source_map = self.db.body_with_source_map(container).1; + let label_id = source_map.node_label(src)?; Some((container, label_id)) } @@ -516,45 +517,22 @@ impl SourceToDefCtx<'_, '_> { pub(super) fn find_container(&mut self, src: InFile<&SyntaxNode>) -> Option { let _p = tracing::info_span!("find_container").entered(); - let def = - self.ancestors_with_macros(src, |this, container| this.container_to_def(container)); + let def = self.parent_ancestors_with_macros(src, |this, container, child| { + this.container_to_def(container, child) + }); if let Some(def) = def { return Some(def); } let def = self - .file_to_def(src.file_id.original_file(self.db.upcast()).file_id()) + .file_to_def(src.file_id.original_file(self.db).file_id(self.db)) .first() .copied()?; Some(def.into()) } - /// Skips the attributed item that caused the macro invocation we are climbing up - fn ancestors_with_macros( - &mut self, - node: InFile<&SyntaxNode>, - mut cb: impl FnMut(&mut Self, InFile) -> Option, - ) -> Option { - let parent = |this: &mut Self, node: InFile<&SyntaxNode>| match node.value.parent() { - Some(parent) => Some(node.with_value(parent)), - None => { - let macro_file = node.file_id.macro_file()?; - let expansion_info = this.cache.get_or_insert_expansion(this.db, macro_file); - expansion_info.arg().map(|node| node?.parent()).transpose() - } - }; - let mut node = node.cloned(); - while let Some(parent) = parent(self, node.as_ref()) { - if let Some(res) = cb(self, parent.clone()) { - return Some(res); - } - node = parent; - } - None - } - fn find_generic_param_container(&mut self, src: InFile<&SyntaxNode>) -> Option { - self.ancestors_with_macros(src, |this, InFile { file_id, value }| { + self.parent_ancestors_with_macros(src, |this, InFile { file_id, value }, _| { let item = ast::Item::cast(value)?; match &item { ast::Item::Fn(it) => this.fn_to_def(InFile::new(file_id, it)).map(Into::into), @@ -575,12 +553,13 @@ impl SourceToDefCtx<'_, '_> { }) } + // FIXME: Remove this when we do inference in signatures fn find_pat_or_label_container(&mut self, src: InFile<&SyntaxNode>) -> Option { - self.ancestors_with_macros(src, |this, InFile { file_id, value }| { + self.parent_ancestors_with_macros(src, |this, InFile { file_id, value }, _| { let item = match ast::Item::cast(value.clone()) { Some(it) => it, None => { - let variant = ast::Variant::cast(value.clone())?; + let variant = ast::Variant::cast(value)?; return this .enum_variant_to_def(InFile::new(file_id, &variant)) .map(Into::into); @@ -597,7 +576,43 @@ impl SourceToDefCtx<'_, '_> { }) } - fn container_to_def(&mut self, container: InFile) -> Option { + /// Skips the attributed item that caused the macro invocation we are climbing up + fn parent_ancestors_with_macros( + &mut self, + node: InFile<&SyntaxNode>, + mut cb: impl FnMut( + &mut Self, + /*parent: */ InFile, + /*child: */ &SyntaxNode, + ) -> Option, + ) -> Option { + let parent = |this: &mut Self, node: InFile<&SyntaxNode>| match node.value.parent() { + Some(parent) => Some(node.with_value(parent)), + None => { + let macro_file = node.file_id.macro_file()?; + let expansion_info = this.cache.get_or_insert_expansion(this.db, macro_file); + expansion_info.arg().map(|node| node?.parent()).transpose() + } + }; + let mut deepest_child_in_same_file = node.cloned(); + let mut node = node.cloned(); + while let Some(parent) = parent(self, node.as_ref()) { + if parent.file_id != node.file_id { + deepest_child_in_same_file = parent.clone(); + } + if let Some(res) = cb(self, parent.clone(), &deepest_child_in_same_file.value) { + return Some(res); + } + node = parent; + } + None + } + + fn container_to_def( + &mut self, + container: InFile, + child: &SyntaxNode, + ) -> Option { let cont = if let Some(item) = ast::Item::cast(container.value.clone()) { match &item { ast::Item::Module(it) => self.module_to_def(container.with_value(it))?.into(), @@ -612,29 +627,92 @@ impl SourceToDefCtx<'_, '_> { } ast::Item::Struct(it) => { let def = self.struct_to_def(container.with_value(it))?; - VariantId::from(def).into() + let is_in_body = it.field_list().is_some_and(|it| { + it.syntax().text_range().contains(child.text_range().start()) + }); + if is_in_body { + VariantId::from(def).into() + } else { + ChildContainer::GenericDefId(def.into()) + } } ast::Item::Union(it) => { let def = self.union_to_def(container.with_value(it))?; - VariantId::from(def).into() + let is_in_body = it.record_field_list().is_some_and(|it| { + it.syntax().text_range().contains(child.text_range().start()) + }); + if is_in_body { + VariantId::from(def).into() + } else { + ChildContainer::GenericDefId(def.into()) + } } ast::Item::Fn(it) => { let def = self.fn_to_def(container.with_value(it))?; - DefWithBodyId::from(def).into() + let child_offset = child.text_range().start(); + let is_in_body = + it.body().is_some_and(|it| it.syntax().text_range().contains(child_offset)); + let in_param_pat = || { + it.param_list().is_some_and(|it| { + it.self_param() + .and_then(|it| { + Some(TextRange::new( + it.syntax().text_range().start(), + it.name()?.syntax().text_range().end(), + )) + }) + .is_some_and(|r| r.contains_inclusive(child_offset)) + || it + .params() + .filter_map(|it| it.pat()) + .any(|it| it.syntax().text_range().contains(child_offset)) + }) + }; + if is_in_body || in_param_pat() { + DefWithBodyId::from(def).into() + } else { + ChildContainer::GenericDefId(def.into()) + } } ast::Item::Static(it) => { let def = self.static_to_def(container.with_value(it))?; - DefWithBodyId::from(def).into() + let is_in_body = it.body().is_some_and(|it| { + it.syntax().text_range().contains(child.text_range().start()) + }); + if is_in_body { + DefWithBodyId::from(def).into() + } else { + ChildContainer::GenericDefId(def.into()) + } } ast::Item::Const(it) => { let def = self.const_to_def(container.with_value(it))?; - DefWithBodyId::from(def).into() + let is_in_body = it.body().is_some_and(|it| { + it.syntax().text_range().contains(child.text_range().start()) + }); + if is_in_body { + DefWithBodyId::from(def).into() + } else { + ChildContainer::GenericDefId(def.into()) + } } _ => return None, } - } else { - let it = ast::Variant::cast(container.value)?; + } else if let Some(it) = ast::Variant::cast(container.value.clone()) { let def = self.enum_variant_to_def(InFile::new(container.file_id, &it))?; + let is_in_body = + it.eq_token().is_some_and(|it| it.text_range().end() < child.text_range().start()); + if is_in_body { DefWithBodyId::from(def).into() } else { VariantId::from(def).into() } + } else { + let it = match Either::::cast(container.value)? { + Either::Left(it) => ast::Param::cast(it.syntax().parent()?)?.syntax().parent(), + Either::Right(it) => ast::SelfParam::cast(it.syntax().parent()?)?.syntax().parent(), + } + .and_then(ast::ParamList::cast)? + .syntax() + .parent() + .and_then(ast::Fn::cast)?; + let def = self.fn_to_def(InFile::new(container.file_id, &it))?; DefWithBodyId::from(def).into() }; Some(cont) @@ -671,7 +749,6 @@ impl_from! { impl ChildContainer { fn child_by_source(self, db: &dyn HirDatabase, file_id: HirFileId) -> DynMap { let _p = tracing::info_span!("ChildContainer::child_by_source").entered(); - let db = db.upcast(); match self { ChildContainer::DefWithBodyId(it) => it.child_by_source(db, file_id), ChildContainer::ModuleId(it) => it.child_by_source(db, file_id), diff --git a/src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs b/src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs index d1245f5f7d681..c1a75ce7e574e 100644 --- a/src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs +++ b/src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs @@ -8,48 +8,49 @@ use std::iter::{self, once}; use crate::{ - db::HirDatabase, semantics::PathResolution, Adt, AssocItem, BindingMode, BuiltinAttr, - BuiltinType, Callable, Const, DeriveHelper, Field, Function, GenericSubstitution, Local, Macro, - ModuleDef, Static, Struct, ToolModule, Trait, TraitAlias, TupleField, Type, TypeAlias, Variant, + Adt, AssocItem, BindingMode, BuiltinAttr, BuiltinType, Callable, Const, DeriveHelper, Field, + Function, GenericSubstitution, Local, Macro, ModuleDef, Static, Struct, ToolModule, Trait, + TraitAlias, TupleField, Type, TypeAlias, Variant, db::HirDatabase, semantics::PathResolution, }; use either::Either; use hir_def::{ + AdtId, AssocItemId, CallableDefId, ConstId, DefWithBodyId, FieldId, FunctionId, GenericDefId, + ItemContainerId, LocalFieldId, Lookup, ModuleDefId, StructId, TraitId, VariantId, expr_store::{ + Body, BodySourceMap, ExpressionStore, ExpressionStoreSourceMap, HygieneId, + lower::ExprCollector, + path::Path, scope::{ExprScopes, ScopeId}, - Body, BodySourceMap, HygieneId, }, hir::{BindingId, Expr, ExprId, ExprOrPatId, Pat}, lang_item::LangItem, - lower::LowerCtx, nameres::MacroSubNs, - path::{ModPath, Path, PathKind}, - resolver::{resolver_for_scope, Resolver, TypeNs, ValueNs}, - type_ref::{Mutability, TypesMap, TypesSourceMap}, - AsMacroCall, AssocItemId, CallableDefId, ConstId, DefWithBodyId, FieldId, FunctionId, - ItemContainerId, LocalFieldId, Lookup, ModuleDefId, StructId, TraitId, VariantId, + resolver::{HasResolver, Resolver, TypeNs, ValueNs, resolver_for_scope}, + type_ref::{Mutability, TypeRefId}, }; use hir_expand::{ - mod_path::path, + HirFileId, InFile, MacroCallId, + mod_path::{ModPath, PathKind, path}, name::{AsName, Name}, - HirFileId, InFile, InMacroFile, MacroFileId, MacroFileIdExt, }; use hir_ty::{ + Adjustment, AliasTy, InferenceResult, Interner, LifetimeElisionKind, ProjectionTy, + Substitution, TraitEnvironment, Ty, TyExt, TyKind, TyLoweringContext, diagnostics::{ - record_literal_missing_fields, record_pattern_missing_fields, unsafe_operations, - InsideUnsafeBlock, + InsideUnsafeBlock, record_literal_missing_fields, record_pattern_missing_fields, + unsafe_operations, }, from_assoc_type_id, lang_items::lang_items_for_bin_op, - method_resolution, Adjustment, InferenceResult, Interner, Substitution, TraitEnvironment, Ty, - TyExt, TyKind, TyLoweringContext, + method_resolution, }; use intern::sym; use itertools::Itertools; use smallvec::SmallVec; -use syntax::ast::{RangeItem, RangeOp}; +use stdx::never; use syntax::{ - ast::{self, AstNode}, SyntaxKind, SyntaxNode, TextRange, TextSize, + ast::{self, AstNode, RangeItem, RangeOp}, }; use triomphe::Arc; @@ -59,8 +60,29 @@ use triomphe::Arc; pub(crate) struct SourceAnalyzer { pub(crate) file_id: HirFileId, pub(crate) resolver: Resolver, - def: Option<(DefWithBodyId, Arc, Arc)>, - infer: Option>, + pub(crate) body_or_sig: Option, +} + +#[derive(Debug)] +pub(crate) enum BodyOrSig { + Body { + def: DefWithBodyId, + body: Arc, + source_map: Arc, + infer: Option>, + }, + // To be folded into body once it is considered one + VariantFields { + def: VariantId, + store: Arc, + source_map: Arc, + }, + Sig { + def: GenericDefId, + store: Arc, + source_map: Arc, + // infer: Option>, + }, } impl SourceAnalyzer { @@ -103,99 +125,163 @@ impl SourceAnalyzer { scope_for_offset(db, &scopes, &source_map, node.file_id, offset) } }; - let resolver = resolver_for_scope(db.upcast(), def, scope); - SourceAnalyzer { resolver, def: Some((def, body, source_map)), infer, file_id } + let resolver = resolver_for_scope(db, def, scope); + SourceAnalyzer { + resolver, + body_or_sig: Some(BodyOrSig::Body { def, body, source_map, infer }), + file_id, + } + } + + pub(crate) fn new_generic_def( + db: &dyn HirDatabase, + def: GenericDefId, + InFile { file_id, .. }: InFile<&SyntaxNode>, + _offset: Option, + ) -> SourceAnalyzer { + let (_params, store, source_map) = db.generic_params_and_store_and_source_map(def); + let resolver = def.resolver(db); + SourceAnalyzer { + resolver, + body_or_sig: Some(BodyOrSig::Sig { def, store, source_map }), + file_id, + } + } + + pub(crate) fn new_variant_body( + db: &dyn HirDatabase, + def: VariantId, + InFile { file_id, .. }: InFile<&SyntaxNode>, + _offset: Option, + ) -> SourceAnalyzer { + let (fields, source_map) = db.variant_fields_with_source_map(def); + let resolver = def.resolver(db); + SourceAnalyzer { + resolver, + body_or_sig: Some(BodyOrSig::VariantFields { + def, + store: fields.store.clone(), + source_map, + }), + file_id, + } } pub(crate) fn new_for_resolver( resolver: Resolver, node: InFile<&SyntaxNode>, ) -> SourceAnalyzer { - SourceAnalyzer { resolver, def: None, infer: None, file_id: node.file_id } + SourceAnalyzer { resolver, body_or_sig: None, file_id: node.file_id } } - fn body_source_map(&self) -> Option<&BodySourceMap> { - self.def.as_ref().map(|(.., source_map)| &**source_map) + // FIXME: Remove this + fn body_(&self) -> Option<(DefWithBodyId, &Body, &BodySourceMap, Option<&InferenceResult>)> { + self.body_or_sig.as_ref().and_then(|it| match it { + BodyOrSig::Body { def, body, source_map, infer } => { + Some((*def, &**body, &**source_map, infer.as_deref())) + } + _ => None, + }) + } + + fn infer(&self) -> Option<&InferenceResult> { + self.body_or_sig.as_ref().and_then(|it| match it { + BodyOrSig::Sig { .. } => None, + BodyOrSig::VariantFields { .. } => None, + BodyOrSig::Body { infer, .. } => infer.as_deref(), + }) } + fn body(&self) -> Option<&Body> { - self.def.as_ref().map(|(_, body, _)| &**body) + self.body_or_sig.as_ref().and_then(|it| match it { + BodyOrSig::Sig { .. } => None, + BodyOrSig::VariantFields { .. } => None, + BodyOrSig::Body { body, .. } => Some(&**body), + }) + } + + pub(crate) fn store(&self) -> Option<&ExpressionStore> { + self.body_or_sig.as_ref().map(|it| match it { + BodyOrSig::Sig { store, .. } => &**store, + BodyOrSig::VariantFields { store, .. } => &**store, + BodyOrSig::Body { body, .. } => &body.store, + }) + } + + pub(crate) fn store_sm(&self) -> Option<&ExpressionStoreSourceMap> { + self.body_or_sig.as_ref().map(|it| match it { + BodyOrSig::Sig { source_map, .. } => &**source_map, + BodyOrSig::VariantFields { source_map, .. } => &**source_map, + BodyOrSig::Body { source_map, .. } => &source_map.store, + }) + } + + pub(crate) fn expansion(&self, node: InFile<&ast::MacroCall>) -> Option { + self.store_sm()?.expansion(node) } fn trait_environment(&self, db: &dyn HirDatabase) -> Arc { - self.def.as_ref().map(|(def, ..)| *def).map_or_else( + self.body_().map(|(def, ..)| def).map_or_else( || TraitEnvironment::empty(self.resolver.krate()), |def| db.trait_environment_for_body(def), ) } - fn expr_id(&self, db: &dyn HirDatabase, expr: &ast::Expr) -> Option { - let src = match expr { - ast::Expr::MacroExpr(expr) => { - self.expand_expr(db, InFile::new(self.file_id, expr.macro_call()?))?.into() - } - _ => InFile::new(self.file_id, expr.clone()), - }; - let sm = self.body_source_map()?; - sm.node_expr(src.as_ref()) + fn expr_id(&self, expr: ast::Expr) -> Option { + let src = InFile { file_id: self.file_id, value: expr }; + self.store_sm()?.node_expr(src.as_ref()) } fn pat_id(&self, pat: &ast::Pat) -> Option { - // FIXME: macros, see `expr_id` let src = InFile { file_id: self.file_id, value: pat }; - self.body_source_map()?.node_pat(src) + self.store_sm()?.node_pat(src) + } + + fn type_id(&self, pat: &ast::Type) -> Option { + let src = InFile { file_id: self.file_id, value: pat }; + self.store_sm()?.node_type(src) } fn binding_id_of_pat(&self, pat: &ast::IdentPat) -> Option { let pat_id = self.pat_id(&pat.clone().into())?; - if let Pat::Bind { id, .. } = self.body()?.pats[pat_id.as_pat()?] { + if let Pat::Bind { id, .. } = self.store()?.pats[pat_id.as_pat()?] { Some(id) } else { None } } - fn expand_expr( - &self, - db: &dyn HirDatabase, - expr: InFile, - ) -> Option> { - let macro_file = self.body_source_map()?.node_macro_file(expr.as_ref())?; - let expanded = db.parse_macro_expansion(macro_file).value.0.syntax_node(); - let res = if let Some(stmts) = ast::MacroStmts::cast(expanded.clone()) { - match stmts.expr()? { - ast::Expr::MacroExpr(mac) => { - self.expand_expr(db, InFile::new(macro_file.into(), mac.macro_call()?))? - } - expr => InMacroFile::new(macro_file, expr), - } - } else if let Some(call) = ast::MacroCall::cast(expanded.clone()) { - self.expand_expr(db, InFile::new(macro_file.into(), call))? - } else { - InMacroFile::new(macro_file, ast::Expr::cast(expanded)?) - }; - - Some(res) - } - - pub(crate) fn expr_adjustments( - &self, - db: &dyn HirDatabase, - expr: &ast::Expr, - ) -> Option<&[Adjustment]> { + pub(crate) fn expr_adjustments(&self, expr: &ast::Expr) -> Option<&[Adjustment]> { // It is safe to omit destructuring assignments here because they have no adjustments (neither // expressions nor patterns). - let expr_id = self.expr_id(db, expr)?.as_expr()?; - let infer = self.infer.as_ref()?; + let expr_id = self.expr_id(expr.clone())?.as_expr()?; + let infer = self.infer()?; infer.expr_adjustments.get(&expr_id).map(|v| &**v) } + pub(crate) fn type_of_type(&self, db: &dyn HirDatabase, ty: &ast::Type) -> Option { + let type_ref = self.type_id(ty)?; + let ty = TyLoweringContext::new( + db, + &self.resolver, + self.store()?, + self.resolver.generic_def()?, + // FIXME: Is this correct here? Anyway that should impact mostly diagnostics, which we don't emit here + // (this can impact the lifetimes generated, e.g. in `const` they won't be `'static`, but this seems like a + // small problem). + LifetimeElisionKind::Infer, + ) + .lower_ty(type_ref); + Some(Type::new_with_resolver(db, &self.resolver, ty)) + } + pub(crate) fn type_of_expr( &self, db: &dyn HirDatabase, expr: &ast::Expr, ) -> Option<(Type, Option)> { - let expr_id = self.expr_id(db, expr)?; - let infer = self.infer.as_ref()?; + let expr_id = self.expr_id(expr.clone())?; + let infer = self.infer()?; let coerced = expr_id .as_expr() .and_then(|expr_id| infer.expr_adjustments.get(&expr_id)) @@ -211,7 +297,7 @@ impl SourceAnalyzer { pat: &ast::Pat, ) -> Option<(Type, Option)> { let expr_or_pat_id = self.pat_id(pat)?; - let infer = self.infer.as_ref()?; + let infer = self.infer()?; let coerced = match expr_or_pat_id { ExprOrPatId::ExprId(idx) => infer .expr_adjustments @@ -234,7 +320,7 @@ impl SourceAnalyzer { pat: &ast::IdentPat, ) -> Option { let binding_id = self.binding_id_of_pat(pat)?; - let infer = self.infer.as_ref()?; + let infer = self.infer()?; let ty = infer[binding_id].clone(); let mk_ty = |ty| Type::new_with_resolver(db, &self.resolver, ty); Some(mk_ty(ty)) @@ -246,7 +332,7 @@ impl SourceAnalyzer { _param: &ast::SelfParam, ) -> Option { let binding = self.body()?.self_param?; - let ty = self.infer.as_ref()?[binding].clone(); + let ty = self.infer()?[binding].clone(); Some(Type::new_with_resolver(db, &self.resolver, ty)) } @@ -256,7 +342,7 @@ impl SourceAnalyzer { pat: &ast::IdentPat, ) -> Option { let id = self.pat_id(&pat.clone().into())?; - let infer = self.infer.as_ref()?; + let infer = self.infer()?; infer.binding_modes.get(id.as_pat()?).map(|bm| match bm { hir_ty::BindingMode::Move => BindingMode::Move, hir_ty::BindingMode::Ref(hir_ty::Mutability::Mut) => BindingMode::Ref(Mutability::Mut), @@ -271,7 +357,7 @@ impl SourceAnalyzer { pat: &ast::Pat, ) -> Option> { let pat_id = self.pat_id(pat)?; - let infer = self.infer.as_ref()?; + let infer = self.infer()?; Some( infer .pat_adjustments @@ -287,8 +373,8 @@ impl SourceAnalyzer { db: &dyn HirDatabase, call: &ast::MethodCallExpr, ) -> Option { - let expr_id = self.expr_id(db, &call.clone().into())?.as_expr()?; - let (func, substs) = self.infer.as_ref()?.method_resolution(expr_id)?; + let expr_id = self.expr_id(call.clone().into())?.as_expr()?; + let (func, substs) = self.infer()?.method_resolution(expr_id)?; let ty = db.value_ty(func.into())?.substitute(Interner, &substs); let ty = Type::new_with_resolver(db, &self.resolver, ty); let mut res = ty.as_callable(db)?; @@ -301,8 +387,8 @@ impl SourceAnalyzer { db: &dyn HirDatabase, call: &ast::MethodCallExpr, ) -> Option { - let expr_id = self.expr_id(db, &call.clone().into())?.as_expr()?; - let (f_in_trait, substs) = self.infer.as_ref()?.method_resolution(expr_id)?; + let expr_id = self.expr_id(call.clone().into())?.as_expr()?; + let (f_in_trait, substs) = self.infer()?.method_resolution(expr_id)?; Some(self.resolve_impl_method_or_trait_def(db, f_in_trait, substs).into()) } @@ -312,8 +398,8 @@ impl SourceAnalyzer { db: &dyn HirDatabase, call: &ast::MethodCallExpr, ) -> Option<(Either, Option)> { - let expr_id = self.expr_id(db, &call.clone().into())?.as_expr()?; - let inference_result = self.infer.as_ref()?; + let expr_id = self.expr_id(call.clone().into())?.as_expr()?; + let inference_result = self.infer()?; match inference_result.method_resolution(expr_id) { Some((f_in_trait, substs)) => { let (fn_, subst) = @@ -342,12 +428,11 @@ impl SourceAnalyzer { pub(crate) fn resolve_field( &self, - db: &dyn HirDatabase, field: &ast::FieldExpr, ) -> Option> { - let &(def, ..) = self.def.as_ref()?; - let expr_id = self.expr_id(db, &field.clone().into())?.as_expr()?; - self.infer.as_ref()?.field_resolution(expr_id).map(|it| { + let (def, ..) = self.body_()?; + let expr_id = self.expr_id(field.clone().into())?.as_expr()?; + self.infer()?.field_resolution(expr_id).map(|it| { it.map_either(Into::into, |f| TupleField { owner: def, tuple: f.tuple, index: f.index }) }) } @@ -358,7 +443,7 @@ impl SourceAnalyzer { infer: &InferenceResult, db: &dyn HirDatabase, ) -> Option { - let body = self.body()?; + let body = self.store()?; if let Expr::Field { expr: object_expr, name: _ } = body[field_expr] { let (adt, subst) = type_of_expr_including_adjust(infer, object_expr)?.as_adt()?; return Some(GenericSubstitution::new( @@ -375,9 +460,9 @@ impl SourceAnalyzer { db: &dyn HirDatabase, field: &ast::FieldExpr, ) -> Option<(Either, Function>, Option)> { - let &(def, ..) = self.def.as_ref()?; - let expr_id = self.expr_id(db, &field.clone().into())?.as_expr()?; - let inference_result = self.infer.as_ref()?; + let (def, ..) = self.body_()?; + let expr_id = self.expr_id(field.clone().into())?.as_expr()?; + let inference_result = self.infer()?; match inference_result.field_resolution(expr_id) { Some(field) => match field { Either::Left(field) => Some(( @@ -419,7 +504,7 @@ impl SourceAnalyzer { (RangeOp::Inclusive, None, None) => return None, (RangeOp::Inclusive, Some(_), None) => return None, }; - self.resolver.resolve_known_struct(db.upcast(), &path) + self.resolver.resolve_known_struct(db, &path) } pub(crate) fn resolve_range_expr( @@ -439,7 +524,7 @@ impl SourceAnalyzer { (RangeOp::Inclusive, None, None) => return None, (RangeOp::Inclusive, Some(_), None) => return None, }; - self.resolver.resolve_known_struct(db.upcast(), &path) + self.resolver.resolve_known_struct(db, &path) } pub(crate) fn resolve_await_to_poll( @@ -447,11 +532,11 @@ impl SourceAnalyzer { db: &dyn HirDatabase, await_expr: &ast::AwaitExpr, ) -> Option { - let mut ty = self.ty_of_expr(db, &await_expr.expr()?)?.clone(); + let mut ty = self.ty_of_expr(await_expr.expr()?)?.clone(); let into_future_trait = self .resolver - .resolve_known_trait(db.upcast(), &path![core::future::IntoFuture]) + .resolve_known_trait(db, &path![core::future::IntoFuture]) .map(Trait::from); if let Some(into_future_trait) = into_future_trait { @@ -460,7 +545,7 @@ impl SourceAnalyzer { let items = into_future_trait.items(db); let into_future_type = items.into_iter().find_map(|item| match item { AssocItem::TypeAlias(alias) - if alias.name(db) == Name::new_symbol_root(sym::IntoFuture.clone()) => + if alias.name(db) == Name::new_symbol_root(sym::IntoFuture) => { Some(alias) } @@ -471,8 +556,8 @@ impl SourceAnalyzer { } } - let future_trait = db.lang_item(self.resolver.krate(), LangItem::Future)?.as_trait()?; - let poll_fn = db.lang_item(self.resolver.krate(), LangItem::FuturePoll)?.as_function()?; + let future_trait = LangItem::Future.resolve_trait(db, self.resolver.krate())?; + let poll_fn = LangItem::FuturePoll.resolve_function(db, self.resolver.krate())?; // HACK: subst for `poll()` coincides with that for `Future` because `poll()` itself // doesn't have any generic parameters, so we skip building another subst for `poll()`. let substs = hir_ty::TyBuilder::subst_for_def(db, future_trait, None).push(ty).build(); @@ -489,38 +574,30 @@ impl SourceAnalyzer { // This can be either `Deref::deref` or `DerefMut::deref_mut`. // Since deref kind is inferenced and stored in `InferenceResult.method_resolution`, // use that result to find out which one it is. - let (deref_trait, deref) = self.lang_trait_fn( - db, - LangItem::Deref, - &Name::new_symbol_root(sym::deref.clone()), - )?; - self.infer - .as_ref() + let (deref_trait, deref) = + self.lang_trait_fn(db, LangItem::Deref, &Name::new_symbol_root(sym::deref))?; + self.infer() .and_then(|infer| { - let expr = self.expr_id(db, &prefix_expr.clone().into())?.as_expr()?; + let expr = self.expr_id(prefix_expr.clone().into())?.as_expr()?; let (func, _) = infer.method_resolution(expr)?; let (deref_mut_trait, deref_mut) = self.lang_trait_fn( db, LangItem::DerefMut, - &Name::new_symbol_root(sym::deref_mut.clone()), + &Name::new_symbol_root(sym::deref_mut), )?; - if func == deref_mut { - Some((deref_mut_trait, deref_mut)) - } else { - None - } + if func == deref_mut { Some((deref_mut_trait, deref_mut)) } else { None } }) .unwrap_or((deref_trait, deref)) } ast::UnaryOp::Not => { - self.lang_trait_fn(db, LangItem::Not, &Name::new_symbol_root(sym::not.clone()))? + self.lang_trait_fn(db, LangItem::Not, &Name::new_symbol_root(sym::not))? } ast::UnaryOp::Neg => { - self.lang_trait_fn(db, LangItem::Neg, &Name::new_symbol_root(sym::neg.clone()))? + self.lang_trait_fn(db, LangItem::Neg, &Name::new_symbol_root(sym::neg))? } }; - let ty = self.ty_of_expr(db, &prefix_expr.expr()?)?; + let ty = self.ty_of_expr(prefix_expr.expr()?)?; // HACK: subst for all methods coincides with that for their trait because the methods // don't have any generic parameters, so we skip building another subst for the methods. @@ -534,27 +611,22 @@ impl SourceAnalyzer { db: &dyn HirDatabase, index_expr: &ast::IndexExpr, ) -> Option { - let base_ty = self.ty_of_expr(db, &index_expr.base()?)?; - let index_ty = self.ty_of_expr(db, &index_expr.index()?)?; + let base_ty = self.ty_of_expr(index_expr.base()?)?; + let index_ty = self.ty_of_expr(index_expr.index()?)?; let (index_trait, index_fn) = - self.lang_trait_fn(db, LangItem::Index, &Name::new_symbol_root(sym::index.clone()))?; + self.lang_trait_fn(db, LangItem::Index, &Name::new_symbol_root(sym::index))?; let (op_trait, op_fn) = self - .infer - .as_ref() + .infer() .and_then(|infer| { - let expr = self.expr_id(db, &index_expr.clone().into())?.as_expr()?; + let expr = self.expr_id(index_expr.clone().into())?.as_expr()?; let (func, _) = infer.method_resolution(expr)?; let (index_mut_trait, index_mut_fn) = self.lang_trait_fn( db, LangItem::IndexMut, - &Name::new_symbol_root(sym::index_mut.clone()), + &Name::new_symbol_root(sym::index_mut), )?; - if func == index_mut_fn { - Some((index_mut_trait, index_mut_fn)) - } else { - None - } + if func == index_mut_fn { Some((index_mut_trait, index_mut_fn)) } else { None } }) .unwrap_or((index_trait, index_fn)); // HACK: subst for all methods coincides with that for their trait because the methods @@ -572,8 +644,8 @@ impl SourceAnalyzer { binop_expr: &ast::BinExpr, ) -> Option { let op = binop_expr.op_kind()?; - let lhs = self.ty_of_expr(db, &binop_expr.lhs()?)?; - let rhs = self.ty_of_expr(db, &binop_expr.rhs()?)?; + let lhs = self.ty_of_expr(binop_expr.lhs()?)?; + let rhs = self.ty_of_expr(binop_expr.rhs()?)?; let (op_trait, op_fn) = lang_items_for_bin_op(op) .and_then(|(name, lang_item)| self.lang_trait_fn(db, lang_item, &name))?; @@ -592,10 +664,10 @@ impl SourceAnalyzer { db: &dyn HirDatabase, try_expr: &ast::TryExpr, ) -> Option { - let ty = self.ty_of_expr(db, &try_expr.expr()?)?; + let ty = self.ty_of_expr(try_expr.expr()?)?; - let op_fn = db.lang_item(self.resolver.krate(), LangItem::TryTraitBranch)?.as_function()?; - let op_trait = match op_fn.lookup(db.upcast()).container { + let op_fn = LangItem::TryTraitBranch.resolve_function(db, self.resolver.krate())?; + let op_trait = match op_fn.lookup(db).container { ItemContainerId::TraitId(id) => id, _ => return None, }; @@ -613,7 +685,7 @@ impl SourceAnalyzer { ) -> Option<(Field, Option, Type, GenericSubstitution)> { let record_expr = ast::RecordExpr::cast(field.syntax().parent().and_then(|p| p.parent())?)?; let expr = ast::Expr::from(record_expr); - let expr_id = self.body_source_map()?.node_expr(InFile::new(self.file_id, &expr))?; + let expr_id = self.store_sm()?.node_expr(InFile::new(self.file_id, &expr))?; let ast_name = field.field_name()?; let local_name = ast_name.as_name(); @@ -626,7 +698,7 @@ impl SourceAnalyzer { once(local_name.clone()), )); match self.resolver.resolve_path_in_value_ns_fully( - db.upcast(), + db, &path, name_hygiene(db, InFile::new(self.file_id, ast_name.syntax())), ) { @@ -636,9 +708,9 @@ impl SourceAnalyzer { _ => None, } }; - let (adt, subst) = self.infer.as_ref()?.type_of_expr_or_pat(expr_id)?.as_adt()?; - let variant = self.infer.as_ref()?.variant_resolution_for_expr_or_pat(expr_id)?; - let variant_data = variant.variant_data(db.upcast()); + let (adt, subst) = self.infer()?.type_of_expr_or_pat(expr_id)?.as_adt()?; + let variant = self.infer()?.variant_resolution_for_expr_or_pat(expr_id)?; + let variant_data = variant.variant_data(db); let field = FieldId { parent: variant, local_id: variant_data.field(&local_name)? }; let field_ty = db.field_types(variant).get(field.local_id)?.clone().substitute(Interner, subst); @@ -658,10 +730,10 @@ impl SourceAnalyzer { let field_name = field.field_name()?.as_name(); let record_pat = ast::RecordPat::cast(field.syntax().parent().and_then(|p| p.parent())?)?; let pat_id = self.pat_id(&record_pat.into())?; - let variant = self.infer.as_ref()?.variant_resolution_for_pat(pat_id.as_pat()?)?; - let variant_data = variant.variant_data(db.upcast()); + let variant = self.infer()?.variant_resolution_for_pat(pat_id.as_pat()?)?; + let variant_data = variant.variant_data(db); let field = FieldId { parent: variant, local_id: variant_data.field(&field_name)? }; - let (adt, subst) = self.infer.as_ref()?.type_of_pat.get(pat_id.as_pat()?)?.as_adt()?; + let (adt, subst) = self.infer()?.type_of_pat.get(pat_id.as_pat()?)?.as_adt()?; let field_ty = db.field_types(variant).get(field.local_id)?.clone().substitute(Interner, subst); Some(( @@ -676,14 +748,15 @@ impl SourceAnalyzer { db: &dyn HirDatabase, macro_call: InFile<&ast::MacroCall>, ) -> Option { - let (mut types_map, mut types_source_map) = - (TypesMap::default(), TypesSourceMap::default()); - let mut ctx = - LowerCtx::new(db.upcast(), macro_call.file_id, &mut types_map, &mut types_source_map); - let path = macro_call.value.path().and_then(|ast| Path::from_src(&mut ctx, ast))?; - self.resolver - .resolve_path_as_macro(db.upcast(), path.mod_path()?, Some(MacroSubNs::Bang)) - .map(|(it, _)| it.into()) + let bs = self.store_sm()?; + bs.expansion(macro_call).and_then(|it| { + // FIXME: Block def maps + let def = it.lookup(db).def; + db.crate_def_map(def.krate) + .macro_def_to_macro_id + .get(&def.kind.erased_ast_id()) + .map(|it| (*it).into()) + }) } pub(crate) fn resolve_bind_pat_to_const( @@ -692,20 +765,20 @@ impl SourceAnalyzer { pat: &ast::IdentPat, ) -> Option { let expr_or_pat_id = self.pat_id(&pat.clone().into())?; - let body = self.body()?; + let store = self.store()?; let path = match expr_or_pat_id { - ExprOrPatId::ExprId(idx) => match &body[idx] { + ExprOrPatId::ExprId(idx) => match &store[idx] { Expr::Path(path) => path, _ => return None, }, - ExprOrPatId::PatId(idx) => match &body[idx] { + ExprOrPatId::PatId(idx) => match &store[idx] { Pat::Path(path) => path, _ => return None, }, }; - let res = resolve_hir_path(db, &self.resolver, path, HygieneId::ROOT, TypesMap::EMPTY)?; + let res = resolve_hir_path(db, &self.resolver, path, HygieneId::ROOT, Some(store))?; match res { PathResolution::Def(def) => Some(def), _ => None, @@ -720,6 +793,78 @@ impl SourceAnalyzer { .map(crate::TypeParam::from) } + pub(crate) fn resolve_offset_of_field( + &self, + db: &dyn HirDatabase, + name_ref: &ast::NameRef, + ) -> Option<(Either, GenericSubstitution)> { + let offset_of_expr = ast::OffsetOfExpr::cast(name_ref.syntax().parent()?)?; + let container = offset_of_expr.ty()?; + let container = self.type_of_type(db, &container)?; + + let trait_env = container.env; + let mut container = Either::Right(container.ty); + for field_name in offset_of_expr.fields() { + if let Some( + TyKind::Alias(AliasTy::Projection(ProjectionTy { associated_ty_id, substitution })) + | TyKind::AssociatedType(associated_ty_id, substitution), + ) = container.as_ref().right().map(|it| it.kind(Interner)) + { + let projection = ProjectionTy { + associated_ty_id: *associated_ty_id, + substitution: substitution.clone(), + }; + container = Either::Right(db.normalize_projection(projection, trait_env.clone())); + } + let handle_variants = |variant, subst: &Substitution, container: &mut _| { + let fields = db.variant_fields(variant); + let field = fields.field(&field_name.as_name())?; + let field_types = db.field_types(variant); + *container = Either::Right(field_types[field].clone().substitute(Interner, subst)); + let generic_def = match variant { + VariantId::EnumVariantId(it) => it.loc(db).parent.into(), + VariantId::StructId(it) => it.into(), + VariantId::UnionId(it) => it.into(), + }; + Some(( + Either::Right(Field { parent: variant.into(), id: field }), + generic_def, + subst.clone(), + )) + }; + let temp_ty = TyKind::Error.intern(Interner); + let (field_def, generic_def, subst) = + match std::mem::replace(&mut container, Either::Right(temp_ty.clone())) { + Either::Left((variant_id, subst)) => { + handle_variants(VariantId::from(variant_id), &subst, &mut container)? + } + Either::Right(container_ty) => match container_ty.kind(Interner) { + TyKind::Adt(adt_id, subst) => match adt_id.0 { + AdtId::StructId(id) => { + handle_variants(id.into(), subst, &mut container)? + } + AdtId::UnionId(id) => { + handle_variants(id.into(), subst, &mut container)? + } + AdtId::EnumId(id) => { + let variants = db.enum_variants(id); + let variant = variants.variant(&field_name.as_name())?; + container = Either::Left((variant, subst.clone())); + (Either::Left(Variant { id: variant }), id.into(), subst.clone()) + } + }, + _ => return None, + }, + }; + + if field_name.syntax().text_range() == name_ref.syntax().text_range() { + return Some((field_def, GenericSubstitution::new(generic_def, subst, trait_env))); + } + } + never!("the `NameRef` is a child of the `OffsetOfExpr`, we should've visited it"); + None + } + pub(crate) fn resolve_path( &self, db: &dyn HirDatabase, @@ -730,9 +875,9 @@ impl SourceAnalyzer { let mut prefer_value_ns = false; let resolved = (|| { - let infer = self.infer.as_deref()?; + let infer = self.infer()?; if let Some(path_expr) = parent().and_then(ast::PathExpr::cast) { - let expr_id = self.expr_id(db, &path_expr.into())?; + let expr_id = self.expr_id(path_expr.into())?; if let Some((assoc, subs)) = infer.assoc_resolutions_for_expr_or_pat(expr_id) { let (assoc, subst) = match assoc { AssocItemId::FunctionId(f_in_trait) => { @@ -830,7 +975,7 @@ impl SourceAnalyzer { return Some((PathResolution::Def(ModuleDef::Variant(variant.into())), None)); } } else if let Some(rec_lit) = parent().and_then(ast::RecordExpr::cast) { - let expr_id = self.expr_id(db, &rec_lit.into())?; + let expr_id = self.expr_id(rec_lit.into())?; if let Some(VariantId::EnumVariantId(variant)) = infer.variant_resolution_for_expr_or_pat(expr_id) { @@ -857,17 +1002,20 @@ impl SourceAnalyzer { return resolved; } - let (mut types_map, mut types_source_map) = - (TypesMap::default(), TypesSourceMap::default()); - let mut ctx = - LowerCtx::new(db.upcast(), self.file_id, &mut types_map, &mut types_source_map); - let hir_path = Path::from_src(&mut ctx, path.clone())?; + // FIXME: collectiong here shouldnt be necessary? + let mut collector = ExprCollector::new(db, self.resolver.module(), self.file_id); + let hir_path = + collector.lower_path(path.clone(), &mut ExprCollector::impl_trait_error_allocator)?; + let parent_hir_path = path + .parent_path() + .and_then(|p| collector.lower_path(p, &mut ExprCollector::impl_trait_error_allocator)); + let store = collector.store.finish(); // Case where path is a qualifier of a use tree, e.g. foo::bar::{Baz, Qux} where we are // trying to resolve foo::bar. if let Some(use_tree) = parent().and_then(ast::UseTree::cast) { if use_tree.coloncolon_token().is_some() { - return resolve_hir_path_qualifier(db, &self.resolver, &hir_path, &types_map) + return resolve_hir_path_qualifier(db, &self.resolver, &hir_path, &store) .map(|it| (it, None)); } } @@ -884,9 +1032,8 @@ impl SourceAnalyzer { // Case where path is a qualifier of another path, e.g. foo::bar::Baz where we are // trying to resolve foo::bar. - if let Some(parent_path) = path.parent_path() { - let parent_hir_path = Path::from_src(&mut ctx, parent_path); - return match resolve_hir_path_qualifier(db, &self.resolver, &hir_path, &types_map) { + if let Some(parent_hir_path) = parent_hir_path { + return match resolve_hir_path_qualifier(db, &self.resolver, &hir_path, &store) { None if meta_path.is_some() => path .first_segment() .and_then(|it| it.name_ref()) @@ -906,13 +1053,9 @@ impl SourceAnalyzer { // } // ``` Some(it) if matches!(it, PathResolution::Def(ModuleDef::BuiltinType(_))) => { - if let (Some(mod_path), Some(parent_hir_path)) = - (hir_path.mod_path(), parent_hir_path) - { - if let Some(ModuleDefId::ModuleId(id)) = self - .resolver - .resolve_module_path_in_items(db.upcast(), mod_path) - .take_types() + if let Some(mod_path) = hir_path.mod_path() { + if let Some(ModuleDefId::ModuleId(id)) = + self.resolver.resolve_module_path_in_items(db, mod_path).take_types() { let parent_hir_name = parent_hir_path.segments().get(1).map(|it| it.name); @@ -973,7 +1116,7 @@ impl SourceAnalyzer { // FIXME: Multiple derives can have the same helper let name_ref = name_ref.as_name(); for (macro_id, mut helpers) in - helpers.iter().group_by(|(_, macro_id, ..)| macro_id).into_iter() + helpers.iter().chunk_by(|(_, macro_id, ..)| macro_id).into_iter() { if let Some(idx) = helpers.position(|(name, ..)| *name == name_ref) { @@ -1006,8 +1149,7 @@ impl SourceAnalyzer { } if parent().is_some_and(|it| ast::Visibility::can_cast(it.kind())) { // No substitution because only modules can be inside visibilities, and those have no generics. - resolve_hir_path_qualifier(db, &self.resolver, &hir_path, &types_map) - .map(|it| (it, None)) + resolve_hir_path_qualifier(db, &self.resolver, &hir_path, &store).map(|it| (it, None)) } else { // Probably a type, no need to show substitutions for those. let res = resolve_hir_path_( @@ -1016,16 +1158,16 @@ impl SourceAnalyzer { &hir_path, prefer_value_ns, name_hygiene(db, InFile::new(self.file_id, path.syntax())), - &types_map, + Some(&store), )?; let subst = (|| { let parent = parent()?; let ty = if let Some(expr) = ast::Expr::cast(parent.clone()) { - let expr_id = self.expr_id(db, &expr)?; - self.infer.as_ref()?.type_of_expr_or_pat(expr_id)? + let expr_id = self.expr_id(expr)?; + self.infer()?.type_of_expr_or_pat(expr_id)? } else if let Some(pat) = ast::Pat::cast(parent) { let pat_id = self.pat_id(&pat)?; - &self.infer.as_ref()?[pat_id] + &self.infer()?[pat_id] } else { return None; }; @@ -1072,10 +1214,10 @@ impl SourceAnalyzer { db: &dyn HirDatabase, literal: &ast::RecordExpr, ) -> Option> { - let body = self.body()?; - let infer = self.infer.as_ref()?; + let body = self.store()?; + let infer = self.infer()?; - let expr_id = self.expr_id(db, &literal.clone().into())?; + let expr_id = self.expr_id(literal.clone().into())?; let substs = infer[expr_id].as_adt()?.1; let (variant, missing_fields, _exhaustive) = match expr_id { @@ -1095,8 +1237,8 @@ impl SourceAnalyzer { db: &dyn HirDatabase, pattern: &ast::RecordPat, ) -> Option> { - let body = self.body()?; - let infer = self.infer.as_ref()?; + let body = self.store()?; + let infer = self.infer()?; let pat_id = self.pat_id(&pattern.clone().into())?.as_pat()?; let substs = infer.type_of_pat[pat_id].as_adt()?.1; @@ -1130,24 +1272,17 @@ impl SourceAnalyzer { &self, db: &dyn HirDatabase, macro_call: InFile<&ast::MacroCall>, - ) -> Option { - let krate = self.resolver.krate(); - // FIXME: This causes us to parse, generally this is the wrong approach for resolving a - // macro call to a macro call id! - let macro_call_id = macro_call.as_call_id(db.upcast(), krate, |path| { - self.resolver.resolve_path_as_macro_def(db.upcast(), path, Some(MacroSubNs::Bang)) - })?; - // why the 64? - Some(macro_call_id.as_macro_file()).filter(|it| it.expansion_level(db.upcast()) < 64) + ) -> Option { + self.store_sm().and_then(|bs| bs.expansion(macro_call)).or_else(|| { + self.resolver.item_scope().macro_invoc( + macro_call.with_value(db.ast_id_map(macro_call.file_id).ast_id(macro_call.value)), + ) + }) } - pub(crate) fn resolve_variant( - &self, - db: &dyn HirDatabase, - record_lit: ast::RecordExpr, - ) -> Option { - let infer = self.infer.as_ref()?; - let expr_id = self.expr_id(db, &record_lit.into())?; + pub(crate) fn resolve_variant(&self, record_lit: ast::RecordExpr) -> Option { + let infer = self.infer()?; + let expr_id = self.expr_id(record_lit.into())?; infer.variant_resolution_for_expr_or_pat(expr_id) } @@ -1156,11 +1291,11 @@ impl SourceAnalyzer { db: &dyn HirDatabase, macro_expr: InFile<&ast::MacroExpr>, ) -> bool { - if let (Some((def, body, sm)), Some(infer)) = (&self.def, &self.infer) { + if let Some((def, body, sm, Some(infer))) = self.body_() { if let Some(expanded_expr) = sm.macro_expansion_expr(macro_expr) { let mut is_unsafe = false; let mut walk_expr = |expr_id| { - unsafe_operations(db, infer, *def, body, expr_id, &mut |inside_unsafe_block| { + unsafe_operations(db, infer, def, body, expr_id, &mut |inside_unsafe_block| { is_unsafe |= inside_unsafe_block == InsideUnsafeBlock::No }) }; @@ -1182,7 +1317,7 @@ impl SourceAnalyzer { format_args: InFile<&ast::FormatArgsExpr>, offset: TextSize, ) -> Option<(TextRange, Option)> { - let (hygiene, implicits) = self.body_source_map()?.implicit_format_args(format_args)?; + let (hygiene, implicits) = self.store_sm()?.implicit_format_args(format_args)?; implicits.iter().find(|(range, _)| range.contains_inclusive(offset)).map(|(range, name)| { ( *range, @@ -1206,9 +1341,9 @@ impl SourceAnalyzer { line: usize, offset: TextSize, ) -> Option<(DefWithBodyId, (ExprId, TextRange, usize))> { - let (def, _, body_source_map) = self.def.as_ref()?; + let (def, _, body_source_map, _) = self.body_()?; let (expr, args) = body_source_map.asm_template_args(asm)?; - Some(*def).zip( + Some(def).zip( args.get(line)? .iter() .find(|(range, _)| range.contains_inclusive(offset)) @@ -1221,7 +1356,7 @@ impl SourceAnalyzer { db: &'a dyn HirDatabase, format_args: InFile<&ast::FormatArgsExpr>, ) -> Option)> + 'a> { - let (hygiene, names) = self.body_source_map()?.implicit_format_args(format_args)?; + let (hygiene, names) = self.store_sm()?.implicit_format_args(format_args)?; Some(names.iter().map(move |(range, name)| { ( *range, @@ -1243,8 +1378,8 @@ impl SourceAnalyzer { &self, asm: InFile<&ast::AsmExpr>, ) -> Option<(DefWithBodyId, (ExprId, &[Vec<(TextRange, usize)>]))> { - let (def, _, body_source_map) = self.def.as_ref()?; - Some(*def).zip(body_source_map.asm_template_args(asm)) + let (def, _, body_source_map, _) = self.body_()?; + Some(def).zip(body_source_map.asm_template_args(asm)) } fn resolve_impl_method_or_trait_def( @@ -1290,13 +1425,13 @@ impl SourceAnalyzer { lang_trait: LangItem, method_name: &Name, ) -> Option<(TraitId, FunctionId)> { - let trait_id = db.lang_item(self.resolver.krate(), lang_trait)?.as_trait()?; - let fn_id = db.trait_data(trait_id).method_by_name(method_name)?; + let trait_id = lang_trait.resolve_trait(db, self.resolver.krate())?; + let fn_id = db.trait_items(trait_id).method_by_name(method_name)?; Some((trait_id, fn_id)) } - fn ty_of_expr(&self, db: &dyn HirDatabase, expr: &ast::Expr) -> Option<&Ty> { - self.infer.as_ref()?.type_of_expr_or_pat(self.expr_id(db, expr)?) + fn ty_of_expr(&self, expr: ast::Expr) -> Option<&Ty> { + self.infer()?.type_of_expr_or_pat(self.expr_id(expr)?) } } @@ -1306,7 +1441,7 @@ fn scope_for( source_map: &BodySourceMap, node: InFile<&SyntaxNode>, ) -> Option { - node.ancestors_with_macros(db.upcast()) + node.ancestors_with_macros(db) .take_while(|it| { !ast::Item::can_cast(it.kind()) || ast::MacroCall::can_cast(it.kind()) @@ -1334,12 +1469,11 @@ fn scope_for_offset( } // FIXME handle attribute expansion - let source = - iter::successors(file_id.macro_file().map(|it| it.call_node(db.upcast())), |it| { - Some(it.file_id.macro_file()?.call_node(db.upcast())) - }) - .find(|it| it.file_id == from_file) - .filter(|it| it.kind() == SyntaxKind::MACRO_CALL)?; + let source = iter::successors(file_id.macro_file().map(|it| it.call_node(db)), |it| { + Some(it.file_id.macro_file()?.call_node(db)) + }) + .find(|it| it.file_id == from_file) + .filter(|it| it.kind() == SyntaxKind::MACRO_CALL)?; Some((source.text_range(), scope)) }) .filter(|(expr_range, _scope)| expr_range.start() <= offset && offset <= expr_range.end()) @@ -1369,7 +1503,7 @@ fn adjust( if source.file_id != from_file { return None; } - let root = source.file_syntax(db.upcast()); + let root = source.file_syntax(db); let node = source.value.to_node(&root); Some((node.syntax().text_range(), scope)) }) @@ -1396,9 +1530,9 @@ pub(crate) fn resolve_hir_path( resolver: &Resolver, path: &Path, hygiene: HygieneId, - types_map: &TypesMap, + store: Option<&ExpressionStore>, ) -> Option { - resolve_hir_path_(db, resolver, path, false, hygiene, types_map) + resolve_hir_path_(db, resolver, path, false, hygiene, store) } #[inline] @@ -1408,7 +1542,7 @@ pub(crate) fn resolve_hir_path_as_attr_macro( path: &Path, ) -> Option { resolver - .resolve_path_as_macro(db.upcast(), path.mod_path()?, Some(MacroSubNs::Attr)) + .resolve_path_as_macro(db, path.mod_path()?, Some(MacroSubNs::Attr)) .map(|(it, _)| it) .map(Into::into) } @@ -1419,23 +1553,18 @@ fn resolve_hir_path_( path: &Path, prefer_value_ns: bool, hygiene: HygieneId, - types_map: &TypesMap, + store: Option<&ExpressionStore>, ) -> Option { let types = || { let (ty, unresolved) = match path.type_anchor() { - Some(type_ref) => { - let (_, res) = TyLoweringContext::new_maybe_unowned( - db, - resolver, - types_map, - None, - resolver.type_owner(), - ) - .lower_ty_ext(type_ref); + Some(type_ref) => resolver.generic_def().and_then(|def| { + let (_, res) = + TyLoweringContext::new(db, resolver, store?, def, LifetimeElisionKind::Infer) + .lower_ty_ext(type_ref); res.map(|ty_ns| (ty_ns, path.segments().first())) - } + }), None => { - let (ty, remaining_idx, _) = resolver.resolve_path_in_type_ns(db.upcast(), path)?; + let (ty, remaining_idx, _) = resolver.resolve_path_in_type_ns(db, path)?; match remaining_idx { Some(remaining_idx) => { if remaining_idx + 1 == path.segments().len() { @@ -1453,7 +1582,7 @@ fn resolve_hir_path_( // within the trait's associated types. if let (Some(unresolved), &TypeNs::TraitId(trait_id)) = (&unresolved, &ty) { if let Some(type_alias_id) = - db.trait_data(trait_id).associated_type_by_name(unresolved.name) + db.trait_items(trait_id).associated_type_by_name(unresolved.name) { return Some(PathResolution::Def(ModuleDefId::from(type_alias_id).into())); } @@ -1470,6 +1599,7 @@ fn resolve_hir_path_( TypeNs::BuiltinType(it) => PathResolution::Def(BuiltinType::from(it).into()), TypeNs::TraitId(it) => PathResolution::Def(Trait::from(it).into()), TypeNs::TraitAliasId(it) => PathResolution::Def(TraitAlias::from(it).into()), + TypeNs::ModuleId(it) => PathResolution::Def(ModuleDef::Module(it.into())), }; match unresolved { Some(unresolved) => resolver @@ -1494,14 +1624,14 @@ fn resolve_hir_path_( let items = || { resolver - .resolve_module_path_in_items(db.upcast(), path.mod_path()?) + .resolve_module_path_in_items(db, path.mod_path()?) .take_types() .map(|it| PathResolution::Def(it.into())) }; let macros = || { resolver - .resolve_path_as_macro(db.upcast(), path.mod_path()?, None) + .resolve_path_as_macro(db, path.mod_path()?, None) .map(|(def, _)| PathResolution::Def(ModuleDef::Macro(def.into()))) }; @@ -1517,7 +1647,7 @@ fn resolve_hir_value_path( path: &Path, hygiene: HygieneId, ) -> Option { - resolver.resolve_path_in_value_ns_fully(db.upcast(), path, hygiene).and_then(|val| { + resolver.resolve_path_in_value_ns_fully(db, path, hygiene).and_then(|val| { let res = match val { ValueNs::LocalBinding(binding_id) => { let var = Local { parent: body_owner?, binding_id }; @@ -1552,23 +1682,18 @@ fn resolve_hir_path_qualifier( db: &dyn HirDatabase, resolver: &Resolver, path: &Path, - types_map: &TypesMap, + store: &ExpressionStore, ) -> Option { (|| { let (ty, unresolved) = match path.type_anchor() { - Some(type_ref) => { - let (_, res) = TyLoweringContext::new_maybe_unowned( - db, - resolver, - types_map, - None, - resolver.type_owner(), - ) - .lower_ty_ext(type_ref); + Some(type_ref) => resolver.generic_def().and_then(|def| { + let (_, res) = + TyLoweringContext::new(db, resolver, store, def, LifetimeElisionKind::Infer) + .lower_ty_ext(type_ref); res.map(|ty_ns| (ty_ns, path.segments().first())) - } + }), None => { - let (ty, remaining_idx, _) = resolver.resolve_path_in_type_ns(db.upcast(), path)?; + let (ty, remaining_idx, _) = resolver.resolve_path_in_type_ns(db, path)?; match remaining_idx { Some(remaining_idx) => { if remaining_idx + 1 == path.segments().len() { @@ -1586,7 +1711,7 @@ fn resolve_hir_path_qualifier( // within the trait's associated types. if let (Some(unresolved), &TypeNs::TraitId(trait_id)) = (&unresolved, &ty) { if let Some(type_alias_id) = - db.trait_data(trait_id).associated_type_by_name(unresolved.name) + db.trait_items(trait_id).associated_type_by_name(unresolved.name) { return Some(PathResolution::Def(ModuleDefId::from(type_alias_id).into())); } @@ -1603,6 +1728,7 @@ fn resolve_hir_path_qualifier( TypeNs::BuiltinType(it) => PathResolution::Def(BuiltinType::from(it).into()), TypeNs::TraitId(it) => PathResolution::Def(Trait::from(it).into()), TypeNs::TraitAliasId(it) => PathResolution::Def(TraitAlias::from(it).into()), + TypeNs::ModuleId(it) => PathResolution::Def(ModuleDef::Module(it.into())), }; match unresolved { Some(unresolved) => resolver @@ -1623,7 +1749,7 @@ fn resolve_hir_path_qualifier( })() .or_else(|| { resolver - .resolve_module_path_in_items(db.upcast(), path.mod_path()?) + .resolve_module_path_in_items(db, path.mod_path()?) .take_types() .map(|it| PathResolution::Def(it.into())) }) @@ -1635,8 +1761,7 @@ pub(crate) fn name_hygiene(db: &dyn HirDatabase, name: InFile<&SyntaxNode>) -> H }; let span_map = db.expansion_span_map(macro_file); let ctx = span_map.span_at(name.value.text_range().start()).ctx; - let ctx = db.lookup_intern_syntax_context(ctx); - HygieneId::new(ctx.opaque_and_semitransparent) + HygieneId::new(ctx.opaque_and_semitransparent(db)) } fn type_of_expr_including_adjust(infer: &InferenceResult, id: ExprId) -> Option<&Ty> { diff --git a/src/tools/rust-analyzer/crates/hir/src/symbols.rs b/src/tools/rust-analyzer/crates/hir/src/symbols.rs index 81eb6a70ad73e..e87ab87407ff2 100644 --- a/src/tools/rust-analyzer/crates/hir/src/symbols.rs +++ b/src/tools/rust-analyzer/crates/hir/src/symbols.rs @@ -2,24 +2,24 @@ use either::Either; use hir_def::{ + AdtId, AssocItemId, Complete, DefWithBodyId, ExternCrateId, HasModule, ImplId, Lookup, MacroId, + ModuleDefId, ModuleId, TraitId, db::DefDatabase, item_scope::{ImportId, ImportOrExternCrate, ImportOrGlob}, per_ns::Item, src::{HasChildSource, HasSource}, visibility::{Visibility, VisibilityExplicitness}, - AdtId, AssocItemId, DefWithBodyId, ExternCrateId, HasModule, ImplId, Lookup, MacroId, - ModuleDefId, ModuleId, TraitId, }; -use hir_expand::{name::Name, HirFileId}; +use hir_expand::{HirFileId, name::Name}; use hir_ty::{ db::HirDatabase, - display::{hir_display_with_types_map, DisplayTarget, HirDisplay}, + display::{HirDisplay, hir_display_with_store}, }; use intern::Symbol; use rustc_hash::FxHashMap; -use syntax::{ast::HasName, AstNode, AstPtr, SmolStr, SyntaxNode, SyntaxNodePtr, ToSmolStr}; +use syntax::{AstNode, AstPtr, SmolStr, SyntaxNode, SyntaxNodePtr, ToSmolStr, ast::HasName}; -use crate::{Module, ModuleDef, Semantics}; +use crate::{HasCrate, Module, ModuleDef, Semantics}; pub type FxIndexSet = indexmap::IndexSet>; @@ -34,6 +34,7 @@ pub struct FileSymbol { /// Whether this symbol is a doc alias for the original symbol. pub is_alias: bool, pub is_assoc: bool, + pub do_not_complete: Complete, } #[derive(Debug, Clone, PartialEq, Eq, Hash)] @@ -65,7 +66,6 @@ pub struct SymbolCollector<'a> { symbols: FxIndexSet, work: Vec, current_container_name: Option, - display_target: DisplayTarget, } /// Given a [`ModuleId`] and a [`HirDatabase`], use the DefMap for the module's crate to collect @@ -77,10 +77,6 @@ impl<'a> SymbolCollector<'a> { symbols: Default::default(), work: Default::default(), current_container_name: None, - display_target: DisplayTarget::from_crate( - db, - *db.crate_graph().crates_in_topological_order().last().unwrap(), - ), } } @@ -92,8 +88,7 @@ impl<'a> SymbolCollector<'a> { pub fn collect(&mut self, module: Module) { let _p = tracing::info_span!("SymbolCollector::collect", ?module).entered(); - tracing::info!(?module, "SymbolCollector::collect",); - self.display_target = module.krate().to_display_target(self.db); + tracing::info!(?module, "SymbolCollector::collect"); // The initial work is the root module we're collecting, additional work will // be populated as we traverse the module's definitions. @@ -111,7 +106,7 @@ impl<'a> SymbolCollector<'a> { fn do_work(&mut self, work: SymbolCollectorWork) { let _p = tracing::info_span!("SymbolCollector::do_work", ?work).entered(); tracing::info!(?work, "SymbolCollector::do_work"); - self.db.unwind_if_cancelled(); + self.db.unwind_if_revision_cancelled(); let parent_name = work.parent.map(|name| name.as_str().to_smolstr()); self.with_container_name(parent_name, |s| s.collect_from_module(work.module_id)); @@ -122,35 +117,43 @@ impl<'a> SymbolCollector<'a> { match def { ModuleDefId::ModuleId(id) => this.push_module(id, name), ModuleDefId::FunctionId(id) => { - this.push_decl(id, name, false); + this.push_decl(id, name, false, None); this.collect_from_body(id, Some(name.clone())); } - ModuleDefId::AdtId(AdtId::StructId(id)) => this.push_decl(id, name, false), - ModuleDefId::AdtId(AdtId::EnumId(id)) => this.push_decl(id, name, false), - ModuleDefId::AdtId(AdtId::UnionId(id)) => this.push_decl(id, name, false), + ModuleDefId::AdtId(AdtId::StructId(id)) => { + this.push_decl(id, name, false, None); + } + ModuleDefId::AdtId(AdtId::EnumId(id)) => { + this.push_decl(id, name, false, None); + } + ModuleDefId::AdtId(AdtId::UnionId(id)) => { + this.push_decl(id, name, false, None); + } ModuleDefId::ConstId(id) => { - this.push_decl(id, name, false); + this.push_decl(id, name, false, None); this.collect_from_body(id, Some(name.clone())); } ModuleDefId::StaticId(id) => { - this.push_decl(id, name, false); + this.push_decl(id, name, false, None); this.collect_from_body(id, Some(name.clone())); } ModuleDefId::TraitId(id) => { - this.push_decl(id, name, false); - this.collect_from_trait(id); + let trait_do_not_complete = this.push_decl(id, name, false, None); + this.collect_from_trait(id, trait_do_not_complete); } ModuleDefId::TraitAliasId(id) => { - this.push_decl(id, name, false); + this.push_decl(id, name, false, None); } ModuleDefId::TypeAliasId(id) => { - this.push_decl(id, name, false); + this.push_decl(id, name, false, None); + } + ModuleDefId::MacroId(id) => { + match id { + MacroId::Macro2Id(id) => this.push_decl(id, name, false, None), + MacroId::MacroRulesId(id) => this.push_decl(id, name, false, None), + MacroId::ProcMacroId(id) => this.push_decl(id, name, false, None), + }; } - ModuleDefId::MacroId(id) => match id { - MacroId::Macro2Id(id) => this.push_decl(id, name, false), - MacroId::MacroRulesId(id) => this.push_decl(id, name, false), - MacroId::ProcMacroId(id) => this.push_decl(id, name, false), - }, // Don't index these. ModuleDefId::BuiltinType(_) => {} ModuleDefId::EnumVariantId(_) => {} @@ -169,7 +172,7 @@ impl<'a> SymbolCollector<'a> { let mut push_import = |this: &mut Self, i: ImportId, name: &Name, def: ModuleDefId, vis| { let source = import_child_source_cache .entry(i.use_) - .or_insert_with(|| i.use_.child_source(this.db.upcast())); + .or_insert_with(|| i.use_.child_source(this.db)); let Some(use_tree_src) = source.value.get(i.idx) else { return }; let rename = use_tree_src.rename().and_then(|rename| rename.name()); let name_syntax = match rename { @@ -194,13 +197,14 @@ impl<'a> SymbolCollector<'a> { loc: dec_loc, is_alias: false, is_assoc: false, + do_not_complete: Complete::Yes, }); }; let push_extern_crate = |this: &mut Self, i: ExternCrateId, name: &Name, def: ModuleDefId, vis| { - let loc = i.lookup(this.db.upcast()); - let source = loc.source(this.db.upcast()); + let loc = i.lookup(this.db); + let source = loc.source(this.db); let rename = source.value.rename().and_then(|rename| rename.name()); let name_syntax = match rename { @@ -223,10 +227,11 @@ impl<'a> SymbolCollector<'a> { loc: dec_loc, is_alias: false, is_assoc: false, + do_not_complete: Complete::Yes, }); }; - let def_map = module_id.def_map(self.db.upcast()); + let def_map = module_id.def_map(self.db); let scope = &def_map[module_id.local_id].scope; for impl_id in scope.impls() { @@ -252,8 +257,9 @@ impl<'a> SymbolCollector<'a> { for (name, Item { def, vis, import }) in scope.macros() { if let Some(i) = import { match i { - ImportOrGlob::Import(i) => push_import(self, i, name, def.into(), vis), - ImportOrGlob::Glob(_) => (), + ImportOrExternCrate::Import(i) => push_import(self, i, name, def.into(), vis), + ImportOrExternCrate::Glob(_) => (), + ImportOrExternCrate::ExternCrate(_) => (), } continue; } @@ -279,12 +285,12 @@ impl<'a> SymbolCollector<'a> { for (name, id) in scope.legacy_macros() { for &id in id { - if id.module(self.db.upcast()) == module_id { + if id.module(self.db) == module_id { match id { - MacroId::Macro2Id(id) => self.push_decl(id, name, false), - MacroId::MacroRulesId(id) => self.push_decl(id, name, false), - MacroId::ProcMacroId(id) => self.push_decl(id, name, false), - } + MacroId::Macro2Id(id) => self.push_decl(id, name, false, None), + MacroId::MacroRulesId(id) => self.push_decl(id, name, false, None), + MacroId::ProcMacroId(id) => self.push_decl(id, name, false, None), + }; } } } @@ -295,7 +301,7 @@ impl<'a> SymbolCollector<'a> { let body = self.db.body(body_id); // Descend into the blocks and enqueue collection of all modules within. - for (_, def_map) in body.blocks(self.db.upcast()) { + for (_, def_map) in body.blocks(self.db) { for (id, _) in def_map.modules() { self.work.push(SymbolCollectorWork { module_id: def_map.module_id(id), @@ -306,24 +312,27 @@ impl<'a> SymbolCollector<'a> { } fn collect_from_impl(&mut self, impl_id: ImplId) { - let impl_data = self.db.impl_data(impl_id); + let impl_data = self.db.impl_signature(impl_id); let impl_name = Some( - hir_display_with_types_map(impl_data.self_ty, &impl_data.types_map) - .display(self.db, self.display_target) + hir_display_with_store(impl_data.self_ty, &impl_data.store) + .display( + self.db, + crate::Impl::from(impl_id).krate(self.db).to_display_target(self.db), + ) .to_smolstr(), ); self.with_container_name(impl_name, |s| { - for &(ref name, assoc_item_id) in &impl_data.items { - s.push_assoc_item(assoc_item_id, name) + for &(ref name, assoc_item_id) in &self.db.impl_items(impl_id).items { + s.push_assoc_item(assoc_item_id, name, None) } }) } - fn collect_from_trait(&mut self, trait_id: TraitId) { - let trait_data = self.db.trait_data(trait_id); + fn collect_from_trait(&mut self, trait_id: TraitId, trait_do_not_complete: Complete) { + let trait_data = self.db.trait_signature(trait_id); self.with_container_name(Some(trait_data.name.as_str().into()), |s| { - for &(ref name, assoc_item_id) in &trait_data.items { - s.push_assoc_item(assoc_item_id, name); + for &(ref name, assoc_item_id) in &self.db.trait_items(trait_id).items { + s.push_assoc_item(assoc_item_id, name, Some(trait_do_not_complete)); } }); } @@ -338,23 +347,34 @@ impl<'a> SymbolCollector<'a> { } } - fn push_assoc_item(&mut self, assoc_item_id: AssocItemId, name: &Name) { + fn push_assoc_item( + &mut self, + assoc_item_id: AssocItemId, + name: &Name, + trait_do_not_complete: Option, + ) { match assoc_item_id { - AssocItemId::FunctionId(id) => self.push_decl(id, name, true), - AssocItemId::ConstId(id) => self.push_decl(id, name, true), - AssocItemId::TypeAliasId(id) => self.push_decl(id, name, true), - } + AssocItemId::FunctionId(id) => self.push_decl(id, name, true, trait_do_not_complete), + AssocItemId::ConstId(id) => self.push_decl(id, name, true, trait_do_not_complete), + AssocItemId::TypeAliasId(id) => self.push_decl(id, name, true, trait_do_not_complete), + }; } - fn push_decl<'db, L>(&mut self, id: L, name: &Name, is_assoc: bool) + fn push_decl( + &mut self, + id: L, + name: &Name, + is_assoc: bool, + trait_do_not_complete: Option, + ) -> Complete where - L: Lookup = dyn DefDatabase + 'db> + Into, + L: Lookup + Into, ::Data: HasSource, <::Data as HasSource>::Value: HasName, { - let loc = id.lookup(self.db.upcast()); - let source = loc.source(self.db.upcast()); - let Some(name_node) = source.value.name() else { return }; + let loc = id.lookup(self.db); + let source = loc.source(self.db); + let Some(name_node) = source.value.name() else { return Complete::Yes }; let def = ModuleDef::from(id.into()); let dec_loc = DeclarationLocation { hir_file_id: source.file_id, @@ -362,7 +382,14 @@ impl<'a> SymbolCollector<'a> { name_ptr: AstPtr::new(&name_node).wrap_left(), }; + let mut do_not_complete = Complete::Yes; + if let Some(attrs) = def.attrs(self.db) { + do_not_complete = Complete::extract(matches!(def, ModuleDef::Trait(_)), &attrs); + if let Some(trait_do_not_complete) = trait_do_not_complete { + do_not_complete = Complete::for_trait_item(trait_do_not_complete, do_not_complete); + } + for alias in attrs.doc_aliases() { self.symbols.insert(FileSymbol { name: alias.clone(), @@ -371,6 +398,7 @@ impl<'a> SymbolCollector<'a> { container_name: self.current_container_name.clone(), is_alias: true, is_assoc, + do_not_complete, }); } } @@ -382,14 +410,17 @@ impl<'a> SymbolCollector<'a> { loc: dec_loc, is_alias: false, is_assoc, + do_not_complete, }); + + do_not_complete } fn push_module(&mut self, module_id: ModuleId, name: &Name) { - let def_map = module_id.def_map(self.db.upcast()); + let def_map = module_id.def_map(self.db); let module_data = &def_map[module_id.local_id]; let Some(declaration) = module_data.origin.declaration() else { return }; - let module = declaration.to_node(self.db.upcast()); + let module = declaration.to_node(self.db); let Some(name_node) = module.name() else { return }; let dec_loc = DeclarationLocation { hir_file_id: declaration.file_id, @@ -399,7 +430,10 @@ impl<'a> SymbolCollector<'a> { let def = ModuleDef::Module(module_id.into()); + let mut do_not_complete = Complete::Yes; if let Some(attrs) = def.attrs(self.db) { + do_not_complete = Complete::extract(matches!(def, ModuleDef::Trait(_)), &attrs); + for alias in attrs.doc_aliases() { self.symbols.insert(FileSymbol { name: alias.clone(), @@ -408,6 +442,7 @@ impl<'a> SymbolCollector<'a> { container_name: self.current_container_name.clone(), is_alias: true, is_assoc: false, + do_not_complete, }); } } @@ -419,6 +454,7 @@ impl<'a> SymbolCollector<'a> { loc: dec_loc, is_alias: false, is_assoc: false, + do_not_complete, }); } } diff --git a/src/tools/rust-analyzer/crates/hir/src/term_search/expr.rs b/src/tools/rust-analyzer/crates/hir/src/term_search/expr.rs index 0d672dc332f39..78ee3b5aa683a 100644 --- a/src/tools/rust-analyzer/crates/hir/src/term_search/expr.rs +++ b/src/tools/rust-analyzer/crates/hir/src/term_search/expr.rs @@ -22,7 +22,7 @@ fn mod_item_path( ) -> Option { let db = sema_scope.db; let m = sema_scope.module(); - m.find_path(db.upcast(), *def, cfg) + m.find_path(db, *def, cfg) } /// Helper function to get path to `ModuleDef` as string @@ -33,7 +33,7 @@ fn mod_item_path_str( edition: Edition, ) -> Result { let path = mod_item_path(sema_scope, def, cfg); - path.map(|it| it.display(sema_scope.db.upcast(), edition).to_string()) + path.map(|it| it.display(sema_scope.db, edition).to_string()) .ok_or(DisplaySourceCodeError::PathNotFound) } @@ -111,15 +111,15 @@ impl Expr { container_name(container, sema_scope, cfg, edition, display_target)?; let const_name = it .name(db) - .map(|c| c.display(db.upcast(), edition).to_string()) + .map(|c| c.display(db, edition).to_string()) .unwrap_or(String::new()); Ok(format!("{container_name}::{const_name}")) } None => mod_item_path_str(sema_scope, &ModuleDef::Const(*it)), }, Expr::Static(it) => mod_item_path_str(sema_scope, &ModuleDef::Static(*it)), - Expr::Local(it) => Ok(it.name(db).display(db.upcast(), edition).to_string()), - Expr::ConstParam(it) => Ok(it.name(db).display(db.upcast(), edition).to_string()), + Expr::Local(it) => Ok(it.name(db).display(db, edition).to_string()), + Expr::ConstParam(it) => Ok(it.name(db).display(db, edition).to_string()), Expr::FamousType { value, .. } => Ok(value.to_string()), Expr::Function { func, params, .. } => { let args = params @@ -133,7 +133,7 @@ impl Expr { Some(container) => { let container_name = container_name(container, sema_scope, cfg, edition, display_target)?; - let fn_name = func.name(db).display(db.upcast(), edition).to_string(); + let fn_name = func.name(db).display(db, edition).to_string(); Ok(format!("{container_name}::{fn_name}({args})")) } None => { @@ -147,7 +147,7 @@ impl Expr { return Ok(many_formatter(&target.ty(db))); } - let func_name = func.name(db).display(db.upcast(), edition).to_string(); + let func_name = func.name(db).display(db, edition).to_string(); let self_param = func.self_param(db).unwrap(); let target_str = target.gen_source_code(sema_scope, many_formatter, cfg, display_target)?; @@ -199,7 +199,7 @@ impl Expr { .map(|(a, f)| { let tmp = format!( "{}: {}", - f.name(db).display(db.upcast(), edition), + f.name(db).display(db, edition), a.gen_source_code( sema_scope, many_formatter, @@ -241,7 +241,7 @@ impl Expr { .map(|(a, f)| { let tmp = format!( "{}: {}", - f.name(db).display(db.upcast(), edition), + f.name(db).display(db, edition), a.gen_source_code( sema_scope, many_formatter, @@ -279,7 +279,7 @@ impl Expr { let strukt = expr.gen_source_code(sema_scope, many_formatter, cfg, display_target)?; - let field = field.name(db).display(db.upcast(), edition).to_string(); + let field = field.name(db).display(db, edition).to_string(); Ok(format!("{strukt}.{field}")) } Expr::Reference(expr) => { @@ -387,7 +387,7 @@ fn container_name( let self_ty = imp.self_ty(sema_scope.db); // Should it be guaranteed that `mod_item_path` always exists? match self_ty.as_adt().and_then(|adt| mod_item_path(sema_scope, &adt.into(), cfg)) { - Some(path) => path.display(sema_scope.db.upcast(), edition).to_string(), + Some(path) => path.display(sema_scope.db, edition).to_string(), None => self_ty.display(sema_scope.db, display_target).to_string(), } } diff --git a/src/tools/rust-analyzer/crates/hir/src/term_search/tactics.rs b/src/tools/rust-analyzer/crates/hir/src/term_search/tactics.rs index 847304d503a84..bcff44fcd016e 100644 --- a/src/tools/rust-analyzer/crates/hir/src/term_search/tactics.rs +++ b/src/tools/rust-analyzer/crates/hir/src/term_search/tactics.rs @@ -10,9 +10,9 @@ use std::iter; +use hir_ty::TyBuilder; use hir_ty::db::HirDatabase; use hir_ty::mir::BorrowKind; -use hir_ty::TyBuilder; use itertools::Itertools; use rustc_hash::FxHashSet; use span::Edition; diff --git a/src/tools/rust-analyzer/crates/ide-assists/Cargo.toml b/src/tools/rust-analyzer/crates/ide-assists/Cargo.toml index 3768c2257cadd..53af980c194c5 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/Cargo.toml +++ b/src/tools/rust-analyzer/crates/ide-assists/Cargo.toml @@ -12,7 +12,7 @@ rust-version.workspace = true [lib] [dependencies] -cov-mark = "2.0.0-pre.1" +cov-mark = "2.0.0" itertools.workspace = true either.workspace = true @@ -26,7 +26,7 @@ ide-db.workspace = true hir.workspace = true [dev-dependencies] -expect-test = "1.4.0" +expect-test = "1.5.1" # local deps test-utils.workspace = true diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/assist_config.rs b/src/tools/rust-analyzer/crates/ide-assists/src/assist_config.rs index 05105c8c92c5e..fb569f8cdae00 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/assist_config.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/assist_config.rs @@ -5,7 +5,7 @@ //! assists if we are allowed to. use hir::ImportPathConfig; -use ide_db::{imports::insert_use::InsertUseConfig, SnippetCap}; +use ide_db::{SnippetCap, assists::ExprFillDefaultMode, imports::insert_use::InsertUseConfig}; use crate::AssistKind; @@ -21,6 +21,7 @@ pub struct AssistConfig { pub term_search_fuel: u64, pub term_search_borrowck: bool, pub code_action_grouping: bool, + pub expr_fill_default: ExprFillDefaultMode, } impl AssistConfig { diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/assist_context.rs b/src/tools/rust-analyzer/crates/ide-assists/src/assist_context.rs index b1189f0d0b06e..9eb9452a2b836 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/assist_context.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/assist_context.rs @@ -1,17 +1,16 @@ //! See [`AssistContext`]. -use hir::{FileRange, Semantics}; -use ide_db::EditionedFileId; -use ide_db::{label::Label, FileId, RootDatabase}; +use hir::{EditionedFileId, FileRange, Semantics}; +use ide_db::{FileId, RootDatabase, label::Label}; use syntax::Edition; use syntax::{ - algo::{self, find_node_at_offset, find_node_at_range}, AstNode, AstToken, Direction, SourceFile, SyntaxElement, SyntaxKind, SyntaxToken, TextRange, TextSize, TokenAtOffset, + algo::{self, find_node_at_offset, find_node_at_range}, }; use crate::{ - assist_config::AssistConfig, Assist, AssistId, AssistKind, AssistResolveStrategy, GroupLabel, + Assist, AssistId, AssistKind, AssistResolveStrategy, GroupLabel, assist_config::AssistConfig, }; pub(crate) use ide_db::source_change::{SourceChangeBuilder, TreeMutator}; @@ -105,12 +104,16 @@ impl<'a> AssistContext<'a> { self.frange.range.start() } + pub(crate) fn vfs_file_id(&self) -> FileId { + self.frange.file_id.file_id(self.db()) + } + pub(crate) fn file_id(&self) -> EditionedFileId { self.frange.file_id } pub(crate) fn edition(&self) -> Edition { - self.frange.file_id.edition() + self.frange.file_id.edition(self.db()) } pub(crate) fn has_empty_selection(&self) -> bool { @@ -165,7 +168,7 @@ impl Assists { pub(crate) fn new(ctx: &AssistContext<'_>, resolve: AssistResolveStrategy) -> Assists { Assists { resolve, - file: ctx.frange.file_id.file_id(), + file: ctx.frange.file_id.file_id(ctx.db()), buf: Vec::new(), allowed: ctx.config.allowed.clone(), } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_braces.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_braces.rs index 42f615e71daf6..745ae67f30959 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_braces.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_braces.rs @@ -1,13 +1,13 @@ use syntax::{ - ast::{self, edit_in_place::Indent, syntax_factory::SyntaxFactory}, AstNode, + ast::{self, edit_in_place::Indent, syntax_factory::SyntaxFactory}, }; -use crate::{AssistContext, AssistId, AssistKind, Assists}; +use crate::{AssistContext, AssistId, Assists}; // Assist: add_braces // -// Adds braces to lambda and match arm expressions. +// Adds braces to closure bodies and match arm expressions. // // ``` // fn foo(n: i32) -> i32 { @@ -32,14 +32,14 @@ pub(crate) fn add_braces(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<( let (expr_type, expr) = get_replacement_node(ctx)?; acc.add( - AssistId("add_braces", AssistKind::RefactorRewrite), + AssistId::refactor_rewrite("add_braces"), match expr_type { - ParentType::ClosureExpr => "Add braces to closure body", - ParentType::MatchArmExpr => "Add braces to arm expression", + ParentType::ClosureExpr => "Add braces to this closure body", + ParentType::MatchArmExpr => "Add braces to this match arm expression", }, expr.syntax().text_range(), |builder| { - let make = SyntaxFactory::new(); + let make = SyntaxFactory::with_mappings(); let mut editor = builder.make_editor(expr.syntax()); let block_expr = make.block_expr(None, Some(expr.clone())); @@ -48,7 +48,7 @@ pub(crate) fn add_braces(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<( editor.replace(expr.syntax(), block_expr.syntax()); editor.add_mappings(make.finish_with_mappings()); - builder.add_file_edits(ctx.file_id(), editor); + builder.add_file_edits(ctx.vfs_file_id(), editor); }, ) } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_explicit_enum_discriminant.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_explicit_enum_discriminant.rs index 1a5de9cb071bb..10b0879e6364d 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_explicit_enum_discriminant.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_explicit_enum_discriminant.rs @@ -1,10 +1,6 @@ use hir::Semantics; -use ide_db::{ - assists::{AssistId, AssistKind}, - source_change::SourceChangeBuilder, - RootDatabase, -}; -use syntax::{ast, AstNode}; +use ide_db::{RootDatabase, assists::AssistId, source_change::SourceChangeBuilder}; +use syntax::{AstNode, ast}; use crate::{AssistContext, Assists}; @@ -53,7 +49,7 @@ pub(crate) fn add_explicit_enum_discriminant( } acc.add( - AssistId("add_explicit_enum_discriminant", AssistKind::RefactorRewrite), + AssistId::refactor_rewrite("add_explicit_enum_discriminant"), "Add explicit enum discriminants", enum_node.syntax().text_range(), |builder| { diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_explicit_type.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_explicit_type.rs index 8bc285614e039..35a65cc309111 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_explicit_type.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_explicit_type.rs @@ -2,7 +2,7 @@ use hir::HirDisplay; use ide_db::syntax_helpers::node_ext::walk_ty; use syntax::ast::{self, AstNode, LetStmt, Param}; -use crate::{AssistContext, AssistId, AssistKind, Assists}; +use crate::{AssistContext, AssistId, Assists}; // Assist: add_explicit_type // @@ -71,7 +71,7 @@ pub(crate) fn add_explicit_type(acc: &mut Assists, ctx: &AssistContext<'_>) -> O let inferred_type = ty.display_source_code(ctx.db(), module.into(), false).ok()?; acc.add( - AssistId("add_explicit_type", AssistKind::RefactorRewrite), + AssistId::refactor_rewrite("add_explicit_type"), format!("Insert explicit type `{inferred_type}`"), pat_range, |builder| match ascribed_ty { diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_label_to_loop.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_label_to_loop.rs index 001f1e8bb1585..d2b903447133f 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_label_to_loop.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_label_to_loop.rs @@ -1,10 +1,10 @@ use ide_db::syntax_helpers::node_ext::for_each_break_and_continue_expr; use syntax::{ - ast::{self, AstNode, HasLoopBody}, T, + ast::{self, AstNode, HasLoopBody}, }; -use crate::{AssistContext, AssistId, AssistKind, Assists}; +use crate::{AssistContext, AssistId, Assists}; // Assist: add_label_to_loop // @@ -35,7 +35,7 @@ pub(crate) fn add_label_to_loop(acc: &mut Assists, ctx: &AssistContext<'_>) -> O } acc.add( - AssistId("add_label_to_loop", AssistKind::Generate), + AssistId::generate("add_label_to_loop"), "Add Label", loop_expr.syntax().text_range(), |builder| { diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_lifetime_to_type.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_lifetime_to_type.rs index 43c0a72fa4774..dcdc7ea9cdced 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_lifetime_to_type.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_lifetime_to_type.rs @@ -1,6 +1,6 @@ use syntax::ast::{self, AstNode, HasGenericParams, HasName}; -use crate::{AssistContext, AssistId, AssistKind, Assists}; +use crate::{AssistContext, AssistId, Assists}; // Assist: add_lifetime_to_type // @@ -37,31 +37,26 @@ pub(crate) fn add_lifetime_to_type(acc: &mut Assists, ctx: &AssistContext<'_>) - let ref_types = fetch_borrowed_types(&node)?; let target = node.syntax().text_range(); - acc.add( - AssistId("add_lifetime_to_type", AssistKind::Generate), - "Add lifetime", - target, - |builder| { - match node.generic_param_list() { - Some(gen_param) => { - if let Some(left_angle) = gen_param.l_angle_token() { - builder.insert(left_angle.text_range().end(), "'a, "); - } + acc.add(AssistId::generate("add_lifetime_to_type"), "Add lifetime", target, |builder| { + match node.generic_param_list() { + Some(gen_param) => { + if let Some(left_angle) = gen_param.l_angle_token() { + builder.insert(left_angle.text_range().end(), "'a, "); } - None => { - if let Some(name) = node.name() { - builder.insert(name.syntax().text_range().end(), "<'a>"); - } + } + None => { + if let Some(name) = node.name() { + builder.insert(name.syntax().text_range().end(), "<'a>"); } } + } - for ref_type in ref_types { - if let Some(amp_token) = ref_type.amp_token() { - builder.insert(amp_token.text_range().end(), "'a "); - } + for ref_type in ref_types { + if let Some(amp_token) = ref_type.amp_token() { + builder.insert(amp_token.text_range().end(), "'a "); } - }, - ) + } + }) } fn fetch_borrowed_types(node: &ast::Adt) -> Option> { @@ -99,11 +94,7 @@ fn fetch_borrowed_types(node: &ast::Adt) -> Option> { } }; - if ref_types.is_empty() { - None - } else { - Some(ref_types) - } + if ref_types.is_empty() { None } else { Some(ref_types) } } fn find_ref_types_from_field_list(field_list: &ast::FieldList) -> Option> { @@ -134,11 +125,7 @@ fn find_ref_types_from_field_list(field_list: &ast::FieldList) -> Option usize { {40 + 2} } + $0fn get_n(&self) -> usize { N } - fn get_m(&self) -> usize { {m::VAL + 1} } + fn get_m(&self) -> usize { M } }"#, ) } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_match_arms.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_match_arms.rs index 37f5f44dfa020..858d4369914a6 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_match_arms.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_match_arms.rs @@ -1,17 +1,18 @@ use std::iter::{self, Peekable}; use either::Either; -use hir::{sym, Adt, Crate, HasAttrs, ImportPathConfig, ModuleDef, Semantics}; -use ide_db::syntax_helpers::suggest_name; +use hir::{Adt, Crate, HasAttrs, ImportPathConfig, ModuleDef, Semantics, sym}; use ide_db::RootDatabase; +use ide_db::assists::ExprFillDefaultMode; +use ide_db::syntax_helpers::suggest_name; use ide_db::{famous_defs::FamousDefs, helpers::mod_path_to_ast}; use itertools::Itertools; use syntax::ast::edit::IndentLevel; use syntax::ast::edit_in_place::Indent; use syntax::ast::syntax_factory::SyntaxFactory; -use syntax::ast::{self, make, AstNode, MatchArmList, MatchExpr, Pat}; +use syntax::ast::{self, AstNode, MatchArmList, MatchExpr, Pat, make}; -use crate::{utils, AssistContext, AssistId, AssistKind, Assists}; +use crate::{AssistContext, AssistId, Assists, utils}; // Assist: add_missing_match_arms // @@ -76,7 +77,7 @@ pub(crate) fn add_missing_match_arms(acc: &mut Assists, ctx: &AssistContext<'_>) let cfg = ctx.config.import_path_config(); - let make = SyntaxFactory::new(); + let make = SyntaxFactory::with_mappings(); let module = ctx.sema.scope(expr.syntax())?.module(); let (mut missing_pats, is_non_exhaustive, has_hidden_variants): ( @@ -204,7 +205,7 @@ pub(crate) fn add_missing_match_arms(acc: &mut Assists, ctx: &AssistContext<'_>) } acc.add( - AssistId("add_missing_match_arms", AssistKind::QuickFix), + AssistId::quick_fix("add_missing_match_arms"), "Fill match arms", ctx.sema.original_range(match_expr.syntax()).range, |builder| { @@ -216,7 +217,17 @@ pub(crate) fn add_missing_match_arms(acc: &mut Assists, ctx: &AssistContext<'_>) // filter out hidden patterns because they're handled by the catch-all arm !hidden }) - .map(|(pat, _)| make.match_arm(pat, None, make::ext::expr_todo())); + .map(|(pat, _)| { + make.match_arm( + pat, + None, + match ctx.config.expr_fill_default { + ExprFillDefaultMode::Todo => make::ext::expr_todo(), + ExprFillDefaultMode::Underscore => make::ext::expr_underscore(), + ExprFillDefaultMode::Default => make::ext::expr_todo(), + }, + ) + }); let mut arms: Vec<_> = match_arm_list .arms() @@ -246,7 +257,15 @@ pub(crate) fn add_missing_match_arms(acc: &mut Assists, ctx: &AssistContext<'_>) if needs_catch_all_arm && !has_catch_all_arm { cov_mark::hit!(added_wildcard_pattern); - let arm = make.match_arm(make.wildcard_pat().into(), None, make::ext::expr_todo()); + let arm = make.match_arm( + make.wildcard_pat().into(), + None, + match ctx.config.expr_fill_default { + ExprFillDefaultMode::Todo => make::ext::expr_todo(), + ExprFillDefaultMode::Underscore => make::ext::expr_underscore(), + ExprFillDefaultMode::Default => make::ext::expr_todo(), + }, + ); arms.push(arm); } @@ -294,7 +313,7 @@ pub(crate) fn add_missing_match_arms(acc: &mut Assists, ctx: &AssistContext<'_>) } editor.add_mappings(make.take()); - builder.add_file_edits(ctx.file_id(), editor); + builder.add_file_edits(ctx.vfs_file_id(), editor); }, ) } @@ -386,7 +405,7 @@ impl ExtendedEnum { fn is_non_exhaustive(self, db: &RootDatabase, krate: Crate) -> bool { match self { ExtendedEnum::Enum(e) => { - e.attrs(db).by_key(&sym::non_exhaustive).exists() && e.module(db).krate() != krate + e.attrs(db).by_key(sym::non_exhaustive).exists() && e.module(db).krate() != krate } _ => false, } @@ -461,7 +480,7 @@ fn build_pat( let fields = var.fields(db); let pat: ast::Pat = match var.kind(db) { hir::StructKind::Tuple => { - let mut name_generator = suggest_name::NameGenerator::new(); + let mut name_generator = suggest_name::NameGenerator::default(); let pats = fields.into_iter().map(|f| { let name = name_generator.for_type(&f.ty(db), db, edition); match name { @@ -474,8 +493,8 @@ fn build_pat( hir::StructKind::Record => { let fields = fields .into_iter() - .map(|f| make.name_ref(f.name(db).as_str())) - .map(|name_ref| make.record_pat_field_shorthand(name_ref)); + .map(|f| make.ident_pat(false, false, make.name(f.name(db).as_str()))) + .map(|ident| make.record_pat_field_shorthand(ident.into())); let fields = make.record_pat_field_list(fields, None); make.record_pat_with_fields(path, fields).into() } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_return_type.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_return_type.rs index e5f0201bd527e..a7104ce068da8 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_return_type.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_return_type.rs @@ -1,7 +1,7 @@ use hir::HirDisplay; -use syntax::{ast, match_ast, AstNode, SyntaxKind, SyntaxToken, TextRange, TextSize}; +use syntax::{AstNode, SyntaxKind, SyntaxToken, TextRange, TextSize, ast, match_ast}; -use crate::{AssistContext, AssistId, AssistKind, Assists}; +use crate::{AssistContext, AssistId, Assists}; // Assist: add_return_type // @@ -25,7 +25,7 @@ pub(crate) fn add_return_type(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opt let ty = ty.display_source_code(ctx.db(), module.into(), true).ok()?; acc.add( - AssistId("add_return_type", AssistKind::RefactorRewrite), + AssistId::refactor_rewrite("add_return_type"), match fn_type { FnType::Function => "Add this function's return type", FnType::Closure { .. } => "Add this closure's return type", diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_turbo_fish.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_turbo_fish.rs index 04d63f5bc8fe6..be13b04873c8e 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_turbo_fish.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_turbo_fish.rs @@ -1,14 +1,14 @@ use either::Either; use ide_db::defs::{Definition, NameRefClass}; use syntax::{ - ast::{self, make, syntax_factory::SyntaxFactory, HasArgList, HasGenericArgs}, - syntax_editor::Position, AstNode, + ast::{self, HasArgList, HasGenericArgs, make, syntax_factory::SyntaxFactory}, + syntax_editor::Position, }; use crate::{ + AssistId, assist_context::{AssistContext, Assists}, - AssistId, AssistKind, }; // Assist: add_turbo_fish @@ -71,7 +71,7 @@ pub(crate) fn add_turbo_fish(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opti let def = match NameRefClass::classify(&ctx.sema, &name_ref)? { NameRefClass::Definition(def, _) => def, NameRefClass::FieldShorthand { .. } | NameRefClass::ExternCrateShorthand { .. } => { - return None + return None; } }; let fun = match def { @@ -89,7 +89,7 @@ pub(crate) fn add_turbo_fish(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opti let_stmt.pat()?; acc.add( - AssistId("add_type_ascription", AssistKind::RefactorRewrite), + AssistId::refactor_rewrite("add_type_ascription"), "Add `: _` before assignment operator", ident.text_range(), |builder| { @@ -119,7 +119,7 @@ pub(crate) fn add_turbo_fish(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opti } } - builder.add_file_edits(ctx.file_id(), editor); + builder.add_file_edits(ctx.vfs_file_id(), editor); }, )? } else { @@ -135,13 +135,13 @@ pub(crate) fn add_turbo_fish(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opti .count(); acc.add( - AssistId("add_turbo_fish", AssistKind::RefactorRewrite), + AssistId::refactor_rewrite("add_turbo_fish"), "Add `::<>`", ident.text_range(), |builder| { builder.trigger_parameter_hints(); - let make = SyntaxFactory::new(); + let make = SyntaxFactory::with_mappings(); let mut editor = match &turbofish_target { Either::Left(it) => builder.make_editor(it.syntax()), Either::Right(it) => builder.make_editor(it.syntax()), @@ -181,7 +181,7 @@ pub(crate) fn add_turbo_fish(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opti } editor.add_mappings(make.finish_with_mappings()); - builder.add_file_edits(ctx.file_id(), editor); + builder.add_file_edits(ctx.vfs_file_id(), editor); }, ) } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/apply_demorgan.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/apply_demorgan.rs index 67bf8eed23df1..3b447d1f6d572 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/apply_demorgan.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/apply_demorgan.rs @@ -6,19 +6,18 @@ use ide_db::{ syntax_helpers::node_ext::{for_each_tail_expr, walk_expr}, }; use syntax::{ + SyntaxKind, T, ast::{ - self, - prec::{precedence, ExprPrecedence}, - syntax_factory::SyntaxFactory, - AstNode, + self, AstNode, Expr::BinExpr, HasArgList, + prec::{ExprPrecedence, precedence}, + syntax_factory::SyntaxFactory, }, syntax_editor::{Position, SyntaxEditor}, - SyntaxKind, T, }; -use crate::{utils::invert_boolean_expression, AssistContext, AssistId, AssistKind, Assists}; +use crate::{AssistContext, AssistId, Assists, utils::invert_boolean_expression}; // Assist: apply_demorgan // @@ -65,7 +64,7 @@ pub(crate) fn apply_demorgan(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opti _ => return None, }; - let make = SyntaxFactory::new(); + let make = SyntaxFactory::with_mappings(); let demorganed = bin_expr.clone_subtree(); let mut editor = SyntaxEditor::new(demorganed.syntax().clone()); @@ -108,11 +107,11 @@ pub(crate) fn apply_demorgan(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opti acc.add_group( &GroupLabel("Apply De Morgan's law".to_owned()), - AssistId("apply_demorgan", AssistKind::RefactorRewrite), + AssistId::refactor_rewrite("apply_demorgan"), "Apply De Morgan's law", op_range, |builder| { - let make = SyntaxFactory::new(); + let make = SyntaxFactory::with_mappings(); let paren_expr = bin_expr.syntax().parent().and_then(ast::ParenExpr::cast); let neg_expr = paren_expr .clone() @@ -148,7 +147,7 @@ pub(crate) fn apply_demorgan(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opti } editor.add_mappings(make.finish_with_mappings()); - builder.add_file_edits(ctx.file_id(), editor); + builder.add_file_edits(ctx.vfs_file_id(), editor); }, ) } @@ -191,11 +190,11 @@ pub(crate) fn apply_demorgan_iterator(acc: &mut Assists, ctx: &AssistContext<'_> let label = format!("Apply De Morgan's law to `Iterator::{}`", name.text().as_str()); acc.add_group( &GroupLabel("Apply De Morgan's law".to_owned()), - AssistId("apply_demorgan_iterator", AssistKind::RefactorRewrite), + AssistId::refactor_rewrite("apply_demorgan_iterator"), label, op_range, |builder| { - let make = SyntaxFactory::new(); + let make = SyntaxFactory::with_mappings(); let mut editor = builder.make_editor(method_call.syntax()); // replace the method name let new_name = match name.text().as_str() { @@ -231,7 +230,7 @@ pub(crate) fn apply_demorgan_iterator(acc: &mut Assists, ctx: &AssistContext<'_> } editor.add_mappings(make.finish_with_mappings()); - builder.add_file_edits(ctx.file_id(), editor); + builder.add_file_edits(ctx.vfs_file_id(), editor); }, ) } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/auto_import.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/auto_import.rs index a92a000c3fbd3..d310e11011be1 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/auto_import.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/auto_import.rs @@ -1,16 +1,18 @@ use std::cmp::Reverse; -use hir::{db::HirDatabase, Module}; +use either::Either; +use hir::{Module, Type, db::HirDatabase}; use ide_db::{ + active_parameter::ActiveParameter, helpers::mod_path_to_ast, imports::{ import_assets::{ImportAssets, ImportCandidate, LocatedImport}, - insert_use::{insert_use, insert_use_as_alias, ImportScope}, + insert_use::{ImportScope, insert_use, insert_use_as_alias}, }, }; -use syntax::{ast, AstNode, Edition, NodeOrToken, SyntaxElement}; +use syntax::{AstNode, Edition, SyntaxNode, ast, match_ast}; -use crate::{AssistContext, AssistId, AssistKind, Assists, GroupLabel}; +use crate::{AssistContext, AssistId, Assists, GroupLabel}; // Feature: Auto Import // @@ -92,7 +94,7 @@ use crate::{AssistContext, AssistId, AssistKind, Assists, GroupLabel}; pub(crate) fn auto_import(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { let cfg = ctx.config.import_path_config(); - let (import_assets, syntax_under_caret) = find_importable_node(ctx)?; + let (import_assets, syntax_under_caret, expected) = find_importable_node(ctx)?; let mut proposed_imports: Vec<_> = import_assets .search_for_imports(&ctx.sema, cfg, ctx.config.insert_use.prefix_kind) .collect(); @@ -100,17 +102,8 @@ pub(crate) fn auto_import(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option< return None; } - let range = match &syntax_under_caret { - NodeOrToken::Node(node) => ctx.sema.original_range(node).range, - NodeOrToken::Token(token) => token.text_range(), - }; - let scope = ImportScope::find_insert_use_container( - &match syntax_under_caret { - NodeOrToken::Node(it) => it, - NodeOrToken::Token(it) => it.parent()?, - }, - &ctx.sema, - )?; + let range = ctx.sema.original_range(&syntax_under_caret).range; + let scope = ImportScope::find_insert_use_container(&syntax_under_caret, &ctx.sema)?; // we aren't interested in different namespaces proposed_imports.sort_by(|a, b| a.import_path.cmp(&b.import_path)); @@ -118,8 +111,9 @@ pub(crate) fn auto_import(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option< let current_module = ctx.sema.scope(scope.as_syntax_node()).map(|scope| scope.module()); // prioritize more relevant imports - proposed_imports - .sort_by_key(|import| Reverse(relevance_score(ctx, import, current_module.as_ref()))); + proposed_imports.sort_by_key(|import| { + Reverse(relevance_score(ctx, import, expected.as_ref(), current_module.as_ref())) + }); let edition = current_module.map(|it| it.krate().edition(ctx.db())).unwrap_or(Edition::CURRENT); let group_label = group_label(import_assets.import_candidate()); @@ -127,7 +121,7 @@ pub(crate) fn auto_import(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option< let import_path = import.import_path; let (assist_id, import_name) = - (AssistId("auto_import", AssistKind::QuickFix), import_path.display(ctx.db(), edition)); + (AssistId::quick_fix("auto_import"), import_path.display(ctx.db(), edition)); acc.add_group( &group_label, assist_id, @@ -180,22 +174,61 @@ pub(crate) fn auto_import(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option< pub(super) fn find_importable_node( ctx: &AssistContext<'_>, -) -> Option<(ImportAssets, SyntaxElement)> { +) -> Option<(ImportAssets, SyntaxNode, Option)> { + // Deduplicate this with the `expected_type_and_name` logic for completions + let expected = |expr_or_pat: Either| match expr_or_pat { + Either::Left(expr) => { + let parent = expr.syntax().parent()?; + // FIXME: Expand this + match_ast! { + match parent { + ast::ArgList(list) => { + ActiveParameter::at_arg( + &ctx.sema, + list, + expr.syntax().text_range().start(), + ).map(|ap| ap.ty) + }, + ast::LetStmt(stmt) => { + ctx.sema.type_of_pat(&stmt.pat()?).map(|t| t.original) + }, + _ => None, + } + } + } + Either::Right(pat) => { + let parent = pat.syntax().parent()?; + // FIXME: Expand this + match_ast! { + match parent { + ast::LetStmt(stmt) => { + ctx.sema.type_of_expr(&stmt.initializer()?).map(|t| t.original) + }, + _ => None, + } + } + } + }; + if let Some(path_under_caret) = ctx.find_node_at_offset_with_descend::() { + let expected = + path_under_caret.top_path().syntax().parent().and_then(Either::cast).and_then(expected); ImportAssets::for_exact_path(&path_under_caret, &ctx.sema) - .zip(Some(path_under_caret.syntax().clone().into())) + .map(|it| (it, path_under_caret.syntax().clone(), expected)) } else if let Some(method_under_caret) = ctx.find_node_at_offset_with_descend::() { + let expected = expected(Either::Left(method_under_caret.clone().into())); ImportAssets::for_method_call(&method_under_caret, &ctx.sema) - .zip(Some(method_under_caret.syntax().clone().into())) + .map(|it| (it, method_under_caret.syntax().clone(), expected)) } else if ctx.find_node_at_offset_with_descend::().is_some() { None } else if let Some(pat) = ctx .find_node_at_offset_with_descend::() .filter(ast::IdentPat::is_simple_ident) { - ImportAssets::for_ident_pat(&ctx.sema, &pat).zip(Some(pat.syntax().clone().into())) + let expected = expected(Either::Right(pat.clone().into())); + ImportAssets::for_ident_pat(&ctx.sema, &pat).map(|it| (it, pat.syntax().clone(), expected)) } else { None } @@ -219,6 +252,7 @@ fn group_label(import_candidate: &ImportCandidate) -> GroupLabel { pub(crate) fn relevance_score( ctx: &AssistContext<'_>, import: &LocatedImport, + expected: Option<&Type>, current_module: Option<&Module>, ) -> i32 { let mut score = 0; @@ -230,6 +264,35 @@ pub(crate) fn relevance_score( hir::ItemInNs::Macros(makro) => Some(makro.module(db)), }; + if let Some(expected) = expected { + let ty = match import.item_to_import { + hir::ItemInNs::Types(module_def) | hir::ItemInNs::Values(module_def) => { + match module_def { + hir::ModuleDef::Function(function) => Some(function.ret_type(ctx.db())), + hir::ModuleDef::Adt(adt) => Some(match adt { + hir::Adt::Struct(it) => it.ty(ctx.db()), + hir::Adt::Union(it) => it.ty(ctx.db()), + hir::Adt::Enum(it) => it.ty(ctx.db()), + }), + hir::ModuleDef::Variant(variant) => Some(variant.constructor_ty(ctx.db())), + hir::ModuleDef::Const(it) => Some(it.ty(ctx.db())), + hir::ModuleDef::Static(it) => Some(it.ty(ctx.db())), + hir::ModuleDef::TypeAlias(it) => Some(it.ty(ctx.db())), + hir::ModuleDef::BuiltinType(it) => Some(it.ty(ctx.db())), + _ => None, + } + } + hir::ItemInNs::Macros(_) => None, + }; + if let Some(ty) = ty { + if ty == *expected { + score = 100000; + } else if ty.could_unify_with(ctx.db(), expected) { + score = 10000; + } + } + } + match item_module.zip(current_module) { // get the distance between the imported path and the current module // (prefer items that are more local) @@ -279,12 +342,12 @@ mod tests { use super::*; use hir::{FileRange, Semantics}; - use ide_db::{assists::AssistResolveStrategy, RootDatabase}; + use ide_db::{RootDatabase, assists::AssistResolveStrategy}; use test_fixture::WithFixture; use crate::tests::{ - check_assist, check_assist_by_label, check_assist_not_applicable, check_assist_target, - TEST_CONFIG, + TEST_CONFIG, check_assist, check_assist_by_label, check_assist_not_applicable, + check_assist_target, }; fn check_auto_import_order(before: &str, order: &[&str]) { @@ -554,7 +617,7 @@ mod baz { } ", r" - use PubMod3::PubStruct; + use PubMod1::PubStruct; PubStruct @@ -1722,4 +1785,96 @@ mod foo { ", ); } + + #[test] + fn prefers_type_match() { + check_assist( + auto_import, + r" +mod sync { pub mod atomic { pub enum Ordering { V } } } +mod cmp { pub enum Ordering { V } } +fn takes_ordering(_: sync::atomic::Ordering) {} +fn main() { + takes_ordering(Ordering$0); +} +", + r" +use sync::atomic::Ordering; + +mod sync { pub mod atomic { pub enum Ordering { V } } } +mod cmp { pub enum Ordering { V } } +fn takes_ordering(_: sync::atomic::Ordering) {} +fn main() { + takes_ordering(Ordering); +} +", + ); + check_assist( + auto_import, + r" +mod sync { pub mod atomic { pub enum Ordering { V } } } +mod cmp { pub enum Ordering { V } } +fn takes_ordering(_: cmp::Ordering) {} +fn main() { + takes_ordering(Ordering$0); +} +", + r" +use cmp::Ordering; + +mod sync { pub mod atomic { pub enum Ordering { V } } } +mod cmp { pub enum Ordering { V } } +fn takes_ordering(_: cmp::Ordering) {} +fn main() { + takes_ordering(Ordering); +} +", + ); + } + + #[test] + fn prefers_type_match2() { + check_assist( + auto_import, + r" +mod sync { pub mod atomic { pub enum Ordering { V } } } +mod cmp { pub enum Ordering { V } } +fn takes_ordering(_: sync::atomic::Ordering) {} +fn main() { + takes_ordering(Ordering$0::V); +} +", + r" +use sync::atomic::Ordering; + +mod sync { pub mod atomic { pub enum Ordering { V } } } +mod cmp { pub enum Ordering { V } } +fn takes_ordering(_: sync::atomic::Ordering) {} +fn main() { + takes_ordering(Ordering::V); +} +", + ); + check_assist( + auto_import, + r" +mod sync { pub mod atomic { pub enum Ordering { V } } } +mod cmp { pub enum Ordering { V } } +fn takes_ordering(_: cmp::Ordering) {} +fn main() { + takes_ordering(Ordering$0::V); +} +", + r" +use cmp::Ordering; + +mod sync { pub mod atomic { pub enum Ordering { V } } } +mod cmp { pub enum Ordering { V } } +fn takes_ordering(_: cmp::Ordering) {} +fn main() { + takes_ordering(Ordering::V); +} +", + ); + } } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/bind_unused_param.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/bind_unused_param.rs index 8f053f4df949f..00c7d25b257b2 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/bind_unused_param.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/bind_unused_param.rs @@ -1,12 +1,8 @@ use crate::assist_context::{AssistContext, Assists}; -use ide_db::{ - assists::{AssistId, AssistKind}, - defs::Definition, - LineIndexDatabase, -}; +use ide_db::{LineIndexDatabase, assists::AssistId, defs::Definition}; use syntax::{ - ast::{self, edit_in_place::Indent}, AstNode, + ast::{self, edit_in_place::Indent}, }; // Assist: bind_unused_param @@ -42,11 +38,11 @@ pub(crate) fn bind_unused_param(acc: &mut Assists, ctx: &AssistContext<'_>) -> O let r_curly_range = stmt_list.r_curly_token()?.text_range(); acc.add( - AssistId("bind_unused_param", AssistKind::QuickFix), + AssistId::quick_fix("bind_unused_param"), format!("Bind as `let _ = {ident_pat};`"), param.syntax().text_range(), |builder| { - let line_index = ctx.db().line_index(ctx.file_id().into()); + let line_index = ctx.db().line_index(ctx.vfs_file_id()); let indent = func.indent_level(); let text_indent = indent + 1; diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/change_visibility.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/change_visibility.rs index 07fd5e34181ef..9b9f0c4522ed2 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/change_visibility.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/change_visibility.rs @@ -1,14 +1,14 @@ use syntax::{ - ast::{self, HasName, HasVisibility}, AstNode, SyntaxKind::{ self, ASSOC_ITEM_LIST, CONST, ENUM, FN, MACRO_DEF, MODULE, SOURCE_FILE, STATIC, STRUCT, TRAIT, TYPE_ALIAS, USE, VISIBILITY, }, SyntaxNode, T, + ast::{self, HasName, HasVisibility}, }; -use crate::{utils::vis_offset, AssistContext, AssistId, AssistKind, Assists}; +use crate::{AssistContext, AssistId, Assists, utils::vis_offset}; // Assist: change_visibility // @@ -76,7 +76,7 @@ fn add_vis(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { }; acc.add( - AssistId("change_visibility", AssistKind::RefactorRewrite), + AssistId::refactor_rewrite("change_visibility"), "Change visibility to pub(crate)", target, |edit| { @@ -112,7 +112,7 @@ fn change_vis(acc: &mut Assists, vis: ast::Visibility) -> Option<()> { if vis.syntax().text() == "pub" { let target = vis.syntax().text_range(); return acc.add( - AssistId("change_visibility", AssistKind::RefactorRewrite), + AssistId::refactor_rewrite("change_visibility"), "Change Visibility to pub(crate)", target, |edit| { @@ -123,7 +123,7 @@ fn change_vis(acc: &mut Assists, vis: ast::Visibility) -> Option<()> { if vis.syntax().text() == "pub(crate)" { let target = vis.syntax().text_range(); return acc.add( - AssistId("change_visibility", AssistKind::RefactorRewrite), + AssistId::refactor_rewrite("change_visibility"), "Change visibility to pub", target, |edit| { diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_bool_then.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_bool_then.rs index 151c71c0a767e..bcd06c1ef725d 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_bool_then.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_bool_then.rs @@ -1,21 +1,21 @@ -use hir::{sym, AsAssocItem, Semantics}; +use hir::{AsAssocItem, Semantics, sym}; use ide_db::{ + RootDatabase, famous_defs::FamousDefs, syntax_helpers::node_ext::{ block_as_lone_tail, for_each_tail_expr, is_pattern_cond, preorder_expr, }, - RootDatabase, }; use itertools::Itertools; use syntax::{ - ast::{self, edit::AstNodeEdit, syntax_factory::SyntaxFactory, HasArgList}, - syntax_editor::SyntaxEditor, AstNode, SyntaxNode, + ast::{self, HasArgList, edit::AstNodeEdit, syntax_factory::SyntaxFactory}, + syntax_editor::SyntaxEditor, }; use crate::{ + AssistContext, AssistId, Assists, utils::{invert_boolean_expression, unwrap_trivial_block}, - AssistContext, AssistId, AssistKind, Assists, }; // Assist: convert_if_to_bool_then @@ -73,7 +73,7 @@ pub(crate) fn convert_if_to_bool_then(acc: &mut Assists, ctx: &AssistContext<'_> let target = expr.syntax().text_range(); acc.add( - AssistId("convert_if_to_bool_then", AssistKind::RefactorRewrite), + AssistId::refactor_rewrite("convert_if_to_bool_then"), "Convert `if` expression to `bool::then` call", target, |builder| { @@ -98,7 +98,7 @@ pub(crate) fn convert_if_to_bool_then(acc: &mut Assists, ctx: &AssistContext<'_> let closure_body = ast::Expr::cast(edit.new_root().clone()).unwrap(); let mut editor = builder.make_editor(expr.syntax()); - let make = SyntaxFactory::new(); + let make = SyntaxFactory::with_mappings(); let closure_body = match closure_body { ast::Expr::BlockExpr(block) => unwrap_trivial_block(block), e => e, @@ -135,7 +135,7 @@ pub(crate) fn convert_if_to_bool_then(acc: &mut Assists, ctx: &AssistContext<'_> editor.replace(expr.syntax(), mcall.syntax()); editor.add_mappings(make.finish_with_mappings()); - builder.add_file_edits(ctx.file_id(), editor); + builder.add_file_edits(ctx.vfs_file_id(), editor); }, ) } @@ -181,7 +181,7 @@ pub(crate) fn convert_bool_then_to_if(acc: &mut Assists, ctx: &AssistContext<'_> let target = mcall.syntax().text_range(); acc.add( - AssistId("convert_bool_then_to_if", AssistKind::RefactorRewrite), + AssistId::refactor_rewrite("convert_bool_then_to_if"), "Convert `bool::then` call to `if`", target, |builder| { @@ -196,7 +196,7 @@ pub(crate) fn convert_bool_then_to_if(acc: &mut Assists, ctx: &AssistContext<'_> // Wrap all tails in `Some(...)` let none_path = mapless_make.expr_path(mapless_make.ident_path("None")); let some_path = mapless_make.expr_path(mapless_make.ident_path("Some")); - for_each_tail_expr(&ast::Expr::BlockExpr(closure_body.clone()), &mut |e| { + for_each_tail_expr(&ast::Expr::BlockExpr(closure_body), &mut |e| { let e = match e { ast::Expr::BreakExpr(e) => e.expr(), ast::Expr::ReturnExpr(e) => e.expr(), @@ -216,7 +216,7 @@ pub(crate) fn convert_bool_then_to_if(acc: &mut Assists, ctx: &AssistContext<'_> let closure_body = ast::BlockExpr::cast(edit.new_root().clone()).unwrap(); let mut editor = builder.make_editor(mcall.syntax()); - let make = SyntaxFactory::new(); + let make = SyntaxFactory::with_mappings(); let cond = match &receiver { ast::Expr::ParenExpr(expr) => expr.expr().unwrap_or(receiver), @@ -233,7 +233,7 @@ pub(crate) fn convert_bool_then_to_if(acc: &mut Assists, ctx: &AssistContext<'_> editor.replace(mcall.syntax().clone(), if_expr.syntax().clone()); editor.add_mappings(make.finish_with_mappings()); - builder.add_file_edits(ctx.file_id(), editor); + builder.add_file_edits(ctx.vfs_file_id(), editor); }, ) } @@ -245,7 +245,7 @@ fn option_variants( let fam = FamousDefs(sema, sema.scope(expr)?.krate()); let option_variants = fam.core_option_Option()?.variants(sema.db); match &*option_variants { - &[variant0, variant1] => Some(if variant0.name(sema.db) == sym::None.clone() { + &[variant0, variant1] => Some(if variant0.name(sema.db) == sym::None { (variant0, variant1) } else { (variant1, variant0) diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_bool_to_enum.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_bool_to_enum.rs index 7716e99e604b3..00e9fdf124d16 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_bool_to_enum.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_bool_to_enum.rs @@ -2,23 +2,23 @@ use either::Either; use hir::ModuleDef; use ide_db::text_edit::TextRange; use ide_db::{ - assists::{AssistId, AssistKind}, + FxHashSet, + assists::AssistId, defs::Definition, helpers::mod_path_to_ast, - imports::insert_use::{insert_use, ImportScope}, + imports::insert_use::{ImportScope, insert_use}, search::{FileReference, UsageSearchResult}, source_change::SourceChangeBuilder, - FxHashSet, }; use itertools::Itertools; use syntax::{ + AstNode, NodeOrToken, SyntaxKind, SyntaxNode, T, ast::{ - self, + self, HasName, edit::IndentLevel, edit_in_place::{AttrsOwnerEdit, Indent}, - make, HasName, + make, }, - AstNode, NodeOrToken, SyntaxKind, SyntaxNode, T, }; use crate::{ @@ -62,7 +62,7 @@ pub(crate) fn convert_bool_to_enum(acc: &mut Assists, ctx: &AssistContext<'_>) - let target = name.syntax().text_range(); acc.add( - AssistId("convert_bool_to_enum", AssistKind::RefactorRewrite), + AssistId::refactor_rewrite("convert_bool_to_enum"), "Convert boolean to enum", target, |edit| { @@ -209,7 +209,7 @@ fn replace_usages( delayed_mutations: &mut Vec<(ImportScope, ast::Path)>, ) { for (file_id, references) in usages { - edit.edit_file(file_id.file_id()); + edit.edit_file(file_id.file_id(ctx.db())); let refs_with_imports = augment_references_with_imports(ctx, references, target_module); @@ -1136,7 +1136,7 @@ fn foo() { } //- /main.rs -use foo::Foo; +use foo::{Bool, Foo}; mod foo; diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_closure_to_fn.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_closure_to_fn.rs index d34cf895cd90a..43515de71e20d 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_closure_to_fn.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_closure_to_fn.rs @@ -1,23 +1,20 @@ use either::Either; use hir::{CaptureKind, ClosureCapture, FileRangeWrapper, HirDisplay}; use ide_db::{ - assists::{AssistId, AssistKind}, - base_db::SourceDatabase, - defs::Definition, - search::FileReferenceNode, - source_change::SourceChangeBuilder, - FxHashSet, + FxHashSet, assists::AssistId, base_db::SourceDatabase, defs::Definition, + search::FileReferenceNode, source_change::SourceChangeBuilder, }; use stdx::format_to; use syntax::{ + AstNode, Direction, SyntaxKind, SyntaxNode, T, TextSize, ToSmolStr, algo::{skip_trivia_token, skip_whitespace_token}, ast::{ - self, + self, HasArgList, HasGenericParams, HasName, edit::{AstNodeEdit, IndentLevel}, - make, HasArgList, HasGenericParams, HasName, + make, }, hacks::parse_expr_from_str, - ted, AstNode, Direction, SyntaxKind, SyntaxNode, TextSize, ToSmolStr, T, + ted, }; use crate::assist_context::{AssistContext, Assists}; @@ -146,7 +143,7 @@ pub(crate) fn convert_closure_to_fn(acc: &mut Assists, ctx: &AssistContext<'_>) }; acc.add( - AssistId("convert_closure_to_fn", AssistKind::RefactorRewrite), + AssistId::refactor_rewrite("convert_closure_to_fn"), "Convert closure to fn", closure.param_list()?.syntax().text_range(), |builder| { @@ -252,7 +249,7 @@ pub(crate) fn convert_closure_to_fn(acc: &mut Assists, ctx: &AssistContext<'_>) ); fn_ = fn_.dedent(IndentLevel::from_token(&fn_.syntax().last_token().unwrap())); - builder.edit_file(ctx.file_id()); + builder.edit_file(ctx.vfs_file_id()); match &closure_name { Some((closure_decl, _, _)) => { fn_ = fn_.indent(closure_decl.indent_level()); @@ -509,9 +506,8 @@ fn wrap_capture_in_deref_if_needed( } fn capture_as_arg(ctx: &AssistContext<'_>, capture: &ClosureCapture) -> ast::Expr { - let place = - parse_expr_from_str(&capture.display_place_source_code(ctx.db()), ctx.file_id().edition()) - .expect("`display_place_source_code()` produced an invalid expr"); + let place = parse_expr_from_str(&capture.display_place_source_code(ctx.db()), ctx.edition()) + .expect("`display_place_source_code()` produced an invalid expr"); let needs_mut = match capture.kind() { CaptureKind::SharedRef => false, CaptureKind::MutableRef | CaptureKind::UniqueSharedRef => true, @@ -590,7 +586,7 @@ fn handle_call( let indent = if insert_newlines { first_arg_indent.unwrap().to_string() } else { String::new() }; // FIXME: This text manipulation seems risky. - let text = ctx.db().file_text(file_id.file_id()); + let text = ctx.db().file_text(file_id.file_id(ctx.db())).text(ctx.db()); let mut text = text[..u32::from(range.end()).try_into().unwrap()].trim_end(); if !text.ends_with(')') { return None; @@ -633,7 +629,7 @@ fn handle_call( to_insert.push(','); } - builder.edit_file(file_id); + builder.edit_file(file_id.file_id(ctx.db())); builder.insert(offset, to_insert); Some(()) @@ -1070,7 +1066,7 @@ fn foo() { r#" fn foo() { let (mut a, b) = (0.1, "abc"); - fn closure(p1: i32, p2: &mut bool, a: &mut f64, b: &&str) { + fn closure(p1: i32, p2: &mut bool, a: &mut f64, b: &&'static str) { *a = 1.2; let c = *b; } @@ -1102,7 +1098,7 @@ fn foo() { r#" fn foo() { let (mut a, b) = (0.1, "abc"); - fn closure(p1: i32, p2: &mut bool, a: &mut f64, b: &&str) { + fn closure(p1: i32, p2: &mut bool, a: &mut f64, b: &&'static str) { let _: &mut bool = p2; *a = 1.2; let c = *b; @@ -1140,7 +1136,7 @@ fn foo() { r#" fn foo() { let (mut a, b) = (0.1, "abc"); - fn closure(p1: i32, p2: &mut bool, a: &mut f64, b: &&str) { + fn closure(p1: i32, p2: &mut bool, a: &mut f64, b: &&'static str) { let _: &mut bool = p2; *a = 1.2; let c = *b; diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_comment_block.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_comment_block.rs index fbc0b9f6739ff..0d36a5ddb304c 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_comment_block.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_comment_block.rs @@ -1,10 +1,10 @@ use itertools::Itertools; use syntax::{ - ast::{self, edit::IndentLevel, Comment, CommentKind, CommentShape, Whitespace}, AstToken, Direction, SyntaxElement, TextRange, + ast::{self, Comment, CommentKind, CommentShape, Whitespace, edit::IndentLevel}, }; -use crate::{AssistContext, AssistId, AssistKind, Assists}; +use crate::{AssistContext, AssistId, Assists}; // Assist: line_to_block // @@ -38,7 +38,7 @@ fn block_to_line(acc: &mut Assists, comment: ast::Comment) -> Option<()> { let target = comment.syntax().text_range(); acc.add( - AssistId("block_to_line", AssistKind::RefactorRewrite), + AssistId::refactor_rewrite("block_to_line"), "Replace block comment with line comments", target, |edit| { @@ -80,7 +80,7 @@ fn line_to_block(acc: &mut Assists, comment: ast::Comment) -> Option<()> { ); acc.add( - AssistId("line_to_block", AssistKind::RefactorRewrite), + AssistId::refactor_rewrite("line_to_block"), "Replace line comments with a single block comment", target, |edit| { @@ -167,11 +167,7 @@ pub(crate) fn line_comment_text(indentation: IndentLevel, comm: ast::Comment) -> let contents = contents_without_prefix.strip_prefix(' ').unwrap_or(contents_without_prefix); // Don't add the indentation if the line is empty - if contents.is_empty() { - contents.to_owned() - } else { - indentation.to_string() + contents - } + if contents.is_empty() { contents.to_owned() } else { indentation.to_string() + contents } } #[cfg(test)] diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_comment_from_or_to_doc.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_comment_from_or_to_doc.rs index 5a9db67a5fb68..187cc74306e25 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_comment_from_or_to_doc.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_comment_from_or_to_doc.rs @@ -1,10 +1,10 @@ use itertools::Itertools; use syntax::{ - ast::{self, edit::IndentLevel, Comment, CommentPlacement, Whitespace}, AstToken, Direction, SyntaxElement, TextRange, + ast::{self, Comment, CommentPlacement, Whitespace, edit::IndentLevel}, }; -use crate::{AssistContext, AssistId, AssistKind, Assists}; +use crate::{AssistContext, AssistId, Assists}; // Assist: comment_to_doc // @@ -39,7 +39,7 @@ fn doc_to_comment(acc: &mut Assists, comment: ast::Comment) -> Option<()> { }; acc.add( - AssistId("doc_to_comment", AssistKind::RefactorRewrite), + AssistId::refactor_rewrite("doc_to_comment"), "Replace doc comment with comment", target, |edit| { @@ -86,7 +86,7 @@ fn comment_to_doc(acc: &mut Assists, comment: ast::Comment, style: CommentPlacem }; acc.add( - AssistId("comment_to_doc", AssistKind::RefactorRewrite), + AssistId::refactor_rewrite("comment_to_doc"), "Replace comment with doc comment", target, |edit| { diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_for_to_while_let.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_for_to_while_let.rs new file mode 100644 index 0000000000000..2d6a59a7c365c --- /dev/null +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_for_to_while_let.rs @@ -0,0 +1,422 @@ +use hir::{ + Name, + sym::{self}, +}; +use ide_db::{famous_defs::FamousDefs, syntax_helpers::suggest_name}; +use syntax::{ + AstNode, + ast::{self, HasLoopBody, edit::IndentLevel, make, syntax_factory::SyntaxFactory}, + syntax_editor::Position, +}; + +use crate::{AssistContext, AssistId, Assists}; + +// Assist: convert_for_loop_to_while_let +// +// Converts a for loop into a while let on the Iterator. +// +// ``` +// fn main() { +// let x = vec![1, 2, 3]; +// for$0 v in x { +// let y = v * 2; +// }; +// } +// ``` +// -> +// ``` +// fn main() { +// let x = vec![1, 2, 3]; +// let mut tmp = x.into_iter(); +// while let Some(v) = tmp.next() { +// let y = v * 2; +// }; +// } +// ``` +pub(crate) fn convert_for_loop_to_while_let( + acc: &mut Assists, + ctx: &AssistContext<'_>, +) -> Option<()> { + let for_loop = ctx.find_node_at_offset::()?; + let iterable = for_loop.iterable()?; + let pat = for_loop.pat()?; + let body = for_loop.loop_body()?; + if body.syntax().text_range().start() < ctx.offset() { + cov_mark::hit!(not_available_in_body); + return None; + } + + acc.add( + AssistId::refactor_rewrite("convert_for_loop_to_while_let"), + "Replace this for loop with `while let`", + for_loop.syntax().text_range(), + |builder| { + let make = SyntaxFactory::with_mappings(); + let mut editor = builder.make_editor(for_loop.syntax()); + + let (iterable, method) = if impls_core_iter(&ctx.sema, &iterable) { + (iterable, None) + } else if let Some((expr, method)) = is_ref_and_impls_iter_method(&ctx.sema, &iterable) + { + (expr, Some(make.name_ref(method.as_str()))) + } else if let ast::Expr::RefExpr(_) = iterable { + (make::expr_paren(iterable).into(), Some(make.name_ref("into_iter"))) + } else { + (iterable, Some(make.name_ref("into_iter"))) + }; + + let iterable = if let Some(method) = method { + make::expr_method_call(iterable, method, make::arg_list([])).into() + } else { + iterable + }; + + let mut new_name = suggest_name::NameGenerator::new_from_scope_locals( + ctx.sema.scope(for_loop.syntax()), + ); + let tmp_var = new_name.suggest_name("tmp"); + + let mut_expr = make.let_stmt( + make.ident_pat(false, true, make.name(&tmp_var)).into(), + None, + Some(iterable), + ); + let indent = IndentLevel::from_node(for_loop.syntax()); + editor.insert( + Position::before(for_loop.syntax()), + make::tokens::whitespace(format!("\n{indent}").as_str()), + ); + editor.insert(Position::before(for_loop.syntax()), mut_expr.syntax()); + + let opt_pat = make.tuple_struct_pat(make::ext::ident_path("Some"), [pat]); + let iter_next_expr = make.expr_method_call( + make.expr_path(make::ext::ident_path(&tmp_var)), + make.name_ref("next"), + make.arg_list([]), + ); + let cond = make.expr_let(opt_pat.into(), iter_next_expr.into()); + + let while_loop = make.expr_while_loop(cond.into(), body); + + editor.replace(for_loop.syntax(), while_loop.syntax()); + + editor.add_mappings(make.finish_with_mappings()); + builder.add_file_edits(ctx.vfs_file_id(), editor); + }, + ) +} + +/// If iterable is a reference where the expression behind the reference implements a method +/// returning an Iterator called iter or iter_mut (depending on the type of reference) then return +/// the expression behind the reference and the method name +fn is_ref_and_impls_iter_method( + sema: &hir::Semantics<'_, ide_db::RootDatabase>, + iterable: &ast::Expr, +) -> Option<(ast::Expr, hir::Name)> { + let ref_expr = match iterable { + ast::Expr::RefExpr(r) => r, + _ => return None, + }; + let wanted_method = Name::new_symbol_root(if ref_expr.mut_token().is_some() { + sym::iter_mut + } else { + sym::iter + }); + let expr_behind_ref = ref_expr.expr()?; + let ty = sema.type_of_expr(&expr_behind_ref)?.adjusted(); + let scope = sema.scope(iterable.syntax())?; + let krate = scope.krate(); + let iter_trait = FamousDefs(sema, krate).core_iter_Iterator()?; + + let has_wanted_method = ty + .iterate_method_candidates(sema.db, &scope, None, Some(&wanted_method), |func| { + if func.ret_type(sema.db).impls_trait(sema.db, iter_trait, &[]) { + return Some(()); + } + None + }) + .is_some(); + if !has_wanted_method { + return None; + } + + Some((expr_behind_ref, wanted_method)) +} + +/// Whether iterable implements core::Iterator +fn impls_core_iter(sema: &hir::Semantics<'_, ide_db::RootDatabase>, iterable: &ast::Expr) -> bool { + (|| { + let it_typ = sema.type_of_expr(iterable)?.adjusted(); + + let module = sema.scope(iterable.syntax())?.module(); + + let krate = module.krate(); + let iter_trait = FamousDefs(sema, krate).core_iter_Iterator()?; + cov_mark::hit!(test_already_impls_iterator); + Some(it_typ.impls_trait(sema.db, iter_trait, &[])) + })() + .unwrap_or(false) +} + +#[cfg(test)] +mod tests { + use crate::tests::{check_assist, check_assist_not_applicable}; + + use super::*; + + #[test] + fn each_to_for_simple_for() { + check_assist( + convert_for_loop_to_while_let, + r" +fn main() { + let mut x = vec![1, 2, 3]; + for $0v in x { + v *= 2; + }; +}", + r" +fn main() { + let mut x = vec![1, 2, 3]; + let mut tmp = x.into_iter(); + while let Some(v) = tmp.next() { + v *= 2; + }; +}", + ) + } + + #[test] + fn each_to_for_for_in_range() { + check_assist( + convert_for_loop_to_while_let, + r#" +//- minicore: range, iterators +impl core::iter::Iterator for core::ops::Range { + type Item = T; + + fn next(&mut self) -> Option { + None + } +} + +fn main() { + for $0x in 0..92 { + print!("{}", x); + } +}"#, + r#" +impl core::iter::Iterator for core::ops::Range { + type Item = T; + + fn next(&mut self) -> Option { + None + } +} + +fn main() { + let mut tmp = 0..92; + while let Some(x) = tmp.next() { + print!("{}", x); + } +}"#, + ) + } + + #[test] + fn each_to_for_not_available_in_body() { + cov_mark::check!(not_available_in_body); + check_assist_not_applicable( + convert_for_loop_to_while_let, + r" +fn main() { + let mut x = vec![1, 2, 3]; + for v in x { + $0v *= 2; + } +}", + ) + } + + #[test] + fn each_to_for_for_borrowed() { + check_assist( + convert_for_loop_to_while_let, + r#" +//- minicore: iterators +use core::iter::{Repeat, repeat}; + +struct S; +impl S { + fn iter(&self) -> Repeat { repeat(92) } + fn iter_mut(&mut self) -> Repeat { repeat(92) } +} + +fn main() { + let x = S; + for $0v in &x { + let a = v * 2; + } +} +"#, + r#" +use core::iter::{Repeat, repeat}; + +struct S; +impl S { + fn iter(&self) -> Repeat { repeat(92) } + fn iter_mut(&mut self) -> Repeat { repeat(92) } +} + +fn main() { + let x = S; + let mut tmp = x.iter(); + while let Some(v) = tmp.next() { + let a = v * 2; + } +} +"#, + ) + } + + #[test] + fn each_to_for_for_borrowed_no_iter_method() { + check_assist( + convert_for_loop_to_while_let, + r" +struct NoIterMethod; +fn main() { + let x = NoIterMethod; + for $0v in &x { + let a = v * 2; + } +} +", + r" +struct NoIterMethod; +fn main() { + let x = NoIterMethod; + let mut tmp = (&x).into_iter(); + while let Some(v) = tmp.next() { + let a = v * 2; + } +} +", + ) + } + + #[test] + fn each_to_for_for_borrowed_no_iter_method_mut() { + check_assist( + convert_for_loop_to_while_let, + r" +struct NoIterMethod; +fn main() { + let x = NoIterMethod; + for $0v in &mut x { + let a = v * 2; + } +} +", + r" +struct NoIterMethod; +fn main() { + let x = NoIterMethod; + let mut tmp = (&mut x).into_iter(); + while let Some(v) = tmp.next() { + let a = v * 2; + } +} +", + ) + } + + #[test] + fn each_to_for_for_borrowed_mut() { + check_assist( + convert_for_loop_to_while_let, + r#" +//- minicore: iterators +use core::iter::{Repeat, repeat}; + +struct S; +impl S { + fn iter(&self) -> Repeat { repeat(92) } + fn iter_mut(&mut self) -> Repeat { repeat(92) } +} + +fn main() { + let x = S; + for $0v in &mut x { + let a = v * 2; + } +} +"#, + r#" +use core::iter::{Repeat, repeat}; + +struct S; +impl S { + fn iter(&self) -> Repeat { repeat(92) } + fn iter_mut(&mut self) -> Repeat { repeat(92) } +} + +fn main() { + let x = S; + let mut tmp = x.iter_mut(); + while let Some(v) = tmp.next() { + let a = v * 2; + } +} +"#, + ) + } + + #[test] + fn each_to_for_for_borrowed_mut_behind_var() { + check_assist( + convert_for_loop_to_while_let, + r" +fn main() { + let mut x = vec![1, 2, 3]; + let y = &mut x; + for $0v in y { + *v *= 2; + } +}", + r" +fn main() { + let mut x = vec![1, 2, 3]; + let y = &mut x; + let mut tmp = y.into_iter(); + while let Some(v) = tmp.next() { + *v *= 2; + } +}", + ) + } + + #[test] + fn each_to_for_already_impls_iterator() { + cov_mark::check!(test_already_impls_iterator); + check_assist( + convert_for_loop_to_while_let, + r#" +//- minicore: iterators +fn main() { + for$0 a in core::iter::repeat(92).take(1) { + println!("{}", a); + } +} +"#, + r#" +fn main() { + let mut tmp = core::iter::repeat(92).take(1); + while let Some(a) = tmp.next() { + println!("{}", a); + } +} +"#, + ); + } +} diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_from_to_tryfrom.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_from_to_tryfrom.rs index dd2e9cbcb5f2d..db41927f1df2f 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_from_to_tryfrom.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_from_to_tryfrom.rs @@ -1,11 +1,11 @@ use ide_db::{famous_defs::FamousDefs, traits::resolve_target_trait}; use itertools::Itertools; use syntax::{ - ast::{self, make, AstNode, HasGenericArgs, HasName}, + ast::{self, AstNode, HasGenericArgs, HasName, make}, ted, }; -use crate::{AssistContext, AssistId, AssistKind, Assists}; +use crate::{AssistContext, AssistId, Assists}; // Assist: convert_from_to_tryfrom // @@ -71,7 +71,7 @@ pub(crate) fn convert_from_to_tryfrom(acc: &mut Assists, ctx: &AssistContext<'_> } acc.add( - AssistId("convert_from_to_tryfrom", AssistKind::RefactorRewrite), + AssistId::refactor_rewrite("convert_from_to_tryfrom"), "Convert From to TryFrom", impl_.syntax().text_range(), |builder| { @@ -80,7 +80,7 @@ pub(crate) fn convert_from_to_tryfrom(acc: &mut Assists, ctx: &AssistContext<'_> let from_fn_name = builder.make_mut(from_fn_name); let tail_expr = builder.make_mut(tail_expr); let return_exprs = return_exprs.map(|r| builder.make_mut(r)).collect_vec(); - let associated_items = builder.make_mut(associated_items).clone(); + let associated_items = builder.make_mut(associated_items); ted::replace( trait_ty.syntax(), @@ -128,6 +128,7 @@ fn wrap_ok(expr: ast::Expr) -> ast::Expr { make::expr_path(make::ext::ident_path("Ok")), make::arg_list(std::iter::once(expr)), ) + .into() } #[cfg(test)] diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_integer_literal.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_integer_literal.rs index fd3378e8c2636..846f4e9b258ae 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_integer_literal.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_integer_literal.rs @@ -1,6 +1,6 @@ -use syntax::{ast, ast::Radix, AstToken}; +use syntax::{AstToken, ast, ast::Radix}; -use crate::{AssistContext, AssistId, AssistKind, Assists, GroupLabel}; +use crate::{AssistContext, AssistId, Assists, GroupLabel}; // Assist: convert_integer_literal // @@ -47,7 +47,7 @@ pub(crate) fn convert_integer_literal(acc: &mut Assists, ctx: &AssistContext<'_> acc.add_group( &group_id, - AssistId("convert_integer_literal", AssistKind::RefactorInline), + AssistId::refactor_rewrite("convert_integer_literal"), label, range, |builder| builder.replace(range, converted), diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_into_to_from.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_into_to_from.rs index 8c59ef4314f06..b80276a95fbf5 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_into_to_from.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_into_to_from.rs @@ -1,7 +1,7 @@ use ide_db::{famous_defs::FamousDefs, helpers::mod_path_to_ast, traits::resolve_target_trait}; use syntax::ast::{self, AstNode, HasGenericArgs, HasName}; -use crate::{AssistContext, AssistId, AssistKind, Assists}; +use crate::{AssistContext, AssistId, Assists}; // FIXME: this should be a diagnostic @@ -85,7 +85,7 @@ pub(crate) fn convert_into_to_from(acc: &mut Assists, ctx: &AssistContext<'_>) - .filter(|name| name.text() == "self" || name.text() == "Self"); acc.add( - AssistId("convert_into_to_from", AssistKind::RefactorRewrite), + AssistId::refactor_rewrite("convert_into_to_from"), "Convert Into to From", impl_.syntax().text_range(), |builder| { diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_iter_for_each_to_for.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_iter_for_each_to_for.rs index 3c9a91741047e..3917ca197bb8c 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_iter_for_each_to_for.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_iter_for_each_to_for.rs @@ -1,12 +1,12 @@ -use hir::{sym, Name}; +use hir::{Name, sym}; use ide_db::famous_defs::FamousDefs; use stdx::format_to; use syntax::{ - ast::{self, edit_in_place::Indent, make, HasArgList, HasLoopBody}, AstNode, + ast::{self, HasArgList, HasLoopBody, edit_in_place::Indent, make}, }; -use crate::{AssistContext, AssistId, AssistKind, Assists}; +use crate::{AssistContext, AssistId, Assists}; // Assist: convert_iter_for_each_to_for // @@ -53,7 +53,7 @@ pub(crate) fn convert_iter_for_each_to_for( let range = stmt.as_ref().map_or(method.syntax(), AstNode::syntax).text_range(); acc.add( - AssistId("convert_iter_for_each_to_for", AssistKind::RefactorRewrite), + AssistId::refactor_rewrite("convert_iter_for_each_to_for"), "Replace this `Iterator::for_each` with a for loop", range, |builder| { @@ -108,7 +108,7 @@ pub(crate) fn convert_for_loop_with_for_each( } acc.add( - AssistId("convert_for_loop_with_for_each", AssistKind::RefactorRewrite), + AssistId::refactor_rewrite("convert_for_loop_with_for_each"), "Replace this for loop with `Iterator::for_each`", for_loop.syntax().text_range(), |builder| { @@ -154,9 +154,9 @@ fn is_ref_and_impls_iter_method( _ => return None, }; let wanted_method = Name::new_symbol_root(if ref_expr.mut_token().is_some() { - sym::iter_mut.clone() + sym::iter_mut } else { - sym::iter.clone() + sym::iter }); let expr_behind_ref = ref_expr.expr()?; let ty = sema.type_of_expr(&expr_behind_ref)?.adjusted(); diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_let_else_to_match.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_let_else_to_match.rs index 79c34c14da720..ebfed9f9ca991 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_let_else_to_match.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_let_else_to_match.rs @@ -1,10 +1,11 @@ -use hir::Semantics; -use ide_db::RootDatabase; -use syntax::ast::RangeItem; -use syntax::ast::{edit::AstNodeEdit, AstNode, HasName, LetStmt, Name, Pat}; use syntax::T; +use syntax::ast::RangeItem; +use syntax::ast::edit::IndentLevel; +use syntax::ast::edit_in_place::Indent; +use syntax::ast::syntax_factory::SyntaxFactory; +use syntax::ast::{self, AstNode, HasName, LetStmt, Pat}; -use crate::{AssistContext, AssistId, AssistKind, Assists}; +use crate::{AssistContext, AssistId, Assists}; // Assist: convert_let_else_to_match // @@ -25,159 +26,205 @@ use crate::{AssistContext, AssistId, AssistKind, Assists}; // } // ``` pub(crate) fn convert_let_else_to_match(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { - // should focus on else token to trigger + // Should focus on the `else` token to trigger let let_stmt = ctx .find_token_syntax_at_offset(T![else]) .and_then(|it| it.parent()?.parent()) .or_else(|| ctx.find_token_syntax_at_offset(T![let])?.parent())?; let let_stmt = LetStmt::cast(let_stmt)?; - let let_else_block = let_stmt.let_else()?.block_expr()?; - let let_init = let_stmt.initializer()?; + let else_block = let_stmt.let_else()?.block_expr()?; + let else_expr = if else_block.statements().next().is_none() { + else_block.tail_expr()? + } else { + else_block.into() + }; + let init = let_stmt.initializer()?; + // Ignore let stmt with type annotation if let_stmt.ty().is_some() { - // don't support let with type annotation return None; } let pat = let_stmt.pat()?; - let mut binders = Vec::new(); - binders_in_pat(&mut binders, &pat, &ctx.sema)?; - let target = let_stmt.syntax().text_range(); + let make = SyntaxFactory::with_mappings(); + let mut idents = Vec::default(); + let pat_without_mut = remove_mut_and_collect_idents(&make, &pat, &mut idents)?; + let bindings = idents + .into_iter() + .filter_map(|ref pat| { + // Identifiers which resolve to constants are not bindings + if ctx.sema.resolve_bind_pat_to_const(pat).is_none() { + Some((pat.name()?, pat.ref_token().is_none() && pat.mut_token().is_some())) + } else { + None + } + }) + .collect::>(); + acc.add( - AssistId("convert_let_else_to_match", AssistKind::RefactorRewrite), - "Convert let-else to let and match", - target, - |edit| { - let indent_level = let_stmt.indent_level().0 as usize; - let indent = " ".repeat(indent_level); - let indent1 = " ".repeat(indent_level + 1); + AssistId::refactor_rewrite("convert_let_else_to_match"), + if bindings.is_empty() { + "Convert let-else to match" + } else { + "Convert let-else to let and match" + }, + let_stmt.syntax().text_range(), + |builder| { + let mut editor = builder.make_editor(let_stmt.syntax()); - let binders_str = binders_to_str(&binders, false); - let binders_str_mut = binders_to_str(&binders, true); + let binding_paths = bindings + .iter() + .map(|(name, _)| make.expr_path(make.ident_path(&name.to_string()))) + .collect::>(); - let init_expr = let_init.syntax().text(); - let mut pat_no_mut = pat.syntax().text().to_string(); - // remove the mut from the pattern - for (b, ismut) in binders.iter() { - if *ismut { - pat_no_mut = pat_no_mut.replace(&format!("mut {b}"), &b.to_string()); - } - } + let binding_arm = make.match_arm( + pat_without_mut, + None, + // There are three possible cases: + // + // - No bindings: `None => {}` + // - Single binding: `Some(it) => it` + // - Multiple bindings: `Foo::Bar { a, b, .. } => (a, b)` + match binding_paths.len() { + 0 => make.expr_empty_block().into(), + + 1 => binding_paths[0].clone(), + _ => make.expr_tuple(binding_paths).into(), + }, + ); + let else_arm = make.match_arm(make.wildcard_pat().into(), None, else_expr); + let match_ = make.expr_match(init, make.match_arm_list([binding_arm, else_arm])); + match_.reindent_to(IndentLevel::from_node(let_stmt.syntax())); - let only_expr = let_else_block.statements().next().is_none(); - let branch2 = match &let_else_block.tail_expr() { - Some(tail) if only_expr => format!("{tail},"), - _ => let_else_block.syntax().text().to_string(), - }; - let replace = if binders.is_empty() { - format!( - "match {init_expr} {{ -{indent1}{pat_no_mut} => {binders_str} -{indent1}_ => {branch2} -{indent}}}" - ) + if bindings.is_empty() { + editor.replace(let_stmt.syntax(), match_.syntax()); } else { - format!( - "let {binders_str_mut} = match {init_expr} {{ -{indent1}{pat_no_mut} => {binders_str}, -{indent1}_ => {branch2} -{indent}}};" - ) - }; - edit.replace(target, replace); + let ident_pats = bindings + .into_iter() + .map(|(name, is_mut)| make.ident_pat(false, is_mut, name).into()) + .collect::>(); + let new_let_stmt = make.let_stmt( + if ident_pats.len() == 1 { + ident_pats[0].clone() + } else { + make.tuple_pat(ident_pats).into() + }, + None, + Some(match_.into()), + ); + editor.replace(let_stmt.syntax(), new_let_stmt.syntax()); + } + + editor.add_mappings(make.finish_with_mappings()); + builder.add_file_edits(ctx.vfs_file_id(), editor); }, ) } -/// Gets a list of binders in a pattern, and whether they are mut. -fn binders_in_pat( - acc: &mut Vec<(Name, bool)>, - pat: &Pat, - sem: &Semantics<'_, RootDatabase>, -) -> Option<()> { - use Pat::*; - match pat { - IdentPat(p) => { - let ident = p.name()?; - let ismut = p.ref_token().is_none() && p.mut_token().is_some(); - // check for const reference - if sem.resolve_bind_pat_to_const(p).is_none() { - acc.push((ident, ismut)); - } +fn remove_mut_and_collect_idents( + make: &SyntaxFactory, + pat: &ast::Pat, + acc: &mut Vec, +) -> Option { + Some(match pat { + ast::Pat::IdentPat(p) => { + acc.push(p.clone()); + let non_mut_pat = make.ident_pat( + p.ref_token().is_some(), + p.ref_token().is_some() && p.mut_token().is_some(), + p.name()?, + ); if let Some(inner) = p.pat() { - binders_in_pat(acc, &inner, sem)?; + non_mut_pat.set_pat(remove_mut_and_collect_idents(make, &inner, acc)); } - Some(()) + non_mut_pat.into() } - BoxPat(p) => p.pat().and_then(|p| binders_in_pat(acc, &p, sem)), - RestPat(_) | LiteralPat(_) | PathPat(_) | WildcardPat(_) | ConstBlockPat(_) => Some(()), - OrPat(p) => { - for p in p.pats() { - binders_in_pat(acc, &p, sem)?; - } - Some(()) + ast::Pat::BoxPat(p) => { + make.box_pat(remove_mut_and_collect_idents(make, &p.pat()?, acc)?).into() } - ParenPat(p) => p.pat().and_then(|p| binders_in_pat(acc, &p, sem)), - RangePat(p) => { - if let Some(st) = p.start() { - binders_in_pat(acc, &st, sem)? - } - if let Some(ed) = p.end() { - binders_in_pat(acc, &ed, sem)? - } - Some(()) + ast::Pat::OrPat(p) => make + .or_pat( + p.pats() + .map(|pat| remove_mut_and_collect_idents(make, &pat, acc)) + .collect::>>()?, + p.leading_pipe().is_some(), + ) + .into(), + ast::Pat::ParenPat(p) => { + make.paren_pat(remove_mut_and_collect_idents(make, &p.pat()?, acc)?).into() } - RecordPat(p) => { - for f in p.record_pat_field_list()?.fields() { - let pat = f.pat()?; - binders_in_pat(acc, &pat, sem)?; - } - Some(()) - } - RefPat(p) => p.pat().and_then(|p| binders_in_pat(acc, &p, sem)), - SlicePat(p) => { - for p in p.pats() { - binders_in_pat(acc, &p, sem)?; - } - Some(()) - } - TuplePat(p) => { - for p in p.fields() { - binders_in_pat(acc, &p, sem)?; - } - Some(()) - } - TupleStructPat(p) => { - for p in p.fields() { - binders_in_pat(acc, &p, sem)?; + ast::Pat::RangePat(p) => make + .range_pat( + if let Some(start) = p.start() { + Some(remove_mut_and_collect_idents(make, &start, acc)?) + } else { + None + }, + if let Some(end) = p.end() { + Some(remove_mut_and_collect_idents(make, &end, acc)?) + } else { + None + }, + ) + .into(), + ast::Pat::RecordPat(p) => make + .record_pat_with_fields( + p.path()?, + make.record_pat_field_list( + p.record_pat_field_list()? + .fields() + .map(|field| { + remove_mut_and_collect_idents(make, &field.pat()?, acc).map(|pat| { + if let Some(name_ref) = field.name_ref() { + make.record_pat_field(name_ref, pat) + } else { + make.record_pat_field_shorthand(pat) + } + }) + }) + .collect::>>()?, + p.record_pat_field_list()?.rest_pat(), + ), + ) + .into(), + ast::Pat::RefPat(p) => { + let inner = p.pat()?; + if let ast::Pat::IdentPat(ident) = inner { + acc.push(ident); + p.clone_for_update().into() + } else { + make.ref_pat(remove_mut_and_collect_idents(make, &inner, acc)?).into() } - Some(()) } + ast::Pat::SlicePat(p) => make + .slice_pat( + p.pats() + .map(|pat| remove_mut_and_collect_idents(make, &pat, acc)) + .collect::>>()?, + ) + .into(), + ast::Pat::TuplePat(p) => make + .tuple_pat( + p.fields() + .map(|field| remove_mut_and_collect_idents(make, &field, acc)) + .collect::>>()?, + ) + .into(), + ast::Pat::TupleStructPat(p) => make + .tuple_struct_pat( + p.path()?, + p.fields() + .map(|field| remove_mut_and_collect_idents(make, &field, acc)) + .collect::>>()?, + ) + .into(), + ast::Pat::RestPat(_) + | ast::Pat::LiteralPat(_) + | ast::Pat::PathPat(_) + | ast::Pat::WildcardPat(_) + | ast::Pat::ConstBlockPat(_) => pat.clone(), // don't support macro pat yet - MacroPat(_) => None, - } -} - -fn binders_to_str(binders: &[(Name, bool)], addmut: bool) -> String { - let vars = binders - .iter() - .map( - |(ident, ismut)| { - if *ismut && addmut { - format!("mut {ident}") - } else { - ident.to_string() - } - }, - ) - .collect::>() - .join(", "); - if binders.is_empty() { - String::from("{}") - } else if binders.len() == 1 { - vars - } else { - format!("({vars})") - } + ast::Pat::MacroPat(_) => return None, + }) } #[cfg(test)] diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_match_to_let_else.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_match_to_let_else.rs index fd159eb824d6d..efcbcef00e903 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_match_to_let_else.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_match_to_let_else.rs @@ -1,12 +1,13 @@ use ide_db::defs::{Definition, NameRefClass}; use syntax::{ + AstNode, SyntaxNode, ast::{self, HasName, Name}, - ted, AstNode, SyntaxNode, + ted, }; use crate::{ + AssistId, assist_context::{AssistContext, Assists}, - AssistId, AssistKind, }; // Assist: convert_match_to_let_else @@ -54,7 +55,7 @@ pub(crate) fn convert_match_to_let_else(acc: &mut Assists, ctx: &AssistContext<' let extracted_variable_positions = find_extracted_variable(ctx, &extracting_arm)?; acc.add( - AssistId("convert_match_to_let_else", AssistKind::RefactorRewrite), + AssistId::refactor_rewrite("convert_match_to_let_else"), "Convert match to let-else", let_stmt.syntax().text_range(), |builder| { diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_named_struct_to_tuple_struct.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_named_struct_to_tuple_struct.rs index 8d4ff84084bd3..ed8aad7b2c605 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_named_struct_to_tuple_struct.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_named_struct_to_tuple_struct.rs @@ -2,11 +2,12 @@ use either::Either; use ide_db::{defs::Definition, search::FileReference}; use itertools::Itertools; use syntax::{ + SyntaxKind, ast::{self, AstNode, HasAttrs, HasGenericParams, HasVisibility}, - match_ast, ted, SyntaxKind, + match_ast, ted, }; -use crate::{assist_context::SourceChangeBuilder, AssistContext, AssistId, AssistKind, Assists}; +use crate::{AssistContext, AssistId, Assists, assist_context::SourceChangeBuilder}; // Assist: convert_named_struct_to_tuple_struct // @@ -68,7 +69,7 @@ pub(crate) fn convert_named_struct_to_tuple_struct( }; acc.add( - AssistId("convert_named_struct_to_tuple_struct", AssistKind::RefactorRewrite), + AssistId::refactor_rewrite("convert_named_struct_to_tuple_struct"), "Convert to tuple struct", strukt.syntax().text_range(), |edit| { @@ -98,7 +99,7 @@ fn edit_struct_def( let tuple_fields = ast::make::tuple_field_list(tuple_fields); let record_fields_text_range = record_fields.syntax().text_range(); - edit.edit_file(ctx.file_id()); + edit.edit_file(ctx.vfs_file_id()); edit.replace(record_fields_text_range, tuple_fields.syntax().text()); if let Either::Left(strukt) = strukt { @@ -148,7 +149,7 @@ fn edit_struct_references( let usages = strukt_def.usages(&ctx.sema).include_self_refs().all(); for (file_id, refs) in usages { - edit.edit_file(file_id.file_id()); + edit.edit_file(file_id.file_id(ctx.db())); for r in refs { process_struct_name_reference(ctx, r, edit); } @@ -226,7 +227,7 @@ fn edit_field_references( let def = Definition::Field(field); let usages = def.usages(&ctx.sema).all(); for (file_id, refs) in usages { - edit.edit_file(file_id.file_id()); + edit.edit_file(file_id.file_id(ctx.db())); for r in refs { if let Some(name_ref) = r.name.as_name_ref() { // Only edit the field reference if it's part of a `.field` access diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_nested_function_to_closure.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_nested_function_to_closure.rs index ea2752b881857..c0fd69779aeae 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_nested_function_to_closure.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_nested_function_to_closure.rs @@ -1,4 +1,4 @@ -use ide_db::assists::{AssistId, AssistKind}; +use ide_db::assists::AssistId; use syntax::ast::{self, HasGenericParams, HasName}; use syntax::{AstNode, SyntaxKind}; @@ -44,7 +44,7 @@ pub(crate) fn convert_nested_function_to_closure( let param_list = function.param_list()?; acc.add( - AssistId("convert_nested_function_to_closure", AssistKind::RefactorRewrite), + AssistId::refactor_rewrite("convert_nested_function_to_closure"), "Convert nested function to closure", target, |edit| { diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_to_guarded_return.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_to_guarded_return.rs index b7a77644496fa..71a61f2db0011 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_to_guarded_return.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_to_guarded_return.rs @@ -5,20 +5,21 @@ use ide_db::{ ty_filter::TryEnum, }; use syntax::{ + AstNode, + SyntaxKind::{FN, FOR_EXPR, LOOP_EXPR, WHILE_EXPR, WHITESPACE}, + T, ast::{ self, edit::{AstNodeEdit, IndentLevel}, make, }, - ted, AstNode, - SyntaxKind::{FN, FOR_EXPR, LOOP_EXPR, WHILE_EXPR, WHITESPACE}, - T, + ted, }; use crate::{ + AssistId, assist_context::{AssistContext, Assists}, utils::invert_boolean_expression_legacy, - AssistId, AssistKind, }; // Assist: convert_to_guarded_return @@ -127,7 +128,7 @@ fn if_expr_to_guarded_return( let target = if_expr.syntax().text_range(); acc.add( - AssistId("convert_to_guarded_return", AssistKind::RefactorRewrite), + AssistId::refactor_rewrite("convert_to_guarded_return"), "Convert to guarded return", target, |edit| { @@ -209,7 +210,7 @@ fn let_stmt_to_guarded_return( }; acc.add( - AssistId("convert_to_guarded_return", AssistKind::RefactorRewrite), + AssistId::refactor_rewrite("convert_to_guarded_return"), "Convert to guarded return", target, |edit| { diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_tuple_return_type_to_struct.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_tuple_return_type_to_struct.rs index 91af9b05bbb85..cca4cb9d8f775 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_tuple_return_type_to_struct.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_tuple_return_type_to_struct.rs @@ -1,18 +1,19 @@ use either::Either; use hir::ModuleDef; use ide_db::{ - assists::{AssistId, AssistKind}, + FxHashSet, + assists::AssistId, defs::Definition, helpers::mod_path_to_ast, - imports::insert_use::{insert_use, ImportScope}, + imports::insert_use::{ImportScope, insert_use}, search::{FileReference, UsageSearchResult}, source_change::SourceChangeBuilder, syntax_helpers::node_ext::{for_each_tail_expr, walk_expr}, - FxHashSet, }; use syntax::{ - ast::{self, edit::IndentLevel, edit_in_place::Indent, make, HasName}, - match_ast, ted, AstNode, SyntaxNode, + AstNode, SyntaxNode, + ast::{self, HasName, edit::IndentLevel, edit_in_place::Indent, make}, + match_ast, ted, }; use crate::assist_context::{AssistContext, Assists}; @@ -62,7 +63,7 @@ pub(crate) fn convert_tuple_return_type_to_struct( let target = type_ref.syntax().text_range(); acc.add( - AssistId("convert_tuple_return_type_to_struct", AssistKind::RefactorRewrite), + AssistId::refactor_rewrite("convert_tuple_return_type_to_struct"), "Convert tuple return type to tuple struct", target, move |edit| { @@ -105,7 +106,7 @@ fn replace_usages( target_module: &hir::Module, ) { for (file_id, references) in usages.iter() { - edit.edit_file(file_id.file_id()); + edit.edit_file(file_id.file_id(ctx.db())); let refs_with_imports = augment_references_with_imports(edit, ctx, references, struct_name, target_module); diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs index f6e516db88835..777e366da956b 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs @@ -1,11 +1,12 @@ use either::Either; use ide_db::defs::{Definition, NameRefClass}; use syntax::{ + SyntaxKind, SyntaxNode, ast::{self, AstNode, HasAttrs, HasGenericParams, HasVisibility}, - match_ast, ted, SyntaxKind, SyntaxNode, + match_ast, ted, }; -use crate::{assist_context::SourceChangeBuilder, AssistContext, AssistId, AssistKind, Assists}; +use crate::{AssistContext, AssistId, Assists, assist_context::SourceChangeBuilder}; // Assist: convert_tuple_struct_to_named_struct // @@ -64,7 +65,7 @@ pub(crate) fn convert_tuple_struct_to_named_struct( let target = strukt.as_ref().either(|s| s.syntax(), |v| v.syntax()).text_range(); acc.add( - AssistId("convert_tuple_struct_to_named_struct", AssistKind::RefactorRewrite), + AssistId::refactor_rewrite("convert_tuple_struct_to_named_struct"), "Convert to named struct", target, |edit| { @@ -94,7 +95,7 @@ fn edit_struct_def( let record_fields = ast::make::record_field_list(record_fields); let tuple_fields_text_range = tuple_fields.syntax().text_range(); - edit.edit_file(ctx.file_id()); + edit.edit_file(ctx.vfs_file_id()); if let Either::Left(strukt) = strukt { if let Some(w) = strukt.where_clause() { @@ -141,7 +142,7 @@ fn edit_struct_references( match node { ast::TupleStructPat(tuple_struct_pat) => { let file_range = ctx.sema.original_range_opt(&node)?; - edit.edit_file(file_range.file_id); + edit.edit_file(file_range.file_id.file_id(ctx.db())); edit.replace( file_range.range, ast::make::record_pat_with_fields( @@ -196,7 +197,7 @@ fn edit_struct_references( }; for (file_id, refs) in usages { - edit.edit_file(file_id.file_id()); + edit.edit_file(file_id.file_id(ctx.db())); for r in refs { for node in r.name.syntax().ancestors() { if edit_node(edit, node).is_some() { @@ -221,7 +222,7 @@ fn edit_field_references( let def = Definition::Field(field); let usages = def.usages(&ctx.sema).all(); for (file_id, refs) in usages { - edit.edit_file(file_id.file_id()); + edit.edit_file(file_id.file_id(ctx.db())); for r in refs { if let Some(name_ref) = r.name.as_name_ref() { edit.replace(ctx.sema.original_range(name_ref.syntax()).range, name.text()); diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_two_arm_bool_match_to_matches_macro.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_two_arm_bool_match_to_matches_macro.rs index 6a5b11f542560..e582aa814ae14 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_two_arm_bool_match_to_matches_macro.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_two_arm_bool_match_to_matches_macro.rs @@ -3,7 +3,7 @@ use ide_db::RootDatabase; use stdx::format_to; use syntax::ast::{self, AstNode}; -use crate::{AssistContext, AssistId, AssistKind, Assists}; +use crate::{AssistContext, AssistId, Assists}; // Assist: convert_two_arm_bool_match_to_matches_macro // @@ -56,7 +56,7 @@ pub(crate) fn convert_two_arm_bool_match_to_matches_macro( let expr = match_expr.expr()?; acc.add( - AssistId("convert_two_arm_bool_match_to_matches_macro", AssistKind::RefactorRewrite), + AssistId::refactor_rewrite("convert_two_arm_bool_match_to_matches_macro"), "Convert to matches!", target_range, |builder| { diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_while_to_loop.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_while_to_loop.rs index beec64d13b689..dbe3ee0ed6039 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_while_to_loop.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_while_to_loop.rs @@ -3,18 +3,18 @@ use std::iter; use either::Either; use ide_db::syntax_helpers::node_ext::is_pattern_cond; use syntax::{ + AstNode, T, ast::{ - self, + self, HasLoopBody, edit::{AstNodeEdit, IndentLevel}, - make, HasLoopBody, + make, }, - AstNode, T, }; use crate::{ + AssistId, assist_context::{AssistContext, Assists}, utils::invert_boolean_expression_legacy, - AssistId, AssistKind, }; // Assist: convert_while_to_loop @@ -47,7 +47,7 @@ pub(crate) fn convert_while_to_loop(acc: &mut Assists, ctx: &AssistContext<'_>) let target = while_expr.syntax().text_range(); acc.add( - AssistId("convert_while_to_loop", AssistKind::RefactorRewrite), + AssistId::refactor_rewrite("convert_while_to_loop"), "Convert while to loop", target, |edit| { diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/destructure_struct_binding.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/destructure_struct_binding.rs index e34e50904875d..b8c647ac8b71d 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/destructure_struct_binding.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/destructure_struct_binding.rs @@ -1,14 +1,15 @@ -use hir::{sym, HasVisibility}; -use ide_db::text_edit::TextRange; +use hir::{HasVisibility, sym}; use ide_db::{ - assists::{AssistId, AssistKind}, + FxHashMap, FxHashSet, + assists::AssistId, defs::Definition, helpers::mod_path_to_ast, search::{FileReference, SearchScope}, - FxHashMap, FxHashSet, }; use itertools::Itertools; -use syntax::{ast, ted, AstNode, Edition, SmolStr, SyntaxNode, ToSmolStr}; +use syntax::ast::syntax_factory::SyntaxFactory; +use syntax::syntax_editor::SyntaxEditor; +use syntax::{AstNode, Edition, SmolStr, SyntaxNode, ToSmolStr, ast}; use crate::{ assist_context::{AssistContext, Assists, SourceChangeBuilder}, @@ -47,7 +48,7 @@ pub(crate) fn destructure_struct_binding(acc: &mut Assists, ctx: &AssistContext< let data = collect_data(ident_pat, ctx)?; acc.add( - AssistId("destructure_struct_binding", AssistKind::RefactorRewrite), + AssistId::refactor_rewrite("destructure_struct_binding"), "Destructure struct binding", data.ident_pat.syntax().text_range(), |edit| destructure_struct_binding_impl(ctx, edit, &data), @@ -62,13 +63,10 @@ fn destructure_struct_binding_impl( data: &StructEditData, ) { let field_names = generate_field_names(ctx, data); - let assignment_edit = build_assignment_edit(ctx, builder, data, &field_names); - let usage_edits = build_usage_edits(ctx, builder, data, &field_names.into_iter().collect()); - - assignment_edit.apply(); - for edit in usage_edits { - edit.apply(builder); - } + let mut editor = builder.make_editor(data.ident_pat.syntax()); + destructure_pat(ctx, &mut editor, data, &field_names); + update_usages(ctx, &mut editor, data, &field_names.into_iter().collect()); + builder.add_file_edits(ctx.vfs_file_id(), editor); } struct StructEditData { @@ -95,7 +93,7 @@ fn collect_data(ident_pat: ast::IdentPat, ctx: &AssistContext<'_>) -> Option, - builder: &mut SourceChangeBuilder, + editor: &mut SyntaxEditor, data: &StructEditData, field_names: &[(SmolStr, SmolStr)], -) -> AssignmentEdit { - let ident_pat = builder.make_mut(data.ident_pat.clone()); +) { + let ident_pat = &data.ident_pat; let struct_path = mod_path_to_ast(&data.struct_def_path, data.edition); let is_ref = ident_pat.ref_token().is_some(); let is_mut = ident_pat.mut_token().is_some(); + let make = SyntaxFactory::with_mappings(); let new_pat = match data.kind { hir::StructKind::Tuple => { let ident_pats = field_names.iter().map(|(_, new_name)| { - let name = ast::make::name(new_name); - ast::Pat::from(ast::make::ident_pat(is_ref, is_mut, name)) + let name = make.name(new_name); + ast::Pat::from(make.ident_pat(is_ref, is_mut, name)) }); - ast::Pat::TupleStructPat(ast::make::tuple_struct_pat(struct_path, ident_pats)) + ast::Pat::TupleStructPat(make.tuple_struct_pat(struct_path, ident_pats)) } hir::StructKind::Record => { let fields = field_names.iter().map(|(old_name, new_name)| { // Use shorthand syntax if possible if old_name == new_name && !is_mut { - ast::make::record_pat_field_shorthand(ast::make::name_ref(old_name)) + make.record_pat_field_shorthand( + make.ident_pat(false, false, make.name(old_name)).into(), + ) } else { - ast::make::record_pat_field( - ast::make::name_ref(old_name), - ast::Pat::IdentPat(ast::make::ident_pat( - is_ref, - is_mut, - ast::make::name(new_name), - )), + make.record_pat_field( + make.name_ref(old_name), + ast::Pat::IdentPat(make.ident_pat(is_ref, is_mut, make.name(new_name))), ) } }); + let field_list = make + .record_pat_field_list(fields, data.has_private_members.then_some(make.rest_pat())); - let field_list = ast::make::record_pat_field_list( - fields, - data.has_private_members.then_some(ast::make::rest_pat()), - ); - ast::Pat::RecordPat(ast::make::record_pat_with_fields(struct_path, field_list)) + ast::Pat::RecordPat(make.record_pat_with_fields(struct_path, field_list)) } - hir::StructKind::Unit => ast::make::path_pat(struct_path), + hir::StructKind::Unit => make.path_pat(struct_path), }; // If the binding is nested inside a record, we need to wrap the new // destructured pattern in a non-shorthand record field - let new_pat = if data.is_nested { - let record_pat_field = - ast::make::record_pat_field(ast::make::name_ref(&ident_pat.to_string()), new_pat) - .clone_for_update(); - NewPat::RecordPatField(record_pat_field) + let destructured_pat = if data.is_nested { + make.record_pat_field(make.name_ref(&ident_pat.to_string()), new_pat).syntax().clone() } else { - NewPat::Pat(new_pat.clone_for_update()) + new_pat.syntax().clone() }; - AssignmentEdit { old_pat: ident_pat, new_pat } + editor.add_mappings(make.finish_with_mappings()); + editor.replace(data.ident_pat.syntax(), destructured_pat); } fn generate_field_names(ctx: &AssistContext<'_>, data: &StructEditData) -> Vec<(SmolStr, SmolStr)> { @@ -267,85 +260,57 @@ fn new_field_name(base_name: SmolStr, names_in_scope: &FxHashSet) -> Sm name } -struct AssignmentEdit { - old_pat: ast::IdentPat, - new_pat: NewPat, -} - -enum NewPat { - Pat(ast::Pat), - RecordPatField(ast::RecordPatField), -} - -impl AssignmentEdit { - fn apply(self) { - match self.new_pat { - NewPat::Pat(pat) => ted::replace(self.old_pat.syntax(), pat.syntax()), - NewPat::RecordPatField(record_pat_field) => { - ted::replace(self.old_pat.syntax(), record_pat_field.syntax()) - } - } - } -} - -fn build_usage_edits( +fn update_usages( ctx: &AssistContext<'_>, - builder: &mut SourceChangeBuilder, + editor: &mut SyntaxEditor, data: &StructEditData, field_names: &FxHashMap, -) -> Vec { - data.usages +) { + let make = SyntaxFactory::with_mappings(); + let edits = data + .usages .iter() - .filter_map(|r| build_usage_edit(ctx, builder, data, r, field_names)) - .collect_vec() + .filter_map(|r| build_usage_edit(ctx, &make, data, r, field_names)) + .collect_vec(); + editor.add_mappings(make.finish_with_mappings()); + for (old, new) in edits { + editor.replace(old, new); + } } fn build_usage_edit( ctx: &AssistContext<'_>, - builder: &mut SourceChangeBuilder, + make: &SyntaxFactory, data: &StructEditData, usage: &FileReference, field_names: &FxHashMap, -) -> Option { +) -> Option<(SyntaxNode, SyntaxNode)> { match usage.name.syntax().ancestors().find_map(ast::FieldExpr::cast) { Some(field_expr) => Some({ let field_name: SmolStr = field_expr.name_ref()?.to_string().into(); let new_field_name = field_names.get(&field_name)?; - let new_expr = ast::make::expr_path(ast::make::ext::ident_path(new_field_name)); + let new_expr = make.expr_path(ast::make::ext::ident_path(new_field_name)); // If struct binding is a reference, we might need to deref field usages if data.is_ref { let (replace_expr, ref_data) = determine_ref_and_parens(ctx, &field_expr); - StructUsageEdit::IndexField( - builder.make_mut(replace_expr), - ref_data.wrap_expr(new_expr).clone_for_update(), + ( + replace_expr.syntax().clone_for_update(), + ref_data.wrap_expr(new_expr).syntax().clone_for_update(), ) } else { - StructUsageEdit::IndexField( - builder.make_mut(field_expr).into(), - new_expr.clone_for_update(), - ) + (field_expr.syntax().clone(), new_expr.syntax().clone()) } }), - None => Some(StructUsageEdit::Path(usage.range)), - } -} - -enum StructUsageEdit { - Path(TextRange), - IndexField(ast::Expr, ast::Expr), -} - -impl StructUsageEdit { - fn apply(self, edit: &mut SourceChangeBuilder) { - match self { - StructUsageEdit::Path(target_expr) => { - edit.replace(target_expr, "todo!()"); - } - StructUsageEdit::IndexField(target_expr, replace_with) => { - ted::replace(target_expr.syntax(), replace_with.syntax()) - } - } + None => Some(( + usage.name.syntax().as_node().unwrap().clone(), + make.expr_macro( + ast::make::ext::ident_path("todo"), + make.token_tree(syntax::SyntaxKind::L_PAREN, []), + ) + .syntax() + .clone(), + )), } } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/destructure_tuple_binding.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/destructure_tuple_binding.rs index 39142d606207c..f09389f8302f3 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/destructure_tuple_binding.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/destructure_tuple_binding.rs @@ -1,5 +1,5 @@ use ide_db::{ - assists::{AssistId, AssistKind}, + assists::AssistId, defs::Definition, search::{FileReference, SearchScope}, syntax_helpers::suggest_name, @@ -7,7 +7,7 @@ use ide_db::{ }; use itertools::Itertools; use syntax::{ - ast::{self, make, AstNode, FieldExpr, HasName, IdentPat}, + ast::{self, AstNode, FieldExpr, HasName, IdentPat, make}, ted, }; @@ -65,7 +65,7 @@ pub(crate) fn destructure_tuple_binding_impl( if with_sub_pattern { acc.add( - AssistId("destructure_tuple_binding_in_sub_pattern", AssistKind::RefactorRewrite), + AssistId::refactor_rewrite("destructure_tuple_binding_in_sub_pattern"), "Destructure tuple in sub-pattern", data.ident_pat.syntax().text_range(), |edit| destructure_tuple_edit_impl(ctx, edit, &data, true), @@ -73,7 +73,7 @@ pub(crate) fn destructure_tuple_binding_impl( } acc.add( - AssistId("destructure_tuple_binding", AssistKind::RefactorRewrite), + AssistId::refactor_rewrite("destructure_tuple_binding"), if with_sub_pattern { "Destructure tuple in place" } else { "Destructure tuple" }, data.ident_pat.syntax().text_range(), |edit| destructure_tuple_edit_impl(ctx, edit, &data, false), @@ -142,7 +142,7 @@ fn collect_data(ident_pat: IdentPat, ctx: &AssistContext<'_>) -> Option name, - None => name_generator.suggest_name(&format!("_{}", id)), + None => name_generator.suggest_name(&format!("_{id}")), } .to_string() }) diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/desugar_doc_comment.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/desugar_doc_comment.rs index d264928046707..74bb0ba3f6020 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/desugar_doc_comment.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/desugar_doc_comment.rs @@ -1,14 +1,14 @@ use either::Either; use itertools::Itertools; use syntax::{ - ast::{self, edit::IndentLevel, CommentPlacement, Whitespace}, AstToken, TextRange, + ast::{self, CommentPlacement, Whitespace, edit::IndentLevel}, }; use crate::{ + AssistContext, AssistId, Assists, handlers::convert_comment_block::{line_comment_text, relevant_line_comments}, utils::required_hashes, - AssistContext, AssistId, AssistKind, Assists, }; // Assist: desugar_doc_comment @@ -54,7 +54,7 @@ pub(crate) fn desugar_doc_comment(acc: &mut Assists, ctx: &AssistContext<'_>) -> }; acc.add( - AssistId("desugar_doc_comment", AssistKind::RefactorRewrite), + AssistId::refactor_rewrite("desugar_doc_comment"), "Desugar doc-comment to attribute macro", target, |edit| { diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/expand_glob_import.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/expand_glob_import.rs index 0b95d6177f904..307414c79715a 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/expand_glob_import.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/expand_glob_import.rs @@ -7,13 +7,14 @@ use ide_db::{ }; use stdx::never; use syntax::{ - ast::{self, make, Use, UseTree, VisibilityKind}, - ted, AstNode, Direction, SyntaxNode, SyntaxToken, T, + AstNode, Direction, SyntaxNode, SyntaxToken, T, + ast::{self, Use, UseTree, VisibilityKind, make}, + ted, }; use crate::{ + AssistId, assist_context::{AssistContext, Assists}, - AssistId, AssistKind, }; // Assist: expand_glob_import @@ -61,7 +62,7 @@ pub(crate) fn expand_glob_import(acc: &mut Assists, ctx: &AssistContext<'_>) -> let target = parent.either(|n| n.syntax().clone(), |n| n.syntax().clone()); acc.add( - AssistId("expand_glob_import", AssistKind::RefactorRewrite), + AssistId::refactor_rewrite("expand_glob_import"), "Expand glob import", target.text_range(), |builder| { @@ -122,7 +123,7 @@ pub(crate) fn expand_glob_reexport(acc: &mut Assists, ctx: &AssistContext<'_>) - let target = parent.either(|n| n.syntax().clone(), |n| n.syntax().clone()); acc.add( - AssistId("expand_glob_reexport", AssistKind::RefactorRewrite), + AssistId::refactor_rewrite("expand_glob_reexport"), "Expand glob reexport", target.text_range(), |builder| { diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/expand_rest_pattern.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/expand_rest_pattern.rs index c79a982c38d09..b71de5e00c6ad 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/expand_rest_pattern.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/expand_rest_pattern.rs @@ -1,28 +1,13 @@ use hir::{PathResolution, StructKind}; use ide_db::syntax_helpers::suggest_name::NameGenerator; use syntax::{ - ast::{self, make}, - match_ast, AstNode, ToSmolStr, + AstNode, ToSmolStr, + ast::{self, syntax_factory::SyntaxFactory}, + match_ast, }; use crate::{AssistContext, AssistId, Assists}; -pub(crate) fn expand_rest_pattern(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { - let rest_pat = ctx.find_node_at_offset::()?; - let parent = rest_pat.syntax().parent()?; - match_ast! { - match parent { - ast::RecordPatFieldList(it) => expand_record_rest_pattern(acc, ctx, it.syntax().parent().and_then(ast::RecordPat::cast)?, rest_pat), - ast::TupleStructPat(it) => expand_tuple_struct_rest_pattern(acc, ctx, it, rest_pat), - // FIXME - // ast::TuplePat(it) => (), - // FIXME - // ast::SlicePat(it) => (), - _ => return None, - } - } -} - // Assist: expand_record_rest_pattern // // Fills fields by replacing rest pattern in record patterns. @@ -49,7 +34,6 @@ fn expand_record_rest_pattern( rest_pat: ast::RestPat, ) -> Option<()> { let missing_fields = ctx.sema.record_pattern_missing_fields(&record_pat); - if missing_fields.is_empty() { cov_mark::hit!(no_missing_fields); return None; @@ -61,24 +45,35 @@ fn expand_record_rest_pattern( return None; } - let new_field_list = - make::record_pat_field_list(old_field_list.fields(), None).clone_for_update(); - for (f, _) in missing_fields.iter() { - let edition = ctx.sema.scope(record_pat.syntax())?.krate().edition(ctx.db()); - let field = make::record_pat_field_shorthand(make::name_ref( - &f.name(ctx.sema.db).display_no_db(edition).to_smolstr(), - )); - new_field_list.add_field(field.clone_for_update()); - } - - let target_range = rest_pat.syntax().text_range(); + let edition = ctx.sema.scope(record_pat.syntax())?.krate().edition(ctx.db()); acc.add( - AssistId("expand_record_rest_pattern", crate::AssistKind::RefactorRewrite), + AssistId::refactor_rewrite("expand_record_rest_pattern"), "Fill struct fields", - target_range, - move |builder| builder.replace_ast(old_field_list, new_field_list), + rest_pat.syntax().text_range(), + |builder| { + let make = SyntaxFactory::with_mappings(); + let mut editor = builder.make_editor(rest_pat.syntax()); + let new_field_list = make.record_pat_field_list(old_field_list.fields(), None); + for (f, _) in missing_fields.iter() { + let field = make.record_pat_field_shorthand( + make.ident_pat( + false, + false, + make.name(&f.name(ctx.sema.db).display_no_db(edition).to_smolstr()), + ) + .into(), + ); + new_field_list.add_field(field); + } + + editor.replace(old_field_list.syntax(), new_field_list.syntax()); + + editor.add_mappings(make.finish_with_mappings()); + builder.add_file_edits(ctx.vfs_file_id(), editor); + }, ) } + // Assist: expand_tuple_struct_rest_pattern // // Fills fields by replacing rest pattern in tuple struct patterns. @@ -133,34 +128,58 @@ fn expand_tuple_struct_rest_pattern( return None; } - let mut name_gen = NameGenerator::new_from_scope_locals(ctx.sema.scope(pat.syntax())); - let new_pat = make::tuple_struct_pat( - path, - pat.fields() - .take(prefix_count) - .chain(fields[prefix_count..fields.len() - suffix_count].iter().map(|f| { - make::ident_pat( - false, - false, - match name_gen.for_type(&f.ty(ctx.sema.db), ctx.sema.db, ctx.edition()) { - Some(name) => make::name(&name), - None => make::name(&format!("_{}", f.index())), - }, - ) - .into() - })) - .chain(pat.fields().skip(prefix_count + 1)), - ); - - let target_range = rest_pat.syntax().text_range(); acc.add( - AssistId("expand_tuple_struct_rest_pattern", crate::AssistKind::RefactorRewrite), + AssistId::refactor_rewrite("expand_tuple_struct_rest_pattern"), "Fill tuple struct fields", - target_range, - move |builder| builder.replace_ast(pat, new_pat), + rest_pat.syntax().text_range(), + |builder| { + let make = SyntaxFactory::with_mappings(); + let mut editor = builder.make_editor(rest_pat.syntax()); + + let mut name_gen = NameGenerator::new_from_scope_locals(ctx.sema.scope(pat.syntax())); + let new_pat = make.tuple_struct_pat( + path, + pat.fields() + .take(prefix_count) + .chain(fields[prefix_count..fields.len() - suffix_count].iter().map(|f| { + make.ident_pat( + false, + false, + match name_gen.for_type(&f.ty(ctx.sema.db), ctx.sema.db, ctx.edition()) + { + Some(name) => make.name(&name), + None => make.name(&format!("_{}", f.index())), + }, + ) + .into() + })) + .chain(pat.fields().skip(prefix_count + 1)), + ); + + editor.replace(pat.syntax(), new_pat.syntax()); + + editor.add_mappings(make.finish_with_mappings()); + builder.add_file_edits(ctx.vfs_file_id(), editor); + }, ) } +pub(crate) fn expand_rest_pattern(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { + let rest_pat = ctx.find_node_at_offset::()?; + let parent = rest_pat.syntax().parent()?; + match_ast! { + match parent { + ast::RecordPatFieldList(it) => expand_record_rest_pattern(acc, ctx, it.syntax().parent().and_then(ast::RecordPat::cast)?, rest_pat), + ast::TupleStructPat(it) => expand_tuple_struct_rest_pattern(acc, ctx, it, rest_pat), + // FIXME + // ast::TuplePat(it) => (), + // FIXME + // ast::SlicePat(it) => (), + _ => return None, + } + } +} + #[cfg(test)] mod tests { use super::*; diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_expressions_from_format_string.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_expressions_from_format_string.rs index e4d347ef16bd6..54699a9454f09 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_expressions_from_format_string.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_expressions_from_format_string.rs @@ -1,14 +1,15 @@ -use crate::{utils, AssistContext, Assists}; +use crate::{AssistContext, Assists, utils}; use ide_db::{ assists::{AssistId, AssistKind}, - syntax_helpers::format_string_exprs::{parse_format_exprs, Arg}, + syntax_helpers::format_string_exprs::{Arg, parse_format_exprs}, }; use itertools::Itertools; use syntax::{ - ast::{self, make}, - ted, AstNode, AstToken, NodeOrToken, + AstNode, AstToken, NodeOrToken, SyntaxKind::WHITESPACE, T, + ast::{self, make}, + ted, }; // Assist: extract_expressions_from_format_string @@ -52,6 +53,7 @@ pub(crate) fn extract_expressions_from_format_string( } else { AssistKind::QuickFix }, + None, ), "Extract format expressions", tt.syntax().text_range(), @@ -61,21 +63,28 @@ pub(crate) fn extract_expressions_from_format_string( // Extract existing arguments in macro let tokens = tt.token_trees_and_tokens().collect_vec(); - let existing_args = if let [_opening_bracket, NodeOrToken::Token(_format_string), _args_start_comma, tokens @ .., NodeOrToken::Token(_end_bracket)] = - tokens.as_slice() + let existing_args = if let [ + _opening_bracket, + NodeOrToken::Token(_format_string), + _args_start_comma, + tokens @ .., + NodeOrToken::Token(_end_bracket), + ] = tokens.as_slice() { - let args = tokens.split(|it| matches!(it, NodeOrToken::Token(t) if t.kind() == T![,])).map(|arg| { - // Strip off leading and trailing whitespace tokens - let arg = match arg.split_first() { - Some((NodeOrToken::Token(t), rest)) if t.kind() == WHITESPACE => rest, - _ => arg, - }; - let arg = match arg.split_last() { - Some((NodeOrToken::Token(t), rest)) if t.kind() == WHITESPACE => rest, - _ => arg, - }; - arg - }); + let args = tokens + .split(|it| matches!(it, NodeOrToken::Token(t) if t.kind() == T![,])) + .map(|arg| { + // Strip off leading and trailing whitespace tokens + let arg = match arg.split_first() { + Some((NodeOrToken::Token(t), rest)) if t.kind() == WHITESPACE => rest, + _ => arg, + }; + + match arg.split_last() { + Some((NodeOrToken::Token(t), rest)) if t.kind() == WHITESPACE => rest, + _ => arg, + } + }); args.collect() } else { @@ -100,7 +109,8 @@ pub(crate) fn extract_expressions_from_format_string( Arg::Expr(s) => { // insert arg // FIXME: use the crate's edition for parsing - let expr = ast::Expr::parse(&s, syntax::Edition::CURRENT_FIXME).syntax_node(); + let expr = + ast::Expr::parse(&s, syntax::Edition::CURRENT_FIXME).syntax_node(); let mut expr_tt = utils::tt_from_syntax(expr); new_tt_bits.append(&mut expr_tt); } @@ -120,7 +130,6 @@ pub(crate) fn extract_expressions_from_format_string( } } - // Insert new args let new_tt = make::token_tree(tt_delimiter, new_tt_bits).clone_for_update(); ted::replace(tt.syntax(), new_tt.syntax()); diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_function.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_function.rs index 6f4b886a28d75..e977798c4fd01 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_function.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_function.rs @@ -7,33 +7,34 @@ use hir::{ TypeInfo, TypeParam, }; use ide_db::{ + FxIndexSet, RootDatabase, assists::GroupLabel, defs::{Definition, NameRefClass}, famous_defs::FamousDefs, helpers::mod_path_to_ast, - imports::insert_use::{insert_use, ImportScope}, + imports::insert_use::{ImportScope, insert_use}, search::{FileReference, ReferenceCategory, SearchScope}, source_change::SourceChangeBuilder, syntax_helpers::node_ext::{ for_each_tail_expr, preorder_expr, walk_expr, walk_pat, walk_patterns_in_expr, }, - FxIndexSet, RootDatabase, }; use itertools::Itertools; use syntax::{ + Edition, SyntaxElement, + SyntaxKind::{self, COMMENT}, + SyntaxNode, SyntaxToken, T, TextRange, TextSize, TokenAtOffset, WalkEvent, ast::{ - self, edit::IndentLevel, edit_in_place::Indent, AstNode, AstToken, HasGenericParams, - HasName, + self, AstNode, AstToken, HasGenericParams, HasName, edit::IndentLevel, + edit_in_place::Indent, }, - match_ast, ted, Edition, SyntaxElement, - SyntaxKind::{self, COMMENT}, - SyntaxNode, SyntaxToken, TextRange, TextSize, TokenAtOffset, WalkEvent, T, + match_ast, ted, }; use crate::{ + AssistId, assist_context::{AssistContext, Assists, TreeMutator}, utils::generate_impl, - AssistId, }; // Assist: extract_function @@ -107,7 +108,7 @@ pub(crate) fn extract_function(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op acc.add_group( &GroupLabel("Extract into...".to_owned()), - AssistId("extract_function", crate::AssistKind::RefactorExtract), + AssistId::refactor_extract("extract_function"), "Extract into function", target_range, move |builder| { @@ -247,11 +248,8 @@ fn make_function_name(semantics_scope: &hir::SemanticsScope<'_>) -> ast::NameRef let mut names_in_scope = vec![]; semantics_scope.process_all_names(&mut |name, _| { names_in_scope.push( - name.display( - semantics_scope.db.upcast(), - semantics_scope.krate().edition(semantics_scope.db), - ) - .to_string(), + name.display(semantics_scope.db, semantics_scope.krate().edition(semantics_scope.db)) + .to_string(), ) }); @@ -750,7 +748,7 @@ impl FunctionBody { ast::Stmt::Item(_) => (), ast::Stmt::LetStmt(stmt) => { if let Some(pat) = stmt.pat() { - let _ = walk_pat(&pat, &mut |pat| { + _ = walk_pat(&pat, &mut |pat| { cb(pat); std::ops::ControlFlow::<(), ()>::Continue(()) }); @@ -799,15 +797,21 @@ impl FunctionBody { ) -> (FxIndexSet, Option) { let mut self_param = None; let mut res = FxIndexSet::default(); - let mut add_name_if_local = |name_ref: Option<_>| { - let local_ref = - match name_ref.and_then(|name_ref| NameRefClass::classify(sema, &name_ref)) { - Some( - NameRefClass::Definition(Definition::Local(local_ref), _) - | NameRefClass::FieldShorthand { local_ref, field_ref: _, adt_subst: _ }, - ) => local_ref, - _ => return, - }; + + fn local_from_name_ref( + sema: &Semantics<'_, RootDatabase>, + name_ref: ast::NameRef, + ) -> Option { + match NameRefClass::classify(sema, &name_ref) { + Some( + NameRefClass::Definition(Definition::Local(local_ref), _) + | NameRefClass::FieldShorthand { local_ref, field_ref: _, adt_subst: _ }, + ) => Some(local_ref), + _ => None, + } + } + + let mut add_name_if_local = |local_ref: Local| { let InFile { file_id, value } = local_ref.primary_source(sema.db).source; // locals defined inside macros are not relevant to us if !file_id.is_macro() { @@ -823,13 +827,20 @@ impl FunctionBody { }; self.walk_expr(&mut |expr| match expr { ast::Expr::PathExpr(path_expr) => { - add_name_if_local(path_expr.path().and_then(|it| it.as_single_name_ref())) + if let Some(local) = path_expr + .path() + .and_then(|it| it.as_single_name_ref()) + .and_then(|name_ref| local_from_name_ref(sema, name_ref)) + { + add_name_if_local(local); + } } ast::Expr::ClosureExpr(closure_expr) => { if let Some(body) = closure_expr.body() { body.syntax() .descendants() - .map(ast::NameRef::cast) + .filter_map(ast::NameRef::cast) + .filter_map(|name_ref| local_from_name_ref(sema, name_ref)) .for_each(&mut add_name_if_local); } } @@ -838,9 +849,31 @@ impl FunctionBody { tt.syntax() .descendants_with_tokens() .filter_map(SyntaxElement::into_token) - .filter(|it| matches!(it.kind(), SyntaxKind::IDENT | T![self])) - .flat_map(|t| sema.descend_into_macros_exact(t)) - .for_each(|t| add_name_if_local(t.parent().and_then(ast::NameRef::cast))); + .filter(|it| { + matches!(it.kind(), SyntaxKind::STRING | SyntaxKind::IDENT | T![self]) + }) + .for_each(|t| { + if ast::String::can_cast(t.kind()) { + if let Some(parts) = + ast::String::cast(t).and_then(|s| sema.as_format_args_parts(&s)) + { + parts + .into_iter() + .filter_map(|(_, value)| value.and_then(|it| it.left())) + .filter_map(|path| match path { + PathResolution::Local(local) => Some(local), + _ => None, + }) + .for_each(&mut add_name_if_local); + } + } else { + sema.descend_into_macros_exact(t) + .into_iter() + .filter_map(|t| t.parent().and_then(ast::NameRef::cast)) + .filter_map(|name_ref| local_from_name_ref(sema, name_ref)) + .for_each(&mut add_name_if_local); + } + }); } } _ => (), @@ -1428,10 +1461,10 @@ fn make_call(ctx: &AssistContext<'_>, fun: &Function, indent: IndentLevel) -> Sy let name = fun.name.clone(); let mut call_expr = if fun.self_param.is_some() { let self_arg = make::expr_path(make::ext::ident_path("self")); - make::expr_method_call(self_arg, name, args) + make::expr_method_call(self_arg, name, args).into() } else { let func = make::expr_path(make::path_unqualified(make::path_segment(name))); - make::expr_call(func, args) + make::expr_call(func, args).into() }; let handler = FlowHandler::from_ret_ty(fun, &ret_ty); @@ -1689,11 +1722,7 @@ fn make_where_clause( }) .peekable(); - if predicates.peek().is_some() { - Some(make::where_clause(predicates)) - } else { - None - } + if predicates.peek().is_some() { Some(make::where_clause(predicates)) } else { None } } fn pred_is_required( @@ -1917,14 +1946,15 @@ fn make_body(ctx: &AssistContext<'_>, old_indent: IndentLevel, fun: &Function) - }; let func = make::expr_path(make::ext::ident_path(constructor)); let args = make::arg_list(iter::once(tail_expr)); - make::expr_call(func, args) + make::expr_call(func, args).into() }) } FlowHandler::If { .. } => { let controlflow_continue = make::expr_call( make::expr_path(make::path_from_text("ControlFlow::Continue")), make::arg_list([make::ext::expr_unit()]), - ); + ) + .into(); with_tail_expr(block, controlflow_continue) } FlowHandler::IfOption { .. } => { @@ -1934,12 +1964,12 @@ fn make_body(ctx: &AssistContext<'_>, old_indent: IndentLevel, fun: &Function) - FlowHandler::MatchOption { .. } => map_tail_expr(block, |tail_expr| { let some = make::expr_path(make::ext::ident_path("Some")); let args = make::arg_list(iter::once(tail_expr)); - make::expr_call(some, args) + make::expr_call(some, args).into() }), FlowHandler::MatchResult { .. } => map_tail_expr(block, |tail_expr| { let ok = make::expr_path(make::ext::ident_path("Ok")); let args = make::arg_list(iter::once(tail_expr)); - make::expr_call(ok, args) + make::expr_call(ok, args).into() }), } } @@ -2127,17 +2157,18 @@ fn make_rewritten_flow(handler: &FlowHandler, arg_expr: Option) -> Op FlowHandler::If { .. } => make::expr_call( make::expr_path(make::path_from_text("ControlFlow::Break")), make::arg_list([make::ext::expr_unit()]), - ), + ) + .into(), FlowHandler::IfOption { .. } => { let expr = arg_expr.unwrap_or_else(make::ext::expr_unit); let args = make::arg_list([expr]); - make::expr_call(make::expr_path(make::ext::ident_path("Some")), args) + make::expr_call(make::expr_path(make::ext::ident_path("Some")), args).into() } FlowHandler::MatchOption { .. } => make::expr_path(make::ext::ident_path("None")), FlowHandler::MatchResult { .. } => { let expr = arg_expr.unwrap_or_else(make::ext::expr_unit); let args = make::arg_list([expr]); - make::expr_call(make::expr_path(make::ext::ident_path("Err")), args) + make::expr_call(make::expr_path(make::ext::ident_path("Err")), args).into() } }; Some(make::expr_return(Some(value)).clone_for_update()) @@ -5002,7 +5033,7 @@ fn main() { fun_name(bar); } -fn $0fun_name(bar: &str) { +fn $0fun_name(bar: &'static str) { m!(bar); } "#, @@ -6134,6 +6165,28 @@ fn $0fun_name(a: i32, b: i32, c: i32, x: i32) -> i32 { ); } + #[test] + fn fmt_macro_argument() { + check_assist( + extract_function, + r#" +//- minicore: fmt +fn existing(a: i32, b: i32, c: i32) { + $0print!("{a}{}{}", b, "{c}");$0 +} +"#, + r#" +fn existing(a: i32, b: i32, c: i32) { + fun_name(a, b); +} + +fn $0fun_name(a: i32, b: i32) { + print!("{a}{}{}", b, "{c}"); +} +"#, + ); + } + #[test] fn in_left_curly_is_not_applicable() { cov_mark::check!(extract_function_in_braces_is_not_applicable); diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_module.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_module.rs index 6e3be0ce69279..b82b7984d4a45 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_module.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_module.rs @@ -1,25 +1,26 @@ use std::iter; use either::Either; -use hir::{HasSource, HirFileIdExt, ModuleSource}; +use hir::{HasSource, ModuleSource}; use ide_db::{ - assists::{AssistId, AssistKind}, + FileId, FxHashMap, FxHashSet, + assists::AssistId, defs::{Definition, NameClass, NameRefClass}, search::{FileReference, SearchScope}, - FileId, FxHashMap, FxHashSet, }; use itertools::Itertools; use smallvec::SmallVec; use syntax::{ + AstNode, + SyntaxKind::{self, WHITESPACE}, + SyntaxNode, TextRange, TextSize, algo::find_node_at_range, ast::{ - self, + self, HasVisibility, edit::{AstNodeEdit, IndentLevel}, - make, HasVisibility, + make, }, - match_ast, ted, AstNode, - SyntaxKind::{self, WHITESPACE}, - SyntaxNode, TextRange, TextSize, + match_ast, ted, }; use crate::{AssistContext, Assists}; @@ -90,7 +91,7 @@ pub(crate) fn extract_module(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opti let old_item_indent = module.body_items[0].indent_level(); acc.add( - AssistId("extract_module", AssistKind::RefactorExtract), + AssistId::refactor_extract("extract_module"), "Extract Module", module.text_range, |builder| { @@ -112,7 +113,7 @@ pub(crate) fn extract_module(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opti let (usages_to_be_processed, record_fields, use_stmts_to_be_inserted) = module.get_usages_and_record_fields(ctx); - builder.edit_file(ctx.file_id()); + builder.edit_file(ctx.vfs_file_id()); use_stmts_to_be_inserted.into_iter().for_each(|(_, use_stmt)| { builder.insert(ctx.selection_trimmed().end(), format!("\n{use_stmt}")); }); @@ -124,7 +125,7 @@ pub(crate) fn extract_module(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opti let mut usages_to_be_processed_for_cur_file = vec![]; for (file_id, usages) in usages_to_be_processed { - if file_id == ctx.file_id() { + if file_id == ctx.vfs_file_id() { usages_to_be_processed_for_cur_file = usages; continue; } @@ -134,7 +135,7 @@ pub(crate) fn extract_module(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opti } } - builder.edit_file(ctx.file_id()); + builder.edit_file(ctx.vfs_file_id()); for (text_range, usage) in usages_to_be_processed_for_cur_file { builder.replace(text_range, usage); } @@ -363,7 +364,7 @@ impl Module { None }); - refs_in_files.entry(file_id.file_id()).or_default().extend(usages); + refs_in_files.entry(file_id.file_id(ctx.db())).or_default().extend(usages); } } @@ -457,6 +458,7 @@ impl Module { let selection_range = ctx.selection_trimmed(); let file_id = ctx.file_id(); let usage_res = def.usages(&ctx.sema).in_scope(&SearchScope::single_file(file_id)).all(); + let file = ctx.sema.parse(file_id); // track uses which does not exists in `Use` @@ -483,7 +485,7 @@ impl Module { ctx, curr_parent_module, selection_range, - file_id.file_id(), + file_id.file_id(ctx.db()), ); // Find use stmt that use def in current file @@ -670,7 +672,7 @@ fn check_def_in_mod_and_out_sel( let have_same_parent = if let Some(ast_module) = &curr_parent_module { ctx.sema.to_module_def(ast_module).is_some_and(|it| it == $x.module(ctx.db())) } else { - source.file_id.original_file(ctx.db()) == curr_file_id + source.file_id.original_file(ctx.db()).file_id(ctx.db()) == curr_file_id }; let in_sel = !selection_range.contains_range(source.value.syntax().text_range()); @@ -686,7 +688,7 @@ fn check_def_in_mod_and_out_sel( (Some(ast_module), Some(hir_module)) => { ctx.sema.to_module_def(ast_module).is_some_and(|it| it == hir_module) } - _ => source.file_id.original_file(ctx.db()) == curr_file_id, + _ => source.file_id.original_file(ctx.db()).file_id(ctx.db()) == curr_file_id, }; if have_same_parent { @@ -1159,8 +1161,8 @@ mod modname { } #[test] - fn test_extract_module_for_impl_not_having_corresponding_adt_in_selection_and_not_in_same_mod_but_with_super( - ) { + fn test_extract_module_for_impl_not_having_corresponding_adt_in_selection_and_not_in_same_mod_but_with_super() + { check_assist( extract_module, r" diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_struct_from_enum_variant.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_struct_from_enum_variant.rs index d4f2ea3bd941b..b9c42285d257b 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_struct_from_enum_variant.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_struct_from_enum_variant.rs @@ -3,25 +3,26 @@ use std::iter; use either::Either; use hir::{HasCrate, Module, ModuleDef, Name, Variant}; use ide_db::{ + FxHashSet, RootDatabase, defs::Definition, helpers::mod_path_to_ast, - imports::insert_use::{insert_use, ImportScope, InsertUseConfig}, + imports::insert_use::{ImportScope, InsertUseConfig, insert_use}, path_transform::PathTransform, search::FileReference, - FxHashSet, RootDatabase, }; use itertools::Itertools; use syntax::{ - ast::{ - self, edit::IndentLevel, edit_in_place::Indent, make, AstNode, HasAttrs, HasGenericParams, - HasName, HasVisibility, - }, - match_ast, ted, Edition, SyntaxElement, + Edition, SyntaxElement, SyntaxKind::*, SyntaxNode, T, + ast::{ + self, AstNode, HasAttrs, HasGenericParams, HasName, HasVisibility, edit::IndentLevel, + edit_in_place::Indent, make, + }, + match_ast, ted, }; -use crate::{assist_context::SourceChangeBuilder, AssistContext, AssistId, AssistKind, Assists}; +use crate::{AssistContext, AssistId, Assists, assist_context::SourceChangeBuilder}; // Assist: extract_struct_from_enum_variant // @@ -54,7 +55,7 @@ pub(crate) fn extract_struct_from_enum_variant( let enum_hir = ctx.sema.to_def(&enum_ast)?; let target = variant.syntax().text_range(); acc.add( - AssistId("extract_struct_from_enum_variant", AssistKind::RefactorRewrite), + AssistId::refactor_rewrite("extract_struct_from_enum_variant"), "Extract struct from enum variant", target, |builder| { @@ -73,7 +74,7 @@ pub(crate) fn extract_struct_from_enum_variant( def_file_references = Some(references); continue; } - builder.edit_file(file_id.file_id()); + builder.edit_file(file_id.file_id(ctx.db())); let processed = process_references( ctx, builder, @@ -86,7 +87,7 @@ pub(crate) fn extract_struct_from_enum_variant( apply_references(ctx.config.insert_use, path, node, import, edition) }); } - builder.edit_file(ctx.file_id()); + builder.edit_file(ctx.vfs_file_id()); let variant = builder.make_mut(variant.clone()); if let Some(references) = def_file_references { diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_type_alias.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_type_alias.rs index 67b8f5e505031..d843ac64567aa 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_type_alias.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_type_alias.rs @@ -1,11 +1,11 @@ use either::Either; use ide_db::syntax_helpers::node_ext::walk_ty; use syntax::{ - ast::{self, edit::IndentLevel, make, AstNode, HasGenericArgs, HasGenericParams, HasName}, + ast::{self, AstNode, HasGenericArgs, HasGenericParams, HasName, edit::IndentLevel, make}, syntax_editor, }; -use crate::{AssistContext, AssistId, AssistKind, Assists}; +use crate::{AssistContext, AssistId, Assists}; // Assist: extract_type_alias // @@ -40,7 +40,7 @@ pub(crate) fn extract_type_alias(acc: &mut Assists, ctx: &AssistContext<'_>) -> let target = ty.syntax().text_range(); acc.add( - AssistId("extract_type_alias", AssistKind::RefactorExtract), + AssistId::refactor_extract("extract_type_alias"), "Extract type as type alias", target, |builder| { @@ -87,7 +87,7 @@ pub(crate) fn extract_type_alias(acc: &mut Assists, ctx: &AssistContext<'_>) -> ], ); - builder.add_file_edits(ctx.file_id(), edit); + builder.add_file_edits(ctx.vfs_file_id(), edit); }, ) } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_variable.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_variable.rs index 7b6f76d00452e..31e84e9adcf44 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_variable.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_variable.rs @@ -1,19 +1,19 @@ use hir::{HirDisplay, TypeInfo}; use ide_db::{ assists::GroupLabel, - syntax_helpers::{suggest_name, LexedStr}, + syntax_helpers::{LexedStr, suggest_name}, }; use syntax::{ + NodeOrToken, SyntaxKind, SyntaxNode, T, algo::ancestors_at_offset, ast::{ - self, edit::IndentLevel, edit_in_place::Indent, make, syntax_factory::SyntaxFactory, - AstNode, + self, AstNode, edit::IndentLevel, edit_in_place::Indent, make, + syntax_factory::SyntaxFactory, }, syntax_editor::Position, - NodeOrToken, SyntaxKind, SyntaxNode, T, }; -use crate::{utils::is_body_const, AssistContext, AssistId, AssistKind, Assists}; +use crate::{AssistContext, AssistId, Assists, utils::is_body_const}; // Assist: extract_variable // @@ -170,7 +170,7 @@ pub(crate) fn extract_variable(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op |edit| { let (var_name, expr_replace) = kind.get_name_and_expr(ctx, &to_extract); - let make = SyntaxFactory::new(); + let make = SyntaxFactory::with_mappings(); let mut editor = edit.make_editor(&expr_replace); let pat_name = make.name(&var_name); @@ -263,7 +263,7 @@ pub(crate) fn extract_variable(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op } editor.add_mappings(make.finish_with_mappings()); - edit.add_file_edits(ctx.file_id(), editor); + edit.add_file_edits(ctx.vfs_file_id(), editor); edit.rename(); }, ); @@ -311,7 +311,7 @@ impl ExtractionKind { ExtractionKind::Static => "extract_static", }; - AssistId(s, AssistKind::RefactorExtract) + AssistId::refactor_extract(s) } fn label(&self) -> &'static str { @@ -378,7 +378,7 @@ fn get_literal_name(ctx: &AssistContext<'_>, expr: &ast::Expr) -> Option return None; } - match LexedStr::single_token(ctx.file_id().edition(), &inner) { + match LexedStr::single_token(ctx.edition(), &inner) { Some((SyntaxKind::IDENT, None)) => Some(inner), _ => None, } @@ -631,7 +631,7 @@ fn main() { "#, r#" fn main() { - const $0HELLO: &str = "hello"; + const $0HELLO: &'static str = "hello"; } "#, "Extract into constant", @@ -726,7 +726,7 @@ fn main() { "#, r#" fn main() { - static $0HELLO: &str = "hello"; + static $0HELLO: &'static str = "hello"; } "#, "Extract into static", @@ -2528,13 +2528,13 @@ fn foo() { check_assist_by_label( extract_variable, r#" -struct Entry(&str); +struct Entry<'a>(&'a str); fn foo() { let entry = Entry($0"Hello"$0); } "#, r#" -struct Entry(&str); +struct Entry<'a>(&'a str); fn foo() { let $0hello = "Hello"; let entry = Entry(hello); @@ -2546,13 +2546,13 @@ fn foo() { check_assist_by_label( extract_variable, r#" -struct Entry(&str); +struct Entry<'a>(&'a str); fn foo() { let entry = Entry($0"Hello"$0); } "#, r#" -struct Entry(&str); +struct Entry<'a>(&'a str); fn foo() { const $0HELLO: &str = "Hello"; let entry = Entry(HELLO); @@ -2564,13 +2564,13 @@ fn foo() { check_assist_by_label( extract_variable, r#" -struct Entry(&str); +struct Entry<'a>(&'a str); fn foo() { let entry = Entry($0"Hello"$0); } "#, r#" -struct Entry(&str); +struct Entry<'a>(&'a str); fn foo() { static $0HELLO: &str = "Hello"; let entry = Entry(HELLO); @@ -2587,13 +2587,13 @@ fn foo() { check_assist_by_label( extract_variable, r#" -struct Entry { message: &str } +struct Entry<'a> { message: &'a str } fn foo() { let entry = Entry { message: $0"Hello"$0 }; } "#, r#" -struct Entry { message: &str } +struct Entry<'a> { message: &'a str } fn foo() { let $0message = "Hello"; let entry = Entry { message }; @@ -2605,13 +2605,13 @@ fn foo() { check_assist_by_label( extract_variable, r#" -struct Entry { message: &str } +struct Entry<'a> { message: &'a str } fn foo() { let entry = Entry { message: $0"Hello"$0 }; } "#, r#" -struct Entry { message: &str } +struct Entry<'a> { message: &'a str } fn foo() { const $0HELLO: &str = "Hello"; let entry = Entry { message: HELLO }; @@ -2623,13 +2623,13 @@ fn foo() { check_assist_by_label( extract_variable, r#" -struct Entry { message: &str } +struct Entry<'a> { message: &'a str } fn foo() { let entry = Entry { message: $0"Hello"$0 }; } "#, r#" -struct Entry { message: &str } +struct Entry<'a> { message: &'a str } fn foo() { static $0HELLO: &str = "Hello"; let entry = Entry { message: HELLO }; diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/fix_visibility.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/fix_visibility.rs index 47e4a68293f0c..19e0a73f33356 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/fix_visibility.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/fix_visibility.rs @@ -1,13 +1,11 @@ -use hir::{ - db::HirDatabase, HasSource, HasVisibility, HirFileIdExt, ModuleDef, PathResolution, ScopeDef, -}; +use hir::{HasSource, HasVisibility, ModuleDef, PathResolution, ScopeDef, db::HirDatabase}; use ide_db::FileId; use syntax::{ - ast::{self, edit_in_place::HasVisibilityEdit, make, HasVisibility as _}, AstNode, TextRange, + ast::{self, HasVisibility as _, edit_in_place::HasVisibilityEdit, make}, }; -use crate::{AssistContext, AssistId, AssistKind, Assists}; +use crate::{AssistContext, AssistId, Assists}; // FIXME: this really should be a fix for diagnostic, rather than an assist. @@ -78,7 +76,7 @@ fn add_vis_to_referenced_module_def(acc: &mut Assists, ctx: &AssistContext<'_>) } }; - acc.add(AssistId("fix_visibility", AssistKind::QuickFix), assist_label, target, |edit| { + acc.add(AssistId::quick_fix("fix_visibility"), assist_label, target, |edit| { edit.edit_file(target_file); let vis_owner = edit.make_mut(vis_owner); @@ -131,8 +129,8 @@ fn add_vis_to_referenced_record_field(acc: &mut Assists, ctx: &AssistContext<'_> target_name.display(ctx.db(), current_edition) ); - acc.add(AssistId("fix_visibility", AssistKind::QuickFix), assist_label, target, |edit| { - edit.edit_file(target_file.file_id()); + acc.add(AssistId::quick_fix("fix_visibility"), assist_label, target, |edit| { + edit.edit_file(target_file.file_id(ctx.db())); let vis_owner = edit.make_mut(vis_owner); vis_owner.set_visibility(Some(missing_visibility.clone_for_update())); @@ -162,7 +160,7 @@ fn target_data_for_def( Some(( ast::AnyHasVisibility::new(source.value), range, - file_id.original_file(db.upcast()).file_id(), + file_id.original_file(db).file_id(db), )) } @@ -203,9 +201,9 @@ fn target_data_for_def( hir::ModuleDef::Module(m) => { target_name = m.name(db); let in_file_source = m.declaration_source(db)?; - let file_id = in_file_source.file_id.original_file(db.upcast()); + let file_id = in_file_source.file_id.original_file(db); let range = in_file_source.value.syntax().text_range(); - (ast::AnyHasVisibility::new(in_file_source.value), range, file_id.file_id()) + (ast::AnyHasVisibility::new(in_file_source.value), range, file_id.file_id(db)) } // FIXME hir::ModuleDef::Macro(_) => return None, diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/flip_binexpr.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/flip_binexpr.rs index 818a868fe3449..247e8109abc9d 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/flip_binexpr.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/flip_binexpr.rs @@ -1,9 +1,9 @@ use syntax::{ - ast::{self, syntax_factory::SyntaxFactory, AstNode, BinExpr}, SyntaxKind, T, + ast::{self, AstNode, BinExpr, syntax_factory::SyntaxFactory}, }; -use crate::{AssistContext, AssistId, AssistKind, Assists}; +use crate::{AssistContext, AssistId, Assists}; // Assist: flip_binexpr // @@ -43,19 +43,19 @@ pub(crate) fn flip_binexpr(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option } acc.add( - AssistId("flip_binexpr", AssistKind::RefactorRewrite), + AssistId::refactor_rewrite("flip_binexpr"), "Flip binary expression", op_token.text_range(), |builder| { let mut editor = builder.make_editor(&expr.syntax().parent().unwrap()); - let make = SyntaxFactory::new(); + let make = SyntaxFactory::with_mappings(); if let FlipAction::FlipAndReplaceOp(binary_op) = action { editor.replace(op_token, make.token(binary_op)) }; editor.replace(lhs.syntax(), rhs.syntax()); editor.replace(rhs.syntax(), lhs.syntax()); editor.add_mappings(make.finish_with_mappings()); - builder.add_file_edits(ctx.file_id(), editor); + builder.add_file_edits(ctx.vfs_file_id(), editor); }, ) } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/flip_comma.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/flip_comma.rs index dd27269b001c6..1e95d4772349e 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/flip_comma.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/flip_comma.rs @@ -1,11 +1,11 @@ use syntax::{ + AstNode, Direction, NodeOrToken, SyntaxKind, SyntaxToken, T, algo::non_trivia_sibling, ast::{self, syntax_factory::SyntaxFactory}, syntax_editor::SyntaxMapping, - AstNode, Direction, NodeOrToken, SyntaxKind, SyntaxToken, T, }; -use crate::{AssistContext, AssistId, AssistKind, Assists}; +use crate::{AssistContext, AssistId, Assists}; // Assist: flip_comma // @@ -40,7 +40,7 @@ pub(crate) fn flip_comma(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<( } let target = comma.text_range(); - acc.add(AssistId("flip_comma", AssistKind::RefactorRewrite), "Flip comma", target, |builder| { + acc.add(AssistId::refactor_rewrite("flip_comma"), "Flip comma", target, |builder| { let parent = comma.parent().unwrap(); let mut editor = builder.make_editor(&parent); @@ -55,7 +55,7 @@ pub(crate) fn flip_comma(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<( editor.replace(next.clone(), prev.clone()); } - builder.add_file_edits(ctx.file_id(), editor); + builder.add_file_edits(ctx.vfs_file_id(), editor); }) } @@ -101,7 +101,7 @@ fn flip_tree(tree: ast::TokenTree, comma: SyntaxToken) -> (ast::TokenTree, Synta ] .concat(); - let make = SyntaxFactory::new(); + let make = SyntaxFactory::with_mappings(); let new_token_tree = make.token_tree(tree.left_delimiter_token().unwrap().kind(), result); (new_token_tree, make.finish_with_mappings()) } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/flip_or_pattern.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/flip_or_pattern.rs index d9fa03e7191b3..4829f5bec206b 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/flip_or_pattern.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/flip_or_pattern.rs @@ -1,10 +1,10 @@ use syntax::{ + Direction, T, algo::non_trivia_sibling, ast::{self, AstNode}, - Direction, T, }; -use crate::{AssistContext, AssistId, AssistKind, Assists}; +use crate::{AssistContext, AssistId, Assists}; // Assist: flip_or_pattern // @@ -31,17 +31,12 @@ pub(crate) fn flip_or_pattern(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opt let after = non_trivia_sibling(pipe.clone().into(), Direction::Next)?.into_node()?; let target = pipe.text_range(); - acc.add( - AssistId("flip_or_pattern", AssistKind::RefactorRewrite), - "Flip patterns", - target, - |builder| { - let mut editor = builder.make_editor(parent.syntax()); - editor.replace(before.clone(), after.clone()); - editor.replace(after, before); - builder.add_file_edits(ctx.file_id(), editor); - }, - ) + acc.add(AssistId::refactor_rewrite("flip_or_pattern"), "Flip patterns", target, |builder| { + let mut editor = builder.make_editor(parent.syntax()); + editor.replace(before.clone(), after.clone()); + editor.replace(after, before); + builder.add_file_edits(ctx.vfs_file_id(), editor); + }) } #[cfg(test)] diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/flip_trait_bound.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/flip_trait_bound.rs index 3528f5e81324d..9756268c7cc33 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/flip_trait_bound.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/flip_trait_bound.rs @@ -1,10 +1,10 @@ use syntax::{ + Direction, T, algo::non_trivia_sibling, ast::{self, AstNode}, - Direction, T, }; -use crate::{AssistContext, AssistId, AssistKind, Assists}; +use crate::{AssistContext, AssistId, Assists}; // Assist: flip_trait_bound // @@ -29,14 +29,14 @@ pub(crate) fn flip_trait_bound(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op let target = plus.text_range(); acc.add( - AssistId("flip_trait_bound", AssistKind::RefactorRewrite), + AssistId::refactor_rewrite("flip_trait_bound"), "Flip trait bounds", target, |builder| { let mut editor = builder.make_editor(parent.syntax()); editor.replace(before.clone(), after.clone()); editor.replace(after, before); - builder.add_file_edits(ctx.file_id(), editor); + builder.add_file_edits(ctx.vfs_file_id(), editor); }, ) } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_constant.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_constant.rs index 7f7db07152d34..fce0ce399463c 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_constant.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_constant.rs @@ -1,14 +1,13 @@ use crate::assist_context::{AssistContext, Assists}; -use hir::{HasVisibility, HirDisplay, HirFileIdExt, Module}; +use hir::{HasVisibility, HirDisplay, Module}; use ide_db::{ - assists::{AssistId, AssistKind}, - base_db::Upcast, - defs::{Definition, NameRefClass}, FileId, + assists::AssistId, + defs::{Definition, NameRefClass}, }; use syntax::{ - ast::{self, edit::IndentLevel, NameRef}, AstNode, Direction, SyntaxKind, TextSize, + ast::{self, NameRef, edit::IndentLevel}, }; // Assist: generate_constant @@ -88,17 +87,12 @@ pub(crate) fn generate_constant(acc: &mut Assists, ctx: &AssistContext<'_>) -> O ); let text = get_text_for_generate_constant(not_exist_name_ref, indent, outer_exists, type_name)?; - acc.add( - AssistId("generate_constant", AssistKind::QuickFix), - "Generate constant", - target, - |builder| { - if let Some(file_id) = file_id { - builder.edit_file(file_id); - } - builder.insert(offset, format!("{text}{post_string}")); - }, - ) + acc.add(AssistId::quick_fix("generate_constant"), "Generate constant", target, |builder| { + if let Some(file_id) = file_id { + builder.edit_file(file_id); + } + builder.insert(offset, format!("{text}{post_string}")); + }) } fn get_text_for_generate_constant( @@ -128,7 +122,7 @@ fn target_data_for_generate_constant( return None; } let in_file_source = current_module.definition_source(ctx.sema.db); - let file_id = in_file_source.file_id.original_file(ctx.sema.db.upcast()); + let file_id = in_file_source.file_id.original_file(ctx.sema.db); match in_file_source.value { hir::ModuleSource::Module(module_node) => { let indent = IndentLevel::from_node(module_node.syntax()); @@ -140,9 +134,9 @@ fn target_data_for_generate_constant( .any(|it| it.kind() == SyntaxKind::WHITESPACE && it.to_string().contains('\n')); let post_string = if siblings_has_newline { format!("{indent}") } else { format!("\n{indent}") }; - Some((offset, indent + 1, Some(file_id.file_id()), post_string)) + Some((offset, indent + 1, Some(file_id.file_id(ctx.db())), post_string)) } - _ => Some((TextSize::from(0), 0.into(), Some(file_id.file_id()), "\n".into())), + _ => Some((TextSize::from(0), 0.into(), Some(file_id.file_id(ctx.db())), "\n".into())), } } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_default_from_enum_variant.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_default_from_enum_variant.rs index a6e3d49e0d1ae..6198dbc4ed99b 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_default_from_enum_variant.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_default_from_enum_variant.rs @@ -1,7 +1,7 @@ -use ide_db::{famous_defs::FamousDefs, RootDatabase}; +use ide_db::{RootDatabase, famous_defs::FamousDefs}; use syntax::ast::{self, AstNode, HasName}; -use crate::{AssistContext, AssistId, AssistKind, Assists}; +use crate::{AssistContext, AssistId, Assists}; // Assist: generate_default_from_enum_variant // @@ -47,7 +47,7 @@ pub(crate) fn generate_default_from_enum_variant( let target = variant.syntax().text_range(); acc.add( - AssistId("generate_default_from_enum_variant", AssistKind::Generate), + AssistId::generate("generate_default_from_enum_variant"), "Generate `Default` impl from this enum variant", target, |edit| { @@ -77,11 +77,7 @@ fn existing_default_impl( let default_trait = FamousDefs(sema, krate).core_default_Default()?; let enum_type = enum_.ty(sema.db); - if enum_type.impls_trait(sema.db, default_trait, &[]) { - Some(()) - } else { - None - } + if enum_type.impls_trait(sema.db, default_trait, &[]) { Some(()) } else { None } } #[cfg(test)] diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_default_from_new.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_default_from_new.rs index dc27af5cbed20..79a78ab3698b8 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_default_from_new.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_default_from_new.rs @@ -1,13 +1,13 @@ use ide_db::famous_defs::FamousDefs; use stdx::format_to; use syntax::{ - ast::{self, make, HasGenericParams, HasName, Impl}, AstNode, + ast::{self, HasGenericParams, HasName, Impl, make}, }; use crate::{ - assist_context::{AssistContext, Assists}, AssistId, + assist_context::{AssistContext, Assists}, }; // Assist: generate_default_from_new @@ -65,7 +65,7 @@ pub(crate) fn generate_default_from_new(acc: &mut Assists, ctx: &AssistContext<' let insert_location = impl_.syntax().text_range(); acc.add( - AssistId("generate_default_from_new", crate::AssistKind::Generate), + AssistId::generate("generate_default_from_new"), "Generate a Default impl from a new fn", insert_location, move |builder| { diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_methods.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_methods.rs index 220259451e860..ca66cb69dcc05 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_methods.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_methods.rs @@ -1,15 +1,15 @@ use hir::{HasCrate, HasVisibility}; -use ide_db::{path_transform::PathTransform, FxHashSet}; +use ide_db::{FxHashSet, path_transform::PathTransform}; use syntax::{ ast::{ - self, edit_in_place::Indent, make, AstNode, HasGenericParams, HasName, HasVisibility as _, + self, AstNode, HasGenericParams, HasName, HasVisibility as _, edit_in_place::Indent, make, }, ted, }; use crate::{ - utils::{convert_param_list_to_arg_list, find_struct_impl}, AssistContext, AssistId, AssistKind, Assists, GroupLabel, + utils::{convert_param_list_to_arg_list, find_struct_impl}, }; // Assist: generate_delegate_methods @@ -92,19 +92,18 @@ pub(crate) fn generate_delegate_methods(acc: &mut Assists, ctx: &AssistContext<' }); } methods.sort_by(|(a, _), (b, _)| a.cmp(b)); - for (name, method) in methods { + for (index, (name, method)) in methods.into_iter().enumerate() { let adt = ast::Adt::Struct(strukt.clone()); let name = name.display(ctx.db(), current_edition).to_string(); // if `find_struct_impl` returns None, that means that a function named `name` already exists. let Some(impl_def) = find_struct_impl(ctx, &adt, std::slice::from_ref(&name)) else { continue; }; - let field = make::ext::field_from_idents(["self", &field_name])?; acc.add_group( &GroupLabel("Generate delegate methods…".to_owned()), - AssistId("generate_delegate_methods", AssistKind::Generate), + AssistId("generate_delegate_methods", AssistKind::Generate, Some(index)), format!("Generate delegate for `{field_name}.{name}()`",), target, |edit| { @@ -141,7 +140,8 @@ pub(crate) fn generate_delegate_methods(acc: &mut Assists, ctx: &AssistContext<' .map(convert_param_list_to_arg_list) .unwrap_or_else(|| make::arg_list([])); - let tail_expr = make::expr_method_call(field, make::name_ref(&name), arg_list); + let tail_expr = + make::expr_method_call(field, make::name_ref(&name), arg_list).into(); let tail_expr_finished = if is_async { make::expr_await(tail_expr) } else { tail_expr }; let body = make::block_expr([], Some(tail_expr_finished)); diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_trait.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_trait.rs index 55b860d0ff545..848c63810a4b0 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_trait.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_trait.rs @@ -5,25 +5,25 @@ use crate::{ utils::convert_param_list_to_arg_list, }; use either::Either; -use hir::{db::HirDatabase, HasVisibility}; +use hir::{HasVisibility, db::HirDatabase}; use ide_db::{ + FxHashMap, FxHashSet, assists::{AssistId, GroupLabel}, path_transform::PathTransform, syntax_helpers::suggest_name, - FxHashMap, FxHashSet, }; use itertools::Itertools; use syntax::{ + AstNode, Edition, NodeOrToken, SmolStr, SyntaxKind, ToSmolStr, ast::{ - self, - edit::{self, AstNodeEdit}, - edit_in_place::AttrsOwnerEdit, - make, AssocItem, GenericArgList, GenericParamList, HasAttrs, HasGenericArgs, + self, AssocItem, GenericArgList, GenericParamList, HasAttrs, HasGenericArgs, HasGenericParams, HasName, HasTypeBounds, HasVisibility as astHasVisibility, Path, WherePred, + edit::{self, AstNodeEdit}, + edit_in_place::AttrsOwnerEdit, + make, }, ted::{self, Position}, - AstNode, Edition, NodeOrToken, SmolStr, SyntaxKind, ToSmolStr, }; // Assist: generate_delegate_trait @@ -124,7 +124,7 @@ impl Field { ) -> Option { let db = ctx.sema.db; - let module = ctx.sema.file_to_module_def(ctx.file_id())?; + let module = ctx.sema.file_to_module_def(ctx.vfs_file_id())?; let edition = module.krate().edition(ctx.db()); let (name, range, ty) = match f { @@ -201,7 +201,7 @@ impl Struct { pub(crate) fn delegate(&self, field: Field, acc: &mut Assists, ctx: &AssistContext<'_>) { let db = ctx.db(); - for delegee in &field.impls { + for (index, delegee) in field.impls.iter().enumerate() { let trait_ = match delegee { Delegee::Bound(b) => b, Delegee::Impls(i, _) => i, @@ -229,7 +229,11 @@ impl Struct { acc.add_group( &GroupLabel(format!("Generate delegate trait impls for field `{}`", field.name)), - AssistId("generate_delegate_trait", ide_db::assists::AssistKind::Generate), + AssistId( + "generate_delegate_trait", + ide_db::assists::AssistKind::Generate, + Some(index), + ), format!("Generate delegate trait impl `{}` for `{}`", signature, field.name), field.range, |builder| { @@ -747,7 +751,7 @@ fn func_assoc_item( } .clone_for_update(); - let body = make::block_expr(vec![], Some(call)).clone_for_update(); + let body = make::block_expr(vec![], Some(call.into())).clone_for_update(); let func = make::fn_( item.visibility(), item.name()?, diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_deref.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_deref.rs index e558bb6da89bc..c7b97dcd231d1 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_deref.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_deref.rs @@ -1,16 +1,16 @@ use std::fmt::Display; use hir::{ModPath, ModuleDef}; -use ide_db::{famous_defs::FamousDefs, RootDatabase}; +use ide_db::{RootDatabase, famous_defs::FamousDefs}; use syntax::{ - ast::{self, HasName}, AstNode, Edition, SyntaxNode, + ast::{self, HasName}, }; use crate::{ + AssistId, assist_context::{AssistContext, Assists, SourceChangeBuilder}, utils::generate_trait_impl_text, - AssistId, AssistKind, }; // Assist: generate_deref @@ -65,7 +65,7 @@ fn generate_record_deref(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<( let field_name = field.name()?; let target = field.syntax().text_range(); acc.add( - AssistId("generate_deref", AssistKind::Generate), + AssistId::generate("generate_deref"), format!("Generate `{deref_type_to_generate:?}` impl using `{field_name}`"), target, |edit| { @@ -106,7 +106,7 @@ fn generate_tuple_deref(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<() let field_type = field.ty()?; let target = field.syntax().text_range(); acc.add( - AssistId("generate_deref", AssistKind::Generate), + AssistId::generate("generate_deref"), format!("Generate `{deref_type_to_generate:?}` impl using `{field}`"), target, |edit| { diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_derive.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_derive.rs index 53ba144ba9e3b..73a69c82fbcdd 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_derive.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_derive.rs @@ -1,9 +1,9 @@ use syntax::{ - ast::{self, edit_in_place::AttrsOwnerEdit, make, AstNode, HasAttrs}, T, + ast::{self, AstNode, HasAttrs, edit_in_place::AttrsOwnerEdit, make}, }; -use crate::{AssistContext, AssistId, AssistKind, Assists}; +use crate::{AssistContext, AssistId, Assists}; // Assist: generate_derive // @@ -39,7 +39,7 @@ pub(crate) fn generate_derive(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opt Some(tt) => Some(tt.right_delimiter_token()?), }; - acc.add(AssistId("generate_derive", AssistKind::Generate), "Add `#[derive]`", target, |edit| { + acc.add(AssistId::generate("generate_derive"), "Add `#[derive]`", target, |edit| { match derive_attr { None => { let derive = make::attr_outer(make::meta_token_tree( diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_documentation_template.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_documentation_template.rs index 862be791d1737..d4d1b3490cb64 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_documentation_template.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_documentation_template.rs @@ -1,11 +1,12 @@ use hir::{AsAssocItem, HasVisibility, ModuleDef, Visibility}; -use ide_db::assists::{AssistId, AssistKind}; +use ide_db::assists::AssistId; use itertools::Itertools; use stdx::{format_to, to_lower_snake_case}; use syntax::{ + AstNode, AstToken, Edition, algo::skip_whitespace_token, - ast::{self, edit::IndentLevel, HasDocComments, HasGenericArgs, HasName}, - match_ast, AstNode, AstToken, Edition, + ast::{self, HasDocComments, HasGenericArgs, HasName, edit::IndentLevel}, + match_ast, }; use crate::assist_context::{AssistContext, Assists}; @@ -55,7 +56,7 @@ pub(crate) fn generate_documentation_template( let indent_level = IndentLevel::from_node(parent_syntax); acc.add( - AssistId("generate_documentation_template", AssistKind::Generate), + AssistId::generate("generate_documentation_template"), "Generate a documentation template", text_range, |builder| { @@ -114,7 +115,7 @@ pub(crate) fn generate_doc_example(acc: &mut Assists, ctx: &AssistContext<'_>) - let indent_level = IndentLevel::from_node(&node); acc.add( - AssistId("generate_doc_example", AssistKind::Generate), + AssistId::generate("generate_doc_example"), "Generate a documentation example", node.text_range(), |builder| { diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_enum_is_method.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_enum_is_method.rs index b5d3ed4369708..3e6d0bec68a6f 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_enum_is_method.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_enum_is_method.rs @@ -4,8 +4,8 @@ use syntax::ast::HasVisibility; use syntax::ast::{self, AstNode, HasName}; use crate::{ + AssistContext, AssistId, Assists, utils::{add_method_to_adt, find_struct_impl}, - AssistContext, AssistId, AssistKind, Assists, }; // Assist: generate_enum_is_method @@ -57,7 +57,7 @@ pub(crate) fn generate_enum_is_method(acc: &mut Assists, ctx: &AssistContext<'_> let target = variant.syntax().text_range(); acc.add_group( &GroupLabel("Generate an `is_`,`as_`, or `try_into_` for this enum variant".to_owned()), - AssistId("generate_enum_is_method", AssistKind::Generate), + AssistId::generate("generate_enum_is_method"), "Generate an `is_` method for this enum variant", target, |builder| { diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_enum_projection_method.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_enum_projection_method.rs index ee643ce9a4ac3..3974bcf618756 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_enum_projection_method.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_enum_projection_method.rs @@ -5,8 +5,8 @@ use syntax::ast::HasVisibility; use syntax::ast::{self, AstNode, HasName}; use crate::{ + AssistContext, AssistId, Assists, utils::{add_method_to_adt, find_struct_impl}, - AssistContext, AssistId, AssistKind, Assists, }; // Assist: generate_enum_try_into_method @@ -153,7 +153,7 @@ fn generate_enum_projection_method( let target = variant.syntax().text_range(); acc.add_group( &GroupLabel("Generate an `is_`,`as_`, or `try_into_` for this enum variant".to_owned()), - AssistId(assist_id, AssistKind::Generate), + AssistId::generate(assist_id), assist_description, target, |builder| { diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_enum_variant.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_enum_variant.rs index bb08cb904ead7..3514ebb811ee2 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_enum_variant.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_enum_variant.rs @@ -1,8 +1,9 @@ use hir::{HasSource, HirDisplay, InRealFile}; -use ide_db::assists::{AssistId, AssistKind}; +use ide_db::assists::AssistId; use syntax::{ - ast::{self, syntax_factory::SyntaxFactory, HasArgList}, - match_ast, AstNode, SyntaxNode, + AstNode, SyntaxNode, + ast::{self, HasArgList, syntax_factory::SyntaxFactory}, + match_ast, }; use crate::assist_context::{AssistContext, Assists}; @@ -57,21 +58,16 @@ pub(crate) fn generate_enum_variant(acc: &mut Assists, ctx: &AssistContext<'_>) let db = ctx.db(); let InRealFile { file_id, value: enum_node } = e.source(db)?.original_ast_node_rooted(db)?; - acc.add( - AssistId("generate_enum_variant", AssistKind::Generate), - "Generate variant", - target, - |builder| { - let mut editor = builder.make_editor(enum_node.syntax()); - let make = SyntaxFactory::new(); - let field_list = parent.make_field_list(ctx, &make); - let variant = make.variant(None, make.name(&name_ref.text()), field_list, None); - if let Some(it) = enum_node.variant_list() { - it.add_variant(&mut editor, &variant); - } - builder.add_file_edits(file_id, editor); - }, - ) + acc.add(AssistId::generate("generate_enum_variant"), "Generate variant", target, |builder| { + let mut editor = builder.make_editor(enum_node.syntax()); + let make = SyntaxFactory::with_mappings(); + let field_list = parent.make_field_list(ctx, &make); + let variant = make.variant(None, make.name(&name_ref.text()), field_list, None); + if let Some(it) = enum_node.variant_list() { + it.add_variant(&mut editor, &variant); + } + builder.add_file_edits(file_id.file_id(ctx.db()), editor); + }) } #[derive(Debug)] diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_fn_type_alias.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_fn_type_alias.rs index 9d01ec00f836c..b63baa696d9ae 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_fn_type_alias.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_fn_type_alias.rs @@ -1,8 +1,9 @@ use either::Either; -use ide_db::assists::{AssistId, AssistKind, GroupLabel}; +use ide_db::assists::{AssistId, GroupLabel}; use syntax::{ - ast::{self, edit::IndentLevel, make, HasGenericParams, HasName}, - syntax_editor, AstNode, + AstNode, + ast::{self, HasGenericParams, HasName, edit::IndentLevel, make}, + syntax_editor, }; use crate::{AssistContext, Assists}; @@ -116,7 +117,7 @@ pub(crate) fn generate_fn_type_alias(acc: &mut Assists, ctx: &AssistContext<'_>) } } - builder.add_file_edits(ctx.file_id(), edit); + builder.add_file_edits(ctx.vfs_file_id(), edit); }, ); } @@ -138,7 +139,7 @@ impl ParamStyle { ParamStyle::Unnamed => "generate_fn_type_alias_unnamed", }; - AssistId(s, AssistKind::Generate) + AssistId::generate(s) } fn label(&self) -> &'static str { diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_from_impl_for_enum.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_from_impl_for_enum.rs index 6091f06b96699..af949a0649899 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_from_impl_for_enum.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_from_impl_for_enum.rs @@ -1,9 +1,7 @@ -use ide_db::{famous_defs::FamousDefs, RootDatabase}; +use ide_db::{RootDatabase, famous_defs::FamousDefs}; use syntax::ast::{self, AstNode, HasName}; -use crate::{ - utils::generate_trait_impl_text_intransitive, AssistContext, AssistId, AssistKind, Assists, -}; +use crate::{AssistContext, AssistId, Assists, utils::generate_trait_impl_text_intransitive}; // Assist: generate_from_impl_for_enum // @@ -53,7 +51,7 @@ pub(crate) fn generate_from_impl_for_enum( let target = variant.syntax().text_range(); acc.add( - AssistId("generate_from_impl_for_enum", AssistKind::Generate), + AssistId::generate("generate_from_impl_for_enum"), "Generate `From` impl for this enum variant", target, |edit| { @@ -92,11 +90,7 @@ fn existing_from_impl( let wrapped_type = variant.fields(sema.db).first()?.ty(sema.db); - if enum_type.impls_trait(sema.db, from_trait, &[wrapped_type]) { - Some(()) - } else { - None - } + if enum_type.impls_trait(sema.db, from_trait, &[wrapped_type]) { Some(()) } else { None } } #[cfg(test)] diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_function.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_function.rs index 7af2a2e1e6a33..30084d23d1fb8 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_function.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_function.rs @@ -1,28 +1,30 @@ use hir::{ - Adt, AsAssocItem, HasSource, HirDisplay, HirFileIdExt, Module, PathResolution, Semantics, - StructKind, Type, TypeInfo, + Adt, AsAssocItem, HasSource, HirDisplay, Module, PathResolution, Semantics, StructKind, Type, + TypeInfo, }; use ide_db::{ + FileId, FxHashMap, FxHashSet, RootDatabase, SnippetCap, + assists::ExprFillDefaultMode, defs::{Definition, NameRefClass}, famous_defs::FamousDefs, helpers::is_editable_crate, path_transform::PathTransform, source_change::SourceChangeBuilder, - FileId, FxHashMap, FxHashSet, RootDatabase, SnippetCap, }; use itertools::Itertools; use stdx::to_lower_snake_case; use syntax::{ + Edition, SyntaxKind, SyntaxNode, T, TextRange, ast::{ - self, edit::IndentLevel, edit_in_place::Indent, make, AstNode, BlockExpr, CallExpr, - HasArgList, HasGenericParams, HasModuleItem, HasTypeBounds, + self, AstNode, BlockExpr, CallExpr, HasArgList, HasGenericParams, HasModuleItem, + HasTypeBounds, edit::IndentLevel, edit_in_place::Indent, make, }, - ted, Edition, SyntaxKind, SyntaxNode, TextRange, T, + ted, }; use crate::{ + AssistContext, AssistId, Assists, utils::{convert_reference_type, find_struct_impl}, - AssistContext, AssistId, AssistKind, Assists, }; // Assist: generate_function @@ -45,7 +47,7 @@ use crate::{ // bar("", baz()); // } // -// fn bar(arg: &str, baz: Baz) ${0:-> _} { +// fn bar(arg: &'static str, baz: Baz) ${0:-> _} { // todo!() // } // @@ -171,16 +173,15 @@ fn add_func_to_accumulator( adt_info: Option, label: String, ) -> Option<()> { - acc.add(AssistId("generate_function", AssistKind::Generate), label, text_range, |edit| { + acc.add(AssistId::generate("generate_function"), label, text_range, |edit| { edit.edit_file(file); let target = function_builder.target.clone(); let edition = function_builder.target_edition; let func = function_builder.render(ctx.config.snippet_cap, edit); - if let Some(adt) = - adt_info - .and_then(|adt_info| if adt_info.impl_exists { None } else { Some(adt_info.adt) }) + if let Some(adt) = adt_info + .and_then(|adt_info| if adt_info.impl_exists { None } else { Some(adt_info.adt) }) { let name = make::ty_path(make::ext::ident_path(&format!( "{}", @@ -205,11 +206,12 @@ fn get_adt_source( fn_name: &str, ) -> Option<(Option, FileId)> { let range = adt.source(ctx.sema.db)?.syntax().original_file_range_rooted(ctx.sema.db); + let file = ctx.sema.parse(range.file_id); let adt_source = ctx.sema.find_node_at_offset_with_macros(file.syntax(), range.range.start())?; find_struct_impl(ctx, &adt_source, &[fn_name.to_owned()]) - .map(|impl_| (impl_, range.file_id.file_id())) + .map(|impl_| (impl_, range.file_id.file_id(ctx.db()))) } struct FunctionBuilder { @@ -275,7 +277,11 @@ impl FunctionBuilder { target_module, &mut necessary_generic_params, ); - let placeholder_expr = make::ext::expr_todo(); + let placeholder_expr = match ctx.config.expr_fill_default { + ExprFillDefaultMode::Todo => make::ext::expr_todo(), + ExprFillDefaultMode::Underscore => make::ext::expr_underscore(), + ExprFillDefaultMode::Default => make::ext::expr_todo(), + }; fn_body = make::block_expr(vec![], Some(placeholder_expr)); }; @@ -330,7 +336,11 @@ impl FunctionBuilder { let (generic_param_list, where_clause) = fn_generic_params(ctx, necessary_generic_params, &target)?; - let placeholder_expr = make::ext::expr_todo(); + let placeholder_expr = match ctx.config.expr_fill_default { + ExprFillDefaultMode::Todo => make::ext::expr_todo(), + ExprFillDefaultMode::Underscore => make::ext::expr_underscore(), + ExprFillDefaultMode::Default => make::ext::expr_todo(), + }; let fn_body = make::block_expr(vec![], Some(placeholder_expr)); Some(Self { @@ -382,14 +392,14 @@ impl FunctionBuilder { // Focus the return type if there is one match ret_type { Some(ret_type) => { - edit.add_placeholder_snippet(cap, ret_type.clone()); + edit.add_placeholder_snippet(cap, ret_type); } None => { - edit.add_placeholder_snippet(cap, tail_expr.clone()); + edit.add_placeholder_snippet(cap, tail_expr); } } } else { - edit.add_placeholder_snippet(cap, tail_expr.clone()); + edit.add_placeholder_snippet(cap, tail_expr); } } @@ -443,7 +453,11 @@ fn make_fn_body_as_new_function( let adt_info = adt_info.as_ref()?; let path_self = make::ext::ident_path("Self"); - let placeholder_expr = make::ext::expr_todo(); + let placeholder_expr = match ctx.config.expr_fill_default { + ExprFillDefaultMode::Todo => make::ext::expr_todo(), + ExprFillDefaultMode::Underscore => make::ext::expr_underscore(), + ExprFillDefaultMode::Default => make::ext::expr_todo(), + }; let tail_expr = if let Some(strukt) = adt_info.adt.as_struct() { match strukt.kind(ctx.db()) { StructKind::Record => { @@ -470,7 +484,7 @@ fn make_fn_body_as_new_function( .map(|_| placeholder_expr.clone()) .collect::>(); - make::expr_call(make::expr_path(path_self), make::arg_list(args)) + make::expr_call(make::expr_path(path_self), make::arg_list(args)).into() } StructKind::Unit => make::expr_path(path_self), } @@ -496,7 +510,7 @@ fn get_fn_target( target_module: Option, call: CallExpr, ) -> Option<(GeneratedFunctionTarget, FileId)> { - let mut file = ctx.file_id().into(); + let mut file = ctx.vfs_file_id(); let target = match target_module { Some(target_module) => { let (in_file, target) = next_space_for_fn_in_module(ctx.db(), target_module); @@ -1161,7 +1175,7 @@ fn next_space_for_fn_in_module( target_module: hir::Module, ) -> (FileId, GeneratedFunctionTarget) { let module_source = target_module.definition_source(db); - let file = module_source.file_id.original_file(db.upcast()); + let file = module_source.file_id.original_file(db); let assist_item = match &module_source.value { hir::ModuleSource::SourceFile(it) => match it.items().last() { Some(last_item) => GeneratedFunctionTarget::AfterItem(last_item.syntax().clone()), @@ -1186,7 +1200,7 @@ fn next_space_for_fn_in_module( } }; - (file.file_id(), assist_item) + (file.file_id(db), assist_item) } #[derive(Clone, Copy)] @@ -1504,7 +1518,7 @@ fn foo() { bar("bar") } -fn bar(arg: &str) { +fn bar(arg: &'static str) { ${0:todo!()} } "#, @@ -2121,7 +2135,7 @@ fn foo() { bar(baz(), baz(), "foo", "bar") } -fn bar(baz_1: Baz, baz_2: Baz, arg_1: &str, arg_2: &str) { +fn bar(baz_1: Baz, baz_2: Baz, arg_1: &'static str, arg_2: &'static str) { ${0:todo!()} } "#, @@ -3089,7 +3103,7 @@ pub struct Foo { field_2: String, } impl Foo { - fn new(baz_1: Baz, baz_2: Baz, arg_1: &str, arg_2: &str) -> Self { + fn new(baz_1: Baz, baz_2: Baz, arg_1: &'static str, arg_2: &'static str) -> Self { ${0:Self { field_1: todo!(), field_2: todo!() }} } } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_getter_or_setter.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_getter_or_setter.rs index 1b16ba5fc8ff3..c7e5e41aac4ce 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_getter_or_setter.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_getter_or_setter.rs @@ -1,13 +1,14 @@ use ide_db::{famous_defs::FamousDefs, source_change::SourceChangeBuilder}; use stdx::{format_to, to_lower_snake_case}; use syntax::{ - ast::{self, edit_in_place::Indent, make, AstNode, HasName, HasVisibility}, - ted, TextRange, + TextRange, + ast::{self, AstNode, HasName, HasVisibility, edit_in_place::Indent, make}, + ted, }; use crate::{ + AssistContext, AssistId, Assists, GroupLabel, utils::{convert_reference_type, find_struct_impl, generate_impl}, - AssistContext, AssistId, AssistKind, Assists, GroupLabel, }; // Assist: generate_setter @@ -62,7 +63,7 @@ pub(crate) fn generate_setter(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opt acc.add_group( &GroupLabel("Generate getter/setter".to_owned()), - AssistId("generate_setter", AssistKind::Generate), + AssistId::generate("generate_setter"), "Generate a setter method", target, |builder| build_source_change(builder, ctx, info_of_record_fields, setter_info), @@ -203,7 +204,7 @@ pub(crate) fn generate_getter_impl( acc.add_group( &GroupLabel("Generate getter/setter".to_owned()), - AssistId(id, AssistKind::Generate), + AssistId::generate(id), label, target, |builder| build_source_change(builder, ctx, info_of_record_fields, getter_info), diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_impl.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_impl.rs index 4439830947ade..2862e6d5afba3 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_impl.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_impl.rs @@ -1,9 +1,9 @@ use syntax::{ - ast::{self, edit_in_place::Indent, make, AstNode, HasName}, + ast::{self, AstNode, HasName, edit_in_place::Indent, make}, ted, }; -use crate::{utils, AssistContext, AssistId, AssistKind, Assists}; +use crate::{AssistContext, AssistId, Assists, utils}; fn insert_impl(impl_: ast::Impl, nominal: &ast::Adt) { let indent = nominal.indent_level(); @@ -44,7 +44,7 @@ pub(crate) fn generate_impl(acc: &mut Assists, ctx: &AssistContext<'_>) -> Optio } acc.add( - AssistId("generate_impl", AssistKind::Generate), + AssistId::generate("generate_impl"), format!("Generate impl for `{name}`"), target, |edit| { @@ -90,7 +90,7 @@ pub(crate) fn generate_trait_impl(acc: &mut Assists, ctx: &AssistContext<'_>) -> } acc.add( - AssistId("generate_trait_impl", AssistKind::Generate), + AssistId::generate("generate_trait_impl"), format!("Generate trait impl for `{name}`"), target, |edit| { diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_is_empty_from_len.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_is_empty_from_len.rs index ad422b25c39e0..af9c493b48044 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_is_empty_from_len.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_is_empty_from_len.rs @@ -1,12 +1,12 @@ -use hir::{sym, HasSource, Name}; +use hir::{HasSource, Name, sym}; use syntax::{ - ast::{self, HasName}, AstNode, + ast::{self, HasName}, }; use crate::{ + AssistId, assist_context::{AssistContext, Assists}, - AssistId, AssistKind, }; // Assist: generate_is_empty_from_len @@ -54,13 +54,13 @@ pub(crate) fn generate_is_empty_from_len(acc: &mut Assists, ctx: &AssistContext< } let impl_ = fn_node.syntax().ancestors().find_map(ast::Impl::cast)?; - let len_fn = get_impl_method(ctx, &impl_, &Name::new_symbol_root(sym::len.clone()))?; + let len_fn = get_impl_method(ctx, &impl_, &Name::new_symbol_root(sym::len))?; if !len_fn.ret_type(ctx.sema.db).is_usize() { cov_mark::hit!(len_fn_different_return_type); return None; } - if get_impl_method(ctx, &impl_, &Name::new_symbol_root(sym::is_empty.clone())).is_some() { + if get_impl_method(ctx, &impl_, &Name::new_symbol_root(sym::is_empty)).is_some() { cov_mark::hit!(is_empty_already_implemented); return None; } @@ -69,7 +69,7 @@ pub(crate) fn generate_is_empty_from_len(acc: &mut Assists, ctx: &AssistContext< let range = node.syntax().value.text_range(); acc.add( - AssistId("generate_is_empty_from_len", AssistKind::Generate), + AssistId::generate("generate_is_empty_from_len"), "Generate a is_empty impl from a len function", range, |builder| { diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_mut_trait_impl.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_mut_trait_impl.rs index 6aa561ad7f037..2ac960ed7e183 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_mut_trait_impl.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_mut_trait_impl.rs @@ -1,10 +1,11 @@ use ide_db::famous_defs::FamousDefs; use syntax::{ + AstNode, ast::{self, make}, - ted, AstNode, + ted, }; -use crate::{AssistContext, AssistId, AssistKind, Assists}; +use crate::{AssistContext, AssistId, Assists}; // FIXME: Generate proper `index_mut` method body refer to `index` method body may impossible due to the unpredictable case [#15581]. // Here just leave the `index_mut` method body be same as `index` method body, user can modify it manually to meet their need. @@ -101,7 +102,7 @@ pub(crate) fn generate_mut_trait_impl(acc: &mut Assists, ctx: &AssistContext<'_> let target = impl_def.syntax().text_range(); acc.add( - AssistId("generate_mut_trait_impl", AssistKind::Generate), + AssistId::generate("generate_mut_trait_impl"), "Generate `IndexMut` impl from this `Index` trait", target, |edit| { diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_new.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_new.rs index 70d14d6b95d85..f963f48d62ab6 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_new.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_new.rs @@ -2,13 +2,13 @@ use ide_db::{ imports::import_assets::item_for_path_search, use_trivial_constructor::use_trivial_constructor, }; use syntax::{ - ast::{self, edit_in_place::Indent, make, AstNode, HasName, HasVisibility, StructKind}, + ast::{self, AstNode, HasName, HasVisibility, StructKind, edit_in_place::Indent, make}, ted, }; use crate::{ + AssistContext, AssistId, Assists, utils::{find_struct_impl, generate_impl}, - AssistContext, AssistId, AssistKind, Assists, }; // Assist: generate_new @@ -48,7 +48,7 @@ pub(crate) fn generate_new(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option let current_module = ctx.sema.scope(strukt.syntax())?.module(); let target = strukt.syntax().text_range(); - acc.add(AssistId("generate_new", AssistKind::Generate), "Generate `new`", target, |builder| { + acc.add(AssistId::generate("generate_new"), "Generate `new`", target, |builder| { let trivial_constructors = field_list .fields() .map(|f| { diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_trait_from_impl.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_trait_from_impl.rs index 5f7350bc2812b..154b502e1bf97 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_trait_from_impl.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_trait_from_impl.rs @@ -1,13 +1,13 @@ use crate::assist_context::{AssistContext, Assists}; use ide_db::assists::AssistId; use syntax::{ + AstNode, SyntaxKind, T, ast::{ - self, + self, HasGenericParams, HasName, edit_in_place::{HasVisibilityEdit, Indent}, - make, HasGenericParams, HasName, + make, }, ted::{self, Position}, - AstNode, SyntaxKind, T, }; // NOTES : @@ -95,7 +95,7 @@ pub(crate) fn generate_trait_from_impl(acc: &mut Assists, ctx: &AssistContext<'_ let impl_name = impl_ast.self_ty()?; acc.add( - AssistId("generate_trait_from_impl", ide_db::assists::AssistKind::Generate), + AssistId::generate("generate_trait_from_impl"), "Generate trait from impl", impl_ast.syntax().text_range(), |builder| { diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_call.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_call.rs index 9e09f198feb4a..6f028e58d0cdd 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_call.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_call.rs @@ -3,30 +3,32 @@ use std::collections::BTreeSet; use ast::make; use either::Either; use hir::{ + FileRange, PathResolution, Semantics, TypeInfo, db::{ExpandDatabase, HirDatabase}, - sym, FileRange, PathResolution, Semantics, TypeInfo, + sym, }; use ide_db::{ - base_db::CrateId, + EditionedFileId, RootDatabase, + base_db::Crate, defs::Definition, imports::insert_use::remove_path_if_in_use_stmt, path_transform::PathTransform, search::{FileReference, FileReferenceNode, SearchScope}, source_change::SourceChangeBuilder, syntax_helpers::{node_ext::expr_as_name_ref, prettify_macro_expansion}, - EditionedFileId, RootDatabase, }; -use itertools::{izip, Itertools}; +use itertools::{Itertools, izip}; use syntax::{ + AstNode, NodeOrToken, SyntaxKind, ast::{ - self, edit::IndentLevel, edit_in_place::Indent, HasArgList, HasGenericArgs, Pat, PathExpr, + self, HasArgList, HasGenericArgs, Pat, PathExpr, edit::IndentLevel, edit_in_place::Indent, }, - ted, AstNode, NodeOrToken, SyntaxKind, + ted, }; use crate::{ + AssistId, assist_context::{AssistContext, Assists}, - AssistId, AssistKind, }; // Assist: inline_into_callers @@ -69,6 +71,7 @@ use crate::{ // ``` pub(crate) fn inline_into_callers(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { let def_file = ctx.file_id(); + let vfs_def_file = ctx.vfs_file_id(); let name = ctx.find_node_at_offset::()?; let ast_func = name.syntax().parent().and_then(ast::Fn::cast)?; let func_body = ast_func.body()?; @@ -96,7 +99,7 @@ pub(crate) fn inline_into_callers(acc: &mut Assists, ctx: &AssistContext<'_>) -> } acc.add( - AssistId("inline_into_callers", AssistKind::RefactorInline), + AssistId::refactor_inline("inline_into_callers"), "Inline into all callers", name.syntax().text_range(), |builder| { @@ -104,7 +107,8 @@ pub(crate) fn inline_into_callers(acc: &mut Assists, ctx: &AssistContext<'_>) -> let current_file_usage = usages.references.remove(&def_file); let mut remove_def = true; - let mut inline_refs_for_file = |file_id, refs: Vec| { + let mut inline_refs_for_file = |file_id: EditionedFileId, refs: Vec| { + let file_id = file_id.file_id(ctx.db()); builder.edit_file(file_id); let call_krate = ctx.sema.file_to_module_def(file_id).map(|it| it.krate()); let count = refs.len(); @@ -141,7 +145,7 @@ pub(crate) fn inline_into_callers(acc: &mut Assists, ctx: &AssistContext<'_>) -> } match current_file_usage { Some(refs) => inline_refs_for_file(def_file, refs), - None => builder.edit_file(def_file), + None => builder.edit_file(vfs_def_file), } if remove_def { builder.delete(ast_func.syntax().text_range()); @@ -192,7 +196,7 @@ pub(crate) fn inline_call(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option< let name_ref: ast::NameRef = ctx.find_node_at_offset()?; let call_info = CallInfo::from_name_ref( name_ref.clone(), - ctx.sema.file_to_module_def(ctx.file_id())?.krate().into(), + ctx.sema.file_to_module_def(ctx.vfs_file_id())?.krate().into(), )?; let (function, label) = match &call_info.node { ast::CallableExpr::Call(call) => { @@ -230,32 +234,27 @@ pub(crate) fn inline_call(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option< } let syntax = call_info.node.syntax().clone(); - acc.add( - AssistId("inline_call", AssistKind::RefactorInline), - label, - syntax.text_range(), - |builder| { - let replacement = inline(&ctx.sema, file_id, function, &fn_body, ¶ms, &call_info); - builder.replace_ast( - match call_info.node { - ast::CallableExpr::Call(it) => ast::Expr::CallExpr(it), - ast::CallableExpr::MethodCall(it) => ast::Expr::MethodCallExpr(it), - }, - replacement, - ); - }, - ) + acc.add(AssistId::refactor_inline("inline_call"), label, syntax.text_range(), |builder| { + let replacement = inline(&ctx.sema, file_id, function, &fn_body, ¶ms, &call_info); + builder.replace_ast( + match call_info.node { + ast::CallableExpr::Call(it) => ast::Expr::CallExpr(it), + ast::CallableExpr::MethodCall(it) => ast::Expr::MethodCallExpr(it), + }, + replacement, + ); + }) } struct CallInfo { node: ast::CallableExpr, arguments: Vec, generic_arg_list: Option, - krate: CrateId, + krate: Crate, } impl CallInfo { - fn from_name_ref(name_ref: ast::NameRef, krate: CrateId) -> Option { + fn from_name_ref(name_ref: ast::NameRef, krate: Crate) -> Option { let parent = name_ref.syntax().parent()?; if let Some(call) = ast::MethodCallExpr::cast(parent.clone()) { let receiver = call.receiver()?; @@ -452,7 +451,7 @@ fn inline( let ty = sema.type_of_expr(expr).filter(TypeInfo::has_adjustment).and(param_ty); - let is_self = param.name(sema.db).is_some_and(|name| name == sym::self_.clone()); + let is_self = param.name(sema.db).is_some_and(|name| name == sym::self_); if is_self { let mut this_pat = make::ident_pat(false, false, make::name("this")); @@ -515,7 +514,7 @@ fn inline( && usage.syntax().parent().and_then(ast::Expr::cast).is_some() => { cov_mark::hit!(inline_call_inline_closure); - let expr = make::expr_paren(expr.clone()); + let expr = make::expr_paren(expr.clone()).into(); inline_direct(usage, &expr); } // inline single use literals @@ -570,7 +569,7 @@ fn inline( let no_stmts = body.statements().next().is_none(); match body.tail_expr() { Some(expr) if matches!(expr, ast::Expr::ClosureExpr(_)) && no_stmts => { - make::expr_paren(expr).clone_for_update() + make::expr_paren(expr).clone_for_update().into() } Some(expr) if !is_async_fn && no_stmts => expr, _ => match node @@ -580,7 +579,7 @@ fn inline( .and_then(|bin_expr| bin_expr.lhs()) { Some(lhs) if lhs.syntax() == node.syntax() => { - make::expr_paren(ast::Expr::BlockExpr(body)).clone_for_update() + make::expr_paren(ast::Expr::BlockExpr(body)).clone_for_update().into() } _ => ast::Expr::BlockExpr(body), }, diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_const_as_literal.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_const_as_literal.rs index ca5882d0313ac..e5ed04fdc7c9e 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_const_as_literal.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_const_as_literal.rs @@ -1,7 +1,7 @@ use hir::HasCrate; -use syntax::{ast, AstNode}; +use syntax::{AstNode, ast}; -use crate::{AssistContext, AssistId, AssistKind, Assists}; +use crate::{AssistContext, AssistId, Assists}; // Assist: inline_const_as_literal // @@ -44,7 +44,7 @@ pub(crate) fn inline_const_as_literal(acc: &mut Assists, ctx: &AssistContext<'_> .ok()? .render(ctx.sema.db, konst.krate(ctx.sema.db).to_display_target(ctx.sema.db)); - let id = AssistId("inline_const_as_literal", AssistKind::RefactorInline); + let id = AssistId::refactor_inline("inline_const_as_literal"); let label = "Inline const as literal".to_owned(); let target = variable.syntax().text_range(); diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_local_variable.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_local_variable.rs index 36eed290dc88d..5d4bdc6ec76cd 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_local_variable.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_local_variable.rs @@ -1,17 +1,17 @@ use hir::{PathResolution, Semantics}; use ide_db::{ + EditionedFileId, RootDatabase, defs::Definition, search::{FileReference, FileReferenceNode, UsageSearchResult}, - EditionedFileId, RootDatabase, }; use syntax::{ - ast::{self, syntax_factory::SyntaxFactory, AstNode, AstToken, HasName}, SyntaxElement, TextRange, + ast::{self, AstNode, AstToken, HasName, syntax_factory::SyntaxFactory}, }; use crate::{ + AssistId, assist_context::{AssistContext, Assists}, - AssistId, AssistKind, }; // Assist: inline_local_variable @@ -74,7 +74,7 @@ pub(crate) fn inline_local_variable(acc: &mut Assists, ctx: &AssistContext<'_>) }; acc.add( - AssistId("inline_local_variable", AssistKind::RefactorInline), + AssistId::refactor_inline("inline_local_variable"), "Inline variable", target.text_range(), move |builder| { @@ -91,7 +91,7 @@ pub(crate) fn inline_local_variable(acc: &mut Assists, ctx: &AssistContext<'_>) } } - let make = SyntaxFactory::new(); + let make = SyntaxFactory::with_mappings(); for (name, should_wrap) in wrap_in_parens { let replacement = if should_wrap { @@ -110,7 +110,7 @@ pub(crate) fn inline_local_variable(acc: &mut Assists, ctx: &AssistContext<'_>) } editor.add_mappings(make.finish_with_mappings()); - builder.add_file_edits(ctx.file_id(), editor); + builder.add_file_edits(ctx.vfs_file_id(), editor); }, ) } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_macro.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_macro.rs index cd6f900ba15da..b09bef36ae15d 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_macro.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_macro.rs @@ -2,7 +2,7 @@ use hir::db::ExpandDatabase; use ide_db::syntax_helpers::prettify_macro_expansion; use syntax::ast::{self, AstNode}; -use crate::{AssistContext, AssistId, AssistKind, Assists}; +use crate::{AssistContext, AssistId, Assists}; // Assist: inline_macro // @@ -38,16 +38,16 @@ use crate::{AssistContext, AssistId, AssistKind, Assists}; pub(crate) fn inline_macro(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { let unexpanded = ctx.find_node_at_offset::()?; let macro_call = ctx.sema.to_def(&unexpanded)?; - let target_crate_id = ctx.sema.file_to_module_def(ctx.file_id())?.krate().into(); + let target_crate_id = ctx.sema.file_to_module_def(ctx.vfs_file_id())?.krate().into(); let text_range = unexpanded.syntax().text_range(); acc.add( - AssistId("inline_macro", AssistKind::RefactorInline), + AssistId::refactor_inline("inline_macro"), "Inline macro".to_owned(), text_range, |builder| { - let expanded = ctx.sema.parse_or_expand(macro_call.as_file()); - let span_map = ctx.sema.db.expansion_span_map(macro_call.as_macro_file()); + let expanded = ctx.sema.parse_or_expand(macro_call.into()); + let span_map = ctx.sema.db.expansion_span_map(macro_call); // Don't call `prettify_macro_expansion()` outside the actual assist action; it does some heavy rowan tree manipulation, // which can be very costly for big macros when it is done *even without the assist being invoked*. let expanded = prettify_macro_expansion(ctx.db(), expanded, &span_map, target_crate_id); diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_type_alias.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_type_alias.rs index 76d465b011039..4511072b041b1 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_type_alias.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_type_alias.rs @@ -10,13 +10,14 @@ use ide_db::{ }; use itertools::Itertools; use syntax::{ - ast::{self, make, HasGenericParams, HasName}, - ted, AstNode, NodeOrToken, SyntaxNode, + AstNode, NodeOrToken, SyntaxNode, + ast::{self, HasGenericParams, HasName, make}, + ted, }; use crate::{ + AssistId, assist_context::{AssistContext, Assists}, - AssistId, AssistKind, }; use super::inline_call::split_refs_and_uses; @@ -59,7 +60,7 @@ pub(crate) fn inline_type_alias_uses(acc: &mut Assists, ctx: &AssistContext<'_>) // until this is ok acc.add( - AssistId("inline_type_alias_uses", AssistKind::RefactorInline), + AssistId::refactor_inline("inline_type_alias_uses"), "Inline type alias into all uses", name.syntax().text_range(), |builder| { @@ -86,17 +87,17 @@ pub(crate) fn inline_type_alias_uses(acc: &mut Assists, ctx: &AssistContext<'_>) builder.replace(target, replacement); } - if file_id == ctx.file_id() { + if file_id == ctx.vfs_file_id() { builder.delete(ast_alias.syntax().text_range()); definition_deleted = true; } }; for (file_id, refs) in usages.into_iter() { - inline_refs_for_file(file_id.file_id(), refs); + inline_refs_for_file(file_id.file_id(ctx.db()), refs); } if !definition_deleted { - builder.edit_file(ctx.file_id()); + builder.edit_file(ctx.vfs_file_id()); builder.delete(ast_alias.syntax().text_range()); } }, @@ -148,7 +149,7 @@ pub(crate) fn inline_type_alias(acc: &mut Assists, ctx: &AssistContext<'_>) -> O let target = alias_instance.syntax().text_range(); acc.add( - AssistId("inline_type_alias", AssistKind::RefactorInline), + AssistId::refactor_inline("inline_type_alias"), "Inline type alias", target, |builder| builder.replace(target, replacement.to_text(&concrete_type)), diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/into_to_qualified_from.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/into_to_qualified_from.rs index e405af5533d5e..47b273535a88f 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/into_to_qualified_from.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/into_to_qualified_from.rs @@ -1,9 +1,6 @@ use hir::{AsAssocItem, HirDisplay}; -use ide_db::{ - assists::{AssistId, AssistKind}, - famous_defs::FamousDefs, -}; -use syntax::{ast, AstNode}; +use ide_db::{assists::AssistId, famous_defs::FamousDefs}; +use syntax::{AstNode, ast}; use crate::assist_context::{AssistContext, Assists}; @@ -60,7 +57,7 @@ pub(crate) fn into_to_qualified_from(acc: &mut Assists, ctx: &AssistContext<'_>) let sc = adjusted_tc.display_source_code(db, scope.module().into(), true).ok()?; acc.add( - AssistId("into_to_qualified_from", AssistKind::Generate), + AssistId::generate("into_to_qualified_from"), "Convert `into` to fully qualified `from`", nameref.syntax().text_range(), |edit| { diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/introduce_named_lifetime.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/introduce_named_lifetime.rs index 62909c586e3d4..264e3767a2324 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/introduce_named_lifetime.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/introduce_named_lifetime.rs @@ -1,11 +1,11 @@ use ide_db::FxHashSet; use syntax::{ - ast::{self, edit_in_place::GenericParamsOwnerEdit, make, HasGenericParams}, - ted::{self, Position}, AstNode, TextRange, + ast::{self, HasGenericParams, edit_in_place::GenericParamsOwnerEdit, make}, + ted::{self, Position}, }; -use crate::{assist_context::SourceChangeBuilder, AssistContext, AssistId, AssistKind, Assists}; +use crate::{AssistContext, AssistId, Assists, assist_context::SourceChangeBuilder}; static ASSIST_NAME: &str = "introduce_named_lifetime"; static ASSIST_LABEL: &str = "Introduce named lifetime"; @@ -83,7 +83,7 @@ fn generate_fn_def_assist( _ => return None, } }; - acc.add(AssistId(ASSIST_NAME, AssistKind::Refactor), ASSIST_LABEL, lifetime_loc, |builder| { + acc.add(AssistId::refactor(ASSIST_NAME), ASSIST_LABEL, lifetime_loc, |builder| { let fn_def = builder.make_mut(fn_def); let lifetime = builder.make_mut(lifetime); let loc_needing_lifetime = @@ -107,7 +107,7 @@ fn generate_impl_def_assist( lifetime: ast::Lifetime, ) -> Option<()> { let new_lifetime_param = generate_unique_lifetime_param_name(impl_def.generic_param_list())?; - acc.add(AssistId(ASSIST_NAME, AssistKind::Refactor), ASSIST_LABEL, lifetime_loc, |builder| { + acc.add(AssistId::refactor(ASSIST_NAME), ASSIST_LABEL, lifetime_loc, |builder| { let impl_def = builder.make_mut(impl_def); let lifetime = builder.make_mut(lifetime); diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/introduce_named_type_parameter.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/introduce_named_type_parameter.rs index 994e4a0eddaf6..db51070a6430b 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/introduce_named_type_parameter.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/introduce_named_type_parameter.rs @@ -1,8 +1,8 @@ use ide_db::syntax_helpers::suggest_name; use itertools::Itertools; -use syntax::ast::{self, syntax_factory::SyntaxFactory, AstNode, HasGenericParams, HasName}; +use syntax::ast::{self, AstNode, HasGenericParams, HasName, syntax_factory::SyntaxFactory}; -use crate::{AssistContext, AssistId, AssistKind, Assists}; +use crate::{AssistContext, AssistId, Assists}; // Assist: introduce_named_type_parameter // @@ -24,10 +24,10 @@ pub(crate) fn introduce_named_type_parameter( let fn_ = param.syntax().ancestors().nth(2).and_then(ast::Fn::cast)?; let type_bound_list = impl_trait_type.type_bound_list()?; - let make = SyntaxFactory::new(); + let make = SyntaxFactory::with_mappings(); let target = fn_.syntax().text_range(); acc.add( - AssistId("introduce_named_type_parameter", AssistKind::RefactorRewrite), + AssistId::refactor_rewrite("introduce_named_type_parameter"), "Replace impl trait with type parameter", target, |builder| { @@ -59,7 +59,7 @@ pub(crate) fn introduce_named_type_parameter( } editor.add_mappings(make.finish_with_mappings()); - builder.add_file_edits(ctx.file_id(), editor); + builder.add_file_edits(ctx.vfs_file_id(), editor); }, ) } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/invert_if.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/invert_if.rs index ac710503d8a0d..d198870b023e6 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/invert_if.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/invert_if.rs @@ -1,13 +1,13 @@ use ide_db::syntax_helpers::node_ext::is_pattern_cond; use syntax::{ - ast::{self, AstNode}, T, + ast::{self, AstNode}, }; use crate::{ + AssistId, assist_context::{AssistContext, Assists}, utils::invert_boolean_expression_legacy, - AssistId, AssistKind, }; // Assist: invert_if @@ -47,7 +47,7 @@ pub(crate) fn invert_if(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<() ast::ElseBranch::IfExpr(_) => return None, }; - acc.add(AssistId("invert_if", AssistKind::RefactorRewrite), "Invert if", if_range, |edit| { + acc.add(AssistId::refactor_rewrite("invert_if"), "Invert if", if_range, |edit| { let flip_cond = invert_boolean_expression_legacy(cond.clone()); edit.replace_ast(cond, flip_cond); diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/merge_imports.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/merge_imports.rs index 4171230836908..6bf7f5849148f 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/merge_imports.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/merge_imports.rs @@ -1,19 +1,20 @@ use either::Either; use ide_db::imports::{ insert_use::{ImportGranularity, InsertUseConfig}, - merge_imports::{try_merge_imports, try_merge_trees, try_normalize_use_tree, MergeBehavior}, + merge_imports::{MergeBehavior, try_merge_imports, try_merge_trees}, }; -use itertools::Itertools; use syntax::{ + AstNode, SyntaxElement, SyntaxNode, algo::neighbor, - ast::{self, edit_in_place::Removable}, - match_ast, ted, AstNode, SyntaxElement, SyntaxNode, + ast::{self, syntax_factory::SyntaxFactory}, + match_ast, + syntax_editor::Removable, }; use crate::{ + AssistId, assist_context::{AssistContext, Assists}, utils::next_prev, - AssistId, AssistKind, }; use Edit::*; @@ -68,55 +69,33 @@ pub(crate) fn merge_imports(acc: &mut Assists, ctx: &AssistContext<'_>) -> Optio (selection_range, edits?) }; - acc.add( - AssistId("merge_imports", AssistKind::RefactorRewrite), - "Merge imports", - target, - |builder| { - let edits_mut: Vec = edits - .into_iter() - .map(|it| match it { - Remove(Either::Left(it)) => Remove(Either::Left(builder.make_mut(it))), - Remove(Either::Right(it)) => Remove(Either::Right(builder.make_mut(it))), - Replace(old, new) => Replace(builder.make_syntax_mut(old), new), - }) - .collect(); - for edit in edits_mut { - match edit { - Remove(it) => it.as_ref().either(Removable::remove, Removable::remove), - Replace(old, new) => { - ted::replace(old, &new); - - // If there's a selection and we're replacing a use tree in a tree list, - // normalize the parent use tree if it only contains the merged subtree. - if !ctx.has_empty_selection() { - let normalized_use_tree = ast::UseTree::cast(new) - .as_ref() - .and_then(ast::UseTree::parent_use_tree_list) - .and_then(|use_tree_list| { - if use_tree_list.use_trees().collect_tuple::<(_,)>().is_some() { - Some(use_tree_list.parent_use_tree()) - } else { - None - } - }) - .and_then(|target_tree| { - try_normalize_use_tree( - &target_tree, - ctx.config.insert_use.granularity.into(), - ) - .map(|top_use_tree_flat| (target_tree, top_use_tree_flat)) - }); - if let Some((old_tree, new_tree)) = normalized_use_tree { - cov_mark::hit!(replace_parent_with_normalized_use_tree); - ted::replace(old_tree.syntax(), new_tree.syntax()); - } - } + let parent_node = match ctx.covering_element() { + SyntaxElement::Node(n) => n, + SyntaxElement::Token(t) => t.parent()?, + }; + + acc.add(AssistId::refactor_rewrite("merge_imports"), "Merge imports", target, |builder| { + let make = SyntaxFactory::with_mappings(); + let mut editor = builder.make_editor(&parent_node); + + for edit in edits { + match edit { + Remove(it) => { + let node = it.as_ref(); + if let Some(left) = node.left() { + left.remove(&mut editor); + } else if let Some(right) = node.right() { + right.remove(&mut editor); } } + Replace(old, new) => { + editor.replace(old, &new); + } } - }, - ) + } + editor.add_mappings(make.finish_with_mappings()); + builder.add_file_edits(ctx.vfs_file_id(), editor); + }) } trait Merge: AstNode + Clone { @@ -727,11 +706,10 @@ use std::{ ); cov_mark::check!(merge_with_selected_use_tree_neighbors); - cov_mark::check!(replace_parent_with_normalized_use_tree); check_assist( merge_imports, r"use std::$0{fmt::Display, fmt::Debug}$0;", - r"use std::fmt::{Debug, Display};", + r"use std::{fmt::{Debug, Display}};", ); } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/merge_match_arms.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/merge_match_arms.rs index f83de931eaba7..42f35210b4967 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/merge_match_arms.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/merge_match_arms.rs @@ -2,12 +2,12 @@ use hir::Type; use ide_db::FxHashMap; use std::iter::successors; use syntax::{ + Direction, algo::neighbor, ast::{self, AstNode, HasName}, - Direction, }; -use crate::{AssistContext, AssistId, AssistKind, Assists, TextRange}; +use crate::{AssistContext, AssistId, Assists, TextRange}; // Assist: merge_match_arms // @@ -73,7 +73,7 @@ pub(crate) fn merge_match_arms(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op } acc.add( - AssistId("merge_match_arms", AssistKind::RefactorRewrite), + AssistId::refactor_rewrite("merge_match_arms"), "Merge match arms", current_text_range, |edit| { diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/merge_nested_if.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/merge_nested_if.rs index 7a0037fa202bb..73cb8204f2096 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/merge_nested_if.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/merge_nested_if.rs @@ -1,12 +1,12 @@ use ide_db::syntax_helpers::node_ext::is_pattern_cond; use syntax::{ - ast::{self, AstNode, BinaryOp}, T, + ast::{self, AstNode, BinaryOp}, }; use crate::{ + AssistId, assist_context::{AssistContext, Assists}, - AssistId, AssistKind, }; // Assist: merge_nested_if // @@ -69,29 +69,24 @@ pub(crate) fn merge_nested_if(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opt let nested_if_then_branch = nested_if_to_merge.then_branch()?; let then_branch_range = then_branch.syntax().text_range(); - acc.add( - AssistId("merge_nested_if", AssistKind::RefactorRewrite), - "Merge nested if", - if_range, - |edit| { - let cond_text = if has_logic_op_or(&cond) { - format!("({})", cond.syntax().text()) - } else { - cond.syntax().text().to_string() - }; + acc.add(AssistId::refactor_rewrite("merge_nested_if"), "Merge nested if", if_range, |edit| { + let cond_text = if has_logic_op_or(&cond) { + format!("({})", cond.syntax().text()) + } else { + cond.syntax().text().to_string() + }; - let nested_if_cond_text = if has_logic_op_or(&nested_if_cond) { - format!("({})", nested_if_cond.syntax().text()) - } else { - nested_if_cond.syntax().text().to_string() - }; + let nested_if_cond_text = if has_logic_op_or(&nested_if_cond) { + format!("({})", nested_if_cond.syntax().text()) + } else { + nested_if_cond.syntax().text().to_string() + }; - let replace_cond = format!("{cond_text} && {nested_if_cond_text}"); + let replace_cond = format!("{cond_text} && {nested_if_cond_text}"); - edit.replace(cond_range, replace_cond); - edit.replace(then_branch_range, nested_if_then_branch.syntax().text()); - }, - ) + edit.replace(cond_range, replace_cond); + edit.replace(then_branch_range, nested_if_then_branch.syntax().text()); + }) } /// Returns whether the given if condition has logical operators. diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_bounds.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_bounds.rs index 5101d8fa0a9e3..7e8735bd7a246 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_bounds.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_bounds.rs @@ -1,13 +1,13 @@ use syntax::{ ast::{ - self, + self, AstNode, HasName, HasTypeBounds, edit_in_place::{GenericParamsOwnerEdit, Removable}, - make, AstNode, HasName, HasTypeBounds, + make, }, match_ast, }; -use crate::{AssistContext, AssistId, AssistKind, Assists}; +use crate::{AssistContext, AssistId, Assists}; // Assist: move_bounds_to_where_clause // @@ -42,7 +42,7 @@ pub(crate) fn move_bounds_to_where_clause( let target = type_param_list.syntax().text_range(); acc.add( - AssistId("move_bounds_to_where_clause", AssistKind::RefactorRewrite), + AssistId::refactor_rewrite("move_bounds_to_where_clause"), "Move to where clause", target, |edit| { diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_const_to_impl.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_const_to_impl.rs index 743ea9476150d..0c1dc9eb9349f 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_const_to_impl.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_const_to_impl.rs @@ -1,8 +1,8 @@ use hir::{AsAssocItem, AssocItemContainer, FileRange, HasCrate, HasSource}; use ide_db::{assists::AssistId, defs::Definition, search::SearchScope}; use syntax::{ - ast::{self, edit::IndentLevel, edit_in_place::Indent, AstNode}, SyntaxKind, + ast::{self, AstNode, edit::IndentLevel, edit_in_place::Indent}, }; use crate::assist_context::{AssistContext, Assists}; @@ -83,7 +83,7 @@ pub(crate) fn move_const_to_impl(acc: &mut Assists, ctx: &AssistContext<'_>) -> } acc.add( - AssistId("move_const_to_impl", crate::AssistKind::RefactorRewrite), + AssistId::refactor_rewrite("move_const_to_impl"), "Move const to impl block", const_.syntax().text_range(), |builder| { @@ -105,7 +105,7 @@ pub(crate) fn move_const_to_impl(acc: &mut Assists, ctx: &AssistContext<'_>) -> builder.delete(range_to_delete); let usages = usages.iter().flat_map(|(file_id, usages)| { - let edition = file_id.edition(); + let edition = file_id.edition(ctx.db()); usages.iter().map(move |usage| (edition, usage.range)) }); for (edition, range) in usages { diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_from_mod_rs.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_from_mod_rs.rs index 10915f8aafb8d..a36d3136a16da 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_from_mod_rs.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_from_mod_rs.rs @@ -1,8 +1,5 @@ -use ide_db::{ - assists::{AssistId, AssistKind}, - base_db::AnchoredPathBuf, -}; -use syntax::{ast, AstNode, ToSmolStr}; +use ide_db::{assists::AssistId, base_db::AnchoredPathBuf}; +use syntax::{AstNode, ToSmolStr, ast}; use crate::{ assist_context::{AssistContext, Assists}, @@ -25,7 +22,7 @@ use crate::{ // ``` pub(crate) fn move_from_mod_rs(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { let source_file = ctx.find_node_at_offset::()?; - let module = ctx.sema.file_to_module_def(ctx.file_id())?; + let module = ctx.sema.file_to_module_def(ctx.vfs_file_id())?; // Enable this assist if the user select all "meaningful" content in the source file let trimmed_selected_range = trimmed_text_range(&source_file, ctx.selection_trimmed()); let trimmed_file_range = trimmed_text_range(&source_file, source_file.syntax().text_range()); @@ -41,13 +38,13 @@ pub(crate) fn move_from_mod_rs(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op let target = source_file.syntax().text_range(); let module_name = module.name(ctx.db())?.as_str().to_smolstr(); let path = format!("../{module_name}.rs"); - let dst = AnchoredPathBuf { anchor: ctx.file_id().into(), path }; + let dst = AnchoredPathBuf { anchor: ctx.vfs_file_id(), path }; acc.add( - AssistId("move_from_mod_rs", AssistKind::Refactor), + AssistId::refactor("move_from_mod_rs"), format!("Convert {module_name}/mod.rs to {module_name}.rs"), target, |builder| { - builder.move_file(ctx.file_id(), dst); + builder.move_file(ctx.vfs_file_id(), dst); }, ) } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_guard.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_guard.rs index a487960d8d4c5..644d1f6cafefc 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_guard.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_guard.rs @@ -1,9 +1,9 @@ use syntax::{ - ast::{edit::AstNodeEdit, make, AstNode, BlockExpr, ElseBranch, Expr, IfExpr, MatchArm, Pat}, SyntaxKind::WHITESPACE, + ast::{AstNode, BlockExpr, ElseBranch, Expr, IfExpr, MatchArm, Pat, edit::AstNodeEdit, make}, }; -use crate::{AssistContext, AssistId, AssistKind, Assists}; +use crate::{AssistContext, AssistId, Assists}; // Assist: move_guard_to_arm_body // @@ -49,7 +49,7 @@ pub(crate) fn move_guard_to_arm_body(acc: &mut Assists, ctx: &AssistContext<'_>) let target = guard.syntax().text_range(); acc.add( - AssistId("move_guard_to_arm_body", AssistKind::RefactorRewrite), + AssistId::refactor_rewrite("move_guard_to_arm_body"), "Move guard to arm body", target, |edit| { @@ -118,7 +118,7 @@ pub(crate) fn move_arm_cond_to_match_guard( let (conds_blocks, tail) = parse_if_chain(if_expr)?; acc.add( - AssistId("move_arm_cond_to_match_guard", AssistKind::RefactorRewrite), + AssistId::refactor_rewrite("move_arm_cond_to_match_guard"), "Move condition to match guard", replace_node.text_range(), |edit| { diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_module_to_file.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_module_to_file.rs index bbf18e21948eb..da62b817fcdb7 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_module_to_file.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_module_to_file.rs @@ -1,16 +1,16 @@ use std::iter; use ast::edit::IndentLevel; -use hir::{sym, HasAttrs}; +use hir::{HasAttrs, sym}; use ide_db::base_db::AnchoredPathBuf; use itertools::Itertools; use stdx::format_to; use syntax::{ - ast::{self, edit::AstNodeEdit, HasName}, AstNode, SmolStr, TextRange, + ast::{self, HasName, edit::AstNodeEdit}, }; -use crate::{AssistContext, AssistId, AssistKind, Assists}; +use crate::{AssistContext, AssistId, Assists}; // Assist: move_module_to_file // @@ -45,7 +45,7 @@ pub(crate) fn move_module_to_file(acc: &mut Assists, ctx: &AssistContext<'_>) -> let parent_module = module_def.parent(ctx.db())?; acc.add( - AssistId("move_module_to_file", AssistKind::RefactorExtract), + AssistId::refactor_extract("move_module_to_file"), "Extract module to file", target, |builder| { @@ -57,7 +57,7 @@ pub(crate) fn move_module_to_file(acc: &mut Assists, ctx: &AssistContext<'_>) -> if !parent_module.is_mod_rs(db) && parent_module .attrs(db) - .by_key(&sym::path) + .by_key(sym::path) .string_value_unescape() .is_none() => { @@ -104,7 +104,7 @@ pub(crate) fn move_module_to_file(acc: &mut Assists, ctx: &AssistContext<'_>) -> buf, ); - let dst = AnchoredPathBuf { anchor: ctx.file_id().into(), path }; + let dst = AnchoredPathBuf { anchor: ctx.vfs_file_id(), path }; builder.create_file(dst, contents); }, ) diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_to_mod_rs.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_to_mod_rs.rs index 7b38c795dc80f..5e95b264fc8e4 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_to_mod_rs.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_to_mod_rs.rs @@ -1,8 +1,5 @@ -use ide_db::{ - assists::{AssistId, AssistKind}, - base_db::AnchoredPathBuf, -}; -use syntax::{ast, AstNode, ToSmolStr}; +use ide_db::{assists::AssistId, base_db::AnchoredPathBuf}; +use syntax::{AstNode, ToSmolStr, ast}; use crate::{ assist_context::{AssistContext, Assists}, @@ -25,7 +22,7 @@ use crate::{ // ``` pub(crate) fn move_to_mod_rs(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { let source_file = ctx.find_node_at_offset::()?; - let module = ctx.sema.file_to_module_def(ctx.file_id())?; + let module = ctx.sema.file_to_module_def(ctx.vfs_file_id())?; // Enable this assist if the user select all "meaningful" content in the source file let trimmed_selected_range = trimmed_text_range(&source_file, ctx.selection_trimmed()); let trimmed_file_range = trimmed_text_range(&source_file, source_file.syntax().text_range()); @@ -41,13 +38,13 @@ pub(crate) fn move_to_mod_rs(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opti let target = source_file.syntax().text_range(); let module_name = module.name(ctx.db())?.as_str().to_smolstr(); let path = format!("./{module_name}/mod.rs"); - let dst = AnchoredPathBuf { anchor: ctx.file_id().into(), path }; + let dst = AnchoredPathBuf { anchor: ctx.vfs_file_id(), path }; acc.add( - AssistId("move_to_mod_rs", AssistKind::Refactor), + AssistId::refactor("move_to_mod_rs"), format!("Convert {module_name}.rs to {module_name}/mod.rs"), target, |builder| { - builder.move_file(ctx.file_id(), dst); + builder.move_file(ctx.vfs_file_id(), dst); }, ) } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/normalize_import.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/normalize_import.rs index 0b91eb676df01..bba28b5fc8af5 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/normalize_import.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/normalize_import.rs @@ -1,9 +1,9 @@ use ide_db::imports::merge_imports::try_normalize_import; -use syntax::{ast, AstNode}; +use syntax::{AstNode, ast}; use crate::{ + AssistId, assist_context::{AssistContext, Assists}, - AssistId, AssistKind, }; // Assist: normalize_import @@ -28,14 +28,9 @@ pub(crate) fn normalize_import(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op let normalized_use_item = try_normalize_import(&use_item, ctx.config.insert_use.granularity.into())?; - acc.add( - AssistId("normalize_import", AssistKind::RefactorRewrite), - "Normalize import", - target, - |builder| { - builder.replace_ast(use_item, normalized_use_item); - }, - ) + acc.add(AssistId::refactor_rewrite("normalize_import"), "Normalize import", target, |builder| { + builder.replace_ast(use_item, normalized_use_item); + }) } #[cfg(test)] diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/number_representation.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/number_representation.rs index a13799f9b1317..1fe40f8ee83ed 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/number_representation.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/number_representation.rs @@ -1,6 +1,6 @@ -use syntax::{ast, ast::Radix, AstToken}; +use syntax::{AstToken, ast, ast::Radix}; -use crate::{AssistContext, AssistId, AssistKind, Assists, GroupLabel}; +use crate::{AssistContext, AssistId, Assists, GroupLabel}; const MIN_NUMBER_OF_DIGITS_TO_FORMAT: usize = 5; @@ -42,7 +42,7 @@ pub(crate) fn reformat_number_literal(acc: &mut Assists, ctx: &AssistContext<'_> let range = literal.syntax().text_range(); acc.add_group( &group_id, - AssistId("reformat_number_literal", AssistKind::RefactorInline), + AssistId::refactor_inline("reformat_number_literal"), label, range, |builder| builder.replace(range, converted), @@ -54,7 +54,7 @@ fn remove_separators(acc: &mut Assists, literal: ast::IntNumber) -> Option<()> { let range = literal.syntax().text_range(); acc.add_group( &group_id, - AssistId("reformat_number_literal", AssistKind::RefactorInline), + AssistId::refactor_inline("reformat_number_literal"), "Remove digit separators", range, |builder| builder.replace(range, literal.text().replace('_', "")), diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/promote_local_to_const.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/promote_local_to_const.rs index 0cc771ff39791..6316a8f0db24d 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/promote_local_to_const.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/promote_local_to_const.rs @@ -1,12 +1,10 @@ use hir::HirDisplay; -use ide_db::{ - assists::{AssistId, AssistKind}, - defs::Definition, -}; +use ide_db::{assists::AssistId, defs::Definition}; use stdx::to_upper_snake_case; use syntax::{ - ast::{self, make, HasName}, - ted, AstNode, + AstNode, + ast::{self, HasName, make}, + ted, }; use crate::{ @@ -67,7 +65,7 @@ pub(crate) fn promote_local_to_const(acc: &mut Assists, ctx: &AssistContext<'_>) } acc.add( - AssistId("promote_local_to_const", AssistKind::Refactor), + AssistId::refactor("promote_local_to_const"), "Promote local to constant", let_stmt.syntax().text_range(), |edit| { diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/pull_assignment_up.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/pull_assignment_up.rs index f222b3eb903c4..5f626d2957111 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/pull_assignment_up.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/pull_assignment_up.rs @@ -1,11 +1,12 @@ use syntax::{ + AstNode, ast::{self, make}, - ted, AstNode, + ted, }; use crate::{ + AssistId, assist_context::{AssistContext, Assists}, - AssistId, AssistKind, }; // Assist: pull_assignment_up @@ -67,7 +68,7 @@ pub(crate) fn pull_assignment_up(acc: &mut Assists, ctx: &AssistContext<'_>) -> } acc.add( - AssistId("pull_assignment_up", AssistKind::RefactorExtract), + AssistId::refactor_extract("pull_assignment_up"), "Pull assignment up", tgt.syntax().text_range(), move |edit| { diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/qualify_method_call.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/qualify_method_call.rs index c3600af5a6c58..985121780b1ab 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/qualify_method_call.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/qualify_method_call.rs @@ -1,6 +1,6 @@ -use hir::{db::HirDatabase, AsAssocItem, AssocItem, AssocItemContainer, ItemInNs, ModuleDef}; -use ide_db::assists::{AssistId, AssistKind}; -use syntax::{ast, AstNode}; +use hir::{AsAssocItem, AssocItem, AssocItemContainer, ItemInNs, ModuleDef, db::HirDatabase}; +use ide_db::assists::AssistId; +use syntax::{AstNode, ast}; use crate::{ assist_context::{AssistContext, Assists}, @@ -54,7 +54,7 @@ pub(crate) fn qualify_method_call(acc: &mut Assists, ctx: &AssistContext<'_>) -> let qualify_candidate = QualifyCandidate::ImplMethod(ctx.sema.db, call, resolved_call); acc.add( - AssistId("qualify_method_call", AssistKind::RefactorRewrite), + AssistId::refactor_rewrite("qualify_method_call"), format!("Qualify `{ident}` method call"), range, |builder| { diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/qualify_path.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/qualify_path.rs index 2a8465f634cfb..07d2f52a34ee9 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/qualify_path.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/qualify_path.rs @@ -7,18 +7,17 @@ use ide_db::{ helpers::mod_path_to_ast, imports::import_assets::{ImportCandidate, LocatedImport}, }; -use syntax::ast::HasGenericArgs; use syntax::Edition; +use syntax::ast::HasGenericArgs; use syntax::{ - ast, - ast::{make, HasArgList}, - AstNode, NodeOrToken, + AstNode, ast, + ast::{HasArgList, make}, }; use crate::{ + AssistId, GroupLabel, assist_context::{AssistContext, Assists}, handlers::auto_import::find_importable_node, - AssistId, AssistKind, GroupLabel, }; // Assist: qualify_path @@ -39,7 +38,7 @@ use crate::{ // # pub mod std { pub mod collections { pub struct HashMap { } } } // ``` pub(crate) fn qualify_path(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { - let (import_assets, syntax_under_caret) = find_importable_node(ctx)?; + let (import_assets, syntax_under_caret, expected) = find_importable_node(ctx)?; let cfg = ctx.config.import_path_config(); let mut proposed_imports: Vec<_> = @@ -48,64 +47,57 @@ pub(crate) fn qualify_path(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option return None; } + let range = ctx.sema.original_range(&syntax_under_caret).range; + let current_module = ctx.sema.scope(&syntax_under_caret).map(|scope| scope.module()); + let candidate = import_assets.import_candidate(); - let qualify_candidate = match syntax_under_caret.clone() { - NodeOrToken::Node(syntax_under_caret) => match candidate { - ImportCandidate::Path(candidate) if !candidate.qualifier.is_empty() => { - cov_mark::hit!(qualify_path_qualifier_start); - let path = ast::Path::cast(syntax_under_caret)?; - let (prev_segment, segment) = (path.qualifier()?.segment()?, path.segment()?); - QualifyCandidate::QualifierStart(segment, prev_segment.generic_arg_list()) - } - ImportCandidate::Path(_) => { - cov_mark::hit!(qualify_path_unqualified_name); - let path = ast::Path::cast(syntax_under_caret)?; - let generics = path.segment()?.generic_arg_list(); - QualifyCandidate::UnqualifiedName(generics) - } - ImportCandidate::TraitAssocItem(_) => { - cov_mark::hit!(qualify_path_trait_assoc_item); - let path = ast::Path::cast(syntax_under_caret)?; - let (qualifier, segment) = (path.qualifier()?, path.segment()?); - QualifyCandidate::TraitAssocItem(qualifier, segment) - } - ImportCandidate::TraitMethod(_) => { - cov_mark::hit!(qualify_path_trait_method); - let mcall_expr = ast::MethodCallExpr::cast(syntax_under_caret)?; - QualifyCandidate::TraitMethod(ctx.sema.db, mcall_expr) - } - }, - // derive attribute path - NodeOrToken::Token(_) => QualifyCandidate::UnqualifiedName(None), + let qualify_candidate = match candidate { + ImportCandidate::Path(candidate) if !candidate.qualifier.is_empty() => { + cov_mark::hit!(qualify_path_qualifier_start); + let path = ast::Path::cast(syntax_under_caret)?; + let (prev_segment, segment) = (path.qualifier()?.segment()?, path.segment()?); + QualifyCandidate::QualifierStart(segment, prev_segment.generic_arg_list()) + } + ImportCandidate::Path(_) => { + cov_mark::hit!(qualify_path_unqualified_name); + let path = ast::Path::cast(syntax_under_caret)?; + let generics = path.segment()?.generic_arg_list(); + QualifyCandidate::UnqualifiedName(generics) + } + ImportCandidate::TraitAssocItem(_) => { + cov_mark::hit!(qualify_path_trait_assoc_item); + let path = ast::Path::cast(syntax_under_caret)?; + let (qualifier, segment) = (path.qualifier()?, path.segment()?); + QualifyCandidate::TraitAssocItem(qualifier, segment) + } + ImportCandidate::TraitMethod(_) => { + cov_mark::hit!(qualify_path_trait_method); + let mcall_expr = ast::MethodCallExpr::cast(syntax_under_caret)?; + QualifyCandidate::TraitMethod(ctx.sema.db, mcall_expr) + } }; // we aren't interested in different namespaces proposed_imports.sort_by(|a, b| a.import_path.cmp(&b.import_path)); proposed_imports.dedup_by(|a, b| a.import_path == b.import_path); - let range = match &syntax_under_caret { - NodeOrToken::Node(node) => ctx.sema.original_range(node).range, - NodeOrToken::Token(token) => token.text_range(), - }; - let current_module = ctx - .sema - .scope(&match syntax_under_caret { - NodeOrToken::Node(node) => node.clone(), - NodeOrToken::Token(t) => t.parent()?, - }) - .map(|scope| scope.module()); let current_edition = current_module.map(|it| it.krate().edition(ctx.db())).unwrap_or(Edition::CURRENT); // prioritize more relevant imports proposed_imports.sort_by_key(|import| { - Reverse(super::auto_import::relevance_score(ctx, import, current_module.as_ref())) + Reverse(super::auto_import::relevance_score( + ctx, + import, + expected.as_ref(), + current_module.as_ref(), + )) }); let group_label = group_label(candidate); for import in proposed_imports { acc.add_group( &group_label, - AssistId("qualify_path", AssistKind::QuickFix), + AssistId::quick_fix("qualify_path"), label(ctx.db(), candidate, &import, current_edition), range, |builder| { @@ -354,7 +346,7 @@ pub mod PubMod3 { } "#, r#" -PubMod3::PubStruct +PubMod1::PubStruct pub mod PubMod1 { pub struct PubStruct; diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/raw_string.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/raw_string.rs index 5a197f23d0e3a..94b49c5df0915 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/raw_string.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/raw_string.rs @@ -1,8 +1,11 @@ use std::borrow::Cow; -use syntax::{ast, ast::IsString, AstToken, TextRange, TextSize}; +use syntax::{AstToken, TextRange, TextSize, ast, ast::IsString}; -use crate::{utils::required_hashes, AssistContext, AssistId, AssistKind, Assists}; +use crate::{ + AssistContext, AssistId, Assists, + utils::{required_hashes, string_suffix}, +}; // Assist: make_raw_string // @@ -28,17 +31,20 @@ pub(crate) fn make_raw_string(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opt let value = token.value().ok()?; let target = token.syntax().text_range(); acc.add( - AssistId("make_raw_string", AssistKind::RefactorRewrite), + AssistId::refactor_rewrite("make_raw_string"), "Rewrite as raw string", target, |edit| { let hashes = "#".repeat(required_hashes(&value).max(1)); + let range = token.syntax().text_range(); + let suffix = string_suffix(token.text()).unwrap_or_default(); + let range = TextRange::new(range.start(), range.end() - TextSize::of(suffix)); if matches!(value, Cow::Borrowed(_)) { // Avoid replacing the whole string to better position the cursor. - edit.insert(token.syntax().text_range().start(), format!("r{hashes}")); - edit.insert(token.syntax().text_range().end(), hashes); + edit.insert(range.start(), format!("r{hashes}")); + edit.insert(range.end(), hashes); } else { - edit.replace(token.syntax().text_range(), format!("r{hashes}\"{value}\"{hashes}")); + edit.replace(range, format!("r{hashes}\"{value}\"{hashes}")); } }, ) @@ -67,21 +73,25 @@ pub(crate) fn make_usual_string(acc: &mut Assists, ctx: &AssistContext<'_>) -> O let value = token.value().ok()?; let target = token.syntax().text_range(); acc.add( - AssistId("make_usual_string", AssistKind::RefactorRewrite), + AssistId::refactor_rewrite("make_usual_string"), "Rewrite as regular string", target, |edit| { // parse inside string to escape `"` let escaped = value.escape_default().to_string(); + let suffix = string_suffix(token.text()).unwrap_or_default(); if let Some(offsets) = token.quote_offsets() { if token.text()[offsets.contents - token.syntax().text_range().start()] == escaped { + let end_quote = offsets.quotes.1; + let end_quote = + TextRange::new(end_quote.start(), end_quote.end() - TextSize::of(suffix)); edit.replace(offsets.quotes.0, "\""); - edit.replace(offsets.quotes.1, "\""); + edit.replace(end_quote, "\""); return; } } - edit.replace(token.syntax().text_range(), format!("\"{escaped}\"")); + edit.replace(token.syntax().text_range(), format!("\"{escaped}\"{suffix}")); }, ) } @@ -108,9 +118,10 @@ pub(crate) fn add_hash(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> } let text_range = token.syntax().text_range(); let target = text_range; - acc.add(AssistId("add_hash", AssistKind::Refactor), "Add #", target, |edit| { + acc.add(AssistId::refactor("add_hash"), "Add #", target, |edit| { + let suffix = string_suffix(token.text()).unwrap_or_default(); edit.insert(text_range.start() + TextSize::of('r'), "#"); - edit.insert(text_range.end(), "#"); + edit.insert(text_range.end() - TextSize::of(suffix), "#"); }) } @@ -150,9 +161,13 @@ pub(crate) fn remove_hash(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option< return None; } - acc.add(AssistId("remove_hash", AssistKind::RefactorRewrite), "Remove #", text_range, |edit| { + acc.add(AssistId::refactor_rewrite("remove_hash"), "Remove #", text_range, |edit| { + let suffix = string_suffix(text).unwrap_or_default(); edit.delete(TextRange::at(text_range.start() + TextSize::of('r'), TextSize::of('#'))); - edit.delete(TextRange::new(text_range.end() - TextSize::of('#'), text_range.end())); + edit.delete( + TextRange::new(text_range.end() - TextSize::of('#'), text_range.end()) + - TextSize::of(suffix), + ); }) } @@ -262,6 +277,23 @@ string"###; ) } + #[test] + fn make_raw_string_has_suffix() { + check_assist( + make_raw_string, + r#" + fn f() { + let s = $0"random string"i32; + } + "#, + r##" + fn f() { + let s = r#"random string"#i32; + } + "##, + ) + } + #[test] fn make_raw_string_not_works_on_partial_string() { check_assist_not_applicable( @@ -316,6 +348,23 @@ string"###; ) } + #[test] + fn add_hash_has_suffix_works() { + check_assist( + add_hash, + r#" + fn f() { + let s = $0r"random string"i32; + } + "#, + r##" + fn f() { + let s = r#"random string"#i32; + } + "##, + ) + } + #[test] fn add_more_hash_works() { check_assist( @@ -333,6 +382,23 @@ string"###; ) } + #[test] + fn add_more_hash_has_suffix_works() { + check_assist( + add_hash, + r##" + fn f() { + let s = $0r#"random"string"#i32; + } + "##, + r###" + fn f() { + let s = r##"random"string"##i32; + } + "###, + ) + } + #[test] fn add_hash_not_works() { check_assist_not_applicable( @@ -367,6 +433,15 @@ string"###; ) } + #[test] + fn remove_hash_has_suffix_works() { + check_assist( + remove_hash, + r##"fn f() { let s = $0r#"random string"#i32; }"##, + r#"fn f() { let s = r"random string"i32; }"#, + ) + } + #[test] fn cant_remove_required_hash() { cov_mark::check!(cant_remove_required_hash); @@ -397,6 +472,23 @@ string"###; ) } + #[test] + fn remove_more_hash_has_suffix_works() { + check_assist( + remove_hash, + r###" + fn f() { + let s = $0r##"random string"##i32; + } + "###, + r##" + fn f() { + let s = r#"random string"#i32; + } + "##, + ) + } + #[test] fn remove_hash_does_not_work() { check_assist_not_applicable(remove_hash, r#"fn f() { let s = $0"random string"; }"#); @@ -437,6 +529,23 @@ string"###; ) } + #[test] + fn make_usual_string_has_suffix_works() { + check_assist( + make_usual_string, + r##" + fn f() { + let s = $0r#"random string"#i32; + } + "##, + r#" + fn f() { + let s = "random string"i32; + } + "#, + ) + } + #[test] fn make_usual_string_with_quote_works() { check_assist( @@ -471,6 +580,23 @@ string"###; ) } + #[test] + fn make_usual_string_more_hash_has_suffix_works() { + check_assist( + make_usual_string, + r###" + fn f() { + let s = $0r##"random string"##i32; + } + "###, + r##" + fn f() { + let s = "random string"i32; + } + "##, + ) + } + #[test] fn make_usual_string_not_works() { check_assist_not_applicable( diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_dbg.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_dbg.rs index 1f57f7d3d3765..52ace03f3cfee 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_dbg.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_dbg.rs @@ -1,10 +1,11 @@ use itertools::Itertools; use syntax::{ - ast::{self, make, AstNode, AstToken}, - match_ast, ted, Edition, NodeOrToken, SyntaxElement, TextRange, TextSize, T, + Edition, NodeOrToken, SyntaxElement, T, TextRange, TextSize, + ast::{self, AstNode, AstToken, make}, + match_ast, ted, }; -use crate::{AssistContext, AssistId, AssistKind, Assists}; +use crate::{AssistContext, AssistId, Assists}; // Assist: remove_dbg // @@ -41,7 +42,7 @@ pub(crate) fn remove_dbg(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<( macro_calls.into_iter().filter_map(compute_dbg_replacement).collect::>(); acc.add( - AssistId("remove_dbg", AssistKind::QuickFix), + AssistId::quick_fix("remove_dbg"), "Remove dbg!()", replacements.iter().map(|&(range, _)| range).reduce(|acc, range| acc.cover(range))?, |builder| { @@ -73,7 +74,7 @@ fn compute_dbg_replacement(macro_expr: ast::MacroExpr) -> Option<(TextRange, Opt } let mac_input = tt.syntax().children_with_tokens().skip(1).take_while(|it| *it != r_delim); - let input_expressions = mac_input.group_by(|tok| tok.kind() == T![,]); + let input_expressions = mac_input.chunk_by(|tok| tok.kind() == T![,]); let input_expressions = input_expressions .into_iter() .filter_map(|(is_sep, group)| (!is_sep).then_some(group)) @@ -145,7 +146,7 @@ fn compute_dbg_replacement(macro_expr: ast::MacroExpr) -> Option<(TextRange, Opt None => false, }; let expr = replace_nested_dbgs(expr.clone()); - let expr = if wrap { make::expr_paren(expr) } else { expr.clone_subtree() }; + let expr = if wrap { make::expr_paren(expr).into() } else { expr.clone_subtree() }; (macro_call.syntax().text_range(), Some(expr)) } // dbg!(expr0, expr1, ...) diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_mut.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_mut.rs index 43740a5a6d5c7..b07a361adf48e 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_mut.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_mut.rs @@ -1,6 +1,6 @@ use syntax::{SyntaxKind, T}; -use crate::{AssistContext, AssistId, AssistKind, Assists}; +use crate::{AssistContext, AssistId, Assists}; // Assist: remove_mut // @@ -21,18 +21,13 @@ pub(crate) fn remove_mut(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<( let mut_token = ctx.find_token_syntax_at_offset(T![mut])?; let target = mut_token.text_range(); - acc.add( - AssistId("remove_mut", AssistKind::Refactor), - "Remove `mut` keyword", - target, - |builder| { - let mut editor = builder.make_editor(&mut_token.parent().unwrap()); - match mut_token.next_token() { - Some(it) if it.kind() == SyntaxKind::WHITESPACE => editor.delete(it), - _ => (), - } - editor.delete(mut_token); - builder.add_file_edits(ctx.file_id(), editor); - }, - ) + acc.add(AssistId::refactor("remove_mut"), "Remove `mut` keyword", target, |builder| { + let mut editor = builder.make_editor(&mut_token.parent().unwrap()); + match mut_token.next_token() { + Some(it) if it.kind() == SyntaxKind::WHITESPACE => editor.delete(it), + _ => (), + } + editor.delete(mut_token); + builder.add_file_edits(ctx.vfs_file_id(), editor); + }) } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_parentheses.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_parentheses.rs index e7beb23bf8e7f..d514c1c291583 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_parentheses.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_parentheses.rs @@ -1,10 +1,10 @@ use syntax::{ + AstNode, SyntaxKind, T, ast::{self, syntax_factory::SyntaxFactory}, syntax_editor::Position, - AstNode, SyntaxKind, T, }; -use crate::{AssistContext, AssistId, AssistKind, Assists}; +use crate::{AssistContext, AssistId, Assists}; // Assist: remove_parentheses // @@ -40,7 +40,7 @@ pub(crate) fn remove_parentheses(acc: &mut Assists, ctx: &AssistContext<'_>) -> let target = parens.syntax().text_range(); acc.add( - AssistId("remove_parentheses", AssistKind::Refactor), + AssistId::refactor("remove_parentheses"), "Remove redundant parentheses", target, |builder| { @@ -54,12 +54,12 @@ pub(crate) fn remove_parentheses(acc: &mut Assists, ctx: &AssistContext<'_>) -> None => false, }; if need_to_add_ws { - let make = SyntaxFactory::new(); + let make = SyntaxFactory::with_mappings(); editor.insert(Position::before(parens.syntax()), make.whitespace(" ")); editor.add_mappings(make.finish_with_mappings()); } editor.replace(parens.syntax(), expr.syntax()); - builder.add_file_edits(ctx.file_id(), editor); + builder.add_file_edits(ctx.vfs_file_id(), editor); }, ) } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_underscore.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_underscore.rs new file mode 100644 index 0000000000000..912e1936b593e --- /dev/null +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_underscore.rs @@ -0,0 +1,191 @@ +use ide_db::{ + assists::AssistId, + defs::{Definition, NameClass, NameRefClass}, +}; +use syntax::{AstNode, ast}; + +use crate::{AssistContext, Assists}; + +// Assist: remove_underscore_from_used_variables +// +// Removes underscore from used variables. +// +// ``` +// fn main() { +// let mut _$0foo = 1; +// _foo = 2; +// } +// ``` +// -> +// ``` +// fn main() { +// let mut foo = 1; +// foo = 2; +// } +// ``` +pub(crate) fn remove_underscore(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { + let (text, text_range, def) = if let Some(name_ref) = ctx.find_node_at_offset::() { + let text = name_ref.text(); + if !text.starts_with('_') { + return None; + } + + let def = match NameClass::classify(&ctx.sema, &name_ref)? { + NameClass::Definition(def @ Definition::Local(_)) => def, + NameClass::PatFieldShorthand { local_def, .. } => Definition::Local(local_def), + _ => return None, + }; + (text.to_owned(), name_ref.syntax().text_range(), def) + } else if let Some(name_ref) = ctx.find_node_at_offset::() { + let text = name_ref.text(); + if !text.starts_with('_') { + return None; + } + let def = match NameRefClass::classify(&ctx.sema, &name_ref)? { + NameRefClass::Definition(def @ Definition::Local(_), _) => def, + NameRefClass::FieldShorthand { local_ref, .. } => Definition::Local(local_ref), + _ => return None, + }; + (text.to_owned(), name_ref.syntax().text_range(), def) + } else { + return None; + }; + + if !def.usages(&ctx.sema).at_least_one() { + return None; + } + + let new_name = text.trim_start_matches('_'); + acc.add( + AssistId::refactor("remove_underscore_from_used_variables"), + "Remove underscore from a used variable", + text_range, + |builder| { + let changes = def.rename(&ctx.sema, new_name).unwrap(); + builder.source_change = changes; + }, + ) +} + +#[cfg(test)] +mod tests { + use crate::tests::{check_assist, check_assist_not_applicable}; + + use super::*; + + #[test] + fn remove_underscore_from_used_variable() { + check_assist( + remove_underscore, + r#" +fn main() { + let mut _$0foo = 1; + _foo = 2; +} +"#, + r#" +fn main() { + let mut foo = 1; + foo = 2; +} +"#, + ); + } + + #[test] + fn not_applicable_for_unused() { + check_assist_not_applicable( + remove_underscore, + r#" +fn main() { + let _$0unused = 1; +} +"#, + ); + } + + #[test] + fn not_applicable_for_no_underscore() { + check_assist_not_applicable( + remove_underscore, + r#" +fn main() { + let f$0oo = 1; + foo = 2; +} +"#, + ); + } + + #[test] + fn remove_multiple_underscores() { + check_assist( + remove_underscore, + r#" +fn main() { + let mut _$0_foo = 1; + __foo = 2; +} +"#, + r#" +fn main() { + let mut foo = 1; + foo = 2; +} +"#, + ); + } + + #[test] + fn remove_underscore_on_usage() { + check_assist( + remove_underscore, + r#" +fn main() { + let mut _foo = 1; + _$0foo = 2; +} +"#, + r#" +fn main() { + let mut foo = 1; + foo = 2; +} +"#, + ); + } + + #[test] + fn remove_underscore_in_function_parameter_usage() { + check_assist( + remove_underscore, + r#" +fn foo(_foo: i32) { + let bar = _$0foo + 1; +} +"#, + r#" +fn foo(foo: i32) { + let bar = foo + 1; +} +"#, + ) + } + + #[test] + fn remove_underscore_in_function_parameter() { + check_assist( + remove_underscore, + r#" +fn foo(_$0foo: i32) { + let bar = _foo + 1; +} +"#, + r#" +fn foo(foo: i32) { + let bar = foo + 1; +} +"#, + ) + } +} diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_unused_imports.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_unused_imports.rs index 0570b447782ec..1baf814ca6826 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_unused_imports.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_unused_imports.rs @@ -1,18 +1,18 @@ use std::collections::hash_map::Entry; -use hir::{FileRange, HirFileIdExt, InFile, InRealFile, Module, ModuleSource}; +use hir::{FileRange, InFile, InRealFile, Module, ModuleSource}; use ide_db::text_edit::TextRange; use ide_db::{ + FxHashMap, RootDatabase, defs::Definition, search::{FileReference, ReferenceCategory, SearchScope}, - FxHashMap, RootDatabase, }; use syntax::{ - ast::{self, Rename}, AstNode, + ast::{self, Rename}, }; -use crate::{AssistContext, AssistId, AssistKind, Assists}; +use crate::{AssistContext, AssistId, Assists}; // Assist: remove_unused_imports // @@ -126,7 +126,7 @@ pub(crate) fn remove_unused_imports(acc: &mut Assists, ctx: &AssistContext<'_>) // Peek so we terminate early if an unused use is found. Only do the rest of the work if the user selects the assist. if unused.peek().is_some() { acc.add( - AssistId("remove_unused_imports", AssistKind::QuickFix), + AssistId::quick_fix("remove_unused_imports"), "Remove all the unused imports", selected_el.text_range(), |builder| { diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_unused_param.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_unused_param.rs index 5ddb17b20729a..8b824c7c7f497 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_unused_param.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_unused_param.rs @@ -1,16 +1,15 @@ -use ide_db::{defs::Definition, search::FileReference, EditionedFileId}; +use ide_db::{EditionedFileId, defs::Definition, search::FileReference}; use syntax::{ + AstNode, SourceFile, SyntaxElement, SyntaxKind, SyntaxNode, T, TextRange, algo::{find_node_at_range, least_common_ancestor_element}, ast::{self, HasArgList}, syntax_editor::Element, - AstNode, SourceFile, SyntaxElement, SyntaxKind, SyntaxNode, TextRange, T, }; use SyntaxKind::WHITESPACE; use crate::{ - assist_context::SourceChangeBuilder, utils::next_prev, AssistContext, AssistId, AssistKind, - Assists, + AssistContext, AssistId, Assists, assist_context::SourceChangeBuilder, utils::next_prev, }; // Assist: remove_unused_param @@ -77,7 +76,7 @@ pub(crate) fn remove_unused_param(acc: &mut Assists, ctx: &AssistContext<'_>) -> } let parent = param.syntax().parent()?; acc.add( - AssistId("remove_unused_param", AssistKind::Refactor), + AssistId::refactor("remove_unused_param"), "Remove unused parameter", param.syntax().text_range(), |builder| { @@ -89,7 +88,7 @@ pub(crate) fn remove_unused_param(acc: &mut Assists, ctx: &AssistContext<'_>) -> for (file_id, references) in fn_def.usages(&ctx.sema).all() { process_usages(ctx, builder, file_id, references, param_position, is_self_present); } - builder.add_file_edits(ctx.file_id(), editor); + builder.add_file_edits(ctx.vfs_file_id(), editor); }, ) } @@ -97,12 +96,14 @@ pub(crate) fn remove_unused_param(acc: &mut Assists, ctx: &AssistContext<'_>) -> fn process_usages( ctx: &AssistContext<'_>, builder: &mut SourceChangeBuilder, - file_id: EditionedFileId, + editioned_file_id: EditionedFileId, references: Vec, arg_to_remove: usize, is_self_present: bool, ) { - let source_file = ctx.sema.parse(file_id); + let source_file = ctx.sema.parse(editioned_file_id); + let file_id = editioned_file_id.file_id(ctx.db()); + builder.edit_file(file_id); let possible_ranges = references .into_iter() .filter_map(|usage| process_usage(&source_file, usage, arg_to_remove, is_self_present)); diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/reorder_fields.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/reorder_fields.rs index a79a82be45079..990677d372139 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/reorder_fields.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/reorder_fields.rs @@ -1,9 +1,9 @@ use either::Either; use ide_db::FxHashMap; use itertools::Itertools; -use syntax::{ast, syntax_editor::SyntaxEditor, AstNode, SmolStr, SyntaxElement, ToSmolStr}; +use syntax::{AstNode, SmolStr, SyntaxElement, ToSmolStr, ast, syntax_editor::SyntaxEditor}; -use crate::{AssistContext, AssistId, AssistKind, Assists}; +use crate::{AssistContext, AssistId, Assists}; // Assist: reorder_fields // @@ -67,7 +67,7 @@ pub(crate) fn reorder_fields(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opti } let target = record.as_ref().either(AstNode::syntax, AstNode::syntax).text_range(); acc.add( - AssistId("reorder_fields", AssistKind::RefactorRewrite), + AssistId::refactor_rewrite("reorder_fields"), "Reorder record fields", target, |builder| { @@ -82,7 +82,7 @@ pub(crate) fn reorder_fields(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opti } } - builder.add_file_edits(ctx.file_id(), editor); + builder.add_file_edits(ctx.vfs_file_id(), editor); }, ) } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/reorder_impl_items.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/reorder_impl_items.rs index c3404173eafe6..0ad5ec9d44246 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/reorder_impl_items.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/reorder_impl_items.rs @@ -2,11 +2,11 @@ use hir::{PathResolution, Semantics}; use ide_db::{FxHashMap, RootDatabase}; use itertools::Itertools; use syntax::{ - ast::{self, HasName}, AstNode, SyntaxElement, + ast::{self, HasName}, }; -use crate::{AssistContext, AssistId, AssistKind, Assists}; +use crate::{AssistContext, AssistId, Assists}; // Assist: reorder_impl_items // @@ -95,7 +95,7 @@ pub(crate) fn reorder_impl_items(acc: &mut Assists, ctx: &AssistContext<'_>) -> let target = items.syntax().text_range(); acc.add( - AssistId("reorder_impl_items", AssistKind::RefactorRewrite), + AssistId::refactor_rewrite("reorder_impl_items"), "Sort items by trait definition", target, |builder| { @@ -106,7 +106,7 @@ pub(crate) fn reorder_impl_items(acc: &mut Assists, ctx: &AssistContext<'_>) -> .zip(sorted) .for_each(|(old, new)| editor.replace(old.syntax(), new.syntax())); - builder.add_file_edits(ctx.file_id(), editor); + builder.add_file_edits(ctx.vfs_file_id(), editor); }, ) } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_arith_op.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_arith_op.rs index 4b20b35c44624..6b385a03625b7 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_arith_op.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_arith_op.rs @@ -1,7 +1,7 @@ -use ide_db::assists::{AssistId, AssistKind, GroupLabel}; +use ide_db::assists::{AssistId, GroupLabel}; use syntax::{ - ast::{self, ArithOp, BinaryOp}, AstNode, TextRange, + ast::{self, ArithOp, BinaryOp}, }; use crate::assist_context::{AssistContext, Assists}; @@ -132,7 +132,7 @@ impl ArithKind { ArithKind::Wrapping => "replace_arith_with_wrapping", }; - AssistId(s, AssistKind::RefactorRewrite) + AssistId::refactor_rewrite(s) } fn label(&self) -> &'static str { diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs index 31e828eae2712..806c8fba9ea43 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs @@ -1,20 +1,20 @@ -use hir::{InFile, MacroFileIdExt, ModuleDef}; +use hir::{InFile, ModuleDef}; use ide_db::{helpers::mod_path_to_ast, imports::import_assets::NameToImport, items_locator}; use itertools::Itertools; use syntax::{ - ast::{self, make, AstNode, HasName}, - ted, SyntaxKind::WHITESPACE, T, + ast::{self, AstNode, HasName, make}, + ted::{self, Position}, }; use crate::{ + AssistConfig, AssistId, assist_context::{AssistContext, Assists, SourceChangeBuilder}, utils::{ - add_trait_assoc_items_to_impl, filter_assoc_items, gen_trait_fn_body, generate_trait_impl, - DefaultMethods, IgnoreAssocItems, + DefaultMethods, IgnoreAssocItems, add_trait_assoc_items_to_impl, filter_assoc_items, + gen_trait_fn_body, generate_trait_impl, }, - AssistId, AssistKind, }; // Assist: replace_derive_with_manual_impl @@ -73,12 +73,12 @@ pub(crate) fn replace_derive_with_manual_impl( let current_edition = current_crate.edition(ctx.db()); let found_traits = items_locator::items_with_name( - &ctx.sema, + ctx.db(), current_crate, NameToImport::exact_case_sensitive(path.segments().last()?.to_string()), items_locator::AssocSearchMode::Exclude, ) - .filter_map(|item| match item.into_module_def() { + .filter_map(|(item, _)| match item.into_module_def() { ModuleDef::Trait(trait_) => Some(trait_), _ => None, }) @@ -125,78 +125,105 @@ fn add_assist( let annotated_name = adt.name()?; let label = format!("Convert to manual `impl {replace_trait_path} for {annotated_name}`"); - acc.add( - AssistId("replace_derive_with_manual_impl", AssistKind::Refactor), - label, - target, - |builder| { - let insert_after = ted::Position::after(builder.make_mut(adt.clone()).syntax()); - - let impl_def_with_items = - impl_def_from_trait(&ctx.sema, adt, &annotated_name, trait_, replace_trait_path); - update_attribute(builder, old_derives, old_tree, old_trait_path, attr); + acc.add(AssistId::refactor("replace_derive_with_manual_impl"), label, target, |builder| { + let insert_after = ted::Position::after(builder.make_mut(adt.clone()).syntax()); + let impl_is_unsafe = trait_.map(|s| s.is_unsafe(ctx.db())).unwrap_or(false); + let impl_def_with_items = impl_def_from_trait( + &ctx.sema, + ctx.config, + adt, + &annotated_name, + trait_, + replace_trait_path, + ); + update_attribute(builder, old_derives, old_tree, old_trait_path, attr); - let trait_path = make::ty_path(replace_trait_path.clone()); + let trait_path = make::ty_path(replace_trait_path.clone()); - match (ctx.config.snippet_cap, impl_def_with_items) { - (None, None) => { - let impl_def = generate_trait_impl(adt, trait_path); + match (ctx.config.snippet_cap, impl_def_with_items) { + (None, None) => { + let impl_def = generate_trait_impl(adt, trait_path); + if impl_is_unsafe { + ted::insert( + Position::first_child_of(impl_def.syntax()), + make::token(T![unsafe]), + ); + } - ted::insert_all( - insert_after, - vec![make::tokens::blank_line().into(), impl_def.syntax().clone().into()], + ted::insert_all( + insert_after, + vec![make::tokens::blank_line().into(), impl_def.syntax().clone().into()], + ); + } + (None, Some((impl_def, _))) => { + if impl_is_unsafe { + ted::insert( + Position::first_child_of(impl_def.syntax()), + make::token(T![unsafe]), ); } - (None, Some((impl_def, _))) => { - ted::insert_all( - insert_after, - vec![make::tokens::blank_line().into(), impl_def.syntax().clone().into()], + ted::insert_all( + insert_after, + vec![make::tokens::blank_line().into(), impl_def.syntax().clone().into()], + ); + } + (Some(cap), None) => { + let impl_def = generate_trait_impl(adt, trait_path); + + if impl_is_unsafe { + ted::insert( + Position::first_child_of(impl_def.syntax()), + make::token(T![unsafe]), ); } - (Some(cap), None) => { - let impl_def = generate_trait_impl(adt, trait_path); - if let Some(l_curly) = - impl_def.assoc_item_list().and_then(|it| it.l_curly_token()) - { - builder.add_tabstop_after_token(cap, l_curly); - } + if let Some(l_curly) = impl_def.assoc_item_list().and_then(|it| it.l_curly_token()) + { + builder.add_tabstop_after_token(cap, l_curly); + } - ted::insert_all( - insert_after, - vec![make::tokens::blank_line().into(), impl_def.syntax().clone().into()], + ted::insert_all( + insert_after, + vec![make::tokens::blank_line().into(), impl_def.syntax().clone().into()], + ); + } + (Some(cap), Some((impl_def, first_assoc_item))) => { + let mut added_snippet = false; + + if impl_is_unsafe { + ted::insert( + Position::first_child_of(impl_def.syntax()), + make::token(T![unsafe]), ); } - (Some(cap), Some((impl_def, first_assoc_item))) => { - let mut added_snippet = false; - if let ast::AssocItem::Fn(ref func) = first_assoc_item { - if let Some(m) = func.syntax().descendants().find_map(ast::MacroCall::cast) - { - if m.syntax().text() == "todo!()" { - // Make the `todo!()` a placeholder - builder.add_placeholder_snippet(cap, m); - added_snippet = true; - } - } - } - if !added_snippet { - // If we haven't already added a snippet, add a tabstop before the generated function - builder.add_tabstop_before(cap, first_assoc_item); + if let ast::AssocItem::Fn(ref func) = first_assoc_item { + if let Some(m) = func.syntax().descendants().find_map(ast::MacroCall::cast) { + if m.syntax().text() == "todo!()" { + // Make the `todo!()` a placeholder + builder.add_placeholder_snippet(cap, m); + added_snippet = true; + } } + } - ted::insert_all( - insert_after, - vec![make::tokens::blank_line().into(), impl_def.syntax().clone().into()], - ); + if !added_snippet { + // If we haven't already added a snippet, add a tabstop before the generated function + builder.add_tabstop_before(cap, first_assoc_item); } - }; - }, - ) + + ted::insert_all( + insert_after, + vec![make::tokens::blank_line().into(), impl_def.syntax().clone().into()], + ); + } + }; + }) } fn impl_def_from_trait( sema: &hir::Semantics<'_, ide_db::RootDatabase>, + config: &AssistConfig, adt: &ast::Adt, annotated_name: &ast::Name, trait_: Option, @@ -221,7 +248,7 @@ fn impl_def_from_trait( let impl_def = generate_trait_impl(adt, make::ty_path(trait_path.clone())); let first_assoc_item = - add_trait_assoc_items_to_impl(sema, &trait_items, trait_, &impl_def, &target_scope); + add_trait_assoc_items_to_impl(sema, config, &trait_items, trait_, &impl_def, &target_scope); // Generate a default `impl` function body for the derived trait. if let ast::AssocItem::Fn(ref func) = first_assoc_item { @@ -1402,6 +1429,23 @@ impl core::fmt::Debug for Foo { f.debug_struct("Foo").finish() } } +"#, + ) + } + + #[test] + fn unsafeness_of_a_trait_observed() { + check_assist( + replace_derive_with_manual_impl, + r#" +//- minicore: send, derive +#[derive(Sen$0d)] +pub struct Foo; +"#, + r#" +pub struct Foo; + +unsafe impl Send for Foo {$0} "#, ) } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_if_let_with_match.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_if_let_with_match.rs index e324d6eaaad2f..15d3db5e749f0 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_if_let_with_match.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_if_let_with_match.rs @@ -2,19 +2,19 @@ use std::iter::successors; use either::Either; use ide_db::{ + RootDatabase, defs::NameClass, syntax_helpers::node_ext::{is_pattern_cond, single_let}, ty_filter::TryEnum, - RootDatabase, }; use syntax::{ - ast::{self, edit::IndentLevel, edit_in_place::Indent, syntax_factory::SyntaxFactory, HasName}, - AstNode, TextRange, T, + AstNode, T, TextRange, + ast::{self, HasName, edit::IndentLevel, edit_in_place::Indent, syntax_factory::SyntaxFactory}, }; use crate::{ + AssistContext, AssistId, Assists, utils::{does_pat_match_variant, does_pat_variant_nested_or_literal, unwrap_trivial_block}, - AssistContext, AssistId, AssistKind, Assists, }; // Assist: replace_if_let_with_match @@ -101,11 +101,11 @@ pub(crate) fn replace_if_let_with_match(acc: &mut Assists, ctx: &AssistContext<' let let_ = if pat_seen { " let" } else { "" }; acc.add( - AssistId("replace_if_let_with_match", AssistKind::RefactorRewrite), + AssistId::refactor_rewrite("replace_if_let_with_match"), format!("Replace if{let_} with match"), available_range, move |builder| { - let make = SyntaxFactory::new(); + let make = SyntaxFactory::with_mappings(); let match_expr = { let else_arm = make_else_arm(ctx, &make, else_block, &cond_bodies); let make_match_arm = |(pat, body): (_, ast::BlockExpr)| { @@ -142,7 +142,7 @@ pub(crate) fn replace_if_let_with_match(acc: &mut Assists, ctx: &AssistContext<' let mut editor = builder.make_editor(if_expr.syntax()); editor.replace(if_expr.syntax(), expr.syntax()); editor.add_mappings(make.finish_with_mappings()); - builder.add_file_edits(ctx.file_id(), editor); + builder.add_file_edits(ctx.vfs_file_id(), editor); }, ) } @@ -249,11 +249,11 @@ pub(crate) fn replace_match_with_if_let(acc: &mut Assists, ctx: &AssistContext<' _ => " let", }; acc.add( - AssistId("replace_match_with_if_let", AssistKind::RefactorRewrite), + AssistId::refactor_rewrite("replace_match_with_if_let"), format!("Replace match with if{let_}"), match_expr.syntax().text_range(), move |builder| { - let make = SyntaxFactory::new(); + let make = SyntaxFactory::with_mappings(); let make_block_expr = |expr: ast::Expr| { // Blocks with modifiers (unsafe, async, etc.) are parsed as BlockExpr, but are // formatted without enclosing braces. If we encounter such block exprs, @@ -291,7 +291,7 @@ pub(crate) fn replace_match_with_if_let(acc: &mut Assists, ctx: &AssistContext<' let mut editor = builder.make_editor(match_expr.syntax()); editor.replace(match_expr.syntax(), if_let_expr.syntax()); editor.add_mappings(make.finish_with_mappings()); - builder.add_file_edits(ctx.file_id(), editor); + builder.add_file_edits(ctx.vfs_file_id(), editor); }, ) } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_is_method_with_if_let_method.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_is_method_with_if_let_method.rs index 47972ff619acb..e933bcc40dbbb 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_is_method_with_if_let_method.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_is_method_with_if_let_method.rs @@ -1,10 +1,10 @@ use ide_db::syntax_helpers::suggest_name; use syntax::{ - ast::{self, make, AstNode}, + ast::{self, AstNode, make}, ted, }; -use crate::{AssistContext, AssistId, AssistKind, Assists}; +use crate::{AssistContext, AssistId, Assists}; // Assist: replace_is_some_with_if_let_some // @@ -56,7 +56,7 @@ pub(crate) fn replace_is_method_with_if_let_method( }; acc.add( - AssistId(assist_id, AssistKind::RefactorRewrite), + AssistId::refactor_rewrite(assist_id), message, call_expr.syntax().text_range(), |edit| { diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_let_with_if_let.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_let_with_if_let.rs index c071d3022d251..90f4ff7ad2511 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_let_with_if_let.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_let_with_if_let.rs @@ -1,10 +1,10 @@ use ide_db::ty_filter::TryEnum; use syntax::{ - ast::{self, edit::IndentLevel, edit_in_place::Indent, syntax_factory::SyntaxFactory}, AstNode, T, + ast::{self, edit::IndentLevel, edit_in_place::Indent, syntax_factory::SyntaxFactory}, }; -use crate::{AssistContext, AssistId, AssistKind, Assists}; +use crate::{AssistContext, AssistId, Assists}; // Assist: replace_let_with_if_let // @@ -38,31 +38,43 @@ pub(crate) fn replace_let_with_if_let(acc: &mut Assists, ctx: &AssistContext<'_> let target = let_kw.text_range(); acc.add( - AssistId("replace_let_with_if_let", AssistKind::RefactorRewrite), + AssistId::refactor_rewrite("replace_let_with_if_let"), "Replace let with if let", target, |builder| { let mut editor = builder.make_editor(let_stmt.syntax()); - let make = SyntaxFactory::new(); + let make = SyntaxFactory::with_mappings(); let ty = ctx.sema.type_of_expr(&init); - let happy_variant = ty - .and_then(|ty| TryEnum::from_ty(&ctx.sema, &ty.adjusted())) - .map(|it| it.happy_case()); - let pat = match happy_variant { - None => original_pat, - Some(var_name) => { - make.tuple_struct_pat(make.ident_path(var_name), [original_pat]).into() + let pat = if let_stmt.let_else().is_some() { + // Do not add the wrapper type that implements `Try`, + // since the statement already wraps the pattern. + original_pat + } else { + let happy_variant = ty + .and_then(|ty| TryEnum::from_ty(&ctx.sema, &ty.adjusted())) + .map(|it| it.happy_case()); + match happy_variant { + None => original_pat, + Some(var_name) => { + make.tuple_struct_pat(make.ident_path(var_name), [original_pat]).into() + } } }; let block = make.block_expr([], None); block.indent(IndentLevel::from_node(let_stmt.syntax())); - let if_expr = make.expr_if(make.expr_let(pat, init).into(), block, None); + let if_expr = make.expr_if( + make.expr_let(pat, init).into(), + block, + let_stmt + .let_else() + .and_then(|let_else| let_else.block_expr().map(ast::ElseBranch::from)), + ); let if_stmt = make.expr_stmt(if_expr.into()); editor.replace(let_stmt.syntax(), if_stmt.syntax()); editor.add_mappings(make.finish_with_mappings()); - builder.add_file_edits(ctx.file_id(), editor); + builder.add_file_edits(ctx.vfs_file_id(), editor); }, ) } @@ -90,6 +102,27 @@ enum E { X(T), Y(T) } fn main() { if let x = E::X(92) { } +} + ", + ) + } + + #[test] + fn replace_let_else() { + check_assist( + replace_let_with_if_let, + r" +//- minicore: option +fn main() { + let a = Some(1); + $0let Some(_) = a else { unreachable!() }; +} + ", + r" +fn main() { + let a = Some(1); + if let Some(_) = a { + } else { unreachable!() } } ", ) diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_method_eager_lazy.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_method_eager_lazy.rs index 12d025f07594e..14161d9fd91c3 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_method_eager_lazy.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_method_eager_lazy.rs @@ -1,7 +1,7 @@ -use ide_db::assists::{AssistId, AssistKind}; +use ide_db::assists::AssistId; use syntax::{ - ast::{self, make, Expr, HasArgList}, AstNode, + ast::{self, Expr, HasArgList, make}, }; use crate::{AssistContext, Assists}; @@ -60,7 +60,7 @@ pub(crate) fn replace_with_lazy_method(acc: &mut Assists, ctx: &AssistContext<'_ )?; acc.add( - AssistId("replace_with_lazy_method", AssistKind::RefactorRewrite), + AssistId::refactor_rewrite("replace_with_lazy_method"), format!("Replace {method_name} with {method_name_lazy}"), call.syntax().text_range(), |builder| { @@ -74,16 +74,12 @@ pub(crate) fn replace_with_lazy_method(acc: &mut Assists, ctx: &AssistContext<'_ fn into_closure(param: &Expr) -> Expr { (|| { if let ast::Expr::CallExpr(call) = param { - if call.arg_list()?.args().count() == 0 { - Some(call.expr()?) - } else { - None - } + if call.arg_list()?.args().count() == 0 { Some(call.expr()?) } else { None } } else { None } })() - .unwrap_or_else(|| make::expr_closure(None, param.clone())) + .unwrap_or_else(|| make::expr_closure(None, param.clone()).into()) } // Assist: replace_with_eager_method @@ -140,7 +136,7 @@ pub(crate) fn replace_with_eager_method(acc: &mut Assists, ctx: &AssistContext<' )?; acc.add( - AssistId("replace_with_eager_method", AssistKind::RefactorRewrite), + AssistId::refactor_rewrite("replace_with_eager_method"), format!("Replace {method_name} with {method_name_eager}"), call.syntax().text_range(), |builder| { @@ -154,16 +150,12 @@ pub(crate) fn replace_with_eager_method(acc: &mut Assists, ctx: &AssistContext<' fn into_call(param: &Expr) -> Expr { (|| { if let ast::Expr::ClosureExpr(closure) = param { - if closure.param_list()?.params().count() == 0 { - Some(closure.body()?) - } else { - None - } + if closure.param_list()?.params().count() == 0 { Some(closure.body()?) } else { None } } else { None } })() - .unwrap_or_else(|| make::expr_call(param.clone(), make::arg_list(Vec::new()))) + .unwrap_or_else(|| make::expr_call(param.clone(), make::arg_list(Vec::new())).into()) } #[cfg(test)] diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_named_generic_with_impl.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_named_generic_with_impl.rs index 26fd887cc99e9..3cd7b58f4ddd4 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_named_generic_with_impl.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_named_generic_with_impl.rs @@ -1,19 +1,20 @@ use hir::{FileRange, Semantics}; use ide_db::text_edit::TextRange; use ide_db::{ + EditionedFileId, RootDatabase, defs::Definition, search::{SearchScope, UsageSearchResult}, - EditionedFileId, RootDatabase, }; use syntax::{ + AstNode, ast::{ - self, make::impl_trait_type, HasGenericParams, HasName, HasTypeBounds, Name, NameLike, - PathType, + self, HasGenericParams, HasName, HasTypeBounds, Name, NameLike, PathType, + make::impl_trait_type, }, - match_ast, ted, AstNode, + match_ast, ted, }; -use crate::{AssistContext, AssistId, AssistKind, Assists}; +use crate::{AssistContext, AssistId, Assists}; // Assist: replace_named_generic_with_impl // @@ -69,7 +70,7 @@ pub(crate) fn replace_named_generic_with_impl( let target = type_param.syntax().text_range(); acc.add( - AssistId("replace_named_generic_with_impl", AssistKind::RefactorRewrite), + AssistId::refactor_rewrite("replace_named_generic_with_impl"), "Replace named generic with impl trait", target, |edit| { diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_qualified_name_with_use.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_qualified_name_with_use.rs index f026b3230dd6d..c067747bc1bb1 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_qualified_name_with_use.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_qualified_name_with_use.rs @@ -1,14 +1,15 @@ use hir::AsAssocItem; use ide_db::{ helpers::mod_path_to_ast, - imports::insert_use::{insert_use, ImportScope}, + imports::insert_use::{ImportScope, insert_use}, }; use syntax::{ - ast::{self, make, HasGenericArgs}, - match_ast, ted, AstNode, Edition, SyntaxNode, + AstNode, Edition, SyntaxNode, + ast::{self, HasGenericArgs, make}, + match_ast, ted, }; -use crate::{AssistContext, AssistId, AssistKind, Assists}; +use crate::{AssistContext, AssistId, Assists}; // Assist: replace_qualified_name_with_use // @@ -74,7 +75,7 @@ pub(crate) fn replace_qualified_name_with_use( let scope = ImportScope::find_insert_use_container(original_path.syntax(), &ctx.sema)?; let target = original_path.syntax().text_range(); acc.add( - AssistId("replace_qualified_name_with_use", AssistKind::RefactorRewrite), + AssistId::refactor_rewrite("replace_qualified_name_with_use"), "Replace qualified path with use", target, |builder| { diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_string_with_char.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_string_with_char.rs index a48b20acbcac8..fb5b234d55987 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_string_with_char.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_string_with_char.rs @@ -1,12 +1,11 @@ use syntax::{ - ast, - ast::IsString, AstToken, SyntaxKind::{CHAR, STRING}, - TextRange, TextSize, + TextRange, TextSize, ast, + ast::IsString, }; -use crate::{AssistContext, AssistId, AssistKind, Assists}; +use crate::{AssistContext, AssistId, Assists, utils::string_suffix}; // Assist: replace_string_with_char // @@ -34,14 +33,16 @@ pub(crate) fn replace_string_with_char(acc: &mut Assists, ctx: &AssistContext<'_ let quote_offsets = token.quote_offsets()?; acc.add( - AssistId("replace_string_with_char", AssistKind::RefactorRewrite), + AssistId::refactor_rewrite("replace_string_with_char"), "Replace string with char", target, |edit| { let (left, right) = quote_offsets.quotes; + let suffix = TextSize::of(string_suffix(token.text()).unwrap_or_default()); + let right = TextRange::new(right.start(), right.end() - suffix); edit.replace(left, '\''); edit.replace(right, '\''); - if value == "'" { + if token.text_without_quotes() == "'" { edit.insert(left.end(), '\\'); } }, @@ -68,16 +69,18 @@ pub(crate) fn replace_char_with_string(acc: &mut Assists, ctx: &AssistContext<'_ let target = token.text_range(); acc.add( - AssistId("replace_char_with_string", AssistKind::RefactorRewrite), + AssistId::refactor_rewrite("replace_char_with_string"), "Replace char with string", target, |edit| { - if token.text() == "'\"'" { - edit.replace(token.text_range(), r#""\"""#); + let suffix = string_suffix(token.text()).unwrap_or_default(); + if token.text().starts_with("'\"'") { + edit.replace(token.text_range(), format!(r#""\""{suffix}"#)); } else { let len = TextSize::of('\''); + let suffix = TextSize::of(suffix); edit.replace(TextRange::at(target.start(), len), '"'); - edit.replace(TextRange::at(target.end() - len, len), '"'); + edit.replace(TextRange::at(target.end() - suffix - len, len), '"'); } }, ) @@ -106,6 +109,23 @@ fn f() { ) } + #[test] + fn replace_string_with_char_has_suffix() { + check_assist( + replace_string_with_char, + r#" +fn f() { + let s = "$0c"i32; +} +"#, + r##" +fn f() { + let s = 'c'i32; +} +"##, + ) + } + #[test] fn replace_string_with_char_assist_with_multi_byte_char() { check_assist( @@ -288,6 +308,40 @@ fn f() { ) } + #[test] + fn replace_char_with_string_quote_has_suffix() { + check_assist( + replace_char_with_string, + r#" +fn f() { + find($0'"'i32); +} +"#, + r#" +fn f() { + find("\""i32); +} +"#, + ) + } + + #[test] + fn replace_char_with_string_escaped_quote_has_suffix() { + check_assist( + replace_char_with_string, + r#" +fn f() { + find($0'\"'i32); +} +"#, + r#" +fn f() { + find("\""i32); +} +"#, + ) + } + #[test] fn replace_string_with_char_quote() { check_assist( @@ -301,6 +355,91 @@ fn f() { fn f() { find('\''); } +"#, + ) + } + + #[test] + fn replace_string_with_escaped_char_quote() { + check_assist( + replace_string_with_char, + r#" +fn f() { + find($0"\'"); +} +"#, + r#" +fn f() { + find('\''); +} +"#, + ) + } + + #[test] + fn replace_string_with_char_quote_has_suffix() { + check_assist( + replace_string_with_char, + r#" +fn f() { + find($0"'"i32); +} +"#, + r#" +fn f() { + find('\''i32); +} +"#, + ) + } + + #[test] + fn replace_string_with_escaped_char_quote_has_suffix() { + check_assist( + replace_string_with_char, + r#" +fn f() { + find($0"\'"i32); +} +"#, + r#" +fn f() { + find('\''i32); +} +"#, + ) + } + + #[test] + fn replace_raw_string_with_char_quote() { + check_assist( + replace_string_with_char, + r#" +fn f() { + find($0r"'"); +} +"#, + r#" +fn f() { + find('\''); +} +"#, + ) + } + + #[test] + fn replace_string_with_code_escaped_char_quote() { + check_assist( + replace_string_with_char, + r#" +fn f() { + find($0"\x27"); +} +"#, + r#" +fn f() { + find('\x27'); +} "#, ) } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_try_expr_with_match.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_try_expr_with_match.rs index 88b50543dda87..c6e864fcfdba8 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_try_expr_with_match.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_try_expr_with_match.rs @@ -1,16 +1,13 @@ use std::iter; -use ide_db::{ - assists::{AssistId, AssistKind}, - ty_filter::TryEnum, -}; +use ide_db::{assists::AssistId, ty_filter::TryEnum}; use syntax::{ + AstNode, T, ast::{ self, edit::{AstNodeEdit, IndentLevel}, make, }, - AstNode, T, }; use crate::assist_context::{AssistContext, Assists}; @@ -48,7 +45,7 @@ pub(crate) fn replace_try_expr_with_match( let target = qm_kw_parent.syntax().text_range(); acc.add( - AssistId("replace_try_expr_with_match", AssistKind::RefactorRewrite), + AssistId::refactor_rewrite("replace_try_expr_with_match"), "Replace try expression with match", target, |edit| { @@ -64,10 +61,13 @@ pub(crate) fn replace_try_expr_with_match( TryEnum::Option => { make::expr_return(Some(make::expr_path(make::ext::ident_path("None")))) } - TryEnum::Result => make::expr_return(Some(make::expr_call( - make::expr_path(make::ext::ident_path("Err")), - make::arg_list(iter::once(make::expr_path(make::ext::ident_path("err")))), - ))), + TryEnum::Result => make::expr_return(Some( + make::expr_call( + make::expr_path(make::ext::ident_path("Err")), + make::arg_list(iter::once(make::expr_path(make::ext::ident_path("err")))), + ) + .into(), + )), }; let happy_arm = make::match_arm( diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_turbofish_with_explicit_type.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_turbofish_with_explicit_type.rs index 3a6391cd38006..a692259410dc0 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_turbofish_with_explicit_type.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_turbofish_with_explicit_type.rs @@ -1,12 +1,12 @@ use hir::HirDisplay; use syntax::{ - ast::{Expr, GenericArg, GenericArgList, HasGenericArgs, LetStmt, Type::InferType}, AstNode, TextRange, + ast::{Expr, GenericArg, GenericArgList, HasGenericArgs, LetStmt, Type::InferType}, }; use crate::{ + AssistId, assist_context::{AssistContext, Assists}, - AssistId, AssistKind, }; // Assist: replace_turbofish_with_explicit_type @@ -74,7 +74,7 @@ pub(crate) fn replace_turbofish_with_explicit_type( let ident_range = let_stmt.pat()?.syntax().text_range(); return acc.add( - AssistId("replace_turbofish_with_explicit_type", AssistKind::RefactorRewrite), + AssistId::refactor_rewrite("replace_turbofish_with_explicit_type"), "Replace turbofish with explicit type", TextRange::new(initializer_start, turbofish_range.end()), |builder| { @@ -89,7 +89,7 @@ pub(crate) fn replace_turbofish_with_explicit_type( let underscore_range = t.syntax().text_range(); return acc.add( - AssistId("replace_turbofish_with_explicit_type", AssistKind::RefactorRewrite), + AssistId::refactor_rewrite("replace_turbofish_with_explicit_type"), "Replace `_` with turbofish type", turbofish_range, |builder| { @@ -339,7 +339,7 @@ fn main() { check_assist( replace_turbofish_with_explicit_type, r#" -//- minicore: option, future +//- minicore: option, future, try struct Fut(T); impl core::future::Future for Fut { type Output = Option; diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/sort_items.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/sort_items.rs index 54e16d4d80a4c..e973e70345dc2 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/sort_items.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/sort_items.rs @@ -3,11 +3,11 @@ use std::cmp::Ordering; use itertools::Itertools; use syntax::{ - ast::{self, HasName}, AstNode, SyntaxNode, + ast::{self, HasName}, }; -use crate::{utils::get_methods, AssistContext, AssistId, AssistKind, Assists}; +use crate::{AssistContext, AssistId, Assists, utils::get_methods}; // Assist: sort_items // @@ -126,20 +126,15 @@ impl AddRewrite for Assists { new: Vec, target: &SyntaxNode, ) -> Option<()> { - self.add( - AssistId("sort_items", AssistKind::RefactorRewrite), - label, - target.text_range(), - |builder| { - let mut editor = builder.make_editor(target); - - old.into_iter() - .zip(new) - .for_each(|(old, new)| editor.replace(old.syntax(), new.syntax())); - - builder.add_file_edits(builder.file_id, editor) - }, - ) + self.add(AssistId::refactor_rewrite("sort_items"), label, target.text_range(), |builder| { + let mut editor = builder.make_editor(target); + + old.into_iter() + .zip(new) + .for_each(|(old, new)| editor.replace(old.syntax(), new.syntax())); + + builder.add_file_edits(builder.file_id, editor) + }) } } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/split_import.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/split_import.rs index 775ededecbcc8..1729a0667c0a2 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/split_import.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/split_import.rs @@ -1,6 +1,6 @@ -use syntax::{ast, AstNode, T}; +use syntax::{AstNode, T, ast}; -use crate::{AssistContext, AssistId, AssistKind, Assists}; +use crate::{AssistContext, AssistId, Assists}; // Assist: split_import // @@ -29,7 +29,7 @@ pub(crate) fn split_import(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option } let target = colon_colon.text_range(); - acc.add(AssistId("split_import", AssistKind::RefactorRewrite), "Split import", target, |edit| { + acc.add(AssistId::refactor_rewrite("split_import"), "Split import", target, |edit| { let use_tree = edit.make_mut(use_tree.clone()); let path = edit.make_mut(path); use_tree.split_prefix(&path); diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/term_search.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/term_search.rs index e10897b3bef75..6af8e1482c245 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/term_search.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/term_search.rs @@ -1,12 +1,12 @@ //! Term search assist use hir::term_search::{TermSearchConfig, TermSearchCtx}; use ide_db::{ - assists::{AssistId, AssistKind, GroupLabel}, + assists::{AssistId, GroupLabel}, famous_defs::FamousDefs, }; use itertools::Itertools; -use syntax::{ast, AstNode}; +use syntax::{AstNode, ast}; use crate::assist_context::{AssistContext, Assists}; @@ -68,7 +68,7 @@ pub(crate) fn term_search(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option< for code in paths { acc.add_group( &GroupLabel(String::from("Term search")), - AssistId("term_search", AssistKind::Generate), + AssistId::generate("term_search"), format!("Replace {macro_name}!() with {code}"), goal_range, |builder| { @@ -144,7 +144,7 @@ fn f() { let a = A { x: 1, y: true }; let b: i32 = a.x; }"#, term_search, r#"//- minicore: todo, unimplemented, option fn f() { let a: i32 = 1; let b: Option = todo$0!(); }"#, - r#"fn f() { let a: i32 = 1; let b: Option = Some(a); }"#, + r#"fn f() { let a: i32 = 1; let b: Option = None; }"#, ) } @@ -156,7 +156,7 @@ fn f() { let a: i32 = 1; let b: Option = todo$0!(); }"#, enum Option { None, Some(T) } fn f() { let a: i32 = 1; let b: Option = todo$0!(); }"#, r#"enum Option { None, Some(T) } -fn f() { let a: i32 = 1; let b: Option = Option::Some(a); }"#, +fn f() { let a: i32 = 1; let b: Option = Option::None; }"#, ) } @@ -168,7 +168,7 @@ fn f() { let a: i32 = 1; let b: Option = Option::Some(a); }"#, enum Option { None, Some(T) } fn f() { let a: Option = Option::None; let b: Option> = todo$0!(); }"#, r#"enum Option { None, Some(T) } -fn f() { let a: Option = Option::None; let b: Option> = Option::Some(a); }"#, +fn f() { let a: Option = Option::None; let b: Option> = Option::None; }"#, ) } @@ -221,7 +221,7 @@ fn f() { let a: i32 = 1; let b: i32 = 2; let a: u32 = 0; let c: i32 = todo$0!(); term_search, r#"//- minicore: todo, unimplemented fn f() { let a: bool = todo$0!(); }"#, - r#"fn f() { let a: bool = false; }"#, + r#"fn f() { let a: bool = true; }"#, ) } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/toggle_async_sugar.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/toggle_async_sugar.rs index 8f937a04122d6..eed070cb07dd6 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/toggle_async_sugar.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/toggle_async_sugar.rs @@ -1,11 +1,8 @@ use hir::ModuleDef; -use ide_db::{ - assists::{AssistId, AssistKind}, - famous_defs::FamousDefs, -}; +use ide_db::{assists::AssistId, famous_defs::FamousDefs}; use syntax::{ - ast::{self, HasGenericArgs, HasVisibility}, AstNode, NodeOrToken, SyntaxKind, SyntaxNode, SyntaxToken, TextRange, + ast::{self, HasGenericArgs, HasVisibility}, }; use crate::{AssistContext, Assists}; @@ -60,7 +57,7 @@ pub(crate) fn sugar_impl_future_into_async( let future_output = unwrap_future_output(main_trait_path)?; acc.add( - AssistId("sugar_impl_future_into_async", AssistKind::RefactorRewrite), + AssistId::refactor_rewrite("sugar_impl_future_into_async"), "Convert `impl Future` into async", function.syntax().text_range(), |builder| { @@ -145,7 +142,7 @@ pub(crate) fn desugar_async_into_impl_future( let trait_path = trait_path.display(ctx.db(), edition); acc.add( - AssistId("desugar_async_into_impl_future", AssistKind::RefactorRewrite), + AssistId::refactor_rewrite("desugar_async_into_impl_future"), "Convert async into `impl Future`", function.syntax().text_range(), |builder| { diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/toggle_ignore.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/toggle_ignore.rs index 264a2f0326ecf..386625b86b271 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/toggle_ignore.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/toggle_ignore.rs @@ -1,9 +1,9 @@ use syntax::{ - ast::{self, HasAttrs}, AstNode, AstToken, + ast::{self, HasAttrs}, }; -use crate::{utils::test_related_attribute_syn, AssistContext, AssistId, AssistKind, Assists}; +use crate::{AssistContext, AssistId, Assists, utils::test_related_attribute_syn}; // Assist: toggle_ignore // @@ -30,13 +30,13 @@ pub(crate) fn toggle_ignore(acc: &mut Assists, ctx: &AssistContext<'_>) -> Optio match has_ignore_attribute(&func) { None => acc.add( - AssistId("toggle_ignore", AssistKind::None), + AssistId::refactor("toggle_ignore"), "Ignore this test", attr.syntax().text_range(), |builder| builder.insert(attr.syntax().text_range().end(), "\n#[ignore]"), ), Some(ignore_attr) => acc.add( - AssistId("toggle_ignore", AssistKind::None), + AssistId::refactor("toggle_ignore"), "Re-enable this test", ignore_attr.syntax().text_range(), |builder| { diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/toggle_macro_delimiter.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/toggle_macro_delimiter.rs index e452b5f77870c..109269bd6e611 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/toggle_macro_delimiter.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/toggle_macro_delimiter.rs @@ -1,7 +1,8 @@ -use ide_db::assists::{AssistId, AssistKind}; +use ide_db::assists::AssistId; use syntax::{ + AstNode, T, ast::{self, make}, - ted, AstNode, T, + ted, }; use crate::{AssistContext, Assists}; @@ -62,7 +63,7 @@ pub(crate) fn toggle_macro_delimiter(acc: &mut Assists, ctx: &AssistContext<'_>) }; acc.add( - AssistId("toggle_macro_delimiter", AssistKind::Refactor), + AssistId::refactor("toggle_macro_delimiter"), match token { MacroDelims::LPar | MacroDelims::RPar => "Replace delimiters with braces", MacroDelims::LBra | MacroDelims::RBra => "Replace delimiters with parentheses", diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unmerge_imports.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unmerge_imports.rs new file mode 100644 index 0000000000000..c066f41ca47b7 --- /dev/null +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unmerge_imports.rs @@ -0,0 +1,262 @@ +use syntax::{ + AstNode, SyntaxKind, + ast::{ + self, HasAttrs, HasVisibility, edit::IndentLevel, edit_in_place::AttrsOwnerEdit, make, + syntax_factory::SyntaxFactory, + }, + syntax_editor::{Element, Position, Removable}, +}; + +use crate::{ + AssistId, + assist_context::{AssistContext, Assists}, +}; + +// Assist: unmerge_imports +// +// Extracts a use item from a use list into a standalone use list. +// +// ``` +// use std::fmt::{Debug, Display$0}; +// ``` +// -> +// ``` +// use std::fmt::{Debug}; +// use std::fmt::Display; +// ``` +pub(crate) fn unmerge_imports(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { + let tree = ctx.find_node_at_offset::()?; + + let tree_list = tree.syntax().parent().and_then(ast::UseTreeList::cast)?; + if tree_list.use_trees().count() < 2 { + cov_mark::hit!(skip_single_import); + return None; + } + + let use_ = tree_list.syntax().ancestors().find_map(ast::Use::cast)?; + let path = resolve_full_path(&tree)?; + + // If possible, explain what is going to be done. + let label = match tree.path().and_then(|path| path.first_segment()) { + Some(name) => format!("Unmerge use of `{name}`"), + None => "Unmerge use".into(), + }; + + let target = tree.syntax().text_range(); + acc.add(AssistId::refactor_rewrite("unmerge_imports"), label, target, |builder| { + let make = SyntaxFactory::with_mappings(); + let new_use = make.use_( + use_.visibility(), + make.use_tree(path, tree.use_tree_list(), tree.rename(), tree.star_token().is_some()), + ); + // Add any attributes that are present on the use tree + use_.attrs().for_each(|attr| { + new_use.add_attr(attr.clone_for_update()); + }); + + let mut editor = builder.make_editor(use_.syntax()); + // Remove the use tree from the current use item + tree.remove(&mut editor); + // Insert a newline and indentation, followed by the new use item + editor.insert_all( + Position::after(use_.syntax()), + vec![ + make.whitespace(&format!("\n{}", IndentLevel::from_node(use_.syntax()))) + .syntax_element(), + new_use.syntax().syntax_element(), + ], + ); + editor.add_mappings(make.finish_with_mappings()); + builder.add_file_edits(ctx.vfs_file_id(), editor); + }) +} + +fn resolve_full_path(tree: &ast::UseTree) -> Option { + let paths = tree + .syntax() + .ancestors() + .take_while(|n| n.kind() != SyntaxKind::USE) + .filter_map(ast::UseTree::cast) + .filter_map(|t| t.path()); + + let final_path = paths.reduce(|prev, next| make::path_concat(next, prev))?; + if final_path.segment().is_some_and(|it| it.self_token().is_some()) { + final_path.qualifier() + } else { + Some(final_path) + } +} + +#[cfg(test)] +mod tests { + use crate::tests::{check_assist, check_assist_not_applicable}; + + use super::*; + + #[test] + fn skip_single_import() { + cov_mark::check!(skip_single_import); + check_assist_not_applicable( + unmerge_imports, + r" +use std::fmt::Debug$0; +", + ); + check_assist_not_applicable( + unmerge_imports, + r" +use std::fmt::{Debug$0}; +", + ); + check_assist_not_applicable( + unmerge_imports, + r" +use std::fmt::Debug as Dbg$0; +", + ); + } + + #[test] + fn skip_single_glob_import() { + check_assist_not_applicable( + unmerge_imports, + r" +use std::fmt::*$0; +", + ); + } + + #[test] + fn unmerge_import() { + check_assist( + unmerge_imports, + r" +use std::fmt::{Debug, Display$0}; +", + r" +use std::fmt::{Debug}; +use std::fmt::Display; +", + ); + + check_assist( + unmerge_imports, + r" +use std::fmt::{Debug, format$0, Display}; +", + r" +use std::fmt::{Debug, Display}; +use std::fmt::format; +", + ); + } + + #[test] + fn unmerge_glob_import() { + check_assist( + unmerge_imports, + r" +use std::fmt::{*$0, Display}; +", + r" +use std::fmt::{Display}; +use std::fmt::*; +", + ); + } + + #[test] + fn unmerge_renamed_import() { + check_assist( + unmerge_imports, + r" +use std::fmt::{Debug, Display as Disp$0}; +", + r" +use std::fmt::{Debug}; +use std::fmt::Display as Disp; +", + ); + } + + #[test] + fn unmerge_indented_import() { + check_assist( + unmerge_imports, + r" +mod format { + use std::fmt::{Debug, Display$0 as Disp, format}; +} +", + r" +mod format { + use std::fmt::{Debug, format}; + use std::fmt::Display as Disp; +} +", + ); + } + + #[test] + fn unmerge_nested_import() { + check_assist( + unmerge_imports, + r" +use foo::bar::{baz::{qux$0, foobar}, barbaz}; +", + r" +use foo::bar::{baz::{foobar}, barbaz}; +use foo::bar::baz::qux; +", + ); + check_assist( + unmerge_imports, + r" +use foo::bar::{baz$0::{qux, foobar}, barbaz}; +", + r" +use foo::bar::{barbaz}; +use foo::bar::baz::{qux, foobar}; +", + ); + } + + #[test] + fn unmerge_import_with_visibility() { + check_assist( + unmerge_imports, + r" +pub use std::fmt::{Debug, Display$0}; +", + r" +pub use std::fmt::{Debug}; +pub use std::fmt::Display; +", + ); + } + + #[test] + fn unmerge_import_on_self() { + check_assist( + unmerge_imports, + r"use std::process::{Command, self$0};", + r"use std::process::{Command}; +use std::process;", + ); + } + + #[test] + fn unmerge_import_with_attributes() { + check_assist( + unmerge_imports, + r" +#[allow(deprecated)] +use foo::{bar, baz$0};", + r" +#[allow(deprecated)] +use foo::{bar}; +#[allow(deprecated)] +use foo::baz;", + ); + } +} diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unmerge_match_arm.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unmerge_match_arm.rs index 6b9f661d4de54..5aedff5cc775d 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unmerge_match_arm.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unmerge_match_arm.rs @@ -1,11 +1,11 @@ use syntax::{ + Direction, SyntaxKind, T, algo::neighbor, - ast::{self, edit::IndentLevel, make, AstNode}, + ast::{self, AstNode, edit::IndentLevel, make}, ted::{self, Position}, - Direction, SyntaxKind, T, }; -use crate::{AssistContext, AssistId, AssistKind, Assists}; +use crate::{AssistContext, AssistId, Assists}; // Assist: unmerge_match_arm // @@ -47,14 +47,20 @@ pub(crate) fn unmerge_match_arm(acc: &mut Assists, ctx: &AssistContext<'_>) -> O let old_parent_range = new_parent.text_range(); acc.add( - AssistId("unmerge_match_arm", AssistKind::RefactorRewrite), + AssistId::refactor_rewrite("unmerge_match_arm"), "Unmerge match arm", pipe_token.text_range(), |edit| { let pats_after = pipe_token .siblings_with_tokens(Direction::Next) - .filter_map(|it| ast::Pat::cast(it.into_node()?)); - let new_pat = make::or_pat(pats_after, or_pat.leading_pipe().is_some()); + .filter_map(|it| ast::Pat::cast(it.into_node()?)) + .collect::>(); + // It is guaranteed that `pats_after` has at least one element + let new_pat = if pats_after.len() == 1 { + pats_after[0].clone() + } else { + make::or_pat(pats_after, or_pat.leading_pipe().is_some()).into() + }; let new_match_arm = make::match_arm(new_pat, match_arm.guard(), match_arm_body).clone_for_update(); diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unmerge_use.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unmerge_use.rs deleted file mode 100644 index 38ca572fa6609..0000000000000 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unmerge_use.rs +++ /dev/null @@ -1,233 +0,0 @@ -use syntax::{ - ast::{self, edit_in_place::Removable, make, HasVisibility}, - ted::{self, Position}, - AstNode, SyntaxKind, -}; - -use crate::{ - assist_context::{AssistContext, Assists}, - AssistId, AssistKind, -}; - -// Assist: unmerge_use -// -// Extracts single use item from use list. -// -// ``` -// use std::fmt::{Debug, Display$0}; -// ``` -// -> -// ``` -// use std::fmt::{Debug}; -// use std::fmt::Display; -// ``` -pub(crate) fn unmerge_use(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { - let tree: ast::UseTree = ctx.find_node_at_offset::()?.clone_for_update(); - - let tree_list = tree.syntax().parent().and_then(ast::UseTreeList::cast)?; - if tree_list.use_trees().count() < 2 { - cov_mark::hit!(skip_single_use_item); - return None; - } - - let use_: ast::Use = tree_list.syntax().ancestors().find_map(ast::Use::cast)?; - let path = resolve_full_path(&tree)?; - - let old_parent_range = use_.syntax().parent()?.text_range(); - let new_parent = use_.syntax().parent()?; - - // If possible, explain what is going to be done. - let label = match tree.path().and_then(|path| path.first_segment()) { - Some(name) => format!("Unmerge use of `{name}`"), - None => "Unmerge use".into(), - }; - - let target = tree.syntax().text_range(); - acc.add(AssistId("unmerge_use", AssistKind::RefactorRewrite), label, target, |builder| { - let new_use = make::use_( - use_.visibility(), - make::use_tree(path, tree.use_tree_list(), tree.rename(), tree.star_token().is_some()), - ) - .clone_for_update(); - - tree.remove(); - ted::insert(Position::after(use_.syntax()), new_use.syntax()); - - builder.replace(old_parent_range, new_parent.to_string()); - }) -} - -fn resolve_full_path(tree: &ast::UseTree) -> Option { - let paths = tree - .syntax() - .ancestors() - .take_while(|n| n.kind() != SyntaxKind::USE) - .filter_map(ast::UseTree::cast) - .filter_map(|t| t.path()); - - let final_path = paths.reduce(|prev, next| make::path_concat(next, prev))?; - if final_path.segment().is_some_and(|it| it.self_token().is_some()) { - final_path.qualifier() - } else { - Some(final_path) - } -} - -#[cfg(test)] -mod tests { - use crate::tests::{check_assist, check_assist_not_applicable}; - - use super::*; - - #[test] - fn skip_single_use_item() { - cov_mark::check!(skip_single_use_item); - check_assist_not_applicable( - unmerge_use, - r" -use std::fmt::Debug$0; -", - ); - check_assist_not_applicable( - unmerge_use, - r" -use std::fmt::{Debug$0}; -", - ); - check_assist_not_applicable( - unmerge_use, - r" -use std::fmt::Debug as Dbg$0; -", - ); - } - - #[test] - fn skip_single_glob_import() { - check_assist_not_applicable( - unmerge_use, - r" -use std::fmt::*$0; -", - ); - } - - #[test] - fn unmerge_use_item() { - check_assist( - unmerge_use, - r" -use std::fmt::{Debug, Display$0}; -", - r" -use std::fmt::{Debug}; -use std::fmt::Display; -", - ); - - check_assist( - unmerge_use, - r" -use std::fmt::{Debug, format$0, Display}; -", - r" -use std::fmt::{Debug, Display}; -use std::fmt::format; -", - ); - } - - #[test] - fn unmerge_glob_import() { - check_assist( - unmerge_use, - r" -use std::fmt::{*$0, Display}; -", - r" -use std::fmt::{Display}; -use std::fmt::*; -", - ); - } - - #[test] - fn unmerge_renamed_use_item() { - check_assist( - unmerge_use, - r" -use std::fmt::{Debug, Display as Disp$0}; -", - r" -use std::fmt::{Debug}; -use std::fmt::Display as Disp; -", - ); - } - - #[test] - fn unmerge_indented_use_item() { - check_assist( - unmerge_use, - r" -mod format { - use std::fmt::{Debug, Display$0 as Disp, format}; -} -", - r" -mod format { - use std::fmt::{Debug, format}; - use std::fmt::Display as Disp; -} -", - ); - } - - #[test] - fn unmerge_nested_use_item() { - check_assist( - unmerge_use, - r" -use foo::bar::{baz::{qux$0, foobar}, barbaz}; -", - r" -use foo::bar::{baz::{foobar}, barbaz}; -use foo::bar::baz::qux; -", - ); - check_assist( - unmerge_use, - r" -use foo::bar::{baz$0::{qux, foobar}, barbaz}; -", - r" -use foo::bar::{barbaz}; -use foo::bar::baz::{qux, foobar}; -", - ); - } - - #[test] - fn unmerge_use_item_with_visibility() { - check_assist( - unmerge_use, - r" -pub use std::fmt::{Debug, Display$0}; -", - r" -pub use std::fmt::{Debug}; -pub use std::fmt::Display; -", - ); - } - - #[test] - fn unmerge_use_item_on_self() { - check_assist( - unmerge_use, - r"use std::process::{Command, self$0};", - r"use std::process::{Command}; -use std::process;", - ); - } -} diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unnecessary_async.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unnecessary_async.rs index abe7fb132f0b3..ac10a829bbf1b 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unnecessary_async.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unnecessary_async.rs @@ -1,13 +1,13 @@ use ide_db::{ - assists::{AssistId, AssistKind}, + EditionedFileId, + assists::AssistId, defs::Definition, search::{FileReference, FileReferenceNode}, syntax_helpers::node_ext::full_path_of_name_ref, - EditionedFileId, }; use syntax::{ - ast::{self, NameRef}, AstNode, SyntaxKind, TextRange, + ast::{self, NameRef}, }; use crate::{AssistContext, Assists}; @@ -60,7 +60,7 @@ pub(crate) fn unnecessary_async(acc: &mut Assists, ctx: &AssistContext<'_>) -> O // Otherwise, we may remove the `async` keyword. acc.add( - AssistId("unnecessary_async", AssistKind::QuickFix), + AssistId::quick_fix("unnecessary_async"), "Remove unnecessary async", async_range, |edit| { diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unqualify_method_call.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unqualify_method_call.rs index baf4ddae2fbc9..ebb8ef99100e7 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unqualify_method_call.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unqualify_method_call.rs @@ -1,10 +1,10 @@ use ide_db::imports::insert_use::ImportScope; use syntax::{ - ast::{self, prec::ExprPrecedence, AstNode, HasArgList}, TextRange, + ast::{self, AstNode, HasArgList, prec::ExprPrecedence}, }; -use crate::{AssistContext, AssistId, AssistKind, Assists}; +use crate::{AssistContext, AssistId, Assists}; // Assist: unqualify_method_call // @@ -69,7 +69,7 @@ pub(crate) fn unqualify_method_call(acc: &mut Assists, ctx: &AssistContext<'_>) ); acc.add( - AssistId("unqualify_method_call", AssistKind::RefactorRewrite), + AssistId::refactor_rewrite("unqualify_method_call"), "Unqualify method call", call.syntax().text_range(), |edit| { diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unwrap_block.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unwrap_block.rs index fd37140e9c2bf..a83f6835ca615 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unwrap_block.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unwrap_block.rs @@ -1,13 +1,13 @@ use syntax::{ + AstNode, SyntaxKind, T, TextRange, ast::{ self, edit::{AstNodeEdit, IndentLevel}, make, }, - AstNode, SyntaxKind, TextRange, T, }; -use crate::{AssistContext, AssistId, AssistKind, Assists}; +use crate::{AssistContext, AssistId, Assists}; // Assist: unwrap_block // @@ -27,9 +27,8 @@ use crate::{AssistContext, AssistId, AssistKind, Assists}; // } // ``` pub(crate) fn unwrap_block(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { - let assist_id = AssistId("unwrap_block", AssistKind::RefactorRewrite); + let assist_id = AssistId::refactor_rewrite("unwrap_block"); let assist_label = "Unwrap block"; - let l_curly_token = ctx.find_token_syntax_at_offset(T!['{'])?; let mut block = ast::BlockExpr::cast(l_curly_token.parent_ancestors().nth(1)?)?; let target = block.syntax().text_range(); diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unwrap_return_type.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unwrap_return_type.rs index f647b531b7742..cf38262fbf443 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unwrap_return_type.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unwrap_return_type.rs @@ -4,11 +4,12 @@ use ide_db::{ syntax_helpers::node_ext::{for_each_tail_expr, walk_expr}, }; use syntax::{ - ast::{self, syntax_factory::SyntaxFactory, HasArgList, HasGenericArgs}, - match_ast, AstNode, NodeOrToken, SyntaxKind, + AstNode, NodeOrToken, SyntaxKind, + ast::{self, HasArgList, HasGenericArgs, syntax_factory::SyntaxFactory}, + match_ast, }; -use crate::{AssistContext, AssistId, AssistKind, Assists}; +use crate::{AssistContext, AssistId, Assists}; // Assist: unwrap_option_return_type // @@ -66,7 +67,7 @@ pub(crate) fn unwrap_return_type(acc: &mut Assists, ctx: &AssistContext<'_>) -> acc.add(kind.assist_id(), kind.label(), type_ref.syntax().text_range(), |builder| { let mut editor = builder.make_editor(&parent); - let make = SyntaxFactory::new(); + let make = SyntaxFactory::with_mappings(); let mut exprs_to_unwrap = Vec::new(); let tail_cb = &mut |e: &_| tail_cb_impl(&mut exprs_to_unwrap, e); @@ -168,7 +169,7 @@ pub(crate) fn unwrap_return_type(acc: &mut Assists, ctx: &AssistContext<'_>) -> } editor.add_mappings(make.finish_with_mappings()); - builder.add_file_edits(ctx.file_id(), editor); + builder.add_file_edits(ctx.vfs_file_id(), editor); }) } @@ -186,7 +187,7 @@ impl UnwrapperKind { UnwrapperKind::Result => "unwrap_result_return_type", }; - AssistId(s, AssistKind::RefactorRewrite) + AssistId::refactor_rewrite(s) } fn label(&self) -> &'static str { diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unwrap_tuple.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unwrap_tuple.rs index d09614c51127e..ecfecbb04ff22 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unwrap_tuple.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unwrap_tuple.rs @@ -1,9 +1,9 @@ use syntax::{ - ast::{self, edit::AstNodeEdit}, AstNode, T, + ast::{self, edit::AstNodeEdit}, }; -use crate::{AssistContext, AssistId, AssistKind, Assists}; +use crate::{AssistContext, AssistId, Assists}; // Assist: unwrap_tuple // @@ -56,7 +56,7 @@ pub(crate) fn unwrap_tuple(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option let parent = let_kw.parent()?; acc.add( - AssistId("unwrap_tuple", AssistKind::RefactorRewrite), + AssistId::refactor_rewrite("unwrap_tuple"), "Unwrap tuple", let_kw.text_range(), |edit| { diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/wrap_return_type.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/wrap_return_type.rs index 0b145dcb06ba3..9ea78719b20c0 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/wrap_return_type.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/wrap_return_type.rs @@ -7,11 +7,12 @@ use ide_db::{ syntax_helpers::node_ext::{for_each_tail_expr, walk_expr}, }; use syntax::{ - ast::{self, syntax_factory::SyntaxFactory, Expr, HasGenericArgs, HasGenericParams}, - match_ast, AstNode, + AstNode, + ast::{self, Expr, HasGenericArgs, HasGenericParams, syntax_factory::SyntaxFactory}, + match_ast, }; -use crate::{AssistContext, AssistId, AssistKind, Assists}; +use crate::{AssistContext, AssistId, Assists}; // Assist: wrap_return_type_in_option // @@ -76,7 +77,7 @@ pub(crate) fn wrap_return_type(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op type_ref.syntax().text_range(), |builder| { let mut editor = builder.make_editor(&parent); - let make = SyntaxFactory::new(); + let make = SyntaxFactory::with_mappings(); let alias = wrapper_alias(ctx, &make, &core_wrapper, type_ref, kind.symbol()); let new_return_ty = alias.unwrap_or_else(|| match kind { WrapperKind::Option => make.ty_option(type_ref.clone()), @@ -132,7 +133,7 @@ pub(crate) fn wrap_return_type(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op } editor.add_mappings(make.finish_with_mappings()); - builder.add_file_edits(ctx.file_id(), editor); + builder.add_file_edits(ctx.vfs_file_id(), editor); }, ); } @@ -154,7 +155,7 @@ impl WrapperKind { WrapperKind::Result => "wrap_return_type_in_result", }; - AssistId(s, AssistKind::RefactorRewrite) + AssistId::refactor_rewrite(s) } fn label(&self) -> &'static str { @@ -180,8 +181,8 @@ impl WrapperKind { fn symbol(&self) -> hir::Symbol { match self { - WrapperKind::Option => hir::sym::Option.clone(), - WrapperKind::Result => hir::sym::Result.clone(), + WrapperKind::Option => hir::sym::Option, + WrapperKind::Result => hir::sym::Result, } } } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/wrap_unwrap_cfg_attr.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/wrap_unwrap_cfg_attr.rs index 149cb4c43849d..e1b94673e7756 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/wrap_unwrap_cfg_attr.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/wrap_unwrap_cfg_attr.rs @@ -1,13 +1,12 @@ use ide_db::source_change::SourceChangeBuilder; use itertools::Itertools; use syntax::{ - algo, - ast::{self, make, AstNode}, + NodeOrToken, SyntaxToken, T, TextRange, algo, + ast::{self, AstNode, make}, ted::{self, Position}, - NodeOrToken, SyntaxToken, TextRange, T, }; -use crate::{AssistContext, AssistId, AssistKind, Assists}; +use crate::{AssistContext, AssistId, Assists}; // Assist: wrap_unwrap_cfg_attr // @@ -117,7 +116,7 @@ pub(crate) fn wrap_unwrap_cfg_attr(acc: &mut Assists, ctx: &AssistContext<'_>) - (Some(attr), Some(ident)) if attr.simple_name().map(|v| v.eq("derive")).unwrap_or_default() => { - Some(attempt_get_derive(attr.clone(), ident)) + Some(attempt_get_derive(attr, ident)) } (Some(attr), _) => Some(WrapUnwrapOption::WrapAttr(attr)), @@ -129,7 +128,7 @@ pub(crate) fn wrap_unwrap_cfg_attr(acc: &mut Assists, ctx: &AssistContext<'_>) - NodeOrToken::Node(node) => ast::Attr::cast(node).map(WrapUnwrapOption::WrapAttr), NodeOrToken::Token(ident) if ident.kind() == syntax::T![ident] => { let attr = ident.parent_ancestors().find_map(ast::Attr::cast)?; - Some(attempt_get_derive(attr.clone(), ident)) + Some(attempt_get_derive(attr, ident)) } _ => None, } @@ -211,7 +210,7 @@ fn wrap_derive( }; acc.add( - AssistId("wrap_unwrap_cfg_attr", AssistKind::Refactor), + AssistId::refactor("wrap_unwrap_cfg_attr"), format!("Wrap #[derive({path_text})] in `cfg_attr`",), range, handle_source_change, @@ -234,7 +233,7 @@ fn wrap_cfg_attr(acc: &mut Assists, ctx: &AssistContext<'_>, attr: ast::Attr) -> if let Some(meta) = attr.meta() { if let (Some(eq), Some(expr)) = (meta.eq_token(), meta.expr()) { raw_tokens.push(NodeOrToken::Token(make::tokens::whitespace(" "))); - raw_tokens.push(NodeOrToken::Token(eq.clone())); + raw_tokens.push(NodeOrToken::Token(eq)); raw_tokens.push(NodeOrToken::Token(make::tokens::whitespace(" "))); expr.syntax().descendants_with_tokens().for_each(|it| { @@ -268,7 +267,7 @@ fn wrap_cfg_attr(acc: &mut Assists, ctx: &AssistContext<'_>, attr: ast::Attr) -> } }; acc.add( - AssistId("wrap_unwrap_cfg_attr", AssistKind::Refactor), + AssistId::refactor("wrap_unwrap_cfg_attr"), "Convert to `cfg_attr`", range, handle_source_change, @@ -296,11 +295,7 @@ fn unwrap_cfg_attr(acc: &mut Assists, attr: ast::Attr) -> Option<()> { continue; } let Some(attr_name) = tt.into_token().and_then(|token| { - if token.kind() == T![ident] { - Some(make::ext::ident_path(token.text())) - } else { - None - } + if token.kind() == T![ident] { Some(make::ext::ident_path(token.text())) } else { None } }) else { continue; }; @@ -341,7 +336,7 @@ fn unwrap_cfg_attr(acc: &mut Assists, attr: ast::Attr) -> Option<()> { f.replace(range, inner_attrs); }; acc.add( - AssistId("wrap_unwrap_cfg_attr", AssistKind::Refactor), + AssistId::refactor("wrap_unwrap_cfg_attr"), "Extract Inner Attributes from `cfg_attr`", range, handle_source_change, diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/lib.rs b/src/tools/rust-analyzer/crates/ide-assists/src/lib.rs index e8480b0de1906..627ed37b04e58 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/lib.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/lib.rs @@ -68,7 +68,7 @@ pub mod utils; use hir::Semantics; use ide_db::{EditionedFileId, RootDatabase}; -use syntax::TextRange; +use syntax::{Edition, TextRange}; pub(crate) use crate::assist_context::{AssistContext, Assists}; @@ -90,7 +90,7 @@ pub fn assists( let sema = Semantics::new(db); let file_id = sema .attach_first_edition(range.file_id) - .unwrap_or_else(|| EditionedFileId::current_edition(range.file_id)); + .unwrap_or_else(|| EditionedFileId::new(db, range.file_id, Edition::CURRENT)); let ctx = AssistContext::new(sema, config, hir::FileRange { file_id, range: range.range }); let mut acc = Assists::new(&ctx, resolve); handlers::all().iter().for_each(|handler| { @@ -122,6 +122,7 @@ mod handlers { mod convert_closure_to_fn; mod convert_comment_block; mod convert_comment_from_or_to_doc; + mod convert_for_to_while_let; mod convert_from_to_tryfrom; mod convert_integer_literal; mod convert_into_to_from; @@ -199,6 +200,7 @@ mod handlers { mod remove_dbg; mod remove_mut; mod remove_parentheses; + mod remove_underscore; mod remove_unused_imports; mod remove_unused_param; mod reorder_fields; @@ -220,8 +222,8 @@ mod handlers { mod toggle_async_sugar; mod toggle_ignore; mod toggle_macro_delimiter; + mod unmerge_imports; mod unmerge_match_arm; - mod unmerge_use; mod unnecessary_async; mod unqualify_method_call; mod unwrap_block; @@ -252,6 +254,7 @@ mod handlers { convert_closure_to_fn::convert_closure_to_fn, convert_comment_block::convert_comment_block, convert_comment_from_or_to_doc::convert_comment_from_or_to_doc, + convert_for_to_while_let::convert_for_loop_to_while_let, convert_from_to_tryfrom::convert_from_to_tryfrom, convert_integer_literal::convert_integer_literal, convert_into_to_from::convert_into_to_from, @@ -333,6 +336,7 @@ mod handlers { remove_dbg::remove_dbg, remove_mut::remove_mut, remove_parentheses::remove_parentheses, + remove_underscore::remove_underscore, remove_unused_imports::remove_unused_imports, remove_unused_param::remove_unused_param, reorder_fields::reorder_fields, @@ -359,7 +363,7 @@ mod handlers { toggle_ignore::toggle_ignore, toggle_macro_delimiter::toggle_macro_delimiter, unmerge_match_arm::unmerge_match_arm, - unmerge_use::unmerge_use, + unmerge_imports::unmerge_imports, unnecessary_async::unnecessary_async, unqualify_method_call::unqualify_method_call, unwrap_block::unwrap_block, diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/tests.rs b/src/tools/rust-analyzer/crates/ide-assists/src/tests.rs index 7d7012c462222..5e6889792db6e 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/tests.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/tests.rs @@ -1,12 +1,13 @@ mod generated; use expect_test::expect; -use hir::{FileRange, Semantics}; +use hir::Semantics; use ide_db::{ - base_db::{SourceDatabase, SourceRootDatabase}, + EditionedFileId, FileRange, RootDatabase, SnippetCap, + assists::ExprFillDefaultMode, + base_db::SourceDatabase, imports::insert_use::{ImportGranularity, InsertUseConfig}, source_change::FileSystemEdit, - EditionedFileId, RootDatabase, SnippetCap, }; use stdx::{format_to, trim_indent}; use syntax::TextRange; @@ -14,8 +15,8 @@ use test_fixture::WithFixture; use test_utils::{assert_eq_text, extract_offset}; use crate::{ - assists, handlers::Handler, Assist, AssistConfig, AssistContext, AssistKind, - AssistResolveStrategy, Assists, SingleResolve, + Assist, AssistConfig, AssistContext, AssistKind, AssistResolveStrategy, Assists, SingleResolve, + assists, handlers::Handler, }; pub(crate) const TEST_CONFIG: AssistConfig = AssistConfig { @@ -35,6 +36,7 @@ pub(crate) const TEST_CONFIG: AssistConfig = AssistConfig { term_search_fuel: 400, term_search_borrowck: true, code_action_grouping: true, + expr_fill_default: ExprFillDefaultMode::Todo, }; pub(crate) const TEST_CONFIG_NO_GROUPING: AssistConfig = AssistConfig { @@ -54,6 +56,7 @@ pub(crate) const TEST_CONFIG_NO_GROUPING: AssistConfig = AssistConfig { term_search_fuel: 400, term_search_borrowck: true, code_action_grouping: false, + expr_fill_default: ExprFillDefaultMode::Todo, }; pub(crate) const TEST_CONFIG_NO_SNIPPET_CAP: AssistConfig = AssistConfig { @@ -73,6 +76,7 @@ pub(crate) const TEST_CONFIG_NO_SNIPPET_CAP: AssistConfig = AssistConfig { term_search_fuel: 400, term_search_borrowck: true, code_action_grouping: true, + expr_fill_default: ExprFillDefaultMode::Todo, }; pub(crate) const TEST_CONFIG_IMPORT_ONE: AssistConfig = AssistConfig { @@ -92,6 +96,7 @@ pub(crate) const TEST_CONFIG_IMPORT_ONE: AssistConfig = AssistConfig { term_search_fuel: 400, term_search_borrowck: true, code_action_grouping: true, + expr_fill_default: ExprFillDefaultMode::Todo, }; pub(crate) fn with_single_file(text: &str) -> (RootDatabase, EditionedFileId) { @@ -222,17 +227,17 @@ pub(crate) fn check_assist_unresolved( fn check_doc_test(assist_id: &str, before: &str, after: &str) { let after = trim_indent(after); let (db, file_id, selection) = RootDatabase::with_range_or_offset(before); - let before = db.file_text(file_id.file_id()).to_string(); - let frange = FileRange { file_id, range: selection.into() }; + let before = db.file_text(file_id.file_id(&db)).text(&db).to_string(); + let frange = ide_db::FileRange { file_id: file_id.file_id(&db), range: selection.into() }; - let assist = assists(&db, &TEST_CONFIG, AssistResolveStrategy::All, frange.into()) + let assist = assists(&db, &TEST_CONFIG, AssistResolveStrategy::All, frange) .into_iter() .find(|assist| assist.id.0 == assist_id) .unwrap_or_else(|| { panic!( "\n\nAssist is not applicable: {}\nAvailable assists: {}", assist_id, - assists(&db, &TEST_CONFIG, AssistResolveStrategy::None, frange.into()) + assists(&db, &TEST_CONFIG, AssistResolveStrategy::None, frange) .into_iter() .map(|assist| assist.id.0) .collect::>() @@ -247,7 +252,7 @@ fn check_doc_test(assist_id: &str, before: &str, after: &str) { .expect("Assist did not contain any source changes"); let mut actual = before; if let Some((source_file_edit, snippet_edit)) = - source_change.get_source_and_snippet_edit(file_id.file_id()) + source_change.get_source_and_snippet_edit(file_id.file_id(&db)) { source_file_edit.apply(&mut actual); if let Some(snippet_edit) = snippet_edit { @@ -281,9 +286,9 @@ fn check_with_config( ) { let (mut db, file_with_caret_id, range_or_offset) = RootDatabase::with_range_or_offset(before); db.enable_proc_attr_macros(); - let text_without_caret = db.file_text(file_with_caret_id.into()).to_string(); + let text_without_caret = db.file_text(file_with_caret_id.file_id(&db)).text(&db).to_string(); - let frange = FileRange { file_id: file_with_caret_id, range: range_or_offset.into() }; + let frange = hir::FileRange { file_id: file_with_caret_id, range: range_or_offset.into() }; let sema = Semantics::new(&db); let ctx = AssistContext::new(sema, &config, frange); @@ -297,7 +302,9 @@ fn check_with_config( let assist = match assist_label { Some(label) => res.into_iter().find(|resolved| resolved.label == label), - None => res.pop(), + None if res.is_empty() => None, + // Pick the first as that is the one with the highest priority + None => Some(res.swap_remove(0)), }; match (assist, expected) { @@ -311,14 +318,14 @@ fn check_with_config( let mut buf = String::new(); for (file_id, (edit, snippet_edit)) in source_change.source_file_edits { - let mut text = db.file_text(file_id).as_ref().to_owned(); + let mut text = db.file_text(file_id).text(&db).as_ref().to_owned(); edit.apply(&mut text); if let Some(snippet_edit) = snippet_edit { snippet_edit.apply(&mut text); } if !skip_header { - let sr = db.file_source_root(file_id); - let sr = db.source_root(sr); + let source_root_id = db.file_source_root(file_id).source_root_id(&db); + let sr = db.source_root(source_root_id).source_root(&db); let path = sr.path_for_file(&file_id).unwrap(); format_to!(buf, "//- {}\n", path) } @@ -329,15 +336,16 @@ fn check_with_config( let (dst, contents) = match file_system_edit { FileSystemEdit::CreateFile { dst, initial_contents } => (dst, initial_contents), FileSystemEdit::MoveFile { src, dst } => { - (dst, db.file_text(src).as_ref().to_owned()) + (dst, db.file_text(src).text(&db).as_ref().to_owned()) } FileSystemEdit::MoveDir { src, src_id, dst } => { // temporary placeholder for MoveDir since we are not using MoveDir in ide assists yet. (dst, format!("{src_id:?}\n{src:?}")) } }; - let sr = db.file_source_root(dst.anchor); - let sr = db.source_root(sr); + + let source_root_id = db.file_source_root(dst.anchor).source_root_id(&db); + let sr = db.source_root(source_root_id).source_root(&db); let mut base = sr.path_for_file(&dst.anchor).unwrap().clone(); base.pop(); let created_file_path = base.join(&dst.path).unwrap(); @@ -387,8 +395,9 @@ fn assist_order_field_struct() { let before = "struct Foo { $0bar: u32 }"; let (before_cursor_pos, before) = extract_offset(before); let (db, file_id) = with_single_file(&before); - let frange = FileRange { file_id, range: TextRange::empty(before_cursor_pos) }; - let assists = assists(&db, &TEST_CONFIG, AssistResolveStrategy::None, frange.into()); + let frange = + FileRange { file_id: file_id.file_id(&db), range: TextRange::empty(before_cursor_pos) }; + let assists = assists(&db, &TEST_CONFIG, AssistResolveStrategy::None, frange); let mut assists = assists.iter(); assert_eq!(assists.next().expect("expected assist").label, "Change visibility to pub(crate)"); @@ -414,7 +423,12 @@ pub fn test_some_range(a: int) -> bool { "#, ); - let assists = assists(&db, &TEST_CONFIG, AssistResolveStrategy::None, frange.into()); + let assists = assists( + &db, + &TEST_CONFIG, + AssistResolveStrategy::None, + FileRange { file_id: frange.file_id.file_id(&db), range: frange.range }, + ); let expected = labels(&assists); expect![[r#" @@ -442,7 +456,12 @@ pub fn test_some_range(a: int) -> bool { let mut cfg = TEST_CONFIG; cfg.allowed = Some(vec![AssistKind::Refactor]); - let assists = assists(&db, &cfg, AssistResolveStrategy::None, frange.into()); + let assists = assists( + &db, + &cfg, + AssistResolveStrategy::None, + FileRange { file_id: frange.file_id.file_id(&db), range: frange.range }, + ); let expected = labels(&assists); expect![[r#" @@ -456,7 +475,12 @@ pub fn test_some_range(a: int) -> bool { { let mut cfg = TEST_CONFIG; cfg.allowed = Some(vec![AssistKind::RefactorExtract]); - let assists = assists(&db, &cfg, AssistResolveStrategy::None, frange.into()); + let assists = assists( + &db, + &cfg, + AssistResolveStrategy::None, + FileRange { file_id: frange.file_id.file_id(&db), range: frange.range }, + ); let expected = labels(&assists); expect![[r#" @@ -468,7 +492,12 @@ pub fn test_some_range(a: int) -> bool { { let mut cfg = TEST_CONFIG; cfg.allowed = Some(vec![AssistKind::QuickFix]); - let assists = assists(&db, &cfg, AssistResolveStrategy::None, frange.into()); + let assists = assists( + &db, + &cfg, + AssistResolveStrategy::None, + FileRange { file_id: frange.file_id.file_id(&db), range: frange.range }, + ); let expected = labels(&assists); expect![[r#""#]].assert_eq(&expected); @@ -493,7 +522,12 @@ pub fn test_some_range(a: int) -> bool { cfg.allowed = Some(vec![AssistKind::RefactorExtract]); { - let assists = assists(&db, &cfg, AssistResolveStrategy::None, frange.into()); + let assists = assists( + &db, + &cfg, + AssistResolveStrategy::None, + FileRange { file_id: frange.file_id.file_id(&db), range: frange.range }, + ); assert_eq!(4, assists.len()); let mut assists = assists.into_iter(); @@ -503,6 +537,7 @@ pub fn test_some_range(a: int) -> bool { id: AssistId( "extract_variable", RefactorExtract, + None, ), label: "Extract into variable", group: Some( @@ -523,6 +558,7 @@ pub fn test_some_range(a: int) -> bool { id: AssistId( "extract_constant", RefactorExtract, + None, ), label: "Extract into constant", group: Some( @@ -543,6 +579,7 @@ pub fn test_some_range(a: int) -> bool { id: AssistId( "extract_static", RefactorExtract, + None, ), label: "Extract into static", group: Some( @@ -563,6 +600,7 @@ pub fn test_some_range(a: int) -> bool { id: AssistId( "extract_function", RefactorExtract, + None, ), label: "Extract into function", group: Some( @@ -585,8 +623,9 @@ pub fn test_some_range(a: int) -> bool { AssistResolveStrategy::Single(SingleResolve { assist_id: "SOMETHING_MISMATCHING".to_owned(), assist_kind: AssistKind::RefactorExtract, + assist_subtype: None, }), - frange.into(), + FileRange { file_id: frange.file_id.file_id(&db), range: frange.range }, ); assert_eq!(4, assists.len()); let mut assists = assists.into_iter(); @@ -597,6 +636,7 @@ pub fn test_some_range(a: int) -> bool { id: AssistId( "extract_variable", RefactorExtract, + None, ), label: "Extract into variable", group: Some( @@ -617,6 +657,7 @@ pub fn test_some_range(a: int) -> bool { id: AssistId( "extract_constant", RefactorExtract, + None, ), label: "Extract into constant", group: Some( @@ -637,6 +678,7 @@ pub fn test_some_range(a: int) -> bool { id: AssistId( "extract_static", RefactorExtract, + None, ), label: "Extract into static", group: Some( @@ -657,6 +699,7 @@ pub fn test_some_range(a: int) -> bool { id: AssistId( "extract_function", RefactorExtract, + None, ), label: "Extract into function", group: Some( @@ -679,8 +722,9 @@ pub fn test_some_range(a: int) -> bool { AssistResolveStrategy::Single(SingleResolve { assist_id: "extract_variable".to_owned(), assist_kind: AssistKind::RefactorExtract, + assist_subtype: None, }), - frange.into(), + FileRange { file_id: frange.file_id.file_id(&db), range: frange.range }, ); assert_eq!(4, assists.len()); let mut assists = assists.into_iter(); @@ -691,6 +735,7 @@ pub fn test_some_range(a: int) -> bool { id: AssistId( "extract_variable", RefactorExtract, + None, ), label: "Extract into variable", group: Some( @@ -710,24 +755,21 @@ pub fn test_some_range(a: int) -> bool { Indel { insert: "let", delete: 45..47, - annotation: None, }, Indel { insert: "var_name", delete: 48..60, - annotation: None, }, Indel { insert: "=", delete: 61..81, - annotation: None, }, Indel { insert: "5;\n if let 2..6 = var_name {\n true\n } else {\n false\n }", delete: 82..108, - annotation: None, }, ], + annotation: None, }, Some( SnippetEdit( @@ -760,6 +802,7 @@ pub fn test_some_range(a: int) -> bool { id: AssistId( "extract_constant", RefactorExtract, + None, ), label: "Extract into constant", group: Some( @@ -780,6 +823,7 @@ pub fn test_some_range(a: int) -> bool { id: AssistId( "extract_static", RefactorExtract, + None, ), label: "Extract into static", group: Some( @@ -800,6 +844,7 @@ pub fn test_some_range(a: int) -> bool { id: AssistId( "extract_function", RefactorExtract, + None, ), label: "Extract into function", group: Some( @@ -816,7 +861,12 @@ pub fn test_some_range(a: int) -> bool { } { - let assists = assists(&db, &cfg, AssistResolveStrategy::All, frange.into()); + let assists = assists( + &db, + &cfg, + AssistResolveStrategy::All, + FileRange { file_id: frange.file_id.file_id(&db), range: frange.range }, + ); assert_eq!(4, assists.len()); let mut assists = assists.into_iter(); @@ -826,6 +876,7 @@ pub fn test_some_range(a: int) -> bool { id: AssistId( "extract_variable", RefactorExtract, + None, ), label: "Extract into variable", group: Some( @@ -845,24 +896,21 @@ pub fn test_some_range(a: int) -> bool { Indel { insert: "let", delete: 45..47, - annotation: None, }, Indel { insert: "var_name", delete: 48..60, - annotation: None, }, Indel { insert: "=", delete: 61..81, - annotation: None, }, Indel { insert: "5;\n if let 2..6 = var_name {\n true\n } else {\n false\n }", delete: 82..108, - annotation: None, }, ], + annotation: None, }, Some( SnippetEdit( @@ -895,6 +943,7 @@ pub fn test_some_range(a: int) -> bool { id: AssistId( "extract_constant", RefactorExtract, + None, ), label: "Extract into constant", group: Some( @@ -914,29 +963,25 @@ pub fn test_some_range(a: int) -> bool { Indel { insert: "const", delete: 45..47, - annotation: None, }, Indel { insert: "VAR_NAME:", delete: 48..60, - annotation: None, }, Indel { insert: "i32", delete: 61..81, - annotation: None, }, Indel { insert: "=", delete: 82..86, - annotation: None, }, Indel { insert: "5;\n if let 2..6 = VAR_NAME {\n true\n } else {\n false\n }", delete: 87..108, - annotation: None, }, ], + annotation: None, }, Some( SnippetEdit( @@ -969,6 +1014,7 @@ pub fn test_some_range(a: int) -> bool { id: AssistId( "extract_static", RefactorExtract, + None, ), label: "Extract into static", group: Some( @@ -988,29 +1034,25 @@ pub fn test_some_range(a: int) -> bool { Indel { insert: "static", delete: 45..47, - annotation: None, }, Indel { insert: "VAR_NAME:", delete: 48..60, - annotation: None, }, Indel { insert: "i32", delete: 61..81, - annotation: None, }, Indel { insert: "=", delete: 82..86, - annotation: None, }, Indel { insert: "5;\n if let 2..6 = VAR_NAME {\n true\n } else {\n false\n }", delete: 87..108, - annotation: None, }, ], + annotation: None, }, Some( SnippetEdit( @@ -1043,6 +1085,7 @@ pub fn test_some_range(a: int) -> bool { id: AssistId( "extract_function", RefactorExtract, + None, ), label: "Extract into function", group: Some( @@ -1062,14 +1105,13 @@ pub fn test_some_range(a: int) -> bool { Indel { insert: "fun_name()", delete: 59..60, - annotation: None, }, Indel { insert: "\n\nfn fun_name() -> i32 {\n 5\n}", delete: 110..110, - annotation: None, }, ], + annotation: None, }, Some( SnippetEdit( diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/tests/generated.rs b/src/tools/rust-analyzer/crates/ide-assists/src/tests/generated.rs index 4234124d670ff..01ab0be34b280 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/tests/generated.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/tests/generated.rs @@ -439,6 +439,30 @@ fn main() { ) } +#[test] +fn doctest_convert_for_loop_to_while_let() { + check_doc_test( + "convert_for_loop_to_while_let", + r#####" +fn main() { + let x = vec![1, 2, 3]; + for$0 v in x { + let y = v * 2; + }; +} +"#####, + r#####" +fn main() { + let x = vec![1, 2, 3]; + let mut tmp = x.into_iter(); + while let Some(v) = tmp.next() { + let y = v * 2; + }; +} +"#####, + ) +} + #[test] fn doctest_convert_for_loop_with_for_each() { check_doc_test( @@ -1713,7 +1737,7 @@ fn foo() { bar("", baz()); } -fn bar(arg: &str, baz: Baz) ${0:-> _} { +fn bar(arg: &'static str, baz: Baz) ${0:-> _} { todo!() } @@ -2724,6 +2748,25 @@ fn main() { ) } +#[test] +fn doctest_remove_underscore_from_used_variables() { + check_doc_test( + "remove_underscore_from_used_variables", + r#####" +fn main() { + let mut _$0foo = 1; + _foo = 2; +} +"#####, + r#####" +fn main() { + let mut foo = 1; + foo = 2; +} +"#####, + ) +} + #[test] fn doctest_remove_unused_imports() { check_doc_test( @@ -3296,6 +3339,20 @@ sth!{ } ) } +#[test] +fn doctest_unmerge_imports() { + check_doc_test( + "unmerge_imports", + r#####" +use std::fmt::{Debug, Display$0}; +"#####, + r#####" +use std::fmt::{Debug}; +use std::fmt::Display; +"#####, + ) +} + #[test] fn doctest_unmerge_match_arm() { check_doc_test( @@ -3322,20 +3379,6 @@ fn handle(action: Action) { ) } -#[test] -fn doctest_unmerge_use() { - check_doc_test( - "unmerge_use", - r#####" -use std::fmt::{Debug, Display$0}; -"#####, - r#####" -use std::fmt::{Debug}; -use std::fmt::Display; -"#####, - ) -} - #[test] fn doctest_unnecessary_async() { check_doc_test( diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/utils.rs b/src/tools/rust-analyzer/crates/ide-assists/src/utils.rs index a6fa1706710d1..ef6914fda1d5c 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/utils.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/utils.rs @@ -2,32 +2,36 @@ pub(crate) use gen_trait_fn_body::gen_trait_fn_body; use hir::{ - db::{ExpandDatabase, HirDatabase}, DisplayTarget, HasAttrs as HirHasAttrs, HirDisplay, InFile, ModuleDef, PathResolution, Semantics, + db::{ExpandDatabase, HirDatabase}, }; use ide_db::{ + RootDatabase, + assists::ExprFillDefaultMode, famous_defs::FamousDefs, path_transform::PathTransform, syntax_helpers::{node_ext::preorder_expr, prettify_macro_expansion}, - RootDatabase, }; use stdx::format_to; use syntax::{ + AstNode, AstToken, Direction, NodeOrToken, SourceFile, + SyntaxKind::*, + SyntaxNode, SyntaxToken, T, TextRange, TextSize, WalkEvent, ast::{ - self, + self, HasArgList, HasAttrs, HasGenericParams, HasName, HasTypeBounds, Whitespace, edit::{AstNodeEdit, IndentLevel}, edit_in_place::{AttrsOwnerEdit, Indent, Removable}, make, syntax_factory::SyntaxFactory, - HasArgList, HasAttrs, HasGenericParams, HasName, HasTypeBounds, Whitespace, }, - ted, AstNode, AstToken, Direction, NodeOrToken, SourceFile, - SyntaxKind::*, - SyntaxNode, SyntaxToken, TextRange, TextSize, WalkEvent, T, + ted, }; -use crate::assist_context::{AssistContext, SourceChangeBuilder}; +use crate::{ + AssistConfig, + assist_context::{AssistContext, SourceChangeBuilder}, +}; mod gen_trait_fn_body; pub(crate) mod ref_field_expr; @@ -82,11 +86,7 @@ pub fn test_related_attribute_syn(fn_def: &ast::Fn) -> Option { fn_def.attrs().find_map(|attr| { let path = attr.path()?; let text = path.syntax().text().to_string(); - if text.starts_with("test") || text.ends_with("test") { - Some(attr) - } else { - None - } + if text.starts_with("test") || text.ends_with("test") { Some(attr) } else { None } }) } @@ -178,6 +178,7 @@ pub fn filter_assoc_items( /// inserted. pub fn add_trait_assoc_items_to_impl( sema: &Semantics<'_, RootDatabase>, + config: &AssistConfig, original_items: &[InFile], trait_: hir::Trait, impl_: &ast::Impl, @@ -216,13 +217,21 @@ pub fn add_trait_assoc_items_to_impl( }); let assoc_item_list = impl_.get_or_create_assoc_item_list(); + let mut first_item = None; for item in items { first_item.get_or_insert_with(|| item.clone()); match &item { ast::AssocItem::Fn(fn_) if fn_.body().is_none() => { let body = AstNodeEdit::indent( - &make::block_expr(None, Some(make::ext::expr_todo())), + &make::block_expr( + None, + Some(match config.expr_fill_default { + ExprFillDefaultMode::Todo => make::ext::expr_todo(), + ExprFillDefaultMode::Underscore => make::ext::expr_underscore(), + ExprFillDefaultMode::Default => make::ext::expr_todo(), + }), + ), new_indent_level, ); ted::replace(fn_.get_or_create_body().syntax(), body.clone_for_update().syntax()) @@ -333,7 +342,11 @@ fn invert_special_case_legacy(expr: &ast::Expr) -> Option { T![>] => T![<=], T![>=] => T![<], // Parenthesize other expressions before prefixing `!` - _ => return Some(make::expr_prefix(T![!], make::expr_paren(expr.clone())).into()), + _ => { + return Some( + make::expr_prefix(T![!], make::expr_paren(expr.clone()).into()).into(), + ); + } }; ted::replace(op_token, make::token(rev_token)); Some(bin.into()) @@ -350,7 +363,7 @@ fn invert_special_case_legacy(expr: &ast::Expr) -> Option { "is_err" => "is_ok", _ => return None, }; - Some(make::expr_method_call(receiver, make::name_ref(method), arg_list)) + Some(make::expr_method_call(receiver, make::name_ref(method), arg_list).into()) } ast::Expr::PrefixExpr(pe) if pe.op_kind()? == ast::UnaryOp::Not => match pe.expr()? { ast::Expr::ParenExpr(parexpr) => parexpr.expr(), @@ -498,11 +511,7 @@ pub(crate) fn find_struct_impl( }; let not_trait_impl = blk.trait_(db).is_none(); - if !(same_ty && not_trait_impl) { - None - } else { - Some(impl_blk) - } + if !(same_ty && not_trait_impl) { None } else { Some(impl_blk) } }); if let Some(ref impl_blk) = block { @@ -859,6 +868,7 @@ impl ReferenceConversion { make::expr_ref(expr, false) } else { make::expr_method_call(expr, make::name_ref("as_ref"), make::arg_list([])) + .into() } } } @@ -1028,6 +1038,20 @@ fn test_required_hashes() { assert_eq!(5, required_hashes("#ab\"##\"####c")); } +/// Calculate the string literal suffix length +pub(crate) fn string_suffix(s: &str) -> Option<&str> { + s.rfind(['"', '\'', '#']).map(|i| &s[i + 1..]) +} +#[test] +fn test_string_suffix() { + assert_eq!(Some(""), string_suffix(r#""abc""#)); + assert_eq!(Some(""), string_suffix(r#""""#)); + assert_eq!(Some("a"), string_suffix(r#"""a"#)); + assert_eq!(Some("i32"), string_suffix(r#"""i32"#)); + assert_eq!(Some("i32"), string_suffix(r#"r""i32"#)); + assert_eq!(Some("i32"), string_suffix(r##"r#""#i32"##)); +} + /// Replaces the record expression, handling field shorthands including inside macros. pub(crate) fn replace_record_field_expr( ctx: &AssistContext<'_>, diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/utils/gen_trait_fn_body.rs b/src/tools/rust-analyzer/crates/ide-assists/src/utils/gen_trait_fn_body.rs index 7a9bdfe1ecc24..4ea56dc46aaaa 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/utils/gen_trait_fn_body.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/utils/gen_trait_fn_body.rs @@ -2,7 +2,7 @@ use hir::TraitRef; use syntax::{ - ast::{self, edit::AstNodeEdit, make, AstNode, BinaryOp, CmpOp, HasName, LogicOp}, + ast::{self, AstNode, BinaryOp, CmpOp, HasName, LogicOp, edit::AstNodeEdit, make}, ted, }; @@ -35,7 +35,7 @@ fn gen_clone_impl(adt: &ast::Adt, func: &ast::Fn) -> Option<()> { stdx::always!(func.name().is_some_and(|name| name.text() == "clone")); fn gen_clone_call(target: ast::Expr) -> ast::Expr { let method = make::name_ref("clone"); - make::expr_method_call(target, method, make::arg_list(None)) + make::expr_method_call(target, method, make::arg_list(None)).into() } let expr = match adt { // `Clone` cannot be derived for unions, so no default impl can be provided. @@ -83,7 +83,8 @@ fn gen_clone_impl(adt: &ast::Adt, func: &ast::Fn) -> Option<()> { } let pat = make::tuple_struct_pat(variant_name.clone(), pats.into_iter()); let struct_name = make::expr_path(variant_name); - let tuple_expr = make::expr_call(struct_name, make::arg_list(fields)); + let tuple_expr = + make::expr_call(struct_name, make::arg_list(fields)).into(); arms.push(make::match_arm(pat.into(), None, tuple_expr)); } @@ -126,7 +127,7 @@ fn gen_clone_impl(adt: &ast::Adt, func: &ast::Fn) -> Option<()> { fields.push(gen_clone_call(target)); } let struct_name = make::expr_path(make::ext::ident_path("Self")); - make::expr_call(struct_name, make::arg_list(fields)) + make::expr_call(struct_name, make::arg_list(fields)).into() } // => Self { } None => { @@ -165,7 +166,7 @@ fn gen_debug_impl(adt: &ast::Adt, func: &ast::Fn) -> Option<()> { let method = make::name_ref("debug_struct"); let struct_name = format!("\"{name}\""); let args = make::arg_list(Some(make::expr_literal(&struct_name).into())); - let mut expr = make::expr_method_call(target, method, args); + let mut expr = make::expr_method_call(target, method, args).into(); let mut pats = vec![]; for field in list.fields() { @@ -181,12 +182,13 @@ fn gen_debug_impl(adt: &ast::Adt, func: &ast::Fn) -> Option<()> { let path = &format!("{field_name}"); let path = make::expr_path(make::ext::ident_path(path)); let args = make::arg_list(vec![name, path]); - expr = make::expr_method_call(expr, method_name, args); + expr = make::expr_method_call(expr, method_name, args).into(); } // => .finish() let method = make::name_ref("finish"); - let expr = make::expr_method_call(expr, method, make::arg_list(None)); + let expr = + make::expr_method_call(expr, method, make::arg_list(None)).into(); // => MyStruct { fields.. } => f.debug_struct("MyStruct")...finish(), let pat = make::record_pat(variant_name.clone(), pats.into_iter()); @@ -198,7 +200,7 @@ fn gen_debug_impl(adt: &ast::Adt, func: &ast::Fn) -> Option<()> { let method = make::name_ref("debug_tuple"); let struct_name = format!("\"{name}\""); let args = make::arg_list(Some(make::expr_literal(&struct_name).into())); - let mut expr = make::expr_method_call(target, method, args); + let mut expr = make::expr_method_call(target, method, args).into(); let mut pats = vec![]; for (i, _) in list.fields().enumerate() { @@ -214,12 +216,13 @@ fn gen_debug_impl(adt: &ast::Adt, func: &ast::Fn) -> Option<()> { let field_path = &name.to_string(); let field_path = make::expr_path(make::ext::ident_path(field_path)); let args = make::arg_list(vec![field_path]); - expr = make::expr_method_call(expr, method_name, args); + expr = make::expr_method_call(expr, method_name, args).into(); } // => .finish() let method = make::name_ref("finish"); - let expr = make::expr_method_call(expr, method, make::arg_list(None)); + let expr = + make::expr_method_call(expr, method, make::arg_list(None)).into(); // => MyStruct (fields..) => f.debug_tuple("MyStruct")...finish(), let pat = make::tuple_struct_pat(variant_name.clone(), pats.into_iter()); @@ -227,12 +230,14 @@ fn gen_debug_impl(adt: &ast::Adt, func: &ast::Fn) -> Option<()> { } None => { let fmt_string = make::expr_literal(&(format!("\"{name}\""))).into(); - let args = make::arg_list([target, fmt_string]); - let macro_name = make::expr_path(make::ext::ident_path("write")); - let macro_call = make::expr_macro_call(macro_name, args); + let args = make::ext::token_tree_from_node( + make::arg_list([target, fmt_string]).syntax(), + ); + let macro_name = make::ext::ident_path("write"); + let macro_call = make::expr_macro(macro_name, args); let variant_name = make::path_pat(variant_name); - arms.push(make::match_arm(variant_name, None, macro_call)); + arms.push(make::match_arm(variant_name, None, macro_call.into())); } } } @@ -254,12 +259,12 @@ fn gen_debug_impl(adt: &ast::Adt, func: &ast::Fn) -> Option<()> { let expr = match strukt.field_list() { // => f.debug_struct("Name").finish() - None => make::expr_method_call(target, make::name_ref("debug_struct"), args), + None => make::expr_method_call(target, make::name_ref("debug_struct"), args).into(), // => f.debug_struct("Name").field("foo", &self.foo).finish() Some(ast::FieldList::RecordFieldList(field_list)) => { let method = make::name_ref("debug_struct"); - let mut expr = make::expr_method_call(target, method, args); + let mut expr = make::expr_method_call(target, method, args).into(); for field in field_list.fields() { let name = field.name()?; let f_name = make::expr_literal(&(format!("\"{name}\""))).into(); @@ -267,7 +272,7 @@ fn gen_debug_impl(adt: &ast::Adt, func: &ast::Fn) -> Option<()> { let f_path = make::expr_ref(f_path, false); let f_path = make::expr_field(f_path, &format!("{name}")); let args = make::arg_list([f_name, f_path]); - expr = make::expr_method_call(expr, make::name_ref("field"), args); + expr = make::expr_method_call(expr, make::name_ref("field"), args).into(); } expr } @@ -275,20 +280,21 @@ fn gen_debug_impl(adt: &ast::Adt, func: &ast::Fn) -> Option<()> { // => f.debug_tuple("Name").field(self.0).finish() Some(ast::FieldList::TupleFieldList(field_list)) => { let method = make::name_ref("debug_tuple"); - let mut expr = make::expr_method_call(target, method, args); + let mut expr = make::expr_method_call(target, method, args).into(); for (i, _) in field_list.fields().enumerate() { let f_path = make::expr_path(make::ext::ident_path("self")); let f_path = make::expr_ref(f_path, false); let f_path = make::expr_field(f_path, &format!("{i}")); let method = make::name_ref("field"); - expr = make::expr_method_call(expr, method, make::arg_list(Some(f_path))); + expr = make::expr_method_call(expr, method, make::arg_list(Some(f_path))) + .into(); } expr } }; let method = make::name_ref("finish"); - let expr = make::expr_method_call(expr, method, make::arg_list(None)); + let expr = make::expr_method_call(expr, method, make::arg_list(None)).into(); let body = make::block_expr(None, Some(expr)).indent(ast::edit::IndentLevel(1)); ted::replace(func.body()?.syntax(), body.clone_for_update().syntax()); Some(()) @@ -300,7 +306,7 @@ fn gen_debug_impl(adt: &ast::Adt, func: &ast::Fn) -> Option<()> { fn gen_default_impl(adt: &ast::Adt, func: &ast::Fn) -> Option<()> { fn gen_default_call() -> Option { let fn_name = make::ext::path_from_idents(["Default", "default"])?; - Some(make::expr_call(make::expr_path(fn_name), make::arg_list(None))) + Some(make::expr_call(make::expr_path(fn_name), make::arg_list(None)).into()) } match adt { // `Debug` cannot be derived for unions, so no default impl can be provided. @@ -327,7 +333,7 @@ fn gen_default_impl(adt: &ast::Adt, func: &ast::Fn) -> Option<()> { .fields() .map(|_| gen_default_call()) .collect::>>()?; - make::expr_call(struct_name, make::arg_list(fields)) + make::expr_call(struct_name, make::arg_list(fields)).into() } None => { let struct_name = make::ext::ident_path("Self"); @@ -348,7 +354,7 @@ fn gen_hash_impl(adt: &ast::Adt, func: &ast::Fn) -> Option<()> { fn gen_hash_call(target: ast::Expr) -> ast::Stmt { let method = make::name_ref("hash"); let arg = make::expr_path(make::ext::ident_path("state")); - let expr = make::expr_method_call(target, method, make::arg_list(Some(arg))); + let expr = make::expr_method_call(target, method, make::arg_list(Some(arg))).into(); make::expr_stmt(expr).into() } @@ -361,7 +367,7 @@ fn gen_hash_impl(adt: &ast::Adt, func: &ast::Fn) -> Option<()> { let fn_name = make_discriminant()?; let arg = make::expr_path(make::ext::ident_path("self")); - let fn_call = make::expr_call(fn_name, make::arg_list(Some(arg))); + let fn_call = make::expr_call(fn_name, make::arg_list(Some(arg))).into(); let stmt = gen_hash_call(fn_call); make::block_expr(Some(stmt), None).indent(ast::edit::IndentLevel(1)) @@ -444,9 +450,11 @@ fn gen_partial_eq(adt: &ast::Adt, func: &ast::Fn, trait_ref: Option) - ast::Adt::Enum(enum_) => { // => std::mem::discriminant(self) == std::mem::discriminant(other) let lhs_name = make::expr_path(make::ext::ident_path("self")); - let lhs = make::expr_call(make_discriminant()?, make::arg_list(Some(lhs_name.clone()))); + let lhs = make::expr_call(make_discriminant()?, make::arg_list(Some(lhs_name.clone()))) + .into(); let rhs_name = make::expr_path(make::ext::ident_path("other")); - let rhs = make::expr_call(make_discriminant()?, make::arg_list(Some(rhs_name.clone()))); + let rhs = make::expr_call(make_discriminant()?, make::arg_list(Some(rhs_name.clone()))) + .into(); let eq_check = make::expr_bin_op(lhs, BinaryOp::CmpOp(CmpOp::Eq { negated: false }), rhs); @@ -613,7 +621,7 @@ fn gen_partial_ord(adt: &ast::Adt, func: &ast::Fn, trait_ref: Option) fn gen_partial_cmp_call(lhs: ast::Expr, rhs: ast::Expr) -> ast::Expr { let rhs = make::expr_ref(rhs, false); let method = make::name_ref("partial_cmp"); - make::expr_method_call(lhs, method, make::arg_list(Some(rhs))) + make::expr_method_call(lhs, method, make::arg_list(Some(rhs))).into() } // Check that self type and rhs type match. We don't know how to implement the method diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/utils/ref_field_expr.rs b/src/tools/rust-analyzer/crates/ide-assists/src/utils/ref_field_expr.rs index d434872ea595e..840b26a7ad58b 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/utils/ref_field_expr.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/utils/ref_field_expr.rs @@ -4,8 +4,8 @@ //! It determines whether to deref the new expression and/or wrap it in parentheses, //! based on the parent of the existing expression. use syntax::{ - ast::{self, make, FieldExpr, MethodCallExpr}, AstNode, T, + ast::{self, FieldExpr, MethodCallExpr, make}, }; use crate::AssistContext; @@ -125,7 +125,7 @@ impl RefData { } if self.needs_parentheses { - expr = make::expr_paren(expr); + expr = make::expr_paren(expr).into(); } expr diff --git a/src/tools/rust-analyzer/crates/ide-completion/Cargo.toml b/src/tools/rust-analyzer/crates/ide-completion/Cargo.toml index 68cc7a0b9a6df..94c01e333ed44 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/Cargo.toml +++ b/src/tools/rust-analyzer/crates/ide-completion/Cargo.toml @@ -12,7 +12,7 @@ rust-version.workspace = true [lib] [dependencies] -cov-mark = "2.0.0-pre.1" +cov-mark = "2.0.0" itertools.workspace = true tracing.workspace = true @@ -29,7 +29,7 @@ syntax.workspace = true hir.workspace = true [dev-dependencies] -expect-test = "1.4.0" +expect-test = "1.5.1" # local deps test-utils.workspace = true diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions.rs index a22e7b272ea05..5d68aca9e615f 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions.rs @@ -24,17 +24,19 @@ pub(crate) mod vis; use std::iter; -use hir::{sym, HasAttrs, Name, ScopeDef, Variant}; -use ide_db::{imports::import_assets::LocatedImport, RootDatabase, SymbolKind}; -use syntax::{ast, SmolStr, ToSmolStr}; +use hir::{HasAttrs, Name, ScopeDef, Variant, sym}; +use ide_db::{RootDatabase, SymbolKind, imports::import_assets::LocatedImport}; +use syntax::{SmolStr, ToSmolStr, ast}; use crate::{ + CompletionContext, CompletionItem, CompletionItemKind, context::{ DotAccess, ItemListKind, NameContext, NameKind, NameRefContext, NameRefKind, PathCompletionCtx, PathKind, PatternContext, TypeLocation, Visible, }, item::Builder, render::{ + RenderContext, const_::render_const, function::{render_fn, render_method}, literal::{render_struct_literal, render_variant_lit}, @@ -44,9 +46,7 @@ use crate::{ render_tuple_field, type_alias::{render_type_alias, render_type_alias_with_eq}, union_literal::render_union_literal, - RenderContext, }, - CompletionContext, CompletionItem, CompletionItemKind, }; /// Represents an in-progress set of completions being built. @@ -631,8 +631,7 @@ fn enum_variants_with_paths( let mut process_variant = |variant: Variant| { let self_path = hir::ModPath::from_segments( hir::PathKind::Plain, - iter::once(Name::new_symbol_root(sym::Self_.clone())) - .chain(iter::once(variant.name(ctx.db))), + iter::once(Name::new_symbol_root(sym::Self_)).chain(iter::once(variant.name(ctx.db))), ); cb(acc, ctx, variant, self_path); diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute.rs index cf5427bae38de..3c195f80fea47 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute.rs @@ -5,22 +5,22 @@ use std::sync::LazyLock; use ide_db::{ + FxHashMap, SymbolKind, generated::lints::{ - Lint, CLIPPY_LINTS, CLIPPY_LINT_GROUPS, DEFAULT_LINTS, FEATURES, RUSTDOC_LINTS, + CLIPPY_LINT_GROUPS, CLIPPY_LINTS, DEFAULT_LINTS, FEATURES, Lint, RUSTDOC_LINTS, }, syntax_helpers::node_ext::parse_tt_as_comma_sep_paths, - FxHashMap, SymbolKind, }; use itertools::Itertools; use syntax::{ - ast::{self, AttrKind}, AstNode, Edition, SyntaxKind, T, + ast::{self, AttrKind}, }; use crate::{ + Completions, context::{AttrCtx, CompletionContext, PathCompletionCtx, Qualified}, item::CompletionItem, - Completions, }; mod cfg; @@ -380,7 +380,7 @@ fn parse_comma_sep_expr(input: ast::TokenTree) -> Option> { .children_with_tokens() .skip(1) .take_while(|it| it.as_token() != Some(&r_paren)); - let input_expressions = tokens.group_by(|tok| tok.kind() == T![,]); + let input_expressions = tokens.chunk_by(|tok| tok.kind() == T![,]); Some( input_expressions .into_iter() diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute/cfg.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute/cfg.rs index cda0da13b26eb..1676a8467c85f 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute/cfg.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute/cfg.rs @@ -2,9 +2,9 @@ use ide_db::SymbolKind; use itertools::Itertools; -use syntax::{algo, ast::Ident, AstToken, Direction, NodeOrToken, SyntaxKind}; +use syntax::{AstToken, Direction, NodeOrToken, SyntaxKind, algo, ast::Ident}; -use crate::{completions::Completions, context::CompletionContext, CompletionItem}; +use crate::{CompletionItem, completions::Completions, context::CompletionContext}; pub(crate) fn complete_cfg(acc: &mut Completions, ctx: &CompletionContext<'_>) { let add_completion = |item: &str| { diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute/derive.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute/derive.rs index 1f8927401b2f8..2fc07e0138280 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute/derive.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute/derive.rs @@ -1,13 +1,13 @@ //! Completion for derives use hir::ScopeDef; -use ide_db::{documentation::HasDocs, SymbolKind}; +use ide_db::{SymbolKind, documentation::HasDocs}; use itertools::Itertools; use syntax::{SmolStr, ToSmolStr}; use crate::{ + Completions, context::{CompletionContext, ExistingDerives, PathCompletionCtx, Qualified}, item::CompletionItem, - Completions, }; pub(crate) fn complete_derive_path( diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute/lint.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute/lint.rs index 04f40e805ad68..c87c46d98127b 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute/lint.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute/lint.rs @@ -1,8 +1,8 @@ //! Completion for lints -use ide_db::{documentation::Documentation, generated::lints::Lint, SymbolKind}; +use ide_db::{SymbolKind, documentation::Documentation, generated::lints::Lint}; use syntax::ast; -use crate::{context::CompletionContext, item::CompletionItem, Completions}; +use crate::{Completions, context::CompletionContext, item::CompletionItem}; pub(super) fn complete_lint( acc: &mut Completions, diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute/macro_use.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute/macro_use.rs index deb12282c025b..0641a4f6c3fe5 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute/macro_use.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute/macro_use.rs @@ -3,7 +3,7 @@ use hir::ModuleDef; use ide_db::SymbolKind; use syntax::ast; -use crate::{context::CompletionContext, item::CompletionItem, Completions}; +use crate::{Completions, context::CompletionContext, item::CompletionItem}; pub(super) fn complete_macro_use( acc: &mut Completions, diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute/repr.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute/repr.rs index 12652b448925b..cb7ccf7373123 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute/repr.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute/repr.rs @@ -3,7 +3,7 @@ use ide_db::SymbolKind; use syntax::ast; -use crate::{context::CompletionContext, item::CompletionItem, Completions}; +use crate::{Completions, context::CompletionContext, item::CompletionItem}; pub(super) fn complete_repr( acc: &mut Completions, diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/dot.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/dot.rs index b38b9ac1f5391..4f21136d214ee 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/dot.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/dot.rs @@ -2,16 +2,16 @@ use std::ops::ControlFlow; -use hir::{HasContainer, ItemContainer, MethodCandidateCallback, Name}; +use hir::{Complete, HasContainer, ItemContainer, MethodCandidateCallback, Name}; use ide_db::FxHashSet; use syntax::SmolStr; use crate::{ + CompletionItem, CompletionItemKind, Completions, context::{ CompletionContext, DotAccess, DotAccessExprCtx, DotAccessKind, PathCompletionCtx, PathExprCtx, Qualified, }, - CompletionItem, CompletionItemKind, Completions, }; /// Complete dot accesses, i.e. fields or methods. @@ -259,7 +259,9 @@ fn complete_methods( // This needs to come before the `seen_methods` test, so that if we see the same method twice, // once as inherent and once not, we will include it. if let ItemContainer::Trait(trait_) = func.container(self.ctx.db) { - if self.ctx.exclude_traits.contains(&trait_) { + if self.ctx.exclude_traits.contains(&trait_) + || trait_.complete(self.ctx.db) == Complete::IgnoreMethods + { return ControlFlow::Continue(()); } } diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/env_vars.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/env_vars.rs index 40af5203e9c32..cd18b3dcfdc2b 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/env_vars.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/env_vars.rs @@ -1,36 +1,47 @@ //! Completes environment variables defined by Cargo //! () -use hir::MacroFileIdExt; use ide_db::syntax_helpers::node_ext::macro_call_for_string_token; use syntax::{ - ast::{self, IsString}, AstToken, + ast::{self, IsString}, }; use crate::{ - completions::Completions, context::CompletionContext, CompletionItem, CompletionItemKind, + CompletionItem, CompletionItemKind, completions::Completions, context::CompletionContext, }; const CARGO_DEFINED_VARS: &[(&str, &str)] = &[ - ("CARGO","Path to the cargo binary performing the build"), - ("CARGO_MANIFEST_DIR","The directory containing the manifest of your package"), - ("CARGO_PKG_VERSION","The full version of your package"), - ("CARGO_PKG_VERSION_MAJOR","The major version of your package"), - ("CARGO_PKG_VERSION_MINOR","The minor version of your package"), - ("CARGO_PKG_VERSION_PATCH","The patch version of your package"), - ("CARGO_PKG_VERSION_PRE","The pre-release version of your package"), - ("CARGO_PKG_AUTHORS","Colon separated list of authors from the manifest of your package"), - ("CARGO_PKG_NAME","The name of your package"), - ("CARGO_PKG_DESCRIPTION","The description from the manifest of your package"), - ("CARGO_PKG_HOMEPAGE","The home page from the manifest of your package"), - ("CARGO_PKG_REPOSITORY","The repository from the manifest of your package"), - ("CARGO_PKG_LICENSE","The license from the manifest of your package"), - ("CARGO_PKG_LICENSE_FILE","The license file from the manifest of your package"), - ("CARGO_PKG_RUST_VERSION","The Rust version from the manifest of your package. Note that this is the minimum Rust version supported by the package, not the current Rust version"), - ("CARGO_CRATE_NAME","The name of the crate that is currently being compiled"), - ("CARGO_BIN_NAME","The name of the binary that is currently being compiled (if it is a binary). This name does not include any file extension, such as .exe"), - ("CARGO_PRIMARY_PACKAGE","This environment variable will be set if the package being built is primary. Primary packages are the ones the user selected on the command-line, either with -p flags or the defaults based on the current directory and the default workspace members. This environment variable will not be set when building dependencies. This is only set when compiling the package (not when running binaries or tests)"), - ("CARGO_TARGET_TMPDIR","Only set when building integration test or benchmark code. This is a path to a directory inside the target directory where integration tests or benchmarks are free to put any data needed by the tests/benches. Cargo initially creates this directory but doesn't manage its content in any way, this is the responsibility of the test code") + ("CARGO", "Path to the cargo binary performing the build"), + ("CARGO_MANIFEST_DIR", "The directory containing the manifest of your package"), + ("CARGO_PKG_VERSION", "The full version of your package"), + ("CARGO_PKG_VERSION_MAJOR", "The major version of your package"), + ("CARGO_PKG_VERSION_MINOR", "The minor version of your package"), + ("CARGO_PKG_VERSION_PATCH", "The patch version of your package"), + ("CARGO_PKG_VERSION_PRE", "The pre-release version of your package"), + ("CARGO_PKG_AUTHORS", "Colon separated list of authors from the manifest of your package"), + ("CARGO_PKG_NAME", "The name of your package"), + ("CARGO_PKG_DESCRIPTION", "The description from the manifest of your package"), + ("CARGO_PKG_HOMEPAGE", "The home page from the manifest of your package"), + ("CARGO_PKG_REPOSITORY", "The repository from the manifest of your package"), + ("CARGO_PKG_LICENSE", "The license from the manifest of your package"), + ("CARGO_PKG_LICENSE_FILE", "The license file from the manifest of your package"), + ( + "CARGO_PKG_RUST_VERSION", + "The Rust version from the manifest of your package. Note that this is the minimum Rust version supported by the package, not the current Rust version", + ), + ("CARGO_CRATE_NAME", "The name of the crate that is currently being compiled"), + ( + "CARGO_BIN_NAME", + "The name of the binary that is currently being compiled (if it is a binary). This name does not include any file extension, such as .exe", + ), + ( + "CARGO_PRIMARY_PACKAGE", + "This environment variable will be set if the package being built is primary. Primary packages are the ones the user selected on the command-line, either with -p flags or the defaults based on the current directory and the default workspace members. This environment variable will not be set when building dependencies. This is only set when compiling the package (not when running binaries or tests)", + ), + ( + "CARGO_TARGET_TMPDIR", + "Only set when building integration test or benchmark code. This is a path to a directory inside the target directory where integration tests or benchmarks are free to put any data needed by the tests/benches. Cargo initially creates this directory but doesn't manage its content in any way, this is the responsibility of the test code", + ), ]; pub(crate) fn complete_cargo_env_vars( diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/expr.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/expr.rs index b28b6e50e2284..7fbd1fbc1af4b 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/expr.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/expr.rs @@ -2,14 +2,14 @@ use std::ops::ControlFlow; -use hir::{sym, Name, PathCandidateCallback, ScopeDef}; +use hir::{Complete, Name, PathCandidateCallback, ScopeDef, sym}; use ide_db::FxHashSet; use syntax::ast; use crate::{ + CompletionContext, Completions, completions::record::add_default_update, context::{BreakableKind, PathCompletionCtx, PathExprCtx, Qualified}, - CompletionContext, Completions, }; struct PathCallback<'a, F> { @@ -33,10 +33,10 @@ where fn on_trait_item(&mut self, item: hir::AssocItem) -> ControlFlow<()> { // The excluded check needs to come before the `seen` test, so that if we see the same method twice, // once as inherent and once not, we will include it. - if item - .container_trait(self.ctx.db) - .is_none_or(|trait_| !self.ctx.exclude_traits.contains(&trait_)) - && self.seen.insert(item) + if item.container_trait(self.ctx.db).is_none_or(|trait_| { + !self.ctx.exclude_traits.contains(&trait_) + && trait_.complete(self.ctx.db) != Complete::IgnoreMethods + }) && self.seen.insert(item) { (self.add_assoc_item)(self.acc, item); } @@ -79,11 +79,7 @@ pub(crate) fn complete_expr_path( let wants_const_token = ref_expr_parent.is_some() && has_raw_token && !has_const_token && !has_mut_token; let wants_mut_token = if ref_expr_parent.is_some() { - if has_raw_token { - !has_const_token && !has_mut_token - } else { - !has_mut_token - } + if has_raw_token { !has_const_token && !has_mut_token } else { !has_mut_token } } else { false }; @@ -108,7 +104,9 @@ pub(crate) fn complete_expr_path( .iter() .copied() .map(hir::Trait::from) - .filter(|it| !ctx.exclude_traits.contains(it)) + .filter(|it| { + !ctx.exclude_traits.contains(it) && it.complete(ctx.db) != Complete::IgnoreMethods + }) .flat_map(|it| it.items(ctx.sema.db)) .for_each(|item| add_assoc_item(acc, item)), Qualified::TypeAnchor { trait_: Some(trait_), .. } => { @@ -262,7 +260,7 @@ pub(crate) fn complete_expr_path( path_ctx, strukt, None, - Some(Name::new_symbol_root(sym::Self_.clone())), + Some(Name::new_symbol_root(sym::Self_)), ); } } @@ -282,7 +280,7 @@ pub(crate) fn complete_expr_path( ctx, un, None, - Some(Name::new_symbol_root(sym::Self_.clone())), + Some(Name::new_symbol_root(sym::Self_)), ); } } @@ -349,6 +347,7 @@ pub(crate) fn complete_expr_path( if !in_block_expr { add_keyword("unsafe", "unsafe {\n $0\n}"); + add_keyword("const", "const {\n $0\n}"); } add_keyword("match", "match $1 {\n $0\n}"); add_keyword("while", "while $1 {\n $0\n}"); @@ -364,9 +363,14 @@ pub(crate) fn complete_expr_path( add_keyword("true", "true"); add_keyword("false", "false"); - if in_condition || in_block_expr { - add_keyword("letm", "let mut $0"); - add_keyword("let", "let $0"); + if in_condition { + add_keyword("letm", "let mut $1 = $0"); + add_keyword("let", "let $1 = $0"); + } + + if in_block_expr { + add_keyword("letm", "let mut $1 = $0;"); + add_keyword("let", "let $1 = $0;"); } if after_if_expr { diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/extern_abi.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/extern_abi.rs index 7c2cc2a6c1d8f..570d1a0a2db8a 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/extern_abi.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/extern_abi.rs @@ -1,11 +1,11 @@ //! Completes function abi strings. use syntax::{ - ast::{self, IsString}, AstNode, AstToken, SmolStr, + ast::{self, IsString}, }; use crate::{ - completions::Completions, context::CompletionContext, CompletionItem, CompletionItemKind, + CompletionItem, CompletionItemKind, completions::Completions, context::CompletionContext, }; // Most of these are feature gated, we should filter/add feature gate completions once we have them. diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/extern_crate.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/extern_crate.rs index 7cb710c2d963c..71a3e4eb4ed6d 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/extern_crate.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/extern_crate.rs @@ -1,10 +1,10 @@ //! Completion for extern crates use hir::Name; -use ide_db::{documentation::HasDocs, SymbolKind}; +use ide_db::{SymbolKind, documentation::HasDocs}; use syntax::ToSmolStr; -use crate::{context::CompletionContext, CompletionItem, CompletionItemKind}; +use crate::{CompletionItem, CompletionItemKind, context::CompletionContext}; use super::Completions; diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/field.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/field.rs index b795bbd872a0a..1441b0e3a01ae 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/field.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/field.rs @@ -1,8 +1,8 @@ //! Completion of field list position. use crate::{ - context::{PathCompletionCtx, Qualified}, CompletionContext, Completions, + context::{PathCompletionCtx, Qualified}, }; pub(crate) fn complete_field_list_tuple_variant( diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/flyimport.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/flyimport.rs index b5555e6610240..a747561380906 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/flyimport.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/flyimport.rs @@ -5,16 +5,16 @@ use ide_db::imports::{ insert_use::ImportScope, }; use itertools::Itertools; -use syntax::{ast, AstNode, SyntaxNode}; +use syntax::{AstNode, SyntaxNode, ast}; use crate::{ + Completions, config::AutoImportExclusionType, context::{ CompletionContext, DotAccess, PathCompletionCtx, PathKind, PatternContext, Qualified, TypeLocation, }, - render::{render_resolution_with_import, render_resolution_with_import_pat, RenderContext}, - Completions, + render::{RenderContext, render_resolution_with_import, render_resolution_with_import_pat}, }; // Feature: Completion With Autoimport @@ -268,19 +268,7 @@ fn import_on_the_fly( && !ctx.is_item_hidden(original_item) && ctx.check_stability(original_item.attrs(ctx.db).as_deref()) }) - .filter(|import| { - let def = import.item_to_import.into_module_def(); - if let Some(&kind) = ctx.exclude_flyimport.get(&def) { - if kind == AutoImportExclusionType::Always { - return false; - } - let method_imported = import.item_to_import != import.original_item; - if method_imported { - return false; - } - } - true - }) + .filter(|import| filter_excluded_flyimport(ctx, import)) .sorted_by(|a, b| { let key = |import_path| { ( @@ -366,24 +354,7 @@ fn import_on_the_fly_method( !ctx.is_item_hidden(&import.item_to_import) && !ctx.is_item_hidden(&import.original_item) }) - .filter(|import| { - let def = import.item_to_import.into_module_def(); - if let Some(&kind) = ctx.exclude_flyimport.get(&def) { - if kind == AutoImportExclusionType::Always { - return false; - } - let method_imported = import.item_to_import != import.original_item; - if method_imported { - return false; - } - } - - if let ModuleDef::Trait(_) = import.item_to_import.into_module_def() { - !ctx.exclude_flyimport.contains_key(&def) - } else { - true - } - }) + .filter(|import| filter_excluded_flyimport(ctx, import)) .sorted_by(|a, b| { let key = |import_path| { ( @@ -401,14 +372,32 @@ fn import_on_the_fly_method( Some(()) } +fn filter_excluded_flyimport(ctx: &CompletionContext<'_>, import: &LocatedImport) -> bool { + let def = import.item_to_import.into_module_def(); + let is_exclude_flyimport = ctx.exclude_flyimport.get(&def).copied(); + + if matches!(is_exclude_flyimport, Some(AutoImportExclusionType::Always)) + || !import.complete_in_flyimport.0 + { + return false; + } + let method_imported = import.item_to_import != import.original_item; + if method_imported + && (is_exclude_flyimport.is_some() + || ctx.exclude_flyimport.contains_key(&import.original_item.into_module_def())) + { + // If this is a method, exclude it either if it was excluded itself (which may not be caught above, + // because `item_to_import` is the trait), or if its trait was excluded. We don't need to check + // the attributes here, since they pass from trait to methods on import map construction. + return false; + } + true +} + fn import_name(ctx: &CompletionContext<'_>) -> String { let token_kind = ctx.token.kind(); - if token_kind.is_any_identifier() { - ctx.token.to_string() - } else { - String::new() - } + if token_kind.is_any_identifier() { ctx.token.to_string() } else { String::new() } } fn import_assets_for_path( diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/fn_param.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/fn_param.rs index e86eaad4d0f24..6d1e973dc4c5c 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/fn_param.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/fn_param.rs @@ -3,14 +3,14 @@ use hir::HirDisplay; use ide_db::FxHashMap; use syntax::{ - algo, + AstNode, Direction, SyntaxKind, TextRange, TextSize, algo, ast::{self, HasModuleItem}, - match_ast, AstNode, Direction, SyntaxKind, TextRange, TextSize, + match_ast, }; use crate::{ - context::{ParamContext, ParamKind, PatternContext}, CompletionContext, CompletionItem, CompletionItemKind, Completions, + context::{ParamContext, ParamKind, PatternContext}, }; // FIXME: Make this a submodule of [`pattern`] diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/format_string.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/format_string.rs index dcd40c3412c70..5ae65b05bc42e 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/format_string.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/format_string.rs @@ -1,11 +1,11 @@ //! Completes identifiers in format string literals. use hir::{ModuleDef, ScopeDef}; -use ide_db::{syntax_helpers::format_string::is_format_string, SymbolKind}; +use ide_db::{SymbolKind, syntax_helpers::format_string::is_format_string}; use itertools::Itertools; -use syntax::{ast, AstToken, TextRange, TextSize, ToSmolStr}; +use syntax::{AstToken, TextRange, TextSize, ToSmolStr, ast}; -use crate::{context::CompletionContext, CompletionItem, CompletionItemKind, Completions}; +use crate::{CompletionItem, CompletionItemKind, Completions, context::CompletionContext}; /// Complete identifiers in format strings. pub(crate) fn format_string( diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/item_list.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/item_list.rs index 3ab341e4eded4..893997cee473e 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/item_list.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/item_list.rs @@ -1,8 +1,8 @@ //! Completion of paths and keywords at item list position. use crate::{ - context::{ItemListKind, PathCompletionCtx, PathExprCtx, Qualified}, CompletionContext, Completions, + context::{ItemListKind, PathCompletionCtx, PathExprCtx, Qualified}, }; pub(crate) mod trait_impl; @@ -114,6 +114,7 @@ fn add_keywords(acc: &mut Completions, ctx: &CompletionContext<'_>, kind: Option add_keyword("trait", "trait $1 {\n $0\n}"); if no_vis_qualifiers { add_keyword("impl", "impl $1 {\n $0\n}"); + add_keyword("impl for", "impl $1 for $2 {\n $0\n}"); } } @@ -141,9 +142,10 @@ fn add_keywords(acc: &mut Completions, ctx: &CompletionContext<'_>, kind: Option add_keyword("struct", "struct $0"); add_keyword("trait", "trait $1 {\n $0\n}"); add_keyword("union", "union $1 {\n $0\n}"); - add_keyword("use", "use $0"); + add_keyword("use", "use $0;"); if no_vis_qualifiers { add_keyword("impl", "impl $1 {\n $0\n}"); + add_keyword("impl for", "impl $1 for $2 {\n $0\n}"); } } diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/item_list/trait_impl.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/item_list/trait_impl.rs index 831f5665f4aa0..58aead73fd6fc 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/item_list/trait_impl.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/item_list/trait_impl.rs @@ -31,20 +31,21 @@ //! } //! ``` -use hir::{db::ExpandDatabase, MacroFileId, Name}; +use hir::{MacroCallId, Name, db::ExpandDatabase}; use ide_db::text_edit::TextEdit; use ide_db::{ - documentation::HasDocs, path_transform::PathTransform, - syntax_helpers::prettify_macro_expansion, traits::get_missing_assoc_items, SymbolKind, + SymbolKind, documentation::HasDocs, path_transform::PathTransform, + syntax_helpers::prettify_macro_expansion, traits::get_missing_assoc_items, }; use syntax::{ - ast::{self, edit_in_place::AttrsOwnerEdit, make, HasGenericArgs, HasTypeBounds}, - format_smolstr, ted, AstNode, SmolStr, SyntaxElement, SyntaxKind, TextRange, ToSmolStr, T, + AstNode, SmolStr, SyntaxElement, SyntaxKind, T, TextRange, ToSmolStr, + ast::{self, HasGenericArgs, HasTypeBounds, edit_in_place::AttrsOwnerEdit, make}, + format_smolstr, ted, }; use crate::{ - context::PathCompletionCtx, CompletionContext, CompletionItem, CompletionItemKind, - CompletionRelevance, Completions, + CompletionContext, CompletionItem, CompletionItemKind, CompletionRelevance, Completions, + context::PathCompletionCtx, }; #[derive(Copy, Clone, Debug, PartialEq, Eq)] @@ -464,7 +465,7 @@ fn add_const_impl( fn make_const_compl_syntax( ctx: &CompletionContext<'_>, const_: &ast::Const, - macro_file: Option, + macro_file: Option, ) -> SmolStr { let const_ = if let Some(macro_file) = macro_file { let span_map = ctx.db.expansion_span_map(macro_file); @@ -492,7 +493,7 @@ fn make_const_compl_syntax( fn function_declaration( ctx: &CompletionContext<'_>, node: &ast::Fn, - macro_file: Option, + macro_file: Option, ) -> String { let node = if let Some(macro_file) = macro_file { let span_map = ctx.db.expansion_span_map(macro_file); diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/keyword.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/keyword.rs index 26c29e0202c0d..64bb1fce6ba02 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/keyword.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/keyword.rs @@ -56,6 +56,7 @@ mod tests { kw extern kw fn kw impl + kw impl for kw trait "#]], ); @@ -76,6 +77,7 @@ fn foo(a: A) { a.$0 } kw await expr.await sn box Box::new(expr) sn call function(expr) + sn const const {} sn dbg dbg!(expr) sn dbgr dbg!(&expr) sn deref *expr @@ -103,6 +105,7 @@ fn foo() { kw await expr.await sn box Box::new(expr) sn call function(expr) + sn const const {} sn dbg dbg!(expr) sn dbgr dbg!(&expr) sn deref *expr @@ -132,6 +135,7 @@ fn foo(a: A) { a.$0 } kw await expr.await sn box Box::new(expr) sn call function(expr) + sn const const {} sn dbg dbg!(expr) sn dbgr dbg!(&expr) sn deref *expr @@ -332,7 +336,7 @@ fn main() { } #[test] - fn completes_let_with_space() { + fn completes_let_in_block() { check_edit( "let", r#" @@ -342,7 +346,7 @@ fn main() { "#, r#" fn main() { - let $0 + let $1 = $0; } "#, ); @@ -355,7 +359,97 @@ fn main() { "#, r#" fn main() { - let mut $0 + let mut $1 = $0; +} +"#, + ); + } + + #[test] + fn completes_let_in_condition() { + check_edit( + "let", + r#" +fn main() { + if $0 {} +} +"#, + r#" +fn main() { + if let $1 = $0 {} +} +"#, + ); + check_edit( + "letm", + r#" +fn main() { + if $0 {} +} +"#, + r#" +fn main() { + if let mut $1 = $0 {} +} +"#, + ); + } + + #[test] + fn completes_let_in_no_empty_condition() { + check_edit( + "let", + r#" +fn main() { + if $0x {} +} +"#, + r#" +fn main() { + if let $1 = $0x {} +} +"#, + ); + check_edit( + "letm", + r#" +fn main() { + if $0x {} +} +"#, + r#" +fn main() { + if let mut $1 = $0x {} +} +"#, + ); + } + + #[test] + fn completes_let_in_condition_block() { + check_edit( + "let", + r#" +fn main() { + if { $0 } {} +} +"#, + r#" +fn main() { + if { let $1 = $0; } {} +} +"#, + ); + check_edit( + "letm", + r#" +fn main() { + if { $0 } {} +} +"#, + r#" +fn main() { + if { let mut $1 = $0; } {} } "#, ); diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/lifetime.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/lifetime.rs index 53a62fe49c5aa..8902cd09cec0c 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/lifetime.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/lifetime.rs @@ -7,7 +7,7 @@ //! there is no value in lifting these out into the outline module test since they will either not //! show up for normal completions, or they won't show completions other than lifetimes depending //! on the fixture input. -use hir::{sym, Name, ScopeDef}; +use hir::{Name, ScopeDef, sym}; use crate::{ completions::Completions, @@ -31,13 +31,13 @@ pub(crate) fn complete_lifetime( acc.add_lifetime(ctx, name); } }); - acc.add_lifetime(ctx, Name::new_symbol_root(sym::tick_static.clone())); + acc.add_lifetime(ctx, Name::new_symbol_root(sym::tick_static)); if !in_lifetime_param_bound && def.is_some_and(|def| { !matches!(def, hir::GenericDef::Function(_) | hir::GenericDef::Impl(_)) }) { - acc.add_lifetime(ctx, Name::new_symbol_root(sym::tick_underscore.clone())); + acc.add_lifetime(ctx, Name::new_symbol_root(sym::tick_underscore)); } } @@ -116,13 +116,13 @@ fn foo<'lifetime>(foo: &'a$0) {} check( r#" struct Foo; -impl<'impl> Foo { +impl<'r#impl> Foo { fn foo<'func>(&'a$0 self) {} } "#, expect![[r#" lt 'func - lt 'impl + lt 'r#impl lt 'static "#]], ); diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/mod_.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/mod_.rs index cca6a22f290d2..013747e4d0cc7 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/mod_.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/mod_.rs @@ -2,14 +2,14 @@ use std::iter; -use hir::{HirFileIdExt, Module}; +use hir::Module; use ide_db::{ - base_db::{SourceRootDatabase, VfsPath}, FxHashSet, RootDatabase, SymbolKind, + base_db::{SourceDatabase, VfsPath}, }; -use syntax::{ast, AstNode, SyntaxKind}; +use syntax::{AstNode, SyntaxKind, ast}; -use crate::{context::CompletionContext, CompletionItem, Completions}; +use crate::{CompletionItem, Completions, context::CompletionContext}; /// Complete mod declaration, i.e. `mod $0;` pub(crate) fn complete_mod( @@ -43,11 +43,14 @@ pub(crate) fn complete_mod( let module_definition_file = current_module.definition_source_file_id(ctx.db).original_file(ctx.db); - let source_root = ctx.db.source_root(ctx.db.file_source_root(module_definition_file.file_id())); + let source_root_id = + ctx.db.file_source_root(module_definition_file.file_id(ctx.db)).source_root_id(ctx.db); + let source_root = ctx.db.source_root(source_root_id).source_root(ctx.db); + let directory_to_look_for_submodules = directory_to_look_for_submodules( current_module, ctx.db, - source_root.path_for_file(&module_definition_file.file_id())?, + source_root.path_for_file(&module_definition_file.file_id(ctx.db))?, )?; let existing_mod_declarations = current_module @@ -63,9 +66,11 @@ pub(crate) fn complete_mod( source_root .iter() - .filter(|&submodule_candidate_file| submodule_candidate_file != module_definition_file) .filter(|&submodule_candidate_file| { - module_declaration_file.is_none_or(|it| it != submodule_candidate_file) + submodule_candidate_file != module_definition_file.file_id(ctx.db) + }) + .filter(|&submodule_candidate_file| { + module_declaration_file.is_none_or(|it| it.file_id(ctx.db) != submodule_candidate_file) }) .filter_map(|submodule_file| { let submodule_path = source_root.path_for_file(&submodule_file)?; diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/pattern.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/pattern.rs index 8f38e02ed7685..ea3511d31caf2 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/pattern.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/pattern.rs @@ -1,12 +1,12 @@ //! Completes constants and paths in unqualified patterns. -use hir::{db::DefDatabase, AssocItem, ScopeDef}; +use hir::{AssocItem, ScopeDef}; use ide_db::syntax_helpers::suggest_name; use syntax::ast::Pat; use crate::{ - context::{PathCompletionCtx, PatternContext, PatternRefutability, Qualified}, CompletionContext, Completions, + context::{PathCompletionCtx, PatternContext, PatternRefutability, Qualified}, }; /// Completes constants and paths in unqualified patterns. @@ -48,7 +48,7 @@ pub(crate) fn complete_pattern( // Suggest name only in let-stmt and fn param if pattern_ctx.should_suggest_name { - let mut name_generator = suggest_name::NameGenerator::new(); + let mut name_generator = suggest_name::NameGenerator::default(); if let Some(suggested) = ctx .expected_type .as_ref() @@ -60,7 +60,7 @@ pub(crate) fn complete_pattern( } let refutable = pattern_ctx.refutability == PatternRefutability::Refutable; - let single_variant_enum = |enum_: hir::Enum| ctx.db.enum_data(enum_.into()).variants.len() == 1; + let single_variant_enum = |enum_: hir::Enum| enum_.num_variants(ctx.db) == 1; if let Some(hir::Adt::Enum(e)) = ctx.expected_type.as_ref().and_then(|ty| ty.strip_references().as_adt()) diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/postfix.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/postfix.rs index 28e2853096e0e..54be7d2fbc33f 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/postfix.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/postfix.rs @@ -5,24 +5,24 @@ mod format_like; use base_db::SourceDatabase; use hir::{ItemInNs, Semantics}; use ide_db::{ + RootDatabase, SnippetCap, documentation::{Documentation, HasDocs}, imports::insert_use::ImportScope, text_edit::TextEdit, ty_filter::TryEnum, - RootDatabase, SnippetCap, }; use stdx::never; use syntax::{ - ast::{self, AstNode, AstToken}, SyntaxKind::{BLOCK_EXPR, EXPR_STMT, FOR_EXPR, IF_EXPR, LOOP_EXPR, STMT_LIST, WHILE_EXPR}, TextRange, TextSize, + ast::{self, AstNode, AstToken}, }; use crate::{ + CompletionItem, CompletionItemKind, CompletionRelevance, Completions, SnippetScope, completions::postfix::format_like::add_format_like_completions, context::{BreakableKind, CompletionContext, DotAccess, DotAccessKind}, item::{Builder, CompletionRelevancePostfixMatch}, - CompletionItem, CompletionItemKind, CompletionRelevance, Completions, SnippetScope, }; pub(crate) fn complete_postfix( @@ -155,22 +155,29 @@ pub(crate) fn complete_postfix( postfix_snippet("refm", "&mut expr", &format!("&mut {receiver_text}")).add_to(acc, ctx.db); postfix_snippet("deref", "*expr", &format!("*{receiver_text}")).add_to(acc, ctx.db); - let mut unsafe_should_be_wrapped = true; + let mut block_should_be_wrapped = true; if dot_receiver.syntax().kind() == BLOCK_EXPR { - unsafe_should_be_wrapped = false; + block_should_be_wrapped = false; if let Some(parent) = dot_receiver.syntax().parent() { if matches!(parent.kind(), IF_EXPR | WHILE_EXPR | LOOP_EXPR | FOR_EXPR) { - unsafe_should_be_wrapped = true; + block_should_be_wrapped = true; } } }; - let unsafe_completion_string = if unsafe_should_be_wrapped { + let unsafe_completion_string = if block_should_be_wrapped { format!("unsafe {{ {receiver_text} }}") } else { format!("unsafe {receiver_text}") }; postfix_snippet("unsafe", "unsafe {}", &unsafe_completion_string).add_to(acc, ctx.db); + let const_completion_string = if block_should_be_wrapped { + format!("const {{ {receiver_text} }}") + } else { + format!("const {receiver_text}") + }; + postfix_snippet("const", "const {}", &const_completion_string).add_to(acc, ctx.db); + // The rest of the postfix completions create an expression that moves an argument, // so it's better to consider references now to avoid breaking the compilation @@ -276,8 +283,8 @@ fn get_receiver_text( if receiver_is_ambiguous_float_literal { range.range = TextRange::at(range.range.start(), range.range.len() - TextSize::of('.')) } - let file_text = sema.db.file_text(range.file_id.file_id()); - let mut text = file_text[range.range].to_owned(); + let file_text = sema.db.file_text(range.file_id.file_id(sema.db)); + let mut text = file_text.text(sema.db)[range.range].to_owned(); // The receiver texts should be interpreted as-is, as they are expected to be // normal Rust expressions. @@ -414,8 +421,8 @@ mod tests { use expect_test::expect; use crate::{ - tests::{check, check_edit, check_edit_with_config, TEST_CONFIG}, CompletionConfig, Snippet, + tests::{TEST_CONFIG, check, check_edit, check_edit_with_config}, }; #[test] @@ -430,6 +437,7 @@ fn main() { expect![[r#" sn box Box::new(expr) sn call function(expr) + sn const const {} sn dbg dbg!(expr) sn dbgr dbg!(&expr) sn deref *expr @@ -463,6 +471,7 @@ fn main() { expect![[r#" sn box Box::new(expr) sn call function(expr) + sn const const {} sn dbg dbg!(expr) sn dbgr dbg!(&expr) sn deref *expr @@ -490,6 +499,7 @@ fn main() { expect![[r#" sn box Box::new(expr) sn call function(expr) + sn const const {} sn dbg dbg!(expr) sn dbgr dbg!(&expr) sn deref *expr @@ -516,6 +526,7 @@ fn main() { expect![[r#" sn box Box::new(expr) sn call function(expr) + sn const const {} sn dbg dbg!(expr) sn dbgr dbg!(&expr) sn deref *expr @@ -653,59 +664,74 @@ fn main() { #[test] fn postfix_completion_for_unsafe() { - check_edit("unsafe", r#"fn main() { foo.$0 }"#, r#"fn main() { unsafe { foo } }"#); - check_edit("unsafe", r#"fn main() { { foo }.$0 }"#, r#"fn main() { unsafe { foo } }"#); + postfix_completion_for_block("unsafe"); + } + + #[test] + fn postfix_completion_for_const() { + postfix_completion_for_block("const"); + } + + fn postfix_completion_for_block(kind: &str) { + check_edit(kind, r#"fn main() { foo.$0 }"#, &format!("fn main() {{ {kind} {{ foo }} }}")); check_edit( - "unsafe", + kind, + r#"fn main() { { foo }.$0 }"#, + &format!("fn main() {{ {kind} {{ foo }} }}"), + ); + check_edit( + kind, r#"fn main() { if x { foo }.$0 }"#, - r#"fn main() { unsafe { if x { foo } } }"#, + &format!("fn main() {{ {kind} {{ if x {{ foo }} }} }}"), ); check_edit( - "unsafe", + kind, r#"fn main() { loop { foo }.$0 }"#, - r#"fn main() { unsafe { loop { foo } } }"#, + &format!("fn main() {{ {kind} {{ loop {{ foo }} }} }}"), ); check_edit( - "unsafe", + kind, r#"fn main() { if true {}.$0 }"#, - r#"fn main() { unsafe { if true {} } }"#, + &format!("fn main() {{ {kind} {{ if true {{}} }} }}"), ); check_edit( - "unsafe", + kind, r#"fn main() { while true {}.$0 }"#, - r#"fn main() { unsafe { while true {} } }"#, + &format!("fn main() {{ {kind} {{ while true {{}} }} }}"), ); check_edit( - "unsafe", + kind, r#"fn main() { for i in 0..10 {}.$0 }"#, - r#"fn main() { unsafe { for i in 0..10 {} } }"#, + &format!("fn main() {{ {kind} {{ for i in 0..10 {{}} }} }}"), ); check_edit( - "unsafe", + kind, r#"fn main() { let x = if true {1} else {2}.$0 }"#, - r#"fn main() { let x = unsafe { if true {1} else {2} } }"#, + &format!("fn main() {{ let x = {kind} {{ if true {{1}} else {{2}} }} }}"), ); // completion will not be triggered check_edit( - "unsafe", + kind, r#"fn main() { let x = true else {panic!()}.$0}"#, - r#"fn main() { let x = true else {panic!()}.unsafe $0}"#, + &format!("fn main() {{ let x = true else {{panic!()}}.{kind} $0}}"), ); } #[test] fn custom_postfix_completion() { let config = CompletionConfig { - snippets: vec![Snippet::new( - &[], - &["break".into()], - &["ControlFlow::Break(${receiver})".into()], - "", - &["core::ops::ControlFlow".into()], - crate::SnippetScope::Expr, - ) - .unwrap()], + snippets: vec![ + Snippet::new( + &[], + &["break".into()], + &["ControlFlow::Break(${receiver})".into()], + "", + &["core::ops::ControlFlow".into()], + crate::SnippetScope::Expr, + ) + .unwrap(), + ], ..TEST_CONFIG }; diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/postfix/format_like.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/postfix/format_like.rs index c612170eb54bc..7faa1139595f8 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/postfix/format_like.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/postfix/format_like.rs @@ -17,15 +17,15 @@ // ![Format String Completion](https://user-images.githubusercontent.com/48062697/113020656-b560f500-917a-11eb-87de-02991f61beb8.gif) use ide_db::{ - syntax_helpers::format_string_exprs::{parse_format_exprs, with_placeholders, Arg}, SnippetCap, + syntax_helpers::format_string_exprs::{Arg, parse_format_exprs, with_placeholders}, }; -use syntax::{ast, AstToken}; +use syntax::{AstToken, ast}; use crate::{ + Completions, completions::postfix::{build_postfix_snippet_builder, escape_snippet_bits}, context::CompletionContext, - Completions, }; /// Mapping ("postfix completion item" => "macro to use") diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/record.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/record.rs index d0c4c24d060f8..c18aab007b2cf 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/record.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/record.rs @@ -1,14 +1,14 @@ //! Complete fields in record literals and patterns. use ide_db::SymbolKind; use syntax::{ - ast::{self, Expr}, SmolStr, + ast::{self, Expr}, }; use crate::{ - context::{DotAccess, DotAccessExprCtx, DotAccessKind, PatternContext}, CompletionContext, CompletionItem, CompletionItemKind, CompletionRelevance, CompletionRelevancePostfixMatch, Completions, + context::{DotAccess, DotAccessExprCtx, DotAccessKind, PatternContext}, }; pub(crate) fn complete_record_pattern_fields( @@ -144,8 +144,8 @@ mod tests { use ide_db::SnippetCap; use crate::{ - tests::{check_edit, check_edit_with_config, TEST_CONFIG}, CompletionConfig, + tests::{TEST_CONFIG, check_edit, check_edit_with_config}, }; #[test] diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/snippet.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/snippet.rs index 357709e0c1fde..31aae11676228 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/snippet.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/snippet.rs @@ -1,11 +1,11 @@ //! This file provides snippet completions, like `pd` => `eprintln!(...)`. -use ide_db::{documentation::Documentation, imports::insert_use::ImportScope, SnippetCap}; +use ide_db::{SnippetCap, documentation::Documentation, imports::insert_use::ImportScope}; use crate::{ + CompletionContext, CompletionItem, CompletionItemKind, Completions, SnippetScope, context::{ItemListKind, PathCompletionCtx, PathExprCtx, Qualified}, item::Builder, - CompletionContext, CompletionItem, CompletionItemKind, Completions, SnippetScope, }; pub(crate) fn complete_expr_snippet( @@ -153,23 +153,25 @@ fn add_custom_completions( #[cfg(test)] mod tests { use crate::{ - tests::{check_edit_with_config, TEST_CONFIG}, CompletionConfig, Snippet, + tests::{TEST_CONFIG, check_edit_with_config}, }; #[test] fn custom_snippet_completion() { check_edit_with_config( CompletionConfig { - snippets: vec![Snippet::new( - &["break".into()], - &[], - &["ControlFlow::Break(())".into()], - "", - &["core::ops::ControlFlow".into()], - crate::SnippetScope::Expr, - ) - .unwrap()], + snippets: vec![ + Snippet::new( + &["break".into()], + &[], + &["ControlFlow::Break(())".into()], + "", + &["core::ops::ControlFlow".into()], + crate::SnippetScope::Expr, + ) + .unwrap(), + ], ..TEST_CONFIG }, "break", diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/type.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/type.rs index b07148809323f..79db705af495d 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/type.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/type.rs @@ -1,12 +1,12 @@ //! Completion of names from the current scope in type position. use hir::{HirDisplay, ScopeDef}; -use syntax::{ast, AstNode}; +use syntax::{AstNode, ast}; use crate::{ + CompletionContext, Completions, context::{PathCompletionCtx, Qualified, TypeAscriptionTarget, TypeLocation}, render::render_type_inference, - CompletionContext, Completions, }; pub(crate) fn complete_type_path( diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/use_.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/use_.rs index b384987c51ce1..4d6d0b758a381 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/use_.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/use_.rs @@ -2,12 +2,12 @@ use hir::ScopeDef; use ide_db::{FxHashSet, SymbolKind}; -use syntax::{ast, format_smolstr, AstNode}; +use syntax::{AstNode, ast, format_smolstr}; use crate::{ + CompletionItem, CompletionItemKind, CompletionRelevance, Completions, context::{CompletionContext, PathCompletionCtx, Qualified}, item::Builder, - CompletionItem, CompletionItemKind, CompletionRelevance, Completions, }; pub(crate) fn complete_use_path( diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/vis.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/vis.rs index 0ea5157fb46fe..d15c35ac84991 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/vis.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/vis.rs @@ -1,8 +1,8 @@ //! Completion for visibility specifiers. use crate::{ - context::{CompletionContext, PathCompletionCtx, Qualified}, Completions, + context::{CompletionContext, PathCompletionCtx, Qualified}, }; pub(crate) fn complete_vis_path( diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/config.rs b/src/tools/rust-analyzer/crates/ide-completion/src/config.rs index 45aab38e8ea09..844fce5ef8019 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/config.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/config.rs @@ -5,9 +5,9 @@ //! completions if we are allowed to. use hir::ImportPathConfig; -use ide_db::{imports::insert_use::InsertUseConfig, SnippetCap}; +use ide_db::{SnippetCap, imports::insert_use::InsertUseConfig}; -use crate::{snippet::Snippet, CompletionFieldsToResolve}; +use crate::{CompletionFieldsToResolve, snippet::Snippet}; #[derive(Clone, Debug, PartialEq, Eq)] pub struct CompletionConfig<'a> { diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/context.rs b/src/tools/rust-analyzer/crates/ide-completion/src/context.rs index e686a29309461..3baf1f3de6109 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/context.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/context.rs @@ -6,25 +6,27 @@ mod tests; use std::{iter, ops::ControlFlow}; +use base_db::RootQueryDb as _; use hir::{ - DisplayTarget, HasAttrs, Local, ModPath, ModuleDef, ModuleSource, Name, PathResolution, - ScopeDef, Semantics, SemanticsScope, Symbol, Type, TypeInfo, + DisplayTarget, HasAttrs, Local, ModuleDef, ModuleSource, Name, PathResolution, ScopeDef, + Semantics, SemanticsScope, Symbol, Type, TypeInfo, }; use ide_db::{ - base_db::SourceDatabase, famous_defs::FamousDefs, helpers::is_editable_crate, FilePosition, - FxHashMap, FxHashSet, RootDatabase, + FilePosition, FxHashMap, FxHashSet, RootDatabase, famous_defs::FamousDefs, + helpers::is_editable_crate, }; use syntax::{ - ast::{self, AttrKind, NameOrNameRef}, - match_ast, AstNode, Edition, SmolStr, + AstNode, Edition, SmolStr, SyntaxKind::{self, *}, - SyntaxToken, TextRange, TextSize, T, + SyntaxToken, T, TextRange, TextSize, + ast::{self, AttrKind, NameOrNameRef}, + match_ast, }; use crate::{ - config::AutoImportExclusionType, - context::analysis::{expand_and_analyze, AnalysisResult}, CompletionConfig, + config::AutoImportExclusionType, + context::analysis::{AnalysisResult, expand_and_analyze}, }; const COMPLETION_MARKER: &str = "raCompletionMarker"; @@ -675,11 +677,7 @@ impl CompletionContext<'_> { }; } - if self.is_doc_hidden(attrs, defining_crate) { - Visible::No - } else { - Visible::Yes - } + if self.is_doc_hidden(attrs, defining_crate) { Visible::No } else { Visible::Yes } } pub(crate) fn is_doc_hidden(&self, attrs: &hir::Attrs, defining_crate: hir::Crate) -> bool { @@ -706,15 +704,16 @@ impl<'a> CompletionContext<'a> { let _p = tracing::info_span!("CompletionContext::new").entered(); let sema = Semantics::new(db); - let file_id = sema.attach_first_edition(file_id)?; - let original_file = sema.parse(file_id); + let editioned_file_id = sema.attach_first_edition(file_id)?; + let original_file = sema.parse(editioned_file_id); // Insert a fake ident to get a valid parse tree. We will use this file // to determine context, though the original_file will be used for // actual completion. let file_with_fake_ident = { - let parse = db.parse(file_id); - parse.reparse(TextRange::empty(offset), COMPLETION_MARKER, file_id.edition()).tree() + let (_, edition) = editioned_file_id.unpack(db); + let parse = db.parse(editioned_file_id); + parse.reparse(TextRange::empty(offset), COMPLETION_MARKER, edition).tree() }; // always pick the token to the immediate left of the cursor, as that is what we are actually @@ -794,15 +793,12 @@ impl<'a> CompletionContext<'a> { .exclude_traits .iter() .filter_map(|path| { - scope - .resolve_mod_path(&ModPath::from_segments( - hir::PathKind::Plain, - path.split("::").map(Symbol::intern).map(Name::new_symbol_root), - )) - .find_map(|it| match it { + hir::resolve_absolute_path(db, path.split("::").map(Symbol::intern)).find_map( + |it| match it { hir::ItemInNs::Types(ModuleDef::Trait(t)) => Some(t), _ => None, - }) + }, + ) }) .collect(); @@ -810,17 +806,14 @@ impl<'a> CompletionContext<'a> { .exclude_flyimport .iter() .flat_map(|(path, kind)| { - scope - .resolve_mod_path(&ModPath::from_segments( - hir::PathKind::Plain, - path.split("::").map(Symbol::intern).map(Name::new_symbol_root), - )) + hir::resolve_absolute_path(db, path.split("::").map(Symbol::intern)) .map(|it| (it.into_module_def(), *kind)) }) .collect(); exclude_flyimport .extend(exclude_traits.iter().map(|&t| (t.into(), AutoImportExclusionType::Always))); + // FIXME: This should be part of `CompletionAnalysis` / `expand_and_analyze` let complete_semicolon = if config.add_semicolon_to_unit { let inside_closure_ret = token.parent_ancestors().try_for_each(|ancestor| { match_ast! { diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/context/analysis.rs b/src/tools/rust-analyzer/crates/ide-completion/src/context/analysis.rs index 1a34548f70824..391e2379dcd51 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/context/analysis.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/context/analysis.rs @@ -2,24 +2,28 @@ use std::iter; use hir::{ExpandResult, Semantics, Type, TypeInfo, Variant}; -use ide_db::{active_parameter::ActiveParameter, RootDatabase}; +use ide_db::{RootDatabase, active_parameter::ActiveParameter}; use itertools::Either; use syntax::{ - algo::{self, ancestors_at_offset, find_node_at_offset, non_trivia_sibling}, + AstNode, AstToken, Direction, NodeOrToken, SyntaxElement, SyntaxKind, SyntaxNode, SyntaxToken, + T, TextRange, TextSize, + algo::{ + self, ancestors_at_offset, find_node_at_offset, non_trivia_sibling, + previous_non_trivia_token, + }, ast::{ self, AttrKind, HasArgList, HasGenericArgs, HasGenericParams, HasLoopBody, HasName, NameOrNameRef, }, - match_ast, AstNode, AstToken, Direction, NodeOrToken, SyntaxElement, SyntaxKind, SyntaxNode, - SyntaxToken, TextRange, TextSize, T, + match_ast, }; use crate::context::{ - AttrCtx, BreakableKind, CompletionAnalysis, DotAccess, DotAccessExprCtx, DotAccessKind, - ItemListKind, LifetimeContext, LifetimeKind, NameContext, NameKind, NameRefContext, - NameRefKind, ParamContext, ParamKind, PathCompletionCtx, PathExprCtx, PathKind, PatternContext, - PatternRefutability, Qualified, QualifierCtx, TypeAscriptionTarget, TypeLocation, - COMPLETION_MARKER, + AttrCtx, BreakableKind, COMPLETION_MARKER, CompletionAnalysis, DotAccess, DotAccessExprCtx, + DotAccessKind, ItemListKind, LifetimeContext, LifetimeKind, NameContext, NameKind, + NameRefContext, NameRefKind, ParamContext, ParamKind, PathCompletionCtx, PathExprCtx, PathKind, + PatternContext, PatternRefutability, Qualified, QualifierCtx, TypeAscriptionTarget, + TypeLocation, }; #[derive(Debug)] @@ -383,11 +387,7 @@ fn expand( match ( sema.expand_macro_call(&actual_macro_call), - sema.speculative_expand_macro_call( - &actual_macro_call, - &speculative_args, - fake_ident_token.clone(), - ), + sema.speculative_expand_macro_call(&actual_macro_call, &speculative_args, fake_ident_token), ) { // successful expansions (Some(actual_expansion), Some((fake_expansion, fake_mapped_tokens))) => { @@ -657,9 +657,8 @@ fn expected_type_and_name( )) } else { cov_mark::hit!(expected_type_struct_field_without_leading_char); - let expr_field = token.prev_sibling_or_token()? - .into_node() - .and_then(ast::RecordExprField::cast)?; + cov_mark::hit!(expected_type_struct_field_followed_by_comma); + let expr_field = previous_non_trivia_token(token.clone())?.parent().and_then(ast::RecordExprField::cast)?; let (_, _, ty) = sema.resolve_record_field(&expr_field)?; Some(( Some(ty), @@ -677,7 +676,6 @@ fn expected_type_and_name( .or_else(|| sema.type_of_expr(&expr).map(TypeInfo::original)); (ty, field_name) } else { - cov_mark::hit!(expected_type_struct_field_followed_by_comma); (field_ty, field_name) } }, @@ -1812,22 +1810,6 @@ fn is_in_block(node: &SyntaxNode) -> bool { .unwrap_or(false) } -fn previous_non_trivia_token(e: impl Into) -> Option { - let mut token = match e.into() { - SyntaxElement::Node(n) => n.first_token()?, - SyntaxElement::Token(t) => t, - } - .prev_token(); - while let Some(inner) = token { - if !inner.kind().is_trivia() { - return Some(inner); - } else { - token = inner.prev_token(); - } - } - None -} - fn next_non_trivia_token(e: impl Into) -> Option { let mut token = match e.into() { SyntaxElement::Node(n) => n.last_token()?, diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/context/tests.rs b/src/tools/rust-analyzer/crates/ide-completion/src/context/tests.rs index a03f632cdfdfa..75c20968e1e5f 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/context/tests.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/context/tests.rs @@ -1,9 +1,9 @@ -use expect_test::{expect, Expect}; +use expect_test::{Expect, expect}; use hir::HirDisplay; use crate::{ context::CompletionContext, - tests::{position, TEST_CONFIG}, + tests::{TEST_CONFIG, position}, }; fn check_expected_type_and_name(#[rust_analyzer::rust_fixture] ra_fixture: &str, expect: Expect) { @@ -371,6 +371,17 @@ fn foo() { "#, expect![[r#"ty: Foo, name: ?"#]], ); + check_expected_type_and_name( + r#" +struct Foo { field: u32 } +fn foo() { + Foo { + ..self::$0 + } +} +"#, + expect!["ty: ?, name: ?"], + ); } #[test] diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/item.rs b/src/tools/rust-analyzer/crates/ide-completion/src/item.rs index 8d6dc4c801301..19cdef30bd966 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/item.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/item.rs @@ -5,17 +5,17 @@ use std::{fmt, mem}; use hir::Mutability; use ide_db::text_edit::TextEdit; use ide_db::{ - documentation::Documentation, imports::import_assets::LocatedImport, RootDatabase, SnippetCap, - SymbolKind, + RootDatabase, SnippetCap, SymbolKind, documentation::Documentation, + imports::import_assets::LocatedImport, }; use itertools::Itertools; use smallvec::SmallVec; use stdx::{format_to, impl_from, never}; -use syntax::{format_smolstr, Edition, SmolStr, TextRange, TextSize}; +use syntax::{Edition, SmolStr, TextRange, TextSize, format_smolstr}; use crate::{ context::{CompletionContext, PathCompletionCtx}, - render::{render_path_resolution, RenderContext}, + render::{RenderContext, render_path_resolution}, }; /// `CompletionItem` describes a single completion entity which expands to 1 or more entries in the @@ -135,7 +135,7 @@ impl fmt::Debug for CompletionItem { }, CompletionItemRefMode::Dereference => "*", }; - s.field("ref_match", &format!("{}@{offset:?}", prefix)); + s.field("ref_match", &format!("{prefix}@{offset:?}")); } if self.trigger_call_info { s.field("trigger_call_info", &true); diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/lib.rs b/src/tools/rust-analyzer/crates/ide-completion/src/lib.rs index a990b39481a19..1fdd4cdb1c6bb 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/lib.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/lib.rs @@ -11,10 +11,10 @@ mod snippet; mod tests; use ide_db::{ + FilePosition, FxHashSet, RootDatabase, imports::insert_use::{self, ImportScope}, syntax_helpers::tree_diff::diff, text_edit::TextEdit, - FilePosition, FxHashSet, RootDatabase, }; use syntax::ast::make; @@ -275,7 +275,9 @@ pub fn resolve_completion_edits( let _p = tracing::info_span!("resolve_completion_edits").entered(); let sema = hir::Semantics::new(db); - let original_file = sema.parse(sema.attach_first_edition(file_id)?); + let editioned_file_id = sema.attach_first_edition(file_id)?; + + let original_file = sema.parse(editioned_file_id); let original_token = syntax::AstNode::syntax(&original_file).token_at_offset(offset).left_biased()?; let position_for_import = &original_token.parent()?; diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/render.rs b/src/tools/rust-analyzer/crates/ide-completion/src/render.rs index c82905eddefbb..00c0b470f9875 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/render.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/render.rs @@ -10,17 +10,19 @@ pub(crate) mod type_alias; pub(crate) mod union_literal; pub(crate) mod variant; -use hir::{sym, AsAssocItem, HasAttrs, HirDisplay, ModuleDef, ScopeDef, Type}; +use hir::{AsAssocItem, HasAttrs, HirDisplay, ModuleDef, ScopeDef, Type, sym}; use ide_db::text_edit::TextEdit; use ide_db::{ + RootDatabase, SnippetCap, SymbolKind, documentation::{Documentation, HasDocs}, helpers::item_name, imports::import_assets::LocatedImport, - RootDatabase, SnippetCap, SymbolKind, }; -use syntax::{ast, format_smolstr, AstNode, SmolStr, SyntaxKind, TextRange, ToSmolStr}; +use syntax::{AstNode, SmolStr, SyntaxKind, TextRange, ToSmolStr, ast, format_smolstr}; use crate::{ + CompletionContext, CompletionItem, CompletionItemKind, CompletionItemRefMode, + CompletionRelevance, context::{DotAccess, DotAccessKind, PathCompletionCtx, PathKind, PatternContext}, item::{Builder, CompletionRelevanceTypeMatch}, render::{ @@ -28,8 +30,6 @@ use crate::{ literal::render_variant_lit, macro_::{render_macro, render_macro_pat}, }, - CompletionContext, CompletionItem, CompletionItemKind, CompletionItemRefMode, - CompletionRelevance, }; /// Interface for data and methods required for items rendering. #[derive(Debug, Clone)] @@ -92,7 +92,7 @@ impl<'a> RenderContext<'a> { fn is_deprecated(&self, def: impl HasAttrs) -> bool { let attrs = def.attrs(self.db()); - attrs.by_key(&sym::deprecated).exists() + attrs.by_key(sym::deprecated).exists() } fn is_deprecated_assoc_item(&self, as_assoc_item: impl AsAssocItem) -> bool { @@ -334,7 +334,7 @@ pub(crate) fn render_expr( continue; }; - item.add_import(LocatedImport::new(path, trait_item, trait_item)); + item.add_import(LocatedImport::new_no_completion(path, trait_item, trait_item)); } Some(item) @@ -683,14 +683,14 @@ fn path_ref_match( mod tests { use std::cmp; - use expect_test::{expect, Expect}; + use expect_test::{Expect, expect}; use ide_db::SymbolKind; use itertools::Itertools; use crate::{ - item::CompletionRelevanceTypeMatch, - tests::{check_edit, do_completion, get_all_items, TEST_CONFIG}, CompletionItem, CompletionItemKind, CompletionRelevance, CompletionRelevancePostfixMatch, + item::CompletionRelevanceTypeMatch, + tests::{TEST_CONFIG, check_edit, do_completion, get_all_items}, }; #[track_caller] @@ -1276,6 +1276,53 @@ fn main() { fo$0 } ); } + #[test] + fn fn_detail_includes_variadics() { + check( + r#" +unsafe extern "C" fn foo(a: u32, b: u32, ...) {} + +fn main() { fo$0 } +"#, + SymbolKind::Function, + expect![[r#" + [ + CompletionItem { + label: "foo(…)", + detail_left: None, + detail_right: Some( + "unsafe fn(u32, u32, ...)", + ), + source_range: 62..64, + delete: 62..64, + insert: "foo(${1:a}, ${2:b});$0", + kind: SymbolKind( + Function, + ), + lookup: "foo", + detail: "unsafe fn(u32, u32, ...)", + trigger_call_info: true, + }, + CompletionItem { + label: "main()", + detail_left: None, + detail_right: Some( + "fn()", + ), + source_range: 62..64, + delete: 62..64, + insert: "main();$0", + kind: SymbolKind( + Function, + ), + lookup: "main", + detail: "fn()", + }, + ] + "#]], + ); + } + #[test] fn enum_detail_just_name_for_unit() { check( @@ -2042,8 +2089,8 @@ fn f() { A { bar: b$0 }; } expect![[r#" fn bar() fn() -> u8 [type+name] fn baz() fn() -> u8 [type] - ex bar() [type] ex baz() [type] + ex bar() [type] st A A [] fn f() fn() [] "#]], @@ -2773,14 +2820,13 @@ fn foo(f: Foo) { let _: &u32 = f.b$0 } Indel { insert: "(", delete: 107..107, - annotation: None, }, Indel { insert: "qux)()", delete: 109..110, - annotation: None, }, ], + annotation: None, }, kind: SymbolKind( Field, @@ -2960,6 +3006,7 @@ fn main() { sn refm &mut expr [] sn deref *expr [] sn unsafe unsafe {} [] + sn const const {} [] sn match match expr {} [] sn box Box::new(expr) [] sn dbg dbg!(expr) [] @@ -2990,6 +3037,7 @@ fn main() { sn refm &mut expr [] sn deref *expr [] sn unsafe unsafe {} [] + sn const const {} [] sn match match expr {} [] sn box Box::new(expr) [] sn dbg dbg!(expr) [] diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/render/function.rs b/src/tools/rust-analyzer/crates/ide-completion/src/render/function.rs index 4693bdc047f97..2fe517fa8cd01 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/render/function.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/render/function.rs @@ -1,12 +1,13 @@ //! Renderer for function calls. -use hir::{db::HirDatabase, AsAssocItem, HirDisplay}; +use hir::{AsAssocItem, HirDisplay, db::HirDatabase}; use ide_db::{SnippetCap, SymbolKind}; use itertools::Itertools; use stdx::{format_to, to_lower_snake_case}; -use syntax::{format_smolstr, AstNode, SmolStr, ToSmolStr}; +use syntax::{AstNode, SmolStr, ToSmolStr, format_smolstr}; use crate::{ + CallableSnippets, context::{ CompleteSemicolon, CompletionContext, DotAccess, DotAccessKind, PathCompletionCtx, PathKind, }, @@ -15,9 +16,8 @@ use crate::{ CompletionRelevanceReturnType, CompletionRelevanceTraitInfo, }, render::{ - compute_exact_name_match, compute_ref_match, compute_type_match, match_types, RenderContext, + RenderContext, compute_exact_name_match, compute_ref_match, compute_type_match, match_types, }, - CallableSnippets, }; #[derive(Debug)] @@ -293,11 +293,7 @@ fn ref_of_param(ctx: &CompletionContext<'_>, arg: &str, ty: &hir::Type) -> &'sta for (name, local) in ctx.locals.iter().sorted_by_key(|&(k, _)| k.clone()) { if name.as_str() == arg { return if local.ty(ctx.db) == derefed_ty { - if ty.is_mutable_reference() { - "&mut " - } else { - "&" - } + if ty.is_mutable_reference() { "&mut " } else { "&" } } else { "" }; @@ -324,7 +320,9 @@ fn detail(ctx: &CompletionContext<'_>, func: hir::Function) -> String { format_to!(detail, "unsafe "); } - format_to!(detail, "fn({})", params_display(ctx, func)); + detail.push_str("fn("); + params_display(ctx, &mut detail, func); + detail.push(')'); if !ret_ty.is_unit() { format_to!(detail, " -> {}", ret_ty.display(ctx.db, ctx.display_target)); } @@ -346,24 +344,28 @@ fn detail_full(ctx: &CompletionContext<'_>, func: hir::Function) -> String { detail } -fn params_display(ctx: &CompletionContext<'_>, func: hir::Function) -> String { +fn params_display(ctx: &CompletionContext<'_>, detail: &mut String, func: hir::Function) { if let Some(self_param) = func.self_param(ctx.db) { + format_to!(detail, "{}", self_param.display(ctx.db, ctx.display_target)); let assoc_fn_params = func.assoc_fn_params(ctx.db); let params = assoc_fn_params .iter() .skip(1) // skip the self param because we are manually handling that .map(|p| p.ty().display(ctx.db, ctx.display_target)); - format!( - "{}{}", - self_param.display(ctx.db, ctx.display_target), - params.format_with("", |display, f| { - f(&", ")?; - f(&display) - }) - ) + for param in params { + format_to!(detail, ", {}", param); + } } else { let assoc_fn_params = func.assoc_fn_params(ctx.db); - assoc_fn_params.iter().map(|p| p.ty().display(ctx.db, ctx.display_target)).join(", ") + format_to!( + detail, + "{}", + assoc_fn_params.iter().map(|p| p.ty().display(ctx.db, ctx.display_target)).format(", ") + ); + } + + if func.is_varargs(ctx.db) { + detail.push_str(", ..."); } } @@ -398,8 +400,8 @@ fn params( #[cfg(test)] mod tests { use crate::{ - tests::{check_edit, check_edit_with_config, TEST_CONFIG}, CallableSnippets, CompletionConfig, + tests::{TEST_CONFIG, check_edit, check_edit_with_config}, }; #[test] diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/render/literal.rs b/src/tools/rust-analyzer/crates/ide-completion/src/render/literal.rs index ffda52fb47852..5a9e35a7290bf 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/render/literal.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/render/literal.rs @@ -1,23 +1,22 @@ //! Renderer for `enum` variants. -use hir::{db::HirDatabase, StructKind}; +use hir::{StructKind, db::HirDatabase}; use ide_db::{ - documentation::{Documentation, HasDocs}, SymbolKind, + documentation::{Documentation, HasDocs}, }; use crate::{ + CompletionItemKind, CompletionRelevance, CompletionRelevanceReturnType, context::{CompletionContext, PathCompletionCtx, PathKind}, item::{Builder, CompletionItem, CompletionRelevanceFn}, render::{ - compute_type_match, + RenderContext, compute_type_match, variant::{ - format_literal_label, format_literal_lookup, render_record_lit, render_tuple_lit, - visible_fields, RenderedLiteral, + RenderedLiteral, format_literal_label, format_literal_lookup, render_record_lit, + render_tuple_lit, visible_fields, }, - RenderContext, }, - CompletionItemKind, CompletionRelevance, CompletionRelevanceReturnType, }; pub(crate) fn render_variant_lit( @@ -164,11 +163,7 @@ impl Variant { Variant::Struct(it) => visible_fields(ctx, &fields, it)?, Variant::EnumVariant(it) => visible_fields(ctx, &fields, it)?, }; - if !fields_omitted { - Some(visible_fields) - } else { - None - } + if !fields_omitted { Some(visible_fields) } else { None } } fn kind(self, db: &dyn HirDatabase) -> StructKind { diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/render/macro_.rs b/src/tools/rust-analyzer/crates/ide-completion/src/render/macro_.rs index 8b2476d153f1f..4674dae031440 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/render/macro_.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/render/macro_.rs @@ -1,8 +1,8 @@ //! Renderer for macro invocations. use hir::HirDisplay; -use ide_db::{documentation::Documentation, SymbolKind}; -use syntax::{format_smolstr, SmolStr, ToSmolStr}; +use ide_db::{SymbolKind, documentation::Documentation}; +use syntax::{SmolStr, ToSmolStr, format_smolstr}; use crate::{ context::{PathCompletionCtx, PathKind, PatternContext}, diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/render/pattern.rs b/src/tools/rust-analyzer/crates/ide-completion/src/render/pattern.rs index 124abb17b6a1c..dcc51a86a8edc 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/render/pattern.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/render/pattern.rs @@ -1,17 +1,17 @@ //! Renderer for patterns. -use hir::{db::HirDatabase, Name, StructKind}; -use ide_db::{documentation::HasDocs, SnippetCap}; +use hir::{Name, StructKind, db::HirDatabase}; +use ide_db::{SnippetCap, documentation::HasDocs}; use itertools::Itertools; use syntax::{Edition, SmolStr, ToSmolStr}; use crate::{ + CompletionItem, CompletionItemKind, context::{ParamContext, ParamKind, PathCompletionCtx, PatternContext}, render::{ - variant::{format_literal_label, format_literal_lookup, visible_fields}, RenderContext, + variant::{format_literal_label, format_literal_lookup, visible_fields}, }, - CompletionItem, CompletionItemKind, }; pub(crate) fn render_struct_pat( @@ -64,11 +64,11 @@ pub(crate) fn render_variant_pat( ), None => { let name = local_name.unwrap_or_else(|| variant.name(ctx.db())); - let it = ( + + ( name.as_str().to_smolstr(), name.display(ctx.db(), ctx.completion.edition).to_smolstr(), - ); - it + ) } }; @@ -191,7 +191,7 @@ fn render_record_as_pat( format!( "{name} {{ {}{} }}", fields.enumerate().format_with(", ", |(idx, field), f| { - f(&format_args!("{}${}", field.name(db).display(db.upcast(), edition), idx + 1)) + f(&format_args!("{}${}", field.name(db).display(db, edition), idx + 1)) }), if fields_omitted { ", .." } else { "" }, name = name diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/render/union_literal.rs b/src/tools/rust-analyzer/crates/ide-completion/src/render/union_literal.rs index 09154e81c0304..23f0d4e06f2c8 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/render/union_literal.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/render/union_literal.rs @@ -6,11 +6,11 @@ use itertools::Itertools; use syntax::ToSmolStr; use crate::{ + CompletionItem, CompletionItemKind, render::{ - variant::{format_literal_label, format_literal_lookup, visible_fields}, RenderContext, + variant::{format_literal_label, format_literal_lookup, visible_fields}, }, - CompletionItem, CompletionItemKind, }; pub(crate) fn render_union_literal( diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/render/variant.rs b/src/tools/rust-analyzer/crates/ide-completion/src/render/variant.rs index 83718e57229a5..42324b4290a77 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/render/variant.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/render/variant.rs @@ -1,7 +1,7 @@ //! Code common to structs, unions, and enum variants. use crate::context::CompletionContext; -use hir::{sym, HasAttrs, HasCrate, HasVisibility, HirDisplay, StructKind}; +use hir::{HasAttrs, HasCrate, HasVisibility, HirDisplay, StructKind, sym}; use ide_db::SnippetCap; use itertools::Itertools; use syntax::SmolStr; @@ -96,7 +96,7 @@ pub(crate) fn visible_fields( .copied() .collect::>(); let has_invisible_field = n_fields - fields.len() > 0; - let is_foreign_non_exhaustive = item.attrs(ctx.db).by_key(&sym::non_exhaustive).exists() + let is_foreign_non_exhaustive = item.attrs(ctx.db).by_key(sym::non_exhaustive).exists() && item.krate(ctx.db) != module.krate(); let fields_omitted = has_invisible_field || is_foreign_non_exhaustive; Some((fields, fields_omitted)) diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/snippet.rs b/src/tools/rust-analyzer/crates/ide-completion/src/snippet.rs index 07f33a826e4c7..9dc0c0234dc56 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/snippet.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/snippet.rs @@ -174,7 +174,7 @@ fn import_edits(ctx: &CompletionContext<'_>, requires: &[ModPath]) -> Option 1).then(|| LocatedImport::new(path.clone(), item, item))) + Some((path.len() > 1).then(|| LocatedImport::new_no_completion(path.clone(), item, item))) }; let mut res = Vec::with_capacity(requires.len()); for import in requires { diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests.rs index 9d91f95eb65b8..fdc3d9a13bc92 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/tests.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests.rs @@ -28,8 +28,8 @@ use base_db::SourceDatabase; use expect_test::Expect; use hir::PrefixKind; use ide_db::{ - imports::insert_use::{ImportGranularity, InsertUseConfig}, FilePosition, RootDatabase, SnippetCap, + imports::insert_use::{ImportGranularity, InsertUseConfig}, }; use itertools::Itertools; use stdx::{format_to, trim_indent}; @@ -37,8 +37,8 @@ use test_fixture::ChangeFixture; use test_utils::assert_eq_text; use crate::{ - resolve_completion_edits, CallableSnippets, CompletionConfig, CompletionFieldsToResolve, - CompletionItem, CompletionItemKind, + CallableSnippets, CompletionConfig, CompletionFieldsToResolve, CompletionItem, + CompletionItemKind, resolve_completion_edits, }; /// Lots of basic item definitions @@ -155,13 +155,14 @@ fn completion_list_with_config( pub(crate) fn position( #[rust_analyzer::rust_fixture] ra_fixture: &str, ) -> (RootDatabase, FilePosition) { - let change_fixture = ChangeFixture::parse(ra_fixture); let mut database = RootDatabase::default(); + let change_fixture = ChangeFixture::parse(&database, ra_fixture); database.enable_proc_attr_macros(); database.apply_change(change_fixture.change); let (file_id, range_or_offset) = change_fixture.file_position.expect("expected a marker ($0)"); let offset = range_or_offset.expect_offset(); - (database, FilePosition { file_id: file_id.file_id(), offset }) + let position = FilePosition { file_id: file_id.file_id(&database), offset }; + (database, position) } pub(crate) fn do_completion(code: &str, kind: CompletionItemKind) -> Vec { @@ -246,7 +247,7 @@ pub(crate) fn check_edit_with_config( .filter(|it| it.lookup() == what) .collect_tuple() .unwrap_or_else(|| panic!("can't find {what:?} completion in {completions:#?}")); - let mut actual = db.file_text(position.file_id).to_string(); + let mut actual = db.file_text(position.file_id).text(&db).to_string(); let mut combined_edit = completion.text_edit.clone(); diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests/expression.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests/expression.rs index 9b3c676c48a1f..d5137949d42f7 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/tests/expression.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests/expression.rs @@ -1,13 +1,13 @@ //! Completion tests for expressions. -use expect_test::{expect, Expect}; +use expect_test::{Expect, expect}; use crate::{ + CompletionConfig, config::AutoImportExclusionType, tests::{ - check, check_edit, check_with_base_items, completion_list_with_config, BASE_ITEMS_FIXTURE, - TEST_CONFIG, + BASE_ITEMS_FIXTURE, TEST_CONFIG, check, check_edit, check_with_base_items, + completion_list_with_config, }, - CompletionConfig, }; fn check_with_config( @@ -58,6 +58,7 @@ fn baz() { un Union Union ev TupleV(…) TupleV(u32) bt u32 u32 + kw const kw crate:: kw false kw for @@ -101,6 +102,7 @@ fn func(param0 @ (param1, param2): (i32, i32)) { lc param1 i32 lc param2 i32 bt u32 u32 + kw const kw crate:: kw false kw for @@ -169,6 +171,7 @@ impl Unit { kw if kw if let kw impl + kw impl for kw let kw letm kw loop @@ -247,6 +250,7 @@ fn complete_in_block() { kw if kw if let kw impl + kw impl for kw let kw letm kw loop @@ -298,6 +302,7 @@ fn complete_after_if_expr() { kw if kw if let kw impl + kw impl for kw let kw letm kw loop @@ -335,6 +340,7 @@ fn complete_in_match_arm() { expect![[r#" fn foo() fn() bt u32 u32 + kw const kw crate:: kw false kw for @@ -372,6 +378,7 @@ fn completes_in_loop_ctx() { kw if kw if let kw impl + kw impl for kw let kw letm kw loop @@ -400,6 +407,7 @@ fn completes_in_loop_ctx() { sn box Box::new(expr) sn break break expr sn call function(expr) + sn const const {} sn dbg dbg!(expr) sn dbgr dbg!(&expr) sn deref *expr @@ -424,6 +432,7 @@ fn completes_in_let_initializer() { expect![[r#" fn main() fn() bt u32 u32 + kw const kw crate:: kw false kw for @@ -448,6 +457,7 @@ fn completes_after_ref_expr() { expect![[r#" fn main() fn() bt u32 u32 + kw const kw crate:: kw false kw for @@ -471,6 +481,7 @@ fn completes_after_ref_expr() { fn main() fn() bt u32 u32 kw const + kw const kw crate:: kw false kw for @@ -492,6 +503,7 @@ fn completes_after_ref_expr() { expect![[r#" fn main() fn() bt u32 u32 + kw const kw crate:: kw false kw for @@ -512,6 +524,7 @@ fn completes_after_ref_expr() { expect![[r#" fn main() fn() bt u32 u32 + kw const kw crate:: kw false kw for @@ -532,6 +545,7 @@ fn completes_after_ref_expr() { expect![[r#" fn main() fn() bt u32 u32 + kw const kw crate:: kw false kw for @@ -566,6 +580,7 @@ fn foo() { fn foo() fn() st Foo Foo bt u32 u32 + kw const kw crate:: kw false kw for @@ -601,6 +616,7 @@ fn foo() { fn foo() fn() lc bar i32 bt u32 u32 + kw const kw crate:: kw false kw for @@ -632,6 +648,7 @@ fn quux(x: i32) { lc x i32 ma m!(…) macro_rules! m bt u32 u32 + kw const kw crate:: kw false kw for @@ -659,6 +676,7 @@ fn quux(x: i32) { lc x i32 ma m!(…) macro_rules! m bt u32 u32 + kw const kw crate:: kw false kw for @@ -688,6 +706,7 @@ fn quux(x: i32) { lc y i32 ma m!(…) macro_rules! m bt u32 u32 + kw const kw crate:: kw false kw for @@ -870,6 +889,7 @@ fn brr() { st YoloVariant YoloVariant st YoloVariant {…} YoloVariant { f: usize } bt u32 u32 + kw const kw crate:: kw false kw for @@ -945,6 +965,7 @@ fn foo() { if foo {} $0 } kw if kw if let kw impl + kw impl for kw let kw letm kw loop @@ -987,6 +1008,7 @@ fn foo() { if foo {} el$0 } kw if kw if let kw impl + kw impl for kw let kw letm kw loop @@ -1016,6 +1038,7 @@ fn foo() { bar(if foo {} $0) } expect![[r#" fn foo() fn() bt u32 u32 + kw const kw crate:: kw else kw else if @@ -1040,6 +1063,7 @@ fn foo() { bar(if foo {} el$0) } expect![[r#" fn foo() fn() bt u32 u32 + kw const kw crate:: kw else kw else if @@ -1077,6 +1101,7 @@ fn foo() { if foo {} $0 let x = 92; } kw if kw if let kw impl + kw impl for kw let kw letm kw loop @@ -1119,6 +1144,7 @@ fn foo() { if foo {} el$0 let x = 92; } kw if kw if let kw impl + kw impl for kw let kw letm kw loop @@ -1161,6 +1187,7 @@ fn foo() { if foo {} el$0 { let x = 92; } } kw if kw if let kw impl + kw impl for kw let kw letm kw loop @@ -1213,6 +1240,7 @@ pub struct UnstableThisShouldNotBeListed; kw if kw if let kw impl + kw impl for kw let kw letm kw loop @@ -1267,6 +1295,7 @@ pub struct UnstableButWeAreOnNightlyAnyway; kw if kw if let kw impl + kw impl for kw let kw letm kw loop @@ -1309,6 +1338,7 @@ fn main() { me foo() fn(&self) sn box Box::new(expr) sn call function(expr) + sn const const {} sn dbg dbg!(expr) sn dbgr dbg!(&expr) sn deref *expr @@ -1335,6 +1365,7 @@ fn main() { me foo() fn(&self) sn box Box::new(expr) sn call function(expr) + sn const const {} sn dbg dbg!(expr) sn dbgr dbg!(&expr) sn deref *expr @@ -1365,6 +1396,7 @@ fn main() { me foo() fn(&self) sn box Box::new(expr) sn call function(expr) + sn const const {} sn dbg dbg!(expr) sn dbgr dbg!(&expr) sn deref *expr @@ -1391,6 +1423,7 @@ fn main() { me foo() fn(&self) sn box Box::new(expr) sn call function(expr) + sn const const {} sn dbg dbg!(expr) sn dbgr dbg!(&expr) sn deref *expr @@ -1417,6 +1450,7 @@ fn main() { me foo() fn(&self) sn box Box::new(expr) sn call function(expr) + sn const const {} sn dbg dbg!(expr) sn dbgr dbg!(&expr) sn deref *expr @@ -1442,6 +1476,7 @@ fn main() { expect![[r#" sn box Box::new(expr) sn call function(expr) + sn const const {} sn dbg dbg!(expr) sn dbgr dbg!(&expr) sn deref *expr @@ -1482,7 +1517,7 @@ fn main() { en Enum Enum fn function() fn() fn main() fn() - lc variable &str + lc variable &'static str ma helper!(…) macro_rules! helper ma m!(…) macro_rules! m ma makro!(…) macro_rules! makro @@ -1505,6 +1540,7 @@ fn main() { kw if kw if let kw impl + kw impl for kw let kw letm kw loop @@ -1531,7 +1567,10 @@ fn main() { #[test] fn excluded_trait_method_is_excluded() { check_with_config( - CompletionConfig { exclude_traits: &["test::ExcludedTrait".to_owned()], ..TEST_CONFIG }, + CompletionConfig { + exclude_traits: &["ra_test_fixture::ExcludedTrait".to_owned()], + ..TEST_CONFIG + }, r#" trait ExcludedTrait { fn foo(&self) {} @@ -1551,22 +1590,20 @@ fn foo() { } "#, expect![[r#" - me bar() (as ExcludedTrait) fn(&self) - me baz() (as ExcludedTrait) fn(&self) - me foo() (as ExcludedTrait) fn(&self) - me inherent() fn(&self) - sn box Box::new(expr) - sn call function(expr) - sn dbg dbg!(expr) - sn dbgr dbg!(&expr) - sn deref *expr - sn let let - sn letm let mut - sn match match expr {} - sn ref &expr - sn refm &mut expr - sn return return expr - sn unsafe unsafe {} + me inherent() fn(&self) + sn box Box::new(expr) + sn call function(expr) + sn const const {} + sn dbg dbg!(expr) + sn dbgr dbg!(&expr) + sn deref *expr + sn let let + sn letm let mut + sn match match expr {} + sn ref &expr + sn refm &mut expr + sn return return expr + sn unsafe unsafe {} "#]], ); } @@ -1574,7 +1611,10 @@ fn foo() { #[test] fn excluded_trait_not_excluded_when_inherent() { check_with_config( - CompletionConfig { exclude_traits: &["test::ExcludedTrait".to_owned()], ..TEST_CONFIG }, + CompletionConfig { + exclude_traits: &["ra_test_fixture::ExcludedTrait".to_owned()], + ..TEST_CONFIG + }, r#" trait ExcludedTrait { fn foo(&self) {} @@ -1594,6 +1634,7 @@ fn foo(v: &dyn ExcludedTrait) { me foo() (as ExcludedTrait) fn(&self) sn box Box::new(expr) sn call function(expr) + sn const const {} sn dbg dbg!(expr) sn dbgr dbg!(&expr) sn deref *expr @@ -1607,7 +1648,10 @@ fn foo(v: &dyn ExcludedTrait) { "#]], ); check_with_config( - CompletionConfig { exclude_traits: &["test::ExcludedTrait".to_owned()], ..TEST_CONFIG }, + CompletionConfig { + exclude_traits: &["ra_test_fixture::ExcludedTrait".to_owned()], + ..TEST_CONFIG + }, r#" trait ExcludedTrait { fn foo(&self) {} @@ -1627,6 +1671,7 @@ fn foo(v: impl ExcludedTrait) { me foo() (as ExcludedTrait) fn(&self) sn box Box::new(expr) sn call function(expr) + sn const const {} sn dbg dbg!(expr) sn dbgr dbg!(&expr) sn deref *expr @@ -1640,7 +1685,10 @@ fn foo(v: impl ExcludedTrait) { "#]], ); check_with_config( - CompletionConfig { exclude_traits: &["test::ExcludedTrait".to_owned()], ..TEST_CONFIG }, + CompletionConfig { + exclude_traits: &["ra_test_fixture::ExcludedTrait".to_owned()], + ..TEST_CONFIG + }, r#" trait ExcludedTrait { fn foo(&self) {} @@ -1660,6 +1708,7 @@ fn foo(v: T) { me foo() (as ExcludedTrait) fn(&self) sn box Box::new(expr) sn call function(expr) + sn const const {} sn dbg dbg!(expr) sn dbgr dbg!(&expr) sn deref *expr @@ -1678,7 +1727,7 @@ fn foo(v: T) { fn excluded_trait_method_is_excluded_from_flyimport() { check_with_config( CompletionConfig { - exclude_traits: &["test::module2::ExcludedTrait".to_owned()], + exclude_traits: &["ra_test_fixture::module2::ExcludedTrait".to_owned()], ..TEST_CONFIG }, r#" @@ -1702,22 +1751,20 @@ fn foo() { } "#, expect![[r#" - me bar() (use module2::ExcludedTrait) fn(&self) - me baz() (use module2::ExcludedTrait) fn(&self) - me foo() (use module2::ExcludedTrait) fn(&self) - me inherent() fn(&self) - sn box Box::new(expr) - sn call function(expr) - sn dbg dbg!(expr) - sn dbgr dbg!(&expr) - sn deref *expr - sn let let - sn letm let mut - sn match match expr {} - sn ref &expr - sn refm &mut expr - sn return return expr - sn unsafe unsafe {} + me inherent() fn(&self) + sn box Box::new(expr) + sn call function(expr) + sn const const {} + sn dbg dbg!(expr) + sn dbgr dbg!(&expr) + sn deref *expr + sn let let + sn letm let mut + sn match match expr {} + sn ref &expr + sn refm &mut expr + sn return return expr + sn unsafe unsafe {} "#]], ); } @@ -1727,7 +1774,7 @@ fn flyimport_excluded_trait_method_is_excluded_from_flyimport() { check_with_config( CompletionConfig { exclude_flyimport: vec![( - "test::module2::ExcludedTrait".to_owned(), + "ra_test_fixture::module2::ExcludedTrait".to_owned(), AutoImportExclusionType::Methods, )], ..TEST_CONFIG @@ -1753,22 +1800,20 @@ fn foo() { } "#, expect![[r#" - me bar() (use module2::ExcludedTrait) fn(&self) - me baz() (use module2::ExcludedTrait) fn(&self) - me foo() (use module2::ExcludedTrait) fn(&self) - me inherent() fn(&self) - sn box Box::new(expr) - sn call function(expr) - sn dbg dbg!(expr) - sn dbgr dbg!(&expr) - sn deref *expr - sn let let - sn letm let mut - sn match match expr {} - sn ref &expr - sn refm &mut expr - sn return return expr - sn unsafe unsafe {} + me inherent() fn(&self) + sn box Box::new(expr) + sn call function(expr) + sn const const {} + sn dbg dbg!(expr) + sn dbgr dbg!(&expr) + sn deref *expr + sn let let + sn letm let mut + sn match match expr {} + sn ref &expr + sn refm &mut expr + sn return return expr + sn unsafe unsafe {} "#]], ); } @@ -1776,7 +1821,10 @@ fn foo() { #[test] fn excluded_trait_method_is_excluded_from_path_completion() { check_with_config( - CompletionConfig { exclude_traits: &["test::ExcludedTrait".to_owned()], ..TEST_CONFIG }, + CompletionConfig { + exclude_traits: &["ra_test_fixture::ExcludedTrait".to_owned()], + ..TEST_CONFIG + }, r#" pub trait ExcludedTrait { fn foo(&self) {} @@ -1796,10 +1844,7 @@ fn foo() { } "#, expect![[r#" - me bar(…) (as ExcludedTrait) fn(&self) - me baz(…) (as ExcludedTrait) fn(&self) - me foo(…) (as ExcludedTrait) fn(&self) - me inherent(…) fn(&self) + me inherent(…) fn(&self) "#]], ); } @@ -1807,7 +1852,10 @@ fn foo() { #[test] fn excluded_trait_method_is_not_excluded_when_trait_is_specified() { check_with_config( - CompletionConfig { exclude_traits: &["test::ExcludedTrait".to_owned()], ..TEST_CONFIG }, + CompletionConfig { + exclude_traits: &["ra_test_fixture::ExcludedTrait".to_owned()], + ..TEST_CONFIG + }, r#" pub trait ExcludedTrait { fn foo(&self) {} @@ -1833,7 +1881,10 @@ fn foo() { "#]], ); check_with_config( - CompletionConfig { exclude_traits: &["test::ExcludedTrait".to_owned()], ..TEST_CONFIG }, + CompletionConfig { + exclude_traits: &["ra_test_fixture::ExcludedTrait".to_owned()], + ..TEST_CONFIG + }, r#" pub trait ExcludedTrait { fn foo(&self) {} @@ -1863,7 +1914,10 @@ fn foo() { #[test] fn excluded_trait_not_excluded_when_inherent_path() { check_with_config( - CompletionConfig { exclude_traits: &["test::ExcludedTrait".to_owned()], ..TEST_CONFIG }, + CompletionConfig { + exclude_traits: &["ra_test_fixture::ExcludedTrait".to_owned()], + ..TEST_CONFIG + }, r#" trait ExcludedTrait { fn foo(&self) {} @@ -1884,7 +1938,10 @@ fn foo() { "#]], ); check_with_config( - CompletionConfig { exclude_traits: &["test::ExcludedTrait".to_owned()], ..TEST_CONFIG }, + CompletionConfig { + exclude_traits: &["ra_test_fixture::ExcludedTrait".to_owned()], + ..TEST_CONFIG + }, r#" trait ExcludedTrait { fn foo(&self) {} @@ -1956,6 +2013,7 @@ fn bar() { kw if kw if let kw impl + kw impl for kw let kw letm kw loop @@ -2028,6 +2086,7 @@ fn foo() { kw if kw if let kw impl + kw impl for kw let kw letm kw loop @@ -2051,3 +2110,19 @@ fn foo() { "#]], ); } + +#[test] +fn escaped_label() { + check( + r#" +fn main() { + 'r#break: { + break '$0; + } +} + "#, + expect![[r#" + lb 'r#break + "#]], + ); +} diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests/flyimport.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests/flyimport.rs index 2e7c53def7fc5..27c91bc7c4558 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/tests/flyimport.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests/flyimport.rs @@ -1,9 +1,9 @@ -use expect_test::{expect, Expect}; +use expect_test::{Expect, expect}; use crate::{ - context::{CompletionAnalysis, NameContext, NameKind, NameRefKind}, - tests::{check_edit, check_edit_with_config, TEST_CONFIG}, CompletionConfig, + context::{CompletionAnalysis, NameContext, NameKind, NameRefKind}, + tests::{TEST_CONFIG, check_edit, check_edit_with_config}, }; fn check(#[rust_analyzer::rust_fixture] ra_fixture: &str, expect: Expect) { @@ -1810,9 +1810,10 @@ fn function() { #[test] fn excluded_trait_item_included_when_exact_match() { + // FIXME: This does not work, we need to change the code. check_with_config( CompletionConfig { - exclude_traits: &["test::module2::ExcludedTrait".to_owned()], + exclude_traits: &["ra_test_fixture::module2::ExcludedTrait".to_owned()], ..TEST_CONFIG }, r#" @@ -1828,10 +1829,122 @@ mod module2 { fn foo() { true.foo$0 +} + "#, + expect![""], + ); +} + +#[test] +fn excluded_via_attr() { + check( + r#" +mod module2 { + #[rust_analyzer::completions(ignore_flyimport)] + pub trait ExcludedTrait { + fn foo(&self) {} + fn bar(&self) {} + fn baz(&self) {} + } + + impl ExcludedTrait for T {} +} + +fn foo() { + true.$0 +} + "#, + expect![""], + ); + check( + r#" +mod module2 { + #[rust_analyzer::completions(ignore_flyimport_methods)] + pub trait ExcludedTrait { + fn foo(&self) {} + fn bar(&self) {} + fn baz(&self) {} + } + + impl ExcludedTrait for T {} +} + +fn foo() { + true.$0 +} + "#, + expect![""], + ); + check( + r#" +mod module2 { + #[rust_analyzer::completions(ignore_methods)] + pub trait ExcludedTrait { + fn foo(&self) {} + fn bar(&self) {} + fn baz(&self) {} + } + + impl ExcludedTrait for T {} +} + +fn foo() { + true.$0 +} + "#, + expect![""], + ); + check( + r#" +mod module2 { + #[rust_analyzer::completions(ignore_flyimport)] + pub trait ExcludedTrait { + fn foo(&self) {} + fn bar(&self) {} + fn baz(&self) {} + } + + impl ExcludedTrait for T {} +} + +fn foo() { + ExcludedTrait$0 +} + "#, + expect![""], + ); + check( + r#" +mod module2 { + #[rust_analyzer::completions(ignore_methods)] + pub trait ExcludedTrait { + fn foo(&self) {} + fn bar(&self) {} + fn baz(&self) {} + } + + impl ExcludedTrait for T {} +} + +fn foo() { + ExcludedTrait$0 } "#, expect![[r#" - me foo() (use module2::ExcludedTrait) fn(&self) + tt ExcludedTrait (use module2::ExcludedTrait) "#]], ); + check( + r#" +mod module2 { + #[rust_analyzer::completions(ignore_flyimport)] + pub struct Foo {} +} + +fn foo() { + Foo$0 +} + "#, + expect![""], + ); } diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests/item.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests/item.rs index be2c37d10162e..55689034fb478 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/tests/item.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests/item.rs @@ -284,6 +284,7 @@ fn bar() { kw if kw if let kw impl + kw impl for kw let kw letm kw loop diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests/item_list.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests/item_list.rs index 841c42123a017..fcdf10c85616c 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/tests/item_list.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests/item_list.rs @@ -16,6 +16,7 @@ fn in_mod_item_list() { kw extern kw fn kw impl + kw impl for kw mod kw pub kw pub(crate) @@ -50,6 +51,7 @@ fn in_source_file_item_list() { kw extern kw fn kw impl + kw impl for kw mod kw pub kw pub(crate) @@ -83,6 +85,7 @@ fn in_item_list_after_attr() { kw extern kw fn kw impl + kw impl for kw mod kw pub kw pub(crate) @@ -122,6 +125,7 @@ fn after_unsafe_token() { kw extern kw fn kw impl + kw impl for kw trait "#]], ); @@ -385,6 +389,7 @@ fn after_unit_struct() { kw extern kw fn kw impl + kw impl for kw mod kw pub kw pub(crate) diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests/proc_macros.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests/proc_macros.rs index 6b1dfe366ce2d..626d1677d5553 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/tests/proc_macros.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests/proc_macros.rs @@ -22,6 +22,7 @@ fn main() { me foo() fn(&self) sn box Box::new(expr) sn call function(expr) + sn const const {} sn dbg dbg!(expr) sn dbgr dbg!(&expr) sn deref *expr @@ -55,6 +56,7 @@ fn main() { me foo() fn(&self) sn box Box::new(expr) sn call function(expr) + sn const const {} sn dbg dbg!(expr) sn dbgr dbg!(&expr) sn deref *expr @@ -90,6 +92,7 @@ fn main() {} me foo() fn(&self) sn box Box::new(expr) sn call function(expr) + sn const const {} sn dbg dbg!(expr) sn dbgr dbg!(&expr) sn deref *expr @@ -125,6 +128,7 @@ fn main() {} me foo() fn(&self) sn box Box::new(expr) sn call function(expr) + sn const const {} sn dbg dbg!(expr) sn dbgr dbg!(&expr) sn deref *expr diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests/raw_identifiers.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests/raw_identifiers.rs index 9ab66243b5c8c..00977ea4e533b 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/tests/raw_identifiers.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests/raw_identifiers.rs @@ -1,13 +1,13 @@ use base_db::SourceDatabase; -use expect_test::{expect, Expect}; +use expect_test::{Expect, expect}; use itertools::Itertools; -use crate::tests::{completion_list_with_config_raw, position, TEST_CONFIG}; +use crate::tests::{TEST_CONFIG, completion_list_with_config_raw, position}; fn check(#[rust_analyzer::rust_fixture] ra_fixture: &str, expect: Expect) { let completions = completion_list_with_config_raw(TEST_CONFIG, ra_fixture, true, None); let (db, position) = position(ra_fixture); - let mut actual = db.file_text(position.file_id).to_string(); + let mut actual = db.file_text(position.file_id).text(&db).to_string(); completions .into_iter() .exactly_one() diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests/special.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests/special.rs index 005263d100a5b..148203107c4cf 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/tests/special.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests/special.rs @@ -1,14 +1,14 @@ //! Tests that don't fit into a specific category. -use expect_test::{expect, Expect}; +use expect_test::{Expect, expect}; use ide_db::SymbolKind; use crate::{ + CompletionItemKind, tests::{ - check, check_edit, check_no_kw, check_with_trigger_character, do_completion_with_config, - TEST_CONFIG, + TEST_CONFIG, check, check_edit, check_no_kw, check_with_trigger_character, + do_completion_with_config, }, - CompletionItemKind, }; #[test] @@ -105,7 +105,7 @@ mod macros { fn completes_std_prelude_if_core_is_defined() { check_no_kw( r#" -//- /main.rs crate:main deps:core,std +//- /main.rs crate:main deps:core,std edition:2021 fn foo() { let x: $0 } //- /core/lib.rs crate:core @@ -1008,6 +1008,7 @@ fn here_we_go() { kw if kw if let kw impl + kw impl for kw let kw letm kw loop @@ -1059,6 +1060,7 @@ fn here_we_go() { kw if kw if let kw impl + kw impl for kw let kw letm kw loop @@ -1146,6 +1148,7 @@ fn here_we_go() { me baz() (alias qux) fn(&self) -> u8 sn box Box::new(expr) sn call function(expr) + sn const const {} sn dbg dbg!(expr) sn dbgr dbg!(&expr) sn deref *expr @@ -1183,6 +1186,7 @@ fn bar() { qu$0 } kw if kw if let kw impl + kw impl for kw let kw letm kw loop @@ -1264,6 +1268,7 @@ fn here_we_go() { md foo st Bar (alias Qux) (use foo::Bar) Bar bt u32 u32 + kw const kw crate:: kw false kw for @@ -1353,7 +1358,7 @@ pub fn foo<'x, T>(x: &'x mut T) -> u8 where T: Clone, { 0u8 } fn main() { fo$0 } "#, CompletionItemKind::SymbolKind(ide_db::SymbolKind::Function), - expect!("fn(&mut T) -> u8"), + expect!("fn(&'x mut T) -> u8"), expect!("pub fn foo<'x, T>(x: &'x mut T) -> u8 where T: Clone,"), ); @@ -1386,7 +1391,7 @@ fn main() { } "#, CompletionItemKind::SymbolKind(SymbolKind::Method), - expect!("const fn(&'foo mut self, &Foo) -> !"), + expect!("const fn(&'foo mut self, &'foo Foo) -> !"), expect!("pub const fn baz<'foo>(&'foo mut self, x: &'foo Foo) -> !"), ); } @@ -1439,6 +1444,7 @@ fn foo() { kw if kw if let kw impl + kw impl for kw let kw letm kw loop diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests/type_pos.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests/type_pos.rs index c7e2d058257e3..125e11e9e3589 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/tests/type_pos.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests/type_pos.rs @@ -429,18 +429,18 @@ trait Tr { impl Tr<$0 "#, expect![[r#" - en Enum Enum - ma makro!(…) macro_rules! makro + en Enum Enum + ma makro!(…) macro_rules! makro md module - sp Self dyn Tr<{unknown}> - st Record Record - st S S - st Tuple Tuple - st Unit Unit + sp Self dyn Tr<{unknown}> + 'static + st Record Record + st S S + st Tuple Tuple + st Unit Unit tt Tr tt Trait - un Union Union - bt u32 u32 + un Union Union + bt u32 u32 kw crate:: kw self:: "#]], diff --git a/src/tools/rust-analyzer/crates/ide-db/Cargo.toml b/src/tools/rust-analyzer/crates/ide-db/Cargo.toml index 641998c3dacaf..583318de26df0 100644 --- a/src/tools/rust-analyzer/crates/ide-db/Cargo.toml +++ b/src/tools/rust-analyzer/crates/ide-db/Cargo.toml @@ -12,7 +12,7 @@ rust-version.workspace = true [lib] [dependencies] -cov-mark = "2.0.0-pre.1" +cov-mark = "2.0.0" crossbeam-channel.workspace = true tracing.workspace = true rayon.workspace = true @@ -22,7 +22,10 @@ either.workspace = true itertools.workspace = true arrayvec.workspace = true indexmap.workspace = true -memchr = "2.6.4" +memchr = "2.7.4" +salsa.workspace = true +salsa-macros.workspace = true +query-group.workspace = true triomphe.workspace = true nohash-hasher.workspace = true bitflags.workspace = true @@ -34,6 +37,7 @@ profile.workspace = true stdx.workspace = true syntax.workspace = true span.workspace = true +vfs.workspace = true # ide should depend only on the top-level `hir` package. if you need # something from some `hir-xxx` subpackage, reexport the API via `hir`. hir.workspace = true @@ -41,7 +45,7 @@ hir.workspace = true line-index.workspace = true [dev-dependencies] -expect-test = "1.4.0" +expect-test = "1.5.1" # local deps test-utils.workspace = true diff --git a/src/tools/rust-analyzer/crates/ide-db/src/active_parameter.rs b/src/tools/rust-analyzer/crates/ide-db/src/active_parameter.rs index 11808fed3be6a..7b5723f37f760 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/active_parameter.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/active_parameter.rs @@ -3,9 +3,11 @@ use either::Either; use hir::{InFile, Semantics, Type}; use parser::T; +use span::TextSize; use syntax::{ + AstNode, NodeOrToken, SyntaxToken, ast::{self, AstChildren, HasArgList, HasAttrs, HasName}, - match_ast, AstNode, NodeOrToken, SyntaxToken, + match_ast, }; use crate::RootDatabase; @@ -20,7 +22,24 @@ impl ActiveParameter { /// Returns information about the call argument this token is part of. pub fn at_token(sema: &Semantics<'_, RootDatabase>, token: SyntaxToken) -> Option { let (signature, active_parameter) = callable_for_token(sema, token)?; + Self::from_signature_and_active_parameter(sema, signature, active_parameter) + } + /// Returns information about the call argument this token is part of. + pub fn at_arg( + sema: &Semantics<'_, RootDatabase>, + list: ast::ArgList, + at: TextSize, + ) -> Option { + let (signature, active_parameter) = callable_for_arg_list(sema, list, at)?; + Self::from_signature_and_active_parameter(sema, signature, active_parameter) + } + + fn from_signature_and_active_parameter( + sema: &Semantics<'_, RootDatabase>, + signature: hir::Callable, + active_parameter: Option, + ) -> Option { let idx = active_parameter?; let mut params = signature.params(); if idx >= params.len() { @@ -48,20 +67,32 @@ pub fn callable_for_token( sema: &Semantics<'_, RootDatabase>, token: SyntaxToken, ) -> Option<(hir::Callable, Option)> { + let offset = token.text_range().start(); // Find the calling expression and its NameRef let parent = token.parent()?; - let calling_node = parent.ancestors().filter_map(ast::CallableExpr::cast).find(|it| { - it.arg_list() - .is_some_and(|it| it.syntax().text_range().contains(token.text_range().start())) - })?; + let calling_node = parent + .ancestors() + .filter_map(ast::CallableExpr::cast) + .find(|it| it.arg_list().is_some_and(|it| it.syntax().text_range().contains(offset)))?; + + callable_for_node(sema, &calling_node, offset) +} - callable_for_node(sema, &calling_node, &token) +/// Returns a [`hir::Callable`] this token is a part of and its argument index of said callable. +pub fn callable_for_arg_list( + sema: &Semantics<'_, RootDatabase>, + arg_list: ast::ArgList, + at: TextSize, +) -> Option<(hir::Callable, Option)> { + debug_assert!(arg_list.syntax().text_range().contains(at)); + let callable = arg_list.syntax().parent().and_then(ast::CallableExpr::cast)?; + callable_for_node(sema, &callable, at) } pub fn callable_for_node( sema: &Semantics<'_, RootDatabase>, calling_node: &ast::CallableExpr, - token: &SyntaxToken, + offset: TextSize, ) -> Option<(hir::Callable, Option)> { let callable = match calling_node { ast::CallableExpr::Call(call) => sema.resolve_expr_as_callable(&call.expr()?), @@ -73,7 +104,7 @@ pub fn callable_for_node( .children_with_tokens() .filter_map(NodeOrToken::into_token) .filter(|t| t.kind() == T![,]) - .take_while(|t| t.text_range().start() <= token.text_range().start()) + .take_while(|t| t.text_range().start() <= offset) .count() }); Some((callable, active_param)) diff --git a/src/tools/rust-analyzer/crates/ide-db/src/apply_change.rs b/src/tools/rust-analyzer/crates/ide-db/src/apply_change.rs index 46ff4fbf9e904..008b6fdbe2c68 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/apply_change.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/apply_change.rs @@ -1,17 +1,12 @@ //! Applies changes to the IDE state transactionally. -use base_db::{ - ra_salsa::{ - debug::{DebugQueryTable, TableEntry}, - Database, Durability, Query, QueryTable, - }, - SourceRootId, -}; -use profile::{memory_usage, Bytes}; +use base_db::SourceRootId; +use profile::Bytes; use rustc_hash::FxHashSet; +use salsa::{Database as _, Durability}; use triomphe::Arc; -use crate::{symbol_index::SymbolsDatabase, ChangeWithProcMacros, RootDatabase}; +use crate::{ChangeWithProcMacros, RootDatabase, symbol_index::SymbolsDatabase}; impl RootDatabase { pub fn request_cancellation(&mut self) { @@ -34,8 +29,8 @@ impl RootDatabase { local_roots.insert(root_id); } } - self.set_local_roots_with_durability(Arc::new(local_roots), Durability::HIGH); - self.set_library_roots_with_durability(Arc::new(library_roots), Durability::HIGH); + self.set_local_roots_with_durability(Arc::new(local_roots), Durability::MEDIUM); + self.set_library_roots_with_durability(Arc::new(library_roots), Durability::MEDIUM); } change.apply(self); } @@ -52,23 +47,23 @@ impl RootDatabase { pub fn per_query_memory_usage(&mut self) -> Vec<(String, Bytes, usize)> { let mut acc: Vec<(String, Bytes, usize)> = vec![]; - fn collect_query_count<'q, Q>(table: &QueryTable<'q, Q>) -> usize - where - QueryTable<'q, Q>: DebugQueryTable, - Q: Query, - ::Storage: 'q, - { - struct EntryCounter(usize); - impl FromIterator> for EntryCounter { - fn from_iter(iter: T) -> EntryCounter - where - T: IntoIterator>, - { - EntryCounter(iter.into_iter().count()) - } - } - table.entries::().0 - } + // fn collect_query_count<'q, Q>(table: &QueryTable<'q, Q>) -> usize + // where + // QueryTable<'q, Q>: DebugQueryTable, + // Q: Query, + // ::Storage: 'q, + // { + // struct EntryCounter(usize); + // impl FromIterator> for EntryCounter { + // fn from_iter(iter: T) -> EntryCounter + // where + // T: IntoIterator>, + // { + // EntryCounter(iter.into_iter().count()) + // } + // } + // table.entries::().0 + // } macro_rules! purge_each_query { ($($q:path)*) => {$( @@ -83,170 +78,174 @@ impl RootDatabase { )*} } purge_each_query![ - // SymbolsDatabase - crate::symbol_index::ModuleSymbolsQuery - crate::symbol_index::LibrarySymbolsQuery - crate::symbol_index::LocalRootsQuery - crate::symbol_index::LibraryRootsQuery - // HirDatabase - hir::db::AdtDatumQuery - hir::db::AdtVarianceQuery - hir::db::AssociatedTyDataQuery - hir::db::AssociatedTyValueQuery - hir::db::BorrowckQuery - hir::db::CallableItemSignatureQuery - hir::db::ConstEvalDiscriminantQuery - hir::db::ConstEvalQuery - hir::db::ConstEvalStaticQuery - hir::db::ConstParamTyQuery - hir::db::DynCompatibilityOfTraitQuery - hir::db::FieldTypesQuery - hir::db::FnDefDatumQuery - hir::db::FnDefVarianceQuery - hir::db::GenericDefaultsQuery - hir::db::GenericPredicatesForParamQuery - hir::db::GenericPredicatesQuery - hir::db::GenericPredicatesWithoutParentQuery - hir::db::ImplDatumQuery - hir::db::ImplSelfTyQuery - hir::db::ImplTraitQuery - hir::db::IncoherentInherentImplCratesQuery - hir::db::InferQuery - hir::db::InherentImplsInBlockQuery - hir::db::InherentImplsInCrateQuery - hir::db::InternCallableDefQuery - hir::db::InternClosureQuery - hir::db::InternCoroutineQuery - hir::db::InternImplTraitIdQuery - hir::db::InternLifetimeParamIdQuery - hir::db::InternTypeOrConstParamIdQuery - hir::db::LayoutOfAdtQuery - hir::db::LayoutOfTyQuery - hir::db::LookupImplMethodQuery - hir::db::MirBodyForClosureQuery - hir::db::MirBodyQuery - hir::db::MonomorphizedMirBodyForClosureQuery - hir::db::MonomorphizedMirBodyQuery - hir::db::ProgramClausesForChalkEnvQuery - hir::db::ReturnTypeImplTraitsQuery - hir::db::TargetDataLayoutQuery - hir::db::TraitDatumQuery - hir::db::TraitEnvironmentQuery - hir::db::TraitImplsInBlockQuery - hir::db::TraitImplsInCrateQuery - hir::db::TraitImplsInDepsQuery - hir::db::TraitSolveQuery - hir::db::TyQuery - hir::db::TypeAliasImplTraitsQuery - hir::db::ValueTyQuery + // // SymbolsDatabase + // crate::symbol_index::ModuleSymbolsQuery + // crate::symbol_index::LibrarySymbolsQuery + // crate::symbol_index::LocalRootsQuery + // crate::symbol_index::LibraryRootsQuery + // // HirDatabase + // hir::db::AdtDatumQuery + // hir::db::AdtVarianceQuery + // hir::db::AssociatedTyDataQuery + // hir::db::AssociatedTyValueQuery + // hir::db::BorrowckQuery + // hir::db::CallableItemSignatureQuery + // hir::db::ConstEvalDiscriminantQuery + // hir::db::ConstEvalQuery + // hir::db::ConstEvalStaticQuery + // hir::db::ConstParamTyQuery + // hir::db::DynCompatibilityOfTraitQuery + // hir::db::FieldTypesQuery + // hir::db::FnDefDatumQuery + // hir::db::FnDefVarianceQuery + // hir::db::GenericDefaultsQuery + // hir::db::GenericPredicatesForParamQuery + // hir::db::GenericPredicatesQuery + // hir::db::GenericPredicatesWithoutParentQuery + // hir::db::ImplDatumQuery + // hir::db::ImplSelfTyQuery + // hir::db::ImplTraitQuery + // hir::db::IncoherentInherentImplCratesQuery + // hir::db::InferQuery + // hir::db::InherentImplsInBlockQuery + // hir::db::InherentImplsInCrateQuery + // hir::db::InternCallableDefQuery + // hir::db::InternClosureQuery + // hir::db::InternCoroutineQuery + // hir::db::InternImplTraitIdQuery + // hir::db::InternLifetimeParamIdQuery + // hir::db::InternTypeOrConstParamIdQuery + // hir::db::LayoutOfAdtQuery + // hir::db::LayoutOfTyQuery + // hir::db::LookupImplMethodQuery + // hir::db::MirBodyForClosureQuery + // hir::db::MirBodyQuery + // hir::db::MonomorphizedMirBodyForClosureQuery + // hir::db::MonomorphizedMirBodyQuery + // hir::db::ProgramClausesForChalkEnvQuery + // hir::db::ReturnTypeImplTraitsQuery + // hir::db::TargetDataLayoutQuery + // hir::db::TraitDatumQuery + // hir::db::TraitEnvironmentQuery + // hir::db::TraitImplsInBlockQuery + // hir::db::TraitImplsInCrateQuery + // hir::db::TraitImplsInDepsQuery + // hir::db::TraitSolveQuery + // hir::db::TyQuery + // hir::db::TypeAliasImplTraitsQuery + // hir::db::ValueTyQuery - // DefDatabase - hir::db::AttrsQuery - hir::db::BlockDefMapQuery - hir::db::BlockItemTreeQuery - hir::db::BlockItemTreeWithSourceMapQuery - hir::db::BodyQuery - hir::db::BodyWithSourceMapQuery - hir::db::ConstDataQuery - hir::db::ConstVisibilityQuery - hir::db::CrateDefMapQuery - hir::db::CrateLangItemsQuery - hir::db::CrateNotableTraitsQuery - hir::db::CrateSupportsNoStdQuery - hir::db::EnumDataQuery - hir::db::EnumVariantDataWithDiagnosticsQuery - hir::db::ExpandProcAttrMacrosQuery - hir::db::ExprScopesQuery - hir::db::ExternCrateDeclDataQuery - hir::db::FieldVisibilitiesQuery - hir::db::FieldsAttrsQuery - hir::db::FieldsAttrsSourceMapQuery - hir::db::FileItemTreeQuery - hir::db::FileItemTreeWithSourceMapQuery - hir::db::FunctionDataQuery - hir::db::FunctionVisibilityQuery - hir::db::GenericParamsQuery - hir::db::GenericParamsWithSourceMapQuery - hir::db::ImplDataWithDiagnosticsQuery - hir::db::ImportMapQuery - hir::db::IncludeMacroInvocQuery - hir::db::InternAnonymousConstQuery - hir::db::InternBlockQuery - hir::db::InternConstQuery - hir::db::InternEnumQuery - hir::db::InternExternBlockQuery - hir::db::InternExternCrateQuery - hir::db::InternFunctionQuery - hir::db::InternImplQuery - hir::db::InternInTypeConstQuery - hir::db::InternMacro2Query - hir::db::InternMacroRulesQuery - hir::db::InternProcMacroQuery - hir::db::InternStaticQuery - hir::db::InternStructQuery - hir::db::InternTraitAliasQuery - hir::db::InternTraitQuery - hir::db::InternTypeAliasQuery - hir::db::InternUnionQuery - hir::db::InternUseQuery - hir::db::LangItemQuery - hir::db::Macro2DataQuery - hir::db::MacroDefQuery - hir::db::MacroRulesDataQuery - hir::db::NotableTraitsInDepsQuery - hir::db::ProcMacroDataQuery - hir::db::StaticDataQuery - hir::db::StructDataWithDiagnosticsQuery - hir::db::TraitAliasDataQuery - hir::db::TraitDataWithDiagnosticsQuery - hir::db::TypeAliasDataQuery - hir::db::UnionDataWithDiagnosticsQuery + // // DefDatabase + // hir::db::AttrsQuery + // hir::db::BlockDefMapQuery + // hir::db::BlockItemTreeQuery + // hir::db::BlockItemTreeWithSourceMapQuery + // hir::db::BodyQuery + // hir::db::BodyWithSourceMapQuery + // hir::db::ConstDataQuery + // hir::db::ConstVisibilityQuery + // hir::db::CrateDefMapQuery + // hir::db::CrateLangItemsQuery + // hir::db::CrateNotableTraitsQuery + // hir::db::CrateSupportsNoStdQuery + // hir::db::EnumDataQuery + // hir::db::ExpandProcAttrMacrosQuery + // hir::db::ExprScopesQuery + // hir::db::ExternCrateDeclDataQuery + // hir::db::FieldVisibilitiesQuery + // hir::db::FieldsAttrsQuery + // hir::db::FieldsAttrsSourceMapQuery + // hir::db::FileItemTreeQuery + // hir::db::FileItemTreeWithSourceMapQuery + // hir::db::FunctionDataQuery + // hir::db::FunctionVisibilityQuery + // hir::db::GenericParamsQuery + // hir::db::GenericParamsWithSourceMapQuery + // hir::db::ImplItemsWithDiagnosticsQuery + // hir::db::ImportMapQuery + // hir::db::IncludeMacroInvocQuery + // hir::db::InternAnonymousConstQuery + // hir::db::InternBlockQuery + // hir::db::InternConstQuery + // hir::db::InternEnumQuery + // hir::db::InternExternBlockQuery + // hir::db::InternExternCrateQuery + // hir::db::InternFunctionQuery + // hir::db::InternImplQuery + // hir::db::InternInTypeConstQuery + // hir::db::InternMacro2Query + // hir::db::InternMacroRulesQuery + // hir::db::InternProcMacroQuery + // hir::db::InternStaticQuery + // hir::db::InternStructQuery + // hir::db::InternTraitAliasQuery + // hir::db::InternTraitQuery + // hir::db::InternTypeAliasQuery + // hir::db::InternUnionQuery + // hir::db::InternUseQuery + // hir::db::LangItemQuery + // hir::db::Macro2DataQuery + // hir::db::MacroDefQuery + // hir::db::MacroRulesDataQuery + // hir::db::NotableTraitsInDepsQuery + // hir::db::ProcMacroDataQuery + // hir::db::StaticDataQuery + // hir::db::TraitAliasDataQuery + // hir::db::TraitItemsWithDiagnosticsQuery + // hir::db::TypeAliasDataQuery + // hir::db::VariantDataWithDiagnosticsQuery - // InternDatabase - hir::db::InternFunctionQuery - hir::db::InternStructQuery - hir::db::InternUnionQuery - hir::db::InternEnumQuery - hir::db::InternConstQuery - hir::db::InternStaticQuery - hir::db::InternTraitQuery - hir::db::InternTraitAliasQuery - hir::db::InternTypeAliasQuery - hir::db::InternImplQuery - hir::db::InternExternBlockQuery - hir::db::InternBlockQuery - hir::db::InternMacro2Query - hir::db::InternProcMacroQuery - hir::db::InternMacroRulesQuery + // // InternDatabase + // hir::db::InternFunctionQuery + // hir::db::InternStructQuery + // hir::db::InternUnionQuery + // hir::db::InternEnumQuery + // hir::db::InternConstQuery + // hir::db::InternStaticQuery + // hir::db::InternTraitQuery + // hir::db::InternTraitAliasQuery + // hir::db::InternTypeAliasQuery + // hir::db::InternImplQuery + // hir::db::InternExternBlockQuery + // hir::db::InternBlockQuery + // hir::db::InternMacro2Query + // hir::db::InternProcMacroQuery + // hir::db::InternMacroRulesQuery - // ExpandDatabase - hir::db::AstIdMapQuery - hir::db::DeclMacroExpanderQuery - hir::db::ExpandProcMacroQuery - hir::db::InternMacroCallQuery - hir::db::InternSyntaxContextQuery - hir::db::MacroArgQuery - hir::db::ParseMacroExpansionErrorQuery - hir::db::ParseMacroExpansionQuery - hir::db::ProcMacroSpanQuery - hir::db::ProcMacrosQuery - hir::db::RealSpanMapQuery + // // ExpandDatabase + // hir::db::AstIdMapQuery + // hir::db::DeclMacroExpanderQuery + // hir::db::ExpandProcMacroQuery + // hir::db::InternMacroCallQuery + // hir::db::InternSyntaxContextQuery + // hir::db::MacroArgQuery + // hir::db::ParseMacroExpansionErrorQuery + // hir::db::ParseMacroExpansionQuery + // hir::db::ProcMacroSpanQuery + // hir::db::ProcMacrosQuery + // hir::db::RealSpanMapQuery - // LineIndexDatabase - crate::LineIndexQuery + // // LineIndexDatabase + // crate::LineIndexQuery - // SourceDatabase - base_db::ParseQuery - base_db::ParseErrorsQuery - base_db::CrateGraphQuery - base_db::CrateWorkspaceDataQuery + // // SourceDatabase + // base_db::ParseQuery + // base_db::ParseErrorsQuery + // base_db::AllCratesQuery + // base_db::InternUniqueCrateDataQuery + // base_db::InternUniqueCrateDataLookupQuery + // base_db::CrateDataQuery + // base_db::ExtraCrateDataQuery + // base_db::CrateCfgQuery + // base_db::CrateEnvQuery + // base_db::CrateWorkspaceDataQuery - // SourceDatabaseExt - base_db::FileTextQuery - base_db::CompressedFileTextQuery - base_db::FileSourceRootQuery - base_db::SourceRootQuery - base_db::SourceRootCratesQuery + // // SourceDatabaseExt + // base_db::FileTextQuery + // base_db::CompressedFileTextQuery + // base_db::FileSourceRootQuery + // base_db::SourceRootQuery + // base_db::SourceRootCratesQuery ]; acc.sort_by_key(|it| std::cmp::Reverse(it.1)); diff --git a/src/tools/rust-analyzer/crates/ide-db/src/assists.rs b/src/tools/rust-analyzer/crates/ide-db/src/assists.rs index 1c40685ebb130..384eb57c0fd59 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/assists.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/assists.rs @@ -43,9 +43,6 @@ pub enum Command { #[derive(Debug, Clone, Copy, PartialEq, Eq)] pub enum AssistKind { - // FIXME: does the None variant make sense? Probably not. - None, - QuickFix, Generate, Refactor, @@ -61,7 +58,7 @@ impl AssistKind { } match self { - AssistKind::None | AssistKind::Generate => true, + AssistKind::Generate => true, AssistKind::Refactor => matches!( other, AssistKind::RefactorExtract @@ -74,7 +71,6 @@ impl AssistKind { pub fn name(&self) -> &str { match self { - AssistKind::None => "None", AssistKind::QuickFix => "QuickFix", AssistKind::Generate => "Generate", AssistKind::Refactor => "Refactor", @@ -90,7 +86,6 @@ impl FromStr for AssistKind { fn from_str(s: &str) -> Result { match s { - "None" => Ok(AssistKind::None), "QuickFix" => Ok(AssistKind::QuickFix), "Generate" => Ok(AssistKind::Generate), "Refactor" => Ok(AssistKind::Refactor), @@ -105,7 +100,33 @@ impl FromStr for AssistKind { /// Unique identifier of the assist, should not be shown to the user /// directly. #[derive(Debug, Clone, Copy, PartialEq, Eq)] -pub struct AssistId(pub &'static str, pub AssistKind); +pub struct AssistId(pub &'static str, pub AssistKind, pub Option); + +impl AssistId { + pub fn quick_fix(id: &'static str) -> AssistId { + AssistId(id, AssistKind::QuickFix, None) + } + + pub fn generate(id: &'static str) -> AssistId { + AssistId(id, AssistKind::Generate, None) + } + + pub fn refactor(id: &'static str) -> AssistId { + AssistId(id, AssistKind::Refactor, None) + } + + pub fn refactor_extract(id: &'static str) -> AssistId { + AssistId(id, AssistKind::RefactorExtract, None) + } + + pub fn refactor_inline(id: &'static str) -> AssistId { + AssistId(id, AssistKind::RefactorInline, None) + } + + pub fn refactor_rewrite(id: &'static str) -> AssistId { + AssistId(id, AssistKind::RefactorRewrite, None) + } +} /// A way to control how many assist to resolve during the assist resolution. /// When an assist is resolved, its edits are calculated that might be costly to always do by default. @@ -128,6 +149,8 @@ pub struct SingleResolve { pub assist_id: String, // The kind of the assist. pub assist_kind: AssistKind, + /// Subtype of the assist. When many assists have the same id, it differentiates among them. + pub assist_subtype: Option, } impl AssistResolveStrategy { @@ -136,7 +159,9 @@ impl AssistResolveStrategy { AssistResolveStrategy::None => false, AssistResolveStrategy::All => true, AssistResolveStrategy::Single(single_resolve) => { - single_resolve.assist_id == id.0 && single_resolve.assist_kind == id.1 + single_resolve.assist_id == id.0 + && single_resolve.assist_kind == id.1 + && single_resolve.assist_subtype == id.2 } } } @@ -144,3 +169,15 @@ impl AssistResolveStrategy { #[derive(Clone, Debug)] pub struct GroupLabel(pub String); + +#[derive(Clone, Debug, PartialEq, Eq)] +pub enum ExprFillDefaultMode { + Todo, + Default, + Underscore, +} +impl Default for ExprFillDefaultMode { + fn default() -> Self { + Self::Todo + } +} diff --git a/src/tools/rust-analyzer/crates/ide-db/src/defs.rs b/src/tools/rust-analyzer/crates/ide-db/src/defs.rs index 502314ed1e0ec..bf4f541ff54ca 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/defs.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/defs.rs @@ -5,9 +5,9 @@ // FIXME: this badly needs rename/rewrite (matklad, 2020-02-06). +use crate::RootDatabase; use crate::documentation::{Documentation, HasDocs}; use crate::famous_defs::FamousDefs; -use crate::RootDatabase; use arrayvec::ArrayVec; use either::Either; use hir::{ @@ -21,8 +21,9 @@ use hir::{ use span::Edition; use stdx::{format_to, impl_from}; use syntax::{ + SyntaxKind, SyntaxNode, SyntaxToken, ast::{self, AstNode}, - match_ast, SyntaxKind, SyntaxNode, SyntaxToken, + match_ast, }; // FIXME: a more precise name would probably be `Symbol`? @@ -838,6 +839,14 @@ impl NameRefClass { ast::AsmRegSpec(_) => { Some(NameRefClass::Definition(Definition::InlineAsmRegOrRegClass(()), None)) }, + ast::OffsetOfExpr(_) => { + let (def, subst) = sema.resolve_offset_of_field(name_ref)?; + let def = match def { + Either::Left(variant) => Definition::Variant(variant), + Either::Right(field) => Definition::Field(field), + }; + Some(NameRefClass::Definition(def, Some(subst))) + }, _ => None } } @@ -988,7 +997,6 @@ impl TryFrom for Definition { DefWithBody::Static(it) => Ok(it.into()), DefWithBody::Const(it) => Ok(it.into()), DefWithBody::Variant(it) => Ok(it.into()), - DefWithBody::InTypeConst(_) => Err(()), } } } diff --git a/src/tools/rust-analyzer/crates/ide-db/src/documentation.rs b/src/tools/rust-analyzer/crates/ide-db/src/documentation.rs index b83efcd02f772..ef2c83992c049 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/documentation.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/documentation.rs @@ -1,14 +1,15 @@ //! Documentation attribute related utilities. use either::Either; use hir::{ + AttrId, AttrSourceMap, AttrsWithOwner, HasAttrs, InFile, db::{DefDatabase, HirDatabase}, - resolve_doc_path_on, sym, AttrId, AttrSourceMap, AttrsWithOwner, HasAttrs, InFile, + resolve_doc_path_on, sym, }; use itertools::Itertools; use span::{TextRange, TextSize}; use syntax::{ - ast::{self, IsString}, AstToken, + ast::{self, IsString}, }; /// Holds documentation @@ -92,7 +93,7 @@ pub fn docs_with_rangemap( attrs: &AttrsWithOwner, ) -> Option<(Documentation, DocsRangeMap)> { let docs = attrs - .by_key(&sym::doc) + .by_key(sym::doc) .attrs() .filter_map(|attr| attr.string_value_unescape().map(|s| (s, attr.id))); let indent = doc_indent(attrs); @@ -134,7 +135,7 @@ pub fn docs_with_rangemap( } pub fn docs_from_attrs(attrs: &hir::Attrs) -> Option { - let docs = attrs.by_key(&sym::doc).attrs().filter_map(|attr| attr.string_value_unescape()); + let docs = attrs.by_key(sym::doc).attrs().filter_map(|attr| attr.string_value_unescape()); let indent = doc_indent(attrs); let mut buf = String::new(); for doc in docs { @@ -151,11 +152,7 @@ pub fn docs_from_attrs(attrs: &hir::Attrs) -> Option { buf.push('\n'); } buf.pop(); - if buf.is_empty() { - None - } else { - Some(buf) - } + if buf.is_empty() { None } else { Some(buf) } } macro_rules! impl_has_docs { @@ -269,7 +266,7 @@ fn get_doc_string_in_attr(it: &ast::Attr) -> Option { fn doc_indent(attrs: &hir::Attrs) -> usize { let mut min = !0; - for val in attrs.by_key(&sym::doc).attrs().filter_map(|attr| attr.string_value_unescape()) { + for val in attrs.by_key(sym::doc).attrs().filter_map(|attr| attr.string_value_unescape()) { if let Some(m) = val.lines().filter_map(|line| line.chars().position(|c| !c.is_whitespace())).min() { diff --git a/src/tools/rust-analyzer/crates/ide-db/src/famous_defs.rs b/src/tools/rust-analyzer/crates/ide-db/src/famous_defs.rs index 2f4d07446f2c1..994150b1ac4c2 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/famous_defs.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/famous_defs.rs @@ -1,6 +1,6 @@ //! See [`FamousDefs`]. -use base_db::{CrateOrigin, LangCrateOrigin, SourceDatabase}; +use base_db::{CrateOrigin, LangCrateOrigin}; use hir::{Crate, Enum, Function, Macro, Module, ScopeDef, Semantics, Trait}; use crate::RootDatabase; @@ -198,11 +198,10 @@ impl FamousDefs<'_, '_> { fn find_lang_crate(&self, origin: LangCrateOrigin) -> Option { let krate = self.1; let db = self.0.db; - let crate_graph = self.0.db.crate_graph(); let res = krate .dependencies(db) .into_iter() - .find(|dep| crate_graph[dep.krate.into()].origin == CrateOrigin::Lang(origin))? + .find(|dep| dep.krate.origin(db) == CrateOrigin::Lang(origin))? .krate; Some(res) } @@ -221,11 +220,7 @@ impl FamousDefs<'_, '_> { for segment in path { module = module.children(db).find_map(|child| { let name = child.name(db)?; - if name.as_str() == segment { - Some(child) - } else { - None - } + if name.as_str() == segment { Some(child) } else { None } })?; } let def = diff --git a/src/tools/rust-analyzer/crates/ide-db/src/generated/lints.rs b/src/tools/rust-analyzer/crates/ide-db/src/generated/lints.rs index 0a7a7d1fb2411..706d04484f6fe 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/generated/lints.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/generated/lints.rs @@ -3789,35 +3789,6 @@ The tracking issue for this feature is: [#64797] [#64797]: https://github.com/rust-lang/rust/issues/64797 ------------------------ -"##, - default_severity: Severity::Allow, - warn_since: None, - deny_since: None, - }, - Lint { - label: "cfg_boolean_literals", - description: r##"# `cfg_boolean_literals` - -The tracking issue for this feature is: [#131204] - -[#131204]: https://github.com/rust-lang/rust/issues/131204 - ------------------------- - -The `cfg_boolean_literals` feature makes it possible to use the `true`/`false` -literal as cfg predicate. They always evaluate to true/false respectively. - -## Examples - -```rust -#![feature(cfg_boolean_literals)] - -#[cfg(true)] -const A: i32 = 5; - -#[cfg(all(false))] -const A: i32 = 58 * 89; -``` "##, default_severity: Severity::Allow, warn_since: None, diff --git a/src/tools/rust-analyzer/crates/ide-db/src/helpers.rs b/src/tools/rust-analyzer/crates/ide-db/src/helpers.rs index 84fa58d743bbc..340429037e67a 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/helpers.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/helpers.rs @@ -2,17 +2,18 @@ use std::collections::VecDeque; -use base_db::SourceRootDatabase; +use base_db::SourceDatabase; use hir::{Crate, ItemInNs, ModuleDef, Name, Semantics}; use span::{Edition, FileId}; use syntax::{ - ast::{self, make}, AstToken, SyntaxKind, SyntaxToken, ToSmolStr, TokenAtOffset, + ast::{self, make}, }; use crate::{ + RootDatabase, defs::{Definition, IdentClass}, - generated, RootDatabase, + generated, }; pub fn item_name(db: &RootDatabase, item: ItemInNs) -> Option { @@ -108,8 +109,8 @@ pub fn lint_eq_or_in_group(lint: &str, lint_is: &str) -> bool { pub fn is_editable_crate(krate: Crate, db: &RootDatabase) -> bool { let root_file = krate.root_file(db); - let source_root_id = db.file_source_root(root_file); - !db.source_root(source_root_id).is_library + let source_root_id = db.file_source_root(root_file).source_root_id(db); + !db.source_root(source_root_id).source_root(db).is_library } // FIXME: This is a weird function diff --git a/src/tools/rust-analyzer/crates/ide-db/src/imports/import_assets.rs b/src/tools/rust-analyzer/crates/ide-db/src/imports/import_assets.rs index 77fc59b4eccb5..ac592dfe93cf9 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/imports/import_assets.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/imports/import_assets.rs @@ -3,20 +3,20 @@ use std::ops::ControlFlow; use hir::{ - db::HirDatabase, AsAssocItem, AssocItem, AssocItemContainer, Crate, HasCrate, ImportPathConfig, + AsAssocItem, AssocItem, AssocItemContainer, Complete, Crate, HasCrate, ImportPathConfig, ItemInNs, ModPath, Module, ModuleDef, Name, PathResolution, PrefixKind, ScopeDef, Semantics, - SemanticsScope, Trait, TyFingerprint, Type, + SemanticsScope, Trait, TyFingerprint, Type, db::HirDatabase, }; use itertools::Itertools; use rustc_hash::{FxHashMap, FxHashSet}; use syntax::{ - ast::{self, make, HasName}, AstNode, SyntaxNode, + ast::{self, HasName, make}, }; use crate::{ - items_locator::{self, AssocSearchMode, DEFAULT_QUERY_SEARCH_LIMIT}, FxIndexSet, RootDatabase, + items_locator::{self, AssocSearchMode, DEFAULT_QUERY_SEARCH_LIMIT}, }; /// A candidate for import, derived during various IDE activities: @@ -183,6 +183,9 @@ impl ImportAssets { } } +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct CompleteInFlyimport(pub bool); + /// An import (not necessary the only one) that corresponds a certain given [`PathImportCandidate`]. /// (the structure is not entirely correct, since there can be situations requiring two imports, see FIXME below for the details) #[derive(Debug, Clone, PartialEq, Eq, Hash)] @@ -198,11 +201,31 @@ pub struct LocatedImport { /// the original item is the associated constant, but the import has to be a trait that /// defines this constant. pub original_item: ItemInNs, + /// The value of `#[rust_analyzer::completions(...)]`, if existing. + pub complete_in_flyimport: CompleteInFlyimport, } impl LocatedImport { - pub fn new(import_path: ModPath, item_to_import: ItemInNs, original_item: ItemInNs) -> Self { - Self { import_path, item_to_import, original_item } + pub fn new( + import_path: ModPath, + item_to_import: ItemInNs, + original_item: ItemInNs, + complete_in_flyimport: CompleteInFlyimport, + ) -> Self { + Self { import_path, item_to_import, original_item, complete_in_flyimport } + } + + pub fn new_no_completion( + import_path: ModPath, + item_to_import: ItemInNs, + original_item: ItemInNs, + ) -> Self { + Self { + import_path, + item_to_import, + original_item, + complete_in_flyimport: CompleteInFlyimport(true), + } } } @@ -273,12 +296,13 @@ impl ImportAssets { Some(it) => it, None => return >::default().into_iter(), }; + let db = sema.db; let krate = self.module_with_candidate.krate(); let scope_definitions = self.scope_definitions(sema); let mod_path = |item| { get_mod_path( - sema.db, - item_for_path_search(sema.db, item)?, + db, + item_for_path_search(db, item)?, &self.module_with_candidate, prefixed, cfg, @@ -288,7 +312,7 @@ impl ImportAssets { match &self.import_candidate { ImportCandidate::Path(path_candidate) => path_applicable_imports( - sema, + db, &scope, krate, path_candidate, @@ -297,7 +321,7 @@ impl ImportAssets { ), ImportCandidate::TraitAssocItem(trait_candidate) | ImportCandidate::TraitMethod(trait_candidate) => trait_applicable_items( - sema, + db, krate, &scope, trait_candidate, @@ -325,7 +349,7 @@ impl ImportAssets { } fn path_applicable_imports( - sema: &Semantics<'_, RootDatabase>, + db: &RootDatabase, scope: &SemanticsScope<'_>, current_crate: Crate, path_candidate: &PathImportCandidate, @@ -337,7 +361,7 @@ fn path_applicable_imports( match &*path_candidate.qualifier { [] => { items_locator::items_with_name( - sema, + db, current_crate, path_candidate.name.clone(), // FIXME: we could look up assoc items by the input and propose those in completion, @@ -350,12 +374,17 @@ fn path_applicable_imports( // see also an ignored test under FIXME comment in the qualify_path.rs module AssocSearchMode::Exclude, ) - .filter_map(|item| { + .filter_map(|(item, do_not_complete)| { if !scope_filter(item) { return None; } let mod_path = mod_path(item)?; - Some(LocatedImport::new(mod_path, item, item)) + Some(LocatedImport::new( + mod_path, + item, + item, + CompleteInFlyimport(do_not_complete != Complete::IgnoreFlyimport), + )) }) .take(DEFAULT_QUERY_SEARCH_LIMIT) .collect() @@ -365,22 +394,23 @@ fn path_applicable_imports( // what follows // FIXME: This doesn't handle visibility [first_qsegment, qualifier_rest @ ..] => items_locator::items_with_name( - sema, + db, current_crate, NameToImport::Exact(first_qsegment.as_str().to_owned(), true), AssocSearchMode::Exclude, ) - .filter_map(|item| { + .filter_map(|(item, do_not_complete)| { // we found imports for `first_qsegment`, now we need to filter these imports by whether // they result in resolving the rest of the path successfully validate_resolvable( - sema, + db, scope, mod_path, scope_filter, &path_candidate.name, item, qualifier_rest, + CompleteInFlyimport(do_not_complete != Complete::IgnoreFlyimport), ) }) .take(DEFAULT_QUERY_SEARCH_LIMIT) @@ -391,13 +421,14 @@ fn path_applicable_imports( /// Validates and builds an import for `resolved_qualifier` if the `unresolved_qualifier` appended /// to it resolves and there is a validate `candidate` after that. fn validate_resolvable( - sema: &Semantics<'_, RootDatabase>, + db: &RootDatabase, scope: &SemanticsScope<'_>, mod_path: impl Fn(ItemInNs) -> Option, scope_filter: impl Fn(ItemInNs) -> bool, candidate: &NameToImport, resolved_qualifier: ItemInNs, unresolved_qualifier: &[Name], + complete_in_flyimport: CompleteInFlyimport, ) -> Option { let _p = tracing::info_span!("ImportAssets::import_for_item").entered(); @@ -406,8 +437,8 @@ fn validate_resolvable( if !unresolved_qualifier.is_empty() { match resolved_qualifier { ItemInNs::Types(ModuleDef::Module(module)) => { - adjusted_resolved_qualifier = sema - .resolve_mod_path_relative(module, unresolved_qualifier.iter().cloned())? + adjusted_resolved_qualifier = module + .resolve_mod_path(db, unresolved_qualifier.iter().cloned())? .next()?; } // can't resolve multiple segments for non-module item path bases @@ -424,7 +455,7 @@ fn validate_resolvable( let ty = match qualifier { ModuleDef::Module(module) => { return items_locator::items_with_name_in_module( - sema, + db, module, candidate.clone(), AssocSearchMode::Exclude, @@ -433,23 +464,30 @@ fn validate_resolvable( false => ControlFlow::Continue(()), }, ) - .map(|item| LocatedImport::new(import_path_candidate, resolved_qualifier, item)) + .map(|item| { + LocatedImport::new( + import_path_candidate, + resolved_qualifier, + item, + complete_in_flyimport, + ) + }); } // FIXME ModuleDef::Trait(_) => return None, // FIXME ModuleDef::TraitAlias(_) => return None, - ModuleDef::TypeAlias(alias) => alias.ty(sema.db), - ModuleDef::BuiltinType(builtin) => builtin.ty(sema.db), - ModuleDef::Adt(adt) => adt.ty(sema.db), + ModuleDef::TypeAlias(alias) => alias.ty(db), + ModuleDef::BuiltinType(builtin) => builtin.ty(db), + ModuleDef::Adt(adt) => adt.ty(db), _ => return None, }; - ty.iterate_path_candidates(sema.db, scope, &FxHashSet::default(), None, None, |assoc| { + ty.iterate_path_candidates(db, scope, &FxHashSet::default(), None, None, |assoc| { // FIXME: Support extra trait imports - if assoc.container_or_implemented_trait(sema.db).is_some() { + if assoc.container_or_implemented_trait(db).is_some() { return None; } - let name = assoc.name(sema.db)?; + let name = assoc.name(db)?; let is_match = match candidate { NameToImport::Prefix(text, true) => name.as_str().starts_with(text), NameToImport::Prefix(text, false) => { @@ -471,6 +509,7 @@ fn validate_resolvable( import_path_candidate.clone(), resolved_qualifier, assoc_to_item(assoc), + complete_in_flyimport, )) }) } @@ -495,7 +534,7 @@ fn item_for_path_search_assoc(db: &RootDatabase, assoc_item: AssocItem) -> Optio } fn trait_applicable_items( - sema: &Semantics<'_, RootDatabase>, + db: &RootDatabase, current_crate: Crate, scope: &SemanticsScope<'_>, trait_candidate: &TraitImportCandidate, @@ -505,21 +544,19 @@ fn trait_applicable_items( ) -> FxIndexSet { let _p = tracing::info_span!("ImportAssets::trait_applicable_items").entered(); - let db = sema.db; - let inherent_traits = trait_candidate.receiver_ty.applicable_inherent_traits(db); let env_traits = trait_candidate.receiver_ty.env_traits(db); let related_traits = inherent_traits.chain(env_traits).collect::>(); - let mut required_assoc_items = FxHashSet::default(); + let mut required_assoc_items = FxHashMap::default(); let mut trait_candidates: FxHashSet<_> = items_locator::items_with_name( - sema, + db, current_crate, trait_candidate.assoc_item_name.clone(), AssocSearchMode::AssocItemsOnly, ) - .filter_map(|input| item_as_assoc(db, input)) - .filter_map(|assoc| { + .filter_map(|(input, do_not_complete)| Some((item_as_assoc(db, input)?, do_not_complete))) + .filter_map(|(assoc, do_not_complete)| { if !trait_assoc_item && matches!(assoc, AssocItem::Const(_) | AssocItem::TypeAlias(_)) { return None; } @@ -528,7 +565,8 @@ fn trait_applicable_items( if related_traits.contains(&assoc_item_trait) { return None; } - required_assoc_items.insert(assoc); + required_assoc_items + .insert(assoc, CompleteInFlyimport(do_not_complete != Complete::IgnoreFlyimport)); Some(assoc_item_trait.into()) }) .collect(); @@ -600,7 +638,7 @@ fn trait_applicable_items( None, None, |assoc| { - if required_assoc_items.contains(&assoc) { + if let Some(&complete_in_flyimport) = required_assoc_items.get(&assoc) { let located_trait = assoc.container_trait(db).filter(|&it| scope_filter(it))?; let trait_item = ItemInNs::from(ModuleDef::from(located_trait)); let import_path = trait_import_paths @@ -611,6 +649,7 @@ fn trait_applicable_items( import_path, trait_item, assoc_to_item(assoc), + complete_in_flyimport, )); } None::<()> @@ -625,7 +664,7 @@ fn trait_applicable_items( None, |function| { let assoc = function.as_assoc_item(db)?; - if required_assoc_items.contains(&assoc) { + if let Some(&complete_in_flyimport) = required_assoc_items.get(&assoc) { let located_trait = assoc.container_trait(db).filter(|&it| scope_filter(it))?; let trait_item = ItemInNs::from(ModuleDef::from(located_trait)); let import_path = trait_import_paths @@ -636,6 +675,7 @@ fn trait_applicable_items( import_path, trait_item, assoc_to_item(assoc), + complete_in_flyimport, )); } None::<()> diff --git a/src/tools/rust-analyzer/crates/ide-db/src/imports/insert_use.rs b/src/tools/rust-analyzer/crates/ide-db/src/imports/insert_use.rs index 8e25ad3472d3b..d26e5d62ced51 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/imports/insert_use.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/imports/insert_use.rs @@ -6,20 +6,20 @@ use std::cmp::Ordering; use hir::Semantics; use syntax::{ - algo, + Direction, NodeOrToken, SyntaxKind, SyntaxNode, algo, ast::{ - self, edit_in_place::Removable, make, AstNode, HasAttrs, HasModuleItem, HasVisibility, - PathSegmentKind, + self, AstNode, HasAttrs, HasModuleItem, HasVisibility, PathSegmentKind, + edit_in_place::Removable, make, }, - ted, Direction, NodeOrToken, SyntaxKind, SyntaxNode, + ted, }; use crate::{ + RootDatabase, imports::merge_imports::{ - common_prefix, eq_attrs, eq_visibility, try_merge_imports, use_tree_cmp, MergeBehavior, - NormalizationStyle, + MergeBehavior, NormalizationStyle, common_prefix, eq_attrs, eq_visibility, + try_merge_imports, use_tree_cmp, }, - RootDatabase, }; pub use hir::PrefixKind; diff --git a/src/tools/rust-analyzer/crates/ide-db/src/imports/insert_use/tests.rs b/src/tools/rust-analyzer/crates/ide-db/src/imports/insert_use/tests.rs index decb0ea9d8a8a..428ba1d511897 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/imports/insert_use/tests.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/imports/insert_use/tests.rs @@ -1,6 +1,6 @@ use stdx::trim_indent; use test_fixture::WithFixture; -use test_utils::{assert_eq_text, CURSOR_MARKER}; +use test_utils::{CURSOR_MARKER, assert_eq_text}; use super::*; @@ -1250,9 +1250,11 @@ fn check_with_config( ) { let (db, file_id, pos) = if ra_fixture_before.contains(CURSOR_MARKER) { let (db, file_id, range_or_offset) = RootDatabase::with_range_or_offset(ra_fixture_before); + (db, file_id, Some(range_or_offset)) } else { let (db, file_id) = RootDatabase::with_single_file(ra_fixture_before); + (db, file_id, None) }; let sema = &Semantics::new(&db); diff --git a/src/tools/rust-analyzer/crates/ide-db/src/imports/merge_imports.rs b/src/tools/rust-analyzer/crates/ide-db/src/imports/merge_imports.rs index 9e89dfe87abe5..61962e593476c 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/imports/merge_imports.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/imports/merge_imports.rs @@ -5,13 +5,12 @@ use itertools::{EitherOrBoth, Itertools}; use parser::T; use stdx::is_upper_snake_case; use syntax::{ - algo, + Direction, SyntaxElement, algo, ast::{ - self, edit_in_place::Removable, make, AstNode, HasAttrs, HasName, HasVisibility, - PathSegmentKind, + self, AstNode, HasAttrs, HasName, HasVisibility, PathSegmentKind, edit_in_place::Removable, + make, }, ted::{self, Position}, - Direction, SyntaxElement, }; use crate::syntax_helpers::node_ext::vis_eq; @@ -191,7 +190,7 @@ fn recursive_merge(lhs: &ast::UseTree, rhs: &ast::UseTree, merge: MergeBehavior) && !use_trees.is_empty() && rhs_t.use_tree_list().is_some() => { - return None + return None; } Err(insert_idx) => { use_trees.insert(insert_idx, rhs_t.clone()); diff --git a/src/tools/rust-analyzer/crates/ide-db/src/items_locator.rs b/src/tools/rust-analyzer/crates/ide-db/src/items_locator.rs index 4d9c051354a61..4b0a84a559e23 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/items_locator.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/items_locator.rs @@ -5,12 +5,12 @@ use std::ops::ControlFlow; use either::Either; -use hir::{import_map, Crate, ItemInNs, Module, Semantics}; +use hir::{Complete, Crate, ItemInNs, Module, import_map}; use crate::{ + RootDatabase, imports::import_assets::NameToImport, symbol_index::{self, SymbolsDatabase as _}, - RootDatabase, }; /// A value to use, when uncertain which limit to pick. @@ -20,13 +20,13 @@ pub use import_map::AssocSearchMode; // FIXME: Do callbacks instead to avoid allocations. /// Searches for importable items with the given name in the crate and its dependencies. -pub fn items_with_name<'a>( - sema: &'a Semantics<'_, RootDatabase>, +pub fn items_with_name( + db: &RootDatabase, krate: Crate, name: NameToImport, assoc_item_search: AssocSearchMode, -) -> impl Iterator + 'a { - let _p = tracing::info_span!("items_with_name", name = name.text(), assoc_item_search = ?assoc_item_search, crate = ?krate.display_name(sema.db).map(|name| name.to_string())) +) -> impl Iterator { + let _p = tracing::info_span!("items_with_name", name = name.text(), assoc_item_search = ?assoc_item_search, crate = ?krate.display_name(db).map(|name| name.to_string())) .entered(); let prefix = matches!(name, NameToImport::Prefix(..)); @@ -68,12 +68,12 @@ pub fn items_with_name<'a>( } }; - find_items(sema, krate, local_query, external_query) + find_items(db, krate, local_query, external_query) } /// Searches for importable items with the given name in the crate and its dependencies. pub fn items_with_name_in_module( - sema: &Semantics<'_, RootDatabase>, + db: &RootDatabase, module: Module, name: NameToImport, assoc_item_search: AssocSearchMode, @@ -86,7 +86,7 @@ pub fn items_with_name_in_module( let local_query = match name { NameToImport::Prefix(exact_name, case_sensitive) | NameToImport::Exact(exact_name, case_sensitive) => { - let mut local_query = symbol_index::Query::new(exact_name.clone()); + let mut local_query = symbol_index::Query::new(exact_name); local_query.assoc_search_mode(assoc_item_search); if prefix { local_query.prefix(); @@ -99,7 +99,7 @@ pub fn items_with_name_in_module( local_query } NameToImport::Fuzzy(fuzzy_search_string, case_sensitive) => { - let mut local_query = symbol_index::Query::new(fuzzy_search_string.clone()); + let mut local_query = symbol_index::Query::new(fuzzy_search_string); local_query.fuzzy(); local_query.assoc_search_mode(assoc_item_search); @@ -110,7 +110,7 @@ pub fn items_with_name_in_module( local_query } }; - local_query.search(&[sema.db.module_symbols(module)], |local_candidate| { + local_query.search(&[db.module_symbols(module)], |local_candidate| { cb(match local_candidate.def { hir::ModuleDef::Macro(macro_def) => ItemInNs::Macros(macro_def), def => ItemInNs::from(def), @@ -118,32 +118,34 @@ pub fn items_with_name_in_module( }) } -fn find_items<'a>( - sema: &'a Semantics<'_, RootDatabase>, +fn find_items( + db: &RootDatabase, krate: Crate, local_query: symbol_index::Query, external_query: import_map::Query, -) -> impl Iterator + 'a { +) -> impl Iterator { let _p = tracing::info_span!("find_items").entered(); - let db = sema.db; // NOTE: `external_query` includes `assoc_item_search`, so we don't need to // filter on our own. - let external_importables = - krate.query_external_importables(db, external_query).map(|external_importable| { - match external_importable { + let external_importables = krate.query_external_importables(db, external_query).map( + |(external_importable, do_not_complete)| { + let external_importable = match external_importable { Either::Left(module_def) => ItemInNs::from(module_def), Either::Right(macro_def) => ItemInNs::from(macro_def), - } - }); + }; + (external_importable, do_not_complete) + }, + ); // Query the local crate using the symbol index. let mut local_results = Vec::new(); local_query.search(&symbol_index::crate_symbols(db, krate), |local_candidate| { - local_results.push(match local_candidate.def { + let def = match local_candidate.def { hir::ModuleDef::Macro(macro_def) => ItemInNs::Macros(macro_def), def => ItemInNs::from(def), - }); + }; + local_results.push((def, local_candidate.do_not_complete)); ControlFlow::<()>::Continue(()) }); local_results.into_iter().chain(external_importables) diff --git a/src/tools/rust-analyzer/crates/ide-db/src/lib.rs b/src/tools/rust-analyzer/crates/ide-db/src/lib.rs index 96115eee6dc2a..63cc7cde28081 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/lib.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/lib.rs @@ -45,18 +45,18 @@ pub mod syntax_helpers { pub use parser::LexedStr; } -pub use hir::ChangeWithProcMacros; +pub use hir::{ChangeWithProcMacros, EditionedFileId}; +use salsa::Durability; use std::{fmt, mem::ManuallyDrop}; use base_db::{ - ra_salsa::{self, Durability}, - AnchoredPath, CrateId, FileLoader, FileLoaderDelegate, SourceDatabase, Upcast, - DEFAULT_FILE_TEXT_LRU_CAP, + CrateGraphBuilder, CratesMap, FileSourceRootInput, FileText, Files, RootQueryDb, + SourceDatabase, SourceRoot, SourceRootId, SourceRootInput, query_group, }; use hir::{ - db::{DefDatabase, ExpandDatabase, HirDatabase}, FilePositionWrapper, FileRangeWrapper, + db::{DefDatabase, ExpandDatabase}, }; use triomphe::Arc; @@ -67,7 +67,7 @@ pub use ::line_index; /// `base_db` is normally also needed in places where `ide_db` is used, so this re-export is for convenience. pub use base_db; -pub use span::{EditionedFileId, FileId}; +pub use span::{self, FileId}; pub type FxIndexSet = indexmap::IndexSet>; pub type FxIndexMap = @@ -76,22 +76,24 @@ pub type FxIndexMap = pub type FilePosition = FilePositionWrapper; pub type FileRange = FileRangeWrapper; -#[ra_salsa::database( - base_db::SourceRootDatabaseStorage, - base_db::SourceDatabaseStorage, - hir::db::ExpandDatabaseStorage, - hir::db::DefDatabaseStorage, - hir::db::HirDatabaseStorage, - hir::db::InternDatabaseStorage, - LineIndexDatabaseStorage, - symbol_index::SymbolsDatabaseStorage -)] +#[salsa_macros::db] pub struct RootDatabase { + // FIXME: Revisit this commit now that we migrated to the new salsa, given we store arcs in this + // db directly now // We use `ManuallyDrop` here because every codegen unit that contains a // `&RootDatabase -> &dyn OtherDatabase` cast will instantiate its drop glue in the vtable, // which duplicates `Weak::drop` and `Arc::drop` tens of thousands of times, which makes // compile times of all `ide_*` and downstream crates suffer greatly. - storage: ManuallyDrop>, + storage: ManuallyDrop>, + files: Arc, + crates_map: Arc, +} + +impl std::panic::RefUnwindSafe for RootDatabase {} + +#[salsa_macros::db] +impl salsa::Database for RootDatabase { + fn salsa_event(&self, _event: &dyn Fn() -> salsa::Event) {} } impl Drop for RootDatabase { @@ -100,43 +102,76 @@ impl Drop for RootDatabase { } } +impl Clone for RootDatabase { + fn clone(&self) -> Self { + Self { + storage: self.storage.clone(), + files: self.files.clone(), + crates_map: self.crates_map.clone(), + } + } +} + impl fmt::Debug for RootDatabase { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_struct("RootDatabase").finish() } } -impl Upcast for RootDatabase { - #[inline] - fn upcast(&self) -> &(dyn ExpandDatabase + 'static) { - self +#[salsa_macros::db] +impl SourceDatabase for RootDatabase { + fn file_text(&self, file_id: vfs::FileId) -> FileText { + self.files.file_text(file_id) } -} -impl Upcast for RootDatabase { - #[inline] - fn upcast(&self) -> &(dyn DefDatabase + 'static) { - self + fn set_file_text(&mut self, file_id: vfs::FileId, text: &str) { + let files = Arc::clone(&self.files); + files.set_file_text(self, file_id, text); } -} -impl Upcast for RootDatabase { - #[inline] - fn upcast(&self) -> &(dyn HirDatabase + 'static) { - self + fn set_file_text_with_durability( + &mut self, + file_id: vfs::FileId, + text: &str, + durability: Durability, + ) { + let files = Arc::clone(&self.files); + files.set_file_text_with_durability(self, file_id, text, durability); + } + + /// Source root of the file. + fn source_root(&self, source_root_id: SourceRootId) -> SourceRootInput { + self.files.source_root(source_root_id) } -} -impl FileLoader for RootDatabase { - fn resolve_path(&self, path: AnchoredPath<'_>) -> Option { - FileLoaderDelegate(self).resolve_path(path) + fn set_source_root_with_durability( + &mut self, + source_root_id: SourceRootId, + source_root: Arc, + durability: Durability, + ) { + let files = Arc::clone(&self.files); + files.set_source_root_with_durability(self, source_root_id, source_root, durability); } - fn relevant_crates(&self, file_id: FileId) -> Arc<[CrateId]> { - FileLoaderDelegate(self).relevant_crates(file_id) + + fn file_source_root(&self, id: vfs::FileId) -> FileSourceRootInput { + self.files.file_source_root(id) } -} -impl ra_salsa::Database for RootDatabase {} + fn set_file_source_root_with_durability( + &mut self, + id: vfs::FileId, + source_root_id: SourceRootId, + durability: Durability, + ) { + let files = Arc::clone(&self.files); + files.set_file_source_root_with_durability(self, id, source_root_id, durability); + } + + fn crates_map(&self) -> Arc { + self.crates_map.clone() + } +} impl Default for RootDatabase { fn default() -> RootDatabase { @@ -146,14 +181,19 @@ impl Default for RootDatabase { impl RootDatabase { pub fn new(lru_capacity: Option) -> RootDatabase { - let mut db = RootDatabase { storage: ManuallyDrop::new(ra_salsa::Storage::default()) }; - db.set_crate_graph_with_durability(Default::default(), Durability::HIGH); - db.set_proc_macros_with_durability(Default::default(), Durability::HIGH); - db.set_local_roots_with_durability(Default::default(), Durability::HIGH); - db.set_library_roots_with_durability(Default::default(), Durability::HIGH); + let mut db = RootDatabase { + storage: ManuallyDrop::new(salsa::Storage::default()), + files: Default::default(), + crates_map: Default::default(), + }; + // This needs to be here otherwise `CrateGraphBuilder` will panic. + db.set_all_crates(Arc::new(Box::new([]))); + CrateGraphBuilder::default().set_in_db(&mut db); + db.set_proc_macros_with_durability(Default::default(), Durability::MEDIUM); + db.set_local_roots_with_durability(Default::default(), Durability::MEDIUM); + db.set_library_roots_with_durability(Default::default(), Durability::MEDIUM); db.set_expand_proc_attr_macros_with_durability(false, Durability::HIGH); db.update_base_query_lru_capacities(lru_capacity); - db.setup_syntax_context_root(); db } @@ -161,57 +201,51 @@ impl RootDatabase { self.set_expand_proc_attr_macros_with_durability(true, Durability::HIGH); } - pub fn update_base_query_lru_capacities(&mut self, lru_capacity: Option) { - let lru_capacity = lru_capacity.unwrap_or(base_db::DEFAULT_PARSE_LRU_CAP); - base_db::FileTextQuery.in_db_mut(self).set_lru_capacity(DEFAULT_FILE_TEXT_LRU_CAP); - base_db::ParseQuery.in_db_mut(self).set_lru_capacity(lru_capacity); - // macro expansions are usually rather small, so we can afford to keep more of them alive - hir::db::ParseMacroExpansionQuery.in_db_mut(self).set_lru_capacity(4 * lru_capacity); - hir::db::BorrowckQuery.in_db_mut(self).set_lru_capacity(base_db::DEFAULT_BORROWCK_LRU_CAP); - hir::db::BodyWithSourceMapQuery.in_db_mut(self).set_lru_capacity(2048); + pub fn update_base_query_lru_capacities(&mut self, _lru_capacity: Option) { + // let lru_capacity = lru_capacity.unwrap_or(base_db::DEFAULT_PARSE_LRU_CAP); + // base_db::FileTextQuery.in_db_mut(self).set_lru_capacity(DEFAULT_FILE_TEXT_LRU_CAP); + // base_db::ParseQuery.in_db_mut(self).set_lru_capacity(lru_capacity); + // // macro expansions are usually rather small, so we can afford to keep more of them alive + // hir::db::ParseMacroExpansionQuery.in_db_mut(self).set_lru_capacity(4 * lru_capacity); + // hir::db::BorrowckQuery.in_db_mut(self).set_lru_capacity(base_db::DEFAULT_BORROWCK_LRU_CAP); + // hir::db::BodyWithSourceMapQuery.in_db_mut(self).set_lru_capacity(2048); } - pub fn update_lru_capacities(&mut self, lru_capacities: &FxHashMap, u16>) { - use hir::db as hir_db; - - base_db::FileTextQuery.in_db_mut(self).set_lru_capacity(DEFAULT_FILE_TEXT_LRU_CAP); - base_db::ParseQuery.in_db_mut(self).set_lru_capacity( - lru_capacities - .get(stringify!(ParseQuery)) - .copied() - .unwrap_or(base_db::DEFAULT_PARSE_LRU_CAP), - ); - hir_db::ParseMacroExpansionQuery.in_db_mut(self).set_lru_capacity( - lru_capacities - .get(stringify!(ParseMacroExpansionQuery)) - .copied() - .unwrap_or(4 * base_db::DEFAULT_PARSE_LRU_CAP), - ); - hir_db::BorrowckQuery.in_db_mut(self).set_lru_capacity( - lru_capacities - .get(stringify!(BorrowckQuery)) - .copied() - .unwrap_or(base_db::DEFAULT_BORROWCK_LRU_CAP), - ); - hir::db::BodyWithSourceMapQuery.in_db_mut(self).set_lru_capacity(2048); + pub fn update_lru_capacities(&mut self, _lru_capacities: &FxHashMap, u16>) { + // FIXME(salsa-transition): bring this back; allow changing LRU settings at runtime. + // use hir::db as hir_db; + + // base_db::FileTextQuery.in_db_mut(self).set_lru_capacity(DEFAULT_FILE_TEXT_LRU_CAP); + // base_db::ParseQuery.in_db_mut(self).set_lru_capacity( + // lru_capacities + // .get(stringify!(ParseQuery)) + // .copied() + // .unwrap_or(base_db::DEFAULT_PARSE_LRU_CAP), + // ); + // hir_db::ParseMacroExpansionQuery.in_db_mut(self).set_lru_capacity( + // lru_capacities + // .get(stringify!(ParseMacroExpansionQuery)) + // .copied() + // .unwrap_or(4 * base_db::DEFAULT_PARSE_LRU_CAP), + // ); + // hir_db::BorrowckQuery.in_db_mut(self).set_lru_capacity( + // lru_capacities + // .get(stringify!(BorrowckQuery)) + // .copied() + // .unwrap_or(base_db::DEFAULT_BORROWCK_LRU_CAP), + // ); + // hir::db::BodyWithSourceMapQuery.in_db_mut(self).set_lru_capacity(2048); } } -impl ra_salsa::ParallelDatabase for RootDatabase { - fn snapshot(&self) -> ra_salsa::Snapshot { - ra_salsa::Snapshot::new(RootDatabase { - storage: ManuallyDrop::new(self.storage.snapshot()), - }) - } -} - -#[ra_salsa::query_group(LineIndexDatabaseStorage)] -pub trait LineIndexDatabase: base_db::SourceDatabase { +#[query_group::query_group] +pub trait LineIndexDatabase: base_db::RootQueryDb { + #[salsa::invoke_interned(line_index)] fn line_index(&self, file_id: FileId) -> Arc; } fn line_index(db: &dyn LineIndexDatabase, file_id: FileId) -> Arc { - let text = db.file_text(file_id); + let text = db.file_text(file_id).text(db); Arc::new(LineIndex::new(&text)) } @@ -288,11 +322,7 @@ pub struct SnippetCap { impl SnippetCap { pub const fn new(allow_snippets: bool) -> Option { - if allow_snippets { - Some(SnippetCap { _private: () }) - } else { - None - } + if allow_snippets { Some(SnippetCap { _private: () }) } else { None } } } diff --git a/src/tools/rust-analyzer/crates/ide-db/src/path_transform.rs b/src/tools/rust-analyzer/crates/ide-db/src/path_transform.rs index a348a4ef7d3fb..232648af661ff 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/path_transform.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/path_transform.rs @@ -7,8 +7,9 @@ use itertools::Itertools; use rustc_hash::FxHashMap; use span::Edition; use syntax::{ - ast::{self, make, AstNode, HasGenericArgs}, - ted, NodeOrToken, SyntaxNode, + NodeOrToken, SyntaxNode, + ast::{self, AstNode, HasGenericArgs, make}, + ted, }; #[derive(Default)] @@ -209,7 +210,7 @@ impl<'a> PathTransform<'a> { .flat_map(|it| it.lifetime_params(db)) .zip(self.substs.lifetimes.clone()) .filter_map(|(k, v)| { - Some((k.name(db).display(db.upcast(), target_edition).to_string(), v.lifetime()?)) + Some((k.name(db).display(db, target_edition).to_string(), v.lifetime()?)) }) .collect(); let ctx = Ctx { @@ -324,7 +325,7 @@ impl Ctx<'_> { allow_unstable: true, }; let found_path = self.target_module.find_path( - self.source_scope.db.upcast(), + self.source_scope.db, hir::ModuleDef::Trait(trait_ref), cfg, )?; @@ -383,8 +384,7 @@ impl Ctx<'_> { prefer_absolute: false, allow_unstable: true, }; - let found_path = - self.target_module.find_path(self.source_scope.db.upcast(), def, cfg)?; + let found_path = self.target_module.find_path(self.source_scope.db, def, cfg)?; let res = mod_path_to_ast(&found_path, self.target_edition).clone_for_update(); if let Some(args) = path.segment().and_then(|it| it.generic_arg_list()) { if let Some(segment) = res.segment() { @@ -424,7 +424,7 @@ impl Ctx<'_> { allow_unstable: true, }; let found_path = self.target_module.find_path( - self.source_scope.db.upcast(), + self.source_scope.db, ModuleDef::from(adt), cfg, )?; diff --git a/src/tools/rust-analyzer/crates/ide-db/src/prime_caches.rs b/src/tools/rust-analyzer/crates/ide-db/src/prime_caches.rs index 22dc3d9e29d65..cbe31405ab787 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/prime_caches.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/prime_caches.rs @@ -6,16 +6,14 @@ mod topologic_sort; use std::time::Duration; -use hir::{db::DefDatabase, Symbol}; +use hir::{Symbol, db::DefDatabase}; use itertools::Itertools; +use salsa::{Cancelled, Database}; use crate::{ - base_db::{ - ra_salsa::{Database, ParallelDatabase, Snapshot}, - Cancelled, CrateId, SourceDatabase, - }, - symbol_index::SymbolsDatabase, FxIndexMap, RootDatabase, + base_db::{Crate, RootQueryDb}, + symbol_index::SymbolsDatabase, }; /// We're indexing many crates. @@ -37,20 +35,23 @@ pub fn parallel_prime_caches( ) { let _p = tracing::info_span!("parallel_prime_caches").entered(); - let graph = db.crate_graph(); let mut crates_to_prime = { + // FIXME: We already have the crate list topologically sorted (but without the things + // `TopologicalSortIter` gives us). Maybe there is a way to avoid using it and rip it out + // of the codebase? let mut builder = topologic_sort::TopologicalSortIter::builder(); - for crate_id in graph.iter() { - builder.add(crate_id, graph[crate_id].dependencies.iter().map(|d| d.crate_id)); + for &crate_id in db.all_crates().iter() { + builder.add(crate_id, crate_id.data(db).dependencies.iter().map(|d| d.crate_id)); } builder.build() }; enum ParallelPrimeCacheWorkerProgress { - BeginCrate { crate_id: CrateId, crate_name: Symbol }, - EndCrate { crate_id: CrateId }, + BeginCrate { crate_id: Crate, crate_name: Symbol }, + EndCrate { crate_id: Crate }, + Cancelled(Cancelled), } // We split off def map computation from other work, @@ -66,32 +67,40 @@ pub fn parallel_prime_caches( let (work_sender, progress_receiver) = { let (progress_sender, progress_receiver) = crossbeam_channel::unbounded(); let (work_sender, work_receiver) = crossbeam_channel::unbounded(); - let prime_caches_worker = move |db: Snapshot| { + let prime_caches_worker = move |db: RootDatabase| { while let Ok((crate_id, crate_name, kind)) = work_receiver.recv() { progress_sender .send(ParallelPrimeCacheWorkerProgress::BeginCrate { crate_id, crate_name })?; - match kind { + let cancelled = Cancelled::catch(|| match kind { PrimingPhase::DefMap => _ = db.crate_def_map(crate_id), PrimingPhase::ImportMap => _ = db.import_map(crate_id), PrimingPhase::CrateSymbols => _ = db.crate_symbols(crate_id.into()), - } + }); - progress_sender.send(ParallelPrimeCacheWorkerProgress::EndCrate { crate_id })?; + match cancelled { + Ok(()) => progress_sender + .send(ParallelPrimeCacheWorkerProgress::EndCrate { crate_id })?, + Err(cancelled) => progress_sender + .send(ParallelPrimeCacheWorkerProgress::Cancelled(cancelled))?, + } } Ok::<_, crossbeam_channel::SendError<_>>(()) }; for id in 0..num_worker_threads { - let worker = prime_caches_worker.clone(); - let db = db.snapshot(); - - stdx::thread::Builder::new(stdx::thread::ThreadIntent::Worker) - .allow_leak(true) - .name(format!("PrimeCaches#{id}")) - .spawn(move || Cancelled::catch(|| worker(db))) - .expect("failed to spawn thread"); + stdx::thread::Builder::new( + stdx::thread::ThreadIntent::Worker, + format!("PrimeCaches#{id}"), + ) + .allow_leak(true) + .spawn({ + let worker = prime_caches_worker.clone(); + let db = db.clone(); + move || worker(db) + }) + .expect("failed to spawn thread"); } (work_sender, progress_receiver) @@ -108,18 +117,16 @@ pub fn parallel_prime_caches( let mut additional_phases = vec![]; while crates_done < crates_total { - db.unwind_if_cancelled(); - - for crate_id in &mut crates_to_prime { - let krate = &graph[crate_id]; - let name = krate - .display_name - .as_deref() - .cloned() - .unwrap_or_else(|| Symbol::integer(crate_id.into_raw().into_u32() as usize)); - if krate.origin.is_lang() { - additional_phases.push((crate_id, name.clone(), PrimingPhase::ImportMap)); - } else if krate.origin.is_local() { + db.unwind_if_revision_cancelled(); + + for krate in &mut crates_to_prime { + let name = krate.extra_data(db).display_name.as_deref().cloned().unwrap_or_else(|| { + Symbol::integer(salsa::plumbing::AsId::as_id(&krate).as_u32() as usize) + }); + let origin = &krate.data(db).origin; + if origin.is_lang() { + additional_phases.push((krate, name.clone(), PrimingPhase::ImportMap)); + } else if origin.is_local() { // Compute the symbol search index. // This primes the cache for `ide_db::symbol_index::world_symbols()`. // @@ -129,10 +136,10 @@ pub fn parallel_prime_caches( // FIXME: We should do it unconditionally if the configuration is set to default to // searching dependencies (rust-analyzer.workspace.symbol.search.scope), but we // would need to pipe that configuration information down here. - additional_phases.push((crate_id, name.clone(), PrimingPhase::CrateSymbols)); + additional_phases.push((krate, name.clone(), PrimingPhase::CrateSymbols)); } - work_sender.send((crate_id, name, PrimingPhase::DefMap)).ok(); + work_sender.send((krate, name, PrimingPhase::DefMap)).ok(); } // recv_timeout is somewhat a hack, we need a way to from this thread check to see if the current salsa revision @@ -144,9 +151,14 @@ pub fn parallel_prime_caches( continue; } Err(crossbeam_channel::RecvTimeoutError::Disconnected) => { - // our workers may have died from a cancelled task, so we'll check and re-raise here. - db.unwind_if_cancelled(); - break; + // all our workers have exited, mark us as finished and exit + cb(ParallelPrimeCachesProgress { + crates_currently_indexing: vec![], + crates_done, + crates_total: crates_done, + work_type: "Indexing", + }); + return; } }; match worker_progress { @@ -158,6 +170,10 @@ pub fn parallel_prime_caches( crates_to_prime.mark_done(crate_id); crates_done += 1; } + ParallelPrimeCacheWorkerProgress::Cancelled(cancelled) => { + // Cancelled::throw should probably be public + std::panic::resume_unwind(Box::new(cancelled)); + } }; let progress = ParallelPrimeCachesProgress { @@ -177,7 +193,7 @@ pub fn parallel_prime_caches( } while crates_done < crates_total { - db.unwind_if_cancelled(); + db.unwind_if_revision_cancelled(); // recv_timeout is somewhat a hack, we need a way to from this thread check to see if the current salsa revision // is cancelled on a regular basis. workers will only exit if they are processing a task that is cancelled, or @@ -188,9 +204,14 @@ pub fn parallel_prime_caches( continue; } Err(crossbeam_channel::RecvTimeoutError::Disconnected) => { - // our workers may have died from a cancelled task, so we'll check and re-raise here. - db.unwind_if_cancelled(); - break; + // all our workers have exited, mark us as finished and exit + cb(ParallelPrimeCachesProgress { + crates_currently_indexing: vec![], + crates_done, + crates_total: crates_done, + work_type: "Populating symbols", + }); + return; } }; match worker_progress { @@ -201,6 +222,10 @@ pub fn parallel_prime_caches( crates_currently_indexing.swap_remove(&crate_id); crates_done += 1; } + ParallelPrimeCacheWorkerProgress::Cancelled(cancelled) => { + // Cancelled::throw should probably be public + std::panic::resume_unwind(Box::new(cancelled)); + } }; let progress = ParallelPrimeCachesProgress { diff --git a/src/tools/rust-analyzer/crates/ide-db/src/prime_caches/topologic_sort.rs b/src/tools/rust-analyzer/crates/ide-db/src/prime_caches/topologic_sort.rs index 7353d71fa4f86..c8a0386310367 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/prime_caches/topologic_sort.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/prime_caches/topologic_sort.rs @@ -7,14 +7,20 @@ pub(crate) struct TopologicSortIterBuilder { nodes: FxHashMap>, } -impl TopologicSortIterBuilder +// this implementation has different bounds on T than would be implied by #[derive(Default)] +impl Default for TopologicSortIterBuilder where T: Copy + Eq + PartialEq + Hash, { - fn new() -> Self { + fn default() -> Self { Self { nodes: Default::default() } } +} +impl TopologicSortIterBuilder +where + T: Copy + Eq + PartialEq + Hash, +{ fn get_or_create_entry(&mut self, item: T) -> &mut Entry { self.nodes.entry(item).or_default() } @@ -54,7 +60,7 @@ where T: Copy + Eq + PartialEq + Hash, { pub(crate) fn builder() -> TopologicSortIterBuilder { - TopologicSortIterBuilder::new() + TopologicSortIterBuilder::default() } pub(crate) fn pending(&self) -> usize { diff --git a/src/tools/rust-analyzer/crates/ide-db/src/rename.rs b/src/tools/rust-analyzer/crates/ide-db/src/rename.rs index 1633065f65217..b8119e1aab366 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/rename.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/rename.rs @@ -28,22 +28,22 @@ use crate::{ }; use base_db::AnchoredPathBuf; use either::Either; -use hir::{FieldSource, FileRange, HirFileIdExt, InFile, ModuleSource, Semantics}; -use span::{Edition, EditionedFileId, FileId, SyntaxContextId}; -use stdx::{never, TupleExt}; +use hir::{EditionedFileId, FieldSource, FileRange, InFile, ModuleSource, Semantics}; +use span::{Edition, FileId, SyntaxContext}; +use stdx::{TupleExt, never}; use syntax::{ + AstNode, SyntaxKind, T, TextRange, ast::{self, HasName}, utils::is_raw_identifier, - AstNode, SyntaxKind, TextRange, T, }; use crate::{ + RootDatabase, defs::Definition, search::{FileReference, FileReferenceNode}, source_change::{FileSystemEdit, SourceChange}, syntax_helpers::node_ext::expr_as_name_ref, traits::convert_to_def_in_trait, - RootDatabase, }; pub type Result = std::result::Result; @@ -113,7 +113,7 @@ impl Definition { /// renamed and extern crate names will report its range, though a rename will introduce /// an alias instead. pub fn range_for_rename(self, sema: &Semantics<'_, RootDatabase>) -> Option { - let syn_ctx_is_root = |(range, ctx): (_, SyntaxContextId)| ctx.is_root().then_some(range); + let syn_ctx_is_root = |(range, ctx): (_, SyntaxContext)| ctx.is_root().then_some(range); let res = match self { Definition::Macro(mac) => { let src = sema.source(mac)?; @@ -220,7 +220,7 @@ impl Definition { fn name_range( def: D, sema: &Semantics<'_, RootDatabase>, - ) -> Option<(FileRange, SyntaxContextId)> + ) -> Option<(FileRange, SyntaxContext)> where D: hir::HasSource, D::Ast: ast::HasName, @@ -249,7 +249,7 @@ fn rename_mod( let InFile { file_id, value: def_source } = module.definition_source(sema.db); if let ModuleSource::SourceFile(..) = def_source { - let anchor = file_id.original_file(sema.db).file_id(); + let anchor = file_id.original_file(sema.db).file_id(sema.db); let is_mod_rs = module.is_mod_rs(sema.db); let has_detached_child = module.children(sema.db).any(|child| !child.is_inline(sema.db)); @@ -296,13 +296,13 @@ fn rename_mod( .original_file_range_opt(sema.db) .map(TupleExt::head) { - let new_name = if is_raw_identifier(new_name, file_id.edition()) { + let new_name = if is_raw_identifier(new_name, file_id.edition(sema.db)) { format!("r#{new_name}") } else { new_name.to_owned() }; source_change.insert_source_edit( - file_id.file_id(), + file_id.file_id(sema.db), TextEdit::replace(file_range.range, new_name), ) }; @@ -315,8 +315,8 @@ fn rename_mod( let usages = def.usages(sema).all(); let ref_edits = usages.iter().map(|(file_id, references)| { ( - EditionedFileId::file_id(file_id), - source_edit_from_references(references, def, new_name, file_id.edition()), + file_id.file_id(sema.db), + source_edit_from_references(references, def, new_name, file_id.edition(sema.db)), ) }); source_change.extend(ref_edits); @@ -362,19 +362,15 @@ fn rename_reference( let mut source_change = SourceChange::default(); source_change.extend(usages.iter().map(|(file_id, references)| { ( - EditionedFileId::file_id(file_id), - source_edit_from_references(references, def, new_name, file_id.edition()), + file_id.file_id(sema.db), + source_edit_from_references(references, def, new_name, file_id.edition(sema.db)), ) })); - let mut insert_def_edit = |def| { - let (file_id, edit) = source_edit_from_def(sema, def, new_name, &mut source_change)?; - source_change.insert_source_edit(file_id, edit); - Ok(()) - }; // This needs to come after the references edits, because we change the annotation of existing edits // if a conflict is detected. - insert_def_edit(def)?; + let (file_id, edit) = source_edit_from_def(sema, def, new_name, &mut source_change)?; + source_change.insert_source_edit(file_id, edit); Ok(source_change) } @@ -545,7 +541,7 @@ fn source_edit_from_def( source_change: &mut SourceChange, ) -> Result<(FileId, TextEdit)> { let new_name_edition_aware = |new_name: &str, file_id: EditionedFileId| { - if is_raw_identifier(new_name, file_id.edition()) { + if is_raw_identifier(new_name, file_id.edition(sema.db)) { format!("r#{new_name}") } else { new_name.to_owned() @@ -642,7 +638,7 @@ fn source_edit_from_def( edit.set_annotation(conflict_annotation); let Some(file_id) = file_id else { bail!("No file available to rename") }; - return Ok((EditionedFileId::file_id(file_id), edit)); + return Ok((file_id.file_id(sema.db), edit)); } let FileRange { file_id, range } = def .range_for_rename(sema) @@ -658,7 +654,7 @@ fn source_edit_from_def( _ => (range, new_name.to_owned()), }; edit.replace(range, new_name_edition_aware(&new_name, file_id)); - Ok((file_id.file_id(), edit.finish())) + Ok((file_id.file_id(sema.db), edit.finish())) } #[derive(Copy, Clone, Debug, PartialEq)] diff --git a/src/tools/rust-analyzer/crates/ide-db/src/search.rs b/src/tools/rust-analyzer/crates/ide-db/src/search.rs index 02cd8b8bdf510..30be5bc21b498 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/search.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/search.rs @@ -7,28 +7,28 @@ use std::mem; use std::{cell::LazyCell, cmp::Reverse}; -use base_db::{ra_salsa::Database, SourceDatabase, SourceRootDatabase}; +use base_db::{RootQueryDb, SourceDatabase}; use either::Either; use hir::{ - sym, Adt, AsAssocItem, DefWithBody, FileRange, FileRangeWrapper, HasAttrs, HasContainer, - HasSource, HirFileIdExt, InFile, InFileWrapper, InRealFile, InlineAsmOperand, ItemContainer, - ModuleSource, PathResolution, Semantics, Visibility, + Adt, AsAssocItem, DefWithBody, EditionedFileId, FileRange, FileRangeWrapper, HasAttrs, + HasContainer, HasSource, InFile, InFileWrapper, InRealFile, InlineAsmOperand, ItemContainer, + ModuleSource, PathResolution, Semantics, Visibility, sym, }; use memchr::memmem::Finder; use parser::SyntaxKind; use rustc_hash::{FxHashMap, FxHashSet}; -use span::EditionedFileId; +use salsa::Database; use syntax::{ + AstNode, AstToken, SmolStr, SyntaxElement, SyntaxNode, TextRange, TextSize, ToSmolStr, ast::{self, HasName, Rename}, - match_ast, AstNode, AstToken, SmolStr, SyntaxElement, SyntaxNode, TextRange, TextSize, - ToSmolStr, + match_ast, }; use triomphe::Arc; use crate::{ + RootDatabase, defs::{Definition, NameClass, NameRefClass}, traits::{as_trait_assoc_def, convert_to_def_in_trait}, - RootDatabase, }; #[derive(Debug, Default, Clone)] @@ -161,13 +161,15 @@ impl SearchScope { fn crate_graph(db: &RootDatabase) -> SearchScope { let mut entries = FxHashMap::default(); - let graph = db.crate_graph(); - for krate in graph.iter() { - let root_file = graph[krate].root_file_id; - let source_root_id = db.file_source_root(root_file); - let source_root = db.source_root(source_root_id); + let all_crates = db.all_crates(); + for &krate in all_crates.iter() { + let crate_data = krate.data(db); + let source_root = db.file_source_root(crate_data.root_file_id).source_root_id(db); + let source_root = db.source_root(source_root).source_root(db); entries.extend( - source_root.iter().map(|id| (EditionedFileId::new(id, graph[krate].edition), None)), + source_root + .iter() + .map(|id| (EditionedFileId::new(db, id, crate_data.edition), None)), ); } SearchScope { entries } @@ -178,10 +180,13 @@ impl SearchScope { let mut entries = FxHashMap::default(); for rev_dep in of.transitive_reverse_dependencies(db) { let root_file = rev_dep.root_file(db); - let source_root_id = db.file_source_root(root_file); - let source_root = db.source_root(source_root_id); + + let source_root = db.file_source_root(root_file).source_root_id(db); + let source_root = db.source_root(source_root).source_root(db); entries.extend( - source_root.iter().map(|id| (EditionedFileId::new(id, rev_dep.edition(db)), None)), + source_root + .iter() + .map(|id| (EditionedFileId::new(db, id, rev_dep.edition(db)), None)), ); } SearchScope { entries } @@ -190,12 +195,13 @@ impl SearchScope { /// Build a search scope spanning the given crate. fn krate(db: &RootDatabase, of: hir::Crate) -> SearchScope { let root_file = of.root_file(db); - let source_root_id = db.file_source_root(root_file); - let source_root = db.source_root(source_root_id); + + let source_root_id = db.file_source_root(root_file).source_root_id(db); + let source_root = db.source_root(source_root_id).source_root(db); SearchScope { entries: source_root .iter() - .map(|id| (EditionedFileId::new(id, of.edition(db)), None)) + .map(|id| (EditionedFileId::new(db, id, of.edition(db)), None)) .collect(), } } @@ -308,8 +314,6 @@ impl Definition { DefWithBody::Const(c) => c.source(db).map(|src| src.syntax().cloned()), DefWithBody::Static(s) => s.source(db).map(|src| src.syntax().cloned()), DefWithBody::Variant(v) => v.source(db).map(|src| src.syntax().cloned()), - // FIXME: implement - DefWithBody::InTypeConst(_) => return SearchScope::empty(), }; return match def { Some(def) => SearchScope::file_range( @@ -325,8 +329,6 @@ impl Definition { DefWithBody::Const(c) => c.source(db).map(|src| src.syntax().cloned()), DefWithBody::Static(s) => s.source(db).map(|src| src.syntax().cloned()), DefWithBody::Variant(v) => v.source(db).map(|src| src.syntax().cloned()), - // FIXME: implement - DefWithBody::InTypeConst(_) => return SearchScope::empty(), }; return match def { Some(def) => SearchScope::file_range( @@ -367,7 +369,7 @@ impl Definition { if let Definition::Macro(macro_def) = self { return match macro_def.kind(db) { hir::MacroKind::Declarative => { - if macro_def.attrs(db).by_key(&sym::macro_export).exists() { + if macro_def.attrs(db).by_key(sym::macro_export).exists() { SearchScope::reverse_dependencies(db, module.krate()) } else { SearchScope::krate(db, module.krate()) @@ -483,7 +485,7 @@ impl<'a> FindUsages<'a> { scope: &'b SearchScope, ) -> impl Iterator, EditionedFileId, TextRange)> + 'b { scope.entries.iter().map(|(&file_id, &search_range)| { - let text = db.file_text(file_id.file_id()); + let text = db.file_text(file_id.file_id(db)).text(db); let search_range = search_range.unwrap_or_else(|| TextRange::up_to(TextSize::of(&*text))); @@ -853,7 +855,10 @@ impl<'a> FindUsages<'a> { name, is_possibly_self.into_iter().map(|position| { ( - self.sema.db.file_text(position.file_id.file_id()), + self.sema + .db + .file_text(position.file_id.file_id(self.sema.db)) + .text(self.sema.db), position.file_id, position.range, ) @@ -947,7 +952,6 @@ impl<'a> FindUsages<'a> { let include_self_kw_refs = self.include_self_kw_refs.as_ref().map(|ty| (ty, Finder::new("Self"))); for (text, file_id, search_range) in Self::scope_files(sema.db, &search_scope) { - self.sema.db.unwind_if_cancelled(); let tree = LazyCell::new(move || sema.parse(file_id).syntax().clone()); // Search for occurrences of the items name @@ -1001,7 +1005,8 @@ impl<'a> FindUsages<'a> { let finder = &Finder::new("super"); for (text, file_id, search_range) in Self::scope_files(sema.db, &scope) { - self.sema.db.unwind_if_cancelled(); + self.sema.db.unwind_if_revision_cancelled(); + let tree = LazyCell::new(move || sema.parse(file_id).syntax().clone()); for offset in Self::match_indices(&text, finder, search_range) { @@ -1050,7 +1055,8 @@ impl<'a> FindUsages<'a> { return; }; - let text = sema.db.file_text(file_id.file_id()); + let file_text = sema.db.file_text(file_id.file_id(self.sema.db)); + let text = file_text.text(sema.db); let search_range = search_range.unwrap_or_else(|| TextRange::up_to(TextSize::of(&*text))); @@ -1279,7 +1285,7 @@ impl<'a> FindUsages<'a> { if convert_to_def_in_trait(self.sema.db, def) != convert_to_def_in_trait(self.sema.db, self.def) => { - return false + return false; } (Some(_), Definition::TypeAlias(_)) => {} // We looking at an assoc item of a trait definition, so reference all the diff --git a/src/tools/rust-analyzer/crates/ide-db/src/source_change.rs b/src/tools/rust-analyzer/crates/ide-db/src/source_change.rs index b4d0b0dc9f0af..b1b58d6568cb5 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/source_change.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/source_change.rs @@ -6,7 +6,7 @@ use std::{collections::hash_map::Entry, fmt, iter, mem}; use crate::text_edit::{TextEdit, TextEditBuilder}; -use crate::{assists::Command, syntax_helpers::tree_diff::diff, SnippetCap}; +use crate::{SnippetCap, assists::Command, syntax_helpers::tree_diff::diff}; use base_db::AnchoredPathBuf; use itertools::Itertools; use nohash_hasher::IntMap; @@ -14,8 +14,8 @@ use rustc_hash::FxHashMap; use span::FileId; use stdx::never; use syntax::{ - syntax_editor::{SyntaxAnnotation, SyntaxEditor}, AstNode, SyntaxElement, SyntaxNode, SyntaxNodePtr, SyntaxToken, TextRange, TextSize, + syntax_editor::{SyntaxAnnotation, SyntaxEditor}, }; /// An annotation ID associated with an indel, to describe changes. @@ -469,7 +469,7 @@ impl SourceChangeBuilder { } fn add_snippet_annotation(&mut self, kind: AnnotationSnippet) -> SyntaxAnnotation { - let annotation = SyntaxAnnotation::new(); + let annotation = SyntaxAnnotation::default(); self.snippet_annotations.push((kind, annotation)); self.source_change.is_snippet = true; annotation @@ -479,13 +479,14 @@ impl SourceChangeBuilder { self.commit(); // Only one file can have snippet edits - stdx::never!(self - .source_change - .source_file_edits - .iter() - .filter(|(_, (_, snippet_edit))| snippet_edit.is_some()) - .at_most_one() - .is_err()); + stdx::never!( + self.source_change + .source_file_edits + .iter() + .filter(|(_, (_, snippet_edit))| snippet_edit.is_some()) + .at_most_one() + .is_err() + ); mem::take(&mut self.source_change) } diff --git a/src/tools/rust-analyzer/crates/ide-db/src/symbol_index.rs b/src/tools/rust-analyzer/crates/ide-db/src/symbol_index.rs index 2737436993deb..d1ba79e8c785e 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/symbol_index.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/symbol_index.rs @@ -27,16 +27,13 @@ use std::{ ops::ControlFlow, }; -use base_db::{ - ra_salsa::{self, ParallelDatabase}, - SourceRootDatabase, SourceRootId, Upcast, -}; -use fst::{raw::IndexedValue, Automaton, Streamer}; +use base_db::{RootQueryDb, SourceDatabase, SourceRootId}; +use fst::{Automaton, Streamer, raw::IndexedValue}; use hir::{ + Crate, Module, db::HirDatabase, import_map::{AssocSearchMode, SearchMode}, symbols::{FileSymbol, SymbolCollector}, - Crate, Module, }; use rayon::prelude::*; use rustc_hash::FxHashSet; @@ -99,38 +96,42 @@ impl Query { } } -#[ra_salsa::query_group(SymbolsDatabaseStorage)] -pub trait SymbolsDatabase: HirDatabase + SourceRootDatabase + Upcast { +#[query_group::query_group] +pub trait SymbolsDatabase: HirDatabase + SourceDatabase { /// The symbol index for a given module. These modules should only be in source roots that /// are inside local_roots. + // FIXME: Is it worth breaking the encapsulation boundary of `hir`, and make this take a `ModuleId`, + // in order for it to be a non-interned query? + #[salsa::invoke_interned(module_symbols)] fn module_symbols(&self, module: Module) -> Arc; /// The symbol index for a given source root within library_roots. + #[salsa::invoke_interned(library_symbols)] fn library_symbols(&self, source_root_id: SourceRootId) -> Arc; - #[ra_salsa::transparent] + #[salsa::transparent] /// The symbol indices of modules that make up a given crate. fn crate_symbols(&self, krate: Crate) -> Box<[Arc]>; /// The set of "local" (that is, from the current workspace) roots. /// Files in local roots are assumed to change frequently. - #[ra_salsa::input] + #[salsa::input] fn local_roots(&self) -> Arc>; /// The set of roots for crates.io libraries. /// Files in libraries are assumed to never change. - #[ra_salsa::input] + #[salsa::input] fn library_roots(&self) -> Arc>; } fn library_symbols(db: &dyn SymbolsDatabase, source_root_id: SourceRootId) -> Arc { let _p = tracing::info_span!("library_symbols").entered(); - let mut symbol_collector = SymbolCollector::new(db.upcast()); + let mut symbol_collector = SymbolCollector::new(db); db.source_root_crates(source_root_id) .iter() - .flat_map(|&krate| Crate::from(krate).modules(db.upcast())) + .flat_map(|&krate| Crate::from(krate).modules(db)) // we specifically avoid calling other SymbolsDatabase queries here, even though they do the same thing, // as the index for a library is not going to really ever change, and we do not want to store each // the module or crate indices for those in salsa unless we need to. @@ -142,32 +143,12 @@ fn library_symbols(db: &dyn SymbolsDatabase, source_root_id: SourceRootId) -> Ar fn module_symbols(db: &dyn SymbolsDatabase, module: Module) -> Arc { let _p = tracing::info_span!("module_symbols").entered(); - Arc::new(SymbolIndex::new(SymbolCollector::new_module(db.upcast(), module))) + Arc::new(SymbolIndex::new(SymbolCollector::new_module(db, module))) } pub fn crate_symbols(db: &dyn SymbolsDatabase, krate: Crate) -> Box<[Arc]> { let _p = tracing::info_span!("crate_symbols").entered(); - krate.modules(db.upcast()).into_iter().map(|module| db.module_symbols(module)).collect() -} - -/// Need to wrap Snapshot to provide `Clone` impl for `map_with` -struct Snap(DB); -impl Snap> { - fn new(db: &DB) -> Self { - Self(db.snapshot()) - } -} -impl Clone for Snap> { - fn clone(&self) -> Snap> { - Snap(self.0.snapshot()) - } -} -impl std::ops::Deref for Snap { - type Target = DB; - - fn deref(&self) -> &Self::Target { - &self.0 - } + krate.modules(db).into_iter().map(|module| db.module_symbols(module)).collect() } // Feature: Workspace Symbol @@ -201,7 +182,7 @@ pub fn world_symbols(db: &RootDatabase, query: Query) -> Vec { let indices: Vec<_> = if query.libs { db.library_roots() .par_iter() - .map_with(Snap::new(db), |snap, &root| snap.library_symbols(root)) + .map_with(db.clone(), |snap, &root| snap.library_symbols(root)) .collect() } else { let mut crates = Vec::new(); @@ -211,7 +192,7 @@ pub fn world_symbols(db: &RootDatabase, query: Query) -> Vec { } let indices: Vec<_> = crates .into_par_iter() - .map_with(Snap::new(db), |snap, krate| snap.crate_symbols(krate.into())) + .map_with(db.clone(), |snap, krate| snap.crate_symbols(krate.into())) .collect(); indices.iter().flat_map(|indices| indices.iter().cloned()).collect() }; diff --git a/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/format_string.rs b/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/format_string.rs index 92478ef480d6a..7e8c921d9ed39 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/format_string.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/format_string.rs @@ -1,7 +1,7 @@ //! Tools to work with format string literals for the `format_args!` family of macros. use syntax::{ - ast::{self, IsString}, AstNode, AstToken, TextRange, TextSize, + ast::{self, IsString}, }; // FIXME: This can probably be re-implemented via the HIR? diff --git a/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/format_string_exprs.rs b/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/format_string_exprs.rs index c104aa571894d..8f25833fffb8d 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/format_string_exprs.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/format_string_exprs.rs @@ -183,7 +183,7 @@ pub fn parse_format_exprs(input: &str) -> Result<(String, Vec), ()> { #[cfg(test)] mod tests { use super::*; - use expect_test::{expect, Expect}; + use expect_test::{Expect, expect}; fn check(input: &str, expect: &Expect) { let (output, exprs) = parse_format_exprs(input).unwrap_or(("-".to_owned(), vec![])); diff --git a/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/node_ext.rs b/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/node_ext.rs index 56a66070ef7f3..bdff64dd0812c 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/node_ext.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/node_ext.rs @@ -5,8 +5,8 @@ use itertools::Itertools; use parser::T; use span::Edition; use syntax::{ - ast::{self, HasLoopBody, MacroCall, PathSegmentKind, VisibilityKind}, AstNode, AstToken, Preorder, RustLanguage, WalkEvent, + ast::{self, HasLoopBody, MacroCall, PathSegmentKind, VisibilityKind}, }; pub fn expr_as_name_ref(expr: &ast::Expr) -> Option { @@ -121,7 +121,7 @@ pub fn walk_patterns_in_expr(start: &ast::Expr, cb: &mut dyn FnMut(ast::Pat)) { match ast::Stmt::cast(node.clone()) { Some(ast::Stmt::LetStmt(l)) => { if let Some(pat) = l.pat() { - let _ = walk_pat(&pat, &mut |pat| { + _ = walk_pat(&pat, &mut |pat| { cb(pat); ControlFlow::<(), ()>::Continue(()) }); @@ -159,7 +159,7 @@ pub fn walk_patterns_in_expr(start: &ast::Expr, cb: &mut dyn FnMut(ast::Pat)) { } } else if let Some(pat) = ast::Pat::cast(node) { preorder.skip_subtree(); - let _ = walk_pat(&pat, &mut |pat| { + _ = walk_pat(&pat, &mut |pat| { cb(pat); ControlFlow::<(), ()>::Continue(()) }); @@ -484,7 +484,7 @@ pub fn parse_tt_as_comma_sep_paths( None => None, Some(tok) => Some(tok), }); - let input_expressions = tokens.group_by(|tok| tok.kind() == T![,]); + let input_expressions = tokens.chunk_by(|tok| tok.kind() == T![,]); let paths = input_expressions .into_iter() .filter_map(|(is_sep, group)| (!is_sep).then_some(group)) diff --git a/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/suggest_name.rs b/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/suggest_name.rs index e085bf15cb92d..9b9f450bc7342 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/suggest_name.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/suggest_name.rs @@ -7,8 +7,9 @@ use itertools::Itertools; use rustc_hash::FxHashMap; use stdx::to_lower_snake_case; use syntax::{ + AstNode, Edition, SmolStr, SmolStrBuilder, ToSmolStr, ast::{self, HasName}, - match_ast, AstNode, Edition, SmolStr, SmolStrBuilder, ToSmolStr, + match_ast, }; use crate::RootDatabase; @@ -82,7 +83,7 @@ const USELESS_METHODS: &[&str] = &[ /// /// ``` /// # use ide_db::syntax_helpers::suggest_name::NameGenerator; -/// let mut generator = NameGenerator::new(); +/// let mut generator = NameGenerator::default(); /// assert_eq!(generator.suggest_name("a"), "a"); /// assert_eq!(generator.suggest_name("a"), "a1"); /// @@ -95,21 +96,16 @@ pub struct NameGenerator { } impl NameGenerator { - /// Create a new empty generator - pub fn new() -> Self { - Self { pool: FxHashMap::default() } - } - /// Create a new generator with existing names. When suggesting a name, it will /// avoid conflicts with existing names. pub fn new_with_names<'a>(existing_names: impl Iterator) -> Self { - let mut generator = Self::new(); + let mut generator = Self::default(); existing_names.for_each(|name| generator.insert(name)); generator } pub fn new_from_scope_locals(scope: Option>) -> Self { - let mut generator = Self::new(); + let mut generator = Self::default(); if let Some(scope) = scope { scope.process_all_names(&mut |name, scope| { if let hir::ScopeDef::Local(_) = scope { @@ -457,9 +453,10 @@ mod tests { fn check(#[rust_analyzer::rust_fixture] ra_fixture: &str, expected: &str) { let (db, file_id, range_or_offset) = RootDatabase::with_range_or_offset(ra_fixture); let frange = FileRange { file_id, range: range_or_offset.into() }; - let sema = Semantics::new(&db); + let source_file = sema.parse(frange.file_id); + let element = source_file.syntax().covering_element(frange.range); let expr = element.ancestors().find_map(ast::Expr::cast).expect("selection is not an expression"); @@ -468,7 +465,7 @@ mod tests { frange.range, "selection is not an expression(yet contained in one)" ); - let name = NameGenerator::new().for_variable(&expr, &sema); + let name = NameGenerator::default().for_variable(&expr, &sema); assert_eq!(&name, expected); } @@ -1115,7 +1112,7 @@ fn main() { #[test] fn conflicts_with_existing_names() { - let mut generator = NameGenerator::new(); + let mut generator = NameGenerator::default(); assert_eq!(generator.suggest_name("a"), "a"); assert_eq!(generator.suggest_name("a"), "a1"); assert_eq!(generator.suggest_name("a"), "a2"); diff --git a/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/tree_diff.rs b/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/tree_diff.rs index 02e24c47761c9..7163c08e1e317 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/tree_diff.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/tree_diff.rs @@ -2,7 +2,7 @@ use rustc_hash::FxHashMap; use syntax::{NodeOrToken, SyntaxElement, SyntaxNode}; -use crate::{text_edit::TextEditBuilder, FxIndexMap}; +use crate::{FxIndexMap, text_edit::TextEditBuilder}; #[derive(Debug, Hash, PartialEq, Eq)] enum TreeDiffInsertPos { @@ -153,7 +153,7 @@ pub fn diff(from: &SyntaxNode, to: &SyntaxNode) -> TreeDiff { #[cfg(test)] mod tests { - use expect_test::{expect, Expect}; + use expect_test::{Expect, expect}; use itertools::Itertools; use parser::{Edition, SyntaxKind}; use syntax::{AstNode, SourceFile, SyntaxElement}; diff --git a/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_doc_alias.txt b/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_doc_alias.txt index efcf53ded64f0..455a6805907cc 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_doc_alias.txt +++ b/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_doc_alias.txt @@ -2,7 +2,9 @@ ( Module { id: ModuleId { - krate: Idx::(0), + krate: Crate( + Id(3000), + ), block: None, local_id: Idx::(0), }, @@ -14,17 +16,16 @@ Struct( Struct { id: StructId( - 1, + 3401, ), }, ), ), loc: DeclarationLocation { - hir_file_id: EditionedFileId( - FileId( - 0, + hir_file_id: FileId( + EditionedFileId( + Id(2000), ), - Edition2021, ), ptr: SyntaxNodePtr { kind: STRUCT, @@ -40,6 +41,7 @@ container_name: None, is_alias: false, is_assoc: false, + do_not_complete: Yes, }, FileSymbol { name: "Struct", @@ -47,17 +49,16 @@ Struct( Struct { id: StructId( - 0, + 3400, ), }, ), ), loc: DeclarationLocation { - hir_file_id: EditionedFileId( - FileId( - 0, + hir_file_id: FileId( + EditionedFileId( + Id(2000), ), - Edition2021, ), ptr: SyntaxNodePtr { kind: STRUCT, @@ -73,6 +74,7 @@ container_name: None, is_alias: false, is_assoc: false, + do_not_complete: Yes, }, FileSymbol { name: "mul1", @@ -80,17 +82,16 @@ Struct( Struct { id: StructId( - 0, + 3400, ), }, ), ), loc: DeclarationLocation { - hir_file_id: EditionedFileId( - FileId( - 0, + hir_file_id: FileId( + EditionedFileId( + Id(2000), ), - Edition2021, ), ptr: SyntaxNodePtr { kind: STRUCT, @@ -106,6 +107,7 @@ container_name: None, is_alias: true, is_assoc: false, + do_not_complete: Yes, }, FileSymbol { name: "mul2", @@ -113,17 +115,16 @@ Struct( Struct { id: StructId( - 0, + 3400, ), }, ), ), loc: DeclarationLocation { - hir_file_id: EditionedFileId( - FileId( - 0, + hir_file_id: FileId( + EditionedFileId( + Id(2000), ), - Edition2021, ), ptr: SyntaxNodePtr { kind: STRUCT, @@ -139,6 +140,7 @@ container_name: None, is_alias: true, is_assoc: false, + do_not_complete: Yes, }, FileSymbol { name: "s1", @@ -146,17 +148,16 @@ Struct( Struct { id: StructId( - 0, + 3400, ), }, ), ), loc: DeclarationLocation { - hir_file_id: EditionedFileId( - FileId( - 0, + hir_file_id: FileId( + EditionedFileId( + Id(2000), ), - Edition2021, ), ptr: SyntaxNodePtr { kind: STRUCT, @@ -172,6 +173,7 @@ container_name: None, is_alias: true, is_assoc: false, + do_not_complete: Yes, }, FileSymbol { name: "s1", @@ -179,17 +181,16 @@ Struct( Struct { id: StructId( - 1, + 3401, ), }, ), ), loc: DeclarationLocation { - hir_file_id: EditionedFileId( - FileId( - 0, + hir_file_id: FileId( + EditionedFileId( + Id(2000), ), - Edition2021, ), ptr: SyntaxNodePtr { kind: STRUCT, @@ -205,6 +206,7 @@ container_name: None, is_alias: true, is_assoc: false, + do_not_complete: Yes, }, FileSymbol { name: "s2", @@ -212,17 +214,16 @@ Struct( Struct { id: StructId( - 0, + 3400, ), }, ), ), loc: DeclarationLocation { - hir_file_id: EditionedFileId( - FileId( - 0, + hir_file_id: FileId( + EditionedFileId( + Id(2000), ), - Edition2021, ), ptr: SyntaxNodePtr { kind: STRUCT, @@ -238,6 +239,7 @@ container_name: None, is_alias: true, is_assoc: false, + do_not_complete: Yes, }, ], ), diff --git a/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_symbol_index_collection.txt b/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_symbol_index_collection.txt index 7dce95592b819..5e5ae1d168e7e 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_symbol_index_collection.txt +++ b/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_symbol_index_collection.txt @@ -2,7 +2,9 @@ ( Module { id: ModuleId { - krate: Idx::(0), + krate: Crate( + Id(3000), + ), block: None, local_id: Idx::(0), }, @@ -13,16 +15,15 @@ def: TypeAlias( TypeAlias { id: TypeAliasId( - 0, + 6800, ), }, ), loc: DeclarationLocation { - hir_file_id: EditionedFileId( - FileId( - 0, + hir_file_id: FileId( + EditionedFileId( + Id(2000), ), - Edition2021, ), ptr: SyntaxNodePtr { kind: TYPE_ALIAS, @@ -38,22 +39,22 @@ container_name: None, is_alias: false, is_assoc: false, + do_not_complete: Yes, }, FileSymbol { name: "CONST", def: Const( Const { id: ConstId( - 0, + 6000, ), }, ), loc: DeclarationLocation { - hir_file_id: EditionedFileId( - FileId( - 0, + hir_file_id: FileId( + EditionedFileId( + Id(2000), ), - Edition2021, ), ptr: SyntaxNodePtr { kind: CONST, @@ -69,22 +70,22 @@ container_name: None, is_alias: false, is_assoc: false, + do_not_complete: Yes, }, FileSymbol { name: "CONST_WITH_INNER", def: Const( Const { id: ConstId( - 2, + 6002, ), }, ), loc: DeclarationLocation { - hir_file_id: EditionedFileId( - FileId( - 0, + hir_file_id: FileId( + EditionedFileId( + Id(2000), ), - Edition2021, ), ptr: SyntaxNodePtr { kind: CONST, @@ -100,6 +101,7 @@ container_name: None, is_alias: false, is_assoc: false, + do_not_complete: Yes, }, FileSymbol { name: "Enum", @@ -107,17 +109,16 @@ Enum( Enum { id: EnumId( - 0, + 4c00, ), }, ), ), loc: DeclarationLocation { - hir_file_id: EditionedFileId( - FileId( - 0, + hir_file_id: FileId( + EditionedFileId( + Id(2000), ), - Edition2021, ), ptr: SyntaxNodePtr { kind: ENUM, @@ -133,6 +134,7 @@ container_name: None, is_alias: false, is_assoc: false, + do_not_complete: Yes, }, FileSymbol { name: "ItemLikeMacro", @@ -140,17 +142,16 @@ Macro { id: Macro2Id( Macro2Id( - 0, + 4800, ), ), }, ), loc: DeclarationLocation { - hir_file_id: EditionedFileId( - FileId( - 0, + hir_file_id: FileId( + EditionedFileId( + Id(2000), ), - Edition2021, ), ptr: SyntaxNodePtr { kind: USE_TREE, @@ -166,6 +167,7 @@ container_name: None, is_alias: false, is_assoc: false, + do_not_complete: Yes, }, FileSymbol { name: "Macro", @@ -173,17 +175,16 @@ Macro { id: Macro2Id( Macro2Id( - 0, + 4800, ), ), }, ), loc: DeclarationLocation { - hir_file_id: EditionedFileId( - FileId( - 0, + hir_file_id: FileId( + EditionedFileId( + Id(2000), ), - Edition2021, ), ptr: SyntaxNodePtr { kind: MACRO_DEF, @@ -199,22 +200,22 @@ container_name: None, is_alias: false, is_assoc: false, + do_not_complete: Yes, }, FileSymbol { name: "STATIC", def: Static( Static { id: StaticId( - 0, + 6400, ), }, ), loc: DeclarationLocation { - hir_file_id: EditionedFileId( - FileId( - 0, + hir_file_id: FileId( + EditionedFileId( + Id(2000), ), - Edition2021, ), ptr: SyntaxNodePtr { kind: STATIC, @@ -230,6 +231,7 @@ container_name: None, is_alias: false, is_assoc: false, + do_not_complete: Yes, }, FileSymbol { name: "Struct", @@ -237,17 +239,16 @@ Struct( Struct { id: StructId( - 1, + 4401, ), }, ), ), loc: DeclarationLocation { - hir_file_id: EditionedFileId( - FileId( - 0, + hir_file_id: FileId( + EditionedFileId( + Id(2000), ), - Edition2021, ), ptr: SyntaxNodePtr { kind: STRUCT, @@ -263,6 +264,7 @@ container_name: None, is_alias: false, is_assoc: false, + do_not_complete: Yes, }, FileSymbol { name: "StructFromMacro", @@ -270,14 +272,16 @@ Struct( Struct { id: StructId( - 0, + 4400, ), }, ), ), loc: DeclarationLocation { hir_file_id: MacroFile( - 0, + MacroCallId( + Id(3800), + ), ), ptr: SyntaxNodePtr { kind: STRUCT, @@ -293,6 +297,7 @@ container_name: None, is_alias: false, is_assoc: false, + do_not_complete: Yes, }, FileSymbol { name: "StructInFn", @@ -300,17 +305,16 @@ Struct( Struct { id: StructId( - 5, + 4405, ), }, ), ), loc: DeclarationLocation { - hir_file_id: EditionedFileId( - FileId( - 0, + hir_file_id: FileId( + EditionedFileId( + Id(2000), ), - Edition2021, ), ptr: SyntaxNodePtr { kind: STRUCT, @@ -328,6 +332,7 @@ ), is_alias: false, is_assoc: false, + do_not_complete: Yes, }, FileSymbol { name: "StructInNamedConst", @@ -335,17 +340,16 @@ Struct( Struct { id: StructId( - 6, + 4406, ), }, ), ), loc: DeclarationLocation { - hir_file_id: EditionedFileId( - FileId( - 0, + hir_file_id: FileId( + EditionedFileId( + Id(2000), ), - Edition2021, ), ptr: SyntaxNodePtr { kind: STRUCT, @@ -363,6 +367,7 @@ ), is_alias: false, is_assoc: false, + do_not_complete: Yes, }, FileSymbol { name: "StructInUnnamedConst", @@ -370,17 +375,16 @@ Struct( Struct { id: StructId( - 7, + 4407, ), }, ), ), loc: DeclarationLocation { - hir_file_id: EditionedFileId( - FileId( - 0, + hir_file_id: FileId( + EditionedFileId( + Id(2000), ), - Edition2021, ), ptr: SyntaxNodePtr { kind: STRUCT, @@ -396,6 +400,7 @@ container_name: None, is_alias: false, is_assoc: false, + do_not_complete: Yes, }, FileSymbol { name: "StructT", @@ -403,17 +408,16 @@ Struct( Struct { id: StructId( - 2, + 4402, ), }, ), ), loc: DeclarationLocation { - hir_file_id: EditionedFileId( - FileId( - 0, + hir_file_id: FileId( + EditionedFileId( + Id(2000), ), - Edition2021, ), ptr: SyntaxNodePtr { kind: STRUCT, @@ -429,22 +433,22 @@ container_name: None, is_alias: false, is_assoc: false, + do_not_complete: Yes, }, FileSymbol { name: "Trait", def: Trait( Trait { id: TraitId( - 0, + 5800, ), }, ), loc: DeclarationLocation { - hir_file_id: EditionedFileId( - FileId( - 0, + hir_file_id: FileId( + EditionedFileId( + Id(2000), ), - Edition2021, ), ptr: SyntaxNodePtr { kind: TRAIT, @@ -460,6 +464,7 @@ container_name: None, is_alias: false, is_assoc: false, + do_not_complete: Yes, }, FileSymbol { name: "Trait", @@ -467,17 +472,16 @@ Macro { id: Macro2Id( Macro2Id( - 0, + 4800, ), ), }, ), loc: DeclarationLocation { - hir_file_id: EditionedFileId( - FileId( - 0, + hir_file_id: FileId( + EditionedFileId( + Id(2000), ), - Edition2021, ), ptr: SyntaxNodePtr { kind: USE_TREE, @@ -493,6 +497,7 @@ container_name: None, is_alias: false, is_assoc: false, + do_not_complete: Yes, }, FileSymbol { name: "Union", @@ -500,17 +505,16 @@ Union( Union { id: UnionId( - 0, + 5000, ), }, ), ), loc: DeclarationLocation { - hir_file_id: EditionedFileId( - FileId( - 0, + hir_file_id: FileId( + EditionedFileId( + Id(2000), ), - Edition2021, ), ptr: SyntaxNodePtr { kind: UNION, @@ -526,24 +530,26 @@ container_name: None, is_alias: false, is_assoc: false, + do_not_complete: Yes, }, FileSymbol { name: "a_mod", def: Module( Module { id: ModuleId { - krate: Idx::(0), + krate: Crate( + Id(3000), + ), block: None, local_id: Idx::(1), }, }, ), loc: DeclarationLocation { - hir_file_id: EditionedFileId( - FileId( - 0, + hir_file_id: FileId( + EditionedFileId( + Id(2000), ), - Edition2021, ), ptr: SyntaxNodePtr { kind: MODULE, @@ -559,24 +565,26 @@ container_name: None, is_alias: false, is_assoc: false, + do_not_complete: Yes, }, FileSymbol { name: "b_mod", def: Module( Module { id: ModuleId { - krate: Idx::(0), + krate: Crate( + Id(3000), + ), block: None, local_id: Idx::(2), }, }, ), loc: DeclarationLocation { - hir_file_id: EditionedFileId( - FileId( - 0, + hir_file_id: FileId( + EditionedFileId( + Id(2000), ), - Edition2021, ), ptr: SyntaxNodePtr { kind: MODULE, @@ -592,6 +600,7 @@ container_name: None, is_alias: false, is_assoc: false, + do_not_complete: Yes, }, FileSymbol { name: "define_struct", @@ -599,17 +608,16 @@ Macro { id: MacroRulesId( MacroRulesId( - 1, + 3401, ), ), }, ), loc: DeclarationLocation { - hir_file_id: EditionedFileId( - FileId( - 0, + hir_file_id: FileId( + EditionedFileId( + Id(2000), ), - Edition2021, ), ptr: SyntaxNodePtr { kind: MACRO_RULES, @@ -625,22 +633,22 @@ container_name: None, is_alias: false, is_assoc: false, + do_not_complete: Yes, }, FileSymbol { name: "generic_impl_fn", def: Function( Function { id: FunctionId( - 2, + 5c02, ), }, ), loc: DeclarationLocation { - hir_file_id: EditionedFileId( - FileId( - 0, + hir_file_id: FileId( + EditionedFileId( + Id(2000), ), - Edition2021, ), ptr: SyntaxNodePtr { kind: FN, @@ -658,22 +666,22 @@ ), is_alias: false, is_assoc: true, + do_not_complete: Yes, }, FileSymbol { name: "impl_fn", def: Function( Function { id: FunctionId( - 1, + 5c01, ), }, ), loc: DeclarationLocation { - hir_file_id: EditionedFileId( - FileId( - 0, + hir_file_id: FileId( + EditionedFileId( + Id(2000), ), - Edition2021, ), ptr: SyntaxNodePtr { kind: FN, @@ -691,6 +699,7 @@ ), is_alias: false, is_assoc: true, + do_not_complete: Yes, }, FileSymbol { name: "macro_rules_macro", @@ -698,17 +707,16 @@ Macro { id: MacroRulesId( MacroRulesId( - 0, + 3400, ), ), }, ), loc: DeclarationLocation { - hir_file_id: EditionedFileId( - FileId( - 0, + hir_file_id: FileId( + EditionedFileId( + Id(2000), ), - Edition2021, ), ptr: SyntaxNodePtr { kind: MACRO_RULES, @@ -724,22 +732,22 @@ container_name: None, is_alias: false, is_assoc: false, + do_not_complete: Yes, }, FileSymbol { name: "main", def: Function( Function { id: FunctionId( - 0, + 5c00, ), }, ), loc: DeclarationLocation { - hir_file_id: EditionedFileId( - FileId( - 0, + hir_file_id: FileId( + EditionedFileId( + Id(2000), ), - Edition2021, ), ptr: SyntaxNodePtr { kind: FN, @@ -755,6 +763,7 @@ container_name: None, is_alias: false, is_assoc: false, + do_not_complete: Yes, }, FileSymbol { name: "really_define_struct", @@ -762,17 +771,16 @@ Macro { id: MacroRulesId( MacroRulesId( - 1, + 3401, ), ), }, ), loc: DeclarationLocation { - hir_file_id: EditionedFileId( - FileId( - 0, + hir_file_id: FileId( + EditionedFileId( + Id(2000), ), - Edition2021, ), ptr: SyntaxNodePtr { kind: USE_TREE, @@ -788,22 +796,22 @@ container_name: None, is_alias: false, is_assoc: false, + do_not_complete: Yes, }, FileSymbol { name: "trait_fn", def: Function( Function { id: FunctionId( - 3, + 5c03, ), }, ), loc: DeclarationLocation { - hir_file_id: EditionedFileId( - FileId( - 0, + hir_file_id: FileId( + EditionedFileId( + Id(2000), ), - Edition2021, ), ptr: SyntaxNodePtr { kind: FN, @@ -821,13 +829,16 @@ ), is_alias: false, is_assoc: true, + do_not_complete: Yes, }, ], ), ( Module { id: ModuleId { - krate: Idx::(0), + krate: Crate( + Id(3000), + ), block: None, local_id: Idx::(1), }, @@ -839,17 +850,16 @@ Struct( Struct { id: StructId( - 3, + 4403, ), }, ), ), loc: DeclarationLocation { - hir_file_id: EditionedFileId( - FileId( - 0, + hir_file_id: FileId( + EditionedFileId( + Id(2000), ), - Edition2021, ), ptr: SyntaxNodePtr { kind: STRUCT, @@ -865,13 +875,16 @@ container_name: None, is_alias: false, is_assoc: false, + do_not_complete: Yes, }, ], ), ( Module { id: ModuleId { - krate: Idx::(0), + krate: Crate( + Id(3000), + ), block: None, local_id: Idx::(2), }, @@ -882,16 +895,15 @@ def: Trait( Trait { id: TraitId( - 0, + 5800, ), }, ), loc: DeclarationLocation { - hir_file_id: EditionedFileId( - FileId( - 1, + hir_file_id: FileId( + EditionedFileId( + Id(2001), ), - Edition2021, ), ptr: SyntaxNodePtr { kind: USE_TREE, @@ -907,6 +919,7 @@ container_name: None, is_alias: false, is_assoc: false, + do_not_complete: Yes, }, FileSymbol { name: "IsThisJustATrait", @@ -914,17 +927,16 @@ Macro { id: Macro2Id( Macro2Id( - 0, + 4800, ), ), }, ), loc: DeclarationLocation { - hir_file_id: EditionedFileId( - FileId( - 1, + hir_file_id: FileId( + EditionedFileId( + Id(2001), ), - Edition2021, ), ptr: SyntaxNodePtr { kind: USE_TREE, @@ -940,6 +952,7 @@ container_name: None, is_alias: false, is_assoc: false, + do_not_complete: Yes, }, FileSymbol { name: "StructInModB", @@ -947,17 +960,16 @@ Struct( Struct { id: StructId( - 4, + 4404, ), }, ), ), loc: DeclarationLocation { - hir_file_id: EditionedFileId( - FileId( - 1, + hir_file_id: FileId( + EditionedFileId( + Id(2001), ), - Edition2021, ), ptr: SyntaxNodePtr { kind: STRUCT, @@ -973,6 +985,7 @@ container_name: None, is_alias: false, is_assoc: false, + do_not_complete: Yes, }, FileSymbol { name: "SuperItemLikeMacro", @@ -980,17 +993,16 @@ Macro { id: Macro2Id( Macro2Id( - 0, + 4800, ), ), }, ), loc: DeclarationLocation { - hir_file_id: EditionedFileId( - FileId( - 1, + hir_file_id: FileId( + EditionedFileId( + Id(2001), ), - Edition2021, ), ptr: SyntaxNodePtr { kind: USE_TREE, @@ -1006,6 +1018,7 @@ container_name: None, is_alias: false, is_assoc: false, + do_not_complete: Yes, }, FileSymbol { name: "ThisStruct", @@ -1013,17 +1026,16 @@ Struct( Struct { id: StructId( - 4, + 4404, ), }, ), ), loc: DeclarationLocation { - hir_file_id: EditionedFileId( - FileId( - 1, + hir_file_id: FileId( + EditionedFileId( + Id(2001), ), - Edition2021, ), ptr: SyntaxNodePtr { kind: USE_TREE, @@ -1039,6 +1051,7 @@ container_name: None, is_alias: false, is_assoc: false, + do_not_complete: Yes, }, ], ), diff --git a/src/tools/rust-analyzer/crates/ide-db/src/text_edit.rs b/src/tools/rust-analyzer/crates/ide-db/src/text_edit.rs index b59010f2f8c83..6e9bd7bdcc21a 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/text_edit.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/text_edit.rs @@ -18,18 +18,19 @@ pub struct Indel { pub insert: String, /// Refers to offsets in the original text pub delete: TextRange, - pub annotation: Option, } #[derive(Default, Debug, Clone)] pub struct TextEdit { /// Invariant: disjoint and sorted by `delete`. indels: Vec, + annotation: Option, } #[derive(Debug, Default, Clone)] pub struct TextEditBuilder { indels: Vec, + annotation: Option, } impl Indel { @@ -40,7 +41,7 @@ impl Indel { Indel::replace(range, String::new()) } pub fn replace(range: TextRange, replace_with: String) -> Indel { - Indel { delete: range, insert: replace_with, annotation: None } + Indel { delete: range, insert: replace_with } } pub fn apply(&self, text: &mut String) { @@ -142,12 +143,12 @@ impl TextEdit { Some(res) } - pub fn set_annotation(&mut self, annotation: Option) { - if annotation.is_some() { - for indel in &mut self.indels { - indel.annotation = annotation; - } - } + pub(crate) fn set_annotation(&mut self, conflict_annotation: Option) { + self.annotation = conflict_annotation; + } + + pub fn change_annotation(&self) -> Option { + self.annotation } } @@ -183,10 +184,10 @@ impl TextEditBuilder { self.indel(Indel::insert(offset, text)); } pub fn finish(self) -> TextEdit { - let mut indels = self.indels; + let TextEditBuilder { mut indels, annotation } = self; assert_disjoint_or_equal(&mut indels); indels = coalesce_indels(indels); - TextEdit { indels } + TextEdit { indels, annotation } } pub fn invalidates_offset(&self, offset: TextSize) -> bool { self.indels.iter().any(|indel| indel.delete.contains_inclusive(offset)) diff --git a/src/tools/rust-analyzer/crates/ide-db/src/traits.rs b/src/tools/rust-analyzer/crates/ide-db/src/traits.rs index 0f67496d0989e..61e28386d0721 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/traits.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/traits.rs @@ -1,9 +1,9 @@ //! Functionality for obtaining data related to traits from the DB. -use crate::{defs::Definition, RootDatabase}; -use hir::{db::HirDatabase, AsAssocItem, Semantics}; +use crate::{RootDatabase, defs::Definition}; +use hir::{AsAssocItem, Semantics, db::HirDatabase}; use rustc_hash::FxHashSet; -use syntax::{ast, AstNode}; +use syntax::{AstNode, ast}; /// Given the `impl` block, attempts to find the trait this `impl` corresponds to. pub fn resolve_target_trait( @@ -113,7 +113,7 @@ fn assoc_item_of_trait( #[cfg(test)] mod tests { - use expect_test::{expect, Expect}; + use expect_test::{Expect, expect}; use hir::FilePosition; use hir::Semantics; use span::Edition; @@ -126,8 +126,8 @@ mod tests { pub(crate) fn position( #[rust_analyzer::rust_fixture] ra_fixture: &str, ) -> (RootDatabase, FilePosition) { - let change_fixture = ChangeFixture::parse(ra_fixture); let mut database = RootDatabase::default(); + let change_fixture = ChangeFixture::parse(&database, ra_fixture); database.apply_change(change_fixture.change); let (file_id, range_or_offset) = change_fixture.file_position.expect("expected a marker ($0)"); @@ -138,6 +138,7 @@ mod tests { fn check_trait(#[rust_analyzer::rust_fixture] ra_fixture: &str, expect: Expect) { let (db, position) = position(ra_fixture); let sema = Semantics::new(&db); + let file = sema.parse(position.file_id); let impl_block: ast::Impl = sema.find_node_at_offset_with_descend(file.syntax(), position.offset).unwrap(); @@ -152,6 +153,7 @@ mod tests { fn check_missing_assoc(#[rust_analyzer::rust_fixture] ra_fixture: &str, expect: Expect) { let (db, position) = position(ra_fixture); let sema = Semantics::new(&db); + let file = sema.parse(position.file_id); let impl_block: ast::Impl = sema.find_node_at_offset_with_descend(file.syntax(), position.offset).unwrap(); diff --git a/src/tools/rust-analyzer/crates/ide-db/src/ty_filter.rs b/src/tools/rust-analyzer/crates/ide-db/src/ty_filter.rs index 2fdd8358637df..63ce0ddbb8fc2 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/ty_filter.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/ty_filter.rs @@ -5,7 +5,7 @@ use std::iter; use hir::Semantics; -use syntax::ast::{self, make, Pat}; +use syntax::ast::{self, Pat, make}; use crate::RootDatabase; diff --git a/src/tools/rust-analyzer/crates/ide-db/src/use_trivial_constructor.rs b/src/tools/rust-analyzer/crates/ide-db/src/use_trivial_constructor.rs index c3f0bf3706904..a4a93e36f0e1a 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/use_trivial_constructor.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/use_trivial_constructor.rs @@ -3,8 +3,8 @@ use hir::StructKind; use span::Edition; use syntax::{ - ast::{make, Expr, Path}, ToSmolStr, + ast::{Expr, Path, make}, }; /// given a type return the trivial constructor (if one exists) diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/Cargo.toml b/src/tools/rust-analyzer/crates/ide-diagnostics/Cargo.toml index 483cb6df86236..96be51e1b2666 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/Cargo.toml +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/Cargo.toml @@ -12,7 +12,7 @@ rust-version.workspace = true [lib] [dependencies] -cov-mark = "2.0.0-pre.1" +cov-mark = "2.0.0" either.workspace = true itertools.workspace = true serde_json.workspace = true @@ -27,7 +27,7 @@ ide-db.workspace = true paths.workspace = true [dev-dependencies] -expect-test = "1.4.0" +expect-test = "1.5.1" # local deps test-utils.workspace = true diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/await_outside_of_async.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/await_outside_of_async.rs index 92b6e748ca5ed..92ca7a74184fb 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/await_outside_of_async.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/await_outside_of_async.rs @@ -1,4 +1,4 @@ -use crate::{adjusted_display_range, Diagnostic, DiagnosticsContext}; +use crate::{Diagnostic, DiagnosticsContext, adjusted_display_range}; // Diagnostic: await-outside-of-async // diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/bad_rtn.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/bad_rtn.rs new file mode 100644 index 0000000000000..9ed85f9f208e8 --- /dev/null +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/bad_rtn.rs @@ -0,0 +1,52 @@ +use ide_db::Severity; + +use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext}; + +// Diagnostic: bad-rtn +// +// This diagnostic is shown when a RTN (Return Type Notation, `Type::method(..): Send`) is written in an improper place. +pub(crate) fn bad_rtn(ctx: &DiagnosticsContext<'_>, d: &hir::BadRtn) -> Diagnostic { + Diagnostic::new_with_syntax_node_ptr( + ctx, + DiagnosticCode::Ra("bad-rtn", Severity::Error), + "return type notation not allowed in this position yet", + d.rtn.map(Into::into), + ) +} + +#[cfg(test)] +mod tests { + use crate::tests::check_diagnostics; + + #[test] + fn fn_traits_also_emit() { + check_diagnostics( + r#" +//- minicore: fn +fn foo< + A: Fn(..), + // ^^^^ error: return type notation not allowed in this position yet +>() {} + "#, + ); + } + + #[test] + fn bad_rtn() { + check_diagnostics( + r#" +mod module { + pub struct Type; +} +trait Trait {} + +fn foo() +where + module(..)::Type: Trait + // ^^^^ error: return type notation not allowed in this position yet +{ +} + "#, + ); + } +} diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/elided_lifetimes_in_path.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/elided_lifetimes_in_path.rs new file mode 100644 index 0000000000000..438dd2fdcb6c0 --- /dev/null +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/elided_lifetimes_in_path.rs @@ -0,0 +1,112 @@ +use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext}; + +// Diagnostic: elided-lifetimes-in-path +// +// This diagnostic is triggered when lifetimes are elided in paths. It is a lint only for some cases, +// and a hard error for others. +pub(crate) fn elided_lifetimes_in_path( + ctx: &DiagnosticsContext<'_>, + d: &hir::ElidedLifetimesInPath, +) -> Diagnostic { + if d.hard_error { + Diagnostic::new_with_syntax_node_ptr( + ctx, + DiagnosticCode::RustcHardError("E0726"), + "implicit elided lifetime not allowed here", + d.generics_or_segment.map(Into::into), + ) + .experimental() + } else { + Diagnostic::new_with_syntax_node_ptr( + ctx, + DiagnosticCode::RustcLint("elided_lifetimes_in_paths"), + "hidden lifetime parameters in types are deprecated", + d.generics_or_segment.map(Into::into), + ) + .experimental() + } +} + +#[cfg(test)] +mod tests { + use crate::tests::check_diagnostics; + + #[test] + fn fn_() { + check_diagnostics( + r#" +#![warn(elided_lifetimes_in_paths)] + +struct Foo<'a>(&'a ()); + +fn foo(_: Foo) {} + // ^^^ warn: hidden lifetime parameters in types are deprecated + "#, + ); + check_diagnostics( + r#" +#![warn(elided_lifetimes_in_paths)] + +struct Foo<'a>(&'a ()); + +fn foo(_: Foo<'_>) -> Foo { loop {} } + // ^^^ warn: hidden lifetime parameters in types are deprecated + "#, + ); + } + + #[test] + fn async_fn() { + check_diagnostics( + r#" +struct Foo<'a>(&'a ()); + +async fn foo(_: Foo) {} + // ^^^ error: implicit elided lifetime not allowed here + "#, + ); + check_diagnostics( + r#" +#![warn(elided_lifetimes_in_paths)] + +struct Foo<'a>(&'a ()); + +fn foo(_: Foo<'_>) -> Foo { loop {} } + // ^^^ warn: hidden lifetime parameters in types are deprecated + "#, + ); + } + + #[test] + fn no_error_when_explicitly_elided() { + check_diagnostics( + r#" +#![warn(elided_lifetimes_in_paths)] + +struct Foo<'a>(&'a ()); +trait Trait<'a> {} + +fn foo(_: Foo<'_>) -> Foo<'_> { loop {} } +async fn bar(_: Foo<'_>) -> Foo<'_> { loop {} } +impl Foo<'_> {} +impl Trait<'_> for Foo<'_> {} + "#, + ); + } + + #[test] + fn impl_() { + check_diagnostics( + r#" +struct Foo<'a>(&'a ()); +trait Trait<'a> {} + +impl Foo {} + // ^^^ error: implicit elided lifetime not allowed here + +impl Trait for Foo<'_> {} + // ^^^^^ error: implicit elided lifetime not allowed here + "#, + ); + } +} diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/expected_function.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/expected_function.rs index af25c2b2e3329..a6da0fd9c5e31 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/expected_function.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/expected_function.rs @@ -31,7 +31,7 @@ fn foo() { x(); // ^^^ error: expected function, found i32 ""(); - // ^^^^ error: expected function, found &str + // ^^^^ error: expected function, found &'static str foo(); } "#, diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/field_shorthand.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/field_shorthand.rs index 876c2ccd49d7a..1dc6a7bf9cae7 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/field_shorthand.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/field_shorthand.rs @@ -1,27 +1,30 @@ //! Suggests shortening `Foo { field: field }` to `Foo { field }` in both //! expressions and patterns. +use ide_db::RootDatabase; use ide_db::text_edit::TextEdit; -use ide_db::{source_change::SourceChange, EditionedFileId, FileRange}; -use syntax::{ast, match_ast, AstNode, SyntaxNode}; +use ide_db::{EditionedFileId, FileRange, source_change::SourceChange}; +use syntax::{AstNode, SyntaxNode, ast, match_ast}; -use crate::{fix, Diagnostic, DiagnosticCode}; +use crate::{Diagnostic, DiagnosticCode, fix}; pub(crate) fn field_shorthand( + db: &RootDatabase, acc: &mut Vec, file_id: EditionedFileId, node: &SyntaxNode, ) { match_ast! { match node { - ast::RecordExpr(it) => check_expr_field_shorthand(acc, file_id, it), - ast::RecordPat(it) => check_pat_field_shorthand(acc, file_id, it), + ast::RecordExpr(it) => check_expr_field_shorthand(db, acc, file_id, it), + ast::RecordPat(it) => check_pat_field_shorthand(db, acc, file_id, it), _ => () } }; } fn check_expr_field_shorthand( + db: &RootDatabase, acc: &mut Vec, file_id: EditionedFileId, record_expr: ast::RecordExpr, @@ -49,16 +52,17 @@ fn check_expr_field_shorthand( let edit = edit_builder.finish(); let field_range = record_field.syntax().text_range(); + let vfs_file_id = file_id.file_id(db); acc.push( Diagnostic::new( DiagnosticCode::Clippy("redundant_field_names"), "Shorthand struct initialization", - FileRange { file_id: file_id.into(), range: field_range }, + FileRange { file_id: vfs_file_id, range: field_range }, ) .with_fixes(Some(vec![fix( "use_expr_field_shorthand", "Use struct shorthand initialization", - SourceChange::from_text_edit(file_id, edit), + SourceChange::from_text_edit(vfs_file_id, edit), field_range, )])), ); @@ -66,6 +70,7 @@ fn check_expr_field_shorthand( } fn check_pat_field_shorthand( + db: &RootDatabase, acc: &mut Vec, file_id: EditionedFileId, record_pat: ast::RecordPat, @@ -93,16 +98,17 @@ fn check_pat_field_shorthand( let edit = edit_builder.finish(); let field_range = record_pat_field.syntax().text_range(); + let vfs_file_id = file_id.file_id(db); acc.push( Diagnostic::new( DiagnosticCode::Clippy("redundant_field_names"), "Shorthand struct pattern", - FileRange { file_id: file_id.into(), range: field_range }, + FileRange { file_id: vfs_file_id, range: field_range }, ) .with_fixes(Some(vec![fix( "use_pat_field_shorthand", "Use struct field shorthand", - SourceChange::from_text_edit(file_id, edit), + SourceChange::from_text_edit(vfs_file_id, edit), field_range, )])), ); diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/generic_args_prohibited.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/generic_args_prohibited.rs index 7d62daf716c47..b617c09498393 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/generic_args_prohibited.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/generic_args_prohibited.rs @@ -3,9 +3,9 @@ use hir::GenericArgsProhibitedReason; use ide_db::assists::Assist; use ide_db::source_change::SourceChange; use ide_db::text_edit::TextEdit; -use syntax::{ast, AstNode, TextRange}; +use syntax::{AstNode, TextRange, ast}; -use crate::{fix, Diagnostic, DiagnosticCode, DiagnosticsContext}; +use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext, fix}; // Diagnostic: generic-args-prohibited // @@ -36,6 +36,7 @@ fn describe_reason(reason: GenericArgsProhibitedReason) -> String { } GenericArgsProhibitedReason::Const => "constants", GenericArgsProhibitedReason::Static => "statics", + GenericArgsProhibitedReason::LocalVariable => "local variables", }; format!("generic arguments are not allowed on {kind}") } @@ -63,7 +64,7 @@ fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::GenericArgsProhibited) -> Option Some(vec![fix( "remove_generic_args", "Remove these generics", - SourceChange::from_text_edit(file_id, TextEdit::delete(range)), + SourceChange::from_text_edit(file_id.file_id(ctx.sema.db), TextEdit::delete(range)), syntax.syntax().text_range(), )]) } @@ -320,7 +321,7 @@ trait E::Trait> // ^^^^^ 💡 error: generic arguments are not allowed on builtin types } -impl::Trait> E for () +impl::Trait> E<()> for () // ^^^^^^ 💡 error: generic arguments are not allowed on modules where bool: foo::Trait // ^^^^^ 💡 error: generic arguments are not allowed on builtin types @@ -518,14 +519,14 @@ fn baz() { } #[test] - fn const_and_static() { + fn const_param_and_static() { check_diagnostics( r#" const CONST: i32 = 0; static STATIC: i32 = 0; -fn baz() { - let _ = CONST::<()>; - // ^^^^^^ 💡 error: generic arguments are not allowed on constants +fn baz() { + let _ = CONST_PARAM::<()>; + // ^^^^^^ 💡 error: generic arguments are not allowed on constants let _ = STATIC::<()>; // ^^^^^^ 💡 error: generic arguments are not allowed on statics } @@ -533,6 +534,19 @@ fn baz() { ); } + #[test] + fn local_variable() { + check_diagnostics( + r#" +fn baz() { + let x = 1; + let _ = x::<()>; + // ^^^^^^ 💡 error: generic arguments are not allowed on local variables +} + "#, + ); + } + #[test] fn enum_variant() { check_diagnostics( diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/inactive_code.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/inactive_code.rs index 96a368eb0ea3f..47e1c84fecd0a 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/inactive_code.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/inactive_code.rs @@ -39,8 +39,9 @@ pub(crate) fn inactive_code( #[cfg(test)] mod tests { - use crate::{tests::check_diagnostics_with_config, DiagnosticsConfig}; + use crate::{DiagnosticsConfig, tests::check_diagnostics_with_config}; + #[track_caller] pub(crate) fn check(#[rust_analyzer::rust_fixture] ra_fixture: &str) { let config = DiagnosticsConfig { disabled: std::iter::once("unlinked-file".to_owned()).collect(), diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/incoherent_impl.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/incoherent_impl.rs index d3f302077528a..0b9a2ec9db3dd 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/incoherent_impl.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/incoherent_impl.rs @@ -1,7 +1,7 @@ use hir::InFile; use syntax::{AstNode, TextRange}; -use crate::{adjusted_display_range, Diagnostic, DiagnosticCode, DiagnosticsContext}; +use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext, adjusted_display_range}; // Diagnostic: incoherent-impl // diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/incorrect_case.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/incorrect_case.rs index 246330e6efaac..289a076573252 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/incorrect_case.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/incorrect_case.rs @@ -1,13 +1,13 @@ -use hir::{db::ExpandDatabase, CaseType, InFile}; +use hir::{CaseType, InFile, db::ExpandDatabase}; use ide_db::{assists::Assist, defs::NameClass}; use syntax::AstNode; use crate::{ - // references::rename::rename_with_semantics, - unresolved_fix, Diagnostic, DiagnosticCode, DiagnosticsContext, + // references::rename::rename_with_semantics, + unresolved_fix, }; // Diagnostic: incorrect-ident-case @@ -786,6 +786,8 @@ static FOO: () = { } #[test] + // FIXME + #[should_panic] fn enum_variant_body_inner_item() { check_diagnostics( r#" diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/incorrect_generics_len.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/incorrect_generics_len.rs new file mode 100644 index 0000000000000..17c7f75880c90 --- /dev/null +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/incorrect_generics_len.rs @@ -0,0 +1,187 @@ +use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext}; +use hir::IncorrectGenericsLenKind; + +// Diagnostic: incorrect-generics-len +// +// This diagnostic is triggered if the number of generic arguments does not match their declaration. +pub(crate) fn incorrect_generics_len( + ctx: &DiagnosticsContext<'_>, + d: &hir::IncorrectGenericsLen, +) -> Diagnostic { + let owner_description = d.def.description(); + let expected = d.expected; + let provided = d.provided; + let kind_description = match d.kind { + IncorrectGenericsLenKind::Lifetimes => "lifetime", + IncorrectGenericsLenKind::TypesAndConsts => "generic", + }; + let message = format!( + "this {owner_description} takes {expected} {kind_description} argument{} \ + but {provided} {kind_description} argument{} {} supplied", + if expected == 1 { "" } else { "s" }, + if provided == 1 { "" } else { "s" }, + if provided == 1 { "was" } else { "were" }, + ); + Diagnostic::new_with_syntax_node_ptr( + ctx, + DiagnosticCode::RustcHardError("E0107"), + message, + d.generics_or_segment.map(Into::into), + ) + .experimental() +} + +#[cfg(test)] +mod tests { + use crate::tests::check_diagnostics; + + #[test] + fn partially_specified_generics() { + check_diagnostics( + r#" +struct Bar(T, U); + +fn foo() { + let _ = Bar::<()>; + // ^^^^^^ error: this struct takes 2 generic arguments but 1 generic argument was supplied +} + + "#, + ); + } + + #[test] + fn enum_variant() { + check_diagnostics( + r#" +enum Enum { + Variant(T, U), +} + +fn foo() { + let _ = Enum::<()>::Variant; + // ^^^^^^ error: this enum takes 2 generic arguments but 1 generic argument was supplied + let _ = Enum::Variant::<()>; + // ^^^^^^ error: this enum takes 2 generic arguments but 1 generic argument was supplied +} + + "#, + ); + } + + #[test] + fn lifetimes() { + check_diagnostics( + r#" +struct Foo<'a, 'b>(&'a &'b ()); + +fn foo(Foo(_): Foo) -> Foo { + let _: Foo = Foo(&&()); + let _: Foo::<> = Foo::<>(&&()); + let _: Foo::<'static> + // ^^^^^^^^^^^ error: this struct takes 2 lifetime arguments but 1 lifetime argument was supplied + = Foo::<'static>(&&()); + // ^^^^^^^^^^^ error: this struct takes 2 lifetime arguments but 1 lifetime argument was supplied + |_: Foo| -> Foo {loop{}}; + + loop {} +} + + "#, + ); + } + + #[test] + fn no_error_for_elided_lifetimes() { + check_diagnostics( + r#" +struct Foo<'a>(&'a ()); + +fn foo(_v: &()) -> Foo { loop {} } + "#, + ); + } + + #[test] + fn errs_for_elided_lifetimes_if_lifetimes_are_explicitly_provided() { + check_diagnostics( + r#" +struct Foo<'a, 'b>(&'a &'b ()); + +fn foo(_v: Foo<'_> + // ^^^^ error: this struct takes 2 lifetime arguments but 1 lifetime argument was supplied +) -> Foo<'static> { loop {} } + // ^^^^^^^^^ error: this struct takes 2 lifetime arguments but 1 lifetime argument was supplied + "#, + ); + } + + #[test] + fn types_and_consts() { + check_diagnostics( + r#" +struct Foo<'a, T>(&'a T); +fn foo(_v: Foo) {} + // ^^^ error: this struct takes 1 generic argument but 0 generic arguments were supplied + +struct Bar(T); +fn bar() { + let _ = Bar::<()>; + // ^^^^^^ error: this struct takes 2 generic arguments but 1 generic argument was supplied +} + "#, + ); + } + + #[test] + fn respects_defaults() { + check_diagnostics( + r#" +struct Foo(T); +fn foo(_v: Foo) {} + +struct Bar(T); +fn bar(_v: Bar<()>) {} + "#, + ); + } + + #[test] + fn constant() { + check_diagnostics( + r#" +const CONST: i32 = 0; +fn baz() { + let _ = CONST::<()>; + // ^^^^^^ error: this constant takes 0 generic arguments but 1 generic argument was supplied +} + "#, + ); + } + + #[test] + fn assoc_type() { + check_diagnostics( + r#" +trait Trait { + type Assoc; +} + +fn foo = bool>>() {} + // ^^^^^ error: this type alias takes 0 generic arguments but 1 generic argument was supplied + "#, + ); + } + + #[test] + fn regression_19669() { + check_diagnostics( + r#" +//- minicore: from +fn main() { + let _: i32 = Into::into(0); +} +"#, + ); + } +} diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/incorrect_generics_order.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/incorrect_generics_order.rs new file mode 100644 index 0000000000000..84496df2d7cfb --- /dev/null +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/incorrect_generics_order.rs @@ -0,0 +1,80 @@ +use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext}; +use hir::GenericArgKind; +use syntax::SyntaxKind; + +// Diagnostic: incorrect-generics-order +// +// This diagnostic is triggered the order of provided generic arguments does not match their declaration. +pub(crate) fn incorrect_generics_order( + ctx: &DiagnosticsContext<'_>, + d: &hir::IncorrectGenericsOrder, +) -> Diagnostic { + let provided_description = match d.provided_arg.value.kind() { + SyntaxKind::CONST_ARG => "constant", + SyntaxKind::LIFETIME_ARG => "lifetime", + SyntaxKind::TYPE_ARG => "type", + _ => panic!("non-generic-arg passed to `incorrect_generics_order()`"), + }; + let expected_description = match d.expected_kind { + GenericArgKind::Lifetime => "lifetime", + GenericArgKind::Type => "type", + GenericArgKind::Const => "constant", + }; + let message = + format!("{provided_description} provided when a {expected_description} was expected"); + Diagnostic::new_with_syntax_node_ptr( + ctx, + DiagnosticCode::RustcHardError("E0747"), + message, + d.provided_arg.map(Into::into), + ) +} + +#[cfg(test)] +mod tests { + use crate::tests::check_diagnostics; + + #[test] + fn lifetime_out_of_order() { + check_diagnostics( + r#" +struct Foo<'a, T>(&'a T); + +fn bar(_v: Foo<(), 'static>) {} + // ^^ error: type provided when a lifetime was expected + "#, + ); + } + + #[test] + fn types_and_consts() { + check_diagnostics( + r#" +struct Foo(T); +fn foo1(_v: Foo<1>) {} + // ^ error: constant provided when a type was expected +fn foo2(_v: Foo<{ (1, 2) }>) {} + // ^^^^^^^^^^ error: constant provided when a type was expected + +struct Bar; +fn bar(_v: Bar<()>) {} + // ^^ error: type provided when a constant was expected + +struct Baz(T); +fn baz(_v: Baz<1, ()>) {} + // ^ error: constant provided when a type was expected + "#, + ); + } + + #[test] + fn no_error_when_num_incorrect() { + check_diagnostics( + r#" +struct Baz(T, U); +fn baz(_v: Baz<1>) {} + // ^^^ error: this struct takes 2 generic arguments but 1 generic argument was supplied + "#, + ); + } +} diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/invalid_cast.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/invalid_cast.rs index 82cd1f2fde6db..d72b21099ce35 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/invalid_cast.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/invalid_cast.rs @@ -166,7 +166,7 @@ fn main() { let _ = ptr as bool; //^^^^^^^^^^^ error: cannot cast `*const ()` as `bool` let v = "hello" as bool; - //^^^^^^^^^^^^^^^ error: casting `&str` as `bool` is invalid: needs casting through a raw pointer first + //^^^^^^^^^^^^^^^ error: casting `&'static str` as `bool` is invalid: needs casting through a raw pointer first } "#, ); @@ -440,8 +440,9 @@ fn main() { q as *const [i32]; //^^^^^^^^^^^^^^^^^ error: cannot cast thin pointer `*const i32` to fat pointer `*const [i32]` + // FIXME: This should emit diagnostics but disabled to prevent many false positives let t: *mut (dyn Trait + 'static) = 0 as *mut _; - //^^^^^^^^^^^ error: cannot cast `usize` to a fat pointer `*mut _` + let mut fail: *const str = 0 as *const str; //^^^^^^^^^^^^^^^ error: cannot cast `usize` to a fat pointer `*const str` let mut fail2: *const str = 0isize as *const str; @@ -955,7 +956,7 @@ fn main() { fn main() { let pointer: usize = &1_i32 as *const i32 as usize; let _reference: &'static i32 = unsafe { pointer as *const i32 as &'static i32 }; - //^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: non-primitive cast: `*const i32` as `&i32` + //^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: non-primitive cast: `*const i32` as `&'static i32` } "#, ); @@ -991,7 +992,7 @@ impl Deref for Foo { fn main() { let _ = "foo" as bool; - //^^^^^^^^^^^^^ error: casting `&str` as `bool` is invalid: needs casting through a raw pointer first + //^^^^^^^^^^^^^ error: casting `&'static str` as `bool` is invalid: needs casting through a raw pointer first let _ = Foo as bool; //^^^^^^^^^^^ error: non-primitive cast: `Foo` as `bool` @@ -1161,6 +1162,49 @@ struct ZerocopyKnownLayoutMaybeUninit(<::Type as KnownLayout> fn test(ptr: *mut [u8]) -> *mut ZerocopyKnownLayoutMaybeUninit { ptr as *mut _ } +"#, + ); + } + + #[test] + fn regression_19431() { + check_diagnostics( + r#" +//- minicore: coerce_unsized +struct Dst([u8]); + +struct Struct { + body: Dst, +} + +trait Field { + type Type: ?Sized; +} + +impl Field for Struct { + type Type = Dst; +} + +trait KnownLayout { + type MaybeUninit: ?Sized; + type PointerMetadata; +} + +impl KnownLayout for [T] { + type MaybeUninit = [T]; + type PointerMetadata = usize; +} + +impl KnownLayout for Dst { + type MaybeUninit = Dst; + type PointerMetadata = <[u8] as KnownLayout>::PointerMetadata; +} + +struct ZerocopyKnownLayoutMaybeUninit(<::Type as KnownLayout>::MaybeUninit); + +fn test(ptr: *mut ZerocopyKnownLayoutMaybeUninit) -> *mut <::Type as KnownLayout>::MaybeUninit { + ptr as *mut _ +} "#, ); } diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/json_is_not_rust.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/json_is_not_rust.rs index f22041ebe233b..ac1b599c49e2a 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/json_is_not_rust.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/json_is_not_rust.rs @@ -4,19 +4,19 @@ use hir::{ImportPathConfig, PathResolution, Semantics}; use ide_db::text_edit::TextEdit; use ide_db::{ + EditionedFileId, FileRange, FxHashMap, RootDatabase, helpers::mod_path_to_ast, - imports::insert_use::{insert_use, ImportScope}, + imports::insert_use::{ImportScope, insert_use}, source_change::SourceChangeBuilder, - EditionedFileId, FileRange, FxHashMap, RootDatabase, }; use itertools::Itertools; use stdx::{format_to, never}; use syntax::{ - ast::{self, make}, Edition, SyntaxKind, SyntaxNode, + ast::{self, make}, }; -use crate::{fix, Diagnostic, DiagnosticCode, DiagnosticsConfig, Severity}; +use crate::{Diagnostic, DiagnosticCode, DiagnosticsConfig, Severity, fix}; #[derive(Default)] struct State { @@ -128,14 +128,15 @@ pub(crate) fn json_in_items( state.has_serialize = serialize_resolved.is_some(); state.build_struct("Root", &it); edit.insert(range.start(), state.result); + let vfs_file_id = file_id.file_id(sema.db); acc.push( Diagnostic::new( DiagnosticCode::Ra("json-is-not-rust", Severity::WeakWarning), "JSON syntax is not valid as a Rust item", - FileRange { file_id: file_id.into(), range }, + FileRange { file_id: vfs_file_id, range }, ) .with_fixes(Some(vec![{ - let mut scb = SourceChangeBuilder::new(file_id); + let mut scb = SourceChangeBuilder::new(vfs_file_id); let scope = match import_scope { ImportScope::File(it) => ImportScope::File(scb.make_mut(it)), ImportScope::Module(it) => ImportScope::Module(scb.make_mut(it)), @@ -183,7 +184,7 @@ pub(crate) fn json_in_items( } } let mut sc = scb.finish(); - sc.insert_source_edit(file_id, edit.finish()); + sc.insert_source_edit(vfs_file_id, edit.finish()); fix("convert_json_to_struct", "Convert JSON to struct", sc, range) }])), ); @@ -196,8 +197,8 @@ pub(crate) fn json_in_items( #[cfg(test)] mod tests { use crate::{ - tests::{check_diagnostics_with_config, check_fix, check_no_fix}, DiagnosticsConfig, + tests::{check_diagnostics_with_config, check_fix, check_no_fix}, }; #[test] diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/macro_error.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/macro_error.rs index 2f132985895c6..a2648a1995d7f 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/macro_error.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/macro_error.rs @@ -38,8 +38,8 @@ pub(crate) fn macro_def_error(ctx: &DiagnosticsContext<'_>, d: &hir::MacroDefErr #[cfg(test)] mod tests { use crate::{ - tests::{check_diagnostics, check_diagnostics_with_config}, DiagnosticsConfig, + tests::{check_diagnostics, check_diagnostics_with_config}, }; #[test] @@ -123,6 +123,7 @@ include!("foo/bar.rs"); #[test] fn good_out_dir_diagnostic() { + // FIXME: The diagnostic here is duplicated for each eager expansion check_diagnostics( r#" #[rustc_builtin_macro] @@ -134,6 +135,8 @@ macro_rules! concat { () => {} } include!(concat!(env!("OUT_DIR"), "/out.rs")); //^^^^^^^^^ error: `OUT_DIR` not set, build scripts may have failed to run + //^^^^^^^^^^^^^^^ error: `OUT_DIR` not set, build scripts may have failed to run + //^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: `OUT_DIR` not set, build scripts may have failed to run "#, ); } @@ -238,6 +241,7 @@ macro_rules! outer { fn f() { outer!(); } //^^^^^^^^ error: leftover tokens + //^^^^^^^^ error: Syntax Error in Expansion: expected expression "#, ) } diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/mismatched_arg_count.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/mismatched_arg_count.rs index 0520bb3fe9b9b..63fd9b4e3f06b 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/mismatched_arg_count.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/mismatched_arg_count.rs @@ -2,11 +2,11 @@ use either::Either; use hir::InFile; use ide_db::FileRange; use syntax::{ - ast::{self, HasArgList}, AstNode, AstPtr, + ast::{self, HasArgList}, }; -use crate::{adjusted_display_range, Diagnostic, DiagnosticCode, DiagnosticsContext}; +use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext, adjusted_display_range}; // Diagnostic: mismatched-tuple-struct-pat-arg-count // diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_fields.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_fields.rs index 938b7182bc946..a354d123f5ab3 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_fields.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_fields.rs @@ -1,20 +1,26 @@ use either::Either; use hir::{ + AssocItem, HirDisplay, ImportPathConfig, InFile, Type, db::{ExpandDatabase, HirDatabase}, - sym, AssocItem, HirDisplay, HirFileIdExt, ImportPathConfig, InFile, Type, + sym, }; use ide_db::{ - assists::Assist, famous_defs::FamousDefs, imports::import_assets::item_for_path_search, - source_change::SourceChange, syntax_helpers::tree_diff::diff, text_edit::TextEdit, - use_trivial_constructor::use_trivial_constructor, FxHashMap, + FxHashMap, + assists::{Assist, ExprFillDefaultMode}, + famous_defs::FamousDefs, + imports::import_assets::item_for_path_search, + source_change::SourceChange, + syntax_helpers::tree_diff::diff, + text_edit::TextEdit, + use_trivial_constructor::use_trivial_constructor, }; use stdx::format_to; use syntax::{ - ast::{self, make}, AstNode, Edition, SyntaxNode, SyntaxNodePtr, ToSmolStr, + ast::{self, make}, }; -use crate::{fix, Diagnostic, DiagnosticCode, DiagnosticsContext}; +use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext, fix}; // Diagnostic: missing-fields // @@ -83,7 +89,7 @@ fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::MissingFields) -> Option, d: &hir::MissingFields) -> Option make::ext::expr_todo(), - crate::ExprFillDefaultMode::Default => { + ExprFillDefaultMode::Todo => make::ext::expr_todo(), + ExprFillDefaultMode::Underscore => make::ext::expr_underscore(), + ExprFillDefaultMode::Default => { get_default_constructor(ctx, d, ty).unwrap_or_else(make::ext::expr_todo) } }; @@ -140,11 +147,7 @@ fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::MissingFields) -> Option, d: &hir::MissingFields) -> Option ast::Type { let ty_str = match ty.as_adt() { - Some(adt) => adt.name(db).display(db.upcast(), edition).to_string(), + Some(adt) => adt.name(db).display(db, edition).to_string(), None => { ty.display_source_code(db, module.into(), false).ok().unwrap_or_else(|| "_".to_owned()) } @@ -209,14 +217,17 @@ fn get_default_constructor( } } - let krate = ctx.sema.file_to_module_def(d.file.original_file(ctx.sema.db))?.krate(); + let krate = ctx + .sema + .file_to_module_def(d.file.original_file(ctx.sema.db).file_id(ctx.sema.db))? + .krate(); let module = krate.root_module(); // Look for a ::new() associated function let has_new_func = ty .iterate_assoc_items(ctx.sema.db, krate, |assoc_item| { if let AssocItem::Function(func) = assoc_item { - if func.name(ctx.sema.db) == sym::new.clone() + if func.name(ctx.sema.db) == sym::new && func.assoc_fn_params(ctx.sema.db).is_empty() { return Some(()); diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_lifetime.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_lifetime.rs new file mode 100644 index 0000000000000..8cdbb6384ff5a --- /dev/null +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_lifetime.rs @@ -0,0 +1,92 @@ +use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext}; + +// Diagnostic: missing-lifetime +// +// This diagnostic is triggered when a lifetime argument is missing. +pub(crate) fn missing_lifetime( + ctx: &DiagnosticsContext<'_>, + d: &hir::MissingLifetime, +) -> Diagnostic { + Diagnostic::new_with_syntax_node_ptr( + ctx, + DiagnosticCode::RustcHardError("E0106"), + "missing lifetime specifier", + d.generics_or_segment.map(Into::into), + ) + .experimental() +} + +#[cfg(test)] +mod tests { + use crate::tests::check_diagnostics; + + #[test] + fn in_fields() { + check_diagnostics( + r#" +struct Foo<'a>(&'a ()); +struct Bar(Foo); + // ^^^ error: missing lifetime specifier + "#, + ); + } + + #[test] + fn bounds() { + check_diagnostics( + r#" +struct Foo<'a, T>(&'a T); +trait Trait<'a> { + type Assoc; +} + +fn foo<'a, T: Trait>( + // ^^^^^ error: missing lifetime specifier + _: impl Trait<'a, Assoc: Trait>, + // ^^^^^ error: missing lifetime specifier +) +where + Foo: Trait<'a>, + // ^^^ error: missing lifetime specifier +{ +} + "#, + ); + } + + #[test] + fn generic_defaults() { + check_diagnostics( + r#" +struct Foo<'a>(&'a ()); + +struct Bar(T); + // ^^^ error: missing lifetime specifier + "#, + ); + } + + #[test] + fn type_alias_type() { + check_diagnostics( + r#" +struct Foo<'a>(&'a ()); + +type Bar = Foo; + // ^^^ error: missing lifetime specifier + "#, + ); + } + + #[test] + fn const_param_ty() { + check_diagnostics( + r#" +struct Foo<'a>(&'a ()); + +fn bar() {} + // ^^^ error: missing lifetime specifier + "#, + ); + } +} diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_match_arms.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_match_arms.rs index 0bf600e5dfaf1..d3d3c3aa38dc2 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_match_arms.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_match_arms.rs @@ -18,10 +18,10 @@ pub(crate) fn missing_match_arms( #[cfg(test)] mod tests { use crate::{ + DiagnosticsConfig, tests::{ check_diagnostics, check_diagnostics_with_config, check_diagnostics_with_disabled, }, - DiagnosticsConfig, }; use test_utils::skip_slow_tests; diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_unsafe.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_unsafe.rs index 040aa2949aa93..3c36b455ca9d9 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_unsafe.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_unsafe.rs @@ -1,11 +1,11 @@ use hir::db::ExpandDatabase; -use hir::{HirFileIdExt, UnsafeLint, UnsafetyReason}; +use hir::{UnsafeLint, UnsafetyReason}; use ide_db::text_edit::TextEdit; use ide_db::{assists::Assist, source_change::SourceChange}; -use syntax::{ast, SyntaxNode}; -use syntax::{match_ast, AstNode}; +use syntax::{AstNode, match_ast}; +use syntax::{SyntaxNode, ast}; -use crate::{fix, Diagnostic, DiagnosticCode, DiagnosticsContext}; +use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext, fix}; // Diagnostic: missing-unsafe // @@ -51,8 +51,10 @@ fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::MissingUnsafe) -> Option, d: &hir::MovedOutOf mod tests { use crate::tests::check_diagnostics; - // FIXME: spans are broken + #[test] + fn operand_field_span_respected() { + check_diagnostics( + r#" +struct NotCopy; +struct S { + field: NotCopy, +} + +fn f(s: &S) -> S { + S { field: s.field } + //^^^^^^^ error: cannot move `NotCopy` out of reference +} + "#, + ); + } #[test] fn move_by_explicit_deref() { @@ -85,7 +100,7 @@ fn consume(_: X) { fn main() { let a = &X(Y); consume(*a); - //^^^^^^^^^^^ error: cannot move `X` out of reference + //^^ error: cannot move `X` out of reference let a = &X(5); consume(*a); } diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/mutability_errors.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/mutability_errors.rs index 0e3c4c7aa3642..5d25f2c6a90fd 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/mutability_errors.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/mutability_errors.rs @@ -1,9 +1,9 @@ use hir::db::ExpandDatabase; use ide_db::source_change::SourceChange; use ide_db::text_edit::TextEdit; -use syntax::{ast, AstNode, SyntaxKind, SyntaxNode, SyntaxNodePtr, SyntaxToken, T}; +use syntax::{AstNode, SyntaxKind, SyntaxNode, SyntaxNodePtr, SyntaxToken, T, ast}; -use crate::{fix, Diagnostic, DiagnosticCode, DiagnosticsContext}; +use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext, fix}; // Diagnostic: need-mut // @@ -39,7 +39,7 @@ pub(crate) fn need_mut(ctx: &DiagnosticsContext<'_>, d: &hir::NeedMut) -> Option Some(vec![fix( "add_mut", "Change it to be mutable", - SourceChange::from_text_edit(file_id, edit), + SourceChange::from_text_edit(file_id.file_id(ctx.sema.db), edit), use_range, )]) })(); @@ -82,7 +82,7 @@ pub(crate) fn unused_mut(ctx: &DiagnosticsContext<'_>, d: &hir::UnusedMut) -> Op Some(vec![fix( "remove_mut", "Remove unnecessary `mut`", - SourceChange::from_text_edit(file_id, edit), + SourceChange::from_text_edit(file_id.file_id(ctx.sema.db), edit), use_range, )]) })(); @@ -1258,7 +1258,7 @@ fn foo(mut foo: Foo) { pub struct A {} pub unsafe fn foo(a: *mut A) { - let mut b = || -> *mut A { &mut *a }; + let mut b = || -> *mut A { unsafe { &mut *a } }; //^^^^^ 💡 warn: variable does not need to be mutable let _ = b(); } diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/no_such_field.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/no_such_field.rs index 0f126a1a6562c..fa3347aa12e62 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/no_such_field.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/no_such_field.rs @@ -1,13 +1,13 @@ use either::Either; -use hir::{db::ExpandDatabase, HasSource, HirDisplay, HirFileIdExt, Semantics, VariantId}; +use hir::{HasSource, HirDisplay, Semantics, VariantId, db::ExpandDatabase}; use ide_db::text_edit::TextEdit; -use ide_db::{source_change::SourceChange, EditionedFileId, RootDatabase}; +use ide_db::{EditionedFileId, RootDatabase, source_change::SourceChange}; use syntax::{ - ast::{self, edit::IndentLevel, make}, AstNode, + ast::{self, edit::IndentLevel, make}, }; -use crate::{fix, Assist, Diagnostic, DiagnosticCode, DiagnosticsContext}; +use crate::{Assist, Diagnostic, DiagnosticCode, DiagnosticsContext, fix}; // Diagnostic: no-such-field // @@ -108,7 +108,7 @@ fn missing_record_expr_field_fixes( } let source_change = SourceChange::from_text_edit( - def_file_id, + def_file_id.file_id(sema.db), TextEdit::insert(last_field_syntax.text_range().end(), new_field), ); diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/remove_trailing_return.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/remove_trailing_return.rs index c8e3cff364a9e..6b78645002617 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/remove_trailing_return.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/remove_trailing_return.rs @@ -1,9 +1,9 @@ -use hir::{db::ExpandDatabase, diagnostics::RemoveTrailingReturn, FileRange}; +use hir::{FileRange, db::ExpandDatabase, diagnostics::RemoveTrailingReturn}; use ide_db::text_edit::TextEdit; use ide_db::{assists::Assist, source_change::SourceChange}; -use syntax::{ast, AstNode}; +use syntax::{AstNode, ast}; -use crate::{adjusted_display_range, fix, Diagnostic, DiagnosticCode, DiagnosticsContext}; +use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext, adjusted_display_range, fix}; // Diagnostic: remove-trailing-return // @@ -49,7 +49,7 @@ fn fixes(ctx: &DiagnosticsContext<'_>, d: &RemoveTrailingReturn) -> Option, d: &RemoveUnnecessaryElse) -> Option foo::Foo for Bar {} "#, ); } + + #[test] + fn twice_fundamental() { + check_diagnostics( + r#" +//- /foo.rs crate:foo +pub trait Trait {} +//- /bar.rs crate:bar deps:foo +struct Foo; +impl foo::Trait for &&Foo {} + "#, + ); + } } diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/trait_impl_redundant_assoc_item.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/trait_impl_redundant_assoc_item.rs index 3db2e013a3978..d5c4bcf768adb 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/trait_impl_redundant_assoc_item.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/trait_impl_redundant_assoc_item.rs @@ -1,7 +1,7 @@ -use hir::{db::ExpandDatabase, HasSource, HirDisplay}; +use hir::{HasSource, HirDisplay, db::ExpandDatabase}; use ide_db::text_edit::TextRange; use ide_db::{ - assists::{Assist, AssistId, AssistKind}, + assists::{Assist, AssistId}, label::Label, source_change::SourceChangeBuilder, }; @@ -54,10 +54,12 @@ pub(crate) fn trait_impl_redundant_assoc_item( } }; + let hir::FileRange { file_id, range } = + hir::InFile::new(d.file_id, diagnostic_range).original_node_file_range_rooted(db); Diagnostic::new( DiagnosticCode::RustcHardError("E0407"), format!("{redundant_item_name} is not a member of trait `{trait_name}`"), - hir::InFile::new(d.file_id, diagnostic_range).original_node_file_range_rooted(db), + ide_db::FileRange { file_id: file_id.file_id(ctx.sema.db), range }, ) .with_fixes(quickfix_for_redundant_assoc_item( ctx, @@ -93,11 +95,11 @@ fn quickfix_for_redundant_assoc_item( Some(()) }; let file_id = d.file_id.file_id()?; - let mut source_change_builder = SourceChangeBuilder::new(file_id); + let mut source_change_builder = SourceChangeBuilder::new(file_id.file_id(ctx.sema.db)); add_assoc_item_def(&mut source_change_builder)?; Some(vec![Assist { - id: AssistId("add assoc item def into trait def", AssistKind::QuickFix), + id: AssistId::quick_fix("add assoc item def into trait def"), label: Label::new("Add assoc item def into trait def".to_owned()), group: None, target: range, diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/type_mismatch.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/type_mismatch.rs index c726a3bcd3cad..500c5de791dc8 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/type_mismatch.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/type_mismatch.rs @@ -1,21 +1,20 @@ use either::Either; -use hir::{db::ExpandDatabase, CallableKind, ClosureStyle, HirDisplay, HirFileIdExt, InFile}; +use hir::{CallableKind, ClosureStyle, HirDisplay, InFile, db::ExpandDatabase}; use ide_db::{ famous_defs::FamousDefs, source_change::{SourceChange, SourceChangeBuilder}, text_edit::TextEdit, }; use syntax::{ + AstNode, AstPtr, TextSize, ast::{ - self, + self, BlockExpr, Expr, ExprStmt, HasArgList, edit::{AstNodeEdit, IndentLevel}, syntax_factory::SyntaxFactory, - BlockExpr, Expr, ExprStmt, HasArgList, }, - AstNode, AstPtr, TextSize, }; -use crate::{adjusted_display_range, fix, Assist, Diagnostic, DiagnosticCode, DiagnosticsContext}; +use crate::{Assist, Diagnostic, DiagnosticCode, DiagnosticsContext, adjusted_display_range, fix}; // Diagnostic: type-mismatch // @@ -72,11 +71,7 @@ fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::TypeMismatch) -> Option::cast)?; editor = builder.make_editor(parent.syntax()); - let make = SyntaxFactory::new(); + let make = SyntaxFactory::with_mappings(); match parent { Either::Left(ret_expr) => { @@ -261,7 +258,7 @@ fn remove_unnecessary_wrapper( } } - builder.add_file_edits(file_id, editor); + builder.add_file_edits(file_id.file_id(ctx.sema.db), editor); let name = format!("Remove unnecessary {}() wrapper", variant.name(db).as_str()); acc.push(fix( "remove_unnecessary_wrapper", @@ -293,8 +290,10 @@ fn remove_semicolon( let semicolon_range = expr_before_semi.semicolon_token()?.text_range(); let edit = TextEdit::delete(semicolon_range); - let source_change = - SourceChange::from_text_edit(expr_ptr.file_id.original_file(ctx.sema.db), edit); + let source_change = SourceChange::from_text_edit( + expr_ptr.file_id.original_file(ctx.sema.db).file_id(ctx.sema.db), + edit, + ); acc.push(fix("remove_semicolon", "Remove this semicolon", source_change, semicolon_range)); Some(()) @@ -307,10 +306,9 @@ fn str_ref_to_owned( acc: &mut Vec, ) -> Option<()> { let expected = d.expected.display(ctx.sema.db, ctx.display_target); - let actual = d.actual.display(ctx.sema.db, ctx.display_target); - // FIXME do this properly - if expected.to_string() != "String" || actual.to_string() != "&str" { + let is_applicable = d.actual.strip_reference().is_str() && expected.to_string() == "String"; + if !is_applicable { return None; } @@ -321,8 +319,10 @@ fn str_ref_to_owned( let to_owned = ".to_owned()".to_owned(); let edit = TextEdit::insert(expr.syntax().text_range().end(), to_owned); - let source_change = - SourceChange::from_text_edit(expr_ptr.file_id.original_file(ctx.sema.db), edit); + let source_change = SourceChange::from_text_edit( + expr_ptr.file_id.original_file(ctx.sema.db).file_id(ctx.sema.db), + edit, + ); acc.push(fix("str_ref_to_owned", "Add .to_owned() here", source_change, expr_range)); Some(()) @@ -1046,19 +1046,6 @@ fn test() -> String { ); } - #[test] - fn closure_mismatch_show_different_type() { - check_diagnostics( - r#" -fn f() { - let mut x = (|| 1, 2); - x = (|| 3, 4); - //^^^^ error: expected {closure#0}, found {closure#1} -} - "#, - ); - } - #[test] fn type_mismatch_range_adjustment() { cov_mark::check!(type_mismatch_range_adjustment); @@ -1188,7 +1175,7 @@ trait B {} fn test(a: &dyn A) -> &dyn B { a - //^ error: expected &dyn B, found &dyn A + //^ error: expected &(dyn B + 'static), found &(dyn A + 'static) } "#, ); diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/typed_hole.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/typed_hole.rs index c25318eda4855..a933f1b426118 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/typed_hole.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/typed_hole.rs @@ -1,11 +1,13 @@ +use std::ops::Not; + use hir::{ - db::ExpandDatabase, - term_search::{term_search, TermSearchConfig, TermSearchCtx}, ClosureStyle, HirDisplay, ImportPathConfig, + db::ExpandDatabase, + term_search::{TermSearchConfig, TermSearchCtx, term_search}, }; use ide_db::text_edit::TextEdit; use ide_db::{ - assists::{Assist, AssistId, AssistKind, GroupLabel}, + assists::{Assist, AssistId, GroupLabel}, label::Label, source_change::SourceChange, }; @@ -60,9 +62,13 @@ fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::TypedHole) -> Option let mut formatter = |_: &hir::Type| String::from("_"); - let assists: Vec = paths + let assists: Vec = d + .expected + .is_unknown() + .not() + .then(|| "todo!()".to_owned()) .into_iter() - .filter_map(|path| { + .chain(paths.into_iter().filter_map(|path| { path.gen_source_code( &scope, &mut formatter, @@ -75,33 +81,27 @@ fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::TypedHole) -> Option ctx.display_target, ) .ok() - }) + })) .unique() .map(|code| Assist { - id: AssistId("typed-hole", AssistKind::QuickFix), + id: AssistId::quick_fix("typed-hole"), label: Label::new(format!("Replace `_` with `{code}`")), group: Some(GroupLabel("Replace `_` with a term".to_owned())), target: original_range.range, source_change: Some(SourceChange::from_text_edit( - original_range.file_id, + original_range.file_id.file_id(ctx.sema.db), TextEdit::replace(original_range.range, code), )), command: None, }) .collect(); - if !assists.is_empty() { - Some(assists) - } else { - None - } + if !assists.is_empty() { Some(assists) } else { None } } #[cfg(test)] mod tests { - use crate::tests::{ - check_diagnostics, check_fixes_unordered, check_has_fix, check_has_single_fix, - }; + use crate::tests::{check_diagnostics, check_fixes_unordered, check_has_fix}; #[test] fn unknown() { @@ -123,9 +123,9 @@ fn main() { if _ {} //^ 💡 error: invalid `_` expression, expected type `bool` let _: fn() -> i32 = _; - //^ error: invalid `_` expression, expected type `fn() -> i32` + //^ 💡 error: invalid `_` expression, expected type `fn() -> i32` let _: fn() -> () = _; // FIXME: This should trigger an assist because `main` matches via *coercion* - //^ error: invalid `_` expression, expected type `fn()` + //^ 💡 error: invalid `_` expression, expected type `fn()` } "#, ); @@ -151,7 +151,7 @@ fn main() { fn main() { let mut x = t(); x = _; - //^ error: invalid `_` expression, expected type `&str` + //^ 💡 error: invalid `_` expression, expected type `&'static str` x = ""; } fn t() -> T { loop {} } @@ -312,7 +312,7 @@ fn main() { #[test] fn ignore_impl_func_with_incorrect_return() { - check_has_single_fix( + check_fixes_unordered( r#" struct Bar {} trait Foo { @@ -327,7 +327,8 @@ fn main() { let a: i32 = 1; let c: Bar = _$0; }"#, - r#" + vec![ + r#" struct Bar {} trait Foo { type Res; @@ -341,6 +342,21 @@ fn main() { let a: i32 = 1; let c: Bar = Bar { }; }"#, + r#" +struct Bar {} +trait Foo { + type Res; + fn foo(&self) -> Self::Res; +} +impl Foo for i32 { + type Res = Self; + fn foo(&self) -> Self::Res { 1 } +} +fn main() { + let a: i32 = 1; + let c: Bar = todo!(); +}"#, + ], ); } diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unlinked_file.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unlinked_file.rs index f3109b9bb73a2..47fa305936263 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unlinked_file.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unlinked_file.rs @@ -2,20 +2,19 @@ use std::iter; -use hir::{db::DefDatabase, DefMap, InFile, ModuleSource}; +use hir::{DefMap, InFile, ModuleSource, db::DefDatabase}; +use ide_db::base_db::RootQueryDb; use ide_db::text_edit::TextEdit; use ide_db::{ - base_db::{FileLoader, SourceDatabase, SourceRootDatabase}, - source_change::SourceChange, - FileId, FileRange, LineIndexDatabase, + FileId, FileRange, LineIndexDatabase, base_db::SourceDatabase, source_change::SourceChange, }; use paths::Utf8Component; use syntax::{ - ast::{self, edit::IndentLevel, HasModuleItem, HasName}, AstNode, TextRange, + ast::{self, HasModuleItem, HasName, edit::IndentLevel}, }; -use crate::{fix, Assist, Diagnostic, DiagnosticCode, DiagnosticsContext, Severity}; +use crate::{Assist, Diagnostic, DiagnosticCode, DiagnosticsContext, Severity, fix}; // Diagnostic: unlinked-file // @@ -36,7 +35,9 @@ pub(crate) fn unlinked_file( "This file is not included anywhere in the module tree, so rust-analyzer can't offer IDE services." }; - let message = format!("{message}\n\nIf you're intentionally working on unowned files, you can silence this warning by adding \"unlinked-file\" to rust-analyzer.diagnostics.disabled in your settings."); + let message = format!( + "{message}\n\nIf you're intentionally working on unowned files, you can silence this warning by adding \"unlinked-file\" to rust-analyzer.diagnostics.disabled in your settings." + ); let mut unused = true; @@ -48,6 +49,7 @@ pub(crate) fn unlinked_file( // Only show this diagnostic on the first three characters of // the file, to avoid overwhelming the user during startup. range = SourceDatabase::file_text(ctx.sema.db, file_id) + .text(ctx.sema.db) .char_indices() .take(3) .last() @@ -78,7 +80,11 @@ fn fixes( // If there's an existing module that could add `mod` or `pub mod` items to include the unlinked file, // suggest that as a fix. - let source_root = ctx.sema.db.source_root(ctx.sema.db.file_source_root(file_id)); + let db = ctx.sema.db; + + let source_root = ctx.sema.db.file_source_root(file_id).source_root_id(db); + let source_root = ctx.sema.db.source_root(source_root).source_root(db); + let our_path = source_root.path_for_file(&file_id)?; let parent = our_path.parent()?; let (module_name, _) = our_path.name_and_extension()?; @@ -93,12 +99,14 @@ fn fixes( }; // check crate roots, i.e. main.rs, lib.rs, ... - 'crates: for &krate in &*ctx.sema.db.relevant_crates(file_id) { + let relevant_crates = db.relevant_crates(file_id); + 'crates: for &krate in &*relevant_crates { let crate_def_map = ctx.sema.db.crate_def_map(krate); let root_module = &crate_def_map[DefMap::ROOT]; let Some(root_file_id) = root_module.origin.file_id() else { continue }; - let Some(crate_root_path) = source_root.path_for_file(&root_file_id.file_id()) else { + let Some(crate_root_path) = source_root.path_for_file(&root_file_id.file_id(ctx.sema.db)) + else { continue; }; let Some(rel) = parent.strip_prefix(&crate_root_path.parent()?) else { continue }; @@ -124,7 +132,12 @@ fn fixes( let InFile { file_id: parent_file_id, value: source } = current.definition_source(ctx.sema.db); let parent_file_id = parent_file_id.file_id()?; - return make_fixes(parent_file_id.file_id(), source, &module_name, trigger_range); + return make_fixes( + parent_file_id.file_id(ctx.sema.db), + source, + &module_name, + trigger_range, + ); } // if we aren't adding to a crate root, walk backwards such that we support `#[path = ...]` overrides if possible @@ -141,10 +154,12 @@ fn fixes( paths.into_iter().find_map(|path| source_root.file_for_path(&path)) })?; stack.pop(); - 'crates: for &krate in ctx.sema.db.relevant_crates(parent_id).iter() { + let relevant_crates = db.relevant_crates(parent_id); + 'crates: for &krate in relevant_crates.iter() { let crate_def_map = ctx.sema.db.crate_def_map(krate); let Some((_, module)) = crate_def_map.modules().find(|(_, module)| { - module.origin.file_id().map(Into::into) == Some(parent_id) && !module.origin.is_inline() + module.origin.file_id().map(|file_id| file_id.file_id(ctx.sema.db)) == Some(parent_id) + && !module.origin.is_inline() }) else { continue; }; @@ -174,7 +189,12 @@ fn fixes( let InFile { file_id: parent_file_id, value: source } = current.definition_source(ctx.sema.db); let parent_file_id = parent_file_id.file_id()?; - return make_fixes(parent_file_id.file_id(), source, &module_name, trigger_range); + return make_fixes( + parent_file_id.file_id(ctx.sema.db), + source, + &module_name, + trigger_range, + ); } } @@ -193,9 +213,11 @@ fn make_fixes( let mod_decl = format!("mod {new_mod_name};"); let pub_mod_decl = format!("pub mod {new_mod_name};"); + let pub_crate_mod_decl = format!("pub(crate) mod {new_mod_name};"); let mut mod_decl_builder = TextEdit::builder(); let mut pub_mod_decl_builder = TextEdit::builder(); + let mut pub_crate_mod_decl_builder = TextEdit::builder(); let mut items = match &source { ModuleSource::SourceFile(it) => it.items(), @@ -224,6 +246,7 @@ fn make_fixes( let indent = IndentLevel::from_node(last.syntax()); mod_decl_builder.insert(offset, format!("\n{indent}{mod_decl}")); pub_mod_decl_builder.insert(offset, format!("\n{indent}{pub_mod_decl}")); + pub_crate_mod_decl_builder.insert(offset, format!("\n{indent}{pub_crate_mod_decl}")); } None => { // Prepend before the first item in the file. @@ -234,6 +257,8 @@ fn make_fixes( let indent = IndentLevel::from_node(first.syntax()); mod_decl_builder.insert(offset, format!("{mod_decl}\n\n{indent}")); pub_mod_decl_builder.insert(offset, format!("{pub_mod_decl}\n\n{indent}")); + pub_crate_mod_decl_builder + .insert(offset, format!("{pub_crate_mod_decl}\n\n{indent}")); } None => { // No items in the file, so just append at the end. @@ -251,6 +276,8 @@ fn make_fixes( }; mod_decl_builder.insert(offset, format!("{indent}{mod_decl}\n")); pub_mod_decl_builder.insert(offset, format!("{indent}{pub_mod_decl}\n")); + pub_crate_mod_decl_builder + .insert(offset, format!("{indent}{pub_crate_mod_decl}\n")); } } } @@ -269,6 +296,12 @@ fn make_fixes( SourceChange::from_text_edit(parent_file_id, pub_mod_decl_builder.finish()), trigger_range, ), + fix( + "add_pub_crate_mod_declaration", + &format!("Insert `{pub_crate_mod_decl}`"), + SourceChange::from_text_edit(parent_file_id, pub_crate_mod_decl_builder.finish()), + trigger_range, + ), ]) } @@ -296,6 +329,11 @@ fn f() {} r#" pub mod foo; +fn f() {} +"#, + r#" +pub(crate) mod foo; + fn f() {} "#, ], diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_field.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_field.rs index 6ab713a5896c9..a4f4813cf5b07 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_field.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_field.rs @@ -1,25 +1,24 @@ use std::iter; use either::Either; -use hir::{db::ExpandDatabase, Adt, FileRange, HasSource, HirDisplay, InFile, Struct, Union}; +use hir::{Adt, FileRange, HasSource, HirDisplay, InFile, Struct, Union, db::ExpandDatabase}; use ide_db::text_edit::TextEdit; use ide_db::{ - assists::{Assist, AssistId, AssistKind}, + assists::{Assist, AssistId}, helpers::is_editable_crate, label::Label, source_change::{SourceChange, SourceChangeBuilder}, }; use syntax::{ - algo, - ast::{self, edit::IndentLevel, make, FieldList, Name, Visibility}, - AstNode, AstPtr, Direction, SyntaxKind, TextSize, + AstNode, AstPtr, Direction, SyntaxKind, TextSize, algo, + ast::{self, FieldList, Name, Visibility, edit::IndentLevel, make}, }; use syntax::{ - ast::{edit::AstNodeEdit, Type}, SyntaxNode, + ast::{Type, edit::AstNodeEdit}, }; -use crate::{adjusted_display_range, Diagnostic, DiagnosticCode, DiagnosticsContext}; +use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext, adjusted_display_range}; // Diagnostic: unresolved-field // @@ -62,11 +61,7 @@ fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::UnresolvedField) -> Option { // Add a field list to the Unit Struct - let mut src_change_builder = SourceChangeBuilder::new(struct_range.file_id); + let mut src_change_builder = + SourceChangeBuilder::new(struct_range.file_id.file_id(ctx.sema.db)); let field_name = match field_name.chars().next() { // FIXME : See match arm below regarding tuple structs. Some(ch) if ch.is_numeric() => return None, @@ -211,7 +208,7 @@ fn add_field_to_struct_fix( src_change_builder.replace(semi_colon.text_range(), record_field_list.to_string()); Some(Assist { - id: AssistId("convert-unit-struct-to-record-struct", AssistKind::QuickFix), + id: AssistId::quick_fix("convert-unit-struct-to-record-struct"), label: Label::new("Convert Unit Struct to Record Struct and add field".to_owned()), group: None, target: error_range.range, @@ -270,12 +267,12 @@ fn method_fix( let expr = expr_ptr.value.to_node(&root); let FileRange { range, file_id } = ctx.sema.original_range_opt(expr.syntax())?; Some(Assist { - id: AssistId("expected-field-found-method-call-fix", AssistKind::QuickFix), + id: AssistId::quick_fix("expected-field-found-method-call-fix"), label: Label::new("Use parentheses to call the method".to_owned()), group: None, target: range, source_change: Some(SourceChange::from_text_edit( - file_id, + file_id.file_id(ctx.sema.db), TextEdit::insert(range.end(), "()".to_owned()), )), command: None, @@ -285,11 +282,11 @@ fn method_fix( mod tests { use crate::{ + DiagnosticsConfig, tests::{ check_diagnostics, check_diagnostics_with_config, check_diagnostics_with_disabled, check_fix, check_no_fix, }, - DiagnosticsConfig, }; #[test] diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_method.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_method.rs index 35e7521af7061..7f07009dc5616 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_method.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_method.rs @@ -1,16 +1,17 @@ -use hir::{db::ExpandDatabase, FileRange, HirDisplay, InFile}; +use hir::{FileRange, HirDisplay, InFile, db::ExpandDatabase}; use ide_db::text_edit::TextEdit; use ide_db::{ - assists::{Assist, AssistId, AssistKind}, + assists::{Assist, AssistId}, label::Label, source_change::SourceChange, }; use syntax::{ - ast::{self, make, HasArgList}, - format_smolstr, AstNode, SmolStr, TextRange, ToSmolStr, + AstNode, SmolStr, TextRange, ToSmolStr, + ast::{self, HasArgList, make}, + format_smolstr, }; -use crate::{adjusted_display_range, Diagnostic, DiagnosticCode, DiagnosticsContext}; +use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext, adjusted_display_range}; // Diagnostic: unresolved-method // @@ -67,11 +68,7 @@ fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::UnresolvedMethodCall) -> Option< fixes.push(assoc_func_fix); } - if fixes.is_empty() { - None - } else { - Some(fixes) - } + if fixes.is_empty() { None } else { Some(fixes) } } fn field_fix( @@ -99,13 +96,13 @@ fn field_fix( _ => return None, }; Some(Assist { - id: AssistId("expected-method-found-field-fix", AssistKind::QuickFix), + id: AssistId::quick_fix("expected-method-found-field-fix"), label: Label::new("Use parentheses to call the value of the field".to_owned()), group: None, target: range, source_change: Some(SourceChange::from_iter([ - (file_id.into(), TextEdit::insert(range.start(), "(".to_owned())), - (file_id.into(), TextEdit::insert(range.end(), ")".to_owned())), + (file_id.file_id(ctx.sema.db), TextEdit::insert(range.start(), "(".to_owned())), + (file_id.file_id(ctx.sema.db), TextEdit::insert(range.end(), ")".to_owned())), ])), command: None, }) @@ -178,14 +175,14 @@ fn assoc_func_fix(ctx: &DiagnosticsContext<'_>, d: &hir::UnresolvedMethodCall) - let file_id = ctx.sema.original_range_opt(call.receiver()?.syntax())?.file_id; Some(Assist { - id: AssistId("method_call_to_assoc_func_call_fix", AssistKind::QuickFix), + id: AssistId::quick_fix("method_call_to_assoc_func_call_fix"), label: Label::new(format!( "Use associated func call instead: `{assoc_func_call_expr_string}`" )), group: None, target: range, source_change: Some(SourceChange::from_text_edit( - file_id, + file_id.file_id(ctx.sema.db), TextEdit::replace(range, assoc_func_call_expr_string), )), command: None, @@ -272,7 +269,7 @@ impl A { } fn main() { let a = A {a: 0, b: ""}; - A::::foo(); + A::::foo(); } "#, ); diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_module.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_module.rs index 2bd8e484f8537..599cabe3e4f20 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_module.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_module.rs @@ -1,9 +1,9 @@ -use hir::{db::ExpandDatabase, HirFileIdExt}; +use hir::db::ExpandDatabase; use ide_db::{assists::Assist, base_db::AnchoredPathBuf, source_change::FileSystemEdit}; use itertools::Itertools; use syntax::AstNode; -use crate::{fix, Diagnostic, DiagnosticCode, DiagnosticsContext}; +use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext, fix}; // Diagnostic: unresolved-module // @@ -43,7 +43,7 @@ fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::UnresolvedModule) -> Option, file_id: EditionedFileId, node: &SyntaxNode, @@ -38,13 +40,13 @@ pub(crate) fn useless_braces( Diagnostic::new( DiagnosticCode::RustcLint("unused_braces"), "Unnecessary braces in use statement".to_owned(), - FileRange { file_id: file_id.into(), range: use_range }, + FileRange { file_id: file_id.file_id(db), range: use_range }, ) .with_main_node(InFile::new(file_id.into(), SyntaxNodePtr::new(node))) .with_fixes(Some(vec![fix( "remove_braces", "Remove unnecessary braces", - SourceChange::from_text_edit(file_id, edit), + SourceChange::from_text_edit(file_id.file_id(db), edit), use_range, )])), ); @@ -56,8 +58,8 @@ pub(crate) fn useless_braces( #[cfg(test)] mod tests { use crate::{ - tests::{check_diagnostics, check_diagnostics_with_config, check_fix}, DiagnosticsConfig, + tests::{check_diagnostics, check_diagnostics_with_config, check_fix}, }; #[test] diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/lib.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/lib.rs index e15d349578914..607721d611d7d 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/lib.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/lib.rs @@ -25,18 +25,23 @@ mod handlers { pub(crate) mod await_outside_of_async; + pub(crate) mod bad_rtn; pub(crate) mod break_outside_of_loop; + pub(crate) mod elided_lifetimes_in_path; pub(crate) mod expected_function; pub(crate) mod generic_args_prohibited; pub(crate) mod inactive_code; pub(crate) mod incoherent_impl; pub(crate) mod incorrect_case; + pub(crate) mod incorrect_generics_len; + pub(crate) mod incorrect_generics_order; pub(crate) mod invalid_cast; pub(crate) mod invalid_derive_target; pub(crate) mod macro_error; pub(crate) mod malformed_derive; pub(crate) mod mismatched_arg_count; pub(crate) mod missing_fields; + pub(crate) mod missing_lifetime; pub(crate) mod missing_match_arms; pub(crate) mod missing_unsafe; pub(crate) mod moved_out_of_ref; @@ -82,23 +87,23 @@ use std::{collections::hash_map, iter, sync::LazyLock}; use either::Either; use hir::{ - db::ExpandDatabase, diagnostics::AnyDiagnostic, Crate, DisplayTarget, HirFileId, InFile, - Semantics, + Crate, DisplayTarget, HirFileId, InFile, Semantics, db::ExpandDatabase, + diagnostics::AnyDiagnostic, }; use ide_db::{ - assists::{Assist, AssistId, AssistKind, AssistResolveStrategy}, - base_db::{ReleaseChannel, SourceDatabase}, - generated::lints::{Lint, LintGroup, CLIPPY_LINT_GROUPS, DEFAULT_LINTS, DEFAULT_LINT_GROUPS}, + EditionedFileId, FileId, FileRange, FxHashMap, FxHashSet, RootDatabase, Severity, SnippetCap, + assists::{Assist, AssistId, AssistResolveStrategy, ExprFillDefaultMode}, + base_db::{ReleaseChannel, RootQueryDb as _}, + generated::lints::{CLIPPY_LINT_GROUPS, DEFAULT_LINT_GROUPS, DEFAULT_LINTS, Lint, LintGroup}, imports::insert_use::InsertUseConfig, label::Label, source_change::SourceChange, syntax_helpers::node_ext::parse_tt_as_comma_sep_paths, - EditionedFileId, FileId, FileRange, FxHashMap, FxHashSet, RootDatabase, Severity, SnippetCap, }; use itertools::Itertools; use syntax::{ + AstPtr, Edition, NodeOrToken, SmolStr, SyntaxKind, SyntaxNode, SyntaxNodePtr, T, TextRange, ast::{self, AstNode, HasAttrs}, - AstPtr, Edition, NodeOrToken, SmolStr, SyntaxKind, SyntaxNode, SyntaxNodePtr, TextRange, T, }; // FIXME: Make this an enum @@ -127,7 +132,7 @@ impl DiagnosticCode { format!("https://rust-lang.github.io/rust-clippy/master/#/{e}") } DiagnosticCode::Ra(e, _) => { - format!("https://rust-analyzer.github.io/manual.html#{e}") + format!("https://rust-analyzer.github.io/book/diagnostics.html#{e}") } } } @@ -214,17 +219,6 @@ impl Diagnostic { } } -#[derive(Clone, Debug, PartialEq, Eq)] -pub enum ExprFillDefaultMode { - Todo, - Default, -} -impl Default for ExprFillDefaultMode { - fn default() -> Self { - Self::Todo - } -} - #[derive(Debug, Clone)] pub struct DiagnosticsConfig { /// Whether native diagnostics are enabled. @@ -301,8 +295,11 @@ impl DiagnosticsContext<'_> { } } })() + .map(|frange| ide_db::FileRange { + file_id: frange.file_id.file_id(self.sema.db), + range: frange.range, + }) .unwrap_or_else(|| sema.diagnostics_display_range(*node)) - .into() } } @@ -319,13 +316,14 @@ pub fn syntax_diagnostics( } let sema = Semantics::new(db); - let file_id = sema + let editioned_file_id = sema .attach_first_edition(file_id) - .unwrap_or_else(|| EditionedFileId::current_edition(file_id)); + .unwrap_or_else(|| EditionedFileId::current_edition(db, file_id)); + + let (file_id, _) = editioned_file_id.unpack(db); // [#3434] Only take first 128 errors to prevent slowing down editor/ide, the number 128 is chosen arbitrarily. - db.parse_errors(file_id) - .as_deref() + db.parse_errors(editioned_file_id) .into_iter() .flatten() .take(128) @@ -333,7 +331,7 @@ pub fn syntax_diagnostics( Diagnostic::new( DiagnosticCode::SyntaxError, format!("Syntax Error: {err}"), - FileRange { file_id: file_id.into(), range: err.range() }, + FileRange { file_id, range: err.range() }, ) }) .collect() @@ -349,26 +347,28 @@ pub fn semantic_diagnostics( ) -> Vec { let _p = tracing::info_span!("semantic_diagnostics").entered(); let sema = Semantics::new(db); - let file_id = sema + let editioned_file_id = sema .attach_first_edition(file_id) - .unwrap_or_else(|| EditionedFileId::current_edition(file_id)); + .unwrap_or_else(|| EditionedFileId::current_edition(db, file_id)); + + let (file_id, edition) = editioned_file_id.unpack(db); let mut res = Vec::new(); - let parse = sema.parse(file_id); + let parse = sema.parse(editioned_file_id); // FIXME: This iterates the entire file which is a rather expensive operation. // We should implement these differently in some form? // Salsa caching + incremental re-parse would be better here for node in parse.syntax().descendants() { - handlers::useless_braces::useless_braces(&mut res, file_id, &node); - handlers::field_shorthand::field_shorthand(&mut res, file_id, &node); + handlers::useless_braces::useless_braces(db, &mut res, editioned_file_id, &node); + handlers::field_shorthand::field_shorthand(db, &mut res, editioned_file_id, &node); handlers::json_is_not_rust::json_in_items( &sema, &mut res, - file_id, + editioned_file_id, &node, config, - file_id.edition(), + edition, ); } @@ -378,29 +378,32 @@ pub fn semantic_diagnostics( module.and_then(|m| db.toolchain_channel(m.krate().into())), Some(ReleaseChannel::Nightly) | None ); - let krate = module.map(|module| module.krate()).unwrap_or_else(|| { - (*db.crate_graph().crates_in_topological_order().last().unwrap()).into() - }); - let display_target = krate.to_display_target(db); - let ctx = DiagnosticsContext { - config, - sema, - resolve, - edition: file_id.edition(), - is_nightly, - display_target, + + let krate = match module { + Some(module) => module.krate(), + None => { + match db.all_crates().last() { + Some(last) => (*last).into(), + // short-circuit, return an empty vec of diagnostics + None => return vec![], + } + } }; + let display_target = krate.to_display_target(db); + let ctx = DiagnosticsContext { config, sema, resolve, edition, is_nightly, display_target }; let mut diags = Vec::new(); match module { // A bunch of parse errors in a file indicate some bigger structural parse changes in the // file, so we skip semantic diagnostics so we can show these faster. Some(m) => { - if db.parse_errors(file_id).as_deref().is_none_or(|es| es.len() < 16) { + if db.parse_errors(editioned_file_id).is_none_or(|es| es.len() < 16) { m.diagnostics(db, &mut diags, config.style_lints); } } - None => handlers::unlinked_file::unlinked_file(&ctx, &mut res, file_id.file_id()), + None => { + handlers::unlinked_file::unlinked_file(&ctx, &mut res, editioned_file_id.file_id(db)) + } } for diag in diags { @@ -488,6 +491,11 @@ pub fn semantic_diagnostics( AnyDiagnostic::ParenthesizedGenericArgsWithoutFnTrait(d) => { handlers::parenthesized_generic_args_without_fn_trait::parenthesized_generic_args_without_fn_trait(&ctx, &d) } + AnyDiagnostic::BadRtn(d) => handlers::bad_rtn::bad_rtn(&ctx, &d), + AnyDiagnostic::IncorrectGenericsLen(d) => handlers::incorrect_generics_len::incorrect_generics_len(&ctx, &d), + AnyDiagnostic::IncorrectGenericsOrder(d) => handlers::incorrect_generics_order::incorrect_generics_order(&ctx, &d), + AnyDiagnostic::MissingLifetime(d) => handlers::missing_lifetime::missing_lifetime(&ctx, &d), + AnyDiagnostic::ElidedLifetimesInPath(d) => handlers::elided_lifetimes_in_path::elided_lifetimes_in_path(&ctx, &d), }; res.push(d) } @@ -517,7 +525,7 @@ pub fn semantic_diagnostics( &mut FxHashMap::default(), &mut lints, &mut Vec::new(), - file_id.edition(), + editioned_file_id.edition(db), ); res.retain(|d| d.severity != Severity::Allow); @@ -559,9 +567,8 @@ fn handle_diag_from_macros( let span_map = sema.db.expansion_span_map(macro_file); let mut spans = span_map.spans_for_range(node.text_range()); if spans.any(|span| { - sema.db.lookup_intern_syntax_context(span.ctx).outer_expn.is_some_and(|expansion| { - let macro_call = - sema.db.lookup_intern_macro_call(expansion.as_macro_file().macro_call_id); + span.ctx.outer_expn(sema.db).is_some_and(|expansion| { + let macro_call = sema.db.lookup_intern_macro_call(expansion.into()); // We don't want to show diagnostics for non-local macros at all, but proc macros authors // seem to rely on being able to emit non-warning-free code, so we don't want to show warnings // for them even when the proc macro comes from the same workspace (in rustc that's not a @@ -767,9 +774,9 @@ fn fill_lint_attrs( } }); - let all_matching_groups = lint_groups(&diag.code, edition) - .iter() - .filter_map(|lint_group| cached.get(lint_group)); + let lints = lint_groups(&diag.code, edition); + let all_matching_groups = + lints.iter().filter_map(|lint_group| cached.get(lint_group)); let cached_severity = all_matching_groups.min_by_key(|it| it.depth).map(|it| it.severity); @@ -977,7 +984,7 @@ fn fix(id: &'static str, label: &str, source_change: SourceChange, target: TextR fn unresolved_fix(id: &'static str, label: &str, target: TextRange) -> Assist { assert!(!id.contains(' ')); Assist { - id: AssistId(id, AssistKind::QuickFix), + id: AssistId::quick_fix(id), label: Label::new(label.to_owned()), group: None, target, @@ -993,8 +1000,8 @@ fn adjusted_display_range( ) -> FileRange { let source_file = ctx.sema.parse_or_expand(diag_ptr.file_id); let node = diag_ptr.value.to_node(&source_file); - diag_ptr + let hir::FileRange { file_id, range } = diag_ptr .with_value(adj(node).unwrap_or_else(|| diag_ptr.value.text_range())) - .original_node_file_range_rooted(ctx.sema.db) - .into() + .original_node_file_range_rooted(ctx.sema.db); + ide_db::FileRange { file_id: file_id.file_id(ctx.sema.db), range } } diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/tests.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/tests.rs index fc2a7db7174e9..4e4bd47e1c2f2 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/tests.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/tests.rs @@ -3,14 +3,16 @@ mod overly_long_real_world_cases; use ide_db::{ - assists::AssistResolveStrategy, base_db::SourceDatabase, LineIndexDatabase, RootDatabase, + LineIndexDatabase, RootDatabase, + assists::{AssistResolveStrategy, ExprFillDefaultMode}, + base_db::SourceDatabase, }; use itertools::Itertools; use stdx::trim_indent; use test_fixture::WithFixture; -use test_utils::{assert_eq_text, extract_annotations, MiniCore}; +use test_utils::{MiniCore, assert_eq_text, extract_annotations}; -use crate::{DiagnosticsConfig, ExprFillDefaultMode, Severity}; +use crate::{DiagnosticsConfig, Severity}; /// Takes a multi-file input fixture with annotated cursor positions, /// and checks that: @@ -75,7 +77,7 @@ fn check_nth_fix_with_config( &db, &config, &AssistResolveStrategy::All, - file_position.file_id.into(), + file_position.file_id.file_id(&db), ) .pop() .expect("no diagnostics"); @@ -85,7 +87,7 @@ fn check_nth_fix_with_config( let actual = { let source_change = fix.source_change.as_ref().unwrap(); let file_id = *source_change.source_file_edits.keys().next().unwrap(); - let mut actual = db.file_text(file_id).to_string(); + let mut actual = db.file_text(file_id).text(&db).to_string(); for (edit, snippet_edit) in source_change.source_file_edits.values() { edit.apply(&mut actual); @@ -128,7 +130,7 @@ pub(crate) fn check_has_fix( &db, &conf, &AssistResolveStrategy::All, - file_position.file_id.into(), + file_position.file_id.file_id(&db), ) .into_iter() .find(|d| { @@ -142,7 +144,7 @@ pub(crate) fn check_has_fix( let actual = { let source_change = fix.source_change.as_ref().unwrap(); let file_id = *source_change.source_file_edits.keys().next().unwrap(); - let mut actual = db.file_text(file_id).to_string(); + let mut actual = db.file_text(file_id).text(&db).to_string(); for (edit, snippet_edit) in source_change.source_file_edits.values() { edit.apply(&mut actual); @@ -160,55 +162,6 @@ pub(crate) fn check_has_fix( assert!(fix.is_some(), "no diagnostic with desired fix"); } -#[track_caller] -pub(crate) fn check_has_single_fix( - #[rust_analyzer::rust_fixture] ra_fixture_before: &str, - #[rust_analyzer::rust_fixture] ra_fixture_after: &str, -) { - let after = trim_indent(ra_fixture_after); - - let (db, file_position) = RootDatabase::with_position(ra_fixture_before); - let mut conf = DiagnosticsConfig::test_sample(); - conf.expr_fill_default = ExprFillDefaultMode::Default; - let mut n_fixes = 0; - let fix = super::full_diagnostics( - &db, - &conf, - &AssistResolveStrategy::All, - file_position.file_id.into(), - ) - .into_iter() - .find(|d| { - d.fixes - .as_ref() - .and_then(|fixes| { - n_fixes += fixes.len(); - fixes.iter().find(|fix| { - if !fix.target.contains_inclusive(file_position.offset) { - return false; - } - let actual = { - let source_change = fix.source_change.as_ref().unwrap(); - let file_id = *source_change.source_file_edits.keys().next().unwrap(); - let mut actual = db.file_text(file_id).to_string(); - - for (edit, snippet_edit) in source_change.source_file_edits.values() { - edit.apply(&mut actual); - if let Some(snippet_edit) = snippet_edit { - snippet_edit.apply(&mut actual); - } - } - actual - }; - after == actual - }) - }) - .is_some() - }); - assert!(fix.is_some(), "no diagnostic with desired fix"); - assert!(n_fixes == 1, "Too many fixes suggested"); -} - /// Checks that there's a diagnostic *without* fix at `$0`. pub(crate) fn check_no_fix(#[rust_analyzer::rust_fixture] ra_fixture: &str) { let (db, file_position) = RootDatabase::with_position(ra_fixture); @@ -216,7 +169,7 @@ pub(crate) fn check_no_fix(#[rust_analyzer::rust_fixture] ra_fixture: &str) { &db, &DiagnosticsConfig::test_sample(), &AssistResolveStrategy::All, - file_position.file_id.into(), + file_position.file_id.file_id(&db), ) .pop() .unwrap(); @@ -250,7 +203,7 @@ pub(crate) fn check_diagnostics_with_config( .iter() .copied() .flat_map(|file_id| { - super::full_diagnostics(&db, &config, &AssistResolveStrategy::All, file_id.into()) + super::full_diagnostics(&db, &config, &AssistResolveStrategy::All, file_id.file_id(&db)) .into_iter() .map(|d| { let mut annotation = String::new(); @@ -272,12 +225,13 @@ pub(crate) fn check_diagnostics_with_config( .map(|(diagnostic, annotation)| (diagnostic.file_id, (diagnostic.range, annotation))) .into_group_map(); for file_id in files { - let file_id = file_id.into(); + let file_id = file_id.file_id(&db); let line_index = db.line_index(file_id); let mut actual = annotations.remove(&file_id).unwrap_or_default(); - let expected = extract_annotations(&db.file_text(file_id)); - actual.sort_by_key(|(range, _)| range.start()); + let mut expected = extract_annotations(&db.file_text(file_id).text(&db)); + expected.sort_by_key(|(range, s)| (range.start(), s.clone())); + actual.sort_by_key(|(range, s)| (range.start(), s.clone())); // FIXME: We should panic on duplicates instead, but includes currently cause us to report // diagnostics twice for the calling module when both files are queried. actual.dedup(); @@ -289,7 +243,7 @@ pub(crate) fn check_diagnostics_with_config( for (e, _) in &actual { eprintln!( "Code in range {e:?} = {}", - &db.file_text(file_id)[usize::from(e.start())..usize::from(e.end())] + &db.file_text(file_id).text(&db)[usize::from(e.start())..usize::from(e.end())] ) } } @@ -316,7 +270,7 @@ fn test_disabled_diagnostics() { config.disabled.insert("E0583".into()); let (db, file_id) = RootDatabase::with_single_file(r#"mod foo;"#); - let file_id = file_id.into(); + let file_id = file_id.file_id(&db); let diagnostics = super::full_diagnostics(&db, &config, &AssistResolveStrategy::All, file_id); assert!(diagnostics.is_empty()); diff --git a/src/tools/rust-analyzer/crates/ide-ssr/Cargo.toml b/src/tools/rust-analyzer/crates/ide-ssr/Cargo.toml index fa75e5a421476..1212fa9f9c65f 100644 --- a/src/tools/rust-analyzer/crates/ide-ssr/Cargo.toml +++ b/src/tools/rust-analyzer/crates/ide-ssr/Cargo.toml @@ -12,20 +12,18 @@ rust-version.workspace = true [lib] [dependencies] -cov-mark = "2.0.0-pre.1" +cov-mark = "2.0.0" itertools.workspace = true -triomphe.workspace = true -nohash-hasher.workspace = true # local deps hir.workspace = true ide-db.workspace = true parser.workspace = true -stdx.workspace = true syntax.workspace = true [dev-dependencies] -expect-test = "1.4.0" +expect-test = "1.5.1" +triomphe.workspace = true # local deps test-utils.workspace = true diff --git a/src/tools/rust-analyzer/crates/ide-ssr/src/fragments.rs b/src/tools/rust-analyzer/crates/ide-ssr/src/fragments.rs index ca937a03f82d2..8d6b7c637d730 100644 --- a/src/tools/rust-analyzer/crates/ide-ssr/src/fragments.rs +++ b/src/tools/rust-analyzer/crates/ide-ssr/src/fragments.rs @@ -6,7 +6,7 @@ //! needs to determine it somehow. We do this in a stupid way -- by pasting SSR //! rule into different contexts and checking what works. -use syntax::{ast, AstNode, SyntaxNode}; +use syntax::{AstNode, SyntaxNode, ast}; pub(crate) fn ty(s: &str) -> Result { fragment::("type T = {};", s) diff --git a/src/tools/rust-analyzer/crates/ide-ssr/src/from_comment.rs b/src/tools/rust-analyzer/crates/ide-ssr/src/from_comment.rs index a14e69030e325..181cc74a51d4f 100644 --- a/src/tools/rust-analyzer/crates/ide-ssr/src/from_comment.rs +++ b/src/tools/rust-analyzer/crates/ide-ssr/src/from_comment.rs @@ -1,10 +1,10 @@ //! This module allows building an SSR MatchFinder by parsing the SSR rule //! from a comment. -use ide_db::{base_db::SourceDatabase, EditionedFileId, FilePosition, FileRange, RootDatabase}; +use ide_db::{EditionedFileId, FilePosition, FileRange, RootDatabase, base_db::RootQueryDb}; use syntax::{ - ast::{self, AstNode, AstToken}, TextRange, + ast::{self, AstNode, AstToken}, }; use crate::MatchFinder; @@ -17,7 +17,9 @@ pub fn ssr_from_comment( frange: FileRange, ) -> Option<(MatchFinder<'_>, TextRange)> { let comment = { - let file = db.parse(EditionedFileId::current_edition(frange.file_id)); + let file_id = EditionedFileId::current_edition(db, frange.file_id); + + let file = db.parse(file_id); file.tree().syntax().token_at_offset(frange.range.start()).find_map(ast::Comment::cast) }?; let comment_text_without_prefix = comment.text().strip_prefix(comment.prefix()).unwrap(); diff --git a/src/tools/rust-analyzer/crates/ide-ssr/src/lib.rs b/src/tools/rust-analyzer/crates/ide-ssr/src/lib.rs index 889258c94c535..339c199ec29ac 100644 --- a/src/tools/rust-analyzer/crates/ide-ssr/src/lib.rs +++ b/src/tools/rust-analyzer/crates/ide-ssr/src/lib.rs @@ -80,10 +80,11 @@ pub use crate::{errors::SsrError, from_comment::ssr_from_comment, matching::Matc use crate::{errors::bail, matching::MatchFailureReason}; use hir::{FileRange, Semantics}; +use ide_db::symbol_index::SymbolsDatabase; use ide_db::text_edit::TextEdit; -use ide_db::{base_db::SourceDatabase, EditionedFileId, FileId, FxHashMap, RootDatabase}; +use ide_db::{EditionedFileId, FileId, FxHashMap, RootDatabase, base_db::SourceDatabase}; use resolving::ResolvedRule; -use syntax::{ast, AstNode, SyntaxNode, TextRange}; +use syntax::{AstNode, SyntaxNode, TextRange, ast}; // A structured search replace rule. Create by calling `parse` on a str. #[derive(Debug)] @@ -126,7 +127,7 @@ impl<'db> MatchFinder<'db> { let sema = Semantics::new(db); let file_id = sema .attach_first_edition(lookup_context.file_id) - .unwrap_or_else(|| EditionedFileId::current_edition(lookup_context.file_id)); + .unwrap_or_else(|| EditionedFileId::current_edition(db, lookup_context.file_id)); let resolution_scope = resolving::ResolutionScope::new( &sema, hir::FilePosition { file_id, offset: lookup_context.offset }, @@ -137,10 +138,11 @@ impl<'db> MatchFinder<'db> { /// Constructs an instance using the start of the first file in `db` as the lookup context. pub fn at_first_file(db: &'db ide_db::RootDatabase) -> Result, SsrError> { - use ide_db::base_db::SourceRootDatabase; - use ide_db::symbol_index::SymbolsDatabase; - if let Some(first_file_id) = - db.local_roots().iter().next().and_then(|root| db.source_root(*root).iter().next()) + if let Some(first_file_id) = db + .local_roots() + .iter() + .next() + .and_then(|root| db.source_root(*root).source_root(db).iter().next()) { MatchFinder::in_context( db, @@ -171,7 +173,7 @@ impl<'db> MatchFinder<'db> { let mut matches_by_file = FxHashMap::default(); for m in self.matches().matches { matches_by_file - .entry(m.range.file_id.file_id()) + .entry(m.range.file_id.file_id(self.sema.db)) .or_insert_with(SsrMatches::default) .matches .push(m); @@ -184,7 +186,7 @@ impl<'db> MatchFinder<'db> { replacing::matches_to_edit( self.sema.db, &matches, - &self.sema.db.file_text(file_id), + &self.sema.db.file_text(file_id).text(self.sema.db), &self.rules, ), ) @@ -225,7 +227,7 @@ impl<'db> MatchFinder<'db> { ) -> Vec { let file = self.sema.parse(file_id); let mut res = Vec::new(); - let file_text = self.sema.db.file_text(file_id.into()); + let file_text = self.sema.db.file_text(file_id.file_id(self.sema.db)).text(self.sema.db); let mut remaining_text = &*file_text; let mut base = 0; let len = snippet.len() as u32; diff --git a/src/tools/rust-analyzer/crates/ide-ssr/src/matching.rs b/src/tools/rust-analyzer/crates/ide-ssr/src/matching.rs index e219ba4bf6398..cff4eede04269 100644 --- a/src/tools/rust-analyzer/crates/ide-ssr/src/matching.rs +++ b/src/tools/rust-analyzer/crates/ide-ssr/src/matching.rs @@ -2,16 +2,16 @@ //! process of matching, placeholder values are recorded. use crate::{ + SsrMatches, parsing::{Constraint, NodeKind, Placeholder, Var}, resolving::{ResolvedPattern, ResolvedRule, UfcsCallInfo}, - SsrMatches, }; use hir::{FileRange, ImportPathConfig, Semantics}; -use ide_db::{base_db::SourceDatabase, FxHashMap}; +use ide_db::{FxHashMap, base_db::RootQueryDb}; use std::{cell::Cell, iter::Peekable}; use syntax::{ - ast::{self, AstNode, AstToken, HasGenericArgs}, SmolStr, SyntaxElement, SyntaxElementChildren, SyntaxKind, SyntaxNode, SyntaxToken, + ast::{self, AstNode, AstToken, HasGenericArgs}, }; // Creates a match error. If we're currently attempting to match some code that we thought we were @@ -627,11 +627,10 @@ impl<'db, 'sema> Matcher<'db, 'sema> { })? .original; let krate = self.sema.scope(expr.syntax()).map(|it| it.krate()).unwrap_or_else(|| { - hir::Crate::from( - *self.sema.db.crate_graph().crates_in_topological_order().last().unwrap(), - ) + hir::Crate::from(*self.sema.db.all_crates().last().expect("no crate graph present")) }); - let res = code_type + + code_type .autoderef(self.sema.db) .enumerate() .find(|(_, deref_code_type)| pattern_type == deref_code_type) @@ -644,8 +643,7 @@ impl<'db, 'sema> Matcher<'db, 'sema> { pattern_type.display(self.sema.db, display_target), code_type.display(self.sema.db, display_target) ) - }); - res + }) } fn get_placeholder_for_node(&self, node: &SyntaxNode) -> Option<&Placeholder> { @@ -808,10 +806,20 @@ mod tests { let input = "fn foo() {} fn bar() {} fn main() { foo(1+2); }"; let (db, position, selections) = crate::tests::single_file(input); + let position = ide_db::FilePosition { + file_id: position.file_id.file_id(&db), + offset: position.offset, + }; let mut match_finder = MatchFinder::in_context( &db, - position.into(), - selections.into_iter().map(Into::into).collect(), + position, + selections + .into_iter() + .map(|frange| ide_db::FileRange { + file_id: frange.file_id.file_id(&db), + range: frange.range, + }) + .collect(), ) .unwrap(); match_finder.add_rule(rule).unwrap(); @@ -822,7 +830,7 @@ mod tests { let edits = match_finder.edits(); assert_eq!(edits.len(), 1); - let edit = &edits[&position.file_id.into()]; + let edit = &edits[&position.file_id]; let mut after = input.to_owned(); edit.apply(&mut after); assert_eq!(after, "fn foo() {} fn bar() {} fn main() { bar(1+2); }"); diff --git a/src/tools/rust-analyzer/crates/ide-ssr/src/parsing.rs b/src/tools/rust-analyzer/crates/ide-ssr/src/parsing.rs index ea40d5b815ef3..2c0f1658d837f 100644 --- a/src/tools/rust-analyzer/crates/ide-ssr/src/parsing.rs +++ b/src/tools/rust-analyzer/crates/ide-ssr/src/parsing.rs @@ -9,7 +9,7 @@ use std::{fmt::Display, str::FromStr}; use syntax::{SmolStr, SyntaxKind, SyntaxNode, T}; use crate::errors::bail; -use crate::{fragments, SsrError, SsrPattern, SsrRule}; +use crate::{SsrError, SsrPattern, SsrRule, fragments}; #[derive(Debug)] pub(crate) struct ParsedRule { diff --git a/src/tools/rust-analyzer/crates/ide-ssr/src/replacing.rs b/src/tools/rust-analyzer/crates/ide-ssr/src/replacing.rs index 11c1615a560eb..3c92697926f3d 100644 --- a/src/tools/rust-analyzer/crates/ide-ssr/src/replacing.rs +++ b/src/tools/rust-analyzer/crates/ide-ssr/src/replacing.rs @@ -5,11 +5,11 @@ use ide_db::{FxHashMap, FxHashSet}; use itertools::Itertools; use parser::Edition; use syntax::{ - ast::{self, AstNode, AstToken}, SyntaxElement, SyntaxKind, SyntaxNode, SyntaxToken, TextRange, TextSize, + ast::{self, AstNode, AstToken}, }; -use crate::{fragments, resolving::ResolvedRule, Match, SsrMatches}; +use crate::{Match, SsrMatches, fragments, resolving::ResolvedRule}; /// Returns a text edit that will replace each match in `matches` with its corresponding replacement /// template. Placeholders in the template will have been substituted with whatever they matched to @@ -34,7 +34,7 @@ fn matches_to_edit_at_offset( for m in &matches.matches { edit_builder.replace( m.range.range.checked_sub(relative_start).unwrap(), - render_replace(db, m, file_src, rules, m.range.file_id.edition()), + render_replace(db, m, file_src, rules, m.range.file_id.edition(db)), ); } edit_builder.finish() diff --git a/src/tools/rust-analyzer/crates/ide-ssr/src/resolving.rs b/src/tools/rust-analyzer/crates/ide-ssr/src/resolving.rs index 270ee0b3ec967..a687db4bf58d6 100644 --- a/src/tools/rust-analyzer/crates/ide-ssr/src/resolving.rs +++ b/src/tools/rust-analyzer/crates/ide-ssr/src/resolving.rs @@ -4,11 +4,11 @@ use hir::AsAssocItem; use ide_db::FxHashMap; use parsing::Placeholder; use syntax::{ - ast::{self, HasGenericArgs}, SmolStr, SyntaxKind, SyntaxNode, SyntaxToken, + ast::{self, HasGenericArgs}, }; -use crate::{errors::error, parsing, SsrError}; +use crate::{SsrError, errors::error, parsing}; pub(crate) struct ResolutionScope<'db> { scope: hir::SemanticsScope<'db>, diff --git a/src/tools/rust-analyzer/crates/ide-ssr/src/search.rs b/src/tools/rust-analyzer/crates/ide-ssr/src/search.rs index b1cade39266a0..d89911fca403d 100644 --- a/src/tools/rust-analyzer/crates/ide-ssr/src/search.rs +++ b/src/tools/rust-analyzer/crates/ide-ssr/src/search.rs @@ -1,17 +1,16 @@ //! Searching for matches. use crate::{ - matching, + Match, MatchFinder, matching, resolving::{ResolvedPath, ResolvedPattern, ResolvedRule}, - Match, MatchFinder, }; use hir::FileRange; use ide_db::{ + EditionedFileId, FileId, FxHashSet, defs::Definition, search::{SearchScope, UsageSearchResult}, - EditionedFileId, FileId, FxHashSet, }; -use syntax::{ast, AstNode, SyntaxKind, SyntaxNode}; +use syntax::{AstNode, SyntaxKind, SyntaxNode, ast}; /// A cache for the results of find_usages. This is for when we have multiple patterns that have the /// same path. e.g. if the pattern was `foo::Bar` that can parse as a path, an expression, a type @@ -139,7 +138,7 @@ impl MatchFinder<'_> { files.push( self.sema .attach_first_edition(file_id) - .unwrap_or_else(|| EditionedFileId::current_edition(file_id)), + .unwrap_or_else(|| EditionedFileId::current_edition(self.sema.db, file_id)), ); }); SearchScope::files(&files) @@ -156,10 +155,10 @@ impl MatchFinder<'_> { fn search_files_do(&self, mut callback: impl FnMut(FileId)) { if self.restrict_ranges.is_empty() { // Unrestricted search. - use ide_db::base_db::SourceRootDatabase; + use ide_db::base_db::SourceDatabase; use ide_db::symbol_index::SymbolsDatabase; for &root in self.sema.db.local_roots().iter() { - let sr = self.sema.db.source_root(root); + let sr = self.sema.db.source_root(root).source_root(self.sema.db); for file_id in sr.iter() { callback(file_id); } @@ -230,7 +229,9 @@ impl MatchFinder<'_> { } let Some(node_range) = self.sema.original_range_opt(code) else { return false }; for range in &self.restrict_ranges { - if range.file_id == node_range.file_id && range.range.contains_range(node_range.range) { + if range.file_id == node_range.file_id.file_id(self.sema.db) + && range.range.contains_range(node_range.range) + { return true; } } diff --git a/src/tools/rust-analyzer/crates/ide-ssr/src/tests.rs b/src/tools/rust-analyzer/crates/ide-ssr/src/tests.rs index d783e1952526c..46b633b8a3250 100644 --- a/src/tools/rust-analyzer/crates/ide-ssr/src/tests.rs +++ b/src/tools/rust-analyzer/crates/ide-ssr/src/tests.rs @@ -1,8 +1,8 @@ -use expect_test::{expect, Expect}; +use expect_test::{Expect, expect}; use hir::{FilePosition, FileRange}; use ide_db::{ - base_db::{ra_salsa::Durability, SourceDatabase}, EditionedFileId, FxHashSet, + base_db::{SourceDatabase, salsa::Durability}, }; use test_utils::RangeOrOffset; use triomphe::Arc; @@ -67,7 +67,7 @@ fn parser_undefined_placeholder_in_replacement() { /// the start of the file. If there's a second cursor marker, then we'll return a single range. pub(crate) fn single_file(code: &str) -> (ide_db::RootDatabase, FilePosition, Vec) { use ide_db::symbol_index::SymbolsDatabase; - use test_fixture::{WithFixture, WORKSPACE}; + use test_fixture::{WORKSPACE, WithFixture}; let (mut db, file_id, range_or_offset) = if code.contains(test_utils::CURSOR_MARKER) { ide_db::RootDatabase::with_range_or_offset(code) } else { @@ -98,10 +98,18 @@ fn assert_ssr_transform(rule: &str, input: &str, expected: Expect) { fn assert_ssr_transforms(rules: &[&str], input: &str, expected: Expect) { let (db, position, selections) = single_file(input); + let position = + ide_db::FilePosition { file_id: position.file_id.file_id(&db), offset: position.offset }; let mut match_finder = MatchFinder::in_context( &db, - position.into(), - selections.into_iter().map(Into::into).collect(), + position, + selections + .into_iter() + .map(|selection| ide_db::FileRange { + file_id: selection.file_id.file_id(&db), + range: selection.range, + }) + .collect(), ) .unwrap(); for rule in rules { @@ -114,8 +122,8 @@ fn assert_ssr_transforms(rules: &[&str], input: &str, expected: Expect) { } // Note, db.file_text is not necessarily the same as `input`, since fixture parsing alters // stuff. - let mut actual = db.file_text(position.file_id.into()).to_string(); - edits[&position.file_id.into()].apply(&mut actual); + let mut actual = db.file_text(position.file_id).text(&db).to_string(); + edits[&position.file_id].apply(&mut actual); expected.assert_eq(&actual); } @@ -136,8 +144,14 @@ fn assert_matches(pattern: &str, code: &str, expected: &[&str]) { let (db, position, selections) = single_file(code); let mut match_finder = MatchFinder::in_context( &db, - position.into(), - selections.into_iter().map(Into::into).collect(), + ide_db::FilePosition { file_id: position.file_id.file_id(&db), offset: position.offset }, + selections + .into_iter() + .map(|selection| ide_db::FileRange { + file_id: selection.file_id.file_id(&db), + range: selection.range, + }) + .collect(), ) .unwrap(); match_finder.add_search_pattern(pattern.parse().unwrap()).unwrap(); @@ -153,8 +167,14 @@ fn assert_no_match(pattern: &str, code: &str) { let (db, position, selections) = single_file(code); let mut match_finder = MatchFinder::in_context( &db, - position.into(), - selections.into_iter().map(Into::into).collect(), + ide_db::FilePosition { file_id: position.file_id.file_id(&db), offset: position.offset }, + selections + .into_iter() + .map(|selection| ide_db::FileRange { + file_id: selection.file_id.file_id(&db), + range: selection.range, + }) + .collect(), ) .unwrap(); match_finder.add_search_pattern(pattern.parse().unwrap()).unwrap(); @@ -169,8 +189,14 @@ fn assert_match_failure_reason(pattern: &str, code: &str, snippet: &str, expecte let (db, position, selections) = single_file(code); let mut match_finder = MatchFinder::in_context( &db, - position.into(), - selections.into_iter().map(Into::into).collect(), + ide_db::FilePosition { file_id: position.file_id.file_id(&db), offset: position.offset }, + selections + .into_iter() + .map(|selection| ide_db::FileRange { + file_id: selection.file_id.file_id(&db), + range: selection.range, + }) + .collect(), ) .unwrap(); match_finder.add_search_pattern(pattern.parse().unwrap()).unwrap(); diff --git a/src/tools/rust-analyzer/crates/ide/Cargo.toml b/src/tools/rust-analyzer/crates/ide/Cargo.toml index 9af56c40e982e..1d19daf2f5aa9 100644 --- a/src/tools/rust-analyzer/crates/ide/Cargo.toml +++ b/src/tools/rust-analyzer/crates/ide/Cargo.toml @@ -12,7 +12,7 @@ rust-version.workspace = true [lib] [dependencies] -cov-mark = "2.0.0-pre.1" +cov-mark = "2.0.0" arrayvec.workspace = true either.workspace = true itertools.workspace = true @@ -25,7 +25,7 @@ dot.workspace = true smallvec.workspace = true triomphe.workspace = true nohash-hasher.workspace = true -rustc_apfloat = "0.2.0" +rustc_apfloat = "0.2.2" # local deps cfg.workspace = true @@ -46,7 +46,7 @@ hir.workspace = true toolchain.workspace = true [dev-dependencies] -expect-test = "1.4.0" +expect-test = "1.5.1" # local deps test-utils.workspace = true diff --git a/src/tools/rust-analyzer/crates/ide/src/annotations.rs b/src/tools/rust-analyzer/crates/ide/src/annotations.rs index e47891bbdfe7e..3d71da985b24b 100644 --- a/src/tools/rust-analyzer/crates/ide/src/annotations.rs +++ b/src/tools/rust-analyzer/crates/ide/src/annotations.rs @@ -1,17 +1,17 @@ use hir::{HasSource, InFile, InRealFile, Semantics}; use ide_db::{ - defs::Definition, helpers::visit_file_defs, FileId, FilePosition, FileRange, FxIndexSet, - RootDatabase, + FileId, FilePosition, FileRange, FxIndexSet, RootDatabase, defs::Definition, + helpers::visit_file_defs, }; use itertools::Itertools; -use syntax::{ast::HasName, AstNode, TextRange}; +use syntax::{AstNode, TextRange, ast::HasName}; use crate::{ + NavigationTarget, RunnableKind, annotations::fn_references::find_all_methods, goto_implementation::goto_implementation, references::find_all_refs, - runnables::{runnables, Runnable}, - NavigationTarget, RunnableKind, + runnables::{Runnable, runnables}, }; mod fn_references; @@ -149,7 +149,7 @@ pub(crate) fn annotations( source_file_id: FileId, ) -> Option<(TextRange, Option)> { if let Some(InRealFile { file_id, value }) = node.original_ast_node_rooted(db) { - if file_id == source_file_id { + if file_id.file_id(db) == source_file_id { return Some(( value.syntax().text_range(), value.name().map(|name| name.syntax().text_range()), @@ -209,9 +209,9 @@ fn should_skip_runnable(kind: &RunnableKind, binary_target: bool) -> bool { #[cfg(test)] mod tests { - use expect_test::{expect, Expect}; + use expect_test::{Expect, expect}; - use crate::{fixture, Annotation, AnnotationConfig}; + use crate::{Annotation, AnnotationConfig, fixture}; use super::AnnotationLocation; diff --git a/src/tools/rust-analyzer/crates/ide/src/annotations/fn_references.rs b/src/tools/rust-analyzer/crates/ide/src/annotations/fn_references.rs index 08cc10509cb8a..427a2eff82017 100644 --- a/src/tools/rust-analyzer/crates/ide/src/annotations/fn_references.rs +++ b/src/tools/rust-analyzer/crates/ide/src/annotations/fn_references.rs @@ -4,7 +4,7 @@ use hir::Semantics; use ide_assists::utils::test_related_attribute_syn; use ide_db::RootDatabase; -use syntax::{ast, ast::HasName, AstNode, SyntaxNode, TextRange}; +use syntax::{AstNode, SyntaxNode, TextRange, ast, ast::HasName}; use crate::FileId; @@ -34,8 +34,8 @@ fn method_range(item: SyntaxNode) -> Option<(TextRange, Option)> { mod tests { use syntax::TextRange; - use crate::fixture; use crate::TextSize; + use crate::fixture; use std::ops::RangeInclusive; #[test] diff --git a/src/tools/rust-analyzer/crates/ide/src/call_hierarchy.rs b/src/tools/rust-analyzer/crates/ide/src/call_hierarchy.rs index afd6f740c42c6..4b8d07a253375 100644 --- a/src/tools/rust-analyzer/crates/ide/src/call_hierarchy.rs +++ b/src/tools/rust-analyzer/crates/ide/src/call_hierarchy.rs @@ -4,14 +4,14 @@ use std::iter; use hir::Semantics; use ide_db::{ + FileRange, FxIndexMap, RootDatabase, defs::{Definition, NameClass, NameRefClass}, helpers::pick_best_token, search::FileReference, - FileRange, FxIndexMap, RootDatabase, }; -use syntax::{ast, AstNode, SyntaxKind::IDENT}; +use syntax::{AstNode, SyntaxKind::IDENT, ast}; -use crate::{goto_definition, FilePosition, NavigationTarget, RangeInfo, TryToNav}; +use crate::{FilePosition, NavigationTarget, RangeInfo, TryToNav, goto_definition}; #[derive(Debug, Clone)] pub struct CallItem { @@ -76,9 +76,9 @@ pub(crate) fn incoming_calls( } let range = sema.original_range(name.syntax()); - calls.add(nav.call_site, range.into()); + calls.add(nav.call_site, range.into_file_id(db)); if let Some(other) = nav.def_site { - calls.add(other, range.into()); + calls.add(other, range.into_file_id(db)); } } } @@ -143,7 +143,7 @@ pub(crate) fn outgoing_calls( Some(nav_target.into_iter().zip(iter::repeat(range))) }) .flatten() - .for_each(|(nav, range)| calls.add(nav, range.into())); + .for_each(|(nav, range)| calls.add(nav, range.into_file_id(db))); Some(calls.into_items()) } @@ -165,7 +165,7 @@ impl CallLocations { #[cfg(test)] mod tests { - use expect_test::{expect, Expect}; + use expect_test::{Expect, expect}; use ide_db::FilePosition; use itertools::Itertools; diff --git a/src/tools/rust-analyzer/crates/ide/src/child_modules.rs b/src/tools/rust-analyzer/crates/ide/src/child_modules.rs new file mode 100644 index 0000000000000..b781596187b91 --- /dev/null +++ b/src/tools/rust-analyzer/crates/ide/src/child_modules.rs @@ -0,0 +1,123 @@ +use hir::Semantics; +use ide_db::{FilePosition, RootDatabase}; +use syntax::{ + algo::find_node_at_offset, + ast::{self, AstNode}, +}; + +use crate::NavigationTarget; + +// Feature: Child Modules +// +// Navigates to the child modules of the current module. +// +// | Editor | Action Name | +// |---------|-------------| +// | VS Code | **rust-analyzer: Locate child modules** | + +/// This returns `Vec` because a module may be included from several places. +pub(crate) fn child_modules(db: &RootDatabase, position: FilePosition) -> Vec { + let sema = Semantics::new(db); + let source_file = sema.parse_guess_edition(position.file_id); + // First go to the parent module which contains the cursor + let module = find_node_at_offset::(source_file.syntax(), position.offset); + + match module { + Some(module) => { + // Return all child modules inside the ItemList of the parent module + sema.to_def(&module) + .into_iter() + .flat_map(|module| module.children(db)) + .map(|module| NavigationTarget::from_module_to_decl(db, module).call_site()) + .collect() + } + None => { + // Return all the child modules inside the source file + sema.file_to_module_defs(position.file_id) + .flat_map(|module| module.children(db)) + .map(|module| NavigationTarget::from_module_to_decl(db, module).call_site()) + .collect() + } + } +} + +#[cfg(test)] +mod tests { + use ide_db::FileRange; + + use crate::fixture; + + fn check_child_module(#[rust_analyzer::rust_fixture] ra_fixture: &str) { + let (analysis, position, expected) = fixture::annotations(ra_fixture); + let navs = analysis.child_modules(position).unwrap(); + let navs = navs + .iter() + .map(|nav| FileRange { file_id: nav.file_id, range: nav.focus_or_full_range() }) + .collect::>(); + assert_eq!(expected.into_iter().map(|(fr, _)| fr).collect::>(), navs); + } + + #[test] + fn test_resolve_child_module() { + check_child_module( + r#" +//- /lib.rs +$0 +mod foo; + //^^^ + +//- /foo.rs +// empty +"#, + ); + } + + #[test] + fn test_resolve_child_module_on_module_decl() { + check_child_module( + r#" +//- /lib.rs +mod $0foo; +//- /foo.rs +mod bar; + //^^^ + +//- /foo/bar.rs +// empty +"#, + ); + } + + #[test] + fn test_resolve_child_module_for_inline() { + check_child_module( + r#" +//- /lib.rs +mod foo { + mod $0bar { + mod baz {} + } //^^^ +} +"#, + ); + } + + #[test] + fn test_resolve_multi_child_module() { + check_child_module( + r#" +//- /main.rs +$0 +mod foo; + //^^^ +mod bar; + //^^^ +//- /foo.rs +// empty + +//- /bar.rs +// empty +"#, + ); + } +} diff --git a/src/tools/rust-analyzer/crates/ide/src/doc_links.rs b/src/tools/rust-analyzer/crates/ide/src/doc_links.rs index 8d2ca33bf254d..f0247f32d7ec6 100644 --- a/src/tools/rust-analyzer/crates/ide/src/doc_links.rs +++ b/src/tools/rust-analyzer/crates/ide/src/doc_links.rs @@ -6,28 +6,29 @@ mod tests; mod intra_doc_links; use pulldown_cmark::{BrokenLink, CowStr, Event, InlineStr, LinkType, Options, Parser, Tag}; -use pulldown_cmark_to_cmark::{cmark_resume_with_options, Options as CMarkOptions}; +use pulldown_cmark_to_cmark::{Options as CMarkOptions, cmark_resume_with_options}; use stdx::format_to; use url::Url; -use hir::{db::HirDatabase, sym, Adt, AsAssocItem, AssocItem, AssocItemContainer, HasAttrs}; +use hir::{Adt, AsAssocItem, AssocItem, AssocItemContainer, HasAttrs, db::HirDatabase, sym}; use ide_db::{ - base_db::{CrateOrigin, LangCrateOrigin, ReleaseChannel, SourceDatabase}, + RootDatabase, + base_db::{CrateOrigin, LangCrateOrigin, ReleaseChannel, RootQueryDb}, defs::{Definition, NameClass, NameRefClass}, - documentation::{docs_with_rangemap, Documentation, HasDocs}, + documentation::{Documentation, HasDocs, docs_with_rangemap}, helpers::pick_best_token, - RootDatabase, }; use syntax::{ - ast::{self, IsString}, - match_ast, AstNode, AstToken, + AstNode, AstToken, SyntaxKind::*, - SyntaxNode, SyntaxToken, TextRange, TextSize, T, + SyntaxNode, SyntaxToken, T, TextRange, TextSize, + ast::{self, IsString}, + match_ast, }; use crate::{ - doc_links::intra_doc_links::{parse_intra_doc_link, strip_prefixes_suffixes}, FilePosition, Semantics, + doc_links::intra_doc_links::{parse_intra_doc_link, strip_prefixes_suffixes}, }; /// Web and local links to an item's documentation. @@ -504,9 +505,7 @@ fn get_doc_base_urls( let Some(krate) = krate else { return Default::default() }; let Some(display_name) = krate.display_name(db) else { return Default::default() }; - let crate_data = &db.crate_graph()[krate.into()]; - - let (web_base, local_base) = match &crate_data.origin { + let (web_base, local_base) = match krate.origin(db) { // std and co do not specify `html_root_url` any longer so we gotta handwrite this ourself. // FIXME: Use the toolchains channel instead of nightly CrateOrigin::Lang( @@ -598,9 +597,9 @@ fn filename_and_frag_for_def( Definition::Module(m) => match m.name(db) { // `#[doc(keyword = "...")]` is internal used only by rust compiler Some(name) => { - match m.attrs(db).by_key(&sym::doc).find_string_value_in_tt(&sym::keyword) { + match m.attrs(db).by_key(sym::doc).find_string_value_in_tt(sym::keyword) { Some(kw) => { - format!("keyword.{}.html", kw) + format!("keyword.{kw}.html") } None => format!("{}/index.html", name.as_str()), } @@ -628,7 +627,7 @@ fn filename_and_frag_for_def( return Some((def, file, Some(format!("variant.{}", ev.name(db).as_str())))); } Definition::Const(c) => { - format!("const.{}.html", c.name(db)?.as_str()) + format!("constant.{}.html", c.name(db)?.as_str()) } Definition::Static(s) => { format!("static.{}.html", s.name(db).as_str()) diff --git a/src/tools/rust-analyzer/crates/ide/src/doc_links/intra_doc_links.rs b/src/tools/rust-analyzer/crates/ide/src/doc_links/intra_doc_links.rs index 6cc240d652499..c331734c785ed 100644 --- a/src/tools/rust-analyzer/crates/ide/src/doc_links/intra_doc_links.rs +++ b/src/tools/rust-analyzer/crates/ide/src/doc_links/intra_doc_links.rs @@ -53,7 +53,7 @@ pub(super) fn strip_prefixes_suffixes(s: &str) -> &str { #[cfg(test)] mod tests { - use expect_test::{expect, Expect}; + use expect_test::{Expect, expect}; use super::*; diff --git a/src/tools/rust-analyzer/crates/ide/src/doc_links/tests.rs b/src/tools/rust-analyzer/crates/ide/src/doc_links/tests.rs index b09e3a3c8047c..91785be8d8bad 100644 --- a/src/tools/rust-analyzer/crates/ide/src/doc_links/tests.rs +++ b/src/tools/rust-analyzer/crates/ide/src/doc_links/tests.rs @@ -1,18 +1,19 @@ use std::iter; -use expect_test::{expect, Expect}; +use expect_test::{Expect, expect}; use hir::Semantics; use ide_db::{ + FilePosition, FileRange, RootDatabase, defs::Definition, documentation::{Documentation, HasDocs}, - FilePosition, FileRange, RootDatabase, }; use itertools::Itertools; -use syntax::{ast, match_ast, AstNode, SyntaxNode}; +use syntax::{AstNode, SyntaxNode, ast, match_ast}; use crate::{ + TryToNav, doc_links::{extract_definitions_from_docs, resolve_doc_path_for_def, rewrite_links}, - fixture, TryToNav, + fixture, }; fn check_external_docs( @@ -43,7 +44,7 @@ fn check_external_docs( fn check_rewrite(#[rust_analyzer::rust_fixture] ra_fixture: &str, expect: Expect) { let (analysis, position) = fixture::position(ra_fixture); - let sema = &Semantics::new(&*analysis.db); + let sema = &Semantics::new(&analysis.db); let (cursor_def, docs) = def_under_cursor(sema, &position); let res = rewrite_links(sema.db, docs.as_str(), cursor_def); expect.assert_eq(&res) @@ -54,7 +55,7 @@ fn check_doc_links(#[rust_analyzer::rust_fixture] ra_fixture: &str) { let (analysis, position, mut expected) = fixture::annotations(ra_fixture); expected.sort_by_key(key_fn); - let sema = &Semantics::new(&*analysis.db); + let sema = &Semantics::new(&analysis.db); let (cursor_def, docs) = def_under_cursor(sema, &position); let defs = extract_definitions_from_docs(&docs); let actual: Vec<_> = defs @@ -683,7 +684,9 @@ fn rewrite_intra_doc_link_with_anchor() { //! $0[PartialEq#derivable] fn main() {} "#, - expect!["[PartialEq#derivable](https://doc.rust-lang.org/stable/core/cmp/trait.PartialEq.html#derivable)"], + expect![ + "[PartialEq#derivable](https://doc.rust-lang.org/stable/core/cmp/trait.PartialEq.html#derivable)" + ], ); } diff --git a/src/tools/rust-analyzer/crates/ide/src/expand_macro.rs b/src/tools/rust-analyzer/crates/ide/src/expand_macro.rs index ad4308e06a14b..241a702038da4 100644 --- a/src/tools/rust-analyzer/crates/ide/src/expand_macro.rs +++ b/src/tools/rust-analyzer/crates/ide/src/expand_macro.rs @@ -1,12 +1,12 @@ use hir::db::ExpandDatabase; -use hir::{ExpandResult, InFile, MacroFileIdExt, Semantics}; -use ide_db::base_db::CrateId; +use hir::{ExpandResult, InFile, Semantics}; use ide_db::{ - helpers::pick_best_token, syntax_helpers::prettify_macro_expansion, FileId, RootDatabase, + FileId, RootDatabase, base_db::Crate, helpers::pick_best_token, + syntax_helpers::prettify_macro_expansion, }; -use span::{Edition, SpanMap, SyntaxContextId, TextRange, TextSize}; +use span::{Edition, SpanMap, SyntaxContext, TextRange, TextSize}; use stdx::format_to; -use syntax::{ast, ted, AstNode, NodeOrToken, SyntaxKind, SyntaxNode, T}; +use syntax::{AstNode, NodeOrToken, SyntaxKind, SyntaxNode, T, ast, ted}; use crate::FilePosition; @@ -99,7 +99,7 @@ pub(crate) fn expand_macro(db: &RootDatabase, position: FilePosition) -> Option< .display( db, sema.attach_first_edition(position.file_id) - .map(|it| it.edition()) + .map(|it| it.edition(db)) .unwrap_or(Edition::CURRENT), ) .to_string(), @@ -142,7 +142,7 @@ fn expand_macro_recur( sema: &Semantics<'_, RootDatabase>, macro_call: &ast::Item, error: &mut String, - result_span_map: &mut SpanMap, + result_span_map: &mut SpanMap, offset_in_original_node: TextSize, ) -> Option { let ExpandResult { value: expanded, err } = match macro_call { @@ -170,7 +170,7 @@ fn expand( sema: &Semantics<'_, RootDatabase>, expanded: SyntaxNode, error: &mut String, - result_span_map: &mut SpanMap, + result_span_map: &mut SpanMap, mut offset_in_original_node: i32, ) -> SyntaxNode { let children = expanded.descendants().filter_map(ast::Item::cast); @@ -207,8 +207,8 @@ fn format( kind: SyntaxKind, file_id: FileId, expanded: SyntaxNode, - span_map: &SpanMap, - krate: CrateId, + span_map: &SpanMap, + krate: Crate, ) -> String { let expansion = prettify_macro_expansion(db, expanded, span_map, krate).to_string(); @@ -234,7 +234,8 @@ fn _format( file_id: FileId, expansion: &str, ) -> Option { - use ide_db::base_db::{FileLoader, SourceDatabase}; + use ide_db::base_db::RootQueryDb; + // hack until we get hygiene working (same character amount to preserve formatting as much as possible) const DOLLAR_CRATE_REPLACE: &str = "__r_a_"; const BUILTIN_REPLACE: &str = "builtin__POUND"; @@ -249,7 +250,7 @@ fn _format( let expansion = format!("{prefix}{expansion}{suffix}"); let &crate_id = db.relevant_crates(file_id).iter().next()?; - let edition = db.crate_graph()[crate_id].edition; + let edition = crate_id.data(db).edition; #[allow(clippy::disallowed_methods)] let mut cmd = std::process::Command::new(toolchain::Tool::Rustfmt.path()); @@ -289,7 +290,7 @@ fn _format( #[cfg(test)] mod tests { - use expect_test::{expect, Expect}; + use expect_test::{Expect, expect}; use crate::fixture; @@ -550,7 +551,7 @@ macro_rules! foo { } fn main() { - let res = fo$0o!(); + fo$0o!() } "#, expect![[r#" @@ -559,6 +560,24 @@ fn main() { ); } + #[test] + fn macro_expand_item_expansion_in_expression_call() { + check( + r#" +macro_rules! foo { + () => {fn f() {}}; +} + +fn main() { + let res = fo$0o!(); +} +"#, + expect![[r#" + foo! + fn f(){}"#]], + ); + } + #[test] fn macro_expand_derive() { check( @@ -677,4 +696,26 @@ crate::Foo; crate::Foo;"#]], ); } + + #[test] + fn semi_glueing() { + check( + r#" +macro_rules! __log_value { + ($key:ident :$capture:tt =) => {}; +} + +macro_rules! __log { + ($key:tt $(:$capture:tt)? $(= $value:expr)?; $($arg:tt)+) => { + __log_value!($key $(:$capture)* = $($value)*); + }; +} + +__log!(written:%; "Test"$0); + "#, + expect![[r#" + __log! + "#]], + ); + } } diff --git a/src/tools/rust-analyzer/crates/ide/src/extend_selection.rs b/src/tools/rust-analyzer/crates/ide/src/extend_selection.rs index 76414854e91ef..a374f9752fcfa 100644 --- a/src/tools/rust-analyzer/crates/ide/src/extend_selection.rs +++ b/src/tools/rust-analyzer/crates/ide/src/extend_selection.rs @@ -3,11 +3,11 @@ use std::iter::successors; use hir::Semantics; use ide_db::RootDatabase; use syntax::{ - algo::{self, skip_trivia_token}, - ast::{self, AstNode, AstToken}, Direction, NodeOrToken, SyntaxKind::{self, *}, - SyntaxNode, SyntaxToken, TextRange, TextSize, TokenAtOffset, T, + SyntaxNode, SyntaxToken, T, TextRange, TextSize, TokenAtOffset, + algo::{self, skip_trivia_token}, + ast::{self, AstNode, AstToken}, }; use crate::FileRange; @@ -178,11 +178,7 @@ fn extend_tokens_from_range( .last()?; let range = first.text_range().cover(last.text_range()); - if range.contains_range(original_range) && original_range != range { - Some(range) - } else { - None - } + if range.contains_range(original_range) && original_range != range { Some(range) } else { None } } /// Find the shallowest node with same range, which allows us to traverse siblings. @@ -216,11 +212,7 @@ fn extend_single_word_in_comment_or_string( let to: TextSize = (cursor_position + end_idx).into(); let range = TextRange::new(from, to); - if range.is_empty() { - None - } else { - Some(range + leaf.text_range().start()) - } + if range.is_empty() { None } else { Some(range + leaf.text_range().start()) } } fn extend_ws(root: &SyntaxNode, ws: SyntaxToken, offset: TextSize) -> TextRange { diff --git a/src/tools/rust-analyzer/crates/ide/src/fetch_crates.rs b/src/tools/rust-analyzer/crates/ide/src/fetch_crates.rs index 5ed2144430741..956379e722d53 100644 --- a/src/tools/rust-analyzer/crates/ide/src/fetch_crates.rs +++ b/src/tools/rust-analyzer/crates/ide/src/fetch_crates.rs @@ -1,6 +1,6 @@ use ide_db::{ - base_db::{CrateOrigin, SourceDatabase}, FileId, FxIndexSet, RootDatabase, + base_db::{CrateOrigin, RootQueryDb}, }; #[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash)] @@ -20,21 +20,24 @@ pub struct CrateInfo { // // ![Show Dependency Tree](https://user-images.githubusercontent.com/5748995/229394139-2625beab-f4c9-484b-84ed-ad5dee0b1e1a.png) pub(crate) fn fetch_crates(db: &RootDatabase) -> FxIndexSet { - let crate_graph = db.crate_graph(); - crate_graph + db.all_crates() .iter() - .map(|crate_id| &crate_graph[crate_id]) - .filter(|&data| !matches!(data.origin, CrateOrigin::Local { .. })) - .map(crate_info) + .copied() + .map(|crate_id| (crate_id.data(db), crate_id.extra_data(db))) + .filter(|(data, _)| !matches!(data.origin, CrateOrigin::Local { .. })) + .map(|(data, extra_data)| crate_info(data, extra_data)) .collect() } -fn crate_info(data: &ide_db::base_db::CrateData) -> CrateInfo { - let crate_name = crate_name(data); - let version = data.version.clone(); +fn crate_info( + data: &ide_db::base_db::BuiltCrateData, + extra_data: &ide_db::base_db::ExtraCrateData, +) -> CrateInfo { + let crate_name = crate_name(extra_data); + let version = extra_data.version.clone(); CrateInfo { name: crate_name, version, root_file_id: data.root_file_id } } -fn crate_name(data: &ide_db::base_db::CrateData) -> Option { +fn crate_name(data: &ide_db::base_db::ExtraCrateData) -> Option { data.display_name.as_ref().map(|it| it.canonical_name().as_str().to_owned()) } diff --git a/src/tools/rust-analyzer/crates/ide/src/file_structure.rs b/src/tools/rust-analyzer/crates/ide/src/file_structure.rs index 52fbab6fa12b1..347da4e85b4aa 100644 --- a/src/tools/rust-analyzer/crates/ide/src/file_structure.rs +++ b/src/tools/rust-analyzer/crates/ide/src/file_structure.rs @@ -1,8 +1,8 @@ use ide_db::SymbolKind; use syntax::{ + AstNode, AstToken, NodeOrToken, SourceFile, SyntaxNode, SyntaxToken, TextRange, WalkEvent, ast::{self, HasAttrs, HasGenericParams, HasName}, - match_ast, AstNode, AstToken, NodeOrToken, SourceFile, SyntaxNode, SyntaxToken, TextRange, - WalkEvent, + match_ast, }; #[derive(Debug, Clone)] @@ -250,7 +250,7 @@ fn structure_token(token: SyntaxToken) -> Option { #[cfg(test)] mod tests { - use expect_test::{expect, Expect}; + use expect_test::{Expect, expect}; use super::*; diff --git a/src/tools/rust-analyzer/crates/ide/src/fixture.rs b/src/tools/rust-analyzer/crates/ide/src/fixture.rs index a0612f48d37e8..fbf89042fae15 100644 --- a/src/tools/rust-analyzer/crates/ide/src/fixture.rs +++ b/src/tools/rust-analyzer/crates/ide/src/fixture.rs @@ -1,16 +1,16 @@ //! Utilities for creating `Analysis` instances for tests. use test_fixture::ChangeFixture; -use test_utils::{extract_annotations, RangeOrOffset}; +use test_utils::{RangeOrOffset, extract_annotations}; use crate::{Analysis, AnalysisHost, FileId, FilePosition, FileRange}; /// Creates analysis for a single file. pub(crate) fn file(#[rust_analyzer::rust_fixture] ra_fixture: &str) -> (Analysis, FileId) { let mut host = AnalysisHost::default(); - let change_fixture = ChangeFixture::parse(ra_fixture); + let change_fixture = ChangeFixture::parse(&host.db, ra_fixture); host.db.enable_proc_attr_macros(); host.db.apply_change(change_fixture.change); - (host.analysis(), change_fixture.files[0].into()) + (host.analysis(), change_fixture.files[0].file_id(&host.db)) } /// Creates analysis from a multi-file fixture, returns positions marked with $0. @@ -18,23 +18,23 @@ pub(crate) fn position( #[rust_analyzer::rust_fixture] ra_fixture: &str, ) -> (Analysis, FilePosition) { let mut host = AnalysisHost::default(); - let change_fixture = ChangeFixture::parse(ra_fixture); + let change_fixture = ChangeFixture::parse(&host.db, ra_fixture); host.db.enable_proc_attr_macros(); host.db.apply_change(change_fixture.change); let (file_id, range_or_offset) = change_fixture.file_position.expect("expected a marker ($0)"); let offset = range_or_offset.expect_offset(); - (host.analysis(), FilePosition { file_id: file_id.into(), offset }) + (host.analysis(), FilePosition { file_id: file_id.file_id(&host.db), offset }) } /// Creates analysis for a single file, returns range marked with a pair of $0. pub(crate) fn range(#[rust_analyzer::rust_fixture] ra_fixture: &str) -> (Analysis, FileRange) { let mut host = AnalysisHost::default(); - let change_fixture = ChangeFixture::parse(ra_fixture); + let change_fixture = ChangeFixture::parse(&host.db, ra_fixture); host.db.enable_proc_attr_macros(); host.db.apply_change(change_fixture.change); let (file_id, range_or_offset) = change_fixture.file_position.expect("expected a marker ($0)"); let range = range_or_offset.expect_range(); - (host.analysis(), FileRange { file_id: file_id.into(), range }) + (host.analysis(), FileRange { file_id: file_id.file_id(&host.db), range }) } /// Creates analysis for a single file, returns range marked with a pair of $0 or a position marked with $0. @@ -42,11 +42,11 @@ pub(crate) fn range_or_position( #[rust_analyzer::rust_fixture] ra_fixture: &str, ) -> (Analysis, FileId, RangeOrOffset) { let mut host = AnalysisHost::default(); - let change_fixture = ChangeFixture::parse(ra_fixture); + let change_fixture = ChangeFixture::parse(&host.db, ra_fixture); host.db.enable_proc_attr_macros(); host.db.apply_change(change_fixture.change); let (file_id, range_or_offset) = change_fixture.file_position.expect("expected a marker ($0)"); - (host.analysis(), file_id.into(), range_or_offset) + (host.analysis(), file_id.file_id(&host.db), range_or_offset) } /// Creates analysis from a multi-file fixture, returns positions marked with $0. @@ -54,24 +54,25 @@ pub(crate) fn annotations( #[rust_analyzer::rust_fixture] ra_fixture: &str, ) -> (Analysis, FilePosition, Vec<(FileRange, String)>) { let mut host = AnalysisHost::default(); - let change_fixture = ChangeFixture::parse(ra_fixture); + let change_fixture = ChangeFixture::parse(&host.db, ra_fixture); host.db.enable_proc_attr_macros(); host.db.apply_change(change_fixture.change); let (file_id, range_or_offset) = change_fixture.file_position.expect("expected a marker ($0)"); let offset = range_or_offset.expect_offset(); + let db = &host.db; let annotations = change_fixture .files .iter() .flat_map(|&file_id| { - let file_text = host.analysis().file_text(file_id.into()).unwrap(); + let file_text = host.analysis().file_text(file_id.file_id(&host.db)).unwrap(); let annotations = extract_annotations(&file_text); annotations .into_iter() - .map(move |(range, data)| (FileRange { file_id: file_id.into(), range }, data)) + .map(move |(range, data)| (FileRange { file_id: file_id.file_id(db), range }, data)) }) .collect(); - (host.analysis(), FilePosition { file_id: file_id.into(), offset }, annotations) + (host.analysis(), FilePosition { file_id: file_id.file_id(&host.db), offset }, annotations) } /// Creates analysis from a multi-file fixture with annotations without $0 @@ -79,19 +80,20 @@ pub(crate) fn annotations_without_marker( #[rust_analyzer::rust_fixture] ra_fixture: &str, ) -> (Analysis, Vec<(FileRange, String)>) { let mut host = AnalysisHost::default(); - let change_fixture = ChangeFixture::parse(ra_fixture); + let change_fixture = ChangeFixture::parse(&host.db, ra_fixture); host.db.enable_proc_attr_macros(); host.db.apply_change(change_fixture.change); + let db = &host.db; let annotations = change_fixture .files .iter() .flat_map(|&file_id| { - let file_text = host.analysis().file_text(file_id.into()).unwrap(); + let file_text = host.analysis().file_text(file_id.file_id(db)).unwrap(); let annotations = extract_annotations(&file_text); annotations .into_iter() - .map(move |(range, data)| (FileRange { file_id: file_id.into(), range }, data)) + .map(move |(range, data)| (FileRange { file_id: file_id.file_id(db), range }, data)) }) .collect(); (host.analysis(), annotations) diff --git a/src/tools/rust-analyzer/crates/ide/src/folding_ranges.rs b/src/tools/rust-analyzer/crates/ide/src/folding_ranges.rs index e5a94ff9fe964..194e8c968f758 100755 --- a/src/tools/rust-analyzer/crates/ide/src/folding_ranges.rs +++ b/src/tools/rust-analyzer/crates/ide/src/folding_ranges.rs @@ -1,9 +1,10 @@ -use ide_db::{syntax_helpers::node_ext::vis_eq, FxHashSet}; +use ide_db::{FxHashSet, syntax_helpers::node_ext::vis_eq}; use syntax::{ - ast::{self, AstNode, AstToken}, - match_ast, Direction, NodeOrToken, SourceFile, + Direction, NodeOrToken, SourceFile, SyntaxKind::{self, *}, TextRange, TextSize, + ast::{self, AstNode, AstToken}, + match_ast, }; use std::hash::Hash; diff --git a/src/tools/rust-analyzer/crates/ide/src/goto_declaration.rs b/src/tools/rust-analyzer/crates/ide/src/goto_declaration.rs index 3742edc8db84b..38c032d382e3d 100644 --- a/src/tools/rust-analyzer/crates/ide/src/goto_declaration.rs +++ b/src/tools/rust-analyzer/crates/ide/src/goto_declaration.rs @@ -1,13 +1,13 @@ use hir::{AsAssocItem, Semantics}; use ide_db::{ - defs::{Definition, NameClass, NameRefClass}, RootDatabase, + defs::{Definition, NameClass, NameRefClass}, }; -use syntax::{ast, match_ast, AstNode, SyntaxKind::*, T}; +use syntax::{AstNode, SyntaxKind::*, T, ast, match_ast}; use crate::{ - goto_definition::goto_definition, navigation_target::TryToNav, FilePosition, NavigationTarget, - RangeInfo, + FilePosition, NavigationTarget, RangeInfo, goto_definition::goto_definition, + navigation_target::TryToNav, }; // Feature: Go to Declaration @@ -32,7 +32,7 @@ pub(crate) fn goto_declaration( .descend_into_macros_no_opaque(original_token) .iter() .filter_map(|token| { - let parent = token.parent()?; + let parent = token.value.parent()?; let def = match_ast! { match parent { ast::NameRef(name_ref) => match NameRefClass::classify(&sema, &name_ref)? { @@ -52,7 +52,7 @@ pub(crate) fn goto_declaration( }; let assoc = match def? { Definition::Module(module) => { - return Some(NavigationTarget::from_module_to_decl(db, module)) + return Some(NavigationTarget::from_module_to_decl(db, module)); } Definition::Const(c) => c.as_assoc_item(db), Definition::TypeAlias(ta) => ta.as_assoc_item(db), @@ -69,11 +69,7 @@ pub(crate) fn goto_declaration( .flatten() .collect(); - if info.is_empty() { - goto_definition(db, position) - } else { - Some(RangeInfo::new(range, info)) - } + if info.is_empty() { goto_definition(db, position) } else { Some(RangeInfo::new(range, info)) } } #[cfg(test)] diff --git a/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs b/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs index 60a904233a9a5..b894e857522f9 100644 --- a/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs +++ b/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs @@ -1,28 +1,28 @@ use std::{iter, mem::discriminant}; use crate::{ + FilePosition, NavigationTarget, RangeInfo, TryToNav, UpmappingResult, doc_links::token_as_doc_comment, navigation_target::{self, ToNav}, - FilePosition, NavigationTarget, RangeInfo, TryToNav, UpmappingResult, }; use hir::{ - sym, AsAssocItem, AssocItem, CallableKind, FileRange, HasCrate, InFile, MacroFileIdExt, - ModuleDef, Semantics, + AsAssocItem, AssocItem, CallableKind, FileRange, HasCrate, InFile, ModuleDef, Semantics, sym, }; use ide_db::{ - base_db::{AnchoredPath, FileLoader, SourceDatabase}, + RootDatabase, SymbolKind, + base_db::{AnchoredPath, SourceDatabase}, defs::{Definition, IdentClass}, famous_defs::FamousDefs, helpers::pick_best_token, - RootDatabase, SymbolKind, }; use itertools::Itertools; use span::{Edition, FileId}; use syntax::{ - ast::{self, HasLoopBody}, - match_ast, AstNode, AstToken, + AstNode, AstToken, SyntaxKind::*, - SyntaxNode, SyntaxToken, TextRange, T, + SyntaxNode, SyntaxToken, T, TextRange, + ast::{self, HasLoopBody}, + match_ast, }; // Feature: Go to Definition @@ -43,7 +43,7 @@ pub(crate) fn goto_definition( let sema = &Semantics::new(db); let file = sema.parse_guess_edition(file_id).syntax().clone(); let edition = - sema.attach_first_edition(file_id).map(|it| it.edition()).unwrap_or(Edition::CURRENT); + sema.attach_first_edition(file_id).map(|it| it.edition(db)).unwrap_or(Edition::CURRENT); let original_token = pick_best_token(file.token_at_offset(offset), |kind| match kind { IDENT | INT_NUMBER @@ -91,16 +91,19 @@ pub(crate) fn goto_definition( .descend_into_macros_no_opaque(original_token.clone()) .into_iter() .filter_map(|token| { - let parent = token.parent()?; + let parent = token.value.parent()?; - if let Some(token) = ast::String::cast(token.clone()) { - if let Some(x) = try_lookup_include_path(sema, token, file_id) { + let token_file_id = token.file_id; + if let Some(token) = ast::String::cast(token.value.clone()) { + if let Some(x) = + try_lookup_include_path(sema, InFile::new(token_file_id, token), file_id) + { return Some(vec![x]); } } if ast::TokenTree::can_cast(parent.kind()) { - if let Some(x) = try_lookup_macro_def_in_macro_use(sema, token) { + if let Some(x) = try_lookup_macro_def_in_macro_use(sema, token.value) { return Some(vec![x]); } } @@ -204,20 +207,22 @@ fn find_definition_for_known_blanket_dual_impls( fn try_lookup_include_path( sema: &Semantics<'_, RootDatabase>, - token: ast::String, + token: InFile, file_id: FileId, ) -> Option { - let file = sema.hir_file_for(&token.syntax().parent()?).macro_file()?; + let file = token.file_id.macro_file()?; + + // Check that we are in the eager argument expansion of an include macro + // that is we are the string input of it if !iter::successors(Some(file), |file| file.parent(sema.db).macro_file()) - // Check that we are in the eager argument expansion of an include macro .any(|file| file.is_include_like_macro(sema.db) && file.eager_arg(sema.db).is_none()) { return None; } - let path = token.value().ok()?; + let path = token.value.value().ok()?; let file_id = sema.db.resolve_path(AnchoredPath { anchor: file_id, path: &path })?; - let size = sema.db.file_text(file_id).len().try_into().ok()?; + let size = sema.db.file_text(file_id).text(sema.db).len().try_into().ok()?; Some(NavigationTarget { file_id, full_range: TextRange::new(0.into(), size), @@ -358,7 +363,7 @@ fn nav_for_exit_points( if let Some(FileRange { file_id, range }) = focus_frange { let contains_frange = |nav: &NavigationTarget| { - nav.file_id == file_id && nav.full_range.contains_range(range) + nav.file_id == file_id.file_id(db) && nav.full_range.contains_range(range) }; if let Some(def_site) = nav.def_site.as_mut() { @@ -2047,7 +2052,10 @@ fn main() { ); } + // macros in this position are not yet supported #[test] + // FIXME + #[should_panic] fn goto_doc_include_str() { check( r#" @@ -2190,8 +2198,8 @@ where T : Bound struct A; impl Bound for A{} fn f() { - let gen = Gen::(A); - gen.g$0(); + let g = Gen::(A); + g.g$0(); } "#, ); @@ -2216,8 +2224,8 @@ where T : Bound struct A; impl Bound for A{} fn f() { - let gen = Gen::(A); - gen.g$0(); + let g = Gen::(A); + g.g$0(); } "#, ); @@ -3324,4 +3332,218 @@ fn main() { "#, ); } + + #[test] + fn struct_shadow_by_module() { + check( + r#" +mod foo { + pub mod bar { + // ^^^ + pub type baz = usize; + } +} +struct bar; +fn main() { + use foo::bar; + let x: ba$0r::baz = 5; + +} +"#, + ); + } + + #[test] + fn type_alias_shadow_by_module() { + check( + r#" +mod foo { + pub mod bar { + // ^^^ + pub fn baz() {} + } +} + +trait Qux {} + +fn item() { + use foo::bar; + ba$0r::baz(); +} +} +"#, + ); + + check( + r#" +mod foo { + pub mod bar { + // ^^^ + pub fn baz() {} + } +} + +fn item(x: bar) { + use foo::bar; + let x: bar$0 = x; +} +"#, + ); + } + + #[test] + fn trait_shadow_by_module() { + check( + r#" +pub mod foo { + pub mod Bar {} + // ^^^ +} + +trait Bar {} + +fn main() { + use foo::Bar; + fn f() {} +} + "#, + ); + } + + #[test] + fn const_shadow_by_module() { + check( + r#" +pub mod foo { + pub struct u8 {} + pub mod bar { + pub mod u8 {} + } +} + +fn main() { + use foo::u8; + { + use foo::bar::u8; + + fn f1() {} + } + fn f2() {} +} +"#, + ); + + check( + r#" +pub mod foo { + pub struct u8 {} + // ^^ + pub mod bar { + pub mod u8 {} + } +} + +fn main() { + use foo::u8; + { + use foo::bar::u8; + + fn f1() {} + } + fn f2() {} +} +"#, + ); + + check( + r#" +pub mod foo { + pub struct buz {} + pub mod bar { + pub mod buz {} + // ^^^ + } +} + +fn main() { + use foo::buz; + { + use foo::bar::buz; + + fn f1() {} + } +} +"#, + ); + } + + #[test] + fn offset_of() { + check( + r#" +//- minicore: offset_of +struct Foo { + field: i32, + // ^^^^^ +} + +fn foo() { + let _ = core::mem::offset_of!(Foo, fiel$0d); +} + "#, + ); + + check( + r#" +//- minicore: offset_of +struct Bar(Foo); +struct Foo { + field: i32, + // ^^^^^ +} + +fn foo() { + let _ = core::mem::offset_of!(Bar, 0.fiel$0d); +} + "#, + ); + + check( + r#" +//- minicore: offset_of +struct Bar(Baz); +enum Baz { + Abc(Foo), + None, +} +struct Foo { + field: i32, + // ^^^^^ +} + +fn foo() { + let _ = core::mem::offset_of!(Bar, 0.Abc.0.fiel$0d); +} + "#, + ); + + check( + r#" +//- minicore: offset_of +struct Bar(Baz); +enum Baz { + Abc(Foo), + // ^^^ + None, +} +struct Foo { + field: i32, +} + +fn foo() { + let _ = core::mem::offset_of!(Bar, 0.Ab$0c.0.field); +} + "#, + ); + } } diff --git a/src/tools/rust-analyzer/crates/ide/src/goto_implementation.rs b/src/tools/rust-analyzer/crates/ide/src/goto_implementation.rs index e1d834b5d1c69..1bc28f28b6f57 100644 --- a/src/tools/rust-analyzer/crates/ide/src/goto_implementation.rs +++ b/src/tools/rust-analyzer/crates/ide/src/goto_implementation.rs @@ -1,10 +1,10 @@ use hir::{AsAssocItem, Impl, Semantics}; use ide_db::{ + RootDatabase, defs::{Definition, NameClass, NameRefClass}, helpers::pick_best_token, - RootDatabase, }; -use syntax::{ast, AstNode, SyntaxKind::*, T}; +use syntax::{AstNode, SyntaxKind::*, T, ast}; use crate::{FilePosition, NavigationTarget, RangeInfo, TryToNav}; diff --git a/src/tools/rust-analyzer/crates/ide/src/goto_type_definition.rs b/src/tools/rust-analyzer/crates/ide/src/goto_type_definition.rs index ddc274a830352..a78f5cdc9d0e6 100644 --- a/src/tools/rust-analyzer/crates/ide/src/goto_type_definition.rs +++ b/src/tools/rust-analyzer/crates/ide/src/goto_type_definition.rs @@ -1,6 +1,6 @@ use hir::GenericParam; -use ide_db::{base_db::Upcast, defs::Definition, helpers::pick_best_token, RootDatabase}; -use syntax::{ast, match_ast, AstNode, SyntaxKind::*, SyntaxToken, T}; +use ide_db::{RootDatabase, defs::Definition, helpers::pick_best_token}; +use syntax::{AstNode, SyntaxKind::*, SyntaxToken, T, ast, match_ast}; use crate::{FilePosition, NavigationTarget, RangeInfo, TryToNav}; @@ -71,8 +71,8 @@ pub(crate) fn goto_type_definition( sema.descend_into_macros_no_opaque(token) .into_iter() .filter_map(|token| { - let ty = sema - .token_ancestors_with_macros(token) + sema + .token_ancestors_with_macros(token.value) // When `token` is within a macro call, we can't determine its type. Don't continue // this traversal because otherwise we'll end up returning the type of *that* macro // call, which is not what we want in general. @@ -87,7 +87,7 @@ pub(crate) fn goto_type_definition( ast::Pat(it) => sema.type_of_pat(&it)?.original, ast::SelfParam(it) => sema.type_of_self(&it)?, ast::Type(it) => sema.resolve_type(&it)?, - ast::RecordField(it) => sema.to_def(&it)?.ty(db.upcast()), + ast::RecordField(it) => sema.to_def(&it)?.ty(db), // can't match on RecordExprField directly as `ast::Expr` will match an iteration too early otherwise ast::NameRef(it) => { if let Some(record_field) = ast::RecordExprField::for_name_ref(&it) { @@ -103,8 +103,7 @@ pub(crate) fn goto_type_definition( }; Some(ty) - }); - ty + }) }) .for_each(process_ty); Some(RangeInfo::new(range, res)) diff --git a/src/tools/rust-analyzer/crates/ide/src/highlight_related.rs b/src/tools/rust-analyzer/crates/ide/src/highlight_related.rs index 6463206596af5..80624eeae80c7 100644 --- a/src/tools/rust-analyzer/crates/ide/src/highlight_related.rs +++ b/src/tools/rust-analyzer/crates/ide/src/highlight_related.rs @@ -1,7 +1,8 @@ use std::iter; -use hir::{db, FilePosition, FileRange, HirFileId, InFile, Semantics}; +use hir::{EditionedFileId, FilePosition, FileRange, HirFileId, InFile, Semantics, db}; use ide_db::{ + FxHashMap, FxHashSet, RootDatabase, defs::{Definition, IdentClass}, helpers::pick_best_token, search::{FileReference, ReferenceCategory, SearchScope}, @@ -9,17 +10,17 @@ use ide_db::{ eq_label_lt, for_each_tail_expr, full_path_of_name_ref, is_closure_or_blk_with_modif, preorder_expr_with_ctx_checker, }, - FxHashMap, FxHashSet, RootDatabase, }; -use span::EditionedFileId; +use span::FileId; use syntax::{ - ast::{self, HasLoopBody}, - match_ast, AstNode, + AstNode, SyntaxKind::{self, IDENT, INT_NUMBER}, - SyntaxToken, TextRange, WalkEvent, T, + SyntaxToken, T, TextRange, WalkEvent, + ast::{self, HasLoopBody}, + match_ast, }; -use crate::{goto_definition, navigation_target::ToNav, NavigationTarget, TryToNav}; +use crate::{NavigationTarget, TryToNav, goto_definition, navigation_target::ToNav}; #[derive(PartialEq, Eq, Hash)] pub struct HighlightedRange { @@ -59,13 +60,14 @@ pub(crate) fn highlight_related( let _p = tracing::info_span!("highlight_related").entered(); let file_id = sema .attach_first_edition(file_id) - .unwrap_or_else(|| EditionedFileId::current_edition(file_id)); + .unwrap_or_else(|| EditionedFileId::current_edition(sema.db, file_id)); + let span_file_id = file_id.editioned_file_id(sema.db); let syntax = sema.parse(file_id).syntax().clone(); let token = pick_best_token(syntax.token_at_offset(offset), |kind| match kind { T![?] => 4, // prefer `?` when the cursor is sandwiched like in `await$0?` T![->] => 4, - kind if kind.is_keyword(file_id.edition()) => 3, + kind if kind.is_keyword(span_file_id.edition()) => 3, IDENT | INT_NUMBER => 2, T![|] => 1, _ => 0, @@ -87,11 +89,18 @@ pub(crate) fn highlight_related( T![break] | T![loop] | T![while] | T![continue] if config.break_points => { highlight_break_points(sema, token).remove(&file_id) } - T![|] if config.closure_captures => highlight_closure_captures(sema, token, file_id), - T![move] if config.closure_captures => highlight_closure_captures(sema, token, file_id), - _ if config.references => { - highlight_references(sema, token, FilePosition { file_id, offset }) + T![|] if config.closure_captures => { + highlight_closure_captures(sema, token, file_id, span_file_id.file_id()) + } + T![move] if config.closure_captures => { + highlight_closure_captures(sema, token, file_id, span_file_id.file_id()) } + _ if config.references => highlight_references( + sema, + token, + FilePosition { file_id, offset }, + span_file_id.file_id(), + ), _ => None, } } @@ -100,6 +109,7 @@ fn highlight_closure_captures( sema: &Semantics<'_, RootDatabase>, token: SyntaxToken, file_id: EditionedFileId, + vfs_file_id: FileId, ) -> Option> { let closure = token.parent_ancestors().take(2).find_map(ast::ClosureExpr::cast)?; let search_range = closure.body()?.syntax().text_range(); @@ -132,7 +142,7 @@ fn highlight_closure_captures( .sources(sema.db) .into_iter() .flat_map(|x| x.to_nav(sema.db)) - .filter(|decl| decl.file_id == file_id) + .filter(|decl| decl.file_id == vfs_file_id) .filter_map(|decl| decl.focus_range) .map(move |range| HighlightedRange { range, category }) .chain(usages) @@ -145,6 +155,7 @@ fn highlight_references( sema: &Semantics<'_, RootDatabase>, token: SyntaxToken, FilePosition { file_id, offset }: FilePosition, + vfs_file_id: FileId, ) -> Option> { let defs = if let Some((range, resolution)) = sema.check_for_format_args_template(token.clone(), offset) @@ -152,7 +163,10 @@ fn highlight_references( match resolution.map(Definition::from) { Some(def) => iter::once(def).collect(), None => { - return Some(vec![HighlightedRange { range, category: ReferenceCategory::empty() }]) + return Some(vec![HighlightedRange { + range, + category: ReferenceCategory::empty(), + }]); } } } else { @@ -224,6 +238,23 @@ fn highlight_references( } } + // highlight the tail expr of the labelled block + if matches!(def, Definition::Label(_)) { + let label = token.parent_ancestors().nth(1).and_then(ast::Label::cast); + if let Some(block) = + label.and_then(|label| label.syntax().parent()).and_then(ast::BlockExpr::cast) + { + for_each_tail_expr(&block.into(), &mut |tail| { + if !matches!(tail, ast::Expr::BreakExpr(_)) { + res.insert(HighlightedRange { + range: tail.syntax().text_range(), + category: ReferenceCategory::empty(), + }); + } + }); + } + } + // highlight the defs themselves match def { Definition::Local(local) => { @@ -236,7 +267,7 @@ fn highlight_references( .sources(sema.db) .into_iter() .flat_map(|x| x.to_nav(sema.db)) - .filter(|decl| decl.file_id == file_id) + .filter(|decl| decl.file_id == vfs_file_id) .filter_map(|decl| decl.focus_range) .map(|range| HighlightedRange { range, category }) .for_each(|x| { @@ -254,7 +285,7 @@ fn highlight_references( }, }; for nav in navs { - if nav.file_id != file_id { + if nav.file_id != vfs_file_id { continue; } let hl_range = nav.focus_range.map(|range| { @@ -274,11 +305,7 @@ fn highlight_references( } res.extend(usages); - if res.is_empty() { - None - } else { - Some(res.into_iter().collect()) - } + if res.is_empty() { None } else { Some(res.into_iter().collect()) } } fn hl_exit_points( @@ -442,6 +469,18 @@ pub(crate) fn highlight_break_points( push_to_highlights(file_id, text_range); }); + if matches!(expr, ast::Expr::BlockExpr(_)) { + for_each_tail_expr(&expr, &mut |tail| { + if matches!(tail, ast::Expr::BreakExpr(_)) { + return; + } + + let file_id = sema.hir_file_for(tail.syntax()); + let range = tail.syntax().text_range(); + push_to_highlights(file_id, Some(range)); + }); + } + Some(highlights) } @@ -2068,4 +2107,41 @@ pub unsafe fn bootstrap() -> ! { "#, ) } + + #[test] + fn labeled_block_tail_expr() { + check( + r#" +fn foo() { + 'a: { + // ^^^ + if true { break$0 'a 0; } + // ^^^^^^^^ + 5 + // ^ + } +} +"#, + ); + } + + #[test] + fn labeled_block_tail_expr_2() { + check( + r#" +fn foo() { + let _ = 'b$0lk: { + // ^^^^ + let x = 1; + if true { break 'blk 42; } + // ^^^^ + if false { break 'blk 24; } + // ^^^^ + 100 + // ^^^ + }; +} +"#, + ); + } } diff --git a/src/tools/rust-analyzer/crates/ide/src/hover.rs b/src/tools/rust-analyzer/crates/ide/src/hover.rs index b00de6ba40833..075afcec019f7 100644 --- a/src/tools/rust-analyzer/crates/ide/src/hover.rs +++ b/src/tools/rust-analyzer/crates/ide/src/hover.rs @@ -7,26 +7,30 @@ use std::{iter, ops::Not}; use either::Either; use hir::{ - db::DefDatabase, DisplayTarget, GenericDef, GenericSubstitution, HasCrate, HasSource, LangItem, - Semantics, + DisplayTarget, GenericDef, GenericSubstitution, HasCrate, HasSource, LangItem, Semantics, + db::DefDatabase, }; use ide_db::{ + FileRange, FxIndexSet, Ranker, RootDatabase, defs::{Definition, IdentClass, NameRefClass, OperatorClass}, famous_defs::FamousDefs, helpers::pick_best_token, - FileRange, FxIndexSet, Ranker, RootDatabase, }; -use itertools::{multizip, Itertools}; +use itertools::{Itertools, multizip}; use span::Edition; -use syntax::{ast, AstNode, SyntaxKind::*, SyntaxNode, T}; +use syntax::{ + AstNode, + SyntaxKind::{self, *}, + SyntaxNode, T, ast, +}; use crate::{ + FileId, FilePosition, NavigationTarget, RangeInfo, Runnable, TryToNav, doc_links::token_as_doc_comment, markdown_remove::remove_markdown, markup::Markup, navigation_target::UpmappingResult, runnables::{runnable_fn, runnable_mod}, - FileId, FilePosition, NavigationTarget, RangeInfo, Runnable, TryToNav, }; #[derive(Clone, Debug, PartialEq, Eq)] pub struct HoverConfig { @@ -129,8 +133,8 @@ pub(crate) fn hover( let sema = &hir::Semantics::new(db); let file = sema.parse_guess_edition(file_id).syntax().clone(); let edition = - sema.attach_first_edition(file_id).map(|it| it.edition()).unwrap_or(Edition::CURRENT); - let display_target = sema.first_crate_or_default(file_id).to_display_target(db); + sema.attach_first_edition(file_id).map(|it| it.edition(db)).unwrap_or(Edition::CURRENT); + let display_target = sema.first_crate(file_id)?.to_display_target(db); let mut res = if range.is_empty() { hover_offset( sema, @@ -274,11 +278,13 @@ fn hover_offset( } class => { - let is_def = matches!(class, IdentClass::NameClass(_)); + let render_extras = matches!(class, IdentClass::NameClass(_)) + // Render extra information for `Self` keyword as well + || ast::NameRef::cast(node.clone()).is_some_and(|name_ref| name_ref.token_kind() == SyntaxKind::SELF_TYPE_KW); multizip(( class.definitions(), iter::repeat(None), - iter::repeat(is_def), + iter::repeat(render_extras), iter::repeat(node), )) .collect::>() @@ -422,7 +428,7 @@ pub(crate) fn hover_for_definition( subst: Option, scope_node: &SyntaxNode, macro_arm: Option, - hovered_definition: bool, + render_extras: bool, config: &HoverConfig, edition: Edition, display_target: DisplayTarget, @@ -456,7 +462,7 @@ pub(crate) fn hover_for_definition( famous_defs.as_ref(), ¬able_traits, macro_arm, - hovered_definition, + render_extras, subst_types.as_ref(), config, edition, @@ -499,6 +505,7 @@ fn notable_traits( ) }) }) + .sorted_by_cached_key(|(trait_, _)| trait_.name(db)) .collect::>() } @@ -512,7 +519,7 @@ fn show_implementations_action(db: &RootDatabase, def: Definition) -> Option { - return it.try_to_nav(db).map(UpmappingResult::call_site).map(to_action) + return it.try_to_nav(db).map(UpmappingResult::call_site).map(to_action); } Definition::Adt(it) => Some(it), Definition::SelfType(it) => it.self_ty(db).as_adt(), @@ -544,7 +551,7 @@ fn runnable_action( Definition::Module(it) => runnable_mod(sema, it).map(HoverAction::Runnable), Definition::Function(func) => { let src = func.source(sema.db)?; - if src.file_id != file_id { + if src.file_id.file_id().is_none_or(|f| f.file_id(sema.db) != file_id) { cov_mark::hit!(hover_macro_generated_struct_fn_doc_comment); cov_mark::hit!(hover_macro_generated_struct_fn_doc_attr); return None; @@ -627,9 +634,7 @@ fn walk_and_push_ty( } else if let Some(trait_) = t.as_associated_type_parent_trait(db) { push_new_def(trait_.into()); } else if let Some(tp) = t.as_type_param(db) { - let sized_trait = db - .lang_item(t.krate(db).into(), LangItem::Sized) - .and_then(|lang_item| lang_item.as_trait()); + let sized_trait = LangItem::Sized.resolve_trait(db, t.krate(db).into()); tp.trait_bounds(db) .into_iter() .filter(|&it| Some(it.into()) != sized_trait) diff --git a/src/tools/rust-analyzer/crates/ide/src/hover/render.rs b/src/tools/rust-analyzer/crates/ide/src/hover/render.rs index 31ef89a07cde1..69b83f3b12d89 100644 --- a/src/tools/rust-analyzer/crates/ide/src/hover/render.rs +++ b/src/tools/rust-analyzer/crates/ide/src/hover/render.rs @@ -3,34 +3,34 @@ use std::{env, mem, ops::Not}; use either::Either; use hir::{ - db::ExpandDatabase, Adt, AsAssocItem, AsExternAssocItem, CaptureKind, DisplayTarget, DropGlue, + Adt, AsAssocItem, AsExternAssocItem, CaptureKind, DisplayTarget, DropGlue, DynCompatibilityViolation, HasCrate, HasSource, HirDisplay, Layout, LayoutError, MethodViolationCode, Name, Semantics, Symbol, Trait, Type, TypeInfo, VariantDef, + db::ExpandDatabase, }; use ide_db::{ - base_db::SourceDatabase, + RootDatabase, defs::Definition, documentation::HasDocs, famous_defs::FamousDefs, generated::lints::{CLIPPY_LINTS, DEFAULT_LINTS, FEATURES}, syntax_helpers::prettify_macro_expansion, - RootDatabase, }; use itertools::Itertools; use rustc_apfloat::{ - ieee::{Half as f16, Quad as f128}, Float, + ieee::{Half as f16, Quad as f128}, }; use span::Edition; use stdx::format_to; -use syntax::{algo, ast, match_ast, AstNode, AstToken, Direction, SyntaxToken, T}; +use syntax::{AstNode, AstToken, Direction, SyntaxToken, T, algo, ast, match_ast}; use crate::{ - doc_links::{remove_links, rewrite_links}, - hover::{notable_traits, walk_and_push_ty, SubstTyLen}, - interpret::render_const_eval_error, HoverAction, HoverConfig, HoverResult, Markup, MemoryLayoutHoverConfig, MemoryLayoutHoverRenderKind, + doc_links::{remove_links, rewrite_links}, + hover::{SubstTyLen, notable_traits, walk_and_push_ty}, + interpret::render_const_eval_error, }; pub(super) fn type_info_of( @@ -346,11 +346,7 @@ pub(super) fn try_for_lint(attr: &ast::Attr, token: &SyntaxToken) -> Option return None, }; @@ -418,7 +414,7 @@ fn definition_owner_name(db: &RootDatabase, def: Definition, edition: Edition) - "{}::{}", name.display(db, edition), it.name(db).display(db, edition) - )) + )); } None => Some(it.name(db)), } @@ -436,7 +432,7 @@ fn definition_owner_name(db: &RootDatabase, def: Definition, edition: Edition) - "{}::{}", name.display(db, edition), it.name(db)?.display(db, edition) - )) + )); } None => it.name(db), } @@ -466,8 +462,7 @@ pub(super) fn path( item_name: Option, edition: Edition, ) -> String { - let crate_name = - db.crate_graph()[module.krate().into()].display_name.as_ref().map(|it| it.to_string()); + let crate_name = module.krate().display_name(db).as_ref().map(|it| it.to_string()); let module_path = module .path_to_root(db) .into_iter() @@ -482,7 +477,7 @@ pub(super) fn definition( famous_defs: Option<&FamousDefs<'_, '_>>, notable_traits: &[(Trait, Vec<(Option, Name)>)], macro_arm: Option, - hovered_definition: bool, + render_extras: bool, subst_types: Option<&Vec<(Symbol, Type)>>, config: &HoverConfig, edition: Edition, @@ -645,6 +640,12 @@ pub(super) fn definition( Definition::Local(it) => { render_memory_layout(config.memory_layout, || it.ty(db).layout(db), |_| None, |_| None) } + Definition::SelfType(it) => render_memory_layout( + config.memory_layout, + || it.self_ty(db).layout(db), + |_| None, + |_| None, + ), _ => None, }; @@ -717,18 +718,17 @@ pub(super) fn definition( } _ => return None, }; - let rendered_drop_glue = match drop_info.drop_glue { - DropGlue::None => "does not contain types with destructors (drop glue)", - DropGlue::DependOnParams => { - "may contain types with destructors (drop glue) depending on type parameters" + let rendered_drop_glue = if drop_info.has_dtor == Some(true) { + "impl Drop" + } else { + match drop_info.drop_glue { + DropGlue::HasDropGlue => "needs Drop", + DropGlue::None => "no Drop", + DropGlue::DependOnParams => "type param may need Drop", } - DropGlue::HasDropGlue => "contain types with destructors (drop glue)", }; - Some(match drop_info.has_dtor { - Some(true) => format!("{}; has a destructor", rendered_drop_glue), - Some(false) => format!("{}; doesn't have a destructor", rendered_drop_glue), - None => rendered_drop_glue.to_owned(), - }) + + Some(rendered_drop_glue.to_owned()) }; let dyn_compatibility_info = || match def { @@ -746,7 +746,7 @@ pub(super) fn definition( }; let mut extra = String::new(); - if hovered_definition { + if render_extras { if let Some(notable_traits) = render_notable_trait(db, notable_traits, edition, display_target) { @@ -760,15 +760,18 @@ pub(super) fn definition( if let Some(layout_info) = layout_info() { extra.push_str("\n___\n"); extra.push_str(&layout_info); + if let Some(drop_info) = drop_info() { + extra.push_str(", "); + extra.push_str(&drop_info) + } + } else if let Some(drop_info) = drop_info() { + extra.push_str("\n___\n"); + extra.push_str(&drop_info); } if let Some(dyn_compatibility_info) = dyn_compatibility_info() { extra.push_str("\n___\n"); extra.push_str(&dyn_compatibility_info); } - if let Some(drop_info) = drop_info() { - extra.push_str("\n___\n"); - extra.push_str(&drop_info); - } } let mut desc = String::new(); desc.push_str(&label); @@ -906,9 +909,9 @@ fn render_notable_trait( let mut needs_impl_header = true; for (trait_, assoc_types) in notable_traits { desc.push_str(if mem::take(&mut needs_impl_header) { - "Implements notable traits: " + "Implements notable traits: `" } else { - ", " + "`, `" }); format_to!(desc, "{}", trait_.name(db).display(db, edition)); if !assoc_types.is_empty() { @@ -928,7 +931,12 @@ fn render_notable_trait( desc.push('>'); } } - desc.is_empty().not().then_some(desc) + if desc.is_empty() { + None + } else { + desc.push('`'); + Some(desc) + } } fn type_info( @@ -955,37 +963,12 @@ fn type_info( res.markup = if let Some(adjusted_ty) = adjusted { walk_and_push_ty(db, &adjusted_ty, &mut push_new_def); - let notable = { - let mut desc = String::new(); - let mut needs_impl_header = true; - for (trait_, assoc_types) in notable_traits(db, &original) { - desc.push_str(if mem::take(&mut needs_impl_header) { - "Implements Notable Traits: " - } else { - ", " - }); - format_to!(desc, "{}", trait_.name(db).display(db, edition)); - if !assoc_types.is_empty() { - desc.push('<'); - format_to!( - desc, - "{}", - assoc_types.into_iter().format_with(", ", |(ty, name), f| { - f(&name.display(db, edition))?; - f(&" = ")?; - match ty { - Some(ty) => f(&ty.display(db, display_target)), - None => f(&"?"), - } - }) - ); - desc.push('>'); - } - } - if !desc.is_empty() { - desc.push('\n'); - } - desc + let notable = if let Some(notable) = + render_notable_trait(db, ¬able_traits(db, &original), edition, display_target) + { + format!("{notable}\n") + } else { + String::new() }; let original = original.display(db, display_target).to_string(); diff --git a/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs b/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs index 6b470d921f7a7..7b7eef9d57936 100644 --- a/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs +++ b/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs @@ -1,9 +1,9 @@ -use expect_test::{expect, Expect}; -use ide_db::{base_db::SourceDatabase, FileRange}; +use expect_test::{Expect, expect}; +use ide_db::{FileRange, base_db::SourceDatabase}; use syntax::TextRange; use crate::{ - fixture, HoverConfig, HoverDocFormat, MemoryLayoutHoverConfig, MemoryLayoutHoverRenderKind, + HoverConfig, HoverDocFormat, MemoryLayoutHoverConfig, MemoryLayoutHoverRenderKind, fixture, }; const HOVER_BASE_CONFIG: HoverConfig = HoverConfig { @@ -47,7 +47,7 @@ fn check(#[rust_analyzer::rust_fixture] ra_fixture: &str, expect: Expect) { .unwrap(); let content = analysis.db.file_text(position.file_id); - let hovered_element = &content[hover.range]; + let hovered_element = &content.text(&analysis.db)[hover.range]; let actual = format!("*{hovered_element}*\n{}\n", hover.info.markup); expect.assert_eq(&actual) @@ -72,7 +72,7 @@ fn check_hover_fields_limit( .unwrap() .unwrap(); - let content = analysis.db.file_text(position.file_id); + let content = analysis.db.file_text(position.file_id).text(&analysis.db); let hovered_element = &content[hover.range]; let actual = format!("*{hovered_element}*\n{}\n", hover.info.markup); @@ -98,7 +98,7 @@ fn check_hover_enum_variants_limit( .unwrap() .unwrap(); - let content = analysis.db.file_text(position.file_id); + let content = analysis.db.file_text(position.file_id).text(&analysis.db); let hovered_element = &content[hover.range]; let actual = format!("*{hovered_element}*\n{}\n", hover.info.markup); @@ -124,7 +124,7 @@ fn check_assoc_count( .unwrap() .unwrap(); - let content = analysis.db.file_text(position.file_id); + let content = analysis.db.file_text(position.file_id).text(&analysis.db); let hovered_element = &content[hover.range]; let actual = format!("*{hovered_element}*\n{}\n", hover.info.markup); @@ -141,7 +141,7 @@ fn check_hover_no_links(#[rust_analyzer::rust_fixture] ra_fixture: &str, expect: .unwrap() .unwrap(); - let content = analysis.db.file_text(position.file_id); + let content = analysis.db.file_text(position.file_id).text(&analysis.db); let hovered_element = &content[hover.range]; let actual = format!("*{hovered_element}*\n{}\n", hover.info.markup); @@ -158,7 +158,7 @@ fn check_hover_no_memory_layout(#[rust_analyzer::rust_fixture] ra_fixture: &str, .unwrap() .unwrap(); - let content = analysis.db.file_text(position.file_id); + let content = analysis.db.file_text(position.file_id).text(&analysis.db); let hovered_element = &content[hover.range]; let actual = format!("*{hovered_element}*\n{}\n", hover.info.markup); @@ -179,7 +179,7 @@ fn check_hover_no_markdown(#[rust_analyzer::rust_fixture] ra_fixture: &str, expe .unwrap() .unwrap(); - let content = analysis.db.file_text(position.file_id); + let content = analysis.db.file_text(position.file_id).text(&analysis.db); let hovered_element = &content[hover.range]; let actual = format!("*{hovered_element}*\n{}\n", hover.info.markup); @@ -567,11 +567,7 @@ fn main() { --- - size = 8, align = 4 - - --- - - does not contain types with destructors (drop glue) + size = 8, align = 4, no Drop "#]], ); } @@ -816,11 +812,7 @@ struct Foo { fiel$0d_a: u8, field_b: i32, field_c: i16 } --- - size = 1, align = 1, offset = 6 - - --- - - does not contain types with destructors (drop glue) + size = 1, align = 1, offset = 6, no Drop "#]], ); } @@ -871,11 +863,7 @@ fn main() { --- - size = 4, align = 4, offset = 0 - - --- - - does not contain types with destructors (drop glue) + size = 4, align = 4, offset = 0, no Drop "#]], ); } @@ -945,11 +933,7 @@ struct Foo$0(pub u32) where u32: Copy; --- - size = 4, align = 4 - - --- - - does not contain types with destructors (drop glue); doesn't have a destructor + size = 4, align = 4, no Drop "#]], ); } @@ -975,11 +959,7 @@ struct Foo$0 { field: u32 } --- - size = 4, align = 4 - - --- - - does not contain types with destructors (drop glue); doesn't have a destructor + size = 4, align = 4, no Drop "#]], ); check( @@ -1004,11 +984,7 @@ struct Foo$0 where u32: Copy { field: u32 } --- - size = 4, align = 4 - - --- - - does not contain types with destructors (drop glue); doesn't have a destructor + size = 4, align = 4, no Drop "#]], ); } @@ -1037,11 +1013,7 @@ fn hover_record_struct_limit() { --- - size = 12 (0xC), align = 4 - - --- - - does not contain types with destructors (drop glue); doesn't have a destructor + size = 12 (0xC), align = 4, no Drop "#]], ); check_hover_fields_limit( @@ -1064,11 +1036,7 @@ fn hover_record_struct_limit() { --- - size = 4, align = 4 - - --- - - does not contain types with destructors (drop glue); doesn't have a destructor + size = 4, align = 4, no Drop "#]], ); check_hover_fields_limit( @@ -1094,11 +1062,7 @@ fn hover_record_struct_limit() { --- - size = 16 (0x10), align = 4 - - --- - - does not contain types with destructors (drop glue); doesn't have a destructor + size = 16 (0x10), align = 4, no Drop "#]], ); check_hover_fields_limit( @@ -1119,11 +1083,7 @@ fn hover_record_struct_limit() { --- - size = 12 (0xC), align = 4 - - --- - - does not contain types with destructors (drop glue); doesn't have a destructor + size = 12 (0xC), align = 4, no Drop "#]], ); check_hover_fields_limit( @@ -1144,11 +1104,7 @@ fn hover_record_struct_limit() { --- - size = 12 (0xC), align = 4 - - --- - - does not contain types with destructors (drop glue); doesn't have a destructor + size = 12 (0xC), align = 4, no Drop "#]], ); @@ -1171,11 +1127,7 @@ fn hover_record_struct_limit() { --- - size = 0, align = 1 - - --- - - does not contain types with destructors (drop glue); doesn't have a destructor + size = 0, align = 1, no Drop "#]], ); } @@ -1200,11 +1152,7 @@ fn hover_record_variant_limit() { --- - size = 12 (0xC), align = 4 - - --- - - does not contain types with destructors (drop glue) + size = 12 (0xC), align = 4, no Drop "#]], ); check_hover_fields_limit( @@ -1225,11 +1173,7 @@ fn hover_record_variant_limit() { --- - size = 4, align = 4 - - --- - - does not contain types with destructors (drop glue) + size = 4, align = 4, no Drop "#]], ); check_hover_fields_limit( @@ -1250,11 +1194,7 @@ fn hover_record_variant_limit() { --- - size = 16 (0x10), align = 4 - - --- - - does not contain types with destructors (drop glue) + size = 16 (0x10), align = 4, no Drop "#]], ); check_hover_fields_limit( @@ -1275,11 +1215,7 @@ fn hover_record_variant_limit() { --- - size = 12 (0xC), align = 4 - - --- - - does not contain types with destructors (drop glue) + size = 12 (0xC), align = 4, no Drop "#]], ); check_hover_fields_limit( @@ -1300,11 +1236,7 @@ fn hover_record_variant_limit() { --- - size = 12 (0xC), align = 4 - - --- - - does not contain types with destructors (drop glue) + size = 12 (0xC), align = 4, no Drop "#]], ); } @@ -1330,11 +1262,7 @@ fn hover_enum_limit() { --- - size = 1, align = 1, niches = 254 - - --- - - does not contain types with destructors (drop glue); doesn't have a destructor + size = 1, align = 1, niches = 254, no Drop "#]], ); check_hover_enum_variants_limit( @@ -1356,11 +1284,7 @@ fn hover_enum_limit() { --- - size = 1, align = 1, niches = 254 - - --- - - does not contain types with destructors (drop glue); doesn't have a destructor + size = 1, align = 1, niches = 254, no Drop "#]], ); check_hover_enum_variants_limit( @@ -1379,11 +1303,7 @@ fn hover_enum_limit() { --- - size = 1, align = 1, niches = 254 - - --- - - does not contain types with destructors (drop glue); doesn't have a destructor + size = 1, align = 1, niches = 254, no Drop "#]], ); check_hover_enum_variants_limit( @@ -1402,11 +1322,7 @@ fn hover_enum_limit() { --- - size = 1, align = 1, niches = 254 - - --- - - does not contain types with destructors (drop glue); doesn't have a destructor + size = 1, align = 1, niches = 254, no Drop "#]], ); check_hover_enum_variants_limit( @@ -1443,11 +1359,7 @@ fn hover_enum_limit() { --- - size = 12 (0xC), align = 4, niches = a lot - - --- - - does not contain types with destructors (drop glue); doesn't have a destructor + size = 12 (0xC), align = 4, niches = a lot, no Drop "#]], ); } @@ -1473,11 +1385,7 @@ fn hover_union_limit() { --- - size = 4, align = 4 - - --- - - does not contain types with destructors (drop glue); doesn't have a destructor + size = 4, align = 4, no Drop "#]], ); check_hover_fields_limit( @@ -1499,11 +1407,7 @@ fn hover_union_limit() { --- - size = 4, align = 4 - - --- - - does not contain types with destructors (drop glue); doesn't have a destructor + size = 4, align = 4, no Drop "#]], ); check_hover_fields_limit( @@ -1522,11 +1426,7 @@ fn hover_union_limit() { --- - size = 4, align = 4 - - --- - - does not contain types with destructors (drop glue); doesn't have a destructor + size = 4, align = 4, no Drop "#]], ); check_hover_fields_limit( @@ -1545,11 +1445,7 @@ fn hover_union_limit() { --- - size = 4, align = 4 - - --- - - does not contain types with destructors (drop glue); doesn't have a destructor + size = 4, align = 4, no Drop "#]], ); } @@ -1575,11 +1471,7 @@ struct Foo$0 where u32: Copy; --- - size = 0, align = 1 - - --- - - does not contain types with destructors (drop glue); doesn't have a destructor + size = 0, align = 1, no Drop "#]], ); } @@ -1605,7 +1497,7 @@ type Fo$0o: Trait = S where T: Trait; --- - does not contain types with destructors (drop glue) + no Drop "#]], ); } @@ -1754,11 +1646,7 @@ fn main() { --- - size = 8, align = 4 - - --- - - does not contain types with destructors (drop glue) + size = 8, align = 4, no Drop "#]], ); check_hover_range( @@ -1813,11 +1701,7 @@ fn main() { let b$0ar = Some(12); } --- - size = 4, align = 4 - - --- - - does not contain types with destructors (drop glue) + size = 4, align = 4, no Drop "#]], ); } @@ -1845,7 +1729,7 @@ enum Option { --- - does not contain types with destructors (drop glue) + no Drop --- @@ -1908,11 +1792,7 @@ fn hover_for_local_variable_pat() { --- - size = 4, align = 4 - - --- - - does not contain types with destructors (drop glue) + size = 4, align = 4, no Drop "#]], ) } @@ -1944,11 +1824,7 @@ fn hover_for_param_edge() { --- - size = 4, align = 4 - - --- - - does not contain types with destructors (drop glue) + size = 4, align = 4, no Drop "#]], ) } @@ -1974,7 +1850,7 @@ fn hover_for_param_with_multiple_traits() { --- - may contain types with destructors (drop glue) depending on type parameters + type param may need Drop "#]], ) } @@ -2000,11 +1876,7 @@ fn main() { let foo_$0test = Thing::new(); } --- - size = 4, align = 4 - - --- - - does not contain types with destructors (drop glue) + size = 4, align = 4, no Drop "#]], ) } @@ -2089,6 +1961,10 @@ impl Thing { x: u32, } ``` + + --- + + size = 4, align = 4 "#]], ); check_hover_fields_limit( @@ -2109,6 +1985,10 @@ impl Thing { ```rust struct Thing ``` + + --- + + size = 4, align = 4 "#]], ); check( @@ -2130,6 +2010,10 @@ impl Thing { x: u32, } ``` + + --- + + size = 4, align = 4 "#]], ); check( @@ -2151,6 +2035,10 @@ impl Thing { A, } ``` + + --- + + size = 0, align = 1 "#]], ); check( @@ -2172,6 +2060,10 @@ impl Thing { A, } ``` + + --- + + size = 0, align = 1 "#]], ); check( @@ -2190,6 +2082,10 @@ impl usize { ```rust usize ``` + + --- + + size = 8, align = 8 "#]], ); check( @@ -2208,6 +2104,32 @@ impl fn() -> usize { ```rust fn() -> usize ``` + + --- + + size = 8, align = 8, niches = 1 + "#]], + ); + check( + r#" +pub struct Foo +where + Self$0:; +"#, + expect![[r#" + *Self* + + ```rust + ra_test_fixture + ``` + + ```rust + pub struct Foo + ``` + + --- + + size = 0, align = 1, no Drop "#]], ); } @@ -2753,11 +2675,7 @@ fn test_hover_function_pointer_show_identifiers() { --- - size = 8, align = 8, niches = 1 - - --- - - does not contain types with destructors (drop glue) + size = 8, align = 8, niches = 1, no Drop "#]], ); } @@ -2779,11 +2697,7 @@ fn test_hover_function_pointer_no_identifier() { --- - size = 8, align = 8, niches = 1 - - --- - - does not contain types with destructors (drop glue) + size = 8, align = 8, niches = 1, no Drop "#]], ); } @@ -3026,11 +2940,7 @@ pub struct B$0ar --- - size = 0, align = 1 - - --- - - does not contain types with destructors (drop glue); doesn't have a destructor + size = 0, align = 1, no Drop --- @@ -3061,11 +2971,7 @@ pub struct B$0ar --- - size = 0, align = 1 - - --- - - does not contain types with destructors (drop glue); doesn't have a destructor + size = 0, align = 1, no Drop --- @@ -3158,11 +3064,7 @@ fn test_hover_layout_of_variant() { --- - size = 4, align = 2 - - --- - - does not contain types with destructors (drop glue) + size = 4, align = 2, no Drop "#]], ); } @@ -3187,7 +3089,7 @@ fn test_hover_layout_of_variant_generic() { --- - does not contain types with destructors (drop glue) + no Drop "#]], ); } @@ -3212,11 +3114,7 @@ struct S$0(core::marker::PhantomData); --- - size = 0, align = 1 - - --- - - does not contain types with destructors (drop glue); doesn't have a destructor + size = 0, align = 1, no Drop "#]], ); } @@ -3244,11 +3142,7 @@ fn test_hover_layout_of_enum() { --- - size = 16 (0x10), align = 8, niches = 254 - - --- - - does not contain types with destructors (drop glue); doesn't have a destructor + size = 16 (0x10), align = 8, niches = 254, no Drop "#]], ); } @@ -3270,7 +3164,7 @@ fn test_hover_no_memory_layout() { --- - does not contain types with destructors (drop glue) + no Drop "#]], ); @@ -4578,11 +4472,7 @@ fn main() { --- - size = 8, align = 8, niches = 1 - - --- - - does not contain types with destructors (drop glue) + size = 8, align = 8, niches = 1, no Drop --- @@ -4596,11 +4486,7 @@ fn main() { --- - size = 4, align = 4, offset = 0 - - --- - - does not contain types with destructors (drop glue) + size = 4, align = 4, offset = 0, no Drop "#]], ); } @@ -4620,16 +4506,12 @@ struct S$0T(T); ``` ```rust - struct ST(T) + struct ST(T) ``` --- - size = 0, align = 1 - - --- - - may contain types with destructors (drop glue) depending on type parameters; doesn't have a destructor + size = 0, align = 1, type param may need Drop "#]], ); } @@ -4654,11 +4536,7 @@ struct S$0T(T); --- - size = 0, align = 1 - - --- - - may contain types with destructors (drop glue) depending on type parameters; doesn't have a destructor + size = 0, align = 1, type param may need Drop "#]], ); } @@ -4679,16 +4557,12 @@ struct S$0T(T); ``` ```rust - struct ST(T) + struct ST(T) ``` --- - size = 0, align = 1 - - --- - - may contain types with destructors (drop glue) depending on type parameters; doesn't have a destructor + size = 0, align = 1, type param may need Drop "#]], ); } @@ -4712,11 +4586,7 @@ fn main() { --- - size = 0, align = 1 - - --- - - does not contain types with destructors (drop glue) + size = 0, align = 1, no Drop "#]], ); } @@ -4740,11 +4610,7 @@ fn main() { --- - size = 0, align = 1 - - --- - - does not contain types with destructors (drop glue) + size = 0, align = 1, no Drop "#]], ); } @@ -4763,16 +4629,12 @@ fn main() { *value* ```rust - let value: Const<-1> + let value: Const<_> ``` --- - size = 0, align = 1 - - --- - - does not contain types with destructors (drop glue) + size = 0, align = 1, no Drop "#]], ); } @@ -4796,11 +4658,7 @@ fn main() { --- - size = 0, align = 1 - - --- - - does not contain types with destructors (drop glue) + size = 0, align = 1, no Drop "#]], ); } @@ -4824,11 +4682,7 @@ fn main() { --- - size = 0, align = 1 - - --- - - does not contain types with destructors (drop glue) + size = 0, align = 1, no Drop "#]], ); } @@ -4851,11 +4705,7 @@ impl Foo { --- - size = 8, align = 8, niches = 1 - - --- - - does not contain types with destructors (drop glue) + size = 8, align = 8, niches = 1, no Drop "#]], ); } @@ -4879,11 +4729,7 @@ impl Foo { --- - size = 0, align = 1 - - --- - - does not contain types with destructors (drop glue) + size = 0, align = 1, no Drop "#]], ); } @@ -5368,16 +5214,12 @@ type Fo$0o2 = Foo<2>; ``` ```rust - type Foo2 = Foo<2> + type Foo2 = Foo<> ``` --- - size = 0, align = 1 - - --- - - does not contain types with destructors (drop glue) + size = 0, align = 1, no Drop "#]], ); } @@ -5427,11 +5269,7 @@ enum E { --- - size = 1, align = 1 - - --- - - does not contain types with destructors (drop glue) + size = 1, align = 1, no Drop --- @@ -5460,11 +5298,7 @@ enum E { --- - size = 1, align = 1 - - --- - - does not contain types with destructors (drop glue) + size = 1, align = 1, no Drop --- @@ -5494,11 +5328,7 @@ enum E { --- - size = 1, align = 1 - - --- - - does not contain types with destructors (drop glue) + size = 1, align = 1, no Drop --- @@ -5528,11 +5358,7 @@ enum E { --- - size = 1, align = 1 - - --- - - does not contain types with destructors (drop glue) + size = 1, align = 1, no Drop --- @@ -6197,7 +6023,7 @@ const FOO$0: &[i32; 5] = &[12; 5]; ``` ```rust - const FOO: &[i32; 5] = &[12, 12, 12, 12, 12] + const FOO: &[i32; {const}] = &[12, 12, 12, 12, 12] ``` "#]], ); @@ -6463,11 +6289,7 @@ fn main() { --- - size = 32 (0x20), align = 4 - - --- - - does not contain types with destructors (drop glue) + size = 32 (0x20), align = 4, no Drop "#]], ); } @@ -6882,109 +6704,14 @@ pub fn foo() {} #[test] fn hover_feature() { - check( - r#"#![feature(intrinsics$0)]"#, - expect![[r#" - *intrinsics* - ``` - intrinsics - ``` - ___ - - # `intrinsics` - - The tracking issue for this feature is: None. - - Intrinsics are rarely intended to be stable directly, but are usually - exported in some sort of stable manner. Prefer using the stable interfaces to - the intrinsic directly when you can. - - ------------------------ - - - ## Intrinsics with fallback logic - - Many intrinsics can be written in pure rust, albeit inefficiently or without supporting - some features that only exist on some backends. Backends can simply not implement those - intrinsics without causing any code miscompilations or failures to compile. - All intrinsic fallback bodies are automatically made cross-crate inlineable (like `#[inline]`) - by the codegen backend, but not the MIR inliner. - - ```rust - #![feature(intrinsics)] - #![allow(internal_features)] - - #[rustc_intrinsic] - const unsafe fn const_deallocate(_ptr: *mut u8, _size: usize, _align: usize) {} - ``` - - Since these are just regular functions, it is perfectly ok to create the intrinsic twice: - - ```rust - #![feature(intrinsics)] - #![allow(internal_features)] - - #[rustc_intrinsic] - const unsafe fn const_deallocate(_ptr: *mut u8, _size: usize, _align: usize) {} - - mod foo { - #[rustc_intrinsic] - const unsafe fn const_deallocate(_ptr: *mut u8, _size: usize, _align: usize) { - panic!("noisy const dealloc") - } - } - - ``` - - The behaviour on backends that override the intrinsic is exactly the same. On other - backends, the intrinsic behaviour depends on which implementation is called, just like - with any regular function. - - ## Intrinsics lowered to MIR instructions - - Various intrinsics have native MIR operations that they correspond to. Instead of requiring - backends to implement both the intrinsic and the MIR operation, the `lower_intrinsics` pass - will convert the calls to the MIR operation. Backends do not need to know about these intrinsics - at all. These intrinsics only make sense without a body, and can either be declared as a "rust-intrinsic" - or as a `#[rustc_intrinsic]`. The body is never used, as calls to the intrinsic do not exist - anymore after MIR analyses. - - ## Intrinsics without fallback logic - - These must be implemented by all backends. - - ### `#[rustc_intrinsic]` declarations - - These are written like intrinsics with fallback bodies, but the body is irrelevant. - Use `loop {}` for the body or call the intrinsic recursively and add - `#[rustc_intrinsic_must_be_overridden]` to the function to ensure that backends don't - invoke the body. - - ### Legacy extern ABI based intrinsics - - These are imported as if they were FFI functions, with the special - `rust-intrinsic` ABI. For example, if one was in a freestanding - context, but wished to be able to `transmute` between types, and - perform efficient pointer arithmetic, one would import those functions - via a declaration like - - ```rust - #![feature(intrinsics)] - #![allow(internal_features)] - # fn main() {} - - extern "rust-intrinsic" { - fn transmute(x: T) -> U; - - fn arith_offset(dst: *const T, offset: isize) -> *const T; - } - ``` - - As with any other FFI functions, these are by default always `unsafe` to call. - You can add `#[rustc_safe_intrinsic]` to the intrinsic to make it safe to call. - - "#]], - ) + let (analysis, position) = fixture::position(r#"#![feature(intrinsics$0)]"#); + analysis + .hover( + &HoverConfig { links_in_hover: true, ..HOVER_BASE_CONFIG }, + FileRange { file_id: position.file_id, range: TextRange::empty(position.offset) }, + ) + .unwrap() + .unwrap(); } #[test] @@ -7358,9 +7085,9 @@ fn foo() { } "#, expect![[r#" - ```rust - &str - ```"#]], + ```rust + &'static str + ```"#]], ); } @@ -7766,11 +7493,7 @@ enum Enum { --- - size = 4, align = 4 - - --- - - does not contain types with destructors (drop glue) + size = 4, align = 4, no Drop "#]], ); } @@ -7796,11 +7519,7 @@ enum Enum { --- - size = 4, align = 4 - - --- - - does not contain types with destructors (drop glue) + size = 4, align = 4, no Drop "#]], ); } @@ -8470,11 +8189,7 @@ fn test() { --- - size = 0, align = 1 - - --- - - does not contain types with destructors (drop glue) + size = 0, align = 1, no Drop "#]], ); } @@ -8513,7 +8228,7 @@ format_args!("{aaaaa$0}"); *aaaaa* ```rust - let aaaaa: &str + let aaaaa: &'static str ``` "#]], ); @@ -8533,7 +8248,7 @@ format_args!("{$0aaaaa}"); *aaaaa* ```rust - let aaaaa: &str + let aaaaa: &'static str ``` "#]], ); @@ -8553,7 +8268,7 @@ format_args!(r"{$0aaaaa}"); *aaaaa* ```rust - let aaaaa: &str + let aaaaa: &'static str ``` "#]], ); @@ -8578,7 +8293,7 @@ foo!(r"{$0aaaaa}"); *aaaaa* ```rust - let aaaaa: &str + let aaaaa: &'static str ``` "#]], ); @@ -8622,7 +8337,7 @@ fn main() { expect![[r#" *"🦀\u{1f980}\\\x41"* ```rust - &str + &'static str ``` ___ @@ -8638,7 +8353,7 @@ fn main() { expect![[r#" *r"🦀\u{1f980}\\\x41"* ```rust - &str + &'static str ``` ___ @@ -8660,7 +8375,7 @@ fsdghs"; fsdghs"* ```rust - &str + &'static str ``` ___ @@ -8680,7 +8395,7 @@ fn main() { expect![[r#" *c"🦀\u{1f980}\\\x41"* ```rust - &{unknown} + &'static {unknown} ``` ___ @@ -8699,7 +8414,7 @@ fn main() { expect![[r#" *r"`[^`]*`"* ```rust - &str + &'static str ``` ___ @@ -8714,7 +8429,7 @@ fn main() { expect![[r#" *r"`"* ```rust - &str + &'static str ``` ___ @@ -8729,7 +8444,7 @@ fn main() { expect![[r#" *r" "* ```rust - &str + &'static str ``` ___ @@ -8745,12 +8460,12 @@ fn main() { expect![[r#" *r" Hello World "* ```rust - &str + &'static str ``` ___ value of literal: ` Hello World ` -"#]], + "#]], ) } @@ -8765,7 +8480,7 @@ fn main() { expect![[r#" *b"\xF0\x9F\xA6\x80\\"* ```rust - &[u8; 5] + &'static [u8; 5] ``` ___ @@ -8781,7 +8496,7 @@ fn main() { expect![[r#" *br"\xF0\x9F\xA6\x80\\"* ```rust - &[u8; 18] + &'static [u8; 18] ``` ___ @@ -9119,15 +8834,11 @@ fn main(notable$0: u32) {} --- - Implements notable traits: Notable\ + Implements notable traits: `Notable` --- - size = 4, align = 4 - - --- - - does not contain types with destructors (drop glue) + size = 4, align = 4, no Drop "#]], ); } @@ -9219,11 +8930,7 @@ extern "C" { --- - size = 0, align = 1 - - --- - - does not contain types with destructors (drop glue) + size = 0, align = 1, no Drop "#]], ); } @@ -9252,7 +8959,7 @@ fn main() { S ``` ___ - Implements notable traits: Notable, Future, Iterator"#]], + Implements notable traits: `Future`, `Iterator`, `Notable`"#]], ); } @@ -9363,17 +9070,13 @@ struct Pedro$0<'a> { ```rust struct Pedro<'a> { - hola: &str, + hola: &'a str, } ``` --- - size = 16 (0x10), align = 8, niches = 1 - - --- - - does not contain types with destructors (drop glue); doesn't have a destructor + size = 16 (0x10), align = 8, niches = 1, no Drop "#]], ) } @@ -9394,7 +9097,7 @@ fn main(a$0: impl T) {} --- - may contain types with destructors (drop glue) depending on type parameters + type param may need Drop "#]], ); } @@ -9415,11 +9118,7 @@ fn main(a$0: T) {} --- - size = 0, align = 1 - - --- - - does not contain types with destructors (drop glue) + size = 0, align = 1, no Drop "#]], ); } @@ -9472,11 +9171,7 @@ fn main() { --- - size = 0, align = 1 - - --- - - does not contain types with destructors (drop glue) + size = 0, align = 1, no Drop "#]], ); } @@ -9810,11 +9505,7 @@ type A$0 = B; --- - size = 0, align = 1 - - --- - - does not contain types with destructors (drop glue) + size = 0, align = 1, no Drop --- @@ -9847,11 +9538,7 @@ type A$0 = B; --- - size = 0, align = 1 - - --- - - does not contain types with destructors (drop glue) + size = 0, align = 1, no Drop --- @@ -9885,11 +9572,7 @@ type A$0 = B; --- - size = 0, align = 1 - - --- - - does not contain types with destructors (drop glue) + size = 0, align = 1, no Drop --- @@ -9921,11 +9604,7 @@ type A$0 = B; --- - size = 0, align = 1 - - --- - - does not contain types with destructors (drop glue) + size = 0, align = 1, no Drop "#]], ); @@ -10049,11 +9728,7 @@ fn main() { --- - size = 0, align = 1 - - --- - - does not contain types with destructors (drop glue) + size = 0, align = 1, no Drop "#]], ); @@ -10081,11 +9756,7 @@ fn main() { --- - size = 0, align = 1 - - --- - - does not contain types with destructors (drop glue) + size = 0, align = 1, no Drop "#]], ); @@ -10120,11 +9791,7 @@ fn main() { --- - size = 0, align = 1 - - --- - - does not contain types with destructors (drop glue) + size = 0, align = 1, no Drop "#]], ); } @@ -10270,7 +9937,7 @@ fn baz() { --- - `U` = `i32`, `T` = `&str` + `U` = `i32`, `T` = `&'static str` "#]], ); } @@ -10363,7 +10030,7 @@ fn bar() { --- - `T` = `i8`, `U` = `&str` + `T` = `i8`, `U` = `&'static str` "#]], ); } @@ -10443,11 +10110,7 @@ fn bar() { --- - size = 4, align = 4 - - --- - - does not contain types with destructors (drop glue) + size = 4, align = 4, no Drop --- @@ -10461,7 +10124,7 @@ fn bar() { --- - may contain types with destructors (drop glue) depending on type parameters + type param may need Drop --- @@ -10694,11 +10357,7 @@ struct NoDrop$0; --- - size = 0, align = 1 - - --- - - does not contain types with destructors (drop glue); doesn't have a destructor + size = 0, align = 1, no Drop "#]], ); check( @@ -10722,11 +10381,7 @@ impl Drop for NeedsDrop { --- - size = 0, align = 1 - - --- - - does not contain types with destructors (drop glue); has a destructor + size = 0, align = 1, impl Drop "#]], ); check( @@ -10751,11 +10406,7 @@ type NoDrop$0 = core::mem::ManuallyDrop; --- - size = 0, align = 1 - - --- - - does not contain types with destructors (drop glue) + size = 0, align = 1, no Drop "#]], ); check( @@ -10786,11 +10437,7 @@ struct DropField$0 { --- - size = 4, align = 4 - - --- - - contain types with destructors (drop glue); doesn't have a destructor + size = 4, align = 4, needs Drop "#]], ); check( @@ -10811,7 +10458,7 @@ type Foo$0 = impl Sized; --- - contain types with destructors (drop glue) + needs Drop "#]], ); check( @@ -10839,11 +10486,7 @@ enum Enum { --- - size = 16 (0x10), align = 8, niches = 1 - - --- - - does not contain types with destructors (drop glue) + size = 16 (0x10), align = 8, niches = 1, no Drop "#]], ); check( @@ -10863,7 +10506,7 @@ struct Foo$0(T); --- - may contain types with destructors (drop glue) depending on type parameters; doesn't have a destructor + type param may need Drop "#]], ); check( @@ -10886,7 +10529,7 @@ struct Foo$0(T); --- - does not contain types with destructors (drop glue); doesn't have a destructor + no Drop "#]], ); check( @@ -10912,7 +10555,7 @@ struct Foo$0(T::Assoc); --- - does not contain types with destructors (drop glue); doesn't have a destructor + no Drop "#]], ); check( @@ -10943,7 +10586,7 @@ pub struct ManuallyDrop$0 { --- - does not contain types with destructors (drop glue); doesn't have a destructor + no Drop "#]], ); } @@ -10986,3 +10629,73 @@ impl PublicFlags for NoteDialects { "#]], ); } + +#[test] +fn bounds_from_container_do_not_panic() { + check( + r#" +//- minicore: copy +struct Foo(T); + +impl Foo { + fn foo(&self, _u: U) {} +} + +fn bar(v: &Foo) { + v.$0foo(1u32); +} + "#, + expect![[r#" + *foo* + + ```rust + ra_test_fixture::Foo + ``` + + ```rust + impl Foo + fn foo(&self, _u: U) + where + U: Copy, + // Bounds from impl: + T: Copy, + ``` + + --- + + `T` = `i32`, `U` = `u32` + "#]], + ); +} + +#[test] +fn extra_lifetime_param_on_trait_method_subst() { + check( + r#" +struct AudioFormat; + +trait ValueEnum { + fn to_possible_value(&self); +} + +impl ValueEnum for AudioFormat { + fn to_possible_value<'a>(&'a self) {} +} + +fn main() { + ValueEnum::to_possible_value$0(&AudioFormat); +} + "#, + expect![[r#" + *to_possible_value* + + ```rust + ra_test_fixture::AudioFormat + ``` + + ```rust + fn to_possible_value<'a>(&'a self) + ``` + "#]], + ); +} diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints.rs index 6babdff52a2be..82704af647db3 100644 --- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints.rs +++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints.rs @@ -5,21 +5,21 @@ use std::{ use either::Either; use hir::{ - sym, ClosureStyle, DisplayTarget, HasVisibility, HirDisplay, HirDisplayError, HirWrite, - ModuleDef, ModuleDefId, Semantics, + ClosureStyle, DisplayTarget, EditionedFileId, HasVisibility, HirDisplay, HirDisplayError, + HirWrite, ModuleDef, ModuleDefId, Semantics, sym, }; -use ide_db::{famous_defs::FamousDefs, FileRange, RootDatabase}; -use ide_db::{text_edit::TextEdit, FxHashSet}; +use ide_db::{FileRange, RootDatabase, famous_defs::FamousDefs, text_edit::TextEditBuilder}; +use ide_db::{FxHashSet, text_edit::TextEdit}; use itertools::Itertools; -use smallvec::{smallvec, SmallVec}; -use span::EditionedFileId; +use smallvec::{SmallVec, smallvec}; use stdx::never; use syntax::{ + SmolStr, SyntaxNode, TextRange, TextSize, WalkEvent, ast::{self, AstNode, HasGenericParams}, - format_smolstr, match_ast, SmolStr, SyntaxNode, TextRange, TextSize, WalkEvent, + format_smolstr, match_ast, }; -use crate::{navigation_target::TryToNav, FileId}; +use crate::{FileId, navigation_target::TryToNav}; mod adjustment; mod bind_pat; @@ -85,7 +85,7 @@ pub(crate) fn inlay_hints( let sema = Semantics::new(db); let file_id = sema .attach_first_edition(file_id) - .unwrap_or_else(|| EditionedFileId::current_edition(file_id)); + .unwrap_or_else(|| EditionedFileId::current_edition(db, file_id)); let file = sema.parse(file_id); let file = file.syntax(); @@ -136,7 +136,7 @@ pub(crate) fn inlay_hints_resolve( let sema = Semantics::new(db); let file_id = sema .attach_first_edition(file_id) - .unwrap_or_else(|| EditionedFileId::current_edition(file_id)); + .unwrap_or_else(|| EditionedFileId::current_edition(db, file_id)); let file = sema.parse(file_id); let file = file.syntax(); @@ -207,7 +207,11 @@ fn hints( file_id: EditionedFileId, node: SyntaxNode, ) { - let display_target = sema.first_crate_or_default(file_id.file_id()).to_display_target(sema.db); + let file_id = file_id.editioned_file_id(sema.db); + let Some(krate) = sema.first_crate(file_id.file_id()) else { + return; + }; + let display_target = krate.to_display_target(sema.db); closing_brace::hints(hints, sema, config, file_id, display_target, node.clone()); if let Some(any_has_generic_args) = ast::AnyHasGenericArgs::cast(node.clone()) { generic_param::hints(hints, famous_defs, config, any_has_generic_args); @@ -219,12 +223,12 @@ fn hints( chaining::hints(hints, famous_defs, config, display_target, &expr); adjustment::hints(hints, famous_defs, config, display_target, &expr); match expr { - ast::Expr::CallExpr(it) => param_name::hints(hints, famous_defs, config, file_id, ast::Expr::from(it)), + ast::Expr::CallExpr(it) => param_name::hints(hints, famous_defs, config, ast::Expr::from(it)), ast::Expr::MethodCallExpr(it) => { - param_name::hints(hints, famous_defs, config, file_id, ast::Expr::from(it)) + param_name::hints(hints, famous_defs, config, ast::Expr::from(it)) } ast::Expr::ClosureExpr(it) => { - closure_captures::hints(hints, famous_defs, config, file_id, it.clone()); + closure_captures::hints(hints, famous_defs, config, it.clone()); closure_ret::hints(hints, famous_defs, config, display_target, it) }, ast::Expr::RangeExpr(it) => range_exclusive::hints(hints, famous_defs, config, file_id, it), @@ -793,7 +797,7 @@ fn hint_iterator( if ty.impls_trait(db, iter_trait, &[]) { let assoc_type_item = iter_trait.items(db).into_iter().find_map(|item| match item { - hir::AssocItem::TypeAlias(alias) if alias.name(db) == sym::Item.clone() => Some(alias), + hir::AssocItem::TypeAlias(alias) if alias.name(db) == sym::Item => Some(alias), _ => None, })?; if let Some(ty) = ty.normalize_trait_assoc_type(db, &[], assoc_type_item) { @@ -809,7 +813,8 @@ fn ty_to_text_edit( config: &InlayHintsConfig, node_for_hint: &SyntaxNode, ty: &hir::Type, - offset_to_insert: TextSize, + offset_to_insert_ty: TextSize, + additional_edits: &dyn Fn(&mut TextEditBuilder), prefix: impl Into, ) -> Option> { // FIXME: Limit the length and bail out on excess somehow? @@ -818,8 +823,11 @@ fn ty_to_text_edit( .and_then(|scope| ty.display_source_code(scope.db, scope.module().into(), false).ok())?; Some(config.lazy_text_edit(|| { let mut builder = TextEdit::builder(); - builder.insert(offset_to_insert, prefix.into()); - builder.insert(offset_to_insert, rendered); + builder.insert(offset_to_insert_ty, prefix.into()); + builder.insert(offset_to_insert_ty, rendered); + + additional_edits(&mut builder); + builder.finish() })) } @@ -836,9 +844,9 @@ mod tests { use itertools::Itertools; use test_utils::extract_annotations; - use crate::inlay_hints::{AdjustmentHints, AdjustmentHintsMode}; use crate::DiscriminantHints; - use crate::{fixture, inlay_hints::InlayHintsConfig, LifetimeElisionHints}; + use crate::inlay_hints::{AdjustmentHints, AdjustmentHintsMode}; + use crate::{LifetimeElisionHints, fixture, inlay_hints::InlayHintsConfig}; use super::{ClosureReturnTypeHints, GenericParameterHints, InlayFieldsToResolve}; @@ -993,6 +1001,53 @@ fn foo() { fn foo() { let } +"#, + ); + } + + #[test] + fn closure_dependency_cycle_no_panic() { + check( + r#" +fn foo() { + let closure; + // ^^^^^^^ impl Fn() + closure = || { + closure(); + }; +} + +fn bar() { + let closure1; + // ^^^^^^^^ impl Fn() + let closure2; + // ^^^^^^^^ impl Fn() + closure1 = || { + closure2(); + }; + closure2 = || { + closure1(); + }; +} + "#, + ); + } + + #[test] + fn regression_19610() { + check( + r#" +trait Trait { + type Assoc; +} +struct Foo(A); +impl> Foo { + fn foo<'a, 'b>(_: &'a [i32], _: &'b [i32]) {} +} + +fn bar() { + Foo::foo(&[1], &[2]); +} "#, ); } diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/adjustment.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/adjustment.rs index 91b8187295236..f2844a2eaa614 100644 --- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/adjustment.rs +++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/adjustment.rs @@ -13,7 +13,7 @@ use hir::{ use ide_db::famous_defs::FamousDefs; use ide_db::text_edit::TextEditBuilder; -use syntax::ast::{self, prec::ExprPrecedence, AstNode}; +use syntax::ast::{self, AstNode, prec::ExprPrecedence}; use crate::{ AdjustmentHints, AdjustmentHintsMode, InlayHint, InlayHintLabel, InlayHintLabelPart, @@ -224,7 +224,7 @@ fn mode_and_needs_parens_for_adjustment_hints( expr: &ast::Expr, mode: AdjustmentHintsMode, ) -> (bool, bool, bool) { - use {std::cmp::Ordering::*, AdjustmentHintsMode::*}; + use {AdjustmentHintsMode::*, std::cmp::Ordering::*}; match mode { Prefix | Postfix => { @@ -284,8 +284,8 @@ fn needs_parens_for_adjustment_hints(expr: &ast::Expr, postfix: bool) -> (bool, #[cfg(test)] mod tests { use crate::{ - inlay_hints::tests::{check_with_config, DISABLED_CONFIG}, AdjustmentHints, AdjustmentHintsMode, InlayHintsConfig, + inlay_hints::tests::{DISABLED_CONFIG, check_with_config}, }; #[test] diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/bind_pat.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/bind_pat.rs index 4379153acaa17..36fdd90e8aea2 100644 --- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/bind_pat.rs +++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/bind_pat.rs @@ -4,7 +4,7 @@ //! let _x /* i32 */= f(4, 4); //! ``` use hir::{DisplayTarget, Semantics}; -use ide_db::{famous_defs::FamousDefs, RootDatabase}; +use ide_db::{RootDatabase, famous_defs::FamousDefs}; use itertools::Itertools; use syntax::{ @@ -13,8 +13,8 @@ use syntax::{ }; use crate::{ - inlay_hints::{closure_has_block_body, label_of_ty, ty_to_text_edit}, InlayHint, InlayHintPosition, InlayHintsConfig, InlayKind, + inlay_hints::{closure_has_block_body, label_of_ty, ty_to_text_edit}, }; pub(super) fn hints( @@ -87,6 +87,7 @@ pub(super) fn hints( .as_ref() .map_or_else(|| pat.syntax().text_range(), |t| t.text_range()) .end(), + &|_| (), if colon_token.is_some() { "" } else { ": " }, ) } else { @@ -181,10 +182,10 @@ mod tests { use syntax::{TextRange, TextSize}; use test_utils::extract_annotations; - use crate::{fixture, inlay_hints::InlayHintsConfig, ClosureReturnTypeHints}; + use crate::{ClosureReturnTypeHints, fixture, inlay_hints::InlayHintsConfig}; use crate::inlay_hints::tests::{ - check, check_edit, check_no_edit, check_with_config, DISABLED_CONFIG, TEST_CONFIG, + DISABLED_CONFIG, TEST_CONFIG, check, check_edit, check_no_edit, check_with_config, }; #[track_caller] @@ -379,9 +380,9 @@ fn main() { let foo = foo3(); // ^^^ impl Fn(f64, f64) -> u32 let foo = foo4(); - // ^^^ &dyn Fn(f64, f64) -> u32 + // ^^^ &'static (dyn Fn(f64, f64) -> u32 + 'static) let foo = foo5(); - // ^^^ &dyn Fn(&dyn Fn(f64, f64) -> u32, f64) -> u32 + // ^^^ &'static (dyn Fn(&(dyn Fn(f64, f64) -> u32 + 'static), f64) -> u32 + 'static) let foo = foo6(); // ^^^ impl Fn(f64, f64) -> u32 let foo = foo7(); @@ -412,7 +413,7 @@ fn main() { let foo = foo3(); // ^^^ impl Fn(f64, f64) -> u32 let foo = foo4(); - // ^^^ &dyn Fn(f64, f64) -> u32 + // ^^^ &'static (dyn Fn(f64, f64) -> u32 + 'static) let foo = foo5(); let foo = foo6(); let foo = foo7(); @@ -527,7 +528,7 @@ fn main() { //^^^^ i32 let _ = 22; let test = "test"; - //^^^^ &str + //^^^^ &'static str let test = InnerStruct {}; //^^^^ InnerStruct @@ -617,12 +618,12 @@ impl Iterator for IntoIter { fn main() { let mut data = Vec::new(); - //^^^^ Vec<&str> + //^^^^ Vec<&'static str> data.push("foo"); for i in data { - //^ &str + //^ &'static str let z = i; - //^ &str + //^ &'static str } } "#, @@ -650,8 +651,8 @@ fn main() { //^^ Vec> let _v = { Vec::>::new() }; //^^ Vec> - let _v = { Vec::>::new() }; - //^^ Vec> + let _v = { Vec::>::new() }; + //^^ Vec> } "#, ); @@ -855,28 +856,6 @@ fn main() { //^ |i32| -> () let p = (y, z); //^ (|i32| -> i32, |i32| -> ()) -} - "#, - ); - check_with_config( - InlayHintsConfig { - type_hints: true, - closure_style: ClosureStyle::ClosureWithId, - ..DISABLED_CONFIG - }, - r#" -//- minicore: fn -fn main() { - let x = || 2; - //^ {closure#0} - let y = |t: i32| x() + t; - //^ {closure#1} - let mut t = 5; - //^ i32 - let z = |k: i32| { t += k; }; - //^ {closure#2} - let p = (y, z); - //^ ({closure#1}, {closure#2}) } "#, ); @@ -1038,7 +1017,7 @@ fn test(t: T) { "#, expect![[r#" fn test(t: T) { - let f = |a: i32, b: &str, c: T| {}; + let f = |a: i32, b: &'static str, c: T| {}; let result: () = f(42, "", t); } "#]], @@ -1140,12 +1119,11 @@ fn test() { #[test] fn no_edit_for_closure_return_without_body_block() { - // We can lift this limitation; see FIXME in closure_ret module. let config = InlayHintsConfig { closure_return_type_hints: ClosureReturnTypeHints::Always, ..TEST_CONFIG }; - check_no_edit( + check_edit( config, r#" struct S(T); @@ -1154,6 +1132,13 @@ fn test() { let f = |a: S| S(a); } "#, + expect![[r#" + struct S(T); + fn test() { + let f = || -> i32 { 3 }; + let f = |a: S| -> S> { S(a) }; + } + "#]], ); } diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/binding_mode.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/binding_mode.rs index 5bbb4fe4e66e3..d29173206889d 100644 --- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/binding_mode.rs +++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/binding_mode.rs @@ -128,8 +128,8 @@ mod tests { use expect_test::expect; use crate::{ - inlay_hints::tests::{check_edit, check_with_config, DISABLED_CONFIG}, InlayHintsConfig, + inlay_hints::tests::{DISABLED_CONFIG, check_edit, check_with_config}, }; #[test] diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/bounds.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/bounds.rs index e9b728bcaa75d..8ddbfaeffe879 100644 --- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/bounds.rs +++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/bounds.rs @@ -1,7 +1,7 @@ //! Implementation of trait bound hints. //! //! Currently this renders the implied `Sized` bound. -use ide_db::{famous_defs::FamousDefs, FileRange}; +use ide_db::{FileRange, famous_defs::FamousDefs}; use span::EditionedFileId; use syntax::ast::{self, AstNode, HasTypeBounds}; @@ -86,7 +86,7 @@ mod tests { use crate::inlay_hints::InlayHintsConfig; - use crate::inlay_hints::tests::{check_expect, check_with_config, DISABLED_CONFIG}; + use crate::inlay_hints::tests::{DISABLED_CONFIG, check_expect, check_with_config}; #[track_caller] fn check(#[rust_analyzer::rust_fixture] ra_fixture: &str) { diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/chaining.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/chaining.rs index 604719bc366f5..ff157fa171b50 100644 --- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/chaining.rs +++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/chaining.rs @@ -2,8 +2,8 @@ use hir::DisplayTarget; use ide_db::famous_defs::FamousDefs; use syntax::{ - ast::{self, AstNode}, Direction, NodeOrToken, SyntaxKind, T, + ast::{self, AstNode}, }; use crate::{InlayHint, InlayHintPosition, InlayHintsConfig, InlayKind}; @@ -76,16 +76,15 @@ pub(super) fn hints( #[cfg(test)] mod tests { - use expect_test::{expect, Expect}; + use expect_test::{Expect, expect}; use ide_db::text_edit::{TextRange, TextSize}; use crate::{ - fixture, + InlayHintsConfig, fixture, inlay_hints::{ - tests::{check_expect, check_with_config, DISABLED_CONFIG, TEST_CONFIG}, LazyProperty, + tests::{DISABLED_CONFIG, TEST_CONFIG, check_expect, check_with_config}, }, - InlayHintsConfig, }; #[track_caller] diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/closing_brace.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/closing_brace.rs index bec6d38ee9cac..2ec85da4a429b 100644 --- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/closing_brace.rs +++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/closing_brace.rs @@ -7,13 +7,14 @@ use hir::{DisplayTarget, HirDisplay, Semantics}; use ide_db::{FileRange, RootDatabase}; use span::EditionedFileId; use syntax::{ + SyntaxKind, SyntaxNode, T, ast::{self, AstNode, HasLoopBody, HasName}, - match_ast, SyntaxKind, SyntaxNode, T, + match_ast, }; use crate::{ - inlay_hints::LazyProperty, InlayHint, InlayHintLabel, InlayHintPosition, InlayHintsConfig, - InlayKind, + InlayHint, InlayHintLabel, InlayHintPosition, InlayHintsConfig, InlayKind, + inlay_hints::LazyProperty, }; pub(super) fn hints( @@ -159,8 +160,8 @@ pub(super) fn hints( #[cfg(test)] mod tests { use crate::{ - inlay_hints::tests::{check_with_config, DISABLED_CONFIG}, InlayHintsConfig, + inlay_hints::tests::{DISABLED_CONFIG, check_with_config}, }; #[test] @@ -193,7 +194,7 @@ impl Tr for () { //^ impl Tr for () impl dyn Tr { } -//^ impl dyn Tr +//^ impl dyn Tr + 'static static S0: () = 0; static S1: () = {}; diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/closure_captures.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/closure_captures.rs index 9b981c0a3acf7..3186a566d2bce 100644 --- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/closure_captures.rs +++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/closure_captures.rs @@ -3,8 +3,7 @@ //! Tests live in [`bind_pat`][super::bind_pat] module. use ide_db::famous_defs::FamousDefs; use ide_db::text_edit::{TextRange, TextSize}; -use span::EditionedFileId; -use stdx::{never, TupleExt}; +use stdx::{TupleExt, never}; use syntax::ast::{self, AstNode}; use crate::{ @@ -15,7 +14,6 @@ pub(super) fn hints( acc: &mut Vec, FamousDefs(sema, _): &FamousDefs<'_, '_>, config: &InlayHintsConfig, - _file_id: EditionedFileId, closure: ast::ClosureExpr, ) -> Option<()> { if !config.closure_capture_hints { @@ -75,10 +73,12 @@ pub(super) fn hints( // force cache the source file, otherwise sema lookup will potentially panic _ = sema.parse_or_expand(source.file()); source.name().and_then(|name| { - name.syntax() - .original_file_range_opt(sema.db) - .map(TupleExt::head) - .map(Into::into) + name.syntax().original_file_range_opt(sema.db).map(TupleExt::head).map( + |frange| ide_db::FileRange { + file_id: frange.file_id.file_id(sema.db), + range: frange.range, + }, + ) }) }), tooltip: None, @@ -96,8 +96,8 @@ pub(super) fn hints( #[cfg(test)] mod tests { use crate::{ - inlay_hints::tests::{check_with_config, DISABLED_CONFIG}, InlayHintsConfig, + inlay_hints::tests::{DISABLED_CONFIG, check_with_config}, }; #[test] diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/closure_ret.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/closure_ret.rs index 61c9c25fe7396..9e600b5455be2 100644 --- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/closure_ret.rs +++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/closure_ret.rs @@ -2,12 +2,12 @@ //! //! Tests live in [`bind_pat`][super::bind_pat] module. use hir::DisplayTarget; -use ide_db::famous_defs::FamousDefs; +use ide_db::{famous_defs::FamousDefs, text_edit::TextEditBuilder}; use syntax::ast::{self, AstNode}; use crate::{ - inlay_hints::{closure_has_block_body, label_of_ty, ty_to_text_edit}, ClosureReturnTypeHints, InlayHint, InlayHintPosition, InlayHintsConfig, InlayKind, + inlay_hints::{closure_has_block_body, label_of_ty, ty_to_text_edit}, }; pub(super) fn hints( @@ -35,8 +35,9 @@ pub(super) fn hints( let param_list = closure.param_list()?; - let closure = sema.descend_node_into_attributes(closure).pop()?; - let ty = sema.type_of_expr(&ast::Expr::ClosureExpr(closure.clone()))?.adjusted(); + let resolve_parent = Some(closure.syntax().text_range()); + let descended_closure = sema.descend_node_into_attributes(closure.clone()).pop()?; + let ty = sema.type_of_expr(&ast::Expr::ClosureExpr(descended_closure.clone()))?.adjusted(); let callable = ty.as_callable(sema.db)?; let ty = callable.return_type(); if arrow.is_none() && ty.is_unit() { @@ -48,23 +49,30 @@ pub(super) fn hints( if arrow.is_none() { label.prepend_str(" -> "); } - // FIXME?: We could provide text edit to insert braces for closures with non-block body. - let text_edit = if has_block_body { - ty_to_text_edit( - sema, - config, - closure.syntax(), - &ty, - arrow - .as_ref() - .map_or_else(|| param_list.syntax().text_range(), |t| t.text_range()) - .end(), - if arrow.is_none() { " -> " } else { "" }, - ) - } else { - None + + let offset_to_insert_ty = + arrow.as_ref().map_or_else(|| param_list.syntax().text_range(), |t| t.text_range()).end(); + + // Insert braces if necessary + let insert_braces = |builder: &mut TextEditBuilder| { + if !has_block_body { + if let Some(range) = closure.body().map(|b| b.syntax().text_range()) { + builder.insert(range.start(), "{ ".to_owned()); + builder.insert(range.end(), " }".to_owned()); + } + } }; + let text_edit = ty_to_text_edit( + sema, + config, + descended_closure.syntax(), + &ty, + offset_to_insert_ty, + &insert_braces, + if arrow.is_none() { " -> " } else { "" }, + ); + acc.push(InlayHint { range: param_list.syntax().text_range(), kind: InlayKind::Type, @@ -73,14 +81,14 @@ pub(super) fn hints( position: InlayHintPosition::After, pad_left: false, pad_right: false, - resolve_parent: Some(closure.syntax().text_range()), + resolve_parent, }); Some(()) } #[cfg(test)] mod tests { - use crate::inlay_hints::tests::{check_with_config, DISABLED_CONFIG}; + use crate::inlay_hints::tests::{DISABLED_CONFIG, check_with_config}; use super::*; diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/discriminant.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/discriminant.rs index f1e1955d14ca7..827a0438dd022 100644 --- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/discriminant.rs +++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/discriminant.rs @@ -6,7 +6,7 @@ //! ``` use hir::Semantics; use ide_db::text_edit::TextEdit; -use ide_db::{famous_defs::FamousDefs, RootDatabase}; +use ide_db::{RootDatabase, famous_defs::FamousDefs}; use span::EditionedFileId; use syntax::ast::{self, AstNode, HasName}; @@ -107,8 +107,8 @@ mod tests { use expect_test::expect; use crate::inlay_hints::{ - tests::{check_edit, check_with_config, DISABLED_CONFIG}, DiscriminantHints, InlayHintsConfig, + tests::{DISABLED_CONFIG, check_edit, check_with_config}, }; #[track_caller] diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/extern_block.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/extern_block.rs index 652dff0bc56e7..20f54b2cd19d6 100644 --- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/extern_block.rs +++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/extern_block.rs @@ -1,7 +1,7 @@ //! Extern block hints use ide_db::{famous_defs::FamousDefs, text_edit::TextEdit}; use span::EditionedFileId; -use syntax::{ast, AstNode, SyntaxToken}; +use syntax::{AstNode, SyntaxToken, ast}; use crate::{InlayHint, InlayHintsConfig}; @@ -98,7 +98,7 @@ fn item_hint( #[cfg(test)] mod tests { - use crate::inlay_hints::tests::{check_with_config, DISABLED_CONFIG}; + use crate::inlay_hints::tests::{DISABLED_CONFIG, check_with_config}; #[test] fn unadorned() { diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/generic_param.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/generic_param.rs index 762a4c2655181..6e1b3bdbdf039 100644 --- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/generic_param.rs +++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/generic_param.rs @@ -1,12 +1,14 @@ //! Implementation of inlay hints for generic parameters. +use either::Either; use ide_db::{active_parameter::generic_def_for_node, famous_defs::FamousDefs}; use syntax::{ - ast::{self, AnyHasGenericArgs, HasGenericArgs, HasName}, AstNode, + ast::{self, AnyHasGenericArgs, HasGenericArgs, HasName}, }; use crate::{ - inlay_hints::GenericParameterHints, InlayHint, InlayHintLabel, InlayHintsConfig, InlayKind, + InlayHint, InlayHintLabel, InlayHintsConfig, InlayKind, + inlay_hints::{GenericParameterHints, param_name}, }; use super::param_name::is_argument_similar_to_param_name; @@ -62,8 +64,17 @@ pub(crate) fn hints( let param_name = param.name(sema.db); let should_hide = { - let argument = get_string_representation(&arg)?; - is_argument_similar_to_param_name(&argument, param_name.as_str()) + let param_name = param_name.as_str(); + get_segment_representation(&arg).map_or(false, |seg| match seg { + Either::Left(Either::Left(argument)) => { + is_argument_similar_to_param_name(&argument, param_name) + } + Either::Left(Either::Right(argument)) => argument + .segment() + .and_then(|it| it.name_ref()) + .is_some_and(|it| it.text().eq_ignore_ascii_case(param_name)), + Either::Right(lifetime) => lifetime.text().eq_ignore_ascii_case(param_name), + }) }; if should_hide { @@ -91,7 +102,10 @@ pub(crate) fn hints( } }; let linked_location = source_syntax.and_then(|it| sema.original_range_opt(&it)); - linked_location.map(Into::into) + linked_location.map(|frange| ide_db::FileRange { + file_id: frange.file_id.file_id(sema.db), + range: frange.range, + }) }), ); @@ -111,32 +125,34 @@ pub(crate) fn hints( Some(()) } -fn get_string_representation(arg: &ast::GenericArg) -> Option { +fn get_segment_representation( + arg: &ast::GenericArg, +) -> Option, ast::Path>, ast::Lifetime>> { return match arg { ast::GenericArg::AssocTypeArg(_) => None, - ast::GenericArg::ConstArg(const_arg) => Some(const_arg.to_string()), + ast::GenericArg::ConstArg(const_arg) => { + param_name::get_segment_representation(&const_arg.expr()?).map(Either::Left) + } ast::GenericArg::LifetimeArg(lifetime_arg) => { let lifetime = lifetime_arg.lifetime()?; - Some(lifetime.to_string()) + Some(Either::Right(lifetime)) } ast::GenericArg::TypeArg(type_arg) => { let ty = type_arg.ty()?; - Some( - type_path_segment(&ty) - .map_or_else(|| type_arg.to_string(), |segment| segment.to_string()), - ) + type_path(&ty).map(Either::Right).map(Either::Left) } }; - fn type_path_segment(ty: &ast::Type) -> Option { + fn type_path(ty: &ast::Type) -> Option { match ty { - ast::Type::ArrayType(it) => type_path_segment(&it.ty()?), - ast::Type::ForType(it) => type_path_segment(&it.ty()?), - ast::Type::ParenType(it) => type_path_segment(&it.ty()?), - ast::Type::PathType(path_type) => path_type.path()?.segment(), - ast::Type::PtrType(it) => type_path_segment(&it.ty()?), - ast::Type::RefType(it) => type_path_segment(&it.ty()?), - ast::Type::SliceType(it) => type_path_segment(&it.ty()?), + ast::Type::ArrayType(it) => type_path(&it.ty()?), + ast::Type::ForType(it) => type_path(&it.ty()?), + ast::Type::ParenType(it) => type_path(&it.ty()?), + ast::Type::PathType(path_type) => path_type.path(), + ast::Type::PtrType(it) => type_path(&it.ty()?), + ast::Type::RefType(it) => type_path(&it.ty()?), + ast::Type::SliceType(it) => type_path(&it.ty()?), + ast::Type::MacroType(macro_type) => macro_type.macro_call()?.path(), _ => None, } } @@ -145,11 +161,11 @@ fn get_string_representation(arg: &ast::GenericArg) -> Option { #[cfg(test)] mod tests { use crate::{ + InlayHintsConfig, inlay_hints::{ - tests::{check_with_config, DISABLED_CONFIG}, GenericParameterHints, + tests::{DISABLED_CONFIG, check_with_config}, }, - InlayHintsConfig, }; #[track_caller] diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/implicit_drop.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/implicit_drop.rs index 390139d214eb0..f52e27946fff7 100644 --- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/implicit_drop.rs +++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/implicit_drop.rs @@ -6,16 +6,17 @@ //! } //! ``` use hir::{ + ChalkTyInterner, DefWithBody, db::{DefDatabase as _, HirDatabase as _}, mir::{MirSpan, TerminatorKind}, - ChalkTyInterner, DefWithBody, }; -use ide_db::{famous_defs::FamousDefs, FileRange}; +use ide_db::{FileRange, famous_defs::FamousDefs}; use span::EditionedFileId; use syntax::{ + ToSmolStr, ast::{self, AstNode}, - match_ast, ToSmolStr, + match_ast, }; use crate::{InlayHint, InlayHintLabel, InlayHintPosition, InlayHintsConfig, InlayKind}; @@ -107,7 +108,7 @@ pub(super) fn hints( .and_then(|d| source_map.pat_syntax(*d).ok()) .and_then(|d| { Some(FileRange { - file_id: d.file_id.file_id()?.into(), + file_id: d.file_id.file_id()?.file_id(sema.db), range: d.value.text_range(), }) }) @@ -143,8 +144,8 @@ fn nearest_token_after_node( #[cfg(test)] mod tests { use crate::{ - inlay_hints::tests::{check_with_config, DISABLED_CONFIG}, InlayHintsConfig, + inlay_hints::tests::{DISABLED_CONFIG, check_with_config}, }; const ONLY_DROP_CONFIG: InlayHintsConfig = diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/implicit_static.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/implicit_static.rs index ae5b519b43d00..f3be09f30a135 100644 --- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/implicit_static.rs +++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/implicit_static.rs @@ -7,8 +7,8 @@ use ide_db::famous_defs::FamousDefs; use ide_db::text_edit::TextEdit; use span::EditionedFileId; use syntax::{ - ast::{self, AstNode}, SyntaxKind, + ast::{self, AstNode}, }; use crate::{InlayHint, InlayHintPosition, InlayHintsConfig, InlayKind, LifetimeElisionHints}; @@ -56,8 +56,8 @@ pub(super) fn hints( #[cfg(test)] mod tests { use crate::{ - inlay_hints::tests::{check_with_config, TEST_CONFIG}, InlayHintsConfig, LifetimeElisionHints, + inlay_hints::tests::{TEST_CONFIG, check_with_config}, }; #[test] diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/lifetime.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/lifetime.rs index 1fdd698991710..baba49a427d19 100644 --- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/lifetime.rs +++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/lifetime.rs @@ -4,18 +4,18 @@ //! ``` use std::iter; -use ide_db::{famous_defs::FamousDefs, syntax_helpers::node_ext::walk_ty, FxHashMap}; +use ide_db::{FxHashMap, famous_defs::FamousDefs, syntax_helpers::node_ext::walk_ty}; use itertools::Itertools; use span::EditionedFileId; +use syntax::{SmolStr, format_smolstr}; use syntax::{ - ast::{self, AstNode, HasGenericParams, HasName}, SyntaxKind, SyntaxToken, + ast::{self, AstNode, HasGenericParams, HasName}, }; -use syntax::{format_smolstr, SmolStr}; use crate::{ - inlay_hints::InlayHintCtx, InlayHint, InlayHintPosition, InlayHintsConfig, InlayKind, - LifetimeElisionHints, + InlayHint, InlayHintPosition, InlayHintsConfig, InlayKind, LifetimeElisionHints, + inlay_hints::InlayHintCtx, }; pub(super) fn fn_hints( @@ -268,13 +268,14 @@ fn hints_( ctx.lifetime_stacks.iter().flat_map(|it| it.iter()).cloned().zip(iter::repeat(0)).collect(); // allocate names let mut gen_idx_name = { - let mut gen = (0u8..).map(|idx| match idx { + let mut generic = (0u8..).map(|idx| match idx { idx if idx < 10 => SmolStr::from_iter(['\'', (idx + 48) as char]), idx => format_smolstr!("'{idx}"), }); let ctx = &*ctx; move || { - gen.by_ref() + generic + .by_ref() .find(|s| ctx.lifetime_stacks.iter().flat_map(|it| it.iter()).all(|n| n != s)) .unwrap_or_default() } @@ -406,8 +407,8 @@ fn hints_( #[cfg(test)] mod tests { use crate::{ - inlay_hints::tests::{check, check_with_config, TEST_CONFIG}, InlayHintsConfig, LifetimeElisionHints, + inlay_hints::tests::{TEST_CONFIG, check, check_with_config}, }; #[test] diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/param_name.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/param_name.rs index 8f01b1bd38b50..5ff9fee60abfa 100644 --- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/param_name.rs +++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/param_name.rs @@ -4,16 +4,14 @@ //! _ = max(/*x*/4, /*y*/4); //! ``` +use std::iter::zip; + use either::Either; -use hir::{Callable, Semantics}; -use ide_db::{famous_defs::FamousDefs, RootDatabase}; +use hir::Semantics; +use ide_db::{RootDatabase, famous_defs::FamousDefs}; -use span::EditionedFileId; use stdx::to_lower_snake_case; -use syntax::{ - ast::{self, AstNode, HasArgList, HasName, UnaryOp}, - ToSmolStr, -}; +use syntax::ast::{self, AstNode, HasArgList, HasName, UnaryOp}; use crate::{InlayHint, InlayHintLabel, InlayHintPosition, InlayHintsConfig, InlayKind}; @@ -21,7 +19,6 @@ pub(super) fn hints( acc: &mut Vec, FamousDefs(sema, krate): &FamousDefs<'_, '_>, config: &InlayHintsConfig, - _file_id: EditionedFileId, expr: ast::Expr, ) -> Option<()> { if !config.parameter_hints { @@ -29,6 +26,12 @@ pub(super) fn hints( } let (callable, arg_list) = get_callable(sema, &expr)?; + let unary_function = callable.n_params() == 1; + let function_name = match callable.kind() { + hir::CallableKind::Function(function) => Some(function.name(sema.db)), + _ => None, + }; + let function_name = function_name.as_ref().map(|it| it.as_str()); let hints = callable .params() .into_iter() @@ -40,7 +43,13 @@ pub(super) fn hints( Some((p, param_name, arg, range)) }) .filter(|(_, param_name, arg, _)| { - !should_hide_param_name_hint(sema, &callable, param_name.as_str(), arg) + !should_hide_param_name_hint( + sema, + unary_function, + function_name, + param_name.as_str(), + arg, + ) }) .map(|(param, param_name, _, hir::FileRange { range, .. })| { let colon = if config.render_colons { ":" } else { "" }; @@ -56,7 +65,10 @@ pub(super) fn hints( _ => None, }, }?; - sema.original_range_opt(name_syntax.syntax()).map(Into::into) + sema.original_range_opt(name_syntax.syntax()).map(|frange| ide_db::FileRange { + file_id: frange.file_id.file_id(sema.db), + range: frange.range, + }) }), ); InlayHint { @@ -94,9 +106,13 @@ fn get_callable( } } +const INSIGNIFICANT_METHOD_NAMES: &[&str] = &["clone", "as_ref", "into"]; +const INSIGNIFICANT_PARAMETER_NAMES: &[&str] = &["predicate", "value", "pat", "rhs", "other"]; + fn should_hide_param_name_hint( sema: &Semantics<'_, RootDatabase>, - callable: &hir::Callable, + unary_function: bool, + function_name: Option<&str>, param_name: &str, argument: &ast::Expr, ) -> bool { @@ -114,95 +130,128 @@ fn should_hide_param_name_hint( return true; } - if matches!(argument, ast::Expr::PrefixExpr(prefix) if prefix.op_kind() == Some(UnaryOp::Not)) { - return false; + if param_name.starts_with("ra_fixture") { + return true; } - let fn_name = match callable.kind() { - hir::CallableKind::Function(it) => Some(it.name(sema.db).as_str().to_smolstr()), - _ => None, - }; - let fn_name = fn_name.as_deref(); - is_param_name_suffix_of_fn_name(param_name, callable, fn_name) - || is_argument_expr_similar_to_param_name(argument, param_name) - || param_name.starts_with("ra_fixture") - || (callable.n_params() == 1 && is_obvious_param(param_name)) - || is_adt_constructor_similar_to_param_name(sema, argument, param_name) + if unary_function { + if let Some(function_name) = function_name { + if is_param_name_suffix_of_fn_name(param_name, function_name) { + return true; + } + } + if is_obvious_param(param_name) { + return true; + } + } + + is_argument_expr_similar_to_param_name(sema, argument, param_name) } /// Hide the parameter name of a unary function if it is a `_` - prefixed suffix of the function's name, or equal. /// /// `fn strip_suffix(suffix)` will be hidden. /// `fn stripsuffix(suffix)` will not be hidden. -fn is_param_name_suffix_of_fn_name( +fn is_param_name_suffix_of_fn_name(param_name: &str, fn_name: &str) -> bool { + fn_name == param_name + || fn_name + .len() + .checked_sub(param_name.len()) + .and_then(|at| fn_name.is_char_boundary(at).then(|| fn_name.split_at(at))) + .is_some_and(|(prefix, suffix)| { + suffix.eq_ignore_ascii_case(param_name) && prefix.ends_with('_') + }) +} + +fn is_argument_expr_similar_to_param_name( + sema: &Semantics<'_, RootDatabase>, + argument: &ast::Expr, param_name: &str, - callable: &Callable, - fn_name: Option<&str>, ) -> bool { - match (callable.n_params(), fn_name) { - (1, Some(function)) => { - function == param_name - || function - .len() - .checked_sub(param_name.len()) - .and_then(|at| function.is_char_boundary(at).then(|| function.split_at(at))) - .is_some_and(|(prefix, suffix)| { - suffix.eq_ignore_ascii_case(param_name) && prefix.ends_with('_') - }) + match get_segment_representation(argument) { + Some(Either::Left(argument)) => is_argument_similar_to_param_name(&argument, param_name), + Some(Either::Right(path)) => { + path.segment() + .and_then(|it| it.name_ref()) + .is_some_and(|name_ref| name_ref.text().eq_ignore_ascii_case(param_name)) + || is_adt_constructor_similar_to_param_name(sema, &path, param_name) } - _ => false, + None => false, } } -fn is_argument_expr_similar_to_param_name(argument: &ast::Expr, param_name: &str) -> bool { - let argument = match get_string_representation(argument) { - Some(argument) => argument, - None => return false, - }; - is_argument_similar_to_param_name(&argument, param_name) -} - /// Check whether param_name and argument are the same or /// whether param_name is a prefix/suffix of argument(split at `_`). -pub(super) fn is_argument_similar_to_param_name(argument: &str, param_name: &str) -> bool { - // std is honestly too panic happy... - let str_split_at = |str: &str, at| str.is_char_boundary(at).then(|| argument.split_at(at)); - - let param_name = param_name.trim_start_matches('_'); - let argument = argument.trim_start_matches('_'); - - match str_split_at(argument, param_name.len()) { - Some((prefix, rest)) if prefix.eq_ignore_ascii_case(param_name) => { - return rest.is_empty() || rest.starts_with('_'); - } - _ => (), - } - match argument.len().checked_sub(param_name.len()).and_then(|at| str_split_at(argument, at)) { - Some((rest, suffix)) if param_name.eq_ignore_ascii_case(suffix) => { - return rest.is_empty() || rest.ends_with('_'); - } - _ => (), - } - false +pub(super) fn is_argument_similar_to_param_name( + argument: &[ast::NameRef], + param_name: &str, +) -> bool { + debug_assert!(!argument.is_empty()); + debug_assert!(!param_name.is_empty()); + let param_name = param_name.split('_'); + let argument = argument.iter().flat_map(|it| it.text_non_mutable().split('_')); + + let prefix_match = zip(argument.clone(), param_name.clone()) + .all(|(arg, param)| arg.eq_ignore_ascii_case(param)); + let postfix_match = || { + zip(argument.rev(), param_name.rev()).all(|(arg, param)| arg.eq_ignore_ascii_case(param)) + }; + prefix_match || postfix_match() } -fn get_string_representation(expr: &ast::Expr) -> Option { +pub(super) fn get_segment_representation( + expr: &ast::Expr, +) -> Option, ast::Path>> { match expr { ast::Expr::MethodCallExpr(method_call_expr) => { + let receiver = + method_call_expr.receiver().and_then(|expr| get_segment_representation(&expr)); let name_ref = method_call_expr.name_ref()?; - match name_ref.text().as_str() { - "clone" | "as_ref" => method_call_expr.receiver().map(|rec| rec.to_string()), - name_ref => Some(name_ref.to_owned()), + if INSIGNIFICANT_METHOD_NAMES.contains(&name_ref.text().as_str()) { + return receiver; } + Some(Either::Left(match receiver { + Some(Either::Left(mut left)) => { + left.push(name_ref); + left + } + Some(Either::Right(_)) | None => vec![name_ref], + })) + } + ast::Expr::FieldExpr(field_expr) => { + let expr = field_expr.expr().and_then(|expr| get_segment_representation(&expr)); + let name_ref = field_expr.name_ref()?; + let res = match expr { + Some(Either::Left(mut left)) => { + left.push(name_ref); + left + } + Some(Either::Right(_)) | None => vec![name_ref], + }; + Some(Either::Left(res)) } - ast::Expr::MacroExpr(macro_expr) => { - Some(macro_expr.macro_call()?.path()?.segment()?.to_string()) + // paths + ast::Expr::MacroExpr(macro_expr) => macro_expr.macro_call()?.path().map(Either::Right), + ast::Expr::RecordExpr(record_expr) => record_expr.path().map(Either::Right), + ast::Expr::PathExpr(path_expr) => { + let path = path_expr.path()?; + // single segment paths are likely locals + Some(match path.as_single_name_ref() { + None => Either::Right(path), + Some(name_ref) => Either::Left(vec![name_ref]), + }) } - ast::Expr::FieldExpr(field_expr) => Some(field_expr.name_ref()?.to_string()), - ast::Expr::PathExpr(path_expr) => Some(path_expr.path()?.segment()?.to_string()), - ast::Expr::PrefixExpr(prefix_expr) => get_string_representation(&prefix_expr.expr()?), - ast::Expr::RefExpr(ref_expr) => get_string_representation(&ref_expr.expr()?), - ast::Expr::CastExpr(cast_expr) => get_string_representation(&cast_expr.expr()?), + ast::Expr::PrefixExpr(prefix_expr) if prefix_expr.op_kind() == Some(UnaryOp::Not) => None, + // recurse + ast::Expr::PrefixExpr(prefix_expr) => get_segment_representation(&prefix_expr.expr()?), + ast::Expr::RefExpr(ref_expr) => get_segment_representation(&ref_expr.expr()?), + ast::Expr::CastExpr(cast_expr) => get_segment_representation(&cast_expr.expr()?), + ast::Expr::CallExpr(call_expr) => get_segment_representation(&call_expr.expr()?), + ast::Expr::AwaitExpr(await_expr) => get_segment_representation(&await_expr.expr()?), + ast::Expr::IndexExpr(index_expr) => get_segment_representation(&index_expr.base()?), + ast::Expr::ParenExpr(paren_expr) => get_segment_representation(&paren_expr.expr()?), + ast::Expr::TryExpr(try_expr) => get_segment_representation(&try_expr.expr()?), + // ast::Expr::ClosureExpr(closure_expr) => todo!(), _ => None, } } @@ -210,30 +259,15 @@ fn get_string_representation(expr: &ast::Expr) -> Option { fn is_obvious_param(param_name: &str) -> bool { // avoid displaying hints for common functions like map, filter, etc. // or other obvious words used in std - let is_obvious_param_name = - matches!(param_name, "predicate" | "value" | "pat" | "rhs" | "other"); - param_name.len() == 1 || is_obvious_param_name + param_name.len() == 1 || INSIGNIFICANT_PARAMETER_NAMES.contains(¶m_name) } fn is_adt_constructor_similar_to_param_name( sema: &Semantics<'_, RootDatabase>, - argument: &ast::Expr, + path: &ast::Path, param_name: &str, ) -> bool { - let path = match argument { - ast::Expr::CallExpr(c) => c.expr().and_then(|e| match e { - ast::Expr::PathExpr(p) => p.path(), - _ => None, - }), - ast::Expr::PathExpr(p) => p.path(), - ast::Expr::RecordExpr(r) => r.path(), - _ => return false, - }; - let path = match path { - Some(it) => it, - None => return false, - }; - (|| match sema.resolve_path(&path)? { + (|| match sema.resolve_path(path)? { hir::PathResolution::Def(hir::ModuleDef::Adt(_)) => { Some(to_lower_snake_case(&path.segment()?.name_ref()?.text()) == param_name) } @@ -257,8 +291,8 @@ fn is_adt_constructor_similar_to_param_name( #[cfg(test)] mod tests { use crate::{ - inlay_hints::tests::{check_with_config, DISABLED_CONFIG}, InlayHintsConfig, + inlay_hints::tests::{DISABLED_CONFIG, check_with_config}, }; #[track_caller] @@ -501,6 +535,7 @@ fn enum_matches_param_name(completion_kind: CompletionKind) {} fn foo(param: u32) {} fn bar(param_eter: u32) {} +fn baz(a_d_e: u32) {} enum CompletionKind { Keyword, @@ -553,6 +588,14 @@ fn main() { //^^^^^^^^^^^ param_eter non_ident_pat((0, 0)); + + baz(a.d.e); + baz(a.dc.e); + // ^^^^^^ a_d_e + baz(ac.d.e); + // ^^^^^^ a_d_e + baz(a.d.ec); + // ^^^^^^ a_d_e }"#, ); } diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/range_exclusive.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/range_exclusive.rs index de9b0e98a4beb..d67d84588402e 100644 --- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/range_exclusive.rs +++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/range_exclusive.rs @@ -5,7 +5,7 @@ //! ``` use ide_db::famous_defs::FamousDefs; use span::EditionedFileId; -use syntax::{ast, SyntaxToken, T}; +use syntax::{SyntaxToken, T, ast}; use crate::{InlayHint, InlayHintsConfig}; @@ -41,8 +41,8 @@ fn inlay_hint(token: SyntaxToken) -> InlayHint { #[cfg(test)] mod tests { use crate::{ - inlay_hints::tests::{check_with_config, DISABLED_CONFIG}, InlayHintsConfig, + inlay_hints::tests::{DISABLED_CONFIG, check_with_config}, }; #[test] diff --git a/src/tools/rust-analyzer/crates/ide/src/interpret.rs b/src/tools/rust-analyzer/crates/ide/src/interpret.rs index 74dad488b4d30..8f9d2d6bf111b 100644 --- a/src/tools/rust-analyzer/crates/ide/src/interpret.rs +++ b/src/tools/rust-analyzer/crates/ide/src/interpret.rs @@ -1,8 +1,8 @@ use hir::{ConstEvalError, DefWithBody, DisplayTarget, Semantics}; -use ide_db::{base_db::SourceRootDatabase, FilePosition, LineIndexDatabase, RootDatabase}; +use ide_db::{FilePosition, LineIndexDatabase, RootDatabase, base_db::SourceDatabase}; use std::time::{Duration, Instant}; use stdx::format_to; -use syntax::{algo::ancestors_at_offset, ast, AstNode, TextRange}; +use syntax::{AstNode, TextRange, algo::ancestors_at_offset, ast}; // Feature: Interpret A Function, Static Or Const. // @@ -35,10 +35,10 @@ fn find_and_interpret(db: &RootDatabase, position: FilePosition) -> Option<(Dura _ => return None, }; let span_formatter = |file_id, text_range: TextRange| { - let path = &db - .source_root(db.file_source_root(file_id)) - .path_for_file(&file_id) - .map(|x| x.to_string()); + let source_root = db.file_source_root(file_id).source_root_id(db); + let source_root = db.source_root(source_root).source_root(db); + + let path = source_root.path_for_file(&file_id).map(|x| x.to_string()); let path = path.as_deref().unwrap_or(""); match db.line_index(file_id).try_line_col(text_range.start()) { Some(line_col) => format!("file://{path}:{}:{}", line_col.line + 1, line_col.col), @@ -64,10 +64,9 @@ pub(crate) fn render_const_eval_error( display_target: DisplayTarget, ) -> String { let span_formatter = |file_id, text_range: TextRange| { - let path = &db - .source_root(db.file_source_root(file_id)) - .path_for_file(&file_id) - .map(|x| x.to_string()); + let source_root = db.file_source_root(file_id).source_root_id(db); + let source_root = db.source_root(source_root).source_root(db); + let path = source_root.path_for_file(&file_id).map(|x| x.to_string()); let path = path.as_deref().unwrap_or(""); match db.line_index(file_id).try_line_col(text_range.start()) { Some(line_col) => format!("file://{path}:{}:{}", line_col.line + 1, line_col.col), diff --git a/src/tools/rust-analyzer/crates/ide/src/join_lines.rs b/src/tools/rust-analyzer/crates/ide/src/join_lines.rs index ea18a97070c3a..0188c105faa78 100644 --- a/src/tools/rust-analyzer/crates/ide/src/join_lines.rs +++ b/src/tools/rust-analyzer/crates/ide/src/join_lines.rs @@ -2,10 +2,10 @@ use ide_assists::utils::extract_trivial_expression; use ide_db::syntax_helpers::node_ext::expr_as_name_ref; use itertools::Itertools; use syntax::{ - ast::{self, AstNode, AstToken, IsString}, NodeOrToken, SourceFile, SyntaxElement, SyntaxKind::{self, USE_TREE, WHITESPACE}, - SyntaxToken, TextRange, TextSize, T, + SyntaxToken, T, TextRange, TextSize, + ast::{self, AstNode, AstToken, IsString}, }; use ide_db::text_edit::{TextEdit, TextEditBuilder}; diff --git a/src/tools/rust-analyzer/crates/ide/src/lib.rs b/src/tools/rust-analyzer/crates/ide/src/lib.rs index 8ac1a96cc6524..aa525a86123dc 100644 --- a/src/tools/rust-analyzer/crates/ide/src/lib.rs +++ b/src/tools/rust-analyzer/crates/ide/src/lib.rs @@ -20,6 +20,7 @@ mod navigation_target; mod annotations; mod call_hierarchy; +mod child_modules; mod doc_links; mod expand_macro; mod extend_selection; @@ -57,23 +58,22 @@ mod view_memory_layout; mod view_mir; mod view_syntax_tree; -use std::{iter, panic::UnwindSafe}; +use std::panic::{AssertUnwindSafe, UnwindSafe}; use cfg::CfgOptions; use fetch_crates::CrateInfo; -use hir::{sym, ChangeWithProcMacros}; +use hir::{ChangeWithProcMacros, EditionedFileId, sym}; use ide_db::{ + FxHashMap, FxIndexSet, LineIndexDatabase, base_db::{ - ra_salsa::{self, ParallelDatabase}, - CrateOrigin, CrateWorkspaceData, Env, FileLoader, FileSet, SourceDatabase, - SourceRootDatabase, VfsPath, + CrateOrigin, CrateWorkspaceData, Env, FileSet, RootQueryDb, SourceDatabase, VfsPath, + salsa::Cancelled, }, - prime_caches, symbol_index, FxHashMap, FxIndexSet, LineIndexDatabase, + prime_caches, symbol_index, }; -use span::EditionedFileId; use syntax::SourceFile; use triomphe::Arc; -use view_memory_layout::{view_memory_layout, RecursiveMemoryLayout}; +use view_memory_layout::{RecursiveMemoryLayout, view_memory_layout}; use crate::navigation_target::ToNav; @@ -110,8 +110,8 @@ pub use crate::{ StaticIndex, StaticIndexedFile, TokenId, TokenStaticData, VendoredLibrariesConfig, }, syntax_highlighting::{ - tags::{Highlight, HlMod, HlMods, HlOperator, HlPunct, HlTag}, HighlightConfig, HlRange, + tags::{Highlight, HlMod, HlMods, HlOperator, HlPunct, HlTag}, }, test_explorer::{TestItem, TestItemKind}, }; @@ -123,9 +123,10 @@ pub use ide_completion::{ CallableSnippets, CompletionConfig, CompletionFieldsToResolve, CompletionItem, CompletionItemKind, CompletionItemRefMode, CompletionRelevance, Snippet, SnippetScope, }; -pub use ide_db::text_edit::{Indel, TextEdit}; pub use ide_db::{ - base_db::{Cancelled, CrateGraph, CrateId, FileChange, SourceRoot, SourceRootId}, + FileId, FilePosition, FileRange, RootDatabase, Severity, SymbolKind, + assists::ExprFillDefaultMode, + base_db::{Crate, CrateGraphBuilder, FileChange, SourceRoot, SourceRootId}, documentation::Documentation, label::Label, line_index::{LineCol, LineIndex}, @@ -133,9 +134,9 @@ pub use ide_db::{ search::{ReferenceCategory, SearchScope}, source_change::{FileSystemEdit, SnippetEdit, SourceChange}, symbol_index::Query, - FileId, FilePosition, FileRange, RootDatabase, Severity, SymbolKind, + text_edit::{Indel, TextEdit}, }; -pub use ide_diagnostics::{Diagnostic, DiagnosticCode, DiagnosticsConfig, ExprFillDefaultMode}; +pub use ide_diagnostics::{Diagnostic, DiagnosticCode, DiagnosticsConfig}; pub use ide_ssr::SsrError; pub use span::Edition; pub use syntax::{TextRange, TextSize}; @@ -181,7 +182,7 @@ impl AnalysisHost { /// Returns a snapshot of the current state, which you can query for /// semantic information. pub fn analysis(&self) -> Analysis { - Analysis { db: self.db.snapshot() } + Analysis { db: self.db.clone() } } /// Applies changes to the current state of the world. If there are @@ -217,7 +218,7 @@ impl Default for AnalysisHost { /// `Analysis` are canceled (most method return `Err(Canceled)`). #[derive(Debug)] pub struct Analysis { - db: ra_salsa::Snapshot, + db: RootDatabase, } // As a general design guideline, `Analysis` API are intended to be independent @@ -237,34 +238,37 @@ impl Analysis { file_set.insert(file_id, VfsPath::new_virtual_path("/main.rs".to_owned())); let source_root = SourceRoot::new_local(file_set); - let mut change = ChangeWithProcMacros::new(); + let mut change = ChangeWithProcMacros::default(); change.set_roots(vec![source_root]); - let mut crate_graph = CrateGraph::default(); + let mut crate_graph = CrateGraphBuilder::default(); // FIXME: cfg options // Default to enable test for single file. let mut cfg_options = CfgOptions::default(); - cfg_options.insert_atom(sym::test.clone()); + + // FIXME: This is less than ideal + let proc_macro_cwd = Arc::new( + TryFrom::try_from(&*std::env::current_dir().unwrap().as_path().to_string_lossy()) + .unwrap(), + ); + cfg_options.insert_atom(sym::test); crate_graph.add_crate_root( file_id, Edition::CURRENT, None, None, - Arc::new(cfg_options), + cfg_options, None, Env::default(), CrateOrigin::Local { repo: None, name: None }, false, - None, - ); - change.change_file(file_id, Some(text)); - let ws_data = crate_graph - .iter() - .zip(iter::repeat(Arc::new(CrateWorkspaceData { + proc_macro_cwd, + Arc::new(CrateWorkspaceData { data_layout: Err("fixture has no layout".into()), toolchain: None, - }))) - .collect(); - change.set_crate_graph(crate_graph, ws_data); + }), + ); + change.change_file(file_id, Some(text)); + change.set_crate_graph(crate_graph); host.apply_change(change); (host.analysis(), file_id) @@ -276,12 +280,12 @@ impl Analysis { } pub fn source_root_id(&self, file_id: FileId) -> Cancellable { - self.with_db(|db| db.file_source_root(file_id)) + self.with_db(|db| db.file_source_root(file_id).source_root_id(db)) } pub fn is_local_source_root(&self, source_root_id: SourceRootId) -> Cancellable { self.with_db(|db| { - let sr = db.source_root(source_root_id); + let sr = db.source_root(source_root_id).source_root(db); !sr.is_library }) } @@ -295,18 +299,25 @@ impl Analysis { /// Gets the text of the source file. pub fn file_text(&self, file_id: FileId) -> Cancellable> { - self.with_db(|db| SourceDatabase::file_text(db, file_id)) + self.with_db(|db| SourceDatabase::file_text(db, file_id).text(db)) } /// Gets the syntax tree of the file. pub fn parse(&self, file_id: FileId) -> Cancellable { // FIXME edition - self.with_db(|db| db.parse(EditionedFileId::current_edition(file_id)).tree()) + self.with_db(|db| { + let editioned_file_id_wrapper = EditionedFileId::current_edition(&self.db, file_id); + + db.parse(editioned_file_id_wrapper).tree() + }) } /// Returns true if this file belongs to an immutable library. pub fn is_library_file(&self, file_id: FileId) -> Cancellable { - self.with_db(|db| db.source_root(db.file_source_root(file_id)).is_library) + self.with_db(|db| { + let source_root = db.file_source_root(file_id).source_root_id(db); + db.source_root(source_root).source_root(db).is_library + }) } /// Gets the file's `LineIndex`: data structure to convert between absolute @@ -324,7 +335,8 @@ impl Analysis { /// supported). pub fn matching_brace(&self, position: FilePosition) -> Cancellable> { self.with_db(|db| { - let parse = db.parse(EditionedFileId::current_edition(position.file_id)); + let file_id = EditionedFileId::current_edition(&self.db, position.file_id); + let parse = db.parse(file_id); let file = parse.tree(); matching_brace::matching_brace(&file, position.offset) }) @@ -358,7 +370,7 @@ impl Analysis { self.with_db(|db| test_explorer::discover_tests_in_crate_by_test_id(db, crate_id)) } - pub fn discover_tests_in_crate(&self, crate_id: CrateId) -> Cancellable> { + pub fn discover_tests_in_crate(&self, crate_id: Crate) -> Cancellable> { self.with_db(|db| test_explorer::discover_tests_in_crate(db, crate_id)) } @@ -383,7 +395,9 @@ impl Analysis { /// stuff like trailing commas. pub fn join_lines(&self, config: &JoinLinesConfig, frange: FileRange) -> Cancellable { self.with_db(|db| { - let parse = db.parse(EditionedFileId::current_edition(frange.file_id)); + let editioned_file_id_wrapper = + EditionedFileId::current_edition(&self.db, frange.file_id); + let parse = db.parse(editioned_file_id_wrapper); join_lines::join_lines(config, &parse.tree(), frange.range) }) } @@ -419,9 +433,9 @@ impl Analysis { pub fn file_structure(&self, file_id: FileId) -> Cancellable> { // FIXME: Edition self.with_db(|db| { - file_structure::file_structure( - &db.parse(EditionedFileId::current_edition(file_id)).tree(), - ) + let editioned_file_id_wrapper = EditionedFileId::current_edition(&self.db, file_id); + + file_structure::file_structure(&db.parse(editioned_file_id_wrapper).tree()) }) } @@ -450,9 +464,9 @@ impl Analysis { /// Returns the set of folding ranges. pub fn folding_ranges(&self, file_id: FileId) -> Cancellable> { self.with_db(|db| { - folding_ranges::folding_ranges( - &db.parse(EditionedFileId::current_edition(file_id)).tree(), - ) + let editioned_file_id_wrapper = EditionedFileId::current_edition(&self.db, file_id); + + folding_ranges::folding_ranges(&db.parse(editioned_file_id_wrapper).tree()) }) } @@ -506,7 +520,11 @@ impl Analysis { position: FilePosition, search_scope: Option, ) -> Cancellable>> { - self.with_db(|db| references::find_all_refs(&Semantics::new(db), position, search_scope)) + let search_scope = AssertUnwindSafe(search_scope); + self.with_db(|db| { + let _ = &search_scope; + references::find_all_refs(&Semantics::new(db), position, search_scope.0) + }) } /// Returns a short text describing element at position. @@ -577,34 +595,44 @@ impl Analysis { self.with_db(|db| parent_module::parent_module(db, position)) } + /// Returns vec of `mod name;` declaration which are created by the current module. + pub fn child_modules(&self, position: FilePosition) -> Cancellable> { + self.with_db(|db| child_modules::child_modules(db, position)) + } + /// Returns crates that this file belongs to. - pub fn crates_for(&self, file_id: FileId) -> Cancellable> { + pub fn crates_for(&self, file_id: FileId) -> Cancellable> { self.with_db(|db| parent_module::crates_for(db, file_id)) } /// Returns crates that this file belongs to. - pub fn transitive_rev_deps(&self, crate_id: CrateId) -> Cancellable> { - self.with_db(|db| db.crate_graph().transitive_rev_deps(crate_id).collect()) + pub fn transitive_rev_deps(&self, crate_id: Crate) -> Cancellable> { + self.with_db(|db| Vec::from_iter(db.transitive_rev_deps(crate_id))) } /// Returns crates that this file *might* belong to. - pub fn relevant_crates_for(&self, file_id: FileId) -> Cancellable> { + pub fn relevant_crates_for(&self, file_id: FileId) -> Cancellable> { self.with_db(|db| db.relevant_crates(file_id).iter().copied().collect()) } /// Returns the edition of the given crate. - pub fn crate_edition(&self, crate_id: CrateId) -> Cancellable { - self.with_db(|db| db.crate_graph()[crate_id].edition) + pub fn crate_edition(&self, crate_id: Crate) -> Cancellable { + self.with_db(|db| crate_id.data(db).edition) + } + + /// Returns whether the given crate is a proc macro. + pub fn is_proc_macro_crate(&self, crate_id: Crate) -> Cancellable { + self.with_db(|db| crate_id.data(db).is_proc_macro) } /// Returns true if this crate has `no_std` or `no_core` specified. - pub fn is_crate_no_std(&self, crate_id: CrateId) -> Cancellable { + pub fn is_crate_no_std(&self, crate_id: Crate) -> Cancellable { self.with_db(|db| hir::db::DefDatabase::crate_def_map(db, crate_id).is_no_std()) } /// Returns the root file of the given crate. - pub fn crate_root(&self, crate_id: CrateId) -> Cancellable { - self.with_db(|db| db.crate_graph()[crate_id].root_file_id) + pub fn crate_root(&self, crate_id: Crate) -> Cancellable { + self.with_db(|db| crate_id.data(db).root_file_id) } /// Returns the set of possible targets to run for the current file. @@ -618,7 +646,11 @@ impl Analysis { position: FilePosition, search_scope: Option, ) -> Cancellable> { - self.with_db(|db| runnables::related_tests(db, position, search_scope)) + let search_scope = AssertUnwindSafe(search_scope); + self.with_db(|db| { + let _ = &search_scope; + runnables::related_tests(db, position, search_scope.0) + }) } /// Computes syntax highlighting for the given file @@ -717,7 +749,7 @@ impl Analysis { frange: FileRange, ) -> Cancellable> { let include_fixes = match &assist_config.allowed { - Some(it) => it.iter().any(|&it| it == AssistKind::None || it == AssistKind::QuickFix), + Some(it) => it.contains(&AssistKind::QuickFix), None => true, }; @@ -811,6 +843,10 @@ impl Analysis { self.with_db(|db| view_memory_layout(db, position)) } + pub fn editioned_file_id_to_vfs(&self, file_id: hir::EditionedFileId) -> FileId { + file_id.file_id(&self.db) + } + /// Performs an operation on the database that may be canceled. /// /// rust-analyzer needs to be able to answer semantic questions about the @@ -828,7 +864,8 @@ impl Analysis { where F: FnOnce(&RootDatabase) -> T + std::panic::UnwindSafe, { - Cancelled::catch(|| f(&self.db)) + let snap = self.db.clone(); + Cancelled::catch(|| f(&snap)) } } diff --git a/src/tools/rust-analyzer/crates/ide/src/matching_brace.rs b/src/tools/rust-analyzer/crates/ide/src/matching_brace.rs index 67346ea9cf90f..b2b91d6e3cf34 100644 --- a/src/tools/rust-analyzer/crates/ide/src/matching_brace.rs +++ b/src/tools/rust-analyzer/crates/ide/src/matching_brace.rs @@ -1,6 +1,6 @@ use syntax::{ + SourceFile, SyntaxKind, T, TextSize, ast::{self, AstNode}, - SourceFile, SyntaxKind, TextSize, T, }; // Feature: Matching Brace diff --git a/src/tools/rust-analyzer/crates/ide/src/moniker.rs b/src/tools/rust-analyzer/crates/ide/src/moniker.rs index 5754b4fa82f43..795c1f2ca3c0b 100644 --- a/src/tools/rust-analyzer/crates/ide/src/moniker.rs +++ b/src/tools/rust-analyzer/crates/ide/src/moniker.rs @@ -5,15 +5,15 @@ use core::fmt; use hir::{Adt, AsAssocItem, Crate, HirDisplay, MacroKind, Semantics}; use ide_db::{ + FilePosition, RootDatabase, base_db::{CrateOrigin, LangCrateOrigin}, defs::{Definition, IdentClass}, helpers::pick_best_token, - FilePosition, RootDatabase, }; use itertools::Itertools; use syntax::{AstNode, SyntaxKind::*, T}; -use crate::{doc_links::token_as_doc_comment, parent_module::crates_for, RangeInfo}; +use crate::{RangeInfo, doc_links::token_as_doc_comment, parent_module::crates_for}; #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] pub enum MonikerDescriptorKind { @@ -194,11 +194,7 @@ pub(crate) fn def_to_kind(db: &RootDatabase, def: Definition) -> SymbolInformati Definition::Function(it) => { if it.as_assoc_item(db).is_some() { if it.has_self_param(db) { - if it.has_body(db) { - Method - } else { - TraitMethod - } + if it.has_body(db) { Method } else { TraitMethod } } else { StaticMethod } @@ -405,7 +401,7 @@ fn display(db: &RootDatabase, module: hir::Module, it: T) -> Stri #[cfg(test)] mod tests { - use crate::{fixture, MonikerResult}; + use crate::{MonikerResult, fixture}; use super::MonikerKind; @@ -455,7 +451,7 @@ mod tests { assert_eq!(x.len(), 1); match x.into_iter().next().unwrap() { MonikerResult::Local { enclosing_moniker } => { - panic!("Unexpected local enclosed in {:?}", enclosing_moniker); + panic!("Unexpected local enclosed in {enclosing_moniker:?}"); } MonikerResult::Moniker(x) => { assert_eq!(identifier, x.identifier.to_string()); diff --git a/src/tools/rust-analyzer/crates/ide/src/move_item.rs b/src/tools/rust-analyzer/crates/ide/src/move_item.rs index 3fb3a788b9182..f3bb3df1cd8d7 100644 --- a/src/tools/rust-analyzer/crates/ide/src/move_item.rs +++ b/src/tools/rust-analyzer/crates/ide/src/move_item.rs @@ -3,9 +3,9 @@ use std::{iter::once, mem}; use hir::Semantics; use ide_db::syntax_helpers::tree_diff::diff; use ide_db::text_edit::{TextEdit, TextEditBuilder}; -use ide_db::{helpers::pick_best_token, FileRange, RootDatabase}; +use ide_db::{FileRange, RootDatabase, helpers::pick_best_token}; use itertools::Itertools; -use syntax::{ast, match_ast, AstNode, SyntaxElement, SyntaxKind, SyntaxNode, TextRange}; +use syntax::{AstNode, SyntaxElement, SyntaxKind, SyntaxNode, TextRange, ast, match_ast}; #[derive(Copy, Clone, Debug)] pub enum Direction { @@ -174,7 +174,7 @@ fn replace_nodes<'a>( #[cfg(test)] mod tests { use crate::fixture; - use expect_test::{expect, Expect}; + use expect_test::{Expect, expect}; use crate::Direction; diff --git a/src/tools/rust-analyzer/crates/ide/src/navigation_target.rs b/src/tools/rust-analyzer/crates/ide/src/navigation_target.rs index d67aaac06fb95..9334b73fc7b4f 100644 --- a/src/tools/rust-analyzer/crates/ide/src/navigation_target.rs +++ b/src/tools/rust-analyzer/crates/ide/src/navigation_target.rs @@ -5,19 +5,20 @@ use std::fmt; use arrayvec::ArrayVec; use either::Either; use hir::{ - db::ExpandDatabase, symbols::FileSymbol, AssocItem, FieldSource, HasContainer, HasCrate, - HasSource, HirDisplay, HirFileId, InFile, LocalSource, ModuleSource, + AssocItem, FieldSource, HasContainer, HasCrate, HasSource, HirDisplay, HirFileId, InFile, + LocalSource, ModuleSource, db::ExpandDatabase, symbols::FileSymbol, }; use ide_db::{ + FileId, FileRange, RootDatabase, SymbolKind, defs::Definition, documentation::{Documentation, HasDocs}, - FileId, FileRange, RootDatabase, SymbolKind, }; use span::Edition; use stdx::never; use syntax::{ + AstNode, SmolStr, SyntaxNode, TextRange, ToSmolStr, ast::{self, HasName}, - format_smolstr, AstNode, SmolStr, SyntaxNode, TextRange, ToSmolStr, + format_smolstr, }; /// `NavigationTarget` represents an element in the editor's UI which you can @@ -816,14 +817,10 @@ pub(crate) fn orig_range_with_focus_r( ) -> UpmappingResult<(FileRange, Option)> { let Some(name) = focus_range else { return orig_range_r(db, hir_file, value) }; - let call_kind = - || db.lookup_intern_macro_call(hir_file.macro_file().unwrap().macro_call_id).kind; + let call_kind = || db.lookup_intern_macro_call(hir_file.macro_file().unwrap()).kind; - let def_range = || { - db.lookup_intern_macro_call(hir_file.macro_file().unwrap().macro_call_id) - .def - .definition_range(db) - }; + let def_range = + || db.lookup_intern_macro_call(hir_file.macro_file().unwrap()).def.definition_range(db); // FIXME: Also make use of the syntax context to determine which site we are at? let value_range = InFile::new(hir_file, value).original_node_file_range_opt(db); @@ -900,7 +897,7 @@ pub(crate) fn orig_range_with_focus_r( UpmappingResult { call_site: ( - call_site_range.into(), + call_site_range.into_file_id(db), call_site_focus.and_then(|hir::FileRange { file_id, range }| { if call_site_range.file_id == file_id && call_site_range.range.contains_range(range) { @@ -912,7 +909,7 @@ pub(crate) fn orig_range_with_focus_r( ), def_site: def_site.map(|(def_site_range, def_site_focus)| { ( - def_site_range.into(), + def_site_range.into_file_id(db), def_site_focus.and_then(|hir::FileRange { file_id, range }| { if def_site_range.file_id == file_id && def_site_range.range.contains_range(range) @@ -933,7 +930,10 @@ fn orig_range( value: &SyntaxNode, ) -> UpmappingResult<(FileRange, Option)> { UpmappingResult { - call_site: (InFile::new(hir_file, value).original_file_range_rooted(db).into(), None), + call_site: ( + InFile::new(hir_file, value).original_file_range_rooted(db).into_file_id(db), + None, + ), def_site: None, } } @@ -944,7 +944,10 @@ fn orig_range_r( value: TextRange, ) -> UpmappingResult<(FileRange, Option)> { UpmappingResult { - call_site: (InFile::new(hir_file, value).original_node_file_range(db).0.into(), None), + call_site: ( + InFile::new(hir_file, value).original_node_file_range(db).0.into_file_id(db), + None, + ), def_site: None, } } @@ -953,7 +956,7 @@ fn orig_range_r( mod tests { use expect_test::expect; - use crate::{fixture, Query}; + use crate::{Query, fixture}; #[test] fn test_nav_for_symbol() { diff --git a/src/tools/rust-analyzer/crates/ide/src/parent_module.rs b/src/tools/rust-analyzer/crates/ide/src/parent_module.rs index 6d82f9b0634b4..6dc01c4506336 100644 --- a/src/tools/rust-analyzer/crates/ide/src/parent_module.rs +++ b/src/tools/rust-analyzer/crates/ide/src/parent_module.rs @@ -1,7 +1,7 @@ -use hir::{db::DefDatabase, Semantics}; +use hir::{Semantics, db::DefDatabase}; use ide_db::{ - base_db::{CrateId, FileLoader}, FileId, FilePosition, RootDatabase, + base_db::{Crate, RootQueryDb}, }; use itertools::Itertools; use syntax::{ @@ -53,11 +53,13 @@ pub(crate) fn parent_module(db: &RootDatabase, position: FilePosition) -> Vec Vec { +pub(crate) fn crates_for(db: &RootDatabase, file_id: FileId) -> Vec { db.relevant_crates(file_id) .iter() .copied() - .filter(|&crate_id| db.crate_def_map(crate_id).modules_for_file(file_id).next().is_some()) + .filter(|&crate_id| { + db.crate_def_map(crate_id).modules_for_file(db, file_id).next().is_some() + }) .sorted() .collect() } diff --git a/src/tools/rust-analyzer/crates/ide/src/references.rs b/src/tools/rust-analyzer/crates/ide/src/references.rs index 069818d50e76a..4fa116444b7ff 100644 --- a/src/tools/rust-analyzer/crates/ide/src/references.rs +++ b/src/tools/rust-analyzer/crates/ide/src/references.rs @@ -11,21 +11,22 @@ use hir::{PathResolution, Semantics}; use ide_db::{ + FileId, RootDatabase, defs::{Definition, NameClass, NameRefClass}, search::{ReferenceCategory, SearchScope, UsageSearchResult}, - FileId, RootDatabase, }; use itertools::Itertools; use nohash_hasher::IntMap; use span::Edition; use syntax::{ - ast::{self, HasName}, - match_ast, AstNode, + AstNode, SyntaxKind::*, - SyntaxNode, TextRange, TextSize, T, + SyntaxNode, T, TextRange, TextSize, + ast::{self, HasName}, + match_ast, }; -use crate::{highlight_related, FilePosition, HighlightedRange, NavigationTarget, TryToNav}; +use crate::{FilePosition, HighlightedRange, NavigationTarget, TryToNav, highlight_related}; #[derive(Debug, Clone)] pub struct ReferenceSearchResult { @@ -67,7 +68,7 @@ pub(crate) fn find_all_refs( .into_iter() .map(|(file_id, refs)| { ( - file_id.into(), + file_id.file_id(sema.db), refs.into_iter() .map(|file_ref| (file_ref.range, file_ref.category)) .unique() @@ -123,11 +124,11 @@ pub(crate) fn find_all_refs( } } -pub(crate) fn find_defs<'a>( - sema: &'a Semantics<'_, RootDatabase>, +pub(crate) fn find_defs( + sema: &Semantics<'_, RootDatabase>, syntax: &SyntaxNode, offset: TextSize, -) -> Option + 'a> { +) -> Option> { let token = syntax.token_at_offset(offset).find(|t| { matches!( t.kind(), @@ -306,8 +307,10 @@ fn handle_control_flow_keywords( FilePosition { file_id, offset }: FilePosition, ) -> Option { let file = sema.parse_guess_edition(file_id); - let edition = - sema.attach_first_edition(file_id).map(|it| it.edition()).unwrap_or(Edition::CURRENT); + let edition = sema + .attach_first_edition(file_id) + .map(|it| it.edition(sema.db)) + .unwrap_or(Edition::CURRENT); let token = file.syntax().token_at_offset(offset).find(|t| t.kind().is_keyword(edition))?; let references = match token.kind() { @@ -327,7 +330,7 @@ fn handle_control_flow_keywords( .into_iter() .map(|HighlightedRange { range, category }| (range, category)) .collect(); - (file_id.into(), ranges) + (file_id.file_id(sema.db), ranges) }) .collect(); @@ -336,12 +339,12 @@ fn handle_control_flow_keywords( #[cfg(test)] mod tests { - use expect_test::{expect, Expect}; - use ide_db::FileId; - use span::EditionedFileId; + use expect_test::{Expect, expect}; + use hir::EditionedFileId; + use ide_db::{FileId, RootDatabase}; use stdx::format_to; - use crate::{fixture, SearchScope}; + use crate::{SearchScope, fixture}; #[test] fn exclude_tests() { @@ -1003,7 +1006,9 @@ pub(super) struct Foo$0 { check_with_scope( code, - Some(SearchScope::single_file(EditionedFileId::current_edition(FileId::from_raw(2)))), + Some(&mut |db| { + SearchScope::single_file(EditionedFileId::current_edition(db, FileId::from_raw(2))) + }), expect![[r#" quux Function FileId(0) 19..35 26..30 @@ -1259,11 +1264,12 @@ impl Foo { fn check_with_scope( #[rust_analyzer::rust_fixture] ra_fixture: &str, - search_scope: Option, + search_scope: Option<&mut dyn FnMut(&RootDatabase) -> SearchScope>, expect: Expect, ) { let (analysis, pos) = fixture::position(ra_fixture); - let refs = analysis.find_all_refs(pos, search_scope).unwrap().unwrap(); + let refs = + analysis.find_all_refs(pos, search_scope.map(|it| it(&analysis.db))).unwrap().unwrap(); let mut actual = String::new(); for mut refs in refs { diff --git a/src/tools/rust-analyzer/crates/ide/src/rename.rs b/src/tools/rust-analyzer/crates/ide/src/rename.rs index d0e1c2097a7a9..e6cda60cd95b5 100644 --- a/src/tools/rust-analyzer/crates/ide/src/rename.rs +++ b/src/tools/rust-analyzer/crates/ide/src/rename.rs @@ -4,16 +4,16 @@ //! tests. This module also implements a couple of magic tricks, like renaming //! `self` and to `self` (to switch between associated function and method). -use hir::{AsAssocItem, HirFileIdExt, InFile, Semantics}; +use hir::{AsAssocItem, InFile, Semantics}; use ide_db::{ + FileId, FileRange, RootDatabase, defs::{Definition, NameClass, NameRefClass}, - rename::{bail, format_err, source_edit_from_references, IdentifierKind}, + rename::{IdentifierKind, bail, format_err, source_edit_from_references}, source_change::SourceChangeBuilder, - FileId, FileRange, RootDatabase, }; use itertools::Itertools; use stdx::{always, never}; -use syntax::{ast, AstNode, SyntaxKind, SyntaxNode, TextRange, TextSize}; +use syntax::{AstNode, SyntaxKind, SyntaxNode, TextRange, TextSize, ast}; use ide_db::text_edit::TextEdit; @@ -120,7 +120,7 @@ pub(crate) fn rename( source_change.extend(usages.references.get_mut(&file_id).iter().map(|refs| { ( position.file_id, - source_edit_from_references(refs, def, new_name, file_id.edition()), + source_edit_from_references(refs, def, new_name, file_id.edition(db)), ) })); @@ -297,7 +297,7 @@ fn find_definitions( // remove duplicates, comparing `Definition`s Ok(v.into_iter() .unique_by(|&(.., def)| def) - .map(|(a, b, c)| (a.into(), b, c)) + .map(|(a, b, c)| (a.into_file_id(sema.db), b, c)) .collect::>() .into_iter()) } @@ -368,10 +368,13 @@ fn rename_to_self( let usages = def.usages(sema).all(); let mut source_change = SourceChange::default(); source_change.extend(usages.iter().map(|(file_id, references)| { - (file_id.into(), source_edit_from_references(references, def, "self", file_id.edition())) + ( + file_id.file_id(sema.db), + source_edit_from_references(references, def, "self", file_id.edition(sema.db)), + ) })); source_change.insert_source_edit( - file_id.original_file(sema.db), + file_id.original_file(sema.db).file_id(sema.db), TextEdit::replace(param_source.syntax().text_range(), String::from(self_param)), ); Ok(source_change) @@ -402,9 +405,12 @@ fn rename_self_to_param( bail!("Cannot rename reference to `_` as it is being referenced multiple times"); } let mut source_change = SourceChange::default(); - source_change.insert_source_edit(file_id.original_file(sema.db), edit); + source_change.insert_source_edit(file_id.original_file(sema.db).file_id(sema.db), edit); source_change.extend(usages.iter().map(|(file_id, references)| { - (file_id.into(), source_edit_from_references(references, def, new_name, file_id.edition())) + ( + file_id.file_id(sema.db), + source_edit_from_references(references, def, new_name, file_id.edition(sema.db)), + ) })); Ok(source_change) } @@ -443,7 +449,7 @@ fn text_edit_from_self_param(self_param: &ast::SelfParam, new_name: &str) -> Opt #[cfg(test)] mod tests { - use expect_test::{expect, Expect}; + use expect_test::{Expect, expect}; use ide_db::source_change::SourceChange; use ide_db::text_edit::TextEdit; use itertools::Itertools; @@ -509,10 +515,9 @@ mod tests { let found_conflicts = source_change .source_file_edits .iter() + .filter(|(_, (edit, _))| edit.change_annotation().is_some()) .flat_map(|(file_id, (edit, _))| { - edit.into_iter() - .filter(|edit| edit.annotation.is_some()) - .map(move |edit| (*file_id, edit.delete)) + edit.into_iter().map(move |edit| (*file_id, edit.delete)) }) .sorted_unstable_by_key(|(file_id, range)| (*file_id, range.start())) .collect_vec(); @@ -1081,7 +1086,6 @@ mod foo$0; Indel { insert: "foo2", delete: 4..7, - annotation: None, }, ], ), @@ -1129,7 +1133,6 @@ use crate::foo$0::FooContent; Indel { insert: "quux", delete: 8..11, - annotation: None, }, ], ), @@ -1141,7 +1144,6 @@ use crate::foo$0::FooContent; Indel { insert: "quux", delete: 11..14, - annotation: None, }, ], ), @@ -1183,7 +1185,6 @@ mod fo$0o; Indel { insert: "foo2", delete: 4..7, - annotation: None, }, ], ), @@ -1232,7 +1233,6 @@ mod outer { mod fo$0o; } Indel { insert: "bar", delete: 16..19, - annotation: None, }, ], ), @@ -1304,7 +1304,6 @@ pub mod foo$0; Indel { insert: "foo2", delete: 27..30, - annotation: None, }, ], ), @@ -1316,7 +1315,6 @@ pub mod foo$0; Indel { insert: "foo2", delete: 8..11, - annotation: None, }, ], ), @@ -1372,7 +1370,6 @@ mod quux; Indel { insert: "foo2", delete: 4..7, - annotation: None, }, ], ), @@ -1506,12 +1503,10 @@ pub fn baz() {} Indel { insert: "r#fn", delete: 4..7, - annotation: None, }, Indel { insert: "r#fn", delete: 22..25, - annotation: None, }, ], ), @@ -1576,12 +1571,10 @@ pub fn baz() {} Indel { insert: "foo", delete: 4..8, - annotation: None, }, Indel { insert: "foo", delete: 23..27, - annotation: None, }, ], ), @@ -1643,7 +1636,6 @@ fn bar() { Indel { insert: "dyn", delete: 7..10, - annotation: None, }, ], ), @@ -1655,7 +1647,6 @@ fn bar() { Indel { insert: "r#dyn", delete: 18..21, - annotation: None, }, ], ), @@ -1685,7 +1676,6 @@ fn bar() { Indel { insert: "r#dyn", delete: 7..10, - annotation: None, }, ], ), @@ -1697,7 +1687,6 @@ fn bar() { Indel { insert: "dyn", delete: 18..21, - annotation: None, }, ], ), @@ -1727,7 +1716,6 @@ fn bar() { Indel { insert: "r#dyn", delete: 7..10, - annotation: None, }, ], ), @@ -1739,7 +1727,6 @@ fn bar() { Indel { insert: "dyn", delete: 18..21, - annotation: None, }, ], ), @@ -1776,12 +1763,10 @@ fn bar() { Indel { insert: "abc", delete: 7..10, - annotation: None, }, Indel { insert: "abc", delete: 32..35, - annotation: None, }, ], ), @@ -1793,7 +1778,6 @@ fn bar() { Indel { insert: "abc", delete: 18..23, - annotation: None, }, ], ), @@ -1827,12 +1811,10 @@ fn bar() { Indel { insert: "abc", delete: 7..12, - annotation: None, }, Indel { insert: "abc", delete: 34..39, - annotation: None, }, ], ), @@ -1844,7 +1826,6 @@ fn bar() { Indel { insert: "abc", delete: 18..21, - annotation: None, }, ], ), diff --git a/src/tools/rust-analyzer/crates/ide/src/runnables.rs b/src/tools/rust-analyzer/crates/ide/src/runnables.rs index b8deed01fb7f2..ab139602404cb 100644 --- a/src/tools/rust-analyzer/crates/ide/src/runnables.rs +++ b/src/tools/rust-analyzer/crates/ide/src/runnables.rs @@ -4,28 +4,29 @@ use arrayvec::ArrayVec; use ast::HasName; use cfg::{CfgAtom, CfgExpr}; use hir::{ - db::HirDatabase, sym, symbols::FxIndexSet, AsAssocItem, AttrsWithOwner, HasAttrs, HasCrate, - HasSource, HirFileIdExt, ModPath, Name, PathKind, Semantics, Symbol, + AsAssocItem, AttrsWithOwner, HasAttrs, HasCrate, HasSource, ModPath, Name, PathKind, Semantics, + Symbol, db::HirDatabase, sym, symbols::FxIndexSet, }; use ide_assists::utils::{has_test_related_attribute, test_related_attribute_syn}; use ide_db::{ - base_db::SourceDatabase, + FilePosition, FxHashMap, FxIndexMap, RootDatabase, SymbolKind, + base_db::RootQueryDb, defs::Definition, documentation::docs_from_attrs, helpers::visit_file_defs, search::{FileReferenceNode, SearchScope}, - FilePosition, FxHashMap, FxIndexMap, RootDatabase, SymbolKind, }; use itertools::Itertools; use smallvec::SmallVec; use span::{Edition, TextSize}; use stdx::format_to; use syntax::{ + SmolStr, SyntaxNode, ToSmolStr, ast::{self, AstNode}, - format_smolstr, SmolStr, SyntaxNode, ToSmolStr, + format_smolstr, }; -use crate::{references, FileId, NavigationTarget, ToNav, TryToNav}; +use crate::{FileId, NavigationTarget, ToNav, TryToNav, references}; #[derive(Debug, Clone, Hash, PartialEq, Eq)] pub struct Runnable { @@ -284,8 +285,10 @@ fn find_related_tests_in_module( let file_id = mod_source.file_id.original_file(sema.db); let mod_scope = SearchScope::file_range(hir::FileRange { file_id, range: mod_source.value }); - let fn_pos = - FilePosition { file_id: file_id.into(), offset: fn_name.syntax().text_range().start() }; + let fn_pos = FilePosition { + file_id: file_id.file_id(sema.db), + offset: fn_name.syntax().text_range().start(), + }; find_related_tests(sema, syntax, fn_pos, Some(mod_scope), tests) } @@ -499,7 +502,7 @@ fn module_def_doctest(db: &RootDatabase, def: Definition) -> Option { let krate = def.krate(db); let edition = krate.map(|it| it.edition(db)).unwrap_or(Edition::CURRENT); let display_target = krate - .unwrap_or_else(|| (*db.crate_graph().crates_in_topological_order().last().unwrap()).into()) + .unwrap_or_else(|| (*db.all_crates().last().expect("no crate graph present")).into()) .to_display_target(db); if !has_runnable_doc_test(&attrs) { return None; @@ -752,7 +755,7 @@ impl UpdateTest { #[cfg(test)] mod tests { - use expect_test::{expect, Expect}; + use expect_test::{Expect, expect}; use crate::fixture; @@ -1209,13 +1212,13 @@ impl Foo { r#" //- /lib.rs $0 -macro_rules! gen { +macro_rules! generate { () => { #[test] fn foo_test() {} } } -macro_rules! gen2 { +macro_rules! generate2 { () => { mod tests2 { #[test] @@ -1223,25 +1226,25 @@ macro_rules! gen2 { } } } -macro_rules! gen_main { +macro_rules! generate_main { () => { fn main() {} } } mod tests { - gen!(); + generate!(); } -gen2!(); -gen_main!(); +generate2!(); +generate_main!(); "#, expect![[r#" [ - "(TestMod, NavigationTarget { file_id: FileId(0), full_range: 0..315, name: \"\", kind: Module })", - "(TestMod, NavigationTarget { file_id: FileId(0), full_range: 267..292, focus_range: 271..276, name: \"tests\", kind: Module, description: \"mod tests\" })", - "(Test, NavigationTarget { file_id: FileId(0), full_range: 283..290, name: \"foo_test\", kind: Function })", - "(TestMod, NavigationTarget { file_id: FileId(0), full_range: 293..301, name: \"tests2\", kind: Module, description: \"mod tests2\" }, true)", - "(Test, NavigationTarget { file_id: FileId(0), full_range: 293..301, name: \"foo_test2\", kind: Function }, true)", - "(Bin, NavigationTarget { file_id: FileId(0), full_range: 302..314, name: \"main\", kind: Function })", + "(TestMod, NavigationTarget { file_id: FileId(0), full_range: 0..345, name: \"\", kind: Module })", + "(TestMod, NavigationTarget { file_id: FileId(0), full_range: 282..312, focus_range: 286..291, name: \"tests\", kind: Module, description: \"mod tests\" })", + "(Test, NavigationTarget { file_id: FileId(0), full_range: 298..310, name: \"foo_test\", kind: Function })", + "(TestMod, NavigationTarget { file_id: FileId(0), full_range: 313..326, name: \"tests2\", kind: Module, description: \"mod tests2\" }, true)", + "(Test, NavigationTarget { file_id: FileId(0), full_range: 313..326, name: \"foo_test2\", kind: Function }, true)", + "(Bin, NavigationTarget { file_id: FileId(0), full_range: 327..344, name: \"main\", kind: Function })", ] "#]], ); diff --git a/src/tools/rust-analyzer/crates/ide/src/signature_help.rs b/src/tools/rust-analyzer/crates/ide/src/signature_help.rs index b5468a5aee9ff..0e17b35590747 100644 --- a/src/tools/rust-analyzer/crates/ide/src/signature_help.rs +++ b/src/tools/rust-analyzer/crates/ide/src/signature_help.rs @@ -5,20 +5,22 @@ use std::collections::BTreeSet; use either::Either; use hir::{ - AssocItem, DisplayTarget, GenericParam, HirDisplay, ModuleDef, PathResolution, Semantics, Trait, + AssocItem, DisplayTarget, GenericDef, GenericParam, HirDisplay, ModuleDef, PathResolution, + Semantics, Trait, }; use ide_db::{ - active_parameter::{callable_for_node, generic_def_for_node}, - documentation::{Documentation, HasDocs}, FilePosition, FxIndexMap, + active_parameter::{callable_for_arg_list, generic_def_for_node}, + documentation::{Documentation, HasDocs}, }; +use itertools::Itertools; use span::Edition; use stdx::format_to; use syntax::{ - algo, - ast::{self, AstChildren, HasArgList}, - match_ast, AstNode, Direction, NodeOrToken, SyntaxElementChildren, SyntaxNode, SyntaxToken, - TextRange, TextSize, ToSmolStr, T, + AstNode, Direction, NodeOrToken, SyntaxElementChildren, SyntaxNode, SyntaxToken, T, TextRange, + TextSize, ToSmolStr, algo, + ast::{self, AstChildren}, + match_ast, }; use crate::RootDatabase; @@ -83,8 +85,8 @@ pub(crate) fn signature_help( .and_then(|tok| algo::skip_trivia_token(tok, Direction::Prev))?; let token = sema.descend_into_macros_single_exact(token); let edition = - sema.attach_first_edition(file_id).map(|it| it.edition()).unwrap_or(Edition::CURRENT); - let display_target = sema.first_crate_or_default(file_id).to_display_target(db); + sema.attach_first_edition(file_id).map(|it| it.edition(db)).unwrap_or(Edition::CURRENT); + let display_target = sema.first_crate(file_id)?.to_display_target(db); for node in token.parent_ancestors() { match_ast! { @@ -163,20 +165,8 @@ fn signature_help_for_call( edition: Edition, display_target: DisplayTarget, ) -> Option { - // Find the calling expression and its NameRef - let mut nodes = arg_list.syntax().ancestors().skip(1); - let calling_node = loop { - if let Some(callable) = ast::CallableExpr::cast(nodes.next()?) { - let inside_callable = callable - .arg_list() - .is_some_and(|it| it.syntax().text_range().contains(token.text_range().start())); - if inside_callable { - break callable; - } - } - }; - - let (callable, active_parameter) = callable_for_node(sema, &calling_node, &token)?; + let (callable, active_parameter) = + callable_for_arg_list(sema, arg_list, token.text_range().start())?; let mut res = SignatureHelp { doc: None, signature: String::new(), parameters: vec![], active_parameter }; @@ -187,6 +177,20 @@ fn signature_help_for_call( hir::CallableKind::Function(func) => { res.doc = func.docs(db); format_to!(res.signature, "fn {}", func.name(db).display(db, edition)); + + let generic_params = GenericDef::Function(func) + .params(db) + .iter() + .filter(|param| match param { + GenericParam::TypeParam(type_param) => !type_param.is_implicit(db), + GenericParam::ConstParam(_) | GenericParam::LifetimeParam(_) => true, + }) + .map(|param| param.display(db, display_target)) + .join(", "); + if !generic_params.is_empty() { + format_to!(res.signature, "<{}>", generic_params); + } + fn_params = Some(match callable.receiver_param(db) { Some(_self) => func.params_without_self(db), None => func.assoc_fn_params(db), @@ -195,15 +199,34 @@ fn signature_help_for_call( hir::CallableKind::TupleStruct(strukt) => { res.doc = strukt.docs(db); format_to!(res.signature, "struct {}", strukt.name(db).display(db, edition)); + + let generic_params = GenericDef::Adt(strukt.into()) + .params(db) + .iter() + .map(|param| param.display(db, display_target)) + .join(", "); + if !generic_params.is_empty() { + format_to!(res.signature, "<{}>", generic_params); + } } hir::CallableKind::TupleEnumVariant(variant) => { res.doc = variant.docs(db); format_to!( res.signature, - "enum {}::{}", + "enum {}", variant.parent_enum(db).name(db).display(db, edition), - variant.name(db).display(db, edition) ); + + let generic_params = GenericDef::Adt(variant.parent_enum(db).into()) + .params(db) + .iter() + .map(|param| param.display(db, display_target)) + .join(", "); + if !generic_params.is_empty() { + format_to!(res.signature, "<{}>", generic_params); + } + + format_to!(res.signature, "::{}", variant.name(db).display(db, edition)) } hir::CallableKind::Closure(closure) => { let fn_trait = closure.fn_trait(db); @@ -327,7 +350,7 @@ fn signature_help_for_generics( } // These don't have generic args that can be specified hir::GenericDef::Impl(_) | hir::GenericDef::Const(_) | hir::GenericDef::Static(_) => { - return None + return None; } } @@ -351,6 +374,20 @@ fn signature_help_for_generics( buf.clear(); format_to!(buf, "{}", param.display(db, display_target)); + match param { + GenericParam::TypeParam(param) => { + if let Some(ty) = param.default(db) { + format_to!(buf, " = {}", ty.display(db, display_target)); + } + } + GenericParam::ConstParam(param) => { + if let Some(expr) = param.default(db, display_target).and_then(|konst| konst.expr()) + { + format_to!(buf, " = {}", expr); + } + } + _ => {} + } res.push_generic_param(&buf); } if let hir::GenericDef::Trait(tr) = generics_def { @@ -695,9 +732,8 @@ fn signature_help_for_tuple_pat_ish( } #[cfg(test)] mod tests { - use std::iter; - use expect_test::{expect, Expect}; + use expect_test::{Expect, expect}; use ide_db::FilePosition; use stdx::format_to; use test_fixture::ChangeFixture; @@ -708,13 +744,14 @@ mod tests { pub(crate) fn position( #[rust_analyzer::rust_fixture] ra_fixture: &str, ) -> (RootDatabase, FilePosition) { - let change_fixture = ChangeFixture::parse(ra_fixture); let mut database = RootDatabase::default(); + let change_fixture = ChangeFixture::parse(&database, ra_fixture); database.apply_change(change_fixture.change); let (file_id, range_or_offset) = change_fixture.file_position.expect("expected a marker ($0)"); let offset = range_or_offset.expect_offset(); - (database, FilePosition { file_id: file_id.into(), offset }) + let position = FilePosition { file_id: file_id.file_id(&database), offset }; + (database, position) } #[track_caller] @@ -742,11 +779,11 @@ mod tests { let gap = start.checked_sub(offset).unwrap_or_else(|| { panic!("parameter ranges out of order: {:?}", sig_help.parameter_ranges()) }); - rendered.extend(iter::repeat(' ').take(gap as usize)); + rendered.extend(std::iter::repeat_n(' ', gap as usize)); let param_text = &sig_help.signature[*range]; let width = param_text.chars().count(); // … let marker = if is_active { '^' } else { '-' }; - rendered.extend(iter::repeat(marker).take(width)); + rendered.extend(std::iter::repeat_n(marker, width)); offset += gap + u32::from(range.len()); } if !sig_help.parameter_ranges().is_empty() { @@ -828,8 +865,8 @@ fn foo(x: T, y: U) -> u32 fn bar() { foo($03, ); } "#, expect![[r#" - fn foo(x: i32, y: U) -> u32 - ^^^^^^ ---- + fn foo(x: i32, y: U) -> u32 + ^^^^^^ ---- "#]], ); } @@ -842,7 +879,7 @@ fn foo() -> T where T: Copy + Display {} fn bar() { foo($0); } "#, expect![[r#" - fn foo() -> T + fn foo() -> T "#]], ); } @@ -1292,8 +1329,8 @@ fn main() { } "#, expect![[r#" - struct S({unknown}) - ^^^^^^^^^ + struct S({unknown}) + ^^^^^^^^^ "#]], ); } @@ -1388,7 +1425,7 @@ id! { fn test() { S.foo($0); } "#, expect![[r#" - fn foo(&'a mut self) + fn foo<'a>(&'a mut self) "#]], ); } @@ -1737,8 +1774,8 @@ fn sup() { } "#, expect![[r#" - fn test(&mut self, val: V) - ^^^^^^ + fn test(&mut self, val: V) + ^^^^^^ "#]], ); } @@ -1914,8 +1951,8 @@ fn f() { } "#, expect![[r#" - fn foo(x: Wrap>) - ^^^^^^^^^^^^^^^^^^^^^^ + fn foo(x: Wrap>) + ^^^^^^^^^^^^^^^^^^^^^^ "#]], ); } @@ -2407,4 +2444,96 @@ fn main() { "#]], ); } + + #[test] + fn test_tuple_generic_param() { + check( + r#" +struct S(T); + +fn main() { + let s: S<$0 +} + "#, + expect![[r#" + struct S + ^ + "#]], + ); + } + + #[test] + fn test_enum_generic_param() { + check( + r#" +enum Option { + Some(T), + None, +} + +fn main() { + let opt: Option<$0 +} + "#, + expect![[r#" + enum Option + ^ + "#]], + ); + } + + #[test] + fn test_enum_variant_generic_param() { + check( + r#" +enum Option { + Some(T), + None, +} + +fn main() { + let opt = Option::Some($0); +} + "#, + expect![[r#" + enum Option::Some({unknown}) + ^^^^^^^^^ + "#]], + ); + } + + #[test] + fn test_generic_arg_with_default() { + check( + r#" +struct S { + field: T, +} + +fn main() { + let s: S<$0 +} + "#, + expect![[r#" + struct S + ^^^^^^ + "#]], + ); + + check( + r#" +struct S { + field: C, +} + +fn main() { + let s: S<$0 +} + "#, + expect![[r#" + struct S + ^^^^^^^^^^^^^^^ + "#]], + ); + } } diff --git a/src/tools/rust-analyzer/crates/ide/src/ssr.rs b/src/tools/rust-analyzer/crates/ide/src/ssr.rs index 90e350949b81f..7df4499a0c2f7 100644 --- a/src/tools/rust-analyzer/crates/ide/src/ssr.rs +++ b/src/tools/rust-analyzer/crates/ide/src/ssr.rs @@ -2,8 +2,8 @@ //! assist in ide_assists because that would require the ide_assists crate //! depend on the ide_ssr crate. -use ide_assists::{Assist, AssistId, AssistKind, AssistResolveStrategy, GroupLabel}; -use ide_db::{label::Label, source_change::SourceChange, FileRange, RootDatabase}; +use ide_assists::{Assist, AssistId, AssistResolveStrategy, GroupLabel}; +use ide_db::{FileRange, RootDatabase, label::Label, source_change::SourceChange}; pub(crate) fn ssr_assists( db: &RootDatabase, @@ -16,7 +16,7 @@ pub(crate) fn ssr_assists( Some(ssr_data) => ssr_data, None => return ssr_assists, }; - let id = AssistId("ssr", AssistKind::RefactorRewrite); + let id = AssistId::refactor_rewrite("ssr"); let (source_change_for_file, source_change_for_workspace) = if resolve.should_resolve(&id) { let edits = match_finder.edits(); @@ -59,8 +59,8 @@ mod tests { use expect_test::expect; use ide_assists::{Assist, AssistResolveStrategy}; use ide_db::{ - base_db::ra_salsa::Durability, symbol_index::SymbolsDatabase, FileRange, FxHashSet, - RootDatabase, + FileRange, FxHashSet, RootDatabase, base_db::salsa::Durability, + symbol_index::SymbolsDatabase, }; use test_fixture::WithFixture; use triomphe::Arc; @@ -78,7 +78,7 @@ mod tests { ssr_assists( &db, &resolve, - FileRange { file_id: file_id.into(), range: range_or_offset.into() }, + FileRange { file_id: file_id.file_id(&db), range: range_or_offset.into() }, ) } @@ -120,6 +120,7 @@ mod tests { id: AssistId( "ssr", RefactorRewrite, + None, ), label: "Apply SSR in file", group: Some( @@ -139,9 +140,9 @@ mod tests { Indel { insert: "3", delete: 33..34, - annotation: None, }, ], + annotation: None, }, None, ), @@ -163,6 +164,7 @@ mod tests { id: AssistId( "ssr", RefactorRewrite, + None, ), label: "Apply SSR in workspace", group: Some( @@ -182,9 +184,9 @@ mod tests { Indel { insert: "3", delete: 33..34, - annotation: None, }, ], + annotation: None, }, None, ), @@ -196,9 +198,9 @@ mod tests { Indel { insert: "3", delete: 11..12, - annotation: None, }, ], + annotation: None, }, None, ), @@ -240,6 +242,7 @@ mod tests { id: AssistId( "ssr", RefactorRewrite, + None, ), label: "Apply SSR in file", group: Some( @@ -260,6 +263,7 @@ mod tests { id: AssistId( "ssr", RefactorRewrite, + None, ), label: "Apply SSR in workspace", group: Some( diff --git a/src/tools/rust-analyzer/crates/ide/src/static_index.rs b/src/tools/rust-analyzer/crates/ide/src/static_index.rs index 332aecf1e3cc5..efee39c13db94 100644 --- a/src/tools/rust-analyzer/crates/ide/src/static_index.rs +++ b/src/tools/rust-analyzer/crates/ide/src/static_index.rs @@ -1,25 +1,25 @@ //! This module provides `StaticIndex` which is used for powering //! read-only code browsers and emitting LSIF -use hir::{db::HirDatabase, Crate, HirFileIdExt, Module, Semantics}; +use arrayvec::ArrayVec; +use hir::{Crate, Module, Semantics, db::HirDatabase}; use ide_db::{ - base_db::{SourceDatabase, SourceRootDatabase, VfsPath}, - defs::Definition, + FileId, FileRange, FxHashMap, FxHashSet, RootDatabase, + base_db::{RootQueryDb, SourceDatabase, VfsPath}, + defs::{Definition, IdentClass}, documentation::Documentation, famous_defs::FamousDefs, - helpers::get_definition, - FileId, FileRange, FxHashMap, FxHashSet, RootDatabase, }; use span::Edition; -use syntax::{AstNode, SyntaxKind::*, SyntaxNode, TextRange, T}; +use syntax::{AstNode, SyntaxKind::*, SyntaxNode, SyntaxToken, T, TextRange}; use crate::navigation_target::UpmappingResult; use crate::{ - hover::{hover_for_definition, SubstTyLen}, + Analysis, Fold, HoverConfig, HoverResult, InlayHint, InlayHintsConfig, TryToNav, + hover::{SubstTyLen, hover_for_definition}, inlay_hints::{AdjustmentHintsMode, InlayFieldsToResolve}, - moniker::{def_to_kind, def_to_moniker, MonikerResult, SymbolInformationKind}, + moniker::{MonikerResult, SymbolInformationKind, def_to_kind, def_to_moniker}, parent_module::crates_for, - Analysis, Fold, HoverConfig, HoverResult, InlayHint, InlayHintsConfig, TryToNav, }; /// A static representation of fully analyzed source code. @@ -120,12 +120,28 @@ fn documentation_for_definition( famous_defs.as_ref(), def.krate(sema.db) .unwrap_or_else(|| { - (*sema.db.crate_graph().crates_in_topological_order().last().unwrap()).into() + (*sema.db.all_crates().last().expect("no crate graph present")).into() }) .to_display_target(sema.db), ) } +// FIXME: This is a weird function +fn get_definitions( + sema: &Semantics<'_, RootDatabase>, + token: SyntaxToken, +) -> Option> { + for token in sema.descend_into_macros_exact(token) { + let def = IdentClass::classify_token(sema, &token).map(IdentClass::definitions_no_ops); + if let Some(defs) = def { + if !defs.is_empty() { + return Some(defs); + } + } + } + None +} + pub enum VendoredLibrariesConfig<'a> { Included { workspace_root: &'a VfsPath }, Excluded, @@ -175,9 +191,14 @@ impl StaticIndex<'_> { // hovers let sema = hir::Semantics::new(self.db); let root = sema.parse_guess_edition(file_id).syntax().clone(); - let edition = - sema.attach_first_edition(file_id).map(|it| it.edition()).unwrap_or(Edition::CURRENT); - let display_target = sema.first_crate_or_default(file_id).to_display_target(self.db); + let edition = sema + .attach_first_edition(file_id) + .map(|it| it.edition(self.db)) + .unwrap_or(Edition::CURRENT); + let display_target = match sema.first_crate(file_id) { + Some(krate) => krate.to_display_target(sema.db), + None => return, + }; let tokens = root.descendants_with_tokens().filter_map(|it| match it { syntax::NodeOrToken::Node(_) => None, syntax::NodeOrToken::Token(it) => Some(it), @@ -254,11 +275,14 @@ impl StaticIndex<'_> { for token in tokens { let range = token.text_range(); let node = token.parent().unwrap(); - let def = match get_definition(&sema, token.clone()) { - Some(it) => it, + match get_definitions(&sema, token.clone()) { + Some(it) => { + for i in it { + add_token(i, range, &node); + } + } None => continue, }; - add_token(def, range, &node); } self.files.push(result); } @@ -267,14 +291,14 @@ impl StaticIndex<'_> { analysis: &'a Analysis, vendored_libs_config: VendoredLibrariesConfig<'_>, ) -> StaticIndex<'a> { - let db = &*analysis.db; + let db = &analysis.db; let work = all_modules(db).into_iter().filter(|module| { let file_id = module.definition_source_file_id(db).original_file(db); - let source_root = db.file_source_root(file_id.into()); - let source_root = db.source_root(source_root); + let source_root = db.file_source_root(file_id.file_id(&analysis.db)).source_root_id(db); + let source_root = db.source_root(source_root).source_root(db); let is_vendored = match vendored_libs_config { VendoredLibrariesConfig::Included { workspace_root } => source_root - .path_for_file(&file_id.into()) + .path_for_file(&file_id.file_id(&analysis.db)) .is_some_and(|module_path| module_path.starts_with(workspace_root)), VendoredLibrariesConfig::Excluded => false, }; @@ -294,7 +318,7 @@ impl StaticIndex<'_> { if visited_files.contains(&file_id) { continue; } - this.add_file(file_id.into()); + this.add_file(file_id.file_id(&analysis.db)); // mark the file visited_files.insert(file_id); } @@ -304,8 +328,8 @@ impl StaticIndex<'_> { #[cfg(test)] mod tests { - use crate::{fixture, StaticIndex}; - use ide_db::{base_db::VfsPath, FileRange, FxHashSet}; + use crate::{StaticIndex, fixture}; + use ide_db::{FileRange, FxHashMap, FxHashSet, base_db::VfsPath}; use syntax::TextSize; use super::VendoredLibrariesConfig; @@ -360,6 +384,71 @@ mod tests { } } + #[track_caller] + fn check_references( + #[rust_analyzer::rust_fixture] ra_fixture: &str, + vendored_libs_config: VendoredLibrariesConfig<'_>, + ) { + let (analysis, ranges) = fixture::annotations_without_marker(ra_fixture); + let s = StaticIndex::compute(&analysis, vendored_libs_config); + let mut range_set: FxHashMap<_, i32> = ranges.iter().map(|it| (it.0, 0)).collect(); + + // Make sure that all references have at least one range. We use a HashMap instead of a + // a HashSet so that we can have more than one reference at the same range. + for (_, t) in s.tokens.iter() { + for r in &t.references { + if r.is_definition { + continue; + } + if r.range.range.start() == TextSize::from(0) { + // ignore whole file range corresponding to module definition + continue; + } + match range_set.entry(r.range) { + std::collections::hash_map::Entry::Occupied(mut entry) => { + let count = entry.get_mut(); + *count += 1; + } + std::collections::hash_map::Entry::Vacant(_) => { + panic!("additional reference {r:?}"); + } + } + } + } + for (range, count) in range_set.iter() { + if *count == 0 { + panic!("unfound reference {range:?}"); + } + } + } + + #[test] + fn field_initialization() { + check_references( + r#" +struct Point { + x: f64, + //^^^ + y: f64, + //^^^ +} + fn foo() { + let x = 5.; + let y = 10.; + let mut p = Point { x, y }; + //^^^^^ ^ ^ + p.x = 9.; + //^ ^ + p.y = 10.; + //^ ^ + } +"#, + VendoredLibrariesConfig::Included { + workspace_root: &VfsPath::new_virtual_path("/workspace".to_owned()), + }, + ); + } + #[test] fn struct_and_enum() { check_all_ranges( @@ -379,6 +468,17 @@ struct Foo; //^^^ enum E { X(Foo) } //^ ^ +"#, + VendoredLibrariesConfig::Included { + workspace_root: &VfsPath::new_virtual_path("/workspace".to_owned()), + }, + ); + + check_references( + r#" +struct Foo; +enum E { X(Foo) } + // ^^^ "#, VendoredLibrariesConfig::Included { workspace_root: &VfsPath::new_virtual_path("/workspace".to_owned()), diff --git a/src/tools/rust-analyzer/crates/ide/src/status.rs b/src/tools/rust-analyzer/crates/ide/src/status.rs index a44be67668ce3..cfcd76d2aa3bd 100644 --- a/src/tools/rust-analyzer/crates/ide/src/status.rs +++ b/src/tools/rust-analyzer/crates/ide/src/status.rs @@ -1,29 +1,8 @@ -use std::{fmt, marker::PhantomData}; - -use hir::{ - db::{AstIdMapQuery, AttrsQuery, BlockDefMapQuery, ParseMacroExpansionQuery}, - Attr, Attrs, ExpandResult, MacroFileId, Module, -}; -use ide_db::{ - base_db::{ - ra_salsa::{ - debug::{DebugQueryTable, TableEntry}, - Query, QueryTable, - }, - CompressedFileTextQuery, CrateData, ParseQuery, SourceDatabase, SourceRootId, - }, - symbol_index::ModuleSymbolsQuery, -}; -use ide_db::{ - symbol_index::{LibrarySymbolsQuery, SymbolIndex}, - RootDatabase, -}; +use ide_db::RootDatabase; +use ide_db::base_db::{BuiltCrateData, ExtraCrateData}; use itertools::Itertools; -use profile::{memory_usage, Bytes}; -use span::{EditionedFileId, FileId}; +use span::FileId; use stdx::format_to; -use syntax::{ast, Parse, SyntaxNode}; -use triomphe::Arc; // Feature: Status // @@ -37,17 +16,17 @@ use triomphe::Arc; pub(crate) fn status(db: &RootDatabase, file_id: Option) -> String { let mut buf = String::new(); - format_to!(buf, "{}\n", collect_query(CompressedFileTextQuery.in_db(db))); - format_to!(buf, "{}\n", collect_query(ParseQuery.in_db(db))); - format_to!(buf, "{}\n", collect_query(ParseMacroExpansionQuery.in_db(db))); - format_to!(buf, "{}\n", collect_query(LibrarySymbolsQuery.in_db(db))); - format_to!(buf, "{}\n", collect_query(ModuleSymbolsQuery.in_db(db))); - format_to!(buf, "{} in total\n", memory_usage()); + // format_to!(buf, "{}\n", collect_query(CompressedFileTextQuery.in_db(db))); + // format_to!(buf, "{}\n", collect_query(ParseQuery.in_db(db))); + // format_to!(buf, "{}\n", collect_query(ParseMacroExpansionQuery.in_db(db))); + // format_to!(buf, "{}\n", collect_query(LibrarySymbolsQuery.in_db(db))); + // format_to!(buf, "{}\n", collect_query(ModuleSymbolsQuery.in_db(db))); + // format_to!(buf, "{} in total\n", memory_usage()); - format_to!(buf, "\nDebug info:\n"); - format_to!(buf, "{}\n", collect_query(AttrsQuery.in_db(db))); - format_to!(buf, "{} ast id maps\n", collect_query_count(AstIdMapQuery.in_db(db))); - format_to!(buf, "{} block def maps\n", collect_query_count(BlockDefMapQuery.in_db(db))); + // format_to!(buf, "\nDebug info:\n"); + // format_to!(buf, "{}\n", collect_query(AttrsQuery.in_db(db))); + // format_to!(buf, "{} ast id maps\n", collect_query_count(AstIdMapQuery.in_db(db))); + // format_to!(buf, "{} block def maps\n", collect_query_count(BlockDefMapQuery.in_db(db))); if let Some(file_id) = file_id { format_to!(buf, "\nCrates for file {}:\n", file_id.index()); @@ -55,27 +34,25 @@ pub(crate) fn status(db: &RootDatabase, file_id: Option) -> String { if crates.is_empty() { format_to!(buf, "Does not belong to any crate"); } - let crate_graph = db.crate_graph(); for crate_id in crates { - let CrateData { + let BuiltCrateData { root_file_id, edition, - version, - display_name, - cfg_options, - potential_cfg_options, - env, dependencies, origin, is_proc_macro, proc_macro_cwd, - } = &crate_graph[crate_id]; + } = crate_id.data(db); + let ExtraCrateData { version, display_name, potential_cfg_options } = + crate_id.extra_data(db); + let cfg_options = crate_id.cfg_options(db); + let env = crate_id.env(db); format_to!( buf, "Crate: {}\n", match display_name { - Some(it) => format!("{it}({})", crate_id.into_raw()), - None => format!("{}", crate_id.into_raw()), + Some(it) => format!("{it}({crate_id:?})"), + None => format!("{crate_id:?}"), } ); format_to!(buf, " Root module file id: {}\n", root_file_id.index()); @@ -89,7 +66,7 @@ pub(crate) fn status(db: &RootDatabase, file_id: Option) -> String { format_to!(buf, " Proc macro cwd: {:?}\n", proc_macro_cwd); let deps = dependencies .iter() - .map(|dep| format!("{}={}", dep.name, dep.crate_id.into_raw())) + .map(|dep| format!("{}={:?}", dep.name, dep.crate_id)) .format(", "); format_to!(buf, " Dependencies: {}\n", deps); } @@ -97,190 +74,3 @@ pub(crate) fn status(db: &RootDatabase, file_id: Option) -> String { buf.trim().to_owned() } - -fn collect_query<'q, Q>(table: QueryTable<'q, Q>) -> ::Collector -where - QueryTable<'q, Q>: DebugQueryTable, - Q: QueryCollect, - ::Storage: 'q, - ::Collector: StatCollect< - as DebugQueryTable>::Key, - as DebugQueryTable>::Value, - >, -{ - struct StatCollectorWrapper(C); - impl, K, V> FromIterator> for StatCollectorWrapper { - fn from_iter(iter: T) -> StatCollectorWrapper - where - T: IntoIterator>, - { - let mut res = C::default(); - for entry in iter { - res.collect_entry(entry.key, entry.value); - } - StatCollectorWrapper(res) - } - } - table.entries::::Collector>>().0 -} - -fn collect_query_count<'q, Q>(table: QueryTable<'q, Q>) -> usize -where - QueryTable<'q, Q>: DebugQueryTable, - Q: Query, - ::Storage: 'q, -{ - struct EntryCounter(usize); - impl FromIterator> for EntryCounter { - fn from_iter(iter: T) -> EntryCounter - where - T: IntoIterator>, - { - EntryCounter(iter.into_iter().count()) - } - } - table.entries::().0 -} - -trait QueryCollect: Query { - type Collector; -} - -impl QueryCollect for LibrarySymbolsQuery { - type Collector = SymbolsStats; -} - -impl QueryCollect for ParseQuery { - type Collector = SyntaxTreeStats; -} - -impl QueryCollect for ParseMacroExpansionQuery { - type Collector = SyntaxTreeStats; -} - -impl QueryCollect for CompressedFileTextQuery { - type Collector = FilesStats; -} - -impl QueryCollect for ModuleSymbolsQuery { - type Collector = SymbolsStats; -} - -impl QueryCollect for AttrsQuery { - type Collector = AttrsStats; -} - -trait StatCollect: Default { - fn collect_entry(&mut self, key: K, value: Option); -} - -#[derive(Default)] -struct FilesStats { - total: usize, - size: Bytes, -} - -impl fmt::Display for FilesStats { - fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(fmt, "{} of files", self.size) - } -} - -impl StatCollect> for FilesStats { - fn collect_entry(&mut self, _: FileId, value: Option>) { - self.total += 1; - self.size += value.unwrap().len(); - } -} - -#[derive(Default)] -pub(crate) struct SyntaxTreeStats { - total: usize, - pub(crate) retained: usize, -} - -impl fmt::Display for SyntaxTreeStats { - fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { - write!( - fmt, - "{} trees, {} preserved{}", - self.total, - self.retained, - if MACROS { " (macros)" } else { "" } - ) - } -} - -impl StatCollect> for SyntaxTreeStats { - fn collect_entry(&mut self, _: EditionedFileId, value: Option>) { - self.total += 1; - self.retained += value.is_some() as usize; - } -} - -impl StatCollect, M)>> for SyntaxTreeStats { - fn collect_entry( - &mut self, - _: MacroFileId, - value: Option, M)>>, - ) { - self.total += 1; - self.retained += value.is_some() as usize; - } -} - -struct SymbolsStats { - total: usize, - size: Bytes, - phantom: PhantomData, -} - -impl Default for SymbolsStats { - fn default() -> Self { - Self { total: Default::default(), size: Default::default(), phantom: PhantomData } - } -} - -impl fmt::Display for SymbolsStats { - fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(fmt, "{} of module index symbols ({})", self.size, self.total) - } -} -impl fmt::Display for SymbolsStats { - fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(fmt, "{} of library index symbols ({})", self.size, self.total) - } -} -impl StatCollect> for SymbolsStats { - fn collect_entry(&mut self, _: Key, value: Option>) { - if let Some(symbols) = value { - self.total += symbols.len(); - self.size += symbols.memory_size(); - } - } -} - -#[derive(Default)] -struct AttrsStats { - entries: usize, - total: usize, -} - -impl fmt::Display for AttrsStats { - fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { - let size = self.entries * size_of::() + self.total * size_of::(); - let size = Bytes::new(size as _); - write!( - fmt, - "{} attribute query entries, {} total attributes ({} for storing entries)", - self.entries, self.total, size - ) - } -} - -impl StatCollect for AttrsStats { - fn collect_entry(&mut self, _: Key, value: Option) { - self.entries += 1; - self.total += value.map_or(0, |it| it.len()); - } -} diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs index 83082496d5b64..e1bc76318f8b9 100644 --- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs +++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs @@ -15,26 +15,23 @@ mod tests; use std::ops::ControlFlow; use either::Either; -use hir::{ - DefWithBody, HirFileIdExt, InFile, InRealFile, MacroFileIdExt, MacroKind, Name, Semantics, -}; +use hir::{DefWithBody, EditionedFileId, InFile, InRealFile, MacroKind, Name, Semantics}; use ide_db::{FxHashMap, FxHashSet, Ranker, RootDatabase, SymbolKind}; -use span::EditionedFileId; use syntax::{ - ast::{self, IsString}, AstNode, AstToken, NodeOrToken, SyntaxKind::*, - SyntaxNode, SyntaxToken, TextRange, WalkEvent, T, + SyntaxNode, SyntaxToken, T, TextRange, WalkEvent, + ast::{self, IsString}, }; use crate::{ + FileId, HlMod, HlOperator, HlPunct, HlTag, syntax_highlighting::{ escape::{highlight_escape_byte, highlight_escape_char, highlight_escape_string}, format::highlight_format_string, highlights::Highlights, tags::Highlight, }, - FileId, HlMod, HlOperator, HlPunct, HlTag, }; pub(crate) use html::highlight_as_html; @@ -199,7 +196,7 @@ pub(crate) fn highlight( let sema = Semantics::new(db); let file_id = sema .attach_first_edition(file_id) - .unwrap_or_else(|| EditionedFileId::current_edition(file_id)); + .unwrap_or_else(|| EditionedFileId::current_edition(db, file_id)); // Determine the root based on the given range. let (root, range_to_highlight) = { @@ -218,10 +215,7 @@ pub(crate) fn highlight( }; let mut hl = highlights::Highlights::new(root.text_range()); - let krate = match sema.scope(&root) { - Some(it) => it.krate(), - None => return hl.to_vec(), - }; + let krate = sema.scope(&root).map(|it| it.krate()); traverse(&mut hl, &sema, config, InRealFile::new(file_id, &root), krate, range_to_highlight); hl.to_vec() } @@ -231,10 +225,10 @@ fn traverse( sema: &Semantics<'_, RootDatabase>, config: HighlightConfig, InRealFile { file_id, value: root }: InRealFile<&SyntaxNode>, - krate: hir::Crate, + krate: Option, range_to_highlight: TextRange, ) { - let is_unlinked = sema.file_to_module_def(file_id).is_none(); + let is_unlinked = sema.file_to_module_def(file_id.file_id(sema.db)).is_none(); enum AttrOrDerive { Attr(ast::Item), @@ -494,7 +488,7 @@ fn string_injections( sema: &Semantics<'_, RootDatabase>, config: HighlightConfig, file_id: EditionedFileId, - krate: hir::Crate, + krate: Option, token: SyntaxToken, descended_token: &SyntaxToken, ) -> ControlFlow<()> { @@ -508,7 +502,14 @@ fn string_injections( { return ControlFlow::Break(()); } - highlight_format_string(hl, sema, krate, &string, &descended_string, file_id.edition()); + highlight_format_string( + hl, + sema, + krate, + &string, + &descended_string, + file_id.edition(sema.db), + ); if !string.is_raw() { highlight_escape_string(hl, &string); diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/format.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/format.rs index cc02aff2acf8a..3716dcfed006e 100644 --- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/format.rs +++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/format.rs @@ -1,21 +1,21 @@ //! Syntax highlighting for format macro strings. use ide_db::{ - defs::Definition, - syntax_helpers::format_string::{is_format_string, lex_format_specifiers, FormatSpecifier}, SymbolKind, + defs::Definition, + syntax_helpers::format_string::{FormatSpecifier, is_format_string, lex_format_specifiers}, }; use span::Edition; -use syntax::{ast, AstToken}; +use syntax::{AstToken, ast}; use crate::{ - syntax_highlighting::{highlight::highlight_def, highlights::Highlights}, HlRange, HlTag, + syntax_highlighting::{highlight::highlight_def, highlights::Highlights}, }; pub(super) fn highlight_format_string( stack: &mut Highlights, sema: &hir::Semantics<'_, ide_db::RootDatabase>, - krate: hir::Crate, + krate: Option, string: &ast::String, expanded_string: &ast::String, edition: Edition, diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/highlight.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/highlight.rs index 282fbb4433b22..87db0cd7dc53c 100644 --- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/highlight.rs +++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/highlight.rs @@ -3,23 +3,23 @@ use std::ops::ControlFlow; use either::Either; -use hir::{AsAssocItem, HasVisibility, MacroFileIdExt, Semantics}; +use hir::{AsAssocItem, HasVisibility, Semantics}; use ide_db::{ + FxHashMap, RootDatabase, SymbolKind, defs::{Definition, IdentClass, NameClass, NameRefClass}, syntax_helpers::node_ext::walk_pat, - FxHashMap, RootDatabase, SymbolKind, }; use span::Edition; use stdx::hash_once; use syntax::{ - ast, match_ast, AstNode, AstPtr, AstToken, NodeOrToken, + AstNode, AstPtr, AstToken, NodeOrToken, SyntaxKind::{self, *}, - SyntaxNode, SyntaxNodePtr, SyntaxToken, T, + SyntaxNode, SyntaxNodePtr, SyntaxToken, T, ast, match_ast, }; use crate::{ - syntax_highlighting::tags::{HlOperator, HlPunct}, Highlight, HlMod, HlTag, + syntax_highlighting::tags::{HlOperator, HlPunct}, }; pub(super) fn token( @@ -63,7 +63,7 @@ pub(super) fn token( pub(super) fn name_like( sema: &Semantics<'_, RootDatabase>, - krate: hir::Crate, + krate: Option, bindings_shadow_count: Option<&mut FxHashMap>, is_unsafe_node: &impl Fn(AstPtr>) -> bool, syntactic_name_ref_highlighting: bool, @@ -113,7 +113,8 @@ fn punctuation( ) -> Highlight { let operator_parent = token.parent(); let parent_kind = operator_parent.as_ref().map_or(EOF, SyntaxNode::kind); - let h = match (kind, parent_kind) { + + match (kind, parent_kind) { (T![?], TRY_EXPR) => HlTag::Operator(HlOperator::Other) | HlMod::ControlFlow, (T![&], BIN_EXPR) => HlOperator::Bitwise.into(), (T![&], REF_EXPR | REF_PAT) => HlTag::Operator(HlOperator::Other).into(), @@ -143,11 +144,7 @@ fn punctuation( let ptr = operator_parent .as_ref() .and_then(|it| AstPtr::try_from_raw(SyntaxNodePtr::new(it))); - if ptr.is_some_and(is_unsafe_node) { - h | HlMod::Unsafe - } else { - h - } + if ptr.is_some_and(is_unsafe_node) { h | HlMod::Unsafe } else { h } } (T![-], PREFIX_EXPR) => { let prefix_expr = @@ -223,11 +220,7 @@ fn punctuation( let is_unsafe = is_unsafe_macro || operator_parent .and_then(|it| { - if ast::ArgList::can_cast(it.kind()) { - it.parent() - } else { - Some(it) - } + if ast::ArgList::can_cast(it.kind()) { it.parent() } else { Some(it) } }) .and_then(|it| AstPtr::try_from_raw(SyntaxNodePtr::new(&it))) .is_some_and(is_unsafe_node); @@ -248,8 +241,7 @@ fn punctuation( _ => HlPunct::Other, } .into(), - }; - h + } } fn keyword(token: SyntaxToken, kind: SyntaxKind) -> Highlight { @@ -280,7 +272,7 @@ fn keyword(token: SyntaxToken, kind: SyntaxKind) -> Highlight { fn highlight_name_ref( sema: &Semantics<'_, RootDatabase>, - krate: hir::Crate, + krate: Option, bindings_shadow_count: Option<&mut FxHashMap>, binding_hash: &mut Option, is_unsafe_node: &impl Fn(AstPtr>) -> bool, @@ -296,7 +288,7 @@ fn highlight_name_ref( let name_class = match NameRefClass::classify(sema, &name_ref) { Some(name_kind) => name_kind, None if syntactic_name_ref_highlighting => { - return highlight_name_ref_by_syntax(name_ref, sema, krate, is_unsafe_node) + return highlight_name_ref_by_syntax(name_ref, sema, krate, is_unsafe_node); } // FIXME: This is required for helper attributes used by proc-macros, as those do not map down // to anything when used. @@ -409,9 +401,10 @@ fn highlight_name_ref( NameRefClass::ExternCrateShorthand { decl, krate: resolved_krate } => { let mut h = HlTag::Symbol(SymbolKind::Module).into(); - if resolved_krate != krate { - h |= HlMod::Library + if krate.as_ref().is_some_and(|krate| resolved_krate != *krate) { + h |= HlMod::Library; } + let is_public = decl.visibility(db) == hir::Visibility::Public; if is_public { h |= HlMod::Public @@ -439,7 +432,7 @@ fn highlight_name( bindings_shadow_count: Option<&mut FxHashMap>, binding_hash: &mut Option, is_unsafe_node: &impl Fn(AstPtr>) -> bool, - krate: hir::Crate, + krate: Option, name: ast::Name, edition: Edition, ) -> Highlight { @@ -484,7 +477,7 @@ fn calc_binding_hash(name: &hir::Name, shadow_count: u32) -> u64 { pub(super) fn highlight_def( sema: &Semantics<'_, RootDatabase>, - krate: hir::Crate, + krate: Option, def: Definition, edition: Edition, is_ref: bool, @@ -668,7 +661,7 @@ pub(super) fn highlight_def( }; let def_crate = def.krate(db); - let is_from_other_crate = def_crate != Some(krate); + let is_from_other_crate = def_crate != krate; let is_from_builtin_crate = def_crate.is_some_and(|def_crate| def_crate.is_builtin(db)); let is_builtin = matches!( def, @@ -689,7 +682,7 @@ pub(super) fn highlight_def( fn highlight_method_call_by_name_ref( sema: &Semantics<'_, RootDatabase>, - krate: hir::Crate, + krate: Option, name_ref: &ast::NameRef, is_unsafe_node: &impl Fn(AstPtr>) -> bool, ) -> Option { @@ -699,7 +692,7 @@ fn highlight_method_call_by_name_ref( fn highlight_method_call( sema: &Semantics<'_, RootDatabase>, - krate: hir::Crate, + krate: Option, method_call: &ast::MethodCallExpr, is_unsafe_node: &impl Fn(AstPtr>) -> bool, ) -> Option { @@ -726,7 +719,7 @@ fn highlight_method_call( } let def_crate = func.module(sema.db).krate(); - let is_from_other_crate = def_crate != krate; + let is_from_other_crate = krate.as_ref().map_or(false, |krate| def_crate != *krate); let is_from_builtin_crate = def_crate.is_builtin(sema.db); let is_public = func.visibility(sema.db) == hir::Visibility::Public; @@ -799,7 +792,7 @@ fn highlight_name_by_syntax(name: ast::Name) -> Highlight { fn highlight_name_ref_by_syntax( name: ast::NameRef, sema: &Semantics<'_, RootDatabase>, - krate: hir::Crate, + krate: Option, is_unsafe_node: &impl Fn(AstPtr>) -> bool, ) -> Highlight { let default = HlTag::UnresolvedReference; @@ -818,12 +811,9 @@ fn highlight_name_ref_by_syntax( let h = HlTag::Symbol(SymbolKind::Field); let is_unsafe = ast::Expr::cast(parent) .is_some_and(|it| is_unsafe_node(AstPtr::new(&it).wrap_left())); - if is_unsafe { - h | HlMod::Unsafe - } else { - h.into() - } + if is_unsafe { h | HlMod::Unsafe } else { h.into() } } + RECORD_EXPR_FIELD | RECORD_PAT_FIELD => HlTag::Symbol(SymbolKind::Field).into(), PATH_SEGMENT => { let name_based_fallback = || { if name.text().chars().next().unwrap_or_default().is_uppercase() { @@ -862,6 +852,8 @@ fn highlight_name_ref_by_syntax( .into(), } } + ASSOC_TYPE_ARG => SymbolKind::TypeAlias.into(), + USE_BOUND_GENERIC_ARGS => SymbolKind::TypeParam.into(), _ => default.into(), } } diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/html.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/html.rs index 07d40bafeba17..9fd807f031f1f 100644 --- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/html.rs +++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/html.rs @@ -1,21 +1,20 @@ //! Renders a bit of code as HTML. -use hir::Semantics; +use hir::{EditionedFileId, Semantics}; use oorandom::Rand32; -use span::EditionedFileId; use stdx::format_to; use syntax::AstNode; use crate::{ - syntax_highlighting::{highlight, HighlightConfig}, FileId, RootDatabase, + syntax_highlighting::{HighlightConfig, highlight}, }; pub(crate) fn highlight_as_html(db: &RootDatabase, file_id: FileId, rainbow: bool) -> String { let sema = Semantics::new(db); let file_id = sema .attach_first_edition(file_id) - .unwrap_or_else(|| EditionedFileId::current_edition(file_id)); + .unwrap_or_else(|| EditionedFileId::current_edition(db, file_id)); let file = sema.parse(file_id); let file = file.syntax(); fn rainbowify(seed: u64) -> String { @@ -40,7 +39,7 @@ pub(crate) fn highlight_as_html(db: &RootDatabase, file_id: FileId, rainbow: boo macro_bang: true, syntactic_name_ref_highlighting: false, }, - file_id.into(), + file_id.file_id(db), None, ); let text = file.to_string(); diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/inject.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/inject.rs index 1be90ad6a1ebc..0998e14c87ba0 100644 --- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/inject.rs +++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/inject.rs @@ -3,21 +3,20 @@ use std::mem; use either::Either; -use hir::{sym, HirFileId, InFile, Semantics}; +use hir::{EditionedFileId, HirFileId, InFile, Semantics, sym}; use ide_db::{ - active_parameter::ActiveParameter, defs::Definition, documentation::docs_with_rangemap, - rust_doc::is_rust_fence, SymbolKind, + SymbolKind, active_parameter::ActiveParameter, defs::Definition, + documentation::docs_with_rangemap, rust_doc::is_rust_fence, }; -use span::EditionedFileId; use syntax::{ - ast::{self, AstNode, IsString, QuoteOffsets}, AstToken, NodeOrToken, SyntaxNode, TextRange, TextSize, + ast::{self, AstNode, IsString, QuoteOffsets}, }; use crate::{ - doc_links::{doc_attributes, extract_definitions_from_docs, resolve_doc_path_for_def}, - syntax_highlighting::{highlights::Highlights, injector::Injector, HighlightConfig}, Analysis, HlMod, HlRange, HlTag, RootDatabase, + doc_links::{doc_attributes, extract_definitions_from_docs, resolve_doc_path_for_def}, + syntax_highlighting::{HighlightConfig, highlights::Highlights, injector::Injector}, }; pub(super) fn ra_fixture( @@ -161,7 +160,7 @@ pub(super) fn doc_comment( let mut new_comments = Vec::new(); let mut string; - for attr in attributes.by_key(&sym::doc).attrs() { + for attr in attributes.by_key(sym::doc).attrs() { let InFile { file_id, value: src } = attrs_source_map.source_of(attr); if file_id != src_file_id { continue; diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/injector.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/injector.rs index a902fd717f099..c30f797324967 100644 --- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/injector.rs +++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/injector.rs @@ -53,11 +53,7 @@ impl Delta { where T: Ord + Sub, { - if to >= from { - Delta::Add(to - from) - } else { - Delta::Sub(from - to) - } + if to >= from { Delta::Add(to - from) } else { Delta::Sub(from - to) } } } diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_doctest.html b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_doctest.html index c8c8c5dba4c40..d00f279c82995 100644 --- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_doctest.html +++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_doctest.html @@ -45,14 +45,13 @@ //! fn test() {} //! ``` +//! Syntactic name ref highlighting testing //! ```rust //! extern crate self; -//! extern crate std; +//! extern crate other as otter; //! extern crate core; -//! extern crate alloc; -//! extern crate proc_macro; -//! extern crate test; -//! extern crate Krate; +//! trait T { type Assoc; } +//! fn f<Arg>() -> use<Arg> where (): T<Assoc = ()> {} //! ``` mod outline_module; diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_issue_19357.html b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_issue_19357.html new file mode 100644 index 0000000000000..36ed8c594f7e2 --- /dev/null +++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_issue_19357.html @@ -0,0 +1,46 @@ + + +
    fn main() {
    +    let x = &raw mut 5;
    +}
    +
    \ No newline at end of file diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_rainbow.html b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_rainbow.html index 7f6b4c2c880e1..e1a8d876c417b 100644 --- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_rainbow.html +++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_rainbow.html @@ -41,14 +41,14 @@ .unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
    fn main() {
    -    let hello = "hello";
    -    let x = hello.to_string();
    -    let y = hello.to_string();
    +    let hello = "hello";
    +    let x = hello.to_string();
    +    let y = hello.to_string();
     
    -    let x = "other color please!";
    -    let y = x.to_string();
    +    let x = "other color please!";
    +    let y = x.to_string();
     }
     
     fn bar() {
    -    let mut hello = "hello";
    +    let mut hello = "hello";
     }
    \ No newline at end of file diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/tests.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/tests.rs index 8f69bb8230000..dd359326c61d6 100644 --- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/tests.rs +++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/tests.rs @@ -1,11 +1,11 @@ use std::time::Instant; -use expect_test::{expect_file, ExpectFile}; +use expect_test::{ExpectFile, expect_file}; use ide_db::SymbolKind; use span::Edition; -use test_utils::{bench, bench_fixture, skip_slow_tests, AssertLinear}; +use test_utils::{AssertLinear, bench, bench_fixture, skip_slow_tests}; -use crate::{fixture, FileRange, HighlightConfig, HlTag, TextRange}; +use crate::{FileRange, HighlightConfig, HlTag, TextRange, fixture}; const HL_CONFIG: HighlightConfig = HighlightConfig { strings: true, @@ -739,14 +739,13 @@ fn test_highlight_doc_comment() { //! fn test() {} //! ``` +//! Syntactic name ref highlighting testing //! ```rust //! extern crate self; -//! extern crate std; +//! extern crate other as otter; //! extern crate core; -//! extern crate alloc; -//! extern crate proc_macro; -//! extern crate test; -//! extern crate Krate; +//! trait T { type Assoc; } +//! fn f() -> use where (): T {} //! ``` mod outline_module; @@ -1302,7 +1301,7 @@ fn benchmark_syntax_highlighting_parser() { }) .count() }; - assert_eq!(hash, 1167); + assert_eq!(hash, 1606); } #[test] @@ -1421,3 +1420,18 @@ fn template() {} false, ); } + +#[test] +fn issue_19357() { + check_highlighting( + r#" +//- /foo.rs +fn main() { + let x = &raw mut 5; +} +//- /main.rs +"#, + expect_file!["./test_data/highlight_issue_19357.html"], + false, + ); +} diff --git a/src/tools/rust-analyzer/crates/ide/src/test_explorer.rs b/src/tools/rust-analyzer/crates/ide/src/test_explorer.rs index 30b1d4c39b301..06cbd50e946ac 100644 --- a/src/tools/rust-analyzer/crates/ide/src/test_explorer.rs +++ b/src/tools/rust-analyzer/crates/ide/src/test_explorer.rs @@ -1,17 +1,15 @@ //! Discovers tests use hir::{Crate, Module, ModuleDef, Semantics}; -use ide_db::{ - base_db::{CrateGraph, CrateId, SourceDatabase}, - FileId, RootDatabase, -}; +use ide_db::base_db; +use ide_db::{FileId, RootDatabase, base_db::RootQueryDb}; use syntax::TextRange; -use crate::{runnables::runnable_fn, NavigationTarget, Runnable, TryToNav}; +use crate::{NavigationTarget, Runnable, TryToNav, runnables::runnable_fn}; #[derive(Debug)] pub enum TestItemKind { - Crate(CrateId), + Crate(base_db::Crate), Module, Function, } @@ -28,12 +26,12 @@ pub struct TestItem { } pub(crate) fn discover_test_roots(db: &RootDatabase) -> Vec { - let crate_graph = db.crate_graph(); - crate_graph + db.all_crates() .iter() - .filter(|&id| crate_graph[id].origin.is_local()) + .copied() + .filter(|&id| id.data(db).origin.is_local()) .filter_map(|id| { - let test_id = crate_graph[id].display_name.as_ref()?.to_string(); + let test_id = id.extra_data(db).display_name.as_ref()?.to_string(); Some(TestItem { kind: TestItemKind::Crate(id), label: test_id.clone(), @@ -47,12 +45,12 @@ pub(crate) fn discover_test_roots(db: &RootDatabase) -> Vec { .collect() } -fn find_crate_by_id(crate_graph: &CrateGraph, crate_id: &str) -> Option { +fn find_crate_by_id(db: &RootDatabase, crate_id: &str) -> Option { // here, we use display_name as the crate id. This is not super ideal, but it works since we // only show tests for the local crates. - crate_graph.iter().find(|&id| { - crate_graph[id].origin.is_local() - && crate_graph[id].display_name.as_ref().is_some_and(|x| x.to_string() == crate_id) + db.all_crates().iter().copied().find(|&id| { + id.data(db).origin.is_local() + && id.extra_data(db).display_name.as_ref().is_some_and(|x| x.to_string() == crate_id) }) } @@ -115,8 +113,7 @@ pub(crate) fn discover_tests_in_crate_by_test_id( db: &RootDatabase, crate_test_id: &str, ) -> Vec { - let crate_graph = db.crate_graph(); - let Some(crate_id) = find_crate_by_id(&crate_graph, crate_test_id) else { + let Some(crate_id) = find_crate_by_id(db, crate_test_id) else { return vec![]; }; discover_tests_in_crate(db, crate_id) @@ -171,12 +168,14 @@ fn find_module_id_and_test_parents( Some((r, id)) } -pub(crate) fn discover_tests_in_crate(db: &RootDatabase, crate_id: CrateId) -> Vec { - let crate_graph = db.crate_graph(); - if !crate_graph[crate_id].origin.is_local() { +pub(crate) fn discover_tests_in_crate( + db: &RootDatabase, + crate_id: base_db::Crate, +) -> Vec { + if !crate_id.data(db).origin.is_local() { return vec![]; } - let Some(crate_test_id) = &crate_graph[crate_id].display_name else { + let Some(crate_test_id) = &crate_id.extra_data(db).display_name else { return vec![]; }; let kind = TestItemKind::Crate(crate_id); diff --git a/src/tools/rust-analyzer/crates/ide/src/typing.rs b/src/tools/rust-analyzer/crates/ide/src/typing.rs index 8c9dd05145272..4df7e25223d91 100644 --- a/src/tools/rust-analyzer/crates/ide/src/typing.rs +++ b/src/tools/rust-analyzer/crates/ide/src/typing.rs @@ -15,14 +15,15 @@ mod on_enter; +use hir::EditionedFileId; +use ide_db::{FilePosition, RootDatabase, base_db::RootQueryDb}; +use span::Edition; use std::iter; -use ide_db::{base_db::SourceDatabase, FilePosition, RootDatabase}; -use span::{Edition, EditionedFileId}; use syntax::{ - algo::{ancestors_at_offset, find_node_at_offset}, - ast::{self, edit::IndentLevel, AstToken}, AstNode, Parse, SourceFile, SyntaxKind, TextRange, TextSize, + algo::{ancestors_at_offset, find_node_at_offset}, + ast::{self, AstToken, edit::IndentLevel}, }; use ide_db::text_edit::TextEdit; @@ -73,7 +74,8 @@ pub(crate) fn on_char_typed( // FIXME: We are hitting the database here, if we are unlucky this call might block momentarily // causing the editor to feel sluggish! let edition = Edition::CURRENT_FIXME; - let file = &db.parse(EditionedFileId::new(position.file_id, edition)); + let editioned_file_id_wrapper = EditionedFileId::new(db, position.file_id, edition); + let file = &db.parse(editioned_file_id_wrapper); let char_matches_position = file.tree().syntax().text().char_at(position.offset) == Some(char_typed); if !stdx::always!(char_matches_position) { diff --git a/src/tools/rust-analyzer/crates/ide/src/typing/on_enter.rs b/src/tools/rust-analyzer/crates/ide/src/typing/on_enter.rs index c6d1c283f4eca..fdc583a15cc71 100644 --- a/src/tools/rust-analyzer/crates/ide/src/typing/on_enter.rs +++ b/src/tools/rust-analyzer/crates/ide/src/typing/on_enter.rs @@ -1,15 +1,14 @@ //! Handles the `Enter` key press. At the momently, this only continues //! comments, but should handle indent some time in the future as well. -use ide_db::RootDatabase; -use ide_db::{base_db::SourceDatabase, FilePosition}; -use span::EditionedFileId; +use ide_db::base_db::RootQueryDb; +use ide_db::{FilePosition, RootDatabase}; use syntax::{ - algo::find_node_at_offset, - ast::{self, edit::IndentLevel, AstToken}, AstNode, SmolStr, SourceFile, SyntaxKind::*, SyntaxNode, SyntaxToken, TextRange, TextSize, TokenAtOffset, + algo::find_node_at_offset, + ast::{self, AstToken, edit::IndentLevel}, }; use ide_db::text_edit::TextEdit; @@ -51,7 +50,9 @@ use ide_db::text_edit::TextEdit; // // ![On Enter](https://user-images.githubusercontent.com/48062697/113065578-04c21800-91b1-11eb-82b8-22b8c481e645.gif) pub(crate) fn on_enter(db: &RootDatabase, position: FilePosition) -> Option { - let parse = db.parse(EditionedFileId::current_edition(position.file_id)); + let editioned_file_id_wrapper = + ide_db::base_db::EditionedFileId::current_edition(db, position.file_id); + let parse = db.parse(editioned_file_id_wrapper); let file = parse.tree(); let token = file.syntax().token_at_offset(position.offset).left_biased()?; diff --git a/src/tools/rust-analyzer/crates/ide/src/view_crate_graph.rs b/src/tools/rust-analyzer/crates/ide/src/view_crate_graph.rs index eb6eb7da1e90a..7985279679c43 100644 --- a/src/tools/rust-analyzer/crates/ide/src/view_crate_graph.rs +++ b/src/tools/rust-analyzer/crates/ide/src/view_crate_graph.rs @@ -1,9 +1,11 @@ use dot::{Id, LabelText}; +use ide_db::base_db::salsa::plumbing::AsId; use ide_db::{ - base_db::{CrateGraph, CrateId, Dependency, SourceDatabase, SourceRootDatabase}, - FxHashSet, RootDatabase, + FxHashMap, RootDatabase, + base_db::{ + BuiltCrateData, BuiltDependency, Crate, ExtraCrateData, RootQueryDb, SourceDatabase, + }, }; -use triomphe::Arc; // Feature: View Crate Graph // @@ -16,76 +18,81 @@ use triomphe::Arc; // |---------|-------------| // | VS Code | **rust-analyzer: View Crate Graph** | pub(crate) fn view_crate_graph(db: &RootDatabase, full: bool) -> Result { - let crate_graph = db.crate_graph(); - let crates_to_render = crate_graph + let all_crates = db.all_crates(); + let crates_to_render = all_crates .iter() - .filter(|krate| { + .copied() + .map(|krate| (krate, (krate.data(db), krate.extra_data(db)))) + .filter(|(_, (crate_data, _))| { if full { true } else { // Only render workspace crates - let root_id = db.file_source_root(crate_graph[*krate].root_file_id); - !db.source_root(root_id).is_library + let root_id = db.file_source_root(crate_data.root_file_id).source_root_id(db); + !db.source_root(root_id).source_root(db).is_library } }) .collect(); - let graph = DotCrateGraph { graph: crate_graph, crates_to_render }; + let graph = DotCrateGraph { crates_to_render }; let mut dot = Vec::new(); dot::render(&graph, &mut dot).unwrap(); Ok(String::from_utf8(dot).unwrap()) } -struct DotCrateGraph { - graph: Arc, - crates_to_render: FxHashSet, +struct DotCrateGraph<'db> { + crates_to_render: FxHashMap, } -type Edge<'a> = (CrateId, &'a Dependency); +type Edge<'a> = (Crate, &'a BuiltDependency); -impl<'a> dot::GraphWalk<'a, CrateId, Edge<'a>> for DotCrateGraph { - fn nodes(&'a self) -> dot::Nodes<'a, CrateId> { - self.crates_to_render.iter().copied().collect() +impl<'a> dot::GraphWalk<'a, Crate, Edge<'a>> for DotCrateGraph<'_> { + fn nodes(&'a self) -> dot::Nodes<'a, Crate> { + self.crates_to_render.keys().copied().collect() } fn edges(&'a self) -> dot::Edges<'a, Edge<'a>> { self.crates_to_render .iter() - .flat_map(|krate| { - self.graph[*krate] + .flat_map(|(krate, (crate_data, _))| { + crate_data .dependencies .iter() - .filter(|dep| self.crates_to_render.contains(&dep.crate_id)) + .filter(|dep| self.crates_to_render.contains_key(&dep.crate_id)) .map(move |dep| (*krate, dep)) }) .collect() } - fn source(&'a self, edge: &Edge<'a>) -> CrateId { + fn source(&'a self, edge: &Edge<'a>) -> Crate { edge.0 } - fn target(&'a self, edge: &Edge<'a>) -> CrateId { + fn target(&'a self, edge: &Edge<'a>) -> Crate { edge.1.crate_id } } -impl<'a> dot::Labeller<'a, CrateId, Edge<'a>> for DotCrateGraph { +impl<'a> dot::Labeller<'a, Crate, Edge<'a>> for DotCrateGraph<'_> { fn graph_id(&'a self) -> Id<'a> { Id::new("rust_analyzer_crate_graph").unwrap() } - fn node_id(&'a self, n: &CrateId) -> Id<'a> { - Id::new(format!("_{}", u32::from(n.into_raw()))).unwrap() + fn node_id(&'a self, n: &Crate) -> Id<'a> { + let id = n.as_id().as_u32(); + Id::new(format!("_{id:?}")).unwrap() } - fn node_shape(&'a self, _node: &CrateId) -> Option> { + fn node_shape(&'a self, _node: &Crate) -> Option> { Some(LabelText::LabelStr("box".into())) } - fn node_label(&'a self, n: &CrateId) -> LabelText<'a> { - let name = - self.graph[*n].display_name.as_ref().map_or("(unnamed crate)", |name| name.as_str()); + fn node_label(&'a self, n: &Crate) -> LabelText<'a> { + let name = self.crates_to_render[n] + .1 + .display_name + .as_ref() + .map_or("(unnamed crate)", |name| name.as_str()); LabelText::LabelStr(name.into()) } } diff --git a/src/tools/rust-analyzer/crates/ide/src/view_hir.rs b/src/tools/rust-analyzer/crates/ide/src/view_hir.rs index bfdf9d0f3374e..ec5e993f5a67d 100644 --- a/src/tools/rust-analyzer/crates/ide/src/view_hir.rs +++ b/src/tools/rust-analyzer/crates/ide/src/view_hir.rs @@ -1,6 +1,6 @@ -use hir::{DefWithBody, Semantics}; +use hir::Semantics; use ide_db::{FilePosition, RootDatabase}; -use syntax::{algo::ancestors_at_offset, ast, AstNode}; +use syntax::AstNode; // Feature: View Hir // @@ -10,21 +10,10 @@ use syntax::{algo::ancestors_at_offset, ast, AstNode}; // // ![View Hir](https://user-images.githubusercontent.com/48062697/113065588-068bdb80-91b1-11eb-9a78-0b4ef1e972fb.gif) pub(crate) fn view_hir(db: &RootDatabase, position: FilePosition) -> String { - body_hir(db, position).unwrap_or_else(|| "Not inside a function body".to_owned()) -} - -fn body_hir(db: &RootDatabase, position: FilePosition) -> Option { - let sema = Semantics::new(db); - let source_file = sema.parse_guess_edition(position.file_id); - - let item = ancestors_at_offset(source_file.syntax(), position.offset) - .filter(|it| !ast::MacroCall::can_cast(it.kind())) - .find_map(ast::Item::cast)?; - let def: DefWithBody = match item { - ast::Item::Fn(it) => sema.to_def(&it)?.into(), - ast::Item::Const(it) => sema.to_def(&it)?.into(), - ast::Item::Static(it) => sema.to_def(&it)?.into(), - _ => return None, - }; - Some(def.debug_hir(db)) + (|| { + let sema = Semantics::new(db); + let source_file = sema.parse_guess_edition(position.file_id); + sema.debug_hir_at(source_file.syntax().token_at_offset(position.offset).next()?) + })() + .unwrap_or_else(|| "Not inside a lowerable item".to_owned()) } diff --git a/src/tools/rust-analyzer/crates/ide/src/view_item_tree.rs b/src/tools/rust-analyzer/crates/ide/src/view_item_tree.rs index 67c241cbb9153..2cd751463bdb8 100644 --- a/src/tools/rust-analyzer/crates/ide/src/view_item_tree.rs +++ b/src/tools/rust-analyzer/crates/ide/src/view_item_tree.rs @@ -1,6 +1,5 @@ -use hir::{db::DefDatabase, Semantics}; +use hir::{EditionedFileId, Semantics, db::DefDatabase}; use ide_db::{FileId, RootDatabase}; -use span::EditionedFileId; // Feature: Debug ItemTree // @@ -13,6 +12,6 @@ pub(crate) fn view_item_tree(db: &RootDatabase, file_id: FileId) -> String { let sema = Semantics::new(db); let file_id = sema .attach_first_edition(file_id) - .unwrap_or_else(|| EditionedFileId::current_edition(file_id)); - db.file_item_tree(file_id.into()).pretty_print(db, file_id.edition()) + .unwrap_or_else(|| EditionedFileId::current_edition(db, file_id)); + db.file_item_tree(file_id.into()).pretty_print(db, file_id.edition(db)) } diff --git a/src/tools/rust-analyzer/crates/ide/src/view_memory_layout.rs b/src/tools/rust-analyzer/crates/ide/src/view_memory_layout.rs index 34bca7bce12cf..140ae4265be7d 100644 --- a/src/tools/rust-analyzer/crates/ide/src/view_memory_layout.rs +++ b/src/tools/rust-analyzer/crates/ide/src/view_memory_layout.rs @@ -2,9 +2,9 @@ use std::fmt; use hir::{DisplayTarget, Field, HirDisplay, Layout, Semantics, Type}; use ide_db::{ + RootDatabase, defs::Definition, helpers::{get_definition, pick_best_token}, - RootDatabase, }; use syntax::{AstNode, SyntaxKind}; @@ -83,7 +83,7 @@ pub(crate) fn view_memory_layout( ) -> Option { let sema = Semantics::new(db); let file = sema.parse_guess_edition(position.file_id); - let display_target = sema.first_crate_or_default(position.file_id).to_display_target(db); + let display_target = sema.first_crate(position.file_id)?.to_display_target(db); let token = pick_best_token(file.syntax().token_at_offset(position.offset), |kind| match kind { SyntaxKind::IDENT => 3, diff --git a/src/tools/rust-analyzer/crates/ide/src/view_mir.rs b/src/tools/rust-analyzer/crates/ide/src/view_mir.rs index aa4ff64a819e1..6ca231c7a81a6 100644 --- a/src/tools/rust-analyzer/crates/ide/src/view_mir.rs +++ b/src/tools/rust-analyzer/crates/ide/src/view_mir.rs @@ -1,6 +1,6 @@ use hir::{DefWithBody, Semantics}; use ide_db::{FilePosition, RootDatabase}; -use syntax::{algo::ancestors_at_offset, ast, AstNode}; +use syntax::{AstNode, algo::ancestors_at_offset, ast}; // Feature: View Mir // diff --git a/src/tools/rust-analyzer/crates/ide/src/view_syntax_tree.rs b/src/tools/rust-analyzer/crates/ide/src/view_syntax_tree.rs index 407720864bfdb..ecd93e8b28190 100644 --- a/src/tools/rust-analyzer/crates/ide/src/view_syntax_tree.rs +++ b/src/tools/rust-analyzer/crates/ide/src/view_syntax_tree.rs @@ -1,13 +1,13 @@ use hir::Semantics; use ide_db::{ - line_index::{LineCol, LineIndex}, FileId, LineIndexDatabase, RootDatabase, + line_index::{LineCol, LineIndex}, }; use span::{TextRange, TextSize}; use stdx::format_to; use syntax::{ - ast::{self, IsString}, AstNode, AstToken, NodeOrToken, SourceFile, SyntaxNode, SyntaxToken, WalkEvent, + ast::{self, IsString}, }; use triomphe::Arc; diff --git a/src/tools/rust-analyzer/crates/intern/Cargo.toml b/src/tools/rust-analyzer/crates/intern/Cargo.toml index 397eba0929673..9ff656cb744e4 100644 --- a/src/tools/rust-analyzer/crates/intern/Cargo.toml +++ b/src/tools/rust-analyzer/crates/intern/Cargo.toml @@ -13,7 +13,6 @@ rust-version.workspace = true [dependencies] -# We need to freeze the version of the crate, as the raw-api feature is considered unstable dashmap.workspace = true hashbrown.workspace = true rustc-hash.workspace = true diff --git a/src/tools/rust-analyzer/crates/intern/src/lib.rs b/src/tools/rust-analyzer/crates/intern/src/lib.rs index 58327419f6314..398d224c07ad2 100644 --- a/src/tools/rust-analyzer/crates/intern/src/lib.rs +++ b/src/tools/rust-analyzer/crates/intern/src/lib.rs @@ -3,79 +3,84 @@ //! Eventually this should probably be replaced with salsa-based interning. use std::{ + borrow::Borrow, fmt::{self, Debug, Display}, - hash::{BuildHasherDefault, Hash, Hasher}, + hash::{BuildHasher, BuildHasherDefault, Hash, Hasher}, ops::Deref, sync::OnceLock, }; use dashmap::{DashMap, SharedValue}; -use hashbrown::{hash_map::RawEntryMut, HashMap}; +use hashbrown::raw::RawTable; use rustc_hash::FxHasher; use triomphe::Arc; type InternMap = DashMap, (), BuildHasherDefault>; -type Guard = dashmap::RwLockWriteGuard< - 'static, - HashMap, SharedValue<()>, BuildHasherDefault>, ->; +type Guard = dashmap::RwLockWriteGuard<'static, RawTable<(Arc, SharedValue<()>)>>; mod symbol; -pub use self::symbol::{symbols as sym, Symbol}; +pub use self::symbol::{Symbol, symbols as sym}; pub struct Interned { arc: Arc, } impl Interned { + #[inline] pub fn new(obj: T) -> Self { - let (mut shard, hash) = Self::select(&obj); - // Atomically, - // - check if `obj` is already in the map - // - if so, clone its `Arc` and return it - // - if not, box it up, insert it, and return a clone - // This needs to be atomic (locking the shard) to avoid races with other thread, which could - // insert the same object between us looking it up and inserting it. - match shard.raw_entry_mut().from_key_hashed_nocheck(hash, &obj) { - RawEntryMut::Occupied(occ) => Self { arc: occ.key().clone() }, - RawEntryMut::Vacant(vac) => Self { - arc: vac.insert_hashed_nocheck(hash, Arc::new(obj), SharedValue::new(())).0.clone(), - }, - } + Self::new_generic(obj) } } impl Interned { + #[inline] pub fn new_str(s: &str) -> Self { - let (mut shard, hash) = Self::select(s); + Self::new_generic(s) + } +} + +impl Interned { + #[inline] + pub fn new_generic(obj: U) -> Self + where + U: Borrow, + Arc: From, + { + let storage = T::storage().get(); + let (mut shard, hash) = Self::select(storage, obj.borrow()); // Atomically, // - check if `obj` is already in the map // - if so, clone its `Arc` and return it // - if not, box it up, insert it, and return a clone // This needs to be atomic (locking the shard) to avoid races with other thread, which could // insert the same object between us looking it up and inserting it. - match shard.raw_entry_mut().from_key_hashed_nocheck(hash, s) { - RawEntryMut::Occupied(occ) => Self { arc: occ.key().clone() }, - RawEntryMut::Vacant(vac) => Self { - arc: vac.insert_hashed_nocheck(hash, Arc::from(s), SharedValue::new(())).0.clone(), + let bucket = match shard.find_or_find_insert_slot( + hash, + |(other, _)| **other == *obj.borrow(), + |(x, _)| Self::hash(storage, x), + ) { + Ok(bucket) => bucket, + // SAFETY: The slot came from `find_or_find_insert_slot()`, and the table wasn't modified since then. + Err(insert_slot) => unsafe { + shard.insert_in_slot(hash, insert_slot, (Arc::from(obj), SharedValue::new(()))) }, - } + }; + // SAFETY: We just retrieved/inserted this bucket. + unsafe { Self { arc: bucket.as_ref().0.clone() } } } -} -impl Interned { #[inline] - fn select(obj: &T) -> (Guard, u64) { - let storage = T::storage().get(); - let hash = { - let mut hasher = std::hash::BuildHasher::build_hasher(storage.hasher()); - obj.hash(&mut hasher); - hasher.finish() - }; + fn select(storage: &'static InternMap, obj: &T) -> (Guard, u64) { + let hash = Self::hash(storage, obj); let shard_idx = storage.determine_shard(hash as usize); let shard = &storage.shards()[shard_idx]; (shard.write(), hash) } + + #[inline] + fn hash(storage: &'static InternMap, obj: &T) -> u64 { + storage.hasher().hash_one(obj) + } } impl Drop for Interned { @@ -93,21 +98,20 @@ impl Drop for Interned { impl Interned { #[cold] fn drop_slow(&mut self) { - let (mut shard, hash) = Self::select(&self.arc); + let storage = T::storage().get(); + let (mut shard, hash) = Self::select(storage, &self.arc); if Arc::count(&self.arc) != 2 { // Another thread has interned another copy return; } - match shard.raw_entry_mut().from_key_hashed_nocheck(hash, &self.arc) { - RawEntryMut::Occupied(occ) => occ.remove(), - RawEntryMut::Vacant(_) => unreachable!(), - }; + shard.remove_entry(hash, |(other, _)| **other == *self.arc); // Shrink the backing storage if the shard is less than 50% occupied. if shard.len() * 2 < shard.capacity() { - shard.shrink_to_fit(); + let len = shard.len(); + shard.shrink_to(len, |(x, _)| Self::hash(storage, x)); } } } @@ -177,7 +181,10 @@ pub struct InternStorage { map: OnceLock>, } -#[allow(clippy::new_without_default)] // this a const fn, so it can't be default +#[allow( + clippy::new_without_default, + reason = "this a const fn, so it can't be default yet. See " +)] impl InternStorage { pub const fn new() -> Self { Self { map: OnceLock::new() } diff --git a/src/tools/rust-analyzer/crates/intern/src/symbol.rs b/src/tools/rust-analyzer/crates/intern/src/symbol.rs index 0fa6701ca3fa3..8b2d6e8717d23 100644 --- a/src/tools/rust-analyzer/crates/intern/src/symbol.rs +++ b/src/tools/rust-analyzer/crates/intern/src/symbol.rs @@ -2,16 +2,15 @@ //! supporting compile time declaration of symbols that will never be freed. use std::{ - borrow::Borrow, fmt, - hash::{BuildHasherDefault, Hash, Hasher}, + hash::{BuildHasher, BuildHasherDefault, Hash}, mem::{self, ManuallyDrop}, ptr::NonNull, sync::OnceLock, }; use dashmap::{DashMap, SharedValue}; -use hashbrown::{hash_map::RawEntryMut, HashMap}; +use hashbrown::raw::RawTable; use rustc_hash::FxHasher; use triomphe::Arc; @@ -127,91 +126,95 @@ impl fmt::Debug for Symbol { const _: () = assert!(size_of::() == size_of::>()); const _: () = assert!(align_of::() == align_of::>()); -static MAP: OnceLock>> = OnceLock::new(); +type Map = DashMap>; +static MAP: OnceLock = OnceLock::new(); impl Symbol { pub fn intern(s: &str) -> Self { - let (mut shard, hash) = Self::select_shard(s); + let storage = MAP.get_or_init(symbols::prefill); + let (mut shard, hash) = Self::select_shard(storage, s); // Atomically, // - check if `obj` is already in the map // - if so, copy out its entry, conditionally bumping the backing Arc and return it // - if not, put it into a box and then into an Arc, insert it, bump the ref-count and return the copy // This needs to be atomic (locking the shard) to avoid races with other thread, which could // insert the same object between us looking it up and inserting it. - match shard.raw_entry_mut().from_key_hashed_nocheck(hash, s) { - RawEntryMut::Occupied(occ) => Self { repr: increase_arc_refcount(occ.key().0) }, - RawEntryMut::Vacant(vac) => Self { - repr: increase_arc_refcount( - vac.insert_hashed_nocheck( - hash, - SymbolProxy(TaggedArcPtr::arc(Arc::new(Box::::from(s)))), + let bucket = match shard.find_or_find_insert_slot( + hash, + |(other, _)| other.as_str() == s, + |(x, _)| Self::hash(storage, x.as_str()), + ) { + Ok(bucket) => bucket, + // SAFETY: The slot came from `find_or_find_insert_slot()`, and the table wasn't modified since then. + Err(insert_slot) => unsafe { + shard.insert_in_slot( + hash, + insert_slot, + ( + Symbol { repr: TaggedArcPtr::arc(Arc::new(Box::::from(s))) }, SharedValue::new(()), - ) - .0 - .0, - ), + ), + ) }, - } + }; + // SAFETY: We just retrieved/inserted this bucket. + unsafe { bucket.as_ref().0.clone() } } pub fn integer(i: usize) -> Self { match i { - 0 => symbols::INTEGER_0.clone(), - 1 => symbols::INTEGER_1.clone(), - 2 => symbols::INTEGER_2.clone(), - 3 => symbols::INTEGER_3.clone(), - 4 => symbols::INTEGER_4.clone(), - 5 => symbols::INTEGER_5.clone(), - 6 => symbols::INTEGER_6.clone(), - 7 => symbols::INTEGER_7.clone(), - 8 => symbols::INTEGER_8.clone(), - 9 => symbols::INTEGER_9.clone(), - 10 => symbols::INTEGER_10.clone(), - 11 => symbols::INTEGER_11.clone(), - 12 => symbols::INTEGER_12.clone(), - 13 => symbols::INTEGER_13.clone(), - 14 => symbols::INTEGER_14.clone(), - 15 => symbols::INTEGER_15.clone(), + 0 => symbols::INTEGER_0, + 1 => symbols::INTEGER_1, + 2 => symbols::INTEGER_2, + 3 => symbols::INTEGER_3, + 4 => symbols::INTEGER_4, + 5 => symbols::INTEGER_5, + 6 => symbols::INTEGER_6, + 7 => symbols::INTEGER_7, + 8 => symbols::INTEGER_8, + 9 => symbols::INTEGER_9, + 10 => symbols::INTEGER_10, + 11 => symbols::INTEGER_11, + 12 => symbols::INTEGER_12, + 13 => symbols::INTEGER_13, + 14 => symbols::INTEGER_14, + 15 => symbols::INTEGER_15, i => Symbol::intern(&format!("{i}")), } } pub fn empty() -> Self { - symbols::__empty.clone() + symbols::__empty } + #[inline] pub fn as_str(&self) -> &str { self.repr.as_str() } #[inline] fn select_shard( + storage: &'static Map, s: &str, - ) -> ( - dashmap::RwLockWriteGuard< - 'static, - HashMap, BuildHasherDefault>, - >, - u64, - ) { - let storage = MAP.get_or_init(symbols::prefill); - let hash = { - let mut hasher = std::hash::BuildHasher::build_hasher(storage.hasher()); - s.hash(&mut hasher); - hasher.finish() - }; + ) -> (dashmap::RwLockWriteGuard<'static, RawTable<(Symbol, SharedValue<()>)>>, u64) { + let hash = Self::hash(storage, s); let shard_idx = storage.determine_shard(hash as usize); let shard = &storage.shards()[shard_idx]; (shard.write(), hash) } + #[inline] + fn hash(storage: &'static Map, s: &str) -> u64 { + storage.hasher().hash_one(s) + } + #[cold] fn drop_slow(arc: &Arc>) { - let (mut shard, hash) = Self::select_shard(arc); + let storage = MAP.get_or_init(symbols::prefill); + let (mut shard, hash) = Self::select_shard(storage, arc); match Arc::count(arc) { - 0 => unreachable!(), - 1 => unreachable!(), + 0 | 1 => unreachable!(), 2 => (), _ => { // Another thread has interned another copy @@ -219,19 +222,17 @@ impl Symbol { } } - let ptr = match shard.raw_entry_mut().from_key_hashed_nocheck::(hash, arc.as_ref()) { - RawEntryMut::Occupied(occ) => occ.remove_entry(), - RawEntryMut::Vacant(_) => unreachable!(), - } - .0 - .0; + let s = &***arc; + let (ptr, _) = shard.remove_entry(hash, |(x, _)| x.as_str() == s).unwrap(); + let ptr = ManuallyDrop::new(ptr); // SAFETY: We're dropping, we have ownership. - ManuallyDrop::into_inner(unsafe { ptr.try_as_arc_owned().unwrap() }); + ManuallyDrop::into_inner(unsafe { ptr.repr.try_as_arc_owned().unwrap() }); debug_assert_eq!(Arc::count(arc), 1); // Shrink the backing storage if the shard is less than 50% occupied. if shard.len() * 2 < shard.capacity() { - shard.shrink_to_fit(); + let len = shard.len(); + shard.shrink_to(len, |(x, _)| Self::hash(storage, x.as_str())); } } } @@ -276,22 +277,6 @@ impl fmt::Display for Symbol { } } -// only exists so we can use `from_key_hashed_nocheck` with a &str -#[derive(Debug, PartialEq, Eq)] -struct SymbolProxy(TaggedArcPtr); - -impl Hash for SymbolProxy { - fn hash(&self, state: &mut H) { - self.0.as_str().hash(state); - } -} - -impl Borrow for SymbolProxy { - fn borrow(&self) -> &str { - self.0.as_str() - } -} - #[cfg(test)] mod tests { use super::*; diff --git a/src/tools/rust-analyzer/crates/intern/src/symbol/symbols.rs b/src/tools/rust-analyzer/crates/intern/src/symbol/symbols.rs index 6b77c72cee897..abde48d151271 100644 --- a/src/tools/rust-analyzer/crates/intern/src/symbol/symbols.rs +++ b/src/tools/rust-analyzer/crates/intern/src/symbol/symbols.rs @@ -1,72 +1,47 @@ //! Module defining all known symbols required by the rest of rust-analyzer. #![allow(non_upper_case_globals)] -use std::hash::{BuildHasherDefault, Hash as _, Hasher as _}; +use std::hash::{BuildHasher, BuildHasherDefault}; use dashmap::{DashMap, SharedValue}; use rustc_hash::FxHasher; -use crate::{ - symbol::{SymbolProxy, TaggedArcPtr}, - Symbol, -}; +use crate::{Symbol, symbol::TaggedArcPtr}; macro_rules! define_symbols { (@WITH_NAME: $($alias:ident = $value:literal,)* @PLAIN: $($name:ident,)*) => { - // We define symbols as both `const`s and `static`s because some const code requires const symbols, - // but code from before the transition relies on the lifetime of the predefined symbols and making them - // `const`s make it error (because now they're temporaries). In the future we probably should only - // use consts. - - /// Predefined symbols as `const`s (instead of the default `static`s). - pub mod consts { - use super::{Symbol, TaggedArcPtr}; - - // The strings should be in `static`s so that symbol equality holds. - $( - pub const $name: Symbol = { - static SYMBOL_STR: &str = stringify!($name); - Symbol { repr: TaggedArcPtr::non_arc(&SYMBOL_STR) } - }; - )* - $( - pub const $alias: Symbol = { - static SYMBOL_STR: &str = $value; - Symbol { repr: TaggedArcPtr::non_arc(&SYMBOL_STR) } - }; - )* - } - + // The strings should be in `static`s so that symbol equality holds. $( - pub static $name: Symbol = consts::$name; + pub const $name: Symbol = { + static SYMBOL_STR: &str = stringify!($name); + Symbol { repr: TaggedArcPtr::non_arc(&SYMBOL_STR) } + }; )* $( - pub static $alias: Symbol = consts::$alias; + pub const $alias: Symbol = { + static SYMBOL_STR: &str = $value; + Symbol { repr: TaggedArcPtr::non_arc(&SYMBOL_STR) } + }; )* - pub(super) fn prefill() -> DashMap> { - let mut dashmap_ = >>::with_hasher(BuildHasherDefault::default()); + pub(super) fn prefill() -> DashMap> { + let mut dashmap_ = >>::with_hasher(BuildHasherDefault::default()); - let hash_thing_ = |hasher_: &BuildHasherDefault, it_: &SymbolProxy| { - let mut hasher_ = std::hash::BuildHasher::build_hasher(hasher_); - it_.hash(&mut hasher_); - hasher_.finish() - }; + let hasher_ = dashmap_.hasher().clone(); + let hash_one = |it_: &str| hasher_.hash_one(it_); { $( - - let proxy_ = SymbolProxy($name.repr); - let hash_ = hash_thing_(dashmap_.hasher(), &proxy_); + let s = stringify!($name); + let hash_ = hash_one(s); let shard_idx_ = dashmap_.determine_shard(hash_ as usize); - dashmap_.shards_mut()[shard_idx_].get_mut().raw_entry_mut().from_hash(hash_, |k| k == &proxy_).insert(proxy_, SharedValue::new(())); + dashmap_.shards_mut()[shard_idx_].get_mut().insert(hash_, ($name, SharedValue::new(())), |(x, _)| hash_one(x.as_str())); )* $( - - let proxy_ = SymbolProxy($alias.repr); - let hash_ = hash_thing_(dashmap_.hasher(), &proxy_); + let s = $value; + let hash_ = hash_one(s); let shard_idx_ = dashmap_.determine_shard(hash_ as usize); - dashmap_.shards_mut()[shard_idx_].get_mut().raw_entry_mut().from_hash(hash_, |k| k == &proxy_).insert(proxy_, SharedValue::new(())); + dashmap_.shards_mut()[shard_idx_].get_mut().insert(hash_, ($alias, SharedValue::new(())), |(x, _)| hash_one(x.as_str())); )* } dashmap_ @@ -161,6 +136,7 @@ define_symbols! { bitxor_assign, bitxor, bool, + bootstrap, box_free, Box, boxed, @@ -511,6 +487,7 @@ define_symbols! { unreachable_2021, unreachable, unsafe_cell, + unsafe_pinned, unsize, unstable, usize, @@ -521,4 +498,12 @@ define_symbols! { win64, array, boxed_slice, + completions, + ignore_flyimport, + ignore_flyimport_methods, + ignore_methods, + position, + flags, + precision, + width, } diff --git a/src/tools/rust-analyzer/crates/load-cargo/Cargo.toml b/src/tools/rust-analyzer/crates/load-cargo/Cargo.toml index 23fd50a05644c..91b012e05071f 100644 --- a/src/tools/rust-analyzer/crates/load-cargo/Cargo.toml +++ b/src/tools/rust-analyzer/crates/load-cargo/Cargo.toml @@ -21,7 +21,6 @@ tracing.workspace = true hir-expand.workspace = true ide-db.workspace = true -paths.workspace = true proc-macro-api.workspace = true project-model.workspace = true span.workspace = true diff --git a/src/tools/rust-analyzer/crates/load-cargo/src/lib.rs b/src/tools/rust-analyzer/crates/load-cargo/src/lib.rs index 72ca85c6a2fde..2686a75c7c86b 100644 --- a/src/tools/rust-analyzer/crates/load-cargo/src/lib.rs +++ b/src/tools/rust-analyzer/crates/load-cargo/src/lib.rs @@ -2,25 +2,26 @@ //! for incorporating changes. // Note, don't remove any public api from this. This API is consumed by external tools // to run rust-analyzer as a library. -use std::{collections::hash_map::Entry, iter, mem, path::Path, sync}; +use std::{collections::hash_map::Entry, mem, path::Path, sync}; -use crossbeam_channel::{unbounded, Receiver}; +use crossbeam_channel::{Receiver, unbounded}; use hir_expand::proc_macro::{ ProcMacro, ProcMacroExpander, ProcMacroExpansionError, ProcMacroKind, ProcMacroLoadResult, - ProcMacros, + ProcMacrosBuilder, }; use ide_db::{ - base_db::{CrateGraph, CrateWorkspaceData, Env, SourceRoot, SourceRootId}, - prime_caches, ChangeWithProcMacros, FxHashMap, RootDatabase, + ChangeWithProcMacros, FxHashMap, RootDatabase, + base_db::{CrateGraphBuilder, Env, SourceRoot, SourceRootId}, + prime_caches, }; use itertools::Itertools; use proc_macro_api::{MacroDylib, ProcMacroClient}; use project_model::{CargoConfig, PackageRoot, ProjectManifest, ProjectWorkspace}; use span::Span; use vfs::{ + AbsPath, AbsPathBuf, VfsPath, file_set::FileSetConfig, loader::{Handle, LoadingProgress}, - AbsPath, AbsPathBuf, VfsPath, }; #[derive(Debug)] @@ -65,7 +66,7 @@ pub fn load_workspace_at( pub fn load_workspace( ws: ProjectWorkspace, - extra_env: &FxHashMap, + extra_env: &FxHashMap>, load_config: &LoadCargoConfig, ) -> anyhow::Result<(RootDatabase, vfs::Vfs, Option)> { let (sender, receiver) = unbounded(); @@ -139,7 +140,6 @@ pub fn load_workspace( }); let db = load_crate_graph( - &ws, crate_graph, proc_macros, project_folders.source_root_config, @@ -292,7 +292,7 @@ impl ProjectFolders { }; let file_set_roots = vec![VfsPath::from(ratoml_path.to_owned())]; - let entry = vfs::loader::Entry::Files(vec![ratoml_path.to_owned()]); + let entry = vfs::loader::Entry::Files(vec![ratoml_path]); res.watch.push(res.load.len()); res.load.push(entry); @@ -418,18 +418,15 @@ pub fn load_proc_macro( } fn load_crate_graph( - ws: &ProjectWorkspace, - crate_graph: CrateGraph, - proc_macros: ProcMacros, + crate_graph: CrateGraphBuilder, + proc_macros: ProcMacrosBuilder, source_root_config: SourceRootConfig, vfs: &mut vfs::Vfs, receiver: &Receiver, ) -> RootDatabase { - let ProjectWorkspace { toolchain, target_layout, .. } = ws; - let lru_cap = std::env::var("RA_LRU_CAP").ok().and_then(|it| it.parse::().ok()); let mut db = RootDatabase::new(lru_cap); - let mut analysis_change = ChangeWithProcMacros::new(); + let mut analysis_change = ChangeWithProcMacros::default(); db.enable_proc_attr_macros(); @@ -461,14 +458,7 @@ fn load_crate_graph( let source_roots = source_root_config.partition(vfs); analysis_change.set_roots(source_roots); - let ws_data = crate_graph - .iter() - .zip(iter::repeat(From::from(CrateWorkspaceData { - data_layout: target_layout.clone(), - toolchain: toolchain.clone(), - }))) - .collect(); - analysis_change.set_crate_graph(crate_graph, ws_data); + analysis_change.set_crate_graph(crate_graph); analysis_change.set_proc_macros(proc_macros); db.apply_change(analysis_change); @@ -494,7 +484,7 @@ fn expander_to_proc_macro( } } -#[derive(Debug)] +#[derive(Debug, PartialEq, Eq)] struct Expander(proc_macro_api::ProcMacro); impl ProcMacroExpander for Expander { @@ -506,7 +496,7 @@ impl ProcMacroExpander for Expander { def_site: Span, call_site: Span, mixed_site: Span, - current_dir: Option, + current_dir: String, ) -> Result, ProcMacroExpansionError> { match self.0.expand( subtree.view(), @@ -522,11 +512,15 @@ impl ProcMacroExpander for Expander { Err(err) => Err(ProcMacroExpansionError::System(err.to_string())), } } + + fn eq_dyn(&self, other: &dyn ProcMacroExpander) -> bool { + other.as_any().downcast_ref::().is_some_and(|other| self == other) + } } #[cfg(test)] mod tests { - use ide_db::base_db::SourceDatabase; + use ide_db::base_db::RootQueryDb; use vfs::file_set::FileSetConfigBuilder; use super::*; @@ -543,7 +537,7 @@ mod tests { let (db, _vfs, _proc_macro) = load_workspace_at(path, &cargo_config, &load_cargo_config, &|_| {}).unwrap(); - let n_crates = db.crate_graph().iter().count(); + let n_crates = db.all_crates().len(); // RA has quite a few crates, but the exact count doesn't matter assert!(n_crates > 20); } @@ -633,7 +627,7 @@ mod tests { let fsc = builder.build(); let src = SourceRootConfig { fsc, local_filesets: vec![0, 1, 2, 3] }; let mut vc = src.source_root_parent_map().into_iter().collect::>(); - vc.sort_by(|x, y| x.0 .0.cmp(&y.0 .0)); + vc.sort_by(|x, y| x.0.0.cmp(&y.0.0)); assert_eq!(vc, vec![(SourceRootId(2), SourceRootId(1)), (SourceRootId(3), SourceRootId(1))]) } @@ -648,7 +642,7 @@ mod tests { let fsc = builder.build(); let src = SourceRootConfig { fsc, local_filesets: vec![0, 1, 3] }; let mut vc = src.source_root_parent_map().into_iter().collect::>(); - vc.sort_by(|x, y| x.0 .0.cmp(&y.0 .0)); + vc.sort_by(|x, y| x.0.0.cmp(&y.0.0)); assert_eq!(vc, vec![(SourceRootId(3), SourceRootId(1)),]) } @@ -663,7 +657,7 @@ mod tests { let fsc = builder.build(); let src = SourceRootConfig { fsc, local_filesets: vec![0, 1, 3] }; let mut vc = src.source_root_parent_map().into_iter().collect::>(); - vc.sort_by(|x, y| x.0 .0.cmp(&y.0 .0)); + vc.sort_by(|x, y| x.0.0.cmp(&y.0.0)); assert_eq!(vc, vec![(SourceRootId(3), SourceRootId(1)),]) } @@ -679,7 +673,7 @@ mod tests { let fsc = builder.build(); let src = SourceRootConfig { fsc, local_filesets: vec![0, 1] }; let mut vc = src.source_root_parent_map().into_iter().collect::>(); - vc.sort_by(|x, y| x.0 .0.cmp(&y.0 .0)); + vc.sort_by(|x, y| x.0.0.cmp(&y.0.0)); assert_eq!(vc, vec![(SourceRootId(1), SourceRootId(0)),]) } @@ -695,7 +689,7 @@ mod tests { let fsc = builder.build(); let src = SourceRootConfig { fsc, local_filesets: vec![0, 1] }; let mut vc = src.source_root_parent_map().into_iter().collect::>(); - vc.sort_by(|x, y| x.0 .0.cmp(&y.0 .0)); + vc.sort_by(|x, y| x.0.0.cmp(&y.0.0)); assert_eq!(vc, vec![(SourceRootId(1), SourceRootId(0)),]) } diff --git a/src/tools/rust-analyzer/crates/mbe/Cargo.toml b/src/tools/rust-analyzer/crates/mbe/Cargo.toml index e6fbb298ebdb7..f3ab093bae08a 100644 --- a/src/tools/rust-analyzer/crates/mbe/Cargo.toml +++ b/src/tools/rust-analyzer/crates/mbe/Cargo.toml @@ -12,15 +12,13 @@ rust-version.workspace = true [lib] [dependencies] -cov-mark = "2.0.0-pre.1" +cov-mark = "2.0.0" rustc-hash.workspace = true smallvec.workspace = true -tracing.workspace = true arrayvec.workspace = true ra-ap-rustc_lexer.workspace = true # local deps -syntax.workspace = true parser.workspace = true tt.workspace = true stdx.workspace = true @@ -31,9 +29,10 @@ syntax-bridge.workspace = true [dev-dependencies] test-utils.workspace = true expect-test.workspace = true +syntax.workspace = true [features] -in-rust-tree = ["parser/in-rust-tree", "tt/in-rust-tree", "syntax/in-rust-tree"] +in-rust-tree = ["parser/in-rust-tree", "tt/in-rust-tree"] [lints] workspace = true diff --git a/src/tools/rust-analyzer/crates/mbe/src/benchmark.rs b/src/tools/rust-analyzer/crates/mbe/src/benchmark.rs index 89c300300379c..db75dceae1cb9 100644 --- a/src/tools/rust-analyzer/crates/mbe/src/benchmark.rs +++ b/src/tools/rust-analyzer/crates/mbe/src/benchmark.rs @@ -5,18 +5,19 @@ use rustc_hash::FxHashMap; use span::{Edition, Span}; use stdx::itertools::Itertools; use syntax::{ - ast::{self, HasName}, AstNode, + ast::{self, HasName}, }; use syntax_bridge::{ - dummy_test_span_utils::{DummyTestSpanMap, DUMMY}, - syntax_node_to_token_tree, DocCommentDesugarMode, + DocCommentDesugarMode, + dummy_test_span_utils::{DUMMY, DummyTestSpanMap}, + syntax_node_to_token_tree, }; use test_utils::{bench, bench_fixture, skip_slow_tests}; use crate::{ - parser::{MetaVarKind, Op, RepeatKind, Separator}, DeclarativeMacro, + parser::{MetaVarKind, Op, RepeatKind, Separator}, }; #[test] @@ -53,7 +54,7 @@ fn benchmark_expand_macro_rules() { .map(|(id, tt)| { let res = rules[&id].expand(&tt, |_| (), DUMMY, Edition::CURRENT); assert!(res.err.is_none()); - res.value.0 .0.len() + res.value.0.0.len() }) .sum() }; diff --git a/src/tools/rust-analyzer/crates/mbe/src/expander.rs b/src/tools/rust-analyzer/crates/mbe/src/expander.rs index 5539a88c707d1..f910f9f9d753f 100644 --- a/src/tools/rust-analyzer/crates/mbe/src/expander.rs +++ b/src/tools/rust-analyzer/crates/mbe/src/expander.rs @@ -9,7 +9,7 @@ use intern::Symbol; use rustc_hash::FxHashMap; use span::{Edition, Span}; -use crate::{parser::MetaVarKind, ExpandError, ExpandErrorKind, ExpandResult, MatchedArmIndex}; +use crate::{ExpandError, ExpandErrorKind, ExpandResult, MatchedArmIndex, parser::MetaVarKind}; pub(crate) fn expand_rules( rules: &[crate::Rule], diff --git a/src/tools/rust-analyzer/crates/mbe/src/expander/matcher.rs b/src/tools/rust-analyzer/crates/mbe/src/expander/matcher.rs index b7f25aa380961..940aaacb02ed5 100644 --- a/src/tools/rust-analyzer/crates/mbe/src/expander/matcher.rs +++ b/src/tools/rust-analyzer/crates/mbe/src/expander/matcher.rs @@ -61,19 +61,19 @@ use std::{rc::Rc, sync::Arc}; -use intern::{sym, Symbol}; -use smallvec::{smallvec, SmallVec}; +use intern::{Symbol, sym}; +use smallvec::{SmallVec, smallvec}; use span::{Edition, Span}; use tt::{ - iter::{TtElement, TtIter}, DelimSpan, + iter::{TtElement, TtIter}, }; use crate::{ + ExpandError, ExpandErrorKind, MetaTemplate, ValueResult, expander::{Binding, Bindings, ExpandResult, Fragment}, expect_fragment, parser::{ExprKind, MetaVarKind, Op, RepeatKind, Separator}, - ExpandError, ExpandErrorKind, MetaTemplate, ValueResult, }; impl<'a> Bindings<'a> { diff --git a/src/tools/rust-analyzer/crates/mbe/src/expander/transcriber.rs b/src/tools/rust-analyzer/crates/mbe/src/expander/transcriber.rs index 7710ea7938951..ec277ba72e90e 100644 --- a/src/tools/rust-analyzer/crates/mbe/src/expander/transcriber.rs +++ b/src/tools/rust-analyzer/crates/mbe/src/expander/transcriber.rs @@ -1,14 +1,14 @@ //! Transcriber takes a template, like `fn $ident() {}`, a set of bindings like //! `$ident => foo`, interpolates variables in the template, to get `fn foo() {}` -use intern::{sym, Symbol}; +use intern::{Symbol, sym}; use span::{Edition, Span}; -use tt::{iter::TtElement, Delimiter, TopSubtreeBuilder}; +use tt::{Delimiter, TopSubtreeBuilder, iter::TtElement}; use crate::{ + ExpandError, ExpandErrorKind, ExpandResult, MetaTemplate, expander::{Binding, Bindings, Fragment}, parser::{ConcatMetaVarExprElem, MetaVarKind, Op, RepeatKind, Separator}, - ExpandError, ExpandErrorKind, ExpandResult, MetaTemplate, }; impl<'t> Bindings<'t> { @@ -80,7 +80,7 @@ impl<'t> Bindings<'t> { | MetaVarKind::Expr(_) | MetaVarKind::Ident => { builder.push(tt::Leaf::Ident(tt::Ident { - sym: sym::missing.clone(), + sym: sym::missing, span, is_raw: tt::IdentIsRaw::No, })); @@ -93,7 +93,7 @@ impl<'t> Bindings<'t> { spacing: tt::Spacing::Joint, }), tt::Leaf::Ident(tt::Ident { - sym: sym::missing.clone(), + sym: sym::missing, span, is_raw: tt::IdentIsRaw::No, }), @@ -101,7 +101,7 @@ impl<'t> Bindings<'t> { } MetaVarKind::Literal => { builder.push(tt::Leaf::Ident(tt::Ident { - sym: sym::missing.clone(), + sym: sym::missing, span, is_raw: tt::IdentIsRaw::No, })); @@ -210,8 +210,11 @@ fn expand_subtree( } Op::Ignore { name, id } => { // Expand the variable, but ignore the result. This registers the repetition count. - // FIXME: Any emitted errors are dropped. - let _ = ctx.bindings.get_fragment(name, *id, &mut ctx.nesting, marker); + let e = ctx.bindings.get_fragment(name, *id, &mut ctx.nesting, marker).err(); + // FIXME: The error gets dropped if there were any previous errors. + // This should be reworked in a way where the errors can be combined + // and reported rather than storing the first error encountered. + err = err.or(e); } Op::Index { depth } => { let index = @@ -239,9 +242,7 @@ fn expand_subtree( let mut binding = match ctx.bindings.get(name, ctx.call_site) { Ok(b) => b, Err(e) => { - if err.is_none() { - err = Some(e); - } + err = err.or(Some(e)); continue; } }; @@ -331,7 +332,10 @@ fn expand_subtree( } _ => { if err.is_none() { - err = Some(ExpandError::binding_error(var.span, "metavariables of `${concat(..)}` must be of type `ident`, `literal` or `tt`")) + err = Some(ExpandError::binding_error( + var.span, + "metavariables of `${concat(..)}` must be of type `ident`, `literal` or `tt`", + )) } continue; } @@ -386,8 +390,13 @@ fn expand_var( match ctx.bindings.get_fragment(v, id, &mut ctx.nesting, marker) { Ok(fragment) => { match fragment { - Fragment::Tokens(tt) => builder.extend_with_tt(tt.strip_invisible()), - Fragment::TokensOwned(tt) => builder.extend_with_tt(tt.view().strip_invisible()), + // rustc spacing is not like ours. Ours is like proc macros', it dictates how puncts will actually be joined. + // rustc uses them mostly for pretty printing. So we have to deviate a bit from what rustc does here. + // Basically, a metavariable can never be joined with whatever after it. + Fragment::Tokens(tt) => builder.extend_with_tt_alone(tt.strip_invisible()), + Fragment::TokensOwned(tt) => { + builder.extend_with_tt_alone(tt.view().strip_invisible()) + } Fragment::Expr(sub) => { let sub = sub.strip_invisible(); let mut span = id; @@ -399,7 +408,7 @@ fn expand_var( if wrap_in_parens { builder.open(tt::DelimiterKind::Parenthesis, span); } - builder.extend_with_tt(sub); + builder.extend_with_tt_alone(sub); if wrap_in_parens { builder.close(span); } diff --git a/src/tools/rust-analyzer/crates/mbe/src/lib.rs b/src/tools/rust-analyzer/crates/mbe/src/lib.rs index bebd29ef74700..9f9fa36abd46a 100644 --- a/src/tools/rust-analyzer/crates/mbe/src/lib.rs +++ b/src/tools/rust-analyzer/crates/mbe/src/lib.rs @@ -21,10 +21,10 @@ mod benchmark; #[cfg(test)] mod tests; -use span::{Edition, Span, SyntaxContextId}; +use span::{Edition, Span, SyntaxContext}; use syntax_bridge::to_parser_input; -use tt::iter::TtIter; use tt::DelimSpan; +use tt::iter::TtIter; use std::fmt; use std::sync::Arc; @@ -149,7 +149,7 @@ impl DeclarativeMacro { /// The old, `macro_rules! m {}` flavor. pub fn parse_macro_rules( tt: &tt::TopSubtree, - ctx_edition: impl Copy + Fn(SyntaxContextId) -> Edition, + ctx_edition: impl Copy + Fn(SyntaxContext) -> Edition, ) -> DeclarativeMacro { // Note: this parsing can be implemented using mbe machinery itself, by // matching against `$($lhs:tt => $rhs:tt);*` pattern, but implementing @@ -189,7 +189,7 @@ impl DeclarativeMacro { pub fn parse_macro2( args: Option<&tt::TopSubtree>, body: &tt::TopSubtree, - ctx_edition: impl Copy + Fn(SyntaxContextId) -> Edition, + ctx_edition: impl Copy + Fn(SyntaxContext) -> Edition, ) -> DeclarativeMacro { let mut rules = Vec::new(); let mut err = None; @@ -262,7 +262,7 @@ impl DeclarativeMacro { impl Rule { fn parse( - edition: impl Copy + Fn(SyntaxContextId) -> Edition, + edition: impl Copy + Fn(SyntaxContext) -> Edition, src: &mut TtIter<'_, Span>, ) -> Result { let (_, lhs) = diff --git a/src/tools/rust-analyzer/crates/mbe/src/parser.rs b/src/tools/rust-analyzer/crates/mbe/src/parser.rs index 0a670053c9882..fbc353d610348 100644 --- a/src/tools/rust-analyzer/crates/mbe/src/parser.rs +++ b/src/tools/rust-analyzer/crates/mbe/src/parser.rs @@ -4,9 +4,12 @@ use std::sync::Arc; use arrayvec::ArrayVec; -use intern::{sym, Symbol}; -use span::{Edition, Span, SyntaxContextId}; -use tt::iter::{TtElement, TtIter}; +use intern::{Symbol, sym}; +use span::{Edition, Span, SyntaxContext}; +use tt::{ + MAX_GLUED_PUNCT_LEN, + iter::{TtElement, TtIter}, +}; use crate::ParseError; @@ -28,14 +31,14 @@ pub(crate) struct MetaTemplate(pub(crate) Box<[Op]>); impl MetaTemplate { pub(crate) fn parse_pattern( - edition: impl Copy + Fn(SyntaxContextId) -> Edition, + edition: impl Copy + Fn(SyntaxContext) -> Edition, pattern: TtIter<'_, Span>, ) -> Result { MetaTemplate::parse(edition, pattern, Mode::Pattern) } pub(crate) fn parse_template( - edition: impl Copy + Fn(SyntaxContextId) -> Edition, + edition: impl Copy + Fn(SyntaxContext) -> Edition, template: TtIter<'_, Span>, ) -> Result { MetaTemplate::parse(edition, template, Mode::Template) @@ -46,7 +49,7 @@ impl MetaTemplate { } fn parse( - edition: impl Copy + Fn(SyntaxContextId) -> Edition, + edition: impl Copy + Fn(SyntaxContext) -> Edition, mut src: TtIter<'_, Span>, mode: Mode, ) -> Result { @@ -96,7 +99,7 @@ pub(crate) enum Op { delimiter: tt::Delimiter, }, Literal(tt::Literal), - Punct(Box, 3>>), + Punct(Box, MAX_GLUED_PUNCT_LEN>>), Ident(tt::Ident), } @@ -151,7 +154,7 @@ pub(crate) enum MetaVarKind { pub(crate) enum Separator { Literal(tt::Literal), Ident(tt::Ident), - Puncts(ArrayVec, 3>), + Puncts(ArrayVec, MAX_GLUED_PUNCT_LEN>), } // Note that when we compare a Separator, we just care about its textual value. @@ -179,7 +182,7 @@ enum Mode { } fn next_op( - edition: impl Copy + Fn(SyntaxContextId) -> Edition, + edition: impl Copy + Fn(SyntaxContext) -> Edition, first_peeked: TtElement<'_, Span>, src: &mut TtIter<'_, Span>, mode: Mode, @@ -194,7 +197,7 @@ fn next_op( let mut res = ArrayVec::new(); res.push(*p); Box::new(res) - })) + })); } Some(it) => it, }; @@ -212,20 +215,20 @@ fn next_op( Mode::Pattern => { return Err(ParseError::unexpected( "`${}` metavariable expressions are not allowed in matchers", - )) + )); } }, _ => { return Err(ParseError::expected( "expected `$()` repetition or `${}` expression", - )) + )); } }, TtElement::Leaf(leaf) => match leaf { tt::Leaf::Ident(ident) if ident.sym == sym::crate_ => { // We simply produce identifier `$crate` here. And it will be resolved when lowering ast to Path. Op::Ident(tt::Ident { - sym: sym::dollar_crate.clone(), + sym: sym::dollar_crate, span: ident.span, is_raw: tt::IdentIsRaw::No, }) @@ -246,7 +249,7 @@ fn next_op( Mode::Pattern => { return Err(ParseError::unexpected( "`$$` is not allowed on the pattern side", - )) + )); } Mode::Template => Op::Punct({ let mut res = ArrayVec::new(); @@ -255,7 +258,7 @@ fn next_op( }), }, tt::Leaf::Punct(_) | tt::Leaf::Literal(_) => { - return Err(ParseError::expected("expected ident")) + return Err(ParseError::expected("expected ident")); } }, } @@ -287,7 +290,7 @@ fn next_op( } fn eat_fragment_kind( - edition: impl Copy + Fn(SyntaxContextId) -> Edition, + edition: impl Copy + Fn(SyntaxContext) -> Edition, src: &mut TtIter<'_, Span>, mode: Mode, ) -> Result, ParseError> { @@ -348,7 +351,7 @@ fn parse_repeat(src: &mut TtIter<'_, Span>) -> Result<(Option, Repeat }; match tt { tt::Leaf::Ident(_) | tt::Leaf::Literal(_) if has_sep => { - return Err(ParseError::InvalidRepeat) + return Err(ParseError::InvalidRepeat); } tt::Leaf::Ident(ident) => separator = Separator::Ident(ident.clone()), tt::Leaf::Literal(lit) => separator = Separator::Literal(lit.clone()), diff --git a/src/tools/rust-analyzer/crates/mbe/src/tests.rs b/src/tools/rust-analyzer/crates/mbe/src/tests.rs index 4a73b6fa05a88..a5672e4e0504b 100644 --- a/src/tools/rust-analyzer/crates/mbe/src/tests.rs +++ b/src/tools/rust-analyzer/crates/mbe/src/tests.rs @@ -3,7 +3,7 @@ // FIXME: Move more of the nameres independent tests from // crates\hir-def\src\macro_expansion_tests\mod.rs to this use expect_test::expect; -use span::{Edition, EditionedFileId, ErasedFileAstId, FileId, Span, SpanAnchor, SyntaxContextId}; +use span::{Edition, EditionedFileId, ErasedFileAstId, FileId, Span, SpanAnchor, SyntaxContext}; use stdx::format_to; use tt::{TextRange, TextSize}; @@ -26,7 +26,7 @@ fn check_( file_id: EditionedFileId::new(FileId::from_raw(0), def_edition), ast_id: ErasedFileAstId::from_raw(0), }, - SyntaxContextId::root(Edition::CURRENT), + SyntaxContext::root(Edition::CURRENT), decl, ) .unwrap(); @@ -42,7 +42,7 @@ fn check_( let arg_tt = syntax_bridge::parse_to_token_tree( call_edition, call_anchor, - SyntaxContextId::root(Edition::CURRENT), + SyntaxContext::root(Edition::CURRENT), arg, ) .unwrap(); @@ -52,7 +52,7 @@ fn check_( Span { range: TextRange::up_to(TextSize::of(arg)), anchor: call_anchor, - ctx: SyntaxContextId::root(Edition::CURRENT), + ctx: SyntaxContext::root(Edition::CURRENT), }, def_edition, ); @@ -109,8 +109,8 @@ fn unbalanced_brace() { "#, r#""#, expect![[r#" - SUBTREE $$ 1:0@0..0#2 1:0@0..0#2 - SUBTREE {} 0:0@9..10#2 0:0@11..12#2 + SUBTREE $$ 1:0@0..0#ROOT2024 1:0@0..0#ROOT2024 + SUBTREE {} 0:0@9..10#ROOT2024 0:0@11..12#ROOT2024 {}"#]], ); @@ -132,25 +132,25 @@ fn token_mapping_smoke_test() { struct MyTraitMap2 "#, expect![[r#" - SUBTREE $$ 1:0@0..20#2 1:0@0..20#2 - IDENT struct 0:0@34..40#2 - IDENT MyTraitMap2 1:0@8..19#2 - SUBTREE {} 0:0@48..49#2 0:0@100..101#2 - IDENT map 0:0@58..61#2 - PUNCH : [alone] 0:0@61..62#2 - PUNCH : [joint] 0:0@63..64#2 - PUNCH : [alone] 0:0@64..65#2 - IDENT std 0:0@65..68#2 - PUNCH : [joint] 0:0@68..69#2 - PUNCH : [alone] 0:0@69..70#2 - IDENT collections 0:0@70..81#2 - PUNCH : [joint] 0:0@81..82#2 - PUNCH : [alone] 0:0@82..83#2 - IDENT HashSet 0:0@83..90#2 - PUNCH < [alone] 0:0@90..91#2 - SUBTREE () 0:0@91..92#2 0:0@92..93#2 - PUNCH > [joint] 0:0@93..94#2 - PUNCH , [alone] 0:0@94..95#2 + SUBTREE $$ 1:0@0..20#ROOT2024 1:0@0..20#ROOT2024 + IDENT struct 0:0@34..40#ROOT2024 + IDENT MyTraitMap2 1:0@8..19#ROOT2024 + SUBTREE {} 0:0@48..49#ROOT2024 0:0@100..101#ROOT2024 + IDENT map 0:0@58..61#ROOT2024 + PUNCH : [alone] 0:0@61..62#ROOT2024 + PUNCH : [joint] 0:0@63..64#ROOT2024 + PUNCH : [alone] 0:0@64..65#ROOT2024 + IDENT std 0:0@65..68#ROOT2024 + PUNCH : [joint] 0:0@68..69#ROOT2024 + PUNCH : [alone] 0:0@69..70#ROOT2024 + IDENT collections 0:0@70..81#ROOT2024 + PUNCH : [joint] 0:0@81..82#ROOT2024 + PUNCH : [alone] 0:0@82..83#ROOT2024 + IDENT HashSet 0:0@83..90#ROOT2024 + PUNCH < [alone] 0:0@90..91#ROOT2024 + SUBTREE () 0:0@91..92#ROOT2024 0:0@92..93#ROOT2024 + PUNCH > [joint] 0:0@93..94#ROOT2024 + PUNCH , [alone] 0:0@94..95#ROOT2024 struct MyTraitMap2 { map: ::std::collections::HashSet<()>, @@ -179,28 +179,28 @@ fn main() { } "#, expect![[r#" - SUBTREE $$ 1:0@0..63#2 1:0@0..63#2 - IDENT fn 1:0@1..3#2 - IDENT main 1:0@4..8#2 - SUBTREE () 1:0@8..9#2 1:0@9..10#2 - SUBTREE {} 1:0@11..12#2 1:0@61..62#2 - LITERAL Integer 1 1:0@17..18#2 - PUNCH ; [alone] 1:0@18..19#2 - LITERAL Float 1.0 1:0@24..27#2 - PUNCH ; [alone] 1:0@27..28#2 - SUBTREE () 1:0@33..34#2 1:0@39..40#2 - SUBTREE () 1:0@34..35#2 1:0@37..38#2 - LITERAL Integer 1 1:0@35..36#2 - PUNCH , [alone] 1:0@36..37#2 - PUNCH , [alone] 1:0@38..39#2 - PUNCH . [alone] 1:0@40..41#2 - LITERAL Float 0.0 1:0@41..44#2 - PUNCH ; [alone] 1:0@44..45#2 - IDENT let 1:0@50..53#2 - IDENT x 1:0@54..55#2 - PUNCH = [alone] 1:0@56..57#2 - LITERAL Integer 1 1:0@58..59#2 - PUNCH ; [alone] 1:0@59..60#2 + SUBTREE $$ 1:0@0..63#ROOT2024 1:0@0..63#ROOT2024 + IDENT fn 1:0@1..3#ROOT2024 + IDENT main 1:0@4..8#ROOT2024 + SUBTREE () 1:0@8..9#ROOT2024 1:0@9..10#ROOT2024 + SUBTREE {} 1:0@11..12#ROOT2024 1:0@61..62#ROOT2024 + LITERAL Integer 1 1:0@17..18#ROOT2024 + PUNCH ; [alone] 1:0@18..19#ROOT2024 + LITERAL Float 1.0 1:0@24..27#ROOT2024 + PUNCH ; [alone] 1:0@27..28#ROOT2024 + SUBTREE () 1:0@33..34#ROOT2024 1:0@39..40#ROOT2024 + SUBTREE () 1:0@34..35#ROOT2024 1:0@37..38#ROOT2024 + LITERAL Integer 1 1:0@35..36#ROOT2024 + PUNCH , [alone] 1:0@36..37#ROOT2024 + PUNCH , [alone] 1:0@38..39#ROOT2024 + PUNCH . [alone] 1:0@40..41#ROOT2024 + LITERAL Float 0.0 1:0@41..44#ROOT2024 + PUNCH ; [alone] 1:0@44..45#ROOT2024 + IDENT let 1:0@50..53#ROOT2024 + IDENT x 1:0@54..55#ROOT2024 + PUNCH = [alone] 1:0@56..57#ROOT2024 + LITERAL Integer 1 1:0@58..59#ROOT2024 + PUNCH ; [alone] 1:0@59..60#ROOT2024 fn main(){ 1; @@ -226,14 +226,14 @@ fn expr_2021() { const { 1 }, "#, expect![[r#" - SUBTREE $$ 1:0@0..25#2 1:0@0..25#2 - IDENT _ 1:0@5..6#2 - PUNCH ; [joint] 0:0@36..37#2 - SUBTREE () 0:0@34..35#2 0:0@34..35#2 - IDENT const 1:0@12..17#2 - SUBTREE {} 1:0@18..19#2 1:0@22..23#2 - LITERAL Integer 1 1:0@20..21#2 - PUNCH ; [alone] 0:0@39..40#2 + SUBTREE $$ 1:0@0..25#ROOT2024 1:0@0..25#ROOT2024 + IDENT _ 1:0@5..6#ROOT2024 + PUNCH ; [joint] 0:0@36..37#ROOT2024 + SUBTREE () 0:0@34..35#ROOT2024 0:0@34..35#ROOT2024 + IDENT const 1:0@12..17#ROOT2024 + SUBTREE {} 1:0@18..19#ROOT2024 1:0@22..23#ROOT2024 + LITERAL Integer 1 1:0@20..21#ROOT2024 + PUNCH ; [alone] 0:0@39..40#ROOT2024 _; (const { @@ -254,13 +254,13 @@ fn expr_2021() { expect![[r#" ExpandError { inner: ( - 1:0@5..6#2, + 1:0@5..6#ROOT2024, NoMatchingRule, ), } - SUBTREE $$ 1:0@0..8#2 1:0@0..8#2 - PUNCH ; [alone] 0:0@39..40#2 + SUBTREE $$ 1:0@0..8#ROOT2024 1:0@0..8#ROOT2024 + PUNCH ; [alone] 0:0@39..40#ROOT2024 ;"#]], ); @@ -278,13 +278,13 @@ fn expr_2021() { expect![[r#" ExpandError { inner: ( - 1:0@5..10#2, + 1:0@5..10#ROOT2024, NoMatchingRule, ), } - SUBTREE $$ 1:0@0..18#2 1:0@0..18#2 - PUNCH ; [alone] 0:0@39..40#2 + SUBTREE $$ 1:0@0..18#ROOT2024 1:0@0..18#ROOT2024 + PUNCH ; [alone] 0:0@39..40#ROOT2024 ;"#]], ); @@ -304,26 +304,26 @@ fn expr_2021() { break 'foo bar, "#, expect![[r#" - SUBTREE $$ 1:0@0..76#2 1:0@0..76#2 - LITERAL Integer 4 1:0@5..6#2 - PUNCH ; [joint] 0:0@41..42#2 - LITERAL Str literal 1:0@12..21#2 - PUNCH ; [joint] 0:0@41..42#2 - SUBTREE () 0:0@39..40#2 0:0@39..40#2 - IDENT funcall 1:0@27..34#2 - SUBTREE () 1:0@34..35#2 1:0@35..36#2 - PUNCH ; [joint] 0:0@41..42#2 - SUBTREE () 0:0@39..40#2 0:0@39..40#2 - IDENT future 1:0@42..48#2 - PUNCH . [alone] 1:0@48..49#2 - IDENT await 1:0@49..54#2 - PUNCH ; [joint] 0:0@41..42#2 - SUBTREE () 0:0@39..40#2 0:0@39..40#2 - IDENT break 1:0@60..65#2 - PUNCH ' [joint] 1:0@66..67#2 - IDENT foo 1:0@67..70#2 - IDENT bar 1:0@71..74#2 - PUNCH ; [alone] 0:0@44..45#2 + SUBTREE $$ 1:0@0..76#ROOT2024 1:0@0..76#ROOT2024 + LITERAL Integer 4 1:0@5..6#ROOT2024 + PUNCH ; [joint] 0:0@41..42#ROOT2024 + LITERAL Str literal 1:0@12..21#ROOT2024 + PUNCH ; [joint] 0:0@41..42#ROOT2024 + SUBTREE () 0:0@39..40#ROOT2024 0:0@39..40#ROOT2024 + IDENT funcall 1:0@27..34#ROOT2024 + SUBTREE () 1:0@34..35#ROOT2024 1:0@35..36#ROOT2024 + PUNCH ; [joint] 0:0@41..42#ROOT2024 + SUBTREE () 0:0@39..40#ROOT2024 0:0@39..40#ROOT2024 + IDENT future 1:0@42..48#ROOT2024 + PUNCH . [alone] 1:0@48..49#ROOT2024 + IDENT await 1:0@49..54#ROOT2024 + PUNCH ; [joint] 0:0@41..42#ROOT2024 + SUBTREE () 0:0@39..40#ROOT2024 0:0@39..40#ROOT2024 + IDENT break 1:0@60..65#ROOT2024 + PUNCH ' [joint] 1:0@66..67#ROOT2024 + IDENT foo 1:0@67..70#ROOT2024 + IDENT bar 1:0@71..74#ROOT2024 + PUNCH ; [alone] 0:0@44..45#ROOT2024 4; "literal"; @@ -345,13 +345,13 @@ fn expr_2021() { expect![[r#" ExpandError { inner: ( - 1:0@5..6#2, + 1:0@5..6#ROOT2024, NoMatchingRule, ), } - SUBTREE $$ 1:0@0..8#2 1:0@0..8#2 - PUNCH ; [alone] 0:0@44..45#2 + SUBTREE $$ 1:0@0..8#ROOT2024 1:0@0..8#ROOT2024 + PUNCH ; [alone] 0:0@44..45#ROOT2024 ;"#]], ); diff --git a/src/tools/rust-analyzer/crates/parser/Cargo.toml b/src/tools/rust-analyzer/crates/parser/Cargo.toml index a36a39dbee6ce..c80510eedfb8a 100644 --- a/src/tools/rust-analyzer/crates/parser/Cargo.toml +++ b/src/tools/rust-analyzer/crates/parser/Cargo.toml @@ -14,12 +14,13 @@ rust-version.workspace = true [dependencies] drop_bomb = "0.1.5" ra-ap-rustc_lexer.workspace = true +rustc-literal-escaper.workspace = true tracing = { workspace = true, optional = true } edition.workspace = true [dev-dependencies] -expect-test = "1.4.0" +expect-test = "1.5.1" stdx.workspace = true diff --git a/src/tools/rust-analyzer/crates/parser/src/event.rs b/src/tools/rust-analyzer/crates/parser/src/event.rs index b197b086f377a..5be9cb2a24699 100644 --- a/src/tools/rust-analyzer/crates/parser/src/event.rs +++ b/src/tools/rust-analyzer/crates/parser/src/event.rs @@ -5,8 +5,8 @@ use std::mem; use crate::{ - output::Output, SyntaxKind::{self, *}, + output::Output, }; /// `Parser` produces a flat list of `Event`s. diff --git a/src/tools/rust-analyzer/crates/parser/src/grammar.rs b/src/tools/rust-analyzer/crates/parser/src/grammar.rs index fe6b904bd889a..8ddf50db043a6 100644 --- a/src/tools/rust-analyzer/crates/parser/src/grammar.rs +++ b/src/tools/rust-analyzer/crates/parser/src/grammar.rs @@ -39,9 +39,9 @@ mod patterns; mod types; use crate::{ - parser::{CompletedMarker, Marker, Parser}, SyntaxKind::{self, *}, - TokenSet, T, + T, TokenSet, + parser::{CompletedMarker, Marker, Parser}, }; pub(crate) mod entry { diff --git a/src/tools/rust-analyzer/crates/parser/src/grammar/expressions.rs b/src/tools/rust-analyzer/crates/parser/src/grammar/expressions.rs index fe1316c9bfde3..0ac25da329416 100644 --- a/src/tools/rust-analyzer/crates/parser/src/grammar/expressions.rs +++ b/src/tools/rust-analyzer/crates/parser/src/grammar/expressions.rs @@ -4,8 +4,8 @@ use crate::grammar::attributes::ATTRIBUTE_FIRST; use super::*; +pub(super) use atom::{EXPR_RECOVERY_SET, LITERAL_FIRST, literal}; pub(crate) use atom::{block_expr, match_arm_list}; -pub(super) use atom::{literal, LITERAL_FIRST}; #[derive(PartialEq, Eq)] pub(super) enum Semicolon { @@ -58,7 +58,7 @@ pub(super) fn stmt(p: &mut Parser<'_>, semicolon: Semicolon) { // } attributes::outer_attrs(p); - if p.at(T![let]) { + if p.at(T![let]) || (p.at(T![super]) && p.nth_at(1, T![let])) { let_stmt(p, semicolon); m.complete(p, LET_STMT); return; @@ -113,8 +113,9 @@ pub(super) fn stmt(p: &mut Parser<'_>, semicolon: Semicolon) { } // test let_stmt -// fn f() { let x: i32 = 92; } +// fn f() { let x: i32 = 92; super let y; super::foo; } pub(super) fn let_stmt(p: &mut Parser<'_>, with_semi: Semicolon) { + p.eat(T![super]); p.bump(T![let]); patterns::pattern(p); if p.at(T![:]) { diff --git a/src/tools/rust-analyzer/crates/parser/src/grammar/expressions/atom.rs b/src/tools/rust-analyzer/crates/parser/src/grammar/expressions/atom.rs index 407320e1d0825..5faf6fc2759e1 100644 --- a/src/tools/rust-analyzer/crates/parser/src/grammar/expressions/atom.rs +++ b/src/tools/rust-analyzer/crates/parser/src/grammar/expressions/atom.rs @@ -46,7 +46,6 @@ pub(super) const ATOM_EXPR_FIRST: TokenSet = T!['['], T![|], T![async], - T![box], T![break], T![const], T![continue], @@ -68,7 +67,8 @@ pub(super) const ATOM_EXPR_FIRST: TokenSet = LIFETIME_IDENT, ])); -pub(super) const EXPR_RECOVERY_SET: TokenSet = TokenSet::new(&[T![')'], T![']']]); +pub(in crate::grammar) const EXPR_RECOVERY_SET: TokenSet = + TokenSet::new(&[T!['}'], T![')'], T![']'], T![,]]); pub(super) fn atom_expr( p: &mut Parser<'_>, @@ -258,6 +258,15 @@ fn builtin_expr(p: &mut Parser<'_>) -> Option { p.expect(T!['(']); type_(p); p.expect(T![,]); + // Due to our incomplete handling of macro groups, especially + // those with empty delimiters, we wrap `expr` fragments in + // parentheses sometimes. Since `offset_of` is a macro, and takes + // `expr`, the field names could be wrapped in parentheses. + let wrapped_in_parens = p.eat(T!['(']); + // test offset_of_parens + // fn foo() { + // builtin#offset_of(Foo, (bar.baz.0)); + // } while !p.at(EOF) && !p.at(T![')']) { name_ref_mod_path_or_index(p); if !p.at(T![')']) { @@ -265,6 +274,9 @@ fn builtin_expr(p: &mut Parser<'_>) -> Option { } } p.expect(T![')']); + if wrapped_in_parens { + p.expect(T![')']); + } Some(m.complete(p, OFFSET_OF_EXPR)) } else if p.at_contextual_kw(T![format_args]) { p.bump_remap(T![format_args]); diff --git a/src/tools/rust-analyzer/crates/parser/src/grammar/items.rs b/src/tools/rust-analyzer/crates/parser/src/grammar/items.rs index 0ac11371c5436..b9f4866574a6c 100644 --- a/src/tools/rust-analyzer/crates/parser/src/grammar/items.rs +++ b/src/tools/rust-analyzer/crates/parser/src/grammar/items.rs @@ -32,6 +32,9 @@ pub(super) const ITEM_RECOVERY_SET: TokenSet = TokenSet::new(&[ T![impl], T![trait], T![const], + T![async], + T![unsafe], + T![extern], T![static], T![let], T![mod], @@ -132,7 +135,7 @@ pub(super) fn opt_item(p: &mut Parser<'_>, m: Marker, is_in_extern: bool) -> Res has_mods = true; } - // test_err gen_fn + // test_err gen_fn 2021 // gen fn gen_fn() {} // async gen fn async_gen_fn() {} if p.at(T![gen]) && p.nth(1) == T![fn] { diff --git a/src/tools/rust-analyzer/crates/parser/src/grammar/items/adt.rs b/src/tools/rust-analyzer/crates/parser/src/grammar/items/adt.rs index 9a16c9db6daf1..a37569614028a 100644 --- a/src/tools/rust-analyzer/crates/parser/src/grammar/items/adt.rs +++ b/src/tools/rust-analyzer/crates/parser/src/grammar/items/adt.rs @@ -107,7 +107,7 @@ pub(crate) fn variant_list(p: &mut Parser<'_>) { } // test record_field_list -// struct S { a: i32, b: f32 } +// struct S { a: i32, b: f32, unsafe c: u8 } pub(crate) fn record_field_list(p: &mut Parser<'_>) { assert!(p.at(T!['{'])); let m = p.start(); @@ -131,6 +131,7 @@ pub(crate) fn record_field_list(p: &mut Parser<'_>) { // struct S { #[attr] f: f32 } attributes::outer_attrs(p); opt_visibility(p, false); + p.eat(T![unsafe]); if p.at(IDENT) { name(p); p.expect(T![:]); diff --git a/src/tools/rust-analyzer/crates/parser/src/grammar/items/consts.rs b/src/tools/rust-analyzer/crates/parser/src/grammar/items/consts.rs index 9549ec9b4005e..8e255985a205d 100644 --- a/src/tools/rust-analyzer/crates/parser/src/grammar/items/consts.rs +++ b/src/tools/rust-analyzer/crates/parser/src/grammar/items/consts.rs @@ -24,6 +24,18 @@ fn const_or_static(p: &mut Parser<'_>, m: Marker, is_const: bool) { name(p); } + // FIXME: Recover on statics with generic params/where clause. + if is_const { + // test generic_const + // const C: u32 = 0; + // impl Foo { + // const C<'a>: &'a () = &(); + // } + generic_params::opt_generic_param_list(p); + } + // test_err generic_static + // static C: u32 = 0; + if p.at(T![:]) { types::ascription(p); } else { @@ -32,6 +44,20 @@ fn const_or_static(p: &mut Parser<'_>, m: Marker, is_const: bool) { if p.eat(T![=]) { expressions::expr(p); } + + if is_const { + // test const_where_clause + // const C: u32 = 0 + // where i32: Copy; + // trait Foo { + // const C: i32 where i32: Copy; + // } + generic_params::opt_where_clause(p); + } + // test_err static_where_clause + // static C: u32 = 0 + // where i32: Copy; + p.expect(T![;]); m.complete(p, if is_const { CONST } else { STATIC }); } diff --git a/src/tools/rust-analyzer/crates/parser/src/grammar/paths.rs b/src/tools/rust-analyzer/crates/parser/src/grammar/paths.rs index 3410505cd46db..770827c6b0d41 100644 --- a/src/tools/rust-analyzer/crates/parser/src/grammar/paths.rs +++ b/src/tools/rust-analyzer/crates/parser/src/grammar/paths.rs @@ -81,7 +81,7 @@ fn path_for_qualifier( } const EXPR_PATH_SEGMENT_RECOVERY_SET: TokenSet = - items::ITEM_RECOVERY_SET.union(TokenSet::new(&[T![')'], T![,], T![let]])); + expressions::EXPR_RECOVERY_SET.union(items::ITEM_RECOVERY_SET); const TYPE_PATH_SEGMENT_RECOVERY_SET: TokenSet = types::TYPE_RECOVERY_SET; fn path_segment(p: &mut Parser<'_>, mode: Mode, first: bool) -> Option { diff --git a/src/tools/rust-analyzer/crates/parser/src/grammar/patterns.rs b/src/tools/rust-analyzer/crates/parser/src/grammar/patterns.rs index 460051a0f4a52..4dd44c030f305 100644 --- a/src/tools/rust-analyzer/crates/parser/src/grammar/patterns.rs +++ b/src/tools/rust-analyzer/crates/parser/src/grammar/patterns.rs @@ -199,8 +199,19 @@ fn pattern_single_r(p: &mut Parser<'_>, recovery_set: TokenSet) { } } -const PAT_RECOVERY_SET: TokenSet = - TokenSet::new(&[T![let], T![if], T![while], T![loop], T![match], T![')'], T![,], T![=]]); +const PAT_RECOVERY_SET: TokenSet = TokenSet::new(&[ + T![let], + T![if], + T![while], + T![loop], + T![match], + T![')'], + T![']'], + T!['}'], + T![,], + T![=], + T![&], +]); fn atom_pat(p: &mut Parser<'_>, recovery_set: TokenSet) -> Option { let m = match p.current() { diff --git a/src/tools/rust-analyzer/crates/parser/src/grammar/types.rs b/src/tools/rust-analyzer/crates/parser/src/grammar/types.rs index 0133b7d5d820f..9d31e435cf987 100644 --- a/src/tools/rust-analyzer/crates/parser/src/grammar/types.rs +++ b/src/tools/rust-analyzer/crates/parser/src/grammar/types.rs @@ -20,10 +20,15 @@ pub(super) const TYPE_FIRST: TokenSet = paths::PATH_FIRST.union(TokenSet::new(&[ pub(super) const TYPE_RECOVERY_SET: TokenSet = TokenSet::new(&[ T![')'], + // test_err type_in_array_recover + // const _: [&]; + T![']'], + T!['}'], T![>], T![,], // test_err struct_field_recover // struct S { f pub g: () } + // struct S { f: pub g: () } T![pub], ]); diff --git a/src/tools/rust-analyzer/crates/parser/src/input.rs b/src/tools/rust-analyzer/crates/parser/src/input.rs index cabdff214df35..4490956f97046 100644 --- a/src/tools/rust-analyzer/crates/parser/src/input.rs +++ b/src/tools/rust-analyzer/crates/parser/src/input.rs @@ -12,7 +12,6 @@ type bits = u64; /// `Tokens` doesn't include whitespace and comments. Main input to the parser. /// /// Struct of arrays internally, but this shouldn't really matter. -#[derive(Default)] pub struct Input { kind: Vec, joint: Vec, @@ -21,6 +20,14 @@ pub struct Input { /// `pub` impl used by callers to create `Tokens`. impl Input { + #[inline] + pub fn with_capacity(capacity: usize) -> Self { + Self { + kind: Vec::with_capacity(capacity), + joint: Vec::with_capacity(capacity / size_of::()), + contextual_kind: Vec::with_capacity(capacity), + } + } #[inline] pub fn push(&mut self, kind: SyntaxKind) { self.push_impl(kind, SyntaxKind::EOF) diff --git a/src/tools/rust-analyzer/crates/parser/src/lexed_str.rs b/src/tools/rust-analyzer/crates/parser/src/lexed_str.rs index c97596d5097ec..0a5c16dc4c499 100644 --- a/src/tools/rust-analyzer/crates/parser/src/lexed_str.rs +++ b/src/tools/rust-analyzer/crates/parser/src/lexed_str.rs @@ -10,7 +10,9 @@ use std::ops; -use rustc_lexer::unescape::{EscapeError, Mode}; +use rustc_literal_escaper::{ + EscapeError, Mode, unescape_byte, unescape_char, unescape_mixed, unescape_unicode, +}; use crate::{ Edition, @@ -177,6 +179,15 @@ impl<'a> Converter<'a> { COMMENT } + rustc_lexer::TokenKind::Frontmatter { has_invalid_preceding_whitespace, invalid_infostring } => { + if *has_invalid_preceding_whitespace { + err = "invalid preceding whitespace for frontmatter opening" + } else if *invalid_infostring { + err = "invalid infostring for frontmatter" + } + FRONTMATTER + } + rustc_lexer::TokenKind::Whitespace => WHITESPACE, rustc_lexer::TokenKind::Ident if token_text == "_" => UNDERSCORE, @@ -282,7 +293,7 @@ impl<'a> Converter<'a> { let text = &self.res.text[self.offset + 1..][..len - 1]; let i = text.rfind('\'').unwrap(); let text = &text[..i]; - if let Err(e) = rustc_lexer::unescape::unescape_char(text) { + if let Err(e) = unescape_char(text) { err = error_to_diagnostic_message(e, Mode::Char); } } @@ -295,7 +306,7 @@ impl<'a> Converter<'a> { let text = &self.res.text[self.offset + 2..][..len - 2]; let i = text.rfind('\'').unwrap(); let text = &text[..i]; - if let Err(e) = rustc_lexer::unescape::unescape_byte(text) { + if let Err(e) = unescape_byte(text) { err = error_to_diagnostic_message(e, Mode::Byte); } } @@ -402,14 +413,14 @@ fn unescape_string_error_message(text: &str, mode: Mode) -> &'static str { let mut error_message = ""; match mode { Mode::CStr => { - rustc_lexer::unescape::unescape_mixed(text, mode, &mut |_, res| { + unescape_mixed(text, mode, &mut |_, res| { if let Err(e) = res { error_message = error_to_diagnostic_message(e, mode); } }); } Mode::ByteStr | Mode::Str => { - rustc_lexer::unescape::unescape_unicode(text, mode, &mut |_, res| { + unescape_unicode(text, mode, &mut |_, res| { if let Err(e) = res { error_message = error_to_diagnostic_message(e, mode); } diff --git a/src/tools/rust-analyzer/crates/parser/src/lib.rs b/src/tools/rust-analyzer/crates/parser/src/lib.rs index 398ad7cf66ce6..7963f00bb25ce 100644 --- a/src/tools/rust-analyzer/crates/parser/src/lib.rs +++ b/src/tools/rust-analyzer/crates/parser/src/lib.rs @@ -35,6 +35,8 @@ mod shortcuts; mod syntax_kind; mod token_set; +pub use T_ as T; + #[cfg(test)] mod tests; diff --git a/src/tools/rust-analyzer/crates/parser/src/parser.rs b/src/tools/rust-analyzer/crates/parser/src/parser.rs index b058686276444..36a363afe93a7 100644 --- a/src/tools/rust-analyzer/crates/parser/src/parser.rs +++ b/src/tools/rust-analyzer/crates/parser/src/parser.rs @@ -5,11 +5,11 @@ use std::cell::Cell; use drop_bomb::DropBomb; use crate::{ - event::Event, - input::Input, Edition, SyntaxKind::{self, EOF, ERROR, TOMBSTONE}, - TokenSet, T, + T, TokenSet, + event::Event, + input::Input, }; /// `Parser` struct provides the low-level API for diff --git a/src/tools/rust-analyzer/crates/parser/src/shortcuts.rs b/src/tools/rust-analyzer/crates/parser/src/shortcuts.rs index 32569d5c3fe92..e2baec890c3a6 100644 --- a/src/tools/rust-analyzer/crates/parser/src/shortcuts.rs +++ b/src/tools/rust-analyzer/crates/parser/src/shortcuts.rs @@ -27,7 +27,7 @@ pub enum StrStep<'a> { impl LexedStr<'_> { pub fn to_input(&self, edition: Edition) -> crate::Input { let _p = tracing::info_span!("LexedStr::to_input").entered(); - let mut res = crate::Input::default(); + let mut res = crate::Input::with_capacity(self.len()); let mut was_joint = false; for i in 0..self.len() { let kind = self.kind(i); diff --git a/src/tools/rust-analyzer/crates/parser/src/syntax_kind.rs b/src/tools/rust-analyzer/crates/parser/src/syntax_kind.rs index 6a8cca9ccc79d..7311947525ed9 100644 --- a/src/tools/rust-analyzer/crates/parser/src/syntax_kind.rs +++ b/src/tools/rust-analyzer/crates/parser/src/syntax_kind.rs @@ -1,6 +1,7 @@ //! Defines [`SyntaxKind`] -- a fieldless enum of all possible syntactic //! constructs of the Rust language. +#[rustfmt::skip] mod generated; use crate::Edition; diff --git a/src/tools/rust-analyzer/crates/parser/src/syntax_kind/generated.rs b/src/tools/rust-analyzer/crates/parser/src/syntax_kind/generated.rs index 1ff0bbea8b1db..b1727509b1379 100644 --- a/src/tools/rust-analyzer/crates/parser/src/syntax_kind/generated.rs +++ b/src/tools/rust-analyzer/crates/parser/src/syntax_kind/generated.rs @@ -3,7 +3,7 @@ #![allow(bad_style, missing_docs, unreachable_pub)] use crate::Edition; #[doc = r" The kind of syntax node, e.g. `IDENT`, `USE_KW`, or `STRUCT`."] -#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)] +#[derive(Debug)] #[repr(u16)] pub enum SyntaxKind { #[doc(hidden)] @@ -150,13 +150,13 @@ pub enum SyntaxKind { STRING, COMMENT, ERROR, + FRONTMATTER, IDENT, LIFETIME_IDENT, NEWLINE, SHEBANG, WHITESPACE, ABI, - ADT, ARG_LIST, ARRAY_EXPR, ARRAY_TYPE, @@ -165,16 +165,13 @@ pub enum SyntaxKind { ASM_DIR_SPEC, ASM_EXPR, ASM_LABEL, - ASM_OPERAND, ASM_OPERAND_EXPR, ASM_OPERAND_NAMED, ASM_OPTION, ASM_OPTIONS, - ASM_PIECE, ASM_REG_OPERAND, ASM_REG_SPEC, ASM_SYM, - ASSOC_ITEM, ASSOC_ITEM_LIST, ASSOC_TYPE_ARG, ATTR, @@ -195,23 +192,18 @@ pub enum SyntaxKind { CONTINUE_EXPR, DYN_TRAIT_TYPE, ENUM, - EXPR, EXPR_STMT, EXTERN_BLOCK, EXTERN_CRATE, - EXTERN_ITEM, EXTERN_ITEM_LIST, FIELD_EXPR, - FIELD_LIST, FN, FN_PTR_TYPE, FORMAT_ARGS_ARG, FORMAT_ARGS_EXPR, FOR_EXPR, FOR_TYPE, - GENERIC_ARG, GENERIC_ARG_LIST, - GENERIC_PARAM, GENERIC_PARAM_LIST, IDENT_PAT, IF_EXPR, @@ -219,7 +211,6 @@ pub enum SyntaxKind { IMPL_TRAIT_TYPE, INDEX_EXPR, INFER_TYPE, - ITEM, ITEM_LIST, LABEL, LET_ELSE, @@ -257,7 +248,6 @@ pub enum SyntaxKind { PAREN_EXPR, PAREN_PAT, PAREN_TYPE, - PAT, PATH, PATH_EXPR, PATH_PAT, @@ -288,7 +278,6 @@ pub enum SyntaxKind { SLICE_TYPE, SOURCE_FILE, STATIC, - STMT, STMT_LIST, STRUCT, TOKEN_TREE, @@ -301,7 +290,6 @@ pub enum SyntaxKind { TUPLE_PAT, TUPLE_STRUCT_PAT, TUPLE_TYPE, - TYPE, TYPE_ALIAS, TYPE_ARG, TYPE_BOUND, @@ -310,12 +298,10 @@ pub enum SyntaxKind { UNDERSCORE_EXPR, UNION, USE, - USE_BOUND_GENERIC_ARG, USE_BOUND_GENERIC_ARGS, USE_TREE, USE_TREE_LIST, VARIANT, - VARIANT_DEF, VARIANT_LIST, VISIBILITY, WHERE_CLAUSE, @@ -343,7 +329,6 @@ impl SyntaxKind { | INT_NUMBER | STRING | ABI - | ADT | ARG_LIST | ARRAY_EXPR | ARRAY_TYPE @@ -352,16 +337,13 @@ impl SyntaxKind { | ASM_DIR_SPEC | ASM_EXPR | ASM_LABEL - | ASM_OPERAND | ASM_OPERAND_EXPR | ASM_OPERAND_NAMED | ASM_OPTION | ASM_OPTIONS - | ASM_PIECE | ASM_REG_OPERAND | ASM_REG_SPEC | ASM_SYM - | ASSOC_ITEM | ASSOC_ITEM_LIST | ASSOC_TYPE_ARG | ATTR @@ -382,23 +364,18 @@ impl SyntaxKind { | CONTINUE_EXPR | DYN_TRAIT_TYPE | ENUM - | EXPR | EXPR_STMT | EXTERN_BLOCK | EXTERN_CRATE - | EXTERN_ITEM | EXTERN_ITEM_LIST | FIELD_EXPR - | FIELD_LIST | FN | FN_PTR_TYPE | FORMAT_ARGS_ARG | FORMAT_ARGS_EXPR | FOR_EXPR | FOR_TYPE - | GENERIC_ARG | GENERIC_ARG_LIST - | GENERIC_PARAM | GENERIC_PARAM_LIST | IDENT_PAT | IF_EXPR @@ -406,7 +383,6 @@ impl SyntaxKind { | IMPL_TRAIT_TYPE | INDEX_EXPR | INFER_TYPE - | ITEM | ITEM_LIST | LABEL | LET_ELSE @@ -444,7 +420,6 @@ impl SyntaxKind { | PAREN_EXPR | PAREN_PAT | PAREN_TYPE - | PAT | PATH | PATH_EXPR | PATH_PAT @@ -475,7 +450,6 @@ impl SyntaxKind { | SLICE_TYPE | SOURCE_FILE | STATIC - | STMT | STMT_LIST | STRUCT | TOKEN_TREE @@ -488,7 +462,6 @@ impl SyntaxKind { | TUPLE_PAT | TUPLE_STRUCT_PAT | TUPLE_TYPE - | TYPE | TYPE_ALIAS | TYPE_ARG | TYPE_BOUND @@ -497,12 +470,10 @@ impl SyntaxKind { | UNDERSCORE_EXPR | UNION | USE - | USE_BOUND_GENERIC_ARG | USE_BOUND_GENERIC_ARGS | USE_TREE | USE_TREE_LIST | VARIANT - | VARIANT_DEF | VARIANT_LIST | VISIBILITY | WHERE_CLAUSE @@ -513,6 +484,7 @@ impl SyntaxKind { | YIELD_EXPR | COMMENT | ERROR + | FRONTMATTER | IDENT | LIFETIME_IDENT | NEWLINE @@ -1024,4 +996,29 @@ impl SyntaxKind { } } #[macro_export] -macro_rules ! T { [$] => { $ crate :: SyntaxKind :: DOLLAR } ; [;] => { $ crate :: SyntaxKind :: SEMICOLON } ; [,] => { $ crate :: SyntaxKind :: COMMA } ; ['('] => { $ crate :: SyntaxKind :: L_PAREN } ; [')'] => { $ crate :: SyntaxKind :: R_PAREN } ; ['{'] => { $ crate :: SyntaxKind :: L_CURLY } ; ['}'] => { $ crate :: SyntaxKind :: R_CURLY } ; ['['] => { $ crate :: SyntaxKind :: L_BRACK } ; [']'] => { $ crate :: SyntaxKind :: R_BRACK } ; [<] => { $ crate :: SyntaxKind :: L_ANGLE } ; [>] => { $ crate :: SyntaxKind :: R_ANGLE } ; [@] => { $ crate :: SyntaxKind :: AT } ; [#] => { $ crate :: SyntaxKind :: POUND } ; [~] => { $ crate :: SyntaxKind :: TILDE } ; [?] => { $ crate :: SyntaxKind :: QUESTION } ; [&] => { $ crate :: SyntaxKind :: AMP } ; [|] => { $ crate :: SyntaxKind :: PIPE } ; [+] => { $ crate :: SyntaxKind :: PLUS } ; [*] => { $ crate :: SyntaxKind :: STAR } ; [/] => { $ crate :: SyntaxKind :: SLASH } ; [^] => { $ crate :: SyntaxKind :: CARET } ; [%] => { $ crate :: SyntaxKind :: PERCENT } ; [_] => { $ crate :: SyntaxKind :: UNDERSCORE } ; [.] => { $ crate :: SyntaxKind :: DOT } ; [..] => { $ crate :: SyntaxKind :: DOT2 } ; [...] => { $ crate :: SyntaxKind :: DOT3 } ; [..=] => { $ crate :: SyntaxKind :: DOT2EQ } ; [:] => { $ crate :: SyntaxKind :: COLON } ; [::] => { $ crate :: SyntaxKind :: COLON2 } ; [=] => { $ crate :: SyntaxKind :: EQ } ; [==] => { $ crate :: SyntaxKind :: EQ2 } ; [=>] => { $ crate :: SyntaxKind :: FAT_ARROW } ; [!] => { $ crate :: SyntaxKind :: BANG } ; [!=] => { $ crate :: SyntaxKind :: NEQ } ; [-] => { $ crate :: SyntaxKind :: MINUS } ; [->] => { $ crate :: SyntaxKind :: THIN_ARROW } ; [<=] => { $ crate :: SyntaxKind :: LTEQ } ; [>=] => { $ crate :: SyntaxKind :: GTEQ } ; [+=] => { $ crate :: SyntaxKind :: PLUSEQ } ; [-=] => { $ crate :: SyntaxKind :: MINUSEQ } ; [|=] => { $ crate :: SyntaxKind :: PIPEEQ } ; [&=] => { $ crate :: SyntaxKind :: AMPEQ } ; [^=] => { $ crate :: SyntaxKind :: CARETEQ } ; [/=] => { $ crate :: SyntaxKind :: SLASHEQ } ; [*=] => { $ crate :: SyntaxKind :: STAREQ } ; [%=] => { $ crate :: SyntaxKind :: PERCENTEQ } ; [&&] => { $ crate :: SyntaxKind :: AMP2 } ; [||] => { $ crate :: SyntaxKind :: PIPE2 } ; [<<] => { $ crate :: SyntaxKind :: SHL } ; [>>] => { $ crate :: SyntaxKind :: SHR } ; [<<=] => { $ crate :: SyntaxKind :: SHLEQ } ; [>>=] => { $ crate :: SyntaxKind :: SHREQ } ; [Self] => { $ crate :: SyntaxKind :: SELF_TYPE_KW } ; [abstract] => { $ crate :: SyntaxKind :: ABSTRACT_KW } ; [as] => { $ crate :: SyntaxKind :: AS_KW } ; [become] => { $ crate :: SyntaxKind :: BECOME_KW } ; [box] => { $ crate :: SyntaxKind :: BOX_KW } ; [break] => { $ crate :: SyntaxKind :: BREAK_KW } ; [const] => { $ crate :: SyntaxKind :: CONST_KW } ; [continue] => { $ crate :: SyntaxKind :: CONTINUE_KW } ; [crate] => { $ crate :: SyntaxKind :: CRATE_KW } ; [do] => { $ crate :: SyntaxKind :: DO_KW } ; [else] => { $ crate :: SyntaxKind :: ELSE_KW } ; [enum] => { $ crate :: SyntaxKind :: ENUM_KW } ; [extern] => { $ crate :: SyntaxKind :: EXTERN_KW } ; [false] => { $ crate :: SyntaxKind :: FALSE_KW } ; [final] => { $ crate :: SyntaxKind :: FINAL_KW } ; [fn] => { $ crate :: SyntaxKind :: FN_KW } ; [for] => { $ crate :: SyntaxKind :: FOR_KW } ; [if] => { $ crate :: SyntaxKind :: IF_KW } ; [impl] => { $ crate :: SyntaxKind :: IMPL_KW } ; [in] => { $ crate :: SyntaxKind :: IN_KW } ; [let] => { $ crate :: SyntaxKind :: LET_KW } ; [loop] => { $ crate :: SyntaxKind :: LOOP_KW } ; [macro] => { $ crate :: SyntaxKind :: MACRO_KW } ; [match] => { $ crate :: SyntaxKind :: MATCH_KW } ; [mod] => { $ crate :: SyntaxKind :: MOD_KW } ; [move] => { $ crate :: SyntaxKind :: MOVE_KW } ; [mut] => { $ crate :: SyntaxKind :: MUT_KW } ; [override] => { $ crate :: SyntaxKind :: OVERRIDE_KW } ; [priv] => { $ crate :: SyntaxKind :: PRIV_KW } ; [pub] => { $ crate :: SyntaxKind :: PUB_KW } ; [ref] => { $ crate :: SyntaxKind :: REF_KW } ; [return] => { $ crate :: SyntaxKind :: RETURN_KW } ; [self] => { $ crate :: SyntaxKind :: SELF_KW } ; [static] => { $ crate :: SyntaxKind :: STATIC_KW } ; [struct] => { $ crate :: SyntaxKind :: STRUCT_KW } ; [super] => { $ crate :: SyntaxKind :: SUPER_KW } ; [trait] => { $ crate :: SyntaxKind :: TRAIT_KW } ; [true] => { $ crate :: SyntaxKind :: TRUE_KW } ; [type] => { $ crate :: SyntaxKind :: TYPE_KW } ; [typeof] => { $ crate :: SyntaxKind :: TYPEOF_KW } ; [unsafe] => { $ crate :: SyntaxKind :: UNSAFE_KW } ; [unsized] => { $ crate :: SyntaxKind :: UNSIZED_KW } ; [use] => { $ crate :: SyntaxKind :: USE_KW } ; [virtual] => { $ crate :: SyntaxKind :: VIRTUAL_KW } ; [where] => { $ crate :: SyntaxKind :: WHERE_KW } ; [while] => { $ crate :: SyntaxKind :: WHILE_KW } ; [yield] => { $ crate :: SyntaxKind :: YIELD_KW } ; [asm] => { $ crate :: SyntaxKind :: ASM_KW } ; [att_syntax] => { $ crate :: SyntaxKind :: ATT_SYNTAX_KW } ; [auto] => { $ crate :: SyntaxKind :: AUTO_KW } ; [builtin] => { $ crate :: SyntaxKind :: BUILTIN_KW } ; [clobber_abi] => { $ crate :: SyntaxKind :: CLOBBER_ABI_KW } ; [default] => { $ crate :: SyntaxKind :: DEFAULT_KW } ; [dyn] => { $ crate :: SyntaxKind :: DYN_KW } ; [format_args] => { $ crate :: SyntaxKind :: FORMAT_ARGS_KW } ; [inlateout] => { $ crate :: SyntaxKind :: INLATEOUT_KW } ; [inout] => { $ crate :: SyntaxKind :: INOUT_KW } ; [label] => { $ crate :: SyntaxKind :: LABEL_KW } ; [lateout] => { $ crate :: SyntaxKind :: LATEOUT_KW } ; [macro_rules] => { $ crate :: SyntaxKind :: MACRO_RULES_KW } ; [may_unwind] => { $ crate :: SyntaxKind :: MAY_UNWIND_KW } ; [nomem] => { $ crate :: SyntaxKind :: NOMEM_KW } ; [noreturn] => { $ crate :: SyntaxKind :: NORETURN_KW } ; [nostack] => { $ crate :: SyntaxKind :: NOSTACK_KW } ; [offset_of] => { $ crate :: SyntaxKind :: OFFSET_OF_KW } ; [options] => { $ crate :: SyntaxKind :: OPTIONS_KW } ; [out] => { $ crate :: SyntaxKind :: OUT_KW } ; [preserves_flags] => { $ crate :: SyntaxKind :: PRESERVES_FLAGS_KW } ; [pure] => { $ crate :: SyntaxKind :: PURE_KW } ; [raw] => { $ crate :: SyntaxKind :: RAW_KW } ; [readonly] => { $ crate :: SyntaxKind :: READONLY_KW } ; [safe] => { $ crate :: SyntaxKind :: SAFE_KW } ; [sym] => { $ crate :: SyntaxKind :: SYM_KW } ; [union] => { $ crate :: SyntaxKind :: UNION_KW } ; [yeet] => { $ crate :: SyntaxKind :: YEET_KW } ; [async] => { $ crate :: SyntaxKind :: ASYNC_KW } ; [await] => { $ crate :: SyntaxKind :: AWAIT_KW } ; [dyn] => { $ crate :: SyntaxKind :: DYN_KW } ; [gen] => { $ crate :: SyntaxKind :: GEN_KW } ; [try] => { $ crate :: SyntaxKind :: TRY_KW } ; [lifetime_ident] => { $ crate :: SyntaxKind :: LIFETIME_IDENT } ; [int_number] => { $ crate :: SyntaxKind :: INT_NUMBER } ; [ident] => { $ crate :: SyntaxKind :: IDENT } ; [string] => { $ crate :: SyntaxKind :: STRING } ; [shebang] => { $ crate :: SyntaxKind :: SHEBANG } ; } +macro_rules ! T_ { [$] => { $ crate :: SyntaxKind :: DOLLAR } ; [;] => { $ crate :: SyntaxKind :: SEMICOLON } ; [,] => { $ crate :: SyntaxKind :: COMMA } ; ['('] => { $ crate :: SyntaxKind :: L_PAREN } ; [')'] => { $ crate :: SyntaxKind :: R_PAREN } ; ['{'] => { $ crate :: SyntaxKind :: L_CURLY } ; ['}'] => { $ crate :: SyntaxKind :: R_CURLY } ; ['['] => { $ crate :: SyntaxKind :: L_BRACK } ; [']'] => { $ crate :: SyntaxKind :: R_BRACK } ; [<] => { $ crate :: SyntaxKind :: L_ANGLE } ; [>] => { $ crate :: SyntaxKind :: R_ANGLE } ; [@] => { $ crate :: SyntaxKind :: AT } ; [#] => { $ crate :: SyntaxKind :: POUND } ; [~] => { $ crate :: SyntaxKind :: TILDE } ; [?] => { $ crate :: SyntaxKind :: QUESTION } ; [&] => { $ crate :: SyntaxKind :: AMP } ; [|] => { $ crate :: SyntaxKind :: PIPE } ; [+] => { $ crate :: SyntaxKind :: PLUS } ; [*] => { $ crate :: SyntaxKind :: STAR } ; [/] => { $ crate :: SyntaxKind :: SLASH } ; [^] => { $ crate :: SyntaxKind :: CARET } ; [%] => { $ crate :: SyntaxKind :: PERCENT } ; [_] => { $ crate :: SyntaxKind :: UNDERSCORE } ; [.] => { $ crate :: SyntaxKind :: DOT } ; [..] => { $ crate :: SyntaxKind :: DOT2 } ; [...] => { $ crate :: SyntaxKind :: DOT3 } ; [..=] => { $ crate :: SyntaxKind :: DOT2EQ } ; [:] => { $ crate :: SyntaxKind :: COLON } ; [::] => { $ crate :: SyntaxKind :: COLON2 } ; [=] => { $ crate :: SyntaxKind :: EQ } ; [==] => { $ crate :: SyntaxKind :: EQ2 } ; [=>] => { $ crate :: SyntaxKind :: FAT_ARROW } ; [!] => { $ crate :: SyntaxKind :: BANG } ; [!=] => { $ crate :: SyntaxKind :: NEQ } ; [-] => { $ crate :: SyntaxKind :: MINUS } ; [->] => { $ crate :: SyntaxKind :: THIN_ARROW } ; [<=] => { $ crate :: SyntaxKind :: LTEQ } ; [>=] => { $ crate :: SyntaxKind :: GTEQ } ; [+=] => { $ crate :: SyntaxKind :: PLUSEQ } ; [-=] => { $ crate :: SyntaxKind :: MINUSEQ } ; [|=] => { $ crate :: SyntaxKind :: PIPEEQ } ; [&=] => { $ crate :: SyntaxKind :: AMPEQ } ; [^=] => { $ crate :: SyntaxKind :: CARETEQ } ; [/=] => { $ crate :: SyntaxKind :: SLASHEQ } ; [*=] => { $ crate :: SyntaxKind :: STAREQ } ; [%=] => { $ crate :: SyntaxKind :: PERCENTEQ } ; [&&] => { $ crate :: SyntaxKind :: AMP2 } ; [||] => { $ crate :: SyntaxKind :: PIPE2 } ; [<<] => { $ crate :: SyntaxKind :: SHL } ; [>>] => { $ crate :: SyntaxKind :: SHR } ; [<<=] => { $ crate :: SyntaxKind :: SHLEQ } ; [>>=] => { $ crate :: SyntaxKind :: SHREQ } ; [Self] => { $ crate :: SyntaxKind :: SELF_TYPE_KW } ; [abstract] => { $ crate :: SyntaxKind :: ABSTRACT_KW } ; [as] => { $ crate :: SyntaxKind :: AS_KW } ; [become] => { $ crate :: SyntaxKind :: BECOME_KW } ; [box] => { $ crate :: SyntaxKind :: BOX_KW } ; [break] => { $ crate :: SyntaxKind :: BREAK_KW } ; [const] => { $ crate :: SyntaxKind :: CONST_KW } ; [continue] => { $ crate :: SyntaxKind :: CONTINUE_KW } ; [crate] => { $ crate :: SyntaxKind :: CRATE_KW } ; [do] => { $ crate :: SyntaxKind :: DO_KW } ; [else] => { $ crate :: SyntaxKind :: ELSE_KW } ; [enum] => { $ crate :: SyntaxKind :: ENUM_KW } ; [extern] => { $ crate :: SyntaxKind :: EXTERN_KW } ; [false] => { $ crate :: SyntaxKind :: FALSE_KW } ; [final] => { $ crate :: SyntaxKind :: FINAL_KW } ; [fn] => { $ crate :: SyntaxKind :: FN_KW } ; [for] => { $ crate :: SyntaxKind :: FOR_KW } ; [if] => { $ crate :: SyntaxKind :: IF_KW } ; [impl] => { $ crate :: SyntaxKind :: IMPL_KW } ; [in] => { $ crate :: SyntaxKind :: IN_KW } ; [let] => { $ crate :: SyntaxKind :: LET_KW } ; [loop] => { $ crate :: SyntaxKind :: LOOP_KW } ; [macro] => { $ crate :: SyntaxKind :: MACRO_KW } ; [match] => { $ crate :: SyntaxKind :: MATCH_KW } ; [mod] => { $ crate :: SyntaxKind :: MOD_KW } ; [move] => { $ crate :: SyntaxKind :: MOVE_KW } ; [mut] => { $ crate :: SyntaxKind :: MUT_KW } ; [override] => { $ crate :: SyntaxKind :: OVERRIDE_KW } ; [priv] => { $ crate :: SyntaxKind :: PRIV_KW } ; [pub] => { $ crate :: SyntaxKind :: PUB_KW } ; [ref] => { $ crate :: SyntaxKind :: REF_KW } ; [return] => { $ crate :: SyntaxKind :: RETURN_KW } ; [self] => { $ crate :: SyntaxKind :: SELF_KW } ; [static] => { $ crate :: SyntaxKind :: STATIC_KW } ; [struct] => { $ crate :: SyntaxKind :: STRUCT_KW } ; [super] => { $ crate :: SyntaxKind :: SUPER_KW } ; [trait] => { $ crate :: SyntaxKind :: TRAIT_KW } ; [true] => { $ crate :: SyntaxKind :: TRUE_KW } ; [type] => { $ crate :: SyntaxKind :: TYPE_KW } ; [typeof] => { $ crate :: SyntaxKind :: TYPEOF_KW } ; [unsafe] => { $ crate :: SyntaxKind :: UNSAFE_KW } ; [unsized] => { $ crate :: SyntaxKind :: UNSIZED_KW } ; [use] => { $ crate :: SyntaxKind :: USE_KW } ; [virtual] => { $ crate :: SyntaxKind :: VIRTUAL_KW } ; [where] => { $ crate :: SyntaxKind :: WHERE_KW } ; [while] => { $ crate :: SyntaxKind :: WHILE_KW } ; [yield] => { $ crate :: SyntaxKind :: YIELD_KW } ; [asm] => { $ crate :: SyntaxKind :: ASM_KW } ; [att_syntax] => { $ crate :: SyntaxKind :: ATT_SYNTAX_KW } ; [auto] => { $ crate :: SyntaxKind :: AUTO_KW } ; [builtin] => { $ crate :: SyntaxKind :: BUILTIN_KW } ; [clobber_abi] => { $ crate :: SyntaxKind :: CLOBBER_ABI_KW } ; [default] => { $ crate :: SyntaxKind :: DEFAULT_KW } ; [dyn] => { $ crate :: SyntaxKind :: DYN_KW } ; [format_args] => { $ crate :: SyntaxKind :: FORMAT_ARGS_KW } ; [inlateout] => { $ crate :: SyntaxKind :: INLATEOUT_KW } ; [inout] => { $ crate :: SyntaxKind :: INOUT_KW } ; [label] => { $ crate :: SyntaxKind :: LABEL_KW } ; [lateout] => { $ crate :: SyntaxKind :: LATEOUT_KW } ; [macro_rules] => { $ crate :: SyntaxKind :: MACRO_RULES_KW } ; [may_unwind] => { $ crate :: SyntaxKind :: MAY_UNWIND_KW } ; [nomem] => { $ crate :: SyntaxKind :: NOMEM_KW } ; [noreturn] => { $ crate :: SyntaxKind :: NORETURN_KW } ; [nostack] => { $ crate :: SyntaxKind :: NOSTACK_KW } ; [offset_of] => { $ crate :: SyntaxKind :: OFFSET_OF_KW } ; [options] => { $ crate :: SyntaxKind :: OPTIONS_KW } ; [out] => { $ crate :: SyntaxKind :: OUT_KW } ; [preserves_flags] => { $ crate :: SyntaxKind :: PRESERVES_FLAGS_KW } ; [pure] => { $ crate :: SyntaxKind :: PURE_KW } ; [raw] => { $ crate :: SyntaxKind :: RAW_KW } ; [readonly] => { $ crate :: SyntaxKind :: READONLY_KW } ; [safe] => { $ crate :: SyntaxKind :: SAFE_KW } ; [sym] => { $ crate :: SyntaxKind :: SYM_KW } ; [union] => { $ crate :: SyntaxKind :: UNION_KW } ; [yeet] => { $ crate :: SyntaxKind :: YEET_KW } ; [async] => { $ crate :: SyntaxKind :: ASYNC_KW } ; [await] => { $ crate :: SyntaxKind :: AWAIT_KW } ; [dyn] => { $ crate :: SyntaxKind :: DYN_KW } ; [gen] => { $ crate :: SyntaxKind :: GEN_KW } ; [try] => { $ crate :: SyntaxKind :: TRY_KW } ; [lifetime_ident] => { $ crate :: SyntaxKind :: LIFETIME_IDENT } ; [int_number] => { $ crate :: SyntaxKind :: INT_NUMBER } ; [ident] => { $ crate :: SyntaxKind :: IDENT } ; [string] => { $ crate :: SyntaxKind :: STRING } ; [shebang] => { $ crate :: SyntaxKind :: SHEBANG } ; [frontmatter] => { $ crate :: SyntaxKind :: FRONTMATTER } ; } +impl ::core::marker::Copy for SyntaxKind {} +impl ::core::clone::Clone for SyntaxKind { + #[inline] + fn clone(&self) -> Self { *self } +} +impl ::core::cmp::PartialEq for SyntaxKind { + #[inline] + fn eq(&self, other: &Self) -> bool { (*self as u16) == (*other as u16) } +} +impl ::core::cmp::Eq for SyntaxKind {} +impl ::core::cmp::PartialOrd for SyntaxKind { + #[inline] + fn partial_cmp(&self, other: &Self) -> core::option::Option { + Some(self.cmp(other)) + } +} +impl ::core::cmp::Ord for SyntaxKind { + #[inline] + fn cmp(&self, other: &Self) -> core::cmp::Ordering { (*self as u16).cmp(&(*other as u16)) } +} +impl ::core::hash::Hash for SyntaxKind { + fn hash(&self, state: &mut H) { + ::core::mem::discriminant(self).hash(state); + } +} diff --git a/src/tools/rust-analyzer/crates/parser/test_data/generated/runner.rs b/src/tools/rust-analyzer/crates/parser/test_data/generated/runner.rs index 1a747731587c7..24db9478ee568 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/generated/runner.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/generated/runner.rs @@ -139,6 +139,10 @@ mod ok { run_and_expect_no_errors("test_data/parser/inline/ok/const_trait_bound.rs"); } #[test] + fn const_where_clause() { + run_and_expect_no_errors("test_data/parser/inline/ok/const_where_clause.rs"); + } + #[test] fn continue_expr() { run_and_expect_no_errors("test_data/parser/inline/ok/continue_expr.rs"); } #[test] fn crate_path() { run_and_expect_no_errors("test_data/parser/inline/ok/crate_path.rs"); } @@ -278,6 +282,8 @@ mod ok { run_and_expect_no_errors("test_data/parser/inline/ok/generic_arg_bounds.rs"); } #[test] + fn generic_const() { run_and_expect_no_errors("test_data/parser/inline/ok/generic_const.rs"); } + #[test] fn generic_param_attribute() { run_and_expect_no_errors("test_data/parser/inline/ok/generic_param_attribute.rs"); } @@ -416,6 +422,10 @@ mod ok { run_and_expect_no_errors("test_data/parser/inline/ok/nocontentexpr_after_item.rs"); } #[test] + fn offset_of_parens() { + run_and_expect_no_errors("test_data/parser/inline/ok/offset_of_parens.rs"); + } + #[test] fn or_pattern() { run_and_expect_no_errors("test_data/parser/inline/ok/or_pattern.rs"); } #[test] fn param_list() { run_and_expect_no_errors("test_data/parser/inline/ok/param_list.rs"); } @@ -745,7 +755,12 @@ mod err { run_and_expect_errors("test_data/parser/inline/err/fn_pointer_type_missing_fn.rs"); } #[test] - fn gen_fn() { run_and_expect_errors("test_data/parser/inline/err/gen_fn.rs"); } + fn gen_fn() { + run_and_expect_errors_with_edition( + "test_data/parser/inline/err/gen_fn.rs", + crate::Edition::Edition2021, + ); + } #[test] fn generic_arg_list_recover() { run_and_expect_errors("test_data/parser/inline/err/generic_arg_list_recover.rs"); @@ -759,6 +774,8 @@ mod err { run_and_expect_errors("test_data/parser/inline/err/generic_param_list_recover.rs"); } #[test] + fn generic_static() { run_and_expect_errors("test_data/parser/inline/err/generic_static.rs"); } + #[test] fn impl_type() { run_and_expect_errors("test_data/parser/inline/err/impl_type.rs"); } #[test] fn let_else_right_curly_brace() { @@ -831,6 +848,10 @@ mod err { run_and_expect_errors("test_data/parser/inline/err/recover_from_missing_const_default.rs"); } #[test] + fn static_where_clause() { + run_and_expect_errors("test_data/parser/inline/err/static_where_clause.rs"); + } + #[test] fn struct_field_recover() { run_and_expect_errors("test_data/parser/inline/err/struct_field_recover.rs"); } @@ -849,6 +870,10 @@ mod err { run_and_expect_errors("test_data/parser/inline/err/tuple_pat_leading_comma.rs"); } #[test] + fn type_in_array_recover() { + run_and_expect_errors("test_data/parser/inline/err/type_in_array_recover.rs"); + } + #[test] fn unsafe_block_in_mod() { run_and_expect_errors("test_data/parser/inline/err/unsafe_block_in_mod.rs"); } diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0022_bad_exprs.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0022_bad_exprs.rast index d97fc6c72091d..1a8e881dd9e0f 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0022_bad_exprs.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0022_bad_exprs.rast @@ -35,8 +35,8 @@ SOURCE_FILE WHITESPACE " " LET_STMT LET_KW "let" - ERROR - R_BRACK "]" + ERROR + R_BRACK "]" WHITESPACE " " R_CURLY "}" WHITESPACE "\n" @@ -149,7 +149,8 @@ error 17: expected expression, item or let statement error 25: expected a name error 26: expected `;`, `{`, or `(` error 30: expected pattern -error 31: expected SEMICOLON +error 30: expected SEMICOLON +error 30: expected expression, item or let statement error 53: expected expression error 54: expected R_PAREN error 54: expected SEMICOLON diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/comma_after_default_values_syntax.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/comma_after_default_values_syntax.rast index feb617e1aa2ab..b57066f2fb382 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/comma_after_default_values_syntax.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/comma_after_default_values_syntax.rast @@ -23,8 +23,7 @@ SOURCE_FILE L_CURLY "{" WHITESPACE " " DOT2 ".." - ERROR - COMMA "," + COMMA "," WHITESPACE " " R_CURLY "}" SEMICOLON ";" @@ -39,8 +38,7 @@ SOURCE_FILE L_CURLY "{" WHITESPACE " " DOT2 ".." - ERROR - COMMA "," + COMMA "," WHITESPACE " " RECORD_EXPR_FIELD NAME_REF @@ -55,5 +53,6 @@ SOURCE_FILE R_CURLY "}" WHITESPACE "\n" error 21: expected expression +error 21: cannot use a comma after the base struct error 36: expected expression -error 37: expected COMMA +error 36: cannot use a comma after the base struct diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/gen_fn.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/gen_fn.rast index f8a7d0e552cac..b6fd5a5d99bdf 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/gen_fn.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/gen_fn.rast @@ -1,4 +1,6 @@ SOURCE_FILE + COMMENT "// 2021" + WHITESPACE "\n" ERROR PATH PATH_SEGMENT @@ -42,6 +44,6 @@ SOURCE_FILE L_CURLY "{" R_CURLY "}" WHITESPACE "\n" -error 3: expected an item -error 24: expected fn, trait or impl -error 28: expected an item +error 11: expected an item +error 32: expected fn, trait or impl +error 36: expected an item diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/gen_fn.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/gen_fn.rs index 80882e0a4044a..778693ca9570b 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/gen_fn.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/gen_fn.rs @@ -1,2 +1,3 @@ +// 2021 gen fn gen_fn() {} async gen fn async_gen_fn() {} diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/generic_static.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/generic_static.rast new file mode 100644 index 0000000000000..485ad11f233ac --- /dev/null +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/generic_static.rast @@ -0,0 +1,42 @@ +SOURCE_FILE + STATIC + STATIC_KW "static" + WHITESPACE " " + NAME + IDENT "C" + ERROR + L_ANGLE "<" + ERROR + PATH + PATH_SEGMENT + NAME_REF + IDENT "i32" + ERROR + R_ANGLE ">" + ERROR + COLON ":" + WHITESPACE " " + ERROR + PATH + PATH_SEGMENT + NAME_REF + IDENT "u32" + WHITESPACE " " + ERROR + EQ "=" + WHITESPACE " " + ERROR + INT_NUMBER "0" + ERROR + SEMICOLON ";" + WHITESPACE "\n" +error 8: missing type for `const` or `static` +error 8: expected SEMICOLON +error 8: expected an item +error 12: expected an item +error 12: expected an item +error 13: expected an item +error 18: expected an item +error 19: expected an item +error 21: expected an item +error 22: expected an item diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/generic_static.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/generic_static.rs new file mode 100644 index 0000000000000..d76aa7a205bc7 --- /dev/null +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/generic_static.rs @@ -0,0 +1 @@ +static C: u32 = 0; diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/static_where_clause.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/static_where_clause.rast new file mode 100644 index 0000000000000..cde3e47ad5c3b --- /dev/null +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/static_where_clause.rast @@ -0,0 +1,44 @@ +SOURCE_FILE + STATIC + STATIC_KW "static" + WHITESPACE " " + NAME + IDENT "C" + COLON ":" + WHITESPACE " " + PATH_TYPE + PATH + PATH_SEGMENT + NAME_REF + IDENT "u32" + WHITESPACE " " + EQ "=" + WHITESPACE " " + LITERAL + INT_NUMBER "0" + WHITESPACE "\n" + ERROR + WHERE_KW "where" + WHITESPACE " " + ERROR + PATH + PATH_SEGMENT + NAME_REF + IDENT "i32" + ERROR + COLON ":" + WHITESPACE " " + ERROR + PATH + PATH_SEGMENT + NAME_REF + IDENT "Copy" + ERROR + SEMICOLON ";" + WHITESPACE "\n" +error 17: expected SEMICOLON +error 18: expected an item +error 27: expected an item +error 27: expected an item +error 33: expected an item +error 33: expected an item diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/static_where_clause.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/static_where_clause.rs new file mode 100644 index 0000000000000..c330f35da2430 --- /dev/null +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/static_where_clause.rs @@ -0,0 +1,2 @@ +static C: u32 = 0 +where i32: Copy; diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/struct_field_recover.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/struct_field_recover.rast index 458d7f4e2fa22..5a12c21b647d4 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/struct_field_recover.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/struct_field_recover.rast @@ -26,6 +26,36 @@ SOURCE_FILE WHITESPACE " " R_CURLY "}" WHITESPACE "\n" + STRUCT + STRUCT_KW "struct" + WHITESPACE " " + NAME + IDENT "S" + WHITESPACE " " + RECORD_FIELD_LIST + L_CURLY "{" + WHITESPACE " " + RECORD_FIELD + NAME + IDENT "f" + COLON ":" + WHITESPACE " " + RECORD_FIELD + VISIBILITY + PUB_KW "pub" + WHITESPACE " " + NAME + IDENT "g" + COLON ":" + WHITESPACE " " + TUPLE_TYPE + L_PAREN "(" + R_PAREN ")" + WHITESPACE " " + R_CURLY "}" + WHITESPACE "\n" error 12: expected COLON error 12: expected type error 12: expected COMMA +error 38: expected type +error 38: expected COMMA diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/struct_field_recover.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/struct_field_recover.rs index da32227adcd7d..5b1e5a5b8a210 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/struct_field_recover.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/struct_field_recover.rs @@ -1 +1,2 @@ struct S { f pub g: () } +struct S { f: pub g: () } diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/type_in_array_recover.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/type_in_array_recover.rast new file mode 100644 index 0000000000000..db76e8d7c88a2 --- /dev/null +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/type_in_array_recover.rast @@ -0,0 +1,15 @@ +SOURCE_FILE + CONST + CONST_KW "const" + WHITESPACE " " + UNDERSCORE "_" + COLON ":" + WHITESPACE " " + SLICE_TYPE + L_BRACK "[" + REF_TYPE + AMP "&" + R_BRACK "]" + SEMICOLON ";" + WHITESPACE "\n" +error 11: expected type diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/type_in_array_recover.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/type_in_array_recover.rs new file mode 100644 index 0000000000000..039bf82997790 --- /dev/null +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/type_in_array_recover.rs @@ -0,0 +1 @@ +const _: [&]; diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/const_where_clause.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/const_where_clause.rast new file mode 100644 index 0000000000000..12148f6afe4bc --- /dev/null +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/const_where_clause.rast @@ -0,0 +1,89 @@ +SOURCE_FILE + CONST + CONST_KW "const" + WHITESPACE " " + NAME + IDENT "C" + GENERIC_PARAM_LIST + L_ANGLE "<" + TYPE_PARAM + NAME + IDENT "i32" + R_ANGLE ">" + COLON ":" + WHITESPACE " " + PATH_TYPE + PATH + PATH_SEGMENT + NAME_REF + IDENT "u32" + WHITESPACE " " + EQ "=" + WHITESPACE " " + LITERAL + INT_NUMBER "0" + WHITESPACE "\n" + WHERE_CLAUSE + WHERE_KW "where" + WHITESPACE " " + WHERE_PRED + PATH_TYPE + PATH + PATH_SEGMENT + NAME_REF + IDENT "i32" + COLON ":" + WHITESPACE " " + TYPE_BOUND_LIST + TYPE_BOUND + PATH_TYPE + PATH + PATH_SEGMENT + NAME_REF + IDENT "Copy" + SEMICOLON ";" + WHITESPACE "\n" + TRAIT + TRAIT_KW "trait" + WHITESPACE " " + NAME + IDENT "Foo" + WHITESPACE " " + ASSOC_ITEM_LIST + L_CURLY "{" + WHITESPACE "\n " + CONST + CONST_KW "const" + WHITESPACE " " + NAME + IDENT "C" + COLON ":" + WHITESPACE " " + PATH_TYPE + PATH + PATH_SEGMENT + NAME_REF + IDENT "i32" + WHITESPACE " " + WHERE_CLAUSE + WHERE_KW "where" + WHITESPACE " " + WHERE_PRED + PATH_TYPE + PATH + PATH_SEGMENT + NAME_REF + IDENT "i32" + COLON ":" + WHITESPACE " " + TYPE_BOUND_LIST + TYPE_BOUND + PATH_TYPE + PATH + PATH_SEGMENT + NAME_REF + IDENT "Copy" + SEMICOLON ";" + WHITESPACE "\n" + R_CURLY "}" + WHITESPACE "\n" diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/const_where_clause.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/const_where_clause.rs new file mode 100644 index 0000000000000..5ad4b2fe83234 --- /dev/null +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/const_where_clause.rs @@ -0,0 +1,5 @@ +const C: u32 = 0 +where i32: Copy; +trait Foo { + const C: i32 where i32: Copy; +} diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/generic_const.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/generic_const.rast new file mode 100644 index 0000000000000..bf432b99b9de9 --- /dev/null +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/generic_const.rast @@ -0,0 +1,71 @@ +SOURCE_FILE + CONST + CONST_KW "const" + WHITESPACE " " + NAME + IDENT "C" + GENERIC_PARAM_LIST + L_ANGLE "<" + TYPE_PARAM + NAME + IDENT "i32" + R_ANGLE ">" + COLON ":" + WHITESPACE " " + PATH_TYPE + PATH + PATH_SEGMENT + NAME_REF + IDENT "u32" + WHITESPACE " " + EQ "=" + WHITESPACE " " + LITERAL + INT_NUMBER "0" + SEMICOLON ";" + WHITESPACE "\n" + IMPL + IMPL_KW "impl" + WHITESPACE " " + PATH_TYPE + PATH + PATH_SEGMENT + NAME_REF + IDENT "Foo" + WHITESPACE " " + ASSOC_ITEM_LIST + L_CURLY "{" + WHITESPACE "\n " + CONST + CONST_KW "const" + WHITESPACE " " + NAME + IDENT "C" + GENERIC_PARAM_LIST + L_ANGLE "<" + LIFETIME_PARAM + LIFETIME + LIFETIME_IDENT "'a" + R_ANGLE ">" + COLON ":" + WHITESPACE " " + REF_TYPE + AMP "&" + LIFETIME + LIFETIME_IDENT "'a" + WHITESPACE " " + TUPLE_TYPE + L_PAREN "(" + R_PAREN ")" + WHITESPACE " " + EQ "=" + WHITESPACE " " + REF_EXPR + AMP "&" + TUPLE_EXPR + L_PAREN "(" + R_PAREN ")" + SEMICOLON ";" + WHITESPACE "\n" + R_CURLY "}" + WHITESPACE "\n" diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/generic_const.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/generic_const.rs new file mode 100644 index 0000000000000..ce718a46288d5 --- /dev/null +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/generic_const.rs @@ -0,0 +1,4 @@ +const C: u32 = 0; +impl Foo { + const C<'a>: &'a () = &(); +} diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/let_stmt.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/let_stmt.rast index de9d0fc19ee1f..d99dad4cedd17 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/let_stmt.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/let_stmt.rast @@ -32,5 +32,28 @@ SOURCE_FILE INT_NUMBER "92" SEMICOLON ";" WHITESPACE " " + LET_STMT + SUPER_KW "super" + WHITESPACE " " + LET_KW "let" + WHITESPACE " " + IDENT_PAT + NAME + IDENT "y" + SEMICOLON ";" + WHITESPACE " " + EXPR_STMT + PATH_EXPR + PATH + PATH + PATH_SEGMENT + NAME_REF + SUPER_KW "super" + COLON2 "::" + PATH_SEGMENT + NAME_REF + IDENT "foo" + SEMICOLON ";" + WHITESPACE " " R_CURLY "}" WHITESPACE "\n" diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/let_stmt.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/let_stmt.rs index 8003999fd08f7..d4cc1be4aec77 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/let_stmt.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/let_stmt.rs @@ -1 +1 @@ -fn f() { let x: i32 = 92; } +fn f() { let x: i32 = 92; super let y; super::foo; } diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/offset_of_parens.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/offset_of_parens.rast new file mode 100644 index 0000000000000..4e23455cfcc32 --- /dev/null +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/offset_of_parens.rast @@ -0,0 +1,42 @@ +SOURCE_FILE + FN + FN_KW "fn" + WHITESPACE " " + NAME + IDENT "foo" + PARAM_LIST + L_PAREN "(" + R_PAREN ")" + WHITESPACE " " + BLOCK_EXPR + STMT_LIST + L_CURLY "{" + WHITESPACE "\n " + EXPR_STMT + OFFSET_OF_EXPR + BUILTIN_KW "builtin" + POUND "#" + OFFSET_OF_KW "offset_of" + L_PAREN "(" + PATH_TYPE + PATH + PATH_SEGMENT + NAME_REF + IDENT "Foo" + COMMA "," + WHITESPACE " " + L_PAREN "(" + NAME_REF + IDENT "bar" + DOT "." + NAME_REF + IDENT "baz" + DOT "." + NAME_REF + INT_NUMBER "0" + R_PAREN ")" + R_PAREN ")" + SEMICOLON ";" + WHITESPACE "\n" + R_CURLY "}" + WHITESPACE "\n" diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/offset_of_parens.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/offset_of_parens.rs new file mode 100644 index 0000000000000..a797d5c8206ea --- /dev/null +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/offset_of_parens.rs @@ -0,0 +1,3 @@ +fn foo() { + builtin#offset_of(Foo, (bar.baz.0)); +} diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/record_field_list.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/record_field_list.rast index 065d7e7e81f24..07686f509c1bc 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/record_field_list.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/record_field_list.rast @@ -30,6 +30,20 @@ SOURCE_FILE PATH_SEGMENT NAME_REF IDENT "f32" + COMMA "," + WHITESPACE " " + RECORD_FIELD + UNSAFE_KW "unsafe" + WHITESPACE " " + NAME + IDENT "c" + COLON ":" + WHITESPACE " " + PATH_TYPE + PATH + PATH_SEGMENT + NAME_REF + IDENT "u8" WHITESPACE " " R_CURLY "}" WHITESPACE "\n" diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/record_field_list.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/record_field_list.rs index a3bd7787db77c..1f4612f53913b 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/record_field_list.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/record_field_list.rs @@ -1 +1 @@ -struct S { a: i32, b: f32 } +struct S { a: i32, b: f32, unsafe c: u8 } diff --git a/src/tools/rust-analyzer/crates/paths/src/lib.rs b/src/tools/rust-analyzer/crates/paths/src/lib.rs index 3d722b1ff1155..2c6a82bf0c3fc 100644 --- a/src/tools/rust-analyzer/crates/paths/src/lib.rs +++ b/src/tools/rust-analyzer/crates/paths/src/lib.rs @@ -248,7 +248,9 @@ impl AbsPath { } pub fn canonicalize(&self) -> ! { - panic!("We explicitly do not provide canonicalization API, as that is almost always a wrong solution, see #14430") + panic!( + "We explicitly do not provide canonicalization API, as that is almost always a wrong solution, see #14430" + ) } /// Equivalent of [`Utf8Path::strip_prefix`] for `AbsPath`. diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/src/legacy_protocol/json.rs b/src/tools/rust-analyzer/crates/proc-macro-api/src/legacy_protocol/json.rs index ec89f6a9e65d2..c8f774031b584 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-api/src/legacy_protocol/json.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-api/src/legacy_protocol/json.rs @@ -1,6 +1,7 @@ //! Protocol functions for json. use std::io::{self, BufRead, Write}; +/// Reads a JSON message from the input stream. pub fn read_json<'a>( inp: &mut impl BufRead, buf: &'a mut String, @@ -26,10 +27,10 @@ pub fn read_json<'a>( } } +/// Writes a JSON message to the output stream. pub fn write_json(out: &mut impl Write, msg: &str) -> io::Result<()> { tracing::debug!("> {}", msg); out.write_all(msg.as_bytes())?; out.write_all(b"\n")?; - out.flush()?; - Ok(()) + out.flush() } diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/src/legacy_protocol/msg.rs b/src/tools/rust-analyzer/crates/proc-macro-api/src/legacy_protocol/msg.rs index 4b831e4acebb9..55185aa492ded 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-api/src/legacy_protocol/msg.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-api/src/legacy_protocol/msg.rs @@ -10,7 +10,7 @@ use serde_derive::{Deserialize, Serialize}; use crate::ProcMacroKind; pub use self::flat::{ - deserialize_span_data_index_map, serialize_span_data_index_map, FlatTree, SpanDataIndexMap, + FlatTree, SpanDataIndexMap, deserialize_span_data_index_map, serialize_span_data_index_map, }; pub use span::TokenId; @@ -20,69 +20,103 @@ pub const VERSION_CHECK_VERSION: u32 = 1; pub const ENCODE_CLOSE_SPAN_VERSION: u32 = 2; pub const HAS_GLOBAL_SPANS: u32 = 3; pub const RUST_ANALYZER_SPAN_SUPPORT: u32 = 4; -/// Whether literals encode their kind as an additional u32 field and idents their rawness as a u32 field +/// Whether literals encode their kind as an additional u32 field and idents their rawness as a u32 field. pub const EXTENDED_LEAF_DATA: u32 = 5; +/// Current API version of the proc-macro protocol. pub const CURRENT_API_VERSION: u32 = EXTENDED_LEAF_DATA; +/// Represents requests sent from the client to the proc-macro-srv. #[derive(Debug, Serialize, Deserialize)] pub enum Request { + /// Retrieves a list of macros from a given dynamic library. /// Since [`NO_VERSION_CHECK_VERSION`] ListMacros { dylib_path: Utf8PathBuf }, + + /// Expands a procedural macro. /// Since [`NO_VERSION_CHECK_VERSION`] ExpandMacro(Box), + + /// Performs an API version check between the client and the server. /// Since [`VERSION_CHECK_VERSION`] ApiVersionCheck {}, + + /// Sets server-specific configurations. /// Since [`RUST_ANALYZER_SPAN_SUPPORT`] SetConfig(ServerConfig), } +/// Defines the mode used for handling span data. #[derive(Copy, Clone, Default, Debug, Serialize, Deserialize)] pub enum SpanMode { + /// Default mode, where spans are identified by an ID. #[default] Id, + + /// Rust Analyzer-specific span handling mode. RustAnalyzer, } +/// Represents responses sent from the proc-macro-srv to the client. #[derive(Debug, Serialize, Deserialize)] pub enum Response { + /// Returns a list of available macros in a dynamic library. /// Since [`NO_VERSION_CHECK_VERSION`] ListMacros(Result, String>), + + /// Returns result of a macro expansion. /// Since [`NO_VERSION_CHECK_VERSION`] ExpandMacro(Result), + + /// Returns the API version supported by the server. /// Since [`NO_VERSION_CHECK_VERSION`] ApiVersionCheck(u32), + + /// Confirms the application of a configuration update. /// Since [`RUST_ANALYZER_SPAN_SUPPORT`] SetConfig(ServerConfig), + + /// Returns the result of a macro expansion, including extended span data. /// Since [`RUST_ANALYZER_SPAN_SUPPORT`] ExpandMacroExtended(Result), } +/// Configuration settings for the proc-macro-srv. #[derive(Debug, Serialize, Deserialize, Default)] #[serde(default)] pub struct ServerConfig { + /// Defines how span data should be handled. pub span_mode: SpanMode, } +/// Represents an extended macro expansion response, including span data mappings. #[derive(Debug, Serialize, Deserialize)] pub struct ExpandMacroExtended { + /// The expanded syntax tree. pub tree: FlatTree, + /// Additional span data mappings. pub span_data_table: Vec, } +/// Represents an error message when a macro expansion results in a panic. #[derive(Debug, Serialize, Deserialize)] pub struct PanicMessage(pub String); +/// Represents a macro expansion request sent from the client. #[derive(Debug, Serialize, Deserialize)] pub struct ExpandMacro { + /// The path to the dynamic library containing the macro. pub lib: Utf8PathBuf, /// Environment variables to set during macro expansion. pub env: Vec<(String, String)>, + /// The current working directory for the macro expansion. pub current_dir: Option, + /// Macro expansion data, including the macro body, name and attributes. #[serde(flatten)] pub data: ExpandMacroData, } +/// Represents the input data required for expanding a macro. #[derive(Debug, Serialize, Deserialize)] pub struct ExpandMacroData { /// Argument of macro call. @@ -103,18 +137,24 @@ pub struct ExpandMacroData { #[serde(skip_serializing_if = "ExpnGlobals::skip_serializing_if")] #[serde(default)] pub has_global_spans: ExpnGlobals, + /// Table of additional span data. #[serde(skip_serializing_if = "Vec::is_empty")] #[serde(default)] pub span_data_table: Vec, } +/// Represents global expansion settings, including span resolution. #[derive(Copy, Clone, Default, Debug, Serialize, Deserialize)] pub struct ExpnGlobals { + /// Determines whether to serialize the expansion settings. #[serde(skip_serializing)] #[serde(default)] pub serialize: bool, + /// Defines the `def_site` span location. pub def_site: usize, + /// Defines the `call_site` span location. pub call_site: usize, + /// Defines the `mixed_site` span location. pub mixed_site: usize, } @@ -150,16 +190,18 @@ pub trait Message: serde::Serialize + DeserializeOwned { impl Message for Request {} impl Message for Response {} +/// Type alias for a function that reads protocol messages from a buffered input stream. #[allow(type_alias_bounds)] type ProtocolRead = for<'i, 'buf> fn(inp: &'i mut R, buf: &'buf mut String) -> io::Result>; +/// Type alias for a function that writes protocol messages to an output stream. #[allow(type_alias_bounds)] type ProtocolWrite = for<'o, 'msg> fn(out: &'o mut W, msg: &'msg str) -> io::Result<()>; #[cfg(test)] mod tests { - use intern::{sym, Symbol}; - use span::{Edition, ErasedFileAstId, Span, SpanAnchor, SyntaxContextId, TextRange, TextSize}; + use intern::{Symbol, sym}; + use span::{Edition, ErasedFileAstId, Span, SpanAnchor, SyntaxContext, TextRange, TextSize}; use tt::{ Delimiter, DelimiterKind, Ident, Leaf, Literal, Punct, Spacing, TopSubtree, TopSubtreeBuilder, @@ -180,12 +222,12 @@ mod tests { open: Span { range: TextRange::empty(TextSize::new(0)), anchor, - ctx: SyntaxContextId::root(Edition::CURRENT), + ctx: SyntaxContext::root(Edition::CURRENT), }, close: Span { range: TextRange::empty(TextSize::new(19)), anchor, - ctx: SyntaxContextId::root(Edition::CURRENT), + ctx: SyntaxContext::root(Edition::CURRENT), }, kind: DelimiterKind::Invisible, }); @@ -196,7 +238,7 @@ mod tests { span: Span { range: TextRange::at(TextSize::new(0), TextSize::of("struct")), anchor, - ctx: SyntaxContextId::root(Edition::CURRENT), + ctx: SyntaxContext::root(Edition::CURRENT), }, is_raw: tt::IdentIsRaw::No, } @@ -208,7 +250,7 @@ mod tests { span: Span { range: TextRange::at(TextSize::new(5), TextSize::of("r#Foo")), anchor, - ctx: SyntaxContextId::root(Edition::CURRENT), + ctx: SyntaxContext::root(Edition::CURRENT), }, is_raw: tt::IdentIsRaw::Yes, } @@ -219,7 +261,7 @@ mod tests { span: Span { range: TextRange::at(TextSize::new(10), TextSize::of("\"Foo\"")), anchor, - ctx: SyntaxContextId::root(Edition::CURRENT), + ctx: SyntaxContext::root(Edition::CURRENT), }, kind: tt::LitKind::Str, suffix: None, @@ -229,7 +271,7 @@ mod tests { span: Span { range: TextRange::at(TextSize::new(13), TextSize::of('@')), anchor, - ctx: SyntaxContextId::root(Edition::CURRENT), + ctx: SyntaxContext::root(Edition::CURRENT), }, spacing: Spacing::Joint, })); @@ -238,23 +280,23 @@ mod tests { Span { range: TextRange::at(TextSize::new(14), TextSize::of('{')), anchor, - ctx: SyntaxContextId::root(Edition::CURRENT), + ctx: SyntaxContext::root(Edition::CURRENT), }, ); builder.push(Leaf::Literal(Literal { - symbol: sym::INTEGER_0.clone(), + symbol: sym::INTEGER_0, span: Span { range: TextRange::at(TextSize::new(15), TextSize::of("0u32")), anchor, - ctx: SyntaxContextId::root(Edition::CURRENT), + ctx: SyntaxContext::root(Edition::CURRENT), }, kind: tt::LitKind::Integer, - suffix: Some(sym::u32.clone()), + suffix: Some(sym::u32), })); builder.close(Span { range: TextRange::at(TextSize::new(19), TextSize::of('}')), anchor, - ctx: SyntaxContextId::root(Edition::CURRENT), + ctx: SyntaxContext::root(Edition::CURRENT), }); builder.build() diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/src/legacy_protocol/msg/flat.rs b/src/tools/rust-analyzer/crates/proc-macro-api/src/legacy_protocol/msg/flat.rs index c194f301714fc..597ffa05d203e 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-api/src/legacy_protocol/msg/flat.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-api/src/legacy_protocol/msg/flat.rs @@ -40,9 +40,7 @@ use std::collections::VecDeque; use intern::Symbol; use rustc_hash::FxHashMap; use serde_derive::{Deserialize, Serialize}; -use span::{ - EditionedFileId, ErasedFileAstId, Span, SpanAnchor, SyntaxContextId, TextRange, TokenId, -}; +use span::{EditionedFileId, ErasedFileAstId, Span, SpanAnchor, SyntaxContext, TextRange, TokenId}; use crate::legacy_protocol::msg::{ENCODE_CLOSE_SPAN_VERSION, EXTENDED_LEAF_DATA}; @@ -74,7 +72,9 @@ pub fn deserialize_span_data_index_map(map: &[u32]) -> SpanDataIndexMap { ast_id: ErasedFileAstId::from_raw(ast_id), }, range: TextRange::new(start.into(), end.into()), - ctx: SyntaxContextId::from_u32(e), + // SAFETY: We only receive spans from the server. If someone mess up the communication UB can happen, + // but that will be their problem. + ctx: unsafe { SyntaxContext::from_u32(e) }, } }) .collect() diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/src/lib.rs b/src/tools/rust-analyzer/crates/proc-macro-api/src/lib.rs index dc3328ebcda48..25c30b6db4a4c 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-api/src/lib.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-api/src/lib.rs @@ -13,20 +13,23 @@ mod process; use paths::{AbsPath, AbsPathBuf}; use span::Span; -use std::{fmt, io, sync::Arc}; +use std::{fmt, io, sync::Arc, time::SystemTime}; use crate::{ legacy_protocol::msg::{ - deserialize_span_data_index_map, flat::serialize_span_data_index_map, ExpandMacro, - ExpandMacroData, ExpnGlobals, FlatTree, PanicMessage, Request, Response, SpanDataIndexMap, - HAS_GLOBAL_SPANS, RUST_ANALYZER_SPAN_SUPPORT, + ExpandMacro, ExpandMacroData, ExpnGlobals, FlatTree, HAS_GLOBAL_SPANS, PanicMessage, + RUST_ANALYZER_SPAN_SUPPORT, Request, Response, SpanDataIndexMap, + deserialize_span_data_index_map, flat::serialize_span_data_index_map, }, process::ProcMacroServerProcess, }; +/// Represents different kinds of procedural macros that can be expanded by the external server. #[derive(Copy, Clone, Eq, PartialEq, Debug, serde_derive::Serialize, serde_derive::Deserialize)] pub enum ProcMacroKind { + /// A macro that derives implementations for a struct or enum. CustomDerive, + /// An attribute-like procedural macro. Attr, // This used to be called FuncLike, so that's what the server expects currently. #[serde(alias = "Bang")] @@ -46,11 +49,13 @@ pub struct ProcMacroClient { path: AbsPathBuf, } +/// Represents a dynamically loaded library containing procedural macros. pub struct MacroDylib { path: AbsPathBuf, } impl MacroDylib { + /// Creates a new MacroDylib instance with the given path. pub fn new(path: AbsPathBuf) -> MacroDylib { MacroDylib { path } } @@ -66,6 +71,7 @@ pub struct ProcMacro { dylib_path: Arc, name: Box, kind: ProcMacroKind, + dylib_last_modified: Option, } impl Eq for ProcMacro {} @@ -73,11 +79,13 @@ impl PartialEq for ProcMacro { fn eq(&self, other: &Self) -> bool { self.name == other.name && self.kind == other.kind - && Arc::ptr_eq(&self.dylib_path, &other.dylib_path) + && self.dylib_path == other.dylib_path + && self.dylib_last_modified == other.dylib_last_modified && Arc::ptr_eq(&self.process, &other.process) } } +/// Represents errors encountered when communicating with the proc-macro server. #[derive(Clone, Debug)] pub struct ServerError { pub message: String, @@ -97,15 +105,17 @@ impl fmt::Display for ServerError { impl ProcMacroClient { /// Spawns an external process as the proc macro server and returns a client connected to it. - pub fn spawn( + pub fn spawn<'a>( process_path: &AbsPath, - env: impl IntoIterator, impl AsRef)> - + Clone, + env: impl IntoIterator< + Item = (impl AsRef, &'a Option>), + > + Clone, ) -> io::Result { let process = ProcMacroServerProcess::run(process_path, env)?; Ok(ProcMacroClient { process: Arc::new(process), path: process_path.to_owned() }) } + /// Returns the absolute path to the proc-macro server. pub fn server_path(&self) -> &AbsPath { &self.path } @@ -116,6 +126,9 @@ impl ProcMacroClient { let macros = self.process.find_proc_macros(&dylib.path)?; let dylib_path = Arc::new(dylib.path); + let dylib_last_modified = std::fs::metadata(dylib_path.as_path()) + .ok() + .and_then(|metadata| metadata.modified().ok()); match macros { Ok(macros) => Ok(macros .into_iter() @@ -124,26 +137,32 @@ impl ProcMacroClient { name: name.into(), kind, dylib_path: dylib_path.clone(), + dylib_last_modified, }) .collect()), Err(message) => Err(ServerError { message, io: None }), } } + /// Checks if the proc-macro server has exited. pub fn exited(&self) -> Option<&ServerError> { self.process.exited() } } impl ProcMacro { + /// Returns the name of the procedural macro. pub fn name(&self) -> &str { &self.name } + /// Returns the type of procedural macro. pub fn kind(&self) -> ProcMacroKind { self.kind } + /// Expands the procedural macro by sending an expansion request to the server. + /// This includes span information and environmental context. pub fn expand( &self, subtree: tt::SubtreeView<'_, Span>, @@ -152,7 +171,7 @@ impl ProcMacro { def_site: Span, call_site: Span, mixed_site: Span, - current_dir: Option, + current_dir: String, ) -> Result, PanicMessage>, ServerError> { let version = self.process.version(); @@ -180,7 +199,7 @@ impl ProcMacro { }, lib: self.dylib_path.to_path_buf().into(), env, - current_dir, + current_dir: Some(current_dir), }; let response = self.process.send_task(Request::ExpandMacro(Box::new(task)))?; diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/src/process.rs b/src/tools/rust-analyzer/crates/proc-macro-api/src/process.rs index d998b23d3bbef..fcea75ef672a1 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-api/src/process.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-api/src/process.rs @@ -11,16 +11,17 @@ use paths::AbsPath; use stdx::JodChild; use crate::{ + ProcMacroKind, ServerError, legacy_protocol::{ json::{read_json, write_json}, msg::{ - Message, Request, Response, ServerConfig, SpanMode, CURRENT_API_VERSION, - RUST_ANALYZER_SPAN_SUPPORT, + CURRENT_API_VERSION, Message, RUST_ANALYZER_SPAN_SUPPORT, Request, Response, + ServerConfig, SpanMode, }, }, - ProcMacroKind, ServerError, }; +/// Represents a process handling proc-macro communication. #[derive(Debug)] pub(crate) struct ProcMacroServerProcess { /// The state of the proc-macro server process, the protocol is currently strictly sequential @@ -32,6 +33,7 @@ pub(crate) struct ProcMacroServerProcess { exited: OnceLock>, } +/// Maintains the state of the proc-macro server process. #[derive(Debug)] struct ProcessSrvState { process: Process, @@ -40,10 +42,12 @@ struct ProcessSrvState { } impl ProcMacroServerProcess { - pub(crate) fn run( + /// Starts the proc-macro server and performs a version check + pub(crate) fn run<'a>( process_path: &AbsPath, - env: impl IntoIterator, impl AsRef)> - + Clone, + env: impl IntoIterator< + Item = (impl AsRef, &'a Option>), + > + Clone, ) -> io::Result { let create_srv = || { let mut process = Process::run(process_path, env.clone())?; @@ -59,8 +63,7 @@ impl ProcMacroServerProcess { let mut srv = create_srv()?; tracing::info!("sending proc-macro server version check"); match srv.version_check() { - Ok(v) if v > CURRENT_API_VERSION => Err(io::Error::new( - io::ErrorKind::Other, + Ok(v) if v > CURRENT_API_VERSION => Err(io::Error::other( format!( "The version of the proc-macro server ({v}) in your Rust toolchain is newer than the version supported by your rust-analyzer ({CURRENT_API_VERSION}). This will prevent proc-macro expansion from working. Please consider updating your rust-analyzer to ensure compatibility with your current toolchain." ), @@ -79,20 +82,23 @@ impl ProcMacroServerProcess { Err(e) => { tracing::info!(%e, "proc-macro version check failed"); Err( - io::Error::new(io::ErrorKind::Other, format!("proc-macro server version check failed: {e}")), + io::Error::other(format!("proc-macro server version check failed: {e}")), ) } } } + /// Returns the server error if the process has exited. pub(crate) fn exited(&self) -> Option<&ServerError> { self.exited.get().map(|it| &it.0) } + /// Retrieves the API version of the proc-macro server. pub(crate) fn version(&self) -> u32 { self.version } + /// Checks the API version of the running proc-macro server. fn version_check(&self) -> Result { let request = Request::ApiVersionCheck {}; let response = self.send_task(request)?; @@ -103,6 +109,7 @@ impl ProcMacroServerProcess { } } + /// Enable support for rust-analyzer span mode if the server supports it. fn enable_rust_analyzer_spans(&self) -> Result { let request = Request::SetConfig(ServerConfig { span_mode: SpanMode::RustAnalyzer }); let response = self.send_task(request)?; @@ -113,6 +120,7 @@ impl ProcMacroServerProcess { } } + /// Finds proc-macros in a given dynamic library. pub(crate) fn find_proc_macros( &self, dylib_path: &AbsPath, @@ -127,6 +135,7 @@ impl ProcMacroServerProcess { } } + /// Sends a request to the proc-macro server and waits for a response. pub(crate) fn send_task(&self, req: Request) -> Result { if let Some(server_error) = self.exited.get() { return Err(server_error.0.clone()); @@ -177,20 +186,25 @@ impl ProcMacroServerProcess { } } +/// Manages the execution of the proc-macro server process. #[derive(Debug)] struct Process { child: JodChild, } impl Process { - fn run( + /// Runs a new proc-macro server process with the specified environment variables. + fn run<'a>( path: &AbsPath, - env: impl IntoIterator, impl AsRef)>, + env: impl IntoIterator< + Item = (impl AsRef, &'a Option>), + >, ) -> io::Result { let child = JodChild(mk_child(path, env)?); Ok(Process { child }) } + /// Retrieves stdin and stdout handles for the process. fn stdio(&mut self) -> Option<(ChildStdin, BufReader)> { let stdin = self.child.stdin.take()?; let stdout = self.child.stdout.take()?; @@ -200,14 +214,22 @@ impl Process { } } -fn mk_child( +/// Creates and configures a new child process for the proc-macro server. +fn mk_child<'a>( path: &AbsPath, - env: impl IntoIterator, impl AsRef)>, + extra_env: impl IntoIterator< + Item = (impl AsRef, &'a Option>), + >, ) -> io::Result { #[allow(clippy::disallowed_methods)] let mut cmd = Command::new(path); - cmd.envs(env) - .env("RUST_ANALYZER_INTERNALS_DO_NOT_USE", "this is unstable") + for env in extra_env { + match env { + (key, Some(val)) => cmd.env(key, val), + (key, None) => cmd.env_remove(key), + }; + } + cmd.env("RUST_ANALYZER_INTERNALS_DO_NOT_USE", "this is unstable") .stdin(Stdio::piped()) .stdout(Stdio::piped()) .stderr(Stdio::inherit()); @@ -221,6 +243,7 @@ fn mk_child( cmd.spawn() } +/// Sends a request to the server and reads the response. fn send_request( mut writer: &mut impl Write, mut reader: &mut impl BufRead, diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv-cli/Cargo.toml b/src/tools/rust-analyzer/crates/proc-macro-srv-cli/Cargo.toml index 57a28b00365f6..ab421021b8bfd 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-srv-cli/Cargo.toml +++ b/src/tools/rust-analyzer/crates/proc-macro-srv-cli/Cargo.toml @@ -8,6 +8,7 @@ authors.workspace = true edition.workspace = true license.workspace = true rust-version.workspace = true +publish = false [dependencies] proc-macro-srv.workspace = true diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/main.rs b/src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/main.rs index de59e88aac40c..c47ed053254bf 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/main.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/main.rs @@ -1,6 +1,7 @@ //! A standalone binary for `proc-macro-srv`. //! Driver for proc macro server #![cfg_attr(feature = "in-rust-tree", feature(rustc_private))] +#![cfg_attr(not(feature = "sysroot-abi"), allow(unused_crate_dependencies))] #![allow(clippy::print_stderr)] #[cfg(feature = "in-rust-tree")] @@ -14,7 +15,9 @@ use main_loop::run; fn main() -> std::io::Result<()> { let v = std::env::var("RUST_ANALYZER_INTERNALS_DO_NOT_USE"); if v.is_err() { - eprintln!("This is an IDE implementation detail, you can use this tool by exporting RUST_ANALYZER_INTERNALS_DO_NOT_USE."); + eprintln!( + "This is an IDE implementation detail, you can use this tool by exporting RUST_ANALYZER_INTERNALS_DO_NOT_USE." + ); eprintln!( "Note that this tool's API is highly unstable and may break without prior notice" ); diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/main_loop.rs b/src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/main_loop.rs index 569070766f1c6..f54dff1f2d822 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/main_loop.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/main_loop.rs @@ -4,8 +4,8 @@ use std::io; use proc_macro_api::legacy_protocol::{ json::{read_json, write_json}, msg::{ - self, deserialize_span_data_index_map, serialize_span_data_index_map, ExpandMacroData, - ExpnGlobals, Message, SpanMode, TokenId, CURRENT_API_VERSION, + self, CURRENT_API_VERSION, ExpandMacroData, ExpnGlobals, Message, SpanMode, TokenId, + deserialize_span_data_index_map, serialize_span_data_index_map, }, }; use proc_macro_srv::EnvSnapshot; diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/Cargo.toml b/src/tools/rust-analyzer/crates/proc-macro-srv/Cargo.toml index d3b56b402ea83..8fd675d0d31f4 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-srv/Cargo.toml +++ b/src/tools/rust-analyzer/crates/proc-macro-srv/Cargo.toml @@ -13,11 +13,9 @@ rust-version.workspace = true [dependencies] object.workspace = true -libc.workspace = true libloading.workspace = true memmap2.workspace = true -stdx.workspace = true tt.workspace = true syntax-bridge.workspace = true paths.workspace = true @@ -27,6 +25,9 @@ intern.workspace = true ra-ap-rustc_lexer.workspace = true +[target.'cfg(unix)'.dependencies] +libc.workspace = true + [dev-dependencies] expect-test.workspace = true diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/build.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/build.rs index 07a10aaae578c..97c0c4bda7dfa 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-srv/build.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-srv/build.rs @@ -12,5 +12,5 @@ fn main() { let version_string = std::str::from_utf8(&output.stdout[..]) .expect("rustc --version output must be UTF-8") .trim(); - println!("cargo::rustc-env=RUSTC_VERSION={}", version_string); + println!("cargo::rustc-env=RUSTC_VERSION={version_string}"); } diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/proc-macro-test/Cargo.toml b/src/tools/rust-analyzer/crates/proc-macro-srv/proc-macro-test/Cargo.toml index 16fcc92962072..eddefb33c0ff1 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-srv/proc-macro-test/Cargo.toml +++ b/src/tools/rust-analyzer/crates/proc-macro-srv/proc-macro-test/Cargo.toml @@ -3,10 +3,10 @@ name = "proc-macro-test" version = "0.0.0" publish = false -edition = "2021" +edition = "2024" license = "MIT OR Apache-2.0" [lib] [build-dependencies] -cargo_metadata = "0.18.1" +cargo_metadata = "0.19.2" diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/proc-macro-test/build.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/proc-macro-test/build.rs index d3d58a6df0115..b97569d4dbdf1 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-srv/proc-macro-test/build.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-srv/proc-macro-test/build.rs @@ -110,7 +110,7 @@ fn main() { let mut artifact_path = None; for message in Message::parse_stream(output.stdout.as_slice()) { if let Message::CompilerArtifact(artifact) = message.unwrap() { - if artifact.target.kind.contains(&"proc-macro".to_string()) + if artifact.target.kind.contains(&cargo_metadata::TargetKind::ProcMacro) && (artifact.package_id.repr.starts_with(&repr) || artifact.package_id.repr == pkgid) { diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/proc-macro-test/imp/.gitignore b/src/tools/rust-analyzer/crates/proc-macro-srv/proc-macro-test/imp/.gitignore index 2c96eb1b6517f..2f7896d1d1365 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-srv/proc-macro-test/imp/.gitignore +++ b/src/tools/rust-analyzer/crates/proc-macro-srv/proc-macro-test/imp/.gitignore @@ -1,2 +1 @@ target/ -Cargo.lock diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/proc-macro-test/imp/Cargo.lock b/src/tools/rust-analyzer/crates/proc-macro-srv/proc-macro-test/imp/Cargo.lock new file mode 100644 index 0000000000000..99c7ca10affb2 --- /dev/null +++ b/src/tools/rust-analyzer/crates/proc-macro-srv/proc-macro-test/imp/Cargo.lock @@ -0,0 +1,7 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. +version = 4 + +[[package]] +name = "proc-macro-test-impl" +version = "0.0.0" diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/proc-macro-test/imp/Cargo.toml b/src/tools/rust-analyzer/crates/proc-macro-srv/proc-macro-test/imp/Cargo.toml index fb98d758a8b7b..33b7c2bb0ad66 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-srv/proc-macro-test/imp/Cargo.toml +++ b/src/tools/rust-analyzer/crates/proc-macro-srv/proc-macro-test/imp/Cargo.toml @@ -2,7 +2,7 @@ name = "proc-macro-test-impl" version = "0.0.0" license = "MIT OR Apache-2.0" -edition = "2021" +edition = "2024" publish = false [lib] diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/proc-macro-test/imp/src/lib.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/proc-macro-test/imp/src/lib.rs index 749a7760592b5..dfdbb4c95fcac 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-srv/proc-macro-test/imp/src/lib.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-srv/proc-macro-test/imp/src/lib.rs @@ -90,7 +90,7 @@ pub fn attr_error(args: TokenStream, item: TokenStream) -> TokenStream { #[proc_macro_derive(DeriveEmpty)] pub fn derive_empty(_item: TokenStream) -> TokenStream { - TokenStream::new() + TokenStream::default() } #[proc_macro_derive(DerivePanic)] diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/dylib.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/dylib.rs index cbf7a277bfae6..c49159df9916d 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-srv/src/dylib.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/dylib.rs @@ -9,7 +9,7 @@ use libloading::Library; use object::Object; use paths::{Utf8Path, Utf8PathBuf}; -use crate::{proc_macros::ProcMacros, server_impl::TopSubtree, ProcMacroKind, ProcMacroSrvSpan}; +use crate::{ProcMacroKind, ProcMacroSrvSpan, proc_macros::ProcMacros, server_impl::TopSubtree}; /// Loads dynamic library in platform dependent manner. /// @@ -21,13 +21,32 @@ use crate::{proc_macros::ProcMacros, server_impl::TopSubtree, ProcMacroKind, Pro /// [here](https://github.com/fedochet/rust-proc-macro-panic-inside-panic-expample/issues/1) /// /// It seems that on Windows that behaviour is default, so we do nothing in that case. +/// +/// # Safety +/// +/// The caller is responsible for ensuring that the path is valid proc-macro library #[cfg(windows)] -fn load_library(file: &Utf8Path) -> Result { +unsafe fn load_library(file: &Utf8Path) -> Result { + // SAFETY: The caller is responsible for ensuring that the path is valid proc-macro library unsafe { Library::new(file) } } +/// Loads dynamic library in platform dependent manner. +/// +/// For unix, you have to use RTLD_DEEPBIND flag to escape problems described +/// [here](https://github.com/fedochet/rust-proc-macro-panic-inside-panic-expample) +/// and [here](https://github.com/rust-lang/rust/issues/60593). +/// +/// Usage of RTLD_DEEPBIND +/// [here](https://github.com/fedochet/rust-proc-macro-panic-inside-panic-expample/issues/1) +/// +/// It seems that on Windows that behaviour is default, so we do nothing in that case. +/// +/// # Safety +/// +/// The caller is responsible for ensuring that the path is valid proc-macro library #[cfg(unix)] -fn load_library(file: &Utf8Path) -> Result { +unsafe fn load_library(file: &Utf8Path) -> Result { // not defined by POSIX, different values on mips vs other targets #[cfg(target_env = "gnu")] use libc::RTLD_DEEPBIND; @@ -39,6 +58,7 @@ fn load_library(file: &Utf8Path) -> Result { #[cfg(not(target_env = "gnu"))] const RTLD_DEEPBIND: std::os::raw::c_int = 0x0; + // SAFETY: The caller is responsible for ensuring that the path is valid proc-macro library unsafe { UnixLibrary::open(Some(file), RTLD_NOW | RTLD_DEEPBIND).map(|lib| lib.into()) } } @@ -84,26 +104,32 @@ struct ProcMacroLibrary { impl ProcMacroLibrary { fn open(path: &Utf8Path) -> Result { let file = fs::File::open(path)?; + #[allow(clippy::undocumented_unsafe_blocks)] // FIXME let file = unsafe { memmap2::Mmap::map(&file) }?; let obj = object::File::parse(&*file) .map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))?; let version_info = version::read_dylib_info(&obj)?; + if version_info.version_string != crate::RUSTC_VERSION_STRING { + return Err(LoadProcMacroDylibError::AbiMismatch(version_info.version_string)); + } + let symbol_name = find_registrar_symbol(&obj).map_err(invalid_data_err)?.ok_or_else(|| { invalid_data_err(format!("Cannot find registrar symbol in file {path}")) })?; - let lib = load_library(path).map_err(invalid_data_err)?; - let proc_macros = unsafe { - // SAFETY: We extend the lifetime here to avoid referential borrow problems - // We never reveal proc_macros to the outside and drop it before _lib - std::mem::transmute::<&ProcMacros, &'static ProcMacros>(ProcMacros::from_lib( - &lib, - symbol_name, - &version_info.version_string, - )?) - }; - Ok(ProcMacroLibrary { _lib: lib, proc_macros }) + // SAFETY: We have verified the validity of the dylib as a proc-macro library + let lib = unsafe { load_library(path) }.map_err(invalid_data_err)?; + // SAFETY: We have verified the validity of the dylib as a proc-macro library + // The 'static lifetime is a lie, it's actually the lifetime of the library but unavoidable + // due to self-referentiality + // But we make sure that we do not drop it before the symbol is dropped + let proc_macros = + unsafe { lib.get::<&'static &'static ProcMacros>(symbol_name.as_bytes()) }; + match proc_macros { + Ok(proc_macros) => Ok(ProcMacroLibrary { proc_macros: *proc_macros, _lib: lib }), + Err(e) => Err(e.into()), + } } } diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/dylib/version.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/dylib/version.rs index 4e28aaced9b0a..3b2551f08c480 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-srv/src/dylib/version.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/dylib/version.rs @@ -27,7 +27,7 @@ pub fn read_dylib_info(obj: &object::File<'_>) -> io::Result { let mut items = ver_str.split_whitespace(); let tag = items.next().ok_or_else(|| err!("version format error"))?; if tag != "rustc" { - return Err(err!("version format error (No rustc tag)")); + return Err(err!("no rustc tag")); } let version_part = items.next().ok_or_else(|| err!("no version string"))?; @@ -83,7 +83,7 @@ fn read_section<'a>(obj: &object::File<'a>, section_name: &str) -> io::Result<&' /// A proc macro crate binary's ".rustc" section has following byte layout: /// * [b'r',b'u',b's',b't',0,0,0,5] is the first 8 bytes /// * ff060000 734e6150 is followed, it's the snappy format magic bytes, -/// means bytes from here(including this sequence) are compressed in +/// means bytes from here (including this sequence) are compressed in /// snappy compression format. Version info is inside here, so decompress /// this. /// @@ -110,7 +110,7 @@ pub fn read_version(obj: &object::File<'_>) -> io::Result { )); } let version = u32::from_be_bytes([dot_rustc[4], dot_rustc[5], dot_rustc[6], dot_rustc[7]]); - // Last supported version is: + // Last version with breaking changes is: // https://github.com/rust-lang/rust/commit/b94cfefc860715fb2adf72a6955423d384c69318 let (mut metadata_portion, bytes_before_version) = match version { 8 => { @@ -118,7 +118,7 @@ pub fn read_version(obj: &object::File<'_>) -> io::Result { let data_len = u32::from_be_bytes(len_bytes.try_into().unwrap()) as usize; (&dot_rustc[12..data_len + 12], 13) } - 9 => { + 9 | 10 => { let len_bytes = &dot_rustc[8..16]; let data_len = u64::from_le_bytes(len_bytes.try_into().unwrap()) as usize; (&dot_rustc[16..data_len + 12], 17) diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/lib.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/lib.rs index f28821b4afc5c..223c5a54b7034 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-srv/src/lib.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/lib.rs @@ -11,10 +11,11 @@ //! rustc rather than `unstable`. (Although in general ABI compatibility is still an issue)… #![cfg(any(feature = "sysroot-abi", rust_analyzer))] +#![cfg_attr(not(feature = "sysroot-abi"), allow(unused_crate_dependencies))] #![cfg_attr(feature = "in-rust-tree", feature(rustc_private))] #![feature(proc_macro_internals, proc_macro_diagnostic, proc_macro_span)] #![allow(unreachable_pub, internal_features, clippy::disallowed_types, clippy::print_stderr)] -#![deny(deprecated_safe)] +#![deny(deprecated_safe, clippy::undocumented_unsafe_blocks)] extern crate proc_macro; #[cfg(feature = "in-rust-tree")] @@ -30,7 +31,7 @@ mod proc_macros; mod server_impl; use std::{ - collections::{hash_map::Entry, HashMap}, + collections::{HashMap, hash_map::Entry}, env, ffi::OsString, fs, diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/proc_macros.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/proc_macros.rs index 58f5e80dc4ea6..18532706c4aaa 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-srv/src/proc_macros.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/proc_macros.rs @@ -2,11 +2,7 @@ use proc_macro::bridge; -use libloading::Library; - -use crate::{ - dylib::LoadProcMacroDylibError, server_impl::TopSubtree, ProcMacroKind, ProcMacroSrvSpan, -}; +use crate::{ProcMacroKind, ProcMacroSrvSpan, server_impl::TopSubtree}; #[repr(transparent)] pub(crate) struct ProcMacros([bridge::client::ProcMacro]); @@ -18,28 +14,6 @@ impl From for crate::PanicMessage { } impl ProcMacros { - /// Load a new ABI. - /// - /// # Arguments - /// - /// *`lib` - The dynamic library containing the macro implementations - /// *`symbol_name` - The symbol name the macros can be found attributes - /// *`info` - RustCInfo about the compiler that was used to compile the - /// macro crate. This is the information we use to figure out - /// which ABI to return - pub(crate) fn from_lib<'l>( - lib: &'l Library, - symbol_name: String, - version_string: &str, - ) -> Result<&'l ProcMacros, LoadProcMacroDylibError> { - if version_string != crate::RUSTC_VERSION_STRING { - return Err(LoadProcMacroDylibError::AbiMismatch(version_string.to_owned())); - } - unsafe { lib.get::<&'l &'l ProcMacros>(symbol_name.as_bytes()) } - .map(|it| **it) - .map_err(Into::into) - } - pub(crate) fn expand( &self, macro_name: &str, @@ -52,7 +26,7 @@ impl ProcMacros { let parsed_body = crate::server_impl::TokenStream::with_subtree(macro_body); let parsed_attributes = attributes - .map_or_else(crate::server_impl::TokenStream::new, |attr| { + .map_or_else(crate::server_impl::TokenStream::default, |attr| { crate::server_impl::TokenStream::with_subtree(attr) }); diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/server_impl/rust_analyzer_span.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/server_impl/rust_analyzer_span.rs index 59293ee3f9659..47555a5db2f74 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-srv/src/server_impl/rust_analyzer_span.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/server_impl/rust_analyzer_span.rs @@ -11,10 +11,10 @@ use std::{ use intern::Symbol; use proc_macro::bridge::{self, server}; -use span::{FileId, Span, FIXUP_ERASED_FILE_AST_ID_MARKER}; +use span::{FIXUP_ERASED_FILE_AST_ID_MARKER, Span}; use tt::{TextRange, TextSize}; -use crate::server_impl::{literal_kind_to_internal, token_stream::TokenStreamBuilder, TopSubtree}; +use crate::server_impl::{TopSubtree, literal_kind_to_internal, token_stream::TokenStreamBuilder}; mod tt { pub use tt::*; @@ -27,10 +27,6 @@ mod tt { type TokenStream = crate::server_impl::TokenStream; -#[derive(Copy, Clone, PartialEq, Eq, Debug)] -pub struct SourceFile { - file_id: FileId, -} pub struct FreeFunctions; pub struct RaSpanServer { @@ -46,7 +42,6 @@ pub struct RaSpanServer { impl server::Types for RaSpanServer { type FreeFunctions = FreeFunctions; type TokenStream = TokenStream; - type SourceFile = SourceFile; type Span = Span; type Symbol = Symbol; } @@ -212,7 +207,7 @@ impl server::TokenStream for RaSpanServer { base: Option, trees: Vec>, ) -> Self::TokenStream { - let mut builder = TokenStreamBuilder::new(); + let mut builder = TokenStreamBuilder::default(); if let Some(base) = base { builder.push(base); } @@ -227,7 +222,7 @@ impl server::TokenStream for RaSpanServer { base: Option, streams: Vec, ) -> Self::TokenStream { - let mut builder = TokenStreamBuilder::new(); + let mut builder = TokenStreamBuilder::default(); if let Some(base) = base { builder.push(base); } @@ -245,25 +240,17 @@ impl server::TokenStream for RaSpanServer { } } -impl server::SourceFile for RaSpanServer { - fn eq(&mut self, file1: &Self::SourceFile, file2: &Self::SourceFile) -> bool { - file1 == file2 - } - fn path(&mut self, _file: &Self::SourceFile) -> String { - // FIXME - String::new() - } - fn is_real(&mut self, _file: &Self::SourceFile) -> bool { - true - } -} - impl server::Span for RaSpanServer { fn debug(&mut self, span: Self::Span) -> String { format!("{:?}", span) } - fn source_file(&mut self, span: Self::Span) -> Self::SourceFile { - SourceFile { file_id: span.anchor.file_id.file_id() } + fn file(&mut self, _: Self::Span) -> String { + // FIXME + String::new() + } + fn local_file(&mut self, _: Self::Span) -> Option { + // FIXME + None } fn save_span(&mut self, _span: Self::Span) -> usize { // FIXME, quote is incompatible with third-party tools @@ -428,7 +415,7 @@ impl server::Server for RaSpanServer { #[cfg(test)] mod tests { - use span::{EditionedFileId, FileId, SyntaxContextId}; + use span::{EditionedFileId, FileId, SyntaxContext}; use super::*; @@ -440,7 +427,7 @@ mod tests { file_id: EditionedFileId::current_edition(FileId::from_raw(0)), ast_id: span::ErasedFileAstId::from_raw(0), }, - ctx: SyntaxContextId::root(span::Edition::CURRENT), + ctx: SyntaxContext::root(span::Edition::CURRENT), }; let s = TokenStream { token_trees: vec![ @@ -482,7 +469,7 @@ mod tests { file_id: EditionedFileId::current_edition(FileId::from_raw(0)), ast_id: span::ErasedFileAstId::from_raw(0), }, - ctx: SyntaxContextId::root(span::Edition::CURRENT), + ctx: SyntaxContext::root(span::Edition::CURRENT), }; let subtree_paren_a = vec![ tt::TokenTree::Subtree(tt::Subtree { diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/server_impl/token_id.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/server_impl/token_id.rs index 409cf3cc78134..c002be4be6ffd 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-srv/src/server_impl/token_id.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/server_impl/token_id.rs @@ -5,7 +5,7 @@ use std::ops::{Bound, Range}; use intern::Symbol; use proc_macro::bridge::{self, server}; -use crate::server_impl::{literal_kind_to_internal, token_stream::TokenStreamBuilder, TopSubtree}; +use crate::server_impl::{TopSubtree, literal_kind_to_internal, token_stream::TokenStreamBuilder}; mod tt { pub use span::TokenId; @@ -24,8 +24,6 @@ type Literal = tt::Literal; type Span = tt::TokenId; type TokenStream = crate::server_impl::TokenStream; -#[derive(Clone)] -pub struct SourceFile; pub struct FreeFunctions; pub struct TokenIdServer { @@ -37,7 +35,6 @@ pub struct TokenIdServer { impl server::Types for TokenIdServer { type FreeFunctions = FreeFunctions; type TokenStream = TokenStream; - type SourceFile = SourceFile; type Span = Span; type Symbol = Symbol; } @@ -190,7 +187,7 @@ impl server::TokenStream for TokenIdServer { base: Option, trees: Vec>, ) -> Self::TokenStream { - let mut builder = TokenStreamBuilder::new(); + let mut builder = TokenStreamBuilder::default(); if let Some(base) = base { builder.push(base); } @@ -205,7 +202,7 @@ impl server::TokenStream for TokenIdServer { base: Option, streams: Vec, ) -> Self::TokenStream { - let mut builder = TokenStreamBuilder::new(); + let mut builder = TokenStreamBuilder::default(); if let Some(base) = base { builder.push(base); } @@ -223,24 +220,15 @@ impl server::TokenStream for TokenIdServer { } } -impl server::SourceFile for TokenIdServer { - fn eq(&mut self, _file1: &Self::SourceFile, _file2: &Self::SourceFile) -> bool { - true - } - fn path(&mut self, _file: &Self::SourceFile) -> String { - String::new() - } - fn is_real(&mut self, _file: &Self::SourceFile) -> bool { - true - } -} - impl server::Span for TokenIdServer { fn debug(&mut self, span: Self::Span) -> String { format!("{:?}", span.0) } - fn source_file(&mut self, _span: Self::Span) -> Self::SourceFile { - SourceFile {} + fn file(&mut self, _span: Self::Span) -> String { + String::new() + } + fn local_file(&mut self, _span: Self::Span) -> Option { + None } fn save_span(&mut self, _span: Self::Span) -> usize { 0 diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/server_impl/token_stream.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/server_impl/token_stream.rs index 645f7e7c59a32..4946a4f2a6218 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-srv/src/server_impl/token_stream.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/server_impl/token_stream.rs @@ -2,13 +2,20 @@ use proc_macro::bridge; -use crate::server_impl::{delim_to_external, literal_kind_to_external, TopSubtree}; +use crate::server_impl::{TopSubtree, delim_to_external, literal_kind_to_external}; #[derive(Clone)] pub struct TokenStream { pub(super) token_trees: Vec>, } +// #[derive(Default)] would mean that `S: Default`. +impl Default for TokenStream { + fn default() -> Self { + Self { token_trees: Default::default() } + } +} + impl std::fmt::Debug for TokenStream { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { f.debug_struct("TokenStream") @@ -17,17 +24,7 @@ impl std::fmt::Debug for TokenStream { } } -impl Default for TokenStream { - fn default() -> Self { - Self { token_trees: vec![] } - } -} - impl TokenStream { - pub(crate) fn new() -> Self { - TokenStream::default() - } - pub(crate) fn with_subtree(subtree: TopSubtree) -> Self { let delimiter_kind = subtree.top_subtree().delimiter.kind; let mut token_trees = subtree.0; @@ -145,10 +142,6 @@ pub(super) mod token_stream_impls { } impl TokenStreamBuilder { - pub(super) fn new() -> TokenStreamBuilder { - TokenStreamBuilder { acc: TokenStream::new() } - } - pub(super) fn push(&mut self, stream: TokenStream) { self.acc.token_trees.extend(stream.token_trees) } @@ -157,3 +150,9 @@ impl TokenStreamBuilder { self.acc } } + +impl Default for TokenStreamBuilder { + fn default() -> Self { + Self { acc: TokenStream::default() } + } +} diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/tests/mod.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/tests/mod.rs index 15de88ea656d0..011221459657a 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-srv/src/tests/mod.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/tests/mod.rs @@ -12,7 +12,7 @@ fn test_derive_empty() { "DeriveEmpty", r#"struct S;"#, expect!["SUBTREE $$ 1 1"], - expect!["SUBTREE $$ 42:2@0..100#2 42:2@0..100#2"], + expect!["SUBTREE $$ 42:2@0..100#ROOT2024 42:2@0..100#ROOT2024"], ); } @@ -29,12 +29,12 @@ fn test_derive_error() { LITERAL Str #[derive(DeriveError)] struct S ; 1 PUNCH ; [alone] 1"#]], expect![[r#" - SUBTREE $$ 42:2@0..100#2 42:2@0..100#2 - IDENT compile_error 42:2@0..100#2 - PUNCH ! [alone] 42:2@0..100#2 - SUBTREE () 42:2@0..100#2 42:2@0..100#2 - LITERAL Str #[derive(DeriveError)] struct S ; 42:2@0..100#2 - PUNCH ; [alone] 42:2@0..100#2"#]], + SUBTREE $$ 42:2@0..100#ROOT2024 42:2@0..100#ROOT2024 + IDENT compile_error 42:2@0..100#ROOT2024 + PUNCH ! [alone] 42:2@0..100#ROOT2024 + SUBTREE () 42:2@0..100#ROOT2024 42:2@0..100#ROOT2024 + LITERAL Str #[derive(DeriveError)] struct S ; 42:2@0..100#ROOT2024 + PUNCH ; [alone] 42:2@0..100#ROOT2024"#]], ); } @@ -53,14 +53,14 @@ fn test_fn_like_macro_noop() { PUNCH , [alone] 1 SUBTREE [] 1 1"#]], expect![[r#" - SUBTREE $$ 42:2@0..100#2 42:2@0..100#2 - IDENT ident 42:2@0..5#2 - PUNCH , [alone] 42:2@5..6#2 - LITERAL Integer 0 42:2@7..8#2 - PUNCH , [alone] 42:2@8..9#2 - LITERAL Integer 1 42:2@10..11#2 - PUNCH , [alone] 42:2@11..12#2 - SUBTREE [] 42:2@13..14#2 42:2@14..15#2"#]], + SUBTREE $$ 42:2@0..100#ROOT2024 42:2@0..100#ROOT2024 + IDENT ident 42:2@0..5#ROOT2024 + PUNCH , [alone] 42:2@5..6#ROOT2024 + LITERAL Integer 0 42:2@7..8#ROOT2024 + PUNCH , [alone] 42:2@8..9#ROOT2024 + LITERAL Integer 1 42:2@10..11#ROOT2024 + PUNCH , [alone] 42:2@11..12#ROOT2024 + SUBTREE [] 42:2@13..14#ROOT2024 42:2@14..15#ROOT2024"#]], ); } @@ -75,10 +75,10 @@ fn test_fn_like_macro_clone_ident_subtree() { PUNCH , [alone] 1 SUBTREE [] 1 1"#]], expect![[r#" - SUBTREE $$ 42:2@0..100#2 42:2@0..100#2 - IDENT ident 42:2@0..5#2 - PUNCH , [alone] 42:2@5..6#2 - SUBTREE [] 42:2@7..8#2 42:2@7..8#2"#]], + SUBTREE $$ 42:2@0..100#ROOT2024 42:2@0..100#ROOT2024 + IDENT ident 42:2@0..5#ROOT2024 + PUNCH , [alone] 42:2@5..6#ROOT2024 + SUBTREE [] 42:2@7..8#ROOT2024 42:2@7..8#ROOT2024"#]], ); } @@ -91,12 +91,13 @@ fn test_fn_like_macro_clone_raw_ident() { SUBTREE $$ 1 1 IDENT r#async 1"#]], expect![[r#" - SUBTREE $$ 42:2@0..100#2 42:2@0..100#2 - IDENT r#async 42:2@0..7#2"#]], + SUBTREE $$ 42:2@0..100#ROOT2024 42:2@0..100#ROOT2024 + IDENT r#async 42:2@0..7#ROOT2024"#]], ); } #[test] +#[cfg(not(bootstrap))] fn test_fn_like_fn_like_span_join() { assert_expand( "fn_like_span_join", @@ -105,12 +106,13 @@ fn test_fn_like_fn_like_span_join() { SUBTREE $$ 1 1 IDENT r#joined 1"#]], expect![[r#" - SUBTREE $$ 42:2@0..100#2 42:2@0..100#2 - IDENT r#joined 42:2@0..11#2"#]], + SUBTREE $$ 42:2@0..100#ROOT2024 42:2@0..100#ROOT2024 + IDENT r#joined 42:2@0..11#ROOT2024"#]], ); } #[test] +#[cfg(not(bootstrap))] fn test_fn_like_fn_like_span_ops() { assert_expand( "fn_like_span_ops", @@ -121,10 +123,10 @@ fn test_fn_like_fn_like_span_ops() { IDENT resolved_at_def_site 1 IDENT start_span 1"#]], expect![[r#" - SUBTREE $$ 42:2@0..100#2 42:2@0..100#2 - IDENT set_def_site 41:1@0..150#2 - IDENT resolved_at_def_site 42:2@13..33#2 - IDENT start_span 42:2@34..34#2"#]], + SUBTREE $$ 42:2@0..100#ROOT2024 42:2@0..100#ROOT2024 + IDENT set_def_site 41:1@0..150#ROOT2024 + IDENT resolved_at_def_site 42:2@13..33#ROOT2024 + IDENT start_span 42:2@34..34#ROOT2024"#]], ); } @@ -143,14 +145,14 @@ fn test_fn_like_mk_literals() { LITERAL Integer 123i64 1 LITERAL Integer 123 1"#]], expect![[r#" - SUBTREE $$ 42:2@0..100#2 42:2@0..100#2 - LITERAL ByteStr byte_string 42:2@0..100#2 - LITERAL Char c 42:2@0..100#2 - LITERAL Str string 42:2@0..100#2 - LITERAL Float 3.14f64 42:2@0..100#2 - LITERAL Float 3.14 42:2@0..100#2 - LITERAL Integer 123i64 42:2@0..100#2 - LITERAL Integer 123 42:2@0..100#2"#]], + SUBTREE $$ 42:2@0..100#ROOT2024 42:2@0..100#ROOT2024 + LITERAL ByteStr byte_string 42:2@0..100#ROOT2024 + LITERAL Char c 42:2@0..100#ROOT2024 + LITERAL Str string 42:2@0..100#ROOT2024 + LITERAL Float 3.14f64 42:2@0..100#ROOT2024 + LITERAL Float 3.14 42:2@0..100#ROOT2024 + LITERAL Integer 123i64 42:2@0..100#ROOT2024 + LITERAL Integer 123 42:2@0..100#ROOT2024"#]], ); } @@ -164,9 +166,9 @@ fn test_fn_like_mk_idents() { IDENT standard 1 IDENT r#raw 1"#]], expect![[r#" - SUBTREE $$ 42:2@0..100#2 42:2@0..100#2 - IDENT standard 42:2@0..100#2 - IDENT r#raw 42:2@0..100#2"#]], + SUBTREE $$ 42:2@0..100#ROOT2024 42:2@0..100#ROOT2024 + IDENT standard 42:2@0..100#ROOT2024 + IDENT r#raw 42:2@0..100#ROOT2024"#]], ); } @@ -198,27 +200,27 @@ fn test_fn_like_macro_clone_literals() { PUNCH , [alone] 1 LITERAL CStr null 1"#]], expect![[r#" - SUBTREE $$ 42:2@0..100#2 42:2@0..100#2 - LITERAL Integer 1u16 42:2@0..4#2 - PUNCH , [alone] 42:2@4..5#2 - LITERAL Integer 2_u32 42:2@6..11#2 - PUNCH , [alone] 42:2@11..12#2 - PUNCH - [alone] 42:2@13..14#2 - LITERAL Integer 4i64 42:2@14..18#2 - PUNCH , [alone] 42:2@18..19#2 - LITERAL Float 3.14f32 42:2@20..27#2 - PUNCH , [alone] 42:2@27..28#2 - LITERAL Str hello bridge 42:2@29..43#2 - PUNCH , [alone] 42:2@43..44#2 - LITERAL Str suffixedsuffix 42:2@45..61#2 - PUNCH , [alone] 42:2@61..62#2 - LITERAL StrRaw(2) raw 42:2@63..73#2 - PUNCH , [alone] 42:2@73..74#2 - LITERAL Char a 42:2@75..78#2 - PUNCH , [alone] 42:2@78..79#2 - LITERAL Byte b 42:2@80..84#2 - PUNCH , [alone] 42:2@84..85#2 - LITERAL CStr null 42:2@86..93#2"#]], + SUBTREE $$ 42:2@0..100#ROOT2024 42:2@0..100#ROOT2024 + LITERAL Integer 1u16 42:2@0..4#ROOT2024 + PUNCH , [alone] 42:2@4..5#ROOT2024 + LITERAL Integer 2_u32 42:2@6..11#ROOT2024 + PUNCH , [alone] 42:2@11..12#ROOT2024 + PUNCH - [alone] 42:2@13..14#ROOT2024 + LITERAL Integer 4i64 42:2@14..18#ROOT2024 + PUNCH , [alone] 42:2@18..19#ROOT2024 + LITERAL Float 3.14f32 42:2@20..27#ROOT2024 + PUNCH , [alone] 42:2@27..28#ROOT2024 + LITERAL Str hello bridge 42:2@29..43#ROOT2024 + PUNCH , [alone] 42:2@43..44#ROOT2024 + LITERAL Str suffixedsuffix 42:2@45..61#ROOT2024 + PUNCH , [alone] 42:2@61..62#ROOT2024 + LITERAL StrRaw(2) raw 42:2@63..73#ROOT2024 + PUNCH , [alone] 42:2@73..74#ROOT2024 + LITERAL Char a 42:2@75..78#ROOT2024 + PUNCH , [alone] 42:2@78..79#ROOT2024 + LITERAL Byte b 42:2@80..84#ROOT2024 + PUNCH , [alone] 42:2@84..85#ROOT2024 + LITERAL CStr null 42:2@86..93#ROOT2024"#]], ); } @@ -239,12 +241,12 @@ fn test_attr_macro() { LITERAL Str #[attr_error(some arguments)] mod m {} 1 PUNCH ; [alone] 1"#]], expect![[r#" - SUBTREE $$ 42:2@0..100#2 42:2@0..100#2 - IDENT compile_error 42:2@0..100#2 - PUNCH ! [alone] 42:2@0..100#2 - SUBTREE () 42:2@0..100#2 42:2@0..100#2 - LITERAL Str #[attr_error(some arguments)] mod m {} 42:2@0..100#2 - PUNCH ; [alone] 42:2@0..100#2"#]], + SUBTREE $$ 42:2@0..100#ROOT2024 42:2@0..100#ROOT2024 + IDENT compile_error 42:2@0..100#ROOT2024 + PUNCH ! [alone] 42:2@0..100#ROOT2024 + SUBTREE () 42:2@0..100#ROOT2024 42:2@0..100#ROOT2024 + LITERAL Str #[attr_error(some arguments)] mod m {} 42:2@0..100#ROOT2024 + PUNCH ; [alone] 42:2@0..100#ROOT2024"#]], ); } diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/tests/utils.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/tests/utils.rs index 1b085520d5656..a476a70a74095 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-srv/src/tests/utils.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/tests/utils.rs @@ -1,10 +1,10 @@ //! utils used in proc-macro tests use expect_test::Expect; -use span::{EditionedFileId, ErasedFileAstId, FileId, Span, SpanAnchor, SyntaxContextId, TokenId}; +use span::{EditionedFileId, ErasedFileAstId, FileId, Span, SpanAnchor, SyntaxContext, TokenId}; use tt::TextRange; -use crate::{dylib, proc_macro_test_dylib_path, EnvSnapshot, ProcMacroSrv}; +use crate::{EnvSnapshot, ProcMacroSrv, dylib, proc_macro_test_dylib_path}; fn parse_string(call_site: TokenId, src: &str) -> crate::server_impl::TokenStream { crate::server_impl::TokenStream::with_subtree(crate::server_impl::TopSubtree( @@ -17,7 +17,7 @@ fn parse_string(call_site: TokenId, src: &str) -> crate::server_impl::TokenStrea fn parse_string_spanned( anchor: SpanAnchor, - call_site: SyntaxContextId, + call_site: SyntaxContext, src: &str, ) -> crate::server_impl::TokenStream { crate::server_impl::TokenStream::with_subtree(crate::server_impl::TopSubtree( @@ -81,7 +81,7 @@ fn assert_expand_impl( file_id: EditionedFileId::current_edition(FileId::from_raw(41)), ast_id: ErasedFileAstId::from_raw(1), }, - ctx: SyntaxContextId::root(span::Edition::CURRENT), + ctx: SyntaxContext::root(span::Edition::CURRENT), }; let call_site = Span { range: TextRange::new(0.into(), 100.into()), @@ -89,7 +89,7 @@ fn assert_expand_impl( file_id: EditionedFileId::current_edition(FileId::from_raw(42)), ast_id: ErasedFileAstId::from_raw(2), }, - ctx: SyntaxContextId::root(span::Edition::CURRENT), + ctx: SyntaxContext::root(span::Edition::CURRENT), }; let mixed_site = call_site; diff --git a/src/tools/rust-analyzer/crates/profile/Cargo.toml b/src/tools/rust-analyzer/crates/profile/Cargo.toml index 9384fe265584f..1fb13832720e6 100644 --- a/src/tools/rust-analyzer/crates/profile/Cargo.toml +++ b/src/tools/rust-analyzer/crates/profile/Cargo.toml @@ -13,12 +13,14 @@ rust-version.workspace = true [dependencies] cfg-if = "1.0.0" -libc.workspace = true -jemalloc-ctl = { version = "0.5.0", package = "tikv-jemalloc-ctl", optional = true } +jemalloc-ctl = { version = "0.5.4", package = "tikv-jemalloc-ctl", optional = true } [target.'cfg(all(target_os = "linux", not(target_env = "ohos")))'.dependencies] perf-event = "=0.4.7" +[target.'cfg(all(target_os = "linux", target_env = "gnu"))'.dependencies] +libc.workspace = true + [target.'cfg(windows)'.dependencies] windows-sys = { version = "0.59", features = [ "Win32_System_Threading", diff --git a/src/tools/rust-analyzer/crates/profile/src/memory_usage.rs b/src/tools/rust-analyzer/crates/profile/src/memory_usage.rs index f5b8eca060f96..1462259d627b3 100644 --- a/src/tools/rust-analyzer/crates/profile/src/memory_usage.rs +++ b/src/tools/rust-analyzer/crates/profile/src/memory_usage.rs @@ -78,7 +78,8 @@ fn memusage_linux() -> MemoryUsage { let alloc = unsafe { libc::mallinfo() }.uordblks as isize; MemoryUsage { allocated: Bytes(alloc) } } else { - let mallinfo2: fn() -> libc::mallinfo2 = unsafe { std::mem::transmute(mallinfo2) }; + let mallinfo2: extern "C" fn() -> libc::mallinfo2 = + unsafe { std::mem::transmute(mallinfo2) }; let alloc = mallinfo2().uordblks as isize; MemoryUsage { allocated: Bytes(alloc) } } diff --git a/src/tools/rust-analyzer/crates/project-model/Cargo.toml b/src/tools/rust-analyzer/crates/project-model/Cargo.toml index 83def0e6b2a91..64ea75922fbe4 100644 --- a/src/tools/rust-analyzer/crates/project-model/Cargo.toml +++ b/src/tools/rust-analyzer/crates/project-model/Cargo.toml @@ -34,7 +34,7 @@ stdx.workspace = true toolchain.workspace = true [dev-dependencies] -expect-test = "1.4.0" +expect-test = "1.5.1" [lints] workspace = true diff --git a/src/tools/rust-analyzer/crates/project-model/src/build_dependencies.rs b/src/tools/rust-analyzer/crates/project-model/src/build_dependencies.rs index b0939229f93e2..e0c38ccf3331c 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/build_dependencies.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/build_dependencies.rs @@ -9,7 +9,7 @@ use std::{cell::RefCell, io, mem, process::Command}; use base_db::Env; -use cargo_metadata::{camino::Utf8Path, Message}; +use cargo_metadata::{Message, camino::Utf8Path}; use cfg::CfgAtom; use itertools::Itertools; use la_arena::ArenaMap; @@ -19,8 +19,8 @@ use serde::Deserialize as _; use toolchain::Tool; use crate::{ - utf8_stdout, CargoConfig, CargoFeatures, CargoWorkspace, InvocationStrategy, ManifestPath, - Package, Sysroot, TargetKind, + CargoConfig, CargoFeatures, CargoWorkspace, InvocationStrategy, ManifestPath, Package, Sysroot, + TargetKind, utf8_stdout, }; /// Output of the build script and proc-macro building steps for a workspace. @@ -163,7 +163,7 @@ impl WorkspaceBuildScripts { pub(crate) fn rustc_crates( rustc: &CargoWorkspace, current_dir: &AbsPath, - extra_env: &FxHashMap, + extra_env: &FxHashMap>, sysroot: &Sysroot, ) -> Self { let mut bs = WorkspaceBuildScripts::default(); @@ -172,16 +172,14 @@ impl WorkspaceBuildScripts { } let res = (|| { let target_libdir = (|| { - let mut cargo_config = sysroot.tool(Tool::Cargo, current_dir); - cargo_config.envs(extra_env); + let mut cargo_config = sysroot.tool(Tool::Cargo, current_dir, extra_env); cargo_config .args(["rustc", "-Z", "unstable-options", "--print", "target-libdir"]) .env("RUSTC_BOOTSTRAP", "1"); if let Ok(it) = utf8_stdout(&mut cargo_config) { return Ok(it); } - let mut cmd = sysroot.tool(Tool::Rustc, current_dir); - cmd.envs(extra_env); + let mut cmd = sysroot.tool(Tool::Rustc, current_dir, extra_env); cmd.args(["--print", "target-libdir"]); utf8_stdout(&mut cmd) })()?; @@ -343,7 +341,8 @@ impl WorkspaceBuildScripts { Message::CompilerArtifact(message) => { with_output_for(&message.package_id.repr, &mut |name, data| { progress(format!("building proc-macros: {name}")); - if message.target.kind.iter().any(|k| k == "proc-macro") { + if message.target.kind.contains(&cargo_metadata::TargetKind::ProcMacro) + { // Skip rmeta file if let Some(filename) = message.filenames.iter().find(|file| is_dylib(file)) @@ -389,12 +388,12 @@ impl WorkspaceBuildScripts { ) -> io::Result { let mut cmd = match config.run_build_script_command.as_deref() { Some([program, args @ ..]) => { - let mut cmd = toolchain::command(program, current_dir); + let mut cmd = toolchain::command(program, current_dir, &config.extra_env); cmd.args(args); cmd } _ => { - let mut cmd = sysroot.tool(Tool::Cargo, current_dir); + let mut cmd = sysroot.tool(Tool::Cargo, current_dir, &config.extra_env); cmd.args(["check", "--quiet", "--workspace", "--message-format=json"]); cmd.args(&config.extra_args); @@ -447,7 +446,6 @@ impl WorkspaceBuildScripts { } }; - cmd.envs(&config.extra_env); if config.wrap_rustc_in_build_scripts { // Setup RUSTC_WRAPPER to point to `rust-analyzer` binary itself. We use // that to compile only proc macros and build scripts during the initial diff --git a/src/tools/rust-analyzer/crates/project-model/src/cargo_workspace.rs b/src/tools/rust-analyzer/crates/project-model/src/cargo_workspace.rs index 014028a0b63e2..6e730b1aea266 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/cargo_workspace.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/cargo_workspace.rs @@ -35,6 +35,8 @@ pub struct CargoWorkspace { target_directory: AbsPathBuf, manifest_path: ManifestPath, is_virtual_workspace: bool, + /// Whether this workspace represents the sysroot workspace. + is_sysroot: bool, /// Environment variables set in the `.cargo/config` file. config_env: Env, } @@ -102,11 +104,14 @@ pub struct CargoConfig { /// Extra args to pass to the cargo command. pub extra_args: Vec, /// Extra env vars to set when invoking the cargo command - pub extra_env: FxHashMap, + pub extra_env: FxHashMap>, pub invocation_strategy: InvocationStrategy, /// Optional path to use instead of `target` when building pub target_dir: Option, + /// Gate `#[test]` behind `#[cfg(test)]` pub set_test: bool, + /// Load the project without any dependencies + pub no_deps: bool, } pub type Package = Idx; @@ -224,21 +229,26 @@ pub enum TargetKind { Example, Test, Bench, + /// Cargo calls this kind `custom-build` BuildScript, Other, } impl TargetKind { - fn new(kinds: &[String]) -> TargetKind { + fn new(kinds: &[cargo_metadata::TargetKind]) -> TargetKind { for kind in kinds { - return match kind.as_str() { - "bin" => TargetKind::Bin, - "test" => TargetKind::Test, - "bench" => TargetKind::Bench, - "example" => TargetKind::Example, - "custom-build" => TargetKind::BuildScript, - "proc-macro" => TargetKind::Lib { is_proc_macro: true }, - _ if kind.contains("lib") => TargetKind::Lib { is_proc_macro: false }, + return match kind { + cargo_metadata::TargetKind::Bin => TargetKind::Bin, + cargo_metadata::TargetKind::Test => TargetKind::Test, + cargo_metadata::TargetKind::Bench => TargetKind::Bench, + cargo_metadata::TargetKind::Example => TargetKind::Example, + cargo_metadata::TargetKind::CustomBuild => TargetKind::BuildScript, + cargo_metadata::TargetKind::ProcMacro => TargetKind::Lib { is_proc_macro: true }, + cargo_metadata::TargetKind::Lib + | cargo_metadata::TargetKind::DyLib + | cargo_metadata::TargetKind::CDyLib + | cargo_metadata::TargetKind::StaticLib + | cargo_metadata::TargetKind::RLib => TargetKind::Lib { is_proc_macro: false }, _ => continue, }; } @@ -252,6 +262,22 @@ impl TargetKind { pub fn is_proc_macro(self) -> bool { matches!(self, TargetKind::Lib { is_proc_macro: true }) } + + /// If this is a valid cargo target, returns the name cargo uses in command line arguments + /// and output, otherwise None. + /// https://docs.rs/cargo_metadata/latest/cargo_metadata/enum.TargetKind.html + pub fn as_cargo_target(self) -> Option<&'static str> { + match self { + TargetKind::Bin => Some("bin"), + TargetKind::Lib { is_proc_macro: true } => Some("proc-macro"), + TargetKind::Lib { is_proc_macro: false } => Some("lib"), + TargetKind::Example => Some("example"), + TargetKind::Test => Some("test"), + TargetKind::Bench => Some("bench"), + TargetKind::BuildScript => Some("custom-build"), + TargetKind::Other => None, + } + } } #[derive(Default, Clone, Debug, PartialEq, Eq)] @@ -263,7 +289,7 @@ pub struct CargoMetadataConfig { /// Extra args to pass to the cargo command. pub extra_args: Vec, /// Extra env vars to set when invoking the cargo command - pub extra_env: FxHashMap, + pub extra_env: FxHashMap>, } // Deserialize helper for the cargo metadata @@ -285,6 +311,7 @@ impl CargoWorkspace { current_dir: &AbsPath, config: &CargoMetadataConfig, sysroot: &Sysroot, + no_deps: bool, locked: bool, progress: &dyn Fn(String), ) -> anyhow::Result<(cargo_metadata::Metadata, Option)> { @@ -293,8 +320,8 @@ impl CargoWorkspace { current_dir, config, sysroot, + no_deps, locked, - false, progress, ); if let Ok((_, Some(ref e))) = res { @@ -312,15 +339,14 @@ impl CargoWorkspace { current_dir: &AbsPath, config: &CargoMetadataConfig, sysroot: &Sysroot, - locked: bool, no_deps: bool, + locked: bool, progress: &dyn Fn(String), ) -> anyhow::Result<(cargo_metadata::Metadata, Option)> { - let cargo = sysroot.tool(Tool::Cargo, current_dir); + let cargo = sysroot.tool(Tool::Cargo, current_dir, &config.extra_env); let mut meta = MetadataCommand::new(); meta.cargo_path(cargo.get_program()); cargo.get_envs().for_each(|(var, val)| _ = meta.env(var, val.unwrap_or_default())); - config.extra_env.iter().for_each(|(var, val)| _ = meta.env(var, val)); meta.manifest_path(cargo_toml.to_path_buf()); match &config.features { CargoFeatures::All => { @@ -418,6 +444,7 @@ impl CargoWorkspace { mut meta: cargo_metadata::Metadata, ws_manifest_path: ManifestPath, cargo_config_env: Env, + is_sysroot: bool, ) -> CargoWorkspace { let mut pkg_by_id = FxHashMap::default(); let mut packages = Arena::default(); @@ -456,7 +483,7 @@ impl CargoWorkspace { cargo_metadata::Edition::E2015 => Edition::Edition2015, cargo_metadata::Edition::E2018 => Edition::Edition2018, cargo_metadata::Edition::E2021 => Edition::Edition2021, - cargo_metadata::Edition::_E2024 => Edition::Edition2024, + cargo_metadata::Edition::E2024 => Edition::Edition2024, _ => { tracing::error!("Unsupported edition `{:?}`", edition); Edition::CURRENT @@ -539,6 +566,7 @@ impl CargoWorkspace { target_directory, manifest_path: ws_manifest_path, is_virtual_workspace, + is_sysroot, config_env: cargo_config_env, } } @@ -596,7 +624,7 @@ impl CargoWorkspace { // this pkg is inside this cargo workspace, fallback to workspace root if found { return Some(vec![ - ManifestPath::try_from(self.workspace_root().join("Cargo.toml")).ok()? + ManifestPath::try_from(self.workspace_root().join("Cargo.toml")).ok()?, ]); } @@ -632,4 +660,8 @@ impl CargoWorkspace { pub fn env(&self) -> &Env { &self.config_env } + + pub fn is_sysroot(&self) -> bool { + self.is_sysroot + } } diff --git a/src/tools/rust-analyzer/crates/project-model/src/env.rs b/src/tools/rust-analyzer/crates/project-model/src/env.rs index 37fffba295590..e7293b0b2ef6e 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/env.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/env.rs @@ -4,7 +4,7 @@ use paths::Utf8Path; use rustc_hash::FxHashMap; use toolchain::Tool; -use crate::{utf8_stdout, ManifestPath, PackageData, Sysroot, TargetKind}; +use crate::{ManifestPath, PackageData, Sysroot, TargetKind, utf8_stdout}; /// Recreates the compile-time environment variables that Cargo sets. /// @@ -25,7 +25,7 @@ pub(crate) fn inject_cargo_package_env(env: &mut Env, package: &PackageData) { env.set("CARGO_PKG_VERSION_PATCH", package.version.patch.to_string()); env.set("CARGO_PKG_VERSION_PRE", package.version.pre.to_string()); - env.set("CARGO_PKG_AUTHORS", package.authors.join(":").clone()); + env.set("CARGO_PKG_AUTHORS", package.authors.join(":")); env.set("CARGO_PKG_NAME", package.name.clone()); env.set("CARGO_PKG_DESCRIPTION", package.description.as_deref().unwrap_or_default()); @@ -62,11 +62,10 @@ pub(crate) fn inject_rustc_tool_env(env: &mut Env, cargo_name: &str, kind: Targe pub(crate) fn cargo_config_env( manifest: &ManifestPath, - extra_env: &FxHashMap, + extra_env: &FxHashMap>, sysroot: &Sysroot, ) -> Env { - let mut cargo_config = sysroot.tool(Tool::Cargo, manifest.parent()); - cargo_config.envs(extra_env); + let mut cargo_config = sysroot.tool(Tool::Cargo, manifest.parent(), extra_env); cargo_config .args(["-Z", "unstable-options", "config", "get", "env"]) .env("RUSTC_BOOTSTRAP", "1"); diff --git a/src/tools/rust-analyzer/crates/project-model/src/lib.rs b/src/tools/rust-analyzer/crates/project-model/src/lib.rs index 21a993c5a5ed1..436af64cf1326 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/lib.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/lib.rs @@ -48,12 +48,12 @@ mod tests; use std::{ fmt, - fs::{self, read_dir, ReadDir}, + fs::{self, ReadDir, read_dir}, io, process::Command, }; -use anyhow::{bail, format_err, Context}; +use anyhow::{Context, bail, format_err}; use paths::{AbsPath, AbsPathBuf, Utf8PathBuf}; use rustc_hash::FxHashSet; @@ -102,7 +102,9 @@ impl ProjectManifest { if path.extension().unwrap_or_default() == "rs" { return Ok(ProjectManifest::CargoScript(path)); } - bail!("project root must point to a Cargo.toml, rust-project.json or

    (self_arg_ty: Ty<'tcx>, is_self_ty: P) -> ExplicitSelf<'tcx> - where - P: Fn(Ty<'tcx>) -> bool, - { - use self::ExplicitSelf::*; - - match *self_arg_ty.kind() { - _ if is_self_ty(self_arg_ty) => ByValue, - ty::Ref(region, ty, mutbl) if is_self_ty(ty) => ByReference(region, mutbl), - ty::RawPtr(ty, mutbl) if is_self_ty(ty) => ByRawPointer(mutbl), - _ if self_arg_ty.boxed_ty().is_some_and(is_self_ty) => ByBox, - _ => Other, - } - } -} - /// Returns a list of types such that the given type needs drop if and only if /// *any* of the returned types need drop. Returns `Err(AlwaysRequiresDrop)` if /// this type always needs drop. @@ -1690,6 +1596,42 @@ pub fn fold_list<'tcx, F, L, T>( list: L, folder: &mut F, intern: impl FnOnce(TyCtxt<'tcx>, &[T]) -> L, +) -> L +where + F: TypeFolder>, + L: AsRef<[T]>, + T: TypeFoldable> + PartialEq + Copy, +{ + let slice = list.as_ref(); + let mut iter = slice.iter().copied(); + // Look for the first element that changed + match iter.by_ref().enumerate().find_map(|(i, t)| { + let new_t = t.fold_with(folder); + if new_t != t { Some((i, new_t)) } else { None } + }) { + Some((i, new_t)) => { + // An element changed, prepare to intern the resulting list + let mut new_list = SmallVec::<[_; 8]>::with_capacity(slice.len()); + new_list.extend_from_slice(&slice[..i]); + new_list.push(new_t); + for t in iter { + new_list.push(t.fold_with(folder)) + } + intern(folder.cx(), &new_list) + } + None => list, + } +} + +/// Does the equivalent of +/// ```ignore (illustrative) +/// let v = self.iter().map(|p| p.try_fold_with(folder)).collect::>(); +/// folder.tcx().intern_*(&v) +/// ``` +pub fn try_fold_list<'tcx, F, L, T>( + list: L, + folder: &mut F, + intern: impl FnOnce(TyCtxt<'tcx>, &[T]) -> L, ) -> Result where F: FallibleTypeFolder>, @@ -1762,10 +1704,7 @@ pub fn is_doc_notable_trait(tcx: TyCtxt<'_>, def_id: DefId) -> bool { /// the compiler to make some assumptions about its shape; if the user doesn't use a feature gate, they may /// cause an ICE that we otherwise may want to prevent. pub fn intrinsic_raw(tcx: TyCtxt<'_>, def_id: LocalDefId) -> Option { - if tcx.features().intrinsics() - && (matches!(tcx.fn_sig(def_id).skip_binder().abi(), ExternAbi::RustIntrinsic) - || tcx.has_attr(def_id, sym::rustc_intrinsic)) - { + if tcx.features().intrinsics() && tcx.has_attr(def_id, sym::rustc_intrinsic) { let must_be_overridden = match tcx.hir_node_by_def_id(def_id) { hir::Node::Item(hir::Item { kind: hir::ItemKind::Fn { has_body, .. }, .. }) => { !has_body diff --git a/compiler/rustc_middle/src/ty/visit.rs b/compiler/rustc_middle/src/ty/visit.rs index e3b7a258c395c..f804217459915 100644 --- a/compiler/rustc_middle/src/ty/visit.rs +++ b/compiler/rustc_middle/src/ty/visit.rs @@ -66,7 +66,7 @@ impl<'tcx> TyCtxt<'tcx> { { type Result = ControlFlow<()>; - fn visit_binder>>( + fn visit_binder>>( &mut self, t: &Binder<'tcx, T>, ) -> Self::Result { @@ -77,7 +77,7 @@ impl<'tcx> TyCtxt<'tcx> { } fn visit_region(&mut self, r: ty::Region<'tcx>) -> Self::Result { - match *r { + match r.kind() { ty::ReBound(debruijn, _) if debruijn < self.outer_index => { ControlFlow::Continue(()) } @@ -139,7 +139,7 @@ impl<'tcx> TyCtxt<'tcx> { { let mut collector = LateBoundRegionsCollector::new(just_constrained); let value = value.skip_binder(); - let value = if just_constrained { self.expand_weak_alias_tys(value) } else { value }; + let value = if just_constrained { self.expand_free_alias_tys(value) } else { value }; value.visit_with(&mut collector); collector.regions } @@ -168,7 +168,7 @@ impl LateBoundRegionsCollector { } impl<'tcx> TypeVisitor> for LateBoundRegionsCollector { - fn visit_binder>>(&mut self, t: &Binder<'tcx, T>) { + fn visit_binder>>(&mut self, t: &Binder<'tcx, T>) { self.current_index.shift_in(1); t.super_visit_with(self); self.current_index.shift_out(1); @@ -182,8 +182,8 @@ impl<'tcx> TypeVisitor> for LateBoundRegionsCollector { ty::Alias(ty::Projection | ty::Inherent | ty::Opaque, _) => { return; } - // All weak alias types should've been expanded beforehand. - ty::Alias(ty::Weak, _) => bug!("unexpected weak alias type"), + // All free alias types should've been expanded beforehand. + ty::Alias(ty::Free, _) => bug!("unexpected free alias type"), _ => {} } } @@ -205,7 +205,7 @@ impl<'tcx> TypeVisitor> for LateBoundRegionsCollector { } fn visit_region(&mut self, r: ty::Region<'tcx>) { - if let ty::ReBound(debruijn, br) = *r { + if let ty::ReBound(debruijn, br) = r.kind() { if debruijn == self.current_index { self.regions.insert(br.kind); } @@ -231,9 +231,7 @@ impl MaxUniverse { impl<'tcx> TypeVisitor> for MaxUniverse { fn visit_ty(&mut self, t: Ty<'tcx>) { if let ty::Placeholder(placeholder) = t.kind() { - self.max_universe = ty::UniverseIndex::from_u32( - self.max_universe.as_u32().max(placeholder.universe.as_u32()), - ); + self.max_universe = self.max_universe.max(placeholder.universe); } t.super_visit_with(self) @@ -241,19 +239,15 @@ impl<'tcx> TypeVisitor> for MaxUniverse { fn visit_const(&mut self, c: ty::consts::Const<'tcx>) { if let ty::ConstKind::Placeholder(placeholder) = c.kind() { - self.max_universe = ty::UniverseIndex::from_u32( - self.max_universe.as_u32().max(placeholder.universe.as_u32()), - ); + self.max_universe = self.max_universe.max(placeholder.universe); } c.super_visit_with(self) } fn visit_region(&mut self, r: ty::Region<'tcx>) { - if let ty::RePlaceholder(placeholder) = *r { - self.max_universe = ty::UniverseIndex::from_u32( - self.max_universe.as_u32().max(placeholder.universe.as_u32()), - ); + if let ty::RePlaceholder(placeholder) = r.kind() { + self.max_universe = self.max_universe.max(placeholder.universe); } } } diff --git a/compiler/rustc_middle/src/ty/walk.rs b/compiler/rustc_middle/src/ty/walk.rs deleted file mode 100644 index a23316ae6fc88..0000000000000 --- a/compiler/rustc_middle/src/ty/walk.rs +++ /dev/null @@ -1,217 +0,0 @@ -//! An iterator over the type substructure. -//! WARNING: this does not keep track of the region depth. - -use rustc_data_structures::sso::SsoHashSet; -use smallvec::{SmallVec, smallvec}; -use tracing::debug; - -use crate::ty::{self, GenericArg, GenericArgKind, Ty}; - -// The TypeWalker's stack is hot enough that it's worth going to some effort to -// avoid heap allocations. -type TypeWalkerStack<'tcx> = SmallVec<[GenericArg<'tcx>; 8]>; - -pub struct TypeWalker<'tcx> { - stack: TypeWalkerStack<'tcx>, - last_subtree: usize, - pub visited: SsoHashSet>, -} - -/// An iterator for walking the type tree. -/// -/// It's very easy to produce a deeply -/// nested type tree with a lot of -/// identical subtrees. In order to work efficiently -/// in this situation walker only visits each type once. -/// It maintains a set of visited types and -/// skips any types that are already there. -impl<'tcx> TypeWalker<'tcx> { - pub fn new(root: GenericArg<'tcx>) -> Self { - Self { stack: smallvec![root], last_subtree: 1, visited: SsoHashSet::new() } - } - - /// Skips the subtree corresponding to the last type - /// returned by `next()`. - /// - /// Example: Imagine you are walking `Foo, usize>`. - /// - /// ```ignore (illustrative) - /// let mut iter: TypeWalker = ...; - /// iter.next(); // yields Foo - /// iter.next(); // yields Bar - /// iter.skip_current_subtree(); // skips i32 - /// iter.next(); // yields usize - /// ``` - pub fn skip_current_subtree(&mut self) { - self.stack.truncate(self.last_subtree); - } -} - -impl<'tcx> Iterator for TypeWalker<'tcx> { - type Item = GenericArg<'tcx>; - - fn next(&mut self) -> Option> { - debug!("next(): stack={:?}", self.stack); - loop { - let next = self.stack.pop()?; - self.last_subtree = self.stack.len(); - if self.visited.insert(next) { - push_inner(&mut self.stack, next); - debug!("next: stack={:?}", self.stack); - return Some(next); - } - } - } -} - -impl<'tcx> GenericArg<'tcx> { - /// Iterator that walks `self` and any types reachable from - /// `self`, in depth-first order. Note that just walks the types - /// that appear in `self`, it does not descend into the fields of - /// structs or variants. For example: - /// - /// ```text - /// isize => { isize } - /// Foo> => { Foo>, Bar, isize } - /// [isize] => { [isize], isize } - /// ``` - pub fn walk(self) -> TypeWalker<'tcx> { - TypeWalker::new(self) - } -} - -impl<'tcx> Ty<'tcx> { - /// Iterator that walks `self` and any types reachable from - /// `self`, in depth-first order. Note that just walks the types - /// that appear in `self`, it does not descend into the fields of - /// structs or variants. For example: - /// - /// ```text - /// isize => { isize } - /// Foo> => { Foo>, Bar, isize } - /// [isize] => { [isize], isize } - /// ``` - pub fn walk(self) -> TypeWalker<'tcx> { - TypeWalker::new(self.into()) - } -} - -impl<'tcx> ty::Const<'tcx> { - /// Iterator that walks `self` and any types reachable from - /// `self`, in depth-first order. Note that just walks the types - /// that appear in `self`, it does not descend into the fields of - /// structs or variants. For example: - /// - /// ```text - /// isize => { isize } - /// Foo> => { Foo>, Bar, isize } - /// [isize] => { [isize], isize } - /// ``` - pub fn walk(self) -> TypeWalker<'tcx> { - TypeWalker::new(self.into()) - } -} - -/// We push `GenericArg`s on the stack in reverse order so as to -/// maintain a pre-order traversal. As of the time of this -/// writing, the fact that the traversal is pre-order is not -/// known to be significant to any code, but it seems like the -/// natural order one would expect (basically, the order of the -/// types as they are written). -fn push_inner<'tcx>(stack: &mut TypeWalkerStack<'tcx>, parent: GenericArg<'tcx>) { - match parent.unpack() { - GenericArgKind::Type(parent_ty) => match *parent_ty.kind() { - ty::Bool - | ty::Char - | ty::Int(_) - | ty::Uint(_) - | ty::Float(_) - | ty::Str - | ty::Infer(_) - | ty::Param(_) - | ty::Never - | ty::Error(_) - | ty::Placeholder(..) - | ty::Bound(..) - | ty::Foreign(..) => {} - - ty::Pat(ty, pat) => { - match *pat { - ty::PatternKind::Range { start, end } => { - stack.push(end.into()); - stack.push(start.into()); - } - } - stack.push(ty.into()); - } - ty::Array(ty, len) => { - stack.push(len.into()); - stack.push(ty.into()); - } - ty::Slice(ty) => { - stack.push(ty.into()); - } - ty::RawPtr(ty, _) => { - stack.push(ty.into()); - } - ty::Ref(lt, ty, _) => { - stack.push(ty.into()); - stack.push(lt.into()); - } - ty::Alias(_, data) => { - stack.extend(data.args.iter().rev()); - } - ty::Dynamic(obj, lt, _) => { - stack.push(lt.into()); - stack.extend(obj.iter().rev().flat_map(|predicate| { - let (args, opt_ty) = match predicate.skip_binder() { - ty::ExistentialPredicate::Trait(tr) => (tr.args, None), - ty::ExistentialPredicate::Projection(p) => (p.args, Some(p.term)), - ty::ExistentialPredicate::AutoTrait(_) => - // Empty iterator - { - (ty::GenericArgs::empty(), None) - } - }; - - args.iter().rev().chain(opt_ty.map(|term| match term.unpack() { - ty::TermKind::Ty(ty) => ty.into(), - ty::TermKind::Const(ct) => ct.into(), - })) - })); - } - ty::Adt(_, args) - | ty::Closure(_, args) - | ty::CoroutineClosure(_, args) - | ty::Coroutine(_, args) - | ty::CoroutineWitness(_, args) - | ty::FnDef(_, args) => { - stack.extend(args.iter().rev()); - } - ty::Tuple(ts) => stack.extend(ts.iter().rev().map(GenericArg::from)), - ty::FnPtr(sig_tys, _hdr) => { - stack.extend( - sig_tys.skip_binder().inputs_and_output.iter().rev().map(|ty| ty.into()), - ); - } - ty::UnsafeBinder(bound_ty) => { - stack.push(bound_ty.skip_binder().into()); - } - }, - GenericArgKind::Lifetime(_) => {} - GenericArgKind::Const(parent_ct) => match parent_ct.kind() { - ty::ConstKind::Infer(_) - | ty::ConstKind::Param(_) - | ty::ConstKind::Placeholder(_) - | ty::ConstKind::Bound(..) - | ty::ConstKind::Error(_) => {} - - ty::ConstKind::Value(cv) => stack.push(cv.ty.into()), - - ty::ConstKind::Expr(expr) => stack.extend(expr.args().iter().rev()), - ty::ConstKind::Unevaluated(ct) => { - stack.extend(ct.args.iter().rev()); - } - }, - } -} diff --git a/compiler/rustc_middle/src/values.rs b/compiler/rustc_middle/src/values.rs index 39fcc686c555a..4d70a70873267 100644 --- a/compiler/rustc_middle/src/values.rs +++ b/compiler/rustc_middle/src/values.rs @@ -138,18 +138,26 @@ impl<'tcx> Value> for &[ty::Variance] { cycle_error: &CycleError, _guar: ErrorGuaranteed, ) -> Self { - if let Some(frame) = cycle_error.cycle.get(0) - && frame.query.dep_kind == dep_kinds::variances_of - && let Some(def_id) = frame.query.def_id - { - let n = tcx.generics_of(def_id).own_params.len(); - vec![ty::Bivariant; n].leak() - } else { - span_bug!( - cycle_error.usage.as_ref().unwrap().0, - "only `variances_of` returns `&[ty::Variance]`" - ); - } + search_for_cycle_permutation( + &cycle_error.cycle, + |cycle| { + if let Some(frame) = cycle.get(0) + && frame.query.dep_kind == dep_kinds::variances_of + && let Some(def_id) = frame.query.def_id + { + let n = tcx.generics_of(def_id).own_params.len(); + ControlFlow::Break(vec![ty::Bivariant; n].leak()) + } else { + ControlFlow::Continue(()) + } + }, + || { + span_bug!( + cycle_error.usage.as_ref().unwrap().0, + "only `variances_of` returns `&[ty::Variance]`" + ) + }, + ) } } diff --git a/compiler/rustc_mir_build/src/builder/custom/mod.rs b/compiler/rustc_mir_build/src/builder/custom/mod.rs index bfc16816e2e5e..902a6e7f115be 100644 --- a/compiler/rustc_mir_build/src/builder/custom/mod.rs +++ b/compiler/rustc_mir_build/src/builder/custom/mod.rs @@ -103,8 +103,9 @@ fn parse_attribute(attr: &Attribute) -> MirPhase { let mut dialect: Option = None; let mut phase: Option = None; + // Not handling errors properly for this internal attribute; will just abort on errors. for nested in meta_items { - let name = nested.name_or_empty(); + let name = nested.name().unwrap(); let value = nested.value_str().unwrap().as_str().to_string(); match name.as_str() { "dialect" => { diff --git a/compiler/rustc_mir_build/src/builder/custom/parse/instruction.rs b/compiler/rustc_mir_build/src/builder/custom/parse/instruction.rs index 19669021eefb4..494ee33fd8b9f 100644 --- a/compiler/rustc_mir_build/src/builder/custom/parse/instruction.rs +++ b/compiler/rustc_mir_build/src/builder/custom/parse/instruction.rs @@ -69,6 +69,8 @@ impl<'a, 'tcx> ParseCtxt<'a, 'tcx> { target: self.parse_return_to(args[1])?, unwind: self.parse_unwind_action(args[2])?, replace: false, + drop: None, + async_fut: None, }) }, @call(mir_call, args) => { @@ -145,7 +147,7 @@ impl<'a, 'tcx> ParseCtxt<'a, 'tcx> { let arm = &self.thir[*arm]; let value = match arm.pattern.kind { PatKind::Constant { value } => value, - PatKind::ExpandedConstant { ref subpattern, def_id: _, is_inline: false } + PatKind::ExpandedConstant { ref subpattern, def_id: _ } if let PatKind::Constant { value } = subpattern.kind => { value diff --git a/compiler/rustc_mir_build/src/builder/expr/as_place.rs b/compiler/rustc_mir_build/src/builder/expr/as_place.rs index 50ca924baf9c8..fbe530811567f 100644 --- a/compiler/rustc_mir_build/src/builder/expr/as_place.rs +++ b/compiler/rustc_mir_build/src/builder/expr/as_place.rs @@ -159,7 +159,7 @@ fn find_capture_matching_projections<'a, 'tcx>( ) -> Option<(usize, &'a Capture<'tcx>)> { let hir_projections = convert_to_hir_projections_and_truncate_for_capture(projections); - upvars.get_by_key_enumerated(var_hir_id.0).find(|(_, capture)| { + upvars.get_by_key_enumerated(var_hir_id.0.local_id).find(|(_, capture)| { let possible_ancestor_proj_kinds: Vec<_> = capture.captured_place.place.projections.iter().map(|proj| proj.kind).collect(); is_ancestor_or_same_capture(&possible_ancestor_proj_kinds, &hir_projections) @@ -258,7 +258,7 @@ impl<'tcx> PlaceBuilder<'tcx> { self.projection ), PlaceBase::Upvar { var_hir_id, closure_def_id: _ } => span_bug!( - cx.tcx.hir().span(var_hir_id.0), + cx.tcx.hir_span(var_hir_id.0), "could not resolve upvar: {var_hir_id:?} + {:?}", self.projection ), diff --git a/compiler/rustc_mir_build/src/builder/expr/as_rvalue.rs b/compiler/rustc_mir_build/src/builder/expr/as_rvalue.rs index 97d34b85f50da..5a97b08db28d6 100644 --- a/compiler/rustc_mir_build/src/builder/expr/as_rvalue.rs +++ b/compiler/rustc_mir_build/src/builder/expr/as_rvalue.rs @@ -416,7 +416,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { if let Some(mir_place) = place_builder.try_to_place(this) { this.cfg.push_fake_read( block, - this.source_info(this.tcx.hir().span(*hir_id)), + this.source_info(this.tcx.hir_span(*hir_id)), *cause, mir_place, ); @@ -762,6 +762,8 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { target: success, unwind: UnwindAction::Continue, replace: false, + drop: None, + async_fut: None, }, ); this.diverge_from(block); diff --git a/compiler/rustc_mir_build/src/builder/expr/into.rs b/compiler/rustc_mir_build/src/builder/expr/into.rs index 333e69475c508..a9a07997410c0 100644 --- a/compiler/rustc_mir_build/src/builder/expr/into.rs +++ b/compiler/rustc_mir_build/src/builder/expr/into.rs @@ -328,7 +328,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { destination, target: Some(success), unwind: UnwindAction::Unreachable, - call_source: CallSource::Misc, + call_source: CallSource::Use, fn_span: expr_span, }, ); diff --git a/compiler/rustc_mir_build/src/builder/expr/stmt.rs b/compiler/rustc_mir_build/src/builder/expr/stmt.rs index 7f8a0a34c3123..2dff26f02f3a1 100644 --- a/compiler/rustc_mir_build/src/builder/expr/stmt.rs +++ b/compiler/rustc_mir_build/src/builder/expr/stmt.rs @@ -78,8 +78,14 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { // because AssignOp is only legal for Copy types // (overloaded ops should be desugared into a call). let result = unpack!( - block = - this.build_binary_op(block, op, expr_span, lhs_ty, Operand::Copy(lhs), rhs) + block = this.build_binary_op( + block, + op.into(), + expr_span, + lhs_ty, + Operand::Copy(lhs), + rhs + ) ); this.cfg.push_assign(block, source_info, lhs, result); diff --git a/compiler/rustc_mir_build/src/builder/matches/match_pair.rs b/compiler/rustc_mir_build/src/builder/matches/match_pair.rs index 29d400a957b47..3a7854a5e118d 100644 --- a/compiler/rustc_mir_build/src/builder/matches/match_pair.rs +++ b/compiler/rustc_mir_build/src/builder/matches/match_pair.rs @@ -1,5 +1,6 @@ use std::sync::Arc; +use rustc_hir::ByRef; use rustc_middle::mir::*; use rustc_middle::thir::*; use rustc_middle::ty::{self, Ty, TypeVisitableExt}; @@ -101,24 +102,27 @@ impl<'tcx> MatchPairTree<'tcx> { place_builder = resolved; } - // Only add the OpaqueCast projection if the given place is an opaque type and the - // expected type from the pattern is not. - let may_need_cast = match place_builder.base() { - PlaceBase::Local(local) => { - let ty = - Place::ty_from(local, place_builder.projection(), &cx.local_decls, cx.tcx).ty; - ty != pattern.ty && ty.has_opaque_types() + if !cx.tcx.next_trait_solver_globally() { + // Only add the OpaqueCast projection if the given place is an opaque type and the + // expected type from the pattern is not. + let may_need_cast = match place_builder.base() { + PlaceBase::Local(local) => { + let ty = + Place::ty_from(local, place_builder.projection(), &cx.local_decls, cx.tcx) + .ty; + ty != pattern.ty && ty.has_opaque_types() + } + _ => true, + }; + if may_need_cast { + place_builder = place_builder.project(ProjectionElem::OpaqueCast(pattern.ty)); } - _ => true, - }; - if may_need_cast { - place_builder = place_builder.project(ProjectionElem::OpaqueCast(pattern.ty)); } let place = place_builder.try_to_place(cx); let mut subpairs = Vec::new(); let test_case = match pattern.kind { - PatKind::Wild | PatKind::Error(_) => None, + PatKind::Missing | PatKind::Wild | PatKind::Error(_) => None, PatKind::Or { ref pats } => Some(TestCase::Or { pats: pats.iter().map(|pat| FlatPat::new(place_builder.clone(), pat, cx)).collect(), @@ -201,37 +205,8 @@ impl<'tcx> MatchPairTree<'tcx> { None } - PatKind::ExpandedConstant { subpattern: ref pattern, def_id: _, is_inline: false } => { - MatchPairTree::for_pattern(place_builder, pattern, cx, &mut subpairs, extra_data); - None - } - PatKind::ExpandedConstant { subpattern: ref pattern, def_id, is_inline: true } => { + PatKind::ExpandedConstant { subpattern: ref pattern, .. } => { MatchPairTree::for_pattern(place_builder, pattern, cx, &mut subpairs, extra_data); - - // Apply a type ascription for the inline constant to the value at `match_pair.place` - if let Some(source) = place { - let span = pattern.span; - let parent_id = cx.tcx.typeck_root_def_id(cx.def_id.to_def_id()); - let args = ty::InlineConstArgs::new( - cx.tcx, - ty::InlineConstArgsParts { - parent_args: ty::GenericArgs::identity_for_item(cx.tcx, parent_id), - ty: cx.infcx.next_ty_var(span), - }, - ) - .args; - let user_ty = cx.infcx.canonicalize_user_type_annotation(ty::UserType::new( - ty::UserTypeKind::TypeOf(def_id, ty::UserArgs { args, user_self_ty: None }), - )); - let annotation = ty::CanonicalUserTypeAnnotation { - inferred_ty: pattern.ty, - span, - user_ty: Box::new(user_ty), - }; - let variance = ty::Contravariant; - extra_data.ascriptions.push(super::Ascription { annotation, source, variance }); - } - None } @@ -286,7 +261,13 @@ impl<'tcx> MatchPairTree<'tcx> { None } - PatKind::Deref { ref subpattern } => { + PatKind::Deref { ref subpattern } + | PatKind::DerefPattern { ref subpattern, borrow: ByRef::No } => { + if cfg!(debug_assertions) && matches!(pattern.kind, PatKind::DerefPattern { .. }) { + // Only deref patterns on boxes can be lowered using a built-in deref. + debug_assert!(pattern.ty.is_box()); + } + MatchPairTree::for_pattern( place_builder.deref(), subpattern, @@ -297,7 +278,7 @@ impl<'tcx> MatchPairTree<'tcx> { None } - PatKind::DerefPattern { ref subpattern, mutability } => { + PatKind::DerefPattern { ref subpattern, borrow: ByRef::Yes(mutability) } => { // Create a new temporary for each deref pattern. // FIXME(deref_patterns): dedup temporaries to avoid multiple `deref()` calls? let temp = cx.temp( diff --git a/compiler/rustc_mir_build/src/builder/matches/mod.rs b/compiler/rustc_mir_build/src/builder/matches/mod.rs index 3acf2a6a2a61a..977d4f3e931b5 100644 --- a/compiler/rustc_mir_build/src/builder/matches/mod.rs +++ b/compiler/rustc_mir_build/src/builder/matches/mod.rs @@ -920,6 +920,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { PatKind::Constant { .. } | PatKind::Range { .. } + | PatKind::Missing | PatKind::Wild | PatKind::Never | PatKind::Error(_) => {} diff --git a/compiler/rustc_mir_build/src/builder/matches/test.rs b/compiler/rustc_mir_build/src/builder/matches/test.rs index d1f9d4c34fe1e..210b9cce581fd 100644 --- a/compiler/rustc_mir_build/src/builder/matches/test.rs +++ b/compiler/rustc_mir_build/src/builder/matches/test.rs @@ -140,12 +140,35 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { let success_block = target_block(TestBranch::Success); let fail_block = target_block(TestBranch::Failure); - let expect_ty = value.ty(); - let expect = self.literal_operand(test.span, value); + let mut expect_ty = value.ty(); + let mut expect = self.literal_operand(test.span, value); let mut place = place; let mut block = block; match ty.kind() { + ty::Str => { + // String literal patterns may have type `str` if `deref_patterns` is + // enabled, in order to allow `deref!("..."): String`. In this case, `value` + // is of type `&str`, so we compare it to `&place`. + if !tcx.features().deref_patterns() { + span_bug!( + test.span, + "matching on `str` went through without enabling deref_patterns" + ); + } + let re_erased = tcx.lifetimes.re_erased; + let ref_str_ty = Ty::new_imm_ref(tcx, re_erased, tcx.types.str_); + let ref_place = self.temp(ref_str_ty, test.span); + // `let ref_place: &str = &place;` + self.cfg.push_assign( + block, + self.source_info(test.span), + ref_place, + Rvalue::Ref(re_erased, BorrowKind::Shared, place), + ); + place = ref_place; + ty = ref_str_ty; + } ty::Adt(def, _) if tcx.is_lang_item(def.did(), LangItem::String) => { if !tcx.features().string_deref_patterns() { span_bug!( @@ -174,6 +197,31 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { place = ref_str; ty = ref_str_ty; } + &ty::Pat(base, _) => { + assert_eq!(ty, value.ty()); + assert!(base.is_trivially_pure_clone_copy()); + + let transmuted_place = self.temp(base, test.span); + self.cfg.push_assign( + block, + self.source_info(scrutinee_span), + transmuted_place, + Rvalue::Cast(CastKind::Transmute, Operand::Copy(place), base), + ); + + let transmuted_expect = self.temp(base, test.span); + self.cfg.push_assign( + block, + self.source_info(test.span), + transmuted_expect, + Rvalue::Cast(CastKind::Transmute, expect, base), + ); + + place = transmuted_place; + expect = Operand::Copy(transmuted_expect); + ty = base; + expect_ty = base; + } _ => {} } @@ -715,7 +763,7 @@ fn trait_method<'tcx>( let item = tcx .associated_items(trait_def_id) .filter_by_name_unhygienic(method_name) - .find(|item| item.kind == ty::AssocKind::Fn) + .find(|item| item.is_fn()) .expect("trait method not found"); let method_ty = Ty::new_fn_def(tcx, item.def_id, args); diff --git a/compiler/rustc_mir_build/src/builder/mod.rs b/compiler/rustc_mir_build/src/builder/mod.rs index c8b69a6ec62f5..9cf051a8760be 100644 --- a/compiler/rustc_mir_build/src/builder/mod.rs +++ b/compiler/rustc_mir_build/src/builder/mod.rs @@ -13,7 +13,7 @@ use rustc_data_structures::sorted_map::SortedIndexMultiMap; use rustc_errors::ErrorGuaranteed; use rustc_hir::def::DefKind; use rustc_hir::def_id::{DefId, LocalDefId}; -use rustc_hir::{self as hir, BindingMode, ByRef, HirId, Node}; +use rustc_hir::{self as hir, BindingMode, ByRef, HirId, ItemLocalId, Node}; use rustc_index::bit_set::GrowableBitSet; use rustc_index::{Idx, IndexSlice, IndexVec}; use rustc_infer::infer::{InferCtxt, TyCtxtInferExt}; @@ -48,11 +48,11 @@ pub(crate) fn closure_saved_names_of_captured_variables<'tcx>( /// this directly; instead use the cached version via `mir_built`. pub fn build_mir<'tcx>(tcx: TyCtxt<'tcx>, def: LocalDefId) -> Body<'tcx> { tcx.ensure_done().thir_abstract_const(def); - if let Err(e) = tcx.check_match(def) { + if let Err(e) = tcx.ensure_ok().check_match(def) { return construct_error(tcx, def, e); } - if let Err(err) = tcx.check_tail_calls(def) { + if let Err(err) = tcx.ensure_ok().check_tail_calls(def) { return construct_error(tcx, def, err); } @@ -221,7 +221,7 @@ struct Builder<'a, 'tcx> { coverage_info: Option, } -type CaptureMap<'tcx> = SortedIndexMultiMap>; +type CaptureMap<'tcx> = SortedIndexMultiMap>; #[derive(Debug)] struct Capture<'tcx> { @@ -457,7 +457,7 @@ fn construct_fn<'tcx>( // Figure out what primary body this item has. let body = tcx.hir_body_owned_by(fn_def); - let span_with_body = tcx.hir().span_with_body(fn_id); + let span_with_body = tcx.hir_span_with_body(fn_id); let return_ty_span = tcx .hir_fn_decl_by_hir_id(fn_id) .unwrap_or_else(|| span_bug!(span, "can't build MIR for {:?}", fn_def)) @@ -485,7 +485,7 @@ fn construct_fn<'tcx>( }; if let Some(custom_mir_attr) = - tcx.hir_attrs(fn_id).iter().find(|attr| attr.name_or_empty() == sym::custom_mir) + tcx.hir_attrs(fn_id).iter().find(|attr| attr.has_name(sym::custom_mir)) { return custom::build_custom_mir( tcx, @@ -853,6 +853,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { let capture_tys = upvar_args.upvar_tys(); let tcx = self.tcx; + let mut upvar_owner = None; self.upvars = tcx .closure_captures(self.def_id) .iter() @@ -866,6 +867,9 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { HirPlaceBase::Upvar(upvar_id) => upvar_id.var_path.hir_id, _ => bug!("Expected an upvar"), }; + let upvar_base = upvar_owner.get_or_insert(var_id.owner); + assert_eq!(*upvar_base, var_id.owner); + let var_id = var_id.local_id; let mutability = captured_place.mutability; @@ -994,7 +998,9 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { self.source_scope = source_scope; } - if self.tcx.intrinsic(self.def_id).is_some_and(|i| i.must_be_overridden) { + if self.tcx.intrinsic(self.def_id).is_some_and(|i| i.must_be_overridden) + || self.tcx.is_sdylib_interface_build() + { let source_info = self.source_info(rustc_span::DUMMY_SP); self.cfg.terminate(block, source_info, TerminatorKind::Unreachable); self.cfg.start_new_block().unit() diff --git a/compiler/rustc_mir_build/src/builder/scope.rs b/compiler/rustc_mir_build/src/builder/scope.rs index e42336a1dbbcc..7c2871298203d 100644 --- a/compiler/rustc_mir_build/src/builder/scope.rs +++ b/compiler/rustc_mir_build/src/builder/scope.rs @@ -89,6 +89,7 @@ use rustc_index::{IndexSlice, IndexVec}; use rustc_middle::middle::region; use rustc_middle::mir::*; use rustc_middle::thir::{ExprId, LintLevel}; +use rustc_middle::ty::{self, TyCtxt}; use rustc_middle::{bug, span_bug}; use rustc_session::lint::Level; use rustc_span::source_map::Spanned; @@ -405,6 +406,8 @@ impl DropTree { unwind: UnwindAction::Terminate(UnwindTerminateReason::InCleanup), place: drop_node.data.local.into(), replace: false, + drop: None, + async_fut: None, }; cfg.terminate(block, drop_node.data.source_info, terminator); } @@ -848,6 +851,8 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { target: next, unwind: UnwindAction::Continue, replace: false, + drop: None, + async_fut: None, }, ); block = next; @@ -879,6 +884,22 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { block.unit() } + fn is_async_drop_impl( + tcx: TyCtxt<'tcx>, + local_decls: &IndexVec>, + typing_env: ty::TypingEnv<'tcx>, + local: Local, + ) -> bool { + let ty = local_decls[local].ty; + if ty.is_async_drop(tcx, typing_env) || ty.is_coroutine() { + return true; + } + ty.needs_async_drop(tcx, typing_env) + } + fn is_async_drop(&self, local: Local) -> bool { + Self::is_async_drop_impl(self.tcx, &self.local_decls, self.typing_env(), local) + } + fn leave_top_scope(&mut self, block: BasicBlock) -> BasicBlock { // If we are emitting a `drop` statement, we need to have the cached // diverge cleanup pads ready in case that drop panics. @@ -887,14 +908,22 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { let unwind_to = if needs_cleanup { self.diverge_cleanup() } else { DropIdx::MAX }; let scope = self.scopes.scopes.last().expect("leave_top_scope called with no scopes"); + let has_async_drops = is_coroutine + && scope.drops.iter().any(|v| v.kind == DropKind::Value && self.is_async_drop(v.local)); + let dropline_to = if has_async_drops { Some(self.diverge_dropline()) } else { None }; + let scope = self.scopes.scopes.last().expect("leave_top_scope called with no scopes"); + let typing_env = self.typing_env(); build_scope_drops( &mut self.cfg, &mut self.scopes.unwind_drops, + &mut self.scopes.coroutine_drops, scope, block, unwind_to, + dropline_to, is_coroutine && needs_cleanup, self.arg_count, + |v: Local| Self::is_async_drop_impl(self.tcx, &self.local_decls, typing_env, v), ) .into_block() } @@ -1310,22 +1339,22 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { self.scopes.unwind_drops.add_entry_point(start, next_drop); } - /// Sets up a path that performs all required cleanup for dropping a - /// coroutine, starting from the given block that ends in - /// [TerminatorKind::Yield]. - /// - /// This path terminates in CoroutineDrop. - pub(crate) fn coroutine_drop_cleanup(&mut self, yield_block: BasicBlock) { + /// Returns the [DropIdx] for the innermost drop for dropline (coroutine drop path). + /// The `DropIdx` will be created if it doesn't already exist. + fn diverge_dropline(&mut self) -> DropIdx { + // It is okay to use dummy span because the getting scope index on the topmost scope + // must always succeed. + self.diverge_dropline_target(self.scopes.topmost(), DUMMY_SP) + } + + /// Similar to diverge_cleanup_target, but for dropline (coroutine drop path) + fn diverge_dropline_target(&mut self, target_scope: region::Scope, span: Span) -> DropIdx { debug_assert!( - matches!( - self.cfg.block_data(yield_block).terminator().kind, - TerminatorKind::Yield { .. } - ), - "coroutine_drop_cleanup called on block with non-yield terminator." + self.coroutine.is_some(), + "diverge_dropline_target is valid only for coroutine" ); - let (uncached_scope, mut cached_drop) = self - .scopes - .scopes + let target = self.scopes.scope_index(target_scope, span); + let (uncached_scope, mut cached_drop) = self.scopes.scopes[..=target] .iter() .enumerate() .rev() @@ -1334,13 +1363,34 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { }) .unwrap_or((0, ROOT_NODE)); - for scope in &mut self.scopes.scopes[uncached_scope..] { + if uncached_scope > target { + return cached_drop; + } + + for scope in &mut self.scopes.scopes[uncached_scope..=target] { for drop in &scope.drops { cached_drop = self.scopes.coroutine_drops.add_drop(*drop, cached_drop); } scope.cached_coroutine_drop_block = Some(cached_drop); } + cached_drop + } + + /// Sets up a path that performs all required cleanup for dropping a + /// coroutine, starting from the given block that ends in + /// [TerminatorKind::Yield]. + /// + /// This path terminates in CoroutineDrop. + pub(crate) fn coroutine_drop_cleanup(&mut self, yield_block: BasicBlock) { + debug_assert!( + matches!( + self.cfg.block_data(yield_block).terminator().kind, + TerminatorKind::Yield { .. } + ), + "coroutine_drop_cleanup called on block with non-yield terminator." + ); + let cached_drop = self.diverge_dropline(); self.scopes.coroutine_drops.add_entry_point(yield_block, cached_drop); } @@ -1371,6 +1421,8 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { target: assign, unwind: UnwindAction::Cleanup(assign_unwind), replace: true, + drop: None, + async_fut: None, }, ); self.diverge_from(block); @@ -1432,18 +1484,26 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { /// * `unwind_to`, describes the drops that would occur at this point in the code if a /// panic occurred (a subset of the drops in `scope`, since we sometimes elide StorageDead and other /// instructions on unwinding) +/// * `dropline_to`, describes the drops that would occur at this point in the code if a +/// coroutine drop occured. /// * `storage_dead_on_unwind`, if true, then we should emit `StorageDead` even when unwinding /// * `arg_count`, number of MIR local variables corresponding to fn arguments (used to assert that we don't drop those) -fn build_scope_drops<'tcx>( +fn build_scope_drops<'tcx, F>( cfg: &mut CFG<'tcx>, unwind_drops: &mut DropTree, + coroutine_drops: &mut DropTree, scope: &Scope, block: BasicBlock, unwind_to: DropIdx, + dropline_to: Option, storage_dead_on_unwind: bool, arg_count: usize, -) -> BlockAnd<()> { - debug!("build_scope_drops({:?} -> {:?})", block, scope); + is_async_drop: F, +) -> BlockAnd<()> +where + F: Fn(Local) -> bool, +{ + debug!("build_scope_drops({:?} -> {:?}), dropline_to={:?}", block, scope, dropline_to); // Build up the drops in evaluation order. The end result will // look like: @@ -1476,6 +1536,9 @@ fn build_scope_drops<'tcx>( // will branch to `drops[n]`. let mut block = block; + // `dropline_to` indicates what needs to be dropped should coroutine drop occur. + let mut dropline_to = dropline_to; + for drop_data in scope.drops.iter().rev() { let source_info = drop_data.source_info; let local = drop_data.local; @@ -1492,6 +1555,12 @@ fn build_scope_drops<'tcx>( debug_assert_eq!(unwind_drops.drops[unwind_to].data.kind, drop_data.kind); unwind_to = unwind_drops.drops[unwind_to].next; + if let Some(idx) = dropline_to { + debug_assert_eq!(coroutine_drops.drops[idx].data.local, drop_data.local); + debug_assert_eq!(coroutine_drops.drops[idx].data.kind, drop_data.kind); + dropline_to = Some(coroutine_drops.drops[idx].next); + } + // If the operand has been moved, and we are not on an unwind // path, then don't generate the drop. (We only take this into // account for non-unwind paths so as not to disturb the @@ -1501,6 +1570,12 @@ fn build_scope_drops<'tcx>( } unwind_drops.add_entry_point(block, unwind_to); + if let Some(to) = dropline_to + && is_async_drop(local) + { + coroutine_drops.add_entry_point(block, to); + } + let next = cfg.start_new_block(); cfg.terminate( block, @@ -1510,6 +1585,8 @@ fn build_scope_drops<'tcx>( target: next, unwind: UnwindAction::Continue, replace: false, + drop: None, + async_fut: None, }, ); block = next; @@ -1530,7 +1607,7 @@ fn build_scope_drops<'tcx>( // path, then don't generate the drop. (We only take this into // account for non-unwind paths so as not to disturb the // caching mechanism.) - if scope.moved_locals.iter().any(|&o| o == local) { + if scope.moved_locals.contains(&local) { continue; } @@ -1556,6 +1633,11 @@ fn build_scope_drops<'tcx>( debug_assert_eq!(unwind_drops.drops[unwind_to].data.kind, drop_data.kind); unwind_to = unwind_drops.drops[unwind_to].next; } + if let Some(idx) = dropline_to { + debug_assert_eq!(coroutine_drops.drops[idx].data.local, drop_data.local); + debug_assert_eq!(coroutine_drops.drops[idx].data.kind, drop_data.kind); + dropline_to = Some(coroutine_drops.drops[idx].next); + } // Only temps and vars need their storage dead. assert!(local.index() > arg_count); cfg.push(block, Statement { source_info, kind: StatementKind::StorageDead(local) }); @@ -1611,6 +1693,39 @@ impl<'a, 'tcx: 'a> Builder<'a, 'tcx> { } } } + // Link the exit drop tree to dropline drop tree (coroutine drop path) for async drops + if is_coroutine + && drops.drops.iter().any(|DropNode { data, next: _ }| { + data.kind == DropKind::Value && self.is_async_drop(data.local) + }) + { + let dropline_target = self.diverge_dropline_target(else_scope, span); + let mut dropline_indices = IndexVec::from_elem_n(dropline_target, 1); + for (drop_idx, drop_data) in drops.drops.iter_enumerated().skip(1) { + match drop_data.data.kind { + DropKind::Storage | DropKind::ForLint => { + let coroutine_drop = self + .scopes + .coroutine_drops + .add_drop(drop_data.data, dropline_indices[drop_data.next]); + dropline_indices.push(coroutine_drop); + } + DropKind::Value => { + let coroutine_drop = self + .scopes + .coroutine_drops + .add_drop(drop_data.data, dropline_indices[drop_data.next]); + if self.is_async_drop(drop_data.data.local) { + self.scopes.coroutine_drops.add_entry_point( + blocks[drop_idx].unwrap(), + dropline_indices[drop_data.next], + ); + } + dropline_indices.push(coroutine_drop); + } + } + } + } blocks[ROOT_NODE].map(BasicBlock::unit) } @@ -1655,9 +1770,11 @@ impl<'a, 'tcx: 'a> Builder<'a, 'tcx> { // to be captured by the coroutine. I'm not sure how important this // optimization is, but it is here. for (drop_idx, drop_node) in drops.drops.iter_enumerated() { - if let DropKind::Value = drop_node.data.kind { + if let DropKind::Value = drop_node.data.kind + && let Some(bb) = blocks[drop_idx] + { debug_assert!(drop_node.next < drops.drops.next_index()); - drops.entry_points.push((drop_node.next, blocks[drop_idx].unwrap())); + drops.entry_points.push((drop_node.next, bb)); } } Self::build_unwind_tree(cfg, drops, fn_span, resume_block); @@ -1709,6 +1826,8 @@ impl<'tcx> DropTreeBuilder<'tcx> for CoroutineDrop { let term = cfg.block_data_mut(from).terminator_mut(); if let TerminatorKind::Yield { ref mut drop, .. } = term.kind { *drop = Some(to); + } else if let TerminatorKind::Drop { ref mut drop, .. } = term.kind { + *drop = Some(to); } else { span_bug!( term.source_info.span, diff --git a/compiler/rustc_mir_build/src/check_unsafety.rs b/compiler/rustc_mir_build/src/check_unsafety.rs index 7f2e7d5ca8385..adfce99a9b537 100644 --- a/compiler/rustc_mir_build/src/check_unsafety.rs +++ b/compiler/rustc_mir_build/src/check_unsafety.rs @@ -195,7 +195,7 @@ impl<'tcx> UnsafetyVisitor<'_, 'tcx> { /// Whether the `unsafe_op_in_unsafe_fn` lint is `allow`ed at the current HIR node. fn unsafe_op_in_unsafe_fn_allowed(&self) -> bool { - self.tcx.lint_level_at_node(UNSAFE_OP_IN_UNSAFE_FN, self.hir_context).0 == Level::Allow + self.tcx.lint_level_at_node(UNSAFE_OP_IN_UNSAFE_FN, self.hir_context).level == Level::Allow } /// Handle closures/coroutines/inline-consts, which is unsafecked with their parent body. @@ -292,8 +292,10 @@ impl<'a, 'tcx> Visitor<'a, 'tcx> for UnsafetyVisitor<'a, 'tcx> { }); } BlockSafety::ExplicitUnsafe(hir_id) => { - let used = - matches!(self.tcx.lint_level_at_node(UNUSED_UNSAFE, hir_id), (Level::Allow, _)); + let used = matches!( + self.tcx.lint_level_at_node(UNUSED_UNSAFE, hir_id).level, + Level::Allow + ); self.in_safety_context( SafetyContext::UnsafeBlock { span: block.span, @@ -313,6 +315,7 @@ impl<'a, 'tcx> Visitor<'a, 'tcx> for UnsafetyVisitor<'a, 'tcx> { fn visit_pat(&mut self, pat: &'a Pat<'tcx>) { if self.in_union_destructure { match pat.kind { + PatKind::Missing => unreachable!(), // binding to a variable allows getting stuff out of variable PatKind::Binding { .. } // match is conditional on having this value @@ -401,9 +404,9 @@ impl<'a, 'tcx> Visitor<'a, 'tcx> for UnsafetyVisitor<'a, 'tcx> { visit::walk_pat(self, pat); self.inside_adt = old_inside_adt; } - PatKind::ExpandedConstant { def_id, is_inline, .. } => { + PatKind::ExpandedConstant { def_id, .. } => { if let Some(def) = def_id.as_local() - && *is_inline + && matches!(self.tcx.def_kind(def_id), DefKind::InlineConst) { self.visit_inner_body(def); } @@ -561,13 +564,17 @@ impl<'a, 'tcx> Visitor<'a, 'tcx> for UnsafetyVisitor<'a, 'tcx> { } } ExprKind::InlineAsm(box InlineAsmExpr { - asm_macro: AsmMacro::Asm | AsmMacro::NakedAsm, + asm_macro: asm_macro @ (AsmMacro::Asm | AsmMacro::NakedAsm), ref operands, template: _, options: _, line_spans: _, }) => { - self.requires_unsafe(expr.span, UseOfInlineAssembly); + // The `naked` attribute and the `naked_asm!` block form one atomic unit of + // unsafety, and `naked_asm!` does not itself need to be wrapped in an unsafe block. + if let AsmMacro::Asm = asm_macro { + self.requires_unsafe(expr.span, UseOfInlineAssembly); + } // For inline asm, do not use `walk_expr`, since we want to handle the label block // specially. @@ -943,7 +950,7 @@ impl UnsafeOpKind { } }); let unsafe_not_inherited_note = if let Some((id, _)) = note_non_inherited { - let span = tcx.hir().span(id); + let span = tcx.hir_span(id); let span = tcx.sess.source_map().guess_head_span(span); Some(UnsafeNotInheritedNote { span }) } else { diff --git a/compiler/rustc_mir_build/src/errors.rs b/compiler/rustc_mir_build/src/errors.rs index 0e16f871b16f9..ae09db5023527 100644 --- a/compiler/rustc_mir_build/src/errors.rs +++ b/compiler/rustc_mir_build/src/errors.rs @@ -2,7 +2,7 @@ use rustc_data_structures::fx::FxIndexMap; use rustc_errors::codes::*; use rustc_errors::{ Applicability, Diag, DiagArgValue, DiagCtxtHandle, Diagnostic, EmissionGuarantee, Level, - MultiSpan, SubdiagMessageOp, Subdiagnostic, pluralize, + MultiSpan, Subdiagnostic, pluralize, }; use rustc_macros::{Diagnostic, LintDiagnostic, Subdiagnostic}; use rustc_middle::ty::{self, Ty}; @@ -546,11 +546,7 @@ pub(crate) struct UnsafeNotInheritedLintNote { } impl Subdiagnostic for UnsafeNotInheritedLintNote { - fn add_to_diag_with>( - self, - diag: &mut Diag<'_, G>, - _f: &F, - ) { + fn add_to_diag(self, diag: &mut Diag<'_, G>) { diag.span_note(self.signature_span, fluent::mir_build_unsafe_fn_safe_body); let body_start = self.body_span.shrink_to_lo(); let body_end = self.body_span.shrink_to_hi(); @@ -1031,11 +1027,7 @@ pub(crate) struct Variant { } impl<'tcx> Subdiagnostic for AdtDefinedHere<'tcx> { - fn add_to_diag_with>( - self, - diag: &mut Diag<'_, G>, - _f: &F, - ) { + fn add_to_diag(self, diag: &mut Diag<'_, G>) { diag.arg("ty", self.ty); let mut spans = MultiSpan::from(self.adt_def_span); @@ -1117,11 +1109,7 @@ pub(crate) struct Rust2024IncompatiblePatSugg { } impl Subdiagnostic for Rust2024IncompatiblePatSugg { - fn add_to_diag_with>( - self, - diag: &mut Diag<'_, G>, - _f: &F, - ) { + fn add_to_diag(self, diag: &mut Diag<'_, G>) { // Format and emit explanatory notes about default binding modes. Reversing the spans' order // means if we have nested spans, the innermost ones will be visited first. for (span, def_br_mutbl) in self.default_mode_labels.into_iter().rev() { diff --git a/compiler/rustc_mir_build/src/lib.rs b/compiler/rustc_mir_build/src/lib.rs index a25697ba086d8..a051cf570b7d1 100644 --- a/compiler/rustc_mir_build/src/lib.rs +++ b/compiler/rustc_mir_build/src/lib.rs @@ -3,11 +3,10 @@ // tidy-alphabetical-start #![allow(rustc::diagnostic_outside_of_impl)] #![allow(rustc::untranslatable_diagnostic)] -#![cfg_attr(doc, recursion_limit = "256")] // FIXME(nnethercote): will be removed by #124141 +#![cfg_attr(bootstrap, feature(let_chains))] #![feature(assert_matches)] #![feature(box_patterns)] #![feature(if_let_guard)] -#![feature(let_chains)] #![feature(try_blocks)] // tidy-alphabetical-end diff --git a/compiler/rustc_mir_build/src/thir/constant.rs b/compiler/rustc_mir_build/src/thir/constant.rs index b3210813703ca..b4fa55e1c1fdb 100644 --- a/compiler/rustc_mir_build/src/thir/constant.rs +++ b/compiler/rustc_mir_build/src/thir/constant.rs @@ -37,13 +37,23 @@ pub(crate) fn lit_to_const<'tcx>( let str_bytes = s.as_str().as_bytes(); ty::ValTree::from_raw_bytes(tcx, str_bytes) } + (ast::LitKind::Str(s, _), ty::Str) if tcx.features().deref_patterns() => { + // String literal patterns may have type `str` if `deref_patterns` is enabled, in order + // to allow `deref!("..."): String`. + let str_bytes = s.as_str().as_bytes(); + ty::ValTree::from_raw_bytes(tcx, str_bytes) + } (ast::LitKind::ByteStr(data, _), ty::Ref(_, inner_ty, _)) - if matches!(inner_ty.kind(), ty::Slice(_)) => + if matches!(inner_ty.kind(), ty::Slice(_) | ty::Array(..)) => { let bytes = data as &[u8]; ty::ValTree::from_raw_bytes(tcx, bytes) } - (ast::LitKind::ByteStr(data, _), ty::Ref(_, inner_ty, _)) if inner_ty.is_array() => { + (ast::LitKind::ByteStr(data, _), ty::Slice(_) | ty::Array(..)) + if tcx.features().deref_patterns() => + { + // Byte string literal patterns may have type `[u8]` or `[u8; N]` if `deref_patterns` is + // enabled, in order to allow, e.g., `deref!(b"..."): Vec`. let bytes = data as &[u8]; ty::ValTree::from_raw_bytes(tcx, bytes) } diff --git a/compiler/rustc_mir_build/src/thir/cx/expr.rs b/compiler/rustc_mir_build/src/thir/cx/expr.rs index b8af77245f25d..fde23413972b6 100644 --- a/compiler/rustc_mir_build/src/thir/cx/expr.rs +++ b/compiler/rustc_mir_build/src/thir/cx/expr.rs @@ -9,7 +9,7 @@ use rustc_middle::hir::place::{ Place as HirPlace, PlaceBase as HirPlaceBase, ProjectionKind as HirProjectionKind, }; use rustc_middle::middle::region; -use rustc_middle::mir::{self, BinOp, BorrowKind, UnOp}; +use rustc_middle::mir::{self, AssignOp, BinOp, BorrowKind, UnOp}; use rustc_middle::thir::*; use rustc_middle::ty::adjustment::{ Adjust, Adjustment, AutoBorrow, AutoBorrowMutability, PointerCoercion, @@ -191,7 +191,7 @@ impl<'tcx> ThirBuildCx<'tcx> { let pointer_target = ExprKind::Field { lhs: self.thir.exprs.push(expr), variant_index: FIRST_VARIANT, - name: FieldIdx::from(0u32), + name: FieldIdx::ZERO, }; let arg = Expr { temp_lifetime, ty: pin_ty, span, kind: pointer_target }; let arg = self.thir.exprs.push(arg); @@ -226,7 +226,7 @@ impl<'tcx> ThirBuildCx<'tcx> { adt_def: self.tcx.adt_def(pin_did), variant_index: FIRST_VARIANT, args, - fields: Box::new([FieldExpr { name: FieldIdx::from(0u32), expr }]), + fields: Box::new([FieldExpr { name: FieldIdx::ZERO, expr }]), user_ty: None, base: AdtExprBase::None, })); @@ -489,7 +489,7 @@ impl<'tcx> ThirBuildCx<'tcx> { self.overloaded_operator(expr, Box::new([lhs, rhs])) } else { ExprKind::AssignOp { - op: bin_op(op.node), + op: assign_op(op.node), lhs: self.mirror_expr(lhs), rhs: self.mirror_expr(rhs), } @@ -1347,3 +1347,18 @@ fn bin_op(op: hir::BinOpKind) -> BinOp { _ => bug!("no equivalent for ast binop {:?}", op), } } + +fn assign_op(op: hir::AssignOpKind) -> AssignOp { + match op { + hir::AssignOpKind::AddAssign => AssignOp::AddAssign, + hir::AssignOpKind::SubAssign => AssignOp::SubAssign, + hir::AssignOpKind::MulAssign => AssignOp::MulAssign, + hir::AssignOpKind::DivAssign => AssignOp::DivAssign, + hir::AssignOpKind::RemAssign => AssignOp::RemAssign, + hir::AssignOpKind::BitXorAssign => AssignOp::BitXorAssign, + hir::AssignOpKind::BitAndAssign => AssignOp::BitAndAssign, + hir::AssignOpKind::BitOrAssign => AssignOp::BitOrAssign, + hir::AssignOpKind::ShlAssign => AssignOp::ShlAssign, + hir::AssignOpKind::ShrAssign => AssignOp::ShrAssign, + } +} diff --git a/compiler/rustc_mir_build/src/thir/cx/mod.rs b/compiler/rustc_mir_build/src/thir/cx/mod.rs index b3daed8a7e017..2f593b9a0a741 100644 --- a/compiler/rustc_mir_build/src/thir/cx/mod.rs +++ b/compiler/rustc_mir_build/src/thir/cx/mod.rs @@ -113,7 +113,7 @@ impl<'tcx> ThirBuildCx<'tcx> { apply_adjustments: tcx .hir_attrs(hir_id) .iter() - .all(|attr| attr.name_or_empty() != rustc_span::sym::custom_mir), + .all(|attr| !attr.has_name(rustc_span::sym::custom_mir)), } } diff --git a/compiler/rustc_mir_build/src/thir/pattern/check_match.rs b/compiler/rustc_mir_build/src/thir/pattern/check_match.rs index 095d3e75da1ee..78583a402fe9a 100644 --- a/compiler/rustc_mir_build/src/thir/pattern/check_match.rs +++ b/compiler/rustc_mir_build/src/thir/pattern/check_match.rs @@ -676,7 +676,7 @@ impl<'p, 'tcx> MatchVisitor<'p, 'tcx> { unpeeled_pat = subpattern; } - if let PatKind::ExpandedConstant { def_id, is_inline: false, .. } = unpeeled_pat.kind + if let PatKind::ExpandedConstant { def_id, .. } = unpeeled_pat.kind && let DefKind::Const = self.tcx.def_kind(def_id) && let Ok(snippet) = self.tcx.sess.source_map().span_to_snippet(pat.span) // We filter out paths with multiple path::segments. @@ -1025,7 +1025,7 @@ fn find_fallback_pattern_typo<'tcx>( pat: &Pat<'tcx>, lint: &mut UnreachablePattern<'_>, ) { - if let (Level::Allow, _) = cx.tcx.lint_level_at_node(UNREACHABLE_PATTERNS, hir_id) { + if let Level::Allow = cx.tcx.lint_level_at_node(UNREACHABLE_PATTERNS, hir_id).level { // This is because we use `with_no_trimmed_paths` later, so if we never emit the lint we'd // ICE. At the same time, we don't really need to do all of this if we won't emit anything. return; @@ -1174,7 +1174,7 @@ fn report_arm_reachability<'p, 'tcx>( for (arm, is_useful) in report.arm_usefulness.iter() { if let Usefulness::Redundant(explanation) = is_useful { let hir_id = arm.arm_data; - let arm_span = cx.tcx.hir().span(hir_id); + let arm_span = cx.tcx.hir_span(hir_id); let whole_arm_span = if is_match_arm { // If the arm is followed by a comma, extend the span to include it. let with_whitespace = sm.span_extend_while_whitespace(arm_span); @@ -1296,7 +1296,8 @@ fn report_non_exhaustive_match<'p, 'tcx>( for &arm in arms { let arm = &thir.arms[arm]; - if let PatKind::ExpandedConstant { def_id, is_inline: false, .. } = arm.pattern.kind + if let PatKind::ExpandedConstant { def_id, .. } = arm.pattern.kind + && !matches!(cx.tcx.def_kind(def_id), DefKind::InlineConst) && let Ok(snippet) = cx.tcx.sess.source_map().span_to_snippet(arm.pattern.span) // We filter out paths with multiple path::segments. && snippet.chars().all(|c| c.is_alphanumeric() || c == '_') diff --git a/compiler/rustc_mir_build/src/thir/pattern/const_to_pat.rs b/compiler/rustc_mir_build/src/thir/pattern/const_to_pat.rs index 667d59d858e3a..b7d203e3cd78c 100644 --- a/compiler/rustc_mir_build/src/thir/pattern/const_to_pat.rs +++ b/compiler/rustc_mir_build/src/thir/pattern/const_to_pat.rs @@ -58,25 +58,13 @@ struct ConstToPat<'tcx> { span: Span, id: hir::HirId, - treat_byte_string_as_slice: bool, - c: ty::Const<'tcx>, } impl<'tcx> ConstToPat<'tcx> { fn new(pat_ctxt: &PatCtxt<'_, 'tcx>, id: hir::HirId, span: Span, c: ty::Const<'tcx>) -> Self { trace!(?pat_ctxt.typeck_results.hir_owner); - ConstToPat { - tcx: pat_ctxt.tcx, - typing_env: pat_ctxt.typing_env, - span, - id, - treat_byte_string_as_slice: pat_ctxt - .typeck_results - .treat_byte_string_as_slice - .contains(&id.local_id), - c, - } + ConstToPat { tcx: pat_ctxt.tcx, typing_env: pat_ctxt.typing_env, span, id, c } } fn type_marked_structural(&self, ty: Ty<'tcx>) -> bool { @@ -108,8 +96,6 @@ impl<'tcx> ConstToPat<'tcx> { uv: ty::UnevaluatedConst<'tcx>, ty: Ty<'tcx>, ) -> Box> { - trace!(self.treat_byte_string_as_slice); - // It's not *technically* correct to be revealing opaque types here as borrowcheck has // not run yet. However, CTFE itself uses `TypingMode::PostAnalysis` unconditionally even // during typeck and not doing so has a lot of (undesirable) fallout (#101478, #119821). @@ -196,7 +182,10 @@ impl<'tcx> ConstToPat<'tcx> { } } - inlined_const_as_pat + // Wrap the pattern in a marker node to indicate that it is the result of lowering a + // constant. This is used for diagnostics, and for unsafety checking of inline const blocks. + let kind = PatKind::ExpandedConstant { subpattern: inlined_const_as_pat, def_id: uv.def }; + Box::new(Pat { kind, ty, span: self.span }) } fn field_pats( @@ -291,6 +280,16 @@ impl<'tcx> ConstToPat<'tcx> { slice: None, suffix: Box::new([]), }, + ty::Str => { + // String literal patterns may have type `str` if `deref_patterns` is enabled, in + // order to allow `deref!("..."): String`. Since we need a `&str` for the comparison + // when lowering to MIR in `Builder::perform_test`, treat the constant as a `&str`. + // This works because `str` and `&str` have the same valtree representation. + let ref_str_ty = Ty::new_imm_ref(tcx, tcx.lifetimes.re_erased, ty); + PatKind::Constant { + value: mir::Const::Ty(ref_str_ty, ty::Const::new_value(tcx, cv, ref_str_ty)), + } + } ty::Ref(_, pointee_ty, ..) => match *pointee_ty.kind() { // `&str` is represented as a valtree, let's keep using this // optimization for now. @@ -307,21 +306,8 @@ impl<'tcx> ConstToPat<'tcx> { ty, ); } else { - // `b"foo"` produces a `&[u8; 3]`, but you can't use constants of array type when - // matching against references, you can only use byte string literals. - // The typechecker has a special case for byte string literals, by treating them - // as slices. This means we turn `&[T; N]` constants into slice patterns, which - // has no negative effects on pattern matching, even if we're actually matching on - // arrays. - let pointee_ty = match *pointee_ty.kind() { - ty::Array(elem_ty, _) if self.treat_byte_string_as_slice => { - Ty::new_slice(tcx, elem_ty) - } - _ => *pointee_ty, - }; // References have the same valtree representation as their pointee. - let subpattern = self.valtree_to_pat(cv, pointee_ty); - PatKind::Deref { subpattern } + PatKind::Deref { subpattern: self.valtree_to_pat(cv, *pointee_ty) } } } }, diff --git a/compiler/rustc_mir_build/src/thir/pattern/migration.rs b/compiler/rustc_mir_build/src/thir/pattern/migration.rs index bd7787b643d57..12c457f13fc12 100644 --- a/compiler/rustc_mir_build/src/thir/pattern/migration.rs +++ b/compiler/rustc_mir_build/src/thir/pattern/migration.rs @@ -4,8 +4,7 @@ use rustc_data_structures::fx::FxIndexMap; use rustc_errors::MultiSpan; use rustc_hir::{BindingMode, ByRef, HirId, Mutability}; use rustc_lint as lint; -use rustc_middle::span_bug; -use rustc_middle::ty::{self, Rust2024IncompatiblePatInfo, Ty, TyCtxt}; +use rustc_middle::ty::{self, Rust2024IncompatiblePatInfo, TyCtxt}; use rustc_span::{Ident, Span}; use crate::errors::{Rust2024IncompatiblePat, Rust2024IncompatiblePatSugg}; @@ -87,19 +86,18 @@ impl<'a> PatMigration<'a> { } /// Tracks when we're lowering a pattern that implicitly dereferences the scrutinee. - /// This should only be called when the pattern type adjustments list `adjustments` is - /// non-empty. Returns the prior default binding mode; this should be followed by a call to - /// [`PatMigration::leave_ref`] to restore it when we leave the pattern. + /// This should only be called when the pattern type adjustments list `adjustments` contains an + /// implicit deref of a reference type. Returns the prior default binding mode; this should be + /// followed by a call to [`PatMigration::leave_ref`] to restore it when we leave the pattern. pub(super) fn visit_implicit_derefs<'tcx>( &mut self, pat_span: Span, - adjustments: &[Ty<'tcx>], + adjustments: &[ty::adjustment::PatAdjustment<'tcx>], ) -> Option<(Span, Mutability)> { - let implicit_deref_mutbls = adjustments.iter().map(|ref_ty| { - let &ty::Ref(_, _, mutbl) = ref_ty.kind() else { - span_bug!(pat_span, "pattern implicitly dereferences a non-ref type"); - }; - mutbl + // Implicitly dereferencing references changes the default binding mode, but implicit derefs + // of smart pointers do not. Thus, we only consider implicit derefs of reference types. + let implicit_deref_mutbls = adjustments.iter().filter_map(|adjust| { + if let &ty::Ref(_, _, mutbl) = adjust.source.kind() { Some(mutbl) } else { None } }); if !self.info.suggest_eliding_modes { diff --git a/compiler/rustc_mir_build/src/thir/pattern/mod.rs b/compiler/rustc_mir_build/src/thir/pattern/mod.rs index 4bfeab44bf4b9..fcd106d78e253 100644 --- a/compiler/rustc_mir_build/src/thir/pattern/mod.rs +++ b/compiler/rustc_mir_build/src/thir/pattern/mod.rs @@ -11,16 +11,18 @@ use rustc_abi::{FieldIdx, Integer}; use rustc_errors::codes::*; use rustc_hir::def::{CtorOf, DefKind, Res}; use rustc_hir::pat_util::EnumerateAndAdjustIterator; -use rustc_hir::{self as hir, RangeEnd}; +use rustc_hir::{self as hir, LangItem, RangeEnd}; use rustc_index::Idx; +use rustc_infer::infer::TyCtxtInferExt; use rustc_middle::mir::interpret::LitToConstInput; use rustc_middle::thir::{ Ascription, FieldPat, LocalVarId, Pat, PatKind, PatRange, PatRangeBoundary, }; +use rustc_middle::ty::adjustment::{PatAdjust, PatAdjustment}; use rustc_middle::ty::layout::IntegerExt; -use rustc_middle::ty::{self, CanonicalUserTypeAnnotation, Ty, TyCtxt, TypeVisitableExt}; +use rustc_middle::ty::{self, CanonicalUserTypeAnnotation, Ty, TyCtxt, TypingMode}; use rustc_middle::{bug, span_bug}; -use rustc_span::def_id::LocalDefId; +use rustc_span::def_id::DefId; use rustc_span::{ErrorGuaranteed, Span}; use tracing::{debug, instrument}; @@ -62,13 +64,15 @@ pub(super) fn pat_from_hir<'a, 'tcx>( impl<'a, 'tcx> PatCtxt<'a, 'tcx> { fn lower_pattern(&mut self, pat: &'tcx hir::Pat<'tcx>) -> Box> { - let adjustments: &[Ty<'tcx>] = + let adjustments: &[PatAdjustment<'tcx>] = self.typeck_results.pat_adjustments().get(pat.hir_id).map_or(&[], |v| &**v); // Track the default binding mode for the Rust 2024 migration suggestion. + // Implicitly dereferencing references changes the default binding mode, but implicit deref + // patterns do not. Only track binding mode changes if a ref type is in the adjustments. let mut opt_old_mode_span = None; if let Some(s) = &mut self.rust_2024_migration - && !adjustments.is_empty() + && adjustments.iter().any(|adjust| adjust.kind == PatAdjust::BuiltinDeref) { opt_old_mode_span = s.visit_implicit_derefs(pat.span, adjustments); } @@ -101,17 +105,21 @@ impl<'a, 'tcx> PatCtxt<'a, 'tcx> { _ => self.lower_pattern_unadjusted(pat), }; - let adjusted_pat = adjustments.iter().rev().fold(unadjusted_pat, |thir_pat, ref_ty| { - debug!("{:?}: wrapping pattern with type {:?}", thir_pat, ref_ty); - Box::new(Pat { - span: thir_pat.span, - ty: *ref_ty, - kind: PatKind::Deref { subpattern: thir_pat }, - }) + let adjusted_pat = adjustments.iter().rev().fold(unadjusted_pat, |thir_pat, adjust| { + debug!("{:?}: wrapping pattern with adjustment {:?}", thir_pat, adjust); + let span = thir_pat.span; + let kind = match adjust.kind { + PatAdjust::BuiltinDeref => PatKind::Deref { subpattern: thir_pat }, + PatAdjust::OverloadedDeref => { + let borrow = self.typeck_results.deref_pat_borrow_mode(adjust.source, pat); + PatKind::DerefPattern { subpattern: thir_pat, borrow } + } + }; + Box::new(Pat { span, ty: adjust.source, kind }) }); if let Some(s) = &mut self.rust_2024_migration - && !adjustments.is_empty() + && adjustments.iter().any(|adjust| adjust.kind == PatAdjust::BuiltinDeref) { s.leave_ref(opt_old_mode_span); } @@ -124,13 +132,13 @@ impl<'a, 'tcx> PatCtxt<'a, 'tcx> { expr: Option<&'tcx hir::PatExpr<'tcx>>, // Out-parameters collecting extra data to be reapplied by the caller ascriptions: &mut Vec>, - inline_consts: &mut Vec, + expanded_consts: &mut Vec, ) -> Result>, ErrorGuaranteed> { let Some(expr) = expr else { return Ok(None) }; // Lower the endpoint into a temporary `PatKind` that will then be // deconstructed to obtain the constant value and other data. - let mut kind: PatKind<'tcx> = self.lower_pat_expr(expr); + let mut kind: PatKind<'tcx> = self.lower_pat_expr(expr, None); // Unpeel any ascription or inline-const wrapper nodes. loop { @@ -139,10 +147,8 @@ impl<'a, 'tcx> PatCtxt<'a, 'tcx> { ascriptions.push(ascription); kind = subpattern.kind; } - PatKind::ExpandedConstant { is_inline, def_id, subpattern } => { - if is_inline { - inline_consts.extend(def_id.as_local()); - } + PatKind::ExpandedConstant { def_id, subpattern } => { + expanded_consts.push(def_id); kind = subpattern.kind; } _ => break, @@ -221,10 +227,10 @@ impl<'a, 'tcx> PatCtxt<'a, 'tcx> { // Collect extra data while lowering the endpoints, to be reapplied later. let mut ascriptions = vec![]; - let mut inline_consts = vec![]; + let mut expanded_consts = vec![]; let mut lower_endpoint = - |expr| self.lower_pattern_range_endpoint(expr, &mut ascriptions, &mut inline_consts); + |expr| self.lower_pattern_range_endpoint(expr, &mut ascriptions, &mut expanded_consts); let lo = lower_endpoint(lo_expr)?.unwrap_or(PatRangeBoundary::NegInfinity); let hi = lower_endpoint(hi_expr)?.unwrap_or(PatRangeBoundary::PosInfinity); @@ -269,17 +275,12 @@ impl<'a, 'tcx> PatCtxt<'a, 'tcx> { // `Foo::<'a>::A..=Foo::B`), we need to put the ascriptions for the associated // constants somewhere. Have them on the range pattern. for ascription in ascriptions { - kind = PatKind::AscribeUserType { - ascription, - subpattern: Box::new(Pat { span, ty, kind }), - }; + let subpattern = Box::new(Pat { span, ty, kind }); + kind = PatKind::AscribeUserType { ascription, subpattern }; } - for def in inline_consts { - kind = PatKind::ExpandedConstant { - def_id: def.to_def_id(), - is_inline: true, - subpattern: Box::new(Pat { span, ty, kind }), - }; + for def_id in expanded_consts { + let subpattern = Box::new(Pat { span, ty, kind }); + kind = PatKind::ExpandedConstant { def_id, subpattern }; } Ok(kind) } @@ -290,11 +291,13 @@ impl<'a, 'tcx> PatCtxt<'a, 'tcx> { let mut span = pat.span; let kind = match pat.kind { + hir::PatKind::Missing => PatKind::Missing, + hir::PatKind::Wild => PatKind::Wild, hir::PatKind::Never => PatKind::Never, - hir::PatKind::Expr(value) => self.lower_pat_expr(value), + hir::PatKind::Expr(value) => self.lower_pat_expr(value, Some(ty)), hir::PatKind::Range(ref lo_expr, ref hi_expr, end) => { let (lo_expr, hi_expr) = (lo_expr.as_deref(), hi_expr.as_deref()); @@ -303,9 +306,8 @@ impl<'a, 'tcx> PatCtxt<'a, 'tcx> { } hir::PatKind::Deref(subpattern) => { - let mutable = self.typeck_results.pat_has_ref_mut_binding(subpattern); - let mutability = if mutable { hir::Mutability::Mut } else { hir::Mutability::Not }; - PatKind::DerefPattern { subpattern: self.lower_pattern(subpattern), mutability } + let borrow = self.typeck_results.deref_pat_borrow_mode(ty, subpattern); + PatKind::DerefPattern { subpattern: self.lower_pattern(subpattern), borrow } } hir::PatKind::Ref(subpattern, _) => { // Track the default binding mode for the Rust 2024 migration suggestion. @@ -551,8 +553,7 @@ impl<'a, 'tcx> PatCtxt<'a, 'tcx> { let res = self.typeck_results.qpath_res(qpath, id); let (def_id, user_ty) = match res { - Res::Def(DefKind::Const, def_id) => (def_id, None), - Res::Def(DefKind::AssocConst, def_id) => { + Res::Def(DefKind::Const, def_id) | Res::Def(DefKind::AssocConst, def_id) => { (def_id, self.typeck_results.user_provided_types().get(id)) } @@ -566,16 +567,10 @@ impl<'a, 'tcx> PatCtxt<'a, 'tcx> { // Lower the named constant to a THIR pattern. let args = self.typeck_results.node_args(id); + // FIXME(mgca): we will need to special case IACs here to have type system compatible + // generic args, instead of how we represent them in body expressions. let c = ty::Const::new_unevaluated(self.tcx, ty::UnevaluatedConst { def: def_id, args }); - let subpattern = self.const_to_pat(c, ty, id, span); - - // Wrap the pattern in a marker node to indicate that it is the result - // of lowering a named constant. This marker is used for improved - // diagnostics in some situations, but has no effect at runtime. - let mut pattern = { - let kind = PatKind::ExpandedConstant { subpattern, def_id, is_inline: false }; - Box::new(Pat { span, ty, kind }) - }; + let mut pattern = self.const_to_pat(c, ty, id, span); // If this is an associated constant with an explicit user-written // type, add an ascription node (e.g. ` as MyTrait>::CONST`). @@ -612,38 +607,83 @@ impl<'a, 'tcx> PatCtxt<'a, 'tcx> { let ty = tcx.typeck(def_id).node_type(block.hir_id); let typeck_root_def_id = tcx.typeck_root_def_id(def_id.to_def_id()); - let parent_args = - tcx.erase_regions(ty::GenericArgs::identity_for_item(tcx, typeck_root_def_id)); + let parent_args = ty::GenericArgs::identity_for_item(tcx, typeck_root_def_id); let args = ty::InlineConstArgs::new(tcx, ty::InlineConstArgsParts { parent_args, ty }).args; - debug_assert!(!args.has_free_regions()); - let ct = ty::UnevaluatedConst { def: def_id.to_def_id(), args }; - let subpattern = self.const_to_pat(ty::Const::new_unevaluated(self.tcx, ct), ty, id, span); - - // Wrap the pattern in a marker node to indicate that it is the result - // of lowering an inline const block. - PatKind::ExpandedConstant { subpattern, def_id: def_id.to_def_id(), is_inline: true } + let c = ty::Const::new_unevaluated(self.tcx, ct); + let pattern = self.const_to_pat(c, ty, id, span); + + // Apply a type ascription for the inline constant. + let annotation = { + let infcx = tcx.infer_ctxt().build(TypingMode::non_body_analysis()); + let args = ty::InlineConstArgs::new( + tcx, + ty::InlineConstArgsParts { parent_args, ty: infcx.next_ty_var(span) }, + ) + .args; + infcx.canonicalize_user_type_annotation(ty::UserType::new(ty::UserTypeKind::TypeOf( + def_id.to_def_id(), + ty::UserArgs { args, user_self_ty: None }, + ))) + }; + let annotation = + CanonicalUserTypeAnnotation { user_ty: Box::new(annotation), span, inferred_ty: ty }; + PatKind::AscribeUserType { + subpattern: pattern, + ascription: Ascription { + annotation, + // Note that we use `Contravariant` here. See the `variance` field documentation + // for details. + variance: ty::Contravariant, + }, + } } /// Lowers the kinds of "expression" that can appear in a HIR pattern: /// - Paths (e.g. `FOO`, `foo::BAR`, `Option::None`) /// - Inline const blocks (e.g. `const { 1 + 1 }`) /// - Literals, possibly negated (e.g. `-128u8`, `"hello"`) - fn lower_pat_expr(&mut self, expr: &'tcx hir::PatExpr<'tcx>) -> PatKind<'tcx> { - let (lit, neg) = match &expr.kind { - hir::PatExprKind::Path(qpath) => { - return self.lower_path(qpath, expr.hir_id, expr.span).kind; - } + fn lower_pat_expr( + &mut self, + expr: &'tcx hir::PatExpr<'tcx>, + pat_ty: Option>, + ) -> PatKind<'tcx> { + match &expr.kind { + hir::PatExprKind::Path(qpath) => self.lower_path(qpath, expr.hir_id, expr.span).kind, hir::PatExprKind::ConstBlock(anon_const) => { - return self.lower_inline_const(anon_const, expr.hir_id, expr.span); + self.lower_inline_const(anon_const, expr.hir_id, expr.span) } - hir::PatExprKind::Lit { lit, negated } => (lit, *negated), - }; - - let ct_ty = self.typeck_results.node_type(expr.hir_id); - let lit_input = LitToConstInput { lit: &lit.node, ty: ct_ty, neg }; - let constant = self.tcx.at(expr.span).lit_to_const(lit_input); - self.const_to_pat(constant, ct_ty, expr.hir_id, lit.span).kind + hir::PatExprKind::Lit { lit, negated } => { + // We handle byte string literal patterns by using the pattern's type instead of the + // literal's type in `const_to_pat`: if the literal `b"..."` matches on a slice reference, + // the pattern's type will be `&[u8]` whereas the literal's type is `&[u8; 3]`; using the + // pattern's type means we'll properly translate it to a slice reference pattern. This works + // because slices and arrays have the same valtree representation. + // HACK: As an exception, use the literal's type if `pat_ty` is `String`; this can happen if + // `string_deref_patterns` is enabled. There's a special case for that when lowering to MIR. + // FIXME(deref_patterns): This hack won't be necessary once `string_deref_patterns` is + // superseded by a more general implementation of deref patterns. + let ct_ty = match pat_ty { + Some(pat_ty) + if let ty::Adt(def, _) = *pat_ty.kind() + && self.tcx.is_lang_item(def.did(), LangItem::String) => + { + if !self.tcx.features().string_deref_patterns() { + span_bug!( + expr.span, + "matching on `String` went through without enabling string_deref_patterns" + ); + } + self.typeck_results.node_type(expr.hir_id) + } + Some(pat_ty) => pat_ty, + None => self.typeck_results.node_type(expr.hir_id), + }; + let lit_input = LitToConstInput { lit: &lit.node, ty: ct_ty, neg: *negated }; + let constant = self.tcx.at(expr.span).lit_to_const(lit_input); + self.const_to_pat(constant, ct_ty, expr.hir_id, lit.span).kind + } + } } } diff --git a/compiler/rustc_mir_build/src/thir/print.rs b/compiler/rustc_mir_build/src/thir/print.rs index 16cef0ec3acbc..37248941e2c45 100644 --- a/compiler/rustc_mir_build/src/thir/print.rs +++ b/compiler/rustc_mir_build/src/thir/print.rs @@ -664,6 +664,7 @@ impl<'a, 'tcx> ThirPrinter<'a, 'tcx> { print_indented!(self, "kind: PatKind {", depth_lvl); match pat_kind { + PatKind::Missing => unreachable!(), PatKind::Wild => { print_indented!(self, "Wild", depth_lvl + 1); } @@ -740,10 +741,9 @@ impl<'a, 'tcx> ThirPrinter<'a, 'tcx> { print_indented!(self, format!("value: {:?}", value), depth_lvl + 2); print_indented!(self, "}", depth_lvl + 1); } - PatKind::ExpandedConstant { def_id, is_inline, subpattern } => { + PatKind::ExpandedConstant { def_id, subpattern } => { print_indented!(self, "ExpandedConstant {", depth_lvl + 1); print_indented!(self, format!("def_id: {def_id:?}"), depth_lvl + 2); - print_indented!(self, format!("is_inline: {is_inline:?}"), depth_lvl + 2); print_indented!(self, "subpattern:", depth_lvl + 2); self.print_pat(subpattern, depth_lvl + 2); print_indented!(self, "}", depth_lvl + 1); diff --git a/compiler/rustc_mir_dataflow/src/framework/cursor.rs b/compiler/rustc_mir_dataflow/src/framework/cursor.rs index c46ae9775cf68..3f6e7a0661921 100644 --- a/compiler/rustc_mir_dataflow/src/framework/cursor.rs +++ b/compiler/rustc_mir_dataflow/src/framework/cursor.rs @@ -1,5 +1,6 @@ //! Random access inspection of the results of a dataflow analysis. +use std::borrow::Cow; use std::cmp::Ordering; use std::ops::{Deref, DerefMut}; @@ -9,38 +10,30 @@ use rustc_middle::mir::{self, BasicBlock, Location}; use super::{Analysis, Direction, Effect, EffectIndex, Results}; -/// Some `ResultsCursor`s want to own a `Results`, and some want to borrow a `Results`, either -/// mutable or immutably. This type allows all of the above. It's similar to `Cow`. -pub enum ResultsHandle<'a, 'tcx, A> -where - A: Analysis<'tcx>, -{ - BorrowedMut(&'a mut Results<'tcx, A>), - Owned(Results<'tcx, A>), +/// Some `ResultsCursor`s want to own an `Analysis`, and some want to borrow an `Analysis`, either +/// mutable or immutably. This type allows all of the above. It's similar to `Cow`, but `Cow` +/// doesn't allow mutable borrowing. +enum CowMut<'a, T> { + BorrowedMut(&'a mut T), + Owned(T), } -impl<'tcx, A> Deref for ResultsHandle<'_, 'tcx, A> -where - A: Analysis<'tcx>, -{ - type Target = Results<'tcx, A>; +impl Deref for CowMut<'_, T> { + type Target = T; - fn deref(&self) -> &Results<'tcx, A> { + fn deref(&self) -> &T { match self { - ResultsHandle::BorrowedMut(borrowed) => borrowed, - ResultsHandle::Owned(owned) => owned, + CowMut::BorrowedMut(borrowed) => borrowed, + CowMut::Owned(owned) => owned, } } } -impl<'tcx, A> DerefMut for ResultsHandle<'_, 'tcx, A> -where - A: Analysis<'tcx>, -{ - fn deref_mut(&mut self) -> &mut Results<'tcx, A> { +impl DerefMut for CowMut<'_, T> { + fn deref_mut(&mut self) -> &mut T { match self { - ResultsHandle::BorrowedMut(borrowed) => borrowed, - ResultsHandle::Owned(owned) => owned, + CowMut::BorrowedMut(borrowed) => borrowed, + CowMut::Owned(owned) => owned, } } } @@ -60,7 +53,8 @@ where A: Analysis<'tcx>, { body: &'mir mir::Body<'tcx>, - results: ResultsHandle<'mir, 'tcx, A>, + analysis: CowMut<'mir, A>, + results: Cow<'mir, Results>, state: A::Domain, pos: CursorPosition, @@ -88,11 +82,15 @@ where self.body } - /// Returns a new cursor that can inspect `results`. - pub fn new(body: &'mir mir::Body<'tcx>, results: ResultsHandle<'mir, 'tcx, A>) -> Self { - let bottom_value = results.analysis.bottom_value(body); + fn new( + body: &'mir mir::Body<'tcx>, + analysis: CowMut<'mir, A>, + results: Cow<'mir, Results>, + ) -> Self { + let bottom_value = analysis.bottom_value(body); ResultsCursor { body, + analysis, results, // Initialize to the `bottom_value` and set `state_needs_reset` to tell the cursor that @@ -107,6 +105,24 @@ where } } + /// Returns a new cursor that takes ownership of and inspects analysis results. + pub fn new_owning( + body: &'mir mir::Body<'tcx>, + analysis: A, + results: Results, + ) -> Self { + Self::new(body, CowMut::Owned(analysis), Cow::Owned(results)) + } + + /// Returns a new cursor that borrows and inspects analysis results. + pub fn new_borrowing( + body: &'mir mir::Body<'tcx>, + analysis: &'mir mut A, + results: &'mir Results, + ) -> Self { + Self::new(body, CowMut::BorrowedMut(analysis), Cow::Borrowed(results)) + } + /// Allows inspection of unreachable basic blocks even with `debug_assertions` enabled. #[cfg(test)] pub(crate) fn allow_unreachable(&mut self) { @@ -114,24 +130,9 @@ where self.reachable_blocks.insert_all() } - /// Returns the underlying `Results`. - pub fn results(&self) -> &Results<'tcx, A> { - &self.results - } - - /// Returns the underlying `Results`. - pub fn mut_results(&mut self) -> &mut Results<'tcx, A> { - &mut self.results - } - /// Returns the `Analysis` used to generate the underlying `Results`. pub fn analysis(&self) -> &A { - &self.results.analysis - } - - /// Returns the `Analysis` used to generate the underlying `Results`. - pub fn mut_analysis(&mut self) -> &mut A { - &mut self.results.analysis + &self.analysis } /// Resets the cursor to hold the entry set for the given basic block. @@ -143,7 +144,7 @@ where #[cfg(debug_assertions)] assert!(self.reachable_blocks.contains(block)); - self.state.clone_from(self.results.entry_set_for_block(block)); + self.state.clone_from(&self.results[block]); self.pos = CursorPosition::block_entry(block); self.state_needs_reset = false; } @@ -235,7 +236,7 @@ where let target_effect_index = effect.at_index(target.statement_index); A::Direction::apply_effects_in_range( - &mut self.results.analysis, + &mut *self.analysis, &mut self.state, target.block, block_data, @@ -251,7 +252,7 @@ where /// This can be used, e.g., to apply the call return effect directly to the cursor without /// creating an extra copy of the dataflow state. pub fn apply_custom_effect(&mut self, f: impl FnOnce(&mut A, &mut A::Domain)) { - f(&mut self.results.analysis, &mut self.state); + f(&mut self.analysis, &mut self.state); self.state_needs_reset = true; } } diff --git a/compiler/rustc_mir_dataflow/src/framework/direction.rs b/compiler/rustc_mir_dataflow/src/framework/direction.rs index 3d7f9e2d8e71b..e955e38ad10fa 100644 --- a/compiler/rustc_mir_dataflow/src/framework/direction.rs +++ b/compiler/rustc_mir_dataflow/src/framework/direction.rs @@ -5,7 +5,7 @@ use rustc_middle::mir::{ }; use super::visitor::ResultsVisitor; -use super::{Analysis, Effect, EffectIndex, Results}; +use super::{Analysis, Effect, EffectIndex}; pub trait Direction { const IS_FORWARD: bool; @@ -36,14 +36,14 @@ pub trait Direction { A: Analysis<'tcx>; /// Called by `ResultsVisitor` to recompute the analysis domain values for - /// all locations in a basic block (starting from the entry value stored - /// in `Results`) and to visit them with `vis`. + /// all locations in a basic block (starting from `entry_state` and to + /// visit them with `vis`. fn visit_results_in_block<'mir, 'tcx, A>( state: &mut A::Domain, block: BasicBlock, block_data: &'mir mir::BasicBlockData<'tcx>, - results: &mut Results<'tcx, A>, - vis: &mut impl ResultsVisitor<'mir, 'tcx, A>, + analysis: &mut A, + vis: &mut impl ResultsVisitor<'tcx, A>, ) where A: Analysis<'tcx>; } @@ -211,28 +211,26 @@ impl Direction for Backward { state: &mut A::Domain, block: BasicBlock, block_data: &'mir mir::BasicBlockData<'tcx>, - results: &mut Results<'tcx, A>, - vis: &mut impl ResultsVisitor<'mir, 'tcx, A>, + analysis: &mut A, + vis: &mut impl ResultsVisitor<'tcx, A>, ) where A: Analysis<'tcx>, { - state.clone_from(results.entry_set_for_block(block)); - vis.visit_block_end(state); let loc = Location { block, statement_index: block_data.statements.len() }; let term = block_data.terminator(); - results.analysis.apply_early_terminator_effect(state, term, loc); - vis.visit_after_early_terminator_effect(results, state, term, loc); - results.analysis.apply_primary_terminator_effect(state, term, loc); - vis.visit_after_primary_terminator_effect(results, state, term, loc); + analysis.apply_early_terminator_effect(state, term, loc); + vis.visit_after_early_terminator_effect(analysis, state, term, loc); + analysis.apply_primary_terminator_effect(state, term, loc); + vis.visit_after_primary_terminator_effect(analysis, state, term, loc); for (statement_index, stmt) in block_data.statements.iter().enumerate().rev() { let loc = Location { block, statement_index }; - results.analysis.apply_early_statement_effect(state, stmt, loc); - vis.visit_after_early_statement_effect(results, state, stmt, loc); - results.analysis.apply_primary_statement_effect(state, stmt, loc); - vis.visit_after_primary_statement_effect(results, state, stmt, loc); + analysis.apply_early_statement_effect(state, stmt, loc); + vis.visit_after_early_statement_effect(analysis, state, stmt, loc); + analysis.apply_primary_statement_effect(state, stmt, loc); + vis.visit_after_primary_statement_effect(analysis, state, stmt, loc); } vis.visit_block_start(state); @@ -393,29 +391,27 @@ impl Direction for Forward { state: &mut A::Domain, block: BasicBlock, block_data: &'mir mir::BasicBlockData<'tcx>, - results: &mut Results<'tcx, A>, - vis: &mut impl ResultsVisitor<'mir, 'tcx, A>, + analysis: &mut A, + vis: &mut impl ResultsVisitor<'tcx, A>, ) where A: Analysis<'tcx>, { - state.clone_from(results.entry_set_for_block(block)); - vis.visit_block_start(state); for (statement_index, stmt) in block_data.statements.iter().enumerate() { let loc = Location { block, statement_index }; - results.analysis.apply_early_statement_effect(state, stmt, loc); - vis.visit_after_early_statement_effect(results, state, stmt, loc); - results.analysis.apply_primary_statement_effect(state, stmt, loc); - vis.visit_after_primary_statement_effect(results, state, stmt, loc); + analysis.apply_early_statement_effect(state, stmt, loc); + vis.visit_after_early_statement_effect(analysis, state, stmt, loc); + analysis.apply_primary_statement_effect(state, stmt, loc); + vis.visit_after_primary_statement_effect(analysis, state, stmt, loc); } let loc = Location { block, statement_index: block_data.statements.len() }; let term = block_data.terminator(); - results.analysis.apply_early_terminator_effect(state, term, loc); - vis.visit_after_early_terminator_effect(results, state, term, loc); - results.analysis.apply_primary_terminator_effect(state, term, loc); - vis.visit_after_primary_terminator_effect(results, state, term, loc); + analysis.apply_early_terminator_effect(state, term, loc); + vis.visit_after_early_terminator_effect(analysis, state, term, loc); + analysis.apply_primary_terminator_effect(state, term, loc); + vis.visit_after_primary_terminator_effect(analysis, state, term, loc); vis.visit_block_end(state); } diff --git a/compiler/rustc_mir_dataflow/src/framework/graphviz.rs b/compiler/rustc_mir_dataflow/src/framework/graphviz.rs index 448fad2dc3ece..a7d5422a3d721 100644 --- a/compiler/rustc_mir_dataflow/src/framework/graphviz.rs +++ b/compiler/rustc_mir_dataflow/src/framework/graphviz.rs @@ -21,7 +21,9 @@ use tracing::debug; use {rustc_ast as ast, rustc_graphviz as dot}; use super::fmt::{DebugDiffWithAdapter, DebugWithAdapter, DebugWithContext}; -use super::{Analysis, CallReturnPlaces, Direction, Results, ResultsCursor, ResultsVisitor}; +use super::{ + Analysis, CallReturnPlaces, Direction, Results, ResultsCursor, ResultsVisitor, visit_results, +}; use crate::errors::{ DuplicateValuesFor, PathMustEndInFilename, RequiresAnArgument, UnknownFormatter, }; @@ -32,7 +34,8 @@ use crate::errors::{ pub(super) fn write_graphviz_results<'tcx, A>( tcx: TyCtxt<'tcx>, body: &Body<'tcx>, - results: &mut Results<'tcx, A>, + analysis: &mut A, + results: &Results, pass_name: Option<&'static str>, ) -> std::io::Result<()> where @@ -77,7 +80,7 @@ where let mut buf = Vec::new(); - let graphviz = Formatter::new(body, results, style); + let graphviz = Formatter::new(body, analysis, results, style); let mut render_opts = vec![dot::RenderOption::Fontname(tcx.sess.opts.unstable_opts.graphviz_font.clone())]; if tcx.sess.opts.unstable_opts.graphviz_dark_mode { @@ -109,27 +112,29 @@ impl RustcMirAttrs { .flat_map(|attr| attr.meta_item_list().into_iter().flat_map(|v| v.into_iter())); for attr in rustc_mir_attrs { - let attr_result = if attr.has_name(sym::borrowck_graphviz_postflow) { - Self::set_field(&mut ret.basename_and_suffix, tcx, &attr, |s| { - let path = PathBuf::from(s.to_string()); - match path.file_name() { - Some(_) => Ok(path), - None => { - tcx.dcx().emit_err(PathMustEndInFilename { span: attr.span() }); + let attr_result = match attr.name() { + Some(name @ sym::borrowck_graphviz_postflow) => { + Self::set_field(&mut ret.basename_and_suffix, tcx, name, &attr, |s| { + let path = PathBuf::from(s.to_string()); + match path.file_name() { + Some(_) => Ok(path), + None => { + tcx.dcx().emit_err(PathMustEndInFilename { span: attr.span() }); + Err(()) + } + } + }) + } + Some(name @ sym::borrowck_graphviz_format) => { + Self::set_field(&mut ret.formatter, tcx, name, &attr, |s| match s { + sym::two_phase => Ok(s), + _ => { + tcx.dcx().emit_err(UnknownFormatter { span: attr.span() }); Err(()) } - } - }) - } else if attr.has_name(sym::borrowck_graphviz_format) { - Self::set_field(&mut ret.formatter, tcx, &attr, |s| match s { - sym::gen_kill | sym::two_phase => Ok(s), - _ => { - tcx.dcx().emit_err(UnknownFormatter { span: attr.span() }); - Err(()) - } - }) - } else { - Ok(()) + }) + } + _ => Ok(()), }; result = result.and(attr_result); @@ -141,12 +146,12 @@ impl RustcMirAttrs { fn set_field( field: &mut Option, tcx: TyCtxt<'_>, + name: Symbol, attr: &ast::MetaItemInner, mapper: impl FnOnce(Symbol) -> Result, ) -> Result<(), ()> { if field.is_some() { - tcx.dcx() - .emit_err(DuplicateValuesFor { span: attr.span(), name: attr.name_or_empty() }); + tcx.dcx().emit_err(DuplicateValuesFor { span: attr.span(), name }); return Err(()); } @@ -156,7 +161,7 @@ impl RustcMirAttrs { Ok(()) } else { tcx.dcx() - .emit_err(RequiresAnArgument { span: attr.span(), name: attr.name_or_empty() }); + .emit_err(RequiresAnArgument { span: attr.span(), name: attr.name().unwrap() }); Err(()) } } @@ -199,11 +204,13 @@ struct Formatter<'mir, 'tcx, A> where A: Analysis<'tcx>, { + body: &'mir Body<'tcx>, // The `RefCell` is used because `::node_label` - // takes `&self`, but it needs to modify the cursor. This is also the + // takes `&self`, but it needs to modify the analysis. This is also the // reason for the `Formatter`/`BlockFormatter` split; `BlockFormatter` has // the operations that involve the mutation, i.e. within the `borrow_mut`. - cursor: RefCell>, + analysis: RefCell<&'mir mut A>, + results: &'mir Results, style: OutputStyle, reachable: DenseBitSet, } @@ -214,15 +221,12 @@ where { fn new( body: &'mir Body<'tcx>, - results: &'mir mut Results<'tcx, A>, + analysis: &'mir mut A, + results: &'mir Results, style: OutputStyle, ) -> Self { let reachable = traversal::reachable_as_bitset(body); - Formatter { cursor: results.as_results_cursor(body).into(), style, reachable } - } - - fn body(&self) -> &'mir Body<'tcx> { - self.cursor.borrow().body() + Formatter { body, analysis: analysis.into(), results, style, reachable } } } @@ -251,7 +255,7 @@ where type Edge = CfgEdge; fn graph_id(&self) -> dot::Id<'_> { - let name = graphviz_safe_def_name(self.body().source.def_id()); + let name = graphviz_safe_def_name(self.body.source.def_id()); dot::Id::new(format!("graph_for_def_id_{name}")).unwrap() } @@ -260,10 +264,16 @@ where } fn node_label(&self, block: &Self::Node) -> dot::LabelText<'_> { - let mut cursor = self.cursor.borrow_mut(); - let mut fmt = - BlockFormatter { cursor: &mut cursor, style: self.style, bg: Background::Light }; - let label = fmt.write_node_label(*block).unwrap(); + let analysis = &mut **self.analysis.borrow_mut(); + + let diffs = StateDiffCollector::run(self.body, *block, analysis, self.results, self.style); + + let mut fmt = BlockFormatter { + cursor: ResultsCursor::new_borrowing(self.body, analysis, self.results), + style: self.style, + bg: Background::Light, + }; + let label = fmt.write_node_label(*block, diffs).unwrap(); dot::LabelText::html(String::from_utf8(label).unwrap()) } @@ -273,7 +283,7 @@ where } fn edge_label(&self, e: &Self::Edge) -> dot::LabelText<'_> { - let label = &self.body()[e.source].terminator().kind.fmt_successor_labels()[e.index]; + let label = &self.body[e.source].terminator().kind.fmt_successor_labels()[e.index]; dot::LabelText::label(label.clone()) } } @@ -286,7 +296,7 @@ where type Edge = CfgEdge; fn nodes(&self) -> dot::Nodes<'_, Self::Node> { - self.body() + self.body .basic_blocks .indices() .filter(|&idx| self.reachable.contains(idx)) @@ -295,10 +305,10 @@ where } fn edges(&self) -> dot::Edges<'_, Self::Edge> { - let body = self.body(); - body.basic_blocks + self.body + .basic_blocks .indices() - .flat_map(|bb| dataflow_successors(body, bb)) + .flat_map(|bb| dataflow_successors(self.body, bb)) .collect::>() .into() } @@ -308,20 +318,20 @@ where } fn target(&self, edge: &Self::Edge) -> Self::Node { - self.body()[edge.source].terminator().successors().nth(edge.index).unwrap() + self.body[edge.source].terminator().successors().nth(edge.index).unwrap() } } -struct BlockFormatter<'a, 'mir, 'tcx, A> +struct BlockFormatter<'mir, 'tcx, A> where A: Analysis<'tcx>, { - cursor: &'a mut ResultsCursor<'mir, 'tcx, A>, + cursor: ResultsCursor<'mir, 'tcx, A>, bg: Background, style: OutputStyle, } -impl<'tcx, A> BlockFormatter<'_, '_, 'tcx, A> +impl<'tcx, A> BlockFormatter<'_, 'tcx, A> where A: Analysis<'tcx>, A::Domain: DebugWithContext, @@ -334,7 +344,11 @@ where bg } - fn write_node_label(&mut self, block: BasicBlock) -> io::Result> { + fn write_node_label( + &mut self, + block: BasicBlock, + diffs: StateDiffCollector, + ) -> io::Result> { use std::io::Write; // Sample output: @@ -390,7 +404,7 @@ where self.write_row_with_full_state(w, "", "(on start)")?; // D + E: Statement and terminator transfer functions - self.write_statements_and_terminator(w, block)?; + self.write_statements_and_terminator(w, block, diffs)?; // F: State at end of block @@ -573,14 +587,8 @@ where &mut self, w: &mut impl io::Write, block: BasicBlock, + diffs: StateDiffCollector, ) -> io::Result<()> { - let diffs = StateDiffCollector::run( - self.cursor.body(), - block, - self.cursor.mut_results(), - self.style, - ); - let mut diffs_before = diffs.before.map(|v| v.into_iter()); let mut diffs_after = diffs.after.into_iter(); @@ -689,7 +697,8 @@ impl StateDiffCollector { fn run<'tcx, A>( body: &Body<'tcx>, block: BasicBlock, - results: &mut Results<'tcx, A>, + analysis: &mut A, + results: &Results, style: OutputStyle, ) -> Self where @@ -697,17 +706,17 @@ impl StateDiffCollector { D: DebugWithContext, { let mut collector = StateDiffCollector { - prev_state: results.analysis.bottom_value(body), + prev_state: analysis.bottom_value(body), after: vec![], before: (style == OutputStyle::BeforeAndAfter).then_some(vec![]), }; - results.visit_with(body, std::iter::once(block), &mut collector); + visit_results(body, std::iter::once(block), analysis, results, &mut collector); collector } } -impl<'tcx, A> ResultsVisitor<'_, 'tcx, A> for StateDiffCollector +impl<'tcx, A> ResultsVisitor<'tcx, A> for StateDiffCollector where A: Analysis<'tcx>, A::Domain: DebugWithContext, @@ -726,49 +735,49 @@ where fn visit_after_early_statement_effect( &mut self, - results: &mut Results<'tcx, A>, + analysis: &mut A, state: &A::Domain, _statement: &mir::Statement<'tcx>, _location: Location, ) { if let Some(before) = self.before.as_mut() { - before.push(diff_pretty(state, &self.prev_state, &results.analysis)); + before.push(diff_pretty(state, &self.prev_state, analysis)); self.prev_state.clone_from(state) } } fn visit_after_primary_statement_effect( &mut self, - results: &mut Results<'tcx, A>, + analysis: &mut A, state: &A::Domain, _statement: &mir::Statement<'tcx>, _location: Location, ) { - self.after.push(diff_pretty(state, &self.prev_state, &results.analysis)); + self.after.push(diff_pretty(state, &self.prev_state, analysis)); self.prev_state.clone_from(state) } fn visit_after_early_terminator_effect( &mut self, - results: &mut Results<'tcx, A>, + analysis: &mut A, state: &A::Domain, _terminator: &mir::Terminator<'tcx>, _location: Location, ) { if let Some(before) = self.before.as_mut() { - before.push(diff_pretty(state, &self.prev_state, &results.analysis)); + before.push(diff_pretty(state, &self.prev_state, analysis)); self.prev_state.clone_from(state) } } fn visit_after_primary_terminator_effect( &mut self, - results: &mut Results<'tcx, A>, + analysis: &mut A, state: &A::Domain, _terminator: &mir::Terminator<'tcx>, _location: Location, ) { - self.after.push(diff_pretty(state, &self.prev_state, &results.analysis)); + self.after.push(diff_pretty(state, &self.prev_state, analysis)); self.prev_state.clone_from(state) } } diff --git a/compiler/rustc_mir_dataflow/src/framework/mod.rs b/compiler/rustc_mir_dataflow/src/framework/mod.rs index 09f6cdb5c4a72..9cadec100b534 100644 --- a/compiler/rustc_mir_dataflow/src/framework/mod.rs +++ b/compiler/rustc_mir_dataflow/src/framework/mod.rs @@ -58,8 +58,9 @@ mod visitor; pub use self::cursor::ResultsCursor; pub use self::direction::{Backward, Direction, Forward}; pub use self::lattice::{JoinSemiLattice, MaybeReachable}; -pub use self::results::{EntryStates, Results}; -pub use self::visitor::{ResultsVisitor, visit_results}; +pub(crate) use self::results::AnalysisAndResults; +pub use self::results::Results; +pub use self::visitor::{ResultsVisitor, visit_reachable_results, visit_results}; /// Analysis domains are all bitsets of various kinds. This trait holds /// operations needed by all of them. @@ -247,17 +248,15 @@ pub trait Analysis<'tcx> { tcx: TyCtxt<'tcx>, body: &'mir mir::Body<'tcx>, pass_name: Option<&'static str>, - ) -> Results<'tcx, Self> + ) -> AnalysisAndResults<'tcx, Self> where Self: Sized, Self::Domain: DebugWithContext, { - let mut entry_states = - IndexVec::from_fn_n(|_| self.bottom_value(body), body.basic_blocks.len()); - self.initialize_start_block(body, &mut entry_states[mir::START_BLOCK]); + let mut results = IndexVec::from_fn_n(|_| self.bottom_value(body), body.basic_blocks.len()); + self.initialize_start_block(body, &mut results[mir::START_BLOCK]); - if Self::Direction::IS_BACKWARD && entry_states[mir::START_BLOCK] != self.bottom_value(body) - { + if Self::Direction::IS_BACKWARD && results[mir::START_BLOCK] != self.bottom_value(body) { bug!("`initialize_start_block` is not yet supported for backward dataflow analyses"); } @@ -280,10 +279,9 @@ pub trait Analysis<'tcx> { // every iteration. let mut state = self.bottom_value(body); while let Some(bb) = dirty_queue.pop() { - // Set the state to the entry state of the block. - // This is equivalent to `state = entry_states[bb].clone()`, - // but it saves an allocation, thus improving compile times. - state.clone_from(&entry_states[bb]); + // Set the state to the entry state of the block. This is equivalent to `state = + // results[bb].clone()`, but it saves an allocation, thus improving compile times. + state.clone_from(&results[bb]); Self::Direction::apply_effects_in_block( &mut self, @@ -292,7 +290,7 @@ pub trait Analysis<'tcx> { bb, &body[bb], |target: BasicBlock, state: &Self::Domain| { - let set_changed = entry_states[target].join(state); + let set_changed = results[target].join(state); if set_changed { dirty_queue.insert(target); } @@ -300,16 +298,14 @@ pub trait Analysis<'tcx> { ); } - let mut results = Results { analysis: self, entry_states }; - if tcx.sess.opts.unstable_opts.dump_mir_dataflow { - let res = write_graphviz_results(tcx, body, &mut results, pass_name); + let res = write_graphviz_results(tcx, body, &mut self, &results, pass_name); if let Err(e) = res { error!("Failed to write graphviz dataflow results: {}", e); } } - results + AnalysisAndResults { analysis: self, results } } } diff --git a/compiler/rustc_mir_dataflow/src/framework/results.rs b/compiler/rustc_mir_dataflow/src/framework/results.rs index 8e2c3afddb352..7b7e981d3a554 100644 --- a/compiler/rustc_mir_dataflow/src/framework/results.rs +++ b/compiler/rustc_mir_dataflow/src/framework/results.rs @@ -1,63 +1,30 @@ //! Dataflow analysis results. use rustc_index::IndexVec; -use rustc_middle::mir::{BasicBlock, Body, traversal}; +use rustc_middle::mir::{BasicBlock, Body}; -use super::{Analysis, ResultsCursor, ResultsVisitor, visit_results}; -use crate::framework::cursor::ResultsHandle; +use super::{Analysis, ResultsCursor}; -pub type EntryStates<'tcx, A> = IndexVec>::Domain>; +/// The results of a dataflow analysis that has converged to fixpoint. It only holds the domain +/// values at the entry of each basic block. Domain values in other parts of the block are +/// recomputed on the fly by visitors (i.e. `ResultsCursor`, or `ResultsVisitor` impls). +pub type Results = IndexVec; -/// A dataflow analysis that has converged to fixpoint. It only holds the domain values at the -/// entry of each basic block. Domain values in other parts of the block are recomputed on the fly -/// by visitors (i.e. `ResultsCursor`, or `ResultsVisitor` impls). -#[derive(Clone)] -pub struct Results<'tcx, A> +/// Utility type used in a few places where it's convenient to bundle an analysis with its results. +pub struct AnalysisAndResults<'tcx, A> where A: Analysis<'tcx>, { pub analysis: A, - pub entry_states: EntryStates<'tcx, A>, + pub results: Results, } -impl<'tcx, A> Results<'tcx, A> +impl<'tcx, A> AnalysisAndResults<'tcx, A> where A: Analysis<'tcx>, { - /// Creates a `ResultsCursor` that mutably borrows the `Results`, which is appropriate when the - /// `Results` is also used outside the cursor. - pub fn as_results_cursor<'mir>( - &'mir mut self, - body: &'mir Body<'tcx>, - ) -> ResultsCursor<'mir, 'tcx, A> { - ResultsCursor::new(body, ResultsHandle::BorrowedMut(self)) - } - - /// Creates a `ResultsCursor` that takes ownership of the `Results`. + /// Creates a `ResultsCursor` that takes ownership of `self`. pub fn into_results_cursor<'mir>(self, body: &'mir Body<'tcx>) -> ResultsCursor<'mir, 'tcx, A> { - ResultsCursor::new(body, ResultsHandle::Owned(self)) - } - - /// Gets the dataflow state for the given block. - pub fn entry_set_for_block(&self, block: BasicBlock) -> &A::Domain { - &self.entry_states[block] - } - - pub fn visit_with<'mir>( - &mut self, - body: &'mir Body<'tcx>, - blocks: impl IntoIterator, - vis: &mut impl ResultsVisitor<'mir, 'tcx, A>, - ) { - visit_results(body, blocks, self, vis) - } - - pub fn visit_reachable_with<'mir>( - &mut self, - body: &'mir Body<'tcx>, - vis: &mut impl ResultsVisitor<'mir, 'tcx, A>, - ) { - let blocks = traversal::reachable(body); - visit_results(body, blocks.map(|(bb, _)| bb), self, vis) + ResultsCursor::new_owning(body, self.analysis, self.results) } } diff --git a/compiler/rustc_mir_dataflow/src/framework/tests.rs b/compiler/rustc_mir_dataflow/src/framework/tests.rs index ae0f1179e6fac..8602bb5576523 100644 --- a/compiler/rustc_mir_dataflow/src/framework/tests.rs +++ b/compiler/rustc_mir_dataflow/src/framework/tests.rs @@ -79,7 +79,7 @@ fn mock_body<'tcx>() -> mir::Body<'tcx> { /// /// The `102` in the block's entry set is derived from the basic block index and ensures that the /// expected state is unique across all basic blocks. Remember, it is generated by -/// `mock_entry_states`, not from actually running `MockAnalysis` to fixpoint. +/// `mock_results`, not from actually running `MockAnalysis` to fixpoint. struct MockAnalysis<'tcx, D> { body: &'tcx mir::Body<'tcx>, dir: PhantomData, @@ -96,7 +96,7 @@ impl MockAnalysis<'_, D> { ret } - fn mock_entry_states(&self) -> IndexVec> { + fn mock_results(&self) -> IndexVec> { let empty = self.bottom_value(self.body); let mut ret = IndexVec::from_elem(empty, &self.body.basic_blocks); @@ -255,7 +255,7 @@ fn test_cursor(analysis: MockAnalysis<'_, D>) { let body = analysis.body; let mut cursor = - Results { entry_states: analysis.mock_entry_states(), analysis }.into_results_cursor(body); + AnalysisAndResults { results: analysis.mock_results(), analysis }.into_results_cursor(body); cursor.allow_unreachable(); diff --git a/compiler/rustc_mir_dataflow/src/framework/visitor.rs b/compiler/rustc_mir_dataflow/src/framework/visitor.rs index a03aecee7be12..fbb9e4108726d 100644 --- a/compiler/rustc_mir_dataflow/src/framework/visitor.rs +++ b/compiler/rustc_mir_dataflow/src/framework/visitor.rs @@ -1,4 +1,4 @@ -use rustc_middle::mir::{self, BasicBlock, Location}; +use rustc_middle::mir::{self, BasicBlock, Location, traversal}; use super::{Analysis, Direction, Results}; @@ -7,12 +7,13 @@ use super::{Analysis, Direction, Results}; pub fn visit_results<'mir, 'tcx, A>( body: &'mir mir::Body<'tcx>, blocks: impl IntoIterator, - results: &mut Results<'tcx, A>, - vis: &mut impl ResultsVisitor<'mir, 'tcx, A>, + analysis: &mut A, + results: &Results, + vis: &mut impl ResultsVisitor<'tcx, A>, ) where A: Analysis<'tcx>, { - let mut state = results.analysis.bottom_value(body); + let mut state = analysis.bottom_value(body); #[cfg(debug_assertions)] let reachable_blocks = mir::traversal::reachable_as_bitset(body); @@ -22,14 +23,28 @@ pub fn visit_results<'mir, 'tcx, A>( assert!(reachable_blocks.contains(block)); let block_data = &body[block]; - A::Direction::visit_results_in_block(&mut state, block, block_data, results, vis); + state.clone_from(&results[block]); + A::Direction::visit_results_in_block(&mut state, block, block_data, analysis, vis); } } +/// Like `visit_results`, but only for reachable blocks. +pub fn visit_reachable_results<'mir, 'tcx, A>( + body: &'mir mir::Body<'tcx>, + analysis: &mut A, + results: &Results, + vis: &mut impl ResultsVisitor<'tcx, A>, +) where + A: Analysis<'tcx>, +{ + let blocks = traversal::reachable(body).map(|(bb, _)| bb); + visit_results(body, blocks, analysis, results, vis) +} + /// A visitor over the results of an `Analysis`. Use this when you want to inspect domain values in /// many or all locations; use `ResultsCursor` if you want to inspect domain values only in certain /// locations. -pub trait ResultsVisitor<'mir, 'tcx, A> +pub trait ResultsVisitor<'tcx, A> where A: Analysis<'tcx>, { @@ -38,9 +53,9 @@ where /// Called after the "early" effect of the given statement is applied to `state`. fn visit_after_early_statement_effect( &mut self, - _results: &mut Results<'tcx, A>, + _analysis: &mut A, _state: &A::Domain, - _statement: &'mir mir::Statement<'tcx>, + _statement: &mir::Statement<'tcx>, _location: Location, ) { } @@ -48,9 +63,9 @@ where /// Called after the "primary" effect of the given statement is applied to `state`. fn visit_after_primary_statement_effect( &mut self, - _results: &mut Results<'tcx, A>, + _analysis: &mut A, _state: &A::Domain, - _statement: &'mir mir::Statement<'tcx>, + _statement: &mir::Statement<'tcx>, _location: Location, ) { } @@ -58,9 +73,9 @@ where /// Called after the "early" effect of the given terminator is applied to `state`. fn visit_after_early_terminator_effect( &mut self, - _results: &mut Results<'tcx, A>, + _analysis: &mut A, _state: &A::Domain, - _terminator: &'mir mir::Terminator<'tcx>, + _terminator: &mir::Terminator<'tcx>, _location: Location, ) { } @@ -70,9 +85,9 @@ where /// The `call_return_effect` (if one exists) will *not* be applied to `state`. fn visit_after_primary_terminator_effect( &mut self, - _results: &mut Results<'tcx, A>, + _analysis: &mut A, _state: &A::Domain, - _terminator: &'mir mir::Terminator<'tcx>, + _terminator: &mir::Terminator<'tcx>, _location: Location, ) { } diff --git a/compiler/rustc_mir_dataflow/src/impls/initialized.rs b/compiler/rustc_mir_dataflow/src/impls/initialized.rs index f5ffc42d52ab0..18165b0b9bd08 100644 --- a/compiler/rustc_mir_dataflow/src/impls/initialized.rs +++ b/compiler/rustc_mir_dataflow/src/impls/initialized.rs @@ -376,7 +376,14 @@ impl<'tcx> Analysis<'tcx> for MaybeInitializedPlaces<'_, 'tcx> { // the result of `is_unwind_dead`. let mut edges = terminator.edges(); if self.skip_unreachable_unwind - && let mir::TerminatorKind::Drop { target, unwind, place, replace: _ } = terminator.kind + && let mir::TerminatorKind::Drop { + target, + unwind, + place, + replace: _, + drop: _, + async_fut: _, + } = terminator.kind && matches!(unwind, mir::UnwindAction::Cleanup(_)) && self.is_unwind_dead(place, state) { diff --git a/compiler/rustc_mir_dataflow/src/lib.rs b/compiler/rustc_mir_dataflow/src/lib.rs index 82c57ef5678dc..658fbf505e446 100644 --- a/compiler/rustc_mir_dataflow/src/lib.rs +++ b/compiler/rustc_mir_dataflow/src/lib.rs @@ -1,11 +1,10 @@ // tidy-alphabetical-start -#![cfg_attr(doc, recursion_limit = "256")] // FIXME(nnethercote): will be removed by #124141 +#![cfg_attr(bootstrap, feature(let_chains))] #![feature(assert_matches)] #![feature(associated_type_defaults)] #![feature(box_patterns)] #![feature(exact_size_is_empty)] #![feature(file_buffered)] -#![feature(let_chains)] #![feature(never_type)] #![feature(try_blocks)] // tidy-alphabetical-end @@ -19,8 +18,8 @@ pub use self::drop_flag_effects::{ move_path_children_matching, on_all_children_bits, on_lookup_result_bits, }; pub use self::framework::{ - Analysis, Backward, Direction, EntryStates, Forward, GenKill, JoinSemiLattice, MaybeReachable, - Results, ResultsCursor, ResultsVisitor, fmt, graphviz, lattice, visit_results, + Analysis, Backward, Direction, Forward, GenKill, JoinSemiLattice, MaybeReachable, Results, + ResultsCursor, ResultsVisitor, fmt, graphviz, lattice, visit_reachable_results, visit_results, }; use self::move_paths::MoveData; diff --git a/compiler/rustc_mir_dataflow/src/points.rs b/compiler/rustc_mir_dataflow/src/points.rs index 5d2a78acbf526..70d1a34b5fb13 100644 --- a/compiler/rustc_mir_dataflow/src/points.rs +++ b/compiler/rustc_mir_dataflow/src/points.rs @@ -98,7 +98,8 @@ rustc_index::newtype_index! { pub fn save_as_intervals<'tcx, N, A>( elements: &DenseLocationMap, body: &mir::Body<'tcx>, - mut results: Results<'tcx, A>, + mut analysis: A, + results: Results, ) -> SparseIntervalMatrix where N: Idx, @@ -109,7 +110,8 @@ where visit_results( body, body.basic_blocks.reverse_postorder().iter().copied(), - &mut results, + &mut analysis, + &results, &mut visitor, ); visitor.values @@ -120,14 +122,14 @@ struct Visitor<'a, N: Idx> { values: SparseIntervalMatrix, } -impl<'mir, 'tcx, A, N> ResultsVisitor<'mir, 'tcx, A> for Visitor<'_, N> +impl<'tcx, A, N> ResultsVisitor<'tcx, A> for Visitor<'_, N> where A: Analysis<'tcx, Domain = DenseBitSet>, N: Idx, { - fn visit_after_primary_statement_effect( + fn visit_after_primary_statement_effect<'mir>( &mut self, - _results: &mut Results<'tcx, A>, + _analysis: &mut A, state: &A::Domain, _statement: &'mir mir::Statement<'tcx>, location: Location, @@ -139,9 +141,9 @@ where }); } - fn visit_after_primary_terminator_effect( + fn visit_after_primary_terminator_effect<'mir>( &mut self, - _results: &mut Results<'tcx, A>, + _analysis: &mut A, state: &A::Domain, _terminator: &'mir mir::Terminator<'tcx>, location: Location, diff --git a/compiler/rustc_mir_dataflow/src/rustc_peek.rs b/compiler/rustc_mir_dataflow/src/rustc_peek.rs index 399141aa9212e..303fc767b9a38 100644 --- a/compiler/rustc_mir_dataflow/src/rustc_peek.rs +++ b/compiler/rustc_mir_dataflow/src/rustc_peek.rs @@ -39,23 +39,23 @@ pub fn sanity_check<'tcx>(tcx: TyCtxt<'tcx>, body: &Body<'tcx>) { let move_data = MoveData::gather_moves(body, tcx, |_| true); if has_rustc_mir_with(tcx, def_id, sym::rustc_peek_maybe_init).is_some() { - let flow_inits = - MaybeInitializedPlaces::new(tcx, body, &move_data).iterate_to_fixpoint(tcx, body, None); - - sanity_check_via_rustc_peek(tcx, flow_inits.into_results_cursor(body)); + let flow_inits = MaybeInitializedPlaces::new(tcx, body, &move_data) + .iterate_to_fixpoint(tcx, body, None) + .into_results_cursor(body); + sanity_check_via_rustc_peek(tcx, flow_inits); } if has_rustc_mir_with(tcx, def_id, sym::rustc_peek_maybe_uninit).is_some() { let flow_uninits = MaybeUninitializedPlaces::new(tcx, body, &move_data) - .iterate_to_fixpoint(tcx, body, None); - - sanity_check_via_rustc_peek(tcx, flow_uninits.into_results_cursor(body)); + .iterate_to_fixpoint(tcx, body, None) + .into_results_cursor(body); + sanity_check_via_rustc_peek(tcx, flow_uninits); } if has_rustc_mir_with(tcx, def_id, sym::rustc_peek_liveness).is_some() { - let flow_liveness = MaybeLiveLocals.iterate_to_fixpoint(tcx, body, None); - - sanity_check_via_rustc_peek(tcx, flow_liveness.into_results_cursor(body)); + let flow_liveness = + MaybeLiveLocals.iterate_to_fixpoint(tcx, body, None).into_results_cursor(body); + sanity_check_via_rustc_peek(tcx, flow_liveness); } if has_rustc_mir_with(tcx, def_id, sym::stop_after_dataflow).is_some() { diff --git a/compiler/rustc_mir_dataflow/src/value_analysis.rs b/compiler/rustc_mir_dataflow/src/value_analysis.rs index 36fb1c2b36d02..83fd8ccba60e5 100644 --- a/compiler/rustc_mir_dataflow/src/value_analysis.rs +++ b/compiler/rustc_mir_dataflow/src/value_analysis.rs @@ -405,6 +405,9 @@ impl<'tcx> Map<'tcx> { if exclude.contains(local) { continue; } + if decl.ty.is_async_drop_in_place_coroutine(tcx) { + continue; + } // Create a place for the local. debug_assert!(self.locals[local].is_none()); diff --git a/compiler/rustc_mir_transform/messages.ftl b/compiler/rustc_mir_transform/messages.ftl index 5628f4c9381b3..a1264471a2df5 100644 --- a/compiler/rustc_mir_transform/messages.ftl +++ b/compiler/rustc_mir_transform/messages.ftl @@ -84,3 +84,4 @@ mir_transform_undefined_transmute = pointers cannot be transmuted to integers du .help = for more information, see https://doc.rust-lang.org/std/mem/fn.transmute.html mir_transform_unknown_pass_name = MIR pass `{$name}` is unknown and will be ignored +mir_transform_unnecessary_transmute = unnecessary transmute diff --git a/compiler/rustc_mir_transform/src/add_moves_for_packed_drops.rs b/compiler/rustc_mir_transform/src/add_moves_for_packed_drops.rs index b33326cb873df..a414d120e68b5 100644 --- a/compiler/rustc_mir_transform/src/add_moves_for_packed_drops.rs +++ b/compiler/rustc_mir_transform/src/add_moves_for_packed_drops.rs @@ -83,7 +83,9 @@ fn add_move_for_packed_drop<'tcx>( is_cleanup: bool, ) { debug!("add_move_for_packed_drop({:?} @ {:?})", terminator, loc); - let TerminatorKind::Drop { ref place, target, unwind, replace } = terminator.kind else { + let TerminatorKind::Drop { ref place, target, unwind, replace, drop, async_fut } = + terminator.kind + else { unreachable!(); }; @@ -106,6 +108,8 @@ fn add_move_for_packed_drop<'tcx>( target: storage_dead_block, unwind, replace, + drop, + async_fut, }, ); } diff --git a/compiler/rustc_mir_transform/src/check_alignment.rs b/compiler/rustc_mir_transform/src/check_alignment.rs index b70cca1484070..8f88613b79f35 100644 --- a/compiler/rustc_mir_transform/src/check_alignment.rs +++ b/compiler/rustc_mir_transform/src/check_alignment.rs @@ -1,3 +1,4 @@ +use rustc_abi::Align; use rustc_index::IndexVec; use rustc_middle::mir::interpret::Scalar; use rustc_middle::mir::visit::PlaceContext; @@ -5,16 +6,12 @@ use rustc_middle::mir::*; use rustc_middle::ty::{Ty, TyCtxt}; use rustc_session::Session; -use crate::check_pointers::{BorrowCheckMode, PointerCheck, check_pointers}; +use crate::check_pointers::{BorrowedFieldProjectionMode, PointerCheck, check_pointers}; pub(super) struct CheckAlignment; impl<'tcx> crate::MirPass<'tcx> for CheckAlignment { fn is_enabled(&self, sess: &Session) -> bool { - // FIXME(#112480) MSVC and rustc disagree on minimum stack alignment on x86 Windows - if sess.target.llvm_target == "i686-pc-windows-msvc" { - return false; - } sess.ub_checks() } @@ -22,15 +19,15 @@ impl<'tcx> crate::MirPass<'tcx> for CheckAlignment { // Skip trivially aligned place types. let excluded_pointees = [tcx.types.bool, tcx.types.i8, tcx.types.u8]; - // We have to exclude borrows here: in `&x.field`, the exact - // requirement is that the final reference must be aligned, but - // `check_pointers` would check that `x` is aligned, which would be wrong. + // When checking the alignment of references to field projections (`&(*ptr).a`), + // we need to make sure that the reference is aligned according to the field type + // and not to the pointer type. check_pointers( tcx, body, &excluded_pointees, insert_alignment_check, - BorrowCheckMode::ExcludeBorrows, + BorrowedFieldProjectionMode::FollowProjections, ); } @@ -87,6 +84,33 @@ fn insert_alignment_check<'tcx>( ))), }); + // If this target does not have reliable alignment, further limit the mask by anding it with + // the mask for the highest reliable alignment. + #[allow(irrefutable_let_patterns)] + if let max_align = tcx.sess.target.max_reliable_alignment() + && max_align < Align::MAX + { + let max_mask = max_align.bytes() - 1; + let max_mask = Operand::Constant(Box::new(ConstOperand { + span: source_info.span, + user_ty: None, + const_: Const::Val( + ConstValue::Scalar(Scalar::from_target_usize(max_mask, &tcx)), + tcx.types.usize, + ), + })); + stmts.push(Statement { + source_info, + kind: StatementKind::Assign(Box::new(( + alignment_mask, + Rvalue::BinaryOp( + BinOp::BitAnd, + Box::new((Operand::Copy(alignment_mask), max_mask)), + ), + ))), + }); + } + // BitAnd the alignment mask with the pointer let alignment_bits = local_decls.push(LocalDecl::with_source_info(tcx.types.usize, source_info)).into(); diff --git a/compiler/rustc_mir_transform/src/check_call_recursion.rs b/compiler/rustc_mir_transform/src/check_call_recursion.rs index e49723a6c39db..cace4cd6bba56 100644 --- a/compiler/rustc_mir_transform/src/check_call_recursion.rs +++ b/compiler/rustc_mir_transform/src/check_call_recursion.rs @@ -3,6 +3,7 @@ use std::ops::ControlFlow; use rustc_data_structures::graph::iterate::{ NodeStatus, TriColorDepthFirstSearch, TriColorVisitor, }; +use rustc_hir::LangItem; use rustc_hir::def::DefKind; use rustc_middle::mir::{self, BasicBlock, BasicBlocks, Body, Terminator, TerminatorKind}; use rustc_middle::ty::{self, GenericArg, GenericArgs, Instance, Ty, TyCtxt}; @@ -44,8 +45,7 @@ impl<'tcx> MirLint<'tcx> for CheckDropRecursion { if let DefKind::AssocFn = tcx.def_kind(def_id) && let Some(trait_ref) = tcx.impl_of_method(def_id.to_def_id()).and_then(|def_id| tcx.impl_trait_ref(def_id)) - && let Some(drop_trait) = tcx.lang_items().drop_trait() - && drop_trait == trait_ref.instantiate_identity().def_id + && tcx.is_lang_item(trait_ref.instantiate_identity().def_id, LangItem::Drop) // avoid erroneous `Drop` impls from causing ICEs below && let sig = tcx.fn_sig(def_id).instantiate_identity() && sig.inputs().skip_binder().len() == 1 diff --git a/compiler/rustc_mir_transform/src/check_const_item_mutation.rs b/compiler/rustc_mir_transform/src/check_const_item_mutation.rs index ceea72c6755a0..375db17fb73a0 100644 --- a/compiler/rustc_mir_transform/src/check_const_item_mutation.rs +++ b/compiler/rustc_mir_transform/src/check_const_item_mutation.rs @@ -53,9 +53,13 @@ impl<'tcx> ConstMutationChecker<'_, 'tcx> { // // #[const_mutation_allowed] // pub const LOG: Log = Log { msg: "" }; - match self.tcx.calculate_dtor(def_id, |_, _| Ok(())) { - Some(_) => None, - None => Some(def_id), + // FIXME: this should not be checking for `Drop` impls, + // but whether it or any field has a Drop impl (`needs_drop`) + // as fields' Drop impls may make this observable, too. + match self.tcx.type_of(def_id).skip_binder().ty_adt_def().map(|adt| adt.has_dtor(self.tcx)) + { + Some(true) => None, + Some(false) | None => Some(def_id), } } diff --git a/compiler/rustc_mir_transform/src/check_null.rs b/compiler/rustc_mir_transform/src/check_null.rs index 543e1845e6558..ad74e335bd9eb 100644 --- a/compiler/rustc_mir_transform/src/check_null.rs +++ b/compiler/rustc_mir_transform/src/check_null.rs @@ -4,7 +4,7 @@ use rustc_middle::mir::*; use rustc_middle::ty::{Ty, TyCtxt}; use rustc_session::Session; -use crate::check_pointers::{BorrowCheckMode, PointerCheck, check_pointers}; +use crate::check_pointers::{BorrowedFieldProjectionMode, PointerCheck, check_pointers}; pub(super) struct CheckNull; @@ -14,7 +14,13 @@ impl<'tcx> crate::MirPass<'tcx> for CheckNull { } fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) { - check_pointers(tcx, body, &[], insert_null_check, BorrowCheckMode::IncludeBorrows); + check_pointers( + tcx, + body, + &[], + insert_null_check, + BorrowedFieldProjectionMode::NoFollowProjections, + ); } fn is_required(&self) -> bool { diff --git a/compiler/rustc_mir_transform/src/check_pointers.rs b/compiler/rustc_mir_transform/src/check_pointers.rs index 2d04b62193585..bf94f1aad24b3 100644 --- a/compiler/rustc_mir_transform/src/check_pointers.rs +++ b/compiler/rustc_mir_transform/src/check_pointers.rs @@ -12,13 +12,13 @@ pub(crate) struct PointerCheck<'tcx> { pub(crate) assert_kind: Box>>, } -/// Indicates whether we insert the checks for borrow places of a raw pointer. -/// Concretely places with [MutatingUseContext::Borrow] or -/// [NonMutatingUseContext::SharedBorrow]. +/// When checking for borrows of field projections (`&(*ptr).a`), we might want +/// to check for the field type (type of `.a` in the example). This enum defines +/// the variations (pass the pointer [Ty] or the field [Ty]). #[derive(Copy, Clone)] -pub(crate) enum BorrowCheckMode { - IncludeBorrows, - ExcludeBorrows, +pub(crate) enum BorrowedFieldProjectionMode { + FollowProjections, + NoFollowProjections, } /// Utility for adding a check for read/write on every sized, raw pointer. @@ -27,8 +27,8 @@ pub(crate) enum BorrowCheckMode { /// new basic block directly before the pointer access. (Read/write accesses /// are determined by the `PlaceContext` of the MIR visitor.) Then calls /// `on_finding` to insert the actual logic for a pointer check (e.g. check for -/// alignment). A check can choose to be inserted for (mutable) borrows of -/// raw pointers via the `borrow_check_mode` parameter. +/// alignment). A check can choose to follow borrows of field projections via +/// the `field_projection_mode` parameter. /// /// This utility takes care of the right order of blocks, the only thing a /// caller must do in `on_finding` is: @@ -45,7 +45,7 @@ pub(crate) fn check_pointers<'tcx, F>( body: &mut Body<'tcx>, excluded_pointees: &[Ty<'tcx>], on_finding: F, - borrow_check_mode: BorrowCheckMode, + field_projection_mode: BorrowedFieldProjectionMode, ) where F: Fn( /* tcx: */ TyCtxt<'tcx>, @@ -82,7 +82,7 @@ pub(crate) fn check_pointers<'tcx, F>( local_decls, typing_env, excluded_pointees, - borrow_check_mode, + field_projection_mode, ); finder.visit_statement(statement, location); @@ -128,7 +128,7 @@ struct PointerFinder<'a, 'tcx> { typing_env: ty::TypingEnv<'tcx>, pointers: Vec<(Place<'tcx>, Ty<'tcx>, PlaceContext)>, excluded_pointees: &'a [Ty<'tcx>], - borrow_check_mode: BorrowCheckMode, + field_projection_mode: BorrowedFieldProjectionMode, } impl<'a, 'tcx> PointerFinder<'a, 'tcx> { @@ -137,7 +137,7 @@ impl<'a, 'tcx> PointerFinder<'a, 'tcx> { local_decls: &'a mut LocalDecls<'tcx>, typing_env: ty::TypingEnv<'tcx>, excluded_pointees: &'a [Ty<'tcx>], - borrow_check_mode: BorrowCheckMode, + field_projection_mode: BorrowedFieldProjectionMode, ) -> Self { PointerFinder { tcx, @@ -145,7 +145,7 @@ impl<'a, 'tcx> PointerFinder<'a, 'tcx> { typing_env, excluded_pointees, pointers: Vec::new(), - borrow_check_mode, + field_projection_mode, } } @@ -163,15 +163,14 @@ impl<'a, 'tcx> PointerFinder<'a, 'tcx> { MutatingUseContext::Store | MutatingUseContext::Call | MutatingUseContext::Yield - | MutatingUseContext::Drop, + | MutatingUseContext::Drop + | MutatingUseContext::Borrow, ) => true, PlaceContext::NonMutatingUse( - NonMutatingUseContext::Copy | NonMutatingUseContext::Move, + NonMutatingUseContext::Copy + | NonMutatingUseContext::Move + | NonMutatingUseContext::SharedBorrow, ) => true, - PlaceContext::MutatingUse(MutatingUseContext::Borrow) - | PlaceContext::NonMutatingUse(NonMutatingUseContext::SharedBorrow) => { - matches!(self.borrow_check_mode, BorrowCheckMode::IncludeBorrows) - } _ => false, } } @@ -183,19 +182,29 @@ impl<'a, 'tcx> Visitor<'tcx> for PointerFinder<'a, 'tcx> { return; } - // Since Deref projections must come first and only once, the pointer for an indirect place - // is the Local that the Place is based on. + // Get the place and type we visit. let pointer = Place::from(place.local); - let pointer_ty = self.local_decls[place.local].ty; + let pointer_ty = pointer.ty(self.local_decls, self.tcx).ty; // We only want to check places based on raw pointers - if !pointer_ty.is_raw_ptr() { + let &ty::RawPtr(mut pointee_ty, _) = pointer_ty.kind() else { trace!("Indirect, but not based on an raw ptr, not checking {:?}", place); return; + }; + + // If we see a borrow of a field projection, we want to pass the field type to the + // check and not the pointee type. + if matches!(self.field_projection_mode, BorrowedFieldProjectionMode::FollowProjections) + && matches!( + context, + PlaceContext::NonMutatingUse(NonMutatingUseContext::SharedBorrow) + | PlaceContext::MutatingUse(MutatingUseContext::Borrow) + ) + { + // Naturally, the field type is type of the initial place we look at. + pointee_ty = place.ty(self.local_decls, self.tcx).ty; } - let pointee_ty = - pointer_ty.builtin_deref(true).expect("no builtin_deref for an raw pointer"); // Ideally we'd support this in the future, but for now we are limited to sized types. if !pointee_ty.is_sized(self.tcx, self.typing_env) { trace!("Raw pointer, but pointee is not known to be sized: {:?}", pointer_ty); @@ -207,6 +216,7 @@ impl<'a, 'tcx> Visitor<'tcx> for PointerFinder<'a, 'tcx> { ty::Array(ty, _) => *ty, _ => pointee_ty, }; + // Check if we excluded this pointee type from the check. if self.excluded_pointees.contains(&element_ty) { trace!("Skipping pointer for type: {:?}", pointee_ty); return; diff --git a/compiler/rustc_mir_transform/src/check_undefined_transmutes.rs b/compiler/rustc_mir_transform/src/check_undefined_transmutes.rs index ed3b1ae4f42f1..daddb5dedbcf9 100644 --- a/compiler/rustc_mir_transform/src/check_undefined_transmutes.rs +++ b/compiler/rustc_mir_transform/src/check_undefined_transmutes.rs @@ -42,7 +42,7 @@ impl<'a, 'tcx> UndefinedTransmutesChecker<'a, 'tcx> { if self.tcx.is_const_fn(def_id) || matches!( self.tcx.opt_associated_item(def_id), - Some(AssocItem { kind: AssocKind::Const, .. }) + Some(AssocItem { kind: AssocKind::Const { .. }, .. }) ) { let fn_sig = function.ty(self.body, self.tcx).fn_sig(self.tcx).skip_binder(); diff --git a/compiler/rustc_mir_transform/src/check_unnecessary_transmutes.rs b/compiler/rustc_mir_transform/src/check_unnecessary_transmutes.rs new file mode 100644 index 0000000000000..8da17a056e31b --- /dev/null +++ b/compiler/rustc_mir_transform/src/check_unnecessary_transmutes.rs @@ -0,0 +1,124 @@ +use rustc_middle::mir::visit::Visitor; +use rustc_middle::mir::{Body, Location, Operand, Terminator, TerminatorKind}; +use rustc_middle::ty::*; +use rustc_session::lint::builtin::UNNECESSARY_TRANSMUTES; +use rustc_span::source_map::Spanned; +use rustc_span::{Span, sym}; + +use crate::errors::UnnecessaryTransmute as Error; + +/// Check for transmutes that overlap with stdlib methods. +/// For example, transmuting `[u8; 4]` to `u32`. +pub(super) struct CheckUnnecessaryTransmutes; + +impl<'tcx> crate::MirLint<'tcx> for CheckUnnecessaryTransmutes { + fn run_lint(&self, tcx: TyCtxt<'tcx>, body: &Body<'tcx>) { + let mut checker = UnnecessaryTransmuteChecker { body, tcx }; + checker.visit_body(body); + } +} + +struct UnnecessaryTransmuteChecker<'a, 'tcx> { + body: &'a Body<'tcx>, + tcx: TyCtxt<'tcx>, +} + +impl<'a, 'tcx> UnnecessaryTransmuteChecker<'a, 'tcx> { + fn is_unnecessary_transmute( + &self, + function: &Operand<'tcx>, + arg: String, + span: Span, + ) -> Option { + let fn_sig = function.ty(self.body, self.tcx).fn_sig(self.tcx).skip_binder(); + let [input] = fn_sig.inputs() else { return None }; + + let err = |sugg| Error { span, sugg, help: None }; + + Some(match (input.kind(), fn_sig.output().kind()) { + // dont check the length; transmute does that for us. + // [u8; _] => primitive + (Array(t, _), Uint(_) | Float(_) | Int(_)) if *t.kind() == Uint(UintTy::U8) => Error { + sugg: format!("{}::from_ne_bytes({arg})", fn_sig.output()), + help: Some( + "there's also `from_le_bytes` and `from_be_bytes` if you expect a particular byte order", + ), + span, + }, + // primitive => [u8; _] + (Uint(_) | Float(_) | Int(_), Array(t, _)) if *t.kind() == Uint(UintTy::U8) => Error { + sugg: format!("{input}::to_ne_bytes({arg})"), + help: Some( + "there's also `to_le_bytes` and `to_be_bytes` if you expect a particular byte order", + ), + span, + }, + // char → u32 + (Char, Uint(UintTy::U32)) => err(format!("u32::from({arg})")), + // char (→ u32) → i32 + (Char, Int(IntTy::I32)) => err(format!("u32::from({arg}).cast_signed()")), + // u32 → char + (Uint(UintTy::U32), Char) => Error { + sugg: format!("char::from_u32_unchecked({arg})"), + help: Some("consider `char::from_u32(…).unwrap()`"), + span, + }, + // i32 → char + (Int(IntTy::I32), Char) => Error { + sugg: format!("char::from_u32_unchecked(i32::cast_unsigned({arg}))"), + help: Some("consider `char::from_u32(i32::cast_unsigned(…)).unwrap()`"), + span, + }, + // uNN → iNN + (Uint(ty), Int(_)) => err(format!("{}::cast_signed({arg})", ty.name_str())), + // iNN → uNN + (Int(ty), Uint(_)) => err(format!("{}::cast_unsigned({arg})", ty.name_str())), + // fNN → xsize + (Float(ty), Uint(UintTy::Usize)) => { + err(format!("{}::to_bits({arg}) as usize", ty.name_str())) + } + (Float(ty), Int(IntTy::Isize)) => { + err(format!("{}::to_bits({arg}) as isize", ty.name_str())) + } + // fNN (→ uNN) → iNN + (Float(ty), Int(..)) => err(format!("{}::to_bits({arg}).cast_signed()", ty.name_str())), + // fNN → uNN + (Float(ty), Uint(..)) => err(format!("{}::to_bits({arg})", ty.name_str())), + // xsize → fNN + (Uint(UintTy::Usize) | Int(IntTy::Isize), Float(ty)) => { + err(format!("{}::from_bits({arg} as _)", ty.name_str(),)) + } + // iNN (→ uNN) → fNN + (Int(int_ty), Float(ty)) => err(format!( + "{}::from_bits({}::cast_unsigned({arg}))", + ty.name_str(), + int_ty.name_str() + )), + // uNN → fNN + (Uint(_), Float(ty)) => err(format!("{}::from_bits({arg})", ty.name_str())), + // bool → { x8 } + (Bool, Int(..) | Uint(..)) => err(format!("({arg}) as {}", fn_sig.output())), + // u8 → bool + (Uint(_), Bool) => err(format!("({arg} == 1)")), + _ => return None, + }) + } +} + +impl<'tcx> Visitor<'tcx> for UnnecessaryTransmuteChecker<'_, 'tcx> { + // Check each block's terminator for calls to pointer to integer transmutes + // in const functions or associated constants and emit a lint. + fn visit_terminator(&mut self, terminator: &Terminator<'tcx>, location: Location) { + if let TerminatorKind::Call { func, args, .. } = &terminator.kind + && let [Spanned { span: arg, .. }] = **args + && let Some((func_def_id, _)) = func.const_fn_def() + && self.tcx.is_intrinsic(func_def_id, sym::transmute) + && let span = self.body.source_info(location).span + && let Ok(snippet) = self.tcx.sess.source_map().span_to_snippet(arg) + && let Some(lint) = self.is_unnecessary_transmute(func, snippet, span) + && let Some(hir_id) = terminator.source_info.scope.lint_root(&self.body.source_scopes) + { + self.tcx.emit_node_span_lint(UNNECESSARY_TRANSMUTES, hir_id, span, lint); + } + } +} diff --git a/compiler/rustc_mir_transform/src/cleanup_post_borrowck.rs b/compiler/rustc_mir_transform/src/cleanup_post_borrowck.rs index cb84401985735..4be67b873f737 100644 --- a/compiler/rustc_mir_transform/src/cleanup_post_borrowck.rs +++ b/compiler/rustc_mir_transform/src/cleanup_post_borrowck.rs @@ -35,7 +35,8 @@ impl<'tcx> crate::MirPass<'tcx> for CleanupPostBorrowck { // MIR building, and are not needed after InstrumentCoverage. CoverageKind::BlockMarker { .. } | CoverageKind::SpanMarker { .. }, ) - | StatementKind::FakeRead(..) => statement.make_nop(), + | StatementKind::FakeRead(..) + | StatementKind::BackwardIncompatibleDropHint { .. } => statement.make_nop(), StatementKind::Assign(box ( _, Rvalue::Cast( diff --git a/compiler/rustc_mir_transform/src/coroutine.rs b/compiler/rustc_mir_transform/src/coroutine.rs index 04d96f117072f..cddb2f8477858 100644 --- a/compiler/rustc_mir_transform/src/coroutine.rs +++ b/compiler/rustc_mir_transform/src/coroutine.rs @@ -51,9 +51,15 @@ //! Otherwise it drops all the values in scope at the last suspension point. mod by_move_body; +mod drop; use std::{iter, ops}; pub(super) use by_move_body::coroutine_by_move_body_def_id; +use drop::{ + cleanup_async_drops, create_coroutine_drop_shim, create_coroutine_drop_shim_async, + create_coroutine_drop_shim_proxy_async, elaborate_coroutine_drops, expand_async_drops, + has_expandable_async_drops, insert_clean_drop, +}; use rustc_abi::{FieldIdx, VariantIdx}; use rustc_data_structures::fx::FxHashSet; use rustc_errors::pluralize; @@ -64,6 +70,7 @@ use rustc_index::bit_set::{BitMatrix, DenseBitSet, GrowableBitSet}; use rustc_index::{Idx, IndexVec}; use rustc_middle::mir::visit::{MutVisitor, PlaceContext, Visitor}; use rustc_middle::mir::*; +use rustc_middle::ty::util::Discr; use rustc_middle::ty::{ self, CoroutineArgs, CoroutineArgsExt, GenericArgsRef, InstanceKind, Ty, TyCtxt, TypingMode, }; @@ -72,9 +79,13 @@ use rustc_mir_dataflow::impls::{ MaybeBorrowedLocals, MaybeLiveLocals, MaybeRequiresStorage, MaybeStorageLive, always_storage_live_locals, }; -use rustc_mir_dataflow::{Analysis, Results, ResultsVisitor}; +use rustc_mir_dataflow::{ + Analysis, Results, ResultsCursor, ResultsVisitor, visit_reachable_results, +}; use rustc_span::def_id::{DefId, LocalDefId}; -use rustc_span::{Span, sym}; +use rustc_span::source_map::dummy_spanned; +use rustc_span::symbol::sym; +use rustc_span::{DUMMY_SP, Span}; use rustc_target::spec::PanicStrategy; use rustc_trait_selection::error_reporting::InferCtxtErrorExt; use rustc_trait_selection::infer::TyCtxtInferExt as _; @@ -159,6 +170,7 @@ fn replace_base<'tcx>(place: &mut Place<'tcx>, new_base: Place<'tcx>, tcx: TyCtx } const SELF_ARG: Local = Local::from_u32(1); +const CTX_ARG: Local = Local::from_u32(2); /// A `yield` point in the coroutine. struct SuspensionPoint<'tcx> { @@ -203,7 +215,7 @@ struct TransformVisitor<'tcx> { impl<'tcx> TransformVisitor<'tcx> { fn insert_none_ret_block(&self, body: &mut Body<'tcx>) -> BasicBlock { - let block = BasicBlock::new(body.basic_blocks.len()); + let block = body.basic_blocks.next_index(); let source_info = SourceInfo::outermost(body.span); let none_value = match self.coroutine_kind { @@ -539,15 +551,15 @@ fn replace_local<'tcx>( /// The async lowering step and the type / lifetime inference / checking are /// still using the `ResumeTy` indirection for the time being, and that indirection /// is removed here. After this transform, the coroutine body only knows about `&mut Context<'_>`. -fn transform_async_context<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) { +fn transform_async_context<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) -> Ty<'tcx> { let context_mut_ref = Ty::new_task_context(tcx); // replace the type of the `resume` argument - replace_resume_ty_local(tcx, body, Local::new(2), context_mut_ref); + replace_resume_ty_local(tcx, body, CTX_ARG, context_mut_ref); let get_context_def_id = tcx.require_lang_item(LangItem::GetContext, None); - for bb in START_BLOCK..body.basic_blocks.next_index() { + for bb in body.basic_blocks.indices() { let bb_data = &body[bb]; if bb_data.is_cleanup { continue; @@ -556,11 +568,11 @@ fn transform_async_context<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) { match &bb_data.terminator().kind { TerminatorKind::Call { func, .. } => { let func_ty = func.ty(body, tcx); - if let ty::FnDef(def_id, _) = *func_ty.kind() { - if def_id == get_context_def_id { - let local = eliminate_get_context_call(&mut body[bb]); - replace_resume_ty_local(tcx, body, local, context_mut_ref); - } + if let ty::FnDef(def_id, _) = *func_ty.kind() + && def_id == get_context_def_id + { + let local = eliminate_get_context_call(&mut body[bb]); + replace_resume_ty_local(tcx, body, local, context_mut_ref); } } TerminatorKind::Yield { resume_arg, .. } => { @@ -569,6 +581,7 @@ fn transform_async_context<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) { _ => {} } } + context_mut_ref } fn eliminate_get_context_call<'tcx>(bb_data: &mut BasicBlockData<'tcx>) -> Local { @@ -669,18 +682,29 @@ fn locals_live_across_suspend_points<'tcx>( .iterate_to_fixpoint(tcx, body, None) .into_results_cursor(body); - // Calculate the MIR locals which have been previously - // borrowed (even if they are still active). - let borrowed_locals_results = - MaybeBorrowedLocals.iterate_to_fixpoint(tcx, body, Some("coroutine")); - - let mut borrowed_locals_cursor = borrowed_locals_results.clone().into_results_cursor(body); + // Calculate the MIR locals that have been previously borrowed (even if they are still active). + let borrowed_locals = MaybeBorrowedLocals.iterate_to_fixpoint(tcx, body, Some("coroutine")); + let mut borrowed_locals_analysis1 = borrowed_locals.analysis; + let mut borrowed_locals_analysis2 = borrowed_locals_analysis1.clone(); // trivial + let borrowed_locals_cursor1 = ResultsCursor::new_borrowing( + body, + &mut borrowed_locals_analysis1, + &borrowed_locals.results, + ); + let mut borrowed_locals_cursor2 = ResultsCursor::new_borrowing( + body, + &mut borrowed_locals_analysis2, + &borrowed_locals.results, + ); // Calculate the MIR locals that we need to keep storage around for. - let mut requires_storage_results = - MaybeRequiresStorage::new(borrowed_locals_results.into_results_cursor(body)) - .iterate_to_fixpoint(tcx, body, None); - let mut requires_storage_cursor = requires_storage_results.as_results_cursor(body); + let mut requires_storage = + MaybeRequiresStorage::new(borrowed_locals_cursor1).iterate_to_fixpoint(tcx, body, None); + let mut requires_storage_cursor = ResultsCursor::new_borrowing( + body, + &mut requires_storage.analysis, + &requires_storage.results, + ); // Calculate the liveness of MIR locals ignoring borrows. let mut liveness = @@ -709,8 +733,8 @@ fn locals_live_across_suspend_points<'tcx>( // If a borrow is converted to a raw reference, we must also assume that it lives // forever. Note that the final liveness is still bounded by the storage liveness // of the local, which happens using the `intersect` operation below. - borrowed_locals_cursor.seek_before_primary_effect(loc); - live_locals.union(borrowed_locals_cursor.get()); + borrowed_locals_cursor2.seek_before_primary_effect(loc); + live_locals.union(borrowed_locals_cursor2.get()); } // Store the storage liveness for later use so we can restore the state @@ -752,7 +776,8 @@ fn locals_live_across_suspend_points<'tcx>( body, &saved_locals, always_live_locals.clone(), - requires_storage_results, + &mut requires_storage.analysis, + &requires_storage.results, ); LivenessInfo { @@ -817,7 +842,8 @@ fn compute_storage_conflicts<'mir, 'tcx>( body: &'mir Body<'tcx>, saved_locals: &'mir CoroutineSavedLocals, always_live_locals: DenseBitSet, - mut requires_storage: Results<'tcx, MaybeRequiresStorage<'mir, 'tcx>>, + analysis: &mut MaybeRequiresStorage<'mir, 'tcx>, + results: &Results>, ) -> BitMatrix { assert_eq!(body.local_decls.len(), saved_locals.domain_size()); @@ -837,7 +863,7 @@ fn compute_storage_conflicts<'mir, 'tcx>( eligible_storage_live: DenseBitSet::new_empty(body.local_decls.len()), }; - requires_storage.visit_reachable_with(body, &mut visitor); + visit_reachable_results(body, analysis, results, &mut visitor); let local_conflicts = visitor.local_conflicts; @@ -875,14 +901,14 @@ struct StorageConflictVisitor<'a, 'tcx> { eligible_storage_live: DenseBitSet, } -impl<'a, 'tcx> ResultsVisitor<'a, 'tcx, MaybeRequiresStorage<'a, 'tcx>> +impl<'a, 'tcx> ResultsVisitor<'tcx, MaybeRequiresStorage<'a, 'tcx>> for StorageConflictVisitor<'a, 'tcx> { fn visit_after_early_statement_effect( &mut self, - _results: &mut Results<'tcx, MaybeRequiresStorage<'a, 'tcx>>, + _analysis: &mut MaybeRequiresStorage<'a, 'tcx>, state: &DenseBitSet, - _statement: &'a Statement<'tcx>, + _statement: &Statement<'tcx>, loc: Location, ) { self.apply_state(state, loc); @@ -890,9 +916,9 @@ impl<'a, 'tcx> ResultsVisitor<'a, 'tcx, MaybeRequiresStorage<'a, 'tcx>> fn visit_after_early_terminator_effect( &mut self, - _results: &mut Results<'tcx, MaybeRequiresStorage<'a, 'tcx>>, + _analysis: &mut MaybeRequiresStorage<'a, 'tcx>, state: &DenseBitSet, - _terminator: &'a Terminator<'tcx>, + _terminator: &Terminator<'tcx>, loc: Location, ) { self.apply_state(state, loc); @@ -1036,9 +1062,8 @@ fn insert_switch<'tcx>( body: &mut Body<'tcx>, cases: Vec<(usize, BasicBlock)>, transform: &TransformVisitor<'tcx>, - default: TerminatorKind<'tcx>, + default_block: BasicBlock, ) { - let default_block = insert_term_block(body, default); let (assign, discr) = transform.get_discr(body); let switch_targets = SwitchTargets::new(cases.iter().map(|(i, bb)| ((*i) as u128, *bb)), default_block); @@ -1054,129 +1079,44 @@ fn insert_switch<'tcx>( }, ); - let blocks = body.basic_blocks_mut().iter_mut(); - - for target in blocks.flat_map(|b| b.terminator_mut().successors_mut()) { - *target = BasicBlock::new(target.index() + 1); - } -} - -fn elaborate_coroutine_drops<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) { - use crate::elaborate_drop::{Unwind, elaborate_drop}; - use crate::patch::MirPatch; - use crate::shim::DropShimElaborator; - - // Note that `elaborate_drops` only drops the upvars of a coroutine, and - // this is ok because `open_drop` can only be reached within that own - // coroutine's resume function. - let typing_env = body.typing_env(tcx); - - let mut elaborator = DropShimElaborator { body, patch: MirPatch::new(body), tcx, typing_env }; - - for (block, block_data) in body.basic_blocks.iter_enumerated() { - let (target, unwind, source_info) = match block_data.terminator() { - Terminator { - source_info, - kind: TerminatorKind::Drop { place, target, unwind, replace: _ }, - } => { - if let Some(local) = place.as_local() - && local == SELF_ARG - { - (target, unwind, source_info) - } else { - continue; - } - } - _ => continue, - }; - let unwind = if block_data.is_cleanup { - Unwind::InCleanup - } else { - Unwind::To(match *unwind { - UnwindAction::Cleanup(tgt) => tgt, - UnwindAction::Continue => elaborator.patch.resume_block(), - UnwindAction::Unreachable => elaborator.patch.unreachable_cleanup_block(), - UnwindAction::Terminate(reason) => elaborator.patch.terminate_block(reason), - }) - }; - elaborate_drop( - &mut elaborator, - *source_info, - Place::from(SELF_ARG), - (), - *target, - unwind, - block, - ); + for b in body.basic_blocks_mut().iter_mut() { + b.terminator_mut().successors_mut(|target| *target += 1); } - elaborator.patch.apply(body); } -fn create_coroutine_drop_shim<'tcx>( - tcx: TyCtxt<'tcx>, - transform: &TransformVisitor<'tcx>, - coroutine_ty: Ty<'tcx>, - body: &Body<'tcx>, - drop_clean: BasicBlock, -) -> Body<'tcx> { - let mut body = body.clone(); - // Take the coroutine info out of the body, since the drop shim is - // not a coroutine body itself; it just has its drop built out of it. - let _ = body.coroutine.take(); - // Make sure the resume argument is not included here, since we're - // building a body for `drop_in_place`. - body.arg_count = 1; - +fn insert_term_block<'tcx>(body: &mut Body<'tcx>, kind: TerminatorKind<'tcx>) -> BasicBlock { let source_info = SourceInfo::outermost(body.span); + body.basic_blocks_mut().push(BasicBlockData { + statements: Vec::new(), + terminator: Some(Terminator { source_info, kind }), + is_cleanup: false, + }) +} - let mut cases = create_cases(&mut body, transform, Operation::Drop); - - cases.insert(0, (CoroutineArgs::UNRESUMED, drop_clean)); - - // The returned state and the poisoned state fall through to the default - // case which is just to return - - insert_switch(&mut body, cases, transform, TerminatorKind::Return); - - for block in body.basic_blocks_mut() { - let kind = &mut block.terminator_mut().kind; - if let TerminatorKind::CoroutineDrop = *kind { - *kind = TerminatorKind::Return; - } +fn return_poll_ready_assign<'tcx>(tcx: TyCtxt<'tcx>, source_info: SourceInfo) -> Statement<'tcx> { + // Poll::Ready(()) + let poll_def_id = tcx.require_lang_item(LangItem::Poll, None); + let args = tcx.mk_args(&[tcx.types.unit.into()]); + let val = Operand::Constant(Box::new(ConstOperand { + span: source_info.span, + user_ty: None, + const_: Const::zero_sized(tcx.types.unit), + })); + let ready_val = Rvalue::Aggregate( + Box::new(AggregateKind::Adt(poll_def_id, VariantIdx::from_usize(0), args, None, None)), + IndexVec::from_raw(vec![val]), + ); + Statement { + kind: StatementKind::Assign(Box::new((Place::return_place(), ready_val))), + source_info, } - - // Replace the return variable - body.local_decls[RETURN_PLACE] = LocalDecl::with_source_info(tcx.types.unit, source_info); - - make_coroutine_state_argument_indirect(tcx, &mut body); - - // Change the coroutine argument from &mut to *mut - body.local_decls[SELF_ARG] = - LocalDecl::with_source_info(Ty::new_mut_ptr(tcx, coroutine_ty), source_info); - - // Make sure we remove dead blocks to remove - // unrelated code from the resume part of the function - simplify::remove_dead_blocks(&mut body); - - // Update the body's def to become the drop glue. - let coroutine_instance = body.source.instance; - let drop_in_place = tcx.require_lang_item(LangItem::DropInPlace, None); - let drop_instance = InstanceKind::DropGlue(drop_in_place, Some(coroutine_ty)); - - // Temporary change MirSource to coroutine's instance so that dump_mir produces more sensible - // filename. - body.source.instance = coroutine_instance; - dump_mir(tcx, false, "coroutine_drop", &0, &body, |_, _| Ok(())); - body.source.instance = drop_instance; - - body } -fn insert_term_block<'tcx>(body: &mut Body<'tcx>, kind: TerminatorKind<'tcx>) -> BasicBlock { +fn insert_poll_ready_block<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) -> BasicBlock { let source_info = SourceInfo::outermost(body.span); body.basic_blocks_mut().push(BasicBlockData { - statements: Vec::new(), - terminator: Some(Terminator { source_info, kind }), + statements: [return_poll_ready_assign(tcx, source_info)].to_vec(), + terminator: Some(Terminator { source_info, kind: TerminatorKind::Return }), is_cleanup: false, }) } @@ -1186,7 +1126,7 @@ fn insert_panic_block<'tcx>( body: &mut Body<'tcx>, message: AssertMessage<'tcx>, ) -> BasicBlock { - let assert_block = BasicBlock::new(body.basic_blocks.len()); + let assert_block = body.basic_blocks.next_index(); let kind = TerminatorKind::Assert { cond: Operand::Constant(Box::new(ConstOperand { span: body.span, @@ -1209,14 +1149,8 @@ fn can_return<'tcx>(tcx: TyCtxt<'tcx>, body: &Body<'tcx>, typing_env: ty::Typing } // If there's a return terminator the function may return. - for block in body.basic_blocks.iter() { - if let TerminatorKind::Return = block.terminator().kind { - return true; - } - } - + body.basic_blocks.iter().any(|block| matches!(block.terminator().kind, TerminatorKind::Return)) // Otherwise the function can't return. - false } fn can_unwind<'tcx>(tcx: TyCtxt<'tcx>, body: &Body<'tcx>) -> bool { @@ -1262,45 +1196,50 @@ fn can_unwind<'tcx>(tcx: TyCtxt<'tcx>, body: &Body<'tcx>) -> bool { false } +// Poison the coroutine when it unwinds +fn generate_poison_block_and_redirect_unwinds_there<'tcx>( + transform: &TransformVisitor<'tcx>, + body: &mut Body<'tcx>, +) { + let source_info = SourceInfo::outermost(body.span); + let poison_block = body.basic_blocks_mut().push(BasicBlockData { + statements: vec![ + transform.set_discr(VariantIdx::new(CoroutineArgs::POISONED), source_info), + ], + terminator: Some(Terminator { source_info, kind: TerminatorKind::UnwindResume }), + is_cleanup: true, + }); + + for (idx, block) in body.basic_blocks_mut().iter_enumerated_mut() { + let source_info = block.terminator().source_info; + + if let TerminatorKind::UnwindResume = block.terminator().kind { + // An existing `Resume` terminator is redirected to jump to our dedicated + // "poisoning block" above. + if idx != poison_block { + *block.terminator_mut() = + Terminator { source_info, kind: TerminatorKind::Goto { target: poison_block } }; + } + } else if !block.is_cleanup + // Any terminators that *can* unwind but don't have an unwind target set are also + // pointed at our poisoning block (unless they're part of the cleanup path). + && let Some(unwind @ UnwindAction::Continue) = block.terminator_mut().unwind_mut() + { + *unwind = UnwindAction::Cleanup(poison_block); + } + } +} + fn create_coroutine_resume_function<'tcx>( tcx: TyCtxt<'tcx>, transform: TransformVisitor<'tcx>, body: &mut Body<'tcx>, can_return: bool, + can_unwind: bool, ) { - let can_unwind = can_unwind(tcx, body); - // Poison the coroutine when it unwinds if can_unwind { - let source_info = SourceInfo::outermost(body.span); - let poison_block = body.basic_blocks_mut().push(BasicBlockData { - statements: vec![ - transform.set_discr(VariantIdx::new(CoroutineArgs::POISONED), source_info), - ], - terminator: Some(Terminator { source_info, kind: TerminatorKind::UnwindResume }), - is_cleanup: true, - }); - - for (idx, block) in body.basic_blocks_mut().iter_enumerated_mut() { - let source_info = block.terminator().source_info; - - if let TerminatorKind::UnwindResume = block.terminator().kind { - // An existing `Resume` terminator is redirected to jump to our dedicated - // "poisoning block" above. - if idx != poison_block { - *block.terminator_mut() = Terminator { - source_info, - kind: TerminatorKind::Goto { target: poison_block }, - }; - } - } else if !block.is_cleanup { - // Any terminators that *can* unwind but don't have an unwind target set are also - // pointed at our poisoning block (unless they're part of the cleanup path). - if let Some(unwind @ UnwindAction::Continue) = block.terminator_mut().unwind_mut() { - *unwind = UnwindAction::Cleanup(poison_block); - } - } - } + generate_poison_block_and_redirect_unwinds_there(&transform, body); } let mut cases = create_cases(body, &transform, Operation::Resume); @@ -1325,7 +1264,13 @@ fn create_coroutine_resume_function<'tcx>( let block = match transform.coroutine_kind { CoroutineKind::Desugared(CoroutineDesugaring::Async, _) | CoroutineKind::Coroutine(_) => { - insert_panic_block(tcx, body, ResumedAfterReturn(transform.coroutine_kind)) + // For `async_drop_in_place::{closure}` we just keep return Poll::Ready, + // because async drop of such coroutine keeps polling original coroutine + if tcx.is_async_drop_in_place_coroutine(body.source.def_id()) { + insert_poll_ready_block(tcx, body) + } else { + insert_panic_block(tcx, body, ResumedAfterReturn(transform.coroutine_kind)) + } } CoroutineKind::Desugared(CoroutineDesugaring::AsyncGen, _) | CoroutineKind::Desugared(CoroutineDesugaring::Gen, _) => { @@ -1335,17 +1280,20 @@ fn create_coroutine_resume_function<'tcx>( cases.insert(1, (CoroutineArgs::RETURNED, block)); } - insert_switch(body, cases, &transform, TerminatorKind::Unreachable); + let default_block = insert_term_block(body, TerminatorKind::Unreachable); + insert_switch(body, cases, &transform, default_block); make_coroutine_state_argument_indirect(tcx, body); match transform.coroutine_kind { + CoroutineKind::Coroutine(_) + | CoroutineKind::Desugared(CoroutineDesugaring::Async | CoroutineDesugaring::AsyncGen, _) => + { + make_coroutine_state_argument_pinned(tcx, body); + } // Iterator::next doesn't accept a pinned argument, // unlike for all other coroutine kinds. CoroutineKind::Desugared(CoroutineDesugaring::Gen, _) => {} - _ => { - make_coroutine_state_argument_pinned(tcx, body); - } } // Make sure we remove dead blocks to remove @@ -1357,25 +1305,6 @@ fn create_coroutine_resume_function<'tcx>( dump_mir(tcx, false, "coroutine_resume", &0, body, |_, _| Ok(())); } -fn insert_clean_drop(body: &mut Body<'_>) -> BasicBlock { - let return_block = insert_term_block(body, TerminatorKind::Return); - - let term = TerminatorKind::Drop { - place: Place::from(SELF_ARG), - target: return_block, - unwind: UnwindAction::Continue, - replace: false, - }; - let source_info = SourceInfo::outermost(body.span); - - // Create a block to destroy an unresumed coroutines. This can only destroy upvars. - body.basic_blocks_mut().push(BasicBlockData { - statements: Vec::new(), - terminator: Some(Terminator { source_info, kind: term }), - is_cleanup: false, - }) -} - /// An operation that can be performed on a coroutine. #[derive(PartialEq, Copy, Clone)] enum Operation { @@ -1408,8 +1337,7 @@ fn create_cases<'tcx>( let mut statements = Vec::new(); // Create StorageLive instructions for locals with live storage - for i in 0..(body.local_decls.len()) { - let l = Local::new(i); + for l in body.local_decls.indices() { let needs_storage_live = point.storage_liveness.contains(l) && !transform.remap.contains(l) && !transform.always_live_locals.contains(l); @@ -1421,7 +1349,7 @@ fn create_cases<'tcx>( if operation == Operation::Resume { // Move the resume argument to the destination place of the `Yield` terminator - let resume_arg = Local::new(2); // 0 = return, 1 = self + let resume_arg = CTX_ARG; statements.push(Statement { source_info, kind: StatementKind::Assign(Box::new(( @@ -1528,22 +1456,19 @@ impl<'tcx> crate::MirPass<'tcx> for StateTransform { }; let old_ret_ty = body.return_ty(); - assert!(body.coroutine_drop().is_none()); + assert!(body.coroutine_drop().is_none() && body.coroutine_drop_async().is_none()); + + dump_mir(tcx, false, "coroutine_before", &0, body, |_, _| Ok(())); // The first argument is the coroutine type passed by value let coroutine_ty = body.local_decls.raw[1].ty; let coroutine_kind = body.coroutine_kind().unwrap(); // Get the discriminant type and args which typeck computed - let (discr_ty, movable) = match *coroutine_ty.kind() { - ty::Coroutine(_, args) => { - let args = args.as_coroutine(); - (args.discr_ty(tcx), coroutine_kind.movability() == hir::Movability::Movable) - } - _ => { - tcx.dcx().span_bug(body.span, format!("unexpected coroutine type {coroutine_ty}")); - } + let ty::Coroutine(_, args) = coroutine_ty.kind() else { + tcx.dcx().span_bug(body.span, format!("unexpected coroutine type {coroutine_ty}")); }; + let discr_ty = args.as_coroutine().discr_ty(tcx); let new_ret_ty = match coroutine_kind { CoroutineKind::Desugared(CoroutineDesugaring::Async, _) => { @@ -1577,19 +1502,32 @@ impl<'tcx> crate::MirPass<'tcx> for StateTransform { // RETURN_PLACE then is a fresh unused local with type ret_ty. let old_ret_local = replace_local(RETURN_PLACE, new_ret_ty, body, tcx); + // We need to insert clean drop for unresumed state and perform drop elaboration + // (finally in open_drop_for_tuple) before async drop expansion. + // Async drops, produced by this drop elaboration, will be expanded, + // and corresponding futures kept in layout. + let has_async_drops = matches!( + coroutine_kind, + CoroutineKind::Desugared(CoroutineDesugaring::Async | CoroutineDesugaring::AsyncGen, _) + ) && has_expandable_async_drops(tcx, body, coroutine_ty); + // Replace all occurrences of `ResumeTy` with `&mut Context<'_>` within async bodies. if matches!( coroutine_kind, CoroutineKind::Desugared(CoroutineDesugaring::Async | CoroutineDesugaring::AsyncGen, _) ) { - transform_async_context(tcx, body); + let context_mut_ref = transform_async_context(tcx, body); + expand_async_drops(tcx, body, context_mut_ref, coroutine_kind, coroutine_ty); + dump_mir(tcx, false, "coroutine_async_drop_expand", &0, body, |_, _| Ok(())); + } else { + cleanup_async_drops(body); } // We also replace the resume argument and insert an `Assign`. // This is needed because the resume argument `_2` might be live across a `yield`, in which // case there is no `Assign` to it that the transform can turn into a store to the coroutine // state. After the yield the slot in the coroutine state would then be uninitialized. - let resume_local = Local::new(2); + let resume_local = CTX_ARG; let resume_ty = body.local_decls[resume_local].ty; let old_resume_local = replace_local(resume_local, resume_ty, body, tcx); @@ -1610,6 +1548,7 @@ impl<'tcx> crate::MirPass<'tcx> for StateTransform { let always_live_locals = always_storage_live_locals(body); + let movable = coroutine_kind.movability() == hir::Movability::Movable; let liveness_info = locals_live_across_suspend_points(tcx, body, &always_live_locals, movable); @@ -1669,10 +1608,14 @@ impl<'tcx> crate::MirPass<'tcx> for StateTransform { body.coroutine.as_mut().unwrap().resume_ty = None; body.coroutine.as_mut().unwrap().coroutine_layout = Some(layout); + // FIXME: Drops, produced by insert_clean_drop + elaborate_coroutine_drops, + // are currently sync only. To allow async for them, we need to move those calls + // before expand_async_drops, and fix the related problems. + // // Insert `drop(coroutine_struct)` which is used to drop upvars for coroutines in // the unresumed state. // This is expanded to a drop ladder in `elaborate_coroutine_drops`. - let drop_clean = insert_clean_drop(body); + let drop_clean = insert_clean_drop(tcx, body, has_async_drops); dump_mir(tcx, false, "coroutine_pre-elab", &0, body, |_, _| Ok(())); @@ -1683,13 +1626,32 @@ impl<'tcx> crate::MirPass<'tcx> for StateTransform { dump_mir(tcx, false, "coroutine_post-transform", &0, body, |_, _| Ok(())); - // Create a copy of our MIR and use it to create the drop shim for the coroutine - let drop_shim = create_coroutine_drop_shim(tcx, &transform, coroutine_ty, body, drop_clean); + let can_unwind = can_unwind(tcx, body); - body.coroutine.as_mut().unwrap().coroutine_drop = Some(drop_shim); + // Create a copy of our MIR and use it to create the drop shim for the coroutine + if has_async_drops { + // If coroutine has async drops, generating async drop shim + let mut drop_shim = + create_coroutine_drop_shim_async(tcx, &transform, body, drop_clean, can_unwind); + // Run derefer to fix Derefs that are not in the first place + deref_finder(tcx, &mut drop_shim); + body.coroutine.as_mut().unwrap().coroutine_drop_async = Some(drop_shim); + } else { + // If coroutine has no async drops, generating sync drop shim + let mut drop_shim = + create_coroutine_drop_shim(tcx, &transform, coroutine_ty, body, drop_clean); + // Run derefer to fix Derefs that are not in the first place + deref_finder(tcx, &mut drop_shim); + body.coroutine.as_mut().unwrap().coroutine_drop = Some(drop_shim); + + // For coroutine with sync drop, generating async proxy for `future_drop_poll` call + let mut proxy_shim = create_coroutine_drop_shim_proxy_async(tcx, body); + deref_finder(tcx, &mut proxy_shim); + body.coroutine.as_mut().unwrap().coroutine_drop_proxy_async = Some(proxy_shim); + } // Create the Coroutine::resume / Future::poll function - create_coroutine_resume_function(tcx, transform, body, can_return); + create_coroutine_resume_function(tcx, transform, body, can_return, can_unwind); // Run derefer to fix Derefs that are not in the first place deref_finder(tcx, body); diff --git a/compiler/rustc_mir_transform/src/coroutine/by_move_body.rs b/compiler/rustc_mir_transform/src/coroutine/by_move_body.rs index 89a306c610477..0a839d91404ec 100644 --- a/compiler/rustc_mir_transform/src/coroutine/by_move_body.rs +++ b/compiler/rustc_mir_transform/src/coroutine/by_move_body.rs @@ -73,6 +73,7 @@ use rustc_data_structures::unord::UnordMap; use rustc_hir as hir; use rustc_hir::def::DefKind; use rustc_hir::def_id::{DefId, LocalDefId}; +use rustc_hir::definitions::DisambiguatorState; use rustc_middle::bug; use rustc_middle::hir::place::{Projection, ProjectionKind}; use rustc_middle::mir::visit::MutVisitor; @@ -213,12 +214,21 @@ pub(crate) fn coroutine_by_move_body_def_id<'tcx>( let mut by_move_body = body.clone(); MakeByMoveBody { tcx, field_remapping, by_move_coroutine_ty }.visit_body(&mut by_move_body); - // This will always be `{closure#1}`, since the original coroutine is `{closure#0}`. - let body_def = tcx.create_def(parent_def_id, None, DefKind::SyntheticCoroutineBody); + // This path is unique since we're in a query so we'll only be called once with `parent_def_id` + // and this is the only location creating `SyntheticCoroutineBody`. + let body_def = tcx.create_def( + parent_def_id, + None, + DefKind::SyntheticCoroutineBody, + None, + &mut DisambiguatorState::new(), + ); by_move_body.source = mir::MirSource::from_instance(InstanceKind::Item(body_def.def_id().to_def_id())); dump_mir(tcx, false, "built", &"after", &by_move_body, |_, _| Ok(())); + // Feed HIR because we try to access this body's attrs in the inliner. + body_def.feed_hir(); // Inherited from the by-ref coroutine. body_def.codegen_fn_attrs(tcx.codegen_fn_attrs(coroutine_def_id).clone()); body_def.coverage_attr_on(tcx.coverage_attr_on(coroutine_def_id)); diff --git a/compiler/rustc_mir_transform/src/coroutine/drop.rs b/compiler/rustc_mir_transform/src/coroutine/drop.rs new file mode 100644 index 0000000000000..6b266da5a69e5 --- /dev/null +++ b/compiler/rustc_mir_transform/src/coroutine/drop.rs @@ -0,0 +1,725 @@ +//! Drops and async drops related logic for coroutine transformation pass + +use super::*; + +// Fix return Poll::Pending statement into Poll<()>::Pending for async drop function +struct FixReturnPendingVisitor<'tcx> { + tcx: TyCtxt<'tcx>, +} + +impl<'tcx> MutVisitor<'tcx> for FixReturnPendingVisitor<'tcx> { + fn tcx(&self) -> TyCtxt<'tcx> { + self.tcx + } + + fn visit_assign( + &mut self, + place: &mut Place<'tcx>, + rvalue: &mut Rvalue<'tcx>, + _location: Location, + ) { + if place.local != RETURN_PLACE { + return; + } + + // Converting `_0 = Poll::::Pending` to `_0 = Poll::<()>::Pending` + if let Rvalue::Aggregate(kind, _) = rvalue { + if let AggregateKind::Adt(_, _, ref mut args, _, _) = **kind { + *args = self.tcx.mk_args(&[self.tcx.types.unit.into()]); + } + } + } +} + +// rv = call fut.poll() +fn build_poll_call<'tcx>( + tcx: TyCtxt<'tcx>, + body: &mut Body<'tcx>, + poll_unit_place: &Place<'tcx>, + switch_block: BasicBlock, + fut_pin_place: &Place<'tcx>, + fut_ty: Ty<'tcx>, + context_ref_place: &Place<'tcx>, + unwind: UnwindAction, +) -> BasicBlock { + let poll_fn = tcx.require_lang_item(LangItem::FuturePoll, None); + let poll_fn = Ty::new_fn_def(tcx, poll_fn, [fut_ty]); + let poll_fn = Operand::Constant(Box::new(ConstOperand { + span: DUMMY_SP, + user_ty: None, + const_: Const::zero_sized(poll_fn), + })); + let call = TerminatorKind::Call { + func: poll_fn.clone(), + args: [ + dummy_spanned(Operand::Move(*fut_pin_place)), + dummy_spanned(Operand::Move(*context_ref_place)), + ] + .into(), + destination: *poll_unit_place, + target: Some(switch_block), + unwind, + call_source: CallSource::Misc, + fn_span: DUMMY_SP, + }; + insert_term_block(body, call) +} + +// pin_fut = Pin::new_unchecked(&mut fut) +fn build_pin_fut<'tcx>( + tcx: TyCtxt<'tcx>, + body: &mut Body<'tcx>, + fut_place: Place<'tcx>, + unwind: UnwindAction, +) -> (BasicBlock, Place<'tcx>) { + let span = body.span; + let source_info = SourceInfo::outermost(span); + let fut_ty = fut_place.ty(&body.local_decls, tcx).ty; + let fut_ref_ty = Ty::new_mut_ref(tcx, tcx.lifetimes.re_erased, fut_ty); + let fut_ref_place = Place::from(body.local_decls.push(LocalDecl::new(fut_ref_ty, span))); + let pin_fut_new_unchecked_fn = Ty::new_fn_def( + tcx, + tcx.require_lang_item(LangItem::PinNewUnchecked, Some(span)), + [fut_ref_ty], + ); + let fut_pin_ty = pin_fut_new_unchecked_fn.fn_sig(tcx).output().skip_binder(); + let fut_pin_place = Place::from(body.local_decls.push(LocalDecl::new(fut_pin_ty, span))); + let pin_fut_new_unchecked_fn = Operand::Constant(Box::new(ConstOperand { + span, + user_ty: None, + const_: Const::zero_sized(pin_fut_new_unchecked_fn), + })); + + let storage_live = + Statement { source_info, kind: StatementKind::StorageLive(fut_pin_place.local) }; + + let fut_ref_assign = Statement { + source_info, + kind: StatementKind::Assign(Box::new(( + fut_ref_place, + Rvalue::Ref( + tcx.lifetimes.re_erased, + BorrowKind::Mut { kind: MutBorrowKind::Default }, + fut_place, + ), + ))), + }; + + // call Pin::new_unchecked(&mut fut) + let pin_fut_bb = body.basic_blocks_mut().push(BasicBlockData { + statements: [storage_live, fut_ref_assign].to_vec(), + terminator: Some(Terminator { + source_info, + kind: TerminatorKind::Call { + func: pin_fut_new_unchecked_fn, + args: [dummy_spanned(Operand::Move(fut_ref_place))].into(), + destination: fut_pin_place, + target: None, // will be fixed later + unwind, + call_source: CallSource::Misc, + fn_span: span, + }, + }), + is_cleanup: false, + }); + (pin_fut_bb, fut_pin_place) +} + +// Build Poll switch for async drop +// match rv { +// Ready() => ready_block +// Pending => yield_block +//} +fn build_poll_switch<'tcx>( + tcx: TyCtxt<'tcx>, + body: &mut Body<'tcx>, + poll_enum: Ty<'tcx>, + poll_unit_place: &Place<'tcx>, + ready_block: BasicBlock, + yield_block: BasicBlock, +) -> BasicBlock { + let poll_enum_adt = poll_enum.ty_adt_def().unwrap(); + + let Discr { val: poll_ready_discr, ty: poll_discr_ty } = poll_enum + .discriminant_for_variant( + tcx, + poll_enum_adt.variant_index_with_id(tcx.require_lang_item(LangItem::PollReady, None)), + ) + .unwrap(); + let poll_pending_discr = poll_enum + .discriminant_for_variant( + tcx, + poll_enum_adt.variant_index_with_id(tcx.require_lang_item(LangItem::PollPending, None)), + ) + .unwrap() + .val; + let source_info = SourceInfo::outermost(body.span); + let poll_discr_place = + Place::from(body.local_decls.push(LocalDecl::new(poll_discr_ty, source_info.span))); + let discr_assign = Statement { + source_info, + kind: StatementKind::Assign(Box::new(( + poll_discr_place, + Rvalue::Discriminant(*poll_unit_place), + ))), + }; + let unreachable_block = insert_term_block(body, TerminatorKind::Unreachable); + body.basic_blocks_mut().push(BasicBlockData { + statements: [discr_assign].to_vec(), + terminator: Some(Terminator { + source_info, + kind: TerminatorKind::SwitchInt { + discr: Operand::Move(poll_discr_place), + targets: SwitchTargets::new( + [(poll_ready_discr, ready_block), (poll_pending_discr, yield_block)] + .into_iter(), + unreachable_block, + ), + }, + }), + is_cleanup: false, + }) +} + +// Gather blocks, reachable through 'drop' targets of Yield and Drop terminators (chained) +fn gather_dropline_blocks<'tcx>(body: &mut Body<'tcx>) -> DenseBitSet { + let mut dropline: DenseBitSet = DenseBitSet::new_empty(body.basic_blocks.len()); + for (bb, data) in traversal::reverse_postorder(body) { + if dropline.contains(bb) { + data.terminator().successors().for_each(|v| { + dropline.insert(v); + }); + } else { + match data.terminator().kind { + TerminatorKind::Yield { drop: Some(v), .. } => { + dropline.insert(v); + } + TerminatorKind::Drop { drop: Some(v), .. } => { + dropline.insert(v); + } + _ => (), + } + } + } + dropline +} + +/// Cleanup all async drops (reset to sync) +pub(super) fn cleanup_async_drops<'tcx>(body: &mut Body<'tcx>) { + for block in body.basic_blocks_mut() { + if let TerminatorKind::Drop { + place: _, + target: _, + unwind: _, + replace: _, + ref mut drop, + ref mut async_fut, + } = block.terminator_mut().kind + { + if drop.is_some() || async_fut.is_some() { + *drop = None; + *async_fut = None; + } + } + } +} + +pub(super) fn has_expandable_async_drops<'tcx>( + tcx: TyCtxt<'tcx>, + body: &mut Body<'tcx>, + coroutine_ty: Ty<'tcx>, +) -> bool { + for bb in START_BLOCK..body.basic_blocks.next_index() { + // Drops in unwind path (cleanup blocks) are not expanded to async drops, only sync drops in unwind path + if body[bb].is_cleanup { + continue; + } + let TerminatorKind::Drop { place, target: _, unwind: _, replace: _, drop: _, async_fut } = + body[bb].terminator().kind + else { + continue; + }; + let place_ty = place.ty(&body.local_decls, tcx).ty; + if place_ty == coroutine_ty { + continue; + } + if async_fut.is_none() { + continue; + } + return true; + } + return false; +} + +/// Expand Drop terminator for async drops into mainline poll-switch and dropline poll-switch +pub(super) fn expand_async_drops<'tcx>( + tcx: TyCtxt<'tcx>, + body: &mut Body<'tcx>, + context_mut_ref: Ty<'tcx>, + coroutine_kind: hir::CoroutineKind, + coroutine_ty: Ty<'tcx>, +) { + let dropline = gather_dropline_blocks(body); + // Clean drop and async_fut fields if potentially async drop is not expanded (stays sync) + let remove_asyncness = |block: &mut BasicBlockData<'tcx>| { + if let TerminatorKind::Drop { + place: _, + target: _, + unwind: _, + replace: _, + ref mut drop, + ref mut async_fut, + } = block.terminator_mut().kind + { + *drop = None; + *async_fut = None; + } + }; + for bb in START_BLOCK..body.basic_blocks.next_index() { + // Drops in unwind path (cleanup blocks) are not expanded to async drops, only sync drops in unwind path + if body[bb].is_cleanup { + remove_asyncness(&mut body[bb]); + continue; + } + let TerminatorKind::Drop { place, target, unwind, replace: _, drop, async_fut } = + body[bb].terminator().kind + else { + continue; + }; + + let place_ty = place.ty(&body.local_decls, tcx).ty; + if place_ty == coroutine_ty { + remove_asyncness(&mut body[bb]); + continue; + } + + let Some(fut_local) = async_fut else { + remove_asyncness(&mut body[bb]); + continue; + }; + + let is_dropline_bb = dropline.contains(bb); + + if !is_dropline_bb && drop.is_none() { + remove_asyncness(&mut body[bb]); + continue; + } + + let fut_place = Place::from(fut_local); + let fut_ty = fut_place.ty(&body.local_decls, tcx).ty; + + // poll-code: + // state_call_drop: + // #bb_pin: fut_pin = Pin::new_unchecked(&mut fut) + // #bb_call: rv = call fut.poll() (or future_drop_poll(fut) for internal future drops) + // #bb_check: match (rv) + // pending => return rv (yield) + // ready => *continue_bb|drop_bb* + + // Compute Poll<> (aka Poll with void return) + let poll_adt_ref = tcx.adt_def(tcx.require_lang_item(LangItem::Poll, None)); + let poll_enum = Ty::new_adt(tcx, poll_adt_ref, tcx.mk_args(&[tcx.types.unit.into()])); + let poll_decl = LocalDecl::new(poll_enum, body.span); + let poll_unit_place = Place::from(body.local_decls.push(poll_decl)); + + // First state-loop yield for mainline + let context_ref_place = + Place::from(body.local_decls.push(LocalDecl::new(context_mut_ref, body.span))); + let source_info = body[bb].terminator.as_ref().unwrap().source_info; + let arg = Rvalue::Use(Operand::Move(Place::from(CTX_ARG))); + body[bb].statements.push(Statement { + source_info, + kind: StatementKind::Assign(Box::new((context_ref_place, arg))), + }); + let yield_block = insert_term_block(body, TerminatorKind::Unreachable); // `kind` replaced later to yield + let switch_block = + build_poll_switch(tcx, body, poll_enum, &poll_unit_place, target, yield_block); + let (pin_bb, fut_pin_place) = + build_pin_fut(tcx, body, fut_place.clone(), UnwindAction::Continue); + let call_bb = build_poll_call( + tcx, + body, + &poll_unit_place, + switch_block, + &fut_pin_place, + fut_ty, + &context_ref_place, + unwind, + ); + + // Second state-loop yield for transition to dropline (when coroutine async drop started) + let mut dropline_transition_bb: Option = None; + let mut dropline_yield_bb: Option = None; + let mut dropline_context_ref: Option> = None; + let mut dropline_call_bb: Option = None; + if !is_dropline_bb { + let context_ref_place2: Place<'_> = + Place::from(body.local_decls.push(LocalDecl::new(context_mut_ref, body.span))); + let drop_yield_block = insert_term_block(body, TerminatorKind::Unreachable); // `kind` replaced later to yield + let drop_switch_block = build_poll_switch( + tcx, + body, + poll_enum, + &poll_unit_place, + drop.unwrap(), + drop_yield_block, + ); + let (pin_bb2, fut_pin_place2) = + build_pin_fut(tcx, body, fut_place, UnwindAction::Continue); + let drop_call_bb = build_poll_call( + tcx, + body, + &poll_unit_place, + drop_switch_block, + &fut_pin_place2, + fut_ty, + &context_ref_place2, + unwind, + ); + dropline_transition_bb = Some(pin_bb2); + dropline_yield_bb = Some(drop_yield_block); + dropline_context_ref = Some(context_ref_place2); + dropline_call_bb = Some(drop_call_bb); + } + + // value needed only for return-yields or gen-coroutines, so just const here + let value = Operand::Constant(Box::new(ConstOperand { + span: body.span, + user_ty: None, + const_: Const::from_bool(tcx, false), + })); + use rustc_middle::mir::AssertKind::ResumedAfterDrop; + let panic_bb = insert_panic_block(tcx, body, ResumedAfterDrop(coroutine_kind)); + + if is_dropline_bb { + body[yield_block].terminator_mut().kind = TerminatorKind::Yield { + value: value.clone(), + resume: panic_bb, + resume_arg: context_ref_place, + drop: Some(pin_bb), + }; + } else { + body[yield_block].terminator_mut().kind = TerminatorKind::Yield { + value: value.clone(), + resume: pin_bb, + resume_arg: context_ref_place, + drop: dropline_transition_bb, + }; + body[dropline_yield_bb.unwrap()].terminator_mut().kind = TerminatorKind::Yield { + value, + resume: panic_bb, + resume_arg: dropline_context_ref.unwrap(), + drop: dropline_transition_bb, + }; + } + + if let TerminatorKind::Call { ref mut target, .. } = body[pin_bb].terminator_mut().kind { + *target = Some(call_bb); + } else { + bug!() + } + if !is_dropline_bb { + if let TerminatorKind::Call { ref mut target, .. } = + body[dropline_transition_bb.unwrap()].terminator_mut().kind + { + *target = dropline_call_bb; + } else { + bug!() + } + } + + body[bb].terminator_mut().kind = TerminatorKind::Goto { target: pin_bb }; + } +} + +pub(super) fn elaborate_coroutine_drops<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) { + use crate::elaborate_drop::{Unwind, elaborate_drop}; + use crate::patch::MirPatch; + use crate::shim::DropShimElaborator; + + // Note that `elaborate_drops` only drops the upvars of a coroutine, and + // this is ok because `open_drop` can only be reached within that own + // coroutine's resume function. + let typing_env = body.typing_env(tcx); + + let mut elaborator = DropShimElaborator { + body, + patch: MirPatch::new(body), + tcx, + typing_env, + produce_async_drops: false, + }; + + for (block, block_data) in body.basic_blocks.iter_enumerated() { + let (target, unwind, source_info, dropline) = match block_data.terminator() { + Terminator { + source_info, + kind: TerminatorKind::Drop { place, target, unwind, replace: _, drop, async_fut: _ }, + } => { + if let Some(local) = place.as_local() + && local == SELF_ARG + { + (target, unwind, source_info, *drop) + } else { + continue; + } + } + _ => continue, + }; + let unwind = if block_data.is_cleanup { + Unwind::InCleanup + } else { + Unwind::To(match *unwind { + UnwindAction::Cleanup(tgt) => tgt, + UnwindAction::Continue => elaborator.patch.resume_block(), + UnwindAction::Unreachable => elaborator.patch.unreachable_cleanup_block(), + UnwindAction::Terminate(reason) => elaborator.patch.terminate_block(reason), + }) + }; + elaborate_drop( + &mut elaborator, + *source_info, + Place::from(SELF_ARG), + (), + *target, + unwind, + block, + dropline, + ); + } + elaborator.patch.apply(body); +} + +pub(super) fn insert_clean_drop<'tcx>( + tcx: TyCtxt<'tcx>, + body: &mut Body<'tcx>, + has_async_drops: bool, +) -> BasicBlock { + let source_info = SourceInfo::outermost(body.span); + let return_block = if has_async_drops { + insert_poll_ready_block(tcx, body) + } else { + insert_term_block(body, TerminatorKind::Return) + }; + + // FIXME: When move insert_clean_drop + elaborate_coroutine_drops before async drops expand, + // also set dropline here: + // let dropline = if has_async_drops { Some(return_block) } else { None }; + let dropline = None; + + let term = TerminatorKind::Drop { + place: Place::from(SELF_ARG), + target: return_block, + unwind: UnwindAction::Continue, + replace: false, + drop: dropline, + async_fut: None, + }; + + // Create a block to destroy an unresumed coroutines. This can only destroy upvars. + body.basic_blocks_mut().push(BasicBlockData { + statements: Vec::new(), + terminator: Some(Terminator { source_info, kind: term }), + is_cleanup: false, + }) +} + +pub(super) fn create_coroutine_drop_shim<'tcx>( + tcx: TyCtxt<'tcx>, + transform: &TransformVisitor<'tcx>, + coroutine_ty: Ty<'tcx>, + body: &Body<'tcx>, + drop_clean: BasicBlock, +) -> Body<'tcx> { + let mut body = body.clone(); + // Take the coroutine info out of the body, since the drop shim is + // not a coroutine body itself; it just has its drop built out of it. + let _ = body.coroutine.take(); + // Make sure the resume argument is not included here, since we're + // building a body for `drop_in_place`. + body.arg_count = 1; + + let source_info = SourceInfo::outermost(body.span); + + let mut cases = create_cases(&mut body, transform, Operation::Drop); + + cases.insert(0, (CoroutineArgs::UNRESUMED, drop_clean)); + + // The returned state and the poisoned state fall through to the default + // case which is just to return + + let default_block = insert_term_block(&mut body, TerminatorKind::Return); + insert_switch(&mut body, cases, transform, default_block); + + for block in body.basic_blocks_mut() { + let kind = &mut block.terminator_mut().kind; + if let TerminatorKind::CoroutineDrop = *kind { + *kind = TerminatorKind::Return; + } + } + + // Replace the return variable + body.local_decls[RETURN_PLACE] = LocalDecl::with_source_info(tcx.types.unit, source_info); + + make_coroutine_state_argument_indirect(tcx, &mut body); + + // Change the coroutine argument from &mut to *mut + body.local_decls[SELF_ARG] = + LocalDecl::with_source_info(Ty::new_mut_ptr(tcx, coroutine_ty), source_info); + + // Make sure we remove dead blocks to remove + // unrelated code from the resume part of the function + simplify::remove_dead_blocks(&mut body); + + // Update the body's def to become the drop glue. + let coroutine_instance = body.source.instance; + let drop_in_place = tcx.require_lang_item(LangItem::DropInPlace, None); + let drop_instance = InstanceKind::DropGlue(drop_in_place, Some(coroutine_ty)); + + // Temporary change MirSource to coroutine's instance so that dump_mir produces more sensible + // filename. + body.source.instance = coroutine_instance; + dump_mir(tcx, false, "coroutine_drop", &0, &body, |_, _| Ok(())); + body.source.instance = drop_instance; + + // Creating a coroutine drop shim happens on `Analysis(PostCleanup) -> Runtime(Initial)` + // but the pass manager doesn't update the phase of the coroutine drop shim. Update the + // phase of the drop shim so that later on when we run the pass manager on the shim, in + // the `mir_shims` query, we don't ICE on the intra-pass validation before we've updated + // the phase of the body from analysis. + body.phase = MirPhase::Runtime(RuntimePhase::Initial); + + body +} + +// Create async drop shim function to drop coroutine itself +pub(super) fn create_coroutine_drop_shim_async<'tcx>( + tcx: TyCtxt<'tcx>, + transform: &TransformVisitor<'tcx>, + body: &Body<'tcx>, + drop_clean: BasicBlock, + can_unwind: bool, +) -> Body<'tcx> { + let mut body = body.clone(); + // Take the coroutine info out of the body, since the drop shim is + // not a coroutine body itself; it just has its drop built out of it. + let _ = body.coroutine.take(); + + FixReturnPendingVisitor { tcx }.visit_body(&mut body); + + // Poison the coroutine when it unwinds + if can_unwind { + generate_poison_block_and_redirect_unwinds_there(transform, &mut body); + } + + let source_info = SourceInfo::outermost(body.span); + + let mut cases = create_cases(&mut body, transform, Operation::Drop); + + cases.insert(0, (CoroutineArgs::UNRESUMED, drop_clean)); + + use rustc_middle::mir::AssertKind::ResumedAfterPanic; + // Panic when resumed on the returned or poisoned state + if can_unwind { + cases.insert( + 1, + ( + CoroutineArgs::POISONED, + insert_panic_block(tcx, &mut body, ResumedAfterPanic(transform.coroutine_kind)), + ), + ); + } + + // RETURNED state also goes to default_block with `return Ready<()>`. + // For fully-polled coroutine, async drop has nothing to do. + let default_block = insert_poll_ready_block(tcx, &mut body); + insert_switch(&mut body, cases, transform, default_block); + + for block in body.basic_blocks_mut() { + let kind = &mut block.terminator_mut().kind; + if let TerminatorKind::CoroutineDrop = *kind { + *kind = TerminatorKind::Return; + block.statements.push(return_poll_ready_assign(tcx, source_info)); + } + } + + // Replace the return variable: Poll to Poll<()> + let poll_adt_ref = tcx.adt_def(tcx.require_lang_item(LangItem::Poll, None)); + let poll_enum = Ty::new_adt(tcx, poll_adt_ref, tcx.mk_args(&[tcx.types.unit.into()])); + body.local_decls[RETURN_PLACE] = LocalDecl::with_source_info(poll_enum, source_info); + + make_coroutine_state_argument_indirect(tcx, &mut body); + + match transform.coroutine_kind { + // Iterator::next doesn't accept a pinned argument, + // unlike for all other coroutine kinds. + CoroutineKind::Desugared(CoroutineDesugaring::Gen, _) => {} + _ => { + make_coroutine_state_argument_pinned(tcx, &mut body); + } + } + + // Make sure we remove dead blocks to remove + // unrelated code from the resume part of the function + simplify::remove_dead_blocks(&mut body); + + pm::run_passes_no_validate( + tcx, + &mut body, + &[&abort_unwinding_calls::AbortUnwindingCalls], + None, + ); + + dump_mir(tcx, false, "coroutine_drop_async", &0, &body, |_, _| Ok(())); + + body +} + +// Create async drop shim proxy function for future_drop_poll +// It is just { call coroutine_drop(); return Poll::Ready(); } +pub(super) fn create_coroutine_drop_shim_proxy_async<'tcx>( + tcx: TyCtxt<'tcx>, + body: &Body<'tcx>, +) -> Body<'tcx> { + let mut body = body.clone(); + // Take the coroutine info out of the body, since the drop shim is + // not a coroutine body itself; it just has its drop built out of it. + let _ = body.coroutine.take(); + let basic_blocks: IndexVec> = IndexVec::new(); + body.basic_blocks = BasicBlocks::new(basic_blocks); + body.var_debug_info.clear(); + + // Keeping return value and args + body.local_decls.truncate(1 + body.arg_count); + + let source_info = SourceInfo::outermost(body.span); + + // Replace the return variable: Poll to Poll<()> + let poll_adt_ref = tcx.adt_def(tcx.require_lang_item(LangItem::Poll, None)); + let poll_enum = Ty::new_adt(tcx, poll_adt_ref, tcx.mk_args(&[tcx.types.unit.into()])); + body.local_decls[RETURN_PLACE] = LocalDecl::with_source_info(poll_enum, source_info); + + // call coroutine_drop() + let call_bb = body.basic_blocks_mut().push(BasicBlockData { + statements: Vec::new(), + terminator: None, + is_cleanup: false, + }); + + // return Poll::Ready() + let ret_bb = insert_poll_ready_block(tcx, &mut body); + + let kind = TerminatorKind::Drop { + place: Place::from(SELF_ARG), + target: ret_bb, + unwind: UnwindAction::Continue, + replace: false, + drop: None, + async_fut: None, + }; + body.basic_blocks_mut()[call_bb].terminator = Some(Terminator { source_info, kind }); + + dump_mir(tcx, false, "coroutine_drop_proxy_async", &0, &body, |_, _| Ok(())); + + body +} diff --git a/compiler/rustc_mir_transform/src/coverage/mappings.rs b/compiler/rustc_mir_transform/src/coverage/mappings.rs index d83c0d40a7e54..b4b4d0416fb99 100644 --- a/compiler/rustc_mir_transform/src/coverage/mappings.rs +++ b/compiler/rustc_mir_transform/src/coverage/mappings.rs @@ -91,12 +91,12 @@ pub(super) fn extract_all_mapping_info_from_mir<'tcx>( // When debugging flag `-Zcoverage-options=no-mir-spans` is set, we need // to give the same treatment to _all_ functions, because `llvm-cov` // seems to ignore functions that don't have any ordinary code spans. - if let Some(span) = hir_info.fn_sig_span_extended { + if let Some(span) = hir_info.fn_sig_span { code_mappings.push(CodeMapping { span, bcb: START_BCB }); } } else { // Extract coverage spans from MIR statements/terminators as normal. - extract_refined_covspans(mir_body, hir_info, graph, &mut code_mappings); + extract_refined_covspans(tcx, mir_body, hir_info, graph, &mut code_mappings); } branch_pairs.extend(extract_branch_pairs(mir_body, hir_info, graph)); diff --git a/compiler/rustc_mir_transform/src/coverage/mod.rs b/compiler/rustc_mir_transform/src/coverage/mod.rs index aa4c0ef1e1f93..702c62eddc7fb 100644 --- a/compiler/rustc_mir_transform/src/coverage/mod.rs +++ b/compiler/rustc_mir_transform/src/coverage/mod.rs @@ -1,8 +1,7 @@ -pub(super) mod query; - mod counters; mod graph; mod mappings; +pub(super) mod query; mod spans; #[cfg(test)] mod tests; @@ -90,7 +89,7 @@ fn instrument_function_for_coverage<'tcx>(tcx: TyCtxt<'tcx>, mir_body: &mut mir: // Use the coverage graph to prepare intermediate data that will eventually // be used to assign physical counters and counter expressions to points in - // the control-flow graph + // the control-flow graph. let BcbCountersData { node_flow_data, priority_list } = counters::prepare_bcb_counters_data(&graph); @@ -269,9 +268,9 @@ fn inject_statement(mir_body: &mut mir::Body<'_>, counter_kind: CoverageKind, bb struct ExtractedHirInfo { function_source_hash: u64, is_async_fn: bool, - /// The span of the function's signature, extended to the start of `body_span`. + /// The span of the function's signature, if available. /// Must have the same context and filename as the body span. - fn_sig_span_extended: Option, + fn_sig_span: Option, body_span: Span, /// "Holes" are regions within the function body (or its expansions) that /// should not be included in coverage spans for this function @@ -309,30 +308,20 @@ fn extract_hir_info<'tcx>(tcx: TyCtxt<'tcx>, def_id: LocalDefId) -> ExtractedHir // The actual signature span is only used if it has the same context and // filename as the body, and precedes the body. - let fn_sig_span_extended = maybe_fn_sig - .map(|fn_sig| fn_sig.span) - .filter(|&fn_sig_span| { - let source_map = tcx.sess.source_map(); - let file_idx = |span: Span| source_map.lookup_source_file_idx(span.lo()); - - fn_sig_span.eq_ctxt(body_span) - && fn_sig_span.hi() <= body_span.lo() - && file_idx(fn_sig_span) == file_idx(body_span) - }) - // If so, extend it to the start of the body span. - .map(|fn_sig_span| fn_sig_span.with_hi(body_span.lo())); + let fn_sig_span = maybe_fn_sig.map(|fn_sig| fn_sig.span).filter(|&fn_sig_span| { + let source_map = tcx.sess.source_map(); + let file_idx = |span: Span| source_map.lookup_source_file_idx(span.lo()); + + fn_sig_span.eq_ctxt(body_span) + && fn_sig_span.hi() <= body_span.lo() + && file_idx(fn_sig_span) == file_idx(body_span) + }); let function_source_hash = hash_mir_source(tcx, hir_body); let hole_spans = extract_hole_spans_from_hir(tcx, hir_body); - ExtractedHirInfo { - function_source_hash, - is_async_fn, - fn_sig_span_extended, - body_span, - hole_spans, - } + ExtractedHirInfo { function_source_hash, is_async_fn, fn_sig_span, body_span, hole_spans } } fn hash_mir_source<'tcx>(tcx: TyCtxt<'tcx>, hir_body: &'tcx hir::Body<'tcx>) -> u64 { diff --git a/compiler/rustc_mir_transform/src/coverage/spans.rs b/compiler/rustc_mir_transform/src/coverage/spans.rs index 8befe9c5d8dd8..ec76076020eb7 100644 --- a/compiler/rustc_mir_transform/src/coverage/spans.rs +++ b/compiler/rustc_mir_transform/src/coverage/spans.rs @@ -1,9 +1,8 @@ -use std::collections::VecDeque; - use rustc_data_structures::fx::FxHashSet; use rustc_middle::mir; +use rustc_middle::ty::TyCtxt; use rustc_span::{DesugaringKind, ExpnKind, MacroKind, Span}; -use tracing::{debug, debug_span, instrument}; +use tracing::instrument; use crate::coverage::graph::{BasicCoverageBlock, CoverageGraph}; use crate::coverage::spans::from_mir::{Hole, RawSpanFromMir, SpanFromMir}; @@ -11,8 +10,9 @@ use crate::coverage::{ExtractedHirInfo, mappings, unexpand}; mod from_mir; -pub(super) fn extract_refined_covspans( - mir_body: &mir::Body<'_>, +pub(super) fn extract_refined_covspans<'tcx>( + tcx: TyCtxt<'tcx>, + mir_body: &mir::Body<'tcx>, hir_info: &ExtractedHirInfo, graph: &CoverageGraph, code_mappings: &mut impl Extend, @@ -39,18 +39,18 @@ pub(super) fn extract_refined_covspans( return; } - // Also add the adjusted function signature span, if available. + // Also add the function signature span, if available. // Otherwise, add a fake span at the start of the body, to avoid an ugly // gap between the start of the body and the first real span. // FIXME: Find a more principled way to solve this problem. covspans.push(SpanFromMir::for_fn_sig( - hir_info.fn_sig_span_extended.unwrap_or_else(|| body_span.shrink_to_lo()), + hir_info.fn_sig_span.unwrap_or_else(|| body_span.shrink_to_lo()), )); // First, perform the passes that need macro information. covspans.sort_by(|a, b| graph.cmp_in_dominator_order(a.bcb, b.bcb)); remove_unwanted_expansion_spans(&mut covspans); - split_visible_macro_spans(&mut covspans); + shrink_visible_macro_spans(tcx, &mut covspans); // We no longer need the extra information in `SpanFromMir`, so convert to `Covspan`. let mut covspans = covspans.into_iter().map(SpanFromMir::into_covspan).collect::>(); @@ -80,26 +80,17 @@ pub(super) fn extract_refined_covspans( holes.sort_by(|a, b| compare_spans(a.span, b.span)); holes.dedup_by(|b, a| a.merge_if_overlapping_or_adjacent(b)); - // Split the covspans into separate buckets that don't overlap any holes. - let buckets = divide_spans_into_buckets(covspans, &holes); - - for mut covspans in buckets { - // Make sure each individual bucket is internally sorted. - covspans.sort_by(compare_covspans); - let _span = debug_span!("processing bucket", ?covspans).entered(); + // Discard any span that overlaps with a hole. + discard_spans_overlapping_holes(&mut covspans, &holes); - let mut covspans = remove_unwanted_overlapping_spans(covspans); - debug!(?covspans, "after removing overlaps"); + // Perform more refinement steps after holes have been dealt with. + let mut covspans = remove_unwanted_overlapping_spans(covspans); + covspans.dedup_by(|b, a| a.merge_if_eligible(b)); - // Do one last merge pass, to simplify the output. - covspans.dedup_by(|b, a| a.merge_if_eligible(b)); - debug!(?covspans, "after merge"); - - code_mappings.extend(covspans.into_iter().map(|Covspan { span, bcb }| { - // Each span produced by the refiner represents an ordinary code region. - mappings::CodeMapping { span, bcb } - })); - } + code_mappings.extend(covspans.into_iter().map(|Covspan { span, bcb }| { + // Each span produced by the refiner represents an ordinary code region. + mappings::CodeMapping { span, bcb } + })); } /// Macros that expand into branches (e.g. `assert!`, `trace!`) tend to generate @@ -129,96 +120,48 @@ fn remove_unwanted_expansion_spans(covspans: &mut Vec) { } /// When a span corresponds to a macro invocation that is visible from the -/// function body, split it into two parts. The first part covers just the -/// macro name plus `!`, and the second part covers the rest of the macro -/// invocation. This seems to give better results for code that uses macros. -fn split_visible_macro_spans(covspans: &mut Vec) { - let mut extra_spans = vec![]; - - covspans.retain(|covspan| { - let Some(ExpnKind::Macro(MacroKind::Bang, visible_macro)) = covspan.expn_kind else { - return true; - }; - - let split_len = visible_macro.as_str().len() as u32 + 1; - let (before, after) = covspan.span.split_at(split_len); - if !covspan.span.contains(before) || !covspan.span.contains(after) { - // Something is unexpectedly wrong with the split point. - // The debug assertion in `split_at` will have already caught this, - // but in release builds it's safer to do nothing and maybe get a - // bug report for unexpected coverage, rather than risk an ICE. - return true; +/// function body, truncate it to just the macro name plus `!`. +/// This seems to give better results for code that uses macros. +fn shrink_visible_macro_spans(tcx: TyCtxt<'_>, covspans: &mut Vec) { + let source_map = tcx.sess.source_map(); + + for covspan in covspans { + if matches!(covspan.expn_kind, Some(ExpnKind::Macro(MacroKind::Bang, _))) { + covspan.span = source_map.span_through_char(covspan.span, '!'); } - - extra_spans.push(SpanFromMir::new(before, covspan.expn_kind.clone(), covspan.bcb)); - extra_spans.push(SpanFromMir::new(after, covspan.expn_kind.clone(), covspan.bcb)); - false // Discard the original covspan that we just split. - }); - - // The newly-split spans are added at the end, so any previous sorting - // is not preserved. - covspans.extend(extra_spans); + } } -/// Uses the holes to divide the given covspans into buckets, such that: -/// - No span in any hole overlaps a bucket (truncating the spans if necessary). -/// - The spans in each bucket are strictly after all spans in previous buckets, -/// and strictly before all spans in subsequent buckets. +/// Discard all covspans that overlap a hole. /// -/// The resulting buckets are sorted relative to each other, but might not be -/// internally sorted. -#[instrument(level = "debug")] -fn divide_spans_into_buckets(input_covspans: Vec, holes: &[Hole]) -> Vec> { - debug_assert!(input_covspans.is_sorted_by(|a, b| compare_spans(a.span, b.span).is_le())); +/// The lists of covspans and holes must be sorted, and any holes that overlap +/// with each other must have already been merged. +fn discard_spans_overlapping_holes(covspans: &mut Vec, holes: &[Hole]) { + debug_assert!(covspans.is_sorted_by(|a, b| compare_spans(a.span, b.span).is_le())); debug_assert!(holes.is_sorted_by(|a, b| compare_spans(a.span, b.span).is_le())); + debug_assert!(holes.array_windows().all(|[a, b]| !a.span.overlaps_or_adjacent(b.span))); + + let mut curr_hole = 0usize; + let mut overlaps_hole = |covspan: &Covspan| -> bool { + while let Some(hole) = holes.get(curr_hole) { + // Both lists are sorted, so we can permanently skip any holes that + // end before the start of the current span. + if hole.span.hi() <= covspan.span.lo() { + curr_hole += 1; + continue; + } - // Now we're ready to start carving holes out of the initial coverage spans, - // and grouping them in buckets separated by the holes. - - let mut input_covspans = VecDeque::from(input_covspans); - let mut fragments = vec![]; - - // For each hole: - // - Identify the spans that are entirely or partly before the hole. - // - Put those spans in a corresponding bucket, truncated to the start of the hole. - // - If one of those spans also extends after the hole, put the rest of it - // in a "fragments" vector that is processed by the next hole. - let mut buckets = (0..holes.len()).map(|_| vec![]).collect::>(); - for (hole, bucket) in holes.iter().zip(&mut buckets) { - let fragments_from_prev = std::mem::take(&mut fragments); - - // Only inspect spans that precede or overlap this hole, - // leaving the rest to be inspected by later holes. - // (This relies on the spans and holes both being sorted.) - let relevant_input_covspans = - drain_front_while(&mut input_covspans, |c| c.span.lo() < hole.span.hi()); - - for covspan in fragments_from_prev.into_iter().chain(relevant_input_covspans) { - let (before, after) = covspan.split_around_hole_span(hole.span); - bucket.extend(before); - fragments.extend(after); + return hole.span.overlaps(covspan.span); } - } - - // After finding the spans before each hole, any remaining fragments/spans - // form their own final bucket, after the final hole. - // (If there were no holes, this will just be all of the initial spans.) - fragments.extend(input_covspans); - buckets.push(fragments); - buckets -} + // No holes left, so this covspan doesn't overlap with any holes. + false + }; -/// Similar to `.drain(..)`, but stops just before it would remove an item not -/// satisfying the predicate. -fn drain_front_while<'a, T>( - queue: &'a mut VecDeque, - mut pred_fn: impl FnMut(&T) -> bool, -) -> impl Iterator { - std::iter::from_fn(move || if pred_fn(queue.front()?) { queue.pop_front() } else { None }) + covspans.retain(|covspan| !overlaps_hole(covspan)); } -/// Takes one of the buckets of (sorted) spans extracted from MIR, and "refines" +/// Takes a list of sorted spans extracted from MIR, and "refines" /// those spans by removing spans that overlap in unwanted ways. #[instrument(level = "debug")] fn remove_unwanted_overlapping_spans(sorted_spans: Vec) -> Vec { @@ -258,35 +201,21 @@ struct Covspan { } impl Covspan { - /// Splits this covspan into 0-2 parts: - /// - The part that is strictly before the hole span, if any. - /// - The part that is strictly after the hole span, if any. - fn split_around_hole_span(&self, hole_span: Span) -> (Option, Option) { - let before = try { - let span = self.span.trim_end(hole_span)?; - Self { span, ..*self } - }; - let after = try { - let span = self.span.trim_start(hole_span)?; - Self { span, ..*self } - }; - - (before, after) - } - - /// If `self` and `other` can be merged (i.e. they have the same BCB), - /// mutates `self.span` to also include `other.span` and returns true. + /// If `self` and `other` can be merged, mutates `self.span` to also + /// include `other.span` and returns true. /// - /// Note that compatible covspans can be merged even if their underlying - /// spans are not overlapping/adjacent; any space between them will also be - /// part of the merged covspan. + /// Two covspans can be merged if they have the same BCB, and they are + /// overlapping or adjacent. fn merge_if_eligible(&mut self, other: &Self) -> bool { - if self.bcb != other.bcb { - return false; + let eligible_for_merge = + |a: &Self, b: &Self| (a.bcb == b.bcb) && a.span.overlaps_or_adjacent(b.span); + + if eligible_for_merge(self, other) { + self.span = self.span.to(other.span); + true + } else { + false } - - self.span = self.span.to(other.span); - true } } diff --git a/compiler/rustc_mir_transform/src/coverage/spans/from_mir.rs b/compiler/rustc_mir_transform/src/coverage/spans/from_mir.rs index 1faa2171c0b02..804cd8ab3f7d3 100644 --- a/compiler/rustc_mir_transform/src/coverage/spans/from_mir.rs +++ b/compiler/rustc_mir_transform/src/coverage/spans/from_mir.rs @@ -120,22 +120,20 @@ fn filtered_terminator_span(terminator: &Terminator<'_>) -> Option { // an `if condition { block }` has a span that includes the executed block, if true, // but for coverage, the code region executed, up to *and* through the SwitchInt, // actually stops before the if's block.) - TerminatorKind::Unreachable // Unreachable blocks are not connected to the MIR CFG + TerminatorKind::Unreachable | TerminatorKind::Assert { .. } | TerminatorKind::Drop { .. } | TerminatorKind::SwitchInt { .. } - // For `FalseEdge`, only the `real` branch is taken, so it is similar to a `Goto`. | TerminatorKind::FalseEdge { .. } | TerminatorKind::Goto { .. } => None, // Call `func` operand can have a more specific span when part of a chain of calls - TerminatorKind::Call { ref func, .. } - | TerminatorKind::TailCall { ref func, .. } => { + TerminatorKind::Call { ref func, .. } | TerminatorKind::TailCall { ref func, .. } => { let mut span = terminator.source_info.span; - if let mir::Operand::Constant(box constant) = func { - if constant.span.lo() > span.lo() { - span = span.with_lo(constant.span.lo()); - } + if let mir::Operand::Constant(constant) = func + && span.contains(constant.span) + { + span = constant.span; } Some(span) } @@ -147,9 +145,7 @@ fn filtered_terminator_span(terminator: &Terminator<'_>) -> Option { | TerminatorKind::Yield { .. } | TerminatorKind::CoroutineDrop | TerminatorKind::FalseUnwind { .. } - | TerminatorKind::InlineAsm { .. } => { - Some(terminator.source_info.span) - } + | TerminatorKind::InlineAsm { .. } => Some(terminator.source_info.span), } } diff --git a/compiler/rustc_mir_transform/src/dataflow_const_prop.rs b/compiler/rustc_mir_transform/src/dataflow_const_prop.rs index 90173da17f0fc..99b95e7312bde 100644 --- a/compiler/rustc_mir_transform/src/dataflow_const_prop.rs +++ b/compiler/rustc_mir_transform/src/dataflow_const_prop.rs @@ -23,7 +23,7 @@ use rustc_mir_dataflow::lattice::{FlatSet, HasBottom}; use rustc_mir_dataflow::value_analysis::{ Map, PlaceIndex, State, TrackElem, ValueOrPlace, debug_with_context, }; -use rustc_mir_dataflow::{Analysis, Results, ResultsVisitor}; +use rustc_mir_dataflow::{Analysis, ResultsVisitor, visit_reachable_results}; use rustc_span::DUMMY_SP; use tracing::{debug, debug_span, instrument}; @@ -61,13 +61,14 @@ impl<'tcx> crate::MirPass<'tcx> for DataflowConstProp { let map = Map::new(tcx, body, place_limit); // Perform the actual dataflow analysis. - let analysis = ConstAnalysis::new(tcx, body, map); - let mut results = - debug_span!("analyze").in_scope(|| analysis.iterate_to_fixpoint(tcx, body, None)); + let mut const_ = debug_span!("analyze") + .in_scope(|| ConstAnalysis::new(tcx, body, map).iterate_to_fixpoint(tcx, body, None)); // Collect results and patch the body afterwards. let mut visitor = Collector::new(tcx, &body.local_decls); - debug_span!("collect").in_scope(|| results.visit_reachable_with(body, &mut visitor)); + debug_span!("collect").in_scope(|| { + visit_reachable_results(body, &mut const_.analysis, &const_.results, &mut visitor) + }); let mut patch = visitor.patch; debug_span!("patch").in_scope(|| patch.visit_body_preserves_cfg(body)); } @@ -958,13 +959,13 @@ fn try_write_constant<'tcx>( interp_ok(()) } -impl<'mir, 'tcx> ResultsVisitor<'mir, 'tcx, ConstAnalysis<'_, 'tcx>> for Collector<'_, 'tcx> { - #[instrument(level = "trace", skip(self, results, statement))] +impl<'tcx> ResultsVisitor<'tcx, ConstAnalysis<'_, 'tcx>> for Collector<'_, 'tcx> { + #[instrument(level = "trace", skip(self, analysis, statement))] fn visit_after_early_statement_effect( &mut self, - results: &mut Results<'tcx, ConstAnalysis<'_, 'tcx>>, + analysis: &mut ConstAnalysis<'_, 'tcx>, state: &State>, - statement: &'mir Statement<'tcx>, + statement: &Statement<'tcx>, location: Location, ) { match &statement.kind { @@ -972,8 +973,8 @@ impl<'mir, 'tcx> ResultsVisitor<'mir, 'tcx, ConstAnalysis<'_, 'tcx>> for Collect OperandCollector { state, visitor: self, - ecx: &mut results.analysis.ecx, - map: &results.analysis.map, + ecx: &mut analysis.ecx, + map: &analysis.map, } .visit_rvalue(rvalue, location); } @@ -981,12 +982,12 @@ impl<'mir, 'tcx> ResultsVisitor<'mir, 'tcx, ConstAnalysis<'_, 'tcx>> for Collect } } - #[instrument(level = "trace", skip(self, results, statement))] + #[instrument(level = "trace", skip(self, analysis, statement))] fn visit_after_primary_statement_effect( &mut self, - results: &mut Results<'tcx, ConstAnalysis<'_, 'tcx>>, + analysis: &mut ConstAnalysis<'_, 'tcx>, state: &State>, - statement: &'mir Statement<'tcx>, + statement: &Statement<'tcx>, location: Location, ) { match statement.kind { @@ -994,12 +995,9 @@ impl<'mir, 'tcx> ResultsVisitor<'mir, 'tcx, ConstAnalysis<'_, 'tcx>> for Collect // Don't overwrite the assignment if it already uses a constant (to keep the span). } StatementKind::Assign(box (place, _)) => { - if let Some(value) = self.try_make_constant( - &mut results.analysis.ecx, - place, - state, - &results.analysis.map, - ) { + if let Some(value) = + self.try_make_constant(&mut analysis.ecx, place, state, &analysis.map) + { self.patch.assignments.insert(location, value); } } @@ -1009,18 +1007,13 @@ impl<'mir, 'tcx> ResultsVisitor<'mir, 'tcx, ConstAnalysis<'_, 'tcx>> for Collect fn visit_after_early_terminator_effect( &mut self, - results: &mut Results<'tcx, ConstAnalysis<'_, 'tcx>>, + analysis: &mut ConstAnalysis<'_, 'tcx>, state: &State>, - terminator: &'mir Terminator<'tcx>, + terminator: &Terminator<'tcx>, location: Location, ) { - OperandCollector { - state, - visitor: self, - ecx: &mut results.analysis.ecx, - map: &results.analysis.map, - } - .visit_terminator(terminator, location); + OperandCollector { state, visitor: self, ecx: &mut analysis.ecx, map: &analysis.map } + .visit_terminator(terminator, location); } } diff --git a/compiler/rustc_mir_transform/src/dest_prop.rs b/compiler/rustc_mir_transform/src/dest_prop.rs index 7395ad496dbdf..4c94a6c524e00 100644 --- a/compiler/rustc_mir_transform/src/dest_prop.rs +++ b/compiler/rustc_mir_transform/src/dest_prop.rs @@ -171,7 +171,7 @@ impl<'tcx> crate::MirPass<'tcx> for DestinationPropagation { let live = MaybeLiveLocals.iterate_to_fixpoint(tcx, body, Some("MaybeLiveLocals-DestProp")); let points = DenseLocationMap::new(body); - let mut live = save_as_intervals(&points, body, live); + let mut live = save_as_intervals(&points, body, live.analysis, live.results); // In order to avoid having to collect data for every single pair of locals in the body, we // do not allow doing more than one merge for places that are derived from the same local at diff --git a/compiler/rustc_mir_transform/src/early_otherwise_branch.rs b/compiler/rustc_mir_transform/src/early_otherwise_branch.rs index 57f7893be1b8c..c7feb9e949b4d 100644 --- a/compiler/rustc_mir_transform/src/early_otherwise_branch.rs +++ b/compiler/rustc_mir_transform/src/early_otherwise_branch.rs @@ -103,9 +103,8 @@ impl<'tcx> crate::MirPass<'tcx> for EarlyOtherwiseBranch { let mut should_cleanup = false; // Also consider newly generated bbs in the same pass - for i in 0..body.basic_blocks.len() { + for parent in body.basic_blocks.indices() { let bbs = &*body.basic_blocks; - let parent = BasicBlock::from_usize(i); let Some(opt_data) = evaluate_candidate(tcx, body, parent) else { continue }; trace!("SUCCESS: found optimization possibility to apply: {opt_data:?}"); @@ -224,7 +223,7 @@ impl<'tcx> crate::MirPass<'tcx> for EarlyOtherwiseBranch { // Since this optimization adds new basic blocks and invalidates others, // clean up the cfg to make it nicer for other passes if should_cleanup { - simplify_cfg(body); + simplify_cfg(tcx, body); } } diff --git a/compiler/rustc_mir_transform/src/elaborate_drop.rs b/compiler/rustc_mir_transform/src/elaborate_drop.rs index 0d8cf524661c8..73a58160a6aac 100644 --- a/compiler/rustc_mir_transform/src/elaborate_drop.rs +++ b/compiler/rustc_mir_transform/src/elaborate_drop.rs @@ -4,12 +4,12 @@ use rustc_abi::{FIRST_VARIANT, FieldIdx, VariantIdx}; use rustc_hir::lang_items::LangItem; use rustc_index::Idx; use rustc_middle::mir::*; -use rustc_middle::span_bug; use rustc_middle::ty::adjustment::PointerCoercion; use rustc_middle::ty::util::IntTypeExt; -use rustc_middle::ty::{self, GenericArgsRef, Ty, TyCtxt}; +use rustc_middle::ty::{self, GenericArg, GenericArgsRef, Ty, TyCtxt}; +use rustc_middle::{bug, span_bug, traits}; use rustc_span::DUMMY_SP; -use rustc_span::source_map::Spanned; +use rustc_span::source_map::{Spanned, dummy_spanned}; use tracing::{debug, instrument}; use crate::patch::MirPatch; @@ -94,6 +94,9 @@ pub(crate) trait DropElaborator<'a, 'tcx>: fmt::Debug { fn body(&self) -> &'a Body<'tcx>; fn tcx(&self) -> TyCtxt<'tcx>; fn typing_env(&self) -> ty::TypingEnv<'tcx>; + fn allow_async_drops(&self) -> bool; + + fn terminator_loc(&self, bb: BasicBlock) -> Location; // Drop logic @@ -149,6 +152,7 @@ where path: D::Path, succ: BasicBlock, unwind: Unwind, + dropline: Option, } /// "Elaborates" a drop of `place`/`path` and patches `bb`'s terminator to execute it. @@ -167,11 +171,12 @@ pub(crate) fn elaborate_drop<'b, 'tcx, D>( succ: BasicBlock, unwind: Unwind, bb: BasicBlock, + dropline: Option, ) where D: DropElaborator<'b, 'tcx>, 'tcx: 'b, { - DropCtxt { elaborator, source_info, place, path, succ, unwind }.elaborate_drop(bb) + DropCtxt { elaborator, source_info, place, path, succ, unwind, dropline }.elaborate_drop(bb) } impl<'a, 'b, 'tcx, D> DropCtxt<'a, 'b, 'tcx, D> @@ -195,6 +200,209 @@ where self.elaborator.tcx() } + // Generates three blocks: + // * #1:pin_obj_bb: call Pin::new_unchecked(&mut obj) + // * #2:call_drop_bb: fut = call obj.() OR call async_drop_in_place(obj) + // * #3:drop_term_bb: drop (obj, fut, ...) + // We keep async drop unexpanded to poll-loop here, to expand it later, at StateTransform - + // into states expand. + // call_destructor_only - to call only AsyncDrop::drop, not full async_drop_in_place glue + fn build_async_drop( + &mut self, + place: Place<'tcx>, + drop_ty: Ty<'tcx>, + bb: Option, + succ: BasicBlock, + unwind: Unwind, + dropline: Option, + call_destructor_only: bool, + ) -> BasicBlock { + let tcx = self.tcx(); + let span = self.source_info.span; + + let pin_obj_bb = bb.unwrap_or_else(|| { + self.elaborator.patch().new_block(BasicBlockData { + statements: vec![], + terminator: Some(Terminator { + // Temporary terminator, will be replaced by patch + source_info: self.source_info, + kind: TerminatorKind::Return, + }), + is_cleanup: false, + }) + }); + + let (fut_ty, drop_fn_def_id, trait_args) = if call_destructor_only { + // Resolving obj.() + let trait_ref = ty::TraitRef::new( + tcx, + tcx.require_lang_item(LangItem::AsyncDrop, Some(span)), + [drop_ty], + ); + let (drop_trait, trait_args) = match tcx.codegen_select_candidate( + ty::TypingEnv::fully_monomorphized().as_query_input(trait_ref), + ) { + Ok(traits::ImplSource::UserDefined(traits::ImplSourceUserDefinedData { + impl_def_id, + args, + .. + })) => (*impl_def_id, *args), + impl_source => { + span_bug!(span, "invalid `AsyncDrop` impl_source: {:?}", impl_source); + } + }; + let drop_fn_def_id = tcx.associated_item_def_ids(drop_trait)[0]; + let drop_fn = Ty::new_fn_def(tcx, drop_fn_def_id, trait_args); + let sig = drop_fn.fn_sig(tcx); + let sig = tcx.instantiate_bound_regions_with_erased(sig); + (sig.output(), drop_fn_def_id, trait_args) + } else { + // Resolving async_drop_in_place function for drop_ty + let drop_fn_def_id = tcx.require_lang_item(LangItem::AsyncDropInPlace, Some(span)); + let trait_args = tcx.mk_args(&[drop_ty.into()]); + let sig = tcx.fn_sig(drop_fn_def_id).instantiate(tcx, trait_args); + let sig = tcx.instantiate_bound_regions_with_erased(sig); + (sig.output(), drop_fn_def_id, trait_args) + }; + + let fut = Place::from(self.new_temp(fut_ty)); + + // #1:pin_obj_bb >>> obj_ref = &mut obj + let obj_ref_ty = Ty::new_mut_ref(tcx, tcx.lifetimes.re_erased, drop_ty); + let obj_ref_place = Place::from(self.new_temp(obj_ref_ty)); + + let term_loc = self.elaborator.terminator_loc(pin_obj_bb); + self.elaborator.patch().add_assign( + term_loc, + obj_ref_place, + Rvalue::Ref( + tcx.lifetimes.re_erased, + BorrowKind::Mut { kind: MutBorrowKind::Default }, + place, + ), + ); + + // pin_obj_place preparation + let pin_obj_new_unchecked_fn = Ty::new_fn_def( + tcx, + tcx.require_lang_item(LangItem::PinNewUnchecked, Some(span)), + [GenericArg::from(obj_ref_ty)], + ); + let pin_obj_ty = pin_obj_new_unchecked_fn.fn_sig(tcx).output().no_bound_vars().unwrap(); + let pin_obj_place = Place::from(self.new_temp(pin_obj_ty)); + let pin_obj_new_unchecked_fn = Operand::Constant(Box::new(ConstOperand { + span, + user_ty: None, + const_: Const::zero_sized(pin_obj_new_unchecked_fn), + })); + + // #3:drop_term_bb + let drop_term_bb = self.new_block( + unwind, + TerminatorKind::Drop { + place, + target: succ, + unwind: unwind.into_action(), + replace: false, + drop: dropline, + async_fut: Some(fut.local), + }, + ); + + // #2:call_drop_bb + let mut call_statements = Vec::new(); + let drop_arg = if call_destructor_only { + pin_obj_place + } else { + let ty::Adt(adt_def, adt_args) = pin_obj_ty.kind() else { + bug!(); + }; + let obj_ptr_ty = Ty::new_mut_ptr(tcx, drop_ty); + let obj_ptr_place = Place::from(self.new_temp(obj_ptr_ty)); + let unwrap_ty = adt_def.non_enum_variant().fields[FieldIdx::ZERO].ty(tcx, adt_args); + let addr = Rvalue::RawPtr( + RawPtrKind::Mut, + pin_obj_place.project_deeper( + &[ProjectionElem::Field(FieldIdx::ZERO, unwrap_ty), ProjectionElem::Deref], + tcx, + ), + ); + call_statements.push(self.assign(obj_ptr_place, addr)); + obj_ptr_place + }; + call_statements.push(Statement { + source_info: self.source_info, + kind: StatementKind::StorageLive(fut.local), + }); + + let call_drop_bb = self.new_block_with_statements( + unwind, + call_statements, + TerminatorKind::Call { + func: Operand::function_handle(tcx, drop_fn_def_id, trait_args, span), + args: [Spanned { node: Operand::Move(drop_arg), span: DUMMY_SP }].into(), + destination: fut, + target: Some(drop_term_bb), + unwind: unwind.into_action(), + call_source: CallSource::Misc, + fn_span: self.source_info.span, + }, + ); + + // StorageDead(fut) in self.succ block (at the begin) + self.elaborator.patch().add_statement( + Location { block: self.succ, statement_index: 0 }, + StatementKind::StorageDead(fut.local), + ); + + // #1:pin_obj_bb >>> call Pin::new_unchecked(&mut obj) + self.elaborator.patch().patch_terminator( + pin_obj_bb, + TerminatorKind::Call { + func: pin_obj_new_unchecked_fn, + args: [dummy_spanned(Operand::Move(obj_ref_place))].into(), + destination: pin_obj_place, + target: Some(call_drop_bb), + unwind: unwind.into_action(), + call_source: CallSource::Misc, + fn_span: span, + }, + ); + pin_obj_bb + } + + fn build_drop(&mut self, bb: BasicBlock) { + let drop_ty = self.place_ty(self.place); + if self.tcx().features().async_drop() + && self.elaborator.body().coroutine.is_some() + && self.elaborator.allow_async_drops() + && !self.elaborator.patch_ref().block(self.elaborator.body(), bb).is_cleanup + && drop_ty.needs_async_drop(self.tcx(), self.elaborator.typing_env()) + { + self.build_async_drop( + self.place, + drop_ty, + Some(bb), + self.succ, + self.unwind, + self.dropline, + false, + ); + } else { + self.elaborator.patch().patch_terminator( + bb, + TerminatorKind::Drop { + place: self.place, + target: self.succ, + unwind: self.unwind.into_action(), + replace: false, + drop: None, + async_fut: None, + }, + ); + } + } + /// This elaborates a single drop instruction, located at `bb`, and /// patches over it. /// @@ -222,15 +430,7 @@ where .patch_terminator(bb, TerminatorKind::Goto { target: self.succ }); } DropStyle::Static => { - self.elaborator.patch().patch_terminator( - bb, - TerminatorKind::Drop { - place: self.place, - target: self.succ, - unwind: self.unwind.into_action(), - replace: false, - }, - ); + self.build_drop(bb); } DropStyle::Conditional => { let drop_bb = self.complete_drop(self.succ, self.unwind); @@ -258,31 +458,27 @@ where ) -> Vec<(Place<'tcx>, Option)> { variant .fields - .iter() - .enumerate() - .map(|(i, f)| { - let field = FieldIdx::new(i); - let subpath = self.elaborator.field_subpath(variant_path, field); + .iter_enumerated() + .map(|(field_idx, field)| { + let subpath = self.elaborator.field_subpath(variant_path, field_idx); let tcx = self.tcx(); assert_eq!(self.elaborator.typing_env().typing_mode, ty::TypingMode::PostAnalysis); - // The type error for normalization may have been in dropck: see - // `compute_drop_data` in rustc_borrowck, in which case we wouldn't have - // deleted the MIR body and could have an error here as well. - let field_ty = match tcx - .try_normalize_erasing_regions(self.elaborator.typing_env(), f.ty(tcx, args)) - { + let field_ty = match tcx.try_normalize_erasing_regions( + self.elaborator.typing_env(), + field.ty(tcx, args), + ) { Ok(t) => t, Err(_) => Ty::new_error( self.tcx(), - self.elaborator - .body() - .tainted_by_errors - .expect("Error in drop elaboration not found by dropck."), + self.tcx().dcx().span_delayed_bug( + self.elaborator.body().span, + "Error normalizing in drop elaboration.", + ), ), }; - (tcx.mk_place_field(base_place, field, field_ty), subpath) + (tcx.mk_place_field(base_place, field_idx, field_ty), subpath) }) .collect() } @@ -293,6 +489,7 @@ where path: Option, succ: BasicBlock, unwind: Unwind, + dropline: Option, ) -> BasicBlock { if let Some(path) = path { debug!("drop_subpath: for std field {:?}", place); @@ -304,6 +501,7 @@ where place, succ, unwind, + dropline, } .elaborated_drop_block() } else { @@ -315,6 +513,7 @@ where place, succ, unwind, + dropline, // Using `self.path` here to condition the drop on // our own drop flag. path: self.path, @@ -329,25 +528,36 @@ where /// /// `unwind_ladder` is such a list of steps in reverse order, /// which is called if the matching step of the drop glue panics. + /// + /// `dropline_ladder` is a similar list of steps in reverse order, + /// which is called if the matching step of the drop glue will contain async drop + /// (expanded later to Yield) and the containing coroutine will be dropped at this point. fn drop_halfladder( &mut self, unwind_ladder: &[Unwind], + dropline_ladder: &[Option], mut succ: BasicBlock, fields: &[(Place<'tcx>, Option)], ) -> Vec { iter::once(succ) - .chain(fields.iter().rev().zip(unwind_ladder).map(|(&(place, path), &unwind_succ)| { - succ = self.drop_subpath(place, path, succ, unwind_succ); - succ - })) + .chain(itertools::izip!(fields.iter().rev(), unwind_ladder, dropline_ladder).map( + |(&(place, path), &unwind_succ, &dropline_to)| { + succ = self.drop_subpath(place, path, succ, unwind_succ, dropline_to); + succ + }, + )) .collect() } - fn drop_ladder_bottom(&mut self) -> (BasicBlock, Unwind) { + fn drop_ladder_bottom(&mut self) -> (BasicBlock, Unwind, Option) { // Clear the "master" drop flag at the end. This is needed // because the "master" drop protects the ADT's discriminant, // which is invalidated after the ADT is dropped. - (self.drop_flag_reset_block(DropFlagMode::Shallow, self.succ, self.unwind), self.unwind) + ( + self.drop_flag_reset_block(DropFlagMode::Shallow, self.succ, self.unwind), + self.unwind, + self.dropline, + ) } /// Creates a full drop ladder, consisting of 2 connected half-drop-ladders @@ -365,6 +575,22 @@ where /// .c2: /// ELAB(drop location.2 [target=`self.unwind`]) /// + /// For possible-async drops in coroutines we also need dropline ladder + /// .d0 (mainline): + /// ELAB(drop location.0 [target=.d1, unwind=.c1, drop=.e1]) + /// .d1 (mainline): + /// ELAB(drop location.1 [target=.d2, unwind=.c2, drop=.e2]) + /// .d2 (mainline): + /// ELAB(drop location.2 [target=`self.succ`, unwind=`self.unwind`, drop=`self.drop`]) + /// .c1 (unwind): + /// ELAB(drop location.1 [target=.c2]) + /// .c2 (unwind): + /// ELAB(drop location.2 [target=`self.unwind`]) + /// .e1 (dropline): + /// ELAB(drop location.1 [target=.e2, unwind=.c2]) + /// .e2 (dropline): + /// ELAB(drop location.2 [target=`self.drop`, unwind=`self.unwind`]) + /// /// NOTE: this does not clear the master drop flag, so you need /// to point succ/unwind on a `drop_ladder_bottom`. fn drop_ladder( @@ -372,8 +598,13 @@ where fields: Vec<(Place<'tcx>, Option)>, succ: BasicBlock, unwind: Unwind, - ) -> (BasicBlock, Unwind) { + dropline: Option, + ) -> (BasicBlock, Unwind, Option) { debug!("drop_ladder({:?}, {:?})", self, fields); + assert!( + if unwind.is_cleanup() { dropline.is_none() } else { true }, + "Dropline is set for cleanup drop ladder" + ); let mut fields = fields; fields.retain(|&(place, _)| { @@ -382,17 +613,28 @@ where debug!("drop_ladder - fields needing drop: {:?}", fields); + let dropline_ladder: Vec> = vec![None; fields.len() + 1]; let unwind_ladder = vec![Unwind::InCleanup; fields.len() + 1]; - let unwind_ladder: Vec<_> = if let Unwind::To(target) = unwind { - let halfladder = self.drop_halfladder(&unwind_ladder, target, &fields); + let unwind_ladder: Vec<_> = if let Unwind::To(succ) = unwind { + let halfladder = self.drop_halfladder(&unwind_ladder, &dropline_ladder, succ, &fields); halfladder.into_iter().map(Unwind::To).collect() } else { unwind_ladder }; + let dropline_ladder: Vec<_> = if let Some(succ) = dropline { + let halfladder = self.drop_halfladder(&unwind_ladder, &dropline_ladder, succ, &fields); + halfladder.into_iter().map(Some).collect() + } else { + dropline_ladder + }; - let normal_ladder = self.drop_halfladder(&unwind_ladder, succ, &fields); + let normal_ladder = self.drop_halfladder(&unwind_ladder, &dropline_ladder, succ, &fields); - (*normal_ladder.last().unwrap(), *unwind_ladder.last().unwrap()) + ( + *normal_ladder.last().unwrap(), + *unwind_ladder.last().unwrap(), + *dropline_ladder.last().unwrap(), + ) } fn open_drop_for_tuple(&mut self, tys: &[Ty<'tcx>]) -> BasicBlock { @@ -409,8 +651,8 @@ where }) .collect(); - let (succ, unwind) = self.drop_ladder_bottom(); - self.drop_ladder(fields, succ, unwind).0 + let (succ, unwind, dropline) = self.drop_ladder_bottom(); + self.drop_ladder(fields, succ, unwind, dropline).0 } /// Drops the T contained in a `Box` if it has not been moved out of @@ -421,6 +663,7 @@ where args: GenericArgsRef<'tcx>, succ: BasicBlock, unwind: Unwind, + dropline: Option, ) -> BasicBlock { // drop glue is sent straight to codegen // box cannot be directly dereferenced @@ -437,7 +680,7 @@ where let interior = self.tcx().mk_place_deref(Place::from(ptr_local)); let interior_path = self.elaborator.deref_subpath(self.path); - let do_drop_bb = self.drop_subpath(interior, interior_path, succ, unwind); + let do_drop_bb = self.drop_subpath(interior, interior_path, succ, unwind, dropline); let setup_bbd = BasicBlockData { statements: vec![self.assign( @@ -472,19 +715,22 @@ where let skip_contents = adt.is_union() || adt.is_manually_drop(); let contents_drop = if skip_contents { - (self.succ, self.unwind) + (self.succ, self.unwind, self.dropline) } else { self.open_drop_for_adt_contents(adt, args) }; if adt.is_box() { // we need to drop the inside of the box before running the destructor - let succ = self.destructor_call_block(contents_drop); + let succ = self.destructor_call_block_sync((contents_drop.0, contents_drop.1)); let unwind = contents_drop .1 - .map(|unwind| self.destructor_call_block((unwind, Unwind::InCleanup))); + .map(|unwind| self.destructor_call_block_sync((unwind, Unwind::InCleanup))); + let dropline = contents_drop + .2 + .map(|dropline| self.destructor_call_block_sync((dropline, contents_drop.1))); - self.open_drop_for_box_contents(adt, args, succ, unwind) + self.open_drop_for_box_contents(adt, args, succ, unwind, dropline) } else if adt.has_dtor(self.tcx()) { self.destructor_call_block(contents_drop) } else { @@ -496,14 +742,14 @@ where &mut self, adt: ty::AdtDef<'tcx>, args: GenericArgsRef<'tcx>, - ) -> (BasicBlock, Unwind) { - let (succ, unwind) = self.drop_ladder_bottom(); + ) -> (BasicBlock, Unwind, Option) { + let (succ, unwind, dropline) = self.drop_ladder_bottom(); if !adt.is_enum() { let fields = self.move_paths_for_fields(self.place, self.path, adt.variant(FIRST_VARIANT), args); - self.drop_ladder(fields, succ, unwind) + self.drop_ladder(fields, succ, unwind, dropline) } else { - self.open_drop_for_multivariant(adt, args, succ, unwind) + self.open_drop_for_multivariant(adt, args, succ, unwind, dropline) } } @@ -513,11 +759,14 @@ where args: GenericArgsRef<'tcx>, succ: BasicBlock, unwind: Unwind, - ) -> (BasicBlock, Unwind) { + dropline: Option, + ) -> (BasicBlock, Unwind, Option) { let mut values = Vec::with_capacity(adt.variants().len()); let mut normal_blocks = Vec::with_capacity(adt.variants().len()); let mut unwind_blocks = if unwind.is_cleanup() { None } else { Some(Vec::with_capacity(adt.variants().len())) }; + let mut dropline_blocks = + if dropline.is_none() { None } else { Some(Vec::with_capacity(adt.variants().len())) }; let mut have_otherwise_with_drop_glue = false; let mut have_otherwise = false; @@ -555,11 +804,16 @@ where let unwind_blocks = unwind_blocks.as_mut().unwrap(); let unwind_ladder = vec![Unwind::InCleanup; fields.len() + 1]; - let halfladder = self.drop_halfladder(&unwind_ladder, unwind, &fields); + let dropline_ladder: Vec> = vec![None; fields.len() + 1]; + let halfladder = + self.drop_halfladder(&unwind_ladder, &dropline_ladder, unwind, &fields); unwind_blocks.push(halfladder.last().cloned().unwrap()); } - let (normal, _) = self.drop_ladder(fields, succ, unwind); + let (normal, _, drop_bb) = self.drop_ladder(fields, succ, unwind, dropline); normal_blocks.push(normal); + if dropline.is_some() { + dropline_blocks.as_mut().unwrap().push(drop_bb.unwrap()); + } } else { have_otherwise = true; @@ -599,6 +853,9 @@ where Unwind::InCleanup, ) }), + dropline.map(|dropline| { + self.adt_switch_block(adt, dropline_blocks.unwrap(), &values, dropline, unwind) + }), ) } @@ -638,8 +895,8 @@ where self.drop_flag_test_block(switch_block, succ, unwind) } - fn destructor_call_block(&mut self, (succ, unwind): (BasicBlock, Unwind)) -> BasicBlock { - debug!("destructor_call_block({:?}, {:?})", self, succ); + fn destructor_call_block_sync(&mut self, (succ, unwind): (BasicBlock, Unwind)) -> BasicBlock { + debug!("destructor_call_block_sync({:?}, {:?})", self, succ); let tcx = self.tcx(); let drop_trait = tcx.require_lang_item(LangItem::Drop, None); let drop_fn = tcx.associated_item_def_ids(drop_trait)[0]; @@ -687,6 +944,30 @@ where self.drop_flag_test_block(destructor_block, succ, unwind) } + fn destructor_call_block( + &mut self, + (succ, unwind, dropline): (BasicBlock, Unwind, Option), + ) -> BasicBlock { + debug!("destructor_call_block({:?}, {:?})", self, succ); + let ty = self.place_ty(self.place); + if self.tcx().features().async_drop() + && self.elaborator.body().coroutine.is_some() + && self.elaborator.allow_async_drops() + && !unwind.is_cleanup() + && ty.is_async_drop(self.tcx(), self.elaborator.typing_env()) + { + let destructor_block = + self.build_async_drop(self.place, ty, None, succ, unwind, dropline, true); + + let block_start = Location { block: destructor_block, statement_index: 0 }; + self.elaborator.clear_drop_flag(block_start, self.path, DropFlagMode::Shallow); + + self.drop_flag_test_block(destructor_block, succ, unwind) + } else { + self.destructor_call_block_sync((succ, unwind)) + } + } + /// Create a loop that drops an array: /// /// ```text @@ -705,6 +986,7 @@ where len: Local, ety: Ty<'tcx>, unwind: Unwind, + dropline: Option, ) -> BasicBlock { let copy = |place: Place<'tcx>| Operand::Copy(place); let move_ = |place: Place<'tcx>| Operand::Move(place); @@ -748,16 +1030,35 @@ where }; let loop_block = self.elaborator.patch().new_block(loop_block); - self.elaborator.patch().patch_terminator( - drop_block, - TerminatorKind::Drop { - place: tcx.mk_place_deref(ptr), - target: loop_block, - unwind: unwind.into_action(), - replace: false, - }, - ); - + let place = tcx.mk_place_deref(ptr); + if self.tcx().features().async_drop() + && self.elaborator.body().coroutine.is_some() + && self.elaborator.allow_async_drops() + && !unwind.is_cleanup() + && ety.needs_async_drop(self.tcx(), self.elaborator.typing_env()) + { + self.build_async_drop( + place, + ety, + Some(drop_block), + loop_block, + unwind, + dropline, + false, + ); + } else { + self.elaborator.patch().patch_terminator( + drop_block, + TerminatorKind::Drop { + place, + target: loop_block, + unwind: unwind.into_action(), + replace: false, + drop: None, + async_fut: None, + }, + ); + } loop_block } @@ -824,8 +1125,8 @@ where (tcx.mk_place_elem(self.place, project), path) }) .collect::>(); - let (succ, unwind) = self.drop_ladder_bottom(); - return self.drop_ladder(fields, succ, unwind).0; + let (succ, unwind, dropline) = self.drop_ladder_bottom(); + return self.drop_ladder(fields, succ, unwind, dropline).0; } } @@ -859,7 +1160,7 @@ where &mut self.place, Place::from(slice_ptr).project_deeper(&[PlaceElem::Deref], tcx), ); - let slice_block = self.drop_loop_pair_for_slice(ety); + let slice_block = self.drop_loop_trio_for_slice(ety); self.place = array_place; delegate_block.terminator = Some(Terminator { @@ -869,18 +1170,22 @@ where self.elaborator.patch().new_block(delegate_block) } - /// Creates a pair of drop-loops of `place`, which drops its contents, even - /// in the case of 1 panic. - fn drop_loop_pair_for_slice(&mut self, ety: Ty<'tcx>) -> BasicBlock { - debug!("drop_loop_pair_for_slice({:?})", ety); + /// Creates a trio of drop-loops of `place`, which drops its contents, even + /// in the case of 1 panic or in the case of coroutine drop + fn drop_loop_trio_for_slice(&mut self, ety: Ty<'tcx>) -> BasicBlock { + debug!("drop_loop_trio_for_slice({:?})", ety); let tcx = self.tcx(); let len = self.new_temp(tcx.types.usize); let cur = self.new_temp(tcx.types.usize); - let unwind = - self.unwind.map(|unwind| self.drop_loop(unwind, cur, len, ety, Unwind::InCleanup)); + let unwind = self + .unwind + .map(|unwind| self.drop_loop(unwind, cur, len, ety, Unwind::InCleanup, None)); + + let dropline = + self.dropline.map(|dropline| self.drop_loop(dropline, cur, len, ety, unwind, None)); - let loop_block = self.drop_loop(self.succ, cur, len, ety, unwind); + let loop_block = self.drop_loop(self.succ, cur, len, ety, unwind, dropline); let [PlaceElem::Deref] = self.place.projection.as_slice() else { span_bug!( @@ -944,7 +1249,7 @@ where let size = size.try_to_target_usize(self.tcx()); self.open_drop_for_array(ty, *ety, size) } - ty::Slice(ety) => self.drop_loop_pair_for_slice(*ety), + ty::Slice(ety) => self.drop_loop_trio_for_slice(*ety), _ => span_bug!(self.source_info.span, "open drop from non-ADT `{:?}`", ty), } @@ -981,21 +1286,53 @@ where fn elaborated_drop_block(&mut self) -> BasicBlock { debug!("elaborated_drop_block({:?})", self); - let blk = self.drop_block(self.succ, self.unwind); + let blk = self.drop_block_simple(self.succ, self.unwind); self.elaborate_drop(blk); blk } - fn drop_block(&mut self, target: BasicBlock, unwind: Unwind) -> BasicBlock { + fn drop_block_simple(&mut self, target: BasicBlock, unwind: Unwind) -> BasicBlock { let block = TerminatorKind::Drop { place: self.place, target, unwind: unwind.into_action(), replace: false, + drop: self.dropline, + async_fut: None, }; self.new_block(unwind, block) } + fn drop_block(&mut self, target: BasicBlock, unwind: Unwind) -> BasicBlock { + let drop_ty = self.place_ty(self.place); + if self.tcx().features().async_drop() + && self.elaborator.body().coroutine.is_some() + && self.elaborator.allow_async_drops() + && !unwind.is_cleanup() + && drop_ty.needs_async_drop(self.tcx(), self.elaborator.typing_env()) + { + self.build_async_drop( + self.place, + drop_ty, + None, + self.succ, + unwind, + self.dropline, + false, + ) + } else { + let block = TerminatorKind::Drop { + place: self.place, + target, + unwind: unwind.into_action(), + replace: false, + drop: None, + async_fut: None, + }; + self.new_block(unwind, block) + } + } + fn goto_block(&mut self, target: BasicBlock, unwind: Unwind) -> BasicBlock { let block = TerminatorKind::Goto { target }; self.new_block(unwind, block) @@ -1037,6 +1374,19 @@ where }) } + fn new_block_with_statements( + &mut self, + unwind: Unwind, + statements: Vec>, + k: TerminatorKind<'tcx>, + ) -> BasicBlock { + self.elaborator.patch().new_block(BasicBlockData { + statements, + terminator: Some(Terminator { source_info: self.source_info, kind: k }), + is_cleanup: unwind.is_cleanup(), + }) + } + fn new_temp(&mut self, ty: Ty<'tcx>) -> Local { self.elaborator.patch().new_temp(ty, self.source_info.span) } diff --git a/compiler/rustc_mir_transform/src/elaborate_drops.rs b/compiler/rustc_mir_transform/src/elaborate_drops.rs index 530c72ca549a6..42c8cb0b906e8 100644 --- a/compiler/rustc_mir_transform/src/elaborate_drops.rs +++ b/compiler/rustc_mir_transform/src/elaborate_drops.rs @@ -158,6 +158,14 @@ impl<'a, 'tcx> DropElaborator<'a, 'tcx> for ElaborateDropsCtxt<'a, 'tcx> { self.env.typing_env } + fn allow_async_drops(&self) -> bool { + true + } + + fn terminator_loc(&self, bb: BasicBlock) -> Location { + self.patch.terminator_loc(self.body, bb) + } + #[instrument(level = "debug", skip(self), ret)] fn drop_style(&self, path: Self::Path, mode: DropFlagMode) -> DropStyle { let ((maybe_init, maybe_uninit), multipart) = match mode { @@ -328,7 +336,9 @@ impl<'a, 'tcx> ElaborateDropsCtxt<'a, 'tcx> { // This function should mirror what `collect_drop_flags` does. for (bb, data) in self.body.basic_blocks.iter_enumerated() { let terminator = data.terminator(); - let TerminatorKind::Drop { place, target, unwind, replace } = terminator.kind else { + let TerminatorKind::Drop { place, target, unwind, replace, drop, async_fut: _ } = + terminator.kind + else { continue; }; @@ -364,7 +374,16 @@ impl<'a, 'tcx> ElaborateDropsCtxt<'a, 'tcx> { } }; self.init_data.seek_before(self.body.terminator_loc(bb)); - elaborate_drop(self, terminator.source_info, place, path, target, unwind, bb) + elaborate_drop( + self, + terminator.source_info, + place, + path, + target, + unwind, + bb, + drop, + ) } LookupResult::Parent(None) => {} LookupResult::Parent(Some(_)) => { diff --git a/compiler/rustc_mir_transform/src/errors.rs b/compiler/rustc_mir_transform/src/errors.rs index 29698b0c2e445..5b03a4987ed71 100644 --- a/compiler/rustc_mir_transform/src/errors.rs +++ b/compiler/rustc_mir_transform/src/errors.rs @@ -158,6 +158,26 @@ pub(crate) struct MustNotSuspendReason { pub reason: String, } +pub(crate) struct UnnecessaryTransmute { + pub span: Span, + pub sugg: String, + pub help: Option<&'static str>, +} + +// Needed for def_path_str +impl<'a> LintDiagnostic<'a, ()> for UnnecessaryTransmute { + fn decorate_lint<'b>(self, diag: &'b mut rustc_errors::Diag<'a, ()>) { + diag.primary_message(fluent::mir_transform_unnecessary_transmute); + diag.span_suggestion( + self.span, + "replace this with", + self.sugg, + lint::Applicability::MachineApplicable, + ); + self.help.map(|help| diag.help(help)); + } +} + #[derive(LintDiagnostic)] #[diag(mir_transform_undefined_transmute)] #[note] diff --git a/compiler/rustc_mir_transform/src/gvn.rs b/compiler/rustc_mir_transform/src/gvn.rs index 0a54c780f31e5..8b8d1efbbd2e0 100644 --- a/compiler/rustc_mir_transform/src/gvn.rs +++ b/compiler/rustc_mir_transform/src/gvn.rs @@ -3,14 +3,16 @@ //! MIR may contain repeated and/or redundant computations. The objective of this pass is to detect //! such redundancies and re-use the already-computed result when possible. //! -//! In a first pass, we compute a symbolic representation of values that are assigned to SSA -//! locals. This symbolic representation is defined by the `Value` enum. Each produced instance of -//! `Value` is interned as a `VnIndex`, which allows us to cheaply compute identical values. -//! //! From those assignments, we construct a mapping `VnIndex -> Vec<(Local, Location)>` of available //! values, the locals in which they are stored, and the assignment location. //! -//! In a second pass, we traverse all (non SSA) assignments `x = rvalue` and operands. For each +//! We traverse all assignments `x = rvalue` and operands. +//! +//! For each SSA one, we compute a symbolic representation of values that are assigned to SSA +//! locals. This symbolic representation is defined by the `Value` enum. Each produced instance of +//! `Value` is interned as a `VnIndex`, which allows us to cheaply compute identical values. +//! +//! For each non-SSA //! one, we compute the `VnIndex` of the rvalue. If this `VnIndex` is associated to a constant, we //! replace the rvalue/operand by that constant. Otherwise, if there is an SSA local `y` //! associated to this `VnIndex`, and if its definition location strictly dominates the assignment @@ -91,7 +93,7 @@ use rustc_const_eval::interpret::{ ImmTy, Immediate, InterpCx, MemPlaceMeta, MemoryKind, OpTy, Projectable, Scalar, intern_const_alloc_for_constprop, }; -use rustc_data_structures::fx::FxIndexSet; +use rustc_data_structures::fx::{FxIndexSet, MutableValues}; use rustc_data_structures::graph::dominators::Dominators; use rustc_hir::def::DefKind; use rustc_index::bit_set::DenseBitSet; @@ -107,7 +109,7 @@ use rustc_span::def_id::DefId; use smallvec::SmallVec; use tracing::{debug, instrument, trace}; -use crate::ssa::{AssignedValue, SsaLocals}; +use crate::ssa::SsaLocals; pub(super) struct GVN; @@ -126,31 +128,11 @@ impl<'tcx> crate::MirPass<'tcx> for GVN { let dominators = body.basic_blocks.dominators().clone(); let mut state = VnState::new(tcx, body, typing_env, &ssa, dominators, &body.local_decls); - ssa.for_each_assignment_mut( - body.basic_blocks.as_mut_preserves_cfg(), - |local, value, location| { - let value = match value { - // We do not know anything of this assigned value. - AssignedValue::Arg | AssignedValue::Terminator => None, - // Try to get some insight. - AssignedValue::Rvalue(rvalue) => { - let value = state.simplify_rvalue(rvalue, location); - // FIXME(#112651) `rvalue` may have a subtype to `local`. We can only mark - // `local` as reusable if we have an exact type match. - if state.local_decls[local].ty != rvalue.ty(state.local_decls, tcx) { - return; - } - value - } - }; - // `next_opaque` is `Some`, so `new_opaque` must return `Some`. - let value = value.or_else(|| state.new_opaque()).unwrap(); - state.assign(local, value); - }, - ); - // Stop creating opaques during replacement as it is useless. - state.next_opaque = None; + for local in body.args_iter().filter(|&local| ssa.is_ssa(local)) { + let opaque = state.new_opaque(); + state.assign(local, opaque); + } let reverse_postorder = body.basic_blocks.reverse_postorder().to_vec(); for bb in reverse_postorder { @@ -250,14 +232,14 @@ struct VnState<'body, 'tcx> { locals: IndexVec>, /// Locals that are assigned that value. // This vector does not hold all the values of `VnIndex` that we create. - // It stops at the largest value created in the first phase of collecting assignments. rev_locals: IndexVec>, values: FxIndexSet>, /// Values evaluated as constants if possible. evaluated: IndexVec>>, /// Counter to generate different values. - /// This is an option to stop creating opaques during replacement. - next_opaque: Option, + next_opaque: usize, + /// Cache the deref values. + derefs: Vec, /// Cache the value of the `unsized_locals` features, to avoid fetching it repeatedly in a loop. feature_unsized_locals: bool, ssa: &'body SsaLocals, @@ -289,7 +271,8 @@ impl<'body, 'tcx> VnState<'body, 'tcx> { rev_locals: IndexVec::with_capacity(num_values), values: FxIndexSet::with_capacity_and_hasher(num_values, Default::default()), evaluated: IndexVec::with_capacity(num_values), - next_opaque: Some(1), + next_opaque: 1, + derefs: Vec::new(), feature_unsized_locals: tcx.features().unsized_locals(), ssa, dominators, @@ -310,32 +293,31 @@ impl<'body, 'tcx> VnState<'body, 'tcx> { let evaluated = self.eval_to_const(index); let _index = self.evaluated.push(evaluated); debug_assert_eq!(index, _index); - // No need to push to `rev_locals` if we finished listing assignments. - if self.next_opaque.is_some() { - let _index = self.rev_locals.push(SmallVec::new()); - debug_assert_eq!(index, _index); - } + let _index = self.rev_locals.push(SmallVec::new()); + debug_assert_eq!(index, _index); } index } + fn next_opaque(&mut self) -> usize { + let next_opaque = self.next_opaque; + self.next_opaque += 1; + next_opaque + } + /// Create a new `Value` for which we have no information at all, except that it is distinct /// from all the others. #[instrument(level = "trace", skip(self), ret)] - fn new_opaque(&mut self) -> Option { - let next_opaque = self.next_opaque.as_mut()?; - let value = Value::Opaque(*next_opaque); - *next_opaque += 1; - Some(self.insert(value)) + fn new_opaque(&mut self) -> VnIndex { + let value = Value::Opaque(self.next_opaque()); + self.insert(value) } /// Create a new `Value::Address` distinct from all the others. #[instrument(level = "trace", skip(self), ret)] - fn new_pointer(&mut self, place: Place<'tcx>, kind: AddressKind) -> Option { - let next_opaque = self.next_opaque.as_mut()?; - let value = Value::Address { place, kind, provenance: *next_opaque }; - *next_opaque += 1; - Some(self.insert(value)) + fn new_pointer(&mut self, place: Place<'tcx>, kind: AddressKind) -> VnIndex { + let value = Value::Address { place, kind, provenance: self.next_opaque() }; + self.insert(value) } fn get(&self, index: VnIndex) -> &Value<'tcx> { @@ -345,6 +327,7 @@ impl<'body, 'tcx> VnState<'body, 'tcx> { /// Record that `local` is assigned `value`. `local` must be SSA. #[instrument(level = "trace", skip(self))] fn assign(&mut self, local: Local, value: VnIndex) { + debug_assert!(self.ssa.is_ssa(local)); self.locals[local] = Some(value); // Only register the value if its type is `Sized`, as we will emit copies of it. @@ -355,21 +338,19 @@ impl<'body, 'tcx> VnState<'body, 'tcx> { } } - fn insert_constant(&mut self, value: Const<'tcx>) -> Option { + fn insert_constant(&mut self, value: Const<'tcx>) -> VnIndex { let disambiguator = if value.is_deterministic() { // The constant is deterministic, no need to disambiguate. 0 } else { // Multiple mentions of this constant will yield different values, // so assign a different `disambiguator` to ensure they do not get the same `VnIndex`. - let next_opaque = self.next_opaque.as_mut()?; - let disambiguator = *next_opaque; - *next_opaque += 1; + let disambiguator = self.next_opaque(); // `disambiguator: 0` means deterministic. debug_assert_ne!(disambiguator, 0); disambiguator }; - Some(self.insert(Value::Constant { value, disambiguator })) + self.insert(Value::Constant { value, disambiguator }) } fn insert_bool(&mut self, flag: bool) -> VnIndex { @@ -390,6 +371,19 @@ impl<'body, 'tcx> VnState<'body, 'tcx> { self.insert(Value::Aggregate(AggregateTy::Tuple, VariantIdx::ZERO, values)) } + fn insert_deref(&mut self, value: VnIndex) -> VnIndex { + let value = self.insert(Value::Projection(value, ProjectionElem::Deref)); + self.derefs.push(value); + value + } + + fn invalidate_derefs(&mut self) { + for deref in std::mem::take(&mut self.derefs) { + let opaque = self.next_opaque(); + *self.values.get_index_mut2(deref.index()).unwrap() = Value::Opaque(opaque); + } + } + #[instrument(level = "trace", skip(self), ret)] fn eval_to_const(&mut self, value: VnIndex) -> Option> { use Value::*; @@ -648,15 +642,13 @@ impl<'body, 'tcx> VnState<'body, 'tcx> { let proj = match proj { ProjectionElem::Deref => { let ty = place.ty(self.local_decls, self.tcx).ty; - // unsound: https://github.com/rust-lang/rust/issues/130853 - if self.tcx.sess.opts.unstable_opts.unsound_mir_opts - && let Some(Mutability::Not) = ty.ref_mutability() + if let Some(Mutability::Not) = ty.ref_mutability() && let Some(pointee_ty) = ty.builtin_deref(true) && pointee_ty.is_freeze(self.tcx, self.typing_env()) { // An immutable borrow `_x` always points to the same value for the // lifetime of the borrow, so we can merge all instances of `*_x`. - ProjectionElem::Deref + return Some(self.insert_deref(value)); } else { return None; } @@ -830,7 +822,7 @@ impl<'body, 'tcx> VnState<'body, 'tcx> { location: Location, ) -> Option { match *operand { - Operand::Constant(ref constant) => self.insert_constant(constant.const_), + Operand::Constant(ref constant) => Some(self.insert_constant(constant.const_)), Operand::Copy(ref mut place) | Operand::Move(ref mut place) => { let value = self.simplify_place_value(place, location)?; if let Some(const_) = self.try_as_constant(value) { @@ -866,11 +858,11 @@ impl<'body, 'tcx> VnState<'body, 'tcx> { Rvalue::Aggregate(..) => return self.simplify_aggregate(rvalue, location), Rvalue::Ref(_, borrow_kind, ref mut place) => { self.simplify_place_projection(place, location); - return self.new_pointer(*place, AddressKind::Ref(borrow_kind)); + return Some(self.new_pointer(*place, AddressKind::Ref(borrow_kind))); } Rvalue::RawPtr(mutbl, ref mut place) => { self.simplify_place_projection(place, location); - return self.new_pointer(*place, AddressKind::Address(mutbl)); + return Some(self.new_pointer(*place, AddressKind::Address(mutbl))); } Rvalue::WrapUnsafeBinder(ref mut op, ty) => { let value = self.simplify_operand(op, location)?; @@ -988,27 +980,17 @@ impl<'body, 'tcx> VnState<'body, 'tcx> { } } - let tcx = self.tcx; - let mut projection = SmallVec::<[PlaceElem<'tcx>; 1]>::new(); - loop { - if let Some(local) = self.try_as_local(copy_from_local_value, location) { - projection.reverse(); - let place = Place { local, projection: tcx.mk_place_elems(projection.as_slice()) }; - if rvalue.ty(self.local_decls, tcx) == place.ty(self.local_decls, tcx).ty { - self.reused_locals.insert(local); - *rvalue = Rvalue::Use(Operand::Copy(place)); - return Some(copy_from_value); - } - return None; - } else if let Value::Projection(pointer, proj) = *self.get(copy_from_local_value) - && let Some(proj) = self.try_as_place_elem(proj, location) - { - projection.push(proj); - copy_from_local_value = pointer; - } else { - return None; + // Allow introducing places with non-constant offsets, as those are still better than + // reconstructing an aggregate. + if let Some(place) = self.try_as_place(copy_from_local_value, location, true) { + if rvalue.ty(self.local_decls, self.tcx) == place.ty(self.local_decls, self.tcx).ty { + self.reused_locals.insert(place.local); + *rvalue = Rvalue::Use(Operand::Copy(place)); + return Some(copy_from_local_value); } } + + None } fn simplify_aggregate( @@ -1034,7 +1016,7 @@ impl<'body, 'tcx> VnState<'body, 'tcx> { if is_zst { let ty = rvalue.ty(self.local_decls, tcx); - return self.insert_constant(Const::zero_sized(ty)); + return Some(self.insert_constant(Const::zero_sized(ty))); } } @@ -1063,11 +1045,10 @@ impl<'body, 'tcx> VnState<'body, 'tcx> { } }; - let fields: Option> = field_ops + let mut fields: Vec<_> = field_ops .iter_mut() - .map(|op| self.simplify_operand(op, location).or_else(|| self.new_opaque())) + .map(|op| self.simplify_operand(op, location).unwrap_or_else(|| self.new_opaque())) .collect(); - let mut fields = fields?; if let AggregateTy::RawPtr { data_pointer_ty, output_pointer_ty } = &mut ty { let mut was_updated = false; @@ -1107,9 +1088,7 @@ impl<'body, 'tcx> VnState<'body, 'tcx> { } } - // unsound: https://github.com/rust-lang/rust/issues/132353 - if tcx.sess.opts.unstable_opts.unsound_mir_opts - && let AggregateTy::Def(_, _) = ty + if let AggregateTy::Def(_, _) = ty && let Some(value) = self.simplify_aggregate_to_copy(rvalue, location, &fields, variant_index) { @@ -1195,7 +1174,7 @@ impl<'body, 'tcx> VnState<'body, 'tcx> { ) if let ty::Slice(..) = to.builtin_deref(true).unwrap().kind() && let ty::Array(_, len) = from.builtin_deref(true).unwrap().kind() => { - return self.insert_constant(Const::Ty(self.tcx.types.usize, *len)); + return Some(self.insert_constant(Const::Ty(self.tcx.types.usize, *len))); } _ => Value::UnaryOp(op, arg_index), }; @@ -1391,7 +1370,7 @@ impl<'body, 'tcx> VnState<'body, 'tcx> { if let CastKind::PointerCoercion(ReifyFnPointer | ClosureFnPointer(_), _) = kind { // Each reification of a generic fn may get a different pointer. // Do not try to merge them. - return self.new_opaque(); + return Some(self.new_opaque()); } let mut was_ever_updated = false; @@ -1507,7 +1486,7 @@ impl<'body, 'tcx> VnState<'body, 'tcx> { // Trivial case: we are fetching a statically known length. let place_ty = place.ty(self.local_decls, self.tcx).ty; if let ty::Array(_, len) = place_ty.kind() { - return self.insert_constant(Const::Ty(self.tcx.types.usize, *len)); + return Some(self.insert_constant(Const::Ty(self.tcx.types.usize, *len))); } let mut inner = self.simplify_place_value(place, location)?; @@ -1529,7 +1508,7 @@ impl<'body, 'tcx> VnState<'body, 'tcx> { && let Some(to) = to.builtin_deref(true) && let ty::Slice(..) = to.kind() { - return self.insert_constant(Const::Ty(self.tcx.types.usize, *len)); + return Some(self.insert_constant(Const::Ty(self.tcx.types.usize, *len))); } // Fallback: a symbolic `Len`. @@ -1683,14 +1662,14 @@ fn op_to_prop_const<'tcx>( } impl<'tcx> VnState<'_, 'tcx> { - /// If either [`Self::try_as_constant`] as [`Self::try_as_local`] succeeds, + /// If either [`Self::try_as_constant`] as [`Self::try_as_place`] succeeds, /// returns that result as an [`Operand`]. fn try_as_operand(&mut self, index: VnIndex, location: Location) -> Option> { if let Some(const_) = self.try_as_constant(index) { Some(Operand::Constant(Box::new(const_))) - } else if let Some(local) = self.try_as_local(index, location) { - self.reused_locals.insert(local); - Some(Operand::Copy(local.into())) + } else if let Some(place) = self.try_as_place(index, location, false) { + self.reused_locals.insert(place.local); + Some(Operand::Copy(place)) } else { None } @@ -1723,6 +1702,35 @@ impl<'tcx> VnState<'_, 'tcx> { Some(ConstOperand { span: DUMMY_SP, user_ty: None, const_ }) } + /// Construct a place which holds the same value as `index` and for which all locals strictly + /// dominate `loc`. If you used this place, add its base local to `reused_locals` to remove + /// storage statements. + #[instrument(level = "trace", skip(self), ret)] + fn try_as_place( + &mut self, + mut index: VnIndex, + loc: Location, + allow_complex_projection: bool, + ) -> Option> { + let mut projection = SmallVec::<[PlaceElem<'tcx>; 1]>::new(); + loop { + if let Some(local) = self.try_as_local(index, loc) { + projection.reverse(); + let place = + Place { local, projection: self.tcx.mk_place_elems(projection.as_slice()) }; + return Some(place); + } else if let Value::Projection(pointer, proj) = *self.get(index) + && (allow_complex_projection || proj.is_stable_offset()) + && let Some(proj) = self.try_as_place_elem(proj, loc) + { + projection.push(proj); + index = pointer; + } else { + return None; + } + } + } + /// If there is a local which is assigned `index`, and its assignment strictly dominates `loc`, /// return it. If you used this local, add it to `reused_locals` to remove storage statements. fn try_as_local(&mut self, index: VnIndex, loc: Location) -> Option { @@ -1739,41 +1747,71 @@ impl<'tcx> MutVisitor<'tcx> for VnState<'_, 'tcx> { self.tcx } - fn visit_place(&mut self, place: &mut Place<'tcx>, _: PlaceContext, location: Location) { + fn visit_place(&mut self, place: &mut Place<'tcx>, context: PlaceContext, location: Location) { self.simplify_place_projection(place, location); + if context.is_mutating_use() && !place.projection.is_empty() { + // Non-local mutation maybe invalidate deref. + self.invalidate_derefs(); + } + self.super_place(place, context, location); } fn visit_operand(&mut self, operand: &mut Operand<'tcx>, location: Location) { self.simplify_operand(operand, location); + self.super_operand(operand, location); } fn visit_statement(&mut self, stmt: &mut Statement<'tcx>, location: Location) { if let StatementKind::Assign(box (ref mut lhs, ref mut rvalue)) = stmt.kind { self.simplify_place_projection(lhs, location); - // Do not try to simplify a constant, it's already in canonical shape. - if matches!(rvalue, Rvalue::Use(Operand::Constant(_))) { - return; + let value = self.simplify_rvalue(rvalue, location); + let value = if let Some(local) = lhs.as_local() + && self.ssa.is_ssa(local) + // FIXME(#112651) `rvalue` may have a subtype to `local`. We can only mark + // `local` as reusable if we have an exact type match. + && self.local_decls[local].ty == rvalue.ty(self.local_decls, self.tcx) + { + let value = value.unwrap_or_else(|| self.new_opaque()); + self.assign(local, value); + Some(value) + } else { + value + }; + if let Some(value) = value { + if let Some(const_) = self.try_as_constant(value) { + *rvalue = Rvalue::Use(Operand::Constant(Box::new(const_))); + } else if let Some(place) = self.try_as_place(value, location, false) + && *rvalue != Rvalue::Use(Operand::Move(place)) + && *rvalue != Rvalue::Use(Operand::Copy(place)) + { + *rvalue = Rvalue::Use(Operand::Copy(place)); + self.reused_locals.insert(place.local); + } } + } + self.super_statement(stmt, location); + } - let value = lhs - .as_local() - .and_then(|local| self.locals[local]) - .or_else(|| self.simplify_rvalue(rvalue, location)); - let Some(value) = value else { return }; - - if let Some(const_) = self.try_as_constant(value) { - *rvalue = Rvalue::Use(Operand::Constant(Box::new(const_))); - } else if let Some(local) = self.try_as_local(value, location) - && *rvalue != Rvalue::Use(Operand::Move(local.into())) + fn visit_terminator(&mut self, terminator: &mut Terminator<'tcx>, location: Location) { + if let Terminator { kind: TerminatorKind::Call { destination, .. }, .. } = terminator { + if let Some(local) = destination.as_local() + && self.ssa.is_ssa(local) { - *rvalue = Rvalue::Use(Operand::Copy(local.into())); - self.reused_locals.insert(local); + let opaque = self.new_opaque(); + self.assign(local, opaque); } - - return; } - self.super_statement(stmt, location); + // Function calls and ASM may invalidate (nested) derefs. We must handle them carefully. + // Currently, only preserving derefs for trivial terminators like SwitchInt and Goto. + let safe_to_preserve_derefs = matches!( + terminator.kind, + TerminatorKind::SwitchInt { .. } | TerminatorKind::Goto { .. } + ); + if !safe_to_preserve_derefs { + self.invalidate_derefs(); + } + self.super_terminator(terminator, location); } } diff --git a/compiler/rustc_mir_transform/src/inline.rs b/compiler/rustc_mir_transform/src/inline.rs index 0ab24e48d443c..ea7d5a0fd4afe 100644 --- a/compiler/rustc_mir_transform/src/inline.rs +++ b/compiler/rustc_mir_transform/src/inline.rs @@ -63,7 +63,7 @@ impl<'tcx> crate::MirPass<'tcx> for Inline { let _guard = span.enter(); if inline::>(tcx, body) { debug!("running simplify cfg on {:?}", body.source); - simplify_cfg(body); + simplify_cfg(tcx, body); deref_finder(tcx, body); } } @@ -99,7 +99,7 @@ impl<'tcx> crate::MirPass<'tcx> for ForceInline { let _guard = span.enter(); if inline::>(tcx, body) { debug!("running simplify cfg on {:?}", body.source); - simplify_cfg(body); + simplify_cfg(tcx, body); deref_finder(tcx, body); } } @@ -413,7 +413,15 @@ impl<'tcx> Inliner<'tcx> for NormalInliner<'tcx> { let term = blk.terminator(); let caller_attrs = tcx.codegen_fn_attrs(self.caller_def_id()); - if let TerminatorKind::Drop { ref place, target, unwind, replace: _ } = term.kind { + if let TerminatorKind::Drop { + ref place, + target, + unwind, + replace: _, + drop: _, + async_fut: _, + } = term.kind + { work_list.push(target); // If the place doesn't actually need dropping, treat it like a regular goto. @@ -726,6 +734,20 @@ fn check_mir_is_available<'tcx, I: Inliner<'tcx>>( debug!("still needs substitution"); return Err("implementation limitation -- HACK for dropping polymorphic type"); } + InstanceKind::AsyncDropGlue(_, ty) | InstanceKind::AsyncDropGlueCtorShim(_, ty) => { + return if ty.still_further_specializable() { + Err("still needs substitution") + } else { + Ok(()) + }; + } + InstanceKind::FutureDropPollShim(_, ty, ty2) => { + return if ty.still_further_specializable() || ty2.still_further_specializable() { + Err("still needs substitution") + } else { + Ok(()) + }; + } // This cannot result in an immediate cycle since the callee MIR is a shim, which does // not get any optimizations run on it. Any subsequent inlining may cause cycles, but we @@ -740,7 +762,7 @@ fn check_mir_is_available<'tcx, I: Inliner<'tcx>>( | InstanceKind::CloneShim(..) | InstanceKind::ThreadLocalShim(..) | InstanceKind::FnPtrAddrShim(..) - | InstanceKind::AsyncDropGlueCtorShim(..) => return Ok(()), + | InstanceKind::EiiShim { .. } => return Ok(()), } if inliner.tcx().is_constructor(callee_def_id) { @@ -903,9 +925,9 @@ fn inline_call<'tcx, I: Inliner<'tcx>>( let mut integrator = Integrator { args: &args, - new_locals: Local::new(caller_body.local_decls.len()).., - new_scopes: SourceScope::new(caller_body.source_scopes.len()).., - new_blocks: BasicBlock::new(caller_body.basic_blocks.len()).., + new_locals: caller_body.local_decls.next_index().., + new_scopes: caller_body.source_scopes.next_index().., + new_blocks: caller_body.basic_blocks.next_index().., destination: destination_local, callsite_scope: caller_body.source_scopes[callsite.source_info.scope].clone(), callsite, @@ -1169,7 +1191,7 @@ impl Integrator<'_, '_> { if idx < self.args.len() { self.args[idx] } else { - Local::new(self.new_locals.start.index() + (idx - self.args.len())) + self.new_locals.start + (idx - self.args.len()) } }; trace!("mapping local `{:?}` to `{:?}`", local, new); @@ -1177,13 +1199,13 @@ impl Integrator<'_, '_> { } fn map_scope(&self, scope: SourceScope) -> SourceScope { - let new = SourceScope::new(self.new_scopes.start.index() + scope.index()); + let new = self.new_scopes.start + scope.index(); trace!("mapping scope `{:?}` to `{:?}`", scope, new); new } fn map_block(&self, block: BasicBlock) -> BasicBlock { - let new = BasicBlock::new(self.new_blocks.start.index() + block.index()); + let new = self.new_blocks.start + block.index(); trace!("mapping block `{:?}` to `{:?}`", block, new); new } @@ -1345,8 +1367,8 @@ fn try_instance_mir<'tcx>( tcx: TyCtxt<'tcx>, instance: InstanceKind<'tcx>, ) -> Result<&'tcx Body<'tcx>, &'static str> { - if let ty::InstanceKind::DropGlue(_, Some(ty)) - | ty::InstanceKind::AsyncDropGlueCtorShim(_, Some(ty)) = instance + if let ty::InstanceKind::DropGlue(_, Some(ty)) | ty::InstanceKind::AsyncDropGlueCtorShim(_, ty) = + instance && let ty::Adt(def, args) = ty.kind() { let fields = def.all_fields(); diff --git a/compiler/rustc_mir_transform/src/inline/cycle.rs b/compiler/rustc_mir_transform/src/inline/cycle.rs index a40768300f5d0..ff87e891230f8 100644 --- a/compiler/rustc_mir_transform/src/inline/cycle.rs +++ b/compiler/rustc_mir_transform/src/inline/cycle.rs @@ -90,12 +90,16 @@ pub(crate) fn mir_callgraph_reachable<'tcx>( | InstanceKind::ConstructCoroutineInClosureShim { .. } | InstanceKind::ThreadLocalShim { .. } | InstanceKind::CloneShim(..) => {} + InstanceKind::EiiShim { .. } => {} // This shim does not call any other functions, thus there can be no recursion. InstanceKind::FnPtrAddrShim(..) => { continue; } - InstanceKind::DropGlue(..) | InstanceKind::AsyncDropGlueCtorShim(..) => { + InstanceKind::DropGlue(..) + | InstanceKind::FutureDropPollShim(..) + | InstanceKind::AsyncDropGlue(..) + | InstanceKind::AsyncDropGlueCtorShim(..) => { // FIXME: A not fully instantiated drop shim can cause ICEs if one attempts to // have its MIR built. Likely oli-obk just screwed up the `ParamEnv`s, so this // needs some more analysis. diff --git a/compiler/rustc_mir_transform/src/instsimplify.rs b/compiler/rustc_mir_transform/src/instsimplify.rs index da346dfc48c1d..5f0c55ddc092d 100644 --- a/compiler/rustc_mir_transform/src/instsimplify.rs +++ b/compiler/rustc_mir_transform/src/instsimplify.rs @@ -10,7 +10,6 @@ use rustc_middle::ty::{self, GenericArgsRef, Ty, TyCtxt, layout}; use rustc_span::{DUMMY_SP, Symbol, sym}; use crate::simplify::simplify_duplicate_switch_targets; -use crate::take_array; pub(super) enum InstSimplify { BeforeInline, @@ -39,26 +38,26 @@ impl<'tcx> crate::MirPass<'tcx> for InstSimplify { attr::contains_name(tcx.hir_krate_attrs(), sym::rustc_preserve_ub_checks); for block in body.basic_blocks.as_mut() { for statement in block.statements.iter_mut() { - match statement.kind { - StatementKind::Assign(box (_place, ref mut rvalue)) => { - if !preserve_ub_checks { - ctx.simplify_ub_check(rvalue); - } - ctx.simplify_bool_cmp(rvalue); - ctx.simplify_ref_deref(rvalue); - ctx.simplify_ptr_aggregate(rvalue); - ctx.simplify_cast(rvalue); - ctx.simplify_repeated_aggregate(rvalue); - ctx.simplify_repeat_once(rvalue); - } - _ => {} + let StatementKind::Assign(box (.., rvalue)) = &mut statement.kind else { + continue; + }; + + if !preserve_ub_checks { + ctx.simplify_ub_check(rvalue); } + ctx.simplify_bool_cmp(rvalue); + ctx.simplify_ref_deref(rvalue); + ctx.simplify_ptr_aggregate(rvalue); + ctx.simplify_cast(rvalue); + ctx.simplify_repeated_aggregate(rvalue); + ctx.simplify_repeat_once(rvalue); } - ctx.simplify_primitive_clone(block.terminator.as_mut().unwrap(), &mut block.statements); - ctx.simplify_intrinsic_assert(block.terminator.as_mut().unwrap()); - ctx.simplify_nounwind_call(block.terminator.as_mut().unwrap()); - simplify_duplicate_switch_targets(block.terminator.as_mut().unwrap()); + let terminator = block.terminator.as_mut().unwrap(); + ctx.simplify_primitive_clone(terminator, &mut block.statements); + ctx.simplify_intrinsic_assert(terminator); + ctx.simplify_nounwind_call(terminator); + simplify_duplicate_switch_targets(terminator); } } @@ -78,20 +77,20 @@ impl<'tcx> InstSimplifyContext<'_, 'tcx> { /// GVN can also do this optimization, but GVN is only run at mir-opt-level 2 so having this in /// InstSimplify helps unoptimized builds. fn simplify_repeated_aggregate(&self, rvalue: &mut Rvalue<'tcx>) { - let Rvalue::Aggregate(box AggregateKind::Array(_), fields) = rvalue else { + let Rvalue::Aggregate(box AggregateKind::Array(_), fields) = &*rvalue else { return; }; if fields.len() < 5 { return; } - let first = &fields[rustc_abi::FieldIdx::ZERO]; + let (first, rest) = fields[..].split_first().unwrap(); let Operand::Constant(first) = first else { return; }; let Ok(first_val) = first.const_.eval(self.tcx, self.typing_env, first.span) else { return; }; - if fields.iter().all(|field| { + if rest.iter().all(|field| { let Operand::Constant(field) = field else { return false; }; @@ -105,43 +104,34 @@ impl<'tcx> InstSimplifyContext<'_, 'tcx> { /// Transform boolean comparisons into logical operations. fn simplify_bool_cmp(&self, rvalue: &mut Rvalue<'tcx>) { - match rvalue { - Rvalue::BinaryOp(op @ (BinOp::Eq | BinOp::Ne), box (a, b)) => { - let new = match (op, self.try_eval_bool(a), self.try_eval_bool(b)) { - // Transform "Eq(a, true)" ==> "a" - (BinOp::Eq, _, Some(true)) => Some(Rvalue::Use(a.clone())), + let Rvalue::BinaryOp(op @ (BinOp::Eq | BinOp::Ne), box (a, b)) = &*rvalue else { return }; + *rvalue = match (op, self.try_eval_bool(a), self.try_eval_bool(b)) { + // Transform "Eq(a, true)" ==> "a" + (BinOp::Eq, _, Some(true)) => Rvalue::Use(a.clone()), - // Transform "Ne(a, false)" ==> "a" - (BinOp::Ne, _, Some(false)) => Some(Rvalue::Use(a.clone())), + // Transform "Ne(a, false)" ==> "a" + (BinOp::Ne, _, Some(false)) => Rvalue::Use(a.clone()), - // Transform "Eq(true, b)" ==> "b" - (BinOp::Eq, Some(true), _) => Some(Rvalue::Use(b.clone())), + // Transform "Eq(true, b)" ==> "b" + (BinOp::Eq, Some(true), _) => Rvalue::Use(b.clone()), - // Transform "Ne(false, b)" ==> "b" - (BinOp::Ne, Some(false), _) => Some(Rvalue::Use(b.clone())), + // Transform "Ne(false, b)" ==> "b" + (BinOp::Ne, Some(false), _) => Rvalue::Use(b.clone()), - // Transform "Eq(false, b)" ==> "Not(b)" - (BinOp::Eq, Some(false), _) => Some(Rvalue::UnaryOp(UnOp::Not, b.clone())), + // Transform "Eq(false, b)" ==> "Not(b)" + (BinOp::Eq, Some(false), _) => Rvalue::UnaryOp(UnOp::Not, b.clone()), - // Transform "Ne(true, b)" ==> "Not(b)" - (BinOp::Ne, Some(true), _) => Some(Rvalue::UnaryOp(UnOp::Not, b.clone())), + // Transform "Ne(true, b)" ==> "Not(b)" + (BinOp::Ne, Some(true), _) => Rvalue::UnaryOp(UnOp::Not, b.clone()), - // Transform "Eq(a, false)" ==> "Not(a)" - (BinOp::Eq, _, Some(false)) => Some(Rvalue::UnaryOp(UnOp::Not, a.clone())), + // Transform "Eq(a, false)" ==> "Not(a)" + (BinOp::Eq, _, Some(false)) => Rvalue::UnaryOp(UnOp::Not, a.clone()), - // Transform "Ne(a, true)" ==> "Not(a)" - (BinOp::Ne, _, Some(true)) => Some(Rvalue::UnaryOp(UnOp::Not, a.clone())), + // Transform "Ne(a, true)" ==> "Not(a)" + (BinOp::Ne, _, Some(true)) => Rvalue::UnaryOp(UnOp::Not, a.clone()), - _ => None, - }; - - if let Some(new) = new { - *rvalue = new; - } - } - - _ => {} - } + _ => return, + }; } fn try_eval_bool(&self, a: &Operand<'_>) -> Option { @@ -151,64 +141,58 @@ impl<'tcx> InstSimplifyContext<'_, 'tcx> { /// Transform `&(*a)` ==> `a`. fn simplify_ref_deref(&self, rvalue: &mut Rvalue<'tcx>) { - if let Rvalue::Ref(_, _, place) | Rvalue::RawPtr(_, place) = rvalue { - if let Some((base, ProjectionElem::Deref)) = place.as_ref().last_projection() { - if rvalue.ty(self.local_decls, self.tcx) != base.ty(self.local_decls, self.tcx).ty { - return; - } - - *rvalue = Rvalue::Use(Operand::Copy(Place { - local: base.local, - projection: self.tcx.mk_place_elems(base.projection), - })); - } + if let Rvalue::Ref(_, _, place) | Rvalue::RawPtr(_, place) = rvalue + && let Some((base, ProjectionElem::Deref)) = place.as_ref().last_projection() + && rvalue.ty(self.local_decls, self.tcx) == base.ty(self.local_decls, self.tcx).ty + { + *rvalue = Rvalue::Use(Operand::Copy(Place { + local: base.local, + projection: self.tcx.mk_place_elems(base.projection), + })); } } /// Transform `Aggregate(RawPtr, [p, ()])` ==> `Cast(PtrToPtr, p)`. fn simplify_ptr_aggregate(&self, rvalue: &mut Rvalue<'tcx>) { if let Rvalue::Aggregate(box AggregateKind::RawPtr(pointee_ty, mutability), fields) = rvalue + && let meta_ty = fields.raw[1].ty(self.local_decls, self.tcx) + && meta_ty.is_unit() { - let meta_ty = fields.raw[1].ty(self.local_decls, self.tcx); - if meta_ty.is_unit() { - // The mutable borrows we're holding prevent printing `rvalue` here - let mut fields = std::mem::take(fields); - let _meta = fields.pop().unwrap(); - let data = fields.pop().unwrap(); - let ptr_ty = Ty::new_ptr(self.tcx, *pointee_ty, *mutability); - *rvalue = Rvalue::Cast(CastKind::PtrToPtr, data, ptr_ty); - } + // The mutable borrows we're holding prevent printing `rvalue` here + let mut fields = std::mem::take(fields); + let _meta = fields.pop().unwrap(); + let data = fields.pop().unwrap(); + let ptr_ty = Ty::new_ptr(self.tcx, *pointee_ty, *mutability); + *rvalue = Rvalue::Cast(CastKind::PtrToPtr, data, ptr_ty); } } fn simplify_ub_check(&self, rvalue: &mut Rvalue<'tcx>) { - if let Rvalue::NullaryOp(NullOp::UbChecks, _) = *rvalue { - let const_ = Const::from_bool(self.tcx, self.tcx.sess.ub_checks()); - let constant = ConstOperand { span: DUMMY_SP, const_, user_ty: None }; - *rvalue = Rvalue::Use(Operand::Constant(Box::new(constant))); - } + let Rvalue::NullaryOp(NullOp::UbChecks, _) = *rvalue else { return }; + + let const_ = Const::from_bool(self.tcx, self.tcx.sess.ub_checks()); + let constant = ConstOperand { span: DUMMY_SP, const_, user_ty: None }; + *rvalue = Rvalue::Use(Operand::Constant(Box::new(constant))); } fn simplify_cast(&self, rvalue: &mut Rvalue<'tcx>) { - if let Rvalue::Cast(kind, operand, cast_ty) = rvalue { - let operand_ty = operand.ty(self.local_decls, self.tcx); - if operand_ty == *cast_ty { - *rvalue = Rvalue::Use(operand.clone()); - } else if *kind == CastKind::Transmute { - // Transmuting an integer to another integer is just a signedness cast - if let (ty::Int(int), ty::Uint(uint)) | (ty::Uint(uint), ty::Int(int)) = - (operand_ty.kind(), cast_ty.kind()) - && int.bit_width() == uint.bit_width() - { - // The width check isn't strictly necessary, as different widths - // are UB and thus we'd be allowed to turn it into a cast anyway. - // But let's keep the UB around for codegen to exploit later. - // (If `CastKind::Transmute` ever becomes *not* UB for mismatched sizes, - // then the width check is necessary for big-endian correctness.) - *kind = CastKind::IntToInt; - return; - } - } + let Rvalue::Cast(kind, operand, cast_ty) = rvalue else { return }; + + let operand_ty = operand.ty(self.local_decls, self.tcx); + if operand_ty == *cast_ty { + *rvalue = Rvalue::Use(operand.clone()); + } else if *kind == CastKind::Transmute + // Transmuting an integer to another integer is just a signedness cast + && let (ty::Int(int), ty::Uint(uint)) | (ty::Uint(uint), ty::Int(int)) = + (operand_ty.kind(), cast_ty.kind()) + && int.bit_width() == uint.bit_width() + { + // The width check isn't strictly necessary, as different widths + // are UB and thus we'd be allowed to turn it into a cast anyway. + // But let's keep the UB around for codegen to exploit later. + // (If `CastKind::Transmute` ever becomes *not* UB for mismatched sizes, + // then the width check is necessary for big-endian correctness.) + *kind = CastKind::IntToInt; } } @@ -229,7 +213,9 @@ impl<'tcx> InstSimplifyContext<'_, 'tcx> { terminator: &mut Terminator<'tcx>, statements: &mut Vec>, ) { - let TerminatorKind::Call { func, args, destination, target, .. } = &mut terminator.kind + let TerminatorKind::Call { + func, args, destination, target: Some(destination_block), .. + } = &terminator.kind else { return; }; @@ -237,15 +223,8 @@ impl<'tcx> InstSimplifyContext<'_, 'tcx> { // It's definitely not a clone if there are multiple arguments let [arg] = &args[..] else { return }; - let Some(destination_block) = *target else { return }; - // Only bother looking more if it's easy to know what we're calling - let Some((fn_def_id, fn_args)) = func.const_fn_def() else { return }; - - // Clone needs one arg, so we can cheaply rule out other stuff - if fn_args.len() != 1 { - return; - } + let Some((fn_def_id, ..)) = func.const_fn_def() else { return }; // These types are easily available from locals, so check that before // doing DefId lookups to figure out what we're actually calling. @@ -253,15 +232,12 @@ impl<'tcx> InstSimplifyContext<'_, 'tcx> { let ty::Ref(_region, inner_ty, Mutability::Not) = *arg_ty.kind() else { return }; - if !inner_ty.is_trivially_pure_clone_copy() { - return; - } - - if !self.tcx.is_lang_item(fn_def_id, LangItem::CloneFn) { + if !self.tcx.is_lang_item(fn_def_id, LangItem::CloneFn) + || !inner_ty.is_trivially_pure_clone_copy() + { return; } - let Ok([arg]) = take_array(args) else { return }; let Some(arg_place) = arg.node.place() else { return }; statements.push(Statement { @@ -273,11 +249,11 @@ impl<'tcx> InstSimplifyContext<'_, 'tcx> { )), ))), }); - terminator.kind = TerminatorKind::Goto { target: destination_block }; + terminator.kind = TerminatorKind::Goto { target: *destination_block }; } fn simplify_nounwind_call(&self, terminator: &mut Terminator<'tcx>) { - let TerminatorKind::Call { func, unwind, .. } = &mut terminator.kind else { + let TerminatorKind::Call { ref func, ref mut unwind, .. } = terminator.kind else { return; }; @@ -290,7 +266,7 @@ impl<'tcx> InstSimplifyContext<'_, 'tcx> { ty::FnDef(..) => body_ty.fn_sig(self.tcx).abi(), ty::Closure(..) => ExternAbi::RustCall, ty::Coroutine(..) => ExternAbi::Rust, - _ => bug!("unexpected body ty: {:?}", body_ty), + _ => bug!("unexpected body ty: {body_ty:?}"), }; if !layout::fn_can_unwind(self.tcx, Some(def_id), body_abi) { @@ -299,10 +275,9 @@ impl<'tcx> InstSimplifyContext<'_, 'tcx> { } fn simplify_intrinsic_assert(&self, terminator: &mut Terminator<'tcx>) { - let TerminatorKind::Call { func, target, .. } = &mut terminator.kind else { - return; - }; - let Some(target_block) = target else { + let TerminatorKind::Call { ref func, target: ref mut target @ Some(target_block), .. } = + terminator.kind + else { return; }; let func_ty = func.ty(self.local_decls, self.tcx); @@ -310,12 +285,10 @@ impl<'tcx> InstSimplifyContext<'_, 'tcx> { return; }; // The intrinsics we are interested in have one generic parameter - if args.is_empty() { - return; - } + let [arg, ..] = args[..] else { return }; let known_is_valid = - intrinsic_assert_panics(self.tcx, self.typing_env, args[0], intrinsic_name); + intrinsic_assert_panics(self.tcx, self.typing_env, arg, intrinsic_name); match known_is_valid { // We don't know the layout or it's not validity assertion at all, don't touch it None => {} @@ -325,7 +298,7 @@ impl<'tcx> InstSimplifyContext<'_, 'tcx> { } Some(false) => { // If we know the assert does not panic, turn the call into a Goto - terminator.kind = TerminatorKind::Goto { target: *target_block }; + terminator.kind = TerminatorKind::Goto { target: target_block }; } } } @@ -346,9 +319,7 @@ fn resolve_rust_intrinsic<'tcx>( tcx: TyCtxt<'tcx>, func_ty: Ty<'tcx>, ) -> Option<(Symbol, GenericArgsRef<'tcx>)> { - if let ty::FnDef(def_id, args) = *func_ty.kind() { - let intrinsic = tcx.intrinsic(def_id)?; - return Some((intrinsic.name, args)); - } - None + let ty::FnDef(def_id, args) = *func_ty.kind() else { return None }; + let intrinsic = tcx.intrinsic(def_id)?; + Some((intrinsic.name, args)) } diff --git a/compiler/rustc_mir_transform/src/jump_threading.rs b/compiler/rustc_mir_transform/src/jump_threading.rs index 0a72a9d669fe6..31b361ec1a929 100644 --- a/compiler/rustc_mir_transform/src/jump_threading.rs +++ b/compiler/rustc_mir_transform/src/jump_threading.rs @@ -150,14 +150,6 @@ impl Condition { fn matches(&self, value: ScalarInt) -> bool { (self.value == value) == (self.polarity == Polarity::Eq) } - - fn inv(mut self) -> Self { - self.polarity = match self.polarity { - Polarity::Eq => Polarity::Ne, - Polarity::Ne => Polarity::Eq, - }; - self - } } #[derive(Copy, Clone, Debug)] @@ -180,8 +172,13 @@ impl<'a> ConditionSet<'a> { self.iter().filter(move |c| c.matches(value)) } - fn map(self, arena: &'a DroplessArena, f: impl Fn(Condition) -> Condition) -> ConditionSet<'a> { - ConditionSet(arena.alloc_from_iter(self.iter().map(f))) + fn map( + self, + arena: &'a DroplessArena, + f: impl Fn(Condition) -> Option, + ) -> Option> { + let set = arena.try_alloc_from_iter(self.iter().map(|c| f(c).ok_or(()))).ok()?; + Some(ConditionSet(set)) } } @@ -202,9 +199,7 @@ impl<'a, 'tcx> TOFinder<'a, 'tcx> { debug!(?discr, ?bb); let discr_ty = discr.ty(self.body, self.tcx).ty; - let Ok(discr_layout) = self.ecx.layout_of(discr_ty) else { - return; - }; + let Ok(discr_layout) = self.ecx.layout_of(discr_ty) else { return }; let Some(discr) = self.map.find(discr.as_ref()) else { return }; debug!(?discr); @@ -227,7 +222,7 @@ impl<'a, 'tcx> TOFinder<'a, 'tcx> { let conds = ConditionSet(conds); state.insert_value_idx(discr, conds, &self.map); - self.find_opportunity(bb, state, cost, 0); + self.find_opportunity(bb, state, cost, 0) } /// Recursively walk statements backwards from this bb's terminator to find threading @@ -495,19 +490,22 @@ impl<'a, 'tcx> TOFinder<'a, 'tcx> { } } } - // Transfer the conditions on the copy rhs, after inversing polarity. + // Transfer the conditions on the copy rhs, after inverting the value of the condition. Rvalue::UnaryOp(UnOp::Not, Operand::Move(place) | Operand::Copy(place)) => { - if !place.ty(self.body, self.tcx).ty.is_bool() { - // Constructing the conditions by inverting the polarity - // of equality is only correct for bools. That is to say, - // `!a == b` is not `a != b` for integers greater than 1 bit. - return; - } + let layout = self.ecx.layout_of(place.ty(self.body, self.tcx).ty).unwrap(); let Some(conditions) = state.try_get_idx(lhs, &self.map) else { return }; let Some(place) = self.map.find(place.as_ref()) else { return }; - // FIXME: I think This could be generalized to not bool if we - // actually perform a logical not on the condition's value. - let conds = conditions.map(self.arena, Condition::inv); + let Some(conds) = conditions.map(self.arena, |mut cond| { + cond.value = self + .ecx + .unary_op(UnOp::Not, &ImmTy::from_scalar_int(cond.value, layout)) + .discard_err()? + .to_scalar_int() + .discard_err()?; + Some(cond) + }) else { + return; + }; state.insert_value_idx(place, conds, &self.map); } // We expect `lhs ?= A`. We found `lhs = Eq(rhs, B)`. @@ -535,11 +533,15 @@ impl<'a, 'tcx> TOFinder<'a, 'tcx> { else { return; }; - let conds = conditions.map(self.arena, |c| Condition { - value, - polarity: if c.matches(equals) { Polarity::Eq } else { Polarity::Ne }, - ..c - }); + let Some(conds) = conditions.map(self.arena, |c| { + Some(Condition { + value, + polarity: if c.matches(equals) { Polarity::Eq } else { Polarity::Ne }, + ..c + }) + }) else { + return; + }; state.insert_value_idx(place, conds, &self.map); } @@ -576,17 +578,17 @@ impl<'a, 'tcx> TOFinder<'a, 'tcx> { else { return; }; - self.process_immediate(bb, discr_target, discr, state); + self.process_immediate(bb, discr_target, discr, state) } // If we expect `lhs ?= true`, we have an opportunity if we assume `lhs == true`. StatementKind::Intrinsic(box NonDivergingIntrinsic::Assume( Operand::Copy(place) | Operand::Move(place), )) => { let Some(conditions) = state.try_get(place.as_ref(), &self.map) else { return }; - conditions.iter_matches(ScalarInt::TRUE).for_each(register_opportunity); + conditions.iter_matches(ScalarInt::TRUE).for_each(register_opportunity) } StatementKind::Assign(box (lhs_place, rhs)) => { - self.process_assign(bb, lhs_place, rhs, state); + self.process_assign(bb, lhs_place, rhs, state) } _ => {} } @@ -632,7 +634,7 @@ impl<'a, 'tcx> TOFinder<'a, 'tcx> { if let Some(place_to_flood) = place_to_flood { state.flood_with(place_to_flood.as_ref(), &self.map, ConditionSet::BOTTOM); } - self.find_opportunity(bb, state, cost.clone(), depth + 1); + self.find_opportunity(bb, state, cost.clone(), depth + 1) } #[instrument(level = "trace", skip(self))] @@ -755,12 +757,12 @@ impl OpportunitySet { // Replace `succ` by `new_succ` where it appears. let mut num_edges = 0; - for s in basic_blocks[current].terminator_mut().successors_mut() { + basic_blocks[current].terminator_mut().successors_mut(|s| { if *s == succ { *s = new_succ; num_edges += 1; } - } + }); // Update predecessors with the new block. let _new_succ = self.predecessors.push(num_edges); diff --git a/compiler/rustc_mir_transform/src/known_panics_lint.rs b/compiler/rustc_mir_transform/src/known_panics_lint.rs index f8db8de4e82c9..481c794190925 100644 --- a/compiler/rustc_mir_transform/src/known_panics_lint.rs +++ b/compiler/rustc_mir_transform/src/known_panics_lint.rs @@ -888,7 +888,14 @@ impl CanConstProp { }; for (local, val) in cpv.can_const_prop.iter_enumerated_mut() { let ty = body.local_decls[local].ty; - if ty.is_union() { + if ty.is_async_drop_in_place_coroutine(tcx) { + // No const propagation for async drop coroutine (AsyncDropGlue). + // Otherwise, tcx.layout_of(typing_env.as_query_input(ty)) will be called + // (early layout request for async drop coroutine) to calculate layout size. + // Layout for `async_drop_in_place::{closure}` may only be known with known T. + *val = ConstPropMode::NoPropagation; + continue; + } else if ty.is_union() { // Unions are incompatible with the current implementation of // const prop because Rust has no concept of an active // variant of a union diff --git a/compiler/rustc_mir_transform/src/lib.rs b/compiler/rustc_mir_transform/src/lib.rs index 205d388f4fb50..dc0eacd36130b 100644 --- a/compiler/rustc_mir_transform/src/lib.rs +++ b/compiler/rustc_mir_transform/src/lib.rs @@ -1,5 +1,5 @@ // tidy-alphabetical-start -#![cfg_attr(doc, recursion_limit = "256")] // FIXME(nnethercote): will be removed by #124141 +#![cfg_attr(bootstrap, feature(let_chains))] #![feature(array_windows)] #![feature(assert_matches)] #![feature(box_patterns)] @@ -8,10 +8,10 @@ #![feature(file_buffered)] #![feature(if_let_guard)] #![feature(impl_trait_in_assoc_type)] -#![feature(let_chains)] #![feature(map_try_insert)] #![feature(never_type)] #![feature(try_blocks)] +#![feature(vec_deque_pop_if)] #![feature(yeet_expr)] // tidy-alphabetical-end @@ -125,6 +125,7 @@ declare_passes! { mod check_null : CheckNull; mod check_packed_ref : CheckPackedRef; mod check_undefined_transmutes : CheckUndefinedTransmutes; + mod check_unnecessary_transmutes: CheckUnnecessaryTransmutes; // This pass is public to allow external drivers to perform MIR cleanup pub mod cleanup_post_borrowck : CleanupPostBorrowck; @@ -391,6 +392,7 @@ fn mir_built(tcx: TyCtxt<'_>, def: LocalDefId) -> &Steal> { &Lint(check_const_item_mutation::CheckConstItemMutation), &Lint(function_item_references::FunctionItemReferences), &Lint(check_undefined_transmutes::CheckUndefinedTransmutes), + &Lint(check_unnecessary_transmutes::CheckUnnecessaryTransmutes), // What we need to do constant evaluation. &simplify::SimplifyCfg::Initial, &Lint(sanity_check::SanityCheck), @@ -497,8 +499,11 @@ fn mir_drops_elaborated_and_const_checked(tcx: TyCtxt<'_>, def: LocalDefId) -> & } // We only need to borrowck non-synthetic MIR. - let tainted_by_errors = - if !tcx.is_synthetic_mir(def) { tcx.mir_borrowck(def).tainted_by_errors } else { None }; + let tainted_by_errors = if !tcx.is_synthetic_mir(def) { + tcx.mir_borrowck(tcx.typeck_root_def_id(def.to_def_id()).expect_local()).err() + } else { + None + }; let is_fn_like = tcx.def_kind(def).is_fn_like(); if is_fn_like { @@ -528,7 +533,7 @@ fn mir_drops_elaborated_and_const_checked(tcx: TyCtxt<'_>, def: LocalDefId) -> & | DefKind::Static { .. } | DefKind::Const | DefKind::AssocConst => { - if let Err(guar) = tcx.check_well_formed(root.expect_local()) { + if let Err(guar) = tcx.ensure_ok().check_well_formed(root.expect_local()) { body.tainted_by_errors = Some(guar); } } @@ -647,7 +652,7 @@ fn run_runtime_cleanup_passes<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) { } } -fn run_optimization_passes<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) { +pub(crate) fn run_optimization_passes<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) { fn o1(x: T) -> WithMinOptLevel { WithMinOptLevel(1, x) } @@ -696,8 +701,6 @@ fn run_optimization_passes<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) { // Now, we need to shrink the generated MIR. &ref_prop::ReferencePropagation, &sroa::ScalarReplacementOfAggregates, - &match_branches::MatchBranchSimplification, - // inst combine is after MatchBranchSimplification to clean up Ne(_1, false) &multiple_return_terminators::MultipleReturnTerminators, // After simplifycfg, it allows us to discover new opportunities for peephole // optimizations. @@ -706,6 +709,7 @@ fn run_optimization_passes<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) { &dead_store_elimination::DeadStoreElimination::Initial, &gvn::GVN, &simplify::SimplifyLocals::AfterGVN, + &match_branches::MatchBranchSimplification, &dataflow_const_prop::DataflowConstProp, &single_use_consts::SingleUseConsts, &o1(simplify_branches::SimplifyConstCondition::AfterConstProp), @@ -794,7 +798,7 @@ fn promoted_mir(tcx: TyCtxt<'_>, def: LocalDefId) -> &IndexVec DropsReachable<'a, 'mir, 'tcx> { target: _, unwind: _, replace: _, + drop: _, + async_fut: _, } = &terminator.kind && place_has_common_prefix(dropped_place, self.place) { @@ -233,8 +235,9 @@ pub(crate) fn run_lint<'tcx>(tcx: TyCtxt<'tcx>, def_id: LocalDefId, body: &Body< // When we encounter a DROP of some place P we only care // about the drop if `P` may be initialized. let move_data = MoveData::gather_moves(body, tcx, |_| true); - let maybe_init = MaybeInitializedPlaces::new(tcx, body, &move_data); - let mut maybe_init = maybe_init.iterate_to_fixpoint(tcx, body, None).into_results_cursor(body); + let mut maybe_init = MaybeInitializedPlaces::new(tcx, body, &move_data) + .iterate_to_fixpoint(tcx, body, None) + .into_results_cursor(body); let mut block_drop_value_info = IndexVec::from_elem_n(MovePathIndexAtBlock::Unknown, body.basic_blocks.len()); for (&block, candidates) in &bid_per_block { @@ -512,23 +515,17 @@ struct LocalLabel<'a> { /// A custom `Subdiagnostic` implementation so that the notes are delivered in a specific order impl Subdiagnostic for LocalLabel<'_> { - fn add_to_diag_with< - G: rustc_errors::EmissionGuarantee, - F: rustc_errors::SubdiagMessageOp, - >( - self, - diag: &mut rustc_errors::Diag<'_, G>, - f: &F, - ) { + fn add_to_diag(self, diag: &mut rustc_errors::Diag<'_, G>) { diag.arg("name", self.name); diag.arg("is_generated_name", self.is_generated_name); diag.arg("is_dropped_first_edition_2024", self.is_dropped_first_edition_2024); - let msg = f(diag, crate::fluent_generated::mir_transform_tail_expr_local.into()); + let msg = diag.eagerly_translate(crate::fluent_generated::mir_transform_tail_expr_local); diag.span_label(self.span, msg); for dtor in self.destructors { - dtor.add_to_diag_with(diag, f); + dtor.add_to_diag(diag); } - let msg = f(diag, crate::fluent_generated::mir_transform_label_local_epilogue); + let msg = + diag.eagerly_translate(crate::fluent_generated::mir_transform_label_local_epilogue); diag.span_label(self.span, msg); } } diff --git a/compiler/rustc_mir_transform/src/match_branches.rs b/compiler/rustc_mir_transform/src/match_branches.rs index 0d9d0368d3729..8c0c309689902 100644 --- a/compiler/rustc_mir_transform/src/match_branches.rs +++ b/compiler/rustc_mir_transform/src/match_branches.rs @@ -19,33 +19,33 @@ impl<'tcx> crate::MirPass<'tcx> for MatchBranchSimplification { fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) { let typing_env = body.typing_env(tcx); - let mut should_cleanup = false; - for i in 0..body.basic_blocks.len() { - let bbs = &*body.basic_blocks; - let bb_idx = BasicBlock::from_usize(i); - match bbs[bb_idx].terminator().kind { + let mut apply_patch = false; + let mut patch = MirPatch::new(body); + for (bb, bb_data) in body.basic_blocks.iter_enumerated() { + match &bb_data.terminator().kind { TerminatorKind::SwitchInt { - discr: ref _discr @ (Operand::Copy(_) | Operand::Move(_)), - ref targets, + discr: Operand::Copy(_) | Operand::Move(_), + targets, .. // We require that the possible target blocks don't contain this block. - } if !targets.all_targets().contains(&bb_idx) => {} + } if !targets.all_targets().contains(&bb) => {} // Only optimize switch int statements _ => continue, }; - if SimplifyToIf.simplify(tcx, body, bb_idx, typing_env).is_some() { - should_cleanup = true; + if SimplifyToIf.simplify(tcx, body, &mut patch, bb, typing_env).is_some() { + apply_patch = true; continue; } - if SimplifyToExp::default().simplify(tcx, body, bb_idx, typing_env).is_some() { - should_cleanup = true; + if SimplifyToExp::default().simplify(tcx, body, &mut patch, bb, typing_env).is_some() { + apply_patch = true; continue; } } - if should_cleanup { - simplify_cfg(body); + if apply_patch { + patch.apply(body); + simplify_cfg(tcx, body); } } @@ -61,21 +61,21 @@ trait SimplifyMatch<'tcx> { fn simplify( &mut self, tcx: TyCtxt<'tcx>, - body: &mut Body<'tcx>, + body: &Body<'tcx>, + patch: &mut MirPatch<'tcx>, switch_bb_idx: BasicBlock, typing_env: ty::TypingEnv<'tcx>, ) -> Option<()> { let bbs = &body.basic_blocks; - let (discr, targets) = match bbs[switch_bb_idx].terminator().kind { - TerminatorKind::SwitchInt { ref discr, ref targets, .. } => (discr, targets), - _ => unreachable!(), + let TerminatorKind::SwitchInt { discr, targets, .. } = + &bbs[switch_bb_idx].terminator().kind + else { + unreachable!(); }; let discr_ty = discr.ty(body.local_decls(), tcx); self.can_simplify(tcx, targets, typing_env, bbs, discr_ty)?; - let mut patch = MirPatch::new(body); - // Take ownership of items now that we know we can optimize. let discr = discr.clone(); @@ -88,19 +88,9 @@ trait SimplifyMatch<'tcx> { let parent_end = Location { block: switch_bb_idx, statement_index }; patch.add_statement(parent_end, StatementKind::StorageLive(discr_local)); patch.add_assign(parent_end, Place::from(discr_local), Rvalue::Use(discr)); - self.new_stmts( - tcx, - targets, - typing_env, - &mut patch, - parent_end, - bbs, - discr_local, - discr_ty, - ); + self.new_stmts(tcx, targets, typing_env, patch, parent_end, bbs, discr_local, discr_ty); patch.add_statement(parent_end, StatementKind::StorageDead(discr_local)); patch.patch_terminator(switch_bb_idx, bbs[first].terminator().kind.clone()); - patch.apply(body); Some(()) } diff --git a/compiler/rustc_mir_transform/src/multiple_return_terminators.rs b/compiler/rustc_mir_transform/src/multiple_return_terminators.rs index c63bfdcee8559..f59b849e85c62 100644 --- a/compiler/rustc_mir_transform/src/multiple_return_terminators.rs +++ b/compiler/rustc_mir_transform/src/multiple_return_terminators.rs @@ -18,19 +18,17 @@ impl<'tcx> crate::MirPass<'tcx> for MultipleReturnTerminators { // find basic blocks with no statement and a return terminator let mut bbs_simple_returns = DenseBitSet::new_empty(body.basic_blocks.len()); let bbs = body.basic_blocks_mut(); - for idx in bbs.indices() { - if bbs[idx].statements.is_empty() - && bbs[idx].terminator().kind == TerminatorKind::Return - { + for (idx, bb) in bbs.iter_enumerated() { + if bb.statements.is_empty() && bb.terminator().kind == TerminatorKind::Return { bbs_simple_returns.insert(idx); } } for bb in bbs { - if let TerminatorKind::Goto { target } = bb.terminator().kind { - if bbs_simple_returns.contains(target) { - bb.terminator_mut().kind = TerminatorKind::Return; - } + if let TerminatorKind::Goto { target } = bb.terminator().kind + && bbs_simple_returns.contains(target) + { + bb.terminator_mut().kind = TerminatorKind::Return; } } diff --git a/compiler/rustc_mir_transform/src/patch.rs b/compiler/rustc_mir_transform/src/patch.rs index 6a177faeac81f..a872eae15f185 100644 --- a/compiler/rustc_mir_transform/src/patch.rs +++ b/compiler/rustc_mir_transform/src/patch.rs @@ -148,6 +148,23 @@ impl<'tcx> MirPatch<'tcx> { self.term_patch_map[bb].is_some() } + /// Universal getter for block data, either it is in 'old' blocks or in patched ones + pub(crate) fn block<'a>( + &'a self, + body: &'a Body<'tcx>, + bb: BasicBlock, + ) -> &'a BasicBlockData<'tcx> { + match bb.index().checked_sub(body.basic_blocks.len()) { + Some(new) => &self.new_blocks[new], + None => &body[bb], + } + } + + pub(crate) fn terminator_loc(&self, body: &Body<'tcx>, bb: BasicBlock) -> Location { + let offset = self.block(body, bb).statements.len(); + Location { block: bb, statement_index: offset } + } + /// Queues the addition of a new temporary with additional local info. pub(crate) fn new_local_with_info( &mut self, @@ -181,7 +198,7 @@ impl<'tcx> MirPatch<'tcx> { /// Queues the addition of a new basic block. pub(crate) fn new_block(&mut self, data: BasicBlockData<'tcx>) -> BasicBlock { - let block = BasicBlock::new(self.term_patch_map.len()); + let block = self.term_patch_map.next_index(); debug!("MirPatch: new_block: {:?}: {:?}", block, data); self.new_blocks.push(data); self.term_patch_map.push(None); @@ -276,10 +293,7 @@ impl<'tcx> MirPatch<'tcx> { } pub(crate) fn source_info_for_location(&self, body: &Body<'tcx>, loc: Location) -> SourceInfo { - let data = match loc.block.index().checked_sub(body.basic_blocks.len()) { - Some(new) => &self.new_blocks[new], - None => &body[loc.block], - }; + let data = self.block(body, loc.block); Self::source_info_for_index(data, loc) } } diff --git a/compiler/rustc_mir_transform/src/post_analysis_normalize.rs b/compiler/rustc_mir_transform/src/post_analysis_normalize.rs index 76c2f082c0bfc..5599dee4ccad3 100644 --- a/compiler/rustc_mir_transform/src/post_analysis_normalize.rs +++ b/compiler/rustc_mir_transform/src/post_analysis_normalize.rs @@ -39,20 +39,22 @@ impl<'tcx> MutVisitor<'tcx> for PostAnalysisNormalizeVisitor<'tcx> { _context: PlaceContext, _location: Location, ) { - // Performance optimization: don't reintern if there is no `OpaqueCast` to remove. - if place.projection.iter().all(|elem| !matches!(elem, ProjectionElem::OpaqueCast(_))) { - return; + if !self.tcx.next_trait_solver_globally() { + // `OpaqueCast` projections are only needed if there are opaque types on which projections + // are performed. After the `PostAnalysisNormalize` pass, all opaque types are replaced with their + // hidden types, so we don't need these projections anymore. + // + // Performance optimization: don't reintern if there is no `OpaqueCast` to remove. + if place.projection.iter().any(|elem| matches!(elem, ProjectionElem::OpaqueCast(_))) { + place.projection = self.tcx.mk_place_elems( + &place + .projection + .into_iter() + .filter(|elem| !matches!(elem, ProjectionElem::OpaqueCast(_))) + .collect::>(), + ); + }; } - // `OpaqueCast` projections are only needed if there are opaque types on which projections - // are performed. After the `PostAnalysisNormalize` pass, all opaque types are replaced with their - // hidden types, so we don't need these projections anymore. - place.projection = self.tcx.mk_place_elems( - &place - .projection - .into_iter() - .filter(|elem| !matches!(elem, ProjectionElem::OpaqueCast(_))) - .collect::>(), - ); self.super_place(place, _context, _location); } diff --git a/compiler/rustc_mir_transform/src/prettify.rs b/compiler/rustc_mir_transform/src/prettify.rs index 8ccfbe2f194b4..8217feff24eca 100644 --- a/compiler/rustc_mir_transform/src/prettify.rs +++ b/compiler/rustc_mir_transform/src/prettify.rs @@ -115,9 +115,7 @@ impl<'tcx> MutVisitor<'tcx> for BasicBlockUpdater<'tcx> { } fn visit_terminator(&mut self, terminator: &mut Terminator<'tcx>, _location: Location) { - for succ in terminator.successors_mut() { - *succ = self.map[*succ]; - } + terminator.successors_mut(|succ| *succ = self.map[*succ]); } } diff --git a/compiler/rustc_mir_transform/src/promote_consts.rs b/compiler/rustc_mir_transform/src/promote_consts.rs index c8d8dc147e94f..47d4383097008 100644 --- a/compiler/rustc_mir_transform/src/promote_consts.rs +++ b/compiler/rustc_mir_transform/src/promote_consts.rs @@ -18,7 +18,7 @@ use either::{Left, Right}; use rustc_const_eval::check_consts::{ConstCx, qualifs}; use rustc_data_structures::fx::FxHashSet; use rustc_hir as hir; -use rustc_index::{Idx, IndexSlice, IndexVec}; +use rustc_index::{IndexSlice, IndexVec}; use rustc_middle::mir::visit::{MutVisitor, MutatingUseContext, PlaceContext, Visitor}; use rustc_middle::mir::*; use rustc_middle::ty::{self, GenericArgs, List, Ty, TyCtxt, TypeVisitableExt}; @@ -864,17 +864,21 @@ impl<'a, 'tcx> Promoter<'a, 'tcx> { new_temp } - fn promote_candidate(mut self, candidate: Candidate, next_promoted_id: usize) -> Body<'tcx> { + fn promote_candidate( + mut self, + candidate: Candidate, + next_promoted_index: Promoted, + ) -> Body<'tcx> { let def = self.source.source.def_id(); let (mut rvalue, promoted_op) = { let promoted = &mut self.promoted; - let promoted_id = Promoted::new(next_promoted_id); let tcx = self.tcx; let mut promoted_operand = |ty, span| { promoted.span = span; promoted.local_decls[RETURN_PLACE] = LocalDecl::new(ty, span); let args = tcx.erase_regions(GenericArgs::identity_for_item(tcx, def)); - let uneval = mir::UnevaluatedConst { def, args, promoted: Some(promoted_id) }; + let uneval = + mir::UnevaluatedConst { def, args, promoted: Some(next_promoted_index) }; ConstOperand { span, user_ty: None, const_: Const::Unevaluated(uneval, ty) } }; @@ -1034,7 +1038,7 @@ fn promote_candidates<'tcx>( required_consts: Vec::new(), }; - let mut promoted = promoter.promote_candidate(candidate, promotions.len()); + let mut promoted = promoter.promote_candidate(candidate, promotions.next_index()); promoted.source.promoted = Some(promotions.next_index()); promotions.push(promoted); } diff --git a/compiler/rustc_mir_transform/src/remove_noop_landing_pads.rs b/compiler/rustc_mir_transform/src/remove_noop_landing_pads.rs index 1dd34005d6641..797056ad52d4a 100644 --- a/compiler/rustc_mir_transform/src/remove_noop_landing_pads.rs +++ b/compiler/rustc_mir_transform/src/remove_noop_landing_pads.rs @@ -58,13 +58,13 @@ impl<'tcx> crate::MirPass<'tcx> for RemoveNoopLandingPads { } } - for target in body[bb].terminator_mut().successors_mut() { + body[bb].terminator_mut().successors_mut(|target| { if *target != resume_block && nop_landing_pads.contains(*target) { debug!(" folding noop jump to {:?} to resume block", target); *target = resume_block; jumps_folded += 1; } - } + }); let is_nop_landing_pad = self.is_nop_landing_pad(bb, body, &nop_landing_pads); if is_nop_landing_pad { diff --git a/compiler/rustc_mir_transform/src/remove_place_mention.rs b/compiler/rustc_mir_transform/src/remove_place_mention.rs index 15fe77d53195a..cb598ceb4dfea 100644 --- a/compiler/rustc_mir_transform/src/remove_place_mention.rs +++ b/compiler/rustc_mir_transform/src/remove_place_mention.rs @@ -8,7 +8,7 @@ pub(super) struct RemovePlaceMention; impl<'tcx> crate::MirPass<'tcx> for RemovePlaceMention { fn is_enabled(&self, sess: &rustc_session::Session) -> bool { - !sess.opts.unstable_opts.mir_keep_place_mention + !sess.opts.unstable_opts.mir_preserve_ub } fn run_pass(&self, _: TyCtxt<'tcx>, body: &mut Body<'tcx>) { diff --git a/compiler/rustc_mir_transform/src/remove_unneeded_drops.rs b/compiler/rustc_mir_transform/src/remove_unneeded_drops.rs index 8a8cdafc69070..43f80508e4a87 100644 --- a/compiler/rustc_mir_transform/src/remove_unneeded_drops.rs +++ b/compiler/rustc_mir_transform/src/remove_unneeded_drops.rs @@ -35,7 +35,7 @@ impl<'tcx> crate::MirPass<'tcx> for RemoveUnneededDrops { // if we applied optimizations, we potentially have some cfg to cleanup to // make it easier for further passes if should_simplify { - simplify_cfg(body); + simplify_cfg(tcx, body); } } diff --git a/compiler/rustc_mir_transform/src/remove_zsts.rs b/compiler/rustc_mir_transform/src/remove_zsts.rs index 78d94a038671d..c4dc8638b26ab 100644 --- a/compiler/rustc_mir_transform/src/remove_zsts.rs +++ b/compiler/rustc_mir_transform/src/remove_zsts.rs @@ -59,6 +59,11 @@ fn trivially_zst<'tcx>(ty: Ty<'tcx>, tcx: TyCtxt<'tcx>) -> Option { | ty::RawPtr(..) | ty::Ref(..) | ty::FnPtr(..) => Some(false), + ty::Coroutine(def_id, _) => { + // For async_drop_in_place::{closure} this is load bearing, not just a perf fix, + // because we don't want to compute the layout before mir analysis is done + if tcx.is_async_drop_in_place_coroutine(*def_id) { Some(false) } else { None } + } // check `layout_of` to see (including unreachable things we won't actually see) _ => None, } diff --git a/compiler/rustc_mir_transform/src/shim.rs b/compiler/rustc_mir_transform/src/shim.rs index c9771467e499c..c187e617f851c 100644 --- a/compiler/rustc_mir_transform/src/shim.rs +++ b/compiler/rustc_mir_transform/src/shim.rs @@ -6,13 +6,14 @@ use rustc_hir as hir; use rustc_hir::def_id::DefId; use rustc_hir::lang_items::LangItem; use rustc_index::{Idx, IndexVec}; +use rustc_middle::mir::visit::{MutVisitor, PlaceContext}; use rustc_middle::mir::*; use rustc_middle::query::Providers; use rustc_middle::ty::{ self, CoroutineArgs, CoroutineArgsExt, EarlyBinder, GenericArgs, Ty, TyCtxt, }; use rustc_middle::{bug, span_bug}; -use rustc_span::source_map::Spanned; +use rustc_span::source_map::{Spanned, dummy_spanned}; use rustc_span::{DUMMY_SP, Span}; use tracing::{debug, instrument}; @@ -20,7 +21,8 @@ use crate::elaborate_drop::{DropElaborator, DropFlagMode, DropStyle, Unwind, ela use crate::patch::MirPatch; use crate::{ abort_unwinding_calls, add_call_guards, add_moves_for_packed_drops, deref_separator, inline, - instsimplify, mentioned_items, pass_manager as pm, remove_noop_landing_pads, simplify, + instsimplify, mentioned_items, pass_manager as pm, remove_noop_landing_pads, + run_optimization_passes, simplify, }; mod async_destructor_ctor; @@ -29,6 +31,40 @@ pub(super) fn provide(providers: &mut Providers) { providers.mir_shims = make_shim; } +// Replace Pin<&mut ImplCoroutine> accesses (_1.0) into Pin<&mut ProxyCoroutine> acceses +struct FixProxyFutureDropVisitor<'tcx> { + tcx: TyCtxt<'tcx>, + replace_to: Local, +} + +impl<'tcx> MutVisitor<'tcx> for FixProxyFutureDropVisitor<'tcx> { + fn tcx(&self) -> TyCtxt<'tcx> { + self.tcx + } + + fn visit_place( + &mut self, + place: &mut Place<'tcx>, + _context: PlaceContext, + _location: Location, + ) { + if place.local == Local::from_u32(1) { + if place.projection.len() == 1 { + assert!(matches!( + place.projection.first(), + Some(ProjectionElem::Field(FieldIdx::ZERO, _)) + )); + *place = Place::from(self.replace_to); + } else if place.projection.len() == 2 { + assert!(matches!(place.projection[0], ProjectionElem::Field(FieldIdx::ZERO, _))); + assert!(matches!(place.projection[1], ProjectionElem::Deref)); + *place = + Place::from(self.replace_to).project_deeper(&[ProjectionElem::Deref], self.tcx); + } + } + } +} + fn make_shim<'tcx>(tcx: TyCtxt<'tcx>, instance: ty::InstanceKind<'tcx>) -> Body<'tcx> { debug!("make_shim({:?})", instance); @@ -66,7 +102,7 @@ fn make_shim<'tcx>(tcx: TyCtxt<'tcx>, instance: ty::InstanceKind<'tcx>) -> Body< let call_mut = tcx .associated_items(fn_mut) .in_definition_order() - .find(|it| it.kind == ty::AssocKind::Fn) + .find(|it| it.is_fn()) .unwrap() .def_id; @@ -78,6 +114,64 @@ fn make_shim<'tcx>(tcx: TyCtxt<'tcx>, instance: ty::InstanceKind<'tcx>) -> Body< receiver_by_ref, } => build_construct_coroutine_by_move_shim(tcx, coroutine_closure_def_id, receiver_by_ref), + e @ ty::InstanceKind::EiiShim { def_id: _, extern_item, chosen_impl, weak_linkage: _ } => { + let source = MirSource::from_instance(e); + + // get the signature for the new function this shim is creating + let shim_fn_sig = tcx.fn_sig(extern_item).instantiate_identity(); + let shim_fn_sig = tcx.instantiate_bound_regions_with_erased(shim_fn_sig); + + let span = tcx.def_span(chosen_impl); + let source_info = SourceInfo::outermost(span); + + // we want to generate a call to this function + let args = ty::GenericArgs::identity_for_item(tcx, chosen_impl); + let chosen_fn_ty = Ty::new_fn_def(tcx, chosen_impl, args); + + let func = Operand::Constant(Box::new(ConstOperand { + span, + user_ty: None, + const_: Const::zero_sized(chosen_fn_ty), + })); + + // println!("generating EII shim for extern item {extern_item:?} and impl {chosen_impl:?}"); + + let locals = local_decls_for_sig(&shim_fn_sig, span); + let mut blocks = IndexVec::new(); + + let return_block = BasicBlock::new(1); + blocks.push(BasicBlockData { + statements: vec![], + terminator: Some(Terminator { + source_info, + kind: TerminatorKind::Call { + func, + args: locals + .iter_enumerated() + .map(|i| i.0) + .skip(1) + .map(|local| Spanned { node: Operand::Move(Place::from(local)), span }) + .collect::>() + .into_boxed_slice(), + fn_span: span, + destination: Place::return_place(), + target: Some(return_block), + unwind: UnwindAction::Continue, + call_source: CallSource::Misc, + }, + }), + is_cleanup: false, + }); + + blocks.push(BasicBlockData { + statements: vec![], + terminator: Some(Terminator { source_info, kind: TerminatorKind::Return }), + is_cleanup: false, + }); + + new_body(source, blocks, locals, shim_fn_sig.inputs().len(), span) + } + ty::InstanceKind::DropGlue(def_id, ty) => { // FIXME(#91576): Drop shims for coroutines aren't subject to the MIR passes at the end // of this function. Is this intentional? @@ -129,8 +223,53 @@ fn make_shim<'tcx>(tcx: TyCtxt<'tcx>, instance: ty::InstanceKind<'tcx>) -> Body< ty::InstanceKind::ThreadLocalShim(..) => build_thread_local_shim(tcx, instance), ty::InstanceKind::CloneShim(def_id, ty) => build_clone_shim(tcx, def_id, ty), ty::InstanceKind::FnPtrAddrShim(def_id, ty) => build_fn_ptr_addr_shim(tcx, def_id, ty), + ty::InstanceKind::FutureDropPollShim(def_id, proxy_ty, impl_ty) => { + let mut body = + async_destructor_ctor::build_future_drop_poll_shim(tcx, def_id, proxy_ty, impl_ty); + + pm::run_passes( + tcx, + &mut body, + &[ + &mentioned_items::MentionedItems, + &abort_unwinding_calls::AbortUnwindingCalls, + &add_call_guards::CriticalCallEdges, + ], + Some(MirPhase::Runtime(RuntimePhase::PostCleanup)), + pm::Optimizations::Allowed, + ); + run_optimization_passes(tcx, &mut body); + debug!("make_shim({:?}) = {:?}", instance, body); + return body; + } + ty::InstanceKind::AsyncDropGlue(def_id, ty) => { + let mut body = async_destructor_ctor::build_async_drop_shim(tcx, def_id, ty); + + // Main pass required here is StateTransform to convert sync drop ladder + // into coroutine. + // Others are minimal passes as for sync drop glue shim + pm::run_passes( + tcx, + &mut body, + &[ + &mentioned_items::MentionedItems, + &abort_unwinding_calls::AbortUnwindingCalls, + &add_call_guards::CriticalCallEdges, + &simplify::SimplifyCfg::MakeShim, + &crate::coroutine::StateTransform, + ], + Some(MirPhase::Runtime(RuntimePhase::PostCleanup)), + pm::Optimizations::Allowed, + ); + run_optimization_passes(tcx, &mut body); + debug!("make_shim({:?}) = {:?}", instance, body); + return body; + } + ty::InstanceKind::AsyncDropGlueCtorShim(def_id, ty) => { - async_destructor_ctor::build_async_destructor_ctor_shim(tcx, def_id, ty) + let body = async_destructor_ctor::build_async_destructor_ctor_shim(tcx, def_id, ty); + debug!("make_shim({:?}) = {:?}", instance, body); + return body; } ty::InstanceKind::Virtual(..) => { bug!("InstanceKind::Virtual ({:?}) is for direct calls only", instance) @@ -214,6 +353,42 @@ fn local_decls_for_sig<'tcx>( .collect() } +fn dropee_emit_retag<'tcx>( + tcx: TyCtxt<'tcx>, + body: &mut Body<'tcx>, + mut dropee_ptr: Place<'tcx>, + span: Span, +) -> Place<'tcx> { + if tcx.sess.opts.unstable_opts.mir_emit_retag { + let source_info = SourceInfo::outermost(span); + // We want to treat the function argument as if it was passed by `&mut`. As such, we + // generate + // ``` + // temp = &mut *arg; + // Retag(temp, FnEntry) + // ``` + // It's important that we do this first, before anything that depends on `dropee_ptr` + // has been put into the body. + let reborrow = Rvalue::Ref( + tcx.lifetimes.re_erased, + BorrowKind::Mut { kind: MutBorrowKind::Default }, + tcx.mk_place_deref(dropee_ptr), + ); + let ref_ty = reborrow.ty(body.local_decls(), tcx); + dropee_ptr = body.local_decls.push(LocalDecl::new(ref_ty, span)).into(); + let new_statements = [ + StatementKind::Assign(Box::new((dropee_ptr, reborrow))), + StatementKind::Retag(RetagKind::FnEntry, Box::new(dropee_ptr)), + ]; + for s in new_statements { + body.basic_blocks_mut()[START_BLOCK] + .statements + .push(Statement { source_info, kind: s }); + } + } + dropee_ptr +} + fn build_drop_shim<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId, ty: Option>) -> Body<'tcx> { debug!("build_drop_shim(def_id={:?}, ty={:?})", def_id, ty); @@ -247,39 +422,19 @@ fn build_drop_shim<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId, ty: Option>) new_body(source, blocks, local_decls_for_sig(&sig, span), sig.inputs().len(), span); // The first argument (index 0), but add 1 for the return value. - let mut dropee_ptr = Place::from(Local::new(1 + 0)); - if tcx.sess.opts.unstable_opts.mir_emit_retag { - // We want to treat the function argument as if it was passed by `&mut`. As such, we - // generate - // ``` - // temp = &mut *arg; - // Retag(temp, FnEntry) - // ``` - // It's important that we do this first, before anything that depends on `dropee_ptr` - // has been put into the body. - let reborrow = Rvalue::Ref( - tcx.lifetimes.re_erased, - BorrowKind::Mut { kind: MutBorrowKind::Default }, - tcx.mk_place_deref(dropee_ptr), - ); - let ref_ty = reborrow.ty(body.local_decls(), tcx); - dropee_ptr = body.local_decls.push(LocalDecl::new(ref_ty, span)).into(); - let new_statements = [ - StatementKind::Assign(Box::new((dropee_ptr, reborrow))), - StatementKind::Retag(RetagKind::FnEntry, Box::new(dropee_ptr)), - ]; - for s in new_statements { - body.basic_blocks_mut()[START_BLOCK] - .statements - .push(Statement { source_info, kind: s }); - } - } + let dropee_ptr = Place::from(Local::new(1 + 0)); + let dropee_ptr = dropee_emit_retag(tcx, &mut body, dropee_ptr, span); if ty.is_some() { let patch = { let typing_env = ty::TypingEnv::post_analysis(tcx, def_id); - let mut elaborator = - DropShimElaborator { body: &body, patch: MirPatch::new(&body), tcx, typing_env }; + let mut elaborator = DropShimElaborator { + body: &body, + patch: MirPatch::new(&body), + tcx, + typing_env, + produce_async_drops: false, + }; let dropee = tcx.mk_place_deref(dropee_ptr); let resume_block = elaborator.patch.resume_block(); elaborate_drop( @@ -290,6 +445,7 @@ fn build_drop_shim<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId, ty: Option>) return_block, Unwind::To(resume_block), START_BLOCK, + None, ); elaborator.patch }; @@ -338,6 +494,7 @@ pub(super) struct DropShimElaborator<'a, 'tcx> { pub patch: MirPatch<'tcx>, pub tcx: TyCtxt<'tcx>, pub typing_env: ty::TypingEnv<'tcx>, + pub produce_async_drops: bool, } impl fmt::Debug for DropShimElaborator<'_, '_> { @@ -365,6 +522,13 @@ impl<'a, 'tcx> DropElaborator<'a, 'tcx> for DropShimElaborator<'a, 'tcx> { self.typing_env } + fn terminator_loc(&self, bb: BasicBlock) -> Location { + self.patch.terminator_loc(self.body, bb) + } + fn allow_async_drops(&self) -> bool { + self.produce_async_drops + } + fn drop_style(&self, _path: Self::Path, mode: DropFlagMode) -> DropStyle { match mode { DropFlagMode::Shallow => { @@ -621,6 +785,8 @@ impl<'tcx> CloneShimBuilder<'tcx> { target: unwind, unwind: UnwindAction::Terminate(UnwindTerminateReason::InCleanup), replace: false, + drop: None, + async_fut: None, }, /* is_cleanup */ true, ); @@ -886,6 +1052,8 @@ fn build_call_shim<'tcx>( target: BasicBlock::new(2), unwind: UnwindAction::Continue, replace: false, + drop: None, + async_fut: None, }, false, ); @@ -903,6 +1071,8 @@ fn build_call_shim<'tcx>( target: BasicBlock::new(4), unwind: UnwindAction::Terminate(UnwindTerminateReason::InCleanup), replace: false, + drop: None, + async_fut: None, }, /* is_cleanup */ true, ); diff --git a/compiler/rustc_mir_transform/src/shim/async_destructor_ctor.rs b/compiler/rustc_mir_transform/src/shim/async_destructor_ctor.rs index 94b1b4b1855b5..7976b65aae7b0 100644 --- a/compiler/rustc_mir_transform/src/shim/async_destructor_ctor.rs +++ b/compiler/rustc_mir_transform/src/shim/async_destructor_ctor.rs @@ -1,639 +1,430 @@ -use std::iter; - -use itertools::Itertools; -use rustc_abi::{FieldIdx, VariantIdx}; -use rustc_const_eval::interpret; use rustc_hir::def_id::DefId; use rustc_hir::lang_items::LangItem; +use rustc_hir::{CoroutineDesugaring, CoroutineKind, CoroutineSource, Safety}; use rustc_index::{Idx, IndexVec}; -use rustc_middle::mir::*; -use rustc_middle::ty::adjustment::PointerCoercion; -use rustc_middle::ty::util::{AsyncDropGlueMorphology, Discr}; -use rustc_middle::ty::{self, Ty, TyCtxt}; -use rustc_middle::{bug, span_bug}; -use rustc_span::source_map::respan; -use rustc_span::{Span, Symbol}; -use rustc_target::spec::PanicStrategy; -use tracing::debug; - -use super::{local_decls_for_sig, new_body}; +use rustc_middle::mir::{ + BasicBlock, BasicBlockData, Body, Local, LocalDecl, MirSource, Operand, Place, Rvalue, + SourceInfo, Statement, StatementKind, Terminator, TerminatorKind, +}; +use rustc_middle::ty::{self, EarlyBinder, Ty, TyCtxt}; + +use super::*; +use crate::patch::MirPatch; pub(super) fn build_async_destructor_ctor_shim<'tcx>( tcx: TyCtxt<'tcx>, def_id: DefId, - ty: Option>, + ty: Ty<'tcx>, ) -> Body<'tcx> { - debug!("build_drop_shim(def_id={:?}, ty={:?})", def_id, ty); - - AsyncDestructorCtorShimBuilder::new(tcx, def_id, ty).build() + debug!("build_async_destructor_ctor_shim(def_id={:?}, ty={:?})", def_id, ty); + debug_assert_eq!(Some(def_id), tcx.lang_items().async_drop_in_place_fn()); + let generic_body = tcx.optimized_mir(def_id); + let args = tcx.mk_args(&[ty.into()]); + let mut body = EarlyBinder::bind(generic_body.clone()).instantiate(tcx, args); + + // Minimal shim passes except MentionedItems, + // it causes error "mentioned_items for DefId(...async_drop_in_place...) have already been set + pm::run_passes( + tcx, + &mut body, + &[ + &simplify::SimplifyCfg::MakeShim, + &abort_unwinding_calls::AbortUnwindingCalls, + &add_call_guards::CriticalCallEdges, + ], + None, + pm::Optimizations::Allowed, + ); + body } -/// Builder for async_drop_in_place shim. Functions as a stack machine -/// to build up an expression using combinators. Stack contains pairs -/// of locals and types. Combinator is a not yet instantiated pair of a -/// function and a type, is considered to be an operator which consumes -/// operands from the stack by instantiating its function and its type -/// with operand types and moving locals into the function call. Top -/// pair is considered to be the last operand. -// FIXME: add mir-opt tests -struct AsyncDestructorCtorShimBuilder<'tcx> { +// build_drop_shim analog for async drop glue (for generated coroutine poll function) +pub(super) fn build_async_drop_shim<'tcx>( tcx: TyCtxt<'tcx>, def_id: DefId, - self_ty: Option>, - span: Span, - source_info: SourceInfo, - typing_env: ty::TypingEnv<'tcx>, - - stack: Vec>, - last_bb: BasicBlock, - top_cleanup_bb: Option, - - locals: IndexVec>, - bbs: IndexVec>, -} - -#[derive(Clone, Copy)] -enum SurfaceDropKind { - Async, - Sync, -} - -impl<'tcx> AsyncDestructorCtorShimBuilder<'tcx> { - const SELF_PTR: Local = Local::from_u32(1); - const INPUT_COUNT: usize = 1; - const MAX_STACK_LEN: usize = 2; - - fn new(tcx: TyCtxt<'tcx>, def_id: DefId, self_ty: Option>) -> Self { - let args = if let Some(ty) = self_ty { - tcx.mk_args(&[ty.into()]) + ty: Ty<'tcx>, +) -> Body<'tcx> { + debug!("build_async_drop_shim(def_id={:?}, ty={:?})", def_id, ty); + let ty::Coroutine(_, parent_args) = ty.kind() else { + bug!(); + }; + let typing_env = ty::TypingEnv::fully_monomorphized(); + + let drop_ty = parent_args.first().unwrap().expect_ty(); + let drop_ptr_ty = Ty::new_mut_ptr(tcx, drop_ty); + + assert!(tcx.is_coroutine(def_id)); + let coroutine_kind = tcx.coroutine_kind(def_id).unwrap(); + + assert!(matches!( + coroutine_kind, + CoroutineKind::Desugared(CoroutineDesugaring::Async, CoroutineSource::Fn) + )); + + let needs_async_drop = drop_ty.needs_async_drop(tcx, typing_env); + let needs_sync_drop = !needs_async_drop && drop_ty.needs_drop(tcx, typing_env); + + let resume_adt = tcx.adt_def(tcx.require_lang_item(LangItem::ResumeTy, None)); + let resume_ty = Ty::new_adt(tcx, resume_adt, ty::List::empty()); + + let fn_sig = ty::Binder::dummy(tcx.mk_fn_sig( + [ty, resume_ty], + tcx.types.unit, + false, + Safety::Safe, + ExternAbi::Rust, + )); + let sig = tcx.instantiate_bound_regions_with_erased(fn_sig); + + assert!(!drop_ty.is_coroutine()); + let span = tcx.def_span(def_id); + let source_info = SourceInfo::outermost(span); + + // The first argument (index 0), but add 1 for the return value. + let coroutine_layout = Place::from(Local::new(1 + 0)); + let coroutine_layout_dropee = + tcx.mk_place_field(coroutine_layout, FieldIdx::new(0), drop_ptr_ty); + + let return_block = BasicBlock::new(1); + let mut blocks = IndexVec::with_capacity(2); + let block = |blocks: &mut IndexVec<_, _>, kind| { + blocks.push(BasicBlockData { + statements: vec![], + terminator: Some(Terminator { source_info, kind }), + is_cleanup: false, + }) + }; + block( + &mut blocks, + if needs_sync_drop { + TerminatorKind::Drop { + place: tcx.mk_place_deref(coroutine_layout_dropee), + target: return_block, + unwind: UnwindAction::Continue, + replace: false, + drop: None, + async_fut: None, + } } else { - ty::GenericArgs::identity_for_item(tcx, def_id) - }; - let sig = tcx.fn_sig(def_id).instantiate(tcx, args); - let sig = tcx.instantiate_bound_regions_with_erased(sig); - let span = tcx.def_span(def_id); + TerminatorKind::Goto { target: return_block } + }, + ); + block(&mut blocks, TerminatorKind::Return); + + let source = MirSource::from_instance(ty::InstanceKind::AsyncDropGlue(def_id, ty)); + let mut body = + new_body(source, blocks, local_decls_for_sig(&sig, span), sig.inputs().len(), span); + + body.coroutine = Some(Box::new(CoroutineInfo::initial( + coroutine_kind, + parent_args.as_coroutine().yield_ty(), + parent_args.as_coroutine().resume_ty(), + ))); + body.phase = MirPhase::Runtime(RuntimePhase::Initial); + if !needs_async_drop { + // Returning noop body for types without `need async drop` + // (or sync Drop in case of !`need async drop` && `need drop`) + return body; + } - let source_info = SourceInfo::outermost(span); + let mut dropee_ptr = Place::from(body.local_decls.push(LocalDecl::new(drop_ptr_ty, span))); + let st_kind = StatementKind::Assign(Box::new(( + dropee_ptr, + Rvalue::Use(Operand::Move(coroutine_layout_dropee)), + ))); + body.basic_blocks_mut()[START_BLOCK].statements.push(Statement { source_info, kind: st_kind }); + dropee_ptr = dropee_emit_retag(tcx, &mut body, dropee_ptr, span); - debug_assert_eq!(sig.inputs().len(), Self::INPUT_COUNT); - let locals = local_decls_for_sig(&sig, span); + let dropline = body.basic_blocks.last_index(); - // Usual case: noop() + unwind resume + return - let mut bbs = IndexVec::with_capacity(3); - let typing_env = ty::TypingEnv::post_analysis(tcx, def_id); - AsyncDestructorCtorShimBuilder { + let patch = { + let mut elaborator = DropShimElaborator { + body: &body, + patch: MirPatch::new(&body), tcx, - def_id, - self_ty, - span, - source_info, typing_env, - - stack: Vec::with_capacity(Self::MAX_STACK_LEN), - last_bb: bbs.push(BasicBlockData::new(None, false)), - top_cleanup_bb: match tcx.sess.panic_strategy() { - PanicStrategy::Unwind => { - // Don't drop input arg because it's just a pointer - Some(bbs.push(BasicBlockData { - statements: Vec::new(), - terminator: Some(Terminator { - source_info, - kind: TerminatorKind::UnwindResume, - }), - is_cleanup: true, - })) - } - PanicStrategy::Abort => None, - }, - - locals, - bbs, - } - } - - fn build(self) -> Body<'tcx> { - let (tcx, Some(self_ty)) = (self.tcx, self.self_ty) else { - return self.build_zst_output(); - }; - match self_ty.async_drop_glue_morphology(tcx) { - AsyncDropGlueMorphology::Noop => span_bug!( - self.span, - "async drop glue shim generator encountered type with noop async drop glue morphology" - ), - AsyncDropGlueMorphology::DeferredDropInPlace => { - return self.build_deferred_drop_in_place(); - } - AsyncDropGlueMorphology::Custom => (), - } - - let surface_drop_kind = || { - let adt_def = self_ty.ty_adt_def()?; - if adt_def.async_destructor(tcx).is_some() { - Some(SurfaceDropKind::Async) - } else if adt_def.destructor(tcx).is_some() { - Some(SurfaceDropKind::Sync) - } else { - None - } + produce_async_drops: true, }; + let dropee = tcx.mk_place_deref(dropee_ptr); + let resume_block = elaborator.patch.resume_block(); + elaborate_drop( + &mut elaborator, + source_info, + dropee, + (), + return_block, + Unwind::To(resume_block), + START_BLOCK, + dropline, + ); + elaborator.patch + }; + patch.apply(&mut body); - match self_ty.kind() { - ty::Array(elem_ty, _) => self.build_slice(true, *elem_ty), - ty::Slice(elem_ty) => self.build_slice(false, *elem_ty), - - ty::Tuple(elem_tys) => self.build_chain(None, elem_tys.iter()), - ty::Adt(adt_def, args) if adt_def.is_struct() => { - let field_tys = adt_def.non_enum_variant().fields.iter().map(|f| f.ty(tcx, args)); - self.build_chain(surface_drop_kind(), field_tys) - } - ty::Closure(_, args) => self.build_chain(None, args.as_closure().upvar_tys().iter()), - ty::CoroutineClosure(_, args) => { - self.build_chain(None, args.as_coroutine_closure().upvar_tys().iter()) - } + body +} - ty::Adt(adt_def, args) if adt_def.is_enum() => { - self.build_enum(*adt_def, *args, surface_drop_kind()) - } +// * For async drop a "normal" coroutine: +// `async_drop_in_place::{closure}.poll()` is converted into `T.future_drop_poll()`. +// Every coroutine has its `poll` (calculate yourself a little further) +// and its `future_drop_poll` (drop yourself a little further). +// +// * For async drop of "async drop coroutine" (`async_drop_in_place::{closure}`): +// Correct drop of such coroutine means normal execution of nested async drop. +// async_drop(async_drop(T))::future_drop_poll() => async_drop(T)::poll(). +pub(super) fn build_future_drop_poll_shim<'tcx>( + tcx: TyCtxt<'tcx>, + def_id: DefId, + proxy_ty: Ty<'tcx>, + impl_ty: Ty<'tcx>, +) -> Body<'tcx> { + let instance = ty::InstanceKind::FutureDropPollShim(def_id, proxy_ty, impl_ty); + let ty::Coroutine(coroutine_def_id, _) = impl_ty.kind() else { + bug!("build_future_drop_poll_shim not for coroutine impl type: ({:?})", instance); + }; - ty::Adt(adt_def, _) => { - assert!(adt_def.is_union()); - match surface_drop_kind().unwrap() { - SurfaceDropKind::Async => self.build_fused_async_surface(), - SurfaceDropKind::Sync => self.build_fused_sync_surface(), - } - } + let span = tcx.def_span(def_id); - ty::Bound(..) - | ty::Foreign(_) - | ty::Placeholder(_) - | ty::Infer(ty::FreshTy(_) | ty::FreshIntTy(_) | ty::FreshFloatTy(_) | ty::TyVar(_)) - | ty::Param(_) - | ty::Alias(..) => { - bug!("Building async destructor for unexpected type: {self_ty:?}") - } - - _ => { - bug!( - "Building async destructor constructor shim is not yet implemented for type: {self_ty:?}" - ) - } - } + if tcx.is_async_drop_in_place_coroutine(*coroutine_def_id) { + build_adrop_for_adrop_shim(tcx, proxy_ty, impl_ty, span, instance) + } else { + build_adrop_for_coroutine_shim(tcx, proxy_ty, impl_ty, span, instance) } +} - fn build_enum( - mut self, - adt_def: ty::AdtDef<'tcx>, - args: ty::GenericArgsRef<'tcx>, - surface_drop: Option, - ) -> Body<'tcx> { - let tcx = self.tcx; - - let surface = match surface_drop { - None => None, - Some(kind) => { - self.put_self(); - Some(match kind { - SurfaceDropKind::Async => self.combine_async_surface(), - SurfaceDropKind::Sync => self.combine_sync_surface(), - }) - } - }; - - let mut other = None; - for (variant_idx, discr) in adt_def.discriminants(tcx) { - let variant = adt_def.variant(variant_idx); - - let mut chain = None; - for (field_idx, field) in variant.fields.iter_enumerated() { - let field_ty = field.ty(tcx, args); - self.put_variant_field(variant.name, variant_idx, field_idx, field_ty); - let defer = self.combine_defer(field_ty); - chain = Some(match chain { - None => defer, - Some(chain) => self.combine_chain(chain, defer), - }) - } - let variant_dtor = chain.unwrap_or_else(|| self.put_noop()); - - other = Some(match other { - None => variant_dtor, - Some(other) => { - self.put_self(); - self.put_discr(discr); - self.combine_either(other, variant_dtor) - } - }); - } - let variants_dtor = other.unwrap_or_else(|| self.put_noop()); - - let dtor = match surface { - None => variants_dtor, - Some(surface) => self.combine_chain(surface, variants_dtor), - }; - self.combine_fuse(dtor); - self.return_() - } +// For async drop a "normal" coroutine: +// `async_drop_in_place::{closure}.poll()` is converted into `T.future_drop_poll()`. +// Every coroutine has its `poll` (calculate yourself a little further) +// and its `future_drop_poll` (drop yourself a little further). +fn build_adrop_for_coroutine_shim<'tcx>( + tcx: TyCtxt<'tcx>, + proxy_ty: Ty<'tcx>, + impl_ty: Ty<'tcx>, + span: Span, + instance: ty::InstanceKind<'tcx>, +) -> Body<'tcx> { + let ty::Coroutine(coroutine_def_id, impl_args) = impl_ty.kind() else { + bug!("build_adrop_for_coroutine_shim not for coroutine impl type: ({:?})", instance); + }; + let proxy_ref = Ty::new_mut_ref(tcx, tcx.lifetimes.re_erased, proxy_ty); + // taking _1.0 (impl from Pin) + let pin_proxy_layout_local = Local::new(1); + let source_info = SourceInfo::outermost(span); + // converting `(_1: Pin<&mut CorLayout>, _2: &mut Context<'_>) -> Poll<()>` + // into `(_1: Pin<&mut ProxyLayout>, _2: &mut Context<'_>) -> Poll<()>` + // let mut _x: &mut CorLayout = &*_1.0.0; + // Replace old _1.0 accesses into _x accesses; + let body = tcx.optimized_mir(*coroutine_def_id).future_drop_poll().unwrap(); + let mut body: Body<'tcx> = EarlyBinder::bind(body.clone()).instantiate(tcx, impl_args); + body.source.instance = instance; + body.phase = MirPhase::Runtime(RuntimePhase::Initial); + body.var_debug_info.clear(); + let pin_adt_ref = tcx.adt_def(tcx.require_lang_item(LangItem::Pin, Some(span))); + let args = tcx.mk_args(&[proxy_ref.into()]); + let pin_proxy_ref = Ty::new_adt(tcx, pin_adt_ref, args); + + let cor_ref = Ty::new_mut_ref(tcx, tcx.lifetimes.re_erased, impl_ty); + + let proxy_ref_local = body.local_decls.push(LocalDecl::new(proxy_ref, span)); + let cor_ref_local = body.local_decls.push(LocalDecl::new(cor_ref, span)); + + FixProxyFutureDropVisitor { tcx, replace_to: cor_ref_local }.visit_body(&mut body); + // Now changing first arg from Pin<&mut ImplCoroutine> to Pin<&mut ProxyCoroutine> + body.local_decls[pin_proxy_layout_local] = LocalDecl::new(pin_proxy_ref, span); - fn build_chain(mut self, surface_drop: Option, elem_tys: I) -> Body<'tcx> - where - I: Iterator> + ExactSizeIterator, { - let surface = match surface_drop { - None => None, - Some(kind) => { - self.put_self(); - Some(match kind { - SurfaceDropKind::Async => self.combine_async_surface(), - SurfaceDropKind::Sync => self.combine_sync_surface(), - }) - } - }; - - let mut chain = None; - for (field_idx, field_ty) in elem_tys.enumerate().map(|(i, ty)| (FieldIdx::new(i), ty)) { - self.put_field(field_idx, field_ty); - let defer = self.combine_defer(field_ty); - chain = Some(match chain { - None => defer, - Some(chain) => self.combine_chain(chain, defer), - }) - } - let chain = chain.unwrap_or_else(|| self.put_noop()); - - let dtor = match surface { - None => chain, - Some(surface) => self.combine_chain(surface, chain), - }; - self.combine_fuse(dtor); - self.return_() - } - - fn build_zst_output(mut self) -> Body<'tcx> { - self.put_zst_output(); - self.return_() - } - - fn build_deferred_drop_in_place(mut self) -> Body<'tcx> { - self.put_self(); - let deferred = self.combine_deferred_drop_in_place(); - self.combine_fuse(deferred); - self.return_() - } - - fn build_fused_async_surface(mut self) -> Body<'tcx> { - self.put_self(); - let surface = self.combine_async_surface(); - self.combine_fuse(surface); - self.return_() - } - - fn build_fused_sync_surface(mut self) -> Body<'tcx> { - self.put_self(); - let surface = self.combine_sync_surface(); - self.combine_fuse(surface); - self.return_() - } - - fn build_slice(mut self, is_array: bool, elem_ty: Ty<'tcx>) -> Body<'tcx> { - if is_array { - self.put_array_as_slice(elem_ty) - } else { - self.put_self() - } - let dtor = self.combine_slice(elem_ty); - self.combine_fuse(dtor); - self.return_() - } - - fn put_zst_output(&mut self) { - let return_ty = self.locals[RETURN_PLACE].ty; - self.put_operand(Operand::Constant(Box::new(ConstOperand { - span: self.span, - user_ty: None, - const_: Const::zero_sized(return_ty), - }))); - } - - /// Puts `to_drop: *mut Self` on top of the stack. - fn put_self(&mut self) { - self.put_operand(Operand::Copy(Self::SELF_PTR.into())) - } - - /// Given that `Self is [ElemTy; N]` puts `to_drop: *mut [ElemTy]` - /// on top of the stack. - fn put_array_as_slice(&mut self, elem_ty: Ty<'tcx>) { - let slice_ptr_ty = Ty::new_mut_ptr(self.tcx, Ty::new_slice(self.tcx, elem_ty)); - self.put_temp_rvalue(Rvalue::Cast( - CastKind::PointerCoercion(PointerCoercion::Unsize, CoercionSource::Implicit), - Operand::Copy(Self::SELF_PTR.into()), - slice_ptr_ty, - )) - } - - /// If given Self is a struct puts `to_drop: *mut FieldTy` on top - /// of the stack. - fn put_field(&mut self, field: FieldIdx, field_ty: Ty<'tcx>) { - let place = Place { - local: Self::SELF_PTR, - projection: self - .tcx - .mk_place_elems(&[PlaceElem::Deref, PlaceElem::Field(field, field_ty)]), - }; - self.put_temp_rvalue(Rvalue::RawPtr(RawPtrKind::Mut, place)) - } - - /// If given Self is an enum puts `to_drop: *mut FieldTy` on top of - /// the stack. - fn put_variant_field( - &mut self, - variant_sym: Symbol, - variant: VariantIdx, - field: FieldIdx, - field_ty: Ty<'tcx>, - ) { - let place = Place { - local: Self::SELF_PTR, - projection: self.tcx.mk_place_elems(&[ - PlaceElem::Deref, - PlaceElem::Downcast(Some(variant_sym), variant), - PlaceElem::Field(field, field_ty), - ]), - }; - self.put_temp_rvalue(Rvalue::RawPtr(RawPtrKind::Mut, place)) - } - - /// If given Self is an enum puts `to_drop: *mut FieldTy` on top of - /// the stack. - fn put_discr(&mut self, discr: Discr<'tcx>) { - let (size, _) = discr.ty.int_size_and_signed(self.tcx); - self.put_operand(Operand::const_from_scalar( - self.tcx, - discr.ty, - interpret::Scalar::from_uint(discr.val, size), - self.span, - )); - } - - /// Puts `x: RvalueType` on top of the stack. - fn put_temp_rvalue(&mut self, rvalue: Rvalue<'tcx>) { - let last_bb = &mut self.bbs[self.last_bb]; - debug_assert!(last_bb.terminator.is_none()); - let source_info = self.source_info; - - let local_ty = rvalue.ty(&self.locals, self.tcx); - // We need to create a new local to be able to "consume" it with - // a combinator - let local = self.locals.push(LocalDecl::with_source_info(local_ty, source_info)); - last_bb.statements.extend_from_slice(&[ - Statement { source_info, kind: StatementKind::StorageLive(local) }, + let mut idx: usize = 0; + // _proxy = _1.0 : Pin<&ProxyLayout> ==> &ProxyLayout + let proxy_ref_place = Place::from(pin_proxy_layout_local) + .project_deeper(&[PlaceElem::Field(FieldIdx::ZERO, proxy_ref)], tcx); + body.basic_blocks_mut()[START_BLOCK].statements.insert( + idx, Statement { source_info, - kind: StatementKind::Assign(Box::new((local.into(), rvalue))), + kind: StatementKind::Assign(Box::new(( + Place::from(proxy_ref_local), + Rvalue::CopyForDeref(proxy_ref_place), + ))), }, - ]); - - self.put_operand(Operand::Move(local.into())); - } - - /// Puts operand on top of the stack. - fn put_operand(&mut self, operand: Operand<'tcx>) { - if let Some(top_cleanup_bb) = &mut self.top_cleanup_bb { - let source_info = self.source_info; - match &operand { - Operand::Copy(_) | Operand::Constant(_) => { - *top_cleanup_bb = self.bbs.push(BasicBlockData { - statements: Vec::new(), - terminator: Some(Terminator { - source_info, - kind: TerminatorKind::Goto { target: *top_cleanup_bb }, - }), - is_cleanup: true, - }); - } - Operand::Move(place) => { - let local = place.as_local().unwrap(); - *top_cleanup_bb = self.bbs.push(BasicBlockData { - statements: Vec::new(), - terminator: Some(Terminator { - source_info, - kind: if self.locals[local].ty.needs_drop(self.tcx, self.typing_env) { - TerminatorKind::Drop { - place: local.into(), - target: *top_cleanup_bb, - unwind: UnwindAction::Terminate( - UnwindTerminateReason::InCleanup, - ), - replace: false, - } - } else { - TerminatorKind::Goto { target: *top_cleanup_bb } - }, - }), - is_cleanup: true, - }); - } - }; - } - self.stack.push(operand); - } - - /// Puts `noop: async_drop::Noop` on top of the stack - fn put_noop(&mut self) -> Ty<'tcx> { - self.apply_combinator(0, LangItem::AsyncDropNoop, &[]) - } - - fn combine_async_surface(&mut self) -> Ty<'tcx> { - self.apply_combinator(1, LangItem::SurfaceAsyncDropInPlace, &[self.self_ty.unwrap().into()]) - } - - fn combine_sync_surface(&mut self) -> Ty<'tcx> { - self.apply_combinator( - 1, - LangItem::AsyncDropSurfaceDropInPlace, - &[self.self_ty.unwrap().into()], - ) - } - - fn combine_deferred_drop_in_place(&mut self) -> Ty<'tcx> { - self.apply_combinator( - 1, - LangItem::AsyncDropDeferredDropInPlace, - &[self.self_ty.unwrap().into()], - ) - } - - fn combine_fuse(&mut self, inner_future_ty: Ty<'tcx>) -> Ty<'tcx> { - self.apply_combinator(1, LangItem::AsyncDropFuse, &[inner_future_ty.into()]) - } - - fn combine_slice(&mut self, elem_ty: Ty<'tcx>) -> Ty<'tcx> { - self.apply_combinator(1, LangItem::AsyncDropSlice, &[elem_ty.into()]) - } - - fn combine_defer(&mut self, to_drop_ty: Ty<'tcx>) -> Ty<'tcx> { - self.apply_combinator(1, LangItem::AsyncDropDefer, &[to_drop_ty.into()]) - } - - fn combine_chain(&mut self, first: Ty<'tcx>, second: Ty<'tcx>) -> Ty<'tcx> { - self.apply_combinator(2, LangItem::AsyncDropChain, &[first.into(), second.into()]) - } - - fn combine_either(&mut self, other: Ty<'tcx>, matched: Ty<'tcx>) -> Ty<'tcx> { - self.apply_combinator( - 4, - LangItem::AsyncDropEither, - &[other.into(), matched.into(), self.self_ty.unwrap().into()], - ) - } - - fn return_(mut self) -> Body<'tcx> { - let last_bb = &mut self.bbs[self.last_bb]; - debug_assert!(last_bb.terminator.is_none()); - let source_info = self.source_info; - - let (1, Some(output)) = (self.stack.len(), self.stack.pop()) else { - span_bug!( - self.span, - "async destructor ctor shim builder finished with invalid number of stack items: expected 1 found {}", - self.stack.len(), - ) - }; - #[cfg(debug_assertions)] - if let Some(ty) = self.self_ty { - debug_assert_eq!( - output.ty(&self.locals, self.tcx), - ty.async_destructor_ty(self.tcx), - "output async destructor types did not match for type: {ty:?}", - ); - } - - let dead_storage = match &output { - Operand::Move(place) => Some(Statement { - source_info, - kind: StatementKind::StorageDead(place.as_local().unwrap()), - }), - _ => None, - }; - - last_bb.statements.extend( - iter::once(Statement { - source_info, - kind: StatementKind::Assign(Box::new((RETURN_PLACE.into(), Rvalue::Use(output)))), - }) - .chain(dead_storage), ); - - last_bb.terminator = Some(Terminator { source_info, kind: TerminatorKind::Return }); - - let source = MirSource::from_instance(ty::InstanceKind::AsyncDropGlueCtorShim( - self.def_id, - self.self_ty, - )); - new_body(source, self.bbs, self.locals, Self::INPUT_COUNT, self.span) - } - - fn apply_combinator( - &mut self, - arity: usize, - function: LangItem, - args: &[ty::GenericArg<'tcx>], - ) -> Ty<'tcx> { - let function = self.tcx.require_lang_item(function, Some(self.span)); - let operands_split = self - .stack - .len() - .checked_sub(arity) - .expect("async destructor ctor shim combinator tried to consume too many items"); - let operands = &self.stack[operands_split..]; - - let func_ty = Ty::new_fn_def(self.tcx, function, args.iter().copied()); - let func_sig = func_ty.fn_sig(self.tcx).no_bound_vars().unwrap(); - #[cfg(debug_assertions)] - operands.iter().zip(func_sig.inputs()).for_each(|(operand, expected_ty)| { - let operand_ty = operand.ty(&self.locals, self.tcx); - if operand_ty == *expected_ty { - return; - } - - // If projection of Discriminant then compare with `Ty::discriminant_ty` - if let ty::Alias(ty::Projection, ty::AliasTy { args, def_id, .. }) = expected_ty.kind() - && self.tcx.is_lang_item(*def_id, LangItem::Discriminant) - && args.first().unwrap().as_type().unwrap().discriminant_ty(self.tcx) == operand_ty - { - return; + idx += 1; + let mut cor_ptr_local = proxy_ref_local; + proxy_ty.find_async_drop_impl_coroutine(tcx, |ty| { + if ty != proxy_ty { + let ty_ptr = Ty::new_mut_ptr(tcx, ty); + let impl_ptr_place = Place::from(cor_ptr_local).project_deeper( + &[PlaceElem::Deref, PlaceElem::Field(FieldIdx::ZERO, ty_ptr)], + tcx, + ); + cor_ptr_local = body.local_decls.push(LocalDecl::new(ty_ptr, span)); + // _cor_ptr = _proxy.0.0 (... .0) + body.basic_blocks_mut()[START_BLOCK].statements.insert( + idx, + Statement { + source_info, + kind: StatementKind::Assign(Box::new(( + Place::from(cor_ptr_local), + Rvalue::CopyForDeref(impl_ptr_place), + ))), + }, + ); + idx += 1; } - - span_bug!( - self.span, - "Operand type and combinator argument type are not equal. - operand_ty: {:?} - argument_ty: {:?} -", - operand_ty, - expected_ty - ); - }); - - let target = self.bbs.push(BasicBlockData { - statements: operands - .iter() - .rev() - .filter_map(|o| { - if let Operand::Move(Place { local, projection }) = o { - assert!(projection.is_empty()); - Some(Statement { - source_info: self.source_info, - kind: StatementKind::StorageDead(*local), - }) - } else { - None - } - }) - .collect(), - terminator: None, - is_cleanup: false, }); - let dest_ty = func_sig.output(); - let dest = - self.locals.push(LocalDecl::with_source_info(dest_ty, self.source_info).immutable()); - - let unwind = if let Some(top_cleanup_bb) = &mut self.top_cleanup_bb { - for _ in 0..arity { - *top_cleanup_bb = - self.bbs[*top_cleanup_bb].terminator().successors().exactly_one().ok().unwrap(); - } - UnwindAction::Cleanup(*top_cleanup_bb) - } else { - UnwindAction::Unreachable - }; + // _cor_ref = &*cor_ptr + let reborrow = Rvalue::Ref( + tcx.lifetimes.re_erased, + BorrowKind::Mut { kind: MutBorrowKind::Default }, + tcx.mk_place_deref(Place::from(cor_ptr_local)), + ); + body.basic_blocks_mut()[START_BLOCK].statements.insert( + idx, + Statement { + source_info, + kind: StatementKind::Assign(Box::new((Place::from(cor_ref_local), reborrow))), + }, + ); + } + body +} - let last_bb = &mut self.bbs[self.last_bb]; - debug_assert!(last_bb.terminator.is_none()); - last_bb.statements.push(Statement { - source_info: self.source_info, - kind: StatementKind::StorageLive(dest), - }); - last_bb.terminator = Some(Terminator { - source_info: self.source_info, +// When dropping async drop coroutine, we continue its execution. +// async_drop(async_drop(T))::future_drop_poll() => async_drop(T)::poll() +fn build_adrop_for_adrop_shim<'tcx>( + tcx: TyCtxt<'tcx>, + proxy_ty: Ty<'tcx>, + impl_ty: Ty<'tcx>, + span: Span, + instance: ty::InstanceKind<'tcx>, +) -> Body<'tcx> { + let source_info = SourceInfo::outermost(span); + let proxy_ref = Ty::new_mut_ref(tcx, tcx.lifetimes.re_erased, proxy_ty); + // taking _1.0 (impl from Pin) + let pin_proxy_layout_local = Local::new(1); + let proxy_ref_place = Place::from(pin_proxy_layout_local) + .project_deeper(&[PlaceElem::Field(FieldIdx::ZERO, proxy_ref)], tcx); + let cor_ref = Ty::new_mut_ref(tcx, tcx.lifetimes.re_erased, impl_ty); + + // ret_ty = `Poll<()>` + let poll_adt_ref = tcx.adt_def(tcx.require_lang_item(LangItem::Poll, None)); + let ret_ty = Ty::new_adt(tcx, poll_adt_ref, tcx.mk_args(&[tcx.types.unit.into()])); + // env_ty = `Pin<&mut proxy_ty>` + let pin_adt_ref = tcx.adt_def(tcx.require_lang_item(LangItem::Pin, None)); + let env_ty = Ty::new_adt(tcx, pin_adt_ref, tcx.mk_args(&[proxy_ref.into()])); + // sig = `fn (Pin<&mut proxy_ty>, &mut Context) -> Poll<()>` + let sig = tcx.mk_fn_sig( + [env_ty, Ty::new_task_context(tcx)], + ret_ty, + false, + hir::Safety::Safe, + ExternAbi::Rust, + ); + // This function will be called with pinned proxy coroutine layout. + // We need to extract `Arg0.0` to get proxy layout, and then get `.0` + // further to receive impl coroutine (may be needed) + let mut locals = local_decls_for_sig(&sig, span); + let mut blocks = IndexVec::with_capacity(3); + + let proxy_ref_local = locals.push(LocalDecl::new(proxy_ref, span)); + + let call_bb = BasicBlock::new(1); + let return_bb = BasicBlock::new(2); + + let mut statements = Vec::new(); + + statements.push(Statement { + source_info, + kind: StatementKind::Assign(Box::new(( + Place::from(proxy_ref_local), + Rvalue::CopyForDeref(proxy_ref_place), + ))), + }); + + let mut cor_ptr_local = proxy_ref_local; + proxy_ty.find_async_drop_impl_coroutine(tcx, |ty| { + if ty != proxy_ty { + let ty_ptr = Ty::new_mut_ptr(tcx, ty); + let impl_ptr_place = Place::from(cor_ptr_local) + .project_deeper(&[PlaceElem::Deref, PlaceElem::Field(FieldIdx::ZERO, ty_ptr)], tcx); + cor_ptr_local = locals.push(LocalDecl::new(ty_ptr, span)); + // _cor_ptr = _proxy.0.0 (... .0) + statements.push(Statement { + source_info, + kind: StatementKind::Assign(Box::new(( + Place::from(cor_ptr_local), + Rvalue::CopyForDeref(impl_ptr_place), + ))), + }); + } + }); + + // convert impl coroutine ptr into ref + let reborrow = Rvalue::Ref( + tcx.lifetimes.re_erased, + BorrowKind::Mut { kind: MutBorrowKind::Default }, + tcx.mk_place_deref(Place::from(cor_ptr_local)), + ); + let cor_ref_place = Place::from(locals.push(LocalDecl::new(cor_ref, span))); + statements.push(Statement { + source_info, + kind: StatementKind::Assign(Box::new((cor_ref_place, reborrow))), + }); + + // cor_pin_ty = `Pin<&mut cor_ref>` + let cor_pin_ty = Ty::new_adt(tcx, pin_adt_ref, tcx.mk_args(&[cor_ref.into()])); + let cor_pin_place = Place::from(locals.push(LocalDecl::new(cor_pin_ty, span))); + + let pin_fn = tcx.require_lang_item(LangItem::PinNewUnchecked, Some(span)); + // call Pin::new_unchecked(&mut impl_cor) + blocks.push(BasicBlockData { + statements, + terminator: Some(Terminator { + source_info, kind: TerminatorKind::Call { - func: Operand::Constant(Box::new(ConstOperand { - span: self.span, - user_ty: None, - const_: Const::Val(ConstValue::ZeroSized, func_ty), - })), - destination: dest.into(), - target: Some(target), - unwind, + func: Operand::function_handle(tcx, pin_fn, [cor_ref.into()], span), + args: [dummy_spanned(Operand::Move(cor_ref_place))].into(), + destination: cor_pin_place, + target: Some(call_bb), + unwind: UnwindAction::Continue, call_source: CallSource::Misc, - fn_span: self.span, - args: self.stack.drain(operands_split..).map(|o| respan(self.span, o)).collect(), + fn_span: span, }, - }); - - self.put_operand(Operand::Move(dest.into())); - self.last_bb = target; - - dest_ty - } + }), + is_cleanup: false, + }); + // When dropping async drop coroutine, we continue its execution: + // we call impl::poll (impl_layout, ctx) + let poll_fn = tcx.require_lang_item(LangItem::FuturePoll, None); + let resume_ctx = Place::from(Local::new(2)); + blocks.push(BasicBlockData { + statements: vec![], + terminator: Some(Terminator { + source_info, + kind: TerminatorKind::Call { + func: Operand::function_handle(tcx, poll_fn, [impl_ty.into()], span), + args: [ + dummy_spanned(Operand::Move(cor_pin_place)), + dummy_spanned(Operand::Move(resume_ctx)), + ] + .into(), + destination: Place::return_place(), + target: Some(return_bb), + unwind: UnwindAction::Continue, + call_source: CallSource::Misc, + fn_span: span, + }, + }), + is_cleanup: false, + }); + blocks.push(BasicBlockData { + statements: vec![], + terminator: Some(Terminator { source_info, kind: TerminatorKind::Return }), + is_cleanup: false, + }); + + let source = MirSource::from_instance(instance); + let mut body = new_body(source, blocks, locals, sig.inputs().len(), span); + body.phase = MirPhase::Runtime(RuntimePhase::Initial); + return body; } diff --git a/compiler/rustc_mir_transform/src/simplify.rs b/compiler/rustc_mir_transform/src/simplify.rs index 84905f4a400f3..8f88228d9bbd8 100644 --- a/compiler/rustc_mir_transform/src/simplify.rs +++ b/compiler/rustc_mir_transform/src/simplify.rs @@ -26,6 +26,13 @@ //! Here the block (`{ return; }`) has the return type `char`, rather than `()`, but the MIR we //! naively generate still contains the `_a = ()` write in the unreachable block "after" the //! return. +//! +//! **WARNING**: This is one of the few optimizations that runs on built and analysis MIR, and +//! so its effects may affect the type-checking, borrow-checking, and other analysis of MIR. +//! We must be extremely careful to only apply optimizations that preserve UB and all +//! non-determinism, since changes here can affect which programs compile in an insta-stable way. +//! The normal logic that a program with UB can be changed to do anything does not apply to +//! pre-"runtime" MIR! use rustc_index::{Idx, IndexSlice, IndexVec}; use rustc_middle::mir::visit::{MutVisitor, MutatingUseContext, PlaceContext, Visitor}; @@ -66,8 +73,8 @@ impl SimplifyCfg { } } -pub(super) fn simplify_cfg(body: &mut Body<'_>) { - CfgSimplifier::new(body).simplify(); +pub(super) fn simplify_cfg<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) { + CfgSimplifier::new(tcx, body).simplify(); remove_dead_blocks(body); // FIXME: Should probably be moved into some kind of pass manager @@ -79,9 +86,9 @@ impl<'tcx> crate::MirPass<'tcx> for SimplifyCfg { self.name() } - fn run_pass(&self, _: TyCtxt<'tcx>, body: &mut Body<'tcx>) { + fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) { debug!("SimplifyCfg({:?}) - simplifying {:?}", self.name(), body.source); - simplify_cfg(body); + simplify_cfg(tcx, body); } fn is_required(&self) -> bool { @@ -90,12 +97,13 @@ impl<'tcx> crate::MirPass<'tcx> for SimplifyCfg { } struct CfgSimplifier<'a, 'tcx> { + preserve_switch_reads: bool, basic_blocks: &'a mut IndexSlice>, pred_count: IndexVec, } impl<'a, 'tcx> CfgSimplifier<'a, 'tcx> { - fn new(body: &'a mut Body<'tcx>) -> Self { + fn new(tcx: TyCtxt<'tcx>, body: &'a mut Body<'tcx>) -> Self { let mut pred_count = IndexVec::from_elem(0u32, &body.basic_blocks); // we can't use mir.predecessors() here because that counts @@ -110,9 +118,12 @@ impl<'a, 'tcx> CfgSimplifier<'a, 'tcx> { } } + // Preserve `SwitchInt` reads on built and analysis MIR, or if `-Zmir-preserve-ub`. + let preserve_switch_reads = matches!(body.phase, MirPhase::Built | MirPhase::Analysis(_)) + || tcx.sess.opts.unstable_opts.mir_preserve_ub; let basic_blocks = body.basic_blocks_mut(); - CfgSimplifier { basic_blocks, pred_count } + CfgSimplifier { preserve_switch_reads, basic_blocks, pred_count } } fn simplify(mut self) { @@ -136,9 +147,8 @@ impl<'a, 'tcx> CfgSimplifier<'a, 'tcx> { let mut terminator = self.basic_blocks[bb].terminator.take().expect("invalid terminator state"); - for successor in terminator.successors_mut() { - self.collapse_goto_chain(successor, &mut changed); - } + terminator + .successors_mut(|successor| self.collapse_goto_chain(successor, &mut changed)); let mut inner_changed = true; merged_blocks.clear(); @@ -253,9 +263,15 @@ impl<'a, 'tcx> CfgSimplifier<'a, 'tcx> { // turn a branch with all successors identical to a goto fn simplify_branch(&mut self, terminator: &mut Terminator<'tcx>) -> bool { - match terminator.kind { - TerminatorKind::SwitchInt { .. } => {} - _ => return false, + // Removing a `SwitchInt` terminator may remove reads that result in UB, + // so we must not apply this optimization before borrowck or when + // `-Zmir-preserve-ub` is set. + if self.preserve_switch_reads { + return false; + } + + let TerminatorKind::SwitchInt { .. } = terminator.kind else { + return false; }; let first_succ = { @@ -358,9 +374,7 @@ pub(super) fn remove_dead_blocks(body: &mut Body<'_>) { } for block in basic_blocks { - for target in block.terminator_mut().successors_mut() { - *target = replacements[target.index()]; - } + block.terminator_mut().successors_mut(|target| *target = replacements[target.index()]); } } @@ -597,20 +611,6 @@ impl<'tcx> MutVisitor<'tcx> for LocalUpdater<'tcx> { self.tcx } - fn visit_statement(&mut self, statement: &mut Statement<'tcx>, location: Location) { - if let StatementKind::BackwardIncompatibleDropHint { place, reason: _ } = - &mut statement.kind - { - self.visit_local( - &mut place.local, - PlaceContext::MutatingUse(MutatingUseContext::Store), - location, - ); - } else { - self.super_statement(statement, location); - } - } - fn visit_local(&mut self, l: &mut Local, _: PlaceContext, _: Location) { *l = self.map[*l].unwrap(); } diff --git a/compiler/rustc_mir_transform/src/ssa.rs b/compiler/rustc_mir_transform/src/ssa.rs index 3d512fb064ec4..edd0cabca49a4 100644 --- a/compiler/rustc_mir_transform/src/ssa.rs +++ b/compiler/rustc_mir_transform/src/ssa.rs @@ -32,12 +32,6 @@ pub(super) struct SsaLocals { borrowed_locals: DenseBitSet, } -pub(super) enum AssignedValue<'a, 'tcx> { - Arg, - Rvalue(&'a mut Rvalue<'tcx>), - Terminator, -} - impl SsaLocals { pub(super) fn new<'tcx>( tcx: TyCtxt<'tcx>, @@ -152,38 +146,6 @@ impl SsaLocals { }) } - pub(super) fn for_each_assignment_mut<'tcx>( - &self, - basic_blocks: &mut IndexSlice>, - mut f: impl FnMut(Local, AssignedValue<'_, 'tcx>, Location), - ) { - for &local in &self.assignment_order { - match self.assignments[local] { - Set1::One(DefLocation::Argument) => f( - local, - AssignedValue::Arg, - Location { block: START_BLOCK, statement_index: 0 }, - ), - Set1::One(DefLocation::Assignment(loc)) => { - let bb = &mut basic_blocks[loc.block]; - // `loc` must point to a direct assignment to `local`. - let stmt = &mut bb.statements[loc.statement_index]; - let StatementKind::Assign(box (target, ref mut rvalue)) = stmt.kind else { - bug!() - }; - assert_eq!(target.as_local(), Some(local)); - f(local, AssignedValue::Rvalue(rvalue), loc) - } - Set1::One(DefLocation::CallReturn { call, .. }) => { - let bb = &mut basic_blocks[call]; - let loc = Location { block: call, statement_index: bb.statements.len() }; - f(local, AssignedValue::Terminator, loc) - } - _ => {} - } - } - } - /// Compute the equivalence classes for locals, based on copy statements. /// /// The returned vector maps each local to the one it copies. In the following case: diff --git a/compiler/rustc_mir_transform/src/validate.rs b/compiler/rustc_mir_transform/src/validate.rs index e7930f0a1e3f6..f541a32cd2645 100644 --- a/compiler/rustc_mir_transform/src/validate.rs +++ b/compiler/rustc_mir_transform/src/validate.rs @@ -221,12 +221,11 @@ impl<'a, 'tcx> CfgChecker<'a, 'tcx> { // Check for cycles let mut stack = FxHashSet::default(); - for i in 0..parent.len() { - let mut bb = BasicBlock::from_usize(i); + for (mut bb, parent) in parent.iter_enumerated_mut() { stack.clear(); stack.insert(bb); loop { - let Some(parent) = parent[bb].take() else { break }; + let Some(parent) = parent.take() else { break }; let no_cycle = stack.insert(parent); if !no_cycle { self.fail( @@ -373,9 +372,12 @@ impl<'a, 'tcx> Visitor<'tcx> for CfgChecker<'a, 'tcx> { ); } } - TerminatorKind::Drop { target, unwind, .. } => { + TerminatorKind::Drop { target, unwind, drop, .. } => { self.check_edge(location, *target, EdgeKind::Normal); self.check_unwind_edge(location, *unwind); + if let Some(drop) = drop { + self.check_edge(location, *drop, EdgeKind::Normal); + } } TerminatorKind::Call { func, args, .. } | TerminatorKind::TailCall { func, args, .. } => { @@ -748,7 +750,9 @@ impl<'a, 'tcx> Visitor<'tcx> for TypeChecker<'a, 'tcx> { // since we may be in the process of computing this MIR in the // first place. let layout = if def_id == self.caller_body.source.def_id() { - self.caller_body.coroutine_layout_raw() + self.caller_body + .coroutine_layout_raw() + .or_else(|| self.tcx.coroutine_layout(def_id, args)) } else if self.tcx.needs_coroutine_by_move_body_def_id(def_id) && let ty::ClosureKind::FnOnce = args.as_coroutine().kind_ty().to_opt_closure_kind().unwrap() @@ -758,7 +762,7 @@ impl<'a, 'tcx> Visitor<'tcx> for TypeChecker<'a, 'tcx> { // Same if this is the by-move body of a coroutine-closure. self.caller_body.coroutine_layout_raw() } else { - self.tcx.coroutine_layout(def_id, args.as_coroutine().kind_ty()) + self.tcx.coroutine_layout(def_id, args) }; let Some(layout) = layout else { diff --git a/compiler/rustc_monomorphize/messages.ftl b/compiler/rustc_monomorphize/messages.ftl index aae2d79c16109..6b6653e7de021 100644 --- a/compiler/rustc_monomorphize/messages.ftl +++ b/compiler/rustc_monomorphize/messages.ftl @@ -48,7 +48,7 @@ monomorphize_large_assignments = .note = The current maximum size is {$limit}, but it can be customized with the move_size_limit attribute: `#![move_size_limit = "..."]` monomorphize_no_optimized_mir = - missing optimized MIR for an item in the crate `{$crate_name}` + missing optimized MIR for `{$instance}` in the crate `{$crate_name}` .note = missing optimized MIR for this item (was the crate `{$crate_name}` compiled with `--emit=metadata`?) monomorphize_recursion_limit = diff --git a/compiler/rustc_monomorphize/src/collector.rs b/compiler/rustc_monomorphize/src/collector.rs index 2a1b20ba48b87..de787852c60e6 100644 --- a/compiler/rustc_monomorphize/src/collector.rs +++ b/compiler/rustc_monomorphize/src/collector.rs @@ -205,6 +205,7 @@ //! this is not implemented however: a mono item will be produced //! regardless of whether it is actually needed or not. +use std::cell::OnceCell; use std::path::PathBuf; use rustc_attr_parsing::InlineAttr; @@ -216,6 +217,7 @@ use rustc_hir::def::DefKind; use rustc_hir::def_id::{DefId, DefIdMap, LocalDefId}; use rustc_hir::lang_items::LangItem; use rustc_middle::middle::codegen_fn_attrs::CodegenFnAttrFlags; +use rustc_middle::middle::eii::EiiMapping; use rustc_middle::mir::interpret::{AllocId, ErrorHandled, GlobalAlloc, Scalar}; use rustc_middle::mir::mono::{CollectionMode, InstantiationMode, MonoItem}; use rustc_middle::mir::visit::Visitor as MirVisitor; @@ -348,6 +350,27 @@ impl<'tcx> Extend>> for MonoItems<'tcx> { } } +fn collect_items_root<'tcx>( + tcx: TyCtxt<'tcx>, + starting_item: Spanned>, + state: &SharedState<'tcx>, + recursion_limit: Limit, +) { + if !state.visited.lock_mut().insert(starting_item.node) { + // We've been here already, no need to search again. + return; + } + let mut recursion_depths = DefIdMap::default(); + collect_items_rec( + tcx, + starting_item, + state, + &mut recursion_depths, + recursion_limit, + CollectionMode::UsedItems, + ); +} + /// Collect all monomorphized items reachable from `starting_point`, and emit a note diagnostic if a /// post-monomorphization error is encountered during a collection step. /// @@ -362,24 +385,6 @@ fn collect_items_rec<'tcx>( recursion_limit: Limit, mode: CollectionMode, ) { - if mode == CollectionMode::UsedItems { - if !state.visited.lock_mut().insert(starting_item.node) { - // We've been here already, no need to search again. - return; - } - } else { - if state.visited.lock().contains(&starting_item.node) { - // We've already done a *full* visit on this one, no need to do the "mention" visit. - return; - } - if !state.mentioned.lock_mut().insert(starting_item.node) { - // We've been here already, no need to search again. - return; - } - // There's some risk that we first do a 'mention' visit and then a full visit. But there's no - // harm in that, the mention visit will trigger all the queries and the results are cached. - } - let mut used_items = MonoItems::new(); let mut mentioned_items = MonoItems::new(); let recursion_depth_reset; @@ -536,6 +541,20 @@ fn collect_items_rec<'tcx>( state.usage_map.lock_mut().record_used(starting_item.node, &used_items); } + { + let mut visited = OnceCell::default(); + if mode == CollectionMode::UsedItems { + used_items + .items + .retain(|k, _| visited.get_mut_or_init(|| state.visited.lock_mut()).insert(*k)); + } + + let mut mentioned = OnceCell::default(); + mentioned_items.items.retain(|k, _| { + !visited.get_or_init(|| state.visited.lock()).contains(k) + && mentioned.get_mut_or_init(|| state.mentioned.lock_mut()).insert(*k) + }); + } if mode == CollectionMode::MentionedItems { assert!(used_items.is_empty(), "'mentioned' collection should never encounter used items"); } else { @@ -688,7 +707,7 @@ impl<'a, 'tcx> MirVisitor<'tcx> for MirUsedCollector<'a, 'tcx> { let target_ty = self.monomorphize(target_ty); let source_ty = self.monomorphize(source_ty); let (source_ty, target_ty) = - find_vtable_types_for_unsizing(self.tcx.at(span), source_ty, target_ty); + find_tails_for_unsizing(self.tcx.at(span), source_ty, target_ty); // This could also be a different Unsize instruction, like // from a fixed sized array to a slice. But we are only // interested in things that produce a vtable. @@ -913,7 +932,7 @@ fn visit_instance_use<'tcx>( // We explicitly skip this otherwise to ensure we get a linker error // if anyone tries to call this intrinsic and the codegen backend did not // override the implementation. - let instance = ty::Instance::new(instance.def_id(), instance.args); + let instance = ty::Instance::new_raw(instance.def_id(), instance.args); if tcx.should_codegen_locally(instance) { output.push(create_fn_mono_item(tcx, instance, source)); } @@ -929,14 +948,16 @@ fn visit_instance_use<'tcx>( ty::InstanceKind::ThreadLocalShim(..) => { bug!("{:?} being reified", instance); } - ty::InstanceKind::DropGlue(_, None) | ty::InstanceKind::AsyncDropGlueCtorShim(_, None) => { + ty::InstanceKind::DropGlue(_, None) => { // Don't need to emit noop drop glue if we are calling directly. if !is_direct_call { output.push(create_fn_mono_item(tcx, instance, source)); } } ty::InstanceKind::DropGlue(_, Some(_)) - | ty::InstanceKind::AsyncDropGlueCtorShim(_, Some(_)) + | ty::InstanceKind::FutureDropPollShim(..) + | ty::InstanceKind::AsyncDropGlue(_, _) + | ty::InstanceKind::AsyncDropGlueCtorShim(_, _) | ty::InstanceKind::VTableShim(..) | ty::InstanceKind::ReifyShim(..) | ty::InstanceKind::ClosureOnceShim { .. } @@ -944,7 +965,8 @@ fn visit_instance_use<'tcx>( | ty::InstanceKind::Item(..) | ty::InstanceKind::FnPtrShim(..) | ty::InstanceKind::CloneShim(..) - | ty::InstanceKind::FnPtrAddrShim(..) => { + | ty::InstanceKind::FnPtrAddrShim(..) + | ty::InstanceKind::EiiShim { .. } => { output.push(create_fn_mono_item(tcx, instance, source)); } } @@ -989,6 +1011,7 @@ fn should_codegen_locally<'tcx>(tcx: TyCtxt<'tcx>, instance: Instance<'tcx>) -> tcx.dcx().emit_fatal(NoOptimizedMir { span: tcx.def_span(def_id), crate_name: tcx.crate_name(def_id.krate), + instance: instance.to_string(), }); } @@ -1036,36 +1059,35 @@ fn should_codegen_locally<'tcx>(tcx: TyCtxt<'tcx>, instance: Instance<'tcx>) -> /// /// Finally, there is also the case of custom unsizing coercions, e.g., for /// smart pointers such as `Rc` and `Arc`. -fn find_vtable_types_for_unsizing<'tcx>( +fn find_tails_for_unsizing<'tcx>( tcx: TyCtxtAt<'tcx>, source_ty: Ty<'tcx>, target_ty: Ty<'tcx>, ) -> (Ty<'tcx>, Ty<'tcx>) { - let ptr_vtable = |inner_source: Ty<'tcx>, inner_target: Ty<'tcx>| { - let typing_env = ty::TypingEnv::fully_monomorphized(); - if tcx.type_has_metadata(inner_source, typing_env) { - (inner_source, inner_target) - } else { - tcx.struct_lockstep_tails_for_codegen(inner_source, inner_target, typing_env) - } - }; + let typing_env = ty::TypingEnv::fully_monomorphized(); + debug_assert!(!source_ty.has_param(), "{source_ty} should be fully monomorphic"); + debug_assert!(!target_ty.has_param(), "{target_ty} should be fully monomorphic"); match (source_ty.kind(), target_ty.kind()) { - (&ty::Ref(_, a, _), &ty::Ref(_, b, _) | &ty::RawPtr(b, _)) - | (&ty::RawPtr(a, _), &ty::RawPtr(b, _)) => ptr_vtable(a, b), + ( + &ty::Ref(_, source_pointee, _), + &ty::Ref(_, target_pointee, _) | &ty::RawPtr(target_pointee, _), + ) + | (&ty::RawPtr(source_pointee, _), &ty::RawPtr(target_pointee, _)) => { + tcx.struct_lockstep_tails_for_codegen(source_pointee, target_pointee, typing_env) + } + + // `Box` could go through the ADT code below, b/c it'll unpeel to `Unique`, + // and eventually bottom out in a raw ref, but we can micro-optimize it here. (_, _) if let Some(source_boxed) = source_ty.boxed_ty() && let Some(target_boxed) = target_ty.boxed_ty() => { - ptr_vtable(source_boxed, target_boxed) + tcx.struct_lockstep_tails_for_codegen(source_boxed, target_boxed, typing_env) } - // T as dyn* Trait - (_, &ty::Dynamic(_, _, ty::DynStar)) => ptr_vtable(source_ty, target_ty), - (&ty::Adt(source_adt_def, source_args), &ty::Adt(target_adt_def, target_args)) => { assert_eq!(source_adt_def, target_adt_def); - let CustomCoerceUnsized::Struct(coerce_index) = match crate::custom_coerce_unsize_info(tcx, source_ty, target_ty) { Ok(ccu) => ccu, @@ -1074,21 +1096,23 @@ fn find_vtable_types_for_unsizing<'tcx>( return (e, e); } }; + let coerce_field = &source_adt_def.non_enum_variant().fields[coerce_index]; + // We're getting a possibly unnormalized type, so normalize it. + let source_field = + tcx.normalize_erasing_regions(typing_env, coerce_field.ty(*tcx, source_args)); + let target_field = + tcx.normalize_erasing_regions(typing_env, coerce_field.ty(*tcx, target_args)); + find_tails_for_unsizing(tcx, source_field, target_field) + } - let source_fields = &source_adt_def.non_enum_variant().fields; - let target_fields = &target_adt_def.non_enum_variant().fields; - - assert!( - coerce_index.index() < source_fields.len() - && source_fields.len() == target_fields.len() - ); + // `T` as `dyn* Trait` unsizes *directly*. + // + // FIXME(dyn_star): This case is a bit awkward, b/c we're not really computing + // a tail here. We probably should handle this separately in the *caller* of + // this function, rather than returning something that is semantically different + // than what we return above. + (_, &ty::Dynamic(_, _, ty::DynStar)) => (source_ty, target_ty), - find_vtable_types_for_unsizing( - tcx, - source_fields[coerce_index].ty(*tcx, source_args), - target_fields[coerce_index].ty(*tcx, target_args), - ) - } _ => bug!( "find_vtable_types_for_unsizing: invalid coercion {:?} -> {:?}", source_ty, @@ -1307,7 +1331,7 @@ fn visit_mentioned_item<'tcx>( } MentionedItem::UnsizeCast { source_ty, target_ty } => { let (source_ty, target_ty) = - find_vtable_types_for_unsizing(tcx.at(span), source_ty, target_ty); + find_tails_for_unsizing(tcx.at(span), source_ty, target_ty); // This could also be a different Unsize instruction, like // from a fixed sized array to a slice. But we are only // interested in things that produce a vtable. @@ -1384,6 +1408,7 @@ fn collect_roots(tcx: TyCtxt<'_>, mode: MonoItemCollectionStrategy) -> Vec RootCollector<'_, 'v> { ty::Closure(def_id, args) | ty::Coroutine(def_id, args) | ty::CoroutineClosure(def_id, args) => { - Instance::new(def_id, self.tcx.erase_regions(args)) + Instance::new_raw(def_id, self.tcx.erase_regions(args)) } _ => unreachable!(), }; @@ -1526,11 +1551,12 @@ impl<'v> RootCollector<'_, 'v> { !matches!(self.tcx.codegen_fn_attrs(def_id).inline, InlineAttr::Force { .. }) } MonoItemCollectionStrategy::Lazy => { + let cfa = self.tcx.codegen_fn_attrs(def_id); + self.entry_fn.and_then(|(id, _)| id.as_local()) == Some(def_id) || self.tcx.is_reachable_non_generic(def_id) - || self - .tcx - .codegen_fn_attrs(def_id) + // FIXME(jdonszelmann): EII might remove this: + || cfa .flags .contains(CodegenFnAttrFlags::RUSTC_STD_INTERNAL_SYMBOL) } @@ -1584,6 +1610,28 @@ impl<'v> RootCollector<'_, 'v> { self.output.push(create_fn_mono_item(self.tcx, start_instance, DUMMY_SP)); } + + /// For each externally implementable item, we should generate an alias MonoItem that + /// determines what implementation is called. This could be a default implementation. + fn push_extra_eii_roots(&mut self) { + for (shim_did, &EiiMapping { extern_item, chosen_impl, weak_linkage, .. }) in + self.tcx.get_externally_implementable_item_impls(()) + { + self.output.push(create_fn_mono_item( + self.tcx, + ty::Instance { + def: ty::InstanceKind::EiiShim { + def_id: (*shim_did).into(), + extern_item, + chosen_impl, + weak_linkage, + }, + args: ty::GenericArgs::empty(), + }, + DUMMY_SP, + )); + } + } } #[instrument(level = "debug", skip(tcx, output))] @@ -1685,15 +1733,7 @@ pub(crate) fn collect_crate_mono_items<'tcx>( tcx.sess.time("monomorphization_collector_graph_walk", || { par_for_each_in(roots, |root| { - let mut recursion_depths = DefIdMap::default(); - collect_items_rec( - tcx, - dummy_spanned(root), - &state, - &mut recursion_depths, - recursion_limit, - CollectionMode::UsedItems, - ); + collect_items_root(tcx, dummy_spanned(*root), &state, recursion_limit); }); }); diff --git a/compiler/rustc_monomorphize/src/errors.rs b/compiler/rustc_monomorphize/src/errors.rs index dffa372279f9f..0dd20bbb35f8a 100644 --- a/compiler/rustc_monomorphize/src/errors.rs +++ b/compiler/rustc_monomorphize/src/errors.rs @@ -24,6 +24,7 @@ pub(crate) struct NoOptimizedMir { #[note] pub span: Span, pub crate_name: Symbol, + pub instance: String, } #[derive(LintDiagnostic)] @@ -69,10 +70,11 @@ pub(crate) struct UnknownCguCollectionMode<'a> { pub mode: &'a str, } -#[derive(LintDiagnostic)] +#[derive(Diagnostic)] #[diag(monomorphize_abi_error_disabled_vector_type)] #[help] pub(crate) struct AbiErrorDisabledVectorType<'a> { + #[primary_span] #[label] pub span: Span, pub required_feature: &'a str, @@ -81,9 +83,10 @@ pub(crate) struct AbiErrorDisabledVectorType<'a> { pub is_call: bool, } -#[derive(LintDiagnostic)] +#[derive(Diagnostic)] #[diag(monomorphize_abi_error_unsupported_vector_type)] pub(crate) struct AbiErrorUnsupportedVectorType<'a> { + #[primary_span] #[label] pub span: Span, pub ty: Ty<'a>, diff --git a/compiler/rustc_monomorphize/src/lib.rs b/compiler/rustc_monomorphize/src/lib.rs index 5dbae50c499f9..1b484da698aab 100644 --- a/compiler/rustc_monomorphize/src/lib.rs +++ b/compiler/rustc_monomorphize/src/lib.rs @@ -1,10 +1,10 @@ // tidy-alphabetical-start -#![cfg_attr(doc, recursion_limit = "256")] // FIXME(nnethercote): will be removed by #124141 +#![cfg_attr(bootstrap, feature(let_chains))] #![feature(array_windows)] #![feature(file_buffered)] #![feature(if_let_guard)] #![feature(impl_trait_in_assoc_type)] -#![feature(let_chains)] +#![feature(once_cell_get_mut)] // tidy-alphabetical-end use rustc_hir::lang_items::LangItem; diff --git a/compiler/rustc_monomorphize/src/mono_checks/abi_check.rs b/compiler/rustc_monomorphize/src/mono_checks/abi_check.rs index 0f5bdc8d7683f..cfeaee0777610 100644 --- a/compiler/rustc_monomorphize/src/mono_checks/abi_check.rs +++ b/compiler/rustc_monomorphize/src/mono_checks/abi_check.rs @@ -5,7 +5,7 @@ use rustc_hir::{CRATE_HIR_ID, HirId}; use rustc_middle::mir::{self, Location, traversal}; use rustc_middle::ty::layout::LayoutCx; use rustc_middle::ty::{self, Instance, InstanceKind, Ty, TyCtxt, TypingEnv}; -use rustc_session::lint::builtin::{ABI_UNSUPPORTED_VECTOR_TYPES, WASM_C_ABI}; +use rustc_session::lint::builtin::WASM_C_ABI; use rustc_span::def_id::DefId; use rustc_span::{DUMMY_SP, Span, Symbol, sym}; use rustc_target::callconv::{ArgAbi, Conv, FnAbi, PassMode}; @@ -50,34 +50,24 @@ fn do_check_simd_vector_abi<'tcx>( let feature = match feature_def.iter().find(|(bits, _)| size.bits() <= *bits) { Some((_, feature)) => feature, None => { - let (span, hir_id) = loc(); - tcx.emit_node_span_lint( - ABI_UNSUPPORTED_VECTOR_TYPES, - hir_id, + let (span, _hir_id) = loc(); + tcx.dcx().emit_err(errors::AbiErrorUnsupportedVectorType { span, - errors::AbiErrorUnsupportedVectorType { - span, - ty: arg_abi.layout.ty, - is_call, - }, - ); + ty: arg_abi.layout.ty, + is_call, + }); continue; } }; if !have_feature(Symbol::intern(feature)) { // Emit error. - let (span, hir_id) = loc(); - tcx.emit_node_span_lint( - ABI_UNSUPPORTED_VECTOR_TYPES, - hir_id, + let (span, _hir_id) = loc(); + tcx.dcx().emit_err(errors::AbiErrorDisabledVectorType { span, - errors::AbiErrorDisabledVectorType { - span, - required_feature: feature, - ty: arg_abi.layout.ty, - is_call, - }, - ); + required_feature: feature, + ty: arg_abi.layout.ty, + is_call, + }); } } } @@ -99,6 +89,12 @@ fn wasm_abi_safe<'tcx>(tcx: TyCtxt<'tcx>, arg: &ArgAbi<'tcx, Ty<'tcx>>) -> bool return true; } + // Both the old and the new ABIs treat vector types like `v128` the same + // way. + if uses_vector_registers(&arg.mode, &arg.layout.backend_repr) { + return true; + } + // This matches `unwrap_trivial_aggregate` in the wasm ABI logic. if arg.layout.is_aggregate() { let cx = LayoutCx::new(tcx, TypingEnv::fully_monomorphized()); @@ -111,6 +107,11 @@ fn wasm_abi_safe<'tcx>(tcx: TyCtxt<'tcx>, arg: &ArgAbi<'tcx, Ty<'tcx>>) -> bool } } + // Zero-sized types are dropped in both ABIs, so they're safe + if arg.layout.is_zst() { + return true; + } + false } diff --git a/compiler/rustc_monomorphize/src/mono_checks/move_check.rs b/compiler/rustc_monomorphize/src/mono_checks/move_check.rs index 838bfdab1ea59..7251ef478c6f1 100644 --- a/compiler/rustc_monomorphize/src/mono_checks/move_check.rs +++ b/compiler/rustc_monomorphize/src/mono_checks/move_check.rs @@ -3,7 +3,7 @@ use rustc_data_structures::fx::FxIndexSet; use rustc_hir::def_id::DefId; use rustc_middle::mir::visit::Visitor as MirVisitor; use rustc_middle::mir::{self, Location, traversal}; -use rustc_middle::ty::{self, AssocKind, Instance, Ty, TyCtxt, TypeFoldable}; +use rustc_middle::ty::{self, AssocTag, Instance, Ty, TyCtxt, TypeFoldable}; use rustc_session::Limit; use rustc_session::lint::builtin::LARGE_ASSIGNMENTS; use rustc_span::source_map::Spanned; @@ -148,11 +148,7 @@ impl<'tcx> MoveCheckVisitor<'tcx> { span: Span, ) { let source_info = self.body.source_info(location); - for reported_span in &self.move_size_spans { - if reported_span.overlaps(span) { - return; - } - } + let lint_root = source_info.scope.lint_root(&self.body.source_scopes); let Some(lint_root) = lint_root else { // This happens when the issue is in a function from a foreign crate that @@ -162,22 +158,43 @@ impl<'tcx> MoveCheckVisitor<'tcx> { // but correct span? This would make the lint at least accept crate-level lint attributes. return; }; + + // If the source scope is inlined by the MIR inliner, report the lint on the call site. + let reported_span = self + .body + .source_scopes + .get(source_info.scope) + .and_then(|source_scope_data| source_scope_data.inlined) + .map(|(_, call_site)| call_site) + .unwrap_or(span); + + for previously_reported_span in &self.move_size_spans { + if previously_reported_span.overlaps(reported_span) { + return; + } + } + self.tcx.emit_node_span_lint( LARGE_ASSIGNMENTS, lint_root, - span, - LargeAssignmentsLint { span, size: too_large_size.bytes(), limit: limit as u64 }, + reported_span, + LargeAssignmentsLint { + span: reported_span, + size: too_large_size.bytes(), + limit: limit as u64, + }, ); - self.move_size_spans.push(span); + + self.move_size_spans.push(reported_span); } } fn assoc_fn_of_type<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId, fn_ident: Ident) -> Option { for impl_def_id in tcx.inherent_impls(def_id) { - if let Some(new) = tcx.associated_items(impl_def_id).find_by_name_and_kind( + if let Some(new) = tcx.associated_items(impl_def_id).find_by_ident_and_kind( tcx, fn_ident, - AssocKind::Fn, + AssocTag::Fn, def_id, ) { return Some(new.def_id); diff --git a/compiler/rustc_monomorphize/src/partitioning.rs b/compiler/rustc_monomorphize/src/partitioning.rs index b1b6f10e0fe2c..b06f6f73f51af 100644 --- a/compiler/rustc_monomorphize/src/partitioning.rs +++ b/compiler/rustc_monomorphize/src/partitioning.rs @@ -115,7 +115,7 @@ use rustc_middle::mir::mono::{ MonoItemPartitions, Visibility, }; use rustc_middle::ty::print::{characteristic_def_id_of_type, with_no_trimmed_paths}; -use rustc_middle::ty::{self, InstanceKind, TyCtxt}; +use rustc_middle::ty::{self, Instance, InstanceKind, TyCtxt}; use rustc_middle::util::Providers; use rustc_session::CodegenUnits; use rustc_session::config::{DumpMonoStatsFormat, SwitchWithOptPath}; @@ -223,7 +223,7 @@ where match mono_item.instantiation_mode(cx.tcx) { InstantiationMode::GloballyShared { .. } => {} InstantiationMode::LocalCopy => { - if Some(mono_item.def_id()) != cx.tcx.lang_items().start_fn() { + if !cx.tcx.is_lang_item(mono_item.def_id(), LangItem::Start) { continue; } } @@ -254,8 +254,9 @@ where always_export_generics, ); - // We can't differentiate something that got inlined. + // We can't differentiate a function that got inlined. let autodiff_active = cfg!(llvm_enzyme) + && matches!(mono_item, MonoItem::Fn(_)) && cx .tcx .codegen_fn_attrs(mono_item.def_id()) @@ -632,6 +633,14 @@ fn characteristic_def_id_of_mono_item<'tcx>( MonoItem::Fn(instance) => { let def_id = match instance.def { ty::InstanceKind::Item(def) => def, + // EII shims have a characteristic defid. + // But it's not their own, its the one of the extern item it is implementing. + ty::InstanceKind::EiiShim { + def_id: _, + extern_item, + chosen_impl: _, + weak_linkage: _, + } => extern_item, ty::InstanceKind::VTableShim(..) | ty::InstanceKind::ReifyShim(..) | ty::InstanceKind::FnPtrShim(..) @@ -643,6 +652,8 @@ fn characteristic_def_id_of_mono_item<'tcx>( | ty::InstanceKind::CloneShim(..) | ty::InstanceKind::ThreadLocalShim(..) | ty::InstanceKind::FnPtrAddrShim(..) + | ty::InstanceKind::FutureDropPollShim(..) + | ty::InstanceKind::AsyncDropGlue(..) | ty::InstanceKind::AsyncDropGlueCtorShim(..) => return None, }; @@ -749,6 +760,7 @@ fn mono_item_linkage_and_visibility<'tcx>( if let Some(explicit_linkage) = mono_item.explicit_linkage(tcx) { return (explicit_linkage, Visibility::Default); } + let vis = mono_item_visibility( tcx, mono_item, @@ -756,7 +768,18 @@ fn mono_item_linkage_and_visibility<'tcx>( can_export_generics, always_export_generics, ); - (Linkage::External, vis) + + // The check for EII implementations and their defaults is also done in shared and static + // libraries. And shared libraries may later be linked together, both implementing the EII. + // This conflicting implementations may show up. We want to ignore this and just link em + // together anyway. LLVM ensures the last one is the one that's chosen + if let MonoItem::Fn(Instance { def: InstanceKind::EiiShim { weak_linkage, .. }, .. }) = + mono_item + { + if *weak_linkage { (Linkage::WeakAny, vis) } else { (Linkage::External, vis) } + } else { + (Linkage::External, vis) + } } type CguNameCache = UnordMap<(DefId, bool), Symbol>; @@ -795,7 +818,18 @@ fn mono_item_visibility<'tcx>( let def_id = match instance.def { InstanceKind::Item(def_id) | InstanceKind::DropGlue(def_id, Some(_)) - | InstanceKind::AsyncDropGlueCtorShim(def_id, Some(_)) => def_id, + | InstanceKind::FutureDropPollShim(def_id, _, _) + | InstanceKind::AsyncDropGlue(def_id, _) + | InstanceKind::AsyncDropGlueCtorShim(def_id, _) => def_id, + + InstanceKind::EiiShim { .. } => { + *can_be_internalized = false; + // Out of the three visibilities, only Default makes symbols visible outside the current + // DSO. For EIIs this is explicitly the intended visibilty. If another DSO is refering + // to an extern item, the implementation may be generated downstream. That symbol does + // have to be visible to the linker! + return Visibility::Default; + } // We match the visibility of statics here InstanceKind::ThreadLocalShim(def_id) => { @@ -811,7 +845,6 @@ fn mono_item_visibility<'tcx>( | InstanceKind::ClosureOnceShim { .. } | InstanceKind::ConstructCoroutineInClosureShim { .. } | InstanceKind::DropGlue(..) - | InstanceKind::AsyncDropGlueCtorShim(..) | InstanceKind::CloneShim(..) | InstanceKind::FnPtrAddrShim(..) => return Visibility::Hidden, }; @@ -916,6 +949,7 @@ fn mono_item_visibility<'tcx>( // LLVM internalize them as this decision is left up to the linker to // omit them, so prevent them from being internalized. let attrs = tcx.codegen_fn_attrs(def_id); + // FIXME(jdonszelmann): EII might replace this if attrs.flags.contains(CodegenFnAttrFlags::RUSTC_STD_INTERNAL_SYMBOL) { *can_be_internalized = false; } diff --git a/compiler/rustc_monomorphize/src/partitioning/autodiff.rs b/compiler/rustc_monomorphize/src/partitioning/autodiff.rs index ebe0b258c1b6a..22d593b80b895 100644 --- a/compiler/rustc_monomorphize/src/partitioning/autodiff.rs +++ b/compiler/rustc_monomorphize/src/partitioning/autodiff.rs @@ -2,7 +2,7 @@ use rustc_ast::expand::autodiff_attrs::{AutoDiffItem, DiffActivity}; use rustc_hir::def_id::LOCAL_CRATE; use rustc_middle::bug; use rustc_middle::mir::mono::MonoItem; -use rustc_middle::ty::{self, Instance, Ty, TyCtxt}; +use rustc_middle::ty::{self, Instance, PseudoCanonicalInput, Ty, TyCtxt, TypingEnv}; use rustc_symbol_mangling::symbol_name_for_instance_in_crate; use tracing::{debug, trace}; @@ -22,23 +22,51 @@ fn adjust_activity_to_abi<'tcx>(tcx: TyCtxt<'tcx>, fn_ty: Ty<'tcx>, da: &mut Vec for (i, ty) in sig.inputs().iter().enumerate() { if let Some(inner_ty) = ty.builtin_deref(true) { if inner_ty.is_slice() { + // Now we need to figure out the size of each slice element in memory to allow + // safety checks and usability improvements in the backend. + let sty = match inner_ty.builtin_index() { + Some(sty) => sty, + None => { + panic!("slice element type unknown"); + } + }; + let pci = PseudoCanonicalInput { + typing_env: TypingEnv::fully_monomorphized(), + value: sty, + }; + + let layout = tcx.layout_of(pci); + let elem_size = match layout { + Ok(layout) => layout.size, + Err(_) => { + bug!("autodiff failed to compute slice element size"); + } + }; + let elem_size: u32 = elem_size.bytes() as u32; + // We know that the length will be passed as extra arg. if !da.is_empty() { // We are looking at a slice. The length of that slice will become an // extra integer on llvm level. Integers are always const. // However, if the slice get's duplicated, we want to know to later check the // size. So we mark the new size argument as FakeActivitySize. + // There is one FakeActivitySize per slice, so for convenience we store the + // slice element size in bytes in it. We will use the size in the backend. let activity = match da[i] { DiffActivity::DualOnly | DiffActivity::Dual + | DiffActivity::Dualv | DiffActivity::DuplicatedOnly - | DiffActivity::Duplicated => DiffActivity::FakeActivitySize, + | DiffActivity::Duplicated => { + DiffActivity::FakeActivitySize(Some(elem_size)) + } DiffActivity::Const => DiffActivity::Const, _ => bug!("unexpected activity for ptr/ref"), }; new_activities.push(activity); new_positions.push(i + 1); } + continue; } } diff --git a/compiler/rustc_next_trait_solver/Cargo.toml b/compiler/rustc_next_trait_solver/Cargo.toml index 63aa60f2f26b9..36d53901d9e82 100644 --- a/compiler/rustc_next_trait_solver/Cargo.toml +++ b/compiler/rustc_next_trait_solver/Cargo.toml @@ -9,7 +9,6 @@ derive-where = "1.2.7" rustc_data_structures = { path = "../rustc_data_structures", optional = true } rustc_index = { path = "../rustc_index", default-features = false } rustc_macros = { path = "../rustc_macros", optional = true } -rustc_serialize = { path = "../rustc_serialize", optional = true } rustc_type_ir = { path = "../rustc_type_ir", default-features = false } rustc_type_ir_macros = { path = "../rustc_type_ir_macros" } tracing = "0.1" @@ -20,7 +19,6 @@ default = ["nightly"] nightly = [ "dep:rustc_data_structures", "dep:rustc_macros", - "dep:rustc_serialize", "rustc_index/nightly", "rustc_type_ir/nightly", ] diff --git a/compiler/rustc_next_trait_solver/src/delegate.rs b/compiler/rustc_next_trait_solver/src/delegate.rs index 259b39e2b9eed..90a7c2e9f7879 100644 --- a/compiler/rustc_next_trait_solver/src/delegate.rs +++ b/compiler/rustc_next_trait_solver/src/delegate.rs @@ -36,13 +36,9 @@ pub trait SolverDelegate: Deref + Sized { fn well_formed_goals( &self, param_env: ::ParamEnv, - arg: ::GenericArg, + term: ::Term, ) -> Option::Predicate>>>; - fn clone_opaque_types_for_query_response( - &self, - ) -> Vec<(ty::OpaqueTypeKey, ::Ty)>; - fn make_deduplicated_outlives_constraints( &self, ) -> Vec::GenericArg>>; @@ -61,16 +57,6 @@ pub trait SolverDelegate: Deref + Sized { span: ::Span, universe_map: impl Fn(ty::UniverseIndex) -> ty::UniverseIndex, ) -> ::GenericArg; - - // FIXME: Can we implement this in terms of `add` and `inject`? - fn insert_hidden_type( - &self, - opaque_type_key: ty::OpaqueTypeKey, - param_env: ::ParamEnv, - hidden_ty: ::Ty, - goals: &mut Vec::Predicate>>, - ) -> Result<(), NoSolution>; - fn add_item_bounds_for_hidden_type( &self, def_id: ::DefId, @@ -80,15 +66,6 @@ pub trait SolverDelegate: Deref + Sized { goals: &mut Vec::Predicate>>, ); - fn inject_new_hidden_type_unchecked( - &self, - key: ty::OpaqueTypeKey, - hidden_ty: ::Ty, - span: ::Span, - ); - - fn reset_opaque_types(&self); - fn fetch_eligible_assoc_item( &self, goal_trait_ref: ty::TraitRef, diff --git a/compiler/rustc_next_trait_solver/src/lib.rs b/compiler/rustc_next_trait_solver/src/lib.rs index f575fe03019ed..92cdc28a37b4b 100644 --- a/compiler/rustc_next_trait_solver/src/lib.rs +++ b/compiler/rustc_next_trait_solver/src/lib.rs @@ -6,7 +6,7 @@ // tidy-alphabetical-start #![allow(rustc::usage_of_type_ir_inherent)] -#![cfg_attr(not(bootstrap), allow(rustc::usage_of_type_ir_traits))] +#![allow(rustc::usage_of_type_ir_traits)] // tidy-alphabetical-end pub mod canonicalizer; diff --git a/compiler/rustc_next_trait_solver/src/solve/alias_relate.rs b/compiler/rustc_next_trait_solver/src/solve/alias_relate.rs index 0fc313e33b323..f7bd460094328 100644 --- a/compiler/rustc_next_trait_solver/src/solve/alias_relate.rs +++ b/compiler/rustc_next_trait_solver/src/solve/alias_relate.rs @@ -16,6 +16,7 @@ //! relate them structurally. use rustc_type_ir::inherent::*; +use rustc_type_ir::solve::GoalSource; use rustc_type_ir::{self as ty, Interner}; use tracing::{instrument, trace}; @@ -49,7 +50,10 @@ where // Structurally normalize the lhs. let lhs = if let Some(alias) = lhs.to_alias_term() { let term = self.next_term_infer_of_kind(lhs); - self.add_normalizes_to_goal(goal.with(cx, ty::NormalizesTo { alias, term })); + self.add_goal( + GoalSource::TypeRelating, + goal.with(cx, ty::NormalizesTo { alias, term }), + ); term } else { lhs @@ -58,7 +62,10 @@ where // Structurally normalize the rhs. let rhs = if let Some(alias) = rhs.to_alias_term() { let term = self.next_term_infer_of_kind(rhs); - self.add_normalizes_to_goal(goal.with(cx, ty::NormalizesTo { alias, term })); + self.add_goal( + GoalSource::TypeRelating, + goal.with(cx, ty::NormalizesTo { alias, term }), + ); term } else { rhs diff --git a/compiler/rustc_next_trait_solver/src/solve/assembly/mod.rs b/compiler/rustc_next_trait_solver/src/solve/assembly/mod.rs index 384a304c4a9d6..345a272895d39 100644 --- a/compiler/rustc_next_trait_solver/src/solve/assembly/mod.rs +++ b/compiler/rustc_next_trait_solver/src/solve/assembly/mod.rs @@ -2,20 +2,24 @@ pub(super) mod structural_traits; +use std::ops::ControlFlow; + use derive_where::derive_where; use rustc_type_ir::inherent::*; use rustc_type_ir::lang_items::TraitSolverLangItem; use rustc_type_ir::{ - self as ty, Interner, TypeFoldable, TypeVisitableExt as _, TypingMode, Upcast as _, elaborate, + self as ty, Interner, TypeFoldable, TypeSuperVisitable, TypeVisitable, TypeVisitableExt as _, + TypeVisitor, TypingMode, Upcast as _, elaborate, }; use tracing::{debug, instrument}; use super::trait_goals::TraitGoalProvenVia; +use super::{has_only_region_constraints, inspect}; use crate::delegate::SolverDelegate; use crate::solve::inspect::ProbeKind; use crate::solve::{ BuiltinImplSource, CandidateSource, CanonicalResponse, Certainty, EvalCtxt, Goal, GoalSource, - MaybeCause, NoSolution, QueryResult, + MaybeCause, NoSolution, ParamEnvSource, QueryResult, }; enum AliasBoundKind { @@ -48,18 +52,6 @@ where fn trait_def_id(self, cx: I) -> I::DefId; - /// Try equating an assumption predicate against a goal's predicate. If it - /// holds, then execute the `then` callback, which should do any additional - /// work, then produce a response (typically by executing - /// [`EvalCtxt::evaluate_added_goals_and_make_canonical_response`]). - fn probe_and_match_goal_against_assumption( - ecx: &mut EvalCtxt<'_, D>, - source: CandidateSource, - goal: Goal, - assumption: I::Clause, - then: impl FnOnce(&mut EvalCtxt<'_, D>) -> QueryResult, - ) -> Result, NoSolution>; - /// Consider a clause, which consists of a "assumption" and some "requirements", /// to satisfy a goal. If the requirements hold, then attempt to satisfy our /// goal by equating it with the assumption. @@ -92,16 +84,20 @@ where let ty::Dynamic(bounds, _, _) = goal.predicate.self_ty().kind() else { panic!("expected object type in `probe_and_consider_object_bound_candidate`"); }; - ecx.add_goals( - GoalSource::ImplWhereBound, - structural_traits::predicates_for_object_candidate( - ecx, - goal.param_env, - goal.predicate.trait_ref(cx), - bounds, - ), - ); - ecx.evaluate_added_goals_and_make_canonical_response(Certainty::Yes) + match structural_traits::predicates_for_object_candidate( + ecx, + goal.param_env, + goal.predicate.trait_ref(cx), + bounds, + ) { + Ok(requirements) => { + ecx.add_goals(GoalSource::ImplWhereBound, requirements); + ecx.evaluate_added_goals_and_make_canonical_response(Certainty::Yes) + } + Err(_) => { + ecx.evaluate_added_goals_and_make_canonical_response(Certainty::AMBIGUOUS) + } + } }) } @@ -114,6 +110,67 @@ where alias_ty: ty::AliasTy, ) -> Vec>; + fn probe_and_consider_param_env_candidate( + ecx: &mut EvalCtxt<'_, D>, + goal: Goal, + assumption: I::Clause, + ) -> Result, NoSolution> { + Self::fast_reject_assumption(ecx, goal, assumption)?; + + ecx.probe(|candidate: &Result, NoSolution>| match candidate { + Ok(candidate) => inspect::ProbeKind::TraitCandidate { + source: candidate.source, + result: Ok(candidate.result), + }, + Err(NoSolution) => inspect::ProbeKind::TraitCandidate { + source: CandidateSource::ParamEnv(ParamEnvSource::Global), + result: Err(NoSolution), + }, + }) + .enter(|ecx| { + Self::match_assumption(ecx, goal, assumption)?; + let source = ecx.characterize_param_env_assumption(goal.param_env, assumption)?; + Ok(Candidate { + source, + result: ecx.evaluate_added_goals_and_make_canonical_response(Certainty::Yes)?, + }) + }) + } + + /// Try equating an assumption predicate against a goal's predicate. If it + /// holds, then execute the `then` callback, which should do any additional + /// work, then produce a response (typically by executing + /// [`EvalCtxt::evaluate_added_goals_and_make_canonical_response`]). + fn probe_and_match_goal_against_assumption( + ecx: &mut EvalCtxt<'_, D>, + source: CandidateSource, + goal: Goal, + assumption: I::Clause, + then: impl FnOnce(&mut EvalCtxt<'_, D>) -> QueryResult, + ) -> Result, NoSolution> { + Self::fast_reject_assumption(ecx, goal, assumption)?; + + ecx.probe_trait_candidate(source).enter(|ecx| { + Self::match_assumption(ecx, goal, assumption)?; + then(ecx) + }) + } + + /// Try to reject the assumption based off of simple heuristics, such as [`ty::ClauseKind`] + /// and `DefId`. + fn fast_reject_assumption( + ecx: &mut EvalCtxt<'_, D>, + goal: Goal, + assumption: I::Clause, + ) -> Result<(), NoSolution>; + + /// Relate the goal and assumption. + fn match_assumption( + ecx: &mut EvalCtxt<'_, D>, + goal: Goal, + assumption: I::Clause, + ) -> Result<(), NoSolution>; + fn consider_impl_candidate( ecx: &mut EvalCtxt<'_, D>, goal: Goal, @@ -251,11 +308,6 @@ where goal: Goal, ) -> Result, NoSolution>; - fn consider_builtin_async_destruct_candidate( - ecx: &mut EvalCtxt<'_, D>, - goal: Goal, - ) -> Result, NoSolution>; - fn consider_builtin_destruct_candidate( ecx: &mut EvalCtxt<'_, D>, goal: Goal, @@ -284,6 +336,21 @@ where ) -> Vec>; } +/// Allows callers of `assemble_and_evaluate_candidates` to choose whether to limit +/// candidate assembly to param-env and alias-bound candidates. +/// +/// On top of being a micro-optimization, as it avoids doing unnecessary work when +/// a param-env trait bound candidate shadows impls for normalization, this is also +/// required to prevent query cycles due to RPITIT inference. See the issue at: +/// . +pub(super) enum AssembleCandidatesFrom { + All, + /// Only assemble candidates from the environment and alias bounds, ignoring + /// user-written and built-in impls. We only expect `ParamEnv` and `AliasBound` + /// candidates to be assembled. + EnvAndBounds, +} + impl EvalCtxt<'_, D> where D: SolverDelegate, @@ -292,6 +359,7 @@ where pub(super) fn assemble_and_evaluate_candidates>( &mut self, goal: Goal, + assemble_from: AssembleCandidatesFrom, ) -> Vec> { let Ok(normalized_self_ty) = self.structurally_normalize_ty(goal.param_env, goal.predicate.self_ty()) @@ -318,16 +386,18 @@ where } } - self.assemble_impl_candidates(goal, &mut candidates); - - self.assemble_builtin_impl_candidates(goal, &mut candidates); - self.assemble_alias_bound_candidates(goal, &mut candidates); - - self.assemble_object_bound_candidates(goal, &mut candidates); - self.assemble_param_env_candidates(goal, &mut candidates); + match assemble_from { + AssembleCandidatesFrom::All => { + self.assemble_impl_candidates(goal, &mut candidates); + self.assemble_builtin_impl_candidates(goal, &mut candidates); + self.assemble_object_bound_candidates(goal, &mut candidates); + } + AssembleCandidatesFrom::EnvAndBounds => {} + } + candidates } @@ -454,9 +524,6 @@ where Some(TraitSolverLangItem::DiscriminantKind) => { G::consider_builtin_discriminant_kind_candidate(self, goal) } - Some(TraitSolverLangItem::AsyncDestruct) => { - G::consider_builtin_async_destruct_candidate(self, goal) - } Some(TraitSolverLangItem::Destruct) => { G::consider_builtin_destruct_candidate(self, goal) } @@ -485,14 +552,8 @@ where goal: Goal, candidates: &mut Vec>, ) { - for (i, assumption) in goal.param_env.caller_bounds().iter().enumerate() { - candidates.extend(G::probe_and_consider_implied_clause( - self, - CandidateSource::ParamEnv(i), - goal, - assumption, - [], - )); + for assumption in goal.param_env.caller_bounds().iter() { + candidates.extend(G::probe_and_consider_param_env_candidate(self, goal, assumption)); } } @@ -573,7 +634,7 @@ where } ty::Alias(kind @ (ty::Projection | ty::Opaque), alias_ty) => (kind, alias_ty), - ty::Alias(ty::Inherent | ty::Weak, _) => { + ty::Alias(ty::Inherent | ty::Free, _) => { self.cx().delay_bug(format!("could not normalize {self_ty:?}, it is not WF")); return; } @@ -749,7 +810,73 @@ where } }) } +} + +pub(super) enum AllowInferenceConstraints { + Yes, + No, +} + +impl EvalCtxt<'_, D> +where + D: SolverDelegate, + I: Interner, +{ + /// Check whether we can ignore impl candidates due to specialization. + /// + /// This is only necessary for `feature(specialization)` and seems quite ugly. + pub(super) fn filter_specialized_impls( + &mut self, + allow_inference_constraints: AllowInferenceConstraints, + candidates: &mut Vec>, + ) { + match self.typing_mode() { + TypingMode::Coherence => return, + TypingMode::Analysis { .. } + | TypingMode::Borrowck { .. } + | TypingMode::PostBorrowckAnalysis { .. } + | TypingMode::PostAnalysis => {} + } + + let mut i = 0; + 'outer: while i < candidates.len() { + let CandidateSource::Impl(victim_def_id) = candidates[i].source else { + i += 1; + continue; + }; + + for (j, c) in candidates.iter().enumerate() { + if i == j { + continue; + } + + let CandidateSource::Impl(other_def_id) = c.source else { + continue; + }; + + // See if we can toss out `victim` based on specialization. + // + // While this requires us to know *for sure* that the `lhs` impl applies + // we still use modulo regions here. This is fine as specialization currently + // assumes that specializing impls have to be always applicable, meaning that + // the only allowed region constraints may be constraints also present on the default impl. + if matches!(allow_inference_constraints, AllowInferenceConstraints::Yes) + || has_only_region_constraints(c.result) + { + if self.cx().impl_specializes(other_def_id, victim_def_id) { + candidates.remove(i); + continue 'outer; + } + } + } + + i += 1; + } + } + /// Assemble and merge candidates for goals which are related to an underlying trait + /// goal. Right now, this is normalizes-to and host effect goals. + /// /// We sadly can't simply take all possible candidates for normalization goals /// and check whether they result in the same constraints. We want to make sure /// that trying to normalize an alias doesn't result in constraints which aren't @@ -778,54 +905,63 @@ where /// /// See trait-system-refactor-initiative#124 for more details. #[instrument(level = "debug", skip(self, inject_normalize_to_rigid_candidate), ret)] - pub(super) fn merge_candidates( + pub(super) fn assemble_and_merge_candidates>( &mut self, proven_via: Option, - candidates: Vec>, + goal: Goal, inject_normalize_to_rigid_candidate: impl FnOnce(&mut EvalCtxt<'_, D>) -> QueryResult, ) -> QueryResult { let Some(proven_via) = proven_via else { // We don't care about overflow. If proving the trait goal overflowed, then // it's enough to report an overflow error for that, we don't also have to // overflow during normalization. - return Ok(self.make_ambiguous_response_no_constraints(MaybeCause::Ambiguity)); + // + // We use `forced_ambiguity` here over `make_ambiguous_response_no_constraints` + // because the former will also record a built-in candidate in the inspector. + return self.forced_ambiguity(MaybeCause::Ambiguity).map(|cand| cand.result); }; match proven_via { - // Even when a trait bound has been proven using a where-bound, we - // still need to consider alias-bounds for normalization, see - // tests/ui/next-solver/alias-bound-shadowed-by-env.rs. - // - // FIXME(const_trait_impl): should this behavior also be used by - // constness checking. Doing so is *at least theoretically* breaking, - // see github.com/rust-lang/rust/issues/133044#issuecomment-2500709754 TraitGoalProvenVia::ParamEnv | TraitGoalProvenVia::AliasBound => { - let mut candidates_from_env_and_bounds: Vec<_> = candidates + // Even when a trait bound has been proven using a where-bound, we + // still need to consider alias-bounds for normalization, see + // `tests/ui/next-solver/alias-bound-shadowed-by-env.rs`. + let candidates_from_env_and_bounds: Vec<_> = self + .assemble_and_evaluate_candidates(goal, AssembleCandidatesFrom::EnvAndBounds); + + // We still need to prefer where-bounds over alias-bounds however. + // See `tests/ui/winnowing/norm-where-bound-gt-alias-bound.rs`. + let mut considered_candidates: Vec<_> = if candidates_from_env_and_bounds .iter() - .filter(|c| { - matches!( - c.source, - CandidateSource::AliasBound | CandidateSource::ParamEnv(_) - ) - }) - .map(|c| c.result) - .collect(); + .any(|c| matches!(c.source, CandidateSource::ParamEnv(_))) + { + candidates_from_env_and_bounds + .into_iter() + .filter(|c| matches!(c.source, CandidateSource::ParamEnv(_))) + .map(|c| c.result) + .collect() + } else { + candidates_from_env_and_bounds.into_iter().map(|c| c.result).collect() + }; // If the trait goal has been proven by using the environment, we want to treat // aliases as rigid if there are no applicable projection bounds in the environment. - if candidates_from_env_and_bounds.is_empty() { + if considered_candidates.is_empty() { if let Ok(response) = inject_normalize_to_rigid_candidate(self) { - candidates_from_env_and_bounds.push(response); + considered_candidates.push(response); } } - if let Some(response) = self.try_merge_responses(&candidates_from_env_and_bounds) { + if let Some(response) = self.try_merge_responses(&considered_candidates) { Ok(response) } else { - self.flounder(&candidates_from_env_and_bounds) + self.flounder(&considered_candidates) } } TraitGoalProvenVia::Misc => { + let mut candidates = + self.assemble_and_evaluate_candidates(goal, AssembleCandidatesFrom::All); + // Prefer "orphaned" param-env normalization predicates, which are used // (for example, and ideally only) when proving item bounds for an impl. let candidates_from_env: Vec<_> = candidates @@ -837,6 +973,13 @@ where return Ok(response); } + // We drop specialized impls to allow normalization via a final impl here. In case + // the specializing impl has different inference constraints from the specialized + // impl, proving the trait goal is already ambiguous, so we never get here. This + // means we can just ignore inference constraints and don't have to special-case + // constraining the normalized-to `term`. + self.filter_specialized_impls(AllowInferenceConstraints::Yes, &mut candidates); + let responses: Vec<_> = candidates.iter().map(|c| c.result).collect(); if let Some(response) = self.try_merge_responses(&responses) { Ok(response) @@ -846,4 +989,88 @@ where } } } + + /// Compute whether a param-env assumption is global or non-global after normalizing it. + /// + /// This is necessary because, for example, given: + /// + /// ```ignore,rust + /// where + /// T: Trait, + /// i32: From, + /// ``` + /// + /// The `i32: From` bound is non-global before normalization, but is global after. + /// Since the old trait solver normalized param-envs eagerly, we want to emulate this + /// behavior lazily. + fn characterize_param_env_assumption( + &mut self, + param_env: I::ParamEnv, + assumption: I::Clause, + ) -> Result, NoSolution> { + // FIXME: This should be fixed, but it also requires changing the behavior + // in the old solver which is currently relied on. + if assumption.has_bound_vars() { + return Ok(CandidateSource::ParamEnv(ParamEnvSource::NonGlobal)); + } + + match assumption.visit_with(&mut FindParamInClause { ecx: self, param_env }) { + ControlFlow::Break(Err(NoSolution)) => Err(NoSolution), + ControlFlow::Break(Ok(())) => Ok(CandidateSource::ParamEnv(ParamEnvSource::NonGlobal)), + ControlFlow::Continue(()) => Ok(CandidateSource::ParamEnv(ParamEnvSource::Global)), + } + } +} + +struct FindParamInClause<'a, 'b, D: SolverDelegate, I: Interner> { + ecx: &'a mut EvalCtxt<'b, D>, + param_env: I::ParamEnv, +} + +impl TypeVisitor for FindParamInClause<'_, '_, D, I> +where + D: SolverDelegate, + I: Interner, +{ + type Result = ControlFlow>; + + fn visit_binder>(&mut self, t: &ty::Binder) -> Self::Result { + self.ecx.enter_forall(t.clone(), |ecx, v| { + v.visit_with(&mut FindParamInClause { ecx, param_env: self.param_env }) + }) + } + + fn visit_ty(&mut self, ty: I::Ty) -> Self::Result { + let Ok(ty) = self.ecx.structurally_normalize_ty(self.param_env, ty) else { + return ControlFlow::Break(Err(NoSolution)); + }; + + if let ty::Placeholder(_) = ty.kind() { + ControlFlow::Break(Ok(())) + } else { + ty.super_visit_with(self) + } + } + + fn visit_const(&mut self, ct: I::Const) -> Self::Result { + let Ok(ct) = self.ecx.structurally_normalize_const(self.param_env, ct) else { + return ControlFlow::Break(Err(NoSolution)); + }; + + if let ty::ConstKind::Placeholder(_) = ct.kind() { + ControlFlow::Break(Ok(())) + } else { + ct.super_visit_with(self) + } + } + + fn visit_region(&mut self, r: I::Region) -> Self::Result { + match self.ecx.eager_resolve_region(r).kind() { + ty::ReStatic | ty::ReError(_) => ControlFlow::Continue(()), + ty::ReVar(_) | ty::RePlaceholder(_) => ControlFlow::Break(Ok(())), + ty::ReErased | ty::ReEarlyParam(_) | ty::ReLateParam(_) | ty::ReBound(..) => { + unreachable!() + } + } + } } diff --git a/compiler/rustc_next_trait_solver/src/solve/assembly/structural_traits.rs b/compiler/rustc_next_trait_solver/src/solve/assembly/structural_traits.rs index a5142de2d3905..b16f74cd8e431 100644 --- a/compiler/rustc_next_trait_solver/src/solve/assembly/structural_traits.rs +++ b/compiler/rustc_next_trait_solver/src/solve/assembly/structural_traits.rs @@ -5,9 +5,10 @@ use derive_where::derive_where; use rustc_type_ir::data_structures::HashMap; use rustc_type_ir::inherent::*; use rustc_type_ir::lang_items::TraitSolverLangItem; +use rustc_type_ir::solve::inspect::ProbeKind; use rustc_type_ir::{ - self as ty, Interner, Movability, Mutability, TypeFoldable, TypeFolder, TypeSuperFoldable, - Upcast as _, elaborate, + self as ty, FallibleTypeFolder, Interner, Movability, Mutability, TypeFoldable, + TypeSuperFoldable, Upcast as _, elaborate, }; use rustc_type_ir_macros::{TypeFoldable_Generic, TypeVisitable_Generic}; use tracing::instrument; @@ -37,13 +38,17 @@ where | ty::Never | ty::Char => Ok(ty::Binder::dummy(vec![])), + // This branch is only for `experimental_default_bounds`. + // Other foreign types were rejected earlier in + // `disqualify_auto_trait_candidate_due_to_possible_impl`. + ty::Foreign(..) => Ok(ty::Binder::dummy(vec![])), + // Treat `str` like it's defined as `struct str([u8]);` ty::Str => Ok(ty::Binder::dummy(vec![Ty::new_slice(cx, Ty::new_u8(cx))])), ty::Dynamic(..) | ty::Param(..) - | ty::Foreign(..) - | ty::Alias(ty::Projection | ty::Inherent | ty::Weak, ..) + | ty::Alias(ty::Projection | ty::Inherent | ty::Free, ..) | ty::Placeholder(..) | ty::Bound(..) | ty::Infer(_) => { @@ -719,6 +724,9 @@ pub(in crate::solve) fn const_conditions_for_destruct( let destruct_def_id = cx.require_lang_item(TraitSolverLangItem::Destruct); match self_ty.kind() { + // `ManuallyDrop` is trivially `~const Destruct` as we do not run any drop glue on it. + ty::Adt(adt_def, _) if adt_def.is_manually_drop() => Ok(vec![]), + // An ADT is `~const Destruct` only if all of the fields are, // *and* if there is a `Drop` impl, that `Drop` impl is also `~const`. ty::Adt(adt_def, args) => { @@ -818,22 +826,16 @@ pub(in crate::solve) fn const_conditions_for_destruct( /// impl Baz for dyn Foo {} /// ``` /// -/// However, in order to make such impls well-formed, we need to do an +/// However, in order to make such impls non-cyclical, we need to do an /// additional step of eagerly folding the associated types in the where /// clauses of the impl. In this example, that means replacing /// `::Bar` with `Ty` in the first impl. -/// -// FIXME: This is only necessary as `::Assoc: ItemBound` -// bounds in impls are trivially proven using the item bound candidates. -// This is unsound in general and once that is fixed, we don't need to -// normalize eagerly here. See https://github.com/lcnr/solver-woes/issues/9 -// for more details. pub(in crate::solve) fn predicates_for_object_candidate( - ecx: &EvalCtxt<'_, D>, + ecx: &mut EvalCtxt<'_, D>, param_env: I::ParamEnv, trait_ref: ty::TraitRef, object_bounds: I::BoundExistentialPredicates, -) -> Vec> +) -> Result>, Ambiguous> where D: SolverDelegate, I: Interner, @@ -867,72 +869,130 @@ where .extend(cx.item_bounds(associated_type_def_id).iter_instantiated(cx, trait_ref.args)); } - let mut replace_projection_with = HashMap::default(); + let mut replace_projection_with: HashMap<_, Vec<_>> = HashMap::default(); for bound in object_bounds.iter() { if let ty::ExistentialPredicate::Projection(proj) = bound.skip_binder() { + // FIXME: We *probably* should replace this with a dummy placeholder, + // b/c don't want to replace literal instances of this dyn type that + // show up in the bounds, but just ones that come from substituting + // `Self` with the dyn type. let proj = proj.with_self_ty(cx, trait_ref.self_ty()); - let old_ty = replace_projection_with.insert(proj.def_id(), bound.rebind(proj)); - assert_eq!( - old_ty, - None, - "{:?} has two generic parameters: {:?} and {:?}", - proj.projection_term, - proj.term, - old_ty.unwrap() - ); + replace_projection_with.entry(proj.def_id()).or_default().push(bound.rebind(proj)); } } - let mut folder = - ReplaceProjectionWith { ecx, param_env, mapping: replace_projection_with, nested: vec![] }; - let folded_requirements = requirements.fold_with(&mut folder); + let mut folder = ReplaceProjectionWith { + ecx, + param_env, + self_ty: trait_ref.self_ty(), + mapping: &replace_projection_with, + nested: vec![], + }; - folder + let requirements = requirements.try_fold_with(&mut folder)?; + Ok(folder .nested .into_iter() - .chain(folded_requirements.into_iter().map(|clause| Goal::new(cx, param_env, clause))) - .collect() + .chain(requirements.into_iter().map(|clause| Goal::new(cx, param_env, clause))) + .collect()) } -struct ReplaceProjectionWith<'a, D: SolverDelegate, I: Interner> { - ecx: &'a EvalCtxt<'a, D>, +struct ReplaceProjectionWith<'a, 'b, I: Interner, D: SolverDelegate> { + ecx: &'a mut EvalCtxt<'b, D>, param_env: I::ParamEnv, - mapping: HashMap>>, + self_ty: I::Ty, + mapping: &'a HashMap>>>, nested: Vec>, } -impl, I: Interner> TypeFolder - for ReplaceProjectionWith<'_, D, I> +impl ReplaceProjectionWith<'_, '_, I, D> +where + D: SolverDelegate, + I: Interner, +{ + fn projection_may_match( + &mut self, + source_projection: ty::Binder>, + target_projection: ty::AliasTerm, + ) -> bool { + source_projection.item_def_id() == target_projection.def_id + && self + .ecx + .probe(|_| ProbeKind::ProjectionCompatibility) + .enter(|ecx| -> Result<_, NoSolution> { + let source_projection = ecx.instantiate_binder_with_infer(source_projection); + ecx.eq(self.param_env, source_projection.projection_term, target_projection)?; + ecx.try_evaluate_added_goals() + }) + .is_ok() + } + + /// Try to replace an alias with the term present in the projection bounds of the self type. + /// Returns `Ok` if this alias is not eligible to be replaced, or bail with + /// `Err(Ambiguous)` if it's uncertain which projection bound to replace the term with due + /// to multiple bounds applying. + fn try_eagerly_replace_alias( + &mut self, + alias_term: ty::AliasTerm, + ) -> Result, Ambiguous> { + if alias_term.self_ty() != self.self_ty { + return Ok(None); + } + + let Some(replacements) = self.mapping.get(&alias_term.def_id) else { + return Ok(None); + }; + + // This is quite similar to the `projection_may_match` we use in unsizing, + // but here we want to unify a projection predicate against an alias term + // so we can replace it with the the projection predicate's term. + let mut matching_projections = replacements + .iter() + .filter(|source_projection| self.projection_may_match(**source_projection, alias_term)); + let Some(replacement) = matching_projections.next() else { + // This shouldn't happen. + panic!("could not replace {alias_term:?} with term from from {:?}", self.self_ty); + }; + // FIXME: This *may* have issues with duplicated projections. + if matching_projections.next().is_some() { + // If there's more than one projection that we can unify here, then we + // need to stall until inference constrains things so that there's only + // one choice. + return Err(Ambiguous); + } + + let replacement = self.ecx.instantiate_binder_with_infer(*replacement); + self.nested.extend( + self.ecx + .eq_and_get_goals(self.param_env, alias_term, replacement.projection_term) + .expect("expected to be able to unify goal projection with dyn's projection"), + ); + + Ok(Some(replacement.term)) + } +} + +/// Marker for bailing with ambiguity. +pub(crate) struct Ambiguous; + +impl FallibleTypeFolder for ReplaceProjectionWith<'_, '_, I, D> +where + D: SolverDelegate, + I: Interner, { + type Error = Ambiguous; + fn cx(&self) -> I { self.ecx.cx() } - fn fold_ty(&mut self, ty: I::Ty) -> I::Ty { + fn try_fold_ty(&mut self, ty: I::Ty) -> Result { if let ty::Alias(ty::Projection, alias_ty) = ty.kind() { - if let Some(replacement) = self.mapping.get(&alias_ty.def_id) { - // We may have a case where our object type's projection bound is higher-ranked, - // but the where clauses we instantiated are not. We can solve this by instantiating - // the binder at the usage site. - let proj = self.ecx.instantiate_binder_with_infer(*replacement); - // FIXME: Technically this equate could be fallible... - self.nested.extend( - self.ecx - .eq_and_get_goals( - self.param_env, - alias_ty, - proj.projection_term.expect_ty(self.ecx.cx()), - ) - .expect( - "expected to be able to unify goal projection with dyn's projection", - ), - ); - proj.term.expect_ty() - } else { - ty.super_fold_with(self) + if let Some(term) = self.try_eagerly_replace_alias(alias_ty.into())? { + return Ok(term.expect_ty()); } - } else { - ty.super_fold_with(self) } + + ty.try_super_fold_with(self) } } diff --git a/compiler/rustc_next_trait_solver/src/solve/effect_goals.rs b/compiler/rustc_next_trait_solver/src/solve/effect_goals.rs index 0b61c368d8e8d..84a83d79cf046 100644 --- a/compiler/rustc_next_trait_solver/src/solve/effect_goals.rs +++ b/compiler/rustc_next_trait_solver/src/solve/effect_goals.rs @@ -36,39 +36,38 @@ where self.def_id() } - fn probe_and_match_goal_against_assumption( + fn fast_reject_assumption( ecx: &mut EvalCtxt<'_, D>, - source: rustc_type_ir::solve::CandidateSource, goal: Goal, - assumption: ::Clause, - then: impl FnOnce(&mut EvalCtxt<'_, D>) -> QueryResult, - ) -> Result, NoSolution> { + assumption: I::Clause, + ) -> Result<(), NoSolution> { if let Some(host_clause) = assumption.as_host_effect_clause() { if host_clause.def_id() == goal.predicate.def_id() && host_clause.constness().satisfies(goal.predicate.constness) { - if !DeepRejectCtxt::relate_rigid_rigid(ecx.cx()).args_may_unify( + if DeepRejectCtxt::relate_rigid_rigid(ecx.cx()).args_may_unify( goal.predicate.trait_ref.args, host_clause.skip_binder().trait_ref.args, ) { - return Err(NoSolution); + return Ok(()); } - - ecx.probe_trait_candidate(source).enter(|ecx| { - let assumption_trait_pred = ecx.instantiate_binder_with_infer(host_clause); - ecx.eq( - goal.param_env, - goal.predicate.trait_ref, - assumption_trait_pred.trait_ref, - )?; - then(ecx) - }) - } else { - Err(NoSolution) } - } else { - Err(NoSolution) } + + Err(NoSolution) + } + + fn match_assumption( + ecx: &mut EvalCtxt<'_, D>, + goal: Goal, + assumption: I::Clause, + ) -> Result<(), NoSolution> { + let host_clause = assumption.as_host_effect_clause().unwrap(); + + let assumption_trait_pred = ecx.instantiate_binder_with_infer(host_clause); + ecx.eq(goal.param_env, goal.predicate.trait_ref, assumption_trait_pred.trait_ref)?; + + Ok(()) } /// Register additional assumptions for aliases corresponding to `~const` item bounds. @@ -124,7 +123,7 @@ where fn consider_impl_candidate( ecx: &mut EvalCtxt<'_, D>, goal: Goal, - impl_def_id: ::DefId, + impl_def_id: I::DefId, ) -> Result, NoSolution> { let cx = ecx.cx(); @@ -178,7 +177,7 @@ where fn consider_error_guaranteed_candidate( ecx: &mut EvalCtxt<'_, D>, - _guar: ::ErrorGuaranteed, + _guar: I::ErrorGuaranteed, ) -> Result, NoSolution> { ecx.probe_builtin_trait_candidate(BuiltinImplSource::Misc) .enter(|ecx| ecx.evaluate_added_goals_and_make_canonical_response(Certainty::Yes)) @@ -336,13 +335,6 @@ where unreachable!("DiscriminantKind is not const") } - fn consider_builtin_async_destruct_candidate( - _ecx: &mut EvalCtxt<'_, D>, - _goal: Goal, - ) -> Result, NoSolution> { - unreachable!("AsyncDestruct is not const") - } - fn consider_builtin_destruct_candidate( ecx: &mut EvalCtxt<'_, D>, goal: Goal, @@ -399,12 +391,11 @@ where &mut self, goal: Goal>, ) -> QueryResult { - let candidates = self.assemble_and_evaluate_candidates(goal); let (_, proven_via) = self.probe(|_| ProbeKind::ShadowedEnvProbing).enter(|ecx| { let trait_goal: Goal> = goal.with(ecx.cx(), goal.predicate.trait_ref); ecx.compute_trait_goal(trait_goal) })?; - self.merge_candidates(proven_via, candidates, |_ecx| Err(NoSolution)) + self.assemble_and_merge_candidates(proven_via, goal, |_ecx| Err(NoSolution)) } } diff --git a/compiler/rustc_next_trait_solver/src/solve/eval_ctxt/canonical.rs b/compiler/rustc_next_trait_solver/src/solve/eval_ctxt/canonical.rs index ac6b521f665b6..36f68808a2c8c 100644 --- a/compiler/rustc_next_trait_solver/src/solve/eval_ctxt/canonical.rs +++ b/compiler/rustc_next_trait_solver/src/solve/eval_ctxt/canonical.rs @@ -22,7 +22,7 @@ use tracing::{debug, instrument, trace}; use crate::canonicalizer::Canonicalizer; use crate::delegate::SolverDelegate; use crate::resolve::EagerResolver; -use crate::solve::eval_ctxt::{CurrentGoalKind, NestedGoals}; +use crate::solve::eval_ctxt::CurrentGoalKind; use crate::solve::{ CanonicalInput, CanonicalResponse, Certainty, EvalCtxt, ExternalConstraintsData, Goal, MaybeCause, NestedNormalizationGoals, NoSolution, PredefinedOpaquesData, QueryInput, @@ -56,7 +56,10 @@ where &self, goal: Goal, ) -> (Vec, CanonicalInput) { - let opaque_types = self.delegate.clone_opaque_types_for_query_response(); + // We only care about one entry per `OpaqueTypeKey` here, + // so we only canonicalize the lookup table and ignore + // duplicate entries. + let opaque_types = self.delegate.clone_opaque_types_lookup_table(); let (goal, opaque_types) = (goal, opaque_types).fold_with(&mut EagerResolver::new(self.delegate)); @@ -81,12 +84,19 @@ where /// the values inferred while solving the instantiated goal. /// - `external_constraints`: additional constraints which aren't expressible /// using simple unification of inference variables. + /// + /// This takes the `shallow_certainty` which represents whether we're confident + /// that the final result of the current goal only depends on the nested goals. + /// + /// In case this is `Certainy::Maybe`, there may still be additional nested goals + /// or inference constraints required for this candidate to be hold. The candidate + /// always requires all already added constraints and nested goals. #[instrument(level = "trace", skip(self), ret)] pub(in crate::solve) fn evaluate_added_goals_and_make_canonical_response( &mut self, - certainty: Certainty, + shallow_certainty: Certainty, ) -> QueryResult { - self.inspect.make_canonical_response(certainty); + self.inspect.make_canonical_response(shallow_certainty); let goals_certainty = self.try_evaluate_added_goals()?; assert_eq!( @@ -103,32 +113,33 @@ where NoSolution })?; - // When normalizing, we've replaced the expected term with an unconstrained - // inference variable. This means that we dropped information which could - // have been important. We handle this by instead returning the nested goals - // to the caller, where they are then handled. - // - // As we return all ambiguous nested goals, we can ignore the certainty returned - // by `try_evaluate_added_goals()`. - let (certainty, normalization_nested_goals) = match self.current_goal_kind { - CurrentGoalKind::NormalizesTo => { - let NestedGoals { normalizes_to_goals, goals } = - std::mem::take(&mut self.nested_goals); - if cfg!(debug_assertions) { - assert!(normalizes_to_goals.is_empty()); + let (certainty, normalization_nested_goals) = + match (self.current_goal_kind, shallow_certainty) { + // When normalizing, we've replaced the expected term with an unconstrained + // inference variable. This means that we dropped information which could + // have been important. We handle this by instead returning the nested goals + // to the caller, where they are then handled. We only do so if we do not + // need to recompute the `NormalizesTo` goal afterwards to avoid repeatedly + // uplifting its nested goals. This is the case if the `shallow_certainty` is + // `Certainty::Yes`. + (CurrentGoalKind::NormalizesTo, Certainty::Yes) => { + let goals = std::mem::take(&mut self.nested_goals); + // As we return all ambiguous nested goals, we can ignore the certainty + // returned by `self.try_evaluate_added_goals()`. if goals.is_empty() { assert!(matches!(goals_certainty, Certainty::Yes)); } + (Certainty::Yes, NestedNormalizationGoals(goals)) } - (certainty, NestedNormalizationGoals(goals)) - } - CurrentGoalKind::Misc | CurrentGoalKind::CoinductiveTrait => { - let certainty = certainty.unify_with(goals_certainty); - (certainty, NestedNormalizationGoals::empty()) - } - }; + _ => { + let certainty = shallow_certainty.and(goals_certainty); + (certainty, NestedNormalizationGoals::empty()) + } + }; - if let Certainty::Maybe(cause @ MaybeCause::Overflow { .. }) = certainty { + if let Certainty::Maybe(cause @ MaybeCause::Overflow { keep_constraints: false, .. }) = + certainty + { // If we have overflow, it's probable that we're substituting a type // into itself infinitely and any partial substitutions in the query // response are probably not useful anyways, so just return an empty @@ -184,6 +195,7 @@ where debug!(?num_non_region_vars, "too many inference variables -> overflow"); return Ok(self.make_ambiguous_response_no_constraints(MaybeCause::Overflow { suggest_increasing_limit: true, + keep_constraints: false, })); } } @@ -235,19 +247,15 @@ where Default::default() }; - ExternalConstraintsData { - region_constraints, - opaque_types: self - .delegate - .clone_opaque_types_for_query_response() - .into_iter() - // Only return *newly defined* opaque types. - .filter(|(a, _)| { - self.predefined_opaques_in_body.opaque_types.iter().all(|(pa, _)| pa != a) - }) - .collect(), - normalization_nested_goals, - } + // We only return *newly defined* opaque types from canonical queries. + // + // Constraints for any existing opaque types are already tracked by changes + // to the `var_values`. + let opaque_types = self + .delegate + .clone_opaque_types_added_since(self.initial_opaque_types_storage_num_entries); + + ExternalConstraintsData { region_constraints, opaque_types, normalization_nested_goals } } /// After calling a canonical query, we apply the constraints returned @@ -355,7 +363,7 @@ where // exist at all (see the FIXME at the start of this method), we have to deal with // them for now. delegate.instantiate_canonical_var_with_infer(info, span, |idx| { - ty::UniverseIndex::from(prev_universe.index() + idx.index()) + prev_universe + idx.index() }) } else if info.is_existential() { // As an optimization we sometimes avoid creating a new inference variable here. @@ -425,7 +433,17 @@ where fn register_new_opaque_types(&mut self, opaque_types: &[(ty::OpaqueTypeKey, I::Ty)]) { for &(key, ty) in opaque_types { - self.delegate.inject_new_hidden_type_unchecked(key, ty, self.origin_span); + let prev = self.delegate.register_hidden_type_in_storage(key, ty, self.origin_span); + // We eagerly resolve inference variables when computing the query response. + // This can cause previously distinct opaque type keys to now be structurally equal. + // + // To handle this, we store any duplicate entries in a separate list to check them + // at the end of typeck/borrowck. We could alternatively eagerly equate the hidden + // types here. However, doing so is difficult as it may result in nested goals and + // any errors may make it harder to track the control flow for diagnostics. + if let Some(prev) = prev { + self.delegate.add_duplicate_opaque_type(key, prev, self.origin_span); + } } } } diff --git a/compiler/rustc_next_trait_solver/src/solve/eval_ctxt/mod.rs b/compiler/rustc_next_trait_solver/src/solve/eval_ctxt/mod.rs index 7ef36d0e9ae37..fc5dad9a3edf5 100644 --- a/compiler/rustc_next_trait_solver/src/solve/eval_ctxt/mod.rs +++ b/compiler/rustc_next_trait_solver/src/solve/eval_ctxt/mod.rs @@ -1,8 +1,8 @@ +use std::mem; use std::ops::ControlFlow; -use derive_where::derive_where; #[cfg(feature = "nightly")] -use rustc_macros::{Decodable_NoContext, Encodable_NoContext, HashStable_NoContext}; +use rustc_macros::HashStable_NoContext; use rustc_type_ir::data_structures::{HashMap, HashSet, ensure_sufficient_stack}; use rustc_type_ir::fast_reject::DeepRejectCtxt; use rustc_type_ir::inherent::*; @@ -14,17 +14,16 @@ use rustc_type_ir::{ TypeSuperFoldable, TypeSuperVisitable, TypeVisitable, TypeVisitableExt, TypeVisitor, TypingMode, }; -use rustc_type_ir_macros::{Lift_Generic, TypeFoldable_Generic, TypeVisitable_Generic}; -use tracing::{instrument, trace}; +use tracing::{debug, instrument, trace}; +use super::has_only_region_constraints; use crate::coherence; use crate::delegate::SolverDelegate; use crate::solve::inspect::{self, ProofTreeBuilder}; use crate::solve::search_graph::SearchGraph; use crate::solve::{ CanonicalInput, Certainty, FIXPOINT_STEP_LIMIT, Goal, GoalEvaluationKind, GoalSource, - HasChanged, NestedNormalizationGoals, NoSolution, PredefinedOpaquesData, QueryInput, - QueryResult, + HasChanged, NestedNormalizationGoals, NoSolution, QueryInput, QueryResult, }; pub(super) mod canonical; @@ -99,8 +98,6 @@ where current_goal_kind: CurrentGoalKind, pub(super) var_values: CanonicalVarValues, - predefined_opaques_in_body: I::PredefinedOpaques, - /// The highest universe index nameable by the caller. /// /// When we enter a new binder inside of the query we create new universes @@ -111,10 +108,14 @@ where /// if we have a coinductive cycle and because that's the only way we can return /// new placeholders to the caller. pub(super) max_input_universe: ty::UniverseIndex, + /// The opaque types from the canonical input. We only need to return opaque types + /// which have been added to the storage while evaluating this goal. + pub(super) initial_opaque_types_storage_num_entries: + ::OpaqueTypeStorageEntries, pub(super) search_graph: &'a mut SearchGraph, - nested_goals: NestedGoals, + nested_goals: Vec<(GoalSource, Goal)>, pub(super) origin_span: I::Span, @@ -129,38 +130,6 @@ where pub(super) inspect: ProofTreeBuilder, } -#[derive_where(Clone, Debug, Default; I: Interner)] -#[derive(TypeVisitable_Generic, TypeFoldable_Generic, Lift_Generic)] -#[cfg_attr( - feature = "nightly", - derive(Decodable_NoContext, Encodable_NoContext, HashStable_NoContext) -)] -struct NestedGoals { - /// These normalizes-to goals are treated specially during the evaluation - /// loop. In each iteration we take the RHS of the projection, replace it with - /// a fresh inference variable, and only after evaluating that goal do we - /// equate the fresh inference variable with the actual RHS of the predicate. - /// - /// This is both to improve caching, and to avoid using the RHS of the - /// projection predicate to influence the normalizes-to candidate we select. - /// - /// Forgetting to replace the RHS with a fresh inference variable when we evaluate - /// this goal results in an ICE.. - pub normalizes_to_goals: Vec>>, - /// The rest of the goals which have not yet processed or remain ambiguous. - pub goals: Vec<(GoalSource, Goal)>, -} - -impl NestedGoals { - fn new() -> Self { - Self { normalizes_to_goals: Vec::new(), goals: Vec::new() } - } - - fn is_empty(&self) -> bool { - self.normalizes_to_goals.is_empty() && self.goals.is_empty() - } -} - #[derive(PartialEq, Eq, Debug, Hash, Clone, Copy)] #[cfg_attr(feature = "nightly", derive(HashStable_NoContext))] pub enum GenerateProofTree { @@ -286,18 +255,23 @@ where // fixing it may cause inference breakage or introduce ambiguity. GoalSource::Misc => PathKind::Unknown, GoalSource::NormalizeGoal(path_kind) => path_kind, - GoalSource::ImplWhereBound => { + GoalSource::ImplWhereBound => match self.current_goal_kind { // We currently only consider a cycle coinductive if it steps // into a where-clause of a coinductive trait. + CurrentGoalKind::CoinductiveTrait => PathKind::Coinductive, + // While normalizing via an impl does step into a where-clause of + // an impl, accessing the associated item immediately steps out of + // it again. This means cycles/recursive calls are not guarded + // by impls used for normalization. // + // See tests/ui/traits/next-solver/cycles/normalizes-to-is-not-productive.rs + // for how this can go wrong. + CurrentGoalKind::NormalizesTo => PathKind::Inductive, // We probably want to make all traits coinductive in the future, - // so we treat cycles involving their where-clauses as ambiguous. - if let CurrentGoalKind::CoinductiveTrait = self.current_goal_kind { - PathKind::Coinductive - } else { - PathKind::Unknown - } - } + // so we treat cycles involving where-clauses of not-yet coinductive + // traits as ambiguous for now. + CurrentGoalKind::Misc => PathKind::Unknown, + }, // Relating types is always unproductive. If we were to map proof trees to // corecursive functions as explained in #136824, relating types never // introduces a constructor which could cause the recursion to be guarded. @@ -327,15 +301,13 @@ where let mut ecx = EvalCtxt { delegate, search_graph: &mut search_graph, - nested_goals: NestedGoals::new(), + nested_goals: Default::default(), inspect: ProofTreeBuilder::new_maybe_root(generate_proof_tree), // Only relevant when canonicalizing the response, // which we don't do within this evaluation context. - predefined_opaques_in_body: delegate - .cx() - .mk_predefined_opaques_in_body(PredefinedOpaquesData::default()), max_input_universe: ty::UniverseIndex::ROOT, + initial_opaque_types_storage_num_entries: Default::default(), variables: Default::default(), var_values: CanonicalVarValues::dummy(), current_goal_kind: CurrentGoalKind::Misc, @@ -369,31 +341,41 @@ where canonical_goal_evaluation: &mut ProofTreeBuilder, f: impl FnOnce(&mut EvalCtxt<'_, D>, Goal) -> R, ) -> R { - let (ref delegate, input, var_values) = - SolverDelegate::build_with_canonical(cx, &canonical_input); + let (ref delegate, input, var_values) = D::build_with_canonical(cx, &canonical_input); + for &(key, ty) in &input.predefined_opaques_in_body.opaque_types { + let prev = delegate.register_hidden_type_in_storage(key, ty, I::Span::dummy()); + // It may be possible that two entries in the opaque type storage end up + // with the same key after resolving contained inference variables. + // + // We could put them in the duplicate list but don't have to. The opaques we + // encounter here are already tracked in the caller, so there's no need to + // also store them here. We'd take them out when computing the query response + // and then discard them, as they're already present in the input. + // + // Ideally we'd drop duplicate opaque type definitions when computing + // the canonical input. This is more annoying to implement and may cause a + // perf regression, so we do it inside of the query for now. + if let Some(prev) = prev { + debug!(?key, ?ty, ?prev, "ignore duplicate in `opaque_types_storage`"); + } + } + + let initial_opaque_types_storage_num_entries = delegate.opaque_types_storage_num_entries(); let mut ecx = EvalCtxt { delegate, variables: canonical_input.canonical.variables, var_values, current_goal_kind: CurrentGoalKind::from_query_input(cx, input), - predefined_opaques_in_body: input.predefined_opaques_in_body, max_input_universe: canonical_input.canonical.max_universe, + initial_opaque_types_storage_num_entries, search_graph, - nested_goals: NestedGoals::new(), + nested_goals: Default::default(), origin_span: I::Span::dummy(), tainted: Ok(()), inspect: canonical_goal_evaluation.new_goal_evaluation_step(var_values), }; - for &(key, ty) in &input.predefined_opaques_in_body.opaque_types { - ecx.delegate.inject_new_hidden_type_unchecked(key, ty, ecx.origin_span); - } - - if !ecx.nested_goals.is_empty() { - panic!("prepopulating opaque types shouldn't add goals: {:?}", ecx.nested_goals); - } - let result = f(&mut ecx, input.goal); ecx.inspect.probe_final_state(ecx.delegate, ecx.max_input_universe); canonical_goal_evaluation.goal_evaluation_step(ecx.inspect); @@ -503,13 +485,8 @@ where Ok(response) => response, }; - let has_changed = if !response.value.var_values.is_identity_modulo_regions() - || !response.value.external_constraints.opaque_types.is_empty() - { - HasChanged::Yes - } else { - HasChanged::No - }; + let has_changed = + if !has_only_region_constraints(response) { HasChanged::Yes } else { HasChanged::No }; let (normalization_nested_goals, certainty) = self.instantiate_and_apply_query_response(goal.param_env, orig_values, response); @@ -560,8 +537,8 @@ where ty::PredicateKind::DynCompatible(trait_def_id) => { self.compute_dyn_compatible_goal(trait_def_id) } - ty::PredicateKind::Clause(ty::ClauseKind::WellFormed(arg)) => { - self.compute_well_formed_goal(Goal { param_env, predicate: arg }) + ty::PredicateKind::Clause(ty::ClauseKind::WellFormed(term)) => { + self.compute_well_formed_goal(Goal { param_env, predicate: term }) } ty::PredicateKind::Clause(ty::ClauseKind::ConstEvaluatable(ct)) => { self.compute_const_evaluatable_goal(Goal { param_env, predicate: ct }) @@ -623,78 +600,83 @@ where /// Goals for the next step get directly added to the nested goals of the `EvalCtxt`. fn evaluate_added_goals_step(&mut self) -> Result, NoSolution> { let cx = self.cx(); - let mut goals = core::mem::take(&mut self.nested_goals); - // If this loop did not result in any progress, what's our final certainty. let mut unchanged_certainty = Some(Certainty::Yes); - for goal in goals.normalizes_to_goals { - // Replace the goal with an unconstrained infer var, so the - // RHS does not affect projection candidate assembly. - let unconstrained_rhs = self.next_term_infer_of_kind(goal.predicate.term); - let unconstrained_goal = goal.with( - cx, - ty::NormalizesTo { alias: goal.predicate.alias, term: unconstrained_rhs }, - ); - - let (NestedNormalizationGoals(nested_goals), _, certainty) = self.evaluate_goal_raw( - GoalEvaluationKind::Nested, - GoalSource::TypeRelating, - unconstrained_goal, - )?; - // Add the nested goals from normalization to our own nested goals. - trace!(?nested_goals); - goals.goals.extend(nested_goals); - - // Finally, equate the goal's RHS with the unconstrained var. - // - // SUBTLE: - // We structurally relate aliases here. This is necessary - // as we otherwise emit a nested `AliasRelate` goal in case the - // returned term is a rigid alias, resulting in overflow. + for (source, goal) in mem::take(&mut self.nested_goals) { + // We treat normalizes-to goals specially here. In each iteration we take the + // RHS of the projection, replace it with a fresh inference variable, and only + // after evaluating that goal do we equate the fresh inference variable with the + // actual RHS of the predicate. // - // It is correct as both `goal.predicate.term` and `unconstrained_rhs` - // start out as an unconstrained inference variable so any aliases get - // fully normalized when instantiating it. + // This is both to improve caching, and to avoid using the RHS of the + // projection predicate to influence the normalizes-to candidate we select. // - // FIXME: Strictly speaking this may be incomplete if the normalized-to - // type contains an ambiguous alias referencing bound regions. We should - // consider changing this to only use "shallow structural equality". - self.eq_structurally_relating_aliases( - goal.param_env, - goal.predicate.term, - unconstrained_rhs, - )?; - - // We only look at the `projection_ty` part here rather than - // looking at the "has changed" return from evaluate_goal, - // because we expect the `unconstrained_rhs` part of the predicate - // to have changed -- that means we actually normalized successfully! - let with_resolved_vars = self.resolve_vars_if_possible(goal); - if goal.predicate.alias != with_resolved_vars.predicate.alias { - unchanged_certainty = None; - } - - match certainty { - Certainty::Yes => {} - Certainty::Maybe(_) => { - self.nested_goals.normalizes_to_goals.push(with_resolved_vars); - unchanged_certainty = unchanged_certainty.map(|c| c.unify_with(certainty)); + // Forgetting to replace the RHS with a fresh inference variable when we evaluate + // this goal results in an ICE. + if let Some(pred) = goal.predicate.as_normalizes_to() { + // We should never encounter higher-ranked normalizes-to goals. + let pred = pred.no_bound_vars().unwrap(); + // Replace the goal with an unconstrained infer var, so the + // RHS does not affect projection candidate assembly. + let unconstrained_rhs = self.next_term_infer_of_kind(pred.term); + let unconstrained_goal = + goal.with(cx, ty::NormalizesTo { alias: pred.alias, term: unconstrained_rhs }); + + let (NestedNormalizationGoals(nested_goals), _, certainty) = + self.evaluate_goal_raw(GoalEvaluationKind::Nested, source, unconstrained_goal)?; + // Add the nested goals from normalization to our own nested goals. + trace!(?nested_goals); + self.nested_goals.extend(nested_goals); + + // Finally, equate the goal's RHS with the unconstrained var. + // + // SUBTLE: + // We structurally relate aliases here. This is necessary + // as we otherwise emit a nested `AliasRelate` goal in case the + // returned term is a rigid alias, resulting in overflow. + // + // It is correct as both `goal.predicate.term` and `unconstrained_rhs` + // start out as an unconstrained inference variable so any aliases get + // fully normalized when instantiating it. + // + // FIXME: Strictly speaking this may be incomplete if the normalized-to + // type contains an ambiguous alias referencing bound regions. We should + // consider changing this to only use "shallow structural equality". + self.eq_structurally_relating_aliases( + goal.param_env, + pred.term, + unconstrained_rhs, + )?; + + // We only look at the `projection_ty` part here rather than + // looking at the "has changed" return from evaluate_goal, + // because we expect the `unconstrained_rhs` part of the predicate + // to have changed -- that means we actually normalized successfully! + let with_resolved_vars = self.resolve_vars_if_possible(goal); + if pred.alias != goal.predicate.as_normalizes_to().unwrap().skip_binder().alias { + unchanged_certainty = None; } - } - } - for (source, goal) in goals.goals { - let (has_changed, certainty) = - self.evaluate_goal(GoalEvaluationKind::Nested, source, goal)?; - if has_changed == HasChanged::Yes { - unchanged_certainty = None; - } + match certainty { + Certainty::Yes => {} + Certainty::Maybe(_) => { + self.nested_goals.push((source, with_resolved_vars)); + unchanged_certainty = unchanged_certainty.map(|c| c.and(certainty)); + } + } + } else { + let (has_changed, certainty) = + self.evaluate_goal(GoalEvaluationKind::Nested, source, goal)?; + if has_changed == HasChanged::Yes { + unchanged_certainty = None; + } - match certainty { - Certainty::Yes => {} - Certainty::Maybe(_) => { - self.nested_goals.goals.push((source, goal)); - unchanged_certainty = unchanged_certainty.map(|c| c.unify_with(certainty)); + match certainty { + Certainty::Yes => {} + Certainty::Maybe(_) => { + self.nested_goals.push((source, goal)); + unchanged_certainty = unchanged_certainty.map(|c| c.and(certainty)); + } } } } @@ -711,23 +693,12 @@ where self.delegate.cx() } - #[instrument(level = "trace", skip(self))] - pub(super) fn add_normalizes_to_goal(&mut self, mut goal: Goal>) { - goal.predicate = goal.predicate.fold_with(&mut ReplaceAliasWithInfer::new( - self, - GoalSource::TypeRelating, - goal.param_env, - )); - self.inspect.add_normalizes_to_goal(self.delegate, self.max_input_universe, goal); - self.nested_goals.normalizes_to_goals.push(goal); - } - #[instrument(level = "debug", skip(self))] pub(super) fn add_goal(&mut self, source: GoalSource, mut goal: Goal) { goal.predicate = goal.predicate.fold_with(&mut ReplaceAliasWithInfer::new(self, source, goal.param_env)); self.inspect.add_goal(self.delegate, self.max_input_universe, source, goal); - self.nested_goals.goals.push((source, goal)); + self.nested_goals.push((source, goal)); } #[instrument(level = "trace", skip(self, goals))] @@ -1024,6 +995,14 @@ where self.delegate.resolve_vars_if_possible(value) } + pub(super) fn eager_resolve_region(&self, r: I::Region) -> I::Region { + if let ty::ReVar(vid) = r.kind() { + self.delegate.opportunistic_resolve_lt_var(vid) + } else { + r + } + } + pub(super) fn fresh_args_for_item(&mut self, def_id: I::DefId) -> I::GenericArgs { let args = self.delegate.fresh_args_for_item(def_id); for arg in args.iter() { @@ -1045,9 +1024,9 @@ where pub(super) fn well_formed_goals( &self, param_env: I::ParamEnv, - arg: I::GenericArg, + term: I::Term, ) -> Option>> { - self.delegate.well_formed_goals(param_env, arg) + self.delegate.well_formed_goals(param_env, term) } pub(super) fn trait_ref_is_knowable( @@ -1070,16 +1049,12 @@ where self.delegate.fetch_eligible_assoc_item(goal_trait_ref, trait_assoc_def_id, impl_def_id) } - pub(super) fn insert_hidden_type( + pub(super) fn register_hidden_type_in_storage( &mut self, opaque_type_key: ty::OpaqueTypeKey, - param_env: I::ParamEnv, hidden_ty: I::Ty, - ) -> Result<(), NoSolution> { - let mut goals = Vec::new(); - self.delegate.insert_hidden_type(opaque_type_key, param_env, hidden_ty, &mut goals)?; - self.add_goals(GoalSource::Misc, goals); - Ok(()) + ) -> Option { + self.delegate.register_hidden_type_in_storage(opaque_type_key, hidden_ty, self.origin_span) } pub(super) fn add_item_bounds_for_hidden_type( @@ -1106,14 +1081,17 @@ where &mut self, key: ty::OpaqueTypeKey, ) -> Option<(ty::OpaqueTypeKey, I::Ty)> { - let mut matching = - self.delegate.clone_opaque_types_for_query_response().into_iter().filter( - |(candidate_key, _)| { - candidate_key.def_id == key.def_id - && DeepRejectCtxt::relate_rigid_rigid(self.cx()) - .args_may_unify(candidate_key.args, key.args) - }, - ); + // We shouldn't have any duplicate entries when using + // this function during `TypingMode::Analysis`. + let duplicate_entries = self.delegate.clone_duplicate_opaque_types(); + assert!(duplicate_entries.is_empty(), "unexpected duplicates: {duplicate_entries:?}"); + let mut matching = self.delegate.clone_opaque_types_lookup_table().into_iter().filter( + |(candidate_key, _)| { + candidate_key.def_id == key.def_id + && DeepRejectCtxt::relate_rigid_rigid(self.cx()) + .args_may_unify(candidate_key.args, key.args) + }, + ); let first = matching.next(); let second = matching.next(); assert_eq!(second, None); diff --git a/compiler/rustc_next_trait_solver/src/solve/eval_ctxt/probe.rs b/compiler/rustc_next_trait_solver/src/solve/eval_ctxt/probe.rs index 0a9e7fafaea62..ed0cedc407746 100644 --- a/compiler/rustc_next_trait_solver/src/solve/eval_ctxt/probe.rs +++ b/compiler/rustc_next_trait_solver/src/solve/eval_ctxt/probe.rs @@ -26,32 +26,33 @@ where I: Interner, { pub(in crate::solve) fn enter(self, f: impl FnOnce(&mut EvalCtxt<'_, D>) -> T) -> T { - let ProbeCtxt { ecx: outer_ecx, probe_kind, _result } = self; + let ProbeCtxt { ecx: outer, probe_kind, _result } = self; - let delegate = outer_ecx.delegate; - let max_input_universe = outer_ecx.max_input_universe; - let mut nested_ecx = EvalCtxt { + let delegate = outer.delegate; + let max_input_universe = outer.max_input_universe; + let mut nested = EvalCtxt { delegate, - variables: outer_ecx.variables, - var_values: outer_ecx.var_values, - current_goal_kind: outer_ecx.current_goal_kind, - predefined_opaques_in_body: outer_ecx.predefined_opaques_in_body, + variables: outer.variables, + var_values: outer.var_values, + current_goal_kind: outer.current_goal_kind, max_input_universe, - search_graph: outer_ecx.search_graph, - nested_goals: outer_ecx.nested_goals.clone(), - origin_span: outer_ecx.origin_span, - tainted: outer_ecx.tainted, - inspect: outer_ecx.inspect.take_and_enter_probe(), + initial_opaque_types_storage_num_entries: outer + .initial_opaque_types_storage_num_entries, + search_graph: outer.search_graph, + nested_goals: outer.nested_goals.clone(), + origin_span: outer.origin_span, + tainted: outer.tainted, + inspect: outer.inspect.take_and_enter_probe(), }; - let r = nested_ecx.delegate.probe(|| { - let r = f(&mut nested_ecx); - nested_ecx.inspect.probe_final_state(delegate, max_input_universe); + let r = nested.delegate.probe(|| { + let r = f(&mut nested); + nested.inspect.probe_final_state(delegate, max_input_universe); r }); - if !nested_ecx.inspect.is_noop() { + if !nested.inspect.is_noop() { let probe_kind = probe_kind(&r); - nested_ecx.inspect.probe_kind(probe_kind); - outer_ecx.inspect = nested_ecx.inspect.finish_probe(); + nested.inspect.probe_kind(probe_kind); + outer.inspect = nested.inspect.finish_probe(); } r } diff --git a/compiler/rustc_next_trait_solver/src/solve/inspect/build.rs b/compiler/rustc_next_trait_solver/src/solve/inspect/build.rs index 6a8e0790f7cb4..f22b275bc44a2 100644 --- a/compiler/rustc_next_trait_solver/src/solve/inspect/build.rs +++ b/compiler/rustc_next_trait_solver/src/solve/inspect/build.rs @@ -412,20 +412,6 @@ impl, I: Interner> ProofTreeBuilder { } } - pub(crate) fn add_normalizes_to_goal( - &mut self, - delegate: &D, - max_input_universe: ty::UniverseIndex, - goal: Goal>, - ) { - self.add_goal( - delegate, - max_input_universe, - GoalSource::TypeRelating, - goal.with(delegate.cx(), goal.predicate), - ); - } - pub(crate) fn add_goal( &mut self, delegate: &D, diff --git a/compiler/rustc_next_trait_solver/src/solve/mod.rs b/compiler/rustc_next_trait_solver/src/solve/mod.rs index 199f0c7512e1b..8173146e2fe24 100644 --- a/compiler/rustc_next_trait_solver/src/solve/mod.rs +++ b/compiler/rustc_next_trait_solver/src/solve/mod.rs @@ -70,6 +70,17 @@ fn has_no_inference_or_external_constraints( && normalization_nested_goals.is_empty() } +fn has_only_region_constraints(response: ty::Canonical>) -> bool { + let ExternalConstraintsData { + region_constraints: _, + ref opaque_types, + ref normalization_nested_goals, + } = *response.value.external_constraints; + response.value.var_values.is_identity_modulo_regions() + && opaque_types.is_empty() + && normalization_nested_goals.is_empty() +} + impl<'a, D, I> EvalCtxt<'a, D> where D: SolverDelegate, @@ -126,7 +137,7 @@ where } #[instrument(level = "trace", skip(self))] - fn compute_well_formed_goal(&mut self, goal: Goal) -> QueryResult { + fn compute_well_formed_goal(&mut self, goal: Goal) -> QueryResult { match self.well_formed_goals(goal.param_env, goal.predicate) { Some(goals) => { self.add_goals(GoalSource::Misc, goals); @@ -242,16 +253,18 @@ where } fn bail_with_ambiguity(&mut self, responses: &[CanonicalResponse]) -> CanonicalResponse { - debug_assert!(!responses.is_empty()); - if let Certainty::Maybe(maybe_cause) = - responses.iter().fold(Certainty::AMBIGUOUS, |certainty, response| { - certainty.unify_with(response.value.certainty) - }) - { - self.make_ambiguous_response_no_constraints(maybe_cause) - } else { - panic!("expected flounder response to be ambiguous") - } + debug_assert!(responses.len() > 1); + let maybe_cause = responses.iter().fold(MaybeCause::Ambiguity, |maybe_cause, response| { + // Pull down the certainty of `Certainty::Yes` to ambiguity when combining + // these responses, b/c we're combining more than one response and this we + // don't know which one applies. + let candidate = match response.value.certainty { + Certainty::Yes => MaybeCause::Ambiguity, + Certainty::Maybe(candidate) => candidate, + }; + maybe_cause.or(candidate) + }); + self.make_ambiguous_response_no_constraints(maybe_cause) } /// If we fail to merge responses we flounder and return overflow or ambiguity. @@ -329,7 +342,8 @@ where TypingMode::Coherence | TypingMode::PostAnalysis => false, // During analysis, opaques are rigid unless they may be defined by // the current body. - TypingMode::Analysis { defining_opaque_types: non_rigid_opaques } + TypingMode::Analysis { defining_opaque_types_and_generators: non_rigid_opaques } + | TypingMode::Borrowck { defining_opaque_types: non_rigid_opaques } | TypingMode::PostBorrowckAnalysis { defined_opaque_types: non_rigid_opaques } => { !def_id.as_local().is_some_and(|def_id| non_rigid_opaques.contains(&def_id)) } diff --git a/compiler/rustc_next_trait_solver/src/solve/normalizes_to/free_alias.rs b/compiler/rustc_next_trait_solver/src/solve/normalizes_to/free_alias.rs new file mode 100644 index 0000000000000..8aa6e4a3d7118 --- /dev/null +++ b/compiler/rustc_next_trait_solver/src/solve/normalizes_to/free_alias.rs @@ -0,0 +1,43 @@ +//! Computes a normalizes-to (projection) goal for inherent associated types, +//! `#![feature(lazy_type_alias)]` and `#![feature(type_alias_impl_trait)]`. +//! +//! Since a free alias is never ambiguous, this just computes the `type_of` of +//! the alias and registers the where-clauses of the type alias. + +use rustc_type_ir::{self as ty, Interner}; + +use crate::delegate::SolverDelegate; +use crate::solve::{Certainty, EvalCtxt, Goal, GoalSource, QueryResult}; + +impl EvalCtxt<'_, D> +where + D: SolverDelegate, + I: Interner, +{ + pub(super) fn normalize_free_alias( + &mut self, + goal: Goal>, + ) -> QueryResult { + let cx = self.cx(); + let free_alias = goal.predicate.alias; + + // Check where clauses + self.add_goals( + GoalSource::Misc, + cx.predicates_of(free_alias.def_id) + .iter_instantiated(cx, free_alias.args) + .map(|pred| goal.with(cx, pred)), + ); + + let actual = if free_alias.kind(cx).is_type() { + cx.type_of(free_alias.def_id).instantiate(cx, free_alias.args) + } else { + // FIXME(mgca): once const items are actual aliases defined as equal to type system consts + // this should instead return that. + panic!("normalizing free const aliases in the type system is unsupported"); + }; + + self.instantiate_normalizes_to_term(goal, actual.into()); + self.evaluate_added_goals_and_make_canonical_response(Certainty::Yes) + } +} diff --git a/compiler/rustc_next_trait_solver/src/solve/normalizes_to/inherent.rs b/compiler/rustc_next_trait_solver/src/solve/normalizes_to/inherent.rs index 1d1ff09ee4104..2640238f5a904 100644 --- a/compiler/rustc_next_trait_solver/src/solve/normalizes_to/inherent.rs +++ b/compiler/rustc_next_trait_solver/src/solve/normalizes_to/inherent.rs @@ -15,12 +15,12 @@ where D: SolverDelegate, I: Interner, { - pub(super) fn normalize_inherent_associated_type( + pub(super) fn normalize_inherent_associated_term( &mut self, goal: Goal>, ) -> QueryResult { let cx = self.cx(); - let inherent = goal.predicate.alias.expect_ty(cx); + let inherent = goal.predicate.alias; let impl_def_id = cx.parent(inherent.def_id); let impl_args = self.fresh_args_for_item(impl_def_id); @@ -48,8 +48,13 @@ where .map(|pred| goal.with(cx, pred)), ); - let normalized = cx.type_of(inherent.def_id).instantiate(cx, inherent_args); - self.instantiate_normalizes_to_term(goal, normalized.into()); + let normalized = if inherent.kind(cx).is_type() { + cx.type_of(inherent.def_id).instantiate(cx, inherent_args).into() + } else { + // FIXME(mgca): Properly handle IACs in the type system + panic!("normalizing inherent associated consts in the type system is unsupported"); + }; + self.instantiate_normalizes_to_term(goal, normalized); self.evaluate_added_goals_and_make_canonical_response(Certainty::Yes) } } diff --git a/compiler/rustc_next_trait_solver/src/solve/normalizes_to/mod.rs b/compiler/rustc_next_trait_solver/src/solve/normalizes_to/mod.rs index de6d21da0f592..b90e34e78101c 100644 --- a/compiler/rustc_next_trait_solver/src/solve/normalizes_to/mod.rs +++ b/compiler/rustc_next_trait_solver/src/solve/normalizes_to/mod.rs @@ -1,12 +1,12 @@ mod anon_const; +mod free_alias; mod inherent; mod opaque_types; -mod weak_types; use rustc_type_ir::fast_reject::DeepRejectCtxt; use rustc_type_ir::inherent::*; use rustc_type_ir::lang_items::TraitSolverLangItem; -use rustc_type_ir::{self as ty, Interner, NormalizesTo, Upcast as _}; +use rustc_type_ir::{self as ty, Interner, NormalizesTo, PredicateKind, Upcast as _}; use tracing::instrument; use crate::delegate::SolverDelegate; @@ -32,27 +32,29 @@ where let cx = self.cx(); match goal.predicate.alias.kind(cx) { ty::AliasTermKind::ProjectionTy | ty::AliasTermKind::ProjectionConst => { - let candidates = self.assemble_and_evaluate_candidates(goal); let trait_ref = goal.predicate.alias.trait_ref(cx); let (_, proven_via) = self.probe(|_| ProbeKind::ShadowedEnvProbing).enter(|ecx| { let trait_goal: Goal> = goal.with(cx, trait_ref); ecx.compute_trait_goal(trait_goal) })?; - self.merge_candidates(proven_via, candidates, |ecx| { + self.assemble_and_merge_candidates(proven_via, goal, |ecx| { ecx.probe(|&result| ProbeKind::RigidAlias { result }).enter(|this| { this.structurally_instantiate_normalizes_to_term( goal, goal.predicate.alias, ); - this.add_goal(GoalSource::AliasWellFormed, goal.with(cx, trait_ref)); this.evaluate_added_goals_and_make_canonical_response(Certainty::Yes) }) }) } - ty::AliasTermKind::InherentTy => self.normalize_inherent_associated_type(goal), + ty::AliasTermKind::InherentTy | ty::AliasTermKind::InherentConst => { + self.normalize_inherent_associated_term(goal) + } ty::AliasTermKind::OpaqueTy => self.normalize_opaque_type(goal), - ty::AliasTermKind::WeakTy => self.normalize_weak_type(goal), + ty::AliasTermKind::FreeTy | ty::AliasTermKind::FreeConst => { + self.normalize_free_alias(goal) + } ty::AliasTermKind::UnevaluatedConst => self.normalize_anon_const(goal), } } @@ -104,50 +106,48 @@ where self.trait_def_id(cx) } - fn probe_and_match_goal_against_assumption( + fn fast_reject_assumption( ecx: &mut EvalCtxt<'_, D>, - source: CandidateSource, goal: Goal, assumption: I::Clause, - then: impl FnOnce(&mut EvalCtxt<'_, D>) -> QueryResult, - ) -> Result, NoSolution> { + ) -> Result<(), NoSolution> { if let Some(projection_pred) = assumption.as_projection_clause() { if projection_pred.item_def_id() == goal.predicate.def_id() { - let cx = ecx.cx(); - if !DeepRejectCtxt::relate_rigid_rigid(ecx.cx()).args_may_unify( + if DeepRejectCtxt::relate_rigid_rigid(ecx.cx()).args_may_unify( goal.predicate.alias.args, projection_pred.skip_binder().projection_term.args, ) { - return Err(NoSolution); + return Ok(()); } - ecx.probe_trait_candidate(source).enter(|ecx| { - let assumption_projection_pred = - ecx.instantiate_binder_with_infer(projection_pred); - ecx.eq( - goal.param_env, - goal.predicate.alias, - assumption_projection_pred.projection_term, - )?; - - ecx.instantiate_normalizes_to_term(goal, assumption_projection_pred.term); - - // Add GAT where clauses from the trait's definition - // FIXME: We don't need these, since these are the type's own WF obligations. - ecx.add_goals( - GoalSource::Misc, - cx.own_predicates_of(goal.predicate.def_id()) - .iter_instantiated(cx, goal.predicate.alias.args) - .map(|pred| goal.with(cx, pred)), - ); - - then(ecx) - }) - } else { - Err(NoSolution) } - } else { - Err(NoSolution) } + + Err(NoSolution) + } + + fn match_assumption( + ecx: &mut EvalCtxt<'_, D>, + goal: Goal, + assumption: I::Clause, + ) -> Result<(), NoSolution> { + let projection_pred = assumption.as_projection_clause().unwrap(); + + let assumption_projection_pred = ecx.instantiate_binder_with_infer(projection_pred); + ecx.eq(goal.param_env, goal.predicate.alias, assumption_projection_pred.projection_term)?; + + ecx.instantiate_normalizes_to_term(goal, assumption_projection_pred.term); + + // Add GAT where clauses from the trait's definition + // FIXME: We don't need these, since these are the type's own WF obligations. + let cx = ecx.cx(); + ecx.add_goals( + GoalSource::AliasWellFormed, + cx.own_predicates_of(goal.predicate.def_id()) + .iter_instantiated(cx, goal.predicate.alias.args) + .map(|pred| goal.with(cx, pred)), + ); + + Ok(()) } fn consider_additional_alias_assumptions( @@ -196,10 +196,16 @@ where .map(|pred| goal.with(cx, pred)); ecx.add_goals(GoalSource::ImplWhereBound, where_clause_bounds); + // Bail if the nested goals don't hold here. This is to avoid unnecessarily + // computing the `type_of` query for associated types that never apply, as + // this may result in query cycles in the case of RPITITs. + // See . + ecx.try_evaluate_added_goals()?; + // Add GAT where clauses from the trait's definition. // FIXME: We don't need these, since these are the type's own WF obligations. ecx.add_goals( - GoalSource::Misc, + GoalSource::AliasWellFormed, cx.own_predicates_of(goal.predicate.def_id()) .iter_instantiated(cx, goal.predicate.alias.args) .map(|pred| goal.with(cx, pred)), @@ -215,9 +221,6 @@ where ecx.evaluate_added_goals_and_make_canonical_response(Certainty::Yes) }; - // In case the associated item is hidden due to specialization, we have to - // return ambiguity this would otherwise be incomplete, resulting in - // unsoundness during coherence (#105782). let target_item_def_id = match ecx.fetch_eligible_assoc_item( goal_trait_ref, goal.predicate.def_id(), @@ -225,14 +228,78 @@ where ) { Ok(Some(target_item_def_id)) => target_item_def_id, Ok(None) => { - return ecx - .evaluate_added_goals_and_make_canonical_response(Certainty::AMBIGUOUS); + match ecx.typing_mode() { + // In case the associated item is hidden due to specialization, + // normalizing this associated item is always ambiguous. Treating + // the associated item as rigid would be incomplete and allow for + // overlapping impls, see #105782. + // + // As this ambiguity is unavoidable we emit a nested ambiguous + // goal instead of using `Certainty::AMBIGUOUS`. This allows us to + // return the nested goals to the parent `AliasRelate` goal. This + // would be relevant if any of the nested goals refer to the `term`. + // This is not the case here and we only prefer adding an ambiguous + // nested goal for consistency. + ty::TypingMode::Coherence => { + ecx.add_goal(GoalSource::Misc, goal.with(cx, PredicateKind::Ambiguous)); + return ecx + .evaluate_added_goals_and_make_canonical_response(Certainty::Yes); + } + // Outside of coherence, we treat the associated item as rigid instead. + ty::TypingMode::Analysis { .. } + | ty::TypingMode::Borrowck { .. } + | ty::TypingMode::PostBorrowckAnalysis { .. } + | ty::TypingMode::PostAnalysis => { + ecx.structurally_instantiate_normalizes_to_term( + goal, + goal.predicate.alias, + ); + return ecx + .evaluate_added_goals_and_make_canonical_response(Certainty::Yes); + } + }; } Err(guar) => return error_response(ecx, guar), }; if !cx.has_item_definition(target_item_def_id) { - return error_response(ecx, cx.delay_bug("missing item")); + // If the impl is missing an item, it's either because the user forgot to + // provide it, or the user is not *obligated* to provide it (because it + // has a trivially false `Sized` predicate). If it's the latter, we cannot + // delay a bug because we can have trivially false where clauses, so we + // treat it as rigid. + if cx.impl_self_is_guaranteed_unsized(impl_def_id) { + match ecx.typing_mode() { + // Trying to normalize such associated items is always ambiguous + // during coherence to avoid cyclic reasoning. See the example in + // tests/ui/traits/trivial-unsized-projection-in-coherence.rs. + // + // As this ambiguity is unavoidable we emit a nested ambiguous + // goal instead of using `Certainty::AMBIGUOUS`. This allows us to + // return the nested goals to the parent `AliasRelate` goal. This + // would be relevant if any of the nested goals refer to the `term`. + // This is not the case here and we only prefer adding an ambiguous + // nested goal for consistency. + ty::TypingMode::Coherence => { + ecx.add_goal(GoalSource::Misc, goal.with(cx, PredicateKind::Ambiguous)); + return ecx + .evaluate_added_goals_and_make_canonical_response(Certainty::Yes); + } + ty::TypingMode::Analysis { .. } + | ty::TypingMode::Borrowck { .. } + | ty::TypingMode::PostBorrowckAnalysis { .. } + | ty::TypingMode::PostAnalysis => { + ecx.structurally_instantiate_normalizes_to_term( + goal, + goal.predicate.alias, + ); + return ecx + .evaluate_added_goals_and_make_canonical_response(Certainty::Yes); + } + } + } else { + return error_response(ecx, cx.delay_bug("missing item")); + } } let target_container_def_id = cx.parent(target_item_def_id); @@ -268,6 +335,8 @@ where cx.type_of(target_item_def_id).map_bound(|ty| ty.into()) } ty::AliasTermKind::ProjectionConst => { + // FIXME(mgca): once const items are actual aliases defined as equal to type system consts + // this should instead return that. if cx.features().associated_const_equality() { panic!("associated const projection is not supported yet") } else { @@ -846,66 +915,6 @@ where }) } - fn consider_builtin_async_destruct_candidate( - ecx: &mut EvalCtxt<'_, D>, - goal: Goal, - ) -> Result, NoSolution> { - let self_ty = goal.predicate.self_ty(); - let async_destructor_ty = match self_ty.kind() { - ty::Bool - | ty::Char - | ty::Int(..) - | ty::Uint(..) - | ty::Float(..) - | ty::Array(..) - | ty::RawPtr(..) - | ty::Ref(..) - | ty::FnDef(..) - | ty::FnPtr(..) - | ty::Closure(..) - | ty::CoroutineClosure(..) - | ty::Infer(ty::IntVar(..) | ty::FloatVar(..)) - | ty::Never - | ty::Adt(_, _) - | ty::Str - | ty::Slice(_) - | ty::Tuple(_) - | ty::Error(_) => self_ty.async_destructor_ty(ecx.cx()), - - ty::UnsafeBinder(_) => { - // FIXME(unsafe_binders): Instantiate the binder with placeholders I guess. - todo!() - } - - // Given an alias, parameter, or placeholder we add an impl candidate normalizing to a rigid - // alias. In case there's a where-bound further constraining this alias it is preferred over - // this impl candidate anyways. It's still a bit scuffed. - ty::Alias(_, _) | ty::Param(_) | ty::Placeholder(..) => { - return ecx.probe_builtin_trait_candidate(BuiltinImplSource::Misc).enter(|ecx| { - ecx.structurally_instantiate_normalizes_to_term(goal, goal.predicate.alias); - ecx.evaluate_added_goals_and_make_canonical_response(Certainty::Yes) - }); - } - - ty::Infer(ty::TyVar(_) | ty::FreshTy(_) | ty::FreshIntTy(_) | ty::FreshFloatTy(_)) - | ty::Foreign(..) - | ty::Bound(..) => panic!( - "unexpected self ty `{:?}` when normalizing `::AsyncDestructor`", - goal.predicate.self_ty() - ), - - ty::Pat(..) | ty::Dynamic(..) | ty::Coroutine(..) | ty::CoroutineWitness(..) => panic!( - "`consider_builtin_async_destruct_candidate` is not yet implemented for type: {self_ty:?}" - ), - }; - - ecx.probe_builtin_trait_candidate(BuiltinImplSource::Misc).enter(|ecx| { - ecx.eq(goal.param_env, goal.predicate.term, async_destructor_ty.into()) - .expect("expected goal term to be fully unconstrained"); - ecx.evaluate_added_goals_and_make_canonical_response(Certainty::Yes) - }) - } - fn consider_builtin_destruct_candidate( _ecx: &mut EvalCtxt<'_, D>, goal: Goal, diff --git a/compiler/rustc_next_trait_solver/src/solve/normalizes_to/opaque_types.rs b/compiler/rustc_next_trait_solver/src/solve/normalizes_to/opaque_types.rs index 817dffa127bc1..df3ad1e468bb8 100644 --- a/compiler/rustc_next_trait_solver/src/solve/normalizes_to/opaque_types.rs +++ b/compiler/rustc_next_trait_solver/src/solve/normalizes_to/opaque_types.rs @@ -3,6 +3,7 @@ use rustc_index::bit_set::GrowableBitSet; use rustc_type_ir::inherent::*; +use rustc_type_ir::solve::GoalSource; use rustc_type_ir::{self as ty, Interner, TypingMode, fold_regions}; use crate::delegate::SolverDelegate; @@ -31,13 +32,18 @@ where goal.param_env, expected, ); - self.evaluate_added_goals_and_make_canonical_response(Certainty::AMBIGUOUS) + // Trying to normalize an opaque type during coherence is always ambiguous. + // We add a nested ambiguous goal here instead of using `Certainty::AMBIGUOUS`. + // This allows us to return the nested goals to the parent `AliasRelate` goal. + // This can then allow nested goals to fail after we've constrained the `term`. + self.add_goal(GoalSource::Misc, goal.with(cx, ty::PredicateKind::Ambiguous)); + self.evaluate_added_goals_and_make_canonical_response(Certainty::Yes) } - TypingMode::Analysis { defining_opaque_types } => { + TypingMode::Analysis { defining_opaque_types_and_generators } => { let Some(def_id) = opaque_ty .def_id .as_local() - .filter(|&def_id| defining_opaque_types.contains(&def_id)) + .filter(|&def_id| defining_opaque_types_and_generators.contains(&def_id)) else { self.structurally_instantiate_normalizes_to_term(goal, goal.predicate.alias); return self.evaluate_added_goals_and_make_canonical_response(Certainty::Yes); @@ -86,8 +92,44 @@ where } // Otherwise, define a new opaque type - // FIXME: should we use `inject_hidden_type_unchecked` here? - self.insert_hidden_type(opaque_type_key, goal.param_env, expected)?; + let prev = self.register_hidden_type_in_storage(opaque_type_key, expected); + assert_eq!(prev, None); + self.add_item_bounds_for_hidden_type( + def_id.into(), + opaque_ty.args, + goal.param_env, + expected, + ); + self.evaluate_added_goals_and_make_canonical_response(Certainty::Yes) + } + // Very similar to `TypingMode::Analysis` with some notably differences: + // - we accept opaque types even if they have non-universal arguments + // - we do a structural lookup instead of semantically unifying regions + // - the hidden type starts out as the type from HIR typeck with fresh region + // variables instead of a fully unconstrained inference variable + TypingMode::Borrowck { defining_opaque_types } => { + let Some(def_id) = opaque_ty + .def_id + .as_local() + .filter(|&def_id| defining_opaque_types.contains(&def_id)) + else { + self.structurally_instantiate_normalizes_to_term(goal, goal.predicate.alias); + return self.evaluate_added_goals_and_make_canonical_response(Certainty::Yes); + }; + + let opaque_type_key = ty::OpaqueTypeKey { def_id, args: opaque_ty.args }; + let actual = self + .register_hidden_type_in_storage(opaque_type_key, expected) + .unwrap_or_else(|| { + let actual = + cx.type_of_opaque_hir_typeck(def_id).instantiate(cx, opaque_ty.args); + let actual = fold_regions(cx, actual, |re, _dbi| match re.kind() { + ty::ReErased => self.next_region_var(), + _ => re, + }); + actual + }); + self.eq(goal.param_env, expected, actual)?; self.add_item_bounds_for_hidden_type( def_id.into(), opaque_ty.args, diff --git a/compiler/rustc_next_trait_solver/src/solve/normalizes_to/weak_types.rs b/compiler/rustc_next_trait_solver/src/solve/normalizes_to/weak_types.rs deleted file mode 100644 index 14e68dd52b6c1..0000000000000 --- a/compiler/rustc_next_trait_solver/src/solve/normalizes_to/weak_types.rs +++ /dev/null @@ -1,37 +0,0 @@ -//! Computes a normalizes-to (projection) goal for inherent associated types, -//! `#![feature(lazy_type_alias)]` and `#![feature(type_alias_impl_trait)]`. -//! -//! Since a weak alias is never ambiguous, this just computes the `type_of` of -//! the alias and registers the where-clauses of the type alias. - -use rustc_type_ir::{self as ty, Interner}; - -use crate::delegate::SolverDelegate; -use crate::solve::{Certainty, EvalCtxt, Goal, GoalSource, QueryResult}; - -impl EvalCtxt<'_, D> -where - D: SolverDelegate, - I: Interner, -{ - pub(super) fn normalize_weak_type( - &mut self, - goal: Goal>, - ) -> QueryResult { - let cx = self.cx(); - let weak_ty = goal.predicate.alias; - - // Check where clauses - self.add_goals( - GoalSource::Misc, - cx.predicates_of(weak_ty.def_id) - .iter_instantiated(cx, weak_ty.args) - .map(|pred| goal.with(cx, pred)), - ); - - let actual = cx.type_of(weak_ty.def_id).instantiate(cx, weak_ty.args); - self.instantiate_normalizes_to_term(goal, actual.into()); - - self.evaluate_added_goals_and_make_canonical_response(Certainty::Yes) - } -} diff --git a/compiler/rustc_next_trait_solver/src/solve/search_graph.rs b/compiler/rustc_next_trait_solver/src/solve/search_graph.rs index eba496fa22659..ecffbbff7a2dc 100644 --- a/compiler/rustc_next_trait_solver/src/solve/search_graph.rs +++ b/compiler/rustc_next_trait_solver/src/solve/search_graph.rs @@ -62,6 +62,7 @@ where response_no_constraints(cx, input, Certainty::overflow(false)) } TypingMode::Analysis { .. } + | TypingMode::Borrowck { .. } | TypingMode::PostBorrowckAnalysis { .. } | TypingMode::PostAnalysis => Err(NoSolution), }, diff --git a/compiler/rustc_next_trait_solver/src/solve/trait_goals.rs b/compiler/rustc_next_trait_solver/src/solve/trait_goals.rs index b72f776e5cb48..e3addf8bf93fc 100644 --- a/compiler/rustc_next_trait_solver/src/solve/trait_goals.rs +++ b/compiler/rustc_next_trait_solver/src/solve/trait_goals.rs @@ -13,11 +13,11 @@ use tracing::{instrument, trace}; use crate::delegate::SolverDelegate; use crate::solve::assembly::structural_traits::{self, AsyncCallableRelevantTypes}; -use crate::solve::assembly::{self, Candidate}; +use crate::solve::assembly::{self, AllowInferenceConstraints, AssembleCandidatesFrom, Candidate}; use crate::solve::inspect::ProbeKind; use crate::solve::{ BuiltinImplSource, CandidateSource, Certainty, EvalCtxt, Goal, GoalSource, MaybeCause, - NoSolution, QueryResult, + NoSolution, ParamEnvSource, }; impl assembly::GoalKind for TraitPredicate @@ -72,6 +72,7 @@ where (ty::ImplPolarity::Reservation, _) => match ecx.typing_mode() { TypingMode::Coherence => Certainty::AMBIGUOUS, TypingMode::Analysis { .. } + | TypingMode::Borrowck { .. } | TypingMode::PostBorrowckAnalysis { .. } | TypingMode::PostAnalysis => return Err(NoSolution), }, @@ -124,45 +125,45 @@ where .enter(|ecx| ecx.evaluate_added_goals_and_make_canonical_response(Certainty::Yes)) } - fn probe_and_match_goal_against_assumption( + fn fast_reject_assumption( ecx: &mut EvalCtxt<'_, D>, - source: CandidateSource, goal: Goal, assumption: I::Clause, - then: impl FnOnce(&mut EvalCtxt<'_, D>) -> QueryResult, - ) -> Result, NoSolution> { + ) -> Result<(), NoSolution> { if let Some(trait_clause) = assumption.as_trait_clause() { if trait_clause.def_id() == goal.predicate.def_id() && trait_clause.polarity() == goal.predicate.polarity { - if !DeepRejectCtxt::relate_rigid_rigid(ecx.cx()).args_may_unify( + if DeepRejectCtxt::relate_rigid_rigid(ecx.cx()).args_may_unify( goal.predicate.trait_ref.args, trait_clause.skip_binder().trait_ref.args, ) { - return Err(NoSolution); + return Ok(()); } - - ecx.probe_trait_candidate(source).enter(|ecx| { - let assumption_trait_pred = ecx.instantiate_binder_with_infer(trait_clause); - ecx.eq( - goal.param_env, - goal.predicate.trait_ref, - assumption_trait_pred.trait_ref, - )?; - then(ecx) - }) - } else { - Err(NoSolution) } - } else { - Err(NoSolution) } + + Err(NoSolution) + } + + fn match_assumption( + ecx: &mut EvalCtxt<'_, D>, + goal: Goal, + assumption: I::Clause, + ) -> Result<(), NoSolution> { + let trait_clause = assumption.as_trait_clause().unwrap(); + + let assumption_trait_pred = ecx.instantiate_binder_with_infer(trait_clause); + ecx.eq(goal.param_env, goal.predicate.trait_ref, assumption_trait_pred.trait_ref)?; + + Ok(()) } fn consider_auto_trait_candidate( ecx: &mut EvalCtxt<'_, D>, goal: Goal, ) -> Result, NoSolution> { + let cx = ecx.cx(); if goal.predicate.polarity != ty::PredicatePolarity::Positive { return Err(NoSolution); } @@ -173,20 +174,42 @@ where // Only consider auto impls of unsafe traits when there are no unsafe // fields. - if ecx.cx().trait_is_unsafe(goal.predicate.def_id()) + if cx.trait_is_unsafe(goal.predicate.def_id()) && goal.predicate.self_ty().has_unsafe_fields() { return Err(NoSolution); } - // We only look into opaque types during analysis for opaque types - // outside of their defining scope. Doing so for opaques in the - // defining scope may require calling `typeck` on the same item we're - // currently type checking, which will result in a fatal cycle that - // ideally we want to avoid, since we can make progress on this goal - // via an alias bound or a locally-inferred hidden type instead. + // We leak the implemented auto traits of opaques outside of their defining scope. + // This depends on `typeck` of the defining scope of that opaque, which may result in + // fatal query cycles. + // + // We only get to this point if we're outside of the defining scope as we'd otherwise + // be able to normalize the opaque type. We may also cycle in case `typeck` of a defining + // scope relies on the current context, e.g. either because it also leaks auto trait + // bounds of opaques defined in the current context or by evaluating the current item. + // + // To avoid this we don't try to leak auto trait bounds if they can also be proven via + // item bounds of the opaque. These bounds are always applicable as auto traits must not + // have any generic parameters. They would also get preferred over the impl candidate + // when merging candidates anyways. + // + // See tests/ui/impl-trait/auto-trait-leakage/avoid-query-cycle-via-item-bound.rs. if let ty::Alias(ty::Opaque, opaque_ty) = goal.predicate.self_ty().kind() { debug_assert!(ecx.opaque_type_is_rigid(opaque_ty.def_id)); + for item_bound in cx.item_self_bounds(opaque_ty.def_id).skip_binder() { + if item_bound + .as_trait_clause() + .is_some_and(|b| b.def_id() == goal.predicate.def_id()) + { + return Err(NoSolution); + } + } + } + + // We need to make sure to stall any coroutines we are inferring to avoid query cycles. + if let Some(cand) = ecx.try_stall_coroutine_witness(goal.predicate.self_ty()) { + return cand; } ecx.probe_and_evaluate_goal_for_constituent_tys( @@ -240,6 +263,11 @@ where return Err(NoSolution); } + // We need to make sure to stall any coroutines we are inferring to avoid query cycles. + if let Some(cand) = ecx.try_stall_coroutine_witness(goal.predicate.self_ty()) { + return cand; + } + ecx.probe_and_evaluate_goal_for_constituent_tys( CandidateSource::BuiltinImpl(BuiltinImplSource::Misc), goal, @@ -570,19 +598,6 @@ where .enter(|ecx| ecx.evaluate_added_goals_and_make_canonical_response(Certainty::Yes)) } - fn consider_builtin_async_destruct_candidate( - ecx: &mut EvalCtxt<'_, D>, - goal: Goal, - ) -> Result, NoSolution> { - if goal.predicate.polarity != ty::PredicatePolarity::Positive { - return Err(NoSolution); - } - - // `AsyncDestruct` is automatically implemented for every type. - ecx.probe_builtin_trait_candidate(BuiltinImplSource::Misc) - .enter(|ecx| ecx.evaluate_added_goals_and_make_canonical_response(Certainty::Yes)) - } - fn consider_builtin_destruct_candidate( ecx: &mut EvalCtxt<'_, D>, goal: Goal, @@ -925,7 +940,7 @@ where target_projection: ty::Binder>| { source_projection.item_def_id() == target_projection.item_def_id() && ecx - .probe(|_| ProbeKind::UpcastProjectionCompatibility) + .probe(|_| ProbeKind::ProjectionCompatibility) .enter(|ecx| -> Result<_, NoSolution> { ecx.enter_forall(target_projection, |ecx, target_projection| { let source_projection = @@ -1086,6 +1101,25 @@ where goal: Goal>, ) -> Option, NoSolution>> { let self_ty = goal.predicate.self_ty(); + let check_impls = || { + let mut disqualifying_impl = None; + self.cx().for_each_relevant_impl( + goal.predicate.def_id(), + goal.predicate.self_ty(), + |impl_def_id| { + disqualifying_impl = Some(impl_def_id); + }, + ); + if let Some(def_id) = disqualifying_impl { + trace!(?def_id, ?goal, "disqualified auto-trait implementation"); + // No need to actually consider the candidate here, + // since we do that in `consider_impl_candidate`. + return Some(Err(NoSolution)); + } else { + None + } + }; + match self_ty.kind() { // Stall int and float vars until they are resolved to a concrete // numerical type. That's because the check for impls below treats @@ -1096,12 +1130,16 @@ where Some(self.forced_ambiguity(MaybeCause::Ambiguity)) } + // Backward compatibility for default auto traits. + // Test: ui/traits/default_auto_traits/extern-types.rs + ty::Foreign(..) if self.cx().is_default_trait(goal.predicate.def_id()) => check_impls(), + // These types cannot be structurally decomposed into constituent // types, and therefore have no built-in auto impl. ty::Dynamic(..) | ty::Param(..) | ty::Foreign(..) - | ty::Alias(ty::Projection | ty::Weak | ty::Inherent, ..) + | ty::Alias(ty::Projection | ty::Free | ty::Inherent, ..) | ty::Placeholder(..) => Some(Err(NoSolution)), ty::Infer(_) | ty::Bound(_, _) => panic!("unexpected type `{self_ty:?}`"), @@ -1156,24 +1194,7 @@ where | ty::Never | ty::Tuple(_) | ty::Adt(_, _) - | ty::UnsafeBinder(_) => { - let mut disqualifying_impl = None; - self.cx().for_each_relevant_impl( - goal.predicate.def_id(), - goal.predicate.self_ty(), - |impl_def_id| { - disqualifying_impl = Some(impl_def_id); - }, - ); - if let Some(def_id) = disqualifying_impl { - trace!(?def_id, ?goal, "disqualified auto-trait implementation"); - // No need to actually consider the candidate here, - // since we do that in `consider_impl_candidate`. - return Some(Err(NoSolution)); - } else { - None - } - } + | ty::UnsafeBinder(_) => check_impls(), ty::Error(_) => None, } } @@ -1231,10 +1252,10 @@ where D: SolverDelegate, I: Interner, { + #[instrument(level = "debug", skip(self), ret)] pub(super) fn merge_trait_candidates( &mut self, - goal: Goal>, - candidates: Vec>, + mut candidates: Vec>, ) -> Result<(CanonicalResponse, Option), NoSolution> { if let TypingMode::Coherence = self.typing_mode() { let all_candidates: Vec<_> = candidates.into_iter().map(|c| c.result).collect(); @@ -1261,40 +1282,15 @@ where // If there are non-global where-bounds, prefer where-bounds // (including global ones) over everything else. - let has_non_global_where_bounds = candidates.iter().any(|c| match c.source { - CandidateSource::ParamEnv(idx) => { - let where_bound = goal.param_env.caller_bounds().get(idx).unwrap(); - let ty::ClauseKind::Trait(trait_pred) = where_bound.kind().skip_binder() else { - unreachable!("expected trait-bound: {where_bound:?}"); - }; - - if trait_pred.has_bound_vars() || !trait_pred.is_global() { - return true; - } - - // We don't consider a trait-bound global if it has a projection bound. - // - // See ui/traits/next-solver/normalization-shadowing/global-trait-with-project.rs - // for an example where this is necessary. - for p in goal.param_env.caller_bounds().iter() { - if let ty::ClauseKind::Projection(proj) = p.kind().skip_binder() { - if proj.projection_term.trait_ref(self.cx()) == trait_pred.trait_ref { - return true; - } - } - } - - false - } - _ => false, - }); + let has_non_global_where_bounds = candidates + .iter() + .any(|c| matches!(c.source, CandidateSource::ParamEnv(ParamEnvSource::NonGlobal))); if has_non_global_where_bounds { let where_bounds: Vec<_> = candidates .iter() .filter(|c| matches!(c.source, CandidateSource::ParamEnv(_))) .map(|c| c.result) .collect(); - return if let Some(response) = self.try_merge_responses(&where_bounds) { Ok((response, Some(TraitGoalProvenVia::ParamEnv))) } else { @@ -1315,9 +1311,26 @@ where }; } + self.filter_specialized_impls(AllowInferenceConstraints::No, &mut candidates); + + // If there are *only* global where bounds, then make sure to return that this + // is still reported as being proven-via the param-env so that rigid projections + // operate correctly. Otherwise, drop all global where-bounds before merging the + // remaining candidates. + let proven_via = if candidates + .iter() + .all(|c| matches!(c.source, CandidateSource::ParamEnv(ParamEnvSource::Global))) + { + TraitGoalProvenVia::ParamEnv + } else { + candidates + .retain(|c| !matches!(c.source, CandidateSource::ParamEnv(ParamEnvSource::Global))); + TraitGoalProvenVia::Misc + }; + let all_candidates: Vec<_> = candidates.into_iter().map(|c| c.result).collect(); if let Some(response) = self.try_merge_responses(&all_candidates) { - Ok((response, Some(TraitGoalProvenVia::Misc))) + Ok((response, Some(proven_via))) } else { self.flounder(&all_candidates).map(|r| (r, None)) } @@ -1328,7 +1341,31 @@ where &mut self, goal: Goal>, ) -> Result<(CanonicalResponse, Option), NoSolution> { - let candidates = self.assemble_and_evaluate_candidates(goal); - self.merge_trait_candidates(goal, candidates) + let candidates = self.assemble_and_evaluate_candidates(goal, AssembleCandidatesFrom::All); + self.merge_trait_candidates(candidates) + } + + fn try_stall_coroutine_witness( + &mut self, + self_ty: I::Ty, + ) -> Option, NoSolution>> { + if let ty::CoroutineWitness(def_id, _) = self_ty.kind() { + match self.typing_mode() { + TypingMode::Analysis { + defining_opaque_types_and_generators: stalled_generators, + } => { + if def_id.as_local().is_some_and(|def_id| stalled_generators.contains(&def_id)) + { + return Some(self.forced_ambiguity(MaybeCause::Ambiguity)); + } + } + TypingMode::Coherence + | TypingMode::PostAnalysis + | TypingMode::Borrowck { defining_opaque_types: _ } + | TypingMode::PostBorrowckAnalysis { defined_opaque_types: _ } => {} + } + } + + None } } diff --git a/compiler/rustc_parse/Cargo.toml b/compiler/rustc_parse/Cargo.toml index c9dcab0c871dd..6504081f0b9ce 100644 --- a/compiler/rustc_parse/Cargo.toml +++ b/compiler/rustc_parse/Cargo.toml @@ -6,6 +6,7 @@ edition = "2024" [dependencies] # tidy-alphabetical-start bitflags = "2.4.1" +rustc-literal-escaper = "0.0.2" rustc_ast = { path = "../rustc_ast" } rustc_ast_pretty = { path = "../rustc_ast_pretty" } rustc_data_structures = { path = "../rustc_data_structures" } diff --git a/compiler/rustc_parse/messages.ftl b/compiler/rustc_parse/messages.ftl index 3253222b8f23f..3d7e0fcc30869 100644 --- a/compiler/rustc_parse/messages.ftl +++ b/compiler/rustc_parse/messages.ftl @@ -246,9 +246,9 @@ parse_expected_struct_field = expected one of `,`, `:`, or `{"}"}`, found `{$tok parse_expected_trait_in_trait_impl_found_type = expected a trait, found type -parse_expr_rarrow_call = `->` used for field access or method call +parse_expr_rarrow_call = `->` is not valid syntax for field accesses and method calls .suggestion = try using `.` instead - .help = the `.` operator will dereference the value if needed + .help = the `.` operator will automatically dereference the value, except if the value is a raw pointer parse_extern_crate_name_with_dashes = crate name using dashes are not valid in `extern crate` statements .label = dash-separated idents are not valid @@ -297,6 +297,19 @@ parse_forgot_paren = perhaps you forgot parentheses? parse_found_expr_would_be_stmt = expected expression, found `{$token}` .label = expected expression +parse_frontmatter_extra_characters_after_close = extra characters after frontmatter close are not allowed +parse_frontmatter_invalid_close_preceding_whitespace = invalid preceding whitespace for frontmatter close + .note = frontmatter close should not be preceded by whitespace +parse_frontmatter_invalid_infostring = invalid infostring for frontmatter + .note = frontmatter infostrings must be a single identifier immediately following the opening +parse_frontmatter_invalid_opening_preceding_whitespace = invalid preceding whitespace for frontmatter opening + .note = frontmatter opening should not be preceded by whitespace +parse_frontmatter_length_mismatch = frontmatter close does not match the opening + .label_opening = the opening here has {$len_opening} dashes... + .label_close = ...while the close has {$len_close} dashes +parse_frontmatter_unclosed = unclosed frontmatter + .note = frontmatter opening here was not closed + parse_function_body_equals_expr = function body cannot be `= expression;` .suggestion = surround the expression with `{"{"}` and `{"}"}` instead of `=` and `;` @@ -543,7 +556,7 @@ parse_maybe_recover_from_bad_qpath_stage_2 = .suggestion = types that don't start with an identifier need to be surrounded with angle brackets in qualified paths parse_maybe_recover_from_bad_type_plus = - expected a path on the left-hand side of `+`, not `{$ty}` + expected a path on the left-hand side of `+` parse_maybe_report_ambiguous_plus = ambiguous `+` in a type @@ -642,7 +655,9 @@ parse_mut_on_nested_ident_pattern = `mut` must be attached to each individual bi .suggestion = add `mut` to each binding parse_mut_on_non_ident_pattern = `mut` must be followed by a named binding .suggestion = remove the `mut` prefix -parse_need_plus_after_trait_object_lifetime = lifetime in trait object type must be followed by `+` + +parse_need_plus_after_trait_object_lifetime = lifetimes must be followed by `+` to form a trait object type + .suggestion = consider adding a trait bound after the potential lifetime bound parse_nested_adt = `{$kw_str}` definition cannot be nested inside `{$keyword}` .suggestion = consider creating a new `{$kw_str}` definition instead of nesting @@ -673,6 +688,8 @@ parse_note_pattern_alternatives_use_single_vert = alternatives in or-patterns ar parse_nul_in_c_str = null characters in C string literals are not supported +parse_or_in_let_chain = `||` operators are not supported in let chain conditions + parse_or_pattern_not_allowed_in_fn_parameters = top-level or-patterns are not allowed in function parameters parse_or_pattern_not_allowed_in_let_binding = top-level or-patterns are not allowed in `let` bindings parse_out_of_range_hex_escape = out of range hex escape @@ -698,6 +715,16 @@ parse_parenthesized_lifetime_suggestion = remove the parentheses parse_path_double_colon = path separator must be a double colon .suggestion = use a double colon instead + +parse_path_found_attribute_in_params = `Trait(...)` syntax does not support attributes in parameters + .suggestion = remove the attributes + +parse_path_found_c_variadic_params = `Trait(...)` syntax does not support c_variadic parameters + .suggestion = remove the `...` + +parse_path_found_named_params = `Trait(...)` syntax does not support named parameters + .suggestion = remove the parameter name + parse_pattern_method_param_without_body = patterns aren't allowed in methods without bodies .suggestion = give this argument a name or use an underscore to ignore it @@ -806,9 +833,6 @@ parse_trait_alias_cannot_be_unsafe = trait aliases cannot be `unsafe` parse_transpose_dyn_or_impl = `for<...>` expected after `{$kw}`, not before .suggestion = move `{$kw}` before the `for<...>` -parse_type_ascription_removed = - if you meant to annotate an expression with a type, the type ascription syntax has been removed, see issue #101728 - parse_unclosed_unicode_escape = unterminated unicode escape .label = missing a closing `{"}"}` .terminate = terminate the unicode escape @@ -838,8 +862,6 @@ parse_unexpected_expr_in_pat_const_sugg = consider extracting the expression int parse_unexpected_expr_in_pat_create_guard_sugg = consider moving the expression to a match arm guard -parse_unexpected_expr_in_pat_inline_const_sugg = consider wrapping the expression in an inline `const` (requires `{"#"}![feature(inline_const_pat)]`) - parse_unexpected_expr_in_pat_update_guard_sugg = consider moving the expression to the match arm guard parse_unexpected_if_with_if = unexpected `if` in the condition expression @@ -860,7 +882,7 @@ parse_unexpected_parentheses_in_match_arm_pattern = unexpected parentheses surro parse_unexpected_self_in_generic_parameters = unexpected keyword `Self` in generic parameters .note = you cannot use `Self` as a generic parameter because it is reserved for associated items -parse_unexpected_token_after_dot = unexpected token: `{$actual}` +parse_unexpected_token_after_dot = unexpected token: {$actual} parse_unexpected_token_after_label = expected `while`, `for`, `loop` or `{"{"}` after a label .suggestion_remove_label = consider removing the label @@ -896,6 +918,7 @@ parse_unknown_prefix = prefix `{$prefix}` is unknown .label = unknown prefix .note = prefixed identifiers and literals are reserved since Rust 2021 .suggestion_br = use `br` for a raw byte string + .suggestion_cr = use `cr` for a raw C-string .suggestion_str = if you meant to write a string literal, use double quotes .suggestion_whitespace = consider inserting whitespace here diff --git a/compiler/rustc_parse/src/errors.rs b/compiler/rustc_parse/src/errors.rs index f813c3380fcbd..766baf6f80c75 100644 --- a/compiler/rustc_parse/src/errors.rs +++ b/compiler/rustc_parse/src/errors.rs @@ -7,8 +7,7 @@ use rustc_ast::util::parser::ExprPrecedence; use rustc_ast::{Path, Visibility}; use rustc_errors::codes::*; use rustc_errors::{ - Applicability, Diag, DiagCtxtHandle, Diagnostic, EmissionGuarantee, Level, SubdiagMessageOp, - Subdiagnostic, + Applicability, Diag, DiagCtxtHandle, Diagnostic, EmissionGuarantee, Level, Subdiagnostic, }; use rustc_macros::{Diagnostic, Subdiagnostic}; use rustc_session::errors::ExprParenthesesNeeded; @@ -30,7 +29,6 @@ pub(crate) struct AmbiguousPlus { #[derive(Diagnostic)] #[diag(parse_maybe_recover_from_bad_type_plus, code = E0178)] pub(crate) struct BadTypePlus { - pub ty: String, #[primary_span] pub span: Span, #[subdiagnostic] @@ -480,6 +478,13 @@ pub(crate) struct ExpectedExpressionFoundLet { pub comparison: Option, } +#[derive(Diagnostic)] +#[diag(parse_or_in_let_chain)] +pub(crate) struct OrInLetChain { + #[primary_span] + pub span: Span, +} + #[derive(Subdiagnostic, Clone, Copy)] #[multipart_suggestion( parse_maybe_missing_let, @@ -730,6 +735,61 @@ pub(crate) struct FoundExprWouldBeStmt { pub suggestion: ExprParenthesesNeeded, } +#[derive(Diagnostic)] +#[diag(parse_frontmatter_extra_characters_after_close)] +pub(crate) struct FrontmatterExtraCharactersAfterClose { + #[primary_span] + pub span: Span, +} + +#[derive(Diagnostic)] +#[diag(parse_frontmatter_invalid_infostring)] +#[note] +pub(crate) struct FrontmatterInvalidInfostring { + #[primary_span] + pub span: Span, +} + +#[derive(Diagnostic)] +#[diag(parse_frontmatter_invalid_opening_preceding_whitespace)] +pub(crate) struct FrontmatterInvalidOpeningPrecedingWhitespace { + #[primary_span] + pub span: Span, + #[note] + pub note_span: Span, +} + +#[derive(Diagnostic)] +#[diag(parse_frontmatter_unclosed)] +pub(crate) struct FrontmatterUnclosed { + #[primary_span] + pub span: Span, + #[note] + pub note_span: Span, +} + +#[derive(Diagnostic)] +#[diag(parse_frontmatter_invalid_close_preceding_whitespace)] +pub(crate) struct FrontmatterInvalidClosingPrecedingWhitespace { + #[primary_span] + pub span: Span, + #[note] + pub note_span: Span, +} + +#[derive(Diagnostic)] +#[diag(parse_frontmatter_length_mismatch)] +pub(crate) struct FrontmatterLengthMismatch { + #[primary_span] + pub span: Span, + #[label(parse_label_opening)] + pub opening: Span, + #[label(parse_label_close)] + pub close: Span, + pub len_opening: usize, + pub len_close: usize, +} + #[derive(Diagnostic)] #[diag(parse_leading_plus_not_supported)] pub(crate) struct LeadingPlusNotSupported { @@ -810,16 +870,16 @@ pub(crate) enum WrapInParentheses { #[derive(Diagnostic)] #[diag(parse_array_brackets_instead_of_braces)] -pub(crate) struct ArrayBracketsInsteadOfSpaces { +pub(crate) struct ArrayBracketsInsteadOfBraces { #[primary_span] pub span: Span, #[subdiagnostic] - pub sub: ArrayBracketsInsteadOfSpacesSugg, + pub sub: ArrayBracketsInsteadOfBracesSugg, } #[derive(Subdiagnostic)] #[multipart_suggestion(parse_suggestion, applicability = "maybe-incorrect")] -pub(crate) struct ArrayBracketsInsteadOfSpacesSugg { +pub(crate) struct ArrayBracketsInsteadOfBracesSugg { #[suggestion_part(code = "[")] pub left: Span, #[suggestion_part(code = "]")] @@ -1551,11 +1611,7 @@ pub(crate) struct FnTraitMissingParen { } impl Subdiagnostic for FnTraitMissingParen { - fn add_to_diag_with>( - self, - diag: &mut Diag<'_, G>, - _: &F, - ) { + fn add_to_diag(self, diag: &mut Diag<'_, G>) { diag.span_label(self.span, crate::fluent_generated::parse_fn_trait_missing_paren); diag.span_suggestion_short( self.span.shrink_to_hi(), @@ -1590,6 +1646,30 @@ pub(crate) struct ExpectedFnPathFoundFnKeyword { pub fn_token_span: Span, } +#[derive(Diagnostic)] +#[diag(parse_path_found_named_params)] +pub(crate) struct FnPathFoundNamedParams { + #[primary_span] + #[suggestion(applicability = "machine-applicable", code = "")] + pub named_param_span: Span, +} + +#[derive(Diagnostic)] +#[diag(parse_path_found_c_variadic_params)] +pub(crate) struct PathFoundCVariadicParams { + #[primary_span] + #[suggestion(applicability = "machine-applicable", code = "")] + pub span: Span, +} + +#[derive(Diagnostic)] +#[diag(parse_path_found_attribute_in_params)] +pub(crate) struct PathFoundAttributeInParams { + #[primary_span] + #[suggestion(applicability = "machine-applicable", code = "")] + pub span: Span, +} + #[derive(Diagnostic)] #[diag(parse_path_double_colon)] pub(crate) struct PathSingleColon { @@ -1598,9 +1678,6 @@ pub(crate) struct PathSingleColon { #[suggestion(applicability = "machine-applicable", code = ":", style = "verbose")] pub suggestion: Span, - - #[note(parse_type_ascription_removed)] - pub type_ascription: bool, } #[derive(Diagnostic)] @@ -1617,9 +1694,6 @@ pub(crate) struct ColonAsSemi { #[primary_span] #[suggestion(applicability = "machine-applicable", code = ";", style = "verbose")] pub span: Span, - - #[note(parse_type_ascription_removed)] - pub type_ascription: bool, } #[derive(Diagnostic)] @@ -1696,10 +1770,10 @@ pub(crate) struct SelfArgumentPointer { #[derive(Diagnostic)] #[diag(parse_unexpected_token_after_dot)] -pub(crate) struct UnexpectedTokenAfterDot<'a> { +pub(crate) struct UnexpectedTokenAfterDot { #[primary_span] pub span: Span, - pub actual: Cow<'a, str>, + pub actual: String, } #[derive(Diagnostic)] @@ -2152,6 +2226,13 @@ pub(crate) enum UnknownPrefixSugg { style = "verbose" )] UseBr(#[primary_span] Span), + #[suggestion( + parse_suggestion_cr, + code = "cr", + applicability = "maybe-incorrect", + style = "verbose" + )] + UseCr(#[primary_span] Span), #[suggestion( parse_suggestion_whitespace, code = " ", @@ -2769,17 +2850,6 @@ pub(crate) enum UnexpectedExpressionInPatternSugg { /// The statement's block's indentation. indentation: String, }, - - #[multipart_suggestion( - parse_unexpected_expr_in_pat_inline_const_sugg, - applicability = "maybe-incorrect" - )] - InlineConst { - #[suggestion_part(code = "const {{ ")] - start_span: Span, - #[suggestion_part(code = " }}")] - end_span: Span, - }, } #[derive(Diagnostic)] @@ -2817,6 +2887,8 @@ pub(crate) struct ReturnTypesUseThinArrow { pub(crate) struct NeedPlusAfterTraitObjectLifetime { #[primary_span] pub span: Span, + #[suggestion(code = " + /* Trait */", applicability = "has-placeholders")] + pub suggestion: Span, } #[derive(Diagnostic)] diff --git a/compiler/rustc_parse/src/lexer/diagnostics.rs b/compiler/rustc_parse/src/lexer/diagnostics.rs index e1f19beb53aee..0b97d4e6993bb 100644 --- a/compiler/rustc_parse/src/lexer/diagnostics.rs +++ b/compiler/rustc_parse/src/lexer/diagnostics.rs @@ -1,14 +1,17 @@ use rustc_ast::token::Delimiter; use rustc_errors::Diag; +use rustc_session::parse::ParseSess; use rustc_span::Span; use rustc_span::source_map::SourceMap; use super::UnmatchedDelim; +use crate::errors::MismatchedClosingDelimiter; +use crate::pprust; #[derive(Default)] pub(super) struct TokenTreeDiagInfo { /// Stack of open delimiters and their spans. Used for error message. - pub open_braces: Vec<(Delimiter, Span)>, + pub open_delimiters: Vec<(Delimiter, Span)>, pub unmatched_delims: Vec, /// Used only for error recovery when arriving to EOF with mismatched braces. @@ -108,7 +111,7 @@ pub(super) fn report_suspicious_mismatch_block( } else { // If there is no suspicious span, give the last properly closed block may help if let Some(parent) = diag_info.matching_block_spans.last() - && diag_info.open_braces.last().is_none() + && diag_info.open_delimiters.last().is_none() && diag_info.empty_block_spans.iter().all(|&sp| sp != parent.0.to(parent.1)) { err.span_label(parent.0, "this opening brace..."); @@ -116,3 +119,24 @@ pub(super) fn report_suspicious_mismatch_block( } } } + +pub(crate) fn make_unclosed_delims_error( + unmatched: UnmatchedDelim, + psess: &ParseSess, +) -> Option> { + // `None` here means an `Eof` was found. We already emit those errors elsewhere, we add them to + // `unmatched_delims` only for error recovery in the `Parser`. + let found_delim = unmatched.found_delim?; + let mut spans = vec![unmatched.found_span]; + if let Some(sp) = unmatched.unclosed_span { + spans.push(sp); + }; + let err = psess.dcx().create_err(MismatchedClosingDelimiter { + spans, + delimiter: pprust::token_kind_to_string(&found_delim.as_close_token_kind()).to_string(), + unmatched: unmatched.found_span, + opening_candidate: unmatched.candidate_span, + unclosed: unmatched.unclosed_span, + }); + Some(err) +} diff --git a/compiler/rustc_parse/src/lexer/mod.rs b/compiler/rustc_parse/src/lexer/mod.rs index 1d17290e1c706..78c5742414b81 100644 --- a/compiler/rustc_parse/src/lexer/mod.rs +++ b/compiler/rustc_parse/src/lexer/mod.rs @@ -1,25 +1,28 @@ use std::ops::Range; +use diagnostics::make_unclosed_delims_error; use rustc_ast::ast::{self, AttrStyle}; use rustc_ast::token::{self, CommentKind, Delimiter, IdentIsRaw, Token, TokenKind}; use rustc_ast::tokenstream::TokenStream; use rustc_ast::util::unicode::contains_text_flow_control_chars; use rustc_errors::codes::*; use rustc_errors::{Applicability, Diag, DiagCtxtHandle, StashKey}; -use rustc_lexer::unescape::{self, EscapeError, Mode}; -use rustc_lexer::{Base, Cursor, DocStyle, LiteralKind, RawStrError}; +use rustc_lexer::{ + Base, Cursor, DocStyle, FrontmatterAllowed, LiteralKind, RawStrError, is_whitespace, +}; +use rustc_literal_escaper::{EscapeError, Mode, unescape_mixed, unescape_unicode}; use rustc_session::lint::BuiltinLintDiag; use rustc_session::lint::builtin::{ RUST_2021_PREFIXES_INCOMPATIBLE_SYNTAX, RUST_2024_GUARDED_STRING_INCOMPATIBLE_SYNTAX, TEXT_DIRECTION_CODEPOINT_IN_COMMENT, }; use rustc_session::parse::ParseSess; -use rustc_span::{BytePos, Pos, Span, Symbol}; +use rustc_span::{BytePos, Pos, Span, Symbol, sym}; use tracing::debug; +use crate::errors; use crate::lexer::diagnostics::TokenTreeDiagInfo; use crate::lexer::unicode_chars::UNICODE_ARRAY; -use crate::{errors, make_unclosed_delims_error}; mod diagnostics; mod tokentrees; @@ -55,7 +58,7 @@ pub(crate) fn lex_token_trees<'psess, 'src>( start_pos = start_pos + BytePos::from_usize(shebang_len); } - let cursor = Cursor::new(src); + let cursor = Cursor::new(src, FrontmatterAllowed::Yes); let mut lexer = Lexer { psess, start_pos, @@ -192,6 +195,11 @@ impl<'psess, 'src> Lexer<'psess, 'src> { let content = self.str_from_to(content_start, content_end); self.cook_doc_comment(content_start, content, CommentKind::Block, doc_style) } + rustc_lexer::TokenKind::Frontmatter { has_invalid_preceding_whitespace, invalid_infostring } => { + self.validate_frontmatter(start, has_invalid_preceding_whitespace, invalid_infostring); + preceded_by_whitespace = true; + continue; + } rustc_lexer::TokenKind::Whitespace => { preceded_by_whitespace = true; continue; @@ -255,8 +263,7 @@ impl<'psess, 'src> Lexer<'psess, 'src> { // was consumed. let lit_start = start + BytePos(prefix_len); self.pos = lit_start; - self.cursor = Cursor::new(&str_before[prefix_len as usize..]); - + self.cursor = Cursor::new(&str_before[prefix_len as usize..], FrontmatterAllowed::No); self.report_unknown_prefix(start); let prefix_span = self.mk_sp(start, lit_start); return (Token::new(self.ident(start), prefix_span), preceded_by_whitespace); @@ -361,7 +368,7 @@ impl<'psess, 'src> Lexer<'psess, 'src> { // Reset the state so we just lex the `'r`. let lt_start = start + BytePos(2); self.pos = lt_start; - self.cursor = Cursor::new(&str_before[2 as usize..]); + self.cursor = Cursor::new(&str_before[2 as usize..], FrontmatterAllowed::No); let lifetime_name = self.str_from(start); let ident = Symbol::intern(lifetime_name); @@ -371,12 +378,12 @@ impl<'psess, 'src> Lexer<'psess, 'src> { rustc_lexer::TokenKind::Semi => token::Semi, rustc_lexer::TokenKind::Comma => token::Comma, rustc_lexer::TokenKind::Dot => token::Dot, - rustc_lexer::TokenKind::OpenParen => token::OpenDelim(Delimiter::Parenthesis), - rustc_lexer::TokenKind::CloseParen => token::CloseDelim(Delimiter::Parenthesis), - rustc_lexer::TokenKind::OpenBrace => token::OpenDelim(Delimiter::Brace), - rustc_lexer::TokenKind::CloseBrace => token::CloseDelim(Delimiter::Brace), - rustc_lexer::TokenKind::OpenBracket => token::OpenDelim(Delimiter::Bracket), - rustc_lexer::TokenKind::CloseBracket => token::CloseDelim(Delimiter::Bracket), + rustc_lexer::TokenKind::OpenParen => token::OpenParen, + rustc_lexer::TokenKind::CloseParen => token::CloseParen, + rustc_lexer::TokenKind::OpenBrace => token::OpenBrace, + rustc_lexer::TokenKind::CloseBrace => token::CloseBrace, + rustc_lexer::TokenKind::OpenBracket => token::OpenBracket, + rustc_lexer::TokenKind::CloseBracket => token::CloseBracket, rustc_lexer::TokenKind::At => token::At, rustc_lexer::TokenKind::Pound => token::Pound, rustc_lexer::TokenKind::Tilde => token::Tilde, @@ -474,6 +481,91 @@ impl<'psess, 'src> Lexer<'psess, 'src> { } } + fn validate_frontmatter( + &self, + start: BytePos, + has_invalid_preceding_whitespace: bool, + invalid_infostring: bool, + ) { + let s = self.str_from(start); + let real_start = s.find("---").unwrap(); + let frontmatter_opening_pos = BytePos(real_start as u32) + start; + let s_new = &s[real_start..]; + let within = s_new.trim_start_matches('-'); + let len_opening = s_new.len() - within.len(); + + let frontmatter_opening_end_pos = frontmatter_opening_pos + BytePos(len_opening as u32); + if has_invalid_preceding_whitespace { + let line_start = + BytePos(s[..real_start].rfind("\n").map_or(0, |i| i as u32 + 1)) + start; + let span = self.mk_sp(line_start, frontmatter_opening_end_pos); + let label_span = self.mk_sp(line_start, frontmatter_opening_pos); + self.dcx().emit_err(errors::FrontmatterInvalidOpeningPrecedingWhitespace { + span, + note_span: label_span, + }); + } + + if invalid_infostring { + let line_end = s[real_start..].find('\n').unwrap_or(s[real_start..].len()); + let span = self.mk_sp( + frontmatter_opening_end_pos, + frontmatter_opening_pos + BytePos(line_end as u32), + ); + self.dcx().emit_err(errors::FrontmatterInvalidInfostring { span }); + } + + let last_line_start = within.rfind('\n').map_or(0, |i| i + 1); + let last_line = &within[last_line_start..]; + let last_line_trimmed = last_line.trim_start_matches(is_whitespace); + let last_line_start_pos = frontmatter_opening_end_pos + BytePos(last_line_start as u32); + + let frontmatter_span = self.mk_sp(frontmatter_opening_pos, self.pos); + self.psess.gated_spans.gate(sym::frontmatter, frontmatter_span); + + if !last_line_trimmed.starts_with("---") { + let label_span = self.mk_sp(frontmatter_opening_pos, frontmatter_opening_end_pos); + self.dcx().emit_err(errors::FrontmatterUnclosed { + span: frontmatter_span, + note_span: label_span, + }); + return; + } + + if last_line_trimmed.len() != last_line.len() { + let line_end = last_line_start_pos + BytePos(last_line.len() as u32); + let span = self.mk_sp(last_line_start_pos, line_end); + let whitespace_end = + last_line_start_pos + BytePos((last_line.len() - last_line_trimmed.len()) as u32); + let label_span = self.mk_sp(last_line_start_pos, whitespace_end); + self.dcx().emit_err(errors::FrontmatterInvalidClosingPrecedingWhitespace { + span, + note_span: label_span, + }); + } + + let rest = last_line_trimmed.trim_start_matches('-'); + let len_close = last_line_trimmed.len() - rest.len(); + if len_close != len_opening { + let span = self.mk_sp(frontmatter_opening_pos, self.pos); + let opening = self.mk_sp(frontmatter_opening_pos, frontmatter_opening_end_pos); + let last_line_close_pos = last_line_start_pos + BytePos(len_close as u32); + let close = self.mk_sp(last_line_start_pos, last_line_close_pos); + self.dcx().emit_err(errors::FrontmatterLengthMismatch { + span, + opening, + close, + len_opening, + len_close, + }); + } + + if !rest.trim_matches(is_whitespace).is_empty() { + let span = self.mk_sp(last_line_start_pos, self.pos); + self.dcx().emit_err(errors::FrontmatterExtraCharactersAfterClose { span }); + } + } + fn cook_doc_comment( &self, content_start: BytePos, @@ -789,13 +881,14 @@ impl<'psess, 'src> Lexer<'psess, 'src> { fn report_unknown_prefix(&self, start: BytePos) { let prefix_span = self.mk_sp(start, self.pos); let prefix = self.str_from_to(start, self.pos); - let expn_data = prefix_span.ctxt().outer_expn_data(); if expn_data.edition.at_least_rust_2021() { // In Rust 2021, this is a hard error. let sugg = if prefix == "rb" { Some(errors::UnknownPrefixSugg::UseBr(prefix_span)) + } else if prefix == "rc" { + Some(errors::UnknownPrefixSugg::UseCr(prefix_span)) } else if expn_data.is_root() { if self.cursor.first() == '\'' && let Some(start) = self.last_lifetime @@ -838,7 +931,7 @@ impl<'psess, 'src> Lexer<'psess, 'src> { let space_pos = start + BytePos(1); let space_span = self.mk_sp(space_pos, space_pos); - let mut cursor = Cursor::new(str_before); + let mut cursor = Cursor::new(str_before, FrontmatterAllowed::No); let (is_string, span, unterminated) = match cursor.guarded_double_quoted_string() { Some(rustc_lexer::GuardedStr { n_hashes, terminated, token_len }) => { @@ -904,7 +997,7 @@ impl<'psess, 'src> Lexer<'psess, 'src> { // For backwards compatibility, roll back to after just the first `#` // and return the `Pound` token. self.pos = start + BytePos(1); - self.cursor = Cursor::new(&str_before[1..]); + self.cursor = Cursor::new(&str_before[1..], FrontmatterAllowed::No); token::Pound } } @@ -970,9 +1063,7 @@ impl<'psess, 'src> Lexer<'psess, 'src> { postfix_len: u32, ) -> (token::LitKind, Symbol) { self.cook_common(kind, mode, start, end, prefix_len, postfix_len, |src, mode, callback| { - unescape::unescape_unicode(src, mode, &mut |span, result| { - callback(span, result.map(drop)) - }) + unescape_unicode(src, mode, &mut |span, result| callback(span, result.map(drop))) }) } @@ -986,9 +1077,7 @@ impl<'psess, 'src> Lexer<'psess, 'src> { postfix_len: u32, ) -> (token::LitKind, Symbol) { self.cook_common(kind, mode, start, end, prefix_len, postfix_len, |src, mode, callback| { - unescape::unescape_mixed(src, mode, &mut |span, result| { - callback(span, result.map(drop)) - }) + unescape_mixed(src, mode, &mut |span, result| callback(span, result.map(drop))) }) } } diff --git a/compiler/rustc_parse/src/lexer/tokentrees.rs b/compiler/rustc_parse/src/lexer/tokentrees.rs index b3f83a320241e..fbea958dcc598 100644 --- a/compiler/rustc_parse/src/lexer/tokentrees.rs +++ b/compiler/rustc_parse/src/lexer/tokentrees.rs @@ -18,38 +18,33 @@ impl<'psess, 'src> Lexer<'psess, 'src> { let mut buf = Vec::new(); loop { - match self.token.kind { - token::OpenDelim(delim) => { - // Invisible delimiters cannot occur here because `TokenTreesReader` parses - // code directly from strings, with no macro expansion involved. - debug_assert!(!matches!(delim, Delimiter::Invisible(_))); - buf.push(match self.lex_token_tree_open_delim(delim) { - Ok(val) => val, - Err(errs) => return Err(errs), - }) - } - token::CloseDelim(delim) => { - // Invisible delimiters cannot occur here because `TokenTreesReader` parses - // code directly from strings, with no macro expansion involved. - debug_assert!(!matches!(delim, Delimiter::Invisible(_))); - return if is_delimited { - Ok((open_spacing, TokenStream::new(buf))) - } else { - Err(vec![self.close_delim_err(delim)]) - }; - } - token::Eof => { - return if is_delimited { - Err(vec![self.eof_err()]) - } else { - Ok((open_spacing, TokenStream::new(buf))) - }; - } - _ => { - // Get the next normal token. - let (this_tok, this_spacing) = self.bump(); - buf.push(TokenTree::Token(this_tok, this_spacing)); - } + if let Some(delim) = self.token.kind.open_delim() { + // Invisible delimiters cannot occur here because `TokenTreesReader` parses + // code directly from strings, with no macro expansion involved. + debug_assert!(!matches!(delim, Delimiter::Invisible(_))); + buf.push(match self.lex_token_tree_open_delim(delim) { + Ok(val) => val, + Err(errs) => return Err(errs), + }) + } else if let Some(delim) = self.token.kind.close_delim() { + // Invisible delimiters cannot occur here because `TokenTreesReader` parses + // code directly from strings, with no macro expansion involved. + debug_assert!(!matches!(delim, Delimiter::Invisible(_))); + return if is_delimited { + Ok((open_spacing, TokenStream::new(buf))) + } else { + Err(vec![self.close_delim_err(delim)]) + }; + } else if self.token.kind == token::Eof { + return if is_delimited { + Err(vec![self.eof_err()]) + } else { + Ok((open_spacing, TokenStream::new(buf))) + }; + } else { + // Get the next normal token. + let (this_tok, this_spacing) = self.bump(); + buf.push(TokenTree::Token(this_tok, this_spacing)); } } } @@ -59,8 +54,8 @@ impl<'psess, 'src> Lexer<'psess, 'src> { let mut err = self.dcx().struct_span_err(self.token.span, msg); let unclosed_delimiter_show_limit = 5; - let len = usize::min(unclosed_delimiter_show_limit, self.diag_info.open_braces.len()); - for &(_, span) in &self.diag_info.open_braces[..len] { + let len = usize::min(unclosed_delimiter_show_limit, self.diag_info.open_delimiters.len()); + for &(_, span) in &self.diag_info.open_delimiters[..len] { err.span_label(span, "unclosed delimiter"); self.diag_info.unmatched_delims.push(UnmatchedDelim { found_delim: None, @@ -70,19 +65,19 @@ impl<'psess, 'src> Lexer<'psess, 'src> { }); } - if let Some((_, span)) = self.diag_info.open_braces.get(unclosed_delimiter_show_limit) - && self.diag_info.open_braces.len() >= unclosed_delimiter_show_limit + 2 + if let Some((_, span)) = self.diag_info.open_delimiters.get(unclosed_delimiter_show_limit) + && self.diag_info.open_delimiters.len() >= unclosed_delimiter_show_limit + 2 { err.span_label( *span, format!( "another {} unclosed delimiters begin from here", - self.diag_info.open_braces.len() - unclosed_delimiter_show_limit + self.diag_info.open_delimiters.len() - unclosed_delimiter_show_limit ), ); } - if let Some((delim, _)) = self.diag_info.open_braces.last() { + if let Some((delim, _)) = self.diag_info.open_delimiters.last() { report_suspicious_mismatch_block( &mut err, &self.diag_info, @@ -100,7 +95,7 @@ impl<'psess, 'src> Lexer<'psess, 'src> { // The span for beginning of the delimited section. let pre_span = self.token.span; - self.diag_info.open_braces.push((open_delim, self.token.span)); + self.diag_info.open_delimiters.push((open_delim, self.token.span)); // Lex the token trees within the delimiters. // We stop at any delimiter so we can try to recover if the user @@ -111,14 +106,15 @@ impl<'psess, 'src> Lexer<'psess, 'src> { let delim_span = DelimSpan::from_pair(pre_span, self.token.span); let sm = self.psess.source_map(); - let close_spacing = match self.token.kind { - // Correct delimiter. - token::CloseDelim(close_delim) if close_delim == open_delim => { - let (open_brace, open_brace_span) = self.diag_info.open_braces.pop().unwrap(); - let close_brace_span = self.token.span; + let close_spacing = if let Some(close_delim) = self.token.kind.close_delim() { + if close_delim == open_delim { + // Correct delimiter. + let (open_delimiter, open_delimiter_span) = + self.diag_info.open_delimiters.pop().unwrap(); + let close_delimiter_span = self.token.span; if tts.is_empty() && close_delim == Delimiter::Brace { - let empty_block_span = open_brace_span.to(close_brace_span); + let empty_block_span = open_delimiter_span.to(close_delimiter_span); if !sm.is_multiline(empty_block_span) { // Only track if the block is in the form of `{}`, otherwise it is // likely that it was written on purpose. @@ -127,16 +123,17 @@ impl<'psess, 'src> Lexer<'psess, 'src> { } // only add braces - if let (Delimiter::Brace, Delimiter::Brace) = (open_brace, open_delim) { + if let (Delimiter::Brace, Delimiter::Brace) = (open_delimiter, open_delim) { // Add all the matching spans, we will sort by span later - self.diag_info.matching_block_spans.push((open_brace_span, close_brace_span)); + self.diag_info + .matching_block_spans + .push((open_delimiter_span, close_delimiter_span)); } // Move past the closing delimiter. self.bump_minimal() - } - // Incorrect delimiter. - token::CloseDelim(close_delim) => { + } else { + // Incorrect delimiter. let mut unclosed_delimiter = None; let mut candidate = None; @@ -146,18 +143,18 @@ impl<'psess, 'src> Lexer<'psess, 'src> { // This is a conservative error: only report the last unclosed // delimiter. The previous unclosed delimiters could actually be // closed! The lexer just hasn't gotten to them yet. - if let Some(&(_, sp)) = self.diag_info.open_braces.last() { + if let Some(&(_, sp)) = self.diag_info.open_delimiters.last() { unclosed_delimiter = Some(sp); }; - for (brace, brace_span) in &self.diag_info.open_braces { - if same_indentation_level(sm, self.token.span, *brace_span) - && brace == &close_delim + for (delimiter, delimiter_span) in &self.diag_info.open_delimiters { + if same_indentation_level(sm, self.token.span, *delimiter_span) + && delimiter == &close_delim { // high likelihood of these two corresponding - candidate = Some(*brace_span); + candidate = Some(*delimiter_span); } } - let (_, _) = self.diag_info.open_braces.pop().unwrap(); + let (_, _) = self.diag_info.open_delimiters.pop().unwrap(); self.diag_info.unmatched_delims.push(UnmatchedDelim { found_delim: Some(close_delim), found_span: self.token.span, @@ -165,7 +162,7 @@ impl<'psess, 'src> Lexer<'psess, 'src> { candidate_span: candidate, }); } else { - self.diag_info.open_braces.pop(); + self.diag_info.open_delimiters.pop(); } // If the incorrect delimiter matches an earlier opening @@ -175,21 +172,20 @@ impl<'psess, 'src> Lexer<'psess, 'src> { // fn foo() { // bar(baz( // } // Incorrect delimiter but matches the earlier `{` - if !self.diag_info.open_braces.iter().any(|&(b, _)| b == close_delim) { + if !self.diag_info.open_delimiters.iter().any(|&(d, _)| d == close_delim) { self.bump_minimal() } else { // The choice of value here doesn't matter. Spacing::Alone } } - token::Eof => { - // Silently recover, the EOF token will be seen again - // and an error emitted then. Thus we don't pop from - // self.open_braces here. The choice of spacing value here - // doesn't matter. - Spacing::Alone - } - _ => unreachable!(), + } else { + assert_eq!(self.token.kind, token::Eof); + // Silently recover, the EOF token will be seen again + // and an error emitted then. Thus we don't pop from + // self.open_delimiters here. The choice of spacing value here + // doesn't matter. + Spacing::Alone }; let spacing = DelimSpacing::new(open_spacing, close_spacing); diff --git a/compiler/rustc_parse/src/lexer/unescape_error_reporting.rs b/compiler/rustc_parse/src/lexer/unescape_error_reporting.rs index 2e066f0179c3f..ec59a1a01314e 100644 --- a/compiler/rustc_parse/src/lexer/unescape_error_reporting.rs +++ b/compiler/rustc_parse/src/lexer/unescape_error_reporting.rs @@ -4,7 +4,7 @@ use std::iter::once; use std::ops::Range; use rustc_errors::{Applicability, DiagCtxtHandle, ErrorGuaranteed}; -use rustc_lexer::unescape::{EscapeError, Mode}; +use rustc_literal_escaper::{EscapeError, Mode}; use rustc_span::{BytePos, Span}; use tracing::debug; diff --git a/compiler/rustc_parse/src/lexer/unicode_chars.rs b/compiler/rustc_parse/src/lexer/unicode_chars.rs index ff03b42484b41..751d13af4331b 100644 --- a/compiler/rustc_parse/src/lexer/unicode_chars.rs +++ b/compiler/rustc_parse/src/lexer/unicode_chars.rs @@ -5,7 +5,7 @@ use rustc_span::{BytePos, Pos, Span, kw}; use super::Lexer; use crate::errors::TokenSubstitution; -use crate::token::{self, Delimiter}; +use crate::token; #[rustfmt::skip] // for line breaks pub(super) static UNICODE_ARRAY: &[(char, &str, &str)] = &[ @@ -315,12 +315,12 @@ const ASCII_ARRAY: &[(&str, &str, Option)] = &[ ("!", "Exclamation Mark", Some(token::Bang)), ("?", "Question Mark", Some(token::Question)), (".", "Period", Some(token::Dot)), - ("(", "Left Parenthesis", Some(token::OpenDelim(Delimiter::Parenthesis))), - (")", "Right Parenthesis", Some(token::CloseDelim(Delimiter::Parenthesis))), - ("[", "Left Square Bracket", Some(token::OpenDelim(Delimiter::Bracket))), - ("]", "Right Square Bracket", Some(token::CloseDelim(Delimiter::Bracket))), - ("{", "Left Curly Brace", Some(token::OpenDelim(Delimiter::Brace))), - ("}", "Right Curly Brace", Some(token::CloseDelim(Delimiter::Brace))), + ("(", "Left Parenthesis", Some(token::OpenParen)), + (")", "Right Parenthesis", Some(token::CloseParen)), + ("[", "Left Square Bracket", Some(token::OpenBracket)), + ("]", "Right Square Bracket", Some(token::CloseBracket)), + ("{", "Left Curly Brace", Some(token::OpenBrace)), + ("}", "Right Curly Brace", Some(token::CloseBrace)), ("*", "Asterisk", Some(token::Star)), ("/", "Slash", Some(token::Slash)), ("\\", "Backslash", None), @@ -376,7 +376,7 @@ pub(super) fn check_for_substitution( ascii_name, }) }; - (token.clone(), sugg) + (*token, sugg) } /// Extract string if found at current position with given delimiters diff --git a/compiler/rustc_parse/src/lib.rs b/compiler/rustc_parse/src/lib.rs index 79939aab7fc2b..d06922f1e0479 100644 --- a/compiler/rustc_parse/src/lib.rs +++ b/compiler/rustc_parse/src/lib.rs @@ -4,15 +4,14 @@ #![allow(internal_features)] #![allow(rustc::diagnostic_outside_of_impl)] #![allow(rustc::untranslatable_diagnostic)] -#![cfg_attr(doc, recursion_limit = "256")] // FIXME(nnethercote): will be removed by #124141 -#![feature(array_windows)] +#![cfg_attr(bootstrap, feature(let_chains))] #![feature(assert_matches)] #![feature(box_patterns)] #![feature(debug_closure_helpers)] #![feature(if_let_guard)] #![feature(iter_intersperse)] -#![feature(let_chains)] #![feature(string_from_utf8_lossy_owned)] +#![recursion_limit = "256"] // tidy-alphabetical-end use std::path::{Path, PathBuf}; @@ -33,7 +32,7 @@ pub const MACRO_ARGUMENTS: Option<&str> = Some("macro arguments"); #[macro_use] pub mod parser; -use parser::{Parser, make_unclosed_delims_error}; +use parser::Parser; pub mod lexer; pub mod validate_attr; diff --git a/compiler/rustc_parse/src/parser/attr.rs b/compiler/rustc_parse/src/parser/attr.rs index 53614049f087a..41d3889c44838 100644 --- a/compiler/rustc_parse/src/parser/attr.rs +++ b/compiler/rustc_parse/src/parser/attr.rs @@ -1,5 +1,6 @@ use rustc_ast as ast; use rustc_ast::token::{self, MetaVarKind}; +use rustc_ast::tokenstream::ParserRange; use rustc_ast::{Attribute, attr}; use rustc_errors::codes::*; use rustc_errors::{Diag, PResult}; @@ -8,8 +9,7 @@ use thin_vec::ThinVec; use tracing::debug; use super::{ - AttrWrapper, Capturing, FnParseMode, ForceCollect, Parser, ParserRange, PathStyle, Trailing, - UsePreAttrPos, + AttrWrapper, Capturing, FnParseMode, ForceCollect, Parser, PathStyle, Trailing, UsePreAttrPos, }; use crate::{errors, exp, fluent_generated as fluent}; diff --git a/compiler/rustc_parse/src/parser/attr_wrapper.rs b/compiler/rustc_parse/src/parser/attr_wrapper.rs index cff998fa13790..44fdf146f9c73 100644 --- a/compiler/rustc_parse/src/parser/attr_wrapper.rs +++ b/compiler/rustc_parse/src/parser/attr_wrapper.rs @@ -1,21 +1,18 @@ use std::borrow::Cow; -use std::{iter, mem}; +use std::mem; -use rustc_ast::token::{Delimiter, Token, TokenKind}; +use rustc_ast::token::Token; use rustc_ast::tokenstream::{ - AttrTokenStream, AttrTokenTree, AttrsTarget, DelimSpacing, DelimSpan, LazyAttrTokenStream, - Spacing, ToAttrTokenStream, + AttrsTarget, LazyAttrTokenStream, NodeRange, ParserRange, Spacing, TokenCursor, }; use rustc_ast::{self as ast, AttrVec, Attribute, HasAttrs, HasTokens}; use rustc_data_structures::fx::FxHashSet; use rustc_errors::PResult; use rustc_session::parse::ParseSess; -use rustc_span::{DUMMY_SP, Span, sym}; +use rustc_span::{DUMMY_SP, sym}; +use thin_vec::ThinVec; -use super::{ - Capturing, FlatToken, ForceCollect, NodeRange, NodeReplacement, Parser, ParserRange, - TokenCursor, Trailing, -}; +use super::{Capturing, ForceCollect, Parser, Trailing}; // When collecting tokens, this fully captures the start point. Usually its // just after outer attributes, but occasionally it's before. @@ -94,99 +91,10 @@ fn has_cfg_or_cfg_attr(attrs: &[Attribute]) -> bool { }) } -// From a value of this type we can reconstruct the `TokenStream` seen by the -// `f` callback passed to a call to `Parser::collect_tokens`, by -// replaying the getting of the tokens. This saves us producing a `TokenStream` -// if it is never needed, e.g. a captured `macro_rules!` argument that is never -// passed to a proc macro. In practice, token stream creation happens rarely -// compared to calls to `collect_tokens` (see some statistics in #78736) so we -// are doing as little up-front work as possible. -// -// This also makes `Parser` very cheap to clone, since -// there is no intermediate collection buffer to clone. -struct LazyAttrTokenStreamImpl { - start_token: (Token, Spacing), - cursor_snapshot: TokenCursor, - num_calls: u32, - break_last_token: u32, - node_replacements: Box<[NodeReplacement]>, -} - -impl ToAttrTokenStream for LazyAttrTokenStreamImpl { - fn to_attr_token_stream(&self) -> AttrTokenStream { - // The token produced by the final call to `{,inlined_}next` was not - // actually consumed by the callback. The combination of chaining the - // initial token and using `take` produces the desired result - we - // produce an empty `TokenStream` if no calls were made, and omit the - // final token otherwise. - let mut cursor_snapshot = self.cursor_snapshot.clone(); - let tokens = iter::once(FlatToken::Token(self.start_token.clone())) - .chain(iter::repeat_with(|| FlatToken::Token(cursor_snapshot.next()))) - .take(self.num_calls as usize); - - if self.node_replacements.is_empty() { - make_attr_token_stream(tokens, self.break_last_token) - } else { - let mut tokens: Vec<_> = tokens.collect(); - let mut node_replacements = self.node_replacements.to_vec(); - node_replacements.sort_by_key(|(range, _)| range.0.start); - - #[cfg(debug_assertions)] - for [(node_range, tokens), (next_node_range, next_tokens)] in - node_replacements.array_windows() - { - assert!( - node_range.0.end <= next_node_range.0.start - || node_range.0.end >= next_node_range.0.end, - "Node ranges should be disjoint or nested: ({:?}, {:?}) ({:?}, {:?})", - node_range, - tokens, - next_node_range, - next_tokens, - ); - } - - // Process the replace ranges, starting from the highest start - // position and working our way back. If have tokens like: - // - // `#[cfg(FALSE)] struct Foo { #[cfg(FALSE)] field: bool }` - // - // Then we will generate replace ranges for both - // the `#[cfg(FALSE)] field: bool` and the entire - // `#[cfg(FALSE)] struct Foo { #[cfg(FALSE)] field: bool }` - // - // By starting processing from the replace range with the greatest - // start position, we ensure that any (outer) replace range which - // encloses another (inner) replace range will fully overwrite the - // inner range's replacement. - for (node_range, target) in node_replacements.into_iter().rev() { - assert!( - !node_range.0.is_empty(), - "Cannot replace an empty node range: {:?}", - node_range.0 - ); - - // Replace the tokens in range with zero or one `FlatToken::AttrsTarget`s, plus - // enough `FlatToken::Empty`s to fill up the rest of the range. This keeps the - // total length of `tokens` constant throughout the replacement process, allowing - // us to do all replacements without adjusting indices. - let target_len = target.is_some() as usize; - tokens.splice( - (node_range.0.start as usize)..(node_range.0.end as usize), - target.into_iter().map(|target| FlatToken::AttrsTarget(target)).chain( - iter::repeat(FlatToken::Empty).take(node_range.0.len() - target_len), - ), - ); - } - make_attr_token_stream(tokens.into_iter(), self.break_last_token) - } - } -} - impl<'a> Parser<'a> { pub(super) fn collect_pos(&self) -> CollectPos { CollectPos { - start_token: (self.token.clone(), self.token_spacing), + start_token: (self.token, self.token_spacing), cursor_snapshot: self.token_cursor.clone(), start_pos: self.num_bump_calls, } @@ -387,10 +295,10 @@ impl<'a> Parser<'a> { // This is hot enough for `deep-vector` that checking the conditions for an empty iterator // is measurably faster than actually executing the iterator. - let node_replacements: Box<[_]> = if parser_replacements_start == parser_replacements_end + let node_replacements = if parser_replacements_start == parser_replacements_end && inner_attr_parser_replacements.is_empty() { - Box::new([]) + ThinVec::new() } else { // Grab any replace ranges that occur *inside* the current AST node. Convert them // from `ParserRange` form to `NodeRange` form. We will perform the actual @@ -429,13 +337,13 @@ impl<'a> Parser<'a> { // - `attrs`: includes the outer and the inner attr. // - `tokens`: lazy tokens for `g` (with its inner attr deleted). - let tokens = LazyAttrTokenStream::new(LazyAttrTokenStreamImpl { - start_token: collect_pos.start_token, - cursor_snapshot: collect_pos.cursor_snapshot, + let tokens = LazyAttrTokenStream::new_pending( + collect_pos.start_token, + collect_pos.cursor_snapshot, num_calls, - break_last_token: self.break_last_token, + self.break_last_token, node_replacements, - }); + ); let mut tokens_used = false; // If in "definite capture mode" we need to register a replace range @@ -483,71 +391,6 @@ impl<'a> Parser<'a> { } } -/// Converts a flattened iterator of tokens (including open and close delimiter tokens) into an -/// `AttrTokenStream`, creating an `AttrTokenTree::Delimited` for each matching pair of open and -/// close delims. -fn make_attr_token_stream( - iter: impl Iterator, - break_last_token: u32, -) -> AttrTokenStream { - #[derive(Debug)] - struct FrameData { - // This is `None` for the first frame, `Some` for all others. - open_delim_sp: Option<(Delimiter, Span, Spacing)>, - inner: Vec, - } - // The stack always has at least one element. Storing it separately makes for shorter code. - let mut stack_top = FrameData { open_delim_sp: None, inner: vec![] }; - let mut stack_rest = vec![]; - for flat_token in iter { - match flat_token { - FlatToken::Token((Token { kind: TokenKind::OpenDelim(delim), span }, spacing)) => { - stack_rest.push(mem::replace( - &mut stack_top, - FrameData { open_delim_sp: Some((delim, span, spacing)), inner: vec![] }, - )); - } - FlatToken::Token((Token { kind: TokenKind::CloseDelim(delim), span }, spacing)) => { - let frame_data = mem::replace(&mut stack_top, stack_rest.pop().unwrap()); - let (open_delim, open_sp, open_spacing) = frame_data.open_delim_sp.unwrap(); - assert!( - open_delim.eq_ignoring_invisible_origin(&delim), - "Mismatched open/close delims: open={open_delim:?} close={span:?}" - ); - let dspan = DelimSpan::from_pair(open_sp, span); - let dspacing = DelimSpacing::new(open_spacing, spacing); - let stream = AttrTokenStream::new(frame_data.inner); - let delimited = AttrTokenTree::Delimited(dspan, dspacing, delim, stream); - stack_top.inner.push(delimited); - } - FlatToken::Token((token, spacing)) => { - stack_top.inner.push(AttrTokenTree::Token(token, spacing)) - } - FlatToken::AttrsTarget(target) => { - stack_top.inner.push(AttrTokenTree::AttrsTarget(target)) - } - FlatToken::Empty => {} - } - } - - if break_last_token > 0 { - let last_token = stack_top.inner.pop().unwrap(); - if let AttrTokenTree::Token(last_token, spacing) = last_token { - let (unglued, _) = last_token.kind.break_two_token_op(break_last_token).unwrap(); - - // Tokens are always ASCII chars, so we can use byte arithmetic here. - let mut first_span = last_token.span.shrink_to_lo(); - first_span = - first_span.with_hi(first_span.lo() + rustc_span::BytePos(break_last_token)); - - stack_top.inner.push(AttrTokenTree::Token(Token::new(unglued, first_span), spacing)); - } else { - panic!("Unexpected last token {last_token:?}") - } - } - AttrTokenStream::new(stack_top.inner) -} - /// Tokens are needed if: /// - any non-single-segment attributes (other than doc comments) are present, /// e.g. `rustfmt::skip`; or @@ -562,14 +405,3 @@ fn needs_tokens(attrs: &[ast::Attribute]) -> bool { } }) } - -// Some types are used a lot. Make sure they don't unintentionally get bigger. -#[cfg(target_pointer_width = "64")] -mod size_asserts { - use rustc_data_structures::static_assert_size; - - use super::*; - // tidy-alphabetical-start - static_assert_size!(LazyAttrTokenStreamImpl, 96); - // tidy-alphabetical-end -} diff --git a/compiler/rustc_parse/src/parser/diagnostics.rs b/compiler/rustc_parse/src/parser/diagnostics.rs index ef044fe9d6385..23c8db7bca784 100644 --- a/compiler/rustc_parse/src/parser/diagnostics.rs +++ b/compiler/rustc_parse/src/parser/diagnostics.rs @@ -4,7 +4,7 @@ use std::ops::{Deref, DerefMut}; use ast::token::IdentIsRaw; use rustc_ast as ast; use rustc_ast::ptr::P; -use rustc_ast::token::{self, Delimiter, Lit, LitKind, Token, TokenKind}; +use rustc_ast::token::{self, Lit, LitKind, Token, TokenKind}; use rustc_ast::util::parser::AssocOp; use rustc_ast::{ AngleBracketedArg, AngleBracketedArgs, AnonConst, AttrVec, BinOpKind, BindingMode, Block, @@ -304,10 +304,10 @@ impl<'a> Parser<'a> { TokenKind::Comma, TokenKind::Semi, TokenKind::PathSep, - TokenKind::OpenDelim(Delimiter::Brace), - TokenKind::OpenDelim(Delimiter::Parenthesis), - TokenKind::CloseDelim(Delimiter::Brace), - TokenKind::CloseDelim(Delimiter::Parenthesis), + TokenKind::OpenBrace, + TokenKind::OpenParen, + TokenKind::CloseBrace, + TokenKind::CloseParen, ]; if let TokenKind::DocComment(..) = self.prev_token.kind && valid_follow.contains(&self.token.kind) @@ -322,7 +322,7 @@ impl<'a> Parser<'a> { let mut recovered_ident = None; // we take this here so that the correct original token is retained in // the diagnostic, regardless of eager recovery. - let bad_token = self.token.clone(); + let bad_token = self.token; // suggest prepending a keyword in identifier position with `r#` let suggest_raw = if let Some((ident, IdentIsRaw::No)) = self.token.ident() @@ -382,7 +382,7 @@ impl<'a> Parser<'a> { // if the previous token is a valid keyword // that might use a generic, then suggest a correct // generic placement (later on) - let maybe_keyword = self.prev_token.clone(); + let maybe_keyword = self.prev_token; if valid_prev_keywords.into_iter().any(|x| maybe_keyword.is_keyword(x)) { // if we have a valid keyword, attempt to parse generics // also obtain the keywords symbol @@ -507,7 +507,7 @@ impl<'a> Parser<'a> { } else if !sm.is_multiline(self.prev_token.span.until(self.token.span)) { // The current token is in the same line as the prior token, not recoverable. } else if [token::Comma, token::Colon].contains(&self.token.kind) - && self.prev_token == token::CloseDelim(Delimiter::Parenthesis) + && self.prev_token == token::CloseParen { // Likely typo: The current token is on a new line and is expected to be // `.`, `;`, `?`, or an operator after a close delimiter token. @@ -518,8 +518,7 @@ impl<'a> Parser<'a> { // ^ // https://github.com/rust-lang/rust/issues/72253 } else if self.look_ahead(1, |t| { - t == &token::CloseDelim(Delimiter::Brace) - || t.can_begin_expr() && *t != token::Colon + t == &token::CloseBrace || t.can_begin_expr() && *t != token::Colon }) && [token::Comma, token::Colon].contains(&self.token.kind) { // Likely typo: `,` → `;` or `:` → `;`. This is triggered if the current token is @@ -530,14 +529,14 @@ impl<'a> Parser<'a> { // let y = 42; let guar = self.dcx().emit_err(ExpectedSemi { span: self.token.span, - token: self.token.clone(), + token: self.token, unexpected_token_label: None, sugg: ExpectedSemiSugg::ChangeToSemi(self.token.span), }); self.bump(); return Ok(guar); } else if self.look_ahead(0, |t| { - t == &token::CloseDelim(Delimiter::Brace) + t == &token::CloseBrace || ((t.can_begin_expr() || t.can_begin_item()) && t != &token::Semi && t != &token::Pound) @@ -555,7 +554,7 @@ impl<'a> Parser<'a> { let span = self.prev_token.span.shrink_to_hi(); let guar = self.dcx().emit_err(ExpectedSemi { span, - token: self.token.clone(), + token: self.token, unexpected_token_label: Some(self.token.span), sugg: ExpectedSemiSugg::AddSemi(span), }); @@ -609,6 +608,8 @@ impl<'a> Parser<'a> { // FIXME: translation requires list formatting (for `expect`) let mut err = self.dcx().struct_span_err(self.token.span, msg_exp); + self.label_expected_raw_ref(&mut err); + // Look for usages of '=>' where '>=' was probably intended if self.token == token::FatArrow && expected.iter().any(|tok| matches!(tok, TokenType::Operator | TokenType::Le)) @@ -673,8 +674,7 @@ impl<'a> Parser<'a> { // `pub` may be used for an item or `pub(crate)` if self.prev_token.is_ident_named(sym::public) - && (self.token.can_begin_item() - || self.token == TokenKind::OpenDelim(Delimiter::Parenthesis)) + && (self.token.can_begin_item() || self.token == TokenKind::OpenParen) { err.span_suggestion_short( self.prev_token.span, @@ -750,6 +750,25 @@ impl<'a> Parser<'a> { Err(err) } + /// Adds a label when `&raw EXPR` was written instead of `&raw const EXPR`/`&raw mut EXPR`. + /// + /// Given that not all parser diagnostics flow through `expected_one_of_not_found`, this + /// label may need added to other diagnostics emission paths as needed. + pub(super) fn label_expected_raw_ref(&mut self, err: &mut Diag<'_>) { + if self.prev_token.is_keyword(kw::Raw) + && self.expected_token_types.contains(TokenType::KwMut) + && self.expected_token_types.contains(TokenType::KwConst) + && self.token.can_begin_expr() + { + err.span_suggestions( + self.prev_token.span.shrink_to_hi(), + "`&raw` must be followed by `const` or `mut` to be a raw reference expression", + [" const".to_string(), " mut".to_string()], + Applicability::MaybeIncorrect, + ); + } + } + /// Checks if the current token or the previous token are misspelled keywords /// and adds a helpful suggestion. fn check_for_misspelled_kw(&self, err: &mut Diag<'_>, expected: &[TokenType]) { @@ -801,7 +820,7 @@ impl<'a> Parser<'a> { let span = self.prev_token.span.shrink_to_hi(); let mut err = self.dcx().create_err(ExpectedSemi { span, - token: self.token.clone(), + token: self.token, unexpected_token_label: Some(self.token.span), sugg: ExpectedSemiSugg::AddSemi(span), }); @@ -822,9 +841,7 @@ impl<'a> Parser<'a> { if expr.attrs.len() == 1 { "this attribute" } else { "these attributes" }, ), ); - if self.token == token::Pound - && self.look_ahead(1, |t| *t == token::OpenDelim(Delimiter::Bracket)) - { + if self.token == token::Pound && self.look_ahead(1, |t| *t == token::OpenBracket) { // We have // #[attr] // expr @@ -1016,9 +1033,7 @@ impl<'a> Parser<'a> { ) -> PResult<'a, P> { err.span_label(lo.to(decl_hi), "while parsing the body of this closure"); let guar = match before.kind { - token::OpenDelim(Delimiter::Brace) - if !matches!(token.kind, token::OpenDelim(Delimiter::Brace)) => - { + token::OpenBrace if token.kind != token::OpenBrace => { // `{ || () }` should have been `|| { () }` err.multipart_suggestion( "you might have meant to open the body of the closure, instead of enclosing \ @@ -1033,9 +1048,7 @@ impl<'a> Parser<'a> { self.eat_to_tokens(&[exp!(CloseBrace)]); guar } - token::OpenDelim(Delimiter::Parenthesis) - if !matches!(token.kind, token::OpenDelim(Delimiter::Brace)) => - { + token::OpenParen if token.kind != token::OpenBrace => { // We are within a function call or tuple, we can emit the error // and recover. self.eat_to_tokens(&[exp!(CloseParen), exp!(Comma)]); @@ -1050,7 +1063,7 @@ impl<'a> Parser<'a> { ); err.emit() } - _ if !matches!(token.kind, token::OpenDelim(Delimiter::Brace)) => { + _ if token.kind != token::OpenBrace => { // We don't have a heuristic to correctly identify where the block // should be closed. err.multipart_suggestion_verbose( @@ -1204,7 +1217,7 @@ impl<'a> Parser<'a> { trailing_span = trailing_span.to(self.token.span); self.bump(); } - if self.token == token::OpenDelim(Delimiter::Parenthesis) { + if self.token == token::OpenParen { // Recover from bad turbofish: `foo.collect::Vec<_>()`. segment.args = Some(AngleBracketedArgs { args, span }.into()); @@ -1449,9 +1462,7 @@ impl<'a> Parser<'a> { let modifiers = [(token::Lt, 1), (token::Gt, -1), (token::Shr, -2)]; self.consume_tts(1, &modifiers); - if !&[token::OpenDelim(Delimiter::Parenthesis), token::PathSep] - .contains(&self.token.kind) - { + if !matches!(self.token.kind, token::OpenParen | token::PathSep) { // We don't have `foo< bar >(` or `foo< bar >::`, so we rewind the // parser and bail out. self.restore_snapshot(snapshot); @@ -1489,7 +1500,7 @@ impl<'a> Parser<'a> { Err(self.dcx().create_err(err)) } } - } else if self.token == token::OpenDelim(Delimiter::Parenthesis) { + } else if self.token == token::OpenParen { // We have high certainty that this was a bad turbofish at this point. // `foo< bar >(` if let ExprKind::Binary(o, ..) = inner_op.kind @@ -1549,10 +1560,7 @@ impl<'a> Parser<'a> { self.bump(); // `(` // Consume the fn call arguments. - let modifiers = [ - (token::OpenDelim(Delimiter::Parenthesis), 1), - (token::CloseDelim(Delimiter::Parenthesis), -1), - ]; + let modifiers = [(token::OpenParen, 1), (token::CloseParen, -1)]; self.consume_tts(1, &modifiers); if self.token == token::Eof { @@ -1636,19 +1644,19 @@ impl<'a> Parser<'a> { self.bump(); // `+` let _bounds = self.parse_generic_bounds()?; - let sum_span = ty.span.to(self.prev_token.span); - let sub = match &ty.kind { TyKind::Ref(_lifetime, mut_ty) => { let lo = mut_ty.ty.span.shrink_to_lo(); let hi = self.prev_token.span.shrink_to_hi(); BadTypePlusSub::AddParen { suggestion: AddParen { lo, hi } } } - TyKind::Ptr(..) | TyKind::BareFn(..) => BadTypePlusSub::ForgotParen { span: sum_span }, - _ => BadTypePlusSub::ExpectPath { span: sum_span }, + TyKind::Ptr(..) | TyKind::BareFn(..) => { + BadTypePlusSub::ForgotParen { span: ty.span.to(self.prev_token.span) } + } + _ => BadTypePlusSub::ExpectPath { span: ty.span }, }; - self.dcx().emit_err(BadTypePlus { ty: pprust::ty_to_string(ty), span: sum_span, sub }); + self.dcx().emit_err(BadTypePlus { span: ty.span, sub }); Ok(()) } @@ -1922,10 +1930,7 @@ impl<'a> Parser<'a> { && self.token == token::Colon && self.look_ahead(1, |next| line_idx(self.token.span) < line_idx(next.span)) { - self.dcx().emit_err(ColonAsSemi { - span: self.token.span, - type_ascription: self.psess.unstable_features.is_nightly_build(), - }); + self.dcx().emit_err(ColonAsSemi { span: self.token.span }); self.bump(); return true; } @@ -1960,7 +1965,7 @@ impl<'a> Parser<'a> { fn recover_await_prefix(&mut self, await_sp: Span) -> PResult<'a, (Span, P, bool)> { let is_question = self.eat(exp!(Question)); // Handle `await? `. - let expr = if self.token == token::OpenDelim(Delimiter::Brace) { + let expr = if self.token == token::OpenBrace { // Handle `await { }`. // This needs to be handled separately from the next arm to avoid // interpreting `await { }?` as `?.await`. @@ -1996,9 +2001,7 @@ impl<'a> Parser<'a> { /// If encountering `future.await()`, consumes and emits an error. pub(super) fn recover_from_await_method_call(&mut self) { - if self.token == token::OpenDelim(Delimiter::Parenthesis) - && self.look_ahead(1, |t| t == &token::CloseDelim(Delimiter::Parenthesis)) - { + if self.token == token::OpenParen && self.look_ahead(1, |t| t == &token::CloseParen) { // future.await() let lo = self.token.span; self.bump(); // ( @@ -2011,9 +2014,7 @@ impl<'a> Parser<'a> { /// /// If encountering `x.use()`, consumes and emits an error. pub(super) fn recover_from_use(&mut self) { - if self.token == token::OpenDelim(Delimiter::Parenthesis) - && self.look_ahead(1, |t| t == &token::CloseDelim(Delimiter::Parenthesis)) - { + if self.token == token::OpenParen && self.look_ahead(1, |t| t == &token::CloseParen) { // var.use() let lo = self.token.span; self.bump(); // ( @@ -2027,7 +2028,7 @@ impl<'a> Parser<'a> { pub(super) fn try_macro_suggestion(&mut self) -> PResult<'a, P> { let is_try = self.token.is_keyword(kw::Try); let is_questionmark = self.look_ahead(1, |t| t == &token::Bang); //check for ! - let is_open = self.look_ahead(2, |t| t == &token::OpenDelim(Delimiter::Parenthesis)); //check for ( + let is_open = self.look_ahead(2, |t| t == &token::OpenParen); //check for ( if is_try && is_questionmark && is_open { let lo = self.token.span; @@ -2035,7 +2036,7 @@ impl<'a> Parser<'a> { self.bump(); //remove ! let try_span = lo.to(self.token.span); //we take the try!( span self.bump(); //remove ( - let is_empty = self.token == token::CloseDelim(Delimiter::Parenthesis); //check if the block is empty + let is_empty = self.token == token::CloseParen; //check if the block is empty self.consume_block(exp!(OpenParen), exp!(CloseParen), ConsumeClosingDelim::No); //eat the block let hi = self.token.span; self.bump(); //remove ) @@ -2130,7 +2131,7 @@ impl<'a> Parser<'a> { loop { debug!("recover_stmt_ loop {:?}", self.token); match self.token.kind { - token::OpenDelim(Delimiter::Brace) => { + token::OpenBrace => { brace_depth += 1; self.bump(); if break_on_block == BlockMode::Break && brace_depth == 1 && bracket_depth == 0 @@ -2138,11 +2139,11 @@ impl<'a> Parser<'a> { in_block = true; } } - token::OpenDelim(Delimiter::Bracket) => { + token::OpenBracket => { bracket_depth += 1; self.bump(); } - token::CloseDelim(Delimiter::Brace) => { + token::CloseBrace => { if brace_depth == 0 { debug!("recover_stmt_ return - close delim {:?}", self.token); break; @@ -2154,7 +2155,7 @@ impl<'a> Parser<'a> { break; } } - token::CloseDelim(Delimiter::Bracket) => { + token::CloseBracket => { bracket_depth -= 1; if bracket_depth < 0 { bracket_depth = 0; @@ -2201,12 +2202,10 @@ impl<'a> Parser<'a> { if let token::DocComment(..) = self.token.kind { self.dcx().emit_err(DocCommentOnParamType { span: self.token.span }); self.bump(); - } else if self.token == token::Pound - && self.look_ahead(1, |t| *t == token::OpenDelim(Delimiter::Bracket)) - { + } else if self.token == token::Pound && self.look_ahead(1, |t| *t == token::OpenBracket) { let lo = self.token.span; // Skip every token until next possible arg. - while self.token != token::CloseDelim(Delimiter::Bracket) { + while self.token != token::CloseBracket { self.bump(); } let sp = lo.to(self.token.span); @@ -2225,9 +2224,7 @@ impl<'a> Parser<'a> { // If we find a pattern followed by an identifier, it could be an (incorrect) // C-style parameter declaration. if self.check_ident() - && self.look_ahead(1, |t| { - *t == token::Comma || *t == token::CloseDelim(Delimiter::Parenthesis) - }) + && self.look_ahead(1, |t| *t == token::Comma || *t == token::CloseParen) { // `fn foo(String s) {}` let ident = self.parse_ident().unwrap(); @@ -2243,7 +2240,7 @@ impl<'a> Parser<'a> { } else if require_name && (self.token == token::Comma || self.token == token::Lt - || self.token == token::CloseDelim(Delimiter::Parenthesis)) + || self.token == token::CloseParen) { let rfc_note = "anonymous parameters are removed in the 2018 edition (see RFC 1685)"; @@ -2854,7 +2851,7 @@ impl<'a> Parser<'a> { // Check for `'a : {` if !(self.check_lifetime() && self.look_ahead(1, |t| *t == token::Colon) - && self.look_ahead(2, |t| *t == token::OpenDelim(Delimiter::Brace))) + && self.look_ahead(2, |t| *t == token::OpenBrace)) { return false; } diff --git a/compiler/rustc_parse/src/parser/expr.rs b/compiler/rustc_parse/src/parser/expr.rs index 92e83577f1b13..2a7910a6af4dd 100644 --- a/compiler/rustc_parse/src/parser/expr.rs +++ b/compiler/rustc_parse/src/parser/expr.rs @@ -4,29 +4,29 @@ use core::mem; use core::ops::{Bound, ControlFlow}; use ast::mut_visit::{self, MutVisitor}; -use ast::token::{IdentIsRaw, MetaVarKind}; +use ast::token::IdentIsRaw; use ast::{CoroutineKind, ForLoopKind, GenBlockKind, MatchKind, Pat, Path, PathSegment, Recovered}; use rustc_ast::ptr::P; -use rustc_ast::token::{self, Delimiter, Token, TokenKind}; +use rustc_ast::token::{self, Delimiter, InvisibleOrigin, MetaVarKind, Token, TokenKind}; use rustc_ast::tokenstream::TokenTree; use rustc_ast::util::case::Case; use rustc_ast::util::classify; use rustc_ast::util::parser::{AssocOp, ExprPrecedence, Fixity, prec_let_scrutinee_needs_par}; use rustc_ast::visit::{Visitor, walk_expr}; use rustc_ast::{ - self as ast, AnonConst, Arm, AttrStyle, AttrVec, BinOp, BinOpKind, BlockCheckMode, CaptureBy, - ClosureBinder, DUMMY_NODE_ID, Expr, ExprField, ExprKind, FnDecl, FnRetTy, Label, MacCall, - MetaItemLit, Movability, Param, RangeLimits, StmtKind, Ty, TyKind, UnOp, UnsafeBinderCastKind, - YieldKind, + self as ast, AnonConst, Arm, AssignOp, AssignOpKind, AttrStyle, AttrVec, BinOp, BinOpKind, + BlockCheckMode, CaptureBy, ClosureBinder, DUMMY_NODE_ID, Expr, ExprField, ExprKind, FnDecl, + FnRetTy, Label, MacCall, MetaItemLit, Movability, Param, RangeLimits, StmtKind, Ty, TyKind, + UnOp, UnsafeBinderCastKind, YieldKind, }; -use rustc_ast_pretty::pprust; use rustc_data_structures::stack::ensure_sufficient_stack; use rustc_errors::{Applicability, Diag, PResult, StashKey, Subdiagnostic}; -use rustc_lexer::unescape::unescape_char; +use rustc_literal_escaper::unescape_char; use rustc_macros::Subdiagnostic; use rustc_session::errors::{ExprParenthesesNeeded, report_lit_error}; use rustc_session::lint::BuiltinLintDiag; use rustc_session::lint::builtin::BREAK_WITH_LABEL_AND_LOOP; +use rustc_span::edition::Edition; use rustc_span::source_map::{self, Spanned}; use rustc_span::{BytePos, ErrorGuaranteed, Ident, Pos, Span, Symbol, kw, sym}; use thin_vec::{ThinVec, thin_vec}; @@ -345,7 +345,7 @@ impl<'a> Parser<'a> { fn error_found_expr_would_be_stmt(&self, lhs: &Expr) { self.dcx().emit_err(errors::FoundExprWouldBeStmt { span: self.token.span, - token: self.token.clone(), + token: self.token, suggestion: ExprParenthesesNeeded::surrounding(lhs.span), }); } @@ -360,7 +360,7 @@ impl<'a> Parser<'a> { ( Some( AssocOp::Binary(BinOpKind::Shr | BinOpKind::Gt | BinOpKind::Ge) - | AssocOp::AssignOp(BinOpKind::Shr), + | AssocOp::AssignOp(AssignOpKind::ShrAssign), ), _, ) if self.restrictions.contains(Restrictions::CONST_EXPR) => { @@ -418,7 +418,7 @@ impl<'a> Parser<'a> { cur_op_span: Span, ) -> PResult<'a, P> { let rhs = if self.is_at_start_of_range_notation_rhs() { - let maybe_lt = self.token.clone(); + let maybe_lt = self.token; let attrs = self.parse_outer_attributes()?; Some( self.parse_expr_assoc_with(Bound::Excluded(prec), attrs) @@ -437,7 +437,7 @@ impl<'a> Parser<'a> { fn is_at_start_of_range_notation_rhs(&self) -> bool { if self.token.can_begin_expr() { // Parse `for i in 1.. { }` as infinite loop, not as `for i in (1..{})`. - if self.token == token::OpenDelim(Delimiter::Brace) { + if self.token == token::OpenBrace { return !self.restrictions.contains(Restrictions::NO_STRUCT_LITERAL); } true @@ -543,8 +543,8 @@ impl<'a> Parser<'a> { } // Recover from `++x`: token::Plus if this.look_ahead(1, |t| *t == token::Plus) => { - let starts_stmt = this.prev_token == token::Semi - || this.prev_token == token::CloseDelim(Delimiter::Brace); + let starts_stmt = + this.prev_token == token::Semi || this.prev_token == token::CloseBrace; let pre_span = this.token.span.to(this.look_ahead(1, |t| t.span)); // Eat both `+`s. this.bump(); @@ -605,14 +605,14 @@ impl<'a> Parser<'a> { // can't continue an expression after an ident token::Ident(name, is_raw) => token::ident_can_begin_expr(name, t.span, is_raw), token::Literal(..) | token::Pound => true, - _ => t.is_whole_expr(), + _ => t.is_metavar_expr(), }; self.token.is_ident_named(sym::not) && self.look_ahead(1, token_cannot_continue_expr) } /// Recover on `not expr` in favor of `!expr`. fn recover_not_expr(&mut self, lo: Span) -> PResult<'a, (Span, ExprKind)> { - let negated_token = self.look_ahead(1, |t| t.clone()); + let negated_token = self.look_ahead(1, |t| *t); let sub_diag = if negated_token.is_numeric_lit() { errors::NotAsNegationOperatorSub::SuggestNotBitwise @@ -638,7 +638,12 @@ impl<'a> Parser<'a> { /// Returns the span of expr if it was not interpolated, or the span of the interpolated token. fn interpolated_or_expr_span(&self, expr: &Expr) -> Span { match self.prev_token.kind { - TokenKind::NtIdent(..) | TokenKind::NtLifetime(..) | TokenKind::Interpolated(..) => { + token::NtIdent(..) | token::NtLifetime(..) => self.prev_token.span, + token::CloseInvisible(InvisibleOrigin::MetaVar(_)) => { + // `expr.span` is the interpolated span, because invisible open + // and close delims both get marked with the same span, one + // that covers the entire thing between them. (See + // `rustc_expand::mbe::transcribe::transcribe`.) self.prev_token.span } _ => expr.span, @@ -823,6 +828,18 @@ impl<'a> Parser<'a> { if let Some(lt) = lifetime { self.error_remove_borrow_lifetime(span, lt.ident.span.until(expr.span)); } + + // Add expected tokens if we parsed `&raw` as an expression. + // This will make sure we see "expected `const`, `mut`", and + // guides recovery in case we write `&raw expr`. + if borrow_kind == ast::BorrowKind::Ref + && mutbl == ast::Mutability::Not + && matches!(&expr.kind, ExprKind::Path(None, p) if p.is_ident(kw::Raw)) + { + self.expected_token_types.insert(TokenType::KwMut); + self.expected_token_types.insert(TokenType::KwConst); + } + Ok((span, ExprKind::AddrOf(borrow_kind, mutbl, expr))) } @@ -896,8 +913,8 @@ impl<'a> Parser<'a> { return Ok(e); } e = match self.token.kind { - token::OpenDelim(Delimiter::Parenthesis) => self.parse_expr_fn_call(lo, e), - token::OpenDelim(Delimiter::Bracket) => self.parse_expr_index(lo, e)?, + token::OpenParen => self.parse_expr_fn_call(lo, e), + token::OpenBracket => self.parse_expr_index(lo, e)?, _ => return Ok(e), } } @@ -979,12 +996,30 @@ impl<'a> Parser<'a> { } fn error_unexpected_after_dot(&self) { - let actual = pprust::token_to_string(&self.token); + let actual = super::token_descr(&self.token); let span = self.token.span; let sm = self.psess.source_map(); let (span, actual) = match (&self.token.kind, self.subparser_name) { - (token::Eof, Some(_)) if let Ok(actual) = sm.span_to_snippet(sm.next_point(span)) => { - (span.shrink_to_hi(), actual.into()) + (token::Eof, Some(_)) if let Ok(snippet) = sm.span_to_snippet(sm.next_point(span)) => { + (span.shrink_to_hi(), format!("`{}`", snippet)) + } + (token::CloseInvisible(InvisibleOrigin::MetaVar(_)), _) => { + // No need to report an error. This case will only occur when parsing a pasted + // metavariable, and we should have emitted an error when parsing the macro call in + // the first place. E.g. in this code: + // ``` + // macro_rules! m { ($e:expr) => { $e }; } + // + // fn main() { + // let f = 1; + // m!(f.); + // } + // ``` + // we'll get an error "unexpected token: `)` when parsing the `m!(f.)`, so we don't + // want to issue a second error when parsing the expansion `«f.»` (where `«`/`»` + // represent the invisible delimiters). + self.dcx().span_delayed_bug(span, "bad dot expr in metavariable"); + return; } _ => (span, actual), }; @@ -1168,7 +1203,7 @@ impl<'a> Parser<'a> { } } - if matches!(self.token.kind, token::CloseDelim(..) | token::Comma) { + if self.token.kind.close_delim().is_some() || self.token.kind == token::Comma { break; } else if trailing_dot.is_none() { // This loop should only repeat if there is a trailing dot. @@ -1198,7 +1233,7 @@ impl<'a> Parser<'a> { /// Parse a function call expression, `expr(...)`. fn parse_expr_fn_call(&mut self, lo: Span, fun: P) -> P { - let snapshot = if self.token == token::OpenDelim(Delimiter::Parenthesis) { + let snapshot = if self.token == token::OpenParen { Some((self.create_snapshot_for_diagnostic(), fun.kind.clone())) } else { None @@ -1294,7 +1329,7 @@ impl<'a> Parser<'a> { /// Assuming we have just parsed `.`, continue parsing into an expression. fn parse_dot_suffix(&mut self, self_arg: P, lo: Span) -> PResult<'a, P> { - if self.token.uninterpolated_span().at_least_rust_2018() && self.eat_keyword(exp!(Await)) { + if self.token_uninterpolated_span().at_least_rust_2018() && self.eat_keyword(exp!(Await)) { return Ok(self.mk_await_expr(self_arg, lo)); } @@ -1362,22 +1397,34 @@ impl<'a> Parser<'a> { maybe_recover_from_interpolated_ty_qpath!(self, true); let span = self.token.span; - if let token::Interpolated(nt) = &self.token.kind { - match &**nt { - token::NtExpr(e) | token::NtLiteral(e) => { - let e = e.clone(); - self.bump(); - return Ok(e); - } - token::NtBlock(block) => { - let block = block.clone(); - self.bump(); - return Ok(self.mk_expr(self.prev_token.span, ExprKind::Block(block, None))); + if let Some(expr) = self.eat_metavar_seq_with_matcher( + |mv_kind| matches!(mv_kind, MetaVarKind::Expr { .. }), + |this| { + // Force collection (as opposed to just `parse_expr`) is required to avoid the + // attribute duplication seen in #138478. + let expr = this.parse_expr_force_collect(); + // FIXME(nnethercote) Sometimes with expressions we get a trailing comma, possibly + // related to the FIXME in `collect_tokens_for_expr`. Examples are the multi-line + // `assert_eq!` calls involving arguments annotated with `#[rustfmt::skip]` in + // `compiler/rustc_index/src/bit_set/tests.rs`. + if this.token.kind == token::Comma { + this.bump(); } - }; - } else if let Some(path) = self.eat_metavar_seq(MetaVarKind::Path, |this| { - this.collect_tokens_no_attrs(|this| this.parse_path(PathStyle::Type)) - }) { + expr + }, + ) { + return Ok(expr); + } else if let Some(lit) = + self.eat_metavar_seq(MetaVarKind::Literal, |this| this.parse_literal_maybe_minus()) + { + return Ok(lit); + } else if let Some(block) = + self.eat_metavar_seq(MetaVarKind::Block, |this| this.parse_block()) + { + return Ok(self.mk_expr(span, ExprKind::Block(block, None))); + } else if let Some(path) = + self.eat_metavar_seq(MetaVarKind::Path, |this| this.parse_path(PathStyle::Type)) + { return Ok(self.mk_expr(span, ExprKind::Path(None, path))); } @@ -1471,9 +1518,9 @@ impl<'a> Parser<'a> { this.parse_expr_let(restrictions) } else if this.eat_keyword(exp!(Underscore)) { Ok(this.mk_expr(this.prev_token.span, ExprKind::Underscore)) - } else if this.token.uninterpolated_span().at_least_rust_2018() { + } else if this.token_uninterpolated_span().at_least_rust_2018() { // `Span::at_least_rust_2018()` is somewhat expensive; don't get it repeatedly. - if this.token.uninterpolated_span().at_least_rust_2024() + if this.token_uninterpolated_span().at_least_rust_2024() // check for `gen {}` and `gen move {}` // or `async gen {}` and `async gen move {}` && (this.is_gen_block(kw::Gen, 0) @@ -1572,7 +1619,7 @@ impl<'a> Parser<'a> { } fn parse_expr_path_start(&mut self) -> PResult<'a, P> { - let maybe_eq_tok = self.prev_token.clone(); + let maybe_eq_tok = self.prev_token; let (qself, path) = if self.eat_lt() { let lt_span = self.prev_token.span; let (qself, path) = self.parse_qpath(PathStyle::Expr).map_err(|mut err| { @@ -1630,14 +1677,11 @@ impl<'a> Parser<'a> { self.parse_expr_for(label, lo) } else if self.eat_keyword(exp!(Loop)) { self.parse_expr_loop(label, lo) - } else if self.check_noexpect(&token::OpenDelim(Delimiter::Brace)) - || self.token.is_whole_block() - { + } else if self.check_noexpect(&token::OpenBrace) || self.token.is_metavar_block() { self.parse_expr_block(label, lo, BlockCheckMode::Default) } else if !ate_colon && self.may_recover() - && (matches!(self.token.kind, token::CloseDelim(_) | token::Comma) - || self.token.is_punct()) + && (self.token.kind.close_delim().is_some() || self.token.is_punct()) && could_be_unclosed_char_literal(label_.ident) { let (lit, _) = @@ -1832,19 +1876,21 @@ impl<'a> Parser<'a> { }, }); Some(lexpr) - } else if self.token != token::OpenDelim(Delimiter::Brace) + } else if self.token != token::OpenBrace || !self.restrictions.contains(Restrictions::NO_STRUCT_LITERAL) { let mut expr = self.parse_expr_opt()?; if let Some(expr) = &mut expr { if label.is_some() - && matches!( - expr.kind, + && match &expr.kind { ExprKind::While(_, _, None) - | ExprKind::ForLoop { label: None, .. } - | ExprKind::Loop(_, None, _) - | ExprKind::Block(_, None) - ) + | ExprKind::ForLoop { label: None, .. } + | ExprKind::Loop(_, None, _) => true, + ExprKind::Block(block, None) => { + matches!(block.rules, BlockCheckMode::Default) + } + _ => false, + } { self.psess.buffer_lint( BREAK_WITH_LABEL_AND_LOOP, @@ -1968,7 +2014,7 @@ impl<'a> Parser<'a> { // Eat tokens until the macro call ends. if self.may_recover() { - while !matches!(self.token.kind, token::CloseDelim(..) | token::Eof) { + while !self.token.kind.is_close_delim_or_eof() { self.bump(); } } @@ -2033,7 +2079,7 @@ impl<'a> Parser<'a> { &mut self, mk_lit_char: impl FnOnce(Symbol, Span) -> L, ) -> PResult<'a, L> { - let token = self.token.clone(); + let token = self.token; let err = |self_: &Self| { let msg = format!("unexpected token: {}", super::token_descr(&token)); self_.dcx().struct_span_err(token.span, msg) @@ -2062,87 +2108,110 @@ impl<'a> Parser<'a> { .or_else(|()| self.handle_missing_lit(Parser::mk_meta_item_lit_char)) } - fn recover_after_dot(&mut self) -> Option { - let mut recovered = None; + fn recover_after_dot(&mut self) { if self.token == token::Dot { // Attempt to recover `.4` as `0.4`. We don't currently have any syntax where // dot would follow an optional literal, so we do this unconditionally. - recovered = self.look_ahead(1, |next_token| { + let recovered = self.look_ahead(1, |next_token| { + // If it's an integer that looks like a float, then recover as such. + // + // We will never encounter the exponent part of a floating + // point literal here, since there's no use of the exponent + // syntax that also constitutes a valid integer, so we need + // not check for that. if let token::Literal(token::Lit { kind: token::Integer, symbol, suffix }) = next_token.kind + && suffix.is_none_or(|s| s == sym::f32 || s == sym::f64) + && symbol.as_str().chars().all(|c| c.is_numeric() || c == '_') + && self.token.span.hi() == next_token.span.lo() { - // If this integer looks like a float, then recover as such. - // - // We will never encounter the exponent part of a floating - // point literal here, since there's no use of the exponent - // syntax that also constitutes a valid integer, so we need - // not check for that. - if suffix.is_none_or(|s| s == sym::f32 || s == sym::f64) - && symbol.as_str().chars().all(|c| c.is_numeric() || c == '_') - && self.token.span.hi() == next_token.span.lo() - { - let s = String::from("0.") + symbol.as_str(); - let kind = TokenKind::lit(token::Float, Symbol::intern(&s), suffix); - return Some(Token::new(kind, self.token.span.to(next_token.span))); - } + let s = String::from("0.") + symbol.as_str(); + let kind = TokenKind::lit(token::Float, Symbol::intern(&s), suffix); + Some(Token::new(kind, self.token.span.to(next_token.span))) + } else { + None } - None }); - if let Some(token) = &recovered { - self.bump(); + if let Some(recovered) = recovered { self.dcx().emit_err(errors::FloatLiteralRequiresIntegerPart { - span: token.span, - suggestion: token.span.shrink_to_lo(), + span: recovered.span, + suggestion: recovered.span.shrink_to_lo(), }); + self.bump(); + self.token = recovered; } } + } - recovered + /// Keep this in sync with `Token::can_begin_literal_maybe_minus` and + /// `Lit::from_token` (excluding unary negation). + fn eat_token_lit(&mut self) -> Option { + let check_expr = |expr: P| { + if let ast::ExprKind::Lit(token_lit) = expr.kind { + Some(token_lit) + } else if let ast::ExprKind::Unary(UnOp::Neg, inner) = &expr.kind + && let ast::Expr { kind: ast::ExprKind::Lit(_), .. } = **inner + { + None + } else { + panic!("unexpected reparsed expr/literal: {:?}", expr.kind); + } + }; + match self.token.uninterpolate().kind { + token::Ident(name, IdentIsRaw::No) if name.is_bool_lit() => { + self.bump(); + Some(token::Lit::new(token::Bool, name, None)) + } + token::Literal(token_lit) => { + self.bump(); + Some(token_lit) + } + token::OpenInvisible(InvisibleOrigin::MetaVar(MetaVarKind::Literal)) => { + let lit = self + .eat_metavar_seq(MetaVarKind::Literal, |this| this.parse_literal_maybe_minus()) + .expect("metavar seq literal"); + check_expr(lit) + } + token::OpenInvisible(InvisibleOrigin::MetaVar( + mv_kind @ MetaVarKind::Expr { can_begin_literal_maybe_minus: true, .. }, + )) => { + let expr = self + .eat_metavar_seq(mv_kind, |this| this.parse_expr()) + .expect("metavar seq expr"); + check_expr(expr) + } + _ => None, + } } /// Matches `lit = true | false | token_lit`. /// Returns `None` if the next token is not a literal. - pub(super) fn parse_opt_token_lit(&mut self) -> Option<(token::Lit, Span)> { - let recovered = self.recover_after_dot(); - let token = recovered.as_ref().unwrap_or(&self.token); - let span = token.span; - - token::Lit::from_token(token).map(|token_lit| { - self.bump(); - (token_lit, span) - }) + fn parse_opt_token_lit(&mut self) -> Option<(token::Lit, Span)> { + self.recover_after_dot(); + let span = self.token.span; + self.eat_token_lit().map(|token_lit| (token_lit, span)) } /// Matches `lit = true | false | token_lit`. /// Returns `None` if the next token is not a literal. - pub(super) fn parse_opt_meta_item_lit(&mut self) -> Option { - let recovered = self.recover_after_dot(); - let token = recovered.as_ref().unwrap_or(&self.token); - match token::Lit::from_token(token) { - Some(lit) => { - match MetaItemLit::from_token_lit(lit, token.span) { - Ok(lit) => { - self.bump(); - Some(lit) - } - Err(err) => { - let span = token.uninterpolated_span(); - self.bump(); - let guar = report_lit_error(self.psess, err, lit, span); - // Pack possible quotes and prefixes from the original literal into - // the error literal's symbol so they can be pretty-printed faithfully. - let suffixless_lit = token::Lit::new(lit.kind, lit.symbol, None); - let symbol = Symbol::intern(&suffixless_lit.to_string()); - let lit = token::Lit::new(token::Err(guar), symbol, lit.suffix); - Some( - MetaItemLit::from_token_lit(lit, span) - .unwrap_or_else(|_| unreachable!()), - ) - } + fn parse_opt_meta_item_lit(&mut self) -> Option { + self.recover_after_dot(); + let span = self.token.span; + let uninterpolated_span = self.token_uninterpolated_span(); + self.eat_token_lit().map(|token_lit| { + match MetaItemLit::from_token_lit(token_lit, span) { + Ok(lit) => lit, + Err(err) => { + let guar = report_lit_error(&self.psess, err, token_lit, uninterpolated_span); + // Pack possible quotes and prefixes from the original literal into + // the error literal's symbol so they can be pretty-printed faithfully. + let suffixless_lit = token::Lit::new(token_lit.kind, token_lit.symbol, None); + let symbol = Symbol::intern(&suffixless_lit.to_string()); + let token_lit = token::Lit::new(token::Err(guar), symbol, token_lit.suffix); + MetaItemLit::from_token_lit(token_lit, uninterpolated_span).unwrap() } } - None => None, - } + }) } pub(super) fn expect_no_tuple_index_suffix(&self, span: Span, suffix: Symbol) { @@ -2166,9 +2235,10 @@ impl<'a> Parser<'a> { /// Matches `'-' lit | lit` (cf. `ast_validation::AstValidator::check_expr_within_pat`). /// Keep this in sync with `Token::can_begin_literal_maybe_minus`. pub fn parse_literal_maybe_minus(&mut self) -> PResult<'a, P> { - if let token::Interpolated(nt) = &self.token.kind { - match &**nt { - // FIXME(nnethercote) The `NtExpr` case should only match if + if let Some(expr) = self.eat_metavar_seq_with_matcher( + |mv_kind| matches!(mv_kind, MetaVarKind::Expr { .. }), + |this| { + // FIXME(nnethercote) The `expr` case should only match if // `e` is an `ExprKind::Lit` or an `ExprKind::Unary` containing // an `UnOp::Neg` and an `ExprKind::Lit`, like how // `can_begin_literal_maybe_minus` works. But this method has @@ -2178,13 +2248,14 @@ impl<'a> Parser<'a> { // `ExprKind::Path` must be accepted when parsing range // patterns. That requires some care. So for now, we continue // being less strict here than we should be. - token::NtExpr(e) | token::NtLiteral(e) => { - let e = e.clone(); - self.bump(); - return Ok(e); - } - _ => {} - }; + this.parse_expr() + }, + ) { + return Ok(expr); + } else if let Some(lit) = + self.eat_metavar_seq(MetaVarKind::Literal, |this| this.parse_literal_maybe_minus()) + { + return Ok(lit); } let lo = self.token.span; @@ -2200,7 +2271,9 @@ impl<'a> Parser<'a> { } fn is_array_like_block(&mut self) -> bool { - self.look_ahead(1, |t| matches!(t.kind, TokenKind::Ident(..) | TokenKind::Literal(_))) + self.token.kind == TokenKind::OpenBrace + && self + .look_ahead(1, |t| matches!(t.kind, TokenKind::Ident(..) | TokenKind::Literal(_))) && self.look_ahead(2, |t| t == &token::Comma) && self.look_ahead(3, |t| t.can_begin_expr()) } @@ -2212,9 +2285,9 @@ impl<'a> Parser<'a> { let mut snapshot = self.create_snapshot_for_diagnostic(); match snapshot.parse_expr_array_or_repeat(exp!(CloseBrace)) { Ok(arr) => { - let guar = self.dcx().emit_err(errors::ArrayBracketsInsteadOfSpaces { + let guar = self.dcx().emit_err(errors::ArrayBracketsInsteadOfBraces { span: arr.span, - sub: errors::ArrayBracketsInsteadOfSpacesSugg { + sub: errors::ArrayBracketsInsteadOfBracesSugg { left: lo, right: snapshot.prev_token.span, }, @@ -2251,8 +2324,8 @@ impl<'a> Parser<'a> { |p| p.parse_expr(), ) { Ok(_) - // When the close delim is `)`, `token.kind` is expected to be `token::CloseDelim(Delimiter::Parenthesis)`, - // but the actual `token.kind` is `token::CloseDelim(Delimiter::Bracket)`. + // When the close delim is `)`, `token.kind` is expected to be `token::CloseParen`, + // but the actual `token.kind` is `token::CloseBracket`. // This is because the `token.kind` of the close delim is treated as the same as // that of the open delim in `TokenTreesReader::parse_token_tree`, even if the delimiters of them are different. // Therefore, `token.kind` should not be compared here. @@ -2285,7 +2358,7 @@ impl<'a> Parser<'a> { } } - if self.token.is_whole_block() { + if self.token.is_metavar_block() { self.dcx().emit_err(errors::InvalidBlockMacroSegment { span: self.token.span, context: lo.to(self.token.span), @@ -2310,7 +2383,7 @@ impl<'a> Parser<'a> { fn parse_expr_closure(&mut self) -> PResult<'a, P> { let lo = self.token.span; - let before = self.prev_token.clone(); + let before = self.prev_token; let binder = if self.check_keyword(exp!(For)) { let lo = self.token.span; let (lifetime_defs, _) = self.parse_late_bound_lifetime_defs()?; @@ -2328,7 +2401,7 @@ impl<'a> Parser<'a> { let movability = if self.eat_keyword(exp!(Static)) { Movability::Static } else { Movability::Movable }; - let coroutine_kind = if self.token.uninterpolated_span().at_least_rust_2018() { + let coroutine_kind = if self.token_uninterpolated_span().at_least_rust_2018() { self.parse_coroutine_kind(Case::Sensitive) } else { None @@ -2337,23 +2410,21 @@ impl<'a> Parser<'a> { let capture_clause = self.parse_capture_clause()?; let (fn_decl, fn_arg_span) = self.parse_fn_block_decl()?; let decl_hi = self.prev_token.span; - let mut body = match fn_decl.output { + let mut body = match &fn_decl.output { + // No return type. FnRetTy::Default(_) => { let restrictions = self.restrictions - Restrictions::STMT_EXPR - Restrictions::ALLOW_LET; - let prev = self.prev_token.clone(); - let token = self.token.clone(); + let prev = self.prev_token; + let token = self.token; let attrs = self.parse_outer_attributes()?; match self.parse_expr_res(restrictions, attrs) { Ok((expr, _)) => expr, Err(err) => self.recover_closure_body(err, before, prev, token, lo, decl_hi)?, } } - _ => { - // If an explicit return type is given, require a block to appear (RFC 968). - let body_lo = self.token.span; - self.parse_expr_block(None, body_lo, BlockCheckMode::Default)? - } + // Explicit return type (`->`) needs block `-> T { }`. + FnRetTy::Ty(ty) => self.parse_closure_block_body(ty.span)?, }; match coroutine_kind { @@ -2405,6 +2476,49 @@ impl<'a> Parser<'a> { Ok(closure) } + /// If an explicit return type is given, require a block to appear (RFC 968). + fn parse_closure_block_body(&mut self, ret_span: Span) -> PResult<'a, P> { + if self.may_recover() + && self.token.can_begin_expr() + && self.token.kind != TokenKind::OpenBrace + && !self.token.is_metavar_block() + { + let snapshot = self.create_snapshot_for_diagnostic(); + let restrictions = + self.restrictions - Restrictions::STMT_EXPR - Restrictions::ALLOW_LET; + let tok = self.token.clone(); + match self.parse_expr_res(restrictions, AttrWrapper::empty()) { + Ok((expr, _)) => { + let descr = super::token_descr(&tok); + let mut diag = self + .dcx() + .struct_span_err(tok.span, format!("expected `{{`, found {descr}")); + diag.span_label( + ret_span, + "explicit return type requires closure body to be enclosed in braces", + ); + diag.multipart_suggestion_verbose( + "wrap the expression in curly braces", + vec![ + (expr.span.shrink_to_lo(), "{ ".to_string()), + (expr.span.shrink_to_hi(), " }".to_string()), + ], + Applicability::MachineApplicable, + ); + diag.emit(); + return Ok(expr); + } + Err(diag) => { + diag.cancel(); + self.restore_snapshot(snapshot); + } + } + } + + let body_lo = self.token.span; + self.parse_expr_block(None, body_lo, BlockCheckMode::Default) + } + /// Parses an optional `move` or `use` prefix to a closure-like construct. fn parse_capture_clause(&mut self) -> PResult<'a, CaptureBy> { if self.eat_keyword(exp!(Move)) { @@ -2489,7 +2603,10 @@ impl<'a> Parser<'a> { /// Parses an `if` expression (`if` token already eaten). fn parse_expr_if(&mut self) -> PResult<'a, P> { let lo = self.prev_token.span; - let cond = self.parse_expr_cond()?; + // Scoping code checks the top level edition of the `if`; let's match it here. + // The `CondChecker` also checks the edition of the `let` itself, just to make sure. + let let_chains_policy = LetChainsPolicy::EditionDependent { current_edition: lo.edition() }; + let cond = self.parse_expr_cond(let_chains_policy)?; self.parse_if_after_cond(lo, cond) } @@ -2549,7 +2666,7 @@ impl<'a> Parser<'a> { } } else { let attrs = self.parse_outer_attributes()?; // For recovery. - let maybe_fatarrow = self.token.clone(); + let maybe_fatarrow = self.token; let block = if self.check(exp!(OpenBrace)) { self.parse_block()? } else if let Some(block) = recover_block_from_condition(self) { @@ -2598,18 +2715,17 @@ impl<'a> Parser<'a> { } /// Parses the condition of a `if` or `while` expression. + /// + /// The specified `edition` in `let_chains_policy` should be that of the whole `if` construct, + /// i.e. the same span we use to later decide whether the drop behaviour should be that of + /// edition `..=2021` or that of `2024..`. // Public because it is used in rustfmt forks such as https://github.com/tucant/rustfmt/blob/30c83df9e1db10007bdd16dafce8a86b404329b2/src/parse/macros/html.rs#L57 for custom if expressions. - pub fn parse_expr_cond(&mut self) -> PResult<'a, P> { + pub fn parse_expr_cond(&mut self, let_chains_policy: LetChainsPolicy) -> PResult<'a, P> { let attrs = self.parse_outer_attributes()?; let (mut cond, _) = self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL | Restrictions::ALLOW_LET, attrs)?; - CondChecker::new(self).visit_expr(&mut cond); - - if let ExprKind::Let(_, _, _, Recovered::No) = cond.kind { - // Remove the last feature gating of a `let` expression since it's stable. - self.psess.gated_spans.ungate_last(sym::let_chains, cond.span); - } + CondChecker::new(self, let_chains_policy).visit_expr(&mut cond); Ok(cond) } @@ -2769,7 +2885,7 @@ impl<'a> Parser<'a> { } fn parse_for_head(&mut self) -> PResult<'a, (P, P)> { - let begin_paren = if self.token == token::OpenDelim(Delimiter::Parenthesis) { + let begin_paren = if self.token == token::OpenParen { // Record whether we are about to parse `for (`. // This is used below for recovery in case of `for ( $stuff ) $block` // in which case we will suggest `for $stuff $block`. @@ -2803,7 +2919,7 @@ impl<'a> Parser<'a> { return Err(err); } }; - return if self.token == token::CloseDelim(Delimiter::Parenthesis) { + return if self.token == token::CloseParen { // We know for sure we have seen `for ($SOMETHING in $EXPR)`, so we recover the // parser state and emit a targeted suggestion. let span = vec![start_span, self.token.span]; @@ -2836,7 +2952,7 @@ impl<'a> Parser<'a> { /// Parses `for await? in ` (`for` token already eaten). fn parse_expr_for(&mut self, opt_label: Option